diff --git a/compiler/rustc_codegen_llvm/src/llvm_util.rs b/compiler/rustc_codegen_llvm/src/llvm_util.rs
index 6412a537a7933..507cbf20d89e5 100644
--- a/compiler/rustc_codegen_llvm/src/llvm_util.rs
+++ b/compiler/rustc_codegen_llvm/src/llvm_util.rs
@@ -273,6 +273,12 @@ pub(crate) fn to_llvm_features<'a>(sess: &Session, s: &'a str) -> Option<LLVMFea
         ("aarch64", "fpmr") => None, // only existed in 18
         ("arm", "fp16") => Some(LLVMFeature::new("fullfp16")),
         // Filter out features that are not supported by the current LLVM version
+        ("loongarch64", "div32" | "lam-bh" | "lamcas" | "ld-seq-sa" | "scq")
+            if get_version().0 < 20 =>
+        {
+            None
+        }
+        // Filter out features that are not supported by the current LLVM version
         ("riscv32" | "riscv64", "zacas") if get_version().0 < 20 => None,
         // Enable the evex512 target feature if an avx512 target feature is enabled.
         ("x86", s) if s.starts_with("avx512") => {
diff --git a/compiler/rustc_hir_typeck/src/writeback.rs b/compiler/rustc_hir_typeck/src/writeback.rs
index c5000171ad75e..b70e381c5646e 100644
--- a/compiler/rustc_hir_typeck/src/writeback.rs
+++ b/compiler/rustc_hir_typeck/src/writeback.rs
@@ -1,6 +1,12 @@
-// Type resolution: the phase that finds all the types in the AST with
-// unresolved type variables and replaces "ty_var" types with their
-// generic parameters.
+//! During type inference, partially inferred terms are
+//! represented using inference variables (`ty::Infer`). These don't appear in
+//! the final [`ty::TypeckResults`] since all of the types should have been
+//! inferred once typeck is done.
+//!
+//! While type inference is running, however, updating the typeck results
+//! every time a new type is inferred would be unreasonably slow, so instead all
+//! of the replacement happens at the end in [`FnCtxt::resolve_type_vars_in_body`],
+//! which creates a new `TypeckResults` that doesn't contain any inference variables.
 
 use std::mem;
 
@@ -27,15 +33,6 @@ use crate::FnCtxt;
 ///////////////////////////////////////////////////////////////////////////
 // Entry point
 
-// During type inference, partially inferred types are
-// represented using Type variables (ty::Infer). These don't appear in
-// the final TypeckResults since all of the types should have been
-// inferred once typeck is done.
-// When type inference is running however, having to update the typeck
-// typeck results every time a new type is inferred would be unreasonably slow,
-// so instead all of the replacement happens at the end in
-// resolve_type_vars_in_body, which creates a new TypeTables which
-// doesn't contain any inference types.
 impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
     pub(crate) fn resolve_type_vars_in_body(
         &self,
@@ -90,14 +87,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
     }
 }
 
-///////////////////////////////////////////////////////////////////////////
-// The Writeback context. This visitor walks the HIR, checking the
-// fn-specific typeck results to find references to types or regions. It
-// resolves those regions to remove inference variables and writes the
-// final result back into the master typeck results in the tcx. Here and
-// there, it applies a few ad-hoc checks that were not convenient to
-// do elsewhere.
-
+/// The Writeback context. This visitor walks the HIR, checking the
+/// fn-specific typeck results to find inference variables. It resolves
+/// those inference variables and writes the final result into the
+/// `TypeckResults`. It also applies a few ad-hoc checks that were not
+/// convenient to do elsewhere.
 struct WritebackCx<'cx, 'tcx> {
     fcx: &'cx FnCtxt<'cx, 'tcx>,
 
@@ -897,7 +891,7 @@ impl<'cx, 'tcx> Resolver<'cx, 'tcx> {
             let cause = ObligationCause::misc(self.span.to_span(tcx), body_id);
             let at = self.fcx.at(&cause, self.fcx.param_env);
             let universes = vec![None; outer_exclusive_binder(value).as_usize()];
-            match solve::deeply_normalize_with_skipped_universes_and_ambiguous_goals(
+            match solve::deeply_normalize_with_skipped_universes_and_ambiguous_coroutine_goals(
                 at, value, universes,
             ) {
                 Ok((value, goals)) => {
diff --git a/compiler/rustc_middle/src/mir/interpret/queries.rs b/compiler/rustc_middle/src/mir/interpret/queries.rs
index 4222a68e5447d..e5d1dda3aa0f4 100644
--- a/compiler/rustc_middle/src/mir/interpret/queries.rs
+++ b/compiler/rustc_middle/src/mir/interpret/queries.rs
@@ -115,15 +115,16 @@ impl<'tcx> TyCtxt<'tcx> {
                     // @lcnr believes that successfully evaluating even though there are
                     // used generic parameters is a bug of evaluation, so checking for it
                     // here does feel somewhat sensible.
-                    if !self.features().generic_const_exprs() && ct.args.has_non_region_param() {
-                        let def_kind = self.def_kind(instance.def_id());
-                        assert!(
-                            matches!(
-                                def_kind,
-                                DefKind::InlineConst | DefKind::AnonConst | DefKind::AssocConst
-                            ),
-                            "{cid:?} is {def_kind:?}",
-                        );
+                    if !self.features().generic_const_exprs()
+                        && ct.args.has_non_region_param()
+                        // We only FCW for anon consts, as repeat expression counts with anon consts are the
+                        // only place that we have a back-compat hack for. We don't need to check that this is
+                        // a const argument, as only anon consts used as const args should get evaluated "for
+                        // the type system".
+                        //
+                        // If we don't *only* FCW anon consts, we can wind up incorrectly FCW'ing uses of assoc
+                        // consts in pattern positions. #140447
+                        && self.def_kind(instance.def_id()) == DefKind::AnonConst
+                    {
                         let mir_body = self.mir_for_ctfe(instance.def_id());
                         if mir_body.is_polymorphic {
                             let Some(local_def_id) = ct.def.as_local() else { return };
diff --git a/compiler/rustc_mir_transform/src/elaborate_drop.rs b/compiler/rustc_mir_transform/src/elaborate_drop.rs
index 6f867f8105d50..73a58160a6aac 100644
--- a/compiler/rustc_mir_transform/src/elaborate_drop.rs
+++ b/compiler/rustc_mir_transform/src/elaborate_drop.rs
@@ -376,7 +376,7 @@ where
         if self.tcx().features().async_drop()
             && self.elaborator.body().coroutine.is_some()
             && self.elaborator.allow_async_drops()
-            && !self.elaborator.body()[bb].is_cleanup
+            && !self.elaborator.patch_ref().block(self.elaborator.body(), bb).is_cleanup
             && drop_ty.needs_async_drop(self.tcx(), self.elaborator.typing_env())
         {
             self.build_async_drop(
diff --git a/compiler/rustc_mir_transform/src/patch.rs b/compiler/rustc_mir_transform/src/patch.rs
index c7eb2a921c735..a872eae15f185 100644
--- a/compiler/rustc_mir_transform/src/patch.rs
+++ b/compiler/rustc_mir_transform/src/patch.rs
@@ -148,11 +148,20 @@ impl<'tcx> MirPatch<'tcx> {
         self.term_patch_map[bb].is_some()
     }
 
+    /// Returns the data for block `bb`, whether it is one of the 'old' blocks in
+    /// `body` or one of the blocks added by this patch.
+    pub(crate) fn block<'a>(
+        &'a self,
+        body: &'a Body<'tcx>,
+        bb: BasicBlock,
+    ) -> &'a BasicBlockData<'tcx> {
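+        // Blocks added by this patch are numbered after the blocks already in
+        // `body`, so indices past `body.basic_blocks.len()` select entries of
+        // `self.new_blocks`.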
+        match bb.index().checked_sub(body.basic_blocks.len()) {
+            Some(new) => &self.new_blocks[new],
+            None => &body[bb],
+        }
+    }
+
     pub(crate) fn terminator_loc(&self, body: &Body<'tcx>, bb: BasicBlock) -> Location {
-        let offset = match bb.index().checked_sub(body.basic_blocks.len()) {
-            Some(index) => self.new_blocks[index].statements.len(),
-            None => body[bb].statements.len(),
-        };
+        let offset = self.block(body, bb).statements.len();
         Location { block: bb, statement_index: offset }
     }
 
@@ -284,10 +293,7 @@ impl<'tcx> MirPatch<'tcx> {
     }
 
     pub(crate) fn source_info_for_location(&self, body: &Body<'tcx>, loc: Location) -> SourceInfo {
-        let data = match loc.block.index().checked_sub(body.basic_blocks.len()) {
-            Some(new) => &self.new_blocks[new],
-            None => &body[loc.block],
-        };
+        let data = self.block(body, loc.block);
         Self::source_info_for_index(data, loc)
     }
 }
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 5500fba58a534..5d8479f0ae642 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -62,13 +62,62 @@ mod mut_visit {
 }
 
 bitflags::bitflags! {
+    /// Restrictions applied while parsing.
+    ///
+    /// The parser maintains a bitset of restrictions it will honor while
+    /// parsing. This is essentially a way of tracking what is currently being
+    /// parsed so that the parser can adjust its behavior accordingly.
     #[derive(Clone, Copy, Debug)]
     struct Restrictions: u8 {
+        /// Restricts expressions for use in statement position.
+        ///
+        /// When expressions are used in various places, like statements or
+        /// match arms, this is used to stop parsing once certain tokens are
+        /// reached.
+        ///
+        /// For example, with `STMT_EXPR` in effect, `if true {} & 1` is parsed
+        /// as two separate expression statements (the `if` and a reference to `1`).
+        /// Otherwise it is parsed as a single bitwise AND expression with the `if`
+        /// on the left and `1` on the right.
         const STMT_EXPR         = 1 << 0;
+        /// Do not allow struct literals.
+        ///
+        /// There are several places in the grammar where we don't want to
+        /// allow struct literals because they can require lookahead, or
+        /// otherwise could be ambiguous or cause confusion. For example, in
+        /// `if Foo {} {}` it isn't clear whether `Foo {}` is a struct literal
+        /// used as the condition, or whether just `Foo` is the condition,
+        /// followed by a consequent block and then an empty block.
+        ///
+        /// See [RFC 92](https://rust-lang.github.io/rfcs/0092-struct-grammar.html).
         const NO_STRUCT_LITERAL = 1 << 1;
+        /// Used to provide better error messages for const generic arguments.
+        ///
+        /// An un-braced const generic argument is limited to a very small
+        /// subset of expressions. This is used to detect when an expression
+        /// outside of that subset is used, and to suggest wrapping the
+        /// expression in braces.
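+        ///
+        /// For example, `Foo<N + 1>` is rejected with a suggestion to write
+        /// `Foo<{ N + 1 }>` instead.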
         const CONST_EXPR        = 1 << 2;
+        /// Allows `let` expressions.
+        ///
+        /// `let pattern = scrutinee` is parsed as an expression, but it is
+        /// only allowed in let chains (`if` and `while` conditions).
+        /// Otherwise it is not an expression (note that `let` in statement
+        /// position is treated as a `StmtKind::Let` statement, which has a
+        /// slightly different grammar).
         const ALLOW_LET         = 1 << 3;
+        /// Used to detect a missing `=>` in a match guard.
+        ///
+        /// This is set while parsing the guard expression so that a better
+        /// error message can be given if the `=>` is missing.
         const IN_IF_GUARD       = 1 << 4;
+        /// Used to detect the incorrect use of expressions in patterns.
+        ///
+        /// This is used for error handling while parsing a pattern. During
+        /// error recovery, it is set to try parsing the pattern as an
+        /// expression, halting once certain tokens like `=` are reached.
         const IS_PAT            = 1 << 5;
     }
 }
diff --git a/compiler/rustc_target/src/target_features.rs b/compiler/rustc_target/src/target_features.rs
index 69c8b9119ab23..007bfea887c80 100644
--- a/compiler/rustc_target/src/target_features.rs
+++ b/compiler/rustc_target/src/target_features.rs
@@ -102,6 +102,9 @@ impl Stability {
 // check whether they're named already elsewhere in rust
 // e.g. in stdarch and whether the given name matches LLVM's
 // if it doesn't, to_llvm_feature in llvm_util in rustc_codegen_llvm needs to be adapted.
+// Additionally, if a feature is not available in the older LLVM versions supported by the current
+// rustc, the same function must be updated to filter the feature out there, to avoid triggering
+// warnings.
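+// (For an example of such version gating, see the `loongarch64` and `riscv` arms in
+// `to_llvm_features`.)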
 //
 // Also note that all target features listed here must be purely additive: for target_feature 1.1 to
 // be sound, we can never allow features like `+soft-float` (on x86) to be controlled on a
diff --git a/compiler/rustc_trait_selection/src/solve.rs b/compiler/rustc_trait_selection/src/solve.rs
index 0c2451a80a705..5a5d16167d28d 100644
--- a/compiler/rustc_trait_selection/src/solve.rs
+++ b/compiler/rustc_trait_selection/src/solve.rs
@@ -11,6 +11,6 @@ pub use fulfill::{FulfillmentCtxt, NextSolverError};
 pub(crate) use normalize::deeply_normalize_for_diagnostics;
 pub use normalize::{
     deeply_normalize, deeply_normalize_with_skipped_universes,
-    deeply_normalize_with_skipped_universes_and_ambiguous_goals,
+    deeply_normalize_with_skipped_universes_and_ambiguous_coroutine_goals,
 };
 pub use select::InferCtxtSelectExt;
diff --git a/compiler/rustc_trait_selection/src/solve/normalize.rs b/compiler/rustc_trait_selection/src/solve/normalize.rs
index 5f1e63ab225ab..d903f94b489d3 100644
--- a/compiler/rustc_trait_selection/src/solve/normalize.rs
+++ b/compiler/rustc_trait_selection/src/solve/normalize.rs
@@ -45,9 +45,11 @@ where
     T: TypeFoldable<TyCtxt<'tcx>>,
     E: FromSolverError<'tcx, NextSolverError<'tcx>>,
 {
-    let (value, goals) =
-        deeply_normalize_with_skipped_universes_and_ambiguous_goals(at, value, universes)?;
-    assert_eq!(goals, vec![]);
+    let (value, coroutine_goals) =
+        deeply_normalize_with_skipped_universes_and_ambiguous_coroutine_goals(
+            at, value, universes,
+        )?;
+    assert_eq!(coroutine_goals, vec![]);
 
     Ok(value)
 }
@@ -59,9 +61,9 @@ where
 /// entered before passing `value` to the function. This is currently needed for
 /// `normalize_erasing_regions`, which skips binders as it walks through a type.
 ///
-/// This returns a set of stalled obligations if the typing mode of the underlying infcx
-/// has any stalled coroutine def ids.
-pub fn deeply_normalize_with_skipped_universes_and_ambiguous_goals<'tcx, T, E>(
+/// This returns a set of stalled obligations involving coroutines if the typing mode of
+/// the underlying infcx has any stalled coroutine def ids.
+pub fn deeply_normalize_with_skipped_universes_and_ambiguous_coroutine_goals<'tcx, T, E>(
     at: At<'_, 'tcx>,
     value: T,
     universes: Vec<Option<UniverseIndex>>,
@@ -71,11 +73,16 @@ where
     E: FromSolverError<'tcx, NextSolverError<'tcx>>,
 {
     let fulfill_cx = FulfillmentCtxt::new(at.infcx);
-    let mut folder =
-        NormalizationFolder { at, fulfill_cx, depth: 0, universes, stalled_goals: vec![] };
+    let mut folder = NormalizationFolder {
+        at,
+        fulfill_cx,
+        depth: 0,
+        universes,
+        stalled_coroutine_goals: vec![],
+    };
     let value = value.try_fold_with(&mut folder)?;
     let errors = folder.fulfill_cx.select_all_or_error(at.infcx);
-    if errors.is_empty() { Ok((value, folder.stalled_goals)) } else { Err(errors) }
+    if errors.is_empty() { Ok((value, folder.stalled_coroutine_goals)) } else { Err(errors) }
 }
 
 struct NormalizationFolder<'me, 'tcx, E> {
@@ -83,7 +90,7 @@ struct NormalizationFolder<'me, 'tcx, E> {
     fulfill_cx: FulfillmentCtxt<'tcx, E>,
     depth: usize,
     universes: Vec<Option<UniverseIndex>>,
-    stalled_goals: Vec<Goal<'tcx, ty::Predicate<'tcx>>>,
+    stalled_coroutine_goals: Vec<Goal<'tcx, ty::Predicate<'tcx>>>,
 }
 
 impl<'tcx, E> NormalizationFolder<'_, 'tcx, E>
@@ -182,7 +189,7 @@ where
             return Err(errors);
         }
 
-        self.stalled_goals.extend(
+        self.stalled_coroutine_goals.extend(
             self.fulfill_cx
                 .drain_stalled_obligations_for_coroutines(self.at.infcx)
                 .into_iter()
@@ -298,13 +305,13 @@ impl<'tcx> TypeFolder<TyCtxt<'tcx>> for DeeplyNormalizeForDiagnosticsFolder<'_,
 
     fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
         let infcx = self.at.infcx;
-        let result =
-            infcx.commit_if_ok(|_| {
-                deeply_normalize_with_skipped_universes_and_ambiguous_goals::<
-                    _,
-                    ScrubbedTraitError<'tcx>,
-                >(self.at, ty, vec![None; ty.outer_exclusive_binder().as_usize()])
-            });
+        let result: Result<_, Vec<ScrubbedTraitError<'tcx>>> = infcx.commit_if_ok(|_| {
+            deeply_normalize_with_skipped_universes_and_ambiguous_coroutine_goals(
+                self.at,
+                ty,
+                vec![None; ty.outer_exclusive_binder().as_usize()],
+            )
+        });
         match result {
             Ok((ty, _)) => ty,
             Err(_) => ty.super_fold_with(self),
@@ -313,13 +320,13 @@ impl<'tcx> TypeFolder<TyCtxt<'tcx>> for DeeplyNormalizeForDiagnosticsFolder<'_,
 
     fn fold_const(&mut self, ct: ty::Const<'tcx>) -> ty::Const<'tcx> {
         let infcx = self.at.infcx;
-        let result =
-            infcx.commit_if_ok(|_| {
-                deeply_normalize_with_skipped_universes_and_ambiguous_goals::<
-                    _,
-                    ScrubbedTraitError<'tcx>,
-                >(self.at, ct, vec![None; ct.outer_exclusive_binder().as_usize()])
-            });
+        let result: Result<_, Vec<ScrubbedTraitError<'tcx>>> = infcx.commit_if_ok(|_| {
+            deeply_normalize_with_skipped_universes_and_ambiguous_coroutine_goals(
+                self.at,
+                ct,
+                vec![None; ct.outer_exclusive_binder().as_usize()],
+            )
+        });
         match result {
             Ok((ct, _)) => ct,
             Err(_) => ct.super_fold_with(self),
diff --git a/compiler/rustc_trait_selection/src/traits/normalize.rs b/compiler/rustc_trait_selection/src/traits/normalize.rs
index d38ddbc825c4e..5f0acd46f86ae 100644
--- a/compiler/rustc_trait_selection/src/traits/normalize.rs
+++ b/compiler/rustc_trait_selection/src/traits/normalize.rs
@@ -260,11 +260,14 @@ impl<'a, 'b, 'tcx> TypeFolder<TyCtxt<'tcx>> for AssocTypeNormalizer<'a, 'b, 'tcx
             }
 
             ty::Projection if !data.has_escaping_bound_vars() => {
-                // This branch is *mostly* just an optimization: when we don't
-                // have escaping bound vars, we don't need to replace them with
-                // placeholders (see branch below). *Also*, we know that we can
-                // register an obligation to *later* project, since we know
-                // there won't be bound vars there.
+                // When we don't have escaping bound vars, we can normalize ambiguous aliases
+                // to inference variables (done in `normalize_projection_ty`). This would
+                // be wrong if there were escaping bound vars: even if we instantiated
+                // the bound vars with placeholders, we wouldn't be able to map them back
+                // after normalization succeeded.
+                //
+                // Also, as an optimization: when we don't have escaping bound vars, we don't
+                // need to replace them with placeholders (see the branch below).
                 let data = data.fold_with(self);
                 let normalized_ty = project::normalize_projection_ty(
                     self.selcx,
diff --git a/compiler/rustc_transmute/Cargo.toml b/compiler/rustc_transmute/Cargo.toml
index 0250cc0ea0788..246b66d3d0307 100644
--- a/compiler/rustc_transmute/Cargo.toml
+++ b/compiler/rustc_transmute/Cargo.toml
@@ -5,7 +5,6 @@ edition = "2024"
 
 [dependencies]
 # tidy-alphabetical-start
-itertools = "0.12"
 rustc_abi = { path = "../rustc_abi", optional = true }
 rustc_data_structures = { path = "../rustc_data_structures" }
 rustc_hir = { path = "../rustc_hir", optional = true }
@@ -15,6 +14,11 @@ smallvec = "1.8.1"
 tracing = "0.1"
 # tidy-alphabetical-end
 
+[dev-dependencies]
+# tidy-alphabetical-start
+itertools = "0.12"
+# tidy-alphabetical-end
+
 [features]
 rustc = [
     "dep:rustc_abi",
diff --git a/compiler/rustc_transmute/src/layout/dfa.rs b/compiler/rustc_transmute/src/layout/dfa.rs
index d1f58157b696b..05afa28db31a9 100644
--- a/compiler/rustc_transmute/src/layout/dfa.rs
+++ b/compiler/rustc_transmute/src/layout/dfa.rs
@@ -1,5 +1,5 @@
 use std::fmt;
-use std::ops::RangeInclusive;
+use std::iter::Peekable;
 use std::sync::atomic::{AtomicU32, Ordering};
 
 use super::{Byte, Ref, Tree, Uninhabited};
@@ -211,15 +211,15 @@ where
             let b_transitions =
                 b_src.and_then(|b_src| b.transitions.get(&b_src)).unwrap_or(&empty_transitions);
 
-            let byte_transitions =
-                a_transitions.byte_transitions.union(&b_transitions.byte_transitions);
-
-            let byte_transitions = byte_transitions.map_states(|(a_dst, b_dst)| {
-                assert!(a_dst.is_some() || b_dst.is_some());
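+            // Union the outgoing byte edges of the two states. The join closure maps
+            // each merged edge to its (possibly one-sided) pair of destination states
+            // and queues that pair for later processing.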
+            let byte_transitions = a_transitions.byte_transitions.union(
+                &b_transitions.byte_transitions,
+                |a_dst, b_dst| {
+                    assert!(a_dst.is_some() || b_dst.is_some());
 
-                queue.enqueue(a_dst, b_dst);
-                mapped((a_dst, b_dst))
-            });
+                    queue.enqueue(a_dst, b_dst);
+                    mapped((a_dst, b_dst))
+                },
+            );
 
             let ref_transitions =
                 a_transitions.ref_transitions.keys().chain(b_transitions.ref_transitions.keys());
@@ -245,18 +245,6 @@ where
         Self { transitions, start, accept }
     }
 
-    pub(crate) fn states_from(
-        &self,
-        state: State,
-        src_validity: RangeInclusive<u8>,
-    ) -> impl Iterator<Item = (Byte, State)> {
-        self.transitions
-            .get(&state)
-            .map(move |t| t.byte_transitions.states_from(src_validity))
-            .into_iter()
-            .flatten()
-    }
-
     pub(crate) fn get_uninit_edge_dst(&self, state: State) -> Option<State> {
         let transitions = self.transitions.get(&state)?;
         transitions.byte_transitions.get_uninit_edge_dst()
@@ -334,95 +322,31 @@ where
 
 use edge_set::EdgeSet;
 mod edge_set {
-    use std::cmp;
-
-    use run::*;
-    use smallvec::{SmallVec, smallvec};
+    use smallvec::SmallVec;
 
     use super::*;
-    mod run {
-        use std::ops::{Range, RangeInclusive};
-
-        use super::*;
-        use crate::layout::Byte;
-
-        /// A logical set of edges.
-        ///
-        /// A `Run` encodes one edge for every byte value in `start..=end`
-        /// pointing to `dst`.
-        #[derive(Eq, PartialEq, Copy, Clone, Debug)]
-        pub(super) struct Run<S> {
-            // `start` and `end` are both inclusive (ie, closed) bounds, as this
-            // is required in order to be able to store 0..=255. We provide
-            // setters and getters which operate on closed/open ranges, which
-            // are more intuitive and easier for performing offset math.
-            start: u8,
-            end: u8,
-            pub(super) dst: S,
-        }
-
-        impl<S> Run<S> {
-            pub(super) fn new(range: RangeInclusive<u8>, dst: S) -> Self {
-                Self { start: *range.start(), end: *range.end(), dst }
-            }
-
-            pub(super) fn from_inclusive_exclusive(range: Range<u16>, dst: S) -> Self {
-                Self {
-                    start: range.start.try_into().unwrap(),
-                    end: (range.end - 1).try_into().unwrap(),
-                    dst,
-                }
-            }
-
-            pub(super) fn contains(&self, idx: u16) -> bool {
-                idx >= u16::from(self.start) && idx <= u16::from(self.end)
-            }
-
-            pub(super) fn as_inclusive_exclusive(&self) -> (u16, u16) {
-                (u16::from(self.start), u16::from(self.end) + 1)
-            }
-
-            pub(super) fn as_byte(&self) -> Byte {
-                Byte::new(self.start..=self.end)
-            }
 
-            pub(super) fn map_state<SS>(self, f: impl FnOnce(S) -> SS) -> Run<SS> {
-                let Run { start, end, dst } = self;
-                Run { start, end, dst: f(dst) }
-            }
-
-            /// Produces a new `Run` whose lower bound is the greater of
-            /// `self`'s existing lower bound and `lower_bound`.
-            pub(super) fn clamp_lower(self, lower_bound: u8) -> Self {
-                let Run { start, end, dst } = self;
-                Run { start: cmp::max(start, lower_bound), end, dst }
-            }
-        }
-    }
-
-    /// The set of outbound byte edges associated with a DFA node (not including
-    /// reference edges).
+    /// The set of outbound byte edges associated with a DFA node.
     #[derive(Eq, PartialEq, Clone, Debug)]
     pub(super) struct EdgeSet<S = State> {
-        // A sequence of runs stored in ascending order. Since the graph is a
-        // DFA, these must be non-overlapping with one another.
-        runs: SmallVec<[Run<S>; 1]>,
-        // The edge labeled with the uninit byte, if any.
+        // A sequence of byte edges with contiguous byte values and a common
+        // destination is stored as a single run.
         //
-        // FIXME(@joshlf): Make `State` a `NonZero` so that this is NPO'd.
-        uninit: Option<S>,
+        // Runs are non-empty, non-overlapping, and stored in ascending order.
+        runs: SmallVec<[(Byte, S); 1]>,
     }
 
     impl<S> EdgeSet<S> {
-        pub(crate) fn new(byte: Byte, dst: S) -> Self {
-            match byte.range() {
-                Some(range) => Self { runs: smallvec![Run::new(range, dst)], uninit: None },
-                None => Self { runs: SmallVec::new(), uninit: Some(dst) },
+        pub(crate) fn new(range: Byte, dst: S) -> Self {
+            let mut this = Self { runs: SmallVec::new() };
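+            // An empty range encodes no edges, so it is not stored.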
+            if !range.is_empty() {
+                this.runs.push((range, dst));
             }
+            this
         }
 
         pub(crate) fn empty() -> Self {
-            Self { runs: SmallVec::new(), uninit: None }
+            Self { runs: SmallVec::new() }
         }
 
         #[cfg(test)]
@@ -431,43 +355,23 @@ mod edge_set {
             S: Ord,
         {
             edges.sort();
-            Self {
-                runs: edges
-                    .into_iter()
-                    .map(|(byte, state)| Run::new(byte.range().unwrap(), state))
-                    .collect(),
-                uninit: None,
-            }
+            Self { runs: edges.into() }
         }
 
         pub(crate) fn iter(&self) -> impl Iterator<Item = (Byte, S)>
         where
             S: Copy,
         {
-            self.uninit
-                .map(|dst| (Byte::uninit(), dst))
-                .into_iter()
-                .chain(self.runs.iter().map(|run| (run.as_byte(), run.dst)))
-        }
-
-        pub(crate) fn states_from(
-            &self,
-            byte: RangeInclusive<u8>,
-        ) -> impl Iterator<Item = (Byte, S)>
-        where
-            S: Copy,
-        {
-            // FIXME(@joshlf): Optimize this. A manual scan over `self.runs` may
-            // permit us to more efficiently discard runs which will not be
-            // produced by this iterator.
-            self.iter().filter(move |(o, _)| Byte::new(byte.clone()).transmutable_into(&o))
+            self.runs.iter().copied()
         }
 
         pub(crate) fn get_uninit_edge_dst(&self) -> Option<S>
         where
             S: Copy,
         {
-            self.uninit
+            // The uninit byte value (256) sorts above every initialized byte value,
+            // so a run containing uninit, if present, must be the last one.
+            let &(range, dst) = self.runs.last()?;
+            if range.contains_uninit() { Some(dst) } else { None }
         }
 
         pub(crate) fn map_states<SS>(self, mut f: impl FnMut(S) -> SS) -> EdgeSet<SS> {
@@ -478,95 +382,106 @@ mod edge_set {
                 // allocates the correct number of elements once up-front [1].
                 //
                 // [1] https://doc.rust-lang.org/1.85.0/src/alloc/vec/spec_from_iter_nested.rs.html#47
-                runs: self.runs.into_iter().map(|run| run.map_state(&mut f)).collect(),
-                uninit: self.uninit.map(f),
+                runs: self.runs.into_iter().map(|(b, s)| (b, f(s))).collect(),
             }
         }
 
         /// Unions two edge sets together.
         ///
         /// If `u = a.union(b)`, then for each byte value, `u` will have an edge
-        /// with that byte value and with the destination `(Some(_), None)`,
-        /// `(None, Some(_))`, or `(Some(_), Some(_))` depending on whether `a`,
+        /// with that byte value and with the destination `join(Some(_), None)`,
+        /// `join(None, Some(_))`, or `join(Some(_), Some(_))` depending on whether `a`,
         /// `b`, or both have an edge with that byte value.
         ///
         /// If neither `a` nor `b` have an edge with a particular byte value,
         /// then no edge with that value will be present in `u`.
-        pub(crate) fn union(&self, other: &Self) -> EdgeSet<(Option<S>, Option<S>)>
+        pub(crate) fn union(
+            &self,
+            other: &Self,
+            mut join: impl FnMut(Option<S>, Option<S>) -> S,
+        ) -> EdgeSet<S>
         where
             S: Copy,
         {
-            let uninit = match (self.uninit, other.uninit) {
-                (None, None) => None,
-                (s, o) => Some((s, o)),
-            };
-
-            let mut runs = SmallVec::new();
-
-            // Iterate over `self.runs` and `other.runs` simultaneously,
-            // advancing `idx` as we go. At each step, we advance `idx` as far
-            // as we can without crossing a run boundary in either `self.runs`
-            // or `other.runs`.
-
-            // INVARIANT: `idx < s[0].end && idx < o[0].end`.
-            let (mut s, mut o) = (self.runs.as_slice(), other.runs.as_slice());
-            let mut idx = 0u16;
-            while let (Some((s_run, s_rest)), Some((o_run, o_rest))) =
-                (s.split_first(), o.split_first())
-            {
-                let (s_start, s_end) = s_run.as_inclusive_exclusive();
-                let (o_start, o_end) = o_run.as_inclusive_exclusive();
-
-                // Compute `end` as the end of the current run (which starts
-                // with `idx`).
-                let (end, dst) = match (s_run.contains(idx), o_run.contains(idx)) {
-                    // `idx` is in an existing run in both `s` and `o`, so `end`
-                    // is equal to the smallest of the two ends of those runs.
-                    (true, true) => (cmp::min(s_end, o_end), (Some(s_run.dst), Some(o_run.dst))),
-                    // `idx` is in an existing run in `s`, but not in any run in
-                    // `o`. `end` is either the end of the `s` run or the
-                    // beginning of the next `o` run, whichever comes first.
-                    (true, false) => (cmp::min(s_end, o_start), (Some(s_run.dst), None)),
-                    // The inverse of the previous case.
-                    (false, true) => (cmp::min(s_start, o_end), (None, Some(o_run.dst))),
-                    // `idx` is not in a run in either `s` or `o`, so advance it
-                    // to the beginning of the next run.
-                    (false, false) => {
-                        idx = cmp::min(s_start, o_start);
-                        continue;
-                    }
-                };
+            let xs = self.runs.iter().copied();
+            let ys = other.runs.iter().copied();
+            // FIXME(@joshlf): Merge contiguous runs with common destination.
+            EdgeSet { runs: union(xs, ys).map(|(range, (x, y))| (range, join(x, y))).collect() }
+        }
+    }
+}
+
+/// Merges two sorted sequences of runs into one sorted sequence.
+pub(crate) fn union<S: Copy, X: Iterator<Item = (Byte, S)>, Y: Iterator<Item = (Byte, S)>>(
+    xs: X,
+    ys: Y,
+) -> UnionIter<X, Y> {
+    UnionIter { xs: xs.peekable(), ys: ys.peekable() }
+}
+
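+/// Iterator returned by [`union`].
+///
+/// Yields, in ascending order, byte ranges paired with the destination (if any)
+/// that `xs` and `ys` each map that range to; overlapping input ranges are split
+/// at their boundaries.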
+pub(crate) struct UnionIter<X: Iterator, Y: Iterator> {
+    xs: Peekable<X>,
+    ys: Peekable<Y>,
+}
+
+// FIXME(jswrenn) we'd likely benefit from specializing try_fold here.
+impl<S: Copy, X: Iterator<Item = (Byte, S)>, Y: Iterator<Item = (Byte, S)>> Iterator
+    for UnionIter<X, Y>
+{
+    type Item = (Byte, (Option<S>, Option<S>));
 
-                // FIXME(@joshlf): If this is contiguous with the previous run
-                // and has the same `dst`, just merge it into that run rather
-                // than adding a new one.
-                runs.push(Run::from_inclusive_exclusive(idx..end, dst));
-                idx = end;
+    fn next(&mut self) -> Option<Self::Item> {
+        use std::cmp::{self, Ordering};
 
-                if idx >= s_end {
-                    s = s_rest;
+        let ret;
+        match (self.xs.peek_mut(), self.ys.peek_mut()) {
+            (None, None) => {
+                ret = None;
+            }
+            (Some(x), None) => {
+                ret = Some((x.0, (Some(x.1), None)));
+                self.xs.next();
+            }
+            (None, Some(y)) => {
+                ret = Some((y.0, (None, Some(y.1))));
+                self.ys.next();
+            }
+            (Some(x), Some(y)) => {
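+                // Both inputs have a run available. Emit the segment that begins at
+                // the smaller of the two starts and ends at the nearest run boundary,
+                // recording which side(s) cover it.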
+                let start;
+                let end;
+                let dst;
+                match x.0.start.cmp(&y.0.start) {
+                    Ordering::Less => {
+                        start = x.0.start;
+                        end = cmp::min(x.0.end, y.0.start);
+                        dst = (Some(x.1), None);
+                    }
+                    Ordering::Greater => {
+                        start = y.0.start;
+                        end = cmp::min(x.0.start, y.0.end);
+                        dst = (None, Some(y.1));
+                    }
+                    Ordering::Equal => {
+                        start = x.0.start;
+                        end = cmp::min(x.0.end, y.0.end);
+                        dst = (Some(x.1), Some(y.1));
+                    }
                 }
-                if idx >= o_end {
-                    o = o_rest;
+                ret = Some((Byte { start, end }, dst));
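+                // Consume the emitted segment from whichever run(s) it was taken
+                // from; runs that become empty are popped below.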
+                if start == x.0.start {
+                    x.0.start = end;
+                }
+                if start == y.0.start {
+                    y.0.start = end;
+                }
+                if x.0.is_empty() {
+                    self.xs.next();
+                }
+                if y.0.is_empty() {
+                    self.ys.next();
                 }
             }
-
-            // At this point, either `s` or `o` have been exhausted, so the
-            // remaining elements in the other slice are guaranteed to be
-            // non-overlapping. We can add all remaining runs to `runs` with no
-            // further processing.
-            if let Ok(idx) = u8::try_from(idx) {
-                let (slc, map) = if !s.is_empty() {
-                    let map: fn(_) -> _ = |st| (Some(st), None);
-                    (s, map)
-                } else {
-                    let map: fn(_) -> _ = |st| (None, Some(st));
-                    (o, map)
-                };
-                runs.extend(slc.iter().map(|run| run.clamp_lower(idx).map_state(map)));
-            }
-
-            EdgeSet { runs, uninit }
         }
+        ret
     }
 }
diff --git a/compiler/rustc_transmute/src/layout/mod.rs b/compiler/rustc_transmute/src/layout/mod.rs
index 4d5f630ae229e..c08bf440734e2 100644
--- a/compiler/rustc_transmute/src/layout/mod.rs
+++ b/compiler/rustc_transmute/src/layout/mod.rs
@@ -6,61 +6,61 @@ pub(crate) mod tree;
 pub(crate) use tree::Tree;
 
 pub(crate) mod dfa;
-pub(crate) use dfa::Dfa;
+pub(crate) use dfa::{Dfa, union};
 
 #[derive(Debug)]
 pub(crate) struct Uninhabited;
 
-/// A range of byte values, or the uninit byte.
+/// A range of byte values, possibly including the uninit byte value.
 #[derive(Hash, Eq, PartialEq, Ord, PartialOrd, Clone, Copy)]
 pub(crate) struct Byte {
-    // An inclusive-inclusive range. We use this instead of `RangeInclusive`
-    // because `RangeInclusive: !Copy`.
+    // An inclusive-exclusive range. We use this instead of `Range` because `Range: !Copy`.
     //
-    // `None` means uninit.
-    //
-    // FIXME(@joshlf): Optimize this representation. Some pairs of values (where
-    // `lo > hi`) are illegal, and we could use these to represent `None`.
-    range: Option<(u8, u8)>,
+    // The uninit byte value is represented by 256.
+    pub(crate) start: u16,
+    pub(crate) end: u16,
 }
 
 impl Byte {
+    const UNINIT: u16 = 256;
+
+    #[inline]
     fn new(range: RangeInclusive<u8>) -> Self {
-        Self { range: Some((*range.start(), *range.end())) }
+        let start: u16 = (*range.start()).into();
+        let end: u16 = (*range.end()).into();
+        Byte { start, end: end + 1 }
     }
 
+    #[inline]
     fn from_val(val: u8) -> Self {
-        Self { range: Some((val, val)) }
+        let val: u16 = val.into();
+        Byte { start: val, end: val + 1 }
     }
 
-    pub(crate) fn uninit() -> Byte {
-        Byte { range: None }
+    #[inline]
+    fn uninit() -> Byte {
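+        // The full range: every initialized byte value plus the uninit value.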
+        Byte { start: 0, end: Self::UNINIT + 1 }
     }
 
-    /// Returns `None` if `self` is the uninit byte.
-    pub(crate) fn range(&self) -> Option<RangeInclusive<u8>> {
-        self.range.map(|(lo, hi)| lo..=hi)
+    #[inline]
+    fn is_empty(&self) -> bool {
+        self.start == self.end
     }
 
-    /// Are any of the values in `self` transmutable into `other`?
-    ///
-    /// Note two special cases: An uninit byte is only transmutable into another
-    /// uninit byte. Any byte is transmutable into an uninit byte.
-    pub(crate) fn transmutable_into(&self, other: &Byte) -> bool {
-        match (self.range, other.range) {
-            (None, None) => true,
-            (None, Some(_)) => false,
-            (Some(_), None) => true,
-            (Some((slo, shi)), Some((olo, ohi))) => slo <= ohi && olo <= shi,
-        }
+    #[inline]
+    fn contains_uninit(&self) -> bool {
+        self.start <= Self::UNINIT && Self::UNINIT < self.end
     }
 }
 
 impl fmt::Debug for Byte {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        match self.range {
-            None => write!(f, "uninit"),
-            Some((lo, hi)) => write!(f, "{lo}..={hi}"),
+        if self.start == Self::UNINIT && self.end == Self::UNINIT + 1 {
+            write!(f, "uninit")
+        } else if self.start <= Self::UNINIT && self.end == Self::UNINIT + 1 {
+            write!(f, "{}..{}|uninit", self.start, self.end - 1)
+        } else {
+            write!(f, "{}..{}", self.start, self.end)
         }
     }
 }
@@ -72,6 +72,7 @@ impl From<RangeInclusive<u8>> for Byte {
 }
 
 impl From<u8> for Byte {
+    #[inline]
     fn from(src: u8) -> Self {
         Self::from_val(src)
     }
diff --git a/compiler/rustc_transmute/src/maybe_transmutable/mod.rs b/compiler/rustc_transmute/src/maybe_transmutable/mod.rs
index 0a19cccc2ed03..6307f0cd840c4 100644
--- a/compiler/rustc_transmute/src/maybe_transmutable/mod.rs
+++ b/compiler/rustc_transmute/src/maybe_transmutable/mod.rs
@@ -1,14 +1,10 @@
-use std::rc::Rc;
-use std::{cmp, iter};
-
-use itertools::Either;
 use tracing::{debug, instrument, trace};
 
 pub(crate) mod query_context;
 #[cfg(test)]
 mod tests;
 
-use crate::layout::{self, Byte, Def, Dfa, Ref, Tree, dfa};
+use crate::layout::{self, Def, Dfa, Ref, Tree, dfa, union};
 use crate::maybe_transmutable::query_context::QueryContext;
 use crate::{Answer, Condition, Map, Reason};
 
@@ -197,122 +193,20 @@ where
                     Quantifier::ForAll
                 };
 
-                let c = &core::cell::RefCell::new(&mut *cache);
                 let bytes_answer = src_quantifier.apply(
-                    // for each of the byte set transitions out of the `src_state`...
-                    self.src.bytes_from(src_state).flat_map(
-                        move |(src_validity, src_state_prime)| {
-                            // ...find all matching transitions out of `dst_state`.
-
-                            let Some(src_validity) = src_validity.range() else {
-                                // NOTE: We construct an iterator here rather
-                                // than just computing the value directly (via
-                                // `self.answer_memo`) so that, if the iterator
-                                // we produce from this branch is
-                                // short-circuited, we don't waste time
-                                // computing `self.answer_memo` unnecessarily.
-                                // That will specifically happen if
-                                // `src_quantifier == Quantifier::ThereExists`,
-                                // since we emit `Answer::Yes` first (before
-                                // chaining `answer_iter`).
-                                let answer_iter = if let Some(dst_state_prime) =
-                                    self.dst.get_uninit_edge_dst(dst_state)
-                                {
-                                    Either::Left(iter::once_with(move || {
-                                        let mut c = c.borrow_mut();
-                                        self.answer_memo(&mut *c, src_state_prime, dst_state_prime)
-                                    }))
-                                } else {
-                                    Either::Right(iter::once(Answer::No(
-                                        Reason::DstIsBitIncompatible,
-                                    )))
-                                };
-
-                                // When `answer == Answer::No(...)`, there are
-                                // two cases to consider:
-                                // - If `assume.validity`, then we should
-                                //   succeed because the user is responsible for
-                                //   ensuring that the *specific* byte value
-                                //   appearing at runtime is valid for the
-                                //   destination type. When `assume.validity`,
-                                //   `src_quantifier ==
-                                //   Quantifier::ThereExists`, so adding an
-                                //   `Answer::Yes` has the effect of ensuring
-                                //   that the "there exists" is always
-                                //   satisfied.
-                                // - If `!assume.validity`, then we should fail.
-                                //   In this case, `src_quantifier ==
-                                //   Quantifier::ForAll`, so adding an
-                                //   `Answer::Yes` has no effect.
-                                return Either::Left(iter::once(Answer::Yes).chain(answer_iter));
-                            };
-
-                            #[derive(Copy, Clone, Debug)]
-                            struct Accum {
-                                // The number of matching byte edges that we
-                                // have found in the destination so far.
-                                sum: usize,
-                                found_uninit: bool,
-                            }
-
-                            let accum1 = Rc::new(std::cell::Cell::new(Accum {
-                                sum: 0,
-                                found_uninit: false,
-                            }));
-                            let accum2 = Rc::clone(&accum1);
-                            let sv = src_validity.clone();
-                            let update_accum = move |mut accum: Accum, dst_validity: Byte| {
-                                if let Some(dst_validity) = dst_validity.range() {
-                                    // Only add the part of `dst_validity` that
-                                    // overlaps with `src_validity`.
-                                    let start = cmp::max(*sv.start(), *dst_validity.start());
-                                    let end = cmp::min(*sv.end(), *dst_validity.end());
-
-                                    // We add 1 here to account for the fact
-                                    // that `end` is an inclusive bound.
-                                    accum.sum += 1 + usize::from(end.saturating_sub(start));
-                                } else {
-                                    accum.found_uninit = true;
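+                    // Walk the byte edges out of `src_state` and `dst_state` in lockstep:
+                    // `union` pairs each byte range with the successor state (if any) on
+                    // the `src` and `dst` sides.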
+                    union(self.src.bytes_from(src_state), self.dst.bytes_from(dst_state))
+                        .filter_map(|(_range, (src_state_prime, dst_state_prime))| {
+                            match (src_state_prime, dst_state_prime) {
+                                // No matching transitions in `src`. Skip.
+                                (None, _) => None,
+                                // No matching transitions in `dst`. Fail.
+                                (Some(_), None) => Some(Answer::No(Reason::DstIsBitIncompatible)),
+                                // Matching transitions. Continue with successor states.
+                                (Some(src_state_prime), Some(dst_state_prime)) => {
+                                    Some(self.answer_memo(cache, src_state_prime, dst_state_prime))
                                 }
-                                accum
-                            };
-
-                            let answers = self
-                                .dst
-                                .states_from(dst_state, src_validity.clone())
-                                .map(move |(dst_validity, dst_state_prime)| {
-                                    let mut c = c.borrow_mut();
-                                    accum1.set(update_accum(accum1.get(), dst_validity));
-                                    let answer =
-                                        self.answer_memo(&mut *c, src_state_prime, dst_state_prime);
-                                    answer
-                                })
-                                .chain(
-                                    iter::once_with(move || {
-                                        let src_validity_len = usize::from(*src_validity.end())
-                                            - usize::from(*src_validity.start())
-                                            + 1;
-                                        let accum = accum2.get();
-
-                                        // If this condition is false, then
-                                        // there are some byte values in the
-                                        // source which have no corresponding
-                                        // transition in the destination DFA. In
-                                        // that case, we add a `No` to our list
-                                        // of answers. When
-                                        // `!self.assume.validity`, this will
-                                        // cause the query to fail.
-                                        if accum.found_uninit || accum.sum == src_validity_len {
-                                            None
-                                        } else {
-                                            Some(Answer::No(Reason::DstIsBitIncompatible))
-                                        }
-                                    })
-                                    .flatten(),
-                                );
-                            Either::Right(answers)
-                        },
-                    ),
+                            }
+                        }),
                 );
 
                 // The below early returns reflect how this code would behave:
diff --git a/compiler/rustc_transmute/src/maybe_transmutable/tests.rs b/compiler/rustc_transmute/src/maybe_transmutable/tests.rs
index 992fcb7cc4c81..fbb4639dbd630 100644
--- a/compiler/rustc_transmute/src/maybe_transmutable/tests.rs
+++ b/compiler/rustc_transmute/src/maybe_transmutable/tests.rs
@@ -400,16 +400,23 @@ mod r#ref {
     fn should_permit_identity_transmutation() {
         type Tree = crate::layout::Tree<Def, [(); 1]>;
 
-        let layout = Tree::Seq(vec![Tree::byte(0x00), Tree::Ref([()])]);
+        for validity in [false, true] {
+            let layout = Tree::Seq(vec![Tree::byte(0x00), Tree::Ref([()])]);
 
-        let answer = crate::maybe_transmutable::MaybeTransmutableQuery::new(
-            layout.clone(),
-            layout,
-            Assume::default(),
-            UltraMinimal::default(),
-        )
-        .answer();
-        assert_eq!(answer, Answer::If(crate::Condition::IfTransmutable { src: [()], dst: [()] }));
+            let assume = Assume { validity, ..Assume::default() };
+
+            let answer = crate::maybe_transmutable::MaybeTransmutableQuery::new(
+                layout.clone(),
+                layout,
+                assume,
+                UltraMinimal::default(),
+            )
+            .answer();
+            assert_eq!(
+                answer,
+                Answer::If(crate::Condition::IfTransmutable { src: [()], dst: [()] })
+            );
+        }
     }
 }
 
diff --git a/src/ci/docker/scripts/rfl-build.sh b/src/ci/docker/scripts/rfl-build.sh
index ea8066d95e028..1d280948ebe68 100755
--- a/src/ci/docker/scripts/rfl-build.sh
+++ b/src/ci/docker/scripts/rfl-build.sh
@@ -2,7 +2,7 @@
 
 set -euo pipefail
 
-LINUX_VERSION=v6.14-rc3
+LINUX_VERSION=v6.15-rc4
 
 # Build rustc, rustdoc, cargo, clippy-driver and rustfmt
 ../x.py build --stage 2 library rustdoc clippy rustfmt
diff --git a/src/tools/rust-analyzer/.github/ISSUE_TEMPLATE/bug_report.md b/src/tools/rust-analyzer/.github/ISSUE_TEMPLATE/bug_report.md
index 0d99d06bcddee..8333cf08929f8 100644
--- a/src/tools/rust-analyzer/.github/ISSUE_TEMPLATE/bug_report.md
+++ b/src/tools/rust-analyzer/.github/ISSUE_TEMPLATE/bug_report.md
@@ -8,7 +8,7 @@ assignees: ''
 ---
 
 <!--
-Troubleshooting guide: https://rust-analyzer.github.io/manual.html#troubleshooting
+Troubleshooting guide: https://rust-analyzer.github.io/book/troubleshooting.html
 Forum for questions: https://users.rust-lang.org/c/ide/14
 
 Before submitting, please make sure that you're not running into one of these known issues:
@@ -28,7 +28,7 @@ Otherwise please try to provide information which will help us to fix the issue
 
 **repository link (if public, optional)**: (eg. [rust-analyzer](https://github.com/rust-lang/rust-analyzer))
 
-**code snippet to reproduce**: 
+**code snippet to reproduce**:
 ```rust
 // add your code here
 
diff --git a/src/tools/rust-analyzer/.github/ISSUE_TEMPLATE/critical_nightly_regression.md b/src/tools/rust-analyzer/.github/ISSUE_TEMPLATE/critical_nightly_regression.md
index ad220ff65ca14..23c43443c84f6 100644
--- a/src/tools/rust-analyzer/.github/ISSUE_TEMPLATE/critical_nightly_regression.md
+++ b/src/tools/rust-analyzer/.github/ISSUE_TEMPLATE/critical_nightly_regression.md
@@ -8,7 +8,7 @@ assignees: ''
 ---
 
 <!--
-Troubleshooting guide: https://rust-analyzer.github.io/manual.html#troubleshooting
+Troubleshooting guide: https://rust-analyzer.github.io/book/troubleshooting.html
 
 Please try to provide information which will help us to fix the issue faster. Minimal reproducible examples with few dependencies are especially lovely <3.
 -->
diff --git a/src/tools/rust-analyzer/.github/workflows/ci.yaml b/src/tools/rust-analyzer/.github/workflows/ci.yaml
index 7a6b43a053155..79fb7a2d2ea96 100644
--- a/src/tools/rust-analyzer/.github/workflows/ci.yaml
+++ b/src/tools/rust-analyzer/.github/workflows/ci.yaml
@@ -15,7 +15,6 @@ env:
   CARGO_NET_RETRY: 10
   CI: 1
   RUST_BACKTRACE: short
-  RUSTFLAGS: "-D warnings -D elided_lifetimes_in_paths -D explicit_outlives_requirements -D unsafe_op_in_unsafe_fn -D unused_extern_crates -D unused_lifetimes -D unreachable_pub"
   RUSTUP_MAX_RETRIES: 10
 
 jobs:
@@ -25,7 +24,6 @@ jobs:
       pull-requests: read
     outputs:
       typescript: ${{ steps.filter.outputs.typescript }}
-      proc_macros: ${{ steps.filter.outputs.proc_macros }}
     steps:
       - uses: actions/checkout@v4
       - uses: dorny/paths-filter@1441771bbfdd59dcd748680ee64ebd8faab1a242
@@ -34,52 +32,54 @@ jobs:
           filters: |
             typescript:
               - 'editors/code/**'
-            proc_macros:
-              - 'crates/tt/**'
-              - 'crates/proc-macro-api/**'
-              - 'crates/proc-macro-srv/**'
-              - 'crates/proc-macro-srv-cli/**'
 
   proc-macro-srv:
-    needs: changes
-    if: github.repository == 'rust-lang/rust-analyzer' && needs.changes.outputs.proc_macros == 'true'
+    if: github.repository == 'rust-lang/rust-analyzer'
     name: proc-macro-srv
     runs-on: ubuntu-latest
+    env:
+      RUSTFLAGS: "-D warnings"
+
     steps:
       - name: Checkout repository
         uses: actions/checkout@v4
         with:
           ref: ${{ github.event.pull_request.head.sha }}
 
+      - name: Install rustup-toolchain-install-master
+        run: cargo install rustup-toolchain-install-master@1.6.0
+
+      # Install a pinned rustc commit to avoid surprises
       - name: Install Rust toolchain
         run: |
-          rustup update --no-self-update nightly
-          rustup default nightly
-          rustup component add --toolchain nightly rust-src rustfmt
+          RUSTC_VERSION=`cat rust-version`
+          rustup-toolchain-install-master ${RUSTC_VERSION} -c rust-src -c rustfmt
+          rustup default ${RUSTC_VERSION}
+
+      # Emulate a nightly toolchain, because the toolchain installed above does not have "nightly"
+      # in its version string.
+      - name: Emulate a nightly toolchain
+        run: echo "RUSTC_BOOTSTRAP=1" >> $GITHUB_ENV
+
       # https://github.com/actions-rust-lang/setup-rust-toolchain/blob/main/rust.json
       - name: Install Rust Problem Matcher
-        if: matrix.os == 'ubuntu-latest'
         run: echo "::add-matcher::.github/rust.json"
 
-      - name: Cache Dependencies
-        uses: Swatinem/rust-cache@9bdad043e88c75890e36ad3bbc8d27f0090dd609
-
-      - name: Bump opt-level
-        if: matrix.os == 'ubuntu-latest'
-        run: sed -i '/\[profile.dev]/a opt-level=1' Cargo.toml
-
       - name: Test
-        run: cargo test --features sysroot-abi -p rust-analyzer -p proc-macro-srv -p proc-macro-srv-cli -p proc-macro-api -- --quiet
+        run: cargo test --features sysroot-abi -p proc-macro-srv -p proc-macro-srv-cli -p proc-macro-api -- --quiet
+
+      - name: Check salsa dependency
+        run: "! (cargo tree -p proc-macro-srv-cli | grep -q salsa)"
 
   rust:
     if: github.repository == 'rust-lang/rust-analyzer'
     name: Rust
     runs-on: ${{ matrix.os }}
     env:
+      RUSTFLAGS: "-Dwarnings"
       CC: deny_c
 
     strategy:
-      fail-fast: false
       matrix:
         os: [ubuntu-latest, windows-latest, macos-latest]
 
@@ -93,7 +93,7 @@ jobs:
         run: |
           rustup update --no-self-update stable
           rustup default stable
-          rustup component add --toolchain stable rust-src
+          rustup component add --toolchain stable rust-src clippy
           # We always use a nightly rustfmt, regardless of channel, because we need
           # --file-lines.
           rustup toolchain install nightly --profile minimal --component rustfmt
@@ -102,51 +102,105 @@ jobs:
         if: matrix.os == 'ubuntu-latest'
         run: echo "::add-matcher::.github/rust.json"
 
-      - name: Cache Dependencies
-        uses: Swatinem/rust-cache@9bdad043e88c75890e36ad3bbc8d27f0090dd609
+      # - name: Cache Dependencies
+      #   uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6
+      #   with:
+      #     workspaces: |
+      #       . -> target
+      #       ./crates/proc-macro-srv/proc-macro-test/imp -> target
 
-      - name: Bump opt-level
-        if: matrix.os == 'ubuntu-latest'
-        run: sed -i '/\[profile.dev]/a opt-level=1' Cargo.toml
+      - name: Install nextest
+        uses: taiki-e/install-action@nextest
 
       - name: Codegen checks (rust-analyzer)
+        if: matrix.os == 'ubuntu-latest'
         run: cargo codegen --check
 
-      - name: Compile (tests)
-        run: cargo test --no-run --locked
+      - name: Compile tests
+        run: cargo test --no-run
 
-      # It's faster to `test` before `build` ¯\_(ツ)_/¯
-      - name: Compile (rust-analyzer)
-        if: matrix.os == 'ubuntu-latest'
-        run: cargo build --quiet
+      - name: Run tests
+        run: cargo nextest run --no-fail-fast --hide-progress-bar --status-level fail
 
-      - name: Test
-        if: matrix.os == 'ubuntu-latest' || matrix.os == 'windows-latest' || github.event_name == 'push'
-        run: cargo test -- --quiet
+      - name: Cancel parallel jobs
+        if: failure()
+        run: |
+          # https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#cancel-a-workflow-run
+          curl -L \
+          -X POST \
+          -H "Accept: application/vnd.github.v3+json" \
+          -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" \
+          -H "X-GitHub-Api-Version: 2022-11-28" \
+          https://api.github.com/repos/${{ github.repository }}/actions/runs/${{ github.run_id }}/cancel
+
+      - name: Run Clippy
+        if: matrix.os == 'macos-latest'
+        run: cargo clippy --all-targets -- -D clippy::disallowed_macros -D clippy::dbg_macro -D clippy::todo -D clippy::print_stdout -D clippy::print_stderr
 
-      - name: Switch to stable toolchain
+  analysis-stats:
+    if: github.repository == 'rust-lang/rust-analyzer'
+    runs-on: ubuntu-latest
+    env:
+      RUSTC_BOOTSTRAP: 1
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+
+      - name: Install Rust toolchain
         run: |
           rustup update --no-self-update stable
-          rustup component add --toolchain stable rust-src clippy
           rustup default stable
+          rustup component add rustfmt
 
-      - name: Run analysis-stats on rust-analyzer
-        if: matrix.os == 'ubuntu-latest'
-        run: target/${{ matrix.target }}/debug/rust-analyzer analysis-stats .
+      # - name: Cache Dependencies
+      #   uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6
 
-      - name: Run analysis-stats on the rust standard libraries
-        if: matrix.os == 'ubuntu-latest'
-        env:
-          RUSTC_BOOTSTRAP: 1
-        run: target/${{ matrix.target }}/debug/rust-analyzer analysis-stats --with-deps --no-sysroot --no-test $(rustc --print sysroot)/lib/rustlib/src/rust/library/
+      - name: Bump opt-level
+        run: sed -i '/\[profile.dev]/a opt-level=1' Cargo.toml
 
-      - name: clippy
-        if: matrix.os == 'windows-latest'
-        run: cargo clippy --all-targets -- -D clippy::disallowed_macros -D clippy::dbg_macro -D clippy::todo -D clippy::print_stdout -D clippy::print_stderr
+      - run: cargo build -p rust-analyzer
 
-      - name: rustfmt
-        if: matrix.os == 'ubuntu-latest'
-        run: cargo fmt -- --check
+      - name: ./rust-analyzer
+        run: ./target/debug/rust-analyzer analysis-stats . -q
+
+      - name: sysroot/lib/rustlib/src/rust/library/
+        run: ./target/debug/rust-analyzer analysis-stats --with-deps --no-sysroot --no-test $(rustc --print sysroot)/lib/rustlib/src/rust/library/ -q
+
+  rustfmt:
+    if: github.repository == 'rust-lang/rust-analyzer'
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+
+      - name: Install Rust toolchain
+        run: |
+          rustup update --no-self-update stable
+          rustup default stable
+          rustup component add rustfmt
+
+      - run: cargo fmt -- --check
+
+  miri:
+    if: github.repository == 'rust-lang/rust-analyzer'
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+
+      - name: Install Rust toolchain
+        run: |
+          rustup update --no-self-update nightly
+          rustup default nightly
+          rustup component add miri
+
+      # - name: Cache Dependencies
+      #   uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6
+
+      - run: cargo miri test -p intern
 
   # Weird targets to catch non-portable code
   rust-cross:
@@ -154,11 +208,16 @@ jobs:
     name: Rust Cross
     runs-on: ubuntu-latest
 
+    strategy:
+      matrix:
+        target: [powerpc-unknown-linux-gnu, x86_64-unknown-linux-musl, wasm32-unknown-unknown]
+        include:
+          # The rust-analyzer binary is not expected to compile on WASM, but the IDE
+          # crate should
+          - target: wasm32-unknown-unknown
+            ide-only: true
     env:
-      targets: "powerpc-unknown-linux-gnu x86_64-unknown-linux-musl"
-      # The rust-analyzer binary is not expected to compile on WASM, but the IDE
-      # crate should
-      targets_ide: "wasm32-unknown-unknown"
+      RUSTFLAGS: "-Dwarnings"
 
     steps:
       - name: Checkout repository
@@ -167,19 +226,15 @@ jobs:
       - name: Install Rust toolchain
         run: |
           rustup update --no-self-update stable
-          rustup target add ${{ env.targets }} ${{ env.targets_ide }}
+          rustup target add ${{ matrix.target }}
 
-      - name: Cache Dependencies
-        uses: Swatinem/rust-cache@9bdad043e88c75890e36ad3bbc8d27f0090dd609
+      # - name: Cache Dependencies
+      #   uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6
 
-      - name: Check
-        run: |
-          for target in ${{ env.targets }}; do
-            cargo check --target=$target --all-targets
-          done
-          for target in ${{ env.targets_ide }}; do
-            cargo check -p ide --target=$target --all-targets
-          done
+      - run: cargo check --target=${{ matrix.target }} --all-targets -p ide
+        if: ${{ matrix.ide-only }}
+      - run: cargo check --target=${{ matrix.target }} --all-targets
+        if: ${{ !matrix.ide-only }}
 
   typescript:
     needs: changes
@@ -261,7 +316,7 @@ jobs:
         run: typos
 
   conclusion:
-    needs: [rust, rust-cross, typescript, typo-check, proc-macro-srv]
+    needs: [rust, rust-cross, typescript, typo-check, proc-macro-srv, miri, rustfmt, analysis-stats]
     # We need to ensure this job does *not* get skipped if its dependencies fail,
     # because a skipped job is considered a success by GitHub. So we have to
     # overwrite `if:`. We use `!cancelled()` to ensure the job does still not get run
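The `!cancelled()` pattern referenced in the comment above works roughly as follows (a minimal sketch, not the job from this diff; the job name, the dependency list, and the jq filter are illustrative assumptions): the conclusion job runs unless the whole workflow was cancelled, inspects the `needs` context, and fails if any dependency did not succeed, so branch protection only has to require this single job.

    conclusion-sketch:
      needs: [rust, rust-cross]            # illustrative subset of the real dependency list
      if: ${{ !cancelled() }}              # run even when a dependency failed; skip only on cancellation
      runs-on: ubuntu-latest
      steps:
        - name: Fail if any dependency did not succeed
          run: |
            # `needs` maps each dependency job to its result; require success (or skipped) for all of them
            echo '${{ toJSON(needs) }}' | jq --exit-status 'all(.[]; .result == "success" or .result == "skipped")'
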
diff --git a/src/tools/rust-analyzer/.github/workflows/release.yaml b/src/tools/rust-analyzer/.github/workflows/release.yaml
index c8e6de72ce98f..a758ecfd46796 100644
--- a/src/tools/rust-analyzer/.github/workflows/release.yaml
+++ b/src/tools/rust-analyzer/.github/workflows/release.yaml
@@ -29,19 +29,25 @@ jobs:
           - os: windows-latest
             target: x86_64-pc-windows-msvc
             code-target: win32-x64
+            pgo: clap-rs/clap@v4.5.36
           - os: windows-latest
             target: i686-pc-windows-msvc
+            pgo: clap-rs/clap@v4.5.36
           - os: windows-latest
             target: aarch64-pc-windows-msvc
             code-target: win32-arm64
           - os: ubuntu-latest
             target: x86_64-unknown-linux-gnu
-            zig_target: x86_64-unknown-linux-gnu.2.28
+            # Use a container with glibc 2.28
+            # Zig is not used because it doesn't work with PGO
+            container: quay.io/pypa/manylinux_2_28_x86_64
             code-target: linux-x64
-          - os: ubuntu-latest
+            pgo: clap-rs/clap@v4.5.36
+          - os: ubuntu-24.04-arm
             target: aarch64-unknown-linux-gnu
-            zig_target: aarch64-unknown-linux-gnu.2.28
+            container: quay.io/pypa/manylinux_2_28_aarch64
             code-target: linux-arm64
+            pgo: clap-rs/clap@v4.5.36
           - os: ubuntu-latest
             target: arm-unknown-linux-gnueabihf
             zig_target: arm-unknown-linux-gnueabihf.2.28
@@ -49,9 +55,11 @@ jobs:
           - os: macos-13
             target: x86_64-apple-darwin
             code-target: darwin-x64
-          - os: macos-13
+            pgo: clap-rs/clap@v4.5.36
+          - os: macos-14
             target: aarch64-apple-darwin
             code-target: darwin-arm64
+            pgo: clap-rs/clap@v4.5.36
 
     name: dist (${{ matrix.target }})
     runs-on: ${{ matrix.os }}
@@ -71,10 +79,17 @@ jobs:
         with:
           node-version: 22
 
+      - name: Install rustup
+        if: ${{ matrix.container }}
+        run: |
+          curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --profile minimal
+          echo "$HOME/.cargo/bin" >> $GITHUB_PATH
+
       - name: Install Rust toolchain
         run: |
           rustup update --no-self-update stable
-          rustup component add rust-src
+          # llvm-tools contains the llvm-profdata tool, which is needed for PGO
+          rustup component add rust-src ${{ matrix.pgo && 'llvm-tools' || '' }}
           rustup target add ${{ matrix.target }}
 
       - name: Install Zig toolchain
@@ -87,11 +102,11 @@ jobs:
 
       - name: Dist (plain)
         if: ${{ !matrix.zig_target }}
-        run: cargo xtask dist --client-patch-version ${{ github.run_number }}
+        run: cargo xtask dist --client-patch-version ${{ github.run_number }} ${{ matrix.pgo && format('--pgo {0}', matrix.pgo) || ''}}
 
       - name: Dist (using zigbuild)
         if: ${{ matrix.zig_target }}
-        run: RA_TARGET=${{ matrix.zig_target}} cargo xtask dist --client-patch-version ${{ github.run_number }} --zig
+        run: RA_TARGET=${{ matrix.zig_target}} cargo xtask dist --client-patch-version ${{ github.run_number }} --zig ${{ matrix.pgo && format('--pgo {0}', matrix.pgo) || ''}}
 
       - run: npm ci
         working-directory: editors/code
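For context on the new `--pgo <crate>` option above: the general rustc PGO cycle that `llvm-profdata` (from the `llvm-tools` component) supports looks roughly like the hypothetical job fragment below. This is a minimal sketch of the standard instrument / train / merge / optimize flow; the profile paths, the training command, and the exact flags used internally by `cargo xtask dist --pgo` are assumptions, not taken from this diff.

      # Hypothetical dist-job fragment illustrating the standard PGO cycle
      - name: Build instrumented rust-analyzer
        run: RUSTFLAGS="-Cprofile-generate=/tmp/pgo-data" cargo build --release -p rust-analyzer

      - name: Train on a workload (the matrix uses clap-rs/clap@v4.5.36 as that workload)
        run: ./target/release/rust-analyzer analysis-stats path/to/clap-checkout

      - name: Merge raw profiles (llvm-profdata ships with the llvm-tools rustup component)
        run: llvm-profdata merge -o /tmp/pgo-data/merged.profdata /tmp/pgo-data

      - name: Rebuild with the merged profile
        run: RUSTFLAGS="-Cprofile-use=/tmp/pgo-data/merged.profdata" cargo build --release -p rust-analyzer
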
diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock
index 2dbb3f5d69ca3..9f6b80c63790f 100644
--- a/src/tools/rust-analyzer/Cargo.lock
+++ b/src/tools/rust-analyzer/Cargo.lock
@@ -1,65 +1,77 @@
 # This file is automatically @generated by Cargo.
 # It is not intended for manual editing.
-version = 3
+version = 4
 
 [[package]]
 name = "addr2line"
-version = "0.22.0"
+version = "0.24.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678"
+checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1"
 dependencies = [
  "gimli",
 ]
 
 [[package]]
-name = "adler"
-version = "1.0.2"
+name = "adler2"
+version = "2.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
+checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627"
 
 [[package]]
-name = "always-assert"
-version = "0.2.0"
+name = "aho-corasick"
+version = "1.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "allocator-api2"
+version = "0.2.21"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a1078fa1ce1e34b1872d8611ad921196d76bdd7027e949fbe31231abde201892"
+checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
 
 [[package]]
 name = "anyhow"
-version = "1.0.86"
+version = "1.0.97"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"
+checksum = "dcfed56ad506cb2c684a14971b8861fdc3baaaae314b9e5f9bb532cbe3ba7a4f"
 
 [[package]]
 name = "arbitrary"
-version = "1.3.2"
+version = "1.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7d5a26814d8dcb93b0e5a0ff3c6d80a8843bafb21b39e8e18a6f05471870e110"
+checksum = "dde20b3d026af13f561bdd0f15edf01fc734f0dafcedbaf42bba506a9517f223"
+dependencies = [
+ "derive_arbitrary",
+]
 
 [[package]]
 name = "arrayvec"
-version = "0.7.4"
+version = "0.7.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711"
+checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
 
 [[package]]
 name = "autocfg"
-version = "1.3.0"
+version = "1.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0"
+checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26"
 
 [[package]]
 name = "backtrace"
-version = "0.3.73"
+version = "0.3.74"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a"
+checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a"
 dependencies = [
  "addr2line",
- "cc",
  "cfg-if",
  "libc",
  "miniz_oxide",
- "object 0.36.3",
+ "object",
  "rustc-demangle",
+ "windows-targets 0.52.6",
 ]
 
 [[package]]
@@ -67,14 +79,14 @@ name = "base-db"
 version = "0.0.0"
 dependencies = [
  "cfg",
+ "dashmap",
  "intern",
  "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "lz4_flex",
- "rustc-hash 2.0.0",
+ "query-group-macro",
+ "rustc-hash 2.1.1",
  "salsa",
  "semver",
  "span",
- "stdx",
  "syntax",
  "tracing",
  "triomphe",
@@ -95,62 +107,65 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
 
 [[package]]
 name = "bitflags"
-version = "2.7.0"
+version = "2.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1be3f42a67d6d345ecd59f675f3f012d6974981560836e938c22b424b85ce1be"
+checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd"
 
 [[package]]
 name = "borsh"
-version = "1.5.1"
+version = "1.5.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a6362ed55def622cddc70a4746a68554d7b687713770de539e59a739b249f8ed"
+checksum = "5430e3be710b68d984d1391c854eb431a9d548640711faa54eecb1df93db91cc"
 dependencies = [
- "cfg_aliases 0.2.1",
+ "cfg_aliases",
 ]
 
 [[package]]
-name = "byteorder"
-version = "1.5.0"
+name = "boxcar"
+version = "0.2.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
+checksum = "6740c6e2fc6360fa57c35214c7493826aee95993926092606f27c983b40837be"
+dependencies = [
+ "loom",
+]
 
 [[package]]
 name = "camino"
-version = "1.1.7"
+version = "1.1.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e0ec6b951b160caa93cc0c7b209e5a3bff7aae9062213451ac99493cd844c239"
+checksum = "8b96ec4966b5813e2c0507c1f86115c8c5abaadc3980879c3424042a02fd1ad3"
 dependencies = [
  "serde",
 ]
 
 [[package]]
 name = "cargo-platform"
-version = "0.1.8"
+version = "0.1.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "24b1f0365a6c6bb4020cd05806fd0d33c44d38046b8bd7f0e40814b9763cabfc"
+checksum = "e35af189006b9c0f00a064685c727031e3ed2d8020f7ba284d78cc2671bd36ea"
 dependencies = [
  "serde",
 ]
 
 [[package]]
 name = "cargo_metadata"
-version = "0.18.1"
+version = "0.19.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2d886547e41f740c616ae73108f6eb70afe6d940c7bc697cb30f13daec073037"
+checksum = "dd5eb614ed4c27c5d706420e4320fbe3216ab31fa1c33cd8246ac36dae4479ba"
 dependencies = [
  "camino",
  "cargo-platform",
  "semver",
  "serde",
  "serde_json",
- "thiserror",
+ "thiserror 2.0.12",
 ]
 
 [[package]]
 name = "cc"
-version = "1.1.22"
+version = "1.2.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9540e661f81799159abee814118cc139a2004b3a3aa3ea37724a1b66530b90e0"
+checksum = "be714c154be609ec7f5dad223a33bf1482fff90472de28f7362806e6d4832b8c"
 dependencies = [
  "shlex",
 ]
@@ -164,7 +179,7 @@ dependencies = [
  "expect-test",
  "intern",
  "oorandom",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
  "syntax",
  "syntax-bridge",
  "tracing",
@@ -177,12 +192,6 @@ version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
 
-[[package]]
-name = "cfg_aliases"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e"
-
 [[package]]
 name = "cfg_aliases"
 version = "0.2.1"
@@ -191,9 +200,9 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
 
 [[package]]
 name = "chalk-derive"
-version = "0.100.0"
+version = "0.102.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ab2d131019373f0d0d1f2af0abd4f719739f6583c1b33965112455f643a910af"
+checksum = "feb14e3ff0ebac26d8e58b6ed1417afb60c4a0a44b6425546ee7eb9c75ebb336"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -203,19 +212,19 @@ dependencies = [
 
 [[package]]
 name = "chalk-ir"
-version = "0.100.0"
+version = "0.102.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4f114996bda14c0213f014a4ef31a7867dcf5f539a3900477fc6b20138e7a17b"
+checksum = "72f0a61621a088af69fee8df39ec63cf5b6d0b9ab663a740cdeb376aabf2f244"
 dependencies = [
- "bitflags 2.7.0",
+ "bitflags 2.9.0",
  "chalk-derive",
 ]
 
 [[package]]
 name = "chalk-recursive"
-version = "0.100.0"
+version = "0.102.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "551e956e031c09057c7b21f17d48d91de99c9b6b6e34bceaf5e7202d71021268"
+checksum = "cbd3415cc540015533aa4a8ad007696d585dd9c5f81e7c099872f1dd4bf14894"
 dependencies = [
  "chalk-derive",
  "chalk-ir",
@@ -226,15 +235,15 @@ dependencies = [
 
 [[package]]
 name = "chalk-solve"
-version = "0.100.0"
+version = "0.102.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cd7ca50181156ce649efe8e5dd00580f573651554e4dcd11afa4e2ac93f53324"
+checksum = "747707b0c082b3ecf4b1ae28d0d8df708a46cddd22a386f9cc85a312a4de25ff"
 dependencies = [
  "chalk-derive",
  "chalk-ir",
  "ena",
  "indexmap",
- "itertools",
+ "itertools 0.12.1",
  "petgraph",
  "rustc-hash 1.1.0",
  "tracing",
@@ -263,18 +272,18 @@ dependencies = [
 
 [[package]]
 name = "crossbeam-channel"
-version = "0.5.13"
+version = "0.5.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2"
+checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2"
 dependencies = [
  "crossbeam-utils",
 ]
 
 [[package]]
 name = "crossbeam-deque"
-version = "0.8.5"
+version = "0.8.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d"
+checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51"
 dependencies = [
  "crossbeam-epoch",
  "crossbeam-utils",
@@ -289,30 +298,40 @@ dependencies = [
  "crossbeam-utils",
 ]
 
+[[package]]
+name = "crossbeam-queue"
+version = "0.3.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0f58bbc28f91df819d0aa2a2c00cd19754769c2fad90579b3592b1c9ba7a3115"
+dependencies = [
+ "crossbeam-utils",
+]
+
 [[package]]
 name = "crossbeam-utils"
-version = "0.8.20"
+version = "0.8.21"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80"
+checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28"
 
 [[package]]
 name = "ctrlc"
-version = "3.4.4"
+version = "3.4.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "672465ae37dc1bc6380a6547a8883d5dd397b0f1faaad4f265726cc7042a5345"
+checksum = "90eeab0aa92f3f9b4e87f258c72b139c207d251f9cbc1080a0086b86a8870dd3"
 dependencies = [
  "nix",
- "windows-sys 0.52.0",
+ "windows-sys 0.59.0",
 ]
 
 [[package]]
 name = "dashmap"
-version = "5.5.3"
+version = "6.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856"
+checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf"
 dependencies = [
  "cfg-if",
- "hashbrown",
+ "crossbeam-utils",
+ "hashbrown 0.14.5",
  "lock_api",
  "once_cell",
  "parking_lot_core",
@@ -320,18 +339,18 @@ dependencies = [
 
 [[package]]
 name = "deranged"
-version = "0.3.11"
+version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4"
+checksum = "9c9e6a11ca8224451684bc0d7d5a7adbf8f2fd6887261a1cfc3c0432f9d4068e"
 dependencies = [
  "powerfmt",
 ]
 
 [[package]]
 name = "derive_arbitrary"
-version = "1.3.2"
+version = "1.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611"
+checksum = "30542c1ad912e0e3d22a1935c290e12e8a29d704a420177a31faad4a601a0800"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -340,39 +359,50 @@ dependencies = [
 
 [[package]]
 name = "directories"
-version = "5.0.1"
+version = "6.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9a49173b84e034382284f27f1af4dcbbd231ffa358c0fe316541a7337f376a35"
+checksum = "16f5094c54661b38d03bd7e50df373292118db60b585c08a411c6d840017fe7d"
 dependencies = [
  "dirs-sys",
 ]
 
 [[package]]
 name = "dirs"
-version = "5.0.1"
+version = "6.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225"
+checksum = "c3e8aa94d75141228480295a7d0e7feb620b1a5ad9f12bc40be62411e38cce4e"
 dependencies = [
  "dirs-sys",
 ]
 
 [[package]]
 name = "dirs-sys"
-version = "0.4.1"
+version = "0.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c"
+checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab"
 dependencies = [
  "libc",
  "option-ext",
  "redox_users",
- "windows-sys 0.48.0",
+ "windows-sys 0.59.0",
+]
+
+[[package]]
+name = "displaydoc"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
 ]
 
 [[package]]
 name = "dissimilar"
-version = "1.0.9"
+version = "1.0.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "59f8e79d1fbf76bdfbde321e902714bf6c49df88a7dda6fc682fc2979226962d"
+checksum = "8975ffdaa0ef3661bfe02dbdcc06c9f829dfafe6a3c474de366a8d5e44276921"
 
 [[package]]
 name = "dot"
@@ -392,9 +422,9 @@ version = "0.0.0"
 
 [[package]]
 name = "either"
-version = "1.13.0"
+version = "1.15.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0"
+checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
 
 [[package]]
 name = "ena"
@@ -407,15 +437,15 @@ dependencies = [
 
 [[package]]
 name = "equivalent"
-version = "1.0.1"
+version = "1.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5"
+checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
 
 [[package]]
 name = "expect-test"
-version = "1.5.0"
+version = "1.5.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9e0be0a561335815e06dab7c62e50353134c796e7a6155402a64bcff66b6a5e0"
+checksum = "63af43ff4431e848fb47472a920f14fa71c24de13255a5692e93d4e90302acb0"
 dependencies = [
  "dissimilar",
  "once_cell",
@@ -423,9 +453,9 @@ dependencies = [
 
 [[package]]
 name = "filetime"
-version = "0.2.24"
+version = "0.2.25"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bf401df4a4e3872c4fe8151134cf483738e74b67fc934d6532c882b3d24a4550"
+checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586"
 dependencies = [
  "cfg-if",
  "libc",
@@ -441,14 +471,20 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
 
 [[package]]
 name = "flate2"
-version = "1.0.31"
+version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7f211bbe8e69bbd0cfdea405084f128ae8b4aaa6b0b522fc8f2b009084797920"
+checksum = "11faaf5a5236997af9848be0bef4db95824b1d534ebc64d0f0c6cf3e67bd38dc"
 dependencies = [
  "crc32fast",
  "miniz_oxide",
 ]
 
+[[package]]
+name = "foldhash"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a0d2fde1f7b3d48b8395d5f2de76c18a528bd6a9cdde438df747bfcba3e05d6f"
+
 [[package]]
 name = "form_urlencoded"
 version = "1.2.1"
@@ -473,6 +509,19 @@ version = "0.4.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7ab85b9b05e3978cc9a9cf8fea7f01b494e1a09ed3037e16ba39edc7a29eb61a"
 
+[[package]]
+name = "generator"
+version = "0.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cc6bd114ceda131d3b1d665eba35788690ad37f5916457286b32ab6fd3c438dd"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "log",
+ "rustversion",
+ "windows 0.58.0",
+]
+
 [[package]]
 name = "getrandom"
 version = "0.2.15"
@@ -486,9 +535,9 @@ dependencies = [
 
 [[package]]
 name = "gimli"
-version = "0.29.0"
+version = "0.31.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd"
+checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f"
 
 [[package]]
 name = "hashbrown"
@@ -496,11 +545,31 @@ version = "0.14.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"
 
+[[package]]
+name = "hashbrown"
+version = "0.15.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289"
+dependencies = [
+ "allocator-api2",
+ "equivalent",
+ "foldhash",
+]
+
+[[package]]
+name = "hashlink"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1"
+dependencies = [
+ "hashbrown 0.15.2",
+]
+
 [[package]]
 name = "heck"
-version = "0.4.1"
+version = "0.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
+checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
 
 [[package]]
 name = "hermit-abi"
@@ -522,8 +591,8 @@ dependencies = [
  "hir-ty",
  "indexmap",
  "intern",
- "itertools",
- "rustc-hash 2.0.0",
+ "itertools 0.14.0",
+ "rustc-hash 2.1.1",
  "smallvec",
  "span",
  "stdx",
@@ -542,26 +611,25 @@ version = "0.0.0"
 dependencies = [
  "arrayvec",
  "base-db",
- "bitflags 2.7.0",
+ "bitflags 2.9.0",
  "cfg",
  "cov-mark",
- "dashmap",
  "drop_bomb",
  "either",
  "expect-test",
  "fst",
- "hashbrown",
  "hir-expand",
  "indexmap",
  "intern",
- "itertools",
+ "itertools 0.14.0",
  "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "mbe",
+ "query-group-macro",
  "ra-ap-rustc_abi",
- "ra-ap-rustc_hashes",
  "ra-ap-rustc_parse_format",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
  "rustc_apfloat",
+ "salsa",
  "smallvec",
  "span",
  "stdx",
@@ -570,6 +638,7 @@ dependencies = [
  "test-fixture",
  "test-utils",
  "text-size",
+ "thin-vec",
  "tracing",
  "triomphe",
  "tt",
@@ -584,13 +653,13 @@ dependencies = [
  "cov-mark",
  "either",
  "expect-test",
- "hashbrown",
  "intern",
- "itertools",
- "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "itertools 0.14.0",
  "mbe",
  "parser",
- "rustc-hash 2.0.0",
+ "query-group-macro",
+ "rustc-hash 2.1.1",
+ "salsa",
  "smallvec",
  "span",
  "stdx",
@@ -607,7 +676,7 @@ version = "0.0.0"
 dependencies = [
  "arrayvec",
  "base-db",
- "bitflags 2.7.0",
+ "bitflags 2.9.0",
  "chalk-derive",
  "chalk-ir",
  "chalk-recursive",
@@ -620,16 +689,17 @@ dependencies = [
  "hir-expand",
  "indexmap",
  "intern",
- "itertools",
+ "itertools 0.14.0",
  "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "nohash-hasher",
  "oorandom",
  "project-model",
+ "query-group-macro",
  "ra-ap-rustc_abi",
  "ra-ap-rustc_index",
  "ra-ap-rustc_pattern_analysis",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
  "rustc_apfloat",
+ "salsa",
  "scoped-tls",
  "smallvec",
  "span",
@@ -646,11 +716,129 @@ dependencies = [
 
 [[package]]
 name = "home"
-version = "0.5.9"
+version = "0.5.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5"
+checksum = "589533453244b0995c858700322199b2becb13b627df2851f64a2775d024abcf"
 dependencies = [
- "windows-sys 0.52.0",
+ "windows-sys 0.59.0",
+]
+
+[[package]]
+name = "icu_collections"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526"
+dependencies = [
+ "displaydoc",
+ "yoke",
+ "zerofrom",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_locid"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637"
+dependencies = [
+ "displaydoc",
+ "litemap",
+ "tinystr",
+ "writeable",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_locid_transform"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e"
+dependencies = [
+ "displaydoc",
+ "icu_locid",
+ "icu_locid_transform_data",
+ "icu_provider",
+ "tinystr",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_locid_transform_data"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e"
+
+[[package]]
+name = "icu_normalizer"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f"
+dependencies = [
+ "displaydoc",
+ "icu_collections",
+ "icu_normalizer_data",
+ "icu_properties",
+ "icu_provider",
+ "smallvec",
+ "utf16_iter",
+ "utf8_iter",
+ "write16",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_normalizer_data"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516"
+
+[[package]]
+name = "icu_properties"
+version = "1.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5"
+dependencies = [
+ "displaydoc",
+ "icu_collections",
+ "icu_locid_transform",
+ "icu_properties_data",
+ "icu_provider",
+ "tinystr",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_properties_data"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569"
+
+[[package]]
+name = "icu_provider"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9"
+dependencies = [
+ "displaydoc",
+ "icu_locid",
+ "icu_provider_macros",
+ "stable_deref_trait",
+ "tinystr",
+ "writeable",
+ "yoke",
+ "zerofrom",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_provider_macros"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
 ]
 
 [[package]]
@@ -669,7 +857,7 @@ dependencies = [
  "ide-db",
  "ide-diagnostics",
  "ide-ssr",
- "itertools",
+ "itertools 0.14.0",
  "nohash-hasher",
  "oorandom",
  "profile",
@@ -697,7 +885,7 @@ dependencies = [
  "expect-test",
  "hir",
  "ide-db",
- "itertools",
+ "itertools 0.14.0",
  "smallvec",
  "stdx",
  "syntax",
@@ -715,7 +903,7 @@ dependencies = [
  "expect-test",
  "hir",
  "ide-db",
- "itertools",
+ "itertools 0.14.0",
  "smallvec",
  "stdx",
  "syntax",
@@ -730,7 +918,7 @@ version = "0.0.0"
 dependencies = [
  "arrayvec",
  "base-db",
- "bitflags 2.7.0",
+ "bitflags 2.9.0",
  "cov-mark",
  "crossbeam-channel",
  "either",
@@ -738,14 +926,16 @@ dependencies = [
  "fst",
  "hir",
  "indexmap",
- "itertools",
+ "itertools 0.14.0",
  "line-index 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "memchr",
  "nohash-hasher",
  "parser",
  "profile",
+ "query-group-macro",
  "rayon",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
+ "salsa",
  "span",
  "stdx",
  "syntax",
@@ -753,6 +943,7 @@ dependencies = [
  "test-utils",
  "tracing",
  "triomphe",
+ "vfs",
 ]
 
 [[package]]
@@ -765,7 +956,7 @@ dependencies = [
  "expect-test",
  "hir",
  "ide-db",
- "itertools",
+ "itertools 0.14.0",
  "paths",
  "serde_json",
  "stdx",
@@ -783,10 +974,8 @@ dependencies = [
  "expect-test",
  "hir",
  "ide-db",
- "itertools",
- "nohash-hasher",
+ "itertools 0.14.0",
  "parser",
- "stdx",
  "syntax",
  "test-fixture",
  "test-utils",
@@ -795,22 +984,34 @@ dependencies = [
 
 [[package]]
 name = "idna"
-version = "0.5.0"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e"
+dependencies = [
+ "idna_adapter",
+ "smallvec",
+ "utf8_iter",
+]
+
+[[package]]
+name = "idna_adapter"
+version = "1.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6"
+checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71"
 dependencies = [
- "unicode-bidi",
- "unicode-normalization",
+ "icu_normalizer",
+ "icu_properties",
 ]
 
 [[package]]
 name = "indexmap"
-version = "2.3.0"
+version = "2.8.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "de3fc2e30ba82dd1b3911c8de1ffc143c74a914a14e99514d7637e3099df5ea0"
+checksum = "3954d50fe15b02142bf25d3b8bdadb634ec3948f103d04ffe3031bc8fe9d7058"
 dependencies = [
  "equivalent",
- "hashbrown",
+ "hashbrown 0.15.2",
+ "serde",
 ]
 
 [[package]]
@@ -819,7 +1020,7 @@ version = "0.11.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f37dccff2791ab604f9babef0ba14fbe0be30bd368dc541e2b08d07c8aa908f3"
 dependencies = [
- "bitflags 2.7.0",
+ "bitflags 2.9.0",
  "inotify-sys",
  "libc",
 ]
@@ -838,8 +1039,8 @@ name = "intern"
 version = "0.0.0"
 dependencies = [
  "dashmap",
- "hashbrown",
- "rustc-hash 2.0.0",
+ "hashbrown 0.14.5",
+ "rustc-hash 2.1.1",
  "triomphe",
 ]
 
@@ -852,17 +1053,26 @@ dependencies = [
  "either",
 ]
 
+[[package]]
+name = "itertools"
+version = "0.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285"
+dependencies = [
+ "either",
+]
+
 [[package]]
 name = "itoa"
-version = "1.0.11"
+version = "1.0.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b"
+checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
 
 [[package]]
 name = "jod-thread"
-version = "0.1.2"
+version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8b23360e99b8717f20aaa4598f5a6541efbe30630039fbc7706cf954a87947ae"
+checksum = "a037eddb7d28de1d0fc42411f501b53b75838d313908078d6698d064f3029b24"
 
 [[package]]
 name = "kqueue"
@@ -908,9 +1118,9 @@ checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa"
 
 [[package]]
 name = "libloading"
-version = "0.8.5"
+version = "0.8.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4"
+checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34"
 dependencies = [
  "cfg-if",
  "windows-targets 0.52.6",
@@ -918,9 +1128,9 @@ dependencies = [
 
 [[package]]
 name = "libmimalloc-sys"
-version = "0.1.39"
+version = "0.1.40"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "23aa6811d3bd4deb8a84dde645f943476d13b248d818edcf8ce0b2f37f036b44"
+checksum = "07d0e07885d6a754b9c7993f2625187ad694ee985d60f23355ff0e7077261502"
 dependencies = [
  "cc",
  "libc",
@@ -932,7 +1142,7 @@ version = "0.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d"
 dependencies = [
- "bitflags 2.7.0",
+ "bitflags 2.9.0",
  "libc",
  "redox_syscall",
 ]
@@ -957,10 +1167,10 @@ dependencies = [
 ]
 
 [[package]]
-name = "linked-hash-map"
-version = "0.5.6"
+name = "litemap"
+version = "0.7.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f"
+checksum = "23fb14cb19457329c82206317a5663005a4d404783dc74f4252769b0d5f42856"
 
 [[package]]
 name = "load-cargo"
@@ -971,8 +1181,7 @@ dependencies = [
  "hir-expand",
  "ide-db",
  "intern",
- "itertools",
- "paths",
+ "itertools 0.14.0",
  "proc-macro-api",
  "project-model",
  "span",
@@ -994,30 +1203,44 @@ dependencies = [
 
 [[package]]
 name = "log"
-version = "0.4.22"
+version = "0.4.26"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"
+checksum = "30bde2b3dc3671ae49d8e2e9f044c7c005836e7a023ee57cffa25ab82764bb9e"
 
 [[package]]
-name = "lsp-server"
-version = "0.7.7"
+name = "loom"
+version = "0.7.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "550446e84739dcaf6d48a4a093973850669e13e8a34d8f8d64851041be267cd9"
+checksum = "419e0dc8046cb947daa77eb95ae174acfbddb7673b4151f56d1eed8e93fbfaca"
+dependencies = [
+ "cfg-if",
+ "generator",
+ "scoped-tls",
+ "tracing",
+ "tracing-subscriber",
+]
+
+[[package]]
+name = "lsp-server"
+version = "0.7.8"
 dependencies = [
  "crossbeam-channel",
+ "ctrlc",
  "log",
+ "lsp-types",
  "serde",
+ "serde_derive",
  "serde_json",
 ]
 
 [[package]]
 name = "lsp-server"
 version = "0.7.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9462c4dc73e17f971ec1f171d44bfffb72e65a130117233388a0ebc7ec5656f9"
 dependencies = [
  "crossbeam-channel",
- "ctrlc",
  "log",
- "lsp-types",
  "serde",
  "serde_derive",
  "serde_json",
@@ -1037,10 +1260,13 @@ dependencies = [
 ]
 
 [[package]]
-name = "lz4_flex"
-version = "0.11.3"
+name = "matchers"
+version = "0.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "75761162ae2b0e580d7e7c390558127e5f01b4194debd6221fd8c207fc80e3f5"
+checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
+dependencies = [
+ "regex-automata 0.1.10",
+]
 
 [[package]]
 name = "mbe"
@@ -1052,14 +1278,13 @@ dependencies = [
  "intern",
  "parser",
  "ra-ap-rustc_lexer",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
  "smallvec",
  "span",
  "stdx",
  "syntax",
  "syntax-bridge",
  "test-utils",
- "tracing",
  "tt",
 ]
 
@@ -1071,9 +1296,9 @@ checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
 
 [[package]]
 name = "memmap2"
-version = "0.5.10"
+version = "0.9.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "83faa42c0a078c393f6b29d5db232d8be22776a891f8f56e5284faee4a20b327"
+checksum = "fd3f7eed9d3848f8b98834af67102b720745c4ec028fcd0aa0239277e7de374f"
 dependencies = [
  "libc",
 ]
@@ -1089,20 +1314,20 @@ dependencies = [
 
 [[package]]
 name = "mimalloc"
-version = "0.1.43"
+version = "0.1.44"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "68914350ae34959d83f732418d51e2427a794055d0b9529f48259ac07af65633"
+checksum = "99585191385958383e13f6b822e6b6d8d9cf928e7d286ceb092da92b43c87bc1"
 dependencies = [
  "libmimalloc-sys",
 ]
 
 [[package]]
 name = "miniz_oxide"
-version = "0.7.4"
+version = "0.8.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08"
+checksum = "8e3e04debbb59698c15bacbb6d93584a8c0ca9cc3213cb423d31f760d8843ce5"
 dependencies = [
- "adler",
+ "adler2",
 ]
 
 [[package]]
@@ -1128,13 +1353,13 @@ dependencies = [
 
 [[package]]
 name = "nix"
-version = "0.28.0"
+version = "0.29.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4"
+checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46"
 dependencies = [
- "bitflags 2.7.0",
+ "bitflags 2.9.0",
  "cfg-if",
- "cfg_aliases 0.1.1",
+ "cfg_aliases",
  "libc",
 ]
 
@@ -1150,7 +1375,7 @@ version = "8.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2fee8403b3d66ac7b26aee6e40a897d85dc5ce26f44da36b8b73e987cc52e943"
 dependencies = [
- "bitflags 2.7.0",
+ "bitflags 2.9.0",
  "filetime",
  "fsevent-sys",
  "inotify",
@@ -1169,6 +1394,16 @@ version = "2.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5e0826a989adedc2a244799e823aece04662b66609d96af8dff7ac6df9a8925d"
 
+[[package]]
+name = "nu-ansi-term"
+version = "0.46.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
+dependencies = [
+ "overload",
+ "winapi",
+]
+
 [[package]]
 name = "nu-ansi-term"
 version = "0.50.1"
@@ -1205,33 +1440,24 @@ dependencies = [
 
 [[package]]
 name = "object"
-version = "0.33.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d8dd6c0cdf9429bce006e1362bfce61fa1bfd8c898a643ed8d2b471934701d3d"
-dependencies = [
- "memchr",
-]
-
-[[package]]
-name = "object"
-version = "0.36.3"
+version = "0.36.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "27b64972346851a39438c60b341ebc01bba47464ae329e55cf343eb93964efd9"
+checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87"
 dependencies = [
  "memchr",
 ]
 
 [[package]]
 name = "once_cell"
-version = "1.19.0"
+version = "1.21.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
+checksum = "d75b0bedcc4fe52caa0e03d9f1151a323e4aa5e2d78ba3580400cd3c9e2bc4bc"
 
 [[package]]
 name = "oorandom"
-version = "11.1.4"
+version = "11.1.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b410bbe7e14ab526a0e86877eb47c6996a2bd7746f027ba551028c925390e4e9"
+checksum = "d6790f58c7ff633d8771f42965289203411a5e5c68388703c06e14f24770b41e"
 
 [[package]]
 name = "option-ext"
@@ -1239,6 +1465,12 @@ version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
 
+[[package]]
+name = "overload"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
+
 [[package]]
 name = "parking_lot"
 version = "0.12.3"
@@ -1325,24 +1557,21 @@ dependencies = [
 
 [[package]]
 name = "pin-project-lite"
-version = "0.2.14"
+version = "0.2.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02"
+checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b"
 
 [[package]]
-name = "powerfmt"
-version = "0.2.0"
+name = "portable-atomic"
+version = "1.11.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391"
+checksum = "350e9b48cbc6b0e028b0473b114454c6316e57336ee184ceab6e53f72c178b3e"
 
 [[package]]
-name = "ppv-lite86"
-version = "0.2.20"
+name = "powerfmt"
+version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04"
-dependencies = [
- "zerocopy",
-]
+checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391"
 
 [[package]]
 name = "proc-macro-api"
@@ -1351,7 +1580,7 @@ dependencies = [
  "indexmap",
  "intern",
  "paths",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
  "serde",
  "serde_derive",
  "serde_json",
@@ -1370,12 +1599,11 @@ dependencies = [
  "libc",
  "libloading",
  "memmap2",
- "object 0.33.0",
+ "object",
  "paths",
  "proc-macro-test",
  "ra-ap-rustc_lexer",
  "span",
- "stdx",
  "syntax-bridge",
  "tt",
 ]
@@ -1398,23 +1626,23 @@ dependencies = [
 
 [[package]]
 name = "proc-macro2"
-version = "1.0.93"
+version = "1.0.94"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99"
+checksum = "a31971752e70b8b2686d7e46ec17fb38dad4051d94024c88df49b667caea9c84"
 dependencies = [
  "unicode-ident",
 ]
 
 [[package]]
 name = "process-wrap"
-version = "8.0.2"
+version = "8.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "38ee68ae331824036479c84060534b18254c864fa73366c58d86db3b7b811619"
+checksum = "d35f4dc9988d1326b065b4def5e950c3ed727aa03e3151b86cc9e2aec6b03f54"
 dependencies = [
  "indexmap",
  "nix",
  "tracing",
- "windows",
+ "windows 0.59.0",
 ]
 
 [[package]]
@@ -1438,10 +1666,10 @@ dependencies = [
  "cfg",
  "expect-test",
  "intern",
- "itertools",
+ "itertools 0.14.0",
  "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "paths",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
  "semver",
  "serde",
  "serde_derive",
@@ -1461,7 +1689,7 @@ checksum = "a3a7c64d9bf75b1b8d981124c14c179074e8caa7dfe7b6a12e6222ddcd0c8f72"
 dependencies = [
  "once_cell",
  "protobuf-support",
- "thiserror",
+ "thiserror 1.0.69",
 ]
 
 [[package]]
@@ -1470,7 +1698,7 @@ version = "3.7.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b088fd20b938a875ea00843b6faf48579462630015c3788d397ad6a786663252"
 dependencies = [
- "thiserror",
+ "thiserror 1.0.69",
 ]
 
 [[package]]
@@ -1479,7 +1707,7 @@ version = "0.9.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "57206b407293d2bcd3af849ce869d52068623f19e1b5ff8e8778e3309439682b"
 dependencies = [
- "bitflags 2.7.0",
+ "bitflags 2.9.0",
  "memchr",
  "unicase",
 ]
@@ -1493,22 +1721,33 @@ dependencies = [
  "pulldown-cmark",
 ]
 
+[[package]]
+name = "query-group-macro"
+version = "0.0.0"
+dependencies = [
+ "expect-test",
+ "proc-macro2",
+ "quote",
+ "salsa",
+ "syn",
+]
+
 [[package]]
 name = "quote"
-version = "1.0.36"
+version = "1.0.40"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7"
+checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d"
 dependencies = [
  "proc-macro2",
 ]
 
 [[package]]
 name = "ra-ap-rustc_abi"
-version = "0.100.0"
+version = "0.110.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f1651b0f7e8c3eb7c27a88f39d277e69c32bfe58e3be174d286c1a24d6a7a4d8"
+checksum = "912228bd8ed3beff1f6f9e5e2d4b37c0827ba3e2070060bf3858a311d0e29e30"
 dependencies = [
- "bitflags 2.7.0",
+ "bitflags 2.9.0",
  "ra-ap-rustc_hashes",
  "ra-ap-rustc_index",
  "tracing",
@@ -1516,18 +1755,18 @@ dependencies = [
 
 [[package]]
 name = "ra-ap-rustc_hashes"
-version = "0.100.0"
+version = "0.110.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2bcd85e93dc0ea850bcfe7957a115957df799ccbc9eea488bdee5ec6780d212b"
+checksum = "ba520764daf057a9d963fa769f4762eaf87ac5d4900ae76195eeead64cd35afd"
 dependencies = [
  "rustc-stable-hash",
 ]
 
 [[package]]
 name = "ra-ap-rustc_index"
-version = "0.100.0"
+version = "0.110.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "62b295fc0640cd9fe0ecab872ee4a17a96f90a3998ec9f0c4765e9b8415c12cc"
+checksum = "b76b5f9ee55f2d0e5a65bea23f6d738893349ce8d3d17a6720933e647ab04978"
 dependencies = [
  "ra-ap-rustc_index_macros",
  "smallvec",
@@ -1535,9 +1774,9 @@ dependencies = [
 
 [[package]]
 name = "ra-ap-rustc_index_macros"
-version = "0.100.0"
+version = "0.110.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c675f4257023aa933882906f13802cae287e88cc39ab13cbb96809083db0c801"
+checksum = "ddd972eb1face2fcaa0d94c01d97862fb955b5561d4f5932003bce8a6cadd8c6"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1546,9 +1785,9 @@ dependencies = [
 
 [[package]]
 name = "ra-ap-rustc_lexer"
-version = "0.100.0"
+version = "0.110.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c8358702c2a510ea84ba5801ddc047d9ad9520902cfb0e6173277610cdce2c9c"
+checksum = "ba3a9876456fb2521097deef33ddeac1c18260c8eafb68054d986f8b9d6ce9fa"
 dependencies = [
  "memchr",
  "unicode-properties",
@@ -1557,97 +1796,111 @@ dependencies = [
 
 [[package]]
 name = "ra-ap-rustc_parse_format"
-version = "0.100.0"
+version = "0.110.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b98f402011d46732c35c47bfd111dec0495747fef2ec900ddee7fe15d78449a7"
+checksum = "8e85de58dfcc60a5f9d5ec0157a657e3f84abd8f22c8a0c4d707cfb42c9011f4"
 dependencies = [
- "ra-ap-rustc_index",
  "ra-ap-rustc_lexer",
+ "rustc-literal-escaper",
 ]
 
 [[package]]
 name = "ra-ap-rustc_pattern_analysis"
-version = "0.100.0"
+version = "0.110.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bef3ff73fa4653252ffe1d1e9177a446f49ef46d97140e4816b7ff2dad59ed53"
+checksum = "ceadf9db550db67deff7eff2e2765109b860c9d7e5bdfca144863020289c823d"
 dependencies = [
  "ra-ap-rustc_index",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
  "rustc_apfloat",
  "smallvec",
  "tracing",
 ]
 
 [[package]]
-name = "rand"
-version = "0.8.5"
+name = "rayon"
+version = "1.10.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
+checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa"
 dependencies = [
- "libc",
- "rand_chacha",
- "rand_core",
+ "either",
+ "rayon-core",
 ]
 
 [[package]]
-name = "rand_chacha"
-version = "0.3.1"
+name = "rayon-core"
+version = "1.12.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
+checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2"
 dependencies = [
- "ppv-lite86",
- "rand_core",
+ "crossbeam-deque",
+ "crossbeam-utils",
 ]
 
 [[package]]
-name = "rand_core"
-version = "0.6.4"
+name = "redox_syscall"
+version = "0.5.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
+checksum = "0b8c0c260b63a8219631167be35e6a988e9554dbd323f8bd08439c8ed1302bd1"
 dependencies = [
- "getrandom",
+ "bitflags 2.9.0",
 ]
 
 [[package]]
-name = "rayon"
-version = "1.10.0"
+name = "redox_users"
+version = "0.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa"
+checksum = "dd6f9d3d47bdd2ad6945c5015a226ec6155d0bcdfd8f7cd29f86b71f8de99d2b"
 dependencies = [
- "either",
- "rayon-core",
+ "getrandom",
+ "libredox",
+ "thiserror 2.0.12",
 ]
 
 [[package]]
-name = "rayon-core"
-version = "1.12.1"
+name = "regex"
+version = "1.11.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2"
+checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
 dependencies = [
- "crossbeam-deque",
- "crossbeam-utils",
+ "aho-corasick",
+ "memchr",
+ "regex-automata 0.4.9",
+ "regex-syntax 0.8.5",
 ]
 
 [[package]]
-name = "redox_syscall"
-version = "0.5.3"
+name = "regex-automata"
+version = "0.1.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2a908a6e00f1fdd0dfd9c0eb08ce85126f6d8bbda50017e74bc4a4b7d4a926a4"
+checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
 dependencies = [
- "bitflags 2.7.0",
+ "regex-syntax 0.6.29",
 ]
 
 [[package]]
-name = "redox_users"
-version = "0.4.5"
+name = "regex-automata"
+version = "0.4.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bd283d9651eeda4b2a83a43c1c91b266c40fd76ecd39a50a8c630ae69dc72891"
+checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
 dependencies = [
- "getrandom",
- "libredox",
- "thiserror",
+ "aho-corasick",
+ "memchr",
+ "regex-syntax 0.8.5",
 ]
 
+[[package]]
+name = "regex-syntax"
+version = "0.6.29"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
+
+[[package]]
+name = "regex-syntax"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
+
 [[package]]
 name = "rowan"
 version = "0.15.15"
@@ -1655,7 +1908,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "32a58fa8a7ccff2aec4f39cc45bf5f985cec7125ab271cf681c279fd00192b49"
 dependencies = [
  "countme",
- "hashbrown",
+ "hashbrown 0.14.5",
  "memoffset",
  "rustc-hash 1.1.0",
  "text-size",
@@ -1665,7 +1918,6 @@ dependencies = [
 name = "rust-analyzer"
 version = "0.0.0"
 dependencies = [
- "always-assert",
  "anyhow",
  "base64",
  "cargo_metadata",
@@ -1681,10 +1933,11 @@ dependencies = [
  "ide-completion",
  "ide-db",
  "ide-ssr",
+ "indexmap",
  "intern",
- "itertools",
+ "itertools 0.14.0",
  "load-cargo",
- "lsp-server 0.7.7",
+ "lsp-server 0.7.8 (registry+https://github.com/rust-lang/crates.io-index)",
  "lsp-types",
  "memchr",
  "mimalloc",
@@ -1699,7 +1952,7 @@ dependencies = [
  "profile",
  "project-model",
  "rayon",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
  "scip",
  "semver",
  "serde",
@@ -1740,9 +1993,9 @@ checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
 
 [[package]]
 name = "rustc-hash"
-version = "2.0.0"
+version = "2.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152"
+checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d"
 
 [[package]]
 name = "rustc-literal-escaper"
@@ -1752,54 +2005,72 @@ checksum = "0041b6238913c41fe704213a4a9329e2f685a156d1781998128b4149c230ad04"
 
 [[package]]
 name = "rustc-stable-hash"
-version = "0.1.1"
+version = "0.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2febf9acc5ee5e99d1ad0afcdbccc02d87aa3f857a1f01f825b80eacf8edfcd1"
+checksum = "781442f29170c5c93b7185ad559492601acdc71d5bb0706f5868094f45cfcd08"
 
 [[package]]
 name = "rustc_apfloat"
-version = "0.2.1+llvm-462a31f5a5ab"
+version = "0.2.2+llvm-462a31f5a5ab"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "886d94c63c812a8037c4faca2607453a0fa4cf82f734665266876b022244543f"
+checksum = "121e2195ff969977a4e2b5c9965ea867fce7e4cb5aee5b09dee698a7932d574f"
 dependencies = [
- "bitflags 1.3.2",
+ "bitflags 2.9.0",
  "smallvec",
 ]
 
+[[package]]
+name = "rustversion"
+version = "1.0.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eded382c5f5f786b989652c49544c4877d9f015cc22e145a5ea8ea66c2921cd2"
+
 [[package]]
 name = "ryu"
-version = "1.0.18"
+version = "1.0.20"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f"
+checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
 
 [[package]]
 name = "salsa"
-version = "0.0.0"
+version = "0.20.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1be22155f8d9732518b2db2bf379fe6f0b2375e76b08b7c8fe6c1b887d548c24"
 dependencies = [
- "dissimilar",
- "expect-test",
+ "boxcar",
+ "crossbeam-queue",
+ "dashmap",
+ "hashbrown 0.15.2",
+ "hashlink",
  "indexmap",
- "itertools",
- "linked-hash-map",
- "lock_api",
- "oorandom",
  "parking_lot",
- "rand",
- "rustc-hash 2.0.0",
+ "portable-atomic",
+ "rayon",
+ "rustc-hash 2.1.1",
+ "salsa-macro-rules",
  "salsa-macros",
  "smallvec",
+ "thin-vec",
  "tracing",
- "triomphe",
 ]
 
+[[package]]
+name = "salsa-macro-rules"
+version = "0.20.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f55a7ef0a84e336f7c5f0332d81727f5629fe042d2aa556c75307afebc9f78a5"
+
 [[package]]
 name = "salsa-macros"
-version = "0.0.0"
+version = "0.20.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8d0e88a9c0c0d231a63f83dcd1a2c5e5d11044fac4b65bc9ad3b68ab48b0a0ab"
 dependencies = [
  "heck",
  "proc-macro2",
  "quote",
  "syn",
+ "synstructure",
 ]
 
 [[package]]
@@ -1813,9 +2084,9 @@ dependencies = [
 
 [[package]]
 name = "scip"
-version = "0.5.1"
+version = "0.5.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8dfafd2fa14c6237fa1fc4310f739d02fa915d92977fa069426591f1de046f81"
+checksum = "fb2b449a5e4660ce817676a0871cd1b4e2ff1023e33a1ac046670fa594b543a2"
 dependencies = [
  "protobuf",
 ]
@@ -1834,27 +2105,27 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
 
 [[package]]
 name = "semver"
-version = "1.0.23"
+version = "1.0.26"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b"
+checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0"
 dependencies = [
  "serde",
 ]
 
 [[package]]
 name = "serde"
-version = "1.0.216"
+version = "1.0.219"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0b9781016e935a97e8beecf0c933758c97a5520d32930e460142b4cd80c6338e"
+checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6"
 dependencies = [
  "serde_derive",
 ]
 
 [[package]]
 name = "serde_derive"
-version = "1.0.216"
+version = "1.0.219"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "46f859dbbf73865c6627ed570e78961cd3ac92407a2d117204c49232485da55e"
+checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1863,9 +2134,9 @@ dependencies = [
 
 [[package]]
 name = "serde_json"
-version = "1.0.124"
+version = "1.0.140"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "66ad62847a56b3dba58cc891acd13884b9c61138d330c0d7b6181713d4fce38d"
+checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373"
 dependencies = [
  "indexmap",
  "itoa",
@@ -1876,9 +2147,9 @@ dependencies = [
 
 [[package]]
 name = "serde_repr"
-version = "0.1.19"
+version = "0.1.20"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9"
+checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1887,9 +2158,9 @@ dependencies = [
 
 [[package]]
 name = "serde_spanned"
-version = "0.6.7"
+version = "0.6.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eb5b1b31579f3811bf615c144393417496f152e12ac8b7663bf664f4a815306d"
+checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1"
 dependencies = [
  "serde",
 ]
@@ -1911,9 +2182,9 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
 
 [[package]]
 name = "smallvec"
-version = "1.13.2"
+version = "1.14.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67"
+checksum = "7fcf8323ef1faaee30a44a340193b1ac6814fd9b7b4e88e9d4519a3e4abe1cfd"
 
 [[package]]
 name = "smol_str"
@@ -1929,9 +2200,9 @@ dependencies = [
 name = "span"
 version = "0.0.0"
 dependencies = [
- "hashbrown",
+ "hashbrown 0.14.5",
  "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
  "salsa",
  "stdx",
  "syntax",
@@ -1939,13 +2210,19 @@ dependencies = [
  "vfs",
 ]
 
+[[package]]
+name = "stable_deref_trait"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
+
 [[package]]
 name = "stdx"
 version = "0.0.0"
 dependencies = [
  "backtrace",
  "crossbeam-channel",
- "itertools",
+ "itertools 0.14.0",
  "jod-thread",
  "libc",
  "miow",
@@ -1955,9 +2232,9 @@ dependencies = [
 
 [[package]]
 name = "syn"
-version = "2.0.87"
+version = "2.0.100"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d"
+checksum = "b09a44accad81e1ba1cd74a32461ba89dee89095ba17b32f5d03683b1b1fc2a0"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1979,15 +2256,13 @@ dependencies = [
 name = "syntax"
 version = "0.0.0"
 dependencies = [
- "cov-mark",
  "either",
  "expect-test",
- "indexmap",
- "itertools",
+ "itertools 0.14.0",
  "parser",
  "rayon",
  "rowan",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
  "rustc-literal-escaper",
  "rustc_apfloat",
  "smol_str",
@@ -2003,12 +2278,11 @@ version = "0.0.0"
 dependencies = [
  "intern",
  "parser",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
  "span",
  "stdx",
  "syntax",
  "test-utils",
- "tracing",
  "tt",
 ]
 
@@ -2026,10 +2300,12 @@ dependencies = [
  "cfg",
  "hir-expand",
  "intern",
- "rustc-hash 2.0.0",
+ "paths",
+ "rustc-hash 2.1.1",
  "span",
  "stdx",
  "test-utils",
+ "triomphe",
  "tt",
 ]
 
@@ -2040,10 +2316,9 @@ dependencies = [
  "dissimilar",
  "paths",
  "profile",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
  "stdx",
  "text-size",
- "tracing",
 ]
 
 [[package]]
@@ -2052,20 +2327,46 @@ version = "1.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f18aa187839b2bdb1ad2fa35ead8c4c2976b64e4363c386d45ac0f7ee85c9233"
 
+[[package]]
+name = "thin-vec"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "144f754d318415ac792f9d69fc87abbbfc043ce2ef041c60f16ad828f638717d"
+
 [[package]]
 name = "thiserror"
-version = "1.0.63"
+version = "1.0.69"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724"
+checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52"
 dependencies = [
- "thiserror-impl",
+ "thiserror-impl 1.0.69",
+]
+
+[[package]]
+name = "thiserror"
+version = "2.0.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708"
+dependencies = [
+ "thiserror-impl 2.0.12",
 ]
 
 [[package]]
 name = "thiserror-impl"
-version = "1.0.63"
+version = "1.0.69"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261"
+checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "thiserror-impl"
+version = "2.0.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -2115,9 +2416,9 @@ dependencies = [
 
 [[package]]
 name = "time"
-version = "0.3.36"
+version = "0.3.40"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885"
+checksum = "9d9c75b47bdff86fa3334a3db91356b8d7d86a9b839dab7d0bdc5c3d3a077618"
 dependencies = [
  "deranged",
  "itoa",
@@ -2132,40 +2433,35 @@ dependencies = [
 
 [[package]]
 name = "time-core"
-version = "0.1.2"
+version = "0.1.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3"
+checksum = "c9e9a38711f559d9e3ce1cdb06dd7c5b8ea546bc90052da6d06bb76da74bb07c"
 
 [[package]]
 name = "time-macros"
-version = "0.2.18"
+version = "0.2.21"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf"
+checksum = "29aa485584182073ed57fd5004aa09c371f021325014694e432313345865fd04"
 dependencies = [
  "num-conv",
  "time-core",
 ]
 
 [[package]]
-name = "tinyvec"
-version = "1.8.0"
+name = "tinystr"
+version = "0.7.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938"
+checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f"
 dependencies = [
- "tinyvec_macros",
+ "displaydoc",
+ "zerovec",
 ]
 
-[[package]]
-name = "tinyvec_macros"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
-
 [[package]]
 name = "toml"
-version = "0.8.19"
+version = "0.8.20"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e"
+checksum = "cd87a5cdd6ffab733b2f74bc4fd7ee5fff6634124999ac278c35fc78c6120148"
 dependencies = [
  "serde",
  "serde_spanned",
@@ -2184,9 +2480,9 @@ dependencies = [
 
 [[package]]
 name = "toml_edit"
-version = "0.22.20"
+version = "0.22.24"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "583c44c02ad26b0c3f3066fe629275e50627026c51ac2e595cca4c230ce1ce1d"
+checksum = "17b4795ff5edd201c7cd6dca065ae59972ce77d1b80fa0a84d94950ece7d1474"
 dependencies = [
  "indexmap",
  "serde",
@@ -2205,9 +2501,9 @@ dependencies = [
 
 [[package]]
 name = "tracing"
-version = "0.1.40"
+version = "0.1.41"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef"
+checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0"
 dependencies = [
  "pin-project-lite",
  "tracing-attributes",
@@ -2216,9 +2512,9 @@ dependencies = [
 
 [[package]]
 name = "tracing-attributes"
-version = "0.1.27"
+version = "0.1.28"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
+checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -2227,9 +2523,9 @@ dependencies = [
 
 [[package]]
 name = "tracing-core"
-version = "0.1.32"
+version = "0.1.33"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54"
+checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c"
 dependencies = [
  "once_cell",
  "valuable",
@@ -2248,24 +2544,30 @@ dependencies = [
 
 [[package]]
 name = "tracing-subscriber"
-version = "0.3.18"
+version = "0.3.19"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b"
+checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008"
 dependencies = [
+ "matchers",
+ "nu-ansi-term 0.46.0",
+ "once_cell",
+ "regex",
  "sharded-slab",
+ "smallvec",
  "thread_local",
  "time",
+ "tracing",
  "tracing-core",
  "tracing-log",
 ]
 
 [[package]]
 name = "tracing-tree"
-version = "0.3.1"
+version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b56c62d2c80033cb36fae448730a2f2ef99410fe3ecbffc916681a32f6807dbe"
+checksum = "f459ca79f1b0d5f71c54ddfde6debfc59c8b6eeb46808ae492077f739dc7b49c"
 dependencies = [
- "nu-ansi-term",
+ "nu-ansi-term 0.50.1",
  "tracing-core",
  "tracing-log",
  "tracing-subscriber",
@@ -2302,51 +2604,33 @@ checksum = "a3e5df347f0bf3ec1d670aad6ca5c6a1859cd9ea61d2113125794654ccced68f"
 
 [[package]]
 name = "unicase"
-version = "2.7.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89"
-dependencies = [
- "version_check",
-]
-
-[[package]]
-name = "unicode-bidi"
-version = "0.3.15"
+version = "2.8.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75"
+checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539"
 
 [[package]]
 name = "unicode-ident"
-version = "1.0.12"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
-
-[[package]]
-name = "unicode-normalization"
-version = "0.1.23"
+version = "1.0.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5"
-dependencies = [
- "tinyvec",
-]
+checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
 
 [[package]]
 name = "unicode-properties"
-version = "0.1.1"
+version = "0.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e4259d9d4425d9f0661581b804cb85fe66a4c631cadd8f490d1c13a35d5d9291"
+checksum = "e70f2a8b45122e719eb623c01822704c4e0907e7e426a05927e1a1cfff5b75d0"
 
 [[package]]
 name = "unicode-xid"
-version = "0.2.4"
+version = "0.2.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
+checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853"
 
 [[package]]
 name = "url"
-version = "2.5.2"
+version = "2.5.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c"
+checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60"
 dependencies = [
  "form_urlencoded",
  "idna",
@@ -2355,16 +2639,22 @@ dependencies = [
 ]
 
 [[package]]
-name = "valuable"
-version = "0.1.0"
+name = "utf16_iter"
+version = "1.0.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"
+checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246"
 
 [[package]]
-name = "version_check"
-version = "0.9.5"
+name = "utf8_iter"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be"
+
+[[package]]
+name = "valuable"
+version = "0.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
+checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65"
 
 [[package]]
 name = "vfs"
@@ -2375,7 +2665,7 @@ dependencies = [
  "indexmap",
  "nohash-hasher",
  "paths",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
  "stdx",
  "tracing",
 ]
@@ -2388,7 +2678,7 @@ dependencies = [
  "notify",
  "paths",
  "rayon",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
  "stdx",
  "tracing",
  "vfs",
@@ -2411,6 +2701,22 @@ version = "0.11.0+wasi-snapshot-preview1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
 
+[[package]]
+name = "winapi"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+dependencies = [
+ "winapi-i686-pc-windows-gnu",
+ "winapi-x86_64-pc-windows-gnu",
+]
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+
 [[package]]
 name = "winapi-util"
 version = "0.1.9"
@@ -2420,33 +2726,85 @@ dependencies = [
  "windows-sys 0.59.0",
 ]
 
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+
 [[package]]
 name = "windows"
-version = "0.56.0"
+version = "0.58.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1de69df01bdf1ead2f4ac895dc77c9351aefff65b2f3db429a343f9cbf05e132"
+checksum = "dd04d41d93c4992d421894c18c8b43496aa748dd4c081bac0dc93eb0489272b6"
 dependencies = [
- "windows-core",
+ "windows-core 0.58.0",
  "windows-targets 0.52.6",
 ]
 
+[[package]]
+name = "windows"
+version = "0.59.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7f919aee0a93304be7f62e8e5027811bbba96bcb1de84d6618be56e43f8a32a1"
+dependencies = [
+ "windows-core 0.59.0",
+ "windows-targets 0.53.0",
+]
+
 [[package]]
 name = "windows-core"
-version = "0.56.0"
+version = "0.58.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4698e52ed2d08f8658ab0c39512a7c00ee5fe2688c65f8c0a4f06750d729f2a6"
+checksum = "6ba6d44ec8c2591c134257ce647b7ea6b20335bf6379a27dac5f1641fcf59f99"
 dependencies = [
- "windows-implement",
- "windows-interface",
- "windows-result",
+ "windows-implement 0.58.0",
+ "windows-interface 0.58.0",
+ "windows-result 0.2.0",
+ "windows-strings 0.1.0",
  "windows-targets 0.52.6",
 ]
 
+[[package]]
+name = "windows-core"
+version = "0.59.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "810ce18ed2112484b0d4e15d022e5f598113e220c53e373fb31e67e21670c1ce"
+dependencies = [
+ "windows-implement 0.59.0",
+ "windows-interface 0.59.0",
+ "windows-result 0.3.1",
+ "windows-strings 0.3.1",
+ "windows-targets 0.53.0",
+]
+
 [[package]]
 name = "windows-implement"
-version = "0.56.0"
+version = "0.58.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f6fc35f58ecd95a9b71c4f2329b911016e6bec66b3f2e6a4aad86bd2e99e2f9b"
+checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "windows-implement"
+version = "0.59.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "83577b051e2f49a058c308f17f273b570a6a758386fc291b5f6a934dd84e48c1"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "windows-interface"
+version = "0.58.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -2455,24 +2813,58 @@ dependencies = [
 
 [[package]]
 name = "windows-interface"
-version = "0.56.0"
+version = "0.59.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "08990546bf4edef8f431fa6326e032865f27138718c587dc21bc0265bbcb57cc"
+checksum = "cb26fd936d991781ea39e87c3a27285081e3c0da5ca0fcbc02d368cc6f52ff01"
 dependencies = [
  "proc-macro2",
  "quote",
  "syn",
 ]
 
+[[package]]
+name = "windows-link"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6dccfd733ce2b1753b03b6d3c65edf020262ea35e20ccdf3e288043e6dd620e3"
+
+[[package]]
+name = "windows-result"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e"
+dependencies = [
+ "windows-targets 0.52.6",
+]
+
 [[package]]
 name = "windows-result"
-version = "0.1.2"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "06374efe858fab7e4f881500e6e86ec8bc28f9462c47e5a9941a0142ad86b189"
+dependencies = [
+ "windows-link",
+]
+
+[[package]]
+name = "windows-strings"
+version = "0.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5e383302e8ec8515204254685643de10811af0ed97ea37210dc26fb0032647f8"
+checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10"
 dependencies = [
+ "windows-result 0.2.0",
  "windows-targets 0.52.6",
 ]
 
+[[package]]
+name = "windows-strings"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "87fa48cc5d406560701792be122a10132491cff9d0aeb23583cc2dcafc847319"
+dependencies = [
+ "windows-link",
+]
+
 [[package]]
 name = "windows-sys"
 version = "0.48.0"
@@ -2524,13 +2916,29 @@ dependencies = [
  "windows_aarch64_gnullvm 0.52.6",
  "windows_aarch64_msvc 0.52.6",
  "windows_i686_gnu 0.52.6",
- "windows_i686_gnullvm",
+ "windows_i686_gnullvm 0.52.6",
  "windows_i686_msvc 0.52.6",
  "windows_x86_64_gnu 0.52.6",
  "windows_x86_64_gnullvm 0.52.6",
  "windows_x86_64_msvc 0.52.6",
 ]
 
+[[package]]
+name = "windows-targets"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b1e4c7e8ceaaf9cb7d7507c974735728ab453b67ef8f18febdd7c11fe59dca8b"
+dependencies = [
+ "windows_aarch64_gnullvm 0.53.0",
+ "windows_aarch64_msvc 0.53.0",
+ "windows_i686_gnu 0.53.0",
+ "windows_i686_gnullvm 0.53.0",
+ "windows_i686_msvc 0.53.0",
+ "windows_x86_64_gnu 0.53.0",
+ "windows_x86_64_gnullvm 0.53.0",
+ "windows_x86_64_msvc 0.53.0",
+]
+
 [[package]]
 name = "windows_aarch64_gnullvm"
 version = "0.48.5"
@@ -2543,6 +2951,12 @@ version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
 
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764"
+
 [[package]]
 name = "windows_aarch64_msvc"
 version = "0.48.5"
@@ -2555,6 +2969,12 @@ version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
 
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c"
+
 [[package]]
 name = "windows_i686_gnu"
 version = "0.48.5"
@@ -2567,12 +2987,24 @@ version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
 
+[[package]]
+name = "windows_i686_gnu"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3"
+
 [[package]]
 name = "windows_i686_gnullvm"
 version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
 
+[[package]]
+name = "windows_i686_gnullvm"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11"
+
 [[package]]
 name = "windows_i686_msvc"
 version = "0.48.5"
@@ -2585,6 +3017,12 @@ version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
 
+[[package]]
+name = "windows_i686_msvc"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d"
+
 [[package]]
 name = "windows_x86_64_gnu"
 version = "0.48.5"
@@ -2597,6 +3035,12 @@ version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
 
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba"
+
 [[package]]
 name = "windows_x86_64_gnullvm"
 version = "0.48.5"
@@ -2609,6 +3053,12 @@ version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
 
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57"
+
 [[package]]
 name = "windows_x86_64_msvc"
 version = "0.48.5"
@@ -2621,11 +3071,17 @@ version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
 
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486"
+
 [[package]]
 name = "winnow"
-version = "0.6.18"
+version = "0.7.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "68a9bda4691f099d435ad181000724da8e5899daa10713c2d432552b9ccd3a6f"
+checksum = "0e7f4ea97f6f78012141bcdb6a216b2609f0979ada50b20ca5b52dde2eac2bb1"
 dependencies = [
  "memchr",
 ]
@@ -2636,6 +3092,18 @@ version = "0.1.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "23f6174b2566cc4a74f95e1367ec343e7fa80c93cc8087f5c4a3d6a1088b2118"
 
+[[package]]
+name = "write16"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936"
+
+[[package]]
+name = "writeable"
+version = "0.5.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51"
+
 [[package]]
 name = "xflags"
 version = "0.3.2"
@@ -2675,7 +3143,7 @@ dependencies = [
  "edition",
  "either",
  "flate2",
- "itertools",
+ "itertools 0.14.0",
  "proc-macro2",
  "quote",
  "stdx",
@@ -2688,20 +3156,66 @@ dependencies = [
 ]
 
 [[package]]
-name = "zerocopy"
-version = "0.7.35"
+name = "yoke"
+version = "0.7.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0"
+checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40"
 dependencies = [
- "byteorder",
- "zerocopy-derive",
+ "serde",
+ "stable_deref_trait",
+ "yoke-derive",
+ "zerofrom",
 ]
 
 [[package]]
-name = "zerocopy-derive"
-version = "0.7.35"
+name = "yoke-derive"
+version = "0.7.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e"
+checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "synstructure",
+]
+
+[[package]]
+name = "zerofrom"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5"
+dependencies = [
+ "zerofrom-derive",
+]
+
+[[package]]
+name = "zerofrom-derive"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "synstructure",
+]
+
+[[package]]
+name = "zerovec"
+version = "0.10.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079"
+dependencies = [
+ "yoke",
+ "zerofrom",
+ "zerovec-derive",
+]
+
+[[package]]
+name = "zerovec-derive"
+version = "0.10.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -2710,13 +3224,17 @@ dependencies = [
 
 [[package]]
 name = "zip"
-version = "0.6.6"
+version = "2.4.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "760394e246e4c28189f19d488c058bf16f564016aefac5d32bb1f3b51d5e9261"
+checksum = "fabe6324e908f85a1c52063ce7aa26b68dcb7eb6dbc83a2d148403c9bc3eba50"
 dependencies = [
- "byteorder",
+ "arbitrary",
  "crc32fast",
  "crossbeam-utils",
+ "displaydoc",
  "flate2",
+ "indexmap",
+ "memchr",
+ "thiserror 2.0.12",
  "time",
 ]
diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml
index e22191397655d..6fa171702dd48 100644
--- a/src/tools/rust-analyzer/Cargo.toml
+++ b/src/tools/rust-analyzer/Cargo.toml
@@ -4,8 +4,8 @@ exclude = ["crates/proc-macro-srv/proc-macro-test/imp"]
 resolver = "2"
 
 [workspace.package]
-rust-version = "1.84"
-edition = "2021"
+rust-version = "1.86"
+edition = "2024"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer team"]
 repository = "https://github.com/rust-lang/rust-analyzer"
@@ -46,7 +46,7 @@ debug = 2
 
 # ungrammar = { path = "../ungrammar" }
 
-# rust-analyzer-salsa = { path = "../salsa" }
+# salsa = { path = "../salsa" }
 
 [workspace.dependencies]
 # local crates
@@ -72,7 +72,7 @@ proc-macro-srv = { path = "./crates/proc-macro-srv", version = "0.0.0" }
 proc-macro-srv-cli = { path = "./crates/proc-macro-srv-cli", version = "0.0.0" }
 profile = { path = "./crates/profile", version = "0.0.0" }
 project-model = { path = "./crates/project-model", version = "0.0.0" }
-ra-salsa = { path = "./crates/ra-salsa", package = "salsa", version = "0.0.0" }
+query-group = { package = "query-group-macro", path = "./crates/query-group-macro", version = "0.0.0" }
 span = { path = "./crates/span", version = "0.0.0" }
 stdx = { path = "./crates/stdx", version = "0.0.0" }
 syntax = { path = "./crates/syntax", version = "0.0.0" }
@@ -85,72 +85,69 @@ vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
 vfs = { path = "./crates/vfs", version = "0.0.0" }
 edition = { path = "./crates/edition", version = "0.0.0" }
 
-ra-ap-rustc_hashes = { version = "0.100", default-features = false }
-ra-ap-rustc_lexer = { version = "0.100", default-features = false }
-ra-ap-rustc_parse_format = { version = "0.100", default-features = false }
-ra-ap-rustc_index = { version = "0.100", default-features = false }
-ra-ap-rustc_abi = { version = "0.100", default-features = false }
-ra-ap-rustc_pattern_analysis = { version = "0.100", default-features = false }
+ra-ap-rustc_lexer = { version = "0.110", default-features = false }
+ra-ap-rustc_parse_format = { version = "0.110", default-features = false }
+ra-ap-rustc_index = { version = "0.110", default-features = false }
+ra-ap-rustc_abi = { version = "0.110", default-features = false }
+ra-ap-rustc_pattern_analysis = { version = "0.110", default-features = false }
 
 # local crates that aren't published to crates.io. These should not have versions.
 
 # in-tree crates that are published separately and follow semver. See lib/README.md
 line-index = { version = "0.1.2" }
 la-arena = { version = "0.3.1" }
-lsp-server = { version = "0.7.6" }
+lsp-server = { version = "0.7.8" }
 
 # non-local crates
-anyhow = "1.0.75"
-arrayvec = "0.7.4"
-bitflags = "2.4.1"
-cargo_metadata = "0.18.1"
-camino = "1.1.6"
-chalk-solve = { version = "0.100.0", default-features = false }
-chalk-ir = "0.100.0"
-chalk-recursive = { version = "0.100.0", default-features = false }
-chalk-derive = "0.100.0"
-crossbeam-channel = "0.5.8"
-dissimilar = "1.0.7"
+anyhow = "1.0.97"
+arrayvec = "0.7.6"
+bitflags = "2.9.0"
+cargo_metadata = "0.19.2"
+camino = "1.1.9"
+chalk-solve = { version = "0.102.0", default-features = false }
+chalk-ir = "0.102.0"
+chalk-recursive = { version = "0.102.0", default-features = false }
+chalk-derive = "0.102.0"
+crossbeam-channel = "0.5.15"
+dissimilar = "1.0.10"
 dot = "0.1.4"
-either = "1.9.0"
-expect-test = "1.4.0"
-hashbrown = { version = "0.14", features = [
-  "inline-more",
-], default-features = false }
-indexmap = "2.1.0"
-itertools = "0.12.0"
-libc = "0.2.150"
-libloading = "0.8.0"
-memmap2 = "0.5.4"
+either = "1.15.0"
+expect-test = "1.5.1"
+indexmap = { version = "2.8.0", features = ["serde"] }
+itertools = "0.14.0"
+libc = "0.2.171"
+libloading = "0.8.6"
+memmap2 = "0.9.5"
 nohash-hasher = "0.2.0"
-oorandom = "11.1.3"
-object = { version = "0.33.0", default-features = false, features = [
+oorandom = "11.1.5"
+object = { version = "0.36.7", default-features = false, features = [
   "std",
   "read_core",
   "elf",
   "macho",
   "pe",
 ] }
-process-wrap = { version = "8.0.2", features = ["std"] }
+process-wrap = { version = "8.2.0", features = ["std"] }
 pulldown-cmark-to-cmark = "10.0.4"
-pulldown-cmark = { version = "0.9.0", default-features = false }
-rayon = "1.8.0"
-rustc-hash = "2.0.0"
+pulldown-cmark = { version = "0.9.6", default-features = false }
+rayon = "1.10.0"
+salsa = "0.20.0"
+semver = "1.0.26"
+serde = { version = "1.0.219" }
+serde_derive = { version = "1.0.219" }
+serde_json = "1.0.140"
+rustc-hash = "2.1.1"
 rustc-literal-escaper = "0.0.2"
-semver = "1.0.14"
-serde = { version = "1.0.192" }
-serde_derive = { version = "1.0.192" }
-serde_json = "1.0.108"
-smallvec = { version = "1.10.0", features = [
+smallvec = { version = "1.14.0", features = [
   "const_new",
   "union",
   "const_generics",
 ] }
 smol_str = "0.3.2"
 text-size = "1.1.1"
-tracing = "0.1.40"
-tracing-tree = "0.3.0"
-tracing-subscriber = { version = "0.3.18", default-features = false, features = [
+tracing = "0.1.41"
+tracing-tree = "0.4.0"
+tracing-subscriber = { version = "0.3.19", default-features = false, features = [
   "registry",
   "fmt",
   "local-time",
@@ -159,12 +156,15 @@ tracing-subscriber = { version = "0.3.18", default-features = false, features =
   "tracing-log",
 ] }
 triomphe = { version = "0.1.14", default-features = false, features = ["std"] }
-url = "2.3.1"
-xshell = "0.2.5"
-
+url = "2.5.4"
+xshell = "0.2.7"
 
 # We need to freeze the version of the crate, as the raw-api feature is considered unstable
-dashmap = { version = "=5.5.3", features = ["raw-api"] }
+dashmap = { version = "=6.1.0", features = ["raw-api", "inline"] }
+# We need to freeze the version of the crate, as it needs to match the version used by dashmap
+hashbrown = { version = "0.14.0", features = [
+  "inline-more",
+], default-features = false }
 
 [workspace.lints.rust]
 # remember to update RUSTFLAGS in ci.yml if you add something here
@@ -172,6 +172,7 @@ dashmap = { version = "=5.5.3", features = ["raw-api"] }
 elided_lifetimes_in_paths = "warn"
 explicit_outlives_requirements = "warn"
 unsafe_op_in_unsafe_fn = "warn"
+unexpected_cfgs = { level = "warn", check-cfg = ['cfg(bootstrap)'] }
 unused_extern_crates = "warn"
 unused_lifetimes = "warn"
 unreachable_pub = "warn"
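The new `unexpected_cfgs = { level = "warn", check-cfg = ['cfg(bootstrap)'] }` entry in the `[workspace.lints.rust]` table above declares `bootstrap` as a known-but-normally-unset cfg, so gating code on it does not trip the `unexpected_cfgs` lint while any other unknown cfg still warns. A minimal standalone sketch of such a gate follows; the function name is illustrative only and not taken from this patch.

// Standalone illustration, not code from the patch: `bootstrap` is only set when
// the build passes `--cfg bootstrap`; with the check-cfg entry above it is an
// expected cfg, so this gate compiles without an `unexpected_cfgs` warning.
#[cfg(bootstrap)]
fn build_stage() -> &'static str {
    "built with --cfg bootstrap"
}

#[cfg(not(bootstrap))]
fn build_stage() -> &'static str {
    "built without --cfg bootstrap"
}

fn main() {
    println!("{}", build_stage());
}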
diff --git a/src/tools/rust-analyzer/bench_data/numerous_macro_rules b/src/tools/rust-analyzer/bench_data/numerous_macro_rules
index 7610a3ae1e3cb..60997065b2408 100644
--- a/src/tools/rust-analyzer/bench_data/numerous_macro_rules
+++ b/src/tools/rust-analyzer/bench_data/numerous_macro_rules
@@ -528,7 +528,7 @@ macro_rules! __ra_macro_fixture526 {($expr :  expr )=>{|| -> _ { Some ($expr )}(
 macro_rules! __ra_macro_fixture527 {($($arg :  tt )*)=>($crate ::  io ::  _print ($crate ::  format_args ! ($($arg )*))); }
 macro_rules! __ra_macro_fixture528 {($fmt :  literal , $($tt :  tt ),*)=>{ mbe ::  ExpandError ::  ProcMacroError ( tt ::  ExpansionError ::  Unknown ( format ! ($fmt , $($tt ),*)))}; ($fmt :  literal )=>{ mbe ::  ExpandError ::  ProcMacroError ( tt ::  ExpansionError ::  Unknown ($fmt .  to_string ()))}}
 macro_rules! __ra_macro_fixture529 {($($tt :  tt )* )=>{$crate ::  quote ::  IntoTt ::  to_subtree ($crate ::  __quote ! ($($tt )*))}}
-macro_rules! __ra_macro_fixture530 {()=>{ Vec ::<  tt ::  TokenTree >::  new ()}; (@  SUBTREE $delim :  ident $($tt :  tt )* )=>{{ let  children = $crate ::  __quote ! ($($tt )*);  tt ::  Subtree { delimiter :  Some ( tt ::  Delimiter { kind :  tt ::  DelimiterKind ::$delim ,  id :  tt ::  TokenId ::  unspecified (), }),  token_trees : $crate ::  quote ::  IntoTt ::  to_tokens ( children ), }}}; (@  PUNCT $first :  literal )=>{{ vec ! [ tt ::  Leaf ::  Punct ( tt ::  Punct { char : $first ,  spacing :  tt ::  Spacing ::  Alone ,  id :  tt ::  TokenId ::  unspecified (), }).  into ()]}}; (@  PUNCT $first :  literal , $sec :  literal )=>{{ vec ! [ tt ::  Leaf ::  Punct ( tt ::  Punct { char : $first ,  spacing :  tt ::  Spacing ::  Joint ,  id :  tt ::  TokenId ::  unspecified (), }).  into (),  tt ::  Leaf ::  Punct ( tt ::  Punct { char : $sec ,  spacing :  tt ::  Spacing ::  Alone ,  id :  tt ::  TokenId ::  unspecified (), }).  into ()]}}; (# $first :  ident $($tail :  tt )* )=>{{ let  token = $crate ::  quote ::  ToTokenTree ::  to_token ($first );  let  mut  tokens =  vec ! [ token .  into ()];  let  mut  tail_tokens = $crate ::  quote ::  IntoTt ::  to_tokens ($crate ::  __quote ! ($($tail )*));  tokens .  append (&  mut  tail_tokens );  tokens }}; (## $first :  ident $($tail :  tt )* )=>{{ let  mut  tokens = $first .  into_iter ().  map ($crate ::  quote ::  ToTokenTree ::  to_token ).  collect ::<  Vec <  tt ::  TokenTree >> ();  let  mut  tail_tokens = $crate ::  quote ::  IntoTt ::  to_tokens ($crate ::  __quote ! ($($tail )*));  tokens .  append (&  mut  tail_tokens );  tokens }}; ({$($tt :  tt )* })=>{$crate ::  __quote ! (@  SUBTREE  Brace $($tt )*)}; ([$($tt :  tt )* ])=>{$crate ::  __quote ! (@  SUBTREE  Bracket $($tt )*)}; (($($tt :  tt )* ))=>{$crate ::  __quote ! (@  SUBTREE  Parenthesis $($tt )*)}; ($tt :  literal )=>{ vec ! [$crate ::  quote ::  ToTokenTree ::  to_token ($tt ).  into ()]}; ($tt :  ident )=>{ vec ! [{ tt ::  Leaf ::  Ident ( tt ::  Ident { text :  stringify ! ($tt ).  into (),  id :  tt ::  TokenId ::  unspecified (), }).  into ()}]}; (-> )=>{$crate ::  __quote ! (@  PUNCT  '-' ,  '>' )}; (& )=>{$crate ::  __quote ! (@  PUNCT  '&' )}; (, )=>{$crate ::  __quote ! (@  PUNCT  ',' )}; (: )=>{$crate ::  __quote ! (@  PUNCT  ':' )}; (; )=>{$crate ::  __quote ! (@  PUNCT  ';' )}; (:: )=>{$crate ::  __quote ! (@  PUNCT  ':' ,  ':' )}; (. )=>{$crate ::  __quote ! (@  PUNCT  '.' )}; (< )=>{$crate ::  __quote ! (@  PUNCT  '<' )}; (> )=>{$crate ::  __quote ! (@  PUNCT  '>' )}; ($first :  tt $($tail :  tt )+ )=>{{ let  mut  tokens = $crate ::  quote ::  IntoTt ::  to_tokens ($crate ::  __quote ! ($first ));  let  mut  tail_tokens = $crate ::  quote ::  IntoTt ::  to_tokens ($crate ::  __quote ! ($($tail )*));  tokens .  append (&  mut  tail_tokens );  tokens }}; }
+macro_rules! __ra_macro_fixture530 {()=>{ Vec ::<  tt ::  TokenTree >::  new ()}; (@  SUBTREE $delim :  ident $($tt :  tt )* )=>{{ let  children = $crate ::  __quote ! ($($tt )*);  tt ::  Subtree { delimiter :  Some ( tt ::  Delimiter { kind :  tt ::  DelimiterKind ::$delim ,  id :  tt ::  TokenId ::  unspecified (), }),  token_trees : $crate ::  quote ::  IntoTt ::  to_tokens ( children ), }}}; (@  PUNCT $first :  literal )=>{{ vec ! [ tt ::  Leaf ::  Punct ( tt ::  Punct { char : $first ,  spacing :  tt ::  Spacing ::  Alone ,  id :  tt ::  TokenId ::  unspecified (), }).  into ()]}}; (@  PUNCT $first :  literal , $sec :  literal )=>{{ vec ! [ tt ::  Leaf ::  Punct ( tt ::  Punct { char : $first ,  spacing :  tt ::  Spacing ::  Joint ,  id :  tt ::  TokenId ::  unspecified (), }).  into (),  tt ::  Leaf ::  Punct ( tt ::  Punct { char : $sec ,  spacing :  tt ::  Spacing ::  Alone ,  id :  tt ::  TokenId ::  unspecified (), }).  into ()]}}; (# $first :  ident $($tail :  tt )* )=>{{ let  token = $crate ::  quote ::  ToTokenTree ::  to_token ($first );  let  mut  tokens =  vec ! [ token .  into ()];  let  mut  tail_tokens = $crate ::  quote ::  IntoTt ::  to_tokens ($crate ::  __quote ! ($($tail )*));  tokens .  append (&  mut  tail_tokens );  tokens }}; (# # $first :  ident $($tail :  tt )* )=>{{ let  mut  tokens = $first .  into_iter ().  map ($crate ::  quote ::  ToTokenTree ::  to_token ).  collect ::<  Vec <  tt ::  TokenTree >> ();  let  mut  tail_tokens = $crate ::  quote ::  IntoTt ::  to_tokens ($crate ::  __quote ! ($($tail )*));  tokens .  append (&  mut  tail_tokens );  tokens }}; ({$($tt :  tt )* })=>{$crate ::  __quote ! (@  SUBTREE  Brace $($tt )*)}; ([$($tt :  tt )* ])=>{$crate ::  __quote ! (@  SUBTREE  Bracket $($tt )*)}; (($($tt :  tt )* ))=>{$crate ::  __quote ! (@  SUBTREE  Parenthesis $($tt )*)}; ($tt :  literal )=>{ vec ! [$crate ::  quote ::  ToTokenTree ::  to_token ($tt ).  into ()]}; ($tt :  ident )=>{ vec ! [{ tt ::  Leaf ::  Ident ( tt ::  Ident { text :  stringify ! ($tt ).  into (),  id :  tt ::  TokenId ::  unspecified (), }).  into ()}]}; (-> )=>{$crate ::  __quote ! (@  PUNCT  '-' ,  '>' )}; (& )=>{$crate ::  __quote ! (@  PUNCT  '&' )}; (, )=>{$crate ::  __quote ! (@  PUNCT  ',' )}; (: )=>{$crate ::  __quote ! (@  PUNCT  ':' )}; (; )=>{$crate ::  __quote ! (@  PUNCT  ';' )}; (:: )=>{$crate ::  __quote ! (@  PUNCT  ':' ,  ':' )}; (. )=>{$crate ::  __quote ! (@  PUNCT  '.' )}; (< )=>{$crate ::  __quote ! (@  PUNCT  '<' )}; (> )=>{$crate ::  __quote ! (@  PUNCT  '>' )}; ($first :  tt $($tail :  tt )+ )=>{{ let  mut  tokens = $crate ::  quote ::  IntoTt ::  to_tokens ($crate ::  __quote ! ($first ));  let  mut  tail_tokens = $crate ::  quote ::  IntoTt ::  to_tokens ($crate ::  __quote ! ($($tail )*));  tokens .  append (&  mut  tail_tokens );  tokens }}; }
 macro_rules! __ra_macro_fixture531 {($($name :  ident )*)=>{$(if  let  Some ( it )= &  self .$name { f .  field ( stringify ! ($name ),  it ); })*}}
 macro_rules! __ra_macro_fixture532 {($fmt :  expr )=>{ RenameError ( format ! ($fmt ))}; ($fmt :  expr , $($arg :  tt )+)=>{ RenameError ( format ! ($fmt , $($arg )+))}}
 macro_rules! __ra_macro_fixture533 {($($tokens :  tt )*)=>{ return  Err ( format_err ! ($($tokens )*))}}
diff --git a/src/tools/rust-analyzer/crates/base-db/Cargo.toml b/src/tools/rust-analyzer/crates/base-db/Cargo.toml
index 042dd36488aa9..441434504c293 100644
--- a/src/tools/rust-analyzer/crates/base-db/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/base-db/Cargo.toml
@@ -12,10 +12,10 @@ rust-version.workspace = true
 [lib]
 
 [dependencies]
-lz4_flex = { version = "0.11", default-features = false }
-
 la-arena.workspace = true
-ra-salsa.workspace = true
+dashmap.workspace = true
+salsa.workspace = true
+query-group.workspace = true
 rustc-hash.workspace = true
 triomphe.workspace = true
 semver.workspace = true
@@ -23,7 +23,6 @@ tracing.workspace = true
 
 # local deps
 cfg.workspace = true
-stdx.workspace = true
 syntax.workspace = true
 vfs.workspace = true
 span.workspace = true
diff --git a/src/tools/rust-analyzer/crates/base-db/src/change.rs b/src/tools/rust-analyzer/crates/base-db/src/change.rs
index 7e40f5408f144..da2fb27571c2f 100644
--- a/src/tools/rust-analyzer/crates/base-db/src/change.rs
+++ b/src/tools/rust-analyzer/crates/base-db/src/change.rs
@@ -3,23 +3,18 @@
 
 use std::fmt;
 
-use ra_salsa::Durability;
-use rustc_hash::FxHashMap;
+use salsa::Durability;
 use triomphe::Arc;
 use vfs::FileId;
 
-use crate::{
-    CrateGraph, CrateId, CrateWorkspaceData, SourceDatabaseFileInputExt, SourceRoot,
-    SourceRootDatabase, SourceRootId,
-};
+use crate::{CrateGraphBuilder, CratesIdMap, RootQueryDb, SourceRoot, SourceRootId};
 
 /// Encapsulate a bunch of raw `.set` calls on the database.
 #[derive(Default)]
 pub struct FileChange {
     pub roots: Option<Vec<SourceRoot>>,
     pub files_changed: Vec<(FileId, Option<String>)>,
-    pub crate_graph: Option<CrateGraph>,
-    pub ws_data: Option<FxHashMap<CrateId, Arc<CrateWorkspaceData>>>,
+    pub crate_graph: Option<CrateGraphBuilder>,
 }
 
 impl fmt::Debug for FileChange {
@@ -39,10 +34,6 @@ impl fmt::Debug for FileChange {
 }
 
 impl FileChange {
-    pub fn new() -> Self {
-        FileChange::default()
-    }
-
     pub fn set_roots(&mut self, roots: Vec<SourceRoot>) {
         self.roots = Some(roots);
     }
@@ -51,48 +42,45 @@ impl FileChange {
         self.files_changed.push((file_id, new_text))
     }
 
-    pub fn set_crate_graph(&mut self, graph: CrateGraph) {
+    pub fn set_crate_graph(&mut self, graph: CrateGraphBuilder) {
         self.crate_graph = Some(graph);
     }
 
-    pub fn set_ws_data(&mut self, data: FxHashMap<CrateId, Arc<CrateWorkspaceData>>) {
-        self.ws_data = Some(data);
-    }
-
-    pub fn apply(self, db: &mut dyn SourceRootDatabase) {
+    pub fn apply(self, db: &mut dyn RootQueryDb) -> Option<CratesIdMap> {
         let _p = tracing::info_span!("FileChange::apply").entered();
         if let Some(roots) = self.roots {
             for (idx, root) in roots.into_iter().enumerate() {
                 let root_id = SourceRootId(idx as u32);
-                let durability = durability(&root);
+                let durability = source_root_durability(&root);
                 for file_id in root.iter() {
                     db.set_file_source_root_with_durability(file_id, root_id, durability);
                 }
+
                 db.set_source_root_with_durability(root_id, Arc::new(root), durability);
             }
         }
 
         for (file_id, text) in self.files_changed {
             let source_root_id = db.file_source_root(file_id);
-            let source_root = db.source_root(source_root_id);
-            let durability = durability(&source_root);
+            let source_root = db.source_root(source_root_id.source_root_id(db));
+
+            let durability = file_text_durability(&source_root.source_root(db));
             // XXX: can't actually remove the file, just reset the text
             let text = text.unwrap_or_default();
             db.set_file_text_with_durability(file_id, &text, durability)
         }
+
         if let Some(crate_graph) = self.crate_graph {
-            db.set_crate_graph_with_durability(Arc::new(crate_graph), Durability::HIGH);
-        }
-        if let Some(data) = self.ws_data {
-            db.set_crate_workspace_data_with_durability(Arc::new(data), Durability::HIGH);
+            return Some(crate_graph.set_in_db(db));
         }
+        None
     }
 }
 
-fn durability(source_root: &SourceRoot) -> Durability {
-    if source_root.is_library {
-        Durability::HIGH
-    } else {
-        Durability::LOW
-    }
+fn source_root_durability(source_root: &SourceRoot) -> Durability {
+    if source_root.is_library { Durability::MEDIUM } else { Durability::LOW }
+}
+
+fn file_text_durability(source_root: &SourceRoot) -> Durability {
+    if source_root.is_library { Durability::HIGH } else { Durability::LOW }
 }
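The `change.rs` hunk above replaces the single `durability` helper with two: source-root inputs for library roots now get `Durability::MEDIUM` while library file text keeps `Durability::HIGH`, and anything in a local root stays `LOW`. A standalone toy sketch of that mapping follows; the `Durability` and `SourceRoot` types here are stand-ins for illustration, not salsa's or rust-analyzer's.

// Toy stand-ins, not rust-analyzer/salsa types. Higher durability tells the query
// engine an input is expected to change rarely, so it can skip revalidation work
// when only more volatile inputs changed in a revision.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Durability {
    Low,
    Medium,
    High,
}

struct SourceRoot {
    is_library: bool,
}

// Mirrors `source_root_durability` in the hunk above.
fn source_root_durability(root: &SourceRoot) -> Durability {
    if root.is_library { Durability::Medium } else { Durability::Low }
}

// Mirrors `file_text_durability` in the hunk above.
fn file_text_durability(root: &SourceRoot) -> Durability {
    if root.is_library { Durability::High } else { Durability::Low }
}

fn main() {
    let library = SourceRoot { is_library: true };
    let local = SourceRoot { is_library: false };
    assert_eq!(source_root_durability(&library), Durability::Medium);
    assert_eq!(file_text_durability(&library), Durability::High);
    assert_eq!(source_root_durability(&local), Durability::Low);
    assert_eq!(file_text_durability(&local), Durability::Low);
}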
diff --git a/src/tools/rust-analyzer/crates/base-db/src/input.rs b/src/tools/rust-analyzer/crates/base-db/src/input.rs
index bd08387b58219..499c9b3716b26 100644
--- a/src/tools/rust-analyzer/crates/base-db/src/input.rs
+++ b/src/tools/rust-analyzer/crates/base-db/src/input.rs
@@ -6,17 +6,23 @@
 //! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how
 //! actual IO is done and lowered to input.
 
+use std::hash::BuildHasherDefault;
 use std::{fmt, mem, ops};
 
-use cfg::CfgOptions;
+use cfg::{CfgOptions, HashableCfgOptions};
+use dashmap::DashMap;
+use dashmap::mapref::entry::Entry;
 use intern::Symbol;
 use la_arena::{Arena, Idx, RawIdx};
-use rustc_hash::{FxHashMap, FxHashSet};
-use span::{Edition, EditionedFileId};
+use rustc_hash::{FxHashMap, FxHashSet, FxHasher};
+use salsa::{Durability, Setter};
+use span::Edition;
 use triomphe::Arc;
-use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath};
+use vfs::{AbsPathBuf, AnchoredPath, FileId, VfsPath, file_set::FileSet};
 
-pub type ProcMacroPaths = FxHashMap<CrateId, Result<(String, AbsPathBuf), String>>;
+use crate::{CrateWorkspaceData, EditionedFileId, RootQueryDb};
+
+pub type ProcMacroPaths = FxHashMap<CrateBuilderId, Result<(String, AbsPathBuf), String>>;
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
 pub struct SourceRootId(pub u32);
@@ -64,30 +70,31 @@ impl SourceRoot {
     }
 }
 
-/// `CrateGraph` is a bit of information which turns a set of text files into a
-/// number of Rust crates.
-///
-/// Each crate is defined by the `FileId` of its root module, the set of enabled
-/// `cfg` flags and the set of dependencies.
-///
-/// Note that, due to cfg's, there might be several crates for a single `FileId`!
-///
-/// For the purposes of analysis, a crate does not have a name. Instead, names
-/// are specified on dependency edges. That is, a crate might be known under
-/// different names in different dependent crates.
-///
-/// Note that `CrateGraph` is build-system agnostic: it's a concept of the Rust
-/// language proper, not a concept of the build system. In practice, we get
-/// `CrateGraph` by lowering `cargo metadata` output.
-///
-/// `CrateGraph` is `!Serialize` by design, see
-/// <https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/architecture.md#serialization>
-#[derive(Clone, Default)]
-pub struct CrateGraph {
-    arena: Arena<CrateData>,
+#[derive(Default, Clone)]
+pub struct CrateGraphBuilder {
+    arena: Arena<CrateBuilder>,
+}
+
+pub type CrateBuilderId = Idx<CrateBuilder>;
+
+impl ops::Index<CrateBuilderId> for CrateGraphBuilder {
+    type Output = CrateBuilder;
+
+    fn index(&self, index: CrateBuilderId) -> &Self::Output {
+        &self.arena[index]
+    }
 }
 
-impl fmt::Debug for CrateGraph {
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct CrateBuilder {
+    pub basic: CrateDataBuilder,
+    pub extra: ExtraCrateData,
+    pub cfg_options: CfgOptions,
+    pub env: Env,
+    ws_data: Arc<CrateWorkspaceData>,
+}
+
+impl fmt::Debug for CrateGraphBuilder {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         f.debug_map()
             .entries(self.arena.iter().map(|(id, data)| (u32::from(id.into_raw()), data)))
@@ -95,8 +102,6 @@ impl fmt::Debug for CrateGraph {
     }
 }
 
-pub type CrateId = Idx<CrateData>;
-
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct CrateName(Symbol);
 
@@ -105,11 +110,7 @@ impl CrateName {
     /// Dashes are not allowed in the crate names,
     /// hence the input string is returned as `Err` for those cases.
     pub fn new(name: &str) -> Result<CrateName, &str> {
-        if name.contains('-') {
-            Err(name)
-        } else {
-            Ok(Self(Symbol::intern(name)))
-        }
+        if name.contains('-') { Err(name) } else { Ok(Self(Symbol::intern(name))) }
     }
 
     /// Creates a crate name, unconditionally replacing the dashes with underscores.
@@ -272,10 +273,49 @@ impl ReleaseChannel {
     }
 }
 
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct CrateData {
+/// The crate data from which we derive the `Crate`.
+///
+/// We want this to contain as little data as possible, because if it contains dependencies and
+/// something changes, this crate's and all of its dependencies' ids are invalidated, which causes
+/// pretty much everything to be recomputed. If the crate id is not invalidated, only this crate's
+/// information needs to be recomputed.
+///
+/// *Most* different crates have different root files (actually, pretty much all of them).
+/// Still, it is possible to have crates distinguished by other factors (e.g. dependencies).
+/// So we store only the root file - unless we find that this crate has the same root file as
+/// another crate, in which case we store all data for one of them (if one is a dependency of
+/// the other, we store the data for it, because it has more dependencies to be invalidated).
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct UniqueCrateData {
+    root_file_id: FileId,
+    disambiguator: Option<Box<(BuiltCrateData, HashableCfgOptions)>>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct CrateData<Id> {
     pub root_file_id: FileId,
     pub edition: Edition,
+    /// The dependencies of this crate.
+    ///
+    /// Note that this may contain more dependencies than the crate actually uses.
+    /// A common example is the test crate, which is included but only actually active when
+    /// declared in source via `extern crate test`.
+    pub dependencies: Vec<Dependency<Id>>,
+    pub origin: CrateOrigin,
+    pub is_proc_macro: bool,
+    /// The working directory in which to run proc-macros invoked in the context of this crate.
+    /// This is the workspace root of the cargo workspace for workspace members, and the crate
+    /// manifest dir otherwise.
+    // FIXME: This ought to be a `VfsPath` or something opaque.
+    pub proc_macro_cwd: Arc<AbsPathBuf>,
+}
+
+pub type CrateDataBuilder = CrateData<CrateBuilderId>;
+pub type BuiltCrateData = CrateData<Crate>;
+
+/// Crate data unrelated to analysis.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct ExtraCrateData {
     pub version: Option<String>,
     /// A name used in the package's project declaration: for Cargo projects,
     /// its `[package].name` can be different for other project types or even
@@ -284,21 +324,8 @@ pub struct CrateData {
     /// For purposes of analysis, crates are anonymous (only names in
     /// `Dependency` matters), this name should only be used for UI.
     pub display_name: Option<CrateDisplayName>,
-    pub cfg_options: Arc<CfgOptions>,
     /// The cfg options that could be used by the crate
-    pub potential_cfg_options: Option<Arc<CfgOptions>>,
-    pub env: Env,
-    /// The dependencies of this crate.
-    ///
-    /// Note that this may contain more dependencies than the crate actually uses.
-    /// A common example is the test crate which is included but only actually is active when
-    /// declared in source via `extern crate test`.
-    pub dependencies: Vec<Dependency>,
-    pub origin: CrateOrigin,
-    pub is_proc_macro: bool,
-    /// The working directory to run proc-macros in. This is the workspace root of the cargo workspace
-    /// for workspace members, the crate manifest dir otherwise.
-    pub proc_macro_cwd: Option<AbsPathBuf>,
+    pub potential_cfg_options: Option<CfgOptions>,
 }
 
 #[derive(Default, Clone, PartialEq, Eq)]
@@ -326,22 +353,32 @@ impl fmt::Debug for Env {
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub struct Dependency {
-    pub crate_id: CrateId,
+pub struct Dependency<Id> {
+    pub crate_id: Id,
     pub name: CrateName,
     prelude: bool,
     sysroot: bool,
 }
 
-impl Dependency {
-    pub fn new(name: CrateName, crate_id: CrateId) -> Self {
+pub type DependencyBuilder = Dependency<CrateBuilderId>;
+pub type BuiltDependency = Dependency<Crate>;
+
+impl DependencyBuilder {
+    pub fn new(name: CrateName, crate_id: CrateBuilderId) -> Self {
         Self { name, crate_id, prelude: true, sysroot: false }
     }
 
-    pub fn with_prelude(name: CrateName, crate_id: CrateId, prelude: bool, sysroot: bool) -> Self {
+    pub fn with_prelude(
+        name: CrateName,
+        crate_id: CrateBuilderId,
+        prelude: bool,
+        sysroot: bool,
+    ) -> Self {
         Self { name, crate_id, prelude, sysroot }
     }
+}
 
+impl BuiltDependency {
     /// Whether this dependency is to be added to the depending crate's extern prelude.
     pub fn is_prelude(&self) -> bool {
         self.prelude
@@ -353,41 +390,71 @@ impl Dependency {
     }
 }
 
-impl CrateGraph {
+pub type CratesIdMap = FxHashMap<CrateBuilderId, Crate>;
+
+#[salsa::input]
+#[derive(Debug)]
+pub struct Crate {
+    #[return_ref]
+    pub data: BuiltCrateData,
+    /// Crate data that is not needed for analysis.
+    ///
+    /// This is split into a separate field to increase incrementality.
+    #[return_ref]
+    pub extra_data: ExtraCrateData,
+    // This is in `Arc` because it is shared for all crates in a workspace.
+    #[return_ref]
+    pub workspace_data: Arc<CrateWorkspaceData>,
+    #[return_ref]
+    pub cfg_options: CfgOptions,
+    #[return_ref]
+    pub env: Env,
+}
+
+/// The mapping from each [`UniqueCrateData`] to its [`Crate`] input.
+#[derive(Debug, Default)]
+pub struct CratesMap(DashMap<UniqueCrateData, Crate, BuildHasherDefault<FxHasher>>);
+
+impl CrateGraphBuilder {
     pub fn add_crate_root(
         &mut self,
         root_file_id: FileId,
         edition: Edition,
         display_name: Option<CrateDisplayName>,
         version: Option<String>,
-        cfg_options: Arc<CfgOptions>,
-        potential_cfg_options: Option<Arc<CfgOptions>>,
+        mut cfg_options: CfgOptions,
+        mut potential_cfg_options: Option<CfgOptions>,
         mut env: Env,
         origin: CrateOrigin,
         is_proc_macro: bool,
-        proc_macro_cwd: Option<AbsPathBuf>,
-    ) -> CrateId {
+        proc_macro_cwd: Arc<AbsPathBuf>,
+        ws_data: Arc<CrateWorkspaceData>,
+    ) -> CrateBuilderId {
         env.entries.shrink_to_fit();
-        let data = CrateData {
-            root_file_id,
-            edition,
-            version,
-            display_name,
+        cfg_options.shrink_to_fit();
+        if let Some(potential_cfg_options) = &mut potential_cfg_options {
+            potential_cfg_options.shrink_to_fit();
+        }
+        self.arena.alloc(CrateBuilder {
+            basic: CrateData {
+                root_file_id,
+                edition,
+                dependencies: Vec::new(),
+                origin,
+                is_proc_macro,
+                proc_macro_cwd,
+            },
+            extra: ExtraCrateData { version, display_name, potential_cfg_options },
             cfg_options,
-            potential_cfg_options,
             env,
-            dependencies: Vec::new(),
-            origin,
-            is_proc_macro,
-            proc_macro_cwd,
-        };
-        self.arena.alloc(data)
+            ws_data,
+        })
     }
 
     pub fn add_dep(
         &mut self,
-        from: CrateId,
-        dep: Dependency,
+        from: CrateBuilderId,
+        dep: DependencyBuilder,
     ) -> Result<(), CyclicDependenciesError> {
         let _p = tracing::info_span!("add_dep").entered();
 
@@ -395,37 +462,154 @@ impl CrateGraph {
         // that out, look for a  path in the *opposite* direction, from `to` to
         // `from`.
         if let Some(path) = self.find_path(&mut FxHashSet::default(), dep.crate_id, from) {
-            let path = path.into_iter().map(|it| (it, self[it].display_name.clone())).collect();
+            let path =
+                path.into_iter().map(|it| (it, self[it].extra.display_name.clone())).collect();
             let err = CyclicDependenciesError { path };
             assert!(err.from().0 == from && err.to().0 == dep.crate_id);
             return Err(err);
         }
 
-        self.arena[from].add_dep(dep);
+        self.arena[from].basic.dependencies.push(dep);
         Ok(())
     }
 
-    pub fn is_empty(&self) -> bool {
-        self.arena.is_empty()
-    }
+    pub fn set_in_db(self, db: &mut dyn RootQueryDb) -> CratesIdMap {
+        let mut all_crates = Vec::with_capacity(self.arena.len());
+        let mut visited = FxHashMap::default();
+        let mut visited_root_files = FxHashSet::default();
 
-    pub fn len(&self) -> usize {
-        self.arena.len()
-    }
+        let old_all_crates = db.all_crates();
 
-    pub fn iter(&self) -> impl Iterator<Item = CrateId> + '_ {
-        self.arena.iter().map(|(idx, _)| idx)
+        let crates_map = db.crates_map();
+        // salsa doesn't compare new input to old input to see if they are the same, so here we are doing all the work ourselves.
+        for krate in self.iter() {
+            go(
+                &self,
+                db,
+                &crates_map,
+                &mut visited,
+                &mut visited_root_files,
+                &mut all_crates,
+                krate,
+            );
+        }
+
+        if **old_all_crates != *all_crates {
+            db.set_all_crates_with_durability(
+                Arc::new(all_crates.into_boxed_slice()),
+                Durability::MEDIUM,
+            );
+        }
+
+        return visited;
+
+        fn go(
+            graph: &CrateGraphBuilder,
+            db: &mut dyn RootQueryDb,
+            crates_map: &CratesMap,
+            visited: &mut FxHashMap<CrateBuilderId, Crate>,
+            visited_root_files: &mut FxHashSet<FileId>,
+            all_crates: &mut Vec<Crate>,
+            source: CrateBuilderId,
+        ) -> Crate {
+            if let Some(&crate_id) = visited.get(&source) {
+                return crate_id;
+            }
+            let krate = &graph[source];
+            let dependencies = krate
+                .basic
+                .dependencies
+                .iter()
+                .map(|dep| BuiltDependency {
+                    crate_id: go(
+                        graph,
+                        db,
+                        crates_map,
+                        visited,
+                        visited_root_files,
+                        all_crates,
+                        dep.crate_id,
+                    ),
+                    name: dep.name.clone(),
+                    prelude: dep.prelude,
+                    sysroot: dep.sysroot,
+                })
+                .collect::<Vec<_>>();
+            let crate_data = BuiltCrateData {
+                dependencies,
+                edition: krate.basic.edition,
+                is_proc_macro: krate.basic.is_proc_macro,
+                origin: krate.basic.origin.clone(),
+                root_file_id: krate.basic.root_file_id,
+                proc_macro_cwd: krate.basic.proc_macro_cwd.clone(),
+            };
+            let disambiguator = if visited_root_files.insert(krate.basic.root_file_id) {
+                None
+            } else {
+                Some(Box::new((crate_data.clone(), krate.cfg_options.to_hashable())))
+            };
+
+            let unique_crate_data =
+                UniqueCrateData { root_file_id: krate.basic.root_file_id, disambiguator };
+            let crate_input = match crates_map.0.entry(unique_crate_data) {
+                Entry::Occupied(entry) => {
+                    let old_crate = *entry.get();
+                    if crate_data != *old_crate.data(db) {
+                        old_crate.set_data(db).with_durability(Durability::MEDIUM).to(crate_data);
+                    }
+                    if krate.extra != *old_crate.extra_data(db) {
+                        old_crate
+                            .set_extra_data(db)
+                            .with_durability(Durability::MEDIUM)
+                            .to(krate.extra.clone());
+                    }
+                    if krate.cfg_options != *old_crate.cfg_options(db) {
+                        old_crate
+                            .set_cfg_options(db)
+                            .with_durability(Durability::MEDIUM)
+                            .to(krate.cfg_options.clone());
+                    }
+                    if krate.env != *old_crate.env(db) {
+                        old_crate
+                            .set_env(db)
+                            .with_durability(Durability::MEDIUM)
+                            .to(krate.env.clone());
+                    }
+                    if krate.ws_data != *old_crate.workspace_data(db) {
+                        old_crate
+                            .set_workspace_data(db)
+                            .with_durability(Durability::MEDIUM)
+                            .to(krate.ws_data.clone());
+                    }
+                    old_crate
+                }
+                Entry::Vacant(entry) => {
+                    let input = Crate::builder(
+                        crate_data,
+                        krate.extra.clone(),
+                        krate.ws_data.clone(),
+                        krate.cfg_options.clone(),
+                        krate.env.clone(),
+                    )
+                    .durability(Durability::MEDIUM)
+                    .new(db);
+                    entry.insert(input);
+                    input
+                }
+            };
+            all_crates.push(crate_input);
+            visited.insert(source, crate_input);
+            crate_input
+        }
     }
 
-    // FIXME: used for fixing up the toolchain sysroot, should be removed and done differently
-    #[doc(hidden)]
-    pub fn iter_mut(&mut self) -> impl Iterator<Item = (CrateId, &mut CrateData)> + '_ {
-        self.arena.iter_mut()
+    pub fn iter(&self) -> impl Iterator<Item = CrateBuilderId> + '_ {
+        self.arena.iter().map(|(idx, _)| idx)
     }
 
     /// Returns an iterator over all transitive dependencies of the given crate,
     /// including the crate itself.
-    pub fn transitive_deps(&self, of: CrateId) -> impl Iterator<Item = CrateId> {
+    pub fn transitive_deps(&self, of: CrateBuilderId) -> impl Iterator<Item = CrateBuilderId> {
         let mut worklist = vec![of];
         let mut deps = FxHashSet::default();
 
@@ -434,42 +618,15 @@ impl CrateGraph {
                 continue;
             }
 
-            worklist.extend(self[krate].dependencies.iter().map(|dep| dep.crate_id));
+            worklist.extend(self[krate].basic.dependencies.iter().map(|dep| dep.crate_id));
         }
 
         deps.into_iter()
     }
 
-    /// Returns all transitive reverse dependencies of the given crate,
-    /// including the crate itself.
-    pub fn transitive_rev_deps(&self, of: CrateId) -> impl Iterator<Item = CrateId> {
-        let mut worklist = vec![of];
-        let mut rev_deps = FxHashSet::default();
-        rev_deps.insert(of);
-
-        let mut inverted_graph = FxHashMap::<_, Vec<_>>::default();
-        self.arena.iter().for_each(|(krate, data)| {
-            data.dependencies
-                .iter()
-                .for_each(|dep| inverted_graph.entry(dep.crate_id).or_default().push(krate))
-        });
-
-        while let Some(krate) = worklist.pop() {
-            if let Some(krate_rev_deps) = inverted_graph.get(&krate) {
-                krate_rev_deps
-                    .iter()
-                    .copied()
-                    .filter(|&rev_dep| rev_deps.insert(rev_dep))
-                    .for_each(|rev_dep| worklist.push(rev_dep));
-            }
-        }
-
-        rev_deps.into_iter()
-    }
-
     /// Returns all crates in the graph, sorted in topological order (ie. dependencies of a crate
     /// come before the crate itself).
-    pub fn crates_in_topological_order(&self) -> Vec<CrateId> {
+    fn crates_in_topological_order(&self) -> Vec<CrateBuilderId> {
         let mut res = Vec::new();
         let mut visited = FxHashSet::default();
 
@@ -480,15 +637,15 @@ impl CrateGraph {
         return res;
 
         fn go(
-            graph: &CrateGraph,
-            visited: &mut FxHashSet<CrateId>,
-            res: &mut Vec<CrateId>,
-            source: CrateId,
+            graph: &CrateGraphBuilder,
+            visited: &mut FxHashSet<CrateBuilderId>,
+            res: &mut Vec<CrateBuilderId>,
+            source: CrateBuilderId,
         ) {
             if !visited.insert(source) {
                 return;
             }
-            for dep in graph[source].dependencies.iter() {
+            for dep in graph[source].basic.dependencies.iter() {
                 go(graph, visited, res, dep.crate_id)
             }
             res.push(source)
@@ -504,23 +661,27 @@ impl CrateGraph {
     /// Returns a map mapping `other`'s IDs to the new IDs in `self`.
     pub fn extend(
         &mut self,
-        mut other: CrateGraph,
+        mut other: CrateGraphBuilder,
         proc_macros: &mut ProcMacroPaths,
-    ) -> FxHashMap<CrateId, CrateId> {
+    ) -> FxHashMap<CrateBuilderId, CrateBuilderId> {
         // Sorting here is a bit pointless because the input is likely already sorted.
         // However, the overhead is small and it makes the `extend` method harder to misuse.
         self.arena
             .iter_mut()
-            .for_each(|(_, data)| data.dependencies.sort_by_key(|dep| dep.crate_id));
+            .for_each(|(_, data)| data.basic.dependencies.sort_by_key(|dep| dep.crate_id));
 
-        let m = self.len();
+        let m = self.arena.len();
         let topo = other.crates_in_topological_order();
-        let mut id_map: FxHashMap<CrateId, CrateId> = FxHashMap::default();
+        let mut id_map: FxHashMap<CrateBuilderId, CrateBuilderId> = FxHashMap::default();
         for topo in topo {
             let crate_data = &mut other.arena[topo];
 
-            crate_data.dependencies.iter_mut().for_each(|dep| dep.crate_id = id_map[&dep.crate_id]);
-            crate_data.dependencies.sort_by_key(|dep| dep.crate_id);
+            crate_data
+                .basic
+                .dependencies
+                .iter_mut()
+                .for_each(|dep| dep.crate_id = id_map[&dep.crate_id]);
+            crate_data.basic.dependencies.sort_by_key(|dep| dep.crate_id);
 
             let find = self.arena.iter().take(m).find_map(|(k, v)| (v == crate_data).then_some(k));
             let new_id = find.unwrap_or_else(|| self.arena.alloc(crate_data.clone()));
@@ -534,10 +695,10 @@ impl CrateGraph {
 
     fn find_path(
         &self,
-        visited: &mut FxHashSet<CrateId>,
-        from: CrateId,
-        to: CrateId,
-    ) -> Option<Vec<CrateId>> {
+        visited: &mut FxHashSet<CrateBuilderId>,
+        from: CrateBuilderId,
+        to: CrateBuilderId,
+    ) -> Option<Vec<CrateBuilderId>> {
         if !visited.insert(from) {
             return None;
         }
@@ -546,7 +707,7 @@ impl CrateGraph {
             return Some(vec![to]);
         }
 
-        for dep in &self[from].dependencies {
+        for dep in &self[from].basic.dependencies {
             let crate_id = dep.crate_id;
             if let Some(mut path) = self.find_path(visited, crate_id, to) {
                 path.push(from);
@@ -559,7 +720,10 @@ impl CrateGraph {
 
     /// Removes all crates from this crate graph except for the ones in `to_keep` and fixes up the dependencies.
     /// Returns a mapping from old crate ids to new crate ids.
-    pub fn remove_crates_except(&mut self, to_keep: &[CrateId]) -> Vec<Option<CrateId>> {
+    pub fn remove_crates_except(
+        &mut self,
+        to_keep: &[CrateBuilderId],
+    ) -> Vec<Option<CrateBuilderId>> {
         let mut id_map = vec![None; self.arena.len()];
         self.arena = std::mem::take(&mut self.arena)
             .into_iter()
@@ -567,12 +731,12 @@ impl CrateGraph {
             .enumerate()
             .map(|(new_id, (id, data))| {
                 id_map[id.into_raw().into_u32() as usize] =
-                    Some(CrateId::from_raw(RawIdx::from_u32(new_id as u32)));
+                    Some(CrateBuilderId::from_raw(RawIdx::from_u32(new_id as u32)));
                 data
             })
             .collect();
         for (_, data) in self.arena.iter_mut() {
-            data.dependencies.iter_mut().for_each(|dep| {
+            data.basic.dependencies.iter_mut().for_each(|dep| {
                 dep.crate_id =
                     id_map[dep.crate_id.into_raw().into_u32() as usize].expect("crate was filtered")
             });
@@ -585,22 +749,36 @@ impl CrateGraph {
     }
 }
 
-impl ops::Index<CrateId> for CrateGraph {
-    type Output = CrateData;
-    fn index(&self, crate_id: CrateId) -> &CrateData {
-        &self.arena[crate_id]
+pub(crate) fn transitive_rev_deps(db: &dyn RootQueryDb, of: Crate) -> FxHashSet<Crate> {
+    let mut worklist = vec![of];
+    let mut rev_deps = FxHashSet::default();
+    rev_deps.insert(of);
+
+    let mut inverted_graph = FxHashMap::<_, Vec<_>>::default();
+    db.all_crates().iter().for_each(|&krate| {
+        krate
+            .data(db)
+            .dependencies
+            .iter()
+            .for_each(|dep| inverted_graph.entry(dep.crate_id).or_default().push(krate))
+    });
+
+    while let Some(krate) = worklist.pop() {
+        if let Some(crate_rev_deps) = inverted_graph.get(&krate) {
+            crate_rev_deps
+                .iter()
+                .copied()
+                .filter(|&rev_dep| rev_deps.insert(rev_dep))
+                .for_each(|rev_dep| worklist.push(rev_dep));
+        }
     }
-}
 
-impl CrateData {
-    /// Add a dependency to `self` without checking if the dependency
-    // is existent among `self.dependencies`.
-    fn add_dep(&mut self, dep: Dependency) {
-        self.dependencies.push(dep)
-    }
+    rev_deps
+}
 
-    pub fn root_file_id(&self) -> EditionedFileId {
-        EditionedFileId::new(self.root_file_id, self.edition)
+impl BuiltCrateData {
+    pub fn root_file_id(&self, db: &dyn salsa::Database) -> EditionedFileId {
+        EditionedFileId::new(db, self.root_file_id, self.edition)
     }
 }
 
@@ -657,21 +835,21 @@ impl<'a> IntoIterator for &'a Env {
 
 #[derive(Debug)]
 pub struct CyclicDependenciesError {
-    path: Vec<(CrateId, Option<CrateDisplayName>)>,
+    path: Vec<(CrateBuilderId, Option<CrateDisplayName>)>,
 }
 
 impl CyclicDependenciesError {
-    fn from(&self) -> &(CrateId, Option<CrateDisplayName>) {
+    fn from(&self) -> &(CrateBuilderId, Option<CrateDisplayName>) {
         self.path.first().unwrap()
     }
-    fn to(&self) -> &(CrateId, Option<CrateDisplayName>) {
+    fn to(&self) -> &(CrateBuilderId, Option<CrateDisplayName>) {
         self.path.last().unwrap()
     }
 }
 
 impl fmt::Display for CyclicDependenciesError {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        let render = |(id, name): &(CrateId, Option<CrateDisplayName>)| match name {
+        let render = |(id, name): &(CrateBuilderId, Option<CrateDisplayName>)| match name {
             Some(it) => format!("{it}({id:?})"),
             None => format!("{id:?}"),
         };
@@ -688,13 +866,20 @@ impl fmt::Display for CyclicDependenciesError {
 
 #[cfg(test)]
 mod tests {
-    use crate::CrateOrigin;
+    use triomphe::Arc;
+    use vfs::AbsPathBuf;
+
+    use crate::{CrateWorkspaceData, DependencyBuilder};
 
-    use super::{CrateGraph, CrateName, Dependency, Edition::Edition2018, Env, FileId};
+    use super::{CrateGraphBuilder, CrateName, CrateOrigin, Edition::Edition2018, Env, FileId};
+
+    fn empty_ws_data() -> Arc<CrateWorkspaceData> {
+        Arc::new(CrateWorkspaceData { data_layout: Err("".into()), toolchain: None })
+    }
 
     #[test]
     fn detect_cyclic_dependency_indirect() {
-        let mut graph = CrateGraph::default();
+        let mut graph = CrateGraphBuilder::default();
         let crate1 = graph.add_crate_root(
             FileId::from_raw(1u32),
             Edition2018,
@@ -705,7 +890,8 @@ mod tests {
             Env::default(),
             CrateOrigin::Local { repo: None, name: None },
             false,
-            None,
+            Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())),
+            empty_ws_data(),
         );
         let crate2 = graph.add_crate_root(
             FileId::from_raw(2u32),
@@ -717,7 +903,8 @@ mod tests {
             Env::default(),
             CrateOrigin::Local { repo: None, name: None },
             false,
-            None,
+            Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())),
+            empty_ws_data(),
         );
         let crate3 = graph.add_crate_root(
             FileId::from_raw(3u32),
@@ -729,22 +916,29 @@ mod tests {
             Env::default(),
             CrateOrigin::Local { repo: None, name: None },
             false,
-            None,
+            Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())),
+            empty_ws_data(),
+        );
+        assert!(
+            graph
+                .add_dep(crate1, DependencyBuilder::new(CrateName::new("crate2").unwrap(), crate2,))
+                .is_ok()
+        );
+        assert!(
+            graph
+                .add_dep(crate2, DependencyBuilder::new(CrateName::new("crate3").unwrap(), crate3,))
+                .is_ok()
+        );
+        assert!(
+            graph
+                .add_dep(crate3, DependencyBuilder::new(CrateName::new("crate1").unwrap(), crate1,))
+                .is_err()
         );
-        assert!(graph
-            .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2,))
-            .is_ok());
-        assert!(graph
-            .add_dep(crate2, Dependency::new(CrateName::new("crate3").unwrap(), crate3,))
-            .is_ok());
-        assert!(graph
-            .add_dep(crate3, Dependency::new(CrateName::new("crate1").unwrap(), crate1,))
-            .is_err());
     }
 
     #[test]
     fn detect_cyclic_dependency_direct() {
-        let mut graph = CrateGraph::default();
+        let mut graph = CrateGraphBuilder::default();
         let crate1 = graph.add_crate_root(
             FileId::from_raw(1u32),
             Edition2018,
@@ -755,7 +949,8 @@ mod tests {
             Env::default(),
             CrateOrigin::Local { repo: None, name: None },
             false,
-            None,
+            Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())),
+            empty_ws_data(),
         );
         let crate2 = graph.add_crate_root(
             FileId::from_raw(2u32),
@@ -767,19 +962,24 @@ mod tests {
             Env::default(),
             CrateOrigin::Local { repo: None, name: None },
             false,
-            None,
+            Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())),
+            empty_ws_data(),
+        );
+        assert!(
+            graph
+                .add_dep(crate1, DependencyBuilder::new(CrateName::new("crate2").unwrap(), crate2,))
+                .is_ok()
+        );
+        assert!(
+            graph
+                .add_dep(crate2, DependencyBuilder::new(CrateName::new("crate2").unwrap(), crate2,))
+                .is_err()
         );
-        assert!(graph
-            .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2,))
-            .is_ok());
-        assert!(graph
-            .add_dep(crate2, Dependency::new(CrateName::new("crate2").unwrap(), crate2,))
-            .is_err());
     }
 
     #[test]
     fn it_works() {
-        let mut graph = CrateGraph::default();
+        let mut graph = CrateGraphBuilder::default();
         let crate1 = graph.add_crate_root(
             FileId::from_raw(1u32),
             Edition2018,
@@ -790,7 +990,8 @@ mod tests {
             Env::default(),
             CrateOrigin::Local { repo: None, name: None },
             false,
-            None,
+            Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())),
+            empty_ws_data(),
         );
         let crate2 = graph.add_crate_root(
             FileId::from_raw(2u32),
@@ -802,7 +1003,8 @@ mod tests {
             Env::default(),
             CrateOrigin::Local { repo: None, name: None },
             false,
-            None,
+            Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())),
+            empty_ws_data(),
         );
         let crate3 = graph.add_crate_root(
             FileId::from_raw(3u32),
@@ -814,19 +1016,24 @@ mod tests {
             Env::default(),
             CrateOrigin::Local { repo: None, name: None },
             false,
-            None,
+            Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())),
+            empty_ws_data(),
+        );
+        assert!(
+            graph
+                .add_dep(crate1, DependencyBuilder::new(CrateName::new("crate2").unwrap(), crate2,))
+                .is_ok()
+        );
+        assert!(
+            graph
+                .add_dep(crate2, DependencyBuilder::new(CrateName::new("crate3").unwrap(), crate3,))
+                .is_ok()
         );
-        assert!(graph
-            .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2,))
-            .is_ok());
-        assert!(graph
-            .add_dep(crate2, Dependency::new(CrateName::new("crate3").unwrap(), crate3,))
-            .is_ok());
     }
 
     #[test]
     fn dashes_are_normalized() {
-        let mut graph = CrateGraph::default();
+        let mut graph = CrateGraphBuilder::default();
         let crate1 = graph.add_crate_root(
             FileId::from_raw(1u32),
             Edition2018,
@@ -837,7 +1044,8 @@ mod tests {
             Env::default(),
             CrateOrigin::Local { repo: None, name: None },
             false,
-            None,
+            Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())),
+            empty_ws_data(),
         );
         let crate2 = graph.add_crate_root(
             FileId::from_raw(2u32),
@@ -849,17 +1057,25 @@ mod tests {
             Env::default(),
             CrateOrigin::Local { repo: None, name: None },
             false,
-            None,
+            Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())),
+            empty_ws_data(),
+        );
+        assert!(
+            graph
+                .add_dep(
+                    crate1,
+                    DependencyBuilder::new(
+                        CrateName::normalize_dashes("crate-name-with-dashes"),
+                        crate2,
+                    )
+                )
+                .is_ok()
         );
-        assert!(graph
-            .add_dep(
-                crate1,
-                Dependency::new(CrateName::normalize_dashes("crate-name-with-dashes"), crate2,)
-            )
-            .is_ok());
         assert_eq!(
-            graph[crate1].dependencies,
-            vec![Dependency::new(CrateName::new("crate_name_with_dashes").unwrap(), crate2,)]
+            graph.arena[crate1].basic.dependencies,
+            vec![
+                DependencyBuilder::new(CrateName::new("crate_name_with_dashes").unwrap(), crate2,)
+            ]
         );
     }
 }
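
Not part of the patch: a rough sketch of the two-phase flow this file now follows, assuming the same imports as the test module above. The graph is assembled on a plain `CrateGraphBuilder` and only converted into salsa `Crate` inputs by `set_in_db`, which reuses unchanged inputs. The file ids, crate name, and `db` handle below are placeholders.

    fn build_two_crates(db: &mut dyn RootQueryDb) -> CratesIdMap {
        let ws_data = Arc::new(CrateWorkspaceData { data_layout: Err("".into()), toolchain: None });
        let cwd = Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap()));

        let mut builder = CrateGraphBuilder::default();
        let lib = builder.add_crate_root(
            FileId::from_raw(1u32),
            Edition2018,
            None,                  // display name
            None,                  // version
            CfgOptions::default(),
            None,                  // potential cfg options
            Env::default(),
            CrateOrigin::Local { repo: None, name: None },
            false,                 // is_proc_macro
            cwd.clone(),
            ws_data.clone(),
        );
        let bin = builder.add_crate_root(
            FileId::from_raw(2u32),
            Edition2018,
            None,
            None,
            CfgOptions::default(),
            None,
            Env::default(),
            CrateOrigin::Local { repo: None, name: None },
            false,
            cwd,
            ws_data,
        );
        builder
            .add_dep(bin, DependencyBuilder::new(CrateName::new("lib").unwrap(), lib))
            .expect("no dependency cycle");

        // Creates or updates the salsa `Crate` inputs and returns the
        // `CrateBuilderId -> Crate` mapping for the crates just added.
        builder.set_in_db(db)
    }
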
diff --git a/src/tools/rust-analyzer/crates/base-db/src/lib.rs b/src/tools/rust-analyzer/crates/base-db/src/lib.rs
index eed8c88683951..7f7a712577e41 100644
--- a/src/tools/rust-analyzer/crates/base-db/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/base-db/src/lib.rs
@@ -3,201 +3,373 @@
 mod change;
 mod input;
 
-use std::panic;
-
-use ra_salsa::Durability;
-use rustc_hash::FxHashMap;
-use span::EditionedFileId;
-use syntax::{ast, Parse, SourceFile, SyntaxError};
-use triomphe::Arc;
-use vfs::FileId;
+use std::hash::BuildHasherDefault;
 
 pub use crate::{
     change::FileChange,
     input::{
-        CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Env,
-        LangCrateOrigin, ProcMacroPaths, ReleaseChannel, SourceRoot, SourceRootId,
-        TargetLayoutLoadResult,
+        BuiltCrateData, BuiltDependency, Crate, CrateBuilder, CrateBuilderId, CrateDataBuilder,
+        CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin, CratesIdMap, CratesMap,
+        DependencyBuilder, Env, ExtraCrateData, LangCrateOrigin, ProcMacroPaths, ReleaseChannel,
+        SourceRoot, SourceRootId, TargetLayoutLoadResult, UniqueCrateData,
     },
 };
-pub use ra_salsa::{self, Cancelled};
-pub use vfs::{file_set::FileSet, AnchoredPath, AnchoredPathBuf, VfsPath};
-
+use dashmap::{DashMap, mapref::entry::Entry};
+pub use query_group::{self};
+use rustc_hash::{FxHashSet, FxHasher};
+pub use salsa::{self};
+use salsa::{Durability, Setter};
 pub use semver::{BuildMetadata, Prerelease, Version, VersionReq};
+use span::Edition;
+use syntax::{Parse, SyntaxError, ast};
+use triomphe::Arc;
+pub use vfs::{AnchoredPath, AnchoredPathBuf, FileId, VfsPath, file_set::FileSet};
 
 #[macro_export]
 macro_rules! impl_intern_key {
-    ($name:ident) => {
-        impl $crate::ra_salsa::InternKey for $name {
-            fn from_intern_id(v: $crate::ra_salsa::InternId) -> Self {
-                $name(v)
-            }
-            fn as_intern_id(&self) -> $crate::ra_salsa::InternId {
-                self.0
+    ($id:ident, $loc:ident) => {
+        #[salsa::interned(no_lifetime)]
+        pub struct $id {
+            pub loc: $loc,
+        }
+
+        // If we derive this, salsa prints the values recursively, which causes us to blow the stack.
+        impl ::std::fmt::Debug for $id {
+            fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
+                f.debug_tuple(stringify!($id))
+                    .field(&format_args!("{:04x}", self.0.as_u32()))
+                    .finish()
             }
         }
     };
 }
 
-pub trait Upcast<T: ?Sized> {
-    fn upcast(&self) -> &T;
-}
-
 pub const DEFAULT_FILE_TEXT_LRU_CAP: u16 = 16;
 pub const DEFAULT_PARSE_LRU_CAP: u16 = 128;
 pub const DEFAULT_BORROWCK_LRU_CAP: u16 = 2024;
 
-pub trait FileLoader {
-    fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId>;
-    /// Crates whose root's source root is the same as the source root of `file_id`
-    fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]>;
+#[derive(Debug, Default)]
+pub struct Files {
+    files: Arc<DashMap<vfs::FileId, FileText, BuildHasherDefault<FxHasher>>>,
+    source_roots: Arc<DashMap<SourceRootId, SourceRootInput, BuildHasherDefault<FxHasher>>>,
+    file_source_roots: Arc<DashMap<vfs::FileId, FileSourceRootInput, BuildHasherDefault<FxHasher>>>,
 }
 
-/// Database which stores all significant input facts: source code and project
-/// model. Everything else in rust-analyzer is derived from these queries.
-#[ra_salsa::query_group(SourceDatabaseStorage)]
-pub trait SourceDatabase: FileLoader + std::fmt::Debug {
-    #[ra_salsa::input]
-    fn compressed_file_text(&self, file_id: FileId) -> Arc<[u8]>;
+impl Files {
+    pub fn file_text(&self, file_id: vfs::FileId) -> FileText {
+        *self.files.get(&file_id).expect("Unable to fetch file; this is a bug")
+    }
 
-    /// Text of the file.
-    #[ra_salsa::lru]
-    fn file_text(&self, file_id: FileId) -> Arc<str>;
+    pub fn set_file_text(&self, db: &mut dyn SourceDatabase, file_id: vfs::FileId, text: &str) {
+        match self.files.entry(file_id) {
+            Entry::Occupied(mut occupied) => {
+                occupied.get_mut().set_text(db).to(Arc::from(text));
+            }
+            Entry::Vacant(vacant) => {
+                let text = FileText::new(db, Arc::from(text), file_id);
+                vacant.insert(text);
+            }
+        };
+    }
 
-    /// Parses the file into the syntax tree.
-    #[ra_salsa::lru]
-    fn parse(&self, file_id: EditionedFileId) -> Parse<ast::SourceFile>;
+    pub fn set_file_text_with_durability(
+        &self,
+        db: &mut dyn SourceDatabase,
+        file_id: vfs::FileId,
+        text: &str,
+        durability: Durability,
+    ) {
+        match self.files.entry(file_id) {
+            Entry::Occupied(mut occupied) => {
+                occupied.get_mut().set_text(db).with_durability(durability).to(Arc::from(text));
+            }
+            Entry::Vacant(vacant) => {
+                let text =
+                    FileText::builder(Arc::from(text), file_id).durability(durability).new(db);
+                vacant.insert(text);
+            }
+        };
+    }
 
-    /// Returns the set of errors obtained from parsing the file including validation errors.
-    fn parse_errors(&self, file_id: EditionedFileId) -> Option<Arc<[SyntaxError]>>;
+    /// Returns the source root with the given id.
+    pub fn source_root(&self, source_root_id: SourceRootId) -> SourceRootInput {
+        let source_root = self
+            .source_roots
+            .get(&source_root_id)
+            .expect("Unable to fetch source root id; this is a bug");
 
-    /// The crate graph.
-    #[ra_salsa::input]
-    fn crate_graph(&self) -> Arc<CrateGraph>;
+        *source_root
+    }
 
-    #[ra_salsa::input]
-    fn crate_workspace_data(&self) -> Arc<FxHashMap<CrateId, Arc<CrateWorkspaceData>>>;
+    pub fn set_source_root_with_durability(
+        &self,
+        db: &mut dyn SourceDatabase,
+        source_root_id: SourceRootId,
+        source_root: Arc<SourceRoot>,
+        durability: Durability,
+    ) {
+        match self.source_roots.entry(source_root_id) {
+            Entry::Occupied(mut occupied) => {
+                occupied.get_mut().set_source_root(db).with_durability(durability).to(source_root);
+            }
+            Entry::Vacant(vacant) => {
+                let source_root =
+                    SourceRootInput::builder(source_root).durability(durability).new(db);
+                vacant.insert(source_root);
+            }
+        };
+    }
+
+    pub fn file_source_root(&self, id: vfs::FileId) -> FileSourceRootInput {
+        let file_source_root = self
+            .file_source_roots
+            .get(&id)
+            .expect("Unable to fetch FileSourceRootInput; this is a bug");
+        *file_source_root
+    }
 
-    #[ra_salsa::transparent]
-    fn toolchain_channel(&self, krate: CrateId) -> Option<ReleaseChannel>;
+    pub fn set_file_source_root_with_durability(
+        &self,
+        db: &mut dyn SourceDatabase,
+        id: vfs::FileId,
+        source_root_id: SourceRootId,
+        durability: Durability,
+    ) {
+        match self.file_source_roots.entry(id) {
+            Entry::Occupied(mut occupied) => {
+                occupied
+                    .get_mut()
+                    .set_source_root_id(db)
+                    .with_durability(durability)
+                    .to(source_root_id);
+            }
+            Entry::Vacant(vacant) => {
+                let file_source_root =
+                    FileSourceRootInput::builder(source_root_id).durability(durability).new(db);
+                vacant.insert(file_source_root);
+            }
+        };
+    }
 }
 
-/// Crate related data shared by the whole workspace.
-#[derive(Debug, PartialEq, Eq, Hash, Clone)]
-pub struct CrateWorkspaceData {
-    // FIXME: Consider removing this, making HirDatabase::target_data_layout an input query
-    pub data_layout: TargetLayoutLoadResult,
-    /// Toolchain version used to compile the crate.
-    pub toolchain: Option<Version>,
+#[salsa::interned(no_lifetime, debug, constructor=from_span)]
+pub struct EditionedFileId {
+    pub editioned_file_id: span::EditionedFileId,
 }
 
-fn toolchain_channel(db: &dyn SourceDatabase, krate: CrateId) -> Option<ReleaseChannel> {
-    db.crate_workspace_data()
-        .get(&krate)?
-        .toolchain
-        .as_ref()
-        .and_then(|v| ReleaseChannel::from_str(&v.pre))
+impl EditionedFileId {
+    // Salsa already uses the name `new`...
+    #[inline]
+    pub fn new(db: &dyn salsa::Database, file_id: FileId, edition: Edition) -> Self {
+        EditionedFileId::from_span(db, span::EditionedFileId::new(file_id, edition))
+    }
+
+    #[inline]
+    pub fn current_edition(db: &dyn salsa::Database, file_id: FileId) -> Self {
+        EditionedFileId::new(db, file_id, Edition::CURRENT)
+    }
+
+    #[inline]
+    pub fn file_id(self, db: &dyn salsa::Database) -> vfs::FileId {
+        let id = self.editioned_file_id(db);
+        id.file_id()
+    }
+
+    #[inline]
+    pub fn unpack(self, db: &dyn salsa::Database) -> (vfs::FileId, span::Edition) {
+        let id = self.editioned_file_id(db);
+        (id.file_id(), id.edition())
+    }
+
+    #[inline]
+    pub fn edition(self, db: &dyn SourceDatabase) -> Edition {
+        self.editioned_file_id(db).edition()
+    }
 }
 
-fn parse(db: &dyn SourceDatabase, file_id: EditionedFileId) -> Parse<ast::SourceFile> {
-    let _p = tracing::info_span!("parse", ?file_id).entered();
-    let (file_id, edition) = file_id.unpack();
-    let text = db.file_text(file_id);
-    SourceFile::parse(&text, edition)
+#[salsa::input(debug)]
+pub struct FileText {
+    pub text: Arc<str>,
+    pub file_id: vfs::FileId,
 }
 
-fn parse_errors(db: &dyn SourceDatabase, file_id: EditionedFileId) -> Option<Arc<[SyntaxError]>> {
-    let errors = db.parse(file_id).errors();
-    match &*errors {
-        [] => None,
-        [..] => Some(errors.into()),
-    }
+#[salsa::input(debug)]
+pub struct FileSourceRootInput {
+    pub source_root_id: SourceRootId,
 }
 
-fn file_text(db: &dyn SourceDatabase, file_id: FileId) -> Arc<str> {
-    let bytes = db.compressed_file_text(file_id);
-    let bytes =
-        lz4_flex::decompress_size_prepended(&bytes).expect("lz4 decompression should not fail");
-    let text = std::str::from_utf8(&bytes).expect("file contents should be valid UTF-8");
-    Arc::from(text)
+#[salsa::input(debug)]
+pub struct SourceRootInput {
+    pub source_root: Arc<SourceRoot>,
 }
 
-/// We don't want to give HIR knowledge of source roots, hence we extract these
-/// methods into a separate DB.
-#[ra_salsa::query_group(SourceRootDatabaseStorage)]
-pub trait SourceRootDatabase: SourceDatabase {
-    /// Path to a file, relative to the root of its source root.
-    /// Source root of the file.
-    #[ra_salsa::input]
-    fn file_source_root(&self, file_id: FileId) -> SourceRootId;
-    /// Contents of the source root.
-    #[ra_salsa::input]
-    fn source_root(&self, id: SourceRootId) -> Arc<SourceRoot>;
+/// Database which stores all significant input facts: source code and project
+/// model. Everything else in rust-analyzer is derived from these queries.
+#[query_group::query_group]
+pub trait RootQueryDb: SourceDatabase + salsa::Database {
+    /// Parses the file into the syntax tree.
+    #[salsa::invoke(parse)]
+    #[salsa::lru(128)]
+    fn parse(&self, file_id: EditionedFileId) -> Parse<ast::SourceFile>;
+
+    /// Returns the set of errors obtained from parsing the file including validation errors.
+    #[salsa::transparent]
+    fn parse_errors(&self, file_id: EditionedFileId) -> Option<&[SyntaxError]>;
+
+    #[salsa::transparent]
+    fn toolchain_channel(&self, krate: Crate) -> Option<ReleaseChannel>;
 
     /// Crates whose root file is in `id`.
-    fn source_root_crates(&self, id: SourceRootId) -> Arc<[CrateId]>;
+    #[salsa::invoke_interned(source_root_crates)]
+    fn source_root_crates(&self, id: SourceRootId) -> Arc<[Crate]>;
+
+    #[salsa::transparent]
+    fn relevant_crates(&self, file_id: FileId) -> Arc<[Crate]>;
+
+    /// Returns the crates in topological order.
+    ///
+    /// **Warning**: do not use this query in `hir-*` crates! It kills incrementality across crate metadata modifications.
+    #[salsa::input]
+    fn all_crates(&self) -> Arc<Box<[Crate]>>;
+
+    /// Returns an iterator over all transitive dependencies of the given crate,
+    /// including the crate itself.
+    ///
+    /// **Warning**: do not use this query in `hir-*` crates! It kills incrementality across crate metadata modifications.
+    #[salsa::transparent]
+    fn transitive_deps(&self, crate_id: Crate) -> FxHashSet<Crate>;
+
+    /// Returns all transitive reverse dependencies of the given crate,
+    /// including the crate itself.
+    ///
+    /// **Warning**: do not use this query in `hir-*` crates! It kills incrementality across crate metadata modifications.
+    #[salsa::invoke(input::transitive_rev_deps)]
+    #[salsa::transparent]
+    fn transitive_rev_deps(&self, of: Crate) -> FxHashSet<Crate>;
 }
 
-pub trait SourceDatabaseFileInputExt {
-    fn set_file_text(&mut self, file_id: FileId, text: &str) {
-        self.set_file_text_with_durability(file_id, text, Durability::LOW);
+pub fn transitive_deps(db: &dyn SourceDatabase, crate_id: Crate) -> FxHashSet<Crate> {
+    // There is a bit of duplication here and in `CrateGraphBuilder` in the same method, but it's not terrible
+    // and removing that is a bit difficult.
+    let mut worklist = vec![crate_id];
+    let mut deps = FxHashSet::default();
+
+    while let Some(krate) = worklist.pop() {
+        if !deps.insert(krate) {
+            continue;
+        }
+
+        worklist.extend(krate.data(db).dependencies.iter().map(|dep| dep.crate_id));
     }
 
+    deps
+}
+
+#[salsa::db]
+pub trait SourceDatabase: salsa::Database {
+    /// Text of the file.
+    fn file_text(&self, file_id: vfs::FileId) -> FileText;
+
+    fn set_file_text(&mut self, file_id: vfs::FileId, text: &str);
+
     fn set_file_text_with_durability(
         &mut self,
-        file_id: FileId,
+        file_id: vfs::FileId,
         text: &str,
         durability: Durability,
     );
-}
 
-impl<Db: ?Sized + SourceRootDatabase> SourceDatabaseFileInputExt for Db {
-    fn set_file_text_with_durability(
+    /// Contents of the source root.
+    fn source_root(&self, id: SourceRootId) -> SourceRootInput;
+
+    fn file_source_root(&self, id: vfs::FileId) -> FileSourceRootInput;
+
+    fn set_file_source_root_with_durability(
         &mut self,
-        file_id: FileId,
-        text: &str,
+        id: vfs::FileId,
+        source_root_id: SourceRootId,
         durability: Durability,
-    ) {
-        let bytes = text.as_bytes();
-        let compressed = lz4_flex::compress_prepend_size(bytes);
-        self.set_compressed_file_text_with_durability(
-            file_id,
-            Arc::from(compressed.as_slice()),
-            durability,
-        )
+    );
+
+    /// Sets the contents of the source root with the given id.
+    fn set_source_root_with_durability(
+        &mut self,
+        source_root_id: SourceRootId,
+        source_root: Arc<SourceRoot>,
+        durability: Durability,
+    );
+
+    fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
+        // FIXME: this *somehow* should be platform agnostic...
+        let source_root = self.file_source_root(path.anchor);
+        let source_root = self.source_root(source_root.source_root_id(self));
+        source_root.source_root(self).resolve_path(path)
+    }
+
+    #[doc(hidden)]
+    fn crates_map(&self) -> Arc<CratesMap>;
+}
+
+/// Crate related data shared by the whole workspace.
+#[derive(Debug, PartialEq, Eq, Hash, Clone)]
+pub struct CrateWorkspaceData {
+    // FIXME: Consider removing this, making HirDatabase::target_data_layout an input query
+    pub data_layout: TargetLayoutLoadResult,
+    /// Toolchain version used to compile the crate.
+    pub toolchain: Option<Version>,
+}
+
+impl CrateWorkspaceData {
+    pub fn is_atleast_187(&self) -> bool {
+        const VERSION_187: Version = Version {
+            major: 1,
+            minor: 87,
+            patch: 0,
+            pre: Prerelease::EMPTY,
+            build: BuildMetadata::EMPTY,
+        };
+        self.toolchain.as_ref().map_or(false, |v| *v >= VERSION_187)
     }
 }
 
-fn source_root_crates(db: &dyn SourceRootDatabase, id: SourceRootId) -> Arc<[CrateId]> {
-    let graph = db.crate_graph();
-    let mut crates = graph
+fn toolchain_channel(db: &dyn RootQueryDb, krate: Crate) -> Option<ReleaseChannel> {
+    krate.workspace_data(db).toolchain.as_ref().and_then(|v| ReleaseChannel::from_str(&v.pre))
+}
+
+fn parse(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Parse<ast::SourceFile> {
+    let _p = tracing::info_span!("parse", ?file_id).entered();
+    let (file_id, edition) = file_id.unpack(db.as_dyn_database());
+    let text = db.file_text(file_id).text(db);
+    ast::SourceFile::parse(&text, edition)
+}
+
+fn parse_errors(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Option<&[SyntaxError]> {
+    #[salsa::tracked(return_ref)]
+    fn parse_errors(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Option<Box<[SyntaxError]>> {
+        let errors = db.parse(file_id).errors();
+        match &*errors {
+            [] => None,
+            [..] => Some(errors.into()),
+        }
+    }
+    parse_errors(db, file_id).as_ref().map(|it| &**it)
+}
+
+fn source_root_crates(db: &dyn RootQueryDb, id: SourceRootId) -> Arc<[Crate]> {
+    let crates = db.all_crates();
+    crates
         .iter()
+        .copied()
         .filter(|&krate| {
-            let root_file = graph[krate].root_file_id;
-            db.file_source_root(root_file) == id
+            let root_file = krate.data(db).root_file_id;
+            db.file_source_root(root_file).source_root_id(db) == id
         })
-        .collect::<Vec<_>>();
-    crates.sort();
-    crates.dedup();
-    crates.into_iter().collect()
+        .collect()
 }
 
-// FIXME: Would be nice to get rid of this somehow
-/// Silly workaround for cyclic deps due to the SourceRootDatabase and SourceDatabase split
-/// regarding FileLoader
-pub struct FileLoaderDelegate<T>(pub T);
+fn relevant_crates(db: &dyn RootQueryDb, file_id: FileId) -> Arc<[Crate]> {
+    let _p = tracing::info_span!("relevant_crates").entered();
 
-impl<T: SourceRootDatabase> FileLoader for FileLoaderDelegate<&'_ T> {
-    fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
-        // FIXME: this *somehow* should be platform agnostic...
-        let source_root = self.0.file_source_root(path.anchor);
-        let source_root = self.0.source_root(source_root);
-        source_root.resolve_path(path)
-    }
-
-    fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]> {
-        let _p = tracing::info_span!("relevant_crates").entered();
-        let source_root = self.0.file_source_root(file_id);
-        self.0.source_root_crates(source_root)
-    }
+    let source_root = db.file_source_root(file_id);
+    db.source_root_crates(source_root.source_root_id(db))
 }
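
Not part of the patch: a small sketch of how a consumer reads and parses a file through the new input-based API, assuming the imports at the top of this file and any `db` implementing `RootQueryDb`.

    fn parse_with_current_edition(
        db: &dyn RootQueryDb,
        file_id: vfs::FileId,
    ) -> Parse<ast::SourceFile> {
        // File text is now a salsa input struct; the contents are a tracked field on it.
        let _text: Arc<str> = db.file_text(file_id).text(db);
        // Parsing is keyed by an interned (file id, edition) pair instead of a raw file id.
        let editioned = EditionedFileId::current_edition(db.as_dyn_database(), file_id);
        db.parse(editioned)
    }
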
diff --git a/src/tools/rust-analyzer/crates/cfg/Cargo.toml b/src/tools/rust-analyzer/crates/cfg/Cargo.toml
index e887368ef28f8..d7764a16c044c 100644
--- a/src/tools/rust-analyzer/crates/cfg/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/cfg/Cargo.toml
@@ -20,13 +20,13 @@ tt = { workspace = true, optional = true }
 intern.workspace = true
 
 [dev-dependencies]
-expect-test = "1.4.1"
-oorandom = "11.1.3"
+expect-test = "1.5.1"
+oorandom = "11.1.5"
 # We depend on both individually instead of using `features = ["derive"]` to microoptimize the
 # build graph: if the feature was enabled, syn would be built early on in the graph if `smolstr`
 # supports `arbitrary`. This way, we avoid feature unification.
-arbitrary = "1.3.2"
-derive_arbitrary = "1.3.2"
+arbitrary = "1.4.1"
+derive_arbitrary = "1.4.1"
 
 # local deps
 syntax-bridge.workspace = true
diff --git a/src/tools/rust-analyzer/crates/cfg/src/lib.rs b/src/tools/rust-analyzer/crates/cfg/src/lib.rs
index 08545b685119d..906106ca5db0b 100644
--- a/src/tools/rust-analyzer/crates/cfg/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/cfg/src/lib.rs
@@ -9,7 +9,7 @@ use std::fmt;
 
 use rustc_hash::FxHashSet;
 
-use intern::{sym, Symbol};
+use intern::{Symbol, sym};
 
 pub use cfg_expr::{CfgAtom, CfgExpr};
 pub use dnf::DnfExpr;
@@ -31,7 +31,7 @@ pub struct CfgOptions {
 
 impl Default for CfgOptions {
     fn default() -> Self {
-        Self { enabled: FxHashSet::from_iter([CfgAtom::Flag(sym::true_.clone())]) }
+        Self { enabled: FxHashSet::from_iter([CfgAtom::Flag(sym::true_)]) }
     }
 }
 
@@ -104,6 +104,17 @@ impl CfgOptions {
             _ => None,
         })
     }
+
+    pub fn to_hashable(&self) -> HashableCfgOptions {
+        let mut enabled = self.enabled.iter().cloned().collect::<Box<[_]>>();
+        enabled.sort_unstable();
+        HashableCfgOptions { _enabled: enabled }
+    }
+
+    #[inline]
+    pub fn shrink_to_fit(&mut self) {
+        self.enabled.shrink_to_fit();
+    }
 }
 
 impl Extend<CfgAtom> for CfgOptions {
@@ -256,3 +267,9 @@ impl fmt::Display for InactiveReason {
         Ok(())
     }
 }
+
+/// A `CfgOptions` that implements `Hash`, for the sake of hashing only.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct HashableCfgOptions {
+    _enabled: Box<[CfgAtom]>,
+}
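
Not part of the patch: a brief sketch of why `to_hashable` exists. `CfgOptions` is backed by a hash set and so has no `Hash` impl of its own; `to_hashable` snapshots the enabled atoms into a sorted `HashableCfgOptions` that can participate in hashed keys (as the crate disambiguator in `base-db` does). Assumes the imports at the top of this file; `my_cfg` is an illustrative atom name.

    let mut cfg = CfgOptions::default();
    cfg.extend([CfgAtom::Flag(sym::test), CfgAtom::Flag(Symbol::intern("my_cfg"))]);

    // Sorting makes the snapshot deterministic, so equal option sets hash equally.
    let key: HashableCfgOptions = cfg.to_hashable();
    let mut seen: FxHashSet<HashableCfgOptions> = FxHashSet::default();
    assert!(seen.insert(key.clone()));
    assert!(!seen.insert(key));
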
diff --git a/src/tools/rust-analyzer/crates/cfg/src/tests.rs b/src/tools/rust-analyzer/crates/cfg/src/tests.rs
index 6d87d83ad9300..6766748097f00 100644
--- a/src/tools/rust-analyzer/crates/cfg/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/cfg/src/tests.rs
@@ -1,10 +1,11 @@
 use arbitrary::{Arbitrary, Unstructured};
-use expect_test::{expect, Expect};
+use expect_test::{Expect, expect};
 use intern::Symbol;
-use syntax::{ast, AstNode, Edition};
+use syntax::{AstNode, Edition, ast};
 use syntax_bridge::{
-    dummy_test_span_utils::{DummyTestSpanMap, DUMMY},
-    syntax_node_to_token_tree, DocCommentDesugarMode,
+    DocCommentDesugarMode,
+    dummy_test_span_utils::{DUMMY, DummyTestSpanMap},
+    syntax_node_to_token_tree,
 };
 
 use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
diff --git a/src/tools/rust-analyzer/crates/edition/src/lib.rs b/src/tools/rust-analyzer/crates/edition/src/lib.rs
index 7e9c94af408c1..f1a1fe596493b 100644
--- a/src/tools/rust-analyzer/crates/edition/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/edition/src/lib.rs
@@ -15,9 +15,19 @@ pub enum Edition {
 impl Edition {
     pub const DEFAULT: Edition = Edition::Edition2015;
     pub const LATEST: Edition = Edition::Edition2024;
-    pub const CURRENT: Edition = Edition::Edition2021;
+    pub const CURRENT: Edition = Edition::Edition2024;
     /// The current latest stable edition, note this is usually not the right choice in code.
-    pub const CURRENT_FIXME: Edition = Edition::Edition2021;
+    pub const CURRENT_FIXME: Edition = Edition::Edition2024;
+
+    pub fn from_u32(u32: u32) -> Edition {
+        match u32 {
+            0 => Edition::Edition2015,
+            1 => Edition::Edition2018,
+            2 => Edition::Edition2021,
+            3 => Edition::Edition2024,
+            _ => panic!("invalid edition"),
+        }
+    }
 
     pub fn at_least_2024(self) -> bool {
         self >= Edition::Edition2024
@@ -31,6 +41,15 @@ impl Edition {
         self >= Edition::Edition2018
     }
 
+    pub fn number(&self) -> usize {
+        match self {
+            Edition::Edition2015 => 2015,
+            Edition::Edition2018 => 2018,
+            Edition::Edition2021 => 2021,
+            Edition::Edition2024 => 2024,
+        }
+    }
+
     pub fn iter() -> impl Iterator<Item = Edition> {
         [Edition::Edition2015, Edition::Edition2018, Edition::Edition2021, Edition::Edition2024]
             .iter()
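
Not part of the patch: a quick sanity sketch (e.g. as a test body) of the new conversion helpers, which round-trip with the declaration order above, and of `CURRENT` now pointing at the 2024 edition.

    assert!(Edition::from_u32(3) == Edition::Edition2024);
    assert!(Edition::Edition2021.number() == 2021);
    assert!(Edition::CURRENT == Edition::Edition2024);
    assert!(Edition::Edition2024.at_least_2024());
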
diff --git a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml
index a22961c26c84c..f97597ffe5a6b 100644
--- a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml
@@ -14,8 +14,7 @@ rust-version.workspace = true
 [dependencies]
 arrayvec.workspace = true
 bitflags.workspace = true
-cov-mark = "2.0.0-pre.1"
-dashmap.workspace = true
+cov-mark = "2.0.0"
 drop_bomb = "0.1.5"
 either.workspace = true
 fst = { version = "0.4.7", default-features = false }
@@ -25,12 +24,12 @@ la-arena.workspace = true
 rustc-hash.workspace = true
 tracing.workspace = true
 smallvec.workspace = true
-hashbrown.workspace = true
 triomphe.workspace = true
-rustc_apfloat = "0.2.0"
+rustc_apfloat = "0.2.2"
 text-size.workspace = true
+salsa.workspace = true
+query-group.workspace = true
 
-ra-ap-rustc_hashes.workspace = true
 ra-ap-rustc_parse_format.workspace = true
 ra-ap-rustc_abi.workspace = true
 
@@ -44,7 +43,7 @@ mbe.workspace = true
 cfg.workspace = true
 tt.workspace = true
 span.workspace = true
-
+thin-vec = "0.2.14"
 
 [dev-dependencies]
 expect-test.workspace = true
@@ -53,6 +52,7 @@ expect-test.workspace = true
 test-utils.workspace = true
 test-fixture.workspace = true
 syntax-bridge.workspace = true
+
 [features]
 in-rust-tree = ["hir-expand/in-rust-tree"]
 
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs
index 710bffcefe900..a80313aba3e49 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs
@@ -2,31 +2,32 @@
 
 use std::{borrow::Cow, hash::Hash, ops};
 
-use base_db::CrateId;
+use base_db::Crate;
 use cfg::{CfgExpr, CfgOptions};
 use either::Either;
 use hir_expand::{
-    attrs::{collect_attrs, Attr, AttrId, RawAttrs},
     HirFileId, InFile,
+    attrs::{Attr, AttrId, RawAttrs, collect_attrs},
 };
-use intern::{sym, Symbol};
+use intern::{Symbol, sym};
 use la_arena::{ArenaMap, Idx, RawIdx};
 use mbe::DelimiterKind;
+use rustc_abi::ReprOptions;
 use syntax::{
-    ast::{self, HasAttrs},
     AstPtr,
+    ast::{self, HasAttrs},
 };
 use triomphe::Arc;
 use tt::iter::{TtElement, TtIter};
 
 use crate::{
+    AdtId, AttrDefId, GenericParamId, HasModule, ItemTreeLoc, LocalFieldId, Lookup, MacroId,
+    VariantId,
     db::DefDatabase,
     item_tree::{AttrOwner, FieldParent, ItemTreeNode},
     lang_item::LangItem,
     nameres::{ModuleOrigin, ModuleSource},
     src::{HasChildSource, HasSource},
-    AdtId, AttrDefId, GenericParamId, HasModule, ItemTreeLoc, LocalFieldId, Lookup, MacroId,
-    VariantId,
 };
 
 /// Desugared attributes of an item post `cfg_attr` expansion.
@@ -44,8 +45,8 @@ impl Attrs {
         (**self).iter().find(|attr| attr.id == id)
     }
 
-    pub(crate) fn filter(db: &dyn DefDatabase, krate: CrateId, raw_attrs: RawAttrs) -> Attrs {
-        Attrs(raw_attrs.filter(db.upcast(), krate))
+    pub(crate) fn filter(db: &dyn DefDatabase, krate: Crate, raw_attrs: RawAttrs) -> Attrs {
+        Attrs(raw_attrs.filter(db, krate))
     }
 }
 
@@ -75,8 +76,6 @@ impl Attrs {
         let _p = tracing::info_span!("fields_attrs_query").entered();
         // FIXME: There should be some proper form of mapping between item tree field ids and hir field ids
         let mut res = ArenaMap::default();
-
-        let crate_graph = db.crate_graph();
         let item_tree;
         let (parent, fields, krate) = match v {
             VariantId::EnumVariantId(it) => {
@@ -84,7 +83,7 @@ impl Attrs {
                 let krate = loc.parent.lookup(db).container.krate;
                 item_tree = loc.id.item_tree(db);
                 let variant = &item_tree[loc.id.value];
-                (FieldParent::Variant(loc.id.value), &variant.fields, krate)
+                (FieldParent::EnumVariant(loc.id.value), &variant.fields, krate)
             }
             VariantId::StructId(it) => {
                 let loc = it.lookup(db);
@@ -102,7 +101,7 @@ impl Attrs {
             }
         };
 
-        let cfg_options = &crate_graph[krate].cfg_options;
+        let cfg_options = krate.cfg_options(db);
 
         let mut idx = 0;
         for (id, _field) in fields.iter().enumerate() {
@@ -118,17 +117,20 @@ impl Attrs {
 }
 
 impl Attrs {
-    pub fn by_key<'attrs>(&'attrs self, key: &'attrs Symbol) -> AttrQuery<'attrs> {
+    #[inline]
+    pub fn by_key(&self, key: Symbol) -> AttrQuery<'_> {
         AttrQuery { attrs: self, key }
     }
 
+    #[inline]
     pub fn rust_analyzer_tool(&self) -> impl Iterator<Item = &Attr> {
         self.iter()
             .filter(|&attr| attr.path.segments().first().is_some_and(|s| *s == sym::rust_analyzer))
     }
 
+    #[inline]
     pub fn cfg(&self) -> Option<CfgExpr> {
-        let mut cfgs = self.by_key(&sym::cfg).tt_values().map(CfgExpr::parse);
+        let mut cfgs = self.by_key(sym::cfg).tt_values().map(CfgExpr::parse);
         let first = cfgs.next()?;
         match cfgs.next() {
             Some(second) => {
@@ -139,10 +141,12 @@ impl Attrs {
         }
     }
 
+    #[inline]
     pub fn cfgs(&self) -> impl Iterator<Item = CfgExpr> + '_ {
-        self.by_key(&sym::cfg).tt_values().map(CfgExpr::parse)
+        self.by_key(sym::cfg).tt_values().map(CfgExpr::parse)
     }
 
+    #[inline]
     pub(crate) fn is_cfg_enabled(&self, cfg_options: &CfgOptions) -> bool {
         match self.cfg() {
             None => true,
@@ -150,78 +154,225 @@ impl Attrs {
         }
     }
 
+    #[inline]
     pub fn lang(&self) -> Option<&Symbol> {
-        self.by_key(&sym::lang).string_value()
+        self.by_key(sym::lang).string_value()
     }
 
+    #[inline]
     pub fn lang_item(&self) -> Option<LangItem> {
-        self.by_key(&sym::lang).string_value().and_then(LangItem::from_symbol)
+        self.by_key(sym::lang).string_value().and_then(LangItem::from_symbol)
     }
 
+    #[inline]
     pub fn has_doc_hidden(&self) -> bool {
-        self.by_key(&sym::doc).tt_values().any(|tt| {
+        self.by_key(sym::doc).tt_values().any(|tt| {
             tt.top_subtree().delimiter.kind == DelimiterKind::Parenthesis &&
                 matches!(tt.token_trees().flat_tokens(), [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::hidden)
         })
     }
 
+    #[inline]
     pub fn has_doc_notable_trait(&self) -> bool {
-        self.by_key(&sym::doc).tt_values().any(|tt| {
+        self.by_key(sym::doc).tt_values().any(|tt| {
             tt.top_subtree().delimiter.kind == DelimiterKind::Parenthesis &&
                 matches!(tt.token_trees().flat_tokens(), [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::notable_trait)
         })
     }
 
+    #[inline]
     pub fn doc_exprs(&self) -> impl Iterator<Item = DocExpr> + '_ {
-        self.by_key(&sym::doc).tt_values().map(DocExpr::parse)
+        self.by_key(sym::doc).tt_values().map(DocExpr::parse)
     }
 
+    #[inline]
     pub fn doc_aliases(&self) -> impl Iterator<Item = Symbol> + '_ {
         self.doc_exprs().flat_map(|doc_expr| doc_expr.aliases().to_vec())
     }
 
+    #[inline]
     pub fn export_name(&self) -> Option<&Symbol> {
-        self.by_key(&sym::export_name).string_value()
+        self.by_key(sym::export_name).string_value()
     }
 
+    #[inline]
     pub fn is_proc_macro(&self) -> bool {
-        self.by_key(&sym::proc_macro).exists()
+        self.by_key(sym::proc_macro).exists()
     }
 
+    #[inline]
     pub fn is_proc_macro_attribute(&self) -> bool {
-        self.by_key(&sym::proc_macro_attribute).exists()
+        self.by_key(sym::proc_macro_attribute).exists()
     }
 
+    #[inline]
     pub fn is_proc_macro_derive(&self) -> bool {
-        self.by_key(&sym::proc_macro_derive).exists()
+        self.by_key(sym::proc_macro_derive).exists()
     }
 
+    #[inline]
     pub fn is_test(&self) -> bool {
         self.iter().any(|it| {
             it.path()
                 .segments()
                 .iter()
                 .rev()
-                .zip(
-                    [sym::core.clone(), sym::prelude.clone(), sym::v1.clone(), sym::test.clone()]
-                        .iter()
-                        .rev(),
-                )
+                .zip([sym::core, sym::prelude, sym::v1, sym::test].iter().rev())
                 .all(|it| it.0 == it.1)
         })
     }
 
+    #[inline]
     pub fn is_ignore(&self) -> bool {
-        self.by_key(&sym::ignore).exists()
+        self.by_key(sym::ignore).exists()
     }
 
+    #[inline]
     pub fn is_bench(&self) -> bool {
-        self.by_key(&sym::bench).exists()
+        self.by_key(sym::bench).exists()
     }
 
+    #[inline]
     pub fn is_unstable(&self) -> bool {
-        self.by_key(&sym::unstable).exists()
+        self.by_key(sym::unstable).exists()
+    }
+
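+    /// Returns the argument indices listed in the item's
+    /// `#[rustc_legacy_const_generics(...)]` attribute, if it is present and non-empty.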
+    #[inline]
+    pub fn rustc_legacy_const_generics(&self) -> Option<Box<Box<[u32]>>> {
+        self.by_key(sym::rustc_legacy_const_generics)
+            .tt_values()
+            .next()
+            .map(parse_rustc_legacy_const_generics)
+            .filter(|it| !it.is_empty())
+            .map(Box::new)
+    }
+
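+    /// Parses every `#[repr(...)]` attribute on the item and merges the results into a
+    /// single `ReprOptions`, returning `None` if no `repr` attribute parses successfully.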
+    #[inline]
+    pub fn repr(&self) -> Option<ReprOptions> {
+        self.by_key(sym::repr).tt_values().filter_map(parse_repr_tt).fold(None, |acc, repr| {
+            acc.map_or(Some(repr), |mut acc| {
+                merge_repr(&mut acc, repr);
+                Some(acc)
+            })
+        })
+    }
+}
+
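+/// Parses the token tree of `#[rustc_legacy_const_generics(...)]` into a list of argument
+/// indices, stopping at the first entry that is not an integer literal optionally followed
+/// by a comma.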
+fn parse_rustc_legacy_const_generics(tt: &crate::tt::TopSubtree) -> Box<[u32]> {
+    let mut indices = Vec::new();
+    let mut iter = tt.iter();
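+    // Expect a comma-separated list of integer literals; bail out on the first malformed entry.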
+    while let (Some(first), second) = (iter.next(), iter.next()) {
+        match first {
+            TtElement::Leaf(tt::Leaf::Literal(lit)) => match lit.symbol.as_str().parse() {
+                Ok(index) => indices.push(index),
+                Err(_) => break,
+            },
+            _ => break,
+        }
+
+        if let Some(comma) = second {
+            match comma {
+                TtElement::Leaf(tt::Leaf::Punct(punct)) if punct.char == ',' => {}
+                _ => break,
+            }
+        }
     }
+
+    indices.into_boxed_slice()
+}
+
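+/// Merges `other` into `this`: the flags are unioned, the larger alignment and the smaller
+/// packing win, and a later primitive integer representation overrides an earlier one.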
+fn merge_repr(this: &mut ReprOptions, other: ReprOptions) {
+    let ReprOptions { int, align, pack, flags, field_shuffle_seed: _ } = this;
+    flags.insert(other.flags);
+    *align = (*align).max(other.align);
+    *pack = match (*pack, other.pack) {
+        (Some(pack), None) | (None, Some(pack)) => Some(pack),
+        _ => (*pack).min(other.pack),
+    };
+    if other.int.is_some() {
+        *int = other.int;
+    }
+}
+
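+/// Parses a single parenthesized `#[repr(...)]` token tree into `ReprOptions`, folding each
+/// argument (`packed`, `align`, `C`, `transparent`, `simd` or an integer type) into the
+/// accumulator via `merge_repr`.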
+fn parse_repr_tt(tt: &crate::tt::TopSubtree) -> Option<ReprOptions> {
+    use crate::builtin_type::{BuiltinInt, BuiltinUint};
+    use rustc_abi::{Align, Integer, IntegerType, ReprFlags, ReprOptions};
+
+    match tt.top_subtree().delimiter {
+        tt::Delimiter { kind: DelimiterKind::Parenthesis, .. } => {}
+        _ => return None,
+    }
+
+    let mut acc = ReprOptions::default();
+    let mut tts = tt.iter();
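+    // Walk the repr arguments, ignoring every token that is not an identifier.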
+    while let Some(tt) = tts.next() {
+        let TtElement::Leaf(tt::Leaf::Ident(ident)) = tt else {
+            continue;
+        };
+        let repr = match &ident.sym {
+            s if *s == sym::packed => {
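+                // `packed(N)` packs to the given alignment; bare `packed` packs to one byte.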
+                let pack = if let Some(TtElement::Subtree(_, mut tt_iter)) = tts.peek() {
+                    tts.next();
+                    if let Some(TtElement::Leaf(tt::Leaf::Literal(lit))) = tt_iter.next() {
+                        lit.symbol.as_str().parse().unwrap_or_default()
+                    } else {
+                        0
+                    }
+                } else {
+                    0
+                };
+                let pack = Some(Align::from_bytes(pack).unwrap_or(Align::ONE));
+                ReprOptions { pack, ..Default::default() }
+            }
+            s if *s == sym::align => {
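+                // `align(N)` records the requested alignment; bad arguments are ignored.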
+                let mut align = None;
+                if let Some(TtElement::Subtree(_, mut tt_iter)) = tts.peek() {
+                    tts.next();
+                    if let Some(TtElement::Leaf(tt::Leaf::Literal(lit))) = tt_iter.next() {
+                        if let Ok(a) = lit.symbol.as_str().parse() {
+                            align = Align::from_bytes(a).ok();
+                        }
+                    }
+                }
+                ReprOptions { align, ..Default::default() }
+            }
+            s if *s == sym::C => ReprOptions { flags: ReprFlags::IS_C, ..Default::default() },
+            s if *s == sym::transparent => {
+                ReprOptions { flags: ReprFlags::IS_TRANSPARENT, ..Default::default() }
+            }
+            s if *s == sym::simd => ReprOptions { flags: ReprFlags::IS_SIMD, ..Default::default() },
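+            // Any other identifier is tried as a primitive integer repr such as `u8` or `isize`;
+            // unrecognized names are ignored.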
+            repr => {
+                let mut int = None;
+                if let Some(builtin) = BuiltinInt::from_suffix_sym(repr)
+                    .map(Either::Left)
+                    .or_else(|| BuiltinUint::from_suffix_sym(repr).map(Either::Right))
+                {
+                    int = Some(match builtin {
+                        Either::Left(bi) => match bi {
+                            BuiltinInt::Isize => IntegerType::Pointer(true),
+                            BuiltinInt::I8 => IntegerType::Fixed(Integer::I8, true),
+                            BuiltinInt::I16 => IntegerType::Fixed(Integer::I16, true),
+                            BuiltinInt::I32 => IntegerType::Fixed(Integer::I32, true),
+                            BuiltinInt::I64 => IntegerType::Fixed(Integer::I64, true),
+                            BuiltinInt::I128 => IntegerType::Fixed(Integer::I128, true),
+                        },
+                        Either::Right(bu) => match bu {
+                            BuiltinUint::Usize => IntegerType::Pointer(false),
+                            BuiltinUint::U8 => IntegerType::Fixed(Integer::I8, false),
+                            BuiltinUint::U16 => IntegerType::Fixed(Integer::I16, false),
+                            BuiltinUint::U32 => IntegerType::Fixed(Integer::I32, false),
+                            BuiltinUint::U64 => IntegerType::Fixed(Integer::I64, false),
+                            BuiltinUint::U128 => IntegerType::Fixed(Integer::I128, false),
+                        },
+                    });
+                }
+                ReprOptions { int, ..Default::default() }
+            }
+        };
+        merge_repr(&mut acc, repr);
+    }
+
+    Some(acc)
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -373,7 +524,7 @@ impl AttrsWithOwner {
                     // FIXME: We should never be getting `None` here.
                     match src.value.get(it.local_id()) {
                         Some(val) => RawAttrs::from_attrs_owner(
-                            db.upcast(),
+                            db,
                             src.with_value(val),
                             db.span_map(src.file_id).as_ref(),
                         ),
@@ -385,7 +536,7 @@ impl AttrsWithOwner {
                     // FIXME: We should never be getting `None` here.
                     match src.value.get(it.local_id()) {
                         Some(val) => RawAttrs::from_attrs_owner(
-                            db.upcast(),
+                            db,
                             src.with_value(val),
                             db.span_map(src.file_id).as_ref(),
                         ),
@@ -397,7 +548,7 @@ impl AttrsWithOwner {
                     // FIXME: We should never be getting `None` here.
                     match src.value.get(it.local_id) {
                         Some(val) => RawAttrs::from_attrs_owner(
-                            db.upcast(),
+                            db,
                             src.with_value(val),
                             db.span_map(src.file_id).as_ref(),
                         ),
@@ -410,7 +561,7 @@ impl AttrsWithOwner {
             AttrDefId::UseId(it) => attrs_from_item_tree_loc(db, it),
         };
 
-        let attrs = raw_attrs.filter(db.upcast(), def.krate(db));
+        let attrs = raw_attrs.filter(db, def.krate(db));
         Attrs(attrs)
     }
 
@@ -547,36 +698,42 @@ impl AttrSourceMap {
     }
 }
 
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone)]
 pub struct AttrQuery<'attr> {
     attrs: &'attr Attrs,
-    key: &'attr Symbol,
+    key: Symbol,
 }
 
 impl<'attr> AttrQuery<'attr> {
+    #[inline]
     pub fn tt_values(self) -> impl Iterator<Item = &'attr crate::tt::TopSubtree> {
         self.attrs().filter_map(|attr| attr.token_tree_value())
     }
 
+    #[inline]
     pub fn string_value(self) -> Option<&'attr Symbol> {
         self.attrs().find_map(|attr| attr.string_value())
     }
 
+    #[inline]
     pub fn string_value_with_span(self) -> Option<(&'attr Symbol, span::Span)> {
         self.attrs().find_map(|attr| attr.string_value_with_span())
     }
 
+    #[inline]
     pub fn string_value_unescape(self) -> Option<Cow<'attr, str>> {
         self.attrs().find_map(|attr| attr.string_value_unescape())
     }
 
+    #[inline]
     pub fn exists(self) -> bool {
         self.attrs().next().is_some()
     }
 
+    #[inline]
     pub fn attrs(self) -> impl Iterator<Item = &'attr Attr> + Clone {
         let key = self.key;
-        self.attrs.iter().filter(move |attr| attr.path.as_ident().is_some_and(|s| *s == *key))
+        self.attrs.iter().filter(move |attr| attr.path.as_ident().is_some_and(|s| *s == key))
     }
 
     /// Find string value for a specific key inside token tree
@@ -585,10 +742,11 @@ impl<'attr> AttrQuery<'attr> {
     /// #[doc(html_root_url = "url")]
     ///       ^^^^^^^^^^^^^ key
     /// ```
-    pub fn find_string_value_in_tt(self, key: &'attr Symbol) -> Option<&'attr str> {
+    #[inline]
+    pub fn find_string_value_in_tt(self, key: Symbol) -> Option<&'attr str> {
         self.tt_values().find_map(|tt| {
             let name = tt.iter()
-                .skip_while(|tt| !matches!(tt, TtElement::Leaf(tt::Leaf::Ident(tt::Ident { sym, ..} )) if *sym == *key))
+                .skip_while(|tt| !matches!(tt, TtElement::Leaf(tt::Leaf::Ident(tt::Ident { sym, ..} )) if *sym == key))
                 .nth(2);
 
             match name {
@@ -601,17 +759,14 @@ impl<'attr> AttrQuery<'attr> {
 
 fn any_has_attrs<'db>(
     db: &(dyn DefDatabase + 'db),
-    id: impl Lookup<
-        Database<'db> = dyn DefDatabase + 'db,
-        Data = impl HasSource<Value = impl ast::HasAttrs>,
-    >,
+    id: impl Lookup<Database = dyn DefDatabase, Data = impl HasSource<Value = impl ast::HasAttrs>>,
 ) -> InFile<ast::AnyHasAttrs> {
     id.lookup(db).source(db).map(ast::AnyHasAttrs::new)
 }
 
 fn attrs_from_item_tree_loc<'db, N: ItemTreeNode>(
     db: &(dyn DefDatabase + 'db),
-    lookup: impl Lookup<Database<'db> = dyn DefDatabase + 'db, Data = impl ItemTreeLoc<Id = N>>,
+    lookup: impl Lookup<Database = dyn DefDatabase, Data = impl ItemTreeLoc<Id = N>>,
 ) -> RawAttrs {
     let id = lookup.lookup(db).item_tree_id();
     let tree = id.item_tree(db);
@@ -649,8 +804,8 @@ mod tests {
 
     use hir_expand::span_map::{RealSpanMap, SpanMap};
     use span::FileId;
-    use syntax::{ast, AstNode, TextRange};
-    use syntax_bridge::{syntax_node_to_token_tree, DocCommentDesugarMode};
+    use syntax::{AstNode, TextRange, ast};
+    use syntax_bridge::{DocCommentDesugarMode, syntax_node_to_token_tree};
 
     use crate::attr::{DocAtom, DocExpr};
 
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/builtin_type.rs b/src/tools/rust-analyzer/crates/hir-def/src/builtin_type.rs
index 14b9af84e6ffb..8b61c6a9f0349 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/builtin_type.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/builtin_type.rs
@@ -6,7 +6,7 @@
 use std::fmt;
 
 use hir_expand::name::{AsName, Name};
-use intern::{sym, Symbol};
+use intern::{Symbol, sym};
 /// Different signed int types.
 #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
 pub enum BuiltinInt {
@@ -51,28 +51,28 @@ impl BuiltinType {
     #[rustfmt::skip]
     pub fn all_builtin_types() -> [(Name, BuiltinType); 19] {
         [
-            (Name::new_symbol_root(sym::char.clone()), BuiltinType::Char),
-            (Name::new_symbol_root(sym::bool.clone()), BuiltinType::Bool),
-            (Name::new_symbol_root(sym::str.clone()),  BuiltinType::Str),
-
-            (Name::new_symbol_root(sym::isize.clone()), BuiltinType::Int(BuiltinInt::Isize)),
-            (Name::new_symbol_root(sym::i8.clone()),    BuiltinType::Int(BuiltinInt::I8)),
-            (Name::new_symbol_root(sym::i16.clone()),   BuiltinType::Int(BuiltinInt::I16)),
-            (Name::new_symbol_root(sym::i32.clone()),   BuiltinType::Int(BuiltinInt::I32)),
-            (Name::new_symbol_root(sym::i64.clone()),   BuiltinType::Int(BuiltinInt::I64)),
-            (Name::new_symbol_root(sym::i128.clone()),  BuiltinType::Int(BuiltinInt::I128)),
-
-            (Name::new_symbol_root(sym::usize.clone()), BuiltinType::Uint(BuiltinUint::Usize)),
-            (Name::new_symbol_root(sym::u8.clone()),    BuiltinType::Uint(BuiltinUint::U8)),
-            (Name::new_symbol_root(sym::u16.clone()),   BuiltinType::Uint(BuiltinUint::U16)),
-            (Name::new_symbol_root(sym::u32.clone()),   BuiltinType::Uint(BuiltinUint::U32)),
-            (Name::new_symbol_root(sym::u64.clone()),   BuiltinType::Uint(BuiltinUint::U64)),
-            (Name::new_symbol_root(sym::u128.clone()),  BuiltinType::Uint(BuiltinUint::U128)),
-
-            (Name::new_symbol_root(sym::f16.clone()), BuiltinType::Float(BuiltinFloat::F16)),
-            (Name::new_symbol_root(sym::f32.clone()), BuiltinType::Float(BuiltinFloat::F32)),
-            (Name::new_symbol_root(sym::f64.clone()), BuiltinType::Float(BuiltinFloat::F64)),
-            (Name::new_symbol_root(sym::f128.clone()), BuiltinType::Float(BuiltinFloat::F128)),
+            (Name::new_symbol_root(sym::char), BuiltinType::Char),
+            (Name::new_symbol_root(sym::bool), BuiltinType::Bool),
+            (Name::new_symbol_root(sym::str),  BuiltinType::Str),
+
+            (Name::new_symbol_root(sym::isize), BuiltinType::Int(BuiltinInt::Isize)),
+            (Name::new_symbol_root(sym::i8),    BuiltinType::Int(BuiltinInt::I8)),
+            (Name::new_symbol_root(sym::i16),   BuiltinType::Int(BuiltinInt::I16)),
+            (Name::new_symbol_root(sym::i32),   BuiltinType::Int(BuiltinInt::I32)),
+            (Name::new_symbol_root(sym::i64),   BuiltinType::Int(BuiltinInt::I64)),
+            (Name::new_symbol_root(sym::i128),  BuiltinType::Int(BuiltinInt::I128)),
+
+            (Name::new_symbol_root(sym::usize), BuiltinType::Uint(BuiltinUint::Usize)),
+            (Name::new_symbol_root(sym::u8),    BuiltinType::Uint(BuiltinUint::U8)),
+            (Name::new_symbol_root(sym::u16),   BuiltinType::Uint(BuiltinUint::U16)),
+            (Name::new_symbol_root(sym::u32),   BuiltinType::Uint(BuiltinUint::U32)),
+            (Name::new_symbol_root(sym::u64),   BuiltinType::Uint(BuiltinUint::U64)),
+            (Name::new_symbol_root(sym::u128),  BuiltinType::Uint(BuiltinUint::U128)),
+
+            (Name::new_symbol_root(sym::f16), BuiltinType::Float(BuiltinFloat::F16)),
+            (Name::new_symbol_root(sym::f32), BuiltinType::Float(BuiltinFloat::F32)),
+            (Name::new_symbol_root(sym::f64), BuiltinType::Float(BuiltinFloat::F64)),
+            (Name::new_symbol_root(sym::f128), BuiltinType::Float(BuiltinFloat::F128)),
         ]
     }
 
@@ -86,30 +86,30 @@ impl BuiltinType {
 impl AsName for BuiltinType {
     fn as_name(&self) -> Name {
         match self {
-            BuiltinType::Char => Name::new_symbol_root(sym::char.clone()),
-            BuiltinType::Bool => Name::new_symbol_root(sym::bool.clone()),
-            BuiltinType::Str => Name::new_symbol_root(sym::str.clone()),
+            BuiltinType::Char => Name::new_symbol_root(sym::char),
+            BuiltinType::Bool => Name::new_symbol_root(sym::bool),
+            BuiltinType::Str => Name::new_symbol_root(sym::str),
             BuiltinType::Int(it) => match it {
-                BuiltinInt::Isize => Name::new_symbol_root(sym::isize.clone()),
-                BuiltinInt::I8 => Name::new_symbol_root(sym::i8.clone()),
-                BuiltinInt::I16 => Name::new_symbol_root(sym::i16.clone()),
-                BuiltinInt::I32 => Name::new_symbol_root(sym::i32.clone()),
-                BuiltinInt::I64 => Name::new_symbol_root(sym::i64.clone()),
-                BuiltinInt::I128 => Name::new_symbol_root(sym::i128.clone()),
+                BuiltinInt::Isize => Name::new_symbol_root(sym::isize),
+                BuiltinInt::I8 => Name::new_symbol_root(sym::i8),
+                BuiltinInt::I16 => Name::new_symbol_root(sym::i16),
+                BuiltinInt::I32 => Name::new_symbol_root(sym::i32),
+                BuiltinInt::I64 => Name::new_symbol_root(sym::i64),
+                BuiltinInt::I128 => Name::new_symbol_root(sym::i128),
             },
             BuiltinType::Uint(it) => match it {
-                BuiltinUint::Usize => Name::new_symbol_root(sym::usize.clone()),
-                BuiltinUint::U8 => Name::new_symbol_root(sym::u8.clone()),
-                BuiltinUint::U16 => Name::new_symbol_root(sym::u16.clone()),
-                BuiltinUint::U32 => Name::new_symbol_root(sym::u32.clone()),
-                BuiltinUint::U64 => Name::new_symbol_root(sym::u64.clone()),
-                BuiltinUint::U128 => Name::new_symbol_root(sym::u128.clone()),
+                BuiltinUint::Usize => Name::new_symbol_root(sym::usize),
+                BuiltinUint::U8 => Name::new_symbol_root(sym::u8),
+                BuiltinUint::U16 => Name::new_symbol_root(sym::u16),
+                BuiltinUint::U32 => Name::new_symbol_root(sym::u32),
+                BuiltinUint::U64 => Name::new_symbol_root(sym::u64),
+                BuiltinUint::U128 => Name::new_symbol_root(sym::u128),
             },
             BuiltinType::Float(it) => match it {
-                BuiltinFloat::F16 => Name::new_symbol_root(sym::f16.clone()),
-                BuiltinFloat::F32 => Name::new_symbol_root(sym::f32.clone()),
-                BuiltinFloat::F64 => Name::new_symbol_root(sym::f64.clone()),
-                BuiltinFloat::F128 => Name::new_symbol_root(sym::f128.clone()),
+                BuiltinFloat::F16 => Name::new_symbol_root(sym::f16),
+                BuiltinFloat::F32 => Name::new_symbol_root(sym::f32),
+                BuiltinFloat::F64 => Name::new_symbol_root(sym::f64),
+                BuiltinFloat::F128 => Name::new_symbol_root(sym::f128),
             },
         }
     }
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/data.rs b/src/tools/rust-analyzer/crates/hir-def/src/data.rs
deleted file mode 100644
index bec662787728c..0000000000000
--- a/src/tools/rust-analyzer/crates/hir-def/src/data.rs
+++ /dev/null
@@ -1,843 +0,0 @@
-//! Contains basic data about various HIR declarations.
-
-pub mod adt;
-
-use base_db::CrateId;
-use hir_expand::{
-    name::Name, AstId, ExpandResult, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefKind,
-};
-use intern::{sym, Symbol};
-use la_arena::{Idx, RawIdx};
-use smallvec::SmallVec;
-use syntax::{ast, Parse};
-use triomphe::Arc;
-use tt::iter::TtElement;
-
-use crate::{
-    db::DefDatabase,
-    expander::{Expander, Mark},
-    item_tree::{self, AssocItem, FnFlags, ItemTree, ItemTreeId, MacroCall, ModItem, TreeId},
-    macro_call_as_call_id,
-    nameres::{
-        attr_resolution::ResolvedAttr,
-        diagnostics::{DefDiagnostic, DefDiagnostics},
-        proc_macro::{parse_macro_name_and_helper_attrs, ProcMacroKind},
-        DefMap, MacroSubNs,
-    },
-    path::ImportAlias,
-    type_ref::{TraitRef, TypeBound, TypeRefId, TypesMap},
-    visibility::RawVisibility,
-    AssocItemId, AstIdWithPath, ConstId, ConstLoc, ExternCrateId, FunctionId, FunctionLoc,
-    HasModule, ImplId, Intern, ItemContainerId, ItemLoc, Lookup, Macro2Id, MacroRulesId, ModuleId,
-    ProcMacroId, StaticId, TraitAliasId, TraitId, TypeAliasId, TypeAliasLoc,
-};
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct FunctionData {
-    pub name: Name,
-    pub params: Box<[TypeRefId]>,
-    pub ret_type: TypeRefId,
-    pub visibility: RawVisibility,
-    pub abi: Option<Symbol>,
-    pub legacy_const_generics_indices: Option<Box<Box<[u32]>>>,
-    pub rustc_allow_incoherent_impl: bool,
-    pub types_map: Arc<TypesMap>,
-    flags: FnFlags,
-}
-
-impl FunctionData {
-    pub(crate) fn fn_data_query(db: &dyn DefDatabase, func: FunctionId) -> Arc<FunctionData> {
-        let loc = func.lookup(db);
-        let krate = loc.container.module(db).krate;
-        let item_tree = loc.id.item_tree(db);
-        let func = &item_tree[loc.id.value];
-        let visibility = if let ItemContainerId::TraitId(trait_id) = loc.container {
-            trait_vis(db, trait_id)
-        } else {
-            item_tree[func.visibility].clone()
-        };
-
-        let crate_graph = db.crate_graph();
-        let cfg_options = &crate_graph[krate].cfg_options;
-        let attr_owner = |idx| {
-            item_tree::AttrOwner::Param(loc.id.value, Idx::from_raw(RawIdx::from(idx as u32)))
-        };
-
-        let mut flags = func.flags;
-        if flags.contains(FnFlags::HAS_SELF_PARAM) {
-            // If there's a self param in the syntax, but it is cfg'd out, remove the flag.
-            let is_cfgd_out =
-                !item_tree.attrs(db, krate, attr_owner(0usize)).is_cfg_enabled(cfg_options);
-            if is_cfgd_out {
-                cov_mark::hit!(cfgd_out_self_param);
-                flags.remove(FnFlags::HAS_SELF_PARAM);
-            }
-        }
-        if flags.contains(FnFlags::IS_VARARGS) {
-            if let Some((_, param)) = func.params.iter().enumerate().rev().find(|&(idx, _)| {
-                item_tree.attrs(db, krate, attr_owner(idx)).is_cfg_enabled(cfg_options)
-            }) {
-                if param.type_ref.is_some() {
-                    flags.remove(FnFlags::IS_VARARGS);
-                }
-            } else {
-                flags.remove(FnFlags::IS_VARARGS);
-            }
-        }
-
-        let attrs = item_tree.attrs(db, krate, ModItem::from(loc.id.value).into());
-        let legacy_const_generics_indices = attrs
-            .by_key(&sym::rustc_legacy_const_generics)
-            .tt_values()
-            .next()
-            .map(parse_rustc_legacy_const_generics)
-            .filter(|it| !it.is_empty())
-            .map(Box::new);
-        let rustc_allow_incoherent_impl = attrs.by_key(&sym::rustc_allow_incoherent_impl).exists();
-        if flags.contains(FnFlags::HAS_UNSAFE_KW)
-            && attrs.by_key(&sym::rustc_deprecated_safe_2024).exists()
-        {
-            flags.remove(FnFlags::HAS_UNSAFE_KW);
-            flags.insert(FnFlags::DEPRECATED_SAFE_2024);
-        }
-
-        if attrs.by_key(&sym::target_feature).exists() {
-            flags.insert(FnFlags::HAS_TARGET_FEATURE);
-        }
-
-        Arc::new(FunctionData {
-            name: func.name.clone(),
-            params: func
-                .params
-                .iter()
-                .enumerate()
-                .filter(|&(idx, _)| {
-                    item_tree.attrs(db, krate, attr_owner(idx)).is_cfg_enabled(cfg_options)
-                })
-                .filter_map(|(_, param)| param.type_ref)
-                .collect(),
-            ret_type: func.ret_type,
-            visibility,
-            abi: func.abi.clone(),
-            legacy_const_generics_indices,
-            types_map: func.types_map.clone(),
-            flags,
-            rustc_allow_incoherent_impl,
-        })
-    }
-
-    pub fn has_body(&self) -> bool {
-        self.flags.contains(FnFlags::HAS_BODY)
-    }
-
-    /// True if the first param is `self`. This is relevant to decide whether this
-    /// can be called as a method.
-    pub fn has_self_param(&self) -> bool {
-        self.flags.contains(FnFlags::HAS_SELF_PARAM)
-    }
-
-    pub fn is_default(&self) -> bool {
-        self.flags.contains(FnFlags::HAS_DEFAULT_KW)
-    }
-
-    pub fn is_const(&self) -> bool {
-        self.flags.contains(FnFlags::HAS_CONST_KW)
-    }
-
-    pub fn is_async(&self) -> bool {
-        self.flags.contains(FnFlags::HAS_ASYNC_KW)
-    }
-
-    pub fn is_unsafe(&self) -> bool {
-        self.flags.contains(FnFlags::HAS_UNSAFE_KW)
-    }
-
-    pub fn is_deprecated_safe_2024(&self) -> bool {
-        self.flags.contains(FnFlags::DEPRECATED_SAFE_2024)
-    }
-
-    pub fn is_safe(&self) -> bool {
-        self.flags.contains(FnFlags::HAS_SAFE_KW)
-    }
-
-    pub fn is_varargs(&self) -> bool {
-        self.flags.contains(FnFlags::IS_VARARGS)
-    }
-
-    pub fn has_target_feature(&self) -> bool {
-        self.flags.contains(FnFlags::HAS_TARGET_FEATURE)
-    }
-}
-
-fn parse_rustc_legacy_const_generics(tt: &crate::tt::TopSubtree) -> Box<[u32]> {
-    let mut indices = Vec::new();
-    let mut iter = tt.iter();
-    while let (Some(first), second) = (iter.next(), iter.next()) {
-        match first {
-            TtElement::Leaf(tt::Leaf::Literal(lit)) => match lit.symbol.as_str().parse() {
-                Ok(index) => indices.push(index),
-                Err(_) => break,
-            },
-            _ => break,
-        }
-
-        if let Some(comma) = second {
-            match comma {
-                TtElement::Leaf(tt::Leaf::Punct(punct)) if punct.char == ',' => {}
-                _ => break,
-            }
-        }
-    }
-
-    indices.into_boxed_slice()
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct TypeAliasData {
-    pub name: Name,
-    pub type_ref: Option<TypeRefId>,
-    pub visibility: RawVisibility,
-    pub is_extern: bool,
-    pub rustc_has_incoherent_inherent_impls: bool,
-    pub rustc_allow_incoherent_impl: bool,
-    /// Bounds restricting the type alias itself (eg. `type Ty: Bound;` in a trait or impl).
-    pub bounds: Box<[TypeBound]>,
-    pub types_map: Arc<TypesMap>,
-}
-
-impl TypeAliasData {
-    pub(crate) fn type_alias_data_query(
-        db: &dyn DefDatabase,
-        typ: TypeAliasId,
-    ) -> Arc<TypeAliasData> {
-        let loc = typ.lookup(db);
-        let item_tree = loc.id.item_tree(db);
-        let typ = &item_tree[loc.id.value];
-        let visibility = if let ItemContainerId::TraitId(trait_id) = loc.container {
-            trait_vis(db, trait_id)
-        } else {
-            item_tree[typ.visibility].clone()
-        };
-
-        let attrs = item_tree.attrs(
-            db,
-            loc.container.module(db).krate(),
-            ModItem::from(loc.id.value).into(),
-        );
-        let rustc_has_incoherent_inherent_impls =
-            attrs.by_key(&sym::rustc_has_incoherent_inherent_impls).exists();
-        let rustc_allow_incoherent_impl = attrs.by_key(&sym::rustc_allow_incoherent_impl).exists();
-
-        Arc::new(TypeAliasData {
-            name: typ.name.clone(),
-            type_ref: typ.type_ref,
-            visibility,
-            is_extern: matches!(loc.container, ItemContainerId::ExternBlockId(_)),
-            rustc_has_incoherent_inherent_impls,
-            rustc_allow_incoherent_impl,
-            bounds: typ.bounds.clone(),
-            types_map: typ.types_map.clone(),
-        })
-    }
-}
-
-bitflags::bitflags! {
-    #[derive(Debug, Clone, Copy, Eq, PartialEq, Default)]
-    pub struct TraitFlags: u8 {
-        const IS_AUTO = 1 << 0;
-        const IS_UNSAFE = 1 << 1;
-        const IS_FUNDAMENTAL = 1 << 2;
-        const RUSTC_HAS_INCOHERENT_INHERENT_IMPLS = 1 << 3;
-        const SKIP_ARRAY_DURING_METHOD_DISPATCH = 1 << 4;
-        const SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH = 1 << 5;
-        const RUSTC_PAREN_SUGAR = 1 << 6;
-    }
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct TraitData {
-    pub name: Name,
-    pub items: Box<[(Name, AssocItemId)]>,
-    pub flags: TraitFlags,
-    pub visibility: RawVisibility,
-    // box it as the vec is usually empty anyways
-    pub macro_calls: Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>,
-}
-
-impl TraitData {
-    #[inline]
-    pub(crate) fn trait_data_query(db: &dyn DefDatabase, tr: TraitId) -> Arc<TraitData> {
-        db.trait_data_with_diagnostics(tr).0
-    }
-
-    pub(crate) fn trait_data_with_diagnostics_query(
-        db: &dyn DefDatabase,
-        tr: TraitId,
-    ) -> (Arc<TraitData>, DefDiagnostics) {
-        let ItemLoc { container: module_id, id: tree_id } = tr.lookup(db);
-        let item_tree = tree_id.item_tree(db);
-        let tr_def = &item_tree[tree_id.value];
-        let name = tr_def.name.clone();
-        let visibility = item_tree[tr_def.visibility].clone();
-        let attrs = item_tree.attrs(db, module_id.krate(), ModItem::from(tree_id.value).into());
-
-        let mut flags = TraitFlags::empty();
-
-        if tr_def.is_auto {
-            flags |= TraitFlags::IS_AUTO;
-        }
-        if tr_def.is_unsafe {
-            flags |= TraitFlags::IS_UNSAFE;
-        }
-        if attrs.by_key(&sym::fundamental).exists() {
-            flags |= TraitFlags::IS_FUNDAMENTAL;
-        }
-        if attrs.by_key(&sym::rustc_has_incoherent_inherent_impls).exists() {
-            flags |= TraitFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS;
-        }
-        if attrs.by_key(&sym::rustc_paren_sugar).exists() {
-            flags |= TraitFlags::RUSTC_PAREN_SUGAR;
-        }
-
-        let mut skip_array_during_method_dispatch =
-            attrs.by_key(&sym::rustc_skip_array_during_method_dispatch).exists();
-        let mut skip_boxed_slice_during_method_dispatch = false;
-        for tt in attrs.by_key(&sym::rustc_skip_during_method_dispatch).tt_values() {
-            for tt in tt.iter() {
-                if let tt::iter::TtElement::Leaf(tt::Leaf::Ident(ident)) = tt {
-                    skip_array_during_method_dispatch |= ident.sym == sym::array;
-                    skip_boxed_slice_during_method_dispatch |= ident.sym == sym::boxed_slice;
-                }
-            }
-        }
-
-        if skip_array_during_method_dispatch {
-            flags |= TraitFlags::SKIP_ARRAY_DURING_METHOD_DISPATCH;
-        }
-        if skip_boxed_slice_during_method_dispatch {
-            flags |= TraitFlags::SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH;
-        }
-
-        let mut collector =
-            AssocItemCollector::new(db, module_id, tree_id.file_id(), ItemContainerId::TraitId(tr));
-        collector.collect(&item_tree, tree_id.tree_id(), &tr_def.items);
-        let (items, macro_calls, diagnostics) = collector.finish();
-
-        (
-            Arc::new(TraitData { name, macro_calls, items, visibility, flags }),
-            DefDiagnostics::new(diagnostics),
-        )
-    }
-
-    pub fn associated_types(&self) -> impl Iterator<Item = TypeAliasId> + '_ {
-        self.items.iter().filter_map(|(_name, item)| match item {
-            AssocItemId::TypeAliasId(t) => Some(*t),
-            _ => None,
-        })
-    }
-
-    pub fn associated_type_by_name(&self, name: &Name) -> Option<TypeAliasId> {
-        self.items.iter().find_map(|(item_name, item)| match item {
-            AssocItemId::TypeAliasId(t) if item_name == name => Some(*t),
-            _ => None,
-        })
-    }
-
-    pub fn method_by_name(&self, name: &Name) -> Option<FunctionId> {
-        self.items.iter().find_map(|(item_name, item)| match item {
-            AssocItemId::FunctionId(t) if item_name == name => Some(*t),
-            _ => None,
-        })
-    }
-
-    pub fn attribute_calls(&self) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ {
-        self.macro_calls.iter().flat_map(|it| it.iter()).copied()
-    }
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct TraitAliasData {
-    pub name: Name,
-    pub visibility: RawVisibility,
-}
-
-impl TraitAliasData {
-    pub(crate) fn trait_alias_query(db: &dyn DefDatabase, id: TraitAliasId) -> Arc<TraitAliasData> {
-        let loc = id.lookup(db);
-        let item_tree = loc.id.item_tree(db);
-        let alias = &item_tree[loc.id.value];
-        let visibility = item_tree[alias.visibility].clone();
-
-        Arc::new(TraitAliasData { name: alias.name.clone(), visibility })
-    }
-}
-
-#[derive(Debug, PartialEq, Eq)]
-pub struct ImplData {
-    pub target_trait: Option<TraitRef>,
-    pub self_ty: TypeRefId,
-    pub items: Box<[(Name, AssocItemId)]>,
-    pub is_negative: bool,
-    pub is_unsafe: bool,
-    // box it as the vec is usually empty anyways
-    pub macro_calls: Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>,
-    pub types_map: Arc<TypesMap>,
-}
-
-impl ImplData {
-    #[inline]
-    pub(crate) fn impl_data_query(db: &dyn DefDatabase, id: ImplId) -> Arc<ImplData> {
-        db.impl_data_with_diagnostics(id).0
-    }
-
-    pub(crate) fn impl_data_with_diagnostics_query(
-        db: &dyn DefDatabase,
-        id: ImplId,
-    ) -> (Arc<ImplData>, DefDiagnostics) {
-        let _p = tracing::info_span!("impl_data_with_diagnostics_query").entered();
-        let ItemLoc { container: module_id, id: tree_id } = id.lookup(db);
-
-        let item_tree = tree_id.item_tree(db);
-        let impl_def = &item_tree[tree_id.value];
-        let target_trait = impl_def.target_trait;
-        let self_ty = impl_def.self_ty;
-        let is_negative = impl_def.is_negative;
-        let is_unsafe = impl_def.is_unsafe;
-
-        let mut collector =
-            AssocItemCollector::new(db, module_id, tree_id.file_id(), ItemContainerId::ImplId(id));
-        collector.collect(&item_tree, tree_id.tree_id(), &impl_def.items);
-
-        let (items, macro_calls, diagnostics) = collector.finish();
-
-        (
-            Arc::new(ImplData {
-                target_trait,
-                self_ty,
-                items,
-                is_negative,
-                is_unsafe,
-                macro_calls,
-                types_map: impl_def.types_map.clone(),
-            }),
-            DefDiagnostics::new(diagnostics),
-        )
-    }
-
-    pub fn attribute_calls(&self) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ {
-        self.macro_calls.iter().flat_map(|it| it.iter()).copied()
-    }
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct Macro2Data {
-    pub name: Name,
-    pub visibility: RawVisibility,
-    // It's a bit wasteful as currently this is only for builtin `Default` derive macro, but macro2
-    // are rarely used in practice so I think it's okay for now.
-    /// Derive helpers, if this is a derive rustc_builtin_macro
-    pub helpers: Option<Box<[Name]>>,
-}
-
-impl Macro2Data {
-    pub(crate) fn macro2_data_query(db: &dyn DefDatabase, makro: Macro2Id) -> Arc<Macro2Data> {
-        let loc = makro.lookup(db);
-        let item_tree = loc.id.item_tree(db);
-        let makro = &item_tree[loc.id.value];
-
-        let helpers = item_tree
-            .attrs(db, loc.container.krate(), ModItem::from(loc.id.value).into())
-            .by_key(&sym::rustc_builtin_macro)
-            .tt_values()
-            .next()
-            .and_then(parse_macro_name_and_helper_attrs)
-            .map(|(_, helpers)| helpers);
-
-        Arc::new(Macro2Data {
-            name: makro.name.clone(),
-            visibility: item_tree[makro.visibility].clone(),
-            helpers,
-        })
-    }
-}
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct MacroRulesData {
-    pub name: Name,
-    pub macro_export: bool,
-}
-
-impl MacroRulesData {
-    pub(crate) fn macro_rules_data_query(
-        db: &dyn DefDatabase,
-        makro: MacroRulesId,
-    ) -> Arc<MacroRulesData> {
-        let loc = makro.lookup(db);
-        let item_tree = loc.id.item_tree(db);
-        let makro = &item_tree[loc.id.value];
-
-        let macro_export = item_tree
-            .attrs(db, loc.container.krate(), ModItem::from(loc.id.value).into())
-            .by_key(&sym::macro_export)
-            .exists();
-
-        Arc::new(MacroRulesData { name: makro.name.clone(), macro_export })
-    }
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct ProcMacroData {
-    pub name: Name,
-    /// Derive helpers, if this is a derive
-    pub helpers: Option<Box<[Name]>>,
-}
-
-impl ProcMacroData {
-    pub(crate) fn proc_macro_data_query(
-        db: &dyn DefDatabase,
-        makro: ProcMacroId,
-    ) -> Arc<ProcMacroData> {
-        let loc = makro.lookup(db);
-        let item_tree = loc.id.item_tree(db);
-        let makro = &item_tree[loc.id.value];
-
-        let (name, helpers) = if let Some(def) = item_tree
-            .attrs(db, loc.container.krate(), ModItem::from(loc.id.value).into())
-            .parse_proc_macro_decl(&makro.name)
-        {
-            (
-                def.name,
-                match def.kind {
-                    ProcMacroKind::Derive { helpers } => Some(helpers),
-                    ProcMacroKind::Bang | ProcMacroKind::Attr => None,
-                },
-            )
-        } else {
-            // eeeh...
-            stdx::never!("proc macro declaration is not a proc macro");
-            (makro.name.clone(), None)
-        };
-        Arc::new(ProcMacroData { name, helpers })
-    }
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct ExternCrateDeclData {
-    pub name: Name,
-    pub alias: Option<ImportAlias>,
-    pub visibility: RawVisibility,
-    pub crate_id: Option<CrateId>,
-}
-
-impl ExternCrateDeclData {
-    pub(crate) fn extern_crate_decl_data_query(
-        db: &dyn DefDatabase,
-        extern_crate: ExternCrateId,
-    ) -> Arc<ExternCrateDeclData> {
-        let loc = extern_crate.lookup(db);
-        let item_tree = loc.id.item_tree(db);
-        let extern_crate = &item_tree[loc.id.value];
-
-        let name = extern_crate.name.clone();
-        let krate = loc.container.krate();
-        let crate_id = if name == sym::self_.clone() {
-            Some(krate)
-        } else {
-            db.crate_graph()[krate].dependencies.iter().find_map(|dep| {
-                if dep.name.symbol() == name.symbol() {
-                    Some(dep.crate_id)
-                } else {
-                    None
-                }
-            })
-        };
-
-        Arc::new(Self {
-            name,
-            visibility: item_tree[extern_crate.visibility].clone(),
-            alias: extern_crate.alias.clone(),
-            crate_id,
-        })
-    }
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct ConstData {
-    /// `None` for `const _: () = ();`
-    pub name: Option<Name>,
-    pub type_ref: TypeRefId,
-    pub visibility: RawVisibility,
-    pub rustc_allow_incoherent_impl: bool,
-    pub has_body: bool,
-    pub types_map: Arc<TypesMap>,
-}
-
-impl ConstData {
-    pub(crate) fn const_data_query(db: &dyn DefDatabase, konst: ConstId) -> Arc<ConstData> {
-        let loc = konst.lookup(db);
-        let item_tree = loc.id.item_tree(db);
-        let konst = &item_tree[loc.id.value];
-        let visibility = if let ItemContainerId::TraitId(trait_id) = loc.container {
-            trait_vis(db, trait_id)
-        } else {
-            item_tree[konst.visibility].clone()
-        };
-
-        let rustc_allow_incoherent_impl = item_tree
-            .attrs(db, loc.container.module(db).krate(), ModItem::from(loc.id.value).into())
-            .by_key(&sym::rustc_allow_incoherent_impl)
-            .exists();
-
-        Arc::new(ConstData {
-            name: konst.name.clone(),
-            type_ref: konst.type_ref,
-            visibility,
-            rustc_allow_incoherent_impl,
-            has_body: konst.has_body,
-            types_map: konst.types_map.clone(),
-        })
-    }
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct StaticData {
-    pub name: Name,
-    pub type_ref: TypeRefId,
-    pub visibility: RawVisibility,
-    pub mutable: bool,
-    pub is_extern: bool,
-    pub has_safe_kw: bool,
-    pub has_unsafe_kw: bool,
-    pub types_map: Arc<TypesMap>,
-}
-
-impl StaticData {
-    pub(crate) fn static_data_query(db: &dyn DefDatabase, konst: StaticId) -> Arc<StaticData> {
-        let loc = konst.lookup(db);
-        let item_tree = loc.id.item_tree(db);
-        let statik = &item_tree[loc.id.value];
-
-        Arc::new(StaticData {
-            name: statik.name.clone(),
-            type_ref: statik.type_ref,
-            visibility: item_tree[statik.visibility].clone(),
-            mutable: statik.mutable,
-            is_extern: matches!(loc.container, ItemContainerId::ExternBlockId(_)),
-            has_safe_kw: statik.has_safe_kw,
-            has_unsafe_kw: statik.has_unsafe_kw,
-            types_map: statik.types_map.clone(),
-        })
-    }
-}
-
-struct AssocItemCollector<'a> {
-    db: &'a dyn DefDatabase,
-    module_id: ModuleId,
-    def_map: Arc<DefMap>,
-    diagnostics: Vec<DefDiagnostic>,
-    container: ItemContainerId,
-    expander: Expander,
-
-    items: Vec<(Name, AssocItemId)>,
-    macro_calls: Vec<(AstId<ast::Item>, MacroCallId)>,
-}
-
-impl<'a> AssocItemCollector<'a> {
-    fn new(
-        db: &'a dyn DefDatabase,
-        module_id: ModuleId,
-        file_id: HirFileId,
-        container: ItemContainerId,
-    ) -> Self {
-        Self {
-            db,
-            module_id,
-            def_map: module_id.def_map(db),
-            container,
-            expander: Expander::new(db, file_id, module_id),
-            items: Vec::new(),
-            macro_calls: Vec::new(),
-            diagnostics: Vec::new(),
-        }
-    }
-
-    fn finish(
-        self,
-    ) -> (
-        Box<[(Name, AssocItemId)]>,
-        Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>,
-        Vec<DefDiagnostic>,
-    ) {
-        (
-            self.items.into_boxed_slice(),
-            if self.macro_calls.is_empty() { None } else { Some(Box::new(self.macro_calls)) },
-            self.diagnostics,
-        )
-    }
-
-    fn collect(&mut self, item_tree: &ItemTree, tree_id: TreeId, assoc_items: &[AssocItem]) {
-        let container = self.container;
-        self.items.reserve(assoc_items.len());
-
-        'items: for &item in assoc_items {
-            let attrs = item_tree.attrs(self.db, self.module_id.krate, ModItem::from(item).into());
-            if !attrs.is_cfg_enabled(self.expander.cfg_options()) {
-                self.diagnostics.push(DefDiagnostic::unconfigured_code(
-                    self.module_id.local_id,
-                    tree_id,
-                    ModItem::from(item).into(),
-                    attrs.cfg().unwrap(),
-                    self.expander.cfg_options().clone(),
-                ));
-                continue;
-            }
-
-            'attrs: for attr in &*attrs {
-                let ast_id =
-                    AstId::new(self.expander.current_file_id(), item.ast_id(item_tree).upcast());
-                let ast_id_with_path = AstIdWithPath { path: attr.path.clone(), ast_id };
-
-                match self.def_map.resolve_attr_macro(
-                    self.db,
-                    self.module_id.local_id,
-                    ast_id_with_path,
-                    attr,
-                ) {
-                    Ok(ResolvedAttr::Macro(call_id)) => {
-                        let loc = self.db.lookup_intern_macro_call(call_id);
-                        if let MacroDefKind::ProcMacro(_, exp, _) = loc.def.kind {
-                            // If there's no expander for the proc macro (e.g. the
-                            // proc macro is ignored, or building the proc macro
-                            // crate failed), skip expansion like we would if it was
-                            // disabled. This is analogous to the handling in
-                            // `DefCollector::collect_macros`.
-                            if let Some(err) = exp.as_expand_error(self.module_id.krate) {
-                                self.diagnostics.push(DefDiagnostic::macro_error(
-                                    self.module_id.local_id,
-                                    ast_id,
-                                    (*attr.path).clone(),
-                                    err,
-                                ));
-                                continue 'attrs;
-                            }
-                        }
-
-                        self.macro_calls.push((ast_id, call_id));
-                        let res =
-                            self.expander.enter_expand_id::<ast::MacroItems>(self.db, call_id);
-                        self.collect_macro_items(res);
-                        continue 'items;
-                    }
-                    Ok(_) => (),
-                    Err(_) => {
-                        self.diagnostics.push(DefDiagnostic::unresolved_macro_call(
-                            self.module_id.local_id,
-                            MacroCallKind::Attr {
-                                ast_id,
-                                attr_args: None,
-                                invoc_attr_index: attr.id,
-                            },
-                            attr.path().clone(),
-                        ));
-                    }
-                }
-            }
-
-            self.collect_item(item_tree, tree_id, container, item);
-        }
-    }
-
-    fn collect_item(
-        &mut self,
-        item_tree: &ItemTree,
-        tree_id: TreeId,
-        container: ItemContainerId,
-        item: AssocItem,
-    ) {
-        match item {
-            AssocItem::Function(id) => {
-                let item = &item_tree[id];
-                let def =
-                    FunctionLoc { container, id: ItemTreeId::new(tree_id, id) }.intern(self.db);
-                self.items.push((item.name.clone(), def.into()));
-            }
-            AssocItem::TypeAlias(id) => {
-                let item = &item_tree[id];
-                let def =
-                    TypeAliasLoc { container, id: ItemTreeId::new(tree_id, id) }.intern(self.db);
-                self.items.push((item.name.clone(), def.into()));
-            }
-            AssocItem::Const(id) => {
-                let item = &item_tree[id];
-                let Some(name) = item.name.clone() else { return };
-                let def = ConstLoc { container, id: ItemTreeId::new(tree_id, id) }.intern(self.db);
-                self.items.push((name, def.into()));
-            }
-            AssocItem::MacroCall(call) => {
-                let file_id = self.expander.current_file_id();
-                let MacroCall { ast_id, expand_to, ctxt, ref path } = item_tree[call];
-                let module = self.expander.module.local_id;
-
-                let resolver = |path: &_| {
-                    self.def_map
-                        .resolve_path(
-                            self.db,
-                            module,
-                            path,
-                            crate::item_scope::BuiltinShadowMode::Other,
-                            Some(MacroSubNs::Bang),
-                        )
-                        .0
-                        .take_macros()
-                        .map(|it| self.db.macro_def(it))
-                };
-                match macro_call_as_call_id(
-                    self.db.upcast(),
-                    &AstIdWithPath::new(file_id, ast_id, Clone::clone(path)),
-                    ctxt,
-                    expand_to,
-                    self.expander.krate(),
-                    resolver,
-                ) {
-                    Ok(Some(call_id)) => {
-                        let res =
-                            self.expander.enter_expand_id::<ast::MacroItems>(self.db, call_id);
-                        self.macro_calls.push((InFile::new(file_id, ast_id.upcast()), call_id));
-                        self.collect_macro_items(res);
-                    }
-                    Ok(None) => (),
-                    Err(_) => {
-                        self.diagnostics.push(DefDiagnostic::unresolved_macro_call(
-                            self.module_id.local_id,
-                            MacroCallKind::FnLike {
-                                ast_id: InFile::new(file_id, ast_id),
-                                expand_to,
-                                eager: None,
-                            },
-                            Clone::clone(path),
-                        ));
-                    }
-                }
-            }
-        }
-    }
-
-    fn collect_macro_items(&mut self, res: ExpandResult<Option<(Mark, Parse<ast::MacroItems>)>>) {
-        let Some((mark, _parse)) = res.value else { return };
-
-        let tree_id = item_tree::TreeId::new(self.expander.current_file_id(), None);
-        let item_tree = tree_id.item_tree(self.db);
-        let iter: SmallVec<[_; 2]> =
-            item_tree.top_level_items().iter().filter_map(ModItem::as_assoc_item).collect();
-
-        self.collect(&item_tree, tree_id, &iter);
-
-        self.expander.exit(mark);
-    }
-}
-
-fn trait_vis(db: &dyn DefDatabase, trait_id: TraitId) -> RawVisibility {
-    let ItemLoc { id: tree_id, .. } = trait_id.lookup(db);
-    let item_tree = tree_id.item_tree(db);
-    let tr_def = &item_tree[tree_id.value];
-    item_tree[tr_def.visibility].clone()
-}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs b/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs
deleted file mode 100644
index c94622016d355..0000000000000
--- a/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs
+++ /dev/null
@@ -1,489 +0,0 @@
-//! Defines hir-level representation of structs, enums and unions
-
-use base_db::CrateId;
-use bitflags::bitflags;
-use cfg::CfgOptions;
-use either::Either;
-
-use hir_expand::name::Name;
-use intern::sym;
-use la_arena::Arena;
-use rustc_abi::{Align, Integer, IntegerType, ReprFlags, ReprOptions};
-use rustc_hashes::Hash64;
-use triomphe::Arc;
-use tt::iter::TtElement;
-
-use crate::{
-    builtin_type::{BuiltinInt, BuiltinUint},
-    db::DefDatabase,
-    hir::Expr,
-    item_tree::{
-        AttrOwner, Field, FieldParent, FieldsShape, ItemTree, ModItem, RawVisibilityId, TreeId,
-    },
-    lang_item::LangItem,
-    nameres::diagnostics::{DefDiagnostic, DefDiagnostics},
-    tt::{Delimiter, DelimiterKind, Leaf, TopSubtree},
-    type_ref::{TypeRefId, TypesMap},
-    visibility::RawVisibility,
-    EnumId, EnumVariantId, LocalFieldId, LocalModuleId, Lookup, StructId, UnionId, VariantId,
-};
-
-/// Note that we use `StructData` for unions as well!
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct StructData {
-    pub name: Name,
-    pub variant_data: Arc<VariantData>,
-    pub repr: Option<ReprOptions>,
-    pub visibility: RawVisibility,
-    pub flags: StructFlags,
-}
-
-bitflags! {
-    #[derive(Debug, Copy, Clone, PartialEq, Eq)]
-    pub struct StructFlags: u8 {
-        const NO_FLAGS         = 0;
-        /// Indicates whether the struct is `PhantomData`.
-        const IS_PHANTOM_DATA  = 1 << 2;
-        /// Indicates whether the struct has a `#[fundamental]` attribute.
-        const IS_FUNDAMENTAL   = 1 << 3;
-        // FIXME: should this be a flag?
-        /// Indicates whether the struct has a `#[rustc_has_incoherent_inherent_impls]` attribute.
-        const IS_RUSTC_HAS_INCOHERENT_INHERENT_IMPL      = 1 << 4;
-        /// Indicates whether this struct is `Box`.
-        const IS_BOX           = 1 << 5;
-        /// Indicates whether this struct is `ManuallyDrop`.
-        const IS_MANUALLY_DROP = 1 << 6;
-        /// Indicates whether this struct is `UnsafeCell`.
-        const IS_UNSAFE_CELL   = 1 << 7;
-    }
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct EnumData {
-    pub name: Name,
-    pub variants: Box<[(EnumVariantId, Name)]>,
-    pub repr: Option<ReprOptions>,
-    pub visibility: RawVisibility,
-    pub rustc_has_incoherent_inherent_impls: bool,
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct EnumVariantData {
-    pub name: Name,
-    pub variant_data: Arc<VariantData>,
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub enum VariantData {
-    Record { fields: Arena<FieldData>, types_map: Arc<TypesMap> },
-    Tuple { fields: Arena<FieldData>, types_map: Arc<TypesMap> },
-    Unit,
-}
-
-/// A single field of an enum variant or struct
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct FieldData {
-    pub name: Name,
-    pub type_ref: TypeRefId,
-    pub visibility: RawVisibility,
-}
-
-fn repr_from_value(
-    db: &dyn DefDatabase,
-    krate: CrateId,
-    item_tree: &ItemTree,
-    of: AttrOwner,
-) -> Option<ReprOptions> {
-    item_tree.attrs(db, krate, of).by_key(&sym::repr).tt_values().find_map(parse_repr_tt)
-}
-
-fn parse_repr_tt(tt: &TopSubtree) -> Option<ReprOptions> {
-    match tt.top_subtree().delimiter {
-        Delimiter { kind: DelimiterKind::Parenthesis, .. } => {}
-        _ => return None,
-    }
-
-    let mut flags = ReprFlags::empty();
-    let mut int = None;
-    let mut max_align: Option<Align> = None;
-    let mut min_pack: Option<Align> = None;
-
-    let mut tts = tt.iter();
-    while let Some(tt) = tts.next() {
-        if let TtElement::Leaf(Leaf::Ident(ident)) = tt {
-            flags.insert(match &ident.sym {
-                s if *s == sym::packed => {
-                    let pack = if let Some(TtElement::Subtree(_, mut tt_iter)) = tts.peek() {
-                        tts.next();
-                        if let Some(TtElement::Leaf(Leaf::Literal(lit))) = tt_iter.next() {
-                            lit.symbol.as_str().parse().unwrap_or_default()
-                        } else {
-                            0
-                        }
-                    } else {
-                        0
-                    };
-                    let pack = Align::from_bytes(pack).unwrap_or(Align::ONE);
-                    min_pack =
-                        Some(if let Some(min_pack) = min_pack { min_pack.min(pack) } else { pack });
-                    ReprFlags::empty()
-                }
-                s if *s == sym::align => {
-                    if let Some(TtElement::Subtree(_, mut tt_iter)) = tts.peek() {
-                        tts.next();
-                        if let Some(TtElement::Leaf(Leaf::Literal(lit))) = tt_iter.next() {
-                            if let Ok(align) = lit.symbol.as_str().parse() {
-                                let align = Align::from_bytes(align).ok();
-                                max_align = max_align.max(align);
-                            }
-                        }
-                    }
-                    ReprFlags::empty()
-                }
-                s if *s == sym::C => ReprFlags::IS_C,
-                s if *s == sym::transparent => ReprFlags::IS_TRANSPARENT,
-                s if *s == sym::simd => ReprFlags::IS_SIMD,
-                repr => {
-                    if let Some(builtin) = BuiltinInt::from_suffix_sym(repr)
-                        .map(Either::Left)
-                        .or_else(|| BuiltinUint::from_suffix_sym(repr).map(Either::Right))
-                    {
-                        int = Some(match builtin {
-                            Either::Left(bi) => match bi {
-                                BuiltinInt::Isize => IntegerType::Pointer(true),
-                                BuiltinInt::I8 => IntegerType::Fixed(Integer::I8, true),
-                                BuiltinInt::I16 => IntegerType::Fixed(Integer::I16, true),
-                                BuiltinInt::I32 => IntegerType::Fixed(Integer::I32, true),
-                                BuiltinInt::I64 => IntegerType::Fixed(Integer::I64, true),
-                                BuiltinInt::I128 => IntegerType::Fixed(Integer::I128, true),
-                            },
-                            Either::Right(bu) => match bu {
-                                BuiltinUint::Usize => IntegerType::Pointer(false),
-                                BuiltinUint::U8 => IntegerType::Fixed(Integer::I8, false),
-                                BuiltinUint::U16 => IntegerType::Fixed(Integer::I16, false),
-                                BuiltinUint::U32 => IntegerType::Fixed(Integer::I32, false),
-                                BuiltinUint::U64 => IntegerType::Fixed(Integer::I64, false),
-                                BuiltinUint::U128 => IntegerType::Fixed(Integer::I128, false),
-                            },
-                        });
-                    }
-                    ReprFlags::empty()
-                }
-            })
-        }
-    }
-
-    Some(ReprOptions {
-        int,
-        align: max_align,
-        pack: min_pack,
-        flags,
-        field_shuffle_seed: Hash64::ZERO,
-    })
-}
-
-impl StructData {
-    #[inline]
-    pub(crate) fn struct_data_query(db: &dyn DefDatabase, id: StructId) -> Arc<StructData> {
-        db.struct_data_with_diagnostics(id).0
-    }
-
-    pub(crate) fn struct_data_with_diagnostics_query(
-        db: &dyn DefDatabase,
-        id: StructId,
-    ) -> (Arc<StructData>, DefDiagnostics) {
-        let loc = id.lookup(db);
-        let krate = loc.container.krate;
-        let item_tree = loc.id.item_tree(db);
-        let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into());
-        let attrs = item_tree.attrs(db, krate, ModItem::from(loc.id.value).into());
-
-        let mut flags = StructFlags::NO_FLAGS;
-        if attrs.by_key(&sym::rustc_has_incoherent_inherent_impls).exists() {
-            flags |= StructFlags::IS_RUSTC_HAS_INCOHERENT_INHERENT_IMPL;
-        }
-        if attrs.by_key(&sym::fundamental).exists() {
-            flags |= StructFlags::IS_FUNDAMENTAL;
-        }
-        if let Some(lang) = attrs.lang_item() {
-            match lang {
-                LangItem::PhantomData => flags |= StructFlags::IS_PHANTOM_DATA,
-                LangItem::OwnedBox => flags |= StructFlags::IS_BOX,
-                LangItem::ManuallyDrop => flags |= StructFlags::IS_MANUALLY_DROP,
-                LangItem::UnsafeCell => flags |= StructFlags::IS_UNSAFE_CELL,
-                _ => (),
-            }
-        }
-
-        let strukt = &item_tree[loc.id.value];
-        let (fields, diagnostics) = lower_fields(
-            db,
-            krate,
-            loc.container.local_id,
-            loc.id.tree_id(),
-            &item_tree,
-            &db.crate_graph()[krate].cfg_options,
-            FieldParent::Struct(loc.id.value),
-            &strukt.fields,
-            None,
-        );
-        let types_map = strukt.types_map.clone();
-        (
-            Arc::new(StructData {
-                name: strukt.name.clone(),
-                variant_data: Arc::new(match strukt.shape {
-                    FieldsShape::Record => VariantData::Record { fields, types_map },
-                    FieldsShape::Tuple => VariantData::Tuple { fields, types_map },
-                    FieldsShape::Unit => VariantData::Unit,
-                }),
-                repr,
-                visibility: item_tree[strukt.visibility].clone(),
-                flags,
-            }),
-            DefDiagnostics::new(diagnostics),
-        )
-    }
-
-    #[inline]
-    pub(crate) fn union_data_query(db: &dyn DefDatabase, id: UnionId) -> Arc<StructData> {
-        db.union_data_with_diagnostics(id).0
-    }
-
-    pub(crate) fn union_data_with_diagnostics_query(
-        db: &dyn DefDatabase,
-        id: UnionId,
-    ) -> (Arc<StructData>, DefDiagnostics) {
-        let loc = id.lookup(db);
-        let krate = loc.container.krate;
-        let item_tree = loc.id.item_tree(db);
-        let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into());
-        let attrs = item_tree.attrs(db, krate, ModItem::from(loc.id.value).into());
-        let mut flags = StructFlags::NO_FLAGS;
-        if attrs.by_key(&sym::rustc_has_incoherent_inherent_impls).exists() {
-            flags |= StructFlags::IS_RUSTC_HAS_INCOHERENT_INHERENT_IMPL;
-        }
-        if attrs.by_key(&sym::fundamental).exists() {
-            flags |= StructFlags::IS_FUNDAMENTAL;
-        }
-
-        let union = &item_tree[loc.id.value];
-        let (fields, diagnostics) = lower_fields(
-            db,
-            krate,
-            loc.container.local_id,
-            loc.id.tree_id(),
-            &item_tree,
-            &db.crate_graph()[krate].cfg_options,
-            FieldParent::Union(loc.id.value),
-            &union.fields,
-            None,
-        );
-        let types_map = union.types_map.clone();
-        (
-            Arc::new(StructData {
-                name: union.name.clone(),
-                variant_data: Arc::new(VariantData::Record { fields, types_map }),
-                repr,
-                visibility: item_tree[union.visibility].clone(),
-                flags,
-            }),
-            DefDiagnostics::new(diagnostics),
-        )
-    }
-}
-
-impl EnumData {
-    pub(crate) fn enum_data_query(db: &dyn DefDatabase, e: EnumId) -> Arc<EnumData> {
-        let loc = e.lookup(db);
-        let krate = loc.container.krate;
-        let item_tree = loc.id.item_tree(db);
-        let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into());
-        let rustc_has_incoherent_inherent_impls = item_tree
-            .attrs(db, loc.container.krate, ModItem::from(loc.id.value).into())
-            .by_key(&sym::rustc_has_incoherent_inherent_impls)
-            .exists();
-
-        let enum_ = &item_tree[loc.id.value];
-
-        Arc::new(EnumData {
-            name: enum_.name.clone(),
-            variants: loc.container.def_map(db).enum_definitions[&e]
-                .iter()
-                .map(|&id| (id, item_tree[id.lookup(db).id.value].name.clone()))
-                .collect(),
-            repr,
-            visibility: item_tree[enum_.visibility].clone(),
-            rustc_has_incoherent_inherent_impls,
-        })
-    }
-
-    pub fn variant(&self, name: &Name) -> Option<EnumVariantId> {
-        let &(id, _) = self.variants.iter().find(|(_id, n)| n == name)?;
-        Some(id)
-    }
-
-    pub fn variant_body_type(&self) -> IntegerType {
-        match self.repr {
-            Some(ReprOptions { int: Some(builtin), .. }) => builtin,
-            _ => IntegerType::Pointer(true),
-        }
-    }
-
-    // [Adopted from rustc](https://github.com/rust-lang/rust/blob/bd53aa3bf7a24a70d763182303bd75e5fc51a9af/compiler/rustc_middle/src/ty/adt.rs#L446-L448)
-    pub fn is_payload_free(&self, db: &dyn DefDatabase) -> bool {
-        self.variants.iter().all(|(v, _)| {
-            // The condition check order is slightly modified from rustc
-            // to improve performance by early returning with relatively fast checks
-            let variant = &db.enum_variant_data(*v).variant_data;
-            if !variant.fields().is_empty() {
-                return false;
-            }
-            // The outer if condition is whether this variant has const ctor or not
-            if !matches!(variant.kind(), StructKind::Unit) {
-                let body = db.body((*v).into());
-                // A variant with explicit discriminant
-                if body.exprs[body.body_expr] != Expr::Missing {
-                    return false;
-                }
-            }
-            true
-        })
-    }
-}
-
-impl EnumVariantData {
-    #[inline]
-    pub(crate) fn enum_variant_data_query(
-        db: &dyn DefDatabase,
-        e: EnumVariantId,
-    ) -> Arc<EnumVariantData> {
-        db.enum_variant_data_with_diagnostics(e).0
-    }
-
-    pub(crate) fn enum_variant_data_with_diagnostics_query(
-        db: &dyn DefDatabase,
-        e: EnumVariantId,
-    ) -> (Arc<EnumVariantData>, DefDiagnostics) {
-        let loc = e.lookup(db);
-        let container = loc.parent.lookup(db).container;
-        let krate = container.krate;
-        let item_tree = loc.id.item_tree(db);
-        let variant = &item_tree[loc.id.value];
-
-        let (fields, diagnostics) = lower_fields(
-            db,
-            krate,
-            container.local_id,
-            loc.id.tree_id(),
-            &item_tree,
-            &db.crate_graph()[krate].cfg_options,
-            FieldParent::Variant(loc.id.value),
-            &variant.fields,
-            Some(item_tree[loc.parent.lookup(db).id.value].visibility),
-        );
-        let types_map = variant.types_map.clone();
-
-        (
-            Arc::new(EnumVariantData {
-                name: variant.name.clone(),
-                variant_data: Arc::new(match variant.shape {
-                    FieldsShape::Record => VariantData::Record { fields, types_map },
-                    FieldsShape::Tuple => VariantData::Tuple { fields, types_map },
-                    FieldsShape::Unit => VariantData::Unit,
-                }),
-            }),
-            DefDiagnostics::new(diagnostics),
-        )
-    }
-}
-
-impl VariantData {
-    pub fn fields(&self) -> &Arena<FieldData> {
-        const EMPTY: &Arena<FieldData> = &Arena::new();
-        match &self {
-            VariantData::Record { fields, .. } | VariantData::Tuple { fields, .. } => fields,
-            _ => EMPTY,
-        }
-    }
-
-    pub fn types_map(&self) -> &TypesMap {
-        match &self {
-            VariantData::Record { types_map, .. } | VariantData::Tuple { types_map, .. } => {
-                types_map
-            }
-            VariantData::Unit => TypesMap::EMPTY,
-        }
-    }
-
-    // FIXME: Linear lookup
-    pub fn field(&self, name: &Name) -> Option<LocalFieldId> {
-        self.fields().iter().find_map(|(id, data)| if &data.name == name { Some(id) } else { None })
-    }
-
-    pub fn kind(&self) -> StructKind {
-        match self {
-            VariantData::Record { .. } => StructKind::Record,
-            VariantData::Tuple { .. } => StructKind::Tuple,
-            VariantData::Unit => StructKind::Unit,
-        }
-    }
-
-    #[allow(clippy::self_named_constructors)]
-    pub(crate) fn variant_data(db: &dyn DefDatabase, id: VariantId) -> Arc<VariantData> {
-        match id {
-            VariantId::StructId(it) => db.struct_data(it).variant_data.clone(),
-            VariantId::EnumVariantId(it) => db.enum_variant_data(it).variant_data.clone(),
-            VariantId::UnionId(it) => db.union_data(it).variant_data.clone(),
-        }
-    }
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq)]
-pub enum StructKind {
-    Tuple,
-    Record,
-    Unit,
-}
-
-fn lower_fields(
-    db: &dyn DefDatabase,
-    krate: CrateId,
-    container: LocalModuleId,
-    tree_id: TreeId,
-    item_tree: &ItemTree,
-    cfg_options: &CfgOptions,
-    parent: FieldParent,
-    fields: &[Field],
-    override_visibility: Option<RawVisibilityId>,
-) -> (Arena<FieldData>, Vec<DefDiagnostic>) {
-    let mut diagnostics = Vec::new();
-    let mut arena = Arena::new();
-    for (idx, field) in fields.iter().enumerate() {
-        let attr_owner = AttrOwner::make_field_indexed(parent, idx);
-        let attrs = item_tree.attrs(db, krate, attr_owner);
-        if attrs.is_cfg_enabled(cfg_options) {
-            arena.alloc(lower_field(item_tree, field, override_visibility));
-        } else {
-            diagnostics.push(DefDiagnostic::unconfigured_code(
-                container,
-                tree_id,
-                attr_owner,
-                attrs.cfg().unwrap(),
-                cfg_options.clone(),
-            ))
-        }
-    }
-    (arena, diagnostics)
-}
-
-fn lower_field(
-    item_tree: &ItemTree,
-    field: &Field,
-    override_visibility: Option<RawVisibilityId>,
-) -> FieldData {
-    FieldData {
-        name: field.name.clone(),
-        type_ref: field.type_ref,
-        visibility: item_tree[override_visibility.unwrap_or(field.visibility)].clone(),
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/db.rs b/src/tools/rust-analyzer/crates/hir-def/src/db.rs
index 598a850898bb6..34cf42d02bdb0 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/db.rs
@@ -1,273 +1,373 @@
 //! Defines database & queries for name resolution.
-use base_db::{ra_salsa, CrateId, SourceDatabase, Upcast};
+use base_db::{Crate, RootQueryDb, SourceDatabase};
 use either::Either;
-use hir_expand::{db::ExpandDatabase, HirFileId, MacroDefId};
+use hir_expand::{EditionedFileId, HirFileId, MacroCallId, MacroDefId, db::ExpandDatabase};
 use intern::sym;
 use la_arena::ArenaMap;
-use span::{EditionedFileId, MacroCallId};
-use syntax::{ast, AstPtr};
+use syntax::{AstPtr, ast};
+use thin_vec::ThinVec;
 use triomphe::Arc;
 
 use crate::{
+    AttrDefId, BlockId, BlockLoc, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc, EnumVariantId,
+    EnumVariantLoc, ExternBlockId, ExternBlockLoc, ExternCrateId, ExternCrateLoc, FunctionId,
+    FunctionLoc, GenericDefId, ImplId, ImplLoc, LocalFieldId, Macro2Id, Macro2Loc, MacroId,
+    MacroRulesId, MacroRulesLoc, MacroRulesLocFlags, ProcMacroId, ProcMacroLoc, StaticId,
+    StaticLoc, StructId, StructLoc, TraitAliasId, TraitAliasLoc, TraitId, TraitLoc, TypeAliasId,
+    TypeAliasLoc, UnionId, UnionLoc, UseId, UseLoc, VariantId,
     attr::{Attrs, AttrsWithOwner},
-    data::{
-        adt::{EnumData, EnumVariantData, StructData, VariantData},
-        ConstData, ExternCrateDeclData, FunctionData, ImplData, Macro2Data, MacroRulesData,
-        ProcMacroData, StaticData, TraitAliasData, TraitData, TypeAliasData,
+    expr_store::{
+        Body, BodySourceMap, ExpressionStore, ExpressionStoreSourceMap, scope::ExprScopes,
     },
-    expr_store::{scope::ExprScopes, Body, BodySourceMap},
-    generics::GenericParams,
+    hir::generics::GenericParams,
     import_map::ImportMap,
-    item_tree::{AttrOwner, ItemTree, ItemTreeSourceMaps},
+    item_tree::{AttrOwner, ItemTree},
     lang_item::{self, LangItem, LangItemTarget, LangItems},
-    nameres::{diagnostics::DefDiagnostics, DefMap},
+    nameres::{
+        DefMap, LocalDefMap,
+        assoc::{ImplItems, TraitItems},
+        diagnostics::DefDiagnostics,
+    },
+    signatures::{
+        ConstSignature, EnumSignature, EnumVariants, FunctionSignature, ImplSignature,
+        InactiveEnumVariantCode, StaticSignature, StructSignature, TraitAliasSignature,
+        TraitSignature, TypeAliasSignature, UnionSignature, VariantFields,
+    },
     tt,
-    type_ref::TypesSourceMap,
     visibility::{self, Visibility},
-    AttrDefId, BlockId, BlockLoc, ConstBlockId, ConstBlockLoc, ConstId, ConstLoc, DefWithBodyId,
-    EnumId, EnumLoc, EnumVariantId, EnumVariantLoc, ExternBlockId, ExternBlockLoc, ExternCrateId,
-    ExternCrateLoc, FunctionId, FunctionLoc, GenericDefId, ImplId, ImplLoc, InTypeConstId,
-    InTypeConstLoc, LocalFieldId, Macro2Id, Macro2Loc, MacroId, MacroRulesId, MacroRulesLoc,
-    MacroRulesLocFlags, ProcMacroId, ProcMacroLoc, StaticId, StaticLoc, StructId, StructLoc,
-    TraitAliasId, TraitAliasLoc, TraitId, TraitLoc, TypeAliasId, TypeAliasLoc, UnionId, UnionLoc,
-    UseId, UseLoc, VariantId,
 };
 
-#[ra_salsa::query_group(InternDatabaseStorage)]
-pub trait InternDatabase: SourceDatabase {
+use salsa::plumbing::AsId;
+
+#[query_group::query_group(InternDatabaseStorage)]
+pub trait InternDatabase: RootQueryDb {
     // region: items
-    #[ra_salsa::interned]
+    #[salsa::interned]
     fn intern_use(&self, loc: UseLoc) -> UseId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_extern_crate(&self, loc: ExternCrateLoc) -> ExternCrateId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_function(&self, loc: FunctionLoc) -> FunctionId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_struct(&self, loc: StructLoc) -> StructId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_union(&self, loc: UnionLoc) -> UnionId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_enum(&self, loc: EnumLoc) -> EnumId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_enum_variant(&self, loc: EnumVariantLoc) -> EnumVariantId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_const(&self, loc: ConstLoc) -> ConstId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_static(&self, loc: StaticLoc) -> StaticId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_trait(&self, loc: TraitLoc) -> TraitId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_trait_alias(&self, loc: TraitAliasLoc) -> TraitAliasId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_type_alias(&self, loc: TypeAliasLoc) -> TypeAliasId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_impl(&self, loc: ImplLoc) -> ImplId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_extern_block(&self, loc: ExternBlockLoc) -> ExternBlockId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_macro2(&self, loc: Macro2Loc) -> Macro2Id;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_proc_macro(&self, loc: ProcMacroLoc) -> ProcMacroId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_macro_rules(&self, loc: MacroRulesLoc) -> MacroRulesId;
-    // endregion: items
+    // endregion: items
 
-    #[ra_salsa::interned]
+    #[salsa::interned]
     fn intern_block(&self, loc: BlockLoc) -> BlockId;
-    #[ra_salsa::interned]
-    fn intern_anonymous_const(&self, id: ConstBlockLoc) -> ConstBlockId;
-    #[ra_salsa::interned]
-    fn intern_in_type_const(&self, id: InTypeConstLoc) -> InTypeConstId;
 }
 
-#[ra_salsa::query_group(DefDatabaseStorage)]
-pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDatabase> {
+#[query_group::query_group]
+pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase {
     /// Whether to expand procedural macros during name resolution.
-    #[ra_salsa::input]
+    #[salsa::input]
     fn expand_proc_attr_macros(&self) -> bool;
 
     /// Computes an [`ItemTree`] for the given file or macro expansion.
-    #[ra_salsa::invoke(ItemTree::file_item_tree_query)]
+    #[salsa::invoke(ItemTree::file_item_tree_query)]
     fn file_item_tree(&self, file_id: HirFileId) -> Arc<ItemTree>;
 
-    #[ra_salsa::invoke(ItemTree::block_item_tree_query)]
+    #[salsa::invoke(ItemTree::block_item_tree_query)]
     fn block_item_tree(&self, block_id: BlockId) -> Arc<ItemTree>;
 
-    #[ra_salsa::invoke(ItemTree::file_item_tree_with_source_map_query)]
-    fn file_item_tree_with_source_map(
-        &self,
-        file_id: HirFileId,
-    ) -> (Arc<ItemTree>, Arc<ItemTreeSourceMaps>);
-
-    #[ra_salsa::invoke(ItemTree::block_item_tree_with_source_map_query)]
-    fn block_item_tree_with_source_map(
-        &self,
-        block_id: BlockId,
-    ) -> (Arc<ItemTree>, Arc<ItemTreeSourceMaps>);
+    #[salsa::invoke(DefMap::crate_local_def_map_query)]
+    fn crate_local_def_map(&self, krate: Crate) -> (Arc<DefMap>, Arc<LocalDefMap>);
 
-    #[ra_salsa::invoke(DefMap::crate_def_map_query)]
-    fn crate_def_map(&self, krate: CrateId) -> Arc<DefMap>;
+    #[salsa::invoke(DefMap::crate_def_map_query)]
+    fn crate_def_map(&self, krate: Crate) -> Arc<DefMap>;
 
     /// Computes the block-level `DefMap`.
-    #[ra_salsa::invoke(DefMap::block_def_map_query)]
+    #[salsa::invoke(DefMap::block_def_map_query)]
     fn block_def_map(&self, block: BlockId) -> Arc<DefMap>;
 
     /// Turns a MacroId into a MacroDefId, describing the macro's definition post name resolution.
+    #[salsa::invoke(macro_def)]
     fn macro_def(&self, m: MacroId) -> MacroDefId;
 
     // region:data
 
-    #[ra_salsa::transparent]
-    #[ra_salsa::invoke(StructData::struct_data_query)]
-    fn struct_data(&self, id: StructId) -> Arc<StructData>;
+    #[salsa::invoke(VariantFields::query)]
+    fn variant_fields_with_source_map(
+        &self,
+        id: VariantId,
+    ) -> (Arc<VariantFields>, Arc<ExpressionStoreSourceMap>);
 
-    #[ra_salsa::invoke(StructData::struct_data_with_diagnostics_query)]
-    fn struct_data_with_diagnostics(&self, id: StructId) -> (Arc<StructData>, DefDiagnostics);
+    #[salsa::tracked]
+    fn enum_variants(&self, id: EnumId) -> Arc<EnumVariants> {
+        self.enum_variants_with_diagnostics(id).0
+    }
+
+    #[salsa::invoke(EnumVariants::enum_variants_query)]
+    fn enum_variants_with_diagnostics(
+        &self,
+        id: EnumId,
+    ) -> (Arc<EnumVariants>, Option<Arc<ThinVec<InactiveEnumVariantCode>>>);
 
-    #[ra_salsa::transparent]
-    #[ra_salsa::invoke(StructData::union_data_query)]
-    fn union_data(&self, id: UnionId) -> Arc<StructData>;
+    #[salsa::transparent]
+    #[salsa::invoke(ImplItems::impl_items_query)]
+    fn impl_items(&self, e: ImplId) -> Arc<ImplItems>;
 
-    #[ra_salsa::invoke(StructData::union_data_with_diagnostics_query)]
-    fn union_data_with_diagnostics(&self, id: UnionId) -> (Arc<StructData>, DefDiagnostics);
+    #[salsa::invoke(ImplItems::impl_items_with_diagnostics_query)]
+    fn impl_items_with_diagnostics(&self, e: ImplId) -> (Arc<ImplItems>, DefDiagnostics);
 
-    #[ra_salsa::invoke(EnumData::enum_data_query)]
-    fn enum_data(&self, e: EnumId) -> Arc<EnumData>;
+    #[salsa::transparent]
+    #[salsa::invoke(TraitItems::trait_items_query)]
+    fn trait_items(&self, e: TraitId) -> Arc<TraitItems>;
 
-    #[ra_salsa::transparent]
-    #[ra_salsa::invoke(EnumVariantData::enum_variant_data_query)]
-    fn enum_variant_data(&self, id: EnumVariantId) -> Arc<EnumVariantData>;
+    #[salsa::invoke(TraitItems::trait_items_with_diagnostics_query)]
+    fn trait_items_with_diagnostics(&self, tr: TraitId) -> (Arc<TraitItems>, DefDiagnostics);
 
-    #[ra_salsa::invoke(EnumVariantData::enum_variant_data_with_diagnostics_query)]
-    fn enum_variant_data_with_diagnostics(
-        &self,
-        id: EnumVariantId,
-    ) -> (Arc<EnumVariantData>, DefDiagnostics);
+    #[salsa::tracked]
+    fn variant_fields(&self, id: VariantId) -> Arc<VariantFields> {
+        self.variant_fields_with_source_map(id).0
+    }
+
+    #[salsa::tracked]
+    fn trait_signature(&self, trait_: TraitId) -> Arc<TraitSignature> {
+        self.trait_signature_with_source_map(trait_).0
+    }
 
-    #[ra_salsa::transparent]
-    #[ra_salsa::invoke(VariantData::variant_data)]
-    fn variant_data(&self, id: VariantId) -> Arc<VariantData>;
-    #[ra_salsa::transparent]
-    #[ra_salsa::invoke(ImplData::impl_data_query)]
-    fn impl_data(&self, e: ImplId) -> Arc<ImplData>;
+    #[salsa::tracked]
+    fn impl_signature(&self, impl_: ImplId) -> Arc<ImplSignature> {
+        self.impl_signature_with_source_map(impl_).0
+    }
+
+    #[salsa::tracked]
+    fn struct_signature(&self, struct_: StructId) -> Arc<StructSignature> {
+        self.struct_signature_with_source_map(struct_).0
+    }
+
+    #[salsa::tracked]
+    fn union_signature(&self, union_: UnionId) -> Arc<UnionSignature> {
+        self.union_signature_with_source_map(union_).0
+    }
+
+    #[salsa::tracked]
+    fn enum_signature(&self, e: EnumId) -> Arc<EnumSignature> {
+        self.enum_signature_with_source_map(e).0
+    }
 
-    #[ra_salsa::invoke(ImplData::impl_data_with_diagnostics_query)]
-    fn impl_data_with_diagnostics(&self, e: ImplId) -> (Arc<ImplData>, DefDiagnostics);
+    #[salsa::tracked]
+    fn const_signature(&self, e: ConstId) -> Arc<ConstSignature> {
+        self.const_signature_with_source_map(e).0
+    }
 
-    #[ra_salsa::transparent]
-    #[ra_salsa::invoke(TraitData::trait_data_query)]
-    fn trait_data(&self, e: TraitId) -> Arc<TraitData>;
+    #[salsa::tracked]
+    fn static_signature(&self, e: StaticId) -> Arc<StaticSignature> {
+        self.static_signature_with_source_map(e).0
+    }
+
+    #[salsa::tracked]
+    fn function_signature(&self, e: FunctionId) -> Arc<FunctionSignature> {
+        self.function_signature_with_source_map(e).0
+    }
+
+    #[salsa::tracked]
+    fn trait_alias_signature(&self, e: TraitAliasId) -> Arc<TraitAliasSignature> {
+        self.trait_alias_signature_with_source_map(e).0
+    }
+
+    #[salsa::tracked]
+    fn type_alias_signature(&self, e: TypeAliasId) -> Arc<TypeAliasSignature> {
+        self.type_alias_signature_with_source_map(e).0
+    }
 
-    #[ra_salsa::invoke(TraitData::trait_data_with_diagnostics_query)]
-    fn trait_data_with_diagnostics(&self, tr: TraitId) -> (Arc<TraitData>, DefDiagnostics);
+    #[salsa::invoke(TraitSignature::query)]
+    fn trait_signature_with_source_map(
+        &self,
+        trait_: TraitId,
+    ) -> (Arc<TraitSignature>, Arc<ExpressionStoreSourceMap>);
 
-    #[ra_salsa::invoke(TraitAliasData::trait_alias_query)]
-    fn trait_alias_data(&self, e: TraitAliasId) -> Arc<TraitAliasData>;
+    #[salsa::invoke(ImplSignature::query)]
+    fn impl_signature_with_source_map(
+        &self,
+        impl_: ImplId,
+    ) -> (Arc<ImplSignature>, Arc<ExpressionStoreSourceMap>);
 
-    #[ra_salsa::invoke(TypeAliasData::type_alias_data_query)]
-    fn type_alias_data(&self, e: TypeAliasId) -> Arc<TypeAliasData>;
+    #[salsa::invoke(StructSignature::query)]
+    fn struct_signature_with_source_map(
+        &self,
+        struct_: StructId,
+    ) -> (Arc<StructSignature>, Arc<ExpressionStoreSourceMap>);
 
-    #[ra_salsa::invoke(FunctionData::fn_data_query)]
-    fn function_data(&self, func: FunctionId) -> Arc<FunctionData>;
+    #[salsa::invoke(UnionSignature::query)]
+    fn union_signature_with_source_map(
+        &self,
+        union_: UnionId,
+    ) -> (Arc<UnionSignature>, Arc<ExpressionStoreSourceMap>);
 
-    #[ra_salsa::invoke(ConstData::const_data_query)]
-    fn const_data(&self, konst: ConstId) -> Arc<ConstData>;
+    #[salsa::invoke(EnumSignature::query)]
+    fn enum_signature_with_source_map(
+        &self,
+        e: EnumId,
+    ) -> (Arc<EnumSignature>, Arc<ExpressionStoreSourceMap>);
 
-    #[ra_salsa::invoke(StaticData::static_data_query)]
-    fn static_data(&self, statik: StaticId) -> Arc<StaticData>;
+    #[salsa::invoke(ConstSignature::query)]
+    fn const_signature_with_source_map(
+        &self,
+        e: ConstId,
+    ) -> (Arc<ConstSignature>, Arc<ExpressionStoreSourceMap>);
 
-    #[ra_salsa::invoke(Macro2Data::macro2_data_query)]
-    fn macro2_data(&self, makro: Macro2Id) -> Arc<Macro2Data>;
+    #[salsa::invoke(StaticSignature::query)]
+    fn static_signature_with_source_map(
+        &self,
+        e: StaticId,
+    ) -> (Arc<StaticSignature>, Arc<ExpressionStoreSourceMap>);
 
-    #[ra_salsa::invoke(MacroRulesData::macro_rules_data_query)]
-    fn macro_rules_data(&self, makro: MacroRulesId) -> Arc<MacroRulesData>;
+    #[salsa::invoke(FunctionSignature::query)]
+    fn function_signature_with_source_map(
+        &self,
+        e: FunctionId,
+    ) -> (Arc<FunctionSignature>, Arc<ExpressionStoreSourceMap>);
 
-    #[ra_salsa::invoke(ProcMacroData::proc_macro_data_query)]
-    fn proc_macro_data(&self, makro: ProcMacroId) -> Arc<ProcMacroData>;
+    #[salsa::invoke(TraitAliasSignature::query)]
+    fn trait_alias_signature_with_source_map(
+        &self,
+        e: TraitAliasId,
+    ) -> (Arc<TraitAliasSignature>, Arc<ExpressionStoreSourceMap>);
 
-    #[ra_salsa::invoke(ExternCrateDeclData::extern_crate_decl_data_query)]
-    fn extern_crate_decl_data(&self, extern_crate: ExternCrateId) -> Arc<ExternCrateDeclData>;
+    #[salsa::invoke(TypeAliasSignature::query)]
+    fn type_alias_signature_with_source_map(
+        &self,
+        e: TypeAliasId,
+    ) -> (Arc<TypeAliasSignature>, Arc<ExpressionStoreSourceMap>);
 
     // endregion:data
 
-    #[ra_salsa::invoke(Body::body_with_source_map_query)]
-    #[ra_salsa::lru]
+    #[salsa::invoke(Body::body_with_source_map_query)]
+    #[salsa::lru(512)]
     fn body_with_source_map(&self, def: DefWithBodyId) -> (Arc<Body>, Arc<BodySourceMap>);
 
-    #[ra_salsa::invoke(Body::body_query)]
+    #[salsa::invoke(Body::body_query)]
     fn body(&self, def: DefWithBodyId) -> Arc<Body>;
 
-    #[ra_salsa::invoke(ExprScopes::expr_scopes_query)]
+    #[salsa::invoke(ExprScopes::expr_scopes_query)]
     fn expr_scopes(&self, def: DefWithBodyId) -> Arc<ExprScopes>;
 
-    #[ra_salsa::invoke(GenericParams::generic_params_query)]
+    #[salsa::transparent]
+    #[salsa::invoke(GenericParams::new)]
     fn generic_params(&self, def: GenericDefId) -> Arc<GenericParams>;
 
-    /// If this returns `None` for the source map, that means it is the same as with the item tree.
-    #[ra_salsa::invoke(GenericParams::generic_params_with_source_map_query)]
-    fn generic_params_with_source_map(
+    #[salsa::transparent]
+    #[salsa::invoke(GenericParams::generic_params_and_store)]
+    fn generic_params_and_store(
         &self,
         def: GenericDefId,
-    ) -> (Arc<GenericParams>, Option<Arc<TypesSourceMap>>);
+    ) -> (Arc<GenericParams>, Arc<ExpressionStore>);
+
+    #[salsa::transparent]
+    #[salsa::invoke(GenericParams::generic_params_and_store_and_source_map)]
+    fn generic_params_and_store_and_source_map(
+        &self,
+        def: GenericDefId,
+    ) -> (Arc<GenericParams>, Arc<ExpressionStore>, Arc<ExpressionStoreSourceMap>);
 
     // region:attrs
 
-    #[ra_salsa::invoke(Attrs::fields_attrs_query)]
+    #[salsa::invoke(Attrs::fields_attrs_query)]
     fn fields_attrs(&self, def: VariantId) -> Arc<ArenaMap<LocalFieldId, Attrs>>;
 
     // should this really be a query?
-    #[ra_salsa::invoke(crate::attr::fields_attrs_source_map)]
+    #[salsa::invoke(crate::attr::fields_attrs_source_map)]
     fn fields_attrs_source_map(
         &self,
         def: VariantId,
     ) -> Arc<ArenaMap<LocalFieldId, AstPtr<Either<ast::TupleField, ast::RecordField>>>>;
 
-    #[ra_salsa::invoke(AttrsWithOwner::attrs_query)]
+    // FIXME: Make this a non-interned query.
+    #[salsa::invoke_interned(AttrsWithOwner::attrs_query)]
     fn attrs(&self, def: AttrDefId) -> Attrs;
 
-    #[ra_salsa::transparent]
-    #[ra_salsa::invoke(lang_item::lang_attr)]
+    #[salsa::transparent]
+    #[salsa::invoke(lang_item::lang_attr)]
     fn lang_attr(&self, def: AttrDefId) -> Option<LangItem>;
 
     // endregion:attrs
 
-    #[ra_salsa::invoke(LangItems::lang_item_query)]
-    fn lang_item(&self, start_crate: CrateId, item: LangItem) -> Option<LangItemTarget>;
+    #[salsa::invoke(LangItems::lang_item_query)]
+    fn lang_item(&self, start_crate: Crate, item: LangItem) -> Option<LangItemTarget>;
 
-    #[ra_salsa::invoke(ImportMap::import_map_query)]
-    fn import_map(&self, krate: CrateId) -> Arc<ImportMap>;
+    #[salsa::invoke(ImportMap::import_map_query)]
+    fn import_map(&self, krate: Crate) -> Arc<ImportMap>;
 
     // region:visibilities
 
-    #[ra_salsa::invoke(visibility::field_visibilities_query)]
+    #[salsa::invoke(visibility::field_visibilities_query)]
     fn field_visibilities(&self, var: VariantId) -> Arc<ArenaMap<LocalFieldId, Visibility>>;
 
     // FIXME: unify function_visibility and const_visibility?
-    #[ra_salsa::invoke(visibility::function_visibility_query)]
+
+    #[salsa::invoke(visibility::function_visibility_query)]
     fn function_visibility(&self, def: FunctionId) -> Visibility;
 
-    #[ra_salsa::invoke(visibility::const_visibility_query)]
+    #[salsa::invoke(visibility::const_visibility_query)]
     fn const_visibility(&self, def: ConstId) -> Visibility;
 
+    #[salsa::invoke(visibility::type_alias_visibility_query)]
+    fn type_alias_visibility(&self, def: TypeAliasId) -> Visibility;
+
     // endregion:visibilities
 
-    #[ra_salsa::invoke(LangItems::crate_lang_items_query)]
-    fn crate_lang_items(&self, krate: CrateId) -> Option<Arc<LangItems>>;
+    #[salsa::invoke(LangItems::crate_lang_items_query)]
+    fn crate_lang_items(&self, krate: Crate) -> Option<Arc<LangItems>>;
 
-    #[ra_salsa::invoke(crate::lang_item::notable_traits_in_deps)]
-    fn notable_traits_in_deps(&self, krate: CrateId) -> Arc<[Arc<[TraitId]>]>;
-    #[ra_salsa::invoke(crate::lang_item::crate_notable_traits)]
-    fn crate_notable_traits(&self, krate: CrateId) -> Option<Arc<[TraitId]>>;
+    #[salsa::invoke(crate::lang_item::notable_traits_in_deps)]
+    fn notable_traits_in_deps(&self, krate: Crate) -> Arc<[Arc<[TraitId]>]>;
+    #[salsa::invoke(crate::lang_item::crate_notable_traits)]
+    fn crate_notable_traits(&self, krate: Crate) -> Option<Arc<[TraitId]>>;
 
-    fn crate_supports_no_std(&self, crate_id: CrateId) -> bool;
+    #[salsa::invoke(crate_supports_no_std)]
+    fn crate_supports_no_std(&self, crate_id: Crate) -> bool;
 
-    fn include_macro_invoc(&self, crate_id: CrateId) -> Arc<[(MacroCallId, EditionedFileId)]>;
+    #[salsa::invoke(include_macro_invoc)]
+    fn include_macro_invoc(&self, crate_id: Crate) -> Arc<[(MacroCallId, EditionedFileId)]>;
 }
 
 // return: macro call id and include file id
 fn include_macro_invoc(
     db: &dyn DefDatabase,
-    krate: CrateId,
+    krate: Crate,
 ) -> Arc<[(MacroCallId, EditionedFileId)]> {
     db.crate_def_map(krate)
         .modules
@@ -275,20 +375,20 @@ fn include_macro_invoc(
         .flat_map(|m| m.scope.iter_macro_invoc())
         .filter_map(|invoc| {
             db.lookup_intern_macro_call(*invoc.1)
-                .include_file_id(db.upcast(), *invoc.1)
+                .include_file_id(db, *invoc.1)
                 .map(|x| (*invoc.1, x))
         })
         .collect()
 }
 
-fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: CrateId) -> bool {
-    let file = db.crate_graph()[crate_id].root_file_id();
+fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: Crate) -> bool {
+    let file = crate_id.data(db).root_file_id(db);
     let item_tree = db.file_item_tree(file.into());
     let attrs = item_tree.raw_attrs(AttrOwner::TopLevel);
     for attr in &**attrs {
         match attr.path().as_ident() {
-            Some(ident) if *ident == sym::no_std.clone() => return true,
-            Some(ident) if *ident == sym::cfg_attr.clone() => {}
+            Some(ident) if *ident == sym::no_std => return true,
+            Some(ident) if *ident == sym::cfg_attr => {}
             _ => continue,
         }
 
@@ -304,7 +404,7 @@ fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: CrateId) -> bool {
         for output in segments.skip(1) {
             match output.flat_tokens() {
                 [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::no_std => {
-                    return true
+                    return true;
                 }
                 _ => {}
             }
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs b/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs
index 8868bc0cd95bd..eed1490a7af62 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs
@@ -27,15 +27,15 @@
 pub mod keys {
     use std::marker::PhantomData;
 
-    use hir_expand::{attrs::AttrId, MacroCallId};
+    use hir_expand::{MacroCallId, attrs::AttrId};
     use rustc_hash::FxHashMap;
-    use syntax::{ast, AstNode, AstPtr};
+    use syntax::{AstNode, AstPtr, ast};
 
     use crate::{
-        dyn_map::{DynMap, Policy},
         BlockId, ConstId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId, FieldId, FunctionId,
         ImplId, LifetimeParamId, Macro2Id, MacroRulesId, ProcMacroId, StaticId, StructId,
         TraitAliasId, TraitId, TypeAliasId, TypeOrConstParamId, UnionId, UseId,
+        dyn_map::{DynMap, Policy},
     };
 
     pub type Key<K, V> = crate::dyn_map::Key<AstPtr<K>, V, AstPtrPolicy<K, V>>;
@@ -112,6 +112,10 @@ pub struct Key<K, V, P = (K, V)> {
 }
 
 impl<K, V, P> Key<K, V, P> {
+    #[allow(
+        clippy::new_without_default,
+        reason = "this a const fn, so it can't be default yet. See <https://github.com/rust-lang/rust/issues/63065>"
+    )]
     pub(crate) const fn new() -> Key<K, V, P> {
         Key { _phantom: PhantomData }
     }
@@ -148,16 +152,11 @@ impl<K: Hash + Eq + 'static, V: 'static> Policy for (K, V) {
     }
 }
 
+#[derive(Default)]
 pub struct DynMap {
     pub(crate) map: Map,
 }
 
-impl Default for DynMap {
-    fn default() -> Self {
-        DynMap { map: Map::new() }
-    }
-}
-
 #[repr(transparent)]
 pub struct KeyMap<KEY> {
     map: DynMap,
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expander.rs b/src/tools/rust-analyzer/crates/hir-def/src/expander.rs
deleted file mode 100644
index a1b3123c9914e..0000000000000
--- a/src/tools/rust-analyzer/crates/hir-def/src/expander.rs
+++ /dev/null
@@ -1,243 +0,0 @@
-//! Macro expansion utilities.
-
-use std::cell::OnceCell;
-
-use base_db::CrateId;
-use cfg::CfgOptions;
-use drop_bomb::DropBomb;
-use hir_expand::{
-    attrs::RawAttrs, mod_path::ModPath, span_map::SpanMap, ExpandError, ExpandErrorKind,
-    ExpandResult, HirFileId, InFile, Lookup, MacroCallId,
-};
-use span::{Edition, SyntaxContextId};
-use syntax::{ast, Parse};
-use triomphe::Arc;
-
-use crate::type_ref::{TypesMap, TypesSourceMap};
-use crate::{
-    attr::Attrs, db::DefDatabase, lower::LowerCtx, path::Path, AsMacroCall, MacroId, ModuleId,
-    UnresolvedMacro,
-};
-
-#[derive(Debug)]
-pub struct Expander {
-    cfg_options: Arc<CfgOptions>,
-    span_map: OnceCell<SpanMap>,
-    current_file_id: HirFileId,
-    pub(crate) module: ModuleId,
-    /// `recursion_depth == usize::MAX` indicates that the recursion limit has been reached.
-    recursion_depth: u32,
-    recursion_limit: usize,
-}
-
-impl Expander {
-    pub fn new(db: &dyn DefDatabase, current_file_id: HirFileId, module: ModuleId) -> Expander {
-        let recursion_limit = module.def_map(db).recursion_limit() as usize;
-        let recursion_limit = if cfg!(test) {
-            // Without this, `body::tests::your_stack_belongs_to_me` stack-overflows in debug
-            std::cmp::min(32, recursion_limit)
-        } else {
-            recursion_limit
-        };
-        Expander {
-            current_file_id,
-            module,
-            recursion_depth: 0,
-            recursion_limit,
-            cfg_options: db.crate_graph()[module.krate].cfg_options.clone(),
-            span_map: OnceCell::new(),
-        }
-    }
-
-    pub(crate) fn span_map(&self, db: &dyn DefDatabase) -> &SpanMap {
-        self.span_map.get_or_init(|| db.span_map(self.current_file_id))
-    }
-
-    pub fn krate(&self) -> CrateId {
-        self.module.krate
-    }
-
-    pub fn syntax_context(&self) -> SyntaxContextId {
-        // FIXME:
-        SyntaxContextId::root(Edition::CURRENT)
-    }
-
-    pub fn enter_expand<T: ast::AstNode>(
-        &mut self,
-        db: &dyn DefDatabase,
-        macro_call: ast::MacroCall,
-        resolver: impl Fn(&ModPath) -> Option<MacroId>,
-    ) -> Result<ExpandResult<Option<(Mark, Parse<T>)>>, UnresolvedMacro> {
-        // FIXME: within_limit should support this, instead of us having to extract the error
-        let mut unresolved_macro_err = None;
-
-        let result = self.within_limit(db, |this| {
-            let macro_call = this.in_file(&macro_call);
-            match macro_call.as_call_id_with_errors(db.upcast(), this.module.krate(), |path| {
-                resolver(path).map(|it| db.macro_def(it))
-            }) {
-                Ok(call_id) => call_id,
-                Err(resolve_err) => {
-                    unresolved_macro_err = Some(resolve_err);
-                    ExpandResult { value: None, err: None }
-                }
-            }
-        });
-
-        if let Some(err) = unresolved_macro_err {
-            Err(err)
-        } else {
-            Ok(result)
-        }
-    }
-
-    pub fn enter_expand_id<T: ast::AstNode>(
-        &mut self,
-        db: &dyn DefDatabase,
-        call_id: MacroCallId,
-    ) -> ExpandResult<Option<(Mark, Parse<T>)>> {
-        self.within_limit(db, |_this| ExpandResult::ok(Some(call_id)))
-    }
-
-    pub fn exit(&mut self, mut mark: Mark) {
-        self.span_map = mark.span_map;
-        self.current_file_id = mark.file_id;
-        if self.recursion_depth == u32::MAX {
-            // Recursion limit has been reached somewhere in the macro expansion tree. Reset the
-            // depth only when we get out of the tree.
-            if !self.current_file_id.is_macro() {
-                self.recursion_depth = 0;
-            }
-        } else {
-            self.recursion_depth -= 1;
-        }
-        mark.bomb.defuse();
-    }
-
-    pub fn ctx<'a>(
-        &self,
-        db: &'a dyn DefDatabase,
-        types_map: &'a mut TypesMap,
-        types_source_map: &'a mut TypesSourceMap,
-    ) -> LowerCtx<'a> {
-        LowerCtx::with_span_map_cell(
-            db,
-            self.current_file_id,
-            self.span_map.clone(),
-            types_map,
-            types_source_map,
-        )
-    }
-
-    pub(crate) fn in_file<T>(&self, value: T) -> InFile<T> {
-        InFile { file_id: self.current_file_id, value }
-    }
-
-    pub(crate) fn parse_attrs(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> Attrs {
-        Attrs::filter(
-            db,
-            self.krate(),
-            RawAttrs::new(
-                db.upcast(),
-                owner,
-                self.span_map.get_or_init(|| db.span_map(self.current_file_id)).as_ref(),
-            ),
-        )
-    }
-
-    pub(crate) fn cfg_options(&self) -> &CfgOptions {
-        &self.cfg_options
-    }
-
-    pub fn current_file_id(&self) -> HirFileId {
-        self.current_file_id
-    }
-
-    pub(crate) fn parse_path(
-        &mut self,
-        db: &dyn DefDatabase,
-        path: ast::Path,
-        types_map: &mut TypesMap,
-        types_source_map: &mut TypesSourceMap,
-    ) -> Option<Path> {
-        let mut ctx = LowerCtx::with_span_map_cell(
-            db,
-            self.current_file_id,
-            self.span_map.clone(),
-            types_map,
-            types_source_map,
-        );
-        Path::from_src(&mut ctx, path)
-    }
-
-    fn within_limit<F, T: ast::AstNode>(
-        &mut self,
-        db: &dyn DefDatabase,
-        op: F,
-    ) -> ExpandResult<Option<(Mark, Parse<T>)>>
-    where
-        F: FnOnce(&mut Self) -> ExpandResult<Option<MacroCallId>>,
-    {
-        if self.recursion_depth == u32::MAX {
-            // Recursion limit has been reached somewhere in the macro expansion tree. We should
-            // stop expanding other macro calls in this tree, or else this may result in
-            // exponential number of macro expansions, leading to a hang.
-            //
-            // The overflow error should have been reported when it occurred (see the next branch),
-            // so don't return overflow error here to avoid diagnostics duplication.
-            cov_mark::hit!(overflow_but_not_me);
-            return ExpandResult::ok(None);
-        }
-
-        let ExpandResult { value, err } = op(self);
-        let Some(call_id) = value else {
-            return ExpandResult { value: None, err };
-        };
-        if self.recursion_depth as usize > self.recursion_limit {
-            self.recursion_depth = u32::MAX;
-            cov_mark::hit!(your_stack_belongs_to_me);
-            return ExpandResult::only_err(ExpandError::new(
-                db.macro_arg_considering_derives(call_id, &call_id.lookup(db.upcast()).kind).2,
-                ExpandErrorKind::RecursionOverflow,
-            ));
-        }
-
-        let macro_file = call_id.as_macro_file();
-        let res = db.parse_macro_expansion(macro_file);
-
-        let err = err.or(res.err);
-        ExpandResult {
-            value: match &err {
-                // If proc-macro is disabled or unresolved, we want to expand to a missing expression
-                // instead of an empty tree which might end up in an empty block.
-                Some(e) if matches!(e.kind(), ExpandErrorKind::MissingProcMacroExpander(_)) => None,
-                _ => (|| {
-                    let parse = res.value.0.cast::<T>()?;
-
-                    self.recursion_depth += 1;
-                    let old_span_map = OnceCell::new();
-                    if let Some(prev) = self.span_map.take() {
-                        _ = old_span_map.set(prev);
-                    };
-                    _ = self.span_map.set(SpanMap::ExpansionSpanMap(res.value.1));
-                    let old_file_id =
-                        std::mem::replace(&mut self.current_file_id, macro_file.into());
-                    let mark = Mark {
-                        file_id: old_file_id,
-                        span_map: old_span_map,
-                        bomb: DropBomb::new("expansion mark dropped"),
-                    };
-                    Some((mark, parse))
-                })(),
-            },
-            err,
-        }
-    }
-}
-
-#[derive(Debug)]
-pub struct Mark {
-    file_id: HirFileId,
-    span_map: OnceCell<SpanMap>,
-    bomb: DropBomb,
-}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store.rs
index 5ff6a7ffe5669..e3775c4931ae8 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store.rs
@@ -1,10 +1,11 @@
 //! Defines `ExpressionStore`: a lowered representation of functions, statics and
 //! consts.
-mod body;
-mod lower;
-mod pretty;
+pub mod body;
+mod expander;
+pub mod lower;
+pub mod path;
+pub mod pretty;
 pub mod scope;
-
 #[cfg(test)]
 mod tests;
 
@@ -12,45 +13,49 @@ use std::ops::{Deref, Index};
 
 use cfg::{CfgExpr, CfgOptions};
 use either::Either;
-use hir_expand::{name::Name, ExpandError, InFile};
+use hir_expand::{ExpandError, InFile, MacroCallId, mod_path::ModPath, name::Name};
 use la_arena::{Arena, ArenaMap};
 use rustc_hash::FxHashMap;
 use smallvec::SmallVec;
-use span::{Edition, MacroFileId, SyntaxContextData};
-use syntax::{ast, AstPtr, SyntaxNodePtr};
+use span::{Edition, SyntaxContext};
+use syntax::{AstPtr, SyntaxNodePtr, ast};
 use triomphe::Arc;
 use tt::TextRange;
 
 use crate::{
+    BlockId, SyntheticSyntax,
     db::DefDatabase,
+    expr_store::path::Path,
     hir::{
         Array, AsmOperand, Binding, BindingId, Expr, ExprId, ExprOrPatId, Label, LabelId, Pat,
         PatId, RecordFieldPat, Statement,
     },
     nameres::DefMap,
-    path::{ModPath, Path},
-    type_ref::{TypeRef, TypeRefId, TypesMap, TypesSourceMap},
-    BlockId, DefWithBodyId, Lookup, SyntheticSyntax,
+    type_ref::{LifetimeRef, LifetimeRefId, PathId, TypeRef, TypeRefId},
 };
 
 pub use self::body::{Body, BodySourceMap};
+pub use self::lower::{
+    hir_assoc_type_binding_to_ast, hir_generic_arg_to_ast, hir_segment_to_ast_segment,
+};
 
 /// A wrapper around [`span::SyntaxContextId`] that is intended only for comparisons.
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct HygieneId(span::SyntaxContextId);
+pub struct HygieneId(span::SyntaxContext);
 
 impl HygieneId {
     // The edition doesn't matter here; we only use this for comparisons and to look up the macro.
-    pub const ROOT: Self = Self(span::SyntaxContextId::root(Edition::Edition2015));
+    pub const ROOT: Self = Self(span::SyntaxContext::root(Edition::Edition2015));
 
-    pub fn new(mut ctx: span::SyntaxContextId) -> Self {
+    pub fn new(mut ctx: span::SyntaxContext) -> Self {
         // See `Name` for why we're doing that.
         ctx.remove_root_edition();
         Self(ctx)
     }
 
-    pub(crate) fn lookup(self, db: &dyn DefDatabase) -> SyntaxContextData {
-        db.lookup_intern_syntax_context(self.0)
+    // FIXME: Inline this
+    pub(crate) fn lookup(self) -> SyntaxContext {
+        self.0
     }
 
     pub(crate) fn is_root(self) -> bool {
@@ -79,27 +84,26 @@ pub type ExprOrPatSource = InFile<ExprOrPatPtr>;
 pub type SelfParamPtr = AstPtr<ast::SelfParam>;
 pub type MacroCallPtr = AstPtr<ast::MacroCall>;
 
+pub type TypePtr = AstPtr<ast::Type>;
+pub type TypeSource = InFile<TypePtr>;
+
+pub type LifetimePtr = AstPtr<ast::Lifetime>;
+pub type LifetimeSource = InFile<LifetimePtr>;
+
 #[derive(Debug, Eq, PartialEq)]
 pub struct ExpressionStore {
     pub exprs: Arena<Expr>,
     pub pats: Arena<Pat>,
     pub bindings: Arena<Binding>,
     pub labels: Arena<Label>,
+    pub types: Arena<TypeRef>,
+    pub lifetimes: Arena<LifetimeRef>,
     /// Id of the closure/coroutine that owns the corresponding binding. If a binding is owned by the
     /// top level expression, it will not be listed in here.
     pub binding_owners: FxHashMap<BindingId, ExprId>,
-    pub types: TypesMap,
     /// Block expressions in this store that may contain inner items.
     block_scopes: Box<[BlockId]>,
 
-    /// A map from binding to its hygiene ID.
-    ///
-    /// Bindings that don't come from macro expansion are not allocated to save space, so not all bindings appear here.
-    /// If a binding does not appear here it has `SyntaxContextId::ROOT`.
-    ///
-    /// Note that this may not be the direct `SyntaxContextId` of the binding's expansion, because transparent
-    /// expansions are attributed to their parent expansion (recursively).
-    binding_hygiene: FxHashMap<BindingId, HygieneId>,
     /// A map from variable usages to their hygiene IDs.
     ///
     /// Expressions (and destructuring patterns) that can be recorded here are single segment path, although not all single segments path refer
@@ -127,15 +131,19 @@ pub struct ExpressionStoreSourceMap {
     field_map_back: FxHashMap<ExprId, FieldSource>,
     pat_field_map_back: FxHashMap<PatId, PatFieldSource>,
 
-    pub types: TypesSourceMap,
+    types_map_back: ArenaMap<TypeRefId, TypeSource>,
+    types_map: FxHashMap<TypeSource, TypeRefId>,
+
+    lifetime_map_back: ArenaMap<LifetimeRefId, LifetimeSource>,
+    lifetime_map: FxHashMap<LifetimeSource, LifetimeRefId>,
 
     template_map: Option<Box<FormatTemplate>>,
 
-    expansions: FxHashMap<InFile<MacroCallPtr>, MacroFileId>,
+    pub expansions: FxHashMap<InFile<MacroCallPtr>, MacroCallId>,
 
     /// Diagnostics accumulated during lowering. These contain `AstPtr`s and so are stored in
     /// the source map (since they're just as volatile).
-    diagnostics: Vec<ExpressionStoreDiagnostics>,
+    pub diagnostics: Vec<ExpressionStoreDiagnostics>,
 }
 
 /// The body of an item (function, const etc.).
@@ -145,10 +153,10 @@ pub struct ExpressionStoreBuilder {
     pub pats: Arena<Pat>,
     pub bindings: Arena<Binding>,
     pub labels: Arena<Label>,
+    pub lifetimes: Arena<LifetimeRef>,
     pub binding_owners: FxHashMap<BindingId, ExprId>,
-    pub types: TypesMap,
+    pub types: Arena<TypeRef>,
     block_scopes: Vec<BlockId>,
-    binding_hygiene: FxHashMap<BindingId, HygieneId>,
     ident_hygiene: FxHashMap<ExprOrPatId, HygieneId>,
 }
 
@@ -177,7 +185,7 @@ pub enum ExpressionStoreDiagnostics {
 }
 
 impl ExpressionStoreBuilder {
-    fn finish(self) -> ExpressionStore {
+    pub fn finish(self) -> ExpressionStore {
         let Self {
             block_scopes,
             mut exprs,
@@ -185,18 +193,18 @@ impl ExpressionStoreBuilder {
             mut pats,
             mut bindings,
             mut binding_owners,
-            mut binding_hygiene,
             mut ident_hygiene,
             mut types,
+            mut lifetimes,
         } = self;
         exprs.shrink_to_fit();
         labels.shrink_to_fit();
         pats.shrink_to_fit();
         bindings.shrink_to_fit();
         binding_owners.shrink_to_fit();
-        binding_hygiene.shrink_to_fit();
         ident_hygiene.shrink_to_fit();
         types.shrink_to_fit();
+        lifetimes.shrink_to_fit();
 
         ExpressionStore {
             exprs,
@@ -205,8 +213,8 @@ impl ExpressionStoreBuilder {
             labels,
             binding_owners,
             types,
+            lifetimes,
             block_scopes: block_scopes.into_boxed_slice(),
-            binding_hygiene,
             ident_hygiene,
         }
     }
@@ -275,6 +283,9 @@ impl ExpressionStore {
         }
     }
 
+    /// Walks the immediate children expressions and calls `f` for each child expression.
+    ///
+    /// Note that this does not walk const blocks.
     pub fn walk_child_exprs(&self, expr_id: ExprId, mut f: impl FnMut(ExprId)) {
         let expr = &self[expr_id];
         match expr {
@@ -408,6 +419,10 @@ impl ExpressionStore {
         }
     }
 
+    /// Walks the immediate children expressions and calls `f` for each child expression but does
+    /// not walk expressions within patterns.
+    ///
+    /// Note that this does not walk const blocks.
     pub fn walk_child_exprs_without_pats(&self, expr_id: ExprId, mut f: impl FnMut(ExprId)) {
         let expr = &self[expr_id];
         match expr {
@@ -542,7 +557,7 @@ impl ExpressionStore {
     }
 
     fn binding_hygiene(&self, binding: BindingId) -> HygieneId {
-        self.binding_hygiene.get(&binding).copied().unwrap_or(HygieneId::ROOT)
+        self.bindings[binding].hygiene
     }
 
     pub fn expr_path_hygiene(&self, expr: ExprId) -> HygieneId {
@@ -601,6 +616,26 @@ impl Index<TypeRefId> for ExpressionStore {
     }
 }
 
+impl Index<LifetimeRefId> for ExpressionStore {
+    type Output = LifetimeRef;
+
+    fn index(&self, b: LifetimeRefId) -> &LifetimeRef {
+        &self.lifetimes[b]
+    }
+}
+
+impl Index<PathId> for ExpressionStore {
+    type Output = Path;
+
+    #[inline]
+    fn index(&self, index: PathId) -> &Self::Output {
+        let TypeRef::Path(path) = &self[index.type_ref()] else {
+            unreachable!("`PathId` always points to `TypeRef::Path`");
+        };
+        path
+    }
+}
+
 // FIXME: Change `node_` prefix to something more reasonable.
 // Perhaps `expr_syntax` and `expr_id`?
 impl ExpressionStoreSourceMap {
@@ -620,12 +655,12 @@ impl ExpressionStoreSourceMap {
         self.expr_map.get(&src).cloned()
     }
 
-    pub fn node_macro_file(&self, node: InFile<&ast::MacroCall>) -> Option<MacroFileId> {
+    pub fn node_macro_file(&self, node: InFile<&ast::MacroCall>) -> Option<MacroCallId> {
         let src = node.map(AstPtr::new);
         self.expansions.get(&src).cloned()
     }
 
-    pub fn macro_calls(&self) -> impl Iterator<Item = (InFile<MacroCallPtr>, MacroFileId)> + '_ {
+    pub fn macro_calls(&self) -> impl Iterator<Item = (InFile<MacroCallPtr>, MacroCallId)> + '_ {
         self.expansions.iter().map(|(&a, &b)| (a, b))
     }
 
@@ -637,6 +672,14 @@ impl ExpressionStoreSourceMap {
         self.pat_map.get(&node.map(AstPtr::new)).cloned()
     }
 
+    pub fn type_syntax(&self, id: TypeRefId) -> Result<TypeSource, SyntheticSyntax> {
+        self.types_map_back.get(id).cloned().ok_or(SyntheticSyntax)
+    }
+
+    pub fn node_type(&self, node: InFile<&ast::Type>) -> Option<TypeRefId> {
+        self.types_map.get(&node.map(AstPtr::new)).cloned()
+    }
+
     pub fn label_syntax(&self, label: LabelId) -> LabelSource {
         self.label_map_back[label]
     }
@@ -663,10 +706,14 @@ impl ExpressionStoreSourceMap {
         self.expr_map.get(&src).copied()
     }
 
-    pub fn expansions(&self) -> impl Iterator<Item = (&InFile<MacroCallPtr>, &MacroFileId)> {
+    pub fn expansions(&self) -> impl Iterator<Item = (&InFile<MacroCallPtr>, &MacroCallId)> {
         self.expansions.iter()
     }
 
+    pub fn expansion(&self, node: InFile<&ast::MacroCall>) -> Option<MacroCallId> {
+        self.expansions.get(&node.map(AstPtr::new)).copied()
+    }
+
     pub fn implicit_format_args(
         &self,
         node: InFile<&ast::FormatArgsExpr>,
@@ -716,7 +763,10 @@ impl ExpressionStoreSourceMap {
             template_map,
             diagnostics,
             binding_definitions,
-            types,
+            types_map,
+            types_map_back,
+            lifetime_map_back,
+            lifetime_map,
         } = self;
         if let Some(template_map) = template_map {
             let FormatTemplate {
@@ -739,6 +789,9 @@ impl ExpressionStoreSourceMap {
         expansions.shrink_to_fit();
         diagnostics.shrink_to_fit();
         binding_definitions.shrink_to_fit();
-        types.shrink_to_fit();
+        types_map.shrink_to_fit();
+        types_map_back.shrink_to_fit();
+        lifetime_map.shrink_to_fit();
+        lifetime_map_back.shrink_to_fit();
     }
 }
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/body.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/body.rs
index a55fec4f8b1e1..fb6d931e0e4c9 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/body.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/body.rs
@@ -3,19 +3,18 @@
 use std::ops;
 
 use hir_expand::{InFile, Lookup};
-use la_arena::{Idx, RawIdx};
 use span::Edition;
 use syntax::ast;
 use triomphe::Arc;
 
 use crate::{
+    DefWithBodyId, HasModule,
     db::DefDatabase,
-    expander::Expander,
-    expr_store::{lower, pretty, ExpressionStore, ExpressionStoreSourceMap, SelfParamPtr},
+    expr_store::{
+        ExpressionStore, ExpressionStoreSourceMap, SelfParamPtr, lower::lower_body, pretty,
+    },
     hir::{BindingId, ExprId, PatId},
-    item_tree::AttrOwner,
     src::HasSource,
-    DefWithBodyId, HasModule,
 };
 
 /// The body of an item (function, const etc.).
@@ -79,31 +78,10 @@ impl Body {
         let InFile { file_id, value: body } = {
             match def {
                 DefWithBodyId::FunctionId(f) => {
-                    let data = db.function_data(f);
                     let f = f.lookup(db);
                     let src = f.source(db);
-                    params = src.value.param_list().map(move |param_list| {
-                        let item_tree = f.id.item_tree(db);
-                        let func = &item_tree[f.id.value];
-                        let krate = f.container.module(db).krate;
-                        let crate_graph = db.crate_graph();
-                        (
-                            param_list,
-                            (0..func.params.len()).map(move |idx| {
-                                item_tree
-                                    .attrs(
-                                        db,
-                                        krate,
-                                        AttrOwner::Param(
-                                            f.id.value,
-                                            Idx::from_raw(RawIdx::from(idx as u32)),
-                                        ),
-                                    )
-                                    .is_cfg_enabled(&crate_graph[krate].cfg_options)
-                            }),
-                        )
-                    });
-                    is_async_fn = data.is_async();
+                    params = src.value.param_list();
+                    is_async_fn = src.value.async_token().is_some();
                     src.map(|it| it.body().map(ast::Expr::from))
                 }
                 DefWithBodyId::ConstId(c) => {
@@ -121,13 +99,11 @@ impl Body {
                     let src = s.source(db);
                     src.map(|it| it.expr())
                 }
-                DefWithBodyId::InTypeConstId(c) => c.lookup(db).id.map(|_| c.source(db).expr()),
             }
         };
         let module = def.module(db);
-        let expander = Expander::new(db, file_id, module);
         let (body, mut source_map) =
-            lower::lower_body(db, def, expander, params, body, module.krate, is_async_fn);
+            lower_body(db, def, file_id, module, params, body, is_async_fn);
         source_map.store.shrink_to_fit();
 
         (Arc::new(body), Arc::new(source_map))
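
With this change `Body::body_with_source_map` no longer precomputes per-parameter cfg flags from the item tree (`AttrOwner::Param`) and zips them with the parameter list; the raw `ast::ParamList` goes to lowering, and the collector asks `Expander::is_cfg_enabled` for each parameter as it goes. A small, std-only sketch of the two shapes (the `Param` type and both predicates are illustrative assumptions, not the real API):

```rust
#[derive(Debug, PartialEq)]
struct Param(&'static str);

// Old shape: the caller precomputes enabled-ness and zips it in.
fn lower_zipped(params: Vec<Param>, enabled: impl Iterator<Item = bool>) -> Vec<Param> {
    params.into_iter().zip(enabled).filter(|(_, on)| *on).map(|(p, _)| p).collect()
}

// New shape: lowering owns the check and filters with a predicate, the way
// `ExprCollector` now calls `expander.is_cfg_enabled(db, krate, &param)`.
fn lower_with_predicate(params: Vec<Param>, is_cfg_enabled: impl Fn(&Param) -> bool) -> Vec<Param> {
    params.into_iter().filter(|p| is_cfg_enabled(p)).collect()
}

fn main() {
    let params = || vec![Param("a"), Param("cfg_off"), Param("b")];
    let old = lower_zipped(params(), [true, false, true].into_iter());
    let new = lower_with_predicate(params(), |p| p.0 != "cfg_off");
    assert_eq!(old, new);
}
```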
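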
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/expander.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/expander.rs
new file mode 100644
index 0000000000000..7eec913dd654b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/expander.rs
@@ -0,0 +1,220 @@
+//! Macro expansion utilities.
+
+use std::mem;
+
+use base_db::Crate;
+use drop_bomb::DropBomb;
+use hir_expand::{
+    ExpandError, ExpandErrorKind, ExpandResult, HirFileId, InFile, Lookup, MacroCallId,
+    attrs::RawAttrs, eager::EagerCallBackFn, mod_path::ModPath, span_map::SpanMap,
+};
+use span::{AstIdMap, Edition, SyntaxContext};
+use syntax::ast::HasAttrs;
+use syntax::{Parse, ast};
+use triomphe::Arc;
+use tt::TextRange;
+
+use crate::attr::Attrs;
+use crate::expr_store::HygieneId;
+use crate::nameres::DefMap;
+use crate::{AsMacroCall, MacroId, UnresolvedMacro, db::DefDatabase};
+
+#[derive(Debug)]
+pub(super) struct Expander {
+    span_map: SpanMap,
+    current_file_id: HirFileId,
+    ast_id_map: Arc<AstIdMap>,
+    /// `recursion_depth == u32::MAX` indicates that the recursion limit has been reached.
+    recursion_depth: u32,
+    recursion_limit: usize,
+}
+
+impl Expander {
+    pub(super) fn new(
+        db: &dyn DefDatabase,
+        current_file_id: HirFileId,
+        def_map: &DefMap,
+    ) -> Expander {
+        let recursion_limit = def_map.recursion_limit() as usize;
+        let recursion_limit = if cfg!(test) {
+            // Without this, `body::tests::your_stack_belongs_to_me` stack-overflows in debug
+            std::cmp::min(32, recursion_limit)
+        } else {
+            recursion_limit
+        };
+        Expander {
+            current_file_id,
+            recursion_depth: 0,
+            recursion_limit,
+            span_map: db.span_map(current_file_id),
+            ast_id_map: db.ast_id_map(current_file_id),
+        }
+    }
+
+    pub(super) fn ctx_for_range(&self, range: TextRange) -> SyntaxContext {
+        self.span_map.span_for_range(range).ctx
+    }
+
+    pub(super) fn hygiene_for_range(&self, db: &dyn DefDatabase, range: TextRange) -> HygieneId {
+        match self.span_map.as_ref() {
+            hir_expand::span_map::SpanMapRef::ExpansionSpanMap(span_map) => {
+                HygieneId::new(span_map.span_at(range.start()).ctx.opaque_and_semitransparent(db))
+            }
+            hir_expand::span_map::SpanMapRef::RealSpanMap(_) => HygieneId::ROOT,
+        }
+    }
+
+    pub(super) fn attrs(
+        &self,
+        db: &dyn DefDatabase,
+        krate: Crate,
+        has_attrs: &dyn HasAttrs,
+    ) -> Attrs {
+        Attrs::filter(db, krate, RawAttrs::new(db, has_attrs, self.span_map.as_ref()))
+    }
+
+    pub(super) fn is_cfg_enabled(
+        &self,
+        db: &dyn DefDatabase,
+        krate: Crate,
+        has_attrs: &dyn HasAttrs,
+    ) -> bool {
+        self.attrs(db, krate, has_attrs).is_cfg_enabled(krate.cfg_options(db))
+    }
+
+    pub(super) fn call_syntax_ctx(&self) -> SyntaxContext {
+        // FIXME:
+        SyntaxContext::root(Edition::CURRENT_FIXME)
+    }
+
+    pub(super) fn enter_expand<T: ast::AstNode>(
+        &mut self,
+        db: &dyn DefDatabase,
+        macro_call: ast::MacroCall,
+        krate: Crate,
+        resolver: impl Fn(&ModPath) -> Option<MacroId>,
+        eager_callback: EagerCallBackFn<'_>,
+    ) -> Result<ExpandResult<Option<(Mark, Option<Parse<T>>)>>, UnresolvedMacro> {
+        // FIXME: within_limit should support this, instead of us having to extract the error
+        let mut unresolved_macro_err = None;
+
+        let result = self.within_limit(db, |this| {
+            let macro_call = this.in_file(&macro_call);
+            match macro_call.as_call_id_with_errors(
+                db,
+                krate,
+                |path| resolver(path).map(|it| db.macro_def(it)),
+                eager_callback,
+            ) {
+                Ok(call_id) => call_id,
+                Err(resolve_err) => {
+                    unresolved_macro_err = Some(resolve_err);
+                    ExpandResult { value: None, err: None }
+                }
+            }
+        });
+
+        if let Some(err) = unresolved_macro_err { Err(err) } else { Ok(result) }
+    }
+
+    pub(super) fn enter_expand_id<T: ast::AstNode>(
+        &mut self,
+        db: &dyn DefDatabase,
+        call_id: MacroCallId,
+    ) -> ExpandResult<Option<(Mark, Option<Parse<T>>)>> {
+        self.within_limit(db, |_this| ExpandResult::ok(Some(call_id)))
+    }
+
+    pub(super) fn exit(&mut self, Mark { file_id, span_map, ast_id_map, mut bomb }: Mark) {
+        self.span_map = span_map;
+        self.current_file_id = file_id;
+        self.ast_id_map = ast_id_map;
+        if self.recursion_depth == u32::MAX {
+            // Recursion limit has been reached somewhere in the macro expansion tree. Reset the
+            // depth only when we get out of the tree.
+            if !self.current_file_id.is_macro() {
+                self.recursion_depth = 0;
+            }
+        } else {
+            self.recursion_depth -= 1;
+        }
+        bomb.defuse();
+    }
+
+    pub(super) fn in_file<T>(&self, value: T) -> InFile<T> {
+        InFile { file_id: self.current_file_id, value }
+    }
+
+    pub(super) fn current_file_id(&self) -> HirFileId {
+        self.current_file_id
+    }
+
+    fn within_limit<F, T: ast::AstNode>(
+        &mut self,
+        db: &dyn DefDatabase,
+        op: F,
+    ) -> ExpandResult<Option<(Mark, Option<Parse<T>>)>>
+    where
+        F: FnOnce(&mut Self) -> ExpandResult<Option<MacroCallId>>,
+    {
+        if self.recursion_depth == u32::MAX {
+            // Recursion limit has been reached somewhere in the macro expansion tree. We should
+            // stop expanding other macro calls in this tree, or else this may result in an
+            // exponential number of macro expansions, leading to a hang.
+            //
+            // The overflow error should have been reported when it occurred (see the next branch),
+            // so don't return overflow error here to avoid diagnostics duplication.
+            cov_mark::hit!(overflow_but_not_me);
+            return ExpandResult::ok(None);
+        }
+
+        let ExpandResult { value, err } = op(self);
+        let Some(call_id) = value else {
+            return ExpandResult { value: None, err };
+        };
+        if self.recursion_depth as usize > self.recursion_limit {
+            self.recursion_depth = u32::MAX;
+            cov_mark::hit!(your_stack_belongs_to_me);
+            return ExpandResult::only_err(ExpandError::new(
+                db.macro_arg_considering_derives(call_id, &call_id.lookup(db).kind).2,
+                ExpandErrorKind::RecursionOverflow,
+            ));
+        }
+
+        let res = db.parse_macro_expansion(call_id);
+
+        let err = err.or(res.err);
+        ExpandResult {
+            value: {
+                let parse = res.value.0.cast::<T>();
+
+                self.recursion_depth += 1;
+                let old_file_id = std::mem::replace(&mut self.current_file_id, call_id.into());
+                let old_span_map =
+                    std::mem::replace(&mut self.span_map, db.span_map(self.current_file_id));
+                let prev_ast_id_map =
+                    mem::replace(&mut self.ast_id_map, db.ast_id_map(self.current_file_id));
+                let mark = Mark {
+                    file_id: old_file_id,
+                    span_map: old_span_map,
+                    ast_id_map: prev_ast_id_map,
+                    bomb: DropBomb::new("expansion mark dropped"),
+                };
+                Some((mark, parse))
+            },
+            err,
+        }
+    }
+
+    pub(super) fn ast_id_map(&self) -> &AstIdMap {
+        &self.ast_id_map
+    }
+}
+
+#[derive(Debug)]
+pub(super) struct Mark {
+    file_id: HirFileId,
+    span_map: SpanMap,
+    ast_id_map: Arc<AstIdMap>,
+    bomb: DropBomb,
+}
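
The depth bookkeeping in `within_limit` and `exit` above is the subtle part of this new file: the depth is incremented per successful expansion, pinned to the `u32::MAX` sentinel once the limit is exceeded (so sibling expansions in the same tree are skipped rather than re-reported), and reset only when expansion leaves macro files entirely. Below is a self-contained sketch of just that counter logic, with the expansion machinery (`MacroCallId`, `Mark`, `DropBomb`) stripped away; the `DepthGuard` and `Enter` names are illustrative.

```rust
/// Minimal model of the recursion accounting in `Expander::within_limit` / `exit`.
struct DepthGuard {
    depth: u32,
    limit: u32,
}

enum Enter {
    /// Proceed with this expansion (depth was bumped).
    Expand,
    /// The limit was hit right now: report an overflow error once.
    Overflow,
    /// The limit was already hit somewhere above: silently skip.
    Skip,
}

impl DepthGuard {
    fn enter(&mut self) -> Enter {
        if self.depth == u32::MAX {
            // Already overflowed somewhere in this expansion tree; expanding
            // further could do exponential work.
            return Enter::Skip;
        }
        if self.depth > self.limit {
            self.depth = u32::MAX; // pin the sentinel
            return Enter::Overflow;
        }
        self.depth += 1;
        Enter::Expand
    }

    fn exit(&mut self, back_in_real_file: bool) {
        if self.depth == u32::MAX {
            // Only reset once we are fully out of the macro expansion tree.
            if back_in_real_file {
                self.depth = 0;
            }
        } else {
            self.depth -= 1;
        }
    }
}

fn main() {
    let mut g = DepthGuard { depth: 0, limit: 1 };
    assert!(matches!(g.enter(), Enter::Expand));   // depth 1
    assert!(matches!(g.enter(), Enter::Expand));   // depth 2
    assert!(matches!(g.enter(), Enter::Overflow)); // exceeds the limit: reported once
    assert!(matches!(g.enter(), Enter::Skip));     // siblings are skipped silently
    g.exit(false); // still inside a macro file: sentinel stays
    g.exit(true);  // back in a real file: counter resets
    assert!(matches!(g.enter(), Enter::Expand));
}
```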
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs
index 6e505a6b1126e..7f907fdba8e3c 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs
@@ -2,69 +2,77 @@
 //! representation.
 
 mod asm;
+mod generics;
+mod path;
 
 use std::mem;
 
-use base_db::CrateId;
 use either::Either;
 use hir_expand::{
+    HirFileId, InFile, Lookup, MacroDefId,
     mod_path::tool_path,
     name::{AsName, Name},
-    span_map::{ExpansionSpanMap, SpanMap},
-    InFile, MacroDefId,
 };
-use intern::{sym, Symbol};
+use intern::{Symbol, sym};
 use rustc_hash::FxHashMap;
-use span::AstIdMap;
 use stdx::never;
 use syntax::{
+    AstNode, AstPtr, AstToken as _, SyntaxNodePtr,
     ast::{
         self, ArrayExprKind, AstChildren, BlockExpr, HasArgList, HasAttrs, HasGenericArgs,
-        HasLoopBody, HasName, RangeItem, SlicePatComponents,
+        HasGenericParams, HasLoopBody, HasName, HasTypeBounds, IsString, RangeItem,
+        SlicePatComponents,
     },
-    AstNode, AstPtr, AstToken as _, SyntaxNodePtr,
 };
-use text_size::TextSize;
+use thin_vec::ThinVec;
 use triomphe::Arc;
+use tt::TextRange;
 
 use crate::{
-    attr::Attrs,
+    AdtId, BlockId, BlockLoc, DefWithBodyId, FunctionId, GenericDefId, ImplId, ItemTreeLoc,
+    MacroId, ModuleDefId, ModuleId, TraitAliasId, TraitId, TypeAliasId, UnresolvedMacro,
     builtin_type::BuiltinUint,
-    data::adt::StructKind,
     db::DefDatabase,
-    expander::Expander,
     expr_store::{
         Body, BodySourceMap, ExprPtr, ExpressionStore, ExpressionStoreBuilder,
-        ExpressionStoreDiagnostics, ExpressionStoreSourceMap, HygieneId, LabelPtr, PatPtr,
+        ExpressionStoreDiagnostics, ExpressionStoreSourceMap, HygieneId, LabelPtr, LifetimePtr,
+        PatPtr, TypePtr,
+        expander::Expander,
+        lower::generics::ImplTraitLowerFn,
+        path::{AssociatedTypeBinding, GenericArg, GenericArgs, GenericArgsParentheses, Path},
     },
     hir::{
+        Array, Binding, BindingAnnotation, BindingId, BindingProblems, CaptureBy, ClosureKind,
+        Expr, ExprId, Item, Label, LabelId, Literal, MatchArm, Movability, OffsetOf, Pat, PatId,
+        RecordFieldPat, RecordLitField, Statement,
         format_args::{
             self, FormatAlignment, FormatArgs, FormatArgsPiece, FormatArgument, FormatArgumentKind,
             FormatArgumentsCollector, FormatCount, FormatDebugHex, FormatOptions,
             FormatPlaceholder, FormatSign, FormatTrait,
         },
-        Array, Binding, BindingAnnotation, BindingId, BindingProblems, CaptureBy, ClosureKind,
-        Expr, ExprId, Item, Label, LabelId, Literal, MatchArm, Movability, OffsetOf, Pat, PatId,
-        RecordFieldPat, RecordLitField, Statement,
+        generics::GenericParams,
     },
     item_scope::BuiltinShadowMode,
+    item_tree::FieldsShape,
     lang_item::LangItem,
-    lower::LowerCtx,
-    nameres::{DefMap, MacroSubNs},
-    path::{GenericArgs, Path},
-    type_ref::{Mutability, Rawness, TypeRef},
-    AdtId, BlockId, BlockLoc, ConstBlockLoc, DefWithBodyId, MacroId, ModuleDefId, UnresolvedMacro,
+    nameres::{DefMap, LocalDefMap, MacroSubNs},
+    type_ref::{
+        ArrayType, ConstRef, FnType, LifetimeRef, LifetimeRefId, Mutability, PathId, Rawness,
+        RefType, TraitBoundModifier, TraitRef, TypeBound, TypeRef, TypeRefId, UseArgRef,
+    },
 };
 
+pub use self::path::hir_segment_to_ast_segment;
+
 type FxIndexSet<K> = indexmap::IndexSet<K, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;
 
 pub(super) fn lower_body(
     db: &dyn DefDatabase,
     owner: DefWithBodyId,
-    expander: Expander,
-    parameters: Option<(ast::ParamList, impl Iterator<Item = bool>)>,
+    current_file_id: HirFileId,
+    module: ModuleId,
+    parameters: Option<ast::ParamList>,
     body: Option<ast::Expr>,
-    krate: CrateId,
     is_async_fn: bool,
 ) -> (Body, BodySourceMap) {
     // We cannot leave the root span map empty and let any identifier from it be treated as root,
@@ -72,23 +80,18 @@ pub(super) fn lower_body(
     // with the inner macro, and that will cause confusion because they won't be the same as `ROOT`
     // even though they should be the same. Also, when the body comes from multiple expansions, their
     // hygiene is different.
-    let span_map = expander.current_file_id().macro_file().map(|_| {
-        let SpanMap::ExpansionSpanMap(span_map) = expander.span_map(db) else {
-            panic!("in a macro file there should be `ExpansionSpanMap`");
-        };
-        Arc::clone(span_map)
-    });
+
+    let krate = module.krate();
 
     let mut self_param = None;
     let mut source_map_self_param = None;
     let mut params = vec![];
-    let mut collector = ExprCollector::new(db, owner, expander, krate, span_map);
+    let mut collector = ExprCollector::new(db, module, current_file_id);
 
     let skip_body = match owner {
         DefWithBodyId::FunctionId(it) => db.attrs(it.into()),
         DefWithBodyId::StaticId(it) => db.attrs(it.into()),
         DefWithBodyId::ConstId(it) => db.attrs(it.into()),
-        DefWithBodyId::InTypeConstId(_) => Attrs::EMPTY,
         DefWithBodyId::VariantId(it) => db.attrs(it.into()),
     }
     .rust_analyzer_tool()
@@ -96,26 +99,31 @@ pub(super) fn lower_body(
     // If #[rust_analyzer::skip] annotated, only construct enough information for the signature
     // and skip the body.
     if skip_body {
-        if let Some((param_list, mut attr_enabled)) = parameters {
-            if let Some(self_param_syn) =
-                param_list.self_param().filter(|_| attr_enabled.next().unwrap_or(false))
+        if let Some(param_list) = parameters {
+            if let Some(self_param_syn) = param_list
+                .self_param()
+                .filter(|self_param| collector.expander.is_cfg_enabled(db, krate, self_param))
             {
                 let is_mutable =
                     self_param_syn.mut_token().is_some() && self_param_syn.amp_token().is_none();
+                let hygiene = self_param_syn
+                    .name()
+                    .map(|name| collector.hygiene_id_for(name.syntax().text_range()))
+                    .unwrap_or(HygieneId::ROOT);
                 let binding_id: la_arena::Idx<Binding> = collector.alloc_binding(
-                    Name::new_symbol_root(sym::self_.clone()),
+                    Name::new_symbol_root(sym::self_),
                     BindingAnnotation::new(is_mutable, false),
+                    hygiene,
                 );
                 self_param = Some(binding_id);
                 source_map_self_param =
                     Some(collector.expander.in_file(AstPtr::new(&self_param_syn)));
             }
-            params = param_list
+            let count = param_list
                 .params()
-                .zip(attr_enabled)
-                .filter(|(_, enabled)| *enabled)
-                .map(|_| collector.missing_pat())
-                .collect();
+                .filter(|it| collector.expander.is_cfg_enabled(db, krate, it))
+                .count();
+            params = (0..count).map(|_| collector.missing_pat()).collect();
         };
         let body_expr = collector.missing_expr();
         return (
@@ -129,30 +137,30 @@ pub(super) fn lower_body(
         );
     }
 
-    if let Some((param_list, mut attr_enabled)) = parameters {
+    if let Some(param_list) = parameters {
         if let Some(self_param_syn) =
-            param_list.self_param().filter(|_| attr_enabled.next().unwrap_or(false))
+            param_list.self_param().filter(|it| collector.expander.is_cfg_enabled(db, krate, it))
         {
             let is_mutable =
                 self_param_syn.mut_token().is_some() && self_param_syn.amp_token().is_none();
-            let binding_id: la_arena::Idx<Binding> = collector.alloc_binding(
-                Name::new_symbol_root(sym::self_.clone()),
-                BindingAnnotation::new(is_mutable, false),
-            );
             let hygiene = self_param_syn
                 .name()
-                .map(|name| collector.hygiene_id_for(name.syntax().text_range().start()))
+                .map(|name| collector.hygiene_id_for(name.syntax().text_range()))
                 .unwrap_or(HygieneId::ROOT);
-            if !hygiene.is_root() {
-                collector.store.binding_hygiene.insert(binding_id, hygiene);
-            }
+            let binding_id: la_arena::Idx<Binding> = collector.alloc_binding(
+                Name::new_symbol_root(sym::self_),
+                BindingAnnotation::new(is_mutable, false),
+                hygiene,
+            );
             self_param = Some(binding_id);
             source_map_self_param = Some(collector.expander.in_file(AstPtr::new(&self_param_syn)));
         }
 
-        for (param, _) in param_list.params().zip(attr_enabled).filter(|(_, enabled)| *enabled) {
-            let param_pat = collector.collect_pat_top(param.pat());
-            params.push(param_pat);
+        for param in param_list.params() {
+            if collector.expander.is_cfg_enabled(db, krate, &param) {
+                let param_pat = collector.collect_pat_top(param.pat());
+                params.push(param_pat);
+            }
         }
     };
 
@@ -164,9 +172,7 @@ pub(super) fn lower_body(
             match owner {
                 DefWithBodyId::FunctionId(..) => Awaitable::No("non-async function"),
                 DefWithBodyId::StaticId(..) => Awaitable::No("static"),
-                DefWithBodyId::ConstId(..) | DefWithBodyId::InTypeConstId(..) => {
-                    Awaitable::No("constant")
-                }
+                DefWithBodyId::ConstId(..) => Awaitable::No("constant"),
                 DefWithBodyId::VariantId(..) => Awaitable::No("enum variant"),
             }
         },
@@ -183,41 +189,268 @@ pub(super) fn lower_body(
     )
 }
 
-#[allow(dead_code)]
-pub(super) fn lower(
+pub(crate) fn lower_type_ref(
     db: &dyn DefDatabase,
-    owner: ExprStoreOwnerId,
-    expander: Expander,
-    body: Option<ast::Expr>,
-    krate: CrateId,
-) -> (ExpressionStore, ExpressionStoreSourceMap) {
-    // We cannot leave the root span map empty and let any identifier from it be treated as root,
-    // because when inside nested macros `SyntaxContextId`s from the outer macro will be interleaved
-    // with the inner macro, and that will cause confusion because they won't be the same as `ROOT`
-    // even though they should be the same. Also, when the body comes from multiple expansions, their
-    // hygiene is different.
-    let span_map = expander.current_file_id().macro_file().map(|_| {
-        let SpanMap::ExpansionSpanMap(span_map) = expander.span_map(db) else {
-            panic!("in a macro file there should be `ExpansionSpanMap`");
-        };
-        Arc::clone(span_map)
+    module: ModuleId,
+    type_ref: InFile<Option<ast::Type>>,
+) -> (ExpressionStore, ExpressionStoreSourceMap, TypeRefId) {
+    let mut expr_collector = ExprCollector::new(db, module, type_ref.file_id);
+    let type_ref =
+        expr_collector.lower_type_ref_opt(type_ref.value, &mut ExprCollector::impl_trait_allocator);
+    (expr_collector.store.finish(), expr_collector.source_map, type_ref)
+}
+
+pub(crate) fn lower_generic_params(
+    db: &dyn DefDatabase,
+    module: ModuleId,
+    def: GenericDefId,
+    file_id: HirFileId,
+    param_list: Option<ast::GenericParamList>,
+    where_clause: Option<ast::WhereClause>,
+) -> (Arc<ExpressionStore>, Arc<GenericParams>, ExpressionStoreSourceMap) {
+    let mut expr_collector = ExprCollector::new(db, module, file_id);
+    let mut collector = generics::GenericParamsCollector::new(def);
+    collector.lower(&mut expr_collector, param_list, where_clause);
+    let params = collector.finish();
+    (Arc::new(expr_collector.store.finish()), params, expr_collector.source_map)
+}
+
+pub(crate) fn lower_impl(
+    db: &dyn DefDatabase,
+    module: ModuleId,
+    impl_syntax: InFile<ast::Impl>,
+    impl_id: ImplId,
+) -> (ExpressionStore, ExpressionStoreSourceMap, TypeRefId, Option<TraitRef>, Arc<GenericParams>) {
+    let mut expr_collector = ExprCollector::new(db, module, impl_syntax.file_id);
+    let self_ty =
+        expr_collector.lower_type_ref_opt_disallow_impl_trait(impl_syntax.value.self_ty());
+    let trait_ = impl_syntax.value.trait_().and_then(|it| match &it {
+        ast::Type::PathType(path_type) => {
+            let path = expr_collector
+                .lower_path_type(path_type, &mut ExprCollector::impl_trait_allocator)?;
+            Some(TraitRef { path: expr_collector.alloc_path(path, AstPtr::new(&it)) })
+        }
+        _ => None,
     });
-    let mut expr_collector = ExprCollector::new(db, owner, expander, krate, span_map);
-    expr_collector.collect(body, Awaitable::No("?"));
-    (expr_collector.store.finish(), expr_collector.source_map)
+    let mut collector = generics::GenericParamsCollector::new(impl_id.into());
+    collector.lower(
+        &mut expr_collector,
+        impl_syntax.value.generic_param_list(),
+        impl_syntax.value.where_clause(),
+    );
+    let params = collector.finish();
+    (expr_collector.store.finish(), expr_collector.source_map, self_ty, trait_, params)
+}
+
+pub(crate) fn lower_trait(
+    db: &dyn DefDatabase,
+    module: ModuleId,
+    trait_syntax: InFile<ast::Trait>,
+    trait_id: TraitId,
+) -> (ExpressionStore, ExpressionStoreSourceMap, Arc<GenericParams>) {
+    let mut expr_collector = ExprCollector::new(db, module, trait_syntax.file_id);
+    let mut collector = generics::GenericParamsCollector::with_self_param(
+        &mut expr_collector,
+        trait_id.into(),
+        trait_syntax.value.type_bound_list(),
+    );
+    collector.lower(
+        &mut expr_collector,
+        trait_syntax.value.generic_param_list(),
+        trait_syntax.value.where_clause(),
+    );
+    let params = collector.finish();
+    (expr_collector.store.finish(), expr_collector.source_map, params)
+}
+
+pub(crate) fn lower_trait_alias(
+    db: &dyn DefDatabase,
+    module: ModuleId,
+    trait_syntax: InFile<ast::TraitAlias>,
+    trait_id: TraitAliasId,
+) -> (ExpressionStore, ExpressionStoreSourceMap, Arc<GenericParams>) {
+    let mut expr_collector = ExprCollector::new(db, module, trait_syntax.file_id);
+    let mut collector = generics::GenericParamsCollector::with_self_param(
+        &mut expr_collector,
+        trait_id.into(),
+        trait_syntax.value.type_bound_list(),
+    );
+    collector.lower(
+        &mut expr_collector,
+        trait_syntax.value.generic_param_list(),
+        trait_syntax.value.where_clause(),
+    );
+    let params = collector.finish();
+    (expr_collector.store.finish(), expr_collector.source_map, params)
+}
+
+pub(crate) fn lower_type_alias(
+    db: &dyn DefDatabase,
+    module: ModuleId,
+    alias: InFile<ast::TypeAlias>,
+    type_alias_id: TypeAliasId,
+) -> (
+    ExpressionStore,
+    ExpressionStoreSourceMap,
+    Arc<GenericParams>,
+    Box<[TypeBound]>,
+    Option<TypeRefId>,
+) {
+    let mut expr_collector = ExprCollector::new(db, module, alias.file_id);
+    let bounds = alias
+        .value
+        .type_bound_list()
+        .map(|bounds| {
+            bounds
+                .bounds()
+                .map(|bound| {
+                    expr_collector.lower_type_bound(bound, &mut ExprCollector::impl_trait_allocator)
+                })
+                .collect()
+        })
+        .unwrap_or_default();
+    let mut collector = generics::GenericParamsCollector::new(type_alias_id.into());
+    collector.lower(
+        &mut expr_collector,
+        alias.value.generic_param_list(),
+        alias.value.where_clause(),
+    );
+    let params = collector.finish();
+    let type_ref = alias
+        .value
+        .ty()
+        .map(|ty| expr_collector.lower_type_ref(ty, &mut ExprCollector::impl_trait_allocator));
+    (expr_collector.store.finish(), expr_collector.source_map, params, bounds, type_ref)
 }
 
-type ExprStoreOwnerId = DefWithBodyId;
+pub(crate) fn lower_function(
+    db: &dyn DefDatabase,
+    module: ModuleId,
+    fn_: InFile<ast::Fn>,
+    function_id: FunctionId,
+) -> (
+    ExpressionStore,
+    ExpressionStoreSourceMap,
+    Arc<GenericParams>,
+    Box<[TypeRefId]>,
+    Option<TypeRefId>,
+    bool,
+    bool,
+) {
+    let mut expr_collector = ExprCollector::new(db, module, fn_.file_id);
+    let mut collector = generics::GenericParamsCollector::new(function_id.into());
+    collector.lower(&mut expr_collector, fn_.value.generic_param_list(), fn_.value.where_clause());
+    let mut params = vec![];
+    let mut has_self_param = false;
+    let mut has_variadic = false;
+    collector.collect_impl_trait(&mut expr_collector, |collector, mut impl_trait_lower_fn| {
+        if let Some(param_list) = fn_.value.param_list() {
+            if let Some(param) = param_list.self_param() {
+                let enabled = collector.expander.is_cfg_enabled(db, module.krate(), &param);
+                if enabled {
+                    has_self_param = true;
+                    params.push(match param.ty() {
+                        Some(ty) => collector.lower_type_ref(ty, &mut impl_trait_lower_fn),
+                        None => {
+                            let self_type = collector.alloc_type_ref_desugared(TypeRef::Path(
+                                Name::new_symbol_root(sym::Self_).into(),
+                            ));
+                            let lifetime = param
+                                .lifetime()
+                                .map(|lifetime| collector.lower_lifetime_ref(lifetime));
+                            match param.kind() {
+                                ast::SelfParamKind::Owned => self_type,
+                                ast::SelfParamKind::Ref => collector.alloc_type_ref_desugared(
+                                    TypeRef::Reference(Box::new(RefType {
+                                        ty: self_type,
+                                        lifetime,
+                                        mutability: Mutability::Shared,
+                                    })),
+                                ),
+                                ast::SelfParamKind::MutRef => collector.alloc_type_ref_desugared(
+                                    TypeRef::Reference(Box::new(RefType {
+                                        ty: self_type,
+                                        lifetime,
+                                        mutability: Mutability::Mut,
+                                    })),
+                                ),
+                            }
+                        }
+                    });
+                }
+            }
+            let p = param_list
+                .params()
+                .filter(|param| collector.expander.is_cfg_enabled(db, module.krate(), param))
+                .filter(|param| {
+                    let is_variadic = param.dotdotdot_token().is_some();
+                    has_variadic |= is_variadic;
+                    !is_variadic
+                })
+                .map(|param| param.ty())
+                // FIXME
+                .collect::<Vec<_>>();
+            for p in p {
+                params.push(collector.lower_type_ref_opt(p, &mut impl_trait_lower_fn));
+            }
+        }
+    });
+    let generics = collector.finish();
+    let return_type = fn_.value.ret_type().map(|ret_type| {
+        expr_collector.lower_type_ref_opt(ret_type.ty(), &mut ExprCollector::impl_trait_allocator)
+    });
+
+    let return_type = if fn_.value.async_token().is_some() {
+        let path = hir_expand::mod_path::path![core::future::Future];
+        let mut generic_args: Vec<_> =
+            std::iter::repeat_n(None, path.segments().len() - 1).collect();
+        let binding = AssociatedTypeBinding {
+            name: Name::new_symbol_root(sym::Output),
+            args: None,
+            type_ref: Some(
+                return_type
+                    .unwrap_or_else(|| expr_collector.alloc_type_ref_desugared(TypeRef::unit())),
+            ),
+            bounds: Box::default(),
+        };
+        generic_args
+            .push(Some(GenericArgs { bindings: Box::new([binding]), ..GenericArgs::empty() }));
+
+        let path = Path::from_known_path(path, generic_args);
+        let path = PathId::from_type_ref_unchecked(
+            expr_collector.alloc_type_ref_desugared(TypeRef::Path(path)),
+        );
+        let ty_bound = TypeBound::Path(path, TraitBoundModifier::None);
+        Some(
+            expr_collector
+                .alloc_type_ref_desugared(TypeRef::ImplTrait(ThinVec::from_iter([ty_bound]))),
+        )
+    } else {
+        return_type
+    };
+    (
+        expr_collector.store.finish(),
+        expr_collector.source_map,
+        generics,
+        params.into_boxed_slice(),
+        return_type,
+        has_self_param,
+        has_variadic,
+    )
+}
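+
+// Note on the `async fn` branch above: `lower_function` replaces the written
+// return type `R` (or `()` when there is none) with an opaque `impl Trait`
+// type whose single bound is `core::future::Future` carrying an `Output = R`
+// associated-type binding. In surface Rust the two signatures in the sketch
+// below are equivalent, which is the shape the constructed
+// `TypeRef::ImplTrait` models; the example is standalone and not part of the
+// patch.
+//
+//     use std::future::Future;
+//
+//     // What the user writes.
+//     async fn add_async(a: u32, b: u32) -> u32 {
+//         a + b
+//     }
+//
+//     // What the signature desugars to: `impl Future<Output = u32>`.
+//     fn add_desugared(a: u32, b: u32) -> impl Future<Output = u32> {
+//         async move { a + b }
+//     }
+//
+//     fn assert_same_output<F: Future<Output = u32>>(_: F) {}
+//
+//     fn main() {
+//         // Both futures have the same `Output` type; neither is polled here.
+//         assert_same_output(add_async(1, 2));
+//         assert_same_output(add_desugared(1, 2));
+//     }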
 
-struct ExprCollector<'a> {
-    db: &'a dyn DefDatabase,
+pub struct ExprCollector<'db> {
+    db: &'db dyn DefDatabase,
     expander: Expander,
-    owner: ExprStoreOwnerId,
     def_map: Arc<DefMap>,
-    ast_id_map: Arc<AstIdMap>,
-    krate: CrateId,
-    store: ExpressionStoreBuilder,
-    source_map: ExpressionStoreSourceMap,
+    local_def_map: Arc<LocalDefMap>,
+    module: ModuleId,
+    pub store: ExpressionStoreBuilder,
+    pub(crate) source_map: ExpressionStoreSourceMap,
+
+    // State tracked while lowering.
+    // Prevents nested impl traits like `impl Foo<impl Bar>`.
+    outer_impl_trait: bool,
 
     is_lowering_coroutine: bool,
 
@@ -225,17 +458,8 @@ struct ExprCollector<'a> {
     /// and we need to find the current definition. So we track the number of definitions we saw.
     current_block_legacy_macro_defs_count: FxHashMap<Name, usize>,
 
-    current_span_map: Option<Arc<ExpansionSpanMap>>,
-
     current_try_block_label: Option<LabelId>,
-    // points to the expression that a try expression will target (replaces current_try_block_label)
-    // catch_scope: Option<ExprId>,
-    // points to the expression that an unlabeled control flow will target
-    // loop_scope: Option<ExprId>,
-    // needed to diagnose non label control flow in while conditions
-    // is_in_loop_condition: bool,
-
-    // resolution
+
     label_ribs: Vec<LabelRib>,
     current_binding_owner: Option<ExprId>,
 
@@ -292,13 +516,10 @@ impl BindingList {
         hygiene: HygieneId,
         mode: BindingAnnotation,
     ) -> BindingId {
-        let id = *self.map.entry((name, hygiene)).or_insert_with_key(|(name, _)| {
-            let id = ec.alloc_binding(name.clone(), mode);
-            if !hygiene.is_root() {
-                ec.store.binding_hygiene.insert(id, hygiene);
-            }
-            id
-        });
+        let id = *self
+            .map
+            .entry((name, hygiene))
+            .or_insert_with_key(|(name, hygiene)| ec.alloc_binding(name.clone(), mode, *hygiene));
         if ec.store.bindings[id].mode != mode {
             ec.store.bindings[id].problems = Some(BindingProblems::BoundInconsistently);
         }
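
`BindingList::find` now allocates the binding inside a single `entry(..).or_insert_with_key(..)` call and passes the hygiene straight to `alloc_binding`, rather than patching `binding_hygiene` afterwards. The same interning idiom, keyed by a `(name, hygiene)` pair, in a self-contained sketch (the `Store` and `HygieneId` stand-ins are simplified assumptions, not the real types):

```rust
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct HygieneId(u32);
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct BindingId(usize);

/// Stand-in for the expression store that owns the bindings arena.
#[derive(Default)]
struct Store {
    bindings: Vec<(String, HygieneId)>,
}

impl Store {
    fn alloc_binding(&mut self, name: String, hygiene: HygieneId) -> BindingId {
        let id = BindingId(self.bindings.len());
        self.bindings.push((name, hygiene));
        id
    }
}

/// Stand-in for `BindingList`: interns bindings by (name, hygiene).
#[derive(Default)]
struct BindingList {
    map: HashMap<(String, HygieneId), BindingId>,
}

impl BindingList {
    fn find(&mut self, store: &mut Store, name: String, hygiene: HygieneId) -> BindingId {
        // Allocate only the first time this (name, hygiene) pair is seen; the
        // key handed back by `or_insert_with_key` lets us reuse the moved-in
        // name instead of cloning it eagerly.
        *self
            .map
            .entry((name, hygiene))
            .or_insert_with_key(|(name, hygiene)| store.alloc_binding(name.clone(), *hygiene))
    }
}

fn main() {
    let mut store = Store::default();
    let mut list = BindingList::default();
    let a = list.find(&mut store, "x".into(), HygieneId(0));
    let b = list.find(&mut store, "x".into(), HygieneId(0));
    let c = list.find(&mut store, "x".into(), HygieneId(1)); // different hygiene
    assert_eq!(a, b);
    assert_ne!(a, c);
    assert_eq!(store.bindings.len(), 2);
}
```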
@@ -323,20 +544,19 @@ impl BindingList {
 }
 
 impl ExprCollector<'_> {
-    fn new(
+    pub fn new(
         db: &dyn DefDatabase,
-        owner: ExprStoreOwnerId,
-        expander: Expander,
-        krate: CrateId,
-        span_map: Option<Arc<ExpansionSpanMap>>,
+        module: ModuleId,
+        current_file_id: HirFileId,
     ) -> ExprCollector<'_> {
+        let (def_map, local_def_map) = module.local_def_map(db);
+        let expander = Expander::new(db, current_file_id, &def_map);
         ExprCollector {
             db,
-            owner,
-            krate,
-            def_map: expander.module.def_map(db),
+            module,
+            def_map,
+            local_def_map,
             source_map: ExpressionStoreSourceMap::default(),
-            ast_id_map: db.ast_id_map(expander.current_file_id()),
             store: ExpressionStoreBuilder::default(),
             expander,
             current_try_block_label: None,
@@ -344,9 +564,353 @@ impl ExprCollector<'_> {
             label_ribs: Vec::new(),
             current_binding_owner: None,
             awaitable_context: None,
-            current_span_map: span_map,
             current_block_legacy_macro_defs_count: FxHashMap::default(),
+            outer_impl_trait: false,
+        }
+    }
+
+    pub fn lower_lifetime_ref(&mut self, lifetime: ast::Lifetime) -> LifetimeRefId {
+        // FIXME: Keyword check?
+        let lifetime_ref = match &*lifetime.text() {
+            "" | "'" => LifetimeRef::Error,
+            "'static" => LifetimeRef::Static,
+            "'_" => LifetimeRef::Placeholder,
+            text => LifetimeRef::Named(Name::new_lifetime(text)),
+        };
+        self.alloc_lifetime_ref(lifetime_ref, AstPtr::new(&lifetime))
+    }
+
+    pub fn lower_lifetime_ref_opt(&mut self, lifetime: Option<ast::Lifetime>) -> LifetimeRefId {
+        match lifetime {
+            Some(lifetime) => self.lower_lifetime_ref(lifetime),
+            None => self.alloc_lifetime_ref_desugared(LifetimeRef::Placeholder),
+        }
+    }
+
+    /// Lowers an `ast::Type` from the syntax tree to a `TypeRefId` in the expression store.
+    pub fn lower_type_ref(
+        &mut self,
+        node: ast::Type,
+        impl_trait_lower_fn: ImplTraitLowerFn<'_>,
+    ) -> TypeRefId {
+        let ty = match &node {
+            ast::Type::ParenType(inner) => {
+                return self.lower_type_ref_opt(inner.ty(), impl_trait_lower_fn);
+            }
+            ast::Type::TupleType(inner) => TypeRef::Tuple(ThinVec::from_iter(Vec::from_iter(
+                inner.fields().map(|it| self.lower_type_ref(it, impl_trait_lower_fn)),
+            ))),
+            ast::Type::NeverType(..) => TypeRef::Never,
+            ast::Type::PathType(inner) => inner
+                .path()
+                .and_then(|it| self.lower_path(it, impl_trait_lower_fn))
+                .map(TypeRef::Path)
+                .unwrap_or(TypeRef::Error),
+            ast::Type::PtrType(inner) => {
+                let inner_ty = self.lower_type_ref_opt(inner.ty(), impl_trait_lower_fn);
+                let mutability = Mutability::from_mutable(inner.mut_token().is_some());
+                TypeRef::RawPtr(inner_ty, mutability)
+            }
+            ast::Type::ArrayType(inner) => {
+                let len = self.lower_const_arg_opt(inner.const_arg());
+                TypeRef::Array(ArrayType {
+                    ty: self.lower_type_ref_opt(inner.ty(), impl_trait_lower_fn),
+                    len,
+                })
+            }
+            ast::Type::SliceType(inner) => {
+                TypeRef::Slice(self.lower_type_ref_opt(inner.ty(), impl_trait_lower_fn))
+            }
+            ast::Type::RefType(inner) => {
+                let inner_ty = self.lower_type_ref_opt(inner.ty(), impl_trait_lower_fn);
+                let lifetime = inner.lifetime().map(|lt| self.lower_lifetime_ref(lt));
+                let mutability = Mutability::from_mutable(inner.mut_token().is_some());
+                TypeRef::Reference(Box::new(RefType { ty: inner_ty, lifetime, mutability }))
+            }
+            ast::Type::InferType(_inner) => TypeRef::Placeholder,
+            ast::Type::FnPtrType(inner) => {
+                let ret_ty = inner
+                    .ret_type()
+                    .and_then(|rt| rt.ty())
+                    .map(|it| self.lower_type_ref(it, impl_trait_lower_fn))
+                    .unwrap_or_else(|| self.alloc_type_ref_desugared(TypeRef::unit()));
+                let mut is_varargs = false;
+                let mut params = if let Some(pl) = inner.param_list() {
+                    if let Some(param) = pl.params().last() {
+                        is_varargs = param.dotdotdot_token().is_some();
+                    }
+
+                    pl.params()
+                        .map(|it| {
+                            let type_ref = self.lower_type_ref_opt(it.ty(), impl_trait_lower_fn);
+                            let name = match it.pat() {
+                                Some(ast::Pat::IdentPat(it)) => Some(
+                                    it.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing),
+                                ),
+                                _ => None,
+                            };
+                            (name, type_ref)
+                        })
+                        .collect()
+                } else {
+                    Vec::with_capacity(1)
+                };
+                fn lower_abi(abi: ast::Abi) -> Symbol {
+                    match abi.abi_string() {
+                        Some(tok) => Symbol::intern(tok.text_without_quotes()),
+                        // A bare `extern` defaults to `extern "C"`.
+                        _ => sym::C,
+                    }
+                }
+
+                let abi = inner.abi().map(lower_abi);
+                params.push((None, ret_ty));
+                TypeRef::Fn(Box::new(FnType {
+                    is_varargs,
+                    is_unsafe: inner.unsafe_token().is_some(),
+                    abi,
+                    params: params.into_boxed_slice(),
+                }))
+            }
+            // `for` types are close enough to the inner type for our purposes for now...
+            ast::Type::ForType(inner) => {
+                return self.lower_type_ref_opt(inner.ty(), impl_trait_lower_fn);
+            }
+            ast::Type::ImplTraitType(inner) => {
+                if self.outer_impl_trait {
+                    // Disallow nested impl traits
+                    TypeRef::Error
+                } else {
+                    return self.with_outer_impl_trait_scope(true, |this| {
+                        let type_bounds =
+                            this.type_bounds_from_ast(inner.type_bound_list(), impl_trait_lower_fn);
+                        impl_trait_lower_fn(this, AstPtr::new(&node), type_bounds)
+                    });
+                }
+            }
+            ast::Type::DynTraitType(inner) => TypeRef::DynTrait(
+                self.type_bounds_from_ast(inner.type_bound_list(), impl_trait_lower_fn),
+            ),
+            ast::Type::MacroType(mt) => match mt.macro_call() {
+                Some(mcall) => {
+                    let macro_ptr = AstPtr::new(&mcall);
+                    let src = self.expander.in_file(AstPtr::new(&node));
+                    let id = self.collect_macro_call(mcall, macro_ptr, true, |this, expansion| {
+                        this.lower_type_ref_opt(expansion, impl_trait_lower_fn)
+                    });
+                    self.source_map.types_map.insert(src, id);
+                    return id;
+                }
+                None => TypeRef::Error,
+            },
+        };
+        self.alloc_type_ref(ty, AstPtr::new(&node))
+    }
+
+    pub(crate) fn lower_type_ref_disallow_impl_trait(&mut self, node: ast::Type) -> TypeRefId {
+        self.lower_type_ref(node, &mut Self::impl_trait_error_allocator)
+    }
+
+    pub(crate) fn lower_type_ref_opt(
+        &mut self,
+        node: Option<ast::Type>,
+        impl_trait_lower_fn: ImplTraitLowerFn<'_>,
+    ) -> TypeRefId {
+        match node {
+            Some(node) => self.lower_type_ref(node, impl_trait_lower_fn),
+            None => self.alloc_error_type(),
+        }
+    }
+
+    pub(crate) fn lower_type_ref_opt_disallow_impl_trait(
+        &mut self,
+        node: Option<ast::Type>,
+    ) -> TypeRefId {
+        self.lower_type_ref_opt(node, &mut Self::impl_trait_error_allocator)
+    }
+
+    fn alloc_type_ref(&mut self, type_ref: TypeRef, node: TypePtr) -> TypeRefId {
+        let id = self.store.types.alloc(type_ref);
+        let ptr = self.expander.in_file(node);
+        self.source_map.types_map_back.insert(id, ptr);
+        self.source_map.types_map.insert(ptr, id);
+        id
+    }
+
+    fn alloc_lifetime_ref(
+        &mut self,
+        lifetime_ref: LifetimeRef,
+        node: LifetimePtr,
+    ) -> LifetimeRefId {
+        let id = self.store.lifetimes.alloc(lifetime_ref);
+        let ptr = self.expander.in_file(node);
+        self.source_map.lifetime_map_back.insert(id, ptr);
+        self.source_map.lifetime_map.insert(ptr, id);
+        id
+    }
+
+    fn alloc_type_ref_desugared(&mut self, type_ref: TypeRef) -> TypeRefId {
+        self.store.types.alloc(type_ref)
+    }
+
+    fn alloc_lifetime_ref_desugared(&mut self, lifetime_ref: LifetimeRef) -> LifetimeRefId {
+        self.store.lifetimes.alloc(lifetime_ref)
+    }
+
+    fn alloc_error_type(&mut self) -> TypeRefId {
+        self.store.types.alloc(TypeRef::Error)
+    }
+
+    pub fn lower_path(
+        &mut self,
+        ast: ast::Path,
+        impl_trait_lower_fn: ImplTraitLowerFn<'_>,
+    ) -> Option<Path> {
+        super::lower::path::lower_path(self, ast, impl_trait_lower_fn)
+    }
+
+    fn with_outer_impl_trait_scope<R>(
+        &mut self,
+        impl_trait: bool,
+        f: impl FnOnce(&mut Self) -> R,
+    ) -> R {
+        let old = mem::replace(&mut self.outer_impl_trait, impl_trait);
+        let result = f(self);
+        self.outer_impl_trait = old;
+        result
+    }
+
+    pub fn impl_trait_error_allocator(
+        ec: &mut ExprCollector<'_>,
+        ptr: TypePtr,
+        _: ThinVec<TypeBound>,
+    ) -> TypeRefId {
+        ec.alloc_type_ref(TypeRef::Error, ptr)
+    }
+
+    fn impl_trait_allocator(
+        ec: &mut ExprCollector<'_>,
+        ptr: TypePtr,
+        bounds: ThinVec<TypeBound>,
+    ) -> TypeRefId {
+        ec.alloc_type_ref(TypeRef::ImplTrait(bounds), ptr)
+    }
+
+    fn alloc_path(&mut self, path: Path, node: TypePtr) -> PathId {
+        PathId::from_type_ref_unchecked(self.alloc_type_ref(TypeRef::Path(path), node))
+    }
+
+    /// Collect `GenericArgs` from the parts of a fn-like path, i.e. `Fn(X, Y)
+    /// -> Z` (which desugars to `Fn<(X, Y), Output=Z>`).
+    pub fn lower_generic_args_from_fn_path(
+        &mut self,
+        args: Option<ast::ParenthesizedArgList>,
+        ret_type: Option<ast::RetType>,
+        impl_trait_lower_fn: ImplTraitLowerFn<'_>,
+    ) -> Option<GenericArgs> {
+        let params = args?;
+        let mut param_types = Vec::new();
+        for param in params.type_args() {
+            let type_ref = self.lower_type_ref_opt(param.ty(), impl_trait_lower_fn);
+            param_types.push(type_ref);
+        }
+        let args = Box::new([GenericArg::Type(
+            self.alloc_type_ref_desugared(TypeRef::Tuple(ThinVec::from_iter(param_types))),
+        )]);
+        let bindings = if let Some(ret_type) = ret_type {
+            let type_ref = self.lower_type_ref_opt(ret_type.ty(), impl_trait_lower_fn);
+            Box::new([AssociatedTypeBinding {
+                name: Name::new_symbol_root(sym::Output),
+                args: None,
+                type_ref: Some(type_ref),
+                bounds: Box::default(),
+            }])
+        } else {
+            // -> ()
+            let type_ref = self.alloc_type_ref_desugared(TypeRef::unit());
+            Box::new([AssociatedTypeBinding {
+                name: Name::new_symbol_root(sym::Output),
+                args: None,
+                type_ref: Some(type_ref),
+                bounds: Box::default(),
+            }])
+        };
+        Some(GenericArgs {
+            args,
+            has_self_type: false,
+            bindings,
+            parenthesized: GenericArgsParentheses::ParenSugar,
+        })
+    }
+
+    pub(super) fn lower_generic_args(
+        &mut self,
+        node: ast::GenericArgList,
+        impl_trait_lower_fn: ImplTraitLowerFn<'_>,
+    ) -> Option<GenericArgs> {
+        // This needs to be kept in sync with `hir_generic_arg_to_ast()`.
+        let mut args = Vec::new();
+        let mut bindings = Vec::new();
+        for generic_arg in node.generic_args() {
+            match generic_arg {
+                ast::GenericArg::TypeArg(type_arg) => {
+                    let type_ref = self.lower_type_ref_opt(type_arg.ty(), impl_trait_lower_fn);
+                    args.push(GenericArg::Type(type_ref));
+                }
+                ast::GenericArg::AssocTypeArg(assoc_type_arg) => {
+                    // This needs to be kept in sync with `hir_assoc_type_binding_to_ast()`.
+                    if assoc_type_arg.param_list().is_some() {
+                        // We currently ignore associated return type bounds.
+                        continue;
+                    }
+                    if let Some(name_ref) = assoc_type_arg.name_ref() {
+                        // Nested impl traits like `impl Foo<Assoc = impl Bar>` are allowed
+                        self.with_outer_impl_trait_scope(false, |this| {
+                            let name = name_ref.as_name();
+                            let args = assoc_type_arg
+                                .generic_arg_list()
+                                .and_then(|args| this.lower_generic_args(args, impl_trait_lower_fn))
+                                .or_else(|| {
+                                    assoc_type_arg
+                                        .return_type_syntax()
+                                        .map(|_| GenericArgs::return_type_notation())
+                                });
+                            let type_ref = assoc_type_arg
+                                .ty()
+                                .map(|it| this.lower_type_ref(it, impl_trait_lower_fn));
+                            let bounds = if let Some(l) = assoc_type_arg.type_bound_list() {
+                                l.bounds()
+                                    .map(|it| this.lower_type_bound(it, impl_trait_lower_fn))
+                                    .collect()
+                            } else {
+                                Box::default()
+                            };
+                            bindings.push(AssociatedTypeBinding { name, args, type_ref, bounds });
+                        });
+                    }
+                }
+                ast::GenericArg::LifetimeArg(lifetime_arg) => {
+                    if let Some(lifetime) = lifetime_arg.lifetime() {
+                        let lifetime_ref = self.lower_lifetime_ref(lifetime);
+                        args.push(GenericArg::Lifetime(lifetime_ref))
+                    }
+                }
+                ast::GenericArg::ConstArg(arg) => {
+                    let arg = self.lower_const_arg(arg);
+                    args.push(GenericArg::Const(arg))
+                }
+            }
+        }
+
+        if args.is_empty() && bindings.is_empty() {
+            return None;
         }
+        Some(GenericArgs {
+            args: args.into_boxed_slice(),
+            has_self_type: false,
+            bindings: bindings.into_boxed_slice(),
+            parenthesized: GenericArgsParentheses::No,
+        })
     }
 
     fn collect(&mut self, expr: Option<ast::Expr>, awaitable: Awaitable) -> ExprId {
@@ -370,8 +934,89 @@ impl ExprCollector<'_> {
         })
     }
 
-    fn ctx(&mut self) -> LowerCtx<'_> {
-        self.expander.ctx(self.db, &mut self.store.types, &mut self.source_map.types)
+    fn type_bounds_from_ast(
+        &mut self,
+        type_bounds_opt: Option<ast::TypeBoundList>,
+        impl_trait_lower_fn: ImplTraitLowerFn<'_>,
+    ) -> ThinVec<TypeBound> {
+        if let Some(type_bounds) = type_bounds_opt {
+            ThinVec::from_iter(Vec::from_iter(
+                type_bounds.bounds().map(|it| self.lower_type_bound(it, impl_trait_lower_fn)),
+            ))
+        } else {
+            ThinVec::from_iter([])
+        }
+    }
+
+    fn lower_path_type(
+        &mut self,
+        path_type: &ast::PathType,
+        impl_trait_lower_fn: ImplTraitLowerFn<'_>,
+    ) -> Option<Path> {
+        let path = self.lower_path(path_type.path()?, impl_trait_lower_fn)?;
+        Some(path)
+    }
+
+    fn lower_type_bound(
+        &mut self,
+        node: ast::TypeBound,
+        impl_trait_lower_fn: ImplTraitLowerFn<'_>,
+    ) -> TypeBound {
+        match node.kind() {
+            ast::TypeBoundKind::PathType(path_type) => {
+                let m = match node.question_mark_token() {
+                    Some(_) => TraitBoundModifier::Maybe,
+                    None => TraitBoundModifier::None,
+                };
+                self.lower_path_type(&path_type, impl_trait_lower_fn)
+                    .map(|p| {
+                        TypeBound::Path(self.alloc_path(p, AstPtr::new(&path_type).upcast()), m)
+                    })
+                    .unwrap_or(TypeBound::Error)
+            }
+            ast::TypeBoundKind::ForType(for_type) => {
+                let lt_refs = match for_type.generic_param_list() {
+                    Some(gpl) => gpl
+                        .lifetime_params()
+                        .flat_map(|lp| lp.lifetime().map(|lt| Name::new_lifetime(&lt.text())))
+                        .collect(),
+                    None => ThinVec::default(),
+                };
+                let path = for_type.ty().and_then(|ty| match &ty {
+                    ast::Type::PathType(path_type) => {
+                        self.lower_path_type(path_type, impl_trait_lower_fn).map(|p| (p, ty))
+                    }
+                    _ => None,
+                });
+                match path {
+                    Some((p, ty)) => {
+                        TypeBound::ForLifetime(lt_refs, self.alloc_path(p, AstPtr::new(&ty)))
+                    }
+                    None => TypeBound::Error,
+                }
+            }
+            ast::TypeBoundKind::Use(gal) => TypeBound::Use(
+                gal.use_bound_generic_args()
+                    .map(|p| match p {
+                        ast::UseBoundGenericArg::Lifetime(l) => {
+                            UseArgRef::Lifetime(self.lower_lifetime_ref(l))
+                        }
+                        ast::UseBoundGenericArg::NameRef(n) => UseArgRef::Name(n.as_name()),
+                    })
+                    .collect(),
+            ),
+            ast::TypeBoundKind::Lifetime(lifetime) => {
+                TypeBound::Lifetime(self.lower_lifetime_ref(lifetime))
+            }
+        }
+    }
+
+    fn lower_const_arg_opt(&mut self, arg: Option<ast::ConstArg>) -> ConstRef {
+        ConstRef { expr: self.collect_expr_opt(arg.and_then(|it| it.expr())) }
+    }
+
+    fn lower_const_arg(&mut self, arg: ast::ConstArg) -> ConstRef {
+        ConstRef { expr: self.collect_expr_opt(arg.expr()) }
     }
 
     fn collect_expr(&mut self, expr: ast::Expr) -> ExprId {
@@ -415,7 +1060,7 @@ impl ExprCollector<'_> {
                     })
                 }
                 Some(ast::BlockModifier::Label(label)) => {
-                    let label_hygiene = self.hygiene_id_for(label.syntax().text_range().start());
+                    let label_hygiene = self.hygiene_id_for(label.syntax().text_range());
                     let label_id = self.collect_label(label);
                     self.with_labeled_rib(label_id, label_hygiene, |this| {
                         this.collect_block_(e, |id, statements, tail| Expr::Block {
@@ -443,11 +1088,7 @@ impl ExprCollector<'_> {
                             let (result_expr_id, prev_binding_owner) =
                                 this.initialize_binding_owner(syntax_ptr);
                             let inner_expr = this.collect_block(e);
-                            let it = this.db.intern_anonymous_const(ConstBlockLoc {
-                                parent: this.owner,
-                                root: inner_expr,
-                            });
-                            this.store.exprs[result_expr_id] = Expr::Const(it);
+                            this.store.exprs[result_expr_id] = Expr::Const(inner_expr);
                             this.current_binding_owner = prev_binding_owner;
                             result_expr_id
                         })
@@ -465,10 +1106,7 @@ impl ExprCollector<'_> {
             },
             ast::Expr::LoopExpr(e) => {
                 let label = e.label().map(|label| {
-                    (
-                        self.hygiene_id_for(label.syntax().text_range().start()),
-                        self.collect_label(label),
-                    )
+                    (self.hygiene_id_for(label.syntax().text_range()), self.collect_label(label))
                 });
                 let body = self.collect_labelled_block_opt(label, e.loop_body());
                 self.alloc_expr(Expr::Loop { body, label: label.map(|it| it.1) }, syntax_ptr)
@@ -503,7 +1141,9 @@ impl ExprCollector<'_> {
                 let method_name = e.name_ref().map(|nr| nr.as_name()).unwrap_or_else(Name::missing);
                 let generic_args = e
                     .generic_arg_list()
-                    .and_then(|it| GenericArgs::from_ast(&mut self.ctx(), it))
+                    .and_then(|it| {
+                        self.lower_generic_args(it, &mut Self::impl_trait_error_allocator)
+                    })
                     .map(Box::new);
                 self.alloc_expr(
                     Expr::MethodCall { receiver, method_name, args, generic_args },
@@ -582,7 +1222,10 @@ impl ExprCollector<'_> {
                 self.alloc_expr(Expr::Yeet { expr }, syntax_ptr)
             }
             ast::Expr::RecordExpr(e) => {
-                let path = e.path().and_then(|path| self.parse_path(path)).map(Box::new);
+                let path = e
+                    .path()
+                    .and_then(|path| self.lower_path(path, &mut Self::impl_trait_error_allocator))
+                    .map(Box::new);
                 let record_lit = if let Some(nfl) = e.record_expr_field_list() {
                     let fields = nfl
                         .fields()
@@ -621,7 +1264,7 @@ impl ExprCollector<'_> {
                 if let Awaitable::No(location) = self.is_lowering_awaitable_block() {
                     self.source_map.diagnostics.push(
                         ExpressionStoreDiagnostics::AwaitOutsideOfAsync {
-                            node: InFile::new(self.expander.current_file_id(), AstPtr::new(&e)),
+                            node: self.expander.in_file(AstPtr::new(&e)),
                             location: location.to_string(),
                         },
                     );
@@ -631,18 +1274,14 @@ impl ExprCollector<'_> {
             ast::Expr::TryExpr(e) => self.collect_try_operator(syntax_ptr, e),
             ast::Expr::CastExpr(e) => {
                 let expr = self.collect_expr_opt(e.expr());
-                let type_ref = TypeRef::from_ast_opt(&mut self.ctx(), e.ty());
+                let type_ref = self.lower_type_ref_opt_disallow_impl_trait(e.ty());
                 self.alloc_expr(Expr::Cast { expr, type_ref }, syntax_ptr)
             }
             ast::Expr::RefExpr(e) => {
                 let expr = self.collect_expr_opt(e.expr());
                 let raw_tok = e.raw_token().is_some();
                 let mutability = if raw_tok {
-                    if e.mut_token().is_some() {
-                        Mutability::Mut
-                    } else {
-                        Mutability::Shared
-                    }
+                    if e.mut_token().is_some() { Mutability::Mut } else { Mutability::Shared }
                 } else {
                     Mutability::from_mutable(e.mut_token().is_some())
                 };
@@ -667,7 +1306,8 @@ impl ExprCollector<'_> {
                     arg_types.reserve_exact(num_params);
                     for param in pl.params() {
                         let pat = this.collect_pat_top(param.pat());
-                        let type_ref = param.ty().map(|it| TypeRef::from_ast(&mut this.ctx(), it));
+                        let type_ref =
+                            param.ty().map(|it| this.lower_type_ref_disallow_impl_trait(it));
                         args.push(pat);
                         arg_types.push(type_ref);
                     }
@@ -675,7 +1315,7 @@ impl ExprCollector<'_> {
                 let ret_type = e
                     .ret_type()
                     .and_then(|r| r.ty())
-                    .map(|it| TypeRef::from_ast(&mut this.ctx(), it));
+                    .map(|it| this.lower_type_ref_disallow_impl_trait(it));
 
                 let prev_is_lowering_coroutine = mem::take(&mut this.is_lowering_coroutine);
                 let prev_try_block_label = this.current_try_block_label.take();
@@ -802,7 +1442,7 @@ impl ExprCollector<'_> {
             ast::Expr::UnderscoreExpr(_) => self.alloc_expr(Expr::Underscore, syntax_ptr),
             ast::Expr::AsmExpr(e) => self.lower_inline_asm(e, syntax_ptr),
             ast::Expr::OffsetOfExpr(e) => {
-                let container = TypeRef::from_ast_opt(&mut self.ctx(), e.ty());
+                let container = self.lower_type_ref_opt_disallow_impl_trait(e.ty());
                 let fields = e.fields().map(|it| it.as_name()).collect();
                 self.alloc_expr(Expr::OffsetOf(OffsetOf { container, fields }), syntax_ptr)
             }
@@ -810,17 +1450,13 @@ impl ExprCollector<'_> {
         })
     }
 
-    fn parse_path(&mut self, path: ast::Path) -> Option<Path> {
-        self.expander.parse_path(self.db, path, &mut self.store.types, &mut self.source_map.types)
-    }
-
     fn collect_expr_path(&mut self, e: ast::PathExpr) -> Option<(Path, HygieneId)> {
         e.path().and_then(|path| {
-            let path = self.parse_path(path)?;
+            let path = self.lower_path(path, &mut Self::impl_trait_error_allocator)?;
             // Need to enable `mod_path.len() < 1` for `self`.
             let may_be_variable = matches!(&path, Path::BarePath(mod_path) if mod_path.len() <= 1);
             let hygiene = if may_be_variable {
-                self.hygiene_id_for(e.syntax().text_range().start())
+                self.hygiene_id_for(e.syntax().text_range())
             } else {
                 HygieneId::ROOT
             };
@@ -883,7 +1519,10 @@ impl ExprCollector<'_> {
             }
             ast::Expr::CallExpr(e) => {
                 let path = collect_path(self, e.expr()?)?;
-                let path = path.path().and_then(|path| self.parse_path(path)).map(Box::new);
+                let path = path
+                    .path()
+                    .and_then(|path| self.lower_path(path, &mut Self::impl_trait_error_allocator))
+                    .map(Box::new);
                 let (ellipsis, args) = collect_tuple(self, e.arg_list()?.args());
                 self.alloc_pat_from_expr(Pat::TupleStruct { path, args, ellipsis }, syntax_ptr)
             }
@@ -909,7 +1548,10 @@ impl ExprCollector<'_> {
                 id
             }
             ast::Expr::RecordExpr(e) => {
-                let path = e.path().and_then(|path| self.parse_path(path)).map(Box::new);
+                let path = e
+                    .path()
+                    .and_then(|path| self.lower_path(path, &mut Self::impl_trait_error_allocator))
+                    .map(Box::new);
                 let record_field_list = e.record_expr_field_list()?;
                 let ellipsis = record_field_list.dotdot_token().is_some();
                 // FIXME: Report an error here if `record_field_list.spread().is_some()`.
@@ -1035,9 +1677,7 @@ impl ExprCollector<'_> {
     /// `try { <stmts>; }` into `'<new_label>: { <stmts>; ::std::ops::Try::from_output(()) }`
     /// and save the `<new_label>` to use it as a break target for desugaring of the `?` operator.
     fn desugar_try_block(&mut self, e: BlockExpr) -> ExprId {
-        let Some(try_from_output) = self.lang_path(LangItem::TryTraitFromOutput) else {
-            return self.collect_block(e);
-        };
+        let try_from_output = self.lang_path(LangItem::TryTraitFromOutput);
         let label = self.alloc_label_desugared(Label {
             name: Name::generate_new_name(self.store.labels.len()),
         });
@@ -1053,7 +1693,8 @@ impl ExprCollector<'_> {
             (btail, block)
         });
 
-        let callee = self.alloc_expr_desugared_with_ptr(Expr::Path(try_from_output), ptr);
+        let callee = self
+            .alloc_expr_desugared_with_ptr(try_from_output.map_or(Expr::Missing, Expr::Path), ptr);
         let next_tail = match btail {
             Some(tail) => self
                 .alloc_expr_desugared_with_ptr(Expr::Call { callee, args: Box::new([tail]) }, ptr),
@@ -1089,7 +1730,7 @@ impl ExprCollector<'_> {
     /// to preserve drop semantics. We should probably do the same in future.
     fn collect_while_loop(&mut self, syntax_ptr: AstPtr<ast::Expr>, e: ast::WhileExpr) -> ExprId {
         let label = e.label().map(|label| {
-            (self.hygiene_id_for(label.syntax().text_range().start()), self.collect_label(label))
+            (self.hygiene_id_for(label.syntax().text_range()), self.collect_label(label))
         });
         let body = self.collect_labelled_block_opt(label, e.loop_body());
 
@@ -1135,35 +1776,29 @@ impl ExprCollector<'_> {
     /// }
     /// ```
     fn collect_for_loop(&mut self, syntax_ptr: AstPtr<ast::Expr>, e: ast::ForExpr) -> ExprId {
-        let Some((into_iter_fn, iter_next_fn, option_some, option_none)) = (|| {
-            Some((
-                self.lang_path(LangItem::IntoIterIntoIter)?,
-                self.lang_path(LangItem::IteratorNext)?,
-                self.lang_path(LangItem::OptionSome)?,
-                self.lang_path(LangItem::OptionNone)?,
-            ))
-        })() else {
-            // Some of the needed lang items are missing, so we can't desugar
-            return self.alloc_expr(Expr::Missing, syntax_ptr);
-        };
+        let into_iter_fn = self.lang_path(LangItem::IntoIterIntoIter);
+        let iter_next_fn = self.lang_path(LangItem::IteratorNext);
+        let option_some = self.lang_path(LangItem::OptionSome);
+        let option_none = self.lang_path(LangItem::OptionNone);
         let head = self.collect_expr_opt(e.iterable());
-        let into_iter_fn_expr = self.alloc_expr(Expr::Path(into_iter_fn), syntax_ptr);
+        let into_iter_fn_expr =
+            self.alloc_expr(into_iter_fn.map_or(Expr::Missing, Expr::Path), syntax_ptr);
         let iterator = self.alloc_expr(
             Expr::Call { callee: into_iter_fn_expr, args: Box::new([head]) },
             syntax_ptr,
         );
         let none_arm = MatchArm {
-            pat: self.alloc_pat_desugared(Pat::Path(option_none)),
+            pat: self.alloc_pat_desugared(option_none.map_or(Pat::Missing, Pat::Path)),
             guard: None,
             expr: self.alloc_expr(Expr::Break { expr: None, label: None }, syntax_ptr),
         };
         let some_pat = Pat::TupleStruct {
-            path: Some(Box::new(option_some)),
+            path: option_some.map(Box::new),
             args: Box::new([self.collect_pat_top(e.pat())]),
             ellipsis: None,
         };
         let label = e.label().map(|label| {
-            (self.hygiene_id_for(label.syntax().text_range().start()), self.collect_label(label))
+            (self.hygiene_id_for(label.syntax().text_range()), self.collect_label(label))
         });
         let some_arm = MatchArm {
             pat: self.alloc_pat_desugared(some_pat),
@@ -1178,7 +1813,8 @@ impl ExprCollector<'_> {
             Expr::Ref { expr: iter_expr, rawness: Rawness::Ref, mutability: Mutability::Mut },
             syntax_ptr,
         );
-        let iter_next_fn_expr = self.alloc_expr(Expr::Path(iter_next_fn), syntax_ptr);
+        let iter_next_fn_expr =
+            self.alloc_expr(iter_next_fn.map_or(Expr::Missing, Expr::Path), syntax_ptr);
         let iter_next_expr = self.alloc_expr(
             Expr::Call { callee: iter_next_fn_expr, args: Box::new([iter_expr_mut]) },
             syntax_ptr,
@@ -1198,7 +1834,8 @@ impl ExprCollector<'_> {
         );
         let loop_outer = self
             .alloc_expr(Expr::Loop { body: loop_inner, label: label.map(|it| it.1) }, syntax_ptr);
-        let iter_binding = self.alloc_binding(iter_name, BindingAnnotation::Mutable);
+        let iter_binding =
+            self.alloc_binding(iter_name, BindingAnnotation::Mutable, HygieneId::ROOT);
         let iter_pat = self.alloc_pat_desugared(Pat::Bind { id: iter_binding, subpat: None });
         self.add_definition_to_binding(iter_binding, iter_pat);
         self.alloc_expr(
@@ -1222,30 +1859,26 @@ impl ExprCollector<'_> {
     /// }
     /// ```
     fn collect_try_operator(&mut self, syntax_ptr: AstPtr<ast::Expr>, e: ast::TryExpr) -> ExprId {
-        let Some((try_branch, cf_continue, cf_break, try_from_residual)) = (|| {
-            Some((
-                self.lang_path(LangItem::TryTraitBranch)?,
-                self.lang_path(LangItem::ControlFlowContinue)?,
-                self.lang_path(LangItem::ControlFlowBreak)?,
-                self.lang_path(LangItem::TryTraitFromResidual)?,
-            ))
-        })() else {
-            // Some of the needed lang items are missing, so we can't desugar
-            return self.alloc_expr(Expr::Missing, syntax_ptr);
-        };
+        let try_branch = self.lang_path(LangItem::TryTraitBranch);
+        let cf_continue = self.lang_path(LangItem::ControlFlowContinue);
+        let cf_break = self.lang_path(LangItem::ControlFlowBreak);
+        let try_from_residual = self.lang_path(LangItem::TryTraitFromResidual);
         let operand = self.collect_expr_opt(e.expr());
-        let try_branch = self.alloc_expr(Expr::Path(try_branch), syntax_ptr);
+        let try_branch = self.alloc_expr(try_branch.map_or(Expr::Missing, Expr::Path), syntax_ptr);
         let expr = self
             .alloc_expr(Expr::Call { callee: try_branch, args: Box::new([operand]) }, syntax_ptr);
         let continue_name = Name::generate_new_name(self.store.bindings.len());
-        let continue_binding =
-            self.alloc_binding(continue_name.clone(), BindingAnnotation::Unannotated);
+        let continue_binding = self.alloc_binding(
+            continue_name.clone(),
+            BindingAnnotation::Unannotated,
+            HygieneId::ROOT,
+        );
         let continue_bpat =
             self.alloc_pat_desugared(Pat::Bind { id: continue_binding, subpat: None });
         self.add_definition_to_binding(continue_binding, continue_bpat);
         let continue_arm = MatchArm {
             pat: self.alloc_pat_desugared(Pat::TupleStruct {
-                path: Some(Box::new(cf_continue)),
+                path: cf_continue.map(Box::new),
                 args: Box::new([continue_bpat]),
                 ellipsis: None,
             }),
@@ -1253,19 +1886,21 @@ impl ExprCollector<'_> {
             expr: self.alloc_expr(Expr::Path(Path::from(continue_name)), syntax_ptr),
         };
         let break_name = Name::generate_new_name(self.store.bindings.len());
-        let break_binding = self.alloc_binding(break_name.clone(), BindingAnnotation::Unannotated);
+        let break_binding =
+            self.alloc_binding(break_name.clone(), BindingAnnotation::Unannotated, HygieneId::ROOT);
         let break_bpat = self.alloc_pat_desugared(Pat::Bind { id: break_binding, subpat: None });
         self.add_definition_to_binding(break_binding, break_bpat);
         let break_arm = MatchArm {
             pat: self.alloc_pat_desugared(Pat::TupleStruct {
-                path: Some(Box::new(cf_break)),
+                path: cf_break.map(Box::new),
                 args: Box::new([break_bpat]),
                 ellipsis: None,
             }),
             guard: None,
             expr: {
                 let it = self.alloc_expr(Expr::Path(Path::from(break_name)), syntax_ptr);
-                let callee = self.alloc_expr(Expr::Path(try_from_residual), syntax_ptr);
+                let callee = self
+                    .alloc_expr(try_from_residual.map_or(Expr::Missing, Expr::Path), syntax_ptr);
                 let result =
                     self.alloc_expr(Expr::Call { callee, args: Box::new([it]) }, syntax_ptr);
                 self.alloc_expr(
@@ -1291,30 +1926,39 @@ impl ExprCollector<'_> {
     where
         T: ast::AstNode,
     {
-        // File containing the macro call. Expansion errors will be attached here.
-        let outer_file = self.expander.current_file_id();
-
         let macro_call_ptr = self.expander.in_file(syntax_ptr);
-        let module = self.expander.module.local_id;
+        let module = self.module.local_id;
 
-        let res = match self.def_map.modules[module]
-            .scope
-            .macro_invoc(InFile::new(outer_file, self.ast_id_map.ast_id_for_ptr(syntax_ptr)))
-        {
+        let block_call = self.def_map.modules[self.module.local_id].scope.macro_invoc(
+            self.expander.in_file(self.expander.ast_id_map().ast_id_for_ptr(syntax_ptr)),
+        );
+        let res = match block_call {
             // fast path, macro call is in a block module
             Some(call) => Ok(self.expander.enter_expand_id(self.db, call)),
-            None => self.expander.enter_expand(self.db, mcall, |path| {
-                self.def_map
-                    .resolve_path(
-                        self.db,
-                        module,
-                        path,
-                        crate::item_scope::BuiltinShadowMode::Other,
-                        Some(MacroSubNs::Bang),
-                    )
-                    .0
-                    .take_macros()
-            }),
+            None => {
+                let resolver = |path: &_| {
+                    self.def_map
+                        .resolve_path(
+                            &self.local_def_map,
+                            self.db,
+                            module,
+                            path,
+                            crate::item_scope::BuiltinShadowMode::Other,
+                            Some(MacroSubNs::Bang),
+                        )
+                        .0
+                        .take_macros()
+                };
+                self.expander.enter_expand(
+                    self.db,
+                    mcall,
+                    self.module.krate(),
+                    resolver,
+                    &mut |ptr, call| {
+                        _ = self.source_map.expansions.insert(ptr.map(|(it, _)| it), call);
+                    },
+                )
+            }
         };
 
         let res = match res {
@@ -1323,7 +1967,7 @@ impl ExprCollector<'_> {
                 if record_diagnostics {
                     self.source_map.diagnostics.push(
                         ExpressionStoreDiagnostics::UnresolvedMacroCall {
-                            node: InFile::new(outer_file, syntax_ptr),
+                            node: self.expander.in_file(syntax_ptr),
                             path,
                         },
                     );
@@ -1333,10 +1977,9 @@ impl ExprCollector<'_> {
         };
         if record_diagnostics {
             if let Some(err) = res.err {
-                self.source_map.diagnostics.push(ExpressionStoreDiagnostics::MacroError {
-                    node: InFile::new(outer_file, syntax_ptr),
-                    err,
-                });
+                self.source_map
+                    .diagnostics
+                    .push(ExpressionStoreDiagnostics::MacroError { node: macro_call_ptr, err });
             }
         }
 
@@ -1347,24 +1990,12 @@ impl ExprCollector<'_> {
                 if let Some(macro_file) = self.expander.current_file_id().macro_file() {
                     self.source_map.expansions.insert(macro_call_ptr, macro_file);
                 }
-                let prev_ast_id_map = mem::replace(
-                    &mut self.ast_id_map,
-                    self.db.ast_id_map(self.expander.current_file_id()),
-                );
 
                 if record_diagnostics {
                     // FIXME: Report parse errors here
                 }
 
-                let SpanMap::ExpansionSpanMap(new_span_map) = self.expander.span_map(self.db)
-                else {
-                    panic!("just expanded a macro, ExpansionSpanMap should be available");
-                };
-                let old_span_map =
-                    mem::replace(&mut self.current_span_map, Some(new_span_map.clone()));
-                let id = collector(self, Some(expansion.tree()));
-                self.current_span_map = old_span_map;
-                self.ast_id_map = prev_ast_id_map;
+                let id = collector(self, expansion.map(|it| it.tree()));
                 self.expander.exit(mark);
                 id
             }
@@ -1417,7 +2048,7 @@ impl ExprCollector<'_> {
                     return;
                 }
                 let pat = self.collect_pat_top(stmt.pat());
-                let type_ref = stmt.ty().map(|it| TypeRef::from_ast(&mut self.ctx(), it));
+                let type_ref = stmt.ty().map(|it| self.lower_type_ref_disallow_impl_trait(it));
                 let initializer = stmt.initializer().map(|e| self.collect_expr(e));
                 let else_branch = stmt
                     .let_else()
@@ -1516,9 +2147,9 @@ impl ExprCollector<'_> {
         };
 
         let block_id = if block_has_items {
-            let file_local_id = self.ast_id_map.ast_id(&block);
+            let file_local_id = self.expander.ast_id_map().ast_id(&block);
             let ast_id = self.expander.in_file(file_local_id);
-            Some(self.db.intern_block(BlockLoc { ast_id, module: self.expander.module }))
+            Some(self.db.intern_block(BlockLoc { ast_id, module: self.module }))
         } else {
             None
         };
@@ -1529,10 +2160,10 @@ impl ExprCollector<'_> {
                     self.store.block_scopes.push(block_id);
                     (def_map.module_id(DefMap::ROOT), def_map)
                 }
-                None => (self.expander.module, self.def_map.clone()),
+                None => (self.module, self.def_map.clone()),
             };
         let prev_def_map = mem::replace(&mut self.def_map, def_map);
-        let prev_local_module = mem::replace(&mut self.expander.module, module);
+        let prev_local_module = mem::replace(&mut self.module, module);
         let prev_legacy_macros_count = mem::take(&mut self.current_block_legacy_macro_defs_count);
 
         let mut statements = Vec::new();
@@ -1555,7 +2186,7 @@ impl ExprCollector<'_> {
             .alloc_expr(mk_block(block_id, statements.into_boxed_slice(), tail), syntax_node_ptr);
 
         self.def_map = prev_def_map;
-        self.expander.module = prev_local_module;
+        self.module = prev_local_module;
         self.current_block_legacy_macro_defs_count = prev_legacy_macros_count;
         expr_id
     }
@@ -1595,7 +2226,7 @@ impl ExprCollector<'_> {
                 let name = bp.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing);
                 let hygiene = bp
                     .name()
-                    .map(|name| self.hygiene_id_for(name.syntax().text_range().start()))
+                    .map(|name| self.hygiene_id_for(name.syntax().text_range()))
                     .unwrap_or(HygieneId::ROOT);
 
                 let annotation =
@@ -1608,8 +2239,9 @@ impl ExprCollector<'_> {
                     // This could also be a single-segment path pattern. To
                     // decide that, we need to try resolving the name.
                     let (resolved, _) = self.def_map.resolve_path(
+                        &self.local_def_map,
                         self.db,
-                        self.expander.module.local_id,
+                        self.module.local_id,
                         &name.clone().into(),
                         BuiltinShadowMode::Other,
                         None,
@@ -1620,13 +2252,17 @@ impl ExprCollector<'_> {
                     match resolved.take_values() {
                         Some(ModuleDefId::ConstId(_)) => (None, Pat::Path(name.into())),
                         Some(ModuleDefId::EnumVariantId(variant))
-                            if self.db.variant_data(variant.into()).kind()
-                                != StructKind::Record =>
+                            if {
+                                let loc = variant.lookup(self.db);
+                                let tree = loc.item_tree_id().item_tree(self.db);
+                                tree[loc.id.value].shape != FieldsShape::Record
+                            } =>
                         {
                             (None, Pat::Path(name.into()))
                         }
                         Some(ModuleDefId::AdtId(AdtId::StructId(s)))
-                            if self.db.struct_data(s).variant_data.kind() != StructKind::Record =>
+                        // FIXME: This can cause a cycle if the user is writing invalid code
+                            if self.db.struct_signature(s).shape != FieldsShape::Record =>
                         {
                             (None, Pat::Path(name.into()))
                         }
@@ -1649,7 +2285,10 @@ impl ExprCollector<'_> {
                 return pat;
             }
             ast::Pat::TupleStructPat(p) => {
-                let path = p.path().and_then(|path| self.parse_path(path)).map(Box::new);
+                let path = p
+                    .path()
+                    .and_then(|path| self.lower_path(path, &mut Self::impl_trait_error_allocator))
+                    .map(Box::new);
                 let (args, ellipsis) = self.collect_tuple_pat(
                     p.fields(),
                     comma_follows_token(p.l_paren_token()),
@@ -1663,7 +2302,9 @@ impl ExprCollector<'_> {
                 Pat::Ref { pat, mutability }
             }
             ast::Pat::PathPat(p) => {
-                let path = p.path().and_then(|path| self.parse_path(path));
+                let path = p
+                    .path()
+                    .and_then(|path| self.lower_path(path, &mut Self::impl_trait_error_allocator));
                 path.map(Pat::Path).unwrap_or(Pat::Missing)
             }
             ast::Pat::OrPat(p) => 'b: {
@@ -1710,7 +2351,10 @@ impl ExprCollector<'_> {
             }
             ast::Pat::WildcardPat(_) => Pat::Wild,
             ast::Pat::RecordPat(p) => {
-                let path = p.path().and_then(|path| self.parse_path(path)).map(Box::new);
+                let path = p
+                    .path()
+                    .and_then(|path| self.lower_path(path, &mut Self::impl_trait_error_allocator))
+                    .map(Box::new);
                 let record_pat_field_list =
                     &p.record_pat_field_list().expect("every struct should have a field list");
                 let args = record_pat_field_list
@@ -1805,7 +2449,9 @@ impl ExprCollector<'_> {
                                 .map(|path| self.alloc_expr_from_pat(Expr::Path(path), ptr)),
                             ast::Pat::PathPat(p) => p
                                 .path()
-                                .and_then(|path| self.parse_path(path))
+                                .and_then(|path| {
+                                    self.lower_path(path, &mut Self::impl_trait_error_allocator)
+                                })
                                 .map(|parsed| self.alloc_expr_from_pat(Expr::Path(parsed), ptr)),
                             // We only need to handle literal, ident (if bare) and path patterns here,
                             // as any other pattern as a range pattern operand is semantically invalid.
@@ -1891,16 +2537,19 @@ impl ExprCollector<'_> {
     /// Returns `None` (and emits diagnostics) when `owner` is `#[cfg]`d out, and `Some(())` when
     /// not.
     fn check_cfg(&mut self, owner: &dyn ast::HasAttrs) -> Option<()> {
-        match self.expander.parse_attrs(self.db, owner).cfg() {
+        let attrs = self.expander.attrs(self.db, self.module.krate(), owner);
+        match attrs.cfg() {
             Some(cfg) => {
-                if self.expander.cfg_options().check(&cfg) != Some(false) {
+                let cfg_options = self.module.krate().cfg_options(self.db);
+
+                if cfg_options.check(&cfg) != Some(false) {
                     return Some(());
                 }
 
                 self.source_map.diagnostics.push(ExpressionStoreDiagnostics::InactiveCode {
                     node: self.expander.in_file(SyntaxNodePtr::new(owner.syntax())),
                     cfg,
-                    opts: self.expander.cfg_options().clone(),
+                    opts: cfg_options.clone(),
                 });
 
                 None
@@ -1917,7 +2566,10 @@ impl ExprCollector<'_> {
 
     fn collect_label(&mut self, ast_label: ast::Label) -> LabelId {
         let label = Label {
-            name: ast_label.lifetime().as_ref().map_or_else(Name::missing, Name::new_lifetime),
+            name: ast_label
+                .lifetime()
+                .as_ref()
+                .map_or_else(Name::missing, |lt| Name::new_lifetime(&lt.text())),
         };
         self.alloc_label(label, AstPtr::new(&ast_label))
     }
@@ -1927,20 +2579,17 @@ impl ExprCollector<'_> {
         lifetime: Option<ast::Lifetime>,
     ) -> Result<Option<LabelId>, ExpressionStoreDiagnostics> {
         let Some(lifetime) = lifetime else { return Ok(None) };
-        let (mut hygiene_id, mut hygiene_info) = match &self.current_span_map {
-            None => (HygieneId::ROOT, None),
-            Some(span_map) => {
-                let span = span_map.span_at(lifetime.syntax().text_range().start());
-                let ctx = self.db.lookup_intern_syntax_context(span.ctx);
-                let hygiene_id = HygieneId::new(ctx.opaque_and_semitransparent);
-                let hygiene_info = ctx.outer_expn.map(|expansion| {
-                    let expansion = self.db.lookup_intern_macro_call(expansion);
-                    (ctx.parent, expansion.def)
-                });
-                (hygiene_id, hygiene_info)
-            }
+        let mut hygiene_id =
+            self.expander.hygiene_for_range(self.db, lifetime.syntax().text_range());
+        let mut hygiene_info = if hygiene_id.is_root() {
+            None
+        } else {
+            hygiene_id.lookup().outer_expn(self.db).map(|expansion| {
+                let expansion = self.db.lookup_intern_macro_call(expansion.into());
+                (hygiene_id.lookup().parent(self.db), expansion.def)
+            })
         };
-        let name = Name::new_lifetime(&lifetime);
+        let name = Name::new_lifetime(&lifetime.text());
 
         for (rib_idx, rib) in self.label_ribs.iter().enumerate().rev() {
             match &rib.kind {
@@ -1962,11 +2611,12 @@ impl ExprCollector<'_> {
                             // A macro is allowed to refer to labels from before its declaration.
                             // Therefore, if we got to the rib of its declaration, give up its hygiene
                             // and use its parent expansion.
-                            let parent_ctx = self.db.lookup_intern_syntax_context(parent_ctx);
-                            hygiene_id = HygieneId::new(parent_ctx.opaque_and_semitransparent);
-                            hygiene_info = parent_ctx.outer_expn.map(|expansion| {
-                                let expansion = self.db.lookup_intern_macro_call(expansion);
-                                (parent_ctx.parent, expansion.def)
+
+                            hygiene_id =
+                                HygieneId::new(parent_ctx.opaque_and_semitransparent(self.db));
+                            hygiene_info = parent_ctx.outer_expn(self.db).map(|expansion| {
+                                let expansion = self.db.lookup_intern_macro_call(expansion.into());
+                                (parent_ctx.parent(self.db), expansion.def)
                             });
                         }
                     }
@@ -2035,7 +2685,7 @@ impl ExprCollector<'_> {
                 return match l.kind() {
                     ast::LiteralKind::String(s) => Some((s, true)),
                     _ => None,
-                }
+                };
             }
             _ => return None,
         };
@@ -2052,7 +2702,7 @@ impl ExprCollector<'_> {
         f: ast::FormatArgsExpr,
         syntax_ptr: AstPtr<ast::Expr>,
     ) -> ExprId {
-        let mut args = FormatArgumentsCollector::new();
+        let mut args = FormatArgumentsCollector::default();
         f.args().for_each(|arg| {
             args.add(FormatArgument {
                 kind: match arg.name() {
@@ -2075,8 +2725,8 @@ impl ExprCollector<'_> {
             self.expand_macros_to_string(template.clone()).map(|it| (it, template))
         }) {
             Some(((s, is_direct_literal), template)) => {
-                let call_ctx = self.expander.syntax_context();
-                let hygiene = self.hygiene_id_for(s.syntax().text_range().start());
+                let call_ctx = self.expander.call_syntax_ctx();
+                let hygiene = self.hygiene_id_for(s.syntax().text_range());
                 let fmt = format_args::parse(
                     &s,
                     fmt_snippet,
@@ -2224,23 +2874,21 @@ impl ExprCollector<'_> {
         //         unsafe { ::core::fmt::UnsafeArg::new() }
         //     )
 
-        let Some(new_v1_formatted) = LangItem::FormatArguments.ty_rel_path(
+        let new_v1_formatted = LangItem::FormatArguments.ty_rel_path(
             self.db,
-            self.krate,
-            Name::new_symbol_root(sym::new_v1_formatted.clone()),
-        ) else {
-            return self.missing_expr();
-        };
-        let Some(unsafe_arg_new) = LangItem::FormatUnsafeArg.ty_rel_path(
+            self.module.krate(),
+            Name::new_symbol_root(sym::new_v1_formatted),
+        );
+        let unsafe_arg_new = LangItem::FormatUnsafeArg.ty_rel_path(
             self.db,
-            self.krate,
-            Name::new_symbol_root(sym::new.clone()),
-        ) else {
-            return self.missing_expr();
-        };
-        let new_v1_formatted = self.alloc_expr_desugared(Expr::Path(new_v1_formatted));
+            self.module.krate(),
+            Name::new_symbol_root(sym::new),
+        );
+        let new_v1_formatted =
+            self.alloc_expr_desugared(new_v1_formatted.map_or(Expr::Missing, Expr::Path));
 
-        let unsafe_arg_new = self.alloc_expr_desugared(Expr::Path(unsafe_arg_new));
+        let unsafe_arg_new =
+            self.alloc_expr_desugared(unsafe_arg_new.map_or(Expr::Missing, Expr::Path));
         let unsafe_arg_new =
             self.alloc_expr_desugared(Expr::Call { callee: unsafe_arg_new, args: Box::default() });
         let mut unsafe_arg_new = self.alloc_expr_desugared(Expr::Unsafe {
@@ -2320,54 +2968,94 @@ impl ExprCollector<'_> {
             zero_pad,
             debug_hex,
         } = &placeholder.format_options;
-        let fill = self.alloc_expr_desugared(Expr::Literal(Literal::Char(fill.unwrap_or(' '))));
-
-        let align = {
-            let align = LangItem::FormatAlignment.ty_rel_path(
-                self.db,
-                self.krate,
-                match alignment {
-                    Some(FormatAlignment::Left) => Name::new_symbol_root(sym::Left.clone()),
-                    Some(FormatAlignment::Right) => Name::new_symbol_root(sym::Right.clone()),
-                    Some(FormatAlignment::Center) => Name::new_symbol_root(sym::Center.clone()),
-                    None => Name::new_symbol_root(sym::Unknown.clone()),
-                },
-            );
-            match align {
-                Some(path) => self.alloc_expr_desugared(Expr::Path(path)),
-                None => self.missing_expr(),
-            }
-        };
-        // This needs to match `Flag` in library/core/src/fmt/rt.rs.
-        let flags: u32 = ((sign == Some(FormatSign::Plus)) as u32)
-            | (((sign == Some(FormatSign::Minus)) as u32) << 1)
-            | ((alternate as u32) << 2)
-            | ((zero_pad as u32) << 3)
-            | (((debug_hex == Some(FormatDebugHex::Lower)) as u32) << 4)
-            | (((debug_hex == Some(FormatDebugHex::Upper)) as u32) << 5);
-        let flags = self.alloc_expr_desugared(Expr::Literal(Literal::Uint(
-            flags as u128,
-            Some(BuiltinUint::U32),
-        )));
-        let precision = self.make_count(precision, argmap);
-        let width = self.make_count(width, argmap);
 
-        let format_placeholder_new = {
-            let format_placeholder_new = LangItem::FormatPlaceholder.ty_rel_path(
-                self.db,
-                self.krate,
-                Name::new_symbol_root(sym::new.clone()),
-            );
-            match format_placeholder_new {
-                Some(path) => self.alloc_expr_desugared(Expr::Path(path)),
-                None => self.missing_expr(),
-            }
-        };
+        let precision_expr = self.make_count(precision, argmap);
+        let width_expr = self.make_count(width, argmap);
 
-        self.alloc_expr_desugared(Expr::Call {
-            callee: format_placeholder_new,
-            args: Box::new([position, fill, align, flags, precision, width]),
-        })
+        if self.module.krate().workspace_data(self.db).is_atleast_187() {
+            // These need to match the constants in library/core/src/fmt/rt.rs.
+            let align = match alignment {
+                Some(FormatAlignment::Left) => 0,
+                Some(FormatAlignment::Right) => 1,
+                Some(FormatAlignment::Center) => 2,
+                None => 3,
+            };
+            // This needs to match `Flag` in library/core/src/fmt/rt.rs.
+            let flags = fill.unwrap_or(' ') as u32
+                | ((sign == Some(FormatSign::Plus)) as u32) << 21
+                | ((sign == Some(FormatSign::Minus)) as u32) << 22
+                | (alternate as u32) << 23
+                | (zero_pad as u32) << 24
+                | ((debug_hex == Some(FormatDebugHex::Lower)) as u32) << 25
+                | ((debug_hex == Some(FormatDebugHex::Upper)) as u32) << 26
+                | (width.is_some() as u32) << 27
+                | (precision.is_some() as u32) << 28
+                | align << 29
+                | 1 << 31; // Highest bit always set.
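+            // Bit layout, for reference (derived from the expression above): bits 0..=20 hold
+            // the fill `char`, bits 21..=26 the sign/alternate/zero-pad/debug-hex flags, bits
+            // 27 and 28 record whether a width/precision is present, bits 29..=30 the
+            // alignment, and bit 31 is always set.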
+            let flags = self.alloc_expr_desugared(Expr::Literal(Literal::Uint(
+                flags as u128,
+                Some(BuiltinUint::U32),
+            )));
+
+            let position =
+                RecordLitField { name: Name::new_symbol_root(sym::position), expr: position };
+            let flags = RecordLitField { name: Name::new_symbol_root(sym::flags), expr: flags };
+            let precision = RecordLitField {
+                name: Name::new_symbol_root(sym::precision),
+                expr: precision_expr,
+            };
+            let width =
+                RecordLitField { name: Name::new_symbol_root(sym::width), expr: width_expr };
+            self.alloc_expr_desugared(Expr::RecordLit {
+                path: LangItem::FormatPlaceholder.path(self.db, self.module.krate()).map(Box::new),
+                fields: Box::new([position, flags, precision, width]),
+                spread: None,
+            })
+        } else {
+            let format_placeholder_new = {
+                let format_placeholder_new = LangItem::FormatPlaceholder.ty_rel_path(
+                    self.db,
+                    self.module.krate(),
+                    Name::new_symbol_root(sym::new),
+                );
+                match format_placeholder_new {
+                    Some(path) => self.alloc_expr_desugared(Expr::Path(path)),
+                    None => self.missing_expr(),
+                }
+            };
+            // This needs to match `Flag` in library/core/src/fmt/rt.rs.
+            let flags: u32 = ((sign == Some(FormatSign::Plus)) as u32)
+                | (((sign == Some(FormatSign::Minus)) as u32) << 1)
+                | ((alternate as u32) << 2)
+                | ((zero_pad as u32) << 3)
+                | (((debug_hex == Some(FormatDebugHex::Lower)) as u32) << 4)
+                | (((debug_hex == Some(FormatDebugHex::Upper)) as u32) << 5);
+            let flags = self.alloc_expr_desugared(Expr::Literal(Literal::Uint(
+                flags as u128,
+                Some(BuiltinUint::U32),
+            )));
+            let fill = self.alloc_expr_desugared(Expr::Literal(Literal::Char(fill.unwrap_or(' '))));
+            let align = {
+                let align = LangItem::FormatAlignment.ty_rel_path(
+                    self.db,
+                    self.module.krate(),
+                    match alignment {
+                        Some(FormatAlignment::Left) => Name::new_symbol_root(sym::Left),
+                        Some(FormatAlignment::Right) => Name::new_symbol_root(sym::Right),
+                        Some(FormatAlignment::Center) => Name::new_symbol_root(sym::Center),
+                        None => Name::new_symbol_root(sym::Unknown),
+                    },
+                );
+                match align {
+                    Some(path) => self.alloc_expr_desugared(Expr::Path(path)),
+                    None => self.missing_expr(),
+                }
+            };
+            self.alloc_expr_desugared(Expr::Call {
+                callee: format_placeholder_new,
+                args: Box::new([position, fill, align, flags, precision_expr, width_expr]),
+            })
+        }
     }
 
     /// Generate a hir expression for a format_args Count.
@@ -2398,12 +3086,13 @@ impl ExprCollector<'_> {
             Some(FormatCount::Literal(n)) => {
                 let args = self.alloc_expr_desugared(Expr::Literal(Literal::Uint(
                     *n as u128,
-                    Some(BuiltinUint::Usize),
+                    // FIXME: Change this to Some(BuiltinUint::U16) once we drop support for toolchains < 1.88
+                    None,
                 )));
                 let count_is = match LangItem::FormatCount.ty_rel_path(
                     self.db,
-                    self.krate,
-                    Name::new_symbol_root(sym::Is.clone()),
+                    self.module.krate(),
+                    Name::new_symbol_root(sym::Is),
                 ) {
                     Some(count_is) => self.alloc_expr_desugared(Expr::Path(count_is)),
                     None => self.missing_expr(),
@@ -2420,8 +3109,8 @@ impl ExprCollector<'_> {
                     )));
                     let count_param = match LangItem::FormatCount.ty_rel_path(
                         self.db,
-                        self.krate,
-                        Name::new_symbol_root(sym::Param.clone()),
+                        self.module.krate(),
+                        Name::new_symbol_root(sym::Param),
                     ) {
                         Some(count_param) => self.alloc_expr_desugared(Expr::Path(count_param)),
                         None => self.missing_expr(),
@@ -2438,8 +3127,8 @@ impl ExprCollector<'_> {
             }
             None => match LangItem::FormatCount.ty_rel_path(
                 self.db,
-                self.krate,
-                Name::new_symbol_root(sym::Implied.clone()),
+                self.module.krate(),
+                Name::new_symbol_root(sym::Implied),
             ) {
                 Some(count_param) => self.alloc_expr_desugared(Expr::Path(count_param)),
                 None => self.missing_expr(),
@@ -2460,18 +3149,18 @@ impl ExprCollector<'_> {
 
         let new_fn = match LangItem::FormatArgument.ty_rel_path(
             self.db,
-            self.krate,
+            self.module.krate(),
             Name::new_symbol_root(match ty {
-                Format(Display) => sym::new_display.clone(),
-                Format(Debug) => sym::new_debug.clone(),
-                Format(LowerExp) => sym::new_lower_exp.clone(),
-                Format(UpperExp) => sym::new_upper_exp.clone(),
-                Format(Octal) => sym::new_octal.clone(),
-                Format(Pointer) => sym::new_pointer.clone(),
-                Format(Binary) => sym::new_binary.clone(),
-                Format(LowerHex) => sym::new_lower_hex.clone(),
-                Format(UpperHex) => sym::new_upper_hex.clone(),
-                Usize => sym::from_usize.clone(),
+                Format(Display) => sym::new_display,
+                Format(Debug) => sym::new_debug,
+                Format(LowerExp) => sym::new_lower_exp,
+                Format(UpperExp) => sym::new_upper_exp,
+                Format(Octal) => sym::new_octal,
+                Format(Pointer) => sym::new_pointer,
+                Format(Binary) => sym::new_binary,
+                Format(LowerHex) => sym::new_lower_hex,
+                Format(UpperHex) => sym::new_upper_hex,
+                Usize => sym::from_usize,
             }),
         ) {
             Some(new_fn) => self.alloc_expr_desugared(Expr::Path(new_fn)),
@@ -2483,7 +3172,7 @@ impl ExprCollector<'_> {
     // endregion: format
 
     fn lang_path(&self, lang: LangItem) -> Option<Path> {
-        lang.path(self.db, self.krate)
+        lang.path(self.db, self.module.krate())
     }
 }
 
@@ -2521,8 +3210,13 @@ impl ExprCollector<'_> {
         self.alloc_expr_desugared(Expr::Missing)
     }
 
-    fn alloc_binding(&mut self, name: Name, mode: BindingAnnotation) -> BindingId {
-        let binding = self.store.bindings.alloc(Binding { name, mode, problems: None });
+    fn alloc_binding(
+        &mut self,
+        name: Name,
+        mode: BindingAnnotation,
+        hygiene: HygieneId,
+    ) -> BindingId {
+        let binding = self.store.bindings.alloc(Binding { name, mode, problems: None, hygiene });
         if let Some(owner) = self.current_binding_owner {
             self.store.binding_owners.insert(binding, owner);
         }
@@ -2587,15 +3281,8 @@ impl ExprCollector<'_> {
         res
     }
 
-    /// If this returns `HygieneId::ROOT`, do not allocate to save space.
-    fn hygiene_id_for(&self, span_start: TextSize) -> HygieneId {
-        match &self.current_span_map {
-            None => HygieneId::ROOT,
-            Some(span_map) => {
-                let ctx = span_map.span_at(span_start).ctx;
-                HygieneId::new(self.db.lookup_intern_syntax_context(ctx).opaque_and_semitransparent)
-            }
-        }
+    fn hygiene_id_for(&self, range: TextRange) -> HygieneId {
+        self.expander.hygiene_for_range(self.db, range)
     }
 }
 
@@ -2609,3 +3296,33 @@ enum ArgumentType {
     Format(FormatTrait),
     Usize,
 }
+
+/// This function finds the AST fragment that corresponds to an `AssociatedTypeBinding` in the HIR.
+pub fn hir_assoc_type_binding_to_ast(
+    segment_args: &ast::GenericArgList,
+    binding_idx: u32,
+) -> Option<ast::AssocTypeArg> {
+    segment_args
+        .generic_args()
+        .filter_map(|arg| match arg {
+            ast::GenericArg::AssocTypeArg(it) => Some(it),
+            _ => None,
+        })
+        .filter(|binding| binding.param_list().is_none() && binding.name_ref().is_some())
+        .nth(binding_idx as usize)
+}
+
+/// This function finds the AST generic argument corresponding to the one in the HIR. It does not support the `Self` argument.
+pub fn hir_generic_arg_to_ast(
+    args: &ast::GenericArgList,
+    arg_idx: u32,
+    has_self_arg: bool,
+) -> Option<ast::GenericArg> {
+    args.generic_args()
+        .filter(|arg| match arg {
+            ast::GenericArg::AssocTypeArg(_) => false,
+            ast::GenericArg::LifetimeArg(arg) => arg.lifetime().is_some(),
+            ast::GenericArg::ConstArg(_) | ast::GenericArg::TypeArg(_) => true,
+        })
+        .nth(arg_idx as usize - has_self_arg as usize)
+}
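+
+// Hypothetical usage sketch (editor's illustration; `example_generic_arg_lookup` is an
+// assumed helper, not an existing API): map a HIR-side generic argument index back to its
+// AST node, skipping associated-type bindings just like `hir_generic_arg_to_ast` does.
+#[allow(dead_code)]
+fn example_generic_arg_lookup(args: &ast::GenericArgList) -> Option<ast::GenericArg> {
+    // With no `Self` argument, HIR index 0 maps to the first lifetime/type/const argument.
+    hir_generic_arg_to_ast(args, 0, false)
+}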
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/asm.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/asm.rs
index 032c18688ea71..9ef03065651a5 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/asm.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/asm.rs
@@ -3,8 +3,8 @@ use hir_expand::name::Name;
 use intern::Symbol;
 use rustc_hash::{FxHashMap, FxHashSet};
 use syntax::{
-    ast::{self, HasName, IsString},
     AstNode, AstPtr, AstToken, T,
+    ast::{self, HasName, IsString},
 };
 use tt::TextRange;
 
@@ -158,7 +158,12 @@ impl ExprCollector<'_> {
                                 AsmOperand::Const(self.collect_expr_opt(c.expr()))
                             }
                             ast::AsmOperand::AsmSym(s) => {
-                                let Some(path) = s.path().and_then(|p| self.parse_path(p)) else {
+                                let Some(path) = s.path().and_then(|p| {
+                                    self.lower_path(
+                                        p,
+                                        &mut ExprCollector::impl_trait_error_allocator,
+                                    )
+                                }) else {
                                     continue;
                                 };
                                 AsmOperand::Sym(path)
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/generics.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/generics.rs
new file mode 100644
index 0000000000000..9485e703d9cbe
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/generics.rs
@@ -0,0 +1,289 @@
+//! Many kinds of items or constructs can have generic parameters: functions,
+//! structs, impls, traits, etc. This module provides a common HIR for these
+//! generic parameters. See also the `Generics` type and the `generics_of` query
+//! in rustc.
+
+use std::sync::LazyLock;
+
+use either::Either;
+use hir_expand::name::{AsName, Name};
+use intern::sym;
+use la_arena::Arena;
+use syntax::ast::{self, HasName, HasTypeBounds};
+use thin_vec::ThinVec;
+use triomphe::Arc;
+
+use crate::{
+    GenericDefId, TypeOrConstParamId, TypeParamId,
+    expr_store::{TypePtr, lower::ExprCollector},
+    hir::generics::{
+        ConstParamData, GenericParams, LifetimeParamData, TypeOrConstParamData, TypeParamData,
+        TypeParamProvenance, WherePredicate,
+    },
+    type_ref::{LifetimeRef, LifetimeRefId, TypeBound, TypeRef, TypeRefId},
+};
+
+pub(crate) type ImplTraitLowerFn<'l> = &'l mut dyn for<'ec, 'db> FnMut(
+    &'ec mut ExprCollector<'db>,
+    TypePtr,
+    ThinVec<TypeBound>,
+) -> TypeRefId;
+
+pub(crate) struct GenericParamsCollector {
+    type_or_consts: Arena<TypeOrConstParamData>,
+    lifetimes: Arena<LifetimeParamData>,
+    where_predicates: Vec<WherePredicate>,
+    parent: GenericDefId,
+}
+
+impl GenericParamsCollector {
+    pub(crate) fn new(parent: GenericDefId) -> Self {
+        Self {
+            type_or_consts: Default::default(),
+            lifetimes: Default::default(),
+            where_predicates: Default::default(),
+            parent,
+        }
+    }
+    pub(crate) fn with_self_param(
+        ec: &mut ExprCollector<'_>,
+        parent: GenericDefId,
+        bounds: Option<ast::TypeBoundList>,
+    ) -> Self {
+        let mut this = Self::new(parent);
+        this.fill_self_param(ec, bounds);
+        this
+    }
+
+    pub(crate) fn lower(
+        &mut self,
+        ec: &mut ExprCollector<'_>,
+        generic_param_list: Option<ast::GenericParamList>,
+        where_clause: Option<ast::WhereClause>,
+    ) {
+        if let Some(params) = generic_param_list {
+            self.lower_param_list(ec, params)
+        }
+        if let Some(where_clause) = where_clause {
+            self.lower_where_predicates(ec, where_clause);
+        }
+    }
+
+    pub(crate) fn collect_impl_trait<R>(
+        &mut self,
+        ec: &mut ExprCollector<'_>,
+        cb: impl FnOnce(&mut ExprCollector<'_>, ImplTraitLowerFn<'_>) -> R,
+    ) -> R {
+        cb(
+            ec,
+            &mut Self::lower_argument_impl_trait(
+                &mut self.type_or_consts,
+                &mut self.where_predicates,
+                self.parent,
+            ),
+        )
+    }
+
+    pub(crate) fn finish(self) -> Arc<GenericParams> {
+        let Self { mut lifetimes, mut type_or_consts, mut where_predicates, parent: _ } = self;
+
+        if lifetimes.is_empty() && type_or_consts.is_empty() && where_predicates.is_empty() {
+            static EMPTY: LazyLock<Arc<GenericParams>> = LazyLock::new(|| {
+                Arc::new(GenericParams {
+                    lifetimes: Arena::new(),
+                    type_or_consts: Arena::new(),
+                    where_predicates: Box::default(),
+                })
+            });
+            return Arc::clone(&EMPTY);
+        }
+
+        lifetimes.shrink_to_fit();
+        type_or_consts.shrink_to_fit();
+        where_predicates.shrink_to_fit();
+        Arc::new(GenericParams {
+            type_or_consts,
+            lifetimes,
+            where_predicates: where_predicates.into_boxed_slice(),
+        })
+    }
+
+    fn lower_param_list(&mut self, ec: &mut ExprCollector<'_>, params: ast::GenericParamList) {
+        for generic_param in params.generic_params() {
+            let enabled = ec.expander.is_cfg_enabled(ec.db, ec.module.krate(), &generic_param);
+            if !enabled {
+                continue;
+            }
+
+            match generic_param {
+                ast::GenericParam::TypeParam(type_param) => {
+                    let name = type_param.name().map_or_else(Name::missing, |it| it.as_name());
+                    let default = type_param.default_type().map(|it| {
+                        ec.lower_type_ref(it, &mut ExprCollector::impl_trait_error_allocator)
+                    });
+                    let param = TypeParamData {
+                        name: Some(name.clone()),
+                        default,
+                        provenance: TypeParamProvenance::TypeParamList,
+                    };
+                    let idx = self.type_or_consts.alloc(param.into());
+                    let type_ref =
+                        TypeRef::TypeParam(TypeParamId::from_unchecked(TypeOrConstParamId {
+                            parent: self.parent,
+                            local_id: idx,
+                        }));
+                    let type_ref = ec.alloc_type_ref_desugared(type_ref);
+                    self.lower_bounds(ec, type_param.type_bound_list(), Either::Left(type_ref));
+                }
+                ast::GenericParam::ConstParam(const_param) => {
+                    let name = const_param.name().map_or_else(Name::missing, |it| it.as_name());
+                    let ty = ec.lower_type_ref_opt(
+                        const_param.ty(),
+                        &mut ExprCollector::impl_trait_error_allocator,
+                    );
+                    let param = ConstParamData {
+                        name,
+                        ty,
+                        default: const_param.default_val().map(|it| ec.lower_const_arg(it)),
+                    };
+                    let _idx = self.type_or_consts.alloc(param.into());
+                }
+                ast::GenericParam::LifetimeParam(lifetime_param) => {
+                    let lifetime = ec.lower_lifetime_ref_opt(lifetime_param.lifetime());
+                    if let LifetimeRef::Named(name) = &ec.store.lifetimes[lifetime] {
+                        let param = LifetimeParamData { name: name.clone() };
+                        let _idx = self.lifetimes.alloc(param);
+                        self.lower_bounds(
+                            ec,
+                            lifetime_param.type_bound_list(),
+                            Either::Right(lifetime),
+                        );
+                    }
+                }
+            }
+        }
+    }
+
+    fn lower_where_predicates(
+        &mut self,
+        ec: &mut ExprCollector<'_>,
+        where_clause: ast::WhereClause,
+    ) {
+        for pred in where_clause.predicates() {
+            let target = if let Some(type_ref) = pred.ty() {
+                Either::Left(
+                    ec.lower_type_ref(type_ref, &mut ExprCollector::impl_trait_error_allocator),
+                )
+            } else if let Some(lifetime) = pred.lifetime() {
+                Either::Right(ec.lower_lifetime_ref(lifetime))
+            } else {
+                continue;
+            };
+
+            let lifetimes: Option<Box<_>> = pred.generic_param_list().map(|param_list| {
+                // Higher-Ranked Trait Bounds
+                param_list
+                    .lifetime_params()
+                    .map(|lifetime_param| {
+                        lifetime_param
+                            .lifetime()
+                            .map_or_else(Name::missing, |lt| Name::new_lifetime(&lt.text()))
+                    })
+                    .collect()
+            });
+            for bound in pred.type_bound_list().iter().flat_map(|l| l.bounds()) {
+                self.lower_type_bound_as_predicate(ec, bound, lifetimes.as_deref(), target);
+            }
+        }
+    }
+
+    fn lower_bounds(
+        &mut self,
+        ec: &mut ExprCollector<'_>,
+        type_bounds: Option<ast::TypeBoundList>,
+        target: Either<TypeRefId, LifetimeRefId>,
+    ) {
+        for bound in type_bounds.iter().flat_map(|type_bound_list| type_bound_list.bounds()) {
+            self.lower_type_bound_as_predicate(ec, bound, None, target);
+        }
+    }
+
+    fn lower_type_bound_as_predicate(
+        &mut self,
+        ec: &mut ExprCollector<'_>,
+        bound: ast::TypeBound,
+        hrtb_lifetimes: Option<&[Name]>,
+        target: Either<TypeRefId, LifetimeRefId>,
+    ) {
+        let bound = ec.lower_type_bound(
+            bound,
+            &mut Self::lower_argument_impl_trait(
+                &mut self.type_or_consts,
+                &mut self.where_predicates,
+                self.parent,
+            ),
+        );
+        let predicate = match (target, bound) {
+            (_, TypeBound::Error | TypeBound::Use(_)) => return,
+            (Either::Left(type_ref), bound) => match hrtb_lifetimes {
+                Some(hrtb_lifetimes) => WherePredicate::ForLifetime {
+                    lifetimes: ThinVec::from_iter(hrtb_lifetimes.iter().cloned()),
+                    target: type_ref,
+                    bound,
+                },
+                None => WherePredicate::TypeBound { target: type_ref, bound },
+            },
+            (Either::Right(lifetime), TypeBound::Lifetime(bound)) => {
+                WherePredicate::Lifetime { target: lifetime, bound }
+            }
+            (Either::Right(_), TypeBound::ForLifetime(..) | TypeBound::Path(..)) => return,
+        };
+        self.where_predicates.push(predicate);
+    }
+
+    fn lower_argument_impl_trait(
+        type_or_consts: &mut Arena<TypeOrConstParamData>,
+        where_predicates: &mut Vec<WherePredicate>,
+        parent: GenericDefId,
+    ) -> impl for<'ec, 'db> FnMut(&'ec mut ExprCollector<'db>, TypePtr, ThinVec<TypeBound>) -> TypeRefId
+    {
+        move |ec, ptr, impl_trait_bounds| {
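+            // Argument-position `impl Trait` is lowered into a fresh anonymous type
+            // parameter on the parent `GenericDefId` (provenance `ArgumentImplTrait`),
+            // with every bound recorded as a where-predicate on that parameter.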
+            let param = TypeParamData {
+                name: None,
+                default: None,
+                provenance: TypeParamProvenance::ArgumentImplTrait,
+            };
+            let param_id = TypeRef::TypeParam(TypeParamId::from_unchecked(TypeOrConstParamId {
+                parent,
+                local_id: type_or_consts.alloc(param.into()),
+            }));
+            let type_ref = ec.alloc_type_ref(param_id, ptr);
+            for bound in impl_trait_bounds {
+                where_predicates
+                    .push(WherePredicate::TypeBound { target: type_ref, bound: bound.clone() });
+            }
+            type_ref
+        }
+    }
+
+    fn fill_self_param(&mut self, ec: &mut ExprCollector<'_>, bounds: Option<ast::TypeBoundList>) {
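+        // The implicit `Self` type parameter is allocated first so that its index
+        // matches `GenericParams::SELF_PARAM_ID_IN_SELF`; any bounds on it (e.g.
+        // supertraits) are then lowered as predicates targeting that parameter.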
+        let self_ = Name::new_symbol_root(sym::Self_);
+        let idx = self.type_or_consts.alloc(
+            TypeParamData {
+                name: Some(self_.clone()),
+                default: None,
+                provenance: TypeParamProvenance::TraitSelf,
+            }
+            .into(),
+        );
+        debug_assert_eq!(idx, GenericParams::SELF_PARAM_ID_IN_SELF);
+        let type_ref = TypeRef::TypeParam(TypeParamId::from_unchecked(TypeOrConstParamId {
+            parent: self.parent,
+            local_id: idx,
+        }));
+        let self_ = ec.alloc_type_ref_desugared(type_ref);
+        if let Some(bounds) = bounds {
+            self.lower_bounds(ec, Some(bounds), Either::Left(self_));
+        }
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/path.rs
similarity index 61%
rename from src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs
rename to src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/path.rs
index 3b7e7653fba55..629d1f2ada716 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/path.rs
@@ -1,20 +1,28 @@
 //! Transforms syntax into `Path` objects, ideally with accounting for hygiene
 
+#[cfg(test)]
+mod tests;
+
 use std::iter;
 
-use crate::{lower::LowerCtx, path::NormalPath, type_ref::ConstRef};
+use crate::expr_store::{
+    lower::{ExprCollector, generics::ImplTraitLowerFn},
+    path::NormalPath,
+};
 
 use hir_expand::{
-    mod_path::resolve_crate_root,
+    mod_path::{ModPath, PathKind, resolve_crate_root},
     name::{AsName, Name},
 };
-use intern::{sym, Interned};
-use stdx::thin_vec::EmptyOptimizedThinVec;
-use syntax::ast::{self, AstNode, HasGenericArgs, HasTypeBounds};
+use intern::{Interned, sym};
+use syntax::{
+    AstPtr,
+    ast::{self, AstNode, HasGenericArgs},
+};
 
 use crate::{
-    path::{AssociatedTypeBinding, GenericArg, GenericArgs, ModPath, Path, PathKind},
-    type_ref::{LifetimeRef, TypeBound, TypeRef},
+    expr_store::path::{GenericArg, GenericArgs, Path},
+    type_ref::TypeRef,
 };
 
 #[cfg(test)]
@@ -27,7 +35,11 @@ thread_local! {
 /// It correctly handles `$crate` based path from macro call.
 // If you modify the logic of the lowering, make sure to check if `hir_segment_to_ast_segment()`
 // also needs an update.
-pub(super) fn lower_path(ctx: &mut LowerCtx<'_>, mut path: ast::Path) -> Option<Path> {
+pub(super) fn lower_path(
+    collector: &mut ExprCollector<'_>,
+    mut path: ast::Path,
+    impl_trait_lower_fn: ImplTraitLowerFn<'_>,
+) -> Option<Path> {
     let mut kind = PathKind::Plain;
     let mut type_anchor = None;
     let mut segments = Vec::new();
@@ -43,9 +55,20 @@ pub(super) fn lower_path(ctx: &mut LowerCtx<'_>, mut path: ast::Path) -> Option<
         segments.push(name);
     };
     loop {
-        let segment = path.segment()?;
+        let Some(segment) = path.segment() else {
+            segments.push(Name::missing());
+            // We can end up here for paths like `path::`, where the final segment is missing.
+            match qualifier(&path) {
+                Some(it) => {
+                    path = it;
+                    continue;
+                }
+                None => break,
+            }
+        };
 
         if segment.coloncolon_token().is_some() {
+            debug_assert!(path.qualifier().is_none()); // this can only occur at the first segment
             kind = PathKind::Abs;
         }
 
@@ -57,8 +80,8 @@ pub(super) fn lower_path(ctx: &mut LowerCtx<'_>, mut path: ast::Path) -> Option<
                         return None;
                     }
                     break kind = resolve_crate_root(
-                        ctx.db.upcast(),
-                        ctx.span_map().span_for_range(name_ref.syntax().text_range()).ctx,
+                        collector.db,
+                        collector.expander.ctx_for_range(name_ref.syntax().text_range()),
                     )
                     .map(PathKind::DollarCrate)
                     .unwrap_or(PathKind::Crate);
@@ -66,13 +89,16 @@ pub(super) fn lower_path(ctx: &mut LowerCtx<'_>, mut path: ast::Path) -> Option<
                 let name = name_ref.as_name();
                 let args = segment
                     .generic_arg_list()
-                    .and_then(|it| lower_generic_args(ctx, it))
+                    .and_then(|it| collector.lower_generic_args(it, impl_trait_lower_fn))
                     .or_else(|| {
-                        lower_generic_args_from_fn_path(
-                            ctx,
+                        collector.lower_generic_args_from_fn_path(
                             segment.parenthesized_arg_list(),
                             segment.ret_type(),
+                            impl_trait_lower_fn,
                         )
+                    })
+                    .or_else(|| {
+                        segment.return_type_syntax().map(|_| GenericArgs::return_type_notation())
                     });
                 if args.is_some() {
                     generic_args.resize(segments.len(), None);
@@ -81,12 +107,12 @@ pub(super) fn lower_path(ctx: &mut LowerCtx<'_>, mut path: ast::Path) -> Option<
                 push_segment(&segment, &mut segments, name);
             }
             ast::PathSegmentKind::SelfTypeKw => {
-                push_segment(&segment, &mut segments, Name::new_symbol_root(sym::Self_.clone()));
+                push_segment(&segment, &mut segments, Name::new_symbol_root(sym::Self_));
             }
             ast::PathSegmentKind::Type { type_ref, trait_ref } => {
-                assert!(path.qualifier().is_none()); // this can only occur at the first segment
+                debug_assert!(path.qualifier().is_none()); // this can only occur at the first segment
 
-                let self_type = TypeRef::from_ast(ctx, type_ref?);
+                let self_type = collector.lower_type_ref(type_ref?, impl_trait_lower_fn);
 
                 match trait_ref {
                     // <T>::foo
@@ -96,7 +122,12 @@ pub(super) fn lower_path(ctx: &mut LowerCtx<'_>, mut path: ast::Path) -> Option<
                     }
                     // <T as Trait<A>>::Foo desugars to Trait<Self=T, A>::Foo
                     Some(trait_ref) => {
-                        let path = Path::from_src(ctx, trait_ref.path()?)?;
+                        let path = collector.lower_path(trait_ref.path()?, impl_trait_lower_fn)?;
+                        // FIXME: Unnecessary clone
+                        collector.alloc_type_ref(
+                            TypeRef::Path(path.clone()),
+                            AstPtr::new(&trait_ref).upcast(),
+                        );
                         let mod_path = path.mod_path()?;
                         let path_generic_args = path.generic_args();
                         let num_segments = mod_path.segments().len();
@@ -123,10 +154,8 @@ pub(super) fn lower_path(ctx: &mut LowerCtx<'_>, mut path: ast::Path) -> Option<
                                 args: iter::once(self_type)
                                     .chain(it.args.iter().cloned())
                                     .collect(),
-
                                 has_self_type: true,
-                                bindings: it.bindings.clone(),
-                                desugared_from_fn: it.desugared_from_fn,
+                                ..it
                             },
                             None => GenericArgs {
                                 args: Box::new([self_type]),
@@ -184,10 +213,10 @@ pub(super) fn lower_path(ctx: &mut LowerCtx<'_>, mut path: ast::Path) -> Option<
     // We follow what it did anyway :)
     if segments.len() == 1 && kind == PathKind::Plain {
         if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
-            let syn_ctxt = ctx.span_map().span_for_range(path.segment()?.syntax().text_range()).ctx;
-            if let Some(macro_call_id) = ctx.db.lookup_intern_syntax_context(syn_ctxt).outer_expn {
-                if ctx.db.lookup_intern_macro_call(macro_call_id).def.local_inner {
-                    kind = match resolve_crate_root(ctx.db.upcast(), syn_ctxt) {
+            let syn_ctxt = collector.expander.ctx_for_range(path.segment()?.syntax().text_range());
+            if let Some(macro_call_id) = syn_ctxt.outer_expn(collector.db) {
+                if collector.db.lookup_intern_macro_call(macro_call_id.into()).def.local_inner {
+                    kind = match resolve_crate_root(collector.db, syn_ctxt) {
                         Some(crate_root) => PathKind::DollarCrate(crate_root),
                         None => PathKind::Crate,
                     }
@@ -207,7 +236,11 @@ pub(super) fn lower_path(ctx: &mut LowerCtx<'_>, mut path: ast::Path) -> Option<
     if type_anchor.is_none() && generic_args.is_empty() {
         return Some(Path::BarePath(mod_path));
     } else {
-        return Some(Path::Normal(NormalPath::new(type_anchor, mod_path, generic_args)));
+        return Some(Path::Normal(Box::new(NormalPath {
+            type_anchor,
+            mod_path,
+            generic_args: generic_args.into_boxed_slice(),
+        })));
     }
 
     fn qualifier(path: &ast::Path) -> Option<ast::Path> {
@@ -256,102 +289,3 @@ pub fn hir_segment_to_ast_segment(path: &ast::Path, segment_idx: u32) -> Option<
             .nth(segment_idx as usize)
     }
 }
-
-pub(super) fn lower_generic_args(
-    lower_ctx: &mut LowerCtx<'_>,
-    node: ast::GenericArgList,
-) -> Option<GenericArgs> {
-    let mut args = Vec::new();
-    let mut bindings = Vec::new();
-    for generic_arg in node.generic_args() {
-        match generic_arg {
-            ast::GenericArg::TypeArg(type_arg) => {
-                let type_ref = TypeRef::from_ast_opt(lower_ctx, type_arg.ty());
-                lower_ctx.update_impl_traits_bounds_from_type_ref(type_ref);
-                args.push(GenericArg::Type(type_ref));
-            }
-            ast::GenericArg::AssocTypeArg(assoc_type_arg) => {
-                if assoc_type_arg.param_list().is_some() {
-                    // We currently ignore associated return type bounds.
-                    continue;
-                }
-                if let Some(name_ref) = assoc_type_arg.name_ref() {
-                    // Nested impl traits like `impl Foo<Assoc = impl Bar>` are allowed
-                    lower_ctx.with_outer_impl_trait_scope(false, |lower_ctx| {
-                        let name = name_ref.as_name();
-                        let args = assoc_type_arg
-                            .generic_arg_list()
-                            .and_then(|args| lower_generic_args(lower_ctx, args));
-                        let type_ref =
-                            assoc_type_arg.ty().map(|it| TypeRef::from_ast(lower_ctx, it));
-                        let type_ref = type_ref
-                            .inspect(|&tr| lower_ctx.update_impl_traits_bounds_from_type_ref(tr));
-                        let bounds = if let Some(l) = assoc_type_arg.type_bound_list() {
-                            l.bounds().map(|it| TypeBound::from_ast(lower_ctx, it)).collect()
-                        } else {
-                            Box::default()
-                        };
-                        bindings.push(AssociatedTypeBinding { name, args, type_ref, bounds });
-                    });
-                }
-            }
-            ast::GenericArg::LifetimeArg(lifetime_arg) => {
-                if let Some(lifetime) = lifetime_arg.lifetime() {
-                    let lifetime_ref = LifetimeRef::new(&lifetime);
-                    args.push(GenericArg::Lifetime(lifetime_ref))
-                }
-            }
-            ast::GenericArg::ConstArg(arg) => {
-                let arg = ConstRef::from_const_arg(lower_ctx, Some(arg));
-                args.push(GenericArg::Const(arg))
-            }
-        }
-    }
-
-    if args.is_empty() && bindings.is_empty() {
-        return None;
-    }
-    Some(GenericArgs {
-        args: args.into_boxed_slice(),
-        has_self_type: false,
-        bindings: bindings.into_boxed_slice(),
-        desugared_from_fn: false,
-    })
-}
-
-/// Collect `GenericArgs` from the parts of a fn-like path, i.e. `Fn(X, Y)
-/// -> Z` (which desugars to `Fn<(X, Y), Output=Z>`).
-fn lower_generic_args_from_fn_path(
-    ctx: &mut LowerCtx<'_>,
-    args: Option<ast::ParenthesizedArgList>,
-    ret_type: Option<ast::RetType>,
-) -> Option<GenericArgs> {
-    let params = args?;
-    let mut param_types = Vec::new();
-    for param in params.type_args() {
-        let type_ref = TypeRef::from_ast_opt(ctx, param.ty());
-        param_types.push(type_ref);
-    }
-    let args = Box::new([GenericArg::Type(
-        ctx.alloc_type_ref_desugared(TypeRef::Tuple(EmptyOptimizedThinVec::from_iter(param_types))),
-    )]);
-    let bindings = if let Some(ret_type) = ret_type {
-        let type_ref = TypeRef::from_ast_opt(ctx, ret_type.ty());
-        Box::new([AssociatedTypeBinding {
-            name: Name::new_symbol_root(sym::Output.clone()),
-            args: None,
-            type_ref: Some(type_ref),
-            bounds: Box::default(),
-        }])
-    } else {
-        // -> ()
-        let type_ref = ctx.alloc_type_ref_desugared(TypeRef::unit());
-        Box::new([AssociatedTypeBinding {
-            name: Name::new_symbol_root(sym::Output.clone()),
-            args: None,
-            type_ref: Some(type_ref),
-            bounds: Box::default(),
-        }])
-    };
-    Some(GenericArgs { args, has_self_type: false, bindings, desugared_from_fn: true })
-}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/path/tests.rs
similarity index 78%
rename from src/tools/rust-analyzer/crates/hir-def/src/path/tests.rs
rename to src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/path/tests.rs
index 67a27bf85e89c..337cb103bde2f 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/path/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/path/tests.rs
@@ -1,26 +1,29 @@
-use expect_test::{expect, Expect};
+use expect_test::{Expect, expect};
 use span::Edition;
 use syntax::ast::{self, make};
 use test_fixture::WithFixture;
 
 use crate::{
-    lower::LowerCtx,
-    path::{
-        lower::{hir_segment_to_ast_segment, SEGMENT_LOWERING_MAP},
-        Path,
+    db::DefDatabase,
+    expr_store::{
+        ExpressionStore,
+        lower::{
+            ExprCollector,
+            path::{SEGMENT_LOWERING_MAP, hir_segment_to_ast_segment},
+        },
+        path::Path,
+        pretty,
     },
-    pretty,
     test_db::TestDB,
-    type_ref::{TypesMap, TypesSourceMap},
 };
 
-fn lower_path(path: ast::Path) -> (TestDB, TypesMap, Option<Path>) {
+fn lower_path(path: ast::Path) -> (TestDB, ExpressionStore, Option<Path>) {
     let (db, file_id) = TestDB::with_single_file("");
-    let mut types_map = TypesMap::default();
-    let mut types_source_map = TypesSourceMap::default();
-    let mut ctx = LowerCtx::new(&db, file_id.into(), &mut types_map, &mut types_source_map);
-    let lowered_path = ctx.lower_path(path);
-    (db, types_map, lowered_path)
+    let krate = db.fetch_test_crate();
+    let mut ctx = ExprCollector::new(&db, db.crate_def_map(krate).root_module_id(), file_id.into());
+    let lowered_path = ctx.lower_path(path, &mut ExprCollector::impl_trait_allocator);
+    let store = ctx.store.finish();
+    (db, store, lowered_path)
 }
 
 #[track_caller]
@@ -111,11 +114,9 @@ fn keywords_in_middle_fail_lowering3() {
 
 #[track_caller]
 fn check_path_lowering(path: &str, expected: Expect) {
-    let (db, types_map, lowered_path) = lower_path(make::path_from_text(path));
+    let (db, store, lowered_path) = lower_path(make::path_from_text(path));
     let lowered_path = lowered_path.expect("failed to lower path");
-    let mut buf = String::new();
-    pretty::print_path(&db, &lowered_path, &types_map, &mut buf, Edition::CURRENT)
-        .expect("failed to pretty-print path");
+    let buf = pretty::print_path(&db, &store, &lowered_path, Edition::CURRENT);
     expected.assert_eq(&buf);
 }
 
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/path.rs
similarity index 74%
rename from src/tools/rust-analyzer/crates/hir-def/src/path.rs
rename to src/tools/rust-analyzer/crates/hir-def/src/expr_store/path.rs
index 713e7389736a0..db83e73a0b95f 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/path.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/path.rs
@@ -1,54 +1,16 @@
 //! A desugared representation of paths like `crate::foo` or `<Type as Trait>::bar`.
-mod lower;
-#[cfg(test)]
-mod tests;
 
-use std::{
-    fmt::{self, Display},
-    iter,
-};
+use std::iter;
 
 use crate::{
     lang_item::LangItemTarget,
-    lower::LowerCtx,
-    type_ref::{ConstRef, LifetimeRef, TypeBound, TypeRefId},
+    type_ref::{ConstRef, LifetimeRefId, TypeBound, TypeRefId},
+};
+use hir_expand::{
+    mod_path::{ModPath, PathKind},
+    name::Name,
 };
-use hir_expand::name::Name;
 use intern::Interned;
-use span::Edition;
-use stdx::thin_vec::thin_vec_with_header_struct;
-use syntax::ast;
-
-pub use hir_expand::mod_path::{path, ModPath, PathKind};
-
-pub use lower::hir_segment_to_ast_segment;
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub enum ImportAlias {
-    /// Unnamed alias, as in `use Foo as _;`
-    Underscore,
-    /// Named alias
-    Alias(Name),
-}
-
-impl ImportAlias {
-    pub fn display(&self, edition: Edition) -> impl Display + '_ {
-        ImportAliasDisplay { value: self, edition }
-    }
-}
-
-struct ImportAliasDisplay<'a> {
-    value: &'a ImportAlias,
-    edition: Edition,
-}
-impl Display for ImportAliasDisplay<'_> {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        match self.value {
-            ImportAlias::Underscore => f.write_str("_"),
-            ImportAlias::Alias(name) => Display::fmt(&name.display_no_db(self.edition), f),
-        }
-    }
-}
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub enum Path {
@@ -58,7 +20,7 @@ pub enum Path {
     /// this is not a problem since many more paths have generics than a type anchor).
     BarePath(Interned<ModPath>),
     /// `Path::Normal` will always have either generics or type anchor.
-    Normal(NormalPath),
+    Normal(Box<NormalPath>),
     /// A link to a lang item. It is used in desugaring of things like `it?`. We can show these
     /// links via a normal path since they might be private and not accessible in the usage place.
     LangItem(LangItemTarget, Option<Name>),
@@ -71,12 +33,24 @@ const _: () = {
     assert!(size_of::<Option<Path>>() == 16);
 };
 
-thin_vec_with_header_struct! {
-    pub new(pub(crate)) struct NormalPath, NormalPathHeader {
-        pub generic_args: [Option<GenericArgs>],
-        pub type_anchor: Option<TypeRefId>,
-        pub mod_path: Interned<ModPath>; ref,
-    }
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct NormalPath {
+    pub generic_args: Box<[Option<GenericArgs>]>,
+    pub type_anchor: Option<TypeRefId>,
+    pub mod_path: Interned<ModPath>,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum GenericArgsParentheses {
+    No,
+    /// Bounds of the form `Type::method(..): Send` or `impl Trait<method(..): Send>`,
+    /// a.k.a. Return Type Notation (RTN).
+    ReturnTypeNotation,
+    /// `Fn`-family parenthesized traits, e.g. `impl Fn(u32) -> String`.
+    ///
+    /// This is desugared into one generic argument containing a tuple of all arguments,
+    /// and an associated type binding for `Output` for the return type.
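+    ///
+    /// For example, `Fn(u32, bool) -> String` is treated as
+    /// `Fn<(u32, bool), Output = String>`.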
+    ParenSugar,
 }
 
 /// Generic arguments to a path segment (e.g. the `i32` in `Option<i32>`). This
@@ -92,9 +66,8 @@ pub struct GenericArgs {
     pub has_self_type: bool,
     /// Associated type bindings like in `Iterator<Item = T>`.
     pub bindings: Box<[AssociatedTypeBinding]>,
-    /// Whether these generic args were desugared from `Trait(Arg) -> Output`
-    /// parenthesis notation typically used for the `Fn` traits.
-    pub desugared_from_fn: bool,
+    /// Whether these generic args were written with parentheses and how.
+    pub parenthesized: GenericArgsParentheses,
 }
 
 /// An associated type binding like in `Iterator<Item = T>`.
@@ -118,20 +91,18 @@ pub struct AssociatedTypeBinding {
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub enum GenericArg {
     Type(TypeRefId),
-    Lifetime(LifetimeRef),
+    Lifetime(LifetimeRefId),
     Const(ConstRef),
 }
 
 impl Path {
-    /// Converts an `ast::Path` to `Path`. Works with use trees.
-    /// It correctly handles `$crate` based path from macro call.
-    pub fn from_src(ctx: &mut LowerCtx<'_>, path: ast::Path) -> Option<Path> {
-        lower::lower_path(ctx, path)
-    }
-
     /// Converts a known mod path to `Path`.
     pub fn from_known_path(path: ModPath, generic_args: Vec<Option<GenericArgs>>) -> Path {
-        Path::Normal(NormalPath::new(None, Interned::new(path), generic_args))
+        Path::Normal(Box::new(NormalPath {
+            generic_args: generic_args.into_boxed_slice(),
+            type_anchor: None,
+            mod_path: Interned::new(path),
+        }))
     }
 
     /// Converts a known mod path to `Path`.
@@ -143,7 +114,7 @@ impl Path {
     pub fn kind(&self) -> &PathKind {
         match self {
             Path::BarePath(mod_path) => &mod_path.kind,
-            Path::Normal(path) => &path.mod_path().kind,
+            Path::Normal(path) => &path.mod_path.kind,
             Path::LangItem(..) => &PathKind::Abs,
         }
     }
@@ -151,7 +122,7 @@ impl Path {
     #[inline]
     pub fn type_anchor(&self) -> Option<TypeRefId> {
         match self {
-            Path::Normal(path) => path.type_anchor(),
+            Path::Normal(path) => path.type_anchor,
             Path::LangItem(..) | Path::BarePath(_) => None,
         }
     }
@@ -159,7 +130,7 @@ impl Path {
     #[inline]
     pub fn generic_args(&self) -> Option<&[Option<GenericArgs>]> {
         match self {
-            Path::Normal(path) => Some(path.generic_args()),
+            Path::Normal(path) => Some(&path.generic_args),
             Path::LangItem(..) | Path::BarePath(_) => None,
         }
     }
@@ -170,8 +141,8 @@ impl Path {
                 PathSegments { segments: mod_path.segments(), generic_args: None }
             }
             Path::Normal(path) => PathSegments {
-                segments: path.mod_path().segments(),
-                generic_args: Some(path.generic_args()),
+                segments: path.mod_path.segments(),
+                generic_args: Some(&path.generic_args),
             },
             Path::LangItem(_, seg) => PathSegments { segments: seg.as_slice(), generic_args: None },
         }
@@ -180,7 +151,7 @@ impl Path {
     pub fn mod_path(&self) -> Option<&ModPath> {
         match self {
             Path::BarePath(mod_path) => Some(mod_path),
-            Path::Normal(path) => Some(path.mod_path()),
+            Path::Normal(path) => Some(&path.mod_path),
             Path::LangItem(..) => None,
         }
     }
@@ -197,12 +168,12 @@ impl Path {
                 ))))
             }
             Path::Normal(path) => {
-                let mod_path = path.mod_path();
+                let mod_path = &path.mod_path;
                 if mod_path.is_ident() {
                     return None;
                 }
-                let type_anchor = path.type_anchor();
-                let generic_args = path.generic_args();
+                let type_anchor = path.type_anchor;
+                let generic_args = &path.generic_args;
                 let qualifier_mod_path = Interned::new(ModPath::from_segments(
                     mod_path.kind,
                     mod_path.segments()[..mod_path.segments().len() - 1].iter().cloned(),
@@ -211,11 +182,11 @@ impl Path {
                 if type_anchor.is_none() && qualifier_generic_args.iter().all(|it| it.is_none()) {
                     Some(Path::BarePath(qualifier_mod_path))
                 } else {
-                    Some(Path::Normal(NormalPath::new(
+                    Some(Path::Normal(Box::new(NormalPath {
                         type_anchor,
-                        qualifier_mod_path,
-                        qualifier_generic_args.iter().cloned(),
-                    )))
+                        mod_path: qualifier_mod_path,
+                        generic_args: qualifier_generic_args.iter().cloned().collect(),
+                    })))
                 }
             }
             Path::LangItem(..) => None,
@@ -226,9 +197,9 @@ impl Path {
         match self {
             Path::BarePath(mod_path) => mod_path.is_Self(),
             Path::Normal(path) => {
-                path.type_anchor().is_none()
-                    && path.mod_path().is_Self()
-                    && path.generic_args().iter().all(|args| args.is_none())
+                path.type_anchor.is_none()
+                    && path.mod_path.is_Self()
+                    && path.generic_args.iter().all(|args| args.is_none())
             }
             Path::LangItem(..) => false,
         }
@@ -314,19 +285,21 @@ impl<'a> PathSegments<'a> {
 }
 
 impl GenericArgs {
-    pub(crate) fn from_ast(
-        lower_ctx: &mut LowerCtx<'_>,
-        node: ast::GenericArgList,
-    ) -> Option<GenericArgs> {
-        lower::lower_generic_args(lower_ctx, node)
+    pub(crate) fn empty() -> GenericArgs {
+        GenericArgs {
+            args: Box::default(),
+            has_self_type: false,
+            bindings: Box::default(),
+            parenthesized: GenericArgsParentheses::No,
+        }
     }
 
-    pub(crate) fn empty() -> GenericArgs {
+    pub(crate) fn return_type_notation() -> GenericArgs {
         GenericArgs {
             args: Box::default(),
             has_self_type: false,
             bindings: Box::default(),
-            desugared_from_fn: false,
+            parenthesized: GenericArgsParentheses::ReturnTypeNotation,
         }
     }
 }
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs
index 82ad756dc2c6a..f12a9b7a5445b 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs
@@ -1,56 +1,83 @@
 //! A pretty-printer for HIR.
+#![allow(dead_code)]
 
-use std::fmt::{self, Write};
+use std::{
+    fmt::{self, Write},
+    mem,
+};
 
+use hir_expand::{Lookup, mod_path::PathKind};
 use itertools::Itertools;
 use span::Edition;
 
 use crate::{
-    hir::{Array, BindingAnnotation, CaptureBy, ClosureKind, Literal, Movability, Statement},
-    pretty::{print_generic_args, print_path, print_type_ref},
+    AdtId, DefWithBodyId, GenericDefId, ItemTreeLoc, TypeParamId, VariantId,
+    expr_store::path::{GenericArg, GenericArgs},
+    hir::{
+        Array, BindingAnnotation, CaptureBy, ClosureKind, Literal, Movability, Statement,
+        generics::{GenericParams, WherePredicate},
+    },
+    lang_item::LangItemTarget,
+    signatures::{FnFlags, FunctionSignature, StructSignature},
+    type_ref::{ConstRef, LifetimeRef, Mutability, TraitBoundModifier, TypeBound, UseArgRef},
 };
+use crate::{LifetimeParamId, signatures::StructFlags};
+use crate::{item_tree::FieldsShape, signatures::FieldData};
 
 use super::*;
 
+macro_rules! w {
+    ($dst:expr, $($arg:tt)*) => {
+        { let _ = write!($dst, $($arg)*); }
+    };
+}
+
+macro_rules! wln {
+    ($dst:expr) => {
+        { $dst.newline(); }
+    };
+    ($dst:expr, $($arg:tt)*) => {
+        { let _ = w!($dst, $($arg)*); $dst.newline(); }
+    };
+}
+
 #[derive(Debug, Clone, Copy, PartialEq, Eq)]
-pub(super) enum LineFormat {
+pub enum LineFormat {
     Oneline,
     Newline,
     Indentation,
 }
 
-pub(super) fn print_body_hir(
+pub fn print_body_hir(
     db: &dyn DefDatabase,
     body: &Body,
     owner: DefWithBodyId,
     edition: Edition,
 ) -> String {
     let header = match owner {
-        DefWithBodyId::FunctionId(it) => it
-            .lookup(db)
-            .id
-            .resolved(db, |it| format!("fn {}", it.name.display(db.upcast(), edition))),
+        DefWithBodyId::FunctionId(it) => {
+            it.lookup(db).id.resolved(db, |it| format!("fn {}", it.name.display(db, edition)))
+        }
         DefWithBodyId::StaticId(it) => it
             .lookup(db)
             .id
-            .resolved(db, |it| format!("static {} = ", it.name.display(db.upcast(), edition))),
+            .resolved(db, |it| format!("static {} = ", it.name.display(db, edition))),
         DefWithBodyId::ConstId(it) => it.lookup(db).id.resolved(db, |it| {
             format!(
                 "const {} = ",
                 match &it.name {
-                    Some(name) => name.display(db.upcast(), edition).to_string(),
+                    Some(name) => name.display(db, edition).to_string(),
                     None => "_".to_owned(),
                 }
             )
         }),
-        DefWithBodyId::InTypeConstId(_) => "In type const = ".to_owned(),
         DefWithBodyId::VariantId(it) => {
             let loc = it.lookup(db);
             let enum_loc = loc.parent.lookup(db);
             format!(
                 "enum {}::{}",
-                enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db.upcast(), edition),
-                loc.id.item_tree(db)[loc.id.value].name.display(db.upcast(), edition),
+                enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db, edition),
+                loc.id.item_tree(db)[loc.id.value].name.display(db, edition),
             )
         }
     };
@@ -63,32 +90,21 @@ pub(super) fn print_body_hir(
         line_format: LineFormat::Newline,
         edition,
     };
-    if let DefWithBodyId::FunctionId(it) = owner {
+    if let DefWithBodyId::FunctionId(_) = owner {
         p.buf.push('(');
-        let function_data = db.function_data(it);
-        let (mut params, ret_type) = (function_data.params.iter(), &function_data.ret_type);
         if let Some(self_param) = body.self_param {
             p.print_binding(self_param);
-            p.buf.push_str(": ");
-            if let Some(ty) = params.next() {
-                p.print_type_ref(*ty, &function_data.types_map);
-                p.buf.push_str(", ");
-            }
+            p.buf.push_str(", ");
         }
-        body.params.iter().zip(params).for_each(|(&param, ty)| {
-            p.print_pat(param);
-            p.buf.push_str(": ");
-            p.print_type_ref(*ty, &function_data.types_map);
+        body.params.iter().for_each(|param| {
+            p.print_pat(*param);
             p.buf.push_str(", ");
         });
         // remove the last ", " in param list
-        if body.params.len() > 0 {
+        if !body.params.is_empty() {
             p.buf.truncate(p.buf.len() - 2);
         }
         p.buf.push(')');
-        // return type
-        p.buf.push_str(" -> ");
-        p.print_type_ref(*ret_type, &function_data.types_map);
         p.buf.push(' ');
     }
     p.print_expr(body.body_expr);
@@ -98,7 +114,298 @@ pub(super) fn print_body_hir(
     p.buf
 }
 
-pub(super) fn print_expr_hir(
+pub fn print_variant_body_hir(db: &dyn DefDatabase, owner: VariantId, edition: Edition) -> String {
+    let header = match owner {
+        VariantId::StructId(it) => {
+            it.lookup(db).id.resolved(db, |it| format!("struct {}", it.name.display(db, edition)))
+        }
+        VariantId::EnumVariantId(enum_variant_id) => {
+            let loc = enum_variant_id.lookup(db);
+            let enum_loc = loc.parent.lookup(db);
+            format!(
+                "enum {}::{}",
+                enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db, edition),
+                loc.id.item_tree(db)[loc.id.value].name.display(db, edition),
+            )
+        }
+        VariantId::UnionId(union_id) => union_id
+            .lookup(db)
+            .id
+            .resolved(db, |it| format!("union {}", it.name.display(db, edition))),
+    };
+
+    let fields = db.variant_fields(owner);
+
+    let mut p = Printer {
+        db,
+        store: &fields.store,
+        buf: header,
+        indent_level: 0,
+        line_format: LineFormat::Newline,
+        edition,
+    };
+    match fields.shape {
+        FieldsShape::Record => wln!(p, " {{"),
+        FieldsShape::Tuple => wln!(p, "("),
+        FieldsShape::Unit => (),
+    }
+
+    for (_, data) in fields.fields().iter() {
+        let FieldData { name, type_ref, visibility, is_unsafe } = data;
+        match visibility {
+            crate::item_tree::RawVisibility::Module(interned, _visibility_explicitness) => {
+                w!(p, "{}", interned.display(db, p.edition))
+            }
+            crate::item_tree::RawVisibility::Public => w!(p, "pub "),
+        }
+        if *is_unsafe {
+            w!(p, "unsafe ");
+        }
+        w!(p, "{}: ", name.display(db, p.edition));
+        p.print_type_ref(*type_ref);
+    }
+
+    match fields.shape {
+        FieldsShape::Record => wln!(p, "}}"),
+        FieldsShape::Tuple => wln!(p, ");"),
+        FieldsShape::Unit => wln!(p, ";"),
+    }
+    p.buf
+}
+
+pub fn print_signature(db: &dyn DefDatabase, owner: GenericDefId, edition: Edition) -> String {
+    match owner {
+        GenericDefId::AdtId(id) => match id {
+            AdtId::StructId(id) => {
+                let signature = db.struct_signature(id);
+                print_struct(db, &signature, edition)
+            }
+            AdtId::UnionId(id) => {
+                format!("unimplemented {id:?}")
+            }
+            AdtId::EnumId(id) => {
+                format!("unimplemented {id:?}")
+            }
+        },
+        GenericDefId::ConstId(id) => format!("unimplemented {id:?}"),
+        GenericDefId::FunctionId(id) => {
+            let signature = db.function_signature(id);
+            print_function(db, &signature, edition)
+        }
+        GenericDefId::ImplId(id) => format!("unimplemented {id:?}"),
+        GenericDefId::StaticId(id) => format!("unimplemented {id:?}"),
+        GenericDefId::TraitAliasId(id) => format!("unimplemented {id:?}"),
+        GenericDefId::TraitId(id) => format!("unimplemented {id:?}"),
+        GenericDefId::TypeAliasId(id) => format!("unimplemented {id:?}"),
+    }
+}
+
+pub fn print_path(
+    db: &dyn DefDatabase,
+    store: &ExpressionStore,
+    path: &Path,
+    edition: Edition,
+) -> String {
+    let mut p = Printer {
+        db,
+        store,
+        buf: String::new(),
+        indent_level: 0,
+        line_format: LineFormat::Newline,
+        edition,
+    };
+    p.print_path(path);
+    p.buf
+}
+
+pub fn print_struct(
+    db: &dyn DefDatabase,
+    StructSignature { name, generic_params, store, flags, shape, repr }: &StructSignature,
+    edition: Edition,
+) -> String {
+    let mut p = Printer {
+        db,
+        store,
+        buf: String::new(),
+        indent_level: 0,
+        line_format: LineFormat::Newline,
+        edition,
+    };
+    if let Some(repr) = repr {
+        if repr.c() {
+            wln!(p, "#[repr(C)]");
+        }
+        if let Some(align) = repr.align {
+            wln!(p, "#[repr(align({}))]", align.bytes());
+        }
+        if let Some(pack) = repr.pack {
+            wln!(p, "#[repr(pack({}))]", pack.bytes());
+        }
+    }
+    if flags.contains(StructFlags::FUNDAMENTAL) {
+        wln!(p, "#[fundamental]");
+    }
+    w!(p, "struct ");
+    w!(p, "{}", name.display(db, edition));
+    print_generic_params(db, generic_params, &mut p);
+    match shape {
+        FieldsShape::Record => wln!(p, " {{...}}"),
+        FieldsShape::Tuple => wln!(p, "(...)"),
+        FieldsShape::Unit => (),
+    }
+
+    print_where_clauses(db, generic_params, &mut p);
+
+    match shape {
+        FieldsShape::Record => wln!(p),
+        FieldsShape::Tuple => wln!(p, ";"),
+        FieldsShape::Unit => wln!(p, ";"),
+    }
+
+    p.buf
+}
+
+pub fn print_function(
+    db: &dyn DefDatabase,
+    FunctionSignature {
+        name,
+        generic_params,
+        store,
+        params,
+        ret_type,
+        abi,
+        flags,
+        legacy_const_generics_indices,
+    }: &FunctionSignature,
+    edition: Edition,
+) -> String {
+    let mut p = Printer {
+        db,
+        store,
+        buf: String::new(),
+        indent_level: 0,
+        line_format: LineFormat::Newline,
+        edition,
+    };
+    if flags.contains(FnFlags::CONST) {
+        w!(p, "const ");
+    }
+    if flags.contains(FnFlags::ASYNC) {
+        w!(p, "async ");
+    }
+    if flags.contains(FnFlags::UNSAFE) {
+        w!(p, "unsafe ");
+    }
+    if flags.contains(FnFlags::EXPLICIT_SAFE) {
+        w!(p, "safe ");
+    }
+    if let Some(abi) = abi {
+        w!(p, "extern \"{}\" ", abi.as_str());
+    }
+    w!(p, "fn ");
+    w!(p, "{}", name.display(db, edition));
+    print_generic_params(db, generic_params, &mut p);
+    w!(p, "(");
+    for (i, param) in params.iter().enumerate() {
+        if i != 0 {
+            w!(p, ", ");
+        }
+        if legacy_const_generics_indices.as_ref().is_some_and(|idx| idx.contains(&(i as u32))) {
+            w!(p, "const: ");
+        }
+        p.print_type_ref(*param);
+    }
+    w!(p, ")");
+    if let Some(ret_type) = ret_type {
+        w!(p, " -> ");
+        p.print_type_ref(*ret_type);
+    }
+
+    print_where_clauses(db, generic_params, &mut p);
+    wln!(p, " {{...}}");
+
+    p.buf
+}
+
+fn print_where_clauses(db: &dyn DefDatabase, generic_params: &GenericParams, p: &mut Printer<'_>) {
+    if !generic_params.where_predicates.is_empty() {
+        w!(p, "\nwhere\n");
+        p.indented(|p| {
+            for (i, pred) in generic_params.where_predicates.iter().enumerate() {
+                if i != 0 {
+                    w!(p, ",\n");
+                }
+                match pred {
+                    WherePredicate::TypeBound { target, bound } => {
+                        p.print_type_ref(*target);
+                        w!(p, ": ");
+                        p.print_type_bounds(std::slice::from_ref(bound));
+                    }
+                    WherePredicate::Lifetime { target, bound } => {
+                        p.print_lifetime_ref(*target);
+                        w!(p, ": ");
+                        p.print_lifetime_ref(*bound);
+                    }
+                    WherePredicate::ForLifetime { lifetimes, target, bound } => {
+                        w!(p, "for<");
+                        for (i, lifetime) in lifetimes.iter().enumerate() {
+                            if i != 0 {
+                                w!(p, ", ");
+                            }
+                            w!(p, "{}", lifetime.display(db, p.edition));
+                        }
+                        w!(p, "> ");
+                        p.print_type_ref(*target);
+                        w!(p, ": ");
+                        p.print_type_bounds(std::slice::from_ref(bound));
+                    }
+                }
+            }
+        });
+        wln!(p);
+    }
+}
+
+fn print_generic_params(db: &dyn DefDatabase, generic_params: &GenericParams, p: &mut Printer<'_>) {
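+    // Lifetime parameters are printed first, then type and const parameters.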
+    if !generic_params.is_empty() {
+        w!(p, "<");
+        let mut first = true;
+        for (_i, param) in generic_params.iter_lt() {
+            if !first {
+                w!(p, ", ");
+            }
+            first = false;
+            w!(p, "{}", param.name.display(db, p.edition));
+        }
+        for (i, param) in generic_params.iter_type_or_consts() {
+            if !first {
+                w!(p, ", ");
+            }
+            first = false;
+            if let Some(const_param) = param.const_param() {
+                w!(p, "const {}: ", const_param.name.display(db, p.edition));
+                p.print_type_ref(const_param.ty);
+                if let Some(default) = const_param.default {
+                    w!(p, " = ");
+                    p.print_expr(default.expr);
+                }
+            }
+            if let Some(type_param) = param.type_param() {
+                match &type_param.name {
+                    Some(name) => w!(p, "{}", name.display(db, p.edition)),
+                    None => w!(p, "Param[{}]", i.into_raw()),
+                }
+                if let Some(default) = type_param.default {
+                    w!(p, " = ");
+                    p.print_type_ref(default);
+                }
+            }
+        }
+        w!(p, ">");
+    }
+}
+
+pub fn print_expr_hir(
     db: &dyn DefDatabase,
     store: &ExpressionStore,
     _owner: DefWithBodyId,
@@ -117,7 +424,7 @@ pub(super) fn print_expr_hir(
     p.buf
 }
 
-pub(super) fn print_pat_hir(
+pub fn print_pat_hir(
     db: &dyn DefDatabase,
     store: &ExpressionStore,
     _owner: DefWithBodyId,
@@ -137,21 +444,6 @@ pub(super) fn print_pat_hir(
     p.buf
 }
 
-macro_rules! w {
-    ($dst:expr, $($arg:tt)*) => {
-        { let _ = write!($dst, $($arg)*); }
-    };
-}
-
-macro_rules! wln {
-    ($dst:expr) => {
-        { $dst.newline(); }
-    };
-    ($dst:expr, $($arg:tt)*) => {
-        { let _ = w!($dst, $($arg)*); $dst.newline(); }
-    };
-}
-
 struct Printer<'a> {
     db: &'a dyn DefDatabase,
     store: &'a ExpressionStore,
@@ -238,7 +530,7 @@ impl Printer<'_> {
             Expr::InlineAsm(_) => w!(self, "builtin#asm(_)"),
             Expr::OffsetOf(offset_of) => {
                 w!(self, "builtin#offset_of(");
-                self.print_type_ref(offset_of.container, &self.store.types);
+                self.print_type_ref(offset_of.container);
                 let edition = self.edition;
                 w!(
                     self,
@@ -246,7 +538,7 @@ impl Printer<'_> {
                     offset_of
                         .fields
                         .iter()
-                        .format_with(".", |field, f| f(&field.display(self.db.upcast(), edition)))
+                        .format_with(".", |field, f| f(&field.display(self.db, edition)))
                 );
             }
             Expr::Path(path) => self.print_path(path),
@@ -268,7 +560,7 @@ impl Printer<'_> {
             }
             Expr::Loop { body, label } => {
                 if let Some(lbl) = label {
-                    w!(self, "{}: ", self.store[*lbl].name.display(self.db.upcast(), self.edition));
+                    w!(self, "{}: ", self.store[*lbl].name.display(self.db, self.edition));
                 }
                 w!(self, "loop ");
                 self.print_expr(*body);
@@ -288,11 +580,10 @@ impl Printer<'_> {
             }
             Expr::MethodCall { receiver, method_name, args, generic_args } => {
                 self.print_expr(*receiver);
-                w!(self, ".{}", method_name.display(self.db.upcast(), self.edition));
+                w!(self, ".{}", method_name.display(self.db, self.edition));
                 if let Some(args) = generic_args {
                     w!(self, "::<");
-                    let edition = self.edition;
-                    print_generic_args(self.db, args, &self.store.types, self, edition).unwrap();
+                    self.print_generic_args(args);
                     w!(self, ">");
                 }
                 w!(self, "(");
@@ -327,13 +618,13 @@ impl Printer<'_> {
             Expr::Continue { label } => {
                 w!(self, "continue");
                 if let Some(lbl) = label {
-                    w!(self, " {}", self.store[*lbl].name.display(self.db.upcast(), self.edition));
+                    w!(self, " {}", self.store[*lbl].name.display(self.db, self.edition));
                 }
             }
             Expr::Break { expr, label } => {
                 w!(self, "break");
                 if let Some(lbl) = label {
-                    w!(self, " {}", self.store[*lbl].name.display(self.db.upcast(), self.edition));
+                    w!(self, " {}", self.store[*lbl].name.display(self.db, self.edition));
                 }
                 if let Some(expr) = expr {
                     self.whitespace();
@@ -378,7 +669,7 @@ impl Printer<'_> {
                 let edition = self.edition;
                 self.indented(|p| {
                     for field in &**fields {
-                        w!(p, "{}: ", field.name.display(self.db.upcast(), edition));
+                        w!(p, "{}: ", field.name.display(self.db, edition));
                         p.print_expr(field.expr);
                         wln!(p, ",");
                     }
@@ -392,7 +683,7 @@ impl Printer<'_> {
             }
             Expr::Field { expr, name } => {
                 self.print_expr(*expr);
-                w!(self, ".{}", name.display(self.db.upcast(), self.edition));
+                w!(self, ".{}", name.display(self.db, self.edition));
             }
             Expr::Await { expr } => {
                 self.print_expr(*expr);
@@ -401,7 +692,7 @@ impl Printer<'_> {
             Expr::Cast { expr, type_ref } => {
                 self.print_expr(*expr);
                 w!(self, " as ");
-                self.print_type_ref(*type_ref, &self.store.types);
+                self.print_type_ref(*type_ref);
             }
             Expr::Ref { expr, rawness, mutability } => {
                 w!(self, "&");
@@ -489,13 +780,13 @@ impl Printer<'_> {
                     self.print_pat(*pat);
                     if let Some(ty) = ty {
                         w!(self, ": ");
-                        self.print_type_ref(*ty, &self.store.types);
+                        self.print_type_ref(*ty);
                     }
                 }
                 w!(self, "|");
                 if let Some(ret_ty) = ret_type {
                     w!(self, " -> ");
-                    self.print_type_ref(*ret_ty, &self.store.types);
+                    self.print_type_ref(*ret_ty);
                 }
                 self.whitespace();
                 self.print_expr(*body);
@@ -531,7 +822,7 @@ impl Printer<'_> {
             Expr::Literal(lit) => self.print_literal(lit),
             Expr::Block { id: _, statements, tail, label } => {
                 let label = label.map(|lbl| {
-                    format!("{}: ", self.store[lbl].name.display(self.db.upcast(), self.edition))
+                    format!("{}: ", self.store[lbl].name.display(self.db, self.edition))
                 });
                 self.print_block(label.as_deref(), statements, tail);
             }
@@ -617,7 +908,7 @@ impl Printer<'_> {
                 let oneline = matches!(self.line_format, LineFormat::Oneline);
                 self.indented(|p| {
                     for (idx, arg) in args.iter().enumerate() {
-                        let field_name = arg.name.display(self.db.upcast(), edition).to_string();
+                        let field_name = arg.name.display(self.db, edition).to_string();
 
                         let mut same_name = false;
                         if let Pat::Bind { id, subpat: None } = &self.store[arg.pat] {
@@ -731,7 +1022,7 @@ impl Printer<'_> {
                 self.print_pat(*pat);
                 if let Some(ty) = type_ref {
                     w!(self, ": ");
-                    self.print_type_ref(*ty, &self.store.types);
+                    self.print_type_ref(*ty);
                 }
                 if let Some(init) = initializer {
                     w!(self, " = ");
@@ -782,16 +1073,6 @@ impl Printer<'_> {
         }
     }
 
-    fn print_type_ref(&mut self, ty: TypeRefId, map: &TypesMap) {
-        let edition = self.edition;
-        print_type_ref(self.db, ty, map, self, edition).unwrap();
-    }
-
-    fn print_path(&mut self, path: &Path) {
-        let edition = self.edition;
-        print_path(self.db, path, &self.store.types, self, edition).unwrap();
-    }
-
     fn print_binding(&mut self, id: BindingId) {
         let Binding { name, mode, .. } = &self.store.bindings[id];
         let mode = match mode {
@@ -800,6 +1081,288 @@ impl Printer<'_> {
             BindingAnnotation::Ref => "ref ",
             BindingAnnotation::RefMut => "ref mut ",
         };
-        w!(self, "{}{}", mode, name.display(self.db.upcast(), self.edition));
+        w!(self, "{}{}", mode, name.display(self.db, self.edition));
+    }
+
+    fn print_path(&mut self, path: &Path) {
+        if let Path::LangItem(it, s) = path {
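+            // Lang-item paths come from desugarings (e.g. `?`); they have no written
+            // source path, so they are printed as `builtin#lang(...)`.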
+            w!(self, "builtin#lang(");
+            macro_rules! write_name {
+                ($it:ident) => {{
+                    let loc = $it.lookup(self.db);
+                    let tree = loc.item_tree_id().item_tree(self.db);
+                    let name = &tree[loc.id.value].name;
+                    w!(self, "{}", name.display(self.db, self.edition));
+                }};
+            }
+            match *it {
+                LangItemTarget::ImplDef(it) => w!(self, "{it:?}"),
+                LangItemTarget::EnumId(it) => write_name!(it),
+                LangItemTarget::Function(it) => write_name!(it),
+                LangItemTarget::Static(it) => write_name!(it),
+                LangItemTarget::Struct(it) => write_name!(it),
+                LangItemTarget::Union(it) => write_name!(it),
+                LangItemTarget::TypeAlias(it) => write_name!(it),
+                LangItemTarget::Trait(it) => write_name!(it),
+                LangItemTarget::EnumVariant(it) => write_name!(it),
+            }
+
+            if let Some(s) = s {
+                w!(self, "::{}", s.display(self.db, self.edition));
+            }
+            return w!(self, ")");
+        }
+        match path.type_anchor() {
+            Some(anchor) => {
+                w!(self, "<");
+                self.print_type_ref(anchor);
+                w!(self, ">::");
+            }
+            None => match path.kind() {
+                PathKind::Plain => {}
+                &PathKind::SELF => w!(self, "self"),
+                PathKind::Super(n) => {
+                    for i in 0..*n {
+                        if i == 0 {
+                            w!(self, "super");
+                        } else {
+                            w!(self, "::super");
+                        }
+                    }
+                }
+                PathKind::Crate => w!(self, "crate"),
+                PathKind::Abs => {}
+                PathKind::DollarCrate(krate) => w!(
+                    self,
+                    "{}",
+                    krate
+                        .extra_data(self.db)
+                        .display_name
+                        .as_ref()
+                        .map(|it| it.crate_name().symbol().as_str())
+                        .unwrap_or("$crate")
+                ),
+            },
+        }
+
+        for (i, segment) in path.segments().iter().enumerate() {
+            if i != 0 || !matches!(path.kind(), PathKind::Plain) {
+                w!(self, "::");
+            }
+
+            w!(self, "{}", segment.name.display(self.db, self.edition));
+            if let Some(generics) = segment.args_and_bindings {
+                w!(self, "::<");
+                self.print_generic_args(generics);
+
+                w!(self, ">");
+            }
+        }
+    }
+
+    pub(crate) fn print_generic_args(&mut self, generics: &GenericArgs) {
+        let mut first = true;
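+        // Qualified paths like `<T as Trait<A>>::Assoc` are lowered with the self type
+        // prepended as the first generic arg (`has_self_type`); print it as `Self=` to
+        // mirror that desugaring.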
+        let args = if generics.has_self_type {
+            let (self_ty, args) = generics.args.split_first().unwrap();
+            w!(self, "Self=");
+            self.print_generic_arg(self_ty);
+            first = false;
+            args
+        } else {
+            &generics.args
+        };
+        for arg in args {
+            if !first {
+                w!(self, ", ");
+            }
+            first = false;
+            self.print_generic_arg(arg);
+        }
+        for binding in generics.bindings.iter() {
+            if !first {
+                w!(self, ", ");
+            }
+            first = false;
+            w!(self, "{}", binding.name.display(self.db, self.edition));
+            if !binding.bounds.is_empty() {
+                w!(self, ": ");
+                self.print_type_bounds(&binding.bounds);
+            }
+            if let Some(ty) = binding.type_ref {
+                w!(self, " = ");
+                self.print_type_ref(ty);
+            }
+        }
+    }
+
+    pub(crate) fn print_generic_arg(&mut self, arg: &GenericArg) {
+        match arg {
+            GenericArg::Type(ty) => self.print_type_ref(*ty),
+            GenericArg::Const(ConstRef { expr }) => self.print_expr(*expr),
+            GenericArg::Lifetime(lt) => self.print_lifetime_ref(*lt),
+        }
+    }
+
+    pub(crate) fn print_type_param(&mut self, param: TypeParamId) {
+        let generic_params = self.db.generic_params(param.parent());
+
+        match generic_params[param.local_id()].name() {
+            Some(name) => w!(self, "{}", name.display(self.db, self.edition)),
+            None => w!(self, "Param[{}]", param.local_id().into_raw()),
+        }
+    }
+
+    pub(crate) fn print_lifetime_param(&mut self, param: LifetimeParamId) {
+        let generic_params = self.db.generic_params(param.parent);
+        w!(self, "{}", generic_params[param.local_id].name.display(self.db, self.edition))
+    }
+
+    pub(crate) fn print_lifetime_ref(&mut self, lt_ref: LifetimeRefId) {
+        match &self.store[lt_ref] {
+            LifetimeRef::Static => w!(self, "'static"),
+            LifetimeRef::Named(lt) => {
+                w!(self, "{}", lt.display(self.db, self.edition))
+            }
+            LifetimeRef::Placeholder => w!(self, "'_"),
+            LifetimeRef::Error => w!(self, "'{{error}}"),
+            &LifetimeRef::Param(p) => self.print_lifetime_param(p),
+        }
+    }
+
+    pub(crate) fn print_type_ref(&mut self, type_ref: TypeRefId) {
+        // FIXME: deduplicate with `HirDisplay` impl
+        match &self.store[type_ref] {
+            TypeRef::Never => w!(self, "!"),
+            &TypeRef::TypeParam(p) => self.print_type_param(p),
+            TypeRef::Placeholder => w!(self, "_"),
+            TypeRef::Tuple(fields) => {
+                w!(self, "(");
+                for (i, field) in fields.iter().enumerate() {
+                    if i != 0 {
+                        w!(self, ", ");
+                    }
+                    self.print_type_ref(*field);
+                }
+                w!(self, ")");
+            }
+            TypeRef::Path(path) => self.print_path(path),
+            TypeRef::RawPtr(pointee, mtbl) => {
+                let mtbl = match mtbl {
+                    Mutability::Shared => "*const",
+                    Mutability::Mut => "*mut",
+                };
+                w!(self, "{mtbl} ");
+                self.print_type_ref(*pointee);
+            }
+            TypeRef::Reference(ref_) => {
+                let mtbl = match ref_.mutability {
+                    Mutability::Shared => "",
+                    Mutability::Mut => "mut ",
+                };
+                w!(self, "&");
+                if let Some(lt) = &ref_.lifetime {
+                    self.print_lifetime_ref(*lt);
+                    w!(self, " ");
+                }
+                w!(self, "{mtbl}");
+                self.print_type_ref(ref_.ty);
+            }
+            TypeRef::Array(array) => {
+                w!(self, "[");
+                self.print_type_ref(array.ty);
+                w!(self, "; ");
+                self.print_generic_arg(&GenericArg::Const(array.len));
+                w!(self, "]");
+            }
+            TypeRef::Slice(elem) => {
+                w!(self, "[");
+                self.print_type_ref(*elem);
+                w!(self, "]");
+            }
+            TypeRef::Fn(fn_) => {
+                let ((_, return_type), args) =
+                    fn_.params.split_last().expect("TypeRef::Fn is missing return type");
+                if fn_.is_unsafe {
+                    w!(self, "unsafe ");
+                }
+                if let Some(abi) = &fn_.abi {
+                    w!(self, "extern ");
+                    w!(self, "{}", abi.as_str());
+                    w!(self, " ");
+                }
+                w!(self, "fn(");
+                for (i, (_, typeref)) in args.iter().enumerate() {
+                    if i != 0 {
+                        w!(self, ", ");
+                    }
+                    self.print_type_ref(*typeref);
+                }
+                if fn_.is_varargs {
+                    if !args.is_empty() {
+                        w!(self, ", ");
+                    }
+                    w!(self, "...");
+                }
+                w!(self, ") -> ");
+                self.print_type_ref(*return_type);
+            }
+            TypeRef::Error => w!(self, "{{error}}"),
+            TypeRef::ImplTrait(bounds) => {
+                w!(self, "impl ");
+                self.print_type_bounds(bounds);
+            }
+            TypeRef::DynTrait(bounds) => {
+                w!(self, "dyn ");
+                self.print_type_bounds(bounds);
+            }
+        }
+    }
+
+    pub(crate) fn print_type_bounds(&mut self, bounds: &[TypeBound]) {
+        for (i, bound) in bounds.iter().enumerate() {
+            if i != 0 {
+                w!(self, " + ");
+            }
+
+            match bound {
+                TypeBound::Path(path, modifier) => {
+                    match modifier {
+                        TraitBoundModifier::None => (),
+                        TraitBoundModifier::Maybe => w!(self, "?"),
+                    }
+                    self.print_path(&self.store[*path]);
+                }
+                TypeBound::ForLifetime(lifetimes, path) => {
+                    w!(
+                        self,
+                        "for<{}> ",
+                        lifetimes
+                            .iter()
+                            .map(|it| it.display(self.db, self.edition))
+                            .format(", ")
+                            .to_string()
+                    );
+                    self.print_path(&self.store[*path]);
+                }
+                TypeBound::Lifetime(lt) => self.print_lifetime_ref(*lt),
+                TypeBound::Use(args) => {
+                    w!(self, "use<");
+                    let mut first = true;
+                    for arg in args {
+                        if !mem::take(&mut first) {
+                            w!(self, ", ");
+                        }
+                        match arg {
+                            UseArgRef::Name(it) => {
+                                w!(self, "{}", it.display(self.db, self.edition))
+                            }
+                            UseArgRef::Lifetime(it) => self.print_lifetime_ref(*it),
+                        }
+                    }
+                    w!(self, ">")
+                }
+                TypeBound::Error => w!(self, "{{unknown}}"),
+            }
+        }
     }
 }
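
The printing helpers added above recurse over arena-indexed `TypeRefId`s: each case emits a few tokens and then re-enters `print_type_ref` for its children. A rough, self-contained sketch of that shape, using a toy `TypeRef` enum and a plain slice in place of rust-analyzer's real `ExpressionStore` (every name in the sketch is invented for illustration):

    // Toy stand-ins for the arena-indexed IR the printer above walks.
    type TypeRefId = usize;

    enum TypeRef {
        Never,
        Placeholder,
        Tuple(Vec<TypeRefId>),
        Slice(TypeRefId),
        Reference { mutable: bool, inner: TypeRefId },
    }

    struct Printer<'a> {
        store: &'a [TypeRef], // plays the role of `self.store`
        out: String,
    }

    impl Printer<'_> {
        fn print_type_ref(&mut self, id: TypeRefId) {
            match &self.store[id] {
                TypeRef::Never => self.out.push('!'),
                TypeRef::Placeholder => self.out.push('_'),
                TypeRef::Tuple(fields) => {
                    self.out.push('(');
                    for (i, field) in fields.iter().enumerate() {
                        if i != 0 {
                            self.out.push_str(", ");
                        }
                        self.print_type_ref(*field);
                    }
                    self.out.push(')');
                }
                TypeRef::Slice(elem) => {
                    self.out.push('[');
                    self.print_type_ref(*elem);
                    self.out.push(']');
                }
                TypeRef::Reference { mutable, inner } => {
                    self.out.push('&');
                    if *mutable {
                        self.out.push_str("mut ");
                    }
                    self.print_type_ref(*inner);
                }
            }
        }
    }

    fn main() {
        // `&mut [(!, _)]` built bottom-up, mirroring how lowered types are stored by id.
        let store = vec![
            TypeRef::Never,                                  // 0
            TypeRef::Placeholder,                            // 1
            TypeRef::Tuple(vec![0, 1]),                      // 2
            TypeRef::Slice(2),                               // 3
            TypeRef::Reference { mutable: true, inner: 3 },  // 4
        ];
        let mut p = Printer { store: &store, out: String::new() };
        p.print_type_ref(4);
        assert_eq!(p.out, "&mut [(!, _)]");
    }
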
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/scope.rs
index 859a706177aab..431ea9eb1d465 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/scope.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/scope.rs
@@ -1,13 +1,13 @@
 //! Name resolution for expressions.
-use hir_expand::{name::Name, MacroDefId};
+use hir_expand::{MacroDefId, name::Name};
 use la_arena::{Arena, ArenaMap, Idx, IdxRange, RawIdx};
 use triomphe::Arc;
 
 use crate::{
+    BlockId, DefWithBodyId,
     db::DefDatabase,
     expr_store::{Body, ExpressionStore, HygieneId},
     hir::{Binding, BindingId, Expr, ExprId, Item, LabelId, Pat, PatId, Statement},
-    BlockId, ConstBlockId, DefWithBodyId,
 };
 
 pub type ScopeId = Idx<ScopeData>;
@@ -53,9 +53,7 @@ pub struct ScopeData {
 impl ExprScopes {
     pub(crate) fn expr_scopes_query(db: &dyn DefDatabase, def: DefWithBodyId) -> Arc<ExprScopes> {
         let body = db.body(def);
-        let mut scopes = ExprScopes::new_body(&body, |const_block| {
-            db.lookup_intern_anonymous_const(const_block).root
-        });
+        let mut scopes = ExprScopes::new_body(&body);
         scopes.shrink_to_fit();
         Arc::new(scopes)
     }
@@ -104,10 +102,7 @@ fn empty_entries(idx: usize) -> IdxRange<ScopeEntry> {
 }
 
 impl ExprScopes {
-    fn new_body(
-        body: &Body,
-        resolve_const_block: impl (Fn(ConstBlockId) -> ExprId) + Copy,
-    ) -> ExprScopes {
+    fn new_body(body: &Body) -> ExprScopes {
         let mut scopes = ExprScopes {
             scopes: Arena::default(),
             scope_entries: Arena::default(),
@@ -118,7 +113,7 @@ impl ExprScopes {
             scopes.add_bindings(body, root, self_param, body.binding_hygiene(self_param));
         }
         scopes.add_params_bindings(body, root, &body.params);
-        compute_expr_scopes(body.body_expr, body, &mut scopes, &mut root, resolve_const_block);
+        compute_expr_scopes(body.body_expr, body, &mut scopes, &mut root);
         scopes
     }
 
@@ -221,23 +216,22 @@ fn compute_block_scopes(
     store: &ExpressionStore,
     scopes: &mut ExprScopes,
     scope: &mut ScopeId,
-    resolve_const_block: impl (Fn(ConstBlockId) -> ExprId) + Copy,
 ) {
     for stmt in statements {
         match stmt {
             Statement::Let { pat, initializer, else_branch, .. } => {
                 if let Some(expr) = initializer {
-                    compute_expr_scopes(*expr, store, scopes, scope, resolve_const_block);
+                    compute_expr_scopes(*expr, store, scopes, scope);
                 }
                 if let Some(expr) = else_branch {
-                    compute_expr_scopes(*expr, store, scopes, scope, resolve_const_block);
+                    compute_expr_scopes(*expr, store, scopes, scope);
                 }
 
                 *scope = scopes.new_scope(*scope);
                 scopes.add_pat_bindings(store, *scope, *pat);
             }
             Statement::Expr { expr, .. } => {
-                compute_expr_scopes(*expr, store, scopes, scope, resolve_const_block);
+                compute_expr_scopes(*expr, store, scopes, scope);
             }
             Statement::Item(Item::MacroDef(macro_id)) => {
                 *scope = scopes.new_macro_def_scope(*scope, macro_id.clone());
@@ -246,7 +240,7 @@ fn compute_block_scopes(
         }
     }
     if let Some(expr) = tail {
-        compute_expr_scopes(expr, store, scopes, scope, resolve_const_block);
+        compute_expr_scopes(expr, store, scopes, scope);
     }
 }
 
@@ -255,13 +249,12 @@ fn compute_expr_scopes(
     store: &ExpressionStore,
     scopes: &mut ExprScopes,
     scope: &mut ScopeId,
-    resolve_const_block: impl (Fn(ConstBlockId) -> ExprId) + Copy,
 ) {
     let make_label =
         |label: &Option<LabelId>| label.map(|label| (label, store.labels[label].name.clone()));
 
     let compute_expr_scopes = |scopes: &mut ExprScopes, expr: ExprId, scope: &mut ScopeId| {
-        compute_expr_scopes(expr, store, scopes, scope, resolve_const_block)
+        compute_expr_scopes(expr, store, scopes, scope)
     };
 
     scopes.set_scope(expr, *scope);
@@ -271,18 +264,18 @@ fn compute_expr_scopes(
             // Overwrite the old scope for the block expr, so that every block scope can be found
             // via the block itself (important for blocks that only contain items, no expressions).
             scopes.set_scope(expr, scope);
-            compute_block_scopes(statements, *tail, store, scopes, &mut scope, resolve_const_block);
+            compute_block_scopes(statements, *tail, store, scopes, &mut scope);
         }
         Expr::Const(id) => {
             let mut scope = scopes.root_scope();
-            compute_expr_scopes(scopes, resolve_const_block(*id), &mut scope);
+            compute_expr_scopes(scopes, *id, &mut scope);
         }
         Expr::Unsafe { id, statements, tail } | Expr::Async { id, statements, tail } => {
             let mut scope = scopes.new_block_scope(*scope, *id, None);
             // Overwrite the old scope for the block expr, so that every block scope can be found
             // via the block itself (important for blocks that only contain items, no expressions).
             scopes.set_scope(expr, scope);
-            compute_block_scopes(statements, *tail, store, scopes, &mut scope, resolve_const_block);
+            compute_block_scopes(statements, *tail, store, scopes, &mut scope);
         }
         Expr::Loop { body: body_expr, label } => {
             let mut scope = scopes.new_labeled_scope(*scope, make_label(label));
@@ -324,20 +317,20 @@ fn compute_expr_scopes(
 
 #[cfg(test)]
 mod tests {
-    use base_db::SourceDatabase;
-    use hir_expand::{name::AsName, InFile};
+    use base_db::RootQueryDb;
+    use hir_expand::{InFile, name::AsName};
     use span::FileId;
-    use syntax::{algo::find_node_at_offset, ast, AstNode};
+    use syntax::{AstNode, algo::find_node_at_offset, ast};
     use test_fixture::WithFixture;
     use test_utils::{assert_eq_text, extract_offset};
 
-    use crate::{db::DefDatabase, test_db::TestDB, FunctionId, ModuleDefId};
+    use crate::{FunctionId, ModuleDefId, db::DefDatabase, test_db::TestDB};
 
     fn find_function(db: &TestDB, file_id: FileId) -> FunctionId {
         let krate = db.test_crate();
         let crate_def_map = db.crate_def_map(krate);
 
-        let module = crate_def_map.modules_for_file(file_id).next().unwrap();
+        let module = crate_def_map.modules_for_file(db, file_id).next().unwrap();
         let (_, def) = crate_def_map[module].scope.entries().next().unwrap();
         match def.take_values().unwrap() {
             ModuleDefId::FunctionId(it) => it,
@@ -357,18 +350,20 @@ mod tests {
         };
 
         let (db, position) = TestDB::with_position(&code);
-        let file_id = position.file_id;
+        let editioned_file_id = position.file_id;
         let offset = position.offset;
 
-        let file_syntax = db.parse(file_id).syntax_node();
+        let (file_id, _) = editioned_file_id.unpack(&db);
+
+        let file_syntax = db.parse(editioned_file_id).syntax_node();
         let marker: ast::PathExpr = find_node_at_offset(&file_syntax, offset).unwrap();
-        let function = find_function(&db, file_id.file_id());
+        let function = find_function(&db, file_id);
 
         let scopes = db.expr_scopes(function.into());
         let (_body, source_map) = db.body_with_source_map(function.into());
 
         let expr_id = source_map
-            .node_expr(InFile { file_id: file_id.into(), value: &marker.into() })
+            .node_expr(InFile { file_id: editioned_file_id.into(), value: &marker.into() })
             .unwrap()
             .as_expr()
             .unwrap();
@@ -511,15 +506,17 @@ fn foo() {
 
     fn do_check_local_name(#[rust_analyzer::rust_fixture] ra_fixture: &str, expected_offset: u32) {
         let (db, position) = TestDB::with_position(ra_fixture);
-        let file_id = position.file_id;
+        let editioned_file_id = position.file_id;
         let offset = position.offset;
 
-        let file = db.parse(file_id).ok().unwrap();
+        let (file_id, _) = editioned_file_id.unpack(&db);
+
+        let file = db.parse(editioned_file_id).ok().unwrap();
         let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into())
             .expect("failed to find a name at the target offset");
         let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), offset).unwrap();
 
-        let function = find_function(&db, file_id.file_id());
+        let function = find_function(&db, file_id);
 
         let scopes = db.expr_scopes(function.into());
         let (_, source_map) = db.body_with_source_map(function.into());
@@ -527,7 +524,7 @@ fn foo() {
         let expr_scope = {
             let expr_ast = name_ref.syntax().ancestors().find_map(ast::Expr::cast).unwrap();
             let expr_id = source_map
-                .node_expr(InFile { file_id: file_id.into(), value: &expr_ast })
+                .node_expr(InFile { file_id: editioned_file_id.into(), value: &expr_ast })
                 .unwrap()
                 .as_expr()
                 .unwrap();
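
For context on the scope.rs changes: `ExprScopes` builds a tree of scopes while walking the body, tags each expression with its innermost scope, and later name lookup walks the parent chain outwards. Dropping the `resolve_const_block` callback suggests that inline `const` blocks now carry their body expression directly as an `ExprId` (an inference from the `Expr::Const(id)` arm above, not something the diff states). A minimal sketch of the scope-tree idea itself, with toy types that are not rust-analyzer's API:

    // Every scope records its parent and the bindings it introduces;
    // lookup walks the parent chain from the innermost scope out.
    type ScopeId = usize;

    #[derive(Default)]
    struct Scopes {
        parent: Vec<Option<ScopeId>>,
        entries: Vec<Vec<String>>,
    }

    impl Scopes {
        fn new_scope(&mut self, parent: Option<ScopeId>) -> ScopeId {
            self.parent.push(parent);
            self.entries.push(Vec::new());
            self.parent.len() - 1
        }

        fn add_binding(&mut self, scope: ScopeId, name: &str) {
            self.entries[scope].push(name.to_owned());
        }

        /// Return the first scope, walking towards the root, that binds `name`.
        fn resolve(&self, mut scope: ScopeId, name: &str) -> Option<ScopeId> {
            loop {
                if self.entries[scope].iter().any(|n| n == name) {
                    return Some(scope);
                }
                scope = self.parent[scope]?;
            }
        }
    }

    fn main() {
        // fn f() { let a = 1; { let b = 2; /* lookup happens here */ } }
        let mut scopes = Scopes::default();
        let root = scopes.new_scope(None);
        let after_a = scopes.new_scope(Some(root));
        scopes.add_binding(after_a, "a");
        let inner = scopes.new_scope(Some(after_a));
        scopes.add_binding(inner, "b");

        assert_eq!(scopes.resolve(inner, "b"), Some(inner));
        assert_eq!(scopes.resolve(inner, "a"), Some(after_a));
        assert_eq!(scopes.resolve(inner, "missing"), None);
    }
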
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests.rs
index 16bf46d3e3f95..f09ee6f0b9981 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests.rs
@@ -1,503 +1,2 @@
-mod block;
-
-use crate::{hir::MatchArm, test_db::TestDB, ModuleDefId};
-use expect_test::{expect, Expect};
-use la_arena::RawIdx;
-use test_fixture::WithFixture;
-
-use super::*;
-
-fn lower(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (TestDB, Arc<Body>, DefWithBodyId) {
-    let db = TestDB::with_files(ra_fixture);
-
-    let krate = db.fetch_test_crate();
-    let def_map = db.crate_def_map(krate);
-    let mut fn_def = None;
-    'outer: for (_, module) in def_map.modules() {
-        for decl in module.scope.declarations() {
-            if let ModuleDefId::FunctionId(it) = decl {
-                fn_def = Some(it);
-                break 'outer;
-            }
-        }
-    }
-    let fn_def = fn_def.unwrap().into();
-
-    let body = db.body(fn_def);
-    (db, body, fn_def)
-}
-
-fn def_map_at(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> String {
-    let (db, position) = TestDB::with_position(ra_fixture);
-
-    let module = db.module_at_position(position);
-    module.def_map(&db).dump(&db)
-}
-
-fn check_block_scopes_at(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
-    let (db, position) = TestDB::with_position(ra_fixture);
-
-    let module = db.module_at_position(position);
-    let actual = module.def_map(&db).dump_block_scopes(&db);
-    expect.assert_eq(&actual);
-}
-
-fn check_at(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
-    let actual = def_map_at(ra_fixture);
-    expect.assert_eq(&actual);
-}
-
-#[test]
-fn your_stack_belongs_to_me() {
-    cov_mark::check!(your_stack_belongs_to_me);
-    lower(
-        r#"
-#![recursion_limit = "32"]
-macro_rules! n_nuple {
-    ($e:tt) => ();
-    ($($rest:tt)*) => {{
-        (n_nuple!($($rest)*)None,)
-    }};
-}
-fn main() { n_nuple!(1,2,3); }
-"#,
-    );
-}
-
-#[test]
-fn your_stack_belongs_to_me2() {
-    cov_mark::check!(overflow_but_not_me);
-    lower(
-        r#"
-#![recursion_limit = "32"]
-macro_rules! foo {
-    () => {{ foo!(); foo!(); }}
-}
-fn main() { foo!(); }
-"#,
-    );
-}
-
-#[test]
-fn recursion_limit() {
-    lower(
-        r#"
-#![recursion_limit = "2"]
-macro_rules! n_nuple {
-    ($e:tt) => ();
-    ($first:tt $($rest:tt)*) => {{
-        n_nuple!($($rest)*)
-    }};
-}
-fn main() { n_nuple!(1,2,3); }
-"#,
-    );
-}
-
-#[test]
-fn issue_3642_bad_macro_stackover() {
-    lower(
-        r#"
-#[macro_export]
-macro_rules! match_ast {
-    (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) };
-
-    (match ($node:expr) {
-        $( ast::$ast:ident($it:ident) => $res:expr, )*
-        _ => $catch_all:expr $(,)?
-    }) => {{
-        $( if let Some($it) = ast::$ast::cast($node.clone()) { $res } else )*
-        { $catch_all }
-    }};
-}
-
-fn main() {
-    let anchor = match_ast! {
-        match parent {
-            as => {},
-            _ => return None
-        }
-    };
-}"#,
-    );
-}
-
-#[test]
-fn macro_resolve() {
-    // Regression test for a path resolution bug introduced with inner item handling.
-    lower(
-        r#"
-macro_rules! vec {
-    () => { () };
-    ($elem:expr; $n:expr) => { () };
-    ($($x:expr),+ $(,)?) => { () };
-}
-mod m {
-    fn outer() {
-        let _ = vec![FileSet::default(); self.len()];
-    }
-}
-"#,
-    );
-}
-
-#[test]
-fn desugar_for_loop() {
-    let (db, body, def) = lower(
-        r#"
-//- minicore: iterator
-fn main() {
-    for ident in 0..10 {
-        foo();
-        bar()
-    }
-}
-"#,
-    );
-
-    expect![[r#"
-        fn main() -> () {
-            match builtin#lang(into_iter)(
-                (0) ..(10) ,
-            ) {
-                mut <ra@gennew>11 => loop {
-                    match builtin#lang(next)(
-                        &mut <ra@gennew>11,
-                    ) {
-                        builtin#lang(None) => break,
-                        builtin#lang(Some)(ident) => {
-                            foo();
-                            bar()
-                        },
-                    }
-                },
-            }
-        }"#]]
-    .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT))
-}
-
-#[test]
-fn desugar_builtin_format_args() {
-    let (db, body, def) = lower(
-        r#"
-//- minicore: fmt
-fn main() {
-    let are = "are";
-    let count = 10;
-    builtin#format_args("\u{1b}hello {count:02} {} friends, we {are:?} {0}{last}", "fancy", last = "!");
-}
-"#,
-    );
-
-    expect![[r#"
-        fn main() -> () {
-            let are = "are";
-            let count = 10;
-            builtin#lang(Arguments::new_v1_formatted)(
-                &[
-                    "\u{1b}hello ", " ", " friends, we ", " ", "",
-                ],
-                &[
-                    builtin#lang(Argument::new_display)(
-                        &count,
-                    ), builtin#lang(Argument::new_display)(
-                        &"fancy",
-                    ), builtin#lang(Argument::new_debug)(
-                        &are,
-                    ), builtin#lang(Argument::new_display)(
-                        &"!",
-                    ),
-                ],
-                &[
-                    builtin#lang(Placeholder::new)(
-                        0usize,
-                        ' ',
-                        builtin#lang(Alignment::Unknown),
-                        8u32,
-                        builtin#lang(Count::Implied),
-                        builtin#lang(Count::Is)(
-                            2usize,
-                        ),
-                    ), builtin#lang(Placeholder::new)(
-                        1usize,
-                        ' ',
-                        builtin#lang(Alignment::Unknown),
-                        0u32,
-                        builtin#lang(Count::Implied),
-                        builtin#lang(Count::Implied),
-                    ), builtin#lang(Placeholder::new)(
-                        2usize,
-                        ' ',
-                        builtin#lang(Alignment::Unknown),
-                        0u32,
-                        builtin#lang(Count::Implied),
-                        builtin#lang(Count::Implied),
-                    ), builtin#lang(Placeholder::new)(
-                        1usize,
-                        ' ',
-                        builtin#lang(Alignment::Unknown),
-                        0u32,
-                        builtin#lang(Count::Implied),
-                        builtin#lang(Count::Implied),
-                    ), builtin#lang(Placeholder::new)(
-                        3usize,
-                        ' ',
-                        builtin#lang(Alignment::Unknown),
-                        0u32,
-                        builtin#lang(Count::Implied),
-                        builtin#lang(Count::Implied),
-                    ),
-                ],
-                unsafe {
-                    builtin#lang(UnsafeArg::new)()
-                },
-            );
-        }"#]]
-    .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT))
-}
-
-#[test]
-fn test_macro_hygiene() {
-    let (db, body, def) = lower(
-        r##"
-//- minicore: fmt, from
-//- /main.rs
-mod error;
-
-use crate::error::error;
-
-fn main() {
-    // _ = forces body expansion instead of block def map expansion
-    _ = error!("Failed to resolve path `{}`", node.text());
-}
-//- /error.rs
-macro_rules! _error {
-    ($fmt:expr, $($arg:tt)+) => {$crate::error::intermediate!(format_args!($fmt, $($arg)+))}
-}
-pub(crate) use _error as error;
-macro_rules! _intermediate {
-    ($arg:expr) => {$crate::error::SsrError::new($arg)}
-}
-pub(crate) use _intermediate as intermediate;
-
-pub struct SsrError(pub(crate) core::fmt::Arguments);
-
-impl SsrError {
-    pub(crate) fn new(message: impl Into<core::fmt::Arguments>) -> SsrError {
-        SsrError(message.into())
-    }
-}
-"##,
-    );
-
-    assert_eq!(db.body_with_source_map(def).1.diagnostics(), &[]);
-    expect![[r#"
-        fn main() -> () {
-            _ = $crate::error::SsrError::new(
-                builtin#lang(Arguments::new_v1_formatted)(
-                    &[
-                        "Failed to resolve path `", "`",
-                    ],
-                    &[
-                        builtin#lang(Argument::new_display)(
-                            &node.text(),
-                        ),
-                    ],
-                    &[
-                        builtin#lang(Placeholder::new)(
-                            0usize,
-                            ' ',
-                            builtin#lang(Alignment::Unknown),
-                            0u32,
-                            builtin#lang(Count::Implied),
-                            builtin#lang(Count::Implied),
-                        ),
-                    ],
-                    unsafe {
-                        builtin#lang(UnsafeArg::new)()
-                    },
-                ),
-            );
-        }"#]]
-    .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT))
-}
-
-#[test]
-fn regression_10300() {
-    let (db, body, def) = lower(
-        r#"
-//- minicore: concat, panic
-mod private {
-    pub use core::concat;
-}
-
-macro_rules! m {
-    () => {
-        panic!(concat!($crate::private::concat!("cc")));
-    };
-}
-
-fn f(a: i32, b: u32) -> String {
-    m!();
-}
-"#,
-    );
-
-    let (_, source_map) = db.body_with_source_map(def);
-    assert_eq!(source_map.diagnostics(), &[]);
-
-    for (_, def_map) in body.blocks(&db) {
-        assert_eq!(def_map.diagnostics(), &[]);
-    }
-
-    expect![[r#"
-        fn f(a: i32, b: u32) -> String {
-            {
-                $crate::panicking::panic_fmt(
-                    builtin#lang(Arguments::new_v1_formatted)(
-                        &[
-                            "cc",
-                        ],
-                        &[],
-                        &[],
-                        unsafe {
-                            builtin#lang(UnsafeArg::new)()
-                        },
-                    ),
-                );
-            };
-        }"#]]
-    .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT))
-}
-
-#[test]
-fn destructuring_assignment_tuple_macro() {
-    // This is a funny one. `let m!()() = Bar()` is an error in rustc, because `m!()()` isn't a valid pattern,
-    // but in destructuring assignment it is valid, because `m!()()` is a valid expression, and destructuring
-    // assignments start their lives as expressions. So we have to do the same.
-
-    let (db, body, def) = lower(
-        r#"
-struct Bar();
-
-macro_rules! m {
-    () => { Bar };
-}
-
-fn foo() {
-    m!()() = Bar();
-}
-"#,
-    );
-
-    let (_, source_map) = db.body_with_source_map(def);
-    assert_eq!(source_map.diagnostics(), &[]);
-
-    for (_, def_map) in body.blocks(&db) {
-        assert_eq!(def_map.diagnostics(), &[]);
-    }
-
-    expect![[r#"
-        fn foo() -> () {
-            Bar() = Bar();
-        }"#]]
-    .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT))
-}
-
-#[test]
-fn shadowing_record_variant() {
-    let (_, body, _) = lower(
-        r#"
-enum A {
-    B { field: i32 },
-}
-fn f() {
-    use A::*;
-    match () {
-        B => {}
-    };
-}
-    "#,
-    );
-    assert_eq!(body.bindings.len(), 1, "should have a binding for `B`");
-    assert_eq!(
-        body.bindings[BindingId::from_raw(RawIdx::from_u32(0))].name.as_str(),
-        "B",
-        "should have a binding for `B`",
-    );
-}
-
-#[test]
-fn regression_pretty_print_bind_pat() {
-    let (db, body, owner) = lower(
-        r#"
-fn foo() {
-    let v @ u = 123;
-}
-"#,
-    );
-    let printed = body.pretty_print(&db, owner, Edition::CURRENT);
-    assert_eq!(
-        printed,
-        r#"fn foo() -> () {
-    let v @ u = 123;
-}"#
-    );
-}
-
-#[test]
-fn skip_skips_body() {
-    let (db, body, owner) = lower(
-        r#"
-#[rust_analyzer::skip]
-async fn foo(a: (), b: i32) -> u32 {
-    0 + 1 + b()
-}
-"#,
-    );
-    let printed = body.pretty_print(&db, owner, Edition::CURRENT);
-    expect!["fn foo(�: (), �: i32) -> impl ::core::future::Future::<Output = u32> �"]
-        .assert_eq(&printed);
-}
-
-#[test]
-fn range_bounds_are_hir_exprs() {
-    let (_, body, _) = lower(
-        r#"
-pub const L: i32 = 6;
-mod x {
-    pub const R: i32 = 100;
-}
-const fn f(x: i32) -> i32 {
-    match x {
-        -1..=5 => x * 10,
-        L..=x::R => x * 100,
-        _ => x,
-    }
-}"#,
-    );
-
-    let mtch_arms = body
-        .exprs
-        .iter()
-        .find_map(|(_, expr)| {
-            if let Expr::Match { arms, .. } = expr {
-                return Some(arms);
-            }
-
-            None
-        })
-        .unwrap();
-
-    let MatchArm { pat, .. } = mtch_arms[1];
-    match body.pats[pat] {
-        Pat::Range { start, end } => {
-            let hir_start = &body.exprs[start.unwrap()];
-            let hir_end = &body.exprs[end.unwrap()];
-
-            assert!(matches!(hir_start, Expr::Path { .. }));
-            assert!(matches!(hir_end, Expr::Path { .. }));
-        }
-        _ => {}
-    }
-}
+mod body;
+mod signatures;
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/body.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/body.rs
new file mode 100644
index 0000000000000..d6645dc1d1d38
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/body.rs
@@ -0,0 +1,502 @@
+mod block;
+
+use crate::{DefWithBodyId, ModuleDefId, hir::MatchArm, test_db::TestDB};
+use expect_test::{Expect, expect};
+use la_arena::RawIdx;
+use test_fixture::WithFixture;
+
+use super::super::*;
+
+fn lower(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (TestDB, Arc<Body>, DefWithBodyId) {
+    let db = TestDB::with_files(ra_fixture);
+
+    let krate = db.fetch_test_crate();
+    let def_map = db.crate_def_map(krate);
+    let mut fn_def = None;
+    'outer: for (_, module) in def_map.modules() {
+        for decl in module.scope.declarations() {
+            if let ModuleDefId::FunctionId(it) = decl {
+                fn_def = Some(it);
+                break 'outer;
+            }
+        }
+    }
+    let fn_def = fn_def.unwrap().into();
+
+    let body = db.body(fn_def);
+    (db, body, fn_def)
+}
+
+fn def_map_at(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> String {
+    let (db, position) = TestDB::with_position(ra_fixture);
+
+    let module = db.module_at_position(position);
+    module.def_map(&db).dump(&db)
+}
+
+fn check_block_scopes_at(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
+    let (db, position) = TestDB::with_position(ra_fixture);
+
+    let module = db.module_at_position(position);
+    let actual = module.def_map(&db).dump_block_scopes(&db);
+    expect.assert_eq(&actual);
+}
+
+fn check_at(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
+    let actual = def_map_at(ra_fixture);
+    expect.assert_eq(&actual);
+}
+
+#[test]
+fn your_stack_belongs_to_me() {
+    cov_mark::check!(your_stack_belongs_to_me);
+    lower(
+        r#"
+#![recursion_limit = "32"]
+macro_rules! n_nuple {
+    ($e:tt) => ();
+    ($($rest:tt)*) => {{
+        (n_nuple!($($rest)*)None,)
+    }};
+}
+fn main() { n_nuple!(1,2,3); }
+"#,
+    );
+}
+
+#[test]
+fn your_stack_belongs_to_me2() {
+    cov_mark::check!(overflow_but_not_me);
+    lower(
+        r#"
+#![recursion_limit = "32"]
+macro_rules! foo {
+    () => {{ foo!(); foo!(); }}
+}
+fn main() { foo!(); }
+"#,
+    );
+}
+
+#[test]
+fn recursion_limit() {
+    lower(
+        r#"
+#![recursion_limit = "2"]
+macro_rules! n_nuple {
+    ($e:tt) => ();
+    ($first:tt $($rest:tt)*) => {{
+        n_nuple!($($rest)*)
+    }};
+}
+fn main() { n_nuple!(1,2,3); }
+"#,
+    );
+}
+
+#[test]
+fn issue_3642_bad_macro_stackover() {
+    lower(
+        r#"
+#[macro_export]
+macro_rules! match_ast {
+    (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) };
+
+    (match ($node:expr) {
+        $( ast::$ast:ident($it:ident) => $res:expr, )*
+        _ => $catch_all:expr $(,)?
+    }) => {{
+        $( if let Some($it) = ast::$ast::cast($node.clone()) { $res } else )*
+        { $catch_all }
+    }};
+}
+
+fn main() {
+    let anchor = match_ast! {
+        match parent {
+            as => {},
+            _ => return None
+        }
+    };
+}"#,
+    );
+}
+
+#[test]
+fn macro_resolve() {
+    // Regression test for a path resolution bug introduced with inner item handling.
+    lower(
+        r#"
+macro_rules! vec {
+    () => { () };
+    ($elem:expr; $n:expr) => { () };
+    ($($x:expr),+ $(,)?) => { () };
+}
+mod m {
+    fn outer() {
+        let _ = vec![FileSet::default(); self.len()];
+    }
+}
+"#,
+    );
+}
+
+#[test]
+fn desugar_for_loop() {
+    let (db, body, def) = lower(
+        r#"
+//- minicore: iterator
+fn main() {
+    for ident in 0..10 {
+        foo();
+        bar()
+    }
+}
+"#,
+    );
+
+    expect![[r#"
+        fn main() {
+            match builtin#lang(into_iter)(
+                (0) ..(10) ,
+            ) {
+                mut <ra@gennew>11 => loop {
+                    match builtin#lang(next)(
+                        &mut <ra@gennew>11,
+                    ) {
+                        builtin#lang(None) => break,
+                        builtin#lang(Some)(ident) => {
+                            foo();
+                            bar()
+                        },
+                    }
+                },
+            }
+        }"#]]
+    .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT))
+}
+
+#[test]
+fn desugar_builtin_format_args() {
+    let (db, body, def) = lower(
+        r#"
+//- minicore: fmt
+fn main() {
+    let are = "are";
+    let count = 10;
+    builtin#format_args("\u{1b}hello {count:02} {} friends, we {are:?} {0}{last}", "fancy", last = "!");
+}
+"#,
+    );
+
+    expect![[r#"
+        fn main() {
+            let are = "are";
+            let count = 10;
+            builtin#lang(Arguments::new_v1_formatted)(
+                &[
+                    "\u{1b}hello ", " ", " friends, we ", " ", "",
+                ],
+                &[
+                    builtin#lang(Argument::new_display)(
+                        &count,
+                    ), builtin#lang(Argument::new_display)(
+                        &"fancy",
+                    ), builtin#lang(Argument::new_debug)(
+                        &are,
+                    ), builtin#lang(Argument::new_display)(
+                        &"!",
+                    ),
+                ],
+                &[
+                    builtin#lang(Placeholder::new)(
+                        0usize,
+                        ' ',
+                        builtin#lang(Alignment::Unknown),
+                        8u32,
+                        builtin#lang(Count::Implied),
+                        builtin#lang(Count::Is)(
+                            2,
+                        ),
+                    ), builtin#lang(Placeholder::new)(
+                        1usize,
+                        ' ',
+                        builtin#lang(Alignment::Unknown),
+                        0u32,
+                        builtin#lang(Count::Implied),
+                        builtin#lang(Count::Implied),
+                    ), builtin#lang(Placeholder::new)(
+                        2usize,
+                        ' ',
+                        builtin#lang(Alignment::Unknown),
+                        0u32,
+                        builtin#lang(Count::Implied),
+                        builtin#lang(Count::Implied),
+                    ), builtin#lang(Placeholder::new)(
+                        1usize,
+                        ' ',
+                        builtin#lang(Alignment::Unknown),
+                        0u32,
+                        builtin#lang(Count::Implied),
+                        builtin#lang(Count::Implied),
+                    ), builtin#lang(Placeholder::new)(
+                        3usize,
+                        ' ',
+                        builtin#lang(Alignment::Unknown),
+                        0u32,
+                        builtin#lang(Count::Implied),
+                        builtin#lang(Count::Implied),
+                    ),
+                ],
+                unsafe {
+                    builtin#lang(UnsafeArg::new)()
+                },
+            );
+        }"#]]
+    .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT))
+}
+
+#[test]
+fn test_macro_hygiene() {
+    let (db, body, def) = lower(
+        r##"
+//- minicore: fmt, from
+//- /main.rs
+mod error;
+
+use crate::error::error;
+
+fn main() {
+    // _ = forces body expansion instead of block def map expansion
+    _ = error!("Failed to resolve path `{}`", node.text());
+}
+//- /error.rs
+macro_rules! _error {
+    ($fmt:expr, $($arg:tt)+) => {$crate::error::intermediate!(format_args!($fmt, $($arg)+))}
+}
+pub(crate) use _error as error;
+macro_rules! _intermediate {
+    ($arg:expr) => {$crate::error::SsrError::new($arg)}
+}
+pub(crate) use _intermediate as intermediate;
+
+pub struct SsrError(pub(crate) core::fmt::Arguments);
+
+impl SsrError {
+    pub(crate) fn new(message: impl Into<core::fmt::Arguments>) -> SsrError {
+        SsrError(message.into())
+    }
+}
+"##,
+    );
+
+    assert_eq!(db.body_with_source_map(def).1.diagnostics(), &[]);
+    expect![[r#"
+        fn main() {
+            _ = ra_test_fixture::error::SsrError::new(
+                builtin#lang(Arguments::new_v1_formatted)(
+                    &[
+                        "Failed to resolve path `", "`",
+                    ],
+                    &[
+                        builtin#lang(Argument::new_display)(
+                            &node.text(),
+                        ),
+                    ],
+                    &[
+                        builtin#lang(Placeholder::new)(
+                            0usize,
+                            ' ',
+                            builtin#lang(Alignment::Unknown),
+                            0u32,
+                            builtin#lang(Count::Implied),
+                            builtin#lang(Count::Implied),
+                        ),
+                    ],
+                    unsafe {
+                        builtin#lang(UnsafeArg::new)()
+                    },
+                ),
+            );
+        }"#]]
+    .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT))
+}
+
+#[test]
+fn regression_10300() {
+    let (db, body, def) = lower(
+        r#"
+//- minicore: concat, panic
+mod private {
+    pub use core::concat;
+}
+
+macro_rules! m {
+    () => {
+        panic!(concat!($crate::private::concat!("cc")));
+    };
+}
+
+fn f(a: i32, b: u32) -> String {
+    m!();
+}
+"#,
+    );
+
+    let (_, source_map) = db.body_with_source_map(def);
+    assert_eq!(source_map.diagnostics(), &[]);
+
+    for (_, def_map) in body.blocks(&db) {
+        assert_eq!(def_map.diagnostics(), &[]);
+    }
+
+    expect![[r#"
+        fn f(a, b) {
+            {
+                core::panicking::panic_fmt(
+                    builtin#lang(Arguments::new_v1_formatted)(
+                        &[
+                            "cc",
+                        ],
+                        &[],
+                        &[],
+                        unsafe {
+                            builtin#lang(UnsafeArg::new)()
+                        },
+                    ),
+                );
+            };
+        }"#]]
+    .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT))
+}
+
+#[test]
+fn destructuring_assignment_tuple_macro() {
+    // This is a funny one. `let m!()() = Bar()` is an error in rustc, because `m!()()` isn't a valid pattern,
+    // but in destructuring assignment it is valid, because `m!()()` is a valid expression, and destructuring
+    // assignments start their lives as expressions. So we have to do the same.
+
+    let (db, body, def) = lower(
+        r#"
+struct Bar();
+
+macro_rules! m {
+    () => { Bar };
+}
+
+fn foo() {
+    m!()() = Bar();
+}
+"#,
+    );
+
+    let (_, source_map) = db.body_with_source_map(def);
+    assert_eq!(source_map.diagnostics(), &[]);
+
+    for (_, def_map) in body.blocks(&db) {
+        assert_eq!(def_map.diagnostics(), &[]);
+    }
+
+    expect![[r#"
+        fn foo() {
+            Bar() = Bar();
+        }"#]]
+    .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT))
+}
+
+#[test]
+fn shadowing_record_variant() {
+    let (_, body, _) = lower(
+        r#"
+enum A {
+    B { field: i32 },
+}
+fn f() {
+    use A::*;
+    match () {
+        B => {}
+    };
+}
+    "#,
+    );
+    assert_eq!(body.bindings.len(), 1, "should have a binding for `B`");
+    assert_eq!(
+        body.bindings[BindingId::from_raw(RawIdx::from_u32(0))].name.as_str(),
+        "B",
+        "should have a binding for `B`",
+    );
+}
+
+#[test]
+fn regression_pretty_print_bind_pat() {
+    let (db, body, owner) = lower(
+        r#"
+fn foo() {
+    let v @ u = 123;
+}
+"#,
+    );
+    let printed = body.pretty_print(&db, owner, Edition::CURRENT);
+
+    expect![[r#"
+        fn foo() {
+            let v @ u = 123;
+        }"#]]
+    .assert_eq(&printed);
+}
+
+#[test]
+fn skip_skips_body() {
+    let (db, body, owner) = lower(
+        r#"
+#[rust_analyzer::skip]
+async fn foo(a: (), b: i32) -> u32 {
+    0 + 1 + b()
+}
+"#,
+    );
+    let printed = body.pretty_print(&db, owner, Edition::CURRENT);
+    expect!["fn foo(�, �) �"].assert_eq(&printed);
+}
+
+#[test]
+fn range_bounds_are_hir_exprs() {
+    let (_, body, _) = lower(
+        r#"
+pub const L: i32 = 6;
+mod x {
+    pub const R: i32 = 100;
+}
+const fn f(x: i32) -> i32 {
+    match x {
+        -1..=5 => x * 10,
+        L..=x::R => x * 100,
+        _ => x,
+    }
+}"#,
+    );
+
+    let mtch_arms = body
+        .exprs
+        .iter()
+        .find_map(|(_, expr)| {
+            if let Expr::Match { arms, .. } = expr {
+                return Some(arms);
+            }
+
+            None
+        })
+        .unwrap();
+
+    let MatchArm { pat, .. } = mtch_arms[1];
+    match body.pats[pat] {
+        Pat::Range { start, end } => {
+            let hir_start = &body.exprs[start.unwrap()];
+            let hir_end = &body.exprs[end.unwrap()];
+
+            assert!(matches!(hir_start, Expr::Path { .. }));
+            assert!(matches!(hir_end, Expr::Path { .. }));
+        }
+        _ => {}
+    }
+}
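
The `desugar_for_loop` test above pins down the lowering of a `for` loop: `into_iter` on the iterable, then a `loop` that matches on `next` and breaks on `None`. Written out by hand in ordinary Rust (nothing rust-analyzer-specific; `work` is just a placeholder callee), the same shape looks like this:

    fn main() {
        // Surface syntax:
        //     for ident in 0..10 { work(ident); }
        // Roughly the shape the lowered body takes:
        match IntoIterator::into_iter(0..10) {
            mut iter => loop {
                match Iterator::next(&mut iter) {
                    None => break,
                    Some(ident) => {
                        work(ident);
                    }
                }
            },
        }
    }

    fn work(n: i32) {
        println!("{n}");
    }
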
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/block.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/body/block.rs
similarity index 97%
rename from src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/block.rs
rename to src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/body/block.rs
index e136dd18a55e5..da3b65d4203d1 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/block.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/body/block.rs
@@ -189,8 +189,8 @@ fn f() {
 }
     "#,
         expect![[r#"
-            BlockId(1) in BlockRelativeModuleId { block: Some(BlockId(0)), local_id: Idx::<ModuleData>(1) }
-            BlockId(0) in BlockRelativeModuleId { block: None, local_id: Idx::<ModuleData>(0) }
+            BlockId(3801) in BlockRelativeModuleId { block: Some(BlockId(3800)), local_id: Idx::<ModuleData>(1) }
+            BlockId(3800) in BlockRelativeModuleId { block: None, local_id: Idx::<ModuleData>(0) }
             crate scope
         "#]],
     );
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/signatures.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/signatures.rs
new file mode 100644
index 0000000000000..80561d6470830
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/signatures.rs
@@ -0,0 +1,190 @@
+use crate::{
+    GenericDefId, ModuleDefId,
+    expr_store::pretty::{print_function, print_struct},
+    test_db::TestDB,
+};
+use expect_test::{Expect, expect};
+use test_fixture::WithFixture;
+
+use super::super::*;
+
+fn lower_and_print(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
+    let db = TestDB::with_files(ra_fixture);
+
+    let krate = db.fetch_test_crate();
+    let def_map = db.crate_def_map(krate);
+    let mut defs = vec![];
+    for (_, module) in def_map.modules() {
+        for decl in module.scope.declarations() {
+            let def: GenericDefId = match decl {
+                ModuleDefId::ModuleId(_) => continue,
+                ModuleDefId::FunctionId(id) => id.into(),
+                ModuleDefId::AdtId(id) => id.into(),
+                ModuleDefId::ConstId(id) => id.into(),
+                ModuleDefId::StaticId(id) => id.into(),
+                ModuleDefId::TraitId(id) => id.into(),
+                ModuleDefId::TraitAliasId(id) => id.into(),
+                ModuleDefId::TypeAliasId(id) => id.into(),
+                ModuleDefId::EnumVariantId(_) => continue,
+                ModuleDefId::BuiltinType(_) => continue,
+                ModuleDefId::MacroId(_) => continue,
+            };
+            defs.push(def);
+        }
+    }
+
+    let mut out = String::new();
+    for def in defs {
+        match def {
+            GenericDefId::AdtId(adt_id) => match adt_id {
+                crate::AdtId::StructId(struct_id) => {
+                    out += &print_struct(&db, &db.struct_signature(struct_id), Edition::CURRENT);
+                }
+                crate::AdtId::UnionId(_id) => (),
+                crate::AdtId::EnumId(_id) => (),
+            },
+            GenericDefId::ConstId(_id) => (),
+            GenericDefId::FunctionId(function_id) => {
+                out += &print_function(&db, &db.function_signature(function_id), Edition::CURRENT)
+            }
+
+            GenericDefId::ImplId(_id) => (),
+            GenericDefId::StaticId(_id) => (),
+            GenericDefId::TraitAliasId(_id) => (),
+            GenericDefId::TraitId(_id) => (),
+            GenericDefId::TypeAliasId(_id) => (),
+        }
+    }
+
+    expect.assert_eq(&out);
+}
+
+#[test]
+fn structs() {
+    lower_and_print(
+        r"
+struct S { field: foo, }
+struct S(i32, u32, &'static str);
+#[repr(Rust)]
+struct S;
+
+struct S<'a, 'b, T: Clone, const C: usize = 3, X = ()> where X: Default, for<'a, 'c> fn() -> i32: for<'b> Trait<'a, Item = Boo>;
+#[repr(C, packed)]
+struct S {}
+",
+        expect![[r#"
+            struct S {...}
+            struct S(...)
+            ;
+            struct S;
+            struct S<'a, 'b, T, const C: usize = 3, X = ()>
+            where
+                T: Clone,
+                X: Default,
+                for<'a, 'c> fn() -> i32: for<'b> Trait::<'a, Item = Boo>
+            ;
+            #[repr(C)]
+            #[repr(pack(1))]
+            struct S {...}
+        "#]],
+    );
+}
+
+#[test]
+fn functions() {
+    lower_and_print(
+        r#"
+fn foo<'a, const C: usize = 314235, T: Trait<Item = A> = B>(Struct { foo: bar }: &Struct, _: (), a: u32) -> &'a dyn Fn() -> i32 where (): Default {}
+const async unsafe extern "C" fn a() {}
+fn ret_impl_trait() -> impl Trait {}
+"#,
+        expect![[r#"
+            fn foo<'a, const C: usize = 314235, T = B>(&Struct, (), u32) -> &'a dyn Fn::<(), Output = i32>
+            where
+                T: Trait::<Item = A>,
+                (): Default
+             {...}
+            const async unsafe extern "C" fn a() -> impl ::core::future::Future::<Output = ()> {...}
+            fn ret_impl_trait() -> impl Trait {...}
+        "#]],
+    );
+}
+
+#[test]
+fn argument_position_impl_trait_functions() {
+    lower_and_print(
+        r"
+fn impl_trait_args<T>(_: impl Trait) {}
+fn impl_trait_args2<T>(_: impl Trait<impl Trait>) {}
+
+fn impl_trait_ret<T>() -> impl Trait {}
+fn impl_trait_ret2<T>() -> impl Trait<impl Trait> {}
+
+fn not_allowed1(f: impl Fn(impl Foo)) {
+    let foo = S;
+    f(foo);
+}
+
+// This caused stack overflow in #17498
+fn not_allowed2(f: impl Fn(&impl Foo)) {
+    let foo = S;
+    f(&foo);
+}
+
+fn not_allowed3(bar: impl Bar<impl Foo>) {}
+
+// This also caused stack overflow
+fn not_allowed4(bar: impl Bar<&impl Foo>) {}
+
+fn allowed1(baz: impl Baz<Assoc = impl Foo>) {}
+
+fn allowed2<'a>(baz: impl Baz<Assoc = &'a (impl Foo + 'a)>) {}
+
+fn allowed3(baz: impl Baz<Assoc = Qux<impl Foo>>) {}
+",
+        expect![[r#"
+            fn impl_trait_args<T, Param[1]>(Param[1])
+            where
+                Param[1]: Trait
+             {...}
+            fn impl_trait_args2<T, Param[1]>(Param[1])
+            where
+                Param[1]: Trait::<{error}>
+             {...}
+            fn impl_trait_ret<T>() -> impl Trait {...}
+            fn impl_trait_ret2<T>() -> impl Trait::<{error}> {...}
+            fn not_allowed1<Param[0]>(Param[0])
+            where
+                Param[0]: Fn::<({error}), Output = ()>
+             {...}
+            fn not_allowed2<Param[0]>(Param[0])
+            where
+                Param[0]: Fn::<(&{error}), Output = ()>
+             {...}
+            fn not_allowed3<Param[0]>(Param[0])
+            where
+                Param[0]: Bar::<{error}>
+             {...}
+            fn not_allowed4<Param[0]>(Param[0])
+            where
+                Param[0]: Bar::<&{error}>
+             {...}
+            fn allowed1<Param[0], Param[1]>(Param[1])
+            where
+                Param[0]: Foo,
+                Param[1]: Baz::<Assoc = Param[0]>
+             {...}
+            fn allowed2<'a, Param[0], Param[1]>(Param[1])
+            where
+                Param[0]: Foo,
+                Param[0]: 'a,
+                Param[1]: Baz::<Assoc = &'a Param[0]>
+             {...}
+            fn allowed3<Param[0], Param[1]>(Param[1])
+            where
+                Param[0]: Foo,
+                Param[1]: Baz::<Assoc = Qux::<Param[0]>>
+             {...}
+        "#]],
+    );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs
index c30ad0163b9db..9d62d9ce6526c 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs
@@ -2,21 +2,21 @@
 
 use std::{cell::Cell, cmp::Ordering, iter};
 
-use base_db::{CrateId, CrateOrigin, LangCrateOrigin};
+use base_db::{Crate, CrateOrigin, LangCrateOrigin};
 use hir_expand::{
-    name::{AsName, Name},
     Lookup,
+    mod_path::{ModPath, PathKind},
+    name::{AsName, Name},
 };
 use intern::sym;
 use rustc_hash::FxHashSet;
 
 use crate::{
+    ImportPathConfig, ModuleDefId, ModuleId,
     db::DefDatabase,
     item_scope::ItemInNs,
     nameres::DefMap,
-    path::{ModPath, PathKind},
     visibility::{Visibility, VisibilityExplicitness},
-    ImportPathConfig, ModuleDefId, ModuleId,
 };
 
 /// Find a path that can be used to refer to a certain item. This can depend on
@@ -50,7 +50,7 @@ pub fn find_path(
             prefix: prefix_kind,
             cfg,
             ignore_local_imports,
-            is_std_item: db.crate_graph()[item_module.krate()].origin.is_lang(),
+            is_std_item: item_module.krate().data(db).origin.is_lang(),
             from,
             from_def_map: &from.def_map(db),
             fuel: Cell::new(FIND_PATH_FUEL),
@@ -134,10 +134,11 @@ fn find_path_inner(ctx: &FindPathCtx<'_>, item: ItemInNs, max_len: usize) -> Opt
 
     if let Some(ModuleDefId::EnumVariantId(variant)) = item.as_module_def_id() {
         // - if the item is an enum variant, refer to it via the enum
-        if let Some(mut path) =
-            find_path_inner(ctx, ItemInNs::Types(variant.lookup(ctx.db).parent.into()), max_len)
-        {
-            path.push_segment(ctx.db.enum_variant_data(variant).name.clone());
+        let loc = variant.lookup(ctx.db);
+        if let Some(mut path) = find_path_inner(ctx, ItemInNs::Types(loc.parent.into()), max_len) {
+            path.push_segment(
+                ctx.db.enum_variants(loc.parent).variants[loc.index as usize].1.clone(),
+            );
             return Some(path);
         }
         // If this doesn't work, it seems we have no way of referring to the
@@ -174,9 +175,9 @@ fn find_path_for_module(
         }
         // - otherwise if the item is the crate root of a dependency crate, return the name from the extern prelude
 
-        let root_def_map = ctx.from.derive_crate_root().def_map(ctx.db);
+        let root_local_def_map = ctx.from.derive_crate_root().local_def_map(ctx.db).1;
         // rev here so we prefer looking at renamed extern decls first
-        for (name, (def_id, _extern_crate)) in root_def_map.extern_prelude().rev() {
+        for (name, (def_id, _extern_crate)) in root_local_def_map.extern_prelude().rev() {
             if crate_root != def_id {
                 continue;
             }
@@ -360,7 +361,7 @@ fn calculate_best_path(
         // too (unless we can't name it at all). It could *also* be (re)exported by the same crate
         // that wants to import it here, but we always prefer to use the external path here.
 
-        ctx.db.crate_graph()[ctx.from.krate].dependencies.iter().for_each(|dep| {
+        ctx.from.krate.data(ctx.db).dependencies.iter().for_each(|dep| {
             find_in_dep(ctx, visited_modules, item, max_len, best_choice, dep.crate_id)
         });
     }
@@ -373,11 +374,10 @@ fn find_in_sysroot(
     max_len: usize,
     best_choice: &mut Option<Choice>,
 ) {
-    let crate_graph = ctx.db.crate_graph();
-    let dependencies = &crate_graph[ctx.from.krate].dependencies;
+    let dependencies = &ctx.from.krate.data(ctx.db).dependencies;
     let mut search = |lang, best_choice: &mut _| {
         if let Some(dep) = dependencies.iter().filter(|it| it.is_sysroot()).find(|dep| {
-            match crate_graph[dep.crate_id].origin {
+            match dep.crate_id.data(ctx.db).origin {
                 CrateOrigin::Lang(l) => l == lang,
                 _ => false,
             }
@@ -419,7 +419,7 @@ fn find_in_dep(
     item: ItemInNs,
     max_len: usize,
     best_choice: &mut Option<Choice>,
-    dep: CrateId,
+    dep: Crate,
 ) {
     let import_map = ctx.db.import_map(dep);
     let Some(import_info_for) = import_map.import_info_for(item) else {
@@ -652,7 +652,7 @@ fn find_local_import_locations(
 
 #[cfg(test)]
 mod tests {
-    use expect_test::{expect, Expect};
+    use expect_test::{Expect, expect};
     use hir_expand::db::ExpandDatabase;
     use itertools::Itertools;
     use span::Edition;
@@ -688,9 +688,10 @@ mod tests {
         })
         .unwrap();
 
-        let def_map = module.def_map(&db);
+        let (def_map, local_def_map) = module.local_def_map(&db);
         let resolved = def_map
             .resolve_path(
+                &local_def_map,
                 &db,
                 module.local_id,
                 &mod_path,
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/generics.rs b/src/tools/rust-analyzer/crates/hir-def/src/generics.rs
deleted file mode 100644
index e2b36da79b232..0000000000000
--- a/src/tools/rust-analyzer/crates/hir-def/src/generics.rs
+++ /dev/null
@@ -1,900 +0,0 @@
-//! Many kinds of items or constructs can have generic parameters: functions,
-//! structs, impls, traits, etc. This module provides a common HIR for these
-//! generic parameters. See also the `Generics` type and the `generics_of` query
-//! in rustc.
-
-use std::{ops, sync::LazyLock};
-
-use either::Either;
-use hir_expand::{
-    name::{AsName, Name},
-    ExpandResult,
-};
-use la_arena::{Arena, RawIdx};
-use stdx::{
-    impl_from,
-    thin_vec::{EmptyOptimizedThinVec, ThinVec},
-};
-use syntax::ast::{self, HasGenericParams, HasName, HasTypeBounds};
-use triomphe::Arc;
-
-use crate::{
-    db::DefDatabase,
-    expander::Expander,
-    item_tree::{AttrOwner, FileItemTreeId, GenericModItem, GenericsItemTreeNode, ItemTree},
-    lower::LowerCtx,
-    nameres::{DefMap, MacroSubNs},
-    path::{AssociatedTypeBinding, GenericArg, GenericArgs, NormalPath, Path},
-    type_ref::{
-        ArrayType, ConstRef, FnType, LifetimeRef, PathId, RefType, TypeBound, TypeRef, TypeRefId,
-        TypesMap, TypesSourceMap,
-    },
-    AdtId, ConstParamId, GenericDefId, HasModule, ItemTreeLoc, LifetimeParamId,
-    LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup, TypeOrConstParamId, TypeParamId,
-};
-
-/// The index of the self param in the generic of the non-parent definition.
-const SELF_PARAM_ID_IN_SELF: la_arena::Idx<TypeOrConstParamData> =
-    LocalTypeOrConstParamId::from_raw(RawIdx::from_u32(0));
-
-/// Data about a generic type parameter (to a function, struct, impl, ...).
-#[derive(Clone, PartialEq, Eq, Debug, Hash)]
-pub struct TypeParamData {
-    /// [`None`] only if the type ref is an [`TypeRef::ImplTrait`]. FIXME: Might be better to just
-    /// make it always be a value, giving impl trait a special name.
-    pub name: Option<Name>,
-    pub default: Option<TypeRefId>,
-    pub provenance: TypeParamProvenance,
-}
-
-/// Data about a generic lifetime parameter (to a function, struct, impl, ...).
-#[derive(Clone, PartialEq, Eq, Debug, Hash)]
-pub struct LifetimeParamData {
-    pub name: Name,
-}
-
-/// Data about a generic const parameter (to a function, struct, impl, ...).
-#[derive(Clone, PartialEq, Eq, Debug, Hash)]
-pub struct ConstParamData {
-    pub name: Name,
-    pub ty: TypeRefId,
-    pub default: Option<ConstRef>,
-}
-
-#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
-pub enum TypeParamProvenance {
-    TypeParamList,
-    TraitSelf,
-    ArgumentImplTrait,
-}
-
-#[derive(Clone, PartialEq, Eq, Debug, Hash)]
-pub enum TypeOrConstParamData {
-    TypeParamData(TypeParamData),
-    ConstParamData(ConstParamData),
-}
-
-impl TypeOrConstParamData {
-    pub fn name(&self) -> Option<&Name> {
-        match self {
-            TypeOrConstParamData::TypeParamData(it) => it.name.as_ref(),
-            TypeOrConstParamData::ConstParamData(it) => Some(&it.name),
-        }
-    }
-
-    pub fn has_default(&self) -> bool {
-        match self {
-            TypeOrConstParamData::TypeParamData(it) => it.default.is_some(),
-            TypeOrConstParamData::ConstParamData(it) => it.default.is_some(),
-        }
-    }
-
-    pub fn type_param(&self) -> Option<&TypeParamData> {
-        match self {
-            TypeOrConstParamData::TypeParamData(it) => Some(it),
-            TypeOrConstParamData::ConstParamData(_) => None,
-        }
-    }
-
-    pub fn const_param(&self) -> Option<&ConstParamData> {
-        match self {
-            TypeOrConstParamData::TypeParamData(_) => None,
-            TypeOrConstParamData::ConstParamData(it) => Some(it),
-        }
-    }
-
-    pub fn is_trait_self(&self) -> bool {
-        match self {
-            TypeOrConstParamData::TypeParamData(it) => {
-                it.provenance == TypeParamProvenance::TraitSelf
-            }
-            TypeOrConstParamData::ConstParamData(_) => false,
-        }
-    }
-}
-
-impl_from!(TypeParamData, ConstParamData for TypeOrConstParamData);
-
-#[derive(Clone, PartialEq, Eq, Debug, Hash)]
-pub enum GenericParamData {
-    TypeParamData(TypeParamData),
-    ConstParamData(ConstParamData),
-    LifetimeParamData(LifetimeParamData),
-}
-
-impl GenericParamData {
-    pub fn name(&self) -> Option<&Name> {
-        match self {
-            GenericParamData::TypeParamData(it) => it.name.as_ref(),
-            GenericParamData::ConstParamData(it) => Some(&it.name),
-            GenericParamData::LifetimeParamData(it) => Some(&it.name),
-        }
-    }
-
-    pub fn type_param(&self) -> Option<&TypeParamData> {
-        match self {
-            GenericParamData::TypeParamData(it) => Some(it),
-            _ => None,
-        }
-    }
-
-    pub fn const_param(&self) -> Option<&ConstParamData> {
-        match self {
-            GenericParamData::ConstParamData(it) => Some(it),
-            _ => None,
-        }
-    }
-
-    pub fn lifetime_param(&self) -> Option<&LifetimeParamData> {
-        match self {
-            GenericParamData::LifetimeParamData(it) => Some(it),
-            _ => None,
-        }
-    }
-}
-
-impl_from!(TypeParamData, ConstParamData, LifetimeParamData for GenericParamData);
-
-pub enum GenericParamDataRef<'a> {
-    TypeParamData(&'a TypeParamData),
-    ConstParamData(&'a ConstParamData),
-    LifetimeParamData(&'a LifetimeParamData),
-}
-
-/// Data about the generic parameters of a function, struct, impl, etc.
-#[derive(Clone, PartialEq, Eq, Debug, Hash)]
-pub struct GenericParams {
-    type_or_consts: Arena<TypeOrConstParamData>,
-    lifetimes: Arena<LifetimeParamData>,
-    where_predicates: Box<[WherePredicate]>,
-    pub types_map: TypesMap,
-}
-
-impl ops::Index<LocalTypeOrConstParamId> for GenericParams {
-    type Output = TypeOrConstParamData;
-    fn index(&self, index: LocalTypeOrConstParamId) -> &TypeOrConstParamData {
-        &self.type_or_consts[index]
-    }
-}
-
-impl ops::Index<LocalLifetimeParamId> for GenericParams {
-    type Output = LifetimeParamData;
-    fn index(&self, index: LocalLifetimeParamId) -> &LifetimeParamData {
-        &self.lifetimes[index]
-    }
-}
-
-/// A single predicate from a where clause, i.e. `where Type: Trait`. Combined
-/// where clauses like `where T: Foo + Bar` are turned into multiple of these.
-/// It might still result in multiple actual predicates though, because of
-/// associated type bindings like `Iterator<Item = u32>`.
-#[derive(Clone, PartialEq, Eq, Debug, Hash)]
-pub enum WherePredicate {
-    TypeBound { target: WherePredicateTypeTarget, bound: TypeBound },
-    Lifetime { target: LifetimeRef, bound: LifetimeRef },
-    ForLifetime { lifetimes: Box<[Name]>, target: WherePredicateTypeTarget, bound: TypeBound },
-}
-
-#[derive(Clone, PartialEq, Eq, Debug, Hash)]
-pub enum WherePredicateTypeTarget {
-    TypeRef(TypeRefId),
-    /// For desugared where predicates that can directly refer to a type param.
-    TypeOrConstParam(LocalTypeOrConstParamId),
-}
-
-impl GenericParams {
-    /// Number of Generic parameters (type_or_consts + lifetimes)
-    #[inline]
-    pub fn len(&self) -> usize {
-        self.type_or_consts.len() + self.lifetimes.len()
-    }
-
-    #[inline]
-    pub fn len_lifetimes(&self) -> usize {
-        self.lifetimes.len()
-    }
-
-    #[inline]
-    pub fn len_type_or_consts(&self) -> usize {
-        self.type_or_consts.len()
-    }
-
-    #[inline]
-    pub fn is_empty(&self) -> bool {
-        self.len() == 0
-    }
-
-    #[inline]
-    pub fn no_predicates(&self) -> bool {
-        self.where_predicates.is_empty()
-    }
-
-    #[inline]
-    pub fn where_predicates(&self) -> std::slice::Iter<'_, WherePredicate> {
-        self.where_predicates.iter()
-    }
-
-    /// Iterator of type_or_consts field
-    #[inline]
-    pub fn iter_type_or_consts(
-        &self,
-    ) -> impl DoubleEndedIterator<Item = (LocalTypeOrConstParamId, &TypeOrConstParamData)> {
-        self.type_or_consts.iter()
-    }
-
-    /// Iterator of lifetimes field
-    #[inline]
-    pub fn iter_lt(
-        &self,
-    ) -> impl DoubleEndedIterator<Item = (LocalLifetimeParamId, &LifetimeParamData)> {
-        self.lifetimes.iter()
-    }
-
-    pub fn find_type_by_name(&self, name: &Name, parent: GenericDefId) -> Option<TypeParamId> {
-        self.type_or_consts.iter().find_map(|(id, p)| {
-            if p.name().as_ref() == Some(&name) && p.type_param().is_some() {
-                Some(TypeParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent }))
-            } else {
-                None
-            }
-        })
-    }
-
-    pub fn find_const_by_name(&self, name: &Name, parent: GenericDefId) -> Option<ConstParamId> {
-        self.type_or_consts.iter().find_map(|(id, p)| {
-            if p.name().as_ref() == Some(&name) && p.const_param().is_some() {
-                Some(ConstParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent }))
-            } else {
-                None
-            }
-        })
-    }
-
-    #[inline]
-    pub fn trait_self_param(&self) -> Option<LocalTypeOrConstParamId> {
-        if self.type_or_consts.is_empty() {
-            return None;
-        }
-        matches!(
-            self.type_or_consts[SELF_PARAM_ID_IN_SELF],
-            TypeOrConstParamData::TypeParamData(TypeParamData {
-                provenance: TypeParamProvenance::TraitSelf,
-                ..
-            })
-        )
-        .then(|| SELF_PARAM_ID_IN_SELF)
-    }
-
-    pub fn find_lifetime_by_name(
-        &self,
-        name: &Name,
-        parent: GenericDefId,
-    ) -> Option<LifetimeParamId> {
-        self.lifetimes.iter().find_map(|(id, p)| {
-            if &p.name == name {
-                Some(LifetimeParamId { local_id: id, parent })
-            } else {
-                None
-            }
-        })
-    }
-
-    pub(crate) fn generic_params_query(
-        db: &dyn DefDatabase,
-        def: GenericDefId,
-    ) -> Arc<GenericParams> {
-        db.generic_params_with_source_map(def).0
-    }
-
-    pub(crate) fn generic_params_with_source_map_query(
-        db: &dyn DefDatabase,
-        def: GenericDefId,
-    ) -> (Arc<GenericParams>, Option<Arc<TypesSourceMap>>) {
-        let _p = tracing::info_span!("generic_params_query").entered();
-
-        let krate = def.krate(db);
-        let cfg_options = db.crate_graph();
-        let cfg_options = &cfg_options[krate].cfg_options;
-
-        // Returns the generic parameters that are enabled under the current `#[cfg]` options
-        let enabled_params =
-            |params: &Arc<GenericParams>, item_tree: &ItemTree, parent: GenericModItem| {
-                let enabled = |param| item_tree.attrs(db, krate, param).is_cfg_enabled(cfg_options);
-                let attr_owner_ct = |param| AttrOwner::TypeOrConstParamData(parent, param);
-                let attr_owner_lt = |param| AttrOwner::LifetimeParamData(parent, param);
-
-                // In the common case, no parameters will by disabled by `#[cfg]` attributes.
-                // Therefore, make a first pass to check if all parameters are enabled and, if so,
-                // clone the `Interned<GenericParams>` instead of recreating an identical copy.
-                let all_type_or_consts_enabled =
-                    params.type_or_consts.iter().all(|(idx, _)| enabled(attr_owner_ct(idx)));
-                let all_lifetimes_enabled =
-                    params.lifetimes.iter().all(|(idx, _)| enabled(attr_owner_lt(idx)));
-
-                if all_type_or_consts_enabled && all_lifetimes_enabled {
-                    params.clone()
-                } else {
-                    Arc::new(GenericParams {
-                        type_or_consts: all_type_or_consts_enabled
-                            .then(|| params.type_or_consts.clone())
-                            .unwrap_or_else(|| {
-                                params
-                                    .type_or_consts
-                                    .iter()
-                                    .filter(|&(idx, _)| enabled(attr_owner_ct(idx)))
-                                    .map(|(_, param)| param.clone())
-                                    .collect()
-                            }),
-                        lifetimes: all_lifetimes_enabled
-                            .then(|| params.lifetimes.clone())
-                            .unwrap_or_else(|| {
-                                params
-                                    .lifetimes
-                                    .iter()
-                                    .filter(|&(idx, _)| enabled(attr_owner_lt(idx)))
-                                    .map(|(_, param)| param.clone())
-                                    .collect()
-                            }),
-                        where_predicates: params.where_predicates.clone(),
-                        types_map: params.types_map.clone(),
-                    })
-                }
-            };
-        fn id_to_generics<Id: GenericsItemTreeNode>(
-            db: &dyn DefDatabase,
-            id: impl for<'db> Lookup<
-                Database<'db> = dyn DefDatabase + 'db,
-                Data = impl ItemTreeLoc<Id = Id>,
-            >,
-            enabled_params: impl Fn(
-                &Arc<GenericParams>,
-                &ItemTree,
-                GenericModItem,
-            ) -> Arc<GenericParams>,
-        ) -> (Arc<GenericParams>, Option<Arc<TypesSourceMap>>)
-        where
-            FileItemTreeId<Id>: Into<GenericModItem>,
-        {
-            let id = id.lookup(db).item_tree_id();
-            let tree = id.item_tree(db);
-            let item = &tree[id.value];
-            (enabled_params(item.generic_params(), &tree, id.value.into()), None)
-        }
-
-        match def {
-            GenericDefId::FunctionId(id) => {
-                let loc = id.lookup(db);
-                let tree = loc.id.item_tree(db);
-                let item = &tree[loc.id.value];
-
-                let enabled_params =
-                    enabled_params(&item.explicit_generic_params, &tree, loc.id.value.into());
-
-                let module = loc.container.module(db);
-                let func_data = db.function_data(id);
-                if func_data.params.is_empty() {
-                    (enabled_params, None)
-                } else {
-                    let source_maps = loc.id.item_tree_with_source_map(db).1;
-                    let item_source_maps = source_maps.function(loc.id.value);
-                    let mut generic_params = GenericParamsCollector {
-                        type_or_consts: enabled_params.type_or_consts.clone(),
-                        lifetimes: enabled_params.lifetimes.clone(),
-                        where_predicates: enabled_params.where_predicates.clone().into(),
-                    };
-
-                    let (mut types_map, mut types_source_maps) =
-                        (enabled_params.types_map.clone(), item_source_maps.generics().clone());
-                    // Don't create an `Expander` if not needed since this
-                    // could cause a reparse after the `ItemTree` has been created due to the spanmap.
-                    let mut expander = None;
-                    for &param in func_data.params.iter() {
-                        generic_params.fill_implicit_impl_trait_args(
-                            db,
-                            &mut types_map,
-                            &mut types_source_maps,
-                            &mut expander,
-                            &mut || {
-                                (module.def_map(db), Expander::new(db, loc.id.file_id(), module))
-                            },
-                            param,
-                            &item.types_map,
-                            item_source_maps.item(),
-                        );
-                    }
-                    let generics = generic_params.finish(types_map, &mut types_source_maps);
-                    (generics, Some(Arc::new(types_source_maps)))
-                }
-            }
-            GenericDefId::AdtId(AdtId::StructId(id)) => id_to_generics(db, id, enabled_params),
-            GenericDefId::AdtId(AdtId::EnumId(id)) => id_to_generics(db, id, enabled_params),
-            GenericDefId::AdtId(AdtId::UnionId(id)) => id_to_generics(db, id, enabled_params),
-            GenericDefId::TraitId(id) => id_to_generics(db, id, enabled_params),
-            GenericDefId::TraitAliasId(id) => id_to_generics(db, id, enabled_params),
-            GenericDefId::TypeAliasId(id) => id_to_generics(db, id, enabled_params),
-            GenericDefId::ImplId(id) => id_to_generics(db, id, enabled_params),
-            GenericDefId::ConstId(_) | GenericDefId::StaticId(_) => (
-                Arc::new(GenericParams {
-                    type_or_consts: Default::default(),
-                    lifetimes: Default::default(),
-                    where_predicates: Default::default(),
-                    types_map: Default::default(),
-                }),
-                None,
-            ),
-        }
-    }
-}
-
-#[derive(Clone, Default)]
-pub(crate) struct GenericParamsCollector {
-    pub(crate) type_or_consts: Arena<TypeOrConstParamData>,
-    lifetimes: Arena<LifetimeParamData>,
-    where_predicates: Vec<WherePredicate>,
-}
-
-impl GenericParamsCollector {
-    pub(crate) fn fill(
-        &mut self,
-        lower_ctx: &mut LowerCtx<'_>,
-        node: &dyn HasGenericParams,
-        add_param_attrs: impl FnMut(
-            Either<LocalTypeOrConstParamId, LocalLifetimeParamId>,
-            ast::GenericParam,
-        ),
-    ) {
-        if let Some(params) = node.generic_param_list() {
-            self.fill_params(lower_ctx, params, add_param_attrs)
-        }
-        if let Some(where_clause) = node.where_clause() {
-            self.fill_where_predicates(lower_ctx, where_clause);
-        }
-    }
-
-    pub(crate) fn fill_bounds(
-        &mut self,
-        lower_ctx: &mut LowerCtx<'_>,
-        type_bounds: Option<ast::TypeBoundList>,
-        target: Either<TypeRefId, LifetimeRef>,
-    ) {
-        for bound in type_bounds.iter().flat_map(|type_bound_list| type_bound_list.bounds()) {
-            self.add_where_predicate_from_bound(lower_ctx, bound, None, target.clone());
-        }
-    }
-
-    fn fill_params(
-        &mut self,
-        lower_ctx: &mut LowerCtx<'_>,
-        params: ast::GenericParamList,
-        mut add_param_attrs: impl FnMut(
-            Either<LocalTypeOrConstParamId, LocalLifetimeParamId>,
-            ast::GenericParam,
-        ),
-    ) {
-        for type_or_const_param in params.type_or_const_params() {
-            match type_or_const_param {
-                ast::TypeOrConstParam::Type(type_param) => {
-                    let name = type_param.name().map_or_else(Name::missing, |it| it.as_name());
-                    // FIXME: Use `Path::from_src`
-                    let default =
-                        type_param.default_type().map(|it| TypeRef::from_ast(lower_ctx, it));
-                    let param = TypeParamData {
-                        name: Some(name.clone()),
-                        default,
-                        provenance: TypeParamProvenance::TypeParamList,
-                    };
-                    let idx = self.type_or_consts.alloc(param.into());
-                    let type_ref = lower_ctx.alloc_type_ref_desugared(TypeRef::Path(name.into()));
-                    self.fill_bounds(
-                        lower_ctx,
-                        type_param.type_bound_list(),
-                        Either::Left(type_ref),
-                    );
-                    add_param_attrs(Either::Left(idx), ast::GenericParam::TypeParam(type_param));
-                }
-                ast::TypeOrConstParam::Const(const_param) => {
-                    let name = const_param.name().map_or_else(Name::missing, |it| it.as_name());
-                    let ty = TypeRef::from_ast_opt(lower_ctx, const_param.ty());
-                    let param = ConstParamData {
-                        name,
-                        ty,
-                        default: ConstRef::from_const_param(lower_ctx, &const_param),
-                    };
-                    let idx = self.type_or_consts.alloc(param.into());
-                    add_param_attrs(Either::Left(idx), ast::GenericParam::ConstParam(const_param));
-                }
-            }
-        }
-        for lifetime_param in params.lifetime_params() {
-            let name =
-                lifetime_param.lifetime().map_or_else(Name::missing, |lt| Name::new_lifetime(&lt));
-            let param = LifetimeParamData { name: name.clone() };
-            let idx = self.lifetimes.alloc(param);
-            let lifetime_ref = LifetimeRef::new_name(name);
-            self.fill_bounds(
-                lower_ctx,
-                lifetime_param.type_bound_list(),
-                Either::Right(lifetime_ref),
-            );
-            add_param_attrs(Either::Right(idx), ast::GenericParam::LifetimeParam(lifetime_param));
-        }
-    }
-
-    fn fill_where_predicates(
-        &mut self,
-        lower_ctx: &mut LowerCtx<'_>,
-        where_clause: ast::WhereClause,
-    ) {
-        for pred in where_clause.predicates() {
-            let target = if let Some(type_ref) = pred.ty() {
-                Either::Left(TypeRef::from_ast(lower_ctx, type_ref))
-            } else if let Some(lifetime) = pred.lifetime() {
-                Either::Right(LifetimeRef::new(&lifetime))
-            } else {
-                continue;
-            };
-
-            let lifetimes: Option<Box<_>> = pred.generic_param_list().map(|param_list| {
-                // Higher-Ranked Trait Bounds
-                param_list
-                    .lifetime_params()
-                    .map(|lifetime_param| {
-                        lifetime_param
-                            .lifetime()
-                            .map_or_else(Name::missing, |lt| Name::new_lifetime(&lt))
-                    })
-                    .collect()
-            });
-            for bound in pred.type_bound_list().iter().flat_map(|l| l.bounds()) {
-                self.add_where_predicate_from_bound(
-                    lower_ctx,
-                    bound,
-                    lifetimes.as_deref(),
-                    target.clone(),
-                );
-            }
-        }
-    }
-
-    fn add_where_predicate_from_bound(
-        &mut self,
-        lower_ctx: &mut LowerCtx<'_>,
-        bound: ast::TypeBound,
-        hrtb_lifetimes: Option<&[Name]>,
-        target: Either<TypeRefId, LifetimeRef>,
-    ) {
-        let bound = TypeBound::from_ast(lower_ctx, bound);
-        self.fill_impl_trait_bounds(lower_ctx.take_impl_traits_bounds());
-        let predicate = match (target, bound) {
-            (Either::Left(type_ref), bound) => match hrtb_lifetimes {
-                Some(hrtb_lifetimes) => WherePredicate::ForLifetime {
-                    lifetimes: hrtb_lifetimes.to_vec().into_boxed_slice(),
-                    target: WherePredicateTypeTarget::TypeRef(type_ref),
-                    bound,
-                },
-                None => WherePredicate::TypeBound {
-                    target: WherePredicateTypeTarget::TypeRef(type_ref),
-                    bound,
-                },
-            },
-            (Either::Right(lifetime), TypeBound::Lifetime(bound)) => {
-                WherePredicate::Lifetime { target: lifetime, bound }
-            }
-            _ => return,
-        };
-        self.where_predicates.push(predicate);
-    }
-
-    fn fill_impl_trait_bounds(&mut self, impl_bounds: Vec<ThinVec<TypeBound>>) {
-        for bounds in impl_bounds {
-            let param = TypeParamData {
-                name: None,
-                default: None,
-                provenance: TypeParamProvenance::ArgumentImplTrait,
-            };
-            let param_id = self.type_or_consts.alloc(param.into());
-            for bound in &bounds {
-                self.where_predicates.push(WherePredicate::TypeBound {
-                    target: WherePredicateTypeTarget::TypeOrConstParam(param_id),
-                    bound: bound.clone(),
-                });
-            }
-        }
-    }
-
-    fn fill_implicit_impl_trait_args(
-        &mut self,
-        db: &dyn DefDatabase,
-        generics_types_map: &mut TypesMap,
-        generics_types_source_map: &mut TypesSourceMap,
-        // FIXME: Change this back to `LazyCell` if https://github.com/rust-lang/libs-team/issues/429 is accepted.
-        exp: &mut Option<(Arc<DefMap>, Expander)>,
-        exp_fill: &mut dyn FnMut() -> (Arc<DefMap>, Expander),
-        type_ref: TypeRefId,
-        types_map: &TypesMap,
-        types_source_map: &TypesSourceMap,
-    ) {
-        TypeRef::walk(type_ref, types_map, &mut |type_ref| {
-            if let TypeRef::ImplTrait(bounds) = type_ref {
-                let param = TypeParamData {
-                    name: None,
-                    default: None,
-                    provenance: TypeParamProvenance::ArgumentImplTrait,
-                };
-                let param_id = self.type_or_consts.alloc(param.into());
-                for bound in bounds {
-                    let bound = copy_type_bound(
-                        bound,
-                        types_map,
-                        types_source_map,
-                        generics_types_map,
-                        generics_types_source_map,
-                    );
-                    self.where_predicates.push(WherePredicate::TypeBound {
-                        target: WherePredicateTypeTarget::TypeOrConstParam(param_id),
-                        bound,
-                    });
-                }
-            }
-
-            if let TypeRef::Macro(mc) = type_ref {
-                let macro_call = mc.to_node(db.upcast());
-                let (def_map, expander) = exp.get_or_insert_with(&mut *exp_fill);
-
-                let module = expander.module.local_id;
-                let resolver = |path: &_| {
-                    def_map
-                        .resolve_path(
-                            db,
-                            module,
-                            path,
-                            crate::item_scope::BuiltinShadowMode::Other,
-                            Some(MacroSubNs::Bang),
-                        )
-                        .0
-                        .take_macros()
-                };
-                if let Ok(ExpandResult { value: Some((mark, expanded)), .. }) =
-                    expander.enter_expand(db, macro_call, resolver)
-                {
-                    let (mut macro_types_map, mut macro_types_source_map) =
-                        (TypesMap::default(), TypesSourceMap::default());
-                    let mut ctx =
-                        expander.ctx(db, &mut macro_types_map, &mut macro_types_source_map);
-                    let type_ref = TypeRef::from_ast(&mut ctx, expanded.tree());
-                    self.fill_implicit_impl_trait_args(
-                        db,
-                        generics_types_map,
-                        generics_types_source_map,
-                        &mut *exp,
-                        exp_fill,
-                        type_ref,
-                        &macro_types_map,
-                        &macro_types_source_map,
-                    );
-                    exp.get_or_insert_with(&mut *exp_fill).1.exit(mark);
-                }
-            }
-        });
-    }
-
-    pub(crate) fn finish(
-        self,
-        mut generics_types_map: TypesMap,
-        generics_types_source_map: &mut TypesSourceMap,
-    ) -> Arc<GenericParams> {
-        let Self { mut lifetimes, mut type_or_consts, mut where_predicates } = self;
-
-        if lifetimes.is_empty() && type_or_consts.is_empty() && where_predicates.is_empty() {
-            static EMPTY: LazyLock<Arc<GenericParams>> = LazyLock::new(|| {
-                Arc::new(GenericParams {
-                    lifetimes: Arena::new(),
-                    type_or_consts: Arena::new(),
-                    where_predicates: Box::default(),
-                    types_map: TypesMap::default(),
-                })
-            });
-            return Arc::clone(&EMPTY);
-        }
-
-        lifetimes.shrink_to_fit();
-        type_or_consts.shrink_to_fit();
-        where_predicates.shrink_to_fit();
-        generics_types_map.shrink_to_fit();
-        generics_types_source_map.shrink_to_fit();
-        Arc::new(GenericParams {
-            type_or_consts,
-            lifetimes,
-            where_predicates: where_predicates.into_boxed_slice(),
-            types_map: generics_types_map,
-        })
-    }
-}
-
-/// Copies a `TypeRef` from a `TypesMap` (accompanied with `TypesSourceMap`) into another `TypesMap`
-/// (and `TypesSourceMap`).
-fn copy_type_ref(
-    type_ref: TypeRefId,
-    from: &TypesMap,
-    from_source_map: &TypesSourceMap,
-    to: &mut TypesMap,
-    to_source_map: &mut TypesSourceMap,
-) -> TypeRefId {
-    let result = match &from[type_ref] {
-        TypeRef::Fn(fn_) => {
-            let params = fn_.params().iter().map(|(name, param_type)| {
-                (name.clone(), copy_type_ref(*param_type, from, from_source_map, to, to_source_map))
-            });
-            TypeRef::Fn(FnType::new(fn_.is_varargs(), fn_.is_unsafe(), fn_.abi().clone(), params))
-        }
-        TypeRef::Tuple(types) => TypeRef::Tuple(EmptyOptimizedThinVec::from_iter(
-            types.iter().map(|&t| copy_type_ref(t, from, from_source_map, to, to_source_map)),
-        )),
-        &TypeRef::RawPtr(type_ref, mutbl) => TypeRef::RawPtr(
-            copy_type_ref(type_ref, from, from_source_map, to, to_source_map),
-            mutbl,
-        ),
-        TypeRef::Reference(ref_) => TypeRef::Reference(Box::new(RefType {
-            ty: copy_type_ref(ref_.ty, from, from_source_map, to, to_source_map),
-            lifetime: ref_.lifetime.clone(),
-            mutability: ref_.mutability,
-        })),
-        TypeRef::Array(array) => TypeRef::Array(Box::new(ArrayType {
-            ty: copy_type_ref(array.ty, from, from_source_map, to, to_source_map),
-            len: array.len.clone(),
-        })),
-        &TypeRef::Slice(type_ref) => {
-            TypeRef::Slice(copy_type_ref(type_ref, from, from_source_map, to, to_source_map))
-        }
-        TypeRef::ImplTrait(bounds) => TypeRef::ImplTrait(ThinVec::from_iter(copy_type_bounds(
-            bounds,
-            from,
-            from_source_map,
-            to,
-            to_source_map,
-        ))),
-        TypeRef::DynTrait(bounds) => TypeRef::DynTrait(ThinVec::from_iter(copy_type_bounds(
-            bounds,
-            from,
-            from_source_map,
-            to,
-            to_source_map,
-        ))),
-        TypeRef::Path(path) => {
-            TypeRef::Path(copy_path(path, from, from_source_map, to, to_source_map))
-        }
-        TypeRef::Never => TypeRef::Never,
-        TypeRef::Placeholder => TypeRef::Placeholder,
-        TypeRef::Macro(macro_call) => TypeRef::Macro(*macro_call),
-        TypeRef::Error => TypeRef::Error,
-    };
-    let id = to.types.alloc(result);
-    if let Some(&ptr) = from_source_map.types_map_back.get(id) {
-        to_source_map.types_map_back.insert(id, ptr);
-    }
-    id
-}
-
-fn copy_path(
-    path: &Path,
-    from: &TypesMap,
-    from_source_map: &TypesSourceMap,
-    to: &mut TypesMap,
-    to_source_map: &mut TypesSourceMap,
-) -> Path {
-    match path {
-        Path::BarePath(mod_path) => Path::BarePath(mod_path.clone()),
-        Path::Normal(path) => {
-            let type_anchor = path
-                .type_anchor()
-                .map(|type_ref| copy_type_ref(type_ref, from, from_source_map, to, to_source_map));
-            let mod_path = path.mod_path().clone();
-            let generic_args = path.generic_args().iter().map(|generic_args| {
-                copy_generic_args(generic_args, from, from_source_map, to, to_source_map)
-            });
-            Path::Normal(NormalPath::new(type_anchor, mod_path, generic_args))
-        }
-        Path::LangItem(lang_item, name) => Path::LangItem(*lang_item, name.clone()),
-    }
-}
-
-fn copy_generic_args(
-    generic_args: &Option<GenericArgs>,
-    from: &TypesMap,
-    from_source_map: &TypesSourceMap,
-    to: &mut TypesMap,
-    to_source_map: &mut TypesSourceMap,
-) -> Option<GenericArgs> {
-    generic_args.as_ref().map(|generic_args| {
-        let args = generic_args
-            .args
-            .iter()
-            .map(|arg| match arg {
-                &GenericArg::Type(ty) => {
-                    GenericArg::Type(copy_type_ref(ty, from, from_source_map, to, to_source_map))
-                }
-                GenericArg::Lifetime(lifetime) => GenericArg::Lifetime(lifetime.clone()),
-                GenericArg::Const(konst) => GenericArg::Const(konst.clone()),
-            })
-            .collect();
-        let bindings = generic_args
-            .bindings
-            .iter()
-            .map(|binding| {
-                let name = binding.name.clone();
-                let args =
-                    copy_generic_args(&binding.args, from, from_source_map, to, to_source_map);
-                let type_ref = binding.type_ref.map(|type_ref| {
-                    copy_type_ref(type_ref, from, from_source_map, to, to_source_map)
-                });
-                let bounds =
-                    copy_type_bounds(&binding.bounds, from, from_source_map, to, to_source_map)
-                        .collect();
-                AssociatedTypeBinding { name, args, type_ref, bounds }
-            })
-            .collect();
-        GenericArgs {
-            args,
-            has_self_type: generic_args.has_self_type,
-            bindings,
-            desugared_from_fn: generic_args.desugared_from_fn,
-        }
-    })
-}
-
-fn copy_type_bounds<'a>(
-    bounds: &'a [TypeBound],
-    from: &'a TypesMap,
-    from_source_map: &'a TypesSourceMap,
-    to: &'a mut TypesMap,
-    to_source_map: &'a mut TypesSourceMap,
-) -> impl stdx::thin_vec::TrustedLen<Item = TypeBound> + 'a {
-    bounds.iter().map(|bound| copy_type_bound(bound, from, from_source_map, to, to_source_map))
-}
-
-fn copy_type_bound(
-    bound: &TypeBound,
-    from: &TypesMap,
-    from_source_map: &TypesSourceMap,
-    to: &mut TypesMap,
-    to_source_map: &mut TypesSourceMap,
-) -> TypeBound {
-    let mut copy_path_id = |path: PathId| {
-        let new_path = copy_path(&from[path], from, from_source_map, to, to_source_map);
-        let new_path_id = to.types.alloc(TypeRef::Path(new_path));
-        if let Some(&ptr) = from_source_map.types_map_back.get(path.type_ref()) {
-            to_source_map.types_map_back.insert(new_path_id, ptr);
-        }
-        PathId::from_type_ref_unchecked(new_path_id)
-    };
-
-    match bound {
-        &TypeBound::Path(path, modifier) => TypeBound::Path(copy_path_id(path), modifier),
-        TypeBound::ForLifetime(lifetimes, path) => {
-            TypeBound::ForLifetime(lifetimes.clone(), copy_path_id(*path))
-        }
-        TypeBound::Lifetime(lifetime) => TypeBound::Lifetime(lifetime.clone()),
-        TypeBound::Use(use_args) => TypeBound::Use(use_args.clone()),
-        TypeBound::Error => TypeBound::Error,
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir.rs
index 494644d8eff9d..0fc7857d9781a 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/hir.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/hir.rs
@@ -13,11 +13,12 @@
 //! See also a neighboring `body` module.
 
 pub mod format_args;
+pub mod generics;
 pub mod type_ref;
 
 use std::fmt;
 
-use hir_expand::{name::Name, MacroDefId};
+use hir_expand::{MacroDefId, name::Name};
 use intern::Symbol;
 use la_arena::Idx;
 use rustc_apfloat::ieee::{Half as f16, Quad as f128};
@@ -25,10 +26,13 @@ use syntax::ast;
 use type_ref::TypeRefId;
 
 use crate::{
+    BlockId,
     builtin_type::{BuiltinFloat, BuiltinInt, BuiltinUint},
-    path::{GenericArgs, Path},
+    expr_store::{
+        HygieneId,
+        path::{GenericArgs, Path},
+    },
     type_ref::{Mutability, Rawness},
-    BlockId, ConstBlockId,
 };
 
 pub use syntax::ast::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering, RangeOp, UnaryOp};
@@ -137,11 +141,7 @@ pub enum LiteralOrConst {
 
 impl Literal {
     pub fn negate(self) -> Option<Self> {
-        if let Literal::Int(i, k) = self {
-            Some(Literal::Int(-i, k))
-        } else {
-            None
-        }
+        if let Literal::Int(i, k) = self { Some(Literal::Int(-i, k)) } else { None }
     }
 }
 
@@ -212,7 +212,7 @@ pub enum Expr {
         statements: Box<[Statement]>,
         tail: Option<ExprId>,
     },
-    Const(ConstBlockId),
+    Const(ExprId),
     // FIXME: Fold this into Block with an unsafe flag?
     Unsafe {
         id: Option<BlockId>,
@@ -555,6 +555,9 @@ pub struct Binding {
     pub name: Name,
     pub mode: BindingAnnotation,
     pub problems: Option<BindingProblems>,
+    /// Note that this may not be the direct `SyntaxContextId` of the binding's expansion, because transparent
+    /// expansions are attributed to their parent expansion (recursively).
+    pub hygiene: HygieneId,
 }
 
 #[derive(Debug, Clone, Eq, PartialEq)]
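
The new `hygiene` field on `Binding` exists because names introduced by macro expansions live in their own syntax contexts. A minimal, self-contained reminder of the behaviour being modelled, in plain Rust with no rust-analyzer APIs (illustrative only):

macro_rules! introduces_x {
    () => {
        // This `x` lives in the macro's own hygiene context.
        let x = 1;
        assert_eq!(x, 1);
    };
}

fn main() {
    let x = 0;
    introduces_x!();
    // The caller's `x` is untouched: the macro-introduced binding cannot
    // shadow it because the two names carry different hygiene.
    assert_eq!(x, 0);
}
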
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs
index 24badc52f25ac..2fd21bb0ed3a0 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs
@@ -4,11 +4,11 @@ use either::Either;
 use hir_expand::name::Name;
 use intern::Symbol;
 use rustc_parse_format as parse;
-use span::SyntaxContextId;
+use span::SyntaxContext;
 use stdx::TupleExt;
 use syntax::{
-    ast::{self, IsString},
     TextRange,
+    ast::{self, IsString},
 };
 
 use crate::hir::ExprId;
@@ -176,7 +176,7 @@ pub(crate) fn parse(
     is_direct_literal: bool,
     mut synth: impl FnMut(Name, Option<TextRange>) -> ExprId,
     mut record_usage: impl FnMut(Name, Option<TextRange>),
-    call_ctx: SyntaxContextId,
+    call_ctx: SyntaxContext,
 ) -> FormatArgs {
     let Ok(text) = s.value() else {
         return FormatArgs {
@@ -460,10 +460,6 @@ impl FormatArgumentsCollector {
         }
     }
 
-    pub fn new() -> Self {
-        Default::default()
-    }
-
     pub fn add(&mut self, arg: FormatArgument) -> usize {
         let index = self.arguments.len();
         if let Some(name) = arg.kind.ident() {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir/generics.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir/generics.rs
new file mode 100644
index 0000000000000..a9a0e36312c1a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/hir/generics.rs
@@ -0,0 +1,403 @@
+//! Pre-type-IR representation of item generics.
+use std::{ops, sync::LazyLock};
+
+use hir_expand::name::Name;
+use la_arena::{Arena, Idx, RawIdx};
+use stdx::impl_from;
+use thin_vec::ThinVec;
+use triomphe::Arc;
+
+use crate::{
+    AdtId, ConstParamId, GenericDefId, LifetimeParamId, TypeOrConstParamId, TypeParamId,
+    db::DefDatabase,
+    expr_store::{ExpressionStore, ExpressionStoreSourceMap},
+    type_ref::{ConstRef, LifetimeRefId, TypeBound, TypeRefId},
+};
+
+pub type LocalTypeOrConstParamId = Idx<TypeOrConstParamData>;
+pub type LocalLifetimeParamId = Idx<LifetimeParamData>;
+
+/// Data about a generic type parameter (to a function, struct, impl, ...).
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub struct TypeParamData {
+    /// [`None`] only if the type ref is a [`TypeRef::ImplTrait`]. FIXME: Might be better to just
+    /// make it always be a value, giving impl trait a special name.
+    pub name: Option<Name>,
+    pub default: Option<TypeRefId>,
+    pub provenance: TypeParamProvenance,
+}
+
+/// Data about a generic lifetime parameter (to a function, struct, impl, ...).
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub struct LifetimeParamData {
+    pub name: Name,
+}
+
+/// Data about a generic const parameter (to a function, struct, impl, ...).
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub struct ConstParamData {
+    pub name: Name,
+    pub ty: TypeRefId,
+    pub default: Option<ConstRef>,
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
+pub enum TypeParamProvenance {
+    TypeParamList,
+    TraitSelf,
+    ArgumentImplTrait,
+}
+
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub enum TypeOrConstParamData {
+    TypeParamData(TypeParamData),
+    ConstParamData(ConstParamData),
+}
+
+impl TypeOrConstParamData {
+    pub fn name(&self) -> Option<&Name> {
+        match self {
+            TypeOrConstParamData::TypeParamData(it) => it.name.as_ref(),
+            TypeOrConstParamData::ConstParamData(it) => Some(&it.name),
+        }
+    }
+
+    pub fn has_default(&self) -> bool {
+        match self {
+            TypeOrConstParamData::TypeParamData(it) => it.default.is_some(),
+            TypeOrConstParamData::ConstParamData(it) => it.default.is_some(),
+        }
+    }
+
+    pub fn type_param(&self) -> Option<&TypeParamData> {
+        match self {
+            TypeOrConstParamData::TypeParamData(it) => Some(it),
+            TypeOrConstParamData::ConstParamData(_) => None,
+        }
+    }
+
+    pub fn const_param(&self) -> Option<&ConstParamData> {
+        match self {
+            TypeOrConstParamData::TypeParamData(_) => None,
+            TypeOrConstParamData::ConstParamData(it) => Some(it),
+        }
+    }
+
+    pub fn is_trait_self(&self) -> bool {
+        match self {
+            TypeOrConstParamData::TypeParamData(it) => {
+                it.provenance == TypeParamProvenance::TraitSelf
+            }
+            TypeOrConstParamData::ConstParamData(_) => false,
+        }
+    }
+}
+
+impl_from!(TypeParamData, ConstParamData for TypeOrConstParamData);
+
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub enum GenericParamData {
+    TypeParamData(TypeParamData),
+    ConstParamData(ConstParamData),
+    LifetimeParamData(LifetimeParamData),
+}
+
+impl GenericParamData {
+    pub fn name(&self) -> Option<&Name> {
+        match self {
+            GenericParamData::TypeParamData(it) => it.name.as_ref(),
+            GenericParamData::ConstParamData(it) => Some(&it.name),
+            GenericParamData::LifetimeParamData(it) => Some(&it.name),
+        }
+    }
+
+    pub fn type_param(&self) -> Option<&TypeParamData> {
+        match self {
+            GenericParamData::TypeParamData(it) => Some(it),
+            _ => None,
+        }
+    }
+
+    pub fn const_param(&self) -> Option<&ConstParamData> {
+        match self {
+            GenericParamData::ConstParamData(it) => Some(it),
+            _ => None,
+        }
+    }
+
+    pub fn lifetime_param(&self) -> Option<&LifetimeParamData> {
+        match self {
+            GenericParamData::LifetimeParamData(it) => Some(it),
+            _ => None,
+        }
+    }
+}
+
+impl_from!(TypeParamData, ConstParamData, LifetimeParamData for GenericParamData);
+
+#[derive(Debug, Clone, Copy)]
+pub enum GenericParamDataRef<'a> {
+    TypeParamData(&'a TypeParamData),
+    ConstParamData(&'a ConstParamData),
+    LifetimeParamData(&'a LifetimeParamData),
+}
+
+/// Data about the generic parameters of a function, struct, impl, etc.
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub struct GenericParams {
+    pub(crate) type_or_consts: Arena<TypeOrConstParamData>,
+    pub(crate) lifetimes: Arena<LifetimeParamData>,
+    pub(crate) where_predicates: Box<[WherePredicate]>,
+}
+
+impl ops::Index<LocalTypeOrConstParamId> for GenericParams {
+    type Output = TypeOrConstParamData;
+    fn index(&self, index: LocalTypeOrConstParamId) -> &TypeOrConstParamData {
+        &self.type_or_consts[index]
+    }
+}
+
+impl ops::Index<LocalLifetimeParamId> for GenericParams {
+    type Output = LifetimeParamData;
+    fn index(&self, index: LocalLifetimeParamId) -> &LifetimeParamData {
+        &self.lifetimes[index]
+    }
+}
+
+/// A single predicate from a where clause, i.e. `where Type: Trait`. Combined
+/// where clauses like `where T: Foo + Bar` are turned into multiple of these.
+/// It might still result in multiple actual predicates though, because of
+/// associated type bindings like `Iterator<Item = u32>`.
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub enum WherePredicate {
+    TypeBound { target: TypeRefId, bound: TypeBound },
+    Lifetime { target: LifetimeRefId, bound: LifetimeRefId },
+    ForLifetime { lifetimes: ThinVec<Name>, target: TypeRefId, bound: TypeBound },
+}
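
As the doc comment on `WherePredicate` above says, one syntactic bound becomes one predicate. A rough sketch of that decomposition using simplified stand-in types (the real variants hold arena ids such as `TypeRefId` and `LifetimeRefId`, so this is illustrative only):

// Demo-only mirror of the shape above; the real variants store arena ids.
#[derive(Debug)]
enum DemoPredicate {
    TypeBound { target: &'static str, bound: &'static str },
    ForLifetime { lifetimes: Vec<&'static str>, target: &'static str, bound: &'static str },
}

// `where T: Foo + Bar, for<'a> &'a T: Baz` lowers to one entry per bound.
fn demo_lowering() -> Vec<DemoPredicate> {
    vec![
        DemoPredicate::TypeBound { target: "T", bound: "Foo" },
        DemoPredicate::TypeBound { target: "T", bound: "Bar" },
        DemoPredicate::ForLifetime { lifetimes: vec!["'a"], target: "&'a T", bound: "Baz" },
    ]
}

fn main() {
    assert_eq!(demo_lowering().len(), 3);
}
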
+
+static EMPTY: LazyLock<Arc<GenericParams>> = LazyLock::new(|| {
+    Arc::new(GenericParams {
+        type_or_consts: Arena::default(),
+        lifetimes: Arena::default(),
+        where_predicates: Box::default(),
+    })
+});
+
+impl GenericParams {
+    /// The index of the `Self` param in the generics of the definition itself (not its parent).
+    pub(crate) const SELF_PARAM_ID_IN_SELF: la_arena::Idx<TypeOrConstParamData> =
+        LocalTypeOrConstParamId::from_raw(RawIdx::from_u32(0));
+
+    pub fn new(db: &dyn DefDatabase, def: GenericDefId) -> Arc<GenericParams> {
+        match def {
+            GenericDefId::AdtId(AdtId::EnumId(it)) => db.enum_signature(it).generic_params.clone(),
+            GenericDefId::AdtId(AdtId::StructId(it)) => {
+                db.struct_signature(it).generic_params.clone()
+            }
+            GenericDefId::AdtId(AdtId::UnionId(it)) => {
+                db.union_signature(it).generic_params.clone()
+            }
+            GenericDefId::ConstId(_) => EMPTY.clone(),
+            GenericDefId::FunctionId(function_id) => {
+                db.function_signature(function_id).generic_params.clone()
+            }
+            GenericDefId::ImplId(impl_id) => db.impl_signature(impl_id).generic_params.clone(),
+            GenericDefId::StaticId(_) => EMPTY.clone(),
+            GenericDefId::TraitAliasId(trait_alias_id) => {
+                db.trait_alias_signature(trait_alias_id).generic_params.clone()
+            }
+            GenericDefId::TraitId(trait_id) => db.trait_signature(trait_id).generic_params.clone(),
+            GenericDefId::TypeAliasId(type_alias_id) => {
+                db.type_alias_signature(type_alias_id).generic_params.clone()
+            }
+        }
+    }
+
+    pub fn generic_params_and_store(
+        db: &dyn DefDatabase,
+        def: GenericDefId,
+    ) -> (Arc<GenericParams>, Arc<ExpressionStore>) {
+        match def {
+            GenericDefId::AdtId(AdtId::EnumId(id)) => {
+                let sig = db.enum_signature(id);
+                (sig.generic_params.clone(), sig.store.clone())
+            }
+            GenericDefId::AdtId(AdtId::StructId(id)) => {
+                let sig = db.struct_signature(id);
+                (sig.generic_params.clone(), sig.store.clone())
+            }
+            GenericDefId::AdtId(AdtId::UnionId(id)) => {
+                let sig = db.union_signature(id);
+                (sig.generic_params.clone(), sig.store.clone())
+            }
+            GenericDefId::ConstId(id) => {
+                let sig = db.const_signature(id);
+                (EMPTY.clone(), sig.store.clone())
+            }
+            GenericDefId::FunctionId(id) => {
+                let sig = db.function_signature(id);
+                (sig.generic_params.clone(), sig.store.clone())
+            }
+            GenericDefId::ImplId(id) => {
+                let sig = db.impl_signature(id);
+                (sig.generic_params.clone(), sig.store.clone())
+            }
+            GenericDefId::StaticId(id) => {
+                let sig = db.static_signature(id);
+                (EMPTY.clone(), sig.store.clone())
+            }
+            GenericDefId::TraitAliasId(id) => {
+                let sig = db.trait_alias_signature(id);
+                (sig.generic_params.clone(), sig.store.clone())
+            }
+            GenericDefId::TraitId(id) => {
+                let sig = db.trait_signature(id);
+                (sig.generic_params.clone(), sig.store.clone())
+            }
+            GenericDefId::TypeAliasId(id) => {
+                let sig = db.type_alias_signature(id);
+                (sig.generic_params.clone(), sig.store.clone())
+            }
+        }
+    }
+
+    pub fn generic_params_and_store_and_source_map(
+        db: &dyn DefDatabase,
+        def: GenericDefId,
+    ) -> (Arc<GenericParams>, Arc<ExpressionStore>, Arc<ExpressionStoreSourceMap>) {
+        match def {
+            GenericDefId::AdtId(AdtId::EnumId(id)) => {
+                let (sig, sm) = db.enum_signature_with_source_map(id);
+                (sig.generic_params.clone(), sig.store.clone(), sm)
+            }
+            GenericDefId::AdtId(AdtId::StructId(id)) => {
+                let (sig, sm) = db.struct_signature_with_source_map(id);
+                (sig.generic_params.clone(), sig.store.clone(), sm)
+            }
+            GenericDefId::AdtId(AdtId::UnionId(id)) => {
+                let (sig, sm) = db.union_signature_with_source_map(id);
+                (sig.generic_params.clone(), sig.store.clone(), sm)
+            }
+            GenericDefId::ConstId(id) => {
+                let (sig, sm) = db.const_signature_with_source_map(id);
+                (EMPTY.clone(), sig.store.clone(), sm)
+            }
+            GenericDefId::FunctionId(id) => {
+                let (sig, sm) = db.function_signature_with_source_map(id);
+                (sig.generic_params.clone(), sig.store.clone(), sm)
+            }
+            GenericDefId::ImplId(id) => {
+                let (sig, sm) = db.impl_signature_with_source_map(id);
+                (sig.generic_params.clone(), sig.store.clone(), sm)
+            }
+            GenericDefId::StaticId(id) => {
+                let (sig, sm) = db.static_signature_with_source_map(id);
+                (EMPTY.clone(), sig.store.clone(), sm)
+            }
+            GenericDefId::TraitAliasId(id) => {
+                let (sig, sm) = db.trait_alias_signature_with_source_map(id);
+                (sig.generic_params.clone(), sig.store.clone(), sm)
+            }
+            GenericDefId::TraitId(id) => {
+                let (sig, sm) = db.trait_signature_with_source_map(id);
+                (sig.generic_params.clone(), sig.store.clone(), sm)
+            }
+            GenericDefId::TypeAliasId(id) => {
+                let (sig, sm) = db.type_alias_signature_with_source_map(id);
+                (sig.generic_params.clone(), sig.store.clone(), sm)
+            }
+        }
+    }
+
+    /// Number of generic parameters (`type_or_consts` + `lifetimes`).
+    #[inline]
+    pub fn len(&self) -> usize {
+        self.type_or_consts.len() + self.lifetimes.len()
+    }
+
+    #[inline]
+    pub fn len_lifetimes(&self) -> usize {
+        self.lifetimes.len()
+    }
+
+    #[inline]
+    pub fn len_type_or_consts(&self) -> usize {
+        self.type_or_consts.len()
+    }
+
+    #[inline]
+    pub fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
+
+    #[inline]
+    pub fn no_predicates(&self) -> bool {
+        self.where_predicates.is_empty()
+    }
+
+    #[inline]
+    pub fn where_predicates(&self) -> std::slice::Iter<'_, WherePredicate> {
+        self.where_predicates.iter()
+    }
+
+    /// Iterator over the `type_or_consts` field.
+    #[inline]
+    pub fn iter_type_or_consts(
+        &self,
+    ) -> impl DoubleEndedIterator<Item = (LocalTypeOrConstParamId, &TypeOrConstParamData)> {
+        self.type_or_consts.iter()
+    }
+
+    /// Iterator over the `lifetimes` field.
+    #[inline]
+    pub fn iter_lt(
+        &self,
+    ) -> impl DoubleEndedIterator<Item = (LocalLifetimeParamId, &LifetimeParamData)> {
+        self.lifetimes.iter()
+    }
+
+    pub fn find_type_by_name(&self, name: &Name, parent: GenericDefId) -> Option<TypeParamId> {
+        self.type_or_consts.iter().find_map(|(id, p)| {
+            if p.name().as_ref() == Some(&name) && p.type_param().is_some() {
+                Some(TypeParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent }))
+            } else {
+                None
+            }
+        })
+    }
+
+    pub fn find_const_by_name(&self, name: &Name, parent: GenericDefId) -> Option<ConstParamId> {
+        self.type_or_consts.iter().find_map(|(id, p)| {
+            if p.name().as_ref() == Some(&name) && p.const_param().is_some() {
+                Some(ConstParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent }))
+            } else {
+                None
+            }
+        })
+    }
+
+    #[inline]
+    pub fn trait_self_param(&self) -> Option<LocalTypeOrConstParamId> {
+        if self.type_or_consts.is_empty() {
+            return None;
+        }
+        matches!(
+            self.type_or_consts[Self::SELF_PARAM_ID_IN_SELF],
+            TypeOrConstParamData::TypeParamData(TypeParamData {
+                provenance: TypeParamProvenance::TraitSelf,
+                ..
+            })
+        )
+        .then(|| Self::SELF_PARAM_ID_IN_SELF)
+    }
+
+    pub fn find_lifetime_by_name(
+        &self,
+        name: &Name,
+        parent: GenericDefId,
+    ) -> Option<LifetimeParamId> {
+        self.lifetimes.iter().find_map(|(id, p)| {
+            if &p.name == name { Some(LifetimeParamId { local_id: id, parent }) } else { None }
+        })
+    }
+}
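
A side note on the `EMPTY` static above: `GenericParams::new` and `generic_params_and_store` return the same shared empty instance for `ConstId`/`StaticId`, which never have generics. A small sketch of that sharing pattern using `std::sync::Arc` (the crate itself uses `triomphe::Arc`; the names below are demo-only):

use std::sync::{Arc, LazyLock};

#[derive(Default)]
struct DemoParams {
    names: Vec<String>,
}

// One process-wide empty instance, handed out to every definition that has
// no generics, instead of allocating a fresh empty value each time.
static DEMO_EMPTY: LazyLock<Arc<DemoParams>> =
    LazyLock::new(|| Arc::new(DemoParams::default()));

fn demo_params_for(has_generics: bool) -> Arc<DemoParams> {
    if has_generics {
        Arc::new(DemoParams { names: vec!["T".to_owned()] })
    } else {
        DEMO_EMPTY.clone() // cheap refcount bump, no new allocation
    }
}

fn main() {
    let a = demo_params_for(false);
    let b = demo_params_for(false);
    assert!(a.names.is_empty());
    assert!(Arc::ptr_eq(&a, &b));
}
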
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs
index 6de4026dff75b..eb3b92d31f1c1 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs
@@ -1,29 +1,21 @@
 //! HIR for references to types. Paths in these are not yet resolved. They can
 //! be directly created from an ast::TypeRef, without further queries.
 
-use core::fmt;
-use std::{fmt::Write, ops::Index};
+use std::fmt::Write;
 
-use hir_expand::{
-    db::ExpandDatabase,
-    name::{AsName, Name},
-    AstId, InFile,
-};
-use intern::{sym, Symbol};
-use la_arena::{Arena, ArenaMap, Idx};
-use span::Edition;
-use stdx::thin_vec::{thin_vec_with_header_struct, EmptyOptimizedThinVec, ThinVec};
-use syntax::{
-    ast::{self, HasGenericArgs, HasName, IsString},
-    AstPtr,
-};
+use hir_expand::name::Name;
+use intern::Symbol;
+use la_arena::Idx;
+use thin_vec::ThinVec;
 
 use crate::{
+    LifetimeParamId, TypeParamId,
     builtin_type::{BuiltinInt, BuiltinType, BuiltinUint},
-    hir::Literal,
-    lower::LowerCtx,
-    path::{GenericArg, Path},
-    SyntheticSyntax,
+    expr_store::{
+        ExpressionStore,
+        path::{GenericArg, Path},
+    },
+    hir::{ExprId, Literal},
 };
 
 #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
@@ -34,11 +26,7 @@ pub enum Mutability {
 
 impl Mutability {
     pub fn from_mutable(mutable: bool) -> Mutability {
-        if mutable {
-            Mutability::Mut
-        } else {
-            Mutability::Shared
-        }
+        if mutable { Mutability::Mut } else { Mutability::Shared }
     }
 
     pub fn as_keyword_for_ref(self) -> &'static str {
@@ -80,11 +68,7 @@ pub enum Rawness {
 
 impl Rawness {
     pub fn from_raw(is_raw: bool) -> Rawness {
-        if is_raw {
-            Rawness::RawPtr
-        } else {
-            Rawness::Ref
-        }
+        if is_raw { Rawness::RawPtr } else { Rawness::Ref }
     }
 
     pub fn is_raw(&self) -> bool {
@@ -114,40 +98,32 @@ pub struct TraitRef {
     pub path: PathId,
 }
 
-impl TraitRef {
-    /// Converts an `ast::PathType` to a `hir::TraitRef`.
-    pub(crate) fn from_ast(ctx: &mut LowerCtx<'_>, node: ast::Type) -> Option<Self> {
-        // FIXME: Use `Path::from_src`
-        match &node {
-            ast::Type::PathType(path) => path
-                .path()
-                .and_then(|it| ctx.lower_path(it))
-                .map(|path| TraitRef { path: ctx.alloc_path(path, AstPtr::new(&node)) }),
-            _ => None,
-        }
-    }
+#[derive(Clone, PartialEq, Eq, Hash, Debug)]
+pub struct FnType {
+    pub params: Box<[(Option<Name>, TypeRefId)]>,
+    pub is_varargs: bool,
+    pub is_unsafe: bool,
+    pub abi: Option<Symbol>,
 }
 
-thin_vec_with_header_struct! {
-    pub new(pub(crate)) struct FnType, FnTypeHeader {
-        pub params: [(Option<Name>, TypeRefId)],
-        pub is_varargs: bool,
-        pub is_unsafe: bool,
-        pub abi: Option<Symbol>; ref,
+impl FnType {
+    #[inline]
+    pub fn split_params_and_ret(&self) -> (&[(Option<Name>, TypeRefId)], TypeRefId) {
+        let (ret, params) = self.params.split_last().expect("should have at least return type");
+        (params, ret.1)
     }
 }
 
 #[derive(Clone, PartialEq, Eq, Hash, Debug)]
 pub struct ArrayType {
     pub ty: TypeRefId,
-    // FIXME: This should be Ast<ConstArg>
     pub len: ConstRef,
 }
 
 #[derive(Clone, PartialEq, Eq, Hash, Debug)]
 pub struct RefType {
     pub ty: TypeRefId,
-    pub lifetime: Option<LifetimeRef>,
+    pub lifetime: Option<LifetimeRefId>,
     pub mutability: Mutability,
 }
 
@@ -156,17 +132,19 @@ pub struct RefType {
 pub enum TypeRef {
     Never,
     Placeholder,
-    Tuple(EmptyOptimizedThinVec<TypeRefId>),
+    Tuple(ThinVec<TypeRefId>),
     Path(Path),
     RawPtr(TypeRefId, Mutability),
+    // FIXME: Unbox this once `Idx` has a niche,
+    // as `RefType` should shrink by 4 bytes then
     Reference(Box<RefType>),
-    Array(Box<ArrayType>),
+    Array(ArrayType),
     Slice(TypeRefId),
     /// A fn pointer. Last element of the vector is the return type.
-    Fn(FnType),
+    Fn(Box<FnType>),
     ImplTrait(ThinVec<TypeBound>),
     DynTrait(ThinVec<TypeBound>),
-    Macro(AstId<ast::MacroCall>),
+    TypeParam(TypeParamId),
     Error,
 }
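
The `Box<FnType>` stored in `TypeRef::Fn` keeps the return type as the last element of `params`, as its doc comment notes, and `split_params_and_ret` peels it back off with `split_last`. A stand-alone sketch of that convention with string stand-ins for `Name` and `TypeRefId` (demo types, not the real ones):

// Demo stand-ins for `Name`/`TypeRefId`; the layout convention is the same:
// the final `params` entry is the return type.
struct DemoFnType {
    params: Box<[(Option<&'static str>, &'static str)]>,
}

impl DemoFnType {
    fn split_params_and_ret(&self) -> (&[(Option<&'static str>, &'static str)], &'static str) {
        let (ret, params) = self.params.split_last().expect("should have at least return type");
        (params, ret.1)
    }
}

fn main() {
    // Roughly `fn(x: u32, u64) -> bool`.
    let fn_ty = DemoFnType {
        params: vec![(Some("x"), "u32"), (None, "u64"), (None, "bool")].into_boxed_slice(),
    };
    let (params, ret) = fn_ty.split_params_and_ret();
    assert_eq!(params.len(), 2);
    assert_eq!(ret, "bool");
}
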
 
@@ -175,97 +153,33 @@ const _: () = assert!(size_of::<TypeRef>() == 16);
 
 pub type TypeRefId = Idx<TypeRef>;
 
-#[derive(Default, Clone, PartialEq, Eq, Debug, Hash)]
-pub struct TypesMap {
-    pub(crate) types: Arena<TypeRef>,
-}
-
-impl TypesMap {
-    pub const EMPTY: &TypesMap = &TypesMap { types: Arena::new() };
-
-    pub(crate) fn shrink_to_fit(&mut self) {
-        let TypesMap { types } = self;
-        types.shrink_to_fit();
-    }
-}
-
-impl Index<TypeRefId> for TypesMap {
-    type Output = TypeRef;
-
-    #[inline]
-    fn index(&self, index: TypeRefId) -> &Self::Output {
-        &self.types[index]
-    }
-}
-
-impl Index<PathId> for TypesMap {
-    type Output = Path;
-
-    #[inline]
-    fn index(&self, index: PathId) -> &Self::Output {
-        let TypeRef::Path(path) = &self[index.type_ref()] else {
-            unreachable!("`PathId` always points to `TypeRef::Path`");
-        };
-        path
-    }
-}
-
-pub type TypePtr = AstPtr<ast::Type>;
-pub type TypeSource = InFile<TypePtr>;
-
-#[derive(Default, Clone, PartialEq, Eq, Debug, Hash)]
-pub struct TypesSourceMap {
-    pub(crate) types_map_back: ArenaMap<TypeRefId, TypeSource>,
-}
-
-impl TypesSourceMap {
-    pub const EMPTY: Self = Self { types_map_back: ArenaMap::new() };
-
-    pub fn type_syntax(&self, id: TypeRefId) -> Result<TypeSource, SyntheticSyntax> {
-        self.types_map_back.get(id).cloned().ok_or(SyntheticSyntax)
-    }
-
-    pub(crate) fn shrink_to_fit(&mut self) {
-        let TypesSourceMap { types_map_back } = self;
-        types_map_back.shrink_to_fit();
-    }
-}
+pub type LifetimeRefId = Idx<LifetimeRef>;
 
 #[derive(Clone, PartialEq, Eq, Hash, Debug)]
-pub struct LifetimeRef {
-    pub name: Name,
-}
-
-impl LifetimeRef {
-    pub(crate) fn new_name(name: Name) -> Self {
-        LifetimeRef { name }
-    }
-
-    pub(crate) fn new(lifetime: &ast::Lifetime) -> Self {
-        LifetimeRef { name: Name::new_lifetime(lifetime) }
-    }
-
-    pub fn missing() -> LifetimeRef {
-        LifetimeRef { name: Name::missing() }
-    }
+pub enum LifetimeRef {
+    Named(Name),
+    Static,
+    Placeholder,
+    Param(LifetimeParamId),
+    Error,
 }
 
 #[derive(Clone, PartialEq, Eq, Hash, Debug)]
 pub enum TypeBound {
     Path(PathId, TraitBoundModifier),
-    ForLifetime(Box<[Name]>, PathId),
-    Lifetime(LifetimeRef),
-    Use(Box<[UseArgRef]>),
+    ForLifetime(ThinVec<Name>, PathId),
+    Lifetime(LifetimeRefId),
+    Use(ThinVec<UseArgRef>),
     Error,
 }
 
 #[cfg(target_pointer_width = "64")]
-const _: [(); 24] = [(); size_of::<TypeBound>()];
+const _: [(); 16] = [(); size_of::<TypeBound>()];
 
 #[derive(Clone, PartialEq, Eq, Hash, Debug)]
 pub enum UseArgRef {
     Name(Name),
-    Lifetime(LifetimeRef),
+    Lifetime(LifetimeRefId),
 }
 
 /// A modifier on a bound, currently this is only used for `?Sized`, where the
@@ -277,124 +191,19 @@ pub enum TraitBoundModifier {
 }
 
 impl TypeRef {
-    /// Converts an `ast::TypeRef` to a `hir::TypeRef`.
-    pub fn from_ast(ctx: &mut LowerCtx<'_>, node: ast::Type) -> TypeRefId {
-        let ty = match &node {
-            ast::Type::ParenType(inner) => return TypeRef::from_ast_opt(ctx, inner.ty()),
-            ast::Type::TupleType(inner) => TypeRef::Tuple(EmptyOptimizedThinVec::from_iter(
-                Vec::from_iter(inner.fields().map(|it| TypeRef::from_ast(ctx, it))),
-            )),
-            ast::Type::NeverType(..) => TypeRef::Never,
-            ast::Type::PathType(inner) => {
-                // FIXME: Use `Path::from_src`
-                inner
-                    .path()
-                    .and_then(|it| ctx.lower_path(it))
-                    .map(TypeRef::Path)
-                    .unwrap_or(TypeRef::Error)
-            }
-            ast::Type::PtrType(inner) => {
-                let inner_ty = TypeRef::from_ast_opt(ctx, inner.ty());
-                let mutability = Mutability::from_mutable(inner.mut_token().is_some());
-                TypeRef::RawPtr(inner_ty, mutability)
-            }
-            ast::Type::ArrayType(inner) => {
-                let len = ConstRef::from_const_arg(ctx, inner.const_arg());
-                TypeRef::Array(Box::new(ArrayType {
-                    ty: TypeRef::from_ast_opt(ctx, inner.ty()),
-                    len,
-                }))
-            }
-            ast::Type::SliceType(inner) => TypeRef::Slice(TypeRef::from_ast_opt(ctx, inner.ty())),
-            ast::Type::RefType(inner) => {
-                let inner_ty = TypeRef::from_ast_opt(ctx, inner.ty());
-                let lifetime = inner.lifetime().map(|lt| LifetimeRef::new(&lt));
-                let mutability = Mutability::from_mutable(inner.mut_token().is_some());
-                TypeRef::Reference(Box::new(RefType { ty: inner_ty, lifetime, mutability }))
-            }
-            ast::Type::InferType(_inner) => TypeRef::Placeholder,
-            ast::Type::FnPtrType(inner) => {
-                let ret_ty = inner
-                    .ret_type()
-                    .and_then(|rt| rt.ty())
-                    .map(|it| TypeRef::from_ast(ctx, it))
-                    .unwrap_or_else(|| ctx.alloc_type_ref_desugared(TypeRef::unit()));
-                let mut is_varargs = false;
-                let mut params = if let Some(pl) = inner.param_list() {
-                    if let Some(param) = pl.params().last() {
-                        is_varargs = param.dotdotdot_token().is_some();
-                    }
-
-                    pl.params()
-                        .map(|it| {
-                            let type_ref = TypeRef::from_ast_opt(ctx, it.ty());
-                            let name = match it.pat() {
-                                Some(ast::Pat::IdentPat(it)) => Some(
-                                    it.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing),
-                                ),
-                                _ => None,
-                            };
-                            (name, type_ref)
-                        })
-                        .collect()
-                } else {
-                    Vec::with_capacity(1)
-                };
-                fn lower_abi(abi: ast::Abi) -> Symbol {
-                    match abi.abi_string() {
-                        Some(tok) => Symbol::intern(tok.text_without_quotes()),
-                        // `extern` default to be `extern "C"`.
-                        _ => sym::C.clone(),
-                    }
-                }
-
-                let abi = inner.abi().map(lower_abi);
-                params.push((None, ret_ty));
-                TypeRef::Fn(FnType::new(is_varargs, inner.unsafe_token().is_some(), abi, params))
-            }
-            // for types are close enough for our purposes to the inner type for now...
-            ast::Type::ForType(inner) => return TypeRef::from_ast_opt(ctx, inner.ty()),
-            ast::Type::ImplTraitType(inner) => {
-                if ctx.outer_impl_trait() {
-                    // Disallow nested impl traits
-                    TypeRef::Error
-                } else {
-                    ctx.with_outer_impl_trait_scope(true, |ctx| {
-                        TypeRef::ImplTrait(type_bounds_from_ast(ctx, inner.type_bound_list()))
-                    })
-                }
-            }
-            ast::Type::DynTraitType(inner) => {
-                TypeRef::DynTrait(type_bounds_from_ast(ctx, inner.type_bound_list()))
-            }
-            ast::Type::MacroType(mt) => match mt.macro_call() {
-                Some(mc) => TypeRef::Macro(ctx.ast_id(&mc)),
-                None => TypeRef::Error,
-            },
-        };
-        ctx.alloc_type_ref(ty, AstPtr::new(&node))
-    }
-
-    pub(crate) fn from_ast_opt(ctx: &mut LowerCtx<'_>, node: Option<ast::Type>) -> TypeRefId {
-        match node {
-            Some(node) => TypeRef::from_ast(ctx, node),
-            None => ctx.alloc_error_type(),
-        }
-    }
-
     pub(crate) fn unit() -> TypeRef {
-        TypeRef::Tuple(EmptyOptimizedThinVec::empty())
+        TypeRef::Tuple(ThinVec::new())
     }
 
-    pub fn walk(this: TypeRefId, map: &TypesMap, f: &mut impl FnMut(&TypeRef)) {
+    pub fn walk(this: TypeRefId, map: &ExpressionStore, f: &mut impl FnMut(&TypeRef)) {
         go(this, f, map);
 
-        fn go(type_ref: TypeRefId, f: &mut impl FnMut(&TypeRef), map: &TypesMap) {
+        fn go(type_ref: TypeRefId, f: &mut impl FnMut(&TypeRef), map: &ExpressionStore) {
             let type_ref = &map[type_ref];
             f(type_ref);
             match type_ref {
                 TypeRef::Fn(fn_) => {
-                    fn_.params().iter().for_each(|&(_, param_type)| go(param_type, f, map))
+                    fn_.params.iter().for_each(|&(_, param_type)| go(param_type, f, map))
                 }
                 TypeRef::Tuple(types) => types.iter().for_each(|&t| go(t, f, map)),
                 TypeRef::RawPtr(type_ref, _) | TypeRef::Slice(type_ref) => go(*type_ref, f, map),
@@ -411,11 +220,11 @@ impl TypeRef {
                     }
                 }
                 TypeRef::Path(path) => go_path(path, f, map),
-                TypeRef::Never | TypeRef::Placeholder | TypeRef::Macro(_) | TypeRef::Error => {}
+                TypeRef::Never | TypeRef::Placeholder | TypeRef::Error | TypeRef::TypeParam(_) => {}
             };
         }
 
-        fn go_path(path: &Path, f: &mut impl FnMut(&TypeRef), map: &TypesMap) {
+        fn go_path(path: &Path, f: &mut impl FnMut(&TypeRef), map: &ExpressionStore) {
             if let Some(type_ref) = path.type_anchor() {
                 go(type_ref, f, map);
             }
@@ -448,71 +257,8 @@ impl TypeRef {
     }
 }
 
-pub(crate) fn type_bounds_from_ast(
-    lower_ctx: &mut LowerCtx<'_>,
-    type_bounds_opt: Option<ast::TypeBoundList>,
-) -> ThinVec<TypeBound> {
-    if let Some(type_bounds) = type_bounds_opt {
-        ThinVec::from_iter(Vec::from_iter(
-            type_bounds.bounds().map(|it| TypeBound::from_ast(lower_ctx, it)),
-        ))
-    } else {
-        ThinVec::from_iter([])
-    }
-}
-
 impl TypeBound {
-    pub(crate) fn from_ast(ctx: &mut LowerCtx<'_>, node: ast::TypeBound) -> Self {
-        let mut lower_path_type = |path_type: &ast::PathType| ctx.lower_path(path_type.path()?);
-
-        match node.kind() {
-            ast::TypeBoundKind::PathType(path_type) => {
-                let m = match node.question_mark_token() {
-                    Some(_) => TraitBoundModifier::Maybe,
-                    None => TraitBoundModifier::None,
-                };
-                lower_path_type(&path_type)
-                    .map(|p| {
-                        TypeBound::Path(ctx.alloc_path(p, AstPtr::new(&path_type).upcast()), m)
-                    })
-                    .unwrap_or(TypeBound::Error)
-            }
-            ast::TypeBoundKind::ForType(for_type) => {
-                let lt_refs = match for_type.generic_param_list() {
-                    Some(gpl) => gpl
-                        .lifetime_params()
-                        .flat_map(|lp| lp.lifetime().map(|lt| Name::new_lifetime(&lt)))
-                        .collect(),
-                    None => Box::default(),
-                };
-                let path = for_type.ty().and_then(|ty| match &ty {
-                    ast::Type::PathType(path_type) => lower_path_type(path_type).map(|p| (p, ty)),
-                    _ => None,
-                });
-                match path {
-                    Some((p, ty)) => {
-                        TypeBound::ForLifetime(lt_refs, ctx.alloc_path(p, AstPtr::new(&ty)))
-                    }
-                    None => TypeBound::Error,
-                }
-            }
-            ast::TypeBoundKind::Use(gal) => TypeBound::Use(
-                gal.use_bound_generic_args()
-                    .map(|p| match p {
-                        ast::UseBoundGenericArg::Lifetime(l) => {
-                            UseArgRef::Lifetime(LifetimeRef::new(&l))
-                        }
-                        ast::UseBoundGenericArg::NameRef(n) => UseArgRef::Name(n.as_name()),
-                    })
-                    .collect(),
-            ),
-            ast::TypeBoundKind::Lifetime(lifetime) => {
-                TypeBound::Lifetime(LifetimeRef::new(&lifetime))
-            }
-        }
-    }
-
-    pub fn as_path<'a>(&self, map: &'a TypesMap) -> Option<(&'a Path, TraitBoundModifier)> {
+    pub fn as_path<'a>(&self, map: &'a ExpressionStore) -> Option<(&'a Path, TraitBoundModifier)> {
         match self {
             &TypeBound::Path(p, m) => Some((&map[p], m)),
             &TypeBound::ForLifetime(_, p) => Some((&map[p], TraitBoundModifier::None)),
@@ -521,90 +267,9 @@ impl TypeBound {
     }
 }
 
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub enum ConstRef {
-    Scalar(Box<LiteralConstRef>),
-    Path(Name),
-    Complex(AstId<ast::ConstArg>),
-}
-
-impl ConstRef {
-    pub(crate) fn from_const_arg(lower_ctx: &LowerCtx<'_>, arg: Option<ast::ConstArg>) -> Self {
-        if let Some(arg) = arg {
-            if let Some(expr) = arg.expr() {
-                return Self::from_expr(expr, Some(lower_ctx.ast_id(&arg)));
-            }
-        }
-        Self::Scalar(Box::new(LiteralConstRef::Unknown))
-    }
-
-    pub(crate) fn from_const_param(
-        lower_ctx: &LowerCtx<'_>,
-        param: &ast::ConstParam,
-    ) -> Option<Self> {
-        param.default_val().map(|default| Self::from_const_arg(lower_ctx, Some(default)))
-    }
-
-    pub fn display<'a>(
-        &'a self,
-        db: &'a dyn ExpandDatabase,
-        edition: Edition,
-    ) -> impl fmt::Display + 'a {
-        struct Display<'a>(&'a dyn ExpandDatabase, &'a ConstRef, Edition);
-        impl fmt::Display for Display<'_> {
-            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-                match self.1 {
-                    ConstRef::Scalar(s) => s.fmt(f),
-                    ConstRef::Path(n) => n.display(self.0, self.2).fmt(f),
-                    ConstRef::Complex(_) => f.write_str("{const}"),
-                }
-            }
-        }
-        Display(db, self, edition)
-    }
-
-    // We special case literals and single identifiers, to speed up things.
-    fn from_expr(expr: ast::Expr, ast_id: Option<AstId<ast::ConstArg>>) -> Self {
-        fn is_path_ident(p: &ast::PathExpr) -> bool {
-            let Some(path) = p.path() else {
-                return false;
-            };
-            if path.coloncolon_token().is_some() {
-                return false;
-            }
-            if let Some(s) = path.segment() {
-                if s.coloncolon_token().is_some() || s.generic_arg_list().is_some() {
-                    return false;
-                }
-            }
-            true
-        }
-        match expr {
-            ast::Expr::PathExpr(p) if is_path_ident(&p) => {
-                match p.path().and_then(|it| it.segment()).and_then(|it| it.name_ref()) {
-                    Some(it) => Self::Path(it.as_name()),
-                    None => Self::Scalar(Box::new(LiteralConstRef::Unknown)),
-                }
-            }
-            ast::Expr::Literal(literal) => Self::Scalar(Box::new(match literal.kind() {
-                ast::LiteralKind::IntNumber(num) => {
-                    num.value().map(LiteralConstRef::UInt).unwrap_or(LiteralConstRef::Unknown)
-                }
-                ast::LiteralKind::Char(c) => {
-                    c.value().map(LiteralConstRef::Char).unwrap_or(LiteralConstRef::Unknown)
-                }
-                ast::LiteralKind::Bool(f) => LiteralConstRef::Bool(f),
-                _ => LiteralConstRef::Unknown,
-            })),
-            _ => {
-                if let Some(ast_id) = ast_id {
-                    Self::Complex(ast_id)
-                } else {
-                    Self::Scalar(Box::new(LiteralConstRef::Unknown))
-                }
-            }
-        }
-    }
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct ConstRef {
+    pub expr: ExprId,
 }
 
 /// A literal constant value
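// Illustrative sketch (not part of the patch): the hunks above replace the
// name-only `LifetimeRef { name: Name }` with an enum that distinguishes
// special and already-resolved lifetimes, and shrink `ConstRef` to a wrapper
// around an expression id. The types below are simplified stand-ins, not the
// real rust-analyzer definitions; they only mirror the shape of the change.
enum LifetimeRefSketch {
    // A named lifetime such as `'a` that still needs resolving.
    Named(String),
    // `'static`.
    Static,
    // `'_`.
    Placeholder,
    // A lifetime already resolved to a parameter; a plain index stands in
    // for the real `LifetimeParamId` here.
    Param(u32),
    // Anything that failed to lower.
    Error,
}

fn describe(lt: &LifetimeRefSketch) -> String {
    match lt {
        LifetimeRefSketch::Named(name) => format!("unresolved lifetime {name}"),
        LifetimeRefSketch::Static => "'static".to_owned(),
        LifetimeRefSketch::Placeholder => "'_".to_owned(),
        LifetimeRefSketch::Param(idx) => format!("resolved to lifetime param #{idx}"),
        LifetimeRefSketch::Error => "lowering error".to_owned(),
    }
}

fn main() {
    let examples = [
        LifetimeRefSketch::Named("'a".to_owned()),
        LifetimeRefSketch::Static,
        LifetimeRefSketch::Placeholder,
        LifetimeRefSketch::Param(0),
        LifetimeRefSketch::Error,
    ];
    for lt in &examples {
        println!("{}", describe(lt));
    }
}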
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs
index d43776b8a66ad..db571f045d740 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs
@@ -2,22 +2,22 @@
 
 use std::fmt;
 
-use base_db::CrateId;
-use fst::{raw::IndexedValue, Automaton, Streamer};
+use base_db::Crate;
+use fst::{Automaton, Streamer, raw::IndexedValue};
 use hir_expand::name::Name;
 use itertools::Itertools;
 use rustc_hash::FxHashSet;
 use smallvec::SmallVec;
 use span::Edition;
-use stdx::{format_to, TupleExt};
+use stdx::format_to;
 use triomphe::Arc;
 
 use crate::{
+    AssocItemId, AttrDefId, Complete, FxIndexMap, ModuleDefId, ModuleId, TraitId,
     db::DefDatabase,
     item_scope::{ImportOrExternCrate, ItemInNs},
     nameres::DefMap,
     visibility::Visibility,
-    AssocItemId, FxIndexMap, ModuleDefId, ModuleId, TraitId,
 };
 
 /// Item import details stored in the `ImportMap`.
@@ -31,6 +31,8 @@ pub struct ImportInfo {
     pub is_doc_hidden: bool,
     /// Whether this item is annotated with `#[unstable(..)]`.
     pub is_unstable: bool,
+    /// The value of `#[rust_analyzer::completions(...)]`, if it exists.
+    pub complete: Complete,
 }
 
 /// A map from publicly exported items to its name.
@@ -66,19 +68,14 @@ impl ImportMap {
         for (k, v) in self.item_to_info_map.iter() {
             format_to!(out, "{:?} ({:?}) -> ", k, v.1);
             for v in &v.0 {
-                format_to!(
-                    out,
-                    "{}:{:?}, ",
-                    v.name.display(db.upcast(), Edition::CURRENT),
-                    v.container
-                );
+                format_to!(out, "{}:{:?}, ", v.name.display(db, Edition::CURRENT), v.container);
             }
             format_to!(out, "\n");
         }
         out
     }
 
-    pub(crate) fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<Self> {
+    pub(crate) fn import_map_query(db: &dyn DefDatabase, krate: Crate) -> Arc<Self> {
         let _p = tracing::info_span!("import_map_query").entered();
 
         let map = Self::collect_import_map(db, krate);
@@ -129,7 +126,7 @@ impl ImportMap {
         self.item_to_info_map.get(&item).map(|(info, _)| &**info)
     }
 
-    fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> ImportMapIndex {
+    fn collect_import_map(db: &dyn DefDatabase, krate: Crate) -> ImportMapIndex {
         let _p = tracing::info_span!("collect_import_map").entered();
 
         let def_map = db.crate_def_map(krate);
@@ -155,11 +152,7 @@ impl ImportMap {
 
             let visible_items = mod_data.scope.entries().filter_map(|(name, per_ns)| {
                 let per_ns = per_ns.filter_visibility(|vis| vis == Visibility::Public);
-                if per_ns.is_none() {
-                    None
-                } else {
-                    Some((name, per_ns))
-                }
+                if per_ns.is_none() { None } else { Some((name, per_ns)) }
             });
 
             for (name, per_ns) in visible_items {
@@ -176,16 +169,22 @@ impl ImportMap {
                             ItemInNs::Macros(id) => Some(id.into()),
                         }
                     };
-                    let (is_doc_hidden, is_unstable) = attr_id.map_or((false, false), |attr_id| {
-                        let attrs = db.attrs(attr_id);
-                        (attrs.has_doc_hidden(), attrs.is_unstable())
-                    });
+                    let (is_doc_hidden, is_unstable, do_not_complete) = match attr_id {
+                        None => (false, false, Complete::Yes),
+                        Some(attr_id) => {
+                            let attrs = db.attrs(attr_id);
+                            let do_not_complete =
+                                Complete::extract(matches!(attr_id, AttrDefId::TraitId(_)), &attrs);
+                            (attrs.has_doc_hidden(), attrs.is_unstable(), do_not_complete)
+                        }
+                    };
 
                     let import_info = ImportInfo {
                         name: name.clone(),
                         container: module,
                         is_doc_hidden,
                         is_unstable,
+                        complete: do_not_complete,
                     };
 
                     if let Some(ModuleDefId::TraitId(tr)) = item.as_module_def_id() {
@@ -222,7 +221,7 @@ impl ImportMap {
         trait_import_info: &ImportInfo,
     ) {
         let _p = tracing::info_span!("collect_trait_assoc_items").entered();
-        for &(ref assoc_item_name, item) in &db.trait_data(tr).items {
+        for &(ref assoc_item_name, item) in &db.trait_items(tr).items {
             let module_def_id = match item {
                 AssocItemId::FunctionId(f) => ModuleDefId::from(f),
                 AssocItemId::ConstId(c) => ModuleDefId::from(c),
@@ -239,12 +238,17 @@ impl ImportMap {
                 ItemInNs::Values(module_def_id)
             };
 
-            let attrs = &db.attrs(item.into());
+            let attr_id = item.into();
+            let attrs = &db.attrs(attr_id);
+            let item_do_not_complete = Complete::extract(false, attrs);
+            let do_not_complete =
+                Complete::for_trait_item(trait_import_info.complete, item_do_not_complete);
             let assoc_item_info = ImportInfo {
                 container: trait_import_info.container,
                 name: assoc_item_name.clone(),
                 is_doc_hidden: attrs.has_doc_hidden(),
                 is_unstable: attrs.is_unstable(),
+                complete: do_not_complete,
             };
 
             let (infos, _) =
@@ -400,15 +404,13 @@ impl Query {
 /// This returns a list of items that could be imported from dependencies of `krate`.
 pub fn search_dependencies(
     db: &dyn DefDatabase,
-    krate: CrateId,
+    krate: Crate,
     query: &Query,
-) -> FxHashSet<ItemInNs> {
+) -> FxHashSet<(ItemInNs, Complete)> {
     let _p = tracing::info_span!("search_dependencies", ?query).entered();
 
-    let graph = db.crate_graph();
-
     let import_maps: Vec<_> =
-        graph[krate].dependencies.iter().map(|dep| db.import_map(dep.crate_id)).collect();
+        krate.data(db).dependencies.iter().map(|dep| db.import_map(dep.crate_id)).collect();
 
     let mut op = fst::map::OpBuilder::new();
 
@@ -445,7 +447,7 @@ fn search_maps(
     import_maps: &[Arc<ImportMap>],
     mut stream: fst::map::Union<'_>,
     query: &Query,
-) -> FxHashSet<ItemInNs> {
+) -> FxHashSet<(ItemInNs, Complete)> {
     let mut res = FxHashSet::default();
     while let Some((_, indexed_values)) = stream.next() {
         for &IndexedValue { index: import_map_idx, value } in indexed_values {
@@ -465,8 +467,9 @@ fn search_maps(
                 })
                 .filter(|&(_, info)| {
                     query.search_mode.check(&query.query, query.case_sensitive, info.name.as_str())
-                });
-            res.extend(iter.map(TupleExt::head));
+                })
+                .map(|(item, import_info)| (item, import_info.complete));
+            res.extend(iter);
         }
     }
 
@@ -475,11 +478,11 @@ fn search_maps(
 
 #[cfg(test)]
 mod tests {
-    use base_db::{SourceDatabase, Upcast};
-    use expect_test::{expect, Expect};
+    use base_db::RootQueryDb;
+    use expect_test::{Expect, expect};
     use test_fixture::WithFixture;
 
-    use crate::{test_db::TestDB, ItemContainerId, Lookup};
+    use crate::{ItemContainerId, Lookup, test_db::TestDB};
 
     use super::*;
 
@@ -512,21 +515,23 @@ mod tests {
         expect: Expect,
     ) {
         let db = TestDB::with_files(ra_fixture);
-        let crate_graph = db.crate_graph();
-        let krate = crate_graph
+        let all_crates = db.all_crates();
+        let krate = all_crates
             .iter()
+            .copied()
             .find(|&krate| {
-                crate_graph[krate]
+                krate
+                    .extra_data(&db)
                     .display_name
                     .as_ref()
                     .is_some_and(|it| it.crate_name().as_str() == crate_name)
             })
             .expect("could not find crate");
 
-        let actual = search_dependencies(db.upcast(), krate, &query)
+        let actual = search_dependencies(&db, krate, &query)
             .into_iter()
-            .filter_map(|dependency| {
-                let dependency_krate = dependency.krate(db.upcast())?;
+            .filter_map(|(dependency, _)| {
+                let dependency_krate = dependency.krate(&db)?;
                 let dependency_imports = db.import_map(dependency_krate);
 
                 let (path, mark) = match assoc_item_path(&db, &dependency_imports, dependency) {
@@ -545,7 +550,7 @@ mod tests {
 
                 Some(format!(
                     "{}::{} ({})\n",
-                    crate_graph[dependency_krate].display_name.as_ref()?,
+                    dependency_krate.extra_data(&db).display_name.as_ref()?,
                     path,
                     mark
                 ))
@@ -575,8 +580,8 @@ mod tests {
 
         let trait_info = dependency_imports.import_info_for(ItemInNs::Types(trait_id.into()))?;
 
-        let trait_data = db.trait_data(trait_id);
-        let (assoc_item_name, _) = trait_data
+        let trait_items = db.trait_items(trait_id);
+        let (assoc_item_name, _) = trait_items
             .items
             .iter()
             .find(|(_, assoc_item_id)| &dependency_assoc_item_id == assoc_item_id)?;
@@ -584,23 +589,24 @@ mod tests {
         Some(format!(
             "{}::{}",
             render_path(db, &trait_info[0]),
-            assoc_item_name.display(db.upcast(), Edition::CURRENT)
+            assoc_item_name.display(db, Edition::CURRENT)
         ))
     }
 
     fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
         let db = TestDB::with_files(ra_fixture);
-        let crate_graph = db.crate_graph();
+        let all_crates = db.all_crates();
 
-        let actual = crate_graph
+        let actual = all_crates
             .iter()
+            .copied()
             .filter_map(|krate| {
-                let cdata = &crate_graph[krate];
+                let cdata = &krate.extra_data(&db);
                 let name = cdata.display_name.as_ref()?;
 
                 let map = db.import_map(krate);
 
-                Some(format!("{name}:\n{}\n", map.fmt_for_test(db.upcast())))
+                Some(format!("{name}:\n{}\n", map.fmt_for_test(&db)))
             })
             .sorted()
             .collect::<String>();
@@ -623,7 +629,7 @@ mod tests {
             module = parent;
         }
 
-        segments.iter().rev().map(|it| it.display(db.upcast(), Edition::CURRENT)).join("::")
+        segments.iter().rev().map(|it| it.display(db, Edition::CURRENT)).join("::")
     }
 
     #[test]
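// Illustrative sketch (not part of the patch): per the hunks above,
// `search_dependencies` now returns each found item paired with its
// `Complete` policy instead of a bare item set, so callers can filter
// completions without a second attribute lookup. `ItemKey` and
// `CompletePolicy` are simplified stand-ins for `ItemInNs` and `Complete`.
use std::collections::HashSet;

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum CompletePolicy {
    Yes,
    No,
}

type ItemKey = u32;

fn search(items: &[(ItemKey, CompletePolicy)]) -> HashSet<(ItemKey, CompletePolicy)> {
    // Mirror the change: extend the result set with `(item, policy)` pairs
    // rather than dropping the policy with something like `TupleExt::head`.
    items.iter().copied().collect()
}

fn main() {
    let found = search(&[(1, CompletePolicy::Yes), (2, CompletePolicy::No)]);
    // A caller interested only in completable items filters on the policy.
    let completable: Vec<_> =
        found.into_iter().filter(|&(_, c)| c == CompletePolicy::Yes).collect();
    println!("{completable:?}");
}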
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs
index 0ca1eb9bcfe37..bece940950d82 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs
@@ -3,23 +3,23 @@
 
 use std::sync::LazyLock;
 
-use base_db::CrateId;
-use hir_expand::{attrs::AttrId, db::ExpandDatabase, name::Name, AstId, MacroCallId};
+use base_db::Crate;
+use hir_expand::{AstId, MacroCallId, attrs::AttrId, db::ExpandDatabase, name::Name};
 use indexmap::map::Entry;
 use itertools::Itertools;
 use la_arena::Idx;
 use rustc_hash::{FxHashMap, FxHashSet};
-use smallvec::{smallvec, SmallVec};
+use smallvec::{SmallVec, smallvec};
 use span::Edition;
 use stdx::format_to;
 use syntax::ast;
 
 use crate::{
+    AdtId, BuiltinType, ConstId, ExternBlockId, ExternCrateId, FxIndexMap, HasModule, ImplId,
+    LocalModuleId, Lookup, MacroId, ModuleDefId, ModuleId, TraitId, UseId,
     db::DefDatabase,
     per_ns::{Item, MacrosItem, PerNs, TypesItem, ValuesItem},
     visibility::{Visibility, VisibilityExplicitness},
-    AdtId, BuiltinType, ConstId, ExternBlockId, ExternCrateId, FxIndexMap, HasModule, ImplId,
-    LocalModuleId, Lookup, MacroId, ModuleDefId, ModuleId, TraitId, UseId,
 };
 
 #[derive(Debug, Default)]
@@ -358,7 +358,7 @@ impl ItemScope {
     }
 
     /// Get a name from current module scope, legacy macros are not included
-    pub(crate) fn get(&self, name: &Name) -> PerNs {
+    pub fn get(&self, name: &Name) -> PerNs {
         PerNs {
             types: self.types.get(name).copied(),
             values: self.values.get(name).copied(),
@@ -453,7 +453,7 @@ impl ItemScope {
         )
     }
 
-    pub(crate) fn macro_invoc(&self, call: AstId<ast::MacroCall>) -> Option<MacroCallId> {
+    pub fn macro_invoc(&self, call: AstId<ast::MacroCall>) -> Option<MacroCallId> {
         self.macro_invocations.get(&call).copied()
     }
 
@@ -916,7 +916,7 @@ impl ItemInNs {
     }
 
     /// Returns the crate defining this item (or `None` if `self` is built-in).
-    pub fn krate(&self, db: &dyn DefDatabase) -> Option<CrateId> {
+    pub fn krate(&self, db: &dyn DefDatabase) -> Option<Crate> {
         match self {
             ItemInNs::Types(id) | ItemInNs::Values(id) => id.module(db).map(|m| m.krate),
             ItemInNs::Macros(id) => Some(id.module(db).krate),
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs
index 382afbcb1dd4f..01d340cea6df4 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs
@@ -44,27 +44,23 @@ use std::{
 };
 
 use ast::{AstNode, StructKind};
-use base_db::CrateId;
-use either::Either;
-use hir_expand::{attrs::RawAttrs, name::Name, ExpandTo, HirFileId, InFile};
+use base_db::Crate;
+use hir_expand::{
+    ExpandTo, HirFileId, InFile,
+    attrs::RawAttrs,
+    mod_path::{ModPath, PathKind},
+    name::Name,
+};
 use intern::{Interned, Symbol};
 use la_arena::{Arena, Idx, RawIdx};
 use rustc_hash::FxHashMap;
 use smallvec::SmallVec;
-use span::{AstIdNode, Edition, FileAstId, SyntaxContextId};
+use span::{AstIdNode, Edition, FileAstId, SyntaxContext};
 use stdx::never;
-use syntax::{ast, match_ast, SyntaxKind};
+use syntax::{SyntaxKind, ast, match_ast};
 use triomphe::Arc;
 
-use crate::{
-    attr::Attrs,
-    db::DefDatabase,
-    generics::GenericParams,
-    path::{GenericArgs, ImportAlias, ModPath, Path, PathKind},
-    type_ref::{Mutability, TraitRef, TypeBound, TypeRefId, TypesMap, TypesSourceMap},
-    visibility::{RawVisibility, VisibilityExplicitness},
-    BlockId, LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup,
-};
+use crate::{BlockId, Lookup, attr::Attrs, db::DefDatabase};
 
 #[derive(Copy, Clone, Eq, PartialEq)]
 pub struct RawVisibilityId(u32);
@@ -100,23 +96,16 @@ pub struct ItemTree {
 
 impl ItemTree {
     pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> {
-        db.file_item_tree_with_source_map(file_id).0
-    }
-
-    pub(crate) fn file_item_tree_with_source_map_query(
-        db: &dyn DefDatabase,
-        file_id: HirFileId,
-    ) -> (Arc<ItemTree>, Arc<ItemTreeSourceMaps>) {
         let _p = tracing::info_span!("file_item_tree_query", ?file_id).entered();
-        static EMPTY: OnceLock<(Arc<ItemTree>, Arc<ItemTreeSourceMaps>)> = OnceLock::new();
+        static EMPTY: OnceLock<Arc<ItemTree>> = OnceLock::new();
 
         let ctx = lower::Ctx::new(db, file_id);
         let syntax = db.parse_or_expand(file_id);
         let mut top_attrs = None;
-        let (mut item_tree, source_maps) = match_ast! {
+        let mut item_tree = match_ast! {
             match syntax {
                 ast::SourceFile(file) => {
-                    top_attrs = Some(RawAttrs::new(db.upcast(), &file, ctx.span_map()));
+                    top_attrs = Some(RawAttrs::new(db, &file, ctx.span_map()));
                     ctx.lower_module_items(&file)
                 },
                 ast::MacroItems(items) => {
@@ -143,55 +132,42 @@ impl ItemTree {
         {
             EMPTY
                 .get_or_init(|| {
-                    (
-                        Arc::new(ItemTree {
-                            top_level: SmallVec::new_const(),
-                            attrs: FxHashMap::default(),
-                            data: None,
-                        }),
-                        Arc::default(),
-                    )
+                    Arc::new(ItemTree {
+                        top_level: SmallVec::new_const(),
+                        attrs: FxHashMap::default(),
+                        data: None,
+                    })
                 })
                 .clone()
         } else {
             item_tree.shrink_to_fit();
-            (Arc::new(item_tree), Arc::new(source_maps))
+            Arc::new(item_tree)
         }
     }
 
     pub(crate) fn block_item_tree_query(db: &dyn DefDatabase, block: BlockId) -> Arc<ItemTree> {
-        db.block_item_tree_with_source_map(block).0
-    }
-
-    pub(crate) fn block_item_tree_with_source_map_query(
-        db: &dyn DefDatabase,
-        block: BlockId,
-    ) -> (Arc<ItemTree>, Arc<ItemTreeSourceMaps>) {
         let _p = tracing::info_span!("block_item_tree_query", ?block).entered();
-        static EMPTY: OnceLock<(Arc<ItemTree>, Arc<ItemTreeSourceMaps>)> = OnceLock::new();
+        static EMPTY: OnceLock<Arc<ItemTree>> = OnceLock::new();
 
         let loc = block.lookup(db);
-        let block = loc.ast_id.to_node(db.upcast());
+        let block = loc.ast_id.to_node(db);
 
         let ctx = lower::Ctx::new(db, loc.ast_id.file_id);
-        let (mut item_tree, source_maps) = ctx.lower_block(&block);
+        let mut item_tree = ctx.lower_block(&block);
         if item_tree.data.is_none() && item_tree.top_level.is_empty() && item_tree.attrs.is_empty()
         {
             EMPTY
                 .get_or_init(|| {
-                    (
-                        Arc::new(ItemTree {
-                            top_level: SmallVec::new_const(),
-                            attrs: FxHashMap::default(),
-                            data: None,
-                        }),
-                        Arc::default(),
-                    )
+                    Arc::new(ItemTree {
+                        top_level: SmallVec::new_const(),
+                        attrs: FxHashMap::default(),
+                        data: None,
+                    })
                 })
                 .clone()
         } else {
             item_tree.shrink_to_fit();
-            (Arc::new(item_tree), Arc::new(source_maps))
+            Arc::new(item_tree)
         }
     }
 
@@ -202,7 +178,7 @@ impl ItemTree {
     }
 
     /// Returns the inner attributes of the source file.
-    pub fn top_level_attrs(&self, db: &dyn DefDatabase, krate: CrateId) -> Attrs {
+    pub fn top_level_attrs(&self, db: &dyn DefDatabase, krate: Crate) -> Attrs {
         Attrs::filter(
             db,
             krate,
@@ -214,10 +190,26 @@ impl ItemTree {
         self.attrs.get(&of).unwrap_or(&RawAttrs::EMPTY)
     }
 
-    pub(crate) fn attrs(&self, db: &dyn DefDatabase, krate: CrateId, of: AttrOwner) -> Attrs {
+    pub(crate) fn attrs(&self, db: &dyn DefDatabase, krate: Crate, of: AttrOwner) -> Attrs {
         Attrs::filter(db, krate, self.raw_attrs(of).clone())
     }
 
+    /// Returns a count of a few expensive items.
+    ///
+    /// For more detail, see [`ItemTreeDataStats`].
+    pub fn item_tree_stats(&self) -> ItemTreeDataStats {
+        match self.data {
+            Some(ref data) => ItemTreeDataStats {
+                traits: data.traits.len(),
+                impls: data.impls.len(),
+                mods: data.mods.len(),
+                macro_calls: data.macro_calls.len(),
+                macro_rules: data.macro_rules.len(),
+            },
+            None => ItemTreeDataStats::default(),
+        }
+    }
+
     pub fn pretty_print(&self, db: &dyn DefDatabase, edition: Edition) -> String {
         pretty::print_item_tree(db, self, edition)
     }
@@ -231,7 +223,10 @@ impl ItemTree {
     }
 
     fn shrink_to_fit(&mut self) {
-        if let Some(data) = &mut self.data {
+        let ItemTree { top_level, attrs, data } = self;
+        top_level.shrink_to_fit();
+        attrs.shrink_to_fit();
+        if let Some(data) = data {
             let ItemTreeData {
                 uses,
                 extern_crates,
@@ -329,157 +324,12 @@ struct ItemTreeData {
 }
 
 #[derive(Default, Debug, Eq, PartialEq)]
-pub struct ItemTreeSourceMaps {
-    all_concatenated: Box<[TypesSourceMap]>,
-    structs_offset: u32,
-    unions_offset: u32,
-    enum_generics_offset: u32,
-    variants_offset: u32,
-    consts_offset: u32,
-    statics_offset: u32,
-    trait_generics_offset: u32,
-    trait_alias_generics_offset: u32,
-    impls_offset: u32,
-    type_aliases_offset: u32,
-}
-
-#[derive(Clone, Copy)]
-pub struct GenericItemSourceMap<'a>(&'a [TypesSourceMap; 2]);
-
-impl<'a> GenericItemSourceMap<'a> {
-    #[inline]
-    pub fn item(self) -> &'a TypesSourceMap {
-        &self.0[0]
-    }
-
-    #[inline]
-    pub fn generics(self) -> &'a TypesSourceMap {
-        &self.0[1]
-    }
-}
-
-#[derive(Default, Debug, Eq, PartialEq)]
-pub struct GenericItemSourceMapBuilder {
-    pub item: TypesSourceMap,
-    pub generics: TypesSourceMap,
-}
-
-#[derive(Default, Debug, Eq, PartialEq)]
-struct ItemTreeSourceMapsBuilder {
-    functions: Vec<GenericItemSourceMapBuilder>,
-    structs: Vec<GenericItemSourceMapBuilder>,
-    unions: Vec<GenericItemSourceMapBuilder>,
-    enum_generics: Vec<TypesSourceMap>,
-    variants: Vec<TypesSourceMap>,
-    consts: Vec<TypesSourceMap>,
-    statics: Vec<TypesSourceMap>,
-    trait_generics: Vec<TypesSourceMap>,
-    trait_alias_generics: Vec<TypesSourceMap>,
-    impls: Vec<GenericItemSourceMapBuilder>,
-    type_aliases: Vec<GenericItemSourceMapBuilder>,
-}
-
-impl ItemTreeSourceMapsBuilder {
-    fn build(self) -> ItemTreeSourceMaps {
-        let ItemTreeSourceMapsBuilder {
-            functions,
-            structs,
-            unions,
-            enum_generics,
-            variants,
-            consts,
-            statics,
-            trait_generics,
-            trait_alias_generics,
-            impls,
-            type_aliases,
-        } = self;
-        let structs_offset = functions.len() as u32 * 2;
-        let unions_offset = structs_offset + (structs.len() as u32 * 2);
-        let enum_generics_offset = unions_offset + (unions.len() as u32 * 2);
-        let variants_offset = enum_generics_offset + (enum_generics.len() as u32);
-        let consts_offset = variants_offset + (variants.len() as u32);
-        let statics_offset = consts_offset + (consts.len() as u32);
-        let trait_generics_offset = statics_offset + (statics.len() as u32);
-        let trait_alias_generics_offset = trait_generics_offset + (trait_generics.len() as u32);
-        let impls_offset = trait_alias_generics_offset + (trait_alias_generics.len() as u32);
-        let type_aliases_offset = impls_offset + (impls.len() as u32 * 2);
-        let all_concatenated = generics_concat(functions)
-            .chain(generics_concat(structs))
-            .chain(generics_concat(unions))
-            .chain(enum_generics)
-            .chain(variants)
-            .chain(consts)
-            .chain(statics)
-            .chain(trait_generics)
-            .chain(trait_alias_generics)
-            .chain(generics_concat(impls))
-            .chain(generics_concat(type_aliases))
-            .collect();
-        return ItemTreeSourceMaps {
-            all_concatenated,
-            structs_offset,
-            unions_offset,
-            enum_generics_offset,
-            variants_offset,
-            consts_offset,
-            statics_offset,
-            trait_generics_offset,
-            trait_alias_generics_offset,
-            impls_offset,
-            type_aliases_offset,
-        };
-
-        fn generics_concat(
-            source_maps: Vec<GenericItemSourceMapBuilder>,
-        ) -> impl Iterator<Item = TypesSourceMap> {
-            source_maps.into_iter().flat_map(|it| [it.item, it.generics])
-        }
-    }
-}
-
-impl ItemTreeSourceMaps {
-    #[inline]
-    fn generic_item(&self, offset: u32, index: u32) -> GenericItemSourceMap<'_> {
-        GenericItemSourceMap(
-            self.all_concatenated[(offset + (index * 2)) as usize..][..2].try_into().unwrap(),
-        )
-    }
-
-    #[inline]
-    fn non_generic_item(&self, offset: u32, index: u32) -> &TypesSourceMap {
-        &self.all_concatenated[(offset + index) as usize]
-    }
-
-    #[inline]
-    pub fn function(&self, index: FileItemTreeId<Function>) -> GenericItemSourceMap<'_> {
-        self.generic_item(0, index.0.into_raw().into_u32())
-    }
-}
-
-macro_rules! index_item_source_maps {
-    ( $( $name:ident; $field:ident[$tree_id:ident]; $fn:ident; $ret:ty, )* ) => {
-        impl ItemTreeSourceMaps {
-            $(
-                #[inline]
-                pub fn $name(&self, index: FileItemTreeId<$tree_id>) -> $ret {
-                    self.$fn(self.$field, index.0.into_raw().into_u32())
-                }
-            )*
-        }
-    };
-}
-index_item_source_maps! {
-    strukt; structs_offset[Struct]; generic_item; GenericItemSourceMap<'_>,
-    union; unions_offset[Union]; generic_item; GenericItemSourceMap<'_>,
-    enum_generic; enum_generics_offset[Enum]; non_generic_item; &TypesSourceMap,
-    variant; variants_offset[Variant]; non_generic_item; &TypesSourceMap,
-    konst; consts_offset[Const]; non_generic_item; &TypesSourceMap,
-    statik; statics_offset[Static]; non_generic_item; &TypesSourceMap,
-    trait_generic; trait_generics_offset[Trait]; non_generic_item; &TypesSourceMap,
-    trait_alias_generic; trait_alias_generics_offset[TraitAlias]; non_generic_item; &TypesSourceMap,
-    impl_; impls_offset[Impl]; generic_item; GenericItemSourceMap<'_>,
-    type_alias; type_aliases_offset[TypeAlias]; generic_item; GenericItemSourceMap<'_>,
+pub struct ItemTreeDataStats {
+    pub traits: usize,
+    pub impls: usize,
+    pub mods: usize,
+    pub macro_calls: usize,
+    pub macro_rules: usize,
 }
 
 #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
@@ -490,10 +340,8 @@ pub enum AttrOwner {
     TopLevel,
 
     Variant(FileItemTreeId<Variant>),
+    // While not relevant to early name resolution, fields can carry visibility.
     Field(FieldParent, ItemTreeFieldId),
-    Param(FileItemTreeId<Function>, ItemTreeParamId),
-    TypeOrConstParamData(GenericModItem, LocalTypeOrConstParamId),
-    LifetimeParamData(GenericModItem, LocalLifetimeParamId),
 }
 
 impl AttrOwner {
@@ -506,10 +354,9 @@ impl AttrOwner {
 pub enum FieldParent {
     Struct(FileItemTreeId<Struct>),
     Union(FileItemTreeId<Union>),
-    Variant(FileItemTreeId<Variant>),
+    EnumVariant(FileItemTreeId<Variant>),
 }
 
-pub type ItemTreeParamId = Idx<Param>;
 pub type ItemTreeFieldId = Idx<Field>;
 
 macro_rules! from_attrs {
@@ -536,9 +383,6 @@ pub trait ItemTreeNode: Clone {
     fn lookup(tree: &ItemTree, index: Idx<Self>) -> &Self;
     fn attr_owner(id: FileItemTreeId<Self>) -> AttrOwner;
 }
-pub trait GenericsItemTreeNode: ItemTreeNode {
-    fn generic_params(&self) -> &Arc<GenericParams>;
-}
 
 pub struct FileItemTreeId<N>(Idx<N>);
 
@@ -591,7 +435,7 @@ pub struct TreeId {
 }
 
 impl TreeId {
-    pub(crate) fn new(file: HirFileId, block: Option<BlockId>) -> Self {
+    pub fn new(file: HirFileId, block: Option<BlockId>) -> Self {
         Self { file, block }
     }
 
@@ -602,16 +446,6 @@ impl TreeId {
         }
     }
 
-    pub fn item_tree_with_source_map(
-        &self,
-        db: &dyn DefDatabase,
-    ) -> (Arc<ItemTree>, Arc<ItemTreeSourceMaps>) {
-        match self.block {
-            Some(block) => db.block_item_tree_with_source_map(block),
-            None => db.file_item_tree_with_source_map(self.file),
-        }
-    }
-
     pub fn file_id(self) -> HirFileId {
         self.file
     }
@@ -644,13 +478,6 @@ impl<N> ItemTreeId<N> {
         self.tree.item_tree(db)
     }
 
-    pub fn item_tree_with_source_map(
-        self,
-        db: &dyn DefDatabase,
-    ) -> (Arc<ItemTree>, Arc<ItemTreeSourceMaps>) {
-        self.tree.item_tree_with_source_map(db)
-    }
-
     pub fn resolved<R>(self, db: &dyn DefDatabase, cb: impl FnOnce(&N) -> R) -> R
     where
         ItemTree: Index<FileItemTreeId<N>, Output = N>,
@@ -682,7 +509,7 @@ impl<N> Hash for ItemTreeId<N> {
 }
 
 macro_rules! mod_items {
-    ( $( $typ:ident $(<$generic_params:ident>)? in $fld:ident -> $ast:ty ),+ $(,)? ) => {
+    ( $( $typ:ident in $fld:ident -> $ast:ty ),+ $(,)? ) => {
         #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
         pub enum ModItem {
             $(
@@ -690,16 +517,6 @@ macro_rules! mod_items {
             )+
         }
 
-        #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
-        pub enum GenericModItem {
-            $(
-                $(
-                    #[cfg_attr(ignore_fragment, $generic_params)]
-                    $typ(FileItemTreeId<$typ>),
-                )?
-            )+
-        }
-
         impl ModItem {
             pub fn ast_id(&self, tree: &ItemTree) -> FileAstId<ast::Item> {
                 match self {
@@ -708,52 +525,12 @@ macro_rules! mod_items {
             }
         }
 
-        impl GenericModItem {
-            pub fn ast_id(&self, tree: &ItemTree) -> FileAstId<ast::AnyHasGenericParams> {
-                match self {
-                    $(
-                        $(
-                            #[cfg_attr(ignore_fragment, $generic_params)]
-                            GenericModItem::$typ(it) => tree[it.index()].ast_id().upcast(),
-                        )?
-                    )+
-                }
-            }
-        }
-
-        impl From<GenericModItem> for ModItem {
-            fn from(id: GenericModItem) -> ModItem {
-                match id {
-                    $(
-                        $(
-                            #[cfg_attr(ignore_fragment, $generic_params)]
-                            GenericModItem::$typ(id) => ModItem::$typ(id),
-                        )?
-                    )+
-                }
-            }
-        }
-
-        impl From<GenericModItem> for AttrOwner {
-            fn from(t: GenericModItem) -> AttrOwner {
-                AttrOwner::ModItem(t.into())
-            }
-        }
-
         $(
             impl From<FileItemTreeId<$typ>> for ModItem {
                 fn from(id: FileItemTreeId<$typ>) -> ModItem {
                     ModItem::$typ(id)
                 }
             }
-            $(
-                #[cfg_attr(ignore_fragment, $generic_params)]
-                impl From<FileItemTreeId<$typ>> for GenericModItem {
-                    fn from(id: FileItemTreeId<$typ>) -> GenericModItem {
-                        GenericModItem::$typ(id)
-                    }
-                }
-            )?
         )+
 
         $(
@@ -780,14 +557,6 @@ macro_rules! mod_items {
                     &self.data().$fld[index]
                 }
             }
-
-            $(
-                impl GenericsItemTreeNode for $typ {
-                    fn generic_params(&self) -> &Arc<GenericParams> {
-                        &self.$generic_params
-                    }
-                }
-            )?
         )+
     };
 }
@@ -796,16 +565,16 @@ mod_items! {
     Use in uses -> ast::Use,
     ExternCrate in extern_crates -> ast::ExternCrate,
     ExternBlock in extern_blocks -> ast::ExternBlock,
-    Function<explicit_generic_params> in functions -> ast::Fn,
-    Struct<generic_params> in structs -> ast::Struct,
-    Union<generic_params> in unions -> ast::Union,
-    Enum<generic_params> in enums -> ast::Enum,
+    Function in functions -> ast::Fn,
+    Struct in structs -> ast::Struct,
+    Union in unions -> ast::Union,
+    Enum in enums -> ast::Enum,
     Const in consts -> ast::Const,
     Static in statics -> ast::Static,
-    Trait<generic_params> in traits -> ast::Trait,
-    TraitAlias<generic_params> in trait_aliases -> ast::TraitAlias,
-    Impl<generic_params> in impls -> ast::Impl,
-    TypeAlias<generic_params> in type_aliases -> ast::TypeAlias,
+    Trait in traits -> ast::Trait,
+    TraitAlias in trait_aliases -> ast::TraitAlias,
+    Impl in impls -> ast::Impl,
+    TypeAlias in type_aliases -> ast::TypeAlias,
     Mod in mods -> ast::Module,
     MacroCall in macro_calls -> ast::MacroCall,
     MacroRules in macro_rules -> ast::MacroRules,
@@ -881,6 +650,34 @@ pub struct UseTree {
     kind: UseTreeKind,
 }
 
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ImportAlias {
+    /// Unnamed alias, as in `use Foo as _;`
+    Underscore,
+    /// Named alias
+    Alias(Name),
+}
+
+impl ImportAlias {
+    pub fn display(&self, edition: Edition) -> impl fmt::Display + '_ {
+        ImportAliasDisplay { value: self, edition }
+    }
+}
+
+struct ImportAliasDisplay<'a> {
+    value: &'a ImportAlias,
+    edition: Edition,
+}
+
+impl fmt::Display for ImportAliasDisplay<'_> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self.value {
+            ImportAlias::Underscore => f.write_str("_"),
+            ImportAlias::Alias(name) => fmt::Display::fmt(&name.display_no_db(self.edition), f),
+        }
+    }
+}
+
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub enum UseTreeKind {
     /// ```ignore
@@ -921,66 +718,30 @@ pub struct ExternBlock {
 pub struct Function {
     pub name: Name,
     pub visibility: RawVisibilityId,
-    pub explicit_generic_params: Arc<GenericParams>,
-    pub abi: Option<Symbol>,
-    pub params: Box<[Param]>,
-    pub ret_type: TypeRefId,
     pub ast_id: FileAstId<ast::Fn>,
-    pub types_map: Arc<TypesMap>,
-    pub(crate) flags: FnFlags,
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct Param {
-    pub type_ref: Option<TypeRefId>,
-}
-
-bitflags::bitflags! {
-    #[derive(Debug, Clone, Copy, Eq, PartialEq, Default)]
-    pub(crate) struct FnFlags: u16 {
-        const HAS_SELF_PARAM = 1 << 0;
-        const HAS_BODY = 1 << 1;
-        const HAS_DEFAULT_KW = 1 << 2;
-        const HAS_CONST_KW = 1 << 3;
-        const HAS_ASYNC_KW = 1 << 4;
-        const HAS_UNSAFE_KW = 1 << 5;
-        const IS_VARARGS = 1 << 6;
-        const HAS_SAFE_KW = 1 << 7;
-        /// The `#[target_feature]` attribute is necessary to check safety (with RFC 2396),
-        /// but keeping it for all functions will consume a lot of memory when there are
-        /// only very few functions with it. So we only encode its existence here, and lookup
-        /// it if needed.
-        const HAS_TARGET_FEATURE = 1 << 8;
-        const DEPRECATED_SAFE_2024 = 1 << 9;
-    }
 }
 
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub struct Struct {
     pub name: Name,
     pub visibility: RawVisibilityId,
-    pub generic_params: Arc<GenericParams>,
     pub fields: Box<[Field]>,
     pub shape: FieldsShape,
     pub ast_id: FileAstId<ast::Struct>,
-    pub types_map: Arc<TypesMap>,
 }
 
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub struct Union {
     pub name: Name,
     pub visibility: RawVisibilityId,
-    pub generic_params: Arc<GenericParams>,
     pub fields: Box<[Field]>,
     pub ast_id: FileAstId<ast::Union>,
-    pub types_map: Arc<TypesMap>,
 }
 
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub struct Enum {
     pub name: Name,
     pub visibility: RawVisibilityId,
-    pub generic_params: Arc<GenericParams>,
     pub variants: Range<FileItemTreeId<Variant>>,
     pub ast_id: FileAstId<ast::Enum>,
 }
@@ -991,7 +752,6 @@ pub struct Variant {
     pub fields: Box<[Field]>,
     pub shape: FieldsShape,
     pub ast_id: FileAstId<ast::Variant>,
-    pub types_map: Arc<TypesMap>,
 }
 
 #[derive(Debug, Copy, Clone, PartialEq, Eq)]
@@ -1001,12 +761,38 @@ pub enum FieldsShape {
     Unit,
 }
 
+/// Visibility of an item, not yet resolved.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum RawVisibility {
+    /// `pub(in module)`, `pub(crate)` or `pub(super)`. Also private, which is
+    /// equivalent to `pub(self)`.
+    Module(Interned<ModPath>, VisibilityExplicitness),
+    /// `pub`.
+    Public,
+}
+
+/// Whether the item was imported through an explicit `pub(crate) use` or just a `use` without
+/// visibility.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum VisibilityExplicitness {
+    Explicit,
+    Implicit,
+}
+
+impl VisibilityExplicitness {
+    pub fn is_explicit(&self) -> bool {
+        matches!(self, Self::Explicit)
+    }
+}
+
+// FIXME: Remove this from item tree?
 /// A single field of an enum variant or struct
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct Field {
     pub name: Name,
-    pub type_ref: TypeRefId,
     pub visibility: RawVisibilityId,
+    // FIXME: Not an item tree property
+    pub is_unsafe: bool,
 }
 
 #[derive(Debug, Clone, Eq, PartialEq)]
@@ -1014,32 +800,20 @@ pub struct Const {
     /// `None` for `const _: () = ();`
     pub name: Option<Name>,
     pub visibility: RawVisibilityId,
-    pub type_ref: TypeRefId,
     pub ast_id: FileAstId<ast::Const>,
-    pub has_body: bool,
-    pub types_map: Arc<TypesMap>,
 }
 
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub struct Static {
     pub name: Name,
     pub visibility: RawVisibilityId,
-    // TODO: use bitflags when we have more flags
-    pub mutable: bool,
-    pub has_safe_kw: bool,
-    pub has_unsafe_kw: bool,
-    pub type_ref: TypeRefId,
     pub ast_id: FileAstId<ast::Static>,
-    pub types_map: Arc<TypesMap>,
 }
 
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub struct Trait {
     pub name: Name,
     pub visibility: RawVisibilityId,
-    pub generic_params: Arc<GenericParams>,
-    pub is_auto: bool,
-    pub is_unsafe: bool,
     pub items: Box<[AssocItem]>,
     pub ast_id: FileAstId<ast::Trait>,
 }
@@ -1048,32 +822,20 @@ pub struct Trait {
 pub struct TraitAlias {
     pub name: Name,
     pub visibility: RawVisibilityId,
-    pub generic_params: Arc<GenericParams>,
     pub ast_id: FileAstId<ast::TraitAlias>,
 }
 
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub struct Impl {
-    pub generic_params: Arc<GenericParams>,
-    pub target_trait: Option<TraitRef>,
-    pub self_ty: TypeRefId,
-    pub is_negative: bool,
-    pub is_unsafe: bool,
     pub items: Box<[AssocItem]>,
     pub ast_id: FileAstId<ast::Impl>,
-    pub types_map: Arc<TypesMap>,
 }
 
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct TypeAlias {
     pub name: Name,
     pub visibility: RawVisibilityId,
-    /// Bounds on the type alias itself. Only valid in trait declarations, eg. `type Assoc: Copy;`.
-    pub bounds: Box<[TypeBound]>,
-    pub generic_params: Arc<GenericParams>,
-    pub type_ref: Option<TypeRefId>,
     pub ast_id: FileAstId<ast::TypeAlias>,
-    pub types_map: Arc<TypesMap>,
 }
 
 #[derive(Debug, Clone, Eq, PartialEq)]
@@ -1098,7 +860,7 @@ pub struct MacroCall {
     pub path: Interned<ModPath>,
     pub ast_id: FileAstId<ast::MacroCall>,
     pub expand_to: ExpandTo,
-    pub ctxt: SyntaxContextId,
+    pub ctxt: SyntaxContext,
 }
 
 #[derive(Debug, Clone, Eq, PartialEq)]
@@ -1126,7 +888,7 @@ impl Use {
     ) -> ast::UseTree {
         // Re-lower the AST item and get the source map.
         // Note: The AST unwraps are fine, since if they fail we should have never obtained `index`.
-        let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast());
+        let ast = InFile::new(file_id, self.ast_id).to_node(db);
         let ast_use_tree = ast.use_tree().expect("missing `use_tree`");
         let (_, source_map) = lower::lower_use_tree(db, ast_use_tree, &mut |range| {
             db.span_map(file_id).span_for_range(range).ctx
@@ -1143,7 +905,7 @@ impl Use {
     ) -> Arena<ast::UseTree> {
         // Re-lower the AST item and get the source map.
         // Note: The AST unwraps are fine, since if they fail we should have never obtained `index`.
-        let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast());
+        let ast = InFile::new(file_id, self.ast_id).to_node(db);
         let ast_use_tree = ast.use_tree().expect("missing `use_tree`");
         lower::lower_use_tree(db, ast_use_tree, &mut |range| {
             db.span_map(file_id).span_for_range(range).ctx
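// Illustrative sketch (not part of the patch): the new `item_tree_stats`
// above reports a handful of counts and falls back to defaults when the tree
// carries no data (the shared empty tree case). The types below are
// simplified stand-ins for `ItemTree`/`ItemTreeData`/`ItemTreeDataStats`,
// shown only to mirror that pattern.
#[derive(Default, Debug)]
struct StatsSketch {
    traits: usize,
    impls: usize,
    mods: usize,
}

struct TreeSketch {
    // `None` models the shared empty item tree from the diff;
    // the tuple stands in for the (traits, impls, mods) arenas.
    data: Option<(Vec<()>, Vec<()>, Vec<()>)>,
}

impl TreeSketch {
    fn stats(&self) -> StatsSketch {
        match &self.data {
            Some((traits, impls, mods)) => StatsSketch {
                traits: traits.len(),
                impls: impls.len(),
                mods: mods.len(),
            },
            None => StatsSketch::default(),
        }
    }
}

fn main() {
    let empty = TreeSketch { data: None };
    let small = TreeSketch { data: Some((vec![(), ()], vec![()], vec![])) };
    println!("{:?} {:?}", empty.stats(), small.stats());
}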
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs
index 71848845a84df..b490e1683c01f 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs
@@ -3,42 +3,29 @@
 use std::{cell::OnceCell, collections::hash_map::Entry};
 
 use hir_expand::{
-    mod_path::path,
+    HirFileId,
+    mod_path::PathKind,
     name::AsName,
     span_map::{SpanMap, SpanMapRef},
-    HirFileId,
 };
-use intern::{sym, Symbol};
+use intern::{Symbol, sym};
 use la_arena::Arena;
-use rustc_hash::FxHashMap;
-use span::{AstIdMap, SyntaxContextId};
-use stdx::thin_vec::ThinVec;
+use span::{AstIdMap, SyntaxContext};
 use syntax::{
-    ast::{self, HasModuleItem, HasName, HasTypeBounds, IsString},
     AstNode,
+    ast::{self, HasModuleItem, HasName, IsString},
 };
 use triomphe::Arc;
 
 use crate::{
     db::DefDatabase,
-    generics::{GenericParams, GenericParamsCollector, TypeParamData, TypeParamProvenance},
     item_tree::{
-        AssocItem, AttrOwner, Const, Either, Enum, ExternBlock, ExternCrate, Field, FieldParent,
-        FieldsShape, FileItemTreeId, FnFlags, Function, GenericArgs, GenericItemSourceMapBuilder,
-        GenericModItem, Idx, Impl, ImportAlias, Interned, ItemTree, ItemTreeData,
-        ItemTreeSourceMaps, ItemTreeSourceMapsBuilder, Macro2, MacroCall, MacroRules, Mod, ModItem,
-        ModKind, ModPath, Mutability, Name, Param, Path, Range, RawAttrs, RawIdx, RawVisibilityId,
-        Static, Struct, StructKind, Trait, TraitAlias, TypeAlias, Union, Use, UseTree, UseTreeKind,
-        Variant,
-    },
-    lower::LowerCtx,
-    path::AssociatedTypeBinding,
-    type_ref::{
-        LifetimeRef, PathId, RefType, TraitBoundModifier, TraitRef, TypeBound, TypeRef, TypeRefId,
-        TypesMap, TypesSourceMap,
+        AssocItem, AttrOwner, Const, Enum, ExternBlock, ExternCrate, Field, FieldParent,
+        FieldsShape, FileItemTreeId, Function, Idx, Impl, ImportAlias, Interned, ItemTree,
+        ItemTreeData, Macro2, MacroCall, MacroRules, Mod, ModItem, ModKind, ModPath, Name, Range,
+        RawAttrs, RawIdx, RawVisibility, RawVisibilityId, Static, Struct, StructKind, Trait,
+        TraitAlias, TypeAlias, Union, Use, UseTree, UseTreeKind, Variant, VisibilityExplicitness,
     },
-    visibility::RawVisibility,
-    LocalLifetimeParamId, LocalTypeOrConstParamId,
 };
 
 fn id<N>(index: Idx<N>) -> FileItemTreeId<N> {
@@ -49,11 +36,8 @@ pub(super) struct Ctx<'a> {
     db: &'a dyn DefDatabase,
     tree: ItemTree,
     source_ast_id_map: Arc<AstIdMap>,
-    generic_param_attr_buffer:
-        FxHashMap<Either<LocalTypeOrConstParamId, LocalLifetimeParamId>, RawAttrs>,
     span_map: OnceCell<SpanMap>,
     file: HirFileId,
-    source_maps: ItemTreeSourceMapsBuilder,
 }
 
 impl<'a> Ctx<'a> {
@@ -61,11 +45,9 @@ impl<'a> Ctx<'a> {
         Self {
             db,
             tree: ItemTree::default(),
-            generic_param_attr_buffer: FxHashMap::default(),
             source_ast_id_map: db.ast_id_map(file),
             file,
             span_map: OnceCell::new(),
-            source_maps: ItemTreeSourceMapsBuilder::default(),
         }
     }
 
@@ -73,39 +55,13 @@ impl<'a> Ctx<'a> {
         self.span_map.get_or_init(|| self.db.span_map(self.file)).as_ref()
     }
 
-    fn body_ctx<'b, 'c>(
-        &self,
-        types_map: &'b mut TypesMap,
-        types_source_map: &'b mut TypesSourceMap,
-    ) -> LowerCtx<'c>
-    where
-        'a: 'c,
-        'b: 'c,
-    {
-        // FIXME: This seems a bit wasteful that if `LowerCtx` will initialize the span map we won't benefit.
-        LowerCtx::with_span_map_cell(
-            self.db,
-            self.file,
-            self.span_map.clone(),
-            types_map,
-            types_source_map,
-        )
-    }
-
-    pub(super) fn lower_module_items(
-        mut self,
-        item_owner: &dyn HasModuleItem,
-    ) -> (ItemTree, ItemTreeSourceMaps) {
+    pub(super) fn lower_module_items(mut self, item_owner: &dyn HasModuleItem) -> ItemTree {
         self.tree.top_level =
             item_owner.items().flat_map(|item| self.lower_mod_item(&item)).collect();
-        assert!(self.generic_param_attr_buffer.is_empty());
-        (self.tree, self.source_maps.build())
+        self.tree
     }
 
-    pub(super) fn lower_macro_stmts(
-        mut self,
-        stmts: ast::MacroStmts,
-    ) -> (ItemTree, ItemTreeSourceMaps) {
+    pub(super) fn lower_macro_stmts(mut self, stmts: ast::MacroStmts) -> ItemTree {
         self.tree.top_level = stmts
             .statements()
             .filter_map(|stmt| {
@@ -135,14 +91,11 @@ impl<'a> Ctx<'a> {
             }
         }
 
-        assert!(self.generic_param_attr_buffer.is_empty());
-        (self.tree, self.source_maps.build())
+        self.tree
     }
 
-    pub(super) fn lower_block(mut self, block: &ast::BlockExpr) -> (ItemTree, ItemTreeSourceMaps) {
-        self.tree
-            .attrs
-            .insert(AttrOwner::TopLevel, RawAttrs::new(self.db.upcast(), block, self.span_map()));
+    pub(super) fn lower_block(mut self, block: &ast::BlockExpr) -> ItemTree {
+        self.tree.attrs.insert(AttrOwner::TopLevel, RawAttrs::new(self.db, block, self.span_map()));
         self.tree.top_level = block
             .statements()
             .filter_map(|stmt| match stmt {
@@ -164,8 +117,7 @@ impl<'a> Ctx<'a> {
             }
         }
 
-        assert!(self.generic_param_attr_buffer.is_empty());
-        (self.tree, self.source_maps.build())
+        self.tree
     }
 
     fn data(&mut self) -> &mut ItemTreeData {
@@ -192,7 +144,7 @@ impl<'a> Ctx<'a> {
             ast::Item::MacroDef(ast) => self.lower_macro_def(ast)?.into(),
             ast::Item::ExternBlock(ast) => self.lower_extern_block(ast).into(),
         };
-        let attrs = RawAttrs::new(self.db.upcast(), item, self.span_map());
+        let attrs = RawAttrs::new(self.db, item, self.span_map());
         self.add_attrs(mod_item.into(), attrs);
 
         Some(mod_item)
@@ -218,7 +170,7 @@ impl<'a> Ctx<'a> {
             ast::AssocItem::Const(ast) => Some(self.lower_const(ast).into()),
             ast::AssocItem::MacroCall(ast) => self.lower_macro_call(ast).map(Into::into),
         }?;
-        let attrs = RawAttrs::new(self.db.upcast(), item_node, self.span_map());
+        let attrs = RawAttrs::new(self.db, item_node, self.span_map());
         self.add_attrs(
             match item {
                 AssocItem::Function(it) => AttrOwner::ModItem(ModItem::Function(it)),
@@ -232,31 +184,13 @@ impl<'a> Ctx<'a> {
     }
 
     fn lower_struct(&mut self, strukt: &ast::Struct) -> Option<FileItemTreeId<Struct>> {
-        let (mut types_map, mut types_source_map) =
-            (TypesMap::default(), TypesSourceMap::default());
-        let mut body_ctx = self.body_ctx(&mut types_map, &mut types_source_map);
         let visibility = self.lower_visibility(strukt);
         let name = strukt.name()?.as_name();
         let ast_id = self.source_ast_id_map.ast_id(strukt);
-        let (fields, kind, attrs) = self.lower_fields(&strukt.kind(), &mut body_ctx);
-        let (generic_params, generics_source_map) =
-            self.lower_generic_params(HasImplicitSelf::No, strukt);
-        types_map.shrink_to_fit();
-        types_source_map.shrink_to_fit();
-        let res = Struct {
-            name,
-            visibility,
-            generic_params,
-            fields,
-            shape: kind,
-            ast_id,
-            types_map: Arc::new(types_map),
-        };
+        let (fields, kind, attrs) = self.lower_fields(&strukt.kind());
+        let res = Struct { name, visibility, fields, shape: kind, ast_id };
         let id = id(self.data().structs.alloc(res));
-        self.source_maps.structs.push(GenericItemSourceMapBuilder {
-            item: types_source_map,
-            generics: generics_source_map,
-        });
+
         for (idx, attr) in attrs {
             self.add_attrs(
                 AttrOwner::Field(
@@ -266,14 +200,12 @@ impl<'a> Ctx<'a> {
                 attr,
             );
         }
-        self.write_generic_params_attributes(id.into());
         Some(id)
     }
 
     fn lower_fields(
         &mut self,
         strukt_kind: &ast::StructKind,
-        body_ctx: &mut LowerCtx<'_>,
     ) -> (Box<[Field]>, FieldsShape, Vec<(usize, RawAttrs)>) {
         match strukt_kind {
             ast::StructKind::Record(it) => {
@@ -281,9 +213,9 @@ impl<'a> Ctx<'a> {
                 let mut attrs = vec![];
 
                 for (i, field) in it.fields().enumerate() {
-                    let data = self.lower_record_field(&field, body_ctx);
+                    let data = self.lower_record_field(&field);
                     fields.push(data);
-                    let attr = RawAttrs::new(self.db.upcast(), &field, self.span_map());
+                    let attr = RawAttrs::new(self.db, &field, self.span_map());
                     if !attr.is_empty() {
                         attrs.push((i, attr))
                     }
@@ -295,9 +227,9 @@ impl<'a> Ctx<'a> {
                 let mut attrs = vec![];
 
                 for (i, field) in it.fields().enumerate() {
-                    let data = self.lower_tuple_field(i, &field, body_ctx);
+                    let data = self.lower_tuple_field(i, &field);
                     fields.push(data);
-                    let attr = RawAttrs::new(self.db.upcast(), &field, self.span_map());
+                    let attr = RawAttrs::new(self.db, &field, self.span_map());
                     if !attr.is_empty() {
                         attrs.push((i, attr))
                     }
@@ -308,63 +240,32 @@ impl<'a> Ctx<'a> {
         }
     }
 
-    fn lower_record_field(
-        &mut self,
-        field: &ast::RecordField,
-        body_ctx: &mut LowerCtx<'_>,
-    ) -> Field {
+    fn lower_record_field(&mut self, field: &ast::RecordField) -> Field {
         let name = match field.name() {
             Some(name) => name.as_name(),
             None => Name::missing(),
         };
         let visibility = self.lower_visibility(field);
-        let type_ref = TypeRef::from_ast_opt(body_ctx, field.ty());
 
-        Field { name, type_ref, visibility }
+        Field { name, visibility, is_unsafe: field.unsafe_token().is_some() }
     }
 
-    fn lower_tuple_field(
-        &mut self,
-        idx: usize,
-        field: &ast::TupleField,
-        body_ctx: &mut LowerCtx<'_>,
-    ) -> Field {
+    fn lower_tuple_field(&mut self, idx: usize, field: &ast::TupleField) -> Field {
         let name = Name::new_tuple_field(idx);
         let visibility = self.lower_visibility(field);
-        let type_ref = TypeRef::from_ast_opt(body_ctx, field.ty());
-        Field { name, type_ref, visibility }
+        Field { name, visibility, is_unsafe: false }
     }
 
     fn lower_union(&mut self, union: &ast::Union) -> Option<FileItemTreeId<Union>> {
-        let (mut types_map, mut types_source_map) =
-            (TypesMap::default(), TypesSourceMap::default());
-        let mut body_ctx = self.body_ctx(&mut types_map, &mut types_source_map);
         let visibility = self.lower_visibility(union);
         let name = union.name()?.as_name();
         let ast_id = self.source_ast_id_map.ast_id(union);
         let (fields, _, attrs) = match union.record_field_list() {
-            Some(record_field_list) => {
-                self.lower_fields(&StructKind::Record(record_field_list), &mut body_ctx)
-            }
+            Some(record_field_list) => self.lower_fields(&StructKind::Record(record_field_list)),
             None => (Box::default(), FieldsShape::Record, Vec::default()),
         };
-        let (generic_params, generics_source_map) =
-            self.lower_generic_params(HasImplicitSelf::No, union);
-        types_map.shrink_to_fit();
-        types_source_map.shrink_to_fit();
-        let res = Union {
-            name,
-            visibility,
-            generic_params,
-            fields,
-            ast_id,
-            types_map: Arc::new(types_map),
-        };
+        let res = Union { name, visibility, fields, ast_id };
         let id = id(self.data().unions.alloc(res));
-        self.source_maps.unions.push(GenericItemSourceMapBuilder {
-            item: types_source_map,
-            generics: generics_source_map,
-        });
         for (idx, attr) in attrs {
             self.add_attrs(
                 AttrOwner::Field(
@@ -374,7 +275,6 @@ impl<'a> Ctx<'a> {
                 attr,
             );
         }
-        self.write_generic_params_attributes(id.into());
         Some(id)
     }
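
The callers of `lower_fields` in these hunks all follow the same pattern: the returned `Vec<(usize, RawAttrs)>` carries attributes only for the fields that actually have any, and the caller files them into the attribute table under `AttrOwner::Field(parent, index)`. A rough, self-contained model of that sparse storage scheme (every name below is invented):

```rust
use std::collections::HashMap;

type Attrs = Vec<String>;

// Lowering returns the field names plus `(index, attrs)` pairs only for the
// fields that carry attributes.
fn lower_fields(fields: &[(&str, Attrs)]) -> (Vec<String>, Vec<(usize, Attrs)>) {
    let mut names = Vec::new();
    let mut attrs = Vec::new();
    for (i, (name, field_attrs)) in fields.iter().enumerate() {
        names.push((*name).to_owned());
        if !field_attrs.is_empty() {
            attrs.push((i, field_attrs.clone()));
        }
    }
    (names, attrs)
}

fn main() {
    // The caller stores the collected attributes in a side table keyed by
    // (parent item, field index), mirroring `AttrOwner::Field(parent, idx)`.
    let mut attr_table: HashMap<(&str, usize), Attrs> = HashMap::new();
    let (fields, attrs) =
        lower_fields(&[("a", vec![]), ("b", vec!["#[cfg(test)]".to_owned()])]);
    for (idx, a) in attrs {
        attr_table.insert(("StructFoo", idx), a);
    }
    assert_eq!(fields.len(), 2);
    assert_eq!(attr_table.len(), 1);
}
```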
 
@@ -388,12 +288,8 @@ impl<'a> Ctx<'a> {
                 FileItemTreeId(self.next_variant_idx())..FileItemTreeId(self.next_variant_idx())
             }
         };
-        let (generic_params, generics_source_map) =
-            self.lower_generic_params(HasImplicitSelf::No, enum_);
-        let res = Enum { name, visibility, generic_params, variants, ast_id };
+        let res = Enum { name, visibility, variants, ast_id };
         let id = id(self.data().enums.alloc(res));
-        self.source_maps.enum_generics.push(generics_source_map);
-        self.write_generic_params_attributes(id.into());
         Some(id)
     }
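
`lower_enum` stores an enum's variants as a half-open range of arena indices: `lower_variants` allocates them contiguously, and a body-less enum gets the empty `next_variant_idx()..next_variant_idx()` range. A small self-contained model of that representation, using a plain `Vec` as the arena (all names invented):

```rust
use std::ops::Range;

struct Variant {
    name: String,
}

#[derive(Default)]
struct VariantArena {
    variants: Vec<Variant>,
}

impl VariantArena {
    fn next_variant_idx(&self) -> usize {
        self.variants.len()
    }

    // Allocate an enum's variants back-to-back and return the covering range.
    fn lower_variants(&mut self, names: &[&str]) -> Range<usize> {
        let start = self.next_variant_idx();
        for name in names {
            self.variants.push(Variant { name: (*name).to_owned() });
        }
        start..self.next_variant_idx()
    }
}

fn main() {
    let mut arena = VariantArena::default();
    let abc = arena.lower_variants(&["A", "B", "C"]);
    let empty = arena.lower_variants(&[]);
    assert_eq!(abc, 0..3);
    assert!(empty.is_empty());
    assert_eq!(arena.variants[1].name, "B");
}
```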
 
@@ -401,34 +297,25 @@ impl<'a> Ctx<'a> {
         let start = self.next_variant_idx();
         for variant in variants.variants() {
             let idx = self.lower_variant(&variant);
-            self.add_attrs(
-                id(idx).into(),
-                RawAttrs::new(self.db.upcast(), &variant, self.span_map()),
-            );
+            self.add_attrs(id(idx).into(), RawAttrs::new(self.db, &variant, self.span_map()));
         }
         let end = self.next_variant_idx();
         FileItemTreeId(start)..FileItemTreeId(end)
     }
 
     fn lower_variant(&mut self, variant: &ast::Variant) -> Idx<Variant> {
-        let (mut types_map, mut types_source_map) =
-            (TypesMap::default(), TypesSourceMap::default());
-        let mut body_ctx = self.body_ctx(&mut types_map, &mut types_source_map);
         let name = match variant.name() {
             Some(name) => name.as_name(),
             None => Name::missing(),
         };
-        let (fields, kind, attrs) = self.lower_fields(&variant.kind(), &mut body_ctx);
+        let (fields, kind, attrs) = self.lower_fields(&variant.kind());
         let ast_id = self.source_ast_id_map.ast_id(variant);
-        types_map.shrink_to_fit();
-        types_source_map.shrink_to_fit();
-        let res = Variant { name, fields, shape: kind, ast_id, types_map: Arc::new(types_map) };
+        let res = Variant { name, fields, shape: kind, ast_id };
         let id = self.data().variants.alloc(res);
-        self.source_maps.variants.push(types_source_map);
         for (idx, attr) in attrs {
             self.add_attrs(
                 AttrOwner::Field(
-                    FieldParent::Variant(FileItemTreeId(id)),
+                    FieldParent::EnumVariant(FileItemTreeId(id)),
                     Idx::from_raw(RawIdx::from_u32(idx as u32)),
                 ),
                 attr,
@@ -438,144 +325,14 @@ impl<'a> Ctx<'a> {
     }
 
     fn lower_function(&mut self, func: &ast::Fn) -> Option<FileItemTreeId<Function>> {
-        let (mut types_map, mut types_source_map) =
-            (TypesMap::default(), TypesSourceMap::default());
-        let mut body_ctx = self.body_ctx(&mut types_map, &mut types_source_map);
-
         let visibility = self.lower_visibility(func);
         let name = func.name()?.as_name();
 
-        let mut has_self_param = false;
-        let mut has_var_args = false;
-        let mut params = vec![];
-        let mut attrs = vec![];
-        let mut push_attr = |idx, attr: RawAttrs| {
-            if !attr.is_empty() {
-                attrs.push((idx, attr))
-            }
-        };
-        if let Some(param_list) = func.param_list() {
-            if let Some(self_param) = param_list.self_param() {
-                push_attr(
-                    params.len(),
-                    RawAttrs::new(self.db.upcast(), &self_param, self.span_map()),
-                );
-                let self_type = match self_param.ty() {
-                    Some(type_ref) => TypeRef::from_ast(&mut body_ctx, type_ref),
-                    None => {
-                        let self_type = body_ctx.alloc_type_ref_desugared(TypeRef::Path(
-                            Name::new_symbol_root(sym::Self_.clone()).into(),
-                        ));
-                        match self_param.kind() {
-                            ast::SelfParamKind::Owned => self_type,
-                            ast::SelfParamKind::Ref => body_ctx.alloc_type_ref_desugared(
-                                TypeRef::Reference(Box::new(RefType {
-                                    ty: self_type,
-                                    lifetime: self_param.lifetime().as_ref().map(LifetimeRef::new),
-                                    mutability: Mutability::Shared,
-                                })),
-                            ),
-                            ast::SelfParamKind::MutRef => body_ctx.alloc_type_ref_desugared(
-                                TypeRef::Reference(Box::new(RefType {
-                                    ty: self_type,
-                                    lifetime: self_param.lifetime().as_ref().map(LifetimeRef::new),
-                                    mutability: Mutability::Mut,
-                                })),
-                            ),
-                        }
-                    }
-                };
-                params.push(Param { type_ref: Some(self_type) });
-                has_self_param = true;
-            }
-            for param in param_list.params() {
-                push_attr(params.len(), RawAttrs::new(self.db.upcast(), &param, self.span_map()));
-                let param = match param.dotdotdot_token() {
-                    Some(_) => {
-                        has_var_args = true;
-                        Param { type_ref: None }
-                    }
-                    None => {
-                        let type_ref = TypeRef::from_ast_opt(&mut body_ctx, param.ty());
-                        Param { type_ref: Some(type_ref) }
-                    }
-                };
-                params.push(param);
-            }
-        }
-
-        let ret_type = match func.ret_type() {
-            Some(rt) => match rt.ty() {
-                Some(type_ref) => TypeRef::from_ast(&mut body_ctx, type_ref),
-                None if rt.thin_arrow_token().is_some() => body_ctx.alloc_error_type(),
-                None => body_ctx.alloc_type_ref_desugared(TypeRef::unit()),
-            },
-            None => body_ctx.alloc_type_ref_desugared(TypeRef::unit()),
-        };
-
-        let ret_type = if func.async_token().is_some() {
-            let future_impl = desugar_future_path(&mut body_ctx, ret_type);
-            let ty_bound = TypeBound::Path(future_impl, TraitBoundModifier::None);
-            body_ctx.alloc_type_ref_desugared(TypeRef::ImplTrait(ThinVec::from_iter([ty_bound])))
-        } else {
-            ret_type
-        };
-
-        let abi = func.abi().map(lower_abi);
-
         let ast_id = self.source_ast_id_map.ast_id(func);
 
-        let mut flags = FnFlags::default();
-        if func.body().is_some() {
-            flags |= FnFlags::HAS_BODY;
-        }
-        if has_self_param {
-            flags |= FnFlags::HAS_SELF_PARAM;
-        }
-        if func.default_token().is_some() {
-            flags |= FnFlags::HAS_DEFAULT_KW;
-        }
-        if func.const_token().is_some() {
-            flags |= FnFlags::HAS_CONST_KW;
-        }
-        if func.async_token().is_some() {
-            flags |= FnFlags::HAS_ASYNC_KW;
-        }
-        if func.unsafe_token().is_some() {
-            flags |= FnFlags::HAS_UNSAFE_KW;
-        }
-        if func.safe_token().is_some() {
-            flags |= FnFlags::HAS_SAFE_KW;
-        }
-        if has_var_args {
-            flags |= FnFlags::IS_VARARGS;
-        }
-
-        types_map.shrink_to_fit();
-        types_source_map.shrink_to_fit();
-        let (generic_params, generics_source_map) =
-            self.lower_generic_params(HasImplicitSelf::No, func);
-        let res = Function {
-            name,
-            visibility,
-            explicit_generic_params: generic_params,
-            abi,
-            params: params.into_boxed_slice(),
-            ret_type,
-            ast_id,
-            types_map: Arc::new(types_map),
-            flags,
-        };
+        let res = Function { name, visibility, ast_id };
 
         let id = id(self.data().functions.alloc(res));
-        self.source_maps.functions.push(GenericItemSourceMapBuilder {
-            item: types_source_map,
-            generics: generics_source_map,
-        });
-        for (idx, attr) in attrs {
-            self.add_attrs(AttrOwner::Param(id, Idx::from_raw(RawIdx::from_u32(idx as u32))), attr);
-        }
-        self.write_generic_params_attributes(id.into());
         Some(id)
     }
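
After this hunk, the item tree's `Function` keeps only the name, visibility and `ast_id`; the parameter list, return type, ABI and the old `FnFlags` are no longer lowered here. Presumably they are recomputed later from the syntax node reachable through `ast_id` when a full signature is needed. A hypothetical, self-contained sketch of that split (all names below are invented, not the crate's real API):

```rust
// Everything below is invented for illustration; it is not the crate's API.
#[derive(Debug)]
struct FunctionRecord {
    name: String,
    is_public: bool,
    ast_id: u32, // stable pointer back to the `ast::Fn` node
}

#[derive(Debug)]
struct FunctionSignature {
    params: Vec<String>,
    ret_type: String,
    is_unsafe: bool,
}

// In the real crate something like an on-demand query would presumably fill
// this role, re-reading the syntax node found through `ast_id`.
fn compute_signature(_record: &FunctionRecord) -> FunctionSignature {
    FunctionSignature { params: vec![], ret_type: "()".to_owned(), is_unsafe: false }
}

fn main() {
    let f = FunctionRecord { name: "main".to_owned(), is_public: false, ast_id: 0 };
    println!("{f:?} -> {:?}", compute_signature(&f));
}
```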
 
@@ -583,82 +340,27 @@ impl<'a> Ctx<'a> {
         &mut self,
         type_alias: &ast::TypeAlias,
     ) -> Option<FileItemTreeId<TypeAlias>> {
-        let (mut types_map, mut types_source_map) =
-            (TypesMap::default(), TypesSourceMap::default());
-        let mut body_ctx = self.body_ctx(&mut types_map, &mut types_source_map);
         let name = type_alias.name()?.as_name();
-        let type_ref = type_alias.ty().map(|it| TypeRef::from_ast(&mut body_ctx, it));
         let visibility = self.lower_visibility(type_alias);
-        let bounds = self.lower_type_bounds(type_alias, &mut body_ctx);
         let ast_id = self.source_ast_id_map.ast_id(type_alias);
-        let (generic_params, generics_source_map) =
-            self.lower_generic_params(HasImplicitSelf::No, type_alias);
-        types_map.shrink_to_fit();
-        types_source_map.shrink_to_fit();
-        let res = TypeAlias {
-            name,
-            visibility,
-            bounds,
-            generic_params,
-            type_ref,
-            ast_id,
-            types_map: Arc::new(types_map),
-        };
+        let res = TypeAlias { name, visibility, ast_id };
         let id = id(self.data().type_aliases.alloc(res));
-        self.source_maps.type_aliases.push(GenericItemSourceMapBuilder {
-            item: types_source_map,
-            generics: generics_source_map,
-        });
-        self.write_generic_params_attributes(id.into());
         Some(id)
     }
 
     fn lower_static(&mut self, static_: &ast::Static) -> Option<FileItemTreeId<Static>> {
-        let (mut types_map, mut types_source_map) =
-            (TypesMap::default(), TypesSourceMap::default());
-        let mut body_ctx = self.body_ctx(&mut types_map, &mut types_source_map);
         let name = static_.name()?.as_name();
-        let type_ref = TypeRef::from_ast_opt(&mut body_ctx, static_.ty());
         let visibility = self.lower_visibility(static_);
-        let mutable = static_.mut_token().is_some();
-        let has_safe_kw = static_.safe_token().is_some();
-        let has_unsafe_kw = static_.unsafe_token().is_some();
         let ast_id = self.source_ast_id_map.ast_id(static_);
-        types_map.shrink_to_fit();
-        types_source_map.shrink_to_fit();
-        let res = Static {
-            name,
-            visibility,
-            mutable,
-            type_ref,
-            ast_id,
-            has_safe_kw,
-            has_unsafe_kw,
-            types_map: Arc::new(types_map),
-        };
-        self.source_maps.statics.push(types_source_map);
+        let res = Static { name, visibility, ast_id };
         Some(id(self.data().statics.alloc(res)))
     }
 
     fn lower_const(&mut self, konst: &ast::Const) -> FileItemTreeId<Const> {
-        let (mut types_map, mut types_source_map) =
-            (TypesMap::default(), TypesSourceMap::default());
-        let mut body_ctx = self.body_ctx(&mut types_map, &mut types_source_map);
         let name = konst.name().map(|it| it.as_name());
-        let type_ref = TypeRef::from_ast_opt(&mut body_ctx, konst.ty());
         let visibility = self.lower_visibility(konst);
         let ast_id = self.source_ast_id_map.ast_id(konst);
-        types_map.shrink_to_fit();
-        types_source_map.shrink_to_fit();
-        let res = Const {
-            name,
-            visibility,
-            type_ref,
-            ast_id,
-            has_body: konst.body().is_some(),
-            types_map: Arc::new(types_map),
-        };
-        self.source_maps.consts.push(types_source_map);
+        let res = Const { name, visibility, ast_id };
         id(self.data().consts.alloc(res))
     }
 
@@ -687,8 +389,6 @@ impl<'a> Ctx<'a> {
         let name = trait_def.name()?.as_name();
         let visibility = self.lower_visibility(trait_def);
         let ast_id = self.source_ast_id_map.ast_id(trait_def);
-        let is_auto = trait_def.auto_token().is_some();
-        let is_unsafe = trait_def.unsafe_token().is_some();
 
         let items = trait_def
             .assoc_item_list()
@@ -697,12 +397,8 @@ impl<'a> Ctx<'a> {
             .filter_map(|item_node| self.lower_assoc_item(&item_node))
             .collect();
 
-        let (generic_params, generics_source_map) =
-            self.lower_generic_params(HasImplicitSelf::Yes(trait_def.type_bound_list()), trait_def);
-        let def = Trait { name, visibility, generic_params, is_auto, is_unsafe, items, ast_id };
+        let def = Trait { name, visibility, items, ast_id };
         let id = id(self.data().traits.alloc(def));
-        self.source_maps.trait_generics.push(generics_source_map);
-        self.write_generic_params_attributes(id.into());
         Some(id)
     }
 
@@ -713,32 +409,14 @@ impl<'a> Ctx<'a> {
         let name = trait_alias_def.name()?.as_name();
         let visibility = self.lower_visibility(trait_alias_def);
         let ast_id = self.source_ast_id_map.ast_id(trait_alias_def);
-        let (generic_params, generics_source_map) = self.lower_generic_params(
-            HasImplicitSelf::Yes(trait_alias_def.type_bound_list()),
-            trait_alias_def,
-        );
 
-        let alias = TraitAlias { name, visibility, generic_params, ast_id };
+        let alias = TraitAlias { name, visibility, ast_id };
         let id = id(self.data().trait_aliases.alloc(alias));
-        self.source_maps.trait_alias_generics.push(generics_source_map);
-        self.write_generic_params_attributes(id.into());
         Some(id)
     }
 
     fn lower_impl(&mut self, impl_def: &ast::Impl) -> FileItemTreeId<Impl> {
-        let (mut types_map, mut types_source_map) =
-            (TypesMap::default(), TypesSourceMap::default());
-        let mut body_ctx = self.body_ctx(&mut types_map, &mut types_source_map);
-
         let ast_id = self.source_ast_id_map.ast_id(impl_def);
-        // FIXME: If trait lowering fails, due to a non PathType for example, we treat this impl
-        // as if it was an non-trait impl. Ideally we want to create a unique missing ref that only
-        // equals itself.
-        let self_ty = TypeRef::from_ast_opt(&mut body_ctx, impl_def.self_ty());
-        let target_trait = impl_def.trait_().and_then(|tr| TraitRef::from_ast(&mut body_ctx, tr));
-        let is_negative = impl_def.excl_token().is_some();
-        let is_unsafe = impl_def.unsafe_token().is_some();
-
         // We cannot use `assoc_items()` here as that does not include macro calls.
         let items = impl_def
             .assoc_item_list()
@@ -748,27 +426,8 @@ impl<'a> Ctx<'a> {
             .collect();
         // Note that unlike traits, trait impls don't get an implicit `Self` parameter: in an
         // impl, `Self` is a type alias rather than a type parameter, so the resolver handles it.
-        let (generic_params, generics_source_map) =
-            self.lower_generic_params(HasImplicitSelf::No, impl_def);
-        types_map.shrink_to_fit();
-        types_source_map.shrink_to_fit();
-        let res = Impl {
-            generic_params,
-            target_trait,
-            self_ty,
-            is_negative,
-            is_unsafe,
-            items,
-            ast_id,
-            types_map: Arc::new(types_map),
-        };
-        let id = id(self.data().impls.alloc(res));
-        self.source_maps.impls.push(GenericItemSourceMapBuilder {
-            item: types_source_map,
-            generics: generics_source_map,
-        });
-        self.write_generic_params_attributes(id.into());
-        id
+        let res = Impl { items, ast_id };
+        id(self.data().impls.alloc(res))
     }
 
     fn lower_use(&mut self, use_item: &ast::Use) -> Option<FileItemTreeId<Use>> {
@@ -801,7 +460,7 @@ impl<'a> Ctx<'a> {
         let span_map = self.span_map();
         let path = m.path()?;
         let range = path.syntax().text_range();
-        let path = Interned::new(ModPath::from_src(self.db.upcast(), path, &mut |range| {
+        let path = Interned::new(ModPath::from_src(self.db, path, &mut |range| {
             span_map.span_for_range(range).ctx
         })?);
         let ast_id = self.source_ast_id_map.ast_id(m);
@@ -844,7 +503,7 @@ impl<'a> Ctx<'a> {
                         ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(ty)?.into(),
                         ast::ExternItem::MacroCall(call) => self.lower_macro_call(call)?.into(),
                     };
-                    let attrs = RawAttrs::new(self.db.upcast(), &item, self.span_map());
+                    let attrs = RawAttrs::new(self.db, &item, self.span_map());
                     self.add_attrs(mod_item.into(), attrs);
                     Some(mod_item)
                 })
@@ -855,75 +514,8 @@ impl<'a> Ctx<'a> {
         id(self.data().extern_blocks.alloc(res))
     }
 
-    fn write_generic_params_attributes(&mut self, parent: GenericModItem) {
-        self.generic_param_attr_buffer.drain().for_each(|(idx, attrs)| {
-            self.tree.attrs.insert(
-                match idx {
-                    Either::Left(id) => AttrOwner::TypeOrConstParamData(parent, id),
-                    Either::Right(id) => AttrOwner::LifetimeParamData(parent, id),
-                },
-                attrs,
-            );
-        })
-    }
-
-    fn lower_generic_params(
-        &mut self,
-        has_implicit_self: HasImplicitSelf,
-        node: &dyn ast::HasGenericParams,
-    ) -> (Arc<GenericParams>, TypesSourceMap) {
-        let (mut types_map, mut types_source_map) =
-            (TypesMap::default(), TypesSourceMap::default());
-        let mut body_ctx = self.body_ctx(&mut types_map, &mut types_source_map);
-        debug_assert!(self.generic_param_attr_buffer.is_empty(),);
-        body_ctx.take_impl_traits_bounds();
-        let mut generics = GenericParamsCollector::default();
-
-        if let HasImplicitSelf::Yes(bounds) = has_implicit_self {
-            // Traits and trait aliases get the Self type as an implicit first type parameter.
-            generics.type_or_consts.alloc(
-                TypeParamData {
-                    name: Some(Name::new_symbol_root(sym::Self_.clone())),
-                    default: None,
-                    provenance: TypeParamProvenance::TraitSelf,
-                }
-                .into(),
-            );
-            // add super traits as bounds on Self
-            // i.e., `trait Foo: Bar` is equivalent to `trait Foo where Self: Bar`
-            let bound_target = Either::Left(body_ctx.alloc_type_ref_desugared(TypeRef::Path(
-                Name::new_symbol_root(sym::Self_.clone()).into(),
-            )));
-            generics.fill_bounds(&mut body_ctx, bounds, bound_target);
-        }
-
-        let span_map = body_ctx.span_map().clone();
-        let add_param_attrs = |item: Either<LocalTypeOrConstParamId, LocalLifetimeParamId>,
-                               param| {
-            let attrs = RawAttrs::new(self.db.upcast(), &param, span_map.as_ref());
-            debug_assert!(self.generic_param_attr_buffer.insert(item, attrs).is_none());
-        };
-        generics.fill(&mut body_ctx, node, add_param_attrs);
-
-        let generics = generics.finish(types_map, &mut types_source_map);
-        (generics, types_source_map)
-    }
-
-    fn lower_type_bounds(
-        &mut self,
-        node: &dyn ast::HasTypeBounds,
-        body_ctx: &mut LowerCtx<'_>,
-    ) -> Box<[TypeBound]> {
-        match node.type_bound_list() {
-            Some(bound_list) => {
-                bound_list.bounds().map(|it| TypeBound::from_ast(body_ctx, it)).collect()
-            }
-            None => Box::default(),
-        }
-    }
-
     fn lower_visibility(&mut self, item: &dyn ast::HasVisibility) -> RawVisibilityId {
-        let vis = RawVisibility::from_ast(self.db, item.visibility(), &mut |range| {
+        let vis = visibility_from_ast(self.db, item.visibility(), &mut |range| {
             self.span_map().span_for_range(range).ctx
         });
         self.data().vis.alloc(vis)
@@ -936,33 +528,11 @@ impl<'a> Ctx<'a> {
     }
 }
 
-fn desugar_future_path(ctx: &mut LowerCtx<'_>, orig: TypeRefId) -> PathId {
-    let path = path![core::future::Future];
-    let mut generic_args: Vec<_> =
-        std::iter::repeat(None).take(path.segments().len() - 1).collect();
-    let binding = AssociatedTypeBinding {
-        name: Name::new_symbol_root(sym::Output.clone()),
-        args: None,
-        type_ref: Some(orig),
-        bounds: Box::default(),
-    };
-    generic_args.push(Some(GenericArgs { bindings: Box::new([binding]), ..GenericArgs::empty() }));
-
-    let path = Path::from_known_path(path, generic_args);
-    PathId::from_type_ref_unchecked(ctx.alloc_type_ref_desugared(TypeRef::Path(path)))
-}
-
-enum HasImplicitSelf {
-    /// Inner list is a type bound list for the implicit `Self`.
-    Yes(Option<ast::TypeBoundList>),
-    No,
-}
-
 fn lower_abi(abi: ast::Abi) -> Symbol {
     match abi.abi_string() {
         Some(tok) => Symbol::intern(tok.text_without_quotes()),
         // A bare `extern` defaults to `extern "C"`.
-        _ => sym::C.clone(),
+        _ => sym::C,
     }
 }
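
`lower_abi` stays behind and still applies the defaulting rule: an `extern` without an ABI string is lowered as `extern "C"`. A tiny self-contained illustration, with plain strings standing in for `Symbol::intern` and `text_without_quotes`:

```rust
// Plain strings stand in for `Symbol::intern` and `text_without_quotes`.
fn lower_abi(abi_string: Option<&str>) -> String {
    match abi_string {
        Some(tok) => tok.trim_matches('"').to_owned(),
        // A bare `extern` defaults to `extern "C"`.
        None => "C".to_owned(),
    }
}

fn main() {
    assert_eq!(lower_abi(None), "C");
    assert_eq!(lower_abi(Some("\"system\"")), "system");
}
```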
 
@@ -975,7 +545,7 @@ impl UseTreeLowering<'_> {
     fn lower_use_tree(
         &mut self,
         tree: ast::UseTree,
-        span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContextId,
+        span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContext,
     ) -> Option<UseTree> {
         if let Some(use_tree_list) = tree.use_tree_list() {
             let prefix = match tree.path() {
@@ -984,7 +554,7 @@ impl UseTreeLowering<'_> {
                 // E.g. `use something::{inner}` (prefix is `None`, path is `something`)
                 // or `use something::{path::{inner::{innerer}}}` (prefix is `something::path`, path is `inner`)
                 Some(path) => {
-                    match ModPath::from_src(self.db.upcast(), path, span_for_range) {
+                    match ModPath::from_src(self.db, path, span_for_range) {
                         Some(it) => Some(it),
                         None => return None, // FIXME: report errors somewhere
                     }
@@ -1005,7 +575,7 @@ impl UseTreeLowering<'_> {
         } else {
             let is_glob = tree.star_token().is_some();
             let path = match tree.path() {
-                Some(path) => Some(ModPath::from_src(self.db.upcast(), path, span_for_range)?),
+                Some(path) => Some(ModPath::from_src(self.db, path, span_for_range)?),
                 None => None,
             };
             let alias = tree.rename().map(|a| {
@@ -1042,9 +612,38 @@ impl UseTreeLowering<'_> {
 pub(crate) fn lower_use_tree(
     db: &dyn DefDatabase,
     tree: ast::UseTree,
-    span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContextId,
+    span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContext,
 ) -> Option<(UseTree, Arena<ast::UseTree>)> {
     let mut lowering = UseTreeLowering { db, mapping: Arena::new() };
     let tree = lowering.lower_use_tree(tree, span_for_range)?;
     Some((tree, lowering.mapping))
 }
+
+fn private_vis() -> RawVisibility {
+    RawVisibility::Module(
+        Interned::new(ModPath::from_kind(PathKind::SELF)),
+        VisibilityExplicitness::Implicit,
+    )
+}
+
+fn visibility_from_ast(
+    db: &dyn DefDatabase,
+    node: Option<ast::Visibility>,
+    span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContext,
+) -> RawVisibility {
+    let Some(node) = node else { return private_vis() };
+    let path = match node.kind() {
+        ast::VisibilityKind::In(path) => {
+            let path = ModPath::from_src(db, path, span_for_range);
+            match path {
+                None => return private_vis(),
+                Some(path) => path,
+            }
+        }
+        ast::VisibilityKind::PubCrate => ModPath::from_kind(PathKind::Crate),
+        ast::VisibilityKind::PubSuper => ModPath::from_kind(PathKind::Super(1)),
+        ast::VisibilityKind::PubSelf => ModPath::from_kind(PathKind::SELF),
+        ast::VisibilityKind::Pub => return RawVisibility::Public,
+    };
+    RawVisibility::Module(Interned::new(path), VisibilityExplicitness::Explicit)
+}
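
The new `private_vis` and `visibility_from_ast` helpers replace the earlier call to `RawVisibility::from_ast` in `lower_visibility`: every written visibility is normalised to either `Public` or `Module(path, Explicit)`, and a missing visibility becomes the implicit `pub(self)`. A self-contained model of that mapping (the enums below are invented stand-ins for `ast::VisibilityKind` and `RawVisibility`):

```rust
#[derive(Debug, PartialEq)]
enum Explicitness {
    Explicit,
    Implicit,
}

#[derive(Debug, PartialEq)]
enum Vis {
    Public,
    Module(String, Explicitness),
}

enum VisSyntax {
    Missing,       // nothing written on the item
    Pub,           // `pub`
    PubCrate,      // `pub(crate)`
    PubSuper,      // `pub(super)`
    PubSelf,       // `pub(self)`
    PubIn(String), // `pub(in some::path)`
}

fn lower_visibility(node: VisSyntax) -> Vis {
    // Mirrors `private_vis`: no visibility written means implicit `pub(self)`.
    let private = || Vis::Module("self".to_owned(), Explicitness::Implicit);
    match node {
        VisSyntax::Missing => private(),
        VisSyntax::Pub => Vis::Public,
        VisSyntax::PubCrate => Vis::Module("crate".into(), Explicitness::Explicit),
        VisSyntax::PubSuper => Vis::Module("super".into(), Explicitness::Explicit),
        VisSyntax::PubSelf => Vis::Module("self".into(), Explicitness::Explicit),
        VisSyntax::PubIn(path) => Vis::Module(path, Explicitness::Explicit),
    }
}

fn main() {
    assert_eq!(
        lower_visibility(VisSyntax::Missing),
        Vis::Module("self".to_owned(), Explicitness::Implicit)
    );
    assert_eq!(lower_visibility(VisSyntax::Pub), Vis::Public);
    assert_eq!(
        lower_visibility(VisSyntax::PubIn("crate::detail".to_owned())),
        Vis::Module("crate::detail".to_owned(), Explicitness::Explicit)
    );
}
```

Note that the real `visibility_from_ast` additionally falls back to `private_vis()` when the `pub(in path)` path fails to lower; the model above omits that error path.
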
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
index 70bf2f13c88a1..47c6eb13293f5 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
@@ -6,16 +6,12 @@ use la_arena::{Idx, RawIdx};
 use span::{Edition, ErasedFileAstId};
 
 use crate::{
-    generics::{TypeOrConstParamData, WherePredicate, WherePredicateTypeTarget},
     item_tree::{
         AttrOwner, Const, DefDatabase, Enum, ExternBlock, ExternCrate, Field, FieldParent,
-        FieldsShape, FileItemTreeId, FnFlags, Function, GenericModItem, GenericParams, Impl,
-        ItemTree, Macro2, MacroCall, MacroRules, Mod, ModItem, ModKind, Param, Path, RawAttrs,
-        RawVisibilityId, Static, Struct, Trait, TraitAlias, TypeAlias, TypeBound, Union, Use,
-        UseTree, UseTreeKind, Variant,
+        FieldsShape, FileItemTreeId, Function, Impl, ItemTree, Macro2, MacroCall, MacroRules, Mod,
+        ModItem, ModKind, RawAttrs, RawVisibilityId, Static, Struct, Trait, TraitAlias, TypeAlias,
+        Union, Use, UseTree, UseTreeKind, Variant,
     },
-    pretty::{print_path, print_type_bounds, print_type_ref},
-    type_ref::{TypeRefId, TypesMap},
     visibility::RawVisibility,
 };
 
@@ -100,7 +96,7 @@ impl Printer<'_> {
                 self,
                 "#{}[{}{}]{}",
                 inner,
-                attr.path.display(self.db.upcast(), self.edition),
+                attr.path.display(self.db, self.edition),
                 attr.input.as_ref().map(|it| it.to_string()).unwrap_or_default(),
                 separated_by,
             );
@@ -116,34 +112,30 @@ impl Printer<'_> {
     fn print_visibility(&mut self, vis: RawVisibilityId) {
         match &self.tree[vis] {
             RawVisibility::Module(path, _expl) => {
-                w!(self, "pub({}) ", path.display(self.db.upcast(), self.edition))
+                w!(self, "pub({}) ", path.display(self.db, self.edition))
             }
             RawVisibility::Public => w!(self, "pub "),
         };
     }
 
-    fn print_fields(
-        &mut self,
-        parent: FieldParent,
-        kind: FieldsShape,
-        fields: &[Field],
-        map: &TypesMap,
-    ) {
+    fn print_fields(&mut self, parent: FieldParent, kind: FieldsShape, fields: &[Field]) {
         let edition = self.edition;
         match kind {
             FieldsShape::Record => {
                 self.whitespace();
                 w!(self, "{{");
                 self.indented(|this| {
-                    for (idx, Field { name, type_ref, visibility }) in fields.iter().enumerate() {
+                    for (idx, Field { name, visibility, is_unsafe }) in fields.iter().enumerate() {
                         this.print_attrs_of(
                             AttrOwner::Field(parent, Idx::from_raw(RawIdx::from(idx as u32))),
                             "\n",
                         );
                         this.print_visibility(*visibility);
-                        w!(this, "{}: ", name.display(self.db.upcast(), edition));
-                        this.print_type_ref(*type_ref, map);
-                        wln!(this, ",");
+                        if *is_unsafe {
+                            w!(this, "unsafe ");
+                        }
+
+                        wln!(this, "{},", name.display(self.db, edition));
                     }
                 });
                 w!(self, "}}");
@@ -151,15 +143,16 @@ impl Printer<'_> {
             FieldsShape::Tuple => {
                 w!(self, "(");
                 self.indented(|this| {
-                    for (idx, Field { name, type_ref, visibility }) in fields.iter().enumerate() {
+                    for (idx, Field { name, visibility, is_unsafe }) in fields.iter().enumerate() {
                         this.print_attrs_of(
                             AttrOwner::Field(parent, Idx::from_raw(RawIdx::from(idx as u32))),
                             "\n",
                         );
                         this.print_visibility(*visibility);
-                        w!(this, "{}: ", name.display(self.db.upcast(), edition));
-                        this.print_type_ref(*type_ref, map);
-                        wln!(this, ",");
+                        if *is_unsafe {
+                            w!(this, "unsafe ");
+                        }
+                        wln!(this, "{},", name.display(self.db, edition));
                     }
                 });
                 w!(self, ")");
@@ -168,49 +161,23 @@ impl Printer<'_> {
         }
     }
 
-    fn print_fields_and_where_clause(
-        &mut self,
-        parent: FieldParent,
-        kind: FieldsShape,
-        fields: &[Field],
-        params: &GenericParams,
-        map: &TypesMap,
-    ) {
-        match kind {
-            FieldsShape::Record => {
-                if self.print_where_clause(params) {
-                    wln!(self);
-                }
-                self.print_fields(parent, kind, fields, map);
-            }
-            FieldsShape::Unit => {
-                self.print_where_clause(params);
-                self.print_fields(parent, kind, fields, map);
-            }
-            FieldsShape::Tuple => {
-                self.print_fields(parent, kind, fields, map);
-                self.print_where_clause(params);
-            }
-        }
-    }
-
     fn print_use_tree(&mut self, use_tree: &UseTree) {
         match &use_tree.kind {
             UseTreeKind::Single { path, alias } => {
-                w!(self, "{}", path.display(self.db.upcast(), self.edition));
+                w!(self, "{}", path.display(self.db, self.edition));
                 if let Some(alias) = alias {
                     w!(self, " as {}", alias.display(self.edition));
                 }
             }
             UseTreeKind::Glob { path } => {
                 if let Some(path) = path {
-                    w!(self, "{}::", path.display(self.db.upcast(), self.edition));
+                    w!(self, "{}::", path.display(self.db, self.edition));
                 }
                 w!(self, "*");
             }
             UseTreeKind::Prefixed { prefix, list } => {
                 if let Some(prefix) = prefix {
-                    w!(self, "{}::", prefix.display(self.db.upcast(), self.edition));
+                    w!(self, "{}::", prefix.display(self.db, self.edition));
                 }
                 w!(self, "{{");
                 for (i, tree) in list.iter().enumerate() {
@@ -240,7 +207,7 @@ impl Printer<'_> {
                 let ExternCrate { name, alias, visibility, ast_id } = &self.tree[it];
                 self.print_ast_id(ast_id.erase());
                 self.print_visibility(*visibility);
-                w!(self, "extern crate {}", name.display(self.db.upcast(), self.edition));
+                w!(self, "extern crate {}", name.display(self.db, self.edition));
                 if let Some(alias) = alias {
                     w!(self, " as {}", alias.display(self.edition));
                 }
@@ -262,89 +229,17 @@ impl Printer<'_> {
                 wln!(self, "}}");
             }
             ModItem::Function(it) => {
-                let Function {
-                    name,
-                    visibility,
-                    explicit_generic_params,
-                    abi,
-                    params,
-                    ret_type,
-                    ast_id,
-                    types_map,
-                    flags,
-                } = &self.tree[it];
+                let Function { name, visibility, ast_id } = &self.tree[it];
                 self.print_ast_id(ast_id.erase());
                 self.print_visibility(*visibility);
-                if flags.contains(FnFlags::HAS_DEFAULT_KW) {
-                    w!(self, "default ");
-                }
-                if flags.contains(FnFlags::HAS_CONST_KW) {
-                    w!(self, "const ");
-                }
-                if flags.contains(FnFlags::HAS_ASYNC_KW) {
-                    w!(self, "async ");
-                }
-                if flags.contains(FnFlags::HAS_UNSAFE_KW) {
-                    w!(self, "unsafe ");
-                }
-                if flags.contains(FnFlags::HAS_SAFE_KW) {
-                    w!(self, "safe ");
-                }
-                if let Some(abi) = abi {
-                    w!(self, "extern \"{}\" ", abi);
-                }
-                w!(self, "fn {}", name.display(self.db.upcast(), self.edition));
-                self.print_generic_params(explicit_generic_params, it.into());
-                w!(self, "(");
-                if !params.is_empty() {
-                    self.indented(|this| {
-                        for (idx, Param { type_ref }) in params.iter().enumerate() {
-                            this.print_attrs_of(
-                                AttrOwner::Param(it, Idx::from_raw(RawIdx::from(idx as u32))),
-                                "\n",
-                            );
-                            if idx == 0 && flags.contains(FnFlags::HAS_SELF_PARAM) {
-                                w!(this, "self: ");
-                            }
-                            if let Some(type_ref) = type_ref {
-                                this.print_type_ref(*type_ref, types_map);
-                            } else {
-                                wln!(this, "...");
-                            }
-                            wln!(this, ",");
-                        }
-                    });
-                }
-                w!(self, ") -> ");
-                self.print_type_ref(*ret_type, types_map);
-                self.print_where_clause(explicit_generic_params);
-                if flags.contains(FnFlags::HAS_BODY) {
-                    wln!(self, " {{ ... }}");
-                } else {
-                    wln!(self, ";");
-                }
+                wln!(self, "fn {};", name.display(self.db, self.edition));
             }
             ModItem::Struct(it) => {
-                let Struct {
-                    visibility,
-                    name,
-                    fields,
-                    shape: kind,
-                    generic_params,
-                    ast_id,
-                    types_map,
-                } = &self.tree[it];
+                let Struct { visibility, name, fields, shape: kind, ast_id } = &self.tree[it];
                 self.print_ast_id(ast_id.erase());
                 self.print_visibility(*visibility);
-                w!(self, "struct {}", name.display(self.db.upcast(), self.edition));
-                self.print_generic_params(generic_params, it.into());
-                self.print_fields_and_where_clause(
-                    FieldParent::Struct(it),
-                    *kind,
-                    fields,
-                    generic_params,
-                    types_map,
-                );
+                w!(self, "struct {}", name.display(self.db, self.edition));
+                self.print_fields(FieldParent::Struct(it), *kind, fields);
                 if matches!(kind, FieldsShape::Record) {
                     wln!(self);
                 } else {
@@ -352,98 +247,56 @@ impl Printer<'_> {
                 }
             }
             ModItem::Union(it) => {
-                let Union { name, visibility, fields, generic_params, ast_id, types_map } =
-                    &self.tree[it];
+                let Union { name, visibility, fields, ast_id } = &self.tree[it];
                 self.print_ast_id(ast_id.erase());
                 self.print_visibility(*visibility);
-                w!(self, "union {}", name.display(self.db.upcast(), self.edition));
-                self.print_generic_params(generic_params, it.into());
-                self.print_fields_and_where_clause(
-                    FieldParent::Union(it),
-                    FieldsShape::Record,
-                    fields,
-                    generic_params,
-                    types_map,
-                );
+                w!(self, "union {}", name.display(self.db, self.edition));
+                self.print_fields(FieldParent::Union(it), FieldsShape::Record, fields);
                 wln!(self);
             }
             ModItem::Enum(it) => {
-                let Enum { name, visibility, variants, generic_params, ast_id } = &self.tree[it];
+                let Enum { name, visibility, variants, ast_id } = &self.tree[it];
                 self.print_ast_id(ast_id.erase());
                 self.print_visibility(*visibility);
-                w!(self, "enum {}", name.display(self.db.upcast(), self.edition));
-                self.print_generic_params(generic_params, it.into());
-                self.print_where_clause_and_opening_brace(generic_params);
+                w!(self, "enum {}", name.display(self.db, self.edition));
                 let edition = self.edition;
                 self.indented(|this| {
                     for variant in FileItemTreeId::range_iter(variants.clone()) {
-                        let Variant { name, fields, shape: kind, ast_id, types_map } =
-                            &this.tree[variant];
+                        let Variant { name, fields, shape: kind, ast_id } = &this.tree[variant];
                         this.print_ast_id(ast_id.erase());
                         this.print_attrs_of(variant, "\n");
-                        w!(this, "{}", name.display(self.db.upcast(), edition));
-                        this.print_fields(FieldParent::Variant(variant), *kind, fields, types_map);
+                        w!(this, "{}", name.display(self.db, edition));
+                        this.print_fields(FieldParent::EnumVariant(variant), *kind, fields);
                         wln!(this, ",");
                     }
                 });
                 wln!(self, "}}");
             }
             ModItem::Const(it) => {
-                let Const { name, visibility, type_ref, ast_id, has_body: _, types_map } =
-                    &self.tree[it];
+                let Const { name, visibility, ast_id } = &self.tree[it];
                 self.print_ast_id(ast_id.erase());
                 self.print_visibility(*visibility);
                 w!(self, "const ");
                 match name {
-                    Some(name) => w!(self, "{}", name.display(self.db.upcast(), self.edition)),
+                    Some(name) => w!(self, "{}", name.display(self.db, self.edition)),
                     None => w!(self, "_"),
                 }
-                w!(self, ": ");
-                self.print_type_ref(*type_ref, types_map);
                 wln!(self, " = _;");
             }
             ModItem::Static(it) => {
-                let Static {
-                    name,
-                    visibility,
-                    mutable,
-                    type_ref,
-                    ast_id,
-                    has_safe_kw,
-                    has_unsafe_kw,
-                    types_map,
-                } = &self.tree[it];
+                let Static { name, visibility, ast_id } = &self.tree[it];
                 self.print_ast_id(ast_id.erase());
                 self.print_visibility(*visibility);
-                if *has_safe_kw {
-                    w!(self, "safe ");
-                }
-                if *has_unsafe_kw {
-                    w!(self, "unsafe ");
-                }
                 w!(self, "static ");
-                if *mutable {
-                    w!(self, "mut ");
-                }
-                w!(self, "{}: ", name.display(self.db.upcast(), self.edition));
-                self.print_type_ref(*type_ref, types_map);
+                w!(self, "{}", name.display(self.db, self.edition));
                 w!(self, " = _;");
                 wln!(self);
             }
             ModItem::Trait(it) => {
-                let Trait { name, visibility, is_auto, is_unsafe, items, generic_params, ast_id } =
-                    &self.tree[it];
+                let Trait { name, visibility, items, ast_id } = &self.tree[it];
                 self.print_ast_id(ast_id.erase());
                 self.print_visibility(*visibility);
-                if *is_unsafe {
-                    w!(self, "unsafe ");
-                }
-                if *is_auto {
-                    w!(self, "auto ");
-                }
-                w!(self, "trait {}", name.display(self.db.upcast(), self.edition));
-                self.print_generic_params(generic_params, it.into());
-                self.print_where_clause_and_opening_brace(generic_params);
+                w!(self, "trait {} {{", name.display(self.db, self.edition));
                 self.indented(|this| {
                     for item in &**items {
                         this.print_mod_item((*item).into());
@@ -452,43 +305,15 @@ impl Printer<'_> {
                 wln!(self, "}}");
             }
             ModItem::TraitAlias(it) => {
-                let TraitAlias { name, visibility, generic_params, ast_id } = &self.tree[it];
+                let TraitAlias { name, visibility, ast_id } = &self.tree[it];
                 self.print_ast_id(ast_id.erase());
                 self.print_visibility(*visibility);
-                w!(self, "trait {}", name.display(self.db.upcast(), self.edition));
-                self.print_generic_params(generic_params, it.into());
-                w!(self, " = ");
-                self.print_where_clause(generic_params);
-                w!(self, ";");
-                wln!(self);
+                wln!(self, "trait {} = ..;", name.display(self.db, self.edition));
             }
             ModItem::Impl(it) => {
-                let Impl {
-                    target_trait,
-                    self_ty,
-                    is_negative,
-                    is_unsafe,
-                    items,
-                    generic_params,
-                    ast_id,
-                    types_map,
-                } = &self.tree[it];
+                let Impl { items, ast_id } = &self.tree[it];
                 self.print_ast_id(ast_id.erase());
-                if *is_unsafe {
-                    w!(self, "unsafe");
-                }
-                w!(self, "impl");
-                self.print_generic_params(generic_params, it.into());
-                w!(self, " ");
-                if *is_negative {
-                    w!(self, "!");
-                }
-                if let Some(tr) = target_trait {
-                    self.print_path(&types_map[tr.path], types_map);
-                    w!(self, " for ");
-                }
-                self.print_type_ref(*self_ty, types_map);
-                self.print_where_clause_and_opening_brace(generic_params);
+                w!(self, "impl {{");
                 self.indented(|this| {
                     for item in &**items {
                         this.print_mod_item((*item).into());
@@ -497,28 +322,10 @@ impl Printer<'_> {
                 wln!(self, "}}");
             }
             ModItem::TypeAlias(it) => {
-                let TypeAlias {
-                    name,
-                    visibility,
-                    bounds,
-                    type_ref,
-                    generic_params,
-                    ast_id,
-                    types_map,
-                } = &self.tree[it];
+                let TypeAlias { name, visibility, ast_id } = &self.tree[it];
                 self.print_ast_id(ast_id.erase());
                 self.print_visibility(*visibility);
-                w!(self, "type {}", name.display(self.db.upcast(), self.edition));
-                self.print_generic_params(generic_params, it.into());
-                if !bounds.is_empty() {
-                    w!(self, ": ");
-                    self.print_type_bounds(bounds, types_map);
-                }
-                if let Some(ty) = type_ref {
-                    w!(self, " = ");
-                    self.print_type_ref(*ty, types_map);
-                }
-                self.print_where_clause(generic_params);
+                w!(self, "type {}", name.display(self.db, self.edition));
                 w!(self, ";");
                 wln!(self);
             }
@@ -526,7 +333,7 @@ impl Printer<'_> {
                 let Mod { name, visibility, kind, ast_id } = &self.tree[it];
                 self.print_ast_id(ast_id.erase());
                 self.print_visibility(*visibility);
-                w!(self, "mod {}", name.display(self.db.upcast(), self.edition));
+                w!(self, "mod {}", name.display(self.db, self.edition));
                 match kind {
                     ModKind::Inline { items } => {
                         w!(self, " {{");
@@ -546,145 +353,29 @@ impl Printer<'_> {
                 let MacroCall { path, ast_id, expand_to, ctxt } = &self.tree[it];
                 let _ = writeln!(
                     self,
-                    "// AstId: {:?}, SyntaxContext: {}, ExpandTo: {:?}",
+                    "// AstId: {:?}, SyntaxContextId: {}, ExpandTo: {:?}",
                     ast_id.erase().into_raw(),
                     ctxt,
                     expand_to
                 );
-                wln!(self, "{}!(...);", path.display(self.db.upcast(), self.edition));
+                wln!(self, "{}!(...);", path.display(self.db, self.edition));
             }
             ModItem::MacroRules(it) => {
                 let MacroRules { name, ast_id } = &self.tree[it];
                 self.print_ast_id(ast_id.erase());
-                wln!(
-                    self,
-                    "macro_rules! {} {{ ... }}",
-                    name.display(self.db.upcast(), self.edition)
-                );
+                wln!(self, "macro_rules! {} {{ ... }}", name.display(self.db, self.edition));
             }
             ModItem::Macro2(it) => {
                 let Macro2 { name, visibility, ast_id } = &self.tree[it];
                 self.print_ast_id(ast_id.erase());
                 self.print_visibility(*visibility);
-                wln!(self, "macro {} {{ ... }}", name.display(self.db.upcast(), self.edition));
+                wln!(self, "macro {} {{ ... }}", name.display(self.db, self.edition));
             }
         }
 
         self.blank();
     }
 
-    fn print_type_ref(&mut self, type_ref: TypeRefId, map: &TypesMap) {
-        let edition = self.edition;
-        print_type_ref(self.db, type_ref, map, self, edition).unwrap();
-    }
-
-    fn print_type_bounds(&mut self, bounds: &[TypeBound], map: &TypesMap) {
-        let edition = self.edition;
-        print_type_bounds(self.db, bounds, map, self, edition).unwrap();
-    }
-
-    fn print_path(&mut self, path: &Path, map: &TypesMap) {
-        let edition = self.edition;
-        print_path(self.db, path, map, self, edition).unwrap();
-    }
-
-    fn print_generic_params(&mut self, params: &GenericParams, parent: GenericModItem) {
-        if params.is_empty() {
-            return;
-        }
-
-        w!(self, "<");
-        let mut first = true;
-        for (idx, lt) in params.iter_lt() {
-            if !first {
-                w!(self, ", ");
-            }
-            first = false;
-            self.print_attrs_of(AttrOwner::LifetimeParamData(parent, idx), " ");
-            w!(self, "{}", lt.name.display(self.db.upcast(), self.edition));
-        }
-        for (idx, x) in params.iter_type_or_consts() {
-            if !first {
-                w!(self, ", ");
-            }
-            first = false;
-            self.print_attrs_of(AttrOwner::TypeOrConstParamData(parent, idx), " ");
-            match x {
-                TypeOrConstParamData::TypeParamData(ty) => match &ty.name {
-                    Some(name) => w!(self, "{}", name.display(self.db.upcast(), self.edition)),
-                    None => w!(self, "_anon_{}", idx.into_raw()),
-                },
-                TypeOrConstParamData::ConstParamData(konst) => {
-                    w!(self, "const {}: ", konst.name.display(self.db.upcast(), self.edition));
-                    self.print_type_ref(konst.ty, &params.types_map);
-                }
-            }
-        }
-        w!(self, ">");
-    }
-
-    fn print_where_clause_and_opening_brace(&mut self, params: &GenericParams) {
-        if self.print_where_clause(params) {
-            w!(self, "\n{{");
-        } else {
-            self.whitespace();
-            w!(self, "{{");
-        }
-    }
-
-    fn print_where_clause(&mut self, params: &GenericParams) -> bool {
-        if params.where_predicates().next().is_none() {
-            return false;
-        }
-
-        w!(self, "\nwhere");
-        let edition = self.edition;
-        self.indented(|this| {
-            for (i, pred) in params.where_predicates().enumerate() {
-                if i != 0 {
-                    wln!(this, ",");
-                }
-
-                let (target, bound) = match pred {
-                    WherePredicate::TypeBound { target, bound } => (target, bound),
-                    WherePredicate::Lifetime { target, bound } => {
-                        w!(
-                            this,
-                            "{}: {}",
-                            target.name.display(self.db.upcast(), edition),
-                            bound.name.display(self.db.upcast(), edition)
-                        );
-                        continue;
-                    }
-                    WherePredicate::ForLifetime { lifetimes, target, bound } => {
-                        w!(this, "for<");
-                        for (i, lt) in lifetimes.iter().enumerate() {
-                            if i != 0 {
-                                w!(this, ", ");
-                            }
-                            w!(this, "{}", lt.display(self.db.upcast(), edition));
-                        }
-                        w!(this, "> ");
-                        (target, bound)
-                    }
-                };
-
-                match target {
-                    WherePredicateTypeTarget::TypeRef(ty) => {
-                        this.print_type_ref(*ty, &params.types_map)
-                    }
-                    WherePredicateTypeTarget::TypeOrConstParam(id) => match params[*id].name() {
-                        Some(name) => w!(this, "{}", name.display(self.db.upcast(), edition)),
-                        None => w!(this, "_anon_{}", id.into_raw()),
-                    },
-                }
-                w!(this, ": ");
-                this.print_type_bounds(std::slice::from_ref(bound), &params.types_map);
-            }
-        });
-        true
-    }
-
     fn print_ast_id(&mut self, ast_id: ErasedFileAstId) {
         wln!(self, "// AstId: {:?}", ast_id.into_raw());
     }
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs
index 80b699649fba8..824fbfa5921a7 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs
@@ -1,4 +1,4 @@
-use expect_test::{expect, Expect};
+use expect_test::{Expect, expect};
 use span::Edition;
 use test_fixture::WithFixture;
 
@@ -83,11 +83,11 @@ extern "C" {
 
                 #[on_extern_static]
                 // AstId: 3
-                pub(self) static EX_STATIC: u8 = _;
+                pub(self) static EX_STATIC = _;
 
                 #[on_extern_fn]
                 // AstId: 4
-                pub(self) fn ex_fn() -> ();
+                pub(self) fn ex_fn;
             }
         "##]],
     );
@@ -131,35 +131,35 @@ enum E {
             // AstId: 2
             pub(self) struct Struct {
                 #[doc = " fld docs"]
-                pub(self) fld: (),
+                pub(self) fld,
             }
 
             // AstId: 3
             pub(self) struct Tuple(
                 #[attr]
-                pub(self) 0: u8,
+                pub(self) 0,
             );
 
             // AstId: 4
             pub(self) union Ize {
-                pub(self) a: (),
-                pub(self) b: (),
+                pub(self) a,
+                pub(self) b,
             }
 
             // AstId: 5
-            pub(self) enum E {
+            pub(self) enum E
                 // AstId: 6
                 #[doc = " comment on Unit"]
                 Unit,
                 // AstId: 7
                 #[doc = " comment on Tuple"]
                 Tuple(
-                    pub(self) 0: u8,
+                    pub(self) 0,
                 ),
                 // AstId: 8
                 Struct {
                     #[doc = " comment on a: u8"]
-                    pub(self) a: u8,
+                    pub(self) a,
                 },
             }
         "#]],
@@ -186,33 +186,23 @@ trait Tr: SuperTrait + 'lifetime {
         "#,
         expect![[r#"
             // AstId: 1
-            pub static mut ST: () = _;
+            pub static ST = _;
 
             // AstId: 2
-            pub(self) const _: Anon = _;
+            pub(self) const _ = _;
 
             #[attr]
             #[inner_attr_in_fn]
             // AstId: 3
-            pub(self) fn f(
-                #[attr]
-                u8,
-                (),
-            ) -> () { ... }
+            pub(self) fn f;
 
             // AstId: 4
-            pub(self) trait Tr<Self>
-            where
-                Self: SuperTrait,
-                Self: 'lifetime
-            {
+            pub(self) trait Tr {
                 // AstId: 6
-                pub(self) type Assoc: AssocBound = Default;
+                pub(self) type Assoc;
 
                 // AstId: 7
-                pub(self) fn method(
-                    self: &Self,
-                ) -> ();
+                pub(self) fn method;
             }
         "#]],
     );
@@ -242,7 +232,7 @@ mod outline;
                 pub(self) use super::*;
 
                 // AstId: 4
-                pub(self) fn fn_in_module() -> () { ... }
+                pub(self) fn fn_in_module;
             }
 
             // AstId: 2
@@ -270,159 +260,12 @@ m!();
             // AstId: 2
             pub macro m2 { ... }
 
-            // AstId: 3, SyntaxContext: 2, ExpandTo: Items
+            // AstId: 3, SyntaxContextId: ROOT2024, ExpandTo: Items
             m!(...);
         "#]],
     );
 }
 
-#[test]
-fn mod_paths() {
-    check(
-        r#"
-struct S {
-    a: self::Ty,
-    b: super::SuperTy,
-    c: super::super::SuperSuperTy,
-    d: ::abs::Path,
-    e: crate::Crate,
-    f: plain::path::Ty,
-}
-        "#,
-        expect![[r#"
-            // AstId: 1
-            pub(self) struct S {
-                pub(self) a: self::Ty,
-                pub(self) b: super::SuperTy,
-                pub(self) c: super::super::SuperSuperTy,
-                pub(self) d: ::abs::Path,
-                pub(self) e: crate::Crate,
-                pub(self) f: plain::path::Ty,
-            }
-        "#]],
-    )
-}
-
-#[test]
-fn types() {
-    check(
-        r#"
-struct S {
-    a: Mixed<'a, T, Item=(), OtherItem=u8>,
-    b: <Fully as Qualified>::Syntax,
-    c: <TypeAnchored>::Path::<'a>,
-    d: dyn for<'a> Trait<'a>,
-}
-        "#,
-        expect![[r#"
-            // AstId: 1
-            pub(self) struct S {
-                pub(self) a: Mixed::<'a, T, Item = (), OtherItem = u8>,
-                pub(self) b: Qualified::<Self=Fully>::Syntax,
-                pub(self) c: <TypeAnchored>::Path::<'a>,
-                pub(self) d: dyn for<'a> Trait::<'a>,
-            }
-        "#]],
-    )
-}
-
-#[test]
-fn generics() {
-    check(
-        r#"
-struct S<'a, 'b: 'a, T: Copy + 'a + 'b, const K: u8 = 0> {
-    field: &'a &'b T,
-}
-
-struct Tuple<T: Copy, U: ?Sized>(T, U);
-
-impl<'a, 'b: 'a, T: Copy + 'a + 'b, const K: u8 = 0> S<'a, 'b, T, K> {
-    fn f<G: 'a>(arg: impl Copy) -> impl Copy {}
-}
-
-enum Enum<'a, T, const U: u8> {}
-union Union<'a, T, const U: u8> {}
-
-trait Tr<'a, T: 'a>: Super where Self: for<'a> Tr<'a, T> {}
-        "#,
-        expect![[r#"
-            // AstId: 1
-            pub(self) struct S<'a, 'b, T, const K: u8>
-            where
-                T: Copy,
-                T: 'a,
-                T: 'b,
-                'b: 'a
-            {
-                pub(self) field: &'a &'b T,
-            }
-
-            // AstId: 2
-            pub(self) struct Tuple<T, U>(
-                pub(self) 0: T,
-                pub(self) 1: U,
-            )
-            where
-                T: Copy,
-                U: ?Sized;
-
-            // AstId: 3
-            impl<'a, 'b, T, const K: u8> S::<'a, 'b, T, K>
-            where
-                T: Copy,
-                T: 'a,
-                T: 'b,
-                'b: 'a
-            {
-                // AstId: 9
-                pub(self) fn f<G>(
-                    impl Copy,
-                ) -> impl Copy
-                where
-                    G: 'a { ... }
-            }
-
-            // AstId: 4
-            pub(self) enum Enum<'a, T, const U: u8> {
-            }
-
-            // AstId: 5
-            pub(self) union Union<'a, T, const U: u8> {
-            }
-
-            // AstId: 6
-            pub(self) trait Tr<'a, Self, T>
-            where
-                Self: Super,
-                T: 'a,
-                Self: for<'a> Tr::<'a, T>
-            {
-            }
-        "#]],
-    )
-}
-
-#[test]
-fn generics_with_attributes() {
-    check(
-        r#"
-struct S<#[cfg(never)] T>;
-struct S<A, B, #[cfg(never)] C>;
-struct S<A, #[cfg(never)] B, C>;
-        "#,
-        expect![[r#"
-            // AstId: 1
-            pub(self) struct S<#[cfg(never)] T>;
-
-            // AstId: 2
-            pub(self) struct S<A, B, #[cfg(never)] C>;
-
-            // AstId: 3
-            pub(self) struct S<A, #[cfg(never)] B, C>;
-        "#]],
-    )
-}
-
 #[test]
 fn pub_self() {
     check(
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs
index 59f51db9f7401..5431ec9679c5a 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs
@@ -3,13 +3,13 @@
 //! This attribute tells the compiler about semi-built-in std library
 //! features, such as the `Fn` family of traits.
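+//!
+//! For example, `core` marks the `FnOnce` trait with `#[lang = "fn_once"]`;
+//! the `language_item_table!` invocation below lists the known lang items.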
 use hir_expand::name::Name;
-use intern::{sym, Symbol};
+use intern::{Symbol, sym};
 use rustc_hash::FxHashMap;
 use triomphe::Arc;
 
 use crate::{
-    db::DefDatabase, path::Path, AdtId, AssocItemId, AttrDefId, CrateId, EnumId, EnumVariantId,
-    FunctionId, ImplId, ModuleDefId, StaticId, StructId, TraitId, TypeAliasId, UnionId,
+    AdtId, AssocItemId, AttrDefId, Crate, EnumId, EnumVariantId, FunctionId, ImplId, ModuleDefId,
+    StaticId, StructId, TraitId, TypeAliasId, UnionId, db::DefDatabase, expr_store::path::Path,
 };
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -96,7 +96,7 @@ impl LangItems {
     /// Salsa query. This will look for lang items in a specific crate.
     pub(crate) fn crate_lang_items_query(
         db: &dyn DefDatabase,
-        krate: CrateId,
+        krate: Crate,
     ) -> Option<Arc<LangItems>> {
         let _p = tracing::info_span!("crate_lang_items_query").entered();
 
@@ -107,7 +107,7 @@ impl LangItems {
         for (_, module_data) in crate_def_map.modules() {
             for impl_def in module_data.scope.impls() {
                 lang_items.collect_lang_item(db, impl_def, LangItemTarget::ImplDef);
-                for &(_, assoc) in db.impl_data(impl_def).items.iter() {
+                for &(_, assoc) in db.impl_items(impl_def).items.iter() {
                     match assoc {
                         AssocItemId::FunctionId(f) => {
                             lang_items.collect_lang_item(db, f, LangItemTarget::Function)
@@ -124,7 +124,7 @@ impl LangItems {
                 match def {
                     ModuleDefId::TraitId(trait_) => {
                         lang_items.collect_lang_item(db, trait_, LangItemTarget::Trait);
-                        db.trait_data(trait_).items.iter().for_each(
+                        db.trait_items(trait_).items.iter().for_each(
                             |&(_, assoc_id)| match assoc_id {
                                 AssocItemId::FunctionId(f) => {
                                     lang_items.collect_lang_item(db, f, LangItemTarget::Function);
@@ -140,7 +140,7 @@ impl LangItems {
                     }
                     ModuleDefId::AdtId(AdtId::EnumId(e)) => {
                         lang_items.collect_lang_item(db, e, LangItemTarget::EnumId);
-                        crate_def_map.enum_definitions[&e].iter().for_each(|&id| {
+                        db.enum_variants(e).variants.iter().for_each(|&(id, _)| {
                             lang_items.collect_lang_item(db, id, LangItemTarget::EnumVariant);
                         });
                     }
@@ -164,18 +164,14 @@ impl LangItems {
             }
         }
 
-        if lang_items.items.is_empty() {
-            None
-        } else {
-            Some(Arc::new(lang_items))
-        }
+        if lang_items.items.is_empty() { None } else { Some(Arc::new(lang_items)) }
     }
 
     /// Salsa query. Look for a lang item, starting from the specified crate and recursively
     /// traversing its dependencies.
     pub(crate) fn lang_item_query(
         db: &dyn DefDatabase,
-        start_crate: CrateId,
+        start_crate: Crate,
         item: LangItem,
     ) -> Option<LangItemTarget> {
         let _p = tracing::info_span!("lang_item_query").entered();
@@ -184,10 +180,7 @@ impl LangItems {
         {
             return Some(target);
         }
-        db.crate_graph()[start_crate]
-            .dependencies
-            .iter()
-            .find_map(|dep| db.lang_item(dep.crate_id, item))
+        start_crate.data(db).dependencies.iter().find_map(|dep| db.lang_item(dep.crate_id, item))
     }
 
     fn collect_lang_item<T>(
@@ -209,19 +202,14 @@ pub(crate) fn lang_attr(db: &dyn DefDatabase, item: AttrDefId) -> Option<LangIte
     db.attrs(item).lang_item()
 }
 
-pub(crate) fn notable_traits_in_deps(
-    db: &dyn DefDatabase,
-    krate: CrateId,
-) -> Arc<[Arc<[TraitId]>]> {
+pub(crate) fn notable_traits_in_deps(db: &dyn DefDatabase, krate: Crate) -> Arc<[Arc<[TraitId]>]> {
     let _p = tracing::info_span!("notable_traits_in_deps", ?krate).entered();
-    let crate_graph = db.crate_graph();
-
     Arc::from_iter(
-        crate_graph.transitive_deps(krate).filter_map(|krate| db.crate_notable_traits(krate)),
+        db.transitive_deps(krate).into_iter().filter_map(|krate| db.crate_notable_traits(krate)),
     )
 }
 
-pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: CrateId) -> Option<Arc<[TraitId]>> {
+pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: Crate) -> Option<Arc<[TraitId]>> {
     let _p = tracing::info_span!("crate_notable_traits", ?krate).entered();
 
     let mut traits = Vec::new();
@@ -238,11 +226,7 @@ pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: CrateId) -> Opti
         }
     }
 
-    if traits.is_empty() {
-        None
-    } else {
-        Some(traits.into_iter().collect())
-    }
+    if traits.is_empty() { None } else { Some(traits.into_iter().collect()) }
 }
 
 pub enum GenericRequirement {
@@ -290,17 +274,12 @@ impl LangItem {
         Self::from_symbol(name.symbol())
     }
 
-    pub fn path(&self, db: &dyn DefDatabase, start_crate: CrateId) -> Option<Path> {
+    pub fn path(&self, db: &dyn DefDatabase, start_crate: Crate) -> Option<Path> {
         let t = db.lang_item(start_crate, *self)?;
         Some(Path::LangItem(t, None))
     }
 
-    pub fn ty_rel_path(
-        &self,
-        db: &dyn DefDatabase,
-        start_crate: CrateId,
-        seg: Name,
-    ) -> Option<Path> {
+    pub fn ty_rel_path(&self, db: &dyn DefDatabase, start_crate: Crate, seg: Name) -> Option<Path> {
         let t = db.lang_item(start_crate, *self)?;
         Some(Path::LangItem(t, Some(seg)))
     }
@@ -366,6 +345,7 @@ language_item_table! {
     IndexMut,                sym::index_mut,           index_mut_trait,            Target::Trait,          GenericRequirement::Exact(1);
 
     UnsafeCell,              sym::unsafe_cell,         unsafe_cell_type,           Target::Struct,         GenericRequirement::None;
+    UnsafePinned,            sym::unsafe_pinned,       unsafe_pinned_type,         Target::Struct,         GenericRequirement::None;
     VaList,                  sym::va_list,             va_list,                    Target::Struct,         GenericRequirement::None;
 
     Deref,                   sym::deref,               deref_trait,                Target::Trait,          GenericRequirement::Exact(0);
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
index 9c947df35e990..737941dba07ec 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
@@ -18,32 +18,22 @@ extern crate ra_ap_rustc_parse_format as rustc_parse_format;
 #[cfg(feature = "in-rust-tree")]
 extern crate rustc_abi;
 
-#[cfg(feature = "in-rust-tree")]
-extern crate rustc_hashes;
-
 #[cfg(not(feature = "in-rust-tree"))]
 extern crate ra_ap_rustc_abi as rustc_abi;
 
-#[cfg(not(feature = "in-rust-tree"))]
-extern crate ra_ap_rustc_hashes as rustc_hashes;
-
 pub mod db;
 
 pub mod attr;
 pub mod builtin_type;
 pub mod item_scope;
-pub mod path;
 pub mod per_ns;
 
-pub mod expander;
-pub mod lower;
+pub mod signatures;
 
 pub mod dyn_map;
 
 pub mod item_tree;
 
-pub mod data;
-pub mod generics;
 pub mod lang_item;
 
 pub mod hir;
@@ -59,57 +49,54 @@ pub mod find_path;
 pub mod import_map;
 pub mod visibility;
 
-use intern::Interned;
+use intern::{Interned, sym};
 pub use rustc_abi as layout;
 use triomphe::Arc;
 
+pub use crate::signatures::LocalFieldId;
+
 #[cfg(test)]
 mod macro_expansion_tests;
-mod pretty;
 #[cfg(test)]
 mod test_db;
 
-use std::{
-    hash::{Hash, Hasher},
-    panic::{RefUnwindSafe, UnwindSafe},
-};
+use std::hash::{Hash, Hasher};
 
-use base_db::{
-    impl_intern_key,
-    ra_salsa::{self, InternValueTrivial},
-    CrateId,
-};
+use base_db::{Crate, impl_intern_key};
 use hir_expand::{
+    AstId, ExpandError, ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind,
+    MacroDefId, MacroDefKind,
     builtin::{BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerExpander},
     db::ExpandDatabase,
     eager::expand_eager_macro_input,
     impl_intern_lookup,
+    mod_path::ModPath,
     name::Name,
     proc_macro::{CustomProcMacroExpander, ProcMacroKind},
-    AstId, ExpandError, ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind,
-    MacroDefId, MacroDefKind,
 };
 use item_tree::ExternBlock;
 use la_arena::Idx;
 use nameres::DefMap;
-use span::{AstIdNode, Edition, FileAstId, SyntaxContextId};
+use span::{AstIdNode, Edition, FileAstId, SyntaxContext};
 use stdx::impl_from;
-use syntax::{ast, AstNode};
+use syntax::{AstNode, ast};
 
-pub use hir_expand::{tt, Intern, Lookup};
+pub use hir_expand::{Intern, Lookup, tt};
 
 use crate::{
+    attr::Attrs,
     builtin_type::BuiltinType,
-    data::adt::VariantData,
     db::DefDatabase,
+    hir::generics::{LocalLifetimeParamId, LocalTypeOrConstParamId},
     item_tree::{
         Const, Enum, ExternCrate, Function, Impl, ItemTreeId, ItemTreeNode, Macro2, MacroRules,
         Static, Struct, Trait, TraitAlias, TypeAlias, Union, Use, Variant,
     },
+    nameres::LocalDefMap,
+    signatures::VariantFields,
 };
 
-type FxIndexMap<K, V> =
-    indexmap::IndexMap<K, V, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;
+type FxIndexMap<K, V> = indexmap::IndexMap<K, V, rustc_hash::FxBuildHasher>;
 /// A wrapper around three booleans
 #[derive(Debug, Clone, PartialEq, Eq, Hash, Copy)]
 pub struct ImportPathConfig {
@@ -192,8 +179,7 @@ pub trait ItemTreeLoc {
 
 macro_rules! impl_intern {
     ($id:ident, $loc:ident, $intern:ident, $lookup:ident) => {
-        impl_intern_key!($id);
-        impl InternValueTrivial for $loc {}
+        impl_intern_key!($id, $loc);
         impl_intern_lookup!(DefDatabase, $id, $loc, $intern, $lookup);
     };
 }
@@ -213,87 +199,58 @@ macro_rules! impl_loc {
     };
 }
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct FunctionId(ra_salsa::InternId);
 type FunctionLoc = AssocItemLoc<Function>;
 impl_intern!(FunctionId, FunctionLoc, intern_function, lookup_intern_function);
 impl_loc!(FunctionLoc, id: Function, container: ItemContainerId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
-pub struct StructId(ra_salsa::InternId);
 type StructLoc = ItemLoc<Struct>;
 impl_intern!(StructId, StructLoc, intern_struct, lookup_intern_struct);
 impl_loc!(StructLoc, id: Struct, container: ModuleId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
-pub struct UnionId(ra_salsa::InternId);
 pub type UnionLoc = ItemLoc<Union>;
 impl_intern!(UnionId, UnionLoc, intern_union, lookup_intern_union);
 impl_loc!(UnionLoc, id: Union, container: ModuleId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
-pub struct EnumId(ra_salsa::InternId);
 pub type EnumLoc = ItemLoc<Enum>;
 impl_intern!(EnumId, EnumLoc, intern_enum, lookup_intern_enum);
 impl_loc!(EnumLoc, id: Enum, container: ModuleId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct ConstId(ra_salsa::InternId);
 type ConstLoc = AssocItemLoc<Const>;
 impl_intern!(ConstId, ConstLoc, intern_const, lookup_intern_const);
 impl_loc!(ConstLoc, id: Const, container: ItemContainerId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct StaticId(ra_salsa::InternId);
 pub type StaticLoc = AssocItemLoc<Static>;
 impl_intern!(StaticId, StaticLoc, intern_static, lookup_intern_static);
 impl_loc!(StaticLoc, id: Static, container: ItemContainerId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
-pub struct TraitId(ra_salsa::InternId);
 pub type TraitLoc = ItemLoc<Trait>;
 impl_intern!(TraitId, TraitLoc, intern_trait, lookup_intern_trait);
 impl_loc!(TraitLoc, id: Trait, container: ModuleId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct TraitAliasId(ra_salsa::InternId);
 pub type TraitAliasLoc = ItemLoc<TraitAlias>;
 impl_intern!(TraitAliasId, TraitAliasLoc, intern_trait_alias, lookup_intern_trait_alias);
 impl_loc!(TraitAliasLoc, id: TraitAlias, container: ModuleId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct TypeAliasId(ra_salsa::InternId);
 type TypeAliasLoc = AssocItemLoc<TypeAlias>;
 impl_intern!(TypeAliasId, TypeAliasLoc, intern_type_alias, lookup_intern_type_alias);
 impl_loc!(TypeAliasLoc, id: TypeAlias, container: ItemContainerId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
-pub struct ImplId(ra_salsa::InternId);
 type ImplLoc = ItemLoc<Impl>;
 impl_intern!(ImplId, ImplLoc, intern_impl, lookup_intern_impl);
 impl_loc!(ImplLoc, id: Impl, container: ModuleId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
-pub struct UseId(ra_salsa::InternId);
 type UseLoc = ItemLoc<Use>;
 impl_intern!(UseId, UseLoc, intern_use, lookup_intern_use);
 impl_loc!(UseLoc, id: Use, container: ModuleId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
-pub struct ExternCrateId(ra_salsa::InternId);
 type ExternCrateLoc = ItemLoc<ExternCrate>;
 impl_intern!(ExternCrateId, ExternCrateLoc, intern_extern_crate, lookup_intern_extern_crate);
 impl_loc!(ExternCrateLoc, id: ExternCrate, container: ModuleId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
-pub struct ExternBlockId(ra_salsa::InternId);
 type ExternBlockLoc = ItemLoc<ExternBlock>;
 impl_intern!(ExternBlockId, ExternBlockLoc, intern_extern_block, lookup_intern_extern_block);
 impl_loc!(ExternBlockLoc, id: ExternBlock, container: ModuleId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct EnumVariantId(ra_salsa::InternId);
-
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct EnumVariantLoc {
     pub id: ItemTreeId<Variant>,
@@ -302,9 +259,6 @@ pub struct EnumVariantLoc {
 }
 impl_intern!(EnumVariantId, EnumVariantLoc, intern_enum_variant, lookup_intern_enum_variant);
 impl_loc!(EnumVariantLoc, id: Variant, parent: EnumId);
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
-pub struct Macro2Id(ra_salsa::InternId);
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct Macro2Loc {
     pub container: ModuleId,
@@ -316,8 +270,6 @@ pub struct Macro2Loc {
 impl_intern!(Macro2Id, Macro2Loc, intern_macro2, lookup_intern_macro2);
 impl_loc!(Macro2Loc, id: Macro2, container: ModuleId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
-pub struct MacroRulesId(ra_salsa::InternId);
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct MacroRulesLoc {
     pub container: ModuleId,
@@ -345,8 +297,7 @@ pub enum MacroExpander {
     BuiltInDerive(BuiltinDeriveExpander),
     BuiltInEager(EagerExpander),
 }
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
-pub struct ProcMacroId(ra_salsa::InternId);
+
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct ProcMacroLoc {
     pub container: CrateRootModuleId,
@@ -358,8 +309,6 @@ pub struct ProcMacroLoc {
 impl_intern!(ProcMacroId, ProcMacroLoc, intern_proc_macro, lookup_intern_proc_macro);
 impl_loc!(ProcMacroLoc, id: Function, container: CrateRootModuleId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
-pub struct BlockId(ra_salsa::InternId);
 #[derive(Debug, Hash, PartialEq, Eq, Clone)]
 pub struct BlockLoc {
     pub ast_id: AstId<ast::BlockExpr>,
@@ -368,24 +317,10 @@ pub struct BlockLoc {
 }
 impl_intern!(BlockId, BlockLoc, intern_block, lookup_intern_block);
 
-/// Id of the anonymous const block expression and patterns. This is very similar to `ClosureId` and
-/// shouldn't be a `DefWithBodyId` since its type inference is dependent on its parent.
-#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
-pub struct ConstBlockId(ra_salsa::InternId);
-impl_intern!(ConstBlockId, ConstBlockLoc, intern_anonymous_const, lookup_intern_anonymous_const);
-
-#[derive(Debug, Hash, PartialEq, Eq, Clone)]
-pub struct ConstBlockLoc {
-    /// The parent of the anonymous const block.
-    pub parent: DefWithBodyId,
-    /// The root expression of this const block in the parent body.
-    pub root: hir::ExprId,
-}
-
 /// A `ModuleId` that is always a crate's root module.
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct CrateRootModuleId {
-    krate: CrateId,
+    krate: Crate,
 }
 
 impl CrateRootModuleId {
@@ -393,7 +328,11 @@ impl CrateRootModuleId {
         db.crate_def_map(self.krate)
     }
 
-    pub fn krate(self) -> CrateId {
+    pub(crate) fn local_def_map(&self, db: &dyn DefDatabase) -> (Arc<DefMap>, Arc<LocalDefMap>) {
+        db.crate_local_def_map(self.krate)
+    }
+
+    pub fn krate(self) -> Crate {
         self.krate
     }
 }
@@ -421,8 +360,8 @@ impl From<CrateRootModuleId> for ModuleDefId {
     }
 }
 
-impl From<CrateId> for CrateRootModuleId {
-    fn from(krate: CrateId) -> Self {
+impl From<Crate> for CrateRootModuleId {
+    fn from(krate: Crate) -> Self {
         CrateRootModuleId { krate }
     }
 }
@@ -441,7 +380,7 @@ impl TryFrom<ModuleId> for CrateRootModuleId {
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
 pub struct ModuleId {
-    krate: CrateId,
+    krate: Crate,
     /// If this `ModuleId` was derived from a `DefMap` for a block expression, this stores the
     /// `BlockId` of that block expression. If `None`, this module is part of the crate-level
     /// `DefMap` of `krate`.
@@ -458,11 +397,22 @@ impl ModuleId {
         }
     }
 
+    pub(crate) fn local_def_map(self, db: &dyn DefDatabase) -> (Arc<DefMap>, Arc<LocalDefMap>) {
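+        // A block expression's module gets its own `DefMap`, but its
+        // `LocalDefMap` is always the one of the enclosing crate.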
+        match self.block {
+            Some(block) => (db.block_def_map(block), self.only_local_def_map(db)),
+            None => db.crate_local_def_map(self.krate),
+        }
+    }
+
+    pub(crate) fn only_local_def_map(self, db: &dyn DefDatabase) -> Arc<LocalDefMap> {
+        db.crate_local_def_map(self.krate).1
+    }
+
     pub fn crate_def_map(self, db: &dyn DefDatabase) -> Arc<DefMap> {
         db.crate_def_map(self.krate)
     }
 
-    pub fn krate(self) -> CrateId {
+    pub fn krate(self) -> Crate {
         self.krate
     }
 
@@ -470,11 +420,7 @@ impl ModuleId {
         let def_map = self.def_map(db);
         let parent = def_map[self.local_id].parent?;
         def_map[parent].children.iter().find_map(|(name, module_id)| {
-            if *module_id == self.local_id {
-                Some(name.clone())
-            } else {
-                None
-            }
+            if *module_id == self.local_id { Some(name.clone()) } else { None }
         })
     }
 
@@ -525,8 +471,6 @@ pub struct FieldId {
     pub local_id: LocalFieldId,
 }
 
-pub type LocalFieldId = Idx<data::adt::FieldData>;
-
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct TupleId(pub u32);
 
@@ -536,12 +480,11 @@ pub struct TupleFieldId {
     pub index: u32,
 }
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
 pub struct TypeOrConstParamId {
     pub parent: GenericDefId,
     pub local_id: LocalTypeOrConstParamId,
 }
-impl InternValueTrivial for TypeOrConstParamId {}
 
 /// A TypeOrConstParamId with an invariant that it actually belongs to a type
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -595,15 +538,11 @@ impl From<ConstParamId> for TypeOrConstParamId {
     }
 }
 
-pub type LocalTypeOrConstParamId = Idx<generics::TypeOrConstParamData>;
-
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct LifetimeParamId {
     pub parent: GenericDefId,
     pub local_id: LocalLifetimeParamId,
 }
-pub type LocalLifetimeParamId = Idx<generics::LifetimeParamData>;
-impl InternValueTrivial for LifetimeParamId {}
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub enum ItemContainerId {
@@ -615,7 +554,7 @@ pub enum ItemContainerId {
 impl_from!(ModuleId for ItemContainerId);
 
 /// A Data Type
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
+#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
 pub enum AdtId {
     StructId(StructId),
     UnionId(UnionId),
@@ -624,7 +563,7 @@ pub enum AdtId {
 impl_from!(StructId, UnionId, EnumId for AdtId);
 
 /// A macro
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
+#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
 pub enum MacroId {
     Macro2Id(Macro2Id),
     MacroRulesId(MacroRulesId),
@@ -678,222 +617,59 @@ impl_from!(
     for ModuleDefId
 );
 
-/// Something that holds types, required for the current const arg lowering implementation as they
-/// need to be able to query where they are defined.
-#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
-pub enum TypeOwnerId {
-    FunctionId(FunctionId),
-    StaticId(StaticId),
-    ConstId(ConstId),
-    InTypeConstId(InTypeConstId),
-    AdtId(AdtId),
-    TraitId(TraitId),
-    TraitAliasId(TraitAliasId),
-    TypeAliasId(TypeAliasId),
-    ImplId(ImplId),
-    EnumVariantId(EnumVariantId),
-}
-
-impl TypeOwnerId {
-    fn as_generic_def_id(self, db: &dyn DefDatabase) -> Option<GenericDefId> {
-        Some(match self {
-            TypeOwnerId::FunctionId(it) => GenericDefId::FunctionId(it),
-            TypeOwnerId::ConstId(it) => GenericDefId::ConstId(it),
-            TypeOwnerId::StaticId(it) => GenericDefId::StaticId(it),
-            TypeOwnerId::AdtId(it) => GenericDefId::AdtId(it),
-            TypeOwnerId::TraitId(it) => GenericDefId::TraitId(it),
-            TypeOwnerId::TraitAliasId(it) => GenericDefId::TraitAliasId(it),
-            TypeOwnerId::TypeAliasId(it) => GenericDefId::TypeAliasId(it),
-            TypeOwnerId::ImplId(it) => GenericDefId::ImplId(it),
-            TypeOwnerId::EnumVariantId(it) => {
-                GenericDefId::AdtId(AdtId::EnumId(it.lookup(db).parent))
-            }
-            TypeOwnerId::InTypeConstId(_) => return None,
-        })
-    }
-}
-
-impl_from!(
-    FunctionId,
-    StaticId,
-    ConstId,
-    InTypeConstId,
-    AdtId,
-    TraitId,
-    TraitAliasId,
-    TypeAliasId,
-    ImplId,
-    EnumVariantId
-    for TypeOwnerId
-);
-
-// Every `DefWithBodyId` is a type owner, since bodies can contain type (e.g. `{ let it: Type = _; }`)
-impl From<DefWithBodyId> for TypeOwnerId {
-    fn from(value: DefWithBodyId) -> Self {
-        match value {
-            DefWithBodyId::FunctionId(it) => it.into(),
-            DefWithBodyId::StaticId(it) => it.into(),
-            DefWithBodyId::ConstId(it) => it.into(),
-            DefWithBodyId::InTypeConstId(it) => it.into(),
-            DefWithBodyId::VariantId(it) => it.into(),
-        }
-    }
-}
-
-impl From<GenericDefId> for TypeOwnerId {
-    fn from(value: GenericDefId) -> Self {
-        match value {
-            GenericDefId::FunctionId(it) => it.into(),
-            GenericDefId::AdtId(it) => it.into(),
-            GenericDefId::TraitId(it) => it.into(),
-            GenericDefId::TraitAliasId(it) => it.into(),
-            GenericDefId::TypeAliasId(it) => it.into(),
-            GenericDefId::ImplId(it) => it.into(),
-            GenericDefId::ConstId(it) => it.into(),
-            GenericDefId::StaticId(it) => it.into(),
-        }
-    }
-}
-
-// FIXME: This should not be a thing
-/// A thing that we want to store in interned ids, but we don't know its type in `hir-def`. This is
-/// currently only used in `InTypeConstId` for storing the type (which has type `Ty` defined in
-/// the `hir-ty` crate) of the constant in its id, which is a temporary hack so we may want
-/// to remove this after removing that.
-pub trait OpaqueInternableThing:
-    std::any::Any + std::fmt::Debug + Sync + Send + UnwindSafe + RefUnwindSafe
-{
-    fn as_any(&self) -> &dyn std::any::Any;
-    fn box_any(&self) -> Box<dyn std::any::Any>;
-    fn dyn_hash(&self, state: &mut dyn Hasher);
-    fn dyn_eq(&self, other: &dyn OpaqueInternableThing) -> bool;
-    fn dyn_clone(&self) -> Box<dyn OpaqueInternableThing>;
-}
-
-impl Hash for dyn OpaqueInternableThing {
-    fn hash<H: Hasher>(&self, state: &mut H) {
-        self.dyn_hash(state);
-    }
-}
-
-impl PartialEq for dyn OpaqueInternableThing {
-    fn eq(&self, other: &Self) -> bool {
-        self.dyn_eq(other)
-    }
-}
-
-impl Eq for dyn OpaqueInternableThing {}
-
-impl Clone for Box<dyn OpaqueInternableThing> {
-    fn clone(&self) -> Self {
-        self.dyn_clone()
-    }
-}
-
-// FIXME(const-generic-body): Use an stable id for in type consts.
-//
-// The current id uses `AstId<ast::ConstArg>` which will be changed by every change in the code. Ideally
-// we should use an id which is relative to the type owner, so that every change will only invalidate the
-// id if it happens inside of the type owner.
-//
-// The solution probably is to have some query on `TypeOwnerId` to traverse its constant children and store
-// their `AstId` in a list (vector or arena), and use the index of that list in the id here. That query probably
-// needs name resolution, and might go far and handles the whole path lowering or type lowering for a `TypeOwnerId`.
-//
-// Whatever path the solution takes, it should answer 3 questions at the same time:
-// * Is the id stable enough?
-// * How to find a constant id using an ast node / position in the source code? This is needed when we want to
-//   provide ide functionalities inside an in type const (which we currently don't support) e.g. go to definition
-//   for a local defined there. A complex id might have some trouble in this reverse mapping.
-// * How to find the return type of a constant using its id? We have this data when we are doing type lowering
-//   and the name of the struct that contains this constant is resolved, so a query that only traverses the
-//   type owner by its syntax tree might have a hard time here.
-
-/// A constant in a type as a substitution for const generics (like `Foo<{ 2 + 2 }>`) or as an array
-/// length (like `[u8; 2 + 2]`). These constants are body owner and are a variant of `DefWithBodyId`. These
-/// are not called `AnonymousConstId` to prevent confusion with [`ConstBlockId`].
-#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
-pub struct InTypeConstId(ra_salsa::InternId);
-impl_intern!(InTypeConstId, InTypeConstLoc, intern_in_type_const, lookup_intern_in_type_const);
-
-// We would like to set `derive(PartialEq)`
-// but the compiler complains about that `.expected_ty` does not implement the `Copy` trait.
-#[allow(clippy::derived_hash_with_manual_eq)]
-#[derive(Debug, Hash, Eq, Clone)]
-pub struct InTypeConstLoc {
-    pub id: AstId<ast::ConstArg>,
-    /// The thing this const arg appears in
-    pub owner: TypeOwnerId,
-    // FIXME(const-generic-body): The expected type should not be
-    pub expected_ty: Box<dyn OpaqueInternableThing>,
-}
-
-impl PartialEq for InTypeConstLoc {
-    fn eq(&self, other: &Self) -> bool {
-        self.id == other.id && self.owner == other.owner && *self.expected_ty == *other.expected_ty
-    }
-}
-
-impl InTypeConstId {
-    pub fn source(&self, db: &dyn DefDatabase) -> ast::ConstArg {
-        let src = self.lookup(db).id;
-        let file_id = src.file_id;
-        let root = &db.parse_or_expand(file_id);
-        db.ast_id_map(file_id).get(src.value).to_node(root)
-    }
-}
-
 /// A constant, which might appear as a const item, an anonymous const block in expressions
 /// or patterns, or as a constant in types with const generics.
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
 pub enum GeneralConstId {
     ConstId(ConstId),
     StaticId(StaticId),
-    ConstBlockId(ConstBlockId),
-    InTypeConstId(InTypeConstId),
 }
 
-impl_from!(ConstId, StaticId, ConstBlockId, InTypeConstId for GeneralConstId);
+impl_from!(ConstId, StaticId for GeneralConstId);
 
 impl GeneralConstId {
-    pub fn generic_def(self, db: &dyn DefDatabase) -> Option<GenericDefId> {
+    pub fn generic_def(self, _db: &dyn DefDatabase) -> Option<GenericDefId> {
         match self {
             GeneralConstId::ConstId(it) => Some(it.into()),
             GeneralConstId::StaticId(it) => Some(it.into()),
-            GeneralConstId::ConstBlockId(it) => it.lookup(db).parent.as_generic_def_id(db),
-            GeneralConstId::InTypeConstId(it) => it.lookup(db).owner.as_generic_def_id(db),
         }
     }
 
     pub fn name(self, db: &dyn DefDatabase) -> String {
         match self {
             GeneralConstId::StaticId(it) => {
-                db.static_data(it).name.display(db.upcast(), Edition::CURRENT).to_string()
+                let loc = it.lookup(db);
+                let tree = loc.item_tree_id().item_tree(db);
+                let name = tree[loc.id.value].name.display(db, Edition::CURRENT);
+                name.to_string()
+            }
+            GeneralConstId::ConstId(const_id) => {
+                let loc = const_id.lookup(db);
+                let tree = loc.item_tree_id().item_tree(db);
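+                // Unnamed constants (`const _: () = ();`) are rendered as `_`.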
+                tree[loc.id.value].name.as_ref().map_or_else(
+                    || "_".to_owned(),
+                    |name| name.display(db, Edition::CURRENT).to_string(),
+                )
             }
-            GeneralConstId::ConstId(const_id) => db
-                .const_data(const_id)
-                .name
-                .as_ref()
-                .map(|it| it.as_str())
-                .unwrap_or("_")
-                .to_owned(),
-            GeneralConstId::ConstBlockId(id) => format!("{{anonymous const {id:?}}}"),
-            GeneralConstId::InTypeConstId(id) => format!("{{in type const {id:?}}}"),
         }
     }
 }
 
-/// The defs which have a body.
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+/// The defs which have a body (i.e. provide root expressions for type inference).
+#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
 pub enum DefWithBodyId {
     FunctionId(FunctionId),
     StaticId(StaticId),
     ConstId(ConstId),
-    InTypeConstId(InTypeConstId),
     VariantId(EnumVariantId),
+    // /// All fields of a variant are inference roots
+    // VariantId(VariantId),
+    // /// The signature can contain inference roots in a bunch of places
+    // /// like const parameters or const arguments in paths
+    // This should likely be kept on its own with a separate query
+    // GenericDefId(GenericDefId),
 }
-
-impl_from!(FunctionId, ConstId, StaticId, InTypeConstId for DefWithBodyId);
+impl_from!(FunctionId, ConstId, StaticId for DefWithBodyId);
 
 impl From<EnumVariantId> for DefWithBodyId {
     fn from(id: EnumVariantId) -> Self {
@@ -908,9 +684,6 @@ impl DefWithBodyId {
             DefWithBodyId::StaticId(s) => Some(s.into()),
             DefWithBodyId::ConstId(c) => Some(c.into()),
             DefWithBodyId::VariantId(c) => Some(c.lookup(db).parent.into()),
-            // FIXME: stable rust doesn't allow generics in constants, but we should
-            // use `TypeOwnerId::as_generic_def_id` when it does.
-            DefWithBodyId::InTypeConstId(_) => None,
         }
     }
 }
@@ -928,7 +701,7 @@ pub enum AssocItemId {
 // casting them, and somehow making the constructors private, which would be annoying.
 impl_from!(FunctionId, ConstId, TypeAliasId for AssocItemId);
 
-#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
+#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
 pub enum GenericDefId {
     AdtId(AdtId),
     // consts can have type parameters from their parents (i.e. associated consts of traits)
@@ -962,7 +735,7 @@ impl GenericDefId {
     ) -> (HirFileId, Option<ast::GenericParamList>) {
         fn file_id_and_params_of_item_loc<Loc>(
             db: &dyn DefDatabase,
-            def: impl for<'db> Lookup<Database<'db> = dyn DefDatabase + 'db, Data = Loc>,
+            def: impl Lookup<Database = dyn DefDatabase, Data = Loc>,
         ) -> (HirFileId, Option<ast::GenericParamList>)
         where
             Loc: src::HasSource,
@@ -1017,15 +790,13 @@ impl From<AssocItemId> for GenericDefId {
     }
 }
 
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
 pub enum CallableDefId {
     FunctionId(FunctionId),
     StructId(StructId),
     EnumVariantId(EnumVariantId),
 }
 
-impl InternValueTrivial for CallableDefId {}
-
 impl_from!(FunctionId, StructId, EnumVariantId for CallableDefId);
 impl From<CallableDefId> for ModuleDefId {
     fn from(def: CallableDefId) -> ModuleDefId {
@@ -1038,7 +809,7 @@ impl From<CallableDefId> for ModuleDefId {
 }
 
 impl CallableDefId {
-    pub fn krate(self, db: &dyn DefDatabase) -> CrateId {
+    pub fn krate(self, db: &dyn DefDatabase) -> Crate {
         match self {
             CallableDefId::FunctionId(f) => f.krate(db),
             CallableDefId::StructId(s) => s.krate(db),
@@ -1135,7 +906,7 @@ impl From<VariantId> for AttrDefId {
     }
 }
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
 pub enum VariantId {
     EnumVariantId(EnumVariantId),
     StructId(StructId),
@@ -1144,12 +915,8 @@ pub enum VariantId {
 impl_from!(EnumVariantId, StructId, UnionId for VariantId);
 
 impl VariantId {
-    pub fn variant_data(self, db: &dyn DefDatabase) -> Arc<VariantData> {
-        match self {
-            VariantId::StructId(it) => db.struct_data(it).variant_data.clone(),
-            VariantId::UnionId(it) => db.union_data(it).variant_data.clone(),
-            VariantId::EnumVariantId(it) => db.enum_variant_data(it).variant_data.clone(),
-        }
+    pub fn variant_data(self, db: &dyn DefDatabase) -> Arc<VariantFields> {
+        db.variant_fields(self)
     }
 
     pub fn file_id(self, db: &dyn DefDatabase) -> HirFileId {
@@ -1175,7 +942,7 @@ pub trait HasModule {
     /// Returns the crate this thing is defined within.
     #[inline]
     #[doc(alias = "crate")]
-    fn krate(&self, db: &dyn DefDatabase) -> CrateId {
+    fn krate(&self, db: &dyn DefDatabase) -> Crate {
         self.module(db).krate
     }
 }
@@ -1197,7 +964,7 @@ pub trait HasModule {
 impl<N, ItemId> HasModule for ItemId
 where
     N: ItemTreeNode,
-    ItemId: for<'db> Lookup<Database<'db> = dyn DefDatabase + 'db, Data = ItemLoc<N>> + Copy,
+    ItemId: Lookup<Database = dyn DefDatabase, Data = ItemLoc<N>> + Copy,
 {
     #[inline]
     fn module(&self, db: &dyn DefDatabase) -> ModuleId {
@@ -1222,7 +989,7 @@ where
 #[inline]
 fn module_for_assoc_item_loc<'db>(
     db: &(dyn 'db + DefDatabase),
-    id: impl Lookup<Database<'db> = dyn DefDatabase + 'db, Data = AssocItemLoc<impl ItemTreeNode>>,
+    id: impl Lookup<Database = dyn DefDatabase, Data = AssocItemLoc<impl ItemTreeNode>>,
 ) -> ModuleId {
     id.lookup(db).container.module(db)
 }
@@ -1325,23 +1092,6 @@ impl HasModule for MacroId {
     }
 }
 
-impl HasModule for TypeOwnerId {
-    fn module(&self, db: &dyn DefDatabase) -> ModuleId {
-        match *self {
-            TypeOwnerId::FunctionId(it) => it.module(db),
-            TypeOwnerId::StaticId(it) => it.module(db),
-            TypeOwnerId::ConstId(it) => it.module(db),
-            TypeOwnerId::AdtId(it) => it.module(db),
-            TypeOwnerId::TraitId(it) => it.module(db),
-            TypeOwnerId::TraitAliasId(it) => it.module(db),
-            TypeOwnerId::TypeAliasId(it) => it.module(db),
-            TypeOwnerId::ImplId(it) => it.module(db),
-            TypeOwnerId::EnumVariantId(it) => it.module(db),
-            TypeOwnerId::InTypeConstId(it) => it.lookup(db).owner.module(db),
-        }
-    }
-}
-
 impl HasModule for DefWithBodyId {
     fn module(&self, db: &dyn DefDatabase) -> ModuleId {
         match self {
@@ -1349,7 +1099,6 @@ impl HasModule for DefWithBodyId {
             DefWithBodyId::StaticId(it) => it.module(db),
             DefWithBodyId::ConstId(it) => it.module(db),
             DefWithBodyId::VariantId(it) => it.module(db),
-            DefWithBodyId::InTypeConstId(it) => it.lookup(db).owner.module(db),
         }
     }
 }
@@ -1418,22 +1167,18 @@ impl ModuleDefId {
     }
 }
 
+// FIXME: Replace this with a plain function, it only has one impl
 /// A helper trait for converting to MacroCallId
-pub trait AsMacroCall {
-    fn as_call_id(
-        &self,
-        db: &dyn ExpandDatabase,
-        krate: CrateId,
-        resolver: impl Fn(&path::ModPath) -> Option<MacroDefId> + Copy,
-    ) -> Option<MacroCallId> {
-        self.as_call_id_with_errors(db, krate, resolver).ok()?.value
-    }
-
+trait AsMacroCall {
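+    /// `eager_callback` is called for each macro call that gets expanded eagerly
+    /// while this call is resolved, together with the `MacroCallId` it produced.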
     fn as_call_id_with_errors(
         &self,
         db: &dyn ExpandDatabase,
-        krate: CrateId,
-        resolver: impl Fn(&path::ModPath) -> Option<MacroDefId> + Copy,
+        krate: Crate,
+        resolver: impl Fn(&ModPath) -> Option<MacroDefId> + Copy,
+        eager_callback: &mut dyn FnMut(
+            InFile<(syntax::AstPtr<ast::MacroCall>, span::FileAstId<ast::MacroCall>)>,
+            MacroCallId,
+        ),
     ) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro>;
 }
 
@@ -1441,15 +1186,19 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
     fn as_call_id_with_errors(
         &self,
         db: &dyn ExpandDatabase,
-        krate: CrateId,
-        resolver: impl Fn(&path::ModPath) -> Option<MacroDefId> + Copy,
+        krate: Crate,
+        resolver: impl Fn(&ModPath) -> Option<MacroDefId> + Copy,
+        eager_callback: &mut dyn FnMut(
+            InFile<(syntax::AstPtr<ast::MacroCall>, span::FileAstId<ast::MacroCall>)>,
+            MacroCallId,
+        ),
     ) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> {
         let expands_to = hir_expand::ExpandTo::from_call_site(self.value);
         let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value));
         let span_map = db.span_map(self.file_id);
         let path = self.value.path().and_then(|path| {
             let range = path.syntax().text_range();
-            let mod_path = path::ModPath::from_src(db, path, &mut |range| {
+            let mod_path = ModPath::from_src(db, path, &mut |range| {
                 span_map.as_ref().span_for_range(range).ctx
             })?;
             let call_site = span_map.span_for_range(range);
@@ -1472,6 +1221,7 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
             krate,
             resolver,
             resolver,
+            eager_callback,
         )
     }
 }
@@ -1480,15 +1230,11 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
 #[derive(Clone, Debug, Eq, PartialEq)]
 struct AstIdWithPath<T: AstIdNode> {
     ast_id: AstId<T>,
-    path: Interned<path::ModPath>,
+    path: Interned<ModPath>,
 }
 
 impl<T: AstIdNode> AstIdWithPath<T> {
-    fn new(
-        file_id: HirFileId,
-        ast_id: FileAstId<T>,
-        path: Interned<path::ModPath>,
-    ) -> AstIdWithPath<T> {
+    fn new(file_id: HirFileId, ast_id: FileAstId<T>, path: Interned<ModPath>) -> AstIdWithPath<T> {
         AstIdWithPath { ast_id: AstId::new(file_id, ast_id), path }
     }
 }
@@ -1496,10 +1242,14 @@ impl<T: AstIdNode> AstIdWithPath<T> {
 fn macro_call_as_call_id(
     db: &dyn ExpandDatabase,
     call: &AstIdWithPath<ast::MacroCall>,
-    call_site: SyntaxContextId,
+    call_site: SyntaxContext,
     expand_to: ExpandTo,
-    krate: CrateId,
-    resolver: impl Fn(&path::ModPath) -> Option<MacroDefId> + Copy,
+    krate: Crate,
+    resolver: impl Fn(&ModPath) -> Option<MacroDefId> + Copy,
+    eager_callback: &mut dyn FnMut(
+        InFile<(syntax::AstPtr<ast::MacroCall>, span::FileAstId<ast::MacroCall>)>,
+        MacroCallId,
+    ),
 ) -> Result<Option<MacroCallId>, UnresolvedMacro> {
     macro_call_as_call_id_with_eager(
         db,
@@ -1510,6 +1260,7 @@ fn macro_call_as_call_id(
         krate,
         resolver,
         resolver,
+        eager_callback,
     )
     .map(|res| res.value)
 }
@@ -1517,12 +1268,16 @@ fn macro_call_as_call_id(
 fn macro_call_as_call_id_with_eager(
     db: &dyn ExpandDatabase,
     ast_id: AstId<ast::MacroCall>,
-    path: &path::ModPath,
-    call_site: SyntaxContextId,
+    path: &ModPath,
+    call_site: SyntaxContext,
     expand_to: ExpandTo,
-    krate: CrateId,
-    resolver: impl FnOnce(&path::ModPath) -> Option<MacroDefId>,
-    eager_resolver: impl Fn(&path::ModPath) -> Option<MacroDefId>,
+    krate: Crate,
+    resolver: impl FnOnce(&ModPath) -> Option<MacroDefId>,
+    eager_resolver: impl Fn(&ModPath) -> Option<MacroDefId>,
+    eager_callback: &mut dyn FnMut(
+        InFile<(syntax::AstPtr<ast::MacroCall>, span::FileAstId<ast::MacroCall>)>,
+        MacroCallId,
+    ),
 ) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> {
     let def = resolver(path).ok_or_else(|| UnresolvedMacro { path: path.clone() })?;
 
@@ -1535,6 +1290,7 @@ fn macro_call_as_call_id_with_eager(
             def,
             call_site,
             &|path| eager_resolver(path).filter(MacroDefId::is_fn_like),
+            eager_callback,
         ),
         _ if def.is_fn_like() => ExpandResult {
             value: Some(def.make_call(
@@ -1552,8 +1308,86 @@ fn macro_call_as_call_id_with_eager(
 
 #[derive(Debug)]
 pub struct UnresolvedMacro {
-    pub path: hir_expand::mod_path::ModPath,
+    pub path: ModPath,
 }
 
 #[derive(Default, Debug, Eq, PartialEq, Clone, Copy)]
 pub struct SyntheticSyntax;
+
+// Feature: Completions Attribute
+// Crate authors can opt their type out of completions in some cases.
+// This is done with the `#[rust_analyzer::completions(...)]` attribute.
+//
+// All completable things support `#[rust_analyzer::completions(ignore_flyimport)]`,
+// which causes the thing to get excluded from flyimport completion. It will still
+// be completed when in scope. This is analogous to the setting `rust-analyzer.completion.autoimport.exclude`
+// with `"type": "always"`.
+//
+// In addition, traits support two more modes: `#[rust_analyzer::completions(ignore_flyimport_methods)]`,
+// which means the trait itself may still be flyimported but its methods won't, and
+// `#[rust_analyzer::completions(ignore_methods)]`, which means the methods won't be completed even when
+// the trait is in scope (but the trait itself may still be completed). The methods will still be completed
+// on `dyn Trait`, `impl Trait` or where the trait is specified in bounds. These modes correspond to
+// the settings `rust-analyzer.completion.autoimport.exclude` with `"type": "methods"` and
+// `rust-analyzer.completion.excludeTraits`, respectively.
+//
+// Malformed attributes will be ignored without warnings.
+//
+// Note that users have no way to override this attribute, so be careful and only include things
+// users definitely do not want to be completed!
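+//
+// A minimal usage sketch (item names here are made up, and it assumes the
+// `rust_analyzer` tool attribute is accepted by the crate being edited):
+//
+// ```rust
+// // Excluded from flyimport, but still completed once it is in scope.
+// #[rust_analyzer::completions(ignore_flyimport)]
+// pub struct InternalHelper;
+//
+// // The trait itself completes, but its methods are only suggested on
+// // `dyn Detail`, `impl Detail`, or where `Detail` appears in bounds.
+// #[rust_analyzer::completions(ignore_methods)]
+// pub trait Detail {
+//     fn detail(&self);
+// }
+// ```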
+
+/// `#[rust_analyzer::completions(...)]` options.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum Complete {
+    /// No `#[rust_analyzer::completions(...)]`.
+    Yes,
+    /// `#[rust_analyzer::completions(ignore_flyimport)]`.
+    IgnoreFlyimport,
+    /// `#[rust_analyzer::completions(ignore_flyimport_methods)]` (on a trait only).
+    IgnoreFlyimportMethods,
+    /// `#[rust_analyzer::completions(ignore_methods)]` (on a trait only).
+    IgnoreMethods,
+}
+
+impl Complete {
+    pub fn extract(is_trait: bool, attrs: &Attrs) -> Complete {
+        let mut do_not_complete = Complete::Yes;
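+        // Scan for `#[rust_analyzer::completions(<mode>)]`; the last recognized
+        // mode wins, and malformed attributes are silently skipped.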
+        for ra_attr in attrs.rust_analyzer_tool() {
+            let segments = ra_attr.path.segments();
+            if segments.len() != 2 {
+                continue;
+            }
+            let action = segments[1].symbol();
+            if *action == sym::completions {
+                match ra_attr.token_tree_value().map(|tt| tt.token_trees().flat_tokens()) {
+                    Some([tt::TokenTree::Leaf(tt::Leaf::Ident(ident))]) => {
+                        if ident.sym == sym::ignore_flyimport {
+                            do_not_complete = Complete::IgnoreFlyimport;
+                        } else if is_trait {
+                            if ident.sym == sym::ignore_methods {
+                                do_not_complete = Complete::IgnoreMethods;
+                            } else if ident.sym == sym::ignore_flyimport_methods {
+                                do_not_complete = Complete::IgnoreFlyimportMethods;
+                            }
+                        }
+                    }
+                    _ => {}
+                }
+            }
+        }
+        do_not_complete
+    }
+
+    #[inline]
+    pub fn for_trait_item(trait_attr: Complete, item_attr: Complete) -> Complete {
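+        // Any opt-out on the trait itself hides its items from flyimport;
+        // otherwise the item's own attribute is used as-is.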
+        match (trait_attr, item_attr) {
+            (
+                Complete::IgnoreFlyimportMethods
+                | Complete::IgnoreFlyimport
+                | Complete::IgnoreMethods,
+                _,
+            ) => Complete::IgnoreFlyimport,
+            _ => item_attr,
+        }
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/lower.rs
deleted file mode 100644
index 7cddd48eb174c..0000000000000
--- a/src/tools/rust-analyzer/crates/hir-def/src/lower.rs
+++ /dev/null
@@ -1,149 +0,0 @@
-//! Context for lowering paths.
-use std::{cell::OnceCell, mem};
-
-use hir_expand::{span_map::SpanMap, AstId, HirFileId, InFile};
-use span::{AstIdMap, AstIdNode, Edition, EditionedFileId, FileId, RealSpanMap};
-use stdx::thin_vec::ThinVec;
-use syntax::ast;
-use triomphe::Arc;
-
-use crate::{
-    db::DefDatabase,
-    path::Path,
-    type_ref::{PathId, TypeBound, TypePtr, TypeRef, TypeRefId, TypesMap, TypesSourceMap},
-};
-
-pub struct LowerCtx<'a> {
-    pub db: &'a dyn DefDatabase,
-    file_id: HirFileId,
-    span_map: OnceCell<SpanMap>,
-    ast_id_map: OnceCell<Arc<AstIdMap>>,
-    impl_trait_bounds: Vec<ThinVec<TypeBound>>,
-    // Prevent nested impl traits like `impl Foo<impl Bar>`.
-    outer_impl_trait: bool,
-    types_map: &'a mut TypesMap,
-    types_source_map: &'a mut TypesSourceMap,
-}
-
-impl<'a> LowerCtx<'a> {
-    pub fn new(
-        db: &'a dyn DefDatabase,
-        file_id: HirFileId,
-        types_map: &'a mut TypesMap,
-        types_source_map: &'a mut TypesSourceMap,
-    ) -> Self {
-        LowerCtx {
-            db,
-            file_id,
-            span_map: OnceCell::new(),
-            ast_id_map: OnceCell::new(),
-            impl_trait_bounds: Vec::new(),
-            outer_impl_trait: false,
-            types_map,
-            types_source_map,
-        }
-    }
-
-    pub fn with_span_map_cell(
-        db: &'a dyn DefDatabase,
-        file_id: HirFileId,
-        span_map: OnceCell<SpanMap>,
-        types_map: &'a mut TypesMap,
-        types_source_map: &'a mut TypesSourceMap,
-    ) -> Self {
-        LowerCtx {
-            db,
-            file_id,
-            span_map,
-            ast_id_map: OnceCell::new(),
-            impl_trait_bounds: Vec::new(),
-            outer_impl_trait: false,
-            types_map,
-            types_source_map,
-        }
-    }
-
-    /// Prepares a `LowerCtx` for synthetic AST that needs to be lowered. This is intended for IDE things.
-    pub fn for_synthetic_ast(
-        db: &'a dyn DefDatabase,
-        ast_id_map: Arc<AstIdMap>,
-        types_map: &'a mut TypesMap,
-        types_source_map: &'a mut TypesSourceMap,
-    ) -> Self {
-        let file_id = EditionedFileId::new(
-            FileId::from_raw(EditionedFileId::MAX_FILE_ID),
-            Edition::Edition2015,
-        );
-        LowerCtx {
-            db,
-            // Make up an invalid file id, so that if we will try to actually access it salsa will panic.
-            file_id: file_id.into(),
-            span_map: SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(file_id))).into(),
-            ast_id_map: ast_id_map.into(),
-            impl_trait_bounds: Vec::new(),
-            outer_impl_trait: false,
-            types_map,
-            types_source_map,
-        }
-    }
-
-    pub(crate) fn span_map(&self) -> &SpanMap {
-        self.span_map.get_or_init(|| self.db.span_map(self.file_id))
-    }
-
-    pub(crate) fn lower_path(&mut self, ast: ast::Path) -> Option<Path> {
-        Path::from_src(self, ast)
-    }
-
-    pub(crate) fn ast_id<N: AstIdNode>(&self, item: &N) -> AstId<N> {
-        InFile::new(
-            self.file_id,
-            self.ast_id_map.get_or_init(|| self.db.ast_id_map(self.file_id)).ast_id(item),
-        )
-    }
-
-    pub fn update_impl_traits_bounds_from_type_ref(&mut self, type_ref: TypeRefId) {
-        TypeRef::walk(type_ref, self.types_map, &mut |tr| {
-            if let TypeRef::ImplTrait(bounds) = tr {
-                self.impl_trait_bounds.push(bounds.clone());
-            }
-        });
-    }
-
-    pub fn take_impl_traits_bounds(&mut self) -> Vec<ThinVec<TypeBound>> {
-        mem::take(&mut self.impl_trait_bounds)
-    }
-
-    pub(crate) fn outer_impl_trait(&self) -> bool {
-        self.outer_impl_trait
-    }
-
-    pub(crate) fn with_outer_impl_trait_scope<R>(
-        &mut self,
-        impl_trait: bool,
-        f: impl FnOnce(&mut Self) -> R,
-    ) -> R {
-        let old = mem::replace(&mut self.outer_impl_trait, impl_trait);
-        let result = f(self);
-        self.outer_impl_trait = old;
-        result
-    }
-
-    pub(crate) fn alloc_type_ref(&mut self, type_ref: TypeRef, node: TypePtr) -> TypeRefId {
-        let id = self.types_map.types.alloc(type_ref);
-        self.types_source_map.types_map_back.insert(id, InFile::new(self.file_id, node));
-        id
-    }
-
-    pub(crate) fn alloc_type_ref_desugared(&mut self, type_ref: TypeRef) -> TypeRefId {
-        self.types_map.types.alloc(type_ref)
-    }
-
-    pub(crate) fn alloc_error_type(&mut self) -> TypeRefId {
-        self.types_map.types.alloc(TypeRef::Error)
-    }
-
-    pub(crate) fn alloc_path(&mut self, path: Path, node: TypePtr) -> PathId {
-        PathId::from_type_ref_unchecked(self.alloc_type_ref(TypeRef::Path(path), node))
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
index c31d322132897..777953d3f212b 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
@@ -336,7 +336,7 @@ enum Command {
 }
 
 impl <> $crate::cmp::PartialOrd for Command< > where {
-    fn partial_cmp(&self , other: &Self ) -> $crate::option::Option::Option<$crate::cmp::Ordering> {
+    fn partial_cmp(&self , other: &Self ) -> $crate::option::Option<$crate::cmp::Ordering> {
         match $crate::intrinsics::discriminant_value(self ).partial_cmp(&$crate::intrinsics::discriminant_value(other)) {
             $crate::option::Option::Some($crate::cmp::Ordering::Equal)=> {
                 match (self , other) {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
index 8c5bd3b6d3696..abb5bd5ed7266 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
@@ -35,9 +35,9 @@ macro_rules! f {
     };
 }
 
-struct#0:1@58..64#4# MyTraitMap2#0:2@31..42#2# {#0:1@72..73#4#
-    map#0:1@86..89#4#:#0:1@89..90#4# #0:1@89..90#4#::#0:1@91..93#4#std#0:1@93..96#4#::#0:1@96..98#4#collections#0:1@98..109#4#::#0:1@109..111#4#HashSet#0:1@111..118#4#<#0:1@118..119#4#(#0:1@119..120#4#)#0:1@120..121#4#>#0:1@121..122#4#,#0:1@122..123#4#
-}#0:1@132..133#4#
+struct#0:1@58..64#14336# MyTraitMap2#0:2@31..42#ROOT2024# {#0:1@72..73#14336#
+    map#0:1@86..89#14336#:#0:1@89..90#14336# #0:1@89..90#14336#::#0:1@91..93#14336#std#0:1@93..96#14336#::#0:1@96..98#14336#collections#0:1@98..109#14336#::#0:1@109..111#14336#HashSet#0:1@111..118#14336#<#0:1@118..119#14336#(#0:1@119..120#14336#)#0:1@120..121#14336#>#0:1@121..122#14336#,#0:1@122..123#14336#
+}#0:1@132..133#14336#
 "#]],
     );
 }
@@ -75,12 +75,12 @@ macro_rules! f {
     };
 }
 
-fn#0:2@30..32#2# main#0:2@33..37#2#(#0:2@37..38#2#)#0:2@38..39#2# {#0:2@40..41#2#
-    1#0:2@50..51#2#;#0:2@51..52#2#
-    1.0#0:2@61..64#2#;#0:2@64..65#2#
-    (#0:2@74..75#2#(#0:2@75..76#2#1#0:2@76..77#2#,#0:2@77..78#2# )#0:2@78..79#2#,#0:2@79..80#2# )#0:2@80..81#2#.#0:2@81..82#2#0#0:2@82..85#2#.#0:2@82..85#2#0#0:2@82..85#2#;#0:2@85..86#2#
-    let#0:2@95..98#2# x#0:2@99..100#2# =#0:2@101..102#2# 1#0:2@103..104#2#;#0:2@104..105#2#
-}#0:2@110..111#2#
+fn#0:2@30..32#ROOT2024# main#0:2@33..37#ROOT2024#(#0:2@37..38#ROOT2024#)#0:2@38..39#ROOT2024# {#0:2@40..41#ROOT2024#
+    1#0:2@50..51#ROOT2024#;#0:2@51..52#ROOT2024#
+    1.0#0:2@61..64#ROOT2024#;#0:2@64..65#ROOT2024#
+    (#0:2@74..75#ROOT2024#(#0:2@75..76#ROOT2024#1#0:2@76..77#ROOT2024#,#0:2@77..78#ROOT2024# )#0:2@78..79#ROOT2024#,#0:2@79..80#ROOT2024# )#0:2@80..81#ROOT2024#.#0:2@81..82#ROOT2024#0#0:2@82..85#ROOT2024#.#0:2@82..85#ROOT2024#0#0:2@82..85#ROOT2024#;#0:2@85..86#ROOT2024#
+    let#0:2@95..98#ROOT2024# x#0:2@99..100#ROOT2024# =#0:2@101..102#ROOT2024# 1#0:2@103..104#ROOT2024#;#0:2@104..105#ROOT2024#
+}#0:2@110..111#ROOT2024#
 
 
 "#]],
@@ -171,7 +171,7 @@ fn main(foo: ()) {
     }
 
     fn main(foo: ()) {
-        /* error: unresolved macro unresolved */"helloworld!"#0:3@236..321#2#;
+        /* error: unresolved macro unresolved */"helloworld!"#0:3@236..321#ROOT2024#;
     }
 }
 
@@ -197,7 +197,7 @@ macro_rules! mk_struct {
 #[macro_use]
 mod foo;
 
-struct#1:1@59..65#4# Foo#0:2@32..35#2#(#1:1@70..71#4#u32#0:2@41..44#2#)#1:1@74..75#4#;#1:1@75..76#4#
+struct#1:1@59..65#14336# Foo#0:2@32..35#ROOT2024#(#1:1@70..71#14336#u32#0:2@41..44#ROOT2024#)#1:1@74..75#14336#;#1:1@75..76#14336#
 "#]],
     );
 }
@@ -423,10 +423,10 @@ m! { foo, bar }
 macro_rules! m {
     ($($i:ident),*) => ( impl Bar { $(fn $i() {})* } );
 }
-impl#\4# Bar#\4# {#\4#
-    fn#\4# foo#\2#(#\4#)#\4# {#\4#}#\4#
-    fn#\4# bar#\2#(#\4#)#\4# {#\4#}#\4#
-}#\4#
+impl#\14336# Bar#\14336# {#\14336#
+    fn#\14336# foo#\ROOT2024#(#\14336#)#\14336# {#\14336#}#\14336#
+    fn#\14336# bar#\ROOT2024#(#\14336#)#\14336# {#\14336#}#\14336#
+}#\14336#
 "#]],
     );
 }
@@ -1408,7 +1408,7 @@ ok!();
 macro_rules! m2 {
     ($($a:expr => $b:ident)* _ => $c:expr) => { ok!(); }
 }
-ok!();
+/* error: unexpected token in input */ok!();
 "#]],
     );
 }
@@ -1979,3 +1979,51 @@ fn f() {
 "#]],
     );
 }
+
+#[test]
+fn semicolon_does_not_glue() {
+    check(
+        r#"
+macro_rules! bug {
+    ($id: expr) => {
+        true
+    };
+    ($id: expr; $($attr: ident),*) => {
+        true
+    };
+    ($id: expr; $($attr: ident),*; $norm: expr) => {
+        true
+    };
+    ($id: expr; $($attr: ident),*;; $print: expr) => {
+        true
+    };
+    ($id: expr; $($attr: ident),*; $norm: expr; $print: expr) => {
+        true
+    };
+}
+
+let _ = bug!(a;;;test);
+    "#,
+        expect![[r#"
+macro_rules! bug {
+    ($id: expr) => {
+        true
+    };
+    ($id: expr; $($attr: ident),*) => {
+        true
+    };
+    ($id: expr; $($attr: ident),*; $norm: expr) => {
+        true
+    };
+    ($id: expr; $($attr: ident),*;; $print: expr) => {
+        true
+    };
+    ($id: expr; $($attr: ident),*; $norm: expr; $print: expr) => {
+        true
+    };
+}
+
+let _ = true;
+    "#]],
+    );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs
index e9a977da913bf..e33a366769b09 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs
@@ -162,9 +162,10 @@ fn test() {
 }
 
 #[test]
-fn expr_dont_match_inline_const() {
+fn expr_inline_const() {
     check(
         r#"
+//- /lib.rs edition:2021
 macro_rules! foo {
     ($e:expr) => { $e }
 }
@@ -181,6 +182,30 @@ macro_rules! foo {
 fn test() {
     /* error: no rule matches input tokens */missing;
 }
+"#]],
+    );
+    check(
+        r#"
+//- /lib.rs edition:2024
+macro_rules! foo {
+    ($e:expr) => { $e }
+}
+
+fn test() {
+    foo!(const { 3 });
+}
+"#,
+        expect![[r#"
+macro_rules! foo {
+    ($e:expr) => { $e }
+}
+
+fn test() {
+    (const {
+        3
+    }
+    );
+}
 "#]],
     );
 }
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
index 1bbed01443de8..cb4fcd887d8a5 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
@@ -582,8 +582,8 @@ macro_rules! arbitrary {
 }
 
 impl <A: Arbitrary> $crate::arbitrary::Arbitrary for Vec<A> {
-    type Parameters = RangedParams1<A::Parameters>;
-    type Strategy = VecStrategy<A::Strategy>;
+    type Parameters = RangedParams1<A::Parameters> ;
+    type Strategy = VecStrategy<A::Strategy> ;
     fn arbitrary_with(args: Self::Parameters) -> Self::Strategy { {
             let product_unpack![range, a] = args;
             vec(any_with::<A>(a), range)
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs
index a2d0ba3deb845..143b5df773054 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs
@@ -16,34 +16,34 @@ mod proc_macros;
 
 use std::{iter, ops::Range, sync};
 
-use base_db::SourceDatabase;
+use base_db::RootQueryDb;
 use expect_test::Expect;
 use hir_expand::{
+    InFile, MacroCallKind, MacroKind,
     db::ExpandDatabase,
     proc_macro::{ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind},
     span_map::SpanMapRef,
-    InFile, MacroCallKind, MacroFileId, MacroFileIdExt, MacroKind,
 };
 use intern::Symbol;
 use itertools::Itertools;
 use span::{Edition, Span};
 use stdx::{format_to, format_to_acc};
 use syntax::{
-    ast::{self, edit::IndentLevel},
     AstNode,
     SyntaxKind::{COMMENT, EOF, IDENT, LIFETIME_IDENT},
     SyntaxNode, T,
+    ast::{self, edit::IndentLevel},
 };
 use test_fixture::WithFixture;
 
 use crate::{
+    AdtId, AsMacroCall, Lookup, ModuleDefId,
     db::DefDatabase,
     nameres::{DefMap, MacroSubNs, ModuleSource},
     resolver::HasResolver,
     src::HasSource,
     test_db::TestDB,
     tt::TopSubtree,
-    AdtId, AsMacroCall, Lookup, ModuleDefId,
 };
 
 #[track_caller]
@@ -63,9 +63,11 @@ fn check_errors(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect)
                 MacroCallKind::Derive { ast_id, .. } => ast_id.map(|it| it.erase()),
                 MacroCallKind::Attr { ast_id, .. } => ast_id.map(|it| it.erase()),
             };
-            let ast = db
-                .parse(ast_id.file_id.file_id().expect("macros inside macros are not supported"))
-                .syntax_node();
+
+            let editioned_file_id =
+                ast_id.file_id.file_id().expect("macros inside macros are not supported");
+
+            let ast = db.parse(editioned_file_id).syntax_node();
             let ast_id_map = db.ast_id_map(ast_id.file_id);
             let node = ast_id_map.get_erased(ast_id.value).to_node(&ast);
             Some((node.text_range(), errors))
@@ -126,15 +128,19 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
     for macro_call in source_file.syntax().descendants().filter_map(ast::MacroCall::cast) {
         let macro_call = InFile::new(source.file_id, &macro_call);
         let res = macro_call
-            .as_call_id_with_errors(&db, krate, |path| {
-                resolver
-                    .resolve_path_as_macro(&db, path, Some(MacroSubNs::Bang))
-                    .map(|(it, _)| db.macro_def(it))
-            })
+            .as_call_id_with_errors(
+                &db,
+                krate,
+                |path| {
+                    resolver
+                        .resolve_path_as_macro(&db, path, Some(MacroSubNs::Bang))
+                        .map(|(it, _)| db.macro_def(it))
+                },
+                &mut |_, _| (),
+            )
             .unwrap();
         let macro_call_id = res.value.unwrap();
-        let macro_file = MacroFileId { macro_call_id };
-        let mut expansion_result = db.parse_macro_expansion(macro_file);
+        let mut expansion_result = db.parse_macro_expansion(macro_call_id);
         expansion_result.err = expansion_result.err.or(res.err);
         expansions.push((macro_call.value.clone(), expansion_result));
     }
@@ -357,7 +363,7 @@ impl ProcMacroExpander for IdentityWhenValidProcMacroExpander {
         _: Span,
         _: Span,
         _: Span,
-        _: Option<String>,
+        _: String,
     ) -> Result<TopSubtree, ProcMacroExpansionError> {
         let (parse, _) = syntax_bridge::token_tree_to_syntax_node(
             subtree,
@@ -371,4 +377,8 @@ impl ProcMacroExpander for IdentityWhenValidProcMacroExpander {
             panic!("got invalid macro input: {:?}", parse.errors());
         }
     }
+
+    fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
+        other.as_any().type_id() == std::any::TypeId::of::<Self>()
+    }
 }
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
index a43c0eb9d70bb..b2e1adc3650d4 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
@@ -181,9 +181,9 @@ fn foo(&self) {
     self.0. 1;
 }
 
-fn#0:1@45..47#2# foo#0:1@48..51#2#(#0:1@51..52#2#&#0:1@52..53#2#self#0:1@53..57#2# )#0:1@57..58#2# {#0:1@59..60#2#
-    self#0:1@65..69#2# .#0:1@69..70#2#0#0:1@70..71#2#.#0:1@71..72#2#1#0:1@73..74#2#;#0:1@74..75#2#
-}#0:1@76..77#2#"#]],
+fn#0:1@45..47#ROOT2024# foo#0:1@48..51#ROOT2024#(#0:1@51..52#ROOT2024#&#0:1@52..53#ROOT2024#self#0:1@53..57#ROOT2024# )#0:1@57..58#ROOT2024# {#0:1@59..60#ROOT2024#
+    self#0:1@65..69#ROOT2024# .#0:1@69..70#ROOT2024#0#0:1@70..71#ROOT2024#.#0:1@71..72#ROOT2024#1#0:1@73..74#ROOT2024#;#0:1@74..75#ROOT2024#
+}#0:1@76..77#ROOT2024#"#]],
     );
 }
 
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs
index 3b6e3c5916e32..fc66d8e28d8c6 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs
@@ -47,6 +47,7 @@
 //! path and, upon success, we run macro expansion and "collect module" phase on
 //! the result
 
+pub mod assoc;
 pub mod attr_resolution;
 mod collector;
 pub mod diagnostics;
@@ -59,30 +60,30 @@ mod tests;
 
 use std::ops::Deref;
 
-use base_db::CrateId;
+use base_db::Crate;
 use hir_expand::{
-    name::Name, proc_macro::ProcMacroKind, ErasedAstId, HirFileId, InFile, MacroCallId, MacroDefId,
+    EditionedFileId, ErasedAstId, HirFileId, InFile, MacroCallId, MacroDefId, mod_path::ModPath,
+    name::Name, proc_macro::ProcMacroKind,
 };
 use intern::Symbol;
 use itertools::Itertools;
 use la_arena::Arena;
 use rustc_hash::{FxHashMap, FxHashSet};
-use span::{Edition, EditionedFileId, FileAstId, FileId, ROOT_ERASED_FILE_AST_ID};
+use span::{Edition, FileAstId, FileId, ROOT_ERASED_FILE_AST_ID};
 use stdx::format_to;
-use syntax::{ast, AstNode, SmolStr, SyntaxNode};
+use syntax::{AstNode, SmolStr, SyntaxNode, ToSmolStr, ast};
 use triomphe::Arc;
 use tt::TextRange;
 
 use crate::{
+    AstId, BlockId, BlockLoc, CrateRootModuleId, ExternCrateId, FunctionId, FxIndexMap,
+    LocalModuleId, Lookup, MacroExpander, MacroId, ModuleId, ProcMacroId, UseId,
     db::DefDatabase,
     item_scope::{BuiltinShadowMode, ItemScope},
     item_tree::{ItemTreeId, Mod, TreeId},
     nameres::{diagnostics::DefDiagnostic, path_resolution::ResolveMode},
-    path::ModPath,
     per_ns::PerNs,
     visibility::{Visibility, VisibilityExplicitness},
-    AstId, BlockId, BlockLoc, CrateRootModuleId, EnumId, EnumVariantId, ExternCrateId, FunctionId,
-    FxIndexMap, LocalModuleId, Lookup, MacroExpander, MacroId, ModuleId, ProcMacroId, UseId,
 };
 
 pub use self::path_resolution::ResolvePathResultPrefixInfo;
@@ -95,6 +96,39 @@ const PREDEFINED_TOOLS: &[SmolStr] = &[
     SmolStr::new_static("rust_analyzer"),
 ];
 
+/// Parts of the def map that are only needed when analyzing code in the same crate.
+///
+/// Some data in the def map, such as the extern prelude, is only needed when analyzing
+/// code in the same crate (and perhaps in the IDE layer). If that data lived in the `DefMap`,
+/// dependent `DefMap`s would be invalidated whenever it changes (e.g. when a dependency is
+/// added to the crate). Instead, we split it out of the `DefMap` into a `LocalDefMap`.
+/// `crate_local_def_map()` returns both, and `crate_def_map()` returns only the externally
+/// relevant `DefMap`.
+#[derive(Debug, PartialEq, Eq, Default)]
+pub struct LocalDefMap {
+    // FIXME: There is probably other data that could live here, but the invalidation concern is
+    // less severe for it, and care is needed with data that block def maps also have.
+    /// The extern prelude which contains all root modules of external crates that are in scope.
+    extern_prelude: FxIndexMap<Name, (CrateRootModuleId, Option<ExternCrateId>)>,
+}
+
+impl LocalDefMap {
+    pub(crate) const EMPTY: &Self =
+        &Self { extern_prelude: FxIndexMap::with_hasher(rustc_hash::FxBuildHasher) };
+
+    fn shrink_to_fit(&mut self) {
+        let Self { extern_prelude } = self;
+        extern_prelude.shrink_to_fit();
+    }
+
+    pub(crate) fn extern_prelude(
+        &self,
+    ) -> impl DoubleEndedIterator<Item = (&Name, (CrateRootModuleId, Option<ExternCrateId>))> + '_
+    {
+        self.extern_prelude.iter().map(|(name, &def)| (name, def))
+    }
+}
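
As a rough illustration of the rationale in the `LocalDefMap` doc comment, the following toy sketch (not rust-analyzer's real API; all names are invented) shows the projection pattern: dependents consume only the shared half, so a change confined to the crate-local half, such as the extern prelude growing when a dependency is added, leaves the value they observe unchanged, and an incremental query engine with early cutoff does not re-run them.

```rust
// Toy model of the DefMap / LocalDefMap split described above.
#[derive(PartialEq)]
struct SharedDefMap {
    // definitions that other crates resolve through this crate
    public_items: Vec<String>,
}

struct CrateLocalDefMap {
    // only consulted while analyzing this crate itself
    extern_prelude: Vec<String>,
}

// Computes both halves in one pass (analogous to `crate_local_def_map()`).
fn crate_local_def_map(deps: &[&str]) -> (SharedDefMap, CrateLocalDefMap) {
    (
        SharedDefMap { public_items: vec!["Foo".to_owned()] },
        CrateLocalDefMap { extern_prelude: deps.iter().map(|d| d.to_string()).collect() },
    )
}

// Projects out the shared half (analogous to `crate_def_map()`).
fn crate_def_map(deps: &[&str]) -> SharedDefMap {
    crate_local_def_map(deps).0
}

fn main() {
    let (_, local) = crate_local_def_map(&["std", "serde"]);
    println!("extern prelude: {:?}", local.extern_prelude);

    let before = crate_def_map(&["std"]);
    let after = crate_def_map(&["std", "serde"]); // a dependency was added
    println!("{} public items visible to dependents", after.public_items.len());
    // The shared half did not change, so with early cutoff the dependents of
    // `crate_def_map` would not be re-analyzed.
    assert!(before == after);
}
```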
+
 /// Contains the results of (early) name resolution.
 ///
 /// A `DefMap` stores the module tree and the definitions that are in scope in every module after
@@ -107,7 +141,7 @@ const PREDEFINED_TOOLS: &[SmolStr] = &[
 #[derive(Debug, PartialEq, Eq)]
 pub struct DefMap {
     /// The crate this `DefMap` belongs to.
-    krate: CrateId,
+    krate: Crate,
     /// When this is a block def map, this will hold the block id of the block and module that
     /// contains this block.
     block: Option<BlockInfo>,
@@ -124,12 +158,15 @@ pub struct DefMap {
     /// this contains all kinds of macro, not just `macro_rules!` macro.
     /// ExternCrateId being None implies it being imported from the general prelude import.
     macro_use_prelude: FxHashMap<Name, (MacroId, Option<ExternCrateId>)>,
-    pub(crate) enum_definitions: FxHashMap<EnumId, Box<[EnumVariantId]>>,
 
+    // FIXME: AstIds are fairly unstable
     /// Tracks which custom derives are in scope for an item, to allow resolution of derive helper
     /// attributes.
     // FIXME: Figure out a better way for the IDE layer to resolve these?
     derive_helpers_in_scope: FxHashMap<AstId<ast::Item>, Vec<(Name, MacroId, MacroCallId)>>,
+    // FIXME: AstIds are fairly unstable
+    /// A mapping from [`hir_expand::MacroDefId`] to [`crate::MacroId`].
+    pub macro_def_to_macro_id: FxHashMap<ErasedAstId, MacroId>,
 
     /// The diagnostics that need to be emitted for this crate.
     diagnostics: Vec<DefDiagnostic>,
@@ -141,9 +178,6 @@ pub struct DefMap {
 /// Data that belongs to a crate which is shared between a crate's def map and all its block def maps.
 #[derive(Clone, Debug, PartialEq, Eq)]
 struct DefMapCrateData {
-    /// The extern prelude which contains all root modules of external crates that are in scope.
-    extern_prelude: FxIndexMap<Name, (CrateRootModuleId, Option<ExternCrateId>)>,
-
     /// Side table for resolving derive helpers.
     exported_derives: FxHashMap<MacroDefId, Box<[Name]>>,
     fn_proc_macro_mapping: FxHashMap<FunctionId, ProcMacroId>,
@@ -166,7 +200,6 @@ struct DefMapCrateData {
 impl DefMapCrateData {
     fn new(edition: Edition) -> Self {
         Self {
-            extern_prelude: FxIndexMap::default(),
             exported_derives: FxHashMap::default(),
             fn_proc_macro_mapping: FxHashMap::default(),
             registered_attrs: Vec::new(),
@@ -182,7 +215,6 @@ impl DefMapCrateData {
 
     fn shrink_to_fit(&mut self) {
         let Self {
-            extern_prelude,
             exported_derives,
             fn_proc_macro_mapping,
             registered_attrs,
@@ -194,7 +226,6 @@ impl DefMapCrateData {
             edition: _,
             recursion_limit: _,
         } = self;
-        extern_prelude.shrink_to_fit();
         exported_derives.shrink_to_fit();
         fn_proc_macro_mapping.shrink_to_fit();
         registered_attrs.shrink_to_fit();
@@ -219,11 +250,11 @@ struct BlockRelativeModuleId {
 }
 
 impl BlockRelativeModuleId {
-    fn def_map(self, db: &dyn DefDatabase, krate: CrateId) -> Arc<DefMap> {
+    fn def_map(self, db: &dyn DefDatabase, krate: Crate) -> Arc<DefMap> {
         self.into_module(krate).def_map(db)
     }
 
-    fn into_module(self, krate: CrateId) -> ModuleId {
+    fn into_module(self, krate: Crate) -> ModuleId {
         ModuleId { krate, block: self.block, local_id: self.local_id }
     }
 
@@ -295,18 +326,19 @@ impl ModuleOrigin {
     /// That is, a file or a `mod foo {}` with items.
     pub fn definition_source(&self, db: &dyn DefDatabase) -> InFile<ModuleSource> {
         match self {
-            &ModuleOrigin::File { definition, .. } | &ModuleOrigin::CrateRoot { definition } => {
-                let sf = db.parse(definition).tree();
-                InFile::new(definition.into(), ModuleSource::SourceFile(sf))
+            &ModuleOrigin::File { definition: editioned_file_id, .. }
+            | &ModuleOrigin::CrateRoot { definition: editioned_file_id } => {
+                let sf = db.parse(editioned_file_id).tree();
+                InFile::new(editioned_file_id.into(), ModuleSource::SourceFile(sf))
             }
             &ModuleOrigin::Inline { definition, definition_tree_id } => InFile::new(
                 definition_tree_id.file_id(),
                 ModuleSource::Module(
-                    AstId::new(definition_tree_id.file_id(), definition).to_node(db.upcast()),
+                    AstId::new(definition_tree_id.file_id(), definition).to_node(db),
                 ),
             ),
             ModuleOrigin::BlockExpr { block, .. } => {
-                InFile::new(block.file_id, ModuleSource::BlockExpr(block.to_node(db.upcast())))
+                InFile::new(block.file_id, ModuleSource::BlockExpr(block.to_node(db)))
             }
         }
     }
@@ -334,14 +366,28 @@ impl DefMap {
         self.data.edition
     }
 
-    pub(crate) fn crate_def_map_query(db: &dyn DefDatabase, crate_id: CrateId) -> Arc<DefMap> {
-        let crate_graph = db.crate_graph();
-        let krate = &crate_graph[crate_id];
-        let name = krate.display_name.as_deref().map(Symbol::as_str).unwrap_or_default();
-        let _p = tracing::info_span!("crate_def_map_query", ?name).entered();
+    pub(crate) fn crate_def_map_query(db: &dyn DefDatabase, crate_id: Crate) -> Arc<DefMap> {
+        db.crate_local_def_map(crate_id).0
+    }
+
+    pub(crate) fn crate_local_def_map_query(
+        db: &dyn DefDatabase,
+        crate_id: Crate,
+    ) -> (Arc<DefMap>, Arc<LocalDefMap>) {
+        let krate = crate_id.data(db);
+        let _p = tracing::info_span!(
+            "crate_def_map_query",
+            name=?crate_id
+                .extra_data(db)
+                .display_name
+                .as_ref()
+                .map(|it| it.crate_name().to_smolstr())
+                .unwrap_or_default()
+        )
+        .entered();
 
         let module_data = ModuleData::new(
-            ModuleOrigin::CrateRoot { definition: krate.root_file_id() },
+            ModuleOrigin::CrateRoot { definition: krate.root_file_id(db) },
             Visibility::Public,
         );
 
@@ -351,10 +397,14 @@ impl DefMap {
             module_data,
             None,
         );
-        let def_map =
-            collector::collect_defs(db, def_map, TreeId::new(krate.root_file_id().into(), None));
+        let (def_map, local_def_map) = collector::collect_defs(
+            db,
+            def_map,
+            TreeId::new(krate.root_file_id(db).into(), None),
+            None,
+        );
 
-        Arc::new(def_map)
+        (Arc::new(def_map), Arc::new(local_def_map))
     }
 
     pub(crate) fn block_def_map_query(db: &dyn DefDatabase, block_id: BlockId) -> Arc<DefMap> {
@@ -367,10 +417,10 @@ impl DefMap {
         let module_data =
             ModuleData::new(ModuleOrigin::BlockExpr { block: ast_id, id: block_id }, visibility);
 
-        let parent_map = module.def_map(db);
+        let (crate_map, crate_local_map) = db.crate_local_def_map(module.krate);
         let def_map = DefMap::empty(
             module.krate,
-            parent_map.data.clone(),
+            crate_map.data.clone(),
             module_data,
             Some(BlockInfo {
                 block: block_id,
@@ -378,13 +428,17 @@ impl DefMap {
             }),
         );
 
-        let def_map =
-            collector::collect_defs(db, def_map, TreeId::new(ast_id.file_id, Some(block_id)));
+        let (def_map, _) = collector::collect_defs(
+            db,
+            def_map,
+            TreeId::new(ast_id.file_id, Some(block_id)),
+            Some(crate_local_map),
+        );
         Arc::new(def_map)
     }
 
     fn empty(
-        krate: CrateId,
+        krate: Crate,
         crate_data: Arc<DefMapCrateData>,
         module_data: ModuleData,
         block: Option<BlockInfo>,
@@ -401,8 +455,8 @@ impl DefMap {
             macro_use_prelude: FxHashMap::default(),
             derive_helpers_in_scope: FxHashMap::default(),
             diagnostics: Vec::new(),
-            enum_definitions: FxHashMap::default(),
             data: crate_data,
+            macro_def_to_macro_id: FxHashMap::default(),
         }
     }
     fn shrink_to_fit(&mut self) {
@@ -416,14 +470,14 @@ impl DefMap {
             krate: _,
             prelude: _,
             data: _,
-            enum_definitions,
+            macro_def_to_macro_id,
         } = self;
 
+        macro_def_to_macro_id.shrink_to_fit();
         macro_use_prelude.shrink_to_fit();
         diagnostics.shrink_to_fit();
         modules.shrink_to_fit();
         derive_helpers_in_scope.shrink_to_fit();
-        enum_definitions.shrink_to_fit();
         for (_, module) in modules.iter_mut() {
             module.children.shrink_to_fit();
             module.scope.shrink_to_fit();
@@ -432,11 +486,15 @@ impl DefMap {
 }
 
 impl DefMap {
-    pub fn modules_for_file(&self, file_id: FileId) -> impl Iterator<Item = LocalModuleId> + '_ {
+    pub fn modules_for_file<'a>(
+        &'a self,
+        db: &'a dyn DefDatabase,
+        file_id: FileId,
+    ) -> impl Iterator<Item = LocalModuleId> + 'a {
         self.modules
             .iter()
             .filter(move |(_id, data)| {
-                data.origin.file_id().map(EditionedFileId::file_id) == Some(file_id)
+                data.origin.file_id().map(|file_id| file_id.file_id(db)) == Some(file_id)
             })
             .map(|(id, _data)| id)
     }
@@ -476,7 +534,7 @@ impl DefMap {
         self.data.fn_proc_macro_mapping.get(&id).copied()
     }
 
-    pub fn krate(&self) -> CrateId {
+    pub fn krate(&self) -> Crate {
         self.krate
     }
 
@@ -551,12 +609,12 @@ impl DefMap {
         ) {
             format_to!(buf, "{}\n", path);
 
-            map.modules[module].scope.dump(db.upcast(), buf);
+            map.modules[module].scope.dump(db, buf);
 
             for (name, child) in
                 map.modules[module].children.iter().sorted_by(|a, b| Ord::cmp(&a.0, &b.0))
             {
-                let path = format!("{path}::{}", name.display(db.upcast(), Edition::LATEST));
+                let path = format!("{path}::{}", name.display(db, Edition::LATEST));
                 buf.push('\n');
                 go(buf, db, map, &path, *child);
             }
@@ -587,19 +645,13 @@ impl DefMap {
         self.prelude
     }
 
-    pub(crate) fn extern_prelude(
-        &self,
-    ) -> impl DoubleEndedIterator<Item = (&Name, (CrateRootModuleId, Option<ExternCrateId>))> + '_
-    {
-        self.data.extern_prelude.iter().map(|(name, &def)| (name, def))
-    }
-
     pub(crate) fn macro_use_prelude(&self) -> &FxHashMap<Name, (MacroId, Option<ExternCrateId>)> {
         &self.macro_use_prelude
     }
 
     pub(crate) fn resolve_path(
         &self,
+        local_def_map: &LocalDefMap,
         db: &dyn DefDatabase,
         original_module: LocalModuleId,
         path: &ModPath,
@@ -607,6 +659,7 @@ impl DefMap {
         expected_macro_subns: Option<MacroSubNs>,
     ) -> (PerNs, Option<usize>) {
         let res = self.resolve_path_fp_with_macro(
+            local_def_map,
             db,
             ResolveMode::Other,
             original_module,
@@ -621,12 +674,14 @@ impl DefMap {
     /// points at the unresolved segments.
     pub(crate) fn resolve_path_locally(
         &self,
+        local_def_map: &LocalDefMap,
         db: &dyn DefDatabase,
         original_module: LocalModuleId,
         path: &ModPath,
         shadow: BuiltinShadowMode,
     ) -> (PerNs, Option<usize>, ResolvePathResultPrefixInfo) {
         let res = self.resolve_path_fp_with_macro_single(
+            local_def_map,
             db,
             ResolveMode::Other,
             original_module,
@@ -695,17 +750,14 @@ impl ModuleData {
             &ModuleOrigin::File { definition, .. } | &ModuleOrigin::CrateRoot { definition } => {
                 InFile::new(
                     definition.into(),
-                    ErasedAstId::new(definition.into(), ROOT_ERASED_FILE_AST_ID)
-                        .to_range(db.upcast()),
+                    ErasedAstId::new(definition.into(), ROOT_ERASED_FILE_AST_ID).to_range(db),
                 )
             }
             &ModuleOrigin::Inline { definition, definition_tree_id } => InFile::new(
                 definition_tree_id.file_id(),
-                AstId::new(definition_tree_id.file_id(), definition).to_range(db.upcast()),
+                AstId::new(definition_tree_id.file_id(), definition).to_range(db),
             ),
-            ModuleOrigin::BlockExpr { block, .. } => {
-                InFile::new(block.file_id, block.to_range(db.upcast()))
-            }
+            ModuleOrigin::BlockExpr { block, .. } => InFile::new(block.file_id, block.to_range(db)),
         }
     }
 
@@ -713,7 +765,7 @@ impl ModuleData {
     /// `None` for the crate root or block.
     pub fn declaration_source(&self, db: &dyn DefDatabase) -> Option<InFile<ast::Module>> {
         let decl = self.origin.declaration()?;
-        let value = decl.to_node(db.upcast());
+        let value = decl.to_node(db);
         Some(InFile { file_id: decl.file_id, value })
     }
 
@@ -721,7 +773,7 @@ impl ModuleData {
     /// `None` for the crate root or block.
     pub fn declaration_source_range(&self, db: &dyn DefDatabase) -> Option<InFile<TextRange>> {
         let decl = self.origin.declaration()?;
-        Some(InFile { file_id: decl.file_id, value: decl.to_range(db.upcast()) })
+        Some(InFile { file_id: decl.file_id, value: decl.to_range(db) })
     }
 }
 
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/assoc.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/assoc.rs
new file mode 100644
index 0000000000000..b097065529476
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/assoc.rs
@@ -0,0 +1,307 @@
+//! Expansion of associated items
+
+use hir_expand::{AstId, InFile, Intern, Lookup, MacroCallKind, MacroDefKind, name::Name};
+use syntax::ast;
+use triomphe::Arc;
+
+use crate::{
+    AssocItemId, AstIdWithPath, ConstLoc, FunctionId, FunctionLoc, ImplId, ItemContainerId,
+    ItemLoc, MacroCallId, ModuleId, TraitId, TypeAliasId, TypeAliasLoc,
+    db::DefDatabase,
+    item_tree::{AssocItem, ItemTree, ItemTreeId, MacroCall, ModItem, TreeId},
+    macro_call_as_call_id,
+    nameres::{
+        DefMap, LocalDefMap, MacroSubNs,
+        attr_resolution::ResolvedAttr,
+        diagnostics::{DefDiagnostic, DefDiagnostics},
+    },
+};
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct TraitItems {
+    pub items: Box<[(Name, AssocItemId)]>,
+    // box it as the vec is usually empty anyway
+    // FIXME: AstIds are rather unstable...
+    pub macro_calls: Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>,
+}
+
+impl TraitItems {
+    #[inline]
+    pub(crate) fn trait_items_query(db: &dyn DefDatabase, tr: TraitId) -> Arc<TraitItems> {
+        db.trait_items_with_diagnostics(tr).0
+    }
+
+    pub(crate) fn trait_items_with_diagnostics_query(
+        db: &dyn DefDatabase,
+        tr: TraitId,
+    ) -> (Arc<TraitItems>, DefDiagnostics) {
+        let ItemLoc { container: module_id, id: tree_id } = tr.lookup(db);
+
+        let collector = AssocItemCollector::new(db, module_id, ItemContainerId::TraitId(tr));
+        let item_tree = tree_id.item_tree(db);
+        let (items, macro_calls, diagnostics) =
+            collector.collect(&item_tree, tree_id.tree_id(), &item_tree[tree_id.value].items);
+
+        (Arc::new(TraitItems { macro_calls, items }), DefDiagnostics::new(diagnostics))
+    }
+
+    pub fn associated_types(&self) -> impl Iterator<Item = TypeAliasId> + '_ {
+        self.items.iter().filter_map(|(_name, item)| match item {
+            AssocItemId::TypeAliasId(t) => Some(*t),
+            _ => None,
+        })
+    }
+
+    pub fn associated_type_by_name(&self, name: &Name) -> Option<TypeAliasId> {
+        self.items.iter().find_map(|(item_name, item)| match item {
+            AssocItemId::TypeAliasId(t) if item_name == name => Some(*t),
+            _ => None,
+        })
+    }
+
+    pub fn method_by_name(&self, name: &Name) -> Option<FunctionId> {
+        self.items.iter().find_map(|(item_name, item)| match item {
+            AssocItemId::FunctionId(t) if item_name == name => Some(*t),
+            _ => None,
+        })
+    }
+
+    pub fn attribute_calls(&self) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ {
+        self.macro_calls.iter().flat_map(|it| it.iter()).copied()
+    }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct ImplItems {
+    pub items: Box<[(Name, AssocItemId)]>,
+    // box it as the vec is usually empty anyway
+    // FIXME: AstIds are rather unstable...
+    pub macro_calls: Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>,
+}
+
+impl ImplItems {
+    #[inline]
+    pub(crate) fn impl_items_query(db: &dyn DefDatabase, id: ImplId) -> Arc<ImplItems> {
+        db.impl_items_with_diagnostics(id).0
+    }
+
+    pub(crate) fn impl_items_with_diagnostics_query(
+        db: &dyn DefDatabase,
+        id: ImplId,
+    ) -> (Arc<ImplItems>, DefDiagnostics) {
+        let _p = tracing::info_span!("impl_items_with_diagnostics_query").entered();
+        let ItemLoc { container: module_id, id: tree_id } = id.lookup(db);
+
+        let collector = AssocItemCollector::new(db, module_id, ItemContainerId::ImplId(id));
+        let item_tree = tree_id.item_tree(db);
+        let (items, macro_calls, diagnostics) =
+            collector.collect(&item_tree, tree_id.tree_id(), &item_tree[tree_id.value].items);
+
+        (Arc::new(ImplItems { items, macro_calls }), DefDiagnostics::new(diagnostics))
+    }
+
+    pub fn attribute_calls(&self) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ {
+        self.macro_calls.iter().flat_map(|it| it.iter()).copied()
+    }
+}
+
+struct AssocItemCollector<'a> {
+    db: &'a dyn DefDatabase,
+    module_id: ModuleId,
+    def_map: Arc<DefMap>,
+    local_def_map: Arc<LocalDefMap>,
+    diagnostics: Vec<DefDiagnostic>,
+    container: ItemContainerId,
+
+    depth: usize,
+    items: Vec<(Name, AssocItemId)>,
+    macro_calls: Vec<(AstId<ast::Item>, MacroCallId)>,
+}
+
+impl<'a> AssocItemCollector<'a> {
+    fn new(db: &'a dyn DefDatabase, module_id: ModuleId, container: ItemContainerId) -> Self {
+        let (def_map, local_def_map) = module_id.local_def_map(db);
+        Self {
+            db,
+            module_id,
+            def_map,
+            local_def_map,
+            container,
+            items: Vec::new(),
+
+            depth: 0,
+            macro_calls: Vec::new(),
+            diagnostics: Vec::new(),
+        }
+    }
+
+    fn collect(
+        mut self,
+        item_tree: &ItemTree,
+        tree_id: TreeId,
+        assoc_items: &[AssocItem],
+    ) -> (
+        Box<[(Name, AssocItemId)]>,
+        Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>,
+        Vec<DefDiagnostic>,
+    ) {
+        self.items.reserve(assoc_items.len());
+        for &item in assoc_items {
+            self.collect_item(item_tree, tree_id, item);
+        }
+        (
+            self.items.into_boxed_slice(),
+            if self.macro_calls.is_empty() { None } else { Some(Box::new(self.macro_calls)) },
+            self.diagnostics,
+        )
+    }
+
+    fn collect_item(&mut self, item_tree: &ItemTree, tree_id: TreeId, item: AssocItem) {
+        let attrs = item_tree.attrs(self.db, self.module_id.krate, ModItem::from(item).into());
+        if !attrs.is_cfg_enabled(self.module_id.krate.cfg_options(self.db)) {
+            self.diagnostics.push(DefDiagnostic::unconfigured_code(
+                self.module_id.local_id,
+                tree_id,
+                ModItem::from(item).into(),
+                attrs.cfg().unwrap(),
+                self.module_id.krate.cfg_options(self.db).clone(),
+            ));
+            return;
+        }
+
+        'attrs: for attr in &*attrs {
+            let ast_id = AstId::new(tree_id.file_id(), item.ast_id(item_tree).upcast());
+            let ast_id_with_path = AstIdWithPath { path: attr.path.clone(), ast_id };
+
+            match self.def_map.resolve_attr_macro(
+                &self.local_def_map,
+                self.db,
+                self.module_id.local_id,
+                ast_id_with_path,
+                attr,
+            ) {
+                Ok(ResolvedAttr::Macro(call_id)) => {
+                    let loc = self.db.lookup_intern_macro_call(call_id);
+                    if let MacroDefKind::ProcMacro(_, exp, _) = loc.def.kind {
+                        // If there's no expander for the proc macro (e.g. the
+                        // proc macro is ignored, or building the proc macro
+                        // crate failed), skip expansion like we would if it was
+                        // disabled. This is analogous to the handling in
+                        // `DefCollector::collect_macros`.
+                        if let Some(err) = exp.as_expand_error(self.module_id.krate) {
+                            self.diagnostics.push(DefDiagnostic::macro_error(
+                                self.module_id.local_id,
+                                ast_id,
+                                (*attr.path).clone(),
+                                err,
+                            ));
+                            continue 'attrs;
+                        }
+                    }
+
+                    self.macro_calls.push((ast_id, call_id));
+                    self.collect_macro_items(call_id);
+                    return;
+                }
+                Ok(_) => (),
+                Err(_) => {
+                    self.diagnostics.push(DefDiagnostic::unresolved_macro_call(
+                        self.module_id.local_id,
+                        MacroCallKind::Attr { ast_id, attr_args: None, invoc_attr_index: attr.id },
+                        attr.path().clone(),
+                    ));
+                }
+            }
+        }
+
+        self.record_item(item_tree, tree_id, item);
+    }
+
+    fn record_item(&mut self, item_tree: &ItemTree, tree_id: TreeId, item: AssocItem) {
+        match item {
+            AssocItem::Function(id) => {
+                let item = &item_tree[id];
+                let def =
+                    FunctionLoc { container: self.container, id: ItemTreeId::new(tree_id, id) }
+                        .intern(self.db);
+                self.items.push((item.name.clone(), def.into()));
+            }
+            AssocItem::TypeAlias(id) => {
+                let item = &item_tree[id];
+                let def =
+                    TypeAliasLoc { container: self.container, id: ItemTreeId::new(tree_id, id) }
+                        .intern(self.db);
+                self.items.push((item.name.clone(), def.into()));
+            }
+            AssocItem::Const(id) => {
+                let item = &item_tree[id];
+                let Some(name) = item.name.clone() else { return };
+                let def = ConstLoc { container: self.container, id: ItemTreeId::new(tree_id, id) }
+                    .intern(self.db);
+                self.items.push((name, def.into()));
+            }
+            AssocItem::MacroCall(call) => {
+                let MacroCall { ast_id, expand_to, ctxt, ref path } = item_tree[call];
+
+                let resolver = |path: &_| {
+                    self.def_map
+                        .resolve_path(
+                            &self.local_def_map,
+                            self.db,
+                            self.module_id.local_id,
+                            path,
+                            crate::item_scope::BuiltinShadowMode::Other,
+                            Some(MacroSubNs::Bang),
+                        )
+                        .0
+                        .take_macros()
+                        .map(|it| self.db.macro_def(it))
+                };
+                match macro_call_as_call_id(
+                    self.db,
+                    &AstIdWithPath::new(tree_id.file_id(), ast_id, Clone::clone(path)),
+                    ctxt,
+                    expand_to,
+                    self.module_id.krate(),
+                    resolver,
+                    &mut |ptr, call_id| {
+                        self.macro_calls.push((ptr.map(|(_, it)| it.upcast()), call_id))
+                    },
+                ) {
+                    Ok(Some(call_id)) => {
+                        self.macro_calls
+                            .push((InFile::new(tree_id.file_id(), ast_id.upcast()), call_id));
+                        self.collect_macro_items(call_id);
+                    }
+                    Ok(None) => (),
+                    Err(_) => {
+                        self.diagnostics.push(DefDiagnostic::unresolved_macro_call(
+                            self.module_id.local_id,
+                            MacroCallKind::FnLike {
+                                ast_id: InFile::new(tree_id.file_id(), ast_id),
+                                expand_to,
+                                eager: None,
+                            },
+                            Clone::clone(path),
+                        ));
+                    }
+                }
+            }
+        }
+    }
+
+    fn collect_macro_items(&mut self, macro_call_id: MacroCallId) {
+        if self.depth > self.def_map.recursion_limit() as usize {
+            tracing::warn!("macro expansion is too deep");
+            return;
+        }
+        let tree_id = TreeId::new(macro_call_id.into(), None);
+        let item_tree = self.db.file_item_tree(macro_call_id.into());
+
+        self.depth += 1;
+        for item in item_tree.top_level_items().iter().filter_map(ModItem::as_assoc_item) {
+            self.collect_item(&item_tree, tree_id, item);
+        }
+        self.depth -= 1;
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs
index d1f6ed023c2fa..e7e96804ae737 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs
@@ -1,21 +1,21 @@
 //! Post-nameres attribute resolution.
 
-use base_db::CrateId;
+use base_db::Crate;
 use hir_expand::{
+    MacroCallId, MacroCallKind, MacroDefId,
     attrs::{Attr, AttrId, AttrInput},
     inert_attr_macro::find_builtin_attr_idx,
-    MacroCallId, MacroCallKind, MacroDefId,
+    mod_path::{ModPath, PathKind},
 };
-use span::SyntaxContextId;
+use span::SyntaxContext;
 use syntax::ast;
 use triomphe::Arc;
 
 use crate::{
+    AstIdWithPath, LocalModuleId, MacroId, UnresolvedMacro,
     db::DefDatabase,
     item_scope::BuiltinShadowMode,
-    nameres::path_resolution::ResolveMode,
-    path::{self, ModPath, PathKind},
-    AstIdWithPath, LocalModuleId, MacroId, UnresolvedMacro,
+    nameres::{LocalDefMap, path_resolution::ResolveMode},
 };
 
 use super::{DefMap, MacroSubNs};
@@ -30,6 +30,7 @@ pub enum ResolvedAttr {
 impl DefMap {
     pub(crate) fn resolve_attr_macro(
         &self,
+        local_def_map: &LocalDefMap,
         db: &dyn DefDatabase,
         original_module: LocalModuleId,
         ast_id: AstIdWithPath<ast::Item>,
@@ -42,6 +43,7 @@ impl DefMap {
         }
 
         let resolved_res = self.resolve_path_fp_with_macro(
+            local_def_map,
             db,
             ResolveMode::Other,
             original_module,
@@ -105,7 +107,7 @@ pub(super) fn attr_macro_as_call_id(
     db: &dyn DefDatabase,
     item_attr: &AstIdWithPath<ast::Item>,
     macro_attr: &Attr,
-    krate: CrateId,
+    krate: Crate,
     def: MacroDefId,
 ) -> MacroCallId {
     let arg = match macro_attr.input.as_deref() {
@@ -119,7 +121,7 @@ pub(super) fn attr_macro_as_call_id(
     };
 
     def.make_call(
-        db.upcast(),
+        db,
         krate,
         MacroCallKind::Attr {
             ast_id: item_attr.ast_id,
@@ -135,16 +137,16 @@ pub(super) fn derive_macro_as_call_id(
     item_attr: &AstIdWithPath<ast::Adt>,
     derive_attr_index: AttrId,
     derive_pos: u32,
-    call_site: SyntaxContextId,
-    krate: CrateId,
-    resolver: impl Fn(&path::ModPath) -> Option<(MacroId, MacroDefId)>,
+    call_site: SyntaxContext,
+    krate: Crate,
+    resolver: impl Fn(&ModPath) -> Option<(MacroId, MacroDefId)>,
     derive_macro_id: MacroCallId,
 ) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> {
     let (macro_id, def_id) = resolver(&item_attr.path)
         .filter(|(_, def_id)| def_id.is_derive())
         .ok_or_else(|| UnresolvedMacro { path: item_attr.path.as_ref().clone() })?;
     let call_id = def_id.make_call(
-        db.upcast(),
+        db,
         krate,
         MacroCallKind::Derive {
             ast_id: item_attr.ast_id,
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
index 16f3fd56eb9ed..77effbcc88009 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
@@ -5,62 +5,66 @@
 
 use std::{cmp::Ordering, iter, mem, ops::Not};
 
-use base_db::{CrateId, CrateOrigin, Dependency, LangCrateOrigin};
+use base_db::{BuiltDependency, Crate, CrateOrigin, LangCrateOrigin};
 use cfg::{CfgAtom, CfgExpr, CfgOptions};
 use either::Either;
 use hir_expand::{
+    EditionedFileId, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefId,
+    MacroDefKind,
     attrs::{Attr, AttrId},
     builtin::{find_builtin_attr, find_builtin_derive, find_builtin_macro},
+    mod_path::{ModPath, PathKind},
     name::{AsName, Name},
     proc_macro::CustomProcMacroExpander,
-    ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefId, MacroDefKind,
-    MacroFileIdExt,
 };
-use intern::{sym, Interned};
-use itertools::{izip, Itertools};
+use intern::{Interned, sym};
+use itertools::{Itertools, izip};
 use la_arena::Idx;
 use rustc_hash::{FxHashMap, FxHashSet};
-use span::{Edition, EditionedFileId, FileAstId, SyntaxContextId};
+use span::{Edition, FileAstId, SyntaxContext};
 use syntax::ast;
 use triomphe::Arc;
 
 use crate::{
+    AdtId, AstId, AstIdWithPath, ConstLoc, CrateRootModuleId, EnumLoc, ExternBlockLoc,
+    ExternCrateId, ExternCrateLoc, FunctionId, FunctionLoc, ImplLoc, Intern, ItemContainerId,
+    LocalModuleId, Lookup, Macro2Id, Macro2Loc, MacroExpander, MacroId, MacroRulesId,
+    MacroRulesLoc, MacroRulesLocFlags, ModuleDefId, ModuleId, ProcMacroId, ProcMacroLoc, StaticLoc,
+    StructLoc, TraitAliasLoc, TraitLoc, TypeAliasLoc, UnionLoc, UnresolvedMacro, UseId, UseLoc,
     attr::Attrs,
     db::DefDatabase,
     item_scope::{GlobId, ImportId, ImportOrExternCrate, PerNsGlobImports},
     item_tree::{
-        self, AttrOwner, FieldsShape, FileItemTreeId, ImportKind, ItemTree, ItemTreeId,
-        ItemTreeNode, Macro2, MacroCall, MacroRules, Mod, ModItem, ModKind, TreeId, UseTreeKind,
+        self, AttrOwner, FieldsShape, FileItemTreeId, ImportAlias, ImportKind, ItemTree,
+        ItemTreeId, ItemTreeNode, Macro2, MacroCall, MacroRules, Mod, ModItem, ModKind, TreeId,
+        UseTreeKind,
     },
     macro_call_as_call_id, macro_call_as_call_id_with_eager,
     nameres::{
+        BuiltinShadowMode, DefMap, LocalDefMap, MacroSubNs, ModuleData, ModuleOrigin, ResolveMode,
         attr_resolution::{attr_macro_as_call_id, derive_macro_as_call_id},
         diagnostics::DefDiagnostic,
         mod_resolution::ModDir,
         path_resolution::ReachedFixedPoint,
-        proc_macro::{parse_macro_name_and_helper_attrs, ProcMacroDef, ProcMacroKind},
-        sub_namespace_match, BuiltinShadowMode, DefMap, MacroSubNs, ModuleData, ModuleOrigin,
-        ResolveMode,
+        proc_macro::{ProcMacroDef, ProcMacroKind, parse_macro_name_and_helper_attrs},
+        sub_namespace_match,
     },
-    path::{ImportAlias, ModPath, PathKind},
     per_ns::{Item, PerNs},
     tt,
     visibility::{RawVisibility, Visibility},
-    AdtId, AstId, AstIdWithPath, ConstLoc, CrateRootModuleId, EnumLoc, EnumVariantLoc,
-    ExternBlockLoc, ExternCrateId, ExternCrateLoc, FunctionId, FunctionLoc, ImplLoc, Intern,
-    ItemContainerId, LocalModuleId, Lookup, Macro2Id, Macro2Loc, MacroExpander, MacroId,
-    MacroRulesId, MacroRulesLoc, MacroRulesLocFlags, ModuleDefId, ModuleId, ProcMacroId,
-    ProcMacroLoc, StaticLoc, StructLoc, TraitAliasLoc, TraitLoc, TypeAliasLoc, UnionLoc,
-    UnresolvedMacro, UseId, UseLoc,
 };
 
 const GLOB_RECURSION_LIMIT: usize = 100;
 const FIXED_POINT_LIMIT: usize = 8192;
 
-pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeId) -> DefMap {
-    let crate_graph = db.crate_graph();
-
-    let krate = &crate_graph[def_map.krate];
+pub(super) fn collect_defs(
+    db: &dyn DefDatabase,
+    def_map: DefMap,
+    tree_id: TreeId,
+    crate_local_def_map: Option<Arc<LocalDefMap>>,
+) -> (DefMap, LocalDefMap) {
+    let krate = &def_map.krate.data(db);
+    let cfg_options = def_map.krate.cfg_options(db);
 
     // populate external prelude and dependency list
     let mut deps =
@@ -72,8 +76,10 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI
     }
 
     let proc_macros = if krate.is_proc_macro {
-        db.proc_macros()
-            .for_crate(def_map.krate, db.syntax_context(tree_id.file_id(), krate.edition))
+        db.proc_macros_for_crate(def_map.krate)
+            .and_then(|proc_macros| {
+                proc_macros.list(db.syntax_context(tree_id.file_id(), krate.edition))
+            })
             .unwrap_or_default()
     } else {
         Default::default()
@@ -82,13 +88,15 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI
     let mut collector = DefCollector {
         db,
         def_map,
+        local_def_map: LocalDefMap::default(),
+        crate_local_def_map,
         deps,
         glob_imports: FxHashMap::default(),
         unresolved_imports: Vec::new(),
         indeterminate_imports: Vec::new(),
         unresolved_macros: Vec::new(),
         mod_dirs: FxHashMap::default(),
-        cfg_options: &krate.cfg_options,
+        cfg_options,
         proc_macros,
         from_glob_import: Default::default(),
         skip_attrs: Default::default(),
@@ -101,9 +109,10 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI
         collector.seed_with_top_level();
     }
     collector.collect();
-    let mut def_map = collector.finish();
+    let (mut def_map, mut local_def_map) = collector.finish();
     def_map.shrink_to_fit();
-    def_map
+    local_def_map.shrink_to_fit();
+    (def_map, local_def_map)
 }
 
 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
@@ -183,13 +192,13 @@ enum MacroDirectiveKind {
     FnLike {
         ast_id: AstIdWithPath<ast::MacroCall>,
         expand_to: ExpandTo,
-        ctxt: SyntaxContextId,
+        ctxt: SyntaxContext,
     },
     Derive {
         ast_id: AstIdWithPath<ast::Adt>,
         derive_attr: AttrId,
         derive_pos: usize,
-        ctxt: SyntaxContextId,
+        ctxt: SyntaxContext,
         /// The "parent" macro it is resolved to.
         derive_macro_id: MacroCallId,
     },
@@ -205,8 +214,11 @@ enum MacroDirectiveKind {
 struct DefCollector<'a> {
     db: &'a dyn DefDatabase,
     def_map: DefMap,
+    local_def_map: LocalDefMap,
+    /// Set only in case of blocks.
+    crate_local_def_map: Option<Arc<LocalDefMap>>,
     // The dependencies of the current crate, including optional deps like `test`.
-    deps: FxHashMap<Name, Dependency>,
+    deps: FxHashMap<Name, BuiltDependency>,
     glob_imports: FxHashMap<LocalModuleId, Vec<(LocalModuleId, Visibility, GlobId)>>,
     unresolved_imports: Vec<ImportDirective>,
     indeterminate_imports: Vec<(ImportDirective, PerNs)>,
@@ -238,8 +250,7 @@ impl DefCollector<'_> {
     fn seed_with_top_level(&mut self) {
         let _p = tracing::info_span!("seed_with_top_level").entered();
 
-        let crate_graph = self.db.crate_graph();
-        let file_id = crate_graph[self.def_map.krate].root_file_id();
+        let file_id = self.def_map.krate.data(self.db).root_file_id(self.db);
         let item_tree = self.db.file_item_tree(file_id.into());
         let attrs = item_tree.top_level_attrs(self.db, self.def_map.krate);
         let crate_data = Arc::get_mut(&mut self.def_map.data).unwrap();
@@ -257,41 +268,40 @@ impl DefCollector<'_> {
             let Some(attr_name) = attr.path.as_ident() else { continue };
 
             match () {
-                () if *attr_name == sym::recursion_limit.clone() => {
+                () if *attr_name == sym::recursion_limit => {
                     if let Some(limit) = attr.string_value() {
                         if let Ok(limit) = limit.as_str().parse() {
                             crate_data.recursion_limit = Some(limit);
                         }
                     }
                 }
-                () if *attr_name == sym::crate_type.clone() => {
+                () if *attr_name == sym::crate_type => {
                     if attr.string_value() == Some(&sym::proc_dash_macro) {
                         self.is_proc_macro = true;
                     }
                 }
-                () if *attr_name == sym::no_core.clone() => crate_data.no_core = true,
-                () if *attr_name == sym::no_std.clone() => crate_data.no_std = true,
-                () if *attr_name == sym::rustc_coherence_is_core.clone() => {
+                () if *attr_name == sym::no_core => crate_data.no_core = true,
+                () if *attr_name == sym::no_std => crate_data.no_std = true,
+                () if *attr_name == sym::rustc_coherence_is_core => {
                     crate_data.rustc_coherence_is_core = true;
                 }
-                () if *attr_name == sym::feature.clone() => {
-                    let features = attr
-                        .parse_path_comma_token_tree(self.db.upcast())
-                        .into_iter()
-                        .flatten()
-                        .filter_map(|(feat, _)| match feat.segments() {
-                            [name] => Some(name.symbol().clone()),
-                            _ => None,
-                        });
+                () if *attr_name == sym::feature => {
+                    let features =
+                        attr.parse_path_comma_token_tree(self.db).into_iter().flatten().filter_map(
+                            |(feat, _)| match feat.segments() {
+                                [name] => Some(name.symbol().clone()),
+                                _ => None,
+                            },
+                        );
                     crate_data.unstable_features.extend(features);
                 }
-                () if *attr_name == sym::register_attr.clone() => {
+                () if *attr_name == sym::register_attr => {
                     if let Some(ident) = attr.single_ident_value() {
                         crate_data.registered_attrs.push(ident.sym.clone());
                         cov_mark::hit!(register_attr);
                     }
                 }
-                () if *attr_name == sym::register_tool.clone() => {
+                () if *attr_name == sym::register_tool => {
                     if let Some(ident) = attr.single_ident_value() {
                         crate_data.registered_tools.push(ident.sym.clone());
                         cov_mark::hit!(register_tool);
@@ -310,20 +320,24 @@ impl DefCollector<'_> {
                 // don't do pre-configured attribute resolution yet.
                 // So here check if we are no_core / no_std and we are trying to add the
                 // corresponding dep from the sysroot
-                let skip = match crate_graph[dep.crate_id].origin {
-                    CrateOrigin::Lang(LangCrateOrigin::Core) => {
-                        crate_data.no_core && dep.is_sysroot()
-                    }
-                    CrateOrigin::Lang(LangCrateOrigin::Std) => {
-                        crate_data.no_std && dep.is_sysroot()
-                    }
-                    _ => false,
-                };
+
+                // Depending on the crate data of a dependency seems bad for incrementality, but
+                // we only do that for sysroot crates (this is why the order of the `&&` matters).
+                // Those are normally standard library crates, which realistically aren't going
+                // to have their crate ID invalidated: they stay on the same root file and
+                // they're dependencies of everything else, so if some collision miraculously
+                // occurs we will resolve it by disambiguating the other crate.
+                let skip = dep.is_sysroot()
+                    && match dep.crate_id.data(self.db).origin {
+                        CrateOrigin::Lang(LangCrateOrigin::Core) => crate_data.no_core,
+                        CrateOrigin::Lang(LangCrateOrigin::Std) => crate_data.no_std,
+                        _ => false,
+                    };
                 if skip {
                     continue;
                 }
 
-                crate_data
+                self.local_def_map
                     .extern_prelude
                     .insert(name.clone(), (CrateRootModuleId { krate: dep.crate_id }, None));
             }
@@ -376,7 +390,7 @@ impl DefCollector<'_> {
         'resolve_attr: loop {
             let _p = tracing::info_span!("resolve_macros loop").entered();
             'resolve_macros: loop {
-                self.db.unwind_if_cancelled();
+                self.db.unwind_if_revision_cancelled();
 
                 {
                     let _p = tracing::info_span!("resolve_imports loop").entered();
@@ -493,20 +507,20 @@ impl DefCollector<'_> {
         }
 
         let krate = if self.def_map.data.no_std {
-            Name::new_symbol_root(sym::core.clone())
-        } else if self.def_map.extern_prelude().any(|(name, _)| *name == sym::std.clone()) {
-            Name::new_symbol_root(sym::std.clone())
+            Name::new_symbol_root(sym::core)
+        } else if self.local_def_map().extern_prelude().any(|(name, _)| *name == sym::std) {
+            Name::new_symbol_root(sym::std)
         } else {
             // If `std` does not exist for some reason, fall back to core. This mostly helps
             // keep r-a's own tests minimal.
-            Name::new_symbol_root(sym::core.clone())
+            Name::new_symbol_root(sym::core)
         };
 
         let edition = match self.def_map.data.edition {
-            Edition::Edition2015 => Name::new_symbol_root(sym::rust_2015.clone()),
-            Edition::Edition2018 => Name::new_symbol_root(sym::rust_2018.clone()),
-            Edition::Edition2021 => Name::new_symbol_root(sym::rust_2021.clone()),
-            Edition::Edition2024 => Name::new_symbol_root(sym::rust_2024.clone()),
+            Edition::Edition2015 => Name::new_symbol_root(sym::rust_2015),
+            Edition::Edition2018 => Name::new_symbol_root(sym::rust_2018),
+            Edition::Edition2021 => Name::new_symbol_root(sym::rust_2021),
+            Edition::Edition2024 => Name::new_symbol_root(sym::rust_2024),
         };
 
         let path_kind = match self.def_map.data.edition {
@@ -515,11 +529,17 @@ impl DefCollector<'_> {
         };
         let path = ModPath::from_segments(
             path_kind,
-            [krate, Name::new_symbol_root(sym::prelude.clone()), edition],
+            [krate, Name::new_symbol_root(sym::prelude), edition],
         );
 
-        let (per_ns, _) =
-            self.def_map.resolve_path(self.db, DefMap::ROOT, &path, BuiltinShadowMode::Other, None);
+        let (per_ns, _) = self.def_map.resolve_path(
+            self.crate_local_def_map.as_deref().unwrap_or(&self.local_def_map),
+            self.db,
+            DefMap::ROOT,
+            &path,
+            BuiltinShadowMode::Other,
+            None,
+        );
 
         match per_ns.types {
             Some(Item { def: ModuleDefId::ModuleId(m), import, .. }) => {
@@ -528,13 +548,17 @@ impl DefCollector<'_> {
             types => {
                 tracing::debug!(
                     "could not resolve prelude path `{}` to module (resolved to {:?})",
-                    path.display(self.db.upcast(), Edition::LATEST),
+                    path.display(self.db, Edition::LATEST),
                     types
                 );
             }
         }
     }
 
+    fn local_def_map(&mut self) -> &LocalDefMap {
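+        // For blocks, the crate's local def map is used; otherwise the one currently being built.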
+        self.crate_local_def_map.as_deref().unwrap_or(&self.local_def_map)
+    }
+
     /// Adds a definition of procedural macro `name` to the root module.
     ///
     /// # Notes on procedural macro resolution
@@ -555,6 +579,7 @@ impl DefCollector<'_> {
         &mut self,
         def: ProcMacroDef,
         id: ItemTreeId<item_tree::Function>,
+        ast_id: AstId<ast::Fn>,
         fn_id: FunctionId,
     ) {
         let kind = def.kind.to_basedb_kind();
@@ -578,6 +603,8 @@ impl DefCollector<'_> {
             edition: self.def_map.data.edition,
         }
         .intern(self.db);
+
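+        // Record which interned macro id the proc macro's defining function maps to.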
+        self.def_map.macro_def_to_macro_id.insert(ast_id.erase(), proc_macro_id.into());
         self.define_proc_macro(def.name.clone(), proc_macro_id);
         let crate_data = Arc::get_mut(&mut self.def_map.data).unwrap();
         if let ProcMacroKind::Derive { helpers } = def.kind {
@@ -660,7 +687,13 @@ impl DefCollector<'_> {
     ) {
         let vis = self
             .def_map
-            .resolve_visibility(self.db, module_id, vis, false)
+            .resolve_visibility(
+                self.crate_local_def_map.as_deref().unwrap_or(&self.local_def_map),
+                self.db,
+                module_id,
+                vis,
+                false,
+            )
             .unwrap_or(Visibility::Public);
         self.def_map.modules[module_id].scope.declare(macro_.into());
         self.update(
@@ -694,7 +727,7 @@ impl DefCollector<'_> {
     /// created by `use` in the root module, ignoring the visibility of `use`.
     fn import_macros_from_extern_crate(
         &mut self,
-        krate: CrateId,
+        krate: Crate,
         names: Option<Vec<Name>>,
         extern_crate: Option<ExternCrateId>,
     ) {
@@ -775,10 +808,11 @@ impl DefCollector<'_> {
     }
 
     fn resolve_import(&self, module_id: LocalModuleId, import: &Import) -> PartialResolvedImport {
-        let _p = tracing::info_span!("resolve_import", import_path = %import.path.display(self.db.upcast(), Edition::LATEST))
+        let _p = tracing::info_span!("resolve_import", import_path = %import.path.display(self.db, Edition::LATEST))
             .entered();
         tracing::debug!("resolving import: {:?} ({:?})", import, self.def_map.data.edition);
         let res = self.def_map.resolve_path_fp_with_macro(
+            self.crate_local_def_map.as_deref().unwrap_or(&self.local_def_map),
             self.db,
             ResolveMode::Import,
             module_id,
@@ -814,7 +848,13 @@ impl DefCollector<'_> {
         let mut def = directive.status.namespaces();
         let vis = self
             .def_map
-            .resolve_visibility(self.db, module_id, &directive.import.visibility, false)
+            .resolve_visibility(
+                self.crate_local_def_map.as_deref().unwrap_or(&self.local_def_map),
+                self.db,
+                module_id,
+                &directive.import.visibility,
+                false,
+            )
             .unwrap_or(Visibility::Public);
 
         match import.source {
@@ -929,27 +969,16 @@ impl DefCollector<'_> {
                     Some(ModuleDefId::AdtId(AdtId::EnumId(e))) => {
                         cov_mark::hit!(glob_enum);
                         // glob import from enum => just import all the variants
-
-                        // We need to check if the def map the enum is from is us, if it is we can't
-                        // call the def-map query since we are currently constructing it!
-                        let loc = e.lookup(self.db);
-                        let tree = loc.id.item_tree(self.db);
-                        let current_def_map = self.def_map.krate == loc.container.krate
-                            && self.def_map.block_id() == loc.container.block;
-                        let def_map;
-                        let resolutions = if current_def_map {
-                            &self.def_map.enum_definitions[&e]
-                        } else {
-                            def_map = loc.container.def_map(self.db);
-                            &def_map.enum_definitions[&e]
-                        }
-                        .iter()
-                        .map(|&variant| {
-                            let name = tree[variant.lookup(self.db).id.value].name.clone();
-                            let res = PerNs::both(variant.into(), variant.into(), vis, None);
-                            (Some(name), res)
-                        })
-                        .collect::<Vec<_>>();
+                        let resolutions = self
+                            .db
+                            .enum_variants(e)
+                            .variants
+                            .iter()
+                            .map(|&(variant, ref name)| {
+                                let res = PerNs::both(variant.into(), variant.into(), vis, None);
+                                (Some(name.clone()), res)
+                            })
+                            .collect::<Vec<_>>();
                         self.update(
                             module_id,
                             &resolutions,
@@ -977,7 +1006,7 @@ impl DefCollector<'_> {
         vis: Visibility,
         import: Option<ImportOrExternCrate>,
     ) {
-        self.db.unwind_if_cancelled();
+        self.db.unwind_if_revision_cancelled();
         self.update_recursive(module_id, resolutions, vis, import, 0)
     }
 
@@ -1199,6 +1228,7 @@ impl DefCollector<'_> {
             No,
         }
 
+        let mut eager_callback_buffer = vec![];
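+        // Eagerly expanded macro calls report (module, ptr, call id) triples into this buffer;
+        // they are added to the module scopes once the resolution loop below has run.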
         let mut res = ReachedFixedPoint::Yes;
         // Retain unresolved macros after this round of resolution.
         let mut retain = |directive: &MacroDirective| {
@@ -1210,6 +1240,7 @@ impl DefCollector<'_> {
             };
             let resolver = |path: &_| {
                 let resolved_res = self.def_map.resolve_path_fp_with_macro(
+                    self.crate_local_def_map.as_deref().unwrap_or(&self.local_def_map),
                     self.db,
                     ResolveMode::Other,
                     directive.module_id,
@@ -1224,12 +1255,15 @@ impl DefCollector<'_> {
             match &directive.kind {
                 MacroDirectiveKind::FnLike { ast_id, expand_to, ctxt: call_site } => {
                     let call_id = macro_call_as_call_id(
-                        self.db.upcast(),
+                        self.db,
                         ast_id,
                         *call_site,
                         *expand_to,
                         self.def_map.krate,
                         resolver_def_id,
+                        &mut |ptr, call_id| {
+                            eager_callback_buffer.push((directive.module_id, ptr, call_id));
+                        },
                     );
                     if let Ok(Some(call_id)) = call_id {
                         self.def_map.modules[directive.module_id]
@@ -1339,8 +1373,7 @@ impl DefCollector<'_> {
                         MacroDefKind::BuiltInAttr(_, expander)
                         if expander.is_test() || expander.is_bench() || expander.is_test_case()
                     ) {
-                        let test_is_active =
-                            self.cfg_options.check_atom(&CfgAtom::Flag(sym::test.clone()));
+                        let test_is_active = self.cfg_options.check_atom(&CfgAtom::Flag(sym::test));
                         if test_is_active {
                             return recollect_without(self);
                         }
@@ -1375,7 +1408,7 @@ impl DefCollector<'_> {
 
                         let ast_id = ast_id.with_value(ast_adt_id);
 
-                        match attr.parse_path_comma_token_tree(self.db.upcast()) {
+                        match attr.parse_path_comma_token_tree(self.db) {
                             Some(derive_macros) => {
                                 let call_id = call_id();
                                 let mut len = 0;
@@ -1455,6 +1488,10 @@ impl DefCollector<'_> {
         macros.extend(mem::take(&mut self.unresolved_macros));
         self.unresolved_macros = macros;
 
+        for (module_id, ptr, call_id) in eager_callback_buffer {
+            self.def_map.modules[module_id].scope.add_macro_invoc(ptr.map(|(_, it)| it), call_id);
+        }
+
         for (module_id, depth, container, macro_call_id) in resolved {
             self.collect_macro_expansion(module_id, macro_call_id, depth, container);
         }
@@ -1474,11 +1511,11 @@ impl DefCollector<'_> {
             tracing::warn!("macro expansion is too deep");
             return;
         }
-        let file_id = macro_call_id.as_file();
+        let file_id = macro_call_id.into();
 
         let item_tree = self.db.file_item_tree(file_id);
 
-        let mod_dir = if macro_call_id.as_macro_file().is_include_macro(self.db.upcast()) {
+        let mod_dir = if macro_call_id.is_include_macro(self.db) {
             ModDir::root()
         } else {
             self.mod_dirs[&module_id].clone()
@@ -1495,7 +1532,7 @@ impl DefCollector<'_> {
         .collect(item_tree.top_level_items(), container);
     }
 
-    fn finish(mut self) -> DefMap {
+    fn finish(mut self) -> (DefMap, LocalDefMap) {
         // Emit diagnostics for all remaining unexpanded macros.
         let _p = tracing::info_span!("DefCollector::finish").entered();
 
@@ -1504,13 +1541,14 @@ impl DefCollector<'_> {
                 MacroDirectiveKind::FnLike { ast_id, expand_to, ctxt: call_site } => {
                     // FIXME: we shouldn't need to re-resolve the macro here just to get the unresolved error!
                     let macro_call_as_call_id = macro_call_as_call_id(
-                        self.db.upcast(),
+                        self.db,
                         ast_id,
                         *call_site,
                         *expand_to,
                         self.def_map.krate,
                         |path| {
                             let resolved_res = self.def_map.resolve_path_fp_with_macro(
+                                self.crate_local_def_map.as_deref().unwrap_or(&self.local_def_map),
                                 self.db,
                                 ResolveMode::Other,
                                 directive.module_id,
@@ -1520,6 +1558,7 @@ impl DefCollector<'_> {
                             );
                             resolved_res.resolved_def.take_macros().map(|it| self.db.macro_def(it))
                         },
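+                        // Eager expansion callbacks are irrelevant here: this re-resolution only
+                        // exists to produce the unresolved-macro diagnostic.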
+                        &mut |_, _| (),
                     );
                     if let Err(UnresolvedMacro { path }) = macro_call_as_call_id {
                         self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call(
@@ -1582,7 +1621,7 @@ impl DefCollector<'_> {
             ));
         }
 
-        self.def_map
+        (self.def_map, self.local_def_map)
     }
 }
 
@@ -1635,9 +1674,9 @@ impl ModCollector<'_, '_> {
                     None,
                 )
             };
-        let resolve_vis = |def_map: &DefMap, visibility| {
+        let resolve_vis = |def_map: &DefMap, local_def_map: &LocalDefMap, visibility| {
             def_map
-                .resolve_visibility(db, module_id, visibility, false)
+                .resolve_visibility(local_def_map, db, module_id, visibility, false)
                 .unwrap_or(Visibility::Public)
         };
 
@@ -1658,6 +1697,11 @@ impl ModCollector<'_, '_> {
 
             let module = self.def_collector.def_map.module_id(module_id);
             let def_map = &mut self.def_collector.def_map;
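+            // When collecting a block, resolve against the containing crate's local def map;
+            // otherwise use the one being built for this crate.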
+            let local_def_map = self
+                .def_collector
+                .crate_local_def_map
+                .as_deref()
+                .unwrap_or(&self.def_collector.local_def_map);
 
             match item {
                 ModItem::Mod(m) => self.collect_module(m, &attrs),
@@ -1667,7 +1711,7 @@ impl ModCollector<'_, '_> {
                         id: ItemTreeId::new(self.tree_id, item_tree_id),
                     }
                     .intern(db);
-                    let is_prelude = attrs.by_key(&sym::prelude_import).exists();
+                    let is_prelude = attrs.by_key(sym::prelude_import).exists();
                     Import::from_use(
                         self.item_tree,
                         ItemTreeId::new(self.tree_id, item_tree_id),
@@ -1711,13 +1755,13 @@ impl ModCollector<'_, '_> {
                     };
 
                     if let Some(resolved) = resolved {
-                        let vis = resolve_vis(def_map, &self.item_tree[*visibility]);
+                        let vis = resolve_vis(def_map, local_def_map, &self.item_tree[*visibility]);
 
                         if is_crate_root {
                             // extern crates in the crate root are special-cased to insert entries into the extern prelude: rust-lang/rust#54658
                             if let Some(name) = name {
-                                Arc::get_mut(&mut def_map.data)
-                                    .unwrap()
+                                self.def_collector
+                                    .local_def_map
                                     .extern_prelude
                                     .insert(name.clone(), (resolved, Some(id)));
                             }
@@ -1725,7 +1769,7 @@ impl ModCollector<'_, '_> {
                             if !is_self {
                                 self.process_macro_use_extern_crate(
                                     id,
-                                    attrs.by_key(&sym::macro_use).attrs(),
+                                    attrs.by_key(sym::macro_use).attrs(),
                                     resolved.krate,
                                 );
                             }
@@ -1784,7 +1828,7 @@ impl ModCollector<'_, '_> {
                     let fn_id =
                         FunctionLoc { container, id: ItemTreeId::new(self.tree_id, id) }.intern(db);
 
-                    let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
+                    let vis = resolve_vis(def_map, local_def_map, &self.item_tree[it.visibility]);
 
                     if self.def_collector.def_map.block.is_none()
                         && self.def_collector.is_proc_macro
@@ -1794,6 +1838,7 @@ impl ModCollector<'_, '_> {
                             self.def_collector.export_proc_macro(
                                 proc_macro,
                                 ItemTreeId::new(self.tree_id, id),
+                                InFile::new(self.file_id(), self.item_tree[id].ast_id()),
                                 fn_id,
                             );
                         }
@@ -1804,7 +1849,7 @@ impl ModCollector<'_, '_> {
                 ModItem::Struct(id) => {
                     let it = &self.item_tree[id];
 
-                    let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
+                    let vis = resolve_vis(def_map, local_def_map, &self.item_tree[it.visibility]);
                     update_def(
                         self.def_collector,
                         StructLoc { container: module, id: ItemTreeId::new(self.tree_id, id) }
@@ -1818,7 +1863,7 @@ impl ModCollector<'_, '_> {
                 ModItem::Union(id) => {
                     let it = &self.item_tree[id];
 
-                    let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
+                    let vis = resolve_vis(def_map, local_def_map, &self.item_tree[it.visibility]);
                     update_def(
                         self.def_collector,
                         UnionLoc { container: module, id: ItemTreeId::new(self.tree_id, id) }
@@ -1835,41 +1880,8 @@ impl ModCollector<'_, '_> {
                         EnumLoc { container: module, id: ItemTreeId::new(self.tree_id, id) }
                             .intern(db);
 
-                    let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
+                    let vis = resolve_vis(def_map, local_def_map, &self.item_tree[it.visibility]);
                     update_def(self.def_collector, enum_.into(), &it.name, vis, false);
-
-                    let mut index = 0;
-                    let variants = FileItemTreeId::range_iter(it.variants.clone())
-                        .filter_map(|variant| {
-                            let is_enabled = self
-                                .item_tree
-                                .attrs(db, krate, variant.into())
-                                .cfg()
-                                .and_then(|cfg| self.is_cfg_enabled(&cfg).not().then_some(cfg))
-                                .map_or(Ok(()), Err);
-                            match is_enabled {
-                                Err(cfg) => {
-                                    self.emit_unconfigured_diagnostic(
-                                        self.tree_id,
-                                        variant.into(),
-                                        &cfg,
-                                    );
-                                    None
-                                }
-                                Ok(()) => Some({
-                                    let loc = EnumVariantLoc {
-                                        id: ItemTreeId::new(self.tree_id, variant),
-                                        parent: enum_,
-                                        index,
-                                    }
-                                    .intern(db);
-                                    index += 1;
-                                    loc
-                                }),
-                            }
-                        })
-                        .collect();
-                    self.def_collector.def_map.enum_definitions.insert(enum_, variants);
                 }
                 ModItem::Const(id) => {
                     let it = &self.item_tree[id];
@@ -1878,7 +1890,8 @@ impl ModCollector<'_, '_> {
 
                     match &it.name {
                         Some(name) => {
-                            let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
+                            let vis =
+                                resolve_vis(def_map, local_def_map, &self.item_tree[it.visibility]);
                             update_def(self.def_collector, const_id.into(), name, vis, false);
                         }
                         None => {
@@ -1892,7 +1905,7 @@ impl ModCollector<'_, '_> {
                 ModItem::Static(id) => {
                     let it = &self.item_tree[id];
 
-                    let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
+                    let vis = resolve_vis(def_map, local_def_map, &self.item_tree[it.visibility]);
                     update_def(
                         self.def_collector,
                         StaticLoc { container, id: ItemTreeId::new(self.tree_id, id) }
@@ -1906,7 +1919,7 @@ impl ModCollector<'_, '_> {
                 ModItem::Trait(id) => {
                     let it = &self.item_tree[id];
 
-                    let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
+                    let vis = resolve_vis(def_map, local_def_map, &self.item_tree[it.visibility]);
                     update_def(
                         self.def_collector,
                         TraitLoc { container: module, id: ItemTreeId::new(self.tree_id, id) }
@@ -1920,7 +1933,7 @@ impl ModCollector<'_, '_> {
                 ModItem::TraitAlias(id) => {
                     let it = &self.item_tree[id];
 
-                    let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
+                    let vis = resolve_vis(def_map, local_def_map, &self.item_tree[it.visibility]);
                     update_def(
                         self.def_collector,
                         TraitAliasLoc { container: module, id: ItemTreeId::new(self.tree_id, id) }
@@ -1934,7 +1947,7 @@ impl ModCollector<'_, '_> {
                 ModItem::TypeAlias(id) => {
                     let it = &self.item_tree[id];
 
-                    let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
+                    let vis = resolve_vis(def_map, local_def_map, &self.item_tree[it.visibility]);
                     update_def(
                         self.def_collector,
                         TypeAliasLoc { container, id: ItemTreeId::new(self.tree_id, id) }
@@ -1971,13 +1984,12 @@ impl ModCollector<'_, '_> {
         &mut self,
         extern_crate_id: ExternCrateId,
         macro_use_attrs: impl Iterator<Item = &'a Attr>,
-        target_crate: CrateId,
+        target_crate: Crate,
     ) {
         cov_mark::hit!(macro_rules_from_other_crates_are_visible_with_macro_use);
         let mut single_imports = Vec::new();
         for attr in macro_use_attrs {
-            let Some(paths) = attr.parse_path_comma_token_tree(self.def_collector.db.upcast())
-            else {
+            let Some(paths) = attr.parse_path_comma_token_tree(self.def_collector.db) else {
                 // `#[macro_use]` (without any paths) found, forget collected names and just import
                 // all visible macros.
                 self.def_collector.import_macros_from_extern_crate(
@@ -2002,8 +2014,8 @@ impl ModCollector<'_, '_> {
     }
 
     fn collect_module(&mut self, module_id: FileItemTreeId<Mod>, attrs: &Attrs) {
-        let path_attr = attrs.by_key(&sym::path).string_value_unescape();
-        let is_macro_use = attrs.by_key(&sym::macro_use).exists();
+        let path_attr = attrs.by_key(sym::path).string_value_unescape();
+        let is_macro_use = attrs.by_key(sym::macro_use).exists();
         let module = &self.item_tree[module_id];
         match &module.kind {
             // inline module, just recurse
@@ -2080,7 +2092,7 @@ impl ModCollector<'_, '_> {
                                 let is_macro_use = is_macro_use
                                     || item_tree
                                         .top_level_attrs(db, krate)
-                                        .by_key(&sym::macro_use)
+                                        .by_key(sym::macro_use)
                                         .exists();
                                 if is_macro_use {
                                     self.import_all_legacy_macros(module_id);
@@ -2115,7 +2127,16 @@ impl ModCollector<'_, '_> {
     ) -> LocalModuleId {
         let def_map = &mut self.def_collector.def_map;
         let vis = def_map
-            .resolve_visibility(self.def_collector.db, self.module_id, visibility, false)
+            .resolve_visibility(
+                self.def_collector
+                    .crate_local_def_map
+                    .as_deref()
+                    .unwrap_or(&self.def_collector.local_def_map),
+                self.def_collector.db,
+                self.module_id,
+                visibility,
+                false,
+            )
             .unwrap_or(Visibility::Public);
         let origin = match definition {
             None => ModuleOrigin::Inline {
@@ -2198,7 +2219,7 @@ impl ModCollector<'_, '_> {
             }
             tracing::debug!(
                 "non-builtin attribute {}",
-                attr.path.display(self.def_collector.db.upcast(), Edition::LATEST)
+                attr.path.display(self.def_collector.db, Edition::LATEST)
             );
 
             let ast_id = AstIdWithPath::new(
@@ -2230,11 +2251,11 @@ impl ModCollector<'_, '_> {
         let attrs = self.item_tree.attrs(self.def_collector.db, krate, ModItem::from(id).into());
         let ast_id = InFile::new(self.file_id(), mac.ast_id.upcast());
 
-        let export_attr = attrs.by_key(&sym::macro_export);
+        let export_attr = || attrs.by_key(sym::macro_export);
 
-        let is_export = export_attr.exists();
+        let is_export = export_attr().exists();
         let local_inner = if is_export {
-            export_attr.tt_values().flat_map(|it| it.iter()).any(|it| match it {
+            export_attr().tt_values().flat_map(|it| it.iter()).any(|it| match it {
                 tt::TtElement::Leaf(tt::Leaf::Ident(ident)) => ident.sym == sym::local_inner_macros,
                 _ => false,
             })
@@ -2243,17 +2264,17 @@ impl ModCollector<'_, '_> {
         };
 
         // Case 1: builtin macros
-        let expander = if attrs.by_key(&sym::rustc_builtin_macro).exists() {
+        let expander = if attrs.by_key(sym::rustc_builtin_macro).exists() {
             // `#[rustc_builtin_macro = "builtin_name"]` overrides the `macro_rules!` name.
             let name;
-            let name = match attrs.by_key(&sym::rustc_builtin_macro).string_value_with_span() {
+            let name = match attrs.by_key(sym::rustc_builtin_macro).string_value_with_span() {
                 Some((it, span)) => {
                     name = Name::new_symbol(it.clone(), span.ctx);
                     &name
                 }
                 None => {
                     let explicit_name =
-                        attrs.by_key(&sym::rustc_builtin_macro).tt_values().next().and_then(|tt| {
+                        attrs.by_key(sym::rustc_builtin_macro).tt_values().next().and_then(|tt| {
                             match tt.token_trees().flat_tokens().first() {
                                 Some(tt::TokenTree::Leaf(tt::Leaf::Ident(name))) => Some(name),
                                 _ => None,
@@ -2283,7 +2304,7 @@ impl ModCollector<'_, '_> {
             // Case 2: normal `macro_rules!` macro
             MacroExpander::Declarative
         };
-        let allow_internal_unsafe = attrs.by_key(&sym::allow_internal_unsafe).exists();
+        let allow_internal_unsafe = attrs.by_key(sym::allow_internal_unsafe).exists();
 
         let mut flags = MacroRulesLocFlags::empty();
         flags.set(MacroRulesLocFlags::LOCAL_INNER, local_inner);
@@ -2297,6 +2318,10 @@ impl ModCollector<'_, '_> {
             edition: self.def_collector.def_map.data.edition,
         }
         .intern(self.def_collector.db);
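+        // Remember which interned macro id corresponds to this `macro_rules!` definition.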
+        self.def_collector.def_map.macro_def_to_macro_id.insert(
+            InFile::new(self.file_id(), self.item_tree[id].ast_id()).erase(),
+            macro_id.into(),
+        );
         self.def_collector.define_macro_rules(
             self.module_id,
             mac.name.clone(),
@@ -2313,14 +2338,14 @@ impl ModCollector<'_, '_> {
         // Case 1: builtin macros
         let mut helpers_opt = None;
         let attrs = self.item_tree.attrs(self.def_collector.db, krate, ModItem::from(id).into());
-        let expander = if attrs.by_key(&sym::rustc_builtin_macro).exists() {
+        let expander = if attrs.by_key(sym::rustc_builtin_macro).exists() {
             if let Some(expander) = find_builtin_macro(&mac.name) {
                 match expander {
                     Either::Left(it) => MacroExpander::BuiltIn(it),
                     Either::Right(it) => MacroExpander::BuiltInEager(it),
                 }
             } else if let Some(expander) = find_builtin_derive(&mac.name) {
-                if let Some(attr) = attrs.by_key(&sym::rustc_builtin_macro).tt_values().next() {
+                if let Some(attr) = attrs.by_key(sym::rustc_builtin_macro).tt_values().next() {
                     // NOTE: The item *may* have both `#[rustc_builtin_macro]` and `#[proc_macro_derive]`,
                     // in which case rustc ignores the helper attributes from the latter, but it
                     // "doesn't make sense in practice" (see rust-lang/rust#87027).
@@ -2331,8 +2356,8 @@ impl ModCollector<'_, '_> {
                         stdx::always!(
                             name == mac.name,
                             "built-in macro {} has #[rustc_builtin_macro] which declares different name {}",
-                            mac.name.display(self.def_collector.db.upcast(), Edition::LATEST),
-                            name.display(self.def_collector.db.upcast(), Edition::LATEST),
+                            mac.name.display(self.def_collector.db, Edition::LATEST),
+                            name.display(self.def_collector.db, Edition::LATEST),
                         );
                         helpers_opt = Some(helpers);
                     }
@@ -2351,7 +2376,7 @@ impl ModCollector<'_, '_> {
             // Case 2: normal `macro`
             MacroExpander::Declarative
         };
-        let allow_internal_unsafe = attrs.by_key(&sym::allow_internal_unsafe).exists();
+        let allow_internal_unsafe = attrs.by_key(sym::allow_internal_unsafe).exists();
 
         let macro_id = Macro2Loc {
             container: module,
@@ -2361,6 +2386,10 @@ impl ModCollector<'_, '_> {
             edition: self.def_collector.def_map.data.edition,
         }
         .intern(self.def_collector.db);
+        self.def_collector.def_map.macro_def_to_macro_id.insert(
+            InFile::new(self.file_id(), self.item_tree[id].ast_id()).erase(),
+            macro_id.into(),
+        );
         self.def_collector.define_macro_def(
             self.module_id,
             mac.name.clone(),
@@ -2389,9 +2418,10 @@ impl ModCollector<'_, '_> {
         // new legacy macros that create textual scopes. We need a way to resolve names in textual
         // scopes without eager expansion.
 
+        let mut eager_callback_buffer = vec![];
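+        // Eager expansions triggered while resolving this call are buffered here and recorded in
+        // the module scope below if resolution succeeds.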
         // Case 1: try to resolve macro calls with single-segment name and expand macro_rules
         if let Ok(res) = macro_call_as_call_id_with_eager(
-            db.upcast(),
+            db,
             ast_id.ast_id,
             &ast_id.path,
             ctxt,
@@ -2417,6 +2447,10 @@ impl ModCollector<'_, '_> {
             },
             |path| {
                 let resolved_res = self.def_collector.def_map.resolve_path_fp_with_macro(
+                    self.def_collector
+                        .crate_local_def_map
+                        .as_deref()
+                        .unwrap_or(&self.def_collector.local_def_map),
                     db,
                     ResolveMode::Other,
                     self.module_id,
@@ -2426,7 +2460,13 @@ impl ModCollector<'_, '_> {
                 );
                 resolved_res.resolved_def.take_macros().map(|it| db.macro_def(it))
             },
+            &mut |ptr, call_id| eager_callback_buffer.push((ptr, call_id)),
         ) {
+            for (ptr, call_id) in eager_callback_buffer {
+                self.def_collector.def_map.modules[self.module_id]
+                    .scope
+                    .add_macro_invoc(ptr.map(|(_, it)| it), call_id);
+            }
             // FIXME: if there were errors, this might've been in the eager expansion from an
             // unresolved macro, so we need to push this into late macro resolution. see fixme above
             if res.err.is_none() {
@@ -2517,7 +2557,6 @@ impl ModCollector<'_, '_> {
 
 #[cfg(test)]
 mod tests {
-    use base_db::SourceDatabase;
     use test_fixture::WithFixture;
 
     use crate::{nameres::DefMapCrateData, test_db::TestDB};
@@ -2528,6 +2567,8 @@ mod tests {
         let mut collector = DefCollector {
             db,
             def_map,
+            local_def_map: LocalDefMap::default(),
+            crate_local_def_map: None,
             deps: FxHashMap::default(),
             glob_imports: FxHashMap::default(),
             unresolved_imports: Vec::new(),
@@ -2550,7 +2591,7 @@ mod tests {
         let (db, file_id) = TestDB::with_single_file(not_ra_fixture);
         let krate = db.test_crate();
 
-        let edition = db.crate_graph()[krate].edition;
+        let edition = krate.data(&db).edition;
         let module_origin = ModuleOrigin::CrateRoot { definition: file_id };
         let def_map = DefMap::empty(
             krate,
@@ -2588,7 +2629,7 @@ foo!(KABOOM);
         // the release mode. That's why the argument is not an ra_fixture --
         // otherwise injection highlighting gets stuck.
         //
-        // We need to find a way to fail this faster.
+        // We need to find a way to fail this faster!
         do_resolve(
             r#"
 macro_rules! foo {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs
index bc1617c55b029..de3d2f48367f7 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs
@@ -3,15 +3,14 @@
 use std::ops::Not;
 
 use cfg::{CfgExpr, CfgOptions};
-use hir_expand::{attrs::AttrId, ExpandErrorKind, MacroCallKind};
+use hir_expand::{ExpandErrorKind, MacroCallKind, attrs::AttrId, mod_path::ModPath};
 use la_arena::Idx;
 use syntax::ast;
 
 use crate::{
+    AstId,
     item_tree::{self, AttrOwner, ItemTreeId, TreeId},
     nameres::LocalModuleId,
-    path::ModPath,
-    AstId,
 };
 
 #[derive(Debug, PartialEq, Eq)]
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs
index 17d09bcbd0478..d6c9f5a00c91a 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs
@@ -1,10 +1,9 @@
 //! This module resolves `mod foo;` declarations to files.
 use arrayvec::ArrayVec;
 use base_db::AnchoredPath;
-use hir_expand::{name::Name, HirFileIdExt};
-use span::EditionedFileId;
+use hir_expand::{EditionedFileId, name::Name};
 
-use crate::{db::DefDatabase, HirFileId};
+use crate::{HirFileId, db::DefDatabase};
 
 const MOD_DEPTH_LIMIT: usize = 32;
 
@@ -77,9 +76,9 @@ impl ModDir {
             }
         };
 
-        let orig_file_id = file_id.original_file_respecting_includes(db.upcast());
+        let orig_file_id = file_id.original_file_respecting_includes(db);
         for candidate in candidate_files.iter() {
-            let path = AnchoredPath { anchor: orig_file_id.file_id(), path: candidate.as_str() };
+            let path = AnchoredPath { anchor: orig_file_id.file_id(db), path: candidate.as_str() };
             if let Some(file_id) = db.resolve_path(path) {
                 let is_mod_rs = candidate.ends_with("/mod.rs");
 
@@ -92,7 +91,7 @@ impl ModDir {
                 if let Some(mod_dir) = self.child(dir_path, !root_dir_owner) {
                     return Ok((
                         // FIXME: Edition, is this right?
-                        EditionedFileId::new(file_id, orig_file_id.edition()),
+                        EditionedFileId::new(db, file_id, orig_file_id.edition(db)),
                         is_mod_rs,
                         mod_dir,
                     ));
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs
index 47c08d3d1dc67..f8b2c73a8f68e 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs
@@ -11,19 +11,22 @@
 //! `ReachedFixedPoint` signals about this.
 
 use either::Either;
-use hir_expand::{name::Name, Lookup};
+use hir_expand::{
+    Lookup,
+    mod_path::{ModPath, PathKind},
+    name::Name,
+};
 use span::Edition;
 use triomphe::Arc;
 
 use crate::{
+    AdtId, LocalModuleId, ModuleDefId,
     db::DefDatabase,
-    item_scope::{ImportOrExternCrate, BUILTIN_SCOPE},
+    item_scope::{BUILTIN_SCOPE, ImportOrExternCrate},
     item_tree::FieldsShape,
-    nameres::{sub_namespace_match, BlockInfo, BuiltinShadowMode, DefMap, MacroSubNs},
-    path::{ModPath, PathKind},
+    nameres::{BlockInfo, BuiltinShadowMode, DefMap, LocalDefMap, MacroSubNs, sub_namespace_match},
     per_ns::PerNs,
     visibility::{RawVisibility, Visibility},
-    AdtId, LocalModuleId, ModuleDefId,
 };
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq)]
@@ -91,6 +94,7 @@ impl PerNs {
 impl DefMap {
     pub(crate) fn resolve_visibility(
         &self,
+        local_def_map: &LocalDefMap,
         db: &dyn DefDatabase,
         // module to import to
         original_module: LocalModuleId,
@@ -101,8 +105,14 @@ impl DefMap {
     ) -> Option<Visibility> {
         let mut vis = match visibility {
             RawVisibility::Module(path, explicitness) => {
-                let (result, remaining) =
-                    self.resolve_path(db, original_module, path, BuiltinShadowMode::Module, None);
+                let (result, remaining) = self.resolve_path(
+                    local_def_map,
+                    db,
+                    original_module,
+                    path,
+                    BuiltinShadowMode::Module,
+                    None,
+                );
                 if remaining.is_some() {
                     return None;
                 }
@@ -137,6 +147,7 @@ impl DefMap {
     // the result.
     pub(super) fn resolve_path_fp_with_macro(
         &self,
+        local_def_map: &LocalDefMap,
         db: &dyn DefDatabase,
         mode: ResolveMode,
         // module to import to
@@ -148,6 +159,7 @@ impl DefMap {
         expected_macro_subns: Option<MacroSubNs>,
     ) -> ResolvePathResult {
         let mut result = self.resolve_path_fp_with_macro_single(
+            local_def_map,
             db,
             mode,
             original_module,
@@ -196,6 +208,7 @@ impl DefMap {
                         current_map = &arc;
 
                         let new = current_map.resolve_path_fp_in_all_preludes(
+                            local_def_map,
                             db,
                             mode,
                             original_module,
@@ -210,6 +223,7 @@ impl DefMap {
             }
 
             let new = current_map.resolve_path_fp_with_macro_single(
+                local_def_map,
                 db,
                 mode,
                 original_module,
@@ -224,6 +238,7 @@ impl DefMap {
 
     pub(super) fn resolve_path_fp_with_macro_single(
         &self,
+        local_def_map: &LocalDefMap,
         db: &dyn DefDatabase,
         mode: ResolveMode,
         original_module: LocalModuleId,
@@ -258,7 +273,12 @@ impl DefMap {
                     None => return ResolvePathResult::empty(ReachedFixedPoint::Yes),
                 };
                 tracing::debug!("resolving {:?} in crate root (+ extern prelude)", segment);
-                self.resolve_name_in_crate_root_or_extern_prelude(db, original_module, segment)
+                self.resolve_name_in_crate_root_or_extern_prelude(
+                    local_def_map,
+                    db,
+                    original_module,
+                    segment,
+                )
             }
             PathKind::Plain => {
                 let (_, segment) = match segments.next() {
@@ -276,6 +296,7 @@ impl DefMap {
 
                 tracing::debug!("resolving {:?} in module", segment);
                 self.resolve_name_in_module(
+                    local_def_map,
                     db,
                     original_module,
                     segment,
@@ -321,7 +342,9 @@ impl DefMap {
                     // with), resolve the remaining path segments in that `DefMap`.
                     let path =
                         ModPath::from_segments(PathKind::SELF, path.segments().iter().cloned());
+                    // This is the same crate, so the local def map is the same.
                     return def_map.resolve_path_fp_with_macro(
+                        local_def_map,
                         db,
                         mode,
                         local_id,
@@ -333,10 +356,10 @@ impl DefMap {
 
                 PerNs::types(module.into(), Visibility::Public, None)
             }
-            PathKind::Abs => match self.resolve_path_abs(&mut segments, path) {
+            PathKind::Abs => match self.resolve_path_abs(local_def_map, &mut segments, path) {
                 Either::Left(it) => it,
                 Either::Right(reached_fixed_point) => {
-                    return ResolvePathResult::empty(reached_fixed_point)
+                    return ResolvePathResult::empty(reached_fixed_point);
                 }
             },
         };
@@ -347,6 +370,7 @@ impl DefMap {
     /// Resolves a path only in the preludes, without accounting for item scopes.
     pub(super) fn resolve_path_fp_in_all_preludes(
         &self,
+        local_def_map: &LocalDefMap,
         db: &dyn DefDatabase,
         mode: ResolveMode,
         original_module: LocalModuleId,
@@ -368,7 +392,7 @@ impl DefMap {
                     None => return ResolvePathResult::empty(ReachedFixedPoint::Yes),
                 };
                 tracing::debug!("resolving {:?} in crate root (+ extern prelude)", segment);
-                self.resolve_name_in_extern_prelude(segment)
+                self.resolve_name_in_extern_prelude(local_def_map, segment)
             }
             PathKind::Plain => {
                 let (_, segment) = match segments.next() {
@@ -376,16 +400,16 @@ impl DefMap {
                     None => return ResolvePathResult::empty(ReachedFixedPoint::Yes),
                 };
                 tracing::debug!("resolving {:?} in module", segment);
-                self.resolve_name_in_all_preludes(db, segment)
+                self.resolve_name_in_all_preludes(local_def_map, db, segment)
             }
-            PathKind::Abs => match self.resolve_path_abs(&mut segments, path) {
+            PathKind::Abs => match self.resolve_path_abs(local_def_map, &mut segments, path) {
                 Either::Left(it) => it,
                 Either::Right(reached_fixed_point) => {
-                    return ResolvePathResult::empty(reached_fixed_point)
+                    return ResolvePathResult::empty(reached_fixed_point);
                 }
             },
             PathKind::DollarCrate(_) | PathKind::Crate | PathKind::Super(_) => {
-                return ResolvePathResult::empty(ReachedFixedPoint::Yes)
+                return ResolvePathResult::empty(ReachedFixedPoint::Yes);
             }
         };
 
@@ -395,6 +419,7 @@ impl DefMap {
     /// 2018-style absolute path -- only extern prelude
     fn resolve_path_abs<'a>(
         &self,
+        local_def_map: &LocalDefMap,
         segments: &mut impl Iterator<Item = (usize, &'a Name)>,
         path: &ModPath,
     ) -> Either<PerNs, ReachedFixedPoint> {
@@ -402,7 +427,7 @@ impl DefMap {
             Some((_, segment)) => segment,
             None => return Either::Right(ReachedFixedPoint::Yes),
         };
-        if let Some(&(def, extern_crate)) = self.data.extern_prelude.get(segment) {
+        if let Some(&(def, extern_crate)) = local_def_map.extern_prelude.get(segment) {
             tracing::debug!("absolute path {:?} resolved to crate {:?}", path, def);
             Either::Left(PerNs::types(
                 def.into(),
@@ -451,6 +476,7 @@ impl DefMap {
                         // this point, we know we're resolving a multi-segment path so macro kind
                         // expectation is discarded.
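+                        // The remaining segments are resolved inside the other crate's items, so
+                        // its extern prelude is not consulted; an empty local def map suffices.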
                         let resolution = defp_map.resolve_path_fp_with_macro(
+                            LocalDefMap::EMPTY,
                             db,
                             ResolveMode::Other,
                             module.local_id,
@@ -483,33 +509,24 @@ impl DefMap {
                 ModuleDefId::AdtId(AdtId::EnumId(e)) => {
                     // enum variant
                     cov_mark::hit!(can_import_enum_variant);
-                    let def_map;
 
-                    let loc = e.lookup(db);
-                    let tree = loc.id.item_tree(db);
-                    let current_def_map =
-                        self.krate == loc.container.krate && self.block_id() == loc.container.block;
-                    let res = if current_def_map {
-                        &self.enum_definitions[&e]
-                    } else {
-                        def_map = loc.container.def_map(db);
-                        &def_map.enum_definitions[&e]
-                    }
-                    .iter()
-                    .find_map(|&variant| {
-                        let variant_data = &tree[variant.lookup(db).id.value];
-                        (variant_data.name == *segment).then(|| match variant_data.shape {
-                            FieldsShape::Record => {
-                                PerNs::types(variant.into(), Visibility::Public, None)
-                            }
-                            FieldsShape::Tuple | FieldsShape::Unit => PerNs::both(
-                                variant.into(),
-                                variant.into(),
-                                Visibility::Public,
-                                None,
-                            ),
-                        })
-                    });
+                    let res =
+                        db.enum_variants(e).variants.iter().find(|(_, name)| name == segment).map(
+                            |&(variant, _)| {
+                                let item_tree_id = variant.lookup(db).id;
+                                match item_tree_id.item_tree(db)[item_tree_id.value].shape {
+                                    FieldsShape::Record => {
+                                        PerNs::types(variant.into(), Visibility::Public, None)
+                                    }
+                                    FieldsShape::Tuple | FieldsShape::Unit => PerNs::both(
+                                        variant.into(),
+                                        variant.into(),
+                                        Visibility::Public,
+                                        None,
+                                    ),
+                                }
+                            },
+                        );
                     // FIXME: Need to filter visibility here and below? Not sure.
                     return match res {
                         Some(res) => {
@@ -568,6 +585,7 @@ impl DefMap {
 
     fn resolve_name_in_module(
         &self,
+        local_def_map: &LocalDefMap,
         db: &dyn DefDatabase,
         module: LocalModuleId,
         name: &Name,
@@ -611,7 +629,7 @@ impl DefMap {
                 // they might been shadowed by local names.
                 return PerNs::none();
             }
-            self.resolve_name_in_extern_prelude(name)
+            self.resolve_name_in_extern_prelude(local_def_map, name)
         };
         let macro_use_prelude = || self.resolve_in_macro_use_prelude(name);
         let prelude = || {
@@ -628,19 +646,24 @@ impl DefMap {
             .or_else(prelude)
     }
 
-    fn resolve_name_in_all_preludes(&self, db: &dyn DefDatabase, name: &Name) -> PerNs {
+    fn resolve_name_in_all_preludes(
+        &self,
+        local_def_map: &LocalDefMap,
+        db: &dyn DefDatabase,
+        name: &Name,
+    ) -> PerNs {
         // Resolve in:
         //  - extern prelude / macro_use prelude
         //  - std prelude
-        let extern_prelude = self.resolve_name_in_extern_prelude(name);
+        let extern_prelude = self.resolve_name_in_extern_prelude(local_def_map, name);
         let macro_use_prelude = || self.resolve_in_macro_use_prelude(name);
         let prelude = || self.resolve_in_prelude(db, name);
 
         extern_prelude.or_else(macro_use_prelude).or_else(prelude)
     }
 
-    fn resolve_name_in_extern_prelude(&self, name: &Name) -> PerNs {
-        self.data.extern_prelude.get(name).map_or(PerNs::none(), |&(it, extern_crate)| {
+    fn resolve_name_in_extern_prelude(&self, local_def_map: &LocalDefMap, name: &Name) -> PerNs {
+        local_def_map.extern_prelude.get(name).map_or(PerNs::none(), |&(it, extern_crate)| {
             PerNs::types(
                 it.into(),
                 Visibility::Public,
@@ -662,6 +685,7 @@ impl DefMap {
 
     fn resolve_name_in_crate_root_or_extern_prelude(
         &self,
+        local_def_map: &LocalDefMap,
         db: &dyn DefDatabase,
         module: LocalModuleId,
         name: &Name,
@@ -678,7 +702,7 @@ impl DefMap {
                 // Don't resolve extern prelude in pseudo-module of a block.
                 return PerNs::none();
             }
-            self.resolve_name_in_extern_prelude(name)
+            self.resolve_name_in_extern_prelude(local_def_map, name)
         };
 
         from_crate_root.or_else(from_extern_prelude)
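
The prelude lookups above are deliberately lazy: the extern prelude is consulted first, and the macro_use prelude and std prelude are only queried through closures when the earlier steps come up empty. A minimal standalone sketch of that fallback shape, using plain Option and hypothetical HashMap-backed preludes instead of PerNs:

use std::collections::HashMap;

// Sketch only: models the lazy prelude fallback chain with Option instead of PerNs.
fn resolve_in_all_preludes(
    extern_prelude: &HashMap<String, u32>,
    macro_use_prelude: &HashMap<String, u32>,
    std_prelude: &HashMap<String, u32>,
    name: &str,
) -> Option<u32> {
    let from_extern = extern_prelude.get(name).copied();
    // Later lookups are wrapped in closures so they only run if the earlier ones fail.
    let from_macro_use = || macro_use_prelude.get(name).copied();
    let from_std = || std_prelude.get(name).copied();
    from_extern.or_else(from_macro_use).or_else(from_std)
}

fn main() {
    let extern_p = HashMap::from([("alloc".to_owned(), 1)]);
    let macro_p = HashMap::new();
    let std_p = HashMap::from([("Vec".to_owned(), 2)]);
    assert_eq!(resolve_in_all_preludes(&extern_p, &macro_p, &std_p, "alloc"), Some(1));
    assert_eq!(resolve_in_all_preludes(&extern_p, &macro_p, &std_p, "Vec"), Some(2));
    assert_eq!(resolve_in_all_preludes(&extern_p, &macro_p, &std_p, "missing"), None);
}
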
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs
index b93a1c87b432f..cd8882183bb4d 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs
@@ -30,26 +30,36 @@ impl ProcMacroKind {
 }
 
 impl Attrs {
-    #[rustfmt::skip]
     pub fn parse_proc_macro_decl(&self, func_name: &Name) -> Option<ProcMacroDef> {
         if self.is_proc_macro() {
             Some(ProcMacroDef { name: func_name.clone(), kind: ProcMacroKind::Bang })
         } else if self.is_proc_macro_attribute() {
             Some(ProcMacroDef { name: func_name.clone(), kind: ProcMacroKind::Attr })
-        } else if self.by_key(&sym::proc_macro_derive).exists() {
-            let derive = self.by_key(&sym::proc_macro_derive).tt_values().next()?;
-            let def = parse_macro_name_and_helper_attrs(derive)
-                .map(|(name, helpers)| ProcMacroDef { name, kind: ProcMacroKind::Derive { helpers } });
-
-            if def.is_none() {
-                tracing::trace!("malformed `#[proc_macro_derive]`: {}", derive);
-            }
-
-            def
+        } else if self.by_key(sym::proc_macro_derive).exists() {
+            let derive = self.parse_proc_macro_derive();
+            Some(match derive {
+                Some((name, helpers)) => {
+                    ProcMacroDef { name, kind: ProcMacroKind::Derive { helpers } }
+                }
+                None => ProcMacroDef {
+                    name: func_name.clone(),
+                    kind: ProcMacroKind::Derive { helpers: Box::default() },
+                },
+            })
         } else {
             None
         }
     }
+
+    pub fn parse_proc_macro_derive(&self) -> Option<(Name, Box<[Name]>)> {
+        let derive = self.by_key(sym::proc_macro_derive).tt_values().next()?;
+        parse_macro_name_and_helper_attrs(derive)
+    }
+
+    pub fn parse_rustc_builtin_macro(&self) -> Option<(Name, Box<[Name]>)> {
+        let derive = self.by_key(sym::rustc_builtin_macro).tt_values().next()?;
+        parse_macro_name_and_helper_attrs(derive)
+    }
 }
 
 // This fn is intended for `#[proc_macro_derive(..)]` and `#[rustc_builtin_macro(..)]`, which have
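
With this change a malformed `#[proc_macro_derive(..)]` no longer makes the derive disappear; resolution falls back to the function's own name with an empty helper list. A rough standalone sketch of that fallback (the `Derive` struct here is a hypothetical stand-in for `ProcMacroDef`):

// Sketch only: hypothetical stand-in types for the derive-declaration fallback.
#[derive(Debug, PartialEq)]
struct Derive {
    name: String,
    helpers: Vec<String>,
}

fn derive_decl(parsed_attr: Option<(String, Vec<String>)>, func_name: &str) -> Derive {
    match parsed_attr {
        // Well-formed `#[proc_macro_derive(Name, attributes(helper))]`.
        Some((name, helpers)) => Derive { name, helpers },
        // Malformed attribute: keep the derive alive under the function's own name.
        None => Derive { name: func_name.to_owned(), helpers: Vec::new() },
    }
}

fn main() {
    let ok = derive_decl(Some(("MyDerive".into(), vec!["helper".into()])), "my_derive_fn");
    assert_eq!(ok.name, "MyDerive");
    let broken = derive_decl(None, "my_derive_fn");
    assert_eq!(broken, Derive { name: "my_derive_fn".into(), helpers: Vec::new() });
}
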
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs
index 73fc6787bfe81..3fd095a9a98a8 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs
@@ -4,8 +4,8 @@ mod macros;
 mod mod_resolution;
 mod primitives;
 
-use base_db::SourceDatabase;
-use expect_test::{expect, Expect};
+use base_db::RootQueryDb;
+use expect_test::{Expect, expect};
 use test_fixture::WithFixture;
 use triomphe::Arc;
 
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs
index c8b7ec463a0fd..179a9c8fec21b 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs
@@ -1,7 +1,13 @@
-use base_db::SourceDatabaseFileInputExt as _;
+use base_db::{
+    CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin, CrateWorkspaceData,
+    DependencyBuilder, Env, RootQueryDb, SourceDatabase,
+};
+use intern::Symbol;
+use span::Edition;
 use test_fixture::WithFixture;
+use triomphe::Arc;
 
-use crate::{db::DefDatabase, nameres::tests::TestDB, AdtId, ModuleDefId};
+use crate::{AdtId, ModuleDefId, db::DefDatabase, nameres::tests::TestDB};
 
 fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change: &str) {
     let (mut db, pos) = TestDB::with_position(ra_fixture_initial);
@@ -12,7 +18,7 @@ fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change:
         });
         assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}")
     }
-    db.set_file_text(pos.file_id.file_id(), ra_fixture_change);
+    db.set_file_text(pos.file_id.file_id(&db), ra_fixture_change);
 
     {
         let events = db.log_executed(|| {
@@ -22,6 +28,80 @@ fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change:
     }
 }
 
+#[test]
+fn crate_metadata_changes_should_not_invalidate_unrelated_def_maps() {
+    let (mut db, files) = TestDB::with_many_files(
+        r#"
+//- /a.rs crate:a
+pub fn foo() {}
+
+//- /b.rs crate:b
+pub struct Bar;
+
+//- /c.rs crate:c deps:b
+pub const BAZ: u32 = 0;
+    "#,
+    );
+
+    for &krate in db.all_crates().iter() {
+        db.crate_def_map(krate);
+    }
+
+    let all_crates_before = db.all_crates();
+
+    {
+        // Add a dependency a -> b.
+        let mut new_crate_graph = CrateGraphBuilder::default();
+
+        let mut add_crate = |crate_name, root_file_idx: usize| {
+            new_crate_graph.add_crate_root(
+                files[root_file_idx].file_id(&db),
+                Edition::CURRENT,
+                Some(CrateDisplayName::from_canonical_name(crate_name)),
+                None,
+                Default::default(),
+                None,
+                Env::default(),
+                CrateOrigin::Local { repo: None, name: Some(Symbol::intern(crate_name)) },
+                false,
+                Arc::new(
+                    // FIXME: This is less than ideal
+                    TryFrom::try_from(
+                        &*std::env::current_dir().unwrap().as_path().to_string_lossy(),
+                    )
+                    .unwrap(),
+                ),
+                Arc::new(CrateWorkspaceData { data_layout: Err("".into()), toolchain: None }),
+            )
+        };
+        let a = add_crate("a", 0);
+        let b = add_crate("b", 1);
+        let c = add_crate("c", 2);
+        new_crate_graph
+            .add_dep(c, DependencyBuilder::new(CrateName::new("b").unwrap(), b))
+            .unwrap();
+        new_crate_graph
+            .add_dep(b, DependencyBuilder::new(CrateName::new("a").unwrap(), a))
+            .unwrap();
+        new_crate_graph.set_in_db(&mut db);
+    }
+
+    let all_crates_after = db.all_crates();
+    assert!(
+        Arc::ptr_eq(&all_crates_before, &all_crates_after),
+        "the all_crates list should not have been invalidated"
+    );
+
+    let events = db.log_executed(|| {
+        for &krate in db.all_crates().iter() {
+            db.crate_def_map(krate);
+        }
+    });
+    let invalidated_def_maps =
+        events.iter().filter(|event| event.contains("crate_def_map")).count();
+    assert_eq!(invalidated_def_maps, 1, "{events:#?}")
+}
+
 #[test]
 fn typing_inside_a_function_should_not_invalidate_def_map() {
     check_def_map_is_not_recomputed(
@@ -255,10 +335,10 @@ m!(Z);
             assert_eq!(module_data.scope.resolutions().count(), 4);
         });
         let n_recalculated_item_trees =
-            events.iter().filter(|it| it.contains("item_tree(")).count();
+            events.iter().filter(|it| it.contains("file_item_tree_shim")).count();
         assert_eq!(n_recalculated_item_trees, 6);
         let n_reparsed_macros =
-            events.iter().filter(|it| it.contains("parse_macro_expansion(")).count();
+            events.iter().filter(|it| it.contains("parse_macro_expansion_shim")).count();
         assert_eq!(n_reparsed_macros, 3);
     }
 
@@ -268,7 +348,7 @@ fn quux() { 92 }
 m!(Y);
 m!(Z);
 "#;
-    db.set_file_text(pos.file_id.file_id(), new_text);
+    db.set_file_text(pos.file_id.file_id(&db), new_text);
 
     {
         let events = db.log_executed(|| {
@@ -276,10 +356,11 @@ m!(Z);
             let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
             assert_eq!(module_data.scope.resolutions().count(), 4);
         });
-        let n_recalculated_item_trees = events.iter().filter(|it| it.contains("item_tree")).count();
-        assert_eq!(n_recalculated_item_trees, 1);
+        let n_recalculated_item_trees =
+            events.iter().filter(|it| it.contains("file_item_tree_shim")).count();
+        assert_eq!(n_recalculated_item_trees, 1, "{events:#?}");
         let n_reparsed_macros =
-            events.iter().filter(|it| it.contains("parse_macro_expansion(")).count();
+            events.iter().filter(|it| it.contains("parse_macro_expansion_shim")).count();
         assert_eq!(n_reparsed_macros, 0);
     }
 }
@@ -310,14 +391,15 @@ pub type Ty = ();
         let events = db.log_executed(|| {
             db.file_item_tree(pos.file_id.into());
         });
-        let n_calculated_item_trees = events.iter().filter(|it| it.contains("item_tree(")).count();
+        let n_calculated_item_trees =
+            events.iter().filter(|it| it.contains("file_item_tree_shim")).count();
         assert_eq!(n_calculated_item_trees, 1);
-        let n_parsed_files = events.iter().filter(|it| it.contains("parse(")).count();
+        let n_parsed_files = events.iter().filter(|it| it.contains("parse")).count();
         assert_eq!(n_parsed_files, 1);
     }
 
-    // Delete the parse tree.
-    base_db::ParseQuery.in_db(&db).purge();
+    // FIXME(salsa-transition): bring this back
+    // base_db::ParseQuery.in_db(&db).purge();
 
     {
         let events = db.log_executed(|| {
@@ -327,22 +409,22 @@ pub type Ty = ();
             assert_eq!(module_data.scope.impls().count(), 1);
 
             for imp in module_data.scope.impls() {
-                db.impl_data(imp);
+                db.impl_signature(imp);
             }
 
             for (_, res) in module_data.scope.resolutions() {
                 match res.values.map(|it| it.def).or(res.types.map(|it| it.def)).unwrap() {
-                    ModuleDefId::FunctionId(f) => _ = db.function_data(f),
+                    ModuleDefId::FunctionId(f) => _ = db.function_signature(f),
                     ModuleDefId::AdtId(adt) => match adt {
-                        AdtId::StructId(it) => _ = db.struct_data(it),
-                        AdtId::UnionId(it) => _ = db.union_data(it),
-                        AdtId::EnumId(it) => _ = db.enum_data(it),
+                        AdtId::StructId(it) => _ = db.struct_signature(it),
+                        AdtId::UnionId(it) => _ = db.union_signature(it),
+                        AdtId::EnumId(it) => _ = db.enum_signature(it),
                     },
-                    ModuleDefId::ConstId(it) => _ = db.const_data(it),
-                    ModuleDefId::StaticId(it) => _ = db.static_data(it),
-                    ModuleDefId::TraitId(it) => _ = db.trait_data(it),
-                    ModuleDefId::TraitAliasId(it) => _ = db.trait_alias_data(it),
-                    ModuleDefId::TypeAliasId(it) => _ = db.type_alias_data(it),
+                    ModuleDefId::ConstId(it) => _ = db.const_signature(it),
+                    ModuleDefId::StaticId(it) => _ = db.static_signature(it),
+                    ModuleDefId::TraitId(it) => _ = db.trait_signature(it),
+                    ModuleDefId::TraitAliasId(it) => _ = db.trait_alias_signature(it),
+                    ModuleDefId::TypeAliasId(it) => _ = db.type_alias_signature(it),
                     ModuleDefId::EnumVariantId(_)
                     | ModuleDefId::ModuleId(_)
                     | ModuleDefId::MacroId(_)
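
The new test above checks incrementality by rebuilding the crate graph and then counting how many `crate_def_map` computations show up in the logged query events. A standalone sketch of that counting step, assuming the log is simply a list of strings like the `{events:#?}` output (the log lines below are made up):

// Sketch only: counting recomputed queries from a textual event log.
fn count_events(events: &[String], query_marker: &str) -> usize {
    events.iter().filter(|event| event.contains(query_marker)).count()
}

fn main() {
    // Hypothetical log lines; real salsa event debug output differs in detail.
    let events = vec![
        "WillExecute: crate_def_map(c)".to_owned(),
        "WillExecute: file_item_tree_shim(b.rs)".to_owned(),
    ];
    // Only the def map of the crate whose dependencies changed should be recomputed.
    assert_eq!(count_events(&events, "crate_def_map"), 1);
    assert_eq!(count_events(&events, "file_item_tree_shim"), 1);
}
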
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs
index 610886d55f40f..5f8a01523d820 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs
@@ -1095,7 +1095,7 @@ pub fn derive_macro_2(_item: TokenStream) -> TokenStream {
 }
 "#,
     );
-    let krate = db.crate_graph().iter().next().unwrap();
+    let krate = *db.all_crates().last().expect("no crate graph present");
     let def_map = db.crate_def_map(krate);
 
     assert_eq!(def_map.data.exported_derives.len(), 1);
@@ -1445,7 +1445,7 @@ struct TokenStream;
 fn proc_attr(a: TokenStream, b: TokenStream) -> TokenStream { a }
     "#,
     );
-    let krate = db.crate_graph().iter().next().unwrap();
+    let krate = *db.all_crates().last().expect("no crate graph present");
     let def_map = db.crate_def_map(krate);
 
     let root_module = &def_map[DefMap::ROOT].scope;
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/per_ns.rs b/src/tools/rust-analyzer/crates/hir-def/src/per_ns.rs
index c2d3f67f17e77..1f7dd6f0c4075 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/per_ns.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/per_ns.rs
@@ -6,9 +6,9 @@
 use bitflags::bitflags;
 
 use crate::{
+    MacroId, ModuleDefId,
     item_scope::{ImportId, ImportOrExternCrate, ImportOrGlob, ItemInNs},
     visibility::Visibility,
-    MacroId, ModuleDefId,
 };
 
 #[derive(PartialEq, Eq, Hash, Copy, Clone, Debug)]
@@ -146,11 +146,7 @@ impl PerNs {
     }
 
     pub fn or_else(self, f: impl FnOnce() -> PerNs) -> PerNs {
-        if self.is_full() {
-            self
-        } else {
-            self.or(f())
-        }
+        if self.is_full() { self } else { self.or(f()) }
     }
 
     pub fn iter_items(self) -> impl Iterator<Item = (ItemInNs, Option<ImportOrExternCrate>)> {
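
The reformatted `or_else` keeps its short-circuit: when every namespace is already filled the fallback closure is never evaluated, and otherwise only the missing namespaces are taken from it. A minimal sketch of the same pattern on a hypothetical two-namespace struct:

// Sketch only: a two-namespace stand-in for PerNs to show the or_else short-circuit.
#[derive(Clone, Copy, Debug, PartialEq)]
struct TinyPerNs {
    types: Option<u32>,
    values: Option<u32>,
}

impl TinyPerNs {
    fn is_full(self) -> bool {
        self.types.is_some() && self.values.is_some()
    }

    fn or(self, other: TinyPerNs) -> TinyPerNs {
        TinyPerNs { types: self.types.or(other.types), values: self.values.or(other.values) }
    }

    // Only evaluates the fallback when at least one namespace is still empty.
    fn or_else(self, f: impl FnOnce() -> TinyPerNs) -> TinyPerNs {
        if self.is_full() { self } else { self.or(f()) }
    }
}

fn main() {
    let partial = TinyPerNs { types: Some(1), values: None };
    let fallback = TinyPerNs { types: Some(9), values: Some(2) };
    // Existing entries win; only the missing namespace is filled from the fallback.
    assert_eq!(partial.or_else(|| fallback), TinyPerNs { types: Some(1), values: Some(2) });
}
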
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs
deleted file mode 100644
index eb9488feaa914..0000000000000
--- a/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs
+++ /dev/null
@@ -1,306 +0,0 @@
-//! Display and pretty printing routines.
-
-use std::{
-    fmt::{self, Write},
-    mem,
-};
-
-use hir_expand::mod_path::PathKind;
-use itertools::Itertools;
-use span::Edition;
-
-use crate::{
-    db::DefDatabase,
-    lang_item::LangItemTarget,
-    path::{GenericArg, GenericArgs, Path},
-    type_ref::{
-        Mutability, TraitBoundModifier, TypeBound, TypeRef, TypeRefId, TypesMap, UseArgRef,
-    },
-};
-
-pub(crate) fn print_path(
-    db: &dyn DefDatabase,
-    path: &Path,
-    map: &TypesMap,
-    buf: &mut dyn Write,
-    edition: Edition,
-) -> fmt::Result {
-    if let Path::LangItem(it, s) = path {
-        write!(buf, "builtin#lang(")?;
-        match *it {
-            LangItemTarget::ImplDef(it) => write!(buf, "{it:?}")?,
-            LangItemTarget::EnumId(it) => {
-                write!(buf, "{}", db.enum_data(it).name.display(db.upcast(), edition))?
-            }
-            LangItemTarget::Function(it) => {
-                write!(buf, "{}", db.function_data(it).name.display(db.upcast(), edition))?
-            }
-            LangItemTarget::Static(it) => {
-                write!(buf, "{}", db.static_data(it).name.display(db.upcast(), edition))?
-            }
-            LangItemTarget::Struct(it) => {
-                write!(buf, "{}", db.struct_data(it).name.display(db.upcast(), edition))?
-            }
-            LangItemTarget::Union(it) => {
-                write!(buf, "{}", db.union_data(it).name.display(db.upcast(), edition))?
-            }
-            LangItemTarget::TypeAlias(it) => {
-                write!(buf, "{}", db.type_alias_data(it).name.display(db.upcast(), edition))?
-            }
-            LangItemTarget::Trait(it) => {
-                write!(buf, "{}", db.trait_data(it).name.display(db.upcast(), edition))?
-            }
-            LangItemTarget::EnumVariant(it) => {
-                write!(buf, "{}", db.enum_variant_data(it).name.display(db.upcast(), edition))?
-            }
-        }
-
-        if let Some(s) = s {
-            write!(buf, "::{}", s.display(db.upcast(), edition))?;
-        }
-        return write!(buf, ")");
-    }
-    match path.type_anchor() {
-        Some(anchor) => {
-            write!(buf, "<")?;
-            print_type_ref(db, anchor, map, buf, edition)?;
-            write!(buf, ">::")?;
-        }
-        None => match path.kind() {
-            PathKind::Plain => {}
-            &PathKind::SELF => write!(buf, "self")?,
-            PathKind::Super(n) => {
-                for i in 0..*n {
-                    if i == 0 {
-                        buf.write_str("super")?;
-                    } else {
-                        buf.write_str("::super")?;
-                    }
-                }
-            }
-            PathKind::Crate => write!(buf, "crate")?,
-            PathKind::Abs => {}
-            PathKind::DollarCrate(_) => write!(buf, "$crate")?,
-        },
-    }
-
-    for (i, segment) in path.segments().iter().enumerate() {
-        if i != 0 || !matches!(path.kind(), PathKind::Plain) {
-            write!(buf, "::")?;
-        }
-
-        write!(buf, "{}", segment.name.display(db.upcast(), edition))?;
-        if let Some(generics) = segment.args_and_bindings {
-            write!(buf, "::<")?;
-            print_generic_args(db, generics, map, buf, edition)?;
-
-            write!(buf, ">")?;
-        }
-    }
-
-    Ok(())
-}
-
-pub(crate) fn print_generic_args(
-    db: &dyn DefDatabase,
-    generics: &GenericArgs,
-    map: &TypesMap,
-    buf: &mut dyn Write,
-    edition: Edition,
-) -> fmt::Result {
-    let mut first = true;
-    let args = if generics.has_self_type {
-        let (self_ty, args) = generics.args.split_first().unwrap();
-        write!(buf, "Self=")?;
-        print_generic_arg(db, self_ty, map, buf, edition)?;
-        first = false;
-        args
-    } else {
-        &generics.args
-    };
-    for arg in args {
-        if !first {
-            write!(buf, ", ")?;
-        }
-        first = false;
-        print_generic_arg(db, arg, map, buf, edition)?;
-    }
-    for binding in generics.bindings.iter() {
-        if !first {
-            write!(buf, ", ")?;
-        }
-        first = false;
-        write!(buf, "{}", binding.name.display(db.upcast(), edition))?;
-        if !binding.bounds.is_empty() {
-            write!(buf, ": ")?;
-            print_type_bounds(db, &binding.bounds, map, buf, edition)?;
-        }
-        if let Some(ty) = binding.type_ref {
-            write!(buf, " = ")?;
-            print_type_ref(db, ty, map, buf, edition)?;
-        }
-    }
-    Ok(())
-}
-
-pub(crate) fn print_generic_arg(
-    db: &dyn DefDatabase,
-    arg: &GenericArg,
-    map: &TypesMap,
-    buf: &mut dyn Write,
-    edition: Edition,
-) -> fmt::Result {
-    match arg {
-        GenericArg::Type(ty) => print_type_ref(db, *ty, map, buf, edition),
-        GenericArg::Const(c) => write!(buf, "{}", c.display(db.upcast(), edition)),
-        GenericArg::Lifetime(lt) => write!(buf, "{}", lt.name.display(db.upcast(), edition)),
-    }
-}
-
-pub(crate) fn print_type_ref(
-    db: &dyn DefDatabase,
-    type_ref: TypeRefId,
-    map: &TypesMap,
-    buf: &mut dyn Write,
-    edition: Edition,
-) -> fmt::Result {
-    // FIXME: deduplicate with `HirDisplay` impl
-    match &map[type_ref] {
-        TypeRef::Never => write!(buf, "!")?,
-        TypeRef::Placeholder => write!(buf, "_")?,
-        TypeRef::Tuple(fields) => {
-            write!(buf, "(")?;
-            for (i, field) in fields.iter().enumerate() {
-                if i != 0 {
-                    write!(buf, ", ")?;
-                }
-                print_type_ref(db, *field, map, buf, edition)?;
-            }
-            write!(buf, ")")?;
-        }
-        TypeRef::Path(path) => print_path(db, path, map, buf, edition)?,
-        TypeRef::RawPtr(pointee, mtbl) => {
-            let mtbl = match mtbl {
-                Mutability::Shared => "*const",
-                Mutability::Mut => "*mut",
-            };
-            write!(buf, "{mtbl} ")?;
-            print_type_ref(db, *pointee, map, buf, edition)?;
-        }
-        TypeRef::Reference(ref_) => {
-            let mtbl = match ref_.mutability {
-                Mutability::Shared => "",
-                Mutability::Mut => "mut ",
-            };
-            write!(buf, "&")?;
-            if let Some(lt) = &ref_.lifetime {
-                write!(buf, "{} ", lt.name.display(db.upcast(), edition))?;
-            }
-            write!(buf, "{mtbl}")?;
-            print_type_ref(db, ref_.ty, map, buf, edition)?;
-        }
-        TypeRef::Array(array) => {
-            write!(buf, "[")?;
-            print_type_ref(db, array.ty, map, buf, edition)?;
-            write!(buf, "; {}]", array.len.display(db.upcast(), edition))?;
-        }
-        TypeRef::Slice(elem) => {
-            write!(buf, "[")?;
-            print_type_ref(db, *elem, map, buf, edition)?;
-            write!(buf, "]")?;
-        }
-        TypeRef::Fn(fn_) => {
-            let ((_, return_type), args) =
-                fn_.params().split_last().expect("TypeRef::Fn is missing return type");
-            if fn_.is_unsafe() {
-                write!(buf, "unsafe ")?;
-            }
-            if let Some(abi) = fn_.abi() {
-                buf.write_str("extern ")?;
-                buf.write_str(abi.as_str())?;
-                buf.write_char(' ')?;
-            }
-            write!(buf, "fn(")?;
-            for (i, (_, typeref)) in args.iter().enumerate() {
-                if i != 0 {
-                    write!(buf, ", ")?;
-                }
-                print_type_ref(db, *typeref, map, buf, edition)?;
-            }
-            if fn_.is_varargs() {
-                if !args.is_empty() {
-                    write!(buf, ", ")?;
-                }
-                write!(buf, "...")?;
-            }
-            write!(buf, ") -> ")?;
-            print_type_ref(db, *return_type, map, buf, edition)?;
-        }
-        TypeRef::Macro(_ast_id) => {
-            write!(buf, "<macro>")?;
-        }
-        TypeRef::Error => write!(buf, "{{unknown}}")?,
-        TypeRef::ImplTrait(bounds) => {
-            write!(buf, "impl ")?;
-            print_type_bounds(db, bounds, map, buf, edition)?;
-        }
-        TypeRef::DynTrait(bounds) => {
-            write!(buf, "dyn ")?;
-            print_type_bounds(db, bounds, map, buf, edition)?;
-        }
-    }
-
-    Ok(())
-}
-
-pub(crate) fn print_type_bounds(
-    db: &dyn DefDatabase,
-    bounds: &[TypeBound],
-    map: &TypesMap,
-    buf: &mut dyn Write,
-    edition: Edition,
-) -> fmt::Result {
-    for (i, bound) in bounds.iter().enumerate() {
-        if i != 0 {
-            write!(buf, " + ")?;
-        }
-
-        match bound {
-            TypeBound::Path(path, modifier) => {
-                match modifier {
-                    TraitBoundModifier::None => (),
-                    TraitBoundModifier::Maybe => write!(buf, "?")?,
-                }
-                print_path(db, &map[*path], map, buf, edition)?;
-            }
-            TypeBound::ForLifetime(lifetimes, path) => {
-                write!(
-                    buf,
-                    "for<{}> ",
-                    lifetimes.iter().map(|it| it.display(db.upcast(), edition)).format(", ")
-                )?;
-                print_path(db, &map[*path], map, buf, edition)?;
-            }
-            TypeBound::Lifetime(lt) => write!(buf, "{}", lt.name.display(db.upcast(), edition))?,
-            TypeBound::Use(args) => {
-                write!(buf, "use<")?;
-                let mut first = true;
-                for arg in args {
-                    if !mem::take(&mut first) {
-                        write!(buf, ", ")?;
-                    }
-                    match arg {
-                        UseArgRef::Name(it) => write!(buf, "{}", it.display(db.upcast(), edition))?,
-                        UseArgRef::Lifetime(it) => {
-                            write!(buf, "{}", it.name.display(db.upcast(), edition))?
-                        }
-                    }
-                }
-                write!(buf, ">")?
-            }
-            TypeBound::Error => write!(buf, "{{unknown}}")?,
-        }
-    }
-
-    Ok(())
-}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
index a2e6e4cc04368..46c12574fded1 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
@@ -1,37 +1,43 @@
 //! Name resolution façade.
-use std::{fmt, iter, mem};
+use std::{fmt, mem};
 
-use base_db::CrateId;
-use hir_expand::{name::Name, MacroDefId};
-use intern::{sym, Symbol};
+use base_db::Crate;
+use hir_expand::{
+    MacroDefId,
+    mod_path::{ModPath, PathKind},
+    name::Name,
+};
+use intern::{Symbol, sym};
 use itertools::Itertools as _;
 use rustc_hash::FxHashSet;
-use smallvec::{smallvec, SmallVec};
-use span::SyntaxContextId;
+use smallvec::{SmallVec, smallvec};
+use span::SyntaxContext;
 use triomphe::Arc;
 
 use crate::{
+    AdtId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId, EnumId, EnumVariantId,
+    ExternBlockId, ExternCrateId, FunctionId, FxIndexMap, GenericDefId, GenericParamId, HasModule,
+    ImplId, ItemContainerId, ItemTreeLoc, LifetimeParamId, LocalModuleId, Lookup, Macro2Id,
+    MacroId, MacroRulesId, ModuleDefId, ModuleId, ProcMacroId, StaticId, StructId, TraitAliasId,
+    TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UseId, VariantId,
     builtin_type::BuiltinType,
-    data::ExternCrateDeclData,
     db::DefDatabase,
     expr_store::{
-        scope::{ExprScopes, ScopeId},
         HygieneId,
+        path::Path,
+        scope::{ExprScopes, ScopeId},
     },
-    generics::{GenericParams, TypeOrConstParamData},
-    hir::{BindingId, ExprId, LabelId},
-    item_scope::{BuiltinShadowMode, ImportOrExternCrate, ImportOrGlob, BUILTIN_SCOPE},
+    hir::{
+        BindingId, ExprId, LabelId,
+        generics::{GenericParams, TypeOrConstParamData},
+    },
+    item_scope::{BUILTIN_SCOPE, BuiltinShadowMode, ImportOrExternCrate, ImportOrGlob, ItemScope},
+    item_tree::ImportAlias,
     lang_item::LangItemTarget,
-    nameres::{DefMap, MacroSubNs, ResolvePathResultPrefixInfo},
-    path::{ModPath, Path, PathKind},
+    nameres::{DefMap, LocalDefMap, MacroSubNs, ResolvePathResultPrefixInfo},
     per_ns::PerNs,
-    type_ref::{LifetimeRef, TypesMap},
+    type_ref::LifetimeRef,
     visibility::{RawVisibility, Visibility},
-    AdtId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId, EnumId, EnumVariantId,
-    ExternBlockId, ExternCrateId, FunctionId, FxIndexMap, GenericDefId, GenericParamId, HasModule,
-    ImplId, ItemContainerId, ItemTreeLoc, LifetimeParamId, LocalModuleId, Lookup, Macro2Id,
-    MacroId, MacroRulesId, ModuleDefId, ModuleId, ProcMacroId, StaticId, StructId, TraitAliasId,
-    TraitId, TypeAliasId, TypeOrConstParamId, TypeOwnerId, TypeParamId, UseId, VariantId,
 };
 
 #[derive(Debug, Clone)]
@@ -47,6 +53,7 @@ pub struct Resolver {
 #[derive(Clone)]
 struct ModuleItemMap {
     def_map: Arc<DefMap>,
+    local_def_map: Arc<LocalDefMap>,
     module_id: LocalModuleId,
 }
 
@@ -76,16 +83,13 @@ impl fmt::Debug for ExprScope {
 enum Scope {
     /// All the items and imported names of a module
     BlockScope(ModuleItemMap),
-    /// Brings the generic parameters of an item into scope
+    /// Brings the generic parameters of an item into scope as well as the `Self` type alias /
+    /// generic for ADTs and impls.
     GenericParams { def: GenericDefId, params: Arc<GenericParams> },
-    /// Brings `Self` in `impl` block into scope
-    ImplDefScope(ImplId),
-    /// Brings `Self` in enum, struct and union definitions into scope
-    AdtScope(AdtId),
     /// Local bindings
     ExprScope(ExprScope),
     /// Macro definition inside bodies that affects all paths after it in the same block.
-    MacroDefScope(Box<MacroDefId>),
+    MacroDefScope(MacroDefId),
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
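
Folding `ImplDefScope` and `AdtScope` into `GenericParams` means `Self` now comes from the same scope that supplies an item's generic parameters. A plain illustrative snippet (not taken from the patch) of the names that scope contributes in surface Rust:

// Sketch only: the names a GenericParams scope now provides for an impl and an ADT.
struct Wrapper<T>(T);

impl<T> Wrapper<T> {
    // Both `T` (generic parameter) and `Self` (= Wrapper<T>) come from the impl's scope.
    fn new(value: T) -> Self {
        Self(value)
    }
}

enum List<T> {
    // In an ADT definition, `Self` in type position refers to the ADT itself.
    Cons(T, Box<Self>),
    Nil,
}

fn main() {
    let _w = Wrapper::new(1);
    let _l: List<u8> = List::Cons(0, Box::new(List::Nil));
}
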
@@ -101,9 +105,8 @@ pub enum TypeNs {
     BuiltinType(BuiltinType),
     TraitId(TraitId),
     TraitAliasId(TraitAliasId),
-    // Module belong to type ns, but the resolver is used when all module paths
-    // are fully resolved.
-    // ModuleId(ModuleId)
+
+    ModuleId(ModuleId),
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -180,7 +183,7 @@ impl Resolver {
     {
         let path = match path {
             Path::BarePath(mod_path) => mod_path,
-            Path::Normal(it) => it.mod_path(),
+            Path::Normal(it) => &it.mod_path,
             Path::LangItem(l, seg) => {
                 let type_ns = match *l {
                     LangItemTarget::Union(it) => TypeNs::AdtId(it.into()),
@@ -207,12 +210,33 @@ impl Resolver {
             return self.module_scope.resolve_path_in_type_ns(db, path);
         }
 
-        let remaining_idx = || if path.segments().len() == 1 { None } else { Some(1) };
+        let remaining_idx = || {
+            if path.segments().len() == 1 { None } else { Some(1) }
+        };
 
         for scope in self.scopes() {
             match scope {
                 Scope::ExprScope(_) | Scope::MacroDefScope(_) => continue,
                 Scope::GenericParams { params, def } => {
+                    if let &GenericDefId::ImplId(impl_) = def {
+                        if *first_name == sym::Self_ {
+                            return Some((
+                                TypeNs::SelfType(impl_),
+                                remaining_idx(),
+                                None,
+                                ResolvePathResultPrefixInfo::default(),
+                            ));
+                        }
+                    } else if let &GenericDefId::AdtId(adt) = def {
+                        if *first_name == sym::Self_ {
+                            return Some((
+                                TypeNs::AdtSelfType(adt),
+                                remaining_idx(),
+                                None,
+                                ResolvePathResultPrefixInfo::default(),
+                            ));
+                        }
+                    }
                     if let Some(id) = params.find_type_by_name(first_name, *def) {
                         return Some((
                             TypeNs::GenericParam(id),
@@ -222,28 +246,26 @@ impl Resolver {
                         ));
                     }
                 }
-                &Scope::ImplDefScope(impl_) => {
-                    if *first_name == sym::Self_.clone() {
-                        return Some((
-                            TypeNs::SelfType(impl_),
-                            remaining_idx(),
-                            None,
-                            ResolvePathResultPrefixInfo::default(),
-                        ));
-                    }
-                }
-                &Scope::AdtScope(adt) => {
-                    if *first_name == sym::Self_.clone() {
-                        return Some((
-                            TypeNs::AdtSelfType(adt),
-                            remaining_idx(),
-                            None,
-                            ResolvePathResultPrefixInfo::default(),
-                        ));
-                    }
-                }
                 Scope::BlockScope(m) => {
                     if let Some(res) = m.resolve_path_in_type_ns(db, path) {
+                        let res = match res.0 {
+                            TypeNs::ModuleId(_) if res.1.is_none() => {
+                                if let Some(ModuleDefId::BuiltinType(builtin)) = BUILTIN_SCOPE
+                                    .get(first_name)
+                                    .and_then(|builtin| builtin.take_types())
+                                {
+                                    (
+                                        TypeNs::BuiltinType(builtin),
+                                        remaining_idx(),
+                                        None,
+                                        ResolvePathResultPrefixInfo::default(),
+                                    )
+                                } else {
+                                    res
+                                }
+                            }
+                            _ => res,
+                        };
                         return Some(res);
                     }
                 }
@@ -269,11 +291,18 @@ impl Resolver {
         db: &dyn DefDatabase,
         visibility: &RawVisibility,
     ) -> Option<Visibility> {
-        let within_impl = self.scopes().any(|scope| matches!(scope, Scope::ImplDefScope(_)));
         match visibility {
             RawVisibility::Module(_, _) => {
-                let (item_map, module) = self.item_scope();
-                item_map.resolve_visibility(db, module, visibility, within_impl)
+                let (item_map, item_local_map, module) = self.item_scope_();
+                item_map.resolve_visibility(
+                    item_local_map,
+                    db,
+                    module,
+                    visibility,
+                    self.scopes().any(|scope| {
+                        matches!(scope, Scope::GenericParams { def: GenericDefId::ImplId(_), .. })
+                    }),
+                )
             }
             RawVisibility::Public => Some(Visibility::Public),
         }
@@ -296,7 +325,7 @@ impl Resolver {
     ) -> Option<(ResolveValueResult, ResolvePathResultPrefixInfo)> {
         let path = match path {
             Path::BarePath(mod_path) => mod_path,
-            Path::Normal(it) => it.mod_path(),
+            Path::Normal(it) => &it.mod_path,
             Path::LangItem(l, None) => {
                 return Some((
                     ResolveValueResult::ValueNs(
@@ -314,7 +343,7 @@ impl Resolver {
                         None,
                     ),
                     ResolvePathResultPrefixInfo::default(),
-                ))
+                ));
             }
             Path::LangItem(l, Some(_)) => {
                 let type_ns = match *l {
@@ -336,7 +365,7 @@ impl Resolver {
             }
         };
         let n_segments = path.segments().len();
-        let tmp = Name::new_symbol_root(sym::self_.clone());
+        let tmp = Name::new_symbol_root(sym::self_);
         let first_name = if path.is_self() { &tmp } else { path.segments().first()? };
         let skip_to_mod = path.kind != PathKind::Plain && !path.is_self();
         if skip_to_mod {
@@ -367,6 +396,14 @@ impl Resolver {
                         handle_macro_def_scope(db, &mut hygiene_id, &mut hygiene_info, macro_id)
                     }
                     Scope::GenericParams { params, def } => {
+                        if let &GenericDefId::ImplId(impl_) = def {
+                            if *first_name == sym::Self_ {
+                                return Some((
+                                    ResolveValueResult::ValueNs(ValueNs::ImplSelf(impl_), None),
+                                    ResolvePathResultPrefixInfo::default(),
+                                ));
+                            }
+                        }
                         if let Some(id) = params.find_const_by_name(first_name, *def) {
                             let val = ValueNs::GenericParam(id);
                             return Some((
@@ -375,16 +412,6 @@ impl Resolver {
                             ));
                         }
                     }
-                    &Scope::ImplDefScope(impl_) => {
-                        if *first_name == sym::Self_.clone() {
-                            return Some((
-                                ResolveValueResult::ValueNs(ValueNs::ImplSelf(impl_), None),
-                                ResolvePathResultPrefixInfo::default(),
-                            ));
-                        }
-                    }
-                    // bare `Self` doesn't work in the value namespace in a struct/enum definition
-                    Scope::AdtScope(_) => continue,
                     Scope::BlockScope(m) => {
                         if let Some(def) = m.resolve_path_in_value_ns(db, path) {
                             return Some(def);
@@ -397,6 +424,22 @@ impl Resolver {
                 match scope {
                     Scope::ExprScope(_) | Scope::MacroDefScope(_) => continue,
                     Scope::GenericParams { params, def } => {
+                        if let &GenericDefId::ImplId(impl_) = def {
+                            if *first_name == sym::Self_ {
+                                return Some((
+                                    ResolveValueResult::Partial(TypeNs::SelfType(impl_), 1, None),
+                                    ResolvePathResultPrefixInfo::default(),
+                                ));
+                            }
+                        } else if let &GenericDefId::AdtId(adt) = def {
+                            if *first_name == sym::Self_ {
+                                let ty = TypeNs::AdtSelfType(adt);
+                                return Some((
+                                    ResolveValueResult::Partial(ty, 1, None),
+                                    ResolvePathResultPrefixInfo::default(),
+                                ));
+                            }
+                        }
                         if let Some(id) = params.find_type_by_name(first_name, *def) {
                             let ty = TypeNs::GenericParam(id);
                             return Some((
@@ -405,23 +448,6 @@ impl Resolver {
                             ));
                         }
                     }
-                    &Scope::ImplDefScope(impl_) => {
-                        if *first_name == sym::Self_.clone() {
-                            return Some((
-                                ResolveValueResult::Partial(TypeNs::SelfType(impl_), 1, None),
-                                ResolvePathResultPrefixInfo::default(),
-                            ));
-                        }
-                    }
-                    Scope::AdtScope(adt) => {
-                        if *first_name == sym::Self_.clone() {
-                            let ty = TypeNs::AdtSelfType(*adt);
-                            return Some((
-                                ResolveValueResult::Partial(ty, 1, None),
-                                ResolvePathResultPrefixInfo::default(),
-                            ));
-                        }
-                    }
                     Scope::BlockScope(m) => {
                         if let Some(def) = m.resolve_path_in_value_ns(db, path) {
                             return Some(def);
@@ -468,9 +494,16 @@ impl Resolver {
         path: &ModPath,
         expected_macro_kind: Option<MacroSubNs>,
     ) -> Option<(MacroId, Option<ImportOrGlob>)> {
-        let (item_map, module) = self.item_scope();
+        let (item_map, item_local_map, module) = self.item_scope_();
         item_map
-            .resolve_path(db, module, path, BuiltinShadowMode::Other, expected_macro_kind)
+            .resolve_path(
+                item_local_map,
+                db,
+                module,
+                path,
+                BuiltinShadowMode::Other,
+                expected_macro_kind,
+            )
             .0
             .take_macros_import()
     }
@@ -485,16 +518,19 @@ impl Resolver {
     }
 
     pub fn resolve_lifetime(&self, lifetime: &LifetimeRef) -> Option<LifetimeNs> {
-        if lifetime.name == sym::tick_static.clone() {
-            return Some(LifetimeNs::Static);
-        }
-
-        self.scopes().find_map(|scope| match scope {
-            Scope::GenericParams { def, params } => {
-                params.find_lifetime_by_name(&lifetime.name, *def).map(LifetimeNs::LifetimeParam)
+        match lifetime {
+            LifetimeRef::Static => Some(LifetimeNs::Static),
+            LifetimeRef::Named(name) => self.scopes().find_map(|scope| match scope {
+                Scope::GenericParams { def, params } => {
+                    params.find_lifetime_by_name(name, *def).map(LifetimeNs::LifetimeParam)
+                }
+                _ => None,
+            }),
+            LifetimeRef::Placeholder | LifetimeRef::Error => None,
+            LifetimeRef::Param(lifetime_param_id) => {
+                Some(LifetimeNs::LifetimeParam(*lifetime_param_id))
             }
-            _ => None,
-        })
+        }
     }
 
     /// Returns a set of names available in the current scope.
@@ -544,7 +580,7 @@ impl Resolver {
         for scope in self.scopes() {
             scope.process_names(&mut res, db);
         }
-        let ModuleItemMap { ref def_map, module_id } = self.module_scope;
+        let ModuleItemMap { ref def_map, module_id, ref local_def_map } = self.module_scope;
         // FIXME: should we provide `self` here?
         // f(
         //     Name::self_param(),
@@ -566,7 +602,7 @@ impl Resolver {
                 res.add(name, ScopeDef::ModuleDef(def.into()));
             },
         );
-        def_map.extern_prelude().for_each(|(name, (def, _extern_crate))| {
+        local_def_map.extern_prelude().for_each(|(name, (def, _extern_crate))| {
             res.add(name, ScopeDef::ModuleDef(ModuleDefId::ModuleId(def.into())));
         });
         BUILTIN_SCOPE.iter().for_each(|(name, &def)| {
@@ -581,6 +617,7 @@ impl Resolver {
         res.map
     }
 
+    /// Note: Not to be used directly within hir-def/hir-ty
     pub fn extern_crate_decls_in_scope<'a>(
         &'a self,
         db: &'a dyn DefDatabase,
@@ -588,12 +625,22 @@ impl Resolver {
         self.module_scope.def_map[self.module_scope.module_id]
             .scope
             .extern_crate_decls()
-            .map(|id| ExternCrateDeclData::extern_crate_decl_data_query(db, id).name.clone())
+            .filter_map(|id| {
+                let loc = id.lookup(db);
+                let tree = loc.item_tree_id().item_tree(db);
+                match &tree[loc.id.value].alias {
+                    Some(alias) => match alias {
+                        ImportAlias::Underscore => None,
+                        ImportAlias::Alias(name) => Some(name.clone()),
+                    },
+                    None => Some(tree[loc.id.value].name.clone()),
+                }
+            })
     }
 
     pub fn extern_crates_in_scope(&self) -> impl Iterator<Item = (Name, ModuleId)> + '_ {
         self.module_scope
-            .def_map
+            .local_def_map
             .extern_prelude()
             .map(|(name, module_id)| (name.clone(), module_id.0.into()))
     }
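
`extern_crate_decls_in_scope` now reads the visible name straight off the item tree: an explicit alias wins, `as _` contributes no name, and otherwise the crate's own name is used. A standalone sketch of that selection, with a hypothetical `Alias` enum mirroring `ImportAlias`:

// Sketch only: hypothetical Alias enum for the name-selection logic.
enum Alias {
    Underscore,    // `extern crate foo as _;`
    Named(String), // `extern crate foo as bar;`
}

fn in_scope_name(crate_name: &str, alias: Option<Alias>) -> Option<String> {
    match alias {
        // `as _` pulls the crate in (e.g. for its impls) without introducing a name.
        Some(Alias::Underscore) => None,
        Some(Alias::Named(name)) => Some(name),
        None => Some(crate_name.to_owned()),
    }
}

fn main() {
    assert_eq!(in_scope_name("serde", None), Some("serde".to_owned()));
    assert_eq!(in_scope_name("serde", Some(Alias::Named("s".into()))), Some("s".to_owned()));
    assert_eq!(in_scope_name("rand", Some(Alias::Underscore)), None);
}
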
@@ -606,13 +653,12 @@ impl Resolver {
         for scope in self.scopes() {
             match scope {
                 Scope::BlockScope(m) => traits.extend(m.def_map[m.module_id].scope.traits()),
-                &Scope::ImplDefScope(impl_) => {
-                    let impl_data = db.impl_data(impl_);
+                &Scope::GenericParams { def: GenericDefId::ImplId(impl_), .. } => {
+                    let impl_data = db.impl_signature(impl_);
                     if let Some(target_trait) = impl_data.target_trait {
-                        if let Some(TypeNs::TraitId(trait_)) = self.resolve_path_in_type_ns_fully(
-                            db,
-                            &impl_data.types_map[target_trait.path],
-                        ) {
+                        if let Some(TypeNs::TraitId(trait_)) = self
+                            .resolve_path_in_type_ns_fully(db, &impl_data.store[target_trait.path])
+                        {
                             traits.insert(trait_);
                         }
                     }
@@ -641,29 +687,21 @@ impl Resolver {
     }
 
     pub fn module(&self) -> ModuleId {
-        let (def_map, local_id) = self.item_scope();
+        let (def_map, _, local_id) = self.item_scope_();
         def_map.module_id(local_id)
     }
 
-    pub fn krate(&self) -> CrateId {
-        self.module_scope.def_map.krate()
+    pub fn item_scope(&self) -> &ItemScope {
+        let (def_map, _, local_id) = self.item_scope_();
+        &def_map[local_id].scope
     }
 
-    pub fn def_map(&self) -> &DefMap {
-        self.item_scope().0
+    pub fn krate(&self) -> Crate {
+        self.module_scope.def_map.krate()
     }
 
-    pub fn where_predicates_in_scope(
-        &self,
-    ) -> impl Iterator<Item = (&crate::generics::WherePredicate, (&GenericDefId, &TypesMap))> {
-        self.scopes()
-            .filter_map(|scope| match scope {
-                Scope::GenericParams { params, def } => Some((params, def)),
-                _ => None,
-            })
-            .flat_map(|(params, def)| {
-                params.where_predicates().zip(iter::repeat((def, &params.types_map)))
-            })
+    pub fn def_map(&self) -> &DefMap {
+        self.item_scope_().0
     }
 
     pub fn generic_def(&self) -> Option<GenericDefId> {
@@ -694,19 +732,9 @@ impl Resolver {
         })
     }
 
-    pub fn type_owner(&self) -> Option<TypeOwnerId> {
-        self.scopes().find_map(|scope| match scope {
-            Scope::BlockScope(_) | Scope::MacroDefScope(_) => None,
-            &Scope::GenericParams { def, .. } => Some(def.into()),
-            &Scope::ImplDefScope(id) => Some(id.into()),
-            &Scope::AdtScope(adt) => Some(adt.into()),
-            Scope::ExprScope(it) => Some(it.owner.into()),
-        })
-    }
-
     pub fn impl_def(&self) -> Option<ImplId> {
         self.scopes().find_map(|scope| match scope {
-            Scope::ImplDefScope(def) => Some(*def),
+            &Scope::GenericParams { def: GenericDefId::ImplId(def), .. } => Some(def),
             _ => None,
         })
     }
@@ -748,7 +776,6 @@ impl Resolver {
                         return None;
                     }
                 }
-                Scope::AdtScope(_) | Scope::ImplDefScope(_) => continue,
                 Scope::BlockScope(m) => {
                     if m.resolve_path_in_value_ns(db, current_name_as_path).is_some() {
                         // It does not resolve to our renamed variable.
@@ -801,7 +828,6 @@ impl Resolver {
                         return None;
                     }
                 }
-                Scope::AdtScope(_) | Scope::ImplDefScope(_) => continue,
                 Scope::BlockScope(m) => {
                     if m.resolve_path_in_value_ns(db, name_as_path).is_some() {
                         return None;
@@ -829,7 +855,7 @@ impl Resolver {
             scope_id: ScopeId,
         ) {
             if let Some(macro_id) = expr_scopes.macro_def(scope_id) {
-                resolver.scopes.push(Scope::MacroDefScope(macro_id.clone()));
+                resolver.scopes.push(Scope::MacroDefScope(**macro_id));
             }
             resolver.scopes.push(Scope::ExprScope(ExprScope {
                 owner,
@@ -838,9 +864,12 @@ impl Resolver {
             }));
             if let Some(block) = expr_scopes.block(scope_id) {
                 let def_map = db.block_def_map(block);
-                resolver
-                    .scopes
-                    .push(Scope::BlockScope(ModuleItemMap { def_map, module_id: DefMap::ROOT }));
+                let local_def_map = block.lookup(db).module.only_local_def_map(db);
+                resolver.scopes.push(Scope::BlockScope(ModuleItemMap {
+                    def_map,
+                    local_def_map,
+                    module_id: DefMap::ROOT,
+                }));
                 // FIXME: This adds as many module scopes as there are blocks, but resolving in each
                 // already traverses all parents, so this is O(n²). I think we could only store the
                 // innermost module scope instead?
@@ -881,7 +910,7 @@ impl Resolver {
 fn handle_macro_def_scope(
     db: &dyn DefDatabase,
     hygiene_id: &mut HygieneId,
-    hygiene_info: &mut Option<(SyntaxContextId, MacroDefId)>,
+    hygiene_info: &mut Option<(SyntaxContext, MacroDefId)>,
     macro_id: &MacroDefId,
 ) {
     if let Some((parent_ctx, label_macro_id)) = hygiene_info {
@@ -889,11 +918,10 @@ fn handle_macro_def_scope(
             // A macro is allowed to refer to variables from before its declaration.
             // Therefore, if we got to the rib of its declaration, give up its hygiene
             // and use its parent expansion.
-            let parent_ctx = db.lookup_intern_syntax_context(*parent_ctx);
-            *hygiene_id = HygieneId::new(parent_ctx.opaque_and_semitransparent);
-            *hygiene_info = parent_ctx.outer_expn.map(|expansion| {
-                let expansion = db.lookup_intern_macro_call(expansion);
-                (parent_ctx.parent, expansion.def)
+            *hygiene_id = HygieneId::new(parent_ctx.opaque_and_semitransparent(db));
+            *hygiene_info = parent_ctx.outer_expn(db).map(|expansion| {
+                let expansion = db.lookup_intern_macro_call(expansion.into());
+                (parent_ctx.parent(db), expansion.def)
             });
         }
     }
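
The hygiene walk above implements the rule spelled out in the comment: a macro body may refer to bindings introduced before the macro's own declaration, so once resolution reaches the declaration's rib it continues under the parent expansion's context. The surface behaviour being modelled:

fn main() {
    let x = 40;
    // The macro is declared after `x`, yet its body may still refer to `x`;
    // resolving `x` from inside an expansion has to look past the macro's own
    // hygiene context into the scope the macro was defined in.
    macro_rules! add_two {
        () => {
            x + 2
        };
    }
    assert_eq!(add_two!(), 42);
}
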
@@ -903,12 +931,12 @@ fn handle_macro_def_scope(
 fn hygiene_info(
     db: &dyn DefDatabase,
     hygiene_id: HygieneId,
-) -> Option<(SyntaxContextId, MacroDefId)> {
+) -> Option<(SyntaxContext, MacroDefId)> {
     if !hygiene_id.is_root() {
-        let ctx = hygiene_id.lookup(db);
-        ctx.outer_expn.map(|expansion| {
-            let expansion = db.lookup_intern_macro_call(expansion);
-            (ctx.parent, expansion.def)
+        let ctx = hygiene_id.lookup();
+        ctx.outer_expn(db).map(|expansion| {
+            let expansion = db.lookup_intern_macro_call(expansion.into());
+            (ctx.parent(db), expansion.def)
         })
     } else {
         None
@@ -928,9 +956,10 @@ impl Resolver {
         path: &ModPath,
         shadow: BuiltinShadowMode,
     ) -> PerNs {
-        let (item_map, module) = self.item_scope();
+        let (item_map, item_local_map, module) = self.item_scope_();
         // This method resolves `path` just like import paths, so no expected macro subns is given.
-        let (module_res, segment_index) = item_map.resolve_path(db, module, path, shadow, None);
+        let (module_res, segment_index) =
+            item_map.resolve_path(item_local_map, db, module, path, shadow, None);
         if segment_index.is_some() {
             return PerNs::none();
         }
@@ -938,13 +967,17 @@ impl Resolver {
     }
 
     /// The innermost block scope that contains items or the module scope that contains this resolver.
-    fn item_scope(&self) -> (&DefMap, LocalModuleId) {
+    fn item_scope_(&self) -> (&DefMap, &LocalDefMap, LocalModuleId) {
         self.scopes()
             .find_map(|scope| match scope {
-                Scope::BlockScope(m) => Some((&*m.def_map, m.module_id)),
+                Scope::BlockScope(m) => Some((&*m.def_map, &*m.local_def_map, m.module_id)),
                 _ => None,
             })
-            .unwrap_or((&self.module_scope.def_map, self.module_scope.module_id))
+            .unwrap_or((
+                &self.module_scope.def_map,
+                &self.module_scope.local_def_map,
+                self.module_scope.module_id,
+            ))
     }
 }
 
@@ -972,8 +1005,13 @@ impl Scope {
                     })
                 });
             }
-            Scope::GenericParams { params, def: parent } => {
-                let parent = *parent;
+            &Scope::GenericParams { ref params, def: parent } => {
+                if let GenericDefId::ImplId(impl_) = parent {
+                    acc.add(&Name::new_symbol_root(sym::Self_), ScopeDef::ImplSelfType(impl_));
+                } else if let GenericDefId::AdtId(adt) = parent {
+                    acc.add(&Name::new_symbol_root(sym::Self_), ScopeDef::AdtSelfType(adt));
+                }
+
                 for (local_id, param) in params.iter_type_or_consts() {
                     if let Some(name) = &param.name() {
                         let id = TypeOrConstParamId { parent, local_id };
@@ -996,12 +1034,6 @@ impl Scope {
                     acc.add(&param.name, ScopeDef::GenericParam(id.into()))
                 }
             }
-            Scope::ImplDefScope(i) => {
-                acc.add(&Name::new_symbol_root(sym::Self_.clone()), ScopeDef::ImplSelfType(*i));
-            }
-            Scope::AdtScope(i) => {
-                acc.add(&Name::new_symbol_root(sym::Self_.clone()), ScopeDef::AdtSelfType(*i));
-            }
             Scope::ExprScope(scope) => {
                 if let Some((label, name)) = scope.expr_scopes.label(scope.scope_id) {
                     acc.add(&name, ScopeDef::Label(label))
@@ -1045,13 +1077,14 @@ fn resolver_for_scope_(
     for scope in scope_chain.into_iter().rev() {
         if let Some(block) = scopes.block(scope) {
             let def_map = db.block_def_map(block);
-            r = r.push_block_scope(def_map);
+            let local_def_map = block.lookup(db).module.only_local_def_map(db);
+            r = r.push_block_scope(def_map, local_def_map);
             // FIXME: This adds as many module scopes as there are blocks, but resolving in each
             // already traverses all parents, so this is O(n²). I think we could only store the
             // innermost module scope instead?
         }
         if let Some(macro_id) = scopes.macro_def(scope) {
-            r = r.push_scope(Scope::MacroDefScope(macro_id.clone()));
+            r = r.push_scope(Scope::MacroDefScope(**macro_id));
         }
 
         r = r.push_expr_scope(owner, Arc::clone(&scopes), scope);
@@ -1070,13 +1103,12 @@ impl Resolver {
         self.push_scope(Scope::GenericParams { def, params })
     }
 
-    fn push_impl_def_scope(self, impl_def: ImplId) -> Resolver {
-        self.push_scope(Scope::ImplDefScope(impl_def))
-    }
-
-    fn push_block_scope(self, def_map: Arc<DefMap>) -> Resolver {
-        debug_assert!(def_map.block_id().is_some());
-        self.push_scope(Scope::BlockScope(ModuleItemMap { def_map, module_id: DefMap::ROOT }))
+    fn push_block_scope(self, def_map: Arc<DefMap>, local_def_map: Arc<LocalDefMap>) -> Resolver {
+        self.push_scope(Scope::BlockScope(ModuleItemMap {
+            def_map,
+            local_def_map,
+            module_id: DefMap::ROOT,
+        }))
     }
 
     fn push_expr_scope(
@@ -1095,8 +1127,13 @@ impl ModuleItemMap {
         db: &dyn DefDatabase,
         path: &ModPath,
     ) -> Option<(ResolveValueResult, ResolvePathResultPrefixInfo)> {
-        let (module_def, unresolved_idx, prefix_info) =
-            self.def_map.resolve_path_locally(db, self.module_id, path, BuiltinShadowMode::Other);
+        let (module_def, unresolved_idx, prefix_info) = self.def_map.resolve_path_locally(
+            &self.local_def_map,
+            db,
+            self.module_id,
+            path,
+            BuiltinShadowMode::Other,
+        );
         match unresolved_idx {
             None => {
                 let (value, import) = to_value_ns(module_def)?;
@@ -1129,8 +1166,13 @@ impl ModuleItemMap {
         path: &ModPath,
     ) -> Option<(TypeNs, Option<usize>, Option<ImportOrExternCrate>, ResolvePathResultPrefixInfo)>
     {
-        let (module_def, idx, prefix_info) =
-            self.def_map.resolve_path_locally(db, self.module_id, path, BuiltinShadowMode::Other);
+        let (module_def, idx, prefix_info) = self.def_map.resolve_path_locally(
+            &self.local_def_map,
+            db,
+            self.module_id,
+            path,
+            BuiltinShadowMode::Other,
+        );
         let (res, import) = to_type_ns(module_def)?;
         Some((res, idx, import, prefix_info))
     }
@@ -1168,11 +1210,12 @@ fn to_type_ns(per_ns: PerNs) -> Option<(TypeNs, Option<ImportOrExternCrate>)> {
         ModuleDefId::TraitId(it) => TypeNs::TraitId(it),
         ModuleDefId::TraitAliasId(it) => TypeNs::TraitAliasId(it),
 
+        ModuleDefId::ModuleId(it) => TypeNs::ModuleId(it),
+
         ModuleDefId::FunctionId(_)
         | ModuleDefId::ConstId(_)
         | ModuleDefId::MacroId(_)
-        | ModuleDefId::StaticId(_)
-        | ModuleDefId::ModuleId(_) => return None,
+        | ModuleDefId::StaticId(_) => return None,
     };
     Some((res, def.import))
 }
@@ -1225,11 +1268,14 @@ pub trait HasResolver: Copy {
 
 impl HasResolver for ModuleId {
     fn resolver(self, db: &dyn DefDatabase) -> Resolver {
-        let mut def_map = self.def_map(db);
+        let (mut def_map, local_def_map) = self.local_def_map(db);
         let mut module_id = self.local_id;
 
         if !self.is_block_module() {
-            return Resolver { scopes: vec![], module_scope: ModuleItemMap { def_map, module_id } };
+            return Resolver {
+                scopes: vec![],
+                module_scope: ModuleItemMap { def_map, local_def_map, module_id },
+            };
         }
 
         let mut modules: SmallVec<[_; 1]> = smallvec![];
@@ -1243,10 +1289,14 @@ impl HasResolver for ModuleId {
         }
         let mut resolver = Resolver {
             scopes: Vec::with_capacity(modules.len()),
-            module_scope: ModuleItemMap { def_map, module_id },
+            module_scope: ModuleItemMap {
+                def_map,
+                local_def_map: local_def_map.clone(),
+                module_id,
+            },
         };
         for def_map in modules.into_iter().rev() {
-            resolver = resolver.push_block_scope(def_map);
+            resolver = resolver.push_block_scope(def_map, local_def_map.clone());
         }
         resolver
     }
@@ -1254,9 +1304,10 @@ impl HasResolver for ModuleId {
 
 impl HasResolver for CrateRootModuleId {
     fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+        let (def_map, local_def_map) = self.local_def_map(db);
         Resolver {
             scopes: vec![],
-            module_scope: ModuleItemMap { def_map: self.def_map(db), module_id: DefMap::ROOT },
+            module_scope: ModuleItemMap { def_map, local_def_map, module_id: DefMap::ROOT },
         }
     }
 }
@@ -1276,10 +1327,7 @@ impl HasResolver for TraitAliasId {
 impl<T: Into<AdtId> + Copy> HasResolver for T {
     fn resolver(self, db: &dyn DefDatabase) -> Resolver {
         let def = self.into();
-        def.module(db)
-            .resolver(db)
-            .push_generic_params_scope(db, def.into())
-            .push_scope(Scope::AdtScope(def))
+        def.module(db).resolver(db).push_generic_params_scope(db, def.into())
     }
 }
 
@@ -1309,11 +1357,7 @@ impl HasResolver for TypeAliasId {
 
 impl HasResolver for ImplId {
     fn resolver(self, db: &dyn DefDatabase) -> Resolver {
-        self.lookup(db)
-            .container
-            .resolver(db)
-            .push_generic_params_scope(db, self.into())
-            .push_impl_def_scope(self)
+        self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into())
     }
 }
 
@@ -1336,23 +1380,6 @@ impl HasResolver for UseId {
     }
 }
 
-impl HasResolver for TypeOwnerId {
-    fn resolver(self, db: &dyn DefDatabase) -> Resolver {
-        match self {
-            TypeOwnerId::FunctionId(it) => it.resolver(db),
-            TypeOwnerId::StaticId(it) => it.resolver(db),
-            TypeOwnerId::ConstId(it) => it.resolver(db),
-            TypeOwnerId::InTypeConstId(it) => it.lookup(db).owner.resolver(db),
-            TypeOwnerId::AdtId(it) => it.resolver(db),
-            TypeOwnerId::TraitId(it) => it.resolver(db),
-            TypeOwnerId::TraitAliasId(it) => it.resolver(db),
-            TypeOwnerId::TypeAliasId(it) => it.resolver(db),
-            TypeOwnerId::ImplId(it) => it.resolver(db),
-            TypeOwnerId::EnumVariantId(it) => it.resolver(db),
-        }
-    }
-}
-
 impl HasResolver for DefWithBodyId {
     fn resolver(self, db: &dyn DefDatabase) -> Resolver {
         match self {
@@ -1360,7 +1387,6 @@ impl HasResolver for DefWithBodyId {
             DefWithBodyId::FunctionId(f) => f.resolver(db),
             DefWithBodyId::StaticId(s) => s.resolver(db),
             DefWithBodyId::VariantId(v) => v.resolver(db),
-            DefWithBodyId::InTypeConstId(c) => c.lookup(db).owner.resolver(db),
         }
     }
 }
@@ -1438,7 +1464,7 @@ impl HasResolver for MacroRulesId {
 fn lookup_resolver<'db>(
     db: &(dyn DefDatabase + 'db),
     lookup: impl Lookup<
-        Database<'db> = dyn DefDatabase + 'db,
+        Database = dyn DefDatabase,
         Data = impl ItemTreeLoc<Container = impl HasResolver>,
     >,
 ) -> Resolver {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/signatures.rs b/src/tools/rust-analyzer/crates/hir-def/src/signatures.rs
new file mode 100644
index 0000000000000..44cfd72c48f5e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/signatures.rs
@@ -0,0 +1,975 @@
+//! Item signature IR definitions
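+//!
+//! Each item kind has a `*Signature` struct (e.g. [`StructSignature`],
+//! [`FunctionSignature`]) describing the item's header: its flags, lowered type
+//! references and, where applicable, its name and generic parameters. Signatures
+//! are built by the associated `query` functions, which lower the item-tree entry
+//! together with its AST node and also return an [`ExpressionStoreSourceMap`] for
+//! mapping the lowered IDs back to syntax.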
+
+use std::ops::Not as _;
+
+use bitflags::bitflags;
+use cfg::{CfgExpr, CfgOptions};
+use either::Either;
+use hir_expand::{InFile, Intern, Lookup, name::Name};
+use intern::{Symbol, sym};
+use la_arena::{Arena, Idx};
+use rustc_abi::{IntegerType, ReprOptions};
+use syntax::{
+    AstNode, SyntaxNodePtr,
+    ast::{self, HasGenericParams, IsString},
+};
+use thin_vec::ThinVec;
+use triomphe::Arc;
+
+use crate::{
+    ConstId, EnumId, EnumVariantId, EnumVariantLoc, FunctionId, HasModule, ImplId, ItemContainerId,
+    ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, UnionId, VariantId,
+    db::DefDatabase,
+    expr_store::{
+        ExpressionStore, ExpressionStoreSourceMap,
+        lower::{
+            ExprCollector, lower_function, lower_generic_params, lower_trait, lower_trait_alias,
+            lower_type_alias,
+        },
+    },
+    hir::{ExprId, PatId, generics::GenericParams},
+    item_tree::{
+        AttrOwner, Field, FieldParent, FieldsShape, FileItemTreeId, ItemTree, ItemTreeId, ModItem,
+        RawVisibility, RawVisibilityId,
+    },
+    lang_item::LangItem,
+    src::HasSource,
+    type_ref::{TraitRef, TypeBound, TypeRefId},
+};
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct StructSignature {
+    pub name: Name,
+    pub generic_params: Arc<GenericParams>,
+    pub store: Arc<ExpressionStore>,
+    pub flags: StructFlags,
+    pub shape: FieldsShape,
+    pub repr: Option<ReprOptions>,
+}
+
+bitflags! {
+    #[derive(Debug, Copy, Clone, PartialEq, Eq)]
+    pub struct StructFlags: u8 {
+        /// Indicates whether the struct has a `#[rustc_has_incoherent_inherent_impls]` attribute.
+        const RUSTC_HAS_INCOHERENT_INHERENT_IMPLS = 1 << 1;
+        /// Indicates whether the struct has a `#[fundamental]` attribute.
+        const FUNDAMENTAL      = 1 << 2;
+        /// Indicates whether the struct is `PhantomData`.
+        const IS_PHANTOM_DATA  = 1 << 3;
+        /// Indicates whether this struct is `Box`.
+        const IS_BOX           = 1 << 4;
+        /// Indicates whether this struct is `ManuallyDrop`.
+        const IS_MANUALLY_DROP = 1 << 5;
+        /// Indicates whether this struct is `UnsafeCell`.
+        const IS_UNSAFE_CELL   = 1 << 6;
+        /// Indicates whether this struct is `UnsafePinned`.
+        const IS_UNSAFE_PINNED = 1 << 7;
+    }
+}
+
+impl StructSignature {
+    pub fn query(db: &dyn DefDatabase, id: StructId) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
+        let loc = id.lookup(db);
+        let item_tree = loc.id.item_tree(db);
+        let attrs = item_tree.attrs(db, loc.container.krate, ModItem::from(loc.id.value).into());
+
+        let mut flags = StructFlags::empty();
+        if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
+            flags |= StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS;
+        }
+        if attrs.by_key(sym::fundamental).exists() {
+            flags |= StructFlags::FUNDAMENTAL;
+        }
+        if let Some(lang) = attrs.lang_item() {
+            match lang {
+                LangItem::PhantomData => flags |= StructFlags::IS_PHANTOM_DATA,
+                LangItem::OwnedBox => flags |= StructFlags::IS_BOX,
+                LangItem::ManuallyDrop => flags |= StructFlags::IS_MANUALLY_DROP,
+                LangItem::UnsafeCell => flags |= StructFlags::IS_UNSAFE_CELL,
+                LangItem::UnsafePinned => flags |= StructFlags::IS_UNSAFE_PINNED,
+                _ => (),
+            }
+        }
+        let repr = attrs.repr();
+
+        let hir_expand::files::InFileWrapper { file_id, value } = loc.source(db);
+        let (store, generic_params, source_map) = lower_generic_params(
+            db,
+            loc.container,
+            id.into(),
+            file_id,
+            value.generic_param_list(),
+            value.where_clause(),
+        );
+        (
+            Arc::new(StructSignature {
+                generic_params,
+                store,
+                flags,
+                shape: item_tree[loc.id.value].shape,
+                name: item_tree[loc.id.value].name.clone(),
+                repr,
+            }),
+            Arc::new(source_map),
+        )
+    }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct UnionSignature {
+    pub name: Name,
+    pub generic_params: Arc<GenericParams>,
+    pub store: Arc<ExpressionStore>,
+    pub flags: StructFlags,
+    pub repr: Option<ReprOptions>,
+}
+
+impl UnionSignature {
+    pub fn query(db: &dyn DefDatabase, id: UnionId) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
+        let loc = id.lookup(db);
+        let krate = loc.container.krate;
+        let item_tree = loc.id.item_tree(db);
+        let attrs = item_tree.attrs(db, krate, ModItem::from(loc.id.value).into());
+        let mut flags = StructFlags::empty();
+        if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
+            flags |= StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS;
+        }
+        if attrs.by_key(sym::fundamental).exists() {
+            flags |= StructFlags::FUNDAMENTAL;
+        }
+
+        let repr = attrs.repr();
+
+        let hir_expand::files::InFileWrapper { file_id, value } = loc.source(db);
+        let (store, generic_params, source_map) = lower_generic_params(
+            db,
+            loc.container,
+            id.into(),
+            file_id,
+            value.generic_param_list(),
+            value.where_clause(),
+        );
+        (
+            Arc::new(UnionSignature {
+                generic_params,
+                store,
+                flags,
+                repr,
+                name: item_tree[loc.id.value].name.clone(),
+            }),
+            Arc::new(source_map),
+        )
+    }
+}
+
+bitflags! {
+    #[derive(Debug, Copy, Clone, PartialEq, Eq)]
+    pub struct EnumFlags: u8 {
+        const RUSTC_HAS_INCOHERENT_INHERENT_IMPLS  = 1 << 1;
+    }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct EnumSignature {
+    pub name: Name,
+    pub generic_params: Arc<GenericParams>,
+    pub store: Arc<ExpressionStore>,
+    pub flags: EnumFlags,
+    pub repr: Option<ReprOptions>,
+}
+
+impl EnumSignature {
+    pub fn query(db: &dyn DefDatabase, id: EnumId) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
+        let loc = id.lookup(db);
+        let item_tree = loc.id.item_tree(db);
+        let attrs = item_tree.attrs(db, loc.container.krate, ModItem::from(loc.id.value).into());
+        let mut flags = EnumFlags::empty();
+        if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
+            flags |= EnumFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS;
+        }
+
+        let repr = attrs.repr();
+
+        let hir_expand::files::InFileWrapper { file_id, value } = loc.source(db);
+        let (store, generic_params, source_map) = lower_generic_params(
+            db,
+            loc.container,
+            id.into(),
+            file_id,
+            value.generic_param_list(),
+            value.where_clause(),
+        );
+
+        (
+            Arc::new(EnumSignature {
+                generic_params,
+                store,
+                flags,
+                repr,
+                name: item_tree[loc.id.value].name.clone(),
+            }),
+            Arc::new(source_map),
+        )
+    }
+
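+    /// The integer type the variant discriminants are evaluated at: the explicit
+    /// `repr` integer type if there is one, otherwise `isize` (signed,
+    /// pointer-sized).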
+    pub fn variant_body_type(&self) -> IntegerType {
+        match self.repr {
+            Some(ReprOptions { int: Some(builtin), .. }) => builtin,
+            _ => IntegerType::Pointer(true),
+        }
+    }
+}
+bitflags::bitflags! {
+    #[derive(Debug, Clone, Copy, Eq, PartialEq, Default)]
+    pub struct ConstFlags: u8 {
+        const HAS_BODY = 1 << 1;
+        const RUSTC_ALLOW_INCOHERENT_IMPL = 1 << 7;
+    }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct ConstSignature {
+    pub name: Option<Name>,
+    // generic_params: Arc<GenericParams>,
+    pub store: Arc<ExpressionStore>,
+    pub type_ref: TypeRefId,
+    pub flags: ConstFlags,
+}
+
+impl ConstSignature {
+    pub fn query(db: &dyn DefDatabase, id: ConstId) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
+        let loc = id.lookup(db);
+        let item_tree = loc.id.item_tree(db);
+
+        let module = loc.container.module(db);
+        let attrs = item_tree.attrs(db, module.krate, ModItem::from(loc.id.value).into());
+        let mut flags = ConstFlags::empty();
+        if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() {
+            flags |= ConstFlags::RUSTC_ALLOW_INCOHERENT_IMPL;
+        }
+        let source = loc.source(db);
+        if source.value.body().is_some() {
+            flags.insert(ConstFlags::HAS_BODY);
+        }
+
+        let (store, source_map, type_ref) =
+            crate::expr_store::lower::lower_type_ref(db, module, source.map(|it| it.ty()));
+
+        (
+            Arc::new(ConstSignature {
+                store: Arc::new(store),
+                type_ref,
+                flags,
+                name: item_tree[loc.id.value].name.clone(),
+            }),
+            Arc::new(source_map),
+        )
+    }
+
+    pub fn has_body(&self) -> bool {
+        self.flags.contains(ConstFlags::HAS_BODY)
+    }
+}
+
+bitflags::bitflags! {
+    #[derive(Debug, Clone, Copy, Eq, PartialEq, Default)]
+    pub struct StaticFlags: u8 {
+        const HAS_BODY = 1 << 1;
+        const MUTABLE    = 1 << 3;
+        const UNSAFE     = 1 << 4;
+        const EXPLICIT_SAFE = 1 << 5;
+        const EXTERN     = 1 << 6;
+        const RUSTC_ALLOW_INCOHERENT_IMPL = 1 << 7;
+    }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct StaticSignature {
+    pub name: Name,
+
+    // generic_params: Arc<GenericParams>,
+    pub store: Arc<ExpressionStore>,
+    pub type_ref: TypeRefId,
+    pub flags: StaticFlags,
+}
+impl StaticSignature {
+    pub fn query(db: &dyn DefDatabase, id: StaticId) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
+        let loc = id.lookup(db);
+        let item_tree = loc.id.item_tree(db);
+
+        let module = loc.container.module(db);
+        let attrs = item_tree.attrs(db, module.krate, ModItem::from(loc.id.value).into());
+        let mut flags = StaticFlags::empty();
+        if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() {
+            flags |= StaticFlags::RUSTC_ALLOW_INCOHERENT_IMPL;
+        }
+
+        if matches!(loc.container, ItemContainerId::ExternBlockId(_)) {
+            flags.insert(StaticFlags::EXTERN);
+        }
+
+        let source = loc.source(db);
+        if source.value.body().is_some() {
+            flags.insert(StaticFlags::HAS_BODY);
+        }
+        if source.value.mut_token().is_some() {
+            flags.insert(StaticFlags::MUTABLE);
+        }
+        if source.value.unsafe_token().is_some() {
+            flags.insert(StaticFlags::UNSAFE);
+        }
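+        // The explicit `safe` keyword (used on items inside `unsafe extern` blocks)
+        // is tracked separately from the implicit default.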
+        if source.value.safe_token().is_some() {
+            flags.insert(StaticFlags::EXPLICIT_SAFE);
+        }
+
+        let (store, source_map, type_ref) =
+            crate::expr_store::lower::lower_type_ref(db, module, source.map(|it| it.ty()));
+
+        (
+            Arc::new(StaticSignature {
+                store: Arc::new(store),
+                type_ref,
+                flags,
+                name: item_tree[loc.id.value].name.clone(),
+            }),
+            Arc::new(source_map),
+        )
+    }
+}
+
+bitflags::bitflags! {
+    #[derive(Debug, Clone, Copy, Eq, PartialEq, Default)]
+    pub struct ImplFlags: u8 {
+        const NEGATIVE = 1 << 1;
+        const UNSAFE = 1 << 3;
+    }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct ImplSignature {
+    pub generic_params: Arc<GenericParams>,
+    pub store: Arc<ExpressionStore>,
+    pub self_ty: TypeRefId,
+    pub target_trait: Option<TraitRef>,
+    pub flags: ImplFlags,
+}
+
+impl ImplSignature {
+    pub fn query(db: &dyn DefDatabase, id: ImplId) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
+        let loc = id.lookup(db);
+
+        let mut flags = ImplFlags::empty();
+        let src = loc.source(db);
+        if src.value.unsafe_token().is_some() {
+            flags.insert(ImplFlags::UNSAFE);
+        }
+        if src.value.excl_token().is_some() {
+            flags.insert(ImplFlags::NEGATIVE);
+        }
+
+        let (store, source_map, self_ty, target_trait, generic_params) =
+            crate::expr_store::lower::lower_impl(db, loc.container, src, id);
+
+        (
+            Arc::new(ImplSignature {
+                store: Arc::new(store),
+                generic_params,
+                self_ty,
+                target_trait,
+                flags,
+            }),
+            Arc::new(source_map),
+        )
+    }
+}
+
+bitflags::bitflags! {
+    #[derive(Debug, Clone, Copy, Eq, PartialEq, Default)]
+    pub struct TraitFlags: u8 {
+        const RUSTC_HAS_INCOHERENT_INHERENT_IMPLS = 1 << 1;
+        const FUNDAMENTAL = 1 << 2;
+        const UNSAFE = 1 << 3;
+        const AUTO = 1 << 4;
+        const SKIP_ARRAY_DURING_METHOD_DISPATCH = 1 << 5;
+        const SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH = 1 << 6;
+        const RUSTC_PAREN_SUGAR = 1 << 7;
+    }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct TraitSignature {
+    pub name: Name,
+    pub generic_params: Arc<GenericParams>,
+    pub store: Arc<ExpressionStore>,
+    pub flags: TraitFlags,
+}
+
+impl TraitSignature {
+    pub fn query(db: &dyn DefDatabase, id: TraitId) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
+        let loc = id.lookup(db);
+        let item_tree = loc.id.item_tree(db);
+
+        let mut flags = TraitFlags::empty();
+        let attrs = item_tree.attrs(db, loc.container.krate, ModItem::from(loc.id.value).into());
+        let source = loc.source(db);
+        if source.value.auto_token().is_some() {
+            flags.insert(TraitFlags::AUTO);
+        }
+        if source.value.unsafe_token().is_some() {
+            flags.insert(TraitFlags::UNSAFE);
+        }
+        if attrs.by_key(sym::fundamental).exists() {
+            flags |= TraitFlags::FUNDAMENTAL;
+        }
+        if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
+            flags |= TraitFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS;
+        }
+        if attrs.by_key(sym::rustc_paren_sugar).exists() {
+            flags |= TraitFlags::RUSTC_PAREN_SUGAR;
+        }
+        let mut skip_array_during_method_dispatch =
+            attrs.by_key(sym::rustc_skip_array_during_method_dispatch).exists();
+        let mut skip_boxed_slice_during_method_dispatch = false;
+        for tt in attrs.by_key(sym::rustc_skip_during_method_dispatch).tt_values() {
+            for tt in tt.iter() {
+                if let tt::iter::TtElement::Leaf(tt::Leaf::Ident(ident)) = tt {
+                    skip_array_during_method_dispatch |= ident.sym == sym::array;
+                    skip_boxed_slice_during_method_dispatch |= ident.sym == sym::boxed_slice;
+                }
+            }
+        }
+
+        if skip_array_during_method_dispatch {
+            flags |= TraitFlags::SKIP_ARRAY_DURING_METHOD_DISPATCH;
+        }
+        if skip_boxed_slice_during_method_dispatch {
+            flags |= TraitFlags::SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH;
+        }
+
+        let (store, source_map, generic_params) = lower_trait(db, loc.container, source, id);
+
+        (
+            Arc::new(TraitSignature {
+                store: Arc::new(store),
+                generic_params,
+                flags,
+                name: item_tree[loc.id.value].name.clone(),
+            }),
+            Arc::new(source_map),
+        )
+    }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct TraitAliasSignature {
+    pub name: Name,
+    pub generic_params: Arc<GenericParams>,
+    pub store: Arc<ExpressionStore>,
+}
+
+impl TraitAliasSignature {
+    pub fn query(
+        db: &dyn DefDatabase,
+        id: TraitAliasId,
+    ) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
+        let loc = id.lookup(db);
+        let item_tree = loc.id.item_tree(db);
+
+        let source = loc.source(db);
+        let (store, source_map, generic_params) = lower_trait_alias(db, loc.container, source, id);
+
+        (
+            Arc::new(TraitAliasSignature {
+                generic_params,
+                store: Arc::new(store),
+                name: item_tree[loc.id.value].name.clone(),
+            }),
+            Arc::new(source_map),
+        )
+    }
+}
+
+bitflags! {
+    #[derive(Debug, Clone, Copy, Eq, PartialEq, Default)]
+    pub struct FnFlags: u16 {
+        const HAS_BODY = 1 << 1;
+        const DEFAULT = 1 << 2;
+        const CONST = 1 << 3;
+        const ASYNC = 1 << 4;
+        const UNSAFE = 1 << 5;
+        const HAS_VARARGS = 1 << 6;
+        const RUSTC_ALLOW_INCOHERENT_IMPL = 1 << 7;
+        const HAS_SELF_PARAM = 1 << 8;
+        /// The `#[target_feature]` attribute is needed for safety checks (RFC 2396),
+        /// but storing it for every function would consume a lot of memory when only
+        /// very few functions actually have it. So we only record its existence here
+        /// and look it up if needed.
+        const HAS_TARGET_FEATURE = 1 << 9;
+        const DEPRECATED_SAFE_2024 = 1 << 10;
+        const EXPLICIT_SAFE = 1 << 11;
+    }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct FunctionSignature {
+    pub name: Name,
+    pub generic_params: Arc<GenericParams>,
+    pub store: Arc<ExpressionStore>,
+    pub params: Box<[TypeRefId]>,
+    pub ret_type: Option<TypeRefId>,
+    pub abi: Option<Symbol>,
+    pub flags: FnFlags,
+    // FIXME: we should put this behind a flag in `FnFlags` plus a dedicated query to avoid bloating the struct
+    pub legacy_const_generics_indices: Option<Box<Box<[u32]>>>,
+}
+
+impl FunctionSignature {
+    pub fn query(
+        db: &dyn DefDatabase,
+        id: FunctionId,
+    ) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
+        let loc = id.lookup(db);
+        let module = loc.container.module(db);
+        let item_tree = loc.id.item_tree(db);
+
+        let mut flags = FnFlags::empty();
+        let attrs = item_tree.attrs(db, module.krate, ModItem::from(loc.id.value).into());
+        if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() {
+            flags.insert(FnFlags::RUSTC_ALLOW_INCOHERENT_IMPL);
+        }
+
+        if attrs.by_key(sym::target_feature).exists() {
+            flags.insert(FnFlags::HAS_TARGET_FEATURE);
+        }
+        let legacy_const_generics_indices = attrs.rustc_legacy_const_generics();
+
+        let source = loc.source(db);
+
+        if source.value.unsafe_token().is_some() {
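+            // `#[rustc_deprecated_safe_2024]` marks functions that only became
+            // `unsafe` in the 2024 edition, so they are tracked separately from
+            // plain `UNSAFE`.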
+            if attrs.by_key(sym::rustc_deprecated_safe_2024).exists() {
+                flags.insert(FnFlags::DEPRECATED_SAFE_2024);
+            } else {
+                flags.insert(FnFlags::UNSAFE);
+            }
+        }
+        if source.value.async_token().is_some() {
+            flags.insert(FnFlags::ASYNC);
+        }
+        if source.value.const_token().is_some() {
+            flags.insert(FnFlags::CONST);
+        }
+        if source.value.default_token().is_some() {
+            flags.insert(FnFlags::DEFAULT);
+        }
+        if source.value.safe_token().is_some() {
+            flags.insert(FnFlags::EXPLICIT_SAFE);
+        }
+        if source.value.body().is_some() {
+            flags.insert(FnFlags::HAS_BODY);
+        }
+
+        let abi = source.value.abi().map(|abi| {
+            abi.abi_string().map_or_else(|| sym::C, |it| Symbol::intern(it.text_without_quotes()))
+        });
+        let (store, source_map, generic_params, params, ret_type, self_param, variadic) =
+            lower_function(db, module, source, id);
+        if self_param {
+            flags.insert(FnFlags::HAS_SELF_PARAM);
+        }
+        if variadic {
+            flags.insert(FnFlags::HAS_VARARGS);
+        }
+        (
+            Arc::new(FunctionSignature {
+                generic_params,
+                store: Arc::new(store),
+                params,
+                ret_type,
+                abi,
+                flags,
+                legacy_const_generics_indices,
+                name: item_tree[loc.id.value].name.clone(),
+            }),
+            Arc::new(source_map),
+        )
+    }
+
+    pub fn has_body(&self) -> bool {
+        self.flags.contains(FnFlags::HAS_BODY)
+    }
+
+    /// True if the first param is `self`. This is relevant for deciding whether
+    /// this function can be called as a method.
+    pub fn has_self_param(&self) -> bool {
+        self.flags.contains(FnFlags::HAS_SELF_PARAM)
+    }
+
+    pub fn is_default(&self) -> bool {
+        self.flags.contains(FnFlags::DEFAULT)
+    }
+
+    pub fn is_const(&self) -> bool {
+        self.flags.contains(FnFlags::CONST)
+    }
+
+    pub fn is_async(&self) -> bool {
+        self.flags.contains(FnFlags::ASYNC)
+    }
+
+    pub fn is_unsafe(&self) -> bool {
+        self.flags.contains(FnFlags::UNSAFE)
+    }
+
+    pub fn is_deprecated_safe_2024(&self) -> bool {
+        self.flags.contains(FnFlags::DEPRECATED_SAFE_2024)
+    }
+
+    pub fn is_safe(&self) -> bool {
+        self.flags.contains(FnFlags::EXPLICIT_SAFE)
+    }
+
+    pub fn is_varargs(&self) -> bool {
+        self.flags.contains(FnFlags::HAS_VARARGS)
+    }
+
+    pub fn has_target_feature(&self) -> bool {
+        self.flags.contains(FnFlags::HAS_TARGET_FEATURE)
+    }
+}
+
+bitflags! {
+    #[derive(Debug, Clone, Copy, Eq, PartialEq, Default)]
+    pub struct TypeAliasFlags: u8 {
+        const RUSTC_HAS_INCOHERENT_INHERENT_IMPL = 1 << 1;
+        const IS_EXTERN = 1 << 6;
+        const RUSTC_ALLOW_INCOHERENT_IMPL = 1 << 7;
+    }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct TypeAliasSignature {
+    pub name: Name,
+    pub generic_params: Arc<GenericParams>,
+    pub store: Arc<ExpressionStore>,
+    pub bounds: Box<[TypeBound]>,
+    pub ty: Option<TypeRefId>,
+    pub flags: TypeAliasFlags,
+}
+
+impl TypeAliasSignature {
+    pub fn query(
+        db: &dyn DefDatabase,
+        id: TypeAliasId,
+    ) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
+        let loc = id.lookup(db);
+        let item_tree = loc.id.item_tree(db);
+
+        let mut flags = TypeAliasFlags::empty();
+        let attrs = item_tree.attrs(
+            db,
+            loc.container.module(db).krate(),
+            ModItem::from(loc.id.value).into(),
+        );
+        if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
+            flags.insert(TypeAliasFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPL);
+        }
+        if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() {
+            flags.insert(TypeAliasFlags::RUSTC_ALLOW_INCOHERENT_IMPL);
+        }
+        if matches!(loc.container, ItemContainerId::ExternBlockId(_)) {
+            flags.insert(TypeAliasFlags::IS_EXTERN);
+        }
+        let source = loc.source(db);
+        let (store, source_map, generic_params, bounds, ty) =
+            lower_type_alias(db, loc.container.module(db), source, id);
+
+        (
+            Arc::new(TypeAliasSignature {
+                store: Arc::new(store),
+                generic_params,
+                flags,
+                bounds,
+                name: item_tree[loc.id.value].name.clone(),
+                ty,
+            }),
+            Arc::new(source_map),
+        )
+    }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct FunctionBody {
+    pub store: Arc<ExpressionStore>,
+    pub parameters: Box<[PatId]>,
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct SimpleBody {
+    pub store: Arc<ExpressionStore>,
+}
+pub type StaticBody = SimpleBody;
+pub type ConstBody = SimpleBody;
+pub type EnumVariantBody = SimpleBody;
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct VariantFieldsBody {
+    pub store: Arc<ExpressionStore>,
+    pub fields: Box<[Option<ExprId>]>,
+}
+
+/// A single field of an enum variant or struct
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct FieldData {
+    pub name: Name,
+    pub type_ref: TypeRefId,
+    pub visibility: RawVisibility,
+    pub is_unsafe: bool,
+}
+
+pub type LocalFieldId = Idx<FieldData>;
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct VariantFields {
+    fields: Arena<FieldData>,
+    pub store: Arc<ExpressionStore>,
+    pub shape: FieldsShape,
+}
+impl VariantFields {
+    #[inline]
+    pub(crate) fn query(
+        db: &dyn DefDatabase,
+        id: VariantId,
+    ) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
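+        // For each variant kind, zip the item-tree field data with the matching
+        // AST field nodes so that field types can be lowered and cfg diagnostics
+        // can point back at the source.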
+        let (shape, (fields, store, source_map)) = match id {
+            VariantId::EnumVariantId(id) => {
+                let loc = id.lookup(db);
+                let item_tree = loc.id.item_tree(db);
+                let parent = loc.parent.lookup(db);
+                let variant = &item_tree[loc.id.value];
+                (
+                    variant.shape,
+                    lower_fields(
+                        db,
+                        parent.container,
+                        &item_tree,
+                        FieldParent::EnumVariant(loc.id.value),
+                        loc.source(db).map(|src| {
+                            variant.fields.iter().zip(
+                                src.field_list()
+                                    .map(|it| {
+                                        match it {
+                                            ast::FieldList::RecordFieldList(record_field_list) => {
+                                                Either::Left(record_field_list.fields().map(|it| {
+                                                    (SyntaxNodePtr::new(it.syntax()), it.ty())
+                                                }))
+                                            }
+                                            ast::FieldList::TupleFieldList(field_list) => {
+                                                Either::Right(field_list.fields().map(|it| {
+                                                    (SyntaxNodePtr::new(it.syntax()), it.ty())
+                                                }))
+                                            }
+                                        }
+                                        .into_iter()
+                                    })
+                                    .into_iter()
+                                    .flatten(),
+                            )
+                        }),
+                        Some(item_tree[parent.id.value].visibility),
+                    ),
+                )
+            }
+            VariantId::StructId(id) => {
+                let loc = id.lookup(db);
+                let item_tree = loc.id.item_tree(db);
+                let strukt = &item_tree[loc.id.value];
+                (
+                    strukt.shape,
+                    lower_fields(
+                        db,
+                        loc.container,
+                        &item_tree,
+                        FieldParent::Struct(loc.id.value),
+                        loc.source(db).map(|src| {
+                            strukt.fields.iter().zip(
+                                src.field_list()
+                                    .map(|it| {
+                                        match it {
+                                            ast::FieldList::RecordFieldList(record_field_list) => {
+                                                Either::Left(record_field_list.fields().map(|it| {
+                                                    (SyntaxNodePtr::new(it.syntax()), it.ty())
+                                                }))
+                                            }
+                                            ast::FieldList::TupleFieldList(field_list) => {
+                                                Either::Right(field_list.fields().map(|it| {
+                                                    (SyntaxNodePtr::new(it.syntax()), it.ty())
+                                                }))
+                                            }
+                                        }
+                                        .into_iter()
+                                    })
+                                    .into_iter()
+                                    .flatten(),
+                            )
+                        }),
+                        None,
+                    ),
+                )
+            }
+            VariantId::UnionId(id) => {
+                let loc = id.lookup(db);
+                let item_tree = loc.id.item_tree(db);
+                let union = &item_tree[loc.id.value];
+                (
+                    FieldsShape::Record,
+                    lower_fields(
+                        db,
+                        loc.container,
+                        &item_tree,
+                        FieldParent::Union(loc.id.value),
+                        loc.source(db).map(|src| {
+                            union.fields.iter().zip(
+                                src.record_field_list()
+                                    .map(|it| {
+                                        it.fields()
+                                            .map(|it| (SyntaxNodePtr::new(it.syntax()), it.ty()))
+                                    })
+                                    .into_iter()
+                                    .flatten(),
+                            )
+                        }),
+                        None,
+                    ),
+                )
+            }
+        };
+
+        (Arc::new(VariantFields { fields, store: Arc::new(store), shape }), Arc::new(source_map))
+    }
+
+    pub fn len(&self) -> usize {
+        self.fields.len()
+    }
+
+    pub fn fields(&self) -> &Arena<FieldData> {
+        &self.fields
+    }
+
+    pub fn field(&self, name: &Name) -> Option<LocalFieldId> {
+        self.fields().iter().find_map(|(id, data)| if &data.name == name { Some(id) } else { None })
+    }
+}
+
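+/// Lowers the fields of a struct, union or enum variant into [`FieldData`],
+/// skipping `cfg`-disabled fields and recording an `InactiveCode` diagnostic for
+/// each of them.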
+fn lower_fields<'a>(
+    db: &dyn DefDatabase,
+    module: ModuleId,
+    item_tree: &ItemTree,
+    parent: FieldParent,
+    fields: InFile<impl Iterator<Item = (&'a Field, (SyntaxNodePtr, Option<ast::Type>))>>,
+    override_visibility: Option<RawVisibilityId>,
+) -> (Arena<FieldData>, ExpressionStore, ExpressionStoreSourceMap) {
+    let mut arena = Arena::new();
+    let cfg_options = module.krate.cfg_options(db);
+    let mut col = ExprCollector::new(db, module, fields.file_id);
+    for (idx, (field, (ptr, ty))) in fields.value.enumerate() {
+        let attr_owner = AttrOwner::make_field_indexed(parent, idx);
+        let attrs = item_tree.attrs(db, module.krate, attr_owner);
+        if attrs.is_cfg_enabled(cfg_options) {
+            arena.alloc(FieldData {
+                name: field.name.clone(),
+                type_ref: col
+                    .lower_type_ref_opt(ty, &mut ExprCollector::impl_trait_error_allocator),
+                visibility: item_tree[override_visibility.unwrap_or(field.visibility)].clone(),
+                is_unsafe: field.is_unsafe,
+            });
+        } else {
+            col.source_map.diagnostics.push(
+                crate::expr_store::ExpressionStoreDiagnostics::InactiveCode {
+                    node: InFile::new(fields.file_id, ptr),
+                    cfg: attrs.cfg().unwrap(),
+                    opts: cfg_options.clone(),
+                },
+            );
+        }
+    }
+    let store = col.store.finish();
+    (arena, store, col.source_map)
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct InactiveEnumVariantCode {
+    pub cfg: CfgExpr,
+    pub opts: CfgOptions,
+    pub ast_id: span::FileAstId<ast::Variant>,
+}
+
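+/// The `cfg`-enabled variants of an enum, in declaration order.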
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct EnumVariants {
+    pub variants: Box<[(EnumVariantId, Name)]>,
+}
+
+impl EnumVariants {
+    pub(crate) fn enum_variants_query(
+        db: &dyn DefDatabase,
+        e: EnumId,
+    ) -> (Arc<EnumVariants>, Option<Arc<ThinVec<InactiveEnumVariantCode>>>) {
+        let loc = e.lookup(db);
+        let item_tree = loc.id.item_tree(db);
+
+        let mut diagnostics = ThinVec::new();
+        let cfg_options = loc.container.krate.cfg_options(db);
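+        // `index` only counts cfg-enabled variants; disabled ones are skipped
+        // entirely and reported via `InactiveEnumVariantCode`.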
+        let mut index = 0;
+        let variants = FileItemTreeId::range_iter(item_tree[loc.id.value].variants.clone())
+            .filter_map(|variant| {
+                let attrs = item_tree.attrs(db, loc.container.krate, variant.into());
+                if attrs.is_cfg_enabled(cfg_options) {
+                    let enum_variant = EnumVariantLoc {
+                        id: ItemTreeId::new(loc.id.tree_id(), variant),
+                        parent: e,
+                        index,
+                    }
+                    .intern(db);
+                    index += 1;
+                    Some((enum_variant, item_tree[variant].name.clone()))
+                } else {
+                    diagnostics.push(InactiveEnumVariantCode {
+                        ast_id: item_tree[variant].ast_id,
+                        cfg: attrs.cfg().unwrap(),
+                        opts: cfg_options.clone(),
+                    });
+                    None
+                }
+            })
+            .collect();
+
+        (
+            Arc::new(EnumVariants { variants }),
+            diagnostics.is_empty().not().then(|| Arc::new(diagnostics)),
+        )
+    }
+
+    pub fn variant(&self, name: &Name) -> Option<EnumVariantId> {
+        self.variants.iter().find_map(|(v, n)| if n == name { Some(*v) } else { None })
+    }
+
+    // [Adopted from rustc](https://github.com/rust-lang/rust/blob/bd53aa3bf7a24a70d763182303bd75e5fc51a9af/compiler/rustc_middle/src/ty/adt.rs#L446-L448)
+    pub fn is_payload_free(&self, db: &dyn DefDatabase) -> bool {
+        self.variants.iter().all(|&(v, _)| {
+            // The check order is slightly different from rustc's, so that the
+            // relatively cheap checks come first and we can return early.
+            let variant = &db.variant_fields(v.into());
+            if !variant.fields().is_empty() {
+                return false;
+            }
+            // The outer `if` checks whether this variant has a const ctor or not.
+            if !matches!(variant.shape, FieldsShape::Unit) {
+                let body = db.body(v.into());
+                // A variant with explicit discriminant
+                if body.exprs[body.body_expr] != crate::hir::Expr::Missing {
+                    return false;
+                }
+            }
+            true
+        })
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/src.rs b/src/tools/rust-analyzer/crates/hir-def/src/src.rs
index c7ebfeecf5141..3867f39b8b173 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/src.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/src.rs
@@ -3,13 +3,13 @@
 use either::Either;
 use hir_expand::InFile;
 use la_arena::ArenaMap;
-use syntax::{ast, AstNode, AstPtr};
+use syntax::{AstNode, AstPtr, ast};
 
 use crate::{
-    db::DefDatabase,
-    item_tree::{AttrOwner, FieldParent, ItemTreeNode},
     GenericDefId, ItemTreeLoc, LocalFieldId, LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup,
     UseId, VariantId,
+    db::DefDatabase,
+    item_tree::{AttrOwner, FieldParent, ItemTreeNode},
 };
 
 pub trait HasSource {
@@ -131,7 +131,7 @@ impl HasChildSource<LocalFieldId> for VariantId {
                 item_tree = lookup.id.item_tree(db);
                 (
                     lookup.source(db).map(|it| it.kind()),
-                    FieldParent::Variant(lookup.id.value),
+                    FieldParent::EnumVariant(lookup.id.value),
                     lookup.parent.lookup(db).container,
                 )
             }
@@ -158,7 +158,7 @@ impl HasChildSource<LocalFieldId> for VariantId {
         let mut map = ArenaMap::new();
         match &src.value {
             ast::StructKind::Tuple(fl) => {
-                let cfg_options = &db.crate_graph()[container.krate].cfg_options;
+                let cfg_options = container.krate.cfg_options(db);
                 let mut idx = 0;
                 for (i, fd) in fl.fields().enumerate() {
                     let attrs = item_tree.attrs(
@@ -177,7 +177,7 @@ impl HasChildSource<LocalFieldId> for VariantId {
                 }
             }
             ast::StructKind::Record(fl) => {
-                let cfg_options = &db.crate_graph()[container.krate].cfg_options;
+                let cfg_options = container.krate.cfg_options(db);
                 let mut idx = 0;
                 for (i, fd) in fl.fields().enumerate() {
                     let attrs = item_tree.attrs(
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs
index 54e6c1fd206d8..2f7675134ca79 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs
@@ -3,58 +3,53 @@
 use std::{fmt, panic, sync::Mutex};
 
 use base_db::{
-    ra_salsa::{self, Durability},
-    AnchoredPath, CrateId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
+    Crate, CrateGraphBuilder, CratesMap, FileSourceRootInput, FileText, RootQueryDb,
+    SourceDatabase, SourceRoot, SourceRootId, SourceRootInput,
 };
-use hir_expand::{db::ExpandDatabase, files::FilePosition, InFile};
-use span::{EditionedFileId, FileId};
-use syntax::{algo, ast, AstNode};
+use hir_expand::{InFile, files::FilePosition};
+use salsa::{AsDynDatabase, Durability};
+use span::FileId;
+use syntax::{AstNode, algo, ast};
 use triomphe::Arc;
 
 use crate::{
+    LocalModuleId, Lookup, ModuleDefId, ModuleId,
     db::DefDatabase,
     nameres::{DefMap, ModuleSource},
     src::HasSource,
-    LocalModuleId, Lookup, ModuleDefId, ModuleId,
 };
 
-#[ra_salsa::database(
-    base_db::SourceRootDatabaseStorage,
-    base_db::SourceDatabaseStorage,
-    hir_expand::db::ExpandDatabaseStorage,
-    crate::db::InternDatabaseStorage,
-    crate::db::DefDatabaseStorage
-)]
+#[salsa::db]
+#[derive(Clone)]
 pub(crate) struct TestDB {
-    storage: ra_salsa::Storage<TestDB>,
-    events: Mutex<Option<Vec<ra_salsa::Event>>>,
+    storage: salsa::Storage<Self>,
+    files: Arc<base_db::Files>,
+    crates_map: Arc<CratesMap>,
+    events: Arc<Mutex<Option<Vec<salsa::Event>>>>,
 }
 
 impl Default for TestDB {
     fn default() -> Self {
-        let mut this = Self { storage: Default::default(), events: Default::default() };
-        this.setup_syntax_context_root();
+        let mut this = Self {
+            storage: Default::default(),
+            events: Default::default(),
+            files: Default::default(),
+            crates_map: Default::default(),
+        };
         this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH);
+        // This needs to be here, otherwise `CrateGraphBuilder` panics.
+        this.set_all_crates(Arc::new(Box::new([])));
+        CrateGraphBuilder::default().set_in_db(&mut this);
         this
     }
 }
 
-impl Upcast<dyn ExpandDatabase> for TestDB {
-    fn upcast(&self) -> &(dyn ExpandDatabase + 'static) {
-        self
-    }
-}
-
-impl Upcast<dyn DefDatabase> for TestDB {
-    fn upcast(&self) -> &(dyn DefDatabase + 'static) {
-        self
-    }
-}
-
-impl ra_salsa::Database for TestDB {
-    fn salsa_event(&self, event: ra_salsa::Event) {
+#[salsa::db]
+impl salsa::Database for TestDB {
+    fn salsa_event(&self, event: &dyn std::ops::Fn() -> salsa::Event) {
         let mut events = self.events.lock().unwrap();
         if let Some(events) = &mut *events {
+            let event = event();
             events.push(event);
         }
     }
@@ -68,34 +63,79 @@ impl fmt::Debug for TestDB {
 
 impl panic::RefUnwindSafe for TestDB {}
 
-impl FileLoader for TestDB {
-    fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
-        FileLoaderDelegate(self).resolve_path(path)
+#[salsa::db]
+impl SourceDatabase for TestDB {
+    fn file_text(&self, file_id: base_db::FileId) -> FileText {
+        self.files.file_text(file_id)
+    }
+
+    fn set_file_text(&mut self, file_id: base_db::FileId, text: &str) {
+        let files = Arc::clone(&self.files);
+        files.set_file_text(self, file_id, text);
     }
-    fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]> {
-        FileLoaderDelegate(self).relevant_crates(file_id)
+
+    fn set_file_text_with_durability(
+        &mut self,
+        file_id: base_db::FileId,
+        text: &str,
+        durability: Durability,
+    ) {
+        let files = Arc::clone(&self.files);
+        files.set_file_text_with_durability(self, file_id, text, durability);
+    }
+
+    /// Source root of the file.
+    fn source_root(&self, source_root_id: SourceRootId) -> SourceRootInput {
+        self.files.source_root(source_root_id)
+    }
+
+    fn set_source_root_with_durability(
+        &mut self,
+        source_root_id: SourceRootId,
+        source_root: Arc<SourceRoot>,
+        durability: Durability,
+    ) {
+        let files = Arc::clone(&self.files);
+        files.set_source_root_with_durability(self, source_root_id, source_root, durability);
+    }
+
+    fn file_source_root(&self, id: base_db::FileId) -> FileSourceRootInput {
+        self.files.file_source_root(id)
+    }
+
+    fn set_file_source_root_with_durability(
+        &mut self,
+        id: base_db::FileId,
+        source_root_id: SourceRootId,
+        durability: Durability,
+    ) {
+        let files = Arc::clone(&self.files);
+        files.set_file_source_root_with_durability(self, id, source_root_id, durability);
+    }
+
+    fn crates_map(&self) -> Arc<CratesMap> {
+        self.crates_map.clone()
     }
 }
 
 impl TestDB {
-    pub(crate) fn fetch_test_crate(&self) -> CrateId {
-        let crate_graph = self.crate_graph();
-        let it = crate_graph
+    pub(crate) fn fetch_test_crate(&self) -> Crate {
+        let all_crates = self.all_crates();
+        all_crates
             .iter()
-            .find(|&idx| {
-                crate_graph[idx].display_name.as_ref().map(|it| it.canonical_name().as_str())
+            .copied()
+            .find(|&krate| {
+                krate.extra_data(self).display_name.as_ref().map(|it| it.canonical_name().as_str())
                     == Some("ra_test_fixture")
             })
-            .or_else(|| crate_graph.iter().next())
-            .unwrap();
-        it
+            .unwrap_or(*all_crates.last().unwrap())
     }
 
     pub(crate) fn module_for_file(&self, file_id: FileId) -> ModuleId {
         for &krate in self.relevant_crates(file_id).iter() {
             let crate_def_map = self.crate_def_map(krate);
             for (local_id, data) in crate_def_map.modules() {
-                if data.origin.file_id().map(EditionedFileId::file_id) == Some(file_id) {
+                if data.origin.file_id().map(|file_id| file_id.file_id(self)) == Some(file_id) {
                     return crate_def_map.module_id(local_id);
                 }
             }
@@ -104,7 +144,7 @@ impl TestDB {
     }
 
     pub(crate) fn module_at_position(&self, position: FilePosition) -> ModuleId {
-        let file_module = self.module_for_file(position.file_id.file_id());
+        let file_module = self.module_for_file(position.file_id.file_id(self));
         let mut def_map = file_module.def_map(self);
         let module = self.mod_at_position(&def_map, position);
 
@@ -203,12 +243,12 @@ impl TestDB {
 
         // Find the innermost block expression that has a `DefMap`.
         let def_with_body = fn_def?.into();
-        let (_, source_map) = self.body_with_source_map(def_with_body);
+        let source_map = self.body_with_source_map(def_with_body).1;
         let scopes = self.expr_scopes(def_with_body);
-        let root = self.parse(position.file_id);
 
-        let scope_iter = algo::ancestors_at_offset(&root.syntax_node(), position.offset)
-            .filter_map(|node| {
+        let root_syntax_node = self.parse(position.file_id).syntax_node();
+        let scope_iter =
+            algo::ancestors_at_offset(&root_syntax_node, position.offset).filter_map(|node| {
                 let block = ast::BlockExpr::cast(node)?;
                 let expr = ast::Expr::from(block);
                 let expr_id = source_map
@@ -231,7 +271,7 @@ impl TestDB {
         None
     }
 
-    pub(crate) fn log(&self, f: impl FnOnce()) -> Vec<ra_salsa::Event> {
+    pub(crate) fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event> {
         *self.events.lock().unwrap() = Some(Vec::new());
         f();
         self.events.lock().unwrap().take().unwrap()
@@ -244,8 +284,11 @@ impl TestDB {
             .filter_map(|e| match e.kind {
                 // This is pretty horrible, but `Debug` is the only way to inspect
                 // QueryDescriptor at the moment.
-                ra_salsa::EventKind::WillExecute { database_key } => {
-                    Some(format!("{:?}", database_key.debug(self)))
+                salsa::EventKind::WillExecute { database_key } => {
+                    let ingredient = self
+                        .as_dyn_database()
+                        .ingredient_debug_name(database_key.ingredient_index());
+                    Some(ingredient.to_string())
                 }
                 _ => None,
             })
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs b/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs
index c4473e454a1bc..b42c8d383d4a6 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs
@@ -2,80 +2,19 @@
 
 use std::iter;
 
-use intern::Interned;
+use hir_expand::Lookup;
 use la_arena::ArenaMap;
-use span::SyntaxContextId;
-use syntax::ast;
 use triomphe::Arc;
 
 use crate::{
+    ConstId, FunctionId, HasModule, ItemContainerId, ItemLoc, ItemTreeLoc, LocalFieldId,
+    LocalModuleId, ModuleId, TraitId, TypeAliasId, VariantId,
     db::DefDatabase,
     nameres::DefMap,
-    path::{ModPath, PathKind},
-    resolver::HasResolver,
-    ConstId, FunctionId, HasModule, LocalFieldId, LocalModuleId, ModuleId, VariantId,
+    resolver::{HasResolver, Resolver},
 };
 
-/// Visibility of an item, not yet resolved.
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub enum RawVisibility {
-    /// `pub(in module)`, `pub(crate)` or `pub(super)`. Also private, which is
-    /// equivalent to `pub(self)`.
-    Module(Interned<ModPath>, VisibilityExplicitness),
-    /// `pub`.
-    Public,
-}
-
-impl RawVisibility {
-    pub(crate) fn private() -> RawVisibility {
-        RawVisibility::Module(
-            Interned::new(ModPath::from_kind(PathKind::SELF)),
-            VisibilityExplicitness::Implicit,
-        )
-    }
-
-    pub(crate) fn from_ast(
-        db: &dyn DefDatabase,
-        node: Option<ast::Visibility>,
-        span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContextId,
-    ) -> RawVisibility {
-        let node = match node {
-            None => return RawVisibility::private(),
-            Some(node) => node,
-        };
-        Self::from_ast_with_span_map(db, node, span_for_range)
-    }
-
-    fn from_ast_with_span_map(
-        db: &dyn DefDatabase,
-        node: ast::Visibility,
-        span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContextId,
-    ) -> RawVisibility {
-        let path = match node.kind() {
-            ast::VisibilityKind::In(path) => {
-                let path = ModPath::from_src(db.upcast(), path, span_for_range);
-                match path {
-                    None => return RawVisibility::private(),
-                    Some(path) => path,
-                }
-            }
-            ast::VisibilityKind::PubCrate => ModPath::from_kind(PathKind::Crate),
-            ast::VisibilityKind::PubSuper => ModPath::from_kind(PathKind::Super(1)),
-            ast::VisibilityKind::PubSelf => ModPath::from_kind(PathKind::SELF),
-            ast::VisibilityKind::Pub => return RawVisibility::Public,
-        };
-        RawVisibility::Module(Interned::new(path), VisibilityExplicitness::Explicit)
-    }
-
-    pub fn resolve(
-        &self,
-        db: &dyn DefDatabase,
-        resolver: &crate::resolver::Resolver,
-    ) -> Visibility {
-        // we fall back to public visibility (i.e. fail open) if the path can't be resolved
-        resolver.resolve_visibility(db, self).unwrap_or(Visibility::Public)
-    }
-}
+pub use crate::item_tree::{RawVisibility, VisibilityExplicitness};
 
 /// Visibility of an item, with the path resolved.
 #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
@@ -87,6 +26,15 @@ pub enum Visibility {
 }
 
 impl Visibility {
+    pub fn resolve(
+        db: &dyn DefDatabase,
+        resolver: &crate::resolver::Resolver,
+        raw_vis: &RawVisibility,
+    ) -> Self {
+        // we fall back to public visibility (i.e. fail open) if the path can't be resolved
+        resolver.resolve_visibility(db, raw_vis).unwrap_or(Visibility::Public)
+    }
+
     pub(crate) fn is_visible_from_other_crate(self) -> bool {
         matches!(self, Visibility::Public)
     }
@@ -254,30 +202,20 @@ impl Visibility {
     }
 }
 
-/// Whether the item was imported through an explicit `pub(crate) use` or just a `use` without
-/// visibility.
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub enum VisibilityExplicitness {
-    Explicit,
-    Implicit,
-}
-
-impl VisibilityExplicitness {
-    pub fn is_explicit(&self) -> bool {
-        matches!(self, Self::Explicit)
-    }
-}
-
 /// Resolve visibility of all specific fields of a struct or union variant.
 pub(crate) fn field_visibilities_query(
     db: &dyn DefDatabase,
     variant_id: VariantId,
 ) -> Arc<ArenaMap<LocalFieldId, Visibility>> {
-    let var_data = variant_id.variant_data(db);
+    let variant_fields = db.variant_fields(variant_id);
+    let fields = variant_fields.fields();
+    if fields.is_empty() {
+        return Arc::default();
+    }
     let resolver = variant_id.module(db).resolver(db);
     let mut res = ArenaMap::default();
-    for (field_id, field_data) in var_data.fields().iter() {
-        res.insert(field_id, field_data.visibility.resolve(db, &resolver));
+    for (field_id, field_data) in fields.iter() {
+        res.insert(field_id, Visibility::resolve(db, &resolver, &field_data.visibility));
     }
     Arc::new(res)
 }
@@ -285,11 +223,43 @@ pub(crate) fn field_visibilities_query(
 /// Resolve visibility of a function.
 pub(crate) fn function_visibility_query(db: &dyn DefDatabase, def: FunctionId) -> Visibility {
     let resolver = def.resolver(db);
-    db.function_data(def).visibility.resolve(db, &resolver)
+    let loc = def.lookup(db);
+    let tree = loc.item_tree_id().item_tree(db);
+    if let ItemContainerId::TraitId(trait_id) = loc.container {
+        trait_vis(db, &resolver, trait_id)
+    } else {
+        Visibility::resolve(db, &resolver, &tree[tree[loc.id.value].visibility])
+    }
 }
 
 /// Resolve visibility of a const.
 pub(crate) fn const_visibility_query(db: &dyn DefDatabase, def: ConstId) -> Visibility {
     let resolver = def.resolver(db);
-    db.const_data(def).visibility.resolve(db, &resolver)
+    let loc = def.lookup(db);
+    let tree = loc.item_tree_id().item_tree(db);
+    if let ItemContainerId::TraitId(trait_id) = loc.container {
+        trait_vis(db, &resolver, trait_id)
+    } else {
+        Visibility::resolve(db, &resolver, &tree[tree[loc.id.value].visibility])
+    }
+}
+
+/// Resolve visibility of a type alias.
+pub(crate) fn type_alias_visibility_query(db: &dyn DefDatabase, def: TypeAliasId) -> Visibility {
+    let resolver = def.resolver(db);
+    let loc = def.lookup(db);
+    let tree = loc.item_tree_id().item_tree(db);
+    if let ItemContainerId::TraitId(trait_id) = loc.container {
+        trait_vis(db, &resolver, trait_id)
+    } else {
+        Visibility::resolve(db, &resolver, &tree[tree[loc.id.value].visibility])
+    }
+}
+
+#[inline]
+fn trait_vis(db: &dyn DefDatabase, resolver: &Resolver, trait_id: TraitId) -> Visibility {
+    let ItemLoc { id: tree_id, .. } = trait_id.lookup(db);
+    let item_tree = tree_id.item_tree(db);
+    let tr_def = &item_tree[tree_id.value];
+    Visibility::resolve(db, resolver, &item_tree[tr_def.visibility])
 }
diff --git a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
index 7d561e0527d91..b83efca255286 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
@@ -12,15 +12,15 @@ rust-version.workspace = true
 [lib]
 
 [dependencies]
-cov-mark = "2.0.0-pre.1"
+cov-mark = "2.0.0"
 tracing.workspace = true
 either.workspace = true
 rustc-hash.workspace = true
-la-arena.workspace = true
 itertools.workspace = true
-hashbrown.workspace = true
 smallvec.workspace = true
 triomphe.workspace = true
+query-group.workspace = true
+salsa.workspace = true
 
 # local deps
 stdx.workspace = true
@@ -35,7 +35,7 @@ parser.workspace = true
 syntax-bridge.workspace = true
 
 [dev-dependencies]
-expect-test = "1.4.0"
+expect-test = "1.5.1"
 
 [features]
 in-rust-tree = ["syntax/in-rust-tree"]
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs
index c9c793d54f26c..5dae27f7a16cf 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs
@@ -1,26 +1,26 @@
 //! Higher-level attributes based on TokenTree, with some shortcuts.
 use std::{borrow::Cow, fmt, ops};
 
-use base_db::CrateId;
+use base_db::Crate;
 use cfg::CfgExpr;
 use either::Either;
-use intern::{sym, Interned, Symbol};
+use intern::{Interned, Symbol, sym};
 
 use mbe::{DelimiterKind, Punct};
-use smallvec::{smallvec, SmallVec};
-use span::{Span, SyntaxContextId};
+use smallvec::{SmallVec, smallvec};
+use span::{Span, SyntaxContext};
 use syntax::unescape;
-use syntax::{ast, match_ast, AstNode, AstToken, SyntaxNode};
-use syntax_bridge::{desugar_doc_comment_text, syntax_node_to_token_tree, DocCommentDesugarMode};
+use syntax::{AstNode, AstToken, SyntaxNode, ast, match_ast};
+use syntax_bridge::{DocCommentDesugarMode, desugar_doc_comment_text, syntax_node_to_token_tree};
 use triomphe::ThinArc;
 
 use crate::name::Name;
 use crate::{
+    InFile,
     db::ExpandDatabase,
     mod_path::ModPath,
     span_map::SpanMapRef,
-    tt::{self, token_to_literal, TopSubtree},
-    InFile,
+    tt::{self, TopSubtree, token_to_literal},
 };
 
 /// Syntactical attributes, without filtering of `cfg_attr`s.
@@ -66,10 +66,7 @@ impl RawAttrs {
                             kind,
                             suffix: None,
                         }))),
-                        path: Interned::new(ModPath::from(Name::new_symbol(
-                            sym::doc.clone(),
-                            span.ctx,
-                        ))),
+                        path: Interned::new(ModPath::from(Name::new_symbol(sym::doc, span.ctx))),
                         ctxt: span.ctx,
                     }
                 }),
@@ -119,50 +116,48 @@ impl RawAttrs {
 
     /// Processes `cfg_attr`s, returning the resulting semantic `Attrs`.
     // FIXME: This should return a different type, signaling it was filtered?
-    pub fn filter(self, db: &dyn ExpandDatabase, krate: CrateId) -> RawAttrs {
-        let has_cfg_attrs = self
-            .iter()
-            .any(|attr| attr.path.as_ident().is_some_and(|name| *name == sym::cfg_attr.clone()));
+    pub fn filter(self, db: &dyn ExpandDatabase, krate: Crate) -> RawAttrs {
+        let has_cfg_attrs =
+            self.iter().any(|attr| attr.path.as_ident().is_some_and(|name| *name == sym::cfg_attr));
         if !has_cfg_attrs {
             return self;
         }
 
-        let crate_graph = db.crate_graph();
-        let new_attrs =
-            self.iter()
-                .flat_map(|attr| -> SmallVec<[_; 1]> {
-                    let is_cfg_attr =
-                        attr.path.as_ident().is_some_and(|name| *name == sym::cfg_attr.clone());
-                    if !is_cfg_attr {
-                        return smallvec![attr.clone()];
-                    }
+        let cfg_options = krate.cfg_options(db);
+        let new_attrs = self
+            .iter()
+            .flat_map(|attr| -> SmallVec<[_; 1]> {
+                let is_cfg_attr = attr.path.as_ident().is_some_and(|name| *name == sym::cfg_attr);
+                if !is_cfg_attr {
+                    return smallvec![attr.clone()];
+                }
 
-                    let subtree = match attr.token_tree_value() {
-                        Some(it) => it,
-                        _ => return smallvec![attr.clone()],
-                    };
-
-                    let (cfg, parts) = match parse_cfg_attr_input(subtree) {
-                        Some(it) => it,
-                        None => return smallvec![attr.clone()],
-                    };
-                    let index = attr.id;
-                    let attrs = parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(
-                        |(idx, attr)| Attr::from_tt(db, attr, index.with_cfg_attr(idx)),
-                    );
-
-                    let cfg_options = &crate_graph[krate].cfg_options;
-                    let cfg = TopSubtree::from_token_trees(subtree.top_subtree().delimiter, cfg);
-                    let cfg = CfgExpr::parse(&cfg);
-                    if cfg_options.check(&cfg) == Some(false) {
-                        smallvec![]
-                    } else {
-                        cov_mark::hit!(cfg_attr_active);
-
-                        attrs.collect()
-                    }
-                })
-                .collect::<Vec<_>>();
+                let subtree = match attr.token_tree_value() {
+                    Some(it) => it,
+                    _ => return smallvec![attr.clone()],
+                };
+
+                let (cfg, parts) = match parse_cfg_attr_input(subtree) {
+                    Some(it) => it,
+                    None => return smallvec![attr.clone()],
+                };
+                let index = attr.id;
+                let attrs = parts
+                    .enumerate()
+                    .take(1 << AttrId::CFG_ATTR_BITS)
+                    .filter_map(|(idx, attr)| Attr::from_tt(db, attr, index.with_cfg_attr(idx)));
+
+                let cfg = TopSubtree::from_token_trees(subtree.top_subtree().delimiter, cfg);
+                let cfg = CfgExpr::parse(&cfg);
+                if cfg_options.check(&cfg) == Some(false) {
+                    smallvec![]
+                } else {
+                    cov_mark::hit!(cfg_attr_active);
+
+                    attrs.collect()
+                }
+            })
+            .collect::<Vec<_>>();
         let entries = if new_attrs.is_empty() {
             None
         } else {
@@ -211,7 +206,7 @@ pub struct Attr {
     pub id: AttrId,
     pub path: Interned<ModPath>,
     pub input: Option<Box<AttrInput>>,
-    pub ctxt: SyntaxContextId,
+    pub ctxt: SyntaxContext,
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -306,13 +301,12 @@ impl Attr {
                 Some(Box::new(AttrInput::TokenTree(tt::TopSubtree::from_subtree(tree))))
             }
             (Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. }))), _) => {
-                let input = match input.flat_tokens().get(1) {
+                match input.flat_tokens().get(1) {
                     Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) => {
                         Some(Box::new(AttrInput::Literal(lit.clone())))
                     }
                     _ => None,
-                };
-                input
+                }
             }
             _ => None,
         };
@@ -403,7 +397,7 @@ impl Attr {
     }
 
     pub fn cfg(&self) -> Option<CfgExpr> {
-        if *self.path.as_ident()? == sym::cfg.clone() {
+        if *self.path.as_ident()? == sym::cfg {
             self.token_tree_value().map(CfgExpr::parse)
         } else {
             None
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin.rs
index 7b9b7f36e2cdb..0bf4943b60cec 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin.rs
@@ -7,9 +7,9 @@ mod derive_macro;
 mod fn_macro;
 
 pub use self::{
-    attr_macro::{find_builtin_attr, pseudo_derive_attr_expansion, BuiltinAttrExpander},
-    derive_macro::{find_builtin_derive, BuiltinDeriveExpander},
+    attr_macro::{BuiltinAttrExpander, find_builtin_attr, pseudo_derive_attr_expansion},
+    derive_macro::{BuiltinDeriveExpander, find_builtin_derive},
     fn_macro::{
-        find_builtin_macro, include_input_to_file_id, BuiltinFnLikeExpander, EagerExpander,
+        BuiltinFnLikeExpander, EagerExpander, find_builtin_macro, include_input_to_file_id,
     },
 };
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/attr_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/attr_macro.rs
index e9dc17a28f688..25dd933f26772 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/attr_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/attr_macro.rs
@@ -1,8 +1,8 @@
 //! Builtin attributes.
 use intern::sym;
-use span::{MacroCallId, Span};
+use span::Span;
 
-use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallKind};
+use crate::{ExpandResult, MacroCallId, MacroCallKind, db::ExpandDatabase, name, tt};
 
 use super::quote;
 
@@ -130,7 +130,7 @@ fn derive_expand(
             return ExpandResult::ok(tt::TopSubtree::empty(tt::DelimSpan {
                 open: span,
                 close: span,
-            }))
+            }));
         }
     };
     pseudo_derive_attr_expansion(tt, derives, span)
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/derive_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/derive_macro.rs
index 28b6812139446..68283b916d74b 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/derive_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/derive_macro.rs
@@ -1,26 +1,27 @@
 //! Builtin derives.
 
 use intern::sym;
-use itertools::{izip, Itertools};
+use itertools::{Itertools, izip};
 use parser::SyntaxKind;
 use rustc_hash::FxHashSet;
-use span::{Edition, MacroCallId, Span, SyntaxContextId};
+use span::{Edition, Span, SyntaxContext};
 use stdx::never;
 use syntax_bridge::DocCommentDesugarMode;
 use tracing::debug;
 
 use crate::{
+    ExpandError, ExpandResult, MacroCallId,
     builtin::quote::{dollar_crate, quote},
     db::ExpandDatabase,
     hygiene::span_with_def_site_ctxt,
     name::{self, AsName, Name},
     span_map::ExpansionSpanMap,
-    tt, ExpandError, ExpandResult,
+    tt,
 };
 use syntax::{
     ast::{
-        self, edit_in_place::GenericParamsOwnerEdit, make, AstNode, FieldList, HasAttrs,
-        HasGenericArgs, HasGenericParams, HasModuleItem, HasName, HasTypeBounds,
+        self, AstNode, FieldList, HasAttrs, HasGenericArgs, HasGenericParams, HasModuleItem,
+        HasName, HasTypeBounds, edit_in_place::GenericParamsOwnerEdit, make,
     },
     ted,
 };
@@ -58,7 +59,7 @@ impl BuiltinDeriveExpander {
         tt: &tt::TopSubtree,
         span: Span,
     ) -> ExpandResult<tt::TopSubtree> {
-        let span = span_with_def_site_ctxt(db, span, id, Edition::CURRENT);
+        let span = span_with_def_site_ctxt(db, span, id.into(), Edition::CURRENT);
         self.expander()(db, span, tt)
     }
 }
@@ -117,7 +118,7 @@ impl VariantShape {
                     quote! {span => #it : #mapped , }
                 });
                 quote! {span =>
-                    #path { ##fields }
+                    #path { # #fields }
                 }
             }
             &VariantShape::Tuple(n) => {
@@ -128,7 +129,7 @@ impl VariantShape {
                     }
                 });
                 quote! {span =>
-                    #path ( ##fields )
+                    #path ( # #fields )
                 }
             }
             VariantShape::Unit => path,
@@ -237,7 +238,7 @@ fn parse_adt(
 
 fn parse_adt_from_syntax(
     adt: &ast::Adt,
-    tm: &span::SpanMap<SyntaxContextId>,
+    tm: &span::SpanMap<SyntaxContext>,
     call_site: Span,
 ) -> Result<BasicAdtInfo, ExpandError> {
     let (name, generic_param_list, where_clause, shape) = match &adt {
@@ -389,7 +390,7 @@ fn to_adt_syntax(
     db: &dyn ExpandDatabase,
     tt: &tt::TopSubtree,
     call_site: Span,
-) -> Result<(ast::Adt, span::SpanMap<SyntaxContextId>), ExpandError> {
+) -> Result<(ast::Adt, span::SpanMap<SyntaxContext>), ExpandError> {
     let (parsed, tm) = crate::db::token_tree_to_syntax_node(
         db,
         tt,
@@ -464,7 +465,7 @@ fn expand_simple_derive(
             return ExpandResult::new(
                 tt::TopSubtree::empty(tt::DelimSpan { open: invoc_span, close: invoc_span }),
                 e,
-            )
+            );
         }
     };
     ExpandResult::ok(expand_simple_derive_with_parsed(
@@ -523,7 +524,7 @@ fn expand_simple_derive_with_parsed(
 
     let name = info.name;
     quote! {invoc_span =>
-        impl < ##params #extra_impl_params > #trait_path for #name < ##args > where ##where_block { #trait_body }
+        impl < # #params #extra_impl_params > #trait_path for #name < # #args > where # #where_block { #trait_body }
     }
 }
 
@@ -572,7 +573,7 @@ fn clone_expand(
         quote! {span =>
             fn clone(&self) -> Self {
                 match self {
-                    ##arms
+                    # #arms
                 }
             }
         }
@@ -650,7 +651,7 @@ fn debug_expand(
                     }
                 });
                 quote! {span =>
-                    f.debug_struct(#name) ##for_fields .finish()
+                    f.debug_struct(#name) # #for_fields .finish()
                 }
             }
             VariantShape::Tuple(n) => {
@@ -660,7 +661,7 @@ fn debug_expand(
                     }
                 });
                 quote! {span =>
-                    f.debug_tuple(#name) ##for_fields .finish()
+                    f.debug_tuple(#name) # #for_fields .finish()
                 }
             }
             VariantShape::Unit => quote! {span =>
@@ -703,7 +704,7 @@ fn debug_expand(
         quote! {span =>
             fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result {
                 match self {
-                    ##arms
+                    # #arms
                 }
             }
         }
@@ -736,7 +737,7 @@ fn hash_expand(
                         let it =
                             names.iter().map(|it| quote! {span => #it . hash(ra_expand_state); });
                         quote! {span => {
-                            ##it
+                            # #it
                         } }
                     };
                     let fat_arrow = fat_arrow(span);
@@ -754,7 +755,7 @@ fn hash_expand(
             fn hash<H: #krate::hash::Hasher>(&self, ra_expand_state: &mut H) {
                 #check_discriminant
                 match self {
-                    ##arms
+                    # #arms
                 }
             }
         }
@@ -803,7 +804,7 @@ fn partial_eq_expand(
                             let t2 = tt::Ident::new(&format!("{}_other", first.sym), first.span);
                             quote!(span =>#t1 .eq( #t2 ))
                         };
-                        quote!(span =>#first ##rest)
+                        quote!(span =>#first # #rest)
                     }
                 };
                 quote! {span => ( #pat1 , #pat2 ) #fat_arrow #body , }
@@ -814,7 +815,7 @@ fn partial_eq_expand(
         quote! {span =>
             fn eq(&self, other: &Self) -> bool {
                 match (self, other) {
-                    ##arms
+                    # #arms
                     _unused #fat_arrow false
                 }
             }
@@ -891,7 +892,7 @@ fn ord_expand(
         let fat_arrow = fat_arrow(span);
         let mut body = quote! {span =>
             match (self, other) {
-                ##arms
+                # #arms
                 _unused #fat_arrow #krate::cmp::Ordering::Equal
             }
         };
@@ -961,14 +962,14 @@ fn partial_ord_expand(
             right,
             quote! {span =>
                 match (self, other) {
-                    ##arms
+                    # #arms
                     _unused #fat_arrow #krate::option::Option::Some(#krate::cmp::Ordering::Equal)
                 }
             },
             span,
         );
         quote! {span =>
-            fn partial_cmp(&self, other: &Self) -> #krate::option::Option::Option<#krate::cmp::Ordering> {
+            fn partial_cmp(&self, other: &Self) -> #krate::option::Option<#krate::cmp::Ordering> {
                 #body
             }
         }
@@ -1072,7 +1073,7 @@ fn coerce_pointee_expand(
                         "exactly one generic type parameter must be marked \
                                 as `#[pointee]` to derive `CoercePointee` traits",
                     ),
-                )
+                );
             }
             (Some(_), Some(_)) => {
                 return ExpandResult::new(
@@ -1082,7 +1083,7 @@ fn coerce_pointee_expand(
                         "only one type parameter can be marked as `#[pointee]` \
                                 when deriving `CoercePointee` traits",
                     ),
-                )
+                );
             }
         }
     };
@@ -1120,7 +1121,9 @@ fn coerce_pointee_expand(
                 tt::TopSubtree::empty(tt::DelimSpan::from_single(span)),
                 ExpandError::other(
                     span,
-                    format!("`derive(CoercePointee)` requires `{pointee_param_name}` to be marked `?Sized`"),
+                    format!(
+                        "`derive(CoercePointee)` requires `{pointee_param_name}` to be marked `?Sized`"
+                    ),
                 ),
             );
         }
@@ -1311,15 +1314,15 @@ fn coerce_pointee_expand(
                     }
                 })
             });
-        let self_for_traits = make::path_from_segments(
+
+        make::path_from_segments(
             [make::generic_ty_path_segment(
                 make::name_ref(&struct_name.text()),
                 self_params_for_traits,
             )],
             false,
         )
-        .clone_for_update();
-        self_for_traits
+        .clone_for_update()
     };
 
     let mut span_map = span::SpanMap::empty();
@@ -1335,7 +1338,7 @@ fn coerce_pointee_expand(
     let info = match parse_adt_from_syntax(&adt, &span_map, span) {
         Ok(it) => it,
         Err(err) => {
-            return ExpandResult::new(tt::TopSubtree::empty(tt::DelimSpan::from_single(span)), err)
+            return ExpandResult::new(tt::TopSubtree::empty(tt::DelimSpan::from_single(span)), err);
         }
     };
 
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs
index 55242ab3e57d1..621e174cac997 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs
@@ -3,24 +3,27 @@
 use base_db::AnchoredPath;
 use cfg::CfgExpr;
 use either::Either;
-use intern::{sym, Symbol};
-use mbe::{expect_fragment, DelimiterKind};
-use span::{Edition, EditionedFileId, Span};
+use intern::{
+    Symbol,
+    sym::{self},
+};
+use mbe::{DelimiterKind, expect_fragment};
+use span::{Edition, FileId, Span};
 use stdx::format_to;
 use syntax::{
     format_smolstr,
-    unescape::{unescape_byte, unescape_char, unescape_unicode, Mode},
+    unescape::{Mode, unescape_byte, unescape_char, unescape_unicode},
 };
 use syntax_bridge::syntax_node_to_token_tree;
 
 use crate::{
-    builtin::quote::{dollar_crate, quote, WithDelimiter},
+    EditionedFileId, ExpandError, ExpandResult, Lookup as _, MacroCallId,
+    builtin::quote::{WithDelimiter, dollar_crate, quote},
     db::ExpandDatabase,
     hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt},
     name,
     span_map::SpanMap,
     tt::{self, DelimSpan, TtElement, TtIter},
-    ExpandError, ExpandResult, HirFileIdExt, Lookup as _, MacroCallId,
 };
 
 macro_rules! register_builtin {
@@ -69,7 +72,7 @@ impl BuiltinFnLikeExpander {
         tt: &tt::TopSubtree,
         span: Span,
     ) -> ExpandResult<tt::TopSubtree> {
-        let span = span_with_def_site_ctxt(db, span, id, Edition::CURRENT);
+        let span = span_with_def_site_ctxt(db, span, id.into(), Edition::CURRENT);
         self.expander()(db, id, tt, span)
     }
 
@@ -86,7 +89,7 @@ impl EagerExpander {
         tt: &tt::TopSubtree,
         span: Span,
     ) -> ExpandResult<tt::TopSubtree> {
-        let span = span_with_def_site_ctxt(db, span, id, Edition::CURRENT);
+        let span = span_with_def_site_ctxt(db, span, id.into(), Edition::CURRENT);
         self.expander()(db, id, tt, span)
     }
 
@@ -174,10 +177,10 @@ fn line_expand(
     ExpandResult::ok(tt::TopSubtree::invisible_from_leaves(
         span,
         [tt::Leaf::Literal(tt::Literal {
-            symbol: sym::INTEGER_0.clone(),
+            symbol: sym::INTEGER_0,
             span,
             kind: tt::LitKind::Integer,
-            suffix: Some(sym::u32.clone()),
+            suffix: Some(sym::u32),
         })],
     ))
 }
@@ -221,14 +224,14 @@ fn assert_expand(
     tt: &tt::TopSubtree,
     span: Span,
 ) -> ExpandResult<tt::TopSubtree> {
-    let call_site_span = span_with_call_site_ctxt(db, span, id, Edition::CURRENT);
+    let call_site_span = span_with_call_site_ctxt(db, span, id.into(), Edition::CURRENT);
 
     let mut iter = tt.iter();
 
     let cond = expect_fragment(
         &mut iter,
         parser::PrefixEntryPoint::Expr,
-        db.crate_graph()[id.lookup(db).krate].edition,
+        id.lookup(db).krate.data(db).edition,
         tt.top_subtree().delimiter.delim_span(),
     );
     _ = iter.expect_char(',');
@@ -237,9 +240,9 @@ fn assert_expand(
     let dollar_crate = dollar_crate(span);
     let panic_args = rest.iter();
     let mac = if use_panic_2021(db, span) {
-        quote! {call_site_span => #dollar_crate::panic::panic_2021!(##panic_args) }
+        quote! {call_site_span => #dollar_crate::panic::panic_2021!(# #panic_args) }
     } else {
-        quote! {call_site_span => #dollar_crate::panic!(##panic_args) }
+        quote! {call_site_span => #dollar_crate::panic!(# #panic_args) }
     };
     let value = cond.value;
     let expanded = quote! {call_site_span =>{
@@ -330,7 +333,7 @@ fn cfg_expand(
 ) -> ExpandResult<tt::TopSubtree> {
     let loc = db.lookup_intern_macro_call(id);
     let expr = CfgExpr::parse(tt);
-    let enabled = db.crate_graph()[loc.krate].cfg_options.check(&expr) != Some(false);
+    let enabled = loc.krate.cfg_options(db).check(&expr) != Some(false);
     let expanded = if enabled { quote!(span=>true) } else { quote!(span=>false) };
     ExpandResult::ok(expanded)
 }
@@ -342,13 +345,9 @@ fn panic_expand(
     span: Span,
 ) -> ExpandResult<tt::TopSubtree> {
     let dollar_crate = dollar_crate(span);
-    let call_site_span = span_with_call_site_ctxt(db, span, id, Edition::CURRENT);
+    let call_site_span = span_with_call_site_ctxt(db, span, id.into(), Edition::CURRENT);
 
-    let mac = if use_panic_2021(db, call_site_span) {
-        sym::panic_2021.clone()
-    } else {
-        sym::panic_2015.clone()
-    };
+    let mac = if use_panic_2021(db, call_site_span) { sym::panic_2021 } else { sym::panic_2015 };
 
     // Pass the original arguments
     let subtree = WithDelimiter {
@@ -373,12 +372,12 @@ fn unreachable_expand(
     span: Span,
 ) -> ExpandResult<tt::TopSubtree> {
     let dollar_crate = dollar_crate(span);
-    let call_site_span = span_with_call_site_ctxt(db, span, id, Edition::CURRENT);
+    let call_site_span = span_with_call_site_ctxt(db, span, id.into(), Edition::CURRENT);
 
     let mac = if use_panic_2021(db, call_site_span) {
-        sym::unreachable_2021.clone()
+        sym::unreachable_2021
     } else {
-        sym::unreachable_2015.clone()
+        sym::unreachable_2015
     };
 
     // Pass the original arguments
@@ -401,14 +400,14 @@ fn use_panic_2021(db: &dyn ExpandDatabase, span: Span) -> bool {
     // stack that does not have #[allow_internal_unstable(edition_panic)].
     // (To avoid using the edition of e.g. the assert!() or debug_assert!() definition.)
     loop {
-        let Some(expn) = db.lookup_intern_syntax_context(span.ctx).outer_expn else {
+        let Some(expn) = span.ctx.outer_expn(db) else {
             break false;
         };
-        let expn = db.lookup_intern_macro_call(expn);
+        let expn = db.lookup_intern_macro_call(expn.into());
         // FIXME: Record allow_internal_unstable in the macro def (this hasn't been done yet because it
         // would consume quite a bit of extra memory for all call locs...)
         // if let Some(features) = expn.def.allow_internal_unstable {
-        //     if features.iter().any(|&f| f == sym::edition_panic.clone()) {
+        //     if features.iter().any(|&f| f == sym::edition_panic) {
         //         span = expn.call_site;
         //         continue;
         //     }
@@ -424,12 +423,15 @@ fn compile_error_expand(
     span: Span,
 ) -> ExpandResult<tt::TopSubtree> {
     let err = match &*tt.0 {
-        [_, tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
-            symbol: text,
-            span: _,
-            kind: tt::LitKind::Str | tt::LitKind::StrRaw(_),
-            suffix: _,
-        }))] => ExpandError::other(span, Box::from(unescape_str(text).as_str())),
+        [
+            _,
+            tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
+                symbol: text,
+                span: _,
+                kind: tt::LitKind::Str | tt::LitKind::StrRaw(_),
+                suffix: _,
+            })),
+        ] => ExpandError::other(span, Box::from(unescape_str(text).as_str())),
         _ => ExpandError::other(span, "`compile_error!` argument must be a string"),
     };
 
@@ -656,17 +658,17 @@ fn relative_file(
     allow_recursion: bool,
     err_span: Span,
 ) -> Result<EditionedFileId, ExpandError> {
-    let lookup = call_id.lookup(db);
-    let call_site = lookup.kind.file_id().original_file_respecting_includes(db).file_id();
+    let lookup = db.lookup_intern_macro_call(call_id);
+    let call_site = lookup.kind.file_id().original_file_respecting_includes(db).file_id(db);
     let path = AnchoredPath { anchor: call_site, path: path_str };
-    let res = db
+    let res: FileId = db
         .resolve_path(path)
         .ok_or_else(|| ExpandError::other(err_span, format!("failed to load file `{path_str}`")))?;
     // Prevent include itself
     if res == call_site && !allow_recursion {
         Err(ExpandError::other(err_span, format!("recursive inclusion of `{path_str}`")))
     } else {
-        Ok(EditionedFileId::new(res, db.crate_graph()[lookup.krate].edition))
+        Ok(EditionedFileId::new(db, res, lookup.krate.data(db).edition))
     }
 }
 
@@ -725,19 +727,19 @@ fn include_expand(
     tt: &tt::TopSubtree,
     span: Span,
 ) -> ExpandResult<tt::TopSubtree> {
-    let file_id = match include_input_to_file_id(db, arg_id, tt) {
-        Ok(it) => it,
+    let editioned_file_id = match include_input_to_file_id(db, arg_id, tt) {
+        Ok(editioned_file_id) => editioned_file_id,
         Err(e) => {
             return ExpandResult::new(
                 tt::TopSubtree::empty(DelimSpan { open: span, close: span }),
                 e,
-            )
+            );
         }
     };
-    let span_map = db.real_span_map(file_id);
+    let span_map = db.real_span_map(editioned_file_id);
     // FIXME: Parse errors
     ExpandResult::ok(syntax_node_to_token_tree(
-        &db.parse(file_id).syntax_node(),
+        &db.parse(editioned_file_id).syntax_node(),
         SpanMap::RealSpanMap(span_map),
         span,
         syntax_bridge::DocCommentDesugarMode::ProcMacro,
@@ -776,15 +778,15 @@ fn include_str_expand(
     db: &dyn ExpandDatabase,
     arg_id: MacroCallId,
     tt: &tt::TopSubtree,
-    span: Span,
+    call_site: Span,
 ) -> ExpandResult<tt::TopSubtree> {
-    let (path, span) = match parse_string(tt) {
+    let (path, input_span) = match parse_string(tt) {
         Ok(it) => it,
         Err(e) => {
             return ExpandResult::new(
-                tt::TopSubtree::empty(DelimSpan { open: span, close: span }),
+                tt::TopSubtree::empty(DelimSpan { open: call_site, close: call_site }),
                 e,
-            )
+            );
         }
     };
 
@@ -792,22 +794,22 @@ fn include_str_expand(
     // it's unusual to `include_str!` a Rust file), but we can return an empty string.
     // Ideally, we'd be able to offer a precise expansion if the user asks for macro
     // expansion.
-    let file_id = match relative_file(db, arg_id, path.as_str(), true, span) {
+    let file_id = match relative_file(db, arg_id, path.as_str(), true, input_span) {
         Ok(file_id) => file_id,
         Err(_) => {
-            return ExpandResult::ok(quote!(span =>""));
+            return ExpandResult::ok(quote!(call_site =>""));
         }
     };
 
-    let text = db.file_text(file_id.file_id());
-    let text = &*text;
+    let text = db.file_text(file_id.file_id(db));
+    let text = &*text.text(db);
 
-    ExpandResult::ok(quote!(span =>#text))
+    ExpandResult::ok(quote!(call_site =>#text))
 }
 
 fn get_env_inner(db: &dyn ExpandDatabase, arg_id: MacroCallId, key: &Symbol) -> Option<String> {
     let krate = db.lookup_intern_macro_call(arg_id).krate;
-    db.crate_graph()[krate].env.get(key.as_str())
+    krate.env(db).get(key.as_str())
 }
 
 fn env_expand(
@@ -822,7 +824,7 @@ fn env_expand(
             return ExpandResult::new(
                 tt::TopSubtree::empty(DelimSpan { open: span, close: span }),
                 e,
-            )
+            );
         }
     };
 
@@ -860,7 +862,7 @@ fn option_env_expand(
             return ExpandResult::new(
                 tt::TopSubtree::empty(DelimSpan { open: call_site, close: call_site }),
                 e,
-            )
+            );
         }
     };
     let dollar_crate = dollar_crate(call_site);
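
As a rough picture of what `assert_expand` emits (a hand-written approximation, not the literal expansion): the parsed condition is wrapped in an `if`, and the remaining arguments are splatted into the edition-appropriate panic macro, `$crate::panic::panic_2021!` on edition 2021 and later, `$crate::panic!` otherwise:

    fn main() {
        let x = 2 + 2;
        // `assert!(x == 4, "math is broken: {}", x)` expands approximately to:
        {
            if !(x == 4) {
                // 2021+ editions route through `panic_2021!`; earlier ones use `panic!`.
                panic!("math is broken: {}", x);
            }
        };
    }
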
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/quote.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/quote.rs
index 9b637fc768446..62b7b638e7b67 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/quote.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/quote.rs
@@ -1,7 +1,7 @@
 //! A simplified version of a quote-crate-like quasi-quote macro
 #![allow(clippy::crate_in_macro_def)]
 
-use intern::{sym, Symbol};
+use intern::{Symbol, sym};
 use span::Span;
 use syntax::ToSmolStr;
 use tt::IdentIsRaw;
@@ -9,7 +9,7 @@ use tt::IdentIsRaw;
 use crate::{name::Name, tt::TopSubtreeBuilder};
 
 pub(crate) fn dollar_crate(span: Span) -> tt::Ident<Span> {
-    tt::Ident { sym: sym::dollar_crate.clone(), span, is_raw: tt::IdentIsRaw::No }
+    tt::Ident { sym: sym::dollar_crate, span, is_raw: tt::IdentIsRaw::No }
 }
 
 // A helper `quote!` macro
@@ -61,7 +61,7 @@ macro_rules! quote_impl__ {
         $crate::builtin::quote::__quote!($span $builder $($tail)*);
     };
 
-    ($span:ident $builder:ident ## $first:ident $($tail:tt)* ) => {{
+    ($span:ident $builder:ident # # $first:ident $($tail:tt)* ) => {{
         ::std::iter::IntoIterator::into_iter($first).for_each(|it| $crate::builtin::quote::ToTokenTree::to_tokens(it, $span, $builder));
         $crate::builtin::quote::__quote!($span $builder $($tail)*);
     }};
@@ -203,7 +203,7 @@ impl_to_to_tokentrees! {
     span: u32 => self { crate::tt::Literal{symbol: Symbol::integer(self as _), span, kind: tt::LitKind::Integer, suffix: None } };
     span: usize => self { crate::tt::Literal{symbol: Symbol::integer(self as _), span, kind: tt::LitKind::Integer, suffix: None } };
     span: i32 => self { crate::tt::Literal{symbol: Symbol::integer(self as _), span, kind: tt::LitKind::Integer, suffix: None } };
-    span: bool => self { crate::tt::Ident{sym: if self { sym::true_.clone() } else { sym::false_.clone() }, span, is_raw: tt::IdentIsRaw::No } };
+    span: bool => self { crate::tt::Ident{sym: if self { sym::true_ } else { sym::false_ }, span, is_raw: tt::IdentIsRaw::No } };
     _span: crate::tt::Leaf => self { self };
     _span: crate::tt::Literal => self { self };
     _span: crate::tt::Ident => self { self };
@@ -226,7 +226,7 @@ mod tests {
     use ::tt::IdentIsRaw;
     use expect_test::expect;
     use intern::Symbol;
-    use span::{Edition, SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
+    use span::{Edition, ROOT_ERASED_FILE_AST_ID, SpanAnchor, SyntaxContext};
     use syntax::{TextRange, TextSize};
 
     use super::quote;
@@ -240,7 +240,7 @@ mod tests {
             ),
             ast_id: ROOT_ERASED_FILE_AST_ID,
         },
-        ctx: SyntaxContextId::root(Edition::CURRENT),
+        ctx: SyntaxContext::root(Edition::CURRENT),
     };
 
     #[test]
@@ -277,8 +277,8 @@ mod tests {
         assert_eq!(quoted.to_string(), "hello");
         let t = format!("{quoted:#?}");
         expect![[r#"
-            SUBTREE $$ 937550:0@0..0#2 937550:0@0..0#2
-              IDENT   hello 937550:0@0..0#2"#]]
+            SUBTREE $$ 937550:0@0..0#ROOT2024 937550:0@0..0#ROOT2024
+              IDENT   hello 937550:0@0..0#ROOT2024"#]]
         .assert_eq(&t);
     }
 
@@ -324,6 +324,9 @@ mod tests {
             }
         };
 
-        assert_eq!(quoted.to_string(), "impl Clone for Foo {fn clone (& self) -> Self {Self {name : self . name . clone () , id : self . id . clone () ,}}}");
+        assert_eq!(
+            quoted.to_string(),
+            "impl Clone for Foo {fn clone (& self) -> Self {Self {name : self . name . clone () , id : self . id . clone () ,}}}"
+        );
     }
 }
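
The `# # $first` rule above is the splat form of this internal `quote!`: it appends each element of an iterator to the output token tree. It cannot be used outside rust-analyzer, but the crates.io `quote` crate offers analogous repetition syntax; a loose analogue under that assumption (the `quote` dependency is illustrative only and is not used by this crate):

    use quote::quote; // crates.io `quote`, assumed here purely for illustration

    fn main() {
        let fields = vec![quote!(a), quote!(b), quote!(c)];
        // `#(#fields: u32,)*` plays the role `# #fields` plays in the internal
        // macro: each element's tokens are appended in sequence.
        let tokens = quote! { struct S { #(#fields: u32,)* } };
        println!("{tokens}");
    }
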
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs b/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs
index 626a82ae08eab..c6ea4a3a33db8 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs
@@ -1,28 +1,28 @@
 //! Processes out #[cfg] and #[cfg_attr] attributes from the input for the derive macro
 use std::iter::Peekable;
 
-use base_db::CrateId;
+use base_db::Crate;
 use cfg::{CfgAtom, CfgExpr};
-use intern::{sym, Symbol};
+use intern::{Symbol, sym};
 use rustc_hash::FxHashSet;
 use syntax::{
-    ast::{self, Attr, HasAttrs, Meta, TokenTree, VariantList},
     AstNode, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, T,
+    ast::{self, Attr, HasAttrs, Meta, TokenTree, VariantList},
 };
 use tracing::{debug, warn};
 
-use crate::{db::ExpandDatabase, proc_macro::ProcMacroKind, MacroCallLoc, MacroDefKind};
+use crate::{MacroCallLoc, MacroDefKind, db::ExpandDatabase, proc_macro::ProcMacroKind};
 
-fn check_cfg(db: &dyn ExpandDatabase, attr: &Attr, krate: CrateId) -> Option<bool> {
+fn check_cfg(db: &dyn ExpandDatabase, attr: &Attr, krate: Crate) -> Option<bool> {
     if !attr.simple_name().as_deref().map(|v| v == "cfg")? {
         return None;
     }
     let cfg = parse_from_attr_token_tree(&attr.meta()?.token_tree()?)?;
-    let enabled = db.crate_graph()[krate].cfg_options.check(&cfg) != Some(false);
+    let enabled = krate.cfg_options(db).check(&cfg) != Some(false);
     Some(enabled)
 }
 
-fn check_cfg_attr(db: &dyn ExpandDatabase, attr: &Attr, krate: CrateId) -> Option<bool> {
+fn check_cfg_attr(db: &dyn ExpandDatabase, attr: &Attr, krate: Crate) -> Option<bool> {
     if !attr.simple_name().as_deref().map(|v| v == "cfg_attr")? {
         return None;
     }
@@ -32,17 +32,17 @@ fn check_cfg_attr(db: &dyn ExpandDatabase, attr: &Attr, krate: CrateId) -> Optio
 pub fn check_cfg_attr_value(
     db: &dyn ExpandDatabase,
     attr: &TokenTree,
-    krate: CrateId,
+    krate: Crate,
 ) -> Option<bool> {
     let cfg_expr = parse_from_attr_token_tree(attr)?;
-    let enabled = db.crate_graph()[krate].cfg_options.check(&cfg_expr) != Some(false);
+    let enabled = krate.cfg_options(db).check(&cfg_expr) != Some(false);
     Some(enabled)
 }
 
 fn process_has_attrs_with_possible_comma<I: HasAttrs>(
     db: &dyn ExpandDatabase,
     items: impl Iterator<Item = I>,
-    krate: CrateId,
+    krate: Crate,
     remove: &mut FxHashSet<SyntaxElement>,
 ) -> Option<()> {
     for item in items {
@@ -144,7 +144,7 @@ fn remove_possible_comma(item: &impl AstNode, res: &mut FxHashSet<SyntaxElement>
 fn process_enum(
     db: &dyn ExpandDatabase,
     variants: VariantList,
-    krate: CrateId,
+    krate: Crate,
     remove: &mut FxHashSet<SyntaxElement>,
 ) -> Option<()> {
     'variant: for variant in variants.variants() {
@@ -344,8 +344,8 @@ where
 #[cfg(test)]
 mod tests {
     use cfg::DnfExpr;
-    use expect_test::{expect, Expect};
-    use syntax::{ast::Attr, AstNode, SourceFile};
+    use expect_test::{Expect, expect};
+    use syntax::{AstNode, SourceFile, ast::Attr};
 
     use crate::cfg_process::parse_from_attr_token_tree;
 
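
The `check_cfg*` helpers above reduce to: parse the attribute body into a `CfgExpr` and evaluate it against the crate's cfg options, where an expression that cannot be evaluated yields `None` and therefore still counts as enabled (`!= Some(false)`). A self-contained sketch of that three-valued evaluation, using hand-rolled stand-ins rather than the real `cfg` crate types:

    use std::collections::HashSet;

    // Hand-rolled stand-ins for cfg::CfgExpr / CfgOptions, for illustration only.
    enum Cfg {
        Atom(&'static str),
        Not(Box<Cfg>),
        All(Vec<Cfg>),
        Any(Vec<Cfg>),
        Invalid, // e.g. a malformed `#[cfg(...)]` body
    }

    fn check(opts: &HashSet<&str>, cfg: &Cfg) -> Option<bool> {
        match cfg {
            Cfg::Atom(name) => Some(opts.contains(name)),
            Cfg::Not(inner) => check(opts, inner).map(|b| !b),
            Cfg::All(list) => list.iter().try_fold(true, |acc, c| Some(acc & check(opts, c)?)),
            Cfg::Any(list) => list.iter().try_fold(false, |acc, c| Some(acc | check(opts, c)?)),
            Cfg::Invalid => None,
        }
    }

    fn main() {
        let opts: HashSet<&str> = ["unix", "feature_foo"].into_iter().collect();
        let expr = Cfg::All(vec![Cfg::Atom("unix"), Cfg::Not(Box::new(Cfg::Atom("windows")))]);
        // Mirrors `krate.cfg_options(db).check(&cfg) != Some(false)`:
        // anything that is not definitely false counts as enabled.
        let enabled = check(&opts, &expr) != Some(false);
        println!("enabled = {enabled}");
    }
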
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/change.rs b/src/tools/rust-analyzer/crates/hir-expand/src/change.rs
index 1fdf251ba5210..6873cb7eaf9cd 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/change.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/change.rs
@@ -1,29 +1,27 @@
 //! Defines a unit of change that can be applied to the database to get the next
 //! state. Changes are transactional.
-use base_db::{
-    ra_salsa::Durability, CrateGraph, CrateId, CrateWorkspaceData, FileChange, SourceRoot,
-    SourceRootDatabase,
-};
-use rustc_hash::FxHashMap;
+use base_db::{CrateGraphBuilder, FileChange, SourceRoot};
+use salsa::Durability;
 use span::FileId;
 use triomphe::Arc;
 
-use crate::{db::ExpandDatabase, proc_macro::ProcMacros};
+use crate::{db::ExpandDatabase, proc_macro::ProcMacrosBuilder};
 
 #[derive(Debug, Default)]
 pub struct ChangeWithProcMacros {
     pub source_change: FileChange,
-    pub proc_macros: Option<ProcMacros>,
+    pub proc_macros: Option<ProcMacrosBuilder>,
 }
 
 impl ChangeWithProcMacros {
-    pub fn new() -> Self {
-        Self::default()
-    }
-
-    pub fn apply(self, db: &mut (impl ExpandDatabase + SourceRootDatabase)) {
-        self.source_change.apply(db);
+    pub fn apply(self, db: &mut impl ExpandDatabase) {
+        let crates_id_map = self.source_change.apply(db);
         if let Some(proc_macros) = self.proc_macros {
+            let proc_macros = proc_macros.build(
+                crates_id_map
+                    .as_ref()
+                    .expect("cannot set proc macros without setting the crate graph too"),
+            );
             db.set_proc_macros_with_durability(Arc::new(proc_macros), Durability::HIGH);
         }
     }
@@ -32,16 +30,11 @@ impl ChangeWithProcMacros {
         self.source_change.change_file(file_id, new_text)
     }
 
-    pub fn set_crate_graph(
-        &mut self,
-        graph: CrateGraph,
-        ws_data: FxHashMap<CrateId, Arc<CrateWorkspaceData>>,
-    ) {
+    pub fn set_crate_graph(&mut self, graph: CrateGraphBuilder) {
         self.source_change.set_crate_graph(graph);
-        self.source_change.set_ws_data(ws_data);
     }
 
-    pub fn set_proc_macros(&mut self, proc_macros: ProcMacros) {
+    pub fn set_proc_macros(&mut self, proc_macros: ProcMacrosBuilder) {
         self.proc_macros = Some(proc_macros);
     }
 
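
The reworked `apply` encodes an ordering constraint: applying the `FileChange` is what produces the id map for the newly set crate graph, and only then can the proc-macro builder be resolved against those ids (hence the `expect` when proc macros are set without a crate graph). A hand-rolled sketch of that constraint with invented stand-in types, not the real rust-analyzer API:

    use std::collections::HashMap;

    struct FileChange { new_crate_graph: Option<Vec<&'static str>> }
    struct ProcMacrosBuilder { per_crate: Vec<(&'static str, &'static str)> }

    impl FileChange {
        // Applying the change yields the id map only when a new crate graph was set.
        fn apply(self) -> Option<HashMap<&'static str, usize>> {
            self.new_crate_graph
                .map(|names| names.into_iter().enumerate().map(|(i, n)| (n, i)).collect())
        }
    }

    impl ProcMacrosBuilder {
        // Building needs the freshly assigned crate ids, so it must run after `apply`.
        fn build(self, ids: &HashMap<&'static str, usize>) -> HashMap<usize, &'static str> {
            self.per_crate.into_iter().map(|(krate, m)| (ids[krate], m)).collect()
        }
    }

    fn main() {
        let change = FileChange { new_crate_graph: Some(vec!["core", "my_crate"]) };
        let macros = ProcMacrosBuilder { per_crate: vec![("my_crate", "MyDerive")] };
        let ids = change
            .apply()
            .expect("cannot set proc macros without setting the crate graph too");
        println!("{:?}", macros.build(&ids));
    }
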
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
index 8ca8bf1ba4a6e..29b7b33fd0fb8 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
@@ -1,30 +1,30 @@
 //! Defines database & queries for macro expansion.
 
-use base_db::{ra_salsa, CrateId, SourceDatabase};
+use base_db::{Crate, RootQueryDb};
 use either::Either;
 use mbe::MatchedArmIndex;
 use rustc_hash::FxHashSet;
-use span::{AstIdMap, Edition, EditionedFileId, Span, SyntaxContextData, SyntaxContextId};
-use syntax::{ast, AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T};
-use syntax_bridge::{syntax_node_to_token_tree, DocCommentDesugarMode};
+use span::{AstIdMap, Edition, Span, SyntaxContext};
+use syntax::{AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T, ast};
+use syntax_bridge::{DocCommentDesugarMode, syntax_node_to_token_tree};
 use triomphe::Arc;
 
 use crate::{
-    attrs::{collect_attrs, AttrId},
+    AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
+    EagerExpander, EditionedFileId, ExpandError, ExpandResult, ExpandTo, HirFileId, MacroCallId,
+    MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind,
+    attrs::{AttrId, collect_attrs},
     builtin::pseudo_derive_attr_expansion,
     cfg_process,
     declarative::DeclarativeMacroExpander,
     fixup::{self, SyntaxFixupUndoInfo},
     hygiene::{
-        span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt,
-        SyntaxContextExt as _,
+        SyntaxContextExt as _, span_with_call_site_ctxt, span_with_def_site_ctxt,
+        span_with_mixed_site_ctxt,
     },
-    proc_macro::ProcMacros,
-    span_map::{RealSpanMap, SpanMap, SpanMapRef},
-    tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander,
-    CustomProcMacroExpander, EagerCallInfo, EagerExpander, ExpandError, ExpandResult, ExpandTo,
-    ExpansionSpanMap, HirFileId, HirFileIdRepr, Lookup, MacroCallId, MacroCallKind, MacroCallLoc,
-    MacroDefId, MacroDefKind, MacroFileId,
+    proc_macro::{CrateProcMacros, CustomProcMacroExpander, ProcMacros},
+    span_map::{ExpansionSpanMap, RealSpanMap, SpanMap, SpanMapRef},
+    tt,
 };
 /// This is just to ensure the types of smart_macro_arg and macro_arg are the same
 type MacroArgResult = (Arc<tt::TopSubtree>, SyntaxFixupUndoInfo, Span);
@@ -52,32 +52,37 @@ pub enum TokenExpander {
     ProcMacro(CustomProcMacroExpander),
 }
 
-#[ra_salsa::query_group(ExpandDatabaseStorage)]
-pub trait ExpandDatabase: SourceDatabase {
-    /// The proc macros.
-    #[ra_salsa::input]
+#[query_group::query_group]
+pub trait ExpandDatabase: RootQueryDb {
+    /// The proc macros. Do not use this! Use `proc_macros_for_crate()` instead.
+    #[salsa::input]
     fn proc_macros(&self) -> Arc<ProcMacros>;
 
+    /// Incrementality query to prevent queries from directly depending on `ExpandDatabase::proc_macros`.
+    #[salsa::invoke(crate::proc_macro::proc_macros_for_crate)]
+    fn proc_macros_for_crate(&self, krate: Crate) -> Option<Arc<CrateProcMacros>>;
+
+    #[salsa::invoke(ast_id_map)]
     fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
 
-    /// Main public API -- parses a hir file, not caring whether it's a real
-    /// file or a macro expansion.
-    #[ra_salsa::transparent]
+    #[salsa::transparent]
     fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode;
+
     /// Implementation for the macro case.
-    #[ra_salsa::lru]
+    #[salsa::lru(512)]
     fn parse_macro_expansion(
         &self,
-        macro_file: MacroFileId,
+        macro_file: MacroCallId,
     ) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)>;
-    #[ra_salsa::transparent]
-    #[ra_salsa::invoke(SpanMap::new)]
+
+    #[salsa::transparent]
+    #[salsa::invoke(SpanMap::new)]
     fn span_map(&self, file_id: HirFileId) -> SpanMap;
 
-    #[ra_salsa::transparent]
-    #[ra_salsa::invoke(crate::span_map::expansion_span_map)]
-    fn expansion_span_map(&self, file_id: MacroFileId) -> Arc<ExpansionSpanMap>;
-    #[ra_salsa::invoke(crate::span_map::real_span_map)]
+    #[salsa::transparent]
+    #[salsa::invoke(crate::span_map::expansion_span_map)]
+    fn expansion_span_map(&self, file_id: MacroCallId) -> Arc<ExpansionSpanMap>;
+    #[salsa::invoke(crate::span_map::real_span_map)]
     fn real_span_map(&self, file_id: EditionedFileId) -> Arc<RealSpanMap>;
 
     /// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the
@@ -85,66 +90,74 @@ pub trait ExpandDatabase: SourceDatabase {
     ///
     /// We encode macro definitions into ids of macro calls, which is what allows us
     /// to be incremental.
-    #[ra_salsa::interned]
+    #[salsa::transparent]
     fn intern_macro_call(&self, macro_call: MacroCallLoc) -> MacroCallId;
-    #[ra_salsa::interned]
-    fn intern_syntax_context(&self, ctx: SyntaxContextData) -> SyntaxContextId;
-
-    #[ra_salsa::transparent]
-    fn setup_syntax_context_root(&self) -> ();
-    #[ra_salsa::transparent]
-    #[ra_salsa::invoke(crate::hygiene::dump_syntax_contexts)]
-    fn dump_syntax_contexts(&self) -> String;
+    #[salsa::transparent]
+    fn lookup_intern_macro_call(&self, macro_call: MacroCallId) -> MacroCallLoc;
 
     /// Lowers a syntactic macro call to a token tree representation. That's a firewall
     /// query: only typing in the macro call itself changes the returned
     /// subtree.
     #[deprecated = "calling this is incorrect, call `macro_arg_considering_derives` instead"]
+    #[salsa::invoke(macro_arg)]
     fn macro_arg(&self, id: MacroCallId) -> MacroArgResult;
-    #[ra_salsa::transparent]
+
+    #[salsa::transparent]
     fn macro_arg_considering_derives(
         &self,
         id: MacroCallId,
         kind: &MacroCallKind,
     ) -> MacroArgResult;
+
     /// Fetches the expander for this macro.
-    #[ra_salsa::transparent]
-    #[ra_salsa::invoke(TokenExpander::macro_expander)]
+    #[salsa::transparent]
+    #[salsa::invoke(TokenExpander::macro_expander)]
     fn macro_expander(&self, id: MacroDefId) -> TokenExpander;
+
     /// Fetches (and compiles) the expander of this decl macro.
-    #[ra_salsa::invoke(DeclarativeMacroExpander::expander)]
+    #[salsa::invoke(DeclarativeMacroExpander::expander)]
     fn decl_macro_expander(
         &self,
-        def_crate: CrateId,
+        def_crate: Crate,
         id: AstId<ast::Macro>,
     ) -> Arc<DeclarativeMacroExpander>;
+
     /// Special case of the previous query for procedural macros. We can't LRU
     /// proc macros, since they are not deterministic in general, and
     /// non-determinism breaks salsa in a very, very, very bad way.
     /// @edwin0cheng heroically debugged this once! See #4315 for details
+    #[salsa::invoke(expand_proc_macro)]
     fn expand_proc_macro(&self, call: MacroCallId) -> ExpandResult<Arc<tt::TopSubtree>>;
     /// Retrieves the span to be used for a proc-macro expansion's spans.
     /// This is a firewall query: it requires parsing the file, which we don't want proc-macros to
     /// directly depend on, as that would cause too-frequent invalidations, mainly because the
     /// parse queries are LRU cached. If they weren't, invalidations would only happen when the
     /// user edits the file that defines the proc-macro.
+    #[salsa::invoke_interned(proc_macro_span)]
     fn proc_macro_span(&self, fun: AstId<ast::Fn>) -> Span;
+
     /// Firewall query that returns the errors from the `parse_macro_expansion` query.
+    #[salsa::invoke(parse_macro_expansion_error)]
     fn parse_macro_expansion_error(
         &self,
         macro_call: MacroCallId,
     ) -> Option<Arc<ExpandResult<Arc<[SyntaxError]>>>>;
-    #[ra_salsa::transparent]
-    fn syntax_context(&self, file: HirFileId, edition: Edition) -> SyntaxContextId;
+
+    #[salsa::transparent]
+    fn syntax_context(&self, file: HirFileId, edition: Edition) -> SyntaxContext;
+}
+
+#[salsa::interned(no_lifetime, id = span::SyntaxContext)]
+pub struct SyntaxContextWrapper {
+    pub data: SyntaxContext,
 }
 
-fn syntax_context(db: &dyn ExpandDatabase, file: HirFileId, edition: Edition) -> SyntaxContextId {
-    match file.repr() {
-        HirFileIdRepr::FileId(_) => SyntaxContextId::root(edition),
-        HirFileIdRepr::MacroFile(m) => {
-            db.macro_arg_considering_derives(m.macro_call_id, &m.macro_call_id.lookup(db).kind)
-                .2
-                .ctx
+fn syntax_context(db: &dyn ExpandDatabase, file: HirFileId, edition: Edition) -> SyntaxContext {
+    match file {
+        HirFileId::FileId(_) => SyntaxContext::root(edition),
+        HirFileId::MacroFile(m) => {
+            let kind = db.lookup_intern_macro_call(m).kind;
+            db.macro_arg_considering_derives(m, &kind).2.ctx
         }
     }
 }
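
The doc comment above ("we encode macro definitions into ids of macro calls") is an interning scheme: structurally equal `MacroCallLoc`s must map to the same small id so that queries keyed by `MacroCallId` stay stable. A hand-rolled interner illustrating the idea; in the real code salsa's interned structs (`MacroCallId::new` / `.loc(db)`) play this role:

    use std::collections::HashMap;

    #[derive(Clone, PartialEq, Eq, Hash, Debug)]
    struct MacroCallLoc {
        def: &'static str, // which macro definition
        call_ast_id: u32,  // where it is called
    }

    #[derive(Copy, Clone, PartialEq, Eq, Debug)]
    struct MacroCallId(u32);

    #[derive(Default)]
    struct Interner {
        map: HashMap<MacroCallLoc, MacroCallId>,
        data: Vec<MacroCallLoc>,
    }

    impl Interner {
        fn intern(&mut self, loc: MacroCallLoc) -> MacroCallId {
            if let Some(&id) = self.map.get(&loc) {
                return id; // the same call location always gets the same id
            }
            let id = MacroCallId(self.data.len() as u32);
            self.data.push(loc.clone());
            self.map.insert(loc, id);
            id
        }
        fn lookup(&self, id: MacroCallId) -> &MacroCallLoc {
            &self.data[id.0 as usize]
        }
    }

    fn main() {
        let mut interner = Interner::default();
        let a = interner.intern(MacroCallLoc { def: "vec", call_ast_id: 1 });
        let b = interner.intern(MacroCallLoc { def: "vec", call_ast_id: 1 });
        assert_eq!(a, b); // stable ids are what downstream queries key on
        println!("{:?}", interner.lookup(a));
    }
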
@@ -272,9 +285,9 @@ pub fn expand_speculative(
                 loc.krate,
                 &tt,
                 attr_arg.as_ref(),
-                span_with_def_site_ctxt(db, span, actual_macro_call, loc.def.edition),
-                span_with_call_site_ctxt(db, span, actual_macro_call, loc.def.edition),
-                span_with_mixed_site_ctxt(db, span, actual_macro_call, loc.def.edition),
+                span_with_def_site_ctxt(db, span, actual_macro_call.into(), loc.def.edition),
+                span_with_call_site_ctxt(db, span, actual_macro_call.into(), loc.def.edition),
+                span_with_mixed_site_ctxt(db, span, actual_macro_call.into(), loc.def.edition),
             )
         }
         MacroDefKind::BuiltInAttr(_, it) if it.is_derive() => {
@@ -318,14 +331,16 @@ pub fn expand_speculative(
     Some((node.syntax_node(), token))
 }
 
-fn ast_id_map(db: &dyn ExpandDatabase, file_id: span::HirFileId) -> triomphe::Arc<AstIdMap> {
+fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> triomphe::Arc<AstIdMap> {
     triomphe::Arc::new(AstIdMap::from_source(&db.parse_or_expand(file_id)))
 }
 
+/// Main public API -- parses a hir file, not caring whether it's a real
+/// file or a macro expansion.
 fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode {
-    match file_id.repr() {
-        HirFileIdRepr::FileId(file_id) => db.parse(file_id).syntax_node(),
-        HirFileIdRepr::MacroFile(macro_file) => {
+    match file_id {
+        HirFileId::FileId(file_id) => db.parse(file_id).syntax_node(),
+        HirFileId::MacroFile(macro_file) => {
             db.parse_macro_expansion(macro_file).value.0.syntax_node()
         }
     }
@@ -335,14 +350,13 @@ fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode {
 // instead of having it be untyped
 fn parse_macro_expansion(
     db: &dyn ExpandDatabase,
-    macro_file: MacroFileId,
+    macro_file: MacroCallId,
 ) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> {
     let _p = tracing::info_span!("parse_macro_expansion").entered();
-    let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+    let loc = db.lookup_intern_macro_call(macro_file);
     let def_edition = loc.def.edition;
     let expand_to = loc.expand_to();
-    let mbe::ValueResult { value: (tt, matched_arm), err } =
-        macro_expand(db, macro_file.macro_call_id, loc);
+    let mbe::ValueResult { value: (tt, matched_arm), err } = macro_expand(db, macro_file, loc);
 
     let (parse, mut rev_token_map) = token_tree_to_syntax_node(
         db,
@@ -363,23 +377,19 @@ fn parse_macro_expansion_error(
     macro_call_id: MacroCallId,
 ) -> Option<Arc<ExpandResult<Arc<[SyntaxError]>>>> {
     let e: ExpandResult<Arc<[SyntaxError]>> =
-        db.parse_macro_expansion(MacroFileId { macro_call_id }).map(|it| Arc::from(it.0.errors()));
-    if e.value.is_empty() && e.err.is_none() {
-        None
-    } else {
-        Some(Arc::new(e))
-    }
+        db.parse_macro_expansion(macro_call_id).map(|it| Arc::from(it.0.errors()));
+    if e.value.is_empty() && e.err.is_none() { None } else { Some(Arc::new(e)) }
 }
 
 pub(crate) fn parse_with_map(
     db: &dyn ExpandDatabase,
     file_id: HirFileId,
 ) -> (Parse<SyntaxNode>, SpanMap) {
-    match file_id.repr() {
-        HirFileIdRepr::FileId(file_id) => {
+    match file_id {
+        HirFileId::FileId(file_id) => {
             (db.parse(file_id).to_syntax(), SpanMap::RealSpanMap(db.real_span_map(file_id)))
         }
-        HirFileIdRepr::MacroFile(macro_file) => {
+        HirFileId::MacroFile(macro_file) => {
             let (parse, map) = db.parse_macro_expansion(macro_file).value;
             (parse, SpanMap::ExpansionSpanMap(map))
         }
@@ -597,7 +607,7 @@ fn macro_expand(
 
     let (ExpandResult { value: (tt, matched_arm), err }, span) = match loc.def.kind {
         MacroDefKind::ProcMacro(..) => {
-            return db.expand_proc_macro(macro_call_id).map(CowArc::Arc).zip_val(None)
+            return db.expand_proc_macro(macro_call_id).map(CowArc::Arc).zip_val(None);
         }
         _ => {
             let (macro_arg, undo_info, span) =
@@ -699,9 +709,9 @@ fn expand_proc_macro(
             loc.krate,
             &macro_arg,
             attr_arg,
-            span_with_def_site_ctxt(db, span, id, loc.def.edition),
-            span_with_call_site_ctxt(db, span, id, loc.def.edition),
-            span_with_mixed_site_ctxt(db, span, id, loc.def.edition),
+            span_with_def_site_ctxt(db, span, id.into(), loc.def.edition),
+            span_with_call_site_ctxt(db, span, id.into(), loc.def.edition),
+            span_with_mixed_site_ctxt(db, span, id.into(), loc.def.edition),
         )
     };
 
@@ -728,12 +738,7 @@ pub(crate) fn token_tree_to_syntax_node(
         ExpandTo::Type => syntax_bridge::TopEntryPoint::Type,
         ExpandTo::Expr => syntax_bridge::TopEntryPoint::Expr,
     };
-    syntax_bridge::token_tree_to_syntax_node(
-        tt,
-        entry_point,
-        &mut |ctx| ctx.lookup(db).edition,
-        edition,
-    )
+    syntax_bridge::token_tree_to_syntax_node(tt, entry_point, &mut |ctx| ctx.edition(db), edition)
 }
 
 fn check_tt_count(tt: &tt::TopSubtree) -> Result<(), ExpandResult<()>> {
@@ -755,8 +760,10 @@ fn check_tt_count(tt: &tt::TopSubtree) -> Result<(), ExpandResult<()>> {
     }
 }
 
-fn setup_syntax_context_root(db: &dyn ExpandDatabase) {
-    for edition in Edition::iter() {
-        db.intern_syntax_context(SyntaxContextData::root(edition));
-    }
+fn intern_macro_call(db: &dyn ExpandDatabase, macro_call: MacroCallLoc) -> MacroCallId {
+    MacroCallId::new(db, macro_call)
+}
+
+fn lookup_intern_macro_call(db: &dyn ExpandDatabase, macro_call: MacroCallId) -> MacroCallLoc {
+    macro_call.loc(db)
 }
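
Several queries above are described as firewall queries (`macro_arg`, `proc_macro_span`): consumers depend on a small derived value instead of the whole parse, so most edits do not invalidate them. A self-contained illustration of the idea, with a toy extractor standing in for the real queries and a manual comparison standing in for salsa's dependency tracking:

    #[derive(Clone, PartialEq, Debug)]
    struct MacroArg(String); // the narrow value: just the macro call's argument tokens

    fn extract_macro_arg(file_text: &str) -> MacroArg {
        // Pretend the macro call is everything between `!(` and the next `)`.
        let start = file_text.find("!(").map(|i| i + 2).unwrap_or(0);
        let end = file_text[start..].find(')').map(|i| start + i).unwrap_or(file_text.len());
        MacroArg(file_text[start..end].to_owned())
    }

    fn main() {
        let v1 = "fn main() { println!(\"hi\"); } // comment";
        let v2 = "fn main() { println!(\"hi\"); } // edited comment";
        let (a1, a2) = (extract_macro_arg(v1), extract_macro_arg(v2));
        // The whole file changed, but the firewall value did not, so anything
        // that only depends on `MacroArg` would be reused rather than recomputed.
        assert_eq!(a1, a2);
        println!("{a2:?}");
    }
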
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs b/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs
index fef77acb7bbb7..1fa682ce3a2d4 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs
@@ -1,18 +1,19 @@
 //! Compiled declarative macro expanders (`macro_rules!` and `macro`)
 
-use base_db::CrateId;
+use base_db::Crate;
 use intern::sym;
-use span::{Edition, HirFileIdRepr, MacroCallId, Span, SyntaxContextId};
+use span::{Edition, Span, SyntaxContext};
 use stdx::TupleExt;
-use syntax::{ast, AstNode};
+use syntax::{AstNode, ast};
 use syntax_bridge::DocCommentDesugarMode;
 use triomphe::Arc;
 
 use crate::{
+    AstId, ExpandError, ExpandErrorKind, ExpandResult, HirFileId, Lookup, MacroCallId,
     attrs::RawAttrs,
     db::ExpandDatabase,
-    hygiene::{apply_mark, Transparency},
-    tt, AstId, ExpandError, ExpandErrorKind, ExpandResult, Lookup,
+    hygiene::{Transparency, apply_mark},
+    tt,
 };
 
 /// Old-style `macro_rules` or the new macros 2.0
@@ -41,7 +42,10 @@ impl DeclarativeMacroExpander {
                 .mac
                 .expand(
                     &tt,
-                    |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency, self.edition),
+                    |s| {
+                        s.ctx =
+                            apply_mark(db, s.ctx, call_id.into(), self.transparency, self.edition)
+                    },
                     span,
                     loc.def.edition,
                 )
@@ -70,7 +74,7 @@ impl DeclarativeMacroExpander {
 
     pub(crate) fn expander(
         db: &dyn ExpandDatabase,
-        def_crate: CrateId,
+        def_crate: Crate,
         id: AstId<ast::Macro>,
     ) -> Arc<DeclarativeMacroExpander> {
         let (root, map) = crate::db::parse_with_map(db, id.file_id);
@@ -84,7 +88,7 @@ impl DeclarativeMacroExpander {
                 .find(|it| {
                     it.path
                         .as_ident()
-                        .map(|it| *it == sym::rustc_macro_transparency.clone())
+                        .map(|it| *it == sym::rustc_macro_transparency)
                         .unwrap_or(false)
                 })?
                 .token_tree_value()?
@@ -100,14 +104,14 @@ impl DeclarativeMacroExpander {
                 _ => None,
             }
         };
-        let ctx_edition = |ctx: SyntaxContextId| {
-            let crate_graph = db.crate_graph();
+        let ctx_edition = |ctx: SyntaxContext| {
             if ctx.is_root() {
-                crate_graph[def_crate].edition
+                def_crate.data(db).edition
             } else {
-                let data = db.lookup_intern_syntax_context(ctx);
                 // UNWRAP-SAFETY: Only the root context has no outer expansion
-                crate_graph[data.outer_expn.unwrap().lookup(db).def.krate].edition
+                let krate =
+                    db.lookup_intern_macro_call(ctx.outer_expn(db).unwrap().into()).def.krate;
+                krate.data(db).edition
             }
         };
         let (mac, transparency) = match id.to_ptr(db).to_node(&root) {
@@ -160,9 +164,9 @@ impl DeclarativeMacroExpander {
                 transparency(&macro_def).unwrap_or(Transparency::Opaque),
             ),
         };
-        let edition = ctx_edition(match id.file_id.repr() {
-            HirFileIdRepr::MacroFile(macro_file) => macro_file.macro_call_id.lookup(db).ctxt,
-            HirFileIdRepr::FileId(file) => SyntaxContextId::root(file.edition()),
+        let edition = ctx_edition(match id.file_id {
+            HirFileId::MacroFile(macro_file) => macro_file.lookup(db).ctxt,
+            HirFileId::FileId(file) => SyntaxContext::root(file.edition(db)),
         });
         Arc::new(DeclarativeMacroExpander { mac, transparency, edition })
     }
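
Editor's note: the rewritten `ctx_edition` closure above picks the edition used to expand a declarative macro: the defining crate's edition when the syntax context is the root, otherwise the edition of the crate defining the macro behind the context's outer expansion. The sketch below shows only that decision, with the lookup chain (context -> outer expansion -> def crate -> edition) flattened into one field; every name is invented for illustration.

// Minimal sketch of the edition choice, assuming a flattened context type.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Edition {
    E2015,
    E2024,
}

struct Ctx {
    // `None` models the root context (no outer expansion); `Some(..)` models the
    // edition of the crate defining the macro behind the outer expansion.
    outer_expn_crate_edition: Option<Edition>,
}

fn ctx_edition(ctx: &Ctx, def_crate_edition: Edition) -> Edition {
    match ctx.outer_expn_crate_edition {
        None => def_crate_edition,
        Some(edition) => edition,
    }
}

fn main() {
    let root = Ctx { outer_expn_crate_edition: None };
    let nested = Ctx { outer_expn_crate_edition: Some(Edition::E2015) };
    assert_eq!(ctx_edition(&root, Edition::E2024), Edition::E2024);
    assert_eq!(ctx_edition(&nested, Edition::E2024), Edition::E2015);
}
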
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
index f476d1b564c4c..28d3fcdab9dba 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
@@ -18,28 +18,34 @@
 //!
 //!
 //! See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
-use base_db::CrateId;
-use span::SyntaxContextId;
-use syntax::{ted, Parse, SyntaxElement, SyntaxNode, TextSize, WalkEvent};
+use base_db::Crate;
+use span::SyntaxContext;
+use syntax::{AstPtr, Parse, SyntaxElement, SyntaxNode, TextSize, WalkEvent, ted};
 use syntax_bridge::DocCommentDesugarMode;
 use triomphe::Arc;
 
 use crate::{
+    AstId, EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, InFile,
+    MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind,
     ast::{self, AstNode},
     db::ExpandDatabase,
     mod_path::ModPath,
-    AstId, EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, InFile, Intern,
-    MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind,
 };
 
+pub type EagerCallBackFn<'a> = &'a mut dyn FnMut(
+    InFile<(syntax::AstPtr<ast::MacroCall>, span::FileAstId<ast::MacroCall>)>,
+    MacroCallId,
+);
+
 pub fn expand_eager_macro_input(
     db: &dyn ExpandDatabase,
-    krate: CrateId,
+    krate: Crate,
     macro_call: &ast::MacroCall,
     ast_id: AstId<ast::MacroCall>,
     def: MacroDefId,
-    call_site: SyntaxContextId,
+    call_site: SyntaxContext,
     resolver: &dyn Fn(&ModPath) -> Option<MacroDefId>,
+    eager_callback: EagerCallBackFn<'_>,
 ) -> ExpandResult<Option<MacroCallId>> {
     let expand_to = ExpandTo::from_call_site(macro_call);
 
@@ -47,17 +53,17 @@ pub fn expand_eager_macro_input(
     // When `lazy_expand` is called, its *parent* file must already exist.
     // Here we store an eager macro id for the argument expanded subtree
     // for that purpose.
-    let arg_id = MacroCallLoc {
+    let loc = MacroCallLoc {
         def,
         krate,
         kind: MacroCallKind::FnLike { ast_id, expand_to: ExpandTo::Expr, eager: None },
         ctxt: call_site,
-    }
-    .intern(db);
+    };
+    let arg_id = db.intern_macro_call(loc);
     #[allow(deprecated)] // builtin eager macros are never derives
     let (_, _, span) = db.macro_arg(arg_id);
     let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } =
-        db.parse_macro_expansion(arg_id.as_macro_file());
+        db.parse_macro_expansion(arg_id);
 
     let mut arg_map = ExpansionSpanMap::empty();
 
@@ -67,10 +73,11 @@ pub fn expand_eager_macro_input(
             &arg_exp_map,
             &mut arg_map,
             TextSize::new(0),
-            InFile::new(arg_id.as_file(), arg_exp.syntax_node()),
+            InFile::new(arg_id.into(), arg_exp.syntax_node()),
             krate,
             call_site,
             resolver,
+            eager_callback,
         )
     };
     let err = parse_err.or(err);
@@ -107,7 +114,7 @@ pub fn expand_eager_macro_input(
         ctxt: call_site,
     };
 
-    ExpandResult { value: Some(loc.intern(db)), err }
+    ExpandResult { value: Some(db.intern_macro_call(loc)), err }
 }
 
 fn lazy_expand(
@@ -115,8 +122,9 @@ fn lazy_expand(
     def: &MacroDefId,
     macro_call: &ast::MacroCall,
     ast_id: AstId<ast::MacroCall>,
-    krate: CrateId,
-    call_site: SyntaxContextId,
+    krate: Crate,
+    call_site: SyntaxContext,
+    eager_callback: EagerCallBackFn<'_>,
 ) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<ExpansionSpanMap>)> {
     let expand_to = ExpandTo::from_call_site(macro_call);
     let id = def.make_call(
@@ -125,10 +133,9 @@ fn lazy_expand(
         MacroCallKind::FnLike { ast_id, expand_to, eager: None },
         call_site,
     );
-    let macro_file = id.as_macro_file();
+    eager_callback(ast_id.map(|ast_id| (AstPtr::new(macro_call), ast_id)), id);
 
-    db.parse_macro_expansion(macro_file)
-        .map(|parse| (InFile::new(macro_file.into(), parse.0), parse.1))
+    db.parse_macro_expansion(id).map(|parse| (InFile::new(id.into(), parse.0), parse.1))
 }
 
 fn eager_macro_recur(
@@ -137,9 +144,10 @@ fn eager_macro_recur(
     expanded_map: &mut ExpansionSpanMap,
     mut offset: TextSize,
     curr: InFile<SyntaxNode>,
-    krate: CrateId,
-    call_site: SyntaxContextId,
+    krate: Crate,
+    call_site: SyntaxContext,
     macro_resolver: &dyn Fn(&ModPath) -> Option<MacroDefId>,
+    eager_callback: EagerCallBackFn<'_>,
 ) -> ExpandResult<Option<(SyntaxNode, TextSize)>> {
     let original = curr.value.clone_for_update();
 
@@ -176,7 +184,7 @@ fn eager_macro_recur(
             Some(path) => match macro_resolver(&path) {
                 Some(def) => def,
                 None => {
-                    let edition = db.crate_graph()[krate].edition;
+                    let edition = krate.data(db).edition;
                     error = Some(ExpandError::other(
                         span_map.span_at(call.syntax().text_range().start()),
                         format!("unresolved macro {}", path.display(db, edition)),
@@ -205,11 +213,16 @@ fn eager_macro_recur(
                     def,
                     call_site,
                     macro_resolver,
+                    eager_callback,
                 );
                 match value {
                     Some(call_id) => {
+                        eager_callback(
+                            curr.with_value(ast_id).map(|ast_id| (AstPtr::new(&call), ast_id)),
+                            call_id,
+                        );
                         let ExpandResult { value: (parse, map), err: err2 } =
-                            db.parse_macro_expansion(call_id.as_macro_file());
+                            db.parse_macro_expansion(call_id);
 
                         map.iter().for_each(|(o, span)| expanded_map.push(o + offset, span));
 
@@ -230,8 +243,15 @@ fn eager_macro_recur(
             | MacroDefKind::BuiltInAttr(..)
             | MacroDefKind::BuiltInDerive(..)
             | MacroDefKind::ProcMacro(..) => {
-                let ExpandResult { value: (parse, tm), err } =
-                    lazy_expand(db, &def, &call, curr.with_value(ast_id), krate, call_site);
+                let ExpandResult { value: (parse, tm), err } = lazy_expand(
+                    db,
+                    &def,
+                    &call,
+                    curr.with_value(ast_id),
+                    krate,
+                    call_site,
+                    eager_callback,
+                );
 
                 // replace macro inside
                 let ExpandResult { value, err: error } = eager_macro_recur(
@@ -244,6 +264,7 @@ fn eager_macro_recur(
                     krate,
                     call_site,
                     macro_resolver,
+                    eager_callback,
                 );
                 let err = err.or(error);
 
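
Editor's note: the new `EagerCallBackFn` alias and the `eager_callback` parameter threaded through `expand_eager_macro_input`, `lazy_expand`, and `eager_macro_recur` let callers observe every macro call interned during eager expansion. The sketch below models only that callback-threading shape; `CallId`, `EagerCallback`, and `expand_eagerly` are illustrative placeholders rather than the real signatures.

// Sketch: a walker that mints an id per nested call and reports each one to a
// caller-supplied callback, mirroring how the diff forwards (call site, id) pairs.
type CallId = u32;

// Analogue of `EagerCallBackFn<'a>`: a mutable callback taking a call-site
// description and the id just created for it.
type EagerCallback<'a> = &'a mut dyn FnMut(&str, CallId);

fn expand_eagerly(calls: &[&str], next_id: &mut CallId, on_call: EagerCallback<'_>) -> Vec<CallId> {
    calls
        .iter()
        .copied()
        .map(|site| {
            let id = *next_id;
            *next_id += 1;
            // The real code invokes the callback right after the call is interned.
            on_call(site, id);
            id
        })
        .collect()
}

fn main() {
    let mut recorded = Vec::new();
    let mut next_id = 0;
    let ids = expand_eagerly(
        &["concat!(..)", "env!(..)"],
        &mut next_id,
        &mut |site, id| recorded.push((site.to_owned(), id)),
    );
    assert_eq!(ids, vec![0, 1]);
    assert_eq!(recorded.len(), 2);
}
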
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/files.rs b/src/tools/rust-analyzer/crates/hir-expand/src/files.rs
index f3bcc77268224..321ee8feb9a8c 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/files.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/files.rs
@@ -2,15 +2,13 @@
 use std::borrow::Borrow;
 
 use either::Either;
-use span::{
-    AstIdNode, EditionedFileId, ErasedFileAstId, FileAstId, HirFileId, HirFileIdRepr, MacroFileId,
-    SyntaxContextId,
-};
+use span::{AstIdNode, ErasedFileAstId, FileAstId, FileId, SyntaxContext};
 use syntax::{AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextSize};
 
 use crate::{
+    EditionedFileId, HirFileId, MacroCallId, MacroKind,
     db::{self, ExpandDatabase},
-    map_node_range_up, map_node_range_up_rooted, span_for_offset, MacroFileIdExt, MacroKind,
+    map_node_range_up, map_node_range_up_rooted, span_for_offset,
 };
 
 /// `InFile<T>` stores a value of `T` inside a particular file/syntax tree.
@@ -26,7 +24,7 @@ pub struct InFileWrapper<FileKind, T> {
     pub value: T,
 }
 pub type InFile<T> = InFileWrapper<HirFileId, T>;
-pub type InMacroFile<T> = InFileWrapper<MacroFileId, T>;
+pub type InMacroFile<T> = InFileWrapper<MacroCallId, T>;
 pub type InRealFile<T> = InFileWrapper<EditionedFileId, T>;
 
 #[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
@@ -35,12 +33,13 @@ pub struct FilePositionWrapper<FileKind> {
     pub offset: TextSize,
 }
 pub type HirFilePosition = FilePositionWrapper<HirFileId>;
-pub type MacroFilePosition = FilePositionWrapper<MacroFileId>;
+pub type MacroFilePosition = FilePositionWrapper<MacroCallId>;
 pub type FilePosition = FilePositionWrapper<EditionedFileId>;
 
-impl From<FilePositionWrapper<EditionedFileId>> for FilePositionWrapper<span::FileId> {
-    fn from(value: FilePositionWrapper<EditionedFileId>) -> Self {
-        FilePositionWrapper { file_id: value.file_id.into(), offset: value.offset }
+impl FilePosition {
+    #[inline]
+    pub fn into_file_id(self, db: &dyn ExpandDatabase) -> FilePositionWrapper<FileId> {
+        FilePositionWrapper { file_id: self.file_id.file_id(db), offset: self.offset }
     }
 }
 #[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
@@ -49,12 +48,13 @@ pub struct FileRangeWrapper<FileKind> {
     pub range: TextRange,
 }
 pub type HirFileRange = FileRangeWrapper<HirFileId>;
-pub type MacroFileRange = FileRangeWrapper<MacroFileId>;
+pub type MacroFileRange = FileRangeWrapper<MacroCallId>;
 pub type FileRange = FileRangeWrapper<EditionedFileId>;
 
-impl From<FileRangeWrapper<EditionedFileId>> for FileRangeWrapper<span::FileId> {
-    fn from(value: FileRangeWrapper<EditionedFileId>) -> Self {
-        FileRangeWrapper { file_id: value.file_id.into(), range: value.range }
+impl FileRange {
+    #[inline]
+    pub fn into_file_id(self, db: &dyn ExpandDatabase) -> FileRangeWrapper<FileId> {
+        FileRangeWrapper { file_id: self.file_id.file_id(db), range: self.range }
     }
 }
 
@@ -76,6 +76,9 @@ impl<N: AstIdNode> AstId<N> {
     pub fn to_ptr(&self, db: &dyn ExpandDatabase) -> AstPtr<N> {
         db.ast_id_map(self.file_id).get(self.value)
     }
+    pub fn erase(&self) -> ErasedAstId {
+        crate::InFile::new(self.file_id, self.value.erase())
+    }
 }
 
 pub type ErasedAstId = crate::InFile<ErasedFileAstId>;
@@ -162,7 +165,7 @@ impl FileIdToSyntax for EditionedFileId {
         db.parse(self).syntax_node()
     }
 }
-impl FileIdToSyntax for MacroFileId {
+impl FileIdToSyntax for MacroCallId {
     fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
         db.parse_macro_expansion(self).value.0.syntax_node()
     }
@@ -215,7 +218,7 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
         let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
             Some(parent) => Some(node.with_value(parent)),
             None => db
-                .lookup_intern_macro_call(node.file_id.macro_file()?.macro_call_id)
+                .lookup_intern_macro_call(node.file_id.macro_file()?)
                 .to_node_item(db)
                 .syntax()
                 .cloned()
@@ -232,7 +235,7 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
         let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
             Some(parent) => Some(node.with_value(parent)),
             None => db
-                .lookup_intern_macro_call(node.file_id.macro_file()?.macro_call_id)
+                .lookup_intern_macro_call(node.file_id.macro_file()?)
                 .to_node_item(db)
                 .syntax()
                 .cloned()
@@ -272,11 +275,11 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
     ) -> Option<InRealFile<SyntaxNode>> {
         // This kind of upmapping can only be achieved in attribute expanded files,
         // as we don't have node inputs otherwise and therefore can't find an `N` node in the input
-        let file_id = match self.file_id.repr() {
-            HirFileIdRepr::FileId(file_id) => {
-                return Some(InRealFile { file_id, value: self.value.borrow().clone() })
+        let file_id = match self.file_id {
+            HirFileId::FileId(file_id) => {
+                return Some(InRealFile { file_id, value: self.value.borrow().clone() });
             }
-            HirFileIdRepr::MacroFile(m)
+            HirFileId::MacroFile(m)
                 if matches!(m.kind(db), MacroKind::Attr | MacroKind::AttrBuiltIn) =>
             {
                 m
@@ -284,7 +287,7 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
             _ => return None,
         };
 
-        let FileRange { file_id, range } = map_node_range_up_rooted(
+        let FileRange { file_id: editioned_file_id, range } = map_node_range_up_rooted(
             db,
             &db.expansion_span_map(file_id),
             self.value.borrow().text_range(),
@@ -292,13 +295,13 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
 
         let kind = self.kind();
         let value = db
-            .parse(file_id)
+            .parse(editioned_file_id)
             .syntax_node()
             .covering_element(range)
             .ancestors()
             .take_while(|it| it.text_range() == range)
             .find(|it| it.kind() == kind)?;
-        Some(InRealFile::new(file_id, value))
+        Some(InRealFile::new(editioned_file_id, value))
     }
 }
 
@@ -307,7 +310,7 @@ impl InFile<&SyntaxNode> {
     pub fn original_file_range_opt(
         self,
         db: &dyn db::ExpandDatabase,
-    ) -> Option<(FileRange, SyntaxContextId)> {
+    ) -> Option<(FileRange, SyntaxContext)> {
         self.borrow().map(SyntaxNode::text_range).original_node_file_range_opt(db)
     }
 }
@@ -324,9 +327,9 @@ impl InMacroFile<SyntaxToken> {
 impl InFile<SyntaxToken> {
     /// Falls back to the macro call range if the node cannot be mapped up fully.
     pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
-        match self.file_id.repr() {
-            HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
-            HirFileIdRepr::MacroFile(mac_file) => {
+        match self.file_id {
+            HirFileId::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
+            HirFileId::MacroFile(mac_file) => {
                 let (range, ctxt) = span_for_offset(
                     db,
                     &db.expansion_span_map(mac_file),
@@ -340,7 +343,7 @@ impl InFile<SyntaxToken> {
                 }
 
                 // Fall back to whole macro call.
-                let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
+                let loc = db.lookup_intern_macro_call(mac_file);
                 loc.kind.original_call_range(db)
             }
         }
@@ -348,11 +351,11 @@ impl InFile<SyntaxToken> {
 
     /// Attempts to map the syntax node back up its macro calls.
     pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option<FileRange> {
-        match self.file_id.repr() {
-            HirFileIdRepr::FileId(file_id) => {
+        match self.file_id {
+            HirFileId::FileId(file_id) => {
                 Some(FileRange { file_id, range: self.value.text_range() })
             }
-            HirFileIdRepr::MacroFile(mac_file) => {
+            HirFileId::MacroFile(mac_file) => {
                 let (range, ctxt) = span_for_offset(
                     db,
                     &db.expansion_span_map(mac_file),
@@ -361,18 +364,14 @@ impl InFile<SyntaxToken> {
 
                 // FIXME: Figure out an API that makes proper use of ctx, this only exists to
                 // keep pre-token map rewrite behaviour.
-                if ctxt.is_root() {
-                    Some(range)
-                } else {
-                    None
-                }
+                if ctxt.is_root() { Some(range) } else { None }
             }
         }
     }
 }
 
 impl InMacroFile<TextSize> {
-    pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> (FileRange, SyntaxContextId) {
+    pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> (FileRange, SyntaxContext) {
         span_for_offset(db, &db.expansion_span_map(self.file_id), self.value)
     }
 }
@@ -381,17 +380,17 @@ impl InFile<TextRange> {
     pub fn original_node_file_range(
         self,
         db: &dyn db::ExpandDatabase,
-    ) -> (FileRange, SyntaxContextId) {
-        match self.file_id.repr() {
-            HirFileIdRepr::FileId(file_id) => {
-                (FileRange { file_id, range: self.value }, SyntaxContextId::root(file_id.edition()))
+    ) -> (FileRange, SyntaxContext) {
+        match self.file_id {
+            HirFileId::FileId(file_id) => {
+                (FileRange { file_id, range: self.value }, SyntaxContext::root(file_id.edition(db)))
             }
-            HirFileIdRepr::MacroFile(mac_file) => {
+            HirFileId::MacroFile(mac_file) => {
                 match map_node_range_up(db, &db.expansion_span_map(mac_file), self.value) {
                     Some(it) => it,
                     None => {
-                        let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
-                        (loc.kind.original_call_range(db), SyntaxContextId::root(loc.def.edition))
+                        let loc = db.lookup_intern_macro_call(mac_file);
+                        (loc.kind.original_call_range(db), SyntaxContext::root(loc.def.edition))
                     }
                 }
             }
@@ -399,13 +398,13 @@ impl InFile<TextRange> {
     }
 
     pub fn original_node_file_range_rooted(self, db: &dyn db::ExpandDatabase) -> FileRange {
-        match self.file_id.repr() {
-            HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value },
-            HirFileIdRepr::MacroFile(mac_file) => {
+        match self.file_id {
+            HirFileId::FileId(file_id) => FileRange { file_id, range: self.value },
+            HirFileId::MacroFile(mac_file) => {
                 match map_node_range_up_rooted(db, &db.expansion_span_map(mac_file), self.value) {
                     Some(it) => it,
                     _ => {
-                        let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
+                        let loc = db.lookup_intern_macro_call(mac_file);
                         loc.kind.original_call_range(db)
                     }
                 }
@@ -417,13 +416,13 @@ impl InFile<TextRange> {
         self,
         db: &dyn db::ExpandDatabase,
     ) -> FileRange {
-        match self.file_id.repr() {
-            HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value },
-            HirFileIdRepr::MacroFile(mac_file) => {
+        match self.file_id {
+            HirFileId::FileId(file_id) => FileRange { file_id, range: self.value },
+            HirFileId::MacroFile(mac_file) => {
                 match map_node_range_up_rooted(db, &db.expansion_span_map(mac_file), self.value) {
                     Some(it) => it,
                     _ => {
-                        let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
+                        let loc = db.lookup_intern_macro_call(mac_file);
                         loc.kind.original_call_range_with_body(db)
                     }
                 }
@@ -434,13 +433,13 @@ impl InFile<TextRange> {
     pub fn original_node_file_range_opt(
         self,
         db: &dyn db::ExpandDatabase,
-    ) -> Option<(FileRange, SyntaxContextId)> {
-        match self.file_id.repr() {
-            HirFileIdRepr::FileId(file_id) => Some((
+    ) -> Option<(FileRange, SyntaxContext)> {
+        match self.file_id {
+            HirFileId::FileId(file_id) => Some((
                 FileRange { file_id, range: self.value },
-                SyntaxContextId::root(file_id.edition()),
+                SyntaxContext::root(file_id.edition(db)),
             )),
-            HirFileIdRepr::MacroFile(mac_file) => {
+            HirFileId::MacroFile(mac_file) => {
                 map_node_range_up(db, &db.expansion_span_map(mac_file), self.value)
             }
         }
@@ -451,34 +450,34 @@ impl<N: AstNode> InFile<N> {
     pub fn original_ast_node_rooted(self, db: &dyn db::ExpandDatabase) -> Option<InRealFile<N>> {
         // This kind of upmapping can only be achieved in attribute expanded files,
         // as we don't have node inputs otherwise and therefore can't find an `N` node in the input
-        let file_id = match self.file_id.repr() {
-            HirFileIdRepr::FileId(file_id) => {
-                return Some(InRealFile { file_id, value: self.value })
+        let file_id = match self.file_id {
+            HirFileId::FileId(file_id) => {
+                return Some(InRealFile { file_id, value: self.value });
             }
-            HirFileIdRepr::MacroFile(m) => m,
+            HirFileId::MacroFile(m) => m,
         };
         if !matches!(file_id.kind(db), MacroKind::Attr | MacroKind::AttrBuiltIn) {
             return None;
         }
 
-        let FileRange { file_id, range } = map_node_range_up_rooted(
+        let FileRange { file_id: editioned_file_id, range } = map_node_range_up_rooted(
             db,
             &db.expansion_span_map(file_id),
             self.value.syntax().text_range(),
         )?;
 
         // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes?
-        let anc = db.parse(file_id).syntax_node().covering_element(range);
+        let anc = db.parse(editioned_file_id).syntax_node().covering_element(range);
         let value = anc.ancestors().find_map(N::cast)?;
-        Some(InRealFile::new(file_id, value))
+        Some(InRealFile::new(editioned_file_id, value))
     }
 }
 
 impl<T> InFile<T> {
     pub fn into_real_file(self) -> Result<InRealFile<T>, InFile<T>> {
-        match self.file_id.repr() {
-            HirFileIdRepr::FileId(file_id) => Ok(InRealFile { file_id, value: self.value }),
-            HirFileIdRepr::MacroFile(_) => Err(self),
+        match self.file_id {
+            HirFileId::FileId(file_id) => Ok(InRealFile { file_id, value: self.value }),
+            HirFileId::MacroFile(_) => Err(self),
         }
     }
 }
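
Editor's note: most of the churn in files.rs comes from `HirFileId` now being matched directly as a two-variant enum instead of going through `HirFileIdRepr` via `.repr()`. Below is a self-contained sketch of that shape, mirroring `InFile::into_real_file`; the types are toys, not the real `hir-expand` definitions.

// Toy model: `HirFileId` as an enum over real files and macro expansions, and an
// `InFile` wrapper that can be narrowed to a real file only for the first variant.
#[derive(Clone, Copy, Debug)]
enum HirFileId {
    FileId(u32),    // a real, on-disk file
    MacroFile(u32), // a macro expansion
}

#[derive(Debug)]
struct InFile<T> {
    file_id: HirFileId,
    value: T,
}

#[derive(Debug)]
struct InRealFile<T> {
    file_id: u32,
    value: T,
}

impl<T> InFile<T> {
    // Mirrors `InFile::into_real_file` in the diff: succeed only for real files.
    fn into_real_file(self) -> Result<InRealFile<T>, InFile<T>> {
        match self.file_id {
            HirFileId::FileId(file_id) => Ok(InRealFile { file_id, value: self.value }),
            HirFileId::MacroFile(_) => Err(self),
        }
    }
}

fn main() {
    let real = InFile { file_id: HirFileId::FileId(7), value: "fn main() {}" };
    let real_file = real.into_real_file().expect("real files always narrow");
    assert_eq!((real_file.file_id, real_file.value), (7, "fn main() {}"));

    let expanded = InFile { file_id: HirFileId::MacroFile(3), value: "expansion" };
    assert!(expanded.into_real_file().is_err());
}
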
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
index 28894537d48f7..4a4a3e52aea43 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
@@ -4,13 +4,14 @@
 use intern::sym;
 use rustc_hash::{FxHashMap, FxHashSet};
 use span::{
-    ErasedFileAstId, Span, SpanAnchor, SyntaxContextId, FIXUP_ERASED_FILE_AST_ID_MARKER,
-    ROOT_ERASED_FILE_AST_ID,
+    ErasedFileAstId, FIXUP_ERASED_FILE_AST_ID_MARKER, ROOT_ERASED_FILE_AST_ID, Span, SpanAnchor,
+    SyntaxContext,
 };
 use stdx::never;
 use syntax::{
+    SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize,
     ast::{self, AstNode, HasLoopBody},
-    match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize,
+    match_ast,
 };
 use syntax_bridge::DocCommentDesugarMode;
 use triomphe::Arc;
@@ -81,7 +82,7 @@ pub(crate) fn fixup_syntax(
             original.push(original_tree);
             let span = span_map.span_for_range(node_range);
             let replacement = Leaf::Ident(Ident {
-                sym: sym::__ra_fixup.clone(),
+                sym: sym::__ra_fixup,
                 span: Span {
                     range: TextRange::new(TextSize::new(idx), FIXUP_DUMMY_RANGE_END),
                     anchor: SpanAnchor { ast_id: FIXUP_DUMMY_AST_ID, ..span.anchor },
@@ -101,7 +102,7 @@ pub(crate) fn fixup_syntax(
                         // incomplete field access: some_expr.|
                         append.insert(node.clone().into(), vec![
                             Leaf::Ident(Ident {
-                                sym: sym::__ra_fixup.clone(),
+                                sym: sym::__ra_fixup,
                                 span: fake_span(node_range),
                                 is_raw: tt::IdentIsRaw::No
                             }),
@@ -140,7 +141,7 @@ pub(crate) fn fixup_syntax(
                         };
                         append.insert(if_token.into(), vec![
                             Leaf::Ident(Ident {
-                                sym: sym::__ra_fixup.clone(),
+                                sym: sym::__ra_fixup,
                                 span: fake_span(node_range),
                                 is_raw: tt::IdentIsRaw::No
                             }),
@@ -170,7 +171,7 @@ pub(crate) fn fixup_syntax(
                         };
                         append.insert(while_token.into(), vec![
                             Leaf::Ident(Ident {
-                                sym: sym::__ra_fixup.clone(),
+                                sym: sym::__ra_fixup,
                                 span: fake_span(node_range),
                                 is_raw: tt::IdentIsRaw::No
                             }),
@@ -216,7 +217,7 @@ pub(crate) fn fixup_syntax(
                         };
                         append.insert(match_token.into(), vec![
                             Leaf::Ident(Ident {
-                                sym: sym::__ra_fixup.clone(),
+                                sym: sym::__ra_fixup,
                                 span: fake_span(node_range),
                                 is_raw: tt::IdentIsRaw::No
                             }),
@@ -245,9 +246,9 @@ pub(crate) fn fixup_syntax(
                     };
 
                     let [pat, in_token, iter] = [
-                         sym::underscore.clone(),
-                         sym::in_.clone(),
-                         sym::__ra_fixup.clone(),
+                         sym::underscore,
+                         sym::in_,
+                         sym::__ra_fixup,
                     ].map(|sym|
                         Leaf::Ident(Ident {
                             sym,
@@ -283,7 +284,7 @@ pub(crate) fn fixup_syntax(
                         if it.name_ref().is_some() && it.expr().is_none() {
                             append.insert(colon.into(), vec![
                                 Leaf::Ident(Ident {
-                                    sym: sym::__ra_fixup.clone(),
+                                    sym: sym::__ra_fixup,
                                     span: fake_span(node_range),
                                     is_raw: tt::IdentIsRaw::No
                                 })
@@ -296,7 +297,7 @@ pub(crate) fn fixup_syntax(
                         if it.segment().is_none() {
                             append.insert(colon.into(), vec![
                                 Leaf::Ident(Ident {
-                                    sym: sym::__ra_fixup.clone(),
+                                    sym: sym::__ra_fixup,
                                     span: fake_span(node_range),
                                     is_raw: tt::IdentIsRaw::No
                                 })
@@ -308,7 +309,7 @@ pub(crate) fn fixup_syntax(
                     if it.body().is_none() {
                         append.insert(node.into(), vec![
                             Leaf::Ident(Ident {
-                                sym: sym::__ra_fixup.clone(),
+                                sym: sym::__ra_fixup,
                                 span: fake_span(node_range),
                                 is_raw: tt::IdentIsRaw::No
                             })
@@ -353,7 +354,7 @@ pub(crate) fn reverse_fixups(tt: &mut TopSubtree, undo_info: &SyntaxFixupUndoInf
         let span = |file_id| Span {
             range: TextRange::empty(TextSize::new(0)),
             anchor: SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
-            ctx: SyntaxContextId::root(span::Edition::Edition2015),
+            ctx: SyntaxContext::root(span::Edition::Edition2015),
         };
         delimiter.open = span(delimiter.open.anchor.file_id);
         delimiter.close = span(delimiter.close.anchor.file_id);
@@ -465,7 +466,7 @@ fn reverse_fixups_(tt: &mut TopSubtree, undo_info: &[TopSubtree]) {
 
 #[cfg(test)]
 mod tests {
-    use expect_test::{expect, Expect};
+    use expect_test::{Expect, expect};
     use span::{Edition, EditionedFileId, FileId};
     use syntax::TextRange;
     use syntax_bridge::DocCommentDesugarMode;
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
index fe05af0ac9d31..e7856920bc427 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
@@ -22,11 +22,11 @@
 // FIXME: Move this into the span crate? Not quite possible today as that depends on `MacroCallLoc`
 // which contains a bunch of unrelated things
 
-use std::iter;
+use std::{convert::identity, iter};
 
-use span::{Edition, MacroCallId, Span, SyntaxContextData, SyntaxContextId};
+use span::{Edition, MacroCallId, Span, SyntaxContext};
 
-use crate::db::{ExpandDatabase, InternSyntaxContextQuery};
+use crate::db::ExpandDatabase;
 
 pub use span::Transparency;
 
@@ -65,23 +65,23 @@ fn span_with_ctxt_from_mark(
     edition: Edition,
 ) -> Span {
     Span {
-        ctx: apply_mark(db, SyntaxContextId::root(edition), expn_id, transparency, edition),
+        ctx: apply_mark(db, SyntaxContext::root(edition), expn_id, transparency, edition),
         ..span
     }
 }
 
 pub(super) fn apply_mark(
     db: &dyn ExpandDatabase,
-    ctxt: SyntaxContextId,
-    call_id: MacroCallId,
+    ctxt: span::SyntaxContext,
+    call_id: span::MacroCallId,
     transparency: Transparency,
     edition: Edition,
-) -> SyntaxContextId {
+) -> SyntaxContext {
     if transparency == Transparency::Opaque {
         return apply_mark_internal(db, ctxt, call_id, transparency, edition);
     }
 
-    let call_site_ctxt = db.lookup_intern_macro_call(call_id).ctxt;
+    let call_site_ctxt = db.lookup_intern_macro_call(call_id.into()).ctxt;
     let mut call_site_ctxt = if transparency == Transparency::SemiTransparent {
         call_site_ctxt.normalize_to_macros_2_0(db)
     } else {
@@ -109,105 +109,87 @@ pub(super) fn apply_mark(
 
 fn apply_mark_internal(
     db: &dyn ExpandDatabase,
-    ctxt: SyntaxContextId,
+    ctxt: SyntaxContext,
     call_id: MacroCallId,
     transparency: Transparency,
     edition: Edition,
-) -> SyntaxContextId {
-    use base_db::ra_salsa;
-
+) -> SyntaxContext {
     let call_id = Some(call_id);
 
-    let syntax_context_data = db.lookup_intern_syntax_context(ctxt);
-    let mut opaque = syntax_context_data.opaque;
-    let mut opaque_and_semitransparent = syntax_context_data.opaque_and_semitransparent;
+    let mut opaque = ctxt.opaque(db);
+    let mut opaque_and_semitransparent = ctxt.opaque_and_semitransparent(db);
 
     if transparency >= Transparency::Opaque {
         let parent = opaque;
-        opaque = ra_salsa::plumbing::get_query_table::<InternSyntaxContextQuery>(db).get_or_insert(
-            (parent, call_id, transparency, edition),
-            |new_opaque| SyntaxContextData {
-                outer_expn: call_id,
-                outer_transparency: transparency,
-                parent,
-                opaque: new_opaque,
-                opaque_and_semitransparent: new_opaque,
-                edition,
-            },
-        );
+        opaque = SyntaxContext::new(db, call_id, transparency, edition, parent, identity, identity);
     }
 
     if transparency >= Transparency::SemiTransparent {
         let parent = opaque_and_semitransparent;
         opaque_and_semitransparent =
-            ra_salsa::plumbing::get_query_table::<InternSyntaxContextQuery>(db).get_or_insert(
-                (parent, call_id, transparency, edition),
-                |new_opaque_and_semitransparent| SyntaxContextData {
-                    outer_expn: call_id,
-                    outer_transparency: transparency,
-                    parent,
-                    opaque,
-                    opaque_and_semitransparent: new_opaque_and_semitransparent,
-                    edition,
-                },
-            );
+            SyntaxContext::new(db, call_id, transparency, edition, parent, |_| opaque, identity);
     }
 
     let parent = ctxt;
-    db.intern_syntax_context(SyntaxContextData {
-        outer_expn: call_id,
-        outer_transparency: transparency,
-        parent,
-        opaque,
-        opaque_and_semitransparent,
+    SyntaxContext::new(
+        db,
+        call_id,
+        transparency,
         edition,
-    })
+        parent,
+        |_| opaque,
+        |_| opaque_and_semitransparent,
+    )
 }
 
 pub trait SyntaxContextExt {
-    fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self;
-    fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self;
-    fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self;
-    fn remove_mark(&mut self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency);
-    fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency);
-    fn marks(self, db: &dyn ExpandDatabase) -> Vec<(MacroCallId, Transparency)>;
+    fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> span::SyntaxContext;
+    fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> span::SyntaxContext;
+    fn parent_ctxt(self, db: &dyn ExpandDatabase) -> span::SyntaxContext;
+    fn remove_mark(&mut self, db: &dyn ExpandDatabase)
+    -> (Option<span::MacroCallId>, Transparency);
+    fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<span::MacroCallId>, Transparency);
+    fn marks(self, db: &dyn ExpandDatabase) -> Vec<(span::MacroCallId, Transparency)>;
     fn is_opaque(self, db: &dyn ExpandDatabase) -> bool;
 }
 
-impl SyntaxContextExt for SyntaxContextId {
-    fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self {
-        db.lookup_intern_syntax_context(self).opaque_and_semitransparent
+impl SyntaxContextExt for SyntaxContext {
+    fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> span::SyntaxContext {
+        self.opaque_and_semitransparent(db)
     }
-    fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self {
-        db.lookup_intern_syntax_context(self).opaque
+    fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> span::SyntaxContext {
+        self.opaque(db)
     }
-    fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self {
-        db.lookup_intern_syntax_context(self).parent
+    fn parent_ctxt(self, db: &dyn ExpandDatabase) -> span::SyntaxContext {
+        self.parent(db)
     }
-    fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency) {
-        let data = db.lookup_intern_syntax_context(self);
-        (data.outer_expn, data.outer_transparency)
+    fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<span::MacroCallId>, Transparency) {
+        let data = self;
+        (data.outer_expn(db), data.outer_transparency(db))
     }
-    fn remove_mark(&mut self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency) {
-        let data = db.lookup_intern_syntax_context(*self);
-        *self = data.parent;
-        (data.outer_expn, data.outer_transparency)
+    fn remove_mark(
+        &mut self,
+        db: &dyn ExpandDatabase,
+    ) -> (Option<span::MacroCallId>, Transparency) {
+        let data = *self;
+        *self = data.parent(db);
+        (data.outer_expn(db), data.outer_transparency(db))
     }
-    fn marks(self, db: &dyn ExpandDatabase) -> Vec<(MacroCallId, Transparency)> {
+    fn marks(self, db: &dyn ExpandDatabase) -> Vec<(span::MacroCallId, Transparency)> {
         let mut marks = marks_rev(self, db).collect::<Vec<_>>();
         marks.reverse();
         marks
     }
     fn is_opaque(self, db: &dyn ExpandDatabase) -> bool {
-        !self.is_root() && db.lookup_intern_syntax_context(self).outer_transparency.is_opaque()
+        !self.is_root() && self.outer_transparency(db).is_opaque()
     }
 }
 
 // FIXME: Make this a SyntaxContextExt method once we have RPIT
 pub fn marks_rev(
-    ctxt: SyntaxContextId,
+    ctxt: SyntaxContext,
     db: &dyn ExpandDatabase,
-) -> impl Iterator<Item = (MacroCallId, Transparency)> + '_ {
+) -> impl Iterator<Item = (span::MacroCallId, Transparency)> + '_ {
     iter::successors(Some(ctxt), move |&mark| Some(mark.parent_ctxt(db)))
         .take_while(|&it| !it.is_root())
         .map(|ctx| {
@@ -217,59 +199,3 @@ pub fn marks_rev(
             (mark.0.unwrap(), mark.1)
         })
 }
-
-pub(crate) fn dump_syntax_contexts(db: &dyn ExpandDatabase) -> String {
-    use crate::db::{InternMacroCallLookupQuery, InternSyntaxContextLookupQuery};
-    use base_db::ra_salsa::debug::DebugQueryTable;
-
-    let mut s = String::from("Expansions:");
-    let mut entries = InternMacroCallLookupQuery.in_db(db).entries::<Vec<_>>();
-    entries.sort_by_key(|e| e.key);
-    for e in entries {
-        let id = e.key;
-        let expn_data = e.value.as_ref().unwrap();
-        s.push_str(&format!(
-            "\n{:?}: parent: {:?}, call_site_ctxt: {:?}, kind: {:?}",
-            id,
-            expn_data.kind.file_id(),
-            expn_data.ctxt,
-            expn_data.kind.descr(),
-        ));
-    }
-
-    s.push_str("\n\nSyntaxContexts:\n");
-    let mut entries = InternSyntaxContextLookupQuery.in_db(db).entries::<Vec<_>>();
-    entries.sort_by_key(|e| e.key);
-    for e in entries {
-        struct SyntaxContextDebug<'a>(
-            &'a dyn ExpandDatabase,
-            SyntaxContextId,
-            &'a SyntaxContextData,
-        );
-
-        impl std::fmt::Debug for SyntaxContextDebug<'_> {
-            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-                fancy_debug(self.2, self.1, self.0, f)
-            }
-        }
-
-        fn fancy_debug(
-            this: &SyntaxContextData,
-            self_id: SyntaxContextId,
-            db: &dyn ExpandDatabase,
-            f: &mut std::fmt::Formatter<'_>,
-        ) -> std::fmt::Result {
-            write!(f, "#{self_id} parent: #{}, outer_mark: (", this.parent)?;
-            match this.outer_expn {
-                Some(id) => {
-                    write!(f, "{:?}::{{{{expn{:?}}}}}", db.lookup_intern_macro_call(id).krate, id)?
-                }
-                None => write!(f, "root")?,
-            }
-            write!(f, ", {:?})", this.outer_transparency)
-        }
-
-        stdx::format_to!(s, "{:?}\n", SyntaxContextDebug(db, e.key, &e.value.unwrap()));
-    }
-    s
-}
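
Editor's note: with `SyntaxContextData` interning moved behind `SyntaxContext::new` and accessor methods, `marks_rev` and `SyntaxContextExt::remove_mark` still walk the parent chain, collecting `(MacroCallId, Transparency)` marks until the root context. The sketch below reproduces that walk over toy data; `Ctxt`, the integer ids, and the trimmed `Transparency` enum (the real one also has `Transparent`) are placeholders, not the real API.

// Sketch of the hygiene mark chain: each non-root context records its parent plus
// the (macro call, transparency) mark that produced it; collection runs inner to outer.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum Transparency {
    SemiTransparent,
    Opaque,
}

#[derive(Clone, Copy)]
struct Ctxt {
    parent: Option<usize>,             // index of the parent context; None == root
    mark: Option<(u32, Transparency)>, // outer expansion id + its transparency
}

// Collect marks from innermost to outermost, like `marks_rev` in the diff.
fn marks_rev(contexts: &[Ctxt], mut at: usize) -> Vec<(u32, Transparency)> {
    let mut out = Vec::new();
    while let Some(ctxt) = contexts.get(at) {
        match (ctxt.mark, ctxt.parent) {
            (Some(mark), Some(parent)) => {
                out.push(mark);
                at = parent;
            }
            _ => break, // reached the root context
        }
    }
    out
}

fn main() {
    // root <- ctx1 (call 10, Opaque) <- ctx2 (call 11, SemiTransparent)
    let contexts = [
        Ctxt { parent: None, mark: None },
        Ctxt { parent: Some(0), mark: Some((10, Transparency::Opaque)) },
        Ctxt { parent: Some(1), mark: Some((11, Transparency::SemiTransparent)) },
    ];
    assert_eq!(
        marks_rev(&contexts, 2),
        vec![(11, Transparency::SemiTransparent), (10, Transparency::Opaque)]
    );
}
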
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/inert_attr_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/inert_attr_macro.rs
index 4c4174e2680f5..543ac0619dd3e 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/inert_attr_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/inert_attr_macro.rs
@@ -562,7 +562,7 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
     ),
 
     BuiltinAttribute {
-        // name: sym::rustc_diagnostic_item.clone(),
+        // name: sym::rustc_diagnostic_item,
         name: "rustc_diagnostic_item",
         // FIXME: This can be `true` once we always use `tcx.is_diagnostic_item`.
         // only_local: false,
@@ -571,7 +571,7 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
         // duplicates: ErrorFollowing,
         // gate: Gated(
             // Stability::Unstable,
-            // sym::rustc_attrs.clone(),
+            // sym::rustc_attrs,
             // "diagnostic items compiler internal support for linting",
             // cfg_fn!(rustc_attrs),
         // ),
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
index c1d808cbf2c5a..f0a9a2ad52c85 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
@@ -27,28 +27,26 @@ mod prettify_macro_expansion_;
 
 use attrs::collect_attrs;
 use rustc_hash::FxHashMap;
+use salsa::plumbing::{AsId, FromId};
 use stdx::TupleExt;
 use triomphe::Arc;
 
 use core::fmt;
 use std::hash::Hash;
 
-use base_db::{ra_salsa::InternValueTrivial, CrateId};
+use base_db::Crate;
 use either::Either;
-use span::{
-    Edition, EditionedFileId, ErasedFileAstId, FileAstId, HirFileIdRepr, Span, SpanAnchor,
-    SyntaxContextData, SyntaxContextId,
-};
+use span::{Edition, ErasedFileAstId, FileAstId, Span, SpanAnchor, SyntaxContext};
 use syntax::{
-    ast::{self, AstNode},
     SyntaxNode, SyntaxToken, TextRange, TextSize,
+    ast::{self, AstNode},
 };
 
 use crate::{
     attrs::AttrId,
     builtin::{
-        include_input_to_file_id, BuiltinAttrExpander, BuiltinDeriveExpander,
-        BuiltinFnLikeExpander, EagerExpander,
+        BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerExpander,
+        include_input_to_file_id,
     },
     db::ExpandDatabase,
     mod_path::ModPath,
@@ -62,12 +60,12 @@ pub use crate::{
     prettify_macro_expansion_::prettify_macro_expansion,
 };
 
+pub use base_db::EditionedFileId;
 pub use mbe::{DeclarativeMacro, ValueResult};
-pub use span::{HirFileId, MacroCallId, MacroFileId};
 
 pub mod tt {
     pub use span::Span;
-    pub use tt::{token_to_literal, DelimiterKind, IdentIsRaw, LitKind, Spacing};
+    pub use tt::{DelimiterKind, IdentIsRaw, LitKind, Spacing, token_to_literal};
 
     pub type Delimiter = ::tt::Delimiter<Span>;
     pub type DelimSpan = ::tt::DelimSpan<Span>;
@@ -89,17 +87,17 @@ pub mod tt {
 macro_rules! impl_intern_lookup {
     ($db:ident, $id:ident, $loc:ident, $intern:ident, $lookup:ident) => {
         impl $crate::Intern for $loc {
-            type Database<'db> = dyn $db + 'db;
+            type Database = dyn $db;
             type ID = $id;
-            fn intern(self, db: &Self::Database<'_>) -> $id {
+            fn intern(self, db: &Self::Database) -> Self::ID {
                 db.$intern(self)
             }
         }
 
         impl $crate::Lookup for $id {
-            type Database<'db> = dyn $db + 'db;
+            type Database = dyn $db;
             type Data = $loc;
-            fn lookup(&self, db: &Self::Database<'_>) -> $loc {
+            fn lookup(&self, db: &Self::Database) -> Self::Data {
                 db.$lookup(*self)
             }
         }
@@ -108,15 +106,15 @@ macro_rules! impl_intern_lookup {
 
 // ideally these would be defined in base-db, but the orphan rule doesn't let us
 pub trait Intern {
-    type Database<'db>: ?Sized;
+    type Database: ?Sized;
     type ID;
-    fn intern(self, db: &Self::Database<'_>) -> Self::ID;
+    fn intern(self, db: &Self::Database) -> Self::ID;
 }
 
 pub trait Lookup {
-    type Database<'db>: ?Sized;
+    type Database: ?Sized;
     type Data;
-    fn lookup(&self, db: &Self::Database<'_>) -> Self::Data;
+    fn lookup(&self, db: &Self::Database) -> Self::Data;
 }
 
 impl_intern_lookup!(
@@ -127,14 +125,6 @@ impl_intern_lookup!(
     lookup_intern_macro_call
 );
 
-impl_intern_lookup!(
-    ExpandDatabase,
-    SyntaxContextId,
-    SyntaxContextData,
-    intern_syntax_context,
-    lookup_intern_syntax_context
-);
-
 pub type ExpandResult<T> = ValueResult<T, ExpandError>;
 
 #[derive(Debug, PartialEq, Eq, Clone, Hash)]
@@ -165,7 +155,7 @@ impl ExpandError {
 pub enum ExpandErrorKind {
     /// Attribute macro expansion is disabled.
     ProcMacroAttrExpansionDisabled,
-    MissingProcMacroExpander(CrateId),
+    MissingProcMacroExpander(Crate),
     /// The macro for this call is disabled.
     MacroDisabled,
     /// The macro definition has errors.
@@ -208,14 +198,17 @@ impl ExpandErrorKind {
                 kind: RenderedExpandError::DISABLED,
             },
             &ExpandErrorKind::MissingProcMacroExpander(def_crate) => {
-                match db.proc_macros().get_error_for_crate(def_crate) {
+                match db.proc_macros_for_crate(def_crate).as_ref().and_then(|it| it.get_error()) {
                     Some((e, hard_err)) => RenderedExpandError {
                         message: e.to_owned(),
                         error: hard_err,
                         kind: RenderedExpandError::GENERAL_KIND,
                     },
                     None => RenderedExpandError {
-                        message: format!("internal error: proc-macro map is missing error entry for crate {def_crate:?}"),
+                        message: format!(
+                            "internal error: proc-macro map is missing error entry for crate {:?}",
+                            def_crate
+                        ),
                         error: true,
                         kind: RenderedExpandError::GENERAL_KIND,
                     },
@@ -258,15 +251,14 @@ impl From<mbe::ExpandError> for ExpandError {
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct MacroCallLoc {
     pub def: MacroDefId,
-    pub krate: CrateId,
+    pub krate: Crate,
     pub kind: MacroCallKind,
-    pub ctxt: SyntaxContextId,
+    pub ctxt: SyntaxContext,
 }
-impl InternValueTrivial for MacroCallLoc {}
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct MacroDefId {
-    pub krate: CrateId,
+    pub krate: Crate,
     pub edition: Edition,
     pub kind: MacroDefKind,
     pub local_inner: bool,
@@ -288,6 +280,17 @@ impl MacroDefKind {
     pub fn is_declarative(&self) -> bool {
         matches!(self, MacroDefKind::Declarative(..))
     }
+
+    pub fn erased_ast_id(&self) -> ErasedAstId {
+        match *self {
+            MacroDefKind::ProcMacro(id, ..) => id.erase(),
+            MacroDefKind::BuiltIn(id, _)
+            | MacroDefKind::BuiltInAttr(id, _)
+            | MacroDefKind::BuiltInDerive(id, _)
+            | MacroDefKind::BuiltInEager(id, _)
+            | MacroDefKind::Declarative(id, ..) => id.erase(),
+        }
+    }
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -338,51 +341,34 @@ pub enum MacroCallKind {
     },
 }
 
-pub trait HirFileIdExt {
-    fn edition(self, db: &dyn ExpandDatabase) -> Edition;
-    /// Returns the original file of this macro call hierarchy.
-    fn original_file(self, db: &dyn ExpandDatabase) -> EditionedFileId;
-
-    /// Returns the original file of this macro call hierarchy while going into the included file if
-    /// one of the calls comes from an `include!``.
-    fn original_file_respecting_includes(self, db: &dyn ExpandDatabase) -> EditionedFileId;
-
-    /// If this is a macro call, returns the syntax node of the very first macro call this file resides in.
-    fn original_call_node(self, db: &dyn ExpandDatabase) -> Option<InRealFile<SyntaxNode>>;
-
-    fn as_builtin_derive_attr_node(&self, db: &dyn ExpandDatabase) -> Option<InFile<ast::Attr>>;
-}
-
-impl HirFileIdExt for HirFileId {
-    fn edition(self, db: &dyn ExpandDatabase) -> Edition {
-        match self.repr() {
-            HirFileIdRepr::FileId(file_id) => file_id.edition(),
-            HirFileIdRepr::MacroFile(m) => m.macro_call_id.lookup(db).def.edition,
+impl HirFileId {
+    pub fn edition(self, db: &dyn ExpandDatabase) -> Edition {
+        match self {
+            HirFileId::FileId(file_id) => file_id.editioned_file_id(db).edition(),
+            HirFileId::MacroFile(m) => db.lookup_intern_macro_call(m).def.edition,
         }
     }
-    fn original_file(self, db: &dyn ExpandDatabase) -> EditionedFileId {
+    pub fn original_file(self, db: &dyn ExpandDatabase) -> EditionedFileId {
         let mut file_id = self;
         loop {
-            match file_id.repr() {
-                HirFileIdRepr::FileId(id) => break id,
-                HirFileIdRepr::MacroFile(MacroFileId { macro_call_id }) => {
-                    file_id = macro_call_id.lookup(db).kind.file_id();
+            match file_id {
+                HirFileId::FileId(id) => break id,
+                HirFileId::MacroFile(macro_call_id) => {
+                    file_id = db.lookup_intern_macro_call(macro_call_id).kind.file_id()
                 }
             }
         }
     }
 
-    fn original_file_respecting_includes(mut self, db: &dyn ExpandDatabase) -> EditionedFileId {
+    pub fn original_file_respecting_includes(mut self, db: &dyn ExpandDatabase) -> EditionedFileId {
         loop {
-            match self.repr() {
-                HirFileIdRepr::FileId(id) => break id,
-                HirFileIdRepr::MacroFile(file) => {
-                    let loc = db.lookup_intern_macro_call(file.macro_call_id);
+            match self {
+                HirFileId::FileId(id) => break id,
+                HirFileId::MacroFile(file) => {
+                    let loc = db.lookup_intern_macro_call(file);
                     if loc.def.is_include() {
                         if let MacroCallKind::FnLike { eager: Some(eager), .. } = &loc.kind {
-                            if let Ok(it) =
-                                include_input_to_file_id(db, file.macro_call_id, &eager.arg)
-                            {
+                            if let Ok(it) = include_input_to_file_id(db, file, &eager.arg) {
                                 break it;
                             }
                         }
@@ -393,23 +379,26 @@ impl HirFileIdExt for HirFileId {
         }
     }
 
-    fn original_call_node(self, db: &dyn ExpandDatabase) -> Option<InRealFile<SyntaxNode>> {
-        let mut call = db.lookup_intern_macro_call(self.macro_file()?.macro_call_id).to_node(db);
+    pub fn original_call_node(self, db: &dyn ExpandDatabase) -> Option<InRealFile<SyntaxNode>> {
+        let mut call = db.lookup_intern_macro_call(self.macro_file()?).to_node(db);
         loop {
-            match call.file_id.repr() {
-                HirFileIdRepr::FileId(file_id) => {
-                    break Some(InRealFile { file_id, value: call.value })
+            match call.file_id {
+                HirFileId::FileId(file_id) => {
+                    break Some(InRealFile { file_id, value: call.value });
                 }
-                HirFileIdRepr::MacroFile(MacroFileId { macro_call_id }) => {
+                HirFileId::MacroFile(macro_call_id) => {
                     call = db.lookup_intern_macro_call(macro_call_id).to_node(db);
                 }
             }
         }
     }
 
-    fn as_builtin_derive_attr_node(&self, db: &dyn ExpandDatabase) -> Option<InFile<ast::Attr>> {
+    pub fn as_builtin_derive_attr_node(
+        &self,
+        db: &dyn ExpandDatabase,
+    ) -> Option<InFile<ast::Attr>> {
         let macro_file = self.macro_file()?;
-        let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+        let loc = db.lookup_intern_macro_call(macro_file);
         let attr = match loc.def.kind {
             MacroDefKind::BuiltInDerive(..) => loc.to_node(db),
             _ => return None,
@@ -436,57 +425,34 @@ pub enum MacroKind {
     ProcMacro,
 }
 
-pub trait MacroFileIdExt {
-    fn is_env_or_option_env(&self, db: &dyn ExpandDatabase) -> bool;
-    fn is_include_like_macro(&self, db: &dyn ExpandDatabase) -> bool;
-    fn eager_arg(&self, db: &dyn ExpandDatabase) -> Option<MacroCallId>;
-    fn expansion_level(self, db: &dyn ExpandDatabase) -> u32;
-    /// If this is a macro call, returns the syntax node of the call.
-    fn call_node(self, db: &dyn ExpandDatabase) -> InFile<SyntaxNode>;
-    fn parent(self, db: &dyn ExpandDatabase) -> HirFileId;
-
-    fn expansion_info(self, db: &dyn ExpandDatabase) -> ExpansionInfo;
-
-    fn kind(&self, db: &dyn ExpandDatabase) -> MacroKind;
-
-    /// Return whether this file is an include macro
-    fn is_include_macro(&self, db: &dyn ExpandDatabase) -> bool;
-
-    fn is_eager(&self, db: &dyn ExpandDatabase) -> bool;
-
-    /// Return whether this file is the pseudo expansion of the derive attribute.
-    /// See [`crate::builtin_attr_macro::derive_attr_expand`].
-    fn is_derive_attr_pseudo_expansion(&self, db: &dyn ExpandDatabase) -> bool;
-}
-
-impl MacroFileIdExt for MacroFileId {
-    fn call_node(self, db: &dyn ExpandDatabase) -> InFile<SyntaxNode> {
-        db.lookup_intern_macro_call(self.macro_call_id).to_node(db)
+impl MacroCallId {
+    pub fn call_node(self, db: &dyn ExpandDatabase) -> InFile<SyntaxNode> {
+        db.lookup_intern_macro_call(self).to_node(db)
     }
-    fn expansion_level(self, db: &dyn ExpandDatabase) -> u32 {
+    pub fn expansion_level(self, db: &dyn ExpandDatabase) -> u32 {
         let mut level = 0;
         let mut macro_file = self;
         loop {
-            let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+            let loc = db.lookup_intern_macro_call(macro_file);
 
             level += 1;
-            macro_file = match loc.kind.file_id().repr() {
-                HirFileIdRepr::FileId(_) => break level,
-                HirFileIdRepr::MacroFile(it) => it,
+            macro_file = match loc.kind.file_id() {
+                HirFileId::FileId(_) => break level,
+                HirFileId::MacroFile(it) => it,
             };
         }
     }
-    fn parent(self, db: &dyn ExpandDatabase) -> HirFileId {
-        self.macro_call_id.lookup(db).kind.file_id()
+    pub fn parent(self, db: &dyn ExpandDatabase) -> HirFileId {
+        db.lookup_intern_macro_call(self).kind.file_id()
     }
 
     /// Return expansion information if it is a macro-expansion file
-    fn expansion_info(self, db: &dyn ExpandDatabase) -> ExpansionInfo {
+    pub fn expansion_info(self, db: &dyn ExpandDatabase) -> ExpansionInfo {
         ExpansionInfo::new(db, self)
     }
 
-    fn kind(&self, db: &dyn ExpandDatabase) -> MacroKind {
-        match db.lookup_intern_macro_call(self.macro_call_id).def.kind {
+    pub fn kind(self, db: &dyn ExpandDatabase) -> MacroKind {
+        match db.lookup_intern_macro_call(self).def.kind {
             MacroDefKind::Declarative(..) => MacroKind::Declarative,
             MacroDefKind::BuiltIn(..) | MacroDefKind::BuiltInEager(..) => {
                 MacroKind::DeclarativeBuiltIn
@@ -499,33 +465,33 @@ impl MacroFileIdExt for MacroFileId {
         }
     }
 
-    fn is_include_macro(&self, db: &dyn ExpandDatabase) -> bool {
-        db.lookup_intern_macro_call(self.macro_call_id).def.is_include()
+    pub fn is_include_macro(self, db: &dyn ExpandDatabase) -> bool {
+        db.lookup_intern_macro_call(self).def.is_include()
     }
 
-    fn is_include_like_macro(&self, db: &dyn ExpandDatabase) -> bool {
-        db.lookup_intern_macro_call(self.macro_call_id).def.is_include_like()
+    pub fn is_include_like_macro(self, db: &dyn ExpandDatabase) -> bool {
+        db.lookup_intern_macro_call(self).def.is_include_like()
     }
 
-    fn is_env_or_option_env(&self, db: &dyn ExpandDatabase) -> bool {
-        db.lookup_intern_macro_call(self.macro_call_id).def.is_env_or_option_env()
+    pub fn is_env_or_option_env(self, db: &dyn ExpandDatabase) -> bool {
+        db.lookup_intern_macro_call(self).def.is_env_or_option_env()
     }
 
-    fn is_eager(&self, db: &dyn ExpandDatabase) -> bool {
-        let loc = db.lookup_intern_macro_call(self.macro_call_id);
+    pub fn is_eager(self, db: &dyn ExpandDatabase) -> bool {
+        let loc = db.lookup_intern_macro_call(self);
         matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
     }
 
-    fn eager_arg(&self, db: &dyn ExpandDatabase) -> Option<MacroCallId> {
-        let loc = db.lookup_intern_macro_call(self.macro_call_id);
+    pub fn eager_arg(self, db: &dyn ExpandDatabase) -> Option<MacroCallId> {
+        let loc = db.lookup_intern_macro_call(self);
         match &loc.kind {
             MacroCallKind::FnLike { eager, .. } => eager.as_ref().map(|it| it.arg_id),
             _ => None,
         }
     }
 
-    fn is_derive_attr_pseudo_expansion(&self, db: &dyn ExpandDatabase) -> bool {
-        let loc = db.lookup_intern_macro_call(self.macro_call_id);
+    pub fn is_derive_attr_pseudo_expansion(self, db: &dyn ExpandDatabase) -> bool {
+        let loc = db.lookup_intern_macro_call(self);
         loc.def.is_attribute_derive()
     }
 }
@@ -534,11 +500,11 @@ impl MacroDefId {
     pub fn make_call(
         self,
         db: &dyn ExpandDatabase,
-        krate: CrateId,
+        krate: Crate,
         kind: MacroCallKind,
-        ctxt: SyntaxContextId,
+        ctxt: SyntaxContext,
     ) -> MacroCallId {
-        MacroCallLoc { def: self, krate, kind, ctxt }.intern(db)
+        db.intern_macro_call(MacroCallLoc { def: self, krate, kind, ctxt })
     }
 
     pub fn definition_range(&self, db: &dyn ExpandDatabase) -> InFile<TextRange> {
@@ -692,7 +658,7 @@ impl MacroCallLoc {
 }
 
 impl MacroCallKind {
-    fn descr(&self) -> &'static str {
+    pub fn descr(&self) -> &'static str {
         match self {
             MacroCallKind::FnLike { .. } => "macro call",
             MacroCallKind::Derive { .. } => "derive macro",
@@ -723,11 +689,11 @@ impl MacroCallKind {
     pub fn original_call_range_with_body(self, db: &dyn ExpandDatabase) -> FileRange {
         let mut kind = self;
         let file_id = loop {
-            match kind.file_id().repr() {
-                HirFileIdRepr::MacroFile(file) => {
-                    kind = db.lookup_intern_macro_call(file.macro_call_id).kind;
+            match kind.file_id() {
+                HirFileId::MacroFile(file) => {
+                    kind = db.lookup_intern_macro_call(file).kind;
                 }
-                HirFileIdRepr::FileId(file_id) => break file_id,
+                HirFileId::FileId(file_id) => break file_id,
             }
         };
 
@@ -748,11 +714,11 @@ impl MacroCallKind {
     pub fn original_call_range(self, db: &dyn ExpandDatabase) -> FileRange {
         let mut kind = self;
         let file_id = loop {
-            match kind.file_id().repr() {
-                HirFileIdRepr::MacroFile(file) => {
-                    kind = db.lookup_intern_macro_call(file.macro_call_id).kind;
+            match kind.file_id() {
+                HirFileId::MacroFile(file) => {
+                    kind = db.lookup_intern_macro_call(file).kind;
                 }
-                HirFileIdRepr::FileId(file_id) => break file_id,
+                HirFileId::FileId(file_id) => break file_id,
             }
         };
 
@@ -840,7 +806,7 @@ impl ExpansionInfo {
     pub fn map_range_down_exact(
         &self,
         span: Span,
-    ) -> Option<InMacroFile<impl Iterator<Item = (SyntaxToken, SyntaxContextId)> + '_>> {
+    ) -> Option<InMacroFile<impl Iterator<Item = (SyntaxToken, SyntaxContext)> + '_>> {
         let tokens = self.exp_map.ranges_with_span_exact(span).flat_map(move |(range, ctx)| {
             self.expanded.value.covering_element(range).into_token().zip(Some(ctx))
         });
@@ -855,7 +821,7 @@ impl ExpansionInfo {
     pub fn map_range_down(
         &self,
         span: Span,
-    ) -> Option<InMacroFile<impl Iterator<Item = (SyntaxToken, SyntaxContextId)> + '_>> {
+    ) -> Option<InMacroFile<impl Iterator<Item = (SyntaxToken, SyntaxContext)> + '_>> {
         let tokens = self.exp_map.ranges_with_span(span).flat_map(move |(range, ctx)| {
             self.expanded.value.covering_element(range).into_token().zip(Some(ctx))
         });
@@ -868,7 +834,7 @@ impl ExpansionInfo {
         &self,
         db: &dyn ExpandDatabase,
         offset: TextSize,
-    ) -> (FileRange, SyntaxContextId) {
+    ) -> (FileRange, SyntaxContext) {
         debug_assert!(self.expanded.value.text_range().contains(offset));
         span_for_offset(db, &self.exp_map, offset)
     }
@@ -878,7 +844,7 @@ impl ExpansionInfo {
         &self,
         db: &dyn ExpandDatabase,
         range: TextRange,
-    ) -> Option<(FileRange, SyntaxContextId)> {
+    ) -> Option<(FileRange, SyntaxContext)> {
         debug_assert!(self.expanded.value.text_range().contains_range(range));
         map_node_range_up(db, &self.exp_map, range)
     }
@@ -893,7 +859,7 @@ impl ExpansionInfo {
         let span = self.exp_map.span_at(token.start());
         match &self.arg_map {
             SpanMap::RealSpanMap(_) => {
-                let file_id = span.anchor.file_id.into();
+                let file_id = EditionedFileId::from_span(db, span.anchor.file_id).into();
                 let anchor_offset =
                     db.ast_id_map(file_id).get_erased(span.anchor.ast_id).text_range().start();
                 InFile { file_id, value: smallvec::smallvec![span.range + anchor_offset] }
@@ -916,9 +882,9 @@ impl ExpansionInfo {
         }
     }
 
-    pub fn new(db: &dyn ExpandDatabase, macro_file: MacroFileId) -> ExpansionInfo {
+    pub fn new(db: &dyn ExpandDatabase, macro_file: MacroCallId) -> ExpansionInfo {
         let _p = tracing::info_span!("ExpansionInfo::new").entered();
-        let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+        let loc = db.lookup_intern_macro_call(macro_file);
 
         let arg_tt = loc.kind.arg(db);
         let arg_map = db.span_map(arg_tt.file_id);
@@ -950,9 +916,10 @@ pub fn map_node_range_up_rooted(
         start = start.min(span.range.start());
         end = end.max(span.range.end());
     }
+    let file_id = EditionedFileId::from_span(db, anchor.file_id);
     let anchor_offset =
-        db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start();
-    Some(FileRange { file_id: anchor.file_id, range: TextRange::new(start, end) + anchor_offset })
+        db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
+    Some(FileRange { file_id, range: TextRange::new(start, end) + anchor_offset })
 }
 
 /// Maps up the text range out of the expansion hierarchy back into the original file it's from.
@@ -962,7 +929,7 @@ pub fn map_node_range_up(
     db: &dyn ExpandDatabase,
     exp_map: &ExpansionSpanMap,
     range: TextRange,
-) -> Option<(FileRange, SyntaxContextId)> {
+) -> Option<(FileRange, SyntaxContext)> {
     let mut spans = exp_map.spans_for_range(range);
     let Span { range, anchor, ctx } = spans.next()?;
     let mut start = range.start();
@@ -975,12 +942,10 @@ pub fn map_node_range_up(
         start = start.min(span.range.start());
         end = end.max(span.range.end());
     }
+    let file_id = EditionedFileId::from_span(db, anchor.file_id);
     let anchor_offset =
-        db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start();
-    Some((
-        FileRange { file_id: anchor.file_id, range: TextRange::new(start, end) + anchor_offset },
-        ctx,
-    ))
+        db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
+    Some((FileRange { file_id, range: TextRange::new(start, end) + anchor_offset }, ctx))
 }
 
 /// Maps up the text range out of the expansion hierarchy back into the original file it's from.
@@ -989,7 +954,7 @@ pub fn map_node_range_up_aggregated(
     db: &dyn ExpandDatabase,
     exp_map: &ExpansionSpanMap,
     range: TextRange,
-) -> FxHashMap<(SpanAnchor, SyntaxContextId), TextRange> {
+) -> FxHashMap<(SpanAnchor, SyntaxContext), TextRange> {
     let mut map = FxHashMap::default();
     for span in exp_map.spans_for_range(range) {
         let range = map.entry((span.anchor, span.ctx)).or_insert_with(|| span.range);
@@ -999,8 +964,9 @@ pub fn map_node_range_up_aggregated(
         );
     }
     for ((anchor, _), range) in &mut map {
+        let file_id = EditionedFileId::from_span(db, anchor.file_id);
         let anchor_offset =
-            db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start();
+            db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
         *range += anchor_offset;
     }
     map
@@ -1011,14 +977,12 @@ pub fn span_for_offset(
     db: &dyn ExpandDatabase,
     exp_map: &ExpansionSpanMap,
     offset: TextSize,
-) -> (FileRange, SyntaxContextId) {
+) -> (FileRange, SyntaxContext) {
     let span = exp_map.span_at(offset);
-    let anchor_offset = db
-        .ast_id_map(span.anchor.file_id.into())
-        .get_erased(span.anchor.ast_id)
-        .text_range()
-        .start();
-    (FileRange { file_id: span.anchor.file_id, range: span.range + anchor_offset }, span.ctx)
+    let file_id = EditionedFileId::from_span(db, span.anchor.file_id);
+    let anchor_offset =
+        db.ast_id_map(file_id.into()).get_erased(span.anchor.ast_id).text_range().start();
+    (FileRange { file_id, range: span.range + anchor_offset }, span.ctx)
 }
 
 /// In Rust, macros expand token trees to token trees. When we want to turn a
@@ -1086,3 +1050,77 @@ impl ExpandTo {
 }
 
 intern::impl_internable!(ModPath, attrs::AttrInput);
+
+#[salsa::interned(no_lifetime, debug)]
+#[doc(alias = "MacroFileId")]
+pub struct MacroCallId {
+    pub loc: MacroCallLoc,
+}
+
+impl From<span::MacroCallId> for MacroCallId {
+    #[inline]
+    fn from(value: span::MacroCallId) -> Self {
+        MacroCallId::from_id(value.0)
+    }
+}
+
+impl From<MacroCallId> for span::MacroCallId {
+    #[inline]
+    fn from(value: MacroCallId) -> span::MacroCallId {
+        span::MacroCallId(value.as_id())
+    }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
+pub enum HirFileId {
+    FileId(EditionedFileId),
+    MacroFile(MacroCallId),
+}
+
+impl From<EditionedFileId> for HirFileId {
+    #[inline]
+    fn from(file_id: EditionedFileId) -> Self {
+        HirFileId::FileId(file_id)
+    }
+}
+
+impl From<MacroCallId> for HirFileId {
+    #[inline]
+    fn from(file_id: MacroCallId) -> Self {
+        HirFileId::MacroFile(file_id)
+    }
+}
+
+impl HirFileId {
+    #[inline]
+    pub fn macro_file(self) -> Option<MacroCallId> {
+        match self {
+            HirFileId::FileId(_) => None,
+            HirFileId::MacroFile(it) => Some(it),
+        }
+    }
+
+    #[inline]
+    pub fn is_macro(self) -> bool {
+        matches!(self, HirFileId::MacroFile(_))
+    }
+
+    #[inline]
+    pub fn file_id(self) -> Option<EditionedFileId> {
+        match self {
+            HirFileId::FileId(it) => Some(it),
+            HirFileId::MacroFile(_) => None,
+        }
+    }
+}
+
+impl PartialEq<EditionedFileId> for HirFileId {
+    fn eq(&self, &other: &EditionedFileId) -> bool {
+        *self == HirFileId::from(other)
+    }
+}
+impl PartialEq<HirFileId> for EditionedFileId {
+    fn eq(&self, &other: &HirFileId) -> bool {
+        other == HirFileId::from(*self)
+    }
+}
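
A reading aid for the `MacroCallId`/`HirFileId` additions above: a minimal, self-contained sketch of the two-variant file id and the walk that `expansion_level` performs. The types and the `HashMap` standing in for `lookup_intern_macro_call` are toys for illustration, not the rust-analyzer API.

use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct FileId(u32);

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct MacroCall(u32);

// Toy counterpart of the two-variant `HirFileId`.
#[derive(Clone, Copy, Debug)]
enum HirFile {
    File(FileId),
    Macro(MacroCall),
}

// Stand-in for `db.lookup_intern_macro_call(..).kind.file_id()`: the file
// (real or macro) containing a macro call's call site.
fn call_site_file(calls: &HashMap<u32, HirFile>, call: MacroCall) -> HirFile {
    calls[&call.0]
}

// Mirrors the loop in `MacroCallId::expansion_level`: hop from call site to
// call site until a real file is reached, counting the hops.
fn expansion_level(calls: &HashMap<u32, HirFile>, mut call: MacroCall) -> u32 {
    let mut level = 0;
    loop {
        level += 1;
        call = match call_site_file(calls, call) {
            HirFile::File(_) => break level,
            HirFile::Macro(it) => it,
        };
    }
}

fn main() {
    // Call 2 sits inside the expansion of call 1, which sits in real file 0.
    let calls = HashMap::from([
        (1, HirFile::File(FileId(0))),
        (2, HirFile::Macro(MacroCall(1))),
    ]);
    assert_eq!(expansion_level(&calls, MacroCall(2)), 2);
}
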
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
index 75b5861454056..72a5627636bf6 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
@@ -7,15 +7,15 @@ use std::{
 
 use crate::{
     db::ExpandDatabase,
-    hygiene::{marks_rev, SyntaxContextExt, Transparency},
+    hygiene::{SyntaxContextExt, Transparency, marks_rev},
     name::{AsName, Name},
     tt,
 };
-use base_db::CrateId;
+use base_db::Crate;
 use intern::sym;
 use smallvec::SmallVec;
-use span::{Edition, SyntaxContextId};
-use syntax::{ast, AstNode};
+use span::{Edition, SyntaxContext};
+use syntax::{AstNode, ast};
 
 #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
 pub struct ModPath {
@@ -33,7 +33,7 @@ pub enum PathKind {
     Abs,
     // FIXME: Can we remove this somehow?
     /// `$crate` from macro expansion
-    DollarCrate(CrateId),
+    DollarCrate(Crate),
 }
 
 impl PathKind {
@@ -44,7 +44,7 @@ impl ModPath {
     pub fn from_src(
         db: &dyn ExpandDatabase,
         path: ast::Path,
-        span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContextId,
+        span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContext,
     ) -> Option<ModPath> {
         convert_path(db, path, span_for_range)
     }
@@ -111,8 +111,7 @@ impl ModPath {
 
     #[allow(non_snake_case)]
     pub fn is_Self(&self) -> bool {
-        self.kind == PathKind::Plain
-            && matches!(&*self.segments, [name] if *name == sym::Self_.clone())
+        self.kind == PathKind::Plain && matches!(&*self.segments, [name] if *name == sym::Self_)
     }
 
     /// If this path is a single identifier, like `foo`, return its name.
@@ -209,7 +208,7 @@ fn display_fmt_path(
 fn convert_path(
     db: &dyn ExpandDatabase,
     path: ast::Path,
-    span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContextId,
+    span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContext,
 ) -> Option<ModPath> {
     let mut segments = path.segments();
 
@@ -251,7 +250,7 @@ fn convert_path(
             }
         }
         ast::PathSegmentKind::SelfTypeKw => {
-            ModPath::from_segments(PathKind::Plain, Some(Name::new_symbol_root(sym::Self_.clone())))
+            ModPath::from_segments(PathKind::Plain, Some(Name::new_symbol_root(sym::Self_)))
         }
         ast::PathSegmentKind::CrateKw => ModPath::from_segments(PathKind::Crate, iter::empty()),
         ast::PathSegmentKind::SelfKw => handle_super_kw(0)?,
@@ -277,8 +276,8 @@ fn convert_path(
     if mod_path.segments.len() == 1 && mod_path.kind == PathKind::Plain {
         if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
             let syn_ctx = span_for_range(segment.syntax().text_range());
-            if let Some(macro_call_id) = db.lookup_intern_syntax_context(syn_ctx).outer_expn {
-                if db.lookup_intern_macro_call(macro_call_id).def.local_inner {
+            if let Some(macro_call_id) = syn_ctx.outer_expn(db) {
+                if db.lookup_intern_macro_call(macro_call_id.into()).def.local_inner {
                     mod_path.kind = match resolve_crate_root(db, syn_ctx) {
                         Some(crate_root) => PathKind::DollarCrate(crate_root),
                         None => PathKind::Crate,
@@ -333,10 +332,10 @@ fn convert_path_tt(db: &dyn ExpandDatabase, tt: tt::TokenTreesView<'_>) -> Optio
     Some(ModPath { kind, segments })
 }
 
-pub fn resolve_crate_root(db: &dyn ExpandDatabase, mut ctxt: SyntaxContextId) -> Option<CrateId> {
+pub fn resolve_crate_root(db: &dyn ExpandDatabase, mut ctxt: SyntaxContext) -> Option<Crate> {
     // When resolving `$crate` from a `macro_rules!` invoked in a `macro`,
     // we don't want to pretend that the `macro_rules!` definition is in the `macro`
-    // as described in `SyntaxContext::apply_mark`, so we ignore prepended opaque marks.
+    // as described in `SyntaxContextId::apply_mark`, so we ignore prepended opaque marks.
     // FIXME: This is only a guess and it doesn't work correctly for `macro_rules!`
     // definitions actually produced by `macro` and `macro` definitions produced by
     // `macro_rules!`, but at least such configurations are not stable yet.
@@ -353,7 +352,7 @@ pub fn resolve_crate_root(db: &dyn ExpandDatabase, mut ctxt: SyntaxContextId) ->
         result_mark = Some(mark);
     }
 
-    result_mark.map(|call| db.lookup_intern_macro_call(call).def.krate)
+    result_mark.map(|call| db.lookup_intern_macro_call(call.into()).def.krate)
 }
 
 pub use crate::name as __name;
@@ -399,7 +398,7 @@ pub use crate::__path as path;
 macro_rules! __tool_path {
     ($start:ident $(:: $seg:ident)*) => ({
         $crate::mod_path::ModPath::from_segments($crate::mod_path::PathKind::Plain, vec![
-            $crate::name::Name::new_symbol_root($crate::intern::sym::rust_analyzer.clone()), $crate::name::Name::new_symbol_root($crate::intern::sym::$start.clone()), $($crate::name::Name::new_symbol_root($crate::intern::sym::$seg.clone()),)*
+            $crate::name::Name::new_symbol_root($crate::intern::sym::rust_analyzer), $crate::name::Name::new_symbol_root($crate::intern::sym::$start.clone()), $($crate::name::Name::new_symbol_root($crate::intern::sym::$seg.clone()),)*
         ])
     });
 }
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs
index 0758bd4515ef2..d43ef38f291d9 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs
@@ -2,8 +2,8 @@
 
 use std::fmt;
 
-use intern::{sym, Symbol};
-use span::{Edition, SyntaxContextId};
+use intern::{Symbol, sym};
+use span::{Edition, SyntaxContext};
 use syntax::utils::is_raw_identifier;
 use syntax::{ast, format_smolstr};
 
@@ -74,7 +74,7 @@ impl Name {
         Name { symbol: Symbol::intern(text), ctx: () }
     }
 
-    pub fn new(text: &str, mut ctx: SyntaxContextId) -> Name {
+    pub fn new(text: &str, mut ctx: SyntaxContext) -> Name {
         // For comparisons etc. we remove the edition, because sometimes we search for some `Name`
         // and we don't know which edition it came from.
         // Can't do that for all `SyntaxContextId`s because it breaks Salsa.
@@ -88,41 +88,40 @@ impl Name {
 
     pub fn new_root(text: &str) -> Name {
         // The edition doesn't matter for hygiene.
-        Self::new(text, SyntaxContextId::root(Edition::Edition2015))
+        Self::new(text, SyntaxContext::root(Edition::Edition2015))
     }
 
     pub fn new_tuple_field(idx: usize) -> Name {
         let symbol = match idx {
-            0 => sym::INTEGER_0.clone(),
-            1 => sym::INTEGER_1.clone(),
-            2 => sym::INTEGER_2.clone(),
-            3 => sym::INTEGER_3.clone(),
-            4 => sym::INTEGER_4.clone(),
-            5 => sym::INTEGER_5.clone(),
-            6 => sym::INTEGER_6.clone(),
-            7 => sym::INTEGER_7.clone(),
-            8 => sym::INTEGER_8.clone(),
-            9 => sym::INTEGER_9.clone(),
-            10 => sym::INTEGER_10.clone(),
-            11 => sym::INTEGER_11.clone(),
-            12 => sym::INTEGER_12.clone(),
-            13 => sym::INTEGER_13.clone(),
-            14 => sym::INTEGER_14.clone(),
-            15 => sym::INTEGER_15.clone(),
+            0 => sym::INTEGER_0,
+            1 => sym::INTEGER_1,
+            2 => sym::INTEGER_2,
+            3 => sym::INTEGER_3,
+            4 => sym::INTEGER_4,
+            5 => sym::INTEGER_5,
+            6 => sym::INTEGER_6,
+            7 => sym::INTEGER_7,
+            8 => sym::INTEGER_8,
+            9 => sym::INTEGER_9,
+            10 => sym::INTEGER_10,
+            11 => sym::INTEGER_11,
+            12 => sym::INTEGER_12,
+            13 => sym::INTEGER_13,
+            14 => sym::INTEGER_14,
+            15 => sym::INTEGER_15,
             _ => Symbol::intern(&idx.to_string()),
         };
         Name { symbol, ctx: () }
     }
 
-    pub fn new_lifetime(lt: &ast::Lifetime) -> Name {
-        let text = lt.text();
-        match text.strip_prefix("'r#") {
-            Some(text) => Self::new_text(&format_smolstr!("'{text}")),
-            None => Self::new_text(text.as_str()),
+    pub fn new_lifetime(lt: &str) -> Name {
+        match lt.strip_prefix("'r#") {
+            Some(lt) => Self::new_text(&format_smolstr!("'{lt}")),
+            None => Self::new_text(lt),
         }
     }
 
-    pub fn new_symbol(symbol: Symbol, ctx: SyntaxContextId) -> Self {
+    pub fn new_symbol(symbol: Symbol, ctx: SyntaxContext) -> Self {
         debug_assert!(!symbol.as_str().starts_with("r#"));
         _ = ctx;
         Self { symbol, ctx: () }
@@ -130,7 +129,7 @@ impl Name {
 
     // FIXME: This needs to go once we have hygiene
     pub fn new_symbol_root(sym: Symbol) -> Self {
-        Self::new_symbol(sym, SyntaxContextId::root(Edition::Edition2015))
+        Self::new_symbol(sym, SyntaxContext::root(Edition::Edition2015))
     }
 
     /// A fake name for things missing in the source code.
@@ -143,7 +142,7 @@ impl Name {
     /// name is equal only to itself. It's not clear how to implement this in
     /// salsa though, so we punt on that bit for a moment.
     pub const fn missing() -> Name {
-        Name { symbol: sym::consts::MISSING_NAME, ctx: () }
+        Name { symbol: sym::MISSING_NAME, ctx: () }
     }
 
     /// Returns true if this is a fake name for things missing in the source code. See
@@ -260,7 +259,7 @@ impl AsName for ast::FieldKind {
     }
 }
 
-impl AsName for base_db::Dependency {
+impl AsName for base_db::BuiltDependency {
     fn as_name(&self) -> Name {
         Name::new_symbol_root((*self.name).clone())
     }
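
`Name::new_lifetime` now takes a plain `&str` and normalizes raw lifetimes by stripping the `'r#` prefix. A tiny sketch of just that rule; the helper name and the `'r#gen` example are illustrative, only the `strip_prefix("'r#")` behaviour comes from the diff above.

// Toy version of the raw-lifetime normalization in `Name::new_lifetime`:
// a raw lifetime like `'r#gen` is stored as plain `'gen`.
fn normalize_lifetime(lt: &str) -> String {
    match lt.strip_prefix("'r#") {
        Some(rest) => format!("'{rest}"),
        None => lt.to_owned(),
    }
}

fn main() {
    assert_eq!(normalize_lifetime("'r#gen"), "'gen");
    assert_eq!(normalize_lifetime("'static"), "'static");
}
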
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/prettify_macro_expansion_.rs b/src/tools/rust-analyzer/crates/hir-expand/src/prettify_macro_expansion_.rs
index c744fbce77b7c..11cc434c2d826 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/prettify_macro_expansion_.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/prettify_macro_expansion_.rs
@@ -1,9 +1,9 @@
 //! Pretty printing of macros output.
 
-use base_db::CrateId;
+use base_db::Crate;
 use rustc_hash::FxHashMap;
 use syntax::NodeOrToken;
-use syntax::{ast::make, SyntaxNode};
+use syntax::{SyntaxNode, ast::make};
 
 use crate::{db::ExpandDatabase, span_map::ExpansionSpanMap};
 
@@ -13,22 +13,20 @@ pub fn prettify_macro_expansion(
     db: &dyn ExpandDatabase,
     syn: SyntaxNode,
     span_map: &ExpansionSpanMap,
-    target_crate_id: CrateId,
+    target_crate_id: Crate,
 ) -> SyntaxNode {
     // Because `syntax_bridge::prettify_macro_expansion::prettify_macro_expansion()` clones the subtree for `syn`,
     // the clone's text ranges are offset back to the beginning.
     let span_offset = syn.text_range().start();
-    let crate_graph = db.crate_graph();
-    let target_crate = &crate_graph[target_crate_id];
+    let target_crate = target_crate_id.data(db);
     let mut syntax_ctx_id_to_dollar_crate_replacement = FxHashMap::default();
     syntax_bridge::prettify_macro_expansion::prettify_macro_expansion(syn, &mut |dollar_crate| {
         let ctx = span_map.span_at(dollar_crate.text_range().start() + span_offset).ctx;
         let replacement =
             syntax_ctx_id_to_dollar_crate_replacement.entry(ctx).or_insert_with(|| {
-                let ctx_data = db.lookup_intern_syntax_context(ctx);
                 let macro_call_id =
-                    ctx_data.outer_expn.expect("`$crate` cannot come from `SyntaxContextId::ROOT`");
-                let macro_call = db.lookup_intern_macro_call(macro_call_id);
+                    ctx.outer_expn(db).expect("`$crate` cannot come from `SyntaxContextId::ROOT`");
+                let macro_call = db.lookup_intern_macro_call(macro_call_id.into());
                 let macro_def_crate = macro_call.def.krate;
                 // First, if this is the same crate as the macro, nothing will work but `crate`.
                 // If not, if the target crate has the macro's crate as a dependency, using the dependency name
@@ -42,7 +40,7 @@ pub fn prettify_macro_expansion(
                     target_crate.dependencies.iter().find(|dep| dep.crate_id == macro_def_crate)
                 {
                     make::tokens::ident(dep.name.as_str())
-                } else if let Some(crate_name) = &crate_graph[macro_def_crate].display_name {
+                } else if let Some(crate_name) = &macro_def_crate.extra_data(db).display_name {
                     make::tokens::ident(crate_name.crate_name().as_str())
                 } else {
                     return dollar_crate.clone();
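
The comments in `prettify_macro_expansion` above describe a preference order for replacing `$crate`. A standalone sketch of just that decision; the string-keyed crates and the function signature are illustrative stand-ins, not the real `Crate`/`BuiltDependency` API.

// Replacement choice for a `$crate` token, in the order the comments describe:
// same crate -> `crate`; the macro's crate is a dependency of the target
// crate -> that dependency's name; otherwise the defining crate's display
// name, or keep `$crate` untouched.
fn dollar_crate_replacement(
    target_crate: &str,
    macro_def_crate: &str,
    target_deps: &[(&str, &str)], // (crate id, name the target uses for it)
    display_name: Option<&str>,
) -> String {
    if macro_def_crate == target_crate {
        "crate".to_owned()
    } else if let Some((_, dep_name)) =
        target_deps.iter().find(|(id, _)| *id == macro_def_crate)
    {
        (*dep_name).to_owned()
    } else if let Some(name) = display_name {
        name.to_owned()
    } else {
        "$crate".to_owned()
    }
}

fn main() {
    let deps = [("serde#1", "serde")];
    assert_eq!(dollar_crate_replacement("my_crate", "my_crate", &deps, None), "crate");
    assert_eq!(dollar_crate_replacement("my_crate", "serde#1", &deps, Some("serde")), "serde");
    assert_eq!(dollar_crate_replacement("my_crate", "unknown", &[], None), "$crate");
}
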
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs
index 3dc3dcd760cd3..8a1a33d7e3b42 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs
@@ -1,24 +1,36 @@
 //! Proc Macro Expander stuff
 
 use core::fmt;
+use std::any::Any;
 use std::{panic::RefUnwindSafe, sync};
 
-use base_db::{CrateId, Env};
+use base_db::{Crate, CrateBuilderId, CratesIdMap, Env};
 use intern::Symbol;
 use rustc_hash::FxHashMap;
 use span::Span;
+use triomphe::Arc;
 
-use crate::{db::ExpandDatabase, tt, ExpandError, ExpandErrorKind, ExpandResult};
+use crate::{ExpandError, ExpandErrorKind, ExpandResult, db::ExpandDatabase, tt};
 
-#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)]
+#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug, Hash)]
 pub enum ProcMacroKind {
     CustomDerive,
     Bang,
     Attr,
 }
 
+pub trait AsAny: Any {
+    fn as_any(&self) -> &dyn Any;
+}
+
+impl<T: Any> AsAny for T {
+    fn as_any(&self) -> &dyn Any {
+        self
+    }
+}
+
 /// A proc-macro expander implementation.
-pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe {
+pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe + AsAny {
     /// Run the expander with the given input subtree, optional attribute input subtree (for
     /// [`ProcMacroKind::Attr`]), environment variables, and span information.
     fn expand(
@@ -29,10 +41,20 @@ pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe {
         def_site: Span,
         call_site: Span,
         mixed_site: Span,
-        current_dir: Option<String>,
+        current_dir: String,
     ) -> Result<tt::TopSubtree, ProcMacroExpansionError>;
+
+    fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool;
 }
 
+impl PartialEq for dyn ProcMacroExpander {
+    fn eq(&self, other: &Self) -> bool {
+        self.eq_dyn(other)
+    }
+}
+
+impl Eq for dyn ProcMacroExpander {}
+
 #[derive(Debug)]
 pub enum ProcMacroExpansionError {
     /// The proc-macro panicked.
@@ -45,41 +67,70 @@ pub type ProcMacroLoadResult = Result<Vec<ProcMacro>, (String, bool)>;
 type StoredProcMacroLoadResult = Result<Box<[ProcMacro]>, (Box<str>, bool)>;
 
 #[derive(Default, Debug)]
-pub struct ProcMacrosBuilder(FxHashMap<CrateId, StoredProcMacroLoadResult>);
+pub struct ProcMacrosBuilder(FxHashMap<CrateBuilderId, Arc<CrateProcMacros>>);
+
 impl ProcMacrosBuilder {
-    pub fn insert(&mut self, proc_macros_crate: CrateId, proc_macro: ProcMacroLoadResult) {
+    pub fn insert(
+        &mut self,
+        proc_macros_crate: CrateBuilderId,
+        mut proc_macro: ProcMacroLoadResult,
+    ) {
+        if let Ok(proc_macros) = &mut proc_macro {
+            // Sort proc macros to improve incrementality when only their order has changed (ideally the build system
+            // will not change their order, but just to be sure).
+            proc_macros.sort_unstable_by(|proc_macro, proc_macro2| {
+                (proc_macro.name.as_str(), proc_macro.kind)
+                    .cmp(&(proc_macro2.name.as_str(), proc_macro2.kind))
+            });
+        }
         self.0.insert(
             proc_macros_crate,
             match proc_macro {
-                Ok(it) => Ok(it.into_boxed_slice()),
-                Err((e, hard_err)) => Err((e.into_boxed_str(), hard_err)),
+                Ok(it) => Arc::new(CrateProcMacros(Ok(it.into_boxed_slice()))),
+                Err((e, hard_err)) => {
+                    Arc::new(CrateProcMacros(Err((e.into_boxed_str(), hard_err))))
+                }
             },
         );
     }
-    pub fn build(mut self) -> ProcMacros {
-        self.0.shrink_to_fit();
-        ProcMacros(self.0)
+
+    pub(crate) fn build(self, crates_id_map: &CratesIdMap) -> ProcMacros {
+        let mut map = self
+            .0
+            .into_iter()
+            .map(|(krate, proc_macro)| (crates_id_map[&krate], proc_macro))
+            .collect::<FxHashMap<_, _>>();
+        map.shrink_to_fit();
+        ProcMacros(map)
     }
 }
 
-#[derive(Default, Debug)]
-pub struct ProcMacros(FxHashMap<CrateId, StoredProcMacroLoadResult>);
-
-impl FromIterator<(CrateId, ProcMacroLoadResult)> for ProcMacros {
-    fn from_iter<T: IntoIterator<Item = (CrateId, ProcMacroLoadResult)>>(iter: T) -> Self {
+impl FromIterator<(CrateBuilderId, ProcMacroLoadResult)> for ProcMacrosBuilder {
+    fn from_iter<T: IntoIterator<Item = (CrateBuilderId, ProcMacroLoadResult)>>(iter: T) -> Self {
         let mut builder = ProcMacrosBuilder::default();
         for (k, v) in iter {
             builder.insert(k, v);
         }
-        builder.build()
+        builder
     }
 }
 
+#[derive(Debug, PartialEq, Eq)]
+pub struct CrateProcMacros(StoredProcMacroLoadResult);
+
+#[derive(Default, Debug)]
+pub struct ProcMacros(FxHashMap<Crate, Arc<CrateProcMacros>>);
 impl ProcMacros {
-    fn get(&self, krate: CrateId, idx: u32, err_span: Span) -> Result<&ProcMacro, ExpandError> {
-        let proc_macros = match self.0.get(&krate) {
-            Some(Ok(proc_macros)) => proc_macros,
-            Some(Err(_)) | None => {
+    fn get(&self, krate: Crate) -> Option<Arc<CrateProcMacros>> {
+        self.0.get(&krate).cloned()
+    }
+}
+
+impl CrateProcMacros {
+    fn get(&self, idx: u32, err_span: Span) -> Result<&ProcMacro, ExpandError> {
+        let proc_macros = match &self.0 {
+            Ok(proc_macros) => proc_macros,
+            Err(_) => {
                 return Err(ExpandError::other(
                     err_span,
                     "internal error: no proc macros for crate",
@@ -98,18 +149,17 @@ impl ProcMacros {
         )
     }
 
-    pub fn get_error_for_crate(&self, krate: CrateId) -> Option<(&str, bool)> {
-        self.0.get(&krate).and_then(|it| it.as_ref().err()).map(|(e, hard_err)| (&**e, *hard_err))
+    pub fn get_error(&self) -> Option<(&str, bool)> {
+        self.0.as_ref().err().map(|(e, hard_err)| (&**e, *hard_err))
     }
 
     /// Fetch the [`CustomProcMacroExpander`]s and their corresponding names for the given crate.
-    pub fn for_crate(
+    pub fn list(
         &self,
-        krate: CrateId,
-        def_site_ctx: span::SyntaxContextId,
+        def_site_ctx: span::SyntaxContext,
     ) -> Option<Box<[(crate::name::Name, CustomProcMacroExpander, bool)]>> {
-        match self.0.get(&krate) {
-            Some(Ok(proc_macros)) => Some({
+        match &self.0 {
+            Ok(proc_macros) => Some(
                 proc_macros
                     .iter()
                     .enumerate()
@@ -117,15 +167,15 @@ impl ProcMacros {
                         let name = crate::name::Name::new_symbol(it.name.clone(), def_site_ctx);
                         (name, CustomProcMacroExpander::new(idx as u32), it.disabled)
                     })
-                    .collect()
-            }),
+                    .collect(),
+            ),
             _ => None,
         }
     }
 }
 
 /// A loaded proc-macro.
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, Eq)]
 pub struct ProcMacro {
     /// The name of the proc macro.
     pub name: Symbol,
@@ -137,6 +187,23 @@ pub struct ProcMacro {
     pub disabled: bool,
 }
 
+// `#[derive(PartialEq)]` generates a strange "cannot move" error.
+impl PartialEq for ProcMacro {
+    fn eq(&self, other: &Self) -> bool {
+        let Self { name, kind, expander, disabled } = self;
+        let Self {
+            name: other_name,
+            kind: other_kind,
+            expander: other_expander,
+            disabled: other_disabled,
+        } = other;
+        name == other_name
+            && kind == other_kind
+            && expander == other_expander
+            && disabled == other_disabled
+    }
+}
+
 /// A custom proc-macro expander handle. This handle together with its crate resolves to a [`ProcMacro`]
 #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
 pub struct CustomProcMacroExpander {
@@ -186,8 +253,7 @@ impl CustomProcMacroExpander {
         self.proc_macro_id == Self::PROC_MACRO_ATTR_DISABLED
     }
 
-    /// The macro is explicitly disabled due to proc-macro attribute expansion being disabled.
-    pub fn as_expand_error(&self, def_crate: CrateId) -> Option<ExpandErrorKind> {
+    pub fn as_expand_error(&self, def_crate: Crate) -> Option<ExpandErrorKind> {
         match self.proc_macro_id {
             Self::PROC_MACRO_ATTR_DISABLED => Some(ExpandErrorKind::ProcMacroAttrExpansionDisabled),
             Self::DISABLED_ID => Some(ExpandErrorKind::MacroDisabled),
@@ -199,8 +265,8 @@ impl CustomProcMacroExpander {
     pub fn expand(
         self,
         db: &dyn ExpandDatabase,
-        def_crate: CrateId,
-        calling_crate: CrateId,
+        def_crate: Crate,
+        calling_crate: Crate,
         tt: &tt::TopSubtree,
         attr_arg: Option<&tt::TopSubtree>,
         def_site: Span,
@@ -221,8 +287,22 @@ impl CustomProcMacroExpander {
                 ExpandError::new(call_site, ExpandErrorKind::MacroDisabled),
             ),
             id => {
-                let proc_macros = db.proc_macros();
-                let proc_macro = match proc_macros.get(def_crate, id, call_site) {
+                let proc_macros = match db.proc_macros_for_crate(def_crate) {
+                    Some(it) => it,
+                    None => {
+                        return ExpandResult::new(
+                            tt::TopSubtree::empty(tt::DelimSpan {
+                                open: call_site,
+                                close: call_site,
+                            }),
+                            ExpandError::other(
+                                call_site,
+                                "internal error: no proc macros for crate",
+                            ),
+                        );
+                    }
+                };
+                let proc_macro = match proc_macros.get(id, call_site) {
                     Ok(proc_macro) => proc_macro,
                     Err(e) => {
                         return ExpandResult::new(
@@ -231,15 +311,14 @@ impl CustomProcMacroExpander {
                                 close: call_site,
                             }),
                             e,
-                        )
+                        );
                     }
                 };
 
-                let krate_graph = db.crate_graph();
                 // Proc macros have access to the environment variables of the invoking crate.
-                let env = &krate_graph[calling_crate].env;
-                let current_dir =
-                    krate_graph[calling_crate].proc_macro_cwd.as_deref().map(ToString::to_string);
+                let env = calling_crate.env(db);
+                // FIXME: Can we avoid the string allocation here?
+                let current_dir = calling_crate.data(db).proc_macro_cwd.to_string();
 
                 match proc_macro.expander.expand(
                     tt,
@@ -278,3 +357,10 @@ impl CustomProcMacroExpander {
         }
     }
 }
+
+pub(crate) fn proc_macros_for_crate(
+    db: &dyn ExpandDatabase,
+    krate: Crate,
+) -> Option<Arc<CrateProcMacros>> {
+    db.proc_macros().get(krate)
+}
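
The `AsAny` supertrait and `eq_dyn` hook above are what let `dyn ProcMacroExpander` implement `PartialEq` (needed once `ProcMacro` itself is compared). A reduced, self-contained sketch of that downcast-then-compare pattern; `Expander` and `IdentityExpander` are invented names for illustration.

use std::any::Any;

// A blanket `AsAny` impl exposes `&dyn Any`, and each expander's `eq_dyn`
// downcasts `other`, so `==` on `dyn Expander` only succeeds when both sides
// have the same concrete type and compare equal.
trait AsAny: Any {
    fn as_any(&self) -> &dyn Any;
}
impl<T: Any> AsAny for T {
    fn as_any(&self) -> &dyn Any {
        self
    }
}

trait Expander: AsAny {
    fn eq_dyn(&self, other: &dyn Expander) -> bool;
}

impl PartialEq for dyn Expander {
    fn eq(&self, other: &Self) -> bool {
        self.eq_dyn(other)
    }
}

#[derive(PartialEq)]
struct IdentityExpander {
    name: String,
}

impl Expander for IdentityExpander {
    fn eq_dyn(&self, other: &dyn Expander) -> bool {
        other.as_any().downcast_ref::<IdentityExpander>().is_some_and(|other| self == other)
    }
}

fn main() {
    let a: Box<dyn Expander> = Box::new(IdentityExpander { name: "a".into() });
    let b: Box<dyn Expander> = Box::new(IdentityExpander { name: "a".into() });
    assert!(*a == *b);
}
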
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs b/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs
index 740c27b89cea1..e5a778a95c7c9 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs
@@ -1,15 +1,15 @@
 //! Span maps for real files and macro expansions.
 
-use span::{EditionedFileId, HirFileId, HirFileIdRepr, MacroFileId, Span, SyntaxContextId};
+use span::{Span, SyntaxContext};
 use stdx::TupleExt;
-use syntax::{ast, AstNode, TextRange};
+use syntax::{AstNode, TextRange, ast};
 use triomphe::Arc;
 
 pub use span::RealSpanMap;
 
-use crate::{attrs::collect_attrs, db::ExpandDatabase};
+use crate::{HirFileId, MacroCallId, attrs::collect_attrs, db::ExpandDatabase};
 
-pub type ExpansionSpanMap = span::SpanMap<SyntaxContextId>;
+pub type ExpansionSpanMap = span::SpanMap<SyntaxContext>;
 
 /// Spanmap for a macro file or a real file
 #[derive(Clone, Debug, PartialEq, Eq)]
@@ -61,9 +61,9 @@ impl SpanMap {
 
     #[inline]
     pub(crate) fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> SpanMap {
-        match file_id.repr() {
-            HirFileIdRepr::FileId(file_id) => SpanMap::RealSpanMap(db.real_span_map(file_id)),
-            HirFileIdRepr::MacroFile(m) => {
+        match file_id {
+            HirFileId::FileId(file_id) => SpanMap::RealSpanMap(db.real_span_map(file_id)),
+            HirFileId::MacroFile(m) => {
                 SpanMap::ExpansionSpanMap(db.parse_macro_expansion(m).value.1)
             }
         }
@@ -79,11 +79,15 @@ impl SpanMapRef<'_> {
     }
 }
 
-pub(crate) fn real_span_map(db: &dyn ExpandDatabase, file_id: EditionedFileId) -> Arc<RealSpanMap> {
+pub(crate) fn real_span_map(
+    db: &dyn ExpandDatabase,
+    editioned_file_id: base_db::EditionedFileId,
+) -> Arc<RealSpanMap> {
     use syntax::ast::HasModuleItem;
     let mut pairs = vec![(syntax::TextSize::new(0), span::ROOT_ERASED_FILE_AST_ID)];
-    let ast_id_map = db.ast_id_map(file_id.into());
-    let tree = db.parse(file_id).tree();
+    let ast_id_map = db.ast_id_map(editioned_file_id.into());
+
+    let tree = db.parse(editioned_file_id).tree();
     // This is an incrementality layer. Basically we can't use absolute ranges for our spans as that
     // would mean we'd invalidate everything whenever we type. So instead we make the text ranges
     // relative to some AstIds reducing the risk of invalidation as typing somewhere no longer
@@ -134,7 +138,7 @@ pub(crate) fn real_span_map(db: &dyn ExpandDatabase, file_id: EditionedFileId) -
     });
 
     Arc::new(RealSpanMap::from_file(
-        file_id,
+        editioned_file_id.editioned_file_id(db),
         pairs.into_boxed_slice(),
         tree.syntax().text_range().end(),
     ))
@@ -142,7 +146,7 @@ pub(crate) fn real_span_map(db: &dyn ExpandDatabase, file_id: EditionedFileId) -
 
 pub(crate) fn expansion_span_map(
     db: &dyn ExpandDatabase,
-    file_id: MacroFileId,
+    file_id: MacroCallId,
 ) -> Arc<ExpansionSpanMap> {
     db.parse_macro_expansion(file_id).value.1
 }
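
The comment in `real_span_map` above ("we make the text ranges relative to some AstIds") is the incrementality trick behind the span maps: spans store a range relative to an anchor item, and lookups like `span_for_offset` add the anchor's current offset back on. A toy model of that arithmetic, with a `HashMap` standing in for the `AstIdMap` query:

use std::collections::HashMap;
use std::ops::Range;

type ErasedAstId = u32;

// A span stored relative to its anchor item, like `Span { anchor, range, .. }`.
struct AnchoredSpan {
    anchor: ErasedAstId,
    relative: Range<u32>,
}

fn absolute_range(anchor_offsets: &HashMap<ErasedAstId, u32>, span: &AnchoredSpan) -> Range<u32> {
    // Mirrors `db.ast_id_map(file_id).get_erased(anchor).text_range().start()`
    // followed by `span.range + anchor_offset` in the hunks above.
    let offset = anchor_offsets[&span.anchor];
    (span.relative.start + offset)..(span.relative.end + offset)
}

fn main() {
    let mut offsets = HashMap::from([(7, 100)]);
    let span = AnchoredSpan { anchor: 7, relative: 4..9 };
    assert_eq!(absolute_range(&offsets, &span), 104..109);

    // An edit above the anchor shifts the item by 3 bytes; the stored span
    // does not change, only the anchor offset does.
    *offsets.get_mut(&7).unwrap() += 3;
    assert_eq!(absolute_range(&offsets, &span), 107..112);
}
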
diff --git a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
index 1d12bee646c4f..69ad7703c2caf 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
@@ -12,27 +12,28 @@ rust-version.workspace = true
 [lib]
 
 [dependencies]
-cov-mark = "2.0.0-pre.1"
+cov-mark = "2.0.0"
 itertools.workspace = true
 arrayvec.workspace = true
 bitflags.workspace = true
 smallvec.workspace = true
-ena = "0.14.0"
+ena = "0.14.3"
 either.workspace = true
-oorandom = "11.1.3"
+oorandom = "11.1.5"
 tracing.workspace = true
 rustc-hash.workspace = true
-scoped-tls = "1.0.0"
+scoped-tls = "1.0.1"
 chalk-solve.workspace = true
 chalk-ir.workspace = true
 chalk-recursive.workspace = true
 chalk-derive.workspace = true
 la-arena.workspace = true
 triomphe.workspace = true
-nohash-hasher.workspace = true
-typed-arena = "2.0.1"
+typed-arena = "2.0.2"
 indexmap.workspace = true
-rustc_apfloat = "0.2.0"
+rustc_apfloat = "0.2.2"
+query-group.workspace = true
+salsa.workspace = true
 
 ra-ap-rustc_abi.workspace = true
 ra-ap-rustc_index.workspace = true
@@ -49,7 +50,7 @@ syntax.workspace = true
 span.workspace = true
 
 [dev-dependencies]
-expect-test = "1.4.0"
+expect-test = "1.5.1"
 tracing.workspace = true
 tracing-subscriber.workspace = true
 tracing-tree.workspace = true
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs
index 171ba001c4a79..711544545681d 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs
@@ -12,8 +12,8 @@ use intern::sym;
 use triomphe::Arc;
 
 use crate::{
-    db::HirDatabase, infer::unify::InferenceTable, Canonical, Goal, Interner, ProjectionTyExt,
-    TraitEnvironment, Ty, TyBuilder, TyKind,
+    Canonical, Goal, Interner, ProjectionTyExt, TraitEnvironment, Ty, TyBuilder, TyKind,
+    db::HirDatabase, infer::unify::InferenceTable,
 };
 
 const AUTODEREF_RECURSION_LIMIT: usize = 20;
@@ -209,9 +209,8 @@ pub(crate) fn deref_by_trait(
         db.lang_item(table.trait_env.krate, LangItem::Deref).and_then(|l| l.as_trait())
     };
     let trait_id = trait_id()?;
-    let target = db
-        .trait_data(trait_id)
-        .associated_type_by_name(&Name::new_symbol_root(sym::Target.clone()))?;
+    let target =
+        db.trait_items(trait_id).associated_type_by_name(&Name::new_symbol_root(sym::Target))?;
 
     let projection = {
         let b = TyBuilder::subst_for_def(db, trait_id, None);
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs b/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs
index 76d9c60f6f903..77d15a73af6ff 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs
@@ -3,21 +3,21 @@
 use std::iter;
 
 use chalk_ir::{
+    AdtId, DebruijnIndex, Scalar,
     cast::{Cast, CastTo, Caster},
     fold::TypeFoldable,
     interner::HasInterner,
-    AdtId, DebruijnIndex, Scalar,
 };
 use hir_def::{
-    builtin_type::BuiltinType, DefWithBodyId, GenericDefId, GenericParamId, TraitId, TypeAliasId,
+    DefWithBodyId, GenericDefId, GenericParamId, TraitId, TypeAliasId, builtin_type::BuiltinType,
 };
 use smallvec::SmallVec;
 
 use crate::{
-    consteval::unknown_const_as_generic, db::HirDatabase, error_lifetime, generics::generics,
-    infer::unify::InferenceTable, primitive, to_assoc_type_id, to_chalk_trait_id, Binders,
-    BoundVar, CallableSig, GenericArg, GenericArgData, Interner, ProjectionTy, Substitution,
-    TraitRef, Ty, TyDefId, TyExt, TyKind,
+    Binders, BoundVar, CallableSig, GenericArg, GenericArgData, Interner, ProjectionTy,
+    Substitution, TraitRef, Ty, TyDefId, TyExt, TyKind, consteval::unknown_const_as_generic,
+    db::HirDatabase, error_lifetime, generics::generics, infer::unify::InferenceTable, primitive,
+    to_assoc_type_id, to_chalk_trait_id,
 };
 
 #[derive(Debug, Clone, PartialEq, Eq)]
@@ -76,7 +76,7 @@ impl<D> TyBuilder<D> {
         }
         let subst = Substitution::from_iter(
             Interner,
-            self.vec.into_iter().chain(self.parent_subst.iter(Interner).cloned()),
+            self.parent_subst.iter(Interner).cloned().chain(self.vec),
         );
         (self.data, subst)
     }
@@ -209,12 +209,12 @@ impl TyBuilder<()> {
     }
 
     pub fn placeholder_subst(db: &dyn HirDatabase, def: impl Into<GenericDefId>) -> Substitution {
-        let params = generics(db.upcast(), def.into());
+        let params = generics(db, def.into());
         params.placeholder_subst(db)
     }
 
     pub fn unknown_subst(db: &dyn HirDatabase, def: impl Into<GenericDefId>) -> Substitution {
-        let params = generics(db.upcast(), def.into());
+        let params = generics(db, def.into());
         Substitution::from_iter(
             Interner,
             params.iter_id().map(|id| match id {
@@ -233,7 +233,7 @@ impl TyBuilder<()> {
         def: impl Into<GenericDefId>,
         parent_subst: Option<Substitution>,
     ) -> TyBuilder<()> {
-        let generics = generics(db.upcast(), def.into());
+        let generics = generics(db, def.into());
         assert!(generics.parent_generics().is_some() == parent_subst.is_some());
         let params = generics
             .iter_self()
@@ -259,11 +259,10 @@ impl TyBuilder<()> {
     /// This method prepopulates the builder with placeholder substitution of `parent`, so you
     /// should only push exactly 3 `GenericArg`s before building.
     pub fn subst_for_coroutine(db: &dyn HirDatabase, parent: DefWithBodyId) -> TyBuilder<()> {
-        let parent_subst = parent
-            .as_generic_def_id(db.upcast())
-            .map(|p| generics(db.upcast(), p).placeholder_subst(db));
+        let parent_subst =
+            parent.as_generic_def_id(db).map(|p| generics(db, p).placeholder_subst(db));
         // These represent resume type, yield type, and return type of coroutine.
-        let params = std::iter::repeat(ParamKind::Type).take(3).collect();
+        let params = std::iter::repeat_n(ParamKind::Type, 3).collect();
         TyBuilder::new((), params, parent_subst)
     }
 
@@ -274,13 +273,15 @@ impl TyBuilder<()> {
     ) -> Substitution {
         let sig_ty = sig_ty.cast(Interner);
         let self_subst = iter::once(&sig_ty);
-        let Some(parent) = parent.as_generic_def_id(db.upcast()) else {
+        let Some(parent) = parent.as_generic_def_id(db) else {
             return Substitution::from_iter(Interner, self_subst);
         };
         Substitution::from_iter(
             Interner,
-            self_subst
-                .chain(generics(db.upcast(), parent).placeholder_subst(db).iter(Interner))
+            generics(db, parent)
+                .placeholder_subst(db)
+                .iter(Interner)
+                .chain(self_subst)
                 .cloned()
                 .collect::<Vec<_>>(),
         )
@@ -305,29 +306,28 @@ impl TyBuilder<hir_def::AdtId> {
         // Note that we're building ADT, so we never have parent generic parameters.
         let defaults = db.generic_defaults(self.data.into());
 
-        for default_ty in &defaults[self.vec.len()..] {
-            // NOTE(skip_binders): we only check if the arg type is error type.
-            if let Some(x) = default_ty.skip_binders().ty(Interner) {
-                if x.is_unknown() {
-                    self.vec.push(fallback().cast(Interner));
-                    continue;
+        if let Some(defaults) = defaults.get(self.vec.len()..) {
+            for default_ty in defaults {
+                // NOTE(skip_binders): we only check if the arg type is error type.
+                if let Some(x) = default_ty.skip_binders().ty(Interner) {
+                    if x.is_unknown() {
+                        self.vec.push(fallback().cast(Interner));
+                        continue;
+                    }
                 }
+                // Each default can only depend on the previous parameters.
+                self.vec.push(default_ty.clone().substitute(Interner, &*self.vec).cast(Interner));
             }
-            // Each default can only depend on the previous parameters.
-            let subst_so_far = Substitution::from_iter(
-                Interner,
-                self.vec
-                    .iter()
-                    .cloned()
-                    .chain(self.param_kinds[self.vec.len()..].iter().map(|it| match it {
-                        ParamKind::Type => TyKind::Error.intern(Interner).cast(Interner),
-                        ParamKind::Lifetime => error_lifetime().cast(Interner),
-                        ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
-                    }))
-                    .take(self.param_kinds.len()),
-            );
-            self.vec.push(default_ty.clone().substitute(Interner, &subst_so_far).cast(Interner));
         }
+
+        // The defaults may be missing if no param has a default, so fill in the rest.
+        let filler = self.param_kinds[self.vec.len()..].iter().map(|x| match x {
+            ParamKind::Type => fallback().cast(Interner),
+            ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
+            ParamKind::Lifetime => error_lifetime().cast(Interner),
+        });
+        self.vec.extend(filler.casted(Interner));
+
         self
     }
 
@@ -340,7 +340,7 @@ impl TyBuilder<hir_def::AdtId> {
 pub struct Tuple(usize);
 impl TyBuilder<Tuple> {
     pub fn tuple(size: usize) -> TyBuilder<Tuple> {
-        TyBuilder::new(Tuple(size), iter::repeat(ParamKind::Type).take(size).collect(), None)
+        TyBuilder::new(Tuple(size), std::iter::repeat_n(ParamKind::Type, size).collect(), None)
     }
 
     pub fn build(self) -> Ty {
@@ -356,7 +356,7 @@ impl TyBuilder<Tuple> {
         let elements = elements.into_iter();
         let len = elements.len();
         let mut b =
-            TyBuilder::new(Tuple(len), iter::repeat(ParamKind::Type).take(len).collect(), None);
+            TyBuilder::new(Tuple(len), std::iter::repeat_n(ParamKind::Type, len).collect(), None);
         for e in elements {
             b = b.push(e);
         }
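
The rewritten default-filling code above (`defaults.get(self.vec.len()..)` plus the trailing filler) reads better in isolation. A toy model of the same three-step flow: keep the explicit arguments, apply each remaining default (which may only look at earlier arguments), then pad parameters without defaults. The integer `Arg`s and closures are stand-ins for `GenericArg` and `Binders`.

type Arg = i64;
// A default may inspect the arguments before it; `None` models the
// "error type" defaults that the real code replaces with the fallback.
type DefaultFn = fn(&[Arg]) -> Option<Arg>;

fn fill_with_defaults(
    mut args: Vec<Arg>,
    defaults: &[DefaultFn],
    param_count: usize,
    fallback: Arg,
) -> Vec<Arg> {
    // Apply the remaining defaults; each one sees only the args pushed so far.
    if let Some(defaults) = defaults.get(args.len()..) {
        for default in defaults {
            let value = default(&args).unwrap_or(fallback);
            args.push(value);
        }
    }
    // Defaults may be missing entirely (no parameter has one): pad with the fallback.
    args.resize(param_count, fallback);
    args
}

fn main() {
    // Two explicit args; the third parameter defaults to "previous arg + 1",
    // the fourth has no usable default.
    let defaults: [DefaultFn; 4] =
        [|_| None, |_| None, |prev| prev.last().map(|it| it + 1), |_| None];
    assert_eq!(fill_with_defaults(vec![10, 20], &defaults, 4, 0), [10, 20, 21, 0]);
}
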
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs
index 65fb342f75258..2aa9401eefa9e 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs
@@ -8,31 +8,33 @@ use intern::sym;
 use span::Edition;
 use tracing::debug;
 
-use chalk_ir::{cast::Caster, fold::shift::Shift, CanonicalVarKinds};
+use chalk_ir::{CanonicalVarKinds, cast::Caster, fold::shift::Shift};
 use chalk_solve::rust_ir::{self, OpaqueTyDatumBound, WellKnownTrait};
 
-use base_db::CrateId;
+use base_db::Crate;
 use hir_def::{
-    data::{adt::StructFlags, TraitFlags},
-    hir::Movability,
-    lang_item::{LangItem, LangItemTarget},
     AssocItemId, BlockId, CallableDefId, GenericDefId, HasModule, ItemContainerId, Lookup,
     TypeAliasId, VariantId,
+    hir::Movability,
+    lang_item::{LangItem, LangItemTarget},
+    signatures::{ImplFlags, StructFlags, TraitFlags},
 };
 
 use crate::{
+    AliasEq, AliasTy, BoundVar, DebruijnIndex, Interner, ProjectionTy, ProjectionTyExt,
+    QuantifiedWhereClause, Substitution, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt, TyKind,
+    WhereClause,
     db::{HirDatabase, InternedCoroutine},
     from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id,
     generics::generics,
+    lower::LifetimeElisionKind,
     make_binders, make_single_type_binders,
-    mapping::{from_chalk, ToChalk, TypeAliasAsValue},
-    method_resolution::{TraitImpls, TyFingerprint, ALL_FLOAT_FPS, ALL_INT_FPS},
+    mapping::{ToChalk, TypeAliasAsValue, from_chalk},
+    method_resolution::{ALL_FLOAT_FPS, ALL_INT_FPS, TraitImpls, TyFingerprint},
     to_assoc_type_id, to_chalk_trait_id,
     traits::ChalkContext,
     utils::ClosureSubst,
-    wrap_empty_binders, AliasEq, AliasTy, BoundVar, DebruijnIndex, FnDefId, Interner, ProjectionTy,
-    ProjectionTyExt, QuantifiedWhereClause, Substitution, TraitRef, TraitRefExt, Ty, TyBuilder,
-    TyExt, TyKind, WhereClause,
+    wrap_empty_binders,
 };
 
 pub(crate) type AssociatedTyDatum = chalk_solve::rust_ir::AssociatedTyDatum<Interner>;
@@ -52,7 +54,23 @@ pub(crate) type Variances = chalk_ir::Variances<Interner>;
 
 impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
     fn associated_ty_data(&self, id: AssocTypeId) -> Arc<AssociatedTyDatum> {
-        self.db.associated_ty_data(id)
+        self.db.associated_ty_data(from_assoc_type_id(id))
+    }
+    fn associated_ty_from_impl(
+        &self,
+        impl_id: chalk_ir::ImplId<Interner>,
+        assoc_type_id: chalk_ir::AssocTypeId<Interner>,
+    ) -> Option<rust_ir::AssociatedTyValueId<Interner>> {
+        let alias_id = from_assoc_type_id(assoc_type_id);
+        let trait_sig = self.db.type_alias_signature(alias_id);
+        self.db.impl_items(hir_def::ImplId::from_chalk(self.db, impl_id)).items.iter().find_map(
+            |(name, item)| match item {
+                AssocItemId::TypeAliasId(alias) if &trait_sig.name == name => {
+                    Some(TypeAliasAsValue(*alias).to_chalk(self.db))
+                }
+                _ => None,
+            },
+        )
     }
     fn trait_datum(&self, trait_id: TraitId) -> Arc<TraitDatum> {
         self.db.trait_datum(self.krate, trait_id)
@@ -67,7 +85,7 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
     fn discriminant_type(&self, ty: chalk_ir::Ty<Interner>) -> chalk_ir::Ty<Interner> {
         if let chalk_ir::TyKind::Adt(id, _) = ty.kind(Interner) {
             if let hir_def::AdtId::EnumId(e) = id.0 {
-                let enum_data = self.db.enum_data(e);
+                let enum_data = self.db.enum_signature(e);
                 let ty = enum_data.repr.unwrap_or_default().discr_type();
                 return chalk_ir::TyKind::Scalar(match ty {
                     hir_def::layout::IntegerType::Pointer(is_signed) => match is_signed {
@@ -104,7 +122,7 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
         &self,
         fn_def_id: chalk_ir::FnDefId<Interner>,
     ) -> Arc<rust_ir::FnDefDatum<Interner>> {
-        self.db.fn_def_datum(fn_def_id)
+        self.db.fn_def_datum(from_chalk(self.db, fn_def_id))
     }
 
     fn impls_for_trait(
@@ -137,7 +155,7 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
         let fps: &[TyFingerprint] = match binder_kind(&ty, binders) {
             Some(chalk_ir::TyVariableKind::Integer) => &ALL_INT_FPS,
             Some(chalk_ir::TyVariableKind::Float) => &ALL_FLOAT_FPS,
-            _ => self_ty_fp.as_ref().map(std::slice::from_ref).unwrap_or(&[]),
+            _ => self_ty_fp.as_slice(),
         };
 
         let id_to_chalk = |id: hir_def::ImplId| id.to_chalk(self.db);
@@ -145,19 +163,18 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
         let mut result = vec![];
         if fps.is_empty() {
             debug!("Unrestricted search for {:?} impls...", trait_);
-            let _ = self.for_trait_impls(trait_, self_ty_fp, |impls| {
+            _ = self.for_trait_impls(trait_, self_ty_fp, |impls| {
                 result.extend(impls.for_trait(trait_).map(id_to_chalk));
                 ControlFlow::Continue(())
             });
         } else {
-            let _ = self.for_trait_impls(trait_, self_ty_fp, |impls| {
-                result.extend(
-                    fps.iter().flat_map(move |fp| {
+            _ =
+                self.for_trait_impls(trait_, self_ty_fp, |impls| {
+                    result.extend(fps.iter().flat_map(move |fp| {
                         impls.for_trait_and_self_ty(trait_, *fp).map(id_to_chalk)
-                    }),
-                );
-                ControlFlow::Continue(())
-            });
+                    }));
+                    ControlFlow::Continue(())
+                });
         };
 
         debug!("impls_for_trait returned {} impls", result.len());
@@ -289,16 +306,17 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
                 chalk_ir::Binders::new(binders, bound)
             }
             crate::ImplTraitId::AsyncBlockTypeImplTrait(..) => {
-                if let Some((future_trait, future_output)) =
-                    self.db
-                        .lang_item(self.krate, LangItem::Future)
-                        .and_then(|item| item.as_trait())
-                        .and_then(|trait_| {
-                            let alias = self.db.trait_data(trait_).associated_type_by_name(
-                                &Name::new_symbol_root(sym::Output.clone()),
-                            )?;
-                            Some((trait_, alias))
-                        })
+                if let Some((future_trait, future_output)) = self
+                    .db
+                    .lang_item(self.krate, LangItem::Future)
+                    .and_then(|item| item.as_trait())
+                    .and_then(|trait_| {
+                        let alias = self
+                            .db
+                            .trait_items(trait_)
+                            .associated_type_by_name(&Name::new_symbol_root(sym::Output))?;
+                        Some((trait_, alias))
+                    })
                 {
                     // Making up Symbol’s value as variable is void: AsyncBlock<T>:
                     //
@@ -426,19 +444,19 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
 
     fn trait_name(&self, trait_id: chalk_ir::TraitId<Interner>) -> String {
         let id = from_chalk_trait_id(trait_id);
-        self.db.trait_data(id).name.display(self.db.upcast(), self.edition()).to_string()
+        self.db.trait_signature(id).name.display(self.db, self.edition()).to_string()
     }
     fn adt_name(&self, chalk_ir::AdtId(adt_id): AdtId) -> String {
         let edition = self.edition();
         match adt_id {
             hir_def::AdtId::StructId(id) => {
-                self.db.struct_data(id).name.display(self.db.upcast(), edition).to_string()
+                self.db.struct_signature(id).name.display(self.db, edition).to_string()
             }
             hir_def::AdtId::EnumId(id) => {
-                self.db.enum_data(id).name.display(self.db.upcast(), edition).to_string()
+                self.db.enum_signature(id).name.display(self.db, edition).to_string()
             }
             hir_def::AdtId::UnionId(id) => {
-                self.db.union_data(id).name.display(self.db.upcast(), edition).to_string()
+                self.db.union_signature(id).name.display(self.db, edition).to_string()
             }
         }
     }
@@ -447,14 +465,14 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
         Arc::new(rust_ir::AdtSizeAlign::from_one_zst(false))
     }
     fn assoc_type_name(&self, assoc_ty_id: chalk_ir::AssocTypeId<Interner>) -> String {
-        let id = self.db.associated_ty_data(assoc_ty_id).name;
-        self.db.type_alias_data(id).name.display(self.db.upcast(), self.edition()).to_string()
+        let id = self.db.associated_ty_data(from_assoc_type_id(assoc_ty_id)).name;
+        self.db.type_alias_signature(id).name.display(self.db, self.edition()).to_string()
     }
     fn opaque_type_name(&self, opaque_ty_id: chalk_ir::OpaqueTyId<Interner>) -> String {
-        format!("Opaque_{}", opaque_ty_id.0)
+        format!("Opaque_{:?}", opaque_ty_id.0)
     }
     fn fn_def_name(&self, fn_def_id: chalk_ir::FnDefId<Interner>) -> String {
-        format!("fn_{}", fn_def_id.0)
+        format!("fn_{:?}", fn_def_id.0)
     }
     fn coroutine_datum(
         &self,
@@ -467,12 +485,13 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
         // `resume_type`, `yield_type`, and `return_type` of the coroutine in question.
         let subst = TyBuilder::subst_for_coroutine(self.db, parent).fill_with_unknown().build();
 
+        let len = subst.len(Interner);
         let input_output = rust_ir::CoroutineInputOutputDatum {
-            resume_type: TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0))
+            resume_type: TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, len - 3))
                 .intern(Interner),
-            yield_type: TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 1))
+            yield_type: TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, len - 2))
                 .intern(Interner),
-            return_type: TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 2))
+            return_type: TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, len - 1))
                 .intern(Interner),
             // FIXME: calculate upvars
             upvars: vec![],
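Note on the hunk above: the resume/yield/return slots are no longer hard-coded at bound-variable indices 0, 1, 2; they are taken from the tail of the coroutine substitution, i.e. the code now assumes any enclosing generics come first. A minimal standalone sketch of that index arithmetic (plain Rust, not rust-analyzer's `BoundVar`/`Substitution` types):

```rust
// Hypothetical helper mirroring the arithmetic above: given the length of a
// coroutine substitution whose *last* three entries are resume/yield/return,
// return their bound-variable indices.
fn coroutine_io_indices(subst_len: usize) -> (usize, usize, usize) {
    assert!(subst_len >= 3, "substitution must at least hold resume/yield/return");
    (subst_len - 3, subst_len - 2, subst_len - 1)
}

fn main() {
    // A coroutine inside a function with two outer generic parameters:
    // [Outer0, Outer1, Resume, Yield, Return]
    assert_eq!(coroutine_io_indices(5), (2, 3, 4));
    // With no outer generics, the old indices 0, 1, 2 fall out as a special case.
    assert_eq!(coroutine_io_indices(3), (0, 1, 2));
}
```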
@@ -523,7 +542,7 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
 
 impl ChalkContext<'_> {
     fn edition(&self) -> Edition {
-        self.db.crate_graph()[self.krate].edition
+        self.krate.data(self.db).edition
     }
 
     fn for_trait_impls(
@@ -537,13 +556,13 @@ impl ChalkContext<'_> {
         // `impl_datum` relies on that and will panic if the trait can't be resolved.
         let in_deps = self.db.trait_impls_in_deps(self.krate);
         let in_self = self.db.trait_impls_in_crate(self.krate);
-        let trait_module = trait_id.module(self.db.upcast());
+        let trait_module = trait_id.module(self.db);
         let type_module = match self_ty_fp {
-            Some(TyFingerprint::Adt(adt_id)) => Some(adt_id.module(self.db.upcast())),
+            Some(TyFingerprint::Adt(adt_id)) => Some(adt_id.module(self.db)),
             Some(TyFingerprint::ForeignType(type_id)) => {
-                Some(from_foreign_def_id(type_id).module(self.db.upcast()))
+                Some(from_foreign_def_id(type_id).module(self.db))
             }
-            Some(TyFingerprint::Dyn(trait_id)) => Some(trait_id.module(self.db.upcast())),
+            Some(TyFingerprint::Dyn(trait_id)) => Some(trait_id.module(self.db)),
             _ => None,
         };
 
@@ -552,7 +571,7 @@ impl ChalkContext<'_> {
 
         let block_impls = iter::successors(self.block, |&block_id| {
             cov_mark::hit!(block_local_impls);
-            self.db.block_def_map(block_id).parent().and_then(|module| module.containing_block())
+            block_id.loc(self.db).module.containing_block()
         })
         .inspect(|&block_id| {
             // make sure we don't search the same block twice
@@ -583,17 +602,17 @@ impl chalk_ir::UnificationDatabase<Interner> for &dyn HirDatabase {
         &self,
         fn_def_id: chalk_ir::FnDefId<Interner>,
     ) -> chalk_ir::Variances<Interner> {
-        HirDatabase::fn_def_variance(*self, fn_def_id)
+        HirDatabase::fn_def_variance(*self, from_chalk(*self, fn_def_id))
     }
 
     fn adt_variance(&self, adt_id: chalk_ir::AdtId<Interner>) -> chalk_ir::Variances<Interner> {
-        HirDatabase::adt_variance(*self, adt_id)
+        HirDatabase::adt_variance(*self, adt_id.0)
     }
 }
 
 pub(crate) fn program_clauses_for_chalk_env_query(
     db: &dyn HirDatabase,
-    krate: CrateId,
+    krate: Crate,
     block: Option<BlockId>,
     environment: chalk_ir::Environment<Interner>,
 ) -> chalk_ir::ProgramClauses<Interner> {
@@ -602,28 +621,32 @@ pub(crate) fn program_clauses_for_chalk_env_query(
 
 pub(crate) fn associated_ty_data_query(
     db: &dyn HirDatabase,
-    id: AssocTypeId,
+    type_alias: TypeAliasId,
 ) -> Arc<AssociatedTyDatum> {
-    debug!("associated_ty_data {:?}", id);
-    let type_alias: TypeAliasId = from_assoc_type_id(id);
-    let trait_ = match type_alias.lookup(db.upcast()).container {
+    debug!("associated_ty_data {:?}", type_alias);
+    let trait_ = match type_alias.lookup(db).container {
         ItemContainerId::TraitId(t) => t,
         _ => panic!("associated type not in trait"),
     };
 
     // Lower bounds -- we could/should maybe move this to a separate query in `lower`
-    let type_alias_data = db.type_alias_data(type_alias);
-    let generic_params = generics(db.upcast(), type_alias.into());
-    let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db.upcast());
-    let mut ctx =
-        crate::TyLoweringContext::new(db, &resolver, &type_alias_data.types_map, type_alias.into())
-            .with_type_param_mode(crate::lower::ParamLoweringMode::Variable);
+    let type_alias_data = db.type_alias_signature(type_alias);
+    let generic_params = generics(db, type_alias.into());
+    let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db);
+    let mut ctx = crate::TyLoweringContext::new(
+        db,
+        &resolver,
+        &type_alias_data.store,
+        type_alias.into(),
+        LifetimeElisionKind::AnonymousReportError,
+    )
+    .with_type_param_mode(crate::lower::ParamLoweringMode::Variable);
 
     let trait_subst = TyBuilder::subst_for_def(db, trait_, None)
-        .fill_with_bound_vars(crate::DebruijnIndex::INNERMOST, generic_params.len_self())
+        .fill_with_bound_vars(crate::DebruijnIndex::INNERMOST, 0)
         .build();
     let pro_ty = TyBuilder::assoc_type_projection(db, type_alias, Some(trait_subst))
-        .fill_with_bound_vars(crate::DebruijnIndex::INNERMOST, 0)
+        .fill_with_bound_vars(crate::DebruijnIndex::INNERMOST, generic_params.len_self())
         .build();
     let self_ty = TyKind::Alias(AliasTy::Projection(pro_ty)).intern(Interner);
 
@@ -656,7 +679,7 @@ pub(crate) fn associated_ty_data_query(
     let bound_data = rust_ir::AssociatedTyDatumBound { bounds, where_clauses: vec![] };
     let datum = AssociatedTyDatum {
         trait_id: to_chalk_trait_id(trait_),
-        id,
+        id: to_assoc_type_id(type_alias),
         name: type_alias,
         binders: make_binders(db, &generic_params, bound_data),
     };
@@ -665,26 +688,27 @@ pub(crate) fn associated_ty_data_query(
 
 pub(crate) fn trait_datum_query(
     db: &dyn HirDatabase,
-    krate: CrateId,
+    krate: Crate,
     trait_id: TraitId,
 ) -> Arc<TraitDatum> {
     debug!("trait_datum {:?}", trait_id);
     let trait_ = from_chalk_trait_id(trait_id);
-    let trait_data = db.trait_data(trait_);
+    let trait_data = db.trait_signature(trait_);
     debug!("trait {:?} = {:?}", trait_id, trait_data.name);
-    let generic_params = generics(db.upcast(), trait_.into());
+    let generic_params = generics(db, trait_.into());
     let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST);
     let flags = rust_ir::TraitFlags {
-        auto: trait_data.flags.contains(TraitFlags::IS_AUTO),
-        upstream: trait_.lookup(db.upcast()).container.krate() != krate,
+        auto: trait_data.flags.contains(TraitFlags::AUTO),
+        upstream: trait_.lookup(db).container.krate() != krate,
         non_enumerable: true,
         coinductive: false, // only relevant for Chalk testing
         // FIXME: set these flags correctly
         marker: false,
-        fundamental: trait_data.flags.contains(TraitFlags::IS_FUNDAMENTAL),
+        fundamental: trait_data.flags.contains(TraitFlags::FUNDAMENTAL),
     };
     let where_clauses = convert_where_clauses(db, trait_.into(), &bound_vars);
-    let associated_ty_ids = trait_data.associated_types().map(to_assoc_type_id).collect();
+    let associated_ty_ids =
+        db.trait_items(trait_).associated_types().map(to_assoc_type_id).collect();
     let trait_datum_bound = rust_ir::TraitDatumBound { where_clauses };
     let well_known = db.lang_attr(trait_.into()).and_then(well_known_trait_from_lang_item);
     let trait_datum = TraitDatum {
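Note: `trait_data` is replaced here by two narrower queries, `trait_signature` for the name/flags and `trait_items` for the associated items, a split applied to structs, enums, unions, impls and type aliases throughout this diff. A hedged, self-contained sketch of the idea with made-up types (not rust-analyzer's actual query setup); the presumed benefit is that callers needing only the signature no longer depend on the item list:

```rust
// Made-up stand-ins for the split queries; in a salsa-style database each
// accessor would be a separately memoized (and separately invalidated) query.
struct TraitSignature {
    name: String,
    is_auto: bool,
}
struct TraitItems {
    assoc_types: Vec<String>,
}

struct Db {
    sig: TraitSignature,
    items: TraitItems,
}

impl Db {
    fn trait_signature(&self) -> &TraitSignature {
        &self.sig
    }
    fn trait_items(&self) -> &TraitItems {
        &self.items
    }
}

fn main() {
    let db = Db {
        sig: TraitSignature { name: "Future".into(), is_auto: false },
        items: TraitItems { assoc_types: vec!["Output".into()] },
    };
    // A name or flag lookup touches only the signature...
    assert_eq!(db.trait_signature().name, "Future");
    assert!(!db.trait_signature().is_auto);
    // ...while enumerating associated types touches only the items.
    assert_eq!(db.trait_items().assoc_types, ["Output"]);
}
```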
@@ -750,35 +774,32 @@ fn lang_item_from_well_known_trait(trait_: WellKnownTrait) -> LangItem {
 
 pub(crate) fn adt_datum_query(
     db: &dyn HirDatabase,
-    krate: CrateId,
+    krate: Crate,
     chalk_ir::AdtId(adt_id): AdtId,
 ) -> Arc<AdtDatum> {
     debug!("adt_datum {:?}", adt_id);
-    let generic_params = generics(db.upcast(), adt_id.into());
+    let generic_params = generics(db, adt_id.into());
     let bound_vars_subst = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST);
     let where_clauses = convert_where_clauses(db, adt_id.into(), &bound_vars_subst);
 
     let (fundamental, phantom_data) = match adt_id {
         hir_def::AdtId::StructId(s) => {
-            let flags = db.struct_data(s).flags;
-            (
-                flags.contains(StructFlags::IS_FUNDAMENTAL),
-                flags.contains(StructFlags::IS_PHANTOM_DATA),
-            )
+            let flags = db.struct_signature(s).flags;
+            (flags.contains(StructFlags::FUNDAMENTAL), flags.contains(StructFlags::IS_PHANTOM_DATA))
         }
         // FIXME set fundamental flags correctly
         hir_def::AdtId::UnionId(_) => (false, false),
         hir_def::AdtId::EnumId(_) => (false, false),
     };
     let flags = rust_ir::AdtFlags {
-        upstream: adt_id.module(db.upcast()).krate() != krate,
+        upstream: adt_id.module(db).krate() != krate,
         fundamental,
         phantom_data,
     };
 
     // this slows down rust-analyzer by quite a bit unfortunately, so enabling this is currently not worth it
     let _variant_id_to_fields = |id: VariantId| {
-        let variant_data = &id.variant_data(db.upcast());
+        let variant_data = &id.variant_data(db);
         let fields = if variant_data.fields().is_empty() {
             vec![]
         } else {
@@ -800,7 +821,7 @@ pub(crate) fn adt_datum_query(
         }
         hir_def::AdtId::EnumId(id) => {
             let variants = db
-                .enum_data(id)
+                .enum_variants(id)
                 .variants
                 .iter()
                 .map(|&(variant_id, _)| variant_id_to_fields(variant_id.into()))
@@ -824,7 +845,7 @@ pub(crate) fn adt_datum_query(
 
 pub(crate) fn impl_datum_query(
     db: &dyn HirDatabase,
-    krate: CrateId,
+    krate: Crate,
     impl_id: ImplId,
 ) -> Arc<ImplDatum> {
     let _p = tracing::info_span!("impl_datum_query").entered();
@@ -833,35 +854,31 @@ pub(crate) fn impl_datum_query(
     impl_def_datum(db, krate, impl_)
 }
 
-fn impl_def_datum(
-    db: &dyn HirDatabase,
-    krate: CrateId,
-    impl_id: hir_def::ImplId,
-) -> Arc<ImplDatum> {
+fn impl_def_datum(db: &dyn HirDatabase, krate: Crate, impl_id: hir_def::ImplId) -> Arc<ImplDatum> {
     let trait_ref = db
         .impl_trait(impl_id)
         // ImplIds for impls where the trait ref can't be resolved should never reach Chalk
         .expect("invalid impl passed to Chalk")
         .into_value_and_skipped_binders()
         .0;
-    let impl_data = db.impl_data(impl_id);
+    let impl_data = db.impl_signature(impl_id);
 
-    let generic_params = generics(db.upcast(), impl_id.into());
+    let generic_params = generics(db, impl_id.into());
     let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST);
     let trait_ = trait_ref.hir_trait_id();
-    let impl_type = if impl_id.lookup(db.upcast()).container.krate() == krate {
+    let impl_type = if impl_id.lookup(db).container.krate() == krate {
         rust_ir::ImplType::Local
     } else {
         rust_ir::ImplType::External
     };
     let where_clauses = convert_where_clauses(db, impl_id.into(), &bound_vars);
-    let negative = impl_data.is_negative;
-
+    let negative = impl_data.flags.contains(ImplFlags::NEGATIVE);
     let polarity = if negative { rust_ir::Polarity::Negative } else { rust_ir::Polarity::Positive };
 
     let impl_datum_bound = rust_ir::ImplDatumBound { trait_ref, where_clauses };
-    let trait_data = db.trait_data(trait_);
-    let associated_ty_value_ids = impl_data
+    let trait_data = db.trait_items(trait_);
+    let associated_ty_value_ids = db
+        .impl_items(impl_id)
         .items
         .iter()
         .filter_map(|(_, item)| match item {
@@ -870,7 +887,7 @@ fn impl_def_datum(
         })
         .filter(|&type_alias| {
             // don't include associated types that don't exist in the trait
-            let name = &db.type_alias_data(type_alias).name;
+            let name = &db.type_alias_signature(type_alias).name;
             trait_data.associated_type_by_name(name).is_some()
         })
         .map(|type_alias| TypeAliasAsValue(type_alias).to_chalk(db))
@@ -887,7 +904,7 @@ fn impl_def_datum(
 
 pub(crate) fn associated_ty_value_query(
     db: &dyn HirDatabase,
-    krate: CrateId,
+    krate: Crate,
     id: AssociatedTyValueId,
 ) -> Arc<AssociatedTyValue> {
     let type_alias: TypeAliasAsValue = from_chalk(db, id);
@@ -896,11 +913,11 @@ pub(crate) fn associated_ty_value_query(
 
 fn type_alias_associated_ty_value(
     db: &dyn HirDatabase,
-    _krate: CrateId,
+    _krate: Crate,
     type_alias: TypeAliasId,
 ) -> Arc<AssociatedTyValue> {
-    let type_alias_data = db.type_alias_data(type_alias);
-    let impl_id = match type_alias.lookup(db.upcast()).container {
+    let type_alias_data = db.type_alias_signature(type_alias);
+    let impl_id = match type_alias.lookup(db).container {
         ItemContainerId::ImplId(it) => it,
         _ => panic!("assoc ty value should be in impl"),
     };
@@ -912,7 +929,7 @@ fn type_alias_associated_ty_value(
         .0; // we don't return any assoc ty values if the impl'd trait can't be resolved
 
     let assoc_ty = db
-        .trait_data(trait_ref.hir_trait_id())
+        .trait_items(trait_ref.hir_trait_id())
         .associated_type_by_name(&type_alias_data.name)
         .expect("assoc ty value should not exist"); // validated when building the impl data as well
     let (ty, binders) = db.ty(type_alias.into()).into_value_and_skipped_binders();
@@ -925,10 +942,12 @@ fn type_alias_associated_ty_value(
     Arc::new(value)
 }
 
-pub(crate) fn fn_def_datum_query(db: &dyn HirDatabase, fn_def_id: FnDefId) -> Arc<FnDefDatum> {
-    let callable_def: CallableDefId = from_chalk(db, fn_def_id);
-    let generic_def = GenericDefId::from_callable(db.upcast(), callable_def);
-    let generic_params = generics(db.upcast(), generic_def);
+pub(crate) fn fn_def_datum_query(
+    db: &dyn HirDatabase,
+    callable_def: CallableDefId,
+) -> Arc<FnDefDatum> {
+    let generic_def = GenericDefId::from_callable(db, callable_def);
+    let generic_params = generics(db, generic_def);
     let (sig, binders) = db.callable_item_signature(callable_def).into_value_and_skipped_binders();
     let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST);
     let where_clauses = convert_where_clauses(db, generic_def, &bound_vars);
@@ -945,7 +964,7 @@ pub(crate) fn fn_def_datum_query(db: &dyn HirDatabase, fn_def_id: FnDefId) -> Ar
         where_clauses,
     };
     let datum = FnDefDatum {
-        id: fn_def_id,
+        id: callable_def.to_chalk(db),
         sig: chalk_ir::FnSig {
             abi: sig.abi,
             safety: chalk_ir::Safety::Safe,
@@ -956,11 +975,13 @@ pub(crate) fn fn_def_datum_query(db: &dyn HirDatabase, fn_def_id: FnDefId) -> Ar
     Arc::new(datum)
 }
 
-pub(crate) fn fn_def_variance_query(db: &dyn HirDatabase, fn_def_id: FnDefId) -> Variances {
-    let callable_def: CallableDefId = from_chalk(db, fn_def_id);
+pub(crate) fn fn_def_variance_query(
+    db: &dyn HirDatabase,
+    callable_def: CallableDefId,
+) -> Variances {
     Variances::from_iter(
         Interner,
-        db.variances_of(GenericDefId::from_callable(db.upcast(), callable_def))
+        db.variances_of(GenericDefId::from_callable(db, callable_def))
             .as_deref()
             .unwrap_or_default()
             .iter()
@@ -973,10 +994,7 @@ pub(crate) fn fn_def_variance_query(db: &dyn HirDatabase, fn_def_id: FnDefId) ->
     )
 }
 
-pub(crate) fn adt_variance_query(
-    db: &dyn HirDatabase,
-    chalk_ir::AdtId(adt_id): AdtId,
-) -> Variances {
+pub(crate) fn adt_variance_query(db: &dyn HirDatabase, adt_id: hir_def::AdtId) -> Variances {
     Variances::from_iter(
         Interner,
         db.variances_of(adt_id.into()).as_deref().unwrap_or_default().iter().map(|v| match v {
@@ -1026,10 +1044,10 @@ pub(super) fn generic_predicate_to_inline_bound(
             Some(chalk_ir::Binders::new(binders, rust_ir::InlineBound::TraitBound(trait_bound)))
         }
         WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection_ty), ty }) => {
-            let generics =
-                generics(db.upcast(), from_assoc_type_id(projection_ty.associated_ty_id).into());
-            let (assoc_args, trait_args) =
-                projection_ty.substitution.as_slice(Interner).split_at(generics.len_self());
+            let generics = generics(db, from_assoc_type_id(projection_ty.associated_ty_id).into());
+            let parent_len = generics.parent_generics().map_or(0, |g| g.len_self());
+            let (trait_args, assoc_args) =
+                projection_ty.substitution.as_slice(Interner).split_at(parent_len);
             let (self_ty, args_no_self) =
                 trait_args.split_first().expect("projection without trait self type");
             if self_ty.assert_ty_ref(Interner) != &self_ty_shifted_in {
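Note: the flipped `split_at` above is the visible effect of a reordering of projection substitutions: the parent trait's arguments (including `Self`) now come first, followed by the associated type's own arguments, where previously the associated type's own arguments led. The same convention shows up in `ProjectionTyExt::trait_ref` below, which now does `take(parent_len)` instead of `skip(len_self)`. A small standalone illustration using plain slices rather than rust-analyzer's `Substitution`:

```rust
// Parent-first ordering: the first `parent_len` entries belong to the trait,
// the remainder to the associated type itself.
fn split_projection_args<'a>(
    args: &'a [&'a str],
    parent_len: usize,
) -> (&'a [&'a str], &'a [&'a str]) {
    args.split_at(parent_len)
}

fn main() {
    // Hypothetical generic-associated-type projection `<T as Tr>::Assoc<'a>`:
    // one trait argument (`Self = T`) followed by one argument belonging to
    // the associated type itself (`'a`).
    let args = ["Self = T", "'a"];
    let (trait_args, assoc_args) = split_projection_args(&args, 1);
    assert_eq!(trait_args, ["Self = T"]);
    assert_eq!(assoc_args, ["'a"]);
}
```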
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs
index 51c178b90d72b..0f0cf6ae7ae6d 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs
@@ -1,22 +1,22 @@
 //! Various extensions traits for Chalk types.
 
 use chalk_ir::{
-    cast::Cast, FloatTy, IntTy, Mutability, Scalar, TyVariableKind, TypeOutlives, UintTy,
+    FloatTy, IntTy, Mutability, Scalar, TyVariableKind, TypeOutlives, UintTy, cast::Cast,
 };
 use hir_def::{
+    DefWithBodyId, FunctionId, GenericDefId, HasModule, ItemContainerId, Lookup, TraitId,
     builtin_type::{BuiltinFloat, BuiltinInt, BuiltinType, BuiltinUint},
-    generics::TypeOrConstParamData,
+    hir::generics::{TypeOrConstParamData, TypeParamProvenance},
     lang_item::LangItem,
     type_ref::Rawness,
-    DefWithBodyId, FunctionId, GenericDefId, HasModule, ItemContainerId, Lookup, TraitId,
 };
 
 use crate::{
-    db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id,
-    from_placeholder_idx, generics::generics, to_chalk_trait_id, utils::ClosureSubst, AdtId,
-    AliasEq, AliasTy, Binders, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, ClosureId,
-    DynTy, FnPointer, ImplTraitId, InEnvironment, Interner, Lifetime, ProjectionTy,
+    AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, Canonical, CanonicalVarKinds,
+    ClosureId, DynTy, FnPointer, ImplTraitId, InEnvironment, Interner, Lifetime, ProjectionTy,
     QuantifiedWhereClause, Substitution, TraitRef, Ty, TyBuilder, TyKind, TypeFlags, WhereClause,
+    db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id,
+    from_placeholder_idx, generics::generics, to_chalk_trait_id, utils::ClosureSubst,
 };
 
 pub trait TyExt {
@@ -191,7 +191,7 @@ impl TyExt for Ty {
         match *self.kind(Interner) {
             TyKind::Adt(AdtId(adt), ..) => Some(adt.into()),
             TyKind::FnDef(callable, ..) => Some(GenericDefId::from_callable(
-                db.upcast(),
+                db,
                 db.lookup_intern_callable_def(callable.into()),
             )),
             TyKind::AssociatedType(type_alias, ..) => Some(from_assoc_type_id(type_alias).into()),
@@ -250,7 +250,7 @@ impl TyExt for Ty {
             TyKind::OpaqueType(opaque_ty_id, subst) => {
                 match db.lookup_intern_impl_trait_id((*opaque_ty_id).into()) {
                     ImplTraitId::AsyncBlockTypeImplTrait(def, _expr) => {
-                        let krate = def.module(db.upcast()).krate();
+                        let krate = def.module(db).krate();
                         if let Some(future_trait) =
                             db.lang_item(krate, LangItem::Future).and_then(|item| item.as_trait())
                         {
@@ -314,7 +314,7 @@ impl TyExt for Ty {
                 let param_data = &generic_params[id.local_id];
                 match param_data {
                     TypeOrConstParamData::TypeParamData(p) => match p.provenance {
-                        hir_def::generics::TypeParamProvenance::ArgumentImplTrait => {
+                        TypeParamProvenance::ArgumentImplTrait => {
                             let substs = TyBuilder::placeholder_subst(db, id.parent);
                             let predicates = db
                                 .generic_predicates(id.parent)
@@ -348,17 +348,12 @@ impl TyExt for Ty {
 
     fn associated_type_parent_trait(&self, db: &dyn HirDatabase) -> Option<TraitId> {
         match self.kind(Interner) {
-            TyKind::AssociatedType(id, ..) => {
-                match from_assoc_type_id(*id).lookup(db.upcast()).container {
-                    ItemContainerId::TraitId(trait_id) => Some(trait_id),
-                    _ => None,
-                }
-            }
+            TyKind::AssociatedType(id, ..) => match from_assoc_type_id(*id).lookup(db).container {
+                ItemContainerId::TraitId(trait_id) => Some(trait_id),
+                _ => None,
+            },
             TyKind::Alias(AliasTy::Projection(projection_ty)) => {
-                match from_assoc_type_id(projection_ty.associated_ty_id)
-                    .lookup(db.upcast())
-                    .container
-                {
+                match from_assoc_type_id(projection_ty.associated_ty_id).lookup(db).container {
                     ItemContainerId::TraitId(trait_id) => Some(trait_id),
                     _ => None,
                 }
@@ -368,7 +363,7 @@ impl TyExt for Ty {
     }
 
     fn is_copy(self, db: &dyn HirDatabase, owner: DefWithBodyId) -> bool {
-        let crate_id = owner.module(db.upcast()).krate();
+        let crate_id = owner.module(db).krate();
         let Some(copy_trait) = db.lang_item(crate_id, LangItem::Copy).and_then(|it| it.as_trait())
         else {
             return false;
@@ -422,16 +417,15 @@ pub trait ProjectionTyExt {
 impl ProjectionTyExt for ProjectionTy {
     fn trait_ref(&self, db: &dyn HirDatabase) -> TraitRef {
         // FIXME: something like `Split` trait from chalk-solve might be nice.
-        let generics = generics(db.upcast(), from_assoc_type_id(self.associated_ty_id).into());
-        let substitution = Substitution::from_iter(
-            Interner,
-            self.substitution.iter(Interner).skip(generics.len_self()),
-        );
+        let generics = generics(db, from_assoc_type_id(self.associated_ty_id).into());
+        let parent_len = generics.parent_generics().map_or(0, |g| g.len_self());
+        let substitution =
+            Substitution::from_iter(Interner, self.substitution.iter(Interner).take(parent_len));
         TraitRef { trait_id: to_chalk_trait_id(self.trait_(db)), substitution }
     }
 
     fn trait_(&self, db: &dyn HirDatabase) -> TraitId {
-        match from_assoc_type_id(self.associated_ty_id).lookup(db.upcast()).container {
+        match from_assoc_type_id(self.associated_ty_id).lookup(db).container {
             ItemContainerId::TraitId(it) => it,
             _ => panic!("projection ty without parent trait"),
         }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
index fb604569f439d..d1a1e135ffffa 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
@@ -1,27 +1,25 @@
 //! Constant evaluation details
 
-use base_db::{ra_salsa::Cycle, CrateId};
-use chalk_ir::{cast::Cast, BoundVar, DebruijnIndex};
+use base_db::Crate;
+use chalk_ir::{BoundVar, DebruijnIndex, cast::Cast};
 use hir_def::{
-    expr_store::{Body, HygieneId},
+    EnumVariantId, GeneralConstId, HasModule as _, StaticId,
+    expr_store::{Body, HygieneId, path::Path},
     hir::{Expr, ExprId},
-    path::Path,
     resolver::{Resolver, ValueNs},
     type_ref::LiteralConstRef,
-    ConstBlockLoc, EnumVariantId, GeneralConstId, HasModule as _, StaticId,
 };
 use hir_expand::Lookup;
 use stdx::never;
 use triomphe::Arc;
 
 use crate::{
-    db::HirDatabase, display::DisplayTarget, generics::Generics, infer::InferenceContext,
-    lower::ParamLoweringMode, mir::monomorphize_mir_body_bad, to_placeholder_idx, Const, ConstData,
-    ConstScalar, ConstValue, GenericArg, Interner, MemoryMap, Substitution, TraitEnvironment, Ty,
-    TyBuilder,
+    Const, ConstData, ConstScalar, ConstValue, GenericArg, Interner, MemoryMap, Substitution,
+    TraitEnvironment, Ty, TyBuilder, db::HirDatabase, display::DisplayTarget, generics::Generics,
+    infer::InferenceContext, lower::ParamLoweringMode, to_placeholder_idx,
 };
 
-use super::mir::{interpret_mir, lower_to_mir, pad16, MirEvalError, MirLowerError};
+use super::mir::{MirEvalError, MirLowerError, interpret_mir, lower_to_mir, pad16};
 
 /// Extension trait for [`Const`]
 pub trait ConstExt {
@@ -96,11 +94,11 @@ pub(crate) fn path_to_const<'g>(
     resolver: &Resolver,
     path: &Path,
     mode: ParamLoweringMode,
-    args: impl FnOnce() -> Option<&'g Generics>,
+    args: impl FnOnce() -> &'g Generics,
     debruijn: DebruijnIndex,
     expected_ty: Ty,
 ) -> Option<Const> {
-    match resolver.resolve_path_in_value_ns_fully(db.upcast(), path, HygieneId::ROOT) {
+    match resolver.resolve_path_in_value_ns_fully(db, path, HygieneId::ROOT) {
         Some(ValueNs::GenericParam(p)) => {
             let ty = db.const_param_ty(p);
             let value = match mode {
@@ -109,7 +107,7 @@ pub(crate) fn path_to_const<'g>(
                 }
                 ParamLoweringMode::Variable => {
                     let args = args();
-                    match args.and_then(|args| args.type_or_const_param_idx(p.into())) {
+                    match args.type_or_const_param_idx(p.into()) {
                         Some(it) => ConstValue::BoundVar(BoundVar::new(debruijn, it)),
                         None => {
                             never!(
@@ -157,17 +155,17 @@ pub fn intern_const_ref(
     db: &dyn HirDatabase,
     value: &LiteralConstRef,
     ty: Ty,
-    krate: CrateId,
+    krate: Crate,
 ) -> Const {
-    let layout = db.layout_of_ty(ty.clone(), TraitEnvironment::empty(krate));
+    let layout = || db.layout_of_ty(ty.clone(), TraitEnvironment::empty(krate));
     let bytes = match value {
         LiteralConstRef::Int(i) => {
             // FIXME: We should handle failure of layout better.
-            let size = layout.map(|it| it.size.bytes_usize()).unwrap_or(16);
+            let size = layout().map(|it| it.size.bytes_usize()).unwrap_or(16);
             ConstScalar::Bytes(i.to_le_bytes()[0..size].into(), MemoryMap::default())
         }
         LiteralConstRef::UInt(i) => {
-            let size = layout.map(|it| it.size.bytes_usize()).unwrap_or(16);
+            let size = layout().map(|it| it.size.bytes_usize()).unwrap_or(16);
             ConstScalar::Bytes(i.to_le_bytes()[0..size].into(), MemoryMap::default())
         }
         LiteralConstRef::Bool(b) => ConstScalar::Bytes(Box::new([*b as u8]), MemoryMap::default()),
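Note: wrapping `layout` in a closure above makes the potentially expensive `layout_of_ty` call lazy, so it only runs in the integer arms that actually need a byte size. A tiny hedged sketch of the pattern, independent of rust-analyzer's layout machinery:

```rust
use std::cell::Cell;

// Only integer literals need a size, so the expensive computation is passed as
// a closure and invoked on demand, with 16 bytes as the fallback (as above).
fn literal_size(is_int: bool, layout: impl Fn() -> Option<usize>) -> usize {
    if is_int { layout().unwrap_or(16) } else { 0 }
}

fn main() {
    let calls = Cell::new(0);
    let layout = || {
        calls.set(calls.get() + 1);
        Some(4)
    };
    assert_eq!(literal_size(true, &layout), 4);
    assert_eq!(literal_size(false, &layout), 0);
    // The layout closure ran only for the integer case.
    assert_eq!(calls.get(), 1);
}
```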
@@ -180,7 +178,7 @@ pub fn intern_const_ref(
 }
 
 /// Interns a possibly-unknown target usize
-pub fn usize_const(db: &dyn HirDatabase, value: Option<u128>, krate: CrateId) -> Const {
+pub fn usize_const(db: &dyn HirDatabase, value: Option<u128>, krate: Crate) -> Const {
     intern_const_ref(
         db,
         &value.map_or(LiteralConstRef::Unknown, LiteralConstRef::UInt),
@@ -221,28 +219,25 @@ pub fn try_const_isize(db: &dyn HirDatabase, c: &Const) -> Option<i128> {
     }
 }
 
-pub(crate) fn const_eval_recover(
+pub(crate) fn const_eval_cycle_result(
     _: &dyn HirDatabase,
-    _: &Cycle,
-    _: &GeneralConstId,
-    _: &Substitution,
-    _: &Option<Arc<TraitEnvironment>>,
+    _: GeneralConstId,
+    _: Substitution,
+    _: Option<Arc<TraitEnvironment>>,
 ) -> Result<Const, ConstEvalError> {
     Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
 }
 
-pub(crate) fn const_eval_static_recover(
+pub(crate) fn const_eval_static_cycle_result(
     _: &dyn HirDatabase,
-    _: &Cycle,
-    _: &StaticId,
+    _: StaticId,
 ) -> Result<Const, ConstEvalError> {
     Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
 }
 
-pub(crate) fn const_eval_discriminant_recover(
+pub(crate) fn const_eval_discriminant_cycle_result(
     _: &dyn HirDatabase,
-    _: &Cycle,
-    _: &EnumVariantId,
+    _: EnumVariantId,
 ) -> Result<i128, ConstEvalError> {
     Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
 }
@@ -258,21 +253,9 @@ pub(crate) fn const_eval_query(
             db.monomorphized_mir_body(c.into(), subst, db.trait_environment(c.into()))?
         }
         GeneralConstId::StaticId(s) => {
-            let krate = s.module(db.upcast()).krate();
+            let krate = s.module(db).krate();
             db.monomorphized_mir_body(s.into(), subst, TraitEnvironment::empty(krate))?
         }
-        GeneralConstId::ConstBlockId(c) => {
-            let ConstBlockLoc { parent, root } = db.lookup_intern_anonymous_const(c);
-            let body = db.body(parent);
-            let infer = db.infer(parent);
-            Arc::new(monomorphize_mir_body_bad(
-                db,
-                lower_to_mir(db, parent, &body, &infer, root)?,
-                subst,
-                db.trait_environment_for_body(parent),
-            )?)
-        }
-        GeneralConstId::InTypeConstId(c) => db.mir_body(c.into())?,
     };
     let c = interpret_mir(db, body, false, trait_env)?.0?;
     Ok(c)
@@ -297,13 +280,13 @@ pub(crate) fn const_eval_discriminant_variant(
 ) -> Result<i128, ConstEvalError> {
     let def = variant_id.into();
     let body = db.body(def);
-    let loc = variant_id.lookup(db.upcast());
+    let loc = variant_id.lookup(db);
     if body.exprs[body.body_expr] == Expr::Missing {
         let prev_idx = loc.index.checked_sub(1);
         let value = match prev_idx {
             Some(prev_idx) => {
                 1 + db.const_eval_discriminant(
-                    db.enum_data(loc.parent).variants[prev_idx as usize].0,
+                    db.enum_variants(loc.parent).variants[prev_idx as usize].0,
                 )?
             }
             _ => 0,
@@ -311,7 +294,7 @@ pub(crate) fn const_eval_discriminant_variant(
         return Ok(value);
     }
 
-    let repr = db.enum_data(loc.parent).repr;
+    let repr = db.enum_signature(loc.parent).repr;
     let is_signed = repr.and_then(|repr| repr.int).is_none_or(|int| int.is_signed());
 
     let mir_body = db.monomorphized_mir_body(
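Note: the fallback above encodes the discriminant rule the query relies on: a variant with no explicit value gets the previous variant's value plus one, the first variant defaults to zero, and the enum's `repr` decides signedness. A quick self-contained check of that rule in plain Rust:

```rust
enum E {
    A,     // no explicit value: the first variant defaults to 0
    B = 5, // explicit discriminant
    C,     // no explicit value: previous + 1 = 6
}

fn main() {
    assert_eq!(E::A as i128, 0);
    assert_eq!(E::B as i128, 5);
    assert_eq!(E::C as i128, 6);
}
```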
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
index 26a3b7022976f..6449a4dc7e8c6 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
@@ -1,17 +1,17 @@
-use base_db::SourceDatabase;
+use base_db::RootQueryDb;
 use chalk_ir::Substitution;
 use hir_def::db::DefDatabase;
+use hir_expand::EditionedFileId;
 use rustc_apfloat::{
-    ieee::{Half as f16, Quad as f128},
     Float,
+    ieee::{Half as f16, Quad as f128},
 };
-use span::EditionedFileId;
 use test_fixture::WithFixture;
 use test_utils::skip_slow_tests;
 
 use crate::{
-    consteval::try_const_usize, db::HirDatabase, display::DisplayTarget, mir::pad16,
-    test_db::TestDB, Const, ConstScalar, Interner, MemoryMap,
+    Const, ConstScalar, Interner, MemoryMap, consteval::try_const_usize, db::HirDatabase,
+    display::DisplayTarget, mir::pad16, test_db::TestDB,
 };
 
 use super::{
@@ -101,10 +101,8 @@ fn check_answer(
 fn pretty_print_err(e: ConstEvalError, db: TestDB) -> String {
     let mut err = String::new();
     let span_formatter = |file, range| format!("{file:?} {range:?}");
-    let display_target = DisplayTarget::from_crate(
-        &db,
-        *db.crate_graph().crates_in_topological_order().last().unwrap(),
-    );
+    let display_target =
+        DisplayTarget::from_crate(&db, *db.all_crates().last().expect("no crate graph present"));
     match e {
         ConstEvalError::MirLowerError(e) => {
             e.pretty_print(&mut err, &db, span_formatter, display_target)
@@ -118,14 +116,14 @@ fn pretty_print_err(e: ConstEvalError, db: TestDB) -> String {
 }
 
 fn eval_goal(db: &TestDB, file_id: EditionedFileId) -> Result<Const, ConstEvalError> {
-    let module_id = db.module_for_file(file_id.file_id());
+    let module_id = db.module_for_file(file_id.file_id(db));
     let def_map = module_id.def_map(db);
     let scope = &def_map[module_id.local_id].scope;
     let const_id = scope
         .declarations()
         .find_map(|x| match x {
             hir_def::ModuleDefId::ConstId(x) => {
-                if db.const_data(x).name.as_ref()?.display(db, file_id.edition()).to_string()
+                if db.const_signature(x).name.as_ref()?.display(db, file_id.edition(db)).to_string()
                     == "GOAL"
                 {
                     Some(x)
@@ -2460,6 +2458,8 @@ fn extern_weak_statics() {
 }
 
 #[test]
+// FIXME
+#[should_panic]
 fn from_ne_bytes() {
     check_number(
         r#"
@@ -2536,6 +2536,8 @@ fn const_transfer_memory() {
 }
 
 #[test]
+// FIXME
+#[should_panic]
 fn anonymous_const_block() {
     check_number(
         r#"
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
index 76031491d9a07..c24ef16b4969c 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
@@ -3,23 +3,22 @@
 
 use std::sync;
 
-use base_db::{
-    impl_intern_key,
-    ra_salsa::{self, InternValueTrivial},
-    CrateId, Upcast,
-};
+use base_db::{Crate, impl_intern_key};
 use hir_def::{
-    db::DefDatabase, hir::ExprId, layout::TargetDataLayout, AdtId, BlockId, CallableDefId,
-    ConstParamId, DefWithBodyId, EnumVariantId, FunctionId, GeneralConstId, GenericDefId, ImplId,
-    LifetimeParamId, LocalFieldId, StaticId, TraitId, TypeAliasId, TypeOrConstParamId, VariantId,
+    AdtId, BlockId, CallableDefId, ConstParamId, DefWithBodyId, EnumVariantId, FunctionId,
+    GeneralConstId, GenericDefId, ImplId, LifetimeParamId, LocalFieldId, StaticId, TraitId,
+    TypeAliasId, TypeOrConstParamId, VariantId, db::DefDatabase, hir::ExprId,
+    layout::TargetDataLayout,
 };
 use hir_expand::name::Name;
 use la_arena::ArenaMap;
+use salsa::plumbing::AsId;
 use smallvec::SmallVec;
 use triomphe::Arc;
 
 use crate::{
-    chalk_db,
+    Binders, Const, ImplTraitId, ImplTraits, InferenceResult, Interner, PolyFnSig, Substitution,
+    TraitEnvironment, TraitRef, Ty, TyDefId, ValueTyDefId, chalk_db,
     consteval::ConstEvalError,
     drop::DropGlue,
     dyn_compatibility::DynCompatibilityViolation,
@@ -27,26 +26,24 @@ use crate::{
     lower::{Diagnostics, GenericDefaults, GenericPredicates},
     method_resolution::{InherentImpls, TraitImpls, TyFingerprint},
     mir::{BorrowckResult, MirBody, MirLowerError},
-    Binders, ClosureId, Const, FnDefId, ImplTraitId, ImplTraits, InferenceResult, Interner,
-    PolyFnSig, Substitution, TraitEnvironment, TraitRef, Ty, TyDefId, ValueTyDefId,
 };
 
-#[ra_salsa::query_group(HirDatabaseStorage)]
-pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
-    #[ra_salsa::invoke(crate::infer::infer_query)]
+#[query_group::query_group]
+pub trait HirDatabase: DefDatabase + std::fmt::Debug {
+    #[salsa::invoke(crate::infer::infer_query)]
     fn infer(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
 
     // region:mir
 
-    #[ra_salsa::invoke(crate::mir::mir_body_query)]
-    #[ra_salsa::cycle(crate::mir::mir_body_recover)]
+    #[salsa::invoke(crate::mir::mir_body_query)]
+    #[salsa::cycle(cycle_result = crate::mir::mir_body_cycle_result)]
     fn mir_body(&self, def: DefWithBodyId) -> Result<Arc<MirBody>, MirLowerError>;
 
-    #[ra_salsa::invoke(crate::mir::mir_body_for_closure_query)]
-    fn mir_body_for_closure(&self, def: ClosureId) -> Result<Arc<MirBody>, MirLowerError>;
+    #[salsa::invoke(crate::mir::mir_body_for_closure_query)]
+    fn mir_body_for_closure(&self, def: InternedClosureId) -> Result<Arc<MirBody>, MirLowerError>;
 
-    #[ra_salsa::invoke(crate::mir::monomorphized_mir_body_query)]
-    #[ra_salsa::cycle(crate::mir::monomorphized_mir_body_recover)]
+    #[salsa::invoke(crate::mir::monomorphized_mir_body_query)]
+    #[salsa::cycle(cycle_result = crate::mir::monomorphized_mir_body_cycle_result)]
     fn monomorphized_mir_body(
         &self,
         def: DefWithBodyId,
@@ -54,20 +51,20 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
         env: Arc<TraitEnvironment>,
     ) -> Result<Arc<MirBody>, MirLowerError>;
 
-    #[ra_salsa::invoke(crate::mir::monomorphized_mir_body_for_closure_query)]
+    #[salsa::invoke(crate::mir::monomorphized_mir_body_for_closure_query)]
     fn monomorphized_mir_body_for_closure(
         &self,
-        def: ClosureId,
+        def: InternedClosureId,
         subst: Substitution,
         env: Arc<TraitEnvironment>,
     ) -> Result<Arc<MirBody>, MirLowerError>;
 
-    #[ra_salsa::invoke(crate::mir::borrowck_query)]
-    #[ra_salsa::lru]
+    #[salsa::invoke(crate::mir::borrowck_query)]
+    #[salsa::lru(2024)]
     fn borrowck(&self, def: DefWithBodyId) -> Result<Arc<[BorrowckResult]>, MirLowerError>;
 
-    #[ra_salsa::invoke(crate::consteval::const_eval_query)]
-    #[ra_salsa::cycle(crate::consteval::const_eval_recover)]
+    #[salsa::invoke(crate::consteval::const_eval_query)]
+    #[salsa::cycle(cycle_result = crate::consteval::const_eval_cycle_result)]
     fn const_eval(
         &self,
         def: GeneralConstId,
@@ -75,15 +72,15 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
         trait_env: Option<Arc<TraitEnvironment>>,
     ) -> Result<Const, ConstEvalError>;
 
-    #[ra_salsa::invoke(crate::consteval::const_eval_static_query)]
-    #[ra_salsa::cycle(crate::consteval::const_eval_static_recover)]
+    #[salsa::invoke(crate::consteval::const_eval_static_query)]
+    #[salsa::cycle(cycle_result = crate::consteval::const_eval_static_cycle_result)]
     fn const_eval_static(&self, def: StaticId) -> Result<Const, ConstEvalError>;
 
-    #[ra_salsa::invoke(crate::consteval::const_eval_discriminant_variant)]
-    #[ra_salsa::cycle(crate::consteval::const_eval_discriminant_recover)]
+    #[salsa::invoke(crate::consteval::const_eval_discriminant_variant)]
+    #[salsa::cycle(cycle_result = crate::consteval::const_eval_discriminant_cycle_result)]
     fn const_eval_discriminant(&self, def: EnumVariantId) -> Result<i128, ConstEvalError>;
 
-    #[ra_salsa::invoke(crate::method_resolution::lookup_impl_method_query)]
+    #[salsa::invoke(crate::method_resolution::lookup_impl_method_query)]
     fn lookup_impl_method(
         &self,
         env: Arc<TraitEnvironment>,
@@ -93,8 +90,8 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
 
     // endregion:mir
 
-    #[ra_salsa::invoke(crate::layout::layout_of_adt_query)]
-    #[ra_salsa::cycle(crate::layout::layout_of_adt_recover)]
+    #[salsa::invoke(crate::layout::layout_of_adt_query)]
+    #[salsa::cycle(cycle_result = crate::layout::layout_of_adt_cycle_result)]
     fn layout_of_adt(
         &self,
         def: AdtId,
@@ -102,63 +99,73 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
         env: Arc<TraitEnvironment>,
     ) -> Result<Arc<Layout>, LayoutError>;
 
-    #[ra_salsa::invoke(crate::layout::layout_of_ty_query)]
-    #[ra_salsa::cycle(crate::layout::layout_of_ty_recover)]
+    #[salsa::invoke(crate::layout::layout_of_ty_query)]
+    #[salsa::cycle(cycle_result = crate::layout::layout_of_ty_cycle_result)]
     fn layout_of_ty(&self, ty: Ty, env: Arc<TraitEnvironment>) -> Result<Arc<Layout>, LayoutError>;
 
-    #[ra_salsa::invoke(crate::layout::target_data_layout_query)]
-    fn target_data_layout(&self, krate: CrateId) -> Result<Arc<TargetDataLayout>, Arc<str>>;
+    #[salsa::invoke(crate::layout::target_data_layout_query)]
+    fn target_data_layout(&self, krate: Crate) -> Result<Arc<TargetDataLayout>, Arc<str>>;
 
-    #[ra_salsa::invoke(crate::dyn_compatibility::dyn_compatibility_of_trait_query)]
+    #[salsa::invoke(crate::dyn_compatibility::dyn_compatibility_of_trait_query)]
     fn dyn_compatibility_of_trait(&self, trait_: TraitId) -> Option<DynCompatibilityViolation>;
 
-    #[ra_salsa::invoke(crate::lower::ty_query)]
-    #[ra_salsa::cycle(crate::lower::ty_recover)]
+    #[salsa::invoke(crate::lower::ty_query)]
+    #[salsa::transparent]
     fn ty(&self, def: TyDefId) -> Binders<Ty>;
 
-    #[ra_salsa::invoke(crate::lower::type_for_type_alias_with_diagnostics_query)]
+    #[salsa::invoke(crate::lower::type_for_type_alias_with_diagnostics_query)]
+    #[salsa::cycle(cycle_result = crate::lower::type_for_type_alias_with_diagnostics_cycle_result)]
     fn type_for_type_alias_with_diagnostics(&self, def: TypeAliasId) -> (Binders<Ty>, Diagnostics);
 
     /// Returns the type of the value of the given constant, or `None` if the `ValueTyDefId` is
     /// a `StructId` or `EnumVariantId` with a record constructor.
-    #[ra_salsa::invoke(crate::lower::value_ty_query)]
+    #[salsa::invoke(crate::lower::value_ty_query)]
     fn value_ty(&self, def: ValueTyDefId) -> Option<Binders<Ty>>;
 
-    #[ra_salsa::invoke(crate::lower::impl_self_ty_with_diagnostics_query)]
-    #[ra_salsa::cycle(crate::lower::impl_self_ty_with_diagnostics_recover)]
+    #[salsa::invoke(crate::lower::impl_self_ty_with_diagnostics_query)]
+    #[salsa::cycle(cycle_result = crate::lower::impl_self_ty_with_diagnostics_cycle_result)]
     fn impl_self_ty_with_diagnostics(&self, def: ImplId) -> (Binders<Ty>, Diagnostics);
-    #[ra_salsa::invoke(crate::lower::impl_self_ty_query)]
+
+    #[salsa::invoke(crate::lower::impl_self_ty_query)]
+    #[salsa::transparent]
     fn impl_self_ty(&self, def: ImplId) -> Binders<Ty>;
 
-    #[ra_salsa::invoke(crate::lower::const_param_ty_with_diagnostics_query)]
+    // FIXME: Make this a non-interned query.
+    #[salsa::invoke_interned(crate::lower::const_param_ty_with_diagnostics_query)]
     fn const_param_ty_with_diagnostics(&self, def: ConstParamId) -> (Ty, Diagnostics);
-    #[ra_salsa::invoke(crate::lower::const_param_ty_query)]
+
+    #[salsa::invoke(crate::lower::const_param_ty_query)]
+    #[salsa::transparent]
     fn const_param_ty(&self, def: ConstParamId) -> Ty;
 
-    #[ra_salsa::invoke(crate::lower::impl_trait_with_diagnostics_query)]
+    #[salsa::invoke(crate::lower::impl_trait_with_diagnostics_query)]
     fn impl_trait_with_diagnostics(&self, def: ImplId) -> Option<(Binders<TraitRef>, Diagnostics)>;
-    #[ra_salsa::invoke(crate::lower::impl_trait_query)]
+
+    #[salsa::invoke(crate::lower::impl_trait_query)]
+    #[salsa::transparent]
     fn impl_trait(&self, def: ImplId) -> Option<Binders<TraitRef>>;
 
-    #[ra_salsa::invoke(crate::lower::field_types_with_diagnostics_query)]
+    #[salsa::invoke(crate::lower::field_types_with_diagnostics_query)]
     fn field_types_with_diagnostics(
         &self,
         var: VariantId,
     ) -> (Arc<ArenaMap<LocalFieldId, Binders<Ty>>>, Diagnostics);
-    #[ra_salsa::invoke(crate::lower::field_types_query)]
+
+    #[salsa::invoke(crate::lower::field_types_query)]
+    #[salsa::transparent]
     fn field_types(&self, var: VariantId) -> Arc<ArenaMap<LocalFieldId, Binders<Ty>>>;
 
-    #[ra_salsa::invoke(crate::lower::callable_item_sig)]
+    #[salsa::invoke(crate::lower::callable_item_signature_query)]
     fn callable_item_signature(&self, def: CallableDefId) -> PolyFnSig;
 
-    #[ra_salsa::invoke(crate::lower::return_type_impl_traits)]
+    #[salsa::invoke(crate::lower::return_type_impl_traits)]
     fn return_type_impl_traits(&self, def: FunctionId) -> Option<Arc<Binders<ImplTraits>>>;
 
-    #[ra_salsa::invoke(crate::lower::type_alias_impl_traits)]
+    #[salsa::invoke(crate::lower::type_alias_impl_traits)]
     fn type_alias_impl_traits(&self, def: TypeAliasId) -> Option<Arc<Binders<ImplTraits>>>;
 
-    #[ra_salsa::invoke(crate::lower::generic_predicates_for_param_query)]
-    #[ra_salsa::cycle(crate::lower::generic_predicates_for_param_recover)]
+    #[salsa::invoke(crate::lower::generic_predicates_for_param_query)]
+    #[salsa::cycle(cycle_result = crate::lower::generic_predicates_for_param_cycle_result)]
     fn generic_predicates_for_param(
         &self,
         def: GenericDefId,
@@ -166,150 +173,155 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
         assoc_name: Option<Name>,
     ) -> GenericPredicates;
 
-    #[ra_salsa::invoke(crate::lower::generic_predicates_query)]
+    #[salsa::invoke(crate::lower::generic_predicates_query)]
     fn generic_predicates(&self, def: GenericDefId) -> GenericPredicates;
 
-    #[ra_salsa::invoke(crate::lower::generic_predicates_without_parent_with_diagnostics_query)]
+    #[salsa::invoke(crate::lower::generic_predicates_without_parent_with_diagnostics_query)]
     fn generic_predicates_without_parent_with_diagnostics(
         &self,
         def: GenericDefId,
     ) -> (GenericPredicates, Diagnostics);
-    #[ra_salsa::invoke(crate::lower::generic_predicates_without_parent_query)]
+
+    #[salsa::invoke(crate::lower::generic_predicates_without_parent_query)]
+    #[salsa::transparent]
     fn generic_predicates_without_parent(&self, def: GenericDefId) -> GenericPredicates;
 
-    #[ra_salsa::invoke(crate::lower::trait_environment_for_body_query)]
-    #[ra_salsa::transparent]
+    #[salsa::invoke(crate::lower::trait_environment_for_body_query)]
+    #[salsa::transparent]
     fn trait_environment_for_body(&self, def: DefWithBodyId) -> Arc<TraitEnvironment>;
 
-    #[ra_salsa::invoke(crate::lower::trait_environment_query)]
+    #[salsa::invoke(crate::lower::trait_environment_query)]
     fn trait_environment(&self, def: GenericDefId) -> Arc<TraitEnvironment>;
 
-    #[ra_salsa::invoke(crate::lower::generic_defaults_with_diagnostics_query)]
-    #[ra_salsa::cycle(crate::lower::generic_defaults_with_diagnostics_recover)]
+    #[salsa::invoke(crate::lower::generic_defaults_with_diagnostics_query)]
+    #[salsa::cycle(cycle_result = crate::lower::generic_defaults_with_diagnostics_cycle_result)]
     fn generic_defaults_with_diagnostics(
         &self,
         def: GenericDefId,
     ) -> (GenericDefaults, Diagnostics);
-    #[ra_salsa::invoke(crate::lower::generic_defaults_query)]
+
+    /// This returns an empty list if no parameter has a default.
+    ///
+    /// The binders of the returned defaults are only up to (not including) this parameter.
+    #[salsa::invoke(crate::lower::generic_defaults_query)]
+    #[salsa::transparent]
     fn generic_defaults(&self, def: GenericDefId) -> GenericDefaults;
 
-    #[ra_salsa::invoke(InherentImpls::inherent_impls_in_crate_query)]
-    fn inherent_impls_in_crate(&self, krate: CrateId) -> Arc<InherentImpls>;
+    #[salsa::invoke(InherentImpls::inherent_impls_in_crate_query)]
+    fn inherent_impls_in_crate(&self, krate: Crate) -> Arc<InherentImpls>;
 
-    #[ra_salsa::invoke(InherentImpls::inherent_impls_in_block_query)]
+    #[salsa::invoke(InherentImpls::inherent_impls_in_block_query)]
     fn inherent_impls_in_block(&self, block: BlockId) -> Option<Arc<InherentImpls>>;
 
     /// Collects all crates in the dependency graph that have impls for the
     /// given fingerprint. This is only used for primitive types and types
     /// annotated with `rustc_has_incoherent_inherent_impls`; for other types
     /// we just look at the crate where the type is defined.
-    #[ra_salsa::invoke(crate::method_resolution::incoherent_inherent_impl_crates)]
+    #[salsa::invoke(crate::method_resolution::incoherent_inherent_impl_crates)]
     fn incoherent_inherent_impl_crates(
         &self,
-        krate: CrateId,
+        krate: Crate,
         fp: TyFingerprint,
-    ) -> SmallVec<[CrateId; 2]>;
+    ) -> SmallVec<[Crate; 2]>;
 
-    #[ra_salsa::invoke(TraitImpls::trait_impls_in_crate_query)]
-    fn trait_impls_in_crate(&self, krate: CrateId) -> Arc<TraitImpls>;
+    #[salsa::invoke(TraitImpls::trait_impls_in_crate_query)]
+    fn trait_impls_in_crate(&self, krate: Crate) -> Arc<TraitImpls>;
 
-    #[ra_salsa::invoke(TraitImpls::trait_impls_in_block_query)]
+    #[salsa::invoke(TraitImpls::trait_impls_in_block_query)]
     fn trait_impls_in_block(&self, block: BlockId) -> Option<Arc<TraitImpls>>;
 
-    #[ra_salsa::invoke(TraitImpls::trait_impls_in_deps_query)]
-    fn trait_impls_in_deps(&self, krate: CrateId) -> Arc<[Arc<TraitImpls>]>;
+    #[salsa::invoke(TraitImpls::trait_impls_in_deps_query)]
+    fn trait_impls_in_deps(&self, krate: Crate) -> Arc<[Arc<TraitImpls>]>;
 
     // Interned IDs for Chalk integration
-    #[ra_salsa::interned]
+    #[salsa::interned]
     fn intern_callable_def(&self, callable_def: CallableDefId) -> InternedCallableDefId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_type_or_const_param_id(
         &self,
         param_id: TypeOrConstParamId,
     ) -> InternedTypeOrConstParamId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_lifetime_param_id(&self, param_id: LifetimeParamId) -> InternedLifetimeParamId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_impl_trait_id(&self, id: ImplTraitId) -> InternedOpaqueTyId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_closure(&self, id: InternedClosure) -> InternedClosureId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_coroutine(&self, id: InternedCoroutine) -> InternedCoroutineId;
 
-    #[ra_salsa::invoke(chalk_db::associated_ty_data_query)]
-    fn associated_ty_data(
-        &self,
-        id: chalk_db::AssocTypeId,
-    ) -> sync::Arc<chalk_db::AssociatedTyDatum>;
+    #[salsa::invoke(chalk_db::associated_ty_data_query)]
+    fn associated_ty_data(&self, id: TypeAliasId) -> sync::Arc<chalk_db::AssociatedTyDatum>;
 
-    #[ra_salsa::invoke(chalk_db::trait_datum_query)]
+    #[salsa::invoke(chalk_db::trait_datum_query)]
     fn trait_datum(
         &self,
-        krate: CrateId,
+        krate: Crate,
         trait_id: chalk_db::TraitId,
     ) -> sync::Arc<chalk_db::TraitDatum>;
 
-    #[ra_salsa::invoke(chalk_db::adt_datum_query)]
-    fn adt_datum(
-        &self,
-        krate: CrateId,
-        struct_id: chalk_db::AdtId,
-    ) -> sync::Arc<chalk_db::AdtDatum>;
+    #[salsa::invoke(chalk_db::adt_datum_query)]
+    fn adt_datum(&self, krate: Crate, struct_id: chalk_db::AdtId) -> sync::Arc<chalk_db::AdtDatum>;
 
-    #[ra_salsa::invoke(chalk_db::impl_datum_query)]
-    fn impl_datum(
-        &self,
-        krate: CrateId,
-        impl_id: chalk_db::ImplId,
-    ) -> sync::Arc<chalk_db::ImplDatum>;
+    #[salsa::invoke(chalk_db::impl_datum_query)]
+    fn impl_datum(&self, krate: Crate, impl_id: chalk_db::ImplId)
+    -> sync::Arc<chalk_db::ImplDatum>;
 
-    #[ra_salsa::invoke(chalk_db::fn_def_datum_query)]
-    fn fn_def_datum(&self, fn_def_id: FnDefId) -> sync::Arc<chalk_db::FnDefDatum>;
+    #[salsa::invoke(chalk_db::fn_def_datum_query)]
+    fn fn_def_datum(&self, fn_def_id: CallableDefId) -> sync::Arc<chalk_db::FnDefDatum>;
 
-    #[ra_salsa::invoke(chalk_db::fn_def_variance_query)]
-    fn fn_def_variance(&self, fn_def_id: FnDefId) -> chalk_db::Variances;
+    #[salsa::invoke(chalk_db::fn_def_variance_query)]
+    fn fn_def_variance(&self, fn_def_id: CallableDefId) -> chalk_db::Variances;
 
-    #[ra_salsa::invoke(chalk_db::adt_variance_query)]
-    fn adt_variance(&self, adt_id: chalk_db::AdtId) -> chalk_db::Variances;
+    #[salsa::invoke(chalk_db::adt_variance_query)]
+    fn adt_variance(&self, adt_id: AdtId) -> chalk_db::Variances;
 
-    #[ra_salsa::invoke(crate::variance::variances_of)]
-    #[ra_salsa::cycle(crate::variance::variances_of_cycle)]
+    #[salsa::invoke(crate::variance::variances_of)]
+    #[salsa::cycle(
+        cycle_fn = crate::variance::variances_of_cycle_fn,
+        cycle_initial = crate::variance::variances_of_cycle_initial,
+    )]
     fn variances_of(&self, def: GenericDefId) -> Option<Arc<[crate::variance::Variance]>>;
 
-    #[ra_salsa::invoke(chalk_db::associated_ty_value_query)]
+    #[salsa::invoke(chalk_db::associated_ty_value_query)]
     fn associated_ty_value(
         &self,
-        krate: CrateId,
+        krate: Crate,
         id: chalk_db::AssociatedTyValueId,
     ) -> sync::Arc<chalk_db::AssociatedTyValue>;
 
-    #[ra_salsa::invoke(crate::traits::normalize_projection_query)]
-    #[ra_salsa::transparent]
+    #[salsa::invoke(crate::traits::normalize_projection_query)]
+    #[salsa::transparent]
     fn normalize_projection(
         &self,
         projection: crate::ProjectionTy,
         env: Arc<TraitEnvironment>,
     ) -> Ty;
 
-    #[ra_salsa::invoke(crate::traits::trait_solve_query)]
+    #[salsa::invoke(crate::traits::trait_solve_query)]
     fn trait_solve(
         &self,
-        krate: CrateId,
+        krate: Crate,
         block: Option<BlockId>,
         goal: crate::Canonical<crate::InEnvironment<crate::Goal>>,
     ) -> Option<crate::Solution>;
 
-    #[ra_salsa::invoke(chalk_db::program_clauses_for_chalk_env_query)]
+    #[salsa::invoke(chalk_db::program_clauses_for_chalk_env_query)]
     fn program_clauses_for_chalk_env(
         &self,
-        krate: CrateId,
+        krate: Crate,
         block: Option<BlockId>,
         env: chalk_ir::Environment<Interner>,
     ) -> chalk_ir::ProgramClauses<Interner>;
 
-    #[ra_salsa::invoke(crate::drop::has_drop_glue)]
-    #[ra_salsa::cycle(crate::drop::has_drop_glue_recover)]
-    fn has_drop_glue(&self, ty: Ty, env: Arc<TraitEnvironment>) -> DropGlue;
+    #[salsa::invoke(crate::drop::has_drop_glue)]
+    #[salsa::cycle(cycle_result = crate::drop::has_drop_glue_cycle_result)]
+    fn has_drop_glue(&self, ty: Ty, env: Arc<TraitEnvironment>) -> DropGlue;
 }
 
 #[test]
@@ -317,41 +329,22 @@ fn hir_database_is_dyn_compatible() {
     fn _assert_dyn_compatible(_: &dyn HirDatabase) {}
 }
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct InternedTypeOrConstParamId(ra_salsa::InternId);
-impl_intern_key!(InternedTypeOrConstParamId);
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct InternedLifetimeParamId(ra_salsa::InternId);
-impl_intern_key!(InternedLifetimeParamId);
+impl_intern_key!(InternedTypeOrConstParamId, TypeOrConstParamId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct InternedConstParamId(ra_salsa::InternId);
-impl_intern_key!(InternedConstParamId);
+impl_intern_key!(InternedLifetimeParamId, LifetimeParamId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct InternedOpaqueTyId(ra_salsa::InternId);
-impl_intern_key!(InternedOpaqueTyId);
+impl_intern_key!(InternedConstParamId, ConstParamId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct InternedClosureId(ra_salsa::InternId);
-impl_intern_key!(InternedClosureId);
+impl_intern_key!(InternedOpaqueTyId, ImplTraitId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
 pub struct InternedClosure(pub DefWithBodyId, pub ExprId);
+impl_intern_key!(InternedClosureId, InternedClosure);
 
-impl InternValueTrivial for InternedClosure {}
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct InternedCoroutineId(ra_salsa::InternId);
-impl_intern_key!(InternedCoroutineId);
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
 pub struct InternedCoroutine(pub DefWithBodyId, pub ExprId);
-impl InternValueTrivial for InternedCoroutine {}
+impl_intern_key!(InternedCoroutineId, InternedCoroutine);
 
-/// This exists just for Chalk, because Chalk just has a single `FnDefId` where
-/// we have different IDs for struct and enum variant constructors.
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
-pub struct InternedCallableDefId(ra_salsa::InternId);
-impl_intern_key!(InternedCallableDefId);
+// This exists just for Chalk, because Chalk just has a single `FnDefId` where
+// we have different IDs for struct and enum variant constructors.
+impl_intern_key!(InternedCallableDefId, CallableDefId);
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs
index 845d333335365..047a348fb09a7 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs
@@ -5,12 +5,12 @@ mod match_check;
 mod unsafe_check;
 
 pub use crate::diagnostics::{
-    decl_check::{incorrect_case, CaseType, IncorrectCase},
+    decl_check::{CaseType, IncorrectCase, incorrect_case},
     expr::{
-        record_literal_missing_fields, record_pattern_missing_fields, BodyValidationDiagnostic,
+        BodyValidationDiagnostic, record_literal_missing_fields, record_pattern_missing_fields,
     },
     unsafe_check::{
-        missing_unsafe, unsafe_operations, unsafe_operations_for_body, InsideUnsafeBlock,
-        UnsafetyReason,
+        InsideUnsafeBlock, UnsafetyReason, missing_unsafe, unsafe_operations,
+        unsafe_operations_for_body,
     },
 };
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
index 774991560e9ca..099100a73288d 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
@@ -16,20 +16,20 @@ mod case_conv;
 use std::fmt;
 
 use hir_def::{
-    data::adt::VariantData, db::DefDatabase, hir::Pat, src::HasSource, AdtId, ConstId, EnumId,
-    EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup, ModuleDefId, ModuleId, StaticId,
-    StructId, TraitId, TypeAliasId,
+    AdtId, ConstId, EnumId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup,
+    ModuleDefId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, db::DefDatabase, hir::Pat,
+    item_tree::FieldsShape, signatures::StaticFlags, src::HasSource,
 };
 use hir_expand::{
+    HirFileId,
     name::{AsName, Name},
-    HirFileId, HirFileIdExt,
 };
 use intern::sym;
 use stdx::{always, never};
 use syntax::{
+    AstNode, AstPtr, ToSmolStr,
     ast::{self, HasName},
     utils::is_raw_identifier,
-    AstNode, AstPtr, ToSmolStr,
 };
 
 use crate::db::HirDatabase;
@@ -154,7 +154,7 @@ impl<'a> DeclValidator<'a> {
 
     fn validate_module(&mut self, module_id: ModuleId) {
         // Check the module name.
-        let Some(module_name) = module_id.name(self.db.upcast()) else { return };
+        let Some(module_name) = module_id.name(self.db) else { return };
         let Some(module_name_replacement) =
             to_lower_snake_case(module_name.as_str()).map(|new_name| Replacement {
                 current_name: module_name,
@@ -164,8 +164,8 @@ impl<'a> DeclValidator<'a> {
         else {
             return;
         };
-        let module_data = &module_id.def_map(self.db.upcast())[module_id.local_id];
-        let Some(module_src) = module_data.declaration_source(self.db.upcast()) else {
+        let module_data = &module_id.def_map(self.db)[module_id.local_id];
+        let Some(module_src) = module_data.declaration_source(self.db) else {
             return;
         };
         self.create_incorrect_case_diagnostic_for_ast_node(
@@ -178,7 +178,7 @@ impl<'a> DeclValidator<'a> {
 
     fn validate_trait(&mut self, trait_id: TraitId) {
         // Check the trait name.
-        let data = self.db.trait_data(trait_id);
+        let data = self.db.trait_signature(trait_id);
         self.create_incorrect_case_diagnostic_for_item_name(
             trait_id,
             &data.name,
@@ -188,7 +188,7 @@ impl<'a> DeclValidator<'a> {
     }
 
     fn validate_func(&mut self, func: FunctionId) {
-        let container = func.lookup(self.db.upcast()).container;
+        let container = func.lookup(self.db).container;
         if matches!(container, ItemContainerId::ExternBlockId(_)) {
             cov_mark::hit!(extern_func_incorrect_case_ignored);
             return;
@@ -197,11 +197,11 @@ impl<'a> DeclValidator<'a> {
         // Check the function name.
         // Skipped if function is an associated item of a trait implementation.
         if !self.is_trait_impl_container(container) {
-            let data = self.db.function_data(func);
+            let data = self.db.function_signature(func);
 
             // Don't run the lint on extern "[not Rust]" fn items with the
             // #[no_mangle] attribute.
-            let no_mangle = self.db.attrs(func.into()).by_key(&sym::no_mangle).exists();
+            let no_mangle = self.db.attrs(func.into()).by_key(sym::no_mangle).exists();
             if no_mangle && data.abi.as_ref().is_some_and(|abi| *abi != sym::Rust) {
                 cov_mark::hit!(extern_func_no_mangle_ignored);
             } else {
@@ -251,7 +251,7 @@ impl<'a> DeclValidator<'a> {
             return;
         }
 
-        let (_, source_map) = self.db.body_with_source_map(func.into());
+        let source_map = self.db.body_with_source_map(func.into()).1;
         for (id, replacement) in pats_replacements {
             let Ok(source_ptr) = source_map.pat_syntax(id) else {
                 continue;
@@ -259,7 +259,7 @@ impl<'a> DeclValidator<'a> {
             let Some(ptr) = source_ptr.value.cast::<ast::IdentPat>() else {
                 continue;
             };
-            let root = source_ptr.file_syntax(self.db.upcast());
+            let root = source_ptr.file_syntax(self.db);
             let ident_pat = ptr.to_node(&root);
             let Some(parent) = ident_pat.syntax().parent() else {
                 continue;
@@ -287,13 +287,13 @@ impl<'a> DeclValidator<'a> {
     }
 
     fn edition(&self, id: impl HasModule) -> span::Edition {
-        let krate = id.krate(self.db.upcast());
-        self.db.crate_graph()[krate].edition
+        let krate = id.krate(self.db);
+        krate.data(self.db).edition
     }
 
     fn validate_struct(&mut self, struct_id: StructId) {
         // Check the structure name.
-        let data = self.db.struct_data(struct_id);
+        let data = self.db.struct_signature(struct_id);
         self.create_incorrect_case_diagnostic_for_item_name(
             struct_id,
             &data.name,
@@ -307,12 +307,13 @@ impl<'a> DeclValidator<'a> {
 
     /// Check incorrect names for struct fields.
     fn validate_struct_fields(&mut self, struct_id: StructId) {
-        let data = self.db.struct_data(struct_id);
-        let VariantData::Record { fields, .. } = data.variant_data.as_ref() else {
+        let data = self.db.variant_fields(struct_id.into());
+        if data.shape != FieldsShape::Record {
             return;
         };
         let edition = self.edition(struct_id);
-        let mut struct_fields_replacements = fields
+        let mut struct_fields_replacements = data
+            .fields()
             .iter()
             .filter_map(|(_, field)| {
                 to_lower_snake_case(&field.name.display_no_db(edition).to_smolstr()).map(
@@ -330,8 +331,8 @@ impl<'a> DeclValidator<'a> {
             return;
         }
 
-        let struct_loc = struct_id.lookup(self.db.upcast());
-        let struct_src = struct_loc.source(self.db.upcast());
+        let struct_loc = struct_id.lookup(self.db);
+        let struct_src = struct_loc.source(self.db);
 
         let Some(ast::FieldList::RecordFieldList(struct_fields_list)) =
             struct_src.value.field_list()
@@ -378,7 +379,7 @@ impl<'a> DeclValidator<'a> {
     }
 
     fn validate_enum(&mut self, enum_id: EnumId) {
-        let data = self.db.enum_data(enum_id);
+        let data = self.db.enum_signature(enum_id);
 
         // Check the enum name.
         self.create_incorrect_case_diagnostic_for_item_name(
@@ -394,7 +395,7 @@ impl<'a> DeclValidator<'a> {
 
     /// Check incorrect names for enum variants.
     fn validate_enum_variants(&mut self, enum_id: EnumId) {
-        let data = self.db.enum_data(enum_id);
+        let data = self.db.enum_variants(enum_id);
 
         for (variant_id, _) in data.variants.iter() {
             self.validate_enum_variant_fields(*variant_id);
@@ -420,8 +421,8 @@ impl<'a> DeclValidator<'a> {
             return;
         }
 
-        let enum_loc = enum_id.lookup(self.db.upcast());
-        let enum_src = enum_loc.source(self.db.upcast());
+        let enum_loc = enum_id.lookup(self.db);
+        let enum_src = enum_loc.source(self.db);
 
         let Some(enum_variants_list) = enum_src.value.variant_list() else {
             always!(
@@ -467,12 +468,13 @@ impl<'a> DeclValidator<'a> {
 
     /// Check incorrect names for fields of enum variant.
     fn validate_enum_variant_fields(&mut self, variant_id: EnumVariantId) {
-        let variant_data = self.db.enum_variant_data(variant_id);
-        let VariantData::Record { fields, .. } = variant_data.variant_data.as_ref() else {
+        let variant_data = self.db.variant_fields(variant_id.into());
+        if variant_data.shape != FieldsShape::Record {
             return;
         };
         let edition = self.edition(variant_id);
-        let mut variant_field_replacements = fields
+        let mut variant_field_replacements = variant_data
+            .fields()
             .iter()
             .filter_map(|(_, field)| {
                 to_lower_snake_case(&field.name.display_no_db(edition).to_smolstr()).map(
@@ -490,8 +492,8 @@ impl<'a> DeclValidator<'a> {
             return;
         }
 
-        let variant_loc = variant_id.lookup(self.db.upcast());
-        let variant_src = variant_loc.source(self.db.upcast());
+        let variant_loc = variant_id.lookup(self.db);
+        let variant_src = variant_loc.source(self.db);
 
         let Some(ast::FieldList::RecordFieldList(variant_fields_list)) =
             variant_src.value.field_list()
@@ -538,13 +540,13 @@ impl<'a> DeclValidator<'a> {
     }
 
     fn validate_const(&mut self, const_id: ConstId) {
-        let container = const_id.lookup(self.db.upcast()).container;
+        let container = const_id.lookup(self.db).container;
         if self.is_trait_impl_container(container) {
             cov_mark::hit!(trait_impl_assoc_const_incorrect_case_ignored);
             return;
         }
 
-        let data = self.db.const_data(const_id);
+        let data = self.db.const_signature(const_id);
         let Some(name) = &data.name else {
             return;
         };
@@ -557,8 +559,8 @@ impl<'a> DeclValidator<'a> {
     }
 
     fn validate_static(&mut self, static_id: StaticId) {
-        let data = self.db.static_data(static_id);
-        if data.is_extern {
+        let data = self.db.static_signature(static_id);
+        if data.flags.contains(StaticFlags::EXTERN) {
             cov_mark::hit!(extern_static_incorrect_case_ignored);
             return;
         }
@@ -572,14 +574,14 @@ impl<'a> DeclValidator<'a> {
     }
 
     fn validate_type_alias(&mut self, type_alias_id: TypeAliasId) {
-        let container = type_alias_id.lookup(self.db.upcast()).container;
+        let container = type_alias_id.lookup(self.db).container;
         if self.is_trait_impl_container(container) {
             cov_mark::hit!(trait_impl_assoc_type_incorrect_case_ignored);
             return;
         }
 
         // Check the type alias name.
-        let data = self.db.type_alias_data(type_alias_id);
+        let data = self.db.type_alias_signature(type_alias_id);
         self.create_incorrect_case_diagnostic_for_item_name(
             type_alias_id,
             &data.name,
@@ -597,7 +599,7 @@ impl<'a> DeclValidator<'a> {
     ) where
         N: AstNode + HasName + fmt::Debug,
         S: HasSource<Value = N>,
-        L: Lookup<Data = S, Database<'a> = dyn DefDatabase + 'a> + HasModule + Copy,
+        L: Lookup<Data = S, Database = dyn DefDatabase> + HasModule + Copy,
     {
         let to_expected_case_type = match expected_case {
             CaseType::LowerSnakeCase => to_lower_snake_case,
@@ -605,19 +607,16 @@ impl<'a> DeclValidator<'a> {
             CaseType::UpperCamelCase => to_camel_case,
         };
         let edition = self.edition(item_id);
-        let Some(replacement) = to_expected_case_type(
-            &name.display(self.db.upcast(), edition).to_smolstr(),
-        )
-        .map(|new_name| Replacement {
-            current_name: name.clone(),
-            suggested_text: new_name,
-            expected_case,
-        }) else {
+        let Some(replacement) =
+            to_expected_case_type(&name.display(self.db, edition).to_smolstr()).map(|new_name| {
+                Replacement { current_name: name.clone(), suggested_text: new_name, expected_case }
+            })
+        else {
             return;
         };
 
-        let item_loc = item_id.lookup(self.db.upcast());
-        let item_src = item_loc.source(self.db.upcast());
+        let item_loc = item_id.lookup(self.db);
+        let item_src = item_loc.source(self.db);
         self.create_incorrect_case_diagnostic_for_ast_node(
             replacement,
             item_src.file_id,
@@ -645,13 +644,13 @@ impl<'a> DeclValidator<'a> {
             return;
         };
 
-        let edition = file_id.original_file(self.db.upcast()).edition();
+        let edition = file_id.original_file(self.db).edition(self.db);
         let diagnostic = IncorrectCase {
             file: file_id,
             ident_type,
             ident: AstPtr::new(&name_ast),
             expected_case: replacement.expected_case,
-            ident_text: replacement.current_name.display(self.db.upcast(), edition).to_string(),
+            ident_text: replacement.current_name.display(self.db, edition).to_string(),
             suggested_text: replacement.suggested_text,
         };
 
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs
index 348f8a0f4a856..234c7e4b03c3c 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs
@@ -97,7 +97,7 @@ fn is_snake_case<F: Fn(char) -> bool>(ident: &str, wrong_case: F) -> bool {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use expect_test::{expect, Expect};
+    use expect_test::{Expect, expect};
 
     fn check<F: Fn(&str) -> Option<String>>(fun: F, input: &str, expect: Expect) {
         // `None` is translated to empty string, meaning that there is nothing to fix.
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs
index cc6f4d9e52eb8..5e3d880589629 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs
@@ -4,40 +4,40 @@
 
 use std::fmt;
 
-use base_db::CrateId;
+use base_db::Crate;
 use chalk_solve::rust_ir::AdtKind;
 use either::Either;
 use hir_def::{
+    AdtId, AssocItemId, DefWithBodyId, HasModule, ItemContainerId, Lookup,
     lang_item::LangItem,
     resolver::{HasResolver, ValueNs},
-    AdtId, AssocItemId, DefWithBodyId, HasModule, ItemContainerId, Lookup,
 };
 use intern::sym;
 use itertools::Itertools;
 use rustc_hash::FxHashSet;
 use rustc_pattern_analysis::constructor::Constructor;
 use syntax::{
-    ast::{self, UnaryOp},
     AstNode,
+    ast::{self, UnaryOp},
 };
 use tracing::debug;
 use triomphe::Arc;
 use typed_arena::Arena;
 
 use crate::{
+    Adjust, InferenceResult, Interner, Ty, TyExt, TyKind,
     db::HirDatabase,
     diagnostics::match_check::{
         self,
         pat_analysis::{self, DeconstructedPat, MatchCheckCtx, WitnessPat},
     },
     display::{DisplayTarget, HirDisplay},
-    Adjust, InferenceResult, Interner, Ty, TyExt, TyKind,
 };
 
 pub(crate) use hir_def::{
+    LocalFieldId, VariantId,
     expr_store::Body,
     hir::{Expr, ExprId, MatchArm, Pat, PatId, Statement},
-    LocalFieldId, VariantId,
 };
 
 pub enum BodyValidationDiagnostic {
@@ -164,9 +164,8 @@ impl ExprValidator {
                 None => return,
             };
 
-            let checker = filter_map_next_checker.get_or_insert_with(|| {
-                FilterMapNextChecker::new(&self.owner.resolver(db.upcast()), db)
-            });
+            let checker = filter_map_next_checker
+                .get_or_insert_with(|| FilterMapNextChecker::new(&self.owner.resolver(db), db));
 
             if checker.check(call_id, receiver, &callee).is_some() {
                 self.diagnostics.push(BodyValidationDiagnostic::ReplaceFilterMapNextWithFindMap {
@@ -191,7 +190,7 @@ impl ExprValidator {
             return;
         }
 
-        let cx = MatchCheckCtx::new(self.owner.module(db.upcast()), self.owner, db);
+        let cx = MatchCheckCtx::new(self.owner.module(db), self.owner, db);
 
         let pattern_arena = Arena::new();
         let mut m_arms = Vec::with_capacity(arms.len());
@@ -264,7 +263,7 @@ impl ExprValidator {
                     scrut_ty,
                     witnesses,
                     m_arms.is_empty(),
-                    self.owner.krate(db.upcast()),
+                    self.owner.krate(db),
                 ),
             });
         }
@@ -288,17 +287,16 @@ impl ExprValidator {
         match &self.body[scrutinee_expr] {
             Expr::UnaryOp { op: UnaryOp::Deref, .. } => false,
             Expr::Path(path) => {
-                let value_or_partial =
-                    self.owner.resolver(db.upcast()).resolve_path_in_value_ns_fully(
-                        db.upcast(),
-                        path,
-                        self.body.expr_path_hygiene(scrutinee_expr),
-                    );
+                let value_or_partial = self.owner.resolver(db).resolve_path_in_value_ns_fully(
+                    db,
+                    path,
+                    self.body.expr_path_hygiene(scrutinee_expr),
+                );
                 value_or_partial.is_none_or(|v| !matches!(v, ValueNs::StaticId(_)))
             }
             Expr::Field { expr, .. } => match self.infer.type_of_expr[*expr].kind(Interner) {
                 TyKind::Adt(adt, ..)
-                    if db.adt_datum(self.owner.krate(db.upcast()), *adt).kind == AdtKind::Union =>
+                    if db.adt_datum(self.owner.krate(db), *adt).kind == AdtKind::Union =>
                 {
                     false
                 }
@@ -319,7 +317,7 @@ impl ExprValidator {
             return;
         };
         let pattern_arena = Arena::new();
-        let cx = MatchCheckCtx::new(self.owner.module(db.upcast()), self.owner, db);
+        let cx = MatchCheckCtx::new(self.owner.module(db), self.owner, db);
         for stmt in &**statements {
             let &Statement::Let { pat, initializer, else_branch: None, .. } = stmt else {
                 continue;
@@ -359,7 +357,7 @@ impl ExprValidator {
                         ty,
                         witnesses,
                         false,
-                        self.owner.krate(db.upcast()),
+                        self.owner.krate(db),
                     ),
                 });
             }
@@ -434,11 +432,11 @@ impl ExprValidator {
                     let last_then_expr_ty = &self.infer[last_then_expr];
                     if last_then_expr_ty.is_never() {
                         // Only look at sources if the then branch diverges and we have an else branch.
-                        let (_, source_map) = db.body_with_source_map(self.owner);
+                        let source_map = db.body_with_source_map(self.owner).1;
                         let Ok(source_ptr) = source_map.expr_syntax(id) else {
                             return;
                         };
-                        let root = source_ptr.file_syntax(db.upcast());
+                        let root = source_ptr.file_syntax(db);
                         let either::Left(ast::Expr::IfExpr(if_expr)) =
                             source_ptr.value.to_node(&root)
                         else {
@@ -490,13 +488,11 @@ impl FilterMapNextChecker {
         {
             Some(next_function_id) => (
                 Some(next_function_id),
-                match next_function_id.lookup(db.upcast()).container {
+                match next_function_id.lookup(db).container {
                     ItemContainerId::TraitId(iterator_trait_id) => {
-                        let iterator_trait_items = &db.trait_data(iterator_trait_id).items;
+                        let iterator_trait_items = &db.trait_items(iterator_trait_id).items;
                         iterator_trait_items.iter().find_map(|(name, it)| match it {
-                            &AssocItemId::FunctionId(id) if *name == sym::filter_map.clone() => {
-                                Some(id)
-                            }
+                            &AssocItemId::FunctionId(id) if *name == sym::filter_map => Some(id),
                             _ => None,
                         })
                     }
@@ -558,7 +554,7 @@ pub fn record_literal_missing_fields(
         return None;
     }
 
-    let variant_data = variant_def.variant_data(db.upcast());
+    let variant_data = variant_def.variant_data(db);
 
     let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
     let missed_fields: Vec<LocalFieldId> = variant_data
@@ -588,7 +584,7 @@ pub fn record_pattern_missing_fields(
         return None;
     }
 
-    let variant_data = variant_def.variant_data(db.upcast());
+    let variant_data = variant_def.variant_data(db);
 
     let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
     let missed_fields: Vec<LocalFieldId> = variant_data
@@ -630,7 +626,7 @@ fn missing_match_arms<'p>(
     scrut_ty: &Ty,
     witnesses: Vec<WitnessPat<'p>>,
     arms_is_empty: bool,
-    krate: CrateId,
+    krate: Crate,
 ) -> String {
     struct DisplayWitness<'a, 'p>(&'a WitnessPat<'p>, &'a MatchCheckCtx<'p>, DisplayTarget);
     impl fmt::Display for DisplayWitness<'_, '_> {
@@ -642,7 +638,7 @@ fn missing_match_arms<'p>(
     }
 
     let non_empty_enum = match scrut_ty.as_adt() {
-        Some((AdtId::EnumId(e), _)) => !cx.db.enum_data(e).variants.is_empty(),
+        Some((AdtId::EnumId(e), _)) => !cx.db.enum_variants(e).variants.is_empty(),
         _ => false,
     };
     let display_target = DisplayTarget::from_crate(cx.db, krate);
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs
index b0f9fc53e29ee..7df22a45cb4ef 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs
@@ -11,19 +11,21 @@ pub(crate) mod pat_analysis;
 
 use chalk_ir::Mutability;
 use hir_def::{
-    data::adt::VariantData, expr_store::Body, hir::PatId, AdtId, EnumVariantId, LocalFieldId,
-    VariantId,
+    AdtId, EnumVariantId, LocalFieldId, Lookup, VariantId,
+    expr_store::{Body, path::Path},
+    hir::PatId,
+    item_tree::FieldsShape,
 };
 use hir_expand::name::Name;
 use span::Edition;
 use stdx::{always, never};
 
 use crate::{
+    InferenceResult, Interner, Substitution, Ty, TyExt, TyKind,
     db::HirDatabase,
     display::{HirDisplay, HirDisplayError, HirFormatter},
     infer::BindingMode,
     lang_items::is_box,
-    InferenceResult, Interner, Substitution, Ty, TyExt, TyKind,
 };
 
 use self::pat_util::EnumerateAndAdjustIterator;
@@ -155,7 +157,7 @@ impl<'a> PatCtxt<'a> {
                     (BindingMode::Ref(_), _) => {
                         never!(
                             "`ref {}` has wrong type {:?}",
-                            name.display(self.db.upcast(), Edition::LATEST),
+                            name.display(self.db, Edition::LATEST),
                             ty
                         );
                         self.errors.push(PatternError::UnexpectedType);
@@ -167,13 +169,13 @@ impl<'a> PatCtxt<'a> {
             }
 
             hir_def::hir::Pat::TupleStruct { ref args, ellipsis, .. } if variant.is_some() => {
-                let expected_len = variant.unwrap().variant_data(self.db.upcast()).fields().len();
+                let expected_len = variant.unwrap().variant_data(self.db).fields().len();
                 let subpatterns = self.lower_tuple_subpats(args, expected_len, ellipsis);
                 self.lower_variant_or_leaf(pat, ty, subpatterns)
             }
 
             hir_def::hir::Pat::Record { ref args, .. } if variant.is_some() => {
-                let variant_data = variant.unwrap().variant_data(self.db.upcast());
+                let variant_data = variant.unwrap().variant_data(self.db);
                 let subpatterns = args
                     .iter()
                     .map(|field| {
@@ -242,7 +244,7 @@ impl<'a> PatCtxt<'a> {
         ty: &Ty,
         subpatterns: Vec<FieldPat>,
     ) -> PatKind {
-        let kind = match self.infer.variant_resolution_for_pat(pat) {
+        match self.infer.variant_resolution_for_pat(pat) {
             Some(variant_id) => {
                 if let VariantId::EnumVariantId(enum_variant) = variant_id {
                     let substs = match ty.kind(Interner) {
@@ -266,11 +268,10 @@ impl<'a> PatCtxt<'a> {
                 self.errors.push(PatternError::UnresolvedVariant);
                 PatKind::Wild
             }
-        };
-        kind
+        }
     }
 
-    fn lower_path(&mut self, pat: PatId, _path: &hir_def::path::Path) -> Pat {
+    fn lower_path(&mut self, pat: PatId, _path: &Path) -> Pat {
         let ty = &self.infer[pat];
 
         let pat_from_kind = |kind| Pat { ty: ty.clone(), kind: Box::new(kind) };
@@ -303,7 +304,7 @@ impl HirDisplay for Pat {
             PatKind::Wild => write!(f, "_"),
             PatKind::Never => write!(f, "!"),
             PatKind::Binding { name, subpattern } => {
-                write!(f, "{}", name.display(f.db.upcast(), f.edition()))?;
+                write!(f, "{}", name.display(f.db, f.edition()))?;
                 if let Some(subpattern) = subpattern {
                     write!(f, " @ ")?;
                     subpattern.hir_fmt(f)?;
@@ -323,26 +324,29 @@ impl HirDisplay for Pat {
                 if let Some(variant) = variant {
                     match variant {
                         VariantId::EnumVariantId(v) => {
+                            let loc = v.lookup(f.db);
                             write!(
                                 f,
                                 "{}",
-                                f.db.enum_variant_data(v).name.display(f.db.upcast(), f.edition())
+                                f.db.enum_variants(loc.parent).variants[loc.index as usize]
+                                    .1
+                                    .display(f.db, f.edition())
                             )?;
                         }
                         VariantId::StructId(s) => write!(
                             f,
                             "{}",
-                            f.db.struct_data(s).name.display(f.db.upcast(), f.edition())
+                            f.db.struct_signature(s).name.display(f.db, f.edition())
                         )?,
                         VariantId::UnionId(u) => write!(
                             f,
                             "{}",
-                            f.db.union_data(u).name.display(f.db.upcast(), f.edition())
+                            f.db.union_signature(u).name.display(f.db, f.edition())
                         )?,
                     };
 
-                    let variant_data = variant.variant_data(f.db.upcast());
-                    if let VariantData::Record { fields: rec_fields, .. } = &*variant_data {
+                    let variant_data = variant.variant_data(f.db);
+                    if variant_data.shape == FieldsShape::Record {
                         write!(f, " {{ ")?;
 
                         let mut printed = 0;
@@ -351,20 +355,20 @@ impl HirDisplay for Pat {
                             .filter(|p| !matches!(*p.pattern.kind, PatKind::Wild))
                             .map(|p| {
                                 printed += 1;
-                                WriteWith(move |f| {
+                                WriteWith(|f| {
                                     write!(
                                         f,
                                         "{}: ",
-                                        rec_fields[p.field]
+                                        variant_data.fields()[p.field]
                                             .name
-                                            .display(f.db.upcast(), f.edition())
+                                            .display(f.db, f.edition())
                                     )?;
                                     p.pattern.hir_fmt(f)
                                 })
                             });
                         f.write_joined(subpats, ", ")?;
 
-                        if printed < rec_fields.len() {
+                        if printed < variant_data.fields().len() {
                             write!(f, "{}..", if printed > 0 { ", " } else { "" })?;
                         }
 
@@ -372,8 +376,8 @@ impl HirDisplay for Pat {
                     }
                 }
 
-                let num_fields = variant
-                    .map_or(subpatterns.len(), |v| v.variant_data(f.db.upcast()).fields().len());
+                let num_fields =
+                    variant.map_or(subpatterns.len(), |v| v.variant_data(f.db).fields().len());
                 if num_fields != 0 || variant.is_none() {
                     write!(f, "(")?;
                     let subpats = (0..num_fields).map(|i| {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
index 91eb59fb3140f..6323d8b71b720 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
@@ -6,21 +6,21 @@ use std::fmt;
 use hir_def::{DefWithBodyId, EnumId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId};
 use intern::sym;
 use rustc_pattern_analysis::{
-    constructor::{Constructor, ConstructorSet, VariantVisibility},
-    usefulness::{compute_match_usefulness, PlaceValidity, UsefulnessReport},
     Captures, IndexVec, PatCx, PrivateUninhabitedField,
+    constructor::{Constructor, ConstructorSet, VariantVisibility},
+    usefulness::{PlaceValidity, UsefulnessReport, compute_match_usefulness},
 };
-use smallvec::{smallvec, SmallVec};
+use smallvec::{SmallVec, smallvec};
 use stdx::never;
 
 use crate::{
+    AdtId, Interner, Scalar, Ty, TyExt, TyKind,
     db::HirDatabase,
     infer::normalize,
     inhabitedness::{is_enum_variant_uninhabited_from, is_ty_uninhabited_from},
-    AdtId, Interner, Scalar, Ty, TyExt, TyKind,
 };
 
-use super::{is_box, FieldPat, Pat, PatKind};
+use super::{FieldPat, Pat, PatKind, is_box};
 
 use Constructor::*;
 
@@ -44,12 +44,12 @@ impl EnumVariantContiguousIndex {
     fn from_enum_variant_id(db: &dyn HirDatabase, target_evid: EnumVariantId) -> Self {
         // Find the index of this variant in the list of variants.
         use hir_def::Lookup;
-        let i = target_evid.lookup(db.upcast()).index as usize;
+        let i = target_evid.lookup(db).index as usize;
         EnumVariantContiguousIndex(i)
     }
 
     fn to_enum_variant_id(self, db: &dyn HirDatabase, eid: EnumId) -> EnumVariantId {
-        db.enum_data(eid).variants[self.0].0
+        db.enum_variants(eid).variants[self.0].0
     }
 }
 
@@ -105,8 +105,8 @@ impl<'db> MatchCheckCtx<'db> {
 
     /// Returns whether the given ADT is from another crate declared `#[non_exhaustive]`.
     fn is_foreign_non_exhaustive(&self, adt: hir_def::AdtId) -> bool {
-        let is_local = adt.krate(self.db.upcast()) == self.module.krate();
-        !is_local && self.db.attrs(adt.into()).by_key(&sym::non_exhaustive).exists()
+        let is_local = adt.krate(self.db) == self.module.krate();
+        !is_local && self.db.attrs(adt.into()).by_key(sym::non_exhaustive).exists()
     }
 
     fn variant_id_for_adt(
@@ -139,7 +139,7 @@ impl<'db> MatchCheckCtx<'db> {
         let (_, substs) = ty.as_adt().unwrap();
 
         let field_tys = self.db.field_types(variant);
-        let fields_len = variant.variant_data(self.db.upcast()).fields().len() as u32;
+        let fields_len = variant.variant_data(self.db).fields().len() as u32;
 
         (0..fields_len).map(|idx| LocalFieldId::from_raw(idx.into())).map(move |fid| {
             let ty = field_tys[fid].clone().substitute(Interner, substs);
@@ -222,7 +222,7 @@ impl<'db> MatchCheckCtx<'db> {
                             }
                         };
                         let variant = Self::variant_id_for_adt(self.db, &ctor, adt).unwrap();
-                        arity = variant.variant_data(self.db.upcast()).fields().len();
+                        arity = variant.variant_data(self.db).fields().len();
                     }
                     _ => {
                         never!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, &pat.ty);
@@ -341,7 +341,7 @@ impl PatCx for MatchCheckCtx<'_> {
                         1
                     } else {
                         let variant = Self::variant_id_for_adt(self.db, ctor, adt).unwrap();
-                        variant.variant_data(self.db.upcast()).fields().len()
+                        variant.variant_data(self.db).fields().len()
                     }
                 }
                 _ => {
@@ -389,8 +389,7 @@ impl PatCx for MatchCheckCtx<'_> {
                             .map(move |(fid, ty)| {
                                 let is_visible = || {
                                     matches!(adt, hir_def::AdtId::EnumId(..))
-                                        || visibilities[fid]
-                                            .is_visible_from(self.db.upcast(), self.module)
+                                        || visibilities[fid].is_visible_from(self.db, self.module)
                                 };
                                 let is_uninhabited = self.is_uninhabited(&ty);
                                 let private_uninhabited = is_uninhabited && !is_visible();
@@ -449,7 +448,7 @@ impl PatCx for MatchCheckCtx<'_> {
             TyKind::Scalar(Scalar::Int(..) | Scalar::Uint(..)) => unhandled(),
             TyKind::Array(..) | TyKind::Slice(..) => unhandled(),
             &TyKind::Adt(AdtId(adt @ hir_def::AdtId::EnumId(enum_id)), ref subst) => {
-                let enum_data = cx.db.enum_data(enum_id);
+                let enum_data = cx.db.enum_variants(enum_id);
                 let is_declared_nonexhaustive = cx.is_foreign_non_exhaustive(adt);
 
                 if enum_data.variants.is_empty() && !is_declared_nonexhaustive {
@@ -493,13 +492,13 @@ impl PatCx for MatchCheckCtx<'_> {
         // if let Some(variant) = variant {
         //     match variant {
         //         VariantId::EnumVariantId(v) => {
-        //             write!(f, "{}", db.enum_variant_data(v).name.display(db.upcast()))?;
+        //             write!(f, "{}", db.enum_variant_data(v).name.display(db))?;
         //         }
         //         VariantId::StructId(s) => {
-        //             write!(f, "{}", db.struct_data(s).name.display(db.upcast()))?
+        //             write!(f, "{}", db.struct_data(s).name.display(db))?
         //         }
         //         VariantId::UnionId(u) => {
-        //             write!(f, "{}", db.union_data(u).name.display(db.upcast()))?
+        //             write!(f, "{}", db.union_data(u).name.display(db))?
         //         }
         //     }
         // }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs
index d2b908839c42e..73b99db726841 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs
@@ -5,18 +5,18 @@ use std::mem;
 
 use either::Either;
 use hir_def::{
-    expr_store::Body,
+    AdtId, DefWithBodyId, FieldId, FunctionId, VariantId,
+    expr_store::{Body, path::Path},
     hir::{Expr, ExprId, ExprOrPatId, Pat, PatId, Statement, UnaryOp},
-    path::Path,
     resolver::{HasResolver, ResolveValueResult, Resolver, ValueNs},
+    signatures::StaticFlags,
     type_ref::Rawness,
-    AdtId, DefWithBodyId, FieldId, FunctionId, VariantId,
 };
 use span::Edition;
 
 use crate::{
-    db::HirDatabase, utils::is_fn_unsafe_to_call, InferenceResult, Interner, TargetFeatures, TyExt,
-    TyKind,
+    InferenceResult, Interner, TargetFeatures, TyExt, TyKind, db::HirDatabase,
+    utils::is_fn_unsafe_to_call,
 };
 
 #[derive(Debug, Default)]
@@ -31,11 +31,10 @@ pub fn missing_unsafe(db: &dyn HirDatabase, def: DefWithBodyId) -> MissingUnsafe
     let _p = tracing::info_span!("missing_unsafe").entered();
 
     let is_unsafe = match def {
-        DefWithBodyId::FunctionId(it) => db.function_data(it).is_unsafe(),
-        DefWithBodyId::StaticId(_)
-        | DefWithBodyId::ConstId(_)
-        | DefWithBodyId::VariantId(_)
-        | DefWithBodyId::InTypeConstId(_) => false,
+        DefWithBodyId::FunctionId(it) => db.function_signature(it).is_unsafe(),
+        DefWithBodyId::StaticId(_) | DefWithBodyId::ConstId(_) | DefWithBodyId::VariantId(_) => {
+            false
+        }
     };
 
     let mut res = MissingUnsafeResult { fn_is_unsafe: is_unsafe, ..MissingUnsafeResult::default() };
@@ -128,7 +127,7 @@ pub fn unsafe_operations(
         }
     };
     let mut visitor = UnsafeVisitor::new(db, infer, body, def, &mut visitor_callback);
-    _ = visitor.resolver.update_to_inner_scope(db.upcast(), def, current);
+    _ = visitor.resolver.update_to_inner_scope(db, def, current);
     visitor.walk_expr(current);
 }
 
@@ -155,12 +154,12 @@ impl<'a> UnsafeVisitor<'a> {
         def: DefWithBodyId,
         unsafe_expr_cb: &'a mut dyn FnMut(UnsafeDiagnostic),
     ) -> Self {
-        let resolver = def.resolver(db.upcast());
+        let resolver = def.resolver(db);
         let def_target_features = match def {
             DefWithBodyId::FunctionId(func) => TargetFeatures::from_attrs(&db.attrs(func.into())),
             _ => TargetFeatures::default(),
         };
-        let edition = db.crate_graph()[resolver.module().krate()].edition;
+        let edition = resolver.module().krate().data(db).edition;
         Self {
             db,
             infer,
@@ -201,7 +200,7 @@ impl<'a> UnsafeVisitor<'a> {
     }
 
     fn walk_pats_top(&mut self, pats: impl Iterator<Item = PatId>, parent_expr: ExprId) {
-        let guard = self.resolver.update_to_inner_scope(self.db.upcast(), self.def, parent_expr);
+        let guard = self.resolver.update_to_inner_scope(self.db, self.def, parent_expr);
         pats.for_each(|pat| self.walk_pat(pat));
         self.resolver.reset_to_guard(guard);
     }
@@ -269,8 +268,7 @@ impl<'a> UnsafeVisitor<'a> {
                 }
             }
             Expr::Path(path) => {
-                let guard =
-                    self.resolver.update_to_inner_scope(self.db.upcast(), self.def, current);
+                let guard = self.resolver.update_to_inner_scope(self.db, self.def, current);
                 self.mark_unsafe_path(current.into(), path);
                 self.resolver.reset_to_guard(guard);
             }
@@ -350,6 +348,7 @@ impl<'a> UnsafeVisitor<'a> {
             Expr::Closure { args, .. } => {
                 self.walk_pats_top(args.iter().copied(), current);
             }
+            Expr::Const(e) => self.walk_expr(*e),
             _ => {}
         }
 
@@ -358,13 +357,14 @@ impl<'a> UnsafeVisitor<'a> {
 
     fn mark_unsafe_path(&mut self, node: ExprOrPatId, path: &Path) {
         let hygiene = self.body.expr_or_pat_path_hygiene(node);
-        let value_or_partial =
-            self.resolver.resolve_path_in_value_ns(self.db.upcast(), path, hygiene);
+        let value_or_partial = self.resolver.resolve_path_in_value_ns(self.db, path, hygiene);
         if let Some(ResolveValueResult::ValueNs(ValueNs::StaticId(id), _)) = value_or_partial {
-            let static_data = self.db.static_data(id);
-            if static_data.mutable {
+            let static_data = self.db.static_signature(id);
+            if static_data.flags.contains(StaticFlags::MUTABLE) {
                 self.on_unsafe_op(node, UnsafetyReason::MutableStatic);
-            } else if static_data.is_extern && !static_data.has_safe_kw {
+            } else if static_data.flags.contains(StaticFlags::EXTERN)
+                && !static_data.flags.contains(StaticFlags::EXPLICIT_SAFE)
+            {
                 self.on_unsafe_op(node, UnsafetyReason::ExternStatic);
             }
         }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
index 95ce36390d33d..f62e4bb4f806c 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
@@ -7,32 +7,34 @@ use std::{
     mem,
 };
 
-use base_db::CrateId;
+use base_db::Crate;
 use chalk_ir::{BoundVar, Safety, TyKind};
 use either::Either;
 use hir_def::{
-    data::adt::VariantData,
+    GenericDefId, HasModule, ImportPathConfig, ItemContainerId, LocalFieldId, Lookup, ModuleDefId,
+    ModuleId, TraitId,
     db::DefDatabase,
+    expr_store::{ExpressionStore, path::Path},
     find_path::{self, PrefixKind},
-    generics::{TypeOrConstParamData, TypeParamProvenance},
+    hir::generics::{TypeOrConstParamData, TypeParamProvenance, WherePredicate},
     item_scope::ItemInNs,
+    item_tree::FieldsShape,
     lang_item::{LangItem, LangItemTarget},
     nameres::DefMap,
-    path::{Path, PathKind},
+    signatures::VariantFields,
     type_ref::{
-        TraitBoundModifier, TypeBound, TypeRef, TypeRefId, TypesMap, TypesSourceMap, UseArgRef,
+        ConstRef, LifetimeRef, LifetimeRefId, TraitBoundModifier, TypeBound, TypeRef, TypeRefId,
+        UseArgRef,
     },
     visibility::Visibility,
-    GenericDefId, HasModule, ImportPathConfig, ItemContainerId, LocalFieldId, Lookup, ModuleDefId,
-    ModuleId, TraitId,
 };
-use hir_expand::name::Name;
-use intern::{sym, Internable, Interned};
+use hir_expand::{mod_path::PathKind, name::Name};
+use intern::{Internable, Interned, sym};
 use itertools::Itertools;
 use la_arena::ArenaMap;
 use rustc_apfloat::{
-    ieee::{Half as f16, Quad as f128},
     Float,
+    ieee::{Half as f16, Quad as f128},
 };
 use rustc_hash::FxHashSet;
 use smallvec::SmallVec;
@@ -41,6 +43,11 @@ use stdx::never;
 use triomphe::Arc;
 
 use crate::{
+    AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, ConcreteConst, Const,
+    ConstScalar, ConstValue, DomainGoal, FnAbi, GenericArg, ImplTraitId, Interner, Lifetime,
+    LifetimeData, LifetimeOutlives, MemoryMap, Mutability, OpaqueTy, ProjectionTy, ProjectionTyExt,
+    QuantifiedWhereClause, Scalar, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty,
+    TyExt, WhereClause,
     consteval::try_const_usize,
     db::{HirDatabase, InternedClosure},
     from_assoc_type_id, from_foreign_def_id, from_placeholder_idx,
@@ -51,12 +58,7 @@ use crate::{
     mapping::from_chalk,
     mir::pad16,
     primitive, to_assoc_type_id,
-    utils::{self, detect_variant_from_bytes, ClosureSubst},
-    AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, ConcreteConst, Const,
-    ConstScalar, ConstValue, DomainGoal, FnAbi, GenericArg, ImplTraitId, Interner, Lifetime,
-    LifetimeData, LifetimeOutlives, MemoryMap, Mutability, OpaqueTy, ProjectionTy, ProjectionTyExt,
-    QuantifiedWhereClause, Scalar, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty,
-    TyExt, WhereClause,
+    utils::{self, ClosureSubst, detect_variant_from_bytes},
 };
 
 pub trait HirWrite: fmt::Write {
@@ -339,7 +341,7 @@ pub trait HirDisplay {
 }
 
 impl HirFormatter<'_> {
-    pub fn krate(&self) -> CrateId {
+    pub fn krate(&self) -> Crate {
         self.display_target.krate
     }
 
@@ -408,13 +410,13 @@ impl HirFormatter<'_> {
 
 #[derive(Debug, Clone, Copy)]
 pub struct DisplayTarget {
-    krate: CrateId,
+    krate: Crate,
     pub edition: Edition,
 }
 
 impl DisplayTarget {
-    pub fn from_crate(db: &dyn HirDatabase, krate: CrateId) -> Self {
-        let edition = db.crate_graph()[krate].edition;
+    pub fn from_crate(db: &dyn HirDatabase, krate: Crate) -> Self {
+        let edition = krate.data(db).edition;
         Self { krate, edition }
     }
 }
@@ -529,7 +531,9 @@ where
             Err(HirDisplayError::FmtError) => Err(fmt::Error),
             Err(HirDisplayError::DisplaySourceCodeError(_)) => {
                 // This should never happen
-                panic!("HirDisplay::hir_fmt failed with DisplaySourceCodeError when calling Display::fmt!")
+                panic!(
+                    "HirDisplay::hir_fmt failed with DisplaySourceCodeError when calling Display::fmt!"
+                )
             }
         }
     }
@@ -565,7 +569,7 @@ impl HirDisplay for ProjectionTy {
                 if !f.bounds_formatting_ctx.contains(self) {
                     let db = f.db;
                     let id = from_placeholder_idx(db, *idx);
-                    let generics = generics(db.upcast(), id.parent);
+                    let generics = generics(db, id.parent);
 
                     let substs = generics.placeholder_subst(db);
                     let bounds = db
@@ -612,13 +616,12 @@ impl HirDisplay for ProjectionTy {
         write!(
             f,
             ">::{}",
-            f.db.type_alias_data(from_assoc_type_id(self.associated_ty_id))
+            f.db.type_alias_signature(from_assoc_type_id(self.associated_ty_id))
                 .name
-                .display(f.db.upcast(), f.edition())
+                .display(f.db, f.edition())
         )?;
-        let proj_params_count =
-            self.substitution.len(Interner) - trait_ref.substitution.len(Interner);
-        let proj_params = &self.substitution.as_slice(Interner)[..proj_params_count];
+        let proj_params =
+            &self.substitution.as_slice(Interner)[trait_ref.substitution.len(Interner)..];
         hir_fmt_generics(f, proj_params, None, None)
     }
 }
@@ -651,21 +654,16 @@ impl HirDisplay for Const {
             ConstValue::InferenceVar(..) => write!(f, "#c#"),
             ConstValue::Placeholder(idx) => {
                 let id = from_placeholder_idx(f.db, *idx);
-                let generics = generics(f.db.upcast(), id.parent);
+                let generics = generics(f.db, id.parent);
                 let param_data = &generics[id.local_id];
-                write!(f, "{}", param_data.name().unwrap().display(f.db.upcast(), f.edition()))?;
+                write!(f, "{}", param_data.name().unwrap().display(f.db, f.edition()))?;
                 Ok(())
             }
             ConstValue::Concrete(c) => match &c.interned {
                 ConstScalar::Bytes(b, m) => render_const_scalar(f, b, m, &data.ty),
                 ConstScalar::UnevaluatedConst(c, parameters) => {
-                    write!(f, "{}", c.name(f.db.upcast()))?;
-                    hir_fmt_generics(
-                        f,
-                        parameters.as_slice(Interner),
-                        c.generic_def(f.db.upcast()),
-                        None,
-                    )?;
+                    write!(f, "{}", c.name(f.db))?;
+                    hir_fmt_generics(f, parameters.as_slice(Interner), c.generic_def(f.db), None)?;
                     Ok(())
                 }
                 ConstScalar::Unknown => f.write_char('_'),
@@ -784,8 +782,8 @@ fn render_const_scalar(
             }
             TyKind::Adt(adt, _) if b.len() == 2 * size_of::<usize>() => match adt.0 {
                 hir_def::AdtId::StructId(s) => {
-                    let data = f.db.struct_data(s);
-                    write!(f, "&{}", data.name.display(f.db.upcast(), f.edition()))?;
+                    let data = f.db.struct_signature(s);
+                    write!(f, "&{}", data.name.display(f.db, f.edition()))?;
                     Ok(())
                 }
                 _ => f.write_str("<unsized-enum-or-union>"),
@@ -842,11 +840,11 @@ fn render_const_scalar(
             };
             match adt.0 {
                 hir_def::AdtId::StructId(s) => {
-                    let data = f.db.struct_data(s);
-                    write!(f, "{}", data.name.display(f.db.upcast(), f.edition()))?;
+                    let data = f.db.struct_signature(s);
+                    write!(f, "{}", data.name.display(f.db, f.edition()))?;
                     let field_types = f.db.field_types(s.into());
                     render_variant_after_name(
-                        &data.variant_data,
+                        &f.db.variant_fields(s.into()),
                         f,
                         &field_types,
                         f.db.trait_environment(adt.0.into()),
@@ -857,7 +855,7 @@ fn render_const_scalar(
                     )
                 }
                 hir_def::AdtId::UnionId(u) => {
-                    write!(f, "{}", f.db.union_data(u).name.display(f.db.upcast(), f.edition()))
+                    write!(f, "{}", f.db.union_signature(u).name.display(f.db, f.edition()))
                 }
                 hir_def::AdtId::EnumId(e) => {
                     let Ok(target_data_layout) = f.db.target_data_layout(trait_env.krate) else {
@@ -868,11 +866,17 @@ fn render_const_scalar(
                     else {
                         return f.write_str("<failed-to-detect-variant>");
                     };
-                    let data = f.db.enum_variant_data(var_id);
-                    write!(f, "{}", data.name.display(f.db.upcast(), f.edition()))?;
+                    let loc = var_id.lookup(f.db);
+                    write!(
+                        f,
+                        "{}",
+                        f.db.enum_variants(loc.parent).variants[loc.index as usize]
+                            .1
+                            .display(f.db, f.edition())
+                    )?;
                     let field_types = f.db.field_types(var_id.into());
                     render_variant_after_name(
-                        &data.variant_data,
+                        &f.db.variant_fields(var_id.into()),
                         f,
                         &field_types,
                         f.db.trait_environment(adt.0.into()),
@@ -930,7 +934,7 @@ fn render_const_scalar(
 }
 
 fn render_variant_after_name(
-    data: &VariantData,
+    data: &VariantFields,
     f: &mut HirFormatter<'_>,
     field_types: &ArenaMap<LocalFieldId, Binders<Ty>>,
     trait_env: Arc<TraitEnvironment>,
@@ -939,8 +943,8 @@ fn render_variant_after_name(
     b: &[u8],
     memory_map: &MemoryMap,
 ) -> Result<(), HirDisplayError> {
-    match data {
-        VariantData::Record { fields, .. } | VariantData::Tuple { fields, .. } => {
+    match data.shape {
+        FieldsShape::Record | FieldsShape::Tuple => {
             let render_field = |f: &mut HirFormatter<'_>, id: LocalFieldId| {
                 let offset = layout.fields.offset(u32::from(id.into_raw()) as usize).bytes_usize();
                 let ty = field_types[id].clone().substitute(Interner, subst);
@@ -950,15 +954,15 @@ fn render_variant_after_name(
                 let size = layout.size.bytes_usize();
                 render_const_scalar(f, &b[offset..offset + size], memory_map, &ty)
             };
-            let mut it = fields.iter();
-            if matches!(data, VariantData::Record { .. }) {
+            let mut it = data.fields().iter();
+            if matches!(data.shape, FieldsShape::Record) {
                 write!(f, " {{")?;
                 if let Some((id, data)) = it.next() {
-                    write!(f, " {}: ", data.name.display(f.db.upcast(), f.edition()))?;
+                    write!(f, " {}: ", data.name.display(f.db, f.edition()))?;
                     render_field(f, id)?;
                 }
                 for (id, data) in it {
-                    write!(f, ", {}: ", data.name.display(f.db.upcast(), f.edition()))?;
+                    write!(f, ", {}: ", data.name.display(f.db, f.edition()))?;
                     render_field(f, id)?;
                 }
                 write!(f, " }}")?;
@@ -976,7 +980,7 @@ fn render_variant_after_name(
             }
             Ok(())
         }
-        VariantData::Unit => Ok(()),
+        FieldsShape::Unit => Ok(()),
     }
 }
 
@@ -1044,7 +1048,7 @@ impl HirDisplay for Ty {
                     bounds.iter().any(|bound| {
                         if let WhereClause::Implemented(trait_ref) = bound.skip_binders() {
                             let trait_ = trait_ref.hir_trait_id();
-                            fn_traits(db.upcast(), trait_).any(|it| it == trait_)
+                            fn_traits(db, trait_).any(|it| it == trait_)
                         } else {
                             false
                         }
@@ -1072,8 +1076,7 @@ impl HirDisplay for Ty {
 
                             // Don't count Sized but count when it absent
                             // (i.e. when explicit ?Sized bound is set).
-                            let default_sized =
-                                SizedByDefault::Sized { anchor: func.krate(db.upcast()) };
+                            let default_sized = SizedByDefault::Sized { anchor: func.krate(db) };
                             let sized_bounds = bounds
                                 .skip_binders()
                                 .iter()
@@ -1083,7 +1086,7 @@ impl HirDisplay for Ty {
                                         WhereClause::Implemented(trait_ref)
                                             if default_sized.is_sized_trait(
                                                 trait_ref.hir_trait_id(),
-                                                db.upcast(),
+                                                db,
                                             ),
                                     )
                                 })
@@ -1151,25 +1154,28 @@ impl HirDisplay for Ty {
                 write!(f, "fn ")?;
                 f.start_location_link(def.into());
                 match def {
-                    CallableDefId::FunctionId(ff) => write!(
-                        f,
-                        "{}",
-                        db.function_data(ff).name.display(f.db.upcast(), f.edition())
-                    )?,
+                    CallableDefId::FunctionId(ff) => {
+                        write!(f, "{}", db.function_signature(ff).name.display(f.db, f.edition()))?
+                    }
                     CallableDefId::StructId(s) => {
-                        write!(f, "{}", db.struct_data(s).name.display(f.db.upcast(), f.edition()))?
+                        write!(f, "{}", db.struct_signature(s).name.display(f.db, f.edition()))?
+                    }
+                    CallableDefId::EnumVariantId(e) => {
+                        let loc = e.lookup(db);
+                        write!(
+                            f,
+                            "{}",
+                            db.enum_variants(loc.parent).variants[loc.index as usize]
+                                .1
+                                .display(db, f.edition())
+                        )?
                     }
-                    CallableDefId::EnumVariantId(e) => write!(
-                        f,
-                        "{}",
-                        db.enum_variant_data(e).name.display(f.db.upcast(), f.edition())
-                    )?,
                 };
                 f.end_location_link();
 
                 if parameters.len(Interner) > 0 {
-                    let generic_def_id = GenericDefId::from_callable(db.upcast(), def);
-                    let generics = generics(db.upcast(), generic_def_id);
+                    let generic_def_id = GenericDefId::from_callable(db, def);
+                    let generics = generics(db, generic_def_id);
                     let (parent_len, self_param, type_, const_, impl_, lifetime) =
                         generics.provenance_split();
                     let parameters = parameters.as_slice(Interner);
@@ -1188,27 +1194,31 @@ impl HirDisplay for Ty {
 
                         // Normally, functions cannot have default parameters, but they can,
                         // for function-like things such as struct names or enum variants.
-                        // The former cannot have defaults but parents, and the later cannot have
-                        // parents but defaults.
-                        // So, if `parent_len` > 0, it have a parent and thus it doesn't have any
-                        // default. Therefore, we shouldn't subtract defaults because those defaults
-                        // are from their parents.
-                        // And if `parent_len` == 0, either parents don't exists or they don't have
-                        // any defaults. Thus, we can - and should - subtract defaults.
-                        let without_impl = if parent_len > 0 {
-                            params_len - parent_len - impl_
+                        // The former cannot have defaults but can have parents,
+                        // while the latter cannot have parents but can have defaults.
+                        //
+                        // However, *traits* can have defaults as well; in that case
+                        // there can be no function params.
+                        let parent_end = if parent_len > 0 {
+                            // If `parent_len` > 0, then there cannot be defaults on the function
+                            // and all defaults must come from the parent.
+                            parent_len - defaults
                         } else {
-                            params_len - parent_len - impl_ - defaults
+                            parent_len
                         };
-                        // parent's params (those from enclosing impl or trait, if any).
-                        let (fn_params, parent_params) = parameters.split_at(without_impl + impl_);
+                        let fn_params_no_impl_or_defaults = parameters.len() - parent_end - impl_;
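+                        // Parent arguments come first in the substitution, so split them off
+                        // before printing the function's own arguments.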
+                        let (parent_params, fn_params) = parameters.split_at(parent_end);
 
                         write!(f, "<")?;
                         hir_fmt_generic_arguments(f, parent_params, None)?;
                         if !parent_params.is_empty() && !fn_params.is_empty() {
                             write!(f, ", ")?;
                         }
-                        hir_fmt_generic_arguments(f, &fn_params[0..without_impl], None)?;
+                        hir_fmt_generic_arguments(
+                            f,
+                            &fn_params[..fn_params_no_impl_or_defaults],
+                            None,
+                        )?;
                         write!(f, ">")?;
                     }
                 }
@@ -1224,17 +1234,17 @@ impl HirDisplay for Ty {
             TyKind::Adt(AdtId(def_id), parameters) => {
                 f.start_location_link((*def_id).into());
                 match f.display_kind {
-                    DisplayKind::Diagnostics { .. } | DisplayKind::Test { .. } => {
+                    DisplayKind::Diagnostics | DisplayKind::Test => {
                         let name = match *def_id {
-                            hir_def::AdtId::StructId(it) => db.struct_data(it).name.clone(),
-                            hir_def::AdtId::UnionId(it) => db.union_data(it).name.clone(),
-                            hir_def::AdtId::EnumId(it) => db.enum_data(it).name.clone(),
+                            hir_def::AdtId::StructId(it) => db.struct_signature(it).name.clone(),
+                            hir_def::AdtId::UnionId(it) => db.union_signature(it).name.clone(),
+                            hir_def::AdtId::EnumId(it) => db.enum_signature(it).name.clone(),
                         };
-                        write!(f, "{}", name.display(f.db.upcast(), f.edition()))?;
+                        write!(f, "{}", name.display(f.db, f.edition()))?;
                     }
                     DisplayKind::SourceCode { target_module_id: module_id, allow_opaque: _ } => {
                         if let Some(path) = find_path::find_path(
-                            db.upcast(),
+                            db,
                             ItemInNs::Types((*def_id).into()),
                             module_id,
                             PrefixKind::Plain,
@@ -1247,7 +1257,7 @@ impl HirDisplay for Ty {
                                 allow_unstable: true,
                             },
                         ) {
-                            write!(f, "{}", path.display(f.db.upcast(), f.edition()))?;
+                            write!(f, "{}", path.display(f.db, f.edition()))?;
                         } else {
                             return Err(HirDisplayError::DisplaySourceCodeError(
                                 DisplaySourceCodeError::PathNotFound,
@@ -1263,22 +1273,22 @@ impl HirDisplay for Ty {
             }
             TyKind::AssociatedType(assoc_type_id, parameters) => {
                 let type_alias = from_assoc_type_id(*assoc_type_id);
-                let trait_ = match type_alias.lookup(db.upcast()).container {
+                let trait_ = match type_alias.lookup(db).container {
                     ItemContainerId::TraitId(it) => it,
                     _ => panic!("not an associated type"),
                 };
-                let trait_data = db.trait_data(trait_);
-                let type_alias_data = db.type_alias_data(type_alias);
+                let trait_data = db.trait_signature(trait_);
+                let type_alias_data = db.type_alias_signature(type_alias);
 
                 // Use placeholder associated types when the target is test (https://rust-lang.github.io/chalk/book/clauses/type_equality.html#placeholder-associated-types)
                 if f.display_kind.is_test() {
                     f.start_location_link(trait_.into());
-                    write!(f, "{}", trait_data.name.display(f.db.upcast(), f.edition()))?;
+                    write!(f, "{}", trait_data.name.display(f.db, f.edition()))?;
                     f.end_location_link();
                     write!(f, "::")?;
 
                     f.start_location_link(type_alias.into());
-                    write!(f, "{}", type_alias_data.name.display(f.db.upcast(), f.edition()))?;
+                    write!(f, "{}", type_alias_data.name.display(f.db, f.edition()))?;
                     f.end_location_link();
                     // Note that the generic args for the associated type come before those for the
                     // trait (including the self type).
@@ -1294,9 +1304,9 @@ impl HirDisplay for Ty {
             }
             TyKind::Foreign(type_alias) => {
                 let alias = from_foreign_def_id(*type_alias);
-                let type_alias = db.type_alias_data(alias);
+                let type_alias = db.type_alias_signature(alias);
                 f.start_location_link(alias.into());
-                write!(f, "{}", type_alias.name.display(f.db.upcast(), f.edition()))?;
+                write!(f, "{}", type_alias.name.display(f.db, f.edition()))?;
                 f.end_location_link();
             }
             TyKind::OpaqueType(opaque_ty_id, parameters) => {
@@ -1313,7 +1323,7 @@ impl HirDisplay for Ty {
                         let data =
                             (*datas).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone());
                         let bounds = data.substitute(Interner, &parameters);
-                        let krate = func.krate(db.upcast());
+                        let krate = func.krate(db);
                         write_bounds_like_dyn_trait_with_prefix(
                             f,
                             "impl",
@@ -1328,7 +1338,7 @@ impl HirDisplay for Ty {
                             db.type_alias_impl_traits(alias).expect("impl trait id without data");
                         let data = (*datas).as_ref().map(|it| it.impl_traits[idx].bounds.clone());
                         let bounds = data.substitute(Interner, &parameters);
-                        let krate = alias.krate(db.upcast());
+                        let krate = alias.krate(db);
                         write_bounds_like_dyn_trait_with_prefix(
                             f,
                             "impl",
@@ -1339,12 +1349,11 @@ impl HirDisplay for Ty {
                     }
                     ImplTraitId::AsyncBlockTypeImplTrait(body, ..) => {
                         let future_trait = db
-                            .lang_item(body.module(db.upcast()).krate(), LangItem::Future)
+                            .lang_item(body.module(db).krate(), LangItem::Future)
                             .and_then(LangItemTarget::as_trait);
                         let output = future_trait.and_then(|t| {
-                            db.trait_data(t).associated_type_by_name(&Name::new_symbol_root(
-                                sym::Output.clone(),
-                            ))
+                            db.trait_items(t)
+                                .associated_type_by_name(&Name::new_symbol_root(sym::Output))
                         });
                         write!(f, "impl ")?;
                         if let Some(t) = future_trait {
@@ -1381,7 +1390,7 @@ impl HirDisplay for Ty {
                 match f.closure_style {
                     ClosureStyle::Hide => return write!(f, "{TYPE_HINT_TRUNCATION}"),
                     ClosureStyle::ClosureWithId => {
-                        return write!(f, "{{closure#{:?}}}", id.0.as_u32())
+                        return write!(f, "{{closure#{:?}}}", id.0.as_u32());
                     }
                     ClosureStyle::ClosureWithSubst => {
                         write!(f, "{{closure#{:?}}}", id.0.as_u32())?;
@@ -1420,7 +1429,7 @@ impl HirDisplay for Ty {
             }
             TyKind::Placeholder(idx) => {
                 let id = from_placeholder_idx(db, *idx);
-                let generics = generics(db.upcast(), id.parent);
+                let generics = generics(db, id.parent);
                 let param_data = &generics[id.local_id];
                 match param_data {
                     TypeOrConstParamData::TypeParamData(p) => match p.provenance {
@@ -1431,7 +1440,7 @@ impl HirDisplay for Ty {
                                 p.name
                                     .clone()
                                     .unwrap_or_else(Name::missing)
-                                    .display(f.db.upcast(), f.edition())
+                                    .display(f.db, f.edition())
                             )?
                         }
                         TypeParamProvenance::ArgumentImplTrait => {
@@ -1453,7 +1462,7 @@ impl HirDisplay for Ty {
                                     WhereClause::LifetimeOutlives(_) => false,
                                 })
                                 .collect::<Vec<_>>();
-                            let krate = id.parent.module(db.upcast()).krate();
+                            let krate = id.parent.module(db).krate();
                             write_bounds_like_dyn_trait_with_prefix(
                                 f,
                                 "impl",
@@ -1464,7 +1473,7 @@ impl HirDisplay for Ty {
                         }
                     },
                     TypeOrConstParamData::ConstParamData(p) => {
-                        write!(f, "{}", p.name.display(f.db.upcast(), f.edition()))?;
+                        write!(f, "{}", p.name.display(f.db, f.edition()))?;
                     }
                 }
             }
@@ -1503,7 +1512,7 @@ impl HirDisplay for Ty {
                         let data =
                             (*datas).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone());
                         let bounds = data.substitute(Interner, &opaque_ty.substitution);
-                        let krate = func.krate(db.upcast());
+                        let krate = func.krate(db);
                         write_bounds_like_dyn_trait_with_prefix(
                             f,
                             "impl",
@@ -1518,7 +1527,7 @@ impl HirDisplay for Ty {
                         let data =
                             (*datas).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone());
                         let bounds = data.substitute(Interner, &opaque_ty.substitution);
-                        let krate = alias.krate(db.upcast());
+                        let krate = alias.krate(db);
                         write_bounds_like_dyn_trait_with_prefix(
                             f,
                             "impl",
@@ -1630,7 +1639,7 @@ fn generic_args_sans_defaults<'ga>(
                         Some(default_parameter) => {
                             // !is_err(default_parameter.skip_binders())
                             // &&
-                            arg != &default_parameter.clone().substitute(Interner, &parameters)
+                            arg != &default_parameter.clone().substitute(Interner, &parameters[..i])
                         }
                     }
                 };
@@ -1711,7 +1720,7 @@ fn fn_traits(db: &dyn DefDatabase, trait_: TraitId) -> impl Iterator<Item = Trai
 #[derive(Clone, Copy, PartialEq, Eq)]
 pub enum SizedByDefault {
     NotSized,
-    Sized { anchor: CrateId },
+    Sized { anchor: Crate },
 }
 
 impl SizedByDefault {
@@ -1766,7 +1775,7 @@ fn write_bounds_like_dyn_trait(
         match p.skip_binders() {
             WhereClause::Implemented(trait_ref) => {
                 let trait_ = trait_ref.hir_trait_id();
-                if default_sized.is_sized_trait(trait_, f.db.upcast()) {
+                if default_sized.is_sized_trait(trait_, f.db) {
                     is_sized = true;
                     if matches!(default_sized, SizedByDefault::Sized { .. }) {
                         // Don't print +Sized, but rather +?Sized if absent.
@@ -1774,7 +1783,7 @@ fn write_bounds_like_dyn_trait(
                     }
                 }
                 if !is_fn_trait {
-                    is_fn_trait = fn_traits(f.db.upcast(), trait_).any(|it| it == trait_);
+                    is_fn_trait = fn_traits(f.db, trait_).any(|it| it == trait_);
                 }
                 if !is_fn_trait && angle_open {
                     write!(f, ">")?;
@@ -1787,7 +1796,7 @@ fn write_bounds_like_dyn_trait(
                 // existential) here, which is the only thing that's
                 // possible in actual Rust, and hence don't print it
                 f.start_location_link(trait_.into());
-                write!(f, "{}", f.db.trait_data(trait_).name.display(f.db.upcast(), f.edition()))?;
+                write!(f, "{}", f.db.trait_signature(trait_).name.display(f.db, f.edition()))?;
                 f.end_location_link();
                 if is_fn_trait {
                     if let [self_, params @ ..] = trait_ref.substitution.as_slice(Interner) {
@@ -1859,17 +1868,18 @@ fn write_bounds_like_dyn_trait(
                 }
                 if let AliasTy::Projection(proj) = alias {
                     let assoc_ty_id = from_assoc_type_id(proj.associated_ty_id);
-                    let type_alias = f.db.type_alias_data(assoc_ty_id);
+                    let type_alias = f.db.type_alias_signature(assoc_ty_id);
                     f.start_location_link(assoc_ty_id.into());
-                    write!(f, "{}", type_alias.name.display(f.db.upcast(), f.edition()))?;
+                    write!(f, "{}", type_alias.name.display(f.db, f.edition()))?;
                     f.end_location_link();
 
-                    let proj_arg_count = generics(f.db.upcast(), assoc_ty_id.into()).len_self();
+                    let proj_arg_count = generics(f.db, assoc_ty_id.into()).len_self();
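+                    // The substitution lists the parent (trait) arguments first, followed by
+                    // the associated type's own arguments, so skip the parent prefix below.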
+                    let parent_len = proj.substitution.len(Interner) - proj_arg_count;
                     if proj_arg_count > 0 {
                         write!(f, "<")?;
                         hir_fmt_generic_arguments(
                             f,
-                            &proj.substitution.as_slice(Interner)[..proj_arg_count],
+                            &proj.substitution.as_slice(Interner)[parent_len..],
                             None,
                         )?;
                         write!(f, ">")?;
@@ -1912,7 +1922,7 @@ impl HirDisplay for TraitRef {
     fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
         let trait_ = self.hir_trait_id();
         f.start_location_link(trait_.into());
-        write!(f, "{}", f.db.trait_data(trait_).name.display(f.db.upcast(), f.edition()))?;
+        write!(f, "{}", f.db.trait_signature(trait_).name.display(f.db, f.edition()))?;
         f.end_location_link();
         let substs = self.substitution.as_slice(Interner);
         hir_fmt_generics(f, &substs[1..], None, substs[0].ty(Interner))
@@ -1943,7 +1953,7 @@ impl HirDisplay for WhereClause {
                 write!(
                     f,
                     "{}",
-                    f.db.type_alias_data(type_alias).name.display(f.db.upcast(), f.edition()),
+                    f.db.type_alias_signature(type_alias).name.display(f.db, f.edition()),
                 )?;
                 f.end_location_link();
                 write!(f, " = ")?;
@@ -1978,9 +1988,9 @@ impl HirDisplay for LifetimeData {
         match self {
             LifetimeData::Placeholder(idx) => {
                 let id = lt_from_placeholder_idx(f.db, *idx);
-                let generics = generics(f.db.upcast(), id.parent);
+                let generics = generics(f.db, id.parent);
                 let param_data = &generics[id.local_id];
-                write!(f, "{}", param_data.name.display(f.db.upcast(), f.edition()))?;
+                write!(f, "{}", param_data.name.display(f.db, f.edition()))?;
                 Ok(())
             }
             _ if f.display_kind.is_source_code() => write!(f, "'_"),
@@ -2022,14 +2032,14 @@ pub fn write_visibility(
     match vis {
         Visibility::Public => write!(f, "pub "),
         Visibility::Module(vis_id, _) => {
-            let def_map = module_id.def_map(f.db.upcast());
+            let def_map = module_id.def_map(f.db);
             let root_module_id = def_map.module_id(DefMap::ROOT);
             if vis_id == module_id {
                 // pub(self) or omitted
                 Ok(())
             } else if root_module_id == vis_id {
                 write!(f, "pub(crate) ")
-            } else if module_id.containing_module(f.db.upcast()) == Some(vis_id) {
+            } else if module_id.containing_module(f.db) == Some(vis_id) {
                 write!(f, "pub(super) ")
             } else {
                 write!(f, "pub(in ...) ")
@@ -2038,70 +2048,119 @@ pub fn write_visibility(
     }
 }
 
-pub trait HirDisplayWithTypesMap {
+pub trait HirDisplayWithExpressionStore {
     fn hir_fmt(
         &self,
         f: &mut HirFormatter<'_>,
-        types_map: &TypesMap,
+        store: &ExpressionStore,
     ) -> Result<(), HirDisplayError>;
 }
 
-impl<T: ?Sized + HirDisplayWithTypesMap> HirDisplayWithTypesMap for &'_ T {
+impl<T: ?Sized + HirDisplayWithExpressionStore> HirDisplayWithExpressionStore for &'_ T {
     fn hir_fmt(
         &self,
         f: &mut HirFormatter<'_>,
-        types_map: &TypesMap,
+        store: &ExpressionStore,
     ) -> Result<(), HirDisplayError> {
-        T::hir_fmt(&**self, f, types_map)
+        T::hir_fmt(&**self, f, store)
     }
 }
 
-pub fn hir_display_with_types_map<'a, T: HirDisplayWithTypesMap + 'a>(
+pub fn hir_display_with_store<'a, T: HirDisplayWithExpressionStore + 'a>(
     value: T,
-    types_map: &'a TypesMap,
+    store: &'a ExpressionStore,
 ) -> impl HirDisplay + 'a {
-    TypesMapAdapter(value, types_map)
+    ExpressionStoreAdapter(value, store)
 }
 
-struct TypesMapAdapter<'a, T>(T, &'a TypesMap);
+struct ExpressionStoreAdapter<'a, T>(T, &'a ExpressionStore);
 
-impl<'a, T> TypesMapAdapter<'a, T> {
-    fn wrap(types_map: &'a TypesMap) -> impl Fn(T) -> TypesMapAdapter<'a, T> {
-        move |value| TypesMapAdapter(value, types_map)
+impl<'a, T> ExpressionStoreAdapter<'a, T> {
+    fn wrap(store: &'a ExpressionStore) -> impl Fn(T) -> ExpressionStoreAdapter<'a, T> {
+        move |value| ExpressionStoreAdapter(value, store)
     }
 }
 
-impl<T: HirDisplayWithTypesMap> HirDisplay for TypesMapAdapter<'_, T> {
+impl<T: HirDisplayWithExpressionStore> HirDisplay for ExpressionStoreAdapter<'_, T> {
     fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
         T::hir_fmt(&self.0, f, self.1)
     }
 }
+impl HirDisplayWithExpressionStore for LifetimeRefId {
+    fn hir_fmt(
+        &self,
+        f: &mut HirFormatter<'_>,
+        store: &ExpressionStore,
+    ) -> Result<(), HirDisplayError> {
+        match &store[*self] {
+            LifetimeRef::Named(name) => write!(f, "{}", name.display(f.db, f.edition())),
+            LifetimeRef::Static => write!(f, "'static"),
+            LifetimeRef::Placeholder => write!(f, "'_"),
+            LifetimeRef::Error => write!(f, "'{{error}}"),
+            &LifetimeRef::Param(lifetime_param_id) => {
+                let generic_params = f.db.generic_params(lifetime_param_id.parent);
+                write!(
+                    f,
+                    "{}",
+                    generic_params[lifetime_param_id.local_id].name.display(f.db, f.edition())
+                )
+            }
+        }
+    }
+}
 
-impl HirDisplayWithTypesMap for TypeRefId {
+impl HirDisplayWithExpressionStore for TypeRefId {
     fn hir_fmt(
         &self,
         f: &mut HirFormatter<'_>,
-        types_map: &TypesMap,
+        store: &ExpressionStore,
     ) -> Result<(), HirDisplayError> {
-        match &types_map[*self] {
+        match &store[*self] {
             TypeRef::Never => write!(f, "!")?,
+            TypeRef::TypeParam(param) => {
+                let generic_params = f.db.generic_params(param.parent());
+                match generic_params[param.local_id()].name() {
+                    Some(name) => write!(f, "{}", name.display(f.db, f.edition()))?,
+                    None => {
+                        write!(f, "impl ")?;
+                        f.write_joined(
+                            generic_params
+                                .where_predicates()
+                                .filter_map(|it| match it {
+                                    WherePredicate::TypeBound { target, bound }
+                                    | WherePredicate::ForLifetime { lifetimes: _, target, bound }
+                                        if matches!(
+                                            store[*target],
+                                            TypeRef::TypeParam(t) if t == *param
+                                        ) =>
+                                    {
+                                        Some(bound)
+                                    }
+                                    _ => None,
+                                })
+                                .map(ExpressionStoreAdapter::wrap(store)),
+                            " + ",
+                        )?;
+                    }
+                }
+            }
             TypeRef::Placeholder => write!(f, "_")?,
             TypeRef::Tuple(elems) => {
                 write!(f, "(")?;
-                f.write_joined(elems.iter().map(TypesMapAdapter::wrap(types_map)), ", ")?;
+                f.write_joined(elems.iter().map(ExpressionStoreAdapter::wrap(store)), ", ")?;
                 if elems.len() == 1 {
                     write!(f, ",")?;
                 }
                 write!(f, ")")?;
             }
-            TypeRef::Path(path) => path.hir_fmt(f, types_map)?,
+            TypeRef::Path(path) => path.hir_fmt(f, store)?,
             TypeRef::RawPtr(inner, mutability) => {
                 let mutability = match mutability {
                     hir_def::type_ref::Mutability::Shared => "*const ",
                     hir_def::type_ref::Mutability::Mut => "*mut ",
                 };
                 write!(f, "{mutability}")?;
-                inner.hir_fmt(f, types_map)?;
+                inner.hir_fmt(f, store)?;
             }
             TypeRef::Reference(ref_) => {
                 let mutability = match ref_.mutability {
@@ -2110,83 +2169,67 @@ impl HirDisplayWithTypesMap for TypeRefId {
                 };
                 write!(f, "&")?;
                 if let Some(lifetime) = &ref_.lifetime {
-                    write!(f, "{} ", lifetime.name.display(f.db.upcast(), f.edition()))?;
+                    lifetime.hir_fmt(f, store)?;
+                    write!(f, " ")?;
                 }
                 write!(f, "{mutability}")?;
-                ref_.ty.hir_fmt(f, types_map)?;
+                ref_.ty.hir_fmt(f, store)?;
             }
             TypeRef::Array(array) => {
                 write!(f, "[")?;
-                array.ty.hir_fmt(f, types_map)?;
-                write!(f, "; {}]", array.len.display(f.db.upcast(), f.edition()))?;
+                array.ty.hir_fmt(f, store)?;
+                write!(f, "; ")?;
+                array.len.hir_fmt(f, store)?;
+                write!(f, "]")?;
             }
             TypeRef::Slice(inner) => {
                 write!(f, "[")?;
-                inner.hir_fmt(f, types_map)?;
+                inner.hir_fmt(f, store)?;
                 write!(f, "]")?;
             }
             TypeRef::Fn(fn_) => {
-                if fn_.is_unsafe() {
+                if fn_.is_unsafe {
                     write!(f, "unsafe ")?;
                 }
-                if let Some(abi) = fn_.abi() {
+                if let Some(abi) = &fn_.abi {
                     f.write_str("extern \"")?;
                     f.write_str(abi.as_str())?;
                     f.write_str("\" ")?;
                 }
                 write!(f, "fn(")?;
-                if let Some(((_, return_type), function_parameters)) = fn_.params().split_last() {
+                if let Some(((_, return_type), function_parameters)) = fn_.params.split_last() {
                     for index in 0..function_parameters.len() {
                         let (param_name, param_type) = &function_parameters[index];
                         if let Some(name) = param_name {
-                            write!(f, "{}: ", name.display(f.db.upcast(), f.edition()))?;
+                            write!(f, "{}: ", name.display(f.db, f.edition()))?;
                         }
 
-                        param_type.hir_fmt(f, types_map)?;
+                        param_type.hir_fmt(f, store)?;
 
                         if index != function_parameters.len() - 1 {
                             write!(f, ", ")?;
                         }
                     }
-                    if fn_.is_varargs() {
-                        write!(f, "{}...", if fn_.params().len() == 1 { "" } else { ", " })?;
+                    if fn_.is_varargs {
+                        write!(f, "{}...", if fn_.params.len() == 1 { "" } else { ", " })?;
                     }
                     write!(f, ")")?;
-                    match &types_map[*return_type] {
+                    match &store[*return_type] {
                         TypeRef::Tuple(tup) if tup.is_empty() => {}
                         _ => {
                             write!(f, " -> ")?;
-                            return_type.hir_fmt(f, types_map)?;
+                            return_type.hir_fmt(f, store)?;
                         }
                     }
                 }
             }
             TypeRef::ImplTrait(bounds) => {
                 write!(f, "impl ")?;
-                f.write_joined(bounds.iter().map(TypesMapAdapter::wrap(types_map)), " + ")?;
+                f.write_joined(bounds.iter().map(ExpressionStoreAdapter::wrap(store)), " + ")?;
             }
             TypeRef::DynTrait(bounds) => {
                 write!(f, "dyn ")?;
-                f.write_joined(bounds.iter().map(TypesMapAdapter::wrap(types_map)), " + ")?;
-            }
-            TypeRef::Macro(macro_call) => {
-                let (mut types_map, mut types_source_map) =
-                    (TypesMap::default(), TypesSourceMap::default());
-                let mut ctx = hir_def::lower::LowerCtx::new(
-                    f.db.upcast(),
-                    macro_call.file_id,
-                    &mut types_map,
-                    &mut types_source_map,
-                );
-                let macro_call = macro_call.to_node(f.db.upcast());
-                match macro_call.path() {
-                    Some(path) => match Path::from_src(&mut ctx, path) {
-                        Some(path) => path.hir_fmt(f, &types_map)?,
-                        None => write!(f, "{{macro}}")?,
-                    },
-                    None => write!(f, "{{macro}}")?,
-                }
-                write!(f, "!(..)")?;
+                f.write_joined(bounds.iter().map(ExpressionStoreAdapter::wrap(store)), " + ")?;
             }
             TypeRef::Error => write!(f, "{{error}}")?,
         }
@@ -2194,11 +2237,24 @@ impl HirDisplayWithTypesMap for TypeRefId {
     }
 }
 
-impl HirDisplayWithTypesMap for TypeBound {
+impl HirDisplayWithExpressionStore for ConstRef {
+    fn hir_fmt(
+        &self,
+        f: &mut HirFormatter<'_>,
+        _store: &ExpressionStore,
+    ) -> Result<(), HirDisplayError> {
+        // FIXME: render the actual const instead of a placeholder
+        write!(f, "{{const}}")?;
+
+        Ok(())
+    }
+}
+
+impl HirDisplayWithExpressionStore for TypeBound {
     fn hir_fmt(
         &self,
         f: &mut HirFormatter<'_>,
-        types_map: &TypesMap,
+        store: &ExpressionStore,
     ) -> Result<(), HirDisplayError> {
         match self {
             &TypeBound::Path(path, modifier) => {
@@ -2206,48 +2262,47 @@ impl HirDisplayWithTypesMap for TypeBound {
                     TraitBoundModifier::None => (),
                     TraitBoundModifier::Maybe => write!(f, "?")?,
                 }
-                types_map[path].hir_fmt(f, types_map)
-            }
-            TypeBound::Lifetime(lifetime) => {
-                write!(f, "{}", lifetime.name.display(f.db.upcast(), f.edition()))
+                store[path].hir_fmt(f, store)
             }
+            TypeBound::Lifetime(lifetime) => lifetime.hir_fmt(f, store),
             TypeBound::ForLifetime(lifetimes, path) => {
                 let edition = f.edition();
                 write!(
                     f,
                     "for<{}> ",
-                    lifetimes.iter().map(|it| it.display(f.db.upcast(), edition)).format(", ")
+                    lifetimes.iter().map(|it| it.display(f.db, edition)).format(", ")
                 )?;
-                types_map[*path].hir_fmt(f, types_map)
+                store[*path].hir_fmt(f, store)
             }
             TypeBound::Use(args) => {
                 let edition = f.edition();
-                write!(
-                    f,
-                    "use<{}> ",
-                    args.iter()
-                        .map(|it| match it {
-                            UseArgRef::Lifetime(lt) => lt.name.display(f.db.upcast(), edition),
-                            UseArgRef::Name(n) => n.display(f.db.upcast(), edition),
-                        })
-                        .format(", ")
-                )
+                write!(f, "use<")?;
+                let last = args.len().saturating_sub(1);
+                for (idx, arg) in args.iter().enumerate() {
+                    match arg {
+                        UseArgRef::Lifetime(lt) => lt.hir_fmt(f, store)?,
+                        UseArgRef::Name(n) => write!(f, "{}", n.display(f.db, edition))?,
+                    }
+                    if idx != last {
+                        write!(f, ", ")?;
+                    }
+                }
+                write!(f, "> ")
             }
             TypeBound::Error => write!(f, "{{error}}"),
         }
     }
 }
 
-impl HirDisplayWithTypesMap for Path {
+impl HirDisplayWithExpressionStore for Path {
     fn hir_fmt(
         &self,
         f: &mut HirFormatter<'_>,
-        types_map: &TypesMap,
+        store: &ExpressionStore,
     ) -> Result<(), HirDisplayError> {
         match (self.type_anchor(), self.kind()) {
             (Some(anchor), _) => {
                 write!(f, "<")?;
-                anchor.hir_fmt(f, types_map)?;
+                anchor.hir_fmt(f, store)?;
                 write!(f, ">")?;
             }
             (_, PathKind::Plain) => {}
@@ -2266,12 +2321,12 @@ impl HirDisplayWithTypesMap for Path {
                 // Resolve `$crate` to the crate's display name.
                 // FIXME: should use the dependency name instead if available, but that depends on
                 // the crate invoking `HirDisplay`
-                let crate_graph = f.db.crate_graph();
-                let name = crate_graph[*id]
+                let crate_data = id.extra_data(f.db);
+                let name = crate_data
                     .display_name
                     .as_ref()
-                    .map(|name| name.canonical_name())
-                    .unwrap_or(&sym::dollar_crate);
+                    .map(|name| (*name.canonical_name()).clone())
+                    .unwrap_or(sym::dollar_crate);
                 write!(f, "{name}")?
             }
         }
@@ -2290,7 +2345,7 @@ impl HirDisplayWithTypesMap for Path {
         });
         if let Some(ty) = trait_self_ty {
             write!(f, "<")?;
-            ty.hir_fmt(f, types_map)?;
+            ty.hir_fmt(f, store)?;
             write!(f, " as ")?;
             // Now format the path of the trait...
         }
@@ -2299,81 +2354,89 @@ impl HirDisplayWithTypesMap for Path {
             if !matches!(self.kind(), PathKind::Plain) || seg_idx > 0 {
                 write!(f, "::")?;
             }
-            write!(f, "{}", segment.name.display(f.db.upcast(), f.edition()))?;
+            write!(f, "{}", segment.name.display(f.db, f.edition()))?;
             if let Some(generic_args) = segment.args_and_bindings {
                 // We should be in type context, so format as `Foo<Bar>` instead of `Foo::<Bar>`.
                 // Do we actually format expressions?
-                if generic_args.desugared_from_fn {
-                    // First argument will be a tuple, which already includes the parentheses.
-                    // If the tuple only contains 1 item, write it manually to avoid the trailing `,`.
-                    let tuple = match generic_args.args[0] {
-                        hir_def::path::GenericArg::Type(ty) => match &types_map[ty] {
-                            TypeRef::Tuple(it) => Some(it),
+                match generic_args.parenthesized {
+                    hir_def::expr_store::path::GenericArgsParentheses::ReturnTypeNotation => {
+                        write!(f, "(..)")?;
+                    }
+                    hir_def::expr_store::path::GenericArgsParentheses::ParenSugar => {
+                        // First argument will be a tuple, which already includes the parentheses.
+                        // If the tuple only contains 1 item, write it manually to avoid the trailing `,`.
+                        let tuple = match generic_args.args[0] {
+                            hir_def::expr_store::path::GenericArg::Type(ty) => match &store[ty] {
+                                TypeRef::Tuple(it) => Some(it),
+                                _ => None,
+                            },
                             _ => None,
-                        },
-                        _ => None,
-                    };
-                    if let Some(v) = tuple {
-                        if v.len() == 1 {
-                            write!(f, "(")?;
-                            v[0].hir_fmt(f, types_map)?;
-                            write!(f, ")")?;
-                        } else {
-                            generic_args.args[0].hir_fmt(f, types_map)?;
+                        };
+                        if let Some(v) = tuple {
+                            if v.len() == 1 {
+                                write!(f, "(")?;
+                                v[0].hir_fmt(f, store)?;
+                                write!(f, ")")?;
+                            } else {
+                                generic_args.args[0].hir_fmt(f, store)?;
+                            }
                         }
-                    }
-                    if let Some(ret) = generic_args.bindings[0].type_ref {
-                        if !matches!(&types_map[ret], TypeRef::Tuple(v) if v.is_empty()) {
-                            write!(f, " -> ")?;
-                            ret.hir_fmt(f, types_map)?;
+                        if let Some(ret) = generic_args.bindings[0].type_ref {
+                            if !matches!(&store[ret], TypeRef::Tuple(v) if v.is_empty()) {
+                                write!(f, " -> ")?;
+                                ret.hir_fmt(f, store)?;
+                            }
                         }
                     }
-                    return Ok(());
-                }
-
-                let mut first = true;
-                // Skip the `Self` bound if exists. It's handled outside the loop.
-                for arg in &generic_args.args[generic_args.has_self_type as usize..] {
-                    if first {
-                        first = false;
-                        write!(f, "<")?;
-                    } else {
-                        write!(f, ", ")?;
-                    }
-                    arg.hir_fmt(f, types_map)?;
-                }
-                for binding in generic_args.bindings.iter() {
-                    if first {
-                        first = false;
-                        write!(f, "<")?;
-                    } else {
-                        write!(f, ", ")?;
-                    }
-                    write!(f, "{}", binding.name.display(f.db.upcast(), f.edition()))?;
-                    match &binding.type_ref {
-                        Some(ty) => {
-                            write!(f, " = ")?;
-                            ty.hir_fmt(f, types_map)?
+                    hir_def::expr_store::path::GenericArgsParentheses::No => {
+                        let mut first = true;
+                        // Skip the `Self` bound if it exists. It's handled outside the loop.
+                        for arg in &generic_args.args[generic_args.has_self_type as usize..] {
+                            if first {
+                                first = false;
+                                write!(f, "<")?;
+                            } else {
+                                write!(f, ", ")?;
+                            }
+                            arg.hir_fmt(f, store)?;
                         }
-                        None => {
-                            write!(f, ": ")?;
-                            f.write_joined(
-                                binding.bounds.iter().map(TypesMapAdapter::wrap(types_map)),
-                                " + ",
-                            )?;
+                        for binding in generic_args.bindings.iter() {
+                            if first {
+                                first = false;
+                                write!(f, "<")?;
+                            } else {
+                                write!(f, ", ")?;
+                            }
+                            write!(f, "{}", binding.name.display(f.db, f.edition()))?;
+                            match &binding.type_ref {
+                                Some(ty) => {
+                                    write!(f, " = ")?;
+                                    ty.hir_fmt(f, store)?
+                                }
+                                None => {
+                                    write!(f, ": ")?;
+                                    f.write_joined(
+                                        binding
+                                            .bounds
+                                            .iter()
+                                            .map(ExpressionStoreAdapter::wrap(store)),
+                                        " + ",
+                                    )?;
+                                }
+                            }
                         }
-                    }
-                }
 
-                // There may be no generic arguments to print, in case of a trait having only a
-                // single `Self` bound which is converted to `<Ty as Trait>::Assoc`.
-                if !first {
-                    write!(f, ">")?;
-                }
+                        // There may be no generic arguments to print, in case of a trait having only a
+                        // single `Self` bound which is converted to `<Ty as Trait>::Assoc`.
+                        if !first {
+                            write!(f, ">")?;
+                        }
 
-                // Current position: `<Ty as Trait<Args>|`
-                if generic_args.has_self_type {
-                    write!(f, ">")?;
+                        // Current position: `<Ty as Trait<Args>|`
+                        if generic_args.has_self_type {
+                            write!(f, ">")?;
+                        }
+                    }
                 }
             }
         }
@@ -2382,20 +2445,19 @@ impl HirDisplayWithTypesMap for Path {
     }
 }
 
-impl HirDisplayWithTypesMap for hir_def::path::GenericArg {
+impl HirDisplayWithExpressionStore for hir_def::expr_store::path::GenericArg {
     fn hir_fmt(
         &self,
         f: &mut HirFormatter<'_>,
-        types_map: &TypesMap,
+        store: &ExpressionStore,
     ) -> Result<(), HirDisplayError> {
         match self {
-            hir_def::path::GenericArg::Type(ty) => ty.hir_fmt(f, types_map),
-            hir_def::path::GenericArg::Const(c) => {
-                write!(f, "{}", c.display(f.db.upcast(), f.edition()))
-            }
-            hir_def::path::GenericArg::Lifetime(lifetime) => {
-                write!(f, "{}", lifetime.name.display(f.db.upcast(), f.edition()))
+            hir_def::expr_store::path::GenericArg::Type(ty) => ty.hir_fmt(f, store),
+            hir_def::expr_store::path::GenericArg::Const(_c) => {
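+                // FIXME: const generic arguments are not rendered yet; emit a placeholder.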
+                // write!(f, "{}", c.display(f.db, f.edition()))
+                write!(f, "<expr>")
             }
+            hir_def::expr_store::path::GenericArg::Lifetime(lifetime) => lifetime.hir_fmt(f, store),
         }
     }
 }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/drop.rs b/src/tools/rust-analyzer/crates/hir-ty/src/drop.rs
index 351926c86c473..9823c854d5b30 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/drop.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/drop.rs
@@ -1,18 +1,17 @@
 //! Utilities for computing drop info about types.
 
-use base_db::ra_salsa;
 use chalk_ir::cast::Cast;
-use hir_def::data::adt::StructFlags;
-use hir_def::lang_item::LangItem;
 use hir_def::AdtId;
+use hir_def::lang_item::LangItem;
+use hir_def::signatures::StructFlags;
 use stdx::never;
 use triomphe::Arc;
 
 use crate::{
-    db::HirDatabase, method_resolution::TyFingerprint, AliasTy, Canonical, CanonicalVarKinds,
-    InEnvironment, Interner, ProjectionTy, TraitEnvironment, Ty, TyBuilder, TyKind,
+    AliasTy, Canonical, CanonicalVarKinds, ConcreteConst, ConstScalar, ConstValue, InEnvironment,
+    Interner, ProjectionTy, TraitEnvironment, Ty, TyBuilder, TyKind, db::HirDatabase,
+    method_resolution::TyFingerprint,
 };
-use crate::{ConcreteConst, ConstScalar, ConstValue};
 
 fn has_destructor(db: &dyn HirDatabase, adt: AdtId) -> bool {
     let module = match adt {
@@ -32,8 +31,7 @@ fn has_destructor(db: &dyn HirDatabase, adt: AdtId) -> bool {
         },
         None => db.trait_impls_in_crate(module.krate()),
     };
-    let result = impls.for_trait_and_self_ty(drop_trait, TyFingerprint::Adt(adt)).next().is_some();
-    result
+    impls.for_trait_and_self_ty(drop_trait, TyFingerprint::Adt(adt)).next().is_some()
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
@@ -55,7 +53,7 @@ pub(crate) fn has_drop_glue(db: &dyn HirDatabase, ty: Ty, env: Arc<TraitEnvironm
             }
             match adt.0 {
                 AdtId::StructId(id) => {
-                    if db.struct_data(id).flags.contains(StructFlags::IS_MANUALLY_DROP) {
+                    if db.struct_signature(id).flags.contains(StructFlags::IS_MANUALLY_DROP) {
                         return DropGlue::None;
                     }
                     db.field_types(id.into())
@@ -72,7 +70,7 @@ pub(crate) fn has_drop_glue(db: &dyn HirDatabase, ty: Ty, env: Arc<TraitEnvironm
                 // Unions cannot have fields with destructors.
                 AdtId::UnionId(_) => DropGlue::None,
                 AdtId::EnumId(id) => db
-                    .enum_data(id)
+                    .enum_variants(id)
                     .variants
                     .iter()
                     .map(|&(variant, _)| {
@@ -176,11 +174,7 @@ fn projection_has_drop_glue(
     let normalized = db.normalize_projection(projection, env.clone());
     match normalized.kind(Interner) {
         TyKind::Alias(AliasTy::Projection(_)) | TyKind::AssociatedType(..) => {
-            if is_copy(db, ty, env) {
-                DropGlue::None
-            } else {
-                DropGlue::DependOnParams
-            }
+            if is_copy(db, ty, env) { DropGlue::None } else { DropGlue::DependOnParams }
         }
         _ => db.has_drop_glue(normalized, env),
     }
@@ -199,11 +193,10 @@ fn is_copy(db: &dyn HirDatabase, ty: Ty, env: Arc<TraitEnvironment>) -> bool {
     db.trait_solve(env.krate, env.block, goal).is_some()
 }
 
-pub(crate) fn has_drop_glue_recover(
+pub(crate) fn has_drop_glue_cycle_result(
     _db: &dyn HirDatabase,
-    _cycle: &ra_salsa::Cycle,
-    _ty: &Ty,
-    _env: &Arc<TraitEnvironment>,
+    _ty: Ty,
+    _env: Arc<TraitEnvironment>,
 ) -> DropGlue {
     DropGlue::None
 }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs
index e042c35d0c6f2..80b18473907de 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs
@@ -3,28 +3,26 @@
 use std::ops::ControlFlow;
 
 use chalk_ir::{
+    DebruijnIndex,
     cast::Cast,
     visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor},
-    DebruijnIndex,
 };
 use chalk_solve::rust_ir::InlineBound;
 use hir_def::{
-    data::TraitFlags, lang_item::LangItem, AssocItemId, ConstId, FunctionId, GenericDefId,
-    HasModule, TraitId, TypeAliasId,
+    AssocItemId, ConstId, FunctionId, GenericDefId, HasModule, TraitId, TypeAliasId,
+    lang_item::LangItem, signatures::TraitFlags,
 };
 use rustc_hash::FxHashSet;
 use smallvec::SmallVec;
 
 use crate::{
-    all_super_traits,
+    AliasEq, AliasTy, Binders, BoundVar, CallableSig, GoalData, ImplTraitId, Interner, OpaqueTyId,
+    ProjectionTyExt, Solution, Substitution, TraitRef, Ty, TyKind, WhereClause, all_super_traits,
     db::HirDatabase,
     from_assoc_type_id, from_chalk_trait_id,
     generics::{generics, trait_self_param_idx},
-    lower::callable_item_sig,
-    to_assoc_type_id, to_chalk_trait_id,
+    to_chalk_trait_id,
     utils::elaborate_clause_supertraits,
-    AliasEq, AliasTy, Binders, BoundVar, CallableSig, GoalData, ImplTraitId, Interner, OpaqueTyId,
-    ProjectionTyExt, Solution, Substitution, TraitRef, Ty, TyKind, WhereClause,
 };
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -54,7 +52,7 @@ pub fn dyn_compatibility(
     db: &dyn HirDatabase,
     trait_: TraitId,
 ) -> Option<DynCompatibilityViolation> {
-    for super_trait in all_super_traits(db.upcast(), trait_).into_iter().skip(1).rev() {
+    for super_trait in all_super_traits(db, trait_).into_iter().skip(1).rev() {
         if db.dyn_compatibility_of_trait(super_trait).is_some() {
             return Some(DynCompatibilityViolation::HasNonCompatibleSuperTrait(super_trait));
         }
@@ -71,7 +69,7 @@ pub fn dyn_compatibility_with_callback<F>(
 where
     F: FnMut(DynCompatibilityViolation) -> ControlFlow<()>,
 {
-    for super_trait in all_super_traits(db.upcast(), trait_).into_iter().skip(1).rev() {
+    for super_trait in all_super_traits(db, trait_).into_iter().skip(1).rev() {
         if db.dyn_compatibility_of_trait(super_trait).is_some() {
             cb(DynCompatibilityViolation::HasNonCompatibleSuperTrait(trait_))?;
         }
@@ -103,7 +101,7 @@ where
 
     // rustc checks for non-lifetime binders here, but we don't support HRTB yet
 
-    let trait_data = db.trait_data(trait_);
+    let trait_data = db.trait_items(trait_);
     for (_, assoc_item) in &trait_data.items {
         dyn_compatibility_violation_for_assoc_item(db, trait_, *assoc_item, cb)?;
     }
@@ -116,7 +114,7 @@ pub fn dyn_compatibility_of_trait_query(
     trait_: TraitId,
 ) -> Option<DynCompatibilityViolation> {
     let mut res = None;
-    let _ = dyn_compatibility_of_trait_with_callback(db, trait_, &mut |osv| {
+    _ = dyn_compatibility_of_trait_with_callback(db, trait_, &mut |osv| {
         res = Some(osv);
         ControlFlow::Break(())
     });
@@ -125,12 +123,12 @@ pub fn dyn_compatibility_of_trait_query(
 }
 
 fn generics_require_sized_self(db: &dyn HirDatabase, def: GenericDefId) -> bool {
-    let krate = def.module(db.upcast()).krate();
+    let krate = def.module(db).krate();
     let Some(sized) = db.lang_item(krate, LangItem::Sized).and_then(|l| l.as_trait()) else {
         return false;
     };
 
-    let Some(trait_self_param_idx) = trait_self_param_idx(db.upcast(), def) else {
+    let Some(trait_self_param_idx) = trait_self_param_idx(db, def) else {
         return false;
     };
 
@@ -166,14 +164,13 @@ fn predicates_reference_self(db: &dyn HirDatabase, trait_: TraitId) -> bool {
 
 // Same as the above, `predicates_reference_self`
 fn bounds_reference_self(db: &dyn HirDatabase, trait_: TraitId) -> bool {
-    let trait_data = db.trait_data(trait_);
+    let trait_data = db.trait_items(trait_);
     trait_data
         .items
         .iter()
         .filter_map(|(_, it)| match *it {
             AssocItemId::TypeAliasId(id) => {
-                let assoc_ty_id = to_assoc_type_id(id);
-                let assoc_ty_data = db.associated_ty_data(assoc_ty_id);
+                let assoc_ty_data = db.associated_ty_data(id);
                 Some(assoc_ty_data)
             }
             _ => None,
@@ -256,7 +253,7 @@ fn contains_illegal_self_type_reference<T: TypeVisitable<Interner>>(
     outer_binder: DebruijnIndex,
     allow_self_projection: AllowSelfProjection,
 ) -> bool {
-    let Some(trait_self_param_idx) = trait_self_param_idx(db.upcast(), def) else {
+    let Some(trait_self_param_idx) = trait_self_param_idx(db, def) else {
         return false;
     };
     struct IllegalSelfTypeVisitor<'a> {
@@ -290,8 +287,7 @@ fn contains_illegal_self_type_reference<T: TypeVisitable<Interner>>(
                     AllowSelfProjection::Yes => {
                         let trait_ = proj.trait_(self.db);
                         if self.super_traits.is_none() {
-                            self.super_traits =
-                                Some(all_super_traits(self.db.upcast(), self.trait_));
+                            self.super_traits = Some(all_super_traits(self.db, self.trait_));
                         }
                         if self.super_traits.as_ref().is_some_and(|s| s.contains(&trait_)) {
                             ControlFlow::Continue(())
@@ -347,7 +343,7 @@ where
             })
         }
         AssocItemId::TypeAliasId(it) => {
-            let def_map = db.crate_def_map(trait_.krate(db.upcast()));
+            let def_map = db.crate_def_map(trait_.krate(db));
             if def_map.is_unstable_feature_enabled(&intern::sym::generic_associated_type_extended) {
                 ControlFlow::Continue(())
             } else {
@@ -371,7 +367,7 @@ fn virtual_call_violations_for_method<F>(
 where
     F: FnMut(MethodViolationCode) -> ControlFlow<()>,
 {
-    let func_data = db.function_data(func);
+    let func_data = db.function_signature(func);
     if !func_data.has_self_param() {
         cb(MethodViolationCode::StaticMethod)?;
     }
@@ -380,7 +376,7 @@ where
         cb(MethodViolationCode::AsyncFn)?;
     }
 
-    let sig = callable_item_sig(db, func.into());
+    let sig = db.callable_item_signature(func.into());
     if sig.skip_binders().params().iter().skip(1).any(|ty| {
         contains_illegal_self_type_reference(
             db,
@@ -421,7 +417,7 @@ where
     }
 
     let predicates = &*db.generic_predicates_without_parent(func.into());
-    let trait_self_idx = trait_self_param_idx(db.upcast(), func.into());
+    let trait_self_idx = trait_self_param_idx(db, func.into());
     for pred in predicates {
         let pred = pred.skip_binders().skip_binders();
 
@@ -431,8 +427,8 @@ where
 
         // Allow `impl AutoTrait` predicates
         if let WhereClause::Implemented(TraitRef { trait_id, substitution }) = pred {
-            let trait_data = db.trait_data(from_chalk_trait_id(*trait_id));
-            if trait_data.flags.contains(TraitFlags::IS_AUTO)
+            let trait_data = db.trait_signature(from_chalk_trait_id(*trait_id));
+            if trait_data.flags.contains(TraitFlags::AUTO)
                 && substitution
                     .as_slice(Interner)
                     .first()
@@ -468,7 +464,7 @@ fn receiver_is_dispatchable(
     func: FunctionId,
     sig: &Binders<CallableSig>,
 ) -> bool {
-    let Some(trait_self_idx) = trait_self_param_idx(db.upcast(), func.into()) else {
+    let Some(trait_self_idx) = trait_self_param_idx(db, func.into()) else {
         return false;
     };
 
@@ -486,14 +482,14 @@ fn receiver_is_dispatchable(
         return true;
     }
 
-    let placeholder_subst = generics(db.upcast(), func.into()).placeholder_subst(db);
+    let placeholder_subst = generics(db, func.into()).placeholder_subst(db);
 
     let substituted_sig = sig.clone().substitute(Interner, &placeholder_subst);
     let Some(receiver_ty) = substituted_sig.params().first() else {
         return false;
     };
 
-    let krate = func.module(db.upcast()).krate();
+    let krate = func.module(db).krate();
     let traits = (
         db.lang_item(krate, LangItem::Unsize).and_then(|it| it.as_trait()),
         db.lang_item(krate, LangItem::DispatchFromDyn).and_then(|it| it.as_trait()),
@@ -552,20 +548,16 @@ fn receiver_is_dispatchable(
 }
 
 fn receiver_for_self_ty(db: &dyn HirDatabase, func: FunctionId, ty: Ty) -> Option<Ty> {
-    let generics = generics(db.upcast(), func.into());
-    let trait_self_idx = trait_self_param_idx(db.upcast(), func.into())?;
+    let generics = generics(db, func.into());
+    let trait_self_idx = trait_self_param_idx(db, func.into())?;
     let subst = generics.placeholder_subst(db);
     let subst = Substitution::from_iter(
         Interner,
         subst.iter(Interner).enumerate().map(|(idx, arg)| {
-            if idx == trait_self_idx {
-                ty.clone().cast(Interner)
-            } else {
-                arg.clone()
-            }
+            if idx == trait_self_idx { ty.clone().cast(Interner) } else { arg.clone() }
         }),
     );
-    let sig = callable_item_sig(db, func.into());
+    let sig = db.callable_item_signature(func.into());
     let sig = sig.substitute(Interner, &subst);
     sig.params_and_return.first().cloned()
 }
@@ -597,7 +589,7 @@ fn contains_illegal_impl_trait_in_trait(
 
     let ret = sig.skip_binders().ret();
     let mut visitor = OpaqueTypeCollector(FxHashSet::default());
-    let _ = ret.visit_with(visitor.as_dyn(), DebruijnIndex::INNERMOST);
+    _ = ret.visit_with(visitor.as_dyn(), DebruijnIndex::INNERMOST);
 
     // Since we haven't implemented RPITIT properly the way rustc does yet,
     // just check whether `ret` contains RPIT for now
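
For readers of the `StaticMethod` and `AsyncFn` checks earlier in this file, a plain-Rust sketch of the language rule being enforced (ordinary user code, not this crate's API): a method without a `self` receiver has no object to dispatch on, so it must opt out of dynamic dispatch with `where Self: Sized` for the trait to stay dyn-compatible.

trait Repo {
    // No receiver: usable only on concrete types, so it must be excluded
    // from dynamic dispatch with `where Self: Sized`.
    fn open() -> Self
    where
        Self: Sized;

    // Has a receiver, so it can be called through `dyn Repo`.
    fn name(&self) -> String;
}

fn describe(repo: &dyn Repo) -> String {
    repo.name()
}

fn main() {
    struct Mem;
    impl Repo for Mem {
        fn open() -> Self { Mem }
        fn name(&self) -> String { "mem".to_owned() }
    }
    println!("{}", describe(&Mem::open()));
}
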
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility/tests.rs
index 50851325bd519..5078e8cfaa8b9 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility/tests.rs
@@ -33,15 +33,18 @@ fn check_dyn_compatibility<'a>(
         expected.into_iter().map(|(id, osvs)| (id, FxHashSet::from_iter(osvs))).collect();
     let (db, file_ids) = TestDB::with_many_files(ra_fixture);
     for (trait_id, name) in file_ids.into_iter().flat_map(|file_id| {
-        let module_id = db.module_for_file(file_id);
+        let module_id = db.module_for_file(file_id.file_id(&db));
         let def_map = module_id.def_map(&db);
         let scope = &def_map[module_id.local_id].scope;
         scope
             .declarations()
             .filter_map(|def| {
                 if let hir_def::ModuleDefId::TraitId(trait_id) = def {
-                    let name =
-                        db.trait_data(trait_id).name.display_no_db(file_id.edition()).to_smolstr();
+                    let name = db
+                        .trait_signature(trait_id)
+                        .name
+                        .display_no_db(file_id.edition(&db))
+                        .to_smolstr();
                     Some((trait_id, name))
                 } else {
                     None
@@ -53,7 +56,7 @@ fn check_dyn_compatibility<'a>(
             continue;
         };
         let mut osvs = FxHashSet::default();
-        let _ = dyn_compatibility_with_callback(&db, trait_id, &mut |osv| {
+        _ = dyn_compatibility_with_callback(&db, trait_id, &mut |osv| {
             osvs.insert(match osv {
                 DynCompatibilityViolation::SizedSelf => SizedSelf,
                 DynCompatibilityViolation::SelfReferential => SelfReferential,
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs
index 18cf6e5ce36ef..bb4aaf7889589 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs
@@ -1,41 +1,41 @@
 //! Utilities for working with generics.
 //!
 //! The layout for generics as expected by chalk are as follows:
+//! - Parent parameters
 //! - Optional Self parameter
 //! - Lifetime parameters
 //! - Type or Const parameters
-//! - Parent parameters
 //!
 //! where parent follows the same scheme.
 use std::ops;
 
-use chalk_ir::{cast::Cast as _, BoundVar, DebruijnIndex};
+use chalk_ir::{BoundVar, DebruijnIndex, cast::Cast as _};
 use hir_def::{
+    ConstParamId, GenericDefId, GenericParamId, ItemContainerId, LifetimeParamId, Lookup,
+    TypeOrConstParamId, TypeParamId,
     db::DefDatabase,
-    generics::{
-        GenericParamDataRef, GenericParams, LifetimeParamData, TypeOrConstParamData,
-        TypeParamProvenance,
+    expr_store::ExpressionStore,
+    hir::generics::{
+        GenericParamDataRef, GenericParams, LifetimeParamData, LocalLifetimeParamId,
+        LocalTypeOrConstParamId, TypeOrConstParamData, TypeParamProvenance, WherePredicate,
     },
-    type_ref::TypesMap,
-    ConstParamId, GenericDefId, GenericParamId, ItemContainerId, LifetimeParamId,
-    LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup, TypeOrConstParamId, TypeParamId,
 };
 use itertools::chain;
-use stdx::TupleExt;
 use triomphe::Arc;
 
-use crate::{db::HirDatabase, lt_to_placeholder_idx, to_placeholder_idx, Interner, Substitution};
+use crate::{Interner, Substitution, db::HirDatabase, lt_to_placeholder_idx, to_placeholder_idx};
 
 pub fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics {
     let parent_generics = parent_generic_def(db, def).map(|def| Box::new(generics(db, def)));
-    let params = db.generic_params(def);
+    let (params, store) = db.generic_params_and_store(def);
     let has_trait_self_param = params.trait_self_param().is_some();
-    Generics { def, params, parent_generics, has_trait_self_param }
+    Generics { def, params, parent_generics, has_trait_self_param, store }
 }
 #[derive(Clone, Debug)]
 pub struct Generics {
     def: GenericDefId,
     params: Arc<GenericParams>,
+    store: Arc<ExpressionStore>,
     parent_generics: Option<Box<Generics>>,
     has_trait_self_param: bool,
 }
@@ -55,12 +55,16 @@ impl Generics {
         self.def
     }
 
-    pub(crate) fn self_types_map(&self) -> &TypesMap {
-        &self.params.types_map
+    pub(crate) fn store(&self) -> &ExpressionStore {
+        &self.store
+    }
+
+    pub(crate) fn where_predicates(&self) -> impl Iterator<Item = &WherePredicate> {
+        self.params.where_predicates()
     }
 
     pub(crate) fn iter_id(&self) -> impl Iterator<Item = GenericParamId> + '_ {
-        self.iter_self_id().chain(self.iter_parent_id())
+        self.iter_parent_id().chain(self.iter_self_id())
     }
 
     pub(crate) fn iter_self_id(&self) -> impl Iterator<Item = GenericParamId> + '_ {
@@ -73,31 +77,26 @@ impl Generics {
 
     pub(crate) fn iter_self_type_or_consts(
         &self,
-    ) -> impl DoubleEndedIterator<Item = (LocalTypeOrConstParamId, &TypeOrConstParamData)> {
-        self.params.iter_type_or_consts()
-    }
-
-    pub(crate) fn iter_self_type_or_consts_id(
-        &self,
-    ) -> impl DoubleEndedIterator<Item = GenericParamId> + '_ {
-        self.params.iter_type_or_consts().map(from_toc_id(self)).map(TupleExt::head)
+    ) -> impl DoubleEndedIterator<Item = (LocalTypeOrConstParamId, &TypeOrConstParamData)> + '_
+    {
+        let mut toc = self.params.iter_type_or_consts();
+        let trait_self_param = self.has_trait_self_param.then(|| toc.next()).flatten();
+        chain!(trait_self_param, toc)
     }
 
-    /// Iterate over the params followed by the parent params.
+    /// Iterate over the parent params followed by self params.
     pub(crate) fn iter(
         &self,
     ) -> impl DoubleEndedIterator<Item = (GenericParamId, GenericParamDataRef<'_>)> + '_ {
-        self.iter_self().chain(self.iter_parent())
+        self.iter_parent().chain(self.iter_self())
     }
 
-    pub(crate) fn iter_parents_with_types_map(
+    pub(crate) fn iter_parents_with_store(
         &self,
-    ) -> impl Iterator<Item = ((GenericParamId, GenericParamDataRef<'_>), &TypesMap)> + '_ {
-        self.iter_parent().zip(
-            self.parent_generics()
-                .into_iter()
-                .flat_map(|it| std::iter::repeat(&it.params.types_map)),
-        )
+    ) -> impl Iterator<Item = ((GenericParamId, GenericParamDataRef<'_>), &ExpressionStore)> + '_
+    {
+        self.iter_parent()
+            .zip(self.parent_generics().into_iter().flat_map(|it| std::iter::repeat(&*it.store)))
     }
 
     /// Iterate over the params without parent params.
@@ -110,7 +109,7 @@ impl Generics {
     }
 
     /// Iterator over types and const params of parent.
-    fn iter_parent(
+    pub(crate) fn iter_parent(
         &self,
     ) -> impl DoubleEndedIterator<Item = (GenericParamId, GenericParamDataRef<'_>)> + '_ {
         self.parent_generics().into_iter().flat_map(|it| {
@@ -132,6 +131,10 @@ impl Generics {
         self.params.len()
     }
 
+    pub(crate) fn len_lifetimes_self(&self) -> usize {
+        self.params.len_lifetimes()
+    }
+
     /// (parent total, self param, type params, const params, impl trait list, lifetimes)
     pub(crate) fn provenance_split(&self) -> (usize, bool, usize, usize, usize, usize) {
         let mut self_param = false;
@@ -147,7 +150,7 @@ impl Generics {
             TypeOrConstParamData::ConstParamData(_) => const_params += 1,
         });
 
-        let lifetime_params = self.params.iter_lt().count();
+        let lifetime_params = self.params.len_lifetimes();
 
         let parent_len = self.parent_generics().map_or(0, Generics::len);
         (parent_len, self_param, type_params, const_params, impl_trait_params, lifetime_params)
@@ -160,17 +163,19 @@ impl Generics {
     fn find_type_or_const_param(&self, param: TypeOrConstParamId) -> Option<usize> {
         if param.parent == self.def {
             let idx = param.local_id.into_raw().into_u32() as usize;
-            debug_assert!(idx <= self.params.len_type_or_consts());
+            debug_assert!(
+                idx <= self.params.len_type_or_consts(),
+                "idx: {} len: {}",
+                idx,
+                self.params.len_type_or_consts()
+            );
             if self.params.trait_self_param() == Some(param.local_id) {
                 return Some(idx);
             }
-            Some(self.params.len_lifetimes() + idx)
+            Some(self.parent_generics().map_or(0, |g| g.len()) + self.params.len_lifetimes() + idx)
         } else {
             debug_assert_eq!(self.parent_generics().map(|it| it.def), Some(param.parent));
-            self.parent_generics()
-                .and_then(|g| g.find_type_or_const_param(param))
-                // Remember that parent parameters come after parameters for self.
-                .map(|idx| self.len_self() + idx)
+            self.parent_generics().and_then(|g| g.find_type_or_const_param(param))
         }
     }
 
@@ -182,12 +187,14 @@ impl Generics {
         if lifetime.parent == self.def {
             let idx = lifetime.local_id.into_raw().into_u32() as usize;
             debug_assert!(idx <= self.params.len_lifetimes());
-            Some(self.params.trait_self_param().is_some() as usize + idx)
+            Some(
+                self.parent_generics().map_or(0, |g| g.len())
+                    + self.params.trait_self_param().is_some() as usize
+                    + idx,
+            )
         } else {
             debug_assert_eq!(self.parent_generics().map(|it| it.def), Some(lifetime.parent));
-            self.parent_generics()
-                .and_then(|g| g.find_lifetime(lifetime))
-                .map(|idx| self.len_self() + idx)
+            self.parent_generics().and_then(|g| g.find_lifetime(lifetime))
         }
     }
 
@@ -251,8 +258,7 @@ pub(crate) fn trait_self_param_idx(db: &dyn DefDatabase, def: GenericDefId) -> O
             let parent_def = parent_generic_def(db, def)?;
             let parent_params = db.generic_params(parent_def);
             let parent_self_idx = parent_params.trait_self_param()?.into_raw().into_u32() as usize;
-            let self_params = db.generic_params(def);
-            Some(self_params.len() + parent_self_idx)
+            Some(parent_self_idx)
         }
     }
 }
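
The index computations in `find_type_or_const_param` and `find_lifetime` above follow the reordered layout from the module docs, with parent parameters now placed first. A minimal, self-contained sketch of that arithmetic (hypothetical helper functions, not this crate's API):

// Layout assumed: [parent params..][Self?][lifetimes..][type or const params..]
fn lifetime_index(parent_len: usize, has_trait_self: bool, local_idx: usize) -> usize {
    parent_len + usize::from(has_trait_self) + local_idx
}

fn type_or_const_index(parent_len: usize, n_lifetimes: usize, local_idx: usize) -> usize {
    parent_len + n_lifetimes + local_idx
}

fn main() {
    // An item whose parent contributes 2 params and which has 1 own lifetime:
    assert_eq!(lifetime_index(2, false, 0), 2);
    assert_eq!(type_or_const_index(2, 1, 0), 3);
}
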
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
index 3e0ce7f1933a3..790914fdaf2a4 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
@@ -26,27 +26,26 @@ pub(crate) mod unify;
 use std::{cell::OnceCell, convert::identity, iter, ops::Index};
 
 use chalk_ir::{
+    DebruijnIndex, Mutability, Safety, Scalar, TyKind, TypeFlags, Variance,
     cast::Cast,
     fold::TypeFoldable,
     interner::HasInterner,
     visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor},
-    DebruijnIndex, Mutability, Safety, Scalar, TyKind, TypeFlags, Variance,
 };
 use either::Either;
 use hir_def::{
+    AdtId, AssocItemId, ConstId, DefWithBodyId, FieldId, FunctionId, GenericDefId, GenericParamId,
+    ImplId, ItemContainerId, Lookup, TraitId, TupleFieldId, TupleId, TypeAliasId, VariantId,
     builtin_type::{BuiltinInt, BuiltinType, BuiltinUint},
-    data::{ConstData, StaticData},
-    expr_store::{Body, HygieneId},
+    expr_store::{Body, ExpressionStore, HygieneId, path::Path},
     hir::{BindingAnnotation, BindingId, ExprId, ExprOrPatId, LabelId, PatId},
     lang_item::{LangItem, LangItemTarget},
     layout::Integer,
-    path::{ModPath, Path},
     resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs},
-    type_ref::{LifetimeRef, TypeRefId, TypesMap},
-    AdtId, AssocItemId, DefWithBodyId, FieldId, FunctionId, ImplId, ItemContainerId, Lookup,
-    TraitId, TupleFieldId, TupleId, TypeAliasId, VariantId,
+    signatures::{ConstSignature, StaticSignature},
+    type_ref::{ConstRef, LifetimeRefId, TypeRefId},
 };
-use hir_expand::name::Name;
+use hir_expand::{mod_path::ModPath, name::Name};
 use indexmap::IndexSet;
 use intern::sym;
 use la_arena::{ArenaMap, Entry};
@@ -55,6 +54,10 @@ use stdx::{always, never};
 use triomphe::Arc;
 
 use crate::{
+    AliasEq, AliasTy, Binders, ClosureId, Const, DomainGoal, GenericArg, Goal, ImplTraitId,
+    ImplTraitIdx, InEnvironment, IncorrectGenericsLenKind, Interner, Lifetime, OpaqueTyId,
+    ParamLoweringMode, PathLoweringDiagnostic, ProjectionTy, Substitution, TraitEnvironment, Ty,
+    TyBuilder, TyExt,
     db::HirDatabase,
     fold_tys,
     generics::Generics,
@@ -64,14 +67,11 @@ use crate::{
         expr::ExprIsRead,
         unify::InferenceTable,
     },
-    lower::{diagnostics::TyLoweringDiagnostic, ImplTraitLoweringMode},
+    lower::{ImplTraitLoweringMode, LifetimeElisionKind, diagnostics::TyLoweringDiagnostic},
     mir::MirSpan,
-    to_assoc_type_id,
+    static_lifetime, to_assoc_type_id,
     traits::FnTrait,
-    utils::{InTypeConstIdMetadata, UnevaluatedConstEvaluatorFolder},
-    AliasEq, AliasTy, Binders, ClosureId, Const, DomainGoal, GenericArg, Goal, ImplTraitId,
-    ImplTraitIdx, InEnvironment, Interner, Lifetime, OpaqueTyId, ParamLoweringMode,
-    PathLoweringDiagnostic, ProjectionTy, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt,
+    utils::UnevaluatedConstEvaluatorFolder,
 };
 
 // This lint has a false positive here. See the link below for details.
@@ -88,7 +88,7 @@ pub(crate) use closure::{CaptureKind, CapturedItem, CapturedItemWithoutTy};
 /// The entry point of type inference.
 pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
     let _p = tracing::info_span!("infer_query").entered();
-    let resolver = def.resolver(db.upcast());
+    let resolver = def.resolver(db);
     let body = db.body(def);
     let mut ctx = InferenceContext::new(db, def, &body, resolver);
 
@@ -96,11 +96,11 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<Infer
         DefWithBodyId::FunctionId(f) => {
             ctx.collect_fn(f);
         }
-        DefWithBodyId::ConstId(c) => ctx.collect_const(&db.const_data(c)),
-        DefWithBodyId::StaticId(s) => ctx.collect_static(&db.static_data(s)),
+        DefWithBodyId::ConstId(c) => ctx.collect_const(c, &db.const_signature(c)),
+        DefWithBodyId::StaticId(s) => ctx.collect_static(&db.static_signature(s)),
         DefWithBodyId::VariantId(v) => {
             ctx.return_ty = TyBuilder::builtin(
-                match db.enum_data(v.lookup(db.upcast()).parent).variant_body_type() {
+                match db.enum_signature(v.lookup(db).parent).variant_body_type() {
                     hir_def::layout::IntegerType::Pointer(signed) => match signed {
                         true => BuiltinType::Int(BuiltinInt::Isize),
                         false => BuiltinType::Uint(BuiltinUint::Usize),
@@ -124,16 +124,6 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<Infer
                 },
             );
         }
-        DefWithBodyId::InTypeConstId(c) => {
-            // FIXME(const-generic-body): We should not get the return type in this way.
-            ctx.return_ty = c
-                .lookup(db.upcast())
-                .expected_ty
-                .box_any()
-                .downcast::<InTypeConstIdMetadata>()
-                .unwrap()
-                .0;
-        }
     }
 
     ctx.infer_body();
@@ -286,6 +276,20 @@ pub enum InferenceDiagnostic {
         node: ExprOrPatId,
         diag: PathLoweringDiagnostic,
     },
+    MethodCallIncorrectGenericsLen {
+        expr: ExprId,
+        provided_count: u32,
+        expected_count: u32,
+        kind: IncorrectGenericsLenKind,
+        def: GenericDefId,
+    },
+    MethodCallIncorrectGenericsOrder {
+        expr: ExprId,
+        param_id: GenericParamId,
+        arg_idx: u32,
+        /// Whether the `GenericArgs` contains a `Self` arg.
+        has_self_arg: bool,
+    },
 }
 
 /// A mismatch between an expected and an inferred type.
@@ -489,7 +493,7 @@ pub struct InferenceResult {
     /// ```
     /// the first `rest` has implicit `ref` binding mode, but the second `rest` binding mode is `move`.
     pub binding_modes: ArenaMap<PatId, BindingMode>,
-    pub expr_adjustments: FxHashMap<ExprId, Vec<Adjustment>>,
+    pub expr_adjustments: FxHashMap<ExprId, Box<[Adjustment]>>,
     pub(crate) closure_info: FxHashMap<ClosureId, (Vec<CapturedItem>, FnTrait)>,
     // FIXME: remove this field
     pub mutated_bindings_in_closure: FxHashSet<BindingId>,
@@ -597,7 +601,8 @@ pub(crate) struct InferenceContext<'a> {
     /// Generally you should not resolve things via this resolver. Instead create a TyLoweringContext
     /// and resolve the path via its methods. This will ensure proper error reporting.
     pub(crate) resolver: Resolver,
-    generics: OnceCell<Option<Generics>>,
+    generic_def: GenericDefId,
+    generics: OnceCell<Generics>,
     table: unify::InferenceTable<'a>,
     /// The traits in scope, disregarding block modules. This is used for caching purposes.
     traits_in_scope: FxHashSet<TraitId>,
@@ -708,8 +713,14 @@ impl<'a> InferenceContext<'a> {
             return_coercion: None,
             db,
             owner,
+            generic_def: match owner {
+                DefWithBodyId::FunctionId(it) => it.into(),
+                DefWithBodyId::StaticId(it) => it.into(),
+                DefWithBodyId::ConstId(it) => it.into(),
+                DefWithBodyId::VariantId(it) => it.lookup(db).parent.into(),
+            },
             body,
-            traits_in_scope: resolver.traits_in_scope(db.upcast()),
+            traits_in_scope: resolver.traits_in_scope(db),
             resolver,
             diverges: Diverges::Maybe,
             breakables: Vec::new(),
@@ -724,14 +735,8 @@ impl<'a> InferenceContext<'a> {
         }
     }
 
-    pub(crate) fn generics(&self) -> Option<&Generics> {
-        self.generics
-            .get_or_init(|| {
-                self.resolver
-                    .generic_def()
-                    .map(|def| crate::generics::generics(self.db.upcast(), def))
-            })
-            .as_ref()
+    pub(crate) fn generics(&self) -> &Generics {
+        self.generics.get_or_init(|| crate::generics::generics(self.db, self.generic_def))
     }
 
     // FIXME: This function should be private in module. It is currently only used in the consteval, since we need
@@ -780,8 +785,8 @@ impl<'a> InferenceContext<'a> {
         // Comment from rustc:
         // Even though coercion casts provide type hints, we check casts after fallback for
         // backwards compatibility. This makes fallback a stronger type hint than a cast coercion.
-        let mut apply_adjustments = |expr, adj| {
-            expr_adjustments.insert(expr, adj);
+        let mut apply_adjustments = |expr, adj: Vec<_>| {
+            expr_adjustments.insert(expr, adj.into_boxed_slice());
         };
         let mut set_coercion_cast = |expr| {
             coercion_casts.insert(expr);
@@ -803,22 +808,27 @@ impl<'a> InferenceContext<'a> {
             *ty = table.resolve_completely(ty.clone());
             *has_errors = *has_errors || ty.contains_unknown();
         }
+        type_of_expr.shrink_to_fit();
         for ty in type_of_pat.values_mut() {
             *ty = table.resolve_completely(ty.clone());
             *has_errors = *has_errors || ty.contains_unknown();
         }
+        type_of_pat.shrink_to_fit();
         for ty in type_of_binding.values_mut() {
             *ty = table.resolve_completely(ty.clone());
             *has_errors = *has_errors || ty.contains_unknown();
         }
+        type_of_binding.shrink_to_fit();
         for ty in type_of_rpit.values_mut() {
             *ty = table.resolve_completely(ty.clone());
             *has_errors = *has_errors || ty.contains_unknown();
         }
+        type_of_rpit.shrink_to_fit();
         for ty in type_of_for_iterator.values_mut() {
             *ty = table.resolve_completely(ty.clone());
             *has_errors = *has_errors || ty.contains_unknown();
         }
+        type_of_for_iterator.shrink_to_fit();
 
         *has_errors |= !type_mismatches.is_empty();
 
@@ -833,6 +843,7 @@ impl<'a> InferenceContext<'a> {
             )
             .is_ok()
         });
+        type_mismatches.shrink_to_fit();
         diagnostics.retain_mut(|diagnostic| {
             use InferenceDiagnostic::*;
             match diagnostic {
@@ -861,24 +872,29 @@ impl<'a> InferenceContext<'a> {
             }
             true
         });
+        diagnostics.shrink_to_fit();
         for (_, subst) in method_resolutions.values_mut() {
             *subst = table.resolve_completely(subst.clone());
             *has_errors =
                 *has_errors || subst.type_parameters(Interner).any(|ty| ty.contains_unknown());
         }
+        method_resolutions.shrink_to_fit();
         for (_, subst) in assoc_resolutions.values_mut() {
             *subst = table.resolve_completely(subst.clone());
             *has_errors =
                 *has_errors || subst.type_parameters(Interner).any(|ty| ty.contains_unknown());
         }
+        assoc_resolutions.shrink_to_fit();
         for adjustment in expr_adjustments.values_mut().flatten() {
             adjustment.target = table.resolve_completely(adjustment.target.clone());
             *has_errors = *has_errors || adjustment.target.contains_unknown();
         }
+        expr_adjustments.shrink_to_fit();
         for adjustment in pat_adjustments.values_mut().flatten() {
             *adjustment = table.resolve_completely(adjustment.clone());
             *has_errors = *has_errors || adjustment.contains_unknown();
         }
+        pat_adjustments.shrink_to_fit();
         result.tuple_field_access_types = tuple_field_accesses_rev
             .into_iter()
             .enumerate()
@@ -888,15 +904,20 @@ impl<'a> InferenceContext<'a> {
                     *has_errors || subst.type_parameters(Interner).any(|ty| ty.contains_unknown());
             })
             .collect();
+        result.tuple_field_access_types.shrink_to_fit();
 
         result.diagnostics = diagnostics;
 
         result
     }
 
-    fn collect_const(&mut self, data: &ConstData) {
-        let return_ty =
-            self.make_ty(data.type_ref, &data.types_map, InferenceTyDiagnosticSource::Signature);
+    fn collect_const(&mut self, id: ConstId, data: &ConstSignature) {
+        let return_ty = self.make_ty(
+            data.type_ref,
+            &data.store,
+            InferenceTyDiagnosticSource::Signature,
+            LifetimeElisionKind::for_const(id.loc(self.db).container),
+        );
 
         // Constants might be defining usage sites of TAITs.
         self.make_tait_coercion_table(iter::once(&return_ty));
@@ -904,9 +925,13 @@ impl<'a> InferenceContext<'a> {
         self.return_ty = return_ty;
     }
 
-    fn collect_static(&mut self, data: &StaticData) {
-        let return_ty =
-            self.make_ty(data.type_ref, &data.types_map, InferenceTyDiagnosticSource::Signature);
+    fn collect_static(&mut self, data: &StaticSignature) {
+        let return_ty = self.make_ty(
+            data.type_ref,
+            &data.store,
+            InferenceTyDiagnosticSource::Signature,
+            LifetimeElisionKind::Elided(static_lifetime()),
+        );
 
         // Statics might be defining usage sites of TAITs.
         self.make_tait_coercion_table(iter::once(&return_ty));
@@ -915,13 +940,17 @@ impl<'a> InferenceContext<'a> {
     }
 
     fn collect_fn(&mut self, func: FunctionId) {
-        let data = self.db.function_data(func);
-        let mut param_tys =
-            self.with_ty_lowering(&data.types_map, InferenceTyDiagnosticSource::Signature, |ctx| {
-                ctx.type_param_mode(ParamLoweringMode::Placeholder)
-                    .impl_trait_mode(ImplTraitLoweringMode::Param);
+        let data = self.db.function_signature(func);
+        let mut param_tys = self.with_ty_lowering(
+            &data.store,
+            InferenceTyDiagnosticSource::Signature,
+            LifetimeElisionKind::for_fn_params(&data),
+            |ctx| {
+                ctx.type_param_mode(ParamLoweringMode::Placeholder);
                 data.params.iter().map(|&type_ref| ctx.lower_ty(type_ref)).collect::<Vec<_>>()
-            });
+            },
+        );
+
         // Check if the function contains a va_list; if it does, we append it to the parameter types
         // collected from the function data
         if data.is_varargs() {
@@ -956,35 +985,44 @@ impl<'a> InferenceContext<'a> {
                 tait_candidates.insert(ty);
             }
         }
-        let return_ty = data.ret_type;
-
-        let return_ty =
-            self.with_ty_lowering(&data.types_map, InferenceTyDiagnosticSource::Signature, |ctx| {
-                ctx.type_param_mode(ParamLoweringMode::Placeholder)
-                    .impl_trait_mode(ImplTraitLoweringMode::Opaque)
-                    .lower_ty(return_ty)
-            });
-        let return_ty = self.insert_type_vars(return_ty);
-
-        let return_ty = if let Some(rpits) = self.db.return_type_impl_traits(func) {
-            // RPIT opaque types use substitution of their parent function.
-            let fn_placeholders = TyBuilder::placeholder_subst(self.db, func);
-            let mut mode = ImplTraitReplacingMode::ReturnPosition(FxHashSet::default());
-            let result =
-                self.insert_inference_vars_for_impl_trait(return_ty, fn_placeholders, &mut mode);
-            if let ImplTraitReplacingMode::ReturnPosition(taits) = mode {
-                tait_candidates.extend(taits);
-            }
-            let rpits = rpits.skip_binders();
-            for (id, _) in rpits.impl_traits.iter() {
-                if let Entry::Vacant(e) = self.result.type_of_rpit.entry(id) {
-                    never!("Missed RPIT in `insert_inference_vars_for_rpit`");
-                    e.insert(TyKind::Error.intern(Interner));
+        let return_ty = match data.ret_type {
+            Some(return_ty) => {
+                let return_ty = self.with_ty_lowering(
+                    &data.store,
+                    InferenceTyDiagnosticSource::Signature,
+                    LifetimeElisionKind::for_fn_ret(),
+                    |ctx| {
+                        ctx.type_param_mode(ParamLoweringMode::Placeholder)
+                            .impl_trait_mode(ImplTraitLoweringMode::Opaque);
+                        ctx.lower_ty(return_ty)
+                    },
+                );
+                let return_ty = self.insert_type_vars(return_ty);
+                if let Some(rpits) = self.db.return_type_impl_traits(func) {
+                    // RPIT opaque types use substitution of their parent function.
+                    let fn_placeholders = TyBuilder::placeholder_subst(self.db, func);
+                    let mut mode = ImplTraitReplacingMode::ReturnPosition(FxHashSet::default());
+                    let result = self.insert_inference_vars_for_impl_trait(
+                        return_ty,
+                        fn_placeholders,
+                        &mut mode,
+                    );
+                    if let ImplTraitReplacingMode::ReturnPosition(taits) = mode {
+                        tait_candidates.extend(taits);
+                    }
+                    let rpits = rpits.skip_binders();
+                    for (id, _) in rpits.impl_traits.iter() {
+                        if let Entry::Vacant(e) = self.result.type_of_rpit.entry(id) {
+                            never!("Missed RPIT in `insert_inference_vars_for_rpit`");
+                            e.insert(TyKind::Error.intern(Interner));
+                        }
+                    }
+                    result
+                } else {
+                    return_ty
                 }
             }
-            result
-        } else {
-            return_ty
+            None => self.result.standard_types.unit.clone(),
         };
 
         self.return_ty = self.normalize_associated_types_in(return_ty);
@@ -1143,7 +1181,7 @@ impl<'a> InferenceContext<'a> {
             non_assocs: FxHashMap::default(),
         };
         for ty in tait_candidates {
-            let _ = ty.visit_with(collector.as_dyn(), DebruijnIndex::INNERMOST);
+            _ = ty.visit_with(collector.as_dyn(), DebruijnIndex::INNERMOST);
         }
 
         // Non-assoc TAITs can be define-used everywhere as long as they are
@@ -1190,11 +1228,7 @@ impl<'a> InferenceContext<'a> {
 
         if let Some(impl_id) = impl_id {
             taits.extend(collector.assocs.into_iter().filter_map(|(id, (impl_, ty))| {
-                if impl_ == impl_id {
-                    Some((id, ty))
-                } else {
-                    None
-                }
+                if impl_ == impl_id { Some((id, ty)) } else { None }
             }));
         }
 
@@ -1239,7 +1273,7 @@ impl<'a> InferenceContext<'a> {
         self.result.type_of_expr.insert(expr, ty);
     }
 
-    fn write_expr_adj(&mut self, expr: ExprId, adjustments: Vec<Adjustment>) {
+    fn write_expr_adj(&mut self, expr: ExprId, adjustments: Box<[Adjustment]>) {
         if adjustments.is_empty() {
             return;
         }
@@ -1291,48 +1325,91 @@ impl<'a> InferenceContext<'a> {
 
     fn with_ty_lowering<R>(
         &mut self,
-        types_map: &TypesMap,
+        store: &ExpressionStore,
         types_source: InferenceTyDiagnosticSource,
+        lifetime_elision: LifetimeElisionKind,
         f: impl FnOnce(&mut TyLoweringContext<'_>) -> R,
     ) -> R {
         let mut ctx = TyLoweringContext::new(
             self.db,
             &self.resolver,
-            types_map,
-            self.owner.into(),
+            store,
             &self.diagnostics,
             types_source,
+            self.generic_def,
+            lifetime_elision,
         );
         f(&mut ctx)
     }
 
     fn with_body_ty_lowering<R>(&mut self, f: impl FnOnce(&mut TyLoweringContext<'_>) -> R) -> R {
-        self.with_ty_lowering(&self.body.types, InferenceTyDiagnosticSource::Body, f)
+        self.with_ty_lowering(
+            self.body,
+            InferenceTyDiagnosticSource::Body,
+            LifetimeElisionKind::Infer,
+            f,
+        )
     }
 
     fn make_ty(
         &mut self,
         type_ref: TypeRefId,
-        types_map: &TypesMap,
+        store: &ExpressionStore,
         type_source: InferenceTyDiagnosticSource,
+        lifetime_elision: LifetimeElisionKind,
     ) -> Ty {
-        let ty = self.with_ty_lowering(types_map, type_source, |ctx| ctx.lower_ty(type_ref));
+        let ty = self
+            .with_ty_lowering(store, type_source, lifetime_elision, |ctx| ctx.lower_ty(type_ref));
         let ty = self.insert_type_vars(ty);
         self.normalize_associated_types_in(ty)
     }
 
     fn make_body_ty(&mut self, type_ref: TypeRefId) -> Ty {
-        self.make_ty(type_ref, &self.body.types, InferenceTyDiagnosticSource::Body)
+        self.make_ty(
+            type_ref,
+            self.body,
+            InferenceTyDiagnosticSource::Body,
+            LifetimeElisionKind::Infer,
+        )
+    }
+
+    fn make_body_const(&mut self, const_ref: ConstRef, ty: Ty) -> Const {
+        let const_ = self.with_ty_lowering(
+            self.body,
+            InferenceTyDiagnosticSource::Body,
+            LifetimeElisionKind::Infer,
+            |ctx| {
+                ctx.type_param_mode = ParamLoweringMode::Placeholder;
+                ctx.lower_const(&const_ref, ty)
+            },
+        );
+        self.insert_type_vars(const_)
+    }
+
+    fn make_path_as_body_const(&mut self, path: &Path, ty: Ty) -> Const {
+        let const_ = self.with_ty_lowering(
+            self.body,
+            InferenceTyDiagnosticSource::Body,
+            LifetimeElisionKind::Infer,
+            |ctx| {
+                ctx.type_param_mode = ParamLoweringMode::Placeholder;
+                ctx.lower_path_as_const(path, ty)
+            },
+        );
+        self.insert_type_vars(const_)
     }
 
     fn err_ty(&self) -> Ty {
         self.result.standard_types.unknown.clone()
     }
 
-    fn make_body_lifetime(&mut self, lifetime_ref: &LifetimeRef) -> Lifetime {
-        let lt = self.with_ty_lowering(TypesMap::EMPTY, InferenceTyDiagnosticSource::Body, |ctx| {
-            ctx.lower_lifetime(lifetime_ref)
-        });
+    fn make_body_lifetime(&mut self, lifetime_ref: LifetimeRefId) -> Lifetime {
+        let lt = self.with_ty_lowering(
+            self.body,
+            InferenceTyDiagnosticSource::Body,
+            LifetimeElisionKind::Infer,
+            |ctx| ctx.lower_lifetime(lifetime_ref),
+        );
         self.insert_type_vars(lt)
     }
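
The `LifetimeElisionKind` values threaded through `make_ty`, `collect_const`, and `collect_static` above mirror the language's elision rules; roughly, function signatures elide to lifetimes derived from the inputs, `static` (and `const`) items elide to `'static`, and expressions in bodies are simply inferred. In plain Rust (illustrating the language rules, not this crate's type):

// Signature position: the elided output lifetime is tied to the single input.
fn first_word(s: &str) -> &str {   // same as fn first_word<'a>(s: &'a str) -> &'a str
    s.split_whitespace().next().unwrap_or("")
}

// Static/const position: an elided reference lifetime means 'static.
static GREETING: &str = "hello";   // same as &'static str

fn main() {
    println!("{} {}", GREETING, first_word("hello world"));
}
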
 
@@ -1460,7 +1537,7 @@ impl<'a> InferenceContext<'a> {
     ) -> Ty {
         match assoc_ty {
             Some(res_assoc_ty) => {
-                let trait_ = match res_assoc_ty.lookup(self.db.upcast()).container {
+                let trait_ = match res_assoc_ty.lookup(self.db).container {
                     hir_def::ItemContainerId::TraitId(trait_) => trait_,
                     _ => panic!("resolve_associated_type called with non-associated type"),
                 };
@@ -1498,10 +1575,11 @@ impl<'a> InferenceContext<'a> {
         let mut ctx = TyLoweringContext::new(
             self.db,
             &self.resolver,
-            &self.body.types,
-            self.owner.into(),
+            &self.body.store,
             &self.diagnostics,
             InferenceTyDiagnosticSource::Body,
+            self.generic_def,
+            LifetimeElisionKind::Infer,
         );
         let mut path_ctx = ctx.at_path(path, node);
         let (resolution, unresolved) = if value_ns {
@@ -1511,14 +1589,14 @@ impl<'a> InferenceContext<'a> {
             match res {
                 ResolveValueResult::ValueNs(value, _) => match value {
                     ValueNs::EnumVariantId(var) => {
-                        let substs = path_ctx.substs_from_path(var.into(), true);
+                        let substs = path_ctx.substs_from_path(var.into(), true, false);
                         drop(ctx);
-                        let ty = self.db.ty(var.lookup(self.db.upcast()).parent.into());
+                        let ty = self.db.ty(var.lookup(self.db).parent.into());
                         let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
                         return (ty, Some(var.into()));
                     }
                     ValueNs::StructId(strukt) => {
-                        let substs = path_ctx.substs_from_path(strukt.into(), true);
+                        let substs = path_ctx.substs_from_path(strukt.into(), true, false);
                         drop(ctx);
                         let ty = self.db.ty(strukt.into());
                         let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
@@ -1538,39 +1616,39 @@ impl<'a> InferenceContext<'a> {
                 None => return (self.err_ty(), None),
             }
         };
-        let Some(mod_path) = path.mod_path() else {
-            never!("resolver should always resolve lang item paths");
-            return (self.err_ty(), None);
-        };
         return match resolution {
             TypeNs::AdtId(AdtId::StructId(strukt)) => {
-                let substs = path_ctx.substs_from_path(strukt.into(), true);
+                let substs = path_ctx.substs_from_path(strukt.into(), true, false);
                 drop(ctx);
                 let ty = self.db.ty(strukt.into());
                 let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
                 forbid_unresolved_segments((ty, Some(strukt.into())), unresolved)
             }
             TypeNs::AdtId(AdtId::UnionId(u)) => {
-                let substs = path_ctx.substs_from_path(u.into(), true);
+                let substs = path_ctx.substs_from_path(u.into(), true, false);
                 drop(ctx);
                 let ty = self.db.ty(u.into());
                 let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
                 forbid_unresolved_segments((ty, Some(u.into())), unresolved)
             }
             TypeNs::EnumVariantId(var) => {
-                let substs = path_ctx.substs_from_path(var.into(), true);
+                let substs = path_ctx.substs_from_path(var.into(), true, false);
                 drop(ctx);
-                let ty = self.db.ty(var.lookup(self.db.upcast()).parent.into());
+                let ty = self.db.ty(var.lookup(self.db).parent.into());
                 let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
                 forbid_unresolved_segments((ty, Some(var.into())), unresolved)
             }
             TypeNs::SelfType(impl_id) => {
-                let generics = crate::generics::generics(self.db.upcast(), impl_id.into());
+                let generics = crate::generics::generics(self.db, impl_id.into());
                 let substs = generics.placeholder_subst(self.db);
                 let mut ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs);
 
                 let Some(remaining_idx) = unresolved else {
                     drop(ctx);
+                    let Some(mod_path) = path.mod_path() else {
+                        never!("resolver should always resolve lang item paths");
+                        return (self.err_ty(), None);
+                    };
                     return self.resolve_variant_on_alias(ty, None, mod_path);
                 };
 
@@ -1587,7 +1665,7 @@ impl<'a> InferenceContext<'a> {
                     // If we can resolve to an enum variant, it takes priority over associated type
                     // of the same name.
                     if let Some((AdtId::EnumId(id), _)) = ty.as_adt() {
-                        let enum_data = self.db.enum_data(id);
+                        let enum_data = self.db.enum_variants(id);
                         if let Some(variant) = enum_data.variant(current_segment.name) {
                             return if remaining_segments.len() == 1 {
                                 (ty, Some(variant.into()))
@@ -1609,7 +1687,7 @@ impl<'a> InferenceContext<'a> {
                     // `lower_partly_resolved_path()` returns `None` as type namespace unless
                     // `remaining_segments` is empty, which is never the case here. We don't know
                     // which namespace the new `ty` is in until normalized anyway.
-                    (ty, _) = path_ctx.lower_partly_resolved_path(resolution, false);
+                    (ty, _) = path_ctx.lower_partly_resolved_path(resolution, true);
                     tried_resolving_once = true;
 
                     ty = self.table.insert_type_vars(ty);
@@ -1634,7 +1712,11 @@ impl<'a> InferenceContext<'a> {
                 (ty, variant)
             }
             TypeNs::TypeAliasId(it) => {
-                let substs = path_ctx.substs_from_path_segment(it.into(), true, None);
+                let Some(mod_path) = path.mod_path() else {
+                    never!("resolver should always resolve lang item paths");
+                    return (self.err_ty(), None);
+                };
+                let substs = path_ctx.substs_from_path_segment(it.into(), true, None, false);
                 drop(ctx);
                 let ty = self.db.ty(it.into());
                 let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
@@ -1652,7 +1734,8 @@ impl<'a> InferenceContext<'a> {
             TypeNs::AdtId(AdtId::EnumId(_))
             | TypeNs::BuiltinType(_)
             | TypeNs::TraitId(_)
-            | TypeNs::TraitAliasId(_) => {
+            | TypeNs::TraitAliasId(_)
+            | TypeNs::ModuleId(_) => {
                 // FIXME diagnostic
                 (self.err_ty(), None)
             }
@@ -1701,7 +1784,7 @@ impl<'a> InferenceContext<'a> {
                 let segment = path.segments().last().unwrap();
                 // this could be an enum variant or associated type
                 if let Some((AdtId::EnumId(enum_id), _)) = ty.as_adt() {
-                    let enum_data = self.db.enum_data(enum_id);
+                    let enum_data = self.db.enum_variants(enum_id);
                     if let Some(variant) = enum_data.variant(segment) {
                         return (ty, Some(variant.into()));
                     }
@@ -1722,9 +1805,7 @@ impl<'a> InferenceContext<'a> {
     }
 
     fn resolve_output_on(&self, trait_: TraitId) -> Option<TypeAliasId> {
-        self.db
-            .trait_data(trait_)
-            .associated_type_by_name(&Name::new_symbol_root(sym::Output.clone()))
+        self.db.trait_items(trait_).associated_type_by_name(&Name::new_symbol_root(sym::Output))
     }
 
     fn resolve_lang_trait(&self, lang: LangItem) -> Option<TraitId> {
@@ -1743,7 +1824,7 @@ impl<'a> InferenceContext<'a> {
         let ItemContainerId::TraitId(trait_) = self
             .resolve_lang_item(LangItem::IntoFutureIntoFuture)?
             .as_function()?
-            .lookup(self.db.upcast())
+            .lookup(self.db)
             .container
         else {
             return None;
@@ -1914,11 +1995,7 @@ impl Expectation {
         match self {
             Expectation::HasType(ety) => {
                 let ety = table.resolve_ty_shallow(ety);
-                if ety.is_ty_var() {
-                    Expectation::None
-                } else {
-                    Expectation::HasType(ety)
-                }
+                if ety.is_ty_var() { Expectation::None } else { Expectation::HasType(ety) }
             }
             Expectation::RValueLikeUnsized(ety) => Expectation::RValueLikeUnsized(ety.clone()),
             _ => Expectation::None,
@@ -2044,7 +2121,7 @@ impl chalk_ir::zip::Zipper<Interner> for UnknownMismatch<'_> {
             | (_, TyKind::Error)
             | (TyKind::Alias(AliasTy::Projection(_)) | TyKind::AssociatedType(_, _), _)
             | (_, TyKind::Alias(AliasTy::Projection(_)) | TyKind::AssociatedType(_, _)) => {
-                return Err(chalk_ir::NoSolution)
+                return Err(chalk_ir::NoSolution);
             }
             _ => (),
         }
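
Two themes in the hunks above are the switch from `Vec<Adjustment>` to `Box<[Adjustment]>` and the added `shrink_to_fit` calls once inference finishes; both drop spare capacity the finished `InferenceResult` will never use. A tiny stand-alone illustration of the boxed-slice conversion (plain Rust, not the rust-analyzer types):

fn freeze<T>(v: Vec<T>) -> Box<[T]> {
    // `into_boxed_slice` drops any excess capacity, so the allocation holds exactly `len` items.
    v.into_boxed_slice()
}

fn main() {
    let mut adjustments = Vec::with_capacity(16);
    adjustments.extend(["deref", "borrow"]);
    let frozen: Box<[&str]> = freeze(adjustments);
    assert_eq!(frozen.len(), 2);
}
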
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs
index eb193686e967f..10d85792c9d60 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs
@@ -1,13 +1,13 @@
 //! Type cast logic. Basically coercion + additional casts.
 
 use chalk_ir::{Mutability, Scalar, TyVariableKind, UintTy};
-use hir_def::{hir::ExprId, AdtId};
+use hir_def::{AdtId, hir::ExprId};
 use stdx::never;
 
 use crate::{
-    infer::{coerce::CoerceNever, unify::InferenceTable},
     Adjustment, Binders, DynTy, InferenceDiagnostic, Interner, PlaceholderIndex,
     QuantifiedWhereClauses, Ty, TyExt, TyKind, TypeFlags, WhereClause,
+    infer::{coerce::CoerceNever, unify::InferenceTable},
 };
 
 #[derive(Debug)]
@@ -43,14 +43,10 @@ impl CastTy {
                 let (AdtId::EnumId(id), _) = t.as_adt()? else {
                     return None;
                 };
-                let enum_data = table.db.enum_data(id);
-                if enum_data.is_payload_free(table.db.upcast()) {
-                    Some(Self::Int(Int::CEnum))
-                } else {
-                    None
-                }
+                let enum_data = table.db.enum_variants(id);
+                if enum_data.is_payload_free(table.db) { Some(Self::Int(Int::CEnum)) } else { None }
             }
-            TyKind::Raw(m, ty) => Some(Self::Ptr(table.resolve_ty_shallow(ty), *m)),
+            TyKind::Raw(m, ty) => Some(Self::Ptr(ty.clone(), *m)),
             TyKind::Function(_) => Some(Self::FnPtr),
             _ => None,
         }
@@ -105,9 +101,8 @@ impl CastCheck {
         F: FnMut(ExprId, Vec<Adjustment>),
         G: FnMut(ExprId),
     {
-        table.resolve_obligations_as_possible();
-        self.expr_ty = table.resolve_ty_shallow(&self.expr_ty);
-        self.cast_ty = table.resolve_ty_shallow(&self.cast_ty);
+        self.expr_ty = table.eagerly_normalize_and_resolve_shallow_in(self.expr_ty.clone());
+        self.cast_ty = table.eagerly_normalize_and_resolve_shallow_in(self.cast_ty.clone());
 
         if self.expr_ty.contains_unknown() || self.cast_ty.contains_unknown() {
             return Ok(());
@@ -153,7 +148,7 @@ impl CastCheck {
                 (None, Some(t_cast)) => match self.expr_ty.kind(Interner) {
                     TyKind::FnDef(..) => {
                         let sig = self.expr_ty.callable_sig(table.db).expect("FnDef had no sig");
-                        let sig = table.normalize_associated_types_in(sig);
+                        let sig = table.eagerly_normalize_and_resolve_shallow_in(sig);
                         let fn_ptr = TyKind::Function(sig.to_fn_ptr()).intern(Interner);
                         if let Ok((adj, _)) = table.coerce(&self.expr_ty, &fn_ptr, CoerceNever::Yes)
                         {
@@ -165,7 +160,6 @@ impl CastCheck {
                         (CastTy::FnPtr, t_cast)
                     }
                     TyKind::Ref(mutbl, _, inner_ty) => {
-                        let inner_ty = table.resolve_ty_shallow(inner_ty);
                         return match t_cast {
                             CastTy::Int(_) | CastTy::Float => match inner_ty.kind(Interner) {
                                 TyKind::Scalar(
@@ -180,13 +174,13 @@ impl CastCheck {
                             },
                             // array-ptr-cast
                             CastTy::Ptr(t, m) => {
-                                let t = table.resolve_ty_shallow(&t);
+                                let t = table.eagerly_normalize_and_resolve_shallow_in(t);
                                 if !table.is_sized(&t) {
                                     return Err(CastError::IllegalCast);
                                 }
                                 self.check_ref_cast(
                                     table,
-                                    &inner_ty,
+                                    inner_ty,
                                     *mutbl,
                                     &t,
                                     m,
@@ -359,7 +353,7 @@ impl CastCheck {
     }
 }
 
-#[derive(PartialEq, Eq)]
+#[derive(Debug, PartialEq, Eq)]
 enum PointerKind {
     // thin pointer
     Thin,
@@ -373,8 +367,7 @@ enum PointerKind {
 }
 
 fn pointer_kind(ty: &Ty, table: &mut InferenceTable<'_>) -> Result<Option<PointerKind>, ()> {
-    let ty = table.resolve_ty_shallow(ty);
-    let ty = table.normalize_associated_types_in(ty);
+    let ty = table.eagerly_normalize_and_resolve_shallow_in(ty.clone());
 
     if table.is_sized(&ty) {
         return Ok(Some(PointerKind::Thin));
@@ -389,8 +382,8 @@ fn pointer_kind(ty: &Ty, table: &mut InferenceTable<'_>) -> Result<Option<Pointe
                 return Err(());
             };
 
-            let struct_data = table.db.struct_data(id);
-            if let Some((last_field, _)) = struct_data.variant_data.fields().iter().last() {
+            let struct_data = table.db.variant_fields(id.into());
+            if let Some((last_field, _)) = struct_data.fields().iter().last() {
                 let last_field_ty =
                     table.db.field_types(id.into())[last_field].clone().substitute(Interner, subst);
                 pointer_kind(&last_field_ty, table)
@@ -431,8 +424,8 @@ fn contains_dyn_trait(ty: &Ty) -> bool {
     use std::ops::ControlFlow;
 
     use chalk_ir::{
-        visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor},
         DebruijnIndex,
+        visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor},
     };
 
     struct DynTraitVisitor;
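
The `CEnum` arm above preserves the language rule that only payload-free (field-less) enums can be cast to an integer with `as`. A plain-Rust reminder of that rule (user code, independent of the checker):

enum Fieldless { A, B = 7 }          // payload-free: integer casts with `as` are allowed
enum Payload { Num(i32), Nothing }   // carries data: `Payload::Nothing as u32` is rejected

fn main() {
    let _ = Fieldless::A;
    assert_eq!(Fieldless::B as u32, 7);
    match Payload::Num(1) {
        Payload::Num(n) => assert_eq!(n, 1),
        Payload::Nothing => {}
    }
}
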
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs
index 9283c46d0f611..cf3b15d2a679c 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs
@@ -1,49 +1,148 @@
 //! Inference of closure parameter types based on the closure's expected type.
 
-use std::{cmp, convert::Infallible, mem};
+use std::{cmp, convert::Infallible, mem, ops::ControlFlow};
 
 use chalk_ir::{
-    cast::Cast,
-    fold::{FallibleTypeFolder, TypeFoldable},
     BoundVar, DebruijnIndex, FnSubst, Mutability, TyKind,
+    cast::Cast,
+    fold::{FallibleTypeFolder, Shift, TypeFoldable},
+    visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor},
 };
 use either::Either;
 use hir_def::{
-    data::adt::VariantData,
+    DefWithBodyId, FieldId, HasModule, TupleFieldId, TupleId, VariantId,
+    expr_store::path::Path,
     hir::{
-        Array, AsmOperand, BinaryOp, BindingId, CaptureBy, Expr, ExprId, ExprOrPatId, Pat, PatId,
-        Statement, UnaryOp,
+        Array, AsmOperand, BinaryOp, BindingId, CaptureBy, ClosureKind, Expr, ExprId, ExprOrPatId,
+        Pat, PatId, Statement, UnaryOp,
     },
+    item_tree::FieldsShape,
     lang_item::LangItem,
-    path::Path,
     resolver::ValueNs,
-    DefWithBodyId, FieldId, HasModule, TupleFieldId, TupleId, VariantId,
 };
+use hir_def::{Lookup, type_ref::TypeRefId};
 use hir_expand::name::Name;
 use intern::sym;
-use rustc_hash::FxHashMap;
-use smallvec::{smallvec, SmallVec};
+use rustc_hash::{FxHashMap, FxHashSet};
+use smallvec::{SmallVec, smallvec};
 use stdx::{format_to, never};
 use syntax::utils::is_raw_identifier;
 
 use crate::{
-    db::{HirDatabase, InternedClosure},
-    error_lifetime, from_chalk_trait_id, from_placeholder_idx,
+    Adjust, Adjustment, AliasEq, AliasTy, Binders, BindingMode, ChalkTraitId, ClosureId, DynTy,
+    DynTyExt, FnAbi, FnPointer, FnSig, GenericArg, Interner, OpaqueTy, ProjectionTy,
+    ProjectionTyExt, Substitution, Ty, TyBuilder, TyExt, WhereClause,
+    db::{HirDatabase, InternedClosure, InternedCoroutine},
+    error_lifetime, from_assoc_type_id, from_chalk_trait_id, from_placeholder_idx,
     generics::Generics,
-    infer::coerce::CoerceNever,
+    infer::{BreakableKind, CoerceMany, Diverges, coerce::CoerceNever},
     make_binders,
     mir::{BorrowKind, MirSpan, MutBorrowKind, ProjectionElem},
     to_chalk_trait_id,
     traits::FnTrait,
     utils::{self, elaborate_clause_supertraits},
-    Adjust, Adjustment, AliasEq, AliasTy, Binders, BindingMode, ChalkTraitId, ClosureId, DynTy,
-    DynTyExt, FnAbi, FnPointer, FnSig, Interner, OpaqueTy, ProjectionTyExt, Substitution, Ty,
-    TyExt, WhereClause,
 };
 
 use super::{Expectation, InferenceContext};
 
+#[derive(Debug)]
+pub(super) struct ClosureSignature {
+    pub(super) ret_ty: Ty,
+    pub(super) expected_sig: FnPointer,
+}
+
 impl InferenceContext<'_> {
+    pub(super) fn infer_closure(
+        &mut self,
+        body: &ExprId,
+        args: &[PatId],
+        ret_type: &Option<TypeRefId>,
+        arg_types: &[Option<TypeRefId>],
+        closure_kind: ClosureKind,
+        tgt_expr: ExprId,
+        expected: &Expectation,
+    ) -> Ty {
+        assert_eq!(args.len(), arg_types.len());
+
+        let (expected_sig, expected_kind) = match expected.to_option(&mut self.table) {
+            Some(expected_ty) => self.deduce_closure_signature(&expected_ty, closure_kind),
+            None => (None, None),
+        };
+
+        let ClosureSignature { expected_sig: bound_sig, ret_ty: body_ret_ty } =
+            self.sig_of_closure(body, ret_type, arg_types, closure_kind, expected_sig);
+        let bound_sig = self.normalize_associated_types_in(bound_sig);
+        let sig_ty = TyKind::Function(bound_sig.clone()).intern(Interner);
+
+        let (id, ty, resume_yield_tys) = match closure_kind {
+            ClosureKind::Coroutine(_) => {
+                let sig_tys = bound_sig.substitution.0.as_slice(Interner);
+                // FIXME: report an error when there is more than one parameter.
+                let resume_ty = match sig_tys.first() {
+                    // When `sig_tys.len() == 1` the first type is the return type, not the
+                    // first parameter type.
+                    Some(ty) if sig_tys.len() > 1 => ty.assert_ty_ref(Interner).clone(),
+                    _ => self.result.standard_types.unit.clone(),
+                };
+                let yield_ty = self.table.new_type_var();
+
+                let subst = TyBuilder::subst_for_coroutine(self.db, self.owner)
+                    .push(resume_ty.clone())
+                    .push(yield_ty.clone())
+                    .push(body_ret_ty.clone())
+                    .build();
+
+                let coroutine_id =
+                    self.db.intern_coroutine(InternedCoroutine(self.owner, tgt_expr)).into();
+                let coroutine_ty = TyKind::Coroutine(coroutine_id, subst).intern(Interner);
+
+                (None, coroutine_ty, Some((resume_ty, yield_ty)))
+            }
+            ClosureKind::Closure | ClosureKind::Async => {
+                let closure_id =
+                    self.db.intern_closure(InternedClosure(self.owner, tgt_expr)).into();
+                let closure_ty = TyKind::Closure(
+                    closure_id,
+                    TyBuilder::subst_for_closure(self.db, self.owner, sig_ty.clone()),
+                )
+                .intern(Interner);
+                self.deferred_closures.entry(closure_id).or_default();
+                self.add_current_closure_dependency(closure_id);
+                (Some(closure_id), closure_ty, None)
+            }
+        };
+
+        // Eagerly try to relate the closure type with the expected
+        // type, otherwise we often won't have enough information to
+        // infer the body.
+        self.deduce_closure_type_from_expectations(tgt_expr, &ty, &sig_ty, expected, expected_kind);
+
+        // Now go through the argument patterns
+        for (arg_pat, arg_ty) in args.iter().zip(bound_sig.substitution.0.as_slice(Interner).iter())
+        {
+            self.infer_top_pat(*arg_pat, arg_ty.assert_ty_ref(Interner), None);
+        }
+
+        // FIXME: lift these out into a struct
+        let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
+        let prev_closure = mem::replace(&mut self.current_closure, id);
+        let prev_ret_ty = mem::replace(&mut self.return_ty, body_ret_ty.clone());
+        let prev_ret_coercion = self.return_coercion.replace(CoerceMany::new(body_ret_ty.clone()));
+        let prev_resume_yield_tys = mem::replace(&mut self.resume_yield_tys, resume_yield_tys);
+
+        self.with_breakable_ctx(BreakableKind::Border, None, None, |this| {
+            this.infer_return(*body);
+        });
+
+        self.diverges = prev_diverges;
+        self.return_ty = prev_ret_ty;
+        self.return_coercion = prev_ret_coercion;
+        self.current_closure = prev_closure;
+        self.resume_yield_tys = prev_resume_yield_tys;
+
+        self.table.normalize_associated_types_in(ty)
+    }
+
     // This function handles both closures and coroutines.
     pub(super) fn deduce_closure_type_from_expectations(
         &mut self,
@@ -51,19 +150,21 @@ impl InferenceContext<'_> {
         closure_ty: &Ty,
         sig_ty: &Ty,
         expectation: &Expectation,
+        expected_kind: Option<FnTrait>,
     ) {
         let expected_ty = match expectation.to_option(&mut self.table) {
             Some(ty) => ty,
             None => return,
         };
 
-        if let TyKind::Closure(closure_id, _) = closure_ty.kind(Interner) {
-            if let Some(closure_kind) = self.deduce_closure_kind_from_expectations(&expected_ty) {
+        match (closure_ty.kind(Interner), expected_kind) {
+            (TyKind::Closure(closure_id, _), Some(closure_kind)) => {
                 self.result
                     .closure_info
                     .entry(*closure_id)
                     .or_insert_with(|| (Vec::new(), closure_kind));
             }
+            _ => {}
         }
 
         // Deduction from where-clauses in scope, as well as fn-pointer coercion are handled here.
@@ -86,63 +187,153 @@ impl InferenceContext<'_> {
 
     // Closure kind deductions are mostly from `rustc_hir_typeck/src/closure.rs`.
     // Might need to port closure sig deductions too.
-    fn deduce_closure_kind_from_expectations(&mut self, expected_ty: &Ty) -> Option<FnTrait> {
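+    /// Deduces the expected signature and the `Fn`-trait kind of a closure from the
+    /// type it is expected to have (e.g. an expectation of `impl Fn(u32) -> bool`
+    /// tells us both the kind and the full signature).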
+    pub(super) fn deduce_closure_signature(
+        &mut self,
+        expected_ty: &Ty,
+        closure_kind: ClosureKind,
+    ) -> (Option<FnSubst<Interner>>, Option<FnTrait>) {
         match expected_ty.kind(Interner) {
             TyKind::Alias(AliasTy::Opaque(OpaqueTy { .. })) | TyKind::OpaqueType(..) => {
-                let clauses = expected_ty
-                    .impl_trait_bounds(self.db)
-                    .into_iter()
-                    .flatten()
-                    .map(|b| b.into_value_and_skipped_binders().0);
-                self.deduce_closure_kind_from_predicate_clauses(clauses)
+                let clauses = expected_ty.impl_trait_bounds(self.db).into_iter().flatten().map(
+                    |b: chalk_ir::Binders<chalk_ir::WhereClause<Interner>>| {
+                        b.into_value_and_skipped_binders().0
+                    },
+                );
+                self.deduce_closure_kind_from_predicate_clauses(expected_ty, clauses, closure_kind)
+            }
+            TyKind::Dyn(dyn_ty) => {
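+                // For `dyn Fn(..) -> _` expectations, the signature comes from the
+                // `Output` associated-type bound and the kind from the principal trait.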
+                let sig =
+                    dyn_ty.bounds.skip_binders().as_slice(Interner).iter().find_map(|bound| {
+                        if let WhereClause::AliasEq(AliasEq {
+                            alias: AliasTy::Projection(projection_ty),
+                            ty: projected_ty,
+                        }) = bound.skip_binders()
+                        {
+                            if let Some(sig) = self.deduce_sig_from_projection(
+                                closure_kind,
+                                projection_ty,
+                                projected_ty,
+                            ) {
+                                return Some(sig);
+                            }
+                        }
+                        None
+                    });
+
+                let kind = dyn_ty.principal().and_then(|principal_trait_ref| {
+                    self.fn_trait_kind_from_trait_id(from_chalk_trait_id(
+                        principal_trait_ref.skip_binders().skip_binders().trait_id,
+                    ))
+                });
+
+                (sig, kind)
             }
-            TyKind::Dyn(dyn_ty) => dyn_ty.principal_id().and_then(|trait_id| {
-                self.fn_trait_kind_from_trait_id(from_chalk_trait_id(trait_id))
-            }),
             TyKind::InferenceVar(ty, chalk_ir::TyVariableKind::General) => {
                 let clauses = self.clauses_for_self_ty(*ty);
-                self.deduce_closure_kind_from_predicate_clauses(clauses.into_iter())
+                self.deduce_closure_kind_from_predicate_clauses(
+                    expected_ty,
+                    clauses.into_iter(),
+                    closure_kind,
+                )
             }
-            TyKind::Function(_) => Some(FnTrait::Fn),
-            _ => None,
+            TyKind::Function(fn_ptr) => match closure_kind {
+                ClosureKind::Closure => (Some(fn_ptr.substitution.clone()), Some(FnTrait::Fn)),
+                ClosureKind::Async | ClosureKind::Coroutine(_) => (None, None),
+            },
+            _ => (None, None),
         }
     }
 
     fn deduce_closure_kind_from_predicate_clauses(
         &self,
+        expected_ty: &Ty,
         clauses: impl DoubleEndedIterator<Item = WhereClause>,
-    ) -> Option<FnTrait> {
+        closure_kind: ClosureKind,
+    ) -> (Option<FnSubst<Interner>>, Option<FnTrait>) {
+        let mut expected_sig = None;
         let mut expected_kind = None;
 
         for clause in elaborate_clause_supertraits(self.db, clauses.rev()) {
+            if expected_sig.is_none() {
+                if let WhereClause::AliasEq(AliasEq {
+                    alias: AliasTy::Projection(projection),
+                    ty,
+                }) = &clause
+                {
+                    let inferred_sig =
+                        self.deduce_sig_from_projection(closure_kind, projection, ty);
+                    // Make sure that we didn't infer a signature that mentions itself.
+                    // This can happen when we elaborate certain supertrait bounds that
+                    // mention projections containing the `Self` type. See rust-lang/rust#105401.
+                    struct MentionsTy<'a> {
+                        expected_ty: &'a Ty,
+                    }
+                    impl TypeVisitor<Interner> for MentionsTy<'_> {
+                        type BreakTy = ();
+
+                        fn interner(&self) -> Interner {
+                            Interner
+                        }
+
+                        fn as_dyn(
+                            &mut self,
+                        ) -> &mut dyn TypeVisitor<Interner, BreakTy = Self::BreakTy>
+                        {
+                            self
+                        }
+
+                        fn visit_ty(
+                            &mut self,
+                            t: &Ty,
+                            db: chalk_ir::DebruijnIndex,
+                        ) -> ControlFlow<()> {
+                            if t == self.expected_ty {
+                                ControlFlow::Break(())
+                            } else {
+                                t.super_visit_with(self, db)
+                            }
+                        }
+                    }
+                    if inferred_sig
+                        .visit_with(
+                            &mut MentionsTy { expected_ty },
+                            chalk_ir::DebruijnIndex::INNERMOST,
+                        )
+                        .is_continue()
+                    {
+                        expected_sig = inferred_sig;
+                    }
+                }
+            }
+
             let trait_id = match clause {
                 WhereClause::AliasEq(AliasEq {
                     alias: AliasTy::Projection(projection), ..
-                }) => Some(projection.trait_(self.db)),
-                WhereClause::Implemented(trait_ref) => {
-                    Some(from_chalk_trait_id(trait_ref.trait_id))
-                }
-                _ => None,
+                }) => projection.trait_(self.db),
+                WhereClause::Implemented(trait_ref) => from_chalk_trait_id(trait_ref.trait_id),
+                _ => continue,
             };
-            if let Some(closure_kind) =
-                trait_id.and_then(|trait_id| self.fn_trait_kind_from_trait_id(trait_id))
-            {
-                // `FnX`'s variants order is opposite from rustc, so use `cmp::max` instead of `cmp::min`
-                expected_kind = Some(
-                    expected_kind
-                        .map_or_else(|| closure_kind, |current| cmp::max(current, closure_kind)),
-                );
+            if let Some(closure_kind) = self.fn_trait_kind_from_trait_id(trait_id) {
+                // Always keep the most permissive closure kind seen so far
+                // (`Fn` is more permissive than `FnMut`, which is more permissive than `FnOnce`).
+                match (expected_kind, closure_kind) {
+                    (None, _) => expected_kind = Some(closure_kind),
+                    (Some(FnTrait::FnMut), FnTrait::Fn) => expected_kind = Some(FnTrait::Fn),
+                    (Some(FnTrait::FnOnce), FnTrait::Fn | FnTrait::FnMut) => {
+                        expected_kind = Some(closure_kind)
+                    }
+                    _ => {}
+                }
             }
         }
 
-        expected_kind
+        (expected_sig, expected_kind)
     }
 
     fn deduce_sig_from_dyn_ty(&self, dyn_ty: &DynTy) -> Option<FnPointer> {
         // Search for a predicate like `<$self as FnX<Args>>::Output == Ret`
 
         let fn_traits: SmallVec<[ChalkTraitId; 3]> =
-            utils::fn_traits(self.db.upcast(), self.owner.module(self.db.upcast()).krate())
+            utils::fn_traits(self.db, self.owner.module(self.db).krate())
                 .map(to_chalk_trait_id)
                 .collect();
 
@@ -153,7 +344,8 @@ impl InferenceContext<'_> {
             if let WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection), ty }) =
                 bound.skip_binders()
             {
-                let assoc_data = self.db.associated_ty_data(projection.associated_ty_id);
+                let assoc_data =
+                    self.db.associated_ty_data(from_assoc_type_id(projection.associated_ty_id));
                 if !fn_traits.contains(&assoc_data.trait_id) {
                     return None;
                 }
@@ -185,9 +377,176 @@ impl InferenceContext<'_> {
         None
     }
 
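+    /// Deduces a closure signature from a single `Output` projection bound
+    /// (e.g. `<_ as FnOnce<(u32,)>>::Output == bool` gives `(u32) -> bool`).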
+    fn deduce_sig_from_projection(
+        &self,
+        closure_kind: ClosureKind,
+        projection_ty: &ProjectionTy,
+        projected_ty: &Ty,
+    ) -> Option<FnSubst<Interner>> {
+        let container =
+            from_assoc_type_id(projection_ty.associated_ty_id).lookup(self.db).container;
+        let trait_ = match container {
+            hir_def::ItemContainerId::TraitId(trait_) => trait_,
+            _ => return None,
+        };
+
+        // For now, we only do signature deduction based on the `Fn` and `AsyncFn`
+        // traits, for closures and async closures, respectively.
+        match closure_kind {
+            ClosureKind::Closure | ClosureKind::Async
+                if self.fn_trait_kind_from_trait_id(trait_).is_some() =>
+            {
+                self.extract_sig_from_projection(projection_ty, projected_ty)
+            }
+            _ => None,
+        }
+    }
+
+    fn extract_sig_from_projection(
+        &self,
+        projection_ty: &ProjectionTy,
+        projected_ty: &Ty,
+    ) -> Option<FnSubst<Interner>> {
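+        // The `Fn*` trait substitution is `[Self, ArgsTuple]`, so index 1 holds the
+        // tuple of argument types.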
+        let arg_param_ty = projection_ty.substitution.as_slice(Interner)[1].assert_ty_ref(Interner);
+
+        let TyKind::Tuple(_, input_tys) = arg_param_ty.kind(Interner) else {
+            return None;
+        };
+
+        let ret_param_ty = projected_ty;
+
+        Some(FnSubst(Substitution::from_iter(
+            Interner,
+            input_tys.iter(Interner).map(|t| t.cast(Interner)).chain(Some(GenericArg::new(
+                Interner,
+                chalk_ir::GenericArgData::Ty(ret_param_ty.clone()),
+            ))),
+        )))
+    }
+
     fn fn_trait_kind_from_trait_id(&self, trait_id: hir_def::TraitId) -> Option<FnTrait> {
         FnTrait::from_lang_item(self.db.lang_attr(trait_id.into())?)
     }
+
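+    /// Builds the closure signature from the parameter and return types written in
+    /// the source, falling back to fresh inference variables where they are omitted.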
+    fn supplied_sig_of_closure(
+        &mut self,
+        body: &ExprId,
+        ret_type: &Option<TypeRefId>,
+        arg_types: &[Option<TypeRefId>],
+        closure_kind: ClosureKind,
+    ) -> ClosureSignature {
+        let mut sig_tys = Vec::with_capacity(arg_types.len() + 1);
+
+        // collect explicitly written argument types
+        for arg_type in arg_types.iter() {
+            let arg_ty = match arg_type {
+                // FIXME: I think rustc actually lowers closure params with `LifetimeElisionKind::AnonymousCreateParameter`
+                // (but the return type with infer).
+                Some(type_ref) => self.make_body_ty(*type_ref),
+                None => self.table.new_type_var(),
+            };
+            sig_tys.push(arg_ty);
+        }
+
+        // add return type
+        let ret_ty = match ret_type {
+            Some(type_ref) => self.make_body_ty(*type_ref),
+            None => self.table.new_type_var(),
+        };
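+        // For an async closure the signature's return type is `impl Future<Output = ret_ty>`,
+        // while `ret_ty` itself remains the type the body evaluates to.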
+        if let ClosureKind::Async = closure_kind {
+            sig_tys.push(self.lower_async_block_type_impl_trait(ret_ty.clone(), *body));
+        } else {
+            sig_tys.push(ret_ty.clone());
+        }
+
+        let expected_sig = FnPointer {
+            num_binders: 0,
+            sig: FnSig { abi: FnAbi::RustCall, safety: chalk_ir::Safety::Safe, variadic: false },
+            substitution: FnSubst(
+                Substitution::from_iter(Interner, sig_tys.iter().cloned()).shifted_in(Interner),
+            ),
+        };
+
+        ClosureSignature { ret_ty, expected_sig }
+    }
+
+    /// Returns the signature of the closure together with the return type
+    /// *as represented inside the body* (so, for async closures, the `Output` type).
+    pub(super) fn sig_of_closure(
+        &mut self,
+        body: &ExprId,
+        ret_type: &Option<TypeRefId>,
+        arg_types: &[Option<TypeRefId>],
+        closure_kind: ClosureKind,
+        expected_sig: Option<FnSubst<Interner>>,
+    ) -> ClosureSignature {
+        if let Some(e) = expected_sig {
+            self.sig_of_closure_with_expectation(body, ret_type, arg_types, closure_kind, e)
+        } else {
+            self.sig_of_closure_no_expectation(body, ret_type, arg_types, closure_kind)
+        }
+    }
+
+    fn sig_of_closure_no_expectation(
+        &mut self,
+        body: &ExprId,
+        ret_type: &Option<TypeRefId>,
+        arg_types: &[Option<TypeRefId>],
+        closure_kind: ClosureKind,
+    ) -> ClosureSignature {
+        self.supplied_sig_of_closure(body, ret_type, arg_types, closure_kind)
+    }
+
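+    /// Like [`Self::sig_of_closure_no_expectation`], but additionally tries to merge
+    /// the signature expected from the surrounding context into the supplied one.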
+    fn sig_of_closure_with_expectation(
+        &mut self,
+        body: &ExprId,
+        ret_type: &Option<TypeRefId>,
+        arg_types: &[Option<TypeRefId>],
+        closure_kind: ClosureKind,
+        expected_sig: FnSubst<Interner>,
+    ) -> ClosureSignature {
+        let expected_sig = FnPointer {
+            num_binders: 0,
+            sig: FnSig { abi: FnAbi::RustCall, safety: chalk_ir::Safety::Safe, variadic: false },
+            substitution: expected_sig,
+        };
+
+        // If the expected signature has a different number of parameters than the
+        // closure, just return the expected signature.
+        if expected_sig.substitution.0.len(Interner) != arg_types.len() + 1 {
+            let ret_ty = match ret_type {
+                Some(type_ref) => self.make_body_ty(*type_ref),
+                None => self.table.new_type_var(),
+            };
+            return ClosureSignature { expected_sig, ret_ty };
+        }
+
+        self.merge_supplied_sig_with_expectation(
+            body,
+            ret_type,
+            arg_types,
+            closure_kind,
+            expected_sig,
+        )
+    }
+
+    fn merge_supplied_sig_with_expectation(
+        &mut self,
+        body: &ExprId,
+        ret_type: &Option<TypeRefId>,
+        arg_types: &[Option<TypeRefId>],
+        closure_kind: ClosureKind,
+        expected_sig: FnPointer,
+    ) -> ClosureSignature {
+        let supplied_sig = self.supplied_sig_of_closure(body, ret_type, arg_types, closure_kind);
+
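+        // Try to unify the expectation with the supplied signature; if they are
+        // incompatible, roll back so the failed unification leaves no trace.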
+        let snapshot = self.table.snapshot();
+        if !self.table.unify(&expected_sig.substitution, &supplied_sig.expected_sig.substitution) {
+            self.table.rollback_to(snapshot);
+        }
+
+        supplied_sig
+    }
 }
 
 // The below functions handle capture and closure kind (Fn, FnMut, ..)
@@ -208,7 +567,7 @@ impl HirPlace {
                 |_, _, _| {
                     unreachable!("Closure field only happens in MIR");
                 },
-                ctx.owner.module(ctx.db.upcast()).krate(),
+                ctx.owner.module(ctx.db).krate(),
             );
         }
         ty
@@ -223,7 +582,7 @@ impl HirPlace {
             kind: MutBorrowKind::Default | MutBorrowKind::TwoPhasedBorrow,
         }) = current_capture
         {
-            if self.projections[len..].iter().any(|it| *it == ProjectionElem::Deref) {
+            if self.projections[len..].contains(&ProjectionElem::Deref) {
                 current_capture =
                     CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::ClosureCapture });
             }
@@ -282,18 +641,20 @@ impl CapturedItem {
             match proj {
                 ProjectionElem::Deref => {}
                 ProjectionElem::Field(Either::Left(f)) => {
-                    match &*f.parent.variant_data(db.upcast()) {
-                        VariantData::Record { fields, .. } => {
+                    let variant_data = f.parent.variant_data(db);
+                    match variant_data.shape {
+                        FieldsShape::Record => {
                             result.push('_');
-                            result.push_str(fields[f.local_id].name.as_str())
+                            result.push_str(variant_data.fields()[f.local_id].name.as_str())
                         }
-                        VariantData::Tuple { fields, .. } => {
-                            let index = fields.iter().position(|it| it.0 == f.local_id);
+                        FieldsShape::Tuple => {
+                            let index =
+                                variant_data.fields().iter().position(|it| it.0 == f.local_id);
                             if let Some(index) = index {
                                 format_to!(result, "_{index}");
                             }
                         }
-                        VariantData::Unit => {}
+                        FieldsShape::Unit => {}
                     }
                 }
                 ProjectionElem::Field(Either::Right(f)) => format_to!(result, "_{}", f.index),
@@ -307,7 +668,7 @@ impl CapturedItem {
                 }
             }
         }
-        if is_raw_identifier(&result, db.crate_graph()[owner.module(db.upcast()).krate()].edition) {
+        if is_raw_identifier(&result, owner.module(db).krate().data(db).edition) {
             result.insert_str(0, "r#");
         }
         result
@@ -315,27 +676,31 @@ impl CapturedItem {
 
     pub fn display_place_source_code(&self, owner: DefWithBodyId, db: &dyn HirDatabase) -> String {
         let body = db.body(owner);
-        let krate = owner.krate(db.upcast());
-        let edition = db.crate_graph()[krate].edition;
-        let mut result = body[self.place.local].name.display(db.upcast(), edition).to_string();
+        let krate = owner.krate(db);
+        let edition = krate.data(db).edition;
+        let mut result = body[self.place.local].name.display(db, edition).to_string();
         for proj in &self.place.projections {
             match proj {
                 // In source code autoderef kicks in.
                 ProjectionElem::Deref => {}
                 ProjectionElem::Field(Either::Left(f)) => {
-                    let variant_data = f.parent.variant_data(db.upcast());
-                    match &*variant_data {
-                        VariantData::Record { fields, .. } => format_to!(
+                    let variant_data = f.parent.variant_data(db);
+                    match variant_data.shape {
+                        FieldsShape::Record => format_to!(
                             result,
                             ".{}",
-                            fields[f.local_id].name.display(db.upcast(), edition)
+                            variant_data.fields()[f.local_id].name.display(db, edition)
                         ),
-                        VariantData::Tuple { fields, .. } => format_to!(
+                        FieldsShape::Tuple => format_to!(
                             result,
                             ".{}",
-                            fields.iter().position(|it| it.0 == f.local_id).unwrap_or_default()
+                            variant_data
+                                .fields()
+                                .iter()
+                                .position(|it| it.0 == f.local_id)
+                                .unwrap_or_default()
                         ),
-                        VariantData::Unit => {}
+                        FieldsShape::Unit => {}
                     }
                 }
                 ProjectionElem::Field(Either::Right(f)) => {
@@ -367,9 +732,9 @@ impl CapturedItem {
 
     pub fn display_place(&self, owner: DefWithBodyId, db: &dyn HirDatabase) -> String {
         let body = db.body(owner);
-        let krate = owner.krate(db.upcast());
-        let edition = db.crate_graph()[krate].edition;
-        let mut result = body[self.place.local].name.display(db.upcast(), edition).to_string();
+        let krate = owner.krate(db);
+        let edition = krate.data(db).edition;
+        let mut result = body[self.place.local].name.display(db, edition).to_string();
         let mut field_need_paren = false;
         for proj in &self.place.projections {
             match proj {
@@ -381,17 +746,18 @@ impl CapturedItem {
                     if field_need_paren {
                         result = format!("({result})");
                     }
-                    let variant_data = f.parent.variant_data(db.upcast());
-                    let field = match &*variant_data {
-                        VariantData::Record { fields, .. } => {
-                            fields[f.local_id].name.as_str().to_owned()
+                    let variant_data = f.parent.variant_data(db);
+                    let field = match variant_data.shape {
+                        FieldsShape::Record => {
+                            variant_data.fields()[f.local_id].name.as_str().to_owned()
                         }
-                        VariantData::Tuple { fields, .. } => fields
+                        FieldsShape::Tuple => variant_data
+                            .fields()
                             .iter()
                             .position(|it| it.0 == f.local_id)
                             .unwrap_or_default()
                             .to_string(),
-                        VariantData::Unit => "[missing field]".to_owned(),
+                        FieldsShape::Unit => "[missing field]".to_owned(),
                     };
                     result = format!("{result}.{field}");
                     field_need_paren = false;
@@ -493,10 +859,7 @@ impl CapturedItemWithoutTy {
                     Ok(BoundVar::new(outer_binder, idx).to_ty(Interner))
                 }
             }
-            let Some(generics) = ctx.generics() else {
-                return Binders::empty(Interner, ty);
-            };
-            let filler = &mut Filler { db: ctx.db, generics };
+            let filler = &mut Filler { db: ctx.db, generics: ctx.generics() };
             let result = ty.clone().try_fold_with(filler, DebruijnIndex::INNERMOST).unwrap_or(ty);
             make_binders(ctx.db, filler.generics, result)
         }
@@ -506,8 +869,8 @@ impl CapturedItemWithoutTy {
 impl InferenceContext<'_> {
     fn place_of_expr(&mut self, tgt_expr: ExprId) -> Option<HirPlace> {
         let r = self.place_of_expr_without_adjust(tgt_expr)?;
-        let default = vec![];
-        let adjustments = self.result.expr_adjustments.get(&tgt_expr).unwrap_or(&default);
+        let adjustments =
+            self.result.expr_adjustments.get(&tgt_expr).map(|it| &**it).unwrap_or_default();
         apply_adjusts_to_place(&mut self.current_capture_span_stack, r, adjustments)
     }
 
@@ -517,10 +880,8 @@ impl InferenceContext<'_> {
             return None;
         }
         let hygiene = self.body.expr_or_pat_path_hygiene(id);
-        let result = self
-            .resolver
-            .resolve_path_in_value_ns_fully(self.db.upcast(), path, hygiene)
-            .and_then(|result| match result {
+        self.resolver.resolve_path_in_value_ns_fully(self.db, path, hygiene).and_then(|result| {
+            match result {
                 ValueNs::LocalBinding(binding) => {
                     let mir_span = match id {
                         ExprOrPatId::ExprId(id) => MirSpan::ExprId(id),
@@ -530,8 +891,8 @@ impl InferenceContext<'_> {
                     Some(HirPlace { local: binding, projections: Vec::new() })
                 }
                 _ => None,
-            });
-        result
+            }
+        })
     }
 
     /// Changes `current_capture_span_stack` to contain the stack of spans for this expr.
@@ -540,7 +901,7 @@ impl InferenceContext<'_> {
         match &self.body[tgt_expr] {
             Expr::Path(p) => {
                 let resolver_guard =
-                    self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, tgt_expr);
+                    self.resolver.update_to_inner_scope(self.db, self.owner, tgt_expr);
                 let result = self.path_place(p, tgt_expr.into());
                 self.resolver.reset_to_guard(resolver_guard);
                 return result;
@@ -815,8 +1176,8 @@ impl InferenceContext<'_> {
                         {
                             if let Some(deref_fn) = self
                                 .db
-                                .trait_data(deref_trait)
-                                .method_by_name(&Name::new_symbol_root(sym::deref_mut.clone()))
+                                .trait_items(deref_trait)
+                                .method_by_name(&Name::new_symbol_root(sym::deref_mut))
                             {
                                 break 'b deref_fn == f;
                             }
@@ -902,7 +1263,7 @@ impl InferenceContext<'_> {
             &Expr::Assignment { target, value } => {
                 self.walk_expr(value);
                 let resolver_guard =
-                    self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, tgt_expr);
+                    self.resolver.update_to_inner_scope(self.db, self.owner, tgt_expr);
                 match self.place_of_expr(value) {
                     Some(rhs_place) => {
                         self.inside_assignment = true;
@@ -961,9 +1322,9 @@ impl InferenceContext<'_> {
             | Pat::Or(_) => (),
             Pat::TupleStruct { .. } | Pat::Record { .. } => {
                 if let Some(variant) = self.result.variant_resolution_for_pat(p) {
-                    let adt = variant.adt_id(self.db.upcast());
+                    let adt = variant.adt_id(self.db);
                     let is_multivariant = match adt {
-                        hir_def::AdtId::EnumId(e) => self.db.enum_data(e).variants.len() != 1,
+                        hir_def::AdtId::EnumId(e) => self.db.enum_variants(e).variants.len() != 1,
                         _ => false,
                     };
                     if is_multivariant {
@@ -1052,7 +1413,7 @@ impl InferenceContext<'_> {
                     |_, _, _| {
                         unreachable!("Closure field only happens in MIR");
                     },
-                    self.owner.module(self.db.upcast()).krate(),
+                    self.owner.module(self.db).krate(),
                 );
                 if ty.as_raw_ptr().is_some() || ty.is_union() {
                     capture.kind = CaptureKind::ByRef(BorrowKind::Shared);
@@ -1159,7 +1520,7 @@ impl InferenceContext<'_> {
                             self.consume_place(place)
                         }
                         VariantId::StructId(s) => {
-                            let vd = &*self.db.struct_data(s).variant_data;
+                            let vd = &*self.db.variant_fields(s.into());
                             for field_pat in args.iter() {
                                 let arg = field_pat.pat;
                                 let Some(local_id) = vd.field(&field_pat.name) else {
@@ -1211,7 +1572,7 @@ impl InferenceContext<'_> {
                             self.consume_place(place)
                         }
                         VariantId::StructId(s) => {
-                            let vd = &*self.db.struct_data(s).variant_data;
+                            let vd = &*self.db.variant_fields(s.into());
                             let (al, ar) =
                                 args.split_at(ellipsis.map_or(args.len(), |it| it as usize));
                             let fields = vd.fields().iter();
@@ -1340,7 +1701,7 @@ impl InferenceContext<'_> {
             for (derefed_callee, callee_ty, params, expr) in exprs {
                 if let &Expr::Call { callee, .. } = &self.body[expr] {
                     let mut adjustments =
-                        self.result.expr_adjustments.remove(&callee).unwrap_or_default();
+                        self.result.expr_adjustments.remove(&callee).unwrap_or_default().into_vec();
                     self.write_fn_trait_method_resolution(
                         kind,
                         &derefed_callee,
@@ -1349,7 +1710,7 @@ impl InferenceContext<'_> {
                         &params,
                         expr,
                     );
-                    self.result.expr_adjustments.insert(callee, adjustments);
+                    self.result.expr_adjustments.insert(callee, adjustments.into_boxed_slice());
                 }
             }
         }
@@ -1387,8 +1748,42 @@ impl InferenceContext<'_> {
                 }
             }
         }
+        assert!(deferred_closures.is_empty(), "we should have analyzed all closures");
         result
     }
+
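+    /// Records that the closure currently being inferred depends on `dep`,
+    /// unless doing so would create a dependency cycle.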
+    pub(super) fn add_current_closure_dependency(&mut self, dep: ClosureId) {
+        if let Some(c) = self.current_closure {
+            if !dep_creates_cycle(&self.closure_dependencies, &mut FxHashSet::default(), c, dep) {
+                self.closure_dependencies.entry(c).or_default().push(dep);
+            }
+        }
+
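+        // Returns whether adding the edge `from -> to` would create a cycle in the
+        // closure dependency graph.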
+        fn dep_creates_cycle(
+            closure_dependencies: &FxHashMap<ClosureId, Vec<ClosureId>>,
+            visited: &mut FxHashSet<ClosureId>,
+            from: ClosureId,
+            to: ClosureId,
+        ) -> bool {
+            // Expand each node only once so the walk terminates.
+            if !visited.insert(to) {
+                return false;
+            }
+
+            if from == to {
+                return true;
+            }
+
+            if let Some(deps) = closure_dependencies.get(&to) {
+                for dep in deps {
+                    if dep_creates_cycle(closure_dependencies, visited, from, *dep) {
+                        return true;
+                    }
+                }
+            }
+
+            false
+        }
+    }
 }
 
 /// Call this only when the last span in the stack isn't a split.
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs
index acd86b1f3ed8a..847dd43a02d62 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs
@@ -7,7 +7,7 @@
 
 use std::iter;
 
-use chalk_ir::{cast::Cast, BoundVar, Goal, Mutability, TyKind, TyVariableKind};
+use chalk_ir::{BoundVar, Goal, Mutability, TyKind, TyVariableKind, cast::Cast};
 use hir_def::{
     hir::ExprId,
     lang_item::{LangItem, LangItemTarget},
@@ -16,6 +16,8 @@ use stdx::always;
 use triomphe::Arc;
 
 use crate::{
+    Canonical, DomainGoal, FnAbi, FnPointer, FnSig, Guidance, InEnvironment, Interner, Lifetime,
+    Solution, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt,
     autoderef::{Autoderef, AutoderefKind},
     db::HirDatabase,
     infer::{
@@ -23,8 +25,6 @@ use crate::{
         TypeError, TypeMismatch,
     },
     utils::ClosureSubst,
-    Canonical, DomainGoal, FnAbi, FnPointer, FnSig, Guidance, InEnvironment, Interner, Lifetime,
-    Solution, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt,
 };
 
 use super::unify::InferenceTable;
@@ -148,11 +148,11 @@ impl CoerceMany {
             if let (Ok(result1), Ok(result2)) = (result1, result2) {
                 ctx.table.register_infer_ok(InferOk { value: (), goals: result1.goals });
                 for &e in &self.expressions {
-                    ctx.write_expr_adj(e, result1.value.0.clone());
+                    ctx.write_expr_adj(e, result1.value.0.clone().into_boxed_slice());
                 }
                 ctx.table.register_infer_ok(InferOk { value: (), goals: result2.goals });
                 if let Some(expr) = expr {
-                    ctx.write_expr_adj(expr, result2.value.0);
+                    ctx.write_expr_adj(expr, result2.value.0.into_boxed_slice());
                     self.expressions.push(expr);
                 }
                 return self.final_ty = Some(target_ty);
@@ -182,7 +182,7 @@ impl CoerceMany {
         {
             self.final_ty = Some(res);
             for &e in &self.expressions {
-                ctx.write_expr_adj(e, adjustments.clone());
+                ctx.write_expr_adj(e, adjustments.clone().into_boxed_slice());
             }
         } else {
             match cause {
@@ -263,7 +263,7 @@ impl InferenceContext<'_> {
     ) -> Result<Ty, TypeError> {
         let (adjustments, ty) = self.coerce_inner(from_ty, to_ty, coerce_never)?;
         if let Some(expr) = expr {
-            self.write_expr_adj(expr, adjustments);
+            self.write_expr_adj(expr, adjustments.into_boxed_slice());
         }
         Ok(ty)
     }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/diagnostics.rs
index e4f5b5ed378dc..e3c4f5562d5c4 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/diagnostics.rs
@@ -6,13 +6,17 @@ use std::cell::RefCell;
 use std::ops::{Deref, DerefMut};
 
 use either::Either;
-use hir_def::{hir::ExprOrPatId, path::Path, resolver::Resolver, type_ref::TypesMap, TypeOwnerId};
+use hir_def::GenericDefId;
+use hir_def::expr_store::ExpressionStore;
+use hir_def::expr_store::path::Path;
+use hir_def::{hir::ExprOrPatId, resolver::Resolver};
 use la_arena::{Idx, RawIdx};
 
+use crate::lower::LifetimeElisionKind;
 use crate::{
+    InferenceDiagnostic, InferenceTyDiagnosticSource, TyLoweringContext, TyLoweringDiagnostic,
     db::HirDatabase,
     lower::path::{PathDiagnosticCallback, PathLoweringContext},
-    InferenceDiagnostic, InferenceTyDiagnosticSource, TyLoweringContext, TyLoweringDiagnostic,
 };
 
 // Unfortunately, this struct needs to use interior mutability (but we encapsulate it)
@@ -58,12 +62,17 @@ impl<'a> InferenceTyLoweringContext<'a> {
     pub(super) fn new(
         db: &'a dyn HirDatabase,
         resolver: &'a Resolver,
-        types_map: &'a TypesMap,
-        owner: TypeOwnerId,
+        store: &'a ExpressionStore,
         diagnostics: &'a Diagnostics,
         source: InferenceTyDiagnosticSource,
+        generic_def: GenericDefId,
+        lifetime_elision: LifetimeElisionKind,
     ) -> Self {
-        Self { ctx: TyLoweringContext::new(db, resolver, types_map, owner), diagnostics, source }
+        Self {
+            ctx: TyLoweringContext::new(db, resolver, store, generic_def, lifetime_elision),
+            diagnostics,
+            source,
+        }
     }
 
     #[inline]
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
index 80e3ca1fa282e..5468254ab910f 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
@@ -5,17 +5,17 @@ use std::{
     mem,
 };
 
-use chalk_ir::{cast::Cast, fold::Shift, DebruijnIndex, Mutability, TyVariableKind};
+use chalk_ir::{DebruijnIndex, Mutability, TyVariableKind, cast::Cast};
 use either::Either;
 use hir_def::{
+    BlockId, FieldId, GenericDefId, GenericParamId, ItemContainerId, Lookup, TupleFieldId, TupleId,
+    expr_store::path::{GenericArg, GenericArgs, Path},
     hir::{
-        ArithOp, Array, AsmOperand, AsmOptions, BinaryOp, ClosureKind, Expr, ExprId, ExprOrPatId,
-        LabelId, Literal, Pat, PatId, Statement, UnaryOp,
+        ArithOp, Array, AsmOperand, AsmOptions, BinaryOp, Expr, ExprId, ExprOrPatId, LabelId,
+        Literal, Pat, PatId, Statement, UnaryOp, generics::GenericParamDataRef,
     },
     lang_item::{LangItem, LangItemTarget},
-    path::{GenericArg, GenericArgs, Path},
     resolver::ValueNs,
-    BlockId, FieldId, GenericDefId, GenericParamId, ItemContainerId, Lookup, TupleFieldId, TupleId,
 };
 use hir_expand::name::Name;
 use intern::sym;
@@ -23,34 +23,33 @@ use stdx::always;
 use syntax::ast::RangeOp;
 
 use crate::{
-    autoderef::{builtin_deref, deref_by_trait, Autoderef},
+    Adjust, Adjustment, AdtId, AutoBorrow, Binders, CallableDefId, CallableSig, DeclContext,
+    DeclOrigin, IncorrectGenericsLenKind, Interner, Rawness, Scalar, Substitution,
+    TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, TyKind,
+    autoderef::{Autoderef, builtin_deref, deref_by_trait},
     consteval,
-    db::{InternedClosure, InternedCoroutine},
-    error_lifetime,
-    generics::{generics, Generics},
+    generics::generics,
     infer::{
+        BreakableKind,
         coerce::{CoerceMany, CoerceNever, CoercionCause},
         find_continuable,
         pat::contains_explicit_ref_binding,
-        BreakableKind,
     },
     lang_items::lang_items_for_bin_op,
     lower::{
-        const_or_path_to_chalk, generic_arg_to_chalk, lower_to_chalk_mutability, ParamLoweringMode,
+        LifetimeElisionKind, ParamLoweringMode, lower_to_chalk_mutability,
+        path::{GenericArgsLowerer, TypeLikeConst, substs_from_args_and_bindings},
     },
-    mapping::{from_chalk, ToChalk},
+    mapping::{ToChalk, from_chalk},
     method_resolution::{self, VisibleFromModule},
     primitive::{self, UintTy},
     static_lifetime, to_chalk_trait_id,
     traits::FnTrait,
-    Adjust, Adjustment, AdtId, AutoBorrow, Binders, CallableDefId, CallableSig, DeclContext,
-    DeclOrigin, FnAbi, FnPointer, FnSig, FnSubst, Interner, Rawness, Scalar, Substitution,
-    TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, TyKind,
 };
 
 use super::{
-    cast::CastCheck, coerce::auto_deref_adjust_steps, find_breakable, BreakableContext, Diverges,
-    Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch,
+    BreakableContext, Diverges, Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch,
+    cast::CastCheck, coerce::auto_deref_adjust_steps, find_breakable,
 };
 
 #[derive(Clone, Copy, PartialEq, Eq)]
@@ -198,14 +197,10 @@ impl InferenceContext<'_> {
         match &self.body[expr] {
             // Lang item paths cannot currently be local variables or statics.
             Expr::Path(Path::LangItem(_, _)) => false,
-            Expr::Path(Path::Normal(path)) => path.type_anchor().is_none(),
+            Expr::Path(Path::Normal(path)) => path.type_anchor.is_none(),
             Expr::Path(path) => self
                 .resolver
-                .resolve_path_in_value_ns_fully(
-                    self.db.upcast(),
-                    path,
-                    self.body.expr_path_hygiene(expr),
-                )
+                .resolve_path_in_value_ns_fully(self.db, path, self.body.expr_path_hygiene(expr))
                 .is_none_or(|res| matches!(res, ValueNs::LocalBinding(_) | ValueNs::StaticId(_))),
             Expr::Underscore => true,
             Expr::UnaryOp { op: UnaryOp::Deref, .. } => true,
@@ -289,7 +284,7 @@ impl InferenceContext<'_> {
         expected: &Expectation,
         is_read: ExprIsRead,
     ) -> Ty {
-        self.db.unwind_if_cancelled();
+        self.db.unwind_if_revision_cancelled();
 
         let ty = match &self.body[tgt_expr] {
             Expr::Missing => self.err_ty(),
@@ -349,8 +344,7 @@ impl InferenceContext<'_> {
             }
             Expr::Const(id) => {
                 self.with_breakable_ctx(BreakableKind::Border, None, None, |this| {
-                    let loc = this.db.lookup_intern_anonymous_const(*id);
-                    this.infer_expr(loc.root, expected, ExprIsRead::Yes)
+                    this.infer_expr(*id, expected, ExprIsRead::Yes)
                 })
                 .1
             }
@@ -378,117 +372,8 @@ impl InferenceContext<'_> {
                     None => self.result.standard_types.never.clone(),
                 }
             }
-            Expr::Closure { body, args, ret_type, arg_types, closure_kind, capture_by: _ } => {
-                assert_eq!(args.len(), arg_types.len());
-
-                let mut sig_tys = Vec::with_capacity(arg_types.len() + 1);
-
-                // collect explicitly written argument types
-                for arg_type in arg_types.iter() {
-                    let arg_ty = match arg_type {
-                        Some(type_ref) => self.make_body_ty(*type_ref),
-                        None => self.table.new_type_var(),
-                    };
-                    sig_tys.push(arg_ty);
-                }
-
-                // add return type
-                let ret_ty = match ret_type {
-                    Some(type_ref) => self.make_body_ty(*type_ref),
-                    None => self.table.new_type_var(),
-                };
-                if let ClosureKind::Async = closure_kind {
-                    sig_tys.push(self.lower_async_block_type_impl_trait(ret_ty.clone(), *body));
-                } else {
-                    sig_tys.push(ret_ty.clone());
-                }
-
-                let sig_ty = TyKind::Function(FnPointer {
-                    num_binders: 0,
-                    sig: FnSig {
-                        abi: FnAbi::RustCall,
-                        safety: chalk_ir::Safety::Safe,
-                        variadic: false,
-                    },
-                    substitution: FnSubst(
-                        Substitution::from_iter(Interner, sig_tys.iter().cloned())
-                            .shifted_in(Interner),
-                    ),
-                })
-                .intern(Interner);
-
-                let (id, ty, resume_yield_tys) = match closure_kind {
-                    ClosureKind::Coroutine(_) => {
-                        // FIXME: report error when there are more than 1 parameter.
-                        let resume_ty = match sig_tys.first() {
-                            // When `sig_tys.len() == 1` the first type is the return type, not the
-                            // first parameter type.
-                            Some(ty) if sig_tys.len() > 1 => ty.clone(),
-                            _ => self.result.standard_types.unit.clone(),
-                        };
-                        let yield_ty = self.table.new_type_var();
-
-                        let subst = TyBuilder::subst_for_coroutine(self.db, self.owner)
-                            .push(resume_ty.clone())
-                            .push(yield_ty.clone())
-                            .push(ret_ty.clone())
-                            .build();
-
-                        let coroutine_id = self
-                            .db
-                            .intern_coroutine(InternedCoroutine(self.owner, tgt_expr))
-                            .into();
-                        let coroutine_ty = TyKind::Coroutine(coroutine_id, subst).intern(Interner);
-
-                        (None, coroutine_ty, Some((resume_ty, yield_ty)))
-                    }
-                    ClosureKind::Closure | ClosureKind::Async => {
-                        let closure_id =
-                            self.db.intern_closure(InternedClosure(self.owner, tgt_expr)).into();
-                        let closure_ty = TyKind::Closure(
-                            closure_id,
-                            TyBuilder::subst_for_closure(self.db, self.owner, sig_ty.clone()),
-                        )
-                        .intern(Interner);
-                        self.deferred_closures.entry(closure_id).or_default();
-                        if let Some(c) = self.current_closure {
-                            self.closure_dependencies.entry(c).or_default().push(closure_id);
-                        }
-                        (Some(closure_id), closure_ty, None)
-                    }
-                };
-
-                // Eagerly try to relate the closure type with the expected
-                // type, otherwise we often won't have enough information to
-                // infer the body.
-                self.deduce_closure_type_from_expectations(tgt_expr, &ty, &sig_ty, expected);
-
-                // Now go through the argument patterns
-                for (arg_pat, arg_ty) in args.iter().zip(&sig_tys) {
-                    self.infer_top_pat(*arg_pat, arg_ty, None);
-                }
-
-                // FIXME: lift these out into a struct
-                let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
-                let prev_closure = mem::replace(&mut self.current_closure, id);
-                let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone());
-                let prev_ret_coercion =
-                    mem::replace(&mut self.return_coercion, Some(CoerceMany::new(ret_ty)));
-                let prev_resume_yield_tys =
-                    mem::replace(&mut self.resume_yield_tys, resume_yield_tys);
-
-                self.with_breakable_ctx(BreakableKind::Border, None, None, |this| {
-                    this.infer_return(*body);
-                });
-
-                self.diverges = prev_diverges;
-                self.return_ty = prev_ret_ty;
-                self.return_coercion = prev_ret_coercion;
-                self.current_closure = prev_closure;
-                self.resume_yield_tys = prev_resume_yield_tys;
-
-                ty
-            }
+            Expr::Closure { body, args, ret_type, arg_types, closure_kind, capture_by: _ } => self
+                .infer_closure(body, args, ret_type, arg_types, *closure_kind, tgt_expr, expected),
             Expr::Call { callee, args, .. } => self.infer_call(tgt_expr, *callee, args, expected),
             Expr::MethodCall { receiver, args, method_name, generic_args } => self
                 .infer_method_call(
@@ -657,16 +542,15 @@ impl InferenceContext<'_> {
                     _ if fields.is_empty() => {}
                     Some(def) => {
                         let field_types = self.db.field_types(def);
-                        let variant_data = def.variant_data(self.db.upcast());
+                        let variant_data = def.variant_data(self.db);
                         let visibilities = self.db.field_visibilities(def);
                         for field in fields.iter() {
                             let field_def = {
                                 match variant_data.field(&field.name) {
                                     Some(local_id) => {
-                                        if !visibilities[local_id].is_visible_from(
-                                            self.db.upcast(),
-                                            self.resolver.module(),
-                                        ) {
+                                        if !visibilities[local_id]
+                                            .is_visible_from(self.db, self.resolver.module())
+                                        {
                                             self.push_diagnostic(
                                                 InferenceDiagnostic::NoSuchField {
                                                     field: field.expr.into(),
@@ -772,8 +656,8 @@ impl InferenceContext<'_> {
                         if let Some(deref_trait) = self.resolve_lang_trait(LangItem::Deref) {
                             if let Some(deref_fn) = self
                                 .db
-                                .trait_data(deref_trait)
-                                .method_by_name(&Name::new_symbol_root(sym::deref.clone()))
+                                .trait_items(deref_trait)
+                                .method_by_name(&Name::new_symbol_root(sym::deref))
                             {
                                 // FIXME: this is wrong in multiple ways, subst is empty, and we emit it even for builtin deref (note that
                                 // the mutability is not wrong, and will be fixed in `self.infer_mut`).
@@ -858,7 +742,7 @@ impl InferenceContext<'_> {
                 } else {
                     let rhs_ty = self.infer_expr(value, &Expectation::none(), ExprIsRead::Yes);
                     let resolver_guard =
-                        self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, tgt_expr);
+                        self.resolver.update_to_inner_scope(self.db, self.owner, tgt_expr);
                     self.inside_assignment = true;
                     self.infer_top_pat(target, &rhs_ty, None);
                     self.inside_assignment = false;
@@ -921,17 +805,18 @@ impl InferenceContext<'_> {
                         .map_or((self.err_ty(), Vec::new()), |adj| {
                             adj.apply(&mut self.table, base_ty)
                         });
+
                     // mutability will be fixed up in `InferenceContext::infer_mut`;
                     adj.push(Adjustment::borrow(
                         Mutability::Not,
                         self_ty.clone(),
                         self.table.new_lifetime_var(),
                     ));
-                    self.write_expr_adj(*base, adj);
+                    self.write_expr_adj(*base, adj.into_boxed_slice());
                     if let Some(func) = self
                         .db
-                        .trait_data(index_trait)
-                        .method_by_name(&Name::new_symbol_root(sym::index.clone()))
+                        .trait_items(index_trait)
+                        .method_by_name(&Name::new_symbol_root(sym::index))
                     {
                         let subst = TyBuilder::subst_for_def(self.db, index_trait, None);
                         if subst.remaining() != 2 {
@@ -1143,7 +1028,7 @@ impl InferenceContext<'_> {
     }
 
     fn infer_expr_path(&mut self, path: &Path, id: ExprOrPatId, scope_id: ExprId) -> Ty {
-        let g = self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, scope_id);
+        let g = self.resolver.update_to_inner_scope(self.db, self.owner, scope_id);
         let ty = match self.infer_path(path, id) {
             Some(ty) => ty,
             None => {
@@ -1168,8 +1053,7 @@ impl InferenceContext<'_> {
         let ret_ty = self.table.new_type_var();
         let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
         let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone());
-        let prev_ret_coercion =
-            mem::replace(&mut self.return_coercion, Some(CoerceMany::new(ret_ty.clone())));
+        let prev_ret_coercion = self.return_coercion.replace(CoerceMany::new(ret_ty.clone()));
 
         // FIXME: We should handle async blocks like we handle closures
         let expected = &Expectation::has_type(ret_ty);
@@ -1258,7 +1142,7 @@ impl InferenceContext<'_> {
         let Some(trait_) = fn_x.get_id(self.db, self.table.trait_env.krate) else {
             return;
         };
-        let trait_data = self.db.trait_data(trait_);
+        let trait_data = self.db.trait_items(trait_);
         if let Some(func) = trait_data.method_by_name(&fn_x.method_name()) {
             let subst = TyBuilder::subst_for_def(self.db, trait_, None)
                 .push(callee_ty.clone())
@@ -1426,7 +1310,7 @@ impl InferenceContext<'_> {
 
         let trait_func = lang_items_for_bin_op(op).and_then(|(name, lang_item)| {
             let trait_id = self.resolve_lang_item(lang_item)?.as_trait()?;
-            let func = self.db.trait_data(trait_id).method_by_name(&name)?;
+            let func = self.db.trait_items(trait_id).method_by_name(&name)?;
             Some((trait_id, func))
         });
         let (trait_, func) = match trait_func {
@@ -1472,10 +1356,10 @@ impl InferenceContext<'_> {
                     if let TyKind::Ref(mtbl, lt, _) = p_left.kind(Interner) {
                         self.write_expr_adj(
                             lhs,
-                            vec![Adjustment {
+                            Box::new([Adjustment {
                                 kind: Adjust::Borrow(AutoBorrow::Ref(lt.clone(), *mtbl)),
                                 target: p_left.clone(),
-                            }],
+                            }]),
                         );
                     }
                 }
@@ -1484,10 +1368,10 @@ impl InferenceContext<'_> {
                     if let TyKind::Ref(mtbl, lt, _) = p_right.kind(Interner) {
                         self.write_expr_adj(
                             rhs,
-                            vec![Adjustment {
+                            Box::new([Adjustment {
                                 kind: Adjust::Borrow(AutoBorrow::Ref(lt.clone(), *mtbl)),
                                 target: p_right.clone(),
-                            }],
+                            }]),
                         );
                     }
                 }
@@ -1517,7 +1401,7 @@ impl InferenceContext<'_> {
         expected: &Expectation,
     ) -> Ty {
         let coerce_ty = expected.coercion_target_type(&mut self.table);
-        let g = self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr);
+        let g = self.resolver.update_to_inner_scope(self.db, self.owner, expr);
         let prev_env = block_id.map(|block_id| {
             let prev_env = self.table.trait_env.clone();
             TraitEnvironment::with_block(&mut self.table.trait_env, block_id);
@@ -1556,11 +1440,7 @@ impl InferenceContext<'_> {
                                         target_is_read,
                                     )
                                 };
-                                if type_ref.is_some() {
-                                    decl_ty
-                                } else {
-                                    ty
-                                }
+                                if type_ref.is_some() { decl_ty } else { ty }
                             } else {
                                 decl_ty
                             };
@@ -1681,20 +1561,20 @@ impl InferenceContext<'_> {
                             })
                     });
                 }
-                TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), parameters) => {
-                    let local_id = self.db.struct_data(*s).variant_data.field(name)?;
-                    let field = FieldId { parent: (*s).into(), local_id };
+                &TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), ref parameters) => {
+                    let local_id = self.db.variant_fields(s.into()).field(name)?;
+                    let field = FieldId { parent: s.into(), local_id };
                     (field, parameters.clone())
                 }
-                TyKind::Adt(AdtId(hir_def::AdtId::UnionId(u)), parameters) => {
-                    let local_id = self.db.union_data(*u).variant_data.field(name)?;
-                    let field = FieldId { parent: (*u).into(), local_id };
+                &TyKind::Adt(AdtId(hir_def::AdtId::UnionId(u)), ref parameters) => {
+                    let local_id = self.db.variant_fields(u.into()).field(name)?;
+                    let field = FieldId { parent: u.into(), local_id };
                     (field, parameters.clone())
                 }
                 _ => return None,
             };
             let is_visible = self.db.field_visibilities(field_id.parent)[field_id.local_id]
-                .is_visible_from(self.db.upcast(), self.resolver.module());
+                .is_visible_from(self.db, self.resolver.module());
             if !is_visible {
                 if private_field.is_none() {
                     private_field = Some((field_id, parameters));
@@ -1747,7 +1627,7 @@ impl InferenceContext<'_> {
 
         match self.lookup_field(&receiver_ty, name) {
             Some((ty, field_id, adjustments, is_public)) => {
-                self.write_expr_adj(receiver, adjustments);
+                self.write_expr_adj(receiver, adjustments.into_boxed_slice());
                 self.result.field_resolutions.insert(tgt_expr, field_id);
                 if !is_public {
                     if let Either::Left(field) = field_id {
@@ -1781,9 +1661,8 @@ impl InferenceContext<'_> {
                 match resolved {
                     Some((adjust, func, _)) => {
                         let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty);
-                        let generics = generics(self.db.upcast(), func.into());
-                        let substs = self.substs_for_method_call(generics, None);
-                        self.write_expr_adj(receiver, adjustments);
+                        let substs = self.substs_for_method_call(tgt_expr, func.into(), None);
+                        self.write_expr_adj(receiver, adjustments.into_boxed_slice());
                         self.write_method_resolution(tgt_expr, func, substs.clone());
 
                         self.check_method_call(
@@ -1828,9 +1707,7 @@ impl InferenceContext<'_> {
                 if let TyKind::Closure(c, _) =
                     self.table.resolve_completely(callee_ty.clone()).kind(Interner)
                 {
-                    if let Some(par) = self.current_closure {
-                        self.closure_dependencies.entry(par).or_default().push(*c);
-                    }
+                    self.add_current_closure_dependency(*c);
                     self.deferred_closures.entry(*c).or_default().push((
                         derefed_callee.clone(),
                         callee_ty.clone(),
@@ -1848,7 +1725,7 @@ impl InferenceContext<'_> {
                         tgt_expr,
                     );
                 }
-                self.write_expr_adj(callee, adjustments);
+                self.write_expr_adj(callee, adjustments.into_boxed_slice());
                 (params, ret_ty)
             }
             None => {
@@ -1932,10 +1809,9 @@ impl InferenceContext<'_> {
                 }
 
                 let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty);
-                self.write_expr_adj(receiver, adjustments);
+                self.write_expr_adj(receiver, adjustments.into_boxed_slice());
 
-                let generics = generics(self.db.upcast(), func.into());
-                let substs = self.substs_for_method_call(generics, generic_args);
+                let substs = self.substs_for_method_call(tgt_expr, func.into(), generic_args);
                 self.write_method_resolution(tgt_expr, func, substs.clone());
                 self.check_method_call(
                     tgt_expr,
@@ -1952,7 +1828,7 @@ impl InferenceContext<'_> {
                 let field_with_same_name_exists = match self.lookup_field(&receiver_ty, method_name)
                 {
                     Some((ty, field_id, adjustments, _public)) => {
-                        self.write_expr_adj(receiver, adjustments);
+                        self.write_expr_adj(receiver, adjustments.into_boxed_slice());
                         self.result.field_resolutions.insert(tgt_expr, field_id);
                         Some(ty)
                     }
@@ -1985,8 +1861,7 @@ impl InferenceContext<'_> {
 
                 let recovered = match assoc_func_with_same_name {
                     Some(f) => {
-                        let generics = generics(self.db.upcast(), f.into());
-                        let substs = self.substs_for_method_call(generics, generic_args);
+                        let substs = self.substs_for_method_call(tgt_expr, f.into(), generic_args);
                         let f = self
                             .db
                             .value_ty(f.into())
@@ -2176,87 +2051,147 @@ impl InferenceContext<'_> {
 
     fn substs_for_method_call(
         &mut self,
-        def_generics: Generics,
+        expr: ExprId,
+        def: GenericDefId,
         generic_args: Option<&GenericArgs>,
     ) -> Substitution {
-        let (
-            parent_params,
-            has_self_param,
-            type_params,
-            const_params,
-            impl_trait_params,
-            lifetime_params,
-        ) = def_generics.provenance_split();
-        assert!(!has_self_param); // method shouldn't have another Self param
-        let total_len =
-            parent_params + type_params + const_params + impl_trait_params + lifetime_params;
-        let mut substs = Vec::with_capacity(total_len);
-
-        // handle provided arguments
-        if let Some(generic_args) = generic_args {
-            // if args are provided, it should be all of them, but we can't rely on that
-            let self_params = type_params + const_params + lifetime_params;
-
-            let mut args = generic_args.args.iter().peekable();
-            for kind_id in def_generics.iter_self_id().take(self_params) {
-                let arg = args.peek();
-                let arg = match (kind_id, arg) {
-                    // Lifetimes can be inferred.
-                    // Once we have implemented lifetime inference correctly,
-                    // this should be handled in a proper way.
-                    (
-                        GenericParamId::LifetimeParamId(_),
-                        None | Some(GenericArg::Type(_) | GenericArg::Const(_)),
-                    ) => error_lifetime().cast(Interner),
-
-                    // If we run out of `generic_args`, stop pushing substs
-                    (_, None) => break,
-
-                    // Normal cases
-                    (_, Some(_)) => generic_arg_to_chalk(
-                        self.db,
-                        kind_id,
-                        args.next().unwrap(), // `peek()` is `Some(_)`, so guaranteed no panic
-                        self,
-                        &self.body.types,
-                        |this, type_ref| this.make_body_ty(type_ref),
-                        |this, c, ty| {
-                            const_or_path_to_chalk(
-                                this.db,
-                                &this.resolver,
-                                this.owner.into(),
-                                ty,
-                                c,
-                                ParamLoweringMode::Placeholder,
-                                || this.generics(),
-                                DebruijnIndex::INNERMOST,
-                            )
-                        },
-                        |this, lt_ref| this.make_body_lifetime(lt_ref),
-                    ),
-                };
+        struct LowererCtx<'a, 'b> {
+            ctx: &'a mut InferenceContext<'b>,
+            expr: ExprId,
+        }
 
-                substs.push(arg);
+        impl GenericArgsLowerer for LowererCtx<'_, '_> {
+            fn report_len_mismatch(
+                &mut self,
+                def: GenericDefId,
+                provided_count: u32,
+                expected_count: u32,
+                kind: IncorrectGenericsLenKind,
+            ) {
+                self.ctx.push_diagnostic(InferenceDiagnostic::MethodCallIncorrectGenericsLen {
+                    expr: self.expr,
+                    provided_count,
+                    expected_count,
+                    kind,
+                    def,
+                });
+            }
+
+            fn report_arg_mismatch(
+                &mut self,
+                param_id: GenericParamId,
+                arg_idx: u32,
+                has_self_arg: bool,
+            ) {
+                self.ctx.push_diagnostic(InferenceDiagnostic::MethodCallIncorrectGenericsOrder {
+                    expr: self.expr,
+                    param_id,
+                    arg_idx,
+                    has_self_arg,
+                });
             }
-        };
 
-        // Handle everything else as unknown. This also handles generic arguments for the method's
-        // parent (impl or trait), which should come after those for the method.
-        for (id, _data) in def_generics.iter().skip(substs.len()) {
-            match id {
-                GenericParamId::TypeParamId(_) => {
-                    substs.push(self.table.new_type_var().cast(Interner))
+            fn provided_kind(
+                &mut self,
+                param_id: GenericParamId,
+                param: GenericParamDataRef<'_>,
+                arg: &GenericArg,
+            ) -> crate::GenericArg {
+                match (param, arg) {
+                    (GenericParamDataRef::LifetimeParamData(_), GenericArg::Lifetime(lifetime)) => {
+                        self.ctx.make_body_lifetime(*lifetime).cast(Interner)
+                    }
+                    (GenericParamDataRef::TypeParamData(_), GenericArg::Type(type_ref)) => {
+                        self.ctx.make_body_ty(*type_ref).cast(Interner)
+                    }
+                    (GenericParamDataRef::ConstParamData(_), GenericArg::Const(konst)) => {
+                        let GenericParamId::ConstParamId(const_id) = param_id else {
+                            unreachable!("non-const param ID for const param");
+                        };
+                        let const_ty = self.ctx.db.const_param_ty(const_id);
+                        self.ctx.make_body_const(*konst, const_ty).cast(Interner)
+                    }
+                    _ => unreachable!("unmatching param kinds were passed to `provided_kind()`"),
                 }
-                GenericParamId::ConstParamId(id) => {
-                    substs.push(self.table.new_const_var(self.db.const_param_ty(id)).cast(Interner))
+            }
+
+            fn provided_type_like_const(
+                &mut self,
+                const_ty: Ty,
+                arg: TypeLikeConst<'_>,
+            ) -> crate::Const {
+                match arg {
+                    TypeLikeConst::Path(path) => self.ctx.make_path_as_body_const(path, const_ty),
+                    TypeLikeConst::Infer => self.ctx.table.new_const_var(const_ty),
+                }
+            }
+
+            fn inferred_kind(
+                &mut self,
+                _def: GenericDefId,
+                param_id: GenericParamId,
+                _param: GenericParamDataRef<'_>,
+                _infer_args: bool,
+                _preceding_args: &[crate::GenericArg],
+            ) -> crate::GenericArg {
+                // Always create an inference var, even when `infer_args == false`. This helps with diagnostics,
+                // and I think it's also required in the presence of `impl Trait` (that must be inferred).
+                match param_id {
+                    GenericParamId::TypeParamId(_) => self.ctx.table.new_type_var().cast(Interner),
+                    GenericParamId::ConstParamId(const_id) => self
+                        .ctx
+                        .table
+                        .new_const_var(self.ctx.db.const_param_ty(const_id))
+                        .cast(Interner),
+                    GenericParamId::LifetimeParamId(_) => {
+                        self.ctx.table.new_lifetime_var().cast(Interner)
+                    }
                 }
-                GenericParamId::LifetimeParamId(_) => {
-                    substs.push(self.table.new_lifetime_var().cast(Interner))
+            }
+
+            fn parent_arg(&mut self, param_id: GenericParamId) -> crate::GenericArg {
+                match param_id {
+                    GenericParamId::TypeParamId(_) => self.ctx.table.new_type_var().cast(Interner),
+                    GenericParamId::ConstParamId(const_id) => self
+                        .ctx
+                        .table
+                        .new_const_var(self.ctx.db.const_param_ty(const_id))
+                        .cast(Interner),
+                    GenericParamId::LifetimeParamId(_) => {
+                        self.ctx.table.new_lifetime_var().cast(Interner)
+                    }
                 }
             }
+
+            fn report_elided_lifetimes_in_path(
+                &mut self,
+                _def: GenericDefId,
+                _expected_count: u32,
+                _hard_error: bool,
+            ) {
+                unreachable!("we set `LifetimeElisionKind::Infer`")
+            }
+
+            fn report_elision_failure(&mut self, _def: GenericDefId, _expected_count: u32) {
+                unreachable!("we set `LifetimeElisionKind::Infer`")
+            }
+
+            fn report_missing_lifetime(&mut self, _def: GenericDefId, _expected_count: u32) {
+                unreachable!("we set `LifetimeElisionKind::Infer`")
+            }
         }
-        assert_eq!(substs.len(), total_len);
-        Substitution::from_iter(Interner, substs)
+
+        substs_from_args_and_bindings(
+            self.db,
+            self.body,
+            generic_args,
+            def,
+            true,
+            LifetimeElisionKind::Infer,
+            false,
+            None,
+            &mut LowererCtx { ctx: self, expr },
+        )
     }
 
     fn register_obligations_for_call(&mut self, callable_ty: &Ty) {
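
As a rough illustration of the callback pattern the rewritten `substs_for_method_call` relies on (explicitly written arguments and inferred placeholders are produced by separate hooks, and count/order problems are reported through dedicated methods), here is a minimal self-contained sketch. `ArgLowerer`, `Param`, and `Arg` are made-up stand-ins for `GenericArgsLowerer` and the hir-ty types, not the real API.

```rust
#[derive(Debug, Clone, Copy, PartialEq)]
enum Param { Type, Const, Lifetime }

#[derive(Debug, Clone, PartialEq)]
enum Arg { Type(String), Const(String), Lifetime(String), Infer(Param) }

trait ArgLowerer {
    /// Called when the user wrote an argument for this parameter.
    fn provided(&mut self, param: Param, raw: &str) -> Arg;
    /// Called when no argument was written and the value must be inferred.
    fn inferred(&mut self, param: Param) -> Arg;
    /// Called when more arguments were written than there are parameters.
    fn report_len_mismatch(&mut self, provided: usize, expected: usize);
}

/// Zip declared parameters with the provided arguments, falling back to the
/// `inferred` hook for anything the user left out.
fn lower_args<L: ArgLowerer>(params: &[Param], provided: &[&str], lowerer: &mut L) -> Vec<Arg> {
    if provided.len() > params.len() {
        lowerer.report_len_mismatch(provided.len(), params.len());
    }
    params
        .iter()
        .enumerate()
        .map(|(i, &param)| match provided.get(i) {
            Some(&raw) => lowerer.provided(param, raw),
            None => lowerer.inferred(param),
        })
        .collect()
}

struct SimpleLowerer;

impl ArgLowerer for SimpleLowerer {
    fn provided(&mut self, param: Param, raw: &str) -> Arg {
        match param {
            Param::Type => Arg::Type(raw.to_owned()),
            Param::Const => Arg::Const(raw.to_owned()),
            Param::Lifetime => Arg::Lifetime(raw.to_owned()),
        }
    }
    fn inferred(&mut self, param: Param) -> Arg {
        // Mirrors `inferred_kind` above: always fall back to an inference variable.
        Arg::Infer(param)
    }
    fn report_len_mismatch(&mut self, provided: usize, expected: usize) {
        eprintln!("expected at most {expected} generic arguments, got {provided}");
    }
}

fn main() {
    let params = [Param::Type, Param::Lifetime, Param::Const];
    let args = lower_args(&params, &["u32"], &mut SimpleLowerer);
    assert_eq!(args[0], Arg::Type("u32".into()));
    assert_eq!(args[1], Arg::Infer(Param::Lifetime));
    assert_eq!(args[2], Arg::Infer(Param::Const));
}
```
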
@@ -2264,7 +2199,7 @@ impl InferenceContext<'_> {
         if let TyKind::FnDef(fn_def, parameters) = callable_ty.kind(Interner) {
             let def: CallableDefId = from_chalk(self.db, *fn_def);
             let generic_predicates =
-                self.db.generic_predicates(GenericDefId::from_callable(self.db.upcast(), def));
+                self.db.generic_predicates(GenericDefId::from_callable(self.db, def));
             for predicate in generic_predicates.iter() {
                 let (predicate, binders) = predicate
                     .clone()
@@ -2276,15 +2211,14 @@ impl InferenceContext<'_> {
             // add obligation for trait implementation, if this is a trait method
             match def {
                 CallableDefId::FunctionId(f) => {
-                    if let ItemContainerId::TraitId(trait_) = f.lookup(self.db.upcast()).container {
+                    if let ItemContainerId::TraitId(trait_) = f.lookup(self.db).container {
                         // construct a TraitRef
-                        let params_len = parameters.len(Interner);
-                        let trait_params_len = generics(self.db.upcast(), trait_.into()).len();
+                        let trait_params_len = generics(self.db, trait_.into()).len();
                         let substs = Substitution::from_iter(
                             Interner,
-                            // The generic parameters for the trait come after those for the
-                            // function.
+                            // The generic parameters for the trait come before those for the
+                            // function.
-                            &parameters.as_slice(Interner)[params_len - trait_params_len..],
+                            &parameters.as_slice(Interner)[..trait_params_len],
                         );
                         self.push_obligation(
                             TraitRef { trait_id: to_chalk_trait_id(trait_), substitution: substs }
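
The slicing change here, and the matching ones in `infer/path.rs` further down (`[substs.len() - parent_params_len..]` becoming `[..parent_params_len]`, `skip(param_len)` becoming `take(parent_len)`), reflect that the generic arguments of the parent (the enclosing trait or impl) now sit at the front of a flat substitution rather than at the end. A tiny sketch of the new splitting, using made-up placeholder data:

```rust
// With parent-first ordering, the parent's (trait or impl) generic arguments
// occupy the front of a flat substitution, so old `len - parent_len..` slices
// become `..parent_len`.
fn split_substs<'a>(substs: &'a [&'a str], parent_len: usize) -> (&'a [&'a str], &'a [&'a str]) {
    (&substs[..parent_len], &substs[parent_len..])
}

fn main() {
    // Hypothetical flat substitution for a trait method call: the trait's
    // arguments first, then the method's own.
    let substs = ["Self=Vec<T>", "U", "V"];
    let (trait_args, method_args) = split_substs(&substs, 2);
    assert_eq!(trait_args, ["Self=Vec<T>", "U"]);
    assert_eq!(method_args, ["V"]);
}
```
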
@@ -2311,7 +2245,7 @@ impl InferenceContext<'_> {
             _ => return Default::default(),
         };
 
-        let data = self.db.function_data(func);
+        let data = self.db.function_signature(func);
         let Some(legacy_const_generics_indices) = &data.legacy_const_generics_indices else {
             return Default::default();
         };
@@ -2402,11 +2336,7 @@ impl InferenceContext<'_> {
             BinaryOp::Assignment { .. } => unreachable!("handled above"),
         };
 
-        if is_assign {
-            self.result.standard_types.unit.clone()
-        } else {
-            output_ty
-        }
+        if is_assign { self.result.standard_types.unit.clone() } else { output_ty }
     }
 
     fn is_builtin_binop(&mut self, lhs: &Ty, rhs: &Ty, op: BinaryOp) -> bool {
@@ -2468,7 +2398,7 @@ impl InferenceContext<'_> {
         }
     }
 
-    fn with_breakable_ctx<T>(
+    pub(super) fn with_breakable_ctx<T>(
         &mut self,
         kind: BreakableKind,
         ty: Option<Ty>,
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs
index d74a383f44ef4..cf0152ecd263e 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs
@@ -1,7 +1,7 @@
-//! Finds if an expression is an immutable context or a mutable context, which is used in selecting
-//! between `Deref` and `DerefMut` or `Index` and `IndexMut` or similar.
+//! Determines whether an expression occurs in an immutable or a mutable context, which is used to
+//! select between `Deref`/`DerefMut`, `Index`/`IndexMut`, and similar trait pairs.
 
-use chalk_ir::{cast::Cast, Mutability};
+use chalk_ir::{Mutability, cast::Cast};
 use hir_def::{
     hir::{
         Array, AsmOperand, BinaryOp, BindingAnnotation, Expr, ExprId, Pat, PatId, Statement,
@@ -13,9 +13,9 @@ use hir_expand::name::Name;
 use intern::sym;
 
 use crate::{
-    infer::{expr::ExprIsRead, Expectation, InferenceContext},
-    lower::lower_to_chalk_mutability,
     Adjust, Adjustment, AutoBorrow, Interner, OverloadedDeref, TyBuilder, TyKind,
+    infer::{Expectation, InferenceContext, expr::ExprIsRead},
+    lower::lower_to_chalk_mutability,
 };
 
 impl InferenceContext<'_> {
@@ -69,8 +69,7 @@ impl InferenceContext<'_> {
                 }
             }
             Expr::Const(id) => {
-                let loc = self.db.lookup_intern_anonymous_const(*id);
-                self.infer_mut_expr(loc.root, Mutability::Not);
+                self.infer_mut_expr(*id, Mutability::Not);
             }
             Expr::Let { pat, expr } => self.infer_mut_expr(*expr, self.pat_bound_mutability(*pat)),
             Expr::Block { id: _, statements, tail, label: _ }
@@ -134,8 +133,8 @@ impl InferenceContext<'_> {
                         {
                             if let Some(index_fn) = self
                                 .db
-                                .trait_data(index_trait)
-                                .method_by_name(&Name::new_symbol_root(sym::index_mut.clone()))
+                                .trait_items(index_trait)
+                                .method_by_name(&Name::new_symbol_root(sym::index_mut))
                             {
                                 *f = index_fn;
                                 let mut base_ty = None;
@@ -201,8 +200,8 @@ impl InferenceContext<'_> {
                                 mutability = Mutability::Not;
                             } else if let Some(deref_fn) = self
                                 .db
-                                .trait_data(deref_trait)
-                                .method_by_name(&Name::new_symbol_root(sym::deref_mut.clone()))
+                                .trait_items(deref_trait)
+                                .method_by_name(&Name::new_symbol_root(sym::deref_mut))
                             {
                                 *f = deref_fn;
                             }
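
For context on what this module computes: the same indexing or dereferencing syntax dispatches to `Index`/`Deref` in an immutable context and to `IndexMut`/`DerefMut` in a mutable one, and rust-analyzer has to make the same choice when resolving the overloaded operator. A small example in ordinary stable Rust (independent of the code in this patch) that makes the selection visible:

```rust
use std::ops::{Index, IndexMut};

// A wrapper that reports whether `Index` or `IndexMut` was chosen.
struct Tracked(Vec<i32>);

impl Index<usize> for Tracked {
    type Output = i32;
    fn index(&self, i: usize) -> &i32 {
        println!("Index (immutable context)");
        &self.0[i]
    }
}

impl IndexMut<usize> for Tracked {
    fn index_mut(&mut self, i: usize) -> &mut i32 {
        println!("IndexMut (mutable context)");
        &mut self.0[i]
    }
}

fn main() {
    let mut t = Tracked(vec![1, 2, 3]);
    let _read = t[0]; // read of the place: `Index::index`
    t[1] = 42; // assignment target: `IndexMut::index_mut`
    let elem = &mut t[2]; // `&mut` borrow of the place: `IndexMut::index_mut`
    *elem += 1;
}
```
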
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs
index db93116f1071a..dc1de3b9e8515 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs
@@ -3,24 +3,24 @@
 use std::iter::repeat_with;
 
 use hir_def::{
-    expr_store::Body,
-    hir::{Binding, BindingAnnotation, BindingId, Expr, ExprId, Literal, Pat, PatId},
-    path::Path,
     HasModule,
+    expr_store::{Body, path::Path},
+    hir::{Binding, BindingAnnotation, BindingId, Expr, ExprId, Literal, Pat, PatId},
 };
 use hir_expand::name::Name;
 use stdx::TupleExt;
 
 use crate::{
+    DeclContext, DeclOrigin, InferenceDiagnostic, Interner, Mutability, Scalar, Substitution, Ty,
+    TyBuilder, TyExt, TyKind,
     consteval::{self, try_const_usize, usize_const},
     infer::{
-        coerce::CoerceNever, expr::ExprIsRead, BindingMode, Expectation, InferenceContext,
-        TypeMismatch,
+        BindingMode, Expectation, InferenceContext, TypeMismatch, coerce::CoerceNever,
+        expr::ExprIsRead,
     },
     lower::lower_to_chalk_mutability,
     primitive::UintTy,
-    static_lifetime, DeclContext, DeclOrigin, InferenceDiagnostic, Interner, Mutability, Scalar,
-    Substitution, Ty, TyBuilder, TyExt, TyKind,
+    static_lifetime,
 };
 
 impl InferenceContext<'_> {
@@ -38,7 +38,7 @@ impl InferenceContext<'_> {
         decl: Option<DeclContext>,
     ) -> Ty {
         let (ty, def) = self.resolve_variant(id.into(), path, true);
-        let var_data = def.map(|it| it.variant_data(self.db.upcast()));
+        let var_data = def.map(|it| it.variant_data(self.db));
         if let Some(variant) = def {
             self.write_variant_resolution(id.into(), variant);
         }
@@ -60,7 +60,7 @@ impl InferenceContext<'_> {
             _ if subs.is_empty() => {}
             Some(def) => {
                 let field_types = self.db.field_types(def);
-                let variant_data = def.variant_data(self.db.upcast());
+                let variant_data = def.variant_data(self.db);
                 let visibilities = self.db.field_visibilities(def);
 
                 let (pre, post) = match ellipsis {
@@ -79,7 +79,7 @@ impl InferenceContext<'_> {
                         match variant_data.field(&Name::new_tuple_field(i)) {
                             Some(local_id) => {
                                 if !visibilities[local_id]
-                                    .is_visible_from(self.db.upcast(), self.resolver.module())
+                                    .is_visible_from(self.db, self.resolver.module())
                                 {
                                     // FIXME(DIAGNOSE): private tuple field
                                 }
@@ -129,7 +129,7 @@ impl InferenceContext<'_> {
             _ if subs.len() == 0 => {}
             Some(def) => {
                 let field_types = self.db.field_types(def);
-                let variant_data = def.variant_data(self.db.upcast());
+                let variant_data = def.variant_data(self.db);
                 let visibilities = self.db.field_visibilities(def);
 
                 let substs = ty.as_adt().map(TupleExt::tail);
@@ -139,7 +139,7 @@ impl InferenceContext<'_> {
                         match variant_data.field(&name) {
                             Some(local_id) => {
                                 if !visibilities[local_id]
-                                    .is_visible_from(self.db.upcast(), self.resolver.module())
+                                    .is_visible_from(self.db, self.resolver.module())
                                 {
                                     self.push_diagnostic(InferenceDiagnostic::NoSuchField {
                                         field: inner.into(),
@@ -594,8 +594,7 @@ impl InferenceContext<'_> {
         }
 
         let len = before.len() + suffix.len();
-        let size =
-            consteval::usize_const(self.db, Some(len as u128), self.owner.krate(self.db.upcast()));
+        let size = consteval::usize_const(self.db, Some(len as u128), self.owner.krate(self.db));
 
         let elem_ty = self.table.new_type_var();
         let array_ty = TyKind::Array(elem_ty.clone(), size).intern(Interner);
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs
index 6254bc12392b0..9d4bbe53464dc 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs
@@ -2,21 +2,23 @@
 
 use chalk_ir::cast::Cast;
 use hir_def::{
-    path::{Path, PathSegment},
-    resolver::{ResolveValueResult, TypeNs, ValueNs},
     AdtId, AssocItemId, GenericDefId, ItemContainerId, Lookup,
+    expr_store::path::{Path, PathSegment},
+    resolver::{ResolveValueResult, TypeNs, ValueNs},
 };
 use hir_expand::name::Name;
 use stdx::never;
 
 use crate::{
+    InferenceDiagnostic, Interner, Substitution, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt,
+    TyKind, ValueTyDefId,
     builder::ParamKind,
     consteval, error_lifetime,
     generics::generics,
     infer::diagnostics::InferenceTyLoweringContext as TyLoweringContext,
+    lower::LifetimeElisionKind,
     method_resolution::{self, VisibleFromModule},
-    to_chalk_trait_id, InferenceDiagnostic, Interner, Substitution, TraitRef, TraitRefExt, Ty,
-    TyBuilder, TyExt, TyKind, ValueTyDefId,
+    to_chalk_trait_id,
 };
 
 use super::{ExprOrPatId, InferenceContext, InferenceTyDiagnosticSource};
@@ -63,10 +65,10 @@ impl InferenceContext<'_> {
                         never!("uninferred pattern?");
                         None
                     }
-                }
+                };
             }
             ValueNs::ImplSelf(impl_id) => {
-                let generics = crate::generics::generics(self.db.upcast(), impl_id.into());
+                let generics = crate::generics::generics(self.db, impl_id.into());
                 let substs = generics.placeholder_subst(self.db);
                 let ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs);
                 return if let Some((AdtId::StructId(struct_id), substs)) = ty.as_adt() {
@@ -81,7 +83,7 @@ impl InferenceContext<'_> {
                 };
             }
             ValueNs::GenericParam(it) => {
-                return Some(ValuePathResolution::NonGeneric(self.db.const_param_ty(it)))
+                return Some(ValuePathResolution::NonGeneric(self.db.const_param_ty(it)));
             }
         };
 
@@ -99,16 +101,14 @@ impl InferenceContext<'_> {
             if let Some(last_segment) = last_segment {
                 path_ctx.set_current_segment(last_segment)
             }
-            path_ctx.substs_from_path(value_def, true)
+            path_ctx.substs_from_path(value_def, true, false)
         });
         let substs = substs.as_slice(Interner);
 
         if let ValueNs::EnumVariantId(_) = value {
-            let mut it = self_subst
-                .as_ref()
-                .map_or(&[][..], |s| s.as_slice(Interner))
+            let mut it = substs
                 .iter()
-                .chain(substs)
+                .chain(self_subst.as_ref().map_or(&[][..], |s| s.as_slice(Interner)))
                 .cloned();
             let builder = TyBuilder::subst_for_def(self.db, generic_def, None);
             let substs = builder
@@ -127,13 +127,13 @@ impl InferenceContext<'_> {
         }
 
         let parent_substs = self_subst.or_else(|| {
-            let generics = generics(self.db.upcast(), generic_def);
+            let generics = generics(self.db, generic_def);
             let parent_params_len = generics.parent_generics()?.len();
-            let parent_args = &substs[substs.len() - parent_params_len..];
+            let parent_args = &substs[..parent_params_len];
             Some(Substitution::from_iter(Interner, parent_args))
         });
         let parent_substs_len = parent_substs.as_ref().map_or(0, |s| s.len(Interner));
-        let mut it = substs.iter().take(substs.len() - parent_substs_len).cloned();
+        let mut it = substs.iter().skip(parent_substs_len).cloned();
         let builder = TyBuilder::subst_for_def(self.db, generic_def, parent_substs);
         let substs = builder
             .fill(|x| {
@@ -158,10 +158,11 @@ impl InferenceContext<'_> {
         let mut ctx = TyLoweringContext::new(
             self.db,
             &self.resolver,
-            &self.body.types,
-            self.owner.into(),
+            self.body,
             &self.diagnostics,
             InferenceTyDiagnosticSource::Body,
+            self.generic_def,
+            LifetimeElisionKind::Infer,
         );
         let mut path_ctx = if no_diagnostics {
             ctx.at_path_forget_diagnostics(path)
@@ -176,7 +177,7 @@ impl InferenceContext<'_> {
             let ty = self.table.normalize_associated_types_in(ty);
 
             path_ctx.ignore_last_segment();
-            let (ty, _) = path_ctx.lower_ty_relative_path(ty, orig_ns);
+            let (ty, _) = path_ctx.lower_ty_relative_path(ty, orig_ns, true);
             drop_ctx(ctx, no_diagnostics);
             let ty = self.table.insert_type_vars(ty);
             let ty = self.table.normalize_associated_types_in(ty);
@@ -206,7 +207,7 @@ impl InferenceContext<'_> {
                         (TypeNs::TraitId(trait_), true) => {
                             let self_ty = self.table.new_type_var();
                             let trait_ref =
-                                path_ctx.lower_trait_ref_from_resolved_path(trait_, self_ty);
+                                path_ctx.lower_trait_ref_from_resolved_path(trait_, self_ty, true);
                             drop_ctx(ctx, no_diagnostics);
                             self.resolve_trait_assoc_item(trait_ref, last_segment, id)
                         }
@@ -254,15 +255,15 @@ impl InferenceContext<'_> {
 
         // We need to add `Self: Trait` obligation when `def` is a trait assoc item.
         let container = match def {
-            GenericDefId::FunctionId(id) => id.lookup(self.db.upcast()).container,
-            GenericDefId::ConstId(id) => id.lookup(self.db.upcast()).container,
+            GenericDefId::FunctionId(id) => id.lookup(self.db).container,
+            GenericDefId::ConstId(id) => id.lookup(self.db).container,
             _ => return,
         };
 
         if let ItemContainerId::TraitId(trait_) = container {
-            let param_len = generics(self.db.upcast(), def).len_self();
+            let parent_len = generics(self.db, def).parent_generics().map_or(0, |g| g.len_self());
             let parent_subst =
-                Substitution::from_iter(Interner, subst.iter(Interner).skip(param_len));
+                Substitution::from_iter(Interner, subst.iter(Interner).take(parent_len));
             let trait_ref =
                 TraitRef { trait_id: to_chalk_trait_id(trait_), substitution: parent_subst };
             self.push_obligation(trait_ref.cast(Interner));
@@ -277,10 +278,10 @@ impl InferenceContext<'_> {
     ) -> Option<(ValueNs, Substitution)> {
         let trait_ = trait_ref.hir_trait_id();
         let item =
-            self.db.trait_data(trait_).items.iter().map(|(_name, id)| *id).find_map(|item| {
+            self.db.trait_items(trait_).items.iter().map(|(_name, id)| *id).find_map(|item| {
                 match item {
                     AssocItemId::FunctionId(func) => {
-                        if segment.name == &self.db.function_data(func).name {
+                        if segment.name == &self.db.function_signature(func).name {
                             Some(AssocItemId::FunctionId(func))
                         } else {
                             None
@@ -288,7 +289,7 @@ impl InferenceContext<'_> {
                     }
 
                     AssocItemId::ConstId(konst) => {
-                        if self.db.const_data(konst).name.as_ref() == Some(segment.name) {
+                        if self.db.const_signature(konst).name.as_ref() == Some(segment.name) {
                             Some(AssocItemId::ConstId(konst))
                         } else {
                             None
@@ -350,10 +351,8 @@ impl InferenceContext<'_> {
         let (item, visible) = res?;
 
         let (def, container) = match item {
-            AssocItemId::FunctionId(f) => {
-                (ValueNs::FunctionId(f), f.lookup(self.db.upcast()).container)
-            }
-            AssocItemId::ConstId(c) => (ValueNs::ConstId(c), c.lookup(self.db.upcast()).container),
+            AssocItemId::FunctionId(f) => (ValueNs::FunctionId(f), f.lookup(self.db).container),
+            AssocItemId::ConstId(c) => (ValueNs::ConstId(c), c.lookup(self.db).container),
             AssocItemId::TypeAliasId(_) => unreachable!(),
         };
         let substs = match container {
@@ -398,7 +397,7 @@ impl InferenceContext<'_> {
             Some((AdtId::EnumId(e), subst)) => (e, subst),
             _ => return None,
         };
-        let enum_data = self.db.enum_data(enum_id);
+        let enum_data = self.db.enum_variants(enum_id);
         let variant = enum_data.variant(name)?;
         self.write_variant_resolution(id, variant.into());
         Some((ValueNs::EnumVariantId(variant), subst.clone()))
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
index 8a8992cf372da..60aa9b5a17a87 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
@@ -1,15 +1,15 @@
 //! Unification and canonicalization logic.
 
-use std::{fmt, iter, mem};
+use std::{fmt, mem};
 
 use chalk_ir::{
-    cast::Cast, fold::TypeFoldable, interner::HasInterner, zip::Zip, CanonicalVarKind, FloatTy,
-    IntTy, TyVariableKind, UniverseIndex,
+    CanonicalVarKind, FloatTy, IntTy, TyVariableKind, UniverseIndex, cast::Cast,
+    fold::TypeFoldable, interner::HasInterner, zip::Zip,
 };
 use chalk_solve::infer::ParameterEnaVariableExt;
 use either::Either;
 use ena::unify::UnifyKey;
-use hir_def::{lang_item::LangItem, AdtId};
+use hir_def::{AdtId, lang_item::LangItem};
 use hir_expand::name::Name;
 use intern::sym;
 use rustc_hash::FxHashMap;
@@ -18,12 +18,12 @@ use triomphe::Arc;
 
 use super::{InferOk, InferResult, InferenceContext, TypeError};
 use crate::{
+    AliasEq, AliasTy, BoundVar, Canonical, Const, ConstValue, DebruijnIndex, DomainGoal,
+    GenericArg, GenericArgData, Goal, GoalData, Guidance, InEnvironment, InferenceVar, Interner,
+    Lifetime, OpaqueTyId, ParamKind, ProjectionTy, ProjectionTyExt, Scalar, Solution, Substitution,
+    TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, TyKind, VariableKind, WhereClause,
     consteval::unknown_const, db::HirDatabase, fold_generic_args, fold_tys_and_consts,
-    to_chalk_trait_id, traits::FnTrait, AliasEq, AliasTy, BoundVar, Canonical, Const, ConstValue,
-    DebruijnIndex, DomainGoal, GenericArg, GenericArgData, Goal, GoalData, Guidance, InEnvironment,
-    InferenceVar, Interner, Lifetime, OpaqueTyId, ParamKind, ProjectionTy, ProjectionTyExt, Scalar,
-    Solution, Substitution, TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, TyKind, VariableKind,
-    WhereClause,
+    to_chalk_trait_id, traits::FnTrait,
 };
 
 impl InferenceContext<'_> {
@@ -364,6 +364,64 @@ impl<'a> InferenceTable<'a> {
         )
     }
 
+    /// Works almost the same as [`Self::normalize_associated_types_in`], but this also
+    /// shallowly resolves inference variables.
+    pub(crate) fn eagerly_normalize_and_resolve_shallow_in<T>(&mut self, ty: T) -> T
+    where
+        T: HasInterner<Interner = Interner> + TypeFoldable<Interner>,
+    {
+        fn eagerly_resolve_ty<const N: usize>(
+            table: &mut InferenceTable<'_>,
+            ty: Ty,
+            mut tys: SmallVec<[Ty; N]>,
+        ) -> Ty {
+            if tys.contains(&ty) {
+                return ty;
+            }
+            tys.push(ty.clone());
+
+            match ty.kind(Interner) {
+                TyKind::Alias(AliasTy::Projection(proj_ty)) => {
+                    let ty = table.normalize_projection_ty(proj_ty.clone());
+                    eagerly_resolve_ty(table, ty, tys)
+                }
+                TyKind::InferenceVar(..) => {
+                    let ty = table.resolve_ty_shallow(&ty);
+                    eagerly_resolve_ty(table, ty, tys)
+                }
+                _ => ty,
+            }
+        }
+
+        fold_tys_and_consts(
+            ty,
+            |e, _| match e {
+                Either::Left(ty) => {
+                    Either::Left(eagerly_resolve_ty::<8>(self, ty, SmallVec::new()))
+                }
+                Either::Right(c) => Either::Right(match &c.data(Interner).value {
+                    chalk_ir::ConstValue::Concrete(cc) => match &cc.interned {
+                        crate::ConstScalar::UnevaluatedConst(c_id, subst) => {
+                            // FIXME: same as `normalize_associated_types_in`
+                            if subst.len(Interner) == 0 {
+                                if let Ok(eval) = self.db.const_eval(*c_id, subst.clone(), None) {
+                                    eval
+                                } else {
+                                    unknown_const(c.data(Interner).ty.clone())
+                                }
+                            } else {
+                                unknown_const(c.data(Interner).ty.clone())
+                            }
+                        }
+                        _ => c,
+                    },
+                    _ => c,
+                }),
+            },
+            DebruijnIndex::INNERMOST,
+        )
+    }
+
     pub(crate) fn normalize_projection_ty(&mut self, proj_ty: ProjectionTy) -> Ty {
         let var = self.new_type_var();
         let alias_eq = AliasEq { alias: AliasTy::Projection(proj_ty), ty: var.clone() };
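
The new `eagerly_normalize_and_resolve_shallow_in` above alternates two rewrite steps (normalize a projection, shallowly resolve an inference variable) and remembers the types it has already visited so a cycle cannot loop forever. A generic sketch of that fixpoint-with-a-seen-list shape, with a toy rewrite step standing in for normalization:

```rust
// Apply `step` repeatedly until it reports nothing left to rewrite, bailing
// out if a value repeats (the cycle guard played by `tys` in the patch).
fn rewrite_to_fixpoint<T, F>(mut value: T, mut step: F) -> T
where
    T: Clone + PartialEq,
    F: FnMut(&T) -> Option<T>, // `None` means "fully resolved"
{
    let mut seen = vec![value.clone()];
    while let Some(next) = step(&value) {
        if seen.contains(&next) {
            break; // a cycle: keep what we have instead of looping forever
        }
        seen.push(next.clone());
        value = next;
    }
    value
}

fn main() {
    // Toy rewrite: halve even numbers; odd numbers count as fully resolved.
    let result = rewrite_to_fixpoint(40u32, |&n| if n % 2 == 0 { Some(n / 2) } else { None });
    assert_eq!(result, 5);
}
```
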
@@ -386,7 +444,7 @@ impl<'a> InferenceTable<'a> {
     }
     fn extend_type_variable_table(&mut self, to_index: usize) {
         let count = to_index - self.type_variable_table.len() + 1;
-        self.type_variable_table.extend(iter::repeat(TypeVariableFlags::default()).take(count));
+        self.type_variable_table.extend(std::iter::repeat_n(TypeVariableFlags::default(), count));
     }
 
     fn new_var(&mut self, kind: TyVariableKind, diverging: bool) -> Ty {
@@ -795,13 +853,13 @@ impl<'a> InferenceTable<'a> {
         num_args: usize,
     ) -> Option<(FnTrait, Vec<Ty>, Ty)> {
         for (fn_trait_name, output_assoc_name, subtraits) in [
-            (FnTrait::FnOnce, sym::Output.clone(), &[FnTrait::Fn, FnTrait::FnMut][..]),
-            (FnTrait::AsyncFnMut, sym::CallRefFuture.clone(), &[FnTrait::AsyncFn]),
-            (FnTrait::AsyncFnOnce, sym::CallOnceFuture.clone(), &[]),
+            (FnTrait::FnOnce, sym::Output, &[FnTrait::Fn, FnTrait::FnMut][..]),
+            (FnTrait::AsyncFnMut, sym::CallRefFuture, &[FnTrait::AsyncFn]),
+            (FnTrait::AsyncFnOnce, sym::CallOnceFuture, &[]),
         ] {
             let krate = self.trait_env.krate;
             let fn_trait = fn_trait_name.get_id(self.db, krate)?;
-            let trait_data = self.db.trait_data(fn_trait);
+            let trait_data = self.db.trait_items(fn_trait);
             let output_assoc_type =
                 trait_data.associated_type_by_name(&Name::new_symbol_root(output_assoc_name))?;
 
@@ -890,11 +948,7 @@ impl<'a> InferenceTable<'a> {
             TyKind::Error => self.new_type_var(),
             TyKind::InferenceVar(..) => {
                 let ty_resolved = self.resolve_ty_shallow(&ty);
-                if ty_resolved.is_unknown() {
-                    self.new_type_var()
-                } else {
-                    ty
-                }
+                if ty_resolved.is_unknown() { self.new_type_var() } else { ty }
             }
             _ => ty,
         }
@@ -922,15 +976,33 @@ impl<'a> InferenceTable<'a> {
 
-    /// Check if given type is `Sized` or not
+    /// Check whether the given type is `Sized`.
     pub(crate) fn is_sized(&mut self, ty: &Ty) -> bool {
+        fn short_circuit_trivial_tys(ty: &Ty) -> Option<bool> {
+            match ty.kind(Interner) {
+                TyKind::Scalar(..)
+                | TyKind::Ref(..)
+                | TyKind::Raw(..)
+                | TyKind::Never
+                | TyKind::FnDef(..)
+                | TyKind::Array(..)
+                | TyKind::Function(..) => Some(true),
+                TyKind::Slice(..) | TyKind::Str | TyKind::Dyn(..) => Some(false),
+                _ => None,
+            }
+        }
+
         let mut ty = ty.clone();
+        ty = self.eagerly_normalize_and_resolve_shallow_in(ty);
+        if let Some(sized) = short_circuit_trivial_tys(&ty) {
+            return sized;
+        }
+
         {
             let mut structs = SmallVec::<[_; 8]>::new();
-            // Must use a loop here and not recursion because otherwise users will conduct completely
-            // artificial examples of structs that have themselves as the tail field and complain r-a crashes.
+            // Use a loop rather than recursion: otherwise contrived structs that use themselves as
+            // their own tail field would make this recurse without bound and crash r-a.
             while let Some((AdtId::StructId(id), subst)) = ty.as_adt() {
-                let struct_data = self.db.struct_data(id);
-                if let Some((last_field, _)) = struct_data.variant_data.fields().iter().next_back()
-                {
+                let struct_data = self.db.variant_fields(id.into());
+                if let Some((last_field, _)) = struct_data.fields().iter().next_back() {
                     let last_field_ty = self.db.field_types(id.into())[last_field]
                         .clone()
                         .substitute(Interner, subst);
@@ -942,26 +1014,16 @@ impl<'a> InferenceTable<'a> {
                     // Structs can have DST as its last field and such cases are not handled
                     // as unsized by the chalk, so we do this manually.
                     ty = last_field_ty;
+                    ty = self.eagerly_normalize_and_resolve_shallow_in(ty);
+                    if let Some(sized) = short_circuit_trivial_tys(&ty) {
+                        return sized;
+                    }
                 } else {
                     break;
                 };
             }
         }
 
-        // Early return for some obvious types
-        if matches!(
-            ty.kind(Interner),
-            TyKind::Scalar(..)
-                | TyKind::Ref(..)
-                | TyKind::Raw(..)
-                | TyKind::Never
-                | TyKind::FnDef(..)
-                | TyKind::Array(..)
-                | TyKind::Function(_)
-        ) {
-            return true;
-        }
-
         let Some(sized) = self
             .db
             .lang_item(self.trait_env.krate, LangItem::Sized)
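
`is_sized` now resolves and normalizes the type up front, answers immediately for types whose sizedness is structurally obvious, and otherwise iterates into a struct's last field, since only the tail field can make a struct unsized. A toy model of that control flow (the `Ty` representation below is invented for illustration, not the chalk one):

```rust
enum Ty {
    Scalar,
    Slice,
    Str,
    Struct { tail: Option<Box<Ty>> }, // only the last field can affect sizedness
}

// Types whose sizedness is structurally obvious, as in `short_circuit_trivial_tys`.
fn short_circuit(ty: &Ty) -> Option<bool> {
    match ty {
        Ty::Scalar => Some(true),
        Ty::Slice | Ty::Str => Some(false),
        Ty::Struct { .. } => None, // depends on the tail field
    }
}

fn is_sized(mut ty: Ty) -> bool {
    // A loop (not recursion) over the chain of tail fields, as in the patch.
    loop {
        if let Some(answer) = short_circuit(&ty) {
            return answer;
        }
        ty = match ty {
            Ty::Struct { tail: Some(tail) } => *tail,
            Ty::Struct { tail: None } => return true, // no fields at all: sized
            _ => unreachable!("short_circuit answered for every non-struct type"),
        };
    }
}

fn main() {
    // struct Wrapper { header: u32, data: [u8] } is unsized because of its tail.
    assert!(!is_sized(Ty::Struct { tail: Some(Box::new(Ty::Slice)) }));
    assert!(is_sized(Ty::Struct { tail: Some(Box::new(Ty::Scalar)) }));
    assert!(!is_sized(Ty::Str));
}
```
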
@@ -1032,7 +1094,7 @@ mod resolve {
                     .assert_ty_ref(Interner)
                     .clone();
             }
-            let result = if let Some(known_ty) = self.table.var_unification_table.probe_var(var) {
+            if let Some(known_ty) = self.table.var_unification_table.probe_var(var) {
                 // known_ty may contain other variables that are known by now
                 self.var_stack.push(var);
                 let result = known_ty.fold_with(self, outer_binder);
@@ -1043,8 +1105,7 @@ mod resolve {
                 (self.fallback)(var, VariableKind::Ty(kind), default, outer_binder)
                     .assert_ty_ref(Interner)
                     .clone()
-            };
-            result
+            }
         }
 
         fn fold_inference_const(
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs b/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs
index d6039c548b6f5..e0c3279d3fb01 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs
@@ -2,14 +2,14 @@
 use std::ops::ControlFlow::{self, Break, Continue};
 
 use chalk_ir::{
-    visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor},
     DebruijnIndex,
+    visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor},
 };
-use hir_def::{visibility::Visibility, AdtId, EnumVariantId, ModuleId, VariantId};
+use hir_def::{AdtId, EnumVariantId, ModuleId, VariantId, visibility::Visibility};
 use rustc_hash::FxHashSet;
 
 use crate::{
-    consteval::try_const_usize, db::HirDatabase, Binders, Interner, Substitution, Ty, TyKind,
+    Binders, Interner, Substitution, Ty, TyKind, consteval::try_const_usize, db::HirDatabase,
 };
 
 // FIXME: Turn this into a query, it can be quite slow
@@ -98,7 +98,7 @@ impl UninhabitedFrom<'_> {
             AdtId::UnionId(_) => CONTINUE_OPAQUELY_INHABITED,
             AdtId::StructId(s) => self.visit_variant(s.into(), subst),
             AdtId::EnumId(e) => {
-                let enum_data = self.db.enum_data(e);
+                let enum_data = self.db.enum_variants(e);
 
                 for &(variant, _) in enum_data.variants.iter() {
                     let variant_inhabitedness = self.visit_variant(variant.into(), subst);
@@ -117,7 +117,7 @@ impl UninhabitedFrom<'_> {
         variant: VariantId,
         subst: &Substitution,
     ) -> ControlFlow<VisiblyUninhabited> {
-        let variant_data = self.db.variant_data(variant);
+        let variant_data = self.db.variant_fields(variant);
         let fields = variant_data.fields();
         if fields.is_empty() {
             return CONTINUE_OPAQUELY_INHABITED;
@@ -139,7 +139,7 @@ impl UninhabitedFrom<'_> {
         ty: &Binders<Ty>,
         subst: &Substitution,
     ) -> ControlFlow<VisiblyUninhabited> {
-        if vis.is_none_or(|it| it.is_visible_from(self.db.upcast(), self.target_mod)) {
+        if vis.is_none_or(|it| it.is_visible_from(self.db, self.target_mod)) {
             let ty = ty.clone().substitute(Interner, subst);
             ty.visit_with(self, DebruijnIndex::INNERMOST)
         } else {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs b/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs
index 804c3aea3a5c9..fecb3f4242a92 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs
@@ -2,16 +2,15 @@
 //! representation of the various objects Chalk deals with (types, goals etc.).
 
 use crate::{
-    chalk_db, tls, AliasTy, CanonicalVarKind, CanonicalVarKinds, ClosureId, Const, ConstData,
-    ConstScalar, Constraint, Constraints, FnAbi, FnDefId, GenericArg, GenericArgData, Goal,
-    GoalData, Goals, InEnvironment, Lifetime, LifetimeData, OpaqueTy, OpaqueTyId, ProgramClause,
-    ProgramClauseData, ProgramClauses, ProjectionTy, QuantifiedWhereClause, QuantifiedWhereClauses,
-    Substitution, Ty, TyData, TyKind, VariableKind, VariableKinds,
+    AliasTy, CanonicalVarKind, CanonicalVarKinds, ClosureId, Const, ConstData, ConstScalar,
+    Constraint, Constraints, FnAbi, FnDefId, GenericArg, GenericArgData, Goal, GoalData, Goals,
+    InEnvironment, Lifetime, LifetimeData, OpaqueTy, OpaqueTyId, ProgramClause, ProgramClauseData,
+    ProgramClauses, ProjectionTy, QuantifiedWhereClause, QuantifiedWhereClauses, Substitution, Ty,
+    TyData, TyKind, VariableKind, VariableKinds, chalk_db, tls,
 };
-use base_db::ra_salsa::InternId;
 use chalk_ir::{ProgramClauseImplication, SeparatorTraitRef, Variance};
 use hir_def::TypeAliasId;
-use intern::{impl_internable, Interned};
+use intern::{Interned, impl_internable};
 use smallvec::SmallVec;
 use std::fmt;
 use triomphe::Arc;
@@ -44,7 +43,7 @@ impl_internable!(
     InternedWrapper<ConstData>,
     InternedWrapper<ConstScalar>,
     InternedWrapper<Vec<CanonicalVarKind>>,
-    InternedWrapper<Vec<ProgramClause>>,
+    InternedWrapper<Box<[ProgramClause]>>,
     InternedWrapper<Vec<QuantifiedWhereClause>>,
     InternedWrapper<SmallVec<[Variance; 16]>>,
 );
@@ -61,14 +60,14 @@ impl chalk_ir::interner::Interner for Interner {
     type InternedGoal = Arc<GoalData>;
     type InternedGoals = Vec<Goal>;
     type InternedSubstitution = Interned<InternedWrapper<SmallVec<[GenericArg; 2]>>>;
-    type InternedProgramClauses = Interned<InternedWrapper<Vec<ProgramClause>>>;
+    type InternedProgramClauses = Interned<InternedWrapper<Box<[ProgramClause]>>>;
     type InternedProgramClause = ProgramClauseData;
     type InternedQuantifiedWhereClauses = Interned<InternedWrapper<Vec<QuantifiedWhereClause>>>;
     type InternedVariableKinds = Interned<InternedWrapper<Vec<VariableKind>>>;
     type InternedCanonicalVarKinds = Interned<InternedWrapper<Vec<CanonicalVarKind>>>;
     type InternedConstraints = Vec<InEnvironment<Constraint>>;
     type InternedVariances = SmallVec<[Variance; 16]>;
-    type DefId = InternId;
+    type DefId = salsa::Id;
     type InternedAdtId = hir_def::AdtId;
     type Identifier = TypeAliasId;
     type FnAbi = FnAbi;
@@ -98,7 +97,7 @@ impl chalk_ir::interner::Interner for Interner {
         opaque_ty_id: OpaqueTyId,
         fmt: &mut fmt::Formatter<'_>,
     ) -> Option<fmt::Result> {
-        Some(write!(fmt, "OpaqueTy#{}", opaque_ty_id.0))
+        Some(write!(fmt, "OpaqueTy#{:?}", opaque_ty_id.0))
     }
 
     fn debug_fn_def_id(fn_def_id: FnDefId, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lang_items.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lang_items.rs
index ff9c52fbb6c17..3ef7f50c9a2ea 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lang_items.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lang_items.rs
@@ -1,6 +1,6 @@
 //! Functions to detect special lang items
 
-use hir_def::{data::adt::StructFlags, lang_item::LangItem, AdtId};
+use hir_def::{AdtId, lang_item::LangItem, signatures::StructFlags};
 use hir_expand::name::Name;
 use intern::sym;
 
@@ -8,13 +8,7 @@ use crate::db::HirDatabase;
 
 pub fn is_box(db: &dyn HirDatabase, adt: AdtId) -> bool {
     let AdtId::StructId(id) = adt else { return false };
-    db.struct_data(id).flags.contains(StructFlags::IS_BOX)
-}
-
-pub fn is_unsafe_cell(db: &dyn HirDatabase, adt: AdtId) -> bool {
-    let AdtId::StructId(id) = adt else { return false };
-
-    db.struct_data(id).flags.contains(StructFlags::IS_UNSAFE_CELL)
+    db.struct_signature(id).flags.contains(StructFlags::IS_BOX)
 }
 
 pub fn lang_items_for_bin_op(op: syntax::ast::BinaryOp) -> Option<(Name, LangItem)> {
@@ -22,53 +16,43 @@ pub fn lang_items_for_bin_op(op: syntax::ast::BinaryOp) -> Option<(Name, LangIte
     Some(match op {
         BinaryOp::LogicOp(_) => return None,
         BinaryOp::ArithOp(aop) => match aop {
-            ArithOp::Add => (Name::new_symbol_root(sym::add.clone()), LangItem::Add),
-            ArithOp::Mul => (Name::new_symbol_root(sym::mul.clone()), LangItem::Mul),
-            ArithOp::Sub => (Name::new_symbol_root(sym::sub.clone()), LangItem::Sub),
-            ArithOp::Div => (Name::new_symbol_root(sym::div.clone()), LangItem::Div),
-            ArithOp::Rem => (Name::new_symbol_root(sym::rem.clone()), LangItem::Rem),
-            ArithOp::Shl => (Name::new_symbol_root(sym::shl.clone()), LangItem::Shl),
-            ArithOp::Shr => (Name::new_symbol_root(sym::shr.clone()), LangItem::Shr),
-            ArithOp::BitXor => (Name::new_symbol_root(sym::bitxor.clone()), LangItem::BitXor),
-            ArithOp::BitOr => (Name::new_symbol_root(sym::bitor.clone()), LangItem::BitOr),
-            ArithOp::BitAnd => (Name::new_symbol_root(sym::bitand.clone()), LangItem::BitAnd),
+            ArithOp::Add => (Name::new_symbol_root(sym::add), LangItem::Add),
+            ArithOp::Mul => (Name::new_symbol_root(sym::mul), LangItem::Mul),
+            ArithOp::Sub => (Name::new_symbol_root(sym::sub), LangItem::Sub),
+            ArithOp::Div => (Name::new_symbol_root(sym::div), LangItem::Div),
+            ArithOp::Rem => (Name::new_symbol_root(sym::rem), LangItem::Rem),
+            ArithOp::Shl => (Name::new_symbol_root(sym::shl), LangItem::Shl),
+            ArithOp::Shr => (Name::new_symbol_root(sym::shr), LangItem::Shr),
+            ArithOp::BitXor => (Name::new_symbol_root(sym::bitxor), LangItem::BitXor),
+            ArithOp::BitOr => (Name::new_symbol_root(sym::bitor), LangItem::BitOr),
+            ArithOp::BitAnd => (Name::new_symbol_root(sym::bitand), LangItem::BitAnd),
         },
         BinaryOp::Assignment { op: Some(aop) } => match aop {
-            ArithOp::Add => (Name::new_symbol_root(sym::add_assign.clone()), LangItem::AddAssign),
-            ArithOp::Mul => (Name::new_symbol_root(sym::mul_assign.clone()), LangItem::MulAssign),
-            ArithOp::Sub => (Name::new_symbol_root(sym::sub_assign.clone()), LangItem::SubAssign),
-            ArithOp::Div => (Name::new_symbol_root(sym::div_assign.clone()), LangItem::DivAssign),
-            ArithOp::Rem => (Name::new_symbol_root(sym::rem_assign.clone()), LangItem::RemAssign),
-            ArithOp::Shl => (Name::new_symbol_root(sym::shl_assign.clone()), LangItem::ShlAssign),
-            ArithOp::Shr => (Name::new_symbol_root(sym::shr_assign.clone()), LangItem::ShrAssign),
-            ArithOp::BitXor => {
-                (Name::new_symbol_root(sym::bitxor_assign.clone()), LangItem::BitXorAssign)
-            }
-            ArithOp::BitOr => {
-                (Name::new_symbol_root(sym::bitor_assign.clone()), LangItem::BitOrAssign)
-            }
-            ArithOp::BitAnd => {
-                (Name::new_symbol_root(sym::bitand_assign.clone()), LangItem::BitAndAssign)
-            }
+            ArithOp::Add => (Name::new_symbol_root(sym::add_assign), LangItem::AddAssign),
+            ArithOp::Mul => (Name::new_symbol_root(sym::mul_assign), LangItem::MulAssign),
+            ArithOp::Sub => (Name::new_symbol_root(sym::sub_assign), LangItem::SubAssign),
+            ArithOp::Div => (Name::new_symbol_root(sym::div_assign), LangItem::DivAssign),
+            ArithOp::Rem => (Name::new_symbol_root(sym::rem_assign), LangItem::RemAssign),
+            ArithOp::Shl => (Name::new_symbol_root(sym::shl_assign), LangItem::ShlAssign),
+            ArithOp::Shr => (Name::new_symbol_root(sym::shr_assign), LangItem::ShrAssign),
+            ArithOp::BitXor => (Name::new_symbol_root(sym::bitxor_assign), LangItem::BitXorAssign),
+            ArithOp::BitOr => (Name::new_symbol_root(sym::bitor_assign), LangItem::BitOrAssign),
+            ArithOp::BitAnd => (Name::new_symbol_root(sym::bitand_assign), LangItem::BitAndAssign),
         },
         BinaryOp::CmpOp(cop) => match cop {
-            CmpOp::Eq { negated: false } => {
-                (Name::new_symbol_root(sym::eq.clone()), LangItem::PartialEq)
-            }
-            CmpOp::Eq { negated: true } => {
-                (Name::new_symbol_root(sym::ne.clone()), LangItem::PartialEq)
-            }
+            CmpOp::Eq { negated: false } => (Name::new_symbol_root(sym::eq), LangItem::PartialEq),
+            CmpOp::Eq { negated: true } => (Name::new_symbol_root(sym::ne), LangItem::PartialEq),
             CmpOp::Ord { ordering: Ordering::Less, strict: false } => {
-                (Name::new_symbol_root(sym::le.clone()), LangItem::PartialOrd)
+                (Name::new_symbol_root(sym::le), LangItem::PartialOrd)
             }
             CmpOp::Ord { ordering: Ordering::Less, strict: true } => {
-                (Name::new_symbol_root(sym::lt.clone()), LangItem::PartialOrd)
+                (Name::new_symbol_root(sym::lt), LangItem::PartialOrd)
             }
             CmpOp::Ord { ordering: Ordering::Greater, strict: false } => {
-                (Name::new_symbol_root(sym::ge.clone()), LangItem::PartialOrd)
+                (Name::new_symbol_root(sym::ge), LangItem::PartialOrd)
             }
             CmpOp::Ord { ordering: Ordering::Greater, strict: true } => {
-                (Name::new_symbol_root(sym::gt.clone()), LangItem::PartialOrd)
+                (Name::new_symbol_root(sym::gt), LangItem::PartialOrd)
             }
         },
         BinaryOp::Assignment { op: None } => return None,
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
index bbd419d9659bf..c253fe25672f5 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
@@ -2,14 +2,13 @@
 
 use std::fmt;
 
-use base_db::ra_salsa::Cycle;
 use chalk_ir::{AdtId, FloatTy, IntTy, TyKind, UintTy};
 use hir_def::{
+    LocalFieldId, StructId,
     layout::{
         Float, Integer, LayoutCalculator, LayoutCalculatorError, LayoutData, Primitive,
         ReprOptions, Scalar, StructKind, TargetDataLayout, WrappingRange,
     },
-    LocalFieldId, StructId,
 };
 use la_arena::{Idx, RawIdx};
 use rustc_abi::AddressSpace;
@@ -18,17 +17,15 @@ use rustc_index::IndexVec;
 use triomphe::Arc;
 
 use crate::{
+    Interner, ProjectionTy, Substitution, TraitEnvironment, Ty,
     consteval::try_const_usize,
     db::{HirDatabase, InternedClosure},
     infer::normalize,
     utils::ClosureSubst,
-    Interner, ProjectionTy, Substitution, TraitEnvironment, Ty,
 };
 
-pub use self::{
-    adt::{layout_of_adt_query, layout_of_adt_recover},
-    target::target_data_layout_query,
-};
+pub(crate) use self::adt::layout_of_adt_cycle_result;
+pub use self::{adt::layout_of_adt_query, target::target_data_layout_query};
 
 mod adt;
 mod target;
@@ -168,7 +165,7 @@ pub fn layout_of_ty_query(
     let result = match kind {
         TyKind::Adt(AdtId(def), subst) => {
             if let hir_def::AdtId::StructId(s) = def {
-                let data = db.struct_data(*s);
+                let data = db.struct_signature(*s);
                 let repr = data.repr.unwrap_or_default();
                 if repr.simd() {
                     return layout_of_simd_ty(db, *s, repr.packed(), subst, trait_env, &target);
@@ -322,7 +319,7 @@ pub fn layout_of_ty_query(
                     return Err(LayoutError::NotImplemented);
                 }
                 crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => {
-                    return Err(LayoutError::NotImplemented)
+                    return Err(LayoutError::NotImplemented);
                 }
             }
         }
@@ -344,7 +341,7 @@ pub fn layout_of_ty_query(
             cx.calc.univariant(&fields, &ReprOptions::default(), StructKind::AlwaysSized)?
         }
         TyKind::Coroutine(_, _) | TyKind::CoroutineWitness(_, _) => {
-            return Err(LayoutError::NotImplemented)
+            return Err(LayoutError::NotImplemented);
         }
         TyKind::Error => return Err(LayoutError::HasErrorType),
         TyKind::AssociatedType(id, subst) => {
@@ -367,28 +364,36 @@ pub fn layout_of_ty_query(
     Ok(Arc::new(result))
 }
 
-pub fn layout_of_ty_recover(
+pub(crate) fn layout_of_ty_cycle_result(
     _: &dyn HirDatabase,
-    _: &Cycle,
-    _: &Ty,
-    _: &Arc<TraitEnvironment>,
+    _: Ty,
+    _: Arc<TraitEnvironment>,
 ) -> Result<Arc<Layout>, LayoutError> {
     Err(LayoutError::RecursiveTypeWithoutIndirection)
 }
 
 fn struct_tail_erasing_lifetimes(db: &dyn HirDatabase, pointee: Ty) -> Ty {
     match pointee.kind(Interner) {
-        TyKind::Adt(AdtId(hir_def::AdtId::StructId(i)), subst) => {
-            let data = db.struct_data(*i);
-            let mut it = data.variant_data.fields().iter().rev();
+        &TyKind::Adt(AdtId(hir_def::AdtId::StructId(i)), ref subst) => {
+            let data = db.variant_fields(i.into());
+            let mut it = data.fields().iter().rev();
             match it.next() {
                 Some((f, _)) => {
-                    let last_field_ty = field_ty(db, (*i).into(), f, subst);
+                    let last_field_ty = field_ty(db, i.into(), f, subst);
                     struct_tail_erasing_lifetimes(db, last_field_ty)
                 }
                 None => pointee,
             }
         }
+        TyKind::Tuple(_, subst) => {
+            if let Some(last_field_ty) =
+                subst.iter(Interner).last().and_then(|arg| arg.ty(Interner))
+            {
+                struct_tail_erasing_lifetimes(db, last_field_ty.clone())
+            } else {
+                pointee
+            }
+        }
         _ => pointee,
     }
 }
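
The new `TyKind::Tuple` arm above makes `struct_tail_erasing_lifetimes` recurse into the last tuple element, so pointees like `(u128, [u8])` resolve to their unsized tail instead of falling through to the default case. A self-contained sketch of the same "follow the last field until the tail is not an aggregate" idea, over a toy type model rather than chalk's `TyKind`:

```rust
/// Toy type model; not chalk's `TyKind`.
#[derive(Debug, PartialEq)]
enum Ty {
    U8,
    U128,
    Slice(Box<Ty>),  // unsized tail such as [u8]
    Tuple(Vec<Ty>),  // e.g. (u128, [u8])
    Struct(Vec<Ty>), // fields in declaration order
}

/// Follow the last field of structs and tuples until the tail is no longer an
/// aggregate; that tail decides whether a pointer to the whole type is thin or wide.
fn struct_tail(ty: &Ty) -> &Ty {
    match ty {
        Ty::Struct(fields) | Ty::Tuple(fields) => fields.last().map_or(ty, struct_tail),
        _ => ty,
    }
}

fn main() {
    // The pointee of `*const (u128, [u8])` from the `tuple_ptr_with_dst_tail`
    // test added further down in this diff.
    let tuple = Ty::Tuple(vec![Ty::U128, Ty::Slice(Box::new(Ty::U8))]);
    assert_eq!(struct_tail(&tuple), &Ty::Slice(Box::new(Ty::U8)));

    // A struct whose last field is that tuple has the same unsized tail.
    let strukt = Ty::Struct(vec![Ty::U128, tuple]);
    assert_eq!(struct_tail(&strukt), &Ty::Slice(Box::new(Ty::U8)));
}
```
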
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
index eb4729fab8426..3a020bf050d68 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
@@ -2,11 +2,10 @@
 
 use std::{cmp, ops::Bound};
 
-use base_db::ra_salsa::Cycle;
 use hir_def::{
-    data::adt::VariantData,
-    layout::{Integer, ReprOptions, TargetDataLayout},
     AdtId, VariantId,
+    layout::{Integer, ReprOptions, TargetDataLayout},
+    signatures::{StructFlags, VariantFields},
 };
 use intern::sym;
 use rustc_index::IndexVec;
@@ -14,10 +13,9 @@ use smallvec::SmallVec;
 use triomphe::Arc;
 
 use crate::{
-    db::HirDatabase,
-    lang_items::is_unsafe_cell,
-    layout::{field_ty, Layout, LayoutError},
     Substitution, TraitEnvironment,
+    db::HirDatabase,
+    layout::{Layout, LayoutError, field_ty},
 };
 
 use super::LayoutCx;
@@ -34,33 +32,37 @@ pub fn layout_of_adt_query(
     };
     let dl = &*target;
     let cx = LayoutCx::new(dl);
-    let handle_variant = |def: VariantId, var: &VariantData| {
+    let handle_variant = |def: VariantId, var: &VariantFields| {
         var.fields()
             .iter()
             .map(|(fd, _)| db.layout_of_ty(field_ty(db, def, fd, &subst), trait_env.clone()))
             .collect::<Result<Vec<_>, _>>()
     };
-    let (variants, repr) = match def {
+    let (variants, repr, is_special_no_niche) = match def {
         AdtId::StructId(s) => {
-            let data = db.struct_data(s);
+            let sig = db.struct_signature(s);
             let mut r = SmallVec::<[_; 1]>::new();
-            r.push(handle_variant(s.into(), &data.variant_data)?);
-            (r, data.repr.unwrap_or_default())
+            r.push(handle_variant(s.into(), &db.variant_fields(s.into()))?);
+            (
+                r,
+                sig.repr.unwrap_or_default(),
+                sig.flags.intersects(StructFlags::IS_UNSAFE_CELL | StructFlags::IS_UNSAFE_PINNED),
+            )
         }
         AdtId::UnionId(id) => {
-            let data = db.union_data(id);
+            let data = db.union_signature(id);
             let mut r = SmallVec::new();
-            r.push(handle_variant(id.into(), &data.variant_data)?);
-            (r, data.repr.unwrap_or_default())
+            r.push(handle_variant(id.into(), &db.variant_fields(id.into()))?);
+            (r, data.repr.unwrap_or_default(), false)
         }
         AdtId::EnumId(e) => {
-            let data = db.enum_data(e);
-            let r = data
+            let variants = db.enum_variants(e);
+            let r = variants
                 .variants
                 .iter()
-                .map(|&(v, _)| handle_variant(v.into(), &db.enum_variant_data(v).variant_data))
+                .map(|&(v, _)| handle_variant(v.into(), &db.variant_fields(v.into())))
                 .collect::<Result<SmallVec<_>, _>>()?;
-            (r, data.repr.unwrap_or_default())
+            (r, db.enum_signature(e).repr.unwrap_or_default(), false)
         }
     };
     let variants = variants
@@ -75,12 +77,12 @@ pub fn layout_of_adt_query(
             &repr,
             &variants,
             matches!(def, AdtId::EnumId(..)),
-            is_unsafe_cell(db, def),
+            is_special_no_niche,
             layout_scalar_valid_range(db, def),
             |min, max| repr_discr(dl, &repr, min, max).unwrap_or((Integer::I8, false)),
             variants.iter_enumerated().filter_map(|(id, _)| {
                 let AdtId::EnumId(e) = def else { return None };
-                let d = db.const_eval_discriminant(db.enum_data(e).variants[id.0].0).ok()?;
+                let d = db.const_eval_discriminant(db.enum_variants(e).variants[id.0].0).ok()?;
                 Some((id, d))
             }),
             // FIXME: The current code for niche-filling relies on variant indices
@@ -125,18 +127,14 @@ fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound<u128>,
         }
         Bound::Unbounded
     };
-    (
-        get(&sym::rustc_layout_scalar_valid_range_start),
-        get(&sym::rustc_layout_scalar_valid_range_end),
-    )
+    (get(sym::rustc_layout_scalar_valid_range_start), get(sym::rustc_layout_scalar_valid_range_end))
 }
 
-pub fn layout_of_adt_recover(
+pub(crate) fn layout_of_adt_cycle_result(
     _: &dyn HirDatabase,
-    _: &Cycle,
-    _: &AdtId,
-    _: &Substitution,
-    _: &Arc<TraitEnvironment>,
+    _: AdtId,
+    _: Substitution,
+    _: Arc<TraitEnvironment>,
 ) -> Result<Arc<Layout>, LayoutError> {
     Err(LayoutError::RecursiveTypeWithoutIndirection)
 }
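
Above, the dedicated `is_unsafe_cell` lookup is replaced by a single test of precomputed signature flags, so the `UnsafeCell` and `UnsafePinned` "no niche" cases share one check. A minimal sketch of that flag check, using a hand-rolled bitset instead of rust-analyzer's actual `StructFlags`:

```rust
/// Hand-rolled stand-in for a bitflags-style set such as `StructFlags`.
#[derive(Clone, Copy)]
struct StructFlags(u32);

impl StructFlags {
    const IS_UNSAFE_CELL: StructFlags = StructFlags(1 << 0);
    const IS_UNSAFE_PINNED: StructFlags = StructFlags(1 << 1);

    /// True if any bit of `other` is also set in `self`.
    fn intersects(self, other: StructFlags) -> bool {
        self.0 & other.0 != 0
    }
}

impl std::ops::BitOr for StructFlags {
    type Output = StructFlags;
    fn bitor(self, rhs: StructFlags) -> StructFlags {
        StructFlags(self.0 | rhs.0)
    }
}

fn main() {
    let unsafe_cell = StructFlags::IS_UNSAFE_CELL;
    let plain = StructFlags(0);

    // Mirrors `sig.flags.intersects(IS_UNSAFE_CELL | IS_UNSAFE_PINNED)` above.
    let special = StructFlags::IS_UNSAFE_CELL | StructFlags::IS_UNSAFE_PINNED;
    assert!(unsafe_cell.intersects(special));
    assert!(!plain.intersects(special));
}
```
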
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/target.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/target.rs
index 7d77f6d0731a3..e1e1c44996cde 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/target.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/target.rs
@@ -1,6 +1,6 @@
 //! Target dependent parameters needed for layouts
 
-use base_db::CrateId;
+use base_db::Crate;
 use hir_def::layout::TargetDataLayout;
 use rustc_abi::{AlignFromBytesError, TargetDataLayoutErrors};
 use triomphe::Arc;
@@ -9,9 +9,9 @@ use crate::db::HirDatabase;
 
 pub fn target_data_layout_query(
     db: &dyn HirDatabase,
-    krate: CrateId,
+    krate: Crate,
 ) -> Result<Arc<TargetDataLayout>, Arc<str>> {
-    match &db.crate_workspace_data()[&krate].data_layout {
+    match &krate.workspace_data(db).data_layout {
         Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(it) {
             Ok(it) => Ok(Arc::new(it)),
             Err(e) => {
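
The query above now takes a `Crate` handle instead of a raw `CrateId` and reads the data layout through a method on the handle rather than an extra database map lookup. A toy sketch of that "id wrapper with accessors" shape, with hypothetical `Db`/`WorkspaceData` types standing in for the real salsa inputs:

```rust
/// Hypothetical per-crate data as the database would store it.
struct WorkspaceData {
    data_layout: Result<String, String>,
}

/// Hypothetical database holding one entry per crate.
struct Db {
    crates: Vec<WorkspaceData>,
}

/// Toy stand-in for the new `Crate` handle: a Copy id that knows how to read
/// its own data out of the database.
#[derive(Clone, Copy)]
struct Crate(usize);

impl Crate {
    fn workspace_data(self, db: &Db) -> &WorkspaceData {
        &db.crates[self.0]
    }
}

fn main() {
    let db = Db {
        crates: vec![WorkspaceData { data_layout: Ok("e-i64:64-n32:64".to_owned()) }],
    };
    let krate = Crate(0);
    // Mirrors `krate.workspace_data(db).data_layout` from the query above.
    match &krate.workspace_data(&db).data_layout {
        Ok(dl) => println!("target data layout: {dl}"),
        Err(err) => println!("no data layout: {err}"),
    }
}
```
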
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs
index 8b74b7328bd83..cc7d74f4fb0a3 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs
@@ -1,17 +1,17 @@
 use chalk_ir::{AdtId, TyKind};
 use either::Either;
 use hir_def::db::DefDatabase;
-use project_model::{toolchain_info::QueryConfig, Sysroot};
+use project_model::{Sysroot, toolchain_info::QueryConfig};
 use rustc_hash::FxHashMap;
 use syntax::ToSmolStr;
 use test_fixture::WithFixture;
 use triomphe::Arc;
 
 use crate::{
+    Interner, Substitution,
     db::HirDatabase,
     layout::{Layout, LayoutError},
     test_db::TestDB,
-    Interner, Substitution,
 };
 
 mod closure;
@@ -38,27 +38,36 @@ fn eval_goal(
     let adt_or_type_alias_id = file_ids
         .into_iter()
         .find_map(|file_id| {
-            let module_id = db.module_for_file(file_id.file_id());
+            let module_id = db.module_for_file(file_id.file_id(&db));
             let def_map = module_id.def_map(&db);
             let scope = &def_map[module_id.local_id].scope;
             let adt_or_type_alias_id = scope.declarations().find_map(|x| match x {
                 hir_def::ModuleDefId::AdtId(x) => {
                     let name = match x {
-                        hir_def::AdtId::StructId(x) => {
-                            db.struct_data(x).name.display_no_db(file_id.edition()).to_smolstr()
-                        }
-                        hir_def::AdtId::UnionId(x) => {
-                            db.union_data(x).name.display_no_db(file_id.edition()).to_smolstr()
-                        }
-                        hir_def::AdtId::EnumId(x) => {
-                            db.enum_data(x).name.display_no_db(file_id.edition()).to_smolstr()
-                        }
+                        hir_def::AdtId::StructId(x) => db
+                            .struct_signature(x)
+                            .name
+                            .display_no_db(file_id.edition(&db))
+                            .to_smolstr(),
+                        hir_def::AdtId::UnionId(x) => db
+                            .union_signature(x)
+                            .name
+                            .display_no_db(file_id.edition(&db))
+                            .to_smolstr(),
+                        hir_def::AdtId::EnumId(x) => db
+                            .enum_signature(x)
+                            .name
+                            .display_no_db(file_id.edition(&db))
+                            .to_smolstr(),
                     };
                     (name == "Goal").then_some(Either::Left(x))
                 }
                 hir_def::ModuleDefId::TypeAliasId(x) => {
-                    let name =
-                        db.type_alias_data(x).name.display_no_db(file_id.edition()).to_smolstr();
+                    let name = db
+                        .type_alias_signature(x)
+                        .name
+                        .display_no_db(file_id.edition(&db))
+                        .to_smolstr();
                     (name == "Goal").then_some(Either::Right(x))
                 }
                 _ => None,
@@ -94,14 +103,15 @@ fn eval_expr(
     );
 
     let (db, file_id) = TestDB::with_single_file(&ra_fixture);
-    let module_id = db.module_for_file(file_id.file_id());
+    let module_id = db.module_for_file(file_id.file_id(&db));
     let def_map = module_id.def_map(&db);
     let scope = &def_map[module_id.local_id].scope;
     let function_id = scope
         .declarations()
         .find_map(|x| match x {
             hir_def::ModuleDefId::FunctionId(x) => {
-                let name = db.function_data(x).name.display_no_db(file_id.edition()).to_smolstr();
+                let name =
+                    db.function_signature(x).name.display_no_db(file_id.edition(&db)).to_smolstr();
                 (name == "main").then_some(x)
             }
             _ => None,
@@ -111,7 +121,7 @@ fn eval_expr(
     let b = hir_body
         .bindings
         .iter()
-        .find(|x| x.1.name.display_no_db(file_id.edition()).to_smolstr() == "goal")
+        .find(|x| x.1.name.display_no_db(file_id.edition(&db)).to_smolstr() == "goal")
         .unwrap()
         .0;
     let infer = db.infer(function_id.into());
@@ -284,6 +294,18 @@ fn repr_packed() {
     check_size_and_align("#[repr(Rust, packed(5))] struct Goal(i32);", "", 4, 1);
 }
 
+#[test]
+fn multiple_repr_attrs() {
+    size_and_align!(
+        #[repr(C)]
+        #[repr(packed)]
+        struct Goal {
+            id: i32,
+            u: u8,
+        }
+    )
+}
+
 #[test]
 fn generic() {
     size_and_align! {
@@ -468,6 +490,16 @@ fn tuple() {
     }
 }
 
+#[test]
+fn tuple_ptr_with_dst_tail() {
+    size_and_align!(
+        struct Goal(*const ([u8],));
+    );
+    size_and_align!(
+        struct Goal(*const (u128, [u8]));
+    );
+}
+
 #[test]
 fn non_zero_and_non_null() {
     size_and_align! {
@@ -490,10 +522,7 @@ fn niche_optimization() {
 }
 
 #[test]
-fn const_eval() {
-    size_and_align! {
-        struct Goal([i32; 2 + 2]);
-    }
+fn const_eval_simple() {
     size_and_align! {
         const X: usize = 5;
         struct Goal([i32; X]);
@@ -505,6 +534,15 @@ fn const_eval() {
         struct Ar<T>([T; foo::BAR]);
         struct Goal(Ar<Ar<i32>>);
     }
+}
+
+#[test]
+// FIXME
+#[should_panic]
+fn const_eval_complex() {
+    size_and_align! {
+        struct Goal([i32; 2 + 2]);
+    }
     size_and_align! {
         type Goal = [u8; 2 + 2];
     }
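
The new `multiple_repr_attrs` test above checks that `#[repr(C)]` and `#[repr(packed)]` written as separate attributes are combined. For reference, a standalone program showing the layout the fixture describes (packing removes inter-field padding and drops the struct's alignment to 1, so `i32 + u8` occupies 5 bytes):

```rust
#[repr(C)]
#[repr(packed)]
struct Goal {
    id: i32,
    u: u8,
}

fn main() {
    // packed removes inter-field padding and lowers the struct alignment to 1.
    assert_eq!(std::mem::size_of::<Goal>(), 5);
    assert_eq!(std::mem::align_of::<Goal>(), 1);

    // Read the fields by value (taking references to packed fields is not allowed).
    let g = Goal { id: 7, u: 1 };
    assert_eq!(i64::from(g.id) + i64::from(g.u), 8);
}
```
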
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
index cc02b71f05c19..128569d55dc9b 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
@@ -56,21 +56,20 @@ mod variance;
 
 use std::hash::Hash;
 
-use base_db::ra_salsa::InternValueTrivial;
 use chalk_ir::{
+    NoSolution,
     fold::{Shift, TypeFoldable},
     interner::HasInterner,
-    NoSolution,
 };
 use either::Either;
-use hir_def::{hir::ExprId, type_ref::Rawness, CallableDefId, GeneralConstId, TypeOrConstParamId};
+use hir_def::{CallableDefId, GeneralConstId, TypeOrConstParamId, hir::ExprId, type_ref::Rawness};
 use hir_expand::name::Name;
-use indexmap::{map::Entry, IndexMap};
-use intern::{sym, Symbol};
+use indexmap::{IndexMap, map::Entry};
+use intern::{Symbol, sym};
 use la_arena::{Arena, Idx};
 use mir::{MirEvalError, VTableMap};
 use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet};
-use syntax::ast::{make, ConstArg};
+use syntax::ast::{ConstArg, make};
 use traits::FnTrait;
 use triomphe::Arc;
 
@@ -87,16 +86,16 @@ pub use builder::{ParamKind, TyBuilder};
 pub use chalk_ext::*;
 pub use drop::DropGlue;
 pub use infer::{
+    Adjust, Adjustment, AutoBorrow, BindingMode, InferenceDiagnostic, InferenceResult,
+    InferenceTyDiagnosticSource, OverloadedDeref, PointerCast,
     cast::CastError,
     closure::{CaptureKind, CapturedItem},
-    could_coerce, could_unify, could_unify_deeply, Adjust, Adjustment, AutoBorrow, BindingMode,
-    InferenceDiagnostic, InferenceResult, InferenceTyDiagnosticSource, OverloadedDeref,
-    PointerCast,
+    could_coerce, could_unify, could_unify_deeply,
 };
 pub use interner::Interner;
 pub use lower::{
-    associated_type_shorthand_candidates, diagnostics::*, ImplTraitLoweringMode, ParamLoweringMode,
-    TyDefId, TyLoweringContext, ValueTyDefId,
+    ImplTraitLoweringMode, LifetimeElisionKind, ParamLoweringMode, TyDefId, TyLoweringContext,
+    ValueTyDefId, associated_type_shorthand_candidates, diagnostics::*,
 };
 pub use mapping::{
     from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, from_placeholder_idx,
@@ -106,13 +105,13 @@ pub use mapping::{
 pub use method_resolution::check_orphan_rules;
 pub use target_feature::TargetFeatures;
 pub use traits::TraitEnvironment;
-pub use utils::{all_super_traits, direct_super_traits, is_fn_unsafe_to_call, Unsafety};
+pub use utils::{Unsafety, all_super_traits, direct_super_traits, is_fn_unsafe_to_call};
 pub use variance::Variance;
 
 pub use chalk_ir::{
+    AdtId, BoundVar, DebruijnIndex, Mutability, Safety, Scalar, TyVariableKind,
     cast::Cast,
     visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor},
-    AdtId, BoundVar, DebruijnIndex, Mutability, Safety, Scalar, TyVariableKind,
 };
 
 pub type ForeignDefId = chalk_ir::ForeignDefId<Interner>;
@@ -302,7 +301,7 @@ impl Hash for ConstScalar {
 
 /// Return an index of a parameter in the generic type parameter list by it's id.
 pub fn param_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Option<usize> {
-    generics::generics(db.upcast(), id.parent).type_or_const_param_idx(id)
+    generics::generics(db, id.parent).type_or_const_param_idx(id)
 }
 
 pub(crate) fn wrap_empty_binders<T>(value: T) -> Binders<T>
@@ -348,20 +347,24 @@ pub(crate) fn make_binders<T: HasInterner<Interner = Interner>>(
     generics: &Generics,
     value: T,
 ) -> Binders<T> {
-    Binders::new(
-        VariableKinds::from_iter(
-            Interner,
-            generics.iter_id().map(|x| match x {
-                hir_def::GenericParamId::ConstParamId(id) => {
-                    chalk_ir::VariableKind::Const(db.const_param_ty(id))
-                }
-                hir_def::GenericParamId::TypeParamId(_) => {
-                    chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General)
-                }
-                hir_def::GenericParamId::LifetimeParamId(_) => chalk_ir::VariableKind::Lifetime,
-            }),
-        ),
-        value,
+    Binders::new(variable_kinds_from_iter(db, generics.iter_id()), value)
+}
+
+pub(crate) fn variable_kinds_from_iter(
+    db: &dyn HirDatabase,
+    iter: impl Iterator<Item = hir_def::GenericParamId>,
+) -> VariableKinds {
+    VariableKinds::from_iter(
+        Interner,
+        iter.map(|x| match x {
+            hir_def::GenericParamId::ConstParamId(id) => {
+                chalk_ir::VariableKind::Const(db.const_param_ty(id))
+            }
+            hir_def::GenericParamId::TypeParamId(_) => {
+                chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General)
+            }
+            hir_def::GenericParamId::LifetimeParamId(_) => chalk_ir::VariableKind::Lifetime,
+        }),
     )
 }
 
@@ -526,13 +529,13 @@ pub type PolyFnSig = Binders<CallableSig>;
 
 impl CallableSig {
     pub fn from_params_and_return(
-        params: impl ExactSizeIterator<Item = Ty>,
+        params: impl Iterator<Item = Ty>,
         ret: Ty,
         is_varargs: bool,
         safety: Safety,
         abi: FnAbi,
     ) -> CallableSig {
-        let mut params_and_return = Vec::with_capacity(params.len() + 1);
+        let mut params_and_return = Vec::with_capacity(params.size_hint().0 + 1);
         params_and_return.extend(params);
         params_and_return.push(ret);
         CallableSig { params_and_return: params_and_return.into(), is_varargs, safety, abi }
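
`CallableSig::from_params_and_return` above now accepts any `Iterator` and pre-sizes its buffer with `size_hint().0 + 1` instead of requiring `ExactSizeIterator`. A small sketch of why that is safe: the lower bound only affects the initial capacity, never correctness (the helper name here is illustrative, not the real API):

```rust
/// Illustrative helper mirroring the capacity pattern used in
/// `CallableSig::from_params_and_return`; the name and item type are made up.
fn params_and_return<I: Iterator<Item = u32>>(params: I, ret: u32) -> Vec<u32> {
    // size_hint().0 is a lower bound: if it under-estimates, Vec simply grows;
    // it can never cause elements to be dropped or duplicated.
    let mut buf = Vec::with_capacity(params.size_hint().0 + 1);
    buf.extend(params);
    buf.push(ret);
    buf
}

fn main() {
    // An iterator with an exact hint...
    assert_eq!(params_and_return([1, 2, 3].into_iter(), 9), vec![1, 2, 3, 9]);
    // ...and one whose filter makes the hint a (zero) lower bound.
    let filtered = (1..=4).filter(|n| n % 2 == 0);
    assert_eq!(params_and_return(filtered, 9), vec![2, 4, 9]);
}
```
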
@@ -610,7 +613,6 @@ pub enum ImplTraitId {
     TypeAliasImplTrait(hir_def::TypeAliasId, ImplTraitIdx),
     AsyncBlockTypeImplTrait(hir_def::DefWithBodyId, ExprId),
 }
-impl InternValueTrivial for ImplTraitId {}
 
 #[derive(PartialEq, Eq, Debug, Hash)]
 pub struct ImplTraits {
@@ -647,10 +649,8 @@ pub(crate) fn fold_free_vars<T: HasInterner<Interner = Interner> + TypeFoldable<
         F1: FnMut(BoundVar, DebruijnIndex) -> Ty,
         F2: FnMut(Ty, BoundVar, DebruijnIndex) -> Const,
     >(F1, F2);
-    impl<
-            F1: FnMut(BoundVar, DebruijnIndex) -> Ty,
-            F2: FnMut(Ty, BoundVar, DebruijnIndex) -> Const,
-        > TypeFolder<Interner> for FreeVarFolder<F1, F2>
+    impl<F1: FnMut(BoundVar, DebruijnIndex) -> Ty, F2: FnMut(Ty, BoundVar, DebruijnIndex) -> Const>
+        TypeFolder<Interner> for FreeVarFolder<F1, F2>
     {
         fn as_dyn(&mut self) -> &mut dyn TypeFolder<Interner> {
             self
@@ -780,8 +780,8 @@ where
     T: HasInterner<Interner = Interner> + TypeFoldable<Interner> + Clone,
 {
     use chalk_ir::{
-        fold::{FallibleTypeFolder, TypeSuperFoldable},
         Fallible,
+        fold::{FallibleTypeFolder, TypeSuperFoldable},
     };
     struct ErrorReplacer {
         vars: usize,
@@ -842,11 +842,7 @@ where
             _var: InferenceVar,
             _outer_binder: DebruijnIndex,
         ) -> Fallible<Const> {
-            if cfg!(debug_assertions) {
-                Err(NoSolution)
-            } else {
-                Ok(unknown_const(ty))
-            }
+            if cfg!(debug_assertions) { Err(NoSolution) } else { Ok(unknown_const(ty)) }
         }
 
         fn try_fold_free_var_const(
@@ -855,11 +851,7 @@ where
             _bound_var: BoundVar,
             _outer_binder: DebruijnIndex,
         ) -> Fallible<Const> {
-            if cfg!(debug_assertions) {
-                Err(NoSolution)
-            } else {
-                Ok(unknown_const(ty))
-            }
+            if cfg!(debug_assertions) { Err(NoSolution) } else { Ok(unknown_const(ty)) }
         }
 
         fn try_fold_inference_lifetime(
@@ -867,11 +859,7 @@ where
             _var: InferenceVar,
             _outer_binder: DebruijnIndex,
         ) -> Fallible<Lifetime> {
-            if cfg!(debug_assertions) {
-                Err(NoSolution)
-            } else {
-                Ok(error_lifetime())
-            }
+            if cfg!(debug_assertions) { Err(NoSolution) } else { Ok(error_lifetime()) }
         }
 
         fn try_fold_free_var_lifetime(
@@ -879,11 +867,7 @@ where
             _bound_var: BoundVar,
             _outer_binder: DebruijnIndex,
         ) -> Fallible<Lifetime> {
-            if cfg!(debug_assertions) {
-                Err(NoSolution)
-            } else {
-                Ok(error_lifetime())
-            }
+            if cfg!(debug_assertions) { Err(NoSolution) } else { Ok(error_lifetime()) }
         }
     }
     let mut error_replacer = ErrorReplacer { vars: 0 };
@@ -908,8 +892,8 @@ pub fn callable_sig_from_fn_trait(
     let krate = trait_env.krate;
     let fn_once_trait = FnTrait::FnOnce.get_id(db, krate)?;
     let output_assoc_type = db
-        .trait_data(fn_once_trait)
-        .associated_type_by_name(&Name::new_symbol_root(sym::Output.clone()))?;
+        .trait_items(fn_once_trait)
+        .associated_type_by_name(&Name::new_symbol_root(sym::Output))?;
 
     let mut table = InferenceTable::new(db, trait_env.clone());
     let b = TyBuilder::trait_ref(db, fn_once_trait);
@@ -1033,7 +1017,7 @@ where
     T: ?Sized + TypeVisitable<Interner>,
 {
     let mut collector = PlaceholderCollector { db, placeholders: FxHashSet::default() };
-    let _ = value.visit_with(&mut collector, DebruijnIndex::INNERMOST);
+    _ = value.visit_with(&mut collector, DebruijnIndex::INNERMOST);
     collector.placeholders.into_iter().collect()
 }
 
@@ -1042,15 +1026,6 @@ pub fn known_const_to_ast(
     db: &dyn HirDatabase,
     display_target: DisplayTarget,
 ) -> Option<ConstArg> {
-    if let ConstValue::Concrete(c) = &konst.interned().value {
-        match c.interned {
-            ConstScalar::UnevaluatedConst(GeneralConstId::InTypeConstId(cid), _) => {
-                return Some(cid.source(db.upcast()));
-            }
-            ConstScalar::Unknown => return None,
-            _ => (),
-        }
-    }
     Some(make::expr_const_value(konst.display(db, display_target).to_string().as_str()))
 }
 
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
index af73b5ed9a7b4..e4688d044e981 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
@@ -14,65 +14,57 @@ use std::{
     ops::{self, Not as _},
 };
 
-use base_db::{ra_salsa::Cycle, CrateId};
+use base_db::Crate;
 use chalk_ir::{
+    Mutability, Safety, TypeOutlives,
     cast::Cast,
     fold::{Shift, TypeFoldable},
     interner::HasInterner,
-    Mutability, Safety, TypeOutlives,
 };
 
 use either::Either;
 use hir_def::{
+    AdtId, AssocItemId, CallableDefId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId,
+    FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, ItemContainerId, LocalFieldId,
+    Lookup, StaticId, StructId, TypeAliasId, TypeOrConstParamId, UnionId, VariantId,
     builtin_type::BuiltinType,
-    data::{adt::StructKind, TraitFlags},
-    expander::Expander,
-    generics::{
-        GenericParamDataRef, TypeOrConstParamData, TypeParamProvenance, WherePredicate,
-        WherePredicateTypeTarget,
-    },
+    expr_store::{ExpressionStore, path::Path},
+    hir::generics::{GenericParamDataRef, TypeOrConstParamData, WherePredicate},
+    item_tree::FieldsShape,
     lang_item::LangItem,
-    nameres::MacroSubNs,
-    path::{GenericArg, ModPath, Path, PathKind},
     resolver::{HasResolver, LifetimeNs, Resolver, TypeNs},
+    signatures::{FunctionSignature, TraitFlags, TypeAliasFlags},
     type_ref::{
-        ConstRef, LifetimeRef, PathId, TraitBoundModifier, TraitRef as HirTraitRef, TypeBound,
-        TypeRef, TypeRefId, TypesMap, TypesSourceMap,
+        ConstRef, LifetimeRefId, LiteralConstRef, PathId, TraitBoundModifier,
+        TraitRef as HirTraitRef, TypeBound, TypeRef, TypeRefId,
     },
-    AdtId, AssocItemId, CallableDefId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId,
-    FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, InTypeConstLoc, LocalFieldId,
-    Lookup, StaticId, StructId, TypeAliasId, TypeOrConstParamId, TypeOwnerId, UnionId, VariantId,
 };
-use hir_expand::{name::Name, ExpandResult};
+use hir_expand::name::Name;
 use la_arena::{Arena, ArenaMap};
 use rustc_hash::FxHashSet;
 use rustc_pattern_analysis::Captures;
 use stdx::{impl_from, never};
-use syntax::ast;
 use triomphe::{Arc, ThinArc};
 
 use crate::{
+    AliasTy, Binders, BoundVar, CallableSig, Const, DebruijnIndex, DynTy, FnAbi, FnPointer, FnSig,
+    FnSubst, ImplTrait, ImplTraitId, ImplTraits, Interner, Lifetime, LifetimeData,
+    LifetimeOutlives, PolyFnSig, ProgramClause, QuantifiedWhereClause, QuantifiedWhereClauses,
+    Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyKind, WhereClause,
     all_super_traits,
-    consteval::{
-        intern_const_ref, intern_const_scalar, path_to_const, unknown_const,
-        unknown_const_as_generic,
-    },
+    consteval::{intern_const_ref, path_to_const, unknown_const, unknown_const_as_generic},
     db::HirDatabase,
     error_lifetime,
-    generics::{generics, trait_self_param_idx, Generics},
+    generics::{Generics, generics, trait_self_param_idx},
     lower::{
         diagnostics::*,
         path::{PathDiagnosticCallback, PathLoweringContext},
     },
     make_binders,
-    mapping::{from_chalk_trait_id, lt_to_placeholder_idx, ToChalk},
+    mapping::{ToChalk, from_chalk_trait_id, lt_to_placeholder_idx},
     static_lifetime, to_chalk_trait_id, to_placeholder_idx,
-    utils::{all_super_trait_refs, InTypeConstIdMetadata},
-    AliasTy, Binders, BoundVar, CallableSig, Const, ConstScalar, DebruijnIndex, DynTy, FnAbi,
-    FnPointer, FnSig, FnSubst, ImplTrait, ImplTraitId, ImplTraits, Interner, Lifetime,
-    LifetimeData, LifetimeOutlives, ParamKind, PolyFnSig, ProgramClause, QuantifiedWhereClause,
-    QuantifiedWhereClauses, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder,
-    TyKind, WhereClause,
+    utils::all_super_trait_refs,
+    variable_kinds_from_iter,
 };
 
 #[derive(Debug, Default)]
@@ -83,72 +75,108 @@ struct ImplTraitLoweringState {
     mode: ImplTraitLoweringMode,
     // This is structured as a struct with fields and not as an enum because it helps with the borrow checker.
     opaque_type_data: Arena<ImplTrait>,
-    param_and_variable_counter: u16,
 }
 impl ImplTraitLoweringState {
     fn new(mode: ImplTraitLoweringMode) -> ImplTraitLoweringState {
-        Self { mode, opaque_type_data: Arena::new(), param_and_variable_counter: 0 }
+        Self { mode, opaque_type_data: Arena::new() }
     }
-    fn param(counter: u16) -> Self {
-        Self {
-            mode: ImplTraitLoweringMode::Param,
-            opaque_type_data: Arena::new(),
-            param_and_variable_counter: counter,
+}
+
+pub(crate) struct PathDiagnosticCallbackData(TypeRefId);
+
+#[derive(Debug, Clone)]
+pub enum LifetimeElisionKind {
+    /// Create a new anonymous lifetime parameter and reference it.
+    ///
+    /// If `report_in_path`, report an error when encountering lifetime elision in a path:
+    /// ```compile_fail
+    /// struct Foo<'a> { x: &'a () }
+    /// async fn foo(x: Foo) {}
+    /// ```
+    ///
+    /// Note: the error should not trigger when the elided lifetime is in a pattern or
+    /// expression-position path:
+    /// ```
+    /// struct Foo<'a> { x: &'a () }
+    /// async fn foo(Foo { x: _ }: Foo<'_>) {}
+    /// ```
+    AnonymousCreateParameter { report_in_path: bool },
+
+    /// Replace all anonymous lifetimes with the provided lifetime.
+    Elided(Lifetime),
+
+    /// Give a hard error when either `&` or `'_` is written. Used to
+    /// rule out things like `where T: Foo<'_>`. Does not imply an
+    /// error on default object bounds (e.g., `Box<dyn Foo>`).
+    AnonymousReportError,
+
+    /// Resolve elided lifetimes to `'static` if there are no other lifetimes in scope;
+    /// otherwise give a warning that the previous behavior of introducing a new early-bound
+    /// lifetime is a bug and will be removed (if `only_lint` is enabled).
+    StaticIfNoLifetimeInScope { only_lint: bool },
+
+    /// Signal that we cannot determine which lifetime the anonymous lifetime should resolve to.
+    ElisionFailure,
+
+    /// Infer all elided lifetimes.
+    Infer,
+}
+
+impl LifetimeElisionKind {
+    #[inline]
+    pub(crate) fn for_const(const_parent: ItemContainerId) -> LifetimeElisionKind {
+        match const_parent {
+            ItemContainerId::ExternBlockId(_) | ItemContainerId::ModuleId(_) => {
+                LifetimeElisionKind::Elided(static_lifetime())
+            }
+            ItemContainerId::ImplId(_) => {
+                LifetimeElisionKind::StaticIfNoLifetimeInScope { only_lint: true }
+            }
+            ItemContainerId::TraitId(_) => {
+                LifetimeElisionKind::StaticIfNoLifetimeInScope { only_lint: false }
+            }
         }
     }
-    fn variable(counter: u16) -> Self {
-        Self {
-            mode: ImplTraitLoweringMode::Variable,
-            opaque_type_data: Arena::new(),
-            param_and_variable_counter: counter,
-        }
+
+    #[inline]
+    pub(crate) fn for_fn_params(data: &FunctionSignature) -> LifetimeElisionKind {
+        LifetimeElisionKind::AnonymousCreateParameter { report_in_path: data.is_async() }
     }
-}
 
-pub(crate) struct PathDiagnosticCallbackData(TypeRefId);
+    #[inline]
+    pub(crate) fn for_fn_ret() -> LifetimeElisionKind {
+        // FIXME: We should use the elided lifetime here, or `ElisionFailure`.
+        LifetimeElisionKind::Elided(error_lifetime())
+    }
+}
 
 #[derive(Debug)]
 pub struct TyLoweringContext<'a> {
     pub db: &'a dyn HirDatabase,
     resolver: &'a Resolver,
-    generics: OnceCell<Option<Generics>>,
-    types_map: &'a TypesMap,
-    /// If this is set, that means we're in a context of a freshly expanded macro, and that means
-    /// we should not use `TypeRefId` in diagnostics because the caller won't have the `TypesMap`,
-    /// instead we need to put `TypeSource` from the source map.
-    types_source_map: Option<&'a TypesSourceMap>,
+    store: &'a ExpressionStore,
+    def: GenericDefId,
+    generics: OnceCell<Generics>,
     in_binders: DebruijnIndex,
-    // FIXME: Should not be an `Option` but `Resolver` currently does not return owners in all cases
-    // where expected
-    owner: Option<TypeOwnerId>,
     /// Note: Conceptually, it's thinkable that we could be in a location where
     /// some type params should be represented as placeholders, and others
     /// should be converted to variables. I think in practice, this isn't
     /// possible currently, so this should be fine for now.
     pub type_param_mode: ParamLoweringMode,
     impl_trait_mode: ImplTraitLoweringState,
-    expander: Option<Expander>,
     /// Tracks types with explicit `?Sized` bounds.
     pub(crate) unsized_types: FxHashSet<Ty>,
     pub(crate) diagnostics: Vec<TyLoweringDiagnostic>,
+    lifetime_elision: LifetimeElisionKind,
 }
 
 impl<'a> TyLoweringContext<'a> {
     pub fn new(
         db: &'a dyn HirDatabase,
         resolver: &'a Resolver,
-        types_map: &'a TypesMap,
-        owner: TypeOwnerId,
-    ) -> Self {
-        Self::new_maybe_unowned(db, resolver, types_map, None, Some(owner))
-    }
-
-    pub fn new_maybe_unowned(
-        db: &'a dyn HirDatabase,
-        resolver: &'a Resolver,
-        types_map: &'a TypesMap,
-        types_source_map: Option<&'a TypesSourceMap>,
-        owner: Option<TypeOwnerId>,
+        store: &'a ExpressionStore,
+        def: GenericDefId,
+        lifetime_elision: LifetimeElisionKind,
     ) -> Self {
         let impl_trait_mode = ImplTraitLoweringState::new(ImplTraitLoweringMode::Disallowed);
         let type_param_mode = ParamLoweringMode::Placeholder;
@@ -156,16 +184,15 @@ impl<'a> TyLoweringContext<'a> {
         Self {
             db,
             resolver,
-            generics: OnceCell::new(),
-            types_map,
-            types_source_map,
-            owner,
+            def,
+            generics: Default::default(),
+            store,
             in_binders,
             impl_trait_mode,
             type_param_mode,
-            expander: None,
             unsized_types: FxHashSet::default(),
             diagnostics: Vec::new(),
+            lifetime_elision,
         }
     }
 
@@ -188,6 +215,17 @@ impl<'a> TyLoweringContext<'a> {
         self.with_debruijn(self.in_binders.shifted_in_from(debruijn), f)
     }
 
+    fn with_lifetime_elision<T>(
+        &mut self,
+        lifetime_elision: LifetimeElisionKind,
+        f: impl FnOnce(&mut TyLoweringContext<'_>) -> T,
+    ) -> T {
+        let old_lifetime_elision = mem::replace(&mut self.lifetime_elision, lifetime_elision);
+        let result = f(self);
+        self.lifetime_elision = old_lifetime_elision;
+        result
+    }
+
     pub fn with_impl_trait_mode(self, impl_trait_mode: ImplTraitLoweringMode) -> Self {
         Self { impl_trait_mode: ImplTraitLoweringState::new(impl_trait_mode), ..self }
     }
@@ -207,17 +245,7 @@ impl<'a> TyLoweringContext<'a> {
     }
 
     pub fn push_diagnostic(&mut self, type_ref: TypeRefId, kind: TyLoweringDiagnosticKind) {
-        let source = match self.types_source_map {
-            Some(source_map) => {
-                let Ok(source) = source_map.type_syntax(type_ref) else {
-                    stdx::never!("error in synthetic type");
-                    return;
-                };
-                Either::Right(source)
-            }
-            None => Either::Left(type_ref),
-        };
-        self.diagnostics.push(TyLoweringDiagnostic { source, kind });
+        self.diagnostics.push(TyLoweringDiagnostic { source: type_ref, kind });
     }
 }
 
@@ -228,15 +256,6 @@ pub enum ImplTraitLoweringMode {
     /// i.e. for arguments of the function we're currently checking, and return
     /// types of functions we're calling.
     Opaque,
-    /// `impl Trait` gets lowered into a type variable. Used for argument
-    /// position impl Trait when inside the respective function, since it allows
-    /// us to support that without Chalk.
-    Param,
-    /// `impl Trait` gets lowered into a variable that can unify with some
-    /// type. This is used in places where values flow 'in', i.e. for arguments
-    /// of functions we're calling, and the return type of the function we're
-    /// currently checking.
-    Variable,
     /// `impl Trait` is disallowed and will be an error.
     #[default]
     Disallowed,
@@ -254,29 +273,57 @@ impl<'a> TyLoweringContext<'a> {
     }
 
     pub fn lower_const(&mut self, const_ref: &ConstRef, const_type: Ty) -> Const {
-        let Some(owner) = self.owner else { return unknown_const(const_type) };
-        let debruijn = self.in_binders;
-        const_or_path_to_chalk(
+        let const_ref = &self.store[const_ref.expr];
+        match const_ref {
+            hir_def::hir::Expr::Path(path) => path_to_const(
+                self.db,
+                self.resolver,
+                path,
+                self.type_param_mode,
+                || self.generics(),
+                self.in_binders,
+                const_type.clone(),
+            )
+            .unwrap_or_else(|| unknown_const(const_type)),
+            hir_def::hir::Expr::Literal(literal) => intern_const_ref(
+                self.db,
+                &match *literal {
+                    hir_def::hir::Literal::Float(_, _)
+                    | hir_def::hir::Literal::String(_)
+                    | hir_def::hir::Literal::ByteString(_)
+                    | hir_def::hir::Literal::CString(_) => LiteralConstRef::Unknown,
+                    hir_def::hir::Literal::Char(c) => LiteralConstRef::Char(c),
+                    hir_def::hir::Literal::Bool(b) => LiteralConstRef::Bool(b),
+                    hir_def::hir::Literal::Int(val, _) => LiteralConstRef::Int(val),
+                    hir_def::hir::Literal::Uint(val, _) => LiteralConstRef::UInt(val),
+                },
+                const_type,
+                self.resolver.krate(),
+            ),
+            _ => unknown_const(const_type),
+        }
+    }
+
+    pub fn lower_path_as_const(&mut self, path: &Path, const_type: Ty) -> Const {
+        path_to_const(
             self.db,
             self.resolver,
-            owner,
-            const_type,
-            const_ref,
+            path,
             self.type_param_mode,
             || self.generics(),
-            debruijn,
+            self.in_binders,
+            const_type.clone(),
         )
+        .unwrap_or_else(|| unknown_const(const_type))
     }
 
-    fn generics(&self) -> Option<&Generics> {
-        self.generics
-            .get_or_init(|| self.resolver.generic_def().map(|def| generics(self.db.upcast(), def)))
-            .as_ref()
+    fn generics(&self) -> &Generics {
+        self.generics.get_or_init(|| generics(self.db, self.def))
     }
 
     pub fn lower_ty_ext(&mut self, type_ref_id: TypeRefId) -> (Ty, Option<TypeNs>) {
         let mut res = None;
-        let type_ref = &self.types_map[type_ref_id];
+        let type_ref = &self.store[type_ref_id];
         let ty = match type_ref {
             TypeRef::Never => TyKind::Never.intern(Interner),
             TypeRef::Tuple(inner) => {
@@ -290,6 +337,20 @@ impl<'a> TyLoweringContext<'a> {
                 res = res_;
                 ty
             }
+            &TypeRef::TypeParam(type_param_id) => {
+                res = Some(TypeNs::GenericParam(type_param_id));
+                match self.type_param_mode {
+                    ParamLoweringMode::Placeholder => {
+                        TyKind::Placeholder(to_placeholder_idx(self.db, type_param_id.into()))
+                    }
+                    ParamLoweringMode::Variable => {
+                        let idx =
+                            self.generics().type_or_const_param_idx(type_param_id.into()).unwrap();
+                        TyKind::BoundVar(BoundVar::new(self.in_binders, idx))
+                    }
+                }
+                .intern(Interner)
+            }
             &TypeRef::RawPtr(inner, mutability) => {
                 let inner_ty = self.lower_ty(inner);
                 TyKind::Raw(lower_to_chalk_mutability(mutability), inner_ty).intern(Interner)
@@ -309,24 +370,32 @@ impl<'a> TyLoweringContext<'a> {
                 let lifetime = ref_
                     .lifetime
                     .as_ref()
-                    .map_or_else(error_lifetime, |lr| self.lower_lifetime(lr));
+                    .map_or_else(error_lifetime, |&lr| self.lower_lifetime(lr));
                 TyKind::Ref(lower_to_chalk_mutability(ref_.mutability), lifetime, inner_ty)
                     .intern(Interner)
             }
             TypeRef::Placeholder => TyKind::Error.intern(Interner),
             TypeRef::Fn(fn_) => {
                 let substs = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
-                    Substitution::from_iter(
-                        Interner,
-                        fn_.params().iter().map(|&(_, tr)| ctx.lower_ty(tr)),
-                    )
+                    let (params, ret) = fn_.split_params_and_ret();
+                    let mut subst = Vec::with_capacity(fn_.params.len());
+                    ctx.with_lifetime_elision(
+                        LifetimeElisionKind::AnonymousCreateParameter { report_in_path: false },
+                        |ctx| {
+                            subst.extend(params.iter().map(|&(_, tr)| ctx.lower_ty(tr)));
+                        },
+                    );
+                    ctx.with_lifetime_elision(LifetimeElisionKind::for_fn_ret(), |ctx| {
+                        subst.push(ctx.lower_ty(ret));
+                    });
+                    Substitution::from_iter(Interner, subst)
                 });
                 TyKind::Function(FnPointer {
                     num_binders: 0, // FIXME lower `for<'a> fn()` correctly
                     sig: FnSig {
-                        abi: fn_.abi().as_ref().map_or(FnAbi::Rust, FnAbi::from_symbol),
-                        safety: if fn_.is_unsafe() { Safety::Unsafe } else { Safety::Safe },
-                        variadic: fn_.is_varargs(),
+                        abi: fn_.abi.as_ref().map_or(FnAbi::Rust, FnAbi::from_symbol),
+                        safety: if fn_.is_unsafe { Safety::Unsafe } else { Safety::Safe },
+                        variadic: fn_.is_varargs,
                     },
                     substitution: FnSubst(substs),
                 })
@@ -336,9 +405,9 @@ impl<'a> TyLoweringContext<'a> {
             TypeRef::ImplTrait(bounds) => {
                 match self.impl_trait_mode.mode {
                     ImplTraitLoweringMode::Opaque => {
-                        let origin = match self.resolver.generic_def() {
-                            Some(GenericDefId::FunctionId(it)) => Either::Left(it),
-                            Some(GenericDefId::TypeAliasId(it)) => Either::Right(it),
+                        let origin = match self.def {
+                            GenericDefId::FunctionId(it) => Either::Left(it),
+                            GenericDefId::TypeAliasId(it) => Either::Right(it),
                             _ => panic!(
                                 "opaque impl trait lowering must be in function or type alias"
                             ),
@@ -370,144 +439,16 @@ impl<'a> TyLoweringContext<'a> {
                             |a| ImplTraitId::TypeAliasImplTrait(a, idx),
                         );
                         let opaque_ty_id = self.db.intern_impl_trait_id(impl_trait_id).into();
-                        let generics =
-                            generics(self.db.upcast(), origin.either(|f| f.into(), |a| a.into()));
+                        let generics = generics(self.db, origin.either(|f| f.into(), |a| a.into()));
                         let parameters = generics.bound_vars_subst(self.db, self.in_binders);
                         TyKind::OpaqueType(opaque_ty_id, parameters).intern(Interner)
                     }
-                    ImplTraitLoweringMode::Param => {
-                        let idx = self.impl_trait_mode.param_and_variable_counter;
-                        // Count the number of `impl Trait` things that appear within our bounds.
-                        // Since those have been emitted as implicit type args already.
-                        self.impl_trait_mode.param_and_variable_counter =
-                            idx + self.count_impl_traits(type_ref_id) as u16;
-                        let db = self.db;
-                        let kind = self
-                            .generics()
-                            .expect("param impl trait lowering must be in a generic def")
-                            .iter()
-                            .filter_map(|(id, data)| match (id, data) {
-                                (
-                                    GenericParamId::TypeParamId(id),
-                                    GenericParamDataRef::TypeParamData(data),
-                                ) if data.provenance == TypeParamProvenance::ArgumentImplTrait => {
-                                    Some(id)
-                                }
-                                _ => None,
-                            })
-                            .nth(idx as usize)
-                            .map_or(TyKind::Error, |id| {
-                                TyKind::Placeholder(to_placeholder_idx(db, id.into()))
-                            });
-                        kind.intern(Interner)
-                    }
-                    ImplTraitLoweringMode::Variable => {
-                        let idx = self.impl_trait_mode.param_and_variable_counter;
-                        // Count the number of `impl Trait` things that appear within our bounds.
-                        // Since t hose have been emitted as implicit type args already.
-                        self.impl_trait_mode.param_and_variable_counter =
-                            idx + self.count_impl_traits(type_ref_id) as u16;
-                        let debruijn = self.in_binders;
-                        let kind = self
-                            .generics()
-                            .expect("variable impl trait lowering must be in a generic def")
-                            .iter()
-                            .enumerate()
-                            .filter_map(|(i, (id, data))| match (id, data) {
-                                (
-                                    GenericParamId::TypeParamId(_),
-                                    GenericParamDataRef::TypeParamData(data),
-                                ) if data.provenance == TypeParamProvenance::ArgumentImplTrait => {
-                                    Some(i)
-                                }
-                                _ => None,
-                            })
-                            .nth(idx as usize)
-                            .map_or(TyKind::Error, |id| {
-                                TyKind::BoundVar(BoundVar { debruijn, index: id })
-                            });
-                        kind.intern(Interner)
-                    }
                     ImplTraitLoweringMode::Disallowed => {
                         // FIXME: report error
                         TyKind::Error.intern(Interner)
                     }
                 }
             }
-            TypeRef::Macro(macro_call) => {
-                let (expander, recursion_start) = {
-                    match &mut self.expander {
-                        // There already is an expander here, this means we are already recursing
-                        Some(expander) => (expander, false),
-                        // No expander was created yet, so we are at the start of the expansion recursion
-                        // and therefore have to create an expander.
-                        None => {
-                            let expander = self.expander.insert(Expander::new(
-                                self.db.upcast(),
-                                macro_call.file_id,
-                                self.resolver.module(),
-                            ));
-                            (expander, true)
-                        }
-                    }
-                };
-                let ty = {
-                    let macro_call = macro_call.to_node(self.db.upcast());
-                    let resolver = |path: &_| {
-                        self.resolver
-                            .resolve_path_as_macro(self.db.upcast(), path, Some(MacroSubNs::Bang))
-                            .map(|(it, _)| it)
-                    };
-                    match expander.enter_expand::<ast::Type>(self.db.upcast(), macro_call, resolver)
-                    {
-                        Ok(ExpandResult { value: Some((mark, expanded)), .. }) => {
-                            let (mut types_map, mut types_source_map) =
-                                (TypesMap::default(), TypesSourceMap::default());
-
-                            let mut ctx = expander.ctx(
-                                self.db.upcast(),
-                                &mut types_map,
-                                &mut types_source_map,
-                            );
-                            // FIXME: Report syntax errors in expansion here
-                            let type_ref = TypeRef::from_ast(&mut ctx, expanded.tree());
-
-                            // Can't mutate `self`, must create a new instance, because of the lifetimes.
-                            let mut inner_ctx = TyLoweringContext {
-                                db: self.db,
-                                resolver: self.resolver,
-                                generics: self.generics.clone(),
-                                types_map: &types_map,
-                                types_source_map: Some(&types_source_map),
-                                in_binders: self.in_binders,
-                                owner: self.owner,
-                                type_param_mode: self.type_param_mode,
-                                impl_trait_mode: mem::take(&mut self.impl_trait_mode),
-                                expander: self.expander.take(),
-                                unsized_types: mem::take(&mut self.unsized_types),
-                                diagnostics: mem::take(&mut self.diagnostics),
-                            };
-
-                            let ty = inner_ctx.lower_ty(type_ref);
-
-                            self.impl_trait_mode = inner_ctx.impl_trait_mode;
-                            self.expander = inner_ctx.expander;
-                            self.unsized_types = inner_ctx.unsized_types;
-                            self.diagnostics = inner_ctx.diagnostics;
-
-                            self.expander.as_mut().unwrap().exit(mark);
-                            Some(ty)
-                        }
-                        _ => None,
-                    }
-                };
-
-                // drop the expander, resetting it to pre-recursion state
-                if recursion_start {
-                    self.expander = None;
-                }
-                ty.unwrap_or_else(|| TyKind::Error.intern(Interner))
-            }
             TypeRef::Error => TyKind::Error.intern(Interner),
         };
         (ty, res)
@@ -517,9 +458,10 @@ impl<'a> TyLoweringContext<'a> {
     /// lower the self types of the predicates since that could lead to cycles.
     /// So we just check here if the `type_ref` resolves to a generic param, and which.
     fn lower_ty_only_param(&mut self, type_ref_id: TypeRefId) -> Option<TypeOrConstParamId> {
-        let type_ref = &self.types_map[type_ref_id];
+        let type_ref = &self.store[type_ref_id];
         let path = match type_ref {
             TypeRef::Path(path) => path,
+            &TypeRef::TypeParam(idx) => return Some(idx.into()),
             _ => return None,
         };
         if path.type_anchor().is_some() {
@@ -555,7 +497,7 @@ impl<'a> TyLoweringContext<'a> {
         PathLoweringContext::new(
             self,
             Self::on_path_diagnostic_callback(path_id.type_ref()),
-            &self.types_map[path_id],
+            &self.store[path_id],
         )
     }
 
@@ -564,7 +506,7 @@ impl<'a> TyLoweringContext<'a> {
         if let Some(type_ref) = path.type_anchor() {
             let (ty, res) = self.lower_ty_ext(type_ref);
             let mut ctx = self.at_path(path_id);
-            return ctx.lower_ty_relative_path(ty, res);
+            return ctx.lower_ty_relative_path(ty, res, false);
         }
 
         let mut ctx = self.at_path(path_id);
@@ -594,7 +536,7 @@ impl<'a> TyLoweringContext<'a> {
             TypeNs::TraitId(tr) => tr,
             _ => return None,
         };
-        Some((ctx.lower_trait_ref_from_resolved_path(resolved, explicit_self_ty), ctx))
+        Some((ctx.lower_trait_ref_from_resolved_path(resolved, explicit_self_ty, false), ctx))
     }
 
     fn lower_trait_ref(
@@ -605,36 +547,21 @@ impl<'a> TyLoweringContext<'a> {
         self.lower_trait_ref_from_path(trait_ref.path, explicit_self_ty).map(|it| it.0)
     }
 
+    /// When lowering predicates from a parent (impl, trait) for child defs (fns, consts, types), `generics` should
+    /// contain the `Generics` of the **child**, while `predicate_owner` should contain the `GenericDefId` of the
+    /// **parent**. This is important so that we generate the correct bound vars/placeholders.
     pub(crate) fn lower_where_predicate<'b>(
         &'b mut self,
         where_predicate: &'b WherePredicate,
-        &def: &GenericDefId,
         ignore_bindings: bool,
     ) -> impl Iterator<Item = QuantifiedWhereClause> + use<'a, 'b> {
         match where_predicate {
             WherePredicate::ForLifetime { target, bound, .. }
             | WherePredicate::TypeBound { target, bound } => {
-                let self_ty = match target {
-                    WherePredicateTypeTarget::TypeRef(type_ref) => self.lower_ty(*type_ref),
-                    &WherePredicateTypeTarget::TypeOrConstParam(local_id) => {
-                        let param_id = hir_def::TypeOrConstParamId { parent: def, local_id };
-                        match self.type_param_mode {
-                            ParamLoweringMode::Placeholder => {
-                                TyKind::Placeholder(to_placeholder_idx(self.db, param_id))
-                            }
-                            ParamLoweringMode::Variable => {
-                                let idx = generics(self.db.upcast(), def)
-                                    .type_or_const_param_idx(param_id)
-                                    .expect("matching generics");
-                                TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, idx))
-                            }
-                        }
-                        .intern(Interner)
-                    }
-                };
+                let self_ty = self.lower_ty(*target);
                 Either::Left(self.lower_type_bound(bound, self_ty, ignore_bindings))
             }
-            WherePredicate::Lifetime { bound, target } => Either::Right(iter::once(
+            &WherePredicate::Lifetime { bound, target } => Either::Right(iter::once(
                 crate::wrap_empty_binders(WhereClause::LifetimeOutlives(LifetimeOutlives {
                     a: self.lower_lifetime(bound),
                     b: self.lower_lifetime(target),
@@ -657,8 +584,7 @@ impl<'a> TyLoweringContext<'a> {
                 // FIXME Don't silently drop the hrtb lifetimes here
                 if let Some((trait_ref, ctx)) = self.lower_trait_ref_from_path(path, self_ty) {
                     if !ignore_bindings {
-                        assoc_bounds =
-                            ctx.assoc_type_bindings_from_type_bound(bound, trait_ref.clone());
+                        assoc_bounds = ctx.assoc_type_bindings_from_type_bound(trait_ref.clone());
                     }
                     clause = Some(crate::wrap_empty_binders(WhereClause::Implemented(trait_ref)));
                 }
@@ -678,7 +604,7 @@ impl<'a> TyLoweringContext<'a> {
                     self.unsized_types.insert(self_ty);
                 }
             }
-            TypeBound::Lifetime(l) => {
+            &TypeBound::Lifetime(l) => {
                 let lifetime = self.lower_lifetime(l);
                 clause = Some(crate::wrap_empty_binders(WhereClause::TypeOutlives(TypeOutlives {
                     ty: self_ty,
@@ -725,15 +651,15 @@ impl<'a> TyLoweringContext<'a> {
                         let lhs_id = lhs.trait_id;
                         let lhs_is_auto = ctx
                             .db
-                            .trait_data(from_chalk_trait_id(lhs_id))
+                            .trait_signature(from_chalk_trait_id(lhs_id))
                             .flags
-                            .contains(TraitFlags::IS_AUTO);
+                            .contains(TraitFlags::AUTO);
                         let rhs_id = rhs.trait_id;
                         let rhs_is_auto = ctx
                             .db
-                            .trait_data(from_chalk_trait_id(rhs_id))
+                            .trait_signature(from_chalk_trait_id(rhs_id))
                             .flags
-                            .contains(TraitFlags::IS_AUTO);
+                            .contains(TraitFlags::AUTO);
 
                         if !lhs_is_auto && !rhs_is_auto {
                             multiple_regular_traits = true;
@@ -800,7 +726,7 @@ impl<'a> TyLoweringContext<'a> {
         }
     }
 
-    fn lower_impl_trait(&mut self, bounds: &[TypeBound], krate: CrateId) -> ImplTrait {
+    fn lower_impl_trait(&mut self, bounds: &[TypeBound], krate: Crate) -> ImplTrait {
         cov_mark::hit!(lower_rpit);
         let self_ty = TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0)).intern(Interner);
         let predicates = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
@@ -829,8 +755,8 @@ impl<'a> TyLoweringContext<'a> {
         ImplTrait { bounds: crate::make_single_type_binders(predicates) }
     }
 
-    pub fn lower_lifetime(&self, lifetime: &LifetimeRef) -> Lifetime {
-        match self.resolver.resolve_lifetime(lifetime) {
+    pub fn lower_lifetime(&self, lifetime: LifetimeRefId) -> Lifetime {
+        match self.resolver.resolve_lifetime(&self.store[lifetime]) {
             Some(resolution) => match resolution {
                 LifetimeNs::Static => static_lifetime(),
                 LifetimeNs::LifetimeParam(id) => match self.type_param_mode {
@@ -838,8 +764,7 @@ impl<'a> TyLoweringContext<'a> {
                         LifetimeData::Placeholder(lt_to_placeholder_idx(self.db, id))
                     }
                     ParamLoweringMode::Variable => {
-                        let generics = self.generics().expect("generics in scope");
-                        let idx = match generics.lifetime_idx(id) {
+                        let idx = match self.generics().lifetime_idx(id) {
                             None => return error_lifetime(),
                             Some(idx) => idx,
                         };
@@ -852,21 +777,10 @@ impl<'a> TyLoweringContext<'a> {
             None => error_lifetime(),
         }
     }
-
-    // FIXME: This does not handle macros!
-    fn count_impl_traits(&self, type_ref: TypeRefId) -> usize {
-        let mut count = 0;
-        TypeRef::walk(type_ref, self.types_map, &mut |type_ref| {
-            if matches!(type_ref, TypeRef::ImplTrait(_)) {
-                count += 1;
-            }
-        });
-        count
-    }
 }
 
 /// Build the signature of a callable item (function, struct or enum variant).
-pub(crate) fn callable_item_sig(db: &dyn HirDatabase, def: CallableDefId) -> PolyFnSig {
+pub(crate) fn callable_item_signature_query(db: &dyn HirDatabase, def: CallableDefId) -> PolyFnSig {
     match def {
         CallableDefId::FunctionId(f) => fn_sig_for_fn(db, f),
         CallableDefId::StructId(s) => fn_sig_for_struct_constructor(db, s),
@@ -897,7 +811,7 @@ fn named_associated_type_shorthand_candidates<R>(
 ) -> Option<R> {
     let mut search = |t| {
         all_super_trait_refs(db, t, |t| {
-            let data = db.trait_data(t.hir_trait_id());
+            let data = db.trait_items(t.hir_trait_id());
 
             for (name, assoc_id) in &data.items {
                 if let AssocItemId::TypeAliasId(alias) = assoc_id {
@@ -918,14 +832,8 @@ fn named_associated_type_shorthand_candidates<R>(
 
             let impl_id_as_generic_def: GenericDefId = impl_id.into();
             if impl_id_as_generic_def != def {
-                // `trait_ref` contains `BoundVar`s bound by impl's `Binders`, but here we need
-                // `BoundVar`s from `def`'s point of view.
-                // FIXME: A `HirDatabase` query may be handy if this process is needed in more
-                // places. It'd be almost identical as `impl_trait_query` where `resolver` would be
-                // of `def` instead of `impl_id`.
-                let starting_idx = generics(db.upcast(), def).len_self();
                 let subst = TyBuilder::subst_for_def(db, impl_id, None)
-                    .fill_with_bound_vars(DebruijnIndex::INNERMOST, starting_idx)
+                    .fill_with_bound_vars(DebruijnIndex::INNERMOST, 0)
                     .build();
                 let trait_ref = subst.apply(trait_ref, Interner);
                 search(trait_ref)
@@ -949,18 +857,10 @@ fn named_associated_type_shorthand_candidates<R>(
             }
             // Handle `Self::Type` referring to own associated type in trait definitions
             if let GenericDefId::TraitId(trait_id) = param_id.parent() {
-                let trait_generics = generics(db.upcast(), trait_id.into());
+                let trait_generics = generics(db, trait_id.into());
                 if trait_generics[param_id.local_id()].is_trait_self() {
-                    let def_generics = generics(db.upcast(), def);
-                    let starting_idx = match def {
-                        GenericDefId::TraitId(_) => 0,
-                        // `def` is an item within trait. We need to substitute `BoundVar`s but
-                        // remember that they are for parent (i.e. trait) generic params so they
-                        // come after our own params.
-                        _ => def_generics.len_self(),
-                    };
                     let trait_ref = TyBuilder::trait_ref(db, trait_id)
-                        .fill_with_bound_vars(DebruijnIndex::INNERMOST, starting_idx)
+                        .fill_with_bound_vars(DebruijnIndex::INNERMOST, 0)
                         .build();
                     return search(trait_ref);
                 }
@@ -989,18 +889,22 @@ pub(crate) fn field_types_with_diagnostics_query(
     db: &dyn HirDatabase,
     variant_id: VariantId,
 ) -> (Arc<ArenaMap<LocalFieldId, Binders<Ty>>>, Diagnostics) {
-    let var_data = variant_id.variant_data(db.upcast());
+    let var_data = db.variant_fields(variant_id);
     let (resolver, def): (_, GenericDefId) = match variant_id {
-        VariantId::StructId(it) => (it.resolver(db.upcast()), it.into()),
-        VariantId::UnionId(it) => (it.resolver(db.upcast()), it.into()),
-        VariantId::EnumVariantId(it) => {
-            (it.resolver(db.upcast()), it.lookup(db.upcast()).parent.into())
-        }
+        VariantId::StructId(it) => (it.resolver(db), it.into()),
+        VariantId::UnionId(it) => (it.resolver(db), it.into()),
+        VariantId::EnumVariantId(it) => (it.resolver(db), it.lookup(db).parent.into()),
     };
-    let generics = generics(db.upcast(), def);
+    let generics = generics(db, def);
     let mut res = ArenaMap::default();
-    let mut ctx = TyLoweringContext::new(db, &resolver, var_data.types_map(), def.into())
-        .with_type_param_mode(ParamLoweringMode::Variable);
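+    // Lifetimes in field types cannot be elided, so anonymous/elided lifetimes are
+    // reported as errors here.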
+    let mut ctx = TyLoweringContext::new(
+        db,
+        &resolver,
+        &var_data.store,
+        def,
+        LifetimeElisionKind::AnonymousReportError,
+    )
+    .with_type_param_mode(ParamLoweringMode::Variable);
     for (field_id, field_data) in var_data.fields().iter() {
         res.insert(field_id, make_binders(db, &generics, ctx.lower_ty(field_data.type_ref)));
     }
@@ -1021,34 +925,26 @@ pub(crate) fn generic_predicates_for_param_query(
     param_id: TypeOrConstParamId,
     assoc_name: Option<Name>,
 ) -> GenericPredicates {
-    let resolver = def.resolver(db.upcast());
-    let mut ctx = if let GenericDefId::FunctionId(_) = def {
-        TyLoweringContext::new(db, &resolver, TypesMap::EMPTY, def.into())
-            .with_impl_trait_mode(ImplTraitLoweringMode::Variable)
-            .with_type_param_mode(ParamLoweringMode::Variable)
-    } else {
-        TyLoweringContext::new(db, &resolver, TypesMap::EMPTY, def.into())
-            .with_type_param_mode(ParamLoweringMode::Variable)
-    };
-    let generics = generics(db.upcast(), def);
+    let generics = generics(db, def);
+    let resolver = def.resolver(db);
+    let mut ctx = TyLoweringContext::new(
+        db,
+        &resolver,
+        generics.store(),
+        def,
+        LifetimeElisionKind::AnonymousReportError,
+    )
+    .with_type_param_mode(ParamLoweringMode::Variable);
 
     // we have to filter out all other predicates *first*, before attempting to lower them
-    let predicate = |pred: &_, def: &_, ctx: &mut TyLoweringContext<'_>| match pred {
+    let predicate = |pred: &_, ctx: &mut TyLoweringContext<'_>| match pred {
         WherePredicate::ForLifetime { target, bound, .. }
         | WherePredicate::TypeBound { target, bound, .. } => {
-            let invalid_target = match target {
-                WherePredicateTypeTarget::TypeRef(type_ref) => {
-                    ctx.lower_ty_only_param(*type_ref) != Some(param_id)
-                }
-                &WherePredicateTypeTarget::TypeOrConstParam(local_id) => {
-                    let target_id = TypeOrConstParamId { parent: *def, local_id };
-                    target_id != param_id
-                }
-            };
+            let invalid_target = ctx.lower_ty_only_param(*target) != Some(param_id);
             if invalid_target {
                 // If this is filtered out without lowering, `?Sized` is not gathered into `ctx.unsized_types`
                 if let TypeBound::Path(_, TraitBoundModifier::Maybe) = bound {
-                    ctx.lower_where_predicate(pred, def, true).for_each(drop);
+                    ctx.lower_where_predicate(pred, true).for_each(drop);
                 }
                 return false;
             }
@@ -1057,17 +953,17 @@ pub(crate) fn generic_predicates_for_param_query(
                 &TypeBound::ForLifetime(_, path) | &TypeBound::Path(path, _) => {
                     // Only lower the bound if the trait could possibly define the associated
                     // type we're looking for.
-                    let path = &ctx.types_map[path];
+                    let path = &ctx.store[path];
 
                     let Some(assoc_name) = &assoc_name else { return true };
                     let Some(TypeNs::TraitId(tr)) =
-                        resolver.resolve_path_in_type_ns_fully(db.upcast(), path)
+                        resolver.resolve_path_in_type_ns_fully(db, path)
                     else {
                         return false;
                     };
 
-                    all_super_traits(db.upcast(), tr).iter().any(|tr| {
-                        db.trait_data(*tr).items.iter().any(|(name, item)| {
+                    all_super_traits(db, tr).iter().any(|tr| {
+                        db.trait_items(*tr).items.iter().any(|(name, item)| {
                             matches!(item, AssocItemId::TypeAliasId(_)) && name == assoc_name
                         })
                     })
@@ -1078,13 +974,14 @@ pub(crate) fn generic_predicates_for_param_query(
         WherePredicate::Lifetime { .. } => false,
     };
     let mut predicates = Vec::new();
-    for (params, def) in resolver.all_generic_params() {
-        ctx.types_map = &params.types_map;
-        for pred in params.where_predicates() {
-            if predicate(pred, def, &mut ctx) {
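+    // Walk the item's own generics first and then each parent's (e.g. the impl or trait
+    // containing an associated item), pointing `ctx.store` at the expression store that
+    // owns the predicates currently being lowered.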
+    for maybe_parent_generics in
+        std::iter::successors(Some(&generics), |generics| generics.parent_generics())
+    {
+        ctx.store = maybe_parent_generics.store();
+        for pred in maybe_parent_generics.where_predicates() {
+            if predicate(pred, &mut ctx) {
                 predicates.extend(
-                    ctx.lower_where_predicate(pred, def, true)
-                        .map(|p| make_binders(db, &generics, p)),
+                    ctx.lower_where_predicate(pred, true).map(|p| make_binders(db, &generics, p)),
                 );
             }
         }
@@ -1109,12 +1006,11 @@ pub(crate) fn generic_predicates_for_param_query(
     GenericPredicates(predicates.is_empty().not().then(|| predicates.into()))
 }
 
-pub(crate) fn generic_predicates_for_param_recover(
+pub(crate) fn generic_predicates_for_param_cycle_result(
     _db: &dyn HirDatabase,
-    _cycle: &Cycle,
-    _def: &GenericDefId,
-    _param_id: &TypeOrConstParamId,
-    _assoc_name: &Option<Name>,
+    _def: GenericDefId,
+    _param_id: TypeOrConstParamId,
+    _assoc_name: Option<Name>,
 ) -> GenericPredicates {
     GenericPredicates(None)
 }
@@ -1123,8 +1019,8 @@ pub(crate) fn trait_environment_for_body_query(
     db: &dyn HirDatabase,
     def: DefWithBodyId,
 ) -> Arc<TraitEnvironment> {
-    let Some(def) = def.as_generic_def_id(db.upcast()) else {
-        let krate = def.module(db.upcast()).krate();
+    let Some(def) = def.as_generic_def_id(db) else {
+        let krate = def.module(db).krate();
         return TraitEnvironment::empty(krate);
     };
     db.trait_environment(def)
@@ -1134,21 +1030,24 @@ pub(crate) fn trait_environment_query(
     db: &dyn HirDatabase,
     def: GenericDefId,
 ) -> Arc<TraitEnvironment> {
-    let resolver = def.resolver(db.upcast());
-    let mut ctx = if let GenericDefId::FunctionId(_) = def {
-        TyLoweringContext::new(db, &resolver, TypesMap::EMPTY, def.into())
-            .with_impl_trait_mode(ImplTraitLoweringMode::Param)
-            .with_type_param_mode(ParamLoweringMode::Placeholder)
-    } else {
-        TyLoweringContext::new(db, &resolver, TypesMap::EMPTY, def.into())
-            .with_type_param_mode(ParamLoweringMode::Placeholder)
-    };
+    let generics = generics(db, def);
+    let resolver = def.resolver(db);
+    let mut ctx = TyLoweringContext::new(
+        db,
+        &resolver,
+        generics.store(),
+        def,
+        LifetimeElisionKind::AnonymousReportError,
+    )
+    .with_type_param_mode(ParamLoweringMode::Placeholder);
     let mut traits_in_scope = Vec::new();
     let mut clauses = Vec::new();
-    for (params, def) in resolver.all_generic_params() {
-        ctx.types_map = &params.types_map;
-        for pred in params.where_predicates() {
-            for pred in ctx.lower_where_predicate(pred, def, false) {
+    for maybe_parent_generics in
+        std::iter::successors(Some(&generics), |generics| generics.parent_generics())
+    {
+        ctx.store = maybe_parent_generics.store();
+        for pred in maybe_parent_generics.where_predicates() {
+            for pred in ctx.lower_where_predicate(pred, false) {
                 if let WhereClause::Implemented(tr) = pred.skip_binders() {
                     traits_in_scope
                         .push((tr.self_type_parameter(Interner).clone(), tr.hir_trait_id()));
@@ -1159,7 +1058,7 @@ pub(crate) fn trait_environment_query(
         }
     }
 
-    if let Some(trait_id) = def.assoc_trait_container(db.upcast()) {
+    if let Some(trait_id) = def.assoc_trait_container(db) {
         // add `Self: Trait<T1, T2, ...>` to the environment in trait
         // function default implementations (and speculative code
         // inside consts or type aliases)
@@ -1170,7 +1069,7 @@ pub(crate) fn trait_environment_query(
         clauses.push(pred.cast::<ProgramClause>(Interner).into_from_env_clause(Interner));
     }
 
-    let subst = generics(db.upcast(), def).placeholder_subst(db);
+    let subst = generics.placeholder_subst(db);
     if !subst.is_empty(Interner) {
         let explicitly_unsized_tys = ctx.unsized_types;
         if let Some(implicitly_sized_clauses) =
@@ -1221,7 +1120,7 @@ pub(crate) fn generic_predicates_without_parent_with_diagnostics_query(
     db: &dyn HirDatabase,
     def: GenericDefId,
 ) -> (GenericPredicates, Diagnostics) {
-    generic_predicates_filtered_by(db, def, |_, d| *d == def)
+    generic_predicates_filtered_by(db, def, |_, d| d == def)
 }
 
 /// Resolve the where clause(s) of an item with generics,
@@ -1232,28 +1131,30 @@ fn generic_predicates_filtered_by<F>(
     filter: F,
 ) -> (GenericPredicates, Diagnostics)
 where
-    F: Fn(&WherePredicate, &GenericDefId) -> bool,
+    F: Fn(&WherePredicate, GenericDefId) -> bool,
 {
-    let resolver = def.resolver(db.upcast());
-    let (impl_trait_lowering, param_lowering) = match def {
-        GenericDefId::FunctionId(_) => {
-            (ImplTraitLoweringMode::Variable, ParamLoweringMode::Variable)
-        }
-        _ => (ImplTraitLoweringMode::Disallowed, ParamLoweringMode::Variable),
-    };
-    let mut ctx = TyLoweringContext::new(db, &resolver, TypesMap::EMPTY, def.into())
-        .with_impl_trait_mode(impl_trait_lowering)
-        .with_type_param_mode(param_lowering);
-    let generics = generics(db.upcast(), def);
+    let generics = generics(db, def);
+    let resolver = def.resolver(db);
+    let mut ctx = TyLoweringContext::new(
+        db,
+        &resolver,
+        generics.store(),
+        def,
+        LifetimeElisionKind::AnonymousReportError,
+    )
+    .with_type_param_mode(ParamLoweringMode::Variable);
 
     let mut predicates = Vec::new();
-    for (params, def) in resolver.all_generic_params() {
-        ctx.types_map = &params.types_map;
-        for pred in params.where_predicates() {
-            if filter(pred, def) {
+    for maybe_parent_generics in
+        std::iter::successors(Some(&generics), |generics| generics.parent_generics())
+    {
+        ctx.store = maybe_parent_generics.store();
+        for pred in maybe_parent_generics.where_predicates() {
+            if filter(pred, maybe_parent_generics.def()) {
+                // We deliberately use `generics` and not `maybe_parent_generics` here. This is not a mistake!
+                // If we used the parent generics, the binders would only cover the parent's own
+                // parameters, but these predicates are stored and later substituted from `def`'s
+                // point of view, so they must be wrapped in binders for `def`'s full generics.
                 predicates.extend(
-                    ctx.lower_where_predicate(pred, def, false)
-                        .map(|p| make_binders(db, &generics, p)),
+                    ctx.lower_where_predicate(pred, false).map(|p| make_binders(db, &generics, p)),
                 );
             }
         }
@@ -1271,6 +1172,7 @@ where
             );
         };
     }
+
     (
         GenericPredicates(predicates.is_empty().not().then(|| predicates.into())),
         create_diagnostics(ctx.diagnostics),
@@ -1290,7 +1192,7 @@ fn implicitly_sized_clauses<'a, 'subst: 'a>(
         .lang_item(resolver.krate(), LangItem::Sized)
         .and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id))?;
 
-    let trait_self_idx = trait_self_param_idx(db.upcast(), def);
+    let trait_self_idx = trait_self_param_idx(db, def);
 
     Some(
         substitution
@@ -1298,11 +1200,7 @@ fn implicitly_sized_clauses<'a, 'subst: 'a>(
             .enumerate()
             .filter_map(
                 move |(idx, generic_arg)| {
-                    if Some(idx) == trait_self_idx {
-                        None
-                    } else {
-                        Some(generic_arg)
-                    }
+                    if Some(idx) == trait_self_idx { None } else { Some(generic_arg) }
                 },
             )
             .filter_map(|generic_arg| generic_arg.ty(Interner))
@@ -1338,35 +1236,46 @@ pub(crate) fn generic_defaults_with_diagnostics_query(
     db: &dyn HirDatabase,
     def: GenericDefId,
 ) -> (GenericDefaults, Diagnostics) {
-    let generic_params = generics(db.upcast(), def);
+    let generic_params = generics(db, def);
     if generic_params.len() == 0 {
         return (GenericDefaults(None), None);
     }
-    let resolver = def.resolver(db.upcast());
-    let parent_start_idx = generic_params.len_self();
+    let resolver = def.resolver(db);
 
-    let mut ctx =
-        TyLoweringContext::new(db, &resolver, generic_params.self_types_map(), def.into())
-            .with_impl_trait_mode(ImplTraitLoweringMode::Disallowed)
-            .with_type_param_mode(ParamLoweringMode::Variable);
+    let mut ctx = TyLoweringContext::new(
+        db,
+        &resolver,
+        generic_params.store(),
+        def,
+        LifetimeElisionKind::AnonymousReportError,
+    )
+    .with_impl_trait_mode(ImplTraitLoweringMode::Disallowed)
+    .with_type_param_mode(ParamLoweringMode::Variable);
     let mut idx = 0;
+    let mut has_any_default = false;
     let mut defaults = generic_params
-        .iter_self()
-        .map(|(id, p)| {
-            let result =
-                handle_generic_param(&mut ctx, idx, id, p, parent_start_idx, &generic_params);
+        .iter_parents_with_store()
+        .map(|((id, p), store)| {
+            ctx.store = store;
+            let (result, has_default) = handle_generic_param(&mut ctx, idx, id, p, &generic_params);
+            has_any_default |= has_default;
             idx += 1;
             result
         })
         .collect::<Vec<_>>();
-    let diagnostics = create_diagnostics(mem::take(&mut ctx.diagnostics));
-    defaults.extend(generic_params.iter_parents_with_types_map().map(|((id, p), types_map)| {
-        ctx.types_map = types_map;
-        let result = handle_generic_param(&mut ctx, idx, id, p, parent_start_idx, &generic_params);
+    ctx.diagnostics.clear(); // Don't include diagnostics from the parent.
+    defaults.extend(generic_params.iter_self().map(|(id, p)| {
+        let (result, has_default) = handle_generic_param(&mut ctx, idx, id, p, &generic_params);
+        has_any_default |= has_default;
         idx += 1;
         result
     }));
-    let defaults = GenericDefaults(Some(Arc::from_iter(defaults)));
+    let diagnostics = create_diagnostics(mem::take(&mut ctx.diagnostics));
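+    // Only allocate storage when at least one parameter actually declares a default;
+    // `GenericDefaults(None)` is the compact representation for "no defaults".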
+    let defaults = if has_any_default {
+        GenericDefaults(Some(Arc::from_iter(defaults)))
+    } else {
+        GenericDefaults(None)
+    };
     return (defaults, diagnostics);
 
     fn handle_generic_param(
@@ -1374,18 +1283,21 @@ pub(crate) fn generic_defaults_with_diagnostics_query(
         idx: usize,
         id: GenericParamId,
         p: GenericParamDataRef<'_>,
-        parent_start_idx: usize,
         generic_params: &Generics,
-    ) -> Binders<crate::GenericArg> {
+    ) -> (Binders<crate::GenericArg>, bool) {
+        let binders = variable_kinds_from_iter(ctx.db, generic_params.iter_id().take(idx));
         match p {
             GenericParamDataRef::TypeParamData(p) => {
-                let ty = p.default.as_ref().map_or(TyKind::Error.intern(Interner), |ty| {
-                    // Each default can only refer to previous parameters.
-                    // Type variable default referring to parameter coming
-                    // after it is forbidden (FIXME: report diagnostic)
-                    fallback_bound_vars(ctx.lower_ty(*ty), idx, parent_start_idx)
-                });
-                crate::make_binders(ctx.db, generic_params, ty.cast(Interner))
+                let ty = p.default.as_ref().map_or_else(
+                    || TyKind::Error.intern(Interner),
+                    |ty| {
+                        // Each default can only refer to previous parameters.
+                        // A default referring to a parameter that comes after
+                        // it is forbidden (FIXME: report a diagnostic).
+                        fallback_bound_vars(ctx.lower_ty(*ty), idx)
+                    },
+                );
+                (Binders::new(binders, ty.cast(Interner)), p.default.is_some())
             }
             GenericParamDataRef::ConstParamData(p) => {
                 let GenericParamId::ConstParamId(id) = id else {
@@ -1401,50 +1313,52 @@ pub(crate) fn generic_defaults_with_diagnostics_query(
                     },
                 );
                 // Each default can only refer to previous parameters, see above.
-                val = fallback_bound_vars(val, idx, parent_start_idx);
-                make_binders(ctx.db, generic_params, val)
+                val = fallback_bound_vars(val, idx);
+                (Binders::new(binders, val), p.default.is_some())
             }
             GenericParamDataRef::LifetimeParamData(_) => {
-                make_binders(ctx.db, generic_params, error_lifetime().cast(Interner))
+                (Binders::new(binders, error_lifetime().cast(Interner)), false)
             }
         }
     }
 }
 
-pub(crate) fn generic_defaults_with_diagnostics_recover(
-    db: &dyn HirDatabase,
-    _cycle: &Cycle,
-    def: &GenericDefId,
+pub(crate) fn generic_defaults_with_diagnostics_cycle_result(
+    _db: &dyn HirDatabase,
+    _def: GenericDefId,
 ) -> (GenericDefaults, Diagnostics) {
-    let generic_params = generics(db.upcast(), *def);
-    if generic_params.len() == 0 {
-        return (GenericDefaults(None), None);
-    }
-    // FIXME: this code is not covered in tests.
-    // we still need one default per parameter
-    let defaults = GenericDefaults(Some(Arc::from_iter(generic_params.iter_id().map(|id| {
-        let val = match id {
-            GenericParamId::TypeParamId(_) => TyKind::Error.intern(Interner).cast(Interner),
-            GenericParamId::ConstParamId(id) => unknown_const_as_generic(db.const_param_ty(id)),
-            GenericParamId::LifetimeParamId(_) => error_lifetime().cast(Interner),
-        };
-        crate::make_binders(db, &generic_params, val)
-    }))));
-    (defaults, None)
+    (GenericDefaults(None), None)
 }
 
 fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig {
-    let data = db.function_data(def);
-    let resolver = def.resolver(db.upcast());
-    let mut ctx_params = TyLoweringContext::new(db, &resolver, &data.types_map, def.into())
-        .with_impl_trait_mode(ImplTraitLoweringMode::Variable)
-        .with_type_param_mode(ParamLoweringMode::Variable);
+    let data = db.function_signature(def);
+    let resolver = def.resolver(db);
+    let mut ctx_params = TyLoweringContext::new(
+        db,
+        &resolver,
+        &data.store,
+        def.into(),
+        LifetimeElisionKind::for_fn_params(&data),
+    )
+    .with_type_param_mode(ParamLoweringMode::Variable);
     let params = data.params.iter().map(|&tr| ctx_params.lower_ty(tr));
-    let mut ctx_ret = TyLoweringContext::new(db, &resolver, &data.types_map, def.into())
-        .with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
-        .with_type_param_mode(ParamLoweringMode::Variable);
-    let ret = ctx_ret.lower_ty(data.ret_type);
-    let generics = generics(db.upcast(), def.into());
+
+    let ret = match data.ret_type {
+        Some(ret_type) => {
+            let mut ctx_ret = TyLoweringContext::new(
+                db,
+                &resolver,
+                &data.store,
+                def.into(),
+                LifetimeElisionKind::for_fn_ret(),
+            )
+            .with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
+            .with_type_param_mode(ParamLoweringMode::Variable);
+            ctx_ret.lower_ty(ret_type)
+        }
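+        // No written return type means the function returns unit, i.e. `()`
+        // (the zero-length tuple below).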
+        None => TyKind::Tuple(0, Substitution::empty(Interner)).intern(Interner),
+    };
+    let generics = generics(db, def.into());
     let sig = CallableSig::from_params_and_return(
         params,
         ret,
@@ -1458,7 +1372,7 @@ fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig {
 /// Build the declared type of a function. This should not need to look at the
 /// function body.
 fn type_for_fn(db: &dyn HirDatabase, def: FunctionId) -> Binders<Ty> {
-    let generics = generics(db.upcast(), def.into());
+    let generics = generics(db, def.into());
     let substs = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
     make_binders(
         db,
@@ -1469,36 +1383,40 @@ fn type_for_fn(db: &dyn HirDatabase, def: FunctionId) -> Binders<Ty> {
 
 /// Build the declared type of a const.
 fn type_for_const(db: &dyn HirDatabase, def: ConstId) -> Binders<Ty> {
-    let data = db.const_data(def);
-    let generics = generics(db.upcast(), def.into());
-    let resolver = def.resolver(db.upcast());
-    let mut ctx = TyLoweringContext::new(db, &resolver, &data.types_map, def.into())
-        .with_type_param_mode(ParamLoweringMode::Variable);
+    let data = db.const_signature(def);
+    let generics = generics(db, def.into());
+    let resolver = def.resolver(db);
+    let parent = def.loc(db).container;
+    let mut ctx = TyLoweringContext::new(
+        db,
+        &resolver,
+        &data.store,
+        def.into(),
+        LifetimeElisionKind::for_const(parent),
+    )
+    .with_type_param_mode(ParamLoweringMode::Variable);
 
     make_binders(db, &generics, ctx.lower_ty(data.type_ref))
 }
 
 /// Build the declared type of a static.
 fn type_for_static(db: &dyn HirDatabase, def: StaticId) -> Binders<Ty> {
-    let data = db.static_data(def);
-    let resolver = def.resolver(db.upcast());
-    let mut ctx = TyLoweringContext::new(db, &resolver, &data.types_map, def.into());
+    let data = db.static_signature(def);
+    let resolver = def.resolver(db);
+    let mut ctx = TyLoweringContext::new(
+        db,
+        &resolver,
+        &data.store,
+        def.into(),
+        LifetimeElisionKind::Elided(static_lifetime()),
+    );
 
     Binders::empty(Interner, ctx.lower_ty(data.type_ref))
 }
 
 fn fn_sig_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> PolyFnSig {
-    let struct_data = db.struct_data(def);
-    let fields = struct_data.variant_data.fields();
-    let resolver = def.resolver(db.upcast());
-    let mut ctx = TyLoweringContext::new(
-        db,
-        &resolver,
-        struct_data.variant_data.types_map(),
-        AdtId::from(def).into(),
-    )
-    .with_type_param_mode(ParamLoweringMode::Variable);
-    let params = fields.iter().map(|(_, field)| ctx.lower_ty(field.type_ref));
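+    // Reuse the field types already lowered (and cached) by `field_types` instead of
+    // lowering each field's `TypeRef` again here.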
+    let field_tys = db.field_types(def.into());
+    let params = field_tys.iter().map(|(_, ty)| ty.skip_binders().clone());
     let (ret, binders) = type_for_adt(db, def.into()).into_value_and_skipped_binders();
     Binders::new(
         binders,
@@ -1508,12 +1426,12 @@ fn fn_sig_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> PolyFnS
 
 /// Build the type of a tuple struct constructor.
 fn type_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> Option<Binders<Ty>> {
-    let struct_data = db.struct_data(def);
-    match struct_data.variant_data.kind() {
-        StructKind::Record => None,
-        StructKind::Unit => Some(type_for_adt(db, def.into())),
-        StructKind::Tuple => {
-            let generics = generics(db.upcast(), AdtId::from(def).into());
+    let struct_data = db.variant_fields(def.into());
+    match struct_data.shape {
+        FieldsShape::Record => None,
+        FieldsShape::Unit => Some(type_for_adt(db, def.into())),
+        FieldsShape::Tuple => {
+            let generics = generics(db, AdtId::from(def).into());
             let substs = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
             Some(make_binders(
                 db,
@@ -1525,19 +1443,10 @@ fn type_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> Option<Bi
 }
 
 fn fn_sig_for_enum_variant_constructor(db: &dyn HirDatabase, def: EnumVariantId) -> PolyFnSig {
-    let var_data = db.enum_variant_data(def);
-    let fields = var_data.variant_data.fields();
-    let resolver = def.resolver(db.upcast());
-    let mut ctx = TyLoweringContext::new(
-        db,
-        &resolver,
-        var_data.variant_data.types_map(),
-        DefWithBodyId::VariantId(def).into(),
-    )
-    .with_type_param_mode(ParamLoweringMode::Variable);
-    let params = fields.iter().map(|(_, field)| ctx.lower_ty(field.type_ref));
-    let (ret, binders) =
-        type_for_adt(db, def.lookup(db.upcast()).parent.into()).into_value_and_skipped_binders();
+    let field_tys = db.field_types(def.into());
+    let params = field_tys.iter().map(|(_, ty)| ty.skip_binders().clone());
+    let parent = def.lookup(db).parent;
+    let (ret, binders) = type_for_adt(db, parent.into()).into_value_and_skipped_binders();
     Binders::new(
         binders,
         CallableSig::from_params_and_return(params, ret, false, Safety::Safe, FnAbi::RustCall),
@@ -1549,12 +1458,12 @@ fn type_for_enum_variant_constructor(
     db: &dyn HirDatabase,
     def: EnumVariantId,
 ) -> Option<Binders<Ty>> {
-    let e = def.lookup(db.upcast()).parent;
-    match db.enum_variant_data(def).variant_data.kind() {
-        StructKind::Record => None,
-        StructKind::Unit => Some(type_for_adt(db, e.into())),
-        StructKind::Tuple => {
-            let generics = generics(db.upcast(), e.into());
+    let e = def.lookup(db).parent;
+    match db.variant_fields(def.into()).shape {
+        FieldsShape::Record => None,
+        FieldsShape::Unit => Some(type_for_adt(db, e.into())),
+        FieldsShape::Tuple => {
+            let generics = generics(db, e.into());
             let substs = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
             Some(make_binders(
                 db,
@@ -1566,8 +1475,18 @@ fn type_for_enum_variant_constructor(
     }
 }
 
+#[salsa::tracked(cycle_result = type_for_adt_cycle_result)]
+fn type_for_adt_tracked(db: &dyn HirDatabase, adt: AdtId) -> Binders<Ty> {
+    type_for_adt(db, adt)
+}
+
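+/// Fallback value produced when `type_for_adt_tracked` ends up in a query cycle:
+/// an error type wrapped in the ADT's binders.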
+fn type_for_adt_cycle_result(db: &dyn HirDatabase, adt: AdtId) -> Binders<Ty> {
+    let generics = generics(db, adt.into());
+    make_binders(db, &generics, TyKind::Error.intern(Interner))
+}
+
 fn type_for_adt(db: &dyn HirDatabase, adt: AdtId) -> Binders<Ty> {
-    let generics = generics(db.upcast(), adt.into());
+    let generics = generics(db, adt.into());
     let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
     let ty = TyKind::Adt(crate::AdtId(adt), subst).intern(Interner);
     make_binders(db, &generics, ty)
@@ -1577,21 +1496,40 @@ pub(crate) fn type_for_type_alias_with_diagnostics_query(
     db: &dyn HirDatabase,
     t: TypeAliasId,
 ) -> (Binders<Ty>, Diagnostics) {
-    let generics = generics(db.upcast(), t.into());
-    let resolver = t.resolver(db.upcast());
-    let type_alias_data = db.type_alias_data(t);
-    let mut ctx = TyLoweringContext::new(db, &resolver, &type_alias_data.types_map, t.into())
-        .with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
-        .with_type_param_mode(ParamLoweringMode::Variable);
-    let inner = if type_alias_data.is_extern {
+    let generics = generics(db, t.into());
+    let type_alias_data = db.type_alias_signature(t);
+    let mut diags = None;
+    let inner = if type_alias_data.flags.contains(TypeAliasFlags::IS_EXTERN) {
         TyKind::Foreign(crate::to_foreign_def_id(t)).intern(Interner)
     } else {
-        type_alias_data
-            .type_ref
+        let resolver = t.resolver(db);
+        let alias = db.type_alias_signature(t);
+        let mut ctx = TyLoweringContext::new(
+            db,
+            &resolver,
+            &alias.store,
+            t.into(),
+            LifetimeElisionKind::AnonymousReportError,
+        )
+        .with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
+        .with_type_param_mode(ParamLoweringMode::Variable);
+        let res = alias
+            .ty
             .map(|type_ref| ctx.lower_ty(type_ref))
-            .unwrap_or_else(|| TyKind::Error.intern(Interner))
+            .unwrap_or_else(|| TyKind::Error.intern(Interner));
+        diags = create_diagnostics(ctx.diagnostics);
+        res
     };
-    (make_binders(db, &generics, inner), create_diagnostics(ctx.diagnostics))
+
+    (make_binders(db, &generics, inner), diags)
+}
+
+pub(crate) fn type_for_type_alias_with_diagnostics_cycle_result(
+    db: &dyn HirDatabase,
+    adt: TypeAliasId,
+) -> (Binders<Ty>, Diagnostics) {
+    let generics = generics(db, adt.into());
+    (make_binders(db, &generics, TyKind::Error.intern(Interner)), None)
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -1602,7 +1540,7 @@ pub enum TyDefId {
 }
 impl_from!(BuiltinType, AdtId(StructId, EnumId, UnionId), TypeAliasId for TyDefId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
 pub enum ValueTyDefId {
     FunctionId(FunctionId),
     StructId(StructId),
@@ -1619,7 +1557,7 @@ impl ValueTyDefId {
             Self::FunctionId(id) => id.into(),
             Self::StructId(id) => id.into(),
             Self::UnionId(id) => id.into(),
-            Self::EnumVariantId(var) => var.lookup(db.upcast()).parent.into(),
+            Self::EnumVariantId(var) => var.lookup(db).parent.into(),
             Self::ConstId(id) => id.into(),
             Self::StaticId(id) => id.into(),
         }
@@ -1633,20 +1571,11 @@ impl ValueTyDefId {
 pub(crate) fn ty_query(db: &dyn HirDatabase, def: TyDefId) -> Binders<Ty> {
     match def {
         TyDefId::BuiltinType(it) => Binders::empty(Interner, TyBuilder::builtin(it)),
-        TyDefId::AdtId(it) => type_for_adt(db, it),
+        TyDefId::AdtId(it) => type_for_adt_tracked(db, it),
         TyDefId::TypeAliasId(it) => db.type_for_type_alias_with_diagnostics(it).0,
     }
 }
 
-pub(crate) fn ty_recover(db: &dyn HirDatabase, _cycle: &Cycle, def: &TyDefId) -> Binders<Ty> {
-    let generics = match *def {
-        TyDefId::BuiltinType(_) => return Binders::empty(Interner, TyKind::Error.intern(Interner)),
-        TyDefId::AdtId(it) => generics(db.upcast(), it.into()),
-        TyDefId::TypeAliasId(it) => generics(db.upcast(), it.into()),
-    };
-    make_binders(db, &generics, TyKind::Error.intern(Interner))
-}
-
 pub(crate) fn value_ty_query(db: &dyn HirDatabase, def: ValueTyDefId) -> Option<Binders<Ty>> {
     match def {
         ValueTyDefId::FunctionId(it) => Some(type_for_fn(db, it)),
@@ -1666,11 +1595,17 @@ pub(crate) fn impl_self_ty_with_diagnostics_query(
     db: &dyn HirDatabase,
     impl_id: ImplId,
 ) -> (Binders<Ty>, Diagnostics) {
-    let impl_data = db.impl_data(impl_id);
-    let resolver = impl_id.resolver(db.upcast());
-    let generics = generics(db.upcast(), impl_id.into());
-    let mut ctx = TyLoweringContext::new(db, &resolver, &impl_data.types_map, impl_id.into())
-        .with_type_param_mode(ParamLoweringMode::Variable);
+    let impl_data = db.impl_signature(impl_id);
+    let resolver = impl_id.resolver(db);
+    let generics = generics(db, impl_id.into());
+    let mut ctx = TyLoweringContext::new(
+        db,
+        &resolver,
+        &impl_data.store,
+        impl_id.into(),
+        LifetimeElisionKind::AnonymousCreateParameter { report_in_path: true },
+    )
+    .with_type_param_mode(ParamLoweringMode::Variable);
     (
         make_binders(db, &generics, ctx.lower_ty(impl_data.self_ty)),
         create_diagnostics(ctx.diagnostics),
@@ -1686,11 +1621,16 @@ pub(crate) fn const_param_ty_with_diagnostics_query(
     db: &dyn HirDatabase,
     def: ConstParamId,
 ) -> (Ty, Diagnostics) {
-    let parent_data = db.generic_params(def.parent());
+    let (parent_data, store) = db.generic_params_and_store(def.parent());
     let data = &parent_data[def.local_id()];
-    let resolver = def.parent().resolver(db.upcast());
-    let mut ctx =
-        TyLoweringContext::new(db, &resolver, &parent_data.types_map, def.parent().into());
+    let resolver = def.parent().resolver(db);
+    let mut ctx = TyLoweringContext::new(
+        db,
+        &resolver,
+        &store,
+        def.parent(),
+        LifetimeElisionKind::AnonymousReportError,
+    );
     let ty = match data {
         TypeOrConstParamData::TypeParamData(_) => {
             never!();
@@ -1701,12 +1641,11 @@ pub(crate) fn const_param_ty_with_diagnostics_query(
     (ty, create_diagnostics(ctx.diagnostics))
 }
 
-pub(crate) fn impl_self_ty_with_diagnostics_recover(
+pub(crate) fn impl_self_ty_with_diagnostics_cycle_result(
     db: &dyn HirDatabase,
-    _cycle: &Cycle,
-    impl_id: &ImplId,
+    impl_id: ImplId,
 ) -> (Binders<Ty>, Diagnostics) {
-    let generics = generics(db.upcast(), (*impl_id).into());
+    let generics = generics(db, impl_id.into());
     (make_binders(db, &generics, TyKind::Error.intern(Interner)), None)
 }
 
@@ -1718,10 +1657,16 @@ pub(crate) fn impl_trait_with_diagnostics_query(
     db: &dyn HirDatabase,
     impl_id: ImplId,
 ) -> Option<(Binders<TraitRef>, Diagnostics)> {
-    let impl_data = db.impl_data(impl_id);
-    let resolver = impl_id.resolver(db.upcast());
-    let mut ctx = TyLoweringContext::new(db, &resolver, &impl_data.types_map, impl_id.into())
-        .with_type_param_mode(ParamLoweringMode::Variable);
+    let impl_data = db.impl_signature(impl_id);
+    let resolver = impl_id.resolver(db);
+    let mut ctx = TyLoweringContext::new(
+        db,
+        &resolver,
+        &impl_data.store,
+        impl_id.into(),
+        LifetimeElisionKind::AnonymousCreateParameter { report_in_path: true },
+    )
+    .with_type_param_mode(ParamLoweringMode::Variable);
     let (self_ty, binders) = db.impl_self_ty(impl_id).into_value_and_skipped_binders();
     let target_trait = impl_data.target_trait.as_ref()?;
     let trait_ref = Binders::new(binders, ctx.lower_trait_ref(target_trait, self_ty)?);
@@ -1733,13 +1678,16 @@ pub(crate) fn return_type_impl_traits(
     def: hir_def::FunctionId,
 ) -> Option<Arc<Binders<ImplTraits>>> {
     // FIXME unify with fn_sig_for_fn instead of doing lowering twice, maybe
-    let data = db.function_data(def);
-    let resolver = def.resolver(db.upcast());
-    let mut ctx_ret = TyLoweringContext::new(db, &resolver, &data.types_map, def.into())
-        .with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
-        .with_type_param_mode(ParamLoweringMode::Variable);
-    let _ret = ctx_ret.lower_ty(data.ret_type);
-    let generics = generics(db.upcast(), def.into());
+    let data = db.function_signature(def);
+    let resolver = def.resolver(db);
+    let mut ctx_ret =
+        TyLoweringContext::new(db, &resolver, &data.store, def.into(), LifetimeElisionKind::Infer)
+            .with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
+            .with_type_param_mode(ParamLoweringMode::Variable);
+    if let Some(ret_type) = data.ret_type {
+        let _ret = ctx_ret.lower_ty(ret_type);
+    }
+    let generics = generics(db, def.into());
     let return_type_impl_traits =
         ImplTraits { impl_traits: ctx_ret.impl_trait_mode.opaque_type_data };
     if return_type_impl_traits.impl_traits.is_empty() {
@@ -1753,19 +1701,25 @@ pub(crate) fn type_alias_impl_traits(
     db: &dyn HirDatabase,
     def: hir_def::TypeAliasId,
 ) -> Option<Arc<Binders<ImplTraits>>> {
-    let data = db.type_alias_data(def);
-    let resolver = def.resolver(db.upcast());
-    let mut ctx = TyLoweringContext::new(db, &resolver, &data.types_map, def.into())
-        .with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
-        .with_type_param_mode(ParamLoweringMode::Variable);
-    if let Some(type_ref) = data.type_ref {
+    let data = db.type_alias_signature(def);
+    let resolver = def.resolver(db);
+    let mut ctx = TyLoweringContext::new(
+        db,
+        &resolver,
+        &data.store,
+        def.into(),
+        LifetimeElisionKind::AnonymousReportError,
+    )
+    .with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
+    .with_type_param_mode(ParamLoweringMode::Variable);
+    if let Some(type_ref) = data.ty {
         let _ty = ctx.lower_ty(type_ref);
     }
     let type_alias_impl_traits = ImplTraits { impl_traits: ctx.impl_trait_mode.opaque_type_data };
     if type_alias_impl_traits.impl_traits.is_empty() {
         None
     } else {
-        let generics = generics(db.upcast(), def.into());
+        let generics = generics(db, def.into());
         Some(Arc::new(make_binders(db, &generics, type_alias_impl_traits)))
     }
 }
@@ -1777,132 +1731,14 @@ pub(crate) fn lower_to_chalk_mutability(m: hir_def::type_ref::Mutability) -> Mut
     }
 }
 
-/// Checks if the provided generic arg matches its expected kind, then lower them via
-/// provided closures. Use unknown if there was kind mismatch.
-///
-pub(crate) fn generic_arg_to_chalk<'a, T>(
-    db: &dyn HirDatabase,
-    kind_id: GenericParamId,
-    arg: &'a GenericArg,
-    this: &mut T,
-    types_map: &TypesMap,
-    for_type: impl FnOnce(&mut T, TypeRefId) -> Ty + 'a,
-    for_const: impl FnOnce(&mut T, &ConstRef, Ty) -> Const + 'a,
-    for_lifetime: impl FnOnce(&mut T, &LifetimeRef) -> Lifetime + 'a,
-) -> crate::GenericArg {
-    let kind = match kind_id {
-        GenericParamId::TypeParamId(_) => ParamKind::Type,
-        GenericParamId::ConstParamId(id) => {
-            let ty = db.const_param_ty(id);
-            ParamKind::Const(ty)
-        }
-        GenericParamId::LifetimeParamId(_) => ParamKind::Lifetime,
-    };
-    match (arg, kind) {
-        (GenericArg::Type(type_ref), ParamKind::Type) => for_type(this, *type_ref).cast(Interner),
-        (GenericArg::Const(c), ParamKind::Const(c_ty)) => for_const(this, c, c_ty).cast(Interner),
-        (GenericArg::Lifetime(lifetime_ref), ParamKind::Lifetime) => {
-            for_lifetime(this, lifetime_ref).cast(Interner)
-        }
-        (GenericArg::Const(_), ParamKind::Type) => TyKind::Error.intern(Interner).cast(Interner),
-        (GenericArg::Lifetime(_), ParamKind::Type) => TyKind::Error.intern(Interner).cast(Interner),
-        (GenericArg::Type(t), ParamKind::Const(c_ty)) => {
-            // We want to recover simple idents, which parser detects them
-            // as types. Maybe here is not the best place to do it, but
-            // it works.
-            if let TypeRef::Path(p) = &types_map[*t] {
-                if let Some(p) = p.mod_path() {
-                    if p.kind == PathKind::Plain {
-                        if let [n] = p.segments() {
-                            let c = ConstRef::Path(n.clone());
-                            return for_const(this, &c, c_ty).cast(Interner);
-                        }
-                    }
-                }
-            }
-            unknown_const_as_generic(c_ty)
-        }
-        (GenericArg::Lifetime(_), ParamKind::Const(c_ty)) => unknown_const_as_generic(c_ty),
-        (GenericArg::Type(_), ParamKind::Lifetime) => error_lifetime().cast(Interner),
-        (GenericArg::Const(_), ParamKind::Lifetime) => error_lifetime().cast(Interner),
-    }
-}
-
-pub(crate) fn const_or_path_to_chalk<'g>(
-    db: &dyn HirDatabase,
-    resolver: &Resolver,
-    owner: TypeOwnerId,
-    expected_ty: Ty,
-    value: &ConstRef,
-    mode: ParamLoweringMode,
-    args: impl FnOnce() -> Option<&'g Generics>,
-    debruijn: DebruijnIndex,
-) -> Const {
-    match value {
-        ConstRef::Scalar(s) => intern_const_ref(db, s, expected_ty, resolver.krate()),
-        ConstRef::Path(n) => {
-            let path = ModPath::from_segments(PathKind::Plain, Some(n.clone()));
-            path_to_const(
-                db,
-                resolver,
-                &Path::from_known_path_with_no_generic(path),
-                mode,
-                args,
-                debruijn,
-                expected_ty.clone(),
-            )
-            .unwrap_or_else(|| unknown_const(expected_ty))
-        }
-        &ConstRef::Complex(it) => {
-            let crate_data = &db.crate_graph()[resolver.krate()];
-            if crate_data.env.get("__ra_is_test_fixture").is_none() && crate_data.origin.is_local()
-            {
-                // FIXME: current `InTypeConstId` is very unstable, so we only use it in non local crate
-                // that are unlikely to be edited.
-                return unknown_const(expected_ty);
-            }
-            let c = db
-                .intern_in_type_const(InTypeConstLoc {
-                    id: it,
-                    owner,
-                    expected_ty: Box::new(InTypeConstIdMetadata(expected_ty.clone())),
-                })
-                .into();
-            intern_const_scalar(
-                ConstScalar::UnevaluatedConst(c, Substitution::empty(Interner)),
-                expected_ty,
-            )
-        }
-    }
-}
-
 /// Replaces any 'free' `BoundVar`s in `s` by `TyKind::Error` from the perspective of generic
-/// parameter whose index is `param_index`. A `BoundVar` is free when it is or (syntactically)
-/// appears after the generic parameter of `param_index`.
+/// parameter whose index is `param_index`. A `BoundVar` is free when it is, or appears after,
+/// the generic parameter of `param_index`.
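+///
+/// For example, with `param_index == 2`, bound vars `0` and `1` are kept, while bound vars
+/// `2`, `3`, ... are replaced.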
 fn fallback_bound_vars<T: TypeFoldable<Interner> + HasInterner<Interner = Interner>>(
     s: T,
     param_index: usize,
-    parent_start: usize,
 ) -> T {
-    // Keep in mind that parent generic parameters, if any, come *after* those of the item in
-    // question. In the diagrams below, `c*` and `p*` represent generic parameters of the item and
-    // its parent respectively.
-    let is_allowed = |index| {
-        if param_index < parent_start {
-            // The parameter of `param_index` is one from the item in question. Any parent generic
-            // parameters or the item's generic parameters that come before `param_index` is
-            // allowed.
-            // [c1, .., cj, .., ck, p1, .., pl] where cj is `param_index`
-            //  ^^^^^^              ^^^^^^^^^^ these are allowed
-            !(param_index..parent_start).contains(&index)
-        } else {
-            // The parameter of `param_index` is one from the parent generics. Only parent generic
-            // parameters that come before `param_index` are allowed.
-            // [c1, .., ck, p1, .., pj, .., pl] where pj is `param_index`
-            //              ^^^^^^ these are allowed
-            (parent_start..param_index).contains(&index)
-        }
-    };
+    let is_allowed = |index| (0..param_index).contains(&index);
 
     crate::fold_free_vars(
         s,
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower/diagnostics.rs
index 5c77bcd0736ab..009f047109dfb 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lower/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower/diagnostics.rs
@@ -1,13 +1,11 @@
 //! This file contains the declaration of diagnostic kinds for ty and path lowering.
 
-use either::Either;
 use hir_def::type_ref::TypeRefId;
-
-type TypeSource = Either<TypeRefId, hir_def::type_ref::TypeSource>;
+use hir_def::{GenericDefId, GenericParamId};
 
 #[derive(Debug, PartialEq, Eq, Clone)]
 pub struct TyLoweringDiagnostic {
-    pub source: TypeSource,
+    pub source: TypeRefId,
     pub kind: TyLoweringDiagnosticKind,
 }
 
@@ -24,13 +22,69 @@ pub enum GenericArgsProhibitedReason {
     PrimitiveTy,
     Const,
     Static,
+    LocalVariable,
     /// When there is a generic enum, within the expression `Enum::Variant`,
     /// either `Enum` or `Variant` are allowed to have generic arguments, but not both.
     EnumVariant,
 }
 
+/// A path can carry generic arguments in several places: each segment may have its own
+/// generic arguments, and in addition each associated type binding may have generic
+/// arguments. This enum abstracts over both.
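+///
+/// For example, in `dyn Trait<A, Assoc<B> = C>`, `A` belongs to the `Trait` segment itself,
+/// while `B` belongs to the `Assoc` associated type binding.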
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum PathGenericsSource {
+    /// Generic arguments directly on the segment.
+    Segment(u32),
+    /// Generic arguments on an associated type, e.g. `Foo<Assoc<A, B> = C>` or `Foo<Assoc<A, B>: Bound>`.
+    AssocType { segment: u32, assoc_type: u32 },
+}
+
 #[derive(Debug, PartialEq, Eq, Clone)]
 pub enum PathLoweringDiagnostic {
-    GenericArgsProhibited { segment: u32, reason: GenericArgsProhibitedReason },
-    ParenthesizedGenericArgsWithoutFnTrait { segment: u32 },
+    GenericArgsProhibited {
+        segment: u32,
+        reason: GenericArgsProhibitedReason,
+    },
+    ParenthesizedGenericArgsWithoutFnTrait {
+        segment: u32,
+    },
+    /// The expected counts of lifetimes and of types & consts can be found by inspecting the `GenericDefId`.
+    IncorrectGenericsLen {
+        generics_source: PathGenericsSource,
+        provided_count: u32,
+        expected_count: u32,
+        kind: IncorrectGenericsLenKind,
+        def: GenericDefId,
+    },
+    IncorrectGenericsOrder {
+        generics_source: PathGenericsSource,
+        param_id: GenericParamId,
+        arg_idx: u32,
+        /// Whether the `GenericArgs` contains a `Self` arg.
+        has_self_arg: bool,
+    },
+    ElidedLifetimesInPath {
+        generics_source: PathGenericsSource,
+        def: GenericDefId,
+        expected_count: u32,
+        hard_error: bool,
+    },
+    /// An elided lifetime was used (either implicitly, by not specifying lifetimes, or explicitly, by using `'_`),
+    /// but lifetime elision could not find a lifetime to replace it with.
+    ElisionFailure {
+        generics_source: PathGenericsSource,
+        def: GenericDefId,
+        expected_count: u32,
+    },
+    MissingLifetime {
+        generics_source: PathGenericsSource,
+        def: GenericDefId,
+        expected_count: u32,
+    },
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum IncorrectGenericsLenKind {
+    Lifetimes,
+    TypesAndConsts,
 }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs
index a165932ddcc8c..726eaf8b0a1dc 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs
@@ -1,33 +1,35 @@
 //! A wrapper around [`TyLoweringContext`] specifically for lowering paths.
 
-use std::iter;
-
-use chalk_ir::{cast::Cast, fold::Shift, BoundVar};
+use chalk_ir::{BoundVar, cast::Cast, fold::Shift};
 use either::Either;
 use hir_def::{
-    data::TraitFlags,
-    expr_store::HygieneId,
-    generics::{TypeParamProvenance, WherePredicate, WherePredicateTypeTarget},
-    path::{GenericArg, GenericArgs, Path, PathSegment, PathSegments},
+    GenericDefId, GenericParamId, Lookup, TraitId,
+    expr_store::{
+        ExpressionStore, HygieneId,
+        path::{GenericArg, GenericArgs, GenericArgsParentheses, Path, PathSegment, PathSegments},
+    },
+    hir::generics::{
+        GenericParamDataRef, TypeOrConstParamData, TypeParamData, TypeParamProvenance,
+    },
     resolver::{ResolveValueResult, TypeNs, ValueNs},
-    type_ref::{TypeBound, TypeRef, TypesMap},
-    GenericDefId, GenericParamId, ItemContainerId, Lookup, TraitId,
+    signatures::TraitFlags,
+    type_ref::{TypeRef, TypeRefId},
 };
 use smallvec::SmallVec;
 use stdx::never;
 
 use crate::{
-    consteval::unknown_const_as_generic,
+    AliasEq, AliasTy, GenericArgsProhibitedReason, ImplTraitLoweringMode, IncorrectGenericsLenKind,
+    Interner, ParamLoweringMode, PathGenericsSource, PathLoweringDiagnostic, ProjectionTy,
+    QuantifiedWhereClause, Substitution, TraitRef, Ty, TyBuilder, TyDefId, TyKind,
+    TyLoweringContext, ValueTyDefId, WhereClause,
+    consteval::{unknown_const, unknown_const_as_generic},
+    db::HirDatabase,
     error_lifetime,
-    generics::generics,
-    lower::{
-        generic_arg_to_chalk, named_associated_type_shorthand_candidates, ImplTraitLoweringState,
-    },
-    to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx,
+    generics::{Generics, generics},
+    lower::{LifetimeElisionKind, named_associated_type_shorthand_candidates},
+    static_lifetime, to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx,
     utils::associated_type_by_name_including_super_traits,
-    AliasEq, AliasTy, GenericArgsProhibitedReason, ImplTraitLoweringMode, Interner,
-    ParamLoweringMode, PathLoweringDiagnostic, ProjectionTy, QuantifiedWhereClause, Substitution,
-    TraitRef, Ty, TyBuilder, TyDefId, TyKind, TyLoweringContext, ValueTyDefId, WhereClause,
 };
 
 type CallbackData<'a> = Either<
@@ -117,17 +119,31 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
             .expect("invalid segment passed to PathLoweringContext::set_current_segment()");
     }
 
+    #[inline]
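+    /// Runs `f` with `lifetime_elision` as the active elision mode, restoring the
+    /// previous mode afterwards.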
+    fn with_lifetime_elision<T>(
+        &mut self,
+        lifetime_elision: LifetimeElisionKind,
+        f: impl FnOnce(&mut PathLoweringContext<'_, '_>) -> T,
+    ) -> T {
+        let old_lifetime_elision =
+            std::mem::replace(&mut self.ctx.lifetime_elision, lifetime_elision);
+        let result = f(self);
+        self.ctx.lifetime_elision = old_lifetime_elision;
+        result
+    }
+
     pub(crate) fn lower_ty_relative_path(
         &mut self,
         ty: Ty,
         // We need the original resolution to lower `Self::AssocTy` correctly
         res: Option<TypeNs>,
+        infer_args: bool,
     ) -> (Ty, Option<TypeNs>) {
         match self.segments.len() - self.current_segment_idx {
             0 => (ty, res),
             1 => {
                 // resolve unselected assoc types
-                (self.select_associated_type(res), None)
+                (self.select_associated_type(res, infer_args), None)
             }
             _ => {
                 // FIXME report error (ambiguous associated type)
@@ -136,19 +152,6 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
         }
     }
 
-    fn prohibit_parenthesized_generic_args(&mut self) -> bool {
-        if let Some(generic_args) = self.current_or_prev_segment.args_and_bindings {
-            if generic_args.desugared_from_fn {
-                let segment = self.current_segment_u32();
-                self.on_diagnostic(
-                    PathLoweringDiagnostic::ParenthesizedGenericArgsWithoutFnTrait { segment },
-                );
-                return true;
-            }
-        }
-        false
-    }
-
     // When calling this, the current segment is the resolved segment (we don't advance it yet).
     pub(crate) fn lower_partly_resolved_path(
         &mut self,
@@ -164,12 +167,13 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
                         let trait_ref = self.lower_trait_ref_from_resolved_path(
                             trait_,
                             TyKind::Error.intern(Interner),
+                            infer_args,
                         );
 
                         self.skip_resolved_segment();
                         let segment = self.current_or_prev_segment;
                         let found =
-                            self.ctx.db.trait_data(trait_).associated_type_by_name(segment.name);
+                            self.ctx.db.trait_items(trait_).associated_type_by_name(segment.name);
 
                         match found {
                             Some(associated_ty) => {
@@ -179,17 +183,17 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
                                 // this point (`trait_ref.substitution`).
                                 let substitution = self.substs_from_path_segment(
                                     associated_ty.into(),
-                                    false,
+                                    infer_args,
                                     None,
+                                    true,
                                 );
-                                let len_self =
-                                    generics(self.ctx.db.upcast(), associated_ty.into()).len_self();
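+                                // Generics are now ordered parent-first, so the trait ref's substitution
+                                // replaces the placeholder parent args at the start of `substitution`.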
                                 let substitution = Substitution::from_iter(
                                     Interner,
-                                    substitution
-                                        .iter(Interner)
-                                        .take(len_self)
-                                        .chain(trait_ref.substitution.iter(Interner)),
+                                    trait_ref.substitution.iter(Interner).chain(
+                                        substitution
+                                            .iter(Interner)
+                                            .skip(trait_ref.substitution.len(Interner)),
+                                    ),
                                 );
                                 TyKind::Alias(AliasTy::Projection(ProjectionTy {
                                     associated_ty_id: to_assoc_type_id(associated_ty),
@@ -225,12 +229,7 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
                     TyKind::Placeholder(to_placeholder_idx(self.ctx.db, param_id.into()))
                 }
                 ParamLoweringMode::Variable => {
-                    let idx = match self
-                        .ctx
-                        .generics()
-                        .expect("generics in scope")
-                        .type_or_const_param_idx(param_id.into())
-                    {
+                    let idx = match self.ctx.generics().type_or_const_param_idx(param_id.into()) {
                         None => {
                             never!("no matching generics");
                             return (TyKind::Error.intern(Interner), None);
@@ -243,7 +242,7 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
             }
             .intern(Interner),
             TypeNs::SelfType(impl_id) => {
-                let generics = self.ctx.generics().expect("impl should have generic param scope");
+                let generics = self.ctx.generics();
 
                 match self.ctx.type_param_mode {
                     ParamLoweringMode::Placeholder => {
@@ -253,22 +252,13 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
                         let subst = generics.placeholder_subst(self.ctx.db);
                         self.ctx.db.impl_self_ty(impl_id).substitute(Interner, &subst)
                     }
-                    ParamLoweringMode::Variable => {
-                        let starting_from = match generics.def() {
-                            GenericDefId::ImplId(_) => 0,
-                            // `def` is an item within impl. We need to substitute `BoundVar`s but
-                            // remember that they are for parent (i.e. impl) generic params so they
-                            // come after our own params.
-                            _ => generics.len_self(),
-                        };
-                        TyBuilder::impl_self_ty(self.ctx.db, impl_id)
-                            .fill_with_bound_vars(self.ctx.in_binders, starting_from)
-                            .build()
-                    }
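+                    // Parent (impl) params now come first in the generics, so the impl's params start
+                    // at bound var index 0 whether `def` is the impl itself or an item inside it.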
+                    ParamLoweringMode::Variable => TyBuilder::impl_self_ty(self.ctx.db, impl_id)
+                        .fill_with_bound_vars(self.ctx.in_binders, 0)
+                        .build(),
                 }
             }
             TypeNs::AdtSelfType(adt) => {
-                let generics = generics(self.ctx.db.upcast(), adt.into());
+                let generics = generics(self.ctx.db, adt.into());
                 let substs = match self.ctx.type_param_mode {
                     ParamLoweringMode::Placeholder => generics.placeholder_subst(self.ctx.db),
                     ParamLoweringMode::Variable => {
@@ -282,11 +272,13 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
             TypeNs::BuiltinType(it) => self.lower_path_inner(it.into(), infer_args),
             TypeNs::TypeAliasId(it) => self.lower_path_inner(it.into(), infer_args),
             // FIXME: report error
-            TypeNs::EnumVariantId(_) => return (TyKind::Error.intern(Interner), None),
+            TypeNs::EnumVariantId(_) | TypeNs::ModuleId(_) => {
+                return (TyKind::Error.intern(Interner), None);
+            }
         };
 
         self.skip_resolved_segment();
-        self.lower_ty_relative_path(ty, Some(resolution))
+        self.lower_ty_relative_path(ty, Some(resolution), infer_args)
     }
 
     fn handle_type_ns_resolution(&mut self, resolution: &TypeNs) {
@@ -313,6 +305,9 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
             TypeNs::BuiltinType(_) => {
                 prohibit_generics_on_resolved(GenericArgsProhibitedReason::PrimitiveTy)
             }
+            TypeNs::ModuleId(_) => {
+                prohibit_generics_on_resolved(GenericArgsProhibitedReason::Module)
+            }
             TypeNs::AdtId(_)
             | TypeNs::EnumVariantId(_)
             | TypeNs::TypeAliasId(_)
@@ -330,10 +325,8 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
     }
 
     pub(crate) fn resolve_path_in_type_ns(&mut self) -> Option<(TypeNs, Option<usize>)> {
-        let (resolution, remaining_index, _, prefix_info) = self
-            .ctx
-            .resolver
-            .resolve_path_in_type_ns_with_prefix_info(self.ctx.db.upcast(), self.path)?;
+        let (resolution, remaining_index, _, prefix_info) =
+            self.ctx.resolver.resolve_path_in_type_ns_with_prefix_info(self.ctx.db, self.path)?;
 
         let segments = self.segments;
         if segments.is_empty() || matches!(self.path, Path::LangItem(..)) {
@@ -388,7 +381,7 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
         hygiene_id: HygieneId,
     ) -> Option<ResolveValueResult> {
         let (res, prefix_info) = self.ctx.resolver.resolve_path_in_value_ns_with_prefix_info(
-            self.ctx.db.upcast(),
+            self.ctx.db,
             self.path,
             hygiene_id,
         )?;
@@ -459,14 +452,19 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
                     // and statics can be generic, or just because it was easier for rustc implementors.
                     // That means we'll show the wrong error code. Because for us it's easier to do it
                     // this way :)
-                    ValueNs::GenericParam(_) | ValueNs::ConstId(_) => {
+                    ValueNs::GenericParam(_) => {
                         prohibit_generics_on_resolved(GenericArgsProhibitedReason::Const)
                     }
                     ValueNs::StaticId(_) => {
                         prohibit_generics_on_resolved(GenericArgsProhibitedReason::Static)
                     }
-                    ValueNs::FunctionId(_) | ValueNs::StructId(_) | ValueNs::EnumVariantId(_) => {}
-                    ValueNs::LocalBinding(_) => {}
+                    ValueNs::LocalBinding(_) => {
+                        prohibit_generics_on_resolved(GenericArgsProhibitedReason::LocalVariable)
+                    }
+                    ValueNs::FunctionId(_)
+                    | ValueNs::StructId(_)
+                    | ValueNs::EnumVariantId(_)
+                    | ValueNs::ConstId(_) => {}
                 }
             }
             ResolveValueResult::Partial(resolution, _, _) => {
@@ -476,22 +474,21 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
         Some(res)
     }
 
-    fn select_associated_type(&mut self, res: Option<TypeNs>) -> Ty {
-        let Some((generics, res)) = self.ctx.generics().zip(res) else {
+    fn select_associated_type(&mut self, res: Option<TypeNs>, infer_args: bool) -> Ty {
+        let Some(res) = res else {
             return TyKind::Error.intern(Interner);
         };
         let segment = self.current_or_prev_segment;
         let ty = named_associated_type_shorthand_candidates(
             self.ctx.db,
-            generics.def(),
+            self.ctx.def,
             res,
             Some(segment.name.clone()),
             move |name, t, associated_ty| {
-                let generics = self.ctx.generics().unwrap();
-
                 if name != segment.name {
                     return None;
                 }
+                let generics = self.ctx.generics();
 
                 let parent_subst = t.substitution.clone();
                 let parent_subst = match self.ctx.type_param_mode {
@@ -511,15 +508,14 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
                 // generic params. It's inefficient to splice the `Substitution`s, so we may want
                 // that method to optionally take parent `Substitution` as we already know them at
                 // this point (`t.substitution`).
-                let substs = self.substs_from_path_segment(associated_ty.into(), false, None);
-
-                let len_self =
-                    crate::generics::generics(self.ctx.db.upcast(), associated_ty.into())
-                        .len_self();
+                let substs =
+                    self.substs_from_path_segment(associated_ty.into(), infer_args, None, true);
 
                 let substs = Substitution::from_iter(
                     Interner,
-                    substs.iter(Interner).take(len_self).chain(parent_subst.iter(Interner)),
+                    parent_subst
+                        .iter(Interner)
+                        .chain(substs.iter(Interner).skip(parent_subst.len(Interner))),
                 );
 
                 Some(
@@ -541,7 +537,7 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
             TyDefId::AdtId(it) => it.into(),
             TyDefId::TypeAliasId(it) => it.into(),
         };
-        let substs = self.substs_from_path_segment(generic_def, infer_args, None);
+        let substs = self.substs_from_path_segment(generic_def, infer_args, None, false);
         self.ctx.db.ty(typeable).substitute(Interner, &substs)
     }
 
@@ -554,6 +550,7 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
         // special-case enum variants
         resolved: ValueTyDefId,
         infer_args: bool,
+        lowering_assoc_type_generics: bool,
     ) -> Substitution {
         let prev_current_segment_idx = self.current_segment_idx;
         let prev_current_segment = self.current_or_prev_segment;
@@ -587,10 +584,15 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
                         self.current_or_prev_segment = penultimate;
                     }
                 }
-                var.lookup(self.ctx.db.upcast()).parent.into()
+                var.lookup(self.ctx.db).parent.into()
             }
         };
-        let result = self.substs_from_path_segment(generic_def, infer_args, None);
+        let result = self.substs_from_path_segment(
+            generic_def,
+            infer_args,
+            None,
+            lowering_assoc_type_generics,
+        );
         self.current_segment_idx = prev_current_segment_idx;
         self.current_or_prev_segment = prev_current_segment;
         result
@@ -601,16 +603,41 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
         def: GenericDefId,
         infer_args: bool,
         explicit_self_ty: Option<Ty>,
+        lowering_assoc_type_generics: bool,
     ) -> Substitution {
-        let prohibit_parens = match def {
-            GenericDefId::TraitId(trait_) => {
-                let trait_data = self.ctx.db.trait_data(trait_);
-                !trait_data.flags.contains(TraitFlags::RUSTC_PAREN_SUGAR)
+        let mut lifetime_elision = self.ctx.lifetime_elision.clone();
+
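+        // Parenthesized args (`Fn(A) -> B` sugar) are only allowed on `#[rustc_paren_sugar]` traits,
+        // and never with return-type notation; otherwise report a diagnostic and bail out.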
+        if let Some(args) = self.current_or_prev_segment.args_and_bindings {
+            if args.parenthesized != GenericArgsParentheses::No {
+                let prohibit_parens = match def {
+                    GenericDefId::TraitId(trait_) => {
+                        // RTN is prohibited anyways if we got here.
+                        let is_rtn =
+                            args.parenthesized == GenericArgsParentheses::ReturnTypeNotation;
+                        let is_fn_trait = self
+                            .ctx
+                            .db
+                            .trait_signature(trait_)
+                            .flags
+                            .contains(TraitFlags::RUSTC_PAREN_SUGAR);
+                        is_rtn || !is_fn_trait
+                    }
+                    _ => true,
+                };
+
+                if prohibit_parens {
+                    let segment = self.current_segment_u32();
+                    self.on_diagnostic(
+                        PathLoweringDiagnostic::ParenthesizedGenericArgsWithoutFnTrait { segment },
+                    );
+
+                    return TyBuilder::unknown_subst(self.ctx.db, def);
+                }
+
+                // `Fn()`-style generics are treated like functions for the purpose of lifetime elision.
+                lifetime_elision =
+                    LifetimeElisionKind::AnonymousCreateParameter { report_in_path: false };
             }
-            _ => true,
-        };
-        if prohibit_parens && self.prohibit_parenthesized_generic_args() {
-            return TyBuilder::unknown_subst(self.ctx.db, def);
         }
 
         self.substs_from_args_and_bindings(
@@ -618,6 +645,9 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
             def,
             infer_args,
             explicit_self_ty,
+            PathGenericsSource::Segment(self.current_segment_u32()),
+            lowering_assoc_type_generics,
+            lifetime_elision,
         )
     }
 
@@ -627,152 +657,185 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
         def: GenericDefId,
         infer_args: bool,
         explicit_self_ty: Option<Ty>,
+        generics_source: PathGenericsSource,
+        lowering_assoc_type_generics: bool,
+        lifetime_elision: LifetimeElisionKind,
     ) -> Substitution {
-        // Order is
-        // - Optional Self parameter
-        // - Lifetime parameters
-        // - Type or Const parameters
-        // - Parent parameters
-        let def_generics = generics(self.ctx.db.upcast(), def);
-        let (
-            parent_params,
-            self_param,
-            type_params,
-            const_params,
-            impl_trait_params,
-            lifetime_params,
-        ) = def_generics.provenance_split();
-        let item_len =
-            self_param as usize + type_params + const_params + impl_trait_params + lifetime_params;
-        let total_len = parent_params + item_len;
-
-        let mut substs = Vec::new();
-
-        // we need to iterate the lifetime and type/const params separately as our order of them
-        // differs from the supplied syntax
-
-        let ty_error = || TyKind::Error.intern(Interner).cast(Interner);
-        let mut def_toc_iter = def_generics.iter_self_type_or_consts_id();
-        let fill_self_param = || {
-            if self_param {
-                let self_ty = explicit_self_ty.map(|x| x.cast(Interner)).unwrap_or_else(ty_error);
-
-                if let Some(id) = def_toc_iter.next() {
-                    assert!(matches!(id, GenericParamId::TypeParamId(_)));
-                    substs.push(self_ty);
+        struct LowererCtx<'a, 'b, 'c> {
+            ctx: &'a mut PathLoweringContext<'b, 'c>,
+            generics_source: PathGenericsSource,
+        }
+
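+        // Forwards the lowering callbacks to the surrounding path context, attaching diagnostics
+        // to the generics source (the path segment or associated type binding being lowered).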
+        impl GenericArgsLowerer for LowererCtx<'_, '_, '_> {
+            fn report_len_mismatch(
+                &mut self,
+                def: GenericDefId,
+                provided_count: u32,
+                expected_count: u32,
+                kind: IncorrectGenericsLenKind,
+            ) {
+                self.ctx.on_diagnostic(PathLoweringDiagnostic::IncorrectGenericsLen {
+                    generics_source: self.generics_source,
+                    provided_count,
+                    expected_count,
+                    kind,
+                    def,
+                });
+            }
+
+            fn report_arg_mismatch(
+                &mut self,
+                param_id: GenericParamId,
+                arg_idx: u32,
+                has_self_arg: bool,
+            ) {
+                self.ctx.on_diagnostic(PathLoweringDiagnostic::IncorrectGenericsOrder {
+                    generics_source: self.generics_source,
+                    param_id,
+                    arg_idx,
+                    has_self_arg,
+                });
+            }
+
+            fn provided_kind(
+                &mut self,
+                param_id: GenericParamId,
+                param: GenericParamDataRef<'_>,
+                arg: &GenericArg,
+            ) -> crate::GenericArg {
+                match (param, arg) {
+                    (GenericParamDataRef::LifetimeParamData(_), GenericArg::Lifetime(lifetime)) => {
+                        self.ctx.ctx.lower_lifetime(*lifetime).cast(Interner)
+                    }
+                    (GenericParamDataRef::TypeParamData(_), GenericArg::Type(type_ref)) => {
+                        self.ctx.ctx.lower_ty(*type_ref).cast(Interner)
+                    }
+                    (GenericParamDataRef::ConstParamData(_), GenericArg::Const(konst)) => {
+                        let GenericParamId::ConstParamId(const_id) = param_id else {
+                            unreachable!("non-const param ID for const param");
+                        };
+                        self.ctx
+                            .ctx
+                            .lower_const(konst, self.ctx.ctx.db.const_param_ty(const_id))
+                            .cast(Interner)
+                    }
+                    _ => unreachable!("unmatching param kinds were passed to `provided_kind()`"),
                 }
             }
-        };
-        let mut had_explicit_args = false;
-
-        if let Some(&GenericArgs { ref args, has_self_type, .. }) = args_and_bindings {
-            // Fill in the self param first
-            if has_self_type && self_param {
-                had_explicit_args = true;
-                if let Some(id) = def_toc_iter.next() {
-                    assert!(matches!(id, GenericParamId::TypeParamId(_)));
-                    had_explicit_args = true;
-                    if let GenericArg::Type(ty) = &args[0] {
-                        substs.push(self.ctx.lower_ty(*ty).cast(Interner));
+
+            fn provided_type_like_const(
+                &mut self,
+                const_ty: Ty,
+                arg: TypeLikeConst<'_>,
+            ) -> crate::Const {
+                match arg {
+                    TypeLikeConst::Path(path) => self.ctx.ctx.lower_path_as_const(path, const_ty),
+                    TypeLikeConst::Infer => unknown_const(const_ty),
+                }
+            }
+
+            fn inferred_kind(
+                &mut self,
+                def: GenericDefId,
+                param_id: GenericParamId,
+                param: GenericParamDataRef<'_>,
+                infer_args: bool,
+                preceding_args: &[crate::GenericArg],
+            ) -> crate::GenericArg {
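+                // A default may refer to earlier parameters, so instantiate it with the args collected so far.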
+                let default = || {
+                    self.ctx
+                        .ctx
+                        .db
+                        .generic_defaults(def)
+                        .get(preceding_args.len())
+                        .map(|default| default.clone().substitute(Interner, preceding_args))
+                };
+                match param {
+                    GenericParamDataRef::LifetimeParamData(_) => error_lifetime().cast(Interner),
+                    GenericParamDataRef::TypeParamData(param) => {
+                        if !infer_args && param.default.is_some() {
+                            if let Some(default) = default() {
+                                return default;
+                            }
+                        }
+                        TyKind::Error.intern(Interner).cast(Interner)
+                    }
+                    GenericParamDataRef::ConstParamData(param) => {
+                        if !infer_args && param.default.is_some() {
+                            if let Some(default) = default() {
+                                return default;
+                            }
+                        }
+                        let GenericParamId::ConstParamId(const_id) = param_id else {
+                            unreachable!("non-const param ID for const param");
+                        };
+                        unknown_const_as_generic(self.ctx.ctx.db.const_param_ty(const_id))
+                            .cast(Interner)
                     }
                 }
-            } else {
-                fill_self_param()
-            };
-
-            // Then fill in the supplied lifetime args, or error lifetimes if there are too few
-            // (default lifetimes aren't a thing)
-            for arg in args
-                .iter()
-                .filter_map(|arg| match arg {
-                    GenericArg::Lifetime(arg) => Some(self.ctx.lower_lifetime(arg)),
-                    _ => None,
-                })
-                .chain(iter::repeat(error_lifetime()))
-                .take(lifetime_params)
-            {
-                substs.push(arg.cast(Interner));
-            }
-
-            let skip = if has_self_type { 1 } else { 0 };
-            // Fill in supplied type and const args
-            // Note if non-lifetime args are provided, it should be all of them, but we can't rely on that
-            for (arg, id) in args
-                .iter()
-                .filter(|arg| !matches!(arg, GenericArg::Lifetime(_)))
-                .skip(skip)
-                .take(type_params + const_params)
-                .zip(def_toc_iter)
-            {
-                had_explicit_args = true;
-                let arg = generic_arg_to_chalk(
-                    self.ctx.db,
-                    id,
-                    arg,
-                    self.ctx,
-                    self.ctx.types_map,
-                    |ctx, type_ref| ctx.lower_ty(type_ref),
-                    |ctx, const_ref, ty| ctx.lower_const(const_ref, ty),
-                    |ctx, lifetime_ref| ctx.lower_lifetime(lifetime_ref),
-                );
-                substs.push(arg);
             }
-        } else {
-            fill_self_param();
-        }
 
-        let param_to_err = |id| match id {
-            GenericParamId::ConstParamId(x) => {
-                unknown_const_as_generic(self.ctx.db.const_param_ty(x))
+            fn parent_arg(&mut self, param_id: GenericParamId) -> crate::GenericArg {
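+                // Parent params are not written in the path segment; fill them with error placeholders
+                // here, and let callers that know the actual parent substitution splice it in over them.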
+                match param_id {
+                    GenericParamId::TypeParamId(_) => TyKind::Error.intern(Interner).cast(Interner),
+                    GenericParamId::ConstParamId(const_id) => {
+                        unknown_const_as_generic(self.ctx.ctx.db.const_param_ty(const_id))
+                    }
+                    GenericParamId::LifetimeParamId(_) => error_lifetime().cast(Interner),
+                }
             }
-            GenericParamId::TypeParamId(_) => ty_error(),
-            GenericParamId::LifetimeParamId(_) => error_lifetime().cast(Interner),
-        };
-        // handle defaults. In expression or pattern path segments without
-        // explicitly specified type arguments, missing type arguments are inferred
-        // (i.e. defaults aren't used).
-        // Generic parameters for associated types are not supposed to have defaults, so we just
-        // ignore them.
-        let is_assoc_ty = || match def {
-            GenericDefId::TypeAliasId(id) => {
-                matches!(id.lookup(self.ctx.db.upcast()).container, ItemContainerId::TraitId(_))
-            }
-            _ => false,
-        };
-        let fill_defaults = (!infer_args || had_explicit_args) && !is_assoc_ty();
-        if fill_defaults {
-            let defaults = &*self.ctx.db.generic_defaults(def);
-            let (item, _parent) = defaults.split_at(item_len);
-            let parent_from = item_len - substs.len();
-
-            let mut rem =
-                def_generics.iter_id().skip(substs.len()).map(param_to_err).collect::<Vec<_>>();
-            // Fill in defaults for type/const params
-            for (idx, default_ty) in item[substs.len()..].iter().enumerate() {
-                // each default can depend on the previous parameters
-                let substs_so_far = Substitution::from_iter(
-                    Interner,
-                    substs.iter().cloned().chain(rem[idx..].iter().cloned()),
-                );
-                substs.push(default_ty.clone().substitute(Interner, &substs_so_far));
+
+            fn report_elided_lifetimes_in_path(
+                &mut self,
+                def: GenericDefId,
+                expected_count: u32,
+                hard_error: bool,
+            ) {
+                self.ctx.on_diagnostic(PathLoweringDiagnostic::ElidedLifetimesInPath {
+                    generics_source: self.generics_source,
+                    def,
+                    expected_count,
+                    hard_error,
+                });
+            }
+
+            fn report_elision_failure(&mut self, def: GenericDefId, expected_count: u32) {
+                self.ctx.on_diagnostic(PathLoweringDiagnostic::ElisionFailure {
+                    generics_source: self.generics_source,
+                    def,
+                    expected_count,
+                });
+            }
+
+            fn report_missing_lifetime(&mut self, def: GenericDefId, expected_count: u32) {
+                self.ctx.on_diagnostic(PathLoweringDiagnostic::MissingLifetime {
+                    generics_source: self.generics_source,
+                    def,
+                    expected_count,
+                });
             }
-            // Fill in remaining parent params
-            substs.extend(rem.drain(parent_from..));
-        } else {
-            // Fill in remaining def params and parent params
-            substs.extend(def_generics.iter_id().skip(substs.len()).map(param_to_err));
         }
 
-        assert_eq!(substs.len(), total_len, "expected {} substs, got {}", total_len, substs.len());
-        Substitution::from_iter(Interner, substs)
+        substs_from_args_and_bindings(
+            self.ctx.db,
+            self.ctx.store,
+            args_and_bindings,
+            def,
+            infer_args,
+            lifetime_elision,
+            lowering_assoc_type_generics,
+            explicit_self_ty,
+            &mut LowererCtx { ctx: self, generics_source },
+        )
     }
 
     pub(crate) fn lower_trait_ref_from_resolved_path(
         &mut self,
         resolved: TraitId,
         explicit_self_ty: Ty,
+        infer_args: bool,
     ) -> TraitRef {
-        let substs = self.trait_ref_substs_from_path(resolved, explicit_self_ty);
+        let substs = self.trait_ref_substs_from_path(resolved, explicit_self_ty, infer_args);
         TraitRef { trait_id: to_chalk_trait_id(resolved), substitution: substs }
     }
 
@@ -780,17 +843,17 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
         &mut self,
         resolved: TraitId,
         explicit_self_ty: Ty,
+        infer_args: bool,
     ) -> Substitution {
-        self.substs_from_path_segment(resolved.into(), false, Some(explicit_self_ty))
+        self.substs_from_path_segment(resolved.into(), infer_args, Some(explicit_self_ty), false)
     }
 
     pub(super) fn assoc_type_bindings_from_type_bound<'c>(
         mut self,
-        bound: &'c TypeBound,
         trait_ref: TraitRef,
     ) -> Option<impl Iterator<Item = QuantifiedWhereClause> + use<'a, 'b, 'c>> {
         self.current_or_prev_segment.args_and_bindings.map(|args_and_bindings| {
-            args_and_bindings.bindings.iter().flat_map(move |binding| {
+            args_and_bindings.bindings.iter().enumerate().flat_map(move |(binding_idx, binding)| {
                 let found = associated_type_by_name_including_super_traits(
                     self.ctx.db,
                     trait_ref.clone(),
@@ -800,23 +863,32 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
                     None => return SmallVec::new(),
                     Some(t) => t,
                 };
-                // FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent
-                // generic params. It's inefficient to splice the `Substitution`s, so we may want
-                // that method to optionally take parent `Substitution` as we already know them at
-                // this point (`super_trait_ref.substitution`).
-                let substitution = self.substs_from_args_and_bindings(
-                    binding.args.as_ref(),
-                    associated_ty.into(),
-                    false, // this is not relevant
-                    Some(super_trait_ref.self_type_parameter(Interner)),
-                );
-                let self_params = generics(self.ctx.db.upcast(), associated_ty.into()).len_self();
+                let substitution =
+                    self.with_lifetime_elision(LifetimeElisionKind::AnonymousReportError, |this| {
+                        // FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent
+                        // generic param. It's inefficient to splice the `Substitution`s, so we may want
+                        // that method to optionally take parent `Substitution` as we already know them at
+                        // this point (`super_trait_ref.substitution`).
+                        this.substs_from_args_and_bindings(
+                            binding.args.as_ref(),
+                            associated_ty.into(),
+                            false, // this is not relevant
+                            Some(super_trait_ref.self_type_parameter(Interner)),
+                            PathGenericsSource::AssocType {
+                                segment: this.current_segment_u32(),
+                                assoc_type: binding_idx as u32,
+                            },
+                            false,
+                            this.ctx.lifetime_elision.clone(),
+                        )
+                    });
                 let substitution = Substitution::from_iter(
                     Interner,
-                    substitution
-                        .iter(Interner)
-                        .take(self_params)
-                        .chain(super_trait_ref.substitution.iter(Interner)),
+                    super_trait_ref.substitution.iter(Interner).chain(
+                        substitution
+                            .iter(Interner)
+                            .skip(super_trait_ref.substitution.len(Interner)),
+                    ),
                 );
                 let projection_ty = ProjectionTy {
                     associated_ty_id: to_assoc_type_id(associated_ty),
@@ -825,93 +897,397 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
                 let mut predicates: SmallVec<[_; 1]> = SmallVec::with_capacity(
                     binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(),
                 );
+
                 if let Some(type_ref) = binding.type_ref {
-                    match (&self.ctx.types_map[type_ref], self.ctx.impl_trait_mode.mode) {
-                        (TypeRef::ImplTrait(_), ImplTraitLoweringMode::Disallowed) => (),
-                        (_, ImplTraitLoweringMode::Disallowed | ImplTraitLoweringMode::Opaque) => {
-                            let ty = self.ctx.lower_ty(type_ref);
-                            let alias_eq =
-                                AliasEq { alias: AliasTy::Projection(projection_ty.clone()), ty };
-                            predicates
-                                .push(crate::wrap_empty_binders(WhereClause::AliasEq(alias_eq)));
-                        }
-                        (_, ImplTraitLoweringMode::Param | ImplTraitLoweringMode::Variable) => {
-                            // Find the generic index for the target of our `bound`
-                            let target_param_idx =
-                                self.ctx.resolver.where_predicates_in_scope().find_map(
-                                    |(p, (_, types_map))| match p {
-                                        WherePredicate::TypeBound {
-                                            target: WherePredicateTypeTarget::TypeOrConstParam(idx),
-                                            bound: b,
-                                        } if std::ptr::eq::<TypesMap>(
-                                            self.ctx.types_map,
-                                            types_map,
-                                        ) && bound == b =>
-                                        {
-                                            Some(idx)
-                                        }
-                                        _ => None,
-                                    },
-                                );
-                            let ty = if let Some(target_param_idx) = target_param_idx {
-                                let mut counter = 0;
-                                let generics = self.ctx.generics().expect("generics in scope");
-                                for (idx, data) in generics.iter_self_type_or_consts() {
-                                    // Count the number of `impl Trait` things that appear before
-                                    // the target of our `bound`.
-                                    // Our counter within `impl_trait_mode` should be that number
-                                    // to properly lower each types within `type_ref`
-                                    if data.type_param().is_some_and(|p| {
-                                        p.provenance == TypeParamProvenance::ArgumentImplTrait
-                                    }) {
-                                        counter += 1;
-                                    }
-                                    if idx == *target_param_idx {
-                                        break;
-                                    }
-                                }
-                                let mut ext = TyLoweringContext::new_maybe_unowned(
-                                    self.ctx.db,
-                                    self.ctx.resolver,
-                                    self.ctx.types_map,
-                                    self.ctx.types_source_map,
-                                    self.ctx.owner,
-                                )
-                                .with_type_param_mode(self.ctx.type_param_mode);
-                                match self.ctx.impl_trait_mode.mode {
-                                    ImplTraitLoweringMode::Param => {
-                                        ext.impl_trait_mode =
-                                            ImplTraitLoweringState::param(counter);
-                                    }
-                                    ImplTraitLoweringMode::Variable => {
-                                        ext.impl_trait_mode =
-                                            ImplTraitLoweringState::variable(counter);
-                                    }
-                                    _ => unreachable!(),
-                                }
-                                let ty = ext.lower_ty(type_ref);
-                                self.ctx.diagnostics.extend(ext.diagnostics);
-                                ty
-                            } else {
-                                self.ctx.lower_ty(type_ref)
-                            };
-
-                            let alias_eq =
-                                AliasEq { alias: AliasTy::Projection(projection_ty.clone()), ty };
-                            predicates
-                                .push(crate::wrap_empty_binders(WhereClause::AliasEq(alias_eq)));
+                    let lifetime_elision =
+                        if args_and_bindings.parenthesized == GenericArgsParentheses::ParenSugar {
+                            // `Fn()`-style generics elide lifetimes the way functions do; this binding is the
+                            // `Output` associated type (parenthesized sugar is lowered to it in hir-def).
+                            LifetimeElisionKind::for_fn_ret()
+                        } else {
+                            self.ctx.lifetime_elision.clone()
+                        };
+                    self.with_lifetime_elision(lifetime_elision, |this| {
+                        match (&this.ctx.store[type_ref], this.ctx.impl_trait_mode.mode) {
+                            (TypeRef::ImplTrait(_), ImplTraitLoweringMode::Disallowed) => (),
+                            (
+                                _,
+                                ImplTraitLoweringMode::Disallowed | ImplTraitLoweringMode::Opaque,
+                            ) => {
+                                let ty = this.ctx.lower_ty(type_ref);
+                                let alias_eq = AliasEq {
+                                    alias: AliasTy::Projection(projection_ty.clone()),
+                                    ty,
+                                };
+                                predicates.push(crate::wrap_empty_binders(WhereClause::AliasEq(
+                                    alias_eq,
+                                )));
+                            }
                         }
-                    }
-                }
-                for bound in binding.bounds.iter() {
-                    predicates.extend(self.ctx.lower_type_bound(
-                        bound,
-                        TyKind::Alias(AliasTy::Projection(projection_ty.clone())).intern(Interner),
-                        false,
-                    ));
+                    });
                 }
+
+                self.with_lifetime_elision(LifetimeElisionKind::AnonymousReportError, |this| {
+                    for bound in binding.bounds.iter() {
+                        predicates.extend(
+                            this.ctx.lower_type_bound(
+                                bound,
+                                TyKind::Alias(AliasTy::Projection(projection_ty.clone()))
+                                    .intern(Interner),
+                                false,
+                            ),
+                        );
+                    }
+                });
+
                 predicates
             })
         })
     }
 }
+
+/// A const that was parsed like a type.
+pub(crate) enum TypeLikeConst<'a> {
+    Infer,
+    Path(&'a Path),
+}
+
+pub(crate) trait GenericArgsLowerer {
+    fn report_elided_lifetimes_in_path(
+        &mut self,
+        def: GenericDefId,
+        expected_count: u32,
+        hard_error: bool,
+    );
+
+    fn report_elision_failure(&mut self, def: GenericDefId, expected_count: u32);
+
+    fn report_missing_lifetime(&mut self, def: GenericDefId, expected_count: u32);
+
+    fn report_len_mismatch(
+        &mut self,
+        def: GenericDefId,
+        provided_count: u32,
+        expected_count: u32,
+        kind: IncorrectGenericsLenKind,
+    );
+
+    fn report_arg_mismatch(&mut self, param_id: GenericParamId, arg_idx: u32, has_self_arg: bool);
+
+    fn provided_kind(
+        &mut self,
+        param_id: GenericParamId,
+        param: GenericParamDataRef<'_>,
+        arg: &GenericArg,
+    ) -> crate::GenericArg;
+
+    fn provided_type_like_const(&mut self, const_ty: Ty, arg: TypeLikeConst<'_>) -> crate::Const;
+
+    fn inferred_kind(
+        &mut self,
+        def: GenericDefId,
+        param_id: GenericParamId,
+        param: GenericParamDataRef<'_>,
+        infer_args: bool,
+        preceding_args: &[crate::GenericArg],
+    ) -> crate::GenericArg;
+
+    fn parent_arg(&mut self, param_id: GenericParamId) -> crate::GenericArg;
+}
+
+/// Returns true if there was an error.
+fn check_generic_args_len(
+    args_and_bindings: Option<&GenericArgs>,
+    def: GenericDefId,
+    def_generics: &Generics,
+    infer_args: bool,
+    lifetime_elision: &LifetimeElisionKind,
+    lowering_assoc_type_generics: bool,
+    ctx: &mut impl GenericArgsLowerer,
+) -> bool {
+    let mut had_error = false;
+
+    let (mut provided_lifetimes_count, mut provided_types_and_consts_count) = (0usize, 0usize);
+    if let Some(args_and_bindings) = args_and_bindings {
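+        // An explicit `Self` type argument is matched against the `Self` param separately, so skip it here.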
+        let args_no_self = &args_and_bindings.args[usize::from(args_and_bindings.has_self_type)..];
+        for arg in args_no_self {
+            match arg {
+                GenericArg::Lifetime(_) => provided_lifetimes_count += 1,
+                GenericArg::Type(_) | GenericArg::Const(_) => provided_types_and_consts_count += 1,
+            }
+        }
+    }
+
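+    // Omitted lifetimes may be elided according to the elision mode (except in generic associated
+    // types); providing the wrong number of lifetimes explicitly is reported as a count mismatch.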
+    let lifetime_args_len = def_generics.len_lifetimes_self();
+    if provided_lifetimes_count == 0 && lifetime_args_len > 0 && !lowering_assoc_type_generics {
+        // Lifetimes of generic associated types can never be elided, so a missing lifetime there
+        // falls through to the count check below.
+        match lifetime_elision {
+            &LifetimeElisionKind::AnonymousCreateParameter { report_in_path } => {
+                ctx.report_elided_lifetimes_in_path(def, lifetime_args_len as u32, report_in_path);
+                had_error |= report_in_path;
+            }
+            LifetimeElisionKind::AnonymousReportError => {
+                ctx.report_missing_lifetime(def, lifetime_args_len as u32);
+                had_error = true
+            }
+            LifetimeElisionKind::ElisionFailure => {
+                ctx.report_elision_failure(def, lifetime_args_len as u32);
+                had_error = true;
+            }
+            LifetimeElisionKind::StaticIfNoLifetimeInScope { only_lint: _ } => {
+                // FIXME: Check there are other lifetimes in scope, and error/lint.
+            }
+            LifetimeElisionKind::Elided(_) => {
+                ctx.report_elided_lifetimes_in_path(def, lifetime_args_len as u32, false);
+            }
+            LifetimeElisionKind::Infer => {
+                // Allow eliding lifetimes.
+            }
+        }
+    } else if lifetime_args_len != provided_lifetimes_count {
+        ctx.report_len_mismatch(
+            def,
+            provided_lifetimes_count as u32,
+            lifetime_args_len as u32,
+            IncorrectGenericsLenKind::Lifetimes,
+        );
+        had_error = true;
+    }
+
+    let defaults_count =
+        def_generics.iter_self_type_or_consts().filter(|(_, param)| param.has_default()).count();
+    let named_type_and_const_params_count = def_generics
+        .iter_self_type_or_consts()
+        .filter(|(_, param)| match param {
+            TypeOrConstParamData::TypeParamData(param) => {
+                param.provenance == TypeParamProvenance::TypeParamList
+            }
+            TypeOrConstParamData::ConstParamData(_) => true,
+        })
+        .count();
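+    // `Self` and argument-position `impl Trait` params can never be written explicitly, so only
+    // named type/const params are counted; params with defaults lower the required minimum.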
+    let expected_max = named_type_and_const_params_count;
+    let expected_min =
+        if infer_args { 0 } else { named_type_and_const_params_count - defaults_count };
+    if provided_types_and_consts_count < expected_min
+        || expected_max < provided_types_and_consts_count
+    {
+        ctx.report_len_mismatch(
+            def,
+            provided_types_and_consts_count as u32,
+            named_type_and_const_params_count as u32,
+            IncorrectGenericsLenKind::TypesAndConsts,
+        );
+        had_error = true;
+    }
+
+    had_error
+}
+
+pub(crate) fn substs_from_args_and_bindings(
+    db: &dyn HirDatabase,
+    store: &ExpressionStore,
+    args_and_bindings: Option<&GenericArgs>,
+    def: GenericDefId,
+    mut infer_args: bool,
+    lifetime_elision: LifetimeElisionKind,
+    lowering_assoc_type_generics: bool,
+    explicit_self_ty: Option<Ty>,
+    ctx: &mut impl GenericArgsLowerer,
+) -> Substitution {
+    // Order is
+    // - Parent parameters
+    // - Optional Self parameter
+    // - Lifetime parameters
+    // - Type or Const parameters
+    let def_generics = generics(db, def);
+    let args_slice = args_and_bindings.map(|it| &*it.args).unwrap_or_default();
+
+    // We do not allow inference if there are specified args, i.e. we do not allow partial inference.
+    let has_non_lifetime_args =
+        args_slice.iter().any(|arg| !matches!(arg, GenericArg::Lifetime(_)));
+    infer_args &= !has_non_lifetime_args;
+
+    let had_count_error = check_generic_args_len(
+        args_and_bindings,
+        def,
+        &def_generics,
+        infer_args,
+        &lifetime_elision,
+        lowering_assoc_type_generics,
+        ctx,
+    );
+
+    let mut substs = Vec::with_capacity(def_generics.len());
+
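+    // Parent params come first in the substitution; `parent_arg` supplies placeholder values for them.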
+    substs.extend(def_generics.iter_parent_id().map(|id| ctx.parent_arg(id)));
+
+    let mut args = args_slice.iter().enumerate().peekable();
+    let mut params = def_generics.iter_self().peekable();
+
+    // If we encounter a type or const when we expect a lifetime, we infer the lifetimes.
+    // If we later encounter a lifetime, we know that the arguments were provided in the
+    // wrong order. `force_infer_lt` records the type or const that forced lifetimes to be
+    // inferred, so we can use it for diagnostics later.
+    let mut force_infer_lt = None;
+
+    let has_self_arg = args_and_bindings.is_some_and(|it| it.has_self_type);
+    // First, handle the `Self` parameter: take it from the args if provided, otherwise from
+    // `explicit_self_ty`, and as a last resort infer it.
+    if let Some(&(
+        self_param_id,
+        self_param @ GenericParamDataRef::TypeParamData(TypeParamData {
+            provenance: TypeParamProvenance::TraitSelf,
+            ..
+        }),
+    )) = params.peek()
+    {
+        let self_ty = if has_self_arg {
+            let (_, self_ty) = args.next().expect("has_self_type=true, should have Self type");
+            ctx.provided_kind(self_param_id, self_param, self_ty)
+        } else {
+            explicit_self_ty.map(|it| it.cast(Interner)).unwrap_or_else(|| {
+                ctx.inferred_kind(def, self_param_id, self_param, infer_args, &substs)
+            })
+        };
+        params.next();
+        substs.push(self_ty);
+    }
+
+    loop {
+        // We're going to iterate through the generic arguments that the user
+        // provided, matching them with the generic parameters we expect.
+        // Mismatches can occur as a result of elided lifetimes, or for malformed
+        // input. We try to handle both sensibly.
+        match (args.peek(), params.peek()) {
+            (Some(&(arg_idx, arg)), Some(&(param_id, param))) => match (arg, param) {
+                (GenericArg::Type(_), GenericParamDataRef::TypeParamData(type_param))
+                    if type_param.provenance == TypeParamProvenance::ArgumentImplTrait =>
+                {
+                    // Do not allow specifying `impl Trait` explicitly. We already report an error for that,
+                    // but if we didn't also handle it here we would treat it as specified instead of inferring it.
+                    substs.push(ctx.inferred_kind(def, param_id, param, infer_args, &substs));
+                    params.next();
+                }
+                (GenericArg::Lifetime(_), GenericParamDataRef::LifetimeParamData(_))
+                | (GenericArg::Type(_), GenericParamDataRef::TypeParamData(_))
+                | (GenericArg::Const(_), GenericParamDataRef::ConstParamData(_)) => {
+                    substs.push(ctx.provided_kind(param_id, param, arg));
+                    args.next();
+                    params.next();
+                }
+                (
+                    GenericArg::Type(_) | GenericArg::Const(_),
+                    GenericParamDataRef::LifetimeParamData(_),
+                ) => {
+                    // We expected a lifetime argument, but got a type or const
+                    // argument. That means we're inferring the lifetime.
+                    substs.push(ctx.inferred_kind(def, param_id, param, infer_args, &substs));
+                    params.next();
+                    force_infer_lt = Some((arg_idx as u32, param_id));
+                }
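+                // A const argument written as a bare path or `_` parses as a type; try to
+                // reinterpret it as a const before reporting a kind mismatch.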
+                (GenericArg::Type(type_ref), GenericParamDataRef::ConstParamData(_)) => {
+                    if let Some(konst) = type_looks_like_const(store, *type_ref) {
+                        let GenericParamId::ConstParamId(param_id) = param_id else {
+                            panic!("unmatching param kinds");
+                        };
+                        let const_ty = db.const_param_ty(param_id);
+                        substs.push(ctx.provided_type_like_const(const_ty, konst).cast(Interner));
+                        args.next();
+                        params.next();
+                    } else {
+                        // See the `_ => { ... }` branch.
+                        if !had_count_error {
+                            ctx.report_arg_mismatch(param_id, arg_idx as u32, has_self_arg);
+                        }
+                        while args.next().is_some() {}
+                    }
+                }
+                _ => {
+                    // We expected one kind of parameter, but the user provided
+                    // another. This is an error. However, if we already know that
+                    // the arguments don't match up with the parameters, we won't issue
+                    // an additional error, as the user already knows what's wrong.
+                    if !had_count_error {
+                        ctx.report_arg_mismatch(param_id, arg_idx as u32, has_self_arg);
+                    }
+
+                    // We've reported the error, but we want to make sure that this
+                    // problem doesn't bubble down and create additional, irrelevant
+                    // errors. In this case, we're simply going to ignore the argument
+                    // and any following arguments. The rest of the parameters will be
+                    // inferred.
+                    while args.next().is_some() {}
+                }
+            },
+
+            (Some(&(_, arg)), None) => {
+                // We should never be able to reach this point with well-formed input.
+                // There are two situations in which we can encounter this issue.
+                //
+                //  1. The number of arguments is incorrect. In this case, an error
+                //     will already have been emitted, and we can ignore it.
+                //  2. We've inferred some lifetimes, which have been provided later (i.e.
+                //     after a type or const). We want to throw an error in this case.
+                if !had_count_error {
+                    assert!(
+                        matches!(arg, GenericArg::Lifetime(_)),
+                        "the only possible situation here is incorrect lifetime order"
+                    );
+                    let (provided_arg_idx, param_id) =
+                        force_infer_lt.expect("lifetimes ought to have been inferred");
+                    ctx.report_arg_mismatch(param_id, provided_arg_idx, has_self_arg);
+                }
+
+                break;
+            }
+
+            (None, Some(&(param_id, param))) => {
+                // If there are fewer arguments than parameters, it means we're inferring the remaining arguments.
+                let param = if let GenericParamId::LifetimeParamId(_) = param_id {
+                    match &lifetime_elision {
+                        LifetimeElisionKind::ElisionFailure
+                        | LifetimeElisionKind::AnonymousCreateParameter { report_in_path: true }
+                        | LifetimeElisionKind::AnonymousReportError => {
+                            assert!(had_count_error);
+                            ctx.inferred_kind(def, param_id, param, infer_args, &substs)
+                        }
+                        LifetimeElisionKind::StaticIfNoLifetimeInScope { only_lint: _ } => {
+                            static_lifetime().cast(Interner)
+                        }
+                        LifetimeElisionKind::Elided(lifetime) => lifetime.clone().cast(Interner),
+                        LifetimeElisionKind::AnonymousCreateParameter { report_in_path: false }
+                        | LifetimeElisionKind::Infer => {
+                            // FIXME: With `AnonymousCreateParameter`, we need to create a new lifetime parameter here
+                            // (but this will probably be done in hir-def lowering instead).
+                            ctx.inferred_kind(def, param_id, param, infer_args, &substs)
+                        }
+                    }
+                } else {
+                    ctx.inferred_kind(def, param_id, param, infer_args, &substs)
+                };
+                substs.push(param);
+                params.next();
+            }
+
+            (None, None) => break,
+        }
+    }
+
+    Substitution::from_iter(Interner, substs)
+}
+
+fn type_looks_like_const(
+    store: &ExpressionStore,
+    type_ref: TypeRefId,
+) -> Option<TypeLikeConst<'_>> {
+    // A path or `_` const will be parsed as a type rather than a const, because when parsing/lowering
+    // in hir-def we don't yet know the expected argument kind. rustc does this a bit differently:
+    // when lowering to HIR it resolves the path, and only if it doesn't resolve in the type namespace
+    // is it lowered as a const. Our behavior can therefore deviate from rustc when the value resolves
+    // in both the type and value namespaces, but that should only make us accept more code.
+    let type_ref = &store[type_ref];
+    match type_ref {
+        TypeRef::Path(path) => Some(TypeLikeConst::Path(path)),
+        TypeRef::Placeholder => Some(TypeLikeConst::Infer),
+        _ => None,
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs
index 2f38e8fa14c0b..2abc1ac62a99a 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs
@@ -5,12 +5,15 @@
 
 use chalk_solve::rust_ir;
 
-use base_db::ra_salsa::{self, InternKey};
 use hir_def::{LifetimeParamId, TraitId, TypeAliasId, TypeOrConstParamId};
+use salsa::{
+    Id,
+    plumbing::{AsId, FromId},
+};
 
 use crate::{
-    chalk_db, db::HirDatabase, AssocTypeId, CallableDefId, ChalkTraitId, FnDefId, ForeignDefId,
-    Interner, OpaqueTyId, PlaceholderIndex,
+    AssocTypeId, CallableDefId, ChalkTraitId, FnDefId, ForeignDefId, Interner, OpaqueTyId,
+    PlaceholderIndex, chalk_db, db::HirDatabase,
 };
 
 pub(crate) trait ToChalk {
@@ -30,11 +33,11 @@ impl ToChalk for hir_def::ImplId {
     type Chalk = chalk_db::ImplId;
 
     fn to_chalk(self, _db: &dyn HirDatabase) -> chalk_db::ImplId {
-        chalk_ir::ImplId(self.as_intern_id())
+        chalk_ir::ImplId(self.as_id())
     }
 
     fn from_chalk(_db: &dyn HirDatabase, impl_id: chalk_db::ImplId) -> hir_def::ImplId {
-        InternKey::from_intern_id(impl_id.0)
+        FromId::from_id(impl_id.0.as_id())
     }
 }
 
@@ -56,84 +59,85 @@ impl ToChalk for TypeAliasAsValue {
     type Chalk = chalk_db::AssociatedTyValueId;
 
     fn to_chalk(self, _db: &dyn HirDatabase) -> chalk_db::AssociatedTyValueId {
-        rust_ir::AssociatedTyValueId(self.0.as_intern_id())
+        rust_ir::AssociatedTyValueId(self.0.as_id())
     }
 
     fn from_chalk(
         _db: &dyn HirDatabase,
         assoc_ty_value_id: chalk_db::AssociatedTyValueId,
     ) -> TypeAliasAsValue {
-        TypeAliasAsValue(TypeAliasId::from_intern_id(assoc_ty_value_id.0))
+        TypeAliasAsValue(TypeAliasId::from_id(assoc_ty_value_id.0))
     }
 }
 
 impl From<FnDefId> for crate::db::InternedCallableDefId {
     fn from(fn_def_id: FnDefId) -> Self {
-        InternKey::from_intern_id(fn_def_id.0)
+        Self::from_id(fn_def_id.0)
     }
 }
 
 impl From<crate::db::InternedCallableDefId> for FnDefId {
     fn from(callable_def_id: crate::db::InternedCallableDefId) -> Self {
-        chalk_ir::FnDefId(callable_def_id.as_intern_id())
+        chalk_ir::FnDefId(callable_def_id.as_id())
     }
 }
 
 impl From<OpaqueTyId> for crate::db::InternedOpaqueTyId {
     fn from(id: OpaqueTyId) -> Self {
-        InternKey::from_intern_id(id.0)
+        FromId::from_id(id.0)
     }
 }
 
 impl From<crate::db::InternedOpaqueTyId> for OpaqueTyId {
     fn from(id: crate::db::InternedOpaqueTyId) -> Self {
-        chalk_ir::OpaqueTyId(id.as_intern_id())
+        chalk_ir::OpaqueTyId(id.as_id())
     }
 }
 
 impl From<chalk_ir::ClosureId<Interner>> for crate::db::InternedClosureId {
     fn from(id: chalk_ir::ClosureId<Interner>) -> Self {
-        Self::from_intern_id(id.0)
+        FromId::from_id(id.0)
     }
 }
 
 impl From<crate::db::InternedClosureId> for chalk_ir::ClosureId<Interner> {
     fn from(id: crate::db::InternedClosureId) -> Self {
-        chalk_ir::ClosureId(id.as_intern_id())
+        chalk_ir::ClosureId(id.as_id())
     }
 }
 
 impl From<chalk_ir::CoroutineId<Interner>> for crate::db::InternedCoroutineId {
     fn from(id: chalk_ir::CoroutineId<Interner>) -> Self {
-        Self::from_intern_id(id.0)
+        Self::from_id(id.0)
     }
 }
 
 impl From<crate::db::InternedCoroutineId> for chalk_ir::CoroutineId<Interner> {
     fn from(id: crate::db::InternedCoroutineId) -> Self {
-        chalk_ir::CoroutineId(id.as_intern_id())
+        chalk_ir::CoroutineId(id.as_id())
     }
 }
 
 pub fn to_foreign_def_id(id: TypeAliasId) -> ForeignDefId {
-    chalk_ir::ForeignDefId(ra_salsa::InternKey::as_intern_id(&id))
+    chalk_ir::ForeignDefId(id.as_id())
 }
 
 pub fn from_foreign_def_id(id: ForeignDefId) -> TypeAliasId {
-    ra_salsa::InternKey::from_intern_id(id.0)
+    FromId::from_id(id.0)
 }
 
 pub fn to_assoc_type_id(id: TypeAliasId) -> AssocTypeId {
-    chalk_ir::AssocTypeId(ra_salsa::InternKey::as_intern_id(&id))
+    chalk_ir::AssocTypeId(id.as_id())
 }
 
 pub fn from_assoc_type_id(id: AssocTypeId) -> TypeAliasId {
-    ra_salsa::InternKey::from_intern_id(id.0)
+    FromId::from_id(id.0)
 }
 
 pub fn from_placeholder_idx(db: &dyn HirDatabase, idx: PlaceholderIndex) -> TypeOrConstParamId {
     assert_eq!(idx.ui, chalk_ir::UniverseIndex::ROOT);
-    let interned_id = ra_salsa::InternKey::from_intern_id(ra_salsa::InternId::from(idx.idx));
+    // SAFETY: We cannot really encapsulate this, unfortunately; the index should originate from
+    // `to_placeholder_idx`, which stores a valid `Id`, but we cannot enforce that here.
+    let interned_id = FromId::from_id(unsafe { Id::from_u32(idx.idx.try_into().unwrap()) });
     db.lookup_intern_type_or_const_param_id(interned_id)
 }
 
@@ -141,13 +145,14 @@ pub fn to_placeholder_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Place
     let interned_id = db.intern_type_or_const_param_id(id);
     PlaceholderIndex {
         ui: chalk_ir::UniverseIndex::ROOT,
-        idx: ra_salsa::InternKey::as_intern_id(&interned_id).as_usize(),
+        idx: interned_id.as_id().as_u32() as usize,
     }
 }
 
 pub fn lt_from_placeholder_idx(db: &dyn HirDatabase, idx: PlaceholderIndex) -> LifetimeParamId {
     assert_eq!(idx.ui, chalk_ir::UniverseIndex::ROOT);
-    let interned_id = ra_salsa::InternKey::from_intern_id(ra_salsa::InternId::from(idx.idx));
+    // SAFETY: We cannot really encapsulate this, unfortunately; the index should originate from
+    // `lt_to_placeholder_idx`, which stores a valid `Id`, but we cannot enforce that here.
+    let interned_id = FromId::from_id(unsafe { Id::from_u32(idx.idx.try_into().unwrap()) });
     db.lookup_intern_lifetime_param_id(interned_id)
 }
 
@@ -155,14 +160,14 @@ pub fn lt_to_placeholder_idx(db: &dyn HirDatabase, id: LifetimeParamId) -> Place
     let interned_id = db.intern_lifetime_param_id(id);
     PlaceholderIndex {
         ui: chalk_ir::UniverseIndex::ROOT,
-        idx: ra_salsa::InternKey::as_intern_id(&interned_id).as_usize(),
+        idx: interned_id.as_id().as_u32() as usize,
     }
 }
 
 pub fn to_chalk_trait_id(id: TraitId) -> ChalkTraitId {
-    chalk_ir::TraitId(ra_salsa::InternKey::as_intern_id(&id))
+    chalk_ir::TraitId(id.as_id())
 }
 
 pub fn from_chalk_trait_id(id: ChalkTraitId) -> TraitId {
-    ra_salsa::InternKey::from_intern_id(id.0)
+    FromId::from_id(id.0)
 }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
index c722800527190..8f8e26eca2ae3 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
@@ -5,34 +5,34 @@
 use std::ops::ControlFlow;
 
 use arrayvec::ArrayVec;
-use base_db::CrateId;
-use chalk_ir::{cast::Cast, UniverseIndex, WithKind};
+use base_db::Crate;
+use chalk_ir::{UniverseIndex, WithKind, cast::Cast};
 use hir_def::{
-    data::{adt::StructFlags, ImplData, TraitFlags},
-    nameres::DefMap,
     AssocItemId, BlockId, ConstId, FunctionId, HasModule, ImplId, ItemContainerId, Lookup,
     ModuleId, TraitId,
+    nameres::{DefMap, assoc::ImplItems},
+    signatures::{ConstFlags, EnumFlags, FnFlags, StructFlags, TraitFlags, TypeAliasFlags},
 };
 use hir_expand::name::Name;
 use intern::sym;
 use rustc_hash::{FxHashMap, FxHashSet};
-use smallvec::{smallvec, SmallVec};
+use smallvec::{SmallVec, smallvec};
 use stdx::never;
 use triomphe::Arc;
 
 use crate::{
+    AdtId, Canonical, CanonicalVarKinds, DebruijnIndex, DynTyExt, ForeignDefId, GenericArgData,
+    Goal, Guidance, InEnvironment, Interner, Mutability, Scalar, Solution, Substitution,
+    TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt, TyKind, TyVariableKind,
+    VariableKind, WhereClause,
     autoderef::{self, AutoderefKind},
     db::HirDatabase,
     error_lifetime, from_chalk_trait_id, from_foreign_def_id,
-    infer::{unify::InferenceTable, Adjust, Adjustment, OverloadedDeref, PointerCast},
+    infer::{Adjust, Adjustment, OverloadedDeref, PointerCast, unify::InferenceTable},
     lang_items::is_box,
     primitive::{FloatTy, IntTy, UintTy},
     to_chalk_trait_id,
     utils::all_super_traits,
-    AdtId, Canonical, CanonicalVarKinds, DebruijnIndex, DynTyExt, ForeignDefId, GenericArgData,
-    Goal, Guidance, InEnvironment, Interner, Mutability, Scalar, Solution, Substitution,
-    TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt, TyKind, TyVariableKind,
-    VariableKind, WhereClause,
 };
 
 /// This is used as a key for indexing impls.
@@ -148,7 +148,7 @@ pub struct TraitImpls {
 }
 
 impl TraitImpls {
-    pub(crate) fn trait_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
+    pub(crate) fn trait_impls_in_crate_query(db: &dyn HirDatabase, krate: Crate) -> Arc<Self> {
         let _p = tracing::info_span!("trait_impls_in_crate_query", ?krate).entered();
         let mut impls = FxHashMap::default();
 
@@ -166,22 +166,16 @@ impl TraitImpls {
 
         Self::collect_def_map(db, &mut impls, &db.block_def_map(block));
 
-        if impls.is_empty() {
-            None
-        } else {
-            Some(Arc::new(Self::finish(impls)))
-        }
+        if impls.is_empty() { None } else { Some(Arc::new(Self::finish(impls))) }
     }
 
     pub(crate) fn trait_impls_in_deps_query(
         db: &dyn HirDatabase,
-        krate: CrateId,
+        krate: Crate,
     ) -> Arc<[Arc<Self>]> {
         let _p = tracing::info_span!("trait_impls_in_deps_query", ?krate).entered();
-        let crate_graph = db.crate_graph();
-
         Arc::from_iter(
-            crate_graph.transitive_deps(krate).map(|krate| db.trait_impls_in_crate(krate)),
+            db.transitive_deps(krate).into_iter().map(|krate| db.trait_impls_in_crate(krate)),
         )
     }
 
@@ -203,7 +197,7 @@ impl TraitImpls {
                 // FIXME: Reservation impls should be considered during coherence checks. If we are
                 // (ever) to implement coherence checks, this filtering should be done by the trait
                 // solver.
-                if db.attrs(impl_id.into()).by_key(&sym::rustc_reservation_impl).exists() {
+                if db.attrs(impl_id.into()).by_key(sym::rustc_reservation_impl).exists() {
                     continue;
                 }
                 let target_trait = match db.impl_trait(impl_id) {
@@ -219,7 +213,7 @@ impl TraitImpls {
             // const _: () = { ... };
             for konst in module_data.scope.unnamed_consts() {
                 let body = db.body(konst.into());
-                for (_, block_def_map) in body.blocks(db.upcast()) {
+                for (_, block_def_map) in body.blocks(db) {
                     Self::collect_def_map(db, map, &block_def_map);
                 }
             }
@@ -282,7 +276,7 @@ pub struct InherentImpls {
 }
 
 impl InherentImpls {
-    pub(crate) fn inherent_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
+    pub(crate) fn inherent_impls_in_crate_query(db: &dyn HirDatabase, krate: Crate) -> Arc<Self> {
         let _p = tracing::info_span!("inherent_impls_in_crate_query", ?krate).entered();
         let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() };
 
@@ -319,7 +313,7 @@ impl InherentImpls {
     fn collect_def_map(&mut self, db: &dyn HirDatabase, def_map: &DefMap) {
         for (_module_id, module_data) in def_map.modules() {
             for impl_id in module_data.scope.impls() {
-                let data = db.impl_data(impl_id);
+                let data = db.impl_signature(impl_id);
                 if data.target_trait.is_some() {
                     continue;
                 }
@@ -327,7 +321,7 @@ impl InherentImpls {
                 let self_ty = db.impl_self_ty(impl_id);
                 let self_ty = self_ty.skip_binders();
 
-                match is_inherent_impl_coherent(db, def_map, &data, self_ty) {
+                match is_inherent_impl_coherent(db, def_map, impl_id, self_ty) {
                     true => {
                         // `fp` should only be `None` in error cases (either erroneous code or incomplete name resolution)
                         if let Some(fp) = TyFingerprint::for_inherent_impl(self_ty) {
@@ -342,7 +336,7 @@ impl InherentImpls {
             // const _: () = { ... };
             for konst in module_data.scope.unnamed_consts() {
                 let body = db.body(konst.into());
-                for (_, block_def_map) in body.blocks(db.upcast()) {
+                for (_, block_def_map) in body.blocks(db) {
                     self.collect_def_map(db, &block_def_map);
                 }
             }
@@ -367,16 +361,15 @@ impl InherentImpls {
 
 pub(crate) fn incoherent_inherent_impl_crates(
     db: &dyn HirDatabase,
-    krate: CrateId,
+    krate: Crate,
     fp: TyFingerprint,
-) -> SmallVec<[CrateId; 2]> {
+) -> SmallVec<[Crate; 2]> {
     let _p = tracing::info_span!("incoherent_inherent_impl_crates").entered();
     let mut res = SmallVec::new();
-    let crate_graph = db.crate_graph();
 
     // should pass crate for fingerprint and do reverse deps
 
-    for krate in crate_graph.transitive_deps(krate) {
+    for krate in db.transitive_deps(krate) {
         let impls = db.inherent_impls_in_crate(krate);
         if impls.map.get(&fp).is_some_and(|v| !v.is_empty()) {
             res.push(krate);
@@ -386,49 +379,54 @@ pub(crate) fn incoherent_inherent_impl_crates(
     res
 }
 
-pub fn def_crates(
-    db: &dyn HirDatabase,
-    ty: &Ty,
-    cur_crate: CrateId,
-) -> Option<SmallVec<[CrateId; 2]>> {
+pub fn def_crates(db: &dyn HirDatabase, ty: &Ty, cur_crate: Crate) -> Option<SmallVec<[Crate; 2]>> {
     match ty.kind(Interner) {
         &TyKind::Adt(AdtId(def_id), _) => {
             let rustc_has_incoherent_inherent_impls = match def_id {
                 hir_def::AdtId::StructId(id) => db
-                    .struct_data(id)
+                    .struct_signature(id)
                     .flags
-                    .contains(StructFlags::IS_RUSTC_HAS_INCOHERENT_INHERENT_IMPL),
+                    .contains(StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS),
                 hir_def::AdtId::UnionId(id) => db
-                    .union_data(id)
+                    .union_signature(id)
+                    .flags
+                    .contains(StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS),
+                hir_def::AdtId::EnumId(id) => db
+                    .enum_signature(id)
                     .flags
-                    .contains(StructFlags::IS_RUSTC_HAS_INCOHERENT_INHERENT_IMPL),
-                hir_def::AdtId::EnumId(id) => db.enum_data(id).rustc_has_incoherent_inherent_impls,
+                    .contains(EnumFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS),
             };
             Some(if rustc_has_incoherent_inherent_impls {
                 db.incoherent_inherent_impl_crates(cur_crate, TyFingerprint::Adt(def_id))
             } else {
-                smallvec![def_id.module(db.upcast()).krate()]
+                smallvec![def_id.module(db).krate()]
             })
         }
         &TyKind::Foreign(id) => {
             let alias = from_foreign_def_id(id);
-            Some(if db.type_alias_data(alias).rustc_has_incoherent_inherent_impls {
-                db.incoherent_inherent_impl_crates(cur_crate, TyFingerprint::ForeignType(id))
-            } else {
-                smallvec![alias.module(db.upcast()).krate()]
-            })
+            Some(
+                if db
+                    .type_alias_signature(alias)
+                    .flags
+                    .contains(TypeAliasFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPL)
+                {
+                    db.incoherent_inherent_impl_crates(cur_crate, TyFingerprint::ForeignType(id))
+                } else {
+                    smallvec![alias.module(db).krate()]
+                },
+            )
         }
         TyKind::Dyn(_) => {
             let trait_id = ty.dyn_trait()?;
             Some(
                 if db
-                    .trait_data(trait_id)
+                    .trait_signature(trait_id)
                     .flags
                     .contains(TraitFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS)
                 {
                     db.incoherent_inherent_impl_crates(cur_crate, TyFingerprint::Dyn(trait_id))
                 } else {
-                    smallvec![trait_id.module(db.upcast()).krate()]
+                    smallvec![trait_id.module(db).krate()]
                 },
             )
         }
@@ -596,7 +594,7 @@ pub(crate) fn iterate_method_candidates<T>(
     mut callback: impl FnMut(ReceiverAdjustments, AssocItemId, bool) -> Option<T>,
 ) -> Option<T> {
     let mut slot = None;
-    let _ = iterate_method_candidates_dyn(
+    _ = iterate_method_candidates_dyn(
         ty,
         db,
         env,
@@ -622,15 +620,15 @@ pub fn lookup_impl_const(
     const_id: ConstId,
     subs: Substitution,
 ) -> (ConstId, Substitution) {
-    let trait_id = match const_id.lookup(db.upcast()).container {
+    let trait_id = match const_id.lookup(db).container {
         ItemContainerId::TraitId(id) => id,
         _ => return (const_id, subs),
     };
     let substitution = Substitution::from_iter(Interner, subs.iter(Interner));
     let trait_ref = TraitRef { trait_id: to_chalk_trait_id(trait_id), substitution };
 
-    let const_data = db.const_data(const_id);
-    let name = match const_data.name.as_ref() {
+    let const_signature = db.const_signature(const_id);
+    let name = match const_signature.name.as_ref() {
         Some(name) => name,
         None => return (const_id, subs),
     };
@@ -650,14 +648,14 @@ pub fn is_dyn_method(
     func: FunctionId,
     fn_subst: Substitution,
 ) -> Option<usize> {
-    let ItemContainerId::TraitId(trait_id) = func.lookup(db.upcast()).container else {
+    let ItemContainerId::TraitId(trait_id) = func.lookup(db).container else {
         return None;
     };
     let trait_params = db.generic_params(trait_id.into()).len();
     let fn_params = fn_subst.len(Interner) - trait_params;
     let trait_ref = TraitRef {
         trait_id: to_chalk_trait_id(trait_id),
-        substitution: Substitution::from_iter(Interner, fn_subst.iter(Interner).skip(fn_params)),
+        substitution: Substitution::from_iter(Interner, fn_subst.iter(Interner).take(trait_params)),
     };
     let self_ty = trait_ref.self_type_parameter(Interner);
     if let TyKind::Dyn(d) = self_ty.kind(Interner) {
@@ -669,7 +667,7 @@ pub fn is_dyn_method(
             .map(|it| it.skip_binders())
             .flat_map(|it| match it {
                 WhereClause::Implemented(tr) => {
-                    all_super_traits(db.upcast(), from_chalk_trait_id(tr.trait_id))
+                    all_super_traits(db, from_chalk_trait_id(tr.trait_id))
                 }
                 _ => smallvec![],
             })
@@ -692,33 +690,29 @@ pub(crate) fn lookup_impl_method_query(
     func: FunctionId,
     fn_subst: Substitution,
 ) -> (FunctionId, Substitution) {
-    let ItemContainerId::TraitId(trait_id) = func.lookup(db.upcast()).container else {
+    let ItemContainerId::TraitId(trait_id) = func.lookup(db).container else {
         return (func, fn_subst);
     };
     let trait_params = db.generic_params(trait_id.into()).len();
-    let fn_params = fn_subst.len(Interner) - trait_params;
     let trait_ref = TraitRef {
         trait_id: to_chalk_trait_id(trait_id),
-        substitution: Substitution::from_iter(Interner, fn_subst.iter(Interner).skip(fn_params)),
+        substitution: Substitution::from_iter(Interner, fn_subst.iter(Interner).take(trait_params)),
     };
 
-    let name = &db.function_data(func).name;
+    let name = &db.function_signature(func).name;
     let Some((impl_fn, impl_subst)) =
         lookup_impl_assoc_item_for_trait_ref(trait_ref, db, env, name).and_then(|assoc| {
-            if let (AssocItemId::FunctionId(id), subst) = assoc {
-                Some((id, subst))
-            } else {
-                None
-            }
+            if let (AssocItemId::FunctionId(id), subst) = assoc { Some((id, subst)) } else { None }
         })
     else {
         return (func, fn_subst);
     };
+
     (
         impl_fn,
         Substitution::from_iter(
             Interner,
-            fn_subst.iter(Interner).take(fn_params).chain(impl_subst.iter(Interner)),
+            impl_subst.iter(Interner).chain(fn_subst.iter(Interner).skip(trait_params)),
         ),
     )
 }
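
The two hunks above change how a trait method's substitution is split: the first `trait_params` arguments of `fn_subst` are now taken as the trait's own arguments (forming the trait ref) and the remainder as the method's, instead of counting the method's arguments from the end. A toy model of that split, with plain strings standing in for chalk generic arguments (the concrete names are made up for illustration):

    fn main() {
        // Ordering assumed by `take(trait_params)` / `skip(trait_params)`:
        // trait arguments first, then the method's own arguments.
        let trait_params = 2;
        let fn_subst = ["Self = Vec<u32>", "A = u32", "I = vec::IntoIter<u32>"];
        let (trait_args, fn_args) = fn_subst.split_at(trait_params);
        assert_eq!(trait_args, ["Self = Vec<u32>", "A = u32"]);
        assert_eq!(fn_args, ["I = vec::IntoIter<u32>"]);
    }
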
@@ -734,13 +728,11 @@ fn lookup_impl_assoc_item_for_trait_ref(
     let self_ty_fp = TyFingerprint::for_trait_impl(&self_ty)?;
     let impls = db.trait_impls_in_deps(env.krate);
 
-    let trait_module = hir_trait_id.module(db.upcast());
+    let trait_module = hir_trait_id.module(db);
     let type_module = match self_ty_fp {
-        TyFingerprint::Adt(adt_id) => Some(adt_id.module(db.upcast())),
-        TyFingerprint::ForeignType(type_id) => {
-            Some(from_foreign_def_id(type_id).module(db.upcast()))
-        }
-        TyFingerprint::Dyn(trait_id) => Some(trait_id.module(db.upcast())),
+        TyFingerprint::Adt(adt_id) => Some(adt_id.module(db)),
+        TyFingerprint::ForeignType(type_id) => Some(from_foreign_def_id(type_id).module(db)),
+        TyFingerprint::Dyn(trait_id) => Some(trait_id.module(db)),
         _ => None,
     };
 
@@ -771,11 +763,10 @@ fn find_matching_impl(
     mut impls: impl Iterator<Item = ImplId>,
     mut table: InferenceTable<'_>,
     actual_trait_ref: TraitRef,
-) -> Option<(Arc<ImplData>, Substitution)> {
+) -> Option<(Arc<ImplItems>, Substitution)> {
     let db = table.db;
     impls.find_map(|impl_| {
         table.run_in_snapshot(|table| {
-            let impl_data = db.impl_data(impl_);
             let impl_substs =
                 TyBuilder::subst_for_def(db, impl_, None).fill_with_inference_vars(table).build();
             let trait_ref = db
@@ -793,7 +784,7 @@ fn find_matching_impl(
             let goal = crate::Goal::all(Interner, wcs);
             table.try_obligation(goal.clone())?;
             table.register_obligation(goal);
-            Some((impl_data, table.resolve_completely(impl_substs)))
+            Some((db.impl_items(impl_), table.resolve_completely(impl_substs)))
         })
     })
 }
@@ -801,7 +792,7 @@ fn find_matching_impl(
 fn is_inherent_impl_coherent(
     db: &dyn HirDatabase,
     def_map: &DefMap,
-    impl_data: &ImplData,
+    impl_id: ImplId,
     self_ty: &Ty,
 ) -> bool {
     let self_ty = self_ty.kind(Interner);
@@ -816,9 +807,9 @@ fn is_inherent_impl_coherent(
         | TyKind::Str
         | TyKind::Scalar(_) => def_map.is_rustc_coherence_is_core(),
 
-        &TyKind::Adt(AdtId(adt), _) => adt.module(db.upcast()).krate() == def_map.krate(),
+        &TyKind::Adt(AdtId(adt), _) => adt.module(db).krate() == def_map.krate(),
         TyKind::Dyn(it) => it.principal_id().is_some_and(|trait_id| {
-            from_chalk_trait_id(trait_id).module(db.upcast()).krate() == def_map.krate()
+            from_chalk_trait_id(trait_id).module(db).krate() == def_map.krate()
         }),
 
         _ => true,
@@ -837,29 +828,40 @@ fn is_inherent_impl_coherent(
 
             &TyKind::Adt(AdtId(adt), _) => match adt {
                 hir_def::AdtId::StructId(id) => db
-                    .struct_data(id)
+                    .struct_signature(id)
                     .flags
-                    .contains(StructFlags::IS_RUSTC_HAS_INCOHERENT_INHERENT_IMPL),
+                    .contains(StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS),
                 hir_def::AdtId::UnionId(id) => db
-                    .union_data(id)
+                    .union_signature(id)
+                    .flags
+                    .contains(StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS),
+                hir_def::AdtId::EnumId(it) => db
+                    .enum_signature(it)
                     .flags
-                    .contains(StructFlags::IS_RUSTC_HAS_INCOHERENT_INHERENT_IMPL),
-                hir_def::AdtId::EnumId(it) => db.enum_data(it).rustc_has_incoherent_inherent_impls,
+                    .contains(EnumFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS),
             },
             TyKind::Dyn(it) => it.principal_id().is_some_and(|trait_id| {
-                db.trait_data(from_chalk_trait_id(trait_id))
+                db.trait_signature(from_chalk_trait_id(trait_id))
                     .flags
                     .contains(TraitFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS)
             }),
 
             _ => false,
         };
+        let items = db.impl_items(impl_id);
         rustc_has_incoherent_inherent_impls
-            && !impl_data.items.is_empty()
-            && impl_data.items.iter().all(|&(_, assoc)| match assoc {
-                AssocItemId::FunctionId(it) => db.function_data(it).rustc_allow_incoherent_impl,
-                AssocItemId::ConstId(it) => db.const_data(it).rustc_allow_incoherent_impl,
-                AssocItemId::TypeAliasId(it) => db.type_alias_data(it).rustc_allow_incoherent_impl,
+            && !items.items.is_empty()
+            && items.items.iter().all(|&(_, assoc)| match assoc {
+                AssocItemId::FunctionId(it) => {
+                    db.function_signature(it).flags.contains(FnFlags::RUSTC_ALLOW_INCOHERENT_IMPL)
+                }
+                AssocItemId::ConstId(it) => {
+                    db.const_signature(it).flags.contains(ConstFlags::RUSTC_ALLOW_INCOHERENT_IMPL)
+                }
+                AssocItemId::TypeAliasId(it) => db
+                    .type_alias_signature(it)
+                    .flags
+                    .contains(TypeAliasFlags::RUSTC_ALLOW_INCOHERENT_IMPL),
             })
     }
 }
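
As a user-facing reminder of the rule `is_inherent_impl_coherent` models: an inherent impl must live in the crate that defines the self type, unless the type and every impl item opt in through the `rustc_has_incoherent_inherent_impls` / `rustc_allow_incoherent_impl` attributes checked above. A minimal sketch (the rejected impl is shown commented out):

    struct Local;

    impl Local {
        fn hello(&self) {} // fine: `Local` is defined in this crate
    }

    // impl String {
    //     fn hello(&self) {} // E0116: inherent impl on a type from another crate
    // }

    fn main() {
        Local.hello();
    }
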
@@ -878,45 +880,52 @@ pub fn check_orphan_rules(db: &dyn HirDatabase, impl_: ImplId) -> bool {
         return true;
     };
 
-    let local_crate = impl_.lookup(db.upcast()).container.krate();
+    let local_crate = impl_.lookup(db).container.krate();
     let is_local = |tgt_crate| tgt_crate == local_crate;
 
     let trait_ref = impl_trait.substitute(Interner, &substs);
     let trait_id = from_chalk_trait_id(trait_ref.trait_id);
-    if is_local(trait_id.module(db.upcast()).krate()) {
+    if is_local(trait_id.module(db).krate()) {
         // trait to be implemented is local
         return true;
     }
 
-    let unwrap_fundamental = |ty: Ty| match ty.kind(Interner) {
-        TyKind::Ref(_, _, referenced) => referenced.clone(),
-        &TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), ref subs) => {
-            let struct_data = db.struct_data(s);
-            if struct_data.flags.contains(StructFlags::IS_FUNDAMENTAL) {
-                let next = subs.type_parameters(Interner).next();
-                match next {
-                    Some(ty) => ty,
-                    None => ty,
+    let unwrap_fundamental = |mut ty: Ty| {
+        // Unwrap all layers of fundamental types with a loop.
+        loop {
+            match ty.kind(Interner) {
+                TyKind::Ref(_, _, referenced) => ty = referenced.clone(),
+                &TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), ref subs) => {
+                    let struct_signature = db.struct_signature(s);
+                    if struct_signature.flags.contains(StructFlags::FUNDAMENTAL) {
+                        let next = subs.type_parameters(Interner).next();
+                        match next {
+                            Some(it) => ty = it,
+                            None => break ty,
+                        }
+                    } else {
+                        break ty;
+                    }
                 }
-            } else {
-                ty
+                _ => break ty,
             }
         }
-        _ => ty,
     };
     //   - At least one of the types `T0..=Tn` must be a local type. Let `Ti` be the first such type.
+
+    // FIXME: param coverage
+    //   - No uncovered type parameters `P1..=Pn` may appear in `T0..Ti` (excluding `Ti`)
     let is_not_orphan = trait_ref.substitution.type_parameters(Interner).any(|ty| {
         match unwrap_fundamental(ty).kind(Interner) {
-            &TyKind::Adt(AdtId(id), _) => is_local(id.module(db.upcast()).krate()),
+            &TyKind::Adt(AdtId(id), _) => is_local(id.module(db).krate()),
             TyKind::Error => true,
-            TyKind::Dyn(it) => it.principal_id().is_some_and(|trait_id| {
-                is_local(from_chalk_trait_id(trait_id).module(db.upcast()).krate())
-            }),
+            TyKind::Dyn(it) => it
+                .principal_id()
+                .is_some_and(|trait_id| is_local(from_chalk_trait_id(trait_id).module(db).krate())),
             _ => false,
         }
     });
-    // FIXME: param coverage
-    //   - No uncovered type parameters `P1..=Pn` may appear in `T0..Ti`` (excluding `Ti`)
+    #[allow(clippy::let_and_return)]
     is_not_orphan
 }
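
The rewritten `unwrap_fundamental` now peels an arbitrary number of reference and `#[fundamental]` layers instead of exactly one. A toy model of that loop shape (`ToyTy` is made up and is not the real `Ty`):

    enum ToyTy {
        Ref(Box<ToyTy>),         // models `TyKind::Ref`
        Fundamental(Box<ToyTy>), // models a `#[fundamental]` struct with a type parameter
        Adt(&'static str),       // anything else stops the unwrapping
    }

    fn unwrap_fundamental(mut ty: ToyTy) -> ToyTy {
        loop {
            match ty {
                ToyTy::Ref(inner) | ToyTy::Fundamental(inner) => ty = *inner,
                other => break other,
            }
        }
    }

    fn main() {
        // `&&Pin<Local>`-style nesting unwraps all the way down to the ADT.
        let ty = ToyTy::Ref(Box::new(ToyTy::Ref(Box::new(ToyTy::Fundamental(
            Box::new(ToyTy::Adt("Local")),
        )))));
        assert!(matches!(unwrap_fundamental(ty), ToyTy::Adt("Local")));
    }
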
 
@@ -1213,7 +1222,7 @@ fn iterate_trait_method_candidates(
     let TraitEnvironment { krate, block, .. } = *table.trait_env;
 
     'traits: for &t in traits_in_scope {
-        let data = db.trait_data(t);
+        let data = db.trait_signature(t);
 
         // Traits annotated with `#[rustc_skip_during_method_dispatch]` are skipped during
         // method resolution, if the receiver is an array, and we're compiling for editions before
@@ -1225,7 +1234,7 @@ fn iterate_trait_method_candidates(
         {
             // FIXME: this should really be using the edition of the method name's span, in case it
             // comes from a macro
-            if !db.crate_graph()[krate].edition.at_least_2021() {
+            if !krate.data(db).edition.at_least_2021() {
                 continue;
             }
         }
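
The edition gate above corresponds to the well-known `IntoIterator`-for-arrays change: before edition 2021, method dispatch skips the by-value array impl, so `.into_iter()` autorefs and yields references; from 2021 on it yields values. A small illustration (the by-value behaviour described in the comment assumes edition 2021 or later):

    fn main() {
        let xs = [1, 2, 3];
        // Edition 2021+: by-value `IntoIterator` for arrays, so items are `i32`.
        // On earlier editions the same call autorefs and yields `&i32`.
        let sum: i32 = xs.into_iter().sum();
        assert_eq!(sum, 6);
    }
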
@@ -1238,7 +1247,7 @@ fn iterate_trait_method_candidates(
         {
             // FIXME: this should really be using the edition of the method name's span, in case it
             // comes from a macro
-            if !db.crate_graph()[krate].edition.at_least_2024() {
+            if !krate.data(db).edition.at_least_2024() {
                 continue;
             }
         }
@@ -1247,7 +1256,7 @@ fn iterate_trait_method_candidates(
         // trait, but if we find out it doesn't, we'll skip the rest of the
         // iteration
         let mut known_implemented = false;
-        for &(_, item) in data.items.iter() {
+        for &(_, item) in db.trait_items(t).items.iter() {
             // Don't pass a `visible_from_module` down to `is_valid_candidate`,
             // since only inherent methods should be included in visibility checking.
             let visible =
@@ -1291,7 +1300,7 @@ fn iterate_inherent_methods(
             let env = table.trait_env.clone();
             let traits = env
                 .traits_in_scope_from_clauses(self_ty.clone())
-                .flat_map(|t| all_super_traits(db.upcast(), t));
+                .flat_map(|t| all_super_traits(db, t));
             iterate_inherent_trait_methods(
                 self_ty,
                 table,
@@ -1304,7 +1313,7 @@ fn iterate_inherent_methods(
         }
         TyKind::Dyn(_) => {
             if let Some(principal_trait) = self_ty.dyn_trait() {
-                let traits = all_super_traits(db.upcast(), principal_trait);
+                let traits = all_super_traits(db, principal_trait);
                 iterate_inherent_trait_methods(
                     self_ty,
                     table,
@@ -1374,7 +1383,7 @@ fn iterate_inherent_methods(
     ) -> ControlFlow<()> {
         let db = table.db;
         for t in traits {
-            let data = db.trait_data(t);
+            let data = db.trait_items(t);
             for &(_, item) in data.items.iter() {
                 // We don't pass `visible_from_module` as all trait items should be visible.
                 let visible = match is_valid_trait_method_candidate(
@@ -1407,7 +1416,7 @@ fn iterate_inherent_methods(
         callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>,
     ) -> ControlFlow<()> {
         for &impl_id in impls.for_self_ty(self_ty) {
-            for &(ref item_name, item) in table.db.impl_data(impl_id).items.iter() {
+            for &(ref item_name, item) in table.db.impl_items(impl_id).items.iter() {
                 let visible = match is_valid_impl_method_candidate(
                     table,
                     self_ty,
@@ -1495,7 +1504,7 @@ fn is_valid_impl_method_candidate(
             check_that!(name.is_none_or(|n| n == item_name));
 
             if let Some(from_module) = visible_from_module {
-                if !db.const_visibility(c).is_visible_from(db.upcast(), from_module) {
+                if !db.const_visibility(c).is_visible_from(db, from_module) {
                     cov_mark::hit!(const_candidate_not_visible);
                     return IsValidCandidate::NotVisible;
                 }
@@ -1528,7 +1537,7 @@ fn is_valid_trait_method_candidate(
     let db = table.db;
     match item {
         AssocItemId::FunctionId(fn_id) => {
-            let data = db.function_data(fn_id);
+            let data = db.function_signature(fn_id);
 
             check_that!(name.is_none_or(|n| n == &data.name));
 
@@ -1559,7 +1568,7 @@ fn is_valid_trait_method_candidate(
         }
         AssocItemId::ConstId(c) => {
             check_that!(receiver_ty.is_none());
-            check_that!(name.is_none_or(|n| db.const_data(c).name.as_ref() == Some(n)));
+            check_that!(name.is_none_or(|n| db.const_signature(c).name.as_ref() == Some(n)));
 
             IsValidCandidate::Yes
         }
@@ -1581,10 +1590,10 @@ fn is_valid_impl_fn_candidate(
     check_that!(name.is_none_or(|n| n == item_name));
 
     let db = table.db;
-    let data = db.function_data(fn_id);
+    let data = db.function_signature(fn_id);
 
     if let Some(from_module) = visible_from_module {
-        if !db.function_visibility(fn_id).is_visible_from(db.upcast(), from_module) {
+        if !db.function_visibility(fn_id).is_visible_from(db, from_module) {
             cov_mark::hit!(autoderef_candidate_not_visible);
             return IsValidCandidate::NotVisible;
         }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs
index 56c431ef8dab6..6dc20203e0eef 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs
@@ -3,22 +3,22 @@
 use std::{collections::hash_map::Entry, fmt::Display, iter};
 
 use crate::{
+    CallableDefId, ClosureId, Const, ConstScalar, InferenceResult, Interner, MemoryMap,
+    Substitution, TraitEnvironment, Ty, TyExt, TyKind,
     consteval::usize_const,
     db::HirDatabase,
     display::{DisplayTarget, HirDisplay},
-    infer::{normalize, PointerCast},
+    infer::{PointerCast, normalize},
     lang_items::is_box,
     mapping::ToChalk,
-    CallableDefId, ClosureId, Const, ConstScalar, InferenceResult, Interner, MemoryMap,
-    Substitution, TraitEnvironment, Ty, TyExt, TyKind,
 };
-use base_db::CrateId;
+use base_db::Crate;
 use chalk_ir::Mutability;
 use either::Either;
 use hir_def::{
+    DefWithBodyId, FieldId, StaticId, TupleFieldId, UnionId, VariantId,
     expr_store::Body,
     hir::{BindingAnnotation, BindingId, Expr, ExprId, Ordering, PatId},
-    DefWithBodyId, FieldId, StaticId, TupleFieldId, UnionId, VariantId,
 };
 use la_arena::{Arena, ArenaMap, Idx, RawIdx};
 
@@ -28,21 +28,22 @@ mod lower;
 mod monomorphization;
 mod pretty;
 
-pub use borrowck::{borrowck_query, BorrowckResult, MutabilityReason};
+pub use borrowck::{BorrowckResult, MutabilityReason, borrowck_query};
 pub use eval::{
-    interpret_mir, pad16, render_const_using_debug_impl, Evaluator, MirEvalError, VTableMap,
-};
-pub use lower::{
-    lower_to_mir, mir_body_for_closure_query, mir_body_query, mir_body_recover, MirLowerError,
+    Evaluator, MirEvalError, VTableMap, interpret_mir, pad16, render_const_using_debug_impl,
 };
+pub use lower::{MirLowerError, lower_to_mir, mir_body_for_closure_query, mir_body_query};
 pub use monomorphization::{
     monomorphize_mir_body_bad, monomorphized_mir_body_for_closure_query,
-    monomorphized_mir_body_query, monomorphized_mir_body_recover,
+    monomorphized_mir_body_query,
 };
 use rustc_hash::FxHashMap;
-use smallvec::{smallvec, SmallVec};
+use smallvec::{SmallVec, smallvec};
 use stdx::{impl_from, never};
 
+pub(crate) use lower::mir_body_cycle_result;
+pub(crate) use monomorphization::monomorphized_mir_body_cycle_result;
+
 use super::consteval::{intern_const_scalar, try_const_usize};
 
 pub type BasicBlockId = Idx<BasicBlock>;
@@ -142,7 +143,7 @@ impl<V, T> ProjectionElem<V, T> {
         mut base: Ty,
         db: &dyn HirDatabase,
         closure_field: impl FnOnce(ClosureId, &Substitution, usize) -> Ty,
-        krate: CrateId,
+        krate: Crate,
     ) -> Ty {
         // we only bail on mir building when there are type mismatches
         // but error types may pop up resulting in us still attempting to build the mir
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs
index fbcca388e781d..eca6f4692a4c6 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs
@@ -12,11 +12,11 @@ use stdx::never;
 use triomphe::Arc;
 
 use crate::{
+    ClosureId, Interner, Substitution, Ty, TyExt, TypeFlags,
     db::{HirDatabase, InternedClosure},
     display::DisplayTarget,
     mir::Operand,
     utils::ClosureSubst,
-    ClosureId, Interner, Substitution, Ty, TyExt, TypeFlags,
 };
 
 use super::{
@@ -71,7 +71,7 @@ fn all_mir_bodies(
         c: ClosureId,
         cb: &mut impl FnMut(Arc<MirBody>),
     ) -> Result<(), MirLowerError> {
-        match db.mir_body_for_closure(c) {
+        match db.mir_body_for_closure(c.into()) {
             Ok(body) => {
                 cb(body.clone());
                 body.closures.iter().try_for_each(|&it| for_closure(db, it, cb))
@@ -132,7 +132,7 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef>
                     ty,
                     db,
                     make_fetch_closure_field(db),
-                    body.owner.module(db.upcast()).krate(),
+                    body.owner.module(db).krate(),
                 );
             }
             if is_dereference_of_ref
@@ -145,7 +145,7 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef>
         Operand::Constant(_) | Operand::Static(_) => (),
     };
     for (_, block) in body.basic_blocks.iter() {
-        db.unwind_if_cancelled();
+        db.unwind_if_revision_cancelled();
         for statement in &block.statements {
             match &statement.kind {
                 StatementKind::Assign(_, r) => match r {
@@ -223,7 +223,7 @@ fn partially_moved(db: &dyn HirDatabase, body: &MirBody) -> Vec<PartiallyMoved>
                     ty,
                     db,
                     make_fetch_closure_field(db),
-                    body.owner.module(db.upcast()).krate(),
+                    body.owner.module(db).krate(),
                 );
             }
             if !ty.clone().is_copy(db, body.owner)
@@ -235,7 +235,7 @@ fn partially_moved(db: &dyn HirDatabase, body: &MirBody) -> Vec<PartiallyMoved>
         Operand::Constant(_) | Operand::Static(_) => (),
     };
     for (_, block) in body.basic_blocks.iter() {
-        db.unwind_if_cancelled();
+        db.unwind_if_revision_cancelled();
         for statement in &block.statements {
             match &statement.kind {
                 StatementKind::Assign(_, r) => match r {
@@ -306,7 +306,7 @@ fn partially_moved(db: &dyn HirDatabase, body: &MirBody) -> Vec<PartiallyMoved>
 fn borrow_regions(db: &dyn HirDatabase, body: &MirBody) -> Vec<BorrowRegion> {
     let mut borrows = FxHashMap::default();
     for (_, block) in body.basic_blocks.iter() {
-        db.unwind_if_cancelled();
+        db.unwind_if_revision_cancelled();
         for statement in &block.statements {
             if let StatementKind::Assign(_, Rvalue::Ref(kind, p)) = &statement.kind {
                 borrows
@@ -369,18 +369,9 @@ fn place_case(db: &dyn HirDatabase, body: &MirBody, lvalue: &Place) -> Projectio
             }
             ProjectionElem::OpaqueCast(_) => (),
         }
-        ty = proj.projected_ty(
-            ty,
-            db,
-            make_fetch_closure_field(db),
-            body.owner.module(db.upcast()).krate(),
-        );
-    }
-    if is_part_of {
-        ProjectionCase::DirectPart
-    } else {
-        ProjectionCase::Direct
+        ty = proj.projected_ty(ty, db, make_fetch_closure_field(db), body.owner.module(db).krate());
     }
+    if is_part_of { ProjectionCase::DirectPart } else { ProjectionCase::Direct }
 }
 
 /// Returns a map from basic blocks to the set of locals that might be ever initialized before
@@ -423,10 +414,7 @@ fn ever_initialized_map(
             let Some(terminator) = &block.terminator else {
                 never!(
                     "Terminator should be none only in construction.\nThe body:\n{}",
-                    body.pretty_print(
-                        db,
-                        DisplayTarget::from_crate(db, body.owner.krate(db.upcast()))
-                    )
+                    body.pretty_print(db, DisplayTarget::from_crate(db, body.owner.krate(db)))
                 );
                 return;
             };
@@ -477,7 +465,7 @@ fn ever_initialized_map(
         dfs(db, body, l, &mut stack, &mut result);
     }
     for l in body.locals.iter().map(|it| it.0) {
-        db.unwind_if_cancelled();
+        db.unwind_if_revision_cancelled();
         if !result[body.start_block].contains_idx(l) {
             result[body.start_block].insert(l, false);
             stack.clear();
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs
index 74a34e2981710..386226b16d5d3 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs
@@ -2,26 +2,27 @@
 
 use std::{borrow::Cow, cell::RefCell, fmt::Write, iter, mem, ops::Range};
 
-use base_db::CrateId;
-use chalk_ir::{cast::Cast, Mutability};
+use base_db::Crate;
+use chalk_ir::{Mutability, cast::Cast};
 use either::Either;
 use hir_def::{
+    AdtId, DefWithBodyId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup, StaticId,
+    VariantId,
     builtin_type::BuiltinType,
-    data::adt::{StructFlags, VariantData},
     expr_store::HygieneId,
+    item_tree::FieldsShape,
     lang_item::LangItem,
     layout::{TagEncoding, Variants},
     resolver::{HasResolver, TypeNs, ValueNs},
-    AdtId, DefWithBodyId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup, StaticId,
-    VariantId,
+    signatures::{StaticFlags, StructFlags},
 };
-use hir_expand::{mod_path::path, name::Name, HirFileIdExt, InFile};
+use hir_expand::{InFile, mod_path::path, name::Name};
 use intern::sym;
 use la_arena::ArenaMap;
 use rustc_abi::TargetDataLayout;
 use rustc_apfloat::{
-    ieee::{Half as f16, Quad as f128},
     Float,
+    ieee::{Half as f16, Quad as f128},
 };
 use rustc_hash::{FxHashMap, FxHashSet};
 use span::FileId;
@@ -30,7 +31,9 @@ use syntax::{SyntaxNodePtr, TextRange};
 use triomphe::Arc;
 
 use crate::{
-    consteval::{intern_const_scalar, try_const_usize, ConstEvalError},
+    CallableDefId, ClosureId, ComplexMemoryMap, Const, ConstData, ConstScalar, FnDefId, Interner,
+    MemoryMap, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind,
+    consteval::{ConstEvalError, intern_const_scalar, try_const_usize},
     db::{HirDatabase, InternedClosure},
     display::{ClosureStyle, DisplayTarget, HirDisplay},
     infer::PointerCast,
@@ -39,15 +42,13 @@ use crate::{
     method_resolution::{is_dyn_method, lookup_impl_const},
     static_lifetime,
     traits::FnTrait,
-    utils::{detect_variant_from_bytes, ClosureSubst},
-    CallableDefId, ClosureId, ComplexMemoryMap, Const, ConstData, ConstScalar, FnDefId, Interner,
-    MemoryMap, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind,
+    utils::{ClosureSubst, detect_variant_from_bytes},
 };
 
 use super::{
-    return_slot, AggregateKind, BasicBlockId, BinOp, CastKind, LocalId, MirBody, MirLowerError,
-    MirSpan, Operand, Place, PlaceElem, ProjectionElem, ProjectionStore, Rvalue, StatementKind,
-    TerminatorKind, UnOp,
+    AggregateKind, BasicBlockId, BinOp, CastKind, LocalId, MirBody, MirLowerError, MirSpan,
+    Operand, Place, PlaceElem, ProjectionElem, ProjectionStore, Rvalue, StatementKind,
+    TerminatorKind, UnOp, return_slot,
 };
 
 mod shim;
@@ -186,7 +187,7 @@ pub struct Evaluator<'a> {
     cached_fn_trait_func: Option<FunctionId>,
     cached_fn_mut_trait_func: Option<FunctionId>,
     cached_fn_once_trait_func: Option<FunctionId>,
-    crate_id: CrateId,
+    crate_id: Crate,
     // FIXME: This is a workaround, see the comment on `interpret_mir`
     assert_placeholder_ty_is_unused: bool,
     /// A general limit on execution, to prevent non-terminating programs from breaking the r-a main process
@@ -368,11 +369,11 @@ impl MirEvalError {
             for (func, span, def) in stack.iter().take(30).rev() {
                 match func {
                     Either::Left(func) => {
-                        let function_name = db.function_data(*func);
+                        let function_name = db.function_signature(*func);
                         writeln!(
                             f,
                             "In function {} ({:?})",
-                            function_name.name.display(db.upcast(), display_target.edition),
+                            function_name.name.display(db, display_target.edition),
                             func
                         )?;
                     }
@@ -406,9 +407,9 @@ impl MirEvalError {
                     },
                     MirSpan::Unknown => continue,
                 };
-                let file_id = span.file_id.original_file(db.upcast());
+                let file_id = span.file_id.original_file(db);
                 let text_range = span.value.text_range();
-                writeln!(f, "{}", span_formatter(file_id.file_id(), text_range))?;
+                writeln!(f, "{}", span_formatter(file_id.file_id(db), text_range))?;
             }
         }
         match err {
@@ -421,10 +422,10 @@ impl MirEvalError {
                 )?;
             }
             MirEvalError::MirLowerError(func, err) => {
-                let function_name = db.function_data(*func);
-                let self_ = match func.lookup(db.upcast()).container {
+                let function_name = db.function_signature(*func);
+                let self_ = match func.lookup(db).container {
                     ItemContainerId::ImplId(impl_id) => Some({
-                        let generics = crate::generics::generics(db.upcast(), impl_id.into());
+                        let generics = crate::generics::generics(db, impl_id.into());
                         let substs = generics.placeholder_subst(db);
                         db.impl_self_ty(impl_id)
                             .substitute(Interner, &substs)
@@ -432,10 +433,7 @@ impl MirEvalError {
                             .to_string()
                     }),
                     ItemContainerId::TraitId(it) => Some(
-                        db.trait_data(it)
-                            .name
-                            .display(db.upcast(), display_target.edition)
-                            .to_string(),
+                        db.trait_signature(it).name.display(db, display_target.edition).to_string(),
                     ),
                     _ => None,
                 };
@@ -444,7 +442,7 @@ impl MirEvalError {
                     "MIR lowering for function `{}{}{}` ({:?}) failed due:",
                     self_.as_deref().unwrap_or_default(),
                     if self_.is_some() { "::" } else { "" },
-                    function_name.name.display(db.upcast(), display_target.edition),
+                    function_name.name.display(db, display_target.edition),
                     func
                 )?;
                 err.pretty_print(f, db, span_formatter, display_target)?;
@@ -627,7 +625,7 @@ impl Evaluator<'_> {
         assert_placeholder_ty_is_unused: bool,
         trait_env: Option<Arc<TraitEnvironment>>,
     ) -> Result<Evaluator<'_>> {
-        let crate_id = owner.module(db.upcast()).krate();
+        let crate_id = owner.module(db).krate();
         let target_data_layout = match db.target_data_layout(crate_id) {
             Ok(target_data_layout) => target_data_layout,
             Err(e) => return Err(MirEvalError::TargetDataLayoutNotAvailable(e)),
@@ -660,20 +658,18 @@ impl Evaluator<'_> {
             cached_fn_trait_func: db
                 .lang_item(crate_id, LangItem::Fn)
                 .and_then(|x| x.as_trait())
-                .and_then(|x| {
-                    db.trait_data(x).method_by_name(&Name::new_symbol_root(sym::call.clone()))
-                }),
+                .and_then(|x| db.trait_items(x).method_by_name(&Name::new_symbol_root(sym::call))),
             cached_fn_mut_trait_func: db
                 .lang_item(crate_id, LangItem::FnMut)
                 .and_then(|x| x.as_trait())
                 .and_then(|x| {
-                    db.trait_data(x).method_by_name(&Name::new_symbol_root(sym::call_mut.clone()))
+                    db.trait_items(x).method_by_name(&Name::new_symbol_root(sym::call_mut))
                 }),
             cached_fn_once_trait_func: db
                 .lang_item(crate_id, LangItem::FnOnce)
                 .and_then(|x| x.as_trait())
                 .and_then(|x| {
-                    db.trait_data(x).method_by_name(&Name::new_symbol_root(sym::call_once.clone()))
+                    db.trait_items(x).method_by_name(&Name::new_symbol_root(sym::call_once))
                 }),
         })
     }
@@ -820,12 +816,12 @@ impl Evaluator<'_> {
                         Variants::Multiple { variants, .. } => {
                             &variants[match f.parent {
                                 hir_def::VariantId::EnumVariantId(it) => {
-                                    RustcEnumVariantIdx(it.lookup(self.db.upcast()).index as usize)
+                                    RustcEnumVariantIdx(it.lookup(self.db).index as usize)
                                 }
                                 _ => {
                                     return Err(MirEvalError::InternalError(
                                         "mismatched layout".into(),
-                                    ))
+                                    ));
                                 }
                             }]
                         }
@@ -1119,7 +1115,7 @@ impl Evaluator<'_> {
                 "Stack overflow. Tried to grow stack to {stack_size} bytes"
             )));
         }
-        self.stack.extend(iter::repeat(0).take(stack_size));
+        self.stack.extend(std::iter::repeat_n(0, stack_size));
         Ok((locals, prev_stack_pointer))
     }
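
The stack- and heap-growth sites now use `std::iter::repeat_n`; the two spellings are equivalent, `repeat_n` just avoids building an unbounded iterator and then truncating it. A quick check of the equivalence:

    fn main() {
        let a: Vec<u8> = std::iter::repeat(0u8).take(4).collect();
        let b: Vec<u8> = std::iter::repeat_n(0u8, 4).collect();
        assert_eq!(a, b);
    }
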
 
@@ -1641,7 +1637,8 @@ impl Evaluator<'_> {
         match &layout.variants {
             Variants::Empty => unreachable!(),
             Variants::Single { index } => {
-                let r = self.const_eval_discriminant(self.db.enum_data(e).variants[index.0].0)?;
+                let r =
+                    self.const_eval_discriminant(self.db.enum_variants(e).variants[index.0].0)?;
                 Ok(r)
             }
             Variants::Multiple { tag, tag_encoding, variants, .. } => {
@@ -1666,7 +1663,7 @@ impl Evaluator<'_> {
                             .unwrap_or(*untagged_variant)
                             .0;
                         let result =
-                            self.const_eval_discriminant(self.db.enum_data(e).variants[idx].0)?;
+                            self.const_eval_discriminant(self.db.enum_variants(e).variants[idx].0)?;
                         Ok(result)
                     }
                 }
@@ -1760,7 +1757,7 @@ impl Evaluator<'_> {
                         AdtId::EnumId(_) => not_supported!("unsizing enums"),
                     };
                     let Some((last_field, _)) =
-                        self.db.struct_data(id).variant_data.fields().iter().next_back()
+                        self.db.variant_fields(id.into()).fields().iter().next_back()
                     else {
                         not_supported!("unsizing struct without field");
                     };
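
The `last_field` lookup reflects the language rule that only a struct's last field can be unsized away. A user-facing example of the coercion this code supports (`Buf` is illustrative):

    struct Buf<T: ?Sized> {
        len_hint: usize,
        data: T, // only the last field may become unsized
    }

    fn main() {
        let sized: Box<Buf<[u8; 4]>> = Box::new(Buf { len_hint: 4, data: [0u8; 4] });
        // Unsize coercion of the last field: `Buf<[u8; 4]>` -> `Buf<[u8]>`.
        let unsized_buf: Box<Buf<[u8]>> = sized;
        assert_eq!(unsized_buf.len_hint, unsized_buf.data.len());
    }
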
@@ -1788,11 +1785,11 @@ impl Evaluator<'_> {
         subst: Substitution,
         locals: &Locals,
     ) -> Result<(usize, Arc<Layout>, Option<(usize, usize, i128)>)> {
-        let adt = it.adt_id(self.db.upcast());
+        let adt = it.adt_id(self.db);
         if let DefWithBodyId::VariantId(f) = locals.body.owner {
             if let VariantId::EnumVariantId(it) = it {
                 if let AdtId::EnumId(e) = adt {
-                    if f.lookup(self.db.upcast()).parent == e {
+                    if f.lookup(self.db).parent == e {
                         // Computing the exact size of enums requires resolving the enum discriminants. In order to prevent loops (and
                         // infinitely sized type errors) we use a dummy layout
                         let i = self.const_eval_discriminant(it)?;
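
The dummy-layout comment above exists because discriminants are const-evaluated: a discriminant expression is an arbitrary const expression, so evaluating it for a variant of the enum currently being laid out could recurse into that same layout. A small, runnable reminder that discriminants go through const evaluation (using an unrelated type's size, so there is no cycle):

    use std::mem::size_of;

    enum E {
        // Discriminant expressions are const-evaluated during type checking.
        A = size_of::<u64>() as isize, // 8
        B,                             // 9 (previous discriminant + 1)
    }

    fn main() {
        assert_eq!(E::A as isize, 8);
        assert_eq!(E::B as isize, 9);
    }
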
@@ -1810,7 +1807,7 @@ impl Evaluator<'_> {
                     _ => not_supported!("multi variant layout for non-enums"),
                 };
                 let mut discriminant = self.const_eval_discriminant(enum_variant_id)?;
-                let lookup = enum_variant_id.lookup(self.db.upcast());
+                let lookup = enum_variant_id.lookup(self.db);
                 let rustc_enum_variant_idx = RustcEnumVariantIdx(lookup.index as usize);
                 let variant_layout = variants[rustc_enum_variant_idx].clone();
                 let have_tag = match tag_encoding {
@@ -1863,7 +1860,7 @@ impl Evaluator<'_> {
                             "encoded tag ({offset}, {size}, {value}) is out of bounds 0..{size}"
                         )
                         .into(),
-                    ))
+                    ));
                 }
             }
         }
@@ -1875,7 +1872,7 @@ impl Evaluator<'_> {
                 None => {
                     return Err(MirEvalError::InternalError(
                         format!("field offset ({offset}) is out of bounds 0..{size}").into(),
-                    ))
+                    ));
                 }
             }
         }
@@ -1917,7 +1914,7 @@ impl Evaluator<'_> {
                     .db
                     .const_eval(const_id, subst, Some(self.trait_env.clone()))
                     .map_err(|e| {
-                        let name = const_id.name(self.db.upcast());
+                        let name = const_id.name(self.db);
                         MirEvalError::ConstEvalError(name, Box::new(e))
                     })?;
                 if let chalk_ir::ConstValue::Concrete(c) = &result_owner.data(Interner).value {
@@ -2053,7 +2050,7 @@ impl Evaluator<'_> {
             _ => {
                 return Err(MirEvalError::UndefinedBehavior(format!(
                     "invalid memory write at address {addr:?}"
-                )))
+                )));
             }
         }
 
@@ -2068,7 +2065,7 @@ impl Evaluator<'_> {
         }
         if let DefWithBodyId::VariantId(f) = locals.body.owner {
             if let Some((AdtId::EnumId(e), _)) = ty.as_adt() {
-                if f.lookup(self.db.upcast()).parent == e {
+                if f.lookup(self.db).parent == e {
                     // Computing the exact size of enums requires resolving the enum discriminants. In order to prevent loops (and
                     // infinitely sized type errors) we use a dummy size
                     return Ok(Some((16, 16)));
@@ -2121,7 +2118,7 @@ impl Evaluator<'_> {
             return Err(MirEvalError::Panic(format!("Memory allocation of {size} bytes failed")));
         }
         let pos = self.heap.len();
-        self.heap.extend(iter::repeat(0).take(size));
+        self.heap.extend(std::iter::repeat_n(0, size));
         Ok(Address::Heap(pos))
     }
 
@@ -2242,10 +2239,10 @@ impl Evaluator<'_> {
                 }
                 chalk_ir::TyKind::Adt(adt, subst) => match adt.0 {
                     AdtId::StructId(s) => {
-                        let data = this.db.struct_data(s);
+                        let data = this.db.variant_fields(s.into());
                         let layout = this.layout(ty)?;
                         let field_types = this.db.field_types(s.into());
-                        for (f, _) in data.variant_data.fields().iter() {
+                        for (f, _) in data.fields().iter() {
                             let offset = layout
                                 .fields
                                 .offset(u32::from(f.into_raw()) as usize)
@@ -2271,7 +2268,7 @@ impl Evaluator<'_> {
                             bytes,
                             e,
                         ) {
-                            let data = &this.db.enum_variant_data(v).variant_data;
+                            let data = &this.db.variant_fields(v.into());
                             let field_types = this.db.field_types(v.into());
                             for (f, _) in data.fields().iter() {
                                 let offset =
@@ -2451,7 +2448,7 @@ impl Evaluator<'_> {
         let mir_body = self
             .db
             .monomorphized_mir_body_for_closure(
-                closure,
+                closure.into(),
                 generic_args.clone(),
                 self.trait_env.clone(),
             )
@@ -2558,6 +2555,7 @@ impl Evaluator<'_> {
         } else {
             let (imp, generic_args) =
                 self.db.lookup_impl_method(self.trait_env.clone(), def, generic_args.clone());
+
             let mir_body = self
                 .db
                 .monomorphized_mir_body(imp.into(), generic_args, self.trait_env.clone())
@@ -2616,13 +2614,10 @@ impl Evaluator<'_> {
                 let ty = ty.clone().cast(Interner);
                 let generics_for_target = Substitution::from_iter(
                     Interner,
-                    generic_args.iter(Interner).enumerate().map(|(i, it)| {
-                        if i == self_ty_idx {
-                            &ty
-                        } else {
-                            it
-                        }
-                    }),
+                    generic_args
+                        .iter(Interner)
+                        .enumerate()
+                        .map(|(i, it)| if i == self_ty_idx { &ty } else { it }),
                 );
                 self.exec_fn_with_args(
                     def,
@@ -2756,8 +2751,8 @@ impl Evaluator<'_> {
         if let Some(o) = self.static_locations.get(&st) {
             return Ok(*o);
         };
-        let static_data = self.db.static_data(st);
-        let result = if !static_data.is_extern {
+        let static_data = self.db.static_signature(st);
+        let result = if !static_data.flags.contains(StaticFlags::EXTERN) {
             let konst = self.db.const_eval_static(st).map_err(|e| {
                 MirEvalError::ConstEvalError(static_data.name.as_str().to_owned(), Box::new(e))
             })?;
@@ -2781,14 +2776,14 @@ impl Evaluator<'_> {
         match r {
             Ok(r) => Ok(r),
             Err(e) => {
-                let db = self.db.upcast();
+                let db = self.db;
                 let loc = variant.lookup(db);
                 let enum_loc = loc.parent.lookup(db);
-                let edition = self.db.crate_graph()[self.crate_id].edition;
+                let edition = self.crate_id.data(self.db).edition;
                 let name = format!(
                     "{}::{}",
-                    enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db.upcast(), edition),
-                    loc.id.item_tree(db)[loc.id.value].name.display(db.upcast(), edition),
+                    enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db, edition),
+                    loc.id.item_tree(db)[loc.id.value].name.display(db, edition),
                 );
                 Err(MirEvalError::ConstEvalError(name, Box::new(e)))
             }
@@ -2817,7 +2812,7 @@ impl Evaluator<'_> {
     ) -> Result<()> {
         let Some(drop_fn) = (|| {
             let drop_trait = self.db.lang_item(self.crate_id, LangItem::Drop)?.as_trait()?;
-            self.db.trait_data(drop_trait).method_by_name(&Name::new_symbol_root(sym::drop.clone()))
+            self.db.trait_items(drop_trait).method_by_name(&Name::new_symbol_root(sym::drop))
         })() else {
             // in some tests we don't have the `Drop` trait in minicore, and
             // we can ignore drop in them.
@@ -2842,16 +2837,16 @@ impl Evaluator<'_> {
             TyKind::Adt(id, subst) => {
                 match id.0 {
                     AdtId::StructId(s) => {
-                        let data = self.db.struct_data(s);
+                        let data = self.db.struct_signature(s);
                         if data.flags.contains(StructFlags::IS_MANUALLY_DROP) {
                             return Ok(());
                         }
                         let layout = self.layout_adt(id.0, subst.clone())?;
-                        match data.variant_data.as_ref() {
-                            VariantData::Record { fields, .. }
-                            | VariantData::Tuple { fields, .. } => {
+                        let variant_fields = self.db.variant_fields(s.into());
+                        match variant_fields.shape {
+                            FieldsShape::Record | FieldsShape::Tuple => {
                                 let field_types = self.db.field_types(s.into());
-                                for (field, _) in fields.iter() {
+                                for (field, _) in variant_fields.fields().iter() {
                                     let offset = layout
                                         .fields
                                         .offset(u32::from(field.into_raw()) as usize)
@@ -2861,7 +2856,7 @@ impl Evaluator<'_> {
                                     self.run_drop_glue_deep(ty, locals, addr, &[], span)?;
                                 }
                             }
-                            VariantData::Unit => (),
+                            FieldsShape::Unit => (),
                         }
                     }
                     AdtId::UnionId(_) => (), // union fields don't need drop
@@ -2919,15 +2914,15 @@ pub fn render_const_using_debug_impl(
         drop_flags: DropFlags::default(),
     };
     let data = evaluator.allocate_const_in_heap(locals, c)?;
-    let resolver = owner.resolver(db.upcast());
+    let resolver = owner.resolver(db);
     let Some(TypeNs::TraitId(debug_trait)) = resolver.resolve_path_in_type_ns_fully(
-        db.upcast(),
-        &hir_def::path::Path::from_known_path_with_no_generic(path![core::fmt::Debug]),
+        db,
+        &hir_def::expr_store::path::Path::from_known_path_with_no_generic(path![core::fmt::Debug]),
     ) else {
         not_supported!("core::fmt::Debug not found");
     };
     let Some(debug_fmt_fn) =
-        db.trait_data(debug_trait).method_by_name(&Name::new_symbol_root(sym::fmt.clone()))
+        db.trait_items(debug_trait).method_by_name(&Name::new_symbol_root(sym::fmt))
     else {
         not_supported!("core::fmt::Debug::fmt not found");
     };
@@ -2952,8 +2947,8 @@ pub fn render_const_using_debug_impl(
     evaluator.write_memory(a3.offset(2 * evaluator.ptr_size()), &a2.to_bytes())?;
     evaluator.write_memory(a3.offset(3 * evaluator.ptr_size()), &[1])?;
     let Some(ValueNs::FunctionId(format_fn)) = resolver.resolve_path_in_value_ns_fully(
-        db.upcast(),
-        &hir_def::path::Path::from_known_path_with_no_generic(path![std::fmt::format]),
+        db,
+        &hir_def::expr_store::path::Path::from_known_path_with_no_generic(path![std::fmt::format]),
         HygieneId::ROOT,
     ) else {
         not_supported!("std::fmt::format not found");
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs
index f61ecabb7e41d..4de44cfd02e97 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs
@@ -10,19 +10,19 @@ use hir_def::{
     resolver::HasResolver,
 };
 use hir_expand::name::Name;
-use intern::{sym, Symbol};
+use intern::{Symbol, sym};
 use stdx::never;
 
 use crate::{
+    DropGlue,
     display::DisplayTarget,
     error_lifetime,
     mir::eval::{
-        pad16, Address, AdtId, Arc, BuiltinType, Evaluator, FunctionId, HasModule, HirDisplay,
+        Address, AdtId, Arc, BuiltinType, Evaluator, FunctionId, HasModule, HirDisplay,
         InternedClosure, Interner, Interval, IntervalAndTy, IntervalOrOwned, ItemContainerId,
         LangItem, Layout, Locals, Lookup, MirEvalError, MirSpan, Mutability, Result, Substitution,
-        Ty, TyBuilder, TyExt,
+        Ty, TyBuilder, TyExt, pad16,
     },
-    DropGlue,
 };
 
 mod simd;
@@ -57,16 +57,16 @@ impl Evaluator<'_> {
             return Ok(false);
         }
 
-        let function_data = self.db.function_data(def);
+        let function_data = self.db.function_signature(def);
         let attrs = self.db.attrs(def.into());
-        let is_intrinsic = attrs.by_key(&sym::rustc_intrinsic).exists()
+        let is_intrinsic = attrs.by_key(sym::rustc_intrinsic).exists()
             // Keep this around for a bit until extern "rustc-intrinsic" abis are no longer used
             || (match &function_data.abi {
                 Some(abi) => *abi == sym::rust_dash_intrinsic,
-                None => match def.lookup(self.db.upcast()).container {
+                None => match def.lookup(self.db).container {
                     hir_def::ItemContainerId::ExternBlockId(block) => {
-                        let id = block.lookup(self.db.upcast()).id;
-                        id.item_tree(self.db.upcast())[id.value].abi.as_ref()
+                        let id = block.lookup(self.db).id;
+                        id.item_tree(self.db)[id.value].abi.as_ref()
                             == Some(&sym::rust_dash_intrinsic)
                     }
                     _ => false,
@@ -82,13 +82,13 @@ impl Evaluator<'_> {
                 locals,
                 span,
                 !function_data.has_body()
-                    || attrs.by_key(&sym::rustc_intrinsic_must_be_overridden).exists(),
+                    || attrs.by_key(sym::rustc_intrinsic_must_be_overridden).exists(),
             );
         }
-        let is_extern_c = match def.lookup(self.db.upcast()).container {
+        let is_extern_c = match def.lookup(self.db).container {
             hir_def::ItemContainerId::ExternBlockId(block) => {
-                let id = block.lookup(self.db.upcast()).id;
-                id.item_tree(self.db.upcast())[id.value].abi.as_ref() == Some(&sym::C)
+                let id = block.lookup(self.db).id;
+                id.item_tree(self.db)[id.value].abi.as_ref() == Some(&sym::C)
             }
             _ => false,
         };
@@ -124,7 +124,7 @@ impl Evaluator<'_> {
             destination.write_from_bytes(self, &result)?;
             return Ok(true);
         }
-        if let ItemContainerId::TraitId(t) = def.lookup(self.db.upcast()).container {
+        if let ItemContainerId::TraitId(t) = def.lookup(self.db).container {
             if self.db.lang_attr(t.into()) == Some(LangItem::Clone) {
                 let [self_ty] = generic_args.as_slice(Interner) else {
                     not_supported!("wrong generic arg count for clone");
@@ -154,8 +154,7 @@ impl Evaluator<'_> {
     ) -> Result<Option<FunctionId>> {
         // `PanicFmt` is redirected to `ConstPanicFmt`
         if let Some(LangItem::PanicFmt) = self.db.lang_attr(def.into()) {
-            let resolver =
-                self.db.crate_def_map(self.crate_id).crate_root().resolver(self.db.upcast());
+            let resolver = self.db.crate_def_map(self.crate_id).crate_root().resolver(self.db);
 
             let Some(hir_def::lang_item::LangItemTarget::Function(const_panic_fmt)) =
                 self.db.lang_item(resolver.krate(), LangItem::ConstPanicFmt)
@@ -300,7 +299,7 @@ impl Evaluator<'_> {
         use LangItem::*;
         let attrs = self.db.attrs(def.into());
 
-        if attrs.by_key(&sym::rustc_const_panic_str).exists() {
+        if attrs.by_key(sym::rustc_const_panic_str).exists() {
             // `#[rustc_const_panic_str]` is treated like `lang = "begin_panic"` by rustc CTFE.
             return Some(LangItem::BeginPanic);
         }
@@ -569,7 +568,7 @@ impl Evaluator<'_> {
                     }
                     String::from_utf8_lossy(&name_buf)
                 };
-                let value = self.db.crate_graph()[self.crate_id].env.get(&name);
+                let value = self.crate_id.env(self.db).get(&name);
                 match value {
                     None => {
                         // Write null as fail
@@ -828,14 +827,14 @@ impl Evaluator<'_> {
                 };
                 let ty_name = match ty.display_source_code(
                     self.db,
-                    locals.body.owner.module(self.db.upcast()),
+                    locals.body.owner.module(self.db),
                     true,
                 ) {
                     Ok(ty_name) => ty_name,
                     // Fall back to a human-readable display in case of `Err`. Ideally we want to use `display_source_code` to
                     // render full paths.
                     Err(_) => {
-                        let krate = locals.body.owner.krate(self.db.upcast());
+                        let krate = locals.body.owner.krate(self.db);
                         ty.display(self.db, DisplayTarget::from_crate(self.db, krate)).to_string()
                     }
                 };
@@ -1261,8 +1260,8 @@ impl Evaluator<'_> {
                 if let Some(target) = self.db.lang_item(self.crate_id, LangItem::FnOnce) {
                     if let Some(def) = target.as_trait().and_then(|it| {
                         self.db
-                            .trait_data(it)
-                            .method_by_name(&Name::new_symbol_root(sym::call_once.clone()))
+                            .trait_items(it)
+                            .method_by_name(&Name::new_symbol_root(sym::call_once))
                     }) {
                         self.exec_fn_trait(
                             def,
@@ -1357,7 +1356,7 @@ impl Evaluator<'_> {
                     _ => {
                         return Err(MirEvalError::InternalError(
                             "three_way_compare expects an integral type".into(),
-                        ))
+                        ));
                     }
                 };
                 let rhs = rhs.get(self)?;
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs
index e229a4ab31727..984648cfec328 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs
@@ -2,8 +2,8 @@
 
 use std::cmp::Ordering;
 
-use crate::consteval::try_const_usize;
 use crate::TyKind;
+use crate::consteval::try_const_usize;
 
 use super::*;
 
@@ -31,8 +31,8 @@ impl Evaluator<'_> {
                     Some(len) => len,
                     _ => {
                         if let AdtId::StructId(id) = id.0 {
-                            let struct_data = self.db.struct_data(id);
-                            let fields = struct_data.variant_data.fields();
+                            let struct_data = self.db.variant_fields(id.into());
+                            let fields = struct_data.fields();
                             let Some((first_field, _)) = fields.iter().next() else {
                                 not_supported!("simd type with no field");
                             };
@@ -127,7 +127,7 @@ impl Evaluator<'_> {
                         Ordering::Greater => ["ge", "gt", "ne"].contains(&name),
                     };
                     let result = if result { 255 } else { 0 };
-                    destination_bytes.extend(std::iter::repeat(result).take(dest_size));
+                    destination_bytes.extend(std::iter::repeat_n(result, dest_size));
                 }
 
                 destination.write_from_bytes(self, &destination_bytes)
@@ -164,7 +164,7 @@ impl Evaluator<'_> {
                     None => {
                         return Err(MirEvalError::InternalError(
                             "simd type with unevaluatable len param".into(),
-                        ))
+                        ));
                     }
                 };
                 let (left_len, _) = self.detect_simd_ty(&left.ty)?;
@@ -179,7 +179,7 @@ impl Evaluator<'_> {
                         None => {
                             return Err(MirEvalError::InternalError(
                                 "out of bound access in simd shuffle".into(),
-                            ))
+                            ));
                         }
                     };
                     result.extend(val);
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs
index 084c391d26cbb..3abbbe45e6f87 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs
@@ -1,22 +1,24 @@
 use hir_def::db::DefDatabase;
-use span::{Edition, EditionedFileId};
+use hir_expand::EditionedFileId;
+use span::Edition;
 use syntax::{TextRange, TextSize};
 use test_fixture::WithFixture;
 
 use crate::display::DisplayTarget;
-use crate::{db::HirDatabase, mir::MirLowerError, test_db::TestDB, Interner, Substitution};
+use crate::{Interner, Substitution, db::HirDatabase, mir::MirLowerError, test_db::TestDB};
 
-use super::{interpret_mir, MirEvalError};
+use super::{MirEvalError, interpret_mir};
 
 fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String), MirEvalError> {
-    let module_id = db.module_for_file(file_id);
+    let module_id = db.module_for_file(file_id.file_id(db));
     let def_map = module_id.def_map(db);
     let scope = &def_map[module_id.local_id].scope;
     let func_id = scope
         .declarations()
         .find_map(|x| match x {
             hir_def::ModuleDefId::FunctionId(x) => {
-                if db.function_data(x).name.display(db, Edition::CURRENT).to_string() == "main" {
+                if db.function_signature(x).name.display(db, Edition::CURRENT).to_string() == "main"
+                {
                     Some(x)
                 } else {
                     None
@@ -68,7 +70,7 @@ fn check_pass_and_stdio(
             let span_formatter = |file, range: TextRange| {
                 format!("{:?} {:?}..{:?}", file, line_index(range.start()), line_index(range.end()))
             };
-            let krate = db.module_for_file(file_id).krate();
+            let krate = db.module_for_file(file_id.file_id(&db)).krate();
             e.pretty_print(&mut err, &db, span_formatter, DisplayTarget::from_crate(&db, krate))
                 .unwrap();
             panic!("Error in interpreting: {err}");
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs
index 520717e799521..557027756f39b 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs
@@ -2,21 +2,19 @@
 
 use std::{fmt::Write, iter, mem};
 
-use base_db::{ra_salsa::Cycle, CrateId};
+use base_db::Crate;
 use chalk_ir::{BoundVar, ConstData, DebruijnIndex, TyKind};
 use hir_def::{
-    data::adt::{StructKind, VariantData},
-    expr_store::{Body, HygieneId},
+    AdtId, DefWithBodyId, EnumVariantId, GeneralConstId, HasModule, ItemContainerId, LocalFieldId,
+    Lookup, TraitId, TupleId, TypeOrConstParamId,
+    expr_store::{Body, ExpressionStore, HygieneId, path::Path},
     hir::{
         ArithOp, Array, BinaryOp, BindingAnnotation, BindingId, ExprId, LabelId, Literal, MatchArm,
         Pat, PatId, RecordFieldPat, RecordLitField,
     },
+    item_tree::FieldsShape,
     lang_item::{LangItem, LangItemTarget},
-    path::Path,
     resolver::{HasResolver, ResolveValueResult, Resolver, ValueNs},
-    type_ref::TypesMap,
-    AdtId, DefWithBodyId, EnumVariantId, GeneralConstId, HasModule, ItemContainerId, LocalFieldId,
-    Lookup, TraitId, TupleId, TypeOrConstParamId,
 };
 use hir_expand::name::Name;
 use la_arena::ArenaMap;
@@ -27,27 +25,27 @@ use syntax::TextRange;
 use triomphe::Arc;
 
 use crate::{
+    Adjust, Adjustment, AutoBorrow, CallableDefId, TyBuilder, TyExt,
     consteval::ConstEvalError,
-    db::{HirDatabase, InternedClosure},
-    display::{hir_display_with_types_map, DisplayTarget, HirDisplay},
+    db::{HirDatabase, InternedClosure, InternedClosureId},
+    display::{DisplayTarget, HirDisplay, hir_display_with_store},
     error_lifetime,
     generics::generics,
-    infer::{cast::CastTy, unify::InferenceTable, CaptureKind, CapturedItem, TypeMismatch},
+    infer::{CaptureKind, CapturedItem, TypeMismatch, cast::CastTy, unify::InferenceTable},
     inhabitedness::is_ty_uninhabited_from,
     layout::LayoutError,
     mapping::ToChalk,
     mir::{
-        intern_const_scalar, return_slot, AggregateKind, Arena, BasicBlock, BasicBlockId, BinOp,
-        BorrowKind, CastKind, ClosureId, ConstScalar, Either, Expr, FieldId, Idx, InferenceResult,
-        Interner, Local, LocalId, MemoryMap, MirBody, MirSpan, Mutability, Operand, Place,
-        PlaceElem, PointerCast, ProjectionElem, ProjectionStore, RawIdx, Rvalue, Statement,
-        StatementKind, Substitution, SwitchTargets, Terminator, TerminatorKind, TupleFieldId, Ty,
-        UnOp, VariantId,
+        AggregateKind, Arena, BasicBlock, BasicBlockId, BinOp, BorrowKind, CastKind, ConstScalar,
+        Either, Expr, FieldId, Idx, InferenceResult, Interner, Local, LocalId, MemoryMap, MirBody,
+        MirSpan, Mutability, Operand, Place, PlaceElem, PointerCast, ProjectionElem,
+        ProjectionStore, RawIdx, Rvalue, Statement, StatementKind, Substitution, SwitchTargets,
+        Terminator, TerminatorKind, TupleFieldId, Ty, UnOp, VariantId, intern_const_scalar,
+        return_slot,
     },
     static_lifetime,
     traits::FnTrait,
     utils::ClosureSubst,
-    Adjust, Adjustment, AutoBorrow, CallableDefId, TyBuilder, TyExt,
 };
 
 mod as_place;
@@ -179,7 +177,7 @@ impl MirLowerError {
                 writeln!(
                     f,
                     "Missing function definition for {}",
-                    body.pretty_print_expr(db.upcast(), *owner, *it, display_target.edition)
+                    body.pretty_print_expr(db, *owner, *it, display_target.edition)
                 )?;
             }
             MirLowerError::HasErrors => writeln!(f, "Type inference result contains errors")?,
@@ -195,10 +193,7 @@ impl MirLowerError {
                 writeln!(
                     f,
                     "Generic arg not provided for {}",
-                    param
-                        .name()
-                        .unwrap_or(&Name::missing())
-                        .display(db.upcast(), display_target.edition)
+                    param.name().unwrap_or(&Name::missing()).display(db, display_target.edition)
                 )?;
                 writeln!(f, "Provided args: [")?;
                 for g in subst.iter(Interner) {
@@ -255,10 +250,10 @@ impl MirLowerError {
         db: &dyn HirDatabase,
         p: &Path,
         display_target: DisplayTarget,
-        types_map: &TypesMap,
+        store: &ExpressionStore,
     ) -> Self {
         Self::UnresolvedName(
-            hir_display_with_types_map(p, types_map).display(db, display_target).to_string(),
+            hir_display_with_store(p, store).display(db, display_target).to_string(),
         )
     }
 }
@@ -290,7 +285,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
             owner,
             closures: vec![],
         };
-        let resolver = owner.resolver(db.upcast());
+        let resolver = owner.resolver(db);
 
         MirLowerCtx {
             result: mir,
@@ -415,63 +410,62 @@ impl<'ctx> MirLowerCtx<'ctx> {
             }
             Expr::Missing => {
                 if let DefWithBodyId::FunctionId(f) = self.owner {
-                    let assoc = f.lookup(self.db.upcast());
+                    let assoc = f.lookup(self.db);
                     if let ItemContainerId::TraitId(t) = assoc.container {
-                        let name = &self.db.function_data(f).name;
+                        let name = &self.db.function_signature(f).name;
                         return Err(MirLowerError::TraitFunctionDefinition(t, name.clone()));
                     }
                 }
                 Err(MirLowerError::IncompleteExpr)
             }
             Expr::Path(p) => {
-                let pr = if let Some((assoc, subst)) =
-                    self.infer.assoc_resolutions_for_expr(expr_id)
-                {
-                    match assoc {
-                        hir_def::AssocItemId::ConstId(c) => {
-                            self.lower_const(
-                                c.into(),
-                                current,
-                                place,
-                                subst,
-                                expr_id.into(),
-                                self.expr_ty_without_adjust(expr_id),
-                            )?;
-                            return Ok(Some(current));
-                        }
-                        hir_def::AssocItemId::FunctionId(_) => {
-                            // FnDefs are zero sized, no action is needed.
-                            return Ok(Some(current));
+                let pr =
+                    if let Some((assoc, subst)) = self.infer.assoc_resolutions_for_expr(expr_id) {
+                        match assoc {
+                            hir_def::AssocItemId::ConstId(c) => {
+                                self.lower_const(
+                                    c.into(),
+                                    current,
+                                    place,
+                                    subst,
+                                    expr_id.into(),
+                                    self.expr_ty_without_adjust(expr_id),
+                                )?;
+                                return Ok(Some(current));
+                            }
+                            hir_def::AssocItemId::FunctionId(_) => {
+                                // FnDefs are zero sized, no action is needed.
+                                return Ok(Some(current));
+                            }
+                            hir_def::AssocItemId::TypeAliasId(_) => {
+                                // FIXME: If it is unreachable, use proper error instead of `not_supported`.
+                                not_supported!("associated functions and types")
+                            }
                         }
-                        hir_def::AssocItemId::TypeAliasId(_) => {
-                            // FIXME: If it is unreachable, use proper error instead of `not_supported`.
-                            not_supported!("associated functions and types")
+                    } else if let Some(variant) = self.infer.variant_resolution_for_expr(expr_id) {
+                        match variant {
+                            VariantId::EnumVariantId(e) => ValueNs::EnumVariantId(e),
+                            VariantId::StructId(s) => ValueNs::StructId(s),
+                            VariantId::UnionId(_) => implementation_error!("Union variant as path"),
                         }
-                    }
-                } else if let Some(variant) = self.infer.variant_resolution_for_expr(expr_id) {
-                    match variant {
-                        VariantId::EnumVariantId(e) => ValueNs::EnumVariantId(e),
-                        VariantId::StructId(s) => ValueNs::StructId(s),
-                        VariantId::UnionId(_) => implementation_error!("Union variant as path"),
-                    }
-                } else {
-                    let resolver_guard =
-                        self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr_id);
-                    let hygiene = self.body.expr_path_hygiene(expr_id);
-                    let result = self
-                        .resolver
-                        .resolve_path_in_value_ns_fully(self.db.upcast(), p, hygiene)
-                        .ok_or_else(|| {
-                            MirLowerError::unresolved_path(
-                                self.db,
-                                p,
-                                DisplayTarget::from_crate(self.db, self.krate()),
-                                &self.body.types,
-                            )
-                        })?;
-                    self.resolver.reset_to_guard(resolver_guard);
-                    result
-                };
+                    } else {
+                        let resolver_guard =
+                            self.resolver.update_to_inner_scope(self.db, self.owner, expr_id);
+                        let hygiene = self.body.expr_path_hygiene(expr_id);
+                        let result = self
+                            .resolver
+                            .resolve_path_in_value_ns_fully(self.db, p, hygiene)
+                            .ok_or_else(|| {
+                                MirLowerError::unresolved_path(
+                                    self.db,
+                                    p,
+                                    DisplayTarget::from_crate(self.db, self.krate()),
+                                    self.body,
+                                )
+                            })?;
+                        self.resolver.reset_to_guard(resolver_guard);
+                        result
+                    };
                 match pr {
                     ValueNs::LocalBinding(_) | ValueNs::StaticId(_) => {
                         let Some((temp, current)) =
@@ -499,8 +493,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
                         Ok(Some(current))
                     }
                     ValueNs::EnumVariantId(variant_id) => {
-                        let variant_data = &self.db.enum_variant_data(variant_id).variant_data;
-                        if variant_data.kind() == StructKind::Unit {
+                        let variant_fields = &self.db.variant_fields(variant_id.into());
+                        if variant_fields.shape == FieldsShape::Unit {
                             let ty = self.infer.type_of_expr[expr_id].clone();
                             current = self.lower_enum_variant(
                                 variant_id,
@@ -515,10 +509,10 @@ impl<'ctx> MirLowerCtx<'ctx> {
                         Ok(Some(current))
                     }
                     ValueNs::GenericParam(p) => {
-                        let Some(def) = self.owner.as_generic_def_id(self.db.upcast()) else {
+                        let Some(def) = self.owner.as_generic_def_id(self.db) else {
                             not_supported!("owner without generic def id");
                         };
-                        let gen = generics(self.db.upcast(), def);
+                        let generics = generics(self.db, def);
                         let ty = self.expr_ty_without_adjust(expr_id);
                         self.push_assignment(
                             current,
@@ -528,7 +522,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
                                     ty,
                                     value: chalk_ir::ConstValue::BoundVar(BoundVar::new(
                                         DebruijnIndex::INNERMOST,
-                                        gen.type_or_const_param_idx(p.into()).ok_or(
+                                        generics.type_or_const_param_idx(p.into()).ok_or(
                                             MirLowerError::TypeError(
                                                 "fail to lower const generic param",
                                             ),
@@ -579,7 +573,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
                 };
                 self.push_fake_read(current, cond_place, expr_id.into());
                 let resolver_guard =
-                    self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr_id);
+                    self.resolver.update_to_inner_scope(self.db, self.owner, expr_id);
                 let (then_target, else_target) =
                     self.pattern_match(current, None, cond_place, *pat)?;
                 self.resolver.reset_to_guard(resolver_guard);
@@ -695,7 +689,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
                 let (func_id, generic_args) =
                     self.infer.method_resolution(expr_id).ok_or_else(|| {
                         MirLowerError::UnresolvedMethod(
-                            method_name.display(self.db.upcast(), self.edition()).to_string(),
+                            method_name.display(self.db, self.edition()).to_string(),
                         )
                     })?;
                 let func = Operand::from_fn(self.db, func_id, generic_args);
@@ -717,7 +711,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
                 self.push_fake_read(current, cond_place, expr_id.into());
                 let mut end = None;
                 let resolver_guard =
-                    self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr_id);
+                    self.resolver.update_to_inner_scope(self.db, self.owner, expr_id);
                 for MatchArm { pat, guard, expr } in arms.iter() {
                     let (then, mut otherwise) =
                         self.pattern_match(current, None, cond_place, *pat)?;
@@ -840,7 +834,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
                 let variant_id =
                     self.infer.variant_resolution_for_expr(expr_id).ok_or_else(|| match path {
                         Some(p) => MirLowerError::UnresolvedName(
-                            hir_display_with_types_map(&**p, &self.body.types)
+                            hir_display_with_store(&**p, self.body)
                                 .display(self.db, self.display_target())
                                 .to_string(),
                         ),
@@ -850,13 +844,13 @@ impl<'ctx> MirLowerCtx<'ctx> {
                     TyKind::Adt(_, s) => s.clone(),
                     _ => not_supported!("Non ADT record literal"),
                 };
-                let variant_data = variant_id.variant_data(self.db.upcast());
+                let variant_fields = self.db.variant_fields(variant_id);
                 match variant_id {
                     VariantId::EnumVariantId(_) | VariantId::StructId(_) => {
-                        let mut operands = vec![None; variant_data.fields().len()];
+                        let mut operands = vec![None; variant_fields.fields().len()];
                         for RecordLitField { name, expr } in fields.iter() {
                             let field_id =
-                                variant_data.field(name).ok_or(MirLowerError::UnresolvedField)?;
+                                variant_fields.field(name).ok_or(MirLowerError::UnresolvedField)?;
                             let Some((op, c)) = self.lower_expr_to_some_operand(*expr, current)?
                             else {
                                 return Ok(None);
@@ -899,7 +893,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
                             not_supported!("Union record literal with more than one field");
                         };
                         let local_id =
-                            variant_data.field(name).ok_or(MirLowerError::UnresolvedField)?;
+                            variant_fields.field(name).ok_or(MirLowerError::UnresolvedField)?;
                         let place = place.project(
                             PlaceElem::Field(Either::Left(FieldId {
                                 parent: union_id.into(),
@@ -914,17 +908,18 @@ impl<'ctx> MirLowerCtx<'ctx> {
             Expr::Await { .. } => not_supported!("await"),
             Expr::Yeet { .. } => not_supported!("yeet"),
             Expr::Async { .. } => not_supported!("async block"),
-            &Expr::Const(id) => {
-                let subst = self.placeholder_subst();
-                self.lower_const(
-                    id.into(),
-                    current,
-                    place,
-                    subst,
-                    expr_id.into(),
-                    self.expr_ty_without_adjust(expr_id),
-                )?;
-                Ok(Some(current))
+            &Expr::Const(_) => {
+                // let subst = self.placeholder_subst();
+                // self.lower_const(
+                //     id.into(),
+                //     current,
+                //     place,
+                //     subst,
+                //     expr_id.into(),
+                //     self.expr_ty_without_adjust(expr_id),
+                // )?;
+                // Ok(Some(current))
+                not_supported!("const block")
             }
             Expr::Cast { expr, type_ref: _ } => {
                 let Some((it, current)) = self.lower_expr_to_some_operand(*expr, current)? else {
@@ -1130,7 +1125,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
                 };
                 self.push_fake_read(current, value, expr_id.into());
                 let resolver_guard =
-                    self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr_id);
+                    self.resolver.update_to_inner_scope(self.db, self.owner, expr_id);
                 current = self.pattern_match_assignment(current, value, target)?;
                 self.resolver.reset_to_guard(resolver_guard);
                 Ok(Some(current))
@@ -1165,8 +1160,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
                     Rvalue::Aggregate(
                         AggregateKind::Adt(st.into(), subst.clone()),
                         self.db
-                            .struct_data(st)
-                            .variant_data
+                            .variant_fields(st.into())
                             .fields()
                             .iter()
                             .map(|it| {
@@ -1279,7 +1273,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
                         _ => {
                             return Err(MirLowerError::TypeError(
                                 "Array expression with non array type",
-                            ))
+                            ));
                         }
                     };
                     let Some(values) = elements
@@ -1311,7 +1305,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
                         _ => {
                             return Err(MirLowerError::TypeError(
                                 "Array repeat expression with non array type",
-                            ))
+                            ));
                         }
                     };
                     let r = Rvalue::Repeat(init, len);
@@ -1330,7 +1324,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
     }
 
     fn placeholder_subst(&mut self) -> Substitution {
-        match self.owner.as_generic_def_id(self.db.upcast()) {
+        match self.owner.as_generic_def_id(self.db) {
             Some(it) => TyBuilder::placeholder_subst(self.db, it),
             None => Substitution::empty(Interner),
         }
@@ -1371,13 +1365,13 @@ impl<'ctx> MirLowerCtx<'ctx> {
                     MirLowerError::unresolved_path(
                         self.db,
                         c,
-                        DisplayTarget::from_crate(db, owner.krate(db.upcast())),
-                        &self.body.types,
+                        DisplayTarget::from_crate(db, owner.krate(db)),
+                        self.body,
                     )
                 };
                 let pr = self
                     .resolver
-                    .resolve_path_in_value_ns(self.db.upcast(), c, HygieneId::ROOT)
+                    .resolve_path_in_value_ns(self.db, c, HygieneId::ROOT)
                     .ok_or_else(unresolved_name)?;
                 match pr {
                     ResolveValueResult::ValueNs(v, _) => {
@@ -1442,7 +1436,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
                 _ => {
                     return Err(MirLowerError::TypeError(
                         "float with size other than 2, 4, 8 or 16 bytes",
-                    ))
+                    ));
                 }
             },
         };
@@ -1477,7 +1471,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
             // We can't evaluate constant with substitution now, as generics are not monomorphized in lowering.
             intern_const_scalar(ConstScalar::UnevaluatedConst(const_id, subst), ty)
         } else {
-            let name = const_id.name(self.db.upcast());
+            let name = const_id.name(self.db);
             self.db
                 .const_eval(const_id, subst, None)
                 .map_err(|e| MirLowerError::ConstEvalError(name.into(), Box::new(e)))?
@@ -1636,10 +1630,12 @@ impl<'ctx> MirLowerCtx<'ctx> {
         f: impl FnOnce(&mut MirLowerCtx<'_>, BasicBlockId) -> Result<()>,
     ) -> Result<Option<BasicBlockId>> {
         let begin = self.new_basic_block();
-        let prev = mem::replace(
-            &mut self.current_loop_blocks,
-            Some(LoopBlocks { begin, end: None, place, drop_scope_index: self.drop_scopes.len() }),
-        );
+        let prev = self.current_loop_blocks.replace(LoopBlocks {
+            begin,
+            end: None,
+            place,
+            drop_scope_index: self.drop_scopes.len(),
+        });
         let prev_label = if let Some(label) = label {
             // We should generate the end now, to make sure that it wouldn't change later. It is
             // bad as we may emit an end (an unnecessary unreachable block) for a non-terminating loop, but
@@ -1708,7 +1704,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
     }
 
     fn is_uninhabited(&self, expr_id: ExprId) -> bool {
-        is_ty_uninhabited_from(self.db, &self.infer[expr_id], self.owner.module(self.db.upcast()))
+        is_ty_uninhabited_from(self.db, &self.infer[expr_id], self.owner.module(self.db))
     }
 
     /// This function push `StorageLive` statement for the binding, and applies changes to add `StorageDead` and
@@ -1730,7 +1726,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
     }
 
     fn resolve_lang_item(&self, item: LangItem) -> Result<LangItemTarget> {
-        let crate_id = self.owner.module(self.db.upcast()).krate();
+        let crate_id = self.owner.module(self.db).krate();
         self.db.lang_item(crate_id, item).ok_or(MirLowerError::LangItemNotFound(item))
     }
 
@@ -1758,11 +1754,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
                         self.push_fake_read(current, init_place, span);
                         // Using the initializer for the resolver scope is good enough for us, as it cannot create new declarations
                         // and has all declarations of the `let`.
-                        let resolver_guard = self.resolver.update_to_inner_scope(
-                            self.db.upcast(),
-                            self.owner,
-                            *expr_id,
-                        );
+                        let resolver_guard =
+                            self.resolver.update_to_inner_scope(self.db, self.owner, *expr_id);
                         (current, else_block) =
                             self.pattern_match(current, None, init_place, *pat)?;
                         self.resolver.reset_to_guard(resolver_guard);
@@ -1906,13 +1899,13 @@ impl<'ctx> MirLowerCtx<'ctx> {
             Ok(r) => Ok(r),
             Err(e) => {
                 let edition = self.edition();
-                let db = self.db.upcast();
+                let db = self.db;
                 let loc = variant.lookup(db);
                 let enum_loc = loc.parent.lookup(db);
                 let name = format!(
                     "{}::{}",
-                    enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db.upcast(), edition),
-                    loc.id.item_tree(db)[loc.id.value].name.display(db.upcast(), edition),
+                    enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db, edition),
+                    loc.id.item_tree(db)[loc.id.value].name.display(db, edition),
                 );
                 Err(MirLowerError::ConstEvalError(name.into(), Box::new(e)))
             }
@@ -1920,11 +1913,11 @@ impl<'ctx> MirLowerCtx<'ctx> {
     }
 
     fn edition(&self) -> Edition {
-        self.db.crate_graph()[self.krate()].edition
+        self.krate().data(self.db).edition
     }
 
-    fn krate(&self) -> CrateId {
-        self.owner.krate(self.db.upcast())
+    fn krate(&self) -> Crate {
+        self.owner.krate(self.db)
     }
 
     fn display_target(&self) -> DisplayTarget {
@@ -2016,9 +2009,9 @@ fn cast_kind(table: &mut InferenceTable<'_>, source_ty: &Ty, target_ty: &Ty) ->
 
 pub fn mir_body_for_closure_query(
     db: &dyn HirDatabase,
-    closure: ClosureId,
+    closure: InternedClosureId,
 ) -> Result<Arc<MirBody>> {
-    let InternedClosure(owner, expr) = db.lookup_intern_closure(closure.into());
+    let InternedClosure(owner, expr) = db.lookup_intern_closure(closure);
     let body = db.body(owner);
     let infer = db.infer(owner);
     let Expr::Closure { args, body: root, .. } = &body[expr] else {
@@ -2027,7 +2020,7 @@ pub fn mir_body_for_closure_query(
     let TyKind::Closure(_, substs) = &infer[expr].kind(Interner) else {
         implementation_error!("closure expression is not closure");
     };
-    let (captures, kind) = infer.closure_info(&closure);
+    let (captures, kind) = infer.closure_info(&closure.into());
     let mut ctx = MirLowerCtx::new(db, owner, &body, &infer);
     // 0 is return local
     ctx.result.locals.alloc(Local { ty: infer[*root].clone() });
@@ -2046,7 +2039,7 @@ pub fn mir_body_for_closure_query(
     let Some(sig) = ClosureSubst(substs).sig_ty().callable_sig(db) else {
         implementation_error!("closure has not callable sig");
     };
-    let resolver_guard = ctx.resolver.update_to_inner_scope(db.upcast(), owner, expr);
+    let resolver_guard = ctx.resolver.update_to_inner_scope(db, owner, expr);
     let current = ctx.lower_params_and_bindings(
         args.iter().zip(sig.params().iter()).map(|(it, y)| (*it, y.clone())),
         None,
@@ -2120,26 +2113,29 @@ pub fn mir_body_for_closure_query(
 }
 
 pub fn mir_body_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Result<Arc<MirBody>> {
-    let krate = def.krate(db.upcast());
-    let edition = db.crate_graph()[krate].edition;
+    let krate = def.krate(db);
+    let edition = krate.data(db).edition;
     let detail = match def {
         DefWithBodyId::FunctionId(it) => {
-            db.function_data(it).name.display(db.upcast(), edition).to_string()
+            db.function_signature(it).name.display(db, edition).to_string()
         }
         DefWithBodyId::StaticId(it) => {
-            db.static_data(it).name.display(db.upcast(), edition).to_string()
+            db.static_signature(it).name.display(db, edition).to_string()
         }
         DefWithBodyId::ConstId(it) => db
-            .const_data(it)
+            .const_signature(it)
             .name
             .clone()
             .unwrap_or_else(Name::missing)
-            .display(db.upcast(), edition)
+            .display(db, edition)
             .to_string(),
         DefWithBodyId::VariantId(it) => {
-            db.enum_variant_data(it).name.display(db.upcast(), edition).to_string()
+            let loc = it.lookup(db);
+            db.enum_variants(loc.parent).variants[loc.index as usize]
+                .1
+                .display(db, edition)
+                .to_string()
         }
-        DefWithBodyId::InTypeConstId(it) => format!("in type const {it:?}"),
     };
     let _p = tracing::info_span!("mir_body_query", ?detail).entered();
     let body = db.body(def);
@@ -2149,10 +2145,9 @@ pub fn mir_body_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Result<Arc<Mi
     Ok(Arc::new(result))
 }
 
-pub fn mir_body_recover(
+pub(crate) fn mir_body_cycle_result(
     _db: &dyn HirDatabase,
-    _cycle: &Cycle,
-    _def: &DefWithBodyId,
+    _def: DefWithBodyId,
 ) -> Result<Arc<MirBody>> {
     Err(MirLowerError::Loop)
 }
@@ -2174,11 +2169,7 @@ pub fn lower_to_mir(
     ctx.result.locals.alloc(Local { ty: ctx.expr_ty_after_adjustments(root_expr) });
     let binding_picker = |b: BindingId| {
         let owner = ctx.body.binding_owners.get(&b).copied();
-        if root_expr == body.body_expr {
-            owner.is_none()
-        } else {
-            owner == Some(root_expr)
-        }
+        if root_expr == body.body_expr { owner.is_none() } else { owner == Some(root_expr) }
     };
     // 1 to param_len is for params
     // FIXME: replace with let chain once it becomes stable
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs
index 420f2aaff46d6..d3cd0099246a8 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs
@@ -136,10 +136,9 @@ impl MirLowerCtx<'_> {
         match &self.body.exprs[expr_id] {
             Expr::Path(p) => {
                 let resolver_guard =
-                    self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr_id);
+                    self.resolver.update_to_inner_scope(self.db, self.owner, expr_id);
                 let hygiene = self.body.expr_path_hygiene(expr_id);
-                let resolved =
-                    self.resolver.resolve_path_in_value_ns_fully(self.db.upcast(), p, hygiene);
+                let resolved = self.resolver.resolve_path_in_value_ns_fully(self.db, p, hygiene);
                 self.resolver.reset_to_guard(resolver_guard);
                 let Some(pr) = resolved else {
                     return try_rvalue(self);
@@ -194,10 +193,10 @@ impl MirLowerCtx<'_> {
                                 if let Some(deref_trait) =
                                     self.resolve_lang_item(LangItem::DerefMut)?.as_trait()
                                 {
-                                    if let Some(deref_fn) =
-                                        self.db.trait_data(deref_trait).method_by_name(
-                                            &Name::new_symbol_root(sym::deref_mut.clone()),
-                                        )
+                                    if let Some(deref_fn) = self
+                                        .db
+                                        .trait_items(deref_trait)
+                                        .method_by_name(&Name::new_symbol_root(sym::deref_mut))
                                     {
                                         break 'b deref_fn == f;
                                     }
@@ -332,14 +331,14 @@ impl MirLowerCtx<'_> {
             (
                 Mutability::Not,
                 LangItem::Deref,
-                Name::new_symbol_root(sym::deref.clone()),
+                Name::new_symbol_root(sym::deref),
                 BorrowKind::Shared,
             )
         } else {
             (
                 Mutability::Mut,
                 LangItem::DerefMut,
-                Name::new_symbol_root(sym::deref_mut.clone()),
+                Name::new_symbol_root(sym::deref_mut),
                 BorrowKind::Mut { kind: MutBorrowKind::Default },
             )
         };
@@ -353,7 +352,7 @@ impl MirLowerCtx<'_> {
             .ok_or(MirLowerError::LangItemNotFound(trait_lang_item))?;
         let deref_fn = self
             .db
-            .trait_data(deref_trait)
+            .trait_items(deref_trait)
             .method_by_name(&trait_method_name)
             .ok_or(MirLowerError::LangItemNotFound(trait_lang_item))?;
         let deref_fn_op = Operand::const_zst(
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs
index 783f92b2043f6..abfa7aee04f74 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs
@@ -1,19 +1,19 @@
 //! MIR lowering for patterns
 
-use hir_def::{hir::ExprId, AssocItemId};
+use hir_def::{AssocItemId, hir::ExprId, signatures::VariantFields};
 
 use crate::{
+    BindingMode,
     mir::{
+        LocalId, MutBorrowKind,
         lower::{
             BasicBlockId, BinOp, BindingId, BorrowKind, Either, Expr, FieldId, Idx, Interner,
             MemoryMap, MirLowerCtx, MirLowerError, MirSpan, Mutability, Operand, Pat, PatId, Place,
             PlaceElem, ProjectionElem, RecordFieldPat, ResolveValueResult, Result, Rvalue,
             Substitution, SwitchTargets, TerminatorKind, TupleFieldId, TupleId, TyBuilder, TyKind,
-            ValueNs, VariantData, VariantId,
+            ValueNs, VariantId,
         },
-        LocalId, MutBorrowKind,
     },
-    BindingMode,
 };
 
 macro_rules! not_supported {
@@ -139,7 +139,7 @@ impl MirLowerCtx<'_> {
                     _ => {
                         return Err(MirLowerError::TypeError(
                             "non tuple type matched with tuple pattern",
-                        ))
+                        ));
                     }
                 };
                 self.pattern_match_tuple_like(
@@ -350,17 +350,12 @@ impl MirLowerCtx<'_> {
                 )?,
                 None => {
                     let unresolved_name = || {
-                        MirLowerError::unresolved_path(
-                            self.db,
-                            p,
-                            self.display_target(),
-                            &self.body.types,
-                        )
+                        MirLowerError::unresolved_path(self.db, p, self.display_target(), self.body)
                     };
                     let hygiene = self.body.pat_path_hygiene(pattern);
                     let pr = self
                         .resolver
-                        .resolve_path_in_value_ns(self.db.upcast(), p, hygiene)
+                        .resolve_path_in_value_ns(self.db, p, hygiene)
                         .ok_or_else(unresolved_name)?;
 
                     if let (
@@ -597,7 +592,7 @@ impl MirLowerCtx<'_> {
                 }
                 self.pattern_matching_variant_fields(
                     shape,
-                    &self.db.enum_variant_data(v).variant_data,
+                    &self.db.variant_fields(v.into()),
                     variant,
                     current,
                     current_else,
@@ -607,7 +602,7 @@ impl MirLowerCtx<'_> {
             }
             VariantId::StructId(s) => self.pattern_matching_variant_fields(
                 shape,
-                &self.db.struct_data(s).variant_data,
+                &self.db.variant_fields(s.into()),
                 variant,
                 current,
                 current_else,
@@ -615,7 +610,7 @@ impl MirLowerCtx<'_> {
                 mode,
             )?,
             VariantId::UnionId(_) => {
-                return Err(MirLowerError::TypeError("pattern matching on union"))
+                return Err(MirLowerError::TypeError("pattern matching on union"));
             }
         })
     }
@@ -623,7 +618,7 @@ impl MirLowerCtx<'_> {
     fn pattern_matching_variant_fields(
         &mut self,
         shape: AdtPatternShape<'_>,
-        variant_data: &VariantData,
+        variant_data: &VariantFields,
         v: VariantId,
         current: BasicBlockId,
         current_else: Option<BasicBlockId>,
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs
index 92132fa047362..d4f10c032cb1b 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs
@@ -9,21 +9,20 @@
 
 use std::mem;
 
-use base_db::ra_salsa::Cycle;
 use chalk_ir::{
-    fold::{FallibleTypeFolder, TypeFoldable, TypeSuperFoldable},
     ConstData, DebruijnIndex,
+    fold::{FallibleTypeFolder, TypeFoldable, TypeSuperFoldable},
 };
 use hir_def::DefWithBodyId;
 use triomphe::Arc;
 
 use crate::{
+    Const, Interner, ProjectionTy, Substitution, TraitEnvironment, Ty, TyKind,
     consteval::{intern_const_scalar, unknown_const},
-    db::{HirDatabase, InternedClosure},
+    db::{HirDatabase, InternedClosure, InternedClosureId},
     from_placeholder_idx,
-    generics::{generics, Generics},
+    generics::{Generics, generics},
     infer::normalize,
-    ClosureId, Const, Interner, ProjectionTy, Substitution, TraitEnvironment, Ty, TyKind,
 };
 
 use super::{MirBody, MirLowerError, Operand, Rvalue, StatementKind, TerminatorKind};
@@ -78,7 +77,7 @@ impl FallibleTypeFolder<Interner> for Filler<'_> {
                             owner: self.owner,
                             trait_env: self.trait_env.clone(),
                             subst: &subst,
-                            generics: Some(generics(self.db.upcast(), func.into())),
+                            generics: Some(generics(self.db, func.into())),
                         };
                         filler.try_fold_ty(infer.type_of_rpit[idx].clone(), outer_binder)
                     }
@@ -306,7 +305,7 @@ pub fn monomorphized_mir_body_query(
     subst: Substitution,
     trait_env: Arc<crate::TraitEnvironment>,
 ) -> Result<Arc<MirBody>, MirLowerError> {
-    let generics = owner.as_generic_def_id(db.upcast()).map(|g_def| generics(db.upcast(), g_def));
+    let generics = owner.as_generic_def_id(db).map(|g_def| generics(db, g_def));
     let filler = &mut Filler { db, subst: &subst, trait_env, generics, owner };
     let body = db.mir_body(owner)?;
     let mut body = (*body).clone();
@@ -314,24 +313,23 @@ pub fn monomorphized_mir_body_query(
     Ok(Arc::new(body))
 }
 
-pub fn monomorphized_mir_body_recover(
-    _: &dyn HirDatabase,
-    _: &Cycle,
-    _: &DefWithBodyId,
-    _: &Substitution,
-    _: &Arc<crate::TraitEnvironment>,
+pub(crate) fn monomorphized_mir_body_cycle_result(
+    _db: &dyn HirDatabase,
+    _: DefWithBodyId,
+    _: Substitution,
+    _: Arc<crate::TraitEnvironment>,
 ) -> Result<Arc<MirBody>, MirLowerError> {
     Err(MirLowerError::Loop)
 }
 
 pub fn monomorphized_mir_body_for_closure_query(
     db: &dyn HirDatabase,
-    closure: ClosureId,
+    closure: InternedClosureId,
     subst: Substitution,
     trait_env: Arc<crate::TraitEnvironment>,
 ) -> Result<Arc<MirBody>, MirLowerError> {
-    let InternedClosure(owner, _) = db.lookup_intern_closure(closure.into());
-    let generics = owner.as_generic_def_id(db.upcast()).map(|g_def| generics(db.upcast(), g_def));
+    let InternedClosure(owner, _) = db.lookup_intern_closure(closure);
+    let generics = owner.as_generic_def_id(db).map(|g_def| generics(db, g_def));
     let filler = &mut Filler { db, subst: &subst, trait_env, generics, owner };
     let body = db.mir_body_for_closure(closure)?;
     let mut body = (*body).clone();
@@ -347,7 +345,7 @@ pub fn monomorphize_mir_body_bad(
     trait_env: Arc<crate::TraitEnvironment>,
 ) -> Result<MirBody, MirLowerError> {
     let owner = body.owner;
-    let generics = owner.as_generic_def_id(db.upcast()).map(|g_def| generics(db.upcast(), g_def));
+    let generics = owner.as_generic_def_id(db).map(|g_def| generics(db, g_def));
     let filler = &mut Filler { db, subst: &subst, trait_env, generics, owner };
     filler.fill_body(&mut body)?;
     Ok(body)
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs
index 7d7d4106cb955..f71e29789766e 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs
@@ -7,14 +7,14 @@ use std::{
 
 use either::Either;
 use hir_def::{expr_store::Body, hir::BindingId};
-use hir_expand::{name::Name, Lookup};
+use hir_expand::{Lookup, name::Name};
 use la_arena::ArenaMap;
 
 use crate::{
+    ClosureId,
     db::HirDatabase,
     display::{ClosureStyle, DisplayTarget, HirDisplay},
     mir::{PlaceElem, ProjectionElem, StatementKind, TerminatorKind},
-    ClosureId,
 };
 
 use super::{
@@ -43,45 +43,38 @@ impl MirBody {
         let mut ctx = MirPrettyCtx::new(self, &hir_body, db, display_target);
         ctx.for_body(|this| match ctx.body.owner {
             hir_def::DefWithBodyId::FunctionId(id) => {
-                let data = db.function_data(id);
-                w!(this, "fn {}() ", data.name.display(db.upcast(), this.display_target.edition));
+                let data = db.function_signature(id);
+                w!(this, "fn {}() ", data.name.display(db, this.display_target.edition));
             }
             hir_def::DefWithBodyId::StaticId(id) => {
-                let data = db.static_data(id);
-                w!(
-                    this,
-                    "static {}: _ = ",
-                    data.name.display(db.upcast(), this.display_target.edition)
-                );
+                let data = db.static_signature(id);
+                w!(this, "static {}: _ = ", data.name.display(db, this.display_target.edition));
             }
             hir_def::DefWithBodyId::ConstId(id) => {
-                let data = db.const_data(id);
+                let data = db.const_signature(id);
                 w!(
                     this,
                     "const {}: _ = ",
                     data.name
                         .as_ref()
                         .unwrap_or(&Name::missing())
-                        .display(db.upcast(), this.display_target.edition)
+                        .display(db, this.display_target.edition)
                 );
             }
             hir_def::DefWithBodyId::VariantId(id) => {
-                let loc = id.lookup(db.upcast());
-                let enum_loc = loc.parent.lookup(db.upcast());
+                let loc = id.lookup(db);
+                let enum_loc = loc.parent.lookup(db);
                 w!(
                     this,
                     "enum {}::{} = ",
-                    enum_loc.id.item_tree(db.upcast())[enum_loc.id.value]
+                    enum_loc.id.item_tree(db)[enum_loc.id.value]
                         .name
-                        .display(db.upcast(), this.display_target.edition),
-                    loc.id.item_tree(db.upcast())[loc.id.value]
+                        .display(db, this.display_target.edition),
+                    loc.id.item_tree(db)[loc.id.value]
                         .name
-                        .display(db.upcast(), this.display_target.edition),
+                        .display(db, this.display_target.edition),
                 )
             }
-            hir_def::DefWithBodyId::InTypeConstId(id) => {
-                w!(this, "in type const {id:?} = ");
-            }
         });
         ctx.result
     }
@@ -134,7 +127,7 @@ impl HirDisplay for LocalName {
         match self {
             LocalName::Unknown(l) => write!(f, "_{}", u32::from(l.into_raw())),
             LocalName::Binding(n, l) => {
-                write!(f, "{}_{}", n.display(f.db.upcast(), f.edition()), u32::from(l.into_raw()))
+                write!(f, "{}_{}", n.display(f.db, f.edition()), u32::from(l.into_raw()))
             }
         }
     }
@@ -154,7 +147,7 @@ impl<'a> MirPrettyCtx<'a> {
     }
 
     fn for_closure(&mut self, closure: ClosureId) {
-        let body = match self.db.mir_body_for_closure(closure) {
+        let body = match self.db.mir_body_for_closure(closure.into()) {
             Ok(it) => it,
             Err(e) => {
                 wln!(self, "// error in {closure:?}: {e:?}");
@@ -333,27 +326,25 @@ impl<'a> MirPrettyCtx<'a> {
                     w!(this, ")");
                 }
                 ProjectionElem::Field(Either::Left(field)) => {
-                    let variant_data = field.parent.variant_data(this.db.upcast());
-                    let name = &variant_data.fields()[field.local_id].name;
+                    let variant_fields = this.db.variant_fields(field.parent);
+                    let name = &variant_fields.fields()[field.local_id].name;
                     match field.parent {
                         hir_def::VariantId::EnumVariantId(e) => {
                             w!(this, "(");
                             f(this, local, head);
-                            let variant_name = &this.db.enum_variant_data(e).name;
+                            let loc = e.lookup(this.db);
                             w!(
                                 this,
                                 " as {}).{}",
-                                variant_name.display(this.db.upcast(), this.display_target.edition),
-                                name.display(this.db.upcast(), this.display_target.edition)
+                                this.db.enum_variants(loc.parent).variants[loc.index as usize]
+                                    .1
+                                    .display(this.db, this.display_target.edition),
+                                name.display(this.db, this.display_target.edition)
                             );
                         }
                         hir_def::VariantId::StructId(_) | hir_def::VariantId::UnionId(_) => {
                             f(this, local, head);
-                            w!(
-                                this,
-                                ".{}",
-                                name.display(this.db.upcast(), this.display_target.edition)
-                            );
+                            w!(this, ".{}", name.display(this.db, this.display_target.edition));
                         }
                     }
                 }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/target_feature.rs b/src/tools/rust-analyzer/crates/hir-ty/src/target_feature.rs
index fe9416c6cfc69..9d1238701bcfa 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/target_feature.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/target_feature.rs
@@ -4,7 +4,7 @@ use std::sync::LazyLock;
 
 use hir_def::attr::Attrs;
 use hir_def::tt;
-use intern::{sym, Symbol};
+use intern::{Symbol, sym};
 use rustc_hash::{FxHashMap, FxHashSet};
 
 #[derive(Debug, Default)]
@@ -36,17 +36,19 @@ impl TargetFeatures {
     /// Retrieves the target features from the attributes, and does not expand the target features implied by them.
     pub(crate) fn from_attrs_no_implications(attrs: &Attrs) -> Self {
         let enabled = attrs
-            .by_key(&sym::target_feature)
+            .by_key(sym::target_feature)
             .tt_values()
-            .filter_map(|tt| {
-                match tt.token_trees().flat_tokens() {
-                    [
-                        tt::TokenTree::Leaf(tt::Leaf::Ident(enable_ident)),
-                        tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. })),
-                        tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { kind: tt::LitKind::Str, symbol: features, .. })),
-                    ] if enable_ident.sym == sym::enable => Some(features),
-                    _ => None,
-                }
+            .filter_map(|tt| match tt.token_trees().flat_tokens() {
+                [
+                    tt::TokenTree::Leaf(tt::Leaf::Ident(enable_ident)),
+                    tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. })),
+                    tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
+                        kind: tt::LitKind::Str,
+                        symbol: features,
+                        ..
+                    })),
+                ] if enable_ident.sym == sym::enable => Some(features),
+                _ => None,
             })
             .flat_map(|features| features.as_str().split(',').map(Symbol::intern))
             .collect();
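
The hunk above only reflows the token-tree matcher for `#[target_feature(enable = "...")]`; the part worth seeing in isolation is the final step, which splits the comma-separated `enable` payload into individual feature symbols. A stand-alone sketch of just that string handling (hypothetical helper, plain `String` in place of `Symbol::intern`, not rust-analyzer API):

// Stand-alone sketch (not rust-analyzer API): the final string-handling step
// of the matcher above. `Symbol::intern` from the real code is replaced by
// plain owned `String`s.
fn split_enabled_features(enable_value: &str) -> Vec<String> {
    enable_value.split(',').map(str::to_owned).collect()
}

fn main() {
    // Corresponds to an attribute like `#[target_feature(enable = "avx2,fma")]`.
    assert_eq!(split_enabled_features("avx2,fma"), vec!["avx2", "fma"]);
}
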
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs
index f37dd91d8e90f..d2bba120b68e4 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs
@@ -3,35 +3,40 @@
 use std::{fmt, panic, sync::Mutex};
 
 use base_db::{
-    ra_salsa::{self, Durability},
-    AnchoredPath, CrateId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
+    CrateGraphBuilder, CratesMap, FileSourceRootInput, FileText, RootQueryDb, SourceDatabase,
+    SourceRoot, SourceRootId, SourceRootInput,
 };
-use hir_def::{db::DefDatabase, ModuleId};
-use hir_expand::db::ExpandDatabase;
+
+use hir_def::{ModuleId, db::DefDatabase};
+use hir_expand::EditionedFileId;
 use rustc_hash::FxHashMap;
-use span::{EditionedFileId, FileId};
+use salsa::{AsDynDatabase, Durability};
+use span::FileId;
 use syntax::TextRange;
 use test_utils::extract_annotations;
 use triomphe::Arc;
 
-#[ra_salsa::database(
-    base_db::SourceRootDatabaseStorage,
-    base_db::SourceDatabaseStorage,
-    hir_expand::db::ExpandDatabaseStorage,
-    hir_def::db::InternDatabaseStorage,
-    hir_def::db::DefDatabaseStorage,
-    crate::db::HirDatabaseStorage
-)]
+#[salsa::db]
+#[derive(Clone)]
 pub(crate) struct TestDB {
-    storage: ra_salsa::Storage<TestDB>,
-    events: Mutex<Option<Vec<ra_salsa::Event>>>,
+    storage: salsa::Storage<Self>,
+    files: Arc<base_db::Files>,
+    crates_map: Arc<CratesMap>,
+    events: Arc<Mutex<Option<Vec<salsa::Event>>>>,
 }
 
 impl Default for TestDB {
     fn default() -> Self {
-        let mut this = Self { storage: Default::default(), events: Default::default() };
-        this.setup_syntax_context_root();
+        let mut this = Self {
+            storage: Default::default(),
+            events: Default::default(),
+            files: Default::default(),
+            crates_map: Default::default(),
+        };
         this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH);
+        // This needs to be here, otherwise `CrateGraphBuilder` panics.
+        this.set_all_crates(Arc::new(Box::new([])));
+        CrateGraphBuilder::default().set_in_db(&mut this);
         this
     }
 }
@@ -42,54 +47,80 @@ impl fmt::Debug for TestDB {
     }
 }
 
-impl Upcast<dyn ExpandDatabase> for TestDB {
-    fn upcast(&self) -> &(dyn ExpandDatabase + 'static) {
-        self
+#[salsa::db]
+impl SourceDatabase for TestDB {
+    fn file_text(&self, file_id: base_db::FileId) -> FileText {
+        self.files.file_text(file_id)
     }
-}
 
-impl Upcast<dyn DefDatabase> for TestDB {
-    fn upcast(&self) -> &(dyn DefDatabase + 'static) {
-        self
+    fn set_file_text(&mut self, file_id: base_db::FileId, text: &str) {
+        let files = Arc::clone(&self.files);
+        files.set_file_text(self, file_id, text);
     }
-}
 
-impl ra_salsa::Database for TestDB {
-    fn salsa_event(&self, event: ra_salsa::Event) {
-        let mut events = self.events.lock().unwrap();
-        if let Some(events) = &mut *events {
-            events.push(event);
-        }
+    fn set_file_text_with_durability(
+        &mut self,
+        file_id: base_db::FileId,
+        text: &str,
+        durability: Durability,
+    ) {
+        let files = Arc::clone(&self.files);
+        files.set_file_text_with_durability(self, file_id, text, durability);
     }
-}
 
-impl ra_salsa::ParallelDatabase for TestDB {
-    fn snapshot(&self) -> ra_salsa::Snapshot<TestDB> {
-        ra_salsa::Snapshot::new(TestDB {
-            storage: self.storage.snapshot(),
-            events: Default::default(),
-        })
+    /// Source root of the file.
+    fn source_root(&self, source_root_id: SourceRootId) -> SourceRootInput {
+        self.files.source_root(source_root_id)
     }
-}
 
-impl panic::RefUnwindSafe for TestDB {}
+    fn set_source_root_with_durability(
+        &mut self,
+        source_root_id: SourceRootId,
+        source_root: Arc<SourceRoot>,
+        durability: Durability,
+    ) {
+        let files = Arc::clone(&self.files);
+        files.set_source_root_with_durability(self, source_root_id, source_root, durability);
+    }
 
-impl FileLoader for TestDB {
-    fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
-        FileLoaderDelegate(self).resolve_path(path)
+    fn file_source_root(&self, id: base_db::FileId) -> FileSourceRootInput {
+        self.files.file_source_root(id)
     }
-    fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]> {
-        FileLoaderDelegate(self).relevant_crates(file_id)
+
+    fn set_file_source_root_with_durability(
+        &mut self,
+        id: base_db::FileId,
+        source_root_id: SourceRootId,
+        durability: Durability,
+    ) {
+        let files = Arc::clone(&self.files);
+        files.set_file_source_root_with_durability(self, id, source_root_id, durability);
+    }
+
+    fn crates_map(&self) -> Arc<CratesMap> {
+        self.crates_map.clone()
     }
 }
 
+#[salsa::db]
+impl salsa::Database for TestDB {
+    fn salsa_event(&self, event: &dyn std::ops::Fn() -> salsa::Event) {
+        let mut events = self.events.lock().unwrap();
+        if let Some(events) = &mut *events {
+            events.push(event());
+        }
+    }
+}
+
+impl panic::RefUnwindSafe for TestDB {}
+
 impl TestDB {
     pub(crate) fn module_for_file_opt(&self, file_id: impl Into<FileId>) -> Option<ModuleId> {
         let file_id = file_id.into();
         for &krate in self.relevant_crates(file_id).iter() {
             let crate_def_map = self.crate_def_map(krate);
             for (local_id, data) in crate_def_map.modules() {
-                if data.origin.file_id().map(EditionedFileId::file_id) == Some(file_id) {
+                if data.origin.file_id().map(|file_id| file_id.file_id(self)) == Some(file_id) {
                     return Some(crate_def_map.module_id(local_id));
                 }
             }
@@ -105,8 +136,7 @@ impl TestDB {
         &self,
     ) -> FxHashMap<EditionedFileId, Vec<(TextRange, String)>> {
         let mut files = Vec::new();
-        let crate_graph = self.crate_graph();
-        for krate in crate_graph.iter() {
+        for &krate in self.all_crates().iter() {
             let crate_def_map = self.crate_def_map(krate);
             for (module_id, _) in crate_def_map.modules() {
                 let file_id = crate_def_map[module_id].origin.file_id();
@@ -116,8 +146,8 @@ impl TestDB {
         files
             .into_iter()
             .filter_map(|file_id| {
-                let text = self.file_text(file_id.file_id());
-                let annotations = extract_annotations(&text);
+                let text = self.file_text(file_id.file_id(self));
+                let annotations = extract_annotations(&text.text(self));
                 if annotations.is_empty() {
                     return None;
                 }
@@ -128,7 +158,7 @@ impl TestDB {
 }
 
 impl TestDB {
-    pub(crate) fn log(&self, f: impl FnOnce()) -> Vec<ra_salsa::Event> {
+    pub(crate) fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event> {
         *self.events.lock().unwrap() = Some(Vec::new());
         f();
         self.events.lock().unwrap().take().unwrap()
@@ -141,8 +171,11 @@ impl TestDB {
             .filter_map(|e| match e.kind {
                 // This is pretty horrible, but `Debug` is the only way to inspect
                 // QueryDescriptor at the moment.
-                ra_salsa::EventKind::WillExecute { database_key } => {
-                    Some(format!("{:?}", database_key.debug(self)))
+                salsa::EventKind::WillExecute { database_key } => {
+                    let ingredient = self
+                        .as_dyn_database()
+                        .ingredient_debug_name(database_key.ingredient_index());
+                    Some(ingredient.to_string())
                 }
                 _ => None,
             })
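
The `test_db.rs` rewrite above is the clearest picture in this patch of the ra_salsa-to-salsa migration: the database becomes a `#[salsa::db]` struct holding `salsa::Storage<Self>`, and event logging moves to `salsa::Database::salsa_event`, which now receives a closure that lazily builds the event. Below is a minimal, hedged sketch of that shape, stripped of rust-analyzer's inputs and queries; the struct name is invented, and it assumes a recent salsa release where `#[salsa::db]` generates the remaining plumbing and `Storage` implements `Default` and `Clone`, as this hunk relies on.

use std::sync::{Arc, Mutex};

// Invented minimal database mirroring the structure TestDB now uses:
// salsa storage plus an optional event log appended to from `salsa_event`.
#[salsa::db]
#[derive(Clone, Default)]
struct MiniDb {
    storage: salsa::Storage<Self>,
    events: Arc<Mutex<Option<Vec<salsa::Event>>>>,
}

#[salsa::db]
impl salsa::Database for MiniDb {
    fn salsa_event(&self, event: &dyn Fn() -> salsa::Event) {
        // Only materialize the event when logging has been switched on.
        if let Some(events) = &mut *self.events.lock().unwrap() {
            events.push(event());
        }
    }
}

fn main() {
    // Switch logging on, as TestDB::log does before running queries.
    let db = MiniDb::default();
    *db.events.lock().unwrap() = Some(Vec::new());
}
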
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
index 81e38be2285ab..cc37f65c26c21 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
@@ -15,36 +15,36 @@ mod type_alias_impl_traits;
 use std::env;
 use std::sync::LazyLock;
 
-use base_db::{CrateId, SourceDatabaseFileInputExt as _};
+use base_db::{Crate, SourceDatabase};
 use expect_test::Expect;
 use hir_def::{
+    AssocItemId, DefWithBodyId, HasModule, LocalModuleId, Lookup, ModuleDefId, SyntheticSyntax,
     db::DefDatabase,
     expr_store::{Body, BodySourceMap},
     hir::{ExprId, Pat, PatId},
     item_scope::ItemScope,
     nameres::DefMap,
     src::HasSource,
-    AssocItemId, DefWithBodyId, HasModule, LocalModuleId, Lookup, ModuleDefId, SyntheticSyntax,
 };
-use hir_expand::{db::ExpandDatabase, FileRange, InFile};
+use hir_expand::{FileRange, InFile, db::ExpandDatabase};
 use itertools::Itertools;
 use rustc_hash::FxHashMap;
 use stdx::format_to;
 use syntax::{
-    ast::{self, AstNode, HasName},
     SyntaxNode,
+    ast::{self, AstNode, HasName},
 };
 use test_fixture::WithFixture;
-use tracing_subscriber::{layer::SubscriberExt, Registry};
+use tracing_subscriber::{Registry, layer::SubscriberExt};
 use tracing_tree::HierarchicalLayer;
 use triomphe::Arc;
 
 use crate::{
+    InferenceResult, Ty,
     db::HirDatabase,
     display::{DisplayTarget, HirDisplay},
     infer::{Adjustment, TypeMismatch},
     test_db::TestDB,
-    InferenceResult, Ty,
 };
 
 // These tests compare the inference results for all expressions in a file
@@ -124,9 +124,9 @@ fn check_impl(
     }
     assert!(had_annotations || allow_none, "no `//^` annotations found");
 
-    let mut defs: Vec<(DefWithBodyId, CrateId)> = Vec::new();
+    let mut defs: Vec<(DefWithBodyId, Crate)> = Vec::new();
     for file_id in files {
-        let module = db.module_for_file_opt(file_id);
+        let module = db.module_for_file_opt(file_id.file_id(&db));
         let module = match module {
             Some(m) => m,
             None => continue,
@@ -160,7 +160,6 @@ fn check_impl(
             let loc = it.lookup(&db);
             loc.source(&db).value.syntax().text_range().start()
         }
-        DefWithBodyId::InTypeConstId(it) => it.source(&db).syntax().text_range().start(),
     });
     let mut unexpected_type_mismatches = String::new();
     for (def, krate) in defs {
@@ -302,7 +301,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
     let mut infer_def = |inference_result: Arc<InferenceResult>,
                          body: Arc<Body>,
                          body_source_map: Arc<BodySourceMap>,
-                         krate: CrateId| {
+                         krate: Crate| {
         let display_target = DisplayTarget::from_crate(&db, krate);
         let mut types: Vec<(InFile<SyntaxNode>, &Ty)> = Vec::new();
         let mut mismatches: Vec<(InFile<SyntaxNode>, &TypeMismatch)> = Vec::new();
@@ -388,10 +387,10 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
         }
     };
 
-    let module = db.module_for_file(file_id);
+    let module = db.module_for_file(file_id.file_id(&db));
     let def_map = module.def_map(&db);
 
-    let mut defs: Vec<(DefWithBodyId, CrateId)> = Vec::new();
+    let mut defs: Vec<(DefWithBodyId, Crate)> = Vec::new();
     visit_module(&db, &def_map, module.local_id, &mut |it| {
         let def = match it {
             ModuleDefId::FunctionId(it) => it.into(),
@@ -419,7 +418,6 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
             let loc = it.lookup(&db);
             loc.source(&db).value.syntax().text_range().start()
         }
-        DefWithBodyId::InTypeConstId(it) => it.source(&db).syntax().text_range().start(),
     });
     for (def, krate) in defs {
         let (body, source_map) = db.body_with_source_map(def);
@@ -439,7 +437,7 @@ pub(crate) fn visit_module(
 ) {
     visit_scope(db, crate_def_map, &crate_def_map[module_id].scope, cb);
     for impl_id in crate_def_map[module_id].scope.impls() {
-        let impl_data = db.impl_data(impl_id);
+        let impl_data = db.impl_items(impl_id);
         for &(_, item) in impl_data.items.iter() {
             match item {
                 AssocItemId::FunctionId(it) => {
@@ -481,14 +479,14 @@ pub(crate) fn visit_module(
                     visit_body(db, &body, cb);
                 }
                 ModuleDefId::AdtId(hir_def::AdtId::EnumId(it)) => {
-                    db.enum_data(it).variants.iter().for_each(|&(it, _)| {
+                    db.enum_variants(it).variants.iter().for_each(|&(it, _)| {
                         let body = db.body(it.into());
                         cb(it.into());
                         visit_body(db, &body, cb);
                     });
                 }
                 ModuleDefId::TraitId(it) => {
-                    let trait_data = db.trait_data(it);
+                    let trait_data = db.trait_items(it);
                     for &(_, item) in trait_data.items.iter() {
                         match item {
                             AssocItemId::FunctionId(it) => cb(it.into()),
@@ -570,7 +568,7 @@ fn salsa_bug() {
     ",
     );
 
-    let module = db.module_for_file(pos.file_id);
+    let module = db.module_for_file(pos.file_id.file_id(&db));
     let crate_def_map = module.def_map(&db);
     visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
         db.infer(match def {
@@ -607,9 +605,9 @@ fn salsa_bug() {
         }
     ";
 
-    db.set_file_text(pos.file_id.file_id(), new_text);
+    db.set_file_text(pos.file_id.file_id(&db), new_text);
 
-    let module = db.module_for_file(pos.file_id);
+    let module = db.module_for_file(pos.file_id.file_id(&db));
     let crate_def_map = module.def_map(&db);
     visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
         db.infer(match def {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs
index 6f7bfc4ea7a00..73f1ae56457d6 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs
@@ -1,9 +1,9 @@
-use base_db::ra_salsa::InternKey;
-use expect_test::{expect, Expect};
+use expect_test::{Expect, expect};
 use hir_def::db::DefDatabase;
-use hir_expand::files::InFileWrapper;
+use hir_expand::{HirFileId, files::InFileWrapper};
 use itertools::Itertools;
-use span::{HirFileId, TextRange};
+use salsa::plumbing::FromId;
+use span::TextRange;
 use syntax::{AstNode, AstPtr};
 use test_fixture::WithFixture;
 
@@ -16,7 +16,7 @@ use super::visit_module;
 
 fn check_closure_captures(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
     let (db, file_id) = TestDB::with_single_file(ra_fixture);
-    let module = db.module_for_file(file_id);
+    let module = db.module_for_file(file_id.file_id(&db));
     let def_map = module.def_map(&db);
 
     let mut defs = Vec::new();
@@ -34,8 +34,8 @@ fn check_closure_captures(#[rust_analyzer::rust_fixture] ra_fixture: &str, expec
         let infer = db.infer(def);
         let db = &db;
         captures_info.extend(infer.closure_info.iter().flat_map(|(closure_id, (captures, _))| {
-            let closure = db.lookup_intern_closure(InternedClosureId::from_intern_id(closure_id.0));
-            let (_, source_map) = db.body_with_source_map(closure.0);
+            let closure = db.lookup_intern_closure(InternedClosureId::from_id(closure_id.0));
+            let source_map = db.body_with_source_map(closure.0).1;
             let closure_text_range = source_map
                 .expr_syntax(closure.1)
                 .expect("failed to map closure to SyntaxNode")
@@ -384,7 +384,9 @@ fn main() {
     };
 }
 "#,
-        expect!["57..149;20..25;78..80,98..100,118..124,134..135 ByRef(Mut { kind: Default }) a &'? mut bool"],
+        expect![
+            "57..149;20..25;78..80,98..100,118..124,134..135 ByRef(Mut { kind: Default }) a &'? mut bool"
+        ],
     );
 }
 
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs
index 7e7c1f835c787..eeaacbf12eac6 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs
@@ -22,9 +22,9 @@ struct S<T> { a: T }
 fn f<T>(_: &[T]) -> T { loop {} }
 fn g<T>(_: S<&[T]>) -> T { loop {} }
 
-fn gen<T>() -> *mut [T; 2] { loop {} }
+fn generate<T>() -> *mut [T; 2] { loop {} }
 fn test1<U>() -> *mut [U] {
-    gen()
+    generate()
 }
 
 fn test2() {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs
index 3757d722ac83b..0542be0ba896d 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs
@@ -1,4 +1,4 @@
-use base_db::SourceDatabaseFileInputExt as _;
+use base_db::SourceDatabase;
 use hir_def::ModuleDefId;
 use test_fixture::WithFixture;
 
@@ -17,7 +17,7 @@ fn foo() -> i32 {
     );
     {
         let events = db.log_executed(|| {
-            let module = db.module_for_file(pos.file_id.file_id());
+            let module = db.module_for_file(pos.file_id.file_id(&db));
             let crate_def_map = module.def_map(&db);
             visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
                 if let ModuleDefId::FunctionId(it) = def {
@@ -25,7 +25,7 @@ fn foo() -> i32 {
                 }
             });
         });
-        assert!(format!("{events:?}").contains("infer"))
+        assert!(format!("{events:?}").contains("infer_shim"))
     }
 
     let new_text = "
@@ -35,11 +35,11 @@ fn foo() -> i32 {
     1
 }";
 
-    db.set_file_text(pos.file_id.file_id(), new_text);
+    db.set_file_text(pos.file_id.file_id(&db), new_text);
 
     {
         let events = db.log_executed(|| {
-            let module = db.module_for_file(pos.file_id.file_id());
+            let module = db.module_for_file(pos.file_id.file_id(&db));
             let crate_def_map = module.def_map(&db);
             visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
                 if let ModuleDefId::FunctionId(it) = def {
@@ -47,7 +47,7 @@ fn foo() -> i32 {
                 }
             });
         });
-        assert!(!format!("{events:?}").contains("infer"), "{events:#?}")
+        assert!(!format!("{events:?}").contains("infer_shim"), "{events:#?}")
     }
 }
 
@@ -68,7 +68,7 @@ fn baz() -> i32 {
     );
     {
         let events = db.log_executed(|| {
-            let module = db.module_for_file(pos.file_id.file_id());
+            let module = db.module_for_file(pos.file_id.file_id(&db));
             let crate_def_map = module.def_map(&db);
             visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
                 if let ModuleDefId::FunctionId(it) = def {
@@ -76,7 +76,7 @@ fn baz() -> i32 {
                 }
             });
         });
-        assert!(format!("{events:?}").contains("infer"))
+        assert!(format!("{events:?}").contains("infer_shim"))
     }
 
     let new_text = "
@@ -91,11 +91,11 @@ fn baz() -> i32 {
 }
 ";
 
-    db.set_file_text(pos.file_id.file_id(), new_text);
+    db.set_file_text(pos.file_id.file_id(&db), new_text);
 
     {
         let events = db.log_executed(|| {
-            let module = db.module_for_file(pos.file_id.file_id());
+            let module = db.module_for_file(pos.file_id.file_id(&db));
             let crate_def_map = module.def_map(&db);
             visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
                 if let ModuleDefId::FunctionId(it) = def {
@@ -103,6 +103,6 @@ fn baz() -> i32 {
                 }
             });
         });
-        assert!(format!("{events:?}").matches("infer").count() == 1, "{events:#?}")
+        assert_eq!(format!("{events:?}").matches("infer_shim").count(), 1, "{events:#?}")
     }
 }
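
The incremental tests above now key on the `infer_shim` query name and use `assert_eq!` on the exact execution count. The assertion pattern itself, separated from rust-analyzer (the helper and the event string format are made up for illustration):

// Hypothetical helper mirroring the assertion style used above: render the
// logged events with Debug and count occurrences of a query name.
fn assert_query_count(events: &[String], query: &str, expected: usize) {
    let rendered = format!("{events:?}");
    assert_eq!(rendered.matches(query).count(), expected, "{events:#?}");
}

fn main() {
    let events = vec!["infer_shim(Id(0))".to_owned(), "parse(Id(1))".to_owned()];
    assert_query_count(&events, "infer_shim", 1);
}
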
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
index c4822a90f9e7d..638306054a9d6 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
@@ -1584,23 +1584,6 @@ type Member<U> = ConstGen<U, N>;
     );
 }
 
-#[test]
-fn cfgd_out_self_param() {
-    cov_mark::check!(cfgd_out_self_param);
-    check_no_mismatches(
-        r#"
-struct S;
-impl S {
-    fn f(#[cfg(never)] &self) {}
-}
-
-fn f(s: S) {
-    s.f();
-}
-"#,
-    );
-}
-
 #[test]
 fn tuple_struct_pattern_with_unmatched_args_crash() {
     check_infer(
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
index 4c5cca21655d0..0f5e44151de27 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
@@ -1784,6 +1784,8 @@ impl Foo for u8 {
 }
 
 #[test]
+// FIXME
+#[should_panic]
 fn const_eval_in_function_signature() {
     check_types(
         r#"
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
index dda7bfb2baf9a..2fb51acea8738 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
@@ -4213,7 +4213,7 @@ fn g<'a, T: 'a>(v: impl Trait<Assoc<T> = &'a T>) {
     let a = v.get::<T>();
       //^ &'a T
     let a = v.get::<()>();
-      //^ Trait::Assoc<(), impl Trait<Assoc<T> = &'a T>>
+      //^ Trait::Assoc<impl Trait<Assoc<T> = &'a T>, ()>
 }
 fn h<'a>(v: impl Trait<Assoc<i32> = &'a i32> + Trait<Assoc<i64> = &'a i64>) {
     let a = v.get::<i32>();
@@ -4280,7 +4280,7 @@ where
     let a = t.get::<isize>();
       //^ usize
     let a = t.get::<()>();
-      //^ Trait::Assoc<(), T>
+      //^ Trait::Assoc<T, ()>
 }
 
     "#,
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tls.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tls.rs
index 6cb59491fac82..f5911e2161d0c 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tls.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tls.rs
@@ -5,8 +5,8 @@ use itertools::Itertools;
 use span::Edition;
 
 use crate::{
-    chalk_db, db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, mapping::from_chalk,
-    CallableDefId, Interner, ProjectionTyExt,
+    CallableDefId, Interner, ProjectionTyExt, chalk_db, db::HirDatabase, from_assoc_type_id,
+    from_chalk_trait_id, mapping::from_chalk,
 };
 use hir_def::{AdtId, ItemContainerId, Lookup, TypeAliasId};
 
@@ -21,11 +21,11 @@ impl DebugContext<'_> {
         f: &mut fmt::Formatter<'_>,
     ) -> Result<(), fmt::Error> {
         let name = match id.0 {
-            AdtId::StructId(it) => self.0.struct_data(it).name.clone(),
-            AdtId::UnionId(it) => self.0.union_data(it).name.clone(),
-            AdtId::EnumId(it) => self.0.enum_data(it).name.clone(),
+            AdtId::StructId(it) => self.0.struct_signature(it).name.clone(),
+            AdtId::UnionId(it) => self.0.union_signature(it).name.clone(),
+            AdtId::EnumId(it) => self.0.enum_signature(it).name.clone(),
         };
-        name.display(self.0.upcast(), Edition::LATEST).fmt(f)?;
+        name.display(self.0, Edition::LATEST).fmt(f)?;
         Ok(())
     }
 
@@ -35,8 +35,8 @@ impl DebugContext<'_> {
         f: &mut fmt::Formatter<'_>,
     ) -> Result<(), fmt::Error> {
         let trait_: hir_def::TraitId = from_chalk_trait_id(id);
-        let trait_data = self.0.trait_data(trait_);
-        trait_data.name.display(self.0.upcast(), Edition::LATEST).fmt(f)?;
+        let trait_data = self.0.trait_signature(trait_);
+        trait_data.name.display(self.0, Edition::LATEST).fmt(f)?;
         Ok(())
     }
 
@@ -46,17 +46,17 @@ impl DebugContext<'_> {
         fmt: &mut fmt::Formatter<'_>,
     ) -> Result<(), fmt::Error> {
         let type_alias: TypeAliasId = from_assoc_type_id(id);
-        let type_alias_data = self.0.type_alias_data(type_alias);
-        let trait_ = match type_alias.lookup(self.0.upcast()).container {
+        let type_alias_data = self.0.type_alias_signature(type_alias);
+        let trait_ = match type_alias.lookup(self.0).container {
             ItemContainerId::TraitId(t) => t,
             _ => panic!("associated type not in trait"),
         };
-        let trait_data = self.0.trait_data(trait_);
+        let trait_data = self.0.trait_signature(trait_);
         write!(
             fmt,
             "{}::{}",
-            trait_data.name.display(self.0.upcast(), Edition::LATEST),
-            type_alias_data.name.display(self.0.upcast(), Edition::LATEST)
+            trait_data.name.display(self.0, Edition::LATEST),
+            type_alias_data.name.display(self.0, Edition::LATEST)
         )?;
         Ok(())
     }
@@ -67,16 +67,16 @@ impl DebugContext<'_> {
         fmt: &mut fmt::Formatter<'_>,
     ) -> Result<(), fmt::Error> {
         let type_alias = from_assoc_type_id(projection_ty.associated_ty_id);
-        let type_alias_data = self.0.type_alias_data(type_alias);
-        let trait_ = match type_alias.lookup(self.0.upcast()).container {
+        let type_alias_data = self.0.type_alias_signature(type_alias);
+        let trait_ = match type_alias.lookup(self.0).container {
             ItemContainerId::TraitId(t) => t,
             _ => panic!("associated type not in trait"),
         };
-        let trait_name = &self.0.trait_data(trait_).name;
+        let trait_name = &self.0.trait_signature(trait_).name;
         let trait_ref = projection_ty.trait_ref(self.0);
         let trait_params = trait_ref.substitution.as_slice(Interner);
         let self_ty = trait_ref.self_type_parameter(Interner);
-        write!(fmt, "<{self_ty:?} as {}", trait_name.display(self.0.upcast(), Edition::LATEST))?;
+        write!(fmt, "<{self_ty:?} as {}", trait_name.display(self.0, Edition::LATEST))?;
         if trait_params.len() > 1 {
             write!(
                 fmt,
@@ -84,10 +84,9 @@ impl DebugContext<'_> {
                 trait_params[1..].iter().format_with(", ", |x, f| f(&format_args!("{x:?}"))),
             )?;
         }
-        write!(fmt, ">::{}", type_alias_data.name.display(self.0.upcast(), Edition::LATEST))?;
+        write!(fmt, ">::{}", type_alias_data.name.display(self.0, Edition::LATEST))?;
 
-        let proj_params_count = projection_ty.substitution.len(Interner) - trait_params.len();
-        let proj_params = &projection_ty.substitution.as_slice(Interner)[..proj_params_count];
+        let proj_params = &projection_ty.substitution.as_slice(Interner)[trait_params.len()..];
         if !proj_params.is_empty() {
             write!(
                 fmt,
@@ -106,16 +105,19 @@ impl DebugContext<'_> {
     ) -> Result<(), fmt::Error> {
         let def: CallableDefId = from_chalk(self.0, fn_def_id);
         let name = match def {
-            CallableDefId::FunctionId(ff) => self.0.function_data(ff).name.clone(),
-            CallableDefId::StructId(s) => self.0.struct_data(s).name.clone(),
-            CallableDefId::EnumVariantId(e) => self.0.enum_variant_data(e).name.clone(),
+            CallableDefId::FunctionId(ff) => self.0.function_signature(ff).name.clone(),
+            CallableDefId::StructId(s) => self.0.struct_signature(s).name.clone(),
+            CallableDefId::EnumVariantId(e) => {
+                let loc = e.lookup(self.0);
+                self.0.enum_variants(loc.parent).variants[loc.index as usize].1.clone()
+            }
         };
         match def {
             CallableDefId::FunctionId(_) => {
-                write!(fmt, "{{fn {}}}", name.display(self.0.upcast(), Edition::LATEST))
+                write!(fmt, "{{fn {}}}", name.display(self.0, Edition::LATEST))
             }
             CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_) => {
-                write!(fmt, "{{ctor {}}}", name.display(self.0.upcast(), Edition::LATEST))
+                write!(fmt, "{{ctor {}}}", name.display(self.0, Edition::LATEST))
             }
         }
     }
@@ -131,11 +133,7 @@ mod unsafe_tls {
     pub(crate) fn with_current_program<R>(
         op: impl for<'a> FnOnce(Option<&'a DebugContext<'a>>) -> R,
     ) -> R {
-        if PROGRAM.is_set() {
-            PROGRAM.with(|prog| op(Some(prog)))
-        } else {
-            op(None)
-        }
+        if PROGRAM.is_set() { PROGRAM.with(|prog| op(Some(prog))) } else { op(None) }
     }
 
     pub(crate) fn set_current_program<OP, R>(p: &dyn HirDatabase, op: OP) -> R
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs
index 8cb7dbf60f37b..a5c195d4086af 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs
@@ -3,14 +3,14 @@
 use core::fmt;
 use std::env::var;
 
-use chalk_ir::{fold::TypeFoldable, DebruijnIndex, GoalData};
+use chalk_ir::{DebruijnIndex, GoalData, fold::TypeFoldable};
 use chalk_recursive::Cache;
-use chalk_solve::{logging_db::LoggingRustIrDatabase, rust_ir, Solver};
+use chalk_solve::{Solver, logging_db::LoggingRustIrDatabase, rust_ir};
 
-use base_db::CrateId;
+use base_db::Crate;
 use hir_def::{
-    lang_item::{LangItem, LangItemTarget},
     BlockId, TraitId,
+    lang_item::{LangItem, LangItemTarget},
 };
 use hir_expand::name::Name;
 use intern::sym;
@@ -19,9 +19,9 @@ use stdx::{never, panic_context};
 use triomphe::Arc;
 
 use crate::{
-    db::HirDatabase, infer::unify::InferenceTable, utils::UnevaluatedConstEvaluatorFolder, AliasEq,
-    AliasTy, Canonical, DomainGoal, Goal, Guidance, InEnvironment, Interner, ProjectionTy,
-    ProjectionTyExt, Solution, TraitRefExt, Ty, TyKind, TypeFlags, WhereClause,
+    AliasEq, AliasTy, Canonical, DomainGoal, Goal, Guidance, InEnvironment, Interner, ProjectionTy,
+    ProjectionTyExt, Solution, TraitRefExt, Ty, TyKind, TypeFlags, WhereClause, db::HirDatabase,
+    infer::unify::InferenceTable, utils::UnevaluatedConstEvaluatorFolder,
 };
 
 /// This controls how much 'time' we give the Chalk solver before giving up.
@@ -30,7 +30,7 @@ const CHALK_SOLVER_FUEL: i32 = 1000;
 #[derive(Debug, Copy, Clone)]
 pub(crate) struct ChalkContext<'a> {
     pub(crate) db: &'a dyn HirDatabase,
-    pub(crate) krate: CrateId,
+    pub(crate) krate: Crate,
     pub(crate) block: Option<BlockId>,
 }
 
@@ -48,7 +48,7 @@ fn create_chalk_solver() -> chalk_recursive::RecursiveSolver<Interner> {
 /// we assume that `T: Default`.
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct TraitEnvironment {
-    pub krate: CrateId,
+    pub krate: Crate,
     pub block: Option<BlockId>,
     // FIXME make this a BTreeMap
     traits_from_clauses: Box<[(Ty, TraitId)]>,
@@ -56,7 +56,7 @@ pub struct TraitEnvironment {
 }
 
 impl TraitEnvironment {
-    pub fn empty(krate: CrateId) -> Arc<Self> {
+    pub fn empty(krate: Crate) -> Arc<Self> {
         Arc::new(TraitEnvironment {
             krate,
             block: None,
@@ -66,7 +66,7 @@ impl TraitEnvironment {
     }
 
     pub fn new(
-        krate: CrateId,
+        krate: Crate,
         block: Option<BlockId>,
         traits_from_clauses: Box<[(Ty, TraitId)]>,
         env: chalk_ir::Environment<Interner>,
@@ -109,19 +109,20 @@ pub(crate) fn normalize_projection_query(
 /// Solve a trait goal using Chalk.
 pub(crate) fn trait_solve_query(
     db: &dyn HirDatabase,
-    krate: CrateId,
+    krate: Crate,
     block: Option<BlockId>,
     goal: Canonical<InEnvironment<Goal>>,
 ) -> Option<Solution> {
-    let detail = match &goal.value.goal.data(Interner) {
-        GoalData::DomainGoal(DomainGoal::Holds(WhereClause::Implemented(it))) => {
-            db.trait_data(it.hir_trait_id()).name.display(db.upcast(), Edition::LATEST).to_string()
-        }
+    let _p = tracing::info_span!("trait_solve_query", detail = ?match &goal.value.goal.data(Interner) {
+        GoalData::DomainGoal(DomainGoal::Holds(WhereClause::Implemented(it))) => db
+            .trait_signature(it.hir_trait_id())
+            .name
+            .display(db, Edition::LATEST)
+            .to_string(),
         GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(_))) => "alias_eq".to_owned(),
         _ => "??".to_owned(),
-    };
-    let _p = tracing::info_span!("trait_solve_query", ?detail).entered();
-    tracing::info!("trait_solve_query({:?})", goal.value.goal);
+    })
+    .entered();
 
     if let GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(AliasEq {
         alias: AliasTy::Projection(projection_ty),
@@ -148,7 +149,7 @@ pub(crate) fn trait_solve_query(
 
 fn solve(
     db: &dyn HirDatabase,
-    krate: CrateId,
+    krate: Crate,
     block: Option<BlockId>,
     goal: &chalk_ir::UCanonical<chalk_ir::InEnvironment<chalk_ir::Goal<Interner>>>,
 ) -> Option<chalk_solve::Solution<Interner>> {
@@ -160,7 +161,7 @@ fn solve(
     let fuel = std::cell::Cell::new(CHALK_SOLVER_FUEL);
 
     let should_continue = || {
-        db.unwind_if_cancelled();
+        db.unwind_if_revision_cancelled();
         let remaining = fuel.get();
         fuel.set(remaining - 1);
         if remaining == 0 {
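
The only functional change in the hunk above is `unwind_if_cancelled` becoming `unwind_if_revision_cancelled`; the surrounding fuel check is untouched. A self-contained sketch of the same budget pattern (constant and names illustrative, cancellation check omitted, no Chalk or salsa involved):

use std::cell::Cell;

// Illustrative budget; the real constant in this file is CHALK_SOLVER_FUEL.
const FUEL: i32 = 1000;

fn main() {
    let fuel = Cell::new(FUEL);
    // Mirrors the `should_continue` closure: decrement on every call and
    // report false once the budget is exhausted.
    let should_continue = || {
        let remaining = fuel.get();
        fuel.set(remaining - 1);
        remaining > 0
    };
    let mut steps = 0;
    while should_continue() {
        steps += 1;
    }
    assert_eq!(steps, FUEL);
}
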
@@ -190,11 +191,7 @@ fn solve(
 
     // don't set the TLS for Chalk unless Chalk debugging is active, to make
     // extra sure we only use it for debugging
-    if is_chalk_debug() {
-        crate::tls::set_current_program(db, solve)
-    } else {
-        solve()
-    }
+    if is_chalk_debug() { crate::tls::set_current_program(db, solve) } else { solve() }
 }
 
 struct LoggingRustIrDatabaseLoggingOnDrop<'a>(LoggingRustIrDatabase<Interner, ChalkContext<'a>>);
@@ -285,16 +282,16 @@ impl FnTrait {
 
     pub fn method_name(self) -> Name {
         match self {
-            FnTrait::FnOnce => Name::new_symbol_root(sym::call_once.clone()),
-            FnTrait::FnMut => Name::new_symbol_root(sym::call_mut.clone()),
-            FnTrait::Fn => Name::new_symbol_root(sym::call.clone()),
-            FnTrait::AsyncFnOnce => Name::new_symbol_root(sym::async_call_once.clone()),
-            FnTrait::AsyncFnMut => Name::new_symbol_root(sym::async_call_mut.clone()),
-            FnTrait::AsyncFn => Name::new_symbol_root(sym::async_call.clone()),
+            FnTrait::FnOnce => Name::new_symbol_root(sym::call_once),
+            FnTrait::FnMut => Name::new_symbol_root(sym::call_mut),
+            FnTrait::Fn => Name::new_symbol_root(sym::call),
+            FnTrait::AsyncFnOnce => Name::new_symbol_root(sym::async_call_once),
+            FnTrait::AsyncFnMut => Name::new_symbol_root(sym::async_call_mut),
+            FnTrait::AsyncFn => Name::new_symbol_root(sym::async_call),
         }
     }
 
-    pub fn get_id(self, db: &dyn HirDatabase, krate: CrateId) -> Option<TraitId> {
+    pub fn get_id(self, db: &dyn HirDatabase, krate: Crate) -> Option<TraitId> {
         let target = db.lang_item(krate, self.lang_item())?;
         match target {
             LangItemTarget::Trait(t) => Some(t),
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
index 89d89fe2230af..198f715a6db2a 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
@@ -1,43 +1,39 @@
 //! Helper functions for working with def, which don't need to be a separate
 //! query, but can't be computed directly from `*Data` (ie, which need a `db`).
 
-use std::{hash::Hash, iter};
+use std::iter;
 
-use base_db::CrateId;
+use base_db::Crate;
 use chalk_ir::{
-    fold::{FallibleTypeFolder, Shift},
     DebruijnIndex,
+    fold::{FallibleTypeFolder, Shift},
 };
 use hir_def::{
+    EnumId, EnumVariantId, FunctionId, Lookup, TraitId, TypeAliasId, TypeOrConstParamId,
     db::DefDatabase,
-    generics::{WherePredicate, WherePredicateTypeTarget},
+    hir::generics::WherePredicate,
     lang_item::LangItem,
     resolver::{HasResolver, TypeNs},
     type_ref::{TraitBoundModifier, TypeRef},
-    EnumId, EnumVariantId, FunctionId, Lookup, OpaqueInternableThing, TraitId, TypeAliasId,
-    TypeOrConstParamId,
 };
 use hir_expand::name::Name;
 use intern::sym;
 use rustc_abi::TargetDataLayout;
 use rustc_hash::FxHashSet;
-use smallvec::{smallvec, SmallVec};
+use smallvec::{SmallVec, smallvec};
 use span::Edition;
 use stdx::never;
 
 use crate::{
+    ChalkTraitId, Const, ConstScalar, GenericArg, Interner, Substitution, TargetFeatures, TraitRef,
+    TraitRefExt, Ty, WhereClause,
     consteval::unknown_const,
     db::HirDatabase,
     layout::{Layout, TagEncoding},
     mir::pad16,
-    ChalkTraitId, Const, ConstScalar, GenericArg, Interner, Substitution, TargetFeatures, TraitRef,
-    TraitRefExt, Ty, WhereClause,
 };
 
-pub(crate) fn fn_traits(
-    db: &dyn DefDatabase,
-    krate: CrateId,
-) -> impl Iterator<Item = TraitId> + '_ {
+pub(crate) fn fn_traits(db: &dyn DefDatabase, krate: Crate) -> impl Iterator<Item = TraitId> + '_ {
     [LangItem::Fn, LangItem::FnMut, LangItem::FnOnce]
         .into_iter()
         .filter_map(move |lang| db.lang_item(krate, lang))
@@ -167,26 +163,20 @@ impl Iterator for ClauseElaborator<'_> {
 
 fn direct_super_traits_cb(db: &dyn DefDatabase, trait_: TraitId, cb: impl FnMut(TraitId)) {
     let resolver = trait_.resolver(db);
-    let generic_params = db.generic_params(trait_.into());
+    let (generic_params, store) = db.generic_params_and_store(trait_.into());
     let trait_self = generic_params.trait_self_param();
     generic_params
         .where_predicates()
         .filter_map(|pred| match pred {
             WherePredicate::ForLifetime { target, bound, .. }
             | WherePredicate::TypeBound { target, bound } => {
-                let is_trait = match target {
-                    WherePredicateTypeTarget::TypeRef(type_ref) => {
-                        match &generic_params.types_map[*type_ref] {
-                            TypeRef::Path(p) => p.is_self_type(),
-                            _ => false,
-                        }
-                    }
-                    WherePredicateTypeTarget::TypeOrConstParam(local_id) => {
-                        Some(*local_id) == trait_self
-                    }
+                let is_trait = match &store[*target] {
+                    TypeRef::Path(p) => p.is_self_type(),
+                    TypeRef::TypeParam(p) => Some(p.local_id()) == trait_self,
+                    _ => false,
                 };
                 match is_trait {
-                    true => bound.as_path(&generic_params.types_map),
+                    true => bound.as_path(&store),
                     false => None,
                 }
             }
@@ -229,14 +219,14 @@ pub(super) fn associated_type_by_name_including_super_traits(
     name: &Name,
 ) -> Option<(TraitRef, TypeAliasId)> {
     all_super_trait_refs(db, trait_ref, |t| {
-        let assoc_type = db.trait_data(t.hir_trait_id()).associated_type_by_name(name)?;
+        let assoc_type = db.trait_items(t.hir_trait_id()).associated_type_by_name(name)?;
         Some((t, assoc_type))
     })
 }
 
 /// It is a bit different from the rustc equivalent. Currently it stores:
-/// - 0: the function signature, encoded as a function pointer type
-/// - 1..n: generics of the parent
+/// - 0..n-1: generics of the parent
+/// - n: the function signature, encoded as a function pointer type
 ///
 /// and it doesn't store the closure types and fields.
 ///
@@ -247,7 +237,7 @@ pub(crate) struct ClosureSubst<'a>(pub(crate) &'a Substitution);
 impl<'a> ClosureSubst<'a> {
     pub(crate) fn parent_subst(&self) -> &'a [GenericArg] {
         match self.0.as_slice(Interner) {
-            [_, x @ ..] => x,
+            [x @ .., _] => x,
             _ => {
                 never!("Closure missing parameter");
                 &[]
@@ -257,7 +247,7 @@ impl<'a> ClosureSubst<'a> {
 
     pub(crate) fn sig_ty(&self) -> &'a Ty {
         match self.0.as_slice(Interner) {
-            [x, ..] => x.assert_ty_ref(Interner),
+            [.., x] => x.assert_ty_ref(Interner),
             _ => {
                 unreachable!("Closure missing sig_ty parameter");
             }
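
The doc comment and slice patterns above flip the closure substitution layout: the parent's generics now come first and the encoded function signature is the final element, so `parent_subst` drops the last entry and `sig_ty` takes it. A stand-alone sketch of that split, with plain strings standing in for `GenericArg`/`Ty` (names illustrative, not rust-analyzer API):

// Stand-alone sketch of the new layout: [parent generics..., signature].
fn split_closure_subst(subst: &[String]) -> (&[String], &str) {
    match subst {
        [parent @ .., sig] => (parent, sig.as_str()),
        [] => panic!("closure substitution is never empty"),
    }
}

fn main() {
    let subst = vec!["T".to_owned(), "U".to_owned(), "fn(i32) -> i32".to_owned()];
    let (parent_subst, sig_ty) = split_closure_subst(&subst);
    assert_eq!(parent_subst.to_vec(), vec!["T".to_owned(), "U".to_owned()]);
    assert_eq!(sig_ty, "fn(i32) -> i32");
}
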
@@ -279,7 +269,7 @@ pub fn is_fn_unsafe_to_call(
     caller_target_features: &TargetFeatures,
     call_edition: Edition,
 ) -> Unsafety {
-    let data = db.function_data(func);
+    let data = db.function_signature(func);
     if data.is_unsafe() {
         return Unsafety::Unsafe;
     }
@@ -301,16 +291,16 @@ pub fn is_fn_unsafe_to_call(
         }
     }
 
-    let loc = func.lookup(db.upcast());
+    let loc = func.lookup(db);
     match loc.container {
         hir_def::ItemContainerId::ExternBlockId(block) => {
-            let id = block.lookup(db.upcast()).id;
+            let id = block.lookup(db).id;
             let is_intrinsic_block =
-                id.item_tree(db.upcast())[id.value].abi.as_ref() == Some(&sym::rust_dash_intrinsic);
+                id.item_tree(db)[id.value].abi.as_ref() == Some(&sym::rust_dash_intrinsic);
             if is_intrinsic_block {
                 // legacy intrinsics
                 // extern "rust-intrinsic" intrinsics are unsafe unless they have the rustc_safe_intrinsic attribute
-                if db.attrs(func.into()).by_key(&sym::rustc_safe_intrinsic).exists() {
+                if db.attrs(func.into()).by_key(sym::rustc_safe_intrinsic).exists() {
                     Unsafety::Safe
                 } else {
                     Unsafety::Unsafe
@@ -318,11 +308,7 @@ pub fn is_fn_unsafe_to_call(
             } else {
                 // Function in an `extern` block are always unsafe to call, except when
                 // it is marked as `safe`.
-                if data.is_safe() {
-                    Unsafety::Safe
-                } else {
-                    Unsafety::Unsafe
-                }
+                if data.is_safe() { Unsafety::Safe } else { Unsafety::Unsafe }
             }
         }
         _ => Unsafety::Safe,
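
The branch above classifies functions in extern blocks: legacy `extern "rust-intrinsic"` items are unsafe unless marked `#[rustc_safe_intrinsic]`, and other extern functions are unsafe to call unless declared `safe`. For the non-intrinsic case, this is the surface syntax being classified; a small example assuming a Rust 1.82+ toolchain and a platform libc that provides `abs` and `strlen`:

// `unsafe extern` block (stabilized in Rust 1.82): items marked `safe` are
// callable without an `unsafe` block, everything else stays unsafe to call.
unsafe extern "C" {
    // The declaration itself promises this is safe to call.
    safe fn abs(x: i32) -> i32;
    // No `safe` marker: call sites still need `unsafe`.
    fn strlen(s: *const u8) -> usize;
}

fn main() {
    let three = abs(-3); // no unsafe block needed
    let len = unsafe { strlen(c"hi".as_ptr().cast()) };
    assert_eq!((three, len), (3, 2));
}
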
@@ -372,7 +358,7 @@ pub(crate) fn detect_variant_from_bytes<'a>(
     let (var_id, var_layout) = match &layout.variants {
         hir_def::layout::Variants::Empty => unreachable!(),
         hir_def::layout::Variants::Single { index } => {
-            (db.enum_data(e).variants[index.0].0, layout)
+            (db.enum_variants(e).variants[index.0].0, layout)
         }
         hir_def::layout::Variants::Multiple { tag, tag_encoding, variants, .. } => {
             let size = tag.size(target_data_layout).bytes_usize();
@@ -382,7 +368,7 @@ pub(crate) fn detect_variant_from_bytes<'a>(
                 TagEncoding::Direct => {
                     let (var_idx, layout) =
                         variants.iter_enumerated().find_map(|(var_idx, v)| {
-                            let def = db.enum_data(e).variants[var_idx.0].0;
+                            let def = db.enum_variants(e).variants[var_idx.0].0;
                             (db.const_eval_discriminant(def) == Ok(tag)).then_some((def, v))
                         })?;
                     (var_idx, layout)
@@ -395,35 +381,10 @@ pub(crate) fn detect_variant_from_bytes<'a>(
                         .filter(|x| x != untagged_variant)
                         .nth(candidate_tag)
                         .unwrap_or(*untagged_variant);
-                    (db.enum_data(e).variants[variant.0].0, &variants[variant])
+                    (db.enum_variants(e).variants[variant.0].0, &variants[variant])
                 }
             }
         }
     };
     Some((var_id, var_layout))
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub(crate) struct InTypeConstIdMetadata(pub(crate) Ty);
-
-impl OpaqueInternableThing for InTypeConstIdMetadata {
-    fn dyn_hash(&self, mut state: &mut dyn std::hash::Hasher) {
-        self.hash(&mut state);
-    }
-
-    fn dyn_eq(&self, other: &dyn OpaqueInternableThing) -> bool {
-        other.as_any().downcast_ref::<Self>() == Some(self)
-    }
-
-    fn dyn_clone(&self) -> Box<dyn OpaqueInternableThing> {
-        Box::new(self.clone())
-    }
-
-    fn as_any(&self) -> &dyn std::any::Any {
-        self
-    }
-
-    fn box_any(&self) -> Box<dyn std::any::Any> {
-        Box::new(self.clone())
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs b/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs
index 3a22158ce6f1d..4e9aa5610a526 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs
@@ -14,15 +14,15 @@
 //! while installing firewall per item queries to prevent invalidation issues.
 
 use crate::db::HirDatabase;
-use crate::generics::{generics, Generics};
+use crate::generics::{Generics, generics};
 use crate::{
     AliasTy, Const, ConstScalar, DynTyExt, GenericArg, GenericArgData, Interner, Lifetime,
     LifetimeData, Ty, TyKind,
 };
-use base_db::ra_salsa::Cycle;
 use chalk_ir::Mutability;
-use hir_def::data::adt::StructFlags;
+use hir_def::signatures::StructFlags;
 use hir_def::{AdtId, GenericDefId, GenericParamId, VariantId};
+use salsa::CycleRecoveryAction;
 use std::fmt;
 use std::ops::Not;
 use stdx::never;
@@ -34,7 +34,7 @@ pub(crate) fn variances_of(db: &dyn HirDatabase, def: GenericDefId) -> Option<Ar
         GenericDefId::FunctionId(_) => (),
         GenericDefId::AdtId(adt) => {
             if let AdtId::StructId(id) = adt {
-                let flags = &db.struct_data(id).flags;
+                let flags = &db.struct_signature(id).flags;
                 if flags.contains(StructFlags::IS_UNSAFE_CELL) {
                     return Some(Arc::from_iter(vec![Variance::Invariant; 1]));
                 } else if flags.contains(StructFlags::IS_PHANTOM_DATA) {
@@ -45,7 +45,7 @@ pub(crate) fn variances_of(db: &dyn HirDatabase, def: GenericDefId) -> Option<Ar
         _ => return None,
     }
 
-    let generics = generics(db.upcast(), def);
+    let generics = generics(db, def);
     let count = generics.len();
     if count == 0 {
         return None;
@@ -55,12 +55,20 @@ pub(crate) fn variances_of(db: &dyn HirDatabase, def: GenericDefId) -> Option<Ar
     variances.is_empty().not().then(|| Arc::from_iter(variances))
 }
 
-pub(crate) fn variances_of_cycle(
+pub(crate) fn variances_of_cycle_fn(
+    _db: &dyn HirDatabase,
+    _result: &Option<Arc<[Variance]>>,
+    _count: u32,
+    _def: GenericDefId,
+) -> CycleRecoveryAction<Option<Arc<[Variance]>>> {
+    CycleRecoveryAction::Iterate
+}
+
+pub(crate) fn variances_of_cycle_initial(
     db: &dyn HirDatabase,
-    _cycle: &Cycle,
-    def: &GenericDefId,
+    def: GenericDefId,
 ) -> Option<Arc<[Variance]>> {
-    let generics = generics(db.upcast(), *def);
+    let generics = generics(db, def);
     let count = generics.len();
 
     if count == 0 {
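
`variances_of_cycle_fn`/`variances_of_cycle_initial` above replace the old `Cycle`-based fallback: the new salsa seeds a cyclic query with the value produced by the initial function and, as long as the recovery function answers `CycleRecoveryAction::Iterate`, keeps re-evaluating until the result stops changing. How the two functions are attached to the query is not shown in this hunk, so the sketch below only illustrates the fixpoint idea itself, with no salsa involved and all names invented:

// Conceptual illustration only (no salsa): keep re-running a computation,
// feeding back the provisional result, until it stops changing.
fn fixpoint<T: PartialEq>(initial: T, step: impl Fn(&T) -> T) -> T {
    let mut current = initial;
    loop {
        let next = step(&current);
        if next == current {
            return current;
        }
        current = next;
    }
}

fn main() {
    // Toy recursive definition x = min(x + 1, 3): starting from 0 it settles
    // at 3, the way variances are refined from an initial guess until stable.
    assert_eq!(fixpoint(0u32, |&x| (x + 1).min(3)), 3);
}

This is also why the `prove_fixedpoint` test further down in this file now expects `covariant` instead of the old `bivariant` fallback.
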
@@ -206,7 +214,7 @@ impl Context<'_> {
                     AdtId::StructId(s) => add_constraints_from_variant(VariantId::StructId(s)),
                     AdtId::UnionId(u) => add_constraints_from_variant(VariantId::UnionId(u)),
                     AdtId::EnumId(e) => {
-                        db.enum_data(e).variants.iter().for_each(|&(variant, _)| {
+                        db.enum_variants(e).variants.iter().for_each(|&(variant, _)| {
                             add_constraints_from_variant(VariantId::EnumVariantId(variant))
                         });
                     }
@@ -487,13 +495,13 @@ impl Context<'_> {
 
 #[cfg(test)]
 mod tests {
-    use expect_test::{expect, Expect};
+    use expect_test::{Expect, expect};
     use hir_def::{
-        generics::GenericParamDataRef, src::HasSource, AdtId, GenericDefId, ModuleDefId,
+        AdtId, GenericDefId, ModuleDefId, hir::generics::GenericParamDataRef, src::HasSource,
     };
     use itertools::Itertools;
     use stdx::format_to;
-    use syntax::{ast::HasName, AstNode};
+    use syntax::{AstNode, ast::HasName};
     use test_fixture::WithFixture;
 
     use hir_def::Lookup;
@@ -953,16 +961,12 @@ struct S3<T>(S<T, T>);
 
     #[test]
     fn prove_fixedpoint() {
-        // FIXME: This is wrong, this should be `FixedPoint[T: covariant, U: covariant, V: covariant]`
-        // This is a limitation of current salsa where a cycle may only set a fallback value to the
-        // query result, but we need to solve a fixpoint here. The new salsa will have this
-        // fortunately.
         check(
             r#"
 struct FixedPoint<T, U, V>(&'static FixedPoint<(), T, U>, V);
 "#,
             expect![[r#"
-                FixedPoint[T: bivariant, U: bivariant, V: bivariant]
+                FixedPoint[T: covariant, U: covariant, V: covariant]
             "#]],
         );
     }
@@ -979,7 +983,7 @@ struct FixedPoint<T, U, V>(&'static FixedPoint<(), T, U>, V);
         let (db, file_id) = TestDB::with_single_file(ra_fixture);
 
         let mut defs: Vec<GenericDefId> = Vec::new();
-        let module = db.module_for_file_opt(file_id).unwrap();
+        let module = db.module_for_file_opt(file_id.file_id(&db)).unwrap();
         let def_map = module.def_map(&db);
         crate::tests::visit_module(&db, &def_map, module.local_id, &mut |it| {
             defs.push(match it {
diff --git a/src/tools/rust-analyzer/crates/hir/src/attrs.rs b/src/tools/rust-analyzer/crates/hir/src/attrs.rs
index 4e45b5a250eb5..b1c478d1bf401 100644
--- a/src/tools/rust-analyzer/crates/hir/src/attrs.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/attrs.rs
@@ -3,14 +3,17 @@
 use std::ops::ControlFlow;
 
 use hir_def::{
+    AssocItemId, AttrDefId, ModuleDefId,
     attr::AttrsWithOwner,
+    expr_store::path::Path,
     item_scope::ItemInNs,
-    path::{ModPath, Path},
     per_ns::Namespace,
     resolver::{HasResolver, Resolver, TypeNs},
-    AssocItemId, AttrDefId, ModuleDefId,
 };
-use hir_expand::{mod_path::PathKind, name::Name};
+use hir_expand::{
+    mod_path::{ModPath, PathKind},
+    name::Name,
+};
 use hir_ty::{db::HirDatabase, method_resolution};
 
 use crate::{
@@ -30,7 +33,7 @@ macro_rules! impl_has_attrs {
         impl HasAttrs for $def {
             fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
                 let def = AttrDefId::$def_id(self.into());
-                AttrsWithOwner::new(db.upcast(), def)
+                AttrsWithOwner::new(db, def)
             }
             fn attr_id(self) -> AttrDefId {
                 AttrDefId::$def_id(self.into())
@@ -92,7 +95,7 @@ impl HasAttrs for AssocItem {
 impl HasAttrs for crate::Crate {
     fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
         let def = AttrDefId::ModuleId(self.root_module().id);
-        AttrsWithOwner::new(db.upcast(), def)
+        AttrsWithOwner::new(db, def)
     }
     fn attr_id(self) -> AttrDefId {
         AttrDefId::ModuleId(self.root_module().id)
@@ -116,27 +119,27 @@ fn resolve_doc_path_on_(
     ns: Option<Namespace>,
 ) -> Option<DocLinkDef> {
     let resolver = match attr_id {
-        AttrDefId::ModuleId(it) => it.resolver(db.upcast()),
-        AttrDefId::FieldId(it) => it.parent.resolver(db.upcast()),
-        AttrDefId::AdtId(it) => it.resolver(db.upcast()),
-        AttrDefId::FunctionId(it) => it.resolver(db.upcast()),
-        AttrDefId::EnumVariantId(it) => it.resolver(db.upcast()),
-        AttrDefId::StaticId(it) => it.resolver(db.upcast()),
-        AttrDefId::ConstId(it) => it.resolver(db.upcast()),
-        AttrDefId::TraitId(it) => it.resolver(db.upcast()),
-        AttrDefId::TraitAliasId(it) => it.resolver(db.upcast()),
-        AttrDefId::TypeAliasId(it) => it.resolver(db.upcast()),
-        AttrDefId::ImplId(it) => it.resolver(db.upcast()),
-        AttrDefId::ExternBlockId(it) => it.resolver(db.upcast()),
-        AttrDefId::UseId(it) => it.resolver(db.upcast()),
-        AttrDefId::MacroId(it) => it.resolver(db.upcast()),
-        AttrDefId::ExternCrateId(it) => it.resolver(db.upcast()),
+        AttrDefId::ModuleId(it) => it.resolver(db),
+        AttrDefId::FieldId(it) => it.parent.resolver(db),
+        AttrDefId::AdtId(it) => it.resolver(db),
+        AttrDefId::FunctionId(it) => it.resolver(db),
+        AttrDefId::EnumVariantId(it) => it.resolver(db),
+        AttrDefId::StaticId(it) => it.resolver(db),
+        AttrDefId::ConstId(it) => it.resolver(db),
+        AttrDefId::TraitId(it) => it.resolver(db),
+        AttrDefId::TraitAliasId(it) => it.resolver(db),
+        AttrDefId::TypeAliasId(it) => it.resolver(db),
+        AttrDefId::ImplId(it) => it.resolver(db),
+        AttrDefId::ExternBlockId(it) => it.resolver(db),
+        AttrDefId::UseId(it) => it.resolver(db),
+        AttrDefId::MacroId(it) => it.resolver(db),
+        AttrDefId::ExternCrateId(it) => it.resolver(db),
         AttrDefId::GenericParamId(_) => return None,
     };
 
     let mut modpath = doc_modpath_from_str(link)?;
 
-    let resolved = resolver.resolve_module_path_in_items(db.upcast(), &modpath);
+    let resolved = resolver.resolve_module_path_in_items(db, &modpath);
     if resolved.is_none() {
         let last_name = modpath.pop_segment()?;
         resolve_assoc_or_field(db, resolver, modpath, last_name, ns)
@@ -165,7 +168,7 @@ fn resolve_assoc_or_field(
     let path = Path::from_known_path_with_no_generic(path);
     // FIXME: This does not handle `Self` on trait definitions, which we should resolve to the
     // trait itself.
-    let base_def = resolver.resolve_path_in_type_ns_fully(db.upcast(), &path)?;
+    let base_def = resolver.resolve_path_in_type_ns_fully(db, &path)?;
 
     let ty = match base_def {
         TypeNs::SelfType(id) => Impl::from(id).self_ty(db),
@@ -194,7 +197,7 @@ fn resolve_assoc_or_field(
             // Doc paths in this context may only resolve to an item of this trait
             // (i.e. no items of its supertraits), so we need to handle them here
             // independently of others.
-            return db.trait_data(id).items.iter().find(|it| it.0 == name).map(|(_, assoc_id)| {
+            return db.trait_items(id).items.iter().find(|it| it.0 == name).map(|(_, assoc_id)| {
                 let def = match *assoc_id {
                     AssocItemId::FunctionId(it) => ModuleDef::Function(it.into()),
                     AssocItemId::ConstId(it) => ModuleDef::Const(it.into()),
@@ -207,6 +210,9 @@ fn resolve_assoc_or_field(
             // XXX: Do these get resolved?
             return None;
         }
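+        // A module is not a type, so there are no associated items or fields to find on it.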
+        TypeNs::ModuleId(_) => {
+            return None;
+        }
     };
 
     // Resolve inherent items first, then trait items, then fields.
@@ -252,7 +258,7 @@ fn resolve_impl_trait_item(
     let environment = resolver
         .generic_def()
         .map_or_else(|| crate::TraitEnvironment::empty(krate.id), |d| db.trait_environment(d));
-    let traits_in_scope = resolver.traits_in_scope(db.upcast());
+    let traits_in_scope = resolver.traits_in_scope(db);
 
     let mut result = None;
 
@@ -260,7 +266,7 @@ fn resolve_impl_trait_item(
     // attributes here. Use path resolution directly instead.
     //
     // FIXME: resolve type aliases (which are not yielded by iterate_path_candidates)
-    let _ = method_resolution::iterate_path_candidates(
+    _ = method_resolution::iterate_path_candidates(
         &canonical,
         db,
         environment,
@@ -273,11 +279,7 @@ fn resolve_impl_trait_item(
             // disambiguation) so we just pick the first one we find as well.
             result = as_module_def_if_namespace_matches(assoc_item_id.into(), ns);
 
-            if result.is_some() {
-                ControlFlow::Break(())
-            } else {
-                ControlFlow::Continue(())
-            }
+            if result.is_some() { ControlFlow::Break(()) } else { ControlFlow::Continue(()) }
         },
     );
 
diff --git a/src/tools/rust-analyzer/crates/hir/src/db.rs b/src/tools/rust-analyzer/crates/hir/src/db.rs
index 22760c41aaecf..64d97b3f2a238 100644
--- a/src/tools/rust-analyzer/crates/hir/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/db.rs
@@ -3,44 +3,43 @@
 //! we didn't do that.
 //!
 //! But we need this for at least LRU caching at the query level.
-pub use hir_def::db::{
-    AttrsQuery, BlockDefMapQuery, BlockItemTreeQuery, BlockItemTreeWithSourceMapQuery, BodyQuery,
-    BodyWithSourceMapQuery, ConstDataQuery, ConstVisibilityQuery, CrateDefMapQuery,
-    CrateLangItemsQuery, CrateNotableTraitsQuery, CrateSupportsNoStdQuery, DefDatabase,
-    DefDatabaseStorage, EnumDataQuery, EnumVariantDataWithDiagnosticsQuery,
-    ExpandProcAttrMacrosQuery, ExprScopesQuery, ExternCrateDeclDataQuery, FieldVisibilitiesQuery,
-    FieldsAttrsQuery, FieldsAttrsSourceMapQuery, FileItemTreeQuery, FileItemTreeWithSourceMapQuery,
-    FunctionDataQuery, FunctionVisibilityQuery, GenericParamsQuery,
-    GenericParamsWithSourceMapQuery, ImplDataWithDiagnosticsQuery, ImportMapQuery,
-    IncludeMacroInvocQuery, InternAnonymousConstQuery, InternBlockQuery, InternConstQuery,
-    InternDatabase, InternDatabaseStorage, InternEnumQuery, InternExternBlockQuery,
-    InternExternCrateQuery, InternFunctionQuery, InternImplQuery, InternInTypeConstQuery,
-    InternMacro2Query, InternMacroRulesQuery, InternProcMacroQuery, InternStaticQuery,
-    InternStructQuery, InternTraitAliasQuery, InternTraitQuery, InternTypeAliasQuery,
-    InternUnionQuery, InternUseQuery, LangItemQuery, Macro2DataQuery, MacroDefQuery,
-    MacroRulesDataQuery, NotableTraitsInDepsQuery, ProcMacroDataQuery, StaticDataQuery,
-    StructDataWithDiagnosticsQuery, TraitAliasDataQuery, TraitDataWithDiagnosticsQuery,
-    TypeAliasDataQuery, UnionDataWithDiagnosticsQuery,
-};
-pub use hir_expand::db::{
-    AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage,
-    ExpandProcMacroQuery, InternMacroCallQuery, InternSyntaxContextQuery, MacroArgQuery,
-    ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, ProcMacroSpanQuery, ProcMacrosQuery,
-    RealSpanMapQuery,
-};
-pub use hir_ty::db::{
-    AdtDatumQuery, AdtVarianceQuery, AssociatedTyDataQuery, AssociatedTyValueQuery, BorrowckQuery,
-    CallableItemSignatureQuery, ConstEvalDiscriminantQuery, ConstEvalQuery, ConstEvalStaticQuery,
-    ConstParamTyQuery, DynCompatibilityOfTraitQuery, FieldTypesQuery, FnDefDatumQuery,
-    FnDefVarianceQuery, GenericDefaultsQuery, GenericPredicatesForParamQuery,
-    GenericPredicatesQuery, GenericPredicatesWithoutParentQuery, HirDatabase, HirDatabaseStorage,
-    ImplDatumQuery, ImplSelfTyQuery, ImplTraitQuery, IncoherentInherentImplCratesQuery, InferQuery,
-    InherentImplsInBlockQuery, InherentImplsInCrateQuery, InternCallableDefQuery,
-    InternClosureQuery, InternCoroutineQuery, InternImplTraitIdQuery, InternLifetimeParamIdQuery,
-    InternTypeOrConstParamIdQuery, LayoutOfAdtQuery, LayoutOfTyQuery, LookupImplMethodQuery,
-    MirBodyForClosureQuery, MirBodyQuery, MonomorphizedMirBodyForClosureQuery,
-    MonomorphizedMirBodyQuery, ProgramClausesForChalkEnvQuery, ReturnTypeImplTraitsQuery,
-    TargetDataLayoutQuery, TraitDatumQuery, TraitEnvironmentQuery, TraitImplsInBlockQuery,
-    TraitImplsInCrateQuery, TraitImplsInDepsQuery, TraitSolveQuery, TyQuery,
-    TypeAliasImplTraitsQuery, ValueTyQuery,
-};
+pub use hir_def::db::DefDatabase;
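+// NOTE: only the database traits are re-exported now; the old per-query
+// re-exports are kept below as comments.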
+//     AttrsQuery, BlockDefMapQuery, BlockItemTreeQuery, BlockItemTreeWithSourceMapQuery, BodyQuery,
+//     BodyWithSourceMapQuery, ConstDataQuery, ConstVisibilityQuery, CrateDefMapQuery,
+//     CrateLangItemsQuery, CrateNotableTraitsQuery, CrateSupportsNoStdQuery, DefDatabase,
+//     DefDatabaseStorage, EnumDataQuery, EnumVariantDataWithDiagnosticsQuery,
+//     ExpandProcAttrMacrosQuery, ExprScopesQuery, ExternCrateDeclDataQuery, FieldVisibilitiesQuery,
+//     FieldsAttrsQuery, FieldsAttrsSourceMapQuery, FileItemTreeQuery, FileItemTreeWithSourceMapQuery,
+//     FunctionDataQuery, FunctionVisibilityQuery, GenericParamsQuery,
+//     GenericParamsWithSourceMapQuery, ImplItemsWithDiagnosticsQuery, ImportMapQuery,
+//     IncludeMacroInvocQuery, InternAnonymousConstQuery, InternBlockQuery, InternConstQuery,
+//     InternDatabase, InternDatabaseStorage, InternEnumQuery, InternExternBlockQuery,
+//     InternExternCrateQuery, InternFunctionQuery, InternImplQuery, InternInTypeConstQuery,
+//     InternMacro2Query, InternMacroRulesQuery, InternProcMacroQuery, InternStaticQuery,
+//     InternStructQuery, InternTraitAliasQuery, InternTraitQuery, InternTypeAliasQuery,
+//     InternUnionQuery, InternUseQuery, LangItemQuery, Macro2DataQuery, MacroDefQuery,
+//     MacroRulesDataQuery, NotableTraitsInDepsQuery, ProcMacroDataQuery, StaticDataQuery,
+//     StructDataWithDiagnosticsQuery, TraitAliasDataQuery, TraitItemsWithDiagnosticsQuery,
+//     TypeAliasDataQuery, UnionDataWithDiagnosticsQuery,
+// };
+pub use hir_expand::db::ExpandDatabase;
+// AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage,
+// ExpandProcMacroQuery, InternMacroCallQuery, InternSyntaxContextQuery, MacroArgQuery,
+// ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, ProcMacroSpanQuery, ProcMacrosQuery,
+// RealSpanMapQuery,
+pub use hir_ty::db::HirDatabase;
+//     AdtDatumQuery, AdtVarianceQuery, AssociatedTyDataQuery, AssociatedTyValueQuery, BorrowckQuery,
+//     CallableItemSignatureQuery, ConstEvalDiscriminantQuery, ConstEvalQuery, ConstEvalStaticQuery,
+//     ConstParamTyQuery, DynCompatibilityOfTraitQuery, FieldTypesQuery, FnDefDatumQuery,
+//     FnDefVarianceQuery, GenericDefaultsQuery, GenericPredicatesForParamQuery,
+//     GenericPredicatesQuery, GenericPredicatesWithoutParentQuery, HirDatabase, HirDatabaseStorage,
+//     ImplDatumQuery, ImplSelfTyQuery, ImplTraitQuery, IncoherentInherentImplCratesQuery, InferQuery,
+//     InherentImplsInBlockQuery, InherentImplsInCrateQuery, InternCallableDefQuery,
+//     InternClosureQuery, InternCoroutineQuery, InternImplTraitIdQuery, InternLifetimeParamIdQuery,
+//     InternTypeOrConstParamIdQuery, LayoutOfAdtQuery, LayoutOfTyQuery, LookupImplMethodQuery,
+//     MirBodyForClosureQuery, MirBodyQuery, MonomorphizedMirBodyForClosureQuery,
+//     MonomorphizedMirBodyQuery, ProgramClausesForChalkEnvQuery, ReturnTypeImplTraitsQuery,
+//     TargetDataLayoutQuery, TraitDatumQuery, TraitEnvironmentQuery, TraitImplsInBlockQuery,
+//     TraitImplsInCrateQuery, TraitImplsInDepsQuery, TraitSolveQuery, TyQuery,
+//     TypeAliasImplTraitsQuery, ValueTyQuery,
+// };
diff --git a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
index 1ed0daa375630..b6e3002ed5d4e 100644
--- a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
@@ -6,31 +6,33 @@
 use cfg::{CfgExpr, CfgOptions};
 use either::Either;
 use hir_def::{
-    expr_store::ExprOrPatPtr,
+    DefWithBodyId, GenericParamId, SyntheticSyntax,
+    expr_store::{
+        ExprOrPatPtr, ExpressionStoreSourceMap, hir_assoc_type_binding_to_ast,
+        hir_generic_arg_to_ast, hir_segment_to_ast_segment,
+    },
     hir::ExprOrPatId,
-    path::{hir_segment_to_ast_segment, ModPath},
-    type_ref::TypesSourceMap,
-    DefWithBodyId, SyntheticSyntax,
 };
-use hir_expand::{name::Name, HirFileId, InFile};
+use hir_expand::{HirFileId, InFile, mod_path::ModPath, name::Name};
 use hir_ty::{
+    CastError, InferenceDiagnostic, InferenceTyDiagnosticSource, PathGenericsSource,
+    PathLoweringDiagnostic, TyLoweringDiagnostic, TyLoweringDiagnosticKind,
     db::HirDatabase,
     diagnostics::{BodyValidationDiagnostic, UnsafetyReason},
-    CastError, InferenceDiagnostic, InferenceTyDiagnosticSource, PathLoweringDiagnostic,
-    TyLoweringDiagnostic, TyLoweringDiagnosticKind,
 };
 use syntax::{
+    AstNode, AstPtr, SyntaxError, SyntaxNodePtr, TextRange,
     ast::{self, HasGenericArgs},
-    match_ast, AstNode, AstPtr, SyntaxError, SyntaxNodePtr, TextRange,
+    match_ast,
 };
 use triomphe::Arc;
 
-use crate::{AssocItem, Field, Function, Local, Trait, Type};
+use crate::{AssocItem, Field, Function, GenericDef, Local, Trait, Type};
 
 pub use hir_def::VariantId;
 pub use hir_ty::{
+    GenericArgsProhibitedReason, IncorrectGenericsLenKind,
     diagnostics::{CaseType, IncorrectCase},
-    GenericArgsProhibitedReason,
 };
 
 macro_rules! diagnostics {
@@ -113,6 +115,11 @@ diagnostics![
     UnusedVariable,
     GenericArgsProhibited,
     ParenthesizedGenericArgsWithoutFnTrait,
+    BadRtn,
+    IncorrectGenericsLen,
+    IncorrectGenericsOrder,
+    MissingLifetime,
+    ElidedLifetimesInPath,
 ];
 
 #[derive(Debug)]
@@ -420,6 +427,61 @@ pub struct ParenthesizedGenericArgsWithoutFnTrait {
     pub args: InFile<AstPtr<ast::ParenthesizedArgList>>,
 }
 
+#[derive(Debug)]
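+/// Return type notation (`(..)`) was written in a position where it is not supported.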
+pub struct BadRtn {
+    pub rtn: InFile<AstPtr<ast::ReturnTypeSyntax>>,
+}
+
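+/// Wrong number of generic arguments provided for a definition.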
+#[derive(Debug)]
+pub struct IncorrectGenericsLen {
+    /// Points at the name if there are no generics.
+    pub generics_or_segment: InFile<AstPtr<Either<ast::GenericArgList, ast::NameRef>>>,
+    pub kind: IncorrectGenericsLenKind,
+    pub provided: u32,
+    pub expected: u32,
+    pub def: GenericDef,
+}
+
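+/// Required lifetime arguments are missing (or could not be elided).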
+#[derive(Debug)]
+pub struct MissingLifetime {
+    /// Points at the name if there are no generics.
+    pub generics_or_segment: InFile<AstPtr<Either<ast::GenericArgList, ast::NameRef>>>,
+    pub expected: u32,
+    pub def: GenericDef,
+}
+
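+/// Lifetimes were elided in a path that expects them to be written explicitly.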
+#[derive(Debug)]
+pub struct ElidedLifetimesInPath {
+    /// Points at the name if there are no generics.
+    pub generics_or_segment: InFile<AstPtr<Either<ast::GenericArgList, ast::NameRef>>>,
+    pub expected: u32,
+    pub def: GenericDef,
+    pub hard_error: bool,
+}
+
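+/// The kind of a generic argument, used when reporting arguments provided in the wrong order.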
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum GenericArgKind {
+    Lifetime,
+    Type,
+    Const,
+}
+
+impl GenericArgKind {
+    fn from_id(id: GenericParamId) -> Self {
+        match id {
+            GenericParamId::TypeParamId(_) => GenericArgKind::Type,
+            GenericParamId::ConstParamId(_) => GenericArgKind::Const,
+            GenericParamId::LifetimeParamId(_) => GenericArgKind::Lifetime,
+        }
+    }
+}
+
+#[derive(Debug)]
+pub struct IncorrectGenericsOrder {
+    pub provided_arg: InFile<AstPtr<ast::GenericArg>>,
+    pub expected_kind: GenericArgKind,
+}
+
 impl AnyDiagnostic {
     pub(crate) fn body_validation_diagnostic(
         db: &dyn HirDatabase,
@@ -428,7 +490,7 @@ impl AnyDiagnostic {
     ) -> Option<AnyDiagnostic> {
         match diagnostic {
             BodyValidationDiagnostic::RecordMissingFields { record, variant, missed_fields } => {
-                let variant_data = variant.variant_data(db.upcast());
+                let variant_data = variant.variant_data(db);
                 let missed_fields = missed_fields
                     .into_iter()
                     .map(|idx| variant_data.fields()[idx].name.clone())
@@ -439,7 +501,7 @@ impl AnyDiagnostic {
                     Either::Right(record_pat) => source_map.pat_syntax(record_pat).ok()?,
                 };
                 let file = record.file_id;
-                let root = record.file_syntax(db.upcast());
+                let root = record.file_syntax(db);
                 match record.value.to_node(&root) {
                     Either::Left(ast::Expr::RecordExpr(record_expr)) => {
                         if record_expr.record_expr_field_list().is_some() {
@@ -488,7 +550,7 @@ impl AnyDiagnostic {
             BodyValidationDiagnostic::MissingMatchArms { match_expr, uncovered_patterns } => {
                 match source_map.expr_syntax(match_expr) {
                     Ok(source_ptr) => {
-                        let root = source_ptr.file_syntax(db.upcast());
+                        let root = source_ptr.file_syntax(db);
                         if let Either::Left(ast::Expr::MatchExpr(match_expr)) =
                             &source_ptr.value.to_node(&root)
                         {
@@ -559,14 +621,21 @@ impl AnyDiagnostic {
         db: &dyn HirDatabase,
         def: DefWithBodyId,
         d: &InferenceDiagnostic,
-        outer_types_source_map: &TypesSourceMap,
         source_map: &hir_def::expr_store::BodySourceMap,
+        sig_map: &hir_def::expr_store::ExpressionStoreSourceMap,
     ) -> Option<AnyDiagnostic> {
         let expr_syntax = |expr| {
-            source_map.expr_syntax(expr).inspect_err(|_| stdx::never!("synthetic syntax")).ok()
+            source_map
+                .expr_syntax(expr)
+                .inspect_err(|_| stdx::never!("inference diagnostic in desugared expr"))
+                .ok()
+        };
+        let pat_syntax = |pat| {
+            source_map
+                .pat_syntax(pat)
+                .inspect_err(|_| stdx::never!("inference diagnostic in desugared pattern"))
+                .ok()
         };
-        let pat_syntax =
-            |pat| source_map.pat_syntax(pat).inspect_err(|_| stdx::never!("synthetic syntax")).ok();
         let expr_or_pat_syntax = |id| match id {
             ExprOrPatId::ExprId(expr) => expr_syntax(expr),
             ExprOrPatId::PatId(pat) => pat_syntax(pat),
@@ -682,8 +751,8 @@ impl AnyDiagnostic {
             }
             InferenceDiagnostic::TyDiagnostic { source, diag } => {
                 let source_map = match source {
-                    InferenceTyDiagnosticSource::Body => &source_map.types,
-                    InferenceTyDiagnosticSource::Signature => outer_types_source_map,
+                    InferenceTyDiagnosticSource::Body => source_map,
+                    InferenceTyDiagnosticSource::Signature => sig_map,
                 };
                 Self::ty_diagnostic(diag, source_map, db)?
             }
@@ -702,6 +771,47 @@ impl AnyDiagnostic {
                 };
                 Self::path_diagnostic(diag, source.with_value(path))?
             }
+            &InferenceDiagnostic::MethodCallIncorrectGenericsLen {
+                expr,
+                provided_count,
+                expected_count,
+                kind,
+                def,
+            } => {
+                let syntax = expr_syntax(expr)?;
+                let file_id = syntax.file_id;
+                let syntax =
+                    syntax.with_value(syntax.value.cast::<ast::MethodCallExpr>()?).to_node(db);
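+                // Point the diagnostic at the written generic argument list if there is
+                // one, otherwise at the method name.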
+                let generics_or_name = syntax
+                    .generic_arg_list()
+                    .map(Either::Left)
+                    .or_else(|| syntax.name_ref().map(Either::Right))?;
+                let generics_or_name = InFile::new(file_id, AstPtr::new(&generics_or_name));
+                IncorrectGenericsLen {
+                    generics_or_segment: generics_or_name,
+                    kind,
+                    provided: provided_count,
+                    expected: expected_count,
+                    def: def.into(),
+                }
+                .into()
+            }
+            &InferenceDiagnostic::MethodCallIncorrectGenericsOrder {
+                expr,
+                param_id,
+                arg_idx,
+                has_self_arg,
+            } => {
+                let syntax = expr_syntax(expr)?;
+                let file_id = syntax.file_id;
+                let syntax =
+                    syntax.with_value(syntax.value.cast::<ast::MethodCallExpr>()?).to_node(db);
+                let generic_args = syntax.generic_arg_list()?;
+                let provided_arg = hir_generic_arg_to_ast(&generic_args, arg_idx, has_self_arg)?;
+                let provided_arg = InFile::new(file_id, AstPtr::new(&provided_arg));
+                let expected_kind = GenericArgKind::from_id(param_id);
+                IncorrectGenericsOrder { provided_arg, expected_kind }.into()
+            }
         })
     }
 
@@ -712,6 +822,12 @@ impl AnyDiagnostic {
         Some(match *diag {
             PathLoweringDiagnostic::GenericArgsProhibited { segment, reason } => {
                 let segment = hir_segment_to_ast_segment(&path.value, segment)?;
+
+                if let Some(rtn) = segment.return_type_syntax() {
+                    // RTN misuse surfaces from path lowering as `GenericArgsProhibited` or
+                    // `ParenthesizedGenericArgsWithoutFnTrait`; report the dedicated `BadRtn` diagnostic instead.
+                    return Some(BadRtn { rtn: path.with_value(AstPtr::new(&rtn)) }.into());
+                }
+
                 let args = if let Some(generics) = segment.generic_arg_list() {
                     AstPtr::new(&generics).wrap_left()
                 } else {
@@ -722,27 +838,84 @@ impl AnyDiagnostic {
             }
             PathLoweringDiagnostic::ParenthesizedGenericArgsWithoutFnTrait { segment } => {
                 let segment = hir_segment_to_ast_segment(&path.value, segment)?;
+
+                if let Some(rtn) = segment.return_type_syntax() {
+                    // RTN misuse surfaces from path lowering as `GenericArgsProhibited` or
+                    // `ParenthesizedGenericArgsWithoutFnTrait`; report the dedicated `BadRtn` diagnostic instead.
+                    return Some(BadRtn { rtn: path.with_value(AstPtr::new(&rtn)) }.into());
+                }
+
                 let args = AstPtr::new(&segment.parenthesized_arg_list()?);
                 let args = path.with_value(args);
                 ParenthesizedGenericArgsWithoutFnTrait { args }.into()
             }
+            PathLoweringDiagnostic::IncorrectGenericsLen {
+                generics_source,
+                provided_count,
+                expected_count,
+                kind,
+                def,
+            } => {
+                let generics_or_segment =
+                    path_generics_source_to_ast(&path.value, generics_source)?;
+                let generics_or_segment = path.with_value(AstPtr::new(&generics_or_segment));
+                IncorrectGenericsLen {
+                    generics_or_segment,
+                    kind,
+                    provided: provided_count,
+                    expected: expected_count,
+                    def: def.into(),
+                }
+                .into()
+            }
+            PathLoweringDiagnostic::IncorrectGenericsOrder {
+                generics_source,
+                param_id,
+                arg_idx,
+                has_self_arg,
+            } => {
+                let generic_args =
+                    path_generics_source_to_ast(&path.value, generics_source)?.left()?;
+                let provided_arg = hir_generic_arg_to_ast(&generic_args, arg_idx, has_self_arg)?;
+                let provided_arg = path.with_value(AstPtr::new(&provided_arg));
+                let expected_kind = GenericArgKind::from_id(param_id);
+                IncorrectGenericsOrder { provided_arg, expected_kind }.into()
+            }
+            PathLoweringDiagnostic::MissingLifetime { generics_source, expected_count, def }
+            | PathLoweringDiagnostic::ElisionFailure { generics_source, expected_count, def } => {
+                let generics_or_segment =
+                    path_generics_source_to_ast(&path.value, generics_source)?;
+                let generics_or_segment = path.with_value(AstPtr::new(&generics_or_segment));
+                MissingLifetime { generics_or_segment, expected: expected_count, def: def.into() }
+                    .into()
+            }
+            PathLoweringDiagnostic::ElidedLifetimesInPath {
+                generics_source,
+                expected_count,
+                def,
+                hard_error,
+            } => {
+                let generics_or_segment =
+                    path_generics_source_to_ast(&path.value, generics_source)?;
+                let generics_or_segment = path.with_value(AstPtr::new(&generics_or_segment));
+                ElidedLifetimesInPath {
+                    generics_or_segment,
+                    expected: expected_count,
+                    def: def.into(),
+                    hard_error,
+                }
+                .into()
+            }
         })
     }
 
     pub(crate) fn ty_diagnostic(
         diag: &TyLoweringDiagnostic,
-        source_map: &TypesSourceMap,
+        source_map: &ExpressionStoreSourceMap,
         db: &dyn HirDatabase,
     ) -> Option<AnyDiagnostic> {
-        let source = match diag.source {
-            Either::Left(type_ref_id) => {
-                let Ok(source) = source_map.type_syntax(type_ref_id) else {
-                    stdx::never!("error on synthetic type syntax");
-                    return None;
-                };
-                source
-            }
-            Either::Right(source) => source,
+        let Ok(source) = source_map.type_syntax(diag.source) else {
+            stdx::never!("error on synthetic type syntax");
+            return None;
         };
         let syntax = || source.value.to_node(&db.parse_or_expand(source.file_id));
         Some(match &diag.kind {
@@ -753,3 +926,27 @@ impl AnyDiagnostic {
         })
     }
 }
+
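+/// Maps a `PathGenericsSource` back to the AST node a diagnostic should point at:
+/// the generic argument list of the segment (or associated type binding), or its
+/// name when no generics were written.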
+fn path_generics_source_to_ast(
+    path: &ast::Path,
+    generics_source: PathGenericsSource,
+) -> Option<Either<ast::GenericArgList, ast::NameRef>> {
+    Some(match generics_source {
+        PathGenericsSource::Segment(segment) => {
+            let segment = hir_segment_to_ast_segment(path, segment)?;
+            segment
+                .generic_arg_list()
+                .map(Either::Left)
+                .or_else(|| segment.name_ref().map(Either::Right))?
+        }
+        PathGenericsSource::AssocType { segment, assoc_type } => {
+            let segment = hir_segment_to_ast_segment(path, segment)?;
+            let segment_args = segment.generic_arg_list()?;
+            let assoc = hir_assoc_type_binding_to_ast(&segment_args, assoc_type)?;
+            assoc
+                .generic_arg_list()
+                .map(Either::Left)
+                .or_else(|| assoc.name_ref().map(Either::Right))?
+        }
+    })
+}
diff --git a/src/tools/rust-analyzer/crates/hir/src/display.rs b/src/tools/rust-analyzer/crates/hir/src/display.rs
index 6f4168ab0867d..53817f37aa669 100644
--- a/src/tools/rust-analyzer/crates/hir/src/display.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/display.rs
@@ -1,59 +1,59 @@
 //! HirDisplay implementations for various hir types.
+
 use either::Either;
 use hir_def::{
-    data::{
-        adt::{StructKind, VariantData},
-        TraitFlags,
-    },
-    generics::{
-        GenericParams, TypeOrConstParamData, TypeParamProvenance, WherePredicate,
-        WherePredicateTypeTarget,
-    },
-    lang_item::LangItem,
-    type_ref::{TypeBound, TypeRef},
     AdtId, GenericDefId,
+    expr_store::ExpressionStore,
+    hir::generics::{GenericParams, TypeOrConstParamData, TypeParamProvenance, WherePredicate},
+    item_tree::FieldsShape,
+    lang_item::LangItem,
+    signatures::{StaticFlags, TraitFlags},
+    type_ref::{TypeBound, TypeRef, TypeRefId},
 };
 use hir_ty::{
+    AliasEq, AliasTy, Interner, ProjectionTyExt, TraitRefExt, TyKind, WhereClause,
+    db::HirDatabase,
     display::{
-        hir_display_with_types_map, write_bounds_like_dyn_trait_with_prefix, write_visibility,
-        HirDisplay, HirDisplayError, HirDisplayWithTypesMap, HirFormatter, SizedByDefault,
+        HirDisplay, HirDisplayError, HirDisplayWithExpressionStore, HirFormatter, SizedByDefault,
+        hir_display_with_store, write_bounds_like_dyn_trait_with_prefix, write_visibility,
     },
-    AliasEq, AliasTy, Interner, ProjectionTyExt, TraitRefExt, TyKind, WhereClause,
 };
 use itertools::Itertools;
 
 use crate::{
     Adt, AsAssocItem, AssocItem, AssocItemContainer, Const, ConstParam, Crate, Enum,
     ExternCrateDecl, Field, Function, GenericParam, HasCrate, HasVisibility, Impl, LifetimeParam,
-    Macro, Module, SelfParam, Static, Struct, Trait, TraitAlias, TraitRef, TupleField, TyBuilder,
-    Type, TypeAlias, TypeOrConstParam, TypeParam, Union, Variant,
+    Macro, Module, SelfParam, Static, Struct, StructKind, Trait, TraitAlias, TraitRef, TupleField,
+    TyBuilder, Type, TypeAlias, TypeOrConstParam, TypeParam, Union, Variant,
 };
 
 impl HirDisplay for Function {
     fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
         let db = f.db;
-        let data = db.function_data(self.id);
+        let data = db.function_signature(self.id);
         let container = self.as_assoc_item(db).map(|it| it.container(db));
         let mut module = self.module(db);
 
         // Write container (trait or impl)
         let container_params = match container {
             Some(AssocItemContainer::Trait(trait_)) => {
-                let params = f.db.generic_params(trait_.id.into());
+                let (params, params_store) = f.db.generic_params_and_store(trait_.id.into());
                 if f.show_container_bounds() && !params.is_empty() {
                     write_trait_header(&trait_, f)?;
                     f.write_char('\n')?;
-                    has_disaplayable_predicates(&params).then_some(params)
+                    has_disaplayable_predicates(f.db, &params, &params_store)
+                        .then_some((params, params_store))
                 } else {
                     None
                 }
             }
             Some(AssocItemContainer::Impl(impl_)) => {
-                let params = f.db.generic_params(impl_.id.into());
+                let (params, params_store) = f.db.generic_params_and_store(impl_.id.into());
                 if f.show_container_bounds() && !params.is_empty() {
                     write_impl_header(&impl_, f)?;
                     f.write_char('\n')?;
-                    has_disaplayable_predicates(&params).then_some(params)
+                    has_disaplayable_predicates(f.db, &params, &params_store)
+                        .then_some((params, params_store))
                 } else {
                     None
                 }
@@ -88,7 +88,7 @@ impl HirDisplay for Function {
         if let Some(abi) = &data.abi {
             write!(f, "extern \"{}\" ", abi.as_str())?;
         }
-        write!(f, "fn {}", data.name.display(f.db.upcast(), f.edition()))?;
+        write!(f, "fn {}", data.name.display(f.db, f.edition()))?;
 
         write_generic_params(GenericDefId::FunctionId(self.id), f)?;
 
@@ -112,12 +112,11 @@ impl HirDisplay for Function {
             }
 
             let pat_id = body.params[param.idx - body.self_param.is_some() as usize];
-            let pat_str =
-                body.pretty_print_pat(db.upcast(), self.id.into(), pat_id, true, f.edition());
+            let pat_str = body.pretty_print_pat(db, self.id.into(), pat_id, true, f.edition());
             f.write_str(&pat_str)?;
 
             f.write_str(": ")?;
-            type_ref.hir_fmt(f, &data.types_map)?;
+            type_ref.hir_fmt(f, &data.store)?;
         }
 
         if data.is_varargs() {
@@ -133,12 +132,12 @@ impl HirDisplay for Function {
         // Use ugly pattern match to strip the Future trait.
         // Better way?
         let ret_type = if !data.is_async() {
-            Some(data.ret_type)
-        } else {
-            match &data.types_map[data.ret_type] {
+            data.ret_type
+        } else if let Some(ret_type) = data.ret_type {
+            match &data.store[ret_type] {
                 TypeRef::ImplTrait(bounds) => match &bounds[0] {
                     &TypeBound::Path(path, _) => Some(
-                        *data.types_map[path]
+                        *data.store[path]
                             .segments()
                             .iter()
                             .last()
@@ -154,21 +153,23 @@ impl HirDisplay for Function {
                 },
                 _ => None,
             }
+        } else {
+            None
         };
 
         if let Some(ret_type) = ret_type {
-            match &data.types_map[ret_type] {
+            match &data.store[ret_type] {
                 TypeRef::Tuple(tup) if tup.is_empty() => {}
                 _ => {
                     f.write_str(" -> ")?;
-                    ret_type.hir_fmt(f, &data.types_map)?;
+                    ret_type.hir_fmt(f, &data.store)?;
                 }
             }
         }
 
         // Write where clauses
         let has_written_where = write_where_clause(GenericDefId::FunctionId(self.id), f)?;
-        if let Some(container_params) = container_params {
+        if let Some((container_params, container_params_store)) = container_params {
             if !has_written_where {
                 f.write_str("\nwhere")?;
             }
@@ -177,7 +178,7 @@ impl HirDisplay for Function {
                 AssocItemContainer::Impl(_) => "impl",
             };
             write!(f, "\n    // Bounds from {container_name}:",)?;
-            write_where_predicates(&container_params, f)?;
+            write_where_predicates(&container_params, &container_params_store, f)?;
         }
         Ok(())
     }
@@ -191,8 +192,8 @@ fn write_impl_header(impl_: &Impl, f: &mut HirFormatter<'_>) -> Result<(), HirDi
     write_generic_params(def_id, f)?;
 
     if let Some(trait_) = impl_.trait_(db) {
-        let trait_data = db.trait_data(trait_.id);
-        write!(f, " {} for", trait_data.name.display(db.upcast(), f.edition()))?;
+        let trait_data = db.trait_signature(trait_.id);
+        write!(f, " {} for", trait_data.name.display(db, f.edition()))?;
     }
 
     f.write_char(' ')?;
@@ -203,15 +204,16 @@ fn write_impl_header(impl_: &Impl, f: &mut HirFormatter<'_>) -> Result<(), HirDi
 
 impl HirDisplay for SelfParam {
     fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
-        let data = f.db.function_data(self.func);
+        let data = f.db.function_signature(self.func);
         let param = *data.params.first().unwrap();
-        match &data.types_map[param] {
+        match &data.store[param] {
             TypeRef::Path(p) if p.is_self_type() => f.write_str("self"),
-            TypeRef::Reference(ref_) if matches!(&data.types_map[ref_.ty], TypeRef::Path(p) if p.is_self_type()) =>
+            TypeRef::Reference(ref_) if matches!(&data.store[ref_.ty], TypeRef::Path(p) if p.is_self_type()) =>
             {
                 f.write_char('&')?;
                 if let Some(lifetime) = &ref_.lifetime {
-                    write!(f, "{} ", lifetime.name.display(f.db.upcast(), f.edition()))?;
+                    lifetime.hir_fmt(f, &data.store)?;
+                    f.write_char(' ')?;
                 }
                 if let hir_def::type_ref::Mutability::Mut = ref_.mutability {
                     f.write_str("mut ")?;
@@ -220,7 +222,7 @@ impl HirDisplay for SelfParam {
             }
             _ => {
                 f.write_str("self: ")?;
-                param.hir_fmt(f, &data.types_map)
+                param.hir_fmt(f, &data.store)
             }
         }
     }
@@ -242,12 +244,12 @@ impl HirDisplay for Struct {
         // FIXME: Render repr if its set explicitly?
         write_visibility(module_id, self.visibility(f.db), f)?;
         f.write_str("struct ")?;
-        write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))?;
+        write!(f, "{}", self.name(f.db).display(f.db, f.edition()))?;
         let def_id = GenericDefId::AdtId(AdtId::StructId(self.id));
         write_generic_params(def_id, f)?;
 
-        let variant_data = self.variant_data(f.db);
-        match variant_data.kind() {
+        let variant_data = self.variant_fields(f.db);
+        match self.kind(f.db) {
             StructKind::Tuple => {
                 f.write_char('(')?;
                 let mut it = variant_data.fields().iter().peekable();
@@ -281,7 +283,7 @@ impl HirDisplay for Enum {
     fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
         write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
         f.write_str("enum ")?;
-        write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))?;
+        write!(f, "{}", self.name(f.db).display(f.db, f.edition()))?;
         let def_id = GenericDefId::AdtId(AdtId::EnumId(self.id));
         write_generic_params(def_id, f)?;
 
@@ -298,7 +300,7 @@ impl HirDisplay for Union {
     fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
         write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
         f.write_str("union ")?;
-        write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))?;
+        write!(f, "{}", self.name(f.db).display(f.db, f.edition()))?;
         let def_id = GenericDefId::AdtId(AdtId::UnionId(self.id));
         write_generic_params(def_id, f)?;
 
@@ -358,7 +360,7 @@ fn write_variants(
     } else {
         f.write_str("{\n")?;
         for variant in &variants[..count] {
-            write!(f, "    {}", variant.name(f.db).display(f.db.upcast(), f.edition()))?;
+            write!(f, "    {}", variant.name(f.db).display(f.db, f.edition()))?;
             match variant.kind(f.db) {
                 StructKind::Tuple => {
                     let fields_str =
@@ -387,39 +389,39 @@ fn write_variants(
 impl HirDisplay for Field {
     fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
         write_visibility(self.parent.module(f.db).id, self.visibility(f.db), f)?;
-        write!(f, "{}: ", self.name(f.db).display(f.db.upcast(), f.edition()))?;
+        write!(f, "{}: ", self.name(f.db).display(f.db, f.edition()))?;
         self.ty(f.db).hir_fmt(f)
     }
 }
 
 impl HirDisplay for TupleField {
     fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
-        write!(f, "pub {}: ", self.name().display(f.db.upcast(), f.edition()))?;
+        write!(f, "pub {}: ", self.name().display(f.db, f.edition()))?;
         self.ty(f.db).hir_fmt(f)
     }
 }
 
 impl HirDisplay for Variant {
     fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
-        write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))?;
-        let data = self.variant_data(f.db);
-        match &*data {
-            VariantData::Unit => {}
-            VariantData::Tuple { fields, types_map } => {
+        write!(f, "{}", self.name(f.db).display(f.db, f.edition()))?;
+        let data = f.db.variant_fields(self.id.into());
+        match data.shape {
+            FieldsShape::Unit => {}
+            FieldsShape::Tuple => {
                 f.write_char('(')?;
                 let mut first = true;
-                for (_, field) in fields.iter() {
+                for (_, field) in data.fields().iter() {
                     if first {
                         first = false;
                     } else {
                         f.write_str(", ")?;
                     }
                     // Enum variant fields must be pub.
-                    field.type_ref.hir_fmt(f, types_map)?;
+                    field.type_ref.hir_fmt(f, &data.store)?;
                 }
                 f.write_char(')')?;
             }
-            VariantData::Record { .. } => {
+            FieldsShape::Record => {
                 if let Some(limit) = f.entity_limit {
                     write_fields(&self.fields(f.db), false, limit, true, f)?;
                 }
@@ -439,7 +441,7 @@ impl HirDisplay for ExternCrateDecl {
     fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
         write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
         f.write_str("extern crate ")?;
-        write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))?;
+        write!(f, "{}", self.name(f.db).display(f.db, f.edition()))?;
         if let Some(alias) = self.alias(f.db) {
             write!(f, " as {}", alias.display(f.edition()))?;
         }
@@ -493,7 +495,7 @@ impl HirDisplay for TypeParam {
         match param_data {
             TypeOrConstParamData::TypeParamData(p) => match p.provenance {
                 TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => {
-                    write!(f, "{}", p.name.clone().unwrap().display(f.db.upcast(), f.edition()))?
+                    write!(f, "{}", p.name.clone().unwrap().display(f.db, f.edition()))?
                 }
                 TypeParamProvenance::ArgumentImplTrait => {
                     return write_bounds_like_dyn_trait_with_prefix(
@@ -506,7 +508,7 @@ impl HirDisplay for TypeParam {
                 }
             },
             TypeOrConstParamData::ConstParamData(p) => {
-                write!(f, "{}", p.name.display(f.db.upcast(), f.edition()))?;
+                write!(f, "{}", p.name.display(f.db, f.edition()))?;
             }
         }
 
@@ -540,13 +542,13 @@ impl HirDisplay for TypeParam {
 
 impl HirDisplay for LifetimeParam {
     fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
-        write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))
+        write!(f, "{}", self.name(f.db).display(f.db, f.edition()))
     }
 }
 
 impl HirDisplay for ConstParam {
     fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
-        write!(f, "const {}: ", self.name(f.db).display(f.db.upcast(), f.edition()))?;
+        write!(f, "const {}: ", self.name(f.db).display(f.db, f.edition()))?;
         self.ty(f.db).hir_fmt(f)
     }
 }
@@ -555,7 +557,7 @@ fn write_generic_params(
     def: GenericDefId,
     f: &mut HirFormatter<'_>,
 ) -> Result<(), HirDisplayError> {
-    let params = f.db.generic_params(def);
+    let (params, store) = f.db.generic_params_and_store(def);
     if params.iter_lt().next().is_none()
         && params.iter_type_or_consts().all(|it| it.1.const_param().is_none())
         && params
@@ -578,7 +580,7 @@ fn write_generic_params(
     };
     for (_, lifetime) in params.iter_lt() {
         delim(f)?;
-        write!(f, "{}", lifetime.name.display(f.db.upcast(), f.edition()))?;
+        write!(f, "{}", lifetime.name.display(f.db, f.edition()))?;
     }
     for (_, ty) in params.iter_type_or_consts() {
         if let Some(name) = &ty.name() {
@@ -588,20 +590,20 @@ fn write_generic_params(
                         continue;
                     }
                     delim(f)?;
-                    write!(f, "{}", name.display(f.db.upcast(), f.edition()))?;
+                    write!(f, "{}", name.display(f.db, f.edition()))?;
                     if let Some(default) = &ty.default {
                         f.write_str(" = ")?;
-                        default.hir_fmt(f, &params.types_map)?;
+                        default.hir_fmt(f, &store)?;
                     }
                 }
                 TypeOrConstParamData::ConstParamData(c) => {
                     delim(f)?;
-                    write!(f, "const {}: ", name.display(f.db.upcast(), f.edition()))?;
-                    c.ty.hir_fmt(f, &params.types_map)?;
+                    write!(f, "const {}: ", name.display(f.db, f.edition()))?;
+                    c.ty.hir_fmt(f, &store)?;
 
                     if let Some(default) = &c.default {
                         f.write_str(" = ")?;
-                        write!(f, "{}", default.display(f.db.upcast(), f.edition()))?;
+                        default.hir_fmt(f, &store)?;
                     }
                 }
             }
@@ -616,48 +618,47 @@ fn write_where_clause(
     def: GenericDefId,
     f: &mut HirFormatter<'_>,
 ) -> Result<bool, HirDisplayError> {
-    let params = f.db.generic_params(def);
-    if !has_disaplayable_predicates(&params) {
+    let (params, store) = f.db.generic_params_and_store(def);
+    if !has_disaplayable_predicates(f.db, &params, &store) {
         return Ok(false);
     }
 
     f.write_str("\nwhere")?;
-    write_where_predicates(&params, f)?;
+    write_where_predicates(&params, &store, f)?;
 
     Ok(true)
 }
 
-fn has_disaplayable_predicates(params: &GenericParams) -> bool {
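+// A where-predicate is displayable unless its only target is an unnamed type
+// parameter (argument-position `impl Trait`), whose bounds are rendered inline
+// with the argument itself.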
+fn has_disaplayable_predicates(
+    db: &dyn HirDatabase,
+    params: &GenericParams,
+    store: &ExpressionStore,
+) -> bool {
     params.where_predicates().any(|pred| {
         !matches!(
             pred,
-            WherePredicate::TypeBound { target: WherePredicateTypeTarget::TypeOrConstParam(id), .. }
-            if params[*id].name().is_none()
+            WherePredicate::TypeBound { target, .. }
+            if  matches!(store[*target],
+                TypeRef::TypeParam(id) if db.generic_params(id.parent())[id.local_id()].name().is_none()
+            )
         )
     })
 }
 
 fn write_where_predicates(
     params: &GenericParams,
+    store: &ExpressionStore,
     f: &mut HirFormatter<'_>,
 ) -> Result<(), HirDisplayError> {
     use WherePredicate::*;
 
     // unnamed type targets are displayed inline with the argument itself, e.g. `f: impl Y`.
-    let is_unnamed_type_target = |params: &GenericParams, target: &WherePredicateTypeTarget| {
-        matches!(target,
-            WherePredicateTypeTarget::TypeOrConstParam(id) if params[*id].name().is_none()
+    let is_unnamed_type_target = |target: TypeRefId| {
+        matches!(store[target],
+            TypeRef::TypeParam(id) if f.db.generic_params(id.parent())[id.local_id()].name().is_none()
         )
     };
 
-    let write_target = |target: &WherePredicateTypeTarget, f: &mut HirFormatter<'_>| match target {
-        WherePredicateTypeTarget::TypeRef(ty) => ty.hir_fmt(f, &params.types_map),
-        WherePredicateTypeTarget::TypeOrConstParam(id) => match params[*id].name() {
-            Some(name) => write!(f, "{}", name.display(f.db.upcast(), f.edition())),
-            None => f.write_str("{unnamed}"),
-        },
-    };
-
     let check_same_target = |pred1: &WherePredicate, pred2: &WherePredicate| match (pred1, pred2) {
         (TypeBound { target: t1, .. }, TypeBound { target: t2, .. }) => t1 == t2,
         (Lifetime { target: t1, .. }, Lifetime { target: t2, .. }) => t1 == t2,
@@ -670,41 +671,36 @@ fn write_where_predicates(
 
     let mut iter = params.where_predicates().peekable();
     while let Some(pred) = iter.next() {
-        if matches!(pred, TypeBound { target, .. } if is_unnamed_type_target(params, target)) {
+        if matches!(pred, TypeBound { target, .. } if is_unnamed_type_target(*target)) {
             continue;
         }
 
         f.write_str("\n    ")?;
         match pred {
             TypeBound { target, bound } => {
-                write_target(target, f)?;
+                target.hir_fmt(f, store)?;
                 f.write_str(": ")?;
-                bound.hir_fmt(f, &params.types_map)?;
+                bound.hir_fmt(f, store)?;
             }
             Lifetime { target, bound } => {
-                let target = target.name.display(f.db.upcast(), f.edition());
-                let bound = bound.name.display(f.db.upcast(), f.edition());
-                write!(f, "{target}: {bound}")?;
+                target.hir_fmt(f, store)?;
+                write!(f, ": ")?;
+                bound.hir_fmt(f, store)?;
             }
             ForLifetime { lifetimes, target, bound } => {
-                let lifetimes =
-                    lifetimes.iter().map(|it| it.display(f.db.upcast(), f.edition())).join(", ");
+                let lifetimes = lifetimes.iter().map(|it| it.display(f.db, f.edition())).join(", ");
                 write!(f, "for<{lifetimes}> ")?;
-                write_target(target, f)?;
+                target.hir_fmt(f, store)?;
                 f.write_str(": ")?;
-                bound.hir_fmt(f, &params.types_map)?;
+                bound.hir_fmt(f, store)?;
             }
         }
 
         while let Some(nxt) = iter.next_if(|nxt| check_same_target(pred, nxt)) {
             f.write_str(" + ")?;
             match nxt {
-                TypeBound { bound, .. } | ForLifetime { bound, .. } => {
-                    bound.hir_fmt(f, &params.types_map)?
-                }
-                Lifetime { bound, .. } => {
-                    write!(f, "{}", bound.name.display(f.db.upcast(), f.edition()))?
-                }
+                TypeBound { bound, .. } | ForLifetime { bound, .. } => bound.hir_fmt(f, store)?,
+                Lifetime { bound, .. } => bound.hir_fmt(f, store)?,
             }
         }
         f.write_str(",")?;
@@ -723,13 +719,13 @@ impl HirDisplay for Const {
             module = module.nearest_non_block_module(db);
         }
         write_visibility(module.id, self.visibility(db), f)?;
-        let data = db.const_data(self.id);
+        let data = db.const_signature(self.id);
         f.write_str("const ")?;
         match &data.name {
-            Some(name) => write!(f, "{}: ", name.display(f.db.upcast(), f.edition()))?,
+            Some(name) => write!(f, "{}: ", name.display(f.db, f.edition()))?,
             None => f.write_str("_: ")?,
         }
-        data.type_ref.hir_fmt(f, &data.types_map)?;
+        data.type_ref.hir_fmt(f, &data.store)?;
         Ok(())
     }
 }
@@ -737,13 +733,13 @@ impl HirDisplay for Const {
 impl HirDisplay for Static {
     fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
         write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
-        let data = f.db.static_data(self.id);
+        let data = f.db.static_signature(self.id);
         f.write_str("static ")?;
-        if data.mutable {
+        if data.flags.contains(StaticFlags::MUTABLE) {
             f.write_str("mut ")?;
         }
-        write!(f, "{}: ", data.name.display(f.db.upcast(), f.edition()))?;
-        data.type_ref.hir_fmt(f, &data.types_map)?;
+        write!(f, "{}: ", data.name.display(f.db, f.edition()))?;
+        data.type_ref.hir_fmt(f, &data.store)?;
         Ok(())
     }
 }
@@ -795,14 +791,14 @@ impl HirDisplay for Trait {
 
 fn write_trait_header(trait_: &Trait, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
     write_visibility(trait_.module(f.db).id, trait_.visibility(f.db), f)?;
-    let data = f.db.trait_data(trait_.id);
-    if data.flags.contains(TraitFlags::IS_UNSAFE) {
+    let data = f.db.trait_signature(trait_.id);
+    if data.flags.contains(TraitFlags::UNSAFE) {
         f.write_str("unsafe ")?;
     }
-    if data.flags.contains(TraitFlags::IS_AUTO) {
+    if data.flags.contains(TraitFlags::AUTO) {
         f.write_str("auto ")?;
     }
-    write!(f, "trait {}", data.name.display(f.db.upcast(), f.edition()))?;
+    write!(f, "trait {}", data.name.display(f.db, f.edition()))?;
     write_generic_params(GenericDefId::TraitId(trait_.id), f)?;
     Ok(())
 }
@@ -810,8 +806,8 @@ fn write_trait_header(trait_: &Trait, f: &mut HirFormatter<'_>) -> Result<(), Hi
 impl HirDisplay for TraitAlias {
     fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
         write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
-        let data = f.db.trait_alias_data(self.id);
-        write!(f, "trait {}", data.name.display(f.db.upcast(), f.edition()))?;
+        let data = f.db.trait_alias_signature(self.id);
+        write!(f, "trait {}", data.name.display(f.db, f.edition()))?;
         let def_id = GenericDefId::TraitAliasId(self.id);
         write_generic_params(def_id, f)?;
         f.write_str(" = ")?;
@@ -826,20 +822,20 @@ impl HirDisplay for TraitAlias {
 impl HirDisplay for TypeAlias {
     fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
         write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
-        let data = f.db.type_alias_data(self.id);
-        write!(f, "type {}", data.name.display(f.db.upcast(), f.edition()))?;
+        let data = f.db.type_alias_signature(self.id);
+        write!(f, "type {}", data.name.display(f.db, f.edition()))?;
         let def_id = GenericDefId::TypeAliasId(self.id);
         write_generic_params(def_id, f)?;
         if !data.bounds.is_empty() {
             f.write_str(": ")?;
             f.write_joined(
-                data.bounds.iter().map(|bound| hir_display_with_types_map(bound, &data.types_map)),
+                data.bounds.iter().map(|bound| hir_display_with_store(bound, &data.store)),
                 " + ",
             )?;
         }
-        if let Some(ty) = data.type_ref {
+        if let Some(ty) = data.ty {
             f.write_str(" = ")?;
-            ty.hir_fmt(f, &data.types_map)?;
+            ty.hir_fmt(f, &data.store)?;
         }
         write_where_clause(def_id, f)?;
         Ok(())
@@ -854,11 +850,11 @@ impl HirDisplay for Module {
                 return match self.krate(f.db).display_name(f.db) {
                     Some(name) => write!(f, "extern crate {name}"),
                     None => f.write_str("extern crate {unknown}"),
-                }
+                };
             }
         }
         match self.name(f.db) {
-            Some(name) => write!(f, "mod {}", name.display(f.db.upcast(), f.edition())),
+            Some(name) => write!(f, "mod {}", name.display(f.db, f.edition())),
             None => f.write_str("mod {unknown}"),
         }
     }
@@ -880,6 +876,6 @@ impl HirDisplay for Macro {
             hir_def::MacroId::MacroRulesId(_) => f.write_str("macro_rules!"),
             hir_def::MacroId::ProcMacroId(_) => f.write_str("proc_macro"),
         }?;
-        write!(f, " {}", self.name(f.db).display(f.db.upcast(), f.edition()))
+        write!(f, " {}", self.name(f.db).display(f.db, f.edition()))
     }
 }
diff --git a/src/tools/rust-analyzer/crates/hir/src/from_id.rs b/src/tools/rust-analyzer/crates/hir/src/from_id.rs
index 72df07ef8c0cc..c6446693df3e4 100644
--- a/src/tools/rust-analyzer/crates/hir/src/from_id.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/from_id.rs
@@ -4,9 +4,9 @@
 //! are splitting the hir.
 
 use hir_def::{
-    hir::{BindingId, LabelId},
     AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, GenericDefId, GenericParamId,
     ModuleDefId, VariantId,
+    hir::{BindingId, LabelId},
 };
 
 use crate::{
@@ -30,7 +30,7 @@ macro_rules! from_id {
 }
 
 from_id![
-    (base_db::CrateId, crate::Crate),
+    (base_db::Crate, crate::Crate),
     (hir_def::ModuleId, crate::Module),
     (hir_def::StructId, crate::Struct),
     (hir_def::UnionId, crate::Union),
@@ -40,7 +40,6 @@ from_id![
     (hir_def::TraitAliasId, crate::TraitAlias),
     (hir_def::StaticId, crate::Static),
     (hir_def::ConstId, crate::Const),
-    (hir_def::InTypeConstId, crate::InTypeConst),
     (hir_def::FunctionId, crate::Function),
     (hir_def::ImplId, crate::Impl),
     (hir_def::TypeOrConstParamId, crate::TypeOrConstParam),
@@ -147,7 +146,6 @@ impl From<DefWithBody> for DefWithBodyId {
             DefWithBody::Static(it) => DefWithBodyId::StaticId(it.id),
             DefWithBody::Const(it) => DefWithBodyId::ConstId(it.id),
             DefWithBody::Variant(it) => DefWithBodyId::VariantId(it.into()),
-            DefWithBody::InTypeConst(it) => DefWithBodyId::InTypeConstId(it.id),
         }
     }
 }
@@ -159,7 +157,6 @@ impl From<DefWithBodyId> for DefWithBody {
             DefWithBodyId::StaticId(it) => DefWithBody::Static(it.into()),
             DefWithBodyId::ConstId(it) => DefWithBody::Const(it.into()),
             DefWithBodyId::VariantId(it) => DefWithBody::Variant(it.into()),
-            DefWithBodyId::InTypeConstId(it) => DefWithBody::InTypeConst(it.into()),
         }
     }
 }
diff --git a/src/tools/rust-analyzer/crates/hir/src/has_source.rs b/src/tools/rust-analyzer/crates/hir/src/has_source.rs
index 372c725293441..fe7429c867254 100644
--- a/src/tools/rust-analyzer/crates/hir/src/has_source.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/has_source.rs
@@ -2,20 +2,20 @@
 
 use either::Either;
 use hir_def::{
+    CallableDefId, Lookup, MacroId, VariantId,
     nameres::{ModuleOrigin, ModuleSource},
     src::{HasChildSource, HasSource as _},
-    CallableDefId, Lookup, MacroId, VariantId,
 };
-use hir_expand::{HirFileId, InFile};
+use hir_expand::{EditionedFileId, HirFileId, InFile};
 use hir_ty::db::InternedClosure;
-use span::EditionedFileId;
 use syntax::ast;
 use tt::TextRange;
 
 use crate::{
-    db::HirDatabase, Adt, Callee, Const, Enum, ExternCrateDecl, Field, FieldSource, Function, Impl,
+    Adt, Callee, Const, Enum, ExternCrateDecl, Field, FieldSource, Function, Impl,
     InlineAsmOperand, Label, LifetimeParam, LocalSource, Macro, Module, Param, SelfParam, Static,
     Struct, Trait, TraitAlias, TypeAlias, TypeOrConstParam, Union, Variant, VariantDef,
+    db::HirDatabase,
 };
 
 pub trait HasSource {
@@ -35,23 +35,23 @@ pub trait HasSource {
 impl Module {
     /// Returns a node which defines this module. That is, a file or a `mod foo {}` with items.
     pub fn definition_source(self, db: &dyn HirDatabase) -> InFile<ModuleSource> {
-        let def_map = self.id.def_map(db.upcast());
-        def_map[self.id.local_id].definition_source(db.upcast())
+        let def_map = self.id.def_map(db);
+        def_map[self.id.local_id].definition_source(db)
     }
 
     /// Returns a node which defines this module. That is, a file or a `mod foo {}` with items.
     pub fn definition_source_range(self, db: &dyn HirDatabase) -> InFile<TextRange> {
-        let def_map = self.id.def_map(db.upcast());
-        def_map[self.id.local_id].definition_source_range(db.upcast())
+        let def_map = self.id.def_map(db);
+        def_map[self.id.local_id].definition_source_range(db)
     }
 
     pub fn definition_source_file_id(self, db: &dyn HirDatabase) -> HirFileId {
-        let def_map = self.id.def_map(db.upcast());
+        let def_map = self.id.def_map(db);
         def_map[self.id.local_id].definition_source_file_id()
     }
 
     pub fn is_mod_rs(self, db: &dyn HirDatabase) -> bool {
-        let def_map = self.id.def_map(db.upcast());
+        let def_map = self.id.def_map(db);
         match def_map[self.id.local_id].origin {
             ModuleOrigin::File { is_mod_rs, .. } => is_mod_rs,
             _ => false,
@@ -59,7 +59,7 @@ impl Module {
     }
 
     pub fn as_source_file_id(self, db: &dyn HirDatabase) -> Option<EditionedFileId> {
-        let def_map = self.id.def_map(db.upcast());
+        let def_map = self.id.def_map(db);
         match def_map[self.id.local_id].origin {
             ModuleOrigin::File { definition, .. } | ModuleOrigin::CrateRoot { definition, .. } => {
                 Some(definition)
@@ -69,22 +69,22 @@ impl Module {
     }
 
     pub fn is_inline(self, db: &dyn HirDatabase) -> bool {
-        let def_map = self.id.def_map(db.upcast());
+        let def_map = self.id.def_map(db);
         def_map[self.id.local_id].origin.is_inline()
     }
 
     /// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`.
     /// `None` for the crate root.
     pub fn declaration_source(self, db: &dyn HirDatabase) -> Option<InFile<ast::Module>> {
-        let def_map = self.id.def_map(db.upcast());
-        def_map[self.id.local_id].declaration_source(db.upcast())
+        let def_map = self.id.def_map(db);
+        def_map[self.id.local_id].declaration_source(db)
     }
 
     /// Returns a text range which declares this module, either a `mod foo;` or a `mod foo {}`.
     /// `None` for the crate root.
     pub fn declaration_source_range(self, db: &dyn HirDatabase) -> Option<InFile<TextRange>> {
-        let def_map = self.id.def_map(db.upcast());
-        def_map[self.id.local_id].declaration_source_range(db.upcast())
+        let def_map = self.id.def_map(db);
+        def_map[self.id.local_id].declaration_source_range(db)
     }
 }
 
@@ -92,7 +92,7 @@ impl HasSource for Field {
     type Ast = FieldSource;
     fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
         let var = VariantId::from(self.parent);
-        let src = var.child_source(db.upcast());
+        let src = var.child_source(db);
         let field_source = src.map(|it| match it[self.id].clone() {
             Either::Left(it) => FieldSource::Pos(it),
             Either::Right(it) => FieldSource::Named(it),
@@ -123,96 +123,88 @@ impl HasSource for VariantDef {
 impl HasSource for Struct {
     type Ast = ast::Struct;
     fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
-        Some(self.id.lookup(db.upcast()).source(db.upcast()))
+        Some(self.id.lookup(db).source(db))
     }
 }
 impl HasSource for Union {
     type Ast = ast::Union;
     fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
-        Some(self.id.lookup(db.upcast()).source(db.upcast()))
+        Some(self.id.lookup(db).source(db))
     }
 }
 impl HasSource for Enum {
     type Ast = ast::Enum;
     fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
-        Some(self.id.lookup(db.upcast()).source(db.upcast()))
+        Some(self.id.lookup(db).source(db))
     }
 }
 impl HasSource for Variant {
     type Ast = ast::Variant;
     fn source(self, db: &dyn HirDatabase) -> Option<InFile<ast::Variant>> {
-        Some(self.id.lookup(db.upcast()).source(db.upcast()))
+        Some(self.id.lookup(db).source(db))
     }
 }
 impl HasSource for Function {
     type Ast = ast::Fn;
     fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
-        Some(self.id.lookup(db.upcast()).source(db.upcast()))
+        Some(self.id.lookup(db).source(db))
     }
 }
 impl HasSource for Const {
     type Ast = ast::Const;
     fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
-        Some(self.id.lookup(db.upcast()).source(db.upcast()))
+        Some(self.id.lookup(db).source(db))
     }
 }
 impl HasSource for Static {
     type Ast = ast::Static;
     fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
-        Some(self.id.lookup(db.upcast()).source(db.upcast()))
+        Some(self.id.lookup(db).source(db))
     }
 }
 impl HasSource for Trait {
     type Ast = ast::Trait;
     fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
-        Some(self.id.lookup(db.upcast()).source(db.upcast()))
+        Some(self.id.lookup(db).source(db))
     }
 }
 impl HasSource for TraitAlias {
     type Ast = ast::TraitAlias;
     fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
-        Some(self.id.lookup(db.upcast()).source(db.upcast()))
+        Some(self.id.lookup(db).source(db))
     }
 }
 impl HasSource for TypeAlias {
     type Ast = ast::TypeAlias;
     fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
-        Some(self.id.lookup(db.upcast()).source(db.upcast()))
+        Some(self.id.lookup(db).source(db))
     }
 }
 impl HasSource for Macro {
     type Ast = Either<ast::Macro, ast::Fn>;
     fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
         match self.id {
-            MacroId::Macro2Id(it) => Some(
-                it.lookup(db.upcast())
-                    .source(db.upcast())
-                    .map(ast::Macro::MacroDef)
-                    .map(Either::Left),
-            ),
-            MacroId::MacroRulesId(it) => Some(
-                it.lookup(db.upcast())
-                    .source(db.upcast())
-                    .map(ast::Macro::MacroRules)
-                    .map(Either::Left),
-            ),
-            MacroId::ProcMacroId(it) => {
-                Some(it.lookup(db.upcast()).source(db.upcast()).map(Either::Right))
+            MacroId::Macro2Id(it) => {
+                Some(it.lookup(db).source(db).map(ast::Macro::MacroDef).map(Either::Left))
+            }
+            MacroId::MacroRulesId(it) => {
+                Some(it.lookup(db).source(db).map(ast::Macro::MacroRules).map(Either::Left))
             }
+            MacroId::ProcMacroId(it) => Some(it.lookup(db).source(db).map(Either::Right)),
         }
     }
 }
 impl HasSource for Impl {
     type Ast = ast::Impl;
     fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
-        Some(self.id.lookup(db.upcast()).source(db.upcast()))
+        Some(self.id.lookup(db).source(db))
     }
 }
 
 impl HasSource for TypeOrConstParam {
     type Ast = Either<ast::TypeOrConstParam, ast::TraitOrAlias>;
     fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
-        let child_source = self.id.parent.child_source(db.upcast());
+        let child_source = self.id.parent.child_source(db);
         child_source.map(|it| it.get(self.id.local_id).cloned()).transpose()
     }
 }
@@ -220,7 +212,7 @@ impl HasSource for TypeOrConstParam {
 impl HasSource for LifetimeParam {
     type Ast = ast::LifetimeParam;
     fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
-        let child_source = self.id.parent.child_source(db.upcast());
+        let child_source = self.id.parent.child_source(db);
         child_source.map(|it| it.get(self.id.local_id).cloned()).transpose()
     }
 }
@@ -290,7 +282,7 @@ impl HasSource for Label {
     fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
         let (_body, source_map) = db.body_with_source_map(self.parent);
         let src = source_map.label_syntax(self.label_id);
-        let root = src.file_syntax(db.upcast());
+        let root = src.file_syntax(db);
         Some(src.map(|ast| ast.to_node(&root)))
     }
 }
@@ -299,16 +291,16 @@ impl HasSource for ExternCrateDecl {
     type Ast = ast::ExternCrate;
 
     fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
-        Some(self.id.lookup(db.upcast()).source(db.upcast()))
+        Some(self.id.lookup(db).source(db))
     }
 }
 
 impl HasSource for InlineAsmOperand {
     type Ast = ast::AsmOperandNamed;
     fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
-        let (_body, source_map) = db.body_with_source_map(self.owner);
+        let source_map = db.body_with_source_map(self.owner).1;
         if let Ok(src) = source_map.expr_syntax(self.expr) {
-            let root = src.file_syntax(db.upcast());
+            let root = src.file_syntax(db);
             return src
                 .map(|ast| match ast.to_node(&root) {
                     Either::Left(ast::Expr::AsmExpr(asm)) => asm
diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs
index 29f4584665087..143c13069e42d 100644
--- a/src/tools/rust-analyzer/crates/hir/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs
@@ -39,62 +39,62 @@ use std::{
 };
 
 use arrayvec::ArrayVec;
-use base_db::{CrateDisplayName, CrateId, CrateOrigin, LangCrateOrigin};
+use base_db::{CrateDisplayName, CrateOrigin, LangCrateOrigin};
 use either::Either;
 use hir_def::{
-    data::{adt::VariantData, TraitFlags},
-    expr_store::ExpressionStoreDiagnostics,
-    generics::{LifetimeParamData, TypeOrConstParamData, TypeParamProvenance},
-    hir::{BindingAnnotation, BindingId, Expr, ExprId, ExprOrPatId, LabelId, Pat},
-    item_tree::{AttrOwner, FieldParent, ItemTreeFieldId, ItemTreeNode},
+    AdtId, AssocItemId, AssocItemLoc, AttrDefId, CallableDefId, ConstId, ConstParamId,
+    CrateRootModuleId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId,
+    FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, ItemContainerId, LifetimeParamId,
+    LocalFieldId, Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId, SyntheticSyntax,
+    TraitAliasId, TupleId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId,
+    expr_store::{ExpressionStoreDiagnostics, ExpressionStoreSourceMap},
+    hir::{
+        BindingAnnotation, BindingId, Expr, ExprId, ExprOrPatId, LabelId, Pat,
+        generics::{LifetimeParamData, TypeOrConstParamData, TypeParamProvenance},
+    },
+    item_tree::{AttrOwner, FieldParent, ImportAlias, ItemTreeFieldId, ItemTreeNode},
     lang_item::LangItemTarget,
     layout::{self, ReprOptions, TargetDataLayout},
     nameres::{self, diagnostics::DefDiagnostic},
-    path::ImportAlias,
     per_ns::PerNs,
     resolver::{HasResolver, Resolver},
-    type_ref::TypesSourceMap,
-    AdtId, AssocItemId, AssocItemLoc, AttrDefId, CallableDefId, ConstId, ConstParamId,
-    CrateRootModuleId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId,
-    FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, InTypeConstId, ItemContainerId,
-    LifetimeParamId, LocalFieldId, Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId,
-    SyntheticSyntax, TraitAliasId, TupleId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId,
+    signatures::{ImplFlags, StaticFlags, TraitFlags, VariantFields},
 };
 use hir_expand::{
-    attrs::collect_attrs, proc_macro::ProcMacroKind, AstId, MacroCallKind, RenderedExpandError,
-    ValueResult,
+    AstId, MacroCallKind, RenderedExpandError, ValueResult, attrs::collect_attrs,
+    proc_macro::ProcMacroKind,
 };
 use hir_ty::{
-    all_super_traits, autoderef, check_orphan_rules,
-    consteval::{try_const_usize, unknown_const_as_generic, ConstExt},
+    AliasTy, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId, GenericArg,
+    GenericArgData, Interner, ParamKind, QuantifiedWhereClause, Scalar, Substitution,
+    TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyDefId, TyExt, TyKind, TyLoweringDiagnostic,
+    ValueTyDefId, WhereClause, all_super_traits, autoderef, check_orphan_rules,
+    consteval::{ConstExt, try_const_usize, unknown_const_as_generic},
     diagnostics::BodyValidationDiagnostic,
     direct_super_traits, error_lifetime, known_const_to_ast,
     layout::{Layout as TyLayout, RustcEnumVariantIdx, RustcFieldIdx, TagEncoding},
     method_resolution,
-    mir::{interpret_mir, MutBorrowKind},
+    mir::{MutBorrowKind, interpret_mir},
     primitive::UintTy,
     traits::FnTrait,
-    AliasTy, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId, GenericArg,
-    GenericArgData, Interner, ParamKind, QuantifiedWhereClause, Scalar, Substitution,
-    TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyDefId, TyExt, TyKind, TyLoweringDiagnostic,
-    ValueTyDefId, WhereClause,
 };
 use itertools::Itertools;
 use nameres::diagnostics::DefDiagnosticKind;
 use rustc_hash::FxHashSet;
 use smallvec::SmallVec;
-use span::{Edition, EditionedFileId, FileId, MacroCallId};
+use span::{Edition, FileId};
 use stdx::{format_to, impl_from, never};
 use syntax::{
-    ast::{self, HasAttrs as _, HasGenericParams, HasName},
-    format_smolstr, AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, TextRange, ToSmolStr, T,
+    AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, T, TextRange, ToSmolStr,
+    ast::{self, HasAttrs as _, HasName},
+    format_smolstr,
 };
 use triomphe::{Arc, ThinArc};
 
 use crate::db::{DefDatabase, HirDatabase};
 
 pub use crate::{
-    attrs::{resolve_doc_path_on, HasAttrs},
+    attrs::{HasAttrs, resolve_doc_path_on},
     diagnostics::*,
     has_source::HasSource,
     semantics::{
@@ -114,22 +114,22 @@ pub use crate::{
 pub use {
     cfg::{CfgAtom, CfgExpr, CfgOptions},
     hir_def::{
+        Complete,
+        ImportPathConfig,
         attr::{AttrSourceMap, Attrs, AttrsWithOwner},
-        data::adt::StructKind,
         find_path::PrefixKind,
         import_map,
         lang_item::LangItem,
         nameres::{DefMap, ModuleSource},
-        path::{ModPath, PathKind},
         per_ns::Namespace,
         type_ref::{Mutability, TypeRef},
         visibility::Visibility,
-        ImportPathConfig,
         // FIXME: This is here since some queries take it as input that are used
         // outside of hir.
         {ModuleDefId, TraitId},
     },
     hir_expand::{
+        EditionedFileId, ExpandResult, HirFileId, MacroCallId, MacroKind,
         attrs::{Attr, AttrId},
         change::ChangeWithProcMacros,
         files::{
@@ -137,15 +137,16 @@ pub use {
             HirFileRange, InFile, InFileWrapper, InMacroFile, InRealFile, MacroFilePosition,
             MacroFileRange,
         },
-        hygiene::{marks_rev, SyntaxContextExt},
+        hygiene::{SyntaxContextExt, marks_rev},
         inert_attr_macro::AttributeTemplate,
-        mod_path::tool_path,
+        mod_path::{ModPath, PathKind, tool_path},
         name::Name,
         prettify_macro_expansion,
         proc_macro::{ProcMacros, ProcMacrosBuilder},
-        tt, ExpandResult, HirFileId, HirFileIdExt, MacroFileId, MacroFileIdExt, MacroKind,
+        tt,
     },
     hir_ty::{
+        CastError, DropGlue, FnAbi, PointerCast, Safety, Variance,
         consteval::ConstEvalError,
         diagnostics::UnsafetyReason,
         display::{ClosureStyle, DisplayTarget, HirDisplay, HirDisplayError, HirWrite},
@@ -153,18 +154,17 @@ pub use {
         layout::LayoutError,
         method_resolution::TyFingerprint,
         mir::{MirEvalError, MirLowerError},
-        CastError, DropGlue, FnAbi, PointerCast, Safety, Variance,
     },
     // FIXME: Properly encapsulate mir
-    hir_ty::{mir, Interner as ChalkTyInterner},
-    intern::{sym, Symbol},
+    hir_ty::{Interner as ChalkTyInterner, mir},
+    intern::{Symbol, sym},
 };
 
 // These are negative re-exports: pub using these names is forbidden, they
 // should remain private to hir internals.
 #[allow(unused)]
 use {
-    hir_def::path::Path,
+    hir_def::expr_store::path::Path,
     hir_expand::{
         name::AsName,
         span_map::{ExpansionSpanMap, RealSpanMap, SpanMap, SpanMapRef},
@@ -176,7 +176,7 @@ use {
 /// root module.
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct Crate {
-    pub(crate) id: CrateId,
+    pub(crate) id: base_db::Crate,
 }
 
 #[derive(Debug)]
@@ -187,7 +187,7 @@ pub struct CrateDependency {
 
 impl Crate {
     pub fn origin(self, db: &dyn HirDatabase) -> CrateOrigin {
-        db.crate_graph()[self.id].origin.clone()
+        self.id.data(db).origin.clone()
     }
 
     pub fn is_builtin(self, db: &dyn HirDatabase) -> bool {
@@ -195,7 +195,8 @@ impl Crate {
     }
 
     pub fn dependencies(self, db: &dyn HirDatabase) -> Vec<CrateDependency> {
-        db.crate_graph()[self.id]
+        self.id
+            .data(db)
             .dependencies
             .iter()
             .map(|dep| {
@@ -207,12 +208,11 @@ impl Crate {
     }
 
     pub fn reverse_dependencies(self, db: &dyn HirDatabase) -> Vec<Crate> {
-        let crate_graph = db.crate_graph();
-        crate_graph
+        let all_crates = db.all_crates();
+        all_crates
             .iter()
-            .filter(|&krate| {
-                crate_graph[krate].dependencies.iter().any(|it| it.crate_id == self.id)
-            })
+            .copied()
+            .filter(|&krate| krate.data(db).dependencies.iter().any(|it| it.crate_id == self.id))
             .map(|id| Crate { id })
             .collect()
     }
@@ -221,7 +221,7 @@ impl Crate {
         self,
         db: &dyn HirDatabase,
     ) -> impl Iterator<Item = Crate> {
-        db.crate_graph().transitive_rev_deps(self.id).map(|id| Crate { id })
+        db.transitive_rev_deps(self.id).into_iter().map(|id| Crate { id })
     }
 
     pub fn root_module(self) -> Module {
@@ -234,54 +234,57 @@ impl Crate {
     }
 
     pub fn root_file(self, db: &dyn HirDatabase) -> FileId {
-        db.crate_graph()[self.id].root_file_id
+        self.id.data(db).root_file_id
     }
 
     pub fn edition(self, db: &dyn HirDatabase) -> Edition {
-        db.crate_graph()[self.id].edition
+        self.id.data(db).edition
     }
 
     pub fn version(self, db: &dyn HirDatabase) -> Option<String> {
-        db.crate_graph()[self.id].version.clone()
+        self.id.extra_data(db).version.clone()
     }
 
     pub fn display_name(self, db: &dyn HirDatabase) -> Option<CrateDisplayName> {
-        db.crate_graph()[self.id].display_name.clone()
+        self.id.extra_data(db).display_name.clone()
     }
 
     pub fn query_external_importables(
         self,
         db: &dyn DefDatabase,
         query: import_map::Query,
-    ) -> impl Iterator<Item = Either<ModuleDef, Macro>> {
+    ) -> impl Iterator<Item = (Either<ModuleDef, Macro>, Complete)> {
         let _p = tracing::info_span!("query_external_importables").entered();
-        import_map::search_dependencies(db, self.into(), &query).into_iter().map(|item| {
-            match ItemInNs::from(item) {
-                ItemInNs::Types(mod_id) | ItemInNs::Values(mod_id) => Either::Left(mod_id),
-                ItemInNs::Macros(mac_id) => Either::Right(mac_id),
-            }
-        })
+        import_map::search_dependencies(db, self.into(), &query).into_iter().map(
+            |(item, do_not_complete)| {
+                let item = match ItemInNs::from(item) {
+                    ItemInNs::Types(mod_id) | ItemInNs::Values(mod_id) => Either::Left(mod_id),
+                    ItemInNs::Macros(mac_id) => Either::Right(mac_id),
+                };
+                (item, do_not_complete)
+            },
+        )
     }
 
     pub fn all(db: &dyn HirDatabase) -> Vec<Crate> {
-        db.crate_graph().iter().map(|id| Crate { id }).collect()
+        db.all_crates().iter().map(|&id| Crate { id }).collect()
     }
 
     /// Try to get the root URL of the documentation of a crate.
     pub fn get_html_root_url(self: &Crate, db: &dyn HirDatabase) -> Option<String> {
         // Look for #![doc(html_root_url = "...")]
         let attrs = db.attrs(AttrDefId::ModuleId(self.root_module().into()));
-        let doc_url = attrs.by_key(&sym::doc).find_string_value_in_tt(&sym::html_root_url);
+        let doc_url = attrs.by_key(sym::doc).find_string_value_in_tt(sym::html_root_url);
         doc_url.map(|s| s.trim_matches('"').trim_end_matches('/').to_owned() + "/")
     }
 
-    pub fn cfg(&self, db: &dyn HirDatabase) -> Arc<CfgOptions> {
-        db.crate_graph()[self.id].cfg_options.clone()
+    pub fn cfg<'db>(&self, db: &'db dyn HirDatabase) -> &'db CfgOptions {
+        self.id.cfg_options(db)
     }
 
-    pub fn potential_cfg(&self, db: &dyn HirDatabase) -> Arc<CfgOptions> {
-        let data = &db.crate_graph()[self.id];
-        data.potential_cfg_options.clone().unwrap_or_else(|| data.cfg_options.clone())
+    pub fn potential_cfg<'db>(&self, db: &'db dyn HirDatabase) -> &'db CfgOptions {
+        let data = self.id.extra_data(db);
+        data.potential_cfg_options.as_ref().unwrap_or_else(|| self.id.cfg_options(db))
     }
 
     pub fn to_display_target(self, db: &dyn HirDatabase) -> DisplayTarget {
@@ -289,14 +292,13 @@ impl Crate {
     }
 
     fn core(db: &dyn HirDatabase) -> Option<Crate> {
-        let crate_graph = db.crate_graph();
-        let result = crate_graph
+        db.all_crates()
             .iter()
+            .copied()
             .find(|&krate| {
-                matches!(crate_graph[krate].origin, CrateOrigin::Lang(LangCrateOrigin::Core))
+                matches!(krate.data(db).origin, CrateOrigin::Lang(LangCrateOrigin::Core))
             })
-            .map(Crate::from);
-        result
+            .map(Crate::from)
     }
 }
 
@@ -370,7 +372,7 @@ impl ModuleDef {
             segments.extend(m.name(db))
         }
         segments.reverse();
-        Some(segments.iter().map(|it| it.display(db.upcast(), edition)).join("::"))
+        Some(segments.iter().map(|it| it.display(db, edition)).join("::"))
     }
 
     pub fn canonical_module_path(
@@ -490,9 +492,7 @@ impl HasCrate for ModuleDef {
     fn krate(&self, db: &dyn HirDatabase) -> Crate {
         match self.module(db) {
             Some(module) => module.krate(),
-            None => Crate::core(db).unwrap_or_else(|| {
-                (*db.crate_graph().crates_in_topological_order().last().unwrap()).into()
-            }),
+            None => Crate::core(db).unwrap_or_else(|| db.all_crates()[0].into()),
         }
     }
 }
@@ -518,7 +518,7 @@ impl HasVisibility for ModuleDef {
 impl Module {
     /// Name of this module.
     pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
-        self.id.name(db.upcast())
+        self.id.name(db)
     }
 
     /// Returns the crate this module is part of.
@@ -540,7 +540,7 @@ impl Module {
 
     /// Iterates over all child modules.
     pub fn children(self, db: &dyn HirDatabase) -> impl Iterator<Item = Module> {
-        let def_map = self.id.def_map(db.upcast());
+        let def_map = self.id.def_map(db);
         let children = def_map[self.id.local_id]
             .children
             .values()
@@ -551,7 +551,7 @@ impl Module {
 
     /// Finds a parent module.
     pub fn parent(self, db: &dyn HirDatabase) -> Option<Module> {
-        let def_map = self.id.def_map(db.upcast());
+        let def_map = self.id.def_map(db);
         let parent_id = def_map.containing_module(self.id.local_id)?;
         Some(Module { id: parent_id })
     }
@@ -560,7 +560,7 @@ impl Module {
     pub fn nearest_non_block_module(self, db: &dyn HirDatabase) -> Module {
         let mut id = self.id;
         while id.is_block_module() {
-            id = id.containing_module(db.upcast()).expect("block without parent module");
+            id = id.containing_module(db).expect("block without parent module");
         }
         Module { id }
     }
@@ -581,18 +581,13 @@ impl Module {
         db: &dyn HirDatabase,
         visible_from: Option<Module>,
     ) -> Vec<(Name, ScopeDef)> {
-        self.id.def_map(db.upcast())[self.id.local_id]
+        self.id.def_map(db)[self.id.local_id]
             .scope
             .entries()
             .filter_map(|(name, def)| {
                 if let Some(m) = visible_from {
-                    let filtered =
-                        def.filter_visibility(|vis| vis.is_visible_from(db.upcast(), m.id));
-                    if filtered.is_none() && !def.is_none() {
-                        None
-                    } else {
-                        Some((name, filtered))
-                    }
+                    let filtered = def.filter_visibility(|vis| vis.is_visible_from(db, m.id));
+                    if filtered.is_none() && !def.is_none() { None } else { Some((name, filtered)) }
                 } else {
                     Some((name, def))
                 }
@@ -603,6 +598,18 @@ impl Module {
             .collect()
     }
 
+    pub fn resolve_mod_path(
+        &self,
+        db: &dyn HirDatabase,
+        segments: impl IntoIterator<Item = Name>,
+    ) -> Option<impl Iterator<Item = ItemInNs>> {
+        let items = self
+            .id
+            .resolver(db)
+            .resolve_module_path_in_items(db, &ModPath::from_segments(PathKind::Plain, segments));
+        Some(items.iter_items().map(|(item, _)| item.into()))
+    }
+
     /// Fills `acc` with the module's diagnostics.
     pub fn diagnostics(
         self,
@@ -611,8 +618,8 @@ impl Module {
         style_lints: bool,
     ) {
         let _p = tracing::info_span!("diagnostics", name = ?self.name(db)).entered();
-        let edition = db.crate_graph()[self.id.krate()].edition;
-        let def_map = self.id.def_map(db.upcast());
+        let edition = self.id.krate().data(db).edition;
+        let def_map = self.id.def_map(db);
         for diag in def_map.diagnostics() {
             if diag.in_module != self.id.local_id {
                 // FIXME: This is accidentally quadratic.
@@ -637,7 +644,7 @@ impl Module {
                     acc.extend(def.diagnostics(db, style_lints))
                 }
                 ModuleDef::Trait(t) => {
-                    for diag in db.trait_data_with_diagnostics(t.id).1.iter() {
+                    for diag in db.trait_items_with_diagnostics(t.id).1.iter() {
                         emit_def_diagnostic(db, acc, diag, edition);
                     }
 
@@ -654,46 +661,59 @@ impl Module {
                 ModuleDef::Adt(adt) => {
                     match adt {
                         Adt::Struct(s) => {
-                            let tree_id = s.id.lookup(db.upcast()).id;
-                            let tree_source_maps = tree_id.item_tree_with_source_map(db.upcast()).1;
+                            let source_map = db.struct_signature_with_source_map(s.id).1;
+                            expr_store_diagnostics(db, acc, &source_map);
+                            let source_map = db.variant_fields_with_source_map(s.id.into()).1;
+                            expr_store_diagnostics(db, acc, &source_map);
                             push_ty_diagnostics(
                                 db,
                                 acc,
                                 db.field_types_with_diagnostics(s.id.into()).1,
-                                tree_source_maps.strukt(tree_id.value).item(),
+                                &source_map,
                             );
-                            for diag in db.struct_data_with_diagnostics(s.id).1.iter() {
-                                emit_def_diagnostic(db, acc, diag, edition);
-                            }
                         }
                         Adt::Union(u) => {
-                            let tree_id = u.id.lookup(db.upcast()).id;
-                            let tree_source_maps = tree_id.item_tree_with_source_map(db.upcast()).1;
+                            let source_map = db.union_signature_with_source_map(u.id).1;
+                            expr_store_diagnostics(db, acc, &source_map);
+                            let source_map = db.variant_fields_with_source_map(u.id.into()).1;
+                            expr_store_diagnostics(db, acc, &source_map);
                             push_ty_diagnostics(
                                 db,
                                 acc,
                                 db.field_types_with_diagnostics(u.id.into()).1,
-                                tree_source_maps.union(tree_id.value).item(),
+                                &source_map,
                             );
-                            for diag in db.union_data_with_diagnostics(u.id).1.iter() {
-                                emit_def_diagnostic(db, acc, diag, edition);
-                            }
                         }
                         Adt::Enum(e) => {
-                            for v in e.variants(db) {
-                                let tree_id = v.id.lookup(db.upcast()).id;
-                                let tree_source_maps =
-                                    tree_id.item_tree_with_source_map(db.upcast()).1;
+                            let source_map = db.enum_signature_with_source_map(e.id).1;
+                            expr_store_diagnostics(db, acc, &source_map);
+                            let (variants, diagnostics) = db.enum_variants_with_diagnostics(e.id);
+                            let file = e.id.lookup(db).id.file_id();
+                            let ast_id_map = db.ast_id_map(file);
+                            if let Some(diagnostics) = &diagnostics {
+                                for diag in diagnostics.iter() {
+                                    acc.push(
+                                        InactiveCode {
+                                            node: InFile::new(
+                                                file,
+                                                ast_id_map.get(diag.ast_id).syntax_node_ptr(),
+                                            ),
+                                            cfg: diag.cfg.clone(),
+                                            opts: diag.opts.clone(),
+                                        }
+                                        .into(),
+                                    );
+                                }
+                            }
+                            for &(v, _) in &variants.variants {
+                                let source_map = db.variant_fields_with_source_map(v.into()).1;
                                 push_ty_diagnostics(
                                     db,
                                     acc,
-                                    db.field_types_with_diagnostics(v.id.into()).1,
-                                    tree_source_maps.variant(tree_id.value),
+                                    db.field_types_with_diagnostics(v.into()).1,
+                                    &source_map,
                                 );
-                                acc.extend(ModuleDef::Variant(v).diagnostics(db, style_lints));
-                                for diag in db.enum_variant_data_with_diagnostics(v.id).1.iter() {
-                                    emit_def_diagnostic(db, acc, diag, edition);
-                                }
+                                expr_store_diagnostics(db, acc, &source_map);
                             }
                         }
                     }
@@ -701,13 +721,13 @@ impl Module {
                 }
                 ModuleDef::Macro(m) => emit_macro_def_diagnostics(db, acc, m),
                 ModuleDef::TypeAlias(type_alias) => {
-                    let tree_id = type_alias.id.lookup(db.upcast()).id;
-                    let tree_source_maps = tree_id.item_tree_with_source_map(db.upcast()).1;
+                    let source_map = db.type_alias_signature_with_source_map(type_alias.id).1;
+                    expr_store_diagnostics(db, acc, &source_map);
                     push_ty_diagnostics(
                         db,
                         acc,
                         db.type_for_type_alias_with_diagnostics(type_alias.id).1,
-                        tree_source_maps.type_alias(tree_id.value).item(),
+                        &source_map,
                     );
                     acc.extend(def.diagnostics(db, style_lints));
                 }
@@ -722,15 +742,14 @@ impl Module {
         for impl_def in self.impl_defs(db) {
             GenericDef::Impl(impl_def).diagnostics(db, acc);
 
-            let loc = impl_def.id.lookup(db.upcast());
-            let (tree, tree_source_maps) = loc.id.item_tree_with_source_map(db.upcast());
-            let source_map = tree_source_maps.impl_(loc.id.value).item();
+            let loc = impl_def.id.lookup(db);
+            let tree = loc.id.item_tree(db);
+            let source_map = db.impl_signature_with_source_map(impl_def.id).1;
+            expr_store_diagnostics(db, acc, &source_map);
+
             let node = &tree[loc.id.value];
             let file_id = loc.id.file_id();
-            if file_id
-                .macro_file()
-                .is_some_and(|it| it.kind(db.upcast()) == MacroKind::DeriveBuiltIn)
-            {
+            if file_id.macro_file().is_some_and(|it| it.kind(db) == MacroKind::DeriveBuiltIn) {
                // these expansions come from us, diagnosing them is a waste of resources
                 // FIXME: Once we diagnose the inputs to builtin derives, we should at least extract those diagnostics somehow
                 continue;
@@ -742,7 +761,7 @@ impl Module {
 
             let ast_id_map = db.ast_id_map(file_id);
 
-            for diag in db.impl_data_with_diagnostics(impl_def.id).1.iter() {
+            for diag in db.impl_items_with_diagnostics(impl_def.id).1.iter() {
                 emit_def_diagnostic(db, acc, diag, edition);
             }
 
@@ -780,7 +799,7 @@ impl Module {
                         ))
                     });
                 let res = type_params.chain(lifetime_params).any(|p| {
-                    db.attrs(AttrDefId::GenericParamId(p)).by_key(&sym::may_dangle).exists()
+                    db.attrs(AttrDefId::GenericParamId(p)).by_key(sym::may_dangle).exists()
                 });
                 Some(res)
             })()
@@ -800,13 +819,13 @@ impl Module {
 
             // Negative impls can't have items, don't emit missing items diagnostic for them
             if let (false, Some(trait_)) = (impl_is_negative, trait_) {
-                let items = &db.trait_data(trait_.into()).items;
+                let items = &db.trait_items(trait_.into()).items;
                 let required_items = items.iter().filter(|&(_, assoc)| match *assoc {
-                    AssocItemId::FunctionId(it) => !db.function_data(it).has_body(),
-                    AssocItemId::ConstId(id) => !db.const_data(id).has_body,
-                    AssocItemId::TypeAliasId(it) => db.type_alias_data(it).type_ref.is_none(),
+                    AssocItemId::FunctionId(it) => !db.function_signature(it).has_body(),
+                    AssocItemId::ConstId(id) => !db.const_signature(id).has_body(),
+                    AssocItemId::TypeAliasId(it) => db.type_alias_signature(it).ty.is_none(),
                 });
-                impl_assoc_items_scratch.extend(db.impl_data(impl_def.id).items.iter().cloned());
+                impl_assoc_items_scratch.extend(db.impl_items(impl_def.id).items.iter().cloned());
 
                 let redundant = impl_assoc_items_scratch
                     .iter()
@@ -853,23 +872,23 @@ impl Module {
                 db,
                 acc,
                 db.impl_self_ty_with_diagnostics(impl_def.id).1,
-                source_map,
+                &source_map,
             );
             push_ty_diagnostics(
                 db,
                 acc,
                 db.impl_trait_with_diagnostics(impl_def.id).and_then(|it| it.1),
-                source_map,
+                &source_map,
             );
 
-            for &(_, item) in db.impl_data(impl_def.id).items.iter() {
+            for &(_, item) in db.impl_items(impl_def.id).items.iter() {
                 AssocItem::from(item).diagnostics(db, acc, style_lints);
             }
         }
     }
 
     pub fn declarations(self, db: &dyn HirDatabase) -> Vec<ModuleDef> {
-        let def_map = self.id.def_map(db.upcast());
+        let def_map = self.id.def_map(db);
         let scope = &def_map[self.id.local_id].scope;
         scope
             .declarations()
@@ -879,13 +898,13 @@ impl Module {
     }
 
     pub fn legacy_macros(self, db: &dyn HirDatabase) -> Vec<Macro> {
-        let def_map = self.id.def_map(db.upcast());
+        let def_map = self.id.def_map(db);
         let scope = &def_map[self.id.local_id].scope;
         scope.legacy_macros().flat_map(|(_, it)| it).map(|&it| it.into()).collect()
     }
 
     pub fn impl_defs(self, db: &dyn HirDatabase) -> Vec<Impl> {
-        let def_map = self.id.def_map(db.upcast());
+        let def_map = self.id.def_map(db);
         def_map[self.id.local_id].scope.impls().map(Impl::from).collect()
     }
 
@@ -934,11 +953,12 @@ fn macro_call_diagnostics(
         let file_id = loc.kind.file_id();
         let node =
             InFile::new(file_id, db.ast_id_map(file_id).get_erased(loc.kind.erased_ast_id()));
-        let RenderedExpandError { message, error, kind } = err.render_to_string(db.upcast());
-        let precise_location = if err.span().anchor.file_id == file_id {
+        let RenderedExpandError { message, error, kind } = err.render_to_string(db);
+        let editioned_file_id = EditionedFileId::from_span(db, err.span().anchor.file_id);
+        let precise_location = if editioned_file_id == file_id {
             Some(
                 err.span().range
-                    + db.ast_id_map(err.span().anchor.file_id.into())
+                    + db.ast_id_map(editioned_file_id.into())
                         .get_erased(err.span().anchor.ast_id)
                         .text_range()
                         .start(),
@@ -967,8 +987,8 @@ fn emit_macro_def_diagnostics(db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>
                 never!("declarative expander for non decl-macro: {:?}", e);
                 return;
             };
-            let krate = HasModule::krate(&m.id, db.upcast());
-            let edition = db.crate_graph()[krate].edition;
+            let krate = HasModule::krate(&m.id, db);
+            let edition = krate.data(db).edition;
             emit_def_diagnostic_(
                 db,
                 acc,
@@ -996,7 +1016,7 @@ fn emit_def_diagnostic_(
 ) {
     match diag {
         DefDiagnosticKind::UnresolvedModule { ast: declaration, candidates } => {
-            let decl = declaration.to_ptr(db.upcast());
+            let decl = declaration.to_ptr(db);
             acc.push(
                 UnresolvedModule {
                     decl: InFile::new(declaration.file_id, decl),
@@ -1006,18 +1026,18 @@ fn emit_def_diagnostic_(
             )
         }
         DefDiagnosticKind::UnresolvedExternCrate { ast } => {
-            let item = ast.to_ptr(db.upcast());
+            let item = ast.to_ptr(db);
             acc.push(UnresolvedExternCrate { decl: InFile::new(ast.file_id, item) }.into());
         }
 
         DefDiagnosticKind::MacroError { ast, path, err } => {
-            let item = ast.to_ptr(db.upcast());
-            let RenderedExpandError { message, error, kind } = err.render_to_string(db.upcast());
+            let item = ast.to_ptr(db);
+            let RenderedExpandError { message, error, kind } = err.render_to_string(db);
             acc.push(
                 MacroError {
                     node: InFile::new(ast.file_id, item.syntax_node_ptr()),
                     precise_location: None,
-                    message: format!("{}: {message}", path.display(db.upcast(), edition)),
+                    message: format!("{}: {message}", path.display(db, edition)),
                     error,
                     kind,
                 }
@@ -1026,17 +1046,17 @@ fn emit_def_diagnostic_(
         }
         DefDiagnosticKind::UnresolvedImport { id, index } => {
             let file_id = id.file_id();
-            let item_tree = id.item_tree(db.upcast());
+            let item_tree = id.item_tree(db);
             let import = &item_tree[id.value];
 
-            let use_tree = import.use_tree_to_ast(db.upcast(), file_id, *index);
+            let use_tree = import.use_tree_to_ast(db, file_id, *index);
             acc.push(
                 UnresolvedImport { decl: InFile::new(file_id, AstPtr::new(&use_tree)) }.into(),
             );
         }
 
         DefDiagnosticKind::UnconfiguredCode { tree, item, cfg, opts } => {
-            let item_tree = tree.item_tree(db.upcast());
+            let item_tree = tree.item_tree(db);
             let ast_id_map = db.ast_id_map(tree.file_id());
             // FIXME: This parses... We could probably store relative ranges for the children things
             // here in the item tree?
@@ -1058,7 +1078,7 @@ fn emit_def_diagnostic_(
                     AttrOwner::Variant(it) => {
                         ast_id_map.get(item_tree[it].ast_id).syntax_node_ptr()
                     }
-                    AttrOwner::Field(FieldParent::Variant(parent), idx) => process_field_list(
+                    AttrOwner::Field(FieldParent::EnumVariant(parent), idx) => process_field_list(
                         ast_id_map
                             .get(item_tree[parent].ast_id)
                             .to_node(&db.parse_or_expand(tree.file_id()))
@@ -1081,33 +1101,6 @@ fn emit_def_diagnostic_(
                             .nth(idx.into_raw().into_u32() as usize)?
                             .syntax(),
                     ),
-                    AttrOwner::Param(parent, idx) => SyntaxNodePtr::new(
-                        ast_id_map
-                            .get(item_tree[parent.index()].ast_id)
-                            .to_node(&db.parse_or_expand(tree.file_id()))
-                            .param_list()?
-                            .params()
-                            .nth(idx.into_raw().into_u32() as usize)?
-                            .syntax(),
-                    ),
-                    AttrOwner::TypeOrConstParamData(parent, idx) => SyntaxNodePtr::new(
-                        ast_id_map
-                            .get(parent.ast_id(&item_tree))
-                            .to_node(&db.parse_or_expand(tree.file_id()))
-                            .generic_param_list()?
-                            .type_or_const_params()
-                            .nth(idx.into_raw().into_u32() as usize)?
-                            .syntax(),
-                    ),
-                    AttrOwner::LifetimeParamData(parent, idx) => SyntaxNodePtr::new(
-                        ast_id_map
-                            .get(parent.ast_id(&item_tree))
-                            .to_node(&db.parse_or_expand(tree.file_id()))
-                            .generic_param_list()?
-                            .lifetime_params()
-                            .nth(idx.into_raw().into_u32() as usize)?
-                            .syntax(),
-                    ),
                 };
                 acc.push(
                     InactiveCode {
@@ -1133,7 +1126,7 @@ fn emit_def_diagnostic_(
             );
         }
         DefDiagnosticKind::UnimplementedBuiltinMacro { ast } => {
-            let node = ast.to_node(db.upcast());
+            let node = ast.to_node(db);
             // Must have a name, otherwise we wouldn't emit it.
             let name = node.name().expect("unimplemented builtin macro with no name");
             acc.push(
@@ -1144,7 +1137,7 @@ fn emit_def_diagnostic_(
             );
         }
         DefDiagnosticKind::InvalidDeriveTarget { ast, id } => {
-            let node = ast.to_node(db.upcast());
+            let node = ast.to_node(db);
             let derive = node.attrs().nth(*id);
             match derive {
                 Some(derive) => {
@@ -1159,7 +1152,7 @@ fn emit_def_diagnostic_(
             }
         }
         DefDiagnosticKind::MalformedDerive { ast, id } => {
-            let node = ast.to_node(db.upcast());
+            let node = ast.to_node(db);
             let derive = node.attrs().nth(*id);
             match derive {
                 Some(derive) => {
@@ -1174,7 +1167,7 @@ fn emit_def_diagnostic_(
             }
         }
         DefDiagnosticKind::MacroDefError { ast, message } => {
-            let node = ast.to_node(db.upcast());
+            let node = ast.to_node(db);
             acc.push(
                 MacroDefError {
                     node: InFile::new(ast.file_id, AstPtr::new(&node)),
@@ -1195,7 +1188,7 @@ fn precise_macro_call_location(
     // - e.g. the full attribute for macro errors, but only the name for name resolution
     match ast {
         MacroCallKind::FnLike { ast_id, .. } => {
-            let node = ast_id.to_node(db.upcast());
+            let node = ast_id.to_node(db);
             (
                 ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))),
                 node.path()
@@ -1205,7 +1198,7 @@ fn precise_macro_call_location(
             )
         }
         MacroCallKind::Derive { ast_id, derive_attr_index, derive_index, .. } => {
-            let node = ast_id.to_node(db.upcast());
+            let node = ast_id.to_node(db);
             // Compute the precise location of the macro name's token in the derive
             // list.
             let token = (|| {
@@ -1213,15 +1206,15 @@ fn precise_macro_call_location(
                     .nth(derive_attr_index.ast_index())
                     .and_then(|x| Either::left(x.1))?;
                 let token_tree = derive_attr.meta()?.token_tree()?;
-                let group_by = token_tree
+                let chunk_by = token_tree
                     .syntax()
                     .children_with_tokens()
                     .filter_map(|elem| match elem {
                         syntax::NodeOrToken::Token(tok) => Some(tok),
                         _ => None,
                     })
-                    .group_by(|t| t.kind() == T![,]);
-                let (_, mut group) = group_by
+                    .chunk_by(|t| t.kind() == T![,]);
+                let (_, mut group) = chunk_by
                     .into_iter()
                     .filter(|&(comma, _)| !comma)
                     .nth(*derive_index as usize)?;
@@ -1233,7 +1226,7 @@ fn precise_macro_call_location(
             )
         }
         MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
-            let node = ast_id.to_node(db.upcast());
+            let node = ast_id.to_node(db);
             let attr = collect_attrs(&node)
                 .nth(invoc_attr_index.ast_index())
                 .and_then(|x| Either::left(x.1))
@@ -1251,7 +1244,7 @@ fn precise_macro_call_location(
 
 impl HasVisibility for Module {
     fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
-        let def_map = self.id.def_map(db.upcast());
+        let def_map = self.id.def_map(db);
         let module_data = &def_map[self.id.local_id];
         module_data.visibility
     }
@@ -1323,7 +1316,7 @@ impl AstNode for FieldSource {
 
 impl Field {
     pub fn name(&self, db: &dyn HirDatabase) -> Name {
-        self.parent.variant_data(db).fields()[self.id].name.clone()
+        db.variant_fields(self.parent.into()).fields()[self.id].name.clone()
     }
 
     pub fn index(&self) -> usize {
@@ -1338,7 +1331,7 @@ impl Field {
         let generic_def_id: GenericDefId = match self.parent {
             VariantDef::Struct(it) => it.id.into(),
             VariantDef::Union(it) => it.id.into(),
-            VariantDef::Variant(it) => it.id.lookup(db.upcast()).parent.into(),
+            VariantDef::Variant(it) => it.id.lookup(db).parent.into(),
         };
         let substs = TyBuilder::placeholder_subst(db, generic_def_id);
         let ty = db.field_types(var_id)[self.id].clone().substitute(Interner, &substs);
@@ -1372,7 +1365,7 @@ impl Field {
             self.ty(db).ty,
             db.trait_environment(match hir_def::VariantId::from(self.parent) {
                 hir_def::VariantId::EnumVariantId(id) => {
-                    GenericDefId::AdtId(id.lookup(db.upcast()).parent.into())
+                    GenericDefId::AdtId(id.lookup(db).parent.into())
                 }
                 hir_def::VariantId::StructId(id) => GenericDefId::AdtId(id.into()),
                 hir_def::VariantId::UnionId(id) => GenericDefId::AdtId(id.into()),
@@ -1388,10 +1381,11 @@ impl Field {
 
 impl HasVisibility for Field {
     fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
-        let variant_data = self.parent.variant_data(db);
+        let variant_data = db.variant_fields(self.parent.into());
         let visibility = &variant_data.fields()[self.id].visibility;
         let parent_id: hir_def::VariantId = self.parent.into();
-        visibility.resolve(db.upcast(), &parent_id.resolver(db.upcast()))
+        // FIXME: RawVisibility::Public doesn't need to construct a resolver
+        Visibility::resolve(db, &parent_id.resolver(db), visibility)
     }
 }
 
@@ -1402,16 +1396,15 @@ pub struct Struct {
 
 impl Struct {
     pub fn module(self, db: &dyn HirDatabase) -> Module {
-        Module { id: self.id.lookup(db.upcast()).container }
+        Module { id: self.id.lookup(db).container }
     }
 
     pub fn name(self, db: &dyn HirDatabase) -> Name {
-        db.struct_data(self.id).name.clone()
+        db.struct_signature(self.id).name.clone()
     }
 
     pub fn fields(self, db: &dyn HirDatabase) -> Vec<Field> {
-        db.struct_data(self.id)
-            .variant_data
+        db.variant_fields(self.id.into())
             .fields()
             .iter()
             .map(|(id, _)| Field { parent: self.into(), id })
@@ -1431,15 +1424,19 @@ impl Struct {
     }
 
     pub fn repr(self, db: &dyn HirDatabase) -> Option<ReprOptions> {
-        db.struct_data(self.id).repr
+        db.struct_signature(self.id).repr
     }
 
     pub fn kind(self, db: &dyn HirDatabase) -> StructKind {
-        self.variant_data(db).kind()
+        match self.variant_fields(db).shape {
+            hir_def::item_tree::FieldsShape::Record => StructKind::Record,
+            hir_def::item_tree::FieldsShape::Tuple => StructKind::Tuple,
+            hir_def::item_tree::FieldsShape::Unit => StructKind::Unit,
+        }
     }
 
-    fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
-        db.struct_data(self.id).variant_data.clone()
+    fn variant_fields(self, db: &dyn HirDatabase) -> Arc<VariantFields> {
+        db.variant_fields(self.id.into())
     }
 
     pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
@@ -1449,7 +1446,13 @@ impl Struct {
 
 impl HasVisibility for Struct {
     fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
-        db.struct_data(self.id).visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
+        let loc = self.id.lookup(db);
+        let item_tree = loc.id.item_tree(db);
+        Visibility::resolve(
+            db,
+            &self.id.resolver(db),
+            &item_tree[item_tree[loc.id.value].visibility],
+        )
     }
 }
 
@@ -1460,11 +1463,11 @@ pub struct Union {
 
 impl Union {
     pub fn name(self, db: &dyn HirDatabase) -> Name {
-        db.union_data(self.id).name.clone()
+        db.union_signature(self.id).name.clone()
     }
 
     pub fn module(self, db: &dyn HirDatabase) -> Module {
-        Module { id: self.id.lookup(db.upcast()).container }
+        Module { id: self.id.lookup(db).container }
     }
 
     pub fn ty(self, db: &dyn HirDatabase) -> Type {
@@ -1479,19 +1482,21 @@ impl Union {
         Type::from_value_def(db, self.id)
     }
 
+    pub fn kind(self, db: &dyn HirDatabase) -> StructKind {
+        match db.variant_fields(self.id.into()).shape {
+            hir_def::item_tree::FieldsShape::Record => StructKind::Record,
+            hir_def::item_tree::FieldsShape::Tuple => StructKind::Tuple,
+            hir_def::item_tree::FieldsShape::Unit => StructKind::Unit,
+        }
+    }
+
     pub fn fields(self, db: &dyn HirDatabase) -> Vec<Field> {
-        db.union_data(self.id)
-            .variant_data
+        db.variant_fields(self.id.into())
             .fields()
             .iter()
             .map(|(id, _)| Field { parent: self.into(), id })
             .collect()
     }
-
-    fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
-        db.union_data(self.id).variant_data.clone()
-    }
-
     pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
         db.attrs(self.id.into()).is_unstable()
     }
@@ -1499,7 +1504,13 @@ impl Union {
 
 impl HasVisibility for Union {
     fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
-        db.union_data(self.id).visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
+        let loc = self.id.lookup(db);
+        let item_tree = loc.id.item_tree(db);
+        Visibility::resolve(
+            db,
+            &self.id.resolver(db),
+            &item_tree[item_tree[loc.id.value].visibility],
+        )
     }
 }
 
@@ -1510,19 +1521,23 @@ pub struct Enum {
 
 impl Enum {
     pub fn module(self, db: &dyn HirDatabase) -> Module {
-        Module { id: self.id.lookup(db.upcast()).container }
+        Module { id: self.id.lookup(db).container }
     }
 
     pub fn name(self, db: &dyn HirDatabase) -> Name {
-        db.enum_data(self.id).name.clone()
+        db.enum_signature(self.id).name.clone()
     }
 
     pub fn variants(self, db: &dyn HirDatabase) -> Vec<Variant> {
-        db.enum_data(self.id).variants.iter().map(|&(id, _)| Variant { id }).collect()
+        db.enum_variants(self.id).variants.iter().map(|&(id, _)| Variant { id }).collect()
+    }
+
+    pub fn num_variants(self, db: &dyn HirDatabase) -> usize {
+        db.enum_variants(self.id).variants.len()
     }
 
     pub fn repr(self, db: &dyn HirDatabase) -> Option<ReprOptions> {
-        db.enum_data(self.id).repr
+        db.enum_signature(self.id).repr
     }
 
     pub fn ty(self, db: &dyn HirDatabase) -> Type {
@@ -1536,8 +1551,8 @@ impl Enum {
     /// The type of the enum variant bodies.
     pub fn variant_body_ty(self, db: &dyn HirDatabase) -> Type {
         Type::new_for_crate(
-            self.id.lookup(db.upcast()).container.krate(),
-            TyBuilder::builtin(match db.enum_data(self.id).variant_body_type() {
+            self.id.lookup(db).container.krate(),
+            TyBuilder::builtin(match db.enum_signature(self.id).variant_body_type() {
                 layout::IntegerType::Pointer(sign) => match sign {
                     true => hir_def::builtin_type::BuiltinType::Int(
                         hir_def::builtin_type::BuiltinInt::Isize,
@@ -1582,7 +1597,13 @@ impl Enum {
 
 impl HasVisibility for Enum {
     fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
-        db.enum_data(self.id).visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
+        let loc = self.id.lookup(db);
+        let item_tree = loc.id.item_tree(db);
+        Visibility::resolve(
+            db,
+            &self.id.resolver(db),
+            &item_tree[item_tree[loc.id.value].visibility],
+        )
     }
 }
 
@@ -1600,11 +1621,11 @@ pub struct Variant {
 
 impl Variant {
     pub fn module(self, db: &dyn HirDatabase) -> Module {
-        Module { id: self.id.module(db.upcast()) }
+        Module { id: self.id.module(db) }
     }
 
     pub fn parent_enum(self, db: &dyn HirDatabase) -> Enum {
-        self.id.lookup(db.upcast()).parent.into()
+        self.id.lookup(db).parent.into()
     }
 
     pub fn constructor_ty(self, db: &dyn HirDatabase) -> Type {
@@ -1612,11 +1633,13 @@ impl Variant {
     }
 
     pub fn name(self, db: &dyn HirDatabase) -> Name {
-        db.enum_variant_data(self.id).name.clone()
+        let lookup = self.id.lookup(db);
+        let enum_ = lookup.parent;
+        db.enum_variants(enum_).variants[lookup.index as usize].1.clone()
     }
 
     pub fn fields(self, db: &dyn HirDatabase) -> Vec<Field> {
-        self.variant_data(db)
+        db.variant_fields(self.id.into())
             .fields()
             .iter()
             .map(|(id, _)| Field { parent: self.into(), id })
@@ -1624,11 +1647,11 @@ impl Variant {
     }
 
     pub fn kind(self, db: &dyn HirDatabase) -> StructKind {
-        self.variant_data(db).kind()
-    }
-
-    pub(crate) fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
-        db.enum_variant_data(self.id).variant_data.clone()
+        match db.variant_fields(self.id.into()).shape {
+            hir_def::item_tree::FieldsShape::Record => StructKind::Record,
+            hir_def::item_tree::FieldsShape::Tuple => StructKind::Tuple,
+            hir_def::item_tree::FieldsShape::Unit => StructKind::Unit,
+        }
     }
 
     pub fn value(self, db: &dyn HirDatabase) -> Option<ast::Expr> {
@@ -1645,7 +1668,7 @@ impl Variant {
         Ok(match &parent_layout.0.variants {
             layout::Variants::Multiple { variants, .. } => Layout(
                 {
-                    let lookup = self.id.lookup(db.upcast());
+                    let lookup = self.id.lookup(db);
                     let rustc_enum_variant_idx = RustcEnumVariantIdx(lookup.index as usize);
                     Arc::new(variants[rustc_enum_variant_idx].clone())
                 },
@@ -1660,6 +1683,13 @@ impl Variant {
     }
 }
 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum StructKind {
+    Record,
+    Tuple,
+    Unit,
+}
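
// Illustrative sketch (not part of the patch): the shape-to-kind mapping now repeated in
// `Struct::kind`, `Union::kind` and `Variant::kind`, which read `db.variant_fields(..).shape`
// instead of the removed `variant_data(..).kind()`. `FieldsShape` is a simplified stand-in for
// `hir_def::item_tree::FieldsShape`; `StructKind` mirrors the enum added above.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum FieldsShape {
    Record, // `struct S { a: u32 }`
    Tuple,  // `struct S(u32);`
    Unit,   // `struct S;`
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum StructKind {
    Record,
    Tuple,
    Unit,
}

fn kind_from_shape(shape: FieldsShape) -> StructKind {
    match shape {
        FieldsShape::Record => StructKind::Record,
        FieldsShape::Tuple => StructKind::Tuple,
        FieldsShape::Unit => StructKind::Unit,
    }
}

fn main() {
    assert_eq!(kind_from_shape(FieldsShape::Tuple), StructKind::Tuple);
}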
+
 /// Variants inherit visibility from the parent enum.
 impl HasVisibility for Variant {
     fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
@@ -1679,10 +1709,11 @@ impl_from!(Struct, Union, Enum for Adt);
 impl Adt {
     pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool {
         let subst = db.generic_defaults(self.into());
-        subst.iter().any(|ty| match ty.skip_binders().data(Interner) {
-            GenericArgData::Ty(it) => it.is_unknown(),
-            _ => false,
-        })
+        (subst.is_empty() && db.generic_params(self.into()).len_type_or_consts() != 0)
+            || subst.iter().any(|ty| match ty.skip_binders().data(Interner) {
+                GenericArgData::Ty(it) => it.is_unknown(),
+                _ => false,
+            })
     }
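
// Illustrative sketch (not part of the patch): the new short-circuit in
// `has_non_default_type_params`. If the defaults query returned nothing at all while the item
// does declare type/const parameters, the item is treated as having non-defaulted parameters;
// otherwise any "unknown" default counts. The types below are simplified stand-ins for the chalk
// values used in the real code.
#[derive(Clone, Copy)]
enum DefaultTy {
    Known,
    Unknown, // corresponds to `it.is_unknown()` on the chalk type
}

fn has_non_default_type_params(defaults: &[DefaultTy], declared_params: usize) -> bool {
    (defaults.is_empty() && declared_params != 0)
        || defaults.iter().any(|d| matches!(d, DefaultTy::Unknown))
}

fn main() {
    // A signature like `fn f<T>()` whose defaults query returned nothing at all.
    assert!(has_non_default_type_params(&[], 1));
    // `struct S<T = u8>` with a known default.
    assert!(!has_non_default_type_params(&[DefaultTy::Known], 1));
}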
 
     pub fn layout(self, db: &dyn HirDatabase) -> Result<Layout, LayoutError> {
@@ -1741,9 +1772,9 @@ impl Adt {
     /// Returns the lifetime of the DataType
     pub fn lifetime(&self, db: &dyn HirDatabase) -> Option<LifetimeParamData> {
         let resolver = match self {
-            Adt::Struct(s) => s.id.resolver(db.upcast()),
-            Adt::Union(u) => u.id.resolver(db.upcast()),
-            Adt::Enum(e) => e.id.resolver(db.upcast()),
+            Adt::Struct(s) => s.id.resolver(db),
+            Adt::Union(u) => u.id.resolver(db),
+            Adt::Enum(e) => e.id.resolver(db),
         };
         resolver
             .generic_params()
@@ -1757,19 +1788,11 @@ impl Adt {
     }
 
     pub fn as_struct(&self) -> Option<Struct> {
-        if let Self::Struct(v) = self {
-            Some(*v)
-        } else {
-            None
-        }
+        if let Self::Struct(v) = self { Some(*v) } else { None }
     }
 
     pub fn as_enum(&self) -> Option<Enum> {
-        if let Self::Enum(v) = self {
-            Some(*v)
-        } else {
-            None
-        }
+        if let Self::Enum(v) = self { Some(*v) } else { None }
     }
 }
 
@@ -1815,14 +1838,6 @@ impl VariantDef {
             VariantDef::Variant(e) => e.name(db),
         }
     }
-
-    pub(crate) fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
-        match self {
-            VariantDef::Struct(it) => it.variant_data(db),
-            VariantDef::Union(it) => it.variant_data(db),
-            VariantDef::Variant(it) => it.variant_data(db),
-        }
-    }
 }
 
 /// The defs which have a body.
@@ -1832,9 +1847,8 @@ pub enum DefWithBody {
     Static(Static),
     Const(Const),
     Variant(Variant),
-    InTypeConst(InTypeConst),
 }
-impl_from!(Function, Const, Static, Variant, InTypeConst for DefWithBody);
+impl_from!(Function, Const, Static, Variant for DefWithBody);
 
 impl DefWithBody {
     pub fn module(self, db: &dyn HirDatabase) -> Module {
@@ -1843,7 +1857,6 @@ impl DefWithBody {
             DefWithBody::Function(f) => f.module(db),
             DefWithBody::Static(s) => s.module(db),
             DefWithBody::Variant(v) => v.module(db),
-            DefWithBody::InTypeConst(c) => c.module(db),
         }
     }
 
@@ -1853,7 +1866,6 @@ impl DefWithBody {
             DefWithBody::Static(s) => Some(s.name(db)),
             DefWithBody::Const(c) => c.name(db),
             DefWithBody::Variant(v) => Some(v.name(db)),
-            DefWithBody::InTypeConst(_) => None,
         }
     }
 
@@ -1864,11 +1876,6 @@ impl DefWithBody {
             DefWithBody::Static(it) => it.ty(db),
             DefWithBody::Const(it) => it.ty(db),
             DefWithBody::Variant(it) => it.parent_enum(db).variant_body_ty(db),
-            DefWithBody::InTypeConst(it) => Type::new_with_resolver_inner(
-                db,
-                &DefWithBodyId::from(it.id).resolver(db.upcast()),
-                TyKind::Error.intern(Interner),
-            ),
         }
     }
 
@@ -1878,14 +1885,13 @@ impl DefWithBody {
             DefWithBody::Static(it) => it.id.into(),
             DefWithBody::Const(it) => it.id.into(),
             DefWithBody::Variant(it) => it.into(),
-            DefWithBody::InTypeConst(it) => it.id.into(),
         }
     }
 
     /// A textual representation of the HIR of this def's body for debugging purposes.
     pub fn debug_hir(self, db: &dyn HirDatabase) -> String {
         let body = db.body(self.id());
-        body.pretty_print(db.upcast(), self.id(), Edition::CURRENT)
+        body.pretty_print(db, self.id(), Edition::CURRENT)
     }
 
     /// A textual representation of the MIR of this def's body for debugging purposes.
@@ -1906,83 +1912,25 @@ impl DefWithBody {
         let krate = self.module(db).id.krate();
 
         let (body, source_map) = db.body_with_source_map(self.into());
-        let item_tree_source_maps;
-        let outer_types_source_map = match self {
-            DefWithBody::Function(function) => {
-                let function = function.id.lookup(db.upcast()).id;
-                item_tree_source_maps = function.item_tree_with_source_map(db.upcast()).1;
-                item_tree_source_maps.function(function.value).item()
+        let sig_source_map = match self {
+            DefWithBody::Function(id) => db.function_signature_with_source_map(id.into()).1,
+            DefWithBody::Static(id) => db.static_signature_with_source_map(id.into()).1,
+            DefWithBody::Const(id) => db.const_signature_with_source_map(id.into()).1,
+            DefWithBody::Variant(variant) => {
+                let enum_id = variant.parent_enum(db).id;
+                db.enum_signature_with_source_map(enum_id).1
             }
-            DefWithBody::Static(statik) => {
-                let statik = statik.id.lookup(db.upcast()).id;
-                item_tree_source_maps = statik.item_tree_with_source_map(db.upcast()).1;
-                item_tree_source_maps.statik(statik.value)
-            }
-            DefWithBody::Const(konst) => {
-                let konst = konst.id.lookup(db.upcast()).id;
-                item_tree_source_maps = konst.item_tree_with_source_map(db.upcast()).1;
-                item_tree_source_maps.konst(konst.value)
-            }
-            DefWithBody::Variant(_) | DefWithBody::InTypeConst(_) => &TypesSourceMap::EMPTY,
         };
 
-        for (_, def_map) in body.blocks(db.upcast()) {
+        for (_, def_map) in body.blocks(db) {
             Module { id: def_map.module_id(DefMap::ROOT) }.diagnostics(db, acc, style_lints);
         }
 
         source_map
             .macro_calls()
-            .for_each(|(_ast_id, call_id)| macro_call_diagnostics(db, call_id.macro_call_id, acc));
+            .for_each(|(_ast_id, call_id)| macro_call_diagnostics(db, call_id, acc));
 
-        for diag in source_map.diagnostics() {
-            acc.push(match diag {
-                ExpressionStoreDiagnostics::InactiveCode { node, cfg, opts } => {
-                    InactiveCode { node: *node, cfg: cfg.clone(), opts: opts.clone() }.into()
-                }
-                ExpressionStoreDiagnostics::MacroError { node, err } => {
-                    let RenderedExpandError { message, error, kind } =
-                        err.render_to_string(db.upcast());
-
-                    let precise_location = if err.span().anchor.file_id == node.file_id {
-                        Some(
-                            err.span().range
-                                + db.ast_id_map(err.span().anchor.file_id.into())
-                                    .get_erased(err.span().anchor.ast_id)
-                                    .text_range()
-                                    .start(),
-                        )
-                    } else {
-                        None
-                    };
-                    MacroError {
-                        node: (*node).map(|it| it.into()),
-                        precise_location,
-                        message,
-                        error,
-                        kind,
-                    }
-                    .into()
-                }
-                ExpressionStoreDiagnostics::UnresolvedMacroCall { node, path } => {
-                    UnresolvedMacroCall {
-                        macro_call: (*node).map(|ast_ptr| ast_ptr.into()),
-                        precise_location: None,
-                        path: path.clone(),
-                        is_bang: true,
-                    }
-                    .into()
-                }
-                ExpressionStoreDiagnostics::AwaitOutsideOfAsync { node, location } => {
-                    AwaitOutsideOfAsync { node: *node, location: location.clone() }.into()
-                }
-                ExpressionStoreDiagnostics::UnreachableLabel { node, name } => {
-                    UnreachableLabel { node: *node, name: name.clone() }.into()
-                }
-                ExpressionStoreDiagnostics::UndeclaredLabel { node, name } => {
-                    UndeclaredLabel { node: *node, name: name.clone() }.into()
-                }
-            });
-        }
+        expr_store_diagnostics(db, acc, &source_map);
 
         let infer = db.infer(self.into());
         for d in &infer.diagnostics {
@@ -1990,8 +1938,8 @@ impl DefWithBody {
                 db,
                 self.into(),
                 d,
-                outer_types_source_map,
                 &source_map,
+                &sig_source_map,
             ));
         }
 
@@ -2109,7 +2057,7 @@ impl DefWithBody {
                         continue;
                     }
                     let mut need_mut = &mol[local];
-                    if body[binding_id].name == sym::self_.clone()
+                    if body[binding_id].name == sym::self_
                         && need_mut == &mir::MutabilityReason::Unused
                     {
                         need_mut = &mir::MutabilityReason::Not;
@@ -2179,14 +2127,66 @@ impl DefWithBody {
             DefWithBody::Static(it) => it.into(),
             DefWithBody::Const(it) => it.into(),
             DefWithBody::Variant(it) => it.into(),
-            // FIXME: don't ignore diagnostics for in type const
-            DefWithBody::InTypeConst(_) => return,
         };
         for diag in hir_ty::diagnostics::incorrect_case(db, def.into()) {
             acc.push(diag.into())
         }
     }
 }
+
+fn expr_store_diagnostics(
+    db: &dyn HirDatabase,
+    acc: &mut Vec<AnyDiagnostic>,
+    source_map: &ExpressionStoreSourceMap,
+) {
+    for diag in source_map.diagnostics() {
+        acc.push(match diag {
+            ExpressionStoreDiagnostics::InactiveCode { node, cfg, opts } => {
+                InactiveCode { node: *node, cfg: cfg.clone(), opts: opts.clone() }.into()
+            }
+            ExpressionStoreDiagnostics::MacroError { node, err } => {
+                let RenderedExpandError { message, error, kind } = err.render_to_string(db);
+
+                let editioned_file_id = EditionedFileId::from_span(db, err.span().anchor.file_id);
+                let precise_location = if editioned_file_id == node.file_id {
+                    Some(
+                        err.span().range
+                            + db.ast_id_map(editioned_file_id.into())
+                                .get_erased(err.span().anchor.ast_id)
+                                .text_range()
+                                .start(),
+                    )
+                } else {
+                    None
+                };
+                MacroError {
+                    node: (node).map(|it| it.into()),
+                    precise_location,
+                    message,
+                    error,
+                    kind,
+                }
+                .into()
+            }
+            ExpressionStoreDiagnostics::UnresolvedMacroCall { node, path } => UnresolvedMacroCall {
+                macro_call: (*node).map(|ast_ptr| ast_ptr.into()),
+                precise_location: None,
+                path: path.clone(),
+                is_bang: true,
+            }
+            .into(),
+            ExpressionStoreDiagnostics::AwaitOutsideOfAsync { node, location } => {
+                AwaitOutsideOfAsync { node: *node, location: location.clone() }.into()
+            }
+            ExpressionStoreDiagnostics::UnreachableLabel { node, name } => {
+                UnreachableLabel { node: *node, name: name.clone() }.into()
+            }
+            ExpressionStoreDiagnostics::UndeclaredLabel { node, name } => {
+                UndeclaredLabel { node: *node, name: name.clone() }.into()
+            }
+        });
+    }
+}
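
// Illustrative sketch (not part of the patch): the `precise_location` arithmetic in
// `expr_store_diagnostics`. The macro error's span is relative to its anchoring syntax node, so
// it only becomes a file-level range once the anchor's start offset is added, and only when the
// error actually points into the same file as the diagnosed node. Plain integers stand in for
// `TextRange`/`TextSize`, and `FileId` is a simplified stand-in.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct FileId(u32);

fn precise_location(
    node_file: FileId,
    error_file: FileId,
    error_range_in_anchor: (u32, u32),
    anchor_start_in_file: u32,
) -> Option<(u32, u32)> {
    (error_file == node_file).then(|| {
        let (start, end) = error_range_in_anchor;
        (start + anchor_start_in_file, end + anchor_start_in_file)
    })
}

fn main() {
    // An error at offsets 2..5 inside an anchor that itself starts at offset 40 in the file.
    assert_eq!(precise_location(FileId(1), FileId(1), (2, 5), 40), Some((42, 45)));
    // Errors anchored in a different file get no precise location.
    assert_eq!(precise_location(FileId(1), FileId(2), (2, 5), 40), None);
}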
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct Function {
     pub(crate) id: FunctionId,
@@ -2194,11 +2194,11 @@ pub struct Function {
 
 impl Function {
     pub fn module(self, db: &dyn HirDatabase) -> Module {
-        self.id.module(db.upcast()).into()
+        self.id.module(db).into()
     }
 
     pub fn name(self, db: &dyn HirDatabase) -> Name {
-        db.function_data(self.id).name.clone()
+        db.function_signature(self.id).name.clone()
     }
 
     pub fn ty(self, db: &dyn HirDatabase) -> Type {
@@ -2206,7 +2206,7 @@ impl Function {
     }
 
     pub fn fn_ptr_type(self, db: &dyn HirDatabase) -> Type {
-        let resolver = self.id.resolver(db.upcast());
+        let resolver = self.id.resolver(db);
         let substs = TyBuilder::placeholder_subst(db, self.id);
         let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
         let ty = TyKind::Function(callable_sig.to_fn_ptr()).intern(Interner);
@@ -2215,7 +2215,7 @@ impl Function {
 
     /// Get this function's return type
     pub fn ret_type(self, db: &dyn HirDatabase) -> Type {
-        let resolver = self.id.resolver(db.upcast());
+        let resolver = self.id.resolver(db);
         let substs = TyBuilder::placeholder_subst(db, self.id);
         let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
         let ty = callable_sig.ret().clone();
@@ -2228,8 +2228,8 @@ impl Function {
         db: &dyn HirDatabase,
         generics: impl Iterator<Item = Type>,
     ) -> Type {
-        let resolver = self.id.resolver(db.upcast());
-        let parent_id: Option<GenericDefId> = match self.id.lookup(db.upcast()).container {
+        let resolver = self.id.resolver(db);
+        let parent_id: Option<GenericDefId> = match self.id.lookup(db).container {
             ItemContainerId::ImplId(it) => Some(it.into()),
             ItemContainerId::TraitId(it) => Some(it.into()),
             ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None,
@@ -2256,7 +2256,7 @@ impl Function {
         if !self.is_async(db) {
             return None;
         }
-        let resolver = self.id.resolver(db.upcast());
+        let resolver = self.id.resolver(db);
         let substs = TyBuilder::placeholder_subst(db, self.id);
         let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
         let ret_ty = callable_sig.ret().clone();
@@ -2269,7 +2269,7 @@ impl Function {
     }
 
     pub fn has_self_param(self, db: &dyn HirDatabase) -> bool {
-        db.function_data(self.id).has_self_param()
+        db.function_signature(self.id).has_self_param()
     }
 
     pub fn self_param(self, db: &dyn HirDatabase) -> Option<SelfParam> {
@@ -2292,7 +2292,7 @@ impl Function {
     }
 
     pub fn num_params(self, db: &dyn HirDatabase) -> usize {
-        db.function_data(self.id).params.len()
+        db.function_signature(self.id).params.len()
     }
 
     pub fn method_params(self, db: &dyn HirDatabase) -> Option<Vec<Param>> {
@@ -2304,7 +2304,7 @@ impl Function {
         let environment = db.trait_environment(self.id.into());
         let substs = TyBuilder::placeholder_subst(db, self.id);
         let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
-        let skip = if db.function_data(self.id).has_self_param() { 1 } else { 0 };
+        let skip = if db.function_signature(self.id).has_self_param() { 1 } else { 0 };
         callable_sig
             .params()
             .iter()
@@ -2324,7 +2324,7 @@ impl Function {
         generics: impl Iterator<Item = Type>,
     ) -> Vec<Param> {
         let environment = db.trait_environment(self.id.into());
-        let parent_id: Option<GenericDefId> = match self.id.lookup(db.upcast()).container {
+        let parent_id: Option<GenericDefId> = match self.id.lookup(db).container {
             ItemContainerId::ImplId(it) => Some(it.into()),
             ItemContainerId::TraitId(it) => Some(it.into()),
             ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None,
@@ -2350,7 +2350,7 @@ impl Function {
             })
             .build();
         let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
-        let skip = if db.function_data(self.id).has_self_param() { 1 } else { 0 };
+        let skip = if db.function_signature(self.id).has_self_param() { 1 } else { 0 };
         callable_sig
             .params()
             .iter()
@@ -2364,15 +2364,19 @@ impl Function {
     }
 
     pub fn is_const(self, db: &dyn HirDatabase) -> bool {
-        db.function_data(self.id).is_const()
+        db.function_signature(self.id).is_const()
     }
 
     pub fn is_async(self, db: &dyn HirDatabase) -> bool {
-        db.function_data(self.id).is_async()
+        db.function_signature(self.id).is_async()
+    }
+
+    pub fn is_varargs(self, db: &dyn HirDatabase) -> bool {
+        db.function_signature(self.id).is_varargs()
     }
 
     pub fn extern_block(self, db: &dyn HirDatabase) -> Option<ExternBlock> {
-        match self.id.lookup(db.upcast()).container {
+        match self.id.lookup(db).container {
             ItemContainerId::ExternBlockId(id) => Some(ExternBlock { id }),
             _ => None,
         }
@@ -2415,7 +2419,7 @@ impl Function {
     /// is this a `fn main` or a function with an `export_name` of `main`?
     pub fn is_main(self, db: &dyn HirDatabase) -> bool {
         db.attrs(self.id.into()).export_name() == Some(&sym::main)
-            || self.module(db).is_crate_root() && db.function_data(self.id).name == sym::main
+            || self.module(db).is_crate_root() && db.function_signature(self.id).name == sym::main
     }
 
     /// Is this a function with an `export_name` of `main`?
@@ -2457,7 +2461,7 @@ impl Function {
     ///
     /// This is false in the case of required (not provided) trait methods.
     pub fn has_body(self, db: &dyn HirDatabase) -> bool {
-        db.function_data(self.id).has_body()
+        db.function_signature(self.id).has_body()
     }
 
     pub fn as_proc_macro(self, db: &dyn HirDatabase) -> Option<Macro> {
@@ -2469,7 +2473,7 @@ impl Function {
         {
             return None;
         }
-        let def_map = db.crate_def_map(HasModule::krate(&self.id, db.upcast()));
+        let def_map = db.crate_def_map(HasModule::krate(&self.id, db));
         def_map.fn_as_proc_macro(self.id).map(|id| Macro { id: id.into() })
     }
 
@@ -2601,11 +2605,11 @@ pub struct SelfParam {
 
 impl SelfParam {
     pub fn access(self, db: &dyn HirDatabase) -> Access {
-        let func_data = db.function_data(self.func);
+        let func_data = db.function_signature(self.func);
         func_data
             .params
             .first()
-            .map(|&param| match &func_data.types_map[param] {
+            .map(|&param| match &func_data.store[param] {
                 TypeRef::Reference(ref_) => match ref_.mutability {
                     hir_def::type_ref::Mutability::Shared => Access::Shared,
                     hir_def::type_ref::Mutability::Mut => Access::Exclusive,
@@ -2630,7 +2634,7 @@ impl SelfParam {
 
     // FIXME: Find better API to also handle const generics
     pub fn ty_with_args(&self, db: &dyn HirDatabase, generics: impl Iterator<Item = Type>) -> Type {
-        let parent_id: GenericDefId = match self.func.lookup(db.upcast()).container {
+        let parent_id: GenericDefId = match self.func.lookup(db).container {
             ItemContainerId::ImplId(it) => it.into(),
             ItemContainerId::TraitId(it) => it.into(),
             ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => {
@@ -2671,48 +2675,57 @@ pub struct ExternCrateDecl {
 
 impl ExternCrateDecl {
     pub fn module(self, db: &dyn HirDatabase) -> Module {
-        self.id.module(db.upcast()).into()
+        self.id.module(db).into()
     }
 
     pub fn resolved_crate(self, db: &dyn HirDatabase) -> Option<Crate> {
-        db.extern_crate_decl_data(self.id).crate_id.map(Into::into)
+        let loc = self.id.lookup(db);
+        let item_tree = loc.id.item_tree(db);
+        let krate = loc.container.krate();
+        let name = &item_tree[loc.id.value].name;
+        if *name == sym::self_ {
+            Some(krate.into())
+        } else {
+            krate.data(db).dependencies.iter().find_map(|dep| {
+                if dep.name.symbol() == name.symbol() { Some(dep.crate_id.into()) } else { None }
+            })
+        }
     }
 
     pub fn name(self, db: &dyn HirDatabase) -> Name {
-        db.extern_crate_decl_data(self.id).name.clone()
+        let loc = self.id.lookup(db);
+        let item_tree = loc.id.item_tree(db);
+        item_tree[loc.id.value].name.clone()
     }
 
     pub fn alias(self, db: &dyn HirDatabase) -> Option<ImportAlias> {
-        db.extern_crate_decl_data(self.id).alias.clone()
+        let loc = self.id.lookup(db);
+        let item_tree = loc.id.item_tree(db);
+        item_tree[loc.id.value].alias.clone()
     }
 
     /// Returns the name under which this crate is made accessible, taking `_` into account.
     pub fn alias_or_name(self, db: &dyn HirDatabase) -> Option<Name> {
-        let extern_crate_decl_data = db.extern_crate_decl_data(self.id);
-        match &extern_crate_decl_data.alias {
+        let loc = self.id.lookup(db);
+        let item_tree = loc.id.item_tree(db);
+
+        match &item_tree[loc.id.value].alias {
             Some(ImportAlias::Underscore) => None,
             Some(ImportAlias::Alias(alias)) => Some(alias.clone()),
-            None => Some(extern_crate_decl_data.name.clone()),
+            None => Some(item_tree[loc.id.value].name.clone()),
         }
     }
 }
 
 impl HasVisibility for ExternCrateDecl {
     fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
-        db.extern_crate_decl_data(self.id)
-            .visibility
-            .resolve(db.upcast(), &self.id.resolver(db.upcast()))
-    }
-}
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct InTypeConst {
-    pub(crate) id: InTypeConstId,
-}
-
-impl InTypeConst {
-    pub fn module(self, db: &dyn HirDatabase) -> Module {
-        Module { id: self.id.lookup(db.upcast()).owner.module(db.upcast()) }
+        let loc = self.id.lookup(db);
+        let item_tree = loc.id.item_tree(db);
+        Visibility::resolve(
+            db,
+            &self.id.resolver(db),
+            &item_tree[item_tree[loc.id.value].visibility],
+        )
     }
 }
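
// Illustrative sketch (not part of the patch): the lookup now done inline by
// `ExternCrateDecl::resolved_crate`. `extern crate self;` resolves to the declaring crate itself;
// anything else is matched by name against the crate's dependencies. `CrateId` and the dependency
// list are simplified stand-ins for the base_db crate graph.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct CrateId(u32);

struct Dependency {
    name: String,
    crate_id: CrateId,
}

fn resolve_extern_crate(current: CrateId, name: &str, deps: &[Dependency]) -> Option<CrateId> {
    if name == "self" {
        Some(current)
    } else {
        deps.iter().find_map(|dep| (dep.name == name).then_some(dep.crate_id))
    }
}

fn main() {
    let deps = [Dependency { name: "serde".into(), crate_id: CrateId(7) }];
    assert_eq!(resolve_extern_crate(CrateId(0), "self", &deps), Some(CrateId(0)));
    assert_eq!(resolve_extern_crate(CrateId(0), "serde", &deps), Some(CrateId(7)));
    assert_eq!(resolve_extern_crate(CrateId(0), "rand", &deps), None);
}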
 
@@ -2723,11 +2736,11 @@ pub struct Const {
 
 impl Const {
     pub fn module(self, db: &dyn HirDatabase) -> Module {
-        Module { id: self.id.module(db.upcast()) }
+        Module { id: self.id.module(db) }
     }
 
     pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
-        db.const_data(self.id).name.clone()
+        db.const_signature(self.id).name.clone()
     }
 
     pub fn value(self, db: &dyn HirDatabase) -> Option<ast::Expr> {
@@ -2796,15 +2809,15 @@ pub struct Static {
 
 impl Static {
     pub fn module(self, db: &dyn HirDatabase) -> Module {
-        Module { id: self.id.module(db.upcast()) }
+        Module { id: self.id.module(db) }
     }
 
     pub fn name(self, db: &dyn HirDatabase) -> Name {
-        db.static_data(self.id).name.clone()
+        db.static_signature(self.id).name.clone()
     }
 
     pub fn is_mut(self, db: &dyn HirDatabase) -> bool {
-        db.static_data(self.id).mutable
+        db.static_signature(self.id).flags.contains(StaticFlags::MUTABLE)
     }
 
     pub fn value(self, db: &dyn HirDatabase) -> Option<ast::Expr> {
@@ -2816,7 +2829,7 @@ impl Static {
     }
 
     pub fn extern_block(self, db: &dyn HirDatabase) -> Option<ExternBlock> {
-        match self.id.lookup(db.upcast()).container {
+        match self.id.lookup(db).container {
             ItemContainerId::ExternBlockId(id) => Some(ExternBlock { id }),
             _ => None,
         }
@@ -2831,7 +2844,13 @@ impl Static {
 
 impl HasVisibility for Static {
     fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
-        db.static_data(self.id).visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
+        let loc = self.id.lookup(db);
+        let item_tree = loc.id.item_tree(db);
+        Visibility::resolve(
+            db,
+            &self.id.resolver(db),
+            &item_tree[item_tree[loc.id.value].visibility],
+        )
     }
 }
 
@@ -2848,34 +2867,33 @@ impl Trait {
     }
 
     pub fn module(self, db: &dyn HirDatabase) -> Module {
-        Module { id: self.id.lookup(db.upcast()).container }
+        Module { id: self.id.lookup(db).container }
     }
 
     pub fn name(self, db: &dyn HirDatabase) -> Name {
-        db.trait_data(self.id).name.clone()
+        db.trait_signature(self.id).name.clone()
     }
 
     pub fn direct_supertraits(self, db: &dyn HirDatabase) -> Vec<Trait> {
-        let traits = direct_super_traits(db.upcast(), self.into());
+        let traits = direct_super_traits(db, self.into());
         traits.iter().map(|tr| Trait::from(*tr)).collect()
     }
 
     pub fn all_supertraits(self, db: &dyn HirDatabase) -> Vec<Trait> {
-        let traits = all_super_traits(db.upcast(), self.into());
+        let traits = all_super_traits(db, self.into());
         traits.iter().map(|tr| Trait::from(*tr)).collect()
     }
 
     pub fn function(self, db: &dyn HirDatabase, name: impl PartialEq<Name>) -> Option<Function> {
-        db.trait_data(self.id).items.iter().find(|(n, _)| name == *n).and_then(
-            |&(_, it)| match it {
-                AssocItemId::FunctionId(id) => Some(Function { id }),
-                _ => None,
-            },
-        )
+        db.trait_items(self.id)
+            .items
+            .iter()
+            .find(|(n, _)| name == *n)
+            .and_then(|&(_, it)| match it {
+                AssocItemId::FunctionId(id) => Some(Function { id }),
+                _ => None,
+            })
     }
 
     pub fn items(self, db: &dyn HirDatabase) -> Vec<AssocItem> {
-        db.trait_data(self.id).items.iter().map(|(_name, it)| (*it).into()).collect()
+        db.trait_items(self.id).items.iter().map(|(_name, it)| (*it).into()).collect()
     }
 
     pub fn items_with_supertraits(self, db: &dyn HirDatabase) -> Vec<AssocItem> {
@@ -2883,11 +2901,11 @@ impl Trait {
     }
 
     pub fn is_auto(self, db: &dyn HirDatabase) -> bool {
-        db.trait_data(self.id).flags.contains(TraitFlags::IS_AUTO)
+        db.trait_signature(self.id).flags.contains(TraitFlags::AUTO)
     }
 
     pub fn is_unsafe(&self, db: &dyn HirDatabase) -> bool {
-        db.trait_data(self.id).flags.contains(TraitFlags::IS_UNSAFE)
+        db.trait_signature(self.id).flags.contains(TraitFlags::UNSAFE)
     }
 
     pub fn type_or_const_param_count(
@@ -2911,25 +2929,40 @@ impl Trait {
         db: &dyn HirDatabase,
     ) -> Option<Vec<DynCompatibilityViolation>> {
         let mut violations = vec![];
-        let _ = hir_ty::dyn_compatibility::dyn_compatibility_with_callback(db, self.id, &mut |violation| {
-            violations.push(violation);
-            ControlFlow::Continue(())
-        });
+        _ = hir_ty::dyn_compatibility::dyn_compatibility_with_callback(
+            db,
+            self.id,
+            &mut |violation| {
+                violations.push(violation);
+                ControlFlow::Continue(())
+            },
+        );
         violations.is_empty().not().then_some(violations)
     }
 
     fn all_macro_calls(&self, db: &dyn HirDatabase) -> Box<[(AstId<ast::Item>, MacroCallId)]> {
-        db.trait_data(self.id)
+        db.trait_items(self.id)
             .macro_calls
             .as_ref()
             .map(|it| it.as_ref().clone().into_boxed_slice())
             .unwrap_or_default()
     }
+
+    /// `#[rust_analyzer::completions(...)]` mode.
+    pub fn complete(self, db: &dyn HirDatabase) -> Complete {
+        Complete::extract(true, &self.attrs(db))
+    }
 }
 
 impl HasVisibility for Trait {
     fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
-        db.trait_data(self.id).visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
+        let loc = self.id.lookup(db);
+        let item_tree = loc.id.item_tree(db);
+        Visibility::resolve(
+            db,
+            &self.id.resolver(db),
+            &item_tree[item_tree[loc.id.value].visibility],
+        )
     }
 }
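
// Illustrative sketch (not part of the patch): the callback-and-collect pattern used by
// `dyn_compatibility_violations` above. The checker takes a `FnMut(..) -> ControlFlow<()>` so a
// caller can stop at the first violation, while this caller always continues, gathers everything,
// and returns `None` when the list stays empty. `Violation` and `check_dyn_compatibility` are
// hypothetical stand-ins for the hir_ty API.
use std::ops::{ControlFlow, Not};

#[derive(Debug)]
enum Violation {
    SizedBound,
    SelfInSignature,
}

fn check_dyn_compatibility(
    callback: &mut dyn FnMut(Violation) -> ControlFlow<()>,
) -> ControlFlow<()> {
    // A real checker would derive these from the trait definition; here we just report two.
    callback(Violation::SizedBound)?;
    callback(Violation::SelfInSignature)?;
    ControlFlow::Continue(())
}

fn all_violations() -> Option<Vec<Violation>> {
    let mut violations = vec![];
    _ = check_dyn_compatibility(&mut |violation| -> ControlFlow<()> {
        violations.push(violation);
        ControlFlow::Continue(())
    });
    violations.is_empty().not().then_some(violations)
}

fn main() {
    println!("{:?}", all_violations());
}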
 
@@ -2940,17 +2973,23 @@ pub struct TraitAlias {
 
 impl TraitAlias {
     pub fn module(self, db: &dyn HirDatabase) -> Module {
-        Module { id: self.id.lookup(db.upcast()).container }
+        Module { id: self.id.lookup(db).container }
     }
 
     pub fn name(self, db: &dyn HirDatabase) -> Name {
-        db.trait_alias_data(self.id).name.clone()
+        db.trait_alias_signature(self.id).name.clone()
     }
 }
 
 impl HasVisibility for TraitAlias {
     fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
-        db.trait_alias_data(self.id).visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
+        let loc = self.id.lookup(db);
+        let item_tree = loc.id.item_tree(db);
+        Visibility::resolve(
+            db,
+            &self.id.resolver(db),
+            &item_tree[item_tree[loc.id.value].visibility],
+        )
     }
 }
 
@@ -2962,14 +3001,15 @@ pub struct TypeAlias {
 impl TypeAlias {
     pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool {
         let subst = db.generic_defaults(self.id.into());
-        subst.iter().any(|ty| match ty.skip_binders().data(Interner) {
-            GenericArgData::Ty(it) => it.is_unknown(),
-            _ => false,
-        })
+        (subst.is_empty() && db.generic_params(self.id.into()).len_type_or_consts() != 0)
+            || subst.iter().any(|ty| match ty.skip_binders().data(Interner) {
+                GenericArgData::Ty(it) => it.is_unknown(),
+                _ => false,
+            })
     }
 
     pub fn module(self, db: &dyn HirDatabase) -> Module {
-        Module { id: self.id.module(db.upcast()) }
+        Module { id: self.id.module(db) }
     }
 
     pub fn ty(self, db: &dyn HirDatabase) -> Type {
@@ -2981,15 +3021,13 @@ impl TypeAlias {
     }
 
     pub fn name(self, db: &dyn HirDatabase) -> Name {
-        db.type_alias_data(self.id).name.clone()
+        db.type_alias_signature(self.id).name.clone()
     }
 }
 
 impl HasVisibility for TypeAlias {
     fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
-        let function_data = db.type_alias_data(self.id);
-        let visibility = &function_data.visibility;
-        visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
+        db.type_alias_visibility(self.id)
     }
 }
 
@@ -3000,7 +3038,7 @@ pub struct ExternBlock {
 
 impl ExternBlock {
     pub fn module(self, db: &dyn HirDatabase) -> Module {
-        Module { id: self.id.module(db.upcast()) }
+        Module { id: self.id.module(db) }
     }
 }
 
@@ -3009,7 +3047,7 @@ pub struct StaticLifetime;
 
 impl StaticLifetime {
     pub fn name(self) -> Name {
-        Name::new_symbol_root(sym::tick_static.clone())
+        Name::new_symbol_root(sym::tick_static)
     }
 }
 
@@ -3024,7 +3062,8 @@ impl BuiltinType {
     }
 
     pub fn ty(self, db: &dyn HirDatabase) -> Type {
-        Type::new_for_crate(db.crate_graph().iter().next().unwrap(), TyBuilder::builtin(self.inner))
+        let core = Crate::core(db).map(|core| core.id).unwrap_or_else(|| db.all_crates()[0]);
+        Type::new_for_crate(core, TyBuilder::builtin(self.inner))
     }
 
     pub fn name(self) -> Name {
@@ -3091,19 +3130,39 @@ pub struct Macro {
 
 impl Macro {
     pub fn module(self, db: &dyn HirDatabase) -> Module {
-        Module { id: self.id.module(db.upcast()) }
+        Module { id: self.id.module(db) }
     }
 
     pub fn name(self, db: &dyn HirDatabase) -> Name {
         match self.id {
-            MacroId::Macro2Id(id) => db.macro2_data(id).name.clone(),
-            MacroId::MacroRulesId(id) => db.macro_rules_data(id).name.clone(),
-            MacroId::ProcMacroId(id) => db.proc_macro_data(id).name.clone(),
+            MacroId::Macro2Id(id) => {
+                let loc = id.lookup(db);
+                let item_tree = loc.id.item_tree(db);
+                item_tree[loc.id.value].name.clone()
+            }
+            MacroId::MacroRulesId(id) => {
+                let loc = id.lookup(db);
+                let item_tree = loc.id.item_tree(db);
+                item_tree[loc.id.value].name.clone()
+            }
+            MacroId::ProcMacroId(id) => {
+                let loc = id.lookup(db);
+                let item_tree = loc.id.item_tree(db);
+                match loc.kind {
+                    ProcMacroKind::CustomDerive => db
+                        .attrs(id.into())
+                        .parse_proc_macro_derive()
+                        .map_or_else(|| item_tree[loc.id.value].name.clone(), |(it, _)| it),
+                    ProcMacroKind::Bang | ProcMacroKind::Attr => {
+                        item_tree[loc.id.value].name.clone()
+                    }
+                }
+            }
         }
     }
 
     pub fn is_macro_export(self, db: &dyn HirDatabase) -> bool {
-        matches!(self.id, MacroId::MacroRulesId(id) if db.macro_rules_data(id).macro_export)
+        matches!(self.id, MacroId::MacroRulesId(_) if db.attrs(self.id.into()).by_key(sym::macro_export).exists())
     }
 
     pub fn is_proc_macro(self) -> bool {
@@ -3112,7 +3171,7 @@ impl Macro {
 
     pub fn kind(&self, db: &dyn HirDatabase) -> MacroKind {
         match self.id {
-            MacroId::Macro2Id(it) => match it.lookup(db.upcast()).expander {
+            MacroId::Macro2Id(it) => match it.lookup(db).expander {
                 MacroExpander::Declarative => MacroKind::Declarative,
                 MacroExpander::BuiltIn(_) | MacroExpander::BuiltInEager(_) => {
                     MacroKind::DeclarativeBuiltIn
@@ -3120,7 +3179,7 @@ impl Macro {
                 MacroExpander::BuiltInAttr(_) => MacroKind::AttrBuiltIn,
                 MacroExpander::BuiltInDerive(_) => MacroKind::DeriveBuiltIn,
             },
-            MacroId::MacroRulesId(it) => match it.lookup(db.upcast()).expander {
+            MacroId::MacroRulesId(it) => match it.lookup(db).expander {
                 MacroExpander::Declarative => MacroKind::Declarative,
                 MacroExpander::BuiltIn(_) | MacroExpander::BuiltInEager(_) => {
                     MacroKind::DeclarativeBuiltIn
@@ -3128,7 +3187,7 @@ impl Macro {
                 MacroExpander::BuiltInAttr(_) => MacroKind::AttrBuiltIn,
                 MacroExpander::BuiltInDerive(_) => MacroKind::DeriveBuiltIn,
             },
-            MacroId::ProcMacroId(it) => match it.lookup(db.upcast()).kind {
+            MacroId::ProcMacroId(it) => match it.lookup(db).kind {
                 ProcMacroKind::CustomDerive => MacroKind::Derive,
                 ProcMacroKind::Bang => MacroKind::ProcMacro,
                 ProcMacroKind::Attr => MacroKind::Attr,
@@ -3146,10 +3205,10 @@ impl Macro {
     pub fn is_builtin_derive(&self, db: &dyn HirDatabase) -> bool {
         match self.id {
             MacroId::Macro2Id(it) => {
-                matches!(it.lookup(db.upcast()).expander, MacroExpander::BuiltInDerive(_))
+                matches!(it.lookup(db).expander, MacroExpander::BuiltInDerive(_))
             }
             MacroId::MacroRulesId(it) => {
-                matches!(it.lookup(db.upcast()).expander, MacroExpander::BuiltInDerive(_))
+                matches!(it.lookup(db).expander, MacroExpander::BuiltInDerive(_))
             }
             MacroId::ProcMacroId(_) => false,
         }
@@ -3158,10 +3217,10 @@ impl Macro {
     pub fn is_env_or_option_env(&self, db: &dyn HirDatabase) -> bool {
         match self.id {
             MacroId::Macro2Id(it) => {
-                matches!(it.lookup(db.upcast()).expander, MacroExpander::BuiltInEager(eager) if eager.is_env_or_option_env())
+                matches!(it.lookup(db).expander, MacroExpander::BuiltInEager(eager) if eager.is_env_or_option_env())
             }
             MacroId::MacroRulesId(it) => {
-                matches!(it.lookup(db.upcast()).expander, MacroExpander::BuiltInEager(eager) if eager.is_env_or_option_env())
+                matches!(it.lookup(db).expander, MacroExpander::BuiltInEager(eager) if eager.is_env_or_option_env())
             }
             MacroId::ProcMacroId(_) => false,
         }
@@ -3170,10 +3229,10 @@ impl Macro {
     pub fn is_asm_or_global_asm(&self, db: &dyn HirDatabase) -> bool {
         match self.id {
             MacroId::Macro2Id(it) => {
-                matches!(it.lookup(db.upcast()).expander, MacroExpander::BuiltIn(m) if m.is_asm())
+                matches!(it.lookup(db).expander, MacroExpander::BuiltIn(m) if m.is_asm())
             }
             MacroId::MacroRulesId(it) => {
-                matches!(it.lookup(db.upcast()).expander, MacroExpander::BuiltIn(m) if m.is_asm())
+                matches!(it.lookup(db).expander, MacroExpander::BuiltIn(m) if m.is_asm())
             }
             MacroId::ProcMacroId(_) => false,
         }
@@ -3192,9 +3251,13 @@ impl HasVisibility for Macro {
     fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
         match self.id {
             MacroId::Macro2Id(id) => {
-                let data = db.macro2_data(id);
-                let visibility = &data.visibility;
-                visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
+                let loc = id.lookup(db);
+                let item_tree = loc.id.item_tree(db);
+                Visibility::resolve(
+                    db,
+                    &id.resolver(db),
+                    &item_tree[item_tree[loc.id.value].visibility],
+                )
             }
             MacroId::MacroRulesId(_) => Visibility::Public,
             MacroId::ProcMacroId(_) => Visibility::Public,
@@ -3335,7 +3398,7 @@ impl AsAssocItem for DefWithBody {
         match self {
             DefWithBody::Function(it) => it.as_assoc_item(db),
             DefWithBody::Const(it) => it.as_assoc_item(db),
-            DefWithBody::Static(_) | DefWithBody::Variant(_) | DefWithBody::InTypeConst(_) => None,
+            DefWithBody::Static(_) | DefWithBody::Variant(_) => None,
         }
     }
 }
@@ -3346,11 +3409,11 @@ fn as_assoc_item<'db, ID, DEF, LOC>(
     id: ID,
 ) -> Option<AssocItem>
 where
-    ID: Lookup<Database<'db> = dyn DefDatabase + 'db, Data = AssocItemLoc<LOC>>,
+    ID: Lookup<Database = dyn DefDatabase, Data = AssocItemLoc<LOC>>,
     DEF: From<ID>,
     LOC: ItemTreeNode,
 {
-    match id.lookup(db.upcast()).container {
+    match id.lookup(db).container {
         ItemContainerId::TraitId(_) | ItemContainerId::ImplId(_) => Some(ctor(DEF::from(id))),
         ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None,
     }
@@ -3362,11 +3425,11 @@ fn as_extern_assoc_item<'db, ID, DEF, LOC>(
     id: ID,
 ) -> Option<ExternAssocItem>
 where
-    ID: Lookup<Database<'db> = dyn DefDatabase + 'db, Data = AssocItemLoc<LOC>>,
+    ID: Lookup<Database = dyn DefDatabase, Data = AssocItemLoc<LOC>>,
     DEF: From<ID>,
     LOC: ItemTreeNode,
 {
-    match id.lookup(db.upcast()).container {
+    match id.lookup(db).container {
         ItemContainerId::ExternBlockId(_) => Some(ctor(DEF::from(id))),
         ItemContainerId::TraitId(_) | ItemContainerId::ImplId(_) | ItemContainerId::ModuleId(_) => {
             None
@@ -3432,9 +3495,9 @@ impl AssocItem {
 
     pub fn container(self, db: &dyn HirDatabase) -> AssocItemContainer {
         let container = match self {
-            AssocItem::Function(it) => it.id.lookup(db.upcast()).container,
-            AssocItem::Const(it) => it.id.lookup(db.upcast()).container,
-            AssocItem::TypeAlias(it) => it.id.lookup(db.upcast()).container,
+            AssocItem::Function(it) => it.id.lookup(db).container,
+            AssocItem::Const(it) => it.id.lookup(db).container,
+            AssocItem::TypeAlias(it) => it.id.lookup(db).container,
         };
         match container {
             ItemContainerId::TraitId(id) => AssocItemContainer::Trait(id.into()),
@@ -3506,17 +3569,16 @@ impl AssocItem {
                 DefWithBody::from(func).diagnostics(db, acc, style_lints);
             }
             AssocItem::Const(const_) => {
+                GenericDef::Const(const_).diagnostics(db, acc);
                 DefWithBody::from(const_).diagnostics(db, acc, style_lints);
             }
             AssocItem::TypeAlias(type_alias) => {
                 GenericDef::TypeAlias(type_alias).diagnostics(db, acc);
-                let tree_id = type_alias.id.lookup(db.upcast()).id;
-                let tree_source_maps = tree_id.item_tree_with_source_map(db.upcast()).1;
                 push_ty_diagnostics(
                     db,
                     acc,
                     db.type_for_type_alias_with_diagnostics(type_alias.id).1,
-                    tree_source_maps.type_alias(tree_id.value).item(),
+                    &db.type_alias_signature_with_source_map(type_alias.id).1,
                 );
                 for diag in hir_ty::diagnostics::incorrect_case(db, type_alias.id.into()) {
                     acc.push(diag.into());
@@ -3623,67 +3685,40 @@ impl GenericDef {
     pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>) {
         let def = self.id();
 
-        let item_tree_source_maps;
-        let (generics, generics_source_map) = db.generic_params_with_source_map(def);
+        let generics = db.generic_params(def);
 
         if generics.is_empty() && generics.no_predicates() {
             return;
         }
 
-        let source_map = match &generics_source_map {
-            Some(it) => it,
-            None => match def {
-                GenericDefId::FunctionId(it) => {
-                    let id = it.lookup(db.upcast()).id;
-                    item_tree_source_maps = id.item_tree_with_source_map(db.upcast()).1;
-                    item_tree_source_maps.function(id.value).generics()
-                }
-                GenericDefId::AdtId(AdtId::EnumId(it)) => {
-                    let id = it.lookup(db.upcast()).id;
-                    item_tree_source_maps = id.item_tree_with_source_map(db.upcast()).1;
-                    item_tree_source_maps.enum_generic(id.value)
-                }
-                GenericDefId::AdtId(AdtId::StructId(it)) => {
-                    let id = it.lookup(db.upcast()).id;
-                    item_tree_source_maps = id.item_tree_with_source_map(db.upcast()).1;
-                    item_tree_source_maps.strukt(id.value).generics()
-                }
-                GenericDefId::AdtId(AdtId::UnionId(it)) => {
-                    let id = it.lookup(db.upcast()).id;
-                    item_tree_source_maps = id.item_tree_with_source_map(db.upcast()).1;
-                    item_tree_source_maps.union(id.value).generics()
-                }
-                GenericDefId::TraitId(it) => {
-                    let id = it.lookup(db.upcast()).id;
-                    item_tree_source_maps = id.item_tree_with_source_map(db.upcast()).1;
-                    item_tree_source_maps.trait_generic(id.value)
-                }
-                GenericDefId::TraitAliasId(it) => {
-                    let id = it.lookup(db.upcast()).id;
-                    item_tree_source_maps = id.item_tree_with_source_map(db.upcast()).1;
-                    item_tree_source_maps.trait_alias_generic(id.value)
-                }
-                GenericDefId::TypeAliasId(it) => {
-                    let id = it.lookup(db.upcast()).id;
-                    item_tree_source_maps = id.item_tree_with_source_map(db.upcast()).1;
-                    item_tree_source_maps.type_alias(id.value).generics()
-                }
-                GenericDefId::ImplId(it) => {
-                    let id = it.lookup(db.upcast()).id;
-                    item_tree_source_maps = id.item_tree_with_source_map(db.upcast()).1;
-                    item_tree_source_maps.impl_(id.value).generics()
-                }
-                GenericDefId::ConstId(_) => return,
-                GenericDefId::StaticId(_) => return,
-            },
+        let source_map = match def {
+            GenericDefId::AdtId(AdtId::EnumId(it)) => {
+                db.enum_signature_with_source_map(it).1.clone()
+            }
+            GenericDefId::AdtId(AdtId::StructId(it)) => {
+                db.struct_signature_with_source_map(it).1.clone()
+            }
+            GenericDefId::AdtId(AdtId::UnionId(it)) => {
+                db.union_signature_with_source_map(it).1.clone()
+            }
+            GenericDefId::ConstId(_) => return,
+            GenericDefId::FunctionId(it) => db.function_signature_with_source_map(it).1.clone(),
+            GenericDefId::ImplId(it) => db.impl_signature_with_source_map(it).1.clone(),
+            GenericDefId::StaticId(_) => return,
+            GenericDefId::TraitAliasId(it) => {
+                db.trait_alias_signature_with_source_map(it).1.clone()
+            }
+            GenericDefId::TraitId(it) => db.trait_signature_with_source_map(it).1.clone(),
+            GenericDefId::TypeAliasId(it) => db.type_alias_signature_with_source_map(it).1.clone(),
         };
 
-        push_ty_diagnostics(db, acc, db.generic_defaults_with_diagnostics(def).1, source_map);
+        expr_store_diagnostics(db, acc, &source_map);
+        push_ty_diagnostics(db, acc, db.generic_defaults_with_diagnostics(def).1, &source_map);
         push_ty_diagnostics(
             db,
             acc,
             db.generic_predicates_without_parent_with_diagnostics(def).1,
-            source_map,
+            &source_map,
         );
         for (param_id, param) in generics.iter_type_or_consts() {
             if let TypeOrConstParamData::ConstParamData(_) = param {
@@ -3694,11 +3729,28 @@ impl GenericDef {
                         TypeOrConstParamId { parent: def, local_id: param_id },
                     ))
                     .1,
-                    source_map,
+                    &source_map,
                 );
             }
         }
     }
+
+    /// Returns a string describing the kind of this type.
+    #[inline]
+    pub fn description(self) -> &'static str {
+        match self {
+            GenericDef::Function(_) => "function",
+            GenericDef::Adt(Adt::Struct(_)) => "struct",
+            GenericDef::Adt(Adt::Enum(_)) => "enum",
+            GenericDef::Adt(Adt::Union(_)) => "union",
+            GenericDef::Trait(_) => "trait",
+            GenericDef::TraitAlias(_) => "trait alias",
+            GenericDef::TypeAlias(_) => "type alias",
+            GenericDef::Impl(_) => "impl",
+            GenericDef::Const(_) => "constant",
+            GenericDef::Static(_) => "static",
+        }
+    }
 }
 
 // We cannot call this `Substitution` unfortunately...
@@ -3716,9 +3768,9 @@ impl GenericSubstitution {
 
     pub fn types(&self, db: &dyn HirDatabase) -> Vec<(Symbol, Type)> {
         let container = match self.def {
-            GenericDefId::ConstId(id) => Some(id.lookup(db.upcast()).container),
-            GenericDefId::FunctionId(id) => Some(id.lookup(db.upcast()).container),
-            GenericDefId::TypeAliasId(id) => Some(id.lookup(db.upcast()).container),
+            GenericDefId::ConstId(id) => Some(id.lookup(db).container),
+            GenericDefId::FunctionId(id) => Some(id.lookup(db).container),
+            GenericDefId::TypeAliasId(id) => Some(id.lookup(db).container),
             _ => None,
         };
         let container_type_params = container
@@ -3741,16 +3793,23 @@ impl GenericSubstitution {
             TypeOrConstParamData::TypeParamData(param) => Some(param.name.clone()),
             TypeOrConstParamData::ConstParamData(_) => None,
         });
-        // The `Substitution` is first self then container, we want the reverse order.
-        let self_params = self.subst.type_parameters(Interner).zip(type_params);
-        let container_params = self.subst.as_slice(Interner)[generics.len()..]
+        let parent_len = self.subst.len(Interner)
+            - generics
+                .iter_type_or_consts()
+                .filter(|g| matches!(g.1, TypeOrConstParamData::TypeParamData(..)))
+                .count();
+        let container_params = self.subst.as_slice(Interner)[..parent_len]
             .iter()
             .filter_map(|param| param.ty(Interner).cloned())
             .zip(container_type_params.into_iter().flatten());
+        let self_params = self.subst.as_slice(Interner)[parent_len..]
+            .iter()
+            .filter_map(|param| param.ty(Interner).cloned())
+            .zip(type_params);
         container_params
             .chain(self_params)
             .filter_map(|(ty, name)| {
-                Some((name?.symbol().clone(), Type { ty, env: self.env.clone() }))
+                Some((name?.symbol().clone(), Type { ty: ty.clone(), env: self.env.clone() }))
             })
             .collect()
     }
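
// Illustrative sketch (not part of the patch): how `GenericSubstitution::types` now splits the
// flat substitution. The parent (impl/trait) arguments form the leading portion and the item's
// own arguments the trailing one, so the split point is `total - own_count` rather than indexing
// from the front with the item's generics length. Plain strings stand in for chalk types and
// parameter names.
fn split_substitution<'a>(
    subst: &'a [&'a str],
    own_param_names: &'a [&'a str],
) -> (Vec<(&'a str, &'a str)>, Vec<(&'a str, &'a str)>) {
    let parent_len = subst.len() - own_param_names.len();
    let (parent_args, own_args) = subst.split_at(parent_len);
    // Parent parameter names are not known here, so pair those arguments with a placeholder; the
    // real code zips them with the container's type parameters instead.
    let parent = parent_args.iter().map(|&a| ("<parent>", a)).collect();
    let own = own_args.iter().zip(own_param_names).map(|(&a, &n)| (n, a)).collect();
    (parent, own)
}

fn main() {
    // e.g. `impl<A> Foo<A> { fn method<T>(..) }` instantiated as `Foo::<u32>::method::<bool>`.
    let (parent, own) = split_substitution(&["u32", "bool"], &["T"]);
    assert_eq!(parent, vec![("<parent>", "u32")]);
    assert_eq!(own, vec![("T", "bool")]);
}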
@@ -3784,7 +3843,7 @@ impl LocalSource {
     }
 
     pub fn original_file(&self, db: &dyn HirDatabase) -> EditionedFileId {
-        self.source.file_id.original_file(db.upcast())
+        self.source.file_id.original_file(db)
     }
 
     pub fn file(&self) -> HirFileId {
@@ -3832,7 +3891,7 @@ impl Local {
     }
 
     pub fn is_self(self, db: &dyn HirDatabase) -> bool {
-        self.name(db) == sym::self_.clone()
+        self.name(db) == sym::self_
     }
 
     pub fn is_mut(self, db: &dyn HirDatabase) -> bool {
@@ -3865,7 +3924,7 @@ impl Local {
         let (body, source_map) = db.body_with_source_map(self.parent);
         match body.self_param.zip(source_map.self_param_syntax()) {
             Some((param, source)) if param == self.binding_id => {
-                let root = source.file_syntax(db.upcast());
+                let root = source.file_syntax(db);
                 vec![LocalSource {
                     local: self,
                     source: source.map(|ast| Either::Right(ast.to_node(&root))),
@@ -3876,7 +3935,7 @@ impl Local {
                 .iter()
                 .map(|&definition| {
                     let src = source_map.pat_syntax(definition).unwrap(); // Hmm...
-                    let root = src.file_syntax(db.upcast());
+                    let root = src.file_syntax(db);
                     LocalSource {
                         local: self,
                         source: src.map(|ast| match ast.to_node(&root) {
@@ -3894,7 +3953,7 @@ impl Local {
         let (body, source_map) = db.body_with_source_map(self.parent);
         match body.self_param.zip(source_map.self_param_syntax()) {
             Some((param, source)) if param == self.binding_id => {
-                let root = source.file_syntax(db.upcast());
+                let root = source.file_syntax(db);
                 LocalSource {
                     local: self,
                     source: source.map(|ast| Either::Right(ast.to_node(&root))),
@@ -3905,7 +3964,7 @@ impl Local {
                 .first()
                 .map(|&definition| {
                     let src = source_map.pat_syntax(definition).unwrap(); // Hmm...
-                    let root = src.file_syntax(db.upcast());
+                    let root = src.file_syntax(db);
                     LocalSource {
                         local: self,
                         source: src.map(|ast| match ast.to_node(&root) {
@@ -3944,19 +4003,15 @@ impl DeriveHelper {
 
     pub fn name(&self, db: &dyn HirDatabase) -> Name {
         match self.derive {
-            MacroId::Macro2Id(it) => db
-                .macro2_data(it)
-                .helpers
-                .as_deref()
-                .and_then(|it| it.get(self.idx as usize))
-                .cloned(),
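+            // Helper attribute names are declared on the macro definition itself, via
+            // `#[rustc_builtin_macro(..., attributes(...))]` for builtin derives and
+            // `#[proc_macro_derive(..., attributes(...))]` for proc-macro derives.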
+            makro @ MacroId::Macro2Id(_) => db
+                .attrs(makro.into())
+                .parse_rustc_builtin_macro()
+                .and_then(|(_, helpers)| helpers.get(self.idx as usize).cloned()),
             MacroId::MacroRulesId(_) => None,
-            MacroId::ProcMacroId(proc_macro) => db
-                .proc_macro_data(proc_macro)
-                .helpers
-                .as_deref()
-                .and_then(|it| it.get(self.idx as usize))
-                .cloned(),
+            makro @ MacroId::ProcMacroId(_) => db
+                .attrs(makro.into())
+                .parse_proc_macro_derive()
+                .and_then(|(_, helpers)| helpers.get(self.idx as usize).cloned()),
         }
         .unwrap_or_else(Name::missing)
     }
@@ -3965,7 +4020,7 @@ impl DeriveHelper {
 // FIXME: Wrong name? This could also be a registered attribute
 #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
 pub struct BuiltinAttr {
-    krate: Option<CrateId>,
+    krate: Option<base_db::Crate>,
     idx: u32,
 }
 
@@ -4011,7 +4066,7 @@ impl BuiltinAttr {
 
 #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
 pub struct ToolModule {
-    krate: CrateId,
+    krate: base_db::Crate,
     idx: u32,
 }
 
@@ -4096,7 +4151,7 @@ impl GenericParam {
             GenericParam::ConstParam(_) => return None,
             GenericParam::LifetimeParam(it) => it.id.parent,
         };
-        let generics = hir_ty::generics::generics(db.upcast(), parent);
+        let generics = hir_ty::generics::generics(db, parent);
         let index = match self {
             GenericParam::TypeParam(it) => generics.type_or_const_param_idx(it.id.into())?,
             GenericParam::ConstParam(_) => return None,
@@ -4121,7 +4176,7 @@ impl TypeParam {
     }
 
     pub fn module(self, db: &dyn HirDatabase) -> Module {
-        self.id.parent().module(db.upcast()).into()
+        self.id.parent().module(db).into()
     }
 
     /// Is this type parameter implicitly introduced (eg. `Self` in a trait or an `impl Trait`
@@ -4130,14 +4185,13 @@ impl TypeParam {
         let params = db.generic_params(self.id.parent());
         let data = &params[self.id.local_id()];
         match data.type_param().unwrap().provenance {
-            hir_def::generics::TypeParamProvenance::TypeParamList => false,
-            hir_def::generics::TypeParamProvenance::TraitSelf
-            | hir_def::generics::TypeParamProvenance::ArgumentImplTrait => true,
+            TypeParamProvenance::TypeParamList => false,
+            TypeParamProvenance::TraitSelf | TypeParamProvenance::ArgumentImplTrait => true,
         }
     }
 
     pub fn ty(self, db: &dyn HirDatabase) -> Type {
-        let resolver = self.id.parent().resolver(db.upcast());
+        let resolver = self.id.parent().resolver(db);
         let ty =
             TyKind::Placeholder(hir_ty::to_placeholder_idx(db, self.id.into())).intern(Interner);
         Type::new_with_resolver_inner(db, &resolver, ty)
@@ -4160,7 +4214,7 @@ impl TypeParam {
 
     pub fn default(self, db: &dyn HirDatabase) -> Option<Type> {
         let ty = generic_arg_from_param(db, self.id.into())?;
-        let resolver = self.id.parent().resolver(db.upcast());
+        let resolver = self.id.parent().resolver(db);
         match ty.data(Interner) {
             GenericArgData::Ty(it) if *it.kind(Interner) != TyKind::Error => {
                 Some(Type::new_with_resolver_inner(db, &resolver, it.clone()))
@@ -4186,7 +4240,7 @@ impl LifetimeParam {
     }
 
     pub fn module(self, db: &dyn HirDatabase) -> Module {
-        self.id.parent.module(db.upcast()).into()
+        self.id.parent.module(db).into()
     }
 
     pub fn parent(self, _db: &dyn HirDatabase) -> GenericDef {
@@ -4216,7 +4270,7 @@ impl ConstParam {
     }
 
     pub fn module(self, db: &dyn HirDatabase) -> Module {
-        self.id.parent().module(db.upcast()).into()
+        self.id.parent().module(db).into()
     }
 
     pub fn parent(self, _db: &dyn HirDatabase) -> GenericDef {
@@ -4241,7 +4295,8 @@ fn generic_arg_from_param(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Optio
     let local_idx = hir_ty::param_idx(db, id)?;
     let defaults = db.generic_defaults(id.parent);
     let ty = defaults.get(local_idx)?.clone();
-    let subst = TyBuilder::placeholder_subst(db, id.parent);
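+    // A parameter's default may only refer to parameters declared before it, so only the
+    // placeholders up to `local_idx` are substituted into it.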
+    let full_subst = TyBuilder::placeholder_subst(db, id.parent);
+    let subst = &full_subst.as_slice(Interner)[..local_idx];
     Some(ty.substitute(Interner, &subst))
 }
 
@@ -4260,7 +4315,7 @@ impl TypeOrConstParam {
     }
 
     pub fn module(self, db: &dyn HirDatabase) -> Module {
-        self.id.parent.module(db.upcast()).into()
+        self.id.parent.module(db).into()
     }
 
     pub fn parent(self, _db: &dyn HirDatabase) -> GenericDef {
@@ -4270,10 +4325,10 @@ impl TypeOrConstParam {
     pub fn split(self, db: &dyn HirDatabase) -> Either<ConstParam, TypeParam> {
         let params = db.generic_params(self.id.parent);
         match &params[self.id.local_id] {
-            hir_def::generics::TypeOrConstParamData::TypeParamData(_) => {
+            TypeOrConstParamData::TypeParamData(_) => {
                 Either::Right(TypeParam { id: TypeParamId::from_unchecked(self.id) })
             }
-            hir_def::generics::TypeOrConstParamData::ConstParamData(_) => {
+            TypeOrConstParamData::ConstParamData(_) => {
                 Either::Left(ConstParam { id: ConstParamId::from_unchecked(self.id) })
             }
         }
@@ -4289,18 +4344,18 @@ impl TypeOrConstParam {
     pub fn as_type_param(self, db: &dyn HirDatabase) -> Option<TypeParam> {
         let params = db.generic_params(self.id.parent);
         match &params[self.id.local_id] {
-            hir_def::generics::TypeOrConstParamData::TypeParamData(_) => {
+            TypeOrConstParamData::TypeParamData(_) => {
                 Some(TypeParam { id: TypeParamId::from_unchecked(self.id) })
             }
-            hir_def::generics::TypeOrConstParamData::ConstParamData(_) => None,
+            TypeOrConstParamData::ConstParamData(_) => None,
         }
     }
 
     pub fn as_const_param(self, db: &dyn HirDatabase) -> Option<ConstParam> {
         let params = db.generic_params(self.id.parent);
         match &params[self.id.local_id] {
-            hir_def::generics::TypeOrConstParamData::TypeParamData(_) => None,
-            hir_def::generics::TypeOrConstParamData::ConstParamData(_) => {
+            TypeOrConstParamData::TypeParamData(_) => None,
+            TypeOrConstParamData::ConstParamData(_) => {
                 Some(ConstParam { id: ConstParamId::from_unchecked(self.id) })
             }
         }
@@ -4321,7 +4376,7 @@ impl Impl {
     }
 
     pub fn all_in_module(db: &dyn HirDatabase, module: Module) -> Vec<Impl> {
-        module.id.def_map(db.upcast())[module.id.local_id].scope.impls().map(Into::into).collect()
+        module.id.def_map(db)[module.id.local_id].scope.impls().map(Into::into).collect()
     }
 
     pub fn all_for_type(db: &dyn HirDatabase, Type { ty, env }: Type) -> Vec<Impl> {
@@ -4367,8 +4422,7 @@ impl Impl {
             );
         }
 
-        if let Some(block) =
-            ty.adt_id(Interner).and_then(|def| def.0.module(db.upcast()).containing_block())
+        if let Some(block) = ty.adt_id(Interner).and_then(|def| def.0.module(db).containing_block())
         {
             if let Some(inherent_impls) = db.inherent_impls_in_block(block) {
                 all.extend(
@@ -4413,41 +4467,41 @@ impl Impl {
     pub fn trait_ref(self, db: &dyn HirDatabase) -> Option<TraitRef> {
         let substs = TyBuilder::placeholder_subst(db, self.id);
         let trait_ref = db.impl_trait(self.id)?.substitute(Interner, &substs);
-        let resolver = self.id.resolver(db.upcast());
+        let resolver = self.id.resolver(db);
         Some(TraitRef::new_with_resolver(db, &resolver, trait_ref))
     }
 
     pub fn self_ty(self, db: &dyn HirDatabase) -> Type {
-        let resolver = self.id.resolver(db.upcast());
+        let resolver = self.id.resolver(db);
         let substs = TyBuilder::placeholder_subst(db, self.id);
         let ty = db.impl_self_ty(self.id).substitute(Interner, &substs);
         Type::new_with_resolver_inner(db, &resolver, ty)
     }
 
     pub fn items(self, db: &dyn HirDatabase) -> Vec<AssocItem> {
-        db.impl_data(self.id).items.iter().map(|&(_, it)| it.into()).collect()
+        db.impl_items(self.id).items.iter().map(|&(_, it)| it.into()).collect()
     }
 
     pub fn is_negative(self, db: &dyn HirDatabase) -> bool {
-        db.impl_data(self.id).is_negative
+        db.impl_signature(self.id).flags.contains(ImplFlags::NEGATIVE)
     }
 
     pub fn is_unsafe(self, db: &dyn HirDatabase) -> bool {
-        db.impl_data(self.id).is_unsafe
+        db.impl_signature(self.id).flags.contains(ImplFlags::UNSAFE)
     }
 
     pub fn module(self, db: &dyn HirDatabase) -> Module {
-        self.id.lookup(db.upcast()).container.into()
+        self.id.lookup(db).container.into()
     }
 
     pub fn as_builtin_derive_path(self, db: &dyn HirDatabase) -> Option<InMacroFile<ast::Path>> {
         let src = self.source(db)?;
 
         let macro_file = src.file_id.macro_file()?;
-        let loc = macro_file.macro_call_id.lookup(db.upcast());
+        let loc = macro_file.lookup(db);
         let (derive_attr, derive_index) = match loc.kind {
             MacroCallKind::Derive { ast_id, derive_attr_index, derive_index, .. } => {
-                let module_id = self.id.lookup(db.upcast()).container;
+                let module_id = self.id.lookup(db).container;
                 (
                     db.crate_def_map(module_id.krate())[module_id.local_id]
                         .scope
@@ -4457,9 +4511,8 @@ impl Impl {
             }
             _ => return None,
         };
-        let file_id = MacroFileId { macro_call_id: derive_attr };
         let path = db
-            .parse_macro_expansion(file_id)
+            .parse_macro_expansion(derive_attr)
             .value
             .0
             .syntax_node()
@@ -4467,7 +4520,7 @@ impl Impl {
             .nth(derive_index as usize)
             .and_then(<ast::Attr as AstNode>::cast)
             .and_then(|it| it.path())?;
-        Some(InMacroFile { file_id, value: path })
+        Some(InMacroFile { file_id: derive_attr, value: path })
     }
 
     pub fn check_orphan_rules(self, db: &dyn HirDatabase) -> bool {
@@ -4475,7 +4528,7 @@ impl Impl {
     }
 
     fn all_macro_calls(&self, db: &dyn HirDatabase) -> Box<[(AstId<ast::Item>, MacroCallId)]> {
-        db.impl_data(self.id)
+        db.impl_items(self.id)
             .macro_calls
             .as_ref()
             .map(|it| it.as_ref().clone().into_boxed_slice())
@@ -4656,6 +4709,7 @@ pub struct CaptureUsages {
 impl CaptureUsages {
     pub fn sources(&self, db: &dyn HirDatabase) -> Vec<CaptureUsageSource> {
         let (body, source_map) = db.body_with_source_map(self.parent);
+
         let mut result = Vec::with_capacity(self.spans.len());
         for &span in self.spans.iter() {
             let is_ref = span.is_ref_span(&body);
@@ -4728,12 +4782,12 @@ impl Type {
         Type { env: environment, ty }
     }
 
-    pub(crate) fn new_for_crate(krate: CrateId, ty: Ty) -> Type {
+    pub(crate) fn new_for_crate(krate: base_db::Crate, ty: Ty) -> Type {
         Type { env: TraitEnvironment::empty(krate), ty }
     }
 
     fn new(db: &dyn HirDatabase, lexical_env: impl HasResolver, ty: Ty) -> Type {
-        let resolver = lexical_env.resolver(db.upcast());
+        let resolver = lexical_env.resolver(db);
         let environment = resolver
             .generic_def()
             .map_or_else(|| TraitEnvironment::empty(resolver.krate()), |d| db.trait_environment(d));
@@ -4778,7 +4832,7 @@ impl Type {
                 ValueTyDefId::StructId(it) => GenericDefId::AdtId(AdtId::StructId(it)),
                 ValueTyDefId::UnionId(it) => GenericDefId::AdtId(AdtId::UnionId(it)),
                 ValueTyDefId::EnumVariantId(it) => {
-                    GenericDefId::AdtId(AdtId::EnumId(it.lookup(db.upcast()).parent))
+                    GenericDefId::AdtId(AdtId::EnumId(it.lookup(db).parent))
                 }
                 ValueTyDefId::StaticId(_) => return Type::new(db, def, ty.skip_binders().clone()),
             },
@@ -4790,7 +4844,7 @@ impl Type {
         Type { env: ty.env, ty: TyBuilder::slice(ty.ty) }
     }
 
-    pub fn new_tuple(krate: CrateId, tys: &[Type]) -> Type {
+    pub fn new_tuple(krate: base_db::Crate, tys: &[Type]) -> Type {
         let tys = tys.iter().map(|it| it.ty.clone());
         Type { env: TraitEnvironment::empty(krate), ty: TyBuilder::tuple_with(tys) }
     }
@@ -4822,7 +4876,7 @@ impl Type {
     pub fn contains_reference(&self, db: &dyn HirDatabase) -> bool {
         return go(db, self.env.krate, &self.ty);
 
-        fn go(db: &dyn HirDatabase, krate: CrateId, ty: &Ty) -> bool {
+        fn go(db: &dyn HirDatabase, krate: base_db::Crate, ty: &Ty) -> bool {
             match ty.kind(Interner) {
                 // Reference itself
                 TyKind::Ref(_, _, _) => true,
@@ -4954,9 +5008,8 @@ impl Type {
             return None;
         }
 
-        let output_assoc_type = db
-            .trait_data(trait_)
-            .associated_type_by_name(&Name::new_symbol_root(sym::Output.clone()))?;
+        let output_assoc_type =
+            db.trait_items(trait_).associated_type_by_name(&Name::new_symbol_root(sym::Output))?;
         self.normalize_trait_assoc_type(db, &[], output_assoc_type.into())
     }
 
@@ -4971,8 +5024,8 @@ impl Type {
     pub fn iterator_item(self, db: &dyn HirDatabase) -> Option<Type> {
         let iterator_trait = db.lang_item(self.env.krate, LangItem::Iterator)?.as_trait()?;
         let iterator_item = db
-            .trait_data(iterator_trait)
-            .associated_type_by_name(&Name::new_symbol_root(sym::Item.clone()))?;
+            .trait_items(iterator_trait)
+            .associated_type_by_name(&Name::new_symbol_root(sym::Item))?;
         self.normalize_trait_assoc_type(db, &[], iterator_item.into())
     }
 
@@ -5003,8 +5056,8 @@ impl Type {
         }
 
         let into_iter_assoc_type = db
-            .trait_data(trait_)
-            .associated_type_by_name(&Name::new_symbol_root(sym::IntoIter.clone()))?;
+            .trait_items(trait_)
+            .associated_type_by_name(&Name::new_symbol_root(sym::IntoIter))?;
         self.normalize_trait_assoc_type(db, &[], into_iter_assoc_type.into())
     }
 
@@ -5057,7 +5110,7 @@ impl Type {
         alias: TypeAlias,
     ) -> Option<Type> {
         let mut args = args.iter();
-        let trait_id = match alias.id.lookup(db.upcast()).container {
+        let trait_id = match alias.id.lookup(db).container {
             ItemContainerId::TraitId(id) => id,
             _ => unreachable!("non assoc type alias reached in normalize_trait_assoc_type()"),
         };
@@ -5076,11 +5129,7 @@ impl Type {
         let projection = TyBuilder::assoc_type_projection(db, alias.id, Some(parent_subst)).build();
 
         let ty = db.normalize_projection(projection, self.env.clone());
-        if ty.is_unknown() {
-            None
-        } else {
-            Some(self.derived(ty))
-        }
+        if ty.is_unknown() { None } else { Some(self.derived(ty)) }
     }
 
     pub fn is_copy(&self, db: &dyn HirDatabase) -> bool {
@@ -5257,7 +5306,10 @@ impl Type {
 
     /// Returns types that this type dereferences to (including this type itself). The returned
     /// iterator won't yield the same type more than once even if the deref chain contains a cycle.
-    pub fn autoderef(&self, db: &dyn HirDatabase) -> impl Iterator<Item = Type> + '_ {
+    pub fn autoderef<'db>(
+        &self,
+        db: &'db dyn HirDatabase,
+    ) -> impl Iterator<Item = Type> + use<'_, 'db> {
         self.autoderef_(db).map(move |ty| self.derived(ty))
     }
 
@@ -5297,7 +5349,7 @@ impl Type {
             let impls = db.inherent_impls_in_crate(krate);
 
             for impl_def in impls.for_self_ty(&self.ty) {
-                for &(_, item) in db.impl_data(*impl_def).items.iter() {
+                for &(_, item) in db.impl_items(*impl_def).items.iter() {
                     if callback(item) {
                         return;
                     }
@@ -5497,7 +5549,7 @@ impl Type {
             .generic_def()
             .map_or_else(|| TraitEnvironment::empty(krate.id), |d| db.trait_environment(d));
 
-        let _ = method_resolution::iterate_method_candidates_dyn(
+        _ = method_resolution::iterate_method_candidates_dyn(
             &canonical,
             db,
             environment,
@@ -5584,7 +5636,7 @@ impl Type {
             .generic_def()
             .map_or_else(|| TraitEnvironment::empty(krate.id), |d| db.trait_environment(d));
 
-        let _ = method_resolution::iterate_path_candidates(
+        _ = method_resolution::iterate_path_candidates(
             &canonical,
             db,
             environment,
@@ -5617,7 +5669,7 @@ impl Type {
         let _p = tracing::info_span!("applicable_inherent_traits").entered();
         self.autoderef_(db)
             .filter_map(|ty| ty.dyn_trait())
-            .flat_map(move |dyn_trait_id| hir_ty::all_super_traits(db.upcast(), dyn_trait_id))
+            .flat_map(move |dyn_trait_id| hir_ty::all_super_traits(db, dyn_trait_id))
             .map(Trait::from)
     }
 
@@ -5628,12 +5680,15 @@ impl Type {
             .flat_map(|ty| {
                 self.env
                     .traits_in_scope_from_clauses(ty)
-                    .flat_map(|t| hir_ty::all_super_traits(db.upcast(), t))
+                    .flat_map(|t| hir_ty::all_super_traits(db, t))
             })
             .map(Trait::from)
     }
 
-    pub fn as_impl_traits(&self, db: &dyn HirDatabase) -> Option<impl Iterator<Item = Trait>> {
+    pub fn as_impl_traits(
+        &self,
+        db: &dyn HirDatabase,
+    ) -> Option<impl Iterator<Item = Trait> + use<>> {
         self.ty.impl_trait_bounds(db).map(|it| {
             it.into_iter().filter_map(|pred| match pred.skip_binders() {
                 hir_ty::WhereClause::Implemented(trait_ref) => {
@@ -6072,7 +6127,7 @@ pub trait HasVisibility {
     fn visibility(&self, db: &dyn HirDatabase) -> Visibility;
     fn is_visible_from(&self, db: &dyn HirDatabase, module: Module) -> bool {
         let vis = self.visibility(db);
-        vis.is_visible_from(db.upcast(), module.id)
+        vis.is_visible_from(db, module.id)
     }
 }
 
@@ -6083,7 +6138,7 @@ pub trait HasCrate {
 
 impl<T: hir_def::HasModule> HasCrate for T {
     fn krate(&self, db: &dyn HirDatabase) -> Crate {
-        self.module(db.upcast()).krate().into()
+        self.module(db).krate().into()
     }
 }
 
@@ -6195,78 +6250,78 @@ pub trait HasContainer {
 
 impl HasContainer for ExternCrateDecl {
     fn container(&self, db: &dyn HirDatabase) -> ItemContainer {
-        container_id_to_hir(self.id.lookup(db.upcast()).container.into())
+        container_id_to_hir(self.id.lookup(db).container.into())
     }
 }
 
 impl HasContainer for Module {
     fn container(&self, db: &dyn HirDatabase) -> ItemContainer {
         // FIXME: handle block expressions as modules (their parent is in a different DefMap)
-        let def_map = self.id.def_map(db.upcast());
+        let def_map = self.id.def_map(db);
         match def_map[self.id.local_id].parent {
             Some(parent_id) => ItemContainer::Module(Module { id: def_map.module_id(parent_id) }),
-            None => ItemContainer::Crate(def_map.krate()),
+            None => ItemContainer::Crate(def_map.krate().into()),
         }
     }
 }
 
 impl HasContainer for Function {
     fn container(&self, db: &dyn HirDatabase) -> ItemContainer {
-        container_id_to_hir(self.id.lookup(db.upcast()).container)
+        container_id_to_hir(self.id.lookup(db).container)
     }
 }
 
 impl HasContainer for Struct {
     fn container(&self, db: &dyn HirDatabase) -> ItemContainer {
-        ItemContainer::Module(Module { id: self.id.lookup(db.upcast()).container })
+        ItemContainer::Module(Module { id: self.id.lookup(db).container })
     }
 }
 
 impl HasContainer for Union {
     fn container(&self, db: &dyn HirDatabase) -> ItemContainer {
-        ItemContainer::Module(Module { id: self.id.lookup(db.upcast()).container })
+        ItemContainer::Module(Module { id: self.id.lookup(db).container })
     }
 }
 
 impl HasContainer for Enum {
     fn container(&self, db: &dyn HirDatabase) -> ItemContainer {
-        ItemContainer::Module(Module { id: self.id.lookup(db.upcast()).container })
+        ItemContainer::Module(Module { id: self.id.lookup(db).container })
     }
 }
 
 impl HasContainer for TypeAlias {
     fn container(&self, db: &dyn HirDatabase) -> ItemContainer {
-        container_id_to_hir(self.id.lookup(db.upcast()).container)
+        container_id_to_hir(self.id.lookup(db).container)
     }
 }
 
 impl HasContainer for Const {
     fn container(&self, db: &dyn HirDatabase) -> ItemContainer {
-        container_id_to_hir(self.id.lookup(db.upcast()).container)
+        container_id_to_hir(self.id.lookup(db).container)
     }
 }
 
 impl HasContainer for Static {
     fn container(&self, db: &dyn HirDatabase) -> ItemContainer {
-        container_id_to_hir(self.id.lookup(db.upcast()).container)
+        container_id_to_hir(self.id.lookup(db).container)
     }
 }
 
 impl HasContainer for Trait {
     fn container(&self, db: &dyn HirDatabase) -> ItemContainer {
-        ItemContainer::Module(Module { id: self.id.lookup(db.upcast()).container })
+        ItemContainer::Module(Module { id: self.id.lookup(db).container })
     }
 }
 
 impl HasContainer for TraitAlias {
     fn container(&self, db: &dyn HirDatabase) -> ItemContainer {
-        ItemContainer::Module(Module { id: self.id.lookup(db.upcast()).container })
+        ItemContainer::Module(Module { id: self.id.lookup(db).container })
     }
 }
 
 impl HasContainer for ExternBlock {
     fn container(&self, db: &dyn HirDatabase) -> ItemContainer {
-        ItemContainer::Module(Module { id: self.id.lookup(db.upcast()).container })
+        ItemContainer::Module(Module { id: self.id.lookup(db).container })
     }
 }
 
@@ -6285,7 +6340,7 @@ pub enum ItemContainer {
     Impl(Impl),
     Module(Module),
     ExternBlock(ExternBlock),
-    Crate(CrateId),
+    Crate(Crate),
 }
 
 /// Subset of `ide_db::Definition` that doc links can resolve to.
@@ -6299,7 +6354,7 @@ fn push_ty_diagnostics(
     db: &dyn HirDatabase,
     acc: &mut Vec<AnyDiagnostic>,
     diagnostics: Option<ThinArc<(), TyLoweringDiagnostic>>,
-    source_map: &TypesSourceMap,
+    source_map: &ExpressionStoreSourceMap,
 ) {
     if let Some(diagnostics) = diagnostics {
         acc.extend(
@@ -6348,3 +6403,48 @@ where
         self(item)
     }
 }
+
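+/// Resolves an absolute path whose first segment is a crate name.
+///
+/// Every crate whose display name matches that first segment is considered; the remaining
+/// segments are walked module by module through the crate's `DefMap`, and all items that the
+/// final segment resolves to in the reached module's scope are yielded.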
+pub fn resolve_absolute_path<'a, I: Iterator<Item = Symbol> + Clone + 'a>(
+    db: &'a dyn HirDatabase,
+    mut segments: I,
+) -> impl Iterator<Item = ItemInNs> + use<'a, I> {
+    segments
+        .next()
+        .into_iter()
+        .flat_map(move |crate_name| {
+            db.all_crates()
+                .iter()
+                .filter(|&krate| {
+                    krate
+                        .extra_data(db)
+                        .display_name
+                        .as_ref()
+                        .is_some_and(|name| *name.crate_name().symbol() == crate_name)
+                })
+                .filter_map(|&krate| {
+                    let segments = segments.clone();
+                    let mut def_map = db.crate_def_map(krate);
+                    let mut module = &def_map[DefMap::ROOT];
+                    let mut segments = segments.with_position().peekable();
+                    while let Some((_, segment)) = segments.next_if(|&(position, _)| {
+                        !matches!(position, itertools::Position::Last | itertools::Position::Only)
+                    }) {
+                        let res = module
+                            .scope
+                            .get(&Name::new_symbol_root(segment))
+                            .take_types()
+                            .and_then(|res| match res {
+                                ModuleDefId::ModuleId(it) => Some(it),
+                                _ => None,
+                            })?;
+                        def_map = res.def_map(db);
+                        module = &def_map[res.local_id];
+                    }
+                    let (_, item_name) = segments.next()?;
+                    let res = module.scope.get(&Name::new_symbol_root(item_name));
+                    Some(res.iter_items().map(|(item, _)| item.into()))
+                })
+                .collect::<Vec<_>>()
+        })
+        .flatten()
+}
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
index 5e2eebcd13c69..f708f2e166735 100644
--- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
@@ -12,49 +12,47 @@ use std::{
 
 use either::Either;
 use hir_def::{
-    expr_store::{Body, ExprOrPatSource},
+    DefWithBodyId, FunctionId, MacroId, StructId, TraitId, VariantId,
+    expr_store::{Body, ExprOrPatSource, path::Path},
     hir::{BindingId, Expr, ExprId, ExprOrPatId, Pat},
-    lower::LowerCtx,
-    nameres::{MacroSubNs, ModuleOrigin},
-    path::ModPath,
+    nameres::ModuleOrigin,
     resolver::{self, HasResolver, Resolver, TypeNs},
-    type_ref::{Mutability, TypesMap, TypesSourceMap},
-    AsMacroCall, DefWithBodyId, FunctionId, MacroId, StructId, TraitId, VariantId,
+    type_ref::Mutability,
 };
 use hir_expand::{
+    EditionedFileId, ExpandResult, FileRange, HirFileId, InMacroFile, MacroCallId,
     attrs::collect_attrs,
     builtin::{BuiltinFnLikeExpander, EagerExpander},
     db::ExpandDatabase,
-    files::InRealFile,
+    files::{FileRangeWrapper, InRealFile},
     hygiene::SyntaxContextExt as _,
     inert_attr_macro::find_builtin_attr_idx,
+    mod_path::{ModPath, PathKind},
     name::AsName,
-    ExpandResult, FileRange, InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,
 };
 use hir_ty::diagnostics::unsafe_operations_for_body;
-use intern::{sym, Symbol};
+use intern::{Interned, Symbol, sym};
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
-use smallvec::{smallvec, SmallVec};
-use span::{AstIdMap, EditionedFileId, FileId, HirFileIdRepr, SyntaxContextId};
+use smallvec::{SmallVec, smallvec};
+use span::{Edition, FileId, SyntaxContext};
 use stdx::TupleExt;
 use syntax::{
-    algo::skip_trivia_token,
-    ast::{self, HasAttrs as _, HasGenericParams},
     AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange,
     TextSize,
+    algo::skip_trivia_token,
+    ast::{self, HasAttrs as _, HasGenericParams},
 };
-use triomphe::Arc;
 
 use crate::{
+    Adjust, Adjustment, Adt, AutoBorrow, BindingMode, BuiltinAttr, Callable, Const, ConstParam,
+    Crate, DefWithBody, DeriveHelper, Enum, Field, Function, GenericSubstitution, HasSource, Impl,
+    InFile, InlineAsmOperand, ItemInNs, Label, LifetimeParam, Local, Macro, Module, ModuleDef,
+    Name, OverloadedDeref, ScopeDef, Static, Struct, ToolModule, Trait, TraitAlias, TupleField,
+    Type, TypeAlias, TypeParam, Union, Variant, VariantDef,
     db::HirDatabase,
     semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
-    source_analyzer::{name_hygiene, resolve_hir_path, SourceAnalyzer},
-    Adjust, Adjustment, Adt, AutoBorrow, BindingMode, BuiltinAttr, Callable, Const, ConstParam,
-    Crate, DefWithBody, DeriveHelper, Enum, Field, Function, GenericSubstitution, HasSource,
-    HirFileId, Impl, InFile, InlineAsmOperand, ItemInNs, Label, LifetimeParam, Local, Macro,
-    Module, ModuleDef, Name, OverloadedDeref, Path, ScopeDef, Static, Struct, ToolModule, Trait,
-    TraitAlias, TupleField, Type, TypeAlias, TypeParam, Union, Variant, VariantDef,
+    source_analyzer::{SourceAnalyzer, name_hygiene, resolve_hir_path},
 };
 
 const CONTINUE_NO_BREAKS: ControlFlow<Infallible, ()> = ControlFlow::Continue(());
@@ -138,8 +136,8 @@ pub struct Semantics<'db, DB> {
 pub struct SemanticsImpl<'db> {
     pub db: &'db dyn HirDatabase,
     s2d_cache: RefCell<SourceToDefCache>,
-    /// MacroCall to its expansion's MacroFileId cache
-    macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroFileId>>,
+    /// Cache mapping a `MacroCall` to the `MacroCallId` of its expansion.
+    macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroCallId>>,
 }
 
 impl<DB> fmt::Debug for Semantics<'_, DB> {
@@ -308,21 +306,23 @@ impl<'db> SemanticsImpl<'db> {
     }
 
     pub fn parse(&self, file_id: EditionedFileId) -> ast::SourceFile {
+        let hir_file_id = file_id.into();
         let tree = self.db.parse(file_id).tree();
-        self.cache(tree.syntax().clone(), file_id.into());
+        self.cache(tree.syntax().clone(), hir_file_id);
         tree
     }
 
-    /// If not crate is found for the file, returns the last crate in topological order.
-    pub fn first_crate_or_default(&self, file: FileId) -> Crate {
+    /// If no crate is found for the file, tries to return the last crate in topological order.
+    pub fn first_crate(&self, file: FileId) -> Option<Crate> {
         match self.file_to_module_defs(file).next() {
-            Some(module) => module.krate(),
-            None => (*self.db.crate_graph().crates_in_topological_order().last().unwrap()).into(),
+            Some(module) => Some(module.krate()),
+            None => self.db.all_crates().last().copied().map(Into::into),
         }
     }
 
     pub fn attach_first_edition(&self, file: FileId) -> Option<EditionedFileId> {
         Some(EditionedFileId::new(
+            self.db,
             file,
             self.file_to_module_defs(file).next()?.krate().edition(self.db),
         ))
@@ -331,23 +331,24 @@ impl<'db> SemanticsImpl<'db> {
     pub fn parse_guess_edition(&self, file_id: FileId) -> ast::SourceFile {
         let file_id = self
             .attach_first_edition(file_id)
-            .unwrap_or_else(|| EditionedFileId::current_edition(file_id));
+            .unwrap_or_else(|| EditionedFileId::new(self.db, file_id, Edition::CURRENT));
+
         let tree = self.db.parse(file_id).tree();
         self.cache(tree.syntax().clone(), file_id.into());
         tree
     }
 
     pub fn find_parent_file(&self, file_id: HirFileId) -> Option<InFile<SyntaxNode>> {
-        match file_id.repr() {
-            HirFileIdRepr::FileId(file_id) => {
-                let module = self.file_to_module_defs(file_id.file_id()).next()?;
+        match file_id {
+            HirFileId::FileId(file_id) => {
+                let module = self.file_to_module_defs(file_id.file_id(self.db)).next()?;
                 let def_map = self.db.crate_def_map(module.krate().id);
                 match def_map[module.id.local_id].origin {
                     ModuleOrigin::CrateRoot { .. } => None,
                     ModuleOrigin::File { declaration, declaration_tree_id, .. } => {
                         let file_id = declaration_tree_id.file_id();
                         let in_file = InFile::new(file_id, declaration);
-                        let node = in_file.to_node(self.db.upcast());
+                        let node = in_file.to_node(self.db);
                         let root = find_root(node.syntax());
                         self.cache(root, file_id);
                         Some(in_file.with_value(node.syntax().clone()))
@@ -355,11 +356,8 @@ impl<'db> SemanticsImpl<'db> {
                     _ => unreachable!("FileId can only belong to a file module"),
                 }
             }
-            HirFileIdRepr::MacroFile(macro_file) => {
-                let node = self
-                    .db
-                    .lookup_intern_macro_call(macro_file.macro_call_id)
-                    .to_node(self.db.upcast());
+            HirFileId::MacroFile(macro_file) => {
+                let node = self.db.lookup_intern_macro_call(macro_file).to_node(self.db);
                 let root = find_root(&node.value);
                 self.cache(root, node.file_id);
                 Some(node)
@@ -370,8 +368,8 @@ impl<'db> SemanticsImpl<'db> {
     /// Returns the `SyntaxNode` of the module. If this is a file module, returns
     /// the `SyntaxNode` of the *definition* file, not of the *declaration*.
     pub fn module_definition_node(&self, module: Module) -> InFile<SyntaxNode> {
-        let def_map = module.id.def_map(self.db.upcast());
-        let definition = def_map[module.id.local_id].origin.definition_source(self.db.upcast());
+        let def_map = module.id.def_map(self.db);
+        let definition = def_map[module.id.local_id].origin.definition_source(self.db);
         let definition = definition.map(|it| it.node());
         let root_node = find_root(&definition.value);
         self.cache(root_node, definition.file_id);
@@ -384,7 +382,7 @@ impl<'db> SemanticsImpl<'db> {
         node
     }
 
-    pub fn expand(&self, file_id: MacroFileId) -> ExpandResult<SyntaxNode> {
+    pub fn expand(&self, file_id: MacroCallId) -> ExpandResult<SyntaxNode> {
         let res = self.db.parse_macro_expansion(file_id).map(|it| it.0.syntax_node());
         self.cache(res.value.clone(), file_id.into());
         res
@@ -394,13 +392,7 @@ impl<'db> SemanticsImpl<'db> {
         let sa = self.analyze_no_infer(macro_call.syntax())?;
 
         let macro_call = InFile::new(sa.file_id, macro_call);
-        let file_id = if let Some(call) =
-            <ast::MacroCall as crate::semantics::ToDef>::to_def(self, macro_call)
-        {
-            call.as_macro_file()
-        } else {
-            sa.expand(self.db, macro_call)?
-        };
+        let file_id = sa.expand(self.db, macro_call)?;
 
         let node = self.parse_or_expand(file_id.into());
         Some(node)
@@ -408,15 +400,13 @@ impl<'db> SemanticsImpl<'db> {
 
     pub fn check_cfg_attr(&self, attr: &ast::TokenTree) -> Option<bool> {
         let file_id = self.find_file(attr.syntax()).file_id;
-        let krate = match file_id.repr() {
-            HirFileIdRepr::FileId(file_id) => {
-                self.file_to_module_defs(file_id.file_id()).next()?.krate().id
-            }
-            HirFileIdRepr::MacroFile(macro_file) => {
-                self.db.lookup_intern_macro_call(macro_file.macro_call_id).krate
+        let krate = match file_id {
+            HirFileId::FileId(file_id) => {
+                self.file_to_module_defs(file_id.file_id(self.db)).next()?.krate().id
             }
+            HirFileId::MacroFile(macro_file) => self.db.lookup_intern_macro_call(macro_file).krate,
         };
-        hir_expand::check_cfg_attr_value(self.db.upcast(), attr, krate)
+        hir_expand::check_cfg_attr_value(self.db, attr, krate)
     }
 
     /// Expands the macro if it isn't one of the built-in ones that expand to custom syntax or dummy
@@ -428,14 +418,8 @@ impl<'db> SemanticsImpl<'db> {
         let sa = self.analyze_no_infer(macro_call.syntax())?;
 
         let macro_call = InFile::new(sa.file_id, macro_call);
-        let file_id = if let Some(call) =
-            <ast::MacroCall as crate::semantics::ToDef>::to_def(self, macro_call)
-        {
-            call.as_macro_file()
-        } else {
-            sa.expand(self.db, macro_call)?
-        };
-        let macro_call = self.db.lookup_intern_macro_call(file_id.macro_call_id);
+        let file_id = sa.expand(self.db, macro_call)?;
+        let macro_call = self.db.lookup_intern_macro_call(file_id);
 
         let skip = matches!(
             macro_call.def.kind,
@@ -468,7 +452,7 @@ impl<'db> SemanticsImpl<'db> {
     pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<ExpandResult<SyntaxNode>> {
         let src = self.wrap_node_infile(item.clone());
         let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src.as_ref()))?;
-        Some(self.expand(macro_call_id.as_macro_file()))
+        Some(self.expand(macro_call_id))
     }
 
     pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
@@ -477,7 +461,7 @@ impl<'db> SemanticsImpl<'db> {
         let call_id = self.with_ctx(|ctx| {
             ctx.attr_to_derive_macro_call(src.with_value(&adt), src).map(|(_, it, _)| it)
         })?;
-        Some(self.parse_or_expand(call_id.as_file()))
+        Some(self.parse_or_expand(call_id.into()))
     }
 
     pub fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<Option<Macro>>> {
@@ -497,7 +481,7 @@ impl<'db> SemanticsImpl<'db> {
             .derive_macro_calls(attr)?
             .into_iter()
             .flat_map(|call| {
-                let file_id = call?.as_macro_file();
+                let file_id = call?;
                 let ExpandResult { value, err } = self.db.parse_macro_expansion(file_id);
                 let root_node = value.0.syntax_node();
                 self.cache(root_node.clone(), file_id.into());
@@ -538,7 +522,7 @@ impl<'db> SemanticsImpl<'db> {
         Some(result)
     }
 
-    pub fn derive_helper(&self, attr: &ast::Attr) -> Option<Vec<(Macro, MacroFileId)>> {
+    pub fn derive_helper(&self, attr: &ast::Attr) -> Option<Vec<(Macro, MacroCallId)>> {
         let adt = attr.syntax().ancestors().find_map(ast::Item::cast).and_then(|it| match it {
             ast::Item::Struct(it) => Some(ast::Adt::Struct(it)),
             ast::Item::Enum(it) => Some(ast::Adt::Enum(it)),
@@ -554,7 +538,7 @@ impl<'db> SemanticsImpl<'db> {
             .derive_helpers_in_scope(InFile::new(sa.file_id, id))?
             .iter()
             .filter(|&(name, _, _)| *name == attr_name)
-            .map(|&(_, macro_, call)| (macro_.into(), call.as_macro_file()))
+            .map(|&(_, macro_, call)| (macro_.into(), call))
             .collect();
         res.is_empty().not().then_some(res)
     }
@@ -571,16 +555,12 @@ impl<'db> SemanticsImpl<'db> {
         speculative_args: &ast::TokenTree,
         token_to_map: SyntaxToken,
     ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
-        let SourceAnalyzer { file_id, resolver, .. } =
-            self.analyze_no_infer(actual_macro_call.syntax())?;
-        let macro_call = InFile::new(file_id, actual_macro_call);
-        let krate = resolver.krate();
-        let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| {
-            resolver.resolve_path_as_macro_def(self.db.upcast(), path, Some(MacroSubNs::Bang))
-        })?;
+        let analyzer = self.analyze_no_infer(actual_macro_call.syntax())?;
+        let macro_call = InFile::new(analyzer.file_id, actual_macro_call);
+        let macro_file = analyzer.expansion(macro_call)?;
         hir_expand::db::expand_speculative(
-            self.db.upcast(),
-            macro_call_id,
+            self.db,
+            macro_file,
             speculative_args.syntax(),
             token_to_map,
         )
@@ -588,16 +568,11 @@ impl<'db> SemanticsImpl<'db> {
 
     pub fn speculative_expand_raw(
         &self,
-        macro_file: MacroFileId,
+        macro_file: MacroCallId,
         speculative_args: &SyntaxNode,
         token_to_map: SyntaxToken,
     ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
-        hir_expand::db::expand_speculative(
-            self.db.upcast(),
-            macro_file.macro_call_id,
-            speculative_args,
-            token_to_map,
-        )
+        hir_expand::db::expand_speculative(self.db, macro_file, speculative_args, token_to_map)
     }
 
     /// Expand the macro call with a different item as the input, mapping the `token_to_map` down into the
@@ -611,7 +586,7 @@ impl<'db> SemanticsImpl<'db> {
         let macro_call = self.wrap_node_infile(actual_macro_call.clone());
         let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(macro_call.as_ref()))?;
         hir_expand::db::expand_speculative(
-            self.db.upcast(),
+            self.db,
             macro_call_id,
             speculative_args.syntax(),
             token_to_map,
@@ -630,7 +605,7 @@ impl<'db> SemanticsImpl<'db> {
             ctx.attr_to_derive_macro_call(attr.with_value(&adt), attr).map(|(_, it, _)| it)
         })?;
         hir_expand::db::expand_speculative(
-            self.db.upcast(),
+            self.db,
             macro_call_id,
             speculative_args.syntax(),
             token_to_map,
@@ -641,7 +616,7 @@ impl<'db> SemanticsImpl<'db> {
     /// and returns the conflicting locals.
     pub fn rename_conflicts(&self, to_be_renamed: &Local, new_name: &str) -> Vec<Local> {
         let body = self.db.body(to_be_renamed.parent);
-        let resolver = to_be_renamed.parent.resolver(self.db.upcast());
+        let resolver = to_be_renamed.parent.resolver(self.db);
         let starting_expr =
             body.binding_owners.get(&to_be_renamed.binding_id).copied().unwrap_or(body.body_expr);
         let mut visitor = RenameConflictsVisitor {
@@ -755,6 +730,35 @@ impl<'db> SemanticsImpl<'db> {
         }
     }
 
+    pub fn debug_hir_at(&self, token: SyntaxToken) -> Option<String> {
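+        // Pretty-print the lowered HIR enclosing `token`: the containing body, variant
+        // field list, or item signature. Mainly useful for debugging.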
+        self.analyze_no_infer(&token.parent()?).and_then(|it| {
+            Some(match it.body_or_sig.as_ref()? {
+                crate::source_analyzer::BodyOrSig::Body { def, body, .. } => {
+                    hir_def::expr_store::pretty::print_body_hir(
+                        self.db,
+                        body,
+                        *def,
+                        it.file_id.edition(self.db),
+                    )
+                }
+                &crate::source_analyzer::BodyOrSig::VariantFields { def, .. } => {
+                    hir_def::expr_store::pretty::print_variant_body_hir(
+                        self.db,
+                        def,
+                        it.file_id.edition(self.db),
+                    )
+                }
+                &crate::source_analyzer::BodyOrSig::Sig { def, .. } => {
+                    hir_def::expr_store::pretty::print_signature(
+                        self.db,
+                        def,
+                        it.file_id.edition(self.db),
+                    )
+                }
+            })
+        })
+    }
+
     /// Maps a node down by mapping its first and last token down.
     pub fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
         // This might not be the correct way to do this, but it works for now
@@ -873,7 +877,7 @@ impl<'db> SemanticsImpl<'db> {
     pub fn descend_into_macros_cb(
         &self,
         token: SyntaxToken,
-        mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContextId),
+        mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContext),
     ) {
         if let Ok(token) = self.wrap_token_infile(token).into_real_file() {
             self.descend_into_macros_impl(token, &mut |t, ctx| {
@@ -897,13 +901,17 @@ impl<'db> SemanticsImpl<'db> {
         res
     }
 
-    pub fn descend_into_macros_no_opaque(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
+    pub fn descend_into_macros_no_opaque(
+        &self,
+        token: SyntaxToken,
+    ) -> SmallVec<[InFile<SyntaxToken>; 1]> {
         let mut res = smallvec![];
-        if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() {
+        let token = self.wrap_token_infile(token);
+        if let Ok(token) = token.clone().into_real_file() {
             self.descend_into_macros_impl(token, &mut |t, ctx| {
-                if !ctx.is_opaque(self.db.upcast()) {
+                if !ctx.is_opaque(self.db) {
                     // Don't descend into opaque contexts
-                    res.push(t.value);
+                    res.push(t);
                 }
                 CONTINUE_NO_BREAKS
             });
@@ -917,7 +925,7 @@ impl<'db> SemanticsImpl<'db> {
     pub fn descend_into_macros_breakable<T>(
         &self,
         token: InRealFile<SyntaxToken>,
-        mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContextId) -> ControlFlow<T>,
+        mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContext) -> ControlFlow<T>,
     ) -> Option<T> {
         self.descend_into_macros_impl(token.clone(), &mut cb)
     }
@@ -934,7 +942,7 @@ impl<'db> SemanticsImpl<'db> {
             let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier();
             let matches = (kind == mapped_kind || any_ident_match())
                 && text == value.text()
-                && !ctx.is_opaque(self.db.upcast());
+                && !ctx.is_opaque(self.db);
             if matches {
                 r.push(value);
             }
@@ -959,11 +967,7 @@ impl<'db> SemanticsImpl<'db> {
                         || kind.is_any_identifier() && value.kind().is_any_identifier();
                     let matches =
                         (kind == mapped_kind || any_ident_match()) && text == value.text();
-                    if matches {
-                        ControlFlow::Break(value)
-                    } else {
-                        ControlFlow::Continue(())
-                    }
+                    if matches { ControlFlow::Break(value) } else { ControlFlow::Continue(()) }
                 },
             )
         } else {
@@ -975,7 +979,7 @@ impl<'db> SemanticsImpl<'db> {
     fn descend_into_macros_impl<T>(
         &self,
         InRealFile { value: token, file_id }: InRealFile<SyntaxToken>,
-        f: &mut dyn FnMut(InFile<SyntaxToken>, SyntaxContextId) -> ControlFlow<T>,
+        f: &mut dyn FnMut(InFile<SyntaxToken>, SyntaxContext) -> ControlFlow<T>,
     ) -> Option<T> {
         let _p = tracing::info_span!("descend_into_macros_impl").entered();
 
@@ -1012,7 +1016,7 @@ impl<'db> SemanticsImpl<'db> {
             None => {
                 stack.push((
                     file_id.into(),
-                    smallvec![(token, SyntaxContextId::root(file_id.edition()))],
+                    smallvec![(token, SyntaxContext::root(file_id.edition(self.db)))],
                 ));
             }
         }
@@ -1041,7 +1045,6 @@ impl<'db> SemanticsImpl<'db> {
                         })
                     });
                     if let Some((call_id, item)) = containing_attribute_macro_call {
-                        let file_id = call_id.as_macro_file();
                         let attr_id = match self.db.lookup_intern_macro_call(call_id).kind {
                             hir_expand::MacroCallKind::Attr { invoc_attr_index, .. } => {
                                 invoc_attr_index.ast_index()
@@ -1070,7 +1073,7 @@ impl<'db> SemanticsImpl<'db> {
                             .unwrap_or_else(|| text_range.start());
                         let text_range = TextRange::new(start, text_range.end());
                         filter_duplicates(tokens, text_range);
-                        return process_expansion_for_token(&mut stack, file_id);
+                        return process_expansion_for_token(&mut stack, call_id);
                     }
 
                     // Then check for token trees, that means we are either in a function-like macro or
@@ -1093,24 +1096,16 @@ impl<'db> SemanticsImpl<'db> {
                             let file_id = match m_cache.get(&mcall) {
                                 Some(&it) => it,
                                 None => {
-                                    let it = if let Some(call) =
-                                        <ast::MacroCall as crate::semantics::ToDef>::to_def(
-                                            self,
-                                            mcall.as_ref(),
-                                        ) {
-                                        call.as_macro_file()
-                                    } else {
-                                        token
-                                            .parent()
-                                            .and_then(|parent| {
-                                                self.analyze_impl(
-                                                    InFile::new(expansion, &parent),
-                                                    None,
-                                                    false,
-                                                )
-                                            })?
-                                            .expand(self.db, mcall.as_ref())?
-                                    };
+                                    let it = token
+                                        .parent()
+                                        .and_then(|parent| {
+                                            self.analyze_impl(
+                                                InFile::new(expansion, &parent),
+                                                None,
+                                                false,
+                                            )
+                                        })?
+                                        .expand(self.db, mcall.as_ref())?;
                                     m_cache.insert(mcall, it);
                                     it
                                 }
@@ -1119,10 +1114,10 @@ impl<'db> SemanticsImpl<'db> {
                             filter_duplicates(tokens, text_range);
 
                             process_expansion_for_token(&mut stack, file_id).or(file_id
-                                .eager_arg(self.db.upcast())
+                                .eager_arg(self.db)
                                 .and_then(|arg| {
                                     // also descend into eager expansions
-                                    process_expansion_for_token(&mut stack, arg.as_macro_file())
+                                    process_expansion_for_token(&mut stack, arg)
                                 }))
                         }
                         // derive or derive helper
@@ -1146,7 +1141,6 @@ impl<'db> SemanticsImpl<'db> {
                                     match derive_call {
                                         Some(call_id) => {
                                             // resolved to a derive
-                                            let file_id = call_id.as_macro_file();
                                             let text_range = attr.syntax().text_range();
                                             // remove any other token in this macro input, all their mappings are the
                                             // same as this
@@ -1154,7 +1148,7 @@ impl<'db> SemanticsImpl<'db> {
                                                 !text_range.contains_range(t.text_range())
                                             });
                                             return process_expansion_for_token(
-                                                &mut stack, file_id,
+                                                &mut stack, call_id,
                                             );
                                         }
                                         None => Some(adt),
@@ -1202,10 +1196,7 @@ impl<'db> SemanticsImpl<'db> {
                                 // as there may be multiple derives registering the same helper
                                 // name, we gotta make sure to call this for all of them!
                                 // FIXME: We need to call `f` for all of them as well though!
-                                res = res.or(process_expansion_for_token(
-                                    &mut stack,
-                                    derive.as_macro_file(),
-                                ));
+                                res = res.or(process_expansion_for_token(&mut stack, *derive));
                             }
                             res
                         }
@@ -1251,21 +1242,19 @@ impl<'db> SemanticsImpl<'db> {
     /// macro file the node resides in.
     pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
         let node = self.find_file(node);
-        node.original_file_range_rooted(self.db.upcast())
+        node.original_file_range_rooted(self.db)
     }
 
     /// Attempts to map the node out of macro expanded files returning the original file range.
     pub fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
         let node = self.find_file(node);
-        node.original_file_range_opt(self.db.upcast())
-            .filter(|(_, ctx)| ctx.is_root())
-            .map(TupleExt::head)
+        node.original_file_range_opt(self.db).filter(|(_, ctx)| ctx.is_root()).map(TupleExt::head)
     }
 
     /// Attempts to map the node out of macro expanded files.
     /// This only works for attribute expansions, as other ones do not have nodes as input.
     pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
-        self.wrap_node_infile(node).original_ast_node_rooted(self.db.upcast()).map(
+        self.wrap_node_infile(node).original_ast_node_rooted(self.db).map(
             |InRealFile { file_id, value }| {
                 self.cache(find_root(value.syntax()), file_id.into());
                 value
@@ -1277,7 +1266,7 @@ impl<'db> SemanticsImpl<'db> {
     /// This only works for attribute expansions, as other ones do not have nodes as input.
     pub fn original_syntax_node_rooted(&self, node: &SyntaxNode) -> Option<SyntaxNode> {
         let InFile { file_id, .. } = self.find_file(node);
-        InFile::new(file_id, node).original_syntax_node_rooted(self.db.upcast()).map(
+        InFile::new(file_id, node).original_syntax_node_rooted(self.db).map(
             |InRealFile { file_id, value }| {
                 self.cache(find_root(&value), file_id.into());
                 value
@@ -1285,10 +1274,14 @@ impl<'db> SemanticsImpl<'db> {
         )
     }
 
-    pub fn diagnostics_display_range(&self, src: InFile<SyntaxNodePtr>) -> FileRange {
+    pub fn diagnostics_display_range(
+        &self,
+        src: InFile<SyntaxNodePtr>,
+    ) -> FileRangeWrapper<FileId> {
         let root = self.parse_or_expand(src.file_id);
         let node = src.map(|it| it.to_node(&root));
-        node.as_ref().original_file_range_rooted(self.db.upcast())
+        let FileRange { file_id, range } = node.as_ref().original_file_range_rooted(self.db);
+        FileRangeWrapper { file_id: file_id.file_id(self.db), range }
     }
 
     fn token_ancestors_with_macros(
@@ -1349,31 +1342,19 @@ impl<'db> SemanticsImpl<'db> {
 
     pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
         let analyze = self.analyze(ty.syntax())?;
-        let (mut types_map, mut types_source_map) =
-            (TypesMap::default(), TypesSourceMap::default());
-        let mut ctx =
-            LowerCtx::new(self.db.upcast(), analyze.file_id, &mut types_map, &mut types_source_map);
-        let type_ref = crate::TypeRef::from_ast(&mut ctx, ty.clone());
-        let ty = hir_ty::TyLoweringContext::new_maybe_unowned(
-            self.db,
-            &analyze.resolver,
-            &types_map,
-            None,
-            analyze.resolver.type_owner(),
-        )
-        .lower_ty(type_ref);
-        Some(Type::new_with_resolver(self.db, &analyze.resolver, ty))
+        analyze.type_of_type(self.db, ty)
     }
 
     pub fn resolve_trait(&self, path: &ast::Path) -> Option<Trait> {
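+        // A trait path is resolved through the type it is part of: find the enclosing
+        // `ast::Type`, look up its lowered `TypeRef` in the expression store, and resolve
+        // that path in the type namespace.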
+        let parent_ty = path.syntax().parent().and_then(ast::Type::cast)?;
         let analyze = self.analyze(path.syntax())?;
-        let (mut types_map, mut types_source_map) =
-            (TypesMap::default(), TypesSourceMap::default());
-        let mut ctx =
-            LowerCtx::new(self.db.upcast(), analyze.file_id, &mut types_map, &mut types_source_map);
-        let hir_path = Path::from_src(&mut ctx, path.clone())?;
-        match analyze.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), &hir_path)? {
-            TypeNs::TraitId(id) => Some(Trait { id }),
+        let ty = analyze.store_sm()?.node_type(InFile::new(analyze.file_id, &parent_ty))?;
+        let path = match &analyze.store()?.types[ty] {
+            hir_def::type_ref::TypeRef::Path(path) => path,
+            _ => return None,
+        };
+        match analyze.resolver.resolve_path_in_type_ns_fully(self.db, path)? {
+            TypeNs::TraitId(trait_id) => Some(trait_id.into()),
             _ => None,
         }
     }
@@ -1388,7 +1369,7 @@ impl<'db> SemanticsImpl<'db> {
 
         let (mut source_ty, _) = analyzer.type_of_expr(self.db, expr)?;
 
-        analyzer.expr_adjustments(self.db, expr).map(|it| {
+        analyzer.expr_adjustments(expr).map(|it| {
             it.iter()
                 .map(|adjust| {
                     let target =
@@ -1521,7 +1502,7 @@ impl<'db> SemanticsImpl<'db> {
     }
 
     pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Either<Field, TupleField>> {
-        self.analyze(field.syntax())?.resolve_field(self.db, field)
+        self.analyze(field.syntax())?.resolve_field(field)
     }
 
     pub fn resolve_field_fallback(
@@ -1641,30 +1622,25 @@ impl<'db> SemanticsImpl<'db> {
         self.analyze(name.syntax())?.resolve_use_type_arg(name)
     }
 
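+    /// Attempts to resolve the field or enum variant a `NameRef` inside an `offset_of!`
+    /// expression refers to, together with its generic substitution.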
+    pub fn resolve_offset_of_field(
+        &self,
+        name_ref: &ast::NameRef,
+    ) -> Option<(Either<Variant, Field>, GenericSubstitution)> {
+        self.analyze_no_infer(name_ref.syntax())?.resolve_offset_of_field(self.db, name_ref)
+    }
+
     pub fn resolve_mod_path(
         &self,
         scope: &SyntaxNode,
         path: &ModPath,
     ) -> Option<impl Iterator<Item = ItemInNs>> {
         let analyze = self.analyze(scope)?;
-        let items = analyze.resolver.resolve_module_path_in_items(self.db.upcast(), path);
-        Some(items.iter_items().map(|(item, _)| item.into()))
-    }
-
-    pub fn resolve_mod_path_relative(
-        &self,
-        to: Module,
-        segments: impl IntoIterator<Item = Name>,
-    ) -> Option<impl Iterator<Item = ItemInNs>> {
-        let items = to.id.resolver(self.db.upcast()).resolve_module_path_in_items(
-            self.db.upcast(),
-            &ModPath::from_segments(hir_def::path::PathKind::Plain, segments),
-        );
+        let items = analyze.resolver.resolve_module_path_in_items(self.db, path);
         Some(items.iter_items().map(|(item, _)| item.into()))
     }
 
     fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> {
-        self.analyze(record_lit.syntax())?.resolve_variant(self.db, record_lit)
+        self.analyze(record_lit.syntax())?.resolve_variant(record_lit)
     }
 
     pub fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> {
@@ -1764,6 +1740,7 @@ impl<'db> SemanticsImpl<'db> {
         &self,
         node: InFile<&SyntaxNode>,
         offset: Option<TextSize>,
+        // replace this flag; just make the inference result a `LazyCell`
         infer_body: bool,
     ) -> Option<SourceAnalyzer> {
         let _p = tracing::info_span!("SemanticsImpl::analyze_impl").entered();
@@ -1776,16 +1753,30 @@ impl<'db> SemanticsImpl<'db> {
                     SourceAnalyzer::new_for_body(self.db, def, node, offset)
                 } else {
                     SourceAnalyzer::new_for_body_no_infer(self.db, def, node, offset)
-                })
+                });
+            }
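+            // Containers other than bodies and plain modules now get dedicated analyzers over
+            // their field or signature stores; only `ModuleId` falls back to a bare resolver.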
+            ChildContainer::VariantId(def) => {
+                return Some(SourceAnalyzer::new_variant_body(self.db, def, node, offset));
+            }
+            ChildContainer::TraitId(it) => {
+                return Some(SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset));
             }
-            ChildContainer::TraitId(it) => it.resolver(self.db.upcast()),
-            ChildContainer::TraitAliasId(it) => it.resolver(self.db.upcast()),
-            ChildContainer::ImplId(it) => it.resolver(self.db.upcast()),
-            ChildContainer::ModuleId(it) => it.resolver(self.db.upcast()),
-            ChildContainer::EnumId(it) => it.resolver(self.db.upcast()),
-            ChildContainer::VariantId(it) => it.resolver(self.db.upcast()),
-            ChildContainer::TypeAliasId(it) => it.resolver(self.db.upcast()),
-            ChildContainer::GenericDefId(it) => it.resolver(self.db.upcast()),
+            ChildContainer::TraitAliasId(it) => {
+                return Some(SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset));
+            }
+            ChildContainer::ImplId(it) => {
+                return Some(SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset));
+            }
+            ChildContainer::EnumId(it) => {
+                return Some(SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset));
+            }
+            ChildContainer::TypeAliasId(it) => {
+                return Some(SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset));
+            }
+            ChildContainer::GenericDefId(it) => {
+                return Some(SourceAnalyzer::new_generic_def(self.db, it, node, offset));
+            }
+            ChildContainer::ModuleId(it) => it.resolver(self.db),
         };
         Some(SourceAnalyzer::new_for_resolver(resolver, node))
     }
@@ -1891,22 +1882,21 @@ impl<'db> SemanticsImpl<'db> {
     }
 }
 
+// FIXME: This can't be the best way to do this
 fn macro_call_to_macro_id(
     ctx: &mut SourceToDefCtx<'_, '_>,
     macro_call_id: MacroCallId,
 ) -> Option<MacroId> {
-    use span::HirFileIdRepr;
-
-    let db: &dyn ExpandDatabase = ctx.db.upcast();
+    let db: &dyn ExpandDatabase = ctx.db;
     let loc = db.lookup_intern_macro_call(macro_call_id);
 
     match loc.def.ast_id() {
         Either::Left(it) => {
-            let node = match it.file_id.repr() {
-                HirFileIdRepr::FileId(file_id) => {
+            let node = match it.file_id {
+                HirFileId::FileId(file_id) => {
                     it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
                 }
-                HirFileIdRepr::MacroFile(macro_file) => {
+                HirFileId::MacroFile(macro_file) => {
                     let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
                     it.to_ptr(db).to_node(&expansion_info.expanded().value)
                 }
@@ -1914,11 +1904,11 @@ fn macro_call_to_macro_id(
             ctx.macro_to_def(InFile::new(it.file_id, &node))
         }
         Either::Right(it) => {
-            let node = match it.file_id.repr() {
-                HirFileIdRepr::FileId(file_id) => {
+            let node = match it.file_id {
+                HirFileId::FileId(file_id) => {
                     it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
                 }
-                HirFileIdRepr::MacroFile(macro_file) => {
+                HirFileId::MacroFile(macro_file) => {
                     let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
                     it.to_ptr(db).to_node(&expansion_info.expanded().value)
                 }
@@ -2028,12 +2018,12 @@ impl SemanticsScope<'_> {
     /// Note: `VisibleTraits` should be treated as an opaque type, passed into `Type
     pub fn visible_traits(&self) -> VisibleTraits {
         let resolver = &self.resolver;
-        VisibleTraits(resolver.traits_in_scope(self.db.upcast()))
+        VisibleTraits(resolver.traits_in_scope(self.db))
     }
 
     /// Calls the passed closure `f` on all names in scope.
     pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
-        let scope = self.resolver.names_in_scope(self.db.upcast());
+        let scope = self.resolver.names_in_scope(self.db);
         for (name, entries) in scope {
             for entry in entries {
                 let def = match entry {
@@ -2059,28 +2049,45 @@ impl SemanticsScope<'_> {
     /// Resolve a path as if it was written at the given scope. This is
     /// necessarily a heuristic, as it doesn't take hygiene into account.
     pub fn speculative_resolve(&self, ast_path: &ast::Path) -> Option<PathResolution> {
-        let root = ast_path.syntax().ancestors().last().unwrap();
-        let ast_id_map = Arc::new(AstIdMap::from_source(&root));
-        let (mut types_map, mut types_source_map) =
-            (TypesMap::default(), TypesSourceMap::default());
-        let mut ctx = LowerCtx::for_synthetic_ast(
-            self.db.upcast(),
-            ast_id_map,
-            &mut types_map,
-            &mut types_source_map,
-        );
-        let path = Path::from_src(&mut ctx, ast_path.clone())?;
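+        // Build a `ModPath` directly from the AST segments instead of lowering the whole path;
+        // leading `::`, `self`, `super` and `crate` segments determine the `PathKind`.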
+        let mut kind = PathKind::Plain;
+        let mut segments = vec![];
+        let mut first = true;
+        for segment in ast_path.segments() {
+            if first {
+                first = false;
+                if segment.coloncolon_token().is_some() {
+                    kind = PathKind::Abs;
+                }
+            }
+
+            let Some(k) = segment.kind() else { continue };
+            match k {
+                ast::PathSegmentKind::Name(name_ref) => segments.push(name_ref.as_name()),
+                ast::PathSegmentKind::Type { .. } => continue,
+                ast::PathSegmentKind::SelfTypeKw => {
+                    segments.push(Name::new_symbol_root(sym::Self_))
+                }
+                ast::PathSegmentKind::SelfKw => kind = PathKind::Super(0),
+                ast::PathSegmentKind::SuperKw => match kind {
+                    PathKind::Super(s) => kind = PathKind::Super(s + 1),
+                    PathKind::Plain => kind = PathKind::Super(1),
+                    PathKind::Crate | PathKind::Abs | PathKind::DollarCrate(_) => continue,
+                },
+                ast::PathSegmentKind::CrateKw => kind = PathKind::Crate,
+            }
+        }
+
         resolve_hir_path(
             self.db,
             &self.resolver,
-            &path,
+            &Path::BarePath(Interned::new(ModPath::from_segments(kind, segments))),
             name_hygiene(self.db, InFile::new(self.file_id, ast_path.syntax())),
-            &types_map,
+            None,
         )
     }
 
-    pub fn resolve_mod_path(&self, path: &ModPath) -> impl Iterator<Item = ItemInNs> {
-        let items = self.resolver.resolve_module_path_in_items(self.db.upcast(), path);
+    pub fn resolve_mod_path(&self, path: &ModPath) -> impl Iterator<Item = ItemInNs> + use<> {
+        let items = self.resolver.resolve_module_path_in_items(self.db, path);
         items.iter_items().map(|(item, _)| item.into())
     }
 
@@ -2109,7 +2116,7 @@ impl SemanticsScope<'_> {
     }
 
     pub fn extern_crate_decls(&self) -> impl Iterator<Item = Name> + '_ {
-        self.resolver.extern_crate_decls_in_scope(self.db.upcast())
+        self.resolver.extern_crate_decls_in_scope(self.db)
     }
 
     pub fn has_same_self_type(&self, other: &SemanticsScope<'_>) -> bool {
@@ -2145,7 +2152,7 @@ impl RenameConflictsVisitor<'_> {
             if let Some(name) = path.as_ident() {
                 if *name.symbol() == self.new_name {
                     if let Some(conflicting) = self.resolver.rename_will_conflict_with_renamed(
-                        self.db.upcast(),
+                        self.db,
                         name,
                         path,
                         self.body.expr_or_pat_path_hygiene(node),
@@ -2156,7 +2163,7 @@ impl RenameConflictsVisitor<'_> {
                 } else if *name.symbol() == self.old_name {
                     if let Some(conflicting) =
                         self.resolver.rename_will_conflict_with_another_variable(
-                            self.db.upcast(),
+                            self.db,
                             name,
                             path,
                             self.body.expr_or_pat_path_hygiene(node),
@@ -2174,12 +2181,12 @@ impl RenameConflictsVisitor<'_> {
     fn rename_conflicts(&mut self, expr: ExprId) {
         match &self.body[expr] {
             Expr::Path(path) => {
-                let guard = self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr);
+                let guard = self.resolver.update_to_inner_scope(self.db, self.owner, expr);
                 self.resolve_path(expr.into(), path);
                 self.resolver.reset_to_guard(guard);
             }
             &Expr::Assignment { target, .. } => {
-                let guard = self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr);
+                let guard = self.resolver.update_to_inner_scope(self.db, self.owner, expr);
                 self.body.walk_pats(target, &mut |pat| {
                     if let Pat::Path(path) = &self.body[pat] {
                         self.resolve_path(pat.into(), path);
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs
index d0fdf5cbdf7a3..9393d08ad3f96 100644
--- a/src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs
@@ -5,22 +5,22 @@
 //! node for a *child*, and get its hir.
 
 use either::Either;
-use hir_expand::{attrs::collect_attrs, HirFileId};
-use syntax::{ast, AstPtr};
+use hir_expand::{HirFileId, attrs::collect_attrs};
+use syntax::{AstPtr, ast};
 
 use hir_def::{
+    AdtId, AssocItemId, DefWithBodyId, EnumId, FieldId, GenericDefId, ImplId, ItemTreeLoc,
+    LifetimeParamId, Lookup, MacroId, ModuleDefId, ModuleId, TraitId, TypeOrConstParamId,
+    VariantId,
     db::DefDatabase,
     dyn_map::{
-        keys::{self, Key},
         DynMap,
+        keys::{self, Key},
     },
     item_scope::ItemScope,
     item_tree::ItemTreeNode,
     nameres::DefMap,
     src::{HasChildSource, HasSource},
-    AdtId, AssocItemId, DefWithBodyId, EnumId, FieldId, GenericDefId, ImplId, ItemTreeLoc,
-    LifetimeParamId, Lookup, MacroId, ModuleDefId, ModuleId, TraitId, TypeOrConstParamId,
-    VariantId,
 };
 
 pub(crate) trait ChildBySource {
@@ -34,11 +34,11 @@ pub(crate) trait ChildBySource {
 
 impl ChildBySource for TraitId {
     fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
-        let data = db.trait_data(*self);
+        let data = db.trait_items(*self);
 
         data.attribute_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each(
             |(ast_id, call_id)| {
-                res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db.upcast()), call_id);
+                res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db), call_id);
             },
         );
         data.items.iter().for_each(|&(_, item)| {
@@ -49,11 +49,11 @@ impl ChildBySource for TraitId {
 
 impl ChildBySource for ImplId {
     fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
-        let data = db.impl_data(*self);
+        let data = db.impl_items(*self);
         // FIXME: Macro calls
         data.attribute_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each(
             |(ast_id, call_id)| {
-                res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db.upcast()), call_id);
+                res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db), call_id);
             },
         );
         data.items.iter().for_each(|&(_, item)| {
@@ -84,7 +84,7 @@ impl ChildBySource for ItemScope {
             .for_each(|konst| insert_item_loc(db, res, file_id, konst, keys::CONST));
         self.attr_macro_invocs().filter(|(id, _)| id.file_id == file_id).for_each(
             |(ast_id, call_id)| {
-                res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db.upcast()), call_id);
+                res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db), call_id);
             },
         );
         self.legacy_macros().for_each(|(_, ids)| {
@@ -99,7 +99,7 @@ impl ChildBySource for ItemScope {
         });
         self.derive_macro_invocs().filter(|(id, _)| id.file_id == file_id).for_each(
             |(ast_id, calls)| {
-                let adt = ast_id.to_node(db.upcast());
+                let adt = ast_id.to_node(db);
                 calls.for_each(|(attr_id, call_id, calls)| {
                     if let Some((_, Either::Left(attr))) =
                         collect_attrs(&adt).nth(attr_id.ast_index())
@@ -112,7 +112,7 @@ impl ChildBySource for ItemScope {
         );
         self.iter_macro_invoc().filter(|(id, _)| id.file_id == file_id).for_each(
             |(ast_id, &call)| {
-                let ast = ast_id.to_ptr(db.upcast());
+                let ast = ast_id.to_ptr(db);
                 res[keys::MACRO_CALL].insert(ast, call);
             },
         );
@@ -182,7 +182,7 @@ impl ChildBySource for EnumId {
         let tree = loc.id.item_tree(db);
         let ast_id_map = db.ast_id_map(loc.id.file_id());
 
-        db.enum_data(*self).variants.iter().for_each(|&(variant, _)| {
+        db.enum_variants(*self).variants.iter().for_each(|&(variant, _)| {
             res[keys::ENUM_VARIANT]
                 .insert(ast_id_map.get(tree[variant.lookup(db).id.value].ast_id), variant);
         });
@@ -197,14 +197,14 @@ impl ChildBySource for DefWithBodyId {
         }
 
         sm.expansions().filter(|(ast, _)| ast.file_id == file_id).for_each(|(ast, &exp_id)| {
-            res[keys::MACRO_CALL].insert(ast.value, exp_id.macro_call_id);
+            res[keys::MACRO_CALL].insert(ast.value, exp_id);
         });
 
         for (block, def_map) in body.blocks(db) {
             // All block expressions are merged into the same map, because they logically all add
             // inner items to the containing `DefWithBodyId`.
             def_map[DefMap::ROOT].scope.child_by_source_to(db, res, file_id);
-            res[keys::BLOCK].insert(block.lookup(db).ast_id.to_ptr(db.upcast()), block);
+            res[keys::BLOCK].insert(block.lookup(db).ast_id.to_ptr(db), block);
         }
     }
 }
@@ -254,7 +254,7 @@ fn insert_item_loc<ID, N, Data>(
     id: ID,
     key: Key<N::Source, ID>,
 ) where
-    ID: for<'db> Lookup<Database<'db> = dyn DefDatabase + 'db, Data = Data> + 'static,
+    ID: Lookup<Database = dyn DefDatabase, Data = Data> + 'static,
     Data: ItemTreeLoc<Id = N>,
     N: ItemTreeNode,
     N::Source: 'static,
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
index 18cbaa15aeaed..466bf7f6c826d 100644
--- a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
@@ -87,37 +87,38 @@
 
 use either::Either;
 use hir_def::{
-    dyn_map::{
-        keys::{self, Key},
-        DynMap,
-    },
-    hir::{BindingId, Expr, LabelId},
     AdtId, BlockId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId,
     ExternCrateId, FieldId, FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId,
     Lookup, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId,
     UnionId, UseId, VariantId,
+    dyn_map::{
+        DynMap,
+        keys::{self, Key},
+    },
+    hir::{BindingId, Expr, LabelId},
 };
 use hir_expand::{
-    attrs::AttrId, name::AsName, ExpansionInfo, HirFileId, HirFileIdExt, InMacroFile, MacroCallId,
-    MacroFileIdExt,
+    EditionedFileId, ExpansionInfo, HirFileId, InMacroFile, MacroCallId, attrs::AttrId,
+    name::AsName,
 };
 use rustc_hash::FxHashMap;
 use smallvec::SmallVec;
-use span::{EditionedFileId, FileId, MacroFileId};
+use span::FileId;
 use stdx::impl_from;
 use syntax::{
-    ast::{self, HasName},
     AstNode, AstPtr, SyntaxNode,
+    ast::{self, HasName},
 };
+use tt::TextRange;
 
-use crate::{db::HirDatabase, semantics::child_by_source::ChildBySource, InFile, InlineAsmOperand};
+use crate::{InFile, InlineAsmOperand, db::HirDatabase, semantics::child_by_source::ChildBySource};
 
 #[derive(Default)]
 pub(super) struct SourceToDefCache {
     pub(super) dynmap_cache: FxHashMap<(ChildContainer, HirFileId), DynMap>,
-    expansion_info_cache: FxHashMap<MacroFileId, ExpansionInfo>,
+    expansion_info_cache: FxHashMap<MacroCallId, ExpansionInfo>,
     pub(super) file_to_def_cache: FxHashMap<FileId, SmallVec<[ModuleId; 1]>>,
-    pub(super) included_file_cache: FxHashMap<EditionedFileId, Option<MacroFileId>>,
+    pub(super) included_file_cache: FxHashMap<EditionedFileId, Option<MacroCallId>>,
     /// Root node to `HirFileId` cache
     pub(super) root_to_file_cache: FxHashMap<SyntaxNode, HirFileId>,
 }
@@ -137,14 +138,14 @@ impl SourceToDefCache {
         &mut self,
         db: &dyn HirDatabase,
         file: EditionedFileId,
-    ) -> Option<MacroFileId> {
+    ) -> Option<MacroCallId> {
         if let Some(&m) = self.included_file_cache.get(&file) {
             return m;
         }
         self.included_file_cache.insert(file, None);
-        for &crate_id in db.relevant_crates(file.into()).iter() {
+        for &crate_id in db.relevant_crates(file.file_id(db)).iter() {
             db.include_macro_invoc(crate_id).iter().for_each(|&(macro_call_id, file_id)| {
-                self.included_file_cache.insert(file_id, Some(MacroFileId { macro_call_id }));
+                self.included_file_cache.insert(file_id, Some(macro_call_id));
             });
         }
         self.included_file_cache.get(&file).copied().flatten()
@@ -153,10 +154,10 @@ impl SourceToDefCache {
     pub(super) fn get_or_insert_expansion(
         &mut self,
         db: &dyn HirDatabase,
-        macro_file: MacroFileId,
+        macro_file: MacroCallId,
     ) -> &ExpansionInfo {
         self.expansion_info_cache.entry(macro_file).or_insert_with(|| {
-            let exp_info = macro_file.expansion_info(db.upcast());
+            let exp_info = macro_file.expansion_info(db);
 
             let InMacroFile { file_id, value } = exp_info.expanded();
             Self::cache(&mut self.root_to_file_cache, value, file_id.into());
@@ -176,13 +177,14 @@ impl SourceToDefCtx<'_, '_> {
         let _p = tracing::info_span!("SourceToDefCtx::file_to_def").entered();
         self.cache.file_to_def_cache.entry(file).or_insert_with(|| {
             let mut mods = SmallVec::new();
+
             for &crate_id in self.db.relevant_crates(file).iter() {
                 // Note: `mod` declarations in block modules cannot be supported here
                 let crate_def_map = self.db.crate_def_map(crate_id);
                 let n_mods = mods.len();
                 let modules = |file| {
                     crate_def_map
-                        .modules_for_file(file)
+                        .modules_for_file(self.db, file)
                         .map(|local_id| crate_def_map.module_id(local_id))
                 };
                 mods.extend(modules(file));
@@ -191,18 +193,16 @@ impl SourceToDefCtx<'_, '_> {
                         self.db
                             .include_macro_invoc(crate_id)
                             .iter()
-                            .filter(|&&(_, file_id)| file_id == file)
+                            .filter(|&&(_, file_id)| file_id.file_id(self.db) == file)
                             .flat_map(|&(macro_call_id, file_id)| {
-                                self.cache
-                                    .included_file_cache
-                                    .insert(file_id, Some(MacroFileId { macro_call_id }));
+                                self.cache.included_file_cache.insert(file_id, Some(macro_call_id));
                                 modules(
                                     macro_call_id
-                                        .lookup(self.db.upcast())
+                                        .lookup(self.db)
                                         .kind
                                         .file_id()
-                                        .original_file(self.db.upcast())
-                                        .file_id(),
+                                        .original_file(self.db)
+                                        .file_id(self.db),
                                 )
                             }),
                     );
@@ -218,7 +218,7 @@ impl SourceToDefCtx<'_, '_> {
     pub(super) fn module_to_def(&mut self, src: InFile<&ast::Module>) -> Option<ModuleId> {
         let _p = tracing::info_span!("module_to_def").entered();
         let parent_declaration = self
-            .ancestors_with_macros(src.syntax_ref(), |_, ancestor| {
+            .parent_ancestors_with_macros(src.syntax_ref(), |_, ancestor, _| {
                 ancestor.map(Either::<ast::Module, ast::BlockExpr>::cast).transpose()
             })
             .map(|it| it.transpose());
@@ -231,21 +231,21 @@ impl SourceToDefCtx<'_, '_> {
                 self.module_to_def(parent_declaration.as_ref())
             }
             None => {
-                let file_id = src.file_id.original_file(self.db.upcast());
-                self.file_to_def(file_id.file_id()).first().copied()
+                let file_id = src.file_id.original_file(self.db);
+                self.file_to_def(file_id.file_id(self.db)).first().copied()
             }
         }?;
 
         let child_name = src.value.name()?.as_name();
-        let def_map = parent_module.def_map(self.db.upcast());
+        let def_map = parent_module.def_map(self.db);
         let &child_id = def_map[parent_module.local_id].children.get(&child_name)?;
         Some(def_map.module_id(child_id))
     }
 
     pub(super) fn source_file_to_def(&mut self, src: InFile<&ast::SourceFile>) -> Option<ModuleId> {
         let _p = tracing::info_span!("source_file_to_def").entered();
-        let file_id = src.file_id.original_file(self.db.upcast());
-        self.file_to_def(file_id.file_id()).first().copied()
+        let file_id = src.file_id.original_file(self.db);
+        self.file_to_def(file_id.file_id(self.db)).first().copied()
     }
 
     pub(super) fn trait_to_def(&mut self, src: InFile<&ast::Trait>) -> Option<TraitId> {
@@ -344,7 +344,7 @@ impl SourceToDefCtx<'_, '_> {
             })
             .position(|it| it == *src.value)?;
         let container = self.find_pat_or_label_container(src.syntax_ref())?;
-        let (_, source_map) = self.db.body_with_source_map(container);
+        let source_map = self.db.body_with_source_map(container).1;
         let expr = source_map.node_expr(src.with_value(&ast::Expr::AsmExpr(asm)))?.as_expr()?;
         Some(InlineAsmOperand { owner: container, expr, index })
     }
@@ -377,7 +377,8 @@ impl SourceToDefCtx<'_, '_> {
         src: InFile<&ast::Label>,
     ) -> Option<(DefWithBodyId, LabelId)> {
         let container = self.find_pat_or_label_container(src.syntax_ref())?;
-        let (_body, source_map) = self.db.body_with_source_map(container);
+        let source_map = self.db.body_with_source_map(container).1;
+
         let label_id = source_map.node_label(src)?;
         Some((container, label_id))
     }
@@ -516,45 +517,22 @@ impl SourceToDefCtx<'_, '_> {
 
     pub(super) fn find_container(&mut self, src: InFile<&SyntaxNode>) -> Option<ChildContainer> {
         let _p = tracing::info_span!("find_container").entered();
-        let def =
-            self.ancestors_with_macros(src, |this, container| this.container_to_def(container));
+        let def = self.parent_ancestors_with_macros(src, |this, container, child| {
+            this.container_to_def(container, child)
+        });
         if let Some(def) = def {
             return Some(def);
         }
 
         let def = self
-            .file_to_def(src.file_id.original_file(self.db.upcast()).file_id())
+            .file_to_def(src.file_id.original_file(self.db).file_id(self.db))
             .first()
             .copied()?;
         Some(def.into())
     }
 
-    /// Skips the attributed item that caused the macro invocation we are climbing up
-    fn ancestors_with_macros<T>(
-        &mut self,
-        node: InFile<&SyntaxNode>,
-        mut cb: impl FnMut(&mut Self, InFile<SyntaxNode>) -> Option<T>,
-    ) -> Option<T> {
-        let parent = |this: &mut Self, node: InFile<&SyntaxNode>| match node.value.parent() {
-            Some(parent) => Some(node.with_value(parent)),
-            None => {
-                let macro_file = node.file_id.macro_file()?;
-                let expansion_info = this.cache.get_or_insert_expansion(this.db, macro_file);
-                expansion_info.arg().map(|node| node?.parent()).transpose()
-            }
-        };
-        let mut node = node.cloned();
-        while let Some(parent) = parent(self, node.as_ref()) {
-            if let Some(res) = cb(self, parent.clone()) {
-                return Some(res);
-            }
-            node = parent;
-        }
-        None
-    }
-
     fn find_generic_param_container(&mut self, src: InFile<&SyntaxNode>) -> Option<GenericDefId> {
-        self.ancestors_with_macros(src, |this, InFile { file_id, value }| {
+        self.parent_ancestors_with_macros(src, |this, InFile { file_id, value }, _| {
             let item = ast::Item::cast(value)?;
             match &item {
                 ast::Item::Fn(it) => this.fn_to_def(InFile::new(file_id, it)).map(Into::into),
@@ -575,8 +553,9 @@ impl SourceToDefCtx<'_, '_> {
         })
     }
 
+    // FIXME: Remove this when we do inference in signatures
     fn find_pat_or_label_container(&mut self, src: InFile<&SyntaxNode>) -> Option<DefWithBodyId> {
-        self.ancestors_with_macros(src, |this, InFile { file_id, value }| {
+        self.parent_ancestors_with_macros(src, |this, InFile { file_id, value }, _| {
             let item = match ast::Item::cast(value.clone()) {
                 Some(it) => it,
                 None => {
@@ -597,7 +576,43 @@ impl SourceToDefCtx<'_, '_> {
         })
     }
 
-    fn container_to_def(&mut self, container: InFile<SyntaxNode>) -> Option<ChildContainer> {
+    /// Skips the attributed item that caused the macro invocation we are climbing up
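+    /// The callback additionally receives the deepest node seen on the way up that is in the
+    /// same file as the ancestor currently being visited.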
+    fn parent_ancestors_with_macros<T>(
+        &mut self,
+        node: InFile<&SyntaxNode>,
+        mut cb: impl FnMut(
+            &mut Self,
+            /*parent: */ InFile<SyntaxNode>,
+            /*child: */ &SyntaxNode,
+        ) -> Option<T>,
+    ) -> Option<T> {
+        let parent = |this: &mut Self, node: InFile<&SyntaxNode>| match node.value.parent() {
+            Some(parent) => Some(node.with_value(parent)),
+            None => {
+                let macro_file = node.file_id.macro_file()?;
+                let expansion_info = this.cache.get_or_insert_expansion(this.db, macro_file);
+                expansion_info.arg().map(|node| node?.parent()).transpose()
+            }
+        };
+        let mut deepest_child_in_same_file = node.cloned();
+        let mut node = node.cloned();
+        while let Some(parent) = parent(self, node.as_ref()) {
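+            // When the parent lives in a different file we crossed a macro-expansion boundary,
+            // so the parent becomes the new deepest node within its file.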
+            if parent.file_id != node.file_id {
+                deepest_child_in_same_file = parent.clone();
+            }
+            if let Some(res) = cb(self, parent.clone(), &deepest_child_in_same_file.value) {
+                return Some(res);
+            }
+            node = parent;
+        }
+        None
+    }
+
+    fn container_to_def(
+        &mut self,
+        container: InFile<SyntaxNode>,
+        child: &SyntaxNode,
+    ) -> Option<ChildContainer> {
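+        // Pick the container based on where `child` sits: positions inside an item's body or
+        // field list map to the `DefWithBodyId`/`VariantId`, while positions in the signature
+        // map to the item as a `GenericDefId`.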
         let cont = if let Some(item) = ast::Item::cast(container.value.clone()) {
             match &item {
                 ast::Item::Module(it) => self.module_to_def(container.with_value(it))?.into(),
@@ -612,29 +627,92 @@ impl SourceToDefCtx<'_, '_> {
                 }
                 ast::Item::Struct(it) => {
                     let def = self.struct_to_def(container.with_value(it))?;
-                    VariantId::from(def).into()
+                    let is_in_body = it.field_list().is_some_and(|it| {
+                        it.syntax().text_range().contains(child.text_range().start())
+                    });
+                    if is_in_body {
+                        VariantId::from(def).into()
+                    } else {
+                        ChildContainer::GenericDefId(def.into())
+                    }
                 }
                 ast::Item::Union(it) => {
                     let def = self.union_to_def(container.with_value(it))?;
-                    VariantId::from(def).into()
+                    let is_in_body = it.record_field_list().is_some_and(|it| {
+                        it.syntax().text_range().contains(child.text_range().start())
+                    });
+                    if is_in_body {
+                        VariantId::from(def).into()
+                    } else {
+                        ChildContainer::GenericDefId(def.into())
+                    }
                 }
                 ast::Item::Fn(it) => {
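+                    // The block expression and the parameter patterns (including a named
+                    // `self`) are lowered as part of the body, so positions there count as
+                    // being inside it.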
                     let def = self.fn_to_def(container.with_value(it))?;
-                    DefWithBodyId::from(def).into()
+                    let child_offset = child.text_range().start();
+                    let is_in_body =
+                        it.body().is_some_and(|it| it.syntax().text_range().contains(child_offset));
+                    let in_param_pat = || {
+                        it.param_list().is_some_and(|it| {
+                            it.self_param()
+                                .and_then(|it| {
+                                    Some(TextRange::new(
+                                        it.syntax().text_range().start(),
+                                        it.name()?.syntax().text_range().end(),
+                                    ))
+                                })
+                                .is_some_and(|r| r.contains_inclusive(child_offset))
+                                || it
+                                    .params()
+                                    .filter_map(|it| it.pat())
+                                    .any(|it| it.syntax().text_range().contains(child_offset))
+                        })
+                    };
+                    if is_in_body || in_param_pat() {
+                        DefWithBodyId::from(def).into()
+                    } else {
+                        ChildContainer::GenericDefId(def.into())
+                    }
                 }
                 ast::Item::Static(it) => {
                     let def = self.static_to_def(container.with_value(it))?;
-                    DefWithBodyId::from(def).into()
+                    let is_in_body = it.body().is_some_and(|it| {
+                        it.syntax().text_range().contains(child.text_range().start())
+                    });
+                    if is_in_body {
+                        DefWithBodyId::from(def).into()
+                    } else {
+                        ChildContainer::GenericDefId(def.into())
+                    }
                 }
                 ast::Item::Const(it) => {
                     let def = self.const_to_def(container.with_value(it))?;
-                    DefWithBodyId::from(def).into()
+                    let is_in_body = it.body().is_some_and(|it| {
+                        it.syntax().text_range().contains(child.text_range().start())
+                    });
+                    if is_in_body {
+                        DefWithBodyId::from(def).into()
+                    } else {
+                        ChildContainer::GenericDefId(def.into())
+                    }
                 }
                 _ => return None,
             }
-        } else {
-            let it = ast::Variant::cast(container.value)?;
+        } else if let Some(it) = ast::Variant::cast(container.value.clone()) {
             let def = self.enum_variant_to_def(InFile::new(container.file_id, &it))?;
+            let is_in_body =
+                it.eq_token().is_some_and(|it| it.text_range().end() < child.text_range().start());
+            if is_in_body { DefWithBodyId::from(def).into() } else { VariantId::from(def).into() }
+        } else {
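+            // A parameter pattern or the name of a `self` parameter: climb to the enclosing
+            // `fn` and treat the position as part of that function's body.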
+            let it = match Either::<ast::Pat, ast::Name>::cast(container.value)? {
+                Either::Left(it) => ast::Param::cast(it.syntax().parent()?)?.syntax().parent(),
+                Either::Right(it) => ast::SelfParam::cast(it.syntax().parent()?)?.syntax().parent(),
+            }
+            .and_then(ast::ParamList::cast)?
+            .syntax()
+            .parent()
+            .and_then(ast::Fn::cast)?;
+            let def = self.fn_to_def(InFile::new(container.file_id, &it))?;
             DefWithBodyId::from(def).into()
         };
         Some(cont)
@@ -671,7 +749,6 @@ impl_from! {
 impl ChildContainer {
     fn child_by_source(self, db: &dyn HirDatabase, file_id: HirFileId) -> DynMap {
         let _p = tracing::info_span!("ChildContainer::child_by_source").entered();
-        let db = db.upcast();
         match self {
             ChildContainer::DefWithBodyId(it) => it.child_by_source(db, file_id),
             ChildContainer::ModuleId(it) => it.child_by_source(db, file_id),
diff --git a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
index d1245f5f7d681..666efe8ec645f 100644
--- a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
@@ -8,48 +8,49 @@
 use std::iter::{self, once};
 
 use crate::{
-    db::HirDatabase, semantics::PathResolution, Adt, AssocItem, BindingMode, BuiltinAttr,
-    BuiltinType, Callable, Const, DeriveHelper, Field, Function, GenericSubstitution, Local, Macro,
-    ModuleDef, Static, Struct, ToolModule, Trait, TraitAlias, TupleField, Type, TypeAlias, Variant,
+    Adt, AssocItem, BindingMode, BuiltinAttr, BuiltinType, Callable, Const, DeriveHelper, Field,
+    Function, GenericSubstitution, Local, Macro, ModuleDef, Static, Struct, ToolModule, Trait,
+    TraitAlias, TupleField, Type, TypeAlias, Variant, db::HirDatabase, semantics::PathResolution,
 };
 use either::Either;
 use hir_def::{
+    AdtId, AssocItemId, CallableDefId, ConstId, DefWithBodyId, FieldId, FunctionId, GenericDefId,
+    ItemContainerId, LocalFieldId, Lookup, ModuleDefId, StructId, TraitId, VariantId,
     expr_store::{
+        Body, BodySourceMap, ExpressionStore, ExpressionStoreSourceMap, HygieneId,
+        lower::ExprCollector,
+        path::Path,
         scope::{ExprScopes, ScopeId},
-        Body, BodySourceMap, HygieneId,
     },
     hir::{BindingId, Expr, ExprId, ExprOrPatId, Pat},
     lang_item::LangItem,
-    lower::LowerCtx,
     nameres::MacroSubNs,
-    path::{ModPath, Path, PathKind},
-    resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs},
-    type_ref::{Mutability, TypesMap, TypesSourceMap},
-    AsMacroCall, AssocItemId, CallableDefId, ConstId, DefWithBodyId, FieldId, FunctionId,
-    ItemContainerId, LocalFieldId, Lookup, ModuleDefId, StructId, TraitId, VariantId,
+    resolver::{HasResolver, Resolver, TypeNs, ValueNs, resolver_for_scope},
+    type_ref::{Mutability, TypeRefId},
 };
 use hir_expand::{
-    mod_path::path,
+    HirFileId, InFile, MacroCallId,
+    mod_path::{ModPath, PathKind, path},
     name::{AsName, Name},
-    HirFileId, InFile, InMacroFile, MacroFileId, MacroFileIdExt,
 };
 use hir_ty::{
+    Adjustment, AliasTy, InferenceResult, Interner, LifetimeElisionKind, ProjectionTy,
+    Substitution, TraitEnvironment, Ty, TyExt, TyKind, TyLoweringContext,
     diagnostics::{
-        record_literal_missing_fields, record_pattern_missing_fields, unsafe_operations,
-        InsideUnsafeBlock,
+        InsideUnsafeBlock, record_literal_missing_fields, record_pattern_missing_fields,
+        unsafe_operations,
     },
     from_assoc_type_id,
     lang_items::lang_items_for_bin_op,
-    method_resolution, Adjustment, InferenceResult, Interner, Substitution, TraitEnvironment, Ty,
-    TyExt, TyKind, TyLoweringContext,
+    method_resolution,
 };
 use intern::sym;
 use itertools::Itertools;
 use smallvec::SmallVec;
-use syntax::ast::{RangeItem, RangeOp};
+use stdx::never;
 use syntax::{
-    ast::{self, AstNode},
     SyntaxKind, SyntaxNode, TextRange, TextSize,
+    ast::{self, AstNode, RangeItem, RangeOp},
 };
 use triomphe::Arc;
 
@@ -59,8 +60,29 @@ use triomphe::Arc;
 pub(crate) struct SourceAnalyzer {
     pub(crate) file_id: HirFileId,
     pub(crate) resolver: Resolver,
-    def: Option<(DefWithBodyId, Arc<Body>, Arc<BodySourceMap>)>,
-    infer: Option<Arc<InferenceResult>>,
+    pub(crate) body_or_sig: Option<BodyOrSig>,
+}
+
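+/// What a [`SourceAnalyzer`] was created from: a full body with optional inference results,
+/// the field definitions of a variant, or just an item signature's expression store.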
+#[derive(Debug)]
+pub(crate) enum BodyOrSig {
+    Body {
+        def: DefWithBodyId,
+        body: Arc<Body>,
+        source_map: Arc<BodySourceMap>,
+        infer: Option<Arc<InferenceResult>>,
+    },
+    // To be folded into `Body` once variant fields are considered a body
+    VariantFields {
+        def: VariantId,
+        store: Arc<ExpressionStore>,
+        source_map: Arc<ExpressionStoreSourceMap>,
+    },
+    Sig {
+        def: GenericDefId,
+        store: Arc<ExpressionStore>,
+        source_map: Arc<ExpressionStoreSourceMap>,
+        // infer: Option<Arc<InferenceResult>>,
+    },
 }
 
 impl SourceAnalyzer {
@@ -103,99 +125,163 @@ impl SourceAnalyzer {
                 scope_for_offset(db, &scopes, &source_map, node.file_id, offset)
             }
         };
-        let resolver = resolver_for_scope(db.upcast(), def, scope);
-        SourceAnalyzer { resolver, def: Some((def, body, source_map)), infer, file_id }
+        let resolver = resolver_for_scope(db, def, scope);
+        SourceAnalyzer {
+            resolver,
+            body_or_sig: Some(BodyOrSig::Body { def, body, source_map, infer }),
+            file_id,
+        }
+    }
+
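+    /// Creates an analyzer for positions inside an item's signature (generic parameters,
+    /// where clauses and types), where no body inference is available.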
+    pub(crate) fn new_generic_def(
+        db: &dyn HirDatabase,
+        def: GenericDefId,
+        InFile { file_id, .. }: InFile<&SyntaxNode>,
+        _offset: Option<TextSize>,
+    ) -> SourceAnalyzer {
+        let (_params, store, source_map) = db.generic_params_and_store_and_source_map(def);
+        let resolver = def.resolver(db);
+        SourceAnalyzer {
+            resolver,
+            body_or_sig: Some(BodyOrSig::Sig { def, store, source_map }),
+            file_id,
+        }
+    }
+
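+    /// Creates an analyzer for the field definitions of a struct, union or enum variant.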
+    pub(crate) fn new_variant_body(
+        db: &dyn HirDatabase,
+        def: VariantId,
+        InFile { file_id, .. }: InFile<&SyntaxNode>,
+        _offset: Option<TextSize>,
+    ) -> SourceAnalyzer {
+        let (fields, source_map) = db.variant_fields_with_source_map(def);
+        let resolver = def.resolver(db);
+        SourceAnalyzer {
+            resolver,
+            body_or_sig: Some(BodyOrSig::VariantFields {
+                def,
+                store: fields.store.clone(),
+                source_map,
+            }),
+            file_id,
+        }
     }
 
     pub(crate) fn new_for_resolver(
         resolver: Resolver,
         node: InFile<&SyntaxNode>,
     ) -> SourceAnalyzer {
-        SourceAnalyzer { resolver, def: None, infer: None, file_id: node.file_id }
+        SourceAnalyzer { resolver, body_or_sig: None, file_id: node.file_id }
     }
 
-    fn body_source_map(&self) -> Option<&BodySourceMap> {
-        self.def.as_ref().map(|(.., source_map)| &**source_map)
+    // FIXME: Remove this
+    fn body_(&self) -> Option<(DefWithBodyId, &Body, &BodySourceMap, Option<&InferenceResult>)> {
+        self.body_or_sig.as_ref().and_then(|it| match it {
+            BodyOrSig::Body { def, body, source_map, infer } => {
+                Some((*def, &**body, &**source_map, infer.as_deref()))
+            }
+            _ => None,
+        })
+    }
+
+    fn infer(&self) -> Option<&InferenceResult> {
+        self.body_or_sig.as_ref().and_then(|it| match it {
+            BodyOrSig::Sig { .. } => None,
+            BodyOrSig::VariantFields { .. } => None,
+            BodyOrSig::Body { infer, .. } => infer.as_deref(),
+        })
     }
+
     fn body(&self) -> Option<&Body> {
-        self.def.as_ref().map(|(_, body, _)| &**body)
+        self.body_or_sig.as_ref().and_then(|it| match it {
+            BodyOrSig::Sig { .. } => None,
+            BodyOrSig::VariantFields { .. } => None,
+            BodyOrSig::Body { body, .. } => Some(&**body),
+        })
+    }
+
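+    /// Returns the expression store backing whatever this analyzer was created from.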
+    pub(crate) fn store(&self) -> Option<&ExpressionStore> {
+        self.body_or_sig.as_ref().map(|it| match it {
+            BodyOrSig::Sig { store, .. } => &**store,
+            BodyOrSig::VariantFields { store, .. } => &**store,
+            BodyOrSig::Body { body, .. } => &body.store,
+        })
+    }
+
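+    /// Returns the source map corresponding to [`Self::store`].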
+    pub(crate) fn store_sm(&self) -> Option<&ExpressionStoreSourceMap> {
+        self.body_or_sig.as_ref().map(|it| match it {
+            BodyOrSig::Sig { source_map, .. } => &**source_map,
+            BodyOrSig::VariantFields { source_map, .. } => &**source_map,
+            BodyOrSig::Body { source_map, .. } => &source_map.store,
+        })
+    }
+
+    pub(crate) fn expansion(&self, node: InFile<&ast::MacroCall>) -> Option<MacroCallId> {
+        self.store_sm()?.expansion(node)
     }
 
     fn trait_environment(&self, db: &dyn HirDatabase) -> Arc<TraitEnvironment> {
-        self.def.as_ref().map(|(def, ..)| *def).map_or_else(
+        self.body_().map(|(def, ..)| def).map_or_else(
             || TraitEnvironment::empty(self.resolver.krate()),
             |def| db.trait_environment_for_body(def),
         )
     }
 
-    fn expr_id(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option<ExprOrPatId> {
-        let src = match expr {
-            ast::Expr::MacroExpr(expr) => {
-                self.expand_expr(db, InFile::new(self.file_id, expr.macro_call()?))?.into()
-            }
-            _ => InFile::new(self.file_id, expr.clone()),
-        };
-        let sm = self.body_source_map()?;
-        sm.node_expr(src.as_ref())
+    fn expr_id(&self, expr: ast::Expr) -> Option<ExprOrPatId> {
+        let src = InFile { file_id: self.file_id, value: expr };
+        self.store_sm()?.node_expr(src.as_ref())
     }
 
     fn pat_id(&self, pat: &ast::Pat) -> Option<ExprOrPatId> {
-        // FIXME: macros, see `expr_id`
         let src = InFile { file_id: self.file_id, value: pat };
-        self.body_source_map()?.node_pat(src)
+        self.store_sm()?.node_pat(src)
+    }
+
+    fn type_id(&self, pat: &ast::Type) -> Option<TypeRefId> {
+        let src = InFile { file_id: self.file_id, value: pat };
+        self.store_sm()?.node_type(src)
     }
 
     fn binding_id_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingId> {
         let pat_id = self.pat_id(&pat.clone().into())?;
-        if let Pat::Bind { id, .. } = self.body()?.pats[pat_id.as_pat()?] {
+        if let Pat::Bind { id, .. } = self.store()?.pats[pat_id.as_pat()?] {
             Some(id)
         } else {
             None
         }
     }
 
-    fn expand_expr(
-        &self,
-        db: &dyn HirDatabase,
-        expr: InFile<ast::MacroCall>,
-    ) -> Option<InMacroFile<ast::Expr>> {
-        let macro_file = self.body_source_map()?.node_macro_file(expr.as_ref())?;
-        let expanded = db.parse_macro_expansion(macro_file).value.0.syntax_node();
-        let res = if let Some(stmts) = ast::MacroStmts::cast(expanded.clone()) {
-            match stmts.expr()? {
-                ast::Expr::MacroExpr(mac) => {
-                    self.expand_expr(db, InFile::new(macro_file.into(), mac.macro_call()?))?
-                }
-                expr => InMacroFile::new(macro_file, expr),
-            }
-        } else if let Some(call) = ast::MacroCall::cast(expanded.clone()) {
-            self.expand_expr(db, InFile::new(macro_file.into(), call))?
-        } else {
-            InMacroFile::new(macro_file, ast::Expr::cast(expanded)?)
-        };
-
-        Some(res)
-    }
-
-    pub(crate) fn expr_adjustments(
-        &self,
-        db: &dyn HirDatabase,
-        expr: &ast::Expr,
-    ) -> Option<&[Adjustment]> {
+    pub(crate) fn expr_adjustments(&self, expr: &ast::Expr) -> Option<&[Adjustment]> {
         // It is safe to omit destructuring assignments here because they have no adjustments (neither
         // expressions nor patterns).
-        let expr_id = self.expr_id(db, expr)?.as_expr()?;
-        let infer = self.infer.as_ref()?;
+        let expr_id = self.expr_id(expr.clone())?.as_expr()?;
+        let infer = self.infer()?;
         infer.expr_adjustments.get(&expr_id).map(|v| &**v)
     }
 
+    pub(crate) fn type_of_type(&self, db: &dyn HirDatabase, ty: &ast::Type) -> Option<Type> {
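+        // Lower the written type through this analyzer's expression store and resolver; no
+        // body inference is involved.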
+        let type_ref = self.type_id(ty)?;
+        let ty = TyLoweringContext::new(
+            db,
+            &self.resolver,
+            self.store()?,
+            self.resolver.generic_def()?,
+            // FIXME: Is this correct here? In any case it should mostly affect diagnostics, which we don't emit here
+            // (it can also affect the lifetimes generated, e.g. in `const` they won't be `'static`, but that seems like a
+            // small problem).
+            LifetimeElisionKind::Infer,
+        )
+        .lower_ty(type_ref);
+        Some(Type::new_with_resolver(db, &self.resolver, ty))
+    }
+
     pub(crate) fn type_of_expr(
         &self,
         db: &dyn HirDatabase,
         expr: &ast::Expr,
     ) -> Option<(Type, Option<Type>)> {
-        let expr_id = self.expr_id(db, expr)?;
-        let infer = self.infer.as_ref()?;
+        let expr_id = self.expr_id(expr.clone())?;
+        let infer = self.infer()?;
         let coerced = expr_id
             .as_expr()
             .and_then(|expr_id| infer.expr_adjustments.get(&expr_id))
@@ -211,7 +297,7 @@ impl SourceAnalyzer {
         pat: &ast::Pat,
     ) -> Option<(Type, Option<Type>)> {
         let expr_or_pat_id = self.pat_id(pat)?;
-        let infer = self.infer.as_ref()?;
+        let infer = self.infer()?;
         let coerced = match expr_or_pat_id {
             ExprOrPatId::ExprId(idx) => infer
                 .expr_adjustments
@@ -234,7 +320,7 @@ impl SourceAnalyzer {
         pat: &ast::IdentPat,
     ) -> Option<Type> {
         let binding_id = self.binding_id_of_pat(pat)?;
-        let infer = self.infer.as_ref()?;
+        let infer = self.infer()?;
         let ty = infer[binding_id].clone();
         let mk_ty = |ty| Type::new_with_resolver(db, &self.resolver, ty);
         Some(mk_ty(ty))
@@ -246,7 +332,7 @@ impl SourceAnalyzer {
         _param: &ast::SelfParam,
     ) -> Option<Type> {
         let binding = self.body()?.self_param?;
-        let ty = self.infer.as_ref()?[binding].clone();
+        let ty = self.infer()?[binding].clone();
         Some(Type::new_with_resolver(db, &self.resolver, ty))
     }
 
@@ -256,7 +342,7 @@ impl SourceAnalyzer {
         pat: &ast::IdentPat,
     ) -> Option<BindingMode> {
         let id = self.pat_id(&pat.clone().into())?;
-        let infer = self.infer.as_ref()?;
+        let infer = self.infer()?;
         infer.binding_modes.get(id.as_pat()?).map(|bm| match bm {
             hir_ty::BindingMode::Move => BindingMode::Move,
             hir_ty::BindingMode::Ref(hir_ty::Mutability::Mut) => BindingMode::Ref(Mutability::Mut),
@@ -271,7 +357,7 @@ impl SourceAnalyzer {
         pat: &ast::Pat,
     ) -> Option<SmallVec<[Type; 1]>> {
         let pat_id = self.pat_id(pat)?;
-        let infer = self.infer.as_ref()?;
+        let infer = self.infer()?;
         Some(
             infer
                 .pat_adjustments
@@ -287,8 +373,8 @@ impl SourceAnalyzer {
         db: &dyn HirDatabase,
         call: &ast::MethodCallExpr,
     ) -> Option<Callable> {
-        let expr_id = self.expr_id(db, &call.clone().into())?.as_expr()?;
-        let (func, substs) = self.infer.as_ref()?.method_resolution(expr_id)?;
+        let expr_id = self.expr_id(call.clone().into())?.as_expr()?;
+        let (func, substs) = self.infer()?.method_resolution(expr_id)?;
         let ty = db.value_ty(func.into())?.substitute(Interner, &substs);
         let ty = Type::new_with_resolver(db, &self.resolver, ty);
         let mut res = ty.as_callable(db)?;
@@ -301,8 +387,8 @@ impl SourceAnalyzer {
         db: &dyn HirDatabase,
         call: &ast::MethodCallExpr,
     ) -> Option<Function> {
-        let expr_id = self.expr_id(db, &call.clone().into())?.as_expr()?;
-        let (f_in_trait, substs) = self.infer.as_ref()?.method_resolution(expr_id)?;
+        let expr_id = self.expr_id(call.clone().into())?.as_expr()?;
+        let (f_in_trait, substs) = self.infer()?.method_resolution(expr_id)?;
 
         Some(self.resolve_impl_method_or_trait_def(db, f_in_trait, substs).into())
     }
@@ -312,8 +398,8 @@ impl SourceAnalyzer {
         db: &dyn HirDatabase,
         call: &ast::MethodCallExpr,
     ) -> Option<(Either<Function, Field>, Option<GenericSubstitution>)> {
-        let expr_id = self.expr_id(db, &call.clone().into())?.as_expr()?;
-        let inference_result = self.infer.as_ref()?;
+        let expr_id = self.expr_id(call.clone().into())?.as_expr()?;
+        let inference_result = self.infer()?;
         match inference_result.method_resolution(expr_id) {
             Some((f_in_trait, substs)) => {
                 let (fn_, subst) =
@@ -342,12 +428,11 @@ impl SourceAnalyzer {
 
     pub(crate) fn resolve_field(
         &self,
-        db: &dyn HirDatabase,
         field: &ast::FieldExpr,
     ) -> Option<Either<Field, TupleField>> {
-        let &(def, ..) = self.def.as_ref()?;
-        let expr_id = self.expr_id(db, &field.clone().into())?.as_expr()?;
-        self.infer.as_ref()?.field_resolution(expr_id).map(|it| {
+        let (def, ..) = self.body_()?;
+        let expr_id = self.expr_id(field.clone().into())?.as_expr()?;
+        self.infer()?.field_resolution(expr_id).map(|it| {
             it.map_either(Into::into, |f| TupleField { owner: def, tuple: f.tuple, index: f.index })
         })
     }
@@ -358,7 +443,7 @@ impl SourceAnalyzer {
         infer: &InferenceResult,
         db: &dyn HirDatabase,
     ) -> Option<GenericSubstitution> {
-        let body = self.body()?;
+        let body = self.store()?;
         if let Expr::Field { expr: object_expr, name: _ } = body[field_expr] {
             let (adt, subst) = type_of_expr_including_adjust(infer, object_expr)?.as_adt()?;
             return Some(GenericSubstitution::new(
@@ -375,9 +460,9 @@ impl SourceAnalyzer {
         db: &dyn HirDatabase,
         field: &ast::FieldExpr,
     ) -> Option<(Either<Either<Field, TupleField>, Function>, Option<GenericSubstitution>)> {
-        let &(def, ..) = self.def.as_ref()?;
-        let expr_id = self.expr_id(db, &field.clone().into())?.as_expr()?;
-        let inference_result = self.infer.as_ref()?;
+        let (def, ..) = self.body_()?;
+        let expr_id = self.expr_id(field.clone().into())?.as_expr()?;
+        let inference_result = self.infer()?;
         match inference_result.field_resolution(expr_id) {
             Some(field) => match field {
                 Either::Left(field) => Some((
@@ -419,7 +504,7 @@ impl SourceAnalyzer {
             (RangeOp::Inclusive, None, None) => return None,
             (RangeOp::Inclusive, Some(_), None) => return None,
         };
-        self.resolver.resolve_known_struct(db.upcast(), &path)
+        self.resolver.resolve_known_struct(db, &path)
     }
 
     pub(crate) fn resolve_range_expr(
@@ -439,7 +524,7 @@ impl SourceAnalyzer {
             (RangeOp::Inclusive, None, None) => return None,
             (RangeOp::Inclusive, Some(_), None) => return None,
         };
-        self.resolver.resolve_known_struct(db.upcast(), &path)
+        self.resolver.resolve_known_struct(db, &path)
     }
 
     pub(crate) fn resolve_await_to_poll(
@@ -447,11 +532,11 @@ impl SourceAnalyzer {
         db: &dyn HirDatabase,
         await_expr: &ast::AwaitExpr,
     ) -> Option<FunctionId> {
-        let mut ty = self.ty_of_expr(db, &await_expr.expr()?)?.clone();
+        let mut ty = self.ty_of_expr(await_expr.expr()?)?.clone();
 
         let into_future_trait = self
             .resolver
-            .resolve_known_trait(db.upcast(), &path![core::future::IntoFuture])
+            .resolve_known_trait(db, &path![core::future::IntoFuture])
             .map(Trait::from);
 
         if let Some(into_future_trait) = into_future_trait {
@@ -460,7 +545,7 @@ impl SourceAnalyzer {
                 let items = into_future_trait.items(db);
                 let into_future_type = items.into_iter().find_map(|item| match item {
                     AssocItem::TypeAlias(alias)
-                        if alias.name(db) == Name::new_symbol_root(sym::IntoFuture.clone()) =>
+                        if alias.name(db) == Name::new_symbol_root(sym::IntoFuture) =>
                     {
                         Some(alias)
                     }
@@ -489,38 +574,30 @@ impl SourceAnalyzer {
                 // This can be either `Deref::deref` or `DerefMut::deref_mut`.
                 // Since deref kind is inferred and stored in `InferenceResult.method_resolution`,
                 // use that result to find out which one it is.
-                let (deref_trait, deref) = self.lang_trait_fn(
-                    db,
-                    LangItem::Deref,
-                    &Name::new_symbol_root(sym::deref.clone()),
-                )?;
-                self.infer
-                    .as_ref()
+                let (deref_trait, deref) =
+                    self.lang_trait_fn(db, LangItem::Deref, &Name::new_symbol_root(sym::deref))?;
+                self.infer()
                     .and_then(|infer| {
-                        let expr = self.expr_id(db, &prefix_expr.clone().into())?.as_expr()?;
+                        let expr = self.expr_id(prefix_expr.clone().into())?.as_expr()?;
                         let (func, _) = infer.method_resolution(expr)?;
                         let (deref_mut_trait, deref_mut) = self.lang_trait_fn(
                             db,
                             LangItem::DerefMut,
-                            &Name::new_symbol_root(sym::deref_mut.clone()),
+                            &Name::new_symbol_root(sym::deref_mut),
                         )?;
-                        if func == deref_mut {
-                            Some((deref_mut_trait, deref_mut))
-                        } else {
-                            None
-                        }
+                        if func == deref_mut { Some((deref_mut_trait, deref_mut)) } else { None }
                     })
                     .unwrap_or((deref_trait, deref))
             }
             ast::UnaryOp::Not => {
-                self.lang_trait_fn(db, LangItem::Not, &Name::new_symbol_root(sym::not.clone()))?
+                self.lang_trait_fn(db, LangItem::Not, &Name::new_symbol_root(sym::not))?
             }
             ast::UnaryOp::Neg => {
-                self.lang_trait_fn(db, LangItem::Neg, &Name::new_symbol_root(sym::neg.clone()))?
+                self.lang_trait_fn(db, LangItem::Neg, &Name::new_symbol_root(sym::neg))?
             }
         };
 
-        let ty = self.ty_of_expr(db, &prefix_expr.expr()?)?;
+        let ty = self.ty_of_expr(prefix_expr.expr()?)?;
 
         // HACK: subst for all methods coincides with that for their trait because the methods
         // don't have any generic parameters, so we skip building another subst for the methods.
@@ -534,27 +611,22 @@ impl SourceAnalyzer {
         db: &dyn HirDatabase,
         index_expr: &ast::IndexExpr,
     ) -> Option<FunctionId> {
-        let base_ty = self.ty_of_expr(db, &index_expr.base()?)?;
-        let index_ty = self.ty_of_expr(db, &index_expr.index()?)?;
+        let base_ty = self.ty_of_expr(index_expr.base()?)?;
+        let index_ty = self.ty_of_expr(index_expr.index()?)?;
 
         let (index_trait, index_fn) =
-            self.lang_trait_fn(db, LangItem::Index, &Name::new_symbol_root(sym::index.clone()))?;
+            self.lang_trait_fn(db, LangItem::Index, &Name::new_symbol_root(sym::index))?;
         let (op_trait, op_fn) = self
-            .infer
-            .as_ref()
+            .infer()
             .and_then(|infer| {
-                let expr = self.expr_id(db, &index_expr.clone().into())?.as_expr()?;
+                let expr = self.expr_id(index_expr.clone().into())?.as_expr()?;
                 let (func, _) = infer.method_resolution(expr)?;
                 let (index_mut_trait, index_mut_fn) = self.lang_trait_fn(
                     db,
                     LangItem::IndexMut,
-                    &Name::new_symbol_root(sym::index_mut.clone()),
+                    &Name::new_symbol_root(sym::index_mut),
                 )?;
-                if func == index_mut_fn {
-                    Some((index_mut_trait, index_mut_fn))
-                } else {
-                    None
-                }
+                if func == index_mut_fn { Some((index_mut_trait, index_mut_fn)) } else { None }
             })
             .unwrap_or((index_trait, index_fn));
         // HACK: subst for all methods coincides with that for their trait because the methods
@@ -572,8 +644,8 @@ impl SourceAnalyzer {
         binop_expr: &ast::BinExpr,
     ) -> Option<FunctionId> {
         let op = binop_expr.op_kind()?;
-        let lhs = self.ty_of_expr(db, &binop_expr.lhs()?)?;
-        let rhs = self.ty_of_expr(db, &binop_expr.rhs()?)?;
+        let lhs = self.ty_of_expr(binop_expr.lhs()?)?;
+        let rhs = self.ty_of_expr(binop_expr.rhs()?)?;
 
         let (op_trait, op_fn) = lang_items_for_bin_op(op)
             .and_then(|(name, lang_item)| self.lang_trait_fn(db, lang_item, &name))?;
@@ -592,10 +664,10 @@ impl SourceAnalyzer {
         db: &dyn HirDatabase,
         try_expr: &ast::TryExpr,
     ) -> Option<FunctionId> {
-        let ty = self.ty_of_expr(db, &try_expr.expr()?)?;
+        let ty = self.ty_of_expr(try_expr.expr()?)?;
 
         let op_fn = db.lang_item(self.resolver.krate(), LangItem::TryTraitBranch)?.as_function()?;
-        let op_trait = match op_fn.lookup(db.upcast()).container {
+        let op_trait = match op_fn.lookup(db).container {
             ItemContainerId::TraitId(id) => id,
             _ => return None,
         };
@@ -613,7 +685,7 @@ impl SourceAnalyzer {
     ) -> Option<(Field, Option<Local>, Type, GenericSubstitution)> {
         let record_expr = ast::RecordExpr::cast(field.syntax().parent().and_then(|p| p.parent())?)?;
         let expr = ast::Expr::from(record_expr);
-        let expr_id = self.body_source_map()?.node_expr(InFile::new(self.file_id, &expr))?;
+        let expr_id = self.store_sm()?.node_expr(InFile::new(self.file_id, &expr))?;
 
         let ast_name = field.field_name()?;
         let local_name = ast_name.as_name();
@@ -626,7 +698,7 @@ impl SourceAnalyzer {
                 once(local_name.clone()),
             ));
             match self.resolver.resolve_path_in_value_ns_fully(
-                db.upcast(),
+                db,
                 &path,
                 name_hygiene(db, InFile::new(self.file_id, ast_name.syntax())),
             ) {
@@ -636,9 +708,9 @@ impl SourceAnalyzer {
                 _ => None,
             }
         };
-        let (adt, subst) = self.infer.as_ref()?.type_of_expr_or_pat(expr_id)?.as_adt()?;
-        let variant = self.infer.as_ref()?.variant_resolution_for_expr_or_pat(expr_id)?;
-        let variant_data = variant.variant_data(db.upcast());
+        let (adt, subst) = self.infer()?.type_of_expr_or_pat(expr_id)?.as_adt()?;
+        let variant = self.infer()?.variant_resolution_for_expr_or_pat(expr_id)?;
+        let variant_data = variant.variant_data(db);
         let field = FieldId { parent: variant, local_id: variant_data.field(&local_name)? };
         let field_ty =
             db.field_types(variant).get(field.local_id)?.clone().substitute(Interner, subst);
@@ -658,10 +730,10 @@ impl SourceAnalyzer {
         let field_name = field.field_name()?.as_name();
         let record_pat = ast::RecordPat::cast(field.syntax().parent().and_then(|p| p.parent())?)?;
         let pat_id = self.pat_id(&record_pat.into())?;
-        let variant = self.infer.as_ref()?.variant_resolution_for_pat(pat_id.as_pat()?)?;
-        let variant_data = variant.variant_data(db.upcast());
+        let variant = self.infer()?.variant_resolution_for_pat(pat_id.as_pat()?)?;
+        let variant_data = variant.variant_data(db);
         let field = FieldId { parent: variant, local_id: variant_data.field(&field_name)? };
-        let (adt, subst) = self.infer.as_ref()?.type_of_pat.get(pat_id.as_pat()?)?.as_adt()?;
+        let (adt, subst) = self.infer()?.type_of_pat.get(pat_id.as_pat()?)?.as_adt()?;
         let field_ty =
             db.field_types(variant).get(field.local_id)?.clone().substitute(Interner, subst);
         Some((
@@ -676,14 +748,15 @@ impl SourceAnalyzer {
         db: &dyn HirDatabase,
         macro_call: InFile<&ast::MacroCall>,
     ) -> Option<Macro> {
-        let (mut types_map, mut types_source_map) =
-            (TypesMap::default(), TypesSourceMap::default());
-        let mut ctx =
-            LowerCtx::new(db.upcast(), macro_call.file_id, &mut types_map, &mut types_source_map);
-        let path = macro_call.value.path().and_then(|ast| Path::from_src(&mut ctx, ast))?;
-        self.resolver
-            .resolve_path_as_macro(db.upcast(), path.mod_path()?, Some(MacroSubNs::Bang))
-            .map(|(it, _)| it.into())
+        let bs = self.store_sm()?;
+        bs.expansion(macro_call).and_then(|it| {
+            // FIXME: Block def maps
+            let def = it.lookup(db).def;
+            db.crate_def_map(def.krate)
+                .macro_def_to_macro_id
+                .get(&def.kind.erased_ast_id())
+                .map(|it| (*it).into())
+        })
     }
 
     pub(crate) fn resolve_bind_pat_to_const(
@@ -692,20 +765,20 @@ impl SourceAnalyzer {
         pat: &ast::IdentPat,
     ) -> Option<ModuleDef> {
         let expr_or_pat_id = self.pat_id(&pat.clone().into())?;
-        let body = self.body()?;
+        let store = self.store()?;
 
         let path = match expr_or_pat_id {
-            ExprOrPatId::ExprId(idx) => match &body[idx] {
+            ExprOrPatId::ExprId(idx) => match &store[idx] {
                 Expr::Path(path) => path,
                 _ => return None,
             },
-            ExprOrPatId::PatId(idx) => match &body[idx] {
+            ExprOrPatId::PatId(idx) => match &store[idx] {
                 Pat::Path(path) => path,
                 _ => return None,
             },
         };
 
-        let res = resolve_hir_path(db, &self.resolver, path, HygieneId::ROOT, TypesMap::EMPTY)?;
+        let res = resolve_hir_path(db, &self.resolver, path, HygieneId::ROOT, Some(store))?;
         match res {
             PathResolution::Def(def) => Some(def),
             _ => None,
@@ -720,6 +793,78 @@ impl SourceAnalyzer {
             .map(crate::TypeParam::from)
     }
 
+    pub(crate) fn resolve_offset_of_field(
+        &self,
+        db: &dyn HirDatabase,
+        name_ref: &ast::NameRef,
+    ) -> Option<(Either<crate::Variant, crate::Field>, GenericSubstitution)> {
+        let offset_of_expr = ast::OffsetOfExpr::cast(name_ref.syntax().parent()?)?;
+        let container = offset_of_expr.ty()?;
+        let container = self.type_of_type(db, &container)?;
+
+        let trait_env = container.env;
+        let mut container = Either::Right(container.ty);
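+        // Walk the field path of the `offset_of!` expression, tracking the current
+        // container as either a resolved enum variant (with its substitution) or a
+        // plain type whose fields are looked up next.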
+        for field_name in offset_of_expr.fields() {
+            if let Some(
+                TyKind::Alias(AliasTy::Projection(ProjectionTy { associated_ty_id, substitution }))
+                | TyKind::AssociatedType(associated_ty_id, substitution),
+            ) = container.as_ref().right().map(|it| it.kind(Interner))
+            {
+                let projection = ProjectionTy {
+                    associated_ty_id: *associated_ty_id,
+                    substitution: substitution.clone(),
+                };
+                container = Either::Right(db.normalize_projection(projection, trait_env.clone()));
+            }
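+            // Resolve `field_name` as a field of the given variant (struct, union or
+            // enum variant), advance `container` to the field's substituted type and
+            // return the field together with its generic owner and substitution.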
+            let handle_variants = |variant, subst: &Substitution, container: &mut _| {
+                let fields = db.variant_fields(variant);
+                let field = fields.field(&field_name.as_name())?;
+                let field_types = db.field_types(variant);
+                *container = Either::Right(field_types[field].clone().substitute(Interner, subst));
+                let generic_def = match variant {
+                    VariantId::EnumVariantId(it) => it.loc(db).parent.into(),
+                    VariantId::StructId(it) => it.into(),
+                    VariantId::UnionId(it) => it.into(),
+                };
+                Some((
+                    Either::Right(Field { parent: variant.into(), id: field }),
+                    generic_def,
+                    subst.clone(),
+                ))
+            };
+            let temp_ty = TyKind::Error.intern(Interner);
+            let (field_def, generic_def, subst) =
+                match std::mem::replace(&mut container, Either::Right(temp_ty.clone())) {
+                    Either::Left((variant_id, subst)) => {
+                        handle_variants(VariantId::from(variant_id), &subst, &mut container)?
+                    }
+                    Either::Right(container_ty) => match container_ty.kind(Interner) {
+                        TyKind::Adt(adt_id, subst) => match adt_id.0 {
+                            AdtId::StructId(id) => {
+                                handle_variants(id.into(), subst, &mut container)?
+                            }
+                            AdtId::UnionId(id) => {
+                                handle_variants(id.into(), subst, &mut container)?
+                            }
+                            AdtId::EnumId(id) => {
+                                let variants = db.enum_variants(id);
+                                let variant = variants.variant(&field_name.as_name())?;
+                                container = Either::Left((variant, subst.clone()));
+                                (Either::Left(Variant { id: variant }), id.into(), subst.clone())
+                            }
+                        },
+                        _ => return None,
+                    },
+                };
+
+            if field_name.syntax().text_range() == name_ref.syntax().text_range() {
+                return Some((field_def, GenericSubstitution::new(generic_def, subst, trait_env)));
+            }
+        }
+        never!("the `NameRef` is a child of the `OffsetOfExpr`, we should've visited it");
+        None
+    }
+
     pub(crate) fn resolve_path(
         &self,
         db: &dyn HirDatabase,
@@ -730,9 +875,9 @@ impl SourceAnalyzer {
 
         let mut prefer_value_ns = false;
         let resolved = (|| {
-            let infer = self.infer.as_deref()?;
+            let infer = self.infer()?;
             if let Some(path_expr) = parent().and_then(ast::PathExpr::cast) {
-                let expr_id = self.expr_id(db, &path_expr.into())?;
+                let expr_id = self.expr_id(path_expr.into())?;
                 if let Some((assoc, subs)) = infer.assoc_resolutions_for_expr_or_pat(expr_id) {
                     let (assoc, subst) = match assoc {
                         AssocItemId::FunctionId(f_in_trait) => {
@@ -830,7 +975,7 @@ impl SourceAnalyzer {
                     return Some((PathResolution::Def(ModuleDef::Variant(variant.into())), None));
                 }
             } else if let Some(rec_lit) = parent().and_then(ast::RecordExpr::cast) {
-                let expr_id = self.expr_id(db, &rec_lit.into())?;
+                let expr_id = self.expr_id(rec_lit.into())?;
                 if let Some(VariantId::EnumVariantId(variant)) =
                     infer.variant_resolution_for_expr_or_pat(expr_id)
                 {
@@ -857,17 +1002,20 @@ impl SourceAnalyzer {
             return resolved;
         }
 
-        let (mut types_map, mut types_source_map) =
-            (TypesMap::default(), TypesSourceMap::default());
-        let mut ctx =
-            LowerCtx::new(db.upcast(), self.file_id, &mut types_map, &mut types_source_map);
-        let hir_path = Path::from_src(&mut ctx, path.clone())?;
+        // FIXME: collecting here shouldn't be necessary?
+        let mut collector = ExprCollector::new(db, self.resolver.module(), self.file_id);
+        let hir_path =
+            collector.lower_path(path.clone(), &mut ExprCollector::impl_trait_error_allocator)?;
+        let parent_hir_path = path
+            .parent_path()
+            .and_then(|p| collector.lower_path(p, &mut ExprCollector::impl_trait_error_allocator));
+        let store = collector.store.finish();
 
         // Case where path is a qualifier of a use tree, e.g. foo::bar::{Baz, Qux} where we are
         // trying to resolve foo::bar.
         if let Some(use_tree) = parent().and_then(ast::UseTree::cast) {
             if use_tree.coloncolon_token().is_some() {
-                return resolve_hir_path_qualifier(db, &self.resolver, &hir_path, &types_map)
+                return resolve_hir_path_qualifier(db, &self.resolver, &hir_path, &store)
                     .map(|it| (it, None));
             }
         }
@@ -884,9 +1032,8 @@ impl SourceAnalyzer {
 
         // Case where path is a qualifier of another path, e.g. foo::bar::Baz where we are
         // trying to resolve foo::bar.
-        if let Some(parent_path) = path.parent_path() {
-            let parent_hir_path = Path::from_src(&mut ctx, parent_path);
-            return match resolve_hir_path_qualifier(db, &self.resolver, &hir_path, &types_map) {
+        if let Some(parent_hir_path) = parent_hir_path {
+            return match resolve_hir_path_qualifier(db, &self.resolver, &hir_path, &store) {
                 None if meta_path.is_some() => path
                     .first_segment()
                     .and_then(|it| it.name_ref())
@@ -906,13 +1053,9 @@ impl SourceAnalyzer {
                 // }
                 // ```
                 Some(it) if matches!(it, PathResolution::Def(ModuleDef::BuiltinType(_))) => {
-                    if let (Some(mod_path), Some(parent_hir_path)) =
-                        (hir_path.mod_path(), parent_hir_path)
-                    {
-                        if let Some(ModuleDefId::ModuleId(id)) = self
-                            .resolver
-                            .resolve_module_path_in_items(db.upcast(), mod_path)
-                            .take_types()
+                    if let Some(mod_path) = hir_path.mod_path() {
+                        if let Some(ModuleDefId::ModuleId(id)) =
+                            self.resolver.resolve_module_path_in_items(db, mod_path).take_types()
                         {
                             let parent_hir_name =
                                 parent_hir_path.segments().get(1).map(|it| it.name);
@@ -973,7 +1116,7 @@ impl SourceAnalyzer {
                             // FIXME: Multiple derives can have the same helper
                             let name_ref = name_ref.as_name();
                             for (macro_id, mut helpers) in
-                                helpers.iter().group_by(|(_, macro_id, ..)| macro_id).into_iter()
+                                helpers.iter().chunk_by(|(_, macro_id, ..)| macro_id).into_iter()
                             {
                                 if let Some(idx) = helpers.position(|(name, ..)| *name == name_ref)
                                 {
@@ -1006,8 +1149,7 @@ impl SourceAnalyzer {
         }
         if parent().is_some_and(|it| ast::Visibility::can_cast(it.kind())) {
             // No substitution because only modules can be inside visibilities, and those have no generics.
-            resolve_hir_path_qualifier(db, &self.resolver, &hir_path, &types_map)
-                .map(|it| (it, None))
+            resolve_hir_path_qualifier(db, &self.resolver, &hir_path, &store).map(|it| (it, None))
         } else {
             // Probably a type, no need to show substitutions for those.
             let res = resolve_hir_path_(
@@ -1016,16 +1158,16 @@ impl SourceAnalyzer {
                 &hir_path,
                 prefer_value_ns,
                 name_hygiene(db, InFile::new(self.file_id, path.syntax())),
-                &types_map,
+                Some(&store),
             )?;
             let subst = (|| {
                 let parent = parent()?;
                 let ty = if let Some(expr) = ast::Expr::cast(parent.clone()) {
-                    let expr_id = self.expr_id(db, &expr)?;
-                    self.infer.as_ref()?.type_of_expr_or_pat(expr_id)?
+                    let expr_id = self.expr_id(expr)?;
+                    self.infer()?.type_of_expr_or_pat(expr_id)?
                 } else if let Some(pat) = ast::Pat::cast(parent) {
                     let pat_id = self.pat_id(&pat)?;
-                    &self.infer.as_ref()?[pat_id]
+                    &self.infer()?[pat_id]
                 } else {
                     return None;
                 };
@@ -1072,10 +1214,10 @@ impl SourceAnalyzer {
         db: &dyn HirDatabase,
         literal: &ast::RecordExpr,
     ) -> Option<Vec<(Field, Type)>> {
-        let body = self.body()?;
-        let infer = self.infer.as_ref()?;
+        let body = self.store()?;
+        let infer = self.infer()?;
 
-        let expr_id = self.expr_id(db, &literal.clone().into())?;
+        let expr_id = self.expr_id(literal.clone().into())?;
         let substs = infer[expr_id].as_adt()?.1;
 
         let (variant, missing_fields, _exhaustive) = match expr_id {
@@ -1095,8 +1237,8 @@ impl SourceAnalyzer {
         db: &dyn HirDatabase,
         pattern: &ast::RecordPat,
     ) -> Option<Vec<(Field, Type)>> {
-        let body = self.body()?;
-        let infer = self.infer.as_ref()?;
+        let body = self.store()?;
+        let infer = self.infer()?;
 
         let pat_id = self.pat_id(&pattern.clone().into())?.as_pat()?;
         let substs = infer.type_of_pat[pat_id].as_adt()?.1;
@@ -1130,24 +1272,17 @@ impl SourceAnalyzer {
         &self,
         db: &dyn HirDatabase,
         macro_call: InFile<&ast::MacroCall>,
-    ) -> Option<MacroFileId> {
-        let krate = self.resolver.krate();
-        // FIXME: This causes us to parse, generally this is the wrong approach for resolving a
-        // macro call to a macro call id!
-        let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| {
-            self.resolver.resolve_path_as_macro_def(db.upcast(), path, Some(MacroSubNs::Bang))
-        })?;
-        // why the 64?
-        Some(macro_call_id.as_macro_file()).filter(|it| it.expansion_level(db.upcast()) < 64)
+    ) -> Option<MacroCallId> {
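+        // Prefer the expansion recorded in the expression store's source map; fall
+        // back to the item scope's recorded macro invocations otherwise.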
+        self.store_sm().and_then(|bs| bs.expansion(macro_call)).or_else(|| {
+            self.resolver.item_scope().macro_invoc(
+                macro_call.with_value(db.ast_id_map(macro_call.file_id).ast_id(macro_call.value)),
+            )
+        })
     }
 
-    pub(crate) fn resolve_variant(
-        &self,
-        db: &dyn HirDatabase,
-        record_lit: ast::RecordExpr,
-    ) -> Option<VariantId> {
-        let infer = self.infer.as_ref()?;
-        let expr_id = self.expr_id(db, &record_lit.into())?;
+    pub(crate) fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> {
+        let infer = self.infer()?;
+        let expr_id = self.expr_id(record_lit.into())?;
         infer.variant_resolution_for_expr_or_pat(expr_id)
     }
 
@@ -1156,11 +1291,11 @@ impl SourceAnalyzer {
         db: &dyn HirDatabase,
         macro_expr: InFile<&ast::MacroExpr>,
     ) -> bool {
-        if let (Some((def, body, sm)), Some(infer)) = (&self.def, &self.infer) {
+        if let Some((def, body, sm, Some(infer))) = self.body_() {
             if let Some(expanded_expr) = sm.macro_expansion_expr(macro_expr) {
                 let mut is_unsafe = false;
                 let mut walk_expr = |expr_id| {
-                    unsafe_operations(db, infer, *def, body, expr_id, &mut |inside_unsafe_block| {
+                    unsafe_operations(db, infer, def, body, expr_id, &mut |inside_unsafe_block| {
                         is_unsafe |= inside_unsafe_block == InsideUnsafeBlock::No
                     })
                 };
@@ -1182,7 +1317,7 @@ impl SourceAnalyzer {
         format_args: InFile<&ast::FormatArgsExpr>,
         offset: TextSize,
     ) -> Option<(TextRange, Option<PathResolution>)> {
-        let (hygiene, implicits) = self.body_source_map()?.implicit_format_args(format_args)?;
+        let (hygiene, implicits) = self.store_sm()?.implicit_format_args(format_args)?;
         implicits.iter().find(|(range, _)| range.contains_inclusive(offset)).map(|(range, name)| {
             (
                 *range,
@@ -1206,9 +1341,9 @@ impl SourceAnalyzer {
         line: usize,
         offset: TextSize,
     ) -> Option<(DefWithBodyId, (ExprId, TextRange, usize))> {
-        let (def, _, body_source_map) = self.def.as_ref()?;
+        let (def, _, body_source_map, _) = self.body_()?;
         let (expr, args) = body_source_map.asm_template_args(asm)?;
-        Some(*def).zip(
+        Some(def).zip(
             args.get(line)?
                 .iter()
                 .find(|(range, _)| range.contains_inclusive(offset))
@@ -1221,7 +1356,7 @@ impl SourceAnalyzer {
         db: &'a dyn HirDatabase,
         format_args: InFile<&ast::FormatArgsExpr>,
     ) -> Option<impl Iterator<Item = (TextRange, Option<PathResolution>)> + 'a> {
-        let (hygiene, names) = self.body_source_map()?.implicit_format_args(format_args)?;
+        let (hygiene, names) = self.store_sm()?.implicit_format_args(format_args)?;
         Some(names.iter().map(move |(range, name)| {
             (
                 *range,
@@ -1243,8 +1378,8 @@ impl SourceAnalyzer {
         &self,
         asm: InFile<&ast::AsmExpr>,
     ) -> Option<(DefWithBodyId, (ExprId, &[Vec<(TextRange, usize)>]))> {
-        let (def, _, body_source_map) = self.def.as_ref()?;
-        Some(*def).zip(body_source_map.asm_template_args(asm))
+        let (def, _, body_source_map, _) = self.body_()?;
+        Some(def).zip(body_source_map.asm_template_args(asm))
     }
 
     fn resolve_impl_method_or_trait_def(
@@ -1291,12 +1426,12 @@ impl SourceAnalyzer {
         method_name: &Name,
     ) -> Option<(TraitId, FunctionId)> {
         let trait_id = db.lang_item(self.resolver.krate(), lang_trait)?.as_trait()?;
-        let fn_id = db.trait_data(trait_id).method_by_name(method_name)?;
+        let fn_id = db.trait_items(trait_id).method_by_name(method_name)?;
         Some((trait_id, fn_id))
     }
 
-    fn ty_of_expr(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option<&Ty> {
-        self.infer.as_ref()?.type_of_expr_or_pat(self.expr_id(db, expr)?)
+    fn ty_of_expr(&self, expr: ast::Expr) -> Option<&Ty> {
+        self.infer()?.type_of_expr_or_pat(self.expr_id(expr.clone())?)
     }
 }
 
@@ -1306,7 +1441,7 @@ fn scope_for(
     source_map: &BodySourceMap,
     node: InFile<&SyntaxNode>,
 ) -> Option<ScopeId> {
-    node.ancestors_with_macros(db.upcast())
+    node.ancestors_with_macros(db)
         .take_while(|it| {
             !ast::Item::can_cast(it.kind())
                 || ast::MacroCall::can_cast(it.kind())
@@ -1334,12 +1469,11 @@ fn scope_for_offset(
             }
 
             // FIXME handle attribute expansion
-            let source =
-                iter::successors(file_id.macro_file().map(|it| it.call_node(db.upcast())), |it| {
-                    Some(it.file_id.macro_file()?.call_node(db.upcast()))
-                })
-                .find(|it| it.file_id == from_file)
-                .filter(|it| it.kind() == SyntaxKind::MACRO_CALL)?;
+            let source = iter::successors(file_id.macro_file().map(|it| it.call_node(db)), |it| {
+                Some(it.file_id.macro_file()?.call_node(db))
+            })
+            .find(|it| it.file_id == from_file)
+            .filter(|it| it.kind() == SyntaxKind::MACRO_CALL)?;
             Some((source.text_range(), scope))
         })
         .filter(|(expr_range, _scope)| expr_range.start() <= offset && offset <= expr_range.end())
@@ -1369,7 +1503,7 @@ fn adjust(
             if source.file_id != from_file {
                 return None;
             }
-            let root = source.file_syntax(db.upcast());
+            let root = source.file_syntax(db);
             let node = source.value.to_node(&root);
             Some((node.syntax().text_range(), scope))
         })
@@ -1396,9 +1530,9 @@ pub(crate) fn resolve_hir_path(
     resolver: &Resolver,
     path: &Path,
     hygiene: HygieneId,
-    types_map: &TypesMap,
+    store: Option<&ExpressionStore>,
 ) -> Option<PathResolution> {
-    resolve_hir_path_(db, resolver, path, false, hygiene, types_map)
+    resolve_hir_path_(db, resolver, path, false, hygiene, store)
 }
 
 #[inline]
@@ -1408,7 +1542,7 @@ pub(crate) fn resolve_hir_path_as_attr_macro(
     path: &Path,
 ) -> Option<Macro> {
     resolver
-        .resolve_path_as_macro(db.upcast(), path.mod_path()?, Some(MacroSubNs::Attr))
+        .resolve_path_as_macro(db, path.mod_path()?, Some(MacroSubNs::Attr))
         .map(|(it, _)| it)
         .map(Into::into)
 }
@@ -1419,23 +1553,18 @@ fn resolve_hir_path_(
     path: &Path,
     prefer_value_ns: bool,
     hygiene: HygieneId,
-    types_map: &TypesMap,
+    store: Option<&ExpressionStore>,
 ) -> Option<PathResolution> {
     let types = || {
         let (ty, unresolved) = match path.type_anchor() {
-            Some(type_ref) => {
-                let (_, res) = TyLoweringContext::new_maybe_unowned(
-                    db,
-                    resolver,
-                    types_map,
-                    None,
-                    resolver.type_owner(),
-                )
-                .lower_ty_ext(type_ref);
+            Some(type_ref) => resolver.generic_def().and_then(|def| {
+                let (_, res) =
+                    TyLoweringContext::new(db, resolver, store?, def, LifetimeElisionKind::Infer)
+                        .lower_ty_ext(type_ref);
                 res.map(|ty_ns| (ty_ns, path.segments().first()))
-            }
+            }),
             None => {
-                let (ty, remaining_idx, _) = resolver.resolve_path_in_type_ns(db.upcast(), path)?;
+                let (ty, remaining_idx, _) = resolver.resolve_path_in_type_ns(db, path)?;
                 match remaining_idx {
                     Some(remaining_idx) => {
                         if remaining_idx + 1 == path.segments().len() {
@@ -1453,7 +1582,7 @@ fn resolve_hir_path_(
         // within the trait's associated types.
         if let (Some(unresolved), &TypeNs::TraitId(trait_id)) = (&unresolved, &ty) {
             if let Some(type_alias_id) =
-                db.trait_data(trait_id).associated_type_by_name(unresolved.name)
+                db.trait_items(trait_id).associated_type_by_name(unresolved.name)
             {
                 return Some(PathResolution::Def(ModuleDefId::from(type_alias_id).into()));
             }
@@ -1470,6 +1599,7 @@ fn resolve_hir_path_(
             TypeNs::BuiltinType(it) => PathResolution::Def(BuiltinType::from(it).into()),
             TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()),
             TypeNs::TraitAliasId(it) => PathResolution::Def(TraitAlias::from(it).into()),
+            TypeNs::ModuleId(it) => PathResolution::Def(ModuleDef::Module(it.into())),
         };
         match unresolved {
             Some(unresolved) => resolver
@@ -1494,14 +1624,14 @@ fn resolve_hir_path_(
 
     let items = || {
         resolver
-            .resolve_module_path_in_items(db.upcast(), path.mod_path()?)
+            .resolve_module_path_in_items(db, path.mod_path()?)
             .take_types()
             .map(|it| PathResolution::Def(it.into()))
     };
 
     let macros = || {
         resolver
-            .resolve_path_as_macro(db.upcast(), path.mod_path()?, None)
+            .resolve_path_as_macro(db, path.mod_path()?, None)
             .map(|(def, _)| PathResolution::Def(ModuleDef::Macro(def.into())))
     };
 
@@ -1517,7 +1647,7 @@ fn resolve_hir_value_path(
     path: &Path,
     hygiene: HygieneId,
 ) -> Option<PathResolution> {
-    resolver.resolve_path_in_value_ns_fully(db.upcast(), path, hygiene).and_then(|val| {
+    resolver.resolve_path_in_value_ns_fully(db, path, hygiene).and_then(|val| {
         let res = match val {
             ValueNs::LocalBinding(binding_id) => {
                 let var = Local { parent: body_owner?, binding_id };
@@ -1552,23 +1682,18 @@ fn resolve_hir_path_qualifier(
     db: &dyn HirDatabase,
     resolver: &Resolver,
     path: &Path,
-    types_map: &TypesMap,
+    store: &ExpressionStore,
 ) -> Option<PathResolution> {
     (|| {
         let (ty, unresolved) = match path.type_anchor() {
-            Some(type_ref) => {
-                let (_, res) = TyLoweringContext::new_maybe_unowned(
-                    db,
-                    resolver,
-                    types_map,
-                    None,
-                    resolver.type_owner(),
-                )
-                .lower_ty_ext(type_ref);
+            Some(type_ref) => resolver.generic_def().and_then(|def| {
+                let (_, res) =
+                    TyLoweringContext::new(db, resolver, store, def, LifetimeElisionKind::Infer)
+                        .lower_ty_ext(type_ref);
                 res.map(|ty_ns| (ty_ns, path.segments().first()))
-            }
+            }),
             None => {
-                let (ty, remaining_idx, _) = resolver.resolve_path_in_type_ns(db.upcast(), path)?;
+                let (ty, remaining_idx, _) = resolver.resolve_path_in_type_ns(db, path)?;
                 match remaining_idx {
                     Some(remaining_idx) => {
                         if remaining_idx + 1 == path.segments().len() {
@@ -1586,7 +1711,7 @@ fn resolve_hir_path_qualifier(
         // within the trait's associated types.
         if let (Some(unresolved), &TypeNs::TraitId(trait_id)) = (&unresolved, &ty) {
             if let Some(type_alias_id) =
-                db.trait_data(trait_id).associated_type_by_name(unresolved.name)
+                db.trait_items(trait_id).associated_type_by_name(unresolved.name)
             {
                 return Some(PathResolution::Def(ModuleDefId::from(type_alias_id).into()));
             }
@@ -1603,6 +1728,7 @@ fn resolve_hir_path_qualifier(
             TypeNs::BuiltinType(it) => PathResolution::Def(BuiltinType::from(it).into()),
             TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()),
             TypeNs::TraitAliasId(it) => PathResolution::Def(TraitAlias::from(it).into()),
+            TypeNs::ModuleId(it) => PathResolution::Def(ModuleDef::Module(it.into())),
         };
         match unresolved {
             Some(unresolved) => resolver
@@ -1623,7 +1749,7 @@ fn resolve_hir_path_qualifier(
     })()
     .or_else(|| {
         resolver
-            .resolve_module_path_in_items(db.upcast(), path.mod_path()?)
+            .resolve_module_path_in_items(db, path.mod_path()?)
             .take_types()
             .map(|it| PathResolution::Def(it.into()))
     })
@@ -1635,8 +1761,7 @@ pub(crate) fn name_hygiene(db: &dyn HirDatabase, name: InFile<&SyntaxNode>) -> H
     };
     let span_map = db.expansion_span_map(macro_file);
     let ctx = span_map.span_at(name.value.text_range().start()).ctx;
-    let ctx = db.lookup_intern_syntax_context(ctx);
-    HygieneId::new(ctx.opaque_and_semitransparent)
+    HygieneId::new(ctx.opaque_and_semitransparent(db))
 }
 
 fn type_of_expr_including_adjust(infer: &InferenceResult, id: ExprId) -> Option<&Ty> {
diff --git a/src/tools/rust-analyzer/crates/hir/src/symbols.rs b/src/tools/rust-analyzer/crates/hir/src/symbols.rs
index 81eb6a70ad73e..41064d047a5a9 100644
--- a/src/tools/rust-analyzer/crates/hir/src/symbols.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/symbols.rs
@@ -2,22 +2,22 @@
 
 use either::Either;
 use hir_def::{
+    AdtId, AssocItemId, Complete, DefWithBodyId, ExternCrateId, HasModule, ImplId, Lookup, MacroId,
+    ModuleDefId, ModuleId, TraitId,
     db::DefDatabase,
     item_scope::{ImportId, ImportOrExternCrate, ImportOrGlob},
     per_ns::Item,
     src::{HasChildSource, HasSource},
     visibility::{Visibility, VisibilityExplicitness},
-    AdtId, AssocItemId, DefWithBodyId, ExternCrateId, HasModule, ImplId, Lookup, MacroId,
-    ModuleDefId, ModuleId, TraitId,
 };
-use hir_expand::{name::Name, HirFileId};
+use hir_expand::{HirFileId, name::Name};
 use hir_ty::{
     db::HirDatabase,
-    display::{hir_display_with_types_map, DisplayTarget, HirDisplay},
+    display::{DisplayTarget, HirDisplay, hir_display_with_store},
 };
 use intern::Symbol;
 use rustc_hash::FxHashMap;
-use syntax::{ast::HasName, AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, ToSmolStr};
+use syntax::{AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, ToSmolStr, ast::HasName};
 
 use crate::{Module, ModuleDef, Semantics};
 
@@ -34,6 +34,7 @@ pub struct FileSymbol {
     /// Whether this symbol is a doc alias for the original symbol.
     pub is_alias: bool,
     pub is_assoc: bool,
+    pub do_not_complete: Complete,
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -79,7 +80,7 @@ impl<'a> SymbolCollector<'a> {
             current_container_name: None,
             display_target: DisplayTarget::from_crate(
                 db,
-                *db.crate_graph().crates_in_topological_order().last().unwrap(),
+                *db.all_crates().last().expect("no crate graph present"),
             ),
         }
     }
@@ -111,7 +112,7 @@ impl<'a> SymbolCollector<'a> {
     fn do_work(&mut self, work: SymbolCollectorWork) {
         let _p = tracing::info_span!("SymbolCollector::do_work", ?work).entered();
         tracing::info!(?work, "SymbolCollector::do_work");
-        self.db.unwind_if_cancelled();
+        self.db.unwind_if_revision_cancelled();
 
         let parent_name = work.parent.map(|name| name.as_str().to_smolstr());
         self.with_container_name(parent_name, |s| s.collect_from_module(work.module_id));
@@ -122,35 +123,43 @@ impl<'a> SymbolCollector<'a> {
             match def {
                 ModuleDefId::ModuleId(id) => this.push_module(id, name),
                 ModuleDefId::FunctionId(id) => {
-                    this.push_decl(id, name, false);
+                    this.push_decl(id, name, false, None);
                     this.collect_from_body(id, Some(name.clone()));
                 }
-                ModuleDefId::AdtId(AdtId::StructId(id)) => this.push_decl(id, name, false),
-                ModuleDefId::AdtId(AdtId::EnumId(id)) => this.push_decl(id, name, false),
-                ModuleDefId::AdtId(AdtId::UnionId(id)) => this.push_decl(id, name, false),
+                ModuleDefId::AdtId(AdtId::StructId(id)) => {
+                    this.push_decl(id, name, false, None);
+                }
+                ModuleDefId::AdtId(AdtId::EnumId(id)) => {
+                    this.push_decl(id, name, false, None);
+                }
+                ModuleDefId::AdtId(AdtId::UnionId(id)) => {
+                    this.push_decl(id, name, false, None);
+                }
                 ModuleDefId::ConstId(id) => {
-                    this.push_decl(id, name, false);
+                    this.push_decl(id, name, false, None);
                     this.collect_from_body(id, Some(name.clone()));
                 }
                 ModuleDefId::StaticId(id) => {
-                    this.push_decl(id, name, false);
+                    this.push_decl(id, name, false, None);
                     this.collect_from_body(id, Some(name.clone()));
                 }
                 ModuleDefId::TraitId(id) => {
-                    this.push_decl(id, name, false);
-                    this.collect_from_trait(id);
+                    let trait_do_not_complete = this.push_decl(id, name, false, None);
+                    this.collect_from_trait(id, trait_do_not_complete);
                 }
                 ModuleDefId::TraitAliasId(id) => {
-                    this.push_decl(id, name, false);
+                    this.push_decl(id, name, false, None);
                 }
                 ModuleDefId::TypeAliasId(id) => {
-                    this.push_decl(id, name, false);
+                    this.push_decl(id, name, false, None);
+                }
+                ModuleDefId::MacroId(id) => {
+                    match id {
+                        MacroId::Macro2Id(id) => this.push_decl(id, name, false, None),
+                        MacroId::MacroRulesId(id) => this.push_decl(id, name, false, None),
+                        MacroId::ProcMacroId(id) => this.push_decl(id, name, false, None),
+                    };
                 }
-                ModuleDefId::MacroId(id) => match id {
-                    MacroId::Macro2Id(id) => this.push_decl(id, name, false),
-                    MacroId::MacroRulesId(id) => this.push_decl(id, name, false),
-                    MacroId::ProcMacroId(id) => this.push_decl(id, name, false),
-                },
                 // Don't index these.
                 ModuleDefId::BuiltinType(_) => {}
                 ModuleDefId::EnumVariantId(_) => {}
@@ -169,7 +178,7 @@ impl<'a> SymbolCollector<'a> {
         let mut push_import = |this: &mut Self, i: ImportId, name: &Name, def: ModuleDefId, vis| {
             let source = import_child_source_cache
                 .entry(i.use_)
-                .or_insert_with(|| i.use_.child_source(this.db.upcast()));
+                .or_insert_with(|| i.use_.child_source(this.db));
             let Some(use_tree_src) = source.value.get(i.idx) else { return };
             let rename = use_tree_src.rename().and_then(|rename| rename.name());
             let name_syntax = match rename {
@@ -194,13 +203,14 @@ impl<'a> SymbolCollector<'a> {
                 loc: dec_loc,
                 is_alias: false,
                 is_assoc: false,
+                do_not_complete: Complete::Yes,
             });
         };
 
         let push_extern_crate =
             |this: &mut Self, i: ExternCrateId, name: &Name, def: ModuleDefId, vis| {
-                let loc = i.lookup(this.db.upcast());
-                let source = loc.source(this.db.upcast());
+                let loc = i.lookup(this.db);
+                let source = loc.source(this.db);
                 let rename = source.value.rename().and_then(|rename| rename.name());
 
                 let name_syntax = match rename {
@@ -223,10 +233,11 @@ impl<'a> SymbolCollector<'a> {
                     loc: dec_loc,
                     is_alias: false,
                     is_assoc: false,
+                    do_not_complete: Complete::Yes,
                 });
             };
 
-        let def_map = module_id.def_map(self.db.upcast());
+        let def_map = module_id.def_map(self.db);
         let scope = &def_map[module_id.local_id].scope;
 
         for impl_id in scope.impls() {
@@ -279,12 +290,12 @@ impl<'a> SymbolCollector<'a> {
 
         for (name, id) in scope.legacy_macros() {
             for &id in id {
-                if id.module(self.db.upcast()) == module_id {
+                if id.module(self.db) == module_id {
                     match id {
-                        MacroId::Macro2Id(id) => self.push_decl(id, name, false),
-                        MacroId::MacroRulesId(id) => self.push_decl(id, name, false),
-                        MacroId::ProcMacroId(id) => self.push_decl(id, name, false),
-                    }
+                        MacroId::Macro2Id(id) => self.push_decl(id, name, false, None),
+                        MacroId::MacroRulesId(id) => self.push_decl(id, name, false, None),
+                        MacroId::ProcMacroId(id) => self.push_decl(id, name, false, None),
+                    };
                 }
             }
         }
@@ -295,7 +306,7 @@ impl<'a> SymbolCollector<'a> {
         let body = self.db.body(body_id);
 
         // Descend into the blocks and enqueue collection of all modules within.
-        for (_, def_map) in body.blocks(self.db.upcast()) {
+        for (_, def_map) in body.blocks(self.db) {
             for (id, _) in def_map.modules() {
                 self.work.push(SymbolCollectorWork {
                     module_id: def_map.module_id(id),
@@ -306,24 +317,24 @@ impl<'a> SymbolCollector<'a> {
     }
 
     fn collect_from_impl(&mut self, impl_id: ImplId) {
-        let impl_data = self.db.impl_data(impl_id);
+        let impl_data = self.db.impl_signature(impl_id);
         let impl_name = Some(
-            hir_display_with_types_map(impl_data.self_ty, &impl_data.types_map)
+            hir_display_with_store(impl_data.self_ty, &impl_data.store)
                 .display(self.db, self.display_target)
                 .to_smolstr(),
         );
         self.with_container_name(impl_name, |s| {
-            for &(ref name, assoc_item_id) in &impl_data.items {
-                s.push_assoc_item(assoc_item_id, name)
+            for &(ref name, assoc_item_id) in &self.db.impl_items(impl_id).items {
+                s.push_assoc_item(assoc_item_id, name, None)
             }
         })
     }
 
-    fn collect_from_trait(&mut self, trait_id: TraitId) {
-        let trait_data = self.db.trait_data(trait_id);
+    fn collect_from_trait(&mut self, trait_id: TraitId, trait_do_not_complete: Complete) {
+        let trait_data = self.db.trait_signature(trait_id);
         self.with_container_name(Some(trait_data.name.as_str().into()), |s| {
-            for &(ref name, assoc_item_id) in &trait_data.items {
-                s.push_assoc_item(assoc_item_id, name);
+            for &(ref name, assoc_item_id) in &self.db.trait_items(trait_id).items {
+                s.push_assoc_item(assoc_item_id, name, Some(trait_do_not_complete));
             }
         });
     }
@@ -338,23 +349,34 @@ impl<'a> SymbolCollector<'a> {
         }
     }
 
-    fn push_assoc_item(&mut self, assoc_item_id: AssocItemId, name: &Name) {
+    fn push_assoc_item(
+        &mut self,
+        assoc_item_id: AssocItemId,
+        name: &Name,
+        trait_do_not_complete: Option<Complete>,
+    ) {
         match assoc_item_id {
-            AssocItemId::FunctionId(id) => self.push_decl(id, name, true),
-            AssocItemId::ConstId(id) => self.push_decl(id, name, true),
-            AssocItemId::TypeAliasId(id) => self.push_decl(id, name, true),
-        }
+            AssocItemId::FunctionId(id) => self.push_decl(id, name, true, trait_do_not_complete),
+            AssocItemId::ConstId(id) => self.push_decl(id, name, true, trait_do_not_complete),
+            AssocItemId::TypeAliasId(id) => self.push_decl(id, name, true, trait_do_not_complete),
+        };
     }
 
-    fn push_decl<'db, L>(&mut self, id: L, name: &Name, is_assoc: bool)
+    fn push_decl<L>(
+        &mut self,
+        id: L,
+        name: &Name,
+        is_assoc: bool,
+        trait_do_not_complete: Option<Complete>,
+    ) -> Complete
     where
-        L: Lookup<Database<'db> = dyn DefDatabase + 'db> + Into<ModuleDefId>,
+        L: Lookup<Database = dyn DefDatabase> + Into<ModuleDefId>,
         <L as Lookup>::Data: HasSource,
         <<L as Lookup>::Data as HasSource>::Value: HasName,
     {
-        let loc = id.lookup(self.db.upcast());
-        let source = loc.source(self.db.upcast());
-        let Some(name_node) = source.value.name() else { return };
+        let loc = id.lookup(self.db);
+        let source = loc.source(self.db);
+        let Some(name_node) = source.value.name() else { return Complete::Yes };
         let def = ModuleDef::from(id.into());
         let dec_loc = DeclarationLocation {
             hir_file_id: source.file_id,
@@ -362,7 +384,14 @@ impl<'a> SymbolCollector<'a> {
             name_ptr: AstPtr::new(&name_node).wrap_left(),
         };
 
+        let mut do_not_complete = Complete::Yes;
+
         if let Some(attrs) = def.attrs(self.db) {
+            do_not_complete = Complete::extract(matches!(def, ModuleDef::Trait(_)), &attrs);
+            if let Some(trait_do_not_complete) = trait_do_not_complete {
+                do_not_complete = Complete::for_trait_item(trait_do_not_complete, do_not_complete);
+            }
+
             for alias in attrs.doc_aliases() {
                 self.symbols.insert(FileSymbol {
                     name: alias.clone(),
@@ -371,6 +400,7 @@ impl<'a> SymbolCollector<'a> {
                     container_name: self.current_container_name.clone(),
                     is_alias: true,
                     is_assoc,
+                    do_not_complete,
                 });
             }
         }
@@ -382,14 +412,17 @@ impl<'a> SymbolCollector<'a> {
             loc: dec_loc,
             is_alias: false,
             is_assoc,
+            do_not_complete,
         });
+
+        do_not_complete
     }
 
     fn push_module(&mut self, module_id: ModuleId, name: &Name) {
-        let def_map = module_id.def_map(self.db.upcast());
+        let def_map = module_id.def_map(self.db);
         let module_data = &def_map[module_id.local_id];
         let Some(declaration) = module_data.origin.declaration() else { return };
-        let module = declaration.to_node(self.db.upcast());
+        let module = declaration.to_node(self.db);
         let Some(name_node) = module.name() else { return };
         let dec_loc = DeclarationLocation {
             hir_file_id: declaration.file_id,
@@ -399,7 +432,10 @@ impl<'a> SymbolCollector<'a> {
 
         let def = ModuleDef::Module(module_id.into());
 
+        let mut do_not_complete = Complete::Yes;
         if let Some(attrs) = def.attrs(self.db) {
+            do_not_complete = Complete::extract(matches!(def, ModuleDef::Trait(_)), &attrs);
+
             for alias in attrs.doc_aliases() {
                 self.symbols.insert(FileSymbol {
                     name: alias.clone(),
@@ -408,6 +444,7 @@ impl<'a> SymbolCollector<'a> {
                     container_name: self.current_container_name.clone(),
                     is_alias: true,
                     is_assoc: false,
+                    do_not_complete,
                 });
             }
         }
@@ -419,6 +456,7 @@ impl<'a> SymbolCollector<'a> {
             loc: dec_loc,
             is_alias: false,
             is_assoc: false,
+            do_not_complete,
         });
     }
 }
diff --git a/src/tools/rust-analyzer/crates/hir/src/term_search/expr.rs b/src/tools/rust-analyzer/crates/hir/src/term_search/expr.rs
index 0d672dc332f39..78ee3b5aa683a 100644
--- a/src/tools/rust-analyzer/crates/hir/src/term_search/expr.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/term_search/expr.rs
@@ -22,7 +22,7 @@ fn mod_item_path(
 ) -> Option<ModPath> {
     let db = sema_scope.db;
     let m = sema_scope.module();
-    m.find_path(db.upcast(), *def, cfg)
+    m.find_path(db, *def, cfg)
 }
 
 /// Helper function to get path to `ModuleDef` as string
@@ -33,7 +33,7 @@ fn mod_item_path_str(
     edition: Edition,
 ) -> Result<String, DisplaySourceCodeError> {
     let path = mod_item_path(sema_scope, def, cfg);
-    path.map(|it| it.display(sema_scope.db.upcast(), edition).to_string())
+    path.map(|it| it.display(sema_scope.db, edition).to_string())
         .ok_or(DisplaySourceCodeError::PathNotFound)
 }
 
@@ -111,15 +111,15 @@ impl Expr {
                         container_name(container, sema_scope, cfg, edition, display_target)?;
                     let const_name = it
                         .name(db)
-                        .map(|c| c.display(db.upcast(), edition).to_string())
+                        .map(|c| c.display(db, edition).to_string())
                         .unwrap_or(String::new());
                     Ok(format!("{container_name}::{const_name}"))
                 }
                 None => mod_item_path_str(sema_scope, &ModuleDef::Const(*it)),
             },
             Expr::Static(it) => mod_item_path_str(sema_scope, &ModuleDef::Static(*it)),
-            Expr::Local(it) => Ok(it.name(db).display(db.upcast(), edition).to_string()),
-            Expr::ConstParam(it) => Ok(it.name(db).display(db.upcast(), edition).to_string()),
+            Expr::Local(it) => Ok(it.name(db).display(db, edition).to_string()),
+            Expr::ConstParam(it) => Ok(it.name(db).display(db, edition).to_string()),
             Expr::FamousType { value, .. } => Ok(value.to_string()),
             Expr::Function { func, params, .. } => {
                 let args = params
@@ -133,7 +133,7 @@ impl Expr {
                     Some(container) => {
                         let container_name =
                             container_name(container, sema_scope, cfg, edition, display_target)?;
-                        let fn_name = func.name(db).display(db.upcast(), edition).to_string();
+                        let fn_name = func.name(db).display(db, edition).to_string();
                         Ok(format!("{container_name}::{fn_name}({args})"))
                     }
                     None => {
@@ -147,7 +147,7 @@ impl Expr {
                     return Ok(many_formatter(&target.ty(db)));
                 }
 
-                let func_name = func.name(db).display(db.upcast(), edition).to_string();
+                let func_name = func.name(db).display(db, edition).to_string();
                 let self_param = func.self_param(db).unwrap();
                 let target_str =
                     target.gen_source_code(sema_scope, many_formatter, cfg, display_target)?;
@@ -199,7 +199,7 @@ impl Expr {
                             .map(|(a, f)| {
                                 let tmp = format!(
                                     "{}: {}",
-                                    f.name(db).display(db.upcast(), edition),
+                                    f.name(db).display(db, edition),
                                     a.gen_source_code(
                                         sema_scope,
                                         many_formatter,
@@ -241,7 +241,7 @@ impl Expr {
                             .map(|(a, f)| {
                                 let tmp = format!(
                                     "{}: {}",
-                                    f.name(db).display(db.upcast(), edition),
+                                    f.name(db).display(db, edition),
                                     a.gen_source_code(
                                         sema_scope,
                                         many_formatter,
@@ -279,7 +279,7 @@ impl Expr {
 
                 let strukt =
                     expr.gen_source_code(sema_scope, many_formatter, cfg, display_target)?;
-                let field = field.name(db).display(db.upcast(), edition).to_string();
+                let field = field.name(db).display(db, edition).to_string();
                 Ok(format!("{strukt}.{field}"))
             }
             Expr::Reference(expr) => {
@@ -387,7 +387,7 @@ fn container_name(
             let self_ty = imp.self_ty(sema_scope.db);
             // Should it be guaranteed that `mod_item_path` always exists?
             match self_ty.as_adt().and_then(|adt| mod_item_path(sema_scope, &adt.into(), cfg)) {
-                Some(path) => path.display(sema_scope.db.upcast(), edition).to_string(),
+                Some(path) => path.display(sema_scope.db, edition).to_string(),
                 None => self_ty.display(sema_scope.db, display_target).to_string(),
             }
         }
diff --git a/src/tools/rust-analyzer/crates/hir/src/term_search/tactics.rs b/src/tools/rust-analyzer/crates/hir/src/term_search/tactics.rs
index 847304d503a84..bcff44fcd016e 100644
--- a/src/tools/rust-analyzer/crates/hir/src/term_search/tactics.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/term_search/tactics.rs
@@ -10,9 +10,9 @@
 
 use std::iter;
 
+use hir_ty::TyBuilder;
 use hir_ty::db::HirDatabase;
 use hir_ty::mir::BorrowKind;
-use hir_ty::TyBuilder;
 use itertools::Itertools;
 use rustc_hash::FxHashSet;
 use span::Edition;
diff --git a/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml b/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml
index 3768c2257cadd..53af980c194c5 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml
@@ -12,7 +12,7 @@ rust-version.workspace = true
 [lib]
 
 [dependencies]
-cov-mark = "2.0.0-pre.1"
+cov-mark = "2.0.0"
 
 itertools.workspace = true
 either.workspace = true
@@ -26,7 +26,7 @@ ide-db.workspace = true
 hir.workspace = true
 
 [dev-dependencies]
-expect-test = "1.4.0"
+expect-test = "1.5.1"
 
 # local deps
 test-utils.workspace = true
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs b/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs
index 05105c8c92c5e..2de0013bb126d 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs
@@ -5,7 +5,7 @@
 //! assists if we are allowed to.
 
 use hir::ImportPathConfig;
-use ide_db::{imports::insert_use::InsertUseConfig, SnippetCap};
+use ide_db::{SnippetCap, imports::insert_use::InsertUseConfig};
 
 use crate::AssistKind;
 
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs b/src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs
index b1189f0d0b06e..9eb9452a2b836 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs
@@ -1,17 +1,16 @@
 //! See [`AssistContext`].
 
-use hir::{FileRange, Semantics};
-use ide_db::EditionedFileId;
-use ide_db::{label::Label, FileId, RootDatabase};
+use hir::{EditionedFileId, FileRange, Semantics};
+use ide_db::{FileId, RootDatabase, label::Label};
 use syntax::Edition;
 use syntax::{
-    algo::{self, find_node_at_offset, find_node_at_range},
     AstNode, AstToken, Direction, SourceFile, SyntaxElement, SyntaxKind, SyntaxToken, TextRange,
     TextSize, TokenAtOffset,
+    algo::{self, find_node_at_offset, find_node_at_range},
 };
 
 use crate::{
-    assist_config::AssistConfig, Assist, AssistId, AssistKind, AssistResolveStrategy, GroupLabel,
+    Assist, AssistId, AssistKind, AssistResolveStrategy, GroupLabel, assist_config::AssistConfig,
 };
 
 pub(crate) use ide_db::source_change::{SourceChangeBuilder, TreeMutator};
@@ -105,12 +104,16 @@ impl<'a> AssistContext<'a> {
         self.frange.range.start()
     }
 
+    pub(crate) fn vfs_file_id(&self) -> FileId {
+        self.frange.file_id.file_id(self.db())
+    }
+
     pub(crate) fn file_id(&self) -> EditionedFileId {
         self.frange.file_id
     }
 
     pub(crate) fn edition(&self) -> Edition {
-        self.frange.file_id.edition()
+        self.frange.file_id.edition(self.db())
     }
 
     pub(crate) fn has_empty_selection(&self) -> bool {
@@ -165,7 +168,7 @@ impl Assists {
     pub(crate) fn new(ctx: &AssistContext<'_>, resolve: AssistResolveStrategy) -> Assists {
         Assists {
             resolve,
-            file: ctx.frange.file_id.file_id(),
+            file: ctx.frange.file_id.file_id(ctx.db()),
             buf: Vec::new(),
             allowed: ctx.config.allowed.clone(),
         }
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_braces.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_braces.rs
index 42f615e71daf6..745ae67f30959 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_braces.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_braces.rs
@@ -1,13 +1,13 @@
 use syntax::{
-    ast::{self, edit_in_place::Indent, syntax_factory::SyntaxFactory},
     AstNode,
+    ast::{self, edit_in_place::Indent, syntax_factory::SyntaxFactory},
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: add_braces
 //
-// Adds braces to lambda and match arm expressions.
+// Adds braces to closure bodies and match arm expressions.
 //
 // ```
 // fn foo(n: i32) -> i32 {
@@ -32,14 +32,14 @@ pub(crate) fn add_braces(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
     let (expr_type, expr) = get_replacement_node(ctx)?;
 
     acc.add(
-        AssistId("add_braces", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("add_braces"),
         match expr_type {
-            ParentType::ClosureExpr => "Add braces to closure body",
-            ParentType::MatchArmExpr => "Add braces to arm expression",
+            ParentType::ClosureExpr => "Add braces to this closure body",
+            ParentType::MatchArmExpr => "Add braces to this match arm expression",
         },
         expr.syntax().text_range(),
         |builder| {
-            let make = SyntaxFactory::new();
+            let make = SyntaxFactory::with_mappings();
             let mut editor = builder.make_editor(expr.syntax());
 
             let block_expr = make.block_expr(None, Some(expr.clone()));
@@ -48,7 +48,7 @@ pub(crate) fn add_braces(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
             editor.replace(expr.syntax(), block_expr.syntax());
 
             editor.add_mappings(make.finish_with_mappings());
-            builder.add_file_edits(ctx.file_id(), editor);
+            builder.add_file_edits(ctx.vfs_file_id(), editor);
         },
     )
 }
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_explicit_enum_discriminant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_explicit_enum_discriminant.rs
index 1a5de9cb071bb..10b0879e6364d 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_explicit_enum_discriminant.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_explicit_enum_discriminant.rs
@@ -1,10 +1,6 @@
 use hir::Semantics;
-use ide_db::{
-    assists::{AssistId, AssistKind},
-    source_change::SourceChangeBuilder,
-    RootDatabase,
-};
-use syntax::{ast, AstNode};
+use ide_db::{RootDatabase, assists::AssistId, source_change::SourceChangeBuilder};
+use syntax::{AstNode, ast};
 
 use crate::{AssistContext, Assists};
 
@@ -53,7 +49,7 @@ pub(crate) fn add_explicit_enum_discriminant(
     }
 
     acc.add(
-        AssistId("add_explicit_enum_discriminant", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("add_explicit_enum_discriminant"),
         "Add explicit enum discriminants",
         enum_node.syntax().text_range(),
         |builder| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_explicit_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_explicit_type.rs
index 8bc285614e039..35a65cc309111 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_explicit_type.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_explicit_type.rs
@@ -2,7 +2,7 @@ use hir::HirDisplay;
 use ide_db::syntax_helpers::node_ext::walk_ty;
 use syntax::ast::{self, AstNode, LetStmt, Param};
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: add_explicit_type
 //
@@ -71,7 +71,7 @@ pub(crate) fn add_explicit_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
 
     let inferred_type = ty.display_source_code(ctx.db(), module.into(), false).ok()?;
     acc.add(
-        AssistId("add_explicit_type", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("add_explicit_type"),
         format!("Insert explicit type `{inferred_type}`"),
         pat_range,
         |builder| match ascribed_ty {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_label_to_loop.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_label_to_loop.rs
index 001f1e8bb1585..d2b903447133f 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_label_to_loop.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_label_to_loop.rs
@@ -1,10 +1,10 @@
 use ide_db::syntax_helpers::node_ext::for_each_break_and_continue_expr;
 use syntax::{
-    ast::{self, AstNode, HasLoopBody},
     T,
+    ast::{self, AstNode, HasLoopBody},
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: add_label_to_loop
 //
@@ -35,7 +35,7 @@ pub(crate) fn add_label_to_loop(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
     }
 
     acc.add(
-        AssistId("add_label_to_loop", AssistKind::Generate),
+        AssistId::generate("add_label_to_loop"),
         "Add Label",
         loop_expr.syntax().text_range(),
         |builder| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_lifetime_to_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_lifetime_to_type.rs
index 43c0a72fa4774..dcdc7ea9cdced 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_lifetime_to_type.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_lifetime_to_type.rs
@@ -1,6 +1,6 @@
 use syntax::ast::{self, AstNode, HasGenericParams, HasName};
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: add_lifetime_to_type
 //
@@ -37,31 +37,26 @@ pub(crate) fn add_lifetime_to_type(acc: &mut Assists, ctx: &AssistContext<'_>) -
     let ref_types = fetch_borrowed_types(&node)?;
     let target = node.syntax().text_range();
 
-    acc.add(
-        AssistId("add_lifetime_to_type", AssistKind::Generate),
-        "Add lifetime",
-        target,
-        |builder| {
-            match node.generic_param_list() {
-                Some(gen_param) => {
-                    if let Some(left_angle) = gen_param.l_angle_token() {
-                        builder.insert(left_angle.text_range().end(), "'a, ");
-                    }
+    acc.add(AssistId::generate("add_lifetime_to_type"), "Add lifetime", target, |builder| {
+        match node.generic_param_list() {
+            Some(gen_param) => {
+                if let Some(left_angle) = gen_param.l_angle_token() {
+                    builder.insert(left_angle.text_range().end(), "'a, ");
                 }
-                None => {
-                    if let Some(name) = node.name() {
-                        builder.insert(name.syntax().text_range().end(), "<'a>");
-                    }
+            }
+            None => {
+                if let Some(name) = node.name() {
+                    builder.insert(name.syntax().text_range().end(), "<'a>");
                 }
             }
+        }
 
-            for ref_type in ref_types {
-                if let Some(amp_token) = ref_type.amp_token() {
-                    builder.insert(amp_token.text_range().end(), "'a ");
-                }
+        for ref_type in ref_types {
+            if let Some(amp_token) = ref_type.amp_token() {
+                builder.insert(amp_token.text_range().end(), "'a ");
             }
-        },
-    )
+        }
+    })
 }
 
 fn fetch_borrowed_types(node: &ast::Adt) -> Option<Vec<ast::RefType>> {
@@ -99,11 +94,7 @@ fn fetch_borrowed_types(node: &ast::Adt) -> Option<Vec<ast::RefType>> {
         }
     };
 
-    if ref_types.is_empty() {
-        None
-    } else {
-        Some(ref_types)
-    }
+    if ref_types.is_empty() { None } else { Some(ref_types) }
 }
 
 fn find_ref_types_from_field_list(field_list: &ast::FieldList) -> Option<Vec<ast::RefType>> {
@@ -134,11 +125,7 @@ fn find_ref_types_from_field_list(field_list: &ast::FieldList) -> Option<Vec<ast
             .collect(),
     };
 
-    if ref_types.is_empty() {
-        None
-    } else {
-        Some(ref_types)
-    }
+    if ref_types.is_empty() { None } else { Some(ref_types) }
 }
 
 #[cfg(test)]
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs
index 57df39d541e94..887ec5aeec9a2 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs
@@ -1,16 +1,16 @@
 use hir::HasSource;
 use syntax::{
-    ast::{self, make, AstNode},
     Edition,
+    ast::{self, AstNode, make},
 };
 
 use crate::{
+    AssistId,
     assist_context::{AssistContext, Assists},
     utils::{
-        add_trait_assoc_items_to_impl, filter_assoc_items, gen_trait_fn_body, DefaultMethods,
-        IgnoreAssocItems,
+        DefaultMethods, IgnoreAssocItems, add_trait_assoc_items_to_impl, filter_assoc_items,
+        gen_trait_fn_body,
     },
-    AssistId, AssistKind,
 };
 
 // Assist: add_impl_missing_members
@@ -146,7 +146,7 @@ fn add_missing_impl_members_inner(
     }
 
     let target = impl_def.syntax().text_range();
-    acc.add(AssistId(assist_id, AssistKind::QuickFix), label, target, |edit| {
+    acc.add(AssistId::quick_fix(assist_id), label, target, |edit| {
         let new_impl_def = edit.make_mut(impl_def.clone());
         let first_new_item = add_trait_assoc_items_to_impl(
             &ctx.sema,
@@ -590,9 +590,9 @@ mod m {
 }
 
 impl m::Foo for () {
-    $0fn get_n(&self) -> usize { {40 + 2} }
+    $0fn get_n(&self) -> usize { N }
 
-    fn get_m(&self) -> usize { {m::VAL + 1} }
+    fn get_m(&self) -> usize { M }
 }"#,
         )
     }
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
index 37f5f44dfa020..8c1c83e3f716a 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
@@ -1,17 +1,17 @@
 use std::iter::{self, Peekable};
 
 use either::Either;
-use hir::{sym, Adt, Crate, HasAttrs, ImportPathConfig, ModuleDef, Semantics};
-use ide_db::syntax_helpers::suggest_name;
+use hir::{Adt, Crate, HasAttrs, ImportPathConfig, ModuleDef, Semantics, sym};
 use ide_db::RootDatabase;
+use ide_db::syntax_helpers::suggest_name;
 use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast};
 use itertools::Itertools;
 use syntax::ast::edit::IndentLevel;
 use syntax::ast::edit_in_place::Indent;
 use syntax::ast::syntax_factory::SyntaxFactory;
-use syntax::ast::{self, make, AstNode, MatchArmList, MatchExpr, Pat};
+use syntax::ast::{self, AstNode, MatchArmList, MatchExpr, Pat, make};
 
-use crate::{utils, AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists, utils};
 
 // Assist: add_missing_match_arms
 //
@@ -76,7 +76,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
 
     let cfg = ctx.config.import_path_config();
 
-    let make = SyntaxFactory::new();
+    let make = SyntaxFactory::with_mappings();
 
     let module = ctx.sema.scope(expr.syntax())?.module();
     let (mut missing_pats, is_non_exhaustive, has_hidden_variants): (
@@ -204,7 +204,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
     }
 
     acc.add(
-        AssistId("add_missing_match_arms", AssistKind::QuickFix),
+        AssistId::quick_fix("add_missing_match_arms"),
         "Fill match arms",
         ctx.sema.original_range(match_expr.syntax()).range,
         |builder| {
@@ -294,7 +294,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
             }
 
             editor.add_mappings(make.take());
-            builder.add_file_edits(ctx.file_id(), editor);
+            builder.add_file_edits(ctx.vfs_file_id(), editor);
         },
     )
 }
@@ -386,7 +386,7 @@ impl ExtendedEnum {
     fn is_non_exhaustive(self, db: &RootDatabase, krate: Crate) -> bool {
         match self {
             ExtendedEnum::Enum(e) => {
-                e.attrs(db).by_key(&sym::non_exhaustive).exists() && e.module(db).krate() != krate
+                e.attrs(db).by_key(sym::non_exhaustive).exists() && e.module(db).krate() != krate
             }
             _ => false,
         }
@@ -461,7 +461,7 @@ fn build_pat(
             let fields = var.fields(db);
             let pat: ast::Pat = match var.kind(db) {
                 hir::StructKind::Tuple => {
-                    let mut name_generator = suggest_name::NameGenerator::new();
+                    let mut name_generator = suggest_name::NameGenerator::default();
                     let pats = fields.into_iter().map(|f| {
                         let name = name_generator.for_type(&f.ty(db), db, edition);
                         match name {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_return_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_return_type.rs
index e5f0201bd527e..a7104ce068da8 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_return_type.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_return_type.rs
@@ -1,7 +1,7 @@
 use hir::HirDisplay;
-use syntax::{ast, match_ast, AstNode, SyntaxKind, SyntaxToken, TextRange, TextSize};
+use syntax::{AstNode, SyntaxKind, SyntaxToken, TextRange, TextSize, ast, match_ast};
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: add_return_type
 //
@@ -25,7 +25,7 @@ pub(crate) fn add_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt
     let ty = ty.display_source_code(ctx.db(), module.into(), true).ok()?;
 
     acc.add(
-        AssistId("add_return_type", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("add_return_type"),
         match fn_type {
             FnType::Function => "Add this function's return type",
             FnType::Closure { .. } => "Add this closure's return type",
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs
index 04d63f5bc8fe6..be13b04873c8e 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs
@@ -1,14 +1,14 @@
 use either::Either;
 use ide_db::defs::{Definition, NameRefClass};
 use syntax::{
-    ast::{self, make, syntax_factory::SyntaxFactory, HasArgList, HasGenericArgs},
-    syntax_editor::Position,
     AstNode,
+    ast::{self, HasArgList, HasGenericArgs, make, syntax_factory::SyntaxFactory},
+    syntax_editor::Position,
 };
 
 use crate::{
+    AssistId,
     assist_context::{AssistContext, Assists},
-    AssistId, AssistKind,
 };
 
 // Assist: add_turbo_fish
@@ -71,7 +71,7 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
     let def = match NameRefClass::classify(&ctx.sema, &name_ref)? {
         NameRefClass::Definition(def, _) => def,
         NameRefClass::FieldShorthand { .. } | NameRefClass::ExternCrateShorthand { .. } => {
-            return None
+            return None;
         }
     };
     let fun = match def {
@@ -89,7 +89,7 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
             let_stmt.pat()?;
 
             acc.add(
-                AssistId("add_type_ascription", AssistKind::RefactorRewrite),
+                AssistId::refactor_rewrite("add_type_ascription"),
                 "Add `: _` before assignment operator",
                 ident.text_range(),
                 |builder| {
@@ -119,7 +119,7 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
                         }
                     }
 
-                    builder.add_file_edits(ctx.file_id(), editor);
+                    builder.add_file_edits(ctx.vfs_file_id(), editor);
                 },
             )?
         } else {
@@ -135,13 +135,13 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
         .count();
 
     acc.add(
-        AssistId("add_turbo_fish", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("add_turbo_fish"),
         "Add `::<>`",
         ident.text_range(),
         |builder| {
             builder.trigger_parameter_hints();
 
-            let make = SyntaxFactory::new();
+            let make = SyntaxFactory::with_mappings();
             let mut editor = match &turbofish_target {
                 Either::Left(it) => builder.make_editor(it.syntax()),
                 Either::Right(it) => builder.make_editor(it.syntax()),
@@ -181,7 +181,7 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
             }
 
             editor.add_mappings(make.finish_with_mappings());
-            builder.add_file_edits(ctx.file_id(), editor);
+            builder.add_file_edits(ctx.vfs_file_id(), editor);
         },
     )
 }
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs
index 67bf8eed23df1..3b447d1f6d572 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs
@@ -6,19 +6,18 @@ use ide_db::{
     syntax_helpers::node_ext::{for_each_tail_expr, walk_expr},
 };
 use syntax::{
+    SyntaxKind, T,
     ast::{
-        self,
-        prec::{precedence, ExprPrecedence},
-        syntax_factory::SyntaxFactory,
-        AstNode,
+        self, AstNode,
         Expr::BinExpr,
         HasArgList,
+        prec::{ExprPrecedence, precedence},
+        syntax_factory::SyntaxFactory,
     },
     syntax_editor::{Position, SyntaxEditor},
-    SyntaxKind, T,
 };
 
-use crate::{utils::invert_boolean_expression, AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists, utils::invert_boolean_expression};
 
 // Assist: apply_demorgan
 //
@@ -65,7 +64,7 @@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
         _ => return None,
     };
 
-    let make = SyntaxFactory::new();
+    let make = SyntaxFactory::with_mappings();
 
     let demorganed = bin_expr.clone_subtree();
     let mut editor = SyntaxEditor::new(demorganed.syntax().clone());
@@ -108,11 +107,11 @@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
 
     acc.add_group(
         &GroupLabel("Apply De Morgan's law".to_owned()),
-        AssistId("apply_demorgan", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("apply_demorgan"),
         "Apply De Morgan's law",
         op_range,
         |builder| {
-            let make = SyntaxFactory::new();
+            let make = SyntaxFactory::with_mappings();
             let paren_expr = bin_expr.syntax().parent().and_then(ast::ParenExpr::cast);
             let neg_expr = paren_expr
                 .clone()
@@ -148,7 +147,7 @@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
             }
 
             editor.add_mappings(make.finish_with_mappings());
-            builder.add_file_edits(ctx.file_id(), editor);
+            builder.add_file_edits(ctx.vfs_file_id(), editor);
         },
     )
 }
@@ -191,11 +190,11 @@ pub(crate) fn apply_demorgan_iterator(acc: &mut Assists, ctx: &AssistContext<'_>
     let label = format!("Apply De Morgan's law to `Iterator::{}`", name.text().as_str());
     acc.add_group(
         &GroupLabel("Apply De Morgan's law".to_owned()),
-        AssistId("apply_demorgan_iterator", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("apply_demorgan_iterator"),
         label,
         op_range,
         |builder| {
-            let make = SyntaxFactory::new();
+            let make = SyntaxFactory::with_mappings();
             let mut editor = builder.make_editor(method_call.syntax());
             // replace the method name
             let new_name = match name.text().as_str() {
@@ -231,7 +230,7 @@ pub(crate) fn apply_demorgan_iterator(acc: &mut Assists, ctx: &AssistContext<'_>
             }
 
             editor.add_mappings(make.finish_with_mappings());
-            builder.add_file_edits(ctx.file_id(), editor);
+            builder.add_file_edits(ctx.vfs_file_id(), editor);
         },
     )
 }
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs
index a92a000c3fbd3..d310e11011be1 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs
@@ -1,16 +1,18 @@
 use std::cmp::Reverse;
 
-use hir::{db::HirDatabase, Module};
+use either::Either;
+use hir::{Module, Type, db::HirDatabase};
 use ide_db::{
+    active_parameter::ActiveParameter,
     helpers::mod_path_to_ast,
     imports::{
         import_assets::{ImportAssets, ImportCandidate, LocatedImport},
-        insert_use::{insert_use, insert_use_as_alias, ImportScope},
+        insert_use::{ImportScope, insert_use, insert_use_as_alias},
     },
 };
-use syntax::{ast, AstNode, Edition, NodeOrToken, SyntaxElement};
+use syntax::{AstNode, Edition, SyntaxNode, ast, match_ast};
 
-use crate::{AssistContext, AssistId, AssistKind, Assists, GroupLabel};
+use crate::{AssistContext, AssistId, Assists, GroupLabel};
 
 // Feature: Auto Import
 //
@@ -92,7 +94,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists, GroupLabel};
 pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
     let cfg = ctx.config.import_path_config();
 
-    let (import_assets, syntax_under_caret) = find_importable_node(ctx)?;
+    let (import_assets, syntax_under_caret, expected) = find_importable_node(ctx)?;
     let mut proposed_imports: Vec<_> = import_assets
         .search_for_imports(&ctx.sema, cfg, ctx.config.insert_use.prefix_kind)
         .collect();
@@ -100,17 +102,8 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
         return None;
     }
 
-    let range = match &syntax_under_caret {
-        NodeOrToken::Node(node) => ctx.sema.original_range(node).range,
-        NodeOrToken::Token(token) => token.text_range(),
-    };
-    let scope = ImportScope::find_insert_use_container(
-        &match syntax_under_caret {
-            NodeOrToken::Node(it) => it,
-            NodeOrToken::Token(it) => it.parent()?,
-        },
-        &ctx.sema,
-    )?;
+    let range = ctx.sema.original_range(&syntax_under_caret).range;
+    let scope = ImportScope::find_insert_use_container(&syntax_under_caret, &ctx.sema)?;
 
     // we aren't interested in different namespaces
     proposed_imports.sort_by(|a, b| a.import_path.cmp(&b.import_path));
@@ -118,8 +111,9 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
 
     let current_module = ctx.sema.scope(scope.as_syntax_node()).map(|scope| scope.module());
     // prioritize more relevant imports
-    proposed_imports
-        .sort_by_key(|import| Reverse(relevance_score(ctx, import, current_module.as_ref())));
+    proposed_imports.sort_by_key(|import| {
+        Reverse(relevance_score(ctx, import, expected.as_ref(), current_module.as_ref()))
+    });
     let edition = current_module.map(|it| it.krate().edition(ctx.db())).unwrap_or(Edition::CURRENT);
 
     let group_label = group_label(import_assets.import_candidate());
@@ -127,7 +121,7 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
         let import_path = import.import_path;
 
         let (assist_id, import_name) =
-            (AssistId("auto_import", AssistKind::QuickFix), import_path.display(ctx.db(), edition));
+            (AssistId::quick_fix("auto_import"), import_path.display(ctx.db(), edition));
         acc.add_group(
             &group_label,
             assist_id,
@@ -180,22 +174,61 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
 
 pub(super) fn find_importable_node(
     ctx: &AssistContext<'_>,
-) -> Option<(ImportAssets, SyntaxElement)> {
+) -> Option<(ImportAssets, SyntaxNode, Option<Type>)> {
+    // Deduplicate this with the `expected_type_and_name` logic for completions
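+    // Compute the type expected at the importable node's position (from an enclosing argument
+    // list or `let` statement) so imports producing a matching type can be ranked higher.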
+    let expected = |expr_or_pat: Either<ast::Expr, ast::Pat>| match expr_or_pat {
+        Either::Left(expr) => {
+            let parent = expr.syntax().parent()?;
+            // FIXME: Expand this
+            match_ast! {
+                match parent {
+                    ast::ArgList(list) => {
+                        ActiveParameter::at_arg(
+                            &ctx.sema,
+                            list,
+                            expr.syntax().text_range().start(),
+                        ).map(|ap| ap.ty)
+                    },
+                    ast::LetStmt(stmt) => {
+                        ctx.sema.type_of_pat(&stmt.pat()?).map(|t| t.original)
+                    },
+                    _ => None,
+                }
+            }
+        }
+        Either::Right(pat) => {
+            let parent = pat.syntax().parent()?;
+            // FIXME: Expand this
+            match_ast! {
+                match parent {
+                    ast::LetStmt(stmt) => {
+                        ctx.sema.type_of_expr(&stmt.initializer()?).map(|t| t.original)
+                    },
+                    _ => None,
+                }
+            }
+        }
+    };
+
     if let Some(path_under_caret) = ctx.find_node_at_offset_with_descend::<ast::Path>() {
+        let expected =
+            path_under_caret.top_path().syntax().parent().and_then(Either::cast).and_then(expected);
         ImportAssets::for_exact_path(&path_under_caret, &ctx.sema)
-            .zip(Some(path_under_caret.syntax().clone().into()))
+            .map(|it| (it, path_under_caret.syntax().clone(), expected))
     } else if let Some(method_under_caret) =
         ctx.find_node_at_offset_with_descend::<ast::MethodCallExpr>()
     {
+        let expected = expected(Either::Left(method_under_caret.clone().into()));
         ImportAssets::for_method_call(&method_under_caret, &ctx.sema)
-            .zip(Some(method_under_caret.syntax().clone().into()))
+            .map(|it| (it, method_under_caret.syntax().clone(), expected))
     } else if ctx.find_node_at_offset_with_descend::<ast::Param>().is_some() {
         None
     } else if let Some(pat) = ctx
         .find_node_at_offset_with_descend::<ast::IdentPat>()
         .filter(ast::IdentPat::is_simple_ident)
     {
-        ImportAssets::for_ident_pat(&ctx.sema, &pat).zip(Some(pat.syntax().clone().into()))
+        let expected = expected(Either::Right(pat.clone().into()));
+        ImportAssets::for_ident_pat(&ctx.sema, &pat).map(|it| (it, pat.syntax().clone(), expected))
     } else {
         None
     }
@@ -219,6 +252,7 @@ fn group_label(import_candidate: &ImportCandidate) -> GroupLabel {
 pub(crate) fn relevance_score(
     ctx: &AssistContext<'_>,
     import: &LocatedImport,
+    expected: Option<&Type>,
     current_module: Option<&Module>,
 ) -> i32 {
     let mut score = 0;
@@ -230,6 +264,35 @@ pub(crate) fn relevance_score(
         hir::ItemInNs::Macros(makro) => Some(makro.module(db)),
     };
 
+    if let Some(expected) = expected {
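+        // Work out the type the imported item would produce: the return type for functions,
+        // the constructor type for enum variants, and the item's own type otherwise.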
+        let ty = match import.item_to_import {
+            hir::ItemInNs::Types(module_def) | hir::ItemInNs::Values(module_def) => {
+                match module_def {
+                    hir::ModuleDef::Function(function) => Some(function.ret_type(ctx.db())),
+                    hir::ModuleDef::Adt(adt) => Some(match adt {
+                        hir::Adt::Struct(it) => it.ty(ctx.db()),
+                        hir::Adt::Union(it) => it.ty(ctx.db()),
+                        hir::Adt::Enum(it) => it.ty(ctx.db()),
+                    }),
+                    hir::ModuleDef::Variant(variant) => Some(variant.constructor_ty(ctx.db())),
+                    hir::ModuleDef::Const(it) => Some(it.ty(ctx.db())),
+                    hir::ModuleDef::Static(it) => Some(it.ty(ctx.db())),
+                    hir::ModuleDef::TypeAlias(it) => Some(it.ty(ctx.db())),
+                    hir::ModuleDef::BuiltinType(it) => Some(it.ty(ctx.db())),
+                    _ => None,
+                }
+            }
+            hir::ItemInNs::Macros(_) => None,
+        };
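+        // An exact match with the expected type gets the largest boost; a type that could
+        // merely unify with it gets a smaller one.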
+        if let Some(ty) = ty {
+            if ty == *expected {
+                score = 100000;
+            } else if ty.could_unify_with(ctx.db(), expected) {
+                score = 10000;
+            }
+        }
+    }
+
     match item_module.zip(current_module) {
         // get the distance between the imported path and the current module
         // (prefer items that are more local)
@@ -279,12 +342,12 @@ mod tests {
     use super::*;
 
     use hir::{FileRange, Semantics};
-    use ide_db::{assists::AssistResolveStrategy, RootDatabase};
+    use ide_db::{RootDatabase, assists::AssistResolveStrategy};
     use test_fixture::WithFixture;
 
     use crate::tests::{
-        check_assist, check_assist_by_label, check_assist_not_applicable, check_assist_target,
-        TEST_CONFIG,
+        TEST_CONFIG, check_assist, check_assist_by_label, check_assist_not_applicable,
+        check_assist_target,
     };
 
     fn check_auto_import_order(before: &str, order: &[&str]) {
@@ -554,7 +617,7 @@ mod baz {
             }
             ",
             r"
-            use PubMod3::PubStruct;
+            use PubMod1::PubStruct;
 
             PubStruct
 
@@ -1722,4 +1785,96 @@ mod foo {
             ",
         );
     }
+
+    #[test]
+    fn prefers_type_match() {
+        check_assist(
+            auto_import,
+            r"
+mod sync { pub mod atomic { pub enum Ordering { V } } }
+mod cmp { pub enum Ordering { V } }
+fn takes_ordering(_: sync::atomic::Ordering) {}
+fn main() {
+    takes_ordering(Ordering$0);
+}
+",
+            r"
+use sync::atomic::Ordering;
+
+mod sync { pub mod atomic { pub enum Ordering { V } } }
+mod cmp { pub enum Ordering { V } }
+fn takes_ordering(_: sync::atomic::Ordering) {}
+fn main() {
+    takes_ordering(Ordering);
+}
+",
+        );
+        check_assist(
+            auto_import,
+            r"
+mod sync { pub mod atomic { pub enum Ordering { V } } }
+mod cmp { pub enum Ordering { V } }
+fn takes_ordering(_: cmp::Ordering) {}
+fn main() {
+    takes_ordering(Ordering$0);
+}
+",
+            r"
+use cmp::Ordering;
+
+mod sync { pub mod atomic { pub enum Ordering { V } } }
+mod cmp { pub enum Ordering { V } }
+fn takes_ordering(_: cmp::Ordering) {}
+fn main() {
+    takes_ordering(Ordering);
+}
+",
+        );
+    }
+
+    #[test]
+    fn prefers_type_match2() {
+        check_assist(
+            auto_import,
+            r"
+mod sync { pub mod atomic { pub enum Ordering { V } } }
+mod cmp { pub enum Ordering { V } }
+fn takes_ordering(_: sync::atomic::Ordering) {}
+fn main() {
+    takes_ordering(Ordering$0::V);
+}
+",
+            r"
+use sync::atomic::Ordering;
+
+mod sync { pub mod atomic { pub enum Ordering { V } } }
+mod cmp { pub enum Ordering { V } }
+fn takes_ordering(_: sync::atomic::Ordering) {}
+fn main() {
+    takes_ordering(Ordering::V);
+}
+",
+        );
+        check_assist(
+            auto_import,
+            r"
+mod sync { pub mod atomic { pub enum Ordering { V } } }
+mod cmp { pub enum Ordering { V } }
+fn takes_ordering(_: cmp::Ordering) {}
+fn main() {
+    takes_ordering(Ordering$0::V);
+}
+",
+            r"
+use cmp::Ordering;
+
+mod sync { pub mod atomic { pub enum Ordering { V } } }
+mod cmp { pub enum Ordering { V } }
+fn takes_ordering(_: cmp::Ordering) {}
+fn main() {
+    takes_ordering(Ordering::V);
+}
+",
+        );
+    }
 }
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bind_unused_param.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bind_unused_param.rs
index 8f053f4df949f..00c7d25b257b2 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bind_unused_param.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bind_unused_param.rs
@@ -1,12 +1,8 @@
 use crate::assist_context::{AssistContext, Assists};
-use ide_db::{
-    assists::{AssistId, AssistKind},
-    defs::Definition,
-    LineIndexDatabase,
-};
+use ide_db::{LineIndexDatabase, assists::AssistId, defs::Definition};
 use syntax::{
-    ast::{self, edit_in_place::Indent},
     AstNode,
+    ast::{self, edit_in_place::Indent},
 };
 
 // Assist: bind_unused_param
@@ -42,11 +38,11 @@ pub(crate) fn bind_unused_param(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
     let r_curly_range = stmt_list.r_curly_token()?.text_range();
 
     acc.add(
-        AssistId("bind_unused_param", AssistKind::QuickFix),
+        AssistId::quick_fix("bind_unused_param"),
         format!("Bind as `let _ = {ident_pat};`"),
         param.syntax().text_range(),
         |builder| {
-            let line_index = ctx.db().line_index(ctx.file_id().into());
+            let line_index = ctx.db().line_index(ctx.vfs_file_id());
 
             let indent = func.indent_level();
             let text_indent = indent + 1;
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/change_visibility.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/change_visibility.rs
index 07fd5e34181ef..9b9f0c4522ed2 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/change_visibility.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/change_visibility.rs
@@ -1,14 +1,14 @@
 use syntax::{
-    ast::{self, HasName, HasVisibility},
     AstNode,
     SyntaxKind::{
         self, ASSOC_ITEM_LIST, CONST, ENUM, FN, MACRO_DEF, MODULE, SOURCE_FILE, STATIC, STRUCT,
         TRAIT, TYPE_ALIAS, USE, VISIBILITY,
     },
     SyntaxNode, T,
+    ast::{self, HasName, HasVisibility},
 };
 
-use crate::{utils::vis_offset, AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists, utils::vis_offset};
 
 // Assist: change_visibility
 //
@@ -76,7 +76,7 @@ fn add_vis(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
     };
 
     acc.add(
-        AssistId("change_visibility", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("change_visibility"),
         "Change visibility to pub(crate)",
         target,
         |edit| {
@@ -112,7 +112,7 @@ fn change_vis(acc: &mut Assists, vis: ast::Visibility) -> Option<()> {
     if vis.syntax().text() == "pub" {
         let target = vis.syntax().text_range();
         return acc.add(
-            AssistId("change_visibility", AssistKind::RefactorRewrite),
+            AssistId::refactor_rewrite("change_visibility"),
             "Change Visibility to pub(crate)",
             target,
             |edit| {
@@ -123,7 +123,7 @@ fn change_vis(acc: &mut Assists, vis: ast::Visibility) -> Option<()> {
     if vis.syntax().text() == "pub(crate)" {
         let target = vis.syntax().text_range();
         return acc.add(
-            AssistId("change_visibility", AssistKind::RefactorRewrite),
+            AssistId::refactor_rewrite("change_visibility"),
             "Change visibility to pub",
             target,
             |edit| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs
index 151c71c0a767e..cd23ad2237298 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs
@@ -1,21 +1,21 @@
-use hir::{sym, AsAssocItem, Semantics};
+use hir::{AsAssocItem, Semantics, sym};
 use ide_db::{
+    RootDatabase,
     famous_defs::FamousDefs,
     syntax_helpers::node_ext::{
         block_as_lone_tail, for_each_tail_expr, is_pattern_cond, preorder_expr,
     },
-    RootDatabase,
 };
 use itertools::Itertools;
 use syntax::{
-    ast::{self, edit::AstNodeEdit, syntax_factory::SyntaxFactory, HasArgList},
-    syntax_editor::SyntaxEditor,
     AstNode, SyntaxNode,
+    ast::{self, HasArgList, edit::AstNodeEdit, syntax_factory::SyntaxFactory},
+    syntax_editor::SyntaxEditor,
 };
 
 use crate::{
+    AssistContext, AssistId, Assists,
     utils::{invert_boolean_expression, unwrap_trivial_block},
-    AssistContext, AssistId, AssistKind, Assists,
 };
 
 // Assist: convert_if_to_bool_then
@@ -73,7 +73,7 @@ pub(crate) fn convert_if_to_bool_then(acc: &mut Assists, ctx: &AssistContext<'_>
 
     let target = expr.syntax().text_range();
     acc.add(
-        AssistId("convert_if_to_bool_then", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("convert_if_to_bool_then"),
         "Convert `if` expression to `bool::then` call",
         target,
         |builder| {
@@ -98,7 +98,7 @@ pub(crate) fn convert_if_to_bool_then(acc: &mut Assists, ctx: &AssistContext<'_>
             let closure_body = ast::Expr::cast(edit.new_root().clone()).unwrap();
 
             let mut editor = builder.make_editor(expr.syntax());
-            let make = SyntaxFactory::new();
+            let make = SyntaxFactory::with_mappings();
             let closure_body = match closure_body {
                 ast::Expr::BlockExpr(block) => unwrap_trivial_block(block),
                 e => e,
@@ -135,7 +135,7 @@ pub(crate) fn convert_if_to_bool_then(acc: &mut Assists, ctx: &AssistContext<'_>
             editor.replace(expr.syntax(), mcall.syntax());
 
             editor.add_mappings(make.finish_with_mappings());
-            builder.add_file_edits(ctx.file_id(), editor);
+            builder.add_file_edits(ctx.vfs_file_id(), editor);
         },
     )
 }
@@ -181,7 +181,7 @@ pub(crate) fn convert_bool_then_to_if(acc: &mut Assists, ctx: &AssistContext<'_>
 
     let target = mcall.syntax().text_range();
     acc.add(
-        AssistId("convert_bool_then_to_if", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("convert_bool_then_to_if"),
         "Convert `bool::then` call to `if`",
         target,
         |builder| {
@@ -216,7 +216,7 @@ pub(crate) fn convert_bool_then_to_if(acc: &mut Assists, ctx: &AssistContext<'_>
             let closure_body = ast::BlockExpr::cast(edit.new_root().clone()).unwrap();
 
             let mut editor = builder.make_editor(mcall.syntax());
-            let make = SyntaxFactory::new();
+            let make = SyntaxFactory::with_mappings();
 
             let cond = match &receiver {
                 ast::Expr::ParenExpr(expr) => expr.expr().unwrap_or(receiver),
@@ -233,7 +233,7 @@ pub(crate) fn convert_bool_then_to_if(acc: &mut Assists, ctx: &AssistContext<'_>
             editor.replace(mcall.syntax().clone(), if_expr.syntax().clone());
 
             editor.add_mappings(make.finish_with_mappings());
-            builder.add_file_edits(ctx.file_id(), editor);
+            builder.add_file_edits(ctx.vfs_file_id(), editor);
         },
     )
 }
@@ -245,7 +245,7 @@ fn option_variants(
     let fam = FamousDefs(sema, sema.scope(expr)?.krate());
     let option_variants = fam.core_option_Option()?.variants(sema.db);
     match &*option_variants {
-        &[variant0, variant1] => Some(if variant0.name(sema.db) == sym::None.clone() {
+        &[variant0, variant1] => Some(if variant0.name(sema.db) == sym::None {
             (variant0, variant1)
         } else {
             (variant1, variant0)
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_to_enum.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_to_enum.rs
index 7716e99e604b3..00e9fdf124d16 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_to_enum.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_to_enum.rs
@@ -2,23 +2,23 @@ use either::Either;
 use hir::ModuleDef;
 use ide_db::text_edit::TextRange;
 use ide_db::{
-    assists::{AssistId, AssistKind},
+    FxHashSet,
+    assists::AssistId,
     defs::Definition,
     helpers::mod_path_to_ast,
-    imports::insert_use::{insert_use, ImportScope},
+    imports::insert_use::{ImportScope, insert_use},
     search::{FileReference, UsageSearchResult},
     source_change::SourceChangeBuilder,
-    FxHashSet,
 };
 use itertools::Itertools;
 use syntax::{
+    AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T,
     ast::{
-        self,
+        self, HasName,
         edit::IndentLevel,
         edit_in_place::{AttrsOwnerEdit, Indent},
-        make, HasName,
+        make,
     },
-    AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T,
 };
 
 use crate::{
@@ -62,7 +62,7 @@ pub(crate) fn convert_bool_to_enum(acc: &mut Assists, ctx: &AssistContext<'_>) -
 
     let target = name.syntax().text_range();
     acc.add(
-        AssistId("convert_bool_to_enum", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("convert_bool_to_enum"),
         "Convert boolean to enum",
         target,
         |edit| {
@@ -209,7 +209,7 @@ fn replace_usages(
     delayed_mutations: &mut Vec<(ImportScope, ast::Path)>,
 ) {
     for (file_id, references) in usages {
-        edit.edit_file(file_id.file_id());
+        edit.edit_file(file_id.file_id(ctx.db()));
 
         let refs_with_imports = augment_references_with_imports(ctx, references, target_module);
 
@@ -1136,7 +1136,7 @@ fn foo() {
 }
 
 //- /main.rs
-use foo::Foo;
+use foo::{Bool, Foo};
 
 mod foo;
 
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs
index d34cf895cd90a..1d3a2db3352bb 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs
@@ -1,23 +1,20 @@
 use either::Either;
 use hir::{CaptureKind, ClosureCapture, FileRangeWrapper, HirDisplay};
 use ide_db::{
-    assists::{AssistId, AssistKind},
-    base_db::SourceDatabase,
-    defs::Definition,
-    search::FileReferenceNode,
-    source_change::SourceChangeBuilder,
-    FxHashSet,
+    FxHashSet, assists::AssistId, base_db::SourceDatabase, defs::Definition,
+    search::FileReferenceNode, source_change::SourceChangeBuilder,
 };
 use stdx::format_to;
 use syntax::{
+    AstNode, Direction, SyntaxKind, SyntaxNode, T, TextSize, ToSmolStr,
     algo::{skip_trivia_token, skip_whitespace_token},
     ast::{
-        self,
+        self, HasArgList, HasGenericParams, HasName,
         edit::{AstNodeEdit, IndentLevel},
-        make, HasArgList, HasGenericParams, HasName,
+        make,
     },
     hacks::parse_expr_from_str,
-    ted, AstNode, Direction, SyntaxKind, SyntaxNode, TextSize, ToSmolStr, T,
+    ted,
 };
 
 use crate::assist_context::{AssistContext, Assists};
@@ -146,7 +143,7 @@ pub(crate) fn convert_closure_to_fn(acc: &mut Assists, ctx: &AssistContext<'_>)
     };
 
     acc.add(
-        AssistId("convert_closure_to_fn", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("convert_closure_to_fn"),
         "Convert closure to fn",
         closure.param_list()?.syntax().text_range(),
         |builder| {
@@ -252,7 +249,7 @@ pub(crate) fn convert_closure_to_fn(acc: &mut Assists, ctx: &AssistContext<'_>)
             );
             fn_ = fn_.dedent(IndentLevel::from_token(&fn_.syntax().last_token().unwrap()));
 
-            builder.edit_file(ctx.file_id());
+            builder.edit_file(ctx.vfs_file_id());
             match &closure_name {
                 Some((closure_decl, _, _)) => {
                     fn_ = fn_.indent(closure_decl.indent_level());
@@ -509,9 +506,8 @@ fn wrap_capture_in_deref_if_needed(
 }
 
 fn capture_as_arg(ctx: &AssistContext<'_>, capture: &ClosureCapture) -> ast::Expr {
-    let place =
-        parse_expr_from_str(&capture.display_place_source_code(ctx.db()), ctx.file_id().edition())
-            .expect("`display_place_source_code()` produced an invalid expr");
+    let place = parse_expr_from_str(&capture.display_place_source_code(ctx.db()), ctx.edition())
+        .expect("`display_place_source_code()` produced an invalid expr");
     let needs_mut = match capture.kind() {
         CaptureKind::SharedRef => false,
         CaptureKind::MutableRef | CaptureKind::UniqueSharedRef => true,
@@ -590,7 +586,7 @@ fn handle_call(
     let indent =
         if insert_newlines { first_arg_indent.unwrap().to_string() } else { String::new() };
     // FIXME: This text manipulation seems risky.
-    let text = ctx.db().file_text(file_id.file_id());
+    let text = ctx.db().file_text(file_id.file_id(ctx.db())).text(ctx.db());
     let mut text = text[..u32::from(range.end()).try_into().unwrap()].trim_end();
     if !text.ends_with(')') {
         return None;
@@ -633,7 +629,7 @@ fn handle_call(
         to_insert.push(',');
     }
 
-    builder.edit_file(file_id);
+    builder.edit_file(file_id.file_id(ctx.db()));
     builder.insert(offset, to_insert);
 
     Some(())
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_block.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_block.rs
index fbc0b9f6739ff..0d36a5ddb304c 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_block.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_block.rs
@@ -1,10 +1,10 @@
 use itertools::Itertools;
 use syntax::{
-    ast::{self, edit::IndentLevel, Comment, CommentKind, CommentShape, Whitespace},
     AstToken, Direction, SyntaxElement, TextRange,
+    ast::{self, Comment, CommentKind, CommentShape, Whitespace, edit::IndentLevel},
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: line_to_block
 //
@@ -38,7 +38,7 @@ fn block_to_line(acc: &mut Assists, comment: ast::Comment) -> Option<()> {
     let target = comment.syntax().text_range();
 
     acc.add(
-        AssistId("block_to_line", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("block_to_line"),
         "Replace block comment with line comments",
         target,
         |edit| {
@@ -80,7 +80,7 @@ fn line_to_block(acc: &mut Assists, comment: ast::Comment) -> Option<()> {
     );
 
     acc.add(
-        AssistId("line_to_block", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("line_to_block"),
         "Replace line comments with a single block comment",
         target,
         |edit| {
@@ -167,11 +167,7 @@ pub(crate) fn line_comment_text(indentation: IndentLevel, comm: ast::Comment) ->
     let contents = contents_without_prefix.strip_prefix(' ').unwrap_or(contents_without_prefix);
 
     // Don't add the indentation if the line is empty
-    if contents.is_empty() {
-        contents.to_owned()
-    } else {
-        indentation.to_string() + contents
-    }
+    if contents.is_empty() { contents.to_owned() } else { indentation.to_string() + contents }
 }
 
 #[cfg(test)]
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_from_or_to_doc.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_from_or_to_doc.rs
index 5a9db67a5fb68..187cc74306e25 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_from_or_to_doc.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_from_or_to_doc.rs
@@ -1,10 +1,10 @@
 use itertools::Itertools;
 use syntax::{
-    ast::{self, edit::IndentLevel, Comment, CommentPlacement, Whitespace},
     AstToken, Direction, SyntaxElement, TextRange,
+    ast::{self, Comment, CommentPlacement, Whitespace, edit::IndentLevel},
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: comment_to_doc
 //
@@ -39,7 +39,7 @@ fn doc_to_comment(acc: &mut Assists, comment: ast::Comment) -> Option<()> {
     };
 
     acc.add(
-        AssistId("doc_to_comment", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("doc_to_comment"),
         "Replace doc comment with comment",
         target,
         |edit| {
@@ -86,7 +86,7 @@ fn comment_to_doc(acc: &mut Assists, comment: ast::Comment, style: CommentPlacem
     };
 
     acc.add(
-        AssistId("comment_to_doc", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("comment_to_doc"),
         "Replace comment with doc comment",
         target,
         |edit| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_for_to_while_let.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_for_to_while_let.rs
new file mode 100644
index 0000000000000..2d6a59a7c365c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_for_to_while_let.rs
@@ -0,0 +1,422 @@
+use hir::{
+    Name,
+    sym::{self},
+};
+use ide_db::{famous_defs::FamousDefs, syntax_helpers::suggest_name};
+use syntax::{
+    AstNode,
+    ast::{self, HasLoopBody, edit::IndentLevel, make, syntax_factory::SyntaxFactory},
+    syntax_editor::Position,
+};
+
+use crate::{AssistContext, AssistId, Assists};
+
+// Assist: convert_for_loop_to_while_let
+//
+// Converts a for loop into a `while let` loop over its iterator.
+//
+// ```
+// fn main() {
+//     let x = vec![1, 2, 3];
+//     for$0 v in x {
+//         let y = v * 2;
+//     };
+// }
+// ```
+// ->
+// ```
+// fn main() {
+//     let x = vec![1, 2, 3];
+//     let mut tmp = x.into_iter();
+//     while let Some(v) = tmp.next() {
+//         let y = v * 2;
+//     };
+// }
+// ```
+pub(crate) fn convert_for_loop_to_while_let(
+    acc: &mut Assists,
+    ctx: &AssistContext<'_>,
+) -> Option<()> {
+    let for_loop = ctx.find_node_at_offset::<ast::ForExpr>()?;
+    let iterable = for_loop.iterable()?;
+    let pat = for_loop.pat()?;
+    let body = for_loop.loop_body()?;
+    if body.syntax().text_range().start() < ctx.offset() {
+        cov_mark::hit!(not_available_in_body);
+        return None;
+    }
+
+    acc.add(
+        AssistId::refactor_rewrite("convert_for_loop_to_while_let"),
+        "Replace this for loop with `while let`",
+        for_loop.syntax().text_range(),
+        |builder| {
+            let make = SyntaxFactory::with_mappings();
+            let mut editor = builder.make_editor(for_loop.syntax());
+
+            let (iterable, method) = if impls_core_iter(&ctx.sema, &iterable) {
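+            // Decide how to obtain an iterator: keep the iterable as-is if it already implements
+            // `Iterator`, call `iter()`/`iter_mut()` through a reference when such a method
+            // exists, and otherwise fall back to `into_iter()` (parenthesizing bare `&`/`&mut`
+            // expressions first so the method call parses correctly).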
+                (iterable, None)
+            } else if let Some((expr, method)) = is_ref_and_impls_iter_method(&ctx.sema, &iterable)
+            {
+                (expr, Some(make.name_ref(method.as_str())))
+            } else if let ast::Expr::RefExpr(_) = iterable {
+                (make::expr_paren(iterable).into(), Some(make.name_ref("into_iter")))
+            } else {
+                (iterable, Some(make.name_ref("into_iter")))
+            };
+
+            let iterable = if let Some(method) = method {
+                make::expr_method_call(iterable, method, make::arg_list([])).into()
+            } else {
+                iterable
+            };
+
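+            // Bind the iterator to a fresh, collision-free local so the `while let` loop can
+            // repeatedly call `next()` on it.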
+            let mut new_name = suggest_name::NameGenerator::new_from_scope_locals(
+                ctx.sema.scope(for_loop.syntax()),
+            );
+            let tmp_var = new_name.suggest_name("tmp");
+
+            let mut_expr = make.let_stmt(
+                make.ident_pat(false, true, make.name(&tmp_var)).into(),
+                None,
+                Some(iterable),
+            );
+            let indent = IndentLevel::from_node(for_loop.syntax());
+            editor.insert(
+                Position::before(for_loop.syntax()),
+                make::tokens::whitespace(format!("\n{indent}").as_str()),
+            );
+            editor.insert(Position::before(for_loop.syntax()), mut_expr.syntax());
+
+            let opt_pat = make.tuple_struct_pat(make::ext::ident_path("Some"), [pat]);
+            let iter_next_expr = make.expr_method_call(
+                make.expr_path(make::ext::ident_path(&tmp_var)),
+                make.name_ref("next"),
+                make.arg_list([]),
+            );
+            let cond = make.expr_let(opt_pat.into(), iter_next_expr.into());
+
+            let while_loop = make.expr_while_loop(cond.into(), body);
+
+            editor.replace(for_loop.syntax(), while_loop.syntax());
+
+            editor.add_mappings(make.finish_with_mappings());
+            builder.add_file_edits(ctx.vfs_file_id(), editor);
+        },
+    )
+}
+
+/// If `iterable` is a reference whose pointee has an `Iterator`-returning method named `iter`
+/// (for shared references) or `iter_mut` (for mutable ones), returns the expression behind the
+/// reference together with that method's name.
+fn is_ref_and_impls_iter_method(
+    sema: &hir::Semantics<'_, ide_db::RootDatabase>,
+    iterable: &ast::Expr,
+) -> Option<(ast::Expr, hir::Name)> {
+    let ref_expr = match iterable {
+        ast::Expr::RefExpr(r) => r,
+        _ => return None,
+    };
+    let wanted_method = Name::new_symbol_root(if ref_expr.mut_token().is_some() {
+        sym::iter_mut
+    } else {
+        sym::iter
+    });
+    let expr_behind_ref = ref_expr.expr()?;
+    let ty = sema.type_of_expr(&expr_behind_ref)?.adjusted();
+    let scope = sema.scope(iterable.syntax())?;
+    let krate = scope.krate();
+    let iter_trait = FamousDefs(sema, krate).core_iter_Iterator()?;
+
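+    // The pointee must actually provide the `iter`/`iter_mut` method, and that method must
+    // return a type implementing `Iterator`.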
+    let has_wanted_method = ty
+        .iterate_method_candidates(sema.db, &scope, None, Some(&wanted_method), |func| {
+            if func.ret_type(sema.db).impls_trait(sema.db, iter_trait, &[]) {
+                return Some(());
+            }
+            None
+        })
+        .is_some();
+    if !has_wanted_method {
+        return None;
+    }
+
+    Some((expr_behind_ref, wanted_method))
+}
+
+/// Whether the iterable's type implements `core::iter::Iterator`.
+fn impls_core_iter(sema: &hir::Semantics<'_, ide_db::RootDatabase>, iterable: &ast::Expr) -> bool {
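+    // Any failure to resolve the type, scope, or `Iterator` trait falls through to `false`.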
+    (|| {
+        let it_typ = sema.type_of_expr(iterable)?.adjusted();
+
+        let module = sema.scope(iterable.syntax())?.module();
+
+        let krate = module.krate();
+        let iter_trait = FamousDefs(sema, krate).core_iter_Iterator()?;
+        cov_mark::hit!(test_already_impls_iterator);
+        Some(it_typ.impls_trait(sema.db, iter_trait, &[]))
+    })()
+    .unwrap_or(false)
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::tests::{check_assist, check_assist_not_applicable};
+
+    use super::*;
+
+    #[test]
+    fn each_to_for_simple_for() {
+        check_assist(
+            convert_for_loop_to_while_let,
+            r"
+fn main() {
+    let mut x = vec![1, 2, 3];
+    for $0v in x {
+        v *= 2;
+    };
+}",
+            r"
+fn main() {
+    let mut x = vec![1, 2, 3];
+    let mut tmp = x.into_iter();
+    while let Some(v) = tmp.next() {
+        v *= 2;
+    };
+}",
+        )
+    }
+
+    #[test]
+    fn each_to_for_for_in_range() {
+        check_assist(
+            convert_for_loop_to_while_let,
+            r#"
+//- minicore: range, iterators
+impl<T> core::iter::Iterator for core::ops::Range<T> {
+    type Item = T;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        None
+    }
+}
+
+fn main() {
+    for $0x in 0..92 {
+        print!("{}", x);
+    }
+}"#,
+            r#"
+impl<T> core::iter::Iterator for core::ops::Range<T> {
+    type Item = T;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        None
+    }
+}
+
+fn main() {
+    let mut tmp = 0..92;
+    while let Some(x) = tmp.next() {
+        print!("{}", x);
+    }
+}"#,
+        )
+    }
+
+    #[test]
+    fn each_to_for_not_available_in_body() {
+        cov_mark::check!(not_available_in_body);
+        check_assist_not_applicable(
+            convert_for_loop_to_while_let,
+            r"
+fn main() {
+    let mut x = vec![1, 2, 3];
+    for v in x {
+        $0v *= 2;
+    }
+}",
+        )
+    }
+
+    #[test]
+    fn each_to_for_for_borrowed() {
+        check_assist(
+            convert_for_loop_to_while_let,
+            r#"
+//- minicore: iterators
+use core::iter::{Repeat, repeat};
+
+struct S;
+impl S {
+    fn iter(&self) -> Repeat<i32> { repeat(92) }
+    fn iter_mut(&mut self) -> Repeat<i32> { repeat(92) }
+}
+
+fn main() {
+    let x = S;
+    for $0v in &x {
+        let a = v * 2;
+    }
+}
+"#,
+            r#"
+use core::iter::{Repeat, repeat};
+
+struct S;
+impl S {
+    fn iter(&self) -> Repeat<i32> { repeat(92) }
+    fn iter_mut(&mut self) -> Repeat<i32> { repeat(92) }
+}
+
+fn main() {
+    let x = S;
+    let mut tmp = x.iter();
+    while let Some(v) = tmp.next() {
+        let a = v * 2;
+    }
+}
+"#,
+        )
+    }
+
+    #[test]
+    fn each_to_for_for_borrowed_no_iter_method() {
+        check_assist(
+            convert_for_loop_to_while_let,
+            r"
+struct NoIterMethod;
+fn main() {
+    let x = NoIterMethod;
+    for $0v in &x {
+        let a = v * 2;
+    }
+}
+",
+            r"
+struct NoIterMethod;
+fn main() {
+    let x = NoIterMethod;
+    let mut tmp = (&x).into_iter();
+    while let Some(v) = tmp.next() {
+        let a = v * 2;
+    }
+}
+",
+        )
+    }
+
+    #[test]
+    fn each_to_for_for_borrowed_no_iter_method_mut() {
+        check_assist(
+            convert_for_loop_to_while_let,
+            r"
+struct NoIterMethod;
+fn main() {
+    let x = NoIterMethod;
+    for $0v in &mut x {
+        let a = v * 2;
+    }
+}
+",
+            r"
+struct NoIterMethod;
+fn main() {
+    let x = NoIterMethod;
+    let mut tmp = (&mut x).into_iter();
+    while let Some(v) = tmp.next() {
+        let a = v * 2;
+    }
+}
+",
+        )
+    }
+
+    #[test]
+    fn each_to_for_for_borrowed_mut() {
+        check_assist(
+            convert_for_loop_to_while_let,
+            r#"
+//- minicore: iterators
+use core::iter::{Repeat, repeat};
+
+struct S;
+impl S {
+    fn iter(&self) -> Repeat<i32> { repeat(92) }
+    fn iter_mut(&mut self) -> Repeat<i32> { repeat(92) }
+}
+
+fn main() {
+    let x = S;
+    for $0v in &mut x {
+        let a = v * 2;
+    }
+}
+"#,
+            r#"
+use core::iter::{Repeat, repeat};
+
+struct S;
+impl S {
+    fn iter(&self) -> Repeat<i32> { repeat(92) }
+    fn iter_mut(&mut self) -> Repeat<i32> { repeat(92) }
+}
+
+fn main() {
+    let x = S;
+    let mut tmp = x.iter_mut();
+    while let Some(v) = tmp.next() {
+        let a = v * 2;
+    }
+}
+"#,
+        )
+    }
+
+    #[test]
+    fn each_to_for_for_borrowed_mut_behind_var() {
+        check_assist(
+            convert_for_loop_to_while_let,
+            r"
+fn main() {
+    let mut x = vec![1, 2, 3];
+    let y = &mut x;
+    for $0v in y {
+        *v *= 2;
+    }
+}",
+            r"
+fn main() {
+    let mut x = vec![1, 2, 3];
+    let y = &mut x;
+    let mut tmp = y.into_iter();
+    while let Some(v) = tmp.next() {
+        *v *= 2;
+    }
+}",
+        )
+    }
+
+    #[test]
+    fn each_to_for_already_impls_iterator() {
+        cov_mark::check!(test_already_impls_iterator);
+        check_assist(
+            convert_for_loop_to_while_let,
+            r#"
+//- minicore: iterators
+fn main() {
+    for$0 a in core::iter::repeat(92).take(1) {
+        println!("{}", a);
+    }
+}
+"#,
+            r#"
+fn main() {
+    let mut tmp = core::iter::repeat(92).take(1);
+    while let Some(a) = tmp.next() {
+        println!("{}", a);
+    }
+}
+"#,
+        );
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs
index dd2e9cbcb5f2d..24cc32d10d888 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs
@@ -1,11 +1,11 @@
 use ide_db::{famous_defs::FamousDefs, traits::resolve_target_trait};
 use itertools::Itertools;
 use syntax::{
-    ast::{self, make, AstNode, HasGenericArgs, HasName},
+    ast::{self, AstNode, HasGenericArgs, HasName, make},
     ted,
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: convert_from_to_tryfrom
 //
@@ -71,7 +71,7 @@ pub(crate) fn convert_from_to_tryfrom(acc: &mut Assists, ctx: &AssistContext<'_>
     }
 
     acc.add(
-        AssistId("convert_from_to_tryfrom", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("convert_from_to_tryfrom"),
         "Convert From to TryFrom",
         impl_.syntax().text_range(),
         |builder| {
@@ -128,6 +128,7 @@ fn wrap_ok(expr: ast::Expr) -> ast::Expr {
         make::expr_path(make::ext::ident_path("Ok")),
         make::arg_list(std::iter::once(expr)),
     )
+    .into()
 }
 
 #[cfg(test)]
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_integer_literal.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_integer_literal.rs
index fd3378e8c2636..846f4e9b258ae 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_integer_literal.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_integer_literal.rs
@@ -1,6 +1,6 @@
-use syntax::{ast, ast::Radix, AstToken};
+use syntax::{AstToken, ast, ast::Radix};
 
-use crate::{AssistContext, AssistId, AssistKind, Assists, GroupLabel};
+use crate::{AssistContext, AssistId, Assists, GroupLabel};
 
 // Assist: convert_integer_literal
 //
@@ -47,7 +47,7 @@ pub(crate) fn convert_integer_literal(acc: &mut Assists, ctx: &AssistContext<'_>
 
         acc.add_group(
             &group_id,
-            AssistId("convert_integer_literal", AssistKind::RefactorInline),
+            AssistId::refactor_rewrite("convert_integer_literal"),
             label,
             range,
             |builder| builder.replace(range, converted),
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs
index 8c59ef4314f06..b80276a95fbf5 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs
@@ -1,7 +1,7 @@
 use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast, traits::resolve_target_trait};
 use syntax::ast::{self, AstNode, HasGenericArgs, HasName};
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // FIXME: this should be a diagnostic
 
@@ -85,7 +85,7 @@ pub(crate) fn convert_into_to_from(acc: &mut Assists, ctx: &AssistContext<'_>) -
         .filter(|name| name.text() == "self" || name.text() == "Self");
 
     acc.add(
-        AssistId("convert_into_to_from", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("convert_into_to_from"),
         "Convert Into to From",
         impl_.syntax().text_range(),
         |builder| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs
index 3c9a91741047e..3917ca197bb8c 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs
@@ -1,12 +1,12 @@
-use hir::{sym, Name};
+use hir::{Name, sym};
 use ide_db::famous_defs::FamousDefs;
 use stdx::format_to;
 use syntax::{
-    ast::{self, edit_in_place::Indent, make, HasArgList, HasLoopBody},
     AstNode,
+    ast::{self, HasArgList, HasLoopBody, edit_in_place::Indent, make},
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: convert_iter_for_each_to_for
 //
@@ -53,7 +53,7 @@ pub(crate) fn convert_iter_for_each_to_for(
     let range = stmt.as_ref().map_or(method.syntax(), AstNode::syntax).text_range();
 
     acc.add(
-        AssistId("convert_iter_for_each_to_for", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("convert_iter_for_each_to_for"),
         "Replace this `Iterator::for_each` with a for loop",
         range,
         |builder| {
@@ -108,7 +108,7 @@ pub(crate) fn convert_for_loop_with_for_each(
     }
 
     acc.add(
-        AssistId("convert_for_loop_with_for_each", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("convert_for_loop_with_for_each"),
         "Replace this for loop with `Iterator::for_each`",
         for_loop.syntax().text_range(),
         |builder| {
@@ -154,9 +154,9 @@ fn is_ref_and_impls_iter_method(
         _ => return None,
     };
     let wanted_method = Name::new_symbol_root(if ref_expr.mut_token().is_some() {
-        sym::iter_mut.clone()
+        sym::iter_mut
     } else {
-        sym::iter.clone()
+        sym::iter
     });
     let expr_behind_ref = ref_expr.expr()?;
     let ty = sema.type_of_expr(&expr_behind_ref)?.adjusted();
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_let_else_to_match.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_let_else_to_match.rs
index 79c34c14da720..df92b07cba7cd 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_let_else_to_match.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_let_else_to_match.rs
@@ -1,10 +1,10 @@
 use hir::Semantics;
 use ide_db::RootDatabase;
-use syntax::ast::RangeItem;
-use syntax::ast::{edit::AstNodeEdit, AstNode, HasName, LetStmt, Name, Pat};
 use syntax::T;
+use syntax::ast::RangeItem;
+use syntax::ast::{AstNode, HasName, LetStmt, Name, Pat, edit::AstNodeEdit};
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: convert_let_else_to_match
 //
@@ -43,7 +43,7 @@ pub(crate) fn convert_let_else_to_match(acc: &mut Assists, ctx: &AssistContext<'
 
     let target = let_stmt.syntax().text_range();
     acc.add(
-        AssistId("convert_let_else_to_match", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("convert_let_else_to_match"),
         "Convert let-else to let and match",
         target,
         |edit| {
@@ -162,11 +162,7 @@ fn binders_to_str(binders: &[(Name, bool)], addmut: bool) -> String {
         .iter()
         .map(
             |(ident, ismut)| {
-                if *ismut && addmut {
-                    format!("mut {ident}")
-                } else {
-                    ident.to_string()
-                }
+                if *ismut && addmut { format!("mut {ident}") } else { ident.to_string() }
             },
         )
         .collect::<Vec<_>>()
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_match_to_let_else.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_match_to_let_else.rs
index fd159eb824d6d..efcbcef00e903 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_match_to_let_else.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_match_to_let_else.rs
@@ -1,12 +1,13 @@
 use ide_db::defs::{Definition, NameRefClass};
 use syntax::{
+    AstNode, SyntaxNode,
     ast::{self, HasName, Name},
-    ted, AstNode, SyntaxNode,
+    ted,
 };
 
 use crate::{
+    AssistId,
     assist_context::{AssistContext, Assists},
-    AssistId, AssistKind,
 };
 
 // Assist: convert_match_to_let_else
@@ -54,7 +55,7 @@ pub(crate) fn convert_match_to_let_else(acc: &mut Assists, ctx: &AssistContext<'
     let extracted_variable_positions = find_extracted_variable(ctx, &extracting_arm)?;
 
     acc.add(
-        AssistId("convert_match_to_let_else", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("convert_match_to_let_else"),
         "Convert match to let-else",
         let_stmt.syntax().text_range(),
         |builder| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs
index 8d4ff84084bd3..ed8aad7b2c605 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs
@@ -2,11 +2,12 @@ use either::Either;
 use ide_db::{defs::Definition, search::FileReference};
 use itertools::Itertools;
 use syntax::{
+    SyntaxKind,
     ast::{self, AstNode, HasAttrs, HasGenericParams, HasVisibility},
-    match_ast, ted, SyntaxKind,
+    match_ast, ted,
 };
 
-use crate::{assist_context::SourceChangeBuilder, AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists, assist_context::SourceChangeBuilder};
 
 // Assist: convert_named_struct_to_tuple_struct
 //
@@ -68,7 +69,7 @@ pub(crate) fn convert_named_struct_to_tuple_struct(
     };
 
     acc.add(
-        AssistId("convert_named_struct_to_tuple_struct", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("convert_named_struct_to_tuple_struct"),
         "Convert to tuple struct",
         strukt.syntax().text_range(),
         |edit| {
@@ -98,7 +99,7 @@ fn edit_struct_def(
     let tuple_fields = ast::make::tuple_field_list(tuple_fields);
     let record_fields_text_range = record_fields.syntax().text_range();
 
-    edit.edit_file(ctx.file_id());
+    edit.edit_file(ctx.vfs_file_id());
     edit.replace(record_fields_text_range, tuple_fields.syntax().text());
 
     if let Either::Left(strukt) = strukt {
@@ -148,7 +149,7 @@ fn edit_struct_references(
     let usages = strukt_def.usages(&ctx.sema).include_self_refs().all();
 
     for (file_id, refs) in usages {
-        edit.edit_file(file_id.file_id());
+        edit.edit_file(file_id.file_id(ctx.db()));
         for r in refs {
             process_struct_name_reference(ctx, r, edit);
         }
@@ -226,7 +227,7 @@ fn edit_field_references(
         let def = Definition::Field(field);
         let usages = def.usages(&ctx.sema).all();
         for (file_id, refs) in usages {
-            edit.edit_file(file_id.file_id());
+            edit.edit_file(file_id.file_id(ctx.db()));
             for r in refs {
                 if let Some(name_ref) = r.name.as_name_ref() {
                     // Only edit the field reference if it's part of a `.field` access
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_nested_function_to_closure.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_nested_function_to_closure.rs
index ea2752b881857..c0fd69779aeae 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_nested_function_to_closure.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_nested_function_to_closure.rs
@@ -1,4 +1,4 @@
-use ide_db::assists::{AssistId, AssistKind};
+use ide_db::assists::AssistId;
 use syntax::ast::{self, HasGenericParams, HasName};
 use syntax::{AstNode, SyntaxKind};
 
@@ -44,7 +44,7 @@ pub(crate) fn convert_nested_function_to_closure(
     let param_list = function.param_list()?;
 
     acc.add(
-        AssistId("convert_nested_function_to_closure", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("convert_nested_function_to_closure"),
         "Convert nested function to closure",
         target,
         |edit| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs
index b7a77644496fa..71a61f2db0011 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs
@@ -5,20 +5,21 @@ use ide_db::{
     ty_filter::TryEnum,
 };
 use syntax::{
+    AstNode,
+    SyntaxKind::{FN, FOR_EXPR, LOOP_EXPR, WHILE_EXPR, WHITESPACE},
+    T,
     ast::{
         self,
         edit::{AstNodeEdit, IndentLevel},
         make,
     },
-    ted, AstNode,
-    SyntaxKind::{FN, FOR_EXPR, LOOP_EXPR, WHILE_EXPR, WHITESPACE},
-    T,
+    ted,
 };
 
 use crate::{
+    AssistId,
     assist_context::{AssistContext, Assists},
     utils::invert_boolean_expression_legacy,
-    AssistId, AssistKind,
 };
 
 // Assist: convert_to_guarded_return
@@ -127,7 +128,7 @@ fn if_expr_to_guarded_return(
 
     let target = if_expr.syntax().text_range();
     acc.add(
-        AssistId("convert_to_guarded_return", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("convert_to_guarded_return"),
         "Convert to guarded return",
         target,
         |edit| {
@@ -209,7 +210,7 @@ fn let_stmt_to_guarded_return(
     };
 
     acc.add(
-        AssistId("convert_to_guarded_return", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("convert_to_guarded_return"),
         "Convert to guarded return",
         target,
         |edit| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs
index 91af9b05bbb85..cca4cb9d8f775 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs
@@ -1,18 +1,19 @@
 use either::Either;
 use hir::ModuleDef;
 use ide_db::{
-    assists::{AssistId, AssistKind},
+    FxHashSet,
+    assists::AssistId,
     defs::Definition,
     helpers::mod_path_to_ast,
-    imports::insert_use::{insert_use, ImportScope},
+    imports::insert_use::{ImportScope, insert_use},
     search::{FileReference, UsageSearchResult},
     source_change::SourceChangeBuilder,
     syntax_helpers::node_ext::{for_each_tail_expr, walk_expr},
-    FxHashSet,
 };
 use syntax::{
-    ast::{self, edit::IndentLevel, edit_in_place::Indent, make, HasName},
-    match_ast, ted, AstNode, SyntaxNode,
+    AstNode, SyntaxNode,
+    ast::{self, HasName, edit::IndentLevel, edit_in_place::Indent, make},
+    match_ast, ted,
 };
 
 use crate::assist_context::{AssistContext, Assists};
@@ -62,7 +63,7 @@ pub(crate) fn convert_tuple_return_type_to_struct(
 
     let target = type_ref.syntax().text_range();
     acc.add(
-        AssistId("convert_tuple_return_type_to_struct", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("convert_tuple_return_type_to_struct"),
         "Convert tuple return type to tuple struct",
         target,
         move |edit| {
@@ -105,7 +106,7 @@ fn replace_usages(
     target_module: &hir::Module,
 ) {
     for (file_id, references) in usages.iter() {
-        edit.edit_file(file_id.file_id());
+        edit.edit_file(file_id.file_id(ctx.db()));
 
         let refs_with_imports =
             augment_references_with_imports(edit, ctx, references, struct_name, target_module);
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
index f6e516db88835..777e366da956b 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
@@ -1,11 +1,12 @@
 use either::Either;
 use ide_db::defs::{Definition, NameRefClass};
 use syntax::{
+    SyntaxKind, SyntaxNode,
     ast::{self, AstNode, HasAttrs, HasGenericParams, HasVisibility},
-    match_ast, ted, SyntaxKind, SyntaxNode,
+    match_ast, ted,
 };
 
-use crate::{assist_context::SourceChangeBuilder, AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists, assist_context::SourceChangeBuilder};
 
 // Assist: convert_tuple_struct_to_named_struct
 //
@@ -64,7 +65,7 @@ pub(crate) fn convert_tuple_struct_to_named_struct(
     let target = strukt.as_ref().either(|s| s.syntax(), |v| v.syntax()).text_range();
 
     acc.add(
-        AssistId("convert_tuple_struct_to_named_struct", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("convert_tuple_struct_to_named_struct"),
         "Convert to named struct",
         target,
         |edit| {
@@ -94,7 +95,7 @@ fn edit_struct_def(
     let record_fields = ast::make::record_field_list(record_fields);
     let tuple_fields_text_range = tuple_fields.syntax().text_range();
 
-    edit.edit_file(ctx.file_id());
+    edit.edit_file(ctx.vfs_file_id());
 
     if let Either::Left(strukt) = strukt {
         if let Some(w) = strukt.where_clause() {
@@ -141,7 +142,7 @@ fn edit_struct_references(
             match node {
                 ast::TupleStructPat(tuple_struct_pat) => {
                     let file_range = ctx.sema.original_range_opt(&node)?;
-                    edit.edit_file(file_range.file_id);
+                    edit.edit_file(file_range.file_id.file_id(ctx.db()));
                     edit.replace(
                         file_range.range,
                         ast::make::record_pat_with_fields(
@@ -196,7 +197,7 @@ fn edit_struct_references(
     };
 
     for (file_id, refs) in usages {
-        edit.edit_file(file_id.file_id());
+        edit.edit_file(file_id.file_id(ctx.db()));
         for r in refs {
             for node in r.name.syntax().ancestors() {
                 if edit_node(edit, node).is_some() {
@@ -221,7 +222,7 @@ fn edit_field_references(
         let def = Definition::Field(field);
         let usages = def.usages(&ctx.sema).all();
         for (file_id, refs) in usages {
-            edit.edit_file(file_id.file_id());
+            edit.edit_file(file_id.file_id(ctx.db()));
             for r in refs {
                 if let Some(name_ref) = r.name.as_name_ref() {
                     edit.replace(ctx.sema.original_range(name_ref.syntax()).range, name.text());
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs
index 6a5b11f542560..e582aa814ae14 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs
@@ -3,7 +3,7 @@ use ide_db::RootDatabase;
 use stdx::format_to;
 use syntax::ast::{self, AstNode};
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: convert_two_arm_bool_match_to_matches_macro
 //
@@ -56,7 +56,7 @@ pub(crate) fn convert_two_arm_bool_match_to_matches_macro(
     let expr = match_expr.expr()?;
 
     acc.add(
-        AssistId("convert_two_arm_bool_match_to_matches_macro", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("convert_two_arm_bool_match_to_matches_macro"),
         "Convert to matches!",
         target_range,
         |builder| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_while_to_loop.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_while_to_loop.rs
index beec64d13b689..dbe3ee0ed6039 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_while_to_loop.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_while_to_loop.rs
@@ -3,18 +3,18 @@ use std::iter;
 use either::Either;
 use ide_db::syntax_helpers::node_ext::is_pattern_cond;
 use syntax::{
+    AstNode, T,
     ast::{
-        self,
+        self, HasLoopBody,
         edit::{AstNodeEdit, IndentLevel},
-        make, HasLoopBody,
+        make,
     },
-    AstNode, T,
 };
 
 use crate::{
+    AssistId,
     assist_context::{AssistContext, Assists},
     utils::invert_boolean_expression_legacy,
-    AssistId, AssistKind,
 };
 
 // Assist: convert_while_to_loop
@@ -47,7 +47,7 @@ pub(crate) fn convert_while_to_loop(acc: &mut Assists, ctx: &AssistContext<'_>)
 
     let target = while_expr.syntax().text_range();
     acc.add(
-        AssistId("convert_while_to_loop", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("convert_while_to_loop"),
         "Convert while to loop",
         target,
         |edit| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs
index e34e50904875d..800ef89ac6edc 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs
@@ -1,14 +1,15 @@
-use hir::{sym, HasVisibility};
-use ide_db::text_edit::TextRange;
+use hir::{HasVisibility, sym};
 use ide_db::{
-    assists::{AssistId, AssistKind},
+    FxHashMap, FxHashSet,
+    assists::AssistId,
     defs::Definition,
     helpers::mod_path_to_ast,
     search::{FileReference, SearchScope},
-    FxHashMap, FxHashSet,
 };
 use itertools::Itertools;
-use syntax::{ast, ted, AstNode, Edition, SmolStr, SyntaxNode, ToSmolStr};
+use syntax::ast::syntax_factory::SyntaxFactory;
+use syntax::syntax_editor::SyntaxEditor;
+use syntax::{AstNode, Edition, SmolStr, SyntaxNode, ToSmolStr, ast};
 
 use crate::{
     assist_context::{AssistContext, Assists, SourceChangeBuilder},
@@ -47,7 +48,7 @@ pub(crate) fn destructure_struct_binding(acc: &mut Assists, ctx: &AssistContext<
     let data = collect_data(ident_pat, ctx)?;
 
     acc.add(
-        AssistId("destructure_struct_binding", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("destructure_struct_binding"),
         "Destructure struct binding",
         data.ident_pat.syntax().text_range(),
         |edit| destructure_struct_binding_impl(ctx, edit, &data),
@@ -62,13 +63,10 @@ fn destructure_struct_binding_impl(
     data: &StructEditData,
 ) {
     let field_names = generate_field_names(ctx, data);
-    let assignment_edit = build_assignment_edit(ctx, builder, data, &field_names);
-    let usage_edits = build_usage_edits(ctx, builder, data, &field_names.into_iter().collect());
-
-    assignment_edit.apply();
-    for edit in usage_edits {
-        edit.apply(builder);
-    }
+    let mut editor = builder.make_editor(data.ident_pat.syntax());
+    destructure_pat(ctx, &mut editor, data, &field_names);
+    update_usages(ctx, &mut editor, data, &field_names.into_iter().collect());
+    builder.add_file_edits(ctx.vfs_file_id(), editor);
 }
 
 struct StructEditData {
@@ -95,7 +93,7 @@ fn collect_data(ident_pat: ast::IdentPat, ctx: &AssistContext<'_>) -> Option<Str
     let kind = struct_type.kind(ctx.db());
     let struct_def_path = module.find_path(ctx.db(), struct_def, cfg)?;
 
-    let is_non_exhaustive = struct_def.attrs(ctx.db())?.by_key(&sym::non_exhaustive).exists();
+    let is_non_exhaustive = struct_def.attrs(ctx.db())?.by_key(sym::non_exhaustive).exists();
     let is_foreign_crate = struct_def.module(ctx.db()).is_some_and(|m| m.krate() != module.krate());
 
     let fields = struct_type.fields(ctx.db());
@@ -173,64 +171,57 @@ fn get_names_in_scope(
     Some(names)
 }
 
-fn build_assignment_edit(
+fn destructure_pat(
     _ctx: &AssistContext<'_>,
-    builder: &mut SourceChangeBuilder,
+    editor: &mut SyntaxEditor,
     data: &StructEditData,
     field_names: &[(SmolStr, SmolStr)],
-) -> AssignmentEdit {
-    let ident_pat = builder.make_mut(data.ident_pat.clone());
+) {
+    let ident_pat = &data.ident_pat;
 
     let struct_path = mod_path_to_ast(&data.struct_def_path, data.edition);
     let is_ref = ident_pat.ref_token().is_some();
     let is_mut = ident_pat.mut_token().is_some();
 
+    let make = SyntaxFactory::with_mappings();
     let new_pat = match data.kind {
         hir::StructKind::Tuple => {
             let ident_pats = field_names.iter().map(|(_, new_name)| {
-                let name = ast::make::name(new_name);
-                ast::Pat::from(ast::make::ident_pat(is_ref, is_mut, name))
+                let name = make.name(new_name);
+                ast::Pat::from(make.ident_pat(is_ref, is_mut, name))
             });
-            ast::Pat::TupleStructPat(ast::make::tuple_struct_pat(struct_path, ident_pats))
+            ast::Pat::TupleStructPat(make.tuple_struct_pat(struct_path, ident_pats))
         }
         hir::StructKind::Record => {
             let fields = field_names.iter().map(|(old_name, new_name)| {
                 // Use shorthand syntax if possible
                 if old_name == new_name && !is_mut {
-                    ast::make::record_pat_field_shorthand(ast::make::name_ref(old_name))
+                    make.record_pat_field_shorthand(make.name_ref(old_name))
                 } else {
-                    ast::make::record_pat_field(
-                        ast::make::name_ref(old_name),
-                        ast::Pat::IdentPat(ast::make::ident_pat(
-                            is_ref,
-                            is_mut,
-                            ast::make::name(new_name),
-                        )),
+                    make.record_pat_field(
+                        make.name_ref(old_name),
+                        ast::Pat::IdentPat(make.ident_pat(is_ref, is_mut, make.name(new_name))),
                     )
                 }
             });
+            let field_list = make
+                .record_pat_field_list(fields, data.has_private_members.then_some(make.rest_pat()));
 
-            let field_list = ast::make::record_pat_field_list(
-                fields,
-                data.has_private_members.then_some(ast::make::rest_pat()),
-            );
-            ast::Pat::RecordPat(ast::make::record_pat_with_fields(struct_path, field_list))
+            ast::Pat::RecordPat(make.record_pat_with_fields(struct_path, field_list))
         }
-        hir::StructKind::Unit => ast::make::path_pat(struct_path),
+        hir::StructKind::Unit => make.path_pat(struct_path),
     };
 
     // If the binding is nested inside a record, we need to wrap the new
     // destructured pattern in a non-shorthand record field
-    let new_pat = if data.is_nested {
-        let record_pat_field =
-            ast::make::record_pat_field(ast::make::name_ref(&ident_pat.to_string()), new_pat)
-                .clone_for_update();
-        NewPat::RecordPatField(record_pat_field)
+    let destructured_pat = if data.is_nested {
+        make.record_pat_field(make.name_ref(&ident_pat.to_string()), new_pat).syntax().clone()
     } else {
-        NewPat::Pat(new_pat.clone_for_update())
+        new_pat.syntax().clone()
     };
 
-    AssignmentEdit { old_pat: ident_pat, new_pat }
+    editor.add_mappings(make.finish_with_mappings());
+    editor.replace(data.ident_pat.syntax(), destructured_pat);
 }
 
 fn generate_field_names(ctx: &AssistContext<'_>, data: &StructEditData) -> Vec<(SmolStr, SmolStr)> {
@@ -267,85 +258,57 @@ fn new_field_name(base_name: SmolStr, names_in_scope: &FxHashSet<SmolStr>) -> Sm
     name
 }
 
-struct AssignmentEdit {
-    old_pat: ast::IdentPat,
-    new_pat: NewPat,
-}
-
-enum NewPat {
-    Pat(ast::Pat),
-    RecordPatField(ast::RecordPatField),
-}
-
-impl AssignmentEdit {
-    fn apply(self) {
-        match self.new_pat {
-            NewPat::Pat(pat) => ted::replace(self.old_pat.syntax(), pat.syntax()),
-            NewPat::RecordPatField(record_pat_field) => {
-                ted::replace(self.old_pat.syntax(), record_pat_field.syntax())
-            }
-        }
-    }
-}
-
-fn build_usage_edits(
+fn update_usages(
     ctx: &AssistContext<'_>,
-    builder: &mut SourceChangeBuilder,
+    editor: &mut SyntaxEditor,
     data: &StructEditData,
     field_names: &FxHashMap<SmolStr, SmolStr>,
-) -> Vec<StructUsageEdit> {
-    data.usages
+) {
+    let make = SyntaxFactory::with_mappings();
+    let edits = data
+        .usages
         .iter()
-        .filter_map(|r| build_usage_edit(ctx, builder, data, r, field_names))
-        .collect_vec()
+        .filter_map(|r| build_usage_edit(ctx, &make, data, r, field_names))
+        .collect_vec();
+    editor.add_mappings(make.finish_with_mappings());
+    for (old, new) in edits {
+        editor.replace(old, new);
+    }
 }
 
 fn build_usage_edit(
     ctx: &AssistContext<'_>,
-    builder: &mut SourceChangeBuilder,
+    make: &SyntaxFactory,
     data: &StructEditData,
     usage: &FileReference,
     field_names: &FxHashMap<SmolStr, SmolStr>,
-) -> Option<StructUsageEdit> {
+) -> Option<(SyntaxNode, SyntaxNode)> {
     match usage.name.syntax().ancestors().find_map(ast::FieldExpr::cast) {
         Some(field_expr) => Some({
             let field_name: SmolStr = field_expr.name_ref()?.to_string().into();
             let new_field_name = field_names.get(&field_name)?;
-            let new_expr = ast::make::expr_path(ast::make::ext::ident_path(new_field_name));
+            let new_expr = make.expr_path(ast::make::ext::ident_path(new_field_name));
 
             // If struct binding is a reference, we might need to deref field usages
             if data.is_ref {
                 let (replace_expr, ref_data) = determine_ref_and_parens(ctx, &field_expr);
-                StructUsageEdit::IndexField(
-                    builder.make_mut(replace_expr),
-                    ref_data.wrap_expr(new_expr).clone_for_update(),
+                (
+                    replace_expr.syntax().clone_for_update(),
+                    ref_data.wrap_expr(new_expr).syntax().clone_for_update(),
                 )
             } else {
-                StructUsageEdit::IndexField(
-                    builder.make_mut(field_expr).into(),
-                    new_expr.clone_for_update(),
-                )
+                (field_expr.syntax().clone(), new_expr.syntax().clone())
             }
         }),
-        None => Some(StructUsageEdit::Path(usage.range)),
-    }
-}
-
-enum StructUsageEdit {
-    Path(TextRange),
-    IndexField(ast::Expr, ast::Expr),
-}
-
-impl StructUsageEdit {
-    fn apply(self, edit: &mut SourceChangeBuilder) {
-        match self {
-            StructUsageEdit::Path(target_expr) => {
-                edit.replace(target_expr, "todo!()");
-            }
-            StructUsageEdit::IndexField(target_expr, replace_with) => {
-                ted::replace(target_expr.syntax(), replace_with.syntax())
-            }
-        }
+        None => Some((
+            usage.name.syntax().as_node().unwrap().clone(),
+            make.expr_macro(
+                ast::make::ext::ident_path("todo"),
+                make.token_tree(syntax::SyntaxKind::L_PAREN, []),
+            )
+            .syntax()
+            .clone(),
+        )),
     }
 }
 
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs
index 39142d606207c..adf0f0997b39d 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs
@@ -1,5 +1,5 @@
 use ide_db::{
-    assists::{AssistId, AssistKind},
+    assists::AssistId,
     defs::Definition,
     search::{FileReference, SearchScope},
     syntax_helpers::suggest_name,
@@ -7,7 +7,7 @@ use ide_db::{
 };
 use itertools::Itertools;
 use syntax::{
-    ast::{self, make, AstNode, FieldExpr, HasName, IdentPat},
+    ast::{self, AstNode, FieldExpr, HasName, IdentPat, make},
     ted,
 };
 
@@ -65,7 +65,7 @@ pub(crate) fn destructure_tuple_binding_impl(
 
     if with_sub_pattern {
         acc.add(
-            AssistId("destructure_tuple_binding_in_sub_pattern", AssistKind::RefactorRewrite),
+            AssistId::refactor_rewrite("destructure_tuple_binding_in_sub_pattern"),
             "Destructure tuple in sub-pattern",
             data.ident_pat.syntax().text_range(),
             |edit| destructure_tuple_edit_impl(ctx, edit, &data, true),
@@ -73,7 +73,7 @@ pub(crate) fn destructure_tuple_binding_impl(
     }
 
     acc.add(
-        AssistId("destructure_tuple_binding", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("destructure_tuple_binding"),
         if with_sub_pattern { "Destructure tuple in place" } else { "Destructure tuple" },
         data.ident_pat.syntax().text_range(),
         |edit| destructure_tuple_edit_impl(ctx, edit, &data, false),
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs
index d264928046707..74bb0ba3f6020 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs
@@ -1,14 +1,14 @@
 use either::Either;
 use itertools::Itertools;
 use syntax::{
-    ast::{self, edit::IndentLevel, CommentPlacement, Whitespace},
     AstToken, TextRange,
+    ast::{self, CommentPlacement, Whitespace, edit::IndentLevel},
 };
 
 use crate::{
+    AssistContext, AssistId, Assists,
     handlers::convert_comment_block::{line_comment_text, relevant_line_comments},
     utils::required_hashes,
-    AssistContext, AssistId, AssistKind, Assists,
 };
 
 // Assist: desugar_doc_comment
@@ -54,7 +54,7 @@ pub(crate) fn desugar_doc_comment(acc: &mut Assists, ctx: &AssistContext<'_>) ->
     };
 
     acc.add(
-        AssistId("desugar_doc_comment", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("desugar_doc_comment"),
         "Desugar doc-comment to attribute macro",
         target,
         |edit| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs
index 0b95d6177f904..307414c79715a 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs
@@ -7,13 +7,14 @@ use ide_db::{
 };
 use stdx::never;
 use syntax::{
-    ast::{self, make, Use, UseTree, VisibilityKind},
-    ted, AstNode, Direction, SyntaxNode, SyntaxToken, T,
+    AstNode, Direction, SyntaxNode, SyntaxToken, T,
+    ast::{self, Use, UseTree, VisibilityKind, make},
+    ted,
 };
 
 use crate::{
+    AssistId,
     assist_context::{AssistContext, Assists},
-    AssistId, AssistKind,
 };
 
 // Assist: expand_glob_import
@@ -61,7 +62,7 @@ pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext<'_>) ->
 
     let target = parent.either(|n| n.syntax().clone(), |n| n.syntax().clone());
     acc.add(
-        AssistId("expand_glob_import", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("expand_glob_import"),
         "Expand glob import",
         target.text_range(),
         |builder| {
@@ -122,7 +123,7 @@ pub(crate) fn expand_glob_reexport(acc: &mut Assists, ctx: &AssistContext<'_>) -
 
     let target = parent.either(|n| n.syntax().clone(), |n| n.syntax().clone());
     acc.add(
-        AssistId("expand_glob_reexport", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("expand_glob_reexport"),
         "Expand glob reexport",
         target.text_range(),
         |builder| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_rest_pattern.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_rest_pattern.rs
index c79a982c38d09..4e487e2162649 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_rest_pattern.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_rest_pattern.rs
@@ -1,28 +1,13 @@
 use hir::{PathResolution, StructKind};
 use ide_db::syntax_helpers::suggest_name::NameGenerator;
 use syntax::{
-    ast::{self, make},
-    match_ast, AstNode, ToSmolStr,
+    AstNode, ToSmolStr,
+    ast::{self, syntax_factory::SyntaxFactory},
+    match_ast,
 };
 
 use crate::{AssistContext, AssistId, Assists};
 
-pub(crate) fn expand_rest_pattern(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
-    let rest_pat = ctx.find_node_at_offset::<ast::RestPat>()?;
-    let parent = rest_pat.syntax().parent()?;
-    match_ast! {
-        match parent {
-            ast::RecordPatFieldList(it) => expand_record_rest_pattern(acc, ctx, it.syntax().parent().and_then(ast::RecordPat::cast)?, rest_pat),
-            ast::TupleStructPat(it) => expand_tuple_struct_rest_pattern(acc, ctx, it, rest_pat),
-            // FIXME
-            // ast::TuplePat(it) => (),
-            // FIXME
-            // ast::SlicePat(it) => (),
-            _ => return None,
-        }
-    }
-}
-
 // Assist: expand_record_rest_pattern
 //
 // Fills fields by replacing rest pattern in record patterns.
@@ -49,7 +34,6 @@ fn expand_record_rest_pattern(
     rest_pat: ast::RestPat,
 ) -> Option<()> {
     let missing_fields = ctx.sema.record_pattern_missing_fields(&record_pat);
-
     if missing_fields.is_empty() {
         cov_mark::hit!(no_missing_fields);
         return None;
@@ -61,24 +45,30 @@ fn expand_record_rest_pattern(
         return None;
     }
 
-    let new_field_list =
-        make::record_pat_field_list(old_field_list.fields(), None).clone_for_update();
-    for (f, _) in missing_fields.iter() {
-        let edition = ctx.sema.scope(record_pat.syntax())?.krate().edition(ctx.db());
-        let field = make::record_pat_field_shorthand(make::name_ref(
-            &f.name(ctx.sema.db).display_no_db(edition).to_smolstr(),
-        ));
-        new_field_list.add_field(field.clone_for_update());
-    }
-
-    let target_range = rest_pat.syntax().text_range();
+    let edition = ctx.sema.scope(record_pat.syntax())?.krate().edition(ctx.db());
     acc.add(
-        AssistId("expand_record_rest_pattern", crate::AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("expand_record_rest_pattern"),
         "Fill struct fields",
-        target_range,
-        move |builder| builder.replace_ast(old_field_list, new_field_list),
+        rest_pat.syntax().text_range(),
+        |builder| {
+            let make = SyntaxFactory::with_mappings();
+            let mut editor = builder.make_editor(rest_pat.syntax());
+            let new_field_list = make.record_pat_field_list(old_field_list.fields(), None);
+            for (f, _) in missing_fields.iter() {
+                let field = make.record_pat_field_shorthand(
+                    make.name_ref(&f.name(ctx.sema.db).display_no_db(edition).to_smolstr()),
+                );
+                new_field_list.add_field(field);
+            }
+
+            editor.replace(old_field_list.syntax(), new_field_list.syntax());
+
+            editor.add_mappings(make.finish_with_mappings());
+            builder.add_file_edits(ctx.vfs_file_id(), editor);
+        },
     )
 }
+
 // Assist: expand_tuple_struct_rest_pattern
 //
 // Fills fields by replacing rest pattern in tuple struct patterns.
@@ -133,34 +123,58 @@ fn expand_tuple_struct_rest_pattern(
         return None;
     }
 
-    let mut name_gen = NameGenerator::new_from_scope_locals(ctx.sema.scope(pat.syntax()));
-    let new_pat = make::tuple_struct_pat(
-        path,
-        pat.fields()
-            .take(prefix_count)
-            .chain(fields[prefix_count..fields.len() - suffix_count].iter().map(|f| {
-                make::ident_pat(
-                    false,
-                    false,
-                    match name_gen.for_type(&f.ty(ctx.sema.db), ctx.sema.db, ctx.edition()) {
-                        Some(name) => make::name(&name),
-                        None => make::name(&format!("_{}", f.index())),
-                    },
-                )
-                .into()
-            }))
-            .chain(pat.fields().skip(prefix_count + 1)),
-    );
-
-    let target_range = rest_pat.syntax().text_range();
     acc.add(
-        AssistId("expand_tuple_struct_rest_pattern", crate::AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("expand_tuple_struct_rest_pattern"),
         "Fill tuple struct fields",
-        target_range,
-        move |builder| builder.replace_ast(pat, new_pat),
+        rest_pat.syntax().text_range(),
+        |builder| {
+            let make = SyntaxFactory::with_mappings();
+            let mut editor = builder.make_editor(rest_pat.syntax());
+
+            let mut name_gen = NameGenerator::new_from_scope_locals(ctx.sema.scope(pat.syntax()));
+            let new_pat = make.tuple_struct_pat(
+                path,
+                pat.fields()
+                    .take(prefix_count)
+                    .chain(fields[prefix_count..fields.len() - suffix_count].iter().map(|f| {
+                        make.ident_pat(
+                            false,
+                            false,
+                            match name_gen.for_type(&f.ty(ctx.sema.db), ctx.sema.db, ctx.edition())
+                            {
+                                Some(name) => make.name(&name),
+                                None => make.name(&format!("_{}", f.index())),
+                            },
+                        )
+                        .into()
+                    }))
+                    .chain(pat.fields().skip(prefix_count + 1)),
+            );
+
+            editor.replace(pat.syntax(), new_pat.syntax());
+
+            editor.add_mappings(make.finish_with_mappings());
+            builder.add_file_edits(ctx.vfs_file_id(), editor);
+        },
     )
 }
 
+pub(crate) fn expand_rest_pattern(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    let rest_pat = ctx.find_node_at_offset::<ast::RestPat>()?;
+    let parent = rest_pat.syntax().parent()?;
+    match_ast! {
+        match parent {
+            ast::RecordPatFieldList(it) => expand_record_rest_pattern(acc, ctx, it.syntax().parent().and_then(ast::RecordPat::cast)?, rest_pat),
+            ast::TupleStructPat(it) => expand_tuple_struct_rest_pattern(acc, ctx, it, rest_pat),
+            // FIXME
+            // ast::TuplePat(it) => (),
+            // FIXME
+            // ast::SlicePat(it) => (),
+            _ => return None,
+        }
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs
index e4d347ef16bd6..54699a9454f09 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs
@@ -1,14 +1,15 @@
-use crate::{utils, AssistContext, Assists};
+use crate::{AssistContext, Assists, utils};
 use ide_db::{
     assists::{AssistId, AssistKind},
-    syntax_helpers::format_string_exprs::{parse_format_exprs, Arg},
+    syntax_helpers::format_string_exprs::{Arg, parse_format_exprs},
 };
 use itertools::Itertools;
 use syntax::{
-    ast::{self, make},
-    ted, AstNode, AstToken, NodeOrToken,
+    AstNode, AstToken, NodeOrToken,
     SyntaxKind::WHITESPACE,
     T,
+    ast::{self, make},
+    ted,
 };
 
 // Assist: extract_expressions_from_format_string
@@ -52,6 +53,7 @@ pub(crate) fn extract_expressions_from_format_string(
             } else {
                 AssistKind::QuickFix
             },
+            None,
         ),
         "Extract format expressions",
         tt.syntax().text_range(),
@@ -61,21 +63,28 @@ pub(crate) fn extract_expressions_from_format_string(
             // Extract existing arguments in macro
             let tokens = tt.token_trees_and_tokens().collect_vec();
 
-            let existing_args = if let [_opening_bracket, NodeOrToken::Token(_format_string), _args_start_comma, tokens @ .., NodeOrToken::Token(_end_bracket)] =
-                tokens.as_slice()
+            let existing_args = if let [
+                _opening_bracket,
+                NodeOrToken::Token(_format_string),
+                _args_start_comma,
+                tokens @ ..,
+                NodeOrToken::Token(_end_bracket),
+            ] = tokens.as_slice()
             {
-                let args = tokens.split(|it| matches!(it, NodeOrToken::Token(t) if t.kind() == T![,])).map(|arg| {
-                    // Strip off leading and trailing whitespace tokens
-                    let arg = match arg.split_first() {
-                        Some((NodeOrToken::Token(t), rest)) if t.kind() == WHITESPACE => rest,
-                        _ => arg,
-                    };
-                    let arg = match arg.split_last() {
-                        Some((NodeOrToken::Token(t), rest)) if t.kind() == WHITESPACE => rest,
-                        _ => arg,
-                    };
-                    arg
-                });
+                let args = tokens
+                    .split(|it| matches!(it, NodeOrToken::Token(t) if t.kind() == T![,]))
+                    .map(|arg| {
+                        // Strip off leading and trailing whitespace tokens
+                        let arg = match arg.split_first() {
+                            Some((NodeOrToken::Token(t), rest)) if t.kind() == WHITESPACE => rest,
+                            _ => arg,
+                        };
+
+                        match arg.split_last() {
+                            Some((NodeOrToken::Token(t), rest)) if t.kind() == WHITESPACE => rest,
+                            _ => arg,
+                        }
+                    });
 
                 args.collect()
             } else {
@@ -100,7 +109,8 @@ pub(crate) fn extract_expressions_from_format_string(
                     Arg::Expr(s) => {
                         // insert arg
                         // FIXME: use the crate's edition for parsing
-                        let expr = ast::Expr::parse(&s, syntax::Edition::CURRENT_FIXME).syntax_node();
+                        let expr =
+                            ast::Expr::parse(&s, syntax::Edition::CURRENT_FIXME).syntax_node();
                         let mut expr_tt = utils::tt_from_syntax(expr);
                         new_tt_bits.append(&mut expr_tt);
                     }
@@ -120,7 +130,6 @@ pub(crate) fn extract_expressions_from_format_string(
                 }
             }
 
-
             // Insert new args
             let new_tt = make::token_tree(tt_delimiter, new_tt_bits).clone_for_update();
             ted::replace(tt.syntax(), new_tt.syntax());
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs
index 6f4b886a28d75..046af71a9dc0c 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs
@@ -7,33 +7,34 @@ use hir::{
     TypeInfo, TypeParam,
 };
 use ide_db::{
+    FxIndexSet, RootDatabase,
     assists::GroupLabel,
     defs::{Definition, NameRefClass},
     famous_defs::FamousDefs,
     helpers::mod_path_to_ast,
-    imports::insert_use::{insert_use, ImportScope},
+    imports::insert_use::{ImportScope, insert_use},
     search::{FileReference, ReferenceCategory, SearchScope},
     source_change::SourceChangeBuilder,
     syntax_helpers::node_ext::{
         for_each_tail_expr, preorder_expr, walk_expr, walk_pat, walk_patterns_in_expr,
     },
-    FxIndexSet, RootDatabase,
 };
 use itertools::Itertools;
 use syntax::{
+    Edition, SyntaxElement,
+    SyntaxKind::{self, COMMENT},
+    SyntaxNode, SyntaxToken, T, TextRange, TextSize, TokenAtOffset, WalkEvent,
     ast::{
-        self, edit::IndentLevel, edit_in_place::Indent, AstNode, AstToken, HasGenericParams,
-        HasName,
+        self, AstNode, AstToken, HasGenericParams, HasName, edit::IndentLevel,
+        edit_in_place::Indent,
     },
-    match_ast, ted, Edition, SyntaxElement,
-    SyntaxKind::{self, COMMENT},
-    SyntaxNode, SyntaxToken, TextRange, TextSize, TokenAtOffset, WalkEvent, T,
+    match_ast, ted,
 };
 
 use crate::{
+    AssistId,
     assist_context::{AssistContext, Assists, TreeMutator},
     utils::generate_impl,
-    AssistId,
 };
 
 // Assist: extract_function
@@ -107,7 +108,7 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
 
     acc.add_group(
         &GroupLabel("Extract into...".to_owned()),
-        AssistId("extract_function", crate::AssistKind::RefactorExtract),
+        AssistId::refactor_extract("extract_function"),
         "Extract into function",
         target_range,
         move |builder| {
@@ -247,11 +248,8 @@ fn make_function_name(semantics_scope: &hir::SemanticsScope<'_>) -> ast::NameRef
     let mut names_in_scope = vec![];
     semantics_scope.process_all_names(&mut |name, _| {
         names_in_scope.push(
-            name.display(
-                semantics_scope.db.upcast(),
-                semantics_scope.krate().edition(semantics_scope.db),
-            )
-            .to_string(),
+            name.display(semantics_scope.db, semantics_scope.krate().edition(semantics_scope.db))
+                .to_string(),
         )
     });
 
@@ -750,7 +748,7 @@ impl FunctionBody {
                         ast::Stmt::Item(_) => (),
                         ast::Stmt::LetStmt(stmt) => {
                             if let Some(pat) = stmt.pat() {
-                                let _ = walk_pat(&pat, &mut |pat| {
+                                _ = walk_pat(&pat, &mut |pat| {
                                     cb(pat);
                                     std::ops::ControlFlow::<(), ()>::Continue(())
                                 });
@@ -799,15 +797,21 @@ impl FunctionBody {
     ) -> (FxIndexSet<Local>, Option<ast::SelfParam>) {
         let mut self_param = None;
         let mut res = FxIndexSet::default();
-        let mut add_name_if_local = |name_ref: Option<_>| {
-            let local_ref =
-                match name_ref.and_then(|name_ref| NameRefClass::classify(sema, &name_ref)) {
-                    Some(
-                        NameRefClass::Definition(Definition::Local(local_ref), _)
-                        | NameRefClass::FieldShorthand { local_ref, field_ref: _, adt_subst: _ },
-                    ) => local_ref,
-                    _ => return,
-                };
+
+        fn local_from_name_ref(
+            sema: &Semantics<'_, RootDatabase>,
+            name_ref: ast::NameRef,
+        ) -> Option<hir::Local> {
+            match NameRefClass::classify(sema, &name_ref) {
+                Some(
+                    NameRefClass::Definition(Definition::Local(local_ref), _)
+                    | NameRefClass::FieldShorthand { local_ref, field_ref: _, adt_subst: _ },
+                ) => Some(local_ref),
+                _ => None,
+            }
+        }
+
+        let mut add_name_if_local = |local_ref: Local| {
             let InFile { file_id, value } = local_ref.primary_source(sema.db).source;
             // locals defined inside macros are not relevant to us
             if !file_id.is_macro() {
@@ -823,13 +827,20 @@ impl FunctionBody {
         };
         self.walk_expr(&mut |expr| match expr {
             ast::Expr::PathExpr(path_expr) => {
-                add_name_if_local(path_expr.path().and_then(|it| it.as_single_name_ref()))
+                if let Some(local) = path_expr
+                    .path()
+                    .and_then(|it| it.as_single_name_ref())
+                    .and_then(|name_ref| local_from_name_ref(sema, name_ref))
+                {
+                    add_name_if_local(local);
+                }
             }
             ast::Expr::ClosureExpr(closure_expr) => {
                 if let Some(body) = closure_expr.body() {
                     body.syntax()
                         .descendants()
-                        .map(ast::NameRef::cast)
+                        .filter_map(ast::NameRef::cast)
+                        .filter_map(|name_ref| local_from_name_ref(sema, name_ref))
                         .for_each(&mut add_name_if_local);
                 }
             }
@@ -838,9 +849,31 @@ impl FunctionBody {
                     tt.syntax()
                         .descendants_with_tokens()
                         .filter_map(SyntaxElement::into_token)
-                        .filter(|it| matches!(it.kind(), SyntaxKind::IDENT | T![self]))
-                        .flat_map(|t| sema.descend_into_macros_exact(t))
-                        .for_each(|t| add_name_if_local(t.parent().and_then(ast::NameRef::cast)));
+                        .filter(|it| {
+                            matches!(it.kind(), SyntaxKind::STRING | SyntaxKind::IDENT | T![self])
+                        })
+                        .for_each(|t| {
+                            if ast::String::can_cast(t.kind()) {
+                                if let Some(parts) =
+                                    ast::String::cast(t).and_then(|s| sema.as_format_args_parts(&s))
+                                {
+                                    parts
+                                        .into_iter()
+                                        .filter_map(|(_, value)| value.and_then(|it| it.left()))
+                                        .filter_map(|path| match path {
+                                            PathResolution::Local(local) => Some(local),
+                                            _ => None,
+                                        })
+                                        .for_each(&mut add_name_if_local);
+                                }
+                            } else {
+                                sema.descend_into_macros_exact(t)
+                                    .into_iter()
+                                    .filter_map(|t| t.parent().and_then(ast::NameRef::cast))
+                                    .filter_map(|name_ref| local_from_name_ref(sema, name_ref))
+                                    .for_each(&mut add_name_if_local);
+                            }
+                        });
                 }
             }
             _ => (),
@@ -1428,10 +1461,10 @@ fn make_call(ctx: &AssistContext<'_>, fun: &Function, indent: IndentLevel) -> Sy
     let name = fun.name.clone();
     let mut call_expr = if fun.self_param.is_some() {
         let self_arg = make::expr_path(make::ext::ident_path("self"));
-        make::expr_method_call(self_arg, name, args)
+        make::expr_method_call(self_arg, name, args).into()
     } else {
         let func = make::expr_path(make::path_unqualified(make::path_segment(name)));
-        make::expr_call(func, args)
+        make::expr_call(func, args).into()
     };
 
     let handler = FlowHandler::from_ret_ty(fun, &ret_ty);
@@ -1689,11 +1722,7 @@ fn make_where_clause(
         })
         .peekable();
 
-    if predicates.peek().is_some() {
-        Some(make::where_clause(predicates))
-    } else {
-        None
-    }
+    if predicates.peek().is_some() { Some(make::where_clause(predicates)) } else { None }
 }
 
 fn pred_is_required(
@@ -1917,14 +1946,15 @@ fn make_body(ctx: &AssistContext<'_>, old_indent: IndentLevel, fun: &Function) -
                 };
                 let func = make::expr_path(make::ext::ident_path(constructor));
                 let args = make::arg_list(iter::once(tail_expr));
-                make::expr_call(func, args)
+                make::expr_call(func, args).into()
             })
         }
         FlowHandler::If { .. } => {
             let controlflow_continue = make::expr_call(
                 make::expr_path(make::path_from_text("ControlFlow::Continue")),
                 make::arg_list([make::ext::expr_unit()]),
-            );
+            )
+            .into();
             with_tail_expr(block, controlflow_continue)
         }
         FlowHandler::IfOption { .. } => {
@@ -1934,12 +1964,12 @@ fn make_body(ctx: &AssistContext<'_>, old_indent: IndentLevel, fun: &Function) -
         FlowHandler::MatchOption { .. } => map_tail_expr(block, |tail_expr| {
             let some = make::expr_path(make::ext::ident_path("Some"));
             let args = make::arg_list(iter::once(tail_expr));
-            make::expr_call(some, args)
+            make::expr_call(some, args).into()
         }),
         FlowHandler::MatchResult { .. } => map_tail_expr(block, |tail_expr| {
             let ok = make::expr_path(make::ext::ident_path("Ok"));
             let args = make::arg_list(iter::once(tail_expr));
-            make::expr_call(ok, args)
+            make::expr_call(ok, args).into()
         }),
     }
 }
@@ -2127,17 +2157,18 @@ fn make_rewritten_flow(handler: &FlowHandler, arg_expr: Option<ast::Expr>) -> Op
         FlowHandler::If { .. } => make::expr_call(
             make::expr_path(make::path_from_text("ControlFlow::Break")),
             make::arg_list([make::ext::expr_unit()]),
-        ),
+        )
+        .into(),
         FlowHandler::IfOption { .. } => {
             let expr = arg_expr.unwrap_or_else(make::ext::expr_unit);
             let args = make::arg_list([expr]);
-            make::expr_call(make::expr_path(make::ext::ident_path("Some")), args)
+            make::expr_call(make::expr_path(make::ext::ident_path("Some")), args).into()
         }
         FlowHandler::MatchOption { .. } => make::expr_path(make::ext::ident_path("None")),
         FlowHandler::MatchResult { .. } => {
             let expr = arg_expr.unwrap_or_else(make::ext::expr_unit);
             let args = make::arg_list([expr]);
-            make::expr_call(make::expr_path(make::ext::ident_path("Err")), args)
+            make::expr_call(make::expr_path(make::ext::ident_path("Err")), args).into()
         }
     };
     Some(make::expr_return(Some(value)).clone_for_update())
@@ -6134,6 +6165,28 @@ fn $0fun_name(a: i32, b: i32, c: i32, x: i32) -> i32 {
         );
     }
 
+    #[test]
+    fn fmt_macro_argument() {
+        check_assist(
+            extract_function,
+            r#"
+//- minicore: fmt
+fn existing(a: i32, b: i32, c: i32) {
+    $0print!("{a}{}{}", b, "{c}");$0
+}
+"#,
+            r#"
+fn existing(a: i32, b: i32, c: i32) {
+    fun_name(a, b);
+}
+
+fn $0fun_name(a: i32, b: i32) {
+    print!("{a}{}{}", b, "{c}");
+}
+"#,
+        );
+    }
+
     #[test]
     fn in_left_curly_is_not_applicable() {
         cov_mark::check!(extract_function_in_braces_is_not_applicable);
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs
index 6e3be0ce69279..b82b7984d4a45 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs
@@ -1,25 +1,26 @@
 use std::iter;
 
 use either::Either;
-use hir::{HasSource, HirFileIdExt, ModuleSource};
+use hir::{HasSource, ModuleSource};
 use ide_db::{
-    assists::{AssistId, AssistKind},
+    FileId, FxHashMap, FxHashSet,
+    assists::AssistId,
     defs::{Definition, NameClass, NameRefClass},
     search::{FileReference, SearchScope},
-    FileId, FxHashMap, FxHashSet,
 };
 use itertools::Itertools;
 use smallvec::SmallVec;
 use syntax::{
+    AstNode,
+    SyntaxKind::{self, WHITESPACE},
+    SyntaxNode, TextRange, TextSize,
     algo::find_node_at_range,
     ast::{
-        self,
+        self, HasVisibility,
         edit::{AstNodeEdit, IndentLevel},
-        make, HasVisibility,
+        make,
     },
-    match_ast, ted, AstNode,
-    SyntaxKind::{self, WHITESPACE},
-    SyntaxNode, TextRange, TextSize,
+    match_ast, ted,
 };
 
 use crate::{AssistContext, Assists};
@@ -90,7 +91,7 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
     let old_item_indent = module.body_items[0].indent_level();
 
     acc.add(
-        AssistId("extract_module", AssistKind::RefactorExtract),
+        AssistId::refactor_extract("extract_module"),
         "Extract Module",
         module.text_range,
         |builder| {
@@ -112,7 +113,7 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
             let (usages_to_be_processed, record_fields, use_stmts_to_be_inserted) =
                 module.get_usages_and_record_fields(ctx);
 
-            builder.edit_file(ctx.file_id());
+            builder.edit_file(ctx.vfs_file_id());
             use_stmts_to_be_inserted.into_iter().for_each(|(_, use_stmt)| {
                 builder.insert(ctx.selection_trimmed().end(), format!("\n{use_stmt}"));
             });
@@ -124,7 +125,7 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
 
             let mut usages_to_be_processed_for_cur_file = vec![];
             for (file_id, usages) in usages_to_be_processed {
-                if file_id == ctx.file_id() {
+                if file_id == ctx.vfs_file_id() {
                     usages_to_be_processed_for_cur_file = usages;
                     continue;
                 }
@@ -134,7 +135,7 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
                 }
             }
 
-            builder.edit_file(ctx.file_id());
+            builder.edit_file(ctx.vfs_file_id());
             for (text_range, usage) in usages_to_be_processed_for_cur_file {
                 builder.replace(text_range, usage);
             }
@@ -363,7 +364,7 @@ impl Module {
 
                 None
             });
-            refs_in_files.entry(file_id.file_id()).or_default().extend(usages);
+            refs_in_files.entry(file_id.file_id(ctx.db())).or_default().extend(usages);
         }
     }
 
@@ -457,6 +458,7 @@ impl Module {
         let selection_range = ctx.selection_trimmed();
         let file_id = ctx.file_id();
         let usage_res = def.usages(&ctx.sema).in_scope(&SearchScope::single_file(file_id)).all();
+
         let file = ctx.sema.parse(file_id);
 
         // track uses which does not exists in `Use`
@@ -483,7 +485,7 @@ impl Module {
             ctx,
             curr_parent_module,
             selection_range,
-            file_id.file_id(),
+            file_id.file_id(ctx.db()),
         );
 
         // Find use stmt that use def in current file
@@ -670,7 +672,7 @@ fn check_def_in_mod_and_out_sel(
                 let have_same_parent = if let Some(ast_module) = &curr_parent_module {
                     ctx.sema.to_module_def(ast_module).is_some_and(|it| it == $x.module(ctx.db()))
                 } else {
-                    source.file_id.original_file(ctx.db()) == curr_file_id
+                    source.file_id.original_file(ctx.db()).file_id(ctx.db()) == curr_file_id
                 };
 
                 let in_sel = !selection_range.contains_range(source.value.syntax().text_range());
@@ -686,7 +688,7 @@ fn check_def_in_mod_and_out_sel(
                 (Some(ast_module), Some(hir_module)) => {
                     ctx.sema.to_module_def(ast_module).is_some_and(|it| it == hir_module)
                 }
-                _ => source.file_id.original_file(ctx.db()) == curr_file_id,
+                _ => source.file_id.original_file(ctx.db()).file_id(ctx.db()) == curr_file_id,
             };
 
             if have_same_parent {
@@ -1159,8 +1161,8 @@ mod modname {
     }
 
     #[test]
-    fn test_extract_module_for_impl_not_having_corresponding_adt_in_selection_and_not_in_same_mod_but_with_super(
-    ) {
+    fn test_extract_module_for_impl_not_having_corresponding_adt_in_selection_and_not_in_same_mod_but_with_super()
+     {
         check_assist(
             extract_module,
             r"
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs
index d4f2ea3bd941b..b9c42285d257b 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs
@@ -3,25 +3,26 @@ use std::iter;
 use either::Either;
 use hir::{HasCrate, Module, ModuleDef, Name, Variant};
 use ide_db::{
+    FxHashSet, RootDatabase,
     defs::Definition,
     helpers::mod_path_to_ast,
-    imports::insert_use::{insert_use, ImportScope, InsertUseConfig},
+    imports::insert_use::{ImportScope, InsertUseConfig, insert_use},
     path_transform::PathTransform,
     search::FileReference,
-    FxHashSet, RootDatabase,
 };
 use itertools::Itertools;
 use syntax::{
-    ast::{
-        self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, HasAttrs, HasGenericParams,
-        HasName, HasVisibility,
-    },
-    match_ast, ted, Edition, SyntaxElement,
+    Edition, SyntaxElement,
     SyntaxKind::*,
     SyntaxNode, T,
+    ast::{
+        self, AstNode, HasAttrs, HasGenericParams, HasName, HasVisibility, edit::IndentLevel,
+        edit_in_place::Indent, make,
+    },
+    match_ast, ted,
 };
 
-use crate::{assist_context::SourceChangeBuilder, AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists, assist_context::SourceChangeBuilder};
 
 // Assist: extract_struct_from_enum_variant
 //
@@ -54,7 +55,7 @@ pub(crate) fn extract_struct_from_enum_variant(
     let enum_hir = ctx.sema.to_def(&enum_ast)?;
     let target = variant.syntax().text_range();
     acc.add(
-        AssistId("extract_struct_from_enum_variant", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("extract_struct_from_enum_variant"),
         "Extract struct from enum variant",
         target,
         |builder| {
@@ -73,7 +74,7 @@ pub(crate) fn extract_struct_from_enum_variant(
                     def_file_references = Some(references);
                     continue;
                 }
-                builder.edit_file(file_id.file_id());
+                builder.edit_file(file_id.file_id(ctx.db()));
                 let processed = process_references(
                     ctx,
                     builder,
@@ -86,7 +87,7 @@ pub(crate) fn extract_struct_from_enum_variant(
                     apply_references(ctx.config.insert_use, path, node, import, edition)
                 });
             }
-            builder.edit_file(ctx.file_id());
+            builder.edit_file(ctx.vfs_file_id());
 
             let variant = builder.make_mut(variant.clone());
             if let Some(references) = def_file_references {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs
index 67b8f5e505031..d843ac64567aa 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs
@@ -1,11 +1,11 @@
 use either::Either;
 use ide_db::syntax_helpers::node_ext::walk_ty;
 use syntax::{
-    ast::{self, edit::IndentLevel, make, AstNode, HasGenericArgs, HasGenericParams, HasName},
+    ast::{self, AstNode, HasGenericArgs, HasGenericParams, HasName, edit::IndentLevel, make},
     syntax_editor,
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: extract_type_alias
 //
@@ -40,7 +40,7 @@ pub(crate) fn extract_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) ->
     let target = ty.syntax().text_range();
 
     acc.add(
-        AssistId("extract_type_alias", AssistKind::RefactorExtract),
+        AssistId::refactor_extract("extract_type_alias"),
         "Extract type as type alias",
         target,
         |builder| {
@@ -87,7 +87,7 @@ pub(crate) fn extract_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) ->
                 ],
             );
 
-            builder.add_file_edits(ctx.file_id(), edit);
+            builder.add_file_edits(ctx.vfs_file_id(), edit);
         },
     )
 }
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs
index 7b6f76d00452e..3971b60f25323 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs
@@ -1,19 +1,19 @@
 use hir::{HirDisplay, TypeInfo};
 use ide_db::{
     assists::GroupLabel,
-    syntax_helpers::{suggest_name, LexedStr},
+    syntax_helpers::{LexedStr, suggest_name},
 };
 use syntax::{
+    NodeOrToken, SyntaxKind, SyntaxNode, T,
     algo::ancestors_at_offset,
     ast::{
-        self, edit::IndentLevel, edit_in_place::Indent, make, syntax_factory::SyntaxFactory,
-        AstNode,
+        self, AstNode, edit::IndentLevel, edit_in_place::Indent, make,
+        syntax_factory::SyntaxFactory,
     },
     syntax_editor::Position,
-    NodeOrToken, SyntaxKind, SyntaxNode, T,
 };
 
-use crate::{utils::is_body_const, AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists, utils::is_body_const};
 
 // Assist: extract_variable
 //
@@ -170,7 +170,7 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
             |edit| {
                 let (var_name, expr_replace) = kind.get_name_and_expr(ctx, &to_extract);
 
-                let make = SyntaxFactory::new();
+                let make = SyntaxFactory::with_mappings();
                 let mut editor = edit.make_editor(&expr_replace);
 
                 let pat_name = make.name(&var_name);
@@ -263,7 +263,7 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
                 }
 
                 editor.add_mappings(make.finish_with_mappings());
-                edit.add_file_edits(ctx.file_id(), editor);
+                edit.add_file_edits(ctx.vfs_file_id(), editor);
                 edit.rename();
             },
         );
@@ -311,7 +311,7 @@ impl ExtractionKind {
             ExtractionKind::Static => "extract_static",
         };
 
-        AssistId(s, AssistKind::RefactorExtract)
+        AssistId::refactor_extract(s)
     }
 
     fn label(&self) -> &'static str {
@@ -378,7 +378,7 @@ fn get_literal_name(ctx: &AssistContext<'_>, expr: &ast::Expr) -> Option<String>
         return None;
     }
 
-    match LexedStr::single_token(ctx.file_id().edition(), &inner) {
+    match LexedStr::single_token(ctx.edition(), &inner) {
         Some((SyntaxKind::IDENT, None)) => Some(inner),
         _ => None,
     }
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs
index 47e4a68293f0c..19e0a73f33356 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs
@@ -1,13 +1,11 @@
-use hir::{
-    db::HirDatabase, HasSource, HasVisibility, HirFileIdExt, ModuleDef, PathResolution, ScopeDef,
-};
+use hir::{HasSource, HasVisibility, ModuleDef, PathResolution, ScopeDef, db::HirDatabase};
 use ide_db::FileId;
 use syntax::{
-    ast::{self, edit_in_place::HasVisibilityEdit, make, HasVisibility as _},
     AstNode, TextRange,
+    ast::{self, HasVisibility as _, edit_in_place::HasVisibilityEdit, make},
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // FIXME: this really should be a fix for diagnostic, rather than an assist.
 
@@ -78,7 +76,7 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext<'_>)
         }
     };
 
-    acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |edit| {
+    acc.add(AssistId::quick_fix("fix_visibility"), assist_label, target, |edit| {
         edit.edit_file(target_file);
 
         let vis_owner = edit.make_mut(vis_owner);
@@ -131,8 +129,8 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_>
         target_name.display(ctx.db(), current_edition)
     );
 
-    acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |edit| {
-        edit.edit_file(target_file.file_id());
+    acc.add(AssistId::quick_fix("fix_visibility"), assist_label, target, |edit| {
+        edit.edit_file(target_file.file_id(ctx.db()));
 
         let vis_owner = edit.make_mut(vis_owner);
         vis_owner.set_visibility(Some(missing_visibility.clone_for_update()));
@@ -162,7 +160,7 @@ fn target_data_for_def(
         Some((
             ast::AnyHasVisibility::new(source.value),
             range,
-            file_id.original_file(db.upcast()).file_id(),
+            file_id.original_file(db).file_id(db),
         ))
     }
 
@@ -203,9 +201,9 @@ fn target_data_for_def(
         hir::ModuleDef::Module(m) => {
             target_name = m.name(db);
             let in_file_source = m.declaration_source(db)?;
-            let file_id = in_file_source.file_id.original_file(db.upcast());
+            let file_id = in_file_source.file_id.original_file(db);
             let range = in_file_source.value.syntax().text_range();
-            (ast::AnyHasVisibility::new(in_file_source.value), range, file_id.file_id())
+            (ast::AnyHasVisibility::new(in_file_source.value), range, file_id.file_id(db))
         }
         // FIXME
         hir::ModuleDef::Macro(_) => return None,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_binexpr.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_binexpr.rs
index 818a868fe3449..247e8109abc9d 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_binexpr.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_binexpr.rs
@@ -1,9 +1,9 @@
 use syntax::{
-    ast::{self, syntax_factory::SyntaxFactory, AstNode, BinExpr},
     SyntaxKind, T,
+    ast::{self, AstNode, BinExpr, syntax_factory::SyntaxFactory},
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: flip_binexpr
 //
@@ -43,19 +43,19 @@ pub(crate) fn flip_binexpr(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
     }
 
     acc.add(
-        AssistId("flip_binexpr", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("flip_binexpr"),
         "Flip binary expression",
         op_token.text_range(),
         |builder| {
             let mut editor = builder.make_editor(&expr.syntax().parent().unwrap());
-            let make = SyntaxFactory::new();
+            let make = SyntaxFactory::with_mappings();
             if let FlipAction::FlipAndReplaceOp(binary_op) = action {
                 editor.replace(op_token, make.token(binary_op))
             };
             editor.replace(lhs.syntax(), rhs.syntax());
             editor.replace(rhs.syntax(), lhs.syntax());
             editor.add_mappings(make.finish_with_mappings());
-            builder.add_file_edits(ctx.file_id(), editor);
+            builder.add_file_edits(ctx.vfs_file_id(), editor);
         },
     )
 }
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_comma.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_comma.rs
index dd27269b001c6..1e95d4772349e 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_comma.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_comma.rs
@@ -1,11 +1,11 @@
 use syntax::{
+    AstNode, Direction, NodeOrToken, SyntaxKind, SyntaxToken, T,
     algo::non_trivia_sibling,
     ast::{self, syntax_factory::SyntaxFactory},
     syntax_editor::SyntaxMapping,
-    AstNode, Direction, NodeOrToken, SyntaxKind, SyntaxToken, T,
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: flip_comma
 //
@@ -40,7 +40,7 @@ pub(crate) fn flip_comma(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
     }
 
     let target = comma.text_range();
-    acc.add(AssistId("flip_comma", AssistKind::RefactorRewrite), "Flip comma", target, |builder| {
+    acc.add(AssistId::refactor_rewrite("flip_comma"), "Flip comma", target, |builder| {
         let parent = comma.parent().unwrap();
         let mut editor = builder.make_editor(&parent);
 
@@ -55,7 +55,7 @@ pub(crate) fn flip_comma(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
             editor.replace(next.clone(), prev.clone());
         }
 
-        builder.add_file_edits(ctx.file_id(), editor);
+        builder.add_file_edits(ctx.vfs_file_id(), editor);
     })
 }
 
@@ -101,7 +101,7 @@ fn flip_tree(tree: ast::TokenTree, comma: SyntaxToken) -> (ast::TokenTree, Synta
     ]
     .concat();
 
-    let make = SyntaxFactory::new();
+    let make = SyntaxFactory::with_mappings();
     let new_token_tree = make.token_tree(tree.left_delimiter_token().unwrap().kind(), result);
     (new_token_tree, make.finish_with_mappings())
 }
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_or_pattern.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_or_pattern.rs
index d9fa03e7191b3..4829f5bec206b 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_or_pattern.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_or_pattern.rs
@@ -1,10 +1,10 @@
 use syntax::{
+    Direction, T,
     algo::non_trivia_sibling,
     ast::{self, AstNode},
-    Direction, T,
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: flip_or_pattern
 //
@@ -31,17 +31,12 @@ pub(crate) fn flip_or_pattern(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt
     let after = non_trivia_sibling(pipe.clone().into(), Direction::Next)?.into_node()?;
 
     let target = pipe.text_range();
-    acc.add(
-        AssistId("flip_or_pattern", AssistKind::RefactorRewrite),
-        "Flip patterns",
-        target,
-        |builder| {
-            let mut editor = builder.make_editor(parent.syntax());
-            editor.replace(before.clone(), after.clone());
-            editor.replace(after, before);
-            builder.add_file_edits(ctx.file_id(), editor);
-        },
-    )
+    acc.add(AssistId::refactor_rewrite("flip_or_pattern"), "Flip patterns", target, |builder| {
+        let mut editor = builder.make_editor(parent.syntax());
+        editor.replace(before.clone(), after.clone());
+        editor.replace(after, before);
+        builder.add_file_edits(ctx.vfs_file_id(), editor);
+    })
 }
 
 #[cfg(test)]
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_trait_bound.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_trait_bound.rs
index 3528f5e81324d..9756268c7cc33 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_trait_bound.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_trait_bound.rs
@@ -1,10 +1,10 @@
 use syntax::{
+    Direction, T,
     algo::non_trivia_sibling,
     ast::{self, AstNode},
-    Direction, T,
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: flip_trait_bound
 //
@@ -29,14 +29,14 @@ pub(crate) fn flip_trait_bound(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
 
     let target = plus.text_range();
     acc.add(
-        AssistId("flip_trait_bound", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("flip_trait_bound"),
         "Flip trait bounds",
         target,
         |builder| {
             let mut editor = builder.make_editor(parent.syntax());
             editor.replace(before.clone(), after.clone());
             editor.replace(after, before);
-            builder.add_file_edits(ctx.file_id(), editor);
+            builder.add_file_edits(ctx.vfs_file_id(), editor);
         },
     )
 }
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs
index 7f7db07152d34..fce0ce399463c 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs
@@ -1,14 +1,13 @@
 use crate::assist_context::{AssistContext, Assists};
-use hir::{HasVisibility, HirDisplay, HirFileIdExt, Module};
+use hir::{HasVisibility, HirDisplay, Module};
 use ide_db::{
-    assists::{AssistId, AssistKind},
-    base_db::Upcast,
-    defs::{Definition, NameRefClass},
     FileId,
+    assists::AssistId,
+    defs::{Definition, NameRefClass},
 };
 use syntax::{
-    ast::{self, edit::IndentLevel, NameRef},
     AstNode, Direction, SyntaxKind, TextSize,
+    ast::{self, NameRef, edit::IndentLevel},
 };
 
 // Assist: generate_constant
@@ -88,17 +87,12 @@ pub(crate) fn generate_constant(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
         );
 
     let text = get_text_for_generate_constant(not_exist_name_ref, indent, outer_exists, type_name)?;
-    acc.add(
-        AssistId("generate_constant", AssistKind::QuickFix),
-        "Generate constant",
-        target,
-        |builder| {
-            if let Some(file_id) = file_id {
-                builder.edit_file(file_id);
-            }
-            builder.insert(offset, format!("{text}{post_string}"));
-        },
-    )
+    acc.add(AssistId::quick_fix("generate_constant"), "Generate constant", target, |builder| {
+        if let Some(file_id) = file_id {
+            builder.edit_file(file_id);
+        }
+        builder.insert(offset, format!("{text}{post_string}"));
+    })
 }
 
 fn get_text_for_generate_constant(
@@ -128,7 +122,7 @@ fn target_data_for_generate_constant(
         return None;
     }
     let in_file_source = current_module.definition_source(ctx.sema.db);
-    let file_id = in_file_source.file_id.original_file(ctx.sema.db.upcast());
+    let file_id = in_file_source.file_id.original_file(ctx.sema.db);
     match in_file_source.value {
         hir::ModuleSource::Module(module_node) => {
             let indent = IndentLevel::from_node(module_node.syntax());
@@ -140,9 +134,9 @@ fn target_data_for_generate_constant(
                 .any(|it| it.kind() == SyntaxKind::WHITESPACE && it.to_string().contains('\n'));
             let post_string =
                 if siblings_has_newline { format!("{indent}") } else { format!("\n{indent}") };
-            Some((offset, indent + 1, Some(file_id.file_id()), post_string))
+            Some((offset, indent + 1, Some(file_id.file_id(ctx.db())), post_string))
         }
-        _ => Some((TextSize::from(0), 0.into(), Some(file_id.file_id()), "\n".into())),
+        _ => Some((TextSize::from(0), 0.into(), Some(file_id.file_id(ctx.db())), "\n".into())),
     }
 }
 
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_enum_variant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_enum_variant.rs
index a6e3d49e0d1ae..6198dbc4ed99b 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_enum_variant.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_enum_variant.rs
@@ -1,7 +1,7 @@
-use ide_db::{famous_defs::FamousDefs, RootDatabase};
+use ide_db::{RootDatabase, famous_defs::FamousDefs};
 use syntax::ast::{self, AstNode, HasName};
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: generate_default_from_enum_variant
 //
@@ -47,7 +47,7 @@ pub(crate) fn generate_default_from_enum_variant(
 
     let target = variant.syntax().text_range();
     acc.add(
-        AssistId("generate_default_from_enum_variant", AssistKind::Generate),
+        AssistId::generate("generate_default_from_enum_variant"),
         "Generate `Default` impl from this enum variant",
         target,
         |edit| {
@@ -77,11 +77,7 @@ fn existing_default_impl(
     let default_trait = FamousDefs(sema, krate).core_default_Default()?;
     let enum_type = enum_.ty(sema.db);
 
-    if enum_type.impls_trait(sema.db, default_trait, &[]) {
-        Some(())
-    } else {
-        None
-    }
+    if enum_type.impls_trait(sema.db, default_trait, &[]) { Some(()) } else { None }
 }
 
 #[cfg(test)]
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs
index dc27af5cbed20..79a78ab3698b8 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs
@@ -1,13 +1,13 @@
 use ide_db::famous_defs::FamousDefs;
 use stdx::format_to;
 use syntax::{
-    ast::{self, make, HasGenericParams, HasName, Impl},
     AstNode,
+    ast::{self, HasGenericParams, HasName, Impl, make},
 };
 
 use crate::{
-    assist_context::{AssistContext, Assists},
     AssistId,
+    assist_context::{AssistContext, Assists},
 };
 
 // Assist: generate_default_from_new
@@ -65,7 +65,7 @@ pub(crate) fn generate_default_from_new(acc: &mut Assists, ctx: &AssistContext<'
     let insert_location = impl_.syntax().text_range();
 
     acc.add(
-        AssistId("generate_default_from_new", crate::AssistKind::Generate),
+        AssistId::generate("generate_default_from_new"),
         "Generate a Default impl from a new fn",
         insert_location,
         move |builder| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs
index 220259451e860..ca66cb69dcc05 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs
@@ -1,15 +1,15 @@
 use hir::{HasCrate, HasVisibility};
-use ide_db::{path_transform::PathTransform, FxHashSet};
+use ide_db::{FxHashSet, path_transform::PathTransform};
 use syntax::{
     ast::{
-        self, edit_in_place::Indent, make, AstNode, HasGenericParams, HasName, HasVisibility as _,
+        self, AstNode, HasGenericParams, HasName, HasVisibility as _, edit_in_place::Indent, make,
     },
     ted,
 };
 
 use crate::{
-    utils::{convert_param_list_to_arg_list, find_struct_impl},
     AssistContext, AssistId, AssistKind, Assists, GroupLabel,
+    utils::{convert_param_list_to_arg_list, find_struct_impl},
 };
 
 // Assist: generate_delegate_methods
@@ -92,19 +92,18 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
         });
     }
     methods.sort_by(|(a, _), (b, _)| a.cmp(b));
-    for (name, method) in methods {
+    for (index, (name, method)) in methods.into_iter().enumerate() {
         let adt = ast::Adt::Struct(strukt.clone());
         let name = name.display(ctx.db(), current_edition).to_string();
         // if `find_struct_impl` returns None, that means that a function named `name` already exists.
         let Some(impl_def) = find_struct_impl(ctx, &adt, std::slice::from_ref(&name)) else {
             continue;
         };
-
         let field = make::ext::field_from_idents(["self", &field_name])?;
 
         acc.add_group(
             &GroupLabel("Generate delegate methods…".to_owned()),
-            AssistId("generate_delegate_methods", AssistKind::Generate),
+            AssistId("generate_delegate_methods", AssistKind::Generate, Some(index)),
             format!("Generate delegate for `{field_name}.{name}()`",),
             target,
             |edit| {
@@ -141,7 +140,8 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
                     .map(convert_param_list_to_arg_list)
                     .unwrap_or_else(|| make::arg_list([]));
 
-                let tail_expr = make::expr_method_call(field, make::name_ref(&name), arg_list);
+                let tail_expr =
+                    make::expr_method_call(field, make::name_ref(&name), arg_list).into();
                 let tail_expr_finished =
                     if is_async { make::expr_await(tail_expr) } else { tail_expr };
                 let body = make::block_expr([], Some(tail_expr_finished));
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs
index 55b860d0ff545..848c63810a4b0 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs
@@ -5,25 +5,25 @@ use crate::{
     utils::convert_param_list_to_arg_list,
 };
 use either::Either;
-use hir::{db::HirDatabase, HasVisibility};
+use hir::{HasVisibility, db::HirDatabase};
 use ide_db::{
+    FxHashMap, FxHashSet,
     assists::{AssistId, GroupLabel},
     path_transform::PathTransform,
     syntax_helpers::suggest_name,
-    FxHashMap, FxHashSet,
 };
 use itertools::Itertools;
 use syntax::{
+    AstNode, Edition, NodeOrToken, SmolStr, SyntaxKind, ToSmolStr,
     ast::{
-        self,
-        edit::{self, AstNodeEdit},
-        edit_in_place::AttrsOwnerEdit,
-        make, AssocItem, GenericArgList, GenericParamList, HasAttrs, HasGenericArgs,
+        self, AssocItem, GenericArgList, GenericParamList, HasAttrs, HasGenericArgs,
         HasGenericParams, HasName, HasTypeBounds, HasVisibility as astHasVisibility, Path,
         WherePred,
+        edit::{self, AstNodeEdit},
+        edit_in_place::AttrsOwnerEdit,
+        make,
     },
     ted::{self, Position},
-    AstNode, Edition, NodeOrToken, SmolStr, SyntaxKind, ToSmolStr,
 };
 
 // Assist: generate_delegate_trait
@@ -124,7 +124,7 @@ impl Field {
     ) -> Option<Field> {
         let db = ctx.sema.db;
 
-        let module = ctx.sema.file_to_module_def(ctx.file_id())?;
+        let module = ctx.sema.file_to_module_def(ctx.vfs_file_id())?;
         let edition = module.krate().edition(ctx.db());
 
         let (name, range, ty) = match f {
@@ -201,7 +201,7 @@ impl Struct {
     pub(crate) fn delegate(&self, field: Field, acc: &mut Assists, ctx: &AssistContext<'_>) {
         let db = ctx.db();
 
-        for delegee in &field.impls {
+        for (index, delegee) in field.impls.iter().enumerate() {
             let trait_ = match delegee {
                 Delegee::Bound(b) => b,
                 Delegee::Impls(i, _) => i,
@@ -229,7 +229,11 @@ impl Struct {
 
             acc.add_group(
                 &GroupLabel(format!("Generate delegate trait impls for field `{}`", field.name)),
-                AssistId("generate_delegate_trait", ide_db::assists::AssistKind::Generate),
+                AssistId(
+                    "generate_delegate_trait",
+                    ide_db::assists::AssistKind::Generate,
+                    Some(index),
+                ),
                 format!("Generate delegate trait impl `{}` for `{}`", signature, field.name),
                 field.range,
                 |builder| {
@@ -747,7 +751,7 @@ fn func_assoc_item(
     }
     .clone_for_update();
 
-    let body = make::block_expr(vec![], Some(call)).clone_for_update();
+    let body = make::block_expr(vec![], Some(call.into())).clone_for_update();
     let func = make::fn_(
         item.visibility(),
         item.name()?,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs
index e558bb6da89bc..c7b97dcd231d1 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs
@@ -1,16 +1,16 @@
 use std::fmt::Display;
 
 use hir::{ModPath, ModuleDef};
-use ide_db::{famous_defs::FamousDefs, RootDatabase};
+use ide_db::{RootDatabase, famous_defs::FamousDefs};
 use syntax::{
-    ast::{self, HasName},
     AstNode, Edition, SyntaxNode,
+    ast::{self, HasName},
 };
 
 use crate::{
+    AssistId,
     assist_context::{AssistContext, Assists, SourceChangeBuilder},
     utils::generate_trait_impl_text,
-    AssistId, AssistKind,
 };
 
 // Assist: generate_deref
@@ -65,7 +65,7 @@ fn generate_record_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
     let field_name = field.name()?;
     let target = field.syntax().text_range();
     acc.add(
-        AssistId("generate_deref", AssistKind::Generate),
+        AssistId::generate("generate_deref"),
         format!("Generate `{deref_type_to_generate:?}` impl using `{field_name}`"),
         target,
         |edit| {
@@ -106,7 +106,7 @@ fn generate_tuple_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()
     let field_type = field.ty()?;
     let target = field.syntax().text_range();
     acc.add(
-        AssistId("generate_deref", AssistKind::Generate),
+        AssistId::generate("generate_deref"),
         format!("Generate `{deref_type_to_generate:?}` impl using `{field}`"),
         target,
         |edit| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs
index 53ba144ba9e3b..73a69c82fbcdd 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs
@@ -1,9 +1,9 @@
 use syntax::{
-    ast::{self, edit_in_place::AttrsOwnerEdit, make, AstNode, HasAttrs},
     T,
+    ast::{self, AstNode, HasAttrs, edit_in_place::AttrsOwnerEdit, make},
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: generate_derive
 //
@@ -39,7 +39,7 @@ pub(crate) fn generate_derive(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt
         Some(tt) => Some(tt.right_delimiter_token()?),
     };
 
-    acc.add(AssistId("generate_derive", AssistKind::Generate), "Add `#[derive]`", target, |edit| {
+    acc.add(AssistId::generate("generate_derive"), "Add `#[derive]`", target, |edit| {
         match derive_attr {
             None => {
                 let derive = make::attr_outer(make::meta_token_tree(
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs
index 862be791d1737..d4d1b3490cb64 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs
@@ -1,11 +1,12 @@
 use hir::{AsAssocItem, HasVisibility, ModuleDef, Visibility};
-use ide_db::assists::{AssistId, AssistKind};
+use ide_db::assists::AssistId;
 use itertools::Itertools;
 use stdx::{format_to, to_lower_snake_case};
 use syntax::{
+    AstNode, AstToken, Edition,
     algo::skip_whitespace_token,
-    ast::{self, edit::IndentLevel, HasDocComments, HasGenericArgs, HasName},
-    match_ast, AstNode, AstToken, Edition,
+    ast::{self, HasDocComments, HasGenericArgs, HasName, edit::IndentLevel},
+    match_ast,
 };
 
 use crate::assist_context::{AssistContext, Assists};
@@ -55,7 +56,7 @@ pub(crate) fn generate_documentation_template(
     let indent_level = IndentLevel::from_node(parent_syntax);
 
     acc.add(
-        AssistId("generate_documentation_template", AssistKind::Generate),
+        AssistId::generate("generate_documentation_template"),
         "Generate a documentation template",
         text_range,
         |builder| {
@@ -114,7 +115,7 @@ pub(crate) fn generate_doc_example(acc: &mut Assists, ctx: &AssistContext<'_>) -
     let indent_level = IndentLevel::from_node(&node);
 
     acc.add(
-        AssistId("generate_doc_example", AssistKind::Generate),
+        AssistId::generate("generate_doc_example"),
         "Generate a documentation example",
         node.text_range(),
         |builder| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_is_method.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_is_method.rs
index b5d3ed4369708..3e6d0bec68a6f 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_is_method.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_is_method.rs
@@ -4,8 +4,8 @@ use syntax::ast::HasVisibility;
 use syntax::ast::{self, AstNode, HasName};
 
 use crate::{
+    AssistContext, AssistId, Assists,
     utils::{add_method_to_adt, find_struct_impl},
-    AssistContext, AssistId, AssistKind, Assists,
 };
 
 // Assist: generate_enum_is_method
@@ -57,7 +57,7 @@ pub(crate) fn generate_enum_is_method(acc: &mut Assists, ctx: &AssistContext<'_>
     let target = variant.syntax().text_range();
     acc.add_group(
         &GroupLabel("Generate an `is_`,`as_`, or `try_into_` for this enum variant".to_owned()),
-        AssistId("generate_enum_is_method", AssistKind::Generate),
+        AssistId::generate("generate_enum_is_method"),
         "Generate an `is_` method for this enum variant",
         target,
         |builder| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_projection_method.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_projection_method.rs
index ee643ce9a4ac3..3974bcf618756 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_projection_method.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_projection_method.rs
@@ -5,8 +5,8 @@ use syntax::ast::HasVisibility;
 use syntax::ast::{self, AstNode, HasName};
 
 use crate::{
+    AssistContext, AssistId, Assists,
     utils::{add_method_to_adt, find_struct_impl},
-    AssistContext, AssistId, AssistKind, Assists,
 };
 
 // Assist: generate_enum_try_into_method
@@ -153,7 +153,7 @@ fn generate_enum_projection_method(
     let target = variant.syntax().text_range();
     acc.add_group(
         &GroupLabel("Generate an `is_`,`as_`, or `try_into_` for this enum variant".to_owned()),
-        AssistId(assist_id, AssistKind::Generate),
+        AssistId::generate(assist_id),
         assist_description,
         target,
         |builder| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs
index bb08cb904ead7..3514ebb811ee2 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs
@@ -1,8 +1,9 @@
 use hir::{HasSource, HirDisplay, InRealFile};
-use ide_db::assists::{AssistId, AssistKind};
+use ide_db::assists::AssistId;
 use syntax::{
-    ast::{self, syntax_factory::SyntaxFactory, HasArgList},
-    match_ast, AstNode, SyntaxNode,
+    AstNode, SyntaxNode,
+    ast::{self, HasArgList, syntax_factory::SyntaxFactory},
+    match_ast,
 };
 
 use crate::assist_context::{AssistContext, Assists};
@@ -57,21 +58,16 @@ pub(crate) fn generate_enum_variant(acc: &mut Assists, ctx: &AssistContext<'_>)
     let db = ctx.db();
     let InRealFile { file_id, value: enum_node } = e.source(db)?.original_ast_node_rooted(db)?;
 
-    acc.add(
-        AssistId("generate_enum_variant", AssistKind::Generate),
-        "Generate variant",
-        target,
-        |builder| {
-            let mut editor = builder.make_editor(enum_node.syntax());
-            let make = SyntaxFactory::new();
-            let field_list = parent.make_field_list(ctx, &make);
-            let variant = make.variant(None, make.name(&name_ref.text()), field_list, None);
-            if let Some(it) = enum_node.variant_list() {
-                it.add_variant(&mut editor, &variant);
-            }
-            builder.add_file_edits(file_id, editor);
-        },
-    )
+    acc.add(AssistId::generate("generate_enum_variant"), "Generate variant", target, |builder| {
+        let mut editor = builder.make_editor(enum_node.syntax());
+        let make = SyntaxFactory::with_mappings();
+        let field_list = parent.make_field_list(ctx, &make);
+        let variant = make.variant(None, make.name(&name_ref.text()), field_list, None);
+        if let Some(it) = enum_node.variant_list() {
+            it.add_variant(&mut editor, &variant);
+        }
+        builder.add_file_edits(file_id.file_id(ctx.db()), editor);
+    })
 }
 
 #[derive(Debug)]
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_fn_type_alias.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_fn_type_alias.rs
index 9d01ec00f836c..b63baa696d9ae 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_fn_type_alias.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_fn_type_alias.rs
@@ -1,8 +1,9 @@
 use either::Either;
-use ide_db::assists::{AssistId, AssistKind, GroupLabel};
+use ide_db::assists::{AssistId, GroupLabel};
 use syntax::{
-    ast::{self, edit::IndentLevel, make, HasGenericParams, HasName},
-    syntax_editor, AstNode,
+    AstNode,
+    ast::{self, HasGenericParams, HasName, edit::IndentLevel, make},
+    syntax_editor,
 };
 
 use crate::{AssistContext, Assists};
@@ -116,7 +117,7 @@ pub(crate) fn generate_fn_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>)
                     }
                 }
 
-                builder.add_file_edits(ctx.file_id(), edit);
+                builder.add_file_edits(ctx.vfs_file_id(), edit);
             },
         );
     }
@@ -138,7 +139,7 @@ impl ParamStyle {
             ParamStyle::Unnamed => "generate_fn_type_alias_unnamed",
         };
 
-        AssistId(s, AssistKind::Generate)
+        AssistId::generate(s)
     }
 
     fn label(&self) -> &'static str {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs
index 6091f06b96699..af949a0649899 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs
@@ -1,9 +1,7 @@
-use ide_db::{famous_defs::FamousDefs, RootDatabase};
+use ide_db::{RootDatabase, famous_defs::FamousDefs};
 use syntax::ast::{self, AstNode, HasName};
 
-use crate::{
-    utils::generate_trait_impl_text_intransitive, AssistContext, AssistId, AssistKind, Assists,
-};
+use crate::{AssistContext, AssistId, Assists, utils::generate_trait_impl_text_intransitive};
 
 // Assist: generate_from_impl_for_enum
 //
@@ -53,7 +51,7 @@ pub(crate) fn generate_from_impl_for_enum(
 
     let target = variant.syntax().text_range();
     acc.add(
-        AssistId("generate_from_impl_for_enum", AssistKind::Generate),
+        AssistId::generate("generate_from_impl_for_enum"),
         "Generate `From` impl for this enum variant",
         target,
         |edit| {
@@ -92,11 +90,7 @@ fn existing_from_impl(
 
     let wrapped_type = variant.fields(sema.db).first()?.ty(sema.db);
 
-    if enum_type.impls_trait(sema.db, from_trait, &[wrapped_type]) {
-        Some(())
-    } else {
-        None
-    }
+    if enum_type.impls_trait(sema.db, from_trait, &[wrapped_type]) { Some(()) } else { None }
 }
 
 #[cfg(test)]
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs
index 7af2a2e1e6a33..824380253ae50 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs
@@ -1,28 +1,29 @@
 use hir::{
-    Adt, AsAssocItem, HasSource, HirDisplay, HirFileIdExt, Module, PathResolution, Semantics,
-    StructKind, Type, TypeInfo,
+    Adt, AsAssocItem, HasSource, HirDisplay, Module, PathResolution, Semantics, StructKind, Type,
+    TypeInfo,
 };
 use ide_db::{
+    FileId, FxHashMap, FxHashSet, RootDatabase, SnippetCap,
     defs::{Definition, NameRefClass},
     famous_defs::FamousDefs,
     helpers::is_editable_crate,
     path_transform::PathTransform,
     source_change::SourceChangeBuilder,
-    FileId, FxHashMap, FxHashSet, RootDatabase, SnippetCap,
 };
 use itertools::Itertools;
 use stdx::to_lower_snake_case;
 use syntax::{
+    Edition, SyntaxKind, SyntaxNode, T, TextRange,
     ast::{
-        self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, BlockExpr, CallExpr,
-        HasArgList, HasGenericParams, HasModuleItem, HasTypeBounds,
+        self, AstNode, BlockExpr, CallExpr, HasArgList, HasGenericParams, HasModuleItem,
+        HasTypeBounds, edit::IndentLevel, edit_in_place::Indent, make,
     },
-    ted, Edition, SyntaxKind, SyntaxNode, TextRange, T,
+    ted,
 };
 
 use crate::{
+    AssistContext, AssistId, Assists,
     utils::{convert_reference_type, find_struct_impl},
-    AssistContext, AssistId, AssistKind, Assists,
 };
 
 // Assist: generate_function
@@ -171,16 +172,15 @@ fn add_func_to_accumulator(
     adt_info: Option<AdtInfo>,
     label: String,
 ) -> Option<()> {
-    acc.add(AssistId("generate_function", AssistKind::Generate), label, text_range, |edit| {
+    acc.add(AssistId::generate("generate_function"), label, text_range, |edit| {
         edit.edit_file(file);
 
         let target = function_builder.target.clone();
         let edition = function_builder.target_edition;
         let func = function_builder.render(ctx.config.snippet_cap, edit);
 
-        if let Some(adt) =
-            adt_info
-                .and_then(|adt_info| if adt_info.impl_exists { None } else { Some(adt_info.adt) })
+        if let Some(adt) = adt_info
+            .and_then(|adt_info| if adt_info.impl_exists { None } else { Some(adt_info.adt) })
         {
             let name = make::ty_path(make::ext::ident_path(&format!(
                 "{}",
@@ -205,11 +205,12 @@ fn get_adt_source(
     fn_name: &str,
 ) -> Option<(Option<ast::Impl>, FileId)> {
     let range = adt.source(ctx.sema.db)?.syntax().original_file_range_rooted(ctx.sema.db);
+
     let file = ctx.sema.parse(range.file_id);
     let adt_source =
         ctx.sema.find_node_at_offset_with_macros(file.syntax(), range.range.start())?;
     find_struct_impl(ctx, &adt_source, &[fn_name.to_owned()])
-        .map(|impl_| (impl_, range.file_id.file_id()))
+        .map(|impl_| (impl_, range.file_id.file_id(ctx.db())))
 }
 
 struct FunctionBuilder {
@@ -470,7 +471,7 @@ fn make_fn_body_as_new_function(
                     .map(|_| placeholder_expr.clone())
                     .collect::<Vec<_>>();
 
-                make::expr_call(make::expr_path(path_self), make::arg_list(args))
+                make::expr_call(make::expr_path(path_self), make::arg_list(args)).into()
             }
             StructKind::Unit => make::expr_path(path_self),
         }
@@ -496,7 +497,7 @@ fn get_fn_target(
     target_module: Option<Module>,
     call: CallExpr,
 ) -> Option<(GeneratedFunctionTarget, FileId)> {
-    let mut file = ctx.file_id().into();
+    let mut file = ctx.vfs_file_id();
     let target = match target_module {
         Some(target_module) => {
             let (in_file, target) = next_space_for_fn_in_module(ctx.db(), target_module);
@@ -1161,7 +1162,7 @@ fn next_space_for_fn_in_module(
     target_module: hir::Module,
 ) -> (FileId, GeneratedFunctionTarget) {
     let module_source = target_module.definition_source(db);
-    let file = module_source.file_id.original_file(db.upcast());
+    let file = module_source.file_id.original_file(db);
     let assist_item = match &module_source.value {
         hir::ModuleSource::SourceFile(it) => match it.items().last() {
             Some(last_item) => GeneratedFunctionTarget::AfterItem(last_item.syntax().clone()),
@@ -1186,7 +1187,7 @@ fn next_space_for_fn_in_module(
         }
     };
 
-    (file.file_id(), assist_item)
+    (file.file_id(db), assist_item)
 }
 
 #[derive(Clone, Copy)]
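
The `AssistId("generate_function", AssistKind::Generate)` to `AssistId::generate("generate_function")` rewrite above repeats throughout this diff. Below is a minimal sketch of the constructor-helper shape it implies; the types are simplified stand-ins, not the real `ide_db::assists` definitions.

#[derive(Clone, Copy, Debug)]
#[allow(dead_code)]
enum AssistKind {
    Generate,
    Refactor,
    RefactorExtract,
    RefactorInline,
    RefactorRewrite,
}

#[derive(Debug)]
struct AssistId(&'static str, AssistKind);

#[allow(dead_code)]
impl AssistId {
    // Each helper bakes the kind in, so call sites no longer import `AssistKind`.
    fn generate(id: &'static str) -> Self { AssistId(id, AssistKind::Generate) }
    fn refactor(id: &'static str) -> Self { AssistId(id, AssistKind::Refactor) }
    fn refactor_extract(id: &'static str) -> Self { AssistId(id, AssistKind::RefactorExtract) }
    fn refactor_inline(id: &'static str) -> Self { AssistId(id, AssistKind::RefactorInline) }
    fn refactor_rewrite(id: &'static str) -> Self { AssistId(id, AssistKind::RefactorRewrite) }
}

fn main() {
    // Before: AssistId("generate_function", AssistKind::Generate)
    // After:  the kind is implied by the constructor name.
    println!("{:?}", AssistId::generate("generate_function"));
    println!("{:?}", AssistId::refactor_inline("inline_call"));
}

The practical effect at call sites is that `AssistKind` no longer needs to be imported, which is exactly what the shrunken `use crate::{...}` lists in these hunks show.
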
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs
index 1b16ba5fc8ff3..c7e5e41aac4ce 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs
@@ -1,13 +1,14 @@
 use ide_db::{famous_defs::FamousDefs, source_change::SourceChangeBuilder};
 use stdx::{format_to, to_lower_snake_case};
 use syntax::{
-    ast::{self, edit_in_place::Indent, make, AstNode, HasName, HasVisibility},
-    ted, TextRange,
+    TextRange,
+    ast::{self, AstNode, HasName, HasVisibility, edit_in_place::Indent, make},
+    ted,
 };
 
 use crate::{
+    AssistContext, AssistId, Assists, GroupLabel,
     utils::{convert_reference_type, find_struct_impl, generate_impl},
-    AssistContext, AssistId, AssistKind, Assists, GroupLabel,
 };
 
 // Assist: generate_setter
@@ -62,7 +63,7 @@ pub(crate) fn generate_setter(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt
 
     acc.add_group(
         &GroupLabel("Generate getter/setter".to_owned()),
-        AssistId("generate_setter", AssistKind::Generate),
+        AssistId::generate("generate_setter"),
         "Generate a setter method",
         target,
         |builder| build_source_change(builder, ctx, info_of_record_fields, setter_info),
@@ -203,7 +204,7 @@ pub(crate) fn generate_getter_impl(
 
     acc.add_group(
         &GroupLabel("Generate getter/setter".to_owned()),
-        AssistId(id, AssistKind::Generate),
+        AssistId::generate(id),
         label,
         target,
         |builder| build_source_change(builder, ctx, info_of_record_fields, getter_info),
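
The reshuffled `use` lists in every file of this diff follow one pattern: plain type and trait names now come before lowercase module paths inside a brace group. This looks like rustfmt's 2024 style edition sorting, where ASCII uppercase sorts before lowercase, but that is my reading of the diff rather than something stated in it. A self-contained illustration, with a stand-in module tree so it compiles on its own:

#[allow(dead_code)]
mod syntax_like {
    pub trait AstNode {}
    pub mod ast {
        pub struct Expr;
    }
    pub mod ted {}
}

// Previously the lowercase module paths led these lists; the new order puts the
// uppercase items first, and each run stays alphabetical.
#[allow(unused_imports)]
use crate::syntax_like::{AstNode, ast::Expr, ted};

fn main() {
    let _expr: Expr = Expr;
}
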
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs
index 4439830947ade..2862e6d5afba3 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs
@@ -1,9 +1,9 @@
 use syntax::{
-    ast::{self, edit_in_place::Indent, make, AstNode, HasName},
+    ast::{self, AstNode, HasName, edit_in_place::Indent, make},
     ted,
 };
 
-use crate::{utils, AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists, utils};
 
 fn insert_impl(impl_: ast::Impl, nominal: &ast::Adt) {
     let indent = nominal.indent_level();
@@ -44,7 +44,7 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio
     }
 
     acc.add(
-        AssistId("generate_impl", AssistKind::Generate),
+        AssistId::generate("generate_impl"),
         format!("Generate impl for `{name}`"),
         target,
         |edit| {
@@ -90,7 +90,7 @@ pub(crate) fn generate_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) ->
     }
 
     acc.add(
-        AssistId("generate_trait_impl", AssistKind::Generate),
+        AssistId::generate("generate_trait_impl"),
         format!("Generate trait impl for `{name}`"),
         target,
         |edit| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs
index ad422b25c39e0..af9c493b48044 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs
@@ -1,12 +1,12 @@
-use hir::{sym, HasSource, Name};
+use hir::{HasSource, Name, sym};
 use syntax::{
-    ast::{self, HasName},
     AstNode,
+    ast::{self, HasName},
 };
 
 use crate::{
+    AssistId,
     assist_context::{AssistContext, Assists},
-    AssistId, AssistKind,
 };
 
 // Assist: generate_is_empty_from_len
@@ -54,13 +54,13 @@ pub(crate) fn generate_is_empty_from_len(acc: &mut Assists, ctx: &AssistContext<
     }
 
     let impl_ = fn_node.syntax().ancestors().find_map(ast::Impl::cast)?;
-    let len_fn = get_impl_method(ctx, &impl_, &Name::new_symbol_root(sym::len.clone()))?;
+    let len_fn = get_impl_method(ctx, &impl_, &Name::new_symbol_root(sym::len))?;
     if !len_fn.ret_type(ctx.sema.db).is_usize() {
         cov_mark::hit!(len_fn_different_return_type);
         return None;
     }
 
-    if get_impl_method(ctx, &impl_, &Name::new_symbol_root(sym::is_empty.clone())).is_some() {
+    if get_impl_method(ctx, &impl_, &Name::new_symbol_root(sym::is_empty)).is_some() {
         cov_mark::hit!(is_empty_already_implemented);
         return None;
     }
@@ -69,7 +69,7 @@ pub(crate) fn generate_is_empty_from_len(acc: &mut Assists, ctx: &AssistContext<
     let range = node.syntax().value.text_range();
 
     acc.add(
-        AssistId("generate_is_empty_from_len", AssistKind::Generate),
+        AssistId::generate("generate_is_empty_from_len"),
         "Generate a is_empty impl from a len function",
         range,
         |builder| {
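
Dropping `.clone()` from `sym::len` and `sym::is_empty` above (and the `&` from `by_key(&sym::path)` later in this diff) suggests the pre-interned symbol constants are now cheap `Copy` values passed by value. That is an inference from the call sites; the stand-in below only demonstrates why the clones become unnecessary under that assumption.

// Simplified stand-in for `hir::sym`; the real Symbol type is an interned handle.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Symbol(&'static str);

mod sym {
    #![allow(non_upper_case_globals)]
    use super::Symbol;
    pub const len: Symbol = Symbol("len");
    pub const is_empty: Symbol = Symbol("is_empty");
}

fn name_is(name: Symbol, expected: &str) -> bool {
    name.0 == expected
}

fn main() {
    // Before: name_is(sym::len.clone(), "len")
    // After:  the constant is `Copy`, so it is simply passed by value.
    assert!(name_is(sym::len, "len"));
    assert!(name_is(sym::is_empty, "is_empty"));
}
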
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
index 6aa561ad7f037..2ac960ed7e183 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
@@ -1,10 +1,11 @@
 use ide_db::famous_defs::FamousDefs;
 use syntax::{
+    AstNode,
     ast::{self, make},
-    ted, AstNode,
+    ted,
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // FIXME: Generating a proper `index_mut` method body from the `index` method body may be impossible due to unpredictable cases [#15581].
 // For now the `index_mut` body is left identical to the `index` body; users can modify it manually to fit their needs.
@@ -101,7 +102,7 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>
 
     let target = impl_def.syntax().text_range();
     acc.add(
-        AssistId("generate_mut_trait_impl", AssistKind::Generate),
+        AssistId::generate("generate_mut_trait_impl"),
         "Generate `IndexMut` impl from this `Index` trait",
         target,
         |edit| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs
index 70d14d6b95d85..f963f48d62ab6 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs
@@ -2,13 +2,13 @@ use ide_db::{
     imports::import_assets::item_for_path_search, use_trivial_constructor::use_trivial_constructor,
 };
 use syntax::{
-    ast::{self, edit_in_place::Indent, make, AstNode, HasName, HasVisibility, StructKind},
+    ast::{self, AstNode, HasName, HasVisibility, StructKind, edit_in_place::Indent, make},
     ted,
 };
 
 use crate::{
+    AssistContext, AssistId, Assists,
     utils::{find_struct_impl, generate_impl},
-    AssistContext, AssistId, AssistKind, Assists,
 };
 
 // Assist: generate_new
@@ -48,7 +48,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
     let current_module = ctx.sema.scope(strukt.syntax())?.module();
 
     let target = strukt.syntax().text_range();
-    acc.add(AssistId("generate_new", AssistKind::Generate), "Generate `new`", target, |builder| {
+    acc.add(AssistId::generate("generate_new"), "Generate `new`", target, |builder| {
         let trivial_constructors = field_list
             .fields()
             .map(|f| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs
index 5f7350bc2812b..154b502e1bf97 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs
@@ -1,13 +1,13 @@
 use crate::assist_context::{AssistContext, Assists};
 use ide_db::assists::AssistId;
 use syntax::{
+    AstNode, SyntaxKind, T,
     ast::{
-        self,
+        self, HasGenericParams, HasName,
         edit_in_place::{HasVisibilityEdit, Indent},
-        make, HasGenericParams, HasName,
+        make,
     },
     ted::{self, Position},
-    AstNode, SyntaxKind, T,
 };
 
 // NOTES :
@@ -95,7 +95,7 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_
     let impl_name = impl_ast.self_ty()?;
 
     acc.add(
-        AssistId("generate_trait_from_impl", ide_db::assists::AssistKind::Generate),
+        AssistId::generate("generate_trait_from_impl"),
         "Generate trait from impl",
         impl_ast.syntax().text_range(),
         |builder| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs
index 9e09f198feb4a..6f028e58d0cdd 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs
@@ -3,30 +3,32 @@ use std::collections::BTreeSet;
 use ast::make;
 use either::Either;
 use hir::{
+    FileRange, PathResolution, Semantics, TypeInfo,
     db::{ExpandDatabase, HirDatabase},
-    sym, FileRange, PathResolution, Semantics, TypeInfo,
+    sym,
 };
 use ide_db::{
-    base_db::CrateId,
+    EditionedFileId, RootDatabase,
+    base_db::Crate,
     defs::Definition,
     imports::insert_use::remove_path_if_in_use_stmt,
     path_transform::PathTransform,
     search::{FileReference, FileReferenceNode, SearchScope},
     source_change::SourceChangeBuilder,
     syntax_helpers::{node_ext::expr_as_name_ref, prettify_macro_expansion},
-    EditionedFileId, RootDatabase,
 };
-use itertools::{izip, Itertools};
+use itertools::{Itertools, izip};
 use syntax::{
+    AstNode, NodeOrToken, SyntaxKind,
     ast::{
-        self, edit::IndentLevel, edit_in_place::Indent, HasArgList, HasGenericArgs, Pat, PathExpr,
+        self, HasArgList, HasGenericArgs, Pat, PathExpr, edit::IndentLevel, edit_in_place::Indent,
     },
-    ted, AstNode, NodeOrToken, SyntaxKind,
+    ted,
 };
 
 use crate::{
+    AssistId,
     assist_context::{AssistContext, Assists},
-    AssistId, AssistKind,
 };
 
 // Assist: inline_into_callers
@@ -69,6 +71,7 @@ use crate::{
 // ```
 pub(crate) fn inline_into_callers(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
     let def_file = ctx.file_id();
+    let vfs_def_file = ctx.vfs_file_id();
     let name = ctx.find_node_at_offset::<ast::Name>()?;
     let ast_func = name.syntax().parent().and_then(ast::Fn::cast)?;
     let func_body = ast_func.body()?;
@@ -96,7 +99,7 @@ pub(crate) fn inline_into_callers(acc: &mut Assists, ctx: &AssistContext<'_>) ->
     }
 
     acc.add(
-        AssistId("inline_into_callers", AssistKind::RefactorInline),
+        AssistId::refactor_inline("inline_into_callers"),
         "Inline into all callers",
         name.syntax().text_range(),
         |builder| {
@@ -104,7 +107,8 @@ pub(crate) fn inline_into_callers(acc: &mut Assists, ctx: &AssistContext<'_>) ->
             let current_file_usage = usages.references.remove(&def_file);
 
             let mut remove_def = true;
-            let mut inline_refs_for_file = |file_id, refs: Vec<FileReference>| {
+            let mut inline_refs_for_file = |file_id: EditionedFileId, refs: Vec<FileReference>| {
+                let file_id = file_id.file_id(ctx.db());
                 builder.edit_file(file_id);
                 let call_krate = ctx.sema.file_to_module_def(file_id).map(|it| it.krate());
                 let count = refs.len();
@@ -141,7 +145,7 @@ pub(crate) fn inline_into_callers(acc: &mut Assists, ctx: &AssistContext<'_>) ->
             }
             match current_file_usage {
                 Some(refs) => inline_refs_for_file(def_file, refs),
-                None => builder.edit_file(def_file),
+                None => builder.edit_file(vfs_def_file),
             }
             if remove_def {
                 builder.delete(ast_func.syntax().text_range());
@@ -192,7 +196,7 @@ pub(crate) fn inline_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
     let name_ref: ast::NameRef = ctx.find_node_at_offset()?;
     let call_info = CallInfo::from_name_ref(
         name_ref.clone(),
-        ctx.sema.file_to_module_def(ctx.file_id())?.krate().into(),
+        ctx.sema.file_to_module_def(ctx.vfs_file_id())?.krate().into(),
     )?;
     let (function, label) = match &call_info.node {
         ast::CallableExpr::Call(call) => {
@@ -230,32 +234,27 @@ pub(crate) fn inline_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
     }
 
     let syntax = call_info.node.syntax().clone();
-    acc.add(
-        AssistId("inline_call", AssistKind::RefactorInline),
-        label,
-        syntax.text_range(),
-        |builder| {
-            let replacement = inline(&ctx.sema, file_id, function, &fn_body, &params, &call_info);
-            builder.replace_ast(
-                match call_info.node {
-                    ast::CallableExpr::Call(it) => ast::Expr::CallExpr(it),
-                    ast::CallableExpr::MethodCall(it) => ast::Expr::MethodCallExpr(it),
-                },
-                replacement,
-            );
-        },
-    )
+    acc.add(AssistId::refactor_inline("inline_call"), label, syntax.text_range(), |builder| {
+        let replacement = inline(&ctx.sema, file_id, function, &fn_body, &params, &call_info);
+        builder.replace_ast(
+            match call_info.node {
+                ast::CallableExpr::Call(it) => ast::Expr::CallExpr(it),
+                ast::CallableExpr::MethodCall(it) => ast::Expr::MethodCallExpr(it),
+            },
+            replacement,
+        );
+    })
 }
 
 struct CallInfo {
     node: ast::CallableExpr,
     arguments: Vec<ast::Expr>,
     generic_arg_list: Option<ast::GenericArgList>,
-    krate: CrateId,
+    krate: Crate,
 }
 
 impl CallInfo {
-    fn from_name_ref(name_ref: ast::NameRef, krate: CrateId) -> Option<CallInfo> {
+    fn from_name_ref(name_ref: ast::NameRef, krate: Crate) -> Option<CallInfo> {
         let parent = name_ref.syntax().parent()?;
         if let Some(call) = ast::MethodCallExpr::cast(parent.clone()) {
             let receiver = call.receiver()?;
@@ -452,7 +451,7 @@ fn inline(
 
             let ty = sema.type_of_expr(expr).filter(TypeInfo::has_adjustment).and(param_ty);
 
-            let is_self = param.name(sema.db).is_some_and(|name| name == sym::self_.clone());
+            let is_self = param.name(sema.db).is_some_and(|name| name == sym::self_);
 
             if is_self {
                 let mut this_pat = make::ident_pat(false, false, make::name("this"));
@@ -515,7 +514,7 @@ fn inline(
                     && usage.syntax().parent().and_then(ast::Expr::cast).is_some() =>
             {
                 cov_mark::hit!(inline_call_inline_closure);
-                let expr = make::expr_paren(expr.clone());
+                let expr = make::expr_paren(expr.clone()).into();
                 inline_direct(usage, &expr);
             }
             // inline single use literals
@@ -570,7 +569,7 @@ fn inline(
     let no_stmts = body.statements().next().is_none();
     match body.tail_expr() {
         Some(expr) if matches!(expr, ast::Expr::ClosureExpr(_)) && no_stmts => {
-            make::expr_paren(expr).clone_for_update()
+            make::expr_paren(expr).clone_for_update().into()
         }
         Some(expr) if !is_async_fn && no_stmts => expr,
         _ => match node
@@ -580,7 +579,7 @@ fn inline(
             .and_then(|bin_expr| bin_expr.lhs())
         {
             Some(lhs) if lhs.syntax() == node.syntax() => {
-                make::expr_paren(ast::Expr::BlockExpr(body)).clone_for_update()
+                make::expr_paren(ast::Expr::BlockExpr(body)).clone_for_update().into()
             }
             _ => ast::Expr::BlockExpr(body),
         },
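
Several hunks above add `.into()` after `make::expr_call(..)` and `make::expr_paren(..)`. The likely reading, inferred from the call sites rather than from the real `syntax::ast::make` signatures, is that these constructors now return the concrete node type, which converts into the general `ast::Expr` enum via `From`/`Into`. A toy version of that shape:

// Stand-in types; the real syntax tree nodes are rowan-backed, not strings.
#[derive(Debug)]
struct ParenExpr(String);

#[derive(Debug)]
enum Expr {
    Paren(ParenExpr),
    Literal(String),
}

impl From<ParenExpr> for Expr {
    fn from(p: ParenExpr) -> Expr {
        Expr::Paren(p)
    }
}

// A `make`-style constructor that returns the specific node...
fn expr_paren(inner: Expr) -> ParenExpr {
    ParenExpr(format!("({inner:?})"))
}

fn main() {
    // ...so call sites that need the general `Expr` add `.into()`, as in the diff.
    let wrapped: Expr = expr_paren(Expr::Literal("1 + 1".to_owned())).into();
    println!("{wrapped:?}");
}
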
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_const_as_literal.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_const_as_literal.rs
index ca5882d0313ac..e5ed04fdc7c9e 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_const_as_literal.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_const_as_literal.rs
@@ -1,7 +1,7 @@
 use hir::HasCrate;
-use syntax::{ast, AstNode};
+use syntax::{AstNode, ast};
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: inline_const_as_literal
 //
@@ -44,7 +44,7 @@ pub(crate) fn inline_const_as_literal(acc: &mut Assists, ctx: &AssistContext<'_>
             .ok()?
             .render(ctx.sema.db, konst.krate(ctx.sema.db).to_display_target(ctx.sema.db));
 
-        let id = AssistId("inline_const_as_literal", AssistKind::RefactorInline);
+        let id = AssistId::refactor_inline("inline_const_as_literal");
 
         let label = "Inline const as literal".to_owned();
         let target = variable.syntax().text_range();
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs
index 36eed290dc88d..5d4bdc6ec76cd 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs
@@ -1,17 +1,17 @@
 use hir::{PathResolution, Semantics};
 use ide_db::{
+    EditionedFileId, RootDatabase,
     defs::Definition,
     search::{FileReference, FileReferenceNode, UsageSearchResult},
-    EditionedFileId, RootDatabase,
 };
 use syntax::{
-    ast::{self, syntax_factory::SyntaxFactory, AstNode, AstToken, HasName},
     SyntaxElement, TextRange,
+    ast::{self, AstNode, AstToken, HasName, syntax_factory::SyntaxFactory},
 };
 
 use crate::{
+    AssistId,
     assist_context::{AssistContext, Assists},
-    AssistId, AssistKind,
 };
 
 // Assist: inline_local_variable
@@ -74,7 +74,7 @@ pub(crate) fn inline_local_variable(acc: &mut Assists, ctx: &AssistContext<'_>)
     };
 
     acc.add(
-        AssistId("inline_local_variable", AssistKind::RefactorInline),
+        AssistId::refactor_inline("inline_local_variable"),
         "Inline variable",
         target.text_range(),
         move |builder| {
@@ -91,7 +91,7 @@ pub(crate) fn inline_local_variable(acc: &mut Assists, ctx: &AssistContext<'_>)
                 }
             }
 
-            let make = SyntaxFactory::new();
+            let make = SyntaxFactory::with_mappings();
 
             for (name, should_wrap) in wrap_in_parens {
                 let replacement = if should_wrap {
@@ -110,7 +110,7 @@ pub(crate) fn inline_local_variable(acc: &mut Assists, ctx: &AssistContext<'_>)
             }
 
             editor.add_mappings(make.finish_with_mappings());
-            builder.add_file_edits(ctx.file_id(), editor);
+            builder.add_file_edits(ctx.vfs_file_id(), editor);
         },
     )
 }
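
A second recurring pattern: callers that feed VFS-level APIs switch from `ctx.file_id()` to `ctx.vfs_file_id()`, and `EditionedFileId::file_id()` / `.edition()` now take the database (`file_id.file_id(ctx.db())`, `file_id.edition(ctx.db())`). That is consistent with the editioned id becoming a database-interned key that must be resolved through the db; the stand-ins below only mimic that relationship and are not the real rust-analyzer types.

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct FileId(u32); // the plain VFS file id

#[derive(Clone, Copy, Debug)]
struct EditionedFileId(usize); // interned key, only meaningful relative to a database

struct Database {
    // maps interned editioned ids back to (vfs file id, edition)
    files: Vec<(FileId, &'static str)>,
}

impl EditionedFileId {
    fn file_id(self, db: &Database) -> FileId {
        db.files[self.0].0
    }
    fn edition(self, db: &Database) -> &'static str {
        db.files[self.0].1
    }
}

fn main() {
    let db = Database { files: vec![(FileId(7), "2021")] };
    let id = EditionedFileId(0);
    // Call sites that used to write `id.file_id()` / `id.edition()` now thread `db` through.
    assert_eq!(id.file_id(&db), FileId(7));
    assert_eq!(id.edition(&db), "2021");
}
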
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_macro.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_macro.rs
index cd6f900ba15da..b09bef36ae15d 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_macro.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_macro.rs
@@ -2,7 +2,7 @@ use hir::db::ExpandDatabase;
 use ide_db::syntax_helpers::prettify_macro_expansion;
 use syntax::ast::{self, AstNode};
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: inline_macro
 //
@@ -38,16 +38,16 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
 pub(crate) fn inline_macro(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
     let unexpanded = ctx.find_node_at_offset::<ast::MacroCall>()?;
     let macro_call = ctx.sema.to_def(&unexpanded)?;
-    let target_crate_id = ctx.sema.file_to_module_def(ctx.file_id())?.krate().into();
+    let target_crate_id = ctx.sema.file_to_module_def(ctx.vfs_file_id())?.krate().into();
     let text_range = unexpanded.syntax().text_range();
 
     acc.add(
-        AssistId("inline_macro", AssistKind::RefactorInline),
+        AssistId::refactor_inline("inline_macro"),
         "Inline macro".to_owned(),
         text_range,
         |builder| {
-            let expanded = ctx.sema.parse_or_expand(macro_call.as_file());
-            let span_map = ctx.sema.db.expansion_span_map(macro_call.as_macro_file());
+            let expanded = ctx.sema.parse_or_expand(macro_call.into());
+            let span_map = ctx.sema.db.expansion_span_map(macro_call);
             // Don't call `prettify_macro_expansion()` outside the actual assist action; it does some heavy rowan tree manipulation,
             // which can be very costly for big macros when it is done *even without the assist being invoked*.
             let expanded = prettify_macro_expansion(ctx.db(), expanded, &span_map, target_crate_id);
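
The comment above about `prettify_macro_expansion()` captures a general rule for assists: they are listed far more often than they are applied, so expensive work belongs inside the closure that materializes the edit. A small self-contained sketch of that split, with made-up names:

struct Assist {
    label: &'static str,
    // The edit is computed only when the user actually invokes the assist.
    compute_edit: Box<dyn FnOnce() -> String>,
}

fn expensive_prettify(input: &str) -> String {
    // stand-in for costly work such as prettifying a macro expansion
    input.split_whitespace().collect::<Vec<_>>().join(" ")
}

fn list_assists(expansion: String) -> Vec<Assist> {
    vec![Assist {
        label: "Inline macro",
        // Cheap to build: the closure captures the input but does no work yet.
        compute_edit: Box::new(move || expensive_prettify(&expansion)),
    }]
}

fn main() {
    let assist = list_assists("  fn   main ( ) { }  ".to_owned()).remove(0);
    println!("offered: {}", assist.label);
    // Only now, when the assist is applied, does the heavy work run.
    println!("edit: {}", (assist.compute_edit)());
}
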
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs
index 76d465b011039..4511072b041b1 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs
@@ -10,13 +10,14 @@ use ide_db::{
 };
 use itertools::Itertools;
 use syntax::{
-    ast::{self, make, HasGenericParams, HasName},
-    ted, AstNode, NodeOrToken, SyntaxNode,
+    AstNode, NodeOrToken, SyntaxNode,
+    ast::{self, HasGenericParams, HasName, make},
+    ted,
 };
 
 use crate::{
+    AssistId,
     assist_context::{AssistContext, Assists},
-    AssistId, AssistKind,
 };
 
 use super::inline_call::split_refs_and_uses;
@@ -59,7 +60,7 @@ pub(crate) fn inline_type_alias_uses(acc: &mut Assists, ctx: &AssistContext<'_>)
     // until this is ok
 
     acc.add(
-        AssistId("inline_type_alias_uses", AssistKind::RefactorInline),
+        AssistId::refactor_inline("inline_type_alias_uses"),
         "Inline type alias into all uses",
         name.syntax().text_range(),
         |builder| {
@@ -86,17 +87,17 @@ pub(crate) fn inline_type_alias_uses(acc: &mut Assists, ctx: &AssistContext<'_>)
                     builder.replace(target, replacement);
                 }
 
-                if file_id == ctx.file_id() {
+                if file_id == ctx.vfs_file_id() {
                     builder.delete(ast_alias.syntax().text_range());
                     definition_deleted = true;
                 }
             };
 
             for (file_id, refs) in usages.into_iter() {
-                inline_refs_for_file(file_id.file_id(), refs);
+                inline_refs_for_file(file_id.file_id(ctx.db()), refs);
             }
             if !definition_deleted {
-                builder.edit_file(ctx.file_id());
+                builder.edit_file(ctx.vfs_file_id());
                 builder.delete(ast_alias.syntax().text_range());
             }
         },
@@ -148,7 +149,7 @@ pub(crate) fn inline_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
     let target = alias_instance.syntax().text_range();
 
     acc.add(
-        AssistId("inline_type_alias", AssistKind::RefactorInline),
+        AssistId::refactor_inline("inline_type_alias"),
         "Inline type alias",
         target,
         |builder| builder.replace(target, replacement.to_text(&concrete_type)),
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/into_to_qualified_from.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/into_to_qualified_from.rs
index e405af5533d5e..47b273535a88f 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/into_to_qualified_from.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/into_to_qualified_from.rs
@@ -1,9 +1,6 @@
 use hir::{AsAssocItem, HirDisplay};
-use ide_db::{
-    assists::{AssistId, AssistKind},
-    famous_defs::FamousDefs,
-};
-use syntax::{ast, AstNode};
+use ide_db::{assists::AssistId, famous_defs::FamousDefs};
+use syntax::{AstNode, ast};
 
 use crate::assist_context::{AssistContext, Assists};
 
@@ -60,7 +57,7 @@ pub(crate) fn into_to_qualified_from(acc: &mut Assists, ctx: &AssistContext<'_>)
 
         let sc = adjusted_tc.display_source_code(db, scope.module().into(), true).ok()?;
         acc.add(
-            AssistId("into_to_qualified_from", AssistKind::Generate),
+            AssistId::generate("into_to_qualified_from"),
             "Convert `into` to fully qualified `from`",
             nameref.syntax().text_range(),
             |edit| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_lifetime.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_lifetime.rs
index 62909c586e3d4..264e3767a2324 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_lifetime.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_lifetime.rs
@@ -1,11 +1,11 @@
 use ide_db::FxHashSet;
 use syntax::{
-    ast::{self, edit_in_place::GenericParamsOwnerEdit, make, HasGenericParams},
-    ted::{self, Position},
     AstNode, TextRange,
+    ast::{self, HasGenericParams, edit_in_place::GenericParamsOwnerEdit, make},
+    ted::{self, Position},
 };
 
-use crate::{assist_context::SourceChangeBuilder, AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists, assist_context::SourceChangeBuilder};
 
 static ASSIST_NAME: &str = "introduce_named_lifetime";
 static ASSIST_LABEL: &str = "Introduce named lifetime";
@@ -83,7 +83,7 @@ fn generate_fn_def_assist(
             _ => return None,
         }
     };
-    acc.add(AssistId(ASSIST_NAME, AssistKind::Refactor), ASSIST_LABEL, lifetime_loc, |builder| {
+    acc.add(AssistId::refactor(ASSIST_NAME), ASSIST_LABEL, lifetime_loc, |builder| {
         let fn_def = builder.make_mut(fn_def);
         let lifetime = builder.make_mut(lifetime);
         let loc_needing_lifetime =
@@ -107,7 +107,7 @@ fn generate_impl_def_assist(
     lifetime: ast::Lifetime,
 ) -> Option<()> {
     let new_lifetime_param = generate_unique_lifetime_param_name(impl_def.generic_param_list())?;
-    acc.add(AssistId(ASSIST_NAME, AssistKind::Refactor), ASSIST_LABEL, lifetime_loc, |builder| {
+    acc.add(AssistId::refactor(ASSIST_NAME), ASSIST_LABEL, lifetime_loc, |builder| {
         let impl_def = builder.make_mut(impl_def);
         let lifetime = builder.make_mut(lifetime);
 
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_type_parameter.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_type_parameter.rs
index 994e4a0eddaf6..db51070a6430b 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_type_parameter.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_type_parameter.rs
@@ -1,8 +1,8 @@
 use ide_db::syntax_helpers::suggest_name;
 use itertools::Itertools;
-use syntax::ast::{self, syntax_factory::SyntaxFactory, AstNode, HasGenericParams, HasName};
+use syntax::ast::{self, AstNode, HasGenericParams, HasName, syntax_factory::SyntaxFactory};
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: introduce_named_type_parameter
 //
@@ -24,10 +24,10 @@ pub(crate) fn introduce_named_type_parameter(
     let fn_ = param.syntax().ancestors().nth(2).and_then(ast::Fn::cast)?;
     let type_bound_list = impl_trait_type.type_bound_list()?;
 
-    let make = SyntaxFactory::new();
+    let make = SyntaxFactory::with_mappings();
     let target = fn_.syntax().text_range();
     acc.add(
-        AssistId("introduce_named_type_parameter", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("introduce_named_type_parameter"),
         "Replace impl trait with type parameter",
         target,
         |builder| {
@@ -59,7 +59,7 @@ pub(crate) fn introduce_named_type_parameter(
             }
 
             editor.add_mappings(make.finish_with_mappings());
-            builder.add_file_edits(ctx.file_id(), editor);
+            builder.add_file_edits(ctx.vfs_file_id(), editor);
         },
     )
 }
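
`SyntaxFactory::new()` becoming `SyntaxFactory::with_mappings()` (here and in `inline_local_variable`) reads as the factory explicitly opting into recording node mappings that `finish_with_mappings()` later hands to the editor. The mock below is only a guess at that contract, with simplified types; the real factory works on syntax nodes, not strings.

#[derive(Default)]
struct SyntaxFactory {
    mappings: Vec<(String, String)>, // (source text, created text)
}

impl SyntaxFactory {
    fn with_mappings() -> Self {
        SyntaxFactory::default()
    }

    fn expr_paren(&mut self, source: &str) -> String {
        let created = format!("({source})");
        // Record where the created node came from, for the editor to consume later.
        self.mappings.push((source.to_owned(), created.clone()));
        created
    }

    fn finish_with_mappings(self) -> Vec<(String, String)> {
        self.mappings
    }
}

fn main() {
    let mut make = SyntaxFactory::with_mappings();
    let node = make.expr_paren("a + b");
    println!("created {node}");
    for (from, to) in make.finish_with_mappings() {
        println!("mapped {from:?} -> {to:?}");
    }
}
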
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/invert_if.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/invert_if.rs
index ac710503d8a0d..d198870b023e6 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/invert_if.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/invert_if.rs
@@ -1,13 +1,13 @@
 use ide_db::syntax_helpers::node_ext::is_pattern_cond;
 use syntax::{
-    ast::{self, AstNode},
     T,
+    ast::{self, AstNode},
 };
 
 use crate::{
+    AssistId,
     assist_context::{AssistContext, Assists},
     utils::invert_boolean_expression_legacy,
-    AssistId, AssistKind,
 };
 
 // Assist: invert_if
@@ -47,7 +47,7 @@ pub(crate) fn invert_if(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()
         ast::ElseBranch::IfExpr(_) => return None,
     };
 
-    acc.add(AssistId("invert_if", AssistKind::RefactorRewrite), "Invert if", if_range, |edit| {
+    acc.add(AssistId::refactor_rewrite("invert_if"), "Invert if", if_range, |edit| {
         let flip_cond = invert_boolean_expression_legacy(cond.clone());
         edit.replace_ast(cond, flip_cond);
 
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs
index 4171230836908..b7f7cb9cb01c5 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs
@@ -1,19 +1,20 @@
 use either::Either;
 use ide_db::imports::{
     insert_use::{ImportGranularity, InsertUseConfig},
-    merge_imports::{try_merge_imports, try_merge_trees, try_normalize_use_tree, MergeBehavior},
+    merge_imports::{MergeBehavior, try_merge_imports, try_merge_trees, try_normalize_use_tree},
 };
 use itertools::Itertools;
 use syntax::{
+    AstNode, SyntaxElement, SyntaxNode,
     algo::neighbor,
     ast::{self, edit_in_place::Removable},
-    match_ast, ted, AstNode, SyntaxElement, SyntaxNode,
+    match_ast, ted,
 };
 
 use crate::{
+    AssistId,
     assist_context::{AssistContext, Assists},
     utils::next_prev,
-    AssistId, AssistKind,
 };
 
 use Edit::*;
@@ -68,55 +69,50 @@ pub(crate) fn merge_imports(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio
         (selection_range, edits?)
     };
 
-    acc.add(
-        AssistId("merge_imports", AssistKind::RefactorRewrite),
-        "Merge imports",
-        target,
-        |builder| {
-            let edits_mut: Vec<Edit> = edits
-                .into_iter()
-                .map(|it| match it {
-                    Remove(Either::Left(it)) => Remove(Either::Left(builder.make_mut(it))),
-                    Remove(Either::Right(it)) => Remove(Either::Right(builder.make_mut(it))),
-                    Replace(old, new) => Replace(builder.make_syntax_mut(old), new),
-                })
-                .collect();
-            for edit in edits_mut {
-                match edit {
-                    Remove(it) => it.as_ref().either(Removable::remove, Removable::remove),
-                    Replace(old, new) => {
-                        ted::replace(old, &new);
-
-                        // If there's a selection and we're replacing a use tree in a tree list,
-                        // normalize the parent use tree if it only contains the merged subtree.
-                        if !ctx.has_empty_selection() {
-                            let normalized_use_tree = ast::UseTree::cast(new)
-                                .as_ref()
-                                .and_then(ast::UseTree::parent_use_tree_list)
-                                .and_then(|use_tree_list| {
-                                    if use_tree_list.use_trees().collect_tuple::<(_,)>().is_some() {
-                                        Some(use_tree_list.parent_use_tree())
-                                    } else {
-                                        None
-                                    }
-                                })
-                                .and_then(|target_tree| {
-                                    try_normalize_use_tree(
-                                        &target_tree,
-                                        ctx.config.insert_use.granularity.into(),
-                                    )
-                                    .map(|top_use_tree_flat| (target_tree, top_use_tree_flat))
-                                });
-                            if let Some((old_tree, new_tree)) = normalized_use_tree {
-                                cov_mark::hit!(replace_parent_with_normalized_use_tree);
-                                ted::replace(old_tree.syntax(), new_tree.syntax());
-                            }
+    acc.add(AssistId::refactor_rewrite("merge_imports"), "Merge imports", target, |builder| {
+        let edits_mut: Vec<Edit> = edits
+            .into_iter()
+            .map(|it| match it {
+                Remove(Either::Left(it)) => Remove(Either::Left(builder.make_mut(it))),
+                Remove(Either::Right(it)) => Remove(Either::Right(builder.make_mut(it))),
+                Replace(old, new) => Replace(builder.make_syntax_mut(old), new),
+            })
+            .collect();
+        for edit in edits_mut {
+            match edit {
+                Remove(it) => it.as_ref().either(Removable::remove, Removable::remove),
+                Replace(old, new) => {
+                    ted::replace(old, &new);
+
+                    // If there's a selection and we're replacing a use tree in a tree list,
+                    // normalize the parent use tree if it only contains the merged subtree.
+                    if !ctx.has_empty_selection() {
+                        let normalized_use_tree = ast::UseTree::cast(new)
+                            .as_ref()
+                            .and_then(ast::UseTree::parent_use_tree_list)
+                            .and_then(|use_tree_list| {
+                                if use_tree_list.use_trees().collect_tuple::<(_,)>().is_some() {
+                                    Some(use_tree_list.parent_use_tree())
+                                } else {
+                                    None
+                                }
+                            })
+                            .and_then(|target_tree| {
+                                try_normalize_use_tree(
+                                    &target_tree,
+                                    ctx.config.insert_use.granularity.into(),
+                                )
+                                .map(|top_use_tree_flat| (target_tree, top_use_tree_flat))
+                            });
+                        if let Some((old_tree, new_tree)) = normalized_use_tree {
+                            cov_mark::hit!(replace_parent_with_normalized_use_tree);
+                            ted::replace(old_tree.syntax(), new_tree.syntax());
                         }
                     }
                 }
             }
-        },
-    )
+        }
+    })
 }
 
 trait Merge: AstNode + Clone {
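
For readers unfamiliar with the normalization mentioned in the closure above: after merging, a use-tree list that contains exactly one subtree can be flattened into its parent. The example below shows the intended before/after shape; the exact output of `try_normalize_use_tree` may differ.

mod foo {
    pub mod bar {
        pub struct Baz;
        pub struct Qux;
    }
}

// Before normalization the merged import might look like:
//     use crate::foo::{bar::{Baz, Qux}};
// Normalizing the parent tree (it holds a single subtree) flattens it to:
use crate::foo::bar::{Baz, Qux};

fn main() {
    let (_b, _q) = (Baz, Qux);
}
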
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_match_arms.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_match_arms.rs
index f83de931eaba7..42f35210b4967 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_match_arms.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_match_arms.rs
@@ -2,12 +2,12 @@ use hir::Type;
 use ide_db::FxHashMap;
 use std::iter::successors;
 use syntax::{
+    Direction,
     algo::neighbor,
     ast::{self, AstNode, HasName},
-    Direction,
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists, TextRange};
+use crate::{AssistContext, AssistId, Assists, TextRange};
 
 // Assist: merge_match_arms
 //
@@ -73,7 +73,7 @@ pub(crate) fn merge_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
     }
 
     acc.add(
-        AssistId("merge_match_arms", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("merge_match_arms"),
         "Merge match arms",
         current_text_range,
         |edit| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_nested_if.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_nested_if.rs
index 7a0037fa202bb..73cb8204f2096 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_nested_if.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_nested_if.rs
@@ -1,12 +1,12 @@
 use ide_db::syntax_helpers::node_ext::is_pattern_cond;
 use syntax::{
-    ast::{self, AstNode, BinaryOp},
     T,
+    ast::{self, AstNode, BinaryOp},
 };
 
 use crate::{
+    AssistId,
     assist_context::{AssistContext, Assists},
-    AssistId, AssistKind,
 };
 // Assist: merge_nested_if
 //
@@ -69,29 +69,24 @@ pub(crate) fn merge_nested_if(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt
     let nested_if_then_branch = nested_if_to_merge.then_branch()?;
     let then_branch_range = then_branch.syntax().text_range();
 
-    acc.add(
-        AssistId("merge_nested_if", AssistKind::RefactorRewrite),
-        "Merge nested if",
-        if_range,
-        |edit| {
-            let cond_text = if has_logic_op_or(&cond) {
-                format!("({})", cond.syntax().text())
-            } else {
-                cond.syntax().text().to_string()
-            };
+    acc.add(AssistId::refactor_rewrite("merge_nested_if"), "Merge nested if", if_range, |edit| {
+        let cond_text = if has_logic_op_or(&cond) {
+            format!("({})", cond.syntax().text())
+        } else {
+            cond.syntax().text().to_string()
+        };
 
-            let nested_if_cond_text = if has_logic_op_or(&nested_if_cond) {
-                format!("({})", nested_if_cond.syntax().text())
-            } else {
-                nested_if_cond.syntax().text().to_string()
-            };
+        let nested_if_cond_text = if has_logic_op_or(&nested_if_cond) {
+            format!("({})", nested_if_cond.syntax().text())
+        } else {
+            nested_if_cond.syntax().text().to_string()
+        };
 
-            let replace_cond = format!("{cond_text} && {nested_if_cond_text}");
+        let replace_cond = format!("{cond_text} && {nested_if_cond_text}");
 
-            edit.replace(cond_range, replace_cond);
-            edit.replace(then_branch_range, nested_if_then_branch.syntax().text());
-        },
-    )
+        edit.replace(cond_range, replace_cond);
+        edit.replace(then_branch_range, nested_if_then_branch.syntax().text());
+    })
 }
 
 /// Returns whether the given if condition has logical operators.
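
The parenthesization driven by `has_logic_op_or` matters because `&&` binds tighter than `||`: merging `if a || b { if c { .. } }` into `a || b && c` would change behaviour. A quick executable check:

fn main() {
    let (a, b, c) = (true, false, false);
    let nested = if a || b { c } else { false }; // original nested-if behaviour
    let merged_wrong = a || b && c;              // no parens: parses as a || (b && c)
    let merged_right = (a || b) && c;            // what the assist produces
    assert_eq!(nested, merged_right);
    assert_ne!(nested, merged_wrong);
    println!("parenthesization preserves behaviour");
}
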
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_bounds.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_bounds.rs
index 5101d8fa0a9e3..7e8735bd7a246 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_bounds.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_bounds.rs
@@ -1,13 +1,13 @@
 use syntax::{
     ast::{
-        self,
+        self, AstNode, HasName, HasTypeBounds,
         edit_in_place::{GenericParamsOwnerEdit, Removable},
-        make, AstNode, HasName, HasTypeBounds,
+        make,
     },
     match_ast,
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: move_bounds_to_where_clause
 //
@@ -42,7 +42,7 @@ pub(crate) fn move_bounds_to_where_clause(
 
     let target = type_param_list.syntax().text_range();
     acc.add(
-        AssistId("move_bounds_to_where_clause", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("move_bounds_to_where_clause"),
         "Move to where clause",
         target,
         |edit| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs
index 743ea9476150d..0c1dc9eb9349f 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs
@@ -1,8 +1,8 @@
 use hir::{AsAssocItem, AssocItemContainer, FileRange, HasCrate, HasSource};
 use ide_db::{assists::AssistId, defs::Definition, search::SearchScope};
 use syntax::{
-    ast::{self, edit::IndentLevel, edit_in_place::Indent, AstNode},
     SyntaxKind,
+    ast::{self, AstNode, edit::IndentLevel, edit_in_place::Indent},
 };
 
 use crate::assist_context::{AssistContext, Assists};
@@ -83,7 +83,7 @@ pub(crate) fn move_const_to_impl(acc: &mut Assists, ctx: &AssistContext<'_>) ->
     }
 
     acc.add(
-        AssistId("move_const_to_impl", crate::AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("move_const_to_impl"),
         "Move const to impl block",
         const_.syntax().text_range(),
         |builder| {
@@ -105,7 +105,7 @@ pub(crate) fn move_const_to_impl(acc: &mut Assists, ctx: &AssistContext<'_>) ->
             builder.delete(range_to_delete);
 
             let usages = usages.iter().flat_map(|(file_id, usages)| {
-                let edition = file_id.edition();
+                let edition = file_id.edition(ctx.db());
                 usages.iter().map(move |usage| (edition, usage.range))
             });
             for (edition, range) in usages {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_from_mod_rs.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_from_mod_rs.rs
index 10915f8aafb8d..a36d3136a16da 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_from_mod_rs.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_from_mod_rs.rs
@@ -1,8 +1,5 @@
-use ide_db::{
-    assists::{AssistId, AssistKind},
-    base_db::AnchoredPathBuf,
-};
-use syntax::{ast, AstNode, ToSmolStr};
+use ide_db::{assists::AssistId, base_db::AnchoredPathBuf};
+use syntax::{AstNode, ToSmolStr, ast};
 
 use crate::{
     assist_context::{AssistContext, Assists},
@@ -25,7 +22,7 @@ use crate::{
 // ```
 pub(crate) fn move_from_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
     let source_file = ctx.find_node_at_offset::<ast::SourceFile>()?;
-    let module = ctx.sema.file_to_module_def(ctx.file_id())?;
+    let module = ctx.sema.file_to_module_def(ctx.vfs_file_id())?;
     // Enable this assist if the user selects all "meaningful" content in the source file
     let trimmed_selected_range = trimmed_text_range(&source_file, ctx.selection_trimmed());
     let trimmed_file_range = trimmed_text_range(&source_file, source_file.syntax().text_range());
@@ -41,13 +38,13 @@ pub(crate) fn move_from_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
     let target = source_file.syntax().text_range();
     let module_name = module.name(ctx.db())?.as_str().to_smolstr();
     let path = format!("../{module_name}.rs");
-    let dst = AnchoredPathBuf { anchor: ctx.file_id().into(), path };
+    let dst = AnchoredPathBuf { anchor: ctx.vfs_file_id(), path };
     acc.add(
-        AssistId("move_from_mod_rs", AssistKind::Refactor),
+        AssistId::refactor("move_from_mod_rs"),
         format!("Convert {module_name}/mod.rs to {module_name}.rs"),
         target,
         |builder| {
-            builder.move_file(ctx.file_id(), dst);
+            builder.move_file(ctx.vfs_file_id(), dst);
         },
     )
 }
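
`AnchoredPathBuf { anchor: ctx.vfs_file_id(), path }` expresses the destination relative to an existing file, which is why `../{module_name}.rs` next to `{module_name}/mod.rs` lands as a sibling of the module directory. The resolver below is a stand-in (the real type anchors on a `FileId` resolved through the VFS), just to make the relative-path arithmetic concrete:

use std::path::{Path, PathBuf};

struct AnchoredPathBuf {
    anchor: PathBuf, // in rust-analyzer this is a FileId, resolved through the VFS
    path: String,
}

fn resolve(dst: &AnchoredPathBuf) -> PathBuf {
    let mut out = dst.anchor.parent().unwrap_or(Path::new("")).to_path_buf();
    for comp in dst.path.split('/') {
        match comp {
            ".." => {
                out.pop();
            }
            "." | "" => {}
            other => out.push(other),
        }
    }
    out
}

fn main() {
    // Mirrors move_from_mod_rs: `foo/mod.rs` moves to a sibling `foo.rs`.
    let dst = AnchoredPathBuf { anchor: PathBuf::from("src/foo/mod.rs"), path: "../foo.rs".to_owned() };
    assert_eq!(resolve(&dst), PathBuf::from("src/foo.rs"));
    println!("{}", resolve(&dst).display());
}
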
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_guard.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_guard.rs
index a487960d8d4c5..644d1f6cafefc 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_guard.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_guard.rs
@@ -1,9 +1,9 @@
 use syntax::{
-    ast::{edit::AstNodeEdit, make, AstNode, BlockExpr, ElseBranch, Expr, IfExpr, MatchArm, Pat},
     SyntaxKind::WHITESPACE,
+    ast::{AstNode, BlockExpr, ElseBranch, Expr, IfExpr, MatchArm, Pat, edit::AstNodeEdit, make},
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: move_guard_to_arm_body
 //
@@ -49,7 +49,7 @@ pub(crate) fn move_guard_to_arm_body(acc: &mut Assists, ctx: &AssistContext<'_>)
 
     let target = guard.syntax().text_range();
     acc.add(
-        AssistId("move_guard_to_arm_body", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("move_guard_to_arm_body"),
         "Move guard to arm body",
         target,
         |edit| {
@@ -118,7 +118,7 @@ pub(crate) fn move_arm_cond_to_match_guard(
     let (conds_blocks, tail) = parse_if_chain(if_expr)?;
 
     acc.add(
-        AssistId("move_arm_cond_to_match_guard", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("move_arm_cond_to_match_guard"),
         "Move condition to match guard",
         replace_node.text_range(),
         |edit| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_module_to_file.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_module_to_file.rs
index bbf18e21948eb..da62b817fcdb7 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_module_to_file.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_module_to_file.rs
@@ -1,16 +1,16 @@
 use std::iter;
 
 use ast::edit::IndentLevel;
-use hir::{sym, HasAttrs};
+use hir::{HasAttrs, sym};
 use ide_db::base_db::AnchoredPathBuf;
 use itertools::Itertools;
 use stdx::format_to;
 use syntax::{
-    ast::{self, edit::AstNodeEdit, HasName},
     AstNode, SmolStr, TextRange,
+    ast::{self, HasName, edit::AstNodeEdit},
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: move_module_to_file
 //
@@ -45,7 +45,7 @@ pub(crate) fn move_module_to_file(acc: &mut Assists, ctx: &AssistContext<'_>) ->
     let parent_module = module_def.parent(ctx.db())?;
 
     acc.add(
-        AssistId("move_module_to_file", AssistKind::RefactorExtract),
+        AssistId::refactor_extract("move_module_to_file"),
         "Extract module to file",
         target,
         |builder| {
@@ -57,7 +57,7 @@ pub(crate) fn move_module_to_file(acc: &mut Assists, ctx: &AssistContext<'_>) ->
                         if !parent_module.is_mod_rs(db)
                             && parent_module
                                 .attrs(db)
-                                .by_key(&sym::path)
+                                .by_key(sym::path)
                                 .string_value_unescape()
                                 .is_none() =>
                     {
@@ -104,7 +104,7 @@ pub(crate) fn move_module_to_file(acc: &mut Assists, ctx: &AssistContext<'_>) ->
                 buf,
             );
 
-            let dst = AnchoredPathBuf { anchor: ctx.file_id().into(), path };
+            let dst = AnchoredPathBuf { anchor: ctx.vfs_file_id(), path };
             builder.create_file(dst, contents);
         },
     )
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_to_mod_rs.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_to_mod_rs.rs
index 7b38c795dc80f..5e95b264fc8e4 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_to_mod_rs.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_to_mod_rs.rs
@@ -1,8 +1,5 @@
-use ide_db::{
-    assists::{AssistId, AssistKind},
-    base_db::AnchoredPathBuf,
-};
-use syntax::{ast, AstNode, ToSmolStr};
+use ide_db::{assists::AssistId, base_db::AnchoredPathBuf};
+use syntax::{AstNode, ToSmolStr, ast};
 
 use crate::{
     assist_context::{AssistContext, Assists},
@@ -25,7 +22,7 @@ use crate::{
 // ```
 pub(crate) fn move_to_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
     let source_file = ctx.find_node_at_offset::<ast::SourceFile>()?;
-    let module = ctx.sema.file_to_module_def(ctx.file_id())?;
+    let module = ctx.sema.file_to_module_def(ctx.vfs_file_id())?;
     // Enable this assist if the user selects all "meaningful" content in the source file
     let trimmed_selected_range = trimmed_text_range(&source_file, ctx.selection_trimmed());
     let trimmed_file_range = trimmed_text_range(&source_file, source_file.syntax().text_range());
@@ -41,13 +38,13 @@ pub(crate) fn move_to_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
     let target = source_file.syntax().text_range();
     let module_name = module.name(ctx.db())?.as_str().to_smolstr();
     let path = format!("./{module_name}/mod.rs");
-    let dst = AnchoredPathBuf { anchor: ctx.file_id().into(), path };
+    let dst = AnchoredPathBuf { anchor: ctx.vfs_file_id(), path };
     acc.add(
-        AssistId("move_to_mod_rs", AssistKind::Refactor),
+        AssistId::refactor("move_to_mod_rs"),
         format!("Convert {module_name}.rs to {module_name}/mod.rs"),
         target,
         |builder| {
-            builder.move_file(ctx.file_id(), dst);
+            builder.move_file(ctx.vfs_file_id(), dst);
         },
     )
 }
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/normalize_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/normalize_import.rs
index 0b91eb676df01..bba28b5fc8af5 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/normalize_import.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/normalize_import.rs
@@ -1,9 +1,9 @@
 use ide_db::imports::merge_imports::try_normalize_import;
-use syntax::{ast, AstNode};
+use syntax::{AstNode, ast};
 
 use crate::{
+    AssistId,
     assist_context::{AssistContext, Assists},
-    AssistId, AssistKind,
 };
 
 // Assist: normalize_import
@@ -28,14 +28,9 @@ pub(crate) fn normalize_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
     let normalized_use_item =
         try_normalize_import(&use_item, ctx.config.insert_use.granularity.into())?;
 
-    acc.add(
-        AssistId("normalize_import", AssistKind::RefactorRewrite),
-        "Normalize import",
-        target,
-        |builder| {
-            builder.replace_ast(use_item, normalized_use_item);
-        },
-    )
+    acc.add(AssistId::refactor_rewrite("normalize_import"), "Normalize import", target, |builder| {
+        builder.replace_ast(use_item, normalized_use_item);
+    })
 }
 
 #[cfg(test)]
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/number_representation.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/number_representation.rs
index a13799f9b1317..1fe40f8ee83ed 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/number_representation.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/number_representation.rs
@@ -1,6 +1,6 @@
-use syntax::{ast, ast::Radix, AstToken};
+use syntax::{AstToken, ast, ast::Radix};
 
-use crate::{AssistContext, AssistId, AssistKind, Assists, GroupLabel};
+use crate::{AssistContext, AssistId, Assists, GroupLabel};
 
 const MIN_NUMBER_OF_DIGITS_TO_FORMAT: usize = 5;
 
@@ -42,7 +42,7 @@ pub(crate) fn reformat_number_literal(acc: &mut Assists, ctx: &AssistContext<'_>
     let range = literal.syntax().text_range();
     acc.add_group(
         &group_id,
-        AssistId("reformat_number_literal", AssistKind::RefactorInline),
+        AssistId::refactor_inline("reformat_number_literal"),
         label,
         range,
         |builder| builder.replace(range, converted),
@@ -54,7 +54,7 @@ fn remove_separators(acc: &mut Assists, literal: ast::IntNumber) -> Option<()> {
     let range = literal.syntax().text_range();
     acc.add_group(
         &group_id,
-        AssistId("reformat_number_literal", AssistKind::RefactorInline),
+        AssistId::refactor_inline("reformat_number_literal"),
         "Remove digit separators",
         range,
         |builder| builder.replace(range, literal.text().replace('_', "")),
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs
index 0cc771ff39791..6316a8f0db24d 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs
@@ -1,12 +1,10 @@
 use hir::HirDisplay;
-use ide_db::{
-    assists::{AssistId, AssistKind},
-    defs::Definition,
-};
+use ide_db::{assists::AssistId, defs::Definition};
 use stdx::to_upper_snake_case;
 use syntax::{
-    ast::{self, make, HasName},
-    ted, AstNode,
+    AstNode,
+    ast::{self, HasName, make},
+    ted,
 };
 
 use crate::{
@@ -67,7 +65,7 @@ pub(crate) fn promote_local_to_const(acc: &mut Assists, ctx: &AssistContext<'_>)
     }
 
     acc.add(
-        AssistId("promote_local_to_const", AssistKind::Refactor),
+        AssistId::refactor("promote_local_to_const"),
         "Promote local to constant",
         let_stmt.syntax().text_range(),
         |edit| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs
index f222b3eb903c4..5f626d2957111 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs
@@ -1,11 +1,12 @@
 use syntax::{
+    AstNode,
     ast::{self, make},
-    ted, AstNode,
+    ted,
 };
 
 use crate::{
+    AssistId,
     assist_context::{AssistContext, Assists},
-    AssistId, AssistKind,
 };
 
 // Assist: pull_assignment_up
@@ -67,7 +68,7 @@ pub(crate) fn pull_assignment_up(acc: &mut Assists, ctx: &AssistContext<'_>) ->
     }
 
     acc.add(
-        AssistId("pull_assignment_up", AssistKind::RefactorExtract),
+        AssistId::refactor_extract("pull_assignment_up"),
         "Pull assignment up",
         tgt.syntax().text_range(),
         move |edit| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs
index c3600af5a6c58..985121780b1ab 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs
@@ -1,6 +1,6 @@
-use hir::{db::HirDatabase, AsAssocItem, AssocItem, AssocItemContainer, ItemInNs, ModuleDef};
-use ide_db::assists::{AssistId, AssistKind};
-use syntax::{ast, AstNode};
+use hir::{AsAssocItem, AssocItem, AssocItemContainer, ItemInNs, ModuleDef, db::HirDatabase};
+use ide_db::assists::AssistId;
+use syntax::{AstNode, ast};
 
 use crate::{
     assist_context::{AssistContext, Assists},
@@ -54,7 +54,7 @@ pub(crate) fn qualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>) ->
     let qualify_candidate = QualifyCandidate::ImplMethod(ctx.sema.db, call, resolved_call);
 
     acc.add(
-        AssistId("qualify_method_call", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("qualify_method_call"),
         format!("Qualify `{ident}` method call"),
         range,
         |builder| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs
index 2a8465f634cfb..07d2f52a34ee9 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs
@@ -7,18 +7,17 @@ use ide_db::{
     helpers::mod_path_to_ast,
     imports::import_assets::{ImportCandidate, LocatedImport},
 };
-use syntax::ast::HasGenericArgs;
 use syntax::Edition;
+use syntax::ast::HasGenericArgs;
 use syntax::{
-    ast,
-    ast::{make, HasArgList},
-    AstNode, NodeOrToken,
+    AstNode, ast,
+    ast::{HasArgList, make},
 };
 
 use crate::{
+    AssistId, GroupLabel,
     assist_context::{AssistContext, Assists},
     handlers::auto_import::find_importable_node,
-    AssistId, AssistKind, GroupLabel,
 };
 
 // Assist: qualify_path
@@ -39,7 +38,7 @@ use crate::{
 // # pub mod std { pub mod collections { pub struct HashMap { } } }
 // ```
 pub(crate) fn qualify_path(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
-    let (import_assets, syntax_under_caret) = find_importable_node(ctx)?;
+    let (import_assets, syntax_under_caret, expected) = find_importable_node(ctx)?;
     let cfg = ctx.config.import_path_config();
 
     let mut proposed_imports: Vec<_> =
@@ -48,64 +47,57 @@ pub(crate) fn qualify_path(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
         return None;
     }
 
+    let range = ctx.sema.original_range(&syntax_under_caret).range;
+    let current_module = ctx.sema.scope(&syntax_under_caret).map(|scope| scope.module());
+
     let candidate = import_assets.import_candidate();
-    let qualify_candidate = match syntax_under_caret.clone() {
-        NodeOrToken::Node(syntax_under_caret) => match candidate {
-            ImportCandidate::Path(candidate) if !candidate.qualifier.is_empty() => {
-                cov_mark::hit!(qualify_path_qualifier_start);
-                let path = ast::Path::cast(syntax_under_caret)?;
-                let (prev_segment, segment) = (path.qualifier()?.segment()?, path.segment()?);
-                QualifyCandidate::QualifierStart(segment, prev_segment.generic_arg_list())
-            }
-            ImportCandidate::Path(_) => {
-                cov_mark::hit!(qualify_path_unqualified_name);
-                let path = ast::Path::cast(syntax_under_caret)?;
-                let generics = path.segment()?.generic_arg_list();
-                QualifyCandidate::UnqualifiedName(generics)
-            }
-            ImportCandidate::TraitAssocItem(_) => {
-                cov_mark::hit!(qualify_path_trait_assoc_item);
-                let path = ast::Path::cast(syntax_under_caret)?;
-                let (qualifier, segment) = (path.qualifier()?, path.segment()?);
-                QualifyCandidate::TraitAssocItem(qualifier, segment)
-            }
-            ImportCandidate::TraitMethod(_) => {
-                cov_mark::hit!(qualify_path_trait_method);
-                let mcall_expr = ast::MethodCallExpr::cast(syntax_under_caret)?;
-                QualifyCandidate::TraitMethod(ctx.sema.db, mcall_expr)
-            }
-        },
-        // derive attribute path
-        NodeOrToken::Token(_) => QualifyCandidate::UnqualifiedName(None),
+    let qualify_candidate = match candidate {
+        ImportCandidate::Path(candidate) if !candidate.qualifier.is_empty() => {
+            cov_mark::hit!(qualify_path_qualifier_start);
+            let path = ast::Path::cast(syntax_under_caret)?;
+            let (prev_segment, segment) = (path.qualifier()?.segment()?, path.segment()?);
+            QualifyCandidate::QualifierStart(segment, prev_segment.generic_arg_list())
+        }
+        ImportCandidate::Path(_) => {
+            cov_mark::hit!(qualify_path_unqualified_name);
+            let path = ast::Path::cast(syntax_under_caret)?;
+            let generics = path.segment()?.generic_arg_list();
+            QualifyCandidate::UnqualifiedName(generics)
+        }
+        ImportCandidate::TraitAssocItem(_) => {
+            cov_mark::hit!(qualify_path_trait_assoc_item);
+            let path = ast::Path::cast(syntax_under_caret)?;
+            let (qualifier, segment) = (path.qualifier()?, path.segment()?);
+            QualifyCandidate::TraitAssocItem(qualifier, segment)
+        }
+        ImportCandidate::TraitMethod(_) => {
+            cov_mark::hit!(qualify_path_trait_method);
+            let mcall_expr = ast::MethodCallExpr::cast(syntax_under_caret)?;
+            QualifyCandidate::TraitMethod(ctx.sema.db, mcall_expr)
+        }
     };
 
     // we aren't interested in different namespaces
     proposed_imports.sort_by(|a, b| a.import_path.cmp(&b.import_path));
     proposed_imports.dedup_by(|a, b| a.import_path == b.import_path);
 
-    let range = match &syntax_under_caret {
-        NodeOrToken::Node(node) => ctx.sema.original_range(node).range,
-        NodeOrToken::Token(token) => token.text_range(),
-    };
-    let current_module = ctx
-        .sema
-        .scope(&match syntax_under_caret {
-            NodeOrToken::Node(node) => node.clone(),
-            NodeOrToken::Token(t) => t.parent()?,
-        })
-        .map(|scope| scope.module());
     let current_edition =
         current_module.map(|it| it.krate().edition(ctx.db())).unwrap_or(Edition::CURRENT);
     // prioritize more relevant imports
     proposed_imports.sort_by_key(|import| {
-        Reverse(super::auto_import::relevance_score(ctx, import, current_module.as_ref()))
+        Reverse(super::auto_import::relevance_score(
+            ctx,
+            import,
+            expected.as_ref(),
+            current_module.as_ref(),
+        ))
     });
 
     let group_label = group_label(candidate);
     for import in proposed_imports {
         acc.add_group(
             &group_label,
-            AssistId("qualify_path", AssistKind::QuickFix),
+            AssistId::quick_fix("qualify_path"),
             label(ctx.db(), candidate, &import, current_edition),
             range,
             |builder| {
@@ -354,7 +346,7 @@ pub mod PubMod3 {
 }
 "#,
             r#"
-PubMod3::PubStruct
+PubMod1::PubStruct
 
 pub mod PubMod1 {
     pub struct PubStruct;
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/raw_string.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/raw_string.rs
index 5a197f23d0e3a..94b49c5df0915 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/raw_string.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/raw_string.rs
@@ -1,8 +1,11 @@
 use std::borrow::Cow;
 
-use syntax::{ast, ast::IsString, AstToken, TextRange, TextSize};
+use syntax::{AstToken, TextRange, TextSize, ast, ast::IsString};
 
-use crate::{utils::required_hashes, AssistContext, AssistId, AssistKind, Assists};
+use crate::{
+    AssistContext, AssistId, Assists,
+    utils::{required_hashes, string_suffix},
+};
 
 // Assist: make_raw_string
 //
@@ -28,17 +31,20 @@ pub(crate) fn make_raw_string(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt
     let value = token.value().ok()?;
     let target = token.syntax().text_range();
     acc.add(
-        AssistId("make_raw_string", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("make_raw_string"),
         "Rewrite as raw string",
         target,
         |edit| {
             let hashes = "#".repeat(required_hashes(&value).max(1));
+            let range = token.syntax().text_range();
+            let suffix = string_suffix(token.text()).unwrap_or_default();
+            let range = TextRange::new(range.start(), range.end() - TextSize::of(suffix));
             if matches!(value, Cow::Borrowed(_)) {
                 // Avoid replacing the whole string to better position the cursor.
-                edit.insert(token.syntax().text_range().start(), format!("r{hashes}"));
-                edit.insert(token.syntax().text_range().end(), hashes);
+                edit.insert(range.start(), format!("r{hashes}"));
+                edit.insert(range.end(), hashes);
             } else {
-                edit.replace(token.syntax().text_range(), format!("r{hashes}\"{value}\"{hashes}"));
+                edit.replace(range, format!("r{hashes}\"{value}\"{hashes}"));
             }
         },
     )
@@ -67,21 +73,25 @@ pub(crate) fn make_usual_string(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
     let value = token.value().ok()?;
     let target = token.syntax().text_range();
     acc.add(
-        AssistId("make_usual_string", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("make_usual_string"),
         "Rewrite as regular string",
         target,
         |edit| {
             // parse inside string to escape `"`
             let escaped = value.escape_default().to_string();
+            let suffix = string_suffix(token.text()).unwrap_or_default();
             if let Some(offsets) = token.quote_offsets() {
                 if token.text()[offsets.contents - token.syntax().text_range().start()] == escaped {
+                    let end_quote = offsets.quotes.1;
+                    let end_quote =
+                        TextRange::new(end_quote.start(), end_quote.end() - TextSize::of(suffix));
                     edit.replace(offsets.quotes.0, "\"");
-                    edit.replace(offsets.quotes.1, "\"");
+                    edit.replace(end_quote, "\"");
                     return;
                 }
             }
 
-            edit.replace(token.syntax().text_range(), format!("\"{escaped}\""));
+            edit.replace(token.syntax().text_range(), format!("\"{escaped}\"{suffix}"));
         },
     )
 }
@@ -108,9 +118,10 @@ pub(crate) fn add_hash(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()>
     }
     let text_range = token.syntax().text_range();
     let target = text_range;
-    acc.add(AssistId("add_hash", AssistKind::Refactor), "Add #", target, |edit| {
+    acc.add(AssistId::refactor("add_hash"), "Add #", target, |edit| {
+        let suffix = string_suffix(token.text()).unwrap_or_default();
         edit.insert(text_range.start() + TextSize::of('r'), "#");
-        edit.insert(text_range.end(), "#");
+        edit.insert(text_range.end() - TextSize::of(suffix), "#");
     })
 }
 
@@ -150,9 +161,13 @@ pub(crate) fn remove_hash(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
         return None;
     }
 
-    acc.add(AssistId("remove_hash", AssistKind::RefactorRewrite), "Remove #", text_range, |edit| {
+    acc.add(AssistId::refactor_rewrite("remove_hash"), "Remove #", text_range, |edit| {
+        let suffix = string_suffix(text).unwrap_or_default();
         edit.delete(TextRange::at(text_range.start() + TextSize::of('r'), TextSize::of('#')));
-        edit.delete(TextRange::new(text_range.end() - TextSize::of('#'), text_range.end()));
+        edit.delete(
+            TextRange::new(text_range.end() - TextSize::of('#'), text_range.end())
+                - TextSize::of(suffix),
+        );
     })
 }
 
@@ -262,6 +277,23 @@ string"###;
         )
     }
 
+    #[test]
+    fn make_raw_string_has_suffix() {
+        check_assist(
+            make_raw_string,
+            r#"
+            fn f() {
+                let s = $0"random string"i32;
+            }
+            "#,
+            r##"
+            fn f() {
+                let s = r#"random string"#i32;
+            }
+            "##,
+        )
+    }
+
     #[test]
     fn make_raw_string_not_works_on_partial_string() {
         check_assist_not_applicable(
@@ -316,6 +348,23 @@ string"###;
         )
     }
 
+    #[test]
+    fn add_hash_has_suffix_works() {
+        check_assist(
+            add_hash,
+            r#"
+            fn f() {
+                let s = $0r"random string"i32;
+            }
+            "#,
+            r##"
+            fn f() {
+                let s = r#"random string"#i32;
+            }
+            "##,
+        )
+    }
+
     #[test]
     fn add_more_hash_works() {
         check_assist(
@@ -333,6 +382,23 @@ string"###;
         )
     }
 
+    #[test]
+    fn add_more_hash_has_suffix_works() {
+        check_assist(
+            add_hash,
+            r##"
+            fn f() {
+                let s = $0r#"random"string"#i32;
+            }
+            "##,
+            r###"
+            fn f() {
+                let s = r##"random"string"##i32;
+            }
+            "###,
+        )
+    }
+
     #[test]
     fn add_hash_not_works() {
         check_assist_not_applicable(
@@ -367,6 +433,15 @@ string"###;
         )
     }
 
+    #[test]
+    fn remove_hash_has_suffix_works() {
+        check_assist(
+            remove_hash,
+            r##"fn f() { let s = $0r#"random string"#i32; }"##,
+            r#"fn f() { let s = r"random string"i32; }"#,
+        )
+    }
+
     #[test]
     fn cant_remove_required_hash() {
         cov_mark::check!(cant_remove_required_hash);
@@ -397,6 +472,23 @@ string"###;
         )
     }
 
+    #[test]
+    fn remove_more_hash_has_suffix_works() {
+        check_assist(
+            remove_hash,
+            r###"
+            fn f() {
+                let s = $0r##"random string"##i32;
+            }
+            "###,
+            r##"
+            fn f() {
+                let s = r#"random string"#i32;
+            }
+            "##,
+        )
+    }
+
     #[test]
     fn remove_hash_does_not_work() {
         check_assist_not_applicable(remove_hash, r#"fn f() { let s = $0"random string"; }"#);
@@ -437,6 +529,23 @@ string"###;
         )
     }
 
+    #[test]
+    fn make_usual_string_has_suffix_works() {
+        check_assist(
+            make_usual_string,
+            r##"
+            fn f() {
+                let s = $0r#"random string"#i32;
+            }
+            "##,
+            r#"
+            fn f() {
+                let s = "random string"i32;
+            }
+            "#,
+        )
+    }
+
     #[test]
     fn make_usual_string_with_quote_works() {
         check_assist(
@@ -471,6 +580,23 @@ string"###;
         )
     }
 
+    #[test]
+    fn make_usual_string_more_hash_has_suffix_works() {
+        check_assist(
+            make_usual_string,
+            r###"
+            fn f() {
+                let s = $0r##"random string"##i32;
+            }
+            "###,
+            r##"
+            fn f() {
+                let s = "random string"i32;
+            }
+            "##,
+        )
+    }
+
     #[test]
     fn make_usual_string_not_works() {
         check_assist_not_applicable(
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs
index 1f57f7d3d3765..52ace03f3cfee 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs
@@ -1,10 +1,11 @@
 use itertools::Itertools;
 use syntax::{
-    ast::{self, make, AstNode, AstToken},
-    match_ast, ted, Edition, NodeOrToken, SyntaxElement, TextRange, TextSize, T,
+    Edition, NodeOrToken, SyntaxElement, T, TextRange, TextSize,
+    ast::{self, AstNode, AstToken, make},
+    match_ast, ted,
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: remove_dbg
 //
@@ -41,7 +42,7 @@ pub(crate) fn remove_dbg(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
         macro_calls.into_iter().filter_map(compute_dbg_replacement).collect::<Vec<_>>();
 
     acc.add(
-        AssistId("remove_dbg", AssistKind::QuickFix),
+        AssistId::quick_fix("remove_dbg"),
         "Remove dbg!()",
         replacements.iter().map(|&(range, _)| range).reduce(|acc, range| acc.cover(range))?,
         |builder| {
@@ -73,7 +74,7 @@ fn compute_dbg_replacement(macro_expr: ast::MacroExpr) -> Option<(TextRange, Opt
     }
 
     let mac_input = tt.syntax().children_with_tokens().skip(1).take_while(|it| *it != r_delim);
-    let input_expressions = mac_input.group_by(|tok| tok.kind() == T![,]);
+    let input_expressions = mac_input.chunk_by(|tok| tok.kind() == T![,]);
     let input_expressions = input_expressions
         .into_iter()
         .filter_map(|(is_sep, group)| (!is_sep).then_some(group))
@@ -145,7 +146,7 @@ fn compute_dbg_replacement(macro_expr: ast::MacroExpr) -> Option<(TextRange, Opt
                 None => false,
             };
             let expr = replace_nested_dbgs(expr.clone());
-            let expr = if wrap { make::expr_paren(expr) } else { expr.clone_subtree() };
+            let expr = if wrap { make::expr_paren(expr).into() } else { expr.clone_subtree() };
             (macro_call.syntax().text_range(), Some(expr))
         }
         // dbg!(expr0, expr1, ...)
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_mut.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_mut.rs
index 43740a5a6d5c7..b07a361adf48e 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_mut.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_mut.rs
@@ -1,6 +1,6 @@
 use syntax::{SyntaxKind, T};
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: remove_mut
 //
@@ -21,18 +21,13 @@ pub(crate) fn remove_mut(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
     let mut_token = ctx.find_token_syntax_at_offset(T![mut])?;
 
     let target = mut_token.text_range();
-    acc.add(
-        AssistId("remove_mut", AssistKind::Refactor),
-        "Remove `mut` keyword",
-        target,
-        |builder| {
-            let mut editor = builder.make_editor(&mut_token.parent().unwrap());
-            match mut_token.next_token() {
-                Some(it) if it.kind() == SyntaxKind::WHITESPACE => editor.delete(it),
-                _ => (),
-            }
-            editor.delete(mut_token);
-            builder.add_file_edits(ctx.file_id(), editor);
-        },
-    )
+    acc.add(AssistId::refactor("remove_mut"), "Remove `mut` keyword", target, |builder| {
+        let mut editor = builder.make_editor(&mut_token.parent().unwrap());
+        match mut_token.next_token() {
+            Some(it) if it.kind() == SyntaxKind::WHITESPACE => editor.delete(it),
+            _ => (),
+        }
+        editor.delete(mut_token);
+        builder.add_file_edits(ctx.vfs_file_id(), editor);
+    })
 }
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_parentheses.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_parentheses.rs
index e7beb23bf8e7f..d514c1c291583 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_parentheses.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_parentheses.rs
@@ -1,10 +1,10 @@
 use syntax::{
+    AstNode, SyntaxKind, T,
     ast::{self, syntax_factory::SyntaxFactory},
     syntax_editor::Position,
-    AstNode, SyntaxKind, T,
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: remove_parentheses
 //
@@ -40,7 +40,7 @@ pub(crate) fn remove_parentheses(acc: &mut Assists, ctx: &AssistContext<'_>) ->
 
     let target = parens.syntax().text_range();
     acc.add(
-        AssistId("remove_parentheses", AssistKind::Refactor),
+        AssistId::refactor("remove_parentheses"),
         "Remove redundant parentheses",
         target,
         |builder| {
@@ -54,12 +54,12 @@ pub(crate) fn remove_parentheses(acc: &mut Assists, ctx: &AssistContext<'_>) ->
                 None => false,
             };
             if need_to_add_ws {
-                let make = SyntaxFactory::new();
+                let make = SyntaxFactory::with_mappings();
                 editor.insert(Position::before(parens.syntax()), make.whitespace(" "));
                 editor.add_mappings(make.finish_with_mappings());
             }
             editor.replace(parens.syntax(), expr.syntax());
-            builder.add_file_edits(ctx.file_id(), editor);
+            builder.add_file_edits(ctx.vfs_file_id(), editor);
         },
     )
 }
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_underscore.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_underscore.rs
new file mode 100644
index 0000000000000..912e1936b593e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_underscore.rs
@@ -0,0 +1,191 @@
+use ide_db::{
+    assists::AssistId,
+    defs::{Definition, NameClass, NameRefClass},
+};
+use syntax::{AstNode, ast};
+
+use crate::{AssistContext, Assists};
+
+// Assist: remove_underscore_from_used_variables
+//
+// Removes underscore from used variables.
+//
+// ```
+// fn main() {
+//     let mut _$0foo = 1;
+//     _foo = 2;
+// }
+// ```
+// ->
+// ```
+// fn main() {
+//     let mut foo = 1;
+//     foo = 2;
+// }
+// ```
+pub(crate) fn remove_underscore(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    let (text, text_range, def) = if let Some(name_ref) = ctx.find_node_at_offset::<ast::Name>() {
+        let text = name_ref.text();
+        if !text.starts_with('_') {
+            return None;
+        }
+
+        let def = match NameClass::classify(&ctx.sema, &name_ref)? {
+            NameClass::Definition(def @ Definition::Local(_)) => def,
+            NameClass::PatFieldShorthand { local_def, .. } => Definition::Local(local_def),
+            _ => return None,
+        };
+        (text.to_owned(), name_ref.syntax().text_range(), def)
+    } else if let Some(name_ref) = ctx.find_node_at_offset::<ast::NameRef>() {
+        let text = name_ref.text();
+        if !text.starts_with('_') {
+            return None;
+        }
+        let def = match NameRefClass::classify(&ctx.sema, &name_ref)? {
+            NameRefClass::Definition(def @ Definition::Local(_), _) => def,
+            NameRefClass::FieldShorthand { local_ref, .. } => Definition::Local(local_ref),
+            _ => return None,
+        };
+        (text.to_owned(), name_ref.syntax().text_range(), def)
+    } else {
+        return None;
+    };
+
+    if !def.usages(&ctx.sema).at_least_one() {
+        return None;
+    }
+
+    let new_name = text.trim_start_matches('_');
+    acc.add(
+        AssistId::refactor("remove_underscore_from_used_variables"),
+        "Remove underscore from a used variable",
+        text_range,
+        |builder| {
+            let changes = def.rename(&ctx.sema, new_name).unwrap();
+            builder.source_change = changes;
+        },
+    )
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::tests::{check_assist, check_assist_not_applicable};
+
+    use super::*;
+
+    #[test]
+    fn remove_underscore_from_used_variable() {
+        check_assist(
+            remove_underscore,
+            r#"
+fn main() {
+    let mut _$0foo = 1;
+    _foo = 2;
+}
+"#,
+            r#"
+fn main() {
+    let mut foo = 1;
+    foo = 2;
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn not_applicable_for_unused() {
+        check_assist_not_applicable(
+            remove_underscore,
+            r#"
+fn main() {
+    let _$0unused = 1;
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn not_applicable_for_no_underscore() {
+        check_assist_not_applicable(
+            remove_underscore,
+            r#"
+fn main() {
+    let f$0oo = 1;
+    foo = 2;
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn remove_multiple_underscores() {
+        check_assist(
+            remove_underscore,
+            r#"
+fn main() {
+    let mut _$0_foo = 1;
+    __foo = 2;
+}
+"#,
+            r#"
+fn main() {
+    let mut foo = 1;
+    foo = 2;
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn remove_underscore_on_usage() {
+        check_assist(
+            remove_underscore,
+            r#"
+fn main() {
+    let mut _foo = 1;
+    _$0foo = 2;
+}
+"#,
+            r#"
+fn main() {
+    let mut foo = 1;
+    foo = 2;
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn remove_underscore_in_function_parameter_usage() {
+        check_assist(
+            remove_underscore,
+            r#"
+fn foo(_foo: i32) {
+    let bar = _$0foo + 1;
+}
+"#,
+            r#"
+fn foo(foo: i32) {
+    let bar = foo + 1;
+}
+"#,
+        )
+    }
+
+    #[test]
+    fn remove_underscore_in_function_parameter() {
+        check_assist(
+            remove_underscore,
+            r#"
+fn foo(_$0foo: i32) {
+    let bar = _foo + 1;
+}
+"#,
+            r#"
+fn foo(foo: i32) {
+    let bar = foo + 1;
+}
+"#,
+        )
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs
index 0570b447782ec..1baf814ca6826 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs
@@ -1,18 +1,18 @@
 use std::collections::hash_map::Entry;
 
-use hir::{FileRange, HirFileIdExt, InFile, InRealFile, Module, ModuleSource};
+use hir::{FileRange, InFile, InRealFile, Module, ModuleSource};
 use ide_db::text_edit::TextRange;
 use ide_db::{
+    FxHashMap, RootDatabase,
     defs::Definition,
     search::{FileReference, ReferenceCategory, SearchScope},
-    FxHashMap, RootDatabase,
 };
 use syntax::{
-    ast::{self, Rename},
     AstNode,
+    ast::{self, Rename},
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: remove_unused_imports
 //
@@ -126,7 +126,7 @@ pub(crate) fn remove_unused_imports(acc: &mut Assists, ctx: &AssistContext<'_>)
     // Peek so we terminate early if an unused use is found. Only do the rest of the work if the user selects the assist.
     if unused.peek().is_some() {
         acc.add(
-            AssistId("remove_unused_imports", AssistKind::QuickFix),
+            AssistId::quick_fix("remove_unused_imports"),
             "Remove all the unused imports",
             selected_el.text_range(),
             |builder| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_param.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_param.rs
index 5ddb17b20729a..8b824c7c7f497 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_param.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_param.rs
@@ -1,16 +1,15 @@
-use ide_db::{defs::Definition, search::FileReference, EditionedFileId};
+use ide_db::{EditionedFileId, defs::Definition, search::FileReference};
 use syntax::{
+    AstNode, SourceFile, SyntaxElement, SyntaxKind, SyntaxNode, T, TextRange,
     algo::{find_node_at_range, least_common_ancestor_element},
     ast::{self, HasArgList},
     syntax_editor::Element,
-    AstNode, SourceFile, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, T,
 };
 
 use SyntaxKind::WHITESPACE;
 
 use crate::{
-    assist_context::SourceChangeBuilder, utils::next_prev, AssistContext, AssistId, AssistKind,
-    Assists,
+    AssistContext, AssistId, Assists, assist_context::SourceChangeBuilder, utils::next_prev,
 };
 
 // Assist: remove_unused_param
@@ -77,7 +76,7 @@ pub(crate) fn remove_unused_param(acc: &mut Assists, ctx: &AssistContext<'_>) ->
     }
     let parent = param.syntax().parent()?;
     acc.add(
-        AssistId("remove_unused_param", AssistKind::Refactor),
+        AssistId::refactor("remove_unused_param"),
         "Remove unused parameter",
         param.syntax().text_range(),
         |builder| {
@@ -89,7 +88,7 @@ pub(crate) fn remove_unused_param(acc: &mut Assists, ctx: &AssistContext<'_>) ->
             for (file_id, references) in fn_def.usages(&ctx.sema).all() {
                 process_usages(ctx, builder, file_id, references, param_position, is_self_present);
             }
-            builder.add_file_edits(ctx.file_id(), editor);
+            builder.add_file_edits(ctx.vfs_file_id(), editor);
         },
     )
 }
@@ -97,12 +96,14 @@ pub(crate) fn remove_unused_param(acc: &mut Assists, ctx: &AssistContext<'_>) ->
 fn process_usages(
     ctx: &AssistContext<'_>,
     builder: &mut SourceChangeBuilder,
-    file_id: EditionedFileId,
+    editioned_file_id: EditionedFileId,
     references: Vec<FileReference>,
     arg_to_remove: usize,
     is_self_present: bool,
 ) {
-    let source_file = ctx.sema.parse(file_id);
+    let source_file = ctx.sema.parse(editioned_file_id);
+    let file_id = editioned_file_id.file_id(ctx.db());
+    builder.edit_file(file_id);
     let possible_ranges = references
         .into_iter()
         .filter_map(|usage| process_usage(&source_file, usage, arg_to_remove, is_self_present));
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_fields.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_fields.rs
index a79a82be45079..990677d372139 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_fields.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_fields.rs
@@ -1,9 +1,9 @@
 use either::Either;
 use ide_db::FxHashMap;
 use itertools::Itertools;
-use syntax::{ast, syntax_editor::SyntaxEditor, AstNode, SmolStr, SyntaxElement, ToSmolStr};
+use syntax::{AstNode, SmolStr, SyntaxElement, ToSmolStr, ast, syntax_editor::SyntaxEditor};
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: reorder_fields
 //
@@ -67,7 +67,7 @@ pub(crate) fn reorder_fields(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
     }
     let target = record.as_ref().either(AstNode::syntax, AstNode::syntax).text_range();
     acc.add(
-        AssistId("reorder_fields", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("reorder_fields"),
         "Reorder record fields",
         target,
         |builder| {
@@ -82,7 +82,7 @@ pub(crate) fn reorder_fields(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
                 }
             }
 
-            builder.add_file_edits(ctx.file_id(), editor);
+            builder.add_file_edits(ctx.vfs_file_id(), editor);
         },
     )
 }
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_impl_items.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_impl_items.rs
index c3404173eafe6..0ad5ec9d44246 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_impl_items.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_impl_items.rs
@@ -2,11 +2,11 @@ use hir::{PathResolution, Semantics};
 use ide_db::{FxHashMap, RootDatabase};
 use itertools::Itertools;
 use syntax::{
-    ast::{self, HasName},
     AstNode, SyntaxElement,
+    ast::{self, HasName},
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: reorder_impl_items
 //
@@ -95,7 +95,7 @@ pub(crate) fn reorder_impl_items(acc: &mut Assists, ctx: &AssistContext<'_>) ->
 
     let target = items.syntax().text_range();
     acc.add(
-        AssistId("reorder_impl_items", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("reorder_impl_items"),
         "Sort items by trait definition",
         target,
         |builder| {
@@ -106,7 +106,7 @@ pub(crate) fn reorder_impl_items(acc: &mut Assists, ctx: &AssistContext<'_>) ->
                 .zip(sorted)
                 .for_each(|(old, new)| editor.replace(old.syntax(), new.syntax()));
 
-            builder.add_file_edits(ctx.file_id(), editor);
+            builder.add_file_edits(ctx.vfs_file_id(), editor);
         },
     )
 }
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_arith_op.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_arith_op.rs
index 4b20b35c44624..6b385a03625b7 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_arith_op.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_arith_op.rs
@@ -1,7 +1,7 @@
-use ide_db::assists::{AssistId, AssistKind, GroupLabel};
+use ide_db::assists::{AssistId, GroupLabel};
 use syntax::{
-    ast::{self, ArithOp, BinaryOp},
     AstNode, TextRange,
+    ast::{self, ArithOp, BinaryOp},
 };
 
 use crate::assist_context::{AssistContext, Assists};
@@ -132,7 +132,7 @@ impl ArithKind {
             ArithKind::Wrapping => "replace_arith_with_wrapping",
         };
 
-        AssistId(s, AssistKind::RefactorRewrite)
+        AssistId::refactor_rewrite(s)
     }
 
     fn label(&self) -> &'static str {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
index 31e828eae2712..6dcdf5edbd631 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
@@ -1,20 +1,20 @@
-use hir::{InFile, MacroFileIdExt, ModuleDef};
+use hir::{InFile, ModuleDef};
 use ide_db::{helpers::mod_path_to_ast, imports::import_assets::NameToImport, items_locator};
 use itertools::Itertools;
 use syntax::{
-    ast::{self, make, AstNode, HasName},
-    ted,
     SyntaxKind::WHITESPACE,
     T,
+    ast::{self, AstNode, HasName, make},
+    ted::{self, Position},
 };
 
 use crate::{
+    AssistId,
     assist_context::{AssistContext, Assists, SourceChangeBuilder},
     utils::{
-        add_trait_assoc_items_to_impl, filter_assoc_items, gen_trait_fn_body, generate_trait_impl,
-        DefaultMethods, IgnoreAssocItems,
+        DefaultMethods, IgnoreAssocItems, add_trait_assoc_items_to_impl, filter_assoc_items,
+        gen_trait_fn_body, generate_trait_impl,
     },
-    AssistId, AssistKind,
 };
 
 // Assist: replace_derive_with_manual_impl
@@ -73,12 +73,12 @@ pub(crate) fn replace_derive_with_manual_impl(
     let current_edition = current_crate.edition(ctx.db());
 
     let found_traits = items_locator::items_with_name(
-        &ctx.sema,
+        ctx.db(),
         current_crate,
         NameToImport::exact_case_sensitive(path.segments().last()?.to_string()),
         items_locator::AssocSearchMode::Exclude,
     )
-    .filter_map(|item| match item.into_module_def() {
+    .filter_map(|(item, _)| match item.into_module_def() {
         ModuleDef::Trait(trait_) => Some(trait_),
         _ => None,
     })
@@ -125,74 +125,94 @@ fn add_assist(
     let annotated_name = adt.name()?;
     let label = format!("Convert to manual `impl {replace_trait_path} for {annotated_name}`");
 
-    acc.add(
-        AssistId("replace_derive_with_manual_impl", AssistKind::Refactor),
-        label,
-        target,
-        |builder| {
-            let insert_after = ted::Position::after(builder.make_mut(adt.clone()).syntax());
-
-            let impl_def_with_items =
-                impl_def_from_trait(&ctx.sema, adt, &annotated_name, trait_, replace_trait_path);
-            update_attribute(builder, old_derives, old_tree, old_trait_path, attr);
-
-            let trait_path = make::ty_path(replace_trait_path.clone());
-
-            match (ctx.config.snippet_cap, impl_def_with_items) {
-                (None, None) => {
-                    let impl_def = generate_trait_impl(adt, trait_path);
+    acc.add(AssistId::refactor("replace_derive_with_manual_impl"), label, target, |builder| {
+        let insert_after = ted::Position::after(builder.make_mut(adt.clone()).syntax());
+        let impl_is_unsafe = trait_.map(|s| s.is_unsafe(ctx.db())).unwrap_or(false);
+        let impl_def_with_items =
+            impl_def_from_trait(&ctx.sema, adt, &annotated_name, trait_, replace_trait_path);
+        update_attribute(builder, old_derives, old_tree, old_trait_path, attr);
+
+        let trait_path = make::ty_path(replace_trait_path.clone());
+
+        match (ctx.config.snippet_cap, impl_def_with_items) {
+            (None, None) => {
+                let impl_def = generate_trait_impl(adt, trait_path);
+                if impl_is_unsafe {
+                    ted::insert(
+                        Position::first_child_of(impl_def.syntax()),
+                        make::token(T![unsafe]),
+                    );
+                }
 
-                    ted::insert_all(
-                        insert_after,
-                        vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()],
+                ted::insert_all(
+                    insert_after,
+                    vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()],
+                );
+            }
+            (None, Some((impl_def, _))) => {
+                if impl_is_unsafe {
+                    ted::insert(
+                        Position::first_child_of(impl_def.syntax()),
+                        make::token(T![unsafe]),
                     );
                 }
-                (None, Some((impl_def, _))) => {
-                    ted::insert_all(
-                        insert_after,
-                        vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()],
+                ted::insert_all(
+                    insert_after,
+                    vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()],
+                );
+            }
+            (Some(cap), None) => {
+                let impl_def = generate_trait_impl(adt, trait_path);
+
+                if impl_is_unsafe {
+                    ted::insert(
+                        Position::first_child_of(impl_def.syntax()),
+                        make::token(T![unsafe]),
                     );
                 }
-                (Some(cap), None) => {
-                    let impl_def = generate_trait_impl(adt, trait_path);
 
-                    if let Some(l_curly) =
-                        impl_def.assoc_item_list().and_then(|it| it.l_curly_token())
-                    {
-                        builder.add_tabstop_after_token(cap, l_curly);
-                    }
+                if let Some(l_curly) = impl_def.assoc_item_list().and_then(|it| it.l_curly_token())
+                {
+                    builder.add_tabstop_after_token(cap, l_curly);
+                }
 
-                    ted::insert_all(
-                        insert_after,
-                        vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()],
+                ted::insert_all(
+                    insert_after,
+                    vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()],
+                );
+            }
+            (Some(cap), Some((impl_def, first_assoc_item))) => {
+                let mut added_snippet = false;
+
+                if impl_is_unsafe {
+                    ted::insert(
+                        Position::first_child_of(impl_def.syntax()),
+                        make::token(T![unsafe]),
                     );
                 }
-                (Some(cap), Some((impl_def, first_assoc_item))) => {
-                    let mut added_snippet = false;
-                    if let ast::AssocItem::Fn(ref func) = first_assoc_item {
-                        if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast)
-                        {
-                            if m.syntax().text() == "todo!()" {
-                                // Make the `todo!()` a placeholder
-                                builder.add_placeholder_snippet(cap, m);
-                                added_snippet = true;
-                            }
-                        }
-                    }
 
-                    if !added_snippet {
-                        // If we haven't already added a snippet, add a tabstop before the generated function
-                        builder.add_tabstop_before(cap, first_assoc_item);
+                if let ast::AssocItem::Fn(ref func) = first_assoc_item {
+                    if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast) {
+                        if m.syntax().text() == "todo!()" {
+                            // Make the `todo!()` a placeholder
+                            builder.add_placeholder_snippet(cap, m);
+                            added_snippet = true;
+                        }
                     }
+                }
 
-                    ted::insert_all(
-                        insert_after,
-                        vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()],
-                    );
+                if !added_snippet {
+                    // If we haven't already added a snippet, add a tabstop before the generated function
+                    builder.add_tabstop_before(cap, first_assoc_item);
                 }
-            };
-        },
-    )
+
+                ted::insert_all(
+                    insert_after,
+                    vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()],
+                );
+            }
+        };
+    })
 }
 
 fn impl_def_from_trait(
@@ -1402,6 +1422,23 @@ impl core::fmt::Debug for Foo {
         f.debug_struct("Foo").finish()
     }
 }
+"#,
+        )
+    }
+
+    #[test]
+    fn unsafeness_of_a_trait_observed() {
+        check_assist(
+            replace_derive_with_manual_impl,
+            r#"
+//- minicore: send, derive
+#[derive(Sen$0d)]
+pub struct Foo;
+"#,
+            r#"
+pub struct Foo;
+
+unsafe impl Send for Foo {$0}
 "#,
         )
     }
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs
index e324d6eaaad2f..15d3db5e749f0 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs
@@ -2,19 +2,19 @@ use std::iter::successors;
 
 use either::Either;
 use ide_db::{
+    RootDatabase,
     defs::NameClass,
     syntax_helpers::node_ext::{is_pattern_cond, single_let},
     ty_filter::TryEnum,
-    RootDatabase,
 };
 use syntax::{
-    ast::{self, edit::IndentLevel, edit_in_place::Indent, syntax_factory::SyntaxFactory, HasName},
-    AstNode, TextRange, T,
+    AstNode, T, TextRange,
+    ast::{self, HasName, edit::IndentLevel, edit_in_place::Indent, syntax_factory::SyntaxFactory},
 };
 
 use crate::{
+    AssistContext, AssistId, Assists,
     utils::{does_pat_match_variant, does_pat_variant_nested_or_literal, unwrap_trivial_block},
-    AssistContext, AssistId, AssistKind, Assists,
 };
 
 // Assist: replace_if_let_with_match
@@ -101,11 +101,11 @@ pub(crate) fn replace_if_let_with_match(acc: &mut Assists, ctx: &AssistContext<'
     let let_ = if pat_seen { " let" } else { "" };
 
     acc.add(
-        AssistId("replace_if_let_with_match", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("replace_if_let_with_match"),
         format!("Replace if{let_} with match"),
         available_range,
         move |builder| {
-            let make = SyntaxFactory::new();
+            let make = SyntaxFactory::with_mappings();
             let match_expr = {
                 let else_arm = make_else_arm(ctx, &make, else_block, &cond_bodies);
                 let make_match_arm = |(pat, body): (_, ast::BlockExpr)| {
@@ -142,7 +142,7 @@ pub(crate) fn replace_if_let_with_match(acc: &mut Assists, ctx: &AssistContext<'
             let mut editor = builder.make_editor(if_expr.syntax());
             editor.replace(if_expr.syntax(), expr.syntax());
             editor.add_mappings(make.finish_with_mappings());
-            builder.add_file_edits(ctx.file_id(), editor);
+            builder.add_file_edits(ctx.vfs_file_id(), editor);
         },
     )
 }
@@ -249,11 +249,11 @@ pub(crate) fn replace_match_with_if_let(acc: &mut Assists, ctx: &AssistContext<'
         _ => " let",
     };
     acc.add(
-        AssistId("replace_match_with_if_let", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("replace_match_with_if_let"),
         format!("Replace match with if{let_}"),
         match_expr.syntax().text_range(),
         move |builder| {
-            let make = SyntaxFactory::new();
+            let make = SyntaxFactory::with_mappings();
             let make_block_expr = |expr: ast::Expr| {
                 // Blocks with modifiers (unsafe, async, etc.) are parsed as BlockExpr, but are
                 // formatted without enclosing braces. If we encounter such block exprs,
@@ -291,7 +291,7 @@ pub(crate) fn replace_match_with_if_let(acc: &mut Assists, ctx: &AssistContext<'
             let mut editor = builder.make_editor(match_expr.syntax());
             editor.replace(match_expr.syntax(), if_let_expr.syntax());
             editor.add_mappings(make.finish_with_mappings());
-            builder.add_file_edits(ctx.file_id(), editor);
+            builder.add_file_edits(ctx.vfs_file_id(), editor);
         },
     )
 }
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs
index 47972ff619acb..e933bcc40dbbb 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs
@@ -1,10 +1,10 @@
 use ide_db::syntax_helpers::suggest_name;
 use syntax::{
-    ast::{self, make, AstNode},
+    ast::{self, AstNode, make},
     ted,
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: replace_is_some_with_if_let_some
 //
@@ -56,7 +56,7 @@ pub(crate) fn replace_is_method_with_if_let_method(
             };
 
             acc.add(
-                AssistId(assist_id, AssistKind::RefactorRewrite),
+                AssistId::refactor_rewrite(assist_id),
                 message,
                 call_expr.syntax().text_range(),
                 |edit| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_let_with_if_let.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_let_with_if_let.rs
index c071d3022d251..90f4ff7ad2511 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_let_with_if_let.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_let_with_if_let.rs
@@ -1,10 +1,10 @@
 use ide_db::ty_filter::TryEnum;
 use syntax::{
-    ast::{self, edit::IndentLevel, edit_in_place::Indent, syntax_factory::SyntaxFactory},
     AstNode, T,
+    ast::{self, edit::IndentLevel, edit_in_place::Indent, syntax_factory::SyntaxFactory},
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: replace_let_with_if_let
 //
@@ -38,31 +38,43 @@ pub(crate) fn replace_let_with_if_let(acc: &mut Assists, ctx: &AssistContext<'_>
 
     let target = let_kw.text_range();
     acc.add(
-        AssistId("replace_let_with_if_let", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("replace_let_with_if_let"),
         "Replace let with if let",
         target,
         |builder| {
             let mut editor = builder.make_editor(let_stmt.syntax());
-            let make = SyntaxFactory::new();
+            let make = SyntaxFactory::with_mappings();
             let ty = ctx.sema.type_of_expr(&init);
-            let happy_variant = ty
-                .and_then(|ty| TryEnum::from_ty(&ctx.sema, &ty.adjusted()))
-                .map(|it| it.happy_case());
-            let pat = match happy_variant {
-                None => original_pat,
-                Some(var_name) => {
-                    make.tuple_struct_pat(make.ident_path(var_name), [original_pat]).into()
+            let pat = if let_stmt.let_else().is_some() {
+                // Do not add the wrapper type that implements `Try`,
+                // since the statement already wraps the pattern.
+                original_pat
+            } else {
+                let happy_variant = ty
+                    .and_then(|ty| TryEnum::from_ty(&ctx.sema, &ty.adjusted()))
+                    .map(|it| it.happy_case());
+                match happy_variant {
+                    None => original_pat,
+                    Some(var_name) => {
+                        make.tuple_struct_pat(make.ident_path(var_name), [original_pat]).into()
+                    }
                 }
             };
 
             let block = make.block_expr([], None);
             block.indent(IndentLevel::from_node(let_stmt.syntax()));
-            let if_expr = make.expr_if(make.expr_let(pat, init).into(), block, None);
+            let if_expr = make.expr_if(
+                make.expr_let(pat, init).into(),
+                block,
+                let_stmt
+                    .let_else()
+                    .and_then(|let_else| let_else.block_expr().map(ast::ElseBranch::from)),
+            );
             let if_stmt = make.expr_stmt(if_expr.into());
 
             editor.replace(let_stmt.syntax(), if_stmt.syntax());
             editor.add_mappings(make.finish_with_mappings());
-            builder.add_file_edits(ctx.file_id(), editor);
+            builder.add_file_edits(ctx.vfs_file_id(), editor);
         },
     )
 }
@@ -90,6 +102,27 @@ enum E<T> { X(T), Y(T) }
 fn main() {
     if let x = E::X(92) {
     }
+}
+            ",
+        )
+    }
+
+    #[test]
+    fn replace_let_else() {
+        check_assist(
+            replace_let_with_if_let,
+            r"
+//- minicore: option
+fn main() {
+    let a = Some(1);
+    $0let Some(_) = a else { unreachable!() };
+}
+            ",
+            r"
+fn main() {
+    let a = Some(1);
+    if let Some(_) = a {
+    } else { unreachable!() }
 }
             ",
         )
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_method_eager_lazy.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_method_eager_lazy.rs
index 12d025f07594e..14161d9fd91c3 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_method_eager_lazy.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_method_eager_lazy.rs
@@ -1,7 +1,7 @@
-use ide_db::assists::{AssistId, AssistKind};
+use ide_db::assists::AssistId;
 use syntax::{
-    ast::{self, make, Expr, HasArgList},
     AstNode,
+    ast::{self, Expr, HasArgList, make},
 };
 
 use crate::{AssistContext, Assists};
@@ -60,7 +60,7 @@ pub(crate) fn replace_with_lazy_method(acc: &mut Assists, ctx: &AssistContext<'_
     )?;
 
     acc.add(
-        AssistId("replace_with_lazy_method", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("replace_with_lazy_method"),
         format!("Replace {method_name} with {method_name_lazy}"),
         call.syntax().text_range(),
         |builder| {
@@ -74,16 +74,12 @@ pub(crate) fn replace_with_lazy_method(acc: &mut Assists, ctx: &AssistContext<'_
 fn into_closure(param: &Expr) -> Expr {
     (|| {
         if let ast::Expr::CallExpr(call) = param {
-            if call.arg_list()?.args().count() == 0 {
-                Some(call.expr()?)
-            } else {
-                None
-            }
+            if call.arg_list()?.args().count() == 0 { Some(call.expr()?) } else { None }
         } else {
             None
         }
     })()
-    .unwrap_or_else(|| make::expr_closure(None, param.clone()))
+    .unwrap_or_else(|| make::expr_closure(None, param.clone()).into())
 }
 
 // Assist: replace_with_eager_method
@@ -140,7 +136,7 @@ pub(crate) fn replace_with_eager_method(acc: &mut Assists, ctx: &AssistContext<'
     )?;
 
     acc.add(
-        AssistId("replace_with_eager_method", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("replace_with_eager_method"),
         format!("Replace {method_name} with {method_name_eager}"),
         call.syntax().text_range(),
         |builder| {
@@ -154,16 +150,12 @@ pub(crate) fn replace_with_eager_method(acc: &mut Assists, ctx: &AssistContext<'
 fn into_call(param: &Expr) -> Expr {
     (|| {
         if let ast::Expr::ClosureExpr(closure) = param {
-            if closure.param_list()?.params().count() == 0 {
-                Some(closure.body()?)
-            } else {
-                None
-            }
+            if closure.param_list()?.params().count() == 0 { Some(closure.body()?) } else { None }
         } else {
             None
         }
     })()
-    .unwrap_or_else(|| make::expr_call(param.clone(), make::arg_list(Vec::new())))
+    .unwrap_or_else(|| make::expr_call(param.clone(), make::arg_list(Vec::new())).into())
 }
 
 #[cfg(test)]
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs
index 26fd887cc99e9..3cd7b58f4ddd4 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs
@@ -1,19 +1,20 @@
 use hir::{FileRange, Semantics};
 use ide_db::text_edit::TextRange;
 use ide_db::{
+    EditionedFileId, RootDatabase,
     defs::Definition,
     search::{SearchScope, UsageSearchResult},
-    EditionedFileId, RootDatabase,
 };
 use syntax::{
+    AstNode,
     ast::{
-        self, make::impl_trait_type, HasGenericParams, HasName, HasTypeBounds, Name, NameLike,
-        PathType,
+        self, HasGenericParams, HasName, HasTypeBounds, Name, NameLike, PathType,
+        make::impl_trait_type,
     },
-    match_ast, ted, AstNode,
+    match_ast, ted,
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: replace_named_generic_with_impl
 //
@@ -69,7 +70,7 @@ pub(crate) fn replace_named_generic_with_impl(
     let target = type_param.syntax().text_range();
 
     acc.add(
-        AssistId("replace_named_generic_with_impl", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("replace_named_generic_with_impl"),
         "Replace named generic with impl trait",
         target,
         |edit| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs
index f026b3230dd6d..c067747bc1bb1 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs
@@ -1,14 +1,15 @@
 use hir::AsAssocItem;
 use ide_db::{
     helpers::mod_path_to_ast,
-    imports::insert_use::{insert_use, ImportScope},
+    imports::insert_use::{ImportScope, insert_use},
 };
 use syntax::{
-    ast::{self, make, HasGenericArgs},
-    match_ast, ted, AstNode, Edition, SyntaxNode,
+    AstNode, Edition, SyntaxNode,
+    ast::{self, HasGenericArgs, make},
+    match_ast, ted,
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: replace_qualified_name_with_use
 //
@@ -74,7 +75,7 @@ pub(crate) fn replace_qualified_name_with_use(
     let scope = ImportScope::find_insert_use_container(original_path.syntax(), &ctx.sema)?;
     let target = original_path.syntax().text_range();
     acc.add(
-        AssistId("replace_qualified_name_with_use", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("replace_qualified_name_with_use"),
         "Replace qualified path with use",
         target,
         |builder| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_string_with_char.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_string_with_char.rs
index a48b20acbcac8..fb5b234d55987 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_string_with_char.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_string_with_char.rs
@@ -1,12 +1,11 @@
 use syntax::{
-    ast,
-    ast::IsString,
     AstToken,
     SyntaxKind::{CHAR, STRING},
-    TextRange, TextSize,
+    TextRange, TextSize, ast,
+    ast::IsString,
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists, utils::string_suffix};
 
 // Assist: replace_string_with_char
 //
@@ -34,14 +33,16 @@ pub(crate) fn replace_string_with_char(acc: &mut Assists, ctx: &AssistContext<'_
     let quote_offsets = token.quote_offsets()?;
 
     acc.add(
-        AssistId("replace_string_with_char", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("replace_string_with_char"),
         "Replace string with char",
         target,
         |edit| {
             let (left, right) = quote_offsets.quotes;
+            let suffix = TextSize::of(string_suffix(token.text()).unwrap_or_default());
+            let right = TextRange::new(right.start(), right.end() - suffix);
             edit.replace(left, '\'');
             edit.replace(right, '\'');
-            if value == "'" {
+            if token.text_without_quotes() == "'" {
                 edit.insert(left.end(), '\\');
             }
         },
@@ -68,16 +69,18 @@ pub(crate) fn replace_char_with_string(acc: &mut Assists, ctx: &AssistContext<'_
     let target = token.text_range();
 
     acc.add(
-        AssistId("replace_char_with_string", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("replace_char_with_string"),
         "Replace char with string",
         target,
         |edit| {
-            if token.text() == "'\"'" {
-                edit.replace(token.text_range(), r#""\"""#);
+            let suffix = string_suffix(token.text()).unwrap_or_default();
+            if token.text().starts_with("'\"'") {
+                edit.replace(token.text_range(), format!(r#""\""{suffix}"#));
             } else {
                 let len = TextSize::of('\'');
+                let suffix = TextSize::of(suffix);
                 edit.replace(TextRange::at(target.start(), len), '"');
-                edit.replace(TextRange::at(target.end() - len, len), '"');
+                edit.replace(TextRange::at(target.end() - suffix - len, len), '"');
             }
         },
     )
@@ -106,6 +109,23 @@ fn f() {
         )
     }
 
+    #[test]
+    fn replace_string_with_char_has_suffix() {
+        check_assist(
+            replace_string_with_char,
+            r#"
+fn f() {
+    let s = "$0c"i32;
+}
+"#,
+            r##"
+fn f() {
+    let s = 'c'i32;
+}
+"##,
+        )
+    }
+
     #[test]
     fn replace_string_with_char_assist_with_multi_byte_char() {
         check_assist(
@@ -288,6 +308,40 @@ fn f() {
         )
     }
 
+    #[test]
+    fn replace_char_with_string_quote_has_suffix() {
+        check_assist(
+            replace_char_with_string,
+            r#"
+fn f() {
+    find($0'"'i32);
+}
+"#,
+            r#"
+fn f() {
+    find("\""i32);
+}
+"#,
+        )
+    }
+
+    #[test]
+    fn replace_char_with_string_escaped_quote_has_suffix() {
+        check_assist(
+            replace_char_with_string,
+            r#"
+fn f() {
+    find($0'\"'i32);
+}
+"#,
+            r#"
+fn f() {
+    find("\""i32);
+}
+"#,
+        )
+    }
+
     #[test]
     fn replace_string_with_char_quote() {
         check_assist(
@@ -301,6 +355,91 @@ fn f() {
 fn f() {
     find('\'');
 }
+"#,
+        )
+    }
+
+    #[test]
+    fn replace_string_with_escaped_char_quote() {
+        check_assist(
+            replace_string_with_char,
+            r#"
+fn f() {
+    find($0"\'");
+}
+"#,
+            r#"
+fn f() {
+    find('\'');
+}
+"#,
+        )
+    }
+
+    #[test]
+    fn replace_string_with_char_quote_has_suffix() {
+        check_assist(
+            replace_string_with_char,
+            r#"
+fn f() {
+    find($0"'"i32);
+}
+"#,
+            r#"
+fn f() {
+    find('\''i32);
+}
+"#,
+        )
+    }
+
+    #[test]
+    fn replace_string_with_escaped_char_quote_has_suffix() {
+        check_assist(
+            replace_string_with_char,
+            r#"
+fn f() {
+    find($0"\'"i32);
+}
+"#,
+            r#"
+fn f() {
+    find('\''i32);
+}
+"#,
+        )
+    }
+
+    #[test]
+    fn replace_raw_string_with_char_quote() {
+        check_assist(
+            replace_string_with_char,
+            r#"
+fn f() {
+    find($0r"'");
+}
+"#,
+            r#"
+fn f() {
+    find('\'');
+}
+"#,
+        )
+    }
+
+    #[test]
+    fn replace_string_with_code_escaped_char_quote() {
+        check_assist(
+            replace_string_with_char,
+            r#"
+fn f() {
+    find($0"\x27");
+}
+"#,
+            r#"
+fn f() {
+    find('\x27');
+}
 "#,
         )
     }
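
The `replace_string_with_char` / `replace_char_with_string` hunks above start honoring literal suffixes (e.g. `"c"i32`): the closing-quote range is shortened by the suffix width, so `TextRange::new(right.start(), right.end() - suffix)` rewrites `"c"i32` to `'c'i32` instead of dropping the suffix. `string_suffix` is imported from `crate::utils`; its real definition is not part of this excerpt, so the helper below is only a hypothetical stand-in showing the assumed contract.

// Hypothetical stand-in for utils::string_suffix -- the real helper in
// ide-assists/src/utils.rs may differ. Contract assumed from the hunks above:
// return the trailing suffix of a string or char literal's text, if any.
fn string_suffix(text: &str) -> Option<&str> {
    // Find the closing quote and treat whatever follows it as the suffix.
    let close = text.rfind(|c| c == '"' || c == '\'')?;
    let suffix = &text[close + 1..];
    if suffix.is_empty() { None } else { Some(suffix) }
}

fn main() {
    assert_eq!(string_suffix(r#""c"i32"#), Some("i32"));
    assert_eq!(string_suffix("'\"'i32"), Some("i32"));
    assert_eq!(string_suffix(r#""plain""#), None);
}
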
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_try_expr_with_match.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_try_expr_with_match.rs
index 88b50543dda87..c6e864fcfdba8 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_try_expr_with_match.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_try_expr_with_match.rs
@@ -1,16 +1,13 @@
 use std::iter;
 
-use ide_db::{
-    assists::{AssistId, AssistKind},
-    ty_filter::TryEnum,
-};
+use ide_db::{assists::AssistId, ty_filter::TryEnum};
 use syntax::{
+    AstNode, T,
     ast::{
         self,
         edit::{AstNodeEdit, IndentLevel},
         make,
     },
-    AstNode, T,
 };
 
 use crate::assist_context::{AssistContext, Assists};
@@ -48,7 +45,7 @@ pub(crate) fn replace_try_expr_with_match(
 
     let target = qm_kw_parent.syntax().text_range();
     acc.add(
-        AssistId("replace_try_expr_with_match", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("replace_try_expr_with_match"),
         "Replace try expression with match",
         target,
         |edit| {
@@ -64,10 +61,13 @@ pub(crate) fn replace_try_expr_with_match(
                 TryEnum::Option => {
                     make::expr_return(Some(make::expr_path(make::ext::ident_path("None"))))
                 }
-                TryEnum::Result => make::expr_return(Some(make::expr_call(
-                    make::expr_path(make::ext::ident_path("Err")),
-                    make::arg_list(iter::once(make::expr_path(make::ext::ident_path("err")))),
-                ))),
+                TryEnum::Result => make::expr_return(Some(
+                    make::expr_call(
+                        make::expr_path(make::ext::ident_path("Err")),
+                        make::arg_list(iter::once(make::expr_path(make::ext::ident_path("err")))),
+                    )
+                    .into(),
+                )),
             };
 
             let happy_arm = make::match_arm(
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs
index 3a6391cd38006..a692259410dc0 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs
@@ -1,12 +1,12 @@
 use hir::HirDisplay;
 use syntax::{
-    ast::{Expr, GenericArg, GenericArgList, HasGenericArgs, LetStmt, Type::InferType},
     AstNode, TextRange,
+    ast::{Expr, GenericArg, GenericArgList, HasGenericArgs, LetStmt, Type::InferType},
 };
 
 use crate::{
+    AssistId,
     assist_context::{AssistContext, Assists},
-    AssistId, AssistKind,
 };
 
 // Assist: replace_turbofish_with_explicit_type
@@ -74,7 +74,7 @@ pub(crate) fn replace_turbofish_with_explicit_type(
         let ident_range = let_stmt.pat()?.syntax().text_range();
 
         return acc.add(
-            AssistId("replace_turbofish_with_explicit_type", AssistKind::RefactorRewrite),
+            AssistId::refactor_rewrite("replace_turbofish_with_explicit_type"),
             "Replace turbofish with explicit type",
             TextRange::new(initializer_start, turbofish_range.end()),
             |builder| {
@@ -89,7 +89,7 @@ pub(crate) fn replace_turbofish_with_explicit_type(
         let underscore_range = t.syntax().text_range();
 
         return acc.add(
-            AssistId("replace_turbofish_with_explicit_type", AssistKind::RefactorRewrite),
+            AssistId::refactor_rewrite("replace_turbofish_with_explicit_type"),
             "Replace `_` with turbofish type",
             turbofish_range,
             |builder| {
@@ -339,7 +339,7 @@ fn main() {
         check_assist(
             replace_turbofish_with_explicit_type,
             r#"
-//- minicore: option, future
+//- minicore: option, future, try
 struct Fut<T>(T);
 impl<T> core::future::Future for Fut<T> {
     type Output = Option<T>;
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/sort_items.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/sort_items.rs
index 54e16d4d80a4c..e973e70345dc2 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/sort_items.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/sort_items.rs
@@ -3,11 +3,11 @@ use std::cmp::Ordering;
 use itertools::Itertools;
 
 use syntax::{
-    ast::{self, HasName},
     AstNode, SyntaxNode,
+    ast::{self, HasName},
 };
 
-use crate::{utils::get_methods, AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists, utils::get_methods};
 
 // Assist: sort_items
 //
@@ -126,20 +126,15 @@ impl AddRewrite for Assists {
         new: Vec<T>,
         target: &SyntaxNode,
     ) -> Option<()> {
-        self.add(
-            AssistId("sort_items", AssistKind::RefactorRewrite),
-            label,
-            target.text_range(),
-            |builder| {
-                let mut editor = builder.make_editor(target);
-
-                old.into_iter()
-                    .zip(new)
-                    .for_each(|(old, new)| editor.replace(old.syntax(), new.syntax()));
-
-                builder.add_file_edits(builder.file_id, editor)
-            },
-        )
+        self.add(AssistId::refactor_rewrite("sort_items"), label, target.text_range(), |builder| {
+            let mut editor = builder.make_editor(target);
+
+            old.into_iter()
+                .zip(new)
+                .for_each(|(old, new)| editor.replace(old.syntax(), new.syntax()));
+
+            builder.add_file_edits(builder.file_id, editor)
+        })
     }
 }
 
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/split_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/split_import.rs
index 775ededecbcc8..1729a0667c0a2 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/split_import.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/split_import.rs
@@ -1,6 +1,6 @@
-use syntax::{ast, AstNode, T};
+use syntax::{AstNode, T, ast};
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: split_import
 //
@@ -29,7 +29,7 @@ pub(crate) fn split_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
     }
 
     let target = colon_colon.text_range();
-    acc.add(AssistId("split_import", AssistKind::RefactorRewrite), "Split import", target, |edit| {
+    acc.add(AssistId::refactor_rewrite("split_import"), "Split import", target, |edit| {
         let use_tree = edit.make_mut(use_tree.clone());
         let path = edit.make_mut(path);
         use_tree.split_prefix(&path);
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/term_search.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/term_search.rs
index e10897b3bef75..6af8e1482c245 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/term_search.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/term_search.rs
@@ -1,12 +1,12 @@
 //! Term search assist
 use hir::term_search::{TermSearchConfig, TermSearchCtx};
 use ide_db::{
-    assists::{AssistId, AssistKind, GroupLabel},
+    assists::{AssistId, GroupLabel},
     famous_defs::FamousDefs,
 };
 
 use itertools::Itertools;
-use syntax::{ast, AstNode};
+use syntax::{AstNode, ast};
 
 use crate::assist_context::{AssistContext, Assists};
 
@@ -68,7 +68,7 @@ pub(crate) fn term_search(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
     for code in paths {
         acc.add_group(
             &GroupLabel(String::from("Term search")),
-            AssistId("term_search", AssistKind::Generate),
+            AssistId::generate("term_search"),
             format!("Replace {macro_name}!() with {code}"),
             goal_range,
             |builder| {
@@ -144,7 +144,7 @@ fn f() { let a = A { x: 1, y: true }; let b: i32 = a.x; }"#,
             term_search,
             r#"//- minicore: todo, unimplemented, option
 fn f() { let a: i32 = 1; let b: Option<i32> = todo$0!(); }"#,
-            r#"fn f() { let a: i32 = 1; let b: Option<i32> = Some(a); }"#,
+            r#"fn f() { let a: i32 = 1; let b: Option<i32> = None; }"#,
         )
     }
 
@@ -156,7 +156,7 @@ fn f() { let a: i32 = 1; let b: Option<i32> = todo$0!(); }"#,
 enum Option<T> { None, Some(T) }
 fn f() { let a: i32 = 1; let b: Option<i32> = todo$0!(); }"#,
             r#"enum Option<T> { None, Some(T) }
-fn f() { let a: i32 = 1; let b: Option<i32> = Option::Some(a); }"#,
+fn f() { let a: i32 = 1; let b: Option<i32> = Option::None; }"#,
         )
     }
 
@@ -168,7 +168,7 @@ fn f() { let a: i32 = 1; let b: Option<i32> = Option::Some(a); }"#,
 enum Option<T> { None, Some(T) }
 fn f() { let a: Option<i32> = Option::None; let b: Option<Option<i32>> = todo$0!(); }"#,
             r#"enum Option<T> { None, Some(T) }
-fn f() { let a: Option<i32> = Option::None; let b: Option<Option<i32>> = Option::Some(a); }"#,
+fn f() { let a: Option<i32> = Option::None; let b: Option<Option<i32>> = Option::None; }"#,
         )
     }
 
@@ -221,7 +221,7 @@ fn f() { let a: i32 = 1; let b: i32 = 2; let a: u32 = 0; let c: i32 = todo$0!();
             term_search,
             r#"//- minicore: todo, unimplemented
 fn f() { let a: bool = todo$0!(); }"#,
-            r#"fn f() { let a: bool = false; }"#,
+            r#"fn f() { let a: bool = true; }"#,
         )
     }
 
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_async_sugar.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_async_sugar.rs
index 8f937a04122d6..eed070cb07dd6 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_async_sugar.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_async_sugar.rs
@@ -1,11 +1,8 @@
 use hir::ModuleDef;
-use ide_db::{
-    assists::{AssistId, AssistKind},
-    famous_defs::FamousDefs,
-};
+use ide_db::{assists::AssistId, famous_defs::FamousDefs};
 use syntax::{
-    ast::{self, HasGenericArgs, HasVisibility},
     AstNode, NodeOrToken, SyntaxKind, SyntaxNode, SyntaxToken, TextRange,
+    ast::{self, HasGenericArgs, HasVisibility},
 };
 
 use crate::{AssistContext, Assists};
@@ -60,7 +57,7 @@ pub(crate) fn sugar_impl_future_into_async(
     let future_output = unwrap_future_output(main_trait_path)?;
 
     acc.add(
-        AssistId("sugar_impl_future_into_async", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("sugar_impl_future_into_async"),
         "Convert `impl Future` into async",
         function.syntax().text_range(),
         |builder| {
@@ -145,7 +142,7 @@ pub(crate) fn desugar_async_into_impl_future(
     let trait_path = trait_path.display(ctx.db(), edition);
 
     acc.add(
-        AssistId("desugar_async_into_impl_future", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("desugar_async_into_impl_future"),
         "Convert async into `impl Future`",
         function.syntax().text_range(),
         |builder| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_ignore.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_ignore.rs
index 264a2f0326ecf..386625b86b271 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_ignore.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_ignore.rs
@@ -1,9 +1,9 @@
 use syntax::{
-    ast::{self, HasAttrs},
     AstNode, AstToken,
+    ast::{self, HasAttrs},
 };
 
-use crate::{utils::test_related_attribute_syn, AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists, utils::test_related_attribute_syn};
 
 // Assist: toggle_ignore
 //
@@ -30,13 +30,13 @@ pub(crate) fn toggle_ignore(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio
 
     match has_ignore_attribute(&func) {
         None => acc.add(
-            AssistId("toggle_ignore", AssistKind::None),
+            AssistId::refactor("toggle_ignore"),
             "Ignore this test",
             attr.syntax().text_range(),
             |builder| builder.insert(attr.syntax().text_range().end(), "\n#[ignore]"),
         ),
         Some(ignore_attr) => acc.add(
-            AssistId("toggle_ignore", AssistKind::None),
+            AssistId::refactor("toggle_ignore"),
             "Re-enable this test",
             ignore_attr.syntax().text_range(),
             |builder| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs
index e452b5f77870c..109269bd6e611 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs
@@ -1,7 +1,8 @@
-use ide_db::assists::{AssistId, AssistKind};
+use ide_db::assists::AssistId;
 use syntax::{
+    AstNode, T,
     ast::{self, make},
-    ted, AstNode, T,
+    ted,
 };
 
 use crate::{AssistContext, Assists};
@@ -62,7 +63,7 @@ pub(crate) fn toggle_macro_delimiter(acc: &mut Assists, ctx: &AssistContext<'_>)
     };
 
     acc.add(
-        AssistId("toggle_macro_delimiter", AssistKind::Refactor),
+        AssistId::refactor("toggle_macro_delimiter"),
         match token {
             MacroDelims::LPar | MacroDelims::RPar => "Replace delimiters with braces",
             MacroDelims::LBra | MacroDelims::RBra => "Replace delimiters with parentheses",
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_match_arm.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_match_arm.rs
index 6b9f661d4de54..31ff47a05492e 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_match_arm.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_match_arm.rs
@@ -1,11 +1,11 @@
 use syntax::{
+    Direction, SyntaxKind, T,
     algo::neighbor,
-    ast::{self, edit::IndentLevel, make, AstNode},
+    ast::{self, AstNode, edit::IndentLevel, make},
     ted::{self, Position},
-    Direction, SyntaxKind, T,
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: unmerge_match_arm
 //
@@ -47,7 +47,7 @@ pub(crate) fn unmerge_match_arm(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
     let old_parent_range = new_parent.text_range();
 
     acc.add(
-        AssistId("unmerge_match_arm", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("unmerge_match_arm"),
         "Unmerge match arm",
         pipe_token.text_range(),
         |edit| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_use.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_use.rs
index 38ca572fa6609..805a7344494aa 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_use.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_use.rs
@@ -1,12 +1,12 @@
 use syntax::{
-    ast::{self, edit_in_place::Removable, make, HasVisibility},
-    ted::{self, Position},
     AstNode, SyntaxKind,
+    ast::{self, HasVisibility, edit_in_place::Removable, make},
+    ted::{self, Position},
 };
 
 use crate::{
+    AssistId,
     assist_context::{AssistContext, Assists},
-    AssistId, AssistKind,
 };
 
 // Assist: unmerge_use
@@ -43,7 +43,7 @@ pub(crate) fn unmerge_use(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
     };
 
     let target = tree.syntax().text_range();
-    acc.add(AssistId("unmerge_use", AssistKind::RefactorRewrite), label, target, |builder| {
+    acc.add(AssistId::refactor_rewrite("unmerge_use"), label, target, |builder| {
         let new_use = make::use_(
             use_.visibility(),
             make::use_tree(path, tree.use_tree_list(), tree.rename(), tree.star_token().is_some()),
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs
index abe7fb132f0b3..ac10a829bbf1b 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs
@@ -1,13 +1,13 @@
 use ide_db::{
-    assists::{AssistId, AssistKind},
+    EditionedFileId,
+    assists::AssistId,
     defs::Definition,
     search::{FileReference, FileReferenceNode},
     syntax_helpers::node_ext::full_path_of_name_ref,
-    EditionedFileId,
 };
 use syntax::{
-    ast::{self, NameRef},
     AstNode, SyntaxKind, TextRange,
+    ast::{self, NameRef},
 };
 
 use crate::{AssistContext, Assists};
@@ -60,7 +60,7 @@ pub(crate) fn unnecessary_async(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
 
     // Otherwise, we may remove the `async` keyword.
     acc.add(
-        AssistId("unnecessary_async", AssistKind::QuickFix),
+        AssistId::quick_fix("unnecessary_async"),
         "Remove unnecessary async",
         async_range,
         |edit| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unqualify_method_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unqualify_method_call.rs
index baf4ddae2fbc9..ebb8ef99100e7 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unqualify_method_call.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unqualify_method_call.rs
@@ -1,10 +1,10 @@
 use ide_db::imports::insert_use::ImportScope;
 use syntax::{
-    ast::{self, prec::ExprPrecedence, AstNode, HasArgList},
     TextRange,
+    ast::{self, AstNode, HasArgList, prec::ExprPrecedence},
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: unqualify_method_call
 //
@@ -69,7 +69,7 @@ pub(crate) fn unqualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>)
     );
 
     acc.add(
-        AssistId("unqualify_method_call", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("unqualify_method_call"),
         "Unqualify method call",
         call.syntax().text_range(),
         |edit| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_block.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_block.rs
index fd37140e9c2bf..a83f6835ca615 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_block.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_block.rs
@@ -1,13 +1,13 @@
 use syntax::{
+    AstNode, SyntaxKind, T, TextRange,
     ast::{
         self,
         edit::{AstNodeEdit, IndentLevel},
         make,
     },
-    AstNode, SyntaxKind, TextRange, T,
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: unwrap_block
 //
@@ -27,9 +27,8 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
 // }
 // ```
 pub(crate) fn unwrap_block(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
-    let assist_id = AssistId("unwrap_block", AssistKind::RefactorRewrite);
+    let assist_id = AssistId::refactor_rewrite("unwrap_block");
     let assist_label = "Unwrap block";
-
     let l_curly_token = ctx.find_token_syntax_at_offset(T!['{'])?;
     let mut block = ast::BlockExpr::cast(l_curly_token.parent_ancestors().nth(1)?)?;
     let target = block.syntax().text_range();
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_return_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_return_type.rs
index f647b531b7742..cf38262fbf443 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_return_type.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_return_type.rs
@@ -4,11 +4,12 @@ use ide_db::{
     syntax_helpers::node_ext::{for_each_tail_expr, walk_expr},
 };
 use syntax::{
-    ast::{self, syntax_factory::SyntaxFactory, HasArgList, HasGenericArgs},
-    match_ast, AstNode, NodeOrToken, SyntaxKind,
+    AstNode, NodeOrToken, SyntaxKind,
+    ast::{self, HasArgList, HasGenericArgs, syntax_factory::SyntaxFactory},
+    match_ast,
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: unwrap_option_return_type
 //
@@ -66,7 +67,7 @@ pub(crate) fn unwrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) ->
 
     acc.add(kind.assist_id(), kind.label(), type_ref.syntax().text_range(), |builder| {
         let mut editor = builder.make_editor(&parent);
-        let make = SyntaxFactory::new();
+        let make = SyntaxFactory::with_mappings();
 
         let mut exprs_to_unwrap = Vec::new();
         let tail_cb = &mut |e: &_| tail_cb_impl(&mut exprs_to_unwrap, e);
@@ -168,7 +169,7 @@ pub(crate) fn unwrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) ->
         }
 
         editor.add_mappings(make.finish_with_mappings());
-        builder.add_file_edits(ctx.file_id(), editor);
+        builder.add_file_edits(ctx.vfs_file_id(), editor);
     })
 }
 
@@ -186,7 +187,7 @@ impl UnwrapperKind {
             UnwrapperKind::Result => "unwrap_result_return_type",
         };
 
-        AssistId(s, AssistKind::RefactorRewrite)
+        AssistId::refactor_rewrite(s)
     }
 
     fn label(&self) -> &'static str {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_tuple.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_tuple.rs
index d09614c51127e..ecfecbb04ff22 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_tuple.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_tuple.rs
@@ -1,9 +1,9 @@
 use syntax::{
-    ast::{self, edit::AstNodeEdit},
     AstNode, T,
+    ast::{self, edit::AstNodeEdit},
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: unwrap_tuple
 //
@@ -56,7 +56,7 @@ pub(crate) fn unwrap_tuple(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
     let parent = let_kw.parent()?;
 
     acc.add(
-        AssistId("unwrap_tuple", AssistKind::RefactorRewrite),
+        AssistId::refactor_rewrite("unwrap_tuple"),
         "Unwrap tuple",
         let_kw.text_range(),
         |edit| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type.rs
index 0b145dcb06ba3..9ea78719b20c0 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type.rs
@@ -7,11 +7,12 @@ use ide_db::{
     syntax_helpers::node_ext::{for_each_tail_expr, walk_expr},
 };
 use syntax::{
-    ast::{self, syntax_factory::SyntaxFactory, Expr, HasGenericArgs, HasGenericParams},
-    match_ast, AstNode,
+    AstNode,
+    ast::{self, Expr, HasGenericArgs, HasGenericParams, syntax_factory::SyntaxFactory},
+    match_ast,
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: wrap_return_type_in_option
 //
@@ -76,7 +77,7 @@ pub(crate) fn wrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
             type_ref.syntax().text_range(),
             |builder| {
                 let mut editor = builder.make_editor(&parent);
-                let make = SyntaxFactory::new();
+                let make = SyntaxFactory::with_mappings();
                 let alias = wrapper_alias(ctx, &make, &core_wrapper, type_ref, kind.symbol());
                 let new_return_ty = alias.unwrap_or_else(|| match kind {
                     WrapperKind::Option => make.ty_option(type_ref.clone()),
@@ -132,7 +133,7 @@ pub(crate) fn wrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
                 }
 
                 editor.add_mappings(make.finish_with_mappings());
-                builder.add_file_edits(ctx.file_id(), editor);
+                builder.add_file_edits(ctx.vfs_file_id(), editor);
             },
         );
     }
@@ -154,7 +155,7 @@ impl WrapperKind {
             WrapperKind::Result => "wrap_return_type_in_result",
         };
 
-        AssistId(s, AssistKind::RefactorRewrite)
+        AssistId::refactor_rewrite(s)
     }
 
     fn label(&self) -> &'static str {
@@ -180,8 +181,8 @@ impl WrapperKind {
 
     fn symbol(&self) -> hir::Symbol {
         match self {
-            WrapperKind::Option => hir::sym::Option.clone(),
-            WrapperKind::Result => hir::sym::Result.clone(),
+            WrapperKind::Option => hir::sym::Option,
+            WrapperKind::Result => hir::sym::Result,
         }
     }
 }
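
The `WrapperKind::symbol` hunk above drops the `.clone()` calls on `hir::sym::Option` / `hir::sym::Result`, which only type-checks if those interned symbols can now be moved out by value (for instance because they are `const`s or `Copy`). That is an inference from this hunk, not something shown in the patch; a minimal model:

// Minimal model of by-value interned symbols; the real hir::Symbol and
// hir::sym table are defined elsewhere and may differ.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Symbol(u32);

#[allow(non_upper_case_globals)]
mod sym {
    use super::Symbol;
    pub const Option: Symbol = Symbol(0);
    pub const Result: Symbol = Symbol(1);
}

enum WrapperKind { Option, Result }

impl WrapperKind {
    fn symbol(&self) -> Symbol {
        // No `.clone()` needed once the constants can be used by value.
        match self {
            WrapperKind::Option => sym::Option,
            WrapperKind::Result => sym::Result,
        }
    }
}

fn main() {
    assert_eq!(WrapperKind::Option.symbol(), Symbol(0));
    assert_eq!(WrapperKind::Result.symbol(), Symbol(1));
}
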
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs
index 149cb4c43849d..1068d5d4cd57c 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs
@@ -1,13 +1,12 @@
 use ide_db::source_change::SourceChangeBuilder;
 use itertools::Itertools;
 use syntax::{
-    algo,
-    ast::{self, make, AstNode},
+    NodeOrToken, SyntaxToken, T, TextRange, algo,
+    ast::{self, AstNode, make},
     ted::{self, Position},
-    NodeOrToken, SyntaxToken, TextRange, T,
 };
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, Assists};
 
 // Assist: wrap_unwrap_cfg_attr
 //
@@ -211,7 +210,7 @@ fn wrap_derive(
     };
 
     acc.add(
-        AssistId("wrap_unwrap_cfg_attr", AssistKind::Refactor),
+        AssistId::refactor("wrap_unwrap_cfg_attr"),
         format!("Wrap #[derive({path_text})] in `cfg_attr`",),
         range,
         handle_source_change,
@@ -268,7 +267,7 @@ fn wrap_cfg_attr(acc: &mut Assists, ctx: &AssistContext<'_>, attr: ast::Attr) ->
         }
     };
     acc.add(
-        AssistId("wrap_unwrap_cfg_attr", AssistKind::Refactor),
+        AssistId::refactor("wrap_unwrap_cfg_attr"),
         "Convert to `cfg_attr`",
         range,
         handle_source_change,
@@ -296,11 +295,7 @@ fn unwrap_cfg_attr(acc: &mut Assists, attr: ast::Attr) -> Option<()> {
             continue;
         }
         let Some(attr_name) = tt.into_token().and_then(|token| {
-            if token.kind() == T![ident] {
-                Some(make::ext::ident_path(token.text()))
-            } else {
-                None
-            }
+            if token.kind() == T![ident] { Some(make::ext::ident_path(token.text())) } else { None }
         }) else {
             continue;
         };
@@ -341,7 +336,7 @@ fn unwrap_cfg_attr(acc: &mut Assists, attr: ast::Attr) -> Option<()> {
         f.replace(range, inner_attrs);
     };
     acc.add(
-        AssistId("wrap_unwrap_cfg_attr", AssistKind::Refactor),
+        AssistId::refactor("wrap_unwrap_cfg_attr"),
         "Extract Inner Attributes from `cfg_attr`",
         range,
         handle_source_change,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
index e8480b0de1906..a157483a449c1 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
@@ -68,7 +68,7 @@ pub mod utils;
 
 use hir::Semantics;
 use ide_db::{EditionedFileId, RootDatabase};
-use syntax::TextRange;
+use syntax::{Edition, TextRange};
 
 pub(crate) use crate::assist_context::{AssistContext, Assists};
 
@@ -90,7 +90,7 @@ pub fn assists(
     let sema = Semantics::new(db);
     let file_id = sema
         .attach_first_edition(range.file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition(range.file_id));
+        .unwrap_or_else(|| EditionedFileId::new(db, range.file_id, Edition::CURRENT));
     let ctx = AssistContext::new(sema, config, hir::FileRange { file_id, range: range.range });
     let mut acc = Assists::new(&ctx, resolve);
     handlers::all().iter().for_each(|handler| {
@@ -122,6 +122,7 @@ mod handlers {
     mod convert_closure_to_fn;
     mod convert_comment_block;
     mod convert_comment_from_or_to_doc;
+    mod convert_for_to_while_let;
     mod convert_from_to_tryfrom;
     mod convert_integer_literal;
     mod convert_into_to_from;
@@ -199,6 +200,7 @@ mod handlers {
     mod remove_dbg;
     mod remove_mut;
     mod remove_parentheses;
+    mod remove_underscore;
     mod remove_unused_imports;
     mod remove_unused_param;
     mod reorder_fields;
@@ -252,6 +254,7 @@ mod handlers {
             convert_closure_to_fn::convert_closure_to_fn,
             convert_comment_block::convert_comment_block,
             convert_comment_from_or_to_doc::convert_comment_from_or_to_doc,
+            convert_for_to_while_let::convert_for_loop_to_while_let,
             convert_from_to_tryfrom::convert_from_to_tryfrom,
             convert_integer_literal::convert_integer_literal,
             convert_into_to_from::convert_into_to_from,
@@ -333,6 +336,7 @@ mod handlers {
             remove_dbg::remove_dbg,
             remove_mut::remove_mut,
             remove_parentheses::remove_parentheses,
+            remove_underscore::remove_underscore,
             remove_unused_imports::remove_unused_imports,
             remove_unused_param::remove_unused_param,
             reorder_fields::reorder_fields,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs
index 7d7012c462222..0593e6930dcaf 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs
@@ -1,12 +1,12 @@
 mod generated;
 
 use expect_test::expect;
-use hir::{FileRange, Semantics};
+use hir::Semantics;
 use ide_db::{
-    base_db::{SourceDatabase, SourceRootDatabase},
+    EditionedFileId, FileRange, RootDatabase, SnippetCap,
+    base_db::SourceDatabase,
     imports::insert_use::{ImportGranularity, InsertUseConfig},
     source_change::FileSystemEdit,
-    EditionedFileId, RootDatabase, SnippetCap,
 };
 use stdx::{format_to, trim_indent};
 use syntax::TextRange;
@@ -14,8 +14,8 @@ use test_fixture::WithFixture;
 use test_utils::{assert_eq_text, extract_offset};
 
 use crate::{
-    assists, handlers::Handler, Assist, AssistConfig, AssistContext, AssistKind,
-    AssistResolveStrategy, Assists, SingleResolve,
+    Assist, AssistConfig, AssistContext, AssistKind, AssistResolveStrategy, Assists, SingleResolve,
+    assists, handlers::Handler,
 };
 
 pub(crate) const TEST_CONFIG: AssistConfig = AssistConfig {
@@ -222,17 +222,17 @@ pub(crate) fn check_assist_unresolved(
 fn check_doc_test(assist_id: &str, before: &str, after: &str) {
     let after = trim_indent(after);
     let (db, file_id, selection) = RootDatabase::with_range_or_offset(before);
-    let before = db.file_text(file_id.file_id()).to_string();
-    let frange = FileRange { file_id, range: selection.into() };
+    let before = db.file_text(file_id.file_id(&db)).text(&db).to_string();
+    let frange = ide_db::FileRange { file_id: file_id.file_id(&db), range: selection.into() };
 
-    let assist = assists(&db, &TEST_CONFIG, AssistResolveStrategy::All, frange.into())
+    let assist = assists(&db, &TEST_CONFIG, AssistResolveStrategy::All, frange)
         .into_iter()
         .find(|assist| assist.id.0 == assist_id)
         .unwrap_or_else(|| {
             panic!(
                 "\n\nAssist is not applicable: {}\nAvailable assists: {}",
                 assist_id,
-                assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange.into())
+                assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange)
                     .into_iter()
                     .map(|assist| assist.id.0)
                     .collect::<Vec<_>>()
@@ -247,7 +247,7 @@ fn check_doc_test(assist_id: &str, before: &str, after: &str) {
             .expect("Assist did not contain any source changes");
         let mut actual = before;
         if let Some((source_file_edit, snippet_edit)) =
-            source_change.get_source_and_snippet_edit(file_id.file_id())
+            source_change.get_source_and_snippet_edit(file_id.file_id(&db))
         {
             source_file_edit.apply(&mut actual);
             if let Some(snippet_edit) = snippet_edit {
@@ -281,9 +281,9 @@ fn check_with_config(
 ) {
     let (mut db, file_with_caret_id, range_or_offset) = RootDatabase::with_range_or_offset(before);
     db.enable_proc_attr_macros();
-    let text_without_caret = db.file_text(file_with_caret_id.into()).to_string();
+    let text_without_caret = db.file_text(file_with_caret_id.file_id(&db)).text(&db).to_string();
 
-    let frange = FileRange { file_id: file_with_caret_id, range: range_or_offset.into() };
+    let frange = hir::FileRange { file_id: file_with_caret_id, range: range_or_offset.into() };
 
     let sema = Semantics::new(&db);
     let ctx = AssistContext::new(sema, &config, frange);
@@ -297,7 +297,9 @@ fn check_with_config(
 
     let assist = match assist_label {
         Some(label) => res.into_iter().find(|resolved| resolved.label == label),
-        None => res.pop(),
+        None if res.is_empty() => None,
+        // Pick the first as that is the one with the highest priority
+        None => Some(res.swap_remove(0)),
     };
 
     match (assist, expected) {
@@ -311,14 +313,14 @@ fn check_with_config(
 
             let mut buf = String::new();
             for (file_id, (edit, snippet_edit)) in source_change.source_file_edits {
-                let mut text = db.file_text(file_id).as_ref().to_owned();
+                let mut text = db.file_text(file_id).text(&db).as_ref().to_owned();
                 edit.apply(&mut text);
                 if let Some(snippet_edit) = snippet_edit {
                     snippet_edit.apply(&mut text);
                 }
                 if !skip_header {
-                    let sr = db.file_source_root(file_id);
-                    let sr = db.source_root(sr);
+                    let source_root_id = db.file_source_root(file_id).source_root_id(&db);
+                    let sr = db.source_root(source_root_id).source_root(&db);
                     let path = sr.path_for_file(&file_id).unwrap();
                     format_to!(buf, "//- {}\n", path)
                 }
@@ -329,15 +331,16 @@ fn check_with_config(
                 let (dst, contents) = match file_system_edit {
                     FileSystemEdit::CreateFile { dst, initial_contents } => (dst, initial_contents),
                     FileSystemEdit::MoveFile { src, dst } => {
-                        (dst, db.file_text(src).as_ref().to_owned())
+                        (dst, db.file_text(src).text(&db).as_ref().to_owned())
                     }
                     FileSystemEdit::MoveDir { src, src_id, dst } => {
                         // temporary placeholder for MoveDir since we are not using MoveDir in ide assists yet.
                         (dst, format!("{src_id:?}\n{src:?}"))
                     }
                 };
-                let sr = db.file_source_root(dst.anchor);
-                let sr = db.source_root(sr);
+
+                let source_root_id = db.file_source_root(dst.anchor).source_root_id(&db);
+                let sr = db.source_root(source_root_id).source_root(&db);
                 let mut base = sr.path_for_file(&dst.anchor).unwrap().clone();
                 base.pop();
                 let created_file_path = base.join(&dst.path).unwrap();
@@ -387,8 +390,9 @@ fn assist_order_field_struct() {
     let before = "struct Foo { $0bar: u32 }";
     let (before_cursor_pos, before) = extract_offset(before);
     let (db, file_id) = with_single_file(&before);
-    let frange = FileRange { file_id, range: TextRange::empty(before_cursor_pos) };
-    let assists = assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange.into());
+    let frange =
+        FileRange { file_id: file_id.file_id(&db), range: TextRange::empty(before_cursor_pos) };
+    let assists = assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange);
     let mut assists = assists.iter();
 
     assert_eq!(assists.next().expect("expected assist").label, "Change visibility to pub(crate)");
@@ -414,7 +418,12 @@ pub fn test_some_range(a: int) -> bool {
 "#,
     );
 
-    let assists = assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange.into());
+    let assists = assists(
+        &db,
+        &TEST_CONFIG,
+        AssistResolveStrategy::None,
+        FileRange { file_id: frange.file_id.file_id(&db), range: frange.range },
+    );
     let expected = labels(&assists);
 
     expect![[r#"
@@ -442,7 +451,12 @@ pub fn test_some_range(a: int) -> bool {
         let mut cfg = TEST_CONFIG;
         cfg.allowed = Some(vec![AssistKind::Refactor]);
 
-        let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange.into());
+        let assists = assists(
+            &db,
+            &cfg,
+            AssistResolveStrategy::None,
+            FileRange { file_id: frange.file_id.file_id(&db), range: frange.range },
+        );
         let expected = labels(&assists);
 
         expect![[r#"
@@ -456,7 +470,12 @@ pub fn test_some_range(a: int) -> bool {
     {
         let mut cfg = TEST_CONFIG;
         cfg.allowed = Some(vec![AssistKind::RefactorExtract]);
-        let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange.into());
+        let assists = assists(
+            &db,
+            &cfg,
+            AssistResolveStrategy::None,
+            FileRange { file_id: frange.file_id.file_id(&db), range: frange.range },
+        );
         let expected = labels(&assists);
 
         expect![[r#"
@@ -468,7 +487,12 @@ pub fn test_some_range(a: int) -> bool {
     {
         let mut cfg = TEST_CONFIG;
         cfg.allowed = Some(vec![AssistKind::QuickFix]);
-        let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange.into());
+        let assists = assists(
+            &db,
+            &cfg,
+            AssistResolveStrategy::None,
+            FileRange { file_id: frange.file_id.file_id(&db), range: frange.range },
+        );
         let expected = labels(&assists);
 
         expect![[r#""#]].assert_eq(&expected);
@@ -493,7 +517,12 @@ pub fn test_some_range(a: int) -> bool {
     cfg.allowed = Some(vec![AssistKind::RefactorExtract]);
 
     {
-        let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange.into());
+        let assists = assists(
+            &db,
+            &cfg,
+            AssistResolveStrategy::None,
+            FileRange { file_id: frange.file_id.file_id(&db), range: frange.range },
+        );
         assert_eq!(4, assists.len());
         let mut assists = assists.into_iter();
 
@@ -503,6 +532,7 @@ pub fn test_some_range(a: int) -> bool {
                 id: AssistId(
                     "extract_variable",
                     RefactorExtract,
+                    None,
                 ),
                 label: "Extract into variable",
                 group: Some(
@@ -523,6 +553,7 @@ pub fn test_some_range(a: int) -> bool {
                 id: AssistId(
                     "extract_constant",
                     RefactorExtract,
+                    None,
                 ),
                 label: "Extract into constant",
                 group: Some(
@@ -543,6 +574,7 @@ pub fn test_some_range(a: int) -> bool {
                 id: AssistId(
                     "extract_static",
                     RefactorExtract,
+                    None,
                 ),
                 label: "Extract into static",
                 group: Some(
@@ -563,6 +595,7 @@ pub fn test_some_range(a: int) -> bool {
                 id: AssistId(
                     "extract_function",
                     RefactorExtract,
+                    None,
                 ),
                 label: "Extract into function",
                 group: Some(
@@ -585,8 +618,9 @@ pub fn test_some_range(a: int) -> bool {
             AssistResolveStrategy::Single(SingleResolve {
                 assist_id: "SOMETHING_MISMATCHING".to_owned(),
                 assist_kind: AssistKind::RefactorExtract,
+                assist_subtype: None,
             }),
-            frange.into(),
+            FileRange { file_id: frange.file_id.file_id(&db), range: frange.range },
         );
         assert_eq!(4, assists.len());
         let mut assists = assists.into_iter();
@@ -597,6 +631,7 @@ pub fn test_some_range(a: int) -> bool {
                 id: AssistId(
                     "extract_variable",
                     RefactorExtract,
+                    None,
                 ),
                 label: "Extract into variable",
                 group: Some(
@@ -617,6 +652,7 @@ pub fn test_some_range(a: int) -> bool {
                 id: AssistId(
                     "extract_constant",
                     RefactorExtract,
+                    None,
                 ),
                 label: "Extract into constant",
                 group: Some(
@@ -637,6 +673,7 @@ pub fn test_some_range(a: int) -> bool {
                 id: AssistId(
                     "extract_static",
                     RefactorExtract,
+                    None,
                 ),
                 label: "Extract into static",
                 group: Some(
@@ -657,6 +694,7 @@ pub fn test_some_range(a: int) -> bool {
                 id: AssistId(
                     "extract_function",
                     RefactorExtract,
+                    None,
                 ),
                 label: "Extract into function",
                 group: Some(
@@ -679,8 +717,9 @@ pub fn test_some_range(a: int) -> bool {
             AssistResolveStrategy::Single(SingleResolve {
                 assist_id: "extract_variable".to_owned(),
                 assist_kind: AssistKind::RefactorExtract,
+                assist_subtype: None,
             }),
-            frange.into(),
+            FileRange { file_id: frange.file_id.file_id(&db), range: frange.range },
         );
         assert_eq!(4, assists.len());
         let mut assists = assists.into_iter();
@@ -691,6 +730,7 @@ pub fn test_some_range(a: int) -> bool {
                 id: AssistId(
                     "extract_variable",
                     RefactorExtract,
+                    None,
                 ),
                 label: "Extract into variable",
                 group: Some(
@@ -710,24 +750,21 @@ pub fn test_some_range(a: int) -> bool {
                                         Indel {
                                             insert: "let",
                                             delete: 45..47,
-                                            annotation: None,
                                         },
                                         Indel {
                                             insert: "var_name",
                                             delete: 48..60,
-                                            annotation: None,
                                         },
                                         Indel {
                                             insert: "=",
                                             delete: 61..81,
-                                            annotation: None,
                                         },
                                         Indel {
                                             insert: "5;\n    if let 2..6 = var_name {\n        true\n    } else {\n        false\n    }",
                                             delete: 82..108,
-                                            annotation: None,
                                         },
                                     ],
+                                    annotation: None,
                                 },
                                 Some(
                                     SnippetEdit(
@@ -760,6 +797,7 @@ pub fn test_some_range(a: int) -> bool {
                 id: AssistId(
                     "extract_constant",
                     RefactorExtract,
+                    None,
                 ),
                 label: "Extract into constant",
                 group: Some(
@@ -780,6 +818,7 @@ pub fn test_some_range(a: int) -> bool {
                 id: AssistId(
                     "extract_static",
                     RefactorExtract,
+                    None,
                 ),
                 label: "Extract into static",
                 group: Some(
@@ -800,6 +839,7 @@ pub fn test_some_range(a: int) -> bool {
                 id: AssistId(
                     "extract_function",
                     RefactorExtract,
+                    None,
                 ),
                 label: "Extract into function",
                 group: Some(
@@ -816,7 +856,12 @@ pub fn test_some_range(a: int) -> bool {
     }
 
     {
-        let assists = assists(&db, &cfg, AssistResolveStrategy::All, frange.into());
+        let assists = assists(
+            &db,
+            &cfg,
+            AssistResolveStrategy::All,
+            FileRange { file_id: frange.file_id.file_id(&db), range: frange.range },
+        );
         assert_eq!(4, assists.len());
         let mut assists = assists.into_iter();
 
@@ -826,6 +871,7 @@ pub fn test_some_range(a: int) -> bool {
                 id: AssistId(
                     "extract_variable",
                     RefactorExtract,
+                    None,
                 ),
                 label: "Extract into variable",
                 group: Some(
@@ -845,24 +891,21 @@ pub fn test_some_range(a: int) -> bool {
                                         Indel {
                                             insert: "let",
                                             delete: 45..47,
-                                            annotation: None,
                                         },
                                         Indel {
                                             insert: "var_name",
                                             delete: 48..60,
-                                            annotation: None,
                                         },
                                         Indel {
                                             insert: "=",
                                             delete: 61..81,
-                                            annotation: None,
                                         },
                                         Indel {
                                             insert: "5;\n    if let 2..6 = var_name {\n        true\n    } else {\n        false\n    }",
                                             delete: 82..108,
-                                            annotation: None,
                                         },
                                     ],
+                                    annotation: None,
                                 },
                                 Some(
                                     SnippetEdit(
@@ -895,6 +938,7 @@ pub fn test_some_range(a: int) -> bool {
                 id: AssistId(
                     "extract_constant",
                     RefactorExtract,
+                    None,
                 ),
                 label: "Extract into constant",
                 group: Some(
@@ -914,29 +958,25 @@ pub fn test_some_range(a: int) -> bool {
                                         Indel {
                                             insert: "const",
                                             delete: 45..47,
-                                            annotation: None,
                                         },
                                         Indel {
                                             insert: "VAR_NAME:",
                                             delete: 48..60,
-                                            annotation: None,
                                         },
                                         Indel {
                                             insert: "i32",
                                             delete: 61..81,
-                                            annotation: None,
                                         },
                                         Indel {
                                             insert: "=",
                                             delete: 82..86,
-                                            annotation: None,
                                         },
                                         Indel {
                                             insert: "5;\n    if let 2..6 = VAR_NAME {\n        true\n    } else {\n        false\n    }",
                                             delete: 87..108,
-                                            annotation: None,
                                         },
                                     ],
+                                    annotation: None,
                                 },
                                 Some(
                                     SnippetEdit(
@@ -969,6 +1009,7 @@ pub fn test_some_range(a: int) -> bool {
                 id: AssistId(
                     "extract_static",
                     RefactorExtract,
+                    None,
                 ),
                 label: "Extract into static",
                 group: Some(
@@ -988,29 +1029,25 @@ pub fn test_some_range(a: int) -> bool {
                                         Indel {
                                             insert: "static",
                                             delete: 45..47,
-                                            annotation: None,
                                         },
                                         Indel {
                                             insert: "VAR_NAME:",
                                             delete: 48..60,
-                                            annotation: None,
                                         },
                                         Indel {
                                             insert: "i32",
                                             delete: 61..81,
-                                            annotation: None,
                                         },
                                         Indel {
                                             insert: "=",
                                             delete: 82..86,
-                                            annotation: None,
                                         },
                                         Indel {
                                             insert: "5;\n    if let 2..6 = VAR_NAME {\n        true\n    } else {\n        false\n    }",
                                             delete: 87..108,
-                                            annotation: None,
                                         },
                                     ],
+                                    annotation: None,
                                 },
                                 Some(
                                     SnippetEdit(
@@ -1043,6 +1080,7 @@ pub fn test_some_range(a: int) -> bool {
                 id: AssistId(
                     "extract_function",
                     RefactorExtract,
+                    None,
                 ),
                 label: "Extract into function",
                 group: Some(
@@ -1062,14 +1100,13 @@ pub fn test_some_range(a: int) -> bool {
                                         Indel {
                                             insert: "fun_name()",
                                             delete: 59..60,
-                                            annotation: None,
                                         },
                                         Indel {
                                             insert: "\n\nfn fun_name() -> i32 {\n    5\n}",
                                             delete: 110..110,
-                                            annotation: None,
                                         },
                                     ],
+                                    annotation: None,
                                 },
                                 Some(
                                     SnippetEdit(
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs
index 4234124d670ff..00a9d35c3107c 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs
@@ -439,6 +439,30 @@ fn main() {
     )
 }
 
+#[test]
+fn doctest_convert_for_loop_to_while_let() {
+    check_doc_test(
+        "convert_for_loop_to_while_let",
+        r#####"
+fn main() {
+    let x = vec![1, 2, 3];
+    for$0 v in x {
+        let y = v * 2;
+    };
+}
+"#####,
+        r#####"
+fn main() {
+    let x = vec![1, 2, 3];
+    let mut tmp = x.into_iter();
+    while let Some(v) = tmp.next() {
+        let y = v * 2;
+    };
+}
+"#####,
+    )
+}
+
 #[test]
 fn doctest_convert_for_loop_with_for_each() {
     check_doc_test(
@@ -2724,6 +2748,25 @@ fn main() {
     )
 }
 
+#[test]
+fn doctest_remove_underscore_from_used_variables() {
+    check_doc_test(
+        "remove_underscore_from_used_variables",
+        r#####"
+fn main() {
+    let mut _$0foo = 1;
+    _foo = 2;
+}
+"#####,
+        r#####"
+fn main() {
+    let mut foo = 1;
+    foo = 2;
+}
+"#####,
+    )
+}
+
 #[test]
 fn doctest_remove_unused_imports() {
     check_doc_test(
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
index a6fa1706710d1..0471998f0b14e 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
@@ -2,29 +2,29 @@
 
 pub(crate) use gen_trait_fn_body::gen_trait_fn_body;
 use hir::{
-    db::{ExpandDatabase, HirDatabase},
     DisplayTarget, HasAttrs as HirHasAttrs, HirDisplay, InFile, ModuleDef, PathResolution,
     Semantics,
+    db::{ExpandDatabase, HirDatabase},
 };
 use ide_db::{
+    RootDatabase,
     famous_defs::FamousDefs,
     path_transform::PathTransform,
     syntax_helpers::{node_ext::preorder_expr, prettify_macro_expansion},
-    RootDatabase,
 };
 use stdx::format_to;
 use syntax::{
+    AstNode, AstToken, Direction, NodeOrToken, SourceFile,
+    SyntaxKind::*,
+    SyntaxNode, SyntaxToken, T, TextRange, TextSize, WalkEvent,
     ast::{
-        self,
+        self, HasArgList, HasAttrs, HasGenericParams, HasName, HasTypeBounds, Whitespace,
         edit::{AstNodeEdit, IndentLevel},
         edit_in_place::{AttrsOwnerEdit, Indent, Removable},
         make,
         syntax_factory::SyntaxFactory,
-        HasArgList, HasAttrs, HasGenericParams, HasName, HasTypeBounds, Whitespace,
     },
-    ted, AstNode, AstToken, Direction, NodeOrToken, SourceFile,
-    SyntaxKind::*,
-    SyntaxNode, SyntaxToken, TextRange, TextSize, WalkEvent, T,
+    ted,
 };
 
 use crate::assist_context::{AssistContext, SourceChangeBuilder};
@@ -82,11 +82,7 @@ pub fn test_related_attribute_syn(fn_def: &ast::Fn) -> Option<ast::Attr> {
     fn_def.attrs().find_map(|attr| {
         let path = attr.path()?;
         let text = path.syntax().text().to_string();
-        if text.starts_with("test") || text.ends_with("test") {
-            Some(attr)
-        } else {
-            None
-        }
+        if text.starts_with("test") || text.ends_with("test") { Some(attr) } else { None }
     })
 }
 
@@ -216,6 +212,7 @@ pub fn add_trait_assoc_items_to_impl(
     });
 
     let assoc_item_list = impl_.get_or_create_assoc_item_list();
+
     let mut first_item = None;
     for item in items {
         first_item.get_or_insert_with(|| item.clone());
@@ -333,7 +330,11 @@ fn invert_special_case_legacy(expr: &ast::Expr) -> Option<ast::Expr> {
                 T![>] => T![<=],
                 T![>=] => T![<],
                 // Parenthesize other expressions before prefixing `!`
-                _ => return Some(make::expr_prefix(T![!], make::expr_paren(expr.clone())).into()),
+                _ => {
+                    return Some(
+                        make::expr_prefix(T![!], make::expr_paren(expr.clone()).into()).into(),
+                    );
+                }
             };
             ted::replace(op_token, make::token(rev_token));
             Some(bin.into())
@@ -350,7 +351,7 @@ fn invert_special_case_legacy(expr: &ast::Expr) -> Option<ast::Expr> {
                 "is_err" => "is_ok",
                 _ => return None,
             };
-            Some(make::expr_method_call(receiver, make::name_ref(method), arg_list))
+            Some(make::expr_method_call(receiver, make::name_ref(method), arg_list).into())
         }
         ast::Expr::PrefixExpr(pe) if pe.op_kind()? == ast::UnaryOp::Not => match pe.expr()? {
             ast::Expr::ParenExpr(parexpr) => parexpr.expr(),
@@ -498,11 +499,7 @@ pub(crate) fn find_struct_impl(
         };
         let not_trait_impl = blk.trait_(db).is_none();
 
-        if !(same_ty && not_trait_impl) {
-            None
-        } else {
-            Some(impl_blk)
-        }
+        if !(same_ty && not_trait_impl) { None } else { Some(impl_blk) }
     });
 
     if let Some(ref impl_blk) = block {
@@ -859,6 +856,7 @@ impl ReferenceConversion {
                     make::expr_ref(expr, false)
                 } else {
                     make::expr_method_call(expr, make::name_ref("as_ref"), make::arg_list([]))
+                        .into()
                 }
             }
         }
@@ -1028,6 +1026,20 @@ fn test_required_hashes() {
     assert_eq!(5, required_hashes("#ab\"##\"####c"));
 }
 
+/// Calculate the string literal suffix length
+pub(crate) fn string_suffix(s: &str) -> Option<&str> {
+    s.rfind(['"', '\'', '#']).map(|i| &s[i + 1..])
+}
+#[test]
+fn test_string_suffix() {
+    assert_eq!(Some(""), string_suffix(r#""abc""#));
+    assert_eq!(Some(""), string_suffix(r#""""#));
+    assert_eq!(Some("a"), string_suffix(r#"""a"#));
+    assert_eq!(Some("i32"), string_suffix(r#"""i32"#));
+    assert_eq!(Some("i32"), string_suffix(r#"r""i32"#));
+    assert_eq!(Some("i32"), string_suffix(r##"r#""#i32"##));
+}
+
 /// Replaces the record expression, handling field shorthands including inside macros.
 pub(crate) fn replace_record_field_expr(
     ctx: &AssistContext<'_>,
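
The new `string_suffix` helper added above returns whatever trails the closing delimiter of a string-literal token (its suffix, e.g. `i32`). A self-contained sketch of the same logic, runnable outside rust-analyzer:

```rust
/// Returns the suffix that trails a string-literal token's closing
/// delimiter, mirroring the `string_suffix` helper added to utils.rs.
fn string_suffix(s: &str) -> Option<&str> {
    // The suffix starts right after the last `"`, `'` or `#` in the token text.
    s.rfind(['"', '\'', '#']).map(|i| &s[i + 1..])
}

fn main() {
    assert_eq!(string_suffix(r#""abc""#), Some(""));
    assert_eq!(string_suffix(r#"""i32"#), Some("i32"));
    assert_eq!(string_suffix(r##"r#""#i32"##), Some("i32"));
}
```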
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils/gen_trait_fn_body.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils/gen_trait_fn_body.rs
index 7a9bdfe1ecc24..4ea56dc46aaaa 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/utils/gen_trait_fn_body.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils/gen_trait_fn_body.rs
@@ -2,7 +2,7 @@
 
 use hir::TraitRef;
 use syntax::{
-    ast::{self, edit::AstNodeEdit, make, AstNode, BinaryOp, CmpOp, HasName, LogicOp},
+    ast::{self, AstNode, BinaryOp, CmpOp, HasName, LogicOp, edit::AstNodeEdit, make},
     ted,
 };
 
@@ -35,7 +35,7 @@ fn gen_clone_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
     stdx::always!(func.name().is_some_and(|name| name.text() == "clone"));
     fn gen_clone_call(target: ast::Expr) -> ast::Expr {
         let method = make::name_ref("clone");
-        make::expr_method_call(target, method, make::arg_list(None))
+        make::expr_method_call(target, method, make::arg_list(None)).into()
     }
     let expr = match adt {
         // `Clone` cannot be derived for unions, so no default impl can be provided.
@@ -83,7 +83,8 @@ fn gen_clone_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
                         }
                         let pat = make::tuple_struct_pat(variant_name.clone(), pats.into_iter());
                         let struct_name = make::expr_path(variant_name);
-                        let tuple_expr = make::expr_call(struct_name, make::arg_list(fields));
+                        let tuple_expr =
+                            make::expr_call(struct_name, make::arg_list(fields)).into();
                         arms.push(make::match_arm(pat.into(), None, tuple_expr));
                     }
 
@@ -126,7 +127,7 @@ fn gen_clone_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
                         fields.push(gen_clone_call(target));
                     }
                     let struct_name = make::expr_path(make::ext::ident_path("Self"));
-                    make::expr_call(struct_name, make::arg_list(fields))
+                    make::expr_call(struct_name, make::arg_list(fields)).into()
                 }
                 // => Self { }
                 None => {
@@ -165,7 +166,7 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
                         let method = make::name_ref("debug_struct");
                         let struct_name = format!("\"{name}\"");
                         let args = make::arg_list(Some(make::expr_literal(&struct_name).into()));
-                        let mut expr = make::expr_method_call(target, method, args);
+                        let mut expr = make::expr_method_call(target, method, args).into();
 
                         let mut pats = vec![];
                         for field in list.fields() {
@@ -181,12 +182,13 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
                             let path = &format!("{field_name}");
                             let path = make::expr_path(make::ext::ident_path(path));
                             let args = make::arg_list(vec![name, path]);
-                            expr = make::expr_method_call(expr, method_name, args);
+                            expr = make::expr_method_call(expr, method_name, args).into();
                         }
 
                         // => <expr>.finish()
                         let method = make::name_ref("finish");
-                        let expr = make::expr_method_call(expr, method, make::arg_list(None));
+                        let expr =
+                            make::expr_method_call(expr, method, make::arg_list(None)).into();
 
                         // => MyStruct { fields.. } => f.debug_struct("MyStruct")...finish(),
                         let pat = make::record_pat(variant_name.clone(), pats.into_iter());
@@ -198,7 +200,7 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
                         let method = make::name_ref("debug_tuple");
                         let struct_name = format!("\"{name}\"");
                         let args = make::arg_list(Some(make::expr_literal(&struct_name).into()));
-                        let mut expr = make::expr_method_call(target, method, args);
+                        let mut expr = make::expr_method_call(target, method, args).into();
 
                         let mut pats = vec![];
                         for (i, _) in list.fields().enumerate() {
@@ -214,12 +216,13 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
                             let field_path = &name.to_string();
                             let field_path = make::expr_path(make::ext::ident_path(field_path));
                             let args = make::arg_list(vec![field_path]);
-                            expr = make::expr_method_call(expr, method_name, args);
+                            expr = make::expr_method_call(expr, method_name, args).into();
                         }
 
                         // => <expr>.finish()
                         let method = make::name_ref("finish");
-                        let expr = make::expr_method_call(expr, method, make::arg_list(None));
+                        let expr =
+                            make::expr_method_call(expr, method, make::arg_list(None)).into();
 
                         // => MyStruct (fields..) => f.debug_tuple("MyStruct")...finish(),
                         let pat = make::tuple_struct_pat(variant_name.clone(), pats.into_iter());
@@ -227,12 +230,14 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
                     }
                     None => {
                         let fmt_string = make::expr_literal(&(format!("\"{name}\""))).into();
-                        let args = make::arg_list([target, fmt_string]);
-                        let macro_name = make::expr_path(make::ext::ident_path("write"));
-                        let macro_call = make::expr_macro_call(macro_name, args);
+                        let args = make::ext::token_tree_from_node(
+                            make::arg_list([target, fmt_string]).syntax(),
+                        );
+                        let macro_name = make::ext::ident_path("write");
+                        let macro_call = make::expr_macro(macro_name, args);
 
                         let variant_name = make::path_pat(variant_name);
-                        arms.push(make::match_arm(variant_name, None, macro_call));
+                        arms.push(make::match_arm(variant_name, None, macro_call.into()));
                     }
                 }
             }
@@ -254,12 +259,12 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
 
             let expr = match strukt.field_list() {
                 // => f.debug_struct("Name").finish()
-                None => make::expr_method_call(target, make::name_ref("debug_struct"), args),
+                None => make::expr_method_call(target, make::name_ref("debug_struct"), args).into(),
 
                 // => f.debug_struct("Name").field("foo", &self.foo).finish()
                 Some(ast::FieldList::RecordFieldList(field_list)) => {
                     let method = make::name_ref("debug_struct");
-                    let mut expr = make::expr_method_call(target, method, args);
+                    let mut expr = make::expr_method_call(target, method, args).into();
                     for field in field_list.fields() {
                         let name = field.name()?;
                         let f_name = make::expr_literal(&(format!("\"{name}\""))).into();
@@ -267,7 +272,7 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
                         let f_path = make::expr_ref(f_path, false);
                         let f_path = make::expr_field(f_path, &format!("{name}"));
                         let args = make::arg_list([f_name, f_path]);
-                        expr = make::expr_method_call(expr, make::name_ref("field"), args);
+                        expr = make::expr_method_call(expr, make::name_ref("field"), args).into();
                     }
                     expr
                 }
@@ -275,20 +280,21 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
                 // => f.debug_tuple("Name").field(self.0).finish()
                 Some(ast::FieldList::TupleFieldList(field_list)) => {
                     let method = make::name_ref("debug_tuple");
-                    let mut expr = make::expr_method_call(target, method, args);
+                    let mut expr = make::expr_method_call(target, method, args).into();
                     for (i, _) in field_list.fields().enumerate() {
                         let f_path = make::expr_path(make::ext::ident_path("self"));
                         let f_path = make::expr_ref(f_path, false);
                         let f_path = make::expr_field(f_path, &format!("{i}"));
                         let method = make::name_ref("field");
-                        expr = make::expr_method_call(expr, method, make::arg_list(Some(f_path)));
+                        expr = make::expr_method_call(expr, method, make::arg_list(Some(f_path)))
+                            .into();
                     }
                     expr
                 }
             };
 
             let method = make::name_ref("finish");
-            let expr = make::expr_method_call(expr, method, make::arg_list(None));
+            let expr = make::expr_method_call(expr, method, make::arg_list(None)).into();
             let body = make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1));
             ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
             Some(())
@@ -300,7 +306,7 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
 fn gen_default_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
     fn gen_default_call() -> Option<ast::Expr> {
         let fn_name = make::ext::path_from_idents(["Default", "default"])?;
-        Some(make::expr_call(make::expr_path(fn_name), make::arg_list(None)))
+        Some(make::expr_call(make::expr_path(fn_name), make::arg_list(None)).into())
     }
     match adt {
         // `Debug` cannot be derived for unions, so no default impl can be provided.
@@ -327,7 +333,7 @@ fn gen_default_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
                         .fields()
                         .map(|_| gen_default_call())
                         .collect::<Option<Vec<ast::Expr>>>()?;
-                    make::expr_call(struct_name, make::arg_list(fields))
+                    make::expr_call(struct_name, make::arg_list(fields)).into()
                 }
                 None => {
                     let struct_name = make::ext::ident_path("Self");
@@ -348,7 +354,7 @@ fn gen_hash_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
     fn gen_hash_call(target: ast::Expr) -> ast::Stmt {
         let method = make::name_ref("hash");
         let arg = make::expr_path(make::ext::ident_path("state"));
-        let expr = make::expr_method_call(target, method, make::arg_list(Some(arg)));
+        let expr = make::expr_method_call(target, method, make::arg_list(Some(arg))).into();
         make::expr_stmt(expr).into()
     }
 
@@ -361,7 +367,7 @@ fn gen_hash_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
             let fn_name = make_discriminant()?;
 
             let arg = make::expr_path(make::ext::ident_path("self"));
-            let fn_call = make::expr_call(fn_name, make::arg_list(Some(arg)));
+            let fn_call = make::expr_call(fn_name, make::arg_list(Some(arg))).into();
             let stmt = gen_hash_call(fn_call);
 
             make::block_expr(Some(stmt), None).indent(ast::edit::IndentLevel(1))
@@ -444,9 +450,11 @@ fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef>) -
         ast::Adt::Enum(enum_) => {
             // => std::mem::discriminant(self) == std::mem::discriminant(other)
             let lhs_name = make::expr_path(make::ext::ident_path("self"));
-            let lhs = make::expr_call(make_discriminant()?, make::arg_list(Some(lhs_name.clone())));
+            let lhs = make::expr_call(make_discriminant()?, make::arg_list(Some(lhs_name.clone())))
+                .into();
             let rhs_name = make::expr_path(make::ext::ident_path("other"));
-            let rhs = make::expr_call(make_discriminant()?, make::arg_list(Some(rhs_name.clone())));
+            let rhs = make::expr_call(make_discriminant()?, make::arg_list(Some(rhs_name.clone())))
+                .into();
             let eq_check =
                 make::expr_bin_op(lhs, BinaryOp::CmpOp(CmpOp::Eq { negated: false }), rhs);
 
@@ -613,7 +621,7 @@ fn gen_partial_ord(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef>)
     fn gen_partial_cmp_call(lhs: ast::Expr, rhs: ast::Expr) -> ast::Expr {
         let rhs = make::expr_ref(rhs, false);
         let method = make::name_ref("partial_cmp");
-        make::expr_method_call(lhs, method, make::arg_list(Some(rhs)))
+        make::expr_method_call(lhs, method, make::arg_list(Some(rhs))).into()
     }
 
     // Check that self type and rhs type match. We don't know how to implement the method
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils/ref_field_expr.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils/ref_field_expr.rs
index d434872ea595e..840b26a7ad58b 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/utils/ref_field_expr.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils/ref_field_expr.rs
@@ -4,8 +4,8 @@
 //! It determines whether to deref the new expression and/or wrap it in parentheses,
 //! based on the parent of the existing expression.
 use syntax::{
-    ast::{self, make, FieldExpr, MethodCallExpr},
     AstNode, T,
+    ast::{self, FieldExpr, MethodCallExpr, make},
 };
 
 use crate::AssistContext;
@@ -125,7 +125,7 @@ impl RefData {
         }
 
         if self.needs_parentheses {
-            expr = make::expr_paren(expr);
+            expr = make::expr_paren(expr).into();
         }
 
         expr
diff --git a/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml b/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml
index 68cc7a0b9a6df..94c01e333ed44 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml
@@ -12,7 +12,7 @@ rust-version.workspace = true
 [lib]
 
 [dependencies]
-cov-mark = "2.0.0-pre.1"
+cov-mark = "2.0.0"
 itertools.workspace = true
 tracing.workspace = true
 
@@ -29,7 +29,7 @@ syntax.workspace = true
 hir.workspace = true
 
 [dev-dependencies]
-expect-test = "1.4.0"
+expect-test = "1.5.1"
 
 # local deps
 test-utils.workspace = true
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs
index a22e7b272ea05..5d68aca9e615f 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs
@@ -24,17 +24,19 @@ pub(crate) mod vis;
 
 use std::iter;
 
-use hir::{sym, HasAttrs, Name, ScopeDef, Variant};
-use ide_db::{imports::import_assets::LocatedImport, RootDatabase, SymbolKind};
-use syntax::{ast, SmolStr, ToSmolStr};
+use hir::{HasAttrs, Name, ScopeDef, Variant, sym};
+use ide_db::{RootDatabase, SymbolKind, imports::import_assets::LocatedImport};
+use syntax::{SmolStr, ToSmolStr, ast};
 
 use crate::{
+    CompletionContext, CompletionItem, CompletionItemKind,
     context::{
         DotAccess, ItemListKind, NameContext, NameKind, NameRefContext, NameRefKind,
         PathCompletionCtx, PathKind, PatternContext, TypeLocation, Visible,
     },
     item::Builder,
     render::{
+        RenderContext,
         const_::render_const,
         function::{render_fn, render_method},
         literal::{render_struct_literal, render_variant_lit},
@@ -44,9 +46,7 @@ use crate::{
         render_tuple_field,
         type_alias::{render_type_alias, render_type_alias_with_eq},
         union_literal::render_union_literal,
-        RenderContext,
     },
-    CompletionContext, CompletionItem, CompletionItemKind,
 };
 
 /// Represents an in-progress set of completions being built.
@@ -631,8 +631,7 @@ fn enum_variants_with_paths(
     let mut process_variant = |variant: Variant| {
         let self_path = hir::ModPath::from_segments(
             hir::PathKind::Plain,
-            iter::once(Name::new_symbol_root(sym::Self_.clone()))
-                .chain(iter::once(variant.name(ctx.db))),
+            iter::once(Name::new_symbol_root(sym::Self_)).chain(iter::once(variant.name(ctx.db))),
         );
 
         cb(acc, ctx, variant, self_path);
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs
index cf5427bae38de..3c195f80fea47 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs
@@ -5,22 +5,22 @@
 use std::sync::LazyLock;
 
 use ide_db::{
+    FxHashMap, SymbolKind,
     generated::lints::{
-        Lint, CLIPPY_LINTS, CLIPPY_LINT_GROUPS, DEFAULT_LINTS, FEATURES, RUSTDOC_LINTS,
+        CLIPPY_LINT_GROUPS, CLIPPY_LINTS, DEFAULT_LINTS, FEATURES, Lint, RUSTDOC_LINTS,
     },
     syntax_helpers::node_ext::parse_tt_as_comma_sep_paths,
-    FxHashMap, SymbolKind,
 };
 use itertools::Itertools;
 use syntax::{
-    ast::{self, AttrKind},
     AstNode, Edition, SyntaxKind, T,
+    ast::{self, AttrKind},
 };
 
 use crate::{
+    Completions,
     context::{AttrCtx, CompletionContext, PathCompletionCtx, Qualified},
     item::CompletionItem,
-    Completions,
 };
 
 mod cfg;
@@ -380,7 +380,7 @@ fn parse_comma_sep_expr(input: ast::TokenTree) -> Option<Vec<ast::Expr>> {
         .children_with_tokens()
         .skip(1)
         .take_while(|it| it.as_token() != Some(&r_paren));
-    let input_expressions = tokens.group_by(|tok| tok.kind() == T![,]);
+    let input_expressions = tokens.chunk_by(|tok| tok.kind() == T![,]);
     Some(
         input_expressions
             .into_iter()
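
The `parse_comma_sep_expr` change above swaps the deprecated itertools `group_by` for its current name, `chunk_by`. A minimal illustration of how `chunk_by` splits a flat token stream on separators; the string tokens here stand in for the real syntax elements, and it only assumes itertools 0.13 or newer:

```rust
use itertools::Itertools;

fn main() {
    // Group consecutive tokens by "is this a comma?", then drop the
    // separator groups — the same shape as parse_comma_sep_expr above.
    let tokens = ["a", ",", "b", "c", ",", "d"];
    let chunks = tokens.iter().chunk_by(|tok| **tok == ",");

    let groups: Vec<Vec<&str>> = chunks
        .into_iter()
        .filter(|(is_comma, _)| !is_comma)
        .map(|(_, group)| group.copied().collect())
        .collect();

    assert_eq!(groups, vec![vec!["a"], vec!["b", "c"], vec!["d"]]);
}
```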
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs
index cda0da13b26eb..1676a8467c85f 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs
@@ -2,9 +2,9 @@
 
 use ide_db::SymbolKind;
 use itertools::Itertools;
-use syntax::{algo, ast::Ident, AstToken, Direction, NodeOrToken, SyntaxKind};
+use syntax::{AstToken, Direction, NodeOrToken, SyntaxKind, algo, ast::Ident};
 
-use crate::{completions::Completions, context::CompletionContext, CompletionItem};
+use crate::{CompletionItem, completions::Completions, context::CompletionContext};
 
 pub(crate) fn complete_cfg(acc: &mut Completions, ctx: &CompletionContext<'_>) {
     let add_completion = |item: &str| {
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/derive.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/derive.rs
index 1f8927401b2f8..2fc07e0138280 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/derive.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/derive.rs
@@ -1,13 +1,13 @@
 //! Completion for derives
 use hir::ScopeDef;
-use ide_db::{documentation::HasDocs, SymbolKind};
+use ide_db::{SymbolKind, documentation::HasDocs};
 use itertools::Itertools;
 use syntax::{SmolStr, ToSmolStr};
 
 use crate::{
+    Completions,
     context::{CompletionContext, ExistingDerives, PathCompletionCtx, Qualified},
     item::CompletionItem,
-    Completions,
 };
 
 pub(crate) fn complete_derive_path(
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/lint.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/lint.rs
index 04f40e805ad68..c87c46d98127b 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/lint.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/lint.rs
@@ -1,8 +1,8 @@
 //! Completion for lints
-use ide_db::{documentation::Documentation, generated::lints::Lint, SymbolKind};
+use ide_db::{SymbolKind, documentation::Documentation, generated::lints::Lint};
 use syntax::ast;
 
-use crate::{context::CompletionContext, item::CompletionItem, Completions};
+use crate::{Completions, context::CompletionContext, item::CompletionItem};
 
 pub(super) fn complete_lint(
     acc: &mut Completions,
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/macro_use.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/macro_use.rs
index deb12282c025b..0641a4f6c3fe5 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/macro_use.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/macro_use.rs
@@ -3,7 +3,7 @@ use hir::ModuleDef;
 use ide_db::SymbolKind;
 use syntax::ast;
 
-use crate::{context::CompletionContext, item::CompletionItem, Completions};
+use crate::{Completions, context::CompletionContext, item::CompletionItem};
 
 pub(super) fn complete_macro_use(
     acc: &mut Completions,
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/repr.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/repr.rs
index 12652b448925b..cb7ccf7373123 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/repr.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/repr.rs
@@ -3,7 +3,7 @@
 use ide_db::SymbolKind;
 use syntax::ast;
 
-use crate::{context::CompletionContext, item::CompletionItem, Completions};
+use crate::{Completions, context::CompletionContext, item::CompletionItem};
 
 pub(super) fn complete_repr(
     acc: &mut Completions,
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs
index b38b9ac1f5391..4f21136d214ee 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs
@@ -2,16 +2,16 @@
 
 use std::ops::ControlFlow;
 
-use hir::{HasContainer, ItemContainer, MethodCandidateCallback, Name};
+use hir::{Complete, HasContainer, ItemContainer, MethodCandidateCallback, Name};
 use ide_db::FxHashSet;
 use syntax::SmolStr;
 
 use crate::{
+    CompletionItem, CompletionItemKind, Completions,
     context::{
         CompletionContext, DotAccess, DotAccessExprCtx, DotAccessKind, PathCompletionCtx,
         PathExprCtx, Qualified,
     },
-    CompletionItem, CompletionItemKind, Completions,
 };
 
 /// Complete dot accesses, i.e. fields or methods.
@@ -259,7 +259,9 @@ fn complete_methods(
             // This needs to come before the `seen_methods` test, so that if we see the same method twice,
             // once as inherent and once not, we will include it.
             if let ItemContainer::Trait(trait_) = func.container(self.ctx.db) {
-                if self.ctx.exclude_traits.contains(&trait_) {
+                if self.ctx.exclude_traits.contains(&trait_)
+                    || trait_.complete(self.ctx.db) == Complete::IgnoreMethods
+                {
                     return ControlFlow::Continue(());
                 }
             }
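
With the change above, `complete_methods` consults the trait's `Complete` setting in addition to the user-level `exclude_traits` config. A simplified sketch of the guard; the enum variants follow the diff, everything else is a hypothetical stand-in for the hir types:

```rust
// Hypothetical, trimmed-down stand-in for hir::Complete as used above.
#[derive(Clone, Copy, PartialEq)]
enum Complete {
    Yes,
    IgnoreMethods,
}

/// A trait method is offered only if its trait is neither excluded by the
/// user config nor marked as `IgnoreMethods`.
fn offer_trait_method(trait_is_excluded: bool, trait_complete: Complete) -> bool {
    !(trait_is_excluded || trait_complete == Complete::IgnoreMethods)
}

fn main() {
    assert!(offer_trait_method(false, Complete::Yes));
    assert!(!offer_trait_method(false, Complete::IgnoreMethods));
    assert!(!offer_trait_method(true, Complete::Yes));
}
```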
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/env_vars.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/env_vars.rs
index 40af5203e9c32..cd18b3dcfdc2b 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/env_vars.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/env_vars.rs
@@ -1,36 +1,47 @@
 //! Completes environment variables defined by Cargo
 //! (<https://doc.rust-lang.org/cargo/reference/environment-variables.html>)
-use hir::MacroFileIdExt;
 use ide_db::syntax_helpers::node_ext::macro_call_for_string_token;
 use syntax::{
-    ast::{self, IsString},
     AstToken,
+    ast::{self, IsString},
 };
 
 use crate::{
-    completions::Completions, context::CompletionContext, CompletionItem, CompletionItemKind,
+    CompletionItem, CompletionItemKind, completions::Completions, context::CompletionContext,
 };
 
 const CARGO_DEFINED_VARS: &[(&str, &str)] = &[
-    ("CARGO","Path to the cargo binary performing the build"),
-    ("CARGO_MANIFEST_DIR","The directory containing the manifest of your package"),
-    ("CARGO_PKG_VERSION","The full version of your package"),
-    ("CARGO_PKG_VERSION_MAJOR","The major version of your package"),
-    ("CARGO_PKG_VERSION_MINOR","The minor version of your package"),
-    ("CARGO_PKG_VERSION_PATCH","The patch version of your package"),
-    ("CARGO_PKG_VERSION_PRE","The pre-release version of your package"),
-    ("CARGO_PKG_AUTHORS","Colon separated list of authors from the manifest of your package"),
-    ("CARGO_PKG_NAME","The name of your package"),
-    ("CARGO_PKG_DESCRIPTION","The description from the manifest of your package"),
-    ("CARGO_PKG_HOMEPAGE","The home page from the manifest of your package"),
-    ("CARGO_PKG_REPOSITORY","The repository from the manifest of your package"),
-    ("CARGO_PKG_LICENSE","The license from the manifest of your package"),
-    ("CARGO_PKG_LICENSE_FILE","The license file from the manifest of your package"),
-    ("CARGO_PKG_RUST_VERSION","The Rust version from the manifest of your package. Note that this is the minimum Rust version supported by the package, not the current Rust version"),
-    ("CARGO_CRATE_NAME","The name of the crate that is currently being compiled"),
-    ("CARGO_BIN_NAME","The name of the binary that is currently being compiled (if it is a binary). This name does not include any file extension, such as .exe"),
-    ("CARGO_PRIMARY_PACKAGE","This environment variable will be set if the package being built is primary. Primary packages are the ones the user selected on the command-line, either with -p flags or the defaults based on the current directory and the default workspace members. This environment variable will not be set when building dependencies. This is only set when compiling the package (not when running binaries or tests)"),
-    ("CARGO_TARGET_TMPDIR","Only set when building integration test or benchmark code. This is a path to a directory inside the target directory where integration tests or benchmarks are free to put any data needed by the tests/benches. Cargo initially creates this directory but doesn't manage its content in any way, this is the responsibility of the test code")
+    ("CARGO", "Path to the cargo binary performing the build"),
+    ("CARGO_MANIFEST_DIR", "The directory containing the manifest of your package"),
+    ("CARGO_PKG_VERSION", "The full version of your package"),
+    ("CARGO_PKG_VERSION_MAJOR", "The major version of your package"),
+    ("CARGO_PKG_VERSION_MINOR", "The minor version of your package"),
+    ("CARGO_PKG_VERSION_PATCH", "The patch version of your package"),
+    ("CARGO_PKG_VERSION_PRE", "The pre-release version of your package"),
+    ("CARGO_PKG_AUTHORS", "Colon separated list of authors from the manifest of your package"),
+    ("CARGO_PKG_NAME", "The name of your package"),
+    ("CARGO_PKG_DESCRIPTION", "The description from the manifest of your package"),
+    ("CARGO_PKG_HOMEPAGE", "The home page from the manifest of your package"),
+    ("CARGO_PKG_REPOSITORY", "The repository from the manifest of your package"),
+    ("CARGO_PKG_LICENSE", "The license from the manifest of your package"),
+    ("CARGO_PKG_LICENSE_FILE", "The license file from the manifest of your package"),
+    (
+        "CARGO_PKG_RUST_VERSION",
+        "The Rust version from the manifest of your package. Note that this is the minimum Rust version supported by the package, not the current Rust version",
+    ),
+    ("CARGO_CRATE_NAME", "The name of the crate that is currently being compiled"),
+    (
+        "CARGO_BIN_NAME",
+        "The name of the binary that is currently being compiled (if it is a binary). This name does not include any file extension, such as .exe",
+    ),
+    (
+        "CARGO_PRIMARY_PACKAGE",
+        "This environment variable will be set if the package being built is primary. Primary packages are the ones the user selected on the command-line, either with -p flags or the defaults based on the current directory and the default workspace members. This environment variable will not be set when building dependencies. This is only set when compiling the package (not when running binaries or tests)",
+    ),
+    (
+        "CARGO_TARGET_TMPDIR",
+        "Only set when building integration test or benchmark code. This is a path to a directory inside the target directory where integration tests or benchmarks are free to put any data needed by the tests/benches. Cargo initially creates this directory but doesn't manage its content in any way, this is the responsibility of the test code",
+    ),
 ];
 
 pub(crate) fn complete_cargo_env_vars(
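
The `CARGO_DEFINED_VARS` table above changed only in formatting (one tuple per entry, long descriptions wrapped). These completions target the string literal inside `env!`/`option_env!`; when the crate is built by Cargo, the completed calls resolve at compile time, for example:

```rust
fn main() {
    // Values injected by Cargo for the package being compiled.
    let name = env!("CARGO_PKG_NAME");
    let version = env!("CARGO_PKG_VERSION");
    // CARGO_BIN_NAME is only set when compiling a binary, so use the fallible form.
    let bin = option_env!("CARGO_BIN_NAME").unwrap_or("<not a binary>");
    println!("{name} {version} ({bin})");
}
```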
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs
index b28b6e50e2284..ee1a21f9a1a6f 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs
@@ -2,14 +2,14 @@
 
 use std::ops::ControlFlow;
 
-use hir::{sym, Name, PathCandidateCallback, ScopeDef};
+use hir::{Complete, Name, PathCandidateCallback, ScopeDef, sym};
 use ide_db::FxHashSet;
 use syntax::ast;
 
 use crate::{
+    CompletionContext, Completions,
     completions::record::add_default_update,
     context::{BreakableKind, PathCompletionCtx, PathExprCtx, Qualified},
-    CompletionContext, Completions,
 };
 
 struct PathCallback<'a, F> {
@@ -33,10 +33,10 @@ where
     fn on_trait_item(&mut self, item: hir::AssocItem) -> ControlFlow<()> {
         // The excluded check needs to come before the `seen` test, so that if we see the same method twice,
         // once as inherent and once not, we will include it.
-        if item
-            .container_trait(self.ctx.db)
-            .is_none_or(|trait_| !self.ctx.exclude_traits.contains(&trait_))
-            && self.seen.insert(item)
+        if item.container_trait(self.ctx.db).is_none_or(|trait_| {
+            !self.ctx.exclude_traits.contains(&trait_)
+                && trait_.complete(self.ctx.db) != Complete::IgnoreMethods
+        }) && self.seen.insert(item)
         {
             (self.add_assoc_item)(self.acc, item);
         }
@@ -79,11 +79,7 @@ pub(crate) fn complete_expr_path(
     let wants_const_token =
         ref_expr_parent.is_some() && has_raw_token && !has_const_token && !has_mut_token;
     let wants_mut_token = if ref_expr_parent.is_some() {
-        if has_raw_token {
-            !has_const_token && !has_mut_token
-        } else {
-            !has_mut_token
-        }
+        if has_raw_token { !has_const_token && !has_mut_token } else { !has_mut_token }
     } else {
         false
     };
@@ -108,7 +104,9 @@ pub(crate) fn complete_expr_path(
             .iter()
             .copied()
             .map(hir::Trait::from)
-            .filter(|it| !ctx.exclude_traits.contains(it))
+            .filter(|it| {
+                !ctx.exclude_traits.contains(it) && it.complete(ctx.db) != Complete::IgnoreMethods
+            })
             .flat_map(|it| it.items(ctx.sema.db))
             .for_each(|item| add_assoc_item(acc, item)),
         Qualified::TypeAnchor { trait_: Some(trait_), .. } => {
@@ -262,7 +260,7 @@ pub(crate) fn complete_expr_path(
                                 path_ctx,
                                 strukt,
                                 None,
-                                Some(Name::new_symbol_root(sym::Self_.clone())),
+                                Some(Name::new_symbol_root(sym::Self_)),
                             );
                         }
                     }
@@ -282,7 +280,7 @@ pub(crate) fn complete_expr_path(
                                 ctx,
                                 un,
                                 None,
-                                Some(Name::new_symbol_root(sym::Self_.clone())),
+                                Some(Name::new_symbol_root(sym::Self_)),
                             );
                         }
                     }
@@ -349,6 +347,7 @@ pub(crate) fn complete_expr_path(
 
                     if !in_block_expr {
                         add_keyword("unsafe", "unsafe {\n    $0\n}");
+                        add_keyword("const", "const {\n    $0\n}");
                     }
                     add_keyword("match", "match $1 {\n    $0\n}");
                     add_keyword("while", "while $1 {\n    $0\n}");
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs
index 7c2cc2a6c1d8f..570d1a0a2db8a 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs
@@ -1,11 +1,11 @@
 //! Completes function abi strings.
 use syntax::{
-    ast::{self, IsString},
     AstNode, AstToken, SmolStr,
+    ast::{self, IsString},
 };
 
 use crate::{
-    completions::Completions, context::CompletionContext, CompletionItem, CompletionItemKind,
+    CompletionItem, CompletionItemKind, completions::Completions, context::CompletionContext,
 };
 
 // Most of these are feature gated, we should filter/add feature gate completions once we have them.
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_crate.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_crate.rs
index 7cb710c2d963c..71a3e4eb4ed6d 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_crate.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_crate.rs
@@ -1,10 +1,10 @@
 //! Completion for extern crates
 
 use hir::Name;
-use ide_db::{documentation::HasDocs, SymbolKind};
+use ide_db::{SymbolKind, documentation::HasDocs};
 use syntax::ToSmolStr;
 
-use crate::{context::CompletionContext, CompletionItem, CompletionItemKind};
+use crate::{CompletionItem, CompletionItemKind, context::CompletionContext};
 
 use super::Completions;
 
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/field.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/field.rs
index b795bbd872a0a..1441b0e3a01ae 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/field.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/field.rs
@@ -1,8 +1,8 @@
 //! Completion of field list position.
 
 use crate::{
-    context::{PathCompletionCtx, Qualified},
     CompletionContext, Completions,
+    context::{PathCompletionCtx, Qualified},
 };
 
 pub(crate) fn complete_field_list_tuple_variant(
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs
index b5555e6610240..a747561380906 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs
@@ -5,16 +5,16 @@ use ide_db::imports::{
     insert_use::ImportScope,
 };
 use itertools::Itertools;
-use syntax::{ast, AstNode, SyntaxNode};
+use syntax::{AstNode, SyntaxNode, ast};
 
 use crate::{
+    Completions,
     config::AutoImportExclusionType,
     context::{
         CompletionContext, DotAccess, PathCompletionCtx, PathKind, PatternContext, Qualified,
         TypeLocation,
     },
-    render::{render_resolution_with_import, render_resolution_with_import_pat, RenderContext},
-    Completions,
+    render::{RenderContext, render_resolution_with_import, render_resolution_with_import_pat},
 };
 
 // Feature: Completion With Autoimport
@@ -268,19 +268,7 @@ fn import_on_the_fly(
                 && !ctx.is_item_hidden(original_item)
                 && ctx.check_stability(original_item.attrs(ctx.db).as_deref())
         })
-        .filter(|import| {
-            let def = import.item_to_import.into_module_def();
-            if let Some(&kind) = ctx.exclude_flyimport.get(&def) {
-                if kind == AutoImportExclusionType::Always {
-                    return false;
-                }
-                let method_imported = import.item_to_import != import.original_item;
-                if method_imported {
-                    return false;
-                }
-            }
-            true
-        })
+        .filter(|import| filter_excluded_flyimport(ctx, import))
         .sorted_by(|a, b| {
             let key = |import_path| {
                 (
@@ -366,24 +354,7 @@ fn import_on_the_fly_method(
             !ctx.is_item_hidden(&import.item_to_import)
                 && !ctx.is_item_hidden(&import.original_item)
         })
-        .filter(|import| {
-            let def = import.item_to_import.into_module_def();
-            if let Some(&kind) = ctx.exclude_flyimport.get(&def) {
-                if kind == AutoImportExclusionType::Always {
-                    return false;
-                }
-                let method_imported = import.item_to_import != import.original_item;
-                if method_imported {
-                    return false;
-                }
-            }
-
-            if let ModuleDef::Trait(_) = import.item_to_import.into_module_def() {
-                !ctx.exclude_flyimport.contains_key(&def)
-            } else {
-                true
-            }
-        })
+        .filter(|import| filter_excluded_flyimport(ctx, import))
         .sorted_by(|a, b| {
             let key = |import_path| {
                 (
@@ -401,14 +372,32 @@ fn import_on_the_fly_method(
     Some(())
 }
 
+fn filter_excluded_flyimport(ctx: &CompletionContext<'_>, import: &LocatedImport) -> bool {
+    let def = import.item_to_import.into_module_def();
+    let is_exclude_flyimport = ctx.exclude_flyimport.get(&def).copied();
+
+    if matches!(is_exclude_flyimport, Some(AutoImportExclusionType::Always))
+        || !import.complete_in_flyimport.0
+    {
+        return false;
+    }
+    let method_imported = import.item_to_import != import.original_item;
+    if method_imported
+        && (is_exclude_flyimport.is_some()
+            || ctx.exclude_flyimport.contains_key(&import.original_item.into_module_def()))
+    {
+        // If this is a method, exclude it either if it was excluded itself (which may not be caught above,
+        // because `item_to_import` is the trait), or if its trait was excluded. We don't need to check
+        // the attributes here, since they pass from trait to methods on import map construction.
+        return false;
+    }
+    true
+}
+
 fn import_name(ctx: &CompletionContext<'_>) -> String {
     let token_kind = ctx.token.kind();
 
-    if token_kind.is_any_identifier() {
-        ctx.token.to_string()
-    } else {
-        String::new()
-    }
+    if token_kind.is_any_identifier() { ctx.token.to_string() } else { String::new() }
 }
 
 fn import_assets_for_path(
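
The two hand-rolled exclusion filters above were folded into `filter_excluded_flyimport`, which also honours the new `complete_in_flyimport` flag. A compact sketch of the decision it encodes; the parameters and the trimmed-down enum are hypothetical stand-ins for the real context lookups:

```rust
// Hypothetical stand-in for ide_db's exclusion kinds.
#[derive(Clone, Copy, PartialEq)]
enum AutoImportExclusionType {
    Always,
    Methods,
}

/// Keep an import candidate unless it is excluded outright, opted out of
/// flyimport, or is a method whose trait (or the method itself) is excluded.
fn keep_flyimport(
    excluded: Option<AutoImportExclusionType>, // exclusion recorded for the imported item
    original_excluded: bool,                   // exclusion recorded for the original item (the method)
    complete_in_flyimport: bool,               // carried on the located import
    method_imported: bool,                     // item_to_import != original_item
) -> bool {
    if excluded == Some(AutoImportExclusionType::Always) || !complete_in_flyimport {
        return false;
    }
    if method_imported && (excluded.is_some() || original_excluded) {
        return false;
    }
    true
}

fn main() {
    assert!(keep_flyimport(None, false, true, false));
    assert!(!keep_flyimport(Some(AutoImportExclusionType::Always), false, true, false));
    assert!(!keep_flyimport(Some(AutoImportExclusionType::Methods), false, true, true));
    assert!(!keep_flyimport(None, true, true, true));
    assert!(!keep_flyimport(None, false, false, false));
}
```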
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/fn_param.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/fn_param.rs
index e86eaad4d0f24..6d1e973dc4c5c 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/fn_param.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/fn_param.rs
@@ -3,14 +3,14 @@
 use hir::HirDisplay;
 use ide_db::FxHashMap;
 use syntax::{
-    algo,
+    AstNode, Direction, SyntaxKind, TextRange, TextSize, algo,
     ast::{self, HasModuleItem},
-    match_ast, AstNode, Direction, SyntaxKind, TextRange, TextSize,
+    match_ast,
 };
 
 use crate::{
-    context::{ParamContext, ParamKind, PatternContext},
     CompletionContext, CompletionItem, CompletionItemKind, Completions,
+    context::{ParamContext, ParamKind, PatternContext},
 };
 
 // FIXME: Make this a submodule of [`pattern`]
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/format_string.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/format_string.rs
index dcd40c3412c70..5ae65b05bc42e 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/format_string.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/format_string.rs
@@ -1,11 +1,11 @@
 //! Completes identifiers in format string literals.
 
 use hir::{ModuleDef, ScopeDef};
-use ide_db::{syntax_helpers::format_string::is_format_string, SymbolKind};
+use ide_db::{SymbolKind, syntax_helpers::format_string::is_format_string};
 use itertools::Itertools;
-use syntax::{ast, AstToken, TextRange, TextSize, ToSmolStr};
+use syntax::{AstToken, TextRange, TextSize, ToSmolStr, ast};
 
-use crate::{context::CompletionContext, CompletionItem, CompletionItemKind, Completions};
+use crate::{CompletionItem, CompletionItemKind, Completions, context::CompletionContext};
 
 /// Complete identifiers in format strings.
 pub(crate) fn format_string(
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list.rs
index 3ab341e4eded4..893997cee473e 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list.rs
@@ -1,8 +1,8 @@
 //! Completion of paths and keywords at item list position.
 
 use crate::{
-    context::{ItemListKind, PathCompletionCtx, PathExprCtx, Qualified},
     CompletionContext, Completions,
+    context::{ItemListKind, PathCompletionCtx, PathExprCtx, Qualified},
 };
 
 pub(crate) mod trait_impl;
@@ -114,6 +114,7 @@ fn add_keywords(acc: &mut Completions, ctx: &CompletionContext<'_>, kind: Option
             add_keyword("trait", "trait $1 {\n    $0\n}");
             if no_vis_qualifiers {
                 add_keyword("impl", "impl $1 {\n    $0\n}");
+                add_keyword("impl for", "impl $1 for $2 {\n    $0\n}");
             }
         }
 
@@ -141,9 +142,10 @@ fn add_keywords(acc: &mut Completions, ctx: &CompletionContext<'_>, kind: Option
         add_keyword("struct", "struct $0");
         add_keyword("trait", "trait $1 {\n    $0\n}");
         add_keyword("union", "union $1 {\n    $0\n}");
-        add_keyword("use", "use $0");
+        add_keyword("use", "use $0;");
         if no_vis_qualifiers {
             add_keyword("impl", "impl $1 {\n    $0\n}");
+            add_keyword("impl for", "impl $1 for $2 {\n    $0\n}");
         }
     }
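
For reference, the new `impl for` item-list snippet (`impl $1 for $2 { $0 }`) leaves two placeholders for the trait and the implementing type. Filled in, it yields a skeleton along these lines:

```rust
use std::fmt;

struct MyType;

// Filling the two placeholders of the `impl for` snippet produces a
// skeleton like this; `$0` is where the cursor ends up, here replaced
// by the required method.
impl fmt::Display for MyType {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "MyType")
    }
}

fn main() {
    println!("{}", MyType);
}
```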
 
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs
index 831f5665f4aa0..58aead73fd6fc 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs
@@ -31,20 +31,21 @@
 //! }
 //! ```
 
-use hir::{db::ExpandDatabase, MacroFileId, Name};
+use hir::{MacroCallId, Name, db::ExpandDatabase};
 use ide_db::text_edit::TextEdit;
 use ide_db::{
-    documentation::HasDocs, path_transform::PathTransform,
-    syntax_helpers::prettify_macro_expansion, traits::get_missing_assoc_items, SymbolKind,
+    SymbolKind, documentation::HasDocs, path_transform::PathTransform,
+    syntax_helpers::prettify_macro_expansion, traits::get_missing_assoc_items,
 };
 use syntax::{
-    ast::{self, edit_in_place::AttrsOwnerEdit, make, HasGenericArgs, HasTypeBounds},
-    format_smolstr, ted, AstNode, SmolStr, SyntaxElement, SyntaxKind, TextRange, ToSmolStr, T,
+    AstNode, SmolStr, SyntaxElement, SyntaxKind, T, TextRange, ToSmolStr,
+    ast::{self, HasGenericArgs, HasTypeBounds, edit_in_place::AttrsOwnerEdit, make},
+    format_smolstr, ted,
 };
 
 use crate::{
-    context::PathCompletionCtx, CompletionContext, CompletionItem, CompletionItemKind,
-    CompletionRelevance, Completions,
+    CompletionContext, CompletionItem, CompletionItemKind, CompletionRelevance, Completions,
+    context::PathCompletionCtx,
 };
 
 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
@@ -464,7 +465,7 @@ fn add_const_impl(
 fn make_const_compl_syntax(
     ctx: &CompletionContext<'_>,
     const_: &ast::Const,
-    macro_file: Option<MacroFileId>,
+    macro_file: Option<MacroCallId>,
 ) -> SmolStr {
     let const_ = if let Some(macro_file) = macro_file {
         let span_map = ctx.db.expansion_span_map(macro_file);
@@ -492,7 +493,7 @@ fn make_const_compl_syntax(
 fn function_declaration(
     ctx: &CompletionContext<'_>,
     node: &ast::Fn,
-    macro_file: Option<MacroFileId>,
+    macro_file: Option<MacroCallId>,
 ) -> String {
     let node = if let Some(macro_file) = macro_file {
         let span_map = ctx.db.expansion_span_map(macro_file);
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs
index 26c29e0202c0d..039742463c81c 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs
@@ -56,6 +56,7 @@ mod tests {
                 kw extern
                 kw fn
                 kw impl
+                kw impl for
                 kw trait
             "#]],
         );
@@ -76,6 +77,7 @@ fn foo(a: A) { a.$0 }
                 kw await                                                           expr.await
                 sn box                                                         Box::new(expr)
                 sn call                                                        function(expr)
+                sn const                                                             const {}
                 sn dbg                                                             dbg!(expr)
                 sn dbgr                                                           dbg!(&expr)
                 sn deref                                                                *expr
@@ -103,6 +105,7 @@ fn foo() {
                 kw await                                                                          expr.await
                 sn box                                                                        Box::new(expr)
                 sn call                                                                       function(expr)
+                sn const                                                                            const {}
                 sn dbg                                                                            dbg!(expr)
                 sn dbgr                                                                          dbg!(&expr)
                 sn deref                                                                               *expr
@@ -132,6 +135,7 @@ fn foo(a: A) { a.$0 }
                 kw await                                                           expr.await
                 sn box                                                         Box::new(expr)
                 sn call                                                        function(expr)
+                sn const                                                             const {}
                 sn dbg                                                             dbg!(expr)
                 sn dbgr                                                           dbg!(&expr)
                 sn deref                                                                *expr
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/lifetime.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/lifetime.rs
index 53a62fe49c5aa..b02f079b7213d 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/lifetime.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/lifetime.rs
@@ -7,7 +7,7 @@
 //! there is no value in lifting these out into the outline module test since they will either not
 //! show up for normal completions, or they won't show completions other than lifetimes depending
 //! on the fixture input.
-use hir::{sym, Name, ScopeDef};
+use hir::{Name, ScopeDef, sym};
 
 use crate::{
     completions::Completions,
@@ -31,13 +31,13 @@ pub(crate) fn complete_lifetime(
             acc.add_lifetime(ctx, name);
         }
     });
-    acc.add_lifetime(ctx, Name::new_symbol_root(sym::tick_static.clone()));
+    acc.add_lifetime(ctx, Name::new_symbol_root(sym::tick_static));
     if !in_lifetime_param_bound
         && def.is_some_and(|def| {
             !matches!(def, hir::GenericDef::Function(_) | hir::GenericDef::Impl(_))
         })
     {
-        acc.add_lifetime(ctx, Name::new_symbol_root(sym::tick_underscore.clone()));
+        acc.add_lifetime(ctx, Name::new_symbol_root(sym::tick_underscore));
     }
 }
 
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs
index cca6a22f290d2..013747e4d0cc7 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs
@@ -2,14 +2,14 @@
 
 use std::iter;
 
-use hir::{HirFileIdExt, Module};
+use hir::Module;
 use ide_db::{
-    base_db::{SourceRootDatabase, VfsPath},
     FxHashSet, RootDatabase, SymbolKind,
+    base_db::{SourceDatabase, VfsPath},
 };
-use syntax::{ast, AstNode, SyntaxKind};
+use syntax::{AstNode, SyntaxKind, ast};
 
-use crate::{context::CompletionContext, CompletionItem, Completions};
+use crate::{CompletionItem, Completions, context::CompletionContext};
 
 /// Complete mod declaration, i.e. `mod $0;`
 pub(crate) fn complete_mod(
@@ -43,11 +43,14 @@ pub(crate) fn complete_mod(
 
     let module_definition_file =
         current_module.definition_source_file_id(ctx.db).original_file(ctx.db);
-    let source_root = ctx.db.source_root(ctx.db.file_source_root(module_definition_file.file_id()));
+    let source_root_id =
+        ctx.db.file_source_root(module_definition_file.file_id(ctx.db)).source_root_id(ctx.db);
+    let source_root = ctx.db.source_root(source_root_id).source_root(ctx.db);
+
     let directory_to_look_for_submodules = directory_to_look_for_submodules(
         current_module,
         ctx.db,
-        source_root.path_for_file(&module_definition_file.file_id())?,
+        source_root.path_for_file(&module_definition_file.file_id(ctx.db))?,
     )?;
 
     let existing_mod_declarations = current_module
@@ -63,9 +66,11 @@ pub(crate) fn complete_mod(
 
     source_root
         .iter()
-        .filter(|&submodule_candidate_file| submodule_candidate_file != module_definition_file)
         .filter(|&submodule_candidate_file| {
-            module_declaration_file.is_none_or(|it| it != submodule_candidate_file)
+            submodule_candidate_file != module_definition_file.file_id(ctx.db)
+        })
+        .filter(|&submodule_candidate_file| {
+            module_declaration_file.is_none_or(|it| it.file_id(ctx.db) != submodule_candidate_file)
         })
         .filter_map(|submodule_file| {
             let submodule_path = source_root.path_for_file(&submodule_file)?;
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs
index 8f38e02ed7685..ea3511d31caf2 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs
@@ -1,12 +1,12 @@
 //! Completes constants and paths in unqualified patterns.
 
-use hir::{db::DefDatabase, AssocItem, ScopeDef};
+use hir::{AssocItem, ScopeDef};
 use ide_db::syntax_helpers::suggest_name;
 use syntax::ast::Pat;
 
 use crate::{
-    context::{PathCompletionCtx, PatternContext, PatternRefutability, Qualified},
     CompletionContext, Completions,
+    context::{PathCompletionCtx, PatternContext, PatternRefutability, Qualified},
 };
 
 /// Completes constants and paths in unqualified patterns.
@@ -48,7 +48,7 @@ pub(crate) fn complete_pattern(
 
     // Suggest name only in let-stmt and fn param
     if pattern_ctx.should_suggest_name {
-        let mut name_generator = suggest_name::NameGenerator::new();
+        let mut name_generator = suggest_name::NameGenerator::default();
         if let Some(suggested) = ctx
             .expected_type
             .as_ref()
@@ -60,7 +60,7 @@ pub(crate) fn complete_pattern(
     }
 
     let refutable = pattern_ctx.refutability == PatternRefutability::Refutable;
-    let single_variant_enum = |enum_: hir::Enum| ctx.db.enum_data(enum_.into()).variants.len() == 1;
+    let single_variant_enum = |enum_: hir::Enum| enum_.num_variants(ctx.db) == 1;
 
     if let Some(hir::Adt::Enum(e)) =
         ctx.expected_type.as_ref().and_then(|ty| ty.strip_references().as_adt())
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs
index 28e2853096e0e..54be7d2fbc33f 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs
@@ -5,24 +5,24 @@ mod format_like;
 use base_db::SourceDatabase;
 use hir::{ItemInNs, Semantics};
 use ide_db::{
+    RootDatabase, SnippetCap,
     documentation::{Documentation, HasDocs},
     imports::insert_use::ImportScope,
     text_edit::TextEdit,
     ty_filter::TryEnum,
-    RootDatabase, SnippetCap,
 };
 use stdx::never;
 use syntax::{
-    ast::{self, AstNode, AstToken},
     SyntaxKind::{BLOCK_EXPR, EXPR_STMT, FOR_EXPR, IF_EXPR, LOOP_EXPR, STMT_LIST, WHILE_EXPR},
     TextRange, TextSize,
+    ast::{self, AstNode, AstToken},
 };
 
 use crate::{
+    CompletionItem, CompletionItemKind, CompletionRelevance, Completions, SnippetScope,
     completions::postfix::format_like::add_format_like_completions,
     context::{BreakableKind, CompletionContext, DotAccess, DotAccessKind},
     item::{Builder, CompletionRelevancePostfixMatch},
-    CompletionItem, CompletionItemKind, CompletionRelevance, Completions, SnippetScope,
 };
 
 pub(crate) fn complete_postfix(
@@ -155,22 +155,29 @@ pub(crate) fn complete_postfix(
     postfix_snippet("refm", "&mut expr", &format!("&mut {receiver_text}")).add_to(acc, ctx.db);
     postfix_snippet("deref", "*expr", &format!("*{receiver_text}")).add_to(acc, ctx.db);
 
-    let mut unsafe_should_be_wrapped = true;
+    let mut block_should_be_wrapped = true;
     if dot_receiver.syntax().kind() == BLOCK_EXPR {
-        unsafe_should_be_wrapped = false;
+        block_should_be_wrapped = false;
         if let Some(parent) = dot_receiver.syntax().parent() {
             if matches!(parent.kind(), IF_EXPR | WHILE_EXPR | LOOP_EXPR | FOR_EXPR) {
-                unsafe_should_be_wrapped = true;
+                block_should_be_wrapped = true;
             }
         }
     };
-    let unsafe_completion_string = if unsafe_should_be_wrapped {
+    let unsafe_completion_string = if block_should_be_wrapped {
         format!("unsafe {{ {receiver_text} }}")
     } else {
         format!("unsafe {receiver_text}")
     };
     postfix_snippet("unsafe", "unsafe {}", &unsafe_completion_string).add_to(acc, ctx.db);
 
+    let const_completion_string = if block_should_be_wrapped {
+        format!("const {{ {receiver_text} }}")
+    } else {
+        format!("const {receiver_text}")
+    };
+    postfix_snippet("const", "const {}", &const_completion_string).add_to(acc, ctx.db);
+
     // The rest of the postfix completions create an expression that moves an argument,
     // so it's better to consider references now to avoid breaking the compilation
 
@@ -276,8 +283,8 @@ fn get_receiver_text(
     if receiver_is_ambiguous_float_literal {
         range.range = TextRange::at(range.range.start(), range.range.len() - TextSize::of('.'))
     }
-    let file_text = sema.db.file_text(range.file_id.file_id());
-    let mut text = file_text[range.range].to_owned();
+    let file_text = sema.db.file_text(range.file_id.file_id(sema.db));
+    let mut text = file_text.text(sema.db)[range.range].to_owned();
 
     // The receiver texts should be interpreted as-is, as they are expected to be
     // normal Rust expressions.
@@ -414,8 +421,8 @@ mod tests {
     use expect_test::expect;
 
     use crate::{
-        tests::{check, check_edit, check_edit_with_config, TEST_CONFIG},
         CompletionConfig, Snippet,
+        tests::{TEST_CONFIG, check, check_edit, check_edit_with_config},
     };
 
     #[test]
@@ -430,6 +437,7 @@ fn main() {
             expect![[r#"
                 sn box  Box::new(expr)
                 sn call function(expr)
+                sn const      const {}
                 sn dbg      dbg!(expr)
                 sn dbgr    dbg!(&expr)
                 sn deref         *expr
@@ -463,6 +471,7 @@ fn main() {
             expect![[r#"
                 sn box  Box::new(expr)
                 sn call function(expr)
+                sn const      const {}
                 sn dbg      dbg!(expr)
                 sn dbgr    dbg!(&expr)
                 sn deref         *expr
@@ -490,6 +499,7 @@ fn main() {
             expect![[r#"
                 sn box  Box::new(expr)
                 sn call function(expr)
+                sn const      const {}
                 sn dbg      dbg!(expr)
                 sn dbgr    dbg!(&expr)
                 sn deref         *expr
@@ -516,6 +526,7 @@ fn main() {
             expect![[r#"
                 sn box  Box::new(expr)
                 sn call function(expr)
+                sn const      const {}
                 sn dbg      dbg!(expr)
                 sn dbgr    dbg!(&expr)
                 sn deref         *expr
@@ -653,59 +664,74 @@ fn main() {
 
     #[test]
     fn postfix_completion_for_unsafe() {
-        check_edit("unsafe", r#"fn main() { foo.$0 }"#, r#"fn main() { unsafe { foo } }"#);
-        check_edit("unsafe", r#"fn main() { { foo }.$0 }"#, r#"fn main() { unsafe { foo } }"#);
+        postfix_completion_for_block("unsafe");
+    }
+
+    #[test]
+    fn postfix_completion_for_const() {
+        postfix_completion_for_block("const");
+    }
+
+    fn postfix_completion_for_block(kind: &str) {
+        check_edit(kind, r#"fn main() { foo.$0 }"#, &format!("fn main() {{ {kind} {{ foo }} }}"));
         check_edit(
-            "unsafe",
+            kind,
+            r#"fn main() { { foo }.$0 }"#,
+            &format!("fn main() {{ {kind} {{ foo }} }}"),
+        );
+        check_edit(
+            kind,
             r#"fn main() { if x { foo }.$0 }"#,
-            r#"fn main() { unsafe { if x { foo } } }"#,
+            &format!("fn main() {{ {kind} {{ if x {{ foo }} }} }}"),
         );
         check_edit(
-            "unsafe",
+            kind,
             r#"fn main() { loop { foo }.$0 }"#,
-            r#"fn main() { unsafe { loop { foo } } }"#,
+            &format!("fn main() {{ {kind} {{ loop {{ foo }} }} }}"),
         );
         check_edit(
-            "unsafe",
+            kind,
             r#"fn main() { if true {}.$0 }"#,
-            r#"fn main() { unsafe { if true {} } }"#,
+            &format!("fn main() {{ {kind} {{ if true {{}} }} }}"),
         );
         check_edit(
-            "unsafe",
+            kind,
             r#"fn main() { while true {}.$0 }"#,
-            r#"fn main() { unsafe { while true {} } }"#,
+            &format!("fn main() {{ {kind} {{ while true {{}} }} }}"),
         );
         check_edit(
-            "unsafe",
+            kind,
             r#"fn main() { for i in 0..10 {}.$0 }"#,
-            r#"fn main() { unsafe { for i in 0..10 {} } }"#,
+            &format!("fn main() {{ {kind} {{ for i in 0..10 {{}} }} }}"),
         );
         check_edit(
-            "unsafe",
+            kind,
             r#"fn main() { let x = if true {1} else {2}.$0 }"#,
-            r#"fn main() { let x = unsafe { if true {1} else {2} } }"#,
+            &format!("fn main() {{ let x = {kind} {{ if true {{1}} else {{2}} }} }}"),
         );
 
         // completion will not be triggered
         check_edit(
-            "unsafe",
+            kind,
             r#"fn main() { let x = true else {panic!()}.$0}"#,
-            r#"fn main() { let x = true else {panic!()}.unsafe $0}"#,
+            &format!("fn main() {{ let x = true else {{panic!()}}.{kind} $0}}"),
         );
     }
 
     #[test]
     fn custom_postfix_completion() {
         let config = CompletionConfig {
-            snippets: vec![Snippet::new(
-                &[],
-                &["break".into()],
-                &["ControlFlow::Break(${receiver})".into()],
-                "",
-                &["core::ops::ControlFlow".into()],
-                crate::SnippetScope::Expr,
-            )
-            .unwrap()],
+            snippets: vec![
+                Snippet::new(
+                    &[],
+                    &["break".into()],
+                    &["ControlFlow::Break(${receiver})".into()],
+                    "",
+                    &["core::ops::ControlFlow".into()],
+                    crate::SnippetScope::Expr,
+                )
+                .unwrap(),
+            ],
             ..TEST_CONFIG
         };
 
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix/format_like.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix/format_like.rs
index c612170eb54bc..7faa1139595f8 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix/format_like.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix/format_like.rs
@@ -17,15 +17,15 @@
 // ![Format String Completion](https://user-images.githubusercontent.com/48062697/113020656-b560f500-917a-11eb-87de-02991f61beb8.gif)
 
 use ide_db::{
-    syntax_helpers::format_string_exprs::{parse_format_exprs, with_placeholders, Arg},
     SnippetCap,
+    syntax_helpers::format_string_exprs::{Arg, parse_format_exprs, with_placeholders},
 };
-use syntax::{ast, AstToken};
+use syntax::{AstToken, ast};
 
 use crate::{
+    Completions,
     completions::postfix::{build_postfix_snippet_builder, escape_snippet_bits},
     context::CompletionContext,
-    Completions,
 };
 
 /// Mapping ("postfix completion item" => "macro to use")
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs
index d0c4c24d060f8..c18aab007b2cf 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs
@@ -1,14 +1,14 @@
 //! Complete fields in record literals and patterns.
 use ide_db::SymbolKind;
 use syntax::{
-    ast::{self, Expr},
     SmolStr,
+    ast::{self, Expr},
 };
 
 use crate::{
-    context::{DotAccess, DotAccessExprCtx, DotAccessKind, PatternContext},
     CompletionContext, CompletionItem, CompletionItemKind, CompletionRelevance,
     CompletionRelevancePostfixMatch, Completions,
+    context::{DotAccess, DotAccessExprCtx, DotAccessKind, PatternContext},
 };
 
 pub(crate) fn complete_record_pattern_fields(
@@ -144,8 +144,8 @@ mod tests {
     use ide_db::SnippetCap;
 
     use crate::{
-        tests::{check_edit, check_edit_with_config, TEST_CONFIG},
         CompletionConfig,
+        tests::{TEST_CONFIG, check_edit, check_edit_with_config},
     };
 
     #[test]
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/snippet.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/snippet.rs
index 357709e0c1fde..31aae11676228 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/snippet.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/snippet.rs
@@ -1,11 +1,11 @@
 //! This file provides snippet completions, like `pd` => `eprintln!(...)`.
 
-use ide_db::{documentation::Documentation, imports::insert_use::ImportScope, SnippetCap};
+use ide_db::{SnippetCap, documentation::Documentation, imports::insert_use::ImportScope};
 
 use crate::{
+    CompletionContext, CompletionItem, CompletionItemKind, Completions, SnippetScope,
     context::{ItemListKind, PathCompletionCtx, PathExprCtx, Qualified},
     item::Builder,
-    CompletionContext, CompletionItem, CompletionItemKind, Completions, SnippetScope,
 };
 
 pub(crate) fn complete_expr_snippet(
@@ -153,23 +153,25 @@ fn add_custom_completions(
 #[cfg(test)]
 mod tests {
     use crate::{
-        tests::{check_edit_with_config, TEST_CONFIG},
         CompletionConfig, Snippet,
+        tests::{TEST_CONFIG, check_edit_with_config},
     };
 
     #[test]
     fn custom_snippet_completion() {
         check_edit_with_config(
             CompletionConfig {
-                snippets: vec![Snippet::new(
-                    &["break".into()],
-                    &[],
-                    &["ControlFlow::Break(())".into()],
-                    "",
-                    &["core::ops::ControlFlow".into()],
-                    crate::SnippetScope::Expr,
-                )
-                .unwrap()],
+                snippets: vec![
+                    Snippet::new(
+                        &["break".into()],
+                        &[],
+                        &["ControlFlow::Break(())".into()],
+                        "",
+                        &["core::ops::ControlFlow".into()],
+                        crate::SnippetScope::Expr,
+                    )
+                    .unwrap(),
+                ],
                 ..TEST_CONFIG
             },
             "break",
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs
index b07148809323f..79db705af495d 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs
@@ -1,12 +1,12 @@
 //! Completion of names from the current scope in type position.
 
 use hir::{HirDisplay, ScopeDef};
-use syntax::{ast, AstNode};
+use syntax::{AstNode, ast};
 
 use crate::{
+    CompletionContext, Completions,
     context::{PathCompletionCtx, Qualified, TypeAscriptionTarget, TypeLocation},
     render::render_type_inference,
-    CompletionContext, Completions,
 };
 
 pub(crate) fn complete_type_path(
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs
index b384987c51ce1..4d6d0b758a381 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs
@@ -2,12 +2,12 @@
 
 use hir::ScopeDef;
 use ide_db::{FxHashSet, SymbolKind};
-use syntax::{ast, format_smolstr, AstNode};
+use syntax::{AstNode, ast, format_smolstr};
 
 use crate::{
+    CompletionItem, CompletionItemKind, CompletionRelevance, Completions,
     context::{CompletionContext, PathCompletionCtx, Qualified},
     item::Builder,
-    CompletionItem, CompletionItemKind, CompletionRelevance, Completions,
 };
 
 pub(crate) fn complete_use_path(
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/vis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/vis.rs
index 0ea5157fb46fe..d15c35ac84991 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/vis.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/vis.rs
@@ -1,8 +1,8 @@
 //! Completion for visibility specifiers.
 
 use crate::{
-    context::{CompletionContext, PathCompletionCtx, Qualified},
     Completions,
+    context::{CompletionContext, PathCompletionCtx, Qualified},
 };
 
 pub(crate) fn complete_vis_path(
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/config.rs b/src/tools/rust-analyzer/crates/ide-completion/src/config.rs
index 45aab38e8ea09..844fce5ef8019 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/config.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/config.rs
@@ -5,9 +5,9 @@
 //! completions if we are allowed to.
 
 use hir::ImportPathConfig;
-use ide_db::{imports::insert_use::InsertUseConfig, SnippetCap};
+use ide_db::{SnippetCap, imports::insert_use::InsertUseConfig};
 
-use crate::{snippet::Snippet, CompletionFieldsToResolve};
+use crate::{CompletionFieldsToResolve, snippet::Snippet};
 
 #[derive(Clone, Debug, PartialEq, Eq)]
 pub struct CompletionConfig<'a> {
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
index e686a29309461..3baf1f3de6109 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
@@ -6,25 +6,27 @@ mod tests;
 
 use std::{iter, ops::ControlFlow};
 
+use base_db::RootQueryDb as _;
 use hir::{
-    DisplayTarget, HasAttrs, Local, ModPath, ModuleDef, ModuleSource, Name, PathResolution,
-    ScopeDef, Semantics, SemanticsScope, Symbol, Type, TypeInfo,
+    DisplayTarget, HasAttrs, Local, ModuleDef, ModuleSource, Name, PathResolution, ScopeDef,
+    Semantics, SemanticsScope, Symbol, Type, TypeInfo,
 };
 use ide_db::{
-    base_db::SourceDatabase, famous_defs::FamousDefs, helpers::is_editable_crate, FilePosition,
-    FxHashMap, FxHashSet, RootDatabase,
+    FilePosition, FxHashMap, FxHashSet, RootDatabase, famous_defs::FamousDefs,
+    helpers::is_editable_crate,
 };
 use syntax::{
-    ast::{self, AttrKind, NameOrNameRef},
-    match_ast, AstNode, Edition, SmolStr,
+    AstNode, Edition, SmolStr,
     SyntaxKind::{self, *},
-    SyntaxToken, TextRange, TextSize, T,
+    SyntaxToken, T, TextRange, TextSize,
+    ast::{self, AttrKind, NameOrNameRef},
+    match_ast,
 };
 
 use crate::{
-    config::AutoImportExclusionType,
-    context::analysis::{expand_and_analyze, AnalysisResult},
     CompletionConfig,
+    config::AutoImportExclusionType,
+    context::analysis::{AnalysisResult, expand_and_analyze},
 };
 
 const COMPLETION_MARKER: &str = "raCompletionMarker";
@@ -675,11 +677,7 @@ impl CompletionContext<'_> {
             };
         }
 
-        if self.is_doc_hidden(attrs, defining_crate) {
-            Visible::No
-        } else {
-            Visible::Yes
-        }
+        if self.is_doc_hidden(attrs, defining_crate) { Visible::No } else { Visible::Yes }
     }
 
     pub(crate) fn is_doc_hidden(&self, attrs: &hir::Attrs, defining_crate: hir::Crate) -> bool {
@@ -706,15 +704,16 @@ impl<'a> CompletionContext<'a> {
         let _p = tracing::info_span!("CompletionContext::new").entered();
         let sema = Semantics::new(db);
 
-        let file_id = sema.attach_first_edition(file_id)?;
-        let original_file = sema.parse(file_id);
+        let editioned_file_id = sema.attach_first_edition(file_id)?;
+        let original_file = sema.parse(editioned_file_id);
 
         // Insert a fake ident to get a valid parse tree. We will use this file
         // to determine context, though the original_file will be used for
         // actual completion.
         let file_with_fake_ident = {
-            let parse = db.parse(file_id);
-            parse.reparse(TextRange::empty(offset), COMPLETION_MARKER, file_id.edition()).tree()
+            let (_, edition) = editioned_file_id.unpack(db);
+            let parse = db.parse(editioned_file_id);
+            parse.reparse(TextRange::empty(offset), COMPLETION_MARKER, edition).tree()
         };
 
         // always pick the token to the immediate left of the cursor, as that is what we are actually
@@ -794,15 +793,12 @@ impl<'a> CompletionContext<'a> {
             .exclude_traits
             .iter()
             .filter_map(|path| {
-                scope
-                    .resolve_mod_path(&ModPath::from_segments(
-                        hir::PathKind::Plain,
-                        path.split("::").map(Symbol::intern).map(Name::new_symbol_root),
-                    ))
-                    .find_map(|it| match it {
+                hir::resolve_absolute_path(db, path.split("::").map(Symbol::intern)).find_map(
+                    |it| match it {
                         hir::ItemInNs::Types(ModuleDef::Trait(t)) => Some(t),
                         _ => None,
-                    })
+                    },
+                )
             })
             .collect();
 
@@ -810,17 +806,14 @@ impl<'a> CompletionContext<'a> {
             .exclude_flyimport
             .iter()
             .flat_map(|(path, kind)| {
-                scope
-                    .resolve_mod_path(&ModPath::from_segments(
-                        hir::PathKind::Plain,
-                        path.split("::").map(Symbol::intern).map(Name::new_symbol_root),
-                    ))
+                hir::resolve_absolute_path(db, path.split("::").map(Symbol::intern))
                     .map(|it| (it.into_module_def(), *kind))
             })
             .collect();
         exclude_flyimport
             .extend(exclude_traits.iter().map(|&t| (t.into(), AutoImportExclusionType::Always)));
 
+        // FIXME: This should be part of `CompletionAnalysis` / `expand_and_analyze`
         let complete_semicolon = if config.add_semicolon_to_unit {
             let inside_closure_ret = token.parent_ancestors().try_for_each(|ancestor| {
                 match_ast! {
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
index 1a34548f70824..5959973589669 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
@@ -2,24 +2,28 @@
 use std::iter;
 
 use hir::{ExpandResult, Semantics, Type, TypeInfo, Variant};
-use ide_db::{active_parameter::ActiveParameter, RootDatabase};
+use ide_db::{RootDatabase, active_parameter::ActiveParameter};
 use itertools::Either;
 use syntax::{
-    algo::{self, ancestors_at_offset, find_node_at_offset, non_trivia_sibling},
+    AstNode, AstToken, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken,
+    T, TextRange, TextSize,
+    algo::{
+        self, ancestors_at_offset, find_node_at_offset, non_trivia_sibling,
+        previous_non_trivia_token,
+    },
     ast::{
         self, AttrKind, HasArgList, HasGenericArgs, HasGenericParams, HasLoopBody, HasName,
         NameOrNameRef,
     },
-    match_ast, AstNode, AstToken, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode,
-    SyntaxToken, TextRange, TextSize, T,
+    match_ast,
 };
 
 use crate::context::{
-    AttrCtx, BreakableKind, CompletionAnalysis, DotAccess, DotAccessExprCtx, DotAccessKind,
-    ItemListKind, LifetimeContext, LifetimeKind, NameContext, NameKind, NameRefContext,
-    NameRefKind, ParamContext, ParamKind, PathCompletionCtx, PathExprCtx, PathKind, PatternContext,
-    PatternRefutability, Qualified, QualifierCtx, TypeAscriptionTarget, TypeLocation,
-    COMPLETION_MARKER,
+    AttrCtx, BreakableKind, COMPLETION_MARKER, CompletionAnalysis, DotAccess, DotAccessExprCtx,
+    DotAccessKind, ItemListKind, LifetimeContext, LifetimeKind, NameContext, NameKind,
+    NameRefContext, NameRefKind, ParamContext, ParamKind, PathCompletionCtx, PathExprCtx, PathKind,
+    PatternContext, PatternRefutability, Qualified, QualifierCtx, TypeAscriptionTarget,
+    TypeLocation,
 };
 
 #[derive(Debug)]
@@ -1812,22 +1816,6 @@ fn is_in_block(node: &SyntaxNode) -> bool {
         .unwrap_or(false)
 }
 
-fn previous_non_trivia_token(e: impl Into<SyntaxElement>) -> Option<SyntaxToken> {
-    let mut token = match e.into() {
-        SyntaxElement::Node(n) => n.first_token()?,
-        SyntaxElement::Token(t) => t,
-    }
-    .prev_token();
-    while let Some(inner) = token {
-        if !inner.kind().is_trivia() {
-            return Some(inner);
-        } else {
-            token = inner.prev_token();
-        }
-    }
-    None
-}
-
 fn next_non_trivia_token(e: impl Into<SyntaxElement>) -> Option<SyntaxToken> {
     let mut token = match e.into() {
         SyntaxElement::Node(n) => n.last_token()?,
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs
index a03f632cdfdfa..75c20968e1e5f 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs
@@ -1,9 +1,9 @@
-use expect_test::{expect, Expect};
+use expect_test::{Expect, expect};
 use hir::HirDisplay;
 
 use crate::{
     context::CompletionContext,
-    tests::{position, TEST_CONFIG},
+    tests::{TEST_CONFIG, position},
 };
 
 fn check_expected_type_and_name(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
@@ -371,6 +371,17 @@ fn foo() {
 "#,
         expect![[r#"ty: Foo, name: ?"#]],
     );
+    check_expected_type_and_name(
+        r#"
+struct Foo { field: u32 }
+fn foo() {
+    Foo {
+        ..self::$0
+    }
+}
+"#,
+        expect!["ty: ?, name: ?"],
+    );
 }
 
 #[test]
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs
index 8d6dc4c801301..e208b9fd41ae2 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs
@@ -5,17 +5,17 @@ use std::{fmt, mem};
 use hir::Mutability;
 use ide_db::text_edit::TextEdit;
 use ide_db::{
-    documentation::Documentation, imports::import_assets::LocatedImport, RootDatabase, SnippetCap,
-    SymbolKind,
+    RootDatabase, SnippetCap, SymbolKind, documentation::Documentation,
+    imports::import_assets::LocatedImport,
 };
 use itertools::Itertools;
 use smallvec::SmallVec;
 use stdx::{format_to, impl_from, never};
-use syntax::{format_smolstr, Edition, SmolStr, TextRange, TextSize};
+use syntax::{Edition, SmolStr, TextRange, TextSize, format_smolstr};
 
 use crate::{
     context::{CompletionContext, PathCompletionCtx},
-    render::{render_path_resolution, RenderContext},
+    render::{RenderContext, render_path_resolution},
 };
 
 /// `CompletionItem` describes a single completion entity which expands to 1 or more entries in the
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs
index a990b39481a19..1fdd4cdb1c6bb 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs
@@ -11,10 +11,10 @@ mod snippet;
 mod tests;
 
 use ide_db::{
+    FilePosition, FxHashSet, RootDatabase,
     imports::insert_use::{self, ImportScope},
     syntax_helpers::tree_diff::diff,
     text_edit::TextEdit,
-    FilePosition, FxHashSet, RootDatabase,
 };
 use syntax::ast::make;
 
@@ -275,7 +275,9 @@ pub fn resolve_completion_edits(
     let _p = tracing::info_span!("resolve_completion_edits").entered();
     let sema = hir::Semantics::new(db);
 
-    let original_file = sema.parse(sema.attach_first_edition(file_id)?);
+    let editioned_file_id = sema.attach_first_edition(file_id)?;
+
+    let original_file = sema.parse(editioned_file_id);
     let original_token =
         syntax::AstNode::syntax(&original_file).token_at_offset(offset).left_biased()?;
     let position_for_import = &original_token.parent()?;
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
index c82905eddefbb..00c0b470f9875 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
@@ -10,17 +10,19 @@ pub(crate) mod type_alias;
 pub(crate) mod union_literal;
 pub(crate) mod variant;
 
-use hir::{sym, AsAssocItem, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type};
+use hir::{AsAssocItem, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type, sym};
 use ide_db::text_edit::TextEdit;
 use ide_db::{
+    RootDatabase, SnippetCap, SymbolKind,
     documentation::{Documentation, HasDocs},
     helpers::item_name,
     imports::import_assets::LocatedImport,
-    RootDatabase, SnippetCap, SymbolKind,
 };
-use syntax::{ast, format_smolstr, AstNode, SmolStr, SyntaxKind, TextRange, ToSmolStr};
+use syntax::{AstNode, SmolStr, SyntaxKind, TextRange, ToSmolStr, ast, format_smolstr};
 
 use crate::{
+    CompletionContext, CompletionItem, CompletionItemKind, CompletionItemRefMode,
+    CompletionRelevance,
     context::{DotAccess, DotAccessKind, PathCompletionCtx, PathKind, PatternContext},
     item::{Builder, CompletionRelevanceTypeMatch},
     render::{
@@ -28,8 +30,6 @@ use crate::{
         literal::render_variant_lit,
         macro_::{render_macro, render_macro_pat},
     },
-    CompletionContext, CompletionItem, CompletionItemKind, CompletionItemRefMode,
-    CompletionRelevance,
 };
 /// Interface for data and methods required for items rendering.
 #[derive(Debug, Clone)]
@@ -92,7 +92,7 @@ impl<'a> RenderContext<'a> {
 
     fn is_deprecated(&self, def: impl HasAttrs) -> bool {
         let attrs = def.attrs(self.db());
-        attrs.by_key(&sym::deprecated).exists()
+        attrs.by_key(sym::deprecated).exists()
     }
 
     fn is_deprecated_assoc_item(&self, as_assoc_item: impl AsAssocItem) -> bool {
@@ -334,7 +334,7 @@ pub(crate) fn render_expr(
             continue;
         };
 
-        item.add_import(LocatedImport::new(path, trait_item, trait_item));
+        item.add_import(LocatedImport::new_no_completion(path, trait_item, trait_item));
     }
 
     Some(item)
@@ -683,14 +683,14 @@ fn path_ref_match(
 mod tests {
     use std::cmp;
 
-    use expect_test::{expect, Expect};
+    use expect_test::{Expect, expect};
     use ide_db::SymbolKind;
     use itertools::Itertools;
 
     use crate::{
-        item::CompletionRelevanceTypeMatch,
-        tests::{check_edit, do_completion, get_all_items, TEST_CONFIG},
         CompletionItem, CompletionItemKind, CompletionRelevance, CompletionRelevancePostfixMatch,
+        item::CompletionRelevanceTypeMatch,
+        tests::{TEST_CONFIG, check_edit, do_completion, get_all_items},
     };
 
     #[track_caller]
@@ -1276,6 +1276,53 @@ fn main() { fo$0 }
         );
     }
 
+    #[test]
+    fn fn_detail_includes_variadics() {
+        check(
+            r#"
+unsafe extern "C" fn foo(a: u32, b: u32, ...) {}
+
+fn main() { fo$0 }
+"#,
+            SymbolKind::Function,
+            expect![[r#"
+                [
+                    CompletionItem {
+                        label: "foo(…)",
+                        detail_left: None,
+                        detail_right: Some(
+                            "unsafe fn(u32, u32, ...)",
+                        ),
+                        source_range: 62..64,
+                        delete: 62..64,
+                        insert: "foo(${1:a}, ${2:b});$0",
+                        kind: SymbolKind(
+                            Function,
+                        ),
+                        lookup: "foo",
+                        detail: "unsafe fn(u32, u32, ...)",
+                        trigger_call_info: true,
+                    },
+                    CompletionItem {
+                        label: "main()",
+                        detail_left: None,
+                        detail_right: Some(
+                            "fn()",
+                        ),
+                        source_range: 62..64,
+                        delete: 62..64,
+                        insert: "main();$0",
+                        kind: SymbolKind(
+                            Function,
+                        ),
+                        lookup: "main",
+                        detail: "fn()",
+                    },
+                ]
+            "#]],
+        );
+    }
+
     #[test]
     fn enum_detail_just_name_for_unit() {
         check(
@@ -2042,8 +2089,8 @@ fn f() { A { bar: b$0 }; }
             expect![[r#"
                 fn bar() fn() -> u8 [type+name]
                 fn baz() fn() -> u8 [type]
-                ex bar()  [type]
                 ex baz()  [type]
+                ex bar()  [type]
                 st A A []
                 fn f() fn() []
             "#]],
@@ -2773,14 +2820,13 @@ fn foo(f: Foo) { let _: &u32 = f.b$0 }
                                 Indel {
                                     insert: "(",
                                     delete: 107..107,
-                                    annotation: None,
                                 },
                                 Indel {
                                     insert: "qux)()",
                                     delete: 109..110,
-                                    annotation: None,
                                 },
                             ],
+                            annotation: None,
                         },
                         kind: SymbolKind(
                             Field,
@@ -2960,6 +3006,7 @@ fn main() {
                 sn refm &mut expr []
                 sn deref *expr []
                 sn unsafe unsafe {} []
+                sn const const {} []
                 sn match match expr {} []
                 sn box Box::new(expr) []
                 sn dbg dbg!(expr) []
@@ -2990,6 +3037,7 @@ fn main() {
                 sn refm &mut expr []
                 sn deref *expr []
                 sn unsafe unsafe {} []
+                sn const const {} []
                 sn match match expr {} []
                 sn box Box::new(expr) []
                 sn dbg dbg!(expr) []
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs
index 4693bdc047f97..2fe517fa8cd01 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs
@@ -1,12 +1,13 @@
 //! Renderer for function calls.
 
-use hir::{db::HirDatabase, AsAssocItem, HirDisplay};
+use hir::{AsAssocItem, HirDisplay, db::HirDatabase};
 use ide_db::{SnippetCap, SymbolKind};
 use itertools::Itertools;
 use stdx::{format_to, to_lower_snake_case};
-use syntax::{format_smolstr, AstNode, SmolStr, ToSmolStr};
+use syntax::{AstNode, SmolStr, ToSmolStr, format_smolstr};
 
 use crate::{
+    CallableSnippets,
     context::{
         CompleteSemicolon, CompletionContext, DotAccess, DotAccessKind, PathCompletionCtx, PathKind,
     },
@@ -15,9 +16,8 @@ use crate::{
         CompletionRelevanceReturnType, CompletionRelevanceTraitInfo,
     },
     render::{
-        compute_exact_name_match, compute_ref_match, compute_type_match, match_types, RenderContext,
+        RenderContext, compute_exact_name_match, compute_ref_match, compute_type_match, match_types,
     },
-    CallableSnippets,
 };
 
 #[derive(Debug)]
@@ -293,11 +293,7 @@ fn ref_of_param(ctx: &CompletionContext<'_>, arg: &str, ty: &hir::Type) -> &'sta
         for (name, local) in ctx.locals.iter().sorted_by_key(|&(k, _)| k.clone()) {
             if name.as_str() == arg {
                 return if local.ty(ctx.db) == derefed_ty {
-                    if ty.is_mutable_reference() {
-                        "&mut "
-                    } else {
-                        "&"
-                    }
+                    if ty.is_mutable_reference() { "&mut " } else { "&" }
                 } else {
                     ""
                 };
@@ -324,7 +320,9 @@ fn detail(ctx: &CompletionContext<'_>, func: hir::Function) -> String {
         format_to!(detail, "unsafe ");
     }
 
-    format_to!(detail, "fn({})", params_display(ctx, func));
+    detail.push_str("fn(");
+    params_display(ctx, &mut detail, func);
+    detail.push(')');
     if !ret_ty.is_unit() {
         format_to!(detail, " -> {}", ret_ty.display(ctx.db, ctx.display_target));
     }
@@ -346,24 +344,28 @@ fn detail_full(ctx: &CompletionContext<'_>, func: hir::Function) -> String {
     detail
 }
 
-fn params_display(ctx: &CompletionContext<'_>, func: hir::Function) -> String {
+fn params_display(ctx: &CompletionContext<'_>, detail: &mut String, func: hir::Function) {
     if let Some(self_param) = func.self_param(ctx.db) {
+        format_to!(detail, "{}", self_param.display(ctx.db, ctx.display_target));
         let assoc_fn_params = func.assoc_fn_params(ctx.db);
         let params = assoc_fn_params
             .iter()
             .skip(1) // skip the self param because we are manually handling that
             .map(|p| p.ty().display(ctx.db, ctx.display_target));
-        format!(
-            "{}{}",
-            self_param.display(ctx.db, ctx.display_target),
-            params.format_with("", |display, f| {
-                f(&", ")?;
-                f(&display)
-            })
-        )
+        for param in params {
+            format_to!(detail, ", {}", param);
+        }
     } else {
         let assoc_fn_params = func.assoc_fn_params(ctx.db);
-        assoc_fn_params.iter().map(|p| p.ty().display(ctx.db, ctx.display_target)).join(", ")
+        format_to!(
+            detail,
+            "{}",
+            assoc_fn_params.iter().map(|p| p.ty().display(ctx.db, ctx.display_target)).format(", ")
+        );
+    }
+
+    if func.is_varargs(ctx.db) {
+        detail.push_str(", ...");
     }
 }
 
@@ -398,8 +400,8 @@ fn params(
 #[cfg(test)]
 mod tests {
     use crate::{
-        tests::{check_edit, check_edit_with_config, TEST_CONFIG},
         CallableSnippets, CompletionConfig,
+        tests::{TEST_CONFIG, check_edit, check_edit_with_config},
     };
 
     #[test]
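A short sketch of the detail string the reworked `params_display` above ends up producing, assuming plain strings for the parameter types rather than the crate's HirDisplay machinery, and with the return-type suffix omitted: parameter types are joined with ", ", then ", ..." is appended for C-variadic functions before the closing parenthesis, which is what the new `fn_detail_includes_variadics` expectation ("unsafe fn(u32, u32, ...)") checks.

// Illustrative only: assembles the completion detail the same way the
// hunk above does, but from pre-rendered type strings.
fn render_detail(is_unsafe: bool, param_tys: &[&str], is_varargs: bool) -> String {
    let mut detail = String::new();
    if is_unsafe {
        detail.push_str("unsafe ");
    }
    detail.push_str("fn(");
    detail.push_str(&param_tys.join(", "));
    if is_varargs {
        detail.push_str(", ...");
    }
    detail.push(')');
    detail
}

// render_detail(true, &["u32", "u32"], true) == "unsafe fn(u32, u32, ...)"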
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs
index ffda52fb47852..5a9e35a7290bf 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs
@@ -1,23 +1,22 @@
 //! Renderer for `enum` variants.
 
-use hir::{db::HirDatabase, StructKind};
+use hir::{StructKind, db::HirDatabase};
 use ide_db::{
-    documentation::{Documentation, HasDocs},
     SymbolKind,
+    documentation::{Documentation, HasDocs},
 };
 
 use crate::{
+    CompletionItemKind, CompletionRelevance, CompletionRelevanceReturnType,
     context::{CompletionContext, PathCompletionCtx, PathKind},
     item::{Builder, CompletionItem, CompletionRelevanceFn},
     render::{
-        compute_type_match,
+        RenderContext, compute_type_match,
         variant::{
-            format_literal_label, format_literal_lookup, render_record_lit, render_tuple_lit,
-            visible_fields, RenderedLiteral,
+            RenderedLiteral, format_literal_label, format_literal_lookup, render_record_lit,
+            render_tuple_lit, visible_fields,
         },
-        RenderContext,
     },
-    CompletionItemKind, CompletionRelevance, CompletionRelevanceReturnType,
 };
 
 pub(crate) fn render_variant_lit(
@@ -164,11 +163,7 @@ impl Variant {
             Variant::Struct(it) => visible_fields(ctx, &fields, it)?,
             Variant::EnumVariant(it) => visible_fields(ctx, &fields, it)?,
         };
-        if !fields_omitted {
-            Some(visible_fields)
-        } else {
-            None
-        }
+        if !fields_omitted { Some(visible_fields) } else { None }
     }
 
     fn kind(self, db: &dyn HirDatabase) -> StructKind {
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs
index 8b2476d153f1f..4674dae031440 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs
@@ -1,8 +1,8 @@
 //! Renderer for macro invocations.
 
 use hir::HirDisplay;
-use ide_db::{documentation::Documentation, SymbolKind};
-use syntax::{format_smolstr, SmolStr, ToSmolStr};
+use ide_db::{SymbolKind, documentation::Documentation};
+use syntax::{SmolStr, ToSmolStr, format_smolstr};
 
 use crate::{
     context::{PathCompletionCtx, PathKind, PatternContext},
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs
index 124abb17b6a1c..dcc51a86a8edc 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs
@@ -1,17 +1,17 @@
 //! Renderer for patterns.
 
-use hir::{db::HirDatabase, Name, StructKind};
-use ide_db::{documentation::HasDocs, SnippetCap};
+use hir::{Name, StructKind, db::HirDatabase};
+use ide_db::{SnippetCap, documentation::HasDocs};
 use itertools::Itertools;
 use syntax::{Edition, SmolStr, ToSmolStr};
 
 use crate::{
+    CompletionItem, CompletionItemKind,
     context::{ParamContext, ParamKind, PathCompletionCtx, PatternContext},
     render::{
-        variant::{format_literal_label, format_literal_lookup, visible_fields},
         RenderContext,
+        variant::{format_literal_label, format_literal_lookup, visible_fields},
     },
-    CompletionItem, CompletionItemKind,
 };
 
 pub(crate) fn render_struct_pat(
@@ -64,11 +64,11 @@ pub(crate) fn render_variant_pat(
         ),
         None => {
             let name = local_name.unwrap_or_else(|| variant.name(ctx.db()));
-            let it = (
+
+            (
                 name.as_str().to_smolstr(),
                 name.display(ctx.db(), ctx.completion.edition).to_smolstr(),
-            );
-            it
+            )
         }
     };
 
@@ -191,7 +191,7 @@ fn render_record_as_pat(
             format!(
                 "{name} {{ {}{} }}",
                 fields.enumerate().format_with(", ", |(idx, field), f| {
-                    f(&format_args!("{}${}", field.name(db).display(db.upcast(), edition), idx + 1))
+                    f(&format_args!("{}${}", field.name(db).display(db, edition), idx + 1))
                 }),
                 if fields_omitted { ", .." } else { "" },
                 name = name
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/union_literal.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/union_literal.rs
index 09154e81c0304..23f0d4e06f2c8 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render/union_literal.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/union_literal.rs
@@ -6,11 +6,11 @@ use itertools::Itertools;
 use syntax::ToSmolStr;
 
 use crate::{
+    CompletionItem, CompletionItemKind,
     render::{
-        variant::{format_literal_label, format_literal_lookup, visible_fields},
         RenderContext,
+        variant::{format_literal_label, format_literal_lookup, visible_fields},
     },
-    CompletionItem, CompletionItemKind,
 };
 
 pub(crate) fn render_union_literal(
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs
index 83718e57229a5..42324b4290a77 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs
@@ -1,7 +1,7 @@
 //! Code common to structs, unions, and enum variants.
 
 use crate::context::CompletionContext;
-use hir::{sym, HasAttrs, HasCrate, HasVisibility, HirDisplay, StructKind};
+use hir::{HasAttrs, HasCrate, HasVisibility, HirDisplay, StructKind, sym};
 use ide_db::SnippetCap;
 use itertools::Itertools;
 use syntax::SmolStr;
@@ -96,7 +96,7 @@ pub(crate) fn visible_fields(
         .copied()
         .collect::<Vec<_>>();
     let has_invisible_field = n_fields - fields.len() > 0;
-    let is_foreign_non_exhaustive = item.attrs(ctx.db).by_key(&sym::non_exhaustive).exists()
+    let is_foreign_non_exhaustive = item.attrs(ctx.db).by_key(sym::non_exhaustive).exists()
         && item.krate(ctx.db) != module.krate();
     let fields_omitted = has_invisible_field || is_foreign_non_exhaustive;
     Some((fields, fields_omitted))
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs b/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs
index 07f33a826e4c7..9dc0c0234dc56 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs
@@ -174,7 +174,7 @@ fn import_edits(ctx: &CompletionContext<'_>, requires: &[ModPath]) -> Option<Vec
             ctx.config.insert_use.prefix_kind,
             import_cfg,
         )?;
-        Some((path.len() > 1).then(|| LocatedImport::new(path.clone(), item, item)))
+        Some((path.len() > 1).then(|| LocatedImport::new_no_completion(path.clone(), item, item)))
     };
     let mut res = Vec::with_capacity(requires.len());
     for import in requires {
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
index 9d91f95eb65b8..fdc3d9a13bc92 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
@@ -28,8 +28,8 @@ use base_db::SourceDatabase;
 use expect_test::Expect;
 use hir::PrefixKind;
 use ide_db::{
-    imports::insert_use::{ImportGranularity, InsertUseConfig},
     FilePosition, RootDatabase, SnippetCap,
+    imports::insert_use::{ImportGranularity, InsertUseConfig},
 };
 use itertools::Itertools;
 use stdx::{format_to, trim_indent};
@@ -37,8 +37,8 @@ use test_fixture::ChangeFixture;
 use test_utils::assert_eq_text;
 
 use crate::{
-    resolve_completion_edits, CallableSnippets, CompletionConfig, CompletionFieldsToResolve,
-    CompletionItem, CompletionItemKind,
+    CallableSnippets, CompletionConfig, CompletionFieldsToResolve, CompletionItem,
+    CompletionItemKind, resolve_completion_edits,
 };
 
 /// Lots of basic item definitions
@@ -155,13 +155,14 @@ fn completion_list_with_config(
 pub(crate) fn position(
     #[rust_analyzer::rust_fixture] ra_fixture: &str,
 ) -> (RootDatabase, FilePosition) {
-    let change_fixture = ChangeFixture::parse(ra_fixture);
     let mut database = RootDatabase::default();
+    let change_fixture = ChangeFixture::parse(&database, ra_fixture);
     database.enable_proc_attr_macros();
     database.apply_change(change_fixture.change);
     let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
     let offset = range_or_offset.expect_offset();
-    (database, FilePosition { file_id: file_id.file_id(), offset })
+    let position = FilePosition { file_id: file_id.file_id(&database), offset };
+    (database, position)
 }
 
 pub(crate) fn do_completion(code: &str, kind: CompletionItemKind) -> Vec<CompletionItem> {
@@ -246,7 +247,7 @@ pub(crate) fn check_edit_with_config(
         .filter(|it| it.lookup() == what)
         .collect_tuple()
         .unwrap_or_else(|| panic!("can't find {what:?} completion in {completions:#?}"));
-    let mut actual = db.file_text(position.file_id).to_string();
+    let mut actual = db.file_text(position.file_id).text(&db).to_string();
 
     let mut combined_edit = completion.text_edit.clone();
 
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs
index 9b3c676c48a1f..b30ac43bf8fbe 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs
@@ -1,13 +1,13 @@
 //! Completion tests for expressions.
-use expect_test::{expect, Expect};
+use expect_test::{Expect, expect};
 
 use crate::{
+    CompletionConfig,
     config::AutoImportExclusionType,
     tests::{
-        check, check_edit, check_with_base_items, completion_list_with_config, BASE_ITEMS_FIXTURE,
-        TEST_CONFIG,
+        BASE_ITEMS_FIXTURE, TEST_CONFIG, check, check_edit, check_with_base_items,
+        completion_list_with_config,
     },
-    CompletionConfig,
 };
 
 fn check_with_config(
@@ -58,6 +58,7 @@ fn baz() {
             un Union                  Union
             ev TupleV(…)        TupleV(u32)
             bt u32                      u32
+            kw const
             kw crate::
             kw false
             kw for
@@ -101,6 +102,7 @@ fn func(param0 @ (param1, param2): (i32, i32)) {
             lc param1             i32
             lc param2             i32
             bt u32                u32
+            kw const
             kw crate::
             kw false
             kw for
@@ -169,6 +171,7 @@ impl Unit {
             kw if
             kw if let
             kw impl
+            kw impl for
             kw let
             kw letm
             kw loop
@@ -247,6 +250,7 @@ fn complete_in_block() {
             kw if
             kw if let
             kw impl
+            kw impl for
             kw let
             kw letm
             kw loop
@@ -298,6 +302,7 @@ fn complete_after_if_expr() {
             kw if
             kw if let
             kw impl
+            kw impl for
             kw let
             kw letm
             kw loop
@@ -335,6 +340,7 @@ fn complete_in_match_arm() {
         expect![[r#"
             fn foo() fn()
             bt u32    u32
+            kw const
             kw crate::
             kw false
             kw for
@@ -372,6 +378,7 @@ fn completes_in_loop_ctx() {
             kw if
             kw if let
             kw impl
+            kw impl for
             kw let
             kw letm
             kw loop
@@ -400,6 +407,7 @@ fn completes_in_loop_ctx() {
             sn box  Box::new(expr)
             sn break    break expr
             sn call function(expr)
+            sn const      const {}
             sn dbg      dbg!(expr)
             sn dbgr    dbg!(&expr)
             sn deref         *expr
@@ -424,6 +432,7 @@ fn completes_in_let_initializer() {
         expect![[r#"
             fn main() fn()
             bt u32     u32
+            kw const
             kw crate::
             kw false
             kw for
@@ -448,6 +457,7 @@ fn completes_after_ref_expr() {
         expect![[r#"
             fn main() fn()
             bt u32     u32
+            kw const
             kw crate::
             kw false
             kw for
@@ -471,6 +481,7 @@ fn completes_after_ref_expr() {
             fn main() fn()
             bt u32     u32
             kw const
+            kw const
             kw crate::
             kw false
             kw for
@@ -492,6 +503,7 @@ fn completes_after_ref_expr() {
         expect![[r#"
             fn main() fn()
             bt u32     u32
+            kw const
             kw crate::
             kw false
             kw for
@@ -512,6 +524,7 @@ fn completes_after_ref_expr() {
         expect![[r#"
             fn main() fn()
             bt u32     u32
+            kw const
             kw crate::
             kw false
             kw for
@@ -532,6 +545,7 @@ fn completes_after_ref_expr() {
         expect![[r#"
             fn main() fn()
             bt u32     u32
+            kw const
             kw crate::
             kw false
             kw for
@@ -566,6 +580,7 @@ fn foo() {
             fn foo() fn()
             st Foo    Foo
             bt u32    u32
+            kw const
             kw crate::
             kw false
             kw for
@@ -601,6 +616,7 @@ fn foo() {
             fn foo() fn()
             lc bar    i32
             bt u32    u32
+            kw const
             kw crate::
             kw false
             kw for
@@ -632,6 +648,7 @@ fn quux(x: i32) {
             lc x                i32
             ma m!(…) macro_rules! m
             bt u32              u32
+            kw const
             kw crate::
             kw false
             kw for
@@ -659,6 +676,7 @@ fn quux(x: i32) {
             lc x                i32
             ma m!(…) macro_rules! m
             bt u32              u32
+            kw const
             kw crate::
             kw false
             kw for
@@ -688,6 +706,7 @@ fn quux(x: i32) {
             lc y                i32
             ma m!(…) macro_rules! m
             bt u32              u32
+            kw const
             kw crate::
             kw false
             kw for
@@ -870,6 +889,7 @@ fn brr() {
             st YoloVariant                  YoloVariant
             st YoloVariant {…} YoloVariant { f: usize }
             bt u32                                  u32
+            kw const
             kw crate::
             kw false
             kw for
@@ -945,6 +965,7 @@ fn foo() { if foo {} $0 }
             kw if
             kw if let
             kw impl
+            kw impl for
             kw let
             kw letm
             kw loop
@@ -987,6 +1008,7 @@ fn foo() { if foo {} el$0 }
             kw if
             kw if let
             kw impl
+            kw impl for
             kw let
             kw letm
             kw loop
@@ -1016,6 +1038,7 @@ fn foo() { bar(if foo {} $0) }
         expect![[r#"
             fn foo() fn()
             bt u32    u32
+            kw const
             kw crate::
             kw else
             kw else if
@@ -1040,6 +1063,7 @@ fn foo() { bar(if foo {} el$0) }
         expect![[r#"
             fn foo() fn()
             bt u32    u32
+            kw const
             kw crate::
             kw else
             kw else if
@@ -1077,6 +1101,7 @@ fn foo() { if foo {} $0 let x = 92; }
             kw if
             kw if let
             kw impl
+            kw impl for
             kw let
             kw letm
             kw loop
@@ -1119,6 +1144,7 @@ fn foo() { if foo {} el$0 let x = 92; }
             kw if
             kw if let
             kw impl
+            kw impl for
             kw let
             kw letm
             kw loop
@@ -1161,6 +1187,7 @@ fn foo() { if foo {} el$0 { let x = 92; } }
             kw if
             kw if let
             kw impl
+            kw impl for
             kw let
             kw letm
             kw loop
@@ -1213,6 +1240,7 @@ pub struct UnstableThisShouldNotBeListed;
             kw if
             kw if let
             kw impl
+            kw impl for
             kw let
             kw letm
             kw loop
@@ -1267,6 +1295,7 @@ pub struct UnstableButWeAreOnNightlyAnyway;
             kw if
             kw if let
             kw impl
+            kw impl for
             kw let
             kw letm
             kw loop
@@ -1309,6 +1338,7 @@ fn main() {
             me foo()     fn(&self)
             sn box  Box::new(expr)
             sn call function(expr)
+            sn const      const {}
             sn dbg      dbg!(expr)
             sn dbgr    dbg!(&expr)
             sn deref         *expr
@@ -1335,6 +1365,7 @@ fn main() {
             me foo()     fn(&self)
             sn box  Box::new(expr)
             sn call function(expr)
+            sn const      const {}
             sn dbg      dbg!(expr)
             sn dbgr    dbg!(&expr)
             sn deref         *expr
@@ -1365,6 +1396,7 @@ fn main() {
             me foo()     fn(&self)
             sn box  Box::new(expr)
             sn call function(expr)
+            sn const      const {}
             sn dbg      dbg!(expr)
             sn dbgr    dbg!(&expr)
             sn deref         *expr
@@ -1391,6 +1423,7 @@ fn main() {
             me foo()     fn(&self)
             sn box  Box::new(expr)
             sn call function(expr)
+            sn const      const {}
             sn dbg      dbg!(expr)
             sn dbgr    dbg!(&expr)
             sn deref         *expr
@@ -1417,6 +1450,7 @@ fn main() {
             me foo()     fn(&self)
             sn box  Box::new(expr)
             sn call function(expr)
+            sn const      const {}
             sn dbg      dbg!(expr)
             sn dbgr    dbg!(&expr)
             sn deref         *expr
@@ -1442,6 +1476,7 @@ fn main() {
         expect![[r#"
             sn box  Box::new(expr)
             sn call function(expr)
+            sn const      const {}
             sn dbg      dbg!(expr)
             sn dbgr    dbg!(&expr)
             sn deref         *expr
@@ -1505,6 +1540,7 @@ fn main() {
             kw if
             kw if let
             kw impl
+            kw impl for
             kw let
             kw letm
             kw loop
@@ -1531,7 +1567,10 @@ fn main() {
 #[test]
 fn excluded_trait_method_is_excluded() {
     check_with_config(
-        CompletionConfig { exclude_traits: &["test::ExcludedTrait".to_owned()], ..TEST_CONFIG },
+        CompletionConfig {
+            exclude_traits: &["ra_test_fixture::ExcludedTrait".to_owned()],
+            ..TEST_CONFIG
+        },
         r#"
 trait ExcludedTrait {
     fn foo(&self) {}
@@ -1551,22 +1590,20 @@ fn foo() {
 }
         "#,
         expect![[r#"
-            me bar() (as ExcludedTrait) fn(&self)
-            me baz() (as ExcludedTrait) fn(&self)
-            me foo() (as ExcludedTrait) fn(&self)
-            me inherent()               fn(&self)
-            sn box                 Box::new(expr)
-            sn call                function(expr)
-            sn dbg                     dbg!(expr)
-            sn dbgr                   dbg!(&expr)
-            sn deref                        *expr
-            sn let                            let
-            sn letm                       let mut
-            sn match                match expr {}
-            sn ref                          &expr
-            sn refm                     &mut expr
-            sn return                 return expr
-            sn unsafe                   unsafe {}
+            me inherent() fn(&self)
+            sn box   Box::new(expr)
+            sn call  function(expr)
+            sn const       const {}
+            sn dbg       dbg!(expr)
+            sn dbgr     dbg!(&expr)
+            sn deref          *expr
+            sn let              let
+            sn letm         let mut
+            sn match  match expr {}
+            sn ref            &expr
+            sn refm       &mut expr
+            sn return   return expr
+            sn unsafe     unsafe {}
         "#]],
     );
 }
@@ -1574,7 +1611,10 @@ fn foo() {
 #[test]
 fn excluded_trait_not_excluded_when_inherent() {
     check_with_config(
-        CompletionConfig { exclude_traits: &["test::ExcludedTrait".to_owned()], ..TEST_CONFIG },
+        CompletionConfig {
+            exclude_traits: &["ra_test_fixture::ExcludedTrait".to_owned()],
+            ..TEST_CONFIG
+        },
         r#"
 trait ExcludedTrait {
     fn foo(&self) {}
@@ -1594,6 +1634,7 @@ fn foo(v: &dyn ExcludedTrait) {
             me foo() (as ExcludedTrait) fn(&self)
             sn box                 Box::new(expr)
             sn call                function(expr)
+            sn const                     const {}
             sn dbg                     dbg!(expr)
             sn dbgr                   dbg!(&expr)
             sn deref                        *expr
@@ -1607,7 +1648,10 @@ fn foo(v: &dyn ExcludedTrait) {
         "#]],
     );
     check_with_config(
-        CompletionConfig { exclude_traits: &["test::ExcludedTrait".to_owned()], ..TEST_CONFIG },
+        CompletionConfig {
+            exclude_traits: &["ra_test_fixture::ExcludedTrait".to_owned()],
+            ..TEST_CONFIG
+        },
         r#"
 trait ExcludedTrait {
     fn foo(&self) {}
@@ -1627,6 +1671,7 @@ fn foo(v: impl ExcludedTrait) {
             me foo() (as ExcludedTrait) fn(&self)
             sn box                 Box::new(expr)
             sn call                function(expr)
+            sn const                     const {}
             sn dbg                     dbg!(expr)
             sn dbgr                   dbg!(&expr)
             sn deref                        *expr
@@ -1640,7 +1685,10 @@ fn foo(v: impl ExcludedTrait) {
         "#]],
     );
     check_with_config(
-        CompletionConfig { exclude_traits: &["test::ExcludedTrait".to_owned()], ..TEST_CONFIG },
+        CompletionConfig {
+            exclude_traits: &["ra_test_fixture::ExcludedTrait".to_owned()],
+            ..TEST_CONFIG
+        },
         r#"
 trait ExcludedTrait {
     fn foo(&self) {}
@@ -1660,6 +1708,7 @@ fn foo<T: ExcludedTrait>(v: T) {
             me foo() (as ExcludedTrait) fn(&self)
             sn box                 Box::new(expr)
             sn call                function(expr)
+            sn const                     const {}
             sn dbg                     dbg!(expr)
             sn dbgr                   dbg!(&expr)
             sn deref                        *expr
@@ -1678,7 +1727,7 @@ fn foo<T: ExcludedTrait>(v: T) {
 fn excluded_trait_method_is_excluded_from_flyimport() {
     check_with_config(
         CompletionConfig {
-            exclude_traits: &["test::module2::ExcludedTrait".to_owned()],
+            exclude_traits: &["ra_test_fixture::module2::ExcludedTrait".to_owned()],
             ..TEST_CONFIG
         },
         r#"
@@ -1702,22 +1751,20 @@ fn foo() {
 }
         "#,
         expect![[r#"
-            me bar() (use module2::ExcludedTrait) fn(&self)
-            me baz() (use module2::ExcludedTrait) fn(&self)
-            me foo() (use module2::ExcludedTrait) fn(&self)
-            me inherent()                         fn(&self)
-            sn box                           Box::new(expr)
-            sn call                          function(expr)
-            sn dbg                               dbg!(expr)
-            sn dbgr                             dbg!(&expr)
-            sn deref                                  *expr
-            sn let                                      let
-            sn letm                                 let mut
-            sn match                          match expr {}
-            sn ref                                    &expr
-            sn refm                               &mut expr
-            sn return                           return expr
-            sn unsafe                             unsafe {}
+            me inherent() fn(&self)
+            sn box   Box::new(expr)
+            sn call  function(expr)
+            sn const       const {}
+            sn dbg       dbg!(expr)
+            sn dbgr     dbg!(&expr)
+            sn deref          *expr
+            sn let              let
+            sn letm         let mut
+            sn match  match expr {}
+            sn ref            &expr
+            sn refm       &mut expr
+            sn return   return expr
+            sn unsafe     unsafe {}
         "#]],
     );
 }
@@ -1727,7 +1774,7 @@ fn flyimport_excluded_trait_method_is_excluded_from_flyimport() {
     check_with_config(
         CompletionConfig {
             exclude_flyimport: vec![(
-                "test::module2::ExcludedTrait".to_owned(),
+                "ra_test_fixture::module2::ExcludedTrait".to_owned(),
                 AutoImportExclusionType::Methods,
             )],
             ..TEST_CONFIG
@@ -1753,22 +1800,20 @@ fn foo() {
 }
         "#,
         expect![[r#"
-            me bar() (use module2::ExcludedTrait) fn(&self)
-            me baz() (use module2::ExcludedTrait) fn(&self)
-            me foo() (use module2::ExcludedTrait) fn(&self)
-            me inherent()                         fn(&self)
-            sn box                           Box::new(expr)
-            sn call                          function(expr)
-            sn dbg                               dbg!(expr)
-            sn dbgr                             dbg!(&expr)
-            sn deref                                  *expr
-            sn let                                      let
-            sn letm                                 let mut
-            sn match                          match expr {}
-            sn ref                                    &expr
-            sn refm                               &mut expr
-            sn return                           return expr
-            sn unsafe                             unsafe {}
+            me inherent() fn(&self)
+            sn box   Box::new(expr)
+            sn call  function(expr)
+            sn const       const {}
+            sn dbg       dbg!(expr)
+            sn dbgr     dbg!(&expr)
+            sn deref          *expr
+            sn let              let
+            sn letm         let mut
+            sn match  match expr {}
+            sn ref            &expr
+            sn refm       &mut expr
+            sn return   return expr
+            sn unsafe     unsafe {}
         "#]],
     );
 }
@@ -1776,7 +1821,10 @@ fn foo() {
 #[test]
 fn excluded_trait_method_is_excluded_from_path_completion() {
     check_with_config(
-        CompletionConfig { exclude_traits: &["test::ExcludedTrait".to_owned()], ..TEST_CONFIG },
+        CompletionConfig {
+            exclude_traits: &["ra_test_fixture::ExcludedTrait".to_owned()],
+            ..TEST_CONFIG
+        },
         r#"
 pub trait ExcludedTrait {
     fn foo(&self) {}
@@ -1796,10 +1844,7 @@ fn foo() {
 }
         "#,
         expect![[r#"
-            me bar(…) (as ExcludedTrait) fn(&self)
-            me baz(…) (as ExcludedTrait) fn(&self)
-            me foo(…) (as ExcludedTrait) fn(&self)
-            me inherent(…)               fn(&self)
+            me inherent(…) fn(&self)
         "#]],
     );
 }
@@ -1807,7 +1852,10 @@ fn foo() {
 #[test]
 fn excluded_trait_method_is_not_excluded_when_trait_is_specified() {
     check_with_config(
-        CompletionConfig { exclude_traits: &["test::ExcludedTrait".to_owned()], ..TEST_CONFIG },
+        CompletionConfig {
+            exclude_traits: &["ra_test_fixture::ExcludedTrait".to_owned()],
+            ..TEST_CONFIG
+        },
         r#"
 pub trait ExcludedTrait {
     fn foo(&self) {}
@@ -1833,7 +1881,10 @@ fn foo() {
             "#]],
     );
     check_with_config(
-        CompletionConfig { exclude_traits: &["test::ExcludedTrait".to_owned()], ..TEST_CONFIG },
+        CompletionConfig {
+            exclude_traits: &["ra_test_fixture::ExcludedTrait".to_owned()],
+            ..TEST_CONFIG
+        },
         r#"
 pub trait ExcludedTrait {
     fn foo(&self) {}
@@ -1863,7 +1914,10 @@ fn foo() {
 #[test]
 fn excluded_trait_not_excluded_when_inherent_path() {
     check_with_config(
-        CompletionConfig { exclude_traits: &["test::ExcludedTrait".to_owned()], ..TEST_CONFIG },
+        CompletionConfig {
+            exclude_traits: &["ra_test_fixture::ExcludedTrait".to_owned()],
+            ..TEST_CONFIG
+        },
         r#"
 trait ExcludedTrait {
     fn foo(&self) {}
@@ -1884,7 +1938,10 @@ fn foo() {
         "#]],
     );
     check_with_config(
-        CompletionConfig { exclude_traits: &["test::ExcludedTrait".to_owned()], ..TEST_CONFIG },
+        CompletionConfig {
+            exclude_traits: &["ra_test_fixture::ExcludedTrait".to_owned()],
+            ..TEST_CONFIG
+        },
         r#"
 trait ExcludedTrait {
     fn foo(&self) {}
@@ -1956,6 +2013,7 @@ fn bar() {
             kw if
             kw if let
             kw impl
+            kw impl for
             kw let
             kw letm
             kw loop
@@ -2028,6 +2086,7 @@ fn foo() {
             kw if
             kw if let
             kw impl
+            kw impl for
             kw let
             kw letm
             kw loop
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs
index 2e7c53def7fc5..27c91bc7c4558 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs
@@ -1,9 +1,9 @@
-use expect_test::{expect, Expect};
+use expect_test::{Expect, expect};
 
 use crate::{
-    context::{CompletionAnalysis, NameContext, NameKind, NameRefKind},
-    tests::{check_edit, check_edit_with_config, TEST_CONFIG},
     CompletionConfig,
+    context::{CompletionAnalysis, NameContext, NameKind, NameRefKind},
+    tests::{TEST_CONFIG, check_edit, check_edit_with_config},
 };
 
 fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
@@ -1810,9 +1810,10 @@ fn function() {
 
 #[test]
 fn excluded_trait_item_included_when_exact_match() {
+    // FIXME: Exact matches should still be completed despite the exclusion, but this does not work yet and needs a code change.
     check_with_config(
         CompletionConfig {
-            exclude_traits: &["test::module2::ExcludedTrait".to_owned()],
+            exclude_traits: &["ra_test_fixture::module2::ExcludedTrait".to_owned()],
             ..TEST_CONFIG
         },
         r#"
@@ -1828,10 +1829,122 @@ mod module2 {
 
 fn foo() {
     true.foo$0
+}
+        "#,
+        expect![""],
+    );
+}
+
+#[test]
+fn excluded_via_attr() {
+    check(
+        r#"
+mod module2 {
+    #[rust_analyzer::completions(ignore_flyimport)]
+    pub trait ExcludedTrait {
+        fn foo(&self) {}
+        fn bar(&self) {}
+        fn baz(&self) {}
+    }
+
+    impl<T> ExcludedTrait for T {}
+}
+
+fn foo() {
+    true.$0
+}
+        "#,
+        expect![""],
+    );
+    check(
+        r#"
+mod module2 {
+    #[rust_analyzer::completions(ignore_flyimport_methods)]
+    pub trait ExcludedTrait {
+        fn foo(&self) {}
+        fn bar(&self) {}
+        fn baz(&self) {}
+    }
+
+    impl<T> ExcludedTrait for T {}
+}
+
+fn foo() {
+    true.$0
+}
+        "#,
+        expect![""],
+    );
+    check(
+        r#"
+mod module2 {
+    #[rust_analyzer::completions(ignore_methods)]
+    pub trait ExcludedTrait {
+        fn foo(&self) {}
+        fn bar(&self) {}
+        fn baz(&self) {}
+    }
+
+    impl<T> ExcludedTrait for T {}
+}
+
+fn foo() {
+    true.$0
+}
+        "#,
+        expect![""],
+    );
+    check(
+        r#"
+mod module2 {
+    #[rust_analyzer::completions(ignore_flyimport)]
+    pub trait ExcludedTrait {
+        fn foo(&self) {}
+        fn bar(&self) {}
+        fn baz(&self) {}
+    }
+
+    impl<T> ExcludedTrait for T {}
+}
+
+fn foo() {
+    ExcludedTrait$0
+}
+        "#,
+        expect![""],
+    );
+    check(
+        r#"
+mod module2 {
+    #[rust_analyzer::completions(ignore_methods)]
+    pub trait ExcludedTrait {
+        fn foo(&self) {}
+        fn bar(&self) {}
+        fn baz(&self) {}
+    }
+
+    impl<T> ExcludedTrait for T {}
+}
+
+fn foo() {
+    ExcludedTrait$0
 }
         "#,
         expect![[r#"
-            me foo() (use module2::ExcludedTrait) fn(&self)
+            tt ExcludedTrait (use module2::ExcludedTrait)
         "#]],
     );
+    check(
+        r#"
+mod module2 {
+    #[rust_analyzer::completions(ignore_flyimport)]
+    pub struct Foo {}
+}
+
+fn foo() {
+    Foo$0
+}
+        "#,
+        expect![""],
+    );
 }
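
Illustrative aside, not part of the patch; this reads the semantics off the expectations in the new `excluded_via_attr` test above. Going only by what those expectations show: `ignore_flyimport` keeps the annotated trait or struct, and a trait's methods, out of flyimport entirely; `ignore_flyimport_methods` keeps the methods out of flyimport; and `ignore_methods` keeps the methods out while the trait itself is still offered by name. A minimal fixture-style sketch of the attribute placement:

    #[rust_analyzer::completions(ignore_methods)]
    pub trait ExcludedTrait {
        fn foo(&self) {}
    }
    // Per the test above, `true.$0` then offers no methods from this trait,
    // while `ExcludedTrait$0` still completes the trait via flyimport.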
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs
index be2c37d10162e..55689034fb478 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs
@@ -284,6 +284,7 @@ fn bar() {
             kw if
             kw if let
             kw impl
+            kw impl for
             kw let
             kw letm
             kw loop
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs
index 841c42123a017..fcdf10c85616c 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs
@@ -16,6 +16,7 @@ fn in_mod_item_list() {
             kw extern
             kw fn
             kw impl
+            kw impl for
             kw mod
             kw pub
             kw pub(crate)
@@ -50,6 +51,7 @@ fn in_source_file_item_list() {
             kw extern
             kw fn
             kw impl
+            kw impl for
             kw mod
             kw pub
             kw pub(crate)
@@ -83,6 +85,7 @@ fn in_item_list_after_attr() {
             kw extern
             kw fn
             kw impl
+            kw impl for
             kw mod
             kw pub
             kw pub(crate)
@@ -122,6 +125,7 @@ fn after_unsafe_token() {
             kw extern
             kw fn
             kw impl
+            kw impl for
             kw trait
         "#]],
     );
@@ -385,6 +389,7 @@ fn after_unit_struct() {
             kw extern
             kw fn
             kw impl
+            kw impl for
             kw mod
             kw pub
             kw pub(crate)
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/proc_macros.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/proc_macros.rs
index 6b1dfe366ce2d..626d1677d5553 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/proc_macros.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/proc_macros.rs
@@ -22,6 +22,7 @@ fn main() {
             me foo()     fn(&self)
             sn box  Box::new(expr)
             sn call function(expr)
+            sn const      const {}
             sn dbg      dbg!(expr)
             sn dbgr    dbg!(&expr)
             sn deref         *expr
@@ -55,6 +56,7 @@ fn main() {
             me foo()     fn(&self)
             sn box  Box::new(expr)
             sn call function(expr)
+            sn const      const {}
             sn dbg      dbg!(expr)
             sn dbgr    dbg!(&expr)
             sn deref         *expr
@@ -90,6 +92,7 @@ fn main() {}
             me foo()     fn(&self)
             sn box  Box::new(expr)
             sn call function(expr)
+            sn const      const {}
             sn dbg      dbg!(expr)
             sn dbgr    dbg!(&expr)
             sn deref         *expr
@@ -125,6 +128,7 @@ fn main() {}
             me foo()     fn(&self)
             sn box  Box::new(expr)
             sn call function(expr)
+            sn const      const {}
             sn dbg      dbg!(expr)
             sn dbgr    dbg!(&expr)
             sn deref         *expr
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/raw_identifiers.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/raw_identifiers.rs
index 9ab66243b5c8c..00977ea4e533b 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/raw_identifiers.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/raw_identifiers.rs
@@ -1,13 +1,13 @@
 use base_db::SourceDatabase;
-use expect_test::{expect, Expect};
+use expect_test::{Expect, expect};
 use itertools::Itertools;
 
-use crate::tests::{completion_list_with_config_raw, position, TEST_CONFIG};
+use crate::tests::{TEST_CONFIG, completion_list_with_config_raw, position};
 
 fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
     let completions = completion_list_with_config_raw(TEST_CONFIG, ra_fixture, true, None);
     let (db, position) = position(ra_fixture);
-    let mut actual = db.file_text(position.file_id).to_string();
+    let mut actual = db.file_text(position.file_id).text(&db).to_string();
     completions
         .into_iter()
         .exactly_one()
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs
index 005263d100a5b..15518e98370ee 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs
@@ -1,14 +1,14 @@
 //! Tests that don't fit into a specific category.
 
-use expect_test::{expect, Expect};
+use expect_test::{Expect, expect};
 use ide_db::SymbolKind;
 
 use crate::{
+    CompletionItemKind,
     tests::{
-        check, check_edit, check_no_kw, check_with_trigger_character, do_completion_with_config,
-        TEST_CONFIG,
+        TEST_CONFIG, check, check_edit, check_no_kw, check_with_trigger_character,
+        do_completion_with_config,
     },
-    CompletionItemKind,
 };
 
 #[test]
@@ -105,7 +105,7 @@ mod macros {
 fn completes_std_prelude_if_core_is_defined() {
     check_no_kw(
         r#"
-//- /main.rs crate:main deps:core,std
+//- /main.rs crate:main deps:core,std edition:2021
 fn foo() { let x: $0 }
 
 //- /core/lib.rs crate:core
@@ -1008,6 +1008,7 @@ fn here_we_go() {
             kw if
             kw if let
             kw impl
+            kw impl for
             kw let
             kw letm
             kw loop
@@ -1059,6 +1060,7 @@ fn here_we_go() {
             kw if
             kw if let
             kw impl
+            kw impl for
             kw let
             kw letm
             kw loop
@@ -1146,6 +1148,7 @@ fn here_we_go() {
             me baz() (alias qux) fn(&self) -> u8
             sn box                Box::new(expr)
             sn call               function(expr)
+            sn const                    const {}
             sn dbg                    dbg!(expr)
             sn dbgr                  dbg!(&expr)
             sn deref                       *expr
@@ -1183,6 +1186,7 @@ fn bar() { qu$0 }
             kw if
             kw if let
             kw impl
+            kw impl for
             kw let
             kw letm
             kw loop
@@ -1264,6 +1268,7 @@ fn here_we_go() {
             md foo
             st Bar (alias Qux) (use foo::Bar) Bar
             bt u32                            u32
+            kw const
             kw crate::
             kw false
             kw for
@@ -1439,6 +1444,7 @@ fn foo() {
             kw if
             kw if let
             kw impl
+            kw impl for
             kw let
             kw letm
             kw loop
diff --git a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml
index 641998c3dacaf..f1d6b605b0021 100644
--- a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml
@@ -12,7 +12,7 @@ rust-version.workspace = true
 [lib]
 
 [dependencies]
-cov-mark = "2.0.0-pre.1"
+cov-mark = "2.0.0"
 crossbeam-channel.workspace = true
 tracing.workspace = true
 rayon.workspace = true
@@ -22,7 +22,9 @@ either.workspace = true
 itertools.workspace = true
 arrayvec.workspace = true
 indexmap.workspace = true
-memchr = "2.6.4"
+memchr = "2.7.4"
+salsa.workspace = true
+query-group.workspace = true
 triomphe.workspace = true
 nohash-hasher.workspace = true
 bitflags.workspace = true
@@ -34,6 +36,7 @@ profile.workspace = true
 stdx.workspace = true
 syntax.workspace = true
 span.workspace = true
+vfs.workspace = true
 # ide should depend only on the top-level `hir` package. if you need
 # something from some `hir-xxx` subpackage, reexport the API via `hir`.
 hir.workspace = true
@@ -41,7 +44,7 @@ hir.workspace = true
 line-index.workspace = true
 
 [dev-dependencies]
-expect-test = "1.4.0"
+expect-test = "1.5.1"
 
 # local deps
 test-utils.workspace = true
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs b/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs
index 11808fed3be6a..7b5723f37f760 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs
@@ -3,9 +3,11 @@
 use either::Either;
 use hir::{InFile, Semantics, Type};
 use parser::T;
+use span::TextSize;
 use syntax::{
+    AstNode, NodeOrToken, SyntaxToken,
     ast::{self, AstChildren, HasArgList, HasAttrs, HasName},
-    match_ast, AstNode, NodeOrToken, SyntaxToken,
+    match_ast,
 };
 
 use crate::RootDatabase;
@@ -20,7 +22,24 @@ impl ActiveParameter {
     /// Returns information about the call argument this token is part of.
     pub fn at_token(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> Option<Self> {
         let (signature, active_parameter) = callable_for_token(sema, token)?;
+        Self::from_signature_and_active_parameter(sema, signature, active_parameter)
+    }
 
+    /// Returns information about the call argument at the given offset within the argument list.
+    pub fn at_arg(
+        sema: &Semantics<'_, RootDatabase>,
+        list: ast::ArgList,
+        at: TextSize,
+    ) -> Option<Self> {
+        let (signature, active_parameter) = callable_for_arg_list(sema, list, at)?;
+        Self::from_signature_and_active_parameter(sema, signature, active_parameter)
+    }
+
+    fn from_signature_and_active_parameter(
+        sema: &Semantics<'_, RootDatabase>,
+        signature: hir::Callable,
+        active_parameter: Option<usize>,
+    ) -> Option<Self> {
         let idx = active_parameter?;
         let mut params = signature.params();
         if idx >= params.len() {
@@ -48,20 +67,32 @@ pub fn callable_for_token(
     sema: &Semantics<'_, RootDatabase>,
     token: SyntaxToken,
 ) -> Option<(hir::Callable, Option<usize>)> {
+    let offset = token.text_range().start();
     // Find the calling expression and its NameRef
     let parent = token.parent()?;
-    let calling_node = parent.ancestors().filter_map(ast::CallableExpr::cast).find(|it| {
-        it.arg_list()
-            .is_some_and(|it| it.syntax().text_range().contains(token.text_range().start()))
-    })?;
+    let calling_node = parent
+        .ancestors()
+        .filter_map(ast::CallableExpr::cast)
+        .find(|it| it.arg_list().is_some_and(|it| it.syntax().text_range().contains(offset)))?;
+
+    callable_for_node(sema, &calling_node, offset)
+}
 
-    callable_for_node(sema, &calling_node, &token)
+/// Returns the [`hir::Callable`] the given argument list belongs to, along with the index of the active argument at `at`.
+pub fn callable_for_arg_list(
+    sema: &Semantics<'_, RootDatabase>,
+    arg_list: ast::ArgList,
+    at: TextSize,
+) -> Option<(hir::Callable, Option<usize>)> {
+    debug_assert!(arg_list.syntax().text_range().contains(at));
+    let callable = arg_list.syntax().parent().and_then(ast::CallableExpr::cast)?;
+    callable_for_node(sema, &callable, at)
 }
 
 pub fn callable_for_node(
     sema: &Semantics<'_, RootDatabase>,
     calling_node: &ast::CallableExpr,
-    token: &SyntaxToken,
+    offset: TextSize,
 ) -> Option<(hir::Callable, Option<usize>)> {
     let callable = match calling_node {
         ast::CallableExpr::Call(call) => sema.resolve_expr_as_callable(&call.expr()?),
@@ -73,7 +104,7 @@ pub fn callable_for_node(
             .children_with_tokens()
             .filter_map(NodeOrToken::into_token)
             .filter(|t| t.kind() == T![,])
-            .take_while(|t| t.text_range().start() <= token.text_range().start())
+            .take_while(|t| t.text_range().start() <= offset)
             .count()
     });
     Some((callable, active_param))
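
For illustration (not part of the patch): `callable_for_node` now takes a `TextSize` offset instead of a `&SyntaxToken`, so call sites migrate by passing the token's start offset, exactly as the updated `callable_for_token` above does. A minimal sketch, assuming `sema`, `calling_node`, and `token` are already in scope:

    let offset = token.text_range().start();
    let (callable, active_param) = callable_for_node(&sema, &calling_node, offset)?;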
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs
index 46ff4fbf9e904..008b6fdbe2c68 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs
@@ -1,17 +1,12 @@
 //! Applies changes to the IDE state transactionally.
 
-use base_db::{
-    ra_salsa::{
-        debug::{DebugQueryTable, TableEntry},
-        Database, Durability, Query, QueryTable,
-    },
-    SourceRootId,
-};
-use profile::{memory_usage, Bytes};
+use base_db::SourceRootId;
+use profile::Bytes;
 use rustc_hash::FxHashSet;
+use salsa::{Database as _, Durability};
 use triomphe::Arc;
 
-use crate::{symbol_index::SymbolsDatabase, ChangeWithProcMacros, RootDatabase};
+use crate::{ChangeWithProcMacros, RootDatabase, symbol_index::SymbolsDatabase};
 
 impl RootDatabase {
     pub fn request_cancellation(&mut self) {
@@ -34,8 +29,8 @@ impl RootDatabase {
                     local_roots.insert(root_id);
                 }
             }
-            self.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH);
-            self.set_library_roots_with_durability(Arc::new(library_roots), Durability::HIGH);
+            self.set_local_roots_with_durability(Arc::new(local_roots), Durability::MEDIUM);
+            self.set_library_roots_with_durability(Arc::new(library_roots), Durability::MEDIUM);
         }
         change.apply(self);
     }
@@ -52,23 +47,23 @@ impl RootDatabase {
     pub fn per_query_memory_usage(&mut self) -> Vec<(String, Bytes, usize)> {
         let mut acc: Vec<(String, Bytes, usize)> = vec![];
 
-        fn collect_query_count<'q, Q>(table: &QueryTable<'q, Q>) -> usize
-        where
-            QueryTable<'q, Q>: DebugQueryTable,
-            Q: Query,
-            <Q as Query>::Storage: 'q,
-        {
-            struct EntryCounter(usize);
-            impl<K, V> FromIterator<TableEntry<K, V>> for EntryCounter {
-                fn from_iter<T>(iter: T) -> EntryCounter
-                where
-                    T: IntoIterator<Item = TableEntry<K, V>>,
-                {
-                    EntryCounter(iter.into_iter().count())
-                }
-            }
-            table.entries::<EntryCounter>().0
-        }
+        // fn collect_query_count<'q, Q>(table: &QueryTable<'q, Q>) -> usize
+        // where
+        //     QueryTable<'q, Q>: DebugQueryTable,
+        //     Q: Query,
+        //     <Q as Query>::Storage: 'q,
+        // {
+        //     struct EntryCounter(usize);
+        //     impl<K, V> FromIterator<TableEntry<K, V>> for EntryCounter {
+        //         fn from_iter<T>(iter: T) -> EntryCounter
+        //         where
+        //             T: IntoIterator<Item = TableEntry<K, V>>,
+        //         {
+        //             EntryCounter(iter.into_iter().count())
+        //         }
+        //     }
+        //     table.entries::<EntryCounter>().0
+        // }
 
         macro_rules! purge_each_query {
             ($($q:path)*) => {$(
@@ -83,170 +78,174 @@ impl RootDatabase {
             )*}
         }
         purge_each_query![
-            // SymbolsDatabase
-            crate::symbol_index::ModuleSymbolsQuery
-            crate::symbol_index::LibrarySymbolsQuery
-            crate::symbol_index::LocalRootsQuery
-            crate::symbol_index::LibraryRootsQuery
-            // HirDatabase
-            hir::db::AdtDatumQuery
-            hir::db::AdtVarianceQuery
-            hir::db::AssociatedTyDataQuery
-            hir::db::AssociatedTyValueQuery
-            hir::db::BorrowckQuery
-            hir::db::CallableItemSignatureQuery
-            hir::db::ConstEvalDiscriminantQuery
-            hir::db::ConstEvalQuery
-            hir::db::ConstEvalStaticQuery
-            hir::db::ConstParamTyQuery
-            hir::db::DynCompatibilityOfTraitQuery
-            hir::db::FieldTypesQuery
-            hir::db::FnDefDatumQuery
-            hir::db::FnDefVarianceQuery
-            hir::db::GenericDefaultsQuery
-            hir::db::GenericPredicatesForParamQuery
-            hir::db::GenericPredicatesQuery
-            hir::db::GenericPredicatesWithoutParentQuery
-            hir::db::ImplDatumQuery
-            hir::db::ImplSelfTyQuery
-            hir::db::ImplTraitQuery
-            hir::db::IncoherentInherentImplCratesQuery
-            hir::db::InferQuery
-            hir::db::InherentImplsInBlockQuery
-            hir::db::InherentImplsInCrateQuery
-            hir::db::InternCallableDefQuery
-            hir::db::InternClosureQuery
-            hir::db::InternCoroutineQuery
-            hir::db::InternImplTraitIdQuery
-            hir::db::InternLifetimeParamIdQuery
-            hir::db::InternTypeOrConstParamIdQuery
-            hir::db::LayoutOfAdtQuery
-            hir::db::LayoutOfTyQuery
-            hir::db::LookupImplMethodQuery
-            hir::db::MirBodyForClosureQuery
-            hir::db::MirBodyQuery
-            hir::db::MonomorphizedMirBodyForClosureQuery
-            hir::db::MonomorphizedMirBodyQuery
-            hir::db::ProgramClausesForChalkEnvQuery
-            hir::db::ReturnTypeImplTraitsQuery
-            hir::db::TargetDataLayoutQuery
-            hir::db::TraitDatumQuery
-            hir::db::TraitEnvironmentQuery
-            hir::db::TraitImplsInBlockQuery
-            hir::db::TraitImplsInCrateQuery
-            hir::db::TraitImplsInDepsQuery
-            hir::db::TraitSolveQuery
-            hir::db::TyQuery
-            hir::db::TypeAliasImplTraitsQuery
-            hir::db::ValueTyQuery
+            // // SymbolsDatabase
+            // crate::symbol_index::ModuleSymbolsQuery
+            // crate::symbol_index::LibrarySymbolsQuery
+            // crate::symbol_index::LocalRootsQuery
+            // crate::symbol_index::LibraryRootsQuery
+            // // HirDatabase
+            // hir::db::AdtDatumQuery
+            // hir::db::AdtVarianceQuery
+            // hir::db::AssociatedTyDataQuery
+            // hir::db::AssociatedTyValueQuery
+            // hir::db::BorrowckQuery
+            // hir::db::CallableItemSignatureQuery
+            // hir::db::ConstEvalDiscriminantQuery
+            // hir::db::ConstEvalQuery
+            // hir::db::ConstEvalStaticQuery
+            // hir::db::ConstParamTyQuery
+            // hir::db::DynCompatibilityOfTraitQuery
+            // hir::db::FieldTypesQuery
+            // hir::db::FnDefDatumQuery
+            // hir::db::FnDefVarianceQuery
+            // hir::db::GenericDefaultsQuery
+            // hir::db::GenericPredicatesForParamQuery
+            // hir::db::GenericPredicatesQuery
+            // hir::db::GenericPredicatesWithoutParentQuery
+            // hir::db::ImplDatumQuery
+            // hir::db::ImplSelfTyQuery
+            // hir::db::ImplTraitQuery
+            // hir::db::IncoherentInherentImplCratesQuery
+            // hir::db::InferQuery
+            // hir::db::InherentImplsInBlockQuery
+            // hir::db::InherentImplsInCrateQuery
+            // hir::db::InternCallableDefQuery
+            // hir::db::InternClosureQuery
+            // hir::db::InternCoroutineQuery
+            // hir::db::InternImplTraitIdQuery
+            // hir::db::InternLifetimeParamIdQuery
+            // hir::db::InternTypeOrConstParamIdQuery
+            // hir::db::LayoutOfAdtQuery
+            // hir::db::LayoutOfTyQuery
+            // hir::db::LookupImplMethodQuery
+            // hir::db::MirBodyForClosureQuery
+            // hir::db::MirBodyQuery
+            // hir::db::MonomorphizedMirBodyForClosureQuery
+            // hir::db::MonomorphizedMirBodyQuery
+            // hir::db::ProgramClausesForChalkEnvQuery
+            // hir::db::ReturnTypeImplTraitsQuery
+            // hir::db::TargetDataLayoutQuery
+            // hir::db::TraitDatumQuery
+            // hir::db::TraitEnvironmentQuery
+            // hir::db::TraitImplsInBlockQuery
+            // hir::db::TraitImplsInCrateQuery
+            // hir::db::TraitImplsInDepsQuery
+            // hir::db::TraitSolveQuery
+            // hir::db::TyQuery
+            // hir::db::TypeAliasImplTraitsQuery
+            // hir::db::ValueTyQuery
 
-            // DefDatabase
-            hir::db::AttrsQuery
-            hir::db::BlockDefMapQuery
-            hir::db::BlockItemTreeQuery
-            hir::db::BlockItemTreeWithSourceMapQuery
-            hir::db::BodyQuery
-            hir::db::BodyWithSourceMapQuery
-            hir::db::ConstDataQuery
-            hir::db::ConstVisibilityQuery
-            hir::db::CrateDefMapQuery
-            hir::db::CrateLangItemsQuery
-            hir::db::CrateNotableTraitsQuery
-            hir::db::CrateSupportsNoStdQuery
-            hir::db::EnumDataQuery
-            hir::db::EnumVariantDataWithDiagnosticsQuery
-            hir::db::ExpandProcAttrMacrosQuery
-            hir::db::ExprScopesQuery
-            hir::db::ExternCrateDeclDataQuery
-            hir::db::FieldVisibilitiesQuery
-            hir::db::FieldsAttrsQuery
-            hir::db::FieldsAttrsSourceMapQuery
-            hir::db::FileItemTreeQuery
-            hir::db::FileItemTreeWithSourceMapQuery
-            hir::db::FunctionDataQuery
-            hir::db::FunctionVisibilityQuery
-            hir::db::GenericParamsQuery
-            hir::db::GenericParamsWithSourceMapQuery
-            hir::db::ImplDataWithDiagnosticsQuery
-            hir::db::ImportMapQuery
-            hir::db::IncludeMacroInvocQuery
-            hir::db::InternAnonymousConstQuery
-            hir::db::InternBlockQuery
-            hir::db::InternConstQuery
-            hir::db::InternEnumQuery
-            hir::db::InternExternBlockQuery
-            hir::db::InternExternCrateQuery
-            hir::db::InternFunctionQuery
-            hir::db::InternImplQuery
-            hir::db::InternInTypeConstQuery
-            hir::db::InternMacro2Query
-            hir::db::InternMacroRulesQuery
-            hir::db::InternProcMacroQuery
-            hir::db::InternStaticQuery
-            hir::db::InternStructQuery
-            hir::db::InternTraitAliasQuery
-            hir::db::InternTraitQuery
-            hir::db::InternTypeAliasQuery
-            hir::db::InternUnionQuery
-            hir::db::InternUseQuery
-            hir::db::LangItemQuery
-            hir::db::Macro2DataQuery
-            hir::db::MacroDefQuery
-            hir::db::MacroRulesDataQuery
-            hir::db::NotableTraitsInDepsQuery
-            hir::db::ProcMacroDataQuery
-            hir::db::StaticDataQuery
-            hir::db::StructDataWithDiagnosticsQuery
-            hir::db::TraitAliasDataQuery
-            hir::db::TraitDataWithDiagnosticsQuery
-            hir::db::TypeAliasDataQuery
-            hir::db::UnionDataWithDiagnosticsQuery
+            // // DefDatabase
+            // hir::db::AttrsQuery
+            // hir::db::BlockDefMapQuery
+            // hir::db::BlockItemTreeQuery
+            // hir::db::BlockItemTreeWithSourceMapQuery
+            // hir::db::BodyQuery
+            // hir::db::BodyWithSourceMapQuery
+            // hir::db::ConstDataQuery
+            // hir::db::ConstVisibilityQuery
+            // hir::db::CrateDefMapQuery
+            // hir::db::CrateLangItemsQuery
+            // hir::db::CrateNotableTraitsQuery
+            // hir::db::CrateSupportsNoStdQuery
+            // hir::db::EnumDataQuery
+            // hir::db::ExpandProcAttrMacrosQuery
+            // hir::db::ExprScopesQuery
+            // hir::db::ExternCrateDeclDataQuery
+            // hir::db::FieldVisibilitiesQuery
+            // hir::db::FieldsAttrsQuery
+            // hir::db::FieldsAttrsSourceMapQuery
+            // hir::db::FileItemTreeQuery
+            // hir::db::FileItemTreeWithSourceMapQuery
+            // hir::db::FunctionDataQuery
+            // hir::db::FunctionVisibilityQuery
+            // hir::db::GenericParamsQuery
+            // hir::db::GenericParamsWithSourceMapQuery
+            // hir::db::ImplItemsWithDiagnosticsQuery
+            // hir::db::ImportMapQuery
+            // hir::db::IncludeMacroInvocQuery
+            // hir::db::InternAnonymousConstQuery
+            // hir::db::InternBlockQuery
+            // hir::db::InternConstQuery
+            // hir::db::InternEnumQuery
+            // hir::db::InternExternBlockQuery
+            // hir::db::InternExternCrateQuery
+            // hir::db::InternFunctionQuery
+            // hir::db::InternImplQuery
+            // hir::db::InternInTypeConstQuery
+            // hir::db::InternMacro2Query
+            // hir::db::InternMacroRulesQuery
+            // hir::db::InternProcMacroQuery
+            // hir::db::InternStaticQuery
+            // hir::db::InternStructQuery
+            // hir::db::InternTraitAliasQuery
+            // hir::db::InternTraitQuery
+            // hir::db::InternTypeAliasQuery
+            // hir::db::InternUnionQuery
+            // hir::db::InternUseQuery
+            // hir::db::LangItemQuery
+            // hir::db::Macro2DataQuery
+            // hir::db::MacroDefQuery
+            // hir::db::MacroRulesDataQuery
+            // hir::db::NotableTraitsInDepsQuery
+            // hir::db::ProcMacroDataQuery
+            // hir::db::StaticDataQuery
+            // hir::db::TraitAliasDataQuery
+            // hir::db::TraitItemsWithDiagnosticsQuery
+            // hir::db::TypeAliasDataQuery
+            // hir::db::VariantDataWithDiagnosticsQuery
 
-            // InternDatabase
-            hir::db::InternFunctionQuery
-            hir::db::InternStructQuery
-            hir::db::InternUnionQuery
-            hir::db::InternEnumQuery
-            hir::db::InternConstQuery
-            hir::db::InternStaticQuery
-            hir::db::InternTraitQuery
-            hir::db::InternTraitAliasQuery
-            hir::db::InternTypeAliasQuery
-            hir::db::InternImplQuery
-            hir::db::InternExternBlockQuery
-            hir::db::InternBlockQuery
-            hir::db::InternMacro2Query
-            hir::db::InternProcMacroQuery
-            hir::db::InternMacroRulesQuery
+            // // InternDatabase
+            // hir::db::InternFunctionQuery
+            // hir::db::InternStructQuery
+            // hir::db::InternUnionQuery
+            // hir::db::InternEnumQuery
+            // hir::db::InternConstQuery
+            // hir::db::InternStaticQuery
+            // hir::db::InternTraitQuery
+            // hir::db::InternTraitAliasQuery
+            // hir::db::InternTypeAliasQuery
+            // hir::db::InternImplQuery
+            // hir::db::InternExternBlockQuery
+            // hir::db::InternBlockQuery
+            // hir::db::InternMacro2Query
+            // hir::db::InternProcMacroQuery
+            // hir::db::InternMacroRulesQuery
 
-            // ExpandDatabase
-            hir::db::AstIdMapQuery
-            hir::db::DeclMacroExpanderQuery
-            hir::db::ExpandProcMacroQuery
-            hir::db::InternMacroCallQuery
-            hir::db::InternSyntaxContextQuery
-            hir::db::MacroArgQuery
-            hir::db::ParseMacroExpansionErrorQuery
-            hir::db::ParseMacroExpansionQuery
-            hir::db::ProcMacroSpanQuery
-            hir::db::ProcMacrosQuery
-            hir::db::RealSpanMapQuery
+            // // ExpandDatabase
+            // hir::db::AstIdMapQuery
+            // hir::db::DeclMacroExpanderQuery
+            // hir::db::ExpandProcMacroQuery
+            // hir::db::InternMacroCallQuery
+            // hir::db::InternSyntaxContextQuery
+            // hir::db::MacroArgQuery
+            // hir::db::ParseMacroExpansionErrorQuery
+            // hir::db::ParseMacroExpansionQuery
+            // hir::db::ProcMacroSpanQuery
+            // hir::db::ProcMacrosQuery
+            // hir::db::RealSpanMapQuery
 
-            // LineIndexDatabase
-            crate::LineIndexQuery
+            // // LineIndexDatabase
+            // crate::LineIndexQuery
 
-            // SourceDatabase
-            base_db::ParseQuery
-            base_db::ParseErrorsQuery
-            base_db::CrateGraphQuery
-            base_db::CrateWorkspaceDataQuery
+            // // SourceDatabase
+            // base_db::ParseQuery
+            // base_db::ParseErrorsQuery
+            // base_db::AllCratesQuery
+            // base_db::InternUniqueCrateDataQuery
+            // base_db::InternUniqueCrateDataLookupQuery
+            // base_db::CrateDataQuery
+            // base_db::ExtraCrateDataQuery
+            // base_db::CrateCfgQuery
+            // base_db::CrateEnvQuery
+            // base_db::CrateWorkspaceDataQuery
 
-            // SourceDatabaseExt
-            base_db::FileTextQuery
-            base_db::CompressedFileTextQuery
-            base_db::FileSourceRootQuery
-            base_db::SourceRootQuery
-            base_db::SourceRootCratesQuery
+            // // SourceDatabaseExt
+            // base_db::FileTextQuery
+            // base_db::CompressedFileTextQuery
+            // base_db::FileSourceRootQuery
+            // base_db::SourceRootQuery
+            // base_db::SourceRootCratesQuery
         ];
 
         acc.sort_by_key(|it| std::cmp::Reverse(it.1));
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/assists.rs b/src/tools/rust-analyzer/crates/ide-db/src/assists.rs
index 1c40685ebb130..90ae4a3b5b3a6 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/assists.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/assists.rs
@@ -43,9 +43,6 @@ pub enum Command {
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq)]
 pub enum AssistKind {
-    // FIXME: does the None variant make sense? Probably not.
-    None,
-
     QuickFix,
     Generate,
     Refactor,
@@ -61,7 +58,7 @@ impl AssistKind {
         }
 
         match self {
-            AssistKind::None | AssistKind::Generate => true,
+            AssistKind::Generate => true,
             AssistKind::Refactor => matches!(
                 other,
                 AssistKind::RefactorExtract
@@ -74,7 +71,6 @@ impl AssistKind {
 
     pub fn name(&self) -> &str {
         match self {
-            AssistKind::None => "None",
             AssistKind::QuickFix => "QuickFix",
             AssistKind::Generate => "Generate",
             AssistKind::Refactor => "Refactor",
@@ -90,7 +86,6 @@ impl FromStr for AssistKind {
 
     fn from_str(s: &str) -> Result<Self, Self::Err> {
         match s {
-            "None" => Ok(AssistKind::None),
             "QuickFix" => Ok(AssistKind::QuickFix),
             "Generate" => Ok(AssistKind::Generate),
             "Refactor" => Ok(AssistKind::Refactor),
@@ -105,7 +100,33 @@ impl FromStr for AssistKind {
 /// Unique identifier of the assist, should not be shown to the user
 /// directly.
 #[derive(Debug, Clone, Copy, PartialEq, Eq)]
-pub struct AssistId(pub &'static str, pub AssistKind);
+pub struct AssistId(pub &'static str, pub AssistKind, pub Option<usize>);
+
+impl AssistId {
+    pub fn quick_fix(id: &'static str) -> AssistId {
+        AssistId(id, AssistKind::QuickFix, None)
+    }
+
+    pub fn generate(id: &'static str) -> AssistId {
+        AssistId(id, AssistKind::Generate, None)
+    }
+
+    pub fn refactor(id: &'static str) -> AssistId {
+        AssistId(id, AssistKind::Refactor, None)
+    }
+
+    pub fn refactor_extract(id: &'static str) -> AssistId {
+        AssistId(id, AssistKind::RefactorExtract, None)
+    }
+
+    pub fn refactor_inline(id: &'static str) -> AssistId {
+        AssistId(id, AssistKind::RefactorInline, None)
+    }
+
+    pub fn refactor_rewrite(id: &'static str) -> AssistId {
+        AssistId(id, AssistKind::RefactorRewrite, None)
+    }
+}
 
 /// A way to control how many assists to resolve during assist resolution.
 /// When an assist is resolved, its edits are calculated, which might be costly to always do by default.
@@ -128,6 +149,8 @@ pub struct SingleResolve {
     pub assist_id: String,
     // The kind of the assist.
     pub assist_kind: AssistKind,
+    /// Subtype of the assist. When multiple assists share the same id, this field differentiates between them.
+    pub assist_subtype: Option<usize>,
 }
 
 impl AssistResolveStrategy {
@@ -136,7 +159,9 @@ impl AssistResolveStrategy {
             AssistResolveStrategy::None => false,
             AssistResolveStrategy::All => true,
             AssistResolveStrategy::Single(single_resolve) => {
-                single_resolve.assist_id == id.0 && single_resolve.assist_kind == id.1
+                single_resolve.assist_id == id.0
+                    && single_resolve.assist_kind == id.1
+                    && single_resolve.assist_subtype == id.2
             }
         }
     }
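
A quick illustration, not part of the patch: since `AssistId` gained an optional subtype field, the tuple form now takes a third argument and the new constructors cover the common `None` case. The id string below is only an example:

    // Both forms build the same id with no subtype.
    let a = AssistId("merge_imports", AssistKind::RefactorRewrite, None);
    let b = AssistId::refactor_rewrite("merge_imports");
    assert_eq!(a, b);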
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
index 502314ed1e0ec..bf4f541ff54ca 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
@@ -5,9 +5,9 @@
 
 // FIXME: this badly needs rename/rewrite (matklad, 2020-02-06).
 
+use crate::RootDatabase;
 use crate::documentation::{Documentation, HasDocs};
 use crate::famous_defs::FamousDefs;
-use crate::RootDatabase;
 use arrayvec::ArrayVec;
 use either::Either;
 use hir::{
@@ -21,8 +21,9 @@ use hir::{
 use span::Edition;
 use stdx::{format_to, impl_from};
 use syntax::{
+    SyntaxKind, SyntaxNode, SyntaxToken,
     ast::{self, AstNode},
-    match_ast, SyntaxKind, SyntaxNode, SyntaxToken,
+    match_ast,
 };
 
 // FIXME: a more precise name would probably be `Symbol`?
@@ -838,6 +839,14 @@ impl NameRefClass {
                 ast::AsmRegSpec(_) => {
                     Some(NameRefClass::Definition(Definition::InlineAsmRegOrRegClass(()), None))
                 },
+                ast::OffsetOfExpr(_) => {
+                    let (def, subst) = sema.resolve_offset_of_field(name_ref)?;
+                    let def = match def {
+                        Either::Left(variant) => Definition::Variant(variant),
+                        Either::Right(field) => Definition::Field(field),
+                    };
+                    Some(NameRefClass::Definition(def, Some(subst)))
+                },
                 _ => None
             }
         }
@@ -988,7 +997,6 @@ impl TryFrom<DefWithBody> for Definition {
             DefWithBody::Static(it) => Ok(it.into()),
             DefWithBody::Const(it) => Ok(it.into()),
             DefWithBody::Variant(it) => Ok(it.into()),
-            DefWithBody::InTypeConst(_) => Err(()),
         }
     }
 }
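
Illustrative aside, not part of the patch: the new `OffsetOfExpr` arm classifies a name reference inside `offset_of!` as the field (or enum variant) it names. For example, in user code like the following, the `b` in the macro argument would now resolve to `Definition::Field`:

    struct Foo { b: u32 }
    fn main() {
        // `b` here is the name reference that the new arm resolves to the field `Foo::b`.
        let _off = core::mem::offset_of!(Foo, b);
    }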
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs b/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs
index b83efcd02f772..ef2c83992c049 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs
@@ -1,14 +1,15 @@
 //! Documentation attribute related utilities.
 use either::Either;
 use hir::{
+    AttrId, AttrSourceMap, AttrsWithOwner, HasAttrs, InFile,
     db::{DefDatabase, HirDatabase},
-    resolve_doc_path_on, sym, AttrId, AttrSourceMap, AttrsWithOwner, HasAttrs, InFile,
+    resolve_doc_path_on, sym,
 };
 use itertools::Itertools;
 use span::{TextRange, TextSize};
 use syntax::{
-    ast::{self, IsString},
     AstToken,
+    ast::{self, IsString},
 };
 
 /// Holds documentation
@@ -92,7 +93,7 @@ pub fn docs_with_rangemap(
     attrs: &AttrsWithOwner,
 ) -> Option<(Documentation, DocsRangeMap)> {
     let docs = attrs
-        .by_key(&sym::doc)
+        .by_key(sym::doc)
         .attrs()
         .filter_map(|attr| attr.string_value_unescape().map(|s| (s, attr.id)));
     let indent = doc_indent(attrs);
@@ -134,7 +135,7 @@ pub fn docs_with_rangemap(
 }
 
 pub fn docs_from_attrs(attrs: &hir::Attrs) -> Option<String> {
-    let docs = attrs.by_key(&sym::doc).attrs().filter_map(|attr| attr.string_value_unescape());
+    let docs = attrs.by_key(sym::doc).attrs().filter_map(|attr| attr.string_value_unescape());
     let indent = doc_indent(attrs);
     let mut buf = String::new();
     for doc in docs {
@@ -151,11 +152,7 @@ pub fn docs_from_attrs(attrs: &hir::Attrs) -> Option<String> {
         buf.push('\n');
     }
     buf.pop();
-    if buf.is_empty() {
-        None
-    } else {
-        Some(buf)
-    }
+    if buf.is_empty() { None } else { Some(buf) }
 }
 
 macro_rules! impl_has_docs {
@@ -269,7 +266,7 @@ fn get_doc_string_in_attr(it: &ast::Attr) -> Option<ast::String> {
 
 fn doc_indent(attrs: &hir::Attrs) -> usize {
     let mut min = !0;
-    for val in attrs.by_key(&sym::doc).attrs().filter_map(|attr| attr.string_value_unescape()) {
+    for val in attrs.by_key(sym::doc).attrs().filter_map(|attr| attr.string_value_unescape()) {
         if let Some(m) =
             val.lines().filter_map(|line| line.chars().position(|c| !c.is_whitespace())).min()
         {
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs
index 2f4d07446f2c1..994150b1ac4c2 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs
@@ -1,6 +1,6 @@
 //! See [`FamousDefs`].
 
-use base_db::{CrateOrigin, LangCrateOrigin, SourceDatabase};
+use base_db::{CrateOrigin, LangCrateOrigin};
 use hir::{Crate, Enum, Function, Macro, Module, ScopeDef, Semantics, Trait};
 
 use crate::RootDatabase;
@@ -198,11 +198,10 @@ impl FamousDefs<'_, '_> {
     fn find_lang_crate(&self, origin: LangCrateOrigin) -> Option<Crate> {
         let krate = self.1;
         let db = self.0.db;
-        let crate_graph = self.0.db.crate_graph();
         let res = krate
             .dependencies(db)
             .into_iter()
-            .find(|dep| crate_graph[dep.krate.into()].origin == CrateOrigin::Lang(origin))?
+            .find(|dep| dep.krate.origin(db) == CrateOrigin::Lang(origin))?
             .krate;
         Some(res)
     }
@@ -221,11 +220,7 @@ impl FamousDefs<'_, '_> {
         for segment in path {
             module = module.children(db).find_map(|child| {
                 let name = child.name(db)?;
-                if name.as_str() == segment {
-                    Some(child)
-                } else {
-                    None
-                }
+                if name.as_str() == segment { Some(child) } else { None }
             })?;
         }
         let def =
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs
index 84fa58d743bbc..340429037e67a 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs
@@ -2,17 +2,18 @@
 
 use std::collections::VecDeque;
 
-use base_db::SourceRootDatabase;
+use base_db::SourceDatabase;
 use hir::{Crate, ItemInNs, ModuleDef, Name, Semantics};
 use span::{Edition, FileId};
 use syntax::{
-    ast::{self, make},
     AstToken, SyntaxKind, SyntaxToken, ToSmolStr, TokenAtOffset,
+    ast::{self, make},
 };
 
 use crate::{
+    RootDatabase,
     defs::{Definition, IdentClass},
-    generated, RootDatabase,
+    generated,
 };
 
 pub fn item_name(db: &RootDatabase, item: ItemInNs) -> Option<Name> {
@@ -108,8 +109,8 @@ pub fn lint_eq_or_in_group(lint: &str, lint_is: &str) -> bool {
 
 pub fn is_editable_crate(krate: Crate, db: &RootDatabase) -> bool {
     let root_file = krate.root_file(db);
-    let source_root_id = db.file_source_root(root_file);
-    !db.source_root(source_root_id).is_library
+    let source_root_id = db.file_source_root(root_file).source_root_id(db);
+    !db.source_root(source_root_id).source_root(db).is_library
 }
 
 // FIXME: This is a weird function
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs
index 77fc59b4eccb5..ac592dfe93cf9 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs
@@ -3,20 +3,20 @@
 use std::ops::ControlFlow;
 
 use hir::{
-    db::HirDatabase, AsAssocItem, AssocItem, AssocItemContainer, Crate, HasCrate, ImportPathConfig,
+    AsAssocItem, AssocItem, AssocItemContainer, Complete, Crate, HasCrate, ImportPathConfig,
     ItemInNs, ModPath, Module, ModuleDef, Name, PathResolution, PrefixKind, ScopeDef, Semantics,
-    SemanticsScope, Trait, TyFingerprint, Type,
+    SemanticsScope, Trait, TyFingerprint, Type, db::HirDatabase,
 };
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
 use syntax::{
-    ast::{self, make, HasName},
     AstNode, SyntaxNode,
+    ast::{self, HasName, make},
 };
 
 use crate::{
-    items_locator::{self, AssocSearchMode, DEFAULT_QUERY_SEARCH_LIMIT},
     FxIndexSet, RootDatabase,
+    items_locator::{self, AssocSearchMode, DEFAULT_QUERY_SEARCH_LIMIT},
 };
 
 /// A candidate for import, derived during various IDE activities:
@@ -183,6 +183,9 @@ impl ImportAssets {
     }
 }
 
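+/// Whether an import found by flyimport should actually be offered in completion;
+/// derived from the item's `#[rust_analyzer::completions(...)]` attribute.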
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct CompleteInFlyimport(pub bool);
+
 /// An import (not necessarily the only one) that corresponds to a given [`PathImportCandidate`].
 /// (the structure is not entirely correct, since there can be situations requiring two imports, see FIXME below for the details)
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -198,11 +201,31 @@ pub struct LocatedImport {
     /// the original item is the associated constant, but the import has to be a trait that
     /// defines this constant.
     pub original_item: ItemInNs,
+    /// The value of `#[rust_analyzer::completions(...)]`, if present.
+    pub complete_in_flyimport: CompleteInFlyimport,
 }
 
 impl LocatedImport {
-    pub fn new(import_path: ModPath, item_to_import: ItemInNs, original_item: ItemInNs) -> Self {
-        Self { import_path, item_to_import, original_item }
+    pub fn new(
+        import_path: ModPath,
+        item_to_import: ItemInNs,
+        original_item: ItemInNs,
+        complete_in_flyimport: CompleteInFlyimport,
+    ) -> Self {
+        Self { import_path, item_to_import, original_item, complete_in_flyimport }
+    }
+
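+    /// Like [`LocatedImport::new`], but defaults `complete_in_flyimport` to
+    /// `CompleteInFlyimport(true)`.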
+    pub fn new_no_completion(
+        import_path: ModPath,
+        item_to_import: ItemInNs,
+        original_item: ItemInNs,
+    ) -> Self {
+        Self {
+            import_path,
+            item_to_import,
+            original_item,
+            complete_in_flyimport: CompleteInFlyimport(true),
+        }
     }
 }
 
@@ -273,12 +296,13 @@ impl ImportAssets {
             Some(it) => it,
             None => return <FxIndexSet<_>>::default().into_iter(),
         };
+        let db = sema.db;
         let krate = self.module_with_candidate.krate();
         let scope_definitions = self.scope_definitions(sema);
         let mod_path = |item| {
             get_mod_path(
-                sema.db,
-                item_for_path_search(sema.db, item)?,
+                db,
+                item_for_path_search(db, item)?,
                 &self.module_with_candidate,
                 prefixed,
                 cfg,
@@ -288,7 +312,7 @@ impl ImportAssets {
 
         match &self.import_candidate {
             ImportCandidate::Path(path_candidate) => path_applicable_imports(
-                sema,
+                db,
                 &scope,
                 krate,
                 path_candidate,
@@ -297,7 +321,7 @@ impl ImportAssets {
             ),
             ImportCandidate::TraitAssocItem(trait_candidate)
             | ImportCandidate::TraitMethod(trait_candidate) => trait_applicable_items(
-                sema,
+                db,
                 krate,
                 &scope,
                 trait_candidate,
@@ -325,7 +349,7 @@ impl ImportAssets {
 }
 
 fn path_applicable_imports(
-    sema: &Semantics<'_, RootDatabase>,
+    db: &RootDatabase,
     scope: &SemanticsScope<'_>,
     current_crate: Crate,
     path_candidate: &PathImportCandidate,
@@ -337,7 +361,7 @@ fn path_applicable_imports(
     match &*path_candidate.qualifier {
         [] => {
             items_locator::items_with_name(
-                sema,
+                db,
                 current_crate,
                 path_candidate.name.clone(),
                 // FIXME: we could look up assoc items by the input and propose those in completion,
@@ -350,12 +374,17 @@ fn path_applicable_imports(
                 // see also an ignored test under FIXME comment in the qualify_path.rs module
                 AssocSearchMode::Exclude,
             )
-            .filter_map(|item| {
+            .filter_map(|(item, do_not_complete)| {
                 if !scope_filter(item) {
                     return None;
                 }
                 let mod_path = mod_path(item)?;
-                Some(LocatedImport::new(mod_path, item, item))
+                Some(LocatedImport::new(
+                    mod_path,
+                    item,
+                    item,
+                    CompleteInFlyimport(do_not_complete != Complete::IgnoreFlyimport),
+                ))
             })
             .take(DEFAULT_QUERY_SEARCH_LIMIT)
             .collect()
@@ -365,22 +394,23 @@ fn path_applicable_imports(
         // what follows
         // FIXME: This doesn't handle visibility
         [first_qsegment, qualifier_rest @ ..] => items_locator::items_with_name(
-            sema,
+            db,
             current_crate,
             NameToImport::Exact(first_qsegment.as_str().to_owned(), true),
             AssocSearchMode::Exclude,
         )
-        .filter_map(|item| {
+        .filter_map(|(item, do_not_complete)| {
             // we found imports for `first_qsegment`, now we need to filter these imports by whether
             // they result in resolving the rest of the path successfully
             validate_resolvable(
-                sema,
+                db,
                 scope,
                 mod_path,
                 scope_filter,
                 &path_candidate.name,
                 item,
                 qualifier_rest,
+                CompleteInFlyimport(do_not_complete != Complete::IgnoreFlyimport),
             )
         })
         .take(DEFAULT_QUERY_SEARCH_LIMIT)
@@ -391,13 +421,14 @@ fn path_applicable_imports(
 /// Validates and builds an import for `resolved_qualifier` if the `unresolved_qualifier` appended
 /// to it resolves and there is a valid `candidate` after that.
 fn validate_resolvable(
-    sema: &Semantics<'_, RootDatabase>,
+    db: &RootDatabase,
     scope: &SemanticsScope<'_>,
     mod_path: impl Fn(ItemInNs) -> Option<ModPath>,
     scope_filter: impl Fn(ItemInNs) -> bool,
     candidate: &NameToImport,
     resolved_qualifier: ItemInNs,
     unresolved_qualifier: &[Name],
+    complete_in_flyimport: CompleteInFlyimport,
 ) -> Option<LocatedImport> {
     let _p = tracing::info_span!("ImportAssets::import_for_item").entered();
 
@@ -406,8 +437,8 @@ fn validate_resolvable(
         if !unresolved_qualifier.is_empty() {
             match resolved_qualifier {
                 ItemInNs::Types(ModuleDef::Module(module)) => {
-                    adjusted_resolved_qualifier = sema
-                        .resolve_mod_path_relative(module, unresolved_qualifier.iter().cloned())?
+                    adjusted_resolved_qualifier = module
+                        .resolve_mod_path(db, unresolved_qualifier.iter().cloned())?
                         .next()?;
                 }
                 // can't resolve multiple segments for non-module item path bases
@@ -424,7 +455,7 @@ fn validate_resolvable(
     let ty = match qualifier {
         ModuleDef::Module(module) => {
             return items_locator::items_with_name_in_module(
-                sema,
+                db,
                 module,
                 candidate.clone(),
                 AssocSearchMode::Exclude,
@@ -433,23 +464,30 @@ fn validate_resolvable(
                     false => ControlFlow::Continue(()),
                 },
             )
-            .map(|item| LocatedImport::new(import_path_candidate, resolved_qualifier, item))
+            .map(|item| {
+                LocatedImport::new(
+                    import_path_candidate,
+                    resolved_qualifier,
+                    item,
+                    complete_in_flyimport,
+                )
+            });
         }
         // FIXME
         ModuleDef::Trait(_) => return None,
         // FIXME
         ModuleDef::TraitAlias(_) => return None,
-        ModuleDef::TypeAlias(alias) => alias.ty(sema.db),
-        ModuleDef::BuiltinType(builtin) => builtin.ty(sema.db),
-        ModuleDef::Adt(adt) => adt.ty(sema.db),
+        ModuleDef::TypeAlias(alias) => alias.ty(db),
+        ModuleDef::BuiltinType(builtin) => builtin.ty(db),
+        ModuleDef::Adt(adt) => adt.ty(db),
         _ => return None,
     };
-    ty.iterate_path_candidates(sema.db, scope, &FxHashSet::default(), None, None, |assoc| {
+    ty.iterate_path_candidates(db, scope, &FxHashSet::default(), None, None, |assoc| {
         // FIXME: Support extra trait imports
-        if assoc.container_or_implemented_trait(sema.db).is_some() {
+        if assoc.container_or_implemented_trait(db).is_some() {
             return None;
         }
-        let name = assoc.name(sema.db)?;
+        let name = assoc.name(db)?;
         let is_match = match candidate {
             NameToImport::Prefix(text, true) => name.as_str().starts_with(text),
             NameToImport::Prefix(text, false) => {
@@ -471,6 +509,7 @@ fn validate_resolvable(
             import_path_candidate.clone(),
             resolved_qualifier,
             assoc_to_item(assoc),
+            complete_in_flyimport,
         ))
     })
 }
@@ -495,7 +534,7 @@ fn item_for_path_search_assoc(db: &RootDatabase, assoc_item: AssocItem) -> Optio
 }
 
 fn trait_applicable_items(
-    sema: &Semantics<'_, RootDatabase>,
+    db: &RootDatabase,
     current_crate: Crate,
     scope: &SemanticsScope<'_>,
     trait_candidate: &TraitImportCandidate,
@@ -505,21 +544,19 @@ fn trait_applicable_items(
 ) -> FxIndexSet<LocatedImport> {
     let _p = tracing::info_span!("ImportAssets::trait_applicable_items").entered();
 
-    let db = sema.db;
-
     let inherent_traits = trait_candidate.receiver_ty.applicable_inherent_traits(db);
     let env_traits = trait_candidate.receiver_ty.env_traits(db);
     let related_traits = inherent_traits.chain(env_traits).collect::<FxHashSet<_>>();
 
-    let mut required_assoc_items = FxHashSet::default();
+    let mut required_assoc_items = FxHashMap::default();
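+    // `required_assoc_items` maps each candidate associated item to its `CompleteInFlyimport` setting.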
     let mut trait_candidates: FxHashSet<_> = items_locator::items_with_name(
-        sema,
+        db,
         current_crate,
         trait_candidate.assoc_item_name.clone(),
         AssocSearchMode::AssocItemsOnly,
     )
-    .filter_map(|input| item_as_assoc(db, input))
-    .filter_map(|assoc| {
+    .filter_map(|(input, do_not_complete)| Some((item_as_assoc(db, input)?, do_not_complete)))
+    .filter_map(|(assoc, do_not_complete)| {
         if !trait_assoc_item && matches!(assoc, AssocItem::Const(_) | AssocItem::TypeAlias(_)) {
             return None;
         }
@@ -528,7 +565,8 @@ fn trait_applicable_items(
         if related_traits.contains(&assoc_item_trait) {
             return None;
         }
-        required_assoc_items.insert(assoc);
+        required_assoc_items
+            .insert(assoc, CompleteInFlyimport(do_not_complete != Complete::IgnoreFlyimport));
         Some(assoc_item_trait.into())
     })
     .collect();
@@ -600,7 +638,7 @@ fn trait_applicable_items(
             None,
             None,
             |assoc| {
-                if required_assoc_items.contains(&assoc) {
+                if let Some(&complete_in_flyimport) = required_assoc_items.get(&assoc) {
                     let located_trait = assoc.container_trait(db).filter(|&it| scope_filter(it))?;
                     let trait_item = ItemInNs::from(ModuleDef::from(located_trait));
                     let import_path = trait_import_paths
@@ -611,6 +649,7 @@ fn trait_applicable_items(
                         import_path,
                         trait_item,
                         assoc_to_item(assoc),
+                        complete_in_flyimport,
                     ));
                 }
                 None::<()>
@@ -625,7 +664,7 @@ fn trait_applicable_items(
             None,
             |function| {
                 let assoc = function.as_assoc_item(db)?;
-                if required_assoc_items.contains(&assoc) {
+                if let Some(&complete_in_flyimport) = required_assoc_items.get(&assoc) {
                     let located_trait = assoc.container_trait(db).filter(|&it| scope_filter(it))?;
                     let trait_item = ItemInNs::from(ModuleDef::from(located_trait));
                     let import_path = trait_import_paths
@@ -636,6 +675,7 @@ fn trait_applicable_items(
                         import_path,
                         trait_item,
                         assoc_to_item(assoc),
+                        complete_in_flyimport,
                     ));
                 }
                 None::<()>
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs
index 8e25ad3472d3b..d26e5d62ced51 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs
@@ -6,20 +6,20 @@ use std::cmp::Ordering;
 
 use hir::Semantics;
 use syntax::{
-    algo,
+    Direction, NodeOrToken, SyntaxKind, SyntaxNode, algo,
     ast::{
-        self, edit_in_place::Removable, make, AstNode, HasAttrs, HasModuleItem, HasVisibility,
-        PathSegmentKind,
+        self, AstNode, HasAttrs, HasModuleItem, HasVisibility, PathSegmentKind,
+        edit_in_place::Removable, make,
     },
-    ted, Direction, NodeOrToken, SyntaxKind, SyntaxNode,
+    ted,
 };
 
 use crate::{
+    RootDatabase,
     imports::merge_imports::{
-        common_prefix, eq_attrs, eq_visibility, try_merge_imports, use_tree_cmp, MergeBehavior,
-        NormalizationStyle,
+        MergeBehavior, NormalizationStyle, common_prefix, eq_attrs, eq_visibility,
+        try_merge_imports, use_tree_cmp,
     },
-    RootDatabase,
 };
 
 pub use hir::PrefixKind;
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs
index decb0ea9d8a8a..428ba1d511897 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs
@@ -1,6 +1,6 @@
 use stdx::trim_indent;
 use test_fixture::WithFixture;
-use test_utils::{assert_eq_text, CURSOR_MARKER};
+use test_utils::{CURSOR_MARKER, assert_eq_text};
 
 use super::*;
 
@@ -1250,9 +1250,11 @@ fn check_with_config(
 ) {
     let (db, file_id, pos) = if ra_fixture_before.contains(CURSOR_MARKER) {
         let (db, file_id, range_or_offset) = RootDatabase::with_range_or_offset(ra_fixture_before);
+
         (db, file_id, Some(range_or_offset))
     } else {
         let (db, file_id) = RootDatabase::with_single_file(ra_fixture_before);
+
         (db, file_id, None)
     };
     let sema = &Semantics::new(&db);
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs
index 9e89dfe87abe5..61962e593476c 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs
@@ -5,13 +5,12 @@ use itertools::{EitherOrBoth, Itertools};
 use parser::T;
 use stdx::is_upper_snake_case;
 use syntax::{
-    algo,
+    Direction, SyntaxElement, algo,
     ast::{
-        self, edit_in_place::Removable, make, AstNode, HasAttrs, HasName, HasVisibility,
-        PathSegmentKind,
+        self, AstNode, HasAttrs, HasName, HasVisibility, PathSegmentKind, edit_in_place::Removable,
+        make,
     },
     ted::{self, Position},
-    Direction, SyntaxElement,
 };
 
 use crate::syntax_helpers::node_ext::vis_eq;
@@ -191,7 +190,7 @@ fn recursive_merge(lhs: &ast::UseTree, rhs: &ast::UseTree, merge: MergeBehavior)
                     && !use_trees.is_empty()
                     && rhs_t.use_tree_list().is_some() =>
             {
-                return None
+                return None;
             }
             Err(insert_idx) => {
                 use_trees.insert(insert_idx, rhs_t.clone());
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs b/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs
index 4d9c051354a61..e9385253250ad 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs
@@ -5,12 +5,12 @@
 use std::ops::ControlFlow;
 
 use either::Either;
-use hir::{import_map, Crate, ItemInNs, Module, Semantics};
+use hir::{Complete, Crate, ItemInNs, Module, import_map};
 
 use crate::{
+    RootDatabase,
     imports::import_assets::NameToImport,
     symbol_index::{self, SymbolsDatabase as _},
-    RootDatabase,
 };
 
 /// A value to use, when uncertain which limit to pick.
@@ -20,13 +20,13 @@ pub use import_map::AssocSearchMode;
 
 // FIXME: Do callbacks instead to avoid allocations.
 /// Searches for importable items with the given name in the crate and its dependencies.
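+/// Each result is paired with its [`Complete`] setting, which callers use to decide
+/// whether the item should still be offered by flyimport completion.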
-pub fn items_with_name<'a>(
-    sema: &'a Semantics<'_, RootDatabase>,
+pub fn items_with_name(
+    db: &RootDatabase,
     krate: Crate,
     name: NameToImport,
     assoc_item_search: AssocSearchMode,
-) -> impl Iterator<Item = ItemInNs> + 'a {
-    let _p = tracing::info_span!("items_with_name", name = name.text(), assoc_item_search = ?assoc_item_search, crate = ?krate.display_name(sema.db).map(|name| name.to_string()))
+) -> impl Iterator<Item = (ItemInNs, Complete)> {
+    let _p = tracing::info_span!("items_with_name", name = name.text(), assoc_item_search = ?assoc_item_search, crate = ?krate.display_name(db).map(|name| name.to_string()))
         .entered();
 
     let prefix = matches!(name, NameToImport::Prefix(..));
@@ -68,12 +68,12 @@ pub fn items_with_name<'a>(
         }
     };
 
-    find_items(sema, krate, local_query, external_query)
+    find_items(db, krate, local_query, external_query)
 }
 
 /// Searches for importable items with the given name in the crate and its dependencies.
 pub fn items_with_name_in_module<T>(
-    sema: &Semantics<'_, RootDatabase>,
+    db: &RootDatabase,
     module: Module,
     name: NameToImport,
     assoc_item_search: AssocSearchMode,
@@ -110,7 +110,7 @@ pub fn items_with_name_in_module<T>(
             local_query
         }
     };
-    local_query.search(&[sema.db.module_symbols(module)], |local_candidate| {
+    local_query.search(&[db.module_symbols(module)], |local_candidate| {
         cb(match local_candidate.def {
             hir::ModuleDef::Macro(macro_def) => ItemInNs::Macros(macro_def),
             def => ItemInNs::from(def),
@@ -118,32 +118,34 @@ pub fn items_with_name_in_module<T>(
     })
 }
 
-fn find_items<'a>(
-    sema: &'a Semantics<'_, RootDatabase>,
+fn find_items(
+    db: &RootDatabase,
     krate: Crate,
     local_query: symbol_index::Query,
     external_query: import_map::Query,
-) -> impl Iterator<Item = ItemInNs> + 'a {
+) -> impl Iterator<Item = (ItemInNs, Complete)> {
     let _p = tracing::info_span!("find_items").entered();
-    let db = sema.db;
 
     // NOTE: `external_query` includes `assoc_item_search`, so we don't need to
     // filter on our own.
-    let external_importables =
-        krate.query_external_importables(db, external_query).map(|external_importable| {
-            match external_importable {
+    let external_importables = krate.query_external_importables(db, external_query).map(
+        |(external_importable, do_not_complete)| {
+            let external_importable = match external_importable {
                 Either::Left(module_def) => ItemInNs::from(module_def),
                 Either::Right(macro_def) => ItemInNs::from(macro_def),
-            }
-        });
+            };
+            (external_importable, do_not_complete)
+        },
+    );
 
     // Query the local crate using the symbol index.
     let mut local_results = Vec::new();
     local_query.search(&symbol_index::crate_symbols(db, krate), |local_candidate| {
-        local_results.push(match local_candidate.def {
+        let def = match local_candidate.def {
             hir::ModuleDef::Macro(macro_def) => ItemInNs::Macros(macro_def),
             def => ItemInNs::from(def),
-        });
+        };
+        local_results.push((def, local_candidate.do_not_complete));
         ControlFlow::<()>::Continue(())
     });
     local_results.into_iter().chain(external_importables)
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
index 96115eee6dc2a..d3934e14abf90 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
@@ -45,18 +45,18 @@ pub mod syntax_helpers {
     pub use parser::LexedStr;
 }
 
-pub use hir::ChangeWithProcMacros;
+pub use hir::{ChangeWithProcMacros, EditionedFileId};
+use salsa::Durability;
 
 use std::{fmt, mem::ManuallyDrop};
 
 use base_db::{
-    ra_salsa::{self, Durability},
-    AnchoredPath, CrateId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
-    DEFAULT_FILE_TEXT_LRU_CAP,
+    CrateGraphBuilder, CratesMap, FileSourceRootInput, FileText, Files, RootQueryDb,
+    SourceDatabase, SourceRoot, SourceRootId, SourceRootInput, query_group,
 };
 use hir::{
-    db::{DefDatabase, ExpandDatabase, HirDatabase},
     FilePositionWrapper, FileRangeWrapper,
+    db::{DefDatabase, ExpandDatabase},
 };
 use triomphe::Arc;
 
@@ -67,7 +67,7 @@ pub use ::line_index;
 
 /// `base_db` is normally also needed in places where `ide_db` is used, so this re-export is for convenience.
 pub use base_db;
-pub use span::{EditionedFileId, FileId};
+pub use span::{self, FileId};
 
 pub type FxIndexSet<T> = indexmap::IndexSet<T, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;
 pub type FxIndexMap<K, V> =
@@ -76,22 +76,22 @@ pub type FxIndexMap<K, V> =
 pub type FilePosition = FilePositionWrapper<FileId>;
 pub type FileRange = FileRangeWrapper<FileId>;
 
-#[ra_salsa::database(
-    base_db::SourceRootDatabaseStorage,
-    base_db::SourceDatabaseStorage,
-    hir::db::ExpandDatabaseStorage,
-    hir::db::DefDatabaseStorage,
-    hir::db::HirDatabaseStorage,
-    hir::db::InternDatabaseStorage,
-    LineIndexDatabaseStorage,
-    symbol_index::SymbolsDatabaseStorage
-)]
+#[salsa::db]
 pub struct RootDatabase {
     // We use `ManuallyDrop` here because every codegen unit that contains a
     // `&RootDatabase -> &dyn OtherDatabase` cast will instantiate its drop glue in the vtable,
     // which duplicates `Weak::drop` and `Arc::drop` tens of thousands of times, which makes
     // compile times of all `ide_*` and downstream crates suffer greatly.
-    storage: ManuallyDrop<ra_salsa::Storage<RootDatabase>>,
+    storage: ManuallyDrop<salsa::Storage<Self>>,
+    files: Arc<Files>,
+    crates_map: Arc<CratesMap>,
+}
+
+impl std::panic::RefUnwindSafe for RootDatabase {}
+
+#[salsa::db]
+impl salsa::Database for RootDatabase {
+    fn salsa_event(&self, _event: &dyn Fn() -> salsa::Event) {}
 }
 
 impl Drop for RootDatabase {
@@ -100,43 +100,76 @@ impl Drop for RootDatabase {
     }
 }
 
+impl Clone for RootDatabase {
+    fn clone(&self) -> Self {
+        Self {
+            storage: self.storage.clone(),
+            files: self.files.clone(),
+            crates_map: self.crates_map.clone(),
+        }
+    }
+}
+
 impl fmt::Debug for RootDatabase {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         f.debug_struct("RootDatabase").finish()
     }
 }
 
-impl Upcast<dyn ExpandDatabase> for RootDatabase {
-    #[inline]
-    fn upcast(&self) -> &(dyn ExpandDatabase + 'static) {
-        self
+#[salsa::db]
+impl SourceDatabase for RootDatabase {
+    fn file_text(&self, file_id: vfs::FileId) -> FileText {
+        self.files.file_text(file_id)
     }
-}
 
-impl Upcast<dyn DefDatabase> for RootDatabase {
-    #[inline]
-    fn upcast(&self) -> &(dyn DefDatabase + 'static) {
-        self
+    fn set_file_text(&mut self, file_id: vfs::FileId, text: &str) {
+        let files = Arc::clone(&self.files);
+        files.set_file_text(self, file_id, text);
     }
-}
 
-impl Upcast<dyn HirDatabase> for RootDatabase {
-    #[inline]
-    fn upcast(&self) -> &(dyn HirDatabase + 'static) {
-        self
+    fn set_file_text_with_durability(
+        &mut self,
+        file_id: vfs::FileId,
+        text: &str,
+        durability: Durability,
+    ) {
+        let files = Arc::clone(&self.files);
+        files.set_file_text_with_durability(self, file_id, text, durability);
     }
-}
 
-impl FileLoader for RootDatabase {
-    fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
-        FileLoaderDelegate(self).resolve_path(path)
+    /// Returns the source root input for the given [`SourceRootId`].
+    fn source_root(&self, source_root_id: SourceRootId) -> SourceRootInput {
+        self.files.source_root(source_root_id)
     }
-    fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]> {
-        FileLoaderDelegate(self).relevant_crates(file_id)
+
+    fn set_source_root_with_durability(
+        &mut self,
+        source_root_id: SourceRootId,
+        source_root: Arc<SourceRoot>,
+        durability: Durability,
+    ) {
+        let files = Arc::clone(&self.files);
+        files.set_source_root_with_durability(self, source_root_id, source_root, durability);
+    }
+
+    fn file_source_root(&self, id: vfs::FileId) -> FileSourceRootInput {
+        self.files.file_source_root(id)
+    }
+
+    fn set_file_source_root_with_durability(
+        &mut self,
+        id: vfs::FileId,
+        source_root_id: SourceRootId,
+        durability: Durability,
+    ) {
+        let files = Arc::clone(&self.files);
+        files.set_file_source_root_with_durability(self, id, source_root_id, durability);
     }
-}
 
-impl ra_salsa::Database for RootDatabase {}
+    fn crates_map(&self) -> Arc<CratesMap> {
+        self.crates_map.clone()
+    }
+}
 
 impl Default for RootDatabase {
     fn default() -> RootDatabase {
@@ -146,14 +179,19 @@ impl Default for RootDatabase {
 
 impl RootDatabase {
     pub fn new(lru_capacity: Option<u16>) -> RootDatabase {
-        let mut db = RootDatabase { storage: ManuallyDrop::new(ra_salsa::Storage::default()) };
-        db.set_crate_graph_with_durability(Default::default(), Durability::HIGH);
-        db.set_proc_macros_with_durability(Default::default(), Durability::HIGH);
-        db.set_local_roots_with_durability(Default::default(), Durability::HIGH);
-        db.set_library_roots_with_durability(Default::default(), Durability::HIGH);
+        let mut db = RootDatabase {
+            storage: ManuallyDrop::new(salsa::Storage::default()),
+            files: Default::default(),
+            crates_map: Default::default(),
+        };
+        // This needs to be here otherwise `CrateGraphBuilder` will panic.
+        db.set_all_crates(Arc::new(Box::new([])));
+        CrateGraphBuilder::default().set_in_db(&mut db);
+        db.set_proc_macros_with_durability(Default::default(), Durability::MEDIUM);
+        db.set_local_roots_with_durability(Default::default(), Durability::MEDIUM);
+        db.set_library_roots_with_durability(Default::default(), Durability::MEDIUM);
         db.set_expand_proc_attr_macros_with_durability(false, Durability::HIGH);
         db.update_base_query_lru_capacities(lru_capacity);
-        db.setup_syntax_context_root();
         db
     }
 
@@ -161,57 +199,59 @@ impl RootDatabase {
         self.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH);
     }
 
-    pub fn update_base_query_lru_capacities(&mut self, lru_capacity: Option<u16>) {
-        let lru_capacity = lru_capacity.unwrap_or(base_db::DEFAULT_PARSE_LRU_CAP);
-        base_db::FileTextQuery.in_db_mut(self).set_lru_capacity(DEFAULT_FILE_TEXT_LRU_CAP);
-        base_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity);
-        // macro expansions are usually rather small, so we can afford to keep more of them alive
-        hir::db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity(4 * lru_capacity);
-        hir::db::BorrowckQuery.in_db_mut(self).set_lru_capacity(base_db::DEFAULT_BORROWCK_LRU_CAP);
-        hir::db::BodyWithSourceMapQuery.in_db_mut(self).set_lru_capacity(2048);
+    pub fn update_base_query_lru_capacities(&mut self, _lru_capacity: Option<u16>) {
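+        // FIXME(salsa-transition): bring this back; allow changing LRU settings at runtime.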
+        // let lru_capacity = lru_capacity.unwrap_or(base_db::DEFAULT_PARSE_LRU_CAP);
+        // base_db::FileTextQuery.in_db_mut(self).set_lru_capacity(DEFAULT_FILE_TEXT_LRU_CAP);
+        // base_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity);
+        // // macro expansions are usually rather small, so we can afford to keep more of them alive
+        // hir::db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity(4 * lru_capacity);
+        // hir::db::BorrowckQuery.in_db_mut(self).set_lru_capacity(base_db::DEFAULT_BORROWCK_LRU_CAP);
+        // hir::db::BodyWithSourceMapQuery.in_db_mut(self).set_lru_capacity(2048);
     }
 
-    pub fn update_lru_capacities(&mut self, lru_capacities: &FxHashMap<Box<str>, u16>) {
-        use hir::db as hir_db;
-
-        base_db::FileTextQuery.in_db_mut(self).set_lru_capacity(DEFAULT_FILE_TEXT_LRU_CAP);
-        base_db::ParseQuery.in_db_mut(self).set_lru_capacity(
-            lru_capacities
-                .get(stringify!(ParseQuery))
-                .copied()
-                .unwrap_or(base_db::DEFAULT_PARSE_LRU_CAP),
-        );
-        hir_db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity(
-            lru_capacities
-                .get(stringify!(ParseMacroExpansionQuery))
-                .copied()
-                .unwrap_or(4 * base_db::DEFAULT_PARSE_LRU_CAP),
-        );
-        hir_db::BorrowckQuery.in_db_mut(self).set_lru_capacity(
-            lru_capacities
-                .get(stringify!(BorrowckQuery))
-                .copied()
-                .unwrap_or(base_db::DEFAULT_BORROWCK_LRU_CAP),
-        );
-        hir::db::BodyWithSourceMapQuery.in_db_mut(self).set_lru_capacity(2048);
+    pub fn update_lru_capacities(&mut self, _lru_capacities: &FxHashMap<Box<str>, u16>) {
+        // FIXME(salsa-transition): bring this back; allow changing LRU settings at runtime.
+        // use hir::db as hir_db;
+
+        // base_db::FileTextQuery.in_db_mut(self).set_lru_capacity(DEFAULT_FILE_TEXT_LRU_CAP);
+        // base_db::ParseQuery.in_db_mut(self).set_lru_capacity(
+        //     lru_capacities
+        //         .get(stringify!(ParseQuery))
+        //         .copied()
+        //         .unwrap_or(base_db::DEFAULT_PARSE_LRU_CAP),
+        // );
+        // hir_db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity(
+        //     lru_capacities
+        //         .get(stringify!(ParseMacroExpansionQuery))
+        //         .copied()
+        //         .unwrap_or(4 * base_db::DEFAULT_PARSE_LRU_CAP),
+        // );
+        // hir_db::BorrowckQuery.in_db_mut(self).set_lru_capacity(
+        //     lru_capacities
+        //         .get(stringify!(BorrowckQuery))
+        //         .copied()
+        //         .unwrap_or(base_db::DEFAULT_BORROWCK_LRU_CAP),
+        // );
+        // hir::db::BodyWithSourceMapQuery.in_db_mut(self).set_lru_capacity(2048);
     }
-}
 
-impl ra_salsa::ParallelDatabase for RootDatabase {
-    fn snapshot(&self) -> ra_salsa::Snapshot<RootDatabase> {
-        ra_salsa::Snapshot::new(RootDatabase {
-            storage: ManuallyDrop::new(self.storage.snapshot()),
-        })
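+    /// Equivalent to the `Clone` impl above; replaces the old
+    /// `ra_salsa::ParallelDatabase::snapshot`.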
+    pub fn snapshot(&self) -> Self {
+        Self {
+            storage: self.storage.clone(),
+            files: self.files.clone(),
+            crates_map: self.crates_map.clone(),
+        }
     }
 }
 
-#[ra_salsa::query_group(LineIndexDatabaseStorage)]
-pub trait LineIndexDatabase: base_db::SourceDatabase {
+#[query_group::query_group]
+pub trait LineIndexDatabase: base_db::RootQueryDb {
+    #[salsa::invoke_interned(line_index)]
     fn line_index(&self, file_id: FileId) -> Arc<LineIndex>;
 }
 
 fn line_index(db: &dyn LineIndexDatabase, file_id: FileId) -> Arc<LineIndex> {
-    let text = db.file_text(file_id);
+    let text = db.file_text(file_id).text(db);
     Arc::new(LineIndex::new(&text))
 }
 
@@ -288,11 +328,7 @@ pub struct SnippetCap {
 
 impl SnippetCap {
     pub const fn new(allow_snippets: bool) -> Option<SnippetCap> {
-        if allow_snippets {
-            Some(SnippetCap { _private: () })
-        } else {
-            None
-        }
+        if allow_snippets { Some(SnippetCap { _private: () }) } else { None }
     }
 }
 
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs
index a348a4ef7d3fb..232648af661ff 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs
@@ -7,8 +7,9 @@ use itertools::Itertools;
 use rustc_hash::FxHashMap;
 use span::Edition;
 use syntax::{
-    ast::{self, make, AstNode, HasGenericArgs},
-    ted, NodeOrToken, SyntaxNode,
+    NodeOrToken, SyntaxNode,
+    ast::{self, AstNode, HasGenericArgs, make},
+    ted,
 };
 
 #[derive(Default)]
@@ -209,7 +210,7 @@ impl<'a> PathTransform<'a> {
             .flat_map(|it| it.lifetime_params(db))
             .zip(self.substs.lifetimes.clone())
             .filter_map(|(k, v)| {
-                Some((k.name(db).display(db.upcast(), target_edition).to_string(), v.lifetime()?))
+                Some((k.name(db).display(db, target_edition).to_string(), v.lifetime()?))
             })
             .collect();
         let ctx = Ctx {
@@ -324,7 +325,7 @@ impl Ctx<'_> {
                                 allow_unstable: true,
                             };
                             let found_path = self.target_module.find_path(
-                                self.source_scope.db.upcast(),
+                                self.source_scope.db,
                                 hir::ModuleDef::Trait(trait_ref),
                                 cfg,
                             )?;
@@ -383,8 +384,7 @@ impl Ctx<'_> {
                     prefer_absolute: false,
                     allow_unstable: true,
                 };
-                let found_path =
-                    self.target_module.find_path(self.source_scope.db.upcast(), def, cfg)?;
+                let found_path = self.target_module.find_path(self.source_scope.db, def, cfg)?;
                 let res = mod_path_to_ast(&found_path, self.target_edition).clone_for_update();
                 if let Some(args) = path.segment().and_then(|it| it.generic_arg_list()) {
                     if let Some(segment) = res.segment() {
@@ -424,7 +424,7 @@ impl Ctx<'_> {
                             allow_unstable: true,
                         };
                         let found_path = self.target_module.find_path(
-                            self.source_scope.db.upcast(),
+                            self.source_scope.db,
                             ModuleDef::from(adt),
                             cfg,
                         )?;
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs b/src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs
index 22dc3d9e29d65..17c3f75ce1731 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs
@@ -6,16 +6,14 @@ mod topologic_sort;
 
 use std::time::Duration;
 
-use hir::{db::DefDatabase, Symbol};
+use hir::{Symbol, db::DefDatabase};
 use itertools::Itertools;
+use salsa::{Cancelled, Database};
 
 use crate::{
-    base_db::{
-        ra_salsa::{Database, ParallelDatabase, Snapshot},
-        Cancelled, CrateId, SourceDatabase,
-    },
-    symbol_index::SymbolsDatabase,
     FxIndexMap, RootDatabase,
+    base_db::{Crate, RootQueryDb},
+    symbol_index::SymbolsDatabase,
 };
 
 /// We're indexing many crates.
@@ -37,20 +35,22 @@ pub fn parallel_prime_caches(
 ) {
     let _p = tracing::info_span!("parallel_prime_caches").entered();
 
-    let graph = db.crate_graph();
     let mut crates_to_prime = {
+        // FIXME: We already have the crate list topologically sorted (but without the things
+        // `TopologicalSortIter` gives us). Maybe there is a way to avoid using it and rip it out
+        // of the codebase?
         let mut builder = topologic_sort::TopologicalSortIter::builder();
 
-        for crate_id in graph.iter() {
-            builder.add(crate_id, graph[crate_id].dependencies.iter().map(|d| d.crate_id));
+        for &crate_id in db.all_crates().iter() {
+            builder.add(crate_id, crate_id.data(db).dependencies.iter().map(|d| d.crate_id));
         }
 
         builder.build()
     };
 
     enum ParallelPrimeCacheWorkerProgress {
-        BeginCrate { crate_id: CrateId, crate_name: Symbol },
-        EndCrate { crate_id: CrateId },
+        BeginCrate { crate_id: Crate, crate_name: Symbol },
+        EndCrate { crate_id: Crate },
     }
 
     // We split off def map computation from other work,
@@ -66,7 +66,7 @@ pub fn parallel_prime_caches(
     let (work_sender, progress_receiver) = {
         let (progress_sender, progress_receiver) = crossbeam_channel::unbounded();
         let (work_sender, work_receiver) = crossbeam_channel::unbounded();
-        let prime_caches_worker = move |db: Snapshot<RootDatabase>| {
+        let prime_caches_worker = move |db: RootDatabase| {
             while let Ok((crate_id, crate_name, kind)) = work_receiver.recv() {
                 progress_sender
                     .send(ParallelPrimeCacheWorkerProgress::BeginCrate { crate_id, crate_name })?;
@@ -90,7 +90,7 @@ pub fn parallel_prime_caches(
             stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker)
                 .allow_leak(true)
                 .name(format!("PrimeCaches#{id}"))
-                .spawn(move || Cancelled::catch(|| worker(db)))
+                .spawn(move || Cancelled::catch(|| worker(db.snapshot())))
                 .expect("failed to spawn thread");
         }
 
@@ -108,18 +108,16 @@ pub fn parallel_prime_caches(
     let mut additional_phases = vec![];
 
     while crates_done < crates_total {
-        db.unwind_if_cancelled();
-
-        for crate_id in &mut crates_to_prime {
-            let krate = &graph[crate_id];
-            let name = krate
-                .display_name
-                .as_deref()
-                .cloned()
-                .unwrap_or_else(|| Symbol::integer(crate_id.into_raw().into_u32() as usize));
-            if krate.origin.is_lang() {
-                additional_phases.push((crate_id, name.clone(), PrimingPhase::ImportMap));
-            } else if krate.origin.is_local() {
+        db.unwind_if_revision_cancelled();
+
+        for krate in &mut crates_to_prime {
+            let name = krate.extra_data(db).display_name.as_deref().cloned().unwrap_or_else(|| {
+                Symbol::integer(salsa::plumbing::AsId::as_id(&krate).as_u32() as usize)
+            });
+            let origin = &krate.data(db).origin;
+            if origin.is_lang() {
+                additional_phases.push((krate, name.clone(), PrimingPhase::ImportMap));
+            } else if origin.is_local() {
                 // Compute the symbol search index.
                 // This primes the cache for `ide_db::symbol_index::world_symbols()`.
                 //
@@ -129,10 +127,10 @@ pub fn parallel_prime_caches(
                 // FIXME: We should do it unconditionally if the configuration is set to default to
                 // searching dependencies (rust-analyzer.workspace.symbol.search.scope), but we
                 // would need to pipe that configuration information down here.
-                additional_phases.push((crate_id, name.clone(), PrimingPhase::CrateSymbols));
+                additional_phases.push((krate, name.clone(), PrimingPhase::CrateSymbols));
             }
 
-            work_sender.send((crate_id, name, PrimingPhase::DefMap)).ok();
+            work_sender.send((krate, name, PrimingPhase::DefMap)).ok();
         }
 
         // recv_timeout is somewhat a hack; we need a way for this thread to check whether the current salsa revision
@@ -145,7 +143,7 @@ pub fn parallel_prime_caches(
             }
             Err(crossbeam_channel::RecvTimeoutError::Disconnected) => {
                 // our workers may have died from a cancelled task, so we'll check and re-raise here.
-                db.unwind_if_cancelled();
+                db.unwind_if_revision_cancelled();
                 break;
             }
         };
@@ -177,7 +175,7 @@ pub fn parallel_prime_caches(
     }
 
     while crates_done < crates_total {
-        db.unwind_if_cancelled();
+        db.unwind_if_revision_cancelled();
 
         // recv_timeout is somewhat a hack; we need a way for this thread to check whether the current salsa revision
         // is cancelled on a regular basis. workers will only exit if they are processing a task that is cancelled, or
@@ -189,7 +187,7 @@ pub fn parallel_prime_caches(
             }
             Err(crossbeam_channel::RecvTimeoutError::Disconnected) => {
                 // our workers may have died from a cancelled task, so we'll check and re-raise here.
-                db.unwind_if_cancelled();
+                db.unwind_if_revision_cancelled();
                 break;
             }
         };
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/prime_caches/topologic_sort.rs b/src/tools/rust-analyzer/crates/ide-db/src/prime_caches/topologic_sort.rs
index 7353d71fa4f86..c8a0386310367 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/prime_caches/topologic_sort.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/prime_caches/topologic_sort.rs
@@ -7,14 +7,20 @@ pub(crate) struct TopologicSortIterBuilder<T> {
     nodes: FxHashMap<T, Entry<T>>,
 }
 
-impl<T> TopologicSortIterBuilder<T>
+// this implementation has different bounds on T than would be implied by #[derive(Default)]
+impl<T> Default for TopologicSortIterBuilder<T>
 where
     T: Copy + Eq + PartialEq + Hash,
 {
-    fn new() -> Self {
+    fn default() -> Self {
         Self { nodes: Default::default() }
     }
+}
 
+impl<T> TopologicSortIterBuilder<T>
+where
+    T: Copy + Eq + PartialEq + Hash,
+{
     fn get_or_create_entry(&mut self, item: T) -> &mut Entry<T> {
         self.nodes.entry(item).or_default()
     }
@@ -54,7 +60,7 @@ where
     T: Copy + Eq + PartialEq + Hash,
 {
     pub(crate) fn builder() -> TopologicSortIterBuilder<T> {
-        TopologicSortIterBuilder::new()
+        TopologicSortIterBuilder::default()
     }
 
     pub(crate) fn pending(&self) -> usize {
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
index 1633065f65217..b8119e1aab366 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
@@ -28,22 +28,22 @@ use crate::{
 };
 use base_db::AnchoredPathBuf;
 use either::Either;
-use hir::{FieldSource, FileRange, HirFileIdExt, InFile, ModuleSource, Semantics};
-use span::{Edition, EditionedFileId, FileId, SyntaxContextId};
-use stdx::{never, TupleExt};
+use hir::{EditionedFileId, FieldSource, FileRange, InFile, ModuleSource, Semantics};
+use span::{Edition, FileId, SyntaxContext};
+use stdx::{TupleExt, never};
 use syntax::{
+    AstNode, SyntaxKind, T, TextRange,
     ast::{self, HasName},
     utils::is_raw_identifier,
-    AstNode, SyntaxKind, TextRange, T,
 };
 
 use crate::{
+    RootDatabase,
     defs::Definition,
     search::{FileReference, FileReferenceNode},
     source_change::{FileSystemEdit, SourceChange},
     syntax_helpers::node_ext::expr_as_name_ref,
     traits::convert_to_def_in_trait,
-    RootDatabase,
 };
 
 pub type Result<T, E = RenameError> = std::result::Result<T, E>;
@@ -113,7 +113,7 @@ impl Definition {
     /// renamed and extern crate names will report its range, though a rename will introduce
     /// an alias instead.
     pub fn range_for_rename(self, sema: &Semantics<'_, RootDatabase>) -> Option<FileRange> {
-        let syn_ctx_is_root = |(range, ctx): (_, SyntaxContextId)| ctx.is_root().then_some(range);
+        let syn_ctx_is_root = |(range, ctx): (_, SyntaxContext)| ctx.is_root().then_some(range);
         let res = match self {
             Definition::Macro(mac) => {
                 let src = sema.source(mac)?;
@@ -220,7 +220,7 @@ impl Definition {
         fn name_range<D>(
             def: D,
             sema: &Semantics<'_, RootDatabase>,
-        ) -> Option<(FileRange, SyntaxContextId)>
+        ) -> Option<(FileRange, SyntaxContext)>
         where
             D: hir::HasSource,
             D::Ast: ast::HasName,
@@ -249,7 +249,7 @@ fn rename_mod(
 
     let InFile { file_id, value: def_source } = module.definition_source(sema.db);
     if let ModuleSource::SourceFile(..) = def_source {
-        let anchor = file_id.original_file(sema.db).file_id();
+        let anchor = file_id.original_file(sema.db).file_id(sema.db);
 
         let is_mod_rs = module.is_mod_rs(sema.db);
         let has_detached_child = module.children(sema.db).any(|child| !child.is_inline(sema.db));
@@ -296,13 +296,13 @@ fn rename_mod(
                     .original_file_range_opt(sema.db)
                     .map(TupleExt::head)
                 {
-                    let new_name = if is_raw_identifier(new_name, file_id.edition()) {
+                    let new_name = if is_raw_identifier(new_name, file_id.edition(sema.db)) {
                         format!("r#{new_name}")
                     } else {
                         new_name.to_owned()
                     };
                     source_change.insert_source_edit(
-                        file_id.file_id(),
+                        file_id.file_id(sema.db),
                         TextEdit::replace(file_range.range, new_name),
                     )
                 };
@@ -315,8 +315,8 @@ fn rename_mod(
     let usages = def.usages(sema).all();
     let ref_edits = usages.iter().map(|(file_id, references)| {
         (
-            EditionedFileId::file_id(file_id),
-            source_edit_from_references(references, def, new_name, file_id.edition()),
+            file_id.file_id(sema.db),
+            source_edit_from_references(references, def, new_name, file_id.edition(sema.db)),
         )
     });
     source_change.extend(ref_edits);
@@ -362,19 +362,15 @@ fn rename_reference(
     let mut source_change = SourceChange::default();
     source_change.extend(usages.iter().map(|(file_id, references)| {
         (
-            EditionedFileId::file_id(file_id),
-            source_edit_from_references(references, def, new_name, file_id.edition()),
+            file_id.file_id(sema.db),
+            source_edit_from_references(references, def, new_name, file_id.edition(sema.db)),
         )
     }));
 
-    let mut insert_def_edit = |def| {
-        let (file_id, edit) = source_edit_from_def(sema, def, new_name, &mut source_change)?;
-        source_change.insert_source_edit(file_id, edit);
-        Ok(())
-    };
     // This needs to come after the reference edits, because we change the annotation of existing edits
     // if a conflict is detected.
-    insert_def_edit(def)?;
+    let (file_id, edit) = source_edit_from_def(sema, def, new_name, &mut source_change)?;
+    source_change.insert_source_edit(file_id, edit);
     Ok(source_change)
 }
 
@@ -545,7 +541,7 @@ fn source_edit_from_def(
     source_change: &mut SourceChange,
 ) -> Result<(FileId, TextEdit)> {
     let new_name_edition_aware = |new_name: &str, file_id: EditionedFileId| {
-        if is_raw_identifier(new_name, file_id.edition()) {
+        if is_raw_identifier(new_name, file_id.edition(sema.db)) {
             format!("r#{new_name}")
         } else {
             new_name.to_owned()
@@ -642,7 +638,7 @@ fn source_edit_from_def(
         edit.set_annotation(conflict_annotation);
 
         let Some(file_id) = file_id else { bail!("No file available to rename") };
-        return Ok((EditionedFileId::file_id(file_id), edit));
+        return Ok((file_id.file_id(sema.db), edit));
     }
     let FileRange { file_id, range } = def
         .range_for_rename(sema)
@@ -658,7 +654,7 @@ fn source_edit_from_def(
         _ => (range, new_name.to_owned()),
     };
     edit.replace(range, new_name_edition_aware(&new_name, file_id));
-    Ok((file_id.file_id(), edit.finish()))
+    Ok((file_id.file_id(sema.db), edit.finish()))
 }
 
 #[derive(Copy, Clone, Debug, PartialEq)]
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs
index 02cd8b8bdf510..30be5bc21b498 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs
@@ -7,28 +7,28 @@
 use std::mem;
 use std::{cell::LazyCell, cmp::Reverse};
 
-use base_db::{ra_salsa::Database, SourceDatabase, SourceRootDatabase};
+use base_db::{RootQueryDb, SourceDatabase};
 use either::Either;
 use hir::{
-    sym, Adt, AsAssocItem, DefWithBody, FileRange, FileRangeWrapper, HasAttrs, HasContainer,
-    HasSource, HirFileIdExt, InFile, InFileWrapper, InRealFile, InlineAsmOperand, ItemContainer,
-    ModuleSource, PathResolution, Semantics, Visibility,
+    Adt, AsAssocItem, DefWithBody, EditionedFileId, FileRange, FileRangeWrapper, HasAttrs,
+    HasContainer, HasSource, InFile, InFileWrapper, InRealFile, InlineAsmOperand, ItemContainer,
+    ModuleSource, PathResolution, Semantics, Visibility, sym,
 };
 use memchr::memmem::Finder;
 use parser::SyntaxKind;
 use rustc_hash::{FxHashMap, FxHashSet};
-use span::EditionedFileId;
+use salsa::Database;
 use syntax::{
+    AstNode, AstToken, SmolStr, SyntaxElement, SyntaxNode, TextRange, TextSize, ToSmolStr,
     ast::{self, HasName, Rename},
-    match_ast, AstNode, AstToken, SmolStr, SyntaxElement, SyntaxNode, TextRange, TextSize,
-    ToSmolStr,
+    match_ast,
 };
 use triomphe::Arc;
 
 use crate::{
+    RootDatabase,
     defs::{Definition, NameClass, NameRefClass},
     traits::{as_trait_assoc_def, convert_to_def_in_trait},
-    RootDatabase,
 };
 
 #[derive(Debug, Default, Clone)]
@@ -161,13 +161,15 @@ impl SearchScope {
     fn crate_graph(db: &RootDatabase) -> SearchScope {
         let mut entries = FxHashMap::default();
 
-        let graph = db.crate_graph();
-        for krate in graph.iter() {
-            let root_file = graph[krate].root_file_id;
-            let source_root_id = db.file_source_root(root_file);
-            let source_root = db.source_root(source_root_id);
+        let all_crates = db.all_crates();
+        for &krate in all_crates.iter() {
+            let crate_data = krate.data(db);
+            let source_root = db.file_source_root(crate_data.root_file_id).source_root_id(db);
+            let source_root = db.source_root(source_root).source_root(db);
             entries.extend(
-                source_root.iter().map(|id| (EditionedFileId::new(id, graph[krate].edition), None)),
+                source_root
+                    .iter()
+                    .map(|id| (EditionedFileId::new(db, id, crate_data.edition), None)),
             );
         }
         SearchScope { entries }
@@ -178,10 +180,13 @@ impl SearchScope {
         let mut entries = FxHashMap::default();
         for rev_dep in of.transitive_reverse_dependencies(db) {
             let root_file = rev_dep.root_file(db);
-            let source_root_id = db.file_source_root(root_file);
-            let source_root = db.source_root(source_root_id);
+
+            let source_root = db.file_source_root(root_file).source_root_id(db);
+            let source_root = db.source_root(source_root).source_root(db);
             entries.extend(
-                source_root.iter().map(|id| (EditionedFileId::new(id, rev_dep.edition(db)), None)),
+                source_root
+                    .iter()
+                    .map(|id| (EditionedFileId::new(db, id, rev_dep.edition(db)), None)),
             );
         }
         SearchScope { entries }
@@ -190,12 +195,13 @@ impl SearchScope {
     /// Build a search scope spanning the given crate.
     fn krate(db: &RootDatabase, of: hir::Crate) -> SearchScope {
         let root_file = of.root_file(db);
-        let source_root_id = db.file_source_root(root_file);
-        let source_root = db.source_root(source_root_id);
+
+        let source_root_id = db.file_source_root(root_file).source_root_id(db);
+        let source_root = db.source_root(source_root_id).source_root(db);
         SearchScope {
             entries: source_root
                 .iter()
-                .map(|id| (EditionedFileId::new(id, of.edition(db)), None))
+                .map(|id| (EditionedFileId::new(db, id, of.edition(db)), None))
                 .collect(),
         }
     }
@@ -308,8 +314,6 @@ impl Definition {
                 DefWithBody::Const(c) => c.source(db).map(|src| src.syntax().cloned()),
                 DefWithBody::Static(s) => s.source(db).map(|src| src.syntax().cloned()),
                 DefWithBody::Variant(v) => v.source(db).map(|src| src.syntax().cloned()),
-                // FIXME: implement
-                DefWithBody::InTypeConst(_) => return SearchScope::empty(),
             };
             return match def {
                 Some(def) => SearchScope::file_range(
@@ -325,8 +329,6 @@ impl Definition {
                 DefWithBody::Const(c) => c.source(db).map(|src| src.syntax().cloned()),
                 DefWithBody::Static(s) => s.source(db).map(|src| src.syntax().cloned()),
                 DefWithBody::Variant(v) => v.source(db).map(|src| src.syntax().cloned()),
-                // FIXME: implement
-                DefWithBody::InTypeConst(_) => return SearchScope::empty(),
             };
             return match def {
                 Some(def) => SearchScope::file_range(
@@ -367,7 +369,7 @@ impl Definition {
         if let Definition::Macro(macro_def) = self {
             return match macro_def.kind(db) {
                 hir::MacroKind::Declarative => {
-                    if macro_def.attrs(db).by_key(&sym::macro_export).exists() {
+                    if macro_def.attrs(db).by_key(sym::macro_export).exists() {
                         SearchScope::reverse_dependencies(db, module.krate())
                     } else {
                         SearchScope::krate(db, module.krate())
@@ -483,7 +485,7 @@ impl<'a> FindUsages<'a> {
         scope: &'b SearchScope,
     ) -> impl Iterator<Item = (Arc<str>, EditionedFileId, TextRange)> + 'b {
         scope.entries.iter().map(|(&file_id, &search_range)| {
-            let text = db.file_text(file_id.file_id());
+            let text = db.file_text(file_id.file_id(db)).text(db);
             let search_range =
                 search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(&*text)));
 
@@ -853,7 +855,10 @@ impl<'a> FindUsages<'a> {
                 name,
                 is_possibly_self.into_iter().map(|position| {
                     (
-                        self.sema.db.file_text(position.file_id.file_id()),
+                        self.sema
+                            .db
+                            .file_text(position.file_id.file_id(self.sema.db))
+                            .text(self.sema.db),
                         position.file_id,
                         position.range,
                     )
@@ -947,7 +952,6 @@ impl<'a> FindUsages<'a> {
         let include_self_kw_refs =
             self.include_self_kw_refs.as_ref().map(|ty| (ty, Finder::new("Self")));
         for (text, file_id, search_range) in Self::scope_files(sema.db, &search_scope) {
-            self.sema.db.unwind_if_cancelled();
             let tree = LazyCell::new(move || sema.parse(file_id).syntax().clone());
 
             // Search for occurrences of the item's name
@@ -1001,7 +1005,8 @@ impl<'a> FindUsages<'a> {
             let finder = &Finder::new("super");
 
             for (text, file_id, search_range) in Self::scope_files(sema.db, &scope) {
-                self.sema.db.unwind_if_cancelled();
+                self.sema.db.unwind_if_revision_cancelled();
+
                 let tree = LazyCell::new(move || sema.parse(file_id).syntax().clone());
 
                 for offset in Self::match_indices(&text, finder, search_range) {
@@ -1050,7 +1055,8 @@ impl<'a> FindUsages<'a> {
                     return;
                 };
 
-                let text = sema.db.file_text(file_id.file_id());
+                let file_text = sema.db.file_text(file_id.file_id(self.sema.db));
+                let text = file_text.text(sema.db);
                 let search_range =
                     search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(&*text)));
 
@@ -1279,7 +1285,7 @@ impl<'a> FindUsages<'a> {
                         if convert_to_def_in_trait(self.sema.db, def)
                             != convert_to_def_in_trait(self.sema.db, self.def) =>
                     {
-                        return false
+                        return false;
                     }
                     (Some(_), Definition::TypeAlias(_)) => {}
                     // We're looking at an assoc item of a trait definition, so reference all the
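The hunks above repeatedly rewrite `db.file_text(file_id.file_id())` into `db.file_text(file_id.file_id(db)).text(db)`: `file_text` now hands back a handle whose contents are read through the database. A minimal sketch of that accessor shape, using made-up stand-in types (`Db`, `FileText`) rather than the real salsa-generated ones:

```rust
use std::sync::Arc;

/// Stand-in database that owns the file contents.
struct Db {
    contents: Vec<Arc<str>>,
}

/// Stand-in for the value returned by `file_text`: a cheap handle, not the text itself.
#[derive(Clone, Copy)]
struct FileText(usize);

impl Db {
    fn file_text(&self, file_id: usize) -> FileText {
        FileText(file_id)
    }
}

impl FileText {
    /// The actual text is fetched through the database, mirroring the
    /// `file_text(...).text(db)` pattern in the diff.
    fn text(self, db: &Db) -> Arc<str> {
        db.contents[self.0].clone()
    }
}

fn main() {
    let db = Db { contents: vec![Arc::from("fn main() {}")] };
    let text = db.file_text(0).text(&db);
    assert_eq!(&*text, "fn main() {}");
}
```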
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs b/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs
index b4d0b0dc9f0af..b1b58d6568cb5 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs
@@ -6,7 +6,7 @@
 use std::{collections::hash_map::Entry, fmt, iter, mem};
 
 use crate::text_edit::{TextEdit, TextEditBuilder};
-use crate::{assists::Command, syntax_helpers::tree_diff::diff, SnippetCap};
+use crate::{SnippetCap, assists::Command, syntax_helpers::tree_diff::diff};
 use base_db::AnchoredPathBuf;
 use itertools::Itertools;
 use nohash_hasher::IntMap;
@@ -14,8 +14,8 @@ use rustc_hash::FxHashMap;
 use span::FileId;
 use stdx::never;
 use syntax::{
-    syntax_editor::{SyntaxAnnotation, SyntaxEditor},
     AstNode, SyntaxElement, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextSize,
+    syntax_editor::{SyntaxAnnotation, SyntaxEditor},
 };
 
 /// An annotation ID associated with an indel, to describe changes.
@@ -469,7 +469,7 @@ impl SourceChangeBuilder {
     }
 
     fn add_snippet_annotation(&mut self, kind: AnnotationSnippet) -> SyntaxAnnotation {
-        let annotation = SyntaxAnnotation::new();
+        let annotation = SyntaxAnnotation::default();
         self.snippet_annotations.push((kind, annotation));
         self.source_change.is_snippet = true;
         annotation
@@ -479,13 +479,14 @@ impl SourceChangeBuilder {
         self.commit();
 
         // Only one file can have snippet edits
-        stdx::never!(self
-            .source_change
-            .source_file_edits
-            .iter()
-            .filter(|(_, (_, snippet_edit))| snippet_edit.is_some())
-            .at_most_one()
-            .is_err());
+        stdx::never!(
+            self.source_change
+                .source_file_edits
+                .iter()
+                .filter(|(_, (_, snippet_edit))| snippet_edit.is_some())
+                .at_most_one()
+                .is_err()
+        );
 
         mem::take(&mut self.source_change)
     }
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs
index 2737436993deb..d1ba79e8c785e 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs
@@ -27,16 +27,13 @@ use std::{
     ops::ControlFlow,
 };
 
-use base_db::{
-    ra_salsa::{self, ParallelDatabase},
-    SourceRootDatabase, SourceRootId, Upcast,
-};
-use fst::{raw::IndexedValue, Automaton, Streamer};
+use base_db::{RootQueryDb, SourceDatabase, SourceRootId};
+use fst::{Automaton, Streamer, raw::IndexedValue};
 use hir::{
+    Crate, Module,
     db::HirDatabase,
     import_map::{AssocSearchMode, SearchMode},
     symbols::{FileSymbol, SymbolCollector},
-    Crate, Module,
 };
 use rayon::prelude::*;
 use rustc_hash::FxHashSet;
@@ -99,38 +96,42 @@ impl Query {
     }
 }
 
-#[ra_salsa::query_group(SymbolsDatabaseStorage)]
-pub trait SymbolsDatabase: HirDatabase + SourceRootDatabase + Upcast<dyn HirDatabase> {
+#[query_group::query_group]
+pub trait SymbolsDatabase: HirDatabase + SourceDatabase {
     /// The symbol index for a given module. These modules should only be in source roots that
     /// are inside local_roots.
+    // FIXME: Is it worth breaking the encapsulation boundary of `hir`, and making this take a `ModuleId`,
+    // in order for it to be a non-interned query?
+    #[salsa::invoke_interned(module_symbols)]
     fn module_symbols(&self, module: Module) -> Arc<SymbolIndex>;
 
     /// The symbol index for a given source root within library_roots.
+    #[salsa::invoke_interned(library_symbols)]
     fn library_symbols(&self, source_root_id: SourceRootId) -> Arc<SymbolIndex>;
 
-    #[ra_salsa::transparent]
+    #[salsa::transparent]
     /// The symbol indices of modules that make up a given crate.
     fn crate_symbols(&self, krate: Crate) -> Box<[Arc<SymbolIndex>]>;
 
     /// The set of "local" (that is, from the current workspace) roots.
     /// Files in local roots are assumed to change frequently.
-    #[ra_salsa::input]
+    #[salsa::input]
     fn local_roots(&self) -> Arc<FxHashSet<SourceRootId>>;
 
     /// The set of roots for crates.io libraries.
     /// Files in libraries are assumed to never change.
-    #[ra_salsa::input]
+    #[salsa::input]
     fn library_roots(&self) -> Arc<FxHashSet<SourceRootId>>;
 }
 
 fn library_symbols(db: &dyn SymbolsDatabase, source_root_id: SourceRootId) -> Arc<SymbolIndex> {
     let _p = tracing::info_span!("library_symbols").entered();
 
-    let mut symbol_collector = SymbolCollector::new(db.upcast());
+    let mut symbol_collector = SymbolCollector::new(db);
 
     db.source_root_crates(source_root_id)
         .iter()
-        .flat_map(|&krate| Crate::from(krate).modules(db.upcast()))
+        .flat_map(|&krate| Crate::from(krate).modules(db))
         // we specifically avoid calling other SymbolsDatabase queries here, even though they do the same thing,
         // as the index for a library is not really ever going to change, and we do not want to store each
         // of the module or crate indices for those in salsa unless we need to.
@@ -142,32 +143,12 @@ fn library_symbols(db: &dyn SymbolsDatabase, source_root_id: SourceRootId) -> Ar
 fn module_symbols(db: &dyn SymbolsDatabase, module: Module) -> Arc<SymbolIndex> {
     let _p = tracing::info_span!("module_symbols").entered();
 
-    Arc::new(SymbolIndex::new(SymbolCollector::new_module(db.upcast(), module)))
+    Arc::new(SymbolIndex::new(SymbolCollector::new_module(db, module)))
 }
 
 pub fn crate_symbols(db: &dyn SymbolsDatabase, krate: Crate) -> Box<[Arc<SymbolIndex>]> {
     let _p = tracing::info_span!("crate_symbols").entered();
-    krate.modules(db.upcast()).into_iter().map(|module| db.module_symbols(module)).collect()
-}
-
-/// Need to wrap Snapshot to provide `Clone` impl for `map_with`
-struct Snap<DB>(DB);
-impl<DB: ParallelDatabase> Snap<ra_salsa::Snapshot<DB>> {
-    fn new(db: &DB) -> Self {
-        Self(db.snapshot())
-    }
-}
-impl<DB: ParallelDatabase> Clone for Snap<ra_salsa::Snapshot<DB>> {
-    fn clone(&self) -> Snap<ra_salsa::Snapshot<DB>> {
-        Snap(self.0.snapshot())
-    }
-}
-impl<DB> std::ops::Deref for Snap<DB> {
-    type Target = DB;
-
-    fn deref(&self) -> &Self::Target {
-        &self.0
-    }
+    krate.modules(db).into_iter().map(|module| db.module_symbols(module)).collect()
 }
 
 // Feature: Workspace Symbol
@@ -201,7 +182,7 @@ pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec<FileSymbol> {
     let indices: Vec<_> = if query.libs {
         db.library_roots()
             .par_iter()
-            .map_with(Snap::new(db), |snap, &root| snap.library_symbols(root))
+            .map_with(db.clone(), |snap, &root| snap.library_symbols(root))
             .collect()
     } else {
         let mut crates = Vec::new();
@@ -211,7 +192,7 @@ pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec<FileSymbol> {
         }
         let indices: Vec<_> = crates
             .into_par_iter()
-            .map_with(Snap::new(db), |snap, krate| snap.crate_symbols(krate.into()))
+            .map_with(db.clone(), |snap, krate| snap.crate_symbols(krate.into()))
             .collect();
         indices.iter().flat_map(|indices| indices.iter().cloned()).collect()
     };
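The `world_symbols` hunks above drop the hand-rolled `Snap` wrapper: the database handle is now itself `Clone`, so it can be passed straight to rayon workers via `map_with`. A minimal sketch of that pattern under those assumptions; `SearchDb` and `index_root` below are illustrative stand-ins, not rust-analyzer APIs:

```rust
use rayon::prelude::*;

/// Stand-in for a salsa database handle that is cheap to clone
/// (the real code clones `RootDatabase` instead of wrapping snapshots).
#[derive(Clone)]
struct SearchDb;

impl SearchDb {
    fn index_root(&self, root: u32) -> usize {
        // Pretend each root contributes one "symbol" per decimal digit.
        root.to_string().len()
    }
}

fn main() {
    let db = SearchDb;
    let roots = vec![1_u32, 22, 333];
    // `map_with` hands each rayon worker its own clone of the handle,
    // which is what the patch now does instead of `Snap::new(db)`.
    let sizes: Vec<usize> = roots
        .par_iter()
        .map_with(db.clone(), |db, &root| db.index_root(root))
        .collect();
    assert_eq!(sizes, vec![1, 2, 3]);
}
```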
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs
index 92478ef480d6a..7e8c921d9ed39 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs
@@ -1,7 +1,7 @@
 //! Tools to work with format string literals for the `format_args!` family of macros.
 use syntax::{
-    ast::{self, IsString},
     AstNode, AstToken, TextRange, TextSize,
+    ast::{self, IsString},
 };
 
 // FIXME: This can probably be re-implemented via the HIR?
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string_exprs.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string_exprs.rs
index c104aa571894d..8f25833fffb8d 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string_exprs.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string_exprs.rs
@@ -183,7 +183,7 @@ pub fn parse_format_exprs(input: &str) -> Result<(String, Vec<Arg>), ()> {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use expect_test::{expect, Expect};
+    use expect_test::{Expect, expect};
 
     fn check(input: &str, expect: &Expect) {
         let (output, exprs) = parse_format_exprs(input).unwrap_or(("-".to_owned(), vec![]));
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs
index 56a66070ef7f3..bdff64dd0812c 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs
@@ -5,8 +5,8 @@ use itertools::Itertools;
 use parser::T;
 use span::Edition;
 use syntax::{
-    ast::{self, HasLoopBody, MacroCall, PathSegmentKind, VisibilityKind},
     AstNode, AstToken, Preorder, RustLanguage, WalkEvent,
+    ast::{self, HasLoopBody, MacroCall, PathSegmentKind, VisibilityKind},
 };
 
 pub fn expr_as_name_ref(expr: &ast::Expr) -> Option<ast::NameRef> {
@@ -121,7 +121,7 @@ pub fn walk_patterns_in_expr(start: &ast::Expr, cb: &mut dyn FnMut(ast::Pat)) {
         match ast::Stmt::cast(node.clone()) {
             Some(ast::Stmt::LetStmt(l)) => {
                 if let Some(pat) = l.pat() {
-                    let _ = walk_pat(&pat, &mut |pat| {
+                    _ = walk_pat(&pat, &mut |pat| {
                         cb(pat);
                         ControlFlow::<(), ()>::Continue(())
                     });
@@ -159,7 +159,7 @@ pub fn walk_patterns_in_expr(start: &ast::Expr, cb: &mut dyn FnMut(ast::Pat)) {
                     }
                 } else if let Some(pat) = ast::Pat::cast(node) {
                     preorder.skip_subtree();
-                    let _ = walk_pat(&pat, &mut |pat| {
+                    _ = walk_pat(&pat, &mut |pat| {
                         cb(pat);
                         ControlFlow::<(), ()>::Continue(())
                     });
@@ -484,7 +484,7 @@ pub fn parse_tt_as_comma_sep_paths(
             None => None,
             Some(tok) => Some(tok),
         });
-    let input_expressions = tokens.group_by(|tok| tok.kind() == T![,]);
+    let input_expressions = tokens.chunk_by(|tok| tok.kind() == T![,]);
     let paths = input_expressions
         .into_iter()
         .filter_map(|(is_sep, group)| (!is_sep).then_some(group))
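The last hunk in `node_ext.rs` follows the Itertools rename of `group_by` to `chunk_by` (same behaviour: grouping consecutive elements by a key). A small self-contained sketch of the combinator, assuming a recent `itertools` release where the rename has landed; the token-kind data is illustrative only:

```rust
use itertools::Itertools;

fn main() {
    let kinds = ["ident", "ident", "comma", "ident"];
    // `chunk_by` yields runs of consecutive items sharing a key, which is how
    // the hunk above splits a token stream on `,` separators.
    let chunks = kinds.iter().chunk_by(|&&kind| kind == "comma");
    let runs: Vec<(bool, usize)> =
        chunks.into_iter().map(|(is_sep, run)| (is_sep, run.count())).collect();
    assert_eq!(runs, vec![(false, 2), (true, 1), (false, 1)]);
}
```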
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/suggest_name.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/suggest_name.rs
index e085bf15cb92d..9b9f450bc7342 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/suggest_name.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/suggest_name.rs
@@ -7,8 +7,9 @@ use itertools::Itertools;
 use rustc_hash::FxHashMap;
 use stdx::to_lower_snake_case;
 use syntax::{
+    AstNode, Edition, SmolStr, SmolStrBuilder, ToSmolStr,
     ast::{self, HasName},
-    match_ast, AstNode, Edition, SmolStr, SmolStrBuilder, ToSmolStr,
+    match_ast,
 };
 
 use crate::RootDatabase;
@@ -82,7 +83,7 @@ const USELESS_METHODS: &[&str] = &[
 ///
 /// ```
 /// # use ide_db::syntax_helpers::suggest_name::NameGenerator;
-/// let mut generator = NameGenerator::new();
+/// let mut generator = NameGenerator::default();
 /// assert_eq!(generator.suggest_name("a"), "a");
 /// assert_eq!(generator.suggest_name("a"), "a1");
 ///
@@ -95,21 +96,16 @@ pub struct NameGenerator {
 }
 
 impl NameGenerator {
-    /// Create a new empty generator
-    pub fn new() -> Self {
-        Self { pool: FxHashMap::default() }
-    }
-
     /// Create a new generator with existing names. When suggesting a name, it will
     /// avoid conflicts with existing names.
     pub fn new_with_names<'a>(existing_names: impl Iterator<Item = &'a str>) -> Self {
-        let mut generator = Self::new();
+        let mut generator = Self::default();
         existing_names.for_each(|name| generator.insert(name));
         generator
     }
 
     pub fn new_from_scope_locals(scope: Option<SemanticsScope<'_>>) -> Self {
-        let mut generator = Self::new();
+        let mut generator = Self::default();
         if let Some(scope) = scope {
             scope.process_all_names(&mut |name, scope| {
                 if let hir::ScopeDef::Local(_) = scope {
@@ -457,9 +453,10 @@ mod tests {
     fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expected: &str) {
         let (db, file_id, range_or_offset) = RootDatabase::with_range_or_offset(ra_fixture);
         let frange = FileRange { file_id, range: range_or_offset.into() };
-
         let sema = Semantics::new(&db);
+
         let source_file = sema.parse(frange.file_id);
+
         let element = source_file.syntax().covering_element(frange.range);
         let expr =
             element.ancestors().find_map(ast::Expr::cast).expect("selection is not an expression");
@@ -468,7 +465,7 @@ mod tests {
             frange.range,
             "selection is not an expression(yet contained in one)"
         );
-        let name = NameGenerator::new().for_variable(&expr, &sema);
+        let name = NameGenerator::default().for_variable(&expr, &sema);
         assert_eq!(&name, expected);
     }
 
@@ -1115,7 +1112,7 @@ fn main() {
 
     #[test]
     fn conflicts_with_existing_names() {
-        let mut generator = NameGenerator::new();
+        let mut generator = NameGenerator::default();
         assert_eq!(generator.suggest_name("a"), "a");
         assert_eq!(generator.suggest_name("a"), "a1");
         assert_eq!(generator.suggest_name("a"), "a2");
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/tree_diff.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/tree_diff.rs
index 02e24c47761c9..7163c08e1e317 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/tree_diff.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/tree_diff.rs
@@ -2,7 +2,7 @@
 use rustc_hash::FxHashMap;
 use syntax::{NodeOrToken, SyntaxElement, SyntaxNode};
 
-use crate::{text_edit::TextEditBuilder, FxIndexMap};
+use crate::{FxIndexMap, text_edit::TextEditBuilder};
 
 #[derive(Debug, Hash, PartialEq, Eq)]
 enum TreeDiffInsertPos {
@@ -153,7 +153,7 @@ pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff {
 
 #[cfg(test)]
 mod tests {
-    use expect_test::{expect, Expect};
+    use expect_test::{Expect, expect};
     use itertools::Itertools;
     use parser::{Edition, SyntaxKind};
     use syntax::{AstNode, SourceFile, SyntaxElement};
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt
index efcf53ded64f0..455a6805907cc 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt
+++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt
@@ -2,7 +2,9 @@
     (
         Module {
             id: ModuleId {
-                krate: Idx::<CrateData>(0),
+                krate: Crate(
+                    Id(3000),
+                ),
                 block: None,
                 local_id: Idx::<ModuleData>(0),
             },
@@ -14,17 +16,16 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                1,
+                                3401,
                             ),
                         },
                     ),
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            0,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2000),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: STRUCT,
@@ -40,6 +41,7 @@
                 container_name: None,
                 is_alias: false,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "Struct",
@@ -47,17 +49,16 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                0,
+                                3400,
                             ),
                         },
                     ),
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            0,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2000),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: STRUCT,
@@ -73,6 +74,7 @@
                 container_name: None,
                 is_alias: false,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "mul1",
@@ -80,17 +82,16 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                0,
+                                3400,
                             ),
                         },
                     ),
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            0,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2000),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: STRUCT,
@@ -106,6 +107,7 @@
                 container_name: None,
                 is_alias: true,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "mul2",
@@ -113,17 +115,16 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                0,
+                                3400,
                             ),
                         },
                     ),
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            0,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2000),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: STRUCT,
@@ -139,6 +140,7 @@
                 container_name: None,
                 is_alias: true,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "s1",
@@ -146,17 +148,16 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                0,
+                                3400,
                             ),
                         },
                     ),
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            0,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2000),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: STRUCT,
@@ -172,6 +173,7 @@
                 container_name: None,
                 is_alias: true,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "s1",
@@ -179,17 +181,16 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                1,
+                                3401,
                             ),
                         },
                     ),
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            0,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2000),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: STRUCT,
@@ -205,6 +206,7 @@
                 container_name: None,
                 is_alias: true,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "s2",
@@ -212,17 +214,16 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                0,
+                                3400,
                             ),
                         },
                     ),
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            0,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2000),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: STRUCT,
@@ -238,6 +239,7 @@
                 container_name: None,
                 is_alias: true,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
         ],
     ),
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt
index 7dce95592b819..5e5ae1d168e7e 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt
+++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt
@@ -2,7 +2,9 @@
     (
         Module {
             id: ModuleId {
-                krate: Idx::<CrateData>(0),
+                krate: Crate(
+                    Id(3000),
+                ),
                 block: None,
                 local_id: Idx::<ModuleData>(0),
             },
@@ -13,16 +15,15 @@
                 def: TypeAlias(
                     TypeAlias {
                         id: TypeAliasId(
-                            0,
+                            6800,
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            0,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2000),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: TYPE_ALIAS,
@@ -38,22 +39,22 @@
                 container_name: None,
                 is_alias: false,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "CONST",
                 def: Const(
                     Const {
                         id: ConstId(
-                            0,
+                            6000,
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            0,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2000),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: CONST,
@@ -69,22 +70,22 @@
                 container_name: None,
                 is_alias: false,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "CONST_WITH_INNER",
                 def: Const(
                     Const {
                         id: ConstId(
-                            2,
+                            6002,
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            0,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2000),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: CONST,
@@ -100,6 +101,7 @@
                 container_name: None,
                 is_alias: false,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "Enum",
@@ -107,17 +109,16 @@
                     Enum(
                         Enum {
                             id: EnumId(
-                                0,
+                                4c00,
                             ),
                         },
                     ),
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            0,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2000),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: ENUM,
@@ -133,6 +134,7 @@
                 container_name: None,
                 is_alias: false,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "ItemLikeMacro",
@@ -140,17 +142,16 @@
                     Macro {
                         id: Macro2Id(
                             Macro2Id(
-                                0,
+                                4800,
                             ),
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            0,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2000),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: USE_TREE,
@@ -166,6 +167,7 @@
                 container_name: None,
                 is_alias: false,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "Macro",
@@ -173,17 +175,16 @@
                     Macro {
                         id: Macro2Id(
                             Macro2Id(
-                                0,
+                                4800,
                             ),
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            0,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2000),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: MACRO_DEF,
@@ -199,22 +200,22 @@
                 container_name: None,
                 is_alias: false,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "STATIC",
                 def: Static(
                     Static {
                         id: StaticId(
-                            0,
+                            6400,
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            0,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2000),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: STATIC,
@@ -230,6 +231,7 @@
                 container_name: None,
                 is_alias: false,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "Struct",
@@ -237,17 +239,16 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                1,
+                                4401,
                             ),
                         },
                     ),
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            0,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2000),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: STRUCT,
@@ -263,6 +264,7 @@
                 container_name: None,
                 is_alias: false,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "StructFromMacro",
@@ -270,14 +272,16 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                0,
+                                4400,
                             ),
                         },
                     ),
                 ),
                 loc: DeclarationLocation {
                     hir_file_id: MacroFile(
-                        0,
+                        MacroCallId(
+                            Id(3800),
+                        ),
                     ),
                     ptr: SyntaxNodePtr {
                         kind: STRUCT,
@@ -293,6 +297,7 @@
                 container_name: None,
                 is_alias: false,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "StructInFn",
@@ -300,17 +305,16 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                5,
+                                4405,
                             ),
                         },
                     ),
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            0,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2000),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: STRUCT,
@@ -328,6 +332,7 @@
                 ),
                 is_alias: false,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "StructInNamedConst",
@@ -335,17 +340,16 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                6,
+                                4406,
                             ),
                         },
                     ),
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            0,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2000),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: STRUCT,
@@ -363,6 +367,7 @@
                 ),
                 is_alias: false,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "StructInUnnamedConst",
@@ -370,17 +375,16 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                7,
+                                4407,
                             ),
                         },
                     ),
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            0,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2000),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: STRUCT,
@@ -396,6 +400,7 @@
                 container_name: None,
                 is_alias: false,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "StructT",
@@ -403,17 +408,16 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                2,
+                                4402,
                             ),
                         },
                     ),
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            0,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2000),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: STRUCT,
@@ -429,22 +433,22 @@
                 container_name: None,
                 is_alias: false,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "Trait",
                 def: Trait(
                     Trait {
                         id: TraitId(
-                            0,
+                            5800,
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            0,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2000),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: TRAIT,
@@ -460,6 +464,7 @@
                 container_name: None,
                 is_alias: false,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "Trait",
@@ -467,17 +472,16 @@
                     Macro {
                         id: Macro2Id(
                             Macro2Id(
-                                0,
+                                4800,
                             ),
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            0,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2000),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: USE_TREE,
@@ -493,6 +497,7 @@
                 container_name: None,
                 is_alias: false,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "Union",
@@ -500,17 +505,16 @@
                     Union(
                         Union {
                             id: UnionId(
-                                0,
+                                5000,
                             ),
                         },
                     ),
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            0,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2000),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: UNION,
@@ -526,24 +530,26 @@
                 container_name: None,
                 is_alias: false,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "a_mod",
                 def: Module(
                     Module {
                         id: ModuleId {
-                            krate: Idx::<CrateData>(0),
+                            krate: Crate(
+                                Id(3000),
+                            ),
                             block: None,
                             local_id: Idx::<ModuleData>(1),
                         },
                     },
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            0,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2000),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: MODULE,
@@ -559,24 +565,26 @@
                 container_name: None,
                 is_alias: false,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "b_mod",
                 def: Module(
                     Module {
                         id: ModuleId {
-                            krate: Idx::<CrateData>(0),
+                            krate: Crate(
+                                Id(3000),
+                            ),
                             block: None,
                             local_id: Idx::<ModuleData>(2),
                         },
                     },
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            0,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2000),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: MODULE,
@@ -592,6 +600,7 @@
                 container_name: None,
                 is_alias: false,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "define_struct",
@@ -599,17 +608,16 @@
                     Macro {
                         id: MacroRulesId(
                             MacroRulesId(
-                                1,
+                                3401,
                             ),
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            0,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2000),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: MACRO_RULES,
@@ -625,22 +633,22 @@
                 container_name: None,
                 is_alias: false,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "generic_impl_fn",
                 def: Function(
                     Function {
                         id: FunctionId(
-                            2,
+                            5c02,
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            0,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2000),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: FN,
@@ -658,22 +666,22 @@
                 ),
                 is_alias: false,
                 is_assoc: true,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "impl_fn",
                 def: Function(
                     Function {
                         id: FunctionId(
-                            1,
+                            5c01,
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            0,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2000),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: FN,
@@ -691,6 +699,7 @@
                 ),
                 is_alias: false,
                 is_assoc: true,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "macro_rules_macro",
@@ -698,17 +707,16 @@
                     Macro {
                         id: MacroRulesId(
                             MacroRulesId(
-                                0,
+                                3400,
                             ),
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            0,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2000),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: MACRO_RULES,
@@ -724,22 +732,22 @@
                 container_name: None,
                 is_alias: false,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "main",
                 def: Function(
                     Function {
                         id: FunctionId(
-                            0,
+                            5c00,
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            0,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2000),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: FN,
@@ -755,6 +763,7 @@
                 container_name: None,
                 is_alias: false,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "really_define_struct",
@@ -762,17 +771,16 @@
                     Macro {
                         id: MacroRulesId(
                             MacroRulesId(
-                                1,
+                                3401,
                             ),
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            0,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2000),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: USE_TREE,
@@ -788,22 +796,22 @@
                 container_name: None,
                 is_alias: false,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "trait_fn",
                 def: Function(
                     Function {
                         id: FunctionId(
-                            3,
+                            5c03,
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            0,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2000),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: FN,
@@ -821,13 +829,16 @@
                 ),
                 is_alias: false,
                 is_assoc: true,
+                do_not_complete: Yes,
             },
         ],
     ),
     (
         Module {
             id: ModuleId {
-                krate: Idx::<CrateData>(0),
+                krate: Crate(
+                    Id(3000),
+                ),
                 block: None,
                 local_id: Idx::<ModuleData>(1),
             },
@@ -839,17 +850,16 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                3,
+                                4403,
                             ),
                         },
                     ),
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            0,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2000),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: STRUCT,
@@ -865,13 +875,16 @@
                 container_name: None,
                 is_alias: false,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
         ],
     ),
     (
         Module {
             id: ModuleId {
-                krate: Idx::<CrateData>(0),
+                krate: Crate(
+                    Id(3000),
+                ),
                 block: None,
                 local_id: Idx::<ModuleData>(2),
             },
@@ -882,16 +895,15 @@
                 def: Trait(
                     Trait {
                         id: TraitId(
-                            0,
+                            5800,
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            1,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2001),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: USE_TREE,
@@ -907,6 +919,7 @@
                 container_name: None,
                 is_alias: false,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "IsThisJustATrait",
@@ -914,17 +927,16 @@
                     Macro {
                         id: Macro2Id(
                             Macro2Id(
-                                0,
+                                4800,
                             ),
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            1,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2001),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: USE_TREE,
@@ -940,6 +952,7 @@
                 container_name: None,
                 is_alias: false,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "StructInModB",
@@ -947,17 +960,16 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                4,
+                                4404,
                             ),
                         },
                     ),
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            1,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2001),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: STRUCT,
@@ -973,6 +985,7 @@
                 container_name: None,
                 is_alias: false,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "SuperItemLikeMacro",
@@ -980,17 +993,16 @@
                     Macro {
                         id: Macro2Id(
                             Macro2Id(
-                                0,
+                                4800,
                             ),
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            1,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2001),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: USE_TREE,
@@ -1006,6 +1018,7 @@
                 container_name: None,
                 is_alias: false,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
             FileSymbol {
                 name: "ThisStruct",
@@ -1013,17 +1026,16 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                4,
+                                4404,
                             ),
                         },
                     ),
                 ),
                 loc: DeclarationLocation {
-                    hir_file_id: EditionedFileId(
-                        FileId(
-                            1,
+                    hir_file_id: FileId(
+                        EditionedFileId(
+                            Id(2001),
                         ),
-                        Edition2021,
                     ),
                     ptr: SyntaxNodePtr {
                         kind: USE_TREE,
@@ -1039,6 +1051,7 @@
                 container_name: None,
                 is_alias: false,
                 is_assoc: false,
+                do_not_complete: Yes,
             },
         ],
     ),
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/text_edit.rs b/src/tools/rust-analyzer/crates/ide-db/src/text_edit.rs
index b59010f2f8c83..6e9bd7bdcc21a 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/text_edit.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/text_edit.rs
@@ -18,18 +18,19 @@ pub struct Indel {
     pub insert: String,
     /// Refers to offsets in the original text
     pub delete: TextRange,
-    pub annotation: Option<ChangeAnnotationId>,
 }
 
 #[derive(Default, Debug, Clone)]
 pub struct TextEdit {
     /// Invariant: disjoint and sorted by `delete`.
     indels: Vec<Indel>,
+    annotation: Option<ChangeAnnotationId>,
 }
 
 #[derive(Debug, Default, Clone)]
 pub struct TextEditBuilder {
     indels: Vec<Indel>,
+    annotation: Option<ChangeAnnotationId>,
 }
 
 impl Indel {
@@ -40,7 +41,7 @@ impl Indel {
         Indel::replace(range, String::new())
     }
     pub fn replace(range: TextRange, replace_with: String) -> Indel {
-        Indel { delete: range, insert: replace_with, annotation: None }
+        Indel { delete: range, insert: replace_with }
     }
 
     pub fn apply(&self, text: &mut String) {
@@ -142,12 +143,12 @@ impl TextEdit {
         Some(res)
     }
 
-    pub fn set_annotation(&mut self, annotation: Option<ChangeAnnotationId>) {
-        if annotation.is_some() {
-            for indel in &mut self.indels {
-                indel.annotation = annotation;
-            }
-        }
+    pub(crate) fn set_annotation(&mut self, conflict_annotation: Option<ChangeAnnotationId>) {
+        self.annotation = conflict_annotation;
+    }
+
+    pub fn change_annotation(&self) -> Option<ChangeAnnotationId> {
+        self.annotation
     }
 }
 
@@ -183,10 +184,10 @@ impl TextEditBuilder {
         self.indel(Indel::insert(offset, text));
     }
     pub fn finish(self) -> TextEdit {
-        let mut indels = self.indels;
+        let TextEditBuilder { mut indels, annotation } = self;
         assert_disjoint_or_equal(&mut indels);
         indels = coalesce_indels(indels);
-        TextEdit { indels }
+        TextEdit { indels, annotation }
     }
     pub fn invalidates_offset(&self, offset: TextSize) -> bool {
         self.indels.iter().any(|indel| indel.delete.contains_inclusive(offset))
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/traits.rs b/src/tools/rust-analyzer/crates/ide-db/src/traits.rs
index 0f67496d0989e..61e28386d0721 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/traits.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/traits.rs
@@ -1,9 +1,9 @@
 //! Functionality for obtaining data related to traits from the DB.
 
-use crate::{defs::Definition, RootDatabase};
-use hir::{db::HirDatabase, AsAssocItem, Semantics};
+use crate::{RootDatabase, defs::Definition};
+use hir::{AsAssocItem, Semantics, db::HirDatabase};
 use rustc_hash::FxHashSet;
-use syntax::{ast, AstNode};
+use syntax::{AstNode, ast};
 
 /// Given the `impl` block, attempts to find the trait this `impl` corresponds to.
 pub fn resolve_target_trait(
@@ -113,7 +113,7 @@ fn assoc_item_of_trait(
 
 #[cfg(test)]
 mod tests {
-    use expect_test::{expect, Expect};
+    use expect_test::{Expect, expect};
     use hir::FilePosition;
     use hir::Semantics;
     use span::Edition;
@@ -126,8 +126,8 @@ mod tests {
     pub(crate) fn position(
         #[rust_analyzer::rust_fixture] ra_fixture: &str,
     ) -> (RootDatabase, FilePosition) {
-        let change_fixture = ChangeFixture::parse(ra_fixture);
         let mut database = RootDatabase::default();
+        let change_fixture = ChangeFixture::parse(&database, ra_fixture);
         database.apply_change(change_fixture.change);
         let (file_id, range_or_offset) =
             change_fixture.file_position.expect("expected a marker ($0)");
@@ -138,6 +138,7 @@ mod tests {
     fn check_trait(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
         let (db, position) = position(ra_fixture);
         let sema = Semantics::new(&db);
+
         let file = sema.parse(position.file_id);
         let impl_block: ast::Impl =
             sema.find_node_at_offset_with_descend(file.syntax(), position.offset).unwrap();
@@ -152,6 +153,7 @@ mod tests {
     fn check_missing_assoc(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
         let (db, position) = position(ra_fixture);
         let sema = Semantics::new(&db);
+
         let file = sema.parse(position.file_id);
         let impl_block: ast::Impl =
             sema.find_node_at_offset_with_descend(file.syntax(), position.offset).unwrap();
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/ty_filter.rs b/src/tools/rust-analyzer/crates/ide-db/src/ty_filter.rs
index 2fdd8358637df..63ce0ddbb8fc2 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/ty_filter.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/ty_filter.rs
@@ -5,7 +5,7 @@
 use std::iter;
 
 use hir::Semantics;
-use syntax::ast::{self, make, Pat};
+use syntax::ast::{self, Pat, make};
 
 use crate::RootDatabase;
 
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_constructor.rs b/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_constructor.rs
index c3f0bf3706904..a4a93e36f0e1a 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_constructor.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_constructor.rs
@@ -3,8 +3,8 @@
 use hir::StructKind;
 use span::Edition;
 use syntax::{
-    ast::{make, Expr, Path},
     ToSmolStr,
+    ast::{Expr, Path, make},
 };
 
 /// given a type return the trivial constructor (if one exists)
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml
index 483cb6df86236..96be51e1b2666 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml
@@ -12,7 +12,7 @@ rust-version.workspace = true
 [lib]
 
 [dependencies]
-cov-mark = "2.0.0-pre.1"
+cov-mark = "2.0.0"
 either.workspace = true
 itertools.workspace = true
 serde_json.workspace = true
@@ -27,7 +27,7 @@ ide-db.workspace = true
 paths.workspace = true
 
 [dev-dependencies]
-expect-test = "1.4.0"
+expect-test = "1.5.1"
 
 # local deps
 test-utils.workspace = true
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/await_outside_of_async.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/await_outside_of_async.rs
index 92b6e748ca5ed..92ca7a74184fb 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/await_outside_of_async.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/await_outside_of_async.rs
@@ -1,4 +1,4 @@
-use crate::{adjusted_display_range, Diagnostic, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticsContext, adjusted_display_range};
 
 // Diagnostic: await-outside-of-async
 //
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/bad_rtn.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/bad_rtn.rs
new file mode 100644
index 0000000000000..9ed85f9f208e8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/bad_rtn.rs
@@ -0,0 +1,52 @@
+use ide_db::Severity;
+
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
+
+// Diagnostic: bad-rtn
+//
+// This diagnostic is shown when an RTN (Return Type Notation, `Type::method(..): Send`) is written in an improper place.
+pub(crate) fn bad_rtn(ctx: &DiagnosticsContext<'_>, d: &hir::BadRtn) -> Diagnostic {
+    Diagnostic::new_with_syntax_node_ptr(
+        ctx,
+        DiagnosticCode::Ra("bad-rtn", Severity::Error),
+        "return type notation not allowed in this position yet",
+        d.rtn.map(Into::into),
+    )
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::tests::check_diagnostics;
+
+    #[test]
+    fn fn_traits_also_emit() {
+        check_diagnostics(
+            r#"
+//- minicore: fn
+fn foo<
+    A: Fn(..),
+      // ^^^^ error: return type notation not allowed in this position yet
+>() {}
+        "#,
+        );
+    }
+
+    #[test]
+    fn bad_rtn() {
+        check_diagnostics(
+            r#"
+mod module {
+    pub struct Type;
+}
+trait Trait {}
+
+fn foo()
+where
+    module(..)::Type: Trait
+       // ^^^^ error: return type notation not allowed in this position yet
+{
+}
+        "#,
+        );
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/elided_lifetimes_in_path.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/elided_lifetimes_in_path.rs
new file mode 100644
index 0000000000000..438dd2fdcb6c0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/elided_lifetimes_in_path.rs
@@ -0,0 +1,112 @@
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
+
+// Diagnostic: elided-lifetimes-in-path
+//
+// This diagnostic is triggered when lifetimes are elided in paths. In some cases it is only a lint,
+// while in others it is a hard error.
+pub(crate) fn elided_lifetimes_in_path(
+    ctx: &DiagnosticsContext<'_>,
+    d: &hir::ElidedLifetimesInPath,
+) -> Diagnostic {
+    if d.hard_error {
+        Diagnostic::new_with_syntax_node_ptr(
+            ctx,
+            DiagnosticCode::RustcHardError("E0726"),
+            "implicit elided lifetime not allowed here",
+            d.generics_or_segment.map(Into::into),
+        )
+        .experimental()
+    } else {
+        Diagnostic::new_with_syntax_node_ptr(
+            ctx,
+            DiagnosticCode::RustcLint("elided_lifetimes_in_paths"),
+            "hidden lifetime parameters in types are deprecated",
+            d.generics_or_segment.map(Into::into),
+        )
+        .experimental()
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::tests::check_diagnostics;
+
+    #[test]
+    fn fn_() {
+        check_diagnostics(
+            r#"
+#![warn(elided_lifetimes_in_paths)]
+
+struct Foo<'a>(&'a ());
+
+fn foo(_: Foo) {}
+       // ^^^ warn: hidden lifetime parameters in types are deprecated
+        "#,
+        );
+        check_diagnostics(
+            r#"
+#![warn(elided_lifetimes_in_paths)]
+
+struct Foo<'a>(&'a ());
+
+fn foo(_: Foo<'_>) -> Foo { loop {} }
+                   // ^^^ warn: hidden lifetime parameters in types are deprecated
+        "#,
+        );
+    }
+
+    #[test]
+    fn async_fn() {
+        check_diagnostics(
+            r#"
+struct Foo<'a>(&'a ());
+
+async fn foo(_: Foo) {}
+             // ^^^ error: implicit elided lifetime not allowed here
+        "#,
+        );
+        check_diagnostics(
+            r#"
+#![warn(elided_lifetimes_in_paths)]
+
+struct Foo<'a>(&'a ());
+
+fn foo(_: Foo<'_>) -> Foo { loop {} }
+                   // ^^^ warn: hidden lifetime parameters in types are deprecated
+        "#,
+        );
+    }
+
+    #[test]
+    fn no_error_when_explicitly_elided() {
+        check_diagnostics(
+            r#"
+#![warn(elided_lifetimes_in_paths)]
+
+struct Foo<'a>(&'a ());
+trait Trait<'a> {}
+
+fn foo(_: Foo<'_>) -> Foo<'_> { loop {} }
+async fn bar(_: Foo<'_>) -> Foo<'_> { loop {} }
+impl Foo<'_> {}
+impl Trait<'_> for Foo<'_> {}
+        "#,
+        );
+    }
+
+    #[test]
+    fn impl_() {
+        check_diagnostics(
+            r#"
+struct Foo<'a>(&'a ());
+trait Trait<'a> {}
+
+impl Foo {}
+  // ^^^ error: implicit elided lifetime not allowed here
+
+impl Trait for Foo<'_> {}
+  // ^^^^^ error: implicit elided lifetime not allowed here
+        "#,
+        );
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs
index 876c2ccd49d7a..1dc6a7bf9cae7 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs
@@ -1,27 +1,30 @@
 //! Suggests shortening `Foo { field: field }` to `Foo { field }` in both
 //! expressions and patterns.
 
+use ide_db::RootDatabase;
 use ide_db::text_edit::TextEdit;
-use ide_db::{source_change::SourceChange, EditionedFileId, FileRange};
-use syntax::{ast, match_ast, AstNode, SyntaxNode};
+use ide_db::{EditionedFileId, FileRange, source_change::SourceChange};
+use syntax::{AstNode, SyntaxNode, ast, match_ast};
 
-use crate::{fix, Diagnostic, DiagnosticCode};
+use crate::{Diagnostic, DiagnosticCode, fix};
 
 pub(crate) fn field_shorthand(
+    db: &RootDatabase,
     acc: &mut Vec<Diagnostic>,
     file_id: EditionedFileId,
     node: &SyntaxNode,
 ) {
     match_ast! {
         match node {
-            ast::RecordExpr(it) => check_expr_field_shorthand(acc, file_id, it),
-            ast::RecordPat(it) => check_pat_field_shorthand(acc, file_id, it),
+            ast::RecordExpr(it) => check_expr_field_shorthand(db, acc, file_id, it),
+            ast::RecordPat(it) => check_pat_field_shorthand(db, acc, file_id, it),
             _ => ()
         }
     };
 }
 
 fn check_expr_field_shorthand(
+    db: &RootDatabase,
     acc: &mut Vec<Diagnostic>,
     file_id: EditionedFileId,
     record_expr: ast::RecordExpr,
@@ -49,16 +52,17 @@ fn check_expr_field_shorthand(
         let edit = edit_builder.finish();
 
         let field_range = record_field.syntax().text_range();
+        let vfs_file_id = file_id.file_id(db);
         acc.push(
             Diagnostic::new(
                 DiagnosticCode::Clippy("redundant_field_names"),
                 "Shorthand struct initialization",
-                FileRange { file_id: file_id.into(), range: field_range },
+                FileRange { file_id: vfs_file_id, range: field_range },
             )
             .with_fixes(Some(vec![fix(
                 "use_expr_field_shorthand",
                 "Use struct shorthand initialization",
-                SourceChange::from_text_edit(file_id, edit),
+                SourceChange::from_text_edit(vfs_file_id, edit),
                 field_range,
             )])),
         );
@@ -66,6 +70,7 @@ fn check_expr_field_shorthand(
 }
 
 fn check_pat_field_shorthand(
+    db: &RootDatabase,
     acc: &mut Vec<Diagnostic>,
     file_id: EditionedFileId,
     record_pat: ast::RecordPat,
@@ -93,16 +98,17 @@ fn check_pat_field_shorthand(
         let edit = edit_builder.finish();
 
         let field_range = record_pat_field.syntax().text_range();
+        let vfs_file_id = file_id.file_id(db);
         acc.push(
             Diagnostic::new(
                 DiagnosticCode::Clippy("redundant_field_names"),
                 "Shorthand struct pattern",
-                FileRange { file_id: file_id.into(), range: field_range },
+                FileRange { file_id: vfs_file_id, range: field_range },
             )
             .with_fixes(Some(vec![fix(
                 "use_pat_field_shorthand",
                 "Use struct field shorthand",
-                SourceChange::from_text_edit(file_id, edit),
+                SourceChange::from_text_edit(vfs_file_id, edit),
                 field_range,
             )])),
         );
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/generic_args_prohibited.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/generic_args_prohibited.rs
index 7d62daf716c47..b617c09498393 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/generic_args_prohibited.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/generic_args_prohibited.rs
@@ -3,9 +3,9 @@ use hir::GenericArgsProhibitedReason;
 use ide_db::assists::Assist;
 use ide_db::source_change::SourceChange;
 use ide_db::text_edit::TextEdit;
-use syntax::{ast, AstNode, TextRange};
+use syntax::{AstNode, TextRange, ast};
 
-use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, fix};
 
 // Diagnostic: generic-args-prohibited
 //
@@ -36,6 +36,7 @@ fn describe_reason(reason: GenericArgsProhibitedReason) -> String {
         }
         GenericArgsProhibitedReason::Const => "constants",
         GenericArgsProhibitedReason::Static => "statics",
+        GenericArgsProhibitedReason::LocalVariable => "local variables",
     };
     format!("generic arguments are not allowed on {kind}")
 }
@@ -63,7 +64,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::GenericArgsProhibited) -> Option
     Some(vec![fix(
         "remove_generic_args",
         "Remove these generics",
-        SourceChange::from_text_edit(file_id, TextEdit::delete(range)),
+        SourceChange::from_text_edit(file_id.file_id(ctx.sema.db), TextEdit::delete(range)),
         syntax.syntax().text_range(),
     )])
 }
@@ -320,7 +321,7 @@ trait E<A: foo::<()>::Trait>
                // ^^^^^ 💡 error: generic arguments are not allowed on builtin types
 }
 
-impl<A: foo::<()>::Trait> E for ()
+impl<A: foo::<()>::Trait> E<()> for ()
         // ^^^^^^ 💡 error: generic arguments are not allowed on modules
     where bool<i32>: foo::Trait
            // ^^^^^ 💡 error: generic arguments are not allowed on builtin types
@@ -518,14 +519,14 @@ fn baz() {
     }
 
     #[test]
-    fn const_and_static() {
+    fn const_param_and_static() {
         check_diagnostics(
             r#"
 const CONST: i32 = 0;
 static STATIC: i32 = 0;
-fn baz() {
-    let _ = CONST::<()>;
-              // ^^^^^^ 💡 error: generic arguments are not allowed on constants
+fn baz<const CONST_PARAM: usize>() {
+    let _ = CONST_PARAM::<()>;
+                    // ^^^^^^ 💡 error: generic arguments are not allowed on constants
     let _ = STATIC::<()>;
                // ^^^^^^ 💡 error: generic arguments are not allowed on statics
 }
@@ -533,6 +534,19 @@ fn baz() {
         );
     }
 
+    #[test]
+    fn local_variable() {
+        check_diagnostics(
+            r#"
+fn baz() {
+    let x = 1;
+    let _ = x::<()>;
+          // ^^^^^^ 💡 error: generic arguments are not allowed on local variables
+}
+        "#,
+        );
+    }
+
     #[test]
     fn enum_variant() {
         check_diagnostics(
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs
index 96a368eb0ea3f..47e1c84fecd0a 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs
@@ -39,8 +39,9 @@ pub(crate) fn inactive_code(
 
 #[cfg(test)]
 mod tests {
-    use crate::{tests::check_diagnostics_with_config, DiagnosticsConfig};
+    use crate::{DiagnosticsConfig, tests::check_diagnostics_with_config};
 
+    #[track_caller]
     pub(crate) fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
         let config = DiagnosticsConfig {
             disabled: std::iter::once("unlinked-file".to_owned()).collect(),
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incoherent_impl.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incoherent_impl.rs
index d3f302077528a..0b9a2ec9db3dd 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incoherent_impl.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incoherent_impl.rs
@@ -1,7 +1,7 @@
 use hir::InFile;
 use syntax::{AstNode, TextRange};
 
-use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, adjusted_display_range};
 
 // Diagnostic: incoherent-impl
 //
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs
index 246330e6efaac..289a076573252 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs
@@ -1,13 +1,13 @@
-use hir::{db::ExpandDatabase, CaseType, InFile};
+use hir::{CaseType, InFile, db::ExpandDatabase};
 use ide_db::{assists::Assist, defs::NameClass};
 use syntax::AstNode;
 
 use crate::{
-    // references::rename::rename_with_semantics,
-    unresolved_fix,
     Diagnostic,
     DiagnosticCode,
     DiagnosticsContext,
+    // references::rename::rename_with_semantics,
+    unresolved_fix,
 };
 
 // Diagnostic: incorrect-ident-case
@@ -786,6 +786,8 @@ static FOO: () = {
     }
 
     #[test]
+    // FIXME
+    #[should_panic]
     fn enum_variant_body_inner_item() {
         check_diagnostics(
             r#"
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_generics_len.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_generics_len.rs
new file mode 100644
index 0000000000000..17c7f75880c90
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_generics_len.rs
@@ -0,0 +1,187 @@
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
+use hir::IncorrectGenericsLenKind;
+
+// Diagnostic: incorrect-generics-len
+//
+// This diagnostic is triggered if the number of provided generic arguments does not match the number declared.
+pub(crate) fn incorrect_generics_len(
+    ctx: &DiagnosticsContext<'_>,
+    d: &hir::IncorrectGenericsLen,
+) -> Diagnostic {
+    let owner_description = d.def.description();
+    let expected = d.expected;
+    let provided = d.provided;
+    let kind_description = match d.kind {
+        IncorrectGenericsLenKind::Lifetimes => "lifetime",
+        IncorrectGenericsLenKind::TypesAndConsts => "generic",
+    };
+    let message = format!(
+        "this {owner_description} takes {expected} {kind_description} argument{} \
+            but {provided} {kind_description} argument{} {} supplied",
+        if expected == 1 { "" } else { "s" },
+        if provided == 1 { "" } else { "s" },
+        if provided == 1 { "was" } else { "were" },
+    );
+    Diagnostic::new_with_syntax_node_ptr(
+        ctx,
+        DiagnosticCode::RustcHardError("E0107"),
+        message,
+        d.generics_or_segment.map(Into::into),
+    )
+    .experimental()
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::tests::check_diagnostics;
+
+    #[test]
+    fn partially_specified_generics() {
+        check_diagnostics(
+            r#"
+struct Bar<T, U>(T, U);
+
+fn foo() {
+    let _ = Bar::<()>;
+            // ^^^^^^ error: this struct takes 2 generic arguments but 1 generic argument was supplied
+}
+
+        "#,
+        );
+    }
+
+    #[test]
+    fn enum_variant() {
+        check_diagnostics(
+            r#"
+enum Enum<T, U> {
+    Variant(T, U),
+}
+
+fn foo() {
+    let _ = Enum::<()>::Variant;
+             // ^^^^^^ error: this enum takes 2 generic arguments but 1 generic argument was supplied
+    let _ = Enum::Variant::<()>;
+                      // ^^^^^^ error: this enum takes 2 generic arguments but 1 generic argument was supplied
+}
+
+        "#,
+        );
+    }
+
+    #[test]
+    fn lifetimes() {
+        check_diagnostics(
+            r#"
+struct Foo<'a, 'b>(&'a &'b ());
+
+fn foo(Foo(_): Foo) -> Foo {
+    let _: Foo = Foo(&&());
+    let _: Foo::<> = Foo::<>(&&());
+    let _: Foo::<'static>
+           // ^^^^^^^^^^^ error: this struct takes 2 lifetime arguments but 1 lifetime argument was supplied
+                          = Foo::<'static>(&&());
+                            // ^^^^^^^^^^^ error: this struct takes 2 lifetime arguments but 1 lifetime argument was supplied
+    |_: Foo| -> Foo {loop{}};
+
+    loop {}
+}
+
+        "#,
+        );
+    }
+
+    #[test]
+    fn no_error_for_elided_lifetimes() {
+        check_diagnostics(
+            r#"
+struct Foo<'a>(&'a ());
+
+fn foo(_v: &()) -> Foo { loop {} }
+        "#,
+        );
+    }
+
+    #[test]
+    fn errs_for_elided_lifetimes_if_lifetimes_are_explicitly_provided() {
+        check_diagnostics(
+            r#"
+struct Foo<'a, 'b>(&'a &'b ());
+
+fn foo(_v: Foo<'_>
+           // ^^^^ error: this struct takes 2 lifetime arguments but 1 lifetime argument was supplied
+) -> Foo<'static> { loop {} }
+     // ^^^^^^^^^ error: this struct takes 2 lifetime arguments but 1 lifetime argument was supplied
+        "#,
+        );
+    }
+
+    #[test]
+    fn types_and_consts() {
+        check_diagnostics(
+            r#"
+struct Foo<'a, T>(&'a T);
+fn foo(_v: Foo) {}
+        // ^^^ error: this struct takes 1 generic argument but 0 generic arguments were supplied
+
+struct Bar<T, const N: usize>(T);
+fn bar() {
+    let _ = Bar::<()>;
+            // ^^^^^^ error: this struct takes 2 generic arguments but 1 generic argument was supplied
+}
+        "#,
+        );
+    }
+
+    #[test]
+    fn respects_defaults() {
+        check_diagnostics(
+            r#"
+struct Foo<T = (), const N: usize = 0>(T);
+fn foo(_v: Foo) {}
+
+struct Bar<T, const N: usize = 0>(T);
+fn bar(_v: Bar<()>) {}
+        "#,
+        );
+    }
+
+    #[test]
+    fn constant() {
+        check_diagnostics(
+            r#"
+const CONST: i32 = 0;
+fn baz() {
+    let _ = CONST::<()>;
+              // ^^^^^^ error: this constant takes 0 generic arguments but 1 generic argument was supplied
+}
+        "#,
+        );
+    }
+
+    #[test]
+    fn assoc_type() {
+        check_diagnostics(
+            r#"
+trait Trait {
+    type Assoc;
+}
+
+fn foo<T: Trait<Assoc<i32> = bool>>() {}
+                  // ^^^^^ error: this type alias takes 0 generic arguments but 1 generic argument was supplied
+        "#,
+        );
+    }
+
+    #[test]
+    fn regression_19669() {
+        check_diagnostics(
+            r#"
+//- minicore: from
+fn main() {
+    let _: i32 = Into::into(0);
+}
+"#,
+        );
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_generics_order.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_generics_order.rs
new file mode 100644
index 0000000000000..84496df2d7cfb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_generics_order.rs
@@ -0,0 +1,80 @@
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
+use hir::GenericArgKind;
+use syntax::SyntaxKind;
+
+// Diagnostic: incorrect-generics-order
+//
+// This diagnostic is triggered when the order of provided generic arguments does not match their declaration.
+pub(crate) fn incorrect_generics_order(
+    ctx: &DiagnosticsContext<'_>,
+    d: &hir::IncorrectGenericsOrder,
+) -> Diagnostic {
+    let provided_description = match d.provided_arg.value.kind() {
+        SyntaxKind::CONST_ARG => "constant",
+        SyntaxKind::LIFETIME_ARG => "lifetime",
+        SyntaxKind::TYPE_ARG => "type",
+        _ => panic!("non-generic-arg passed to `incorrect_generics_order()`"),
+    };
+    let expected_description = match d.expected_kind {
+        GenericArgKind::Lifetime => "lifetime",
+        GenericArgKind::Type => "type",
+        GenericArgKind::Const => "constant",
+    };
+    let message =
+        format!("{provided_description} provided when a {expected_description} was expected");
+    Diagnostic::new_with_syntax_node_ptr(
+        ctx,
+        DiagnosticCode::RustcHardError("E0747"),
+        message,
+        d.provided_arg.map(Into::into),
+    )
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::tests::check_diagnostics;
+
+    #[test]
+    fn lifetime_out_of_order() {
+        check_diagnostics(
+            r#"
+struct Foo<'a, T>(&'a T);
+
+fn bar(_v: Foo<(), 'static>) {}
+            // ^^ error: type provided when a lifetime was expected
+        "#,
+        );
+    }
+
+    #[test]
+    fn types_and_consts() {
+        check_diagnostics(
+            r#"
+struct Foo<T>(T);
+fn foo1(_v: Foo<1>) {}
+             // ^ error: constant provided when a type was expected
+fn foo2(_v: Foo<{ (1, 2) }>) {}
+             // ^^^^^^^^^^ error: constant provided when a type was expected
+
+struct Bar<const N: usize>;
+fn bar(_v: Bar<()>) {}
+            // ^^ error: type provided when a constant was expected
+
+struct Baz<T, const N: usize>(T);
+fn baz(_v: Baz<1, ()>) {}
+            // ^ error: constant provided when a type was expected
+        "#,
+        );
+    }
+
+    #[test]
+    fn no_error_when_num_incorrect() {
+        check_diagnostics(
+            r#"
+struct Baz<T, U>(T, U);
+fn baz(_v: Baz<1>) {}
+           // ^^^ error: this struct takes 2 generic arguments but 1 generic argument was supplied
+        "#,
+        );
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_cast.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_cast.rs
index 82cd1f2fde6db..b56255b1fde42 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_cast.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_cast.rs
@@ -440,8 +440,9 @@ fn main() {
     q as *const [i32];
   //^^^^^^^^^^^^^^^^^ error: cannot cast thin pointer `*const i32` to fat pointer `*const [i32]`
 
+    // FIXME: This should emit diagnostics but is disabled to prevent many false positives
     let t: *mut (dyn Trait + 'static) = 0 as *mut _;
-                                      //^^^^^^^^^^^ error: cannot cast `usize` to a fat pointer `*mut _`
+
     let mut fail: *const str = 0 as *const str;
                              //^^^^^^^^^^^^^^^ error: cannot cast `usize` to a fat pointer `*const str`
     let mut fail2: *const str = 0isize as *const str;
@@ -1161,6 +1162,49 @@ struct ZerocopyKnownLayoutMaybeUninit(<<Flexible as Field>::Type as KnownLayout>
 fn test(ptr: *mut [u8]) -> *mut ZerocopyKnownLayoutMaybeUninit {
     ptr as *mut _
 }
+"#,
+        );
+    }
+
+    #[test]
+    fn regression_19431() {
+        check_diagnostics(
+            r#"
+//- minicore: coerce_unsized
+struct Dst([u8]);
+
+struct Struct {
+    body: Dst,
+}
+
+trait Field {
+    type Type: ?Sized;
+}
+
+impl Field for Struct {
+    type Type = Dst;
+}
+
+trait KnownLayout {
+    type MaybeUninit: ?Sized;
+    type PointerMetadata;
+}
+
+impl<T> KnownLayout for [T] {
+    type MaybeUninit = [T];
+    type PointerMetadata = usize;
+}
+
+impl KnownLayout for Dst {
+    type MaybeUninit = Dst;
+    type PointerMetadata = <[u8] as KnownLayout>::PointerMetadata;
+}
+
+struct ZerocopyKnownLayoutMaybeUninit(<<Struct as Field>::Type as KnownLayout>::MaybeUninit);
+
+fn test(ptr: *mut ZerocopyKnownLayoutMaybeUninit) -> *mut <<Struct as Field>::Type as KnownLayout>::MaybeUninit {
+    ptr as *mut _
+}
 "#,
         );
     }
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs
index f22041ebe233b..ac1b599c49e2a 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs
@@ -4,19 +4,19 @@
 use hir::{ImportPathConfig, PathResolution, Semantics};
 use ide_db::text_edit::TextEdit;
 use ide_db::{
+    EditionedFileId, FileRange, FxHashMap, RootDatabase,
     helpers::mod_path_to_ast,
-    imports::insert_use::{insert_use, ImportScope},
+    imports::insert_use::{ImportScope, insert_use},
     source_change::SourceChangeBuilder,
-    EditionedFileId, FileRange, FxHashMap, RootDatabase,
 };
 use itertools::Itertools;
 use stdx::{format_to, never};
 use syntax::{
-    ast::{self, make},
     Edition, SyntaxKind, SyntaxNode,
+    ast::{self, make},
 };
 
-use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsConfig, Severity};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsConfig, Severity, fix};
 
 #[derive(Default)]
 struct State {
@@ -128,14 +128,15 @@ pub(crate) fn json_in_items(
                 state.has_serialize = serialize_resolved.is_some();
                 state.build_struct("Root", &it);
                 edit.insert(range.start(), state.result);
+                let vfs_file_id = file_id.file_id(sema.db);
                 acc.push(
                     Diagnostic::new(
                         DiagnosticCode::Ra("json-is-not-rust", Severity::WeakWarning),
                         "JSON syntax is not valid as a Rust item",
-                        FileRange { file_id: file_id.into(), range },
+                        FileRange { file_id: vfs_file_id, range },
                     )
                     .with_fixes(Some(vec![{
-                        let mut scb = SourceChangeBuilder::new(file_id);
+                        let mut scb = SourceChangeBuilder::new(vfs_file_id);
                         let scope = match import_scope {
                             ImportScope::File(it) => ImportScope::File(scb.make_mut(it)),
                             ImportScope::Module(it) => ImportScope::Module(scb.make_mut(it)),
@@ -183,7 +184,7 @@ pub(crate) fn json_in_items(
                             }
                         }
                         let mut sc = scb.finish();
-                        sc.insert_source_edit(file_id, edit.finish());
+                        sc.insert_source_edit(vfs_file_id, edit.finish());
                         fix("convert_json_to_struct", "Convert JSON to struct", sc, range)
                     }])),
                 );
@@ -196,8 +197,8 @@ pub(crate) fn json_in_items(
 #[cfg(test)]
 mod tests {
     use crate::{
-        tests::{check_diagnostics_with_config, check_fix, check_no_fix},
         DiagnosticsConfig,
+        tests::{check_diagnostics_with_config, check_fix, check_no_fix},
     };
 
     #[test]
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs
index 2f132985895c6..a2648a1995d7f 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs
@@ -38,8 +38,8 @@ pub(crate) fn macro_def_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroDefErr
 #[cfg(test)]
 mod tests {
     use crate::{
-        tests::{check_diagnostics, check_diagnostics_with_config},
         DiagnosticsConfig,
+        tests::{check_diagnostics, check_diagnostics_with_config},
     };
 
     #[test]
@@ -123,6 +123,7 @@ include!("foo/bar.rs");
 
     #[test]
     fn good_out_dir_diagnostic() {
+        // FIXME: The diagnostic here is duplicated for each eager expansion
         check_diagnostics(
             r#"
 #[rustc_builtin_macro]
@@ -134,6 +135,8 @@ macro_rules! concat { () => {} }
 
   include!(concat!(env!("OUT_DIR"), "/out.rs"));
                       //^^^^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run
+                 //^^^^^^^^^^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run
+         //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run
 "#,
         );
     }
@@ -238,6 +241,7 @@ macro_rules! outer {
 fn f() {
     outer!();
 } //^^^^^^^^ error: leftover tokens
+  //^^^^^^^^ error: Syntax Error in Expansion: expected expression
 "#,
         )
     }
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
index 0520bb3fe9b9b..63fd9b4e3f06b 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
@@ -2,11 +2,11 @@ use either::Either;
 use hir::InFile;
 use ide_db::FileRange;
 use syntax::{
-    ast::{self, HasArgList},
     AstNode, AstPtr,
+    ast::{self, HasArgList},
 };
 
-use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, adjusted_display_range};
 
 // Diagnostic: mismatched-tuple-struct-pat-arg-count
 //
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs
index 938b7182bc946..6b02111016c08 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs
@@ -1,20 +1,22 @@
 use either::Either;
 use hir::{
+    AssocItem, HirDisplay, ImportPathConfig, InFile, Type,
     db::{ExpandDatabase, HirDatabase},
-    sym, AssocItem, HirDisplay, HirFileIdExt, ImportPathConfig, InFile, Type,
+    sym,
 };
 use ide_db::{
-    assists::Assist, famous_defs::FamousDefs, imports::import_assets::item_for_path_search,
-    source_change::SourceChange, syntax_helpers::tree_diff::diff, text_edit::TextEdit,
-    use_trivial_constructor::use_trivial_constructor, FxHashMap,
+    FxHashMap, assists::Assist, famous_defs::FamousDefs,
+    imports::import_assets::item_for_path_search, source_change::SourceChange,
+    syntax_helpers::tree_diff::diff, text_edit::TextEdit,
+    use_trivial_constructor::use_trivial_constructor,
 };
 use stdx::format_to;
 use syntax::{
-    ast::{self, make},
     AstNode, Edition, SyntaxNode, SyntaxNodePtr, ToSmolStr,
+    ast::{self, make},
 };
 
-use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, fix};
 
 // Diagnostic: missing-fields
 //
@@ -83,7 +85,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass
         Some(vec![fix(
             "fill_missing_fields",
             "Fill struct fields",
-            SourceChange::from_text_edit(range.file_id, edit),
+            SourceChange::from_text_edit(range.file_id.file_id(ctx.sema.db), edit),
             range.range,
         )])
     };
@@ -140,11 +142,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass
                         )
                     })();
 
-                    if expr.is_some() {
-                        expr
-                    } else {
-                        Some(generate_fill_expr(ty))
-                    }
+                    if expr.is_some() { expr } else { Some(generate_fill_expr(ty)) }
                 };
                 let field = make::record_expr_field(
                     make::name_ref(&f.name(ctx.sema.db).display_no_db(ctx.edition).to_smolstr()),
@@ -177,7 +175,7 @@ fn make_ty(
     edition: Edition,
 ) -> ast::Type {
     let ty_str = match ty.as_adt() {
-        Some(adt) => adt.name(db).display(db.upcast(), edition).to_string(),
+        Some(adt) => adt.name(db).display(db, edition).to_string(),
         None => {
             ty.display_source_code(db, module.into(), false).ok().unwrap_or_else(|| "_".to_owned())
         }
@@ -209,14 +207,17 @@ fn get_default_constructor(
         }
     }
 
-    let krate = ctx.sema.file_to_module_def(d.file.original_file(ctx.sema.db))?.krate();
+    let krate = ctx
+        .sema
+        .file_to_module_def(d.file.original_file(ctx.sema.db).file_id(ctx.sema.db))?
+        .krate();
     let module = krate.root_module();
 
     // Look for a ::new() associated function
     let has_new_func = ty
         .iterate_assoc_items(ctx.sema.db, krate, |assoc_item| {
             if let AssocItem::Function(func) = assoc_item {
-                if func.name(ctx.sema.db) == sym::new.clone()
+                if func.name(ctx.sema.db) == sym::new
                     && func.assoc_fn_params(ctx.sema.db).is_empty()
                 {
                     return Some(());
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_lifetime.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_lifetime.rs
new file mode 100644
index 0000000000000..8cdbb6384ff5a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_lifetime.rs
@@ -0,0 +1,92 @@
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
+
+// Diagnostic: missing-lifetime
+//
+// This diagnostic is triggered when a lifetime argument is missing.
+pub(crate) fn missing_lifetime(
+    ctx: &DiagnosticsContext<'_>,
+    d: &hir::MissingLifetime,
+) -> Diagnostic {
+    Diagnostic::new_with_syntax_node_ptr(
+        ctx,
+        DiagnosticCode::RustcHardError("E0106"),
+        "missing lifetime specifier",
+        d.generics_or_segment.map(Into::into),
+    )
+    .experimental()
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::tests::check_diagnostics;
+
+    #[test]
+    fn in_fields() {
+        check_diagnostics(
+            r#"
+struct Foo<'a>(&'a ());
+struct Bar(Foo);
+        // ^^^ error: missing lifetime specifier
+        "#,
+        );
+    }
+
+    #[test]
+    fn bounds() {
+        check_diagnostics(
+            r#"
+struct Foo<'a, T>(&'a T);
+trait Trait<'a> {
+    type Assoc;
+}
+
+fn foo<'a, T: Trait>(
+           // ^^^^^ error: missing lifetime specifier
+    _: impl Trait<'a, Assoc: Trait>,
+                          // ^^^^^ error: missing lifetime specifier
+)
+where
+    Foo<T>: Trait<'a>,
+    // ^^^ error: missing lifetime specifier
+{
+}
+        "#,
+        );
+    }
+
+    #[test]
+    fn generic_defaults() {
+        check_diagnostics(
+            r#"
+struct Foo<'a>(&'a ());
+
+struct Bar<T = Foo>(T);
+            // ^^^ error: missing lifetime specifier
+        "#,
+        );
+    }
+
+    #[test]
+    fn type_alias_type() {
+        check_diagnostics(
+            r#"
+struct Foo<'a>(&'a ());
+
+type Bar = Foo;
+        // ^^^ error: missing lifetime specifier
+        "#,
+        );
+    }
+
+    #[test]
+    fn const_param_ty() {
+        check_diagnostics(
+            r#"
+struct Foo<'a>(&'a ());
+
+fn bar<const F: Foo>() {}
+             // ^^^ error: missing lifetime specifier
+        "#,
+        );
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
index 0bf600e5dfaf1..d3d3c3aa38dc2 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
@@ -18,10 +18,10 @@ pub(crate) fn missing_match_arms(
 #[cfg(test)]
 mod tests {
     use crate::{
+        DiagnosticsConfig,
         tests::{
             check_diagnostics, check_diagnostics_with_config, check_diagnostics_with_disabled,
         },
-        DiagnosticsConfig,
     };
     use test_utils::skip_slow_tests;
 
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
index 040aa2949aa93..3c36b455ca9d9 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
@@ -1,11 +1,11 @@
 use hir::db::ExpandDatabase;
-use hir::{HirFileIdExt, UnsafeLint, UnsafetyReason};
+use hir::{UnsafeLint, UnsafetyReason};
 use ide_db::text_edit::TextEdit;
 use ide_db::{assists::Assist, source_change::SourceChange};
-use syntax::{ast, SyntaxNode};
-use syntax::{match_ast, AstNode};
+use syntax::{AstNode, match_ast};
+use syntax::{SyntaxNode, ast};
 
-use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, fix};
 
 // Diagnostic: missing-unsafe
 //
@@ -51,8 +51,10 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingUnsafe) -> Option<Vec<Ass
 
     let replacement = format!("unsafe {{ {} }}", node_to_add_unsafe_block.text());
     let edit = TextEdit::replace(node_to_add_unsafe_block.text_range(), replacement);
-    let source_change =
-        SourceChange::from_text_edit(d.node.file_id.original_file(ctx.sema.db), edit);
+    let source_change = SourceChange::from_text_edit(
+        d.node.file_id.original_file(ctx.sema.db).file_id(ctx.sema.db),
+        edit,
+    );
     Some(vec![fix("add_unsafe", "Add unsafe block", source_change, expr.syntax().text_range())])
 }
 
@@ -137,13 +139,13 @@ struct HasUnsafe;
 impl HasUnsafe {
     unsafe fn unsafe_fn(&self) {
         let x = &5_usize as *const usize;
-        let _y = *x;
+        let _y = unsafe {*x};
     }
 }
 
 unsafe fn unsafe_fn() {
     let x = &5_usize as *const usize;
-    let _y = *x;
+    let _y = unsafe {*x};
 }
 
 fn main() {
@@ -337,7 +339,7 @@ struct S(usize);
 impl S {
     unsafe fn func(&self) {
         let x = &self.0 as *const usize;
-        let _z = *x;
+        let _z = unsafe { *x };
     }
 }
 fn main() {
@@ -350,7 +352,7 @@ struct S(usize);
 impl S {
     unsafe fn func(&self) {
         let x = &self.0 as *const usize;
-        let _z = *x;
+        let _z = unsafe { *x };
     }
 }
 fn main() {
@@ -874,6 +876,19 @@ fn baz() {
 fn f(it: unsafe fn()){
     it();
  // ^^^^ 💡 error: call to unsafe function is unsafe and requires an unsafe function or block
+}
+        "#,
+        );
+    }
+
+    #[test]
+    fn unsafe_call_in_const_expr() {
+        check_diagnostics(
+            r#"
+unsafe fn f() {}
+fn main() {
+    const { f(); };
+         // ^^^ 💡 error: call to unsafe function is unsafe and requires an unsafe function or block
 }
         "#,
         );
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs
index 0e3c4c7aa3642..5d25f2c6a90fd 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs
@@ -1,9 +1,9 @@
 use hir::db::ExpandDatabase;
 use ide_db::source_change::SourceChange;
 use ide_db::text_edit::TextEdit;
-use syntax::{ast, AstNode, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, T};
+use syntax::{AstNode, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, T, ast};
 
-use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, fix};
 
 // Diagnostic: need-mut
 //
@@ -39,7 +39,7 @@ pub(crate) fn need_mut(ctx: &DiagnosticsContext<'_>, d: &hir::NeedMut) -> Option
         Some(vec![fix(
             "add_mut",
             "Change it to be mutable",
-            SourceChange::from_text_edit(file_id, edit),
+            SourceChange::from_text_edit(file_id.file_id(ctx.sema.db), edit),
             use_range,
         )])
     })();
@@ -82,7 +82,7 @@ pub(crate) fn unused_mut(ctx: &DiagnosticsContext<'_>, d: &hir::UnusedMut) -> Op
         Some(vec![fix(
             "remove_mut",
             "Remove unnecessary `mut`",
-            SourceChange::from_text_edit(file_id, edit),
+            SourceChange::from_text_edit(file_id.file_id(ctx.sema.db), edit),
             use_range,
         )])
     })();
@@ -1258,7 +1258,7 @@ fn foo(mut foo: Foo) {
 
 pub struct A {}
 pub unsafe fn foo(a: *mut A) {
-    let mut b = || -> *mut A { &mut *a };
+    let mut b = || -> *mut A { unsafe { &mut *a } };
       //^^^^^ 💡 warn: variable does not need to be mutable
     let _ = b();
 }
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs
index 0f126a1a6562c..fa3347aa12e62 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs
@@ -1,13 +1,13 @@
 use either::Either;
-use hir::{db::ExpandDatabase, HasSource, HirDisplay, HirFileIdExt, Semantics, VariantId};
+use hir::{HasSource, HirDisplay, Semantics, VariantId, db::ExpandDatabase};
 use ide_db::text_edit::TextEdit;
-use ide_db::{source_change::SourceChange, EditionedFileId, RootDatabase};
+use ide_db::{EditionedFileId, RootDatabase, source_change::SourceChange};
 use syntax::{
-    ast::{self, edit::IndentLevel, make},
     AstNode,
+    ast::{self, edit::IndentLevel, make},
 };
 
-use crate::{fix, Assist, Diagnostic, DiagnosticCode, DiagnosticsContext};
+use crate::{Assist, Diagnostic, DiagnosticCode, DiagnosticsContext, fix};
 
 // Diagnostic: no-such-field
 //
@@ -108,7 +108,7 @@ fn missing_record_expr_field_fixes(
     }
 
     let source_change = SourceChange::from_text_edit(
-        def_file_id,
+        def_file_id.file_id(sema.db),
         TextEdit::insert(last_field_syntax.text_range().end(), new_field),
     );
 
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs
index c8e3cff364a9e..6b78645002617 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs
@@ -1,9 +1,9 @@
-use hir::{db::ExpandDatabase, diagnostics::RemoveTrailingReturn, FileRange};
+use hir::{FileRange, db::ExpandDatabase, diagnostics::RemoveTrailingReturn};
 use ide_db::text_edit::TextEdit;
 use ide_db::{assists::Assist, source_change::SourceChange};
-use syntax::{ast, AstNode};
+use syntax::{AstNode, ast};
 
-use crate::{adjusted_display_range, fix, Diagnostic, DiagnosticCode, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, adjusted_display_range, fix};
 
 // Diagnostic: remove-trailing-return
 //
@@ -49,7 +49,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &RemoveTrailingReturn) -> Option<Vec<A
     let replacement =
         return_expr.expr().map_or_else(String::new, |expr| format!("{}", expr.syntax().text()));
     let edit = TextEdit::replace(range, replacement);
-    let source_change = SourceChange::from_text_edit(file_id, edit);
+    let source_change = SourceChange::from_text_edit(file_id.file_id(ctx.sema.db), edit);
 
     Some(vec![fix(
         "remove_trailing_return",
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs
index a46c48608f7e1..8d717b9093b92 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs
@@ -1,17 +1,17 @@
-use hir::{db::ExpandDatabase, diagnostics::RemoveUnnecessaryElse, HirFileIdExt};
+use hir::{db::ExpandDatabase, diagnostics::RemoveUnnecessaryElse};
 use ide_db::text_edit::TextEdit;
 use ide_db::{assists::Assist, source_change::SourceChange};
 use itertools::Itertools;
 use syntax::{
+    AstNode, SyntaxToken, TextRange,
     ast::{
         self,
         edit::{AstNodeEdit, IndentLevel},
     },
-    AstNode, SyntaxToken, TextRange,
 };
 
 use crate::{
-    adjusted_display_range, fix, Diagnostic, DiagnosticCode, DiagnosticsContext, Severity,
+    Diagnostic, DiagnosticCode, DiagnosticsContext, Severity, adjusted_display_range, fix,
 };
 
 // Diagnostic: remove-unnecessary-else
@@ -90,8 +90,10 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &RemoveUnnecessaryElse) -> Option<Vec<
     };
 
     let edit = TextEdit::replace(range, replacement);
-    let source_change =
-        SourceChange::from_text_edit(d.if_expr.file_id.original_file(ctx.sema.db), edit);
+    let source_change = SourceChange::from_text_edit(
+        d.if_expr.file_id.original_file(ctx.sema.db).file_id(ctx.sema.db),
+        edit,
+    );
 
     Some(vec![fix(
         "remove_unnecessary_else",
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs
index 4aff446de603b..6b335c52de75c 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs
@@ -1,12 +1,12 @@
-use hir::{db::ExpandDatabase, HirFileIdExt, InFile};
+use hir::{InFile, db::ExpandDatabase};
 use ide_db::source_change::SourceChange;
 use ide_db::text_edit::TextEdit;
 use syntax::{
-    ast::{self, HasArgList},
     AstNode, TextRange,
+    ast::{self, HasArgList},
 };
 
-use crate::{fix, Assist, Diagnostic, DiagnosticCode, DiagnosticsContext};
+use crate::{Assist, Diagnostic, DiagnosticCode, DiagnosticsContext, fix};
 
 // Diagnostic: replace-filter-map-next-with-find-map
 //
@@ -43,7 +43,8 @@ fn fixes(
 
     let edit = TextEdit::replace(range_to_replace, replacement);
 
-    let source_change = SourceChange::from_text_edit(d.file.original_file(ctx.sema.db), edit);
+    let source_change =
+        SourceChange::from_text_edit(d.file.original_file(ctx.sema.db).file_id(ctx.sema.db), edit);
 
     Some(vec![fix(
         "replace_with_find_map",
@@ -56,8 +57,8 @@ fn fixes(
 #[cfg(test)]
 mod tests {
     use crate::{
-        tests::{check_diagnostics_with_config, check_fix},
         DiagnosticsConfig,
+        tests::{check_diagnostics_with_config, check_fix},
     };
 
     #[track_caller]
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_incorrect_safety.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_incorrect_safety.rs
index 6be2c54e6030e..19ee1caa3e6a9 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_incorrect_safety.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_incorrect_safety.rs
@@ -1,7 +1,7 @@
 use hir::InFile;
 use syntax::ast;
 
-use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext, Severity};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, Severity, adjusted_display_range};
 
 // Diagnostic: trait-impl-incorrect-safety
 //
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs
index a35b67ce98781..2d7d78f5d7bdf 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs
@@ -1,8 +1,8 @@
 use hir::InFile;
 use itertools::Itertools;
-use syntax::{ast, AstNode};
+use syntax::{AstNode, ast};
 
-use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, adjusted_display_range};
 
 // Diagnostic: trait-impl-missing-assoc_item
 //
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_orphan.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_orphan.rs
index 78a04e15424fd..35dc9b0fac8a3 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_orphan.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_orphan.rs
@@ -104,4 +104,17 @@ impl<T> foo::Foo<dyn LocalTrait> for Bar {}
 "#,
         );
     }
+
+    #[test]
+    fn twice_fundamental() {
+        check_diagnostics(
+            r#"
+//- /foo.rs crate:foo
+pub trait Trait {}
+//- /bar.rs crate:bar deps:foo
+struct Foo;
+impl foo::Trait for &&Foo {}
+        "#,
+        );
+    }
 }
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
index 3db2e013a3978..d5c4bcf768adb 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
@@ -1,7 +1,7 @@
-use hir::{db::ExpandDatabase, HasSource, HirDisplay};
+use hir::{HasSource, HirDisplay, db::ExpandDatabase};
 use ide_db::text_edit::TextRange;
 use ide_db::{
-    assists::{Assist, AssistId, AssistKind},
+    assists::{Assist, AssistId},
     label::Label,
     source_change::SourceChangeBuilder,
 };
@@ -54,10 +54,12 @@ pub(crate) fn trait_impl_redundant_assoc_item(
         }
     };
 
+    let hir::FileRange { file_id, range } =
+        hir::InFile::new(d.file_id, diagnostic_range).original_node_file_range_rooted(db);
     Diagnostic::new(
         DiagnosticCode::RustcHardError("E0407"),
         format!("{redundant_item_name} is not a member of trait `{trait_name}`"),
-        hir::InFile::new(d.file_id, diagnostic_range).original_node_file_range_rooted(db),
+        ide_db::FileRange { file_id: file_id.file_id(ctx.sema.db), range },
     )
     .with_fixes(quickfix_for_redundant_assoc_item(
         ctx,
@@ -93,11 +95,11 @@ fn quickfix_for_redundant_assoc_item(
         Some(())
     };
     let file_id = d.file_id.file_id()?;
-    let mut source_change_builder = SourceChangeBuilder::new(file_id);
+    let mut source_change_builder = SourceChangeBuilder::new(file_id.file_id(ctx.sema.db));
     add_assoc_item_def(&mut source_change_builder)?;
 
     Some(vec![Assist {
-        id: AssistId("add assoc item def into trait def", AssistKind::QuickFix),
+        id: AssistId::quick_fix("add assoc item def into trait def"),
         label: Label::new("Add assoc item def into trait def".to_owned()),
         group: None,
         target: range,
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs
index c726a3bcd3cad..8f6ed1a7bdbdb 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs
@@ -1,21 +1,20 @@
 use either::Either;
-use hir::{db::ExpandDatabase, CallableKind, ClosureStyle, HirDisplay, HirFileIdExt, InFile};
+use hir::{CallableKind, ClosureStyle, HirDisplay, InFile, db::ExpandDatabase};
 use ide_db::{
     famous_defs::FamousDefs,
     source_change::{SourceChange, SourceChangeBuilder},
     text_edit::TextEdit,
 };
 use syntax::{
+    AstNode, AstPtr, TextSize,
     ast::{
-        self,
+        self, BlockExpr, Expr, ExprStmt, HasArgList,
         edit::{AstNodeEdit, IndentLevel},
         syntax_factory::SyntaxFactory,
-        BlockExpr, Expr, ExprStmt, HasArgList,
     },
-    AstNode, AstPtr, TextSize,
 };
 
-use crate::{adjusted_display_range, fix, Assist, Diagnostic, DiagnosticCode, DiagnosticsContext};
+use crate::{Assist, Diagnostic, DiagnosticCode, DiagnosticsContext, adjusted_display_range, fix};
 
 // Diagnostic: type-mismatch
 //
@@ -72,11 +71,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch) -> Option<Vec<Assi
         str_ref_to_owned(ctx, d, expr_ptr, &mut fixes);
     }
 
-    if fixes.is_empty() {
-        None
-    } else {
-        Some(fixes)
-    }
+    if fixes.is_empty() { None } else { Some(fixes) }
 }
 
 fn add_reference(
@@ -155,7 +150,7 @@ fn add_missing_ok_or_some(
                 }
 
                 let source_change = SourceChange::from_text_edit(
-                    expr_ptr.file_id.original_file(ctx.sema.db),
+                    expr_ptr.file_id.original_file(ctx.sema.db).file_id(ctx.sema.db),
                     builder.finish(),
                 );
                 let name = format!("Insert {variant_name}(()) as the tail of this block");
@@ -169,7 +164,7 @@ fn add_missing_ok_or_some(
                 builder
                     .insert(ret_expr.syntax().text_range().end(), format!(" {variant_name}(())"));
                 let source_change = SourceChange::from_text_edit(
-                    expr_ptr.file_id.original_file(ctx.sema.db),
+                    expr_ptr.file_id.original_file(ctx.sema.db).file_id(ctx.sema.db),
                     builder.finish(),
                 );
                 let name = format!("Insert {variant_name}(()) as the return value");
@@ -182,8 +177,10 @@ fn add_missing_ok_or_some(
     let mut builder = TextEdit::builder();
     builder.insert(expr.syntax().text_range().start(), format!("{variant_name}("));
     builder.insert(expr.syntax().text_range().end(), ")".to_owned());
-    let source_change =
-        SourceChange::from_text_edit(expr_ptr.file_id.original_file(ctx.sema.db), builder.finish());
+    let source_change = SourceChange::from_text_edit(
+        expr_ptr.file_id.original_file(ctx.sema.db).file_id(ctx.sema.db),
+        builder.finish(),
+    );
     let name = format!("Wrap in {variant_name}");
     acc.push(fix("wrap_in_constructor", &name, source_change, expr_range));
     Some(())
@@ -225,7 +222,7 @@ fn remove_unnecessary_wrapper(
     let inner_arg = call_expr.arg_list()?.args().next()?;
 
     let file_id = expr_ptr.file_id.original_file(db);
-    let mut builder = SourceChangeBuilder::new(file_id);
+    let mut builder = SourceChangeBuilder::new(file_id.file_id(ctx.sema.db));
     let mut editor;
     match inner_arg {
         // We're returning `()`
@@ -236,7 +233,7 @@ fn remove_unnecessary_wrapper(
                 .and_then(Either::<ast::ReturnExpr, ast::StmtList>::cast)?;
 
             editor = builder.make_editor(parent.syntax());
-            let make = SyntaxFactory::new();
+            let make = SyntaxFactory::with_mappings();
 
             match parent {
                 Either::Left(ret_expr) => {
@@ -261,7 +258,7 @@ fn remove_unnecessary_wrapper(
         }
     }
 
-    builder.add_file_edits(file_id, editor);
+    builder.add_file_edits(file_id.file_id(ctx.sema.db), editor);
     let name = format!("Remove unnecessary {}() wrapper", variant.name(db).as_str());
     acc.push(fix(
         "remove_unnecessary_wrapper",
@@ -293,8 +290,10 @@ fn remove_semicolon(
     let semicolon_range = expr_before_semi.semicolon_token()?.text_range();
 
     let edit = TextEdit::delete(semicolon_range);
-    let source_change =
-        SourceChange::from_text_edit(expr_ptr.file_id.original_file(ctx.sema.db), edit);
+    let source_change = SourceChange::from_text_edit(
+        expr_ptr.file_id.original_file(ctx.sema.db).file_id(ctx.sema.db),
+        edit,
+    );
 
     acc.push(fix("remove_semicolon", "Remove this semicolon", source_change, semicolon_range));
     Some(())
@@ -321,8 +320,10 @@ fn str_ref_to_owned(
     let to_owned = ".to_owned()".to_owned();
 
     let edit = TextEdit::insert(expr.syntax().text_range().end(), to_owned);
-    let source_change =
-        SourceChange::from_text_edit(expr_ptr.file_id.original_file(ctx.sema.db), edit);
+    let source_change = SourceChange::from_text_edit(
+        expr_ptr.file_id.original_file(ctx.sema.db).file_id(ctx.sema.db),
+        edit,
+    );
     acc.push(fix("str_ref_to_owned", "Add .to_owned() here", source_change, expr_range));
 
     Some(())
@@ -1046,19 +1047,6 @@ fn test() -> String {
         );
     }
 
-    #[test]
-    fn closure_mismatch_show_different_type() {
-        check_diagnostics(
-            r#"
-fn f() {
-    let mut x = (|| 1, 2);
-    x = (|| 3, 4);
-       //^^^^ error: expected {closure#0}, found {closure#1}
-}
-            "#,
-        );
-    }
-
     #[test]
     fn type_mismatch_range_adjustment() {
         cov_mark::check!(type_mismatch_range_adjustment);
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs
index c25318eda4855..277aff2e08f4d 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs
@@ -1,11 +1,11 @@
 use hir::{
-    db::ExpandDatabase,
-    term_search::{term_search, TermSearchConfig, TermSearchCtx},
     ClosureStyle, HirDisplay, ImportPathConfig,
+    db::ExpandDatabase,
+    term_search::{TermSearchConfig, TermSearchCtx, term_search},
 };
 use ide_db::text_edit::TextEdit;
 use ide_db::{
-    assists::{Assist, AssistId, AssistKind, GroupLabel},
+    assists::{Assist, AssistId, GroupLabel},
     label::Label,
     source_change::SourceChange,
 };
@@ -78,23 +78,19 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option<Vec<Assist>
         })
         .unique()
         .map(|code| Assist {
-            id: AssistId("typed-hole", AssistKind::QuickFix),
+            id: AssistId::quick_fix("typed-hole"),
             label: Label::new(format!("Replace `_` with `{code}`")),
             group: Some(GroupLabel("Replace `_` with a term".to_owned())),
             target: original_range.range,
             source_change: Some(SourceChange::from_text_edit(
-                original_range.file_id,
+                original_range.file_id.file_id(ctx.sema.db),
                 TextEdit::replace(original_range.range, code),
             )),
             command: None,
         })
         .collect();
 
-    if !assists.is_empty() {
-        Some(assists)
-    } else {
-        None
-    }
+    if !assists.is_empty() { Some(assists) } else { None }
 }
 
 #[cfg(test)]
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs
index f3109b9bb73a2..47fa305936263 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs
@@ -2,20 +2,19 @@
 
 use std::iter;
 
-use hir::{db::DefDatabase, DefMap, InFile, ModuleSource};
+use hir::{DefMap, InFile, ModuleSource, db::DefDatabase};
+use ide_db::base_db::RootQueryDb;
 use ide_db::text_edit::TextEdit;
 use ide_db::{
-    base_db::{FileLoader, SourceDatabase, SourceRootDatabase},
-    source_change::SourceChange,
-    FileId, FileRange, LineIndexDatabase,
+    FileId, FileRange, LineIndexDatabase, base_db::SourceDatabase, source_change::SourceChange,
 };
 use paths::Utf8Component;
 use syntax::{
-    ast::{self, edit::IndentLevel, HasModuleItem, HasName},
     AstNode, TextRange,
+    ast::{self, HasModuleItem, HasName, edit::IndentLevel},
 };
 
-use crate::{fix, Assist, Diagnostic, DiagnosticCode, DiagnosticsContext, Severity};
+use crate::{Assist, Diagnostic, DiagnosticCode, DiagnosticsContext, Severity, fix};
 
 // Diagnostic: unlinked-file
 //
@@ -36,7 +35,9 @@ pub(crate) fn unlinked_file(
         "This file is not included anywhere in the module tree, so rust-analyzer can't offer IDE services."
     };
 
-    let message = format!("{message}\n\nIf you're intentionally working on unowned files, you can silence this warning by adding \"unlinked-file\" to rust-analyzer.diagnostics.disabled in your settings.");
+    let message = format!(
+        "{message}\n\nIf you're intentionally working on unowned files, you can silence this warning by adding \"unlinked-file\" to rust-analyzer.diagnostics.disabled in your settings."
+    );
 
     let mut unused = true;
 
@@ -48,6 +49,7 @@ pub(crate) fn unlinked_file(
         // Only show this diagnostic on the first three characters of
         // the file, to avoid overwhelming the user during startup.
         range = SourceDatabase::file_text(ctx.sema.db, file_id)
+            .text(ctx.sema.db)
             .char_indices()
             .take(3)
             .last()
@@ -78,7 +80,11 @@ fn fixes(
     // If there's an existing module that could add `mod` or `pub mod` items to include the unlinked file,
     // suggest that as a fix.
 
-    let source_root = ctx.sema.db.source_root(ctx.sema.db.file_source_root(file_id));
+    let db = ctx.sema.db;
+
+    let source_root = ctx.sema.db.file_source_root(file_id).source_root_id(db);
+    let source_root = ctx.sema.db.source_root(source_root).source_root(db);
+
     let our_path = source_root.path_for_file(&file_id)?;
     let parent = our_path.parent()?;
     let (module_name, _) = our_path.name_and_extension()?;
@@ -93,12 +99,14 @@ fn fixes(
     };
 
     // check crate roots, i.e. main.rs, lib.rs, ...
-    'crates: for &krate in &*ctx.sema.db.relevant_crates(file_id) {
+    let relevant_crates = db.relevant_crates(file_id);
+    'crates: for &krate in &*relevant_crates {
         let crate_def_map = ctx.sema.db.crate_def_map(krate);
 
         let root_module = &crate_def_map[DefMap::ROOT];
         let Some(root_file_id) = root_module.origin.file_id() else { continue };
-        let Some(crate_root_path) = source_root.path_for_file(&root_file_id.file_id()) else {
+        let Some(crate_root_path) = source_root.path_for_file(&root_file_id.file_id(ctx.sema.db))
+        else {
             continue;
         };
         let Some(rel) = parent.strip_prefix(&crate_root_path.parent()?) else { continue };
@@ -124,7 +132,12 @@ fn fixes(
         let InFile { file_id: parent_file_id, value: source } =
             current.definition_source(ctx.sema.db);
         let parent_file_id = parent_file_id.file_id()?;
-        return make_fixes(parent_file_id.file_id(), source, &module_name, trigger_range);
+        return make_fixes(
+            parent_file_id.file_id(ctx.sema.db),
+            source,
+            &module_name,
+            trigger_range,
+        );
     }
 
     // if we aren't adding to a crate root, walk backwards such that we support `#[path = ...]` overrides if possible
@@ -141,10 +154,12 @@ fn fixes(
             paths.into_iter().find_map(|path| source_root.file_for_path(&path))
         })?;
     stack.pop();
-    'crates: for &krate in ctx.sema.db.relevant_crates(parent_id).iter() {
+    let relevant_crates = db.relevant_crates(parent_id);
+    'crates: for &krate in relevant_crates.iter() {
         let crate_def_map = ctx.sema.db.crate_def_map(krate);
         let Some((_, module)) = crate_def_map.modules().find(|(_, module)| {
-            module.origin.file_id().map(Into::into) == Some(parent_id) && !module.origin.is_inline()
+            module.origin.file_id().map(|file_id| file_id.file_id(ctx.sema.db)) == Some(parent_id)
+                && !module.origin.is_inline()
         }) else {
             continue;
         };
@@ -174,7 +189,12 @@ fn fixes(
             let InFile { file_id: parent_file_id, value: source } =
                 current.definition_source(ctx.sema.db);
             let parent_file_id = parent_file_id.file_id()?;
-            return make_fixes(parent_file_id.file_id(), source, &module_name, trigger_range);
+            return make_fixes(
+                parent_file_id.file_id(ctx.sema.db),
+                source,
+                &module_name,
+                trigger_range,
+            );
         }
     }
 
@@ -193,9 +213,11 @@ fn make_fixes(
 
     let mod_decl = format!("mod {new_mod_name};");
     let pub_mod_decl = format!("pub mod {new_mod_name};");
+    let pub_crate_mod_decl = format!("pub(crate) mod {new_mod_name};");
 
     let mut mod_decl_builder = TextEdit::builder();
     let mut pub_mod_decl_builder = TextEdit::builder();
+    let mut pub_crate_mod_decl_builder = TextEdit::builder();
 
     let mut items = match &source {
         ModuleSource::SourceFile(it) => it.items(),
@@ -224,6 +246,7 @@ fn make_fixes(
             let indent = IndentLevel::from_node(last.syntax());
             mod_decl_builder.insert(offset, format!("\n{indent}{mod_decl}"));
             pub_mod_decl_builder.insert(offset, format!("\n{indent}{pub_mod_decl}"));
+            pub_crate_mod_decl_builder.insert(offset, format!("\n{indent}{pub_crate_mod_decl}"));
         }
         None => {
             // Prepend before the first item in the file.
@@ -234,6 +257,8 @@ fn make_fixes(
                     let indent = IndentLevel::from_node(first.syntax());
                     mod_decl_builder.insert(offset, format!("{mod_decl}\n\n{indent}"));
                     pub_mod_decl_builder.insert(offset, format!("{pub_mod_decl}\n\n{indent}"));
+                    pub_crate_mod_decl_builder
+                        .insert(offset, format!("{pub_crate_mod_decl}\n\n{indent}"));
                 }
                 None => {
                     // No items in the file, so just append at the end.
@@ -251,6 +276,8 @@ fn make_fixes(
                     };
                     mod_decl_builder.insert(offset, format!("{indent}{mod_decl}\n"));
                     pub_mod_decl_builder.insert(offset, format!("{indent}{pub_mod_decl}\n"));
+                    pub_crate_mod_decl_builder
+                        .insert(offset, format!("{indent}{pub_crate_mod_decl}\n"));
                 }
             }
         }
@@ -269,6 +296,12 @@ fn make_fixes(
             SourceChange::from_text_edit(parent_file_id, pub_mod_decl_builder.finish()),
             trigger_range,
         ),
+        fix(
+            "add_pub_crate_mod_declaration",
+            &format!("Insert `{pub_crate_mod_decl}`"),
+            SourceChange::from_text_edit(parent_file_id, pub_crate_mod_decl_builder.finish()),
+            trigger_range,
+        ),
     ])
 }
 
@@ -296,6 +329,11 @@ fn f() {}
                 r#"
 pub mod foo;
 
+fn f() {}
+"#,
+                r#"
+pub(crate) mod foo;
+
 fn f() {}
 "#,
             ],
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs
index 6ab713a5896c9..a4f4813cf5b07 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs
@@ -1,25 +1,24 @@
 use std::iter;
 
 use either::Either;
-use hir::{db::ExpandDatabase, Adt, FileRange, HasSource, HirDisplay, InFile, Struct, Union};
+use hir::{Adt, FileRange, HasSource, HirDisplay, InFile, Struct, Union, db::ExpandDatabase};
 use ide_db::text_edit::TextEdit;
 use ide_db::{
-    assists::{Assist, AssistId, AssistKind},
+    assists::{Assist, AssistId},
     helpers::is_editable_crate,
     label::Label,
     source_change::{SourceChange, SourceChangeBuilder},
 };
 use syntax::{
-    algo,
-    ast::{self, edit::IndentLevel, make, FieldList, Name, Visibility},
-    AstNode, AstPtr, Direction, SyntaxKind, TextSize,
+    AstNode, AstPtr, Direction, SyntaxKind, TextSize, algo,
+    ast::{self, FieldList, Name, Visibility, edit::IndentLevel, make},
 };
 use syntax::{
-    ast::{edit::AstNodeEdit, Type},
     SyntaxNode,
+    ast::{Type, edit::AstNodeEdit},
 };
 
-use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, adjusted_display_range};
 
 // Diagnostic: unresolved-field
 //
@@ -62,11 +61,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField) -> Option<Vec<A
         fixes.extend(method_fix(ctx, &d.expr));
     }
     fixes.extend(field_fix(ctx, d));
-    if fixes.is_empty() {
-        None
-    } else {
-        Some(fixes)
-    }
+    if fixes.is_empty() { None } else { Some(fixes) }
 }
 
 // FIXME: Add Snippet Support
@@ -124,10 +119,10 @@ fn add_variant_to_union(
     let (offset, record_field) =
         record_field_layout(None, field_name, suggested_type, field_list, adt_syntax.value)?;
 
-    let mut src_change_builder = SourceChangeBuilder::new(range.file_id);
+    let mut src_change_builder = SourceChangeBuilder::new(range.file_id.file_id(ctx.sema.db));
     src_change_builder.insert(offset, record_field);
     Some(Assist {
-        id: AssistId("add-variant-to-union", AssistKind::QuickFix),
+        id: AssistId::quick_fix("add-variant-to-union"),
         label: Label::new("Add field to union".to_owned()),
         group: None,
         target: error_range.range,
@@ -170,12 +165,13 @@ fn add_field_to_struct_fix(
                 struct_syntax.value,
             )?;
 
-            let mut src_change_builder = SourceChangeBuilder::new(struct_range.file_id);
+            let mut src_change_builder =
+                SourceChangeBuilder::new(struct_range.file_id.file_id(ctx.sema.db));
 
             // FIXME: Allow for choosing a visibility modifier see https://github.com/rust-lang/rust-analyzer/issues/11563
             src_change_builder.insert(offset, record_field);
             Some(Assist {
-                id: AssistId("add-field-to-record-struct", AssistKind::QuickFix),
+                id: AssistId::quick_fix("add-field-to-record-struct"),
                 label: Label::new("Add field to Record Struct".to_owned()),
                 group: None,
                 target: error_range.range,
@@ -185,7 +181,8 @@ fn add_field_to_struct_fix(
         }
         None => {
             // Add a field list to the Unit Struct
-            let mut src_change_builder = SourceChangeBuilder::new(struct_range.file_id);
+            let mut src_change_builder =
+                SourceChangeBuilder::new(struct_range.file_id.file_id(ctx.sema.db));
             let field_name = match field_name.chars().next() {
                 // FIXME : See match arm below regarding tuple structs.
                 Some(ch) if ch.is_numeric() => return None,
@@ -211,7 +208,7 @@ fn add_field_to_struct_fix(
             src_change_builder.replace(semi_colon.text_range(), record_field_list.to_string());
 
             Some(Assist {
-                id: AssistId("convert-unit-struct-to-record-struct", AssistKind::QuickFix),
+                id: AssistId::quick_fix("convert-unit-struct-to-record-struct"),
                 label: Label::new("Convert Unit Struct to Record Struct and add field".to_owned()),
                 group: None,
                 target: error_range.range,
@@ -270,12 +267,12 @@ fn method_fix(
     let expr = expr_ptr.value.to_node(&root);
     let FileRange { range, file_id } = ctx.sema.original_range_opt(expr.syntax())?;
     Some(Assist {
-        id: AssistId("expected-field-found-method-call-fix", AssistKind::QuickFix),
+        id: AssistId::quick_fix("expected-field-found-method-call-fix"),
         label: Label::new("Use parentheses to call the method".to_owned()),
         group: None,
         target: range,
         source_change: Some(SourceChange::from_text_edit(
-            file_id,
+            file_id.file_id(ctx.sema.db),
             TextEdit::insert(range.end(), "()".to_owned()),
         )),
         command: None,
@@ -285,11 +282,11 @@ fn method_fix(
 mod tests {
 
     use crate::{
+        DiagnosticsConfig,
         tests::{
             check_diagnostics, check_diagnostics_with_config, check_diagnostics_with_disabled,
             check_fix, check_no_fix,
         },
-        DiagnosticsConfig,
     };
 
     #[test]
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs
index 35e7521af7061..4422d8f8262fc 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs
@@ -1,16 +1,17 @@
-use hir::{db::ExpandDatabase, FileRange, HirDisplay, InFile};
+use hir::{FileRange, HirDisplay, InFile, db::ExpandDatabase};
 use ide_db::text_edit::TextEdit;
 use ide_db::{
-    assists::{Assist, AssistId, AssistKind},
+    assists::{Assist, AssistId},
     label::Label,
     source_change::SourceChange,
 };
 use syntax::{
-    ast::{self, make, HasArgList},
-    format_smolstr, AstNode, SmolStr, TextRange, ToSmolStr,
+    AstNode, SmolStr, TextRange, ToSmolStr,
+    ast::{self, HasArgList, make},
+    format_smolstr,
 };
 
-use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, adjusted_display_range};
 
 // Diagnostic: unresolved-method
 //
@@ -67,11 +68,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -> Option<
         fixes.push(assoc_func_fix);
     }
 
-    if fixes.is_empty() {
-        None
-    } else {
-        Some(fixes)
-    }
+    if fixes.is_empty() { None } else { Some(fixes) }
 }
 
 fn field_fix(
@@ -99,13 +96,13 @@ fn field_fix(
         _ => return None,
     };
     Some(Assist {
-        id: AssistId("expected-method-found-field-fix", AssistKind::QuickFix),
+        id: AssistId::quick_fix("expected-method-found-field-fix"),
         label: Label::new("Use parentheses to call the value of the field".to_owned()),
         group: None,
         target: range,
         source_change: Some(SourceChange::from_iter([
-            (file_id.into(), TextEdit::insert(range.start(), "(".to_owned())),
-            (file_id.into(), TextEdit::insert(range.end(), ")".to_owned())),
+            (file_id.file_id(ctx.sema.db), TextEdit::insert(range.start(), "(".to_owned())),
+            (file_id.file_id(ctx.sema.db), TextEdit::insert(range.end(), ")".to_owned())),
         ])),
         command: None,
     })
@@ -178,14 +175,14 @@ fn assoc_func_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -
         let file_id = ctx.sema.original_range_opt(call.receiver()?.syntax())?.file_id;
 
         Some(Assist {
-            id: AssistId("method_call_to_assoc_func_call_fix", AssistKind::QuickFix),
+            id: AssistId::quick_fix("method_call_to_assoc_func_call_fix"),
             label: Label::new(format!(
                 "Use associated func call instead: `{assoc_func_call_expr_string}`"
             )),
             group: None,
             target: range,
             source_change: Some(SourceChange::from_text_edit(
-                file_id,
+                file_id.file_id(ctx.sema.db),
                 TextEdit::replace(range, assoc_func_call_expr_string),
             )),
             command: None,
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs
index 2bd8e484f8537..599cabe3e4f20 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs
@@ -1,9 +1,9 @@
-use hir::{db::ExpandDatabase, HirFileIdExt};
+use hir::db::ExpandDatabase;
 use ide_db::{assists::Assist, base_db::AnchoredPathBuf, source_change::FileSystemEdit};
 use itertools::Itertools;
 use syntax::AstNode;
 
-use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, fix};
 
 // Diagnostic: unresolved-module
 //
@@ -43,7 +43,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedModule) -> Option<Vec<
                     &format!("Create module at `{candidate}`"),
                     FileSystemEdit::CreateFile {
                         dst: AnchoredPathBuf {
-                            anchor: d.decl.file_id.original_file(ctx.sema.db).file_id(),
+                            anchor: d.decl.file_id.original_file(ctx.sema.db).file_id(ctx.sema.db),
                             path: candidate.clone(),
                         },
                         initial_contents: "".to_owned(),
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs
index d5caf4de3367e..77b1075ea5325 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs
@@ -1,10 +1,10 @@
 use hir::Name;
 use ide_db::text_edit::TextEdit;
 use ide_db::{
-    assists::{Assist, AssistId, AssistKind},
+    FileRange, RootDatabase,
+    assists::{Assist, AssistId},
     label::Label,
     source_change::SourceChange,
-    FileRange, RootDatabase,
 };
 use syntax::{Edition, TextRange};
 
@@ -46,7 +46,7 @@ pub(crate) fn unused_variables(
                 ctx.sema.db,
                 var_name,
                 it.range,
-                diagnostic_range.into(),
+                diagnostic_range,
                 ast.file_id.is_macro(),
                 ctx.edition,
             )
@@ -68,7 +68,7 @@ fn fixes(
     }
 
     Some(vec![Assist {
-        id: AssistId("unscore_unused_variable_name", AssistKind::QuickFix),
+        id: AssistId::quick_fix("unscore_unused_variable_name"),
         label: Label::new(format!(
             "Rename unused {} to _{}",
             var_name.display(db, edition),
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs
index e5c2eca171ae4..e0ea4b3373878 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs
@@ -1,15 +1,17 @@
 use hir::InFile;
+use ide_db::RootDatabase;
 use ide_db::text_edit::TextEdit;
-use ide_db::{source_change::SourceChange, EditionedFileId, FileRange};
+use ide_db::{EditionedFileId, FileRange, source_change::SourceChange};
 use itertools::Itertools;
-use syntax::{ast, AstNode, SyntaxNode, SyntaxNodePtr};
+use syntax::{AstNode, SyntaxNode, SyntaxNodePtr, ast};
 
-use crate::{fix, Diagnostic, DiagnosticCode};
+use crate::{Diagnostic, DiagnosticCode, fix};
 
 // Diagnostic: unnecessary-braces
 //
 // Diagnostic for unnecessary braces in `use` items.
 pub(crate) fn useless_braces(
+    db: &RootDatabase,
     acc: &mut Vec<Diagnostic>,
     file_id: EditionedFileId,
     node: &SyntaxNode,
@@ -38,13 +40,13 @@ pub(crate) fn useless_braces(
             Diagnostic::new(
                 DiagnosticCode::RustcLint("unused_braces"),
                 "Unnecessary braces in use statement".to_owned(),
-                FileRange { file_id: file_id.into(), range: use_range },
+                FileRange { file_id: file_id.file_id(db), range: use_range },
             )
             .with_main_node(InFile::new(file_id.into(), SyntaxNodePtr::new(node)))
             .with_fixes(Some(vec![fix(
                 "remove_braces",
                 "Remove unnecessary braces",
-                SourceChange::from_text_edit(file_id, edit),
+                SourceChange::from_text_edit(file_id.file_id(db), edit),
                 use_range,
             )])),
         );
@@ -56,8 +58,8 @@ pub(crate) fn useless_braces(
 #[cfg(test)]
 mod tests {
     use crate::{
-        tests::{check_diagnostics, check_diagnostics_with_config, check_fix},
         DiagnosticsConfig,
+        tests::{check_diagnostics, check_diagnostics_with_config, check_fix},
     };
 
     #[test]
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
index e15d349578914..11efedd8a59d1 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
@@ -25,18 +25,23 @@
 
 mod handlers {
     pub(crate) mod await_outside_of_async;
+    pub(crate) mod bad_rtn;
     pub(crate) mod break_outside_of_loop;
+    pub(crate) mod elided_lifetimes_in_path;
     pub(crate) mod expected_function;
     pub(crate) mod generic_args_prohibited;
     pub(crate) mod inactive_code;
     pub(crate) mod incoherent_impl;
     pub(crate) mod incorrect_case;
+    pub(crate) mod incorrect_generics_len;
+    pub(crate) mod incorrect_generics_order;
     pub(crate) mod invalid_cast;
     pub(crate) mod invalid_derive_target;
     pub(crate) mod macro_error;
     pub(crate) mod malformed_derive;
     pub(crate) mod mismatched_arg_count;
     pub(crate) mod missing_fields;
+    pub(crate) mod missing_lifetime;
     pub(crate) mod missing_match_arms;
     pub(crate) mod missing_unsafe;
     pub(crate) mod moved_out_of_ref;
@@ -82,23 +87,23 @@ use std::{collections::hash_map, iter, sync::LazyLock};
 
 use either::Either;
 use hir::{
-    db::ExpandDatabase, diagnostics::AnyDiagnostic, Crate, DisplayTarget, HirFileId, InFile,
-    Semantics,
+    Crate, DisplayTarget, HirFileId, InFile, Semantics, db::ExpandDatabase,
+    diagnostics::AnyDiagnostic,
 };
 use ide_db::{
-    assists::{Assist, AssistId, AssistKind, AssistResolveStrategy},
-    base_db::{ReleaseChannel, SourceDatabase},
-    generated::lints::{Lint, LintGroup, CLIPPY_LINT_GROUPS, DEFAULT_LINTS, DEFAULT_LINT_GROUPS},
+    EditionedFileId, FileId, FileRange, FxHashMap, FxHashSet, RootDatabase, Severity, SnippetCap,
+    assists::{Assist, AssistId, AssistResolveStrategy},
+    base_db::{ReleaseChannel, RootQueryDb as _},
+    generated::lints::{CLIPPY_LINT_GROUPS, DEFAULT_LINT_GROUPS, DEFAULT_LINTS, Lint, LintGroup},
     imports::insert_use::InsertUseConfig,
     label::Label,
     source_change::SourceChange,
     syntax_helpers::node_ext::parse_tt_as_comma_sep_paths,
-    EditionedFileId, FileId, FileRange, FxHashMap, FxHashSet, RootDatabase, Severity, SnippetCap,
 };
 use itertools::Itertools;
 use syntax::{
+    AstPtr, Edition, NodeOrToken, SmolStr, SyntaxKind, SyntaxNode, SyntaxNodePtr, T, TextRange,
     ast::{self, AstNode, HasAttrs},
-    AstPtr, Edition, NodeOrToken, SmolStr, SyntaxKind, SyntaxNode, SyntaxNodePtr, TextRange, T,
 };
 
 // FIXME: Make this an enum
@@ -127,7 +132,7 @@ impl DiagnosticCode {
                 format!("https://rust-lang.github.io/rust-clippy/master/#/{e}")
             }
             DiagnosticCode::Ra(e, _) => {
-                format!("https://rust-analyzer.github.io/manual.html#{e}")
+                format!("https://rust-analyzer.github.io/book/diagnostics.html#{e}")
             }
         }
     }
@@ -301,8 +306,11 @@ impl DiagnosticsContext<'_> {
                 }
             }
         })()
+        .map(|frange| ide_db::FileRange {
+            file_id: frange.file_id.file_id(self.sema.db),
+            range: frange.range,
+        })
         .unwrap_or_else(|| sema.diagnostics_display_range(*node))
-        .into()
     }
 }
 
@@ -319,13 +327,14 @@ pub fn syntax_diagnostics(
     }
 
     let sema = Semantics::new(db);
-    let file_id = sema
+    let editioned_file_id = sema
         .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition(file_id));
+        .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
+
+    let (file_id, _) = editioned_file_id.unpack(db);
 
     // [#3434] Only take first 128 errors to prevent slowing down editor/ide, the number 128 is chosen arbitrarily.
-    db.parse_errors(file_id)
-        .as_deref()
+    db.parse_errors(editioned_file_id)
         .into_iter()
         .flatten()
         .take(128)
@@ -333,7 +342,7 @@ pub fn syntax_diagnostics(
             Diagnostic::new(
                 DiagnosticCode::SyntaxError,
                 format!("Syntax Error: {err}"),
-                FileRange { file_id: file_id.into(), range: err.range() },
+                FileRange { file_id, range: err.range() },
             )
         })
         .collect()
@@ -349,26 +358,28 @@ pub fn semantic_diagnostics(
 ) -> Vec<Diagnostic> {
     let _p = tracing::info_span!("semantic_diagnostics").entered();
     let sema = Semantics::new(db);
-    let file_id = sema
+    let editioned_file_id = sema
         .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition(file_id));
+        .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
+
+    let (file_id, edition) = editioned_file_id.unpack(db);
     let mut res = Vec::new();
 
-    let parse = sema.parse(file_id);
+    let parse = sema.parse(editioned_file_id);
 
     // FIXME: This iterates the entire file which is a rather expensive operation.
     // We should implement these differently in some form?
     // Salsa caching + incremental re-parse would be better here
     for node in parse.syntax().descendants() {
-        handlers::useless_braces::useless_braces(&mut res, file_id, &node);
-        handlers::field_shorthand::field_shorthand(&mut res, file_id, &node);
+        handlers::useless_braces::useless_braces(db, &mut res, editioned_file_id, &node);
+        handlers::field_shorthand::field_shorthand(db, &mut res, editioned_file_id, &node);
         handlers::json_is_not_rust::json_in_items(
             &sema,
             &mut res,
-            file_id,
+            editioned_file_id,
             &node,
             config,
-            file_id.edition(),
+            edition,
         );
     }
 
@@ -378,29 +389,32 @@ pub fn semantic_diagnostics(
         module.and_then(|m| db.toolchain_channel(m.krate().into())),
         Some(ReleaseChannel::Nightly) | None
     );
-    let krate = module.map(|module| module.krate()).unwrap_or_else(|| {
-        (*db.crate_graph().crates_in_topological_order().last().unwrap()).into()
-    });
-    let display_target = krate.to_display_target(db);
-    let ctx = DiagnosticsContext {
-        config,
-        sema,
-        resolve,
-        edition: file_id.edition(),
-        is_nightly,
-        display_target,
+
+    let krate = match module {
+        Some(module) => module.krate(),
+        None => {
+            match db.all_crates().last() {
+                Some(last) => (*last).into(),
+                // short-circuit, return an empty vec of diagnostics
+                None => return vec![],
+            }
+        }
     };
+    let display_target = krate.to_display_target(db);
+    let ctx = DiagnosticsContext { config, sema, resolve, edition, is_nightly, display_target };
 
     let mut diags = Vec::new();
     match module {
         // A bunch of parse errors in a file indicate some bigger structural parse changes in the
         // file, so we skip semantic diagnostics so we can show these faster.
         Some(m) => {
-            if db.parse_errors(file_id).as_deref().is_none_or(|es| es.len() < 16) {
+            if db.parse_errors(editioned_file_id).is_none_or(|es| es.len() < 16) {
                 m.diagnostics(db, &mut diags, config.style_lints);
             }
         }
-        None => handlers::unlinked_file::unlinked_file(&ctx, &mut res, file_id.file_id()),
+        None => {
+            handlers::unlinked_file::unlinked_file(&ctx, &mut res, editioned_file_id.file_id(db))
+        }
     }
 
     for diag in diags {
@@ -488,6 +502,11 @@ pub fn semantic_diagnostics(
             AnyDiagnostic::ParenthesizedGenericArgsWithoutFnTrait(d) => {
                 handlers::parenthesized_generic_args_without_fn_trait::parenthesized_generic_args_without_fn_trait(&ctx, &d)
             }
+            AnyDiagnostic::BadRtn(d) => handlers::bad_rtn::bad_rtn(&ctx, &d),
+            AnyDiagnostic::IncorrectGenericsLen(d) => handlers::incorrect_generics_len::incorrect_generics_len(&ctx, &d),
+            AnyDiagnostic::IncorrectGenericsOrder(d) => handlers::incorrect_generics_order::incorrect_generics_order(&ctx, &d),
+            AnyDiagnostic::MissingLifetime(d) => handlers::missing_lifetime::missing_lifetime(&ctx, &d),
+            AnyDiagnostic::ElidedLifetimesInPath(d) => handlers::elided_lifetimes_in_path::elided_lifetimes_in_path(&ctx, &d),
         };
         res.push(d)
     }
@@ -517,7 +536,7 @@ pub fn semantic_diagnostics(
         &mut FxHashMap::default(),
         &mut lints,
         &mut Vec::new(),
-        file_id.edition(),
+        editioned_file_id.edition(db),
     );
 
     res.retain(|d| d.severity != Severity::Allow);
@@ -559,9 +578,8 @@ fn handle_diag_from_macros(
     let span_map = sema.db.expansion_span_map(macro_file);
     let mut spans = span_map.spans_for_range(node.text_range());
     if spans.any(|span| {
-        sema.db.lookup_intern_syntax_context(span.ctx).outer_expn.is_some_and(|expansion| {
-            let macro_call =
-                sema.db.lookup_intern_macro_call(expansion.as_macro_file().macro_call_id);
+        span.ctx.outer_expn(sema.db).is_some_and(|expansion| {
+            let macro_call = sema.db.lookup_intern_macro_call(expansion.into());
             // We don't want to show diagnostics for non-local macros at all, but proc macros authors
             // seem to rely on being able to emit non-warning-free code, so we don't want to show warnings
             // for them even when the proc macro comes from the same workspace (in rustc that's not a
@@ -767,9 +785,9 @@ fn fill_lint_attrs(
                     }
                 });
 
-                let all_matching_groups = lint_groups(&diag.code, edition)
-                    .iter()
-                    .filter_map(|lint_group| cached.get(lint_group));
+                let lints = lint_groups(&diag.code, edition);
+                let all_matching_groups =
+                    lints.iter().filter_map(|lint_group| cached.get(lint_group));
                 let cached_severity =
                     all_matching_groups.min_by_key(|it| it.depth).map(|it| it.severity);
 
@@ -977,7 +995,7 @@ fn fix(id: &'static str, label: &str, source_change: SourceChange, target: TextR
 fn unresolved_fix(id: &'static str, label: &str, target: TextRange) -> Assist {
     assert!(!id.contains(' '));
     Assist {
-        id: AssistId(id, AssistKind::QuickFix),
+        id: AssistId::quick_fix(id),
         label: Label::new(label.to_owned()),
         group: None,
         target,
@@ -993,8 +1011,8 @@ fn adjusted_display_range<N: AstNode>(
 ) -> FileRange {
     let source_file = ctx.sema.parse_or_expand(diag_ptr.file_id);
     let node = diag_ptr.value.to_node(&source_file);
-    diag_ptr
+    let hir::FileRange { file_id, range } = diag_ptr
         .with_value(adj(node).unwrap_or_else(|| diag_ptr.value.text_range()))
-        .original_node_file_range_rooted(ctx.sema.db)
-        .into()
+        .original_node_file_range_rooted(ctx.sema.db);
+    ide_db::FileRange { file_id: file_id.file_id(ctx.sema.db), range }
 }
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs
index fc2a7db7174e9..13d08d46dedd5 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs
@@ -3,12 +3,12 @@
 mod overly_long_real_world_cases;
 
 use ide_db::{
-    assists::AssistResolveStrategy, base_db::SourceDatabase, LineIndexDatabase, RootDatabase,
+    LineIndexDatabase, RootDatabase, assists::AssistResolveStrategy, base_db::SourceDatabase,
 };
 use itertools::Itertools;
 use stdx::trim_indent;
 use test_fixture::WithFixture;
-use test_utils::{assert_eq_text, extract_annotations, MiniCore};
+use test_utils::{MiniCore, assert_eq_text, extract_annotations};
 
 use crate::{DiagnosticsConfig, ExprFillDefaultMode, Severity};
 
@@ -75,7 +75,7 @@ fn check_nth_fix_with_config(
         &db,
         &config,
         &AssistResolveStrategy::All,
-        file_position.file_id.into(),
+        file_position.file_id.file_id(&db),
     )
     .pop()
     .expect("no diagnostics");
@@ -85,7 +85,7 @@ fn check_nth_fix_with_config(
     let actual = {
         let source_change = fix.source_change.as_ref().unwrap();
         let file_id = *source_change.source_file_edits.keys().next().unwrap();
-        let mut actual = db.file_text(file_id).to_string();
+        let mut actual = db.file_text(file_id).text(&db).to_string();
 
         for (edit, snippet_edit) in source_change.source_file_edits.values() {
             edit.apply(&mut actual);
@@ -128,7 +128,7 @@ pub(crate) fn check_has_fix(
         &db,
         &conf,
         &AssistResolveStrategy::All,
-        file_position.file_id.into(),
+        file_position.file_id.file_id(&db),
     )
     .into_iter()
     .find(|d| {
@@ -142,7 +142,7 @@ pub(crate) fn check_has_fix(
                     let actual = {
                         let source_change = fix.source_change.as_ref().unwrap();
                         let file_id = *source_change.source_file_edits.keys().next().unwrap();
-                        let mut actual = db.file_text(file_id).to_string();
+                        let mut actual = db.file_text(file_id).text(&db).to_string();
 
                         for (edit, snippet_edit) in source_change.source_file_edits.values() {
                             edit.apply(&mut actual);
@@ -175,7 +175,7 @@ pub(crate) fn check_has_single_fix(
         &db,
         &conf,
         &AssistResolveStrategy::All,
-        file_position.file_id.into(),
+        file_position.file_id.file_id(&db),
     )
     .into_iter()
     .find(|d| {
@@ -190,7 +190,7 @@ pub(crate) fn check_has_single_fix(
                     let actual = {
                         let source_change = fix.source_change.as_ref().unwrap();
                         let file_id = *source_change.source_file_edits.keys().next().unwrap();
-                        let mut actual = db.file_text(file_id).to_string();
+                        let mut actual = db.file_text(file_id).text(&db).to_string();
 
                         for (edit, snippet_edit) in source_change.source_file_edits.values() {
                             edit.apply(&mut actual);
@@ -216,7 +216,7 @@ pub(crate) fn check_no_fix(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
         &db,
         &DiagnosticsConfig::test_sample(),
         &AssistResolveStrategy::All,
-        file_position.file_id.into(),
+        file_position.file_id.file_id(&db),
     )
     .pop()
     .unwrap();
@@ -250,7 +250,7 @@ pub(crate) fn check_diagnostics_with_config(
         .iter()
         .copied()
         .flat_map(|file_id| {
-            super::full_diagnostics(&db, &config, &AssistResolveStrategy::All, file_id.into())
+            super::full_diagnostics(&db, &config, &AssistResolveStrategy::All, file_id.file_id(&db))
                 .into_iter()
                 .map(|d| {
                     let mut annotation = String::new();
@@ -272,12 +272,13 @@ pub(crate) fn check_diagnostics_with_config(
         .map(|(diagnostic, annotation)| (diagnostic.file_id, (diagnostic.range, annotation)))
         .into_group_map();
     for file_id in files {
-        let file_id = file_id.into();
+        let file_id = file_id.file_id(&db);
         let line_index = db.line_index(file_id);
 
         let mut actual = annotations.remove(&file_id).unwrap_or_default();
-        let expected = extract_annotations(&db.file_text(file_id));
-        actual.sort_by_key(|(range, _)| range.start());
+        let mut expected = extract_annotations(&db.file_text(file_id).text(&db));
+        expected.sort_by_key(|(range, s)| (range.start(), s.clone()));
+        actual.sort_by_key(|(range, s)| (range.start(), s.clone()));
         // FIXME: We should panic on duplicates instead, but includes currently cause us to report
         // diagnostics twice for the calling module when both files are queried.
         actual.dedup();
@@ -289,7 +290,7 @@ pub(crate) fn check_diagnostics_with_config(
             for (e, _) in &actual {
                 eprintln!(
                     "Code in range {e:?} = {}",
-                    &db.file_text(file_id)[usize::from(e.start())..usize::from(e.end())]
+                    &db.file_text(file_id).text(&db)[usize::from(e.start())..usize::from(e.end())]
                 )
             }
         }
@@ -316,7 +317,7 @@ fn test_disabled_diagnostics() {
     config.disabled.insert("E0583".into());
 
     let (db, file_id) = RootDatabase::with_single_file(r#"mod foo;"#);
-    let file_id = file_id.into();
+    let file_id = file_id.file_id(&db);
 
     let diagnostics = super::full_diagnostics(&db, &config, &AssistResolveStrategy::All, file_id);
     assert!(diagnostics.is_empty());
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml b/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml
index fa75e5a421476..1212fa9f9c65f 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml
@@ -12,20 +12,18 @@ rust-version.workspace = true
 [lib]
 
 [dependencies]
-cov-mark = "2.0.0-pre.1"
+cov-mark = "2.0.0"
 itertools.workspace = true
-triomphe.workspace = true
-nohash-hasher.workspace = true
 
 # local deps
 hir.workspace = true
 ide-db.workspace = true
 parser.workspace = true
-stdx.workspace = true
 syntax.workspace = true
 
 [dev-dependencies]
-expect-test = "1.4.0"
+expect-test = "1.5.1"
+triomphe.workspace = true
 
 # local deps
 test-utils.workspace = true
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/fragments.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/fragments.rs
index ca937a03f82d2..8d6b7c637d730 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/fragments.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/fragments.rs
@@ -6,7 +6,7 @@
 //! needs to determine it somehow. We do this in a stupid way -- by pasting SSR
 //! rule into different contexts and checking what works.
 
-use syntax::{ast, AstNode, SyntaxNode};
+use syntax::{AstNode, SyntaxNode, ast};
 
 pub(crate) fn ty(s: &str) -> Result<SyntaxNode, ()> {
     fragment::<ast::Type>("type T = {};", s)
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs
index a14e69030e325..181cc74a51d4f 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs
@@ -1,10 +1,10 @@
 //! This module allows building an SSR MatchFinder by parsing the SSR rule
 //! from a comment.
 
-use ide_db::{base_db::SourceDatabase, EditionedFileId, FilePosition, FileRange, RootDatabase};
+use ide_db::{EditionedFileId, FilePosition, FileRange, RootDatabase, base_db::RootQueryDb};
 use syntax::{
-    ast::{self, AstNode, AstToken},
     TextRange,
+    ast::{self, AstNode, AstToken},
 };
 
 use crate::MatchFinder;
@@ -17,7 +17,9 @@ pub fn ssr_from_comment(
     frange: FileRange,
 ) -> Option<(MatchFinder<'_>, TextRange)> {
     let comment = {
-        let file = db.parse(EditionedFileId::current_edition(frange.file_id));
+        let file_id = EditionedFileId::current_edition(db, frange.file_id);
+
+        let file = db.parse(file_id);
         file.tree().syntax().token_at_offset(frange.range.start()).find_map(ast::Comment::cast)
     }?;
     let comment_text_without_prefix = comment.text().strip_prefix(comment.prefix()).unwrap();
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs
index 889258c94c535..339c199ec29ac 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs
@@ -80,10 +80,11 @@ pub use crate::{errors::SsrError, from_comment::ssr_from_comment, matching::Matc
 
 use crate::{errors::bail, matching::MatchFailureReason};
 use hir::{FileRange, Semantics};
+use ide_db::symbol_index::SymbolsDatabase;
 use ide_db::text_edit::TextEdit;
-use ide_db::{base_db::SourceDatabase, EditionedFileId, FileId, FxHashMap, RootDatabase};
+use ide_db::{EditionedFileId, FileId, FxHashMap, RootDatabase, base_db::SourceDatabase};
 use resolving::ResolvedRule;
-use syntax::{ast, AstNode, SyntaxNode, TextRange};
+use syntax::{AstNode, SyntaxNode, TextRange, ast};
 
 // A structured search replace rule. Create by calling `parse` on a str.
 #[derive(Debug)]
@@ -126,7 +127,7 @@ impl<'db> MatchFinder<'db> {
         let sema = Semantics::new(db);
         let file_id = sema
             .attach_first_edition(lookup_context.file_id)
-            .unwrap_or_else(|| EditionedFileId::current_edition(lookup_context.file_id));
+            .unwrap_or_else(|| EditionedFileId::current_edition(db, lookup_context.file_id));
         let resolution_scope = resolving::ResolutionScope::new(
             &sema,
             hir::FilePosition { file_id, offset: lookup_context.offset },
@@ -137,10 +138,11 @@ impl<'db> MatchFinder<'db> {
 
     /// Constructs an instance using the start of the first file in `db` as the lookup context.
     pub fn at_first_file(db: &'db ide_db::RootDatabase) -> Result<MatchFinder<'db>, SsrError> {
-        use ide_db::base_db::SourceRootDatabase;
-        use ide_db::symbol_index::SymbolsDatabase;
-        if let Some(first_file_id) =
-            db.local_roots().iter().next().and_then(|root| db.source_root(*root).iter().next())
+        if let Some(first_file_id) = db
+            .local_roots()
+            .iter()
+            .next()
+            .and_then(|root| db.source_root(*root).source_root(db).iter().next())
         {
             MatchFinder::in_context(
                 db,
@@ -171,7 +173,7 @@ impl<'db> MatchFinder<'db> {
         let mut matches_by_file = FxHashMap::default();
         for m in self.matches().matches {
             matches_by_file
-                .entry(m.range.file_id.file_id())
+                .entry(m.range.file_id.file_id(self.sema.db))
                 .or_insert_with(SsrMatches::default)
                 .matches
                 .push(m);
@@ -184,7 +186,7 @@ impl<'db> MatchFinder<'db> {
                     replacing::matches_to_edit(
                         self.sema.db,
                         &matches,
-                        &self.sema.db.file_text(file_id),
+                        &self.sema.db.file_text(file_id).text(self.sema.db),
                         &self.rules,
                     ),
                 )
@@ -225,7 +227,7 @@ impl<'db> MatchFinder<'db> {
     ) -> Vec<MatchDebugInfo> {
         let file = self.sema.parse(file_id);
         let mut res = Vec::new();
-        let file_text = self.sema.db.file_text(file_id.into());
+        let file_text = self.sema.db.file_text(file_id.file_id(self.sema.db)).text(self.sema.db);
         let mut remaining_text = &*file_text;
         let mut base = 0;
         let len = snippet.len() as u32;
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs
index e219ba4bf6398..cff4eede04269 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs
@@ -2,16 +2,16 @@
 //! process of matching, placeholder values are recorded.
 
 use crate::{
+    SsrMatches,
     parsing::{Constraint, NodeKind, Placeholder, Var},
     resolving::{ResolvedPattern, ResolvedRule, UfcsCallInfo},
-    SsrMatches,
 };
 use hir::{FileRange, ImportPathConfig, Semantics};
-use ide_db::{base_db::SourceDatabase, FxHashMap};
+use ide_db::{FxHashMap, base_db::RootQueryDb};
 use std::{cell::Cell, iter::Peekable};
 use syntax::{
-    ast::{self, AstNode, AstToken, HasGenericArgs},
     SmolStr, SyntaxElement, SyntaxElementChildren, SyntaxKind, SyntaxNode, SyntaxToken,
+    ast::{self, AstNode, AstToken, HasGenericArgs},
 };
 
 // Creates a match error. If we're currently attempting to match some code that we thought we were
@@ -627,11 +627,10 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
             })?
             .original;
         let krate = self.sema.scope(expr.syntax()).map(|it| it.krate()).unwrap_or_else(|| {
-            hir::Crate::from(
-                *self.sema.db.crate_graph().crates_in_topological_order().last().unwrap(),
-            )
+            hir::Crate::from(*self.sema.db.all_crates().last().expect("no crate graph present"))
         });
-        let res = code_type
+
+        code_type
             .autoderef(self.sema.db)
             .enumerate()
             .find(|(_, deref_code_type)| pattern_type == deref_code_type)
@@ -644,8 +643,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
                     pattern_type.display(self.sema.db, display_target),
                     code_type.display(self.sema.db, display_target)
                 )
-            });
-        res
+            })
     }
 
     fn get_placeholder_for_node(&self, node: &SyntaxNode) -> Option<&Placeholder> {
@@ -808,10 +806,20 @@ mod tests {
         let input = "fn foo() {} fn bar() {} fn main() { foo(1+2); }";
 
         let (db, position, selections) = crate::tests::single_file(input);
+        let position = ide_db::FilePosition {
+            file_id: position.file_id.file_id(&db),
+            offset: position.offset,
+        };
         let mut match_finder = MatchFinder::in_context(
             &db,
-            position.into(),
-            selections.into_iter().map(Into::into).collect(),
+            position,
+            selections
+                .into_iter()
+                .map(|frange| ide_db::FileRange {
+                    file_id: frange.file_id.file_id(&db),
+                    range: frange.range,
+                })
+                .collect(),
         )
         .unwrap();
         match_finder.add_rule(rule).unwrap();
@@ -822,7 +830,7 @@ mod tests {
 
         let edits = match_finder.edits();
         assert_eq!(edits.len(), 1);
-        let edit = &edits[&position.file_id.into()];
+        let edit = &edits[&position.file_id];
         let mut after = input.to_owned();
         edit.apply(&mut after);
         assert_eq!(after, "fn foo() {} fn bar() {} fn main() { bar(1+2); }");
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/parsing.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/parsing.rs
index ea40d5b815ef3..2c0f1658d837f 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/parsing.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/parsing.rs
@@ -9,7 +9,7 @@ use std::{fmt::Display, str::FromStr};
 use syntax::{SmolStr, SyntaxKind, SyntaxNode, T};
 
 use crate::errors::bail;
-use crate::{fragments, SsrError, SsrPattern, SsrRule};
+use crate::{SsrError, SsrPattern, SsrRule, fragments};
 
 #[derive(Debug)]
 pub(crate) struct ParsedRule {
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/replacing.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/replacing.rs
index 11c1615a560eb..3c92697926f3d 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/replacing.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/replacing.rs
@@ -5,11 +5,11 @@ use ide_db::{FxHashMap, FxHashSet};
 use itertools::Itertools;
 use parser::Edition;
 use syntax::{
-    ast::{self, AstNode, AstToken},
     SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, TextSize,
+    ast::{self, AstNode, AstToken},
 };
 
-use crate::{fragments, resolving::ResolvedRule, Match, SsrMatches};
+use crate::{Match, SsrMatches, fragments, resolving::ResolvedRule};
 
 /// Returns a text edit that will replace each match in `matches` with its corresponding replacement
 /// template. Placeholders in the template will have been substituted with whatever they matched to
@@ -34,7 +34,7 @@ fn matches_to_edit_at_offset(
     for m in &matches.matches {
         edit_builder.replace(
             m.range.range.checked_sub(relative_start).unwrap(),
-            render_replace(db, m, file_src, rules, m.range.file_id.edition()),
+            render_replace(db, m, file_src, rules, m.range.file_id.edition(db)),
         );
     }
     edit_builder.finish()
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs
index 270ee0b3ec967..a687db4bf58d6 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs
@@ -4,11 +4,11 @@ use hir::AsAssocItem;
 use ide_db::FxHashMap;
 use parsing::Placeholder;
 use syntax::{
-    ast::{self, HasGenericArgs},
     SmolStr, SyntaxKind, SyntaxNode, SyntaxToken,
+    ast::{self, HasGenericArgs},
 };
 
-use crate::{errors::error, parsing, SsrError};
+use crate::{SsrError, errors::error, parsing};
 
 pub(crate) struct ResolutionScope<'db> {
     scope: hir::SemanticsScope<'db>,
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs
index b1cade39266a0..d89911fca403d 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs
@@ -1,17 +1,16 @@
 //! Searching for matches.
 
 use crate::{
-    matching,
+    Match, MatchFinder, matching,
     resolving::{ResolvedPath, ResolvedPattern, ResolvedRule},
-    Match, MatchFinder,
 };
 use hir::FileRange;
 use ide_db::{
+    EditionedFileId, FileId, FxHashSet,
     defs::Definition,
     search::{SearchScope, UsageSearchResult},
-    EditionedFileId, FileId, FxHashSet,
 };
-use syntax::{ast, AstNode, SyntaxKind, SyntaxNode};
+use syntax::{AstNode, SyntaxKind, SyntaxNode, ast};
 
 /// A cache for the results of find_usages. This is for when we have multiple patterns that have the
 /// same path. e.g. if the pattern was `foo::Bar` that can parse as a path, an expression, a type
@@ -139,7 +138,7 @@ impl MatchFinder<'_> {
             files.push(
                 self.sema
                     .attach_first_edition(file_id)
-                    .unwrap_or_else(|| EditionedFileId::current_edition(file_id)),
+                    .unwrap_or_else(|| EditionedFileId::current_edition(self.sema.db, file_id)),
             );
         });
         SearchScope::files(&files)
@@ -156,10 +155,10 @@ impl MatchFinder<'_> {
     fn search_files_do(&self, mut callback: impl FnMut(FileId)) {
         if self.restrict_ranges.is_empty() {
             // Unrestricted search.
-            use ide_db::base_db::SourceRootDatabase;
+            use ide_db::base_db::SourceDatabase;
             use ide_db::symbol_index::SymbolsDatabase;
             for &root in self.sema.db.local_roots().iter() {
-                let sr = self.sema.db.source_root(root);
+                let sr = self.sema.db.source_root(root).source_root(self.sema.db);
                 for file_id in sr.iter() {
                     callback(file_id);
                 }
@@ -230,7 +229,9 @@ impl MatchFinder<'_> {
         }
         let Some(node_range) = self.sema.original_range_opt(code) else { return false };
         for range in &self.restrict_ranges {
-            if range.file_id == node_range.file_id && range.range.contains_range(node_range.range) {
+            if range.file_id == node_range.file_id.file_id(self.sema.db)
+                && range.range.contains_range(node_range.range)
+            {
                 return true;
             }
         }
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs
index d783e1952526c..46b633b8a3250 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs
@@ -1,8 +1,8 @@
-use expect_test::{expect, Expect};
+use expect_test::{Expect, expect};
 use hir::{FilePosition, FileRange};
 use ide_db::{
-    base_db::{ra_salsa::Durability, SourceDatabase},
     EditionedFileId, FxHashSet,
+    base_db::{SourceDatabase, salsa::Durability},
 };
 use test_utils::RangeOrOffset;
 use triomphe::Arc;
@@ -67,7 +67,7 @@ fn parser_undefined_placeholder_in_replacement() {
 /// the start of the file. If there's a second cursor marker, then we'll return a single range.
 pub(crate) fn single_file(code: &str) -> (ide_db::RootDatabase, FilePosition, Vec<FileRange>) {
     use ide_db::symbol_index::SymbolsDatabase;
-    use test_fixture::{WithFixture, WORKSPACE};
+    use test_fixture::{WORKSPACE, WithFixture};
     let (mut db, file_id, range_or_offset) = if code.contains(test_utils::CURSOR_MARKER) {
         ide_db::RootDatabase::with_range_or_offset(code)
     } else {
@@ -98,10 +98,18 @@ fn assert_ssr_transform(rule: &str, input: &str, expected: Expect) {
 
 fn assert_ssr_transforms(rules: &[&str], input: &str, expected: Expect) {
     let (db, position, selections) = single_file(input);
+    let position =
+        ide_db::FilePosition { file_id: position.file_id.file_id(&db), offset: position.offset };
     let mut match_finder = MatchFinder::in_context(
         &db,
-        position.into(),
-        selections.into_iter().map(Into::into).collect(),
+        position,
+        selections
+            .into_iter()
+            .map(|selection| ide_db::FileRange {
+                file_id: selection.file_id.file_id(&db),
+                range: selection.range,
+            })
+            .collect(),
     )
     .unwrap();
     for rule in rules {
@@ -114,8 +122,8 @@ fn assert_ssr_transforms(rules: &[&str], input: &str, expected: Expect) {
     }
     // Note, db.file_text is not necessarily the same as `input`, since fixture parsing alters
     // stuff.
-    let mut actual = db.file_text(position.file_id.into()).to_string();
-    edits[&position.file_id.into()].apply(&mut actual);
+    let mut actual = db.file_text(position.file_id).text(&db).to_string();
+    edits[&position.file_id].apply(&mut actual);
     expected.assert_eq(&actual);
 }
 
@@ -136,8 +144,14 @@ fn assert_matches(pattern: &str, code: &str, expected: &[&str]) {
     let (db, position, selections) = single_file(code);
     let mut match_finder = MatchFinder::in_context(
         &db,
-        position.into(),
-        selections.into_iter().map(Into::into).collect(),
+        ide_db::FilePosition { file_id: position.file_id.file_id(&db), offset: position.offset },
+        selections
+            .into_iter()
+            .map(|selection| ide_db::FileRange {
+                file_id: selection.file_id.file_id(&db),
+                range: selection.range,
+            })
+            .collect(),
     )
     .unwrap();
     match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
@@ -153,8 +167,14 @@ fn assert_no_match(pattern: &str, code: &str) {
     let (db, position, selections) = single_file(code);
     let mut match_finder = MatchFinder::in_context(
         &db,
-        position.into(),
-        selections.into_iter().map(Into::into).collect(),
+        ide_db::FilePosition { file_id: position.file_id.file_id(&db), offset: position.offset },
+        selections
+            .into_iter()
+            .map(|selection| ide_db::FileRange {
+                file_id: selection.file_id.file_id(&db),
+                range: selection.range,
+            })
+            .collect(),
     )
     .unwrap();
     match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
@@ -169,8 +189,14 @@ fn assert_match_failure_reason(pattern: &str, code: &str, snippet: &str, expecte
     let (db, position, selections) = single_file(code);
     let mut match_finder = MatchFinder::in_context(
         &db,
-        position.into(),
-        selections.into_iter().map(Into::into).collect(),
+        ide_db::FilePosition { file_id: position.file_id.file_id(&db), offset: position.offset },
+        selections
+            .into_iter()
+            .map(|selection| ide_db::FileRange {
+                file_id: selection.file_id.file_id(&db),
+                range: selection.range,
+            })
+            .collect(),
     )
     .unwrap();
     match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
diff --git a/src/tools/rust-analyzer/crates/ide/Cargo.toml b/src/tools/rust-analyzer/crates/ide/Cargo.toml
index 9af56c40e982e..1d19daf2f5aa9 100644
--- a/src/tools/rust-analyzer/crates/ide/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide/Cargo.toml
@@ -12,7 +12,7 @@ rust-version.workspace = true
 [lib]
 
 [dependencies]
-cov-mark = "2.0.0-pre.1"
+cov-mark = "2.0.0"
 arrayvec.workspace = true
 either.workspace = true
 itertools.workspace = true
@@ -25,7 +25,7 @@ dot.workspace = true
 smallvec.workspace = true
 triomphe.workspace = true
 nohash-hasher.workspace = true
-rustc_apfloat = "0.2.0"
+rustc_apfloat = "0.2.2"
 
 # local deps
 cfg.workspace = true
@@ -46,7 +46,7 @@ hir.workspace = true
 toolchain.workspace = true
 
 [dev-dependencies]
-expect-test = "1.4.0"
+expect-test = "1.5.1"
 
 # local deps
 test-utils.workspace = true
diff --git a/src/tools/rust-analyzer/crates/ide/src/annotations.rs b/src/tools/rust-analyzer/crates/ide/src/annotations.rs
index e47891bbdfe7e..3d71da985b24b 100644
--- a/src/tools/rust-analyzer/crates/ide/src/annotations.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/annotations.rs
@@ -1,17 +1,17 @@
 use hir::{HasSource, InFile, InRealFile, Semantics};
 use ide_db::{
-    defs::Definition, helpers::visit_file_defs, FileId, FilePosition, FileRange, FxIndexSet,
-    RootDatabase,
+    FileId, FilePosition, FileRange, FxIndexSet, RootDatabase, defs::Definition,
+    helpers::visit_file_defs,
 };
 use itertools::Itertools;
-use syntax::{ast::HasName, AstNode, TextRange};
+use syntax::{AstNode, TextRange, ast::HasName};
 
 use crate::{
+    NavigationTarget, RunnableKind,
     annotations::fn_references::find_all_methods,
     goto_implementation::goto_implementation,
     references::find_all_refs,
-    runnables::{runnables, Runnable},
-    NavigationTarget, RunnableKind,
+    runnables::{Runnable, runnables},
 };
 
 mod fn_references;
@@ -149,7 +149,7 @@ pub(crate) fn annotations(
             source_file_id: FileId,
         ) -> Option<(TextRange, Option<TextRange>)> {
             if let Some(InRealFile { file_id, value }) = node.original_ast_node_rooted(db) {
-                if file_id == source_file_id {
+                if file_id.file_id(db) == source_file_id {
                     return Some((
                         value.syntax().text_range(),
                         value.name().map(|name| name.syntax().text_range()),
@@ -209,9 +209,9 @@ fn should_skip_runnable(kind: &RunnableKind, binary_target: bool) -> bool {
 
 #[cfg(test)]
 mod tests {
-    use expect_test::{expect, Expect};
+    use expect_test::{Expect, expect};
 
-    use crate::{fixture, Annotation, AnnotationConfig};
+    use crate::{Annotation, AnnotationConfig, fixture};
 
     use super::AnnotationLocation;
 
diff --git a/src/tools/rust-analyzer/crates/ide/src/annotations/fn_references.rs b/src/tools/rust-analyzer/crates/ide/src/annotations/fn_references.rs
index 08cc10509cb8a..427a2eff82017 100644
--- a/src/tools/rust-analyzer/crates/ide/src/annotations/fn_references.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/annotations/fn_references.rs
@@ -4,7 +4,7 @@
 use hir::Semantics;
 use ide_assists::utils::test_related_attribute_syn;
 use ide_db::RootDatabase;
-use syntax::{ast, ast::HasName, AstNode, SyntaxNode, TextRange};
+use syntax::{AstNode, SyntaxNode, TextRange, ast, ast::HasName};
 
 use crate::FileId;
 
@@ -34,8 +34,8 @@ fn method_range(item: SyntaxNode) -> Option<(TextRange, Option<TextRange>)> {
 mod tests {
     use syntax::TextRange;
 
-    use crate::fixture;
     use crate::TextSize;
+    use crate::fixture;
     use std::ops::RangeInclusive;
 
     #[test]
diff --git a/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs b/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs
index afd6f740c42c6..4b8d07a253375 100644
--- a/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs
@@ -4,14 +4,14 @@ use std::iter;
 
 use hir::Semantics;
 use ide_db::{
+    FileRange, FxIndexMap, RootDatabase,
     defs::{Definition, NameClass, NameRefClass},
     helpers::pick_best_token,
     search::FileReference,
-    FileRange, FxIndexMap, RootDatabase,
 };
-use syntax::{ast, AstNode, SyntaxKind::IDENT};
+use syntax::{AstNode, SyntaxKind::IDENT, ast};
 
-use crate::{goto_definition, FilePosition, NavigationTarget, RangeInfo, TryToNav};
+use crate::{FilePosition, NavigationTarget, RangeInfo, TryToNav, goto_definition};
 
 #[derive(Debug, Clone)]
 pub struct CallItem {
@@ -76,9 +76,9 @@ pub(crate) fn incoming_calls(
                 }
 
                 let range = sema.original_range(name.syntax());
-                calls.add(nav.call_site, range.into());
+                calls.add(nav.call_site, range.into_file_id(db));
                 if let Some(other) = nav.def_site {
-                    calls.add(other, range.into());
+                    calls.add(other, range.into_file_id(db));
                 }
             }
         }
@@ -143,7 +143,7 @@ pub(crate) fn outgoing_calls(
             Some(nav_target.into_iter().zip(iter::repeat(range)))
         })
         .flatten()
-        .for_each(|(nav, range)| calls.add(nav, range.into()));
+        .for_each(|(nav, range)| calls.add(nav, range.into_file_id(db)));
 
     Some(calls.into_items())
 }
@@ -165,7 +165,7 @@ impl CallLocations {
 
 #[cfg(test)]
 mod tests {
-    use expect_test::{expect, Expect};
+    use expect_test::{Expect, expect};
     use ide_db::FilePosition;
     use itertools::Itertools;
 
diff --git a/src/tools/rust-analyzer/crates/ide/src/child_modules.rs b/src/tools/rust-analyzer/crates/ide/src/child_modules.rs
new file mode 100644
index 0000000000000..b781596187b91
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/child_modules.rs
@@ -0,0 +1,123 @@
+use hir::Semantics;
+use ide_db::{FilePosition, RootDatabase};
+use syntax::{
+    algo::find_node_at_offset,
+    ast::{self, AstNode},
+};
+
+use crate::NavigationTarget;
+
+// Feature: Child Modules
+//
+// Navigates to the child modules of the current module.
+//
+// | Editor  | Action Name |
+// |---------|-------------|
+// | VS Code | **rust-analyzer: Locate child modules** |
+
+/// This returns `Vec` because a module may be included from several places.
+pub(crate) fn child_modules(db: &RootDatabase, position: FilePosition) -> Vec<NavigationTarget> {
+    let sema = Semantics::new(db);
+    let source_file = sema.parse_guess_edition(position.file_id);
+    // First go to the parent module which contains the cursor
+    let module = find_node_at_offset::<ast::Module>(source_file.syntax(), position.offset);
+
+    match module {
+        Some(module) => {
+            // Return all child modules inside the ItemList of the parent module
+            sema.to_def(&module)
+                .into_iter()
+                .flat_map(|module| module.children(db))
+                .map(|module| NavigationTarget::from_module_to_decl(db, module).call_site())
+                .collect()
+        }
+        None => {
+            // Return all the child modules inside the source file
+            sema.file_to_module_defs(position.file_id)
+                .flat_map(|module| module.children(db))
+                .map(|module| NavigationTarget::from_module_to_decl(db, module).call_site())
+                .collect()
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use ide_db::FileRange;
+
+    use crate::fixture;
+
+    fn check_child_module(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
+        let (analysis, position, expected) = fixture::annotations(ra_fixture);
+        let navs = analysis.child_modules(position).unwrap();
+        let navs = navs
+            .iter()
+            .map(|nav| FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() })
+            .collect::<Vec<_>>();
+        assert_eq!(expected.into_iter().map(|(fr, _)| fr).collect::<Vec<_>>(), navs);
+    }
+
+    #[test]
+    fn test_resolve_child_module() {
+        check_child_module(
+            r#"
+//- /lib.rs
+$0
+mod foo;
+  //^^^
+
+//- /foo.rs
+// empty
+"#,
+        );
+    }
+
+    #[test]
+    fn test_resolve_child_module_on_module_decl() {
+        check_child_module(
+            r#"
+//- /lib.rs
+mod $0foo;
+//- /foo.rs
+mod bar;
+  //^^^
+
+//- /foo/bar.rs
+// empty
+"#,
+        );
+    }
+
+    #[test]
+    fn test_resolve_child_module_for_inline() {
+        check_child_module(
+            r#"
+//- /lib.rs
+mod foo {
+    mod $0bar {
+        mod baz {}
+    }     //^^^
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn test_resolve_multi_child_module() {
+        check_child_module(
+            r#"
+//- /main.rs
+$0
+mod foo;
+  //^^^
+mod bar;
+  //^^^
+//- /foo.rs
+// empty
+
+//- /bar.rs
+// empty
+"#,
+        );
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
index 8d2ca33bf254d..ebbd68bcdf743 100644
--- a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
@@ -6,28 +6,29 @@ mod tests;
 mod intra_doc_links;
 
 use pulldown_cmark::{BrokenLink, CowStr, Event, InlineStr, LinkType, Options, Parser, Tag};
-use pulldown_cmark_to_cmark::{cmark_resume_with_options, Options as CMarkOptions};
+use pulldown_cmark_to_cmark::{Options as CMarkOptions, cmark_resume_with_options};
 use stdx::format_to;
 use url::Url;
 
-use hir::{db::HirDatabase, sym, Adt, AsAssocItem, AssocItem, AssocItemContainer, HasAttrs};
+use hir::{Adt, AsAssocItem, AssocItem, AssocItemContainer, HasAttrs, db::HirDatabase, sym};
 use ide_db::{
-    base_db::{CrateOrigin, LangCrateOrigin, ReleaseChannel, SourceDatabase},
+    RootDatabase,
+    base_db::{CrateOrigin, LangCrateOrigin, ReleaseChannel, RootQueryDb},
     defs::{Definition, NameClass, NameRefClass},
-    documentation::{docs_with_rangemap, Documentation, HasDocs},
+    documentation::{Documentation, HasDocs, docs_with_rangemap},
     helpers::pick_best_token,
-    RootDatabase,
 };
 use syntax::{
-    ast::{self, IsString},
-    match_ast, AstNode, AstToken,
+    AstNode, AstToken,
     SyntaxKind::*,
-    SyntaxNode, SyntaxToken, TextRange, TextSize, T,
+    SyntaxNode, SyntaxToken, T, TextRange, TextSize,
+    ast::{self, IsString},
+    match_ast,
 };
 
 use crate::{
-    doc_links::intra_doc_links::{parse_intra_doc_link, strip_prefixes_suffixes},
     FilePosition, Semantics,
+    doc_links::intra_doc_links::{parse_intra_doc_link, strip_prefixes_suffixes},
 };
 
 /// Web and local links to an item's documentation.
@@ -504,9 +505,7 @@ fn get_doc_base_urls(
 
     let Some(krate) = krate else { return Default::default() };
     let Some(display_name) = krate.display_name(db) else { return Default::default() };
-    let crate_data = &db.crate_graph()[krate.into()];
-
-    let (web_base, local_base) = match &crate_data.origin {
+    let (web_base, local_base) = match krate.origin(db) {
         // std and co do not specify `html_root_url` any longer so we gotta handwrite this ourself.
         // FIXME: Use the toolchains channel instead of nightly
         CrateOrigin::Lang(
@@ -598,7 +597,7 @@ fn filename_and_frag_for_def(
         Definition::Module(m) => match m.name(db) {
             // `#[doc(keyword = "...")]` is internal used only by rust compiler
             Some(name) => {
-                match m.attrs(db).by_key(&sym::doc).find_string_value_in_tt(&sym::keyword) {
+                match m.attrs(db).by_key(sym::doc).find_string_value_in_tt(sym::keyword) {
                     Some(kw) => {
                         format!("keyword.{}.html", kw)
                     }
@@ -628,7 +627,7 @@ fn filename_and_frag_for_def(
             return Some((def, file, Some(format!("variant.{}", ev.name(db).as_str()))));
         }
         Definition::Const(c) => {
-            format!("const.{}.html", c.name(db)?.as_str())
+            format!("constant.{}.html", c.name(db)?.as_str())
         }
         Definition::Static(s) => {
             format!("static.{}.html", s.name(db).as_str())
diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links/intra_doc_links.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links/intra_doc_links.rs
index 6cc240d652499..c331734c785ed 100644
--- a/src/tools/rust-analyzer/crates/ide/src/doc_links/intra_doc_links.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/doc_links/intra_doc_links.rs
@@ -53,7 +53,7 @@ pub(super) fn strip_prefixes_suffixes(s: &str) -> &str {
 
 #[cfg(test)]
 mod tests {
-    use expect_test::{expect, Expect};
+    use expect_test::{Expect, expect};
 
     use super::*;
 
diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs
index b09e3a3c8047c..91785be8d8bad 100644
--- a/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs
@@ -1,18 +1,19 @@
 use std::iter;
 
-use expect_test::{expect, Expect};
+use expect_test::{Expect, expect};
 use hir::Semantics;
 use ide_db::{
+    FilePosition, FileRange, RootDatabase,
     defs::Definition,
     documentation::{Documentation, HasDocs},
-    FilePosition, FileRange, RootDatabase,
 };
 use itertools::Itertools;
-use syntax::{ast, match_ast, AstNode, SyntaxNode};
+use syntax::{AstNode, SyntaxNode, ast, match_ast};
 
 use crate::{
+    TryToNav,
     doc_links::{extract_definitions_from_docs, resolve_doc_path_for_def, rewrite_links},
-    fixture, TryToNav,
+    fixture,
 };
 
 fn check_external_docs(
@@ -43,7 +44,7 @@ fn check_external_docs(
 
 fn check_rewrite(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
     let (analysis, position) = fixture::position(ra_fixture);
-    let sema = &Semantics::new(&*analysis.db);
+    let sema = &Semantics::new(&analysis.db);
     let (cursor_def, docs) = def_under_cursor(sema, &position);
     let res = rewrite_links(sema.db, docs.as_str(), cursor_def);
     expect.assert_eq(&res)
@@ -54,7 +55,7 @@ fn check_doc_links(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
 
     let (analysis, position, mut expected) = fixture::annotations(ra_fixture);
     expected.sort_by_key(key_fn);
-    let sema = &Semantics::new(&*analysis.db);
+    let sema = &Semantics::new(&analysis.db);
     let (cursor_def, docs) = def_under_cursor(sema, &position);
     let defs = extract_definitions_from_docs(&docs);
     let actual: Vec<_> = defs
@@ -683,7 +684,9 @@ fn rewrite_intra_doc_link_with_anchor() {
         //! $0[PartialEq#derivable]
         fn main() {}
         "#,
-        expect!["[PartialEq#derivable](https://doc.rust-lang.org/stable/core/cmp/trait.PartialEq.html#derivable)"],
+        expect![
+            "[PartialEq#derivable](https://doc.rust-lang.org/stable/core/cmp/trait.PartialEq.html#derivable)"
+        ],
     );
 }
 
diff --git a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs
index ad4308e06a14b..241a702038da4 100644
--- a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs
@@ -1,12 +1,12 @@
 use hir::db::ExpandDatabase;
-use hir::{ExpandResult, InFile, MacroFileIdExt, Semantics};
-use ide_db::base_db::CrateId;
+use hir::{ExpandResult, InFile, Semantics};
 use ide_db::{
-    helpers::pick_best_token, syntax_helpers::prettify_macro_expansion, FileId, RootDatabase,
+    FileId, RootDatabase, base_db::Crate, helpers::pick_best_token,
+    syntax_helpers::prettify_macro_expansion,
 };
-use span::{Edition, SpanMap, SyntaxContextId, TextRange, TextSize};
+use span::{Edition, SpanMap, SyntaxContext, TextRange, TextSize};
 use stdx::format_to;
-use syntax::{ast, ted, AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T};
+use syntax::{AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T, ast, ted};
 
 use crate::FilePosition;
 
@@ -99,7 +99,7 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
                         .display(
                             db,
                             sema.attach_first_edition(position.file_id)
-                                .map(|it| it.edition())
+                                .map(|it| it.edition(db))
                                 .unwrap_or(Edition::CURRENT),
                         )
                         .to_string(),
@@ -142,7 +142,7 @@ fn expand_macro_recur(
     sema: &Semantics<'_, RootDatabase>,
     macro_call: &ast::Item,
     error: &mut String,
-    result_span_map: &mut SpanMap<SyntaxContextId>,
+    result_span_map: &mut SpanMap<SyntaxContext>,
     offset_in_original_node: TextSize,
 ) -> Option<SyntaxNode> {
     let ExpandResult { value: expanded, err } = match macro_call {
@@ -170,7 +170,7 @@ fn expand(
     sema: &Semantics<'_, RootDatabase>,
     expanded: SyntaxNode,
     error: &mut String,
-    result_span_map: &mut SpanMap<SyntaxContextId>,
+    result_span_map: &mut SpanMap<SyntaxContext>,
     mut offset_in_original_node: i32,
 ) -> SyntaxNode {
     let children = expanded.descendants().filter_map(ast::Item::cast);
@@ -207,8 +207,8 @@ fn format(
     kind: SyntaxKind,
     file_id: FileId,
     expanded: SyntaxNode,
-    span_map: &SpanMap<SyntaxContextId>,
-    krate: CrateId,
+    span_map: &SpanMap<SyntaxContext>,
+    krate: Crate,
 ) -> String {
     let expansion = prettify_macro_expansion(db, expanded, span_map, krate).to_string();
 
@@ -234,7 +234,8 @@ fn _format(
     file_id: FileId,
     expansion: &str,
 ) -> Option<String> {
-    use ide_db::base_db::{FileLoader, SourceDatabase};
+    use ide_db::base_db::RootQueryDb;
+
     // hack until we get hygiene working (same character amount to preserve formatting as much as possible)
     const DOLLAR_CRATE_REPLACE: &str = "__r_a_";
     const BUILTIN_REPLACE: &str = "builtin__POUND";
@@ -249,7 +250,7 @@ fn _format(
     let expansion = format!("{prefix}{expansion}{suffix}");
 
     let &crate_id = db.relevant_crates(file_id).iter().next()?;
-    let edition = db.crate_graph()[crate_id].edition;
+    let edition = crate_id.data(db).edition;
 
     #[allow(clippy::disallowed_methods)]
     let mut cmd = std::process::Command::new(toolchain::Tool::Rustfmt.path());
@@ -289,7 +290,7 @@ fn _format(
 
 #[cfg(test)]
 mod tests {
-    use expect_test::{expect, Expect};
+    use expect_test::{Expect, expect};
 
     use crate::fixture;
 
@@ -550,7 +551,7 @@ macro_rules! foo {
 }
 
 fn main() {
-    let res = fo$0o!();
+    fo$0o!()
 }
 "#,
             expect![[r#"
@@ -559,6 +560,24 @@ fn main() {
         );
     }
 
+    #[test]
+    fn macro_expand_item_expansion_in_expression_call() {
+        check(
+            r#"
+macro_rules! foo {
+    () => {fn f<T>() {}};
+}
+
+fn main() {
+    let res = fo$0o!();
+}
+"#,
+            expect![[r#"
+                foo!
+                fn f<T>(){}"#]],
+        );
+    }
+
     #[test]
     fn macro_expand_derive() {
         check(
@@ -677,4 +696,26 @@ crate::Foo;
 crate::Foo;"#]],
         );
     }
+
+    #[test]
+    fn semi_glueing() {
+        check(
+            r#"
+macro_rules! __log_value {
+    ($key:ident :$capture:tt =) => {};
+}
+
+macro_rules! __log {
+    ($key:tt $(:$capture:tt)? $(= $value:expr)?; $($arg:tt)+) => {
+        __log_value!($key $(:$capture)* = $($value)*);
+    };
+}
+
+__log!(written:%; "Test"$0);
+    "#,
+            expect![[r#"
+                __log!
+            "#]],
+        );
+    }
 }
diff --git a/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs b/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs
index 76414854e91ef..a374f9752fcfa 100644
--- a/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs
@@ -3,11 +3,11 @@ use std::iter::successors;
 use hir::Semantics;
 use ide_db::RootDatabase;
 use syntax::{
-    algo::{self, skip_trivia_token},
-    ast::{self, AstNode, AstToken},
     Direction, NodeOrToken,
     SyntaxKind::{self, *},
-    SyntaxNode, SyntaxToken, TextRange, TextSize, TokenAtOffset, T,
+    SyntaxNode, SyntaxToken, T, TextRange, TextSize, TokenAtOffset,
+    algo::{self, skip_trivia_token},
+    ast::{self, AstNode, AstToken},
 };
 
 use crate::FileRange;
@@ -178,11 +178,7 @@ fn extend_tokens_from_range(
     .last()?;
 
     let range = first.text_range().cover(last.text_range());
-    if range.contains_range(original_range) && original_range != range {
-        Some(range)
-    } else {
-        None
-    }
+    if range.contains_range(original_range) && original_range != range { Some(range) } else { None }
 }
 
 /// Find the shallowest node with same range, which allows us to traverse siblings.
@@ -216,11 +212,7 @@ fn extend_single_word_in_comment_or_string(
     let to: TextSize = (cursor_position + end_idx).into();
 
     let range = TextRange::new(from, to);
-    if range.is_empty() {
-        None
-    } else {
-        Some(range + leaf.text_range().start())
-    }
+    if range.is_empty() { None } else { Some(range + leaf.text_range().start()) }
 }
 
 fn extend_ws(root: &SyntaxNode, ws: SyntaxToken, offset: TextSize) -> TextRange {
diff --git a/src/tools/rust-analyzer/crates/ide/src/fetch_crates.rs b/src/tools/rust-analyzer/crates/ide/src/fetch_crates.rs
index 5ed2144430741..956379e722d53 100644
--- a/src/tools/rust-analyzer/crates/ide/src/fetch_crates.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/fetch_crates.rs
@@ -1,6 +1,6 @@
 use ide_db::{
-    base_db::{CrateOrigin, SourceDatabase},
     FileId, FxIndexSet, RootDatabase,
+    base_db::{CrateOrigin, RootQueryDb},
 };
 
 #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
@@ -20,21 +20,24 @@ pub struct CrateInfo {
 //
 // ![Show Dependency Tree](https://user-images.githubusercontent.com/5748995/229394139-2625beab-f4c9-484b-84ed-ad5dee0b1e1a.png)
 pub(crate) fn fetch_crates(db: &RootDatabase) -> FxIndexSet<CrateInfo> {
-    let crate_graph = db.crate_graph();
-    crate_graph
+    db.all_crates()
         .iter()
-        .map(|crate_id| &crate_graph[crate_id])
-        .filter(|&data| !matches!(data.origin, CrateOrigin::Local { .. }))
-        .map(crate_info)
+        .copied()
+        .map(|crate_id| (crate_id.data(db), crate_id.extra_data(db)))
+        .filter(|(data, _)| !matches!(data.origin, CrateOrigin::Local { .. }))
+        .map(|(data, extra_data)| crate_info(data, extra_data))
         .collect()
 }
 
-fn crate_info(data: &ide_db::base_db::CrateData) -> CrateInfo {
-    let crate_name = crate_name(data);
-    let version = data.version.clone();
+fn crate_info(
+    data: &ide_db::base_db::BuiltCrateData,
+    extra_data: &ide_db::base_db::ExtraCrateData,
+) -> CrateInfo {
+    let crate_name = crate_name(extra_data);
+    let version = extra_data.version.clone();
     CrateInfo { name: crate_name, version, root_file_id: data.root_file_id }
 }
 
-fn crate_name(data: &ide_db::base_db::CrateData) -> Option<String> {
+fn crate_name(data: &ide_db::base_db::ExtraCrateData) -> Option<String> {
     data.display_name.as_ref().map(|it| it.canonical_name().as_str().to_owned())
 }
diff --git a/src/tools/rust-analyzer/crates/ide/src/file_structure.rs b/src/tools/rust-analyzer/crates/ide/src/file_structure.rs
index 52fbab6fa12b1..347da4e85b4aa 100644
--- a/src/tools/rust-analyzer/crates/ide/src/file_structure.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/file_structure.rs
@@ -1,8 +1,8 @@
 use ide_db::SymbolKind;
 use syntax::{
+    AstNode, AstToken, NodeOrToken, SourceFile, SyntaxNode, SyntaxToken, TextRange, WalkEvent,
     ast::{self, HasAttrs, HasGenericParams, HasName},
-    match_ast, AstNode, AstToken, NodeOrToken, SourceFile, SyntaxNode, SyntaxToken, TextRange,
-    WalkEvent,
+    match_ast,
 };
 
 #[derive(Debug, Clone)]
@@ -250,7 +250,7 @@ fn structure_token(token: SyntaxToken) -> Option<StructureNode> {
 
 #[cfg(test)]
 mod tests {
-    use expect_test::{expect, Expect};
+    use expect_test::{Expect, expect};
 
     use super::*;
 
diff --git a/src/tools/rust-analyzer/crates/ide/src/fixture.rs b/src/tools/rust-analyzer/crates/ide/src/fixture.rs
index a0612f48d37e8..fbf89042fae15 100644
--- a/src/tools/rust-analyzer/crates/ide/src/fixture.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/fixture.rs
@@ -1,16 +1,16 @@
 //! Utilities for creating `Analysis` instances for tests.
 use test_fixture::ChangeFixture;
-use test_utils::{extract_annotations, RangeOrOffset};
+use test_utils::{RangeOrOffset, extract_annotations};
 
 use crate::{Analysis, AnalysisHost, FileId, FilePosition, FileRange};
 
 /// Creates analysis for a single file.
 pub(crate) fn file(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (Analysis, FileId) {
     let mut host = AnalysisHost::default();
-    let change_fixture = ChangeFixture::parse(ra_fixture);
+    let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
     host.db.enable_proc_attr_macros();
     host.db.apply_change(change_fixture.change);
-    (host.analysis(), change_fixture.files[0].into())
+    (host.analysis(), change_fixture.files[0].file_id(&host.db))
 }
 
 /// Creates analysis from a multi-file fixture, returns positions marked with $0.
@@ -18,23 +18,23 @@ pub(crate) fn position(
     #[rust_analyzer::rust_fixture] ra_fixture: &str,
 ) -> (Analysis, FilePosition) {
     let mut host = AnalysisHost::default();
-    let change_fixture = ChangeFixture::parse(ra_fixture);
+    let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
     host.db.enable_proc_attr_macros();
     host.db.apply_change(change_fixture.change);
     let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
     let offset = range_or_offset.expect_offset();
-    (host.analysis(), FilePosition { file_id: file_id.into(), offset })
+    (host.analysis(), FilePosition { file_id: file_id.file_id(&host.db), offset })
 }
 
 /// Creates analysis for a single file, returns range marked with a pair of $0.
 pub(crate) fn range(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (Analysis, FileRange) {
     let mut host = AnalysisHost::default();
-    let change_fixture = ChangeFixture::parse(ra_fixture);
+    let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
     host.db.enable_proc_attr_macros();
     host.db.apply_change(change_fixture.change);
     let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
     let range = range_or_offset.expect_range();
-    (host.analysis(), FileRange { file_id: file_id.into(), range })
+    (host.analysis(), FileRange { file_id: file_id.file_id(&host.db), range })
 }
 
 /// Creates analysis for a single file, returns range marked with a pair of $0 or a position marked with $0.
@@ -42,11 +42,11 @@ pub(crate) fn range_or_position(
     #[rust_analyzer::rust_fixture] ra_fixture: &str,
 ) -> (Analysis, FileId, RangeOrOffset) {
     let mut host = AnalysisHost::default();
-    let change_fixture = ChangeFixture::parse(ra_fixture);
+    let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
     host.db.enable_proc_attr_macros();
     host.db.apply_change(change_fixture.change);
     let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
-    (host.analysis(), file_id.into(), range_or_offset)
+    (host.analysis(), file_id.file_id(&host.db), range_or_offset)
 }
 
 /// Creates analysis from a multi-file fixture, returns positions marked with $0.
@@ -54,24 +54,25 @@ pub(crate) fn annotations(
     #[rust_analyzer::rust_fixture] ra_fixture: &str,
 ) -> (Analysis, FilePosition, Vec<(FileRange, String)>) {
     let mut host = AnalysisHost::default();
-    let change_fixture = ChangeFixture::parse(ra_fixture);
+    let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
     host.db.enable_proc_attr_macros();
     host.db.apply_change(change_fixture.change);
     let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
     let offset = range_or_offset.expect_offset();
 
+    let db = &host.db;
     let annotations = change_fixture
         .files
         .iter()
         .flat_map(|&file_id| {
-            let file_text = host.analysis().file_text(file_id.into()).unwrap();
+            let file_text = host.analysis().file_text(file_id.file_id(&host.db)).unwrap();
             let annotations = extract_annotations(&file_text);
             annotations
                 .into_iter()
-                .map(move |(range, data)| (FileRange { file_id: file_id.into(), range }, data))
+                .map(move |(range, data)| (FileRange { file_id: file_id.file_id(db), range }, data))
         })
         .collect();
-    (host.analysis(), FilePosition { file_id: file_id.into(), offset }, annotations)
+    (host.analysis(), FilePosition { file_id: file_id.file_id(&host.db), offset }, annotations)
 }
 
 /// Creates analysis from a multi-file fixture with annotations without $0
@@ -79,19 +80,20 @@ pub(crate) fn annotations_without_marker(
     #[rust_analyzer::rust_fixture] ra_fixture: &str,
 ) -> (Analysis, Vec<(FileRange, String)>) {
     let mut host = AnalysisHost::default();
-    let change_fixture = ChangeFixture::parse(ra_fixture);
+    let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
     host.db.enable_proc_attr_macros();
     host.db.apply_change(change_fixture.change);
 
+    let db = &host.db;
     let annotations = change_fixture
         .files
         .iter()
         .flat_map(|&file_id| {
-            let file_text = host.analysis().file_text(file_id.into()).unwrap();
+            let file_text = host.analysis().file_text(file_id.file_id(db)).unwrap();
             let annotations = extract_annotations(&file_text);
             annotations
                 .into_iter()
-                .map(move |(range, data)| (FileRange { file_id: file_id.into(), range }, data))
+                .map(move |(range, data)| (FileRange { file_id: file_id.file_id(db), range }, data))
         })
         .collect();
     (host.analysis(), annotations)
diff --git a/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs b/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs
index e5a94ff9fe964..194e8c968f758 100755
--- a/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs
@@ -1,9 +1,10 @@
-use ide_db::{syntax_helpers::node_ext::vis_eq, FxHashSet};
+use ide_db::{FxHashSet, syntax_helpers::node_ext::vis_eq};
 use syntax::{
-    ast::{self, AstNode, AstToken},
-    match_ast, Direction, NodeOrToken, SourceFile,
+    Direction, NodeOrToken, SourceFile,
     SyntaxKind::{self, *},
     TextRange, TextSize,
+    ast::{self, AstNode, AstToken},
+    match_ast,
 };
 
 use std::hash::Hash;
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs b/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs
index 3742edc8db84b..38c032d382e3d 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs
@@ -1,13 +1,13 @@
 use hir::{AsAssocItem, Semantics};
 use ide_db::{
-    defs::{Definition, NameClass, NameRefClass},
     RootDatabase,
+    defs::{Definition, NameClass, NameRefClass},
 };
-use syntax::{ast, match_ast, AstNode, SyntaxKind::*, T};
+use syntax::{AstNode, SyntaxKind::*, T, ast, match_ast};
 
 use crate::{
-    goto_definition::goto_definition, navigation_target::TryToNav, FilePosition, NavigationTarget,
-    RangeInfo,
+    FilePosition, NavigationTarget, RangeInfo, goto_definition::goto_definition,
+    navigation_target::TryToNav,
 };
 
 // Feature: Go to Declaration
@@ -32,7 +32,7 @@ pub(crate) fn goto_declaration(
         .descend_into_macros_no_opaque(original_token)
         .iter()
         .filter_map(|token| {
-            let parent = token.parent()?;
+            let parent = token.value.parent()?;
             let def = match_ast! {
                 match parent {
                     ast::NameRef(name_ref) => match NameRefClass::classify(&sema, &name_ref)? {
@@ -52,7 +52,7 @@ pub(crate) fn goto_declaration(
             };
             let assoc = match def? {
                 Definition::Module(module) => {
-                    return Some(NavigationTarget::from_module_to_decl(db, module))
+                    return Some(NavigationTarget::from_module_to_decl(db, module));
                 }
                 Definition::Const(c) => c.as_assoc_item(db),
                 Definition::TypeAlias(ta) => ta.as_assoc_item(db),
@@ -69,11 +69,7 @@ pub(crate) fn goto_declaration(
         .flatten()
         .collect();
 
-    if info.is_empty() {
-        goto_definition(db, position)
-    } else {
-        Some(RangeInfo::new(range, info))
-    }
+    if info.is_empty() { goto_definition(db, position) } else { Some(RangeInfo::new(range, info)) }
 }
 
 #[cfg(test)]
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
index 60a904233a9a5..b894e857522f9 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
@@ -1,28 +1,28 @@
 use std::{iter, mem::discriminant};
 
 use crate::{
+    FilePosition, NavigationTarget, RangeInfo, TryToNav, UpmappingResult,
     doc_links::token_as_doc_comment,
     navigation_target::{self, ToNav},
-    FilePosition, NavigationTarget, RangeInfo, TryToNav, UpmappingResult,
 };
 use hir::{
-    sym, AsAssocItem, AssocItem, CallableKind, FileRange, HasCrate, InFile, MacroFileIdExt,
-    ModuleDef, Semantics,
+    AsAssocItem, AssocItem, CallableKind, FileRange, HasCrate, InFile, ModuleDef, Semantics, sym,
 };
 use ide_db::{
-    base_db::{AnchoredPath, FileLoader, SourceDatabase},
+    RootDatabase, SymbolKind,
+    base_db::{AnchoredPath, SourceDatabase},
     defs::{Definition, IdentClass},
     famous_defs::FamousDefs,
     helpers::pick_best_token,
-    RootDatabase, SymbolKind,
 };
 use itertools::Itertools;
 use span::{Edition, FileId};
 use syntax::{
-    ast::{self, HasLoopBody},
-    match_ast, AstNode, AstToken,
+    AstNode, AstToken,
     SyntaxKind::*,
-    SyntaxNode, SyntaxToken, TextRange, T,
+    SyntaxNode, SyntaxToken, T, TextRange,
+    ast::{self, HasLoopBody},
+    match_ast,
 };
 
 // Feature: Go to Definition
@@ -43,7 +43,7 @@ pub(crate) fn goto_definition(
     let sema = &Semantics::new(db);
     let file = sema.parse_guess_edition(file_id).syntax().clone();
     let edition =
-        sema.attach_first_edition(file_id).map(|it| it.edition()).unwrap_or(Edition::CURRENT);
+        sema.attach_first_edition(file_id).map(|it| it.edition(db)).unwrap_or(Edition::CURRENT);
     let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
         IDENT
         | INT_NUMBER
@@ -91,16 +91,19 @@ pub(crate) fn goto_definition(
         .descend_into_macros_no_opaque(original_token.clone())
         .into_iter()
         .filter_map(|token| {
-            let parent = token.parent()?;
+            let parent = token.value.parent()?;
 
-            if let Some(token) = ast::String::cast(token.clone()) {
-                if let Some(x) = try_lookup_include_path(sema, token, file_id) {
+            let token_file_id = token.file_id;
+            if let Some(token) = ast::String::cast(token.value.clone()) {
+                if let Some(x) =
+                    try_lookup_include_path(sema, InFile::new(token_file_id, token), file_id)
+                {
                     return Some(vec![x]);
                 }
             }
 
             if ast::TokenTree::can_cast(parent.kind()) {
-                if let Some(x) = try_lookup_macro_def_in_macro_use(sema, token) {
+                if let Some(x) = try_lookup_macro_def_in_macro_use(sema, token.value) {
                     return Some(vec![x]);
                 }
             }
@@ -204,20 +207,22 @@ fn find_definition_for_known_blanket_dual_impls(
 
 fn try_lookup_include_path(
     sema: &Semantics<'_, RootDatabase>,
-    token: ast::String,
+    token: InFile<ast::String>,
     file_id: FileId,
 ) -> Option<NavigationTarget> {
-    let file = sema.hir_file_for(&token.syntax().parent()?).macro_file()?;
+    let file = token.file_id.macro_file()?;
+
+    // Check that we are in the eager argument expansion of an include macro,
+    // that is, that we are the string input of it.
     if !iter::successors(Some(file), |file| file.parent(sema.db).macro_file())
-        // Check that we are in the eager argument expansion of an include macro
         .any(|file| file.is_include_like_macro(sema.db) && file.eager_arg(sema.db).is_none())
     {
         return None;
     }
-    let path = token.value().ok()?;
+    let path = token.value.value().ok()?;
 
     let file_id = sema.db.resolve_path(AnchoredPath { anchor: file_id, path: &path })?;
-    let size = sema.db.file_text(file_id).len().try_into().ok()?;
+    let size = sema.db.file_text(file_id).text(sema.db).len().try_into().ok()?;
     Some(NavigationTarget {
         file_id,
         full_range: TextRange::new(0.into(), size),
@@ -358,7 +363,7 @@ fn nav_for_exit_points(
 
                         if let Some(FileRange { file_id, range }) = focus_frange {
                             let contains_frange = |nav: &NavigationTarget| {
-                                nav.file_id == file_id && nav.full_range.contains_range(range)
+                                nav.file_id == file_id.file_id(db) && nav.full_range.contains_range(range)
                             };
 
                             if let Some(def_site) = nav.def_site.as_mut() {
@@ -2047,7 +2052,10 @@ fn main() {
         );
     }
 
+    // macros in this position are not yet supported
     #[test]
+    // FIXME
+    #[should_panic]
     fn goto_doc_include_str() {
         check(
             r#"
@@ -2190,8 +2198,8 @@ where T : Bound
 struct A;
 impl Bound for A{}
 fn f() {
-    let gen = Gen::<A>(A);
-    gen.g$0();
+    let g = Gen::<A>(A);
+    g.g$0();
 }
                 "#,
             );
@@ -2216,8 +2224,8 @@ where T : Bound
 struct A;
 impl Bound for A{}
 fn f() {
-    let gen = Gen::<A>(A);
-    gen.g$0();
+    let g = Gen::<A>(A);
+    g.g$0();
 }
 "#,
             );
@@ -3324,4 +3332,218 @@ fn main() {
 "#,
         );
     }
+
+    #[test]
+    fn struct_shadow_by_module() {
+        check(
+            r#"
+mod foo {
+    pub mod bar {
+         // ^^^
+        pub type baz = usize;
+    }
+}
+struct bar;
+fn main() {
+    use foo::bar;
+    let x: ba$0r::baz = 5;
+
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn type_alias_shadow_by_module() {
+        check(
+            r#"
+mod foo {
+    pub mod bar {
+         // ^^^
+        pub fn baz() {}
+    }
+}
+
+trait Qux {}
+
+fn item<bar: Qux>() {
+    use foo::bar;
+    ba$0r::baz();
+}
+}
+"#,
+        );
+
+        check(
+            r#"
+mod foo {
+    pub mod bar {
+         // ^^^
+        pub fn baz() {}
+    }
+}
+
+fn item<bar>(x: bar) {
+    use foo::bar;
+    let x: bar$0 = x;
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn trait_shadow_by_module() {
+        check(
+            r#"
+pub mod foo {
+    pub mod Bar {}
+         // ^^^
+}
+
+trait Bar {}
+
+fn main() {
+    use foo::Bar;
+    fn f<Qux: B$0ar>() {}
+}
+            "#,
+        );
+    }
+
+    #[test]
+    fn const_shadow_by_module() {
+        check(
+            r#"
+pub mod foo {
+    pub struct u8 {}
+    pub mod bar {
+        pub mod u8 {}
+    }
+}
+
+fn main() {
+    use foo::u8;
+    {
+        use foo::bar::u8;
+
+        fn f1<const N: u$08>() {}
+    }
+    fn f2<const N: u8>() {}
+}
+"#,
+        );
+
+        check(
+            r#"
+pub mod foo {
+    pub struct u8 {}
+            // ^^
+    pub mod bar {
+        pub mod u8 {}
+    }
+}
+
+fn main() {
+    use foo::u8;
+    {
+        use foo::bar::u8;
+
+        fn f1<const N: u8>() {}
+    }
+    fn f2<const N: u$08>() {}
+}
+"#,
+        );
+
+        check(
+            r#"
+pub mod foo {
+    pub struct buz {}
+    pub mod bar {
+        pub mod buz {}
+             // ^^^
+    }
+}
+
+fn main() {
+    use foo::buz;
+    {
+        use foo::bar::buz;
+
+        fn f1<const N: buz$0>() {}
+    }
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn offset_of() {
+        check(
+            r#"
+//- minicore: offset_of
+struct Foo {
+    field: i32,
+ // ^^^^^
+}
+
+fn foo() {
+    let _ = core::mem::offset_of!(Foo, fiel$0d);
+}
+        "#,
+        );
+
+        check(
+            r#"
+//- minicore: offset_of
+struct Bar(Foo);
+struct Foo {
+    field: i32,
+ // ^^^^^
+}
+
+fn foo() {
+    let _ = core::mem::offset_of!(Bar, 0.fiel$0d);
+}
+        "#,
+        );
+
+        check(
+            r#"
+//- minicore: offset_of
+struct Bar(Baz);
+enum Baz {
+    Abc(Foo),
+    None,
+}
+struct Foo {
+    field: i32,
+ // ^^^^^
+}
+
+fn foo() {
+    let _ = core::mem::offset_of!(Bar, 0.Abc.0.fiel$0d);
+}
+        "#,
+        );
+
+        check(
+            r#"
+//- minicore: offset_of
+struct Bar(Baz);
+enum Baz {
+    Abc(Foo),
+ // ^^^
+    None,
+}
+struct Foo {
+    field: i32,
+}
+
+fn foo() {
+    let _ = core::mem::offset_of!(Bar, 0.Ab$0c.0.field);
+}
+        "#,
+        );
+    }
 }
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs
index e1d834b5d1c69..1bc28f28b6f57 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs
@@ -1,10 +1,10 @@
 use hir::{AsAssocItem, Impl, Semantics};
 use ide_db::{
+    RootDatabase,
     defs::{Definition, NameClass, NameRefClass},
     helpers::pick_best_token,
-    RootDatabase,
 };
-use syntax::{ast, AstNode, SyntaxKind::*, T};
+use syntax::{AstNode, SyntaxKind::*, T, ast};
 
 use crate::{FilePosition, NavigationTarget, RangeInfo, TryToNav};
 
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs
index ddc274a830352..a78f5cdc9d0e6 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs
@@ -1,6 +1,6 @@
 use hir::GenericParam;
-use ide_db::{base_db::Upcast, defs::Definition, helpers::pick_best_token, RootDatabase};
-use syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, T};
+use ide_db::{RootDatabase, defs::Definition, helpers::pick_best_token};
+use syntax::{AstNode, SyntaxKind::*, SyntaxToken, T, ast, match_ast};
 
 use crate::{FilePosition, NavigationTarget, RangeInfo, TryToNav};
 
@@ -71,8 +71,8 @@ pub(crate) fn goto_type_definition(
     sema.descend_into_macros_no_opaque(token)
         .into_iter()
         .filter_map(|token| {
-            let ty = sema
-                .token_ancestors_with_macros(token)
+            sema
+                .token_ancestors_with_macros(token.value)
                 // When `token` is within a macro call, we can't determine its type. Don't continue
                 // this traversal because otherwise we'll end up returning the type of *that* macro
                 // call, which is not what we want in general.
@@ -87,7 +87,7 @@ pub(crate) fn goto_type_definition(
                             ast::Pat(it) => sema.type_of_pat(&it)?.original,
                             ast::SelfParam(it) => sema.type_of_self(&it)?,
                             ast::Type(it) => sema.resolve_type(&it)?,
-                            ast::RecordField(it) => sema.to_def(&it)?.ty(db.upcast()),
+                            ast::RecordField(it) => sema.to_def(&it)?.ty(db),
                             // can't match on RecordExprField directly as `ast::Expr` will match an iteration too early otherwise
                             ast::NameRef(it) => {
                                 if let Some(record_field) = ast::RecordExprField::for_name_ref(&it) {
@@ -103,8 +103,7 @@ pub(crate) fn goto_type_definition(
                     };
 
                     Some(ty)
-                });
-            ty
+                })
         })
         .for_each(process_ty);
     Some(RangeInfo::new(range, res))
diff --git a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
index 6463206596af5..80624eeae80c7 100644
--- a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
@@ -1,7 +1,8 @@
 use std::iter;
 
-use hir::{db, FilePosition, FileRange, HirFileId, InFile, Semantics};
+use hir::{EditionedFileId, FilePosition, FileRange, HirFileId, InFile, Semantics, db};
 use ide_db::{
+    FxHashMap, FxHashSet, RootDatabase,
     defs::{Definition, IdentClass},
     helpers::pick_best_token,
     search::{FileReference, ReferenceCategory, SearchScope},
@@ -9,17 +10,17 @@ use ide_db::{
         eq_label_lt, for_each_tail_expr, full_path_of_name_ref, is_closure_or_blk_with_modif,
         preorder_expr_with_ctx_checker,
     },
-    FxHashMap, FxHashSet, RootDatabase,
 };
-use span::EditionedFileId;
+use span::FileId;
 use syntax::{
-    ast::{self, HasLoopBody},
-    match_ast, AstNode,
+    AstNode,
     SyntaxKind::{self, IDENT, INT_NUMBER},
-    SyntaxToken, TextRange, WalkEvent, T,
+    SyntaxToken, T, TextRange, WalkEvent,
+    ast::{self, HasLoopBody},
+    match_ast,
 };
 
-use crate::{goto_definition, navigation_target::ToNav, NavigationTarget, TryToNav};
+use crate::{NavigationTarget, TryToNav, goto_definition, navigation_target::ToNav};
 
 #[derive(PartialEq, Eq, Hash)]
 pub struct HighlightedRange {
@@ -59,13 +60,14 @@ pub(crate) fn highlight_related(
     let _p = tracing::info_span!("highlight_related").entered();
     let file_id = sema
         .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition(file_id));
+        .unwrap_or_else(|| EditionedFileId::current_edition(sema.db, file_id));
+    let span_file_id = file_id.editioned_file_id(sema.db);
     let syntax = sema.parse(file_id).syntax().clone();
 
     let token = pick_best_token(syntax.token_at_offset(offset), |kind| match kind {
         T![?] => 4, // prefer `?` when the cursor is sandwiched like in `await$0?`
         T![->] => 4,
-        kind if kind.is_keyword(file_id.edition()) => 3,
+        kind if kind.is_keyword(span_file_id.edition()) => 3,
         IDENT | INT_NUMBER => 2,
         T![|] => 1,
         _ => 0,
@@ -87,11 +89,18 @@ pub(crate) fn highlight_related(
         T![break] | T![loop] | T![while] | T![continue] if config.break_points => {
             highlight_break_points(sema, token).remove(&file_id)
         }
-        T![|] if config.closure_captures => highlight_closure_captures(sema, token, file_id),
-        T![move] if config.closure_captures => highlight_closure_captures(sema, token, file_id),
-        _ if config.references => {
-            highlight_references(sema, token, FilePosition { file_id, offset })
+        T![|] if config.closure_captures => {
+            highlight_closure_captures(sema, token, file_id, span_file_id.file_id())
+        }
+        T![move] if config.closure_captures => {
+            highlight_closure_captures(sema, token, file_id, span_file_id.file_id())
         }
+        _ if config.references => highlight_references(
+            sema,
+            token,
+            FilePosition { file_id, offset },
+            span_file_id.file_id(),
+        ),
         _ => None,
     }
 }
@@ -100,6 +109,7 @@ fn highlight_closure_captures(
     sema: &Semantics<'_, RootDatabase>,
     token: SyntaxToken,
     file_id: EditionedFileId,
+    vfs_file_id: FileId,
 ) -> Option<Vec<HighlightedRange>> {
     let closure = token.parent_ancestors().take(2).find_map(ast::ClosureExpr::cast)?;
     let search_range = closure.body()?.syntax().text_range();
@@ -132,7 +142,7 @@ fn highlight_closure_captures(
                     .sources(sema.db)
                     .into_iter()
                     .flat_map(|x| x.to_nav(sema.db))
-                    .filter(|decl| decl.file_id == file_id)
+                    .filter(|decl| decl.file_id == vfs_file_id)
                     .filter_map(|decl| decl.focus_range)
                     .map(move |range| HighlightedRange { range, category })
                     .chain(usages)
@@ -145,6 +155,7 @@ fn highlight_references(
     sema: &Semantics<'_, RootDatabase>,
     token: SyntaxToken,
     FilePosition { file_id, offset }: FilePosition,
+    vfs_file_id: FileId,
 ) -> Option<Vec<HighlightedRange>> {
     let defs = if let Some((range, resolution)) =
         sema.check_for_format_args_template(token.clone(), offset)
@@ -152,7 +163,10 @@ fn highlight_references(
         match resolution.map(Definition::from) {
             Some(def) => iter::once(def).collect(),
             None => {
-                return Some(vec![HighlightedRange { range, category: ReferenceCategory::empty() }])
+                return Some(vec![HighlightedRange {
+                    range,
+                    category: ReferenceCategory::empty(),
+                }]);
             }
         }
     } else {
@@ -224,6 +238,23 @@ fn highlight_references(
             }
         }
 
+        // highlight the tail expr of the labelled block
+        if matches!(def, Definition::Label(_)) {
+            let label = token.parent_ancestors().nth(1).and_then(ast::Label::cast);
+            if let Some(block) =
+                label.and_then(|label| label.syntax().parent()).and_then(ast::BlockExpr::cast)
+            {
+                for_each_tail_expr(&block.into(), &mut |tail| {
+                    if !matches!(tail, ast::Expr::BreakExpr(_)) {
+                        res.insert(HighlightedRange {
+                            range: tail.syntax().text_range(),
+                            category: ReferenceCategory::empty(),
+                        });
+                    }
+                });
+            }
+        }
+
         // highlight the defs themselves
         match def {
             Definition::Local(local) => {
@@ -236,7 +267,7 @@ fn highlight_references(
                     .sources(sema.db)
                     .into_iter()
                     .flat_map(|x| x.to_nav(sema.db))
-                    .filter(|decl| decl.file_id == file_id)
+                    .filter(|decl| decl.file_id == vfs_file_id)
                     .filter_map(|decl| decl.focus_range)
                     .map(|range| HighlightedRange { range, category })
                     .for_each(|x| {
@@ -254,7 +285,7 @@ fn highlight_references(
                     },
                 };
                 for nav in navs {
-                    if nav.file_id != file_id {
+                    if nav.file_id != vfs_file_id {
                         continue;
                     }
                     let hl_range = nav.focus_range.map(|range| {
@@ -274,11 +305,7 @@ fn highlight_references(
     }
 
     res.extend(usages);
-    if res.is_empty() {
-        None
-    } else {
-        Some(res.into_iter().collect())
-    }
+    if res.is_empty() { None } else { Some(res.into_iter().collect()) }
 }
 
 fn hl_exit_points(
@@ -442,6 +469,18 @@ pub(crate) fn highlight_break_points(
                 push_to_highlights(file_id, text_range);
             });
 
+        if matches!(expr, ast::Expr::BlockExpr(_)) {
+            for_each_tail_expr(&expr, &mut |tail| {
+                if matches!(tail, ast::Expr::BreakExpr(_)) {
+                    return;
+                }
+
+                let file_id = sema.hir_file_for(tail.syntax());
+                let range = tail.syntax().text_range();
+                push_to_highlights(file_id, Some(range));
+            });
+        }
+
         Some(highlights)
     }
 
@@ -2068,4 +2107,41 @@ pub unsafe fn bootstrap() -> ! {
 "#,
         )
     }
+
+    #[test]
+    fn labeled_block_tail_expr() {
+        check(
+            r#"
+fn foo() {
+    'a: {
+ // ^^^
+        if true { break$0 'a 0; }
+               // ^^^^^^^^
+        5
+     // ^
+    }
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn labeled_block_tail_expr_2() {
+        check(
+            r#"
+fn foo() {
+    let _ = 'b$0lk: {
+         // ^^^^
+        let x = 1;
+        if true { break 'blk 42; }
+                     // ^^^^
+        if false { break 'blk 24; }
+                      // ^^^^
+        100
+     // ^^^
+    };
+}
+"#,
+        );
+    }
 }
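
For context on what the new `labeled_block_tail_expr` tests and the tail-expression highlighting above target: a labeled block evaluates to its tail expression unless a `break 'label value` leaves it early. A minimal standalone Rust sketch of that construct (illustration only, independent of rust-analyzer's internals):

```rust
// The construct the new highlighting covers: both `break 'blk 42` and the
// tail expression `100` can supply the block's value, so both get marked.
fn main() {
    let value = 'blk: {
        if std::env::args().count() > 1 {
            break 'blk 42;
        }
        100
    };
    println!("{value}");
}
```
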
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover.rs b/src/tools/rust-analyzer/crates/ide/src/hover.rs
index b00de6ba40833..2f2d2252f8449 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover.rs
@@ -7,26 +7,30 @@ use std::{iter, ops::Not};
 
 use either::Either;
 use hir::{
-    db::DefDatabase, DisplayTarget, GenericDef, GenericSubstitution, HasCrate, HasSource, LangItem,
-    Semantics,
+    DisplayTarget, GenericDef, GenericSubstitution, HasCrate, HasSource, LangItem, Semantics,
+    db::DefDatabase,
 };
 use ide_db::{
+    FileRange, FxIndexSet, Ranker, RootDatabase,
     defs::{Definition, IdentClass, NameRefClass, OperatorClass},
     famous_defs::FamousDefs,
     helpers::pick_best_token,
-    FileRange, FxIndexSet, Ranker, RootDatabase,
 };
-use itertools::{multizip, Itertools};
+use itertools::{Itertools, multizip};
 use span::Edition;
-use syntax::{ast, AstNode, SyntaxKind::*, SyntaxNode, T};
+use syntax::{
+    AstNode,
+    SyntaxKind::{self, *},
+    SyntaxNode, T, ast,
+};
 
 use crate::{
+    FileId, FilePosition, NavigationTarget, RangeInfo, Runnable, TryToNav,
     doc_links::token_as_doc_comment,
     markdown_remove::remove_markdown,
     markup::Markup,
     navigation_target::UpmappingResult,
     runnables::{runnable_fn, runnable_mod},
-    FileId, FilePosition, NavigationTarget, RangeInfo, Runnable, TryToNav,
 };
 #[derive(Clone, Debug, PartialEq, Eq)]
 pub struct HoverConfig {
@@ -129,8 +133,8 @@ pub(crate) fn hover(
     let sema = &hir::Semantics::new(db);
     let file = sema.parse_guess_edition(file_id).syntax().clone();
     let edition =
-        sema.attach_first_edition(file_id).map(|it| it.edition()).unwrap_or(Edition::CURRENT);
-    let display_target = sema.first_crate_or_default(file_id).to_display_target(db);
+        sema.attach_first_edition(file_id).map(|it| it.edition(db)).unwrap_or(Edition::CURRENT);
+    let display_target = sema.first_crate(file_id)?.to_display_target(db);
     let mut res = if range.is_empty() {
         hover_offset(
             sema,
@@ -274,11 +278,13 @@ fn hover_offset(
                         }
 
                         class => {
-                            let is_def = matches!(class, IdentClass::NameClass(_));
+                            let render_extras = matches!(class, IdentClass::NameClass(_))
+                                // Render extra information for `Self` keyword as well
+                                || ast::NameRef::cast(node.clone()).is_some_and(|name_ref| name_ref.token_kind() == SyntaxKind::SELF_TYPE_KW);
                             multizip((
                                 class.definitions(),
                                 iter::repeat(None),
-                                iter::repeat(is_def),
+                                iter::repeat(render_extras),
                                 iter::repeat(node),
                             ))
                             .collect::<Vec<_>>()
@@ -422,7 +428,7 @@ pub(crate) fn hover_for_definition(
     subst: Option<GenericSubstitution>,
     scope_node: &SyntaxNode,
     macro_arm: Option<u32>,
-    hovered_definition: bool,
+    render_extras: bool,
     config: &HoverConfig,
     edition: Edition,
     display_target: DisplayTarget,
@@ -456,7 +462,7 @@ pub(crate) fn hover_for_definition(
         famous_defs.as_ref(),
         &notable_traits,
         macro_arm,
-        hovered_definition,
+        render_extras,
         subst_types.as_ref(),
         config,
         edition,
@@ -499,6 +505,7 @@ fn notable_traits(
                 )
             })
         })
+        .sorted_by_cached_key(|(trait_, _)| trait_.name(db))
         .collect::<Vec<_>>()
 }
 
@@ -512,7 +519,7 @@ fn show_implementations_action(db: &RootDatabase, def: Definition) -> Option<Hov
 
     let adt = match def {
         Definition::Trait(it) => {
-            return it.try_to_nav(db).map(UpmappingResult::call_site).map(to_action)
+            return it.try_to_nav(db).map(UpmappingResult::call_site).map(to_action);
         }
         Definition::Adt(it) => Some(it),
         Definition::SelfType(it) => it.self_ty(db).as_adt(),
@@ -544,7 +551,7 @@ fn runnable_action(
         Definition::Module(it) => runnable_mod(sema, it).map(HoverAction::Runnable),
         Definition::Function(func) => {
             let src = func.source(sema.db)?;
-            if src.file_id != file_id {
+            if src.file_id.file_id().is_none_or(|f| f.file_id(sema.db) != file_id) {
                 cov_mark::hit!(hover_macro_generated_struct_fn_doc_comment);
                 cov_mark::hit!(hover_macro_generated_struct_fn_doc_attr);
                 return None;
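
The `sorted_by_cached_key` call added to `notable_traits` makes the "Implements notable traits" hover deterministic; the updated test in hover/tests.rs now expects `Future`, `Iterator`, `Notable` in name order. A rough std-only sketch of the idea, assuming plain strings in place of `hir::Trait` values (the real code uses itertools' `sorted_by_cached_key` on an iterator):

```rust
// Hypothetical stand-in for trait names; sorting by a cached key makes the
// rendered order independent of the underlying iteration order.
fn main() {
    let mut traits = vec!["Notable", "Future", "Iterator"];
    // Like Itertools::sorted_by_cached_key, the key is computed once per element.
    traits.sort_by_cached_key(|name| name.to_string());
    assert_eq!(traits, ["Future", "Iterator", "Notable"]);
}
```
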
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
index 31ef89a07cde1..69b83f3b12d89 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
@@ -3,34 +3,34 @@ use std::{env, mem, ops::Not};
 
 use either::Either;
 use hir::{
-    db::ExpandDatabase, Adt, AsAssocItem, AsExternAssocItem, CaptureKind, DisplayTarget, DropGlue,
+    Adt, AsAssocItem, AsExternAssocItem, CaptureKind, DisplayTarget, DropGlue,
     DynCompatibilityViolation, HasCrate, HasSource, HirDisplay, Layout, LayoutError,
     MethodViolationCode, Name, Semantics, Symbol, Trait, Type, TypeInfo, VariantDef,
+    db::ExpandDatabase,
 };
 use ide_db::{
-    base_db::SourceDatabase,
+    RootDatabase,
     defs::Definition,
     documentation::HasDocs,
     famous_defs::FamousDefs,
     generated::lints::{CLIPPY_LINTS, DEFAULT_LINTS, FEATURES},
     syntax_helpers::prettify_macro_expansion,
-    RootDatabase,
 };
 use itertools::Itertools;
 use rustc_apfloat::{
-    ieee::{Half as f16, Quad as f128},
     Float,
+    ieee::{Half as f16, Quad as f128},
 };
 use span::Edition;
 use stdx::format_to;
-use syntax::{algo, ast, match_ast, AstNode, AstToken, Direction, SyntaxToken, T};
+use syntax::{AstNode, AstToken, Direction, SyntaxToken, T, algo, ast, match_ast};
 
 use crate::{
-    doc_links::{remove_links, rewrite_links},
-    hover::{notable_traits, walk_and_push_ty, SubstTyLen},
-    interpret::render_const_eval_error,
     HoverAction, HoverConfig, HoverResult, Markup, MemoryLayoutHoverConfig,
     MemoryLayoutHoverRenderKind,
+    doc_links::{remove_links, rewrite_links},
+    hover::{SubstTyLen, notable_traits, walk_and_push_ty},
+    interpret::render_const_eval_error,
 };
 
 pub(super) fn type_info_of(
@@ -346,11 +346,7 @@ pub(super) fn try_for_lint(attr: &ast::Attr, token: &SyntaxToken) -> Option<Hove
                 .is_some_and(|t| {
                     t.kind() == T![ident] && t.into_token().is_some_and(|t| t.text() == "clippy")
                 });
-            if is_clippy {
-                (true, CLIPPY_LINTS)
-            } else {
-                (false, DEFAULT_LINTS)
-            }
+            if is_clippy { (true, CLIPPY_LINTS) } else { (false, DEFAULT_LINTS) }
         }
         _ => return None,
     };
@@ -418,7 +414,7 @@ fn definition_owner_name(db: &RootDatabase, def: Definition, edition: Edition) -
                             "{}::{}",
                             name.display(db, edition),
                             it.name(db).display(db, edition)
-                        ))
+                        ));
                     }
                     None => Some(it.name(db)),
                 }
@@ -436,7 +432,7 @@ fn definition_owner_name(db: &RootDatabase, def: Definition, edition: Edition) -
                             "{}::{}",
                             name.display(db, edition),
                             it.name(db)?.display(db, edition)
-                        ))
+                        ));
                     }
                     None => it.name(db),
                 }
@@ -466,8 +462,7 @@ pub(super) fn path(
     item_name: Option<String>,
     edition: Edition,
 ) -> String {
-    let crate_name =
-        db.crate_graph()[module.krate().into()].display_name.as_ref().map(|it| it.to_string());
+    let crate_name = module.krate().display_name(db).as_ref().map(|it| it.to_string());
     let module_path = module
         .path_to_root(db)
         .into_iter()
@@ -482,7 +477,7 @@ pub(super) fn definition(
     famous_defs: Option<&FamousDefs<'_, '_>>,
     notable_traits: &[(Trait, Vec<(Option<Type>, Name)>)],
     macro_arm: Option<u32>,
-    hovered_definition: bool,
+    render_extras: bool,
     subst_types: Option<&Vec<(Symbol, Type)>>,
     config: &HoverConfig,
     edition: Edition,
@@ -645,6 +640,12 @@ pub(super) fn definition(
         Definition::Local(it) => {
             render_memory_layout(config.memory_layout, || it.ty(db).layout(db), |_| None, |_| None)
         }
+        Definition::SelfType(it) => render_memory_layout(
+            config.memory_layout,
+            || it.self_ty(db).layout(db),
+            |_| None,
+            |_| None,
+        ),
         _ => None,
     };
 
@@ -717,18 +718,17 @@ pub(super) fn definition(
             }
             _ => return None,
         };
-        let rendered_drop_glue = match drop_info.drop_glue {
-            DropGlue::None => "does not contain types with destructors (drop glue)",
-            DropGlue::DependOnParams => {
-                "may contain types with destructors (drop glue) depending on type parameters"
+        let rendered_drop_glue = if drop_info.has_dtor == Some(true) {
+            "impl Drop"
+        } else {
+            match drop_info.drop_glue {
+                DropGlue::HasDropGlue => "needs Drop",
+                DropGlue::None => "no Drop",
+                DropGlue::DependOnParams => "type param may need Drop",
             }
-            DropGlue::HasDropGlue => "contain types with destructors (drop glue)",
         };
-        Some(match drop_info.has_dtor {
-            Some(true) => format!("{}; has a destructor", rendered_drop_glue),
-            Some(false) => format!("{}; doesn't have a destructor", rendered_drop_glue),
-            None => rendered_drop_glue.to_owned(),
-        })
+
+        Some(rendered_drop_glue.to_owned())
     };
 
     let dyn_compatibility_info = || match def {
@@ -746,7 +746,7 @@ pub(super) fn definition(
     };
 
     let mut extra = String::new();
-    if hovered_definition {
+    if render_extras {
         if let Some(notable_traits) =
             render_notable_trait(db, notable_traits, edition, display_target)
         {
@@ -760,15 +760,18 @@ pub(super) fn definition(
         if let Some(layout_info) = layout_info() {
             extra.push_str("\n___\n");
             extra.push_str(&layout_info);
+            if let Some(drop_info) = drop_info() {
+                extra.push_str(", ");
+                extra.push_str(&drop_info)
+            }
+        } else if let Some(drop_info) = drop_info() {
+            extra.push_str("\n___\n");
+            extra.push_str(&drop_info);
         }
         if let Some(dyn_compatibility_info) = dyn_compatibility_info() {
             extra.push_str("\n___\n");
             extra.push_str(&dyn_compatibility_info);
         }
-        if let Some(drop_info) = drop_info() {
-            extra.push_str("\n___\n");
-            extra.push_str(&drop_info);
-        }
     }
     let mut desc = String::new();
     desc.push_str(&label);
@@ -906,9 +909,9 @@ fn render_notable_trait(
     let mut needs_impl_header = true;
     for (trait_, assoc_types) in notable_traits {
         desc.push_str(if mem::take(&mut needs_impl_header) {
-            "Implements notable traits: "
+            "Implements notable traits: `"
         } else {
-            ", "
+            "`, `"
         });
         format_to!(desc, "{}", trait_.name(db).display(db, edition));
         if !assoc_types.is_empty() {
@@ -928,7 +931,12 @@ fn render_notable_trait(
             desc.push('>');
         }
     }
-    desc.is_empty().not().then_some(desc)
+    if desc.is_empty() {
+        None
+    } else {
+        desc.push('`');
+        Some(desc)
+    }
 }
 
 fn type_info(
@@ -955,37 +963,12 @@ fn type_info(
     res.markup = if let Some(adjusted_ty) = adjusted {
         walk_and_push_ty(db, &adjusted_ty, &mut push_new_def);
 
-        let notable = {
-            let mut desc = String::new();
-            let mut needs_impl_header = true;
-            for (trait_, assoc_types) in notable_traits(db, &original) {
-                desc.push_str(if mem::take(&mut needs_impl_header) {
-                    "Implements Notable Traits: "
-                } else {
-                    ", "
-                });
-                format_to!(desc, "{}", trait_.name(db).display(db, edition));
-                if !assoc_types.is_empty() {
-                    desc.push('<');
-                    format_to!(
-                        desc,
-                        "{}",
-                        assoc_types.into_iter().format_with(", ", |(ty, name), f| {
-                            f(&name.display(db, edition))?;
-                            f(&" = ")?;
-                            match ty {
-                                Some(ty) => f(&ty.display(db, display_target)),
-                                None => f(&"?"),
-                            }
-                        })
-                    );
-                    desc.push('>');
-                }
-            }
-            if !desc.is_empty() {
-                desc.push('\n');
-            }
-            desc
+        let notable = if let Some(notable) =
+            render_notable_trait(db, &notable_traits(db, &original), edition, display_target)
+        {
+            format!("{notable}\n")
+        } else {
+            String::new()
         };
 
         let original = original.display(db, display_target).to_string();
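
The rewritten `drop_info` rendering above collapses the old sentences into four short labels: `impl Drop`, `needs Drop`, `no Drop`, and `type param may need Drop`, appended to the layout line instead of getting a section of their own. A standalone sketch of types that land in each category (names invented for illustration; the mapping follows the updated test expectations):

```rust
// One example type per hover label.
use std::mem::ManuallyDrop;

struct Unit;                     // "no Drop": no destructor, no drop glue
struct HasDtor;                  // "impl Drop": defines its own destructor
impl Drop for HasDtor {
    fn drop(&mut self) {}
}
struct Wrapper { field: String } // "needs Drop": a field carries drop glue
struct Generic<T>(T);            // "type param may need Drop"

fn main() {
    let _ = Unit;
    let _ = HasDtor;
    let _ = Wrapper { field: String::new() };
    let _ = Generic(0u8);
    // ManuallyDrop suppresses drop glue, so the hover reports "no Drop" for it.
    let _ = ManuallyDrop::new(HasDtor);
}
```
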
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
index 80a2d4690d4a2..d469cd7c0cd54 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
@@ -1,9 +1,9 @@
-use expect_test::{expect, Expect};
-use ide_db::{base_db::SourceDatabase, FileRange};
+use expect_test::{Expect, expect};
+use ide_db::{FileRange, base_db::SourceDatabase};
 use syntax::TextRange;
 
 use crate::{
-    fixture, HoverConfig, HoverDocFormat, MemoryLayoutHoverConfig, MemoryLayoutHoverRenderKind,
+    HoverConfig, HoverDocFormat, MemoryLayoutHoverConfig, MemoryLayoutHoverRenderKind, fixture,
 };
 
 const HOVER_BASE_CONFIG: HoverConfig = HoverConfig {
@@ -47,7 +47,7 @@ fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
         .unwrap();
 
     let content = analysis.db.file_text(position.file_id);
-    let hovered_element = &content[hover.range];
+    let hovered_element = &content.text(&analysis.db)[hover.range];
 
     let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup);
     expect.assert_eq(&actual)
@@ -72,7 +72,7 @@ fn check_hover_fields_limit(
         .unwrap()
         .unwrap();
 
-    let content = analysis.db.file_text(position.file_id);
+    let content = analysis.db.file_text(position.file_id).text(&analysis.db);
     let hovered_element = &content[hover.range];
 
     let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup);
@@ -98,7 +98,7 @@ fn check_hover_enum_variants_limit(
         .unwrap()
         .unwrap();
 
-    let content = analysis.db.file_text(position.file_id);
+    let content = analysis.db.file_text(position.file_id).text(&analysis.db);
     let hovered_element = &content[hover.range];
 
     let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup);
@@ -124,7 +124,7 @@ fn check_assoc_count(
         .unwrap()
         .unwrap();
 
-    let content = analysis.db.file_text(position.file_id);
+    let content = analysis.db.file_text(position.file_id).text(&analysis.db);
     let hovered_element = &content[hover.range];
 
     let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup);
@@ -141,7 +141,7 @@ fn check_hover_no_links(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect:
         .unwrap()
         .unwrap();
 
-    let content = analysis.db.file_text(position.file_id);
+    let content = analysis.db.file_text(position.file_id).text(&analysis.db);
     let hovered_element = &content[hover.range];
 
     let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup);
@@ -158,7 +158,7 @@ fn check_hover_no_memory_layout(#[rust_analyzer::rust_fixture] ra_fixture: &str,
         .unwrap()
         .unwrap();
 
-    let content = analysis.db.file_text(position.file_id);
+    let content = analysis.db.file_text(position.file_id).text(&analysis.db);
     let hovered_element = &content[hover.range];
 
     let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup);
@@ -179,7 +179,7 @@ fn check_hover_no_markdown(#[rust_analyzer::rust_fixture] ra_fixture: &str, expe
         .unwrap()
         .unwrap();
 
-    let content = analysis.db.file_text(position.file_id);
+    let content = analysis.db.file_text(position.file_id).text(&analysis.db);
     let hovered_element = &content[hover.range];
 
     let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup);
@@ -567,11 +567,7 @@ fn main() {
 
             ---
 
-            size = 8, align = 4
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 8, align = 4, no Drop
         "#]],
     );
 }
@@ -816,11 +812,7 @@ struct Foo { fiel$0d_a: u8, field_b: i32, field_c: i16 }
 
             ---
 
-            size = 1, align = 1, offset = 6
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 1, align = 1, offset = 6, no Drop
         "#]],
     );
 }
@@ -871,11 +863,7 @@ fn main() {
 
             ---
 
-            size = 4, align = 4, offset = 0
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 4, align = 4, offset = 0, no Drop
         "#]],
     );
 }
@@ -945,11 +933,7 @@ struct Foo$0(pub u32) where u32: Copy;
 
             ---
 
-            size = 4, align = 4
-
-            ---
-
-            does not contain types with destructors (drop glue); doesn't have a destructor
+            size = 4, align = 4, no Drop
         "#]],
     );
 }
@@ -975,11 +959,7 @@ struct Foo$0 { field: u32 }
 
             ---
 
-            size = 4, align = 4
-
-            ---
-
-            does not contain types with destructors (drop glue); doesn't have a destructor
+            size = 4, align = 4, no Drop
         "#]],
     );
     check(
@@ -1004,11 +984,7 @@ struct Foo$0 where u32: Copy { field: u32 }
 
             ---
 
-            size = 4, align = 4
-
-            ---
-
-            does not contain types with destructors (drop glue); doesn't have a destructor
+            size = 4, align = 4, no Drop
         "#]],
     );
 }
@@ -1037,11 +1013,7 @@ fn hover_record_struct_limit() {
 
             ---
 
-            size = 12 (0xC), align = 4
-
-            ---
-
-            does not contain types with destructors (drop glue); doesn't have a destructor
+            size = 12 (0xC), align = 4, no Drop
         "#]],
     );
     check_hover_fields_limit(
@@ -1064,11 +1036,7 @@ fn hover_record_struct_limit() {
 
             ---
 
-            size = 4, align = 4
-
-            ---
-
-            does not contain types with destructors (drop glue); doesn't have a destructor
+            size = 4, align = 4, no Drop
         "#]],
     );
     check_hover_fields_limit(
@@ -1094,11 +1062,7 @@ fn hover_record_struct_limit() {
 
             ---
 
-            size = 16 (0x10), align = 4
-
-            ---
-
-            does not contain types with destructors (drop glue); doesn't have a destructor
+            size = 16 (0x10), align = 4, no Drop
         "#]],
     );
     check_hover_fields_limit(
@@ -1119,11 +1083,7 @@ fn hover_record_struct_limit() {
 
             ---
 
-            size = 12 (0xC), align = 4
-
-            ---
-
-            does not contain types with destructors (drop glue); doesn't have a destructor
+            size = 12 (0xC), align = 4, no Drop
         "#]],
     );
     check_hover_fields_limit(
@@ -1144,11 +1104,7 @@ fn hover_record_struct_limit() {
 
             ---
 
-            size = 12 (0xC), align = 4
-
-            ---
-
-            does not contain types with destructors (drop glue); doesn't have a destructor
+            size = 12 (0xC), align = 4, no Drop
         "#]],
     );
 
@@ -1171,11 +1127,7 @@ fn hover_record_struct_limit() {
 
             ---
 
-            size = 0, align = 1
-
-            ---
-
-            does not contain types with destructors (drop glue); doesn't have a destructor
+            size = 0, align = 1, no Drop
         "#]],
     );
 }
@@ -1200,11 +1152,7 @@ fn hover_record_variant_limit() {
 
             ---
 
-            size = 12 (0xC), align = 4
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 12 (0xC), align = 4, no Drop
         "#]],
     );
     check_hover_fields_limit(
@@ -1225,11 +1173,7 @@ fn hover_record_variant_limit() {
 
             ---
 
-            size = 4, align = 4
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 4, align = 4, no Drop
         "#]],
     );
     check_hover_fields_limit(
@@ -1250,11 +1194,7 @@ fn hover_record_variant_limit() {
 
             ---
 
-            size = 16 (0x10), align = 4
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 16 (0x10), align = 4, no Drop
         "#]],
     );
     check_hover_fields_limit(
@@ -1275,11 +1215,7 @@ fn hover_record_variant_limit() {
 
             ---
 
-            size = 12 (0xC), align = 4
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 12 (0xC), align = 4, no Drop
         "#]],
     );
     check_hover_fields_limit(
@@ -1300,11 +1236,7 @@ fn hover_record_variant_limit() {
 
             ---
 
-            size = 12 (0xC), align = 4
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 12 (0xC), align = 4, no Drop
         "#]],
     );
 }
@@ -1330,11 +1262,7 @@ fn hover_enum_limit() {
 
             ---
 
-            size = 1, align = 1, niches = 254
-
-            ---
-
-            does not contain types with destructors (drop glue); doesn't have a destructor
+            size = 1, align = 1, niches = 254, no Drop
         "#]],
     );
     check_hover_enum_variants_limit(
@@ -1356,11 +1284,7 @@ fn hover_enum_limit() {
 
             ---
 
-            size = 1, align = 1, niches = 254
-
-            ---
-
-            does not contain types with destructors (drop glue); doesn't have a destructor
+            size = 1, align = 1, niches = 254, no Drop
         "#]],
     );
     check_hover_enum_variants_limit(
@@ -1379,11 +1303,7 @@ fn hover_enum_limit() {
 
             ---
 
-            size = 1, align = 1, niches = 254
-
-            ---
-
-            does not contain types with destructors (drop glue); doesn't have a destructor
+            size = 1, align = 1, niches = 254, no Drop
         "#]],
     );
     check_hover_enum_variants_limit(
@@ -1402,11 +1322,7 @@ fn hover_enum_limit() {
 
             ---
 
-            size = 1, align = 1, niches = 254
-
-            ---
-
-            does not contain types with destructors (drop glue); doesn't have a destructor
+            size = 1, align = 1, niches = 254, no Drop
         "#]],
     );
     check_hover_enum_variants_limit(
@@ -1443,11 +1359,7 @@ fn hover_enum_limit() {
 
             ---
 
-            size = 12 (0xC), align = 4, niches = a lot
-
-            ---
-
-            does not contain types with destructors (drop glue); doesn't have a destructor
+            size = 12 (0xC), align = 4, niches = a lot, no Drop
         "#]],
     );
 }
@@ -1473,11 +1385,7 @@ fn hover_union_limit() {
 
             ---
 
-            size = 4, align = 4
-
-            ---
-
-            does not contain types with destructors (drop glue); doesn't have a destructor
+            size = 4, align = 4, no Drop
         "#]],
     );
     check_hover_fields_limit(
@@ -1499,11 +1407,7 @@ fn hover_union_limit() {
 
             ---
 
-            size = 4, align = 4
-
-            ---
-
-            does not contain types with destructors (drop glue); doesn't have a destructor
+            size = 4, align = 4, no Drop
         "#]],
     );
     check_hover_fields_limit(
@@ -1522,11 +1426,7 @@ fn hover_union_limit() {
 
             ---
 
-            size = 4, align = 4
-
-            ---
-
-            does not contain types with destructors (drop glue); doesn't have a destructor
+            size = 4, align = 4, no Drop
         "#]],
     );
     check_hover_fields_limit(
@@ -1545,11 +1445,7 @@ fn hover_union_limit() {
 
             ---
 
-            size = 4, align = 4
-
-            ---
-
-            does not contain types with destructors (drop glue); doesn't have a destructor
+            size = 4, align = 4, no Drop
         "#]],
     );
 }
@@ -1575,11 +1471,7 @@ struct Foo$0 where u32: Copy;
 
             ---
 
-            size = 0, align = 1
-
-            ---
-
-            does not contain types with destructors (drop glue); doesn't have a destructor
+            size = 0, align = 1, no Drop
         "#]],
     );
 }
@@ -1605,7 +1497,7 @@ type Fo$0o: Trait = S where T: Trait;
 
             ---
 
-            does not contain types with destructors (drop glue)
+            no Drop
         "#]],
     );
 }
@@ -1754,11 +1646,7 @@ fn main() {
 
             ---
 
-            size = 8, align = 4
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 8, align = 4, no Drop
         "#]],
     );
     check_hover_range(
@@ -1813,11 +1701,7 @@ fn main() { let b$0ar = Some(12); }
 
             ---
 
-            size = 4, align = 4
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 4, align = 4, no Drop
         "#]],
     );
 }
@@ -1845,7 +1729,7 @@ enum Option<T> {
 
             ---
 
-            does not contain types with destructors (drop glue)
+            no Drop
 
             ---
 
@@ -1908,11 +1792,7 @@ fn hover_for_local_variable_pat() {
 
             ---
 
-            size = 4, align = 4
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 4, align = 4, no Drop
         "#]],
     )
 }
@@ -1944,11 +1824,7 @@ fn hover_for_param_edge() {
 
             ---
 
-            size = 4, align = 4
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 4, align = 4, no Drop
         "#]],
     )
 }
@@ -1974,7 +1850,7 @@ fn hover_for_param_with_multiple_traits() {
 
             ---
 
-            may contain types with destructors (drop glue) depending on type parameters
+            type param may need Drop
         "#]],
     )
 }
@@ -2000,11 +1876,7 @@ fn main() { let foo_$0test = Thing::new(); }
 
             ---
 
-            size = 4, align = 4
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 4, align = 4, no Drop
         "#]],
     )
 }
@@ -2089,6 +1961,10 @@ impl Thing {
                 x: u32,
             }
             ```
+
+            ---
+
+            size = 4, align = 4
         "#]],
     );
     check_hover_fields_limit(
@@ -2109,6 +1985,10 @@ impl Thing {
             ```rust
             struct Thing
             ```
+
+            ---
+
+            size = 4, align = 4
         "#]],
     );
     check(
@@ -2130,6 +2010,10 @@ impl Thing {
                 x: u32,
             }
             ```
+
+            ---
+
+            size = 4, align = 4
         "#]],
     );
     check(
@@ -2151,6 +2035,10 @@ impl Thing {
                 A,
             }
             ```
+
+            ---
+
+            size = 0, align = 1
         "#]],
     );
     check(
@@ -2172,6 +2060,10 @@ impl Thing {
                 A,
             }
             ```
+
+            ---
+
+            size = 0, align = 1
         "#]],
     );
     check(
@@ -2190,6 +2082,10 @@ impl usize {
             ```rust
             usize
             ```
+
+            ---
+
+            size = 8, align = 8
         "#]],
     );
     check(
@@ -2208,6 +2104,32 @@ impl fn() -> usize {
             ```rust
             fn() -> usize
             ```
+
+            ---
+
+            size = 8, align = 8, niches = 1
+        "#]],
+    );
+    check(
+        r#"
+pub struct Foo
+where
+    Self$0:;
+"#,
+        expect![[r#"
+            *Self*
+
+            ```rust
+            ra_test_fixture
+            ```
+
+            ```rust
+            pub struct Foo
+            ```
+
+            ---
+
+            size = 0, align = 1, no Drop
         "#]],
     );
 }
@@ -2753,11 +2675,7 @@ fn test_hover_function_pointer_show_identifiers() {
 
             ---
 
-            size = 8, align = 8, niches = 1
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 8, align = 8, niches = 1, no Drop
         "#]],
     );
 }
@@ -2779,11 +2697,7 @@ fn test_hover_function_pointer_no_identifier() {
 
             ---
 
-            size = 8, align = 8, niches = 1
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 8, align = 8, niches = 1, no Drop
         "#]],
     );
 }
@@ -3026,11 +2940,7 @@ pub struct B$0ar
 
             ---
 
-            size = 0, align = 1
-
-            ---
-
-            does not contain types with destructors (drop glue); doesn't have a destructor
+            size = 0, align = 1, no Drop
 
             ---
 
@@ -3061,11 +2971,7 @@ pub struct B$0ar
 
             ---
 
-            size = 0, align = 1
-
-            ---
-
-            does not contain types with destructors (drop glue); doesn't have a destructor
+            size = 0, align = 1, no Drop
 
             ---
 
@@ -3158,11 +3064,7 @@ fn test_hover_layout_of_variant() {
 
             ---
 
-            size = 4, align = 2
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 4, align = 2, no Drop
         "#]],
     );
 }
@@ -3187,7 +3089,7 @@ fn test_hover_layout_of_variant_generic() {
 
             ---
 
-            does not contain types with destructors (drop glue)
+            no Drop
         "#]],
     );
 }
@@ -3212,11 +3114,7 @@ struct S$0<T>(core::marker::PhantomData<T>);
 
             ---
 
-            size = 0, align = 1
-
-            ---
-
-            does not contain types with destructors (drop glue); doesn't have a destructor
+            size = 0, align = 1, no Drop
         "#]],
     );
 }
@@ -3244,11 +3142,7 @@ fn test_hover_layout_of_enum() {
 
             ---
 
-            size = 16 (0x10), align = 8, niches = 254
-
-            ---
-
-            does not contain types with destructors (drop glue); doesn't have a destructor
+            size = 16 (0x10), align = 8, niches = 254, no Drop
         "#]],
     );
 }
@@ -3270,7 +3164,7 @@ fn test_hover_no_memory_layout() {
 
             ---
 
-            does not contain types with destructors (drop glue)
+            no Drop
         "#]],
     );
 
@@ -4578,11 +4472,7 @@ fn main() {
 
             ---
 
-            size = 8, align = 8, niches = 1
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 8, align = 8, niches = 1, no Drop
 
             ---
 
@@ -4596,11 +4486,7 @@ fn main() {
 
             ---
 
-            size = 4, align = 4, offset = 0
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 4, align = 4, offset = 0, no Drop
         "#]],
     );
 }
@@ -4620,16 +4506,12 @@ struct S$0T<const C: usize = 1, T = Foo>(T);
             ```
 
             ```rust
-            struct ST<const C: usize = 1, T = Foo>(T)
+            struct ST<const C: usize = {const}, T = Foo>(T)
             ```
 
             ---
 
-            size = 0, align = 1
-
-            ---
-
-            may contain types with destructors (drop glue) depending on type parameters; doesn't have a destructor
+            size = 0, align = 1, type param may need Drop
         "#]],
     );
 }
@@ -4654,11 +4536,7 @@ struct S$0T<const C: usize = {40 + 2}, T = Foo>(T);
 
             ---
 
-            size = 0, align = 1
-
-            ---
-
-            may contain types with destructors (drop glue) depending on type parameters; doesn't have a destructor
+            size = 0, align = 1, type param may need Drop
         "#]],
     );
 }
@@ -4679,16 +4557,12 @@ struct S$0T<const C: usize = VAL, T = Foo>(T);
             ```
 
             ```rust
-            struct ST<const C: usize = VAL, T = Foo>(T)
+            struct ST<const C: usize = {const}, T = Foo>(T)
             ```
 
             ---
 
-            size = 0, align = 1
-
-            ---
-
-            may contain types with destructors (drop glue) depending on type parameters; doesn't have a destructor
+            size = 0, align = 1, type param may need Drop
         "#]],
     );
 }
@@ -4712,11 +4586,7 @@ fn main() {
 
             ---
 
-            size = 0, align = 1
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 0, align = 1, no Drop
         "#]],
     );
 }
@@ -4740,11 +4610,7 @@ fn main() {
 
             ---
 
-            size = 0, align = 1
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 0, align = 1, no Drop
         "#]],
     );
 }
@@ -4763,16 +4629,12 @@ fn main() {
             *value*
 
             ```rust
-            let value: Const<-1>
+            let value: Const<_>
             ```
 
             ---
 
-            size = 0, align = 1
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 0, align = 1, no Drop
         "#]],
     );
 }
@@ -4796,11 +4658,7 @@ fn main() {
 
             ---
 
-            size = 0, align = 1
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 0, align = 1, no Drop
         "#]],
     );
 }
@@ -4824,11 +4682,7 @@ fn main() {
 
             ---
 
-            size = 0, align = 1
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 0, align = 1, no Drop
         "#]],
     );
 }
@@ -4851,11 +4705,7 @@ impl Foo {
 
             ---
 
-            size = 8, align = 8, niches = 1
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 8, align = 8, niches = 1, no Drop
         "#]],
     );
 }
@@ -4879,11 +4729,7 @@ impl Foo {
 
             ---
 
-            size = 0, align = 1
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 0, align = 1, no Drop
         "#]],
     );
 }
@@ -5368,16 +5214,12 @@ type Fo$0o2 = Foo<2>;
             ```
 
             ```rust
-            type Foo2 = Foo<2>
+            type Foo2 = Foo<<expr>>
             ```
 
             ---
 
-            size = 0, align = 1
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 0, align = 1, no Drop
         "#]],
     );
 }
@@ -5427,11 +5269,7 @@ enum E {
 
             ---
 
-            size = 1, align = 1
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 1, align = 1, no Drop
 
             ---
 
@@ -5460,11 +5298,7 @@ enum E {
 
             ---
 
-            size = 1, align = 1
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 1, align = 1, no Drop
 
             ---
 
@@ -5494,11 +5328,7 @@ enum E {
 
             ---
 
-            size = 1, align = 1
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 1, align = 1, no Drop
 
             ---
 
@@ -5528,11 +5358,7 @@ enum E {
 
             ---
 
-            size = 1, align = 1
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 1, align = 1, no Drop
 
             ---
 
@@ -6197,7 +6023,7 @@ const FOO$0: &[i32; 5] = &[12; 5];
             ```
 
             ```rust
-            const FOO: &[i32; 5] = &[12, 12, 12, 12, 12]
+            const FOO: &[i32; {const}] = &[12, 12, 12, 12, 12]
             ```
         "#]],
     );
@@ -6463,11 +6289,7 @@ fn main() {
 
             ---
 
-            size = 32 (0x20), align = 4
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 32 (0x20), align = 4, no Drop
         "#]],
     );
 }
@@ -7671,11 +7493,7 @@ enum Enum {
 
             ---
 
-            size = 4, align = 4
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 4, align = 4, no Drop
         "#]],
     );
 }
@@ -7701,11 +7519,7 @@ enum Enum {
 
             ---
 
-            size = 4, align = 4
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 4, align = 4, no Drop
         "#]],
     );
 }
@@ -8375,11 +8189,7 @@ fn test() {
 
             ---
 
-            size = 0, align = 1
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 0, align = 1, no Drop
         "#]],
     );
 }
@@ -9024,15 +8834,11 @@ fn main(notable$0: u32) {}
 
             ---
 
-            Implements notable traits: Notable\<Assoc = &str, Assoc2 = char>
+            Implements notable traits: `Notable<Assoc = &str, Assoc2 = char>`
 
             ---
 
-            size = 4, align = 4
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 4, align = 4, no Drop
         "#]],
     );
 }
@@ -9124,11 +8930,7 @@ extern "C" {
 
             ---
 
-            size = 0, align = 1
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 0, align = 1, no Drop
         "#]],
     );
 }
@@ -9157,7 +8959,7 @@ fn main() {
             S
             ```
             ___
-            Implements notable traits: Notable, Future<Output = u32>, Iterator<Item = S>"#]],
+            Implements notable traits: `Future<Output = u32>`, `Iterator<Item = S>`, `Notable`"#]],
     );
 }
 
@@ -9274,11 +9076,7 @@ struct Pedro$0<'a> {
 
             ---
 
-            size = 16 (0x10), align = 8, niches = 1
-
-            ---
-
-            does not contain types with destructors (drop glue); doesn't have a destructor
+            size = 16 (0x10), align = 8, niches = 1, no Drop
         "#]],
     )
 }
@@ -9299,7 +9097,7 @@ fn main(a$0: impl T) {}
 
             ---
 
-            may contain types with destructors (drop glue) depending on type parameters
+            type param may need Drop
         "#]],
     );
 }
@@ -9320,11 +9118,7 @@ fn main(a$0: T) {}
 
             ---
 
-            size = 0, align = 1
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 0, align = 1, no Drop
         "#]],
     );
 }
@@ -9377,11 +9171,7 @@ fn main() {
 
             ---
 
-            size = 0, align = 1
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 0, align = 1, no Drop
         "#]],
     );
 }
@@ -9715,11 +9505,7 @@ type A$0 = B;
 
             ---
 
-            size = 0, align = 1
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 0, align = 1, no Drop
 
             ---
 
@@ -9752,11 +9538,7 @@ type A$0 = B;
 
             ---
 
-            size = 0, align = 1
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 0, align = 1, no Drop
 
             ---
 
@@ -9790,11 +9572,7 @@ type A$0 = B;
 
             ---
 
-            size = 0, align = 1
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 0, align = 1, no Drop
 
             ---
 
@@ -9826,11 +9604,7 @@ type A$0 = B;
 
             ---
 
-            size = 0, align = 1
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 0, align = 1, no Drop
         "#]],
     );
 
@@ -9954,11 +9728,7 @@ fn main() {
 
             ---
 
-            size = 0, align = 1
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 0, align = 1, no Drop
         "#]],
     );
 
@@ -9986,11 +9756,7 @@ fn main() {
 
             ---
 
-            size = 0, align = 1
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 0, align = 1, no Drop
         "#]],
     );
 
@@ -10025,11 +9791,7 @@ fn main() {
 
             ---
 
-            size = 0, align = 1
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 0, align = 1, no Drop
         "#]],
     );
 }
@@ -10348,11 +10110,7 @@ fn bar() {
 
             ---
 
-            size = 4, align = 4
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 4, align = 4, no Drop
 
             ---
 
@@ -10366,7 +10124,7 @@ fn bar() {
 
             ---
 
-            may contain types with destructors (drop glue) depending on type parameters
+            type param may need Drop
 
             ---
 
@@ -10599,11 +10357,7 @@ struct NoDrop$0;
 
             ---
 
-            size = 0, align = 1
-
-            ---
-
-            does not contain types with destructors (drop glue); doesn't have a destructor
+            size = 0, align = 1, no Drop
         "#]],
     );
     check(
@@ -10627,11 +10381,7 @@ impl Drop for NeedsDrop {
 
             ---
 
-            size = 0, align = 1
-
-            ---
-
-            does not contain types with destructors (drop glue); has a destructor
+            size = 0, align = 1, impl Drop
         "#]],
     );
     check(
@@ -10656,11 +10406,7 @@ type NoDrop$0 = core::mem::ManuallyDrop<NeedsDrop>;
 
             ---
 
-            size = 0, align = 1
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 0, align = 1, no Drop
         "#]],
     );
     check(
@@ -10691,11 +10437,7 @@ struct DropField$0 {
 
             ---
 
-            size = 4, align = 4
-
-            ---
-
-            contain types with destructors (drop glue); doesn't have a destructor
+            size = 4, align = 4, needs Drop
         "#]],
     );
     check(
@@ -10716,7 +10458,7 @@ type Foo$0 = impl Sized;
 
             ---
 
-            contain types with destructors (drop glue)
+            needs Drop
         "#]],
     );
     check(
@@ -10744,11 +10486,7 @@ enum Enum {
 
             ---
 
-            size = 16 (0x10), align = 8, niches = 1
-
-            ---
-
-            does not contain types with destructors (drop glue)
+            size = 16 (0x10), align = 8, niches = 1, no Drop
         "#]],
     );
     check(
@@ -10768,7 +10506,7 @@ struct Foo$0<T>(T);
 
             ---
 
-            may contain types with destructors (drop glue) depending on type parameters; doesn't have a destructor
+            type param may need Drop
         "#]],
     );
     check(
@@ -10791,7 +10529,7 @@ struct Foo$0<T: Copy>(T);
 
             ---
 
-            does not contain types with destructors (drop glue); doesn't have a destructor
+            no Drop
         "#]],
     );
     check(
@@ -10817,7 +10555,7 @@ struct Foo$0<T: Trait>(T::Assoc);
 
             ---
 
-            does not contain types with destructors (drop glue); doesn't have a destructor
+            no Drop
         "#]],
     );
     check(
@@ -10848,7 +10586,7 @@ pub struct ManuallyDrop$0<T: ?Sized> {
 
             ---
 
-            does not contain types with destructors (drop glue); doesn't have a destructor
+            no Drop
         "#]],
     );
 }
@@ -10891,3 +10629,73 @@ impl PublicFlags for NoteDialects {
         "#]],
     );
 }
+
+#[test]
+fn bounds_from_container_do_not_panic() {
+    check(
+        r#"
+//- minicore: copy
+struct Foo<T>(T);
+
+impl<T: Copy> Foo<T> {
+    fn foo<U: Copy>(&self, _u: U) {}
+}
+
+fn bar(v: &Foo<i32>) {
+    v.$0foo(1u32);
+}
+    "#,
+        expect![[r#"
+            *foo*
+
+            ```rust
+            ra_test_fixture::Foo
+            ```
+
+            ```rust
+            impl<T> Foo<T>
+            fn foo<U>(&self, _u: U)
+            where
+                U: Copy,
+                // Bounds from impl:
+                T: Copy,
+            ```
+
+            ---
+
+            `T` = `i32`, `U` = `u32`
+        "#]],
+    );
+}
+
+#[test]
+fn extra_lifetime_param_on_trait_method_subst() {
+    check(
+        r#"
+struct AudioFormat;
+
+trait ValueEnum {
+    fn to_possible_value(&self);
+}
+
+impl ValueEnum for AudioFormat {
+    fn to_possible_value<'a>(&'a self) {}
+}
+
+fn main() {
+    ValueEnum::to_possible_value$0(&AudioFormat);
+}
+    "#,
+        expect![[r#"
+            *to_possible_value*
+
+            ```rust
+            ra_test_fixture::AudioFormat
+            ```
+
+            ```rust
+            fn to_possible_value<'a>(&'a self)
+            ```
+        "#]],
+    );
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs
index 6babdff52a2be..82704af647db3 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs
@@ -5,21 +5,21 @@ use std::{
 
 use either::Either;
 use hir::{
-    sym, ClosureStyle, DisplayTarget, HasVisibility, HirDisplay, HirDisplayError, HirWrite,
-    ModuleDef, ModuleDefId, Semantics,
+    ClosureStyle, DisplayTarget, EditionedFileId, HasVisibility, HirDisplay, HirDisplayError,
+    HirWrite, ModuleDef, ModuleDefId, Semantics, sym,
 };
-use ide_db::{famous_defs::FamousDefs, FileRange, RootDatabase};
-use ide_db::{text_edit::TextEdit, FxHashSet};
+use ide_db::{FileRange, RootDatabase, famous_defs::FamousDefs, text_edit::TextEditBuilder};
+use ide_db::{FxHashSet, text_edit::TextEdit};
 use itertools::Itertools;
-use smallvec::{smallvec, SmallVec};
-use span::EditionedFileId;
+use smallvec::{SmallVec, smallvec};
 use stdx::never;
 use syntax::{
+    SmolStr, SyntaxNode, TextRange, TextSize, WalkEvent,
     ast::{self, AstNode, HasGenericParams},
-    format_smolstr, match_ast, SmolStr, SyntaxNode, TextRange, TextSize, WalkEvent,
+    format_smolstr, match_ast,
 };
 
-use crate::{navigation_target::TryToNav, FileId};
+use crate::{FileId, navigation_target::TryToNav};
 
 mod adjustment;
 mod bind_pat;
@@ -85,7 +85,7 @@ pub(crate) fn inlay_hints(
     let sema = Semantics::new(db);
     let file_id = sema
         .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition(file_id));
+        .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
     let file = sema.parse(file_id);
     let file = file.syntax();
 
@@ -136,7 +136,7 @@ pub(crate) fn inlay_hints_resolve(
     let sema = Semantics::new(db);
     let file_id = sema
         .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition(file_id));
+        .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
     let file = sema.parse(file_id);
     let file = file.syntax();
 
@@ -207,7 +207,11 @@ fn hints(
     file_id: EditionedFileId,
     node: SyntaxNode,
 ) {
-    let display_target = sema.first_crate_or_default(file_id.file_id()).to_display_target(sema.db);
+    let file_id = file_id.editioned_file_id(sema.db);
+    let Some(krate) = sema.first_crate(file_id.file_id()) else {
+        return;
+    };
+    let display_target = krate.to_display_target(sema.db);
     closing_brace::hints(hints, sema, config, file_id, display_target, node.clone());
     if let Some(any_has_generic_args) = ast::AnyHasGenericArgs::cast(node.clone()) {
         generic_param::hints(hints, famous_defs, config, any_has_generic_args);
@@ -219,12 +223,12 @@ fn hints(
                 chaining::hints(hints, famous_defs, config, display_target, &expr);
                 adjustment::hints(hints, famous_defs, config, display_target, &expr);
                 match expr {
-                    ast::Expr::CallExpr(it) => param_name::hints(hints, famous_defs, config, file_id, ast::Expr::from(it)),
+                    ast::Expr::CallExpr(it) => param_name::hints(hints, famous_defs, config, ast::Expr::from(it)),
                     ast::Expr::MethodCallExpr(it) => {
-                        param_name::hints(hints, famous_defs, config, file_id, ast::Expr::from(it))
+                        param_name::hints(hints, famous_defs, config, ast::Expr::from(it))
                     }
                     ast::Expr::ClosureExpr(it) => {
-                        closure_captures::hints(hints, famous_defs, config, file_id, it.clone());
+                        closure_captures::hints(hints, famous_defs, config, it.clone());
                         closure_ret::hints(hints, famous_defs, config, display_target, it)
                     },
                     ast::Expr::RangeExpr(it) => range_exclusive::hints(hints, famous_defs, config, file_id,  it),
@@ -793,7 +797,7 @@ fn hint_iterator(
 
     if ty.impls_trait(db, iter_trait, &[]) {
         let assoc_type_item = iter_trait.items(db).into_iter().find_map(|item| match item {
-            hir::AssocItem::TypeAlias(alias) if alias.name(db) == sym::Item.clone() => Some(alias),
+            hir::AssocItem::TypeAlias(alias) if alias.name(db) == sym::Item => Some(alias),
             _ => None,
         })?;
         if let Some(ty) = ty.normalize_trait_assoc_type(db, &[], assoc_type_item) {
@@ -809,7 +813,8 @@ fn ty_to_text_edit(
     config: &InlayHintsConfig,
     node_for_hint: &SyntaxNode,
     ty: &hir::Type,
-    offset_to_insert: TextSize,
+    offset_to_insert_ty: TextSize,
+    additional_edits: &dyn Fn(&mut TextEditBuilder),
     prefix: impl Into<String>,
 ) -> Option<LazyProperty<TextEdit>> {
     // FIXME: Limit the length and bail out on excess somehow?
@@ -818,8 +823,11 @@ fn ty_to_text_edit(
         .and_then(|scope| ty.display_source_code(scope.db, scope.module().into(), false).ok())?;
     Some(config.lazy_text_edit(|| {
         let mut builder = TextEdit::builder();
-        builder.insert(offset_to_insert, prefix.into());
-        builder.insert(offset_to_insert, rendered);
+        builder.insert(offset_to_insert_ty, prefix.into());
+        builder.insert(offset_to_insert_ty, rendered);
+
+        additional_edits(&mut builder);
+
         builder.finish()
     }))
 }
@@ -836,9 +844,9 @@ mod tests {
     use itertools::Itertools;
     use test_utils::extract_annotations;
 
-    use crate::inlay_hints::{AdjustmentHints, AdjustmentHintsMode};
     use crate::DiscriminantHints;
-    use crate::{fixture, inlay_hints::InlayHintsConfig, LifetimeElisionHints};
+    use crate::inlay_hints::{AdjustmentHints, AdjustmentHintsMode};
+    use crate::{LifetimeElisionHints, fixture, inlay_hints::InlayHintsConfig};
 
     use super::{ClosureReturnTypeHints, GenericParameterHints, InlayFieldsToResolve};
 
@@ -993,6 +1001,53 @@ fn foo() {
 fn foo() {
     let
 }
+"#,
+        );
+    }
+
+    #[test]
+    fn closure_dependency_cycle_no_panic() {
+        check(
+            r#"
+fn foo() {
+    let closure;
+     // ^^^^^^^ impl Fn()
+    closure = || {
+        closure();
+    };
+}
+
+fn bar() {
+    let closure1;
+     // ^^^^^^^^ impl Fn()
+    let closure2;
+     // ^^^^^^^^ impl Fn()
+    closure1 = || {
+        closure2();
+    };
+    closure2 = || {
+        closure1();
+    };
+}
+        "#,
+        );
+    }
+
+    #[test]
+    fn regression_19610() {
+        check(
+            r#"
+trait Trait {
+    type Assoc;
+}
+struct Foo<A>(A);
+impl<A: Trait<Assoc = impl Trait>> Foo<A> {
+    fn foo<'a, 'b>(_: &'a [i32], _: &'b [i32]) {}
+}
+
+fn bar() {
+    Foo::foo(&[1], &[2]);
+}
 "#,
         );
     }
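
`ty_to_text_edit` now takes an `additional_edits` callback so callers can append their own edits to the same `TextEdit` that receives the rendered type. A generic sketch of that builder-plus-callback shape, with hypothetical types standing in for `TextEdit`/`TextEditBuilder`:

```rust
// Hypothetical stand-in for a text-edit builder; the point is that the caller's
// closure writes into the same builder before it is finalized.
struct EditBuilder(Vec<(usize, String)>);

impl EditBuilder {
    fn insert(&mut self, offset: usize, text: impl Into<String>) {
        self.0.push((offset, text.into()));
    }
    fn finish(self) -> Vec<(usize, String)> {
        self.0
    }
}

fn build_type_edit(
    offset: usize,
    rendered: &str,
    additional_edits: &dyn Fn(&mut EditBuilder),
) -> Vec<(usize, String)> {
    let mut builder = EditBuilder(Vec::new());
    builder.insert(offset, ": ");
    builder.insert(offset, rendered);
    additional_edits(&mut builder); // caller-supplied edits land in the same edit
    builder.finish()
}

fn main() {
    // Callers that need nothing extra pass a no-op, mirroring `&|_| ()` in bind_pat.rs.
    let edits = build_type_edit(10, "i32", &|_| ());
    assert_eq!(edits.len(), 2);
}
```
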
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs
index 91b8187295236..f2844a2eaa614 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs
@@ -13,7 +13,7 @@ use hir::{
 use ide_db::famous_defs::FamousDefs;
 
 use ide_db::text_edit::TextEditBuilder;
-use syntax::ast::{self, prec::ExprPrecedence, AstNode};
+use syntax::ast::{self, AstNode, prec::ExprPrecedence};
 
 use crate::{
     AdjustmentHints, AdjustmentHintsMode, InlayHint, InlayHintLabel, InlayHintLabelPart,
@@ -224,7 +224,7 @@ fn mode_and_needs_parens_for_adjustment_hints(
     expr: &ast::Expr,
     mode: AdjustmentHintsMode,
 ) -> (bool, bool, bool) {
-    use {std::cmp::Ordering::*, AdjustmentHintsMode::*};
+    use {AdjustmentHintsMode::*, std::cmp::Ordering::*};
 
     match mode {
         Prefix | Postfix => {
@@ -284,8 +284,8 @@ fn needs_parens_for_adjustment_hints(expr: &ast::Expr, postfix: bool) -> (bool,
 #[cfg(test)]
 mod tests {
     use crate::{
-        inlay_hints::tests::{check_with_config, DISABLED_CONFIG},
         AdjustmentHints, AdjustmentHintsMode, InlayHintsConfig,
+        inlay_hints::tests::{DISABLED_CONFIG, check_with_config},
     };
 
     #[test]
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs
index 4379153acaa17..52ea2e5ec58b4 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs
@@ -4,7 +4,7 @@
 //! let _x /* i32 */= f(4, 4);
 //! ```
 use hir::{DisplayTarget, Semantics};
-use ide_db::{famous_defs::FamousDefs, RootDatabase};
+use ide_db::{RootDatabase, famous_defs::FamousDefs};
 
 use itertools::Itertools;
 use syntax::{
@@ -13,8 +13,8 @@ use syntax::{
 };
 
 use crate::{
-    inlay_hints::{closure_has_block_body, label_of_ty, ty_to_text_edit},
     InlayHint, InlayHintPosition, InlayHintsConfig, InlayKind,
+    inlay_hints::{closure_has_block_body, label_of_ty, ty_to_text_edit},
 };
 
 pub(super) fn hints(
@@ -87,6 +87,7 @@ pub(super) fn hints(
                 .as_ref()
                 .map_or_else(|| pat.syntax().text_range(), |t| t.text_range())
                 .end(),
+            &|_| (),
             if colon_token.is_some() { "" } else { ": " },
         )
     } else {
@@ -181,10 +182,10 @@ mod tests {
     use syntax::{TextRange, TextSize};
     use test_utils::extract_annotations;
 
-    use crate::{fixture, inlay_hints::InlayHintsConfig, ClosureReturnTypeHints};
+    use crate::{ClosureReturnTypeHints, fixture, inlay_hints::InlayHintsConfig};
 
     use crate::inlay_hints::tests::{
-        check, check_edit, check_no_edit, check_with_config, DISABLED_CONFIG, TEST_CONFIG,
+        DISABLED_CONFIG, TEST_CONFIG, check, check_edit, check_no_edit, check_with_config,
     };
 
     #[track_caller]
@@ -855,28 +856,6 @@ fn main() {
       //^ |i32| -> ()
     let p = (y, z);
       //^ (|i32| -> i32, |i32| -> ())
-}
-            "#,
-        );
-        check_with_config(
-            InlayHintsConfig {
-                type_hints: true,
-                closure_style: ClosureStyle::ClosureWithId,
-                ..DISABLED_CONFIG
-            },
-            r#"
-//- minicore: fn
-fn main() {
-    let x = || 2;
-      //^ {closure#0}
-    let y = |t: i32| x() + t;
-      //^ {closure#1}
-    let mut t = 5;
-          //^ i32
-    let z = |k: i32| { t += k; };
-      //^ {closure#2}
-    let p = (y, z);
-      //^ ({closure#1}, {closure#2})
 }
             "#,
         );
@@ -1140,12 +1119,11 @@ fn test() {
 
     #[test]
     fn no_edit_for_closure_return_without_body_block() {
-        // We can lift this limitation; see FIXME in closure_ret module.
         let config = InlayHintsConfig {
             closure_return_type_hints: ClosureReturnTypeHints::Always,
             ..TEST_CONFIG
         };
-        check_no_edit(
+        check_edit(
             config,
             r#"
 struct S<T>(T);
@@ -1154,6 +1132,13 @@ fn test() {
     let f = |a: S<usize>| S(a);
 }
 "#,
+            expect![[r#"
+            struct S<T>(T);
+            fn test() {
+                let f = || -> i32 { 3 };
+                let f = |a: S<usize>| -> S<S<usize>> { S(a) };
+            }
+            "#]],
         );
     }
 
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/binding_mode.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/binding_mode.rs
index 5bbb4fe4e66e3..d29173206889d 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/binding_mode.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/binding_mode.rs
@@ -128,8 +128,8 @@ mod tests {
     use expect_test::expect;
 
     use crate::{
-        inlay_hints::tests::{check_edit, check_with_config, DISABLED_CONFIG},
         InlayHintsConfig,
+        inlay_hints::tests::{DISABLED_CONFIG, check_edit, check_with_config},
     };
 
     #[test]
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bounds.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bounds.rs
index e9b728bcaa75d..8ddbfaeffe879 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bounds.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bounds.rs
@@ -1,7 +1,7 @@
 //! Implementation of trait bound hints.
 //!
 //! Currently this renders the implied `Sized` bound.
-use ide_db::{famous_defs::FamousDefs, FileRange};
+use ide_db::{FileRange, famous_defs::FamousDefs};
 
 use span::EditionedFileId;
 use syntax::ast::{self, AstNode, HasTypeBounds};
@@ -86,7 +86,7 @@ mod tests {
 
     use crate::inlay_hints::InlayHintsConfig;
 
-    use crate::inlay_hints::tests::{check_expect, check_with_config, DISABLED_CONFIG};
+    use crate::inlay_hints::tests::{DISABLED_CONFIG, check_expect, check_with_config};
 
     #[track_caller]
     fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs
index 604719bc366f5..ff157fa171b50 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs
@@ -2,8 +2,8 @@
 use hir::DisplayTarget;
 use ide_db::famous_defs::FamousDefs;
 use syntax::{
-    ast::{self, AstNode},
     Direction, NodeOrToken, SyntaxKind, T,
+    ast::{self, AstNode},
 };
 
 use crate::{InlayHint, InlayHintPosition, InlayHintsConfig, InlayKind};
@@ -76,16 +76,15 @@ pub(super) fn hints(
 
 #[cfg(test)]
 mod tests {
-    use expect_test::{expect, Expect};
+    use expect_test::{Expect, expect};
     use ide_db::text_edit::{TextRange, TextSize};
 
     use crate::{
-        fixture,
+        InlayHintsConfig, fixture,
         inlay_hints::{
-            tests::{check_expect, check_with_config, DISABLED_CONFIG, TEST_CONFIG},
             LazyProperty,
+            tests::{DISABLED_CONFIG, TEST_CONFIG, check_expect, check_with_config},
         },
-        InlayHintsConfig,
     };
 
     #[track_caller]
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs
index bec6d38ee9cac..de9ca8c000f0b 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs
@@ -7,13 +7,14 @@ use hir::{DisplayTarget, HirDisplay, Semantics};
 use ide_db::{FileRange, RootDatabase};
 use span::EditionedFileId;
 use syntax::{
+    SyntaxKind, SyntaxNode, T,
     ast::{self, AstNode, HasLoopBody, HasName},
-    match_ast, SyntaxKind, SyntaxNode, T,
+    match_ast,
 };
 
 use crate::{
-    inlay_hints::LazyProperty, InlayHint, InlayHintLabel, InlayHintPosition, InlayHintsConfig,
-    InlayKind,
+    InlayHint, InlayHintLabel, InlayHintPosition, InlayHintsConfig, InlayKind,
+    inlay_hints::LazyProperty,
 };
 
 pub(super) fn hints(
@@ -159,8 +160,8 @@ pub(super) fn hints(
 #[cfg(test)]
 mod tests {
     use crate::{
-        inlay_hints::tests::{check_with_config, DISABLED_CONFIG},
         InlayHintsConfig,
+        inlay_hints::tests::{DISABLED_CONFIG, check_with_config},
     };
 
     #[test]
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs
index 9b981c0a3acf7..3186a566d2bce 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs
@@ -3,8 +3,7 @@
 //! Tests live in [`bind_pat`][super::bind_pat] module.
 use ide_db::famous_defs::FamousDefs;
 use ide_db::text_edit::{TextRange, TextSize};
-use span::EditionedFileId;
-use stdx::{never, TupleExt};
+use stdx::{TupleExt, never};
 use syntax::ast::{self, AstNode};
 
 use crate::{
@@ -15,7 +14,6 @@ pub(super) fn hints(
     acc: &mut Vec<InlayHint>,
     FamousDefs(sema, _): &FamousDefs<'_, '_>,
     config: &InlayHintsConfig,
-    _file_id: EditionedFileId,
     closure: ast::ClosureExpr,
 ) -> Option<()> {
     if !config.closure_capture_hints {
@@ -75,10 +73,12 @@ pub(super) fn hints(
                 // force cache the source file, otherwise sema lookup will potentially panic
                 _ = sema.parse_or_expand(source.file());
                 source.name().and_then(|name| {
-                    name.syntax()
-                        .original_file_range_opt(sema.db)
-                        .map(TupleExt::head)
-                        .map(Into::into)
+                    name.syntax().original_file_range_opt(sema.db).map(TupleExt::head).map(
+                        |frange| ide_db::FileRange {
+                            file_id: frange.file_id.file_id(sema.db),
+                            range: frange.range,
+                        },
+                    )
                 })
             }),
             tooltip: None,
@@ -96,8 +96,8 @@ pub(super) fn hints(
 #[cfg(test)]
 mod tests {
     use crate::{
-        inlay_hints::tests::{check_with_config, DISABLED_CONFIG},
         InlayHintsConfig,
+        inlay_hints::tests::{DISABLED_CONFIG, check_with_config},
     };
 
     #[test]
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs
index 61c9c25fe7396..9e600b5455be2 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs
@@ -2,12 +2,12 @@
 //!
 //! Tests live in [`bind_pat`][super::bind_pat] module.
 use hir::DisplayTarget;
-use ide_db::famous_defs::FamousDefs;
+use ide_db::{famous_defs::FamousDefs, text_edit::TextEditBuilder};
 use syntax::ast::{self, AstNode};
 
 use crate::{
-    inlay_hints::{closure_has_block_body, label_of_ty, ty_to_text_edit},
     ClosureReturnTypeHints, InlayHint, InlayHintPosition, InlayHintsConfig, InlayKind,
+    inlay_hints::{closure_has_block_body, label_of_ty, ty_to_text_edit},
 };
 
 pub(super) fn hints(
@@ -35,8 +35,9 @@ pub(super) fn hints(
 
     let param_list = closure.param_list()?;
 
-    let closure = sema.descend_node_into_attributes(closure).pop()?;
-    let ty = sema.type_of_expr(&ast::Expr::ClosureExpr(closure.clone()))?.adjusted();
+    let resolve_parent = Some(closure.syntax().text_range());
+    let descended_closure = sema.descend_node_into_attributes(closure.clone()).pop()?;
+    let ty = sema.type_of_expr(&ast::Expr::ClosureExpr(descended_closure.clone()))?.adjusted();
     let callable = ty.as_callable(sema.db)?;
     let ty = callable.return_type();
     if arrow.is_none() && ty.is_unit() {
@@ -48,23 +49,30 @@ pub(super) fn hints(
     if arrow.is_none() {
         label.prepend_str(" -> ");
     }
-    // FIXME?: We could provide text edit to insert braces for closures with non-block body.
-    let text_edit = if has_block_body {
-        ty_to_text_edit(
-            sema,
-            config,
-            closure.syntax(),
-            &ty,
-            arrow
-                .as_ref()
-                .map_or_else(|| param_list.syntax().text_range(), |t| t.text_range())
-                .end(),
-            if arrow.is_none() { " -> " } else { "" },
-        )
-    } else {
-        None
+
+    let offset_to_insert_ty =
+        arrow.as_ref().map_or_else(|| param_list.syntax().text_range(), |t| t.text_range()).end();
+
+    // Wrap a non-block closure body in braces: a written return type requires a block body.
+    let insert_braces = |builder: &mut TextEditBuilder| {
+        if !has_block_body {
+            if let Some(range) = closure.body().map(|b| b.syntax().text_range()) {
+                builder.insert(range.start(), "{ ".to_owned());
+                builder.insert(range.end(), " }".to_owned());
+            }
+        }
     };
 
+    let text_edit = ty_to_text_edit(
+        sema,
+        config,
+        descended_closure.syntax(),
+        &ty,
+        offset_to_insert_ty,
+        &insert_braces,
+        if arrow.is_none() { " -> " } else { "" },
+    );
+
     acc.push(InlayHint {
         range: param_list.syntax().text_range(),
         kind: InlayKind::Type,
@@ -73,14 +81,14 @@ pub(super) fn hints(
         position: InlayHintPosition::After,
         pad_left: false,
         pad_right: false,
-        resolve_parent: Some(closure.syntax().text_range()),
+        resolve_parent,
     });
     Some(())
 }
 
 #[cfg(test)]
 mod tests {
-    use crate::inlay_hints::tests::{check_with_config, DISABLED_CONFIG};
+    use crate::inlay_hints::tests::{DISABLED_CONFIG, check_with_config};
 
     use super::*;
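
Rather than skipping the edit for closures without a block body (the removed
FIXME), the `insert_braces` hook now wraps the body in braces so that
accepting the hint yields valid syntax. A self-contained sketch of the
resulting edit for the closure from the updated bind_pat.rs test; the offsets
here are computed from the snippet, whereas the real code takes them from the
parameter-list and body AST ranges:

    fn main() {
        let src = "let f = |a: S<usize>| S(a);";
        // In the real code these come from the AST: the end of the parameter
        // list and the text range of the closure body.
        let after_params = src.find('|').unwrap() + "|a: S<usize>|".len();
        let body_start = src.find("S(a)").unwrap();
        let body_end = body_start + "S(a)".len();

        // Apply the insertions back to front so earlier offsets stay valid;
        // the real TextEdit applies them atomically.
        let mut edited = String::from(src);
        edited.insert_str(body_end, " }");
        edited.insert_str(body_start, "{ ");
        edited.insert_str(after_params, " -> S<S<usize>>");
        assert_eq!(edited, "let f = |a: S<usize>| -> S<S<usize>> { S(a) };");
    }
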
 
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/discriminant.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/discriminant.rs
index f1e1955d14ca7..827a0438dd022 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/discriminant.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/discriminant.rs
@@ -6,7 +6,7 @@
 //! ```
 use hir::Semantics;
 use ide_db::text_edit::TextEdit;
-use ide_db::{famous_defs::FamousDefs, RootDatabase};
+use ide_db::{RootDatabase, famous_defs::FamousDefs};
 use span::EditionedFileId;
 use syntax::ast::{self, AstNode, HasName};
 
@@ -107,8 +107,8 @@ mod tests {
     use expect_test::expect;
 
     use crate::inlay_hints::{
-        tests::{check_edit, check_with_config, DISABLED_CONFIG},
         DiscriminantHints, InlayHintsConfig,
+        tests::{DISABLED_CONFIG, check_edit, check_with_config},
     };
 
     #[track_caller]
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/extern_block.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/extern_block.rs
index 652dff0bc56e7..20f54b2cd19d6 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/extern_block.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/extern_block.rs
@@ -1,7 +1,7 @@
 //! Extern block hints
 use ide_db::{famous_defs::FamousDefs, text_edit::TextEdit};
 use span::EditionedFileId;
-use syntax::{ast, AstNode, SyntaxToken};
+use syntax::{AstNode, SyntaxToken, ast};
 
 use crate::{InlayHint, InlayHintsConfig};
 
@@ -98,7 +98,7 @@ fn item_hint(
 
 #[cfg(test)]
 mod tests {
-    use crate::inlay_hints::tests::{check_with_config, DISABLED_CONFIG};
+    use crate::inlay_hints::tests::{DISABLED_CONFIG, check_with_config};
 
     #[test]
     fn unadorned() {
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/generic_param.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/generic_param.rs
index 762a4c2655181..6e1b3bdbdf039 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/generic_param.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/generic_param.rs
@@ -1,12 +1,14 @@
 //! Implementation of inlay hints for generic parameters.
+use either::Either;
 use ide_db::{active_parameter::generic_def_for_node, famous_defs::FamousDefs};
 use syntax::{
-    ast::{self, AnyHasGenericArgs, HasGenericArgs, HasName},
     AstNode,
+    ast::{self, AnyHasGenericArgs, HasGenericArgs, HasName},
 };
 
 use crate::{
-    inlay_hints::GenericParameterHints, InlayHint, InlayHintLabel, InlayHintsConfig, InlayKind,
+    InlayHint, InlayHintLabel, InlayHintsConfig, InlayKind,
+    inlay_hints::{GenericParameterHints, param_name},
 };
 
 use super::param_name::is_argument_similar_to_param_name;
@@ -62,8 +64,17 @@ pub(crate) fn hints(
         let param_name = param.name(sema.db);
 
         let should_hide = {
-            let argument = get_string_representation(&arg)?;
-            is_argument_similar_to_param_name(&argument, param_name.as_str())
+            let param_name = param_name.as_str();
+            get_segment_representation(&arg).map_or(false, |seg| match seg {
+                Either::Left(Either::Left(argument)) => {
+                    is_argument_similar_to_param_name(&argument, param_name)
+                }
+                Either::Left(Either::Right(argument)) => argument
+                    .segment()
+                    .and_then(|it| it.name_ref())
+                    .is_some_and(|it| it.text().eq_ignore_ascii_case(param_name)),
+                Either::Right(lifetime) => lifetime.text().eq_ignore_ascii_case(param_name),
+            })
         };
 
         if should_hide {
@@ -91,7 +102,10 @@ pub(crate) fn hints(
                     }
                 };
                 let linked_location = source_syntax.and_then(|it| sema.original_range_opt(&it));
-                linked_location.map(Into::into)
+                linked_location.map(|frange| ide_db::FileRange {
+                    file_id: frange.file_id.file_id(sema.db),
+                    range: frange.range,
+                })
             }),
         );
 
@@ -111,32 +125,34 @@ pub(crate) fn hints(
     Some(())
 }
 
-fn get_string_representation(arg: &ast::GenericArg) -> Option<String> {
+fn get_segment_representation(
+    arg: &ast::GenericArg,
+) -> Option<Either<Either<Vec<ast::NameRef>, ast::Path>, ast::Lifetime>> {
     return match arg {
         ast::GenericArg::AssocTypeArg(_) => None,
-        ast::GenericArg::ConstArg(const_arg) => Some(const_arg.to_string()),
+        ast::GenericArg::ConstArg(const_arg) => {
+            param_name::get_segment_representation(&const_arg.expr()?).map(Either::Left)
+        }
         ast::GenericArg::LifetimeArg(lifetime_arg) => {
             let lifetime = lifetime_arg.lifetime()?;
-            Some(lifetime.to_string())
+            Some(Either::Right(lifetime))
         }
         ast::GenericArg::TypeArg(type_arg) => {
             let ty = type_arg.ty()?;
-            Some(
-                type_path_segment(&ty)
-                    .map_or_else(|| type_arg.to_string(), |segment| segment.to_string()),
-            )
+            type_path(&ty).map(Either::Right).map(Either::Left)
         }
     };
 
-    fn type_path_segment(ty: &ast::Type) -> Option<ast::PathSegment> {
+    fn type_path(ty: &ast::Type) -> Option<ast::Path> {
         match ty {
-            ast::Type::ArrayType(it) => type_path_segment(&it.ty()?),
-            ast::Type::ForType(it) => type_path_segment(&it.ty()?),
-            ast::Type::ParenType(it) => type_path_segment(&it.ty()?),
-            ast::Type::PathType(path_type) => path_type.path()?.segment(),
-            ast::Type::PtrType(it) => type_path_segment(&it.ty()?),
-            ast::Type::RefType(it) => type_path_segment(&it.ty()?),
-            ast::Type::SliceType(it) => type_path_segment(&it.ty()?),
+            ast::Type::ArrayType(it) => type_path(&it.ty()?),
+            ast::Type::ForType(it) => type_path(&it.ty()?),
+            ast::Type::ParenType(it) => type_path(&it.ty()?),
+            ast::Type::PathType(path_type) => path_type.path(),
+            ast::Type::PtrType(it) => type_path(&it.ty()?),
+            ast::Type::RefType(it) => type_path(&it.ty()?),
+            ast::Type::SliceType(it) => type_path(&it.ty()?),
+            ast::Type::MacroType(macro_type) => macro_type.macro_call()?.path(),
             _ => None,
         }
     }
@@ -145,11 +161,11 @@ fn get_string_representation(arg: &ast::GenericArg) -> Option<String> {
 #[cfg(test)]
 mod tests {
     use crate::{
+        InlayHintsConfig,
         inlay_hints::{
-            tests::{check_with_config, DISABLED_CONFIG},
             GenericParameterHints,
+            tests::{DISABLED_CONFIG, check_with_config},
         },
-        InlayHintsConfig,
     };
 
     #[track_caller]
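
With the switch from stringified segments to paths, a generic-argument hint is
now suppressed when the argument's last path segment (or lifetime) matches the
parameter name case-insensitively, and const arguments reuse the parameter-name
logic. A hedged illustration with made-up names, assuming generic-parameter
type hints are enabled:

    struct Key;
    struct Timestamp;
    struct Map<Key, Value>(Key, Value);

    fn main() {
        // `Key` gets no hint (its last path segment equals the `Key`
        // parameter), while `Timestamp` would still be annotated with the
        // `Value` parameter name.
        let _pair = Map::<Key, Timestamp>(Key, Timestamp);
    }
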
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs
index 390139d214eb0..f52e27946fff7 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs
@@ -6,16 +6,17 @@
 //! }
 //! ```
 use hir::{
+    ChalkTyInterner, DefWithBody,
     db::{DefDatabase as _, HirDatabase as _},
     mir::{MirSpan, TerminatorKind},
-    ChalkTyInterner, DefWithBody,
 };
-use ide_db::{famous_defs::FamousDefs, FileRange};
+use ide_db::{FileRange, famous_defs::FamousDefs};
 
 use span::EditionedFileId;
 use syntax::{
+    ToSmolStr,
     ast::{self, AstNode},
-    match_ast, ToSmolStr,
+    match_ast,
 };
 
 use crate::{InlayHint, InlayHintLabel, InlayHintPosition, InlayHintsConfig, InlayKind};
@@ -107,7 +108,7 @@ pub(super) fn hints(
                         .and_then(|d| source_map.pat_syntax(*d).ok())
                         .and_then(|d| {
                             Some(FileRange {
-                                file_id: d.file_id.file_id()?.into(),
+                                file_id: d.file_id.file_id()?.file_id(sema.db),
                                 range: d.value.text_range(),
                             })
                         })
@@ -143,8 +144,8 @@ fn nearest_token_after_node(
 #[cfg(test)]
 mod tests {
     use crate::{
-        inlay_hints::tests::{check_with_config, DISABLED_CONFIG},
         InlayHintsConfig,
+        inlay_hints::tests::{DISABLED_CONFIG, check_with_config},
     };
 
     const ONLY_DROP_CONFIG: InlayHintsConfig =
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_static.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_static.rs
index ae5b519b43d00..f3be09f30a135 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_static.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_static.rs
@@ -7,8 +7,8 @@ use ide_db::famous_defs::FamousDefs;
 use ide_db::text_edit::TextEdit;
 use span::EditionedFileId;
 use syntax::{
-    ast::{self, AstNode},
     SyntaxKind,
+    ast::{self, AstNode},
 };
 
 use crate::{InlayHint, InlayHintPosition, InlayHintsConfig, InlayKind, LifetimeElisionHints};
@@ -56,8 +56,8 @@ pub(super) fn hints(
 #[cfg(test)]
 mod tests {
     use crate::{
-        inlay_hints::tests::{check_with_config, TEST_CONFIG},
         InlayHintsConfig, LifetimeElisionHints,
+        inlay_hints::tests::{TEST_CONFIG, check_with_config},
     };
 
     #[test]
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/lifetime.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/lifetime.rs
index 1fdd698991710..baba49a427d19 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/lifetime.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/lifetime.rs
@@ -4,18 +4,18 @@
 //! ```
 use std::iter;
 
-use ide_db::{famous_defs::FamousDefs, syntax_helpers::node_ext::walk_ty, FxHashMap};
+use ide_db::{FxHashMap, famous_defs::FamousDefs, syntax_helpers::node_ext::walk_ty};
 use itertools::Itertools;
 use span::EditionedFileId;
+use syntax::{SmolStr, format_smolstr};
 use syntax::{
-    ast::{self, AstNode, HasGenericParams, HasName},
     SyntaxKind, SyntaxToken,
+    ast::{self, AstNode, HasGenericParams, HasName},
 };
-use syntax::{format_smolstr, SmolStr};
 
 use crate::{
-    inlay_hints::InlayHintCtx, InlayHint, InlayHintPosition, InlayHintsConfig, InlayKind,
-    LifetimeElisionHints,
+    InlayHint, InlayHintPosition, InlayHintsConfig, InlayKind, LifetimeElisionHints,
+    inlay_hints::InlayHintCtx,
 };
 
 pub(super) fn fn_hints(
@@ -268,13 +268,14 @@ fn hints_(
         ctx.lifetime_stacks.iter().flat_map(|it| it.iter()).cloned().zip(iter::repeat(0)).collect();
     // allocate names
     let mut gen_idx_name = {
-        let mut gen = (0u8..).map(|idx| match idx {
+        let mut generic = (0u8..).map(|idx| match idx {
             idx if idx < 10 => SmolStr::from_iter(['\'', (idx + 48) as char]),
             idx => format_smolstr!("'{idx}"),
         });
         let ctx = &*ctx;
         move || {
-            gen.by_ref()
+            generic
+                .by_ref()
                 .find(|s| ctx.lifetime_stacks.iter().flat_map(|it| it.iter()).all(|n| n != s))
                 .unwrap_or_default()
         }
@@ -406,8 +407,8 @@ fn hints_(
 #[cfg(test)]
 mod tests {
     use crate::{
-        inlay_hints::tests::{check, check_with_config, TEST_CONFIG},
         InlayHintsConfig, LifetimeElisionHints,
+        inlay_hints::tests::{TEST_CONFIG, check, check_with_config},
     };
 
     #[test]
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs
index 8f01b1bd38b50..5ff9fee60abfa 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs
@@ -4,16 +4,14 @@
 //! _ = max(/*x*/4, /*y*/4);
 //! ```
 
+use std::iter::zip;
+
 use either::Either;
-use hir::{Callable, Semantics};
-use ide_db::{famous_defs::FamousDefs, RootDatabase};
+use hir::Semantics;
+use ide_db::{RootDatabase, famous_defs::FamousDefs};
 
-use span::EditionedFileId;
 use stdx::to_lower_snake_case;
-use syntax::{
-    ast::{self, AstNode, HasArgList, HasName, UnaryOp},
-    ToSmolStr,
-};
+use syntax::ast::{self, AstNode, HasArgList, HasName, UnaryOp};
 
 use crate::{InlayHint, InlayHintLabel, InlayHintPosition, InlayHintsConfig, InlayKind};
 
@@ -21,7 +19,6 @@ pub(super) fn hints(
     acc: &mut Vec<InlayHint>,
     FamousDefs(sema, krate): &FamousDefs<'_, '_>,
     config: &InlayHintsConfig,
-    _file_id: EditionedFileId,
     expr: ast::Expr,
 ) -> Option<()> {
     if !config.parameter_hints {
@@ -29,6 +26,12 @@ pub(super) fn hints(
     }
 
     let (callable, arg_list) = get_callable(sema, &expr)?;
+    let unary_function = callable.n_params() == 1;
+    let function_name = match callable.kind() {
+        hir::CallableKind::Function(function) => Some(function.name(sema.db)),
+        _ => None,
+    };
+    let function_name = function_name.as_ref().map(|it| it.as_str());
     let hints = callable
         .params()
         .into_iter()
@@ -40,7 +43,13 @@ pub(super) fn hints(
             Some((p, param_name, arg, range))
         })
         .filter(|(_, param_name, arg, _)| {
-            !should_hide_param_name_hint(sema, &callable, param_name.as_str(), arg)
+            !should_hide_param_name_hint(
+                sema,
+                unary_function,
+                function_name,
+                param_name.as_str(),
+                arg,
+            )
         })
         .map(|(param, param_name, _, hir::FileRange { range, .. })| {
             let colon = if config.render_colons { ":" } else { "" };
@@ -56,7 +65,10 @@ pub(super) fn hints(
                             _ => None,
                         },
                     }?;
-                    sema.original_range_opt(name_syntax.syntax()).map(Into::into)
+                    sema.original_range_opt(name_syntax.syntax()).map(|frange| ide_db::FileRange {
+                        file_id: frange.file_id.file_id(sema.db),
+                        range: frange.range,
+                    })
                 }),
             );
             InlayHint {
@@ -94,9 +106,13 @@ fn get_callable(
     }
 }
 
+const INSIGNIFICANT_METHOD_NAMES: &[&str] = &["clone", "as_ref", "into"];
+const INSIGNIFICANT_PARAMETER_NAMES: &[&str] = &["predicate", "value", "pat", "rhs", "other"];
+
 fn should_hide_param_name_hint(
     sema: &Semantics<'_, RootDatabase>,
-    callable: &hir::Callable,
+    unary_function: bool,
+    function_name: Option<&str>,
     param_name: &str,
     argument: &ast::Expr,
 ) -> bool {
@@ -114,95 +130,128 @@ fn should_hide_param_name_hint(
         return true;
     }
 
-    if matches!(argument, ast::Expr::PrefixExpr(prefix) if prefix.op_kind() == Some(UnaryOp::Not)) {
-        return false;
+    if param_name.starts_with("ra_fixture") {
+        return true;
     }
 
-    let fn_name = match callable.kind() {
-        hir::CallableKind::Function(it) => Some(it.name(sema.db).as_str().to_smolstr()),
-        _ => None,
-    };
-    let fn_name = fn_name.as_deref();
-    is_param_name_suffix_of_fn_name(param_name, callable, fn_name)
-        || is_argument_expr_similar_to_param_name(argument, param_name)
-        || param_name.starts_with("ra_fixture")
-        || (callable.n_params() == 1 && is_obvious_param(param_name))
-        || is_adt_constructor_similar_to_param_name(sema, argument, param_name)
+    if unary_function {
+        if let Some(function_name) = function_name {
+            if is_param_name_suffix_of_fn_name(param_name, function_name) {
+                return true;
+            }
+        }
+        if is_obvious_param(param_name) {
+            return true;
+        }
+    }
+
+    is_argument_expr_similar_to_param_name(sema, argument, param_name)
 }
 
 /// Hide the parameter name of a unary function if it is a `_` - prefixed suffix of the function's name, or equal.
 ///
 /// `fn strip_suffix(suffix)` will be hidden.
 /// `fn stripsuffix(suffix)` will not be hidden.
-fn is_param_name_suffix_of_fn_name(
+fn is_param_name_suffix_of_fn_name(param_name: &str, fn_name: &str) -> bool {
+    fn_name == param_name
+        || fn_name
+            .len()
+            .checked_sub(param_name.len())
+            .and_then(|at| fn_name.is_char_boundary(at).then(|| fn_name.split_at(at)))
+            .is_some_and(|(prefix, suffix)| {
+                suffix.eq_ignore_ascii_case(param_name) && prefix.ends_with('_')
+            })
+}
+
+fn is_argument_expr_similar_to_param_name(
+    sema: &Semantics<'_, RootDatabase>,
+    argument: &ast::Expr,
     param_name: &str,
-    callable: &Callable,
-    fn_name: Option<&str>,
 ) -> bool {
-    match (callable.n_params(), fn_name) {
-        (1, Some(function)) => {
-            function == param_name
-                || function
-                    .len()
-                    .checked_sub(param_name.len())
-                    .and_then(|at| function.is_char_boundary(at).then(|| function.split_at(at)))
-                    .is_some_and(|(prefix, suffix)| {
-                        suffix.eq_ignore_ascii_case(param_name) && prefix.ends_with('_')
-                    })
+    match get_segment_representation(argument) {
+        Some(Either::Left(argument)) => is_argument_similar_to_param_name(&argument, param_name),
+        Some(Either::Right(path)) => {
+            path.segment()
+                .and_then(|it| it.name_ref())
+                .is_some_and(|name_ref| name_ref.text().eq_ignore_ascii_case(param_name))
+                || is_adt_constructor_similar_to_param_name(sema, &path, param_name)
         }
-        _ => false,
+        None => false,
     }
 }
 
-fn is_argument_expr_similar_to_param_name(argument: &ast::Expr, param_name: &str) -> bool {
-    let argument = match get_string_representation(argument) {
-        Some(argument) => argument,
-        None => return false,
-    };
-    is_argument_similar_to_param_name(&argument, param_name)
-}
-
 /// Check whether param_name and argument are the same or
 /// whether param_name is a prefix/suffix of argument(split at `_`).
-pub(super) fn is_argument_similar_to_param_name(argument: &str, param_name: &str) -> bool {
-    // std is honestly too panic happy...
-    let str_split_at = |str: &str, at| str.is_char_boundary(at).then(|| argument.split_at(at));
-
-    let param_name = param_name.trim_start_matches('_');
-    let argument = argument.trim_start_matches('_');
-
-    match str_split_at(argument, param_name.len()) {
-        Some((prefix, rest)) if prefix.eq_ignore_ascii_case(param_name) => {
-            return rest.is_empty() || rest.starts_with('_');
-        }
-        _ => (),
-    }
-    match argument.len().checked_sub(param_name.len()).and_then(|at| str_split_at(argument, at)) {
-        Some((rest, suffix)) if param_name.eq_ignore_ascii_case(suffix) => {
-            return rest.is_empty() || rest.ends_with('_');
-        }
-        _ => (),
-    }
-    false
+pub(super) fn is_argument_similar_to_param_name(
+    argument: &[ast::NameRef],
+    param_name: &str,
+) -> bool {
+    debug_assert!(!argument.is_empty());
+    debug_assert!(!param_name.is_empty());
+    let param_name = param_name.split('_');
+    let argument = argument.iter().flat_map(|it| it.text_non_mutable().split('_'));
+
+    let prefix_match = zip(argument.clone(), param_name.clone())
+        .all(|(arg, param)| arg.eq_ignore_ascii_case(param));
+    let postfix_match = || {
+        zip(argument.rev(), param_name.rev()).all(|(arg, param)| arg.eq_ignore_ascii_case(param))
+    };
+    prefix_match || postfix_match()
 }
 
-fn get_string_representation(expr: &ast::Expr) -> Option<String> {
+pub(super) fn get_segment_representation(
+    expr: &ast::Expr,
+) -> Option<Either<Vec<ast::NameRef>, ast::Path>> {
     match expr {
         ast::Expr::MethodCallExpr(method_call_expr) => {
+            let receiver =
+                method_call_expr.receiver().and_then(|expr| get_segment_representation(&expr));
             let name_ref = method_call_expr.name_ref()?;
-            match name_ref.text().as_str() {
-                "clone" | "as_ref" => method_call_expr.receiver().map(|rec| rec.to_string()),
-                name_ref => Some(name_ref.to_owned()),
+            if INSIGNIFICANT_METHOD_NAMES.contains(&name_ref.text().as_str()) {
+                return receiver;
             }
+            Some(Either::Left(match receiver {
+                Some(Either::Left(mut left)) => {
+                    left.push(name_ref);
+                    left
+                }
+                Some(Either::Right(_)) | None => vec![name_ref],
+            }))
+        }
+        ast::Expr::FieldExpr(field_expr) => {
+            let expr = field_expr.expr().and_then(|expr| get_segment_representation(&expr));
+            let name_ref = field_expr.name_ref()?;
+            let res = match expr {
+                Some(Either::Left(mut left)) => {
+                    left.push(name_ref);
+                    left
+                }
+                Some(Either::Right(_)) | None => vec![name_ref],
+            };
+            Some(Either::Left(res))
         }
-        ast::Expr::MacroExpr(macro_expr) => {
-            Some(macro_expr.macro_call()?.path()?.segment()?.to_string())
+        // paths
+        ast::Expr::MacroExpr(macro_expr) => macro_expr.macro_call()?.path().map(Either::Right),
+        ast::Expr::RecordExpr(record_expr) => record_expr.path().map(Either::Right),
+        ast::Expr::PathExpr(path_expr) => {
+            let path = path_expr.path()?;
+            // single segment paths are likely locals
+            Some(match path.as_single_name_ref() {
+                None => Either::Right(path),
+                Some(name_ref) => Either::Left(vec![name_ref]),
+            })
         }
-        ast::Expr::FieldExpr(field_expr) => Some(field_expr.name_ref()?.to_string()),
-        ast::Expr::PathExpr(path_expr) => Some(path_expr.path()?.segment()?.to_string()),
-        ast::Expr::PrefixExpr(prefix_expr) => get_string_representation(&prefix_expr.expr()?),
-        ast::Expr::RefExpr(ref_expr) => get_string_representation(&ref_expr.expr()?),
-        ast::Expr::CastExpr(cast_expr) => get_string_representation(&cast_expr.expr()?),
+        ast::Expr::PrefixExpr(prefix_expr) if prefix_expr.op_kind() == Some(UnaryOp::Not) => None,
+        // recurse
+        ast::Expr::PrefixExpr(prefix_expr) => get_segment_representation(&prefix_expr.expr()?),
+        ast::Expr::RefExpr(ref_expr) => get_segment_representation(&ref_expr.expr()?),
+        ast::Expr::CastExpr(cast_expr) => get_segment_representation(&cast_expr.expr()?),
+        ast::Expr::CallExpr(call_expr) => get_segment_representation(&call_expr.expr()?),
+        ast::Expr::AwaitExpr(await_expr) => get_segment_representation(&await_expr.expr()?),
+        ast::Expr::IndexExpr(index_expr) => get_segment_representation(&index_expr.base()?),
+        ast::Expr::ParenExpr(paren_expr) => get_segment_representation(&paren_expr.expr()?),
+        ast::Expr::TryExpr(try_expr) => get_segment_representation(&try_expr.expr()?),
+        // ast::Expr::ClosureExpr(closure_expr) => todo!(),
         _ => None,
     }
 }
@@ -210,30 +259,15 @@ fn get_string_representation(expr: &ast::Expr) -> Option<String> {
 fn is_obvious_param(param_name: &str) -> bool {
     // avoid displaying hints for common functions like map, filter, etc.
     // or other obvious words used in std
-    let is_obvious_param_name =
-        matches!(param_name, "predicate" | "value" | "pat" | "rhs" | "other");
-    param_name.len() == 1 || is_obvious_param_name
+    param_name.len() == 1 || INSIGNIFICANT_PARAMETER_NAMES.contains(&param_name)
 }
 
 fn is_adt_constructor_similar_to_param_name(
     sema: &Semantics<'_, RootDatabase>,
-    argument: &ast::Expr,
+    path: &ast::Path,
     param_name: &str,
 ) -> bool {
-    let path = match argument {
-        ast::Expr::CallExpr(c) => c.expr().and_then(|e| match e {
-            ast::Expr::PathExpr(p) => p.path(),
-            _ => None,
-        }),
-        ast::Expr::PathExpr(p) => p.path(),
-        ast::Expr::RecordExpr(r) => r.path(),
-        _ => return false,
-    };
-    let path = match path {
-        Some(it) => it,
-        None => return false,
-    };
-    (|| match sema.resolve_path(&path)? {
+    (|| match sema.resolve_path(path)? {
         hir::PathResolution::Def(hir::ModuleDef::Adt(_)) => {
             Some(to_lower_snake_case(&path.segment()?.name_ref()?.text()) == param_name)
         }
@@ -257,8 +291,8 @@ fn is_adt_constructor_similar_to_param_name(
 #[cfg(test)]
 mod tests {
     use crate::{
-        inlay_hints::tests::{check_with_config, DISABLED_CONFIG},
         InlayHintsConfig,
+        inlay_hints::tests::{DISABLED_CONFIG, check_with_config},
     };
 
     #[track_caller]
@@ -501,6 +535,7 @@ fn enum_matches_param_name(completion_kind: CompletionKind) {}
 
 fn foo(param: u32) {}
 fn bar(param_eter: u32) {}
+fn baz(a_d_e: u32) {}
 
 enum CompletionKind {
     Keyword,
@@ -553,6 +588,14 @@ fn main() {
       //^^^^^^^^^^^ param_eter
 
     non_ident_pat((0, 0));
+
+    baz(a.d.e);
+    baz(a.dc.e);
+     // ^^^^^^ a_d_e
+    baz(ac.d.e);
+     // ^^^^^^ a_d_e
+    baz(a.d.ec);
+     // ^^^^^^ a_d_e
 }"#,
         );
     }
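
The old string comparison is replaced by a segment-wise one: the argument is
decomposed into `NameRef` segments (walking through field accesses and method
chains, with `clone`/`as_ref`/`into` skipped as insignificant), the parameter
name is split at `_`, and the hint is hidden when one sequence is a
case-insensitive prefix or suffix of the other. A self-contained sketch over
plain `&str` segments (stand-ins for the `ast::NameRef`s), mirroring the new
`baz` test cases:

    use std::iter::zip;

    // Mirrors is_argument_similar_to_param_name, but over plain strings.
    fn similar(argument_segments: &[&str], param_name: &str) -> bool {
        let param = param_name.split('_');
        let arg = argument_segments.iter().copied().flat_map(|s| s.split('_'));

        let prefix_match =
            zip(arg.clone(), param.clone()).all(|(a, p)| a.eq_ignore_ascii_case(p));
        let postfix_match =
            || zip(arg.rev(), param.rev()).all(|(a, p)| a.eq_ignore_ascii_case(p));
        prefix_match || postfix_match()
    }

    fn main() {
        assert!(similar(&["a", "d", "e"], "a_d_e"));   // baz(a.d.e)  -> hint hidden
        assert!(!similar(&["a", "dc", "e"], "a_d_e")); // baz(a.dc.e) -> hint shown
        assert!(!similar(&["ac", "d", "e"], "a_d_e")); // baz(ac.d.e) -> hint shown
        assert!(similar(&["config"], "config"));       // config.clone(): `clone`
                                                       // is dropped as insignificant
    }
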
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/range_exclusive.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/range_exclusive.rs
index de9b0e98a4beb..d67d84588402e 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/range_exclusive.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/range_exclusive.rs
@@ -5,7 +5,7 @@
 //! ```
 use ide_db::famous_defs::FamousDefs;
 use span::EditionedFileId;
-use syntax::{ast, SyntaxToken, T};
+use syntax::{SyntaxToken, T, ast};
 
 use crate::{InlayHint, InlayHintsConfig};
 
@@ -41,8 +41,8 @@ fn inlay_hint(token: SyntaxToken) -> InlayHint {
 #[cfg(test)]
 mod tests {
     use crate::{
-        inlay_hints::tests::{check_with_config, DISABLED_CONFIG},
         InlayHintsConfig,
+        inlay_hints::tests::{DISABLED_CONFIG, check_with_config},
     };
 
     #[test]
diff --git a/src/tools/rust-analyzer/crates/ide/src/interpret.rs b/src/tools/rust-analyzer/crates/ide/src/interpret.rs
index 74dad488b4d30..8f9d2d6bf111b 100644
--- a/src/tools/rust-analyzer/crates/ide/src/interpret.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/interpret.rs
@@ -1,8 +1,8 @@
 use hir::{ConstEvalError, DefWithBody, DisplayTarget, Semantics};
-use ide_db::{base_db::SourceRootDatabase, FilePosition, LineIndexDatabase, RootDatabase};
+use ide_db::{FilePosition, LineIndexDatabase, RootDatabase, base_db::SourceDatabase};
 use std::time::{Duration, Instant};
 use stdx::format_to;
-use syntax::{algo::ancestors_at_offset, ast, AstNode, TextRange};
+use syntax::{AstNode, TextRange, algo::ancestors_at_offset, ast};
 
 // Feature: Interpret A Function, Static Or Const.
 //
@@ -35,10 +35,10 @@ fn find_and_interpret(db: &RootDatabase, position: FilePosition) -> Option<(Dura
         _ => return None,
     };
     let span_formatter = |file_id, text_range: TextRange| {
-        let path = &db
-            .source_root(db.file_source_root(file_id))
-            .path_for_file(&file_id)
-            .map(|x| x.to_string());
+        let source_root = db.file_source_root(file_id).source_root_id(db);
+        let source_root = db.source_root(source_root).source_root(db);
+
+        let path = source_root.path_for_file(&file_id).map(|x| x.to_string());
         let path = path.as_deref().unwrap_or("<unknown file>");
         match db.line_index(file_id).try_line_col(text_range.start()) {
             Some(line_col) => format!("file://{path}:{}:{}", line_col.line + 1, line_col.col),
@@ -64,10 +64,9 @@ pub(crate) fn render_const_eval_error(
     display_target: DisplayTarget,
 ) -> String {
     let span_formatter = |file_id, text_range: TextRange| {
-        let path = &db
-            .source_root(db.file_source_root(file_id))
-            .path_for_file(&file_id)
-            .map(|x| x.to_string());
+        let source_root = db.file_source_root(file_id).source_root_id(db);
+        let source_root = db.source_root(source_root).source_root(db);
+        let path = source_root.path_for_file(&file_id).map(|x| x.to_string());
         let path = path.as_deref().unwrap_or("<unknown file>");
         match db.line_index(file_id).try_line_col(text_range.start()) {
             Some(line_col) => format!("file://{path}:{}:{}", line_col.line + 1, line_col.col),
diff --git a/src/tools/rust-analyzer/crates/ide/src/join_lines.rs b/src/tools/rust-analyzer/crates/ide/src/join_lines.rs
index ea18a97070c3a..0188c105faa78 100644
--- a/src/tools/rust-analyzer/crates/ide/src/join_lines.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/join_lines.rs
@@ -2,10 +2,10 @@ use ide_assists::utils::extract_trivial_expression;
 use ide_db::syntax_helpers::node_ext::expr_as_name_ref;
 use itertools::Itertools;
 use syntax::{
-    ast::{self, AstNode, AstToken, IsString},
     NodeOrToken, SourceFile, SyntaxElement,
     SyntaxKind::{self, USE_TREE, WHITESPACE},
-    SyntaxToken, TextRange, TextSize, T,
+    SyntaxToken, T, TextRange, TextSize,
+    ast::{self, AstNode, AstToken, IsString},
 };
 
 use ide_db::text_edit::{TextEdit, TextEditBuilder};
diff --git a/src/tools/rust-analyzer/crates/ide/src/lib.rs b/src/tools/rust-analyzer/crates/ide/src/lib.rs
index 8ac1a96cc6524..a13be6c4927f7 100644
--- a/src/tools/rust-analyzer/crates/ide/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/lib.rs
@@ -20,6 +20,7 @@ mod navigation_target;
 
 mod annotations;
 mod call_hierarchy;
+mod child_modules;
 mod doc_links;
 mod expand_macro;
 mod extend_selection;
@@ -57,23 +58,22 @@ mod view_memory_layout;
 mod view_mir;
 mod view_syntax_tree;
 
-use std::{iter, panic::UnwindSafe};
+use std::panic::{AssertUnwindSafe, UnwindSafe};
 
 use cfg::CfgOptions;
 use fetch_crates::CrateInfo;
-use hir::{sym, ChangeWithProcMacros};
+use hir::{ChangeWithProcMacros, EditionedFileId, sym};
 use ide_db::{
+    FxHashMap, FxIndexSet, LineIndexDatabase,
     base_db::{
-        ra_salsa::{self, ParallelDatabase},
-        CrateOrigin, CrateWorkspaceData, Env, FileLoader, FileSet, SourceDatabase,
-        SourceRootDatabase, VfsPath,
+        CrateOrigin, CrateWorkspaceData, Env, FileSet, RootQueryDb, SourceDatabase, VfsPath,
+        salsa::Cancelled,
     },
-    prime_caches, symbol_index, FxHashMap, FxIndexSet, LineIndexDatabase,
+    prime_caches, symbol_index,
 };
-use span::EditionedFileId;
 use syntax::SourceFile;
 use triomphe::Arc;
-use view_memory_layout::{view_memory_layout, RecursiveMemoryLayout};
+use view_memory_layout::{RecursiveMemoryLayout, view_memory_layout};
 
 use crate::navigation_target::ToNav;
 
@@ -110,8 +110,8 @@ pub use crate::{
         StaticIndex, StaticIndexedFile, TokenId, TokenStaticData, VendoredLibrariesConfig,
     },
     syntax_highlighting::{
-        tags::{Highlight, HlMod, HlMods, HlOperator, HlPunct, HlTag},
         HighlightConfig, HlRange,
+        tags::{Highlight, HlMod, HlMods, HlOperator, HlPunct, HlTag},
     },
     test_explorer::{TestItem, TestItemKind},
 };
@@ -125,7 +125,8 @@ pub use ide_completion::{
 };
 pub use ide_db::text_edit::{Indel, TextEdit};
 pub use ide_db::{
-    base_db::{Cancelled, CrateGraph, CrateId, FileChange, SourceRoot, SourceRootId},
+    FileId, FilePosition, FileRange, RootDatabase, Severity, SymbolKind,
+    base_db::{Crate, CrateGraphBuilder, FileChange, SourceRoot, SourceRootId},
     documentation::Documentation,
     label::Label,
     line_index::{LineCol, LineIndex},
@@ -133,7 +134,6 @@ pub use ide_db::{
     search::{ReferenceCategory, SearchScope},
     source_change::{FileSystemEdit, SnippetEdit, SourceChange},
     symbol_index::Query,
-    FileId, FilePosition, FileRange, RootDatabase, Severity, SymbolKind,
 };
 pub use ide_diagnostics::{Diagnostic, DiagnosticCode, DiagnosticsConfig, ExprFillDefaultMode};
 pub use ide_ssr::SsrError;
@@ -217,7 +217,7 @@ impl Default for AnalysisHost {
 /// `Analysis` are canceled (most method return `Err(Canceled)`).
 #[derive(Debug)]
 pub struct Analysis {
-    db: ra_salsa::Snapshot<RootDatabase>,
+    db: RootDatabase,
 }
 
 // As a general design guideline, `Analysis` API are intended to be independent
@@ -237,34 +237,37 @@ impl Analysis {
         file_set.insert(file_id, VfsPath::new_virtual_path("/main.rs".to_owned()));
         let source_root = SourceRoot::new_local(file_set);
 
-        let mut change = ChangeWithProcMacros::new();
+        let mut change = ChangeWithProcMacros::default();
         change.set_roots(vec![source_root]);
-        let mut crate_graph = CrateGraph::default();
+        let mut crate_graph = CrateGraphBuilder::default();
         // FIXME: cfg options
         // Default to enable test for single file.
         let mut cfg_options = CfgOptions::default();
-        cfg_options.insert_atom(sym::test.clone());
+
+        // FIXME: This is less than ideal
+        let proc_macro_cwd = Arc::new(
+            TryFrom::try_from(&*std::env::current_dir().unwrap().as_path().to_string_lossy())
+                .unwrap(),
+        );
+        cfg_options.insert_atom(sym::test);
         crate_graph.add_crate_root(
             file_id,
             Edition::CURRENT,
             None,
             None,
-            Arc::new(cfg_options),
+            cfg_options,
             None,
             Env::default(),
             CrateOrigin::Local { repo: None, name: None },
             false,
-            None,
-        );
-        change.change_file(file_id, Some(text));
-        let ws_data = crate_graph
-            .iter()
-            .zip(iter::repeat(Arc::new(CrateWorkspaceData {
+            proc_macro_cwd,
+            Arc::new(CrateWorkspaceData {
                 data_layout: Err("fixture has no layout".into()),
                 toolchain: None,
-            })))
-            .collect();
-        change.set_crate_graph(crate_graph, ws_data);
+            }),
+        );
+        change.change_file(file_id, Some(text));
+        change.set_crate_graph(crate_graph);
 
         host.apply_change(change);
         (host.analysis(), file_id)
@@ -276,12 +279,12 @@ impl Analysis {
     }
 
     pub fn source_root_id(&self, file_id: FileId) -> Cancellable<SourceRootId> {
-        self.with_db(|db| db.file_source_root(file_id))
+        self.with_db(|db| db.file_source_root(file_id).source_root_id(db))
     }
 
     pub fn is_local_source_root(&self, source_root_id: SourceRootId) -> Cancellable<bool> {
         self.with_db(|db| {
-            let sr = db.source_root(source_root_id);
+            let sr = db.source_root(source_root_id).source_root(db);
             !sr.is_library
         })
     }
@@ -295,18 +298,25 @@ impl Analysis {
 
     /// Gets the text of the source file.
     pub fn file_text(&self, file_id: FileId) -> Cancellable<Arc<str>> {
-        self.with_db(|db| SourceDatabase::file_text(db, file_id))
+        self.with_db(|db| SourceDatabase::file_text(db, file_id).text(db))
     }
 
     /// Gets the syntax tree of the file.
     pub fn parse(&self, file_id: FileId) -> Cancellable<SourceFile> {
         // FIXME edition
-        self.with_db(|db| db.parse(EditionedFileId::current_edition(file_id)).tree())
+        self.with_db(|db| {
+            let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id);
+
+            db.parse(editioned_file_id_wrapper).tree()
+        })
     }
 
     /// Returns true if this file belongs to an immutable library.
     pub fn is_library_file(&self, file_id: FileId) -> Cancellable<bool> {
-        self.with_db(|db| db.source_root(db.file_source_root(file_id)).is_library)
+        self.with_db(|db| {
+            let source_root = db.file_source_root(file_id).source_root_id(db);
+            db.source_root(source_root).source_root(db).is_library
+        })
     }
 
     /// Gets the file's `LineIndex`: data structure to convert between absolute
@@ -324,7 +334,8 @@ impl Analysis {
     /// supported).
     pub fn matching_brace(&self, position: FilePosition) -> Cancellable<Option<TextSize>> {
         self.with_db(|db| {
-            let parse = db.parse(EditionedFileId::current_edition(position.file_id));
+            let file_id = EditionedFileId::current_edition(&self.db, position.file_id);
+            let parse = db.parse(file_id);
             let file = parse.tree();
             matching_brace::matching_brace(&file, position.offset)
         })
@@ -358,7 +369,7 @@ impl Analysis {
         self.with_db(|db| test_explorer::discover_tests_in_crate_by_test_id(db, crate_id))
     }
 
-    pub fn discover_tests_in_crate(&self, crate_id: CrateId) -> Cancellable<Vec<TestItem>> {
+    pub fn discover_tests_in_crate(&self, crate_id: Crate) -> Cancellable<Vec<TestItem>> {
         self.with_db(|db| test_explorer::discover_tests_in_crate(db, crate_id))
     }
 
@@ -383,7 +394,9 @@ impl Analysis {
     /// stuff like trailing commas.
     pub fn join_lines(&self, config: &JoinLinesConfig, frange: FileRange) -> Cancellable<TextEdit> {
         self.with_db(|db| {
-            let parse = db.parse(EditionedFileId::current_edition(frange.file_id));
+            let editioned_file_id_wrapper =
+                EditionedFileId::current_edition(&self.db, frange.file_id);
+            let parse = db.parse(editioned_file_id_wrapper);
             join_lines::join_lines(config, &parse.tree(), frange.range)
         })
     }
@@ -419,9 +432,9 @@ impl Analysis {
     pub fn file_structure(&self, file_id: FileId) -> Cancellable<Vec<StructureNode>> {
         // FIXME: Edition
         self.with_db(|db| {
-            file_structure::file_structure(
-                &db.parse(EditionedFileId::current_edition(file_id)).tree(),
-            )
+            let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id);
+
+            file_structure::file_structure(&db.parse(editioned_file_id_wrapper).tree())
         })
     }
 
@@ -450,9 +463,9 @@ impl Analysis {
     /// Returns the set of folding ranges.
     pub fn folding_ranges(&self, file_id: FileId) -> Cancellable<Vec<Fold>> {
         self.with_db(|db| {
-            folding_ranges::folding_ranges(
-                &db.parse(EditionedFileId::current_edition(file_id)).tree(),
-            )
+            let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id);
+
+            folding_ranges::folding_ranges(&db.parse(editioned_file_id_wrapper).tree())
         })
     }
 
@@ -506,7 +519,11 @@ impl Analysis {
         position: FilePosition,
         search_scope: Option<SearchScope>,
     ) -> Cancellable<Option<Vec<ReferenceSearchResult>>> {
-        self.with_db(|db| references::find_all_refs(&Semantics::new(db), position, search_scope))
+        let search_scope = AssertUnwindSafe(search_scope);
+        self.with_db(|db| {
+            let _ = &search_scope;
+            references::find_all_refs(&Semantics::new(db), position, search_scope.0)
+        })
     }
 
     /// Returns a short text describing element at position.
@@ -577,34 +594,44 @@ impl Analysis {
         self.with_db(|db| parent_module::parent_module(db, position))
     }
 
+    /// Returns the `mod name;` declarations of the current module, i.e. its child modules.
+    pub fn child_modules(&self, position: FilePosition) -> Cancellable<Vec<NavigationTarget>> {
+        self.with_db(|db| child_modules::child_modules(db, position))
+    }
+
     /// Returns crates that this file belongs to.
-    pub fn crates_for(&self, file_id: FileId) -> Cancellable<Vec<CrateId>> {
+    pub fn crates_for(&self, file_id: FileId) -> Cancellable<Vec<Crate>> {
         self.with_db(|db| parent_module::crates_for(db, file_id))
     }
 
     /// Returns crates that this file belongs to.
-    pub fn transitive_rev_deps(&self, crate_id: CrateId) -> Cancellable<Vec<CrateId>> {
-        self.with_db(|db| db.crate_graph().transitive_rev_deps(crate_id).collect())
+    pub fn transitive_rev_deps(&self, crate_id: Crate) -> Cancellable<Vec<Crate>> {
+        self.with_db(|db| Vec::from_iter(db.transitive_rev_deps(crate_id)))
     }
 
     /// Returns crates that this file *might* belong to.
-    pub fn relevant_crates_for(&self, file_id: FileId) -> Cancellable<Vec<CrateId>> {
+    pub fn relevant_crates_for(&self, file_id: FileId) -> Cancellable<Vec<Crate>> {
         self.with_db(|db| db.relevant_crates(file_id).iter().copied().collect())
     }
 
     /// Returns the edition of the given crate.
-    pub fn crate_edition(&self, crate_id: CrateId) -> Cancellable<Edition> {
-        self.with_db(|db| db.crate_graph()[crate_id].edition)
+    pub fn crate_edition(&self, crate_id: Crate) -> Cancellable<Edition> {
+        self.with_db(|db| crate_id.data(db).edition)
+    }
+
+    /// Returns whether the given crate is a proc macro.
+    pub fn is_proc_macro_crate(&self, crate_id: Crate) -> Cancellable<bool> {
+        self.with_db(|db| crate_id.data(db).is_proc_macro)
     }
 
     /// Returns true if this crate has `no_std` or `no_core` specified.
-    pub fn is_crate_no_std(&self, crate_id: CrateId) -> Cancellable<bool> {
+    pub fn is_crate_no_std(&self, crate_id: Crate) -> Cancellable<bool> {
         self.with_db(|db| hir::db::DefDatabase::crate_def_map(db, crate_id).is_no_std())
     }
 
     /// Returns the root file of the given crate.
-    pub fn crate_root(&self, crate_id: CrateId) -> Cancellable<FileId> {
-        self.with_db(|db| db.crate_graph()[crate_id].root_file_id)
+    pub fn crate_root(&self, crate_id: Crate) -> Cancellable<FileId> {
+        self.with_db(|db| crate_id.data(db).root_file_id)
     }
 
     /// Returns the set of possible targets to run for the current file.
@@ -618,7 +645,11 @@ impl Analysis {
         position: FilePosition,
         search_scope: Option<SearchScope>,
     ) -> Cancellable<Vec<Runnable>> {
-        self.with_db(|db| runnables::related_tests(db, position, search_scope))
+        let search_scope = AssertUnwindSafe(search_scope);
+        self.with_db(|db| {
+            let _ = &search_scope;
+            runnables::related_tests(db, position, search_scope.0)
+        })
     }
 
     /// Computes syntax highlighting for the given file
@@ -717,7 +748,7 @@ impl Analysis {
         frange: FileRange,
     ) -> Cancellable<Vec<Assist>> {
         let include_fixes = match &assist_config.allowed {
-            Some(it) => it.iter().any(|&it| it == AssistKind::None || it == AssistKind::QuickFix),
+            Some(it) => it.contains(&AssistKind::QuickFix),
             None => true,
         };
 
@@ -811,6 +842,10 @@ impl Analysis {
         self.with_db(|db| view_memory_layout(db, position))
     }
 
+    pub fn editioned_file_id_to_vfs(&self, file_id: hir::EditionedFileId) -> FileId {
+        file_id.file_id(&self.db)
+    }
+
     /// Performs an operation on the database that may be canceled.
     ///
     /// rust-analyzer needs to be able to answer semantic questions about the
@@ -828,7 +863,8 @@ impl Analysis {
     where
         F: FnOnce(&RootDatabase) -> T + std::panic::UnwindSafe,
     {
-        Cancelled::catch(|| f(&self.db))
+        let snap = self.db.snapshot();
+        Cancelled::catch(|| f(&snap))
     }
 }
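
`with_db` now takes a per-call database snapshot, which is why `Analysis` can
hold a `RootDatabase` directly. Because cancellation unwinds, the closure must
be `UnwindSafe`, so `search_scope` is passed through an `AssertUnwindSafe`
wrapper and the `let _ = &search_scope;` line forces the closure to capture
the wrapper rather than just its field. A minimal self-contained sketch of
that capture trick, where plain `catch_unwind` stands in for `Cancelled::catch`
and `String` stands in for `SearchScope` (the type that is not unwind-safe in
the real code):

    use std::panic::{self, AssertUnwindSafe};

    fn main() {
        let search_scope = AssertUnwindSafe(Some(String::from("workspace")));
        let result = panic::catch_unwind(|| {
            // Without this line, closure capture analysis would capture only
            // `search_scope.0`, bypassing the wrapper and its UnwindSafe impl.
            let _ = &search_scope;
            search_scope.0.clone()
        });
        assert_eq!(result.unwrap().as_deref(), Some("workspace"));
    }
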
 
diff --git a/src/tools/rust-analyzer/crates/ide/src/matching_brace.rs b/src/tools/rust-analyzer/crates/ide/src/matching_brace.rs
index 67346ea9cf90f..b2b91d6e3cf34 100644
--- a/src/tools/rust-analyzer/crates/ide/src/matching_brace.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/matching_brace.rs
@@ -1,6 +1,6 @@
 use syntax::{
+    SourceFile, SyntaxKind, T, TextSize,
     ast::{self, AstNode},
-    SourceFile, SyntaxKind, TextSize, T,
 };
 
 // Feature: Matching Brace
diff --git a/src/tools/rust-analyzer/crates/ide/src/moniker.rs b/src/tools/rust-analyzer/crates/ide/src/moniker.rs
index 5754b4fa82f43..4a06cd919fc3b 100644
--- a/src/tools/rust-analyzer/crates/ide/src/moniker.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/moniker.rs
@@ -5,15 +5,15 @@ use core::fmt;
 
 use hir::{Adt, AsAssocItem, Crate, HirDisplay, MacroKind, Semantics};
 use ide_db::{
+    FilePosition, RootDatabase,
     base_db::{CrateOrigin, LangCrateOrigin},
     defs::{Definition, IdentClass},
     helpers::pick_best_token,
-    FilePosition, RootDatabase,
 };
 use itertools::Itertools;
 use syntax::{AstNode, SyntaxKind::*, T};
 
-use crate::{doc_links::token_as_doc_comment, parent_module::crates_for, RangeInfo};
+use crate::{RangeInfo, doc_links::token_as_doc_comment, parent_module::crates_for};
 
 #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
 pub enum MonikerDescriptorKind {
@@ -194,11 +194,7 @@ pub(crate) fn def_to_kind(db: &RootDatabase, def: Definition) -> SymbolInformati
         Definition::Function(it) => {
             if it.as_assoc_item(db).is_some() {
                 if it.has_self_param(db) {
-                    if it.has_body(db) {
-                        Method
-                    } else {
-                        TraitMethod
-                    }
+                    if it.has_body(db) { Method } else { TraitMethod }
                 } else {
                     StaticMethod
                 }
@@ -405,7 +401,7 @@ fn display<T: HirDisplay>(db: &RootDatabase, module: hir::Module, it: T) -> Stri
 
 #[cfg(test)]
 mod tests {
-    use crate::{fixture, MonikerResult};
+    use crate::{MonikerResult, fixture};
 
     use super::MonikerKind;
 
diff --git a/src/tools/rust-analyzer/crates/ide/src/move_item.rs b/src/tools/rust-analyzer/crates/ide/src/move_item.rs
index 3fb3a788b9182..f3bb3df1cd8d7 100644
--- a/src/tools/rust-analyzer/crates/ide/src/move_item.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/move_item.rs
@@ -3,9 +3,9 @@ use std::{iter::once, mem};
 use hir::Semantics;
 use ide_db::syntax_helpers::tree_diff::diff;
 use ide_db::text_edit::{TextEdit, TextEditBuilder};
-use ide_db::{helpers::pick_best_token, FileRange, RootDatabase};
+use ide_db::{FileRange, RootDatabase, helpers::pick_best_token};
 use itertools::Itertools;
-use syntax::{ast, match_ast, AstNode, SyntaxElement, SyntaxKind, SyntaxNode, TextRange};
+use syntax::{AstNode, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, ast, match_ast};
 
 #[derive(Copy, Clone, Debug)]
 pub enum Direction {
@@ -174,7 +174,7 @@ fn replace_nodes<'a>(
 #[cfg(test)]
 mod tests {
     use crate::fixture;
-    use expect_test::{expect, Expect};
+    use expect_test::{Expect, expect};
 
     use crate::Direction;
 
diff --git a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs
index d67aaac06fb95..9334b73fc7b4f 100644
--- a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs
@@ -5,19 +5,20 @@ use std::fmt;
 use arrayvec::ArrayVec;
 use either::Either;
 use hir::{
-    db::ExpandDatabase, symbols::FileSymbol, AssocItem, FieldSource, HasContainer, HasCrate,
-    HasSource, HirDisplay, HirFileId, InFile, LocalSource, ModuleSource,
+    AssocItem, FieldSource, HasContainer, HasCrate, HasSource, HirDisplay, HirFileId, InFile,
+    LocalSource, ModuleSource, db::ExpandDatabase, symbols::FileSymbol,
 };
 use ide_db::{
+    FileId, FileRange, RootDatabase, SymbolKind,
     defs::Definition,
     documentation::{Documentation, HasDocs},
-    FileId, FileRange, RootDatabase, SymbolKind,
 };
 use span::Edition;
 use stdx::never;
 use syntax::{
+    AstNode, SmolStr, SyntaxNode, TextRange, ToSmolStr,
     ast::{self, HasName},
-    format_smolstr, AstNode, SmolStr, SyntaxNode, TextRange, ToSmolStr,
+    format_smolstr,
 };
 
 /// `NavigationTarget` represents an element in the editor's UI which you can
@@ -816,14 +817,10 @@ pub(crate) fn orig_range_with_focus_r(
 ) -> UpmappingResult<(FileRange, Option<TextRange>)> {
     let Some(name) = focus_range else { return orig_range_r(db, hir_file, value) };
 
-    let call_kind =
-        || db.lookup_intern_macro_call(hir_file.macro_file().unwrap().macro_call_id).kind;
+    let call_kind = || db.lookup_intern_macro_call(hir_file.macro_file().unwrap()).kind;
 
-    let def_range = || {
-        db.lookup_intern_macro_call(hir_file.macro_file().unwrap().macro_call_id)
-            .def
-            .definition_range(db)
-    };
+    let def_range =
+        || db.lookup_intern_macro_call(hir_file.macro_file().unwrap()).def.definition_range(db);
 
     // FIXME: Also make use of the syntax context to determine which site we are at?
     let value_range = InFile::new(hir_file, value).original_node_file_range_opt(db);
@@ -900,7 +897,7 @@ pub(crate) fn orig_range_with_focus_r(
 
     UpmappingResult {
         call_site: (
-            call_site_range.into(),
+            call_site_range.into_file_id(db),
             call_site_focus.and_then(|hir::FileRange { file_id, range }| {
                 if call_site_range.file_id == file_id && call_site_range.range.contains_range(range)
                 {
@@ -912,7 +909,7 @@ pub(crate) fn orig_range_with_focus_r(
         ),
         def_site: def_site.map(|(def_site_range, def_site_focus)| {
             (
-                def_site_range.into(),
+                def_site_range.into_file_id(db),
                 def_site_focus.and_then(|hir::FileRange { file_id, range }| {
                     if def_site_range.file_id == file_id
                         && def_site_range.range.contains_range(range)
@@ -933,7 +930,10 @@ fn orig_range(
     value: &SyntaxNode,
 ) -> UpmappingResult<(FileRange, Option<TextRange>)> {
     UpmappingResult {
-        call_site: (InFile::new(hir_file, value).original_file_range_rooted(db).into(), None),
+        call_site: (
+            InFile::new(hir_file, value).original_file_range_rooted(db).into_file_id(db),
+            None,
+        ),
         def_site: None,
     }
 }
@@ -944,7 +944,10 @@ fn orig_range_r(
     value: TextRange,
 ) -> UpmappingResult<(FileRange, Option<TextRange>)> {
     UpmappingResult {
-        call_site: (InFile::new(hir_file, value).original_node_file_range(db).0.into(), None),
+        call_site: (
+            InFile::new(hir_file, value).original_node_file_range(db).0.into_file_id(db),
+            None,
+        ),
         def_site: None,
     }
 }
@@ -953,7 +956,7 @@ fn orig_range_r(
 mod tests {
     use expect_test::expect;
 
-    use crate::{fixture, Query};
+    use crate::{Query, fixture};
 
     #[test]
     fn test_nav_for_symbol() {
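
Several hunks in this file (and in later files) replace plain `.into()` conversions with `.into_file_id(db)` or `.file_id(db)`, because mapping an editioned or macro-relative file back to a VFS `FileId` now requires a database lookup. A toy sketch of that shift from a `From` impl to an explicit db-taking method (all types below are simplified stand-ins):

    struct Db {
        // pretend lookup table from editioned ids to plain VFS file ids
        vfs_ids: Vec<u32>,
    }

    #[derive(Clone, Copy)]
    struct EditionedFileId(usize);

    #[derive(Debug, Clone, Copy, PartialEq)]
    struct FileId(u32);

    impl EditionedFileId {
        // Was roughly `impl From<EditionedFileId> for FileId`; once the mapping
        // lives in the database, the conversion has to take `&Db`.
        fn file_id(self, db: &Db) -> FileId {
            FileId(db.vfs_ids[self.0])
        }
    }

    fn main() {
        let db = Db { vfs_ids: vec![10, 11, 12] };
        assert_eq!(EditionedFileId(2).file_id(&db), FileId(12));
    }
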
diff --git a/src/tools/rust-analyzer/crates/ide/src/parent_module.rs b/src/tools/rust-analyzer/crates/ide/src/parent_module.rs
index 6d82f9b0634b4..6dc01c4506336 100644
--- a/src/tools/rust-analyzer/crates/ide/src/parent_module.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/parent_module.rs
@@ -1,7 +1,7 @@
-use hir::{db::DefDatabase, Semantics};
+use hir::{Semantics, db::DefDatabase};
 use ide_db::{
-    base_db::{CrateId, FileLoader},
     FileId, FilePosition, RootDatabase,
+    base_db::{Crate, RootQueryDb},
 };
 use itertools::Itertools;
 use syntax::{
@@ -53,11 +53,13 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<Na
 }
 
 /// This returns `Vec` because a module may be included from several places.
-pub(crate) fn crates_for(db: &RootDatabase, file_id: FileId) -> Vec<CrateId> {
+pub(crate) fn crates_for(db: &RootDatabase, file_id: FileId) -> Vec<Crate> {
     db.relevant_crates(file_id)
         .iter()
         .copied()
-        .filter(|&crate_id| db.crate_def_map(crate_id).modules_for_file(file_id).next().is_some())
+        .filter(|&crate_id| {
+            db.crate_def_map(crate_id).modules_for_file(db, file_id).next().is_some()
+        })
         .sorted()
         .collect()
 }
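
`crates_for` keeps only the relevant crates whose def map actually defines a module for the file, then returns them in a stable order. A standalone sketch of the same filter-and-sort idiom over toy maps (plain `sort_unstable`, no itertools):

    use std::collections::BTreeMap;

    type FileId = u32;
    type CrateId = u32;

    fn crates_for(
        relevant_crates: &BTreeMap<FileId, Vec<CrateId>>,
        modules_by_crate: &BTreeMap<CrateId, Vec<FileId>>,
        file: FileId,
    ) -> Vec<CrateId> {
        let mut out: Vec<CrateId> = relevant_crates
            .get(&file)
            .into_iter()
            .flatten()
            .copied()
            // a file may be mapped to a crate without being part of its module tree
            .filter(|krate| modules_by_crate.get(krate).is_some_and(|files| files.contains(&file)))
            .collect();
        out.sort_unstable();
        out
    }

    fn main() {
        let relevant = BTreeMap::from([(1, vec![20, 10])]);
        let modules = BTreeMap::from([(10, vec![1]), (20, vec![2])]);
        assert_eq!(crates_for(&relevant, &modules, 1), vec![10]);
    }
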
diff --git a/src/tools/rust-analyzer/crates/ide/src/references.rs b/src/tools/rust-analyzer/crates/ide/src/references.rs
index 069818d50e76a..4fa116444b7ff 100644
--- a/src/tools/rust-analyzer/crates/ide/src/references.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/references.rs
@@ -11,21 +11,22 @@
 
 use hir::{PathResolution, Semantics};
 use ide_db::{
+    FileId, RootDatabase,
     defs::{Definition, NameClass, NameRefClass},
     search::{ReferenceCategory, SearchScope, UsageSearchResult},
-    FileId, RootDatabase,
 };
 use itertools::Itertools;
 use nohash_hasher::IntMap;
 use span::Edition;
 use syntax::{
-    ast::{self, HasName},
-    match_ast, AstNode,
+    AstNode,
     SyntaxKind::*,
-    SyntaxNode, TextRange, TextSize, T,
+    SyntaxNode, T, TextRange, TextSize,
+    ast::{self, HasName},
+    match_ast,
 };
 
-use crate::{highlight_related, FilePosition, HighlightedRange, NavigationTarget, TryToNav};
+use crate::{FilePosition, HighlightedRange, NavigationTarget, TryToNav, highlight_related};
 
 #[derive(Debug, Clone)]
 pub struct ReferenceSearchResult {
@@ -67,7 +68,7 @@ pub(crate) fn find_all_refs(
                 .into_iter()
                 .map(|(file_id, refs)| {
                     (
-                        file_id.into(),
+                        file_id.file_id(sema.db),
                         refs.into_iter()
                             .map(|file_ref| (file_ref.range, file_ref.category))
                             .unique()
@@ -123,11 +124,11 @@ pub(crate) fn find_all_refs(
     }
 }
 
-pub(crate) fn find_defs<'a>(
-    sema: &'a Semantics<'_, RootDatabase>,
+pub(crate) fn find_defs(
+    sema: &Semantics<'_, RootDatabase>,
     syntax: &SyntaxNode,
     offset: TextSize,
-) -> Option<impl IntoIterator<Item = Definition> + 'a> {
+) -> Option<Vec<Definition>> {
     let token = syntax.token_at_offset(offset).find(|t| {
         matches!(
             t.kind(),
@@ -306,8 +307,10 @@ fn handle_control_flow_keywords(
     FilePosition { file_id, offset }: FilePosition,
 ) -> Option<ReferenceSearchResult> {
     let file = sema.parse_guess_edition(file_id);
-    let edition =
-        sema.attach_first_edition(file_id).map(|it| it.edition()).unwrap_or(Edition::CURRENT);
+    let edition = sema
+        .attach_first_edition(file_id)
+        .map(|it| it.edition(sema.db))
+        .unwrap_or(Edition::CURRENT);
     let token = file.syntax().token_at_offset(offset).find(|t| t.kind().is_keyword(edition))?;
 
     let references = match token.kind() {
@@ -327,7 +330,7 @@ fn handle_control_flow_keywords(
             .into_iter()
             .map(|HighlightedRange { range, category }| (range, category))
             .collect();
-        (file_id.into(), ranges)
+        (file_id.file_id(sema.db), ranges)
     })
     .collect();
 
@@ -336,12 +339,12 @@ fn handle_control_flow_keywords(
 
 #[cfg(test)]
 mod tests {
-    use expect_test::{expect, Expect};
-    use ide_db::FileId;
-    use span::EditionedFileId;
+    use expect_test::{Expect, expect};
+    use hir::EditionedFileId;
+    use ide_db::{FileId, RootDatabase};
     use stdx::format_to;
 
-    use crate::{fixture, SearchScope};
+    use crate::{SearchScope, fixture};
 
     #[test]
     fn exclude_tests() {
@@ -1003,7 +1006,9 @@ pub(super) struct Foo$0 {
 
         check_with_scope(
             code,
-            Some(SearchScope::single_file(EditionedFileId::current_edition(FileId::from_raw(2)))),
+            Some(&mut |db| {
+                SearchScope::single_file(EditionedFileId::current_edition(db, FileId::from_raw(2)))
+            }),
             expect![[r#"
                 quux Function FileId(0) 19..35 26..30
 
@@ -1259,11 +1264,12 @@ impl Foo {
 
     fn check_with_scope(
         #[rust_analyzer::rust_fixture] ra_fixture: &str,
-        search_scope: Option<SearchScope>,
+        search_scope: Option<&mut dyn FnMut(&RootDatabase) -> SearchScope>,
         expect: Expect,
     ) {
         let (analysis, pos) = fixture::position(ra_fixture);
-        let refs = analysis.find_all_refs(pos, search_scope).unwrap().unwrap();
+        let refs =
+            analysis.find_all_refs(pos, search_scope.map(|it| it(&analysis.db))).unwrap().unwrap();
 
         let mut actual = String::new();
         for mut refs in refs {
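
`check_with_scope` now takes an optional scope builder rather than a prebuilt `SearchScope`, because constructing the scope needs the database that only exists once the fixture is loaded. A minimal model of passing a constructor closure instead of a value (toy `Db` and `Scope` types, not the real ones):

    struct Db {
        current_edition: u32,
    }

    #[derive(Debug)]
    struct Scope {
        file: u32,
        edition: u32,
    }

    // The caller can no longer hand over a finished Scope, so it hands over a
    // way to build one once `db` is available.
    fn find_refs(db: &Db, make_scope: Option<&mut dyn FnMut(&Db) -> Scope>) -> usize {
        let scope = make_scope.map(|f| f(db));
        // pretend the reference count depends on the resolved scope
        scope.map_or(10, |s| (s.file + s.edition) as usize)
    }

    fn main() {
        let db = Db { current_edition: 2024 };
        let mut build = |db: &Db| Scope { file: 2, edition: db.current_edition };
        assert_eq!(find_refs(&db, Some(&mut build)), 2026);
    }
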
diff --git a/src/tools/rust-analyzer/crates/ide/src/rename.rs b/src/tools/rust-analyzer/crates/ide/src/rename.rs
index d0e1c2097a7a9..e6cda60cd95b5 100644
--- a/src/tools/rust-analyzer/crates/ide/src/rename.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/rename.rs
@@ -4,16 +4,16 @@
 //! tests. This module also implements a couple of magic tricks, like renaming
 //! `self` and to `self` (to switch between associated function and method).
 
-use hir::{AsAssocItem, HirFileIdExt, InFile, Semantics};
+use hir::{AsAssocItem, InFile, Semantics};
 use ide_db::{
+    FileId, FileRange, RootDatabase,
     defs::{Definition, NameClass, NameRefClass},
-    rename::{bail, format_err, source_edit_from_references, IdentifierKind},
+    rename::{IdentifierKind, bail, format_err, source_edit_from_references},
     source_change::SourceChangeBuilder,
-    FileId, FileRange, RootDatabase,
 };
 use itertools::Itertools;
 use stdx::{always, never};
-use syntax::{ast, AstNode, SyntaxKind, SyntaxNode, TextRange, TextSize};
+use syntax::{AstNode, SyntaxKind, SyntaxNode, TextRange, TextSize, ast};
 
 use ide_db::text_edit::TextEdit;
 
@@ -120,7 +120,7 @@ pub(crate) fn rename(
                 source_change.extend(usages.references.get_mut(&file_id).iter().map(|refs| {
                     (
                         position.file_id,
-                        source_edit_from_references(refs, def, new_name, file_id.edition()),
+                        source_edit_from_references(refs, def, new_name, file_id.edition(db)),
                     )
                 }));
 
@@ -297,7 +297,7 @@ fn find_definitions(
                 // remove duplicates, comparing `Definition`s
                 Ok(v.into_iter()
                     .unique_by(|&(.., def)| def)
-                    .map(|(a, b, c)| (a.into(), b, c))
+                    .map(|(a, b, c)| (a.into_file_id(sema.db), b, c))
                     .collect::<Vec<_>>()
                     .into_iter())
             }
@@ -368,10 +368,13 @@ fn rename_to_self(
     let usages = def.usages(sema).all();
     let mut source_change = SourceChange::default();
     source_change.extend(usages.iter().map(|(file_id, references)| {
-        (file_id.into(), source_edit_from_references(references, def, "self", file_id.edition()))
+        (
+            file_id.file_id(sema.db),
+            source_edit_from_references(references, def, "self", file_id.edition(sema.db)),
+        )
     }));
     source_change.insert_source_edit(
-        file_id.original_file(sema.db),
+        file_id.original_file(sema.db).file_id(sema.db),
         TextEdit::replace(param_source.syntax().text_range(), String::from(self_param)),
     );
     Ok(source_change)
@@ -402,9 +405,12 @@ fn rename_self_to_param(
         bail!("Cannot rename reference to `_` as it is being referenced multiple times");
     }
     let mut source_change = SourceChange::default();
-    source_change.insert_source_edit(file_id.original_file(sema.db), edit);
+    source_change.insert_source_edit(file_id.original_file(sema.db).file_id(sema.db), edit);
     source_change.extend(usages.iter().map(|(file_id, references)| {
-        (file_id.into(), source_edit_from_references(references, def, new_name, file_id.edition()))
+        (
+            file_id.file_id(sema.db),
+            source_edit_from_references(references, def, new_name, file_id.edition(sema.db)),
+        )
     }));
     Ok(source_change)
 }
@@ -443,7 +449,7 @@ fn text_edit_from_self_param(self_param: &ast::SelfParam, new_name: &str) -> Opt
 
 #[cfg(test)]
 mod tests {
-    use expect_test::{expect, Expect};
+    use expect_test::{Expect, expect};
     use ide_db::source_change::SourceChange;
     use ide_db::text_edit::TextEdit;
     use itertools::Itertools;
@@ -509,10 +515,9 @@ mod tests {
         let found_conflicts = source_change
             .source_file_edits
             .iter()
+            .filter(|(_, (edit, _))| edit.change_annotation().is_some())
             .flat_map(|(file_id, (edit, _))| {
-                edit.into_iter()
-                    .filter(|edit| edit.annotation.is_some())
-                    .map(move |edit| (*file_id, edit.delete))
+                edit.into_iter().map(move |edit| (*file_id, edit.delete))
             })
             .sorted_unstable_by_key(|(file_id, range)| (*file_id, range.start()))
             .collect_vec();
@@ -1081,7 +1086,6 @@ mod foo$0;
                             Indel {
                                 insert: "foo2",
                                 delete: 4..7,
-                                annotation: None,
                             },
                         ],
                     ),
@@ -1129,7 +1133,6 @@ use crate::foo$0::FooContent;
                             Indel {
                                 insert: "quux",
                                 delete: 8..11,
-                                annotation: None,
                             },
                         ],
                     ),
@@ -1141,7 +1144,6 @@ use crate::foo$0::FooContent;
                             Indel {
                                 insert: "quux",
                                 delete: 11..14,
-                                annotation: None,
                             },
                         ],
                     ),
@@ -1183,7 +1185,6 @@ mod fo$0o;
                             Indel {
                                 insert: "foo2",
                                 delete: 4..7,
-                                annotation: None,
                             },
                         ],
                     ),
@@ -1232,7 +1233,6 @@ mod outer { mod fo$0o; }
                             Indel {
                                 insert: "bar",
                                 delete: 16..19,
-                                annotation: None,
                             },
                         ],
                     ),
@@ -1304,7 +1304,6 @@ pub mod foo$0;
                             Indel {
                                 insert: "foo2",
                                 delete: 27..30,
-                                annotation: None,
                             },
                         ],
                     ),
@@ -1316,7 +1315,6 @@ pub mod foo$0;
                             Indel {
                                 insert: "foo2",
                                 delete: 8..11,
-                                annotation: None,
                             },
                         ],
                     ),
@@ -1372,7 +1370,6 @@ mod quux;
                             Indel {
                                 insert: "foo2",
                                 delete: 4..7,
-                                annotation: None,
                             },
                         ],
                     ),
@@ -1506,12 +1503,10 @@ pub fn baz() {}
                             Indel {
                                 insert: "r#fn",
                                 delete: 4..7,
-                                annotation: None,
                             },
                             Indel {
                                 insert: "r#fn",
                                 delete: 22..25,
-                                annotation: None,
                             },
                         ],
                     ),
@@ -1576,12 +1571,10 @@ pub fn baz() {}
                             Indel {
                                 insert: "foo",
                                 delete: 4..8,
-                                annotation: None,
                             },
                             Indel {
                                 insert: "foo",
                                 delete: 23..27,
-                                annotation: None,
                             },
                         ],
                     ),
@@ -1643,7 +1636,6 @@ fn bar() {
                             Indel {
                                 insert: "dyn",
                                 delete: 7..10,
-                                annotation: None,
                             },
                         ],
                     ),
@@ -1655,7 +1647,6 @@ fn bar() {
                             Indel {
                                 insert: "r#dyn",
                                 delete: 18..21,
-                                annotation: None,
                             },
                         ],
                     ),
@@ -1685,7 +1676,6 @@ fn bar() {
                             Indel {
                                 insert: "r#dyn",
                                 delete: 7..10,
-                                annotation: None,
                             },
                         ],
                     ),
@@ -1697,7 +1687,6 @@ fn bar() {
                             Indel {
                                 insert: "dyn",
                                 delete: 18..21,
-                                annotation: None,
                             },
                         ],
                     ),
@@ -1727,7 +1716,6 @@ fn bar() {
                             Indel {
                                 insert: "r#dyn",
                                 delete: 7..10,
-                                annotation: None,
                             },
                         ],
                     ),
@@ -1739,7 +1727,6 @@ fn bar() {
                             Indel {
                                 insert: "dyn",
                                 delete: 18..21,
-                                annotation: None,
                             },
                         ],
                     ),
@@ -1776,12 +1763,10 @@ fn bar() {
                             Indel {
                                 insert: "abc",
                                 delete: 7..10,
-                                annotation: None,
                             },
                             Indel {
                                 insert: "abc",
                                 delete: 32..35,
-                                annotation: None,
                             },
                         ],
                     ),
@@ -1793,7 +1778,6 @@ fn bar() {
                             Indel {
                                 insert: "abc",
                                 delete: 18..23,
-                                annotation: None,
                             },
                         ],
                     ),
@@ -1827,12 +1811,10 @@ fn bar() {
                             Indel {
                                 insert: "abc",
                                 delete: 7..12,
-                                annotation: None,
                             },
                             Indel {
                                 insert: "abc",
                                 delete: 34..39,
-                                annotation: None,
                             },
                         ],
                     ),
@@ -1844,7 +1826,6 @@ fn bar() {
                             Indel {
                                 insert: "abc",
                                 delete: 18..21,
-                                annotation: None,
                             },
                         ],
                     ),
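
The expect updates above reflect `annotation` moving from each `Indel` onto the owning `TextEdit`, which is also why the conflict check can now filter whole edits with `change_annotation()` before flattening the indels. A toy model of that per-edit filter (simplified structs, a string annotation standing in for the real change annotation id):

    #[derive(Debug, Clone, Copy)]
    struct Indel {
        start: u32,
        end: u32,
    }

    #[derive(Debug)]
    struct TextEdit {
        indels: Vec<Indel>,
        // previously each Indel carried its own annotation field
        annotation: Option<&'static str>,
    }

    fn conflict_ranges(edits: &[(u32, TextEdit)]) -> Vec<(u32, (u32, u32))> {
        edits
            .iter()
            .filter(|(_, edit)| edit.annotation.is_some())
            .flat_map(|(file, edit)| edit.indels.iter().map(move |i| (*file, (i.start, i.end))))
            .collect()
    }

    fn main() {
        let edits = vec![
            (0, TextEdit { indels: vec![Indel { start: 4, end: 7 }], annotation: Some("conflict") }),
            (1, TextEdit { indels: vec![Indel { start: 8, end: 11 }], annotation: None }),
        ];
        assert_eq!(conflict_ranges(&edits), vec![(0, (4, 7))]);
    }
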
diff --git a/src/tools/rust-analyzer/crates/ide/src/runnables.rs b/src/tools/rust-analyzer/crates/ide/src/runnables.rs
index b8deed01fb7f2..ab139602404cb 100644
--- a/src/tools/rust-analyzer/crates/ide/src/runnables.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/runnables.rs
@@ -4,28 +4,29 @@ use arrayvec::ArrayVec;
 use ast::HasName;
 use cfg::{CfgAtom, CfgExpr};
 use hir::{
-    db::HirDatabase, sym, symbols::FxIndexSet, AsAssocItem, AttrsWithOwner, HasAttrs, HasCrate,
-    HasSource, HirFileIdExt, ModPath, Name, PathKind, Semantics, Symbol,
+    AsAssocItem, AttrsWithOwner, HasAttrs, HasCrate, HasSource, ModPath, Name, PathKind, Semantics,
+    Symbol, db::HirDatabase, sym, symbols::FxIndexSet,
 };
 use ide_assists::utils::{has_test_related_attribute, test_related_attribute_syn};
 use ide_db::{
-    base_db::SourceDatabase,
+    FilePosition, FxHashMap, FxIndexMap, RootDatabase, SymbolKind,
+    base_db::RootQueryDb,
     defs::Definition,
     documentation::docs_from_attrs,
     helpers::visit_file_defs,
     search::{FileReferenceNode, SearchScope},
-    FilePosition, FxHashMap, FxIndexMap, RootDatabase, SymbolKind,
 };
 use itertools::Itertools;
 use smallvec::SmallVec;
 use span::{Edition, TextSize};
 use stdx::format_to;
 use syntax::{
+    SmolStr, SyntaxNode, ToSmolStr,
     ast::{self, AstNode},
-    format_smolstr, SmolStr, SyntaxNode, ToSmolStr,
+    format_smolstr,
 };
 
-use crate::{references, FileId, NavigationTarget, ToNav, TryToNav};
+use crate::{FileId, NavigationTarget, ToNav, TryToNav, references};
 
 #[derive(Debug, Clone, Hash, PartialEq, Eq)]
 pub struct Runnable {
@@ -284,8 +285,10 @@ fn find_related_tests_in_module(
 
     let file_id = mod_source.file_id.original_file(sema.db);
     let mod_scope = SearchScope::file_range(hir::FileRange { file_id, range: mod_source.value });
-    let fn_pos =
-        FilePosition { file_id: file_id.into(), offset: fn_name.syntax().text_range().start() };
+    let fn_pos = FilePosition {
+        file_id: file_id.file_id(sema.db),
+        offset: fn_name.syntax().text_range().start(),
+    };
     find_related_tests(sema, syntax, fn_pos, Some(mod_scope), tests)
 }
 
@@ -499,7 +502,7 @@ fn module_def_doctest(db: &RootDatabase, def: Definition) -> Option<Runnable> {
     let krate = def.krate(db);
     let edition = krate.map(|it| it.edition(db)).unwrap_or(Edition::CURRENT);
     let display_target = krate
-        .unwrap_or_else(|| (*db.crate_graph().crates_in_topological_order().last().unwrap()).into())
+        .unwrap_or_else(|| (*db.all_crates().last().expect("no crate graph present")).into())
         .to_display_target(db);
     if !has_runnable_doc_test(&attrs) {
         return None;
@@ -752,7 +755,7 @@ impl UpdateTest {
 
 #[cfg(test)]
 mod tests {
-    use expect_test::{expect, Expect};
+    use expect_test::{Expect, expect};
 
     use crate::fixture;
 
@@ -1209,13 +1212,13 @@ impl Foo {
             r#"
 //- /lib.rs
 $0
-macro_rules! gen {
+macro_rules! generate {
     () => {
         #[test]
         fn foo_test() {}
     }
 }
-macro_rules! gen2 {
+macro_rules! generate2 {
     () => {
         mod tests2 {
             #[test]
@@ -1223,25 +1226,25 @@ macro_rules! gen2 {
         }
     }
 }
-macro_rules! gen_main {
+macro_rules! generate_main {
     () => {
         fn main() {}
     }
 }
 mod tests {
-    gen!();
+    generate!();
 }
-gen2!();
-gen_main!();
+generate2!();
+generate_main!();
 "#,
             expect![[r#"
                 [
-                    "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..315, name: \"\", kind: Module })",
-                    "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 267..292, focus_range: 271..276, name: \"tests\", kind: Module, description: \"mod tests\" })",
-                    "(Test, NavigationTarget { file_id: FileId(0), full_range: 283..290, name: \"foo_test\", kind: Function })",
-                    "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 293..301, name: \"tests2\", kind: Module, description: \"mod tests2\" }, true)",
-                    "(Test, NavigationTarget { file_id: FileId(0), full_range: 293..301, name: \"foo_test2\", kind: Function }, true)",
-                    "(Bin, NavigationTarget { file_id: FileId(0), full_range: 302..314, name: \"main\", kind: Function })",
+                    "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..345, name: \"\", kind: Module })",
+                    "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 282..312, focus_range: 286..291, name: \"tests\", kind: Module, description: \"mod tests\" })",
+                    "(Test, NavigationTarget { file_id: FileId(0), full_range: 298..310, name: \"foo_test\", kind: Function })",
+                    "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 313..326, name: \"tests2\", kind: Module, description: \"mod tests2\" }, true)",
+                    "(Test, NavigationTarget { file_id: FileId(0), full_range: 313..326, name: \"foo_test2\", kind: Function }, true)",
+                    "(Bin, NavigationTarget { file_id: FileId(0), full_range: 327..344, name: \"main\", kind: Function })",
                 ]
             "#]],
         );
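
The shifted offsets in the expect block above follow directly from the fixture renames: `gen`, `gen2`, and `gen_main` each gain five characters (`generate`, `generate2`, `generate_main`), and each name occurs twice (definition plus invocation), so the file grows by 30 bytes, which matches the module `full_range` change from 0..315 to 0..345. A throwaway check of that arithmetic:

    fn main() {
        // 3 macros renamed, each name written twice (macro_rules! definition + invocation),
        // each rename adds "generate".len() - "gen".len() = 5 characters.
        let growth = 3 * 2 * ("generate".len() - "gen".len());
        assert_eq!(growth, 30);
        assert_eq!(315 + growth, 345); // matches the updated full_range above
    }
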
diff --git a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs
index b5468a5aee9ff..0e17b35590747 100644
--- a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs
@@ -5,20 +5,22 @@ use std::collections::BTreeSet;
 
 use either::Either;
 use hir::{
-    AssocItem, DisplayTarget, GenericParam, HirDisplay, ModuleDef, PathResolution, Semantics, Trait,
+    AssocItem, DisplayTarget, GenericDef, GenericParam, HirDisplay, ModuleDef, PathResolution,
+    Semantics, Trait,
 };
 use ide_db::{
-    active_parameter::{callable_for_node, generic_def_for_node},
-    documentation::{Documentation, HasDocs},
     FilePosition, FxIndexMap,
+    active_parameter::{callable_for_arg_list, generic_def_for_node},
+    documentation::{Documentation, HasDocs},
 };
+use itertools::Itertools;
 use span::Edition;
 use stdx::format_to;
 use syntax::{
-    algo,
-    ast::{self, AstChildren, HasArgList},
-    match_ast, AstNode, Direction, NodeOrToken, SyntaxElementChildren, SyntaxNode, SyntaxToken,
-    TextRange, TextSize, ToSmolStr, T,
+    AstNode, Direction, NodeOrToken, SyntaxElementChildren, SyntaxNode, SyntaxToken, T, TextRange,
+    TextSize, ToSmolStr, algo,
+    ast::{self, AstChildren},
+    match_ast,
 };
 
 use crate::RootDatabase;
@@ -83,8 +85,8 @@ pub(crate) fn signature_help(
         .and_then(|tok| algo::skip_trivia_token(tok, Direction::Prev))?;
     let token = sema.descend_into_macros_single_exact(token);
     let edition =
-        sema.attach_first_edition(file_id).map(|it| it.edition()).unwrap_or(Edition::CURRENT);
-    let display_target = sema.first_crate_or_default(file_id).to_display_target(db);
+        sema.attach_first_edition(file_id).map(|it| it.edition(db)).unwrap_or(Edition::CURRENT);
+    let display_target = sema.first_crate(file_id)?.to_display_target(db);
 
     for node in token.parent_ancestors() {
         match_ast! {
@@ -163,20 +165,8 @@ fn signature_help_for_call(
     edition: Edition,
     display_target: DisplayTarget,
 ) -> Option<SignatureHelp> {
-    // Find the calling expression and its NameRef
-    let mut nodes = arg_list.syntax().ancestors().skip(1);
-    let calling_node = loop {
-        if let Some(callable) = ast::CallableExpr::cast(nodes.next()?) {
-            let inside_callable = callable
-                .arg_list()
-                .is_some_and(|it| it.syntax().text_range().contains(token.text_range().start()));
-            if inside_callable {
-                break callable;
-            }
-        }
-    };
-
-    let (callable, active_parameter) = callable_for_node(sema, &calling_node, &token)?;
+    let (callable, active_parameter) =
+        callable_for_arg_list(sema, arg_list, token.text_range().start())?;
 
     let mut res =
         SignatureHelp { doc: None, signature: String::new(), parameters: vec![], active_parameter };
@@ -187,6 +177,20 @@ fn signature_help_for_call(
         hir::CallableKind::Function(func) => {
             res.doc = func.docs(db);
             format_to!(res.signature, "fn {}", func.name(db).display(db, edition));
+
+            let generic_params = GenericDef::Function(func)
+                .params(db)
+                .iter()
+                .filter(|param| match param {
+                    GenericParam::TypeParam(type_param) => !type_param.is_implicit(db),
+                    GenericParam::ConstParam(_) | GenericParam::LifetimeParam(_) => true,
+                })
+                .map(|param| param.display(db, display_target))
+                .join(", ");
+            if !generic_params.is_empty() {
+                format_to!(res.signature, "<{}>", generic_params);
+            }
+
             fn_params = Some(match callable.receiver_param(db) {
                 Some(_self) => func.params_without_self(db),
                 None => func.assoc_fn_params(db),
@@ -195,15 +199,34 @@ fn signature_help_for_call(
         hir::CallableKind::TupleStruct(strukt) => {
             res.doc = strukt.docs(db);
             format_to!(res.signature, "struct {}", strukt.name(db).display(db, edition));
+
+            let generic_params = GenericDef::Adt(strukt.into())
+                .params(db)
+                .iter()
+                .map(|param| param.display(db, display_target))
+                .join(", ");
+            if !generic_params.is_empty() {
+                format_to!(res.signature, "<{}>", generic_params);
+            }
         }
         hir::CallableKind::TupleEnumVariant(variant) => {
             res.doc = variant.docs(db);
             format_to!(
                 res.signature,
-                "enum {}::{}",
+                "enum {}",
                 variant.parent_enum(db).name(db).display(db, edition),
-                variant.name(db).display(db, edition)
             );
+
+            let generic_params = GenericDef::Adt(variant.parent_enum(db).into())
+                .params(db)
+                .iter()
+                .map(|param| param.display(db, display_target))
+                .join(", ");
+            if !generic_params.is_empty() {
+                format_to!(res.signature, "<{}>", generic_params);
+            }
+
+            format_to!(res.signature, "::{}", variant.name(db).display(db, edition))
         }
         hir::CallableKind::Closure(closure) => {
             let fn_trait = closure.fn_trait(db);
@@ -327,7 +350,7 @@ fn signature_help_for_generics(
         }
         // These don't have generic args that can be specified
         hir::GenericDef::Impl(_) | hir::GenericDef::Const(_) | hir::GenericDef::Static(_) => {
-            return None
+            return None;
         }
     }
 
@@ -351,6 +374,20 @@ fn signature_help_for_generics(
 
         buf.clear();
         format_to!(buf, "{}", param.display(db, display_target));
+        match param {
+            GenericParam::TypeParam(param) => {
+                if let Some(ty) = param.default(db) {
+                    format_to!(buf, " = {}", ty.display(db, display_target));
+                }
+            }
+            GenericParam::ConstParam(param) => {
+                if let Some(expr) = param.default(db, display_target).and_then(|konst| konst.expr())
+                {
+                    format_to!(buf, " = {}", expr);
+                }
+            }
+            _ => {}
+        }
         res.push_generic_param(&buf);
     }
     if let hir::GenericDef::Trait(tr) = generics_def {
@@ -695,9 +732,8 @@ fn signature_help_for_tuple_pat_ish(
 }
 #[cfg(test)]
 mod tests {
-    use std::iter;
 
-    use expect_test::{expect, Expect};
+    use expect_test::{Expect, expect};
     use ide_db::FilePosition;
     use stdx::format_to;
     use test_fixture::ChangeFixture;
@@ -708,13 +744,14 @@ mod tests {
     pub(crate) fn position(
         #[rust_analyzer::rust_fixture] ra_fixture: &str,
     ) -> (RootDatabase, FilePosition) {
-        let change_fixture = ChangeFixture::parse(ra_fixture);
         let mut database = RootDatabase::default();
+        let change_fixture = ChangeFixture::parse(&database, ra_fixture);
         database.apply_change(change_fixture.change);
         let (file_id, range_or_offset) =
             change_fixture.file_position.expect("expected a marker ($0)");
         let offset = range_or_offset.expect_offset();
-        (database, FilePosition { file_id: file_id.into(), offset })
+        let position = FilePosition { file_id: file_id.file_id(&database), offset };
+        (database, position)
     }
 
     #[track_caller]
@@ -742,11 +779,11 @@ mod tests {
                     let gap = start.checked_sub(offset).unwrap_or_else(|| {
                         panic!("parameter ranges out of order: {:?}", sig_help.parameter_ranges())
                     });
-                    rendered.extend(iter::repeat(' ').take(gap as usize));
+                    rendered.extend(std::iter::repeat_n(' ', gap as usize));
                     let param_text = &sig_help.signature[*range];
                     let width = param_text.chars().count(); // …
                     let marker = if is_active { '^' } else { '-' };
-                    rendered.extend(iter::repeat(marker).take(width));
+                    rendered.extend(std::iter::repeat_n(marker, width));
                     offset += gap + u32::from(range.len());
                 }
                 if !sig_help.parameter_ranges().is_empty() {
@@ -828,8 +865,8 @@ fn foo<T, U: Copy + Display>(x: T, y: U) -> u32
 fn bar() { foo($03, ); }
 "#,
             expect![[r#"
-                fn foo(x: i32, y: U) -> u32
-                       ^^^^^^  ----
+                fn foo<T, U>(x: i32, y: U) -> u32
+                             ^^^^^^  ----
             "#]],
         );
     }
@@ -842,7 +879,7 @@ fn foo<T>() -> T where T: Copy + Display {}
 fn bar() { foo($0); }
 "#,
             expect![[r#"
-                fn foo() -> T
+                fn foo<T>() -> T
             "#]],
         );
     }
@@ -1292,8 +1329,8 @@ fn main() {
 }
 "#,
             expect![[r#"
-                struct S({unknown})
-                         ^^^^^^^^^
+                struct S<T>({unknown})
+                            ^^^^^^^^^
             "#]],
         );
     }
@@ -1388,7 +1425,7 @@ id! {
 fn test() { S.foo($0); }
 "#,
             expect![[r#"
-                fn foo(&'a mut self)
+                fn foo<'a>(&'a mut self)
             "#]],
         );
     }
@@ -1737,8 +1774,8 @@ fn sup() {
 }
 "#,
             expect![[r#"
-                fn test(&mut self, val: V)
-                                   ^^^^^^
+                fn test<V>(&mut self, val: V)
+                                      ^^^^^^
             "#]],
         );
     }
@@ -1914,8 +1951,8 @@ fn f() {
 }
 "#,
             expect![[r#"
-                fn foo(x: Wrap<impl Trait<U>>)
-                       ^^^^^^^^^^^^^^^^^^^^^^
+                fn foo<U>(x: Wrap<impl Trait<U>>)
+                          ^^^^^^^^^^^^^^^^^^^^^^
             "#]],
         );
     }
@@ -2407,4 +2444,96 @@ fn main() {
             "#]],
         );
     }
+
+    #[test]
+    fn test_tuple_generic_param() {
+        check(
+            r#"
+struct S<T>(T);
+
+fn main() {
+    let s: S<$0
+}
+            "#,
+            expect![[r#"
+                struct S<T>
+                         ^
+            "#]],
+        );
+    }
+
+    #[test]
+    fn test_enum_generic_param() {
+        check(
+            r#"
+enum Option<T> {
+    Some(T),
+    None,
+}
+
+fn main() {
+    let opt: Option<$0
+}
+            "#,
+            expect![[r#"
+                enum Option<T>
+                            ^
+            "#]],
+        );
+    }
+
+    #[test]
+    fn test_enum_variant_generic_param() {
+        check(
+            r#"
+enum Option<T> {
+    Some(T),
+    None,
+}
+
+fn main() {
+    let opt = Option::Some($0);
+}
+            "#,
+            expect![[r#"
+                enum Option<T>::Some({unknown})
+                                     ^^^^^^^^^
+            "#]],
+        );
+    }
+
+    #[test]
+    fn test_generic_arg_with_default() {
+        check(
+            r#"
+struct S<T = u8> {
+    field: T,
+}
+
+fn main() {
+    let s: S<$0
+}
+            "#,
+            expect![[r#"
+                struct S<T = u8>
+                         ^^^^^^
+            "#]],
+        );
+
+        check(
+            r#"
+struct S<const C: u8 = 5> {
+    field: C,
+}
+
+fn main() {
+    let s: S<$0
+}
+            "#,
+            expect![[r#"
+                struct S<const C: u8 = 5>
+                         ^^^^^^^^^^^^^^^
+            "#]],
+        );
+    }
 }
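
The signature-help hunks above now prepend the callable's generic parameters (skipping implicit type parameters introduced by `impl Trait`) and only print the angle brackets when the joined list is non-empty, as the updated expectations such as `fn foo<T, U>(x: i32, y: U)` show. A standalone sketch of that join-and-wrap rendering (plain string slices instead of hir types):

    fn render_signature(name: &str, generic_params: &[&str], value_params: &[&str]) -> String {
        let mut sig = format!("fn {name}");
        // only emit `<...>` when there is at least one displayable generic param
        if !generic_params.is_empty() {
            sig.push('<');
            sig.push_str(&generic_params.join(", "));
            sig.push('>');
        }
        sig.push('(');
        sig.push_str(&value_params.join(", "));
        sig.push(')');
        sig
    }

    fn main() {
        assert_eq!(
            render_signature("foo", &["T", "U"], &["x: i32", "y: U"]),
            "fn foo<T, U>(x: i32, y: U)"
        );
        assert_eq!(render_signature("bar", &[], &[]), "fn bar()");
    }
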
diff --git a/src/tools/rust-analyzer/crates/ide/src/ssr.rs b/src/tools/rust-analyzer/crates/ide/src/ssr.rs
index 90e350949b81f..7df4499a0c2f7 100644
--- a/src/tools/rust-analyzer/crates/ide/src/ssr.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/ssr.rs
@@ -2,8 +2,8 @@
 //! assist in ide_assists because that would require the ide_assists crate
 //! depend on the ide_ssr crate.
 
-use ide_assists::{Assist, AssistId, AssistKind, AssistResolveStrategy, GroupLabel};
-use ide_db::{label::Label, source_change::SourceChange, FileRange, RootDatabase};
+use ide_assists::{Assist, AssistId, AssistResolveStrategy, GroupLabel};
+use ide_db::{FileRange, RootDatabase, label::Label, source_change::SourceChange};
 
 pub(crate) fn ssr_assists(
     db: &RootDatabase,
@@ -16,7 +16,7 @@ pub(crate) fn ssr_assists(
         Some(ssr_data) => ssr_data,
         None => return ssr_assists,
     };
-    let id = AssistId("ssr", AssistKind::RefactorRewrite);
+    let id = AssistId::refactor_rewrite("ssr");
 
     let (source_change_for_file, source_change_for_workspace) = if resolve.should_resolve(&id) {
         let edits = match_finder.edits();
@@ -59,8 +59,8 @@ mod tests {
     use expect_test::expect;
     use ide_assists::{Assist, AssistResolveStrategy};
     use ide_db::{
-        base_db::ra_salsa::Durability, symbol_index::SymbolsDatabase, FileRange, FxHashSet,
-        RootDatabase,
+        FileRange, FxHashSet, RootDatabase, base_db::salsa::Durability,
+        symbol_index::SymbolsDatabase,
     };
     use test_fixture::WithFixture;
     use triomphe::Arc;
@@ -78,7 +78,7 @@ mod tests {
         ssr_assists(
             &db,
             &resolve,
-            FileRange { file_id: file_id.into(), range: range_or_offset.into() },
+            FileRange { file_id: file_id.file_id(&db), range: range_or_offset.into() },
         )
     }
 
@@ -120,6 +120,7 @@ mod tests {
                 id: AssistId(
                     "ssr",
                     RefactorRewrite,
+                    None,
                 ),
                 label: "Apply SSR in file",
                 group: Some(
@@ -139,9 +140,9 @@ mod tests {
                                         Indel {
                                             insert: "3",
                                             delete: 33..34,
-                                            annotation: None,
                                         },
                                     ],
+                                    annotation: None,
                                 },
                                 None,
                             ),
@@ -163,6 +164,7 @@ mod tests {
                 id: AssistId(
                     "ssr",
                     RefactorRewrite,
+                    None,
                 ),
                 label: "Apply SSR in workspace",
                 group: Some(
@@ -182,9 +184,9 @@ mod tests {
                                         Indel {
                                             insert: "3",
                                             delete: 33..34,
-                                            annotation: None,
                                         },
                                     ],
+                                    annotation: None,
                                 },
                                 None,
                             ),
@@ -196,9 +198,9 @@ mod tests {
                                         Indel {
                                             insert: "3",
                                             delete: 11..12,
-                                            annotation: None,
                                         },
                                     ],
+                                    annotation: None,
                                 },
                                 None,
                             ),
@@ -240,6 +242,7 @@ mod tests {
                 id: AssistId(
                     "ssr",
                     RefactorRewrite,
+                    None,
                 ),
                 label: "Apply SSR in file",
                 group: Some(
@@ -260,6 +263,7 @@ mod tests {
                 id: AssistId(
                     "ssr",
                     RefactorRewrite,
+                    None,
                 ),
                 label: "Apply SSR in workspace",
                 group: Some(
diff --git a/src/tools/rust-analyzer/crates/ide/src/static_index.rs b/src/tools/rust-analyzer/crates/ide/src/static_index.rs
index 332aecf1e3cc5..efee39c13db94 100644
--- a/src/tools/rust-analyzer/crates/ide/src/static_index.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/static_index.rs
@@ -1,25 +1,25 @@
 //! This module provides `StaticIndex` which is used for powering
 //! read-only code browsers and emitting LSIF
 
-use hir::{db::HirDatabase, Crate, HirFileIdExt, Module, Semantics};
+use arrayvec::ArrayVec;
+use hir::{Crate, Module, Semantics, db::HirDatabase};
 use ide_db::{
-    base_db::{SourceDatabase, SourceRootDatabase, VfsPath},
-    defs::Definition,
+    FileId, FileRange, FxHashMap, FxHashSet, RootDatabase,
+    base_db::{RootQueryDb, SourceDatabase, VfsPath},
+    defs::{Definition, IdentClass},
     documentation::Documentation,
     famous_defs::FamousDefs,
-    helpers::get_definition,
-    FileId, FileRange, FxHashMap, FxHashSet, RootDatabase,
 };
 use span::Edition;
-use syntax::{AstNode, SyntaxKind::*, SyntaxNode, TextRange, T};
+use syntax::{AstNode, SyntaxKind::*, SyntaxNode, SyntaxToken, T, TextRange};
 
 use crate::navigation_target::UpmappingResult;
 use crate::{
-    hover::{hover_for_definition, SubstTyLen},
+    Analysis, Fold, HoverConfig, HoverResult, InlayHint, InlayHintsConfig, TryToNav,
+    hover::{SubstTyLen, hover_for_definition},
     inlay_hints::{AdjustmentHintsMode, InlayFieldsToResolve},
-    moniker::{def_to_kind, def_to_moniker, MonikerResult, SymbolInformationKind},
+    moniker::{MonikerResult, SymbolInformationKind, def_to_kind, def_to_moniker},
     parent_module::crates_for,
-    Analysis, Fold, HoverConfig, HoverResult, InlayHint, InlayHintsConfig, TryToNav,
 };
 
 /// A static representation of fully analyzed source code.
@@ -120,12 +120,28 @@ fn documentation_for_definition(
         famous_defs.as_ref(),
         def.krate(sema.db)
             .unwrap_or_else(|| {
-                (*sema.db.crate_graph().crates_in_topological_order().last().unwrap()).into()
+                (*sema.db.all_crates().last().expect("no crate graph present")).into()
             })
             .to_display_target(sema.db),
     )
 }
 
+// FIXME: This is a weird function
+fn get_definitions(
+    sema: &Semantics<'_, RootDatabase>,
+    token: SyntaxToken,
+) -> Option<ArrayVec<Definition, 2>> {
+    for token in sema.descend_into_macros_exact(token) {
+        let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops);
+        if let Some(defs) = def {
+            if !defs.is_empty() {
+                return Some(defs);
+            }
+        }
+    }
+    None
+}
+
 pub enum VendoredLibrariesConfig<'a> {
     Included { workspace_root: &'a VfsPath },
     Excluded,
@@ -175,9 +191,14 @@ impl StaticIndex<'_> {
         // hovers
         let sema = hir::Semantics::new(self.db);
         let root = sema.parse_guess_edition(file_id).syntax().clone();
-        let edition =
-            sema.attach_first_edition(file_id).map(|it| it.edition()).unwrap_or(Edition::CURRENT);
-        let display_target = sema.first_crate_or_default(file_id).to_display_target(self.db);
+        let edition = sema
+            .attach_first_edition(file_id)
+            .map(|it| it.edition(self.db))
+            .unwrap_or(Edition::CURRENT);
+        let display_target = match sema.first_crate(file_id) {
+            Some(krate) => krate.to_display_target(sema.db),
+            None => return,
+        };
         let tokens = root.descendants_with_tokens().filter_map(|it| match it {
             syntax::NodeOrToken::Node(_) => None,
             syntax::NodeOrToken::Token(it) => Some(it),
@@ -254,11 +275,14 @@ impl StaticIndex<'_> {
         for token in tokens {
             let range = token.text_range();
             let node = token.parent().unwrap();
-            let def = match get_definition(&sema, token.clone()) {
-                Some(it) => it,
+            match get_definitions(&sema, token.clone()) {
+                Some(it) => {
+                    for i in it {
+                        add_token(i, range, &node);
+                    }
+                }
                 None => continue,
             };
-            add_token(def, range, &node);
         }
         self.files.push(result);
     }
@@ -267,14 +291,14 @@ impl StaticIndex<'_> {
         analysis: &'a Analysis,
         vendored_libs_config: VendoredLibrariesConfig<'_>,
     ) -> StaticIndex<'a> {
-        let db = &*analysis.db;
+        let db = &analysis.db;
         let work = all_modules(db).into_iter().filter(|module| {
             let file_id = module.definition_source_file_id(db).original_file(db);
-            let source_root = db.file_source_root(file_id.into());
-            let source_root = db.source_root(source_root);
+            let source_root = db.file_source_root(file_id.file_id(&analysis.db)).source_root_id(db);
+            let source_root = db.source_root(source_root).source_root(db);
             let is_vendored = match vendored_libs_config {
                 VendoredLibrariesConfig::Included { workspace_root } => source_root
-                    .path_for_file(&file_id.into())
+                    .path_for_file(&file_id.file_id(&analysis.db))
                     .is_some_and(|module_path| module_path.starts_with(workspace_root)),
                 VendoredLibrariesConfig::Excluded => false,
             };
@@ -294,7 +318,7 @@ impl StaticIndex<'_> {
             if visited_files.contains(&file_id) {
                 continue;
             }
-            this.add_file(file_id.into());
+            this.add_file(file_id.file_id(&analysis.db));
             // mark the file
             visited_files.insert(file_id);
         }
@@ -304,8 +328,8 @@ impl StaticIndex<'_> {
 
 #[cfg(test)]
 mod tests {
-    use crate::{fixture, StaticIndex};
-    use ide_db::{base_db::VfsPath, FileRange, FxHashSet};
+    use crate::{StaticIndex, fixture};
+    use ide_db::{FileRange, FxHashMap, FxHashSet, base_db::VfsPath};
     use syntax::TextSize;
 
     use super::VendoredLibrariesConfig;
@@ -360,6 +384,71 @@ mod tests {
         }
     }
 
+    #[track_caller]
+    fn check_references(
+        #[rust_analyzer::rust_fixture] ra_fixture: &str,
+        vendored_libs_config: VendoredLibrariesConfig<'_>,
+    ) {
+        let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
+        let s = StaticIndex::compute(&analysis, vendored_libs_config);
+        let mut range_set: FxHashMap<_, i32> = ranges.iter().map(|it| (it.0, 0)).collect();
+
+        // Make sure that all references have at least one range. We use a HashMap instead of a
+        // HashSet so that we can have more than one reference at the same range.
+        for (_, t) in s.tokens.iter() {
+            for r in &t.references {
+                if r.is_definition {
+                    continue;
+                }
+                if r.range.range.start() == TextSize::from(0) {
+                    // ignore whole file range corresponding to module definition
+                    continue;
+                }
+                match range_set.entry(r.range) {
+                    std::collections::hash_map::Entry::Occupied(mut entry) => {
+                        let count = entry.get_mut();
+                        *count += 1;
+                    }
+                    std::collections::hash_map::Entry::Vacant(_) => {
+                        panic!("additional reference {r:?}");
+                    }
+                }
+            }
+        }
+        for (range, count) in range_set.iter() {
+            if *count == 0 {
+                panic!("unfound reference {range:?}");
+            }
+        }
+    }
+
+    #[test]
+    fn field_initialization() {
+        check_references(
+            r#"
+struct Point {
+    x: f64,
+     //^^^
+    y: f64,
+     //^^^
+}
+    fn foo() {
+        let x = 5.;
+        let y = 10.;
+        let mut p = Point { x, y };
+                  //^^^^^   ^  ^
+        p.x = 9.;
+      //^ ^
+        p.y = 10.;
+      //^ ^
+    }
+"#,
+            VendoredLibrariesConfig::Included {
+                workspace_root: &VfsPath::new_virtual_path("/workspace".to_owned()),
+            },
+        );
+    }
+
     #[test]
     fn struct_and_enum() {
         check_all_ranges(
@@ -379,6 +468,17 @@ struct Foo;
      //^^^
 enum E { X(Foo) }
    //^   ^
+"#,
+            VendoredLibrariesConfig::Included {
+                workspace_root: &VfsPath::new_virtual_path("/workspace".to_owned()),
+            },
+        );
+
+        check_references(
+            r#"
+struct Foo;
+enum E { X(Foo) }
+   //      ^^^
 "#,
             VendoredLibrariesConfig::Included {
                 workspace_root: &VfsPath::new_virtual_path("/workspace".to_owned()),
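
The new `check_references` helper above seeds a map with the annotated ranges, bumps a counter for every non-definition reference the static index reports, and fails on anything extra or unmatched; a `HashMap` is used so one range can legitimately be hit more than once. The same bookkeeping in a self-contained toy (plain tuples instead of `FileRange`):

    use std::collections::HashMap;

    fn check(expected: &[(u32, u32)], reported: &[(u32, u32)]) {
        let mut seen: HashMap<(u32, u32), u32> = expected.iter().map(|&range| (range, 0)).collect();
        for range in reported {
            match seen.get_mut(range) {
                Some(count) => *count += 1, // a range may be referenced repeatedly
                None => panic!("additional reference {range:?}"),
            }
        }
        for (range, count) in &seen {
            assert!(*count > 0, "no reference found for {range:?}");
        }
    }

    fn main() {
        check(&[(4, 7), (22, 25)], &[(4, 7), (22, 25), (4, 7)]);
    }
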
diff --git a/src/tools/rust-analyzer/crates/ide/src/status.rs b/src/tools/rust-analyzer/crates/ide/src/status.rs
index a44be67668ce3..55a0db2d82046 100644
--- a/src/tools/rust-analyzer/crates/ide/src/status.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/status.rs
@@ -1,29 +1,8 @@
-use std::{fmt, marker::PhantomData};
-
-use hir::{
-    db::{AstIdMapQuery, AttrsQuery, BlockDefMapQuery, ParseMacroExpansionQuery},
-    Attr, Attrs, ExpandResult, MacroFileId, Module,
-};
-use ide_db::{
-    base_db::{
-        ra_salsa::{
-            debug::{DebugQueryTable, TableEntry},
-            Query, QueryTable,
-        },
-        CompressedFileTextQuery, CrateData, ParseQuery, SourceDatabase, SourceRootId,
-    },
-    symbol_index::ModuleSymbolsQuery,
-};
-use ide_db::{
-    symbol_index::{LibrarySymbolsQuery, SymbolIndex},
-    RootDatabase,
-};
+use ide_db::RootDatabase;
+use ide_db::base_db::{BuiltCrateData, ExtraCrateData};
 use itertools::Itertools;
-use profile::{memory_usage, Bytes};
-use span::{EditionedFileId, FileId};
+use span::FileId;
 use stdx::format_to;
-use syntax::{ast, Parse, SyntaxNode};
-use triomphe::Arc;
 
 // Feature: Status
 //
@@ -37,17 +16,17 @@ use triomphe::Arc;
 pub(crate) fn status(db: &RootDatabase, file_id: Option<FileId>) -> String {
     let mut buf = String::new();
 
-    format_to!(buf, "{}\n", collect_query(CompressedFileTextQuery.in_db(db)));
-    format_to!(buf, "{}\n", collect_query(ParseQuery.in_db(db)));
-    format_to!(buf, "{}\n", collect_query(ParseMacroExpansionQuery.in_db(db)));
-    format_to!(buf, "{}\n", collect_query(LibrarySymbolsQuery.in_db(db)));
-    format_to!(buf, "{}\n", collect_query(ModuleSymbolsQuery.in_db(db)));
-    format_to!(buf, "{} in total\n", memory_usage());
+    // format_to!(buf, "{}\n", collect_query(CompressedFileTextQuery.in_db(db)));
+    // format_to!(buf, "{}\n", collect_query(ParseQuery.in_db(db)));
+    // format_to!(buf, "{}\n", collect_query(ParseMacroExpansionQuery.in_db(db)));
+    // format_to!(buf, "{}\n", collect_query(LibrarySymbolsQuery.in_db(db)));
+    // format_to!(buf, "{}\n", collect_query(ModuleSymbolsQuery.in_db(db)));
+    // format_to!(buf, "{} in total\n", memory_usage());
 
-    format_to!(buf, "\nDebug info:\n");
-    format_to!(buf, "{}\n", collect_query(AttrsQuery.in_db(db)));
-    format_to!(buf, "{} ast id maps\n", collect_query_count(AstIdMapQuery.in_db(db)));
-    format_to!(buf, "{} block def maps\n", collect_query_count(BlockDefMapQuery.in_db(db)));
+    // format_to!(buf, "\nDebug info:\n");
+    // format_to!(buf, "{}\n", collect_query(AttrsQuery.in_db(db)));
+    // format_to!(buf, "{} ast id maps\n", collect_query_count(AstIdMapQuery.in_db(db)));
+    // format_to!(buf, "{} block def maps\n", collect_query_count(BlockDefMapQuery.in_db(db)));
 
     if let Some(file_id) = file_id {
         format_to!(buf, "\nCrates for file {}:\n", file_id.index());
@@ -55,27 +34,25 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option<FileId>) -> String {
         if crates.is_empty() {
             format_to!(buf, "Does not belong to any crate");
         }
-        let crate_graph = db.crate_graph();
         for crate_id in crates {
-            let CrateData {
+            let BuiltCrateData {
                 root_file_id,
                 edition,
-                version,
-                display_name,
-                cfg_options,
-                potential_cfg_options,
-                env,
                 dependencies,
                 origin,
                 is_proc_macro,
                 proc_macro_cwd,
-            } = &crate_graph[crate_id];
+            } = crate_id.data(db);
+            let ExtraCrateData { version, display_name, potential_cfg_options } =
+                crate_id.extra_data(db);
+            let cfg_options = crate_id.cfg_options(db);
+            let env = crate_id.env(db);
             format_to!(
                 buf,
                 "Crate: {}\n",
                 match display_name {
-                    Some(it) => format!("{it}({})", crate_id.into_raw()),
-                    None => format!("{}", crate_id.into_raw()),
+                    Some(it) => format!("{it}({:?})", crate_id),
+                    None => format!("{:?}", crate_id),
                 }
             );
             format_to!(buf, "    Root module file id: {}\n", root_file_id.index());
@@ -89,7 +66,7 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option<FileId>) -> String {
             format_to!(buf, "    Proc macro cwd: {:?}\n", proc_macro_cwd);
             let deps = dependencies
                 .iter()
-                .map(|dep| format!("{}={}", dep.name, dep.crate_id.into_raw()))
+                .map(|dep| format!("{}={:?}", dep.name, dep.crate_id))
                 .format(", ");
             format_to!(buf, "    Dependencies: {}\n", deps);
         }
@@ -97,190 +74,3 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option<FileId>) -> String {
 
     buf.trim().to_owned()
 }
-
-fn collect_query<'q, Q>(table: QueryTable<'q, Q>) -> <Q as QueryCollect>::Collector
-where
-    QueryTable<'q, Q>: DebugQueryTable,
-    Q: QueryCollect,
-    <Q as Query>::Storage: 'q,
-    <Q as QueryCollect>::Collector: StatCollect<
-        <QueryTable<'q, Q> as DebugQueryTable>::Key,
-        <QueryTable<'q, Q> as DebugQueryTable>::Value,
-    >,
-{
-    struct StatCollectorWrapper<C>(C);
-    impl<C: StatCollect<K, V>, K, V> FromIterator<TableEntry<K, V>> for StatCollectorWrapper<C> {
-        fn from_iter<T>(iter: T) -> StatCollectorWrapper<C>
-        where
-            T: IntoIterator<Item = TableEntry<K, V>>,
-        {
-            let mut res = C::default();
-            for entry in iter {
-                res.collect_entry(entry.key, entry.value);
-            }
-            StatCollectorWrapper(res)
-        }
-    }
-    table.entries::<StatCollectorWrapper<<Q as QueryCollect>::Collector>>().0
-}
-
-fn collect_query_count<'q, Q>(table: QueryTable<'q, Q>) -> usize
-where
-    QueryTable<'q, Q>: DebugQueryTable,
-    Q: Query,
-    <Q as Query>::Storage: 'q,
-{
-    struct EntryCounter(usize);
-    impl<K, V> FromIterator<TableEntry<K, V>> for EntryCounter {
-        fn from_iter<T>(iter: T) -> EntryCounter
-        where
-            T: IntoIterator<Item = TableEntry<K, V>>,
-        {
-            EntryCounter(iter.into_iter().count())
-        }
-    }
-    table.entries::<EntryCounter>().0
-}
-
-trait QueryCollect: Query {
-    type Collector;
-}
-
-impl QueryCollect for LibrarySymbolsQuery {
-    type Collector = SymbolsStats<SourceRootId>;
-}
-
-impl QueryCollect for ParseQuery {
-    type Collector = SyntaxTreeStats<false>;
-}
-
-impl QueryCollect for ParseMacroExpansionQuery {
-    type Collector = SyntaxTreeStats<true>;
-}
-
-impl QueryCollect for CompressedFileTextQuery {
-    type Collector = FilesStats;
-}
-
-impl QueryCollect for ModuleSymbolsQuery {
-    type Collector = SymbolsStats<Module>;
-}
-
-impl QueryCollect for AttrsQuery {
-    type Collector = AttrsStats;
-}
-
-trait StatCollect<K, V>: Default {
-    fn collect_entry(&mut self, key: K, value: Option<V>);
-}
-
-#[derive(Default)]
-struct FilesStats {
-    total: usize,
-    size: Bytes,
-}
-
-impl fmt::Display for FilesStats {
-    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(fmt, "{} of files", self.size)
-    }
-}
-
-impl StatCollect<FileId, Arc<[u8]>> for FilesStats {
-    fn collect_entry(&mut self, _: FileId, value: Option<Arc<[u8]>>) {
-        self.total += 1;
-        self.size += value.unwrap().len();
-    }
-}
-
-#[derive(Default)]
-pub(crate) struct SyntaxTreeStats<const MACROS: bool> {
-    total: usize,
-    pub(crate) retained: usize,
-}
-
-impl<const MACROS: bool> fmt::Display for SyntaxTreeStats<MACROS> {
-    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(
-            fmt,
-            "{} trees, {} preserved{}",
-            self.total,
-            self.retained,
-            if MACROS { " (macros)" } else { "" }
-        )
-    }
-}
-
-impl StatCollect<EditionedFileId, Parse<ast::SourceFile>> for SyntaxTreeStats<false> {
-    fn collect_entry(&mut self, _: EditionedFileId, value: Option<Parse<ast::SourceFile>>) {
-        self.total += 1;
-        self.retained += value.is_some() as usize;
-    }
-}
-
-impl<M> StatCollect<MacroFileId, ExpandResult<(Parse<SyntaxNode>, M)>> for SyntaxTreeStats<true> {
-    fn collect_entry(
-        &mut self,
-        _: MacroFileId,
-        value: Option<ExpandResult<(Parse<SyntaxNode>, M)>>,
-    ) {
-        self.total += 1;
-        self.retained += value.is_some() as usize;
-    }
-}
-
-struct SymbolsStats<Key> {
-    total: usize,
-    size: Bytes,
-    phantom: PhantomData<Key>,
-}
-
-impl<Key> Default for SymbolsStats<Key> {
-    fn default() -> Self {
-        Self { total: Default::default(), size: Default::default(), phantom: PhantomData }
-    }
-}
-
-impl fmt::Display for SymbolsStats<Module> {
-    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(fmt, "{} of module index symbols ({})", self.size, self.total)
-    }
-}
-impl fmt::Display for SymbolsStats<SourceRootId> {
-    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(fmt, "{} of library index symbols ({})", self.size, self.total)
-    }
-}
-impl<Key> StatCollect<Key, Arc<SymbolIndex>> for SymbolsStats<Key> {
-    fn collect_entry(&mut self, _: Key, value: Option<Arc<SymbolIndex>>) {
-        if let Some(symbols) = value {
-            self.total += symbols.len();
-            self.size += symbols.memory_size();
-        }
-    }
-}
-
-#[derive(Default)]
-struct AttrsStats {
-    entries: usize,
-    total: usize,
-}
-
-impl fmt::Display for AttrsStats {
-    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
-        let size = self.entries * size_of::<Attrs>() + self.total * size_of::<Attr>();
-        let size = Bytes::new(size as _);
-        write!(
-            fmt,
-            "{} attribute query entries, {} total attributes ({} for storing entries)",
-            self.entries, self.total, size
-        )
-    }
-}
-
-impl<Key> StatCollect<Key, Attrs> for AttrsStats {
-    fn collect_entry(&mut self, _: Key, value: Option<Attrs>) {
-        self.entries += 1;
-        self.total += value.map_or(0, |it| it.len());
-    }
-}
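The status view above moves from indexing a monolithic crate graph to per-crate accessors on the new `Crate` handle. A minimal sketch of that access pattern, using only the accessors that appear in the hunk above; the helper function itself and its exact types are illustrative assumptions, not part of the patch:

use ide_db::RootDatabase;
use ide_db::base_db::{BuiltCrateData, Crate, ExtraCrateData};

// Illustrative only: summarize one crate via the per-crate accessors used above.
fn describe_crate(db: &RootDatabase, krate: Crate) -> String {
    // Structural data: root file, edition, dependencies, origin, proc-macro info.
    let BuiltCrateData { root_file_id, dependencies, .. } = krate.data(db);
    // Metadata split out of the structural data: version, display name, potential cfgs.
    let ExtraCrateData { display_name, .. } = krate.extra_data(db);
    // cfg options and the build environment now have dedicated accessors.
    let _cfg_options = krate.cfg_options(db);
    let _env = krate.env(db);
    let name = match display_name {
        Some(it) => it.to_string(),
        None => format!("{krate:?}"),
    };
    format!("{name}: root file {}, {} dependencies", root_file_id.index(), dependencies.len())
}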
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
index 83082496d5b64..e1bc76318f8b9 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
@@ -15,26 +15,23 @@ mod tests;
 use std::ops::ControlFlow;
 
 use either::Either;
-use hir::{
-    DefWithBody, HirFileIdExt, InFile, InRealFile, MacroFileIdExt, MacroKind, Name, Semantics,
-};
+use hir::{DefWithBody, EditionedFileId, InFile, InRealFile, MacroKind, Name, Semantics};
 use ide_db::{FxHashMap, FxHashSet, Ranker, RootDatabase, SymbolKind};
-use span::EditionedFileId;
 use syntax::{
-    ast::{self, IsString},
     AstNode, AstToken, NodeOrToken,
     SyntaxKind::*,
-    SyntaxNode, SyntaxToken, TextRange, WalkEvent, T,
+    SyntaxNode, SyntaxToken, T, TextRange, WalkEvent,
+    ast::{self, IsString},
 };
 
 use crate::{
+    FileId, HlMod, HlOperator, HlPunct, HlTag,
     syntax_highlighting::{
         escape::{highlight_escape_byte, highlight_escape_char, highlight_escape_string},
         format::highlight_format_string,
         highlights::Highlights,
         tags::Highlight,
     },
-    FileId, HlMod, HlOperator, HlPunct, HlTag,
 };
 
 pub(crate) use html::highlight_as_html;
@@ -199,7 +196,7 @@ pub(crate) fn highlight(
     let sema = Semantics::new(db);
     let file_id = sema
         .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition(file_id));
+        .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
 
     // Determine the root based on the given range.
     let (root, range_to_highlight) = {
@@ -218,10 +215,7 @@ pub(crate) fn highlight(
     };
 
     let mut hl = highlights::Highlights::new(root.text_range());
-    let krate = match sema.scope(&root) {
-        Some(it) => it.krate(),
-        None => return hl.to_vec(),
-    };
+    let krate = sema.scope(&root).map(|it| it.krate());
     traverse(&mut hl, &sema, config, InRealFile::new(file_id, &root), krate, range_to_highlight);
     hl.to_vec()
 }
@@ -231,10 +225,10 @@ fn traverse(
     sema: &Semantics<'_, RootDatabase>,
     config: HighlightConfig,
     InRealFile { file_id, value: root }: InRealFile<&SyntaxNode>,
-    krate: hir::Crate,
+    krate: Option<hir::Crate>,
     range_to_highlight: TextRange,
 ) {
-    let is_unlinked = sema.file_to_module_def(file_id).is_none();
+    let is_unlinked = sema.file_to_module_def(file_id.file_id(sema.db)).is_none();
 
     enum AttrOrDerive {
         Attr(ast::Item),
@@ -494,7 +488,7 @@ fn string_injections(
     sema: &Semantics<'_, RootDatabase>,
     config: HighlightConfig,
     file_id: EditionedFileId,
-    krate: hir::Crate,
+    krate: Option<hir::Crate>,
     token: SyntaxToken,
     descended_token: &SyntaxToken,
 ) -> ControlFlow<()> {
@@ -508,7 +502,14 @@ fn string_injections(
             {
                 return ControlFlow::Break(());
             }
-            highlight_format_string(hl, sema, krate, &string, &descended_string, file_id.edition());
+            highlight_format_string(
+                hl,
+                sema,
+                krate,
+                &string,
+                &descended_string,
+                file_id.edition(sema.db),
+            );
 
             if !string.is_raw() {
                 highlight_escape_string(hl, &string);
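A pattern that recurs in this file and in `html.rs`, `typing.rs`, and `on_enter.rs` below: `EditionedFileId` is now created through the database and queried with a `db` argument. A hedged sketch of that lookup, built only from the calls visible in these hunks; the standalone helper is illustrative, not part of the patch:

use hir::{EditionedFileId, Semantics};
use ide_db::{FileId, RootDatabase};

// Illustrative only: attach an edition to a plain FileId, falling back to the
// current edition, as the call sites above do.
fn editioned(
    sema: &Semantics<'_, RootDatabase>,
    db: &RootDatabase,
    file_id: FileId,
) -> EditionedFileId {
    sema.attach_first_edition(file_id)
        .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id))
}

The edition and the underlying `FileId` are then read back with `file_id.edition(db)` and `file_id.file_id(db)` rather than being carried in the value itself.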
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/format.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/format.rs
index cc02aff2acf8a..3716dcfed006e 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/format.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/format.rs
@@ -1,21 +1,21 @@
 //! Syntax highlighting for format macro strings.
 use ide_db::{
-    defs::Definition,
-    syntax_helpers::format_string::{is_format_string, lex_format_specifiers, FormatSpecifier},
     SymbolKind,
+    defs::Definition,
+    syntax_helpers::format_string::{FormatSpecifier, is_format_string, lex_format_specifiers},
 };
 use span::Edition;
-use syntax::{ast, AstToken};
+use syntax::{AstToken, ast};
 
 use crate::{
-    syntax_highlighting::{highlight::highlight_def, highlights::Highlights},
     HlRange, HlTag,
+    syntax_highlighting::{highlight::highlight_def, highlights::Highlights},
 };
 
 pub(super) fn highlight_format_string(
     stack: &mut Highlights,
     sema: &hir::Semantics<'_, ide_db::RootDatabase>,
-    krate: hir::Crate,
+    krate: Option<hir::Crate>,
     string: &ast::String,
     expanded_string: &ast::String,
     edition: Edition,
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
index 282fbb4433b22..87db0cd7dc53c 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
@@ -3,23 +3,23 @@
 use std::ops::ControlFlow;
 
 use either::Either;
-use hir::{AsAssocItem, HasVisibility, MacroFileIdExt, Semantics};
+use hir::{AsAssocItem, HasVisibility, Semantics};
 use ide_db::{
+    FxHashMap, RootDatabase, SymbolKind,
     defs::{Definition, IdentClass, NameClass, NameRefClass},
     syntax_helpers::node_ext::walk_pat,
-    FxHashMap, RootDatabase, SymbolKind,
 };
 use span::Edition;
 use stdx::hash_once;
 use syntax::{
-    ast, match_ast, AstNode, AstPtr, AstToken, NodeOrToken,
+    AstNode, AstPtr, AstToken, NodeOrToken,
     SyntaxKind::{self, *},
-    SyntaxNode, SyntaxNodePtr, SyntaxToken, T,
+    SyntaxNode, SyntaxNodePtr, SyntaxToken, T, ast, match_ast,
 };
 
 use crate::{
-    syntax_highlighting::tags::{HlOperator, HlPunct},
     Highlight, HlMod, HlTag,
+    syntax_highlighting::tags::{HlOperator, HlPunct},
 };
 
 pub(super) fn token(
@@ -63,7 +63,7 @@ pub(super) fn token(
 
 pub(super) fn name_like(
     sema: &Semantics<'_, RootDatabase>,
-    krate: hir::Crate,
+    krate: Option<hir::Crate>,
     bindings_shadow_count: Option<&mut FxHashMap<hir::Name, u32>>,
     is_unsafe_node: &impl Fn(AstPtr<Either<ast::Expr, ast::Pat>>) -> bool,
     syntactic_name_ref_highlighting: bool,
@@ -113,7 +113,8 @@ fn punctuation(
 ) -> Highlight {
     let operator_parent = token.parent();
     let parent_kind = operator_parent.as_ref().map_or(EOF, SyntaxNode::kind);
-    let h = match (kind, parent_kind) {
+
+    match (kind, parent_kind) {
         (T![?], TRY_EXPR) => HlTag::Operator(HlOperator::Other) | HlMod::ControlFlow,
         (T![&], BIN_EXPR) => HlOperator::Bitwise.into(),
         (T![&], REF_EXPR | REF_PAT) => HlTag::Operator(HlOperator::Other).into(),
@@ -143,11 +144,7 @@ fn punctuation(
             let ptr = operator_parent
                 .as_ref()
                 .and_then(|it| AstPtr::try_from_raw(SyntaxNodePtr::new(it)));
-            if ptr.is_some_and(is_unsafe_node) {
-                h | HlMod::Unsafe
-            } else {
-                h
-            }
+            if ptr.is_some_and(is_unsafe_node) { h | HlMod::Unsafe } else { h }
         }
         (T![-], PREFIX_EXPR) => {
             let prefix_expr =
@@ -223,11 +220,7 @@ fn punctuation(
                 let is_unsafe = is_unsafe_macro
                     || operator_parent
                         .and_then(|it| {
-                            if ast::ArgList::can_cast(it.kind()) {
-                                it.parent()
-                            } else {
-                                Some(it)
-                            }
+                            if ast::ArgList::can_cast(it.kind()) { it.parent() } else { Some(it) }
                         })
                         .and_then(|it| AstPtr::try_from_raw(SyntaxNodePtr::new(&it)))
                         .is_some_and(is_unsafe_node);
@@ -248,8 +241,7 @@ fn punctuation(
             _ => HlPunct::Other,
         }
         .into(),
-    };
-    h
+    }
 }
 
 fn keyword(token: SyntaxToken, kind: SyntaxKind) -> Highlight {
@@ -280,7 +272,7 @@ fn keyword(token: SyntaxToken, kind: SyntaxKind) -> Highlight {
 
 fn highlight_name_ref(
     sema: &Semantics<'_, RootDatabase>,
-    krate: hir::Crate,
+    krate: Option<hir::Crate>,
     bindings_shadow_count: Option<&mut FxHashMap<hir::Name, u32>>,
     binding_hash: &mut Option<u64>,
     is_unsafe_node: &impl Fn(AstPtr<Either<ast::Expr, ast::Pat>>) -> bool,
@@ -296,7 +288,7 @@ fn highlight_name_ref(
     let name_class = match NameRefClass::classify(sema, &name_ref) {
         Some(name_kind) => name_kind,
         None if syntactic_name_ref_highlighting => {
-            return highlight_name_ref_by_syntax(name_ref, sema, krate, is_unsafe_node)
+            return highlight_name_ref_by_syntax(name_ref, sema, krate, is_unsafe_node);
         }
         // FIXME: This is required for helper attributes used by proc-macros, as those do not map down
         // to anything when used.
@@ -409,9 +401,10 @@ fn highlight_name_ref(
         NameRefClass::ExternCrateShorthand { decl, krate: resolved_krate } => {
             let mut h = HlTag::Symbol(SymbolKind::Module).into();
 
-            if resolved_krate != krate {
-                h |= HlMod::Library
+            if krate.as_ref().is_some_and(|krate| resolved_krate != *krate) {
+                h |= HlMod::Library;
             }
+
             let is_public = decl.visibility(db) == hir::Visibility::Public;
             if is_public {
                 h |= HlMod::Public
@@ -439,7 +432,7 @@ fn highlight_name(
     bindings_shadow_count: Option<&mut FxHashMap<hir::Name, u32>>,
     binding_hash: &mut Option<u64>,
     is_unsafe_node: &impl Fn(AstPtr<Either<ast::Expr, ast::Pat>>) -> bool,
-    krate: hir::Crate,
+    krate: Option<hir::Crate>,
     name: ast::Name,
     edition: Edition,
 ) -> Highlight {
@@ -484,7 +477,7 @@ fn calc_binding_hash(name: &hir::Name, shadow_count: u32) -> u64 {
 
 pub(super) fn highlight_def(
     sema: &Semantics<'_, RootDatabase>,
-    krate: hir::Crate,
+    krate: Option<hir::Crate>,
     def: Definition,
     edition: Edition,
     is_ref: bool,
@@ -668,7 +661,7 @@ pub(super) fn highlight_def(
     };
 
     let def_crate = def.krate(db);
-    let is_from_other_crate = def_crate != Some(krate);
+    let is_from_other_crate = def_crate != krate;
     let is_from_builtin_crate = def_crate.is_some_and(|def_crate| def_crate.is_builtin(db));
     let is_builtin = matches!(
         def,
@@ -689,7 +682,7 @@ pub(super) fn highlight_def(
 
 fn highlight_method_call_by_name_ref(
     sema: &Semantics<'_, RootDatabase>,
-    krate: hir::Crate,
+    krate: Option<hir::Crate>,
     name_ref: &ast::NameRef,
     is_unsafe_node: &impl Fn(AstPtr<Either<ast::Expr, ast::Pat>>) -> bool,
 ) -> Option<Highlight> {
@@ -699,7 +692,7 @@ fn highlight_method_call_by_name_ref(
 
 fn highlight_method_call(
     sema: &Semantics<'_, RootDatabase>,
-    krate: hir::Crate,
+    krate: Option<hir::Crate>,
     method_call: &ast::MethodCallExpr,
     is_unsafe_node: &impl Fn(AstPtr<Either<ast::Expr, ast::Pat>>) -> bool,
 ) -> Option<Highlight> {
@@ -726,7 +719,7 @@ fn highlight_method_call(
     }
 
     let def_crate = func.module(sema.db).krate();
-    let is_from_other_crate = def_crate != krate;
+    let is_from_other_crate = krate.as_ref().map_or(false, |krate| def_crate != *krate);
     let is_from_builtin_crate = def_crate.is_builtin(sema.db);
     let is_public = func.visibility(sema.db) == hir::Visibility::Public;
 
@@ -799,7 +792,7 @@ fn highlight_name_by_syntax(name: ast::Name) -> Highlight {
 fn highlight_name_ref_by_syntax(
     name: ast::NameRef,
     sema: &Semantics<'_, RootDatabase>,
-    krate: hir::Crate,
+    krate: Option<hir::Crate>,
     is_unsafe_node: &impl Fn(AstPtr<Either<ast::Expr, ast::Pat>>) -> bool,
 ) -> Highlight {
     let default = HlTag::UnresolvedReference;
@@ -818,12 +811,9 @@ fn highlight_name_ref_by_syntax(
             let h = HlTag::Symbol(SymbolKind::Field);
             let is_unsafe = ast::Expr::cast(parent)
                 .is_some_and(|it| is_unsafe_node(AstPtr::new(&it).wrap_left()));
-            if is_unsafe {
-                h | HlMod::Unsafe
-            } else {
-                h.into()
-            }
+            if is_unsafe { h | HlMod::Unsafe } else { h.into() }
         }
+        RECORD_EXPR_FIELD | RECORD_PAT_FIELD => HlTag::Symbol(SymbolKind::Field).into(),
         PATH_SEGMENT => {
             let name_based_fallback = || {
                 if name.text().chars().next().unwrap_or_default().is_uppercase() {
@@ -862,6 +852,8 @@ fn highlight_name_ref_by_syntax(
                 .into(),
             }
         }
+        ASSOC_TYPE_ARG => SymbolKind::TypeAlias.into(),
+        USE_BOUND_GENERIC_ARGS => SymbolKind::TypeParam.into(),
         _ => default.into(),
     }
 }
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs
index 07d40bafeba17..9fd807f031f1f 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs
@@ -1,21 +1,20 @@
 //! Renders a bit of code as HTML.
 
-use hir::Semantics;
+use hir::{EditionedFileId, Semantics};
 use oorandom::Rand32;
-use span::EditionedFileId;
 use stdx::format_to;
 use syntax::AstNode;
 
 use crate::{
-    syntax_highlighting::{highlight, HighlightConfig},
     FileId, RootDatabase,
+    syntax_highlighting::{HighlightConfig, highlight},
 };
 
 pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String {
     let sema = Semantics::new(db);
     let file_id = sema
         .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition(file_id));
+        .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
     let file = sema.parse(file_id);
     let file = file.syntax();
     fn rainbowify(seed: u64) -> String {
@@ -40,7 +39,7 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo
             macro_bang: true,
             syntactic_name_ref_highlighting: false,
         },
-        file_id.into(),
+        file_id.file_id(db),
         None,
     );
     let text = file.to_string();
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs
index 1be90ad6a1ebc..0998e14c87ba0 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs
@@ -3,21 +3,20 @@
 use std::mem;
 
 use either::Either;
-use hir::{sym, HirFileId, InFile, Semantics};
+use hir::{EditionedFileId, HirFileId, InFile, Semantics, sym};
 use ide_db::{
-    active_parameter::ActiveParameter, defs::Definition, documentation::docs_with_rangemap,
-    rust_doc::is_rust_fence, SymbolKind,
+    SymbolKind, active_parameter::ActiveParameter, defs::Definition,
+    documentation::docs_with_rangemap, rust_doc::is_rust_fence,
 };
-use span::EditionedFileId;
 use syntax::{
-    ast::{self, AstNode, IsString, QuoteOffsets},
     AstToken, NodeOrToken, SyntaxNode, TextRange, TextSize,
+    ast::{self, AstNode, IsString, QuoteOffsets},
 };
 
 use crate::{
-    doc_links::{doc_attributes, extract_definitions_from_docs, resolve_doc_path_for_def},
-    syntax_highlighting::{highlights::Highlights, injector::Injector, HighlightConfig},
     Analysis, HlMod, HlRange, HlTag, RootDatabase,
+    doc_links::{doc_attributes, extract_definitions_from_docs, resolve_doc_path_for_def},
+    syntax_highlighting::{HighlightConfig, highlights::Highlights, injector::Injector},
 };
 
 pub(super) fn ra_fixture(
@@ -161,7 +160,7 @@ pub(super) fn doc_comment(
     let mut new_comments = Vec::new();
     let mut string;
 
-    for attr in attributes.by_key(&sym::doc).attrs() {
+    for attr in attributes.by_key(sym::doc).attrs() {
         let InFile { file_id, value: src } = attrs_source_map.source_of(attr);
         if file_id != src_file_id {
             continue;
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/injector.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/injector.rs
index a902fd717f099..c30f797324967 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/injector.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/injector.rs
@@ -53,11 +53,7 @@ impl<T> Delta<T> {
     where
         T: Ord + Sub<Output = T>,
     {
-        if to >= from {
-            Delta::Add(to - from)
-        } else {
-            Delta::Sub(from - to)
-        }
+        if to >= from { Delta::Add(to - from) } else { Delta::Sub(from - to) }
     }
 }
 
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
index c8c8c5dba4c40..d00f279c82995 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
@@ -45,14 +45,13 @@
 <span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">test</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span>
 <span class="comment documentation">//! ```</span>
 
+<span class="comment documentation">//! Syntactic name ref highlighting testing</span>
 <span class="comment documentation">//! ```rust</span>
 <span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="self_keyword crate_root injected">self</span><span class="semicolon injected">;</span>
-<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">std</span><span class="semicolon injected">;</span>
+<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">other</span><span class="none injected"> </span><span class="keyword injected">as</span><span class="none injected"> </span><span class="module crate_root declaration injected">otter</span><span class="semicolon injected">;</span>
 <span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">core</span><span class="semicolon injected">;</span>
-<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">alloc</span><span class="semicolon injected">;</span>
-<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">proc_macro</span><span class="semicolon injected">;</span>
-<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">test</span><span class="semicolon injected">;</span>
-<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">Krate</span><span class="semicolon injected">;</span>
+<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">trait</span><span class="none injected"> </span><span class="trait declaration injected">T</span><span class="none injected"> </span><span class="brace injected">{</span><span class="none injected"> </span><span class="keyword injected">type</span><span class="none injected"> </span><span class="type_alias associated declaration injected static trait">Assoc</span><span class="semicolon injected">;</span><span class="none injected"> </span><span class="brace injected">}</span>
+<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">f</span><span class="angle injected">&lt;</span><span class="type_param declaration injected">Arg</span><span class="angle injected">&gt;</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="operator injected">-&gt;</span><span class="none injected"> </span><span class="keyword injected">use</span><span class="angle injected">&lt;</span><span class="struct injected">Arg</span><span class="angle injected">&gt;</span><span class="none injected"> </span><span class="keyword injected">where</span><span class="none injected"> </span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="colon injected">:</span><span class="none injected"> </span><span class="trait injected">T</span><span class="comparison injected">&lt;</span><span class="struct injected">Assoc</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="comparison injected">&gt;</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span>
 <span class="comment documentation">//! ```</span>
 <span class="keyword">mod</span> <span class="module declaration">outline_module</span><span class="semicolon">;</span>
 
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_issue_19357.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_issue_19357.html
new file mode 100644
index 0000000000000..36ed8c594f7e2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_issue_19357.html
@@ -0,0 +1,46 @@
+
+<style>
+body                { margin: 0; }
+pre                 { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime           { color: #DFAF8F; font-style: italic; }
+.label              { color: #DFAF8F; font-style: italic; }
+.comment            { color: #7F9F7F; }
+.documentation      { color: #629755; }
+.intra_doc_link     { font-style: italic; }
+.injected           { opacity: 0.65 ; }
+.struct, .enum      { color: #7CB8BB; }
+.enum_variant       { color: #BDE0F3; }
+.string_literal     { color: #CC9393; }
+.field              { color: #94BFF3; }
+.function           { color: #93E0E3; }
+.parameter          { color: #94BFF3; }
+.text               { color: #DCDCCC; }
+.type               { color: #7CB8BB; }
+.builtin_type       { color: #8CD0D3; }
+.type_param         { color: #DFAF8F; }
+.attribute          { color: #94BFF3; }
+.numeric_literal    { color: #BFEBBF; }
+.bool_literal       { color: #BFE6EB; }
+.macro              { color: #94BFF3; }
+.proc_macro         { color: #94BFF3; text-decoration: underline; }
+.derive             { color: #94BFF3; font-style: italic; }
+.module             { color: #AFD8AF; }
+.value_param        { color: #DCDCCC; }
+.variable           { color: #DCDCCC; }
+.format_specifier   { color: #CC696B; }
+.mutable            { text-decoration: underline; }
+.escape_sequence    { color: #94BFF3; }
+.keyword            { color: #F0DFAF; font-weight: bold; }
+.control            { font-style: italic; }
+.reference          { font-style: italic; font-weight: bold; }
+.const              { font-weight: bolder; }
+.unsafe             { color: #BC8383; }
+
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference    { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+    <span class="keyword">let</span> <span class="variable declaration">x</span> <span class="operator">=</span> <span class="operator">&</span><span class="keyword">raw</span> <span class="keyword">mut</span> <span class="numeric_literal">5</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+</code></pre>
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html
index 7f6b4c2c880e1..e1a8d876c417b 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html
@@ -41,14 +41,14 @@
 .unresolved_reference    { color: #FC5555; text-decoration: wavy underline; }
 </style>
 <pre><code><span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
-    <span class="keyword">let</span> <span class="variable declaration reference" data-binding-hash="16711699953829236520" style="color: hsl(345,54%,46%);">hello</span> <span class="operator">=</span> <span class="string_literal">"hello"</span><span class="semicolon">;</span>
-    <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="10753541418856619067" style="color: hsl(51,52%,47%);">x</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="16711699953829236520" style="color: hsl(345,54%,46%);">hello</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
-    <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="9865812862466303869" style="color: hsl(329,86%,55%);">y</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="16711699953829236520" style="color: hsl(345,54%,46%);">hello</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+    <span class="keyword">let</span> <span class="variable declaration reference" data-binding-hash="18084384843626695225" style="color: hsl(154,95%,53%);">hello</span> <span class="operator">=</span> <span class="string_literal">"hello"</span><span class="semicolon">;</span>
+    <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="5697120079570210533" style="color: hsl(268,86%,80%);">x</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="18084384843626695225" style="color: hsl(154,95%,53%);">hello</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+    <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="4222724691718692706" style="color: hsl(156,71%,51%);">y</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="18084384843626695225" style="color: hsl(154,95%,53%);">hello</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
 
-    <span class="keyword">let</span> <span class="variable declaration reference" data-binding-hash="4890670724659097491" style="color: hsl(330,46%,45%);">x</span> <span class="operator">=</span> <span class="string_literal">"other color please!"</span><span class="semicolon">;</span>
-    <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="4002942168268782293" style="color: hsl(114,87%,67%);">y</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="4890670724659097491" style="color: hsl(330,46%,45%);">x</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+    <span class="keyword">let</span> <span class="variable declaration reference" data-binding-hash="17855021198829413584" style="color: hsl(230,76%,79%);">x</span> <span class="operator">=</span> <span class="string_literal">"other color please!"</span><span class="semicolon">;</span>
+    <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="16380625810977895757" style="color: hsl(262,75%,75%);">y</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="17855021198829413584" style="color: hsl(230,76%,79%);">x</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
 <span class="brace">}</span>
 
 <span class="keyword">fn</span> <span class="function declaration">bar</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
-    <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable reference" data-binding-hash="16711699953829236520" style="color: hsl(345,54%,46%);">hello</span> <span class="operator">=</span> <span class="string_literal">"hello"</span><span class="semicolon">;</span>
+    <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable reference" data-binding-hash="18084384843626695225" style="color: hsl(154,95%,53%);">hello</span> <span class="operator">=</span> <span class="string_literal">"hello"</span><span class="semicolon">;</span>
 <span class="brace">}</span></code></pre>
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs
index 8f69bb8230000..dd359326c61d6 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs
@@ -1,11 +1,11 @@
 use std::time::Instant;
 
-use expect_test::{expect_file, ExpectFile};
+use expect_test::{ExpectFile, expect_file};
 use ide_db::SymbolKind;
 use span::Edition;
-use test_utils::{bench, bench_fixture, skip_slow_tests, AssertLinear};
+use test_utils::{AssertLinear, bench, bench_fixture, skip_slow_tests};
 
-use crate::{fixture, FileRange, HighlightConfig, HlTag, TextRange};
+use crate::{FileRange, HighlightConfig, HlTag, TextRange, fixture};
 
 const HL_CONFIG: HighlightConfig = HighlightConfig {
     strings: true,
@@ -739,14 +739,13 @@ fn test_highlight_doc_comment() {
 //! fn test() {}
 //! ```
 
+//! Syntactic name ref highlighting testing
 //! ```rust
 //! extern crate self;
-//! extern crate std;
+//! extern crate other as otter;
 //! extern crate core;
-//! extern crate alloc;
-//! extern crate proc_macro;
-//! extern crate test;
-//! extern crate Krate;
+//! trait T { type Assoc; }
+//! fn f<Arg>() -> use<Arg> where (): T<Assoc = ()> {}
 //! ```
 mod outline_module;
 
@@ -1302,7 +1301,7 @@ fn benchmark_syntax_highlighting_parser() {
             })
             .count()
     };
-    assert_eq!(hash, 1167);
+    assert_eq!(hash, 1606);
 }
 
 #[test]
@@ -1421,3 +1420,18 @@ fn template() {}
         false,
     );
 }
+
+#[test]
+fn issue_19357() {
+    check_highlighting(
+        r#"
+//- /foo.rs
+fn main() {
+    let x = &raw mut 5;
+}
+//- /main.rs
+"#,
+        expect_file!["./test_data/highlight_issue_19357.html"],
+        false,
+    );
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/test_explorer.rs b/src/tools/rust-analyzer/crates/ide/src/test_explorer.rs
index 30b1d4c39b301..06cbd50e946ac 100644
--- a/src/tools/rust-analyzer/crates/ide/src/test_explorer.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/test_explorer.rs
@@ -1,17 +1,15 @@
 //! Discovers tests
 
 use hir::{Crate, Module, ModuleDef, Semantics};
-use ide_db::{
-    base_db::{CrateGraph, CrateId, SourceDatabase},
-    FileId, RootDatabase,
-};
+use ide_db::base_db;
+use ide_db::{FileId, RootDatabase, base_db::RootQueryDb};
 use syntax::TextRange;
 
-use crate::{runnables::runnable_fn, NavigationTarget, Runnable, TryToNav};
+use crate::{NavigationTarget, Runnable, TryToNav, runnables::runnable_fn};
 
 #[derive(Debug)]
 pub enum TestItemKind {
-    Crate(CrateId),
+    Crate(base_db::Crate),
     Module,
     Function,
 }
@@ -28,12 +26,12 @@ pub struct TestItem {
 }
 
 pub(crate) fn discover_test_roots(db: &RootDatabase) -> Vec<TestItem> {
-    let crate_graph = db.crate_graph();
-    crate_graph
+    db.all_crates()
         .iter()
-        .filter(|&id| crate_graph[id].origin.is_local())
+        .copied()
+        .filter(|&id| id.data(db).origin.is_local())
         .filter_map(|id| {
-            let test_id = crate_graph[id].display_name.as_ref()?.to_string();
+            let test_id = id.extra_data(db).display_name.as_ref()?.to_string();
             Some(TestItem {
                 kind: TestItemKind::Crate(id),
                 label: test_id.clone(),
@@ -47,12 +45,12 @@ pub(crate) fn discover_test_roots(db: &RootDatabase) -> Vec<TestItem> {
         .collect()
 }
 
-fn find_crate_by_id(crate_graph: &CrateGraph, crate_id: &str) -> Option<CrateId> {
+fn find_crate_by_id(db: &RootDatabase, crate_id: &str) -> Option<base_db::Crate> {
     // here, we use display_name as the crate id. This is not super ideal, but it works since we
     // only show tests for the local crates.
-    crate_graph.iter().find(|&id| {
-        crate_graph[id].origin.is_local()
-            && crate_graph[id].display_name.as_ref().is_some_and(|x| x.to_string() == crate_id)
+    db.all_crates().iter().copied().find(|&id| {
+        id.data(db).origin.is_local()
+            && id.extra_data(db).display_name.as_ref().is_some_and(|x| x.to_string() == crate_id)
     })
 }
 
@@ -115,8 +113,7 @@ pub(crate) fn discover_tests_in_crate_by_test_id(
     db: &RootDatabase,
     crate_test_id: &str,
 ) -> Vec<TestItem> {
-    let crate_graph = db.crate_graph();
-    let Some(crate_id) = find_crate_by_id(&crate_graph, crate_test_id) else {
+    let Some(crate_id) = find_crate_by_id(db, crate_test_id) else {
         return vec![];
     };
     discover_tests_in_crate(db, crate_id)
@@ -171,12 +168,14 @@ fn find_module_id_and_test_parents(
     Some((r, id))
 }
 
-pub(crate) fn discover_tests_in_crate(db: &RootDatabase, crate_id: CrateId) -> Vec<TestItem> {
-    let crate_graph = db.crate_graph();
-    if !crate_graph[crate_id].origin.is_local() {
+pub(crate) fn discover_tests_in_crate(
+    db: &RootDatabase,
+    crate_id: base_db::Crate,
+) -> Vec<TestItem> {
+    if !crate_id.data(db).origin.is_local() {
         return vec![];
     }
-    let Some(crate_test_id) = &crate_graph[crate_id].display_name else {
+    let Some(crate_test_id) = &crate_id.extra_data(db).display_name else {
         return vec![];
     };
     let kind = TestItemKind::Crate(crate_id);
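The test explorer now walks `db.all_crates()` plus the per-crate accessors instead of indexing a `CrateGraph`, and (as the comment above notes) keys local crates by their display name. A small illustrative sketch of that filter, relying only on calls that appear in this hunk; the helper itself is an assumption for exposition:

use ide_db::{RootDatabase, base_db::{Crate, RootQueryDb}};

// Illustrative only: the local workspace crates paired with their display names,
// mirroring the filtering in discover_test_roots() and find_crate_by_id() above.
fn local_crates_with_names(db: &RootDatabase) -> Vec<(Crate, String)> {
    db.all_crates()
        .iter()
        .copied()
        .filter(|&krate| krate.data(db).origin.is_local())
        .filter_map(|krate| {
            let name = krate.extra_data(db).display_name.as_ref()?.to_string();
            Some((krate, name))
        })
        .collect()
}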
diff --git a/src/tools/rust-analyzer/crates/ide/src/typing.rs b/src/tools/rust-analyzer/crates/ide/src/typing.rs
index 8c9dd05145272..4df7e25223d91 100644
--- a/src/tools/rust-analyzer/crates/ide/src/typing.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/typing.rs
@@ -15,14 +15,15 @@
 
 mod on_enter;
 
+use hir::EditionedFileId;
+use ide_db::{FilePosition, RootDatabase, base_db::RootQueryDb};
+use span::Edition;
 use std::iter;
 
-use ide_db::{base_db::SourceDatabase, FilePosition, RootDatabase};
-use span::{Edition, EditionedFileId};
 use syntax::{
-    algo::{ancestors_at_offset, find_node_at_offset},
-    ast::{self, edit::IndentLevel, AstToken},
     AstNode, Parse, SourceFile, SyntaxKind, TextRange, TextSize,
+    algo::{ancestors_at_offset, find_node_at_offset},
+    ast::{self, AstToken, edit::IndentLevel},
 };
 
 use ide_db::text_edit::TextEdit;
@@ -73,7 +74,8 @@ pub(crate) fn on_char_typed(
     // FIXME: We are hitting the database here, if we are unlucky this call might block momentarily
     // causing the editor to feel sluggish!
     let edition = Edition::CURRENT_FIXME;
-    let file = &db.parse(EditionedFileId::new(position.file_id, edition));
+    let editioned_file_id_wrapper = EditionedFileId::new(db, position.file_id, edition);
+    let file = &db.parse(editioned_file_id_wrapper);
     let char_matches_position =
         file.tree().syntax().text().char_at(position.offset) == Some(char_typed);
     if !stdx::always!(char_matches_position) {
diff --git a/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs b/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs
index c6d1c283f4eca..fdc583a15cc71 100644
--- a/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs
@@ -1,15 +1,14 @@
 //! Handles the `Enter` key press. At the moment, this only continues
 //! comments, but should handle indent some time in the future as well.
 
-use ide_db::RootDatabase;
-use ide_db::{base_db::SourceDatabase, FilePosition};
-use span::EditionedFileId;
+use ide_db::base_db::RootQueryDb;
+use ide_db::{FilePosition, RootDatabase};
 use syntax::{
-    algo::find_node_at_offset,
-    ast::{self, edit::IndentLevel, AstToken},
     AstNode, SmolStr, SourceFile,
     SyntaxKind::*,
     SyntaxNode, SyntaxToken, TextRange, TextSize, TokenAtOffset,
+    algo::find_node_at_offset,
+    ast::{self, AstToken, edit::IndentLevel},
 };
 
 use ide_db::text_edit::TextEdit;
@@ -51,7 +50,9 @@ use ide_db::text_edit::TextEdit;
 //
 // ![On Enter](https://user-images.githubusercontent.com/48062697/113065578-04c21800-91b1-11eb-82b8-22b8c481e645.gif)
 pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<TextEdit> {
-    let parse = db.parse(EditionedFileId::current_edition(position.file_id));
+    let editioned_file_id_wrapper =
+        ide_db::base_db::EditionedFileId::current_edition(db, position.file_id);
+    let parse = db.parse(editioned_file_id_wrapper);
     let file = parse.tree();
     let token = file.syntax().token_at_offset(position.offset).left_biased()?;
 
diff --git a/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs b/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs
index eb6eb7da1e90a..4696fef3209a9 100644
--- a/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs
@@ -1,9 +1,11 @@
 use dot::{Id, LabelText};
+use ide_db::base_db::salsa::plumbing::AsId;
 use ide_db::{
-    base_db::{CrateGraph, CrateId, Dependency, SourceDatabase, SourceRootDatabase},
-    FxHashSet, RootDatabase,
+    FxHashMap, RootDatabase,
+    base_db::{
+        BuiltCrateData, BuiltDependency, Crate, ExtraCrateData, RootQueryDb, SourceDatabase,
+    },
 };
-use triomphe::Arc;
 
 // Feature: View Crate Graph
 //
@@ -16,76 +18,81 @@ use triomphe::Arc;
 // |---------|-------------|
 // | VS Code | **rust-analyzer: View Crate Graph** |
 pub(crate) fn view_crate_graph(db: &RootDatabase, full: bool) -> Result<String, String> {
-    let crate_graph = db.crate_graph();
-    let crates_to_render = crate_graph
+    let all_crates = db.all_crates();
+    let crates_to_render = all_crates
         .iter()
-        .filter(|krate| {
+        .copied()
+        .map(|krate| (krate, (krate.data(db), krate.extra_data(db))))
+        .filter(|(_, (crate_data, _))| {
             if full {
                 true
             } else {
                 // Only render workspace crates
-                let root_id = db.file_source_root(crate_graph[*krate].root_file_id);
-                !db.source_root(root_id).is_library
+                let root_id = db.file_source_root(crate_data.root_file_id).source_root_id(db);
+                !db.source_root(root_id).source_root(db).is_library
             }
         })
         .collect();
-    let graph = DotCrateGraph { graph: crate_graph, crates_to_render };
+    let graph = DotCrateGraph { crates_to_render };
 
     let mut dot = Vec::new();
     dot::render(&graph, &mut dot).unwrap();
     Ok(String::from_utf8(dot).unwrap())
 }
 
-struct DotCrateGraph {
-    graph: Arc<CrateGraph>,
-    crates_to_render: FxHashSet<CrateId>,
+struct DotCrateGraph<'db> {
+    crates_to_render: FxHashMap<Crate, (&'db BuiltCrateData, &'db ExtraCrateData)>,
 }
 
-type Edge<'a> = (CrateId, &'a Dependency);
+type Edge<'a> = (Crate, &'a BuiltDependency);
 
-impl<'a> dot::GraphWalk<'a, CrateId, Edge<'a>> for DotCrateGraph {
-    fn nodes(&'a self) -> dot::Nodes<'a, CrateId> {
-        self.crates_to_render.iter().copied().collect()
+impl<'a> dot::GraphWalk<'a, Crate, Edge<'a>> for DotCrateGraph<'_> {
+    fn nodes(&'a self) -> dot::Nodes<'a, Crate> {
+        self.crates_to_render.keys().copied().collect()
     }
 
     fn edges(&'a self) -> dot::Edges<'a, Edge<'a>> {
         self.crates_to_render
             .iter()
-            .flat_map(|krate| {
-                self.graph[*krate]
+            .flat_map(|(krate, (crate_data, _))| {
+                crate_data
                     .dependencies
                     .iter()
-                    .filter(|dep| self.crates_to_render.contains(&dep.crate_id))
+                    .filter(|dep| self.crates_to_render.contains_key(&dep.crate_id))
                     .map(move |dep| (*krate, dep))
             })
             .collect()
     }
 
-    fn source(&'a self, edge: &Edge<'a>) -> CrateId {
+    fn source(&'a self, edge: &Edge<'a>) -> Crate {
         edge.0
     }
 
-    fn target(&'a self, edge: &Edge<'a>) -> CrateId {
+    fn target(&'a self, edge: &Edge<'a>) -> Crate {
         edge.1.crate_id
     }
 }
 
-impl<'a> dot::Labeller<'a, CrateId, Edge<'a>> for DotCrateGraph {
+impl<'a> dot::Labeller<'a, Crate, Edge<'a>> for DotCrateGraph<'_> {
     fn graph_id(&'a self) -> Id<'a> {
         Id::new("rust_analyzer_crate_graph").unwrap()
     }
 
-    fn node_id(&'a self, n: &CrateId) -> Id<'a> {
-        Id::new(format!("_{}", u32::from(n.into_raw()))).unwrap()
+    fn node_id(&'a self, n: &Crate) -> Id<'a> {
+        let id = n.as_id().as_u32();
+        Id::new(format!("_{:?}", id)).unwrap()
     }
 
-    fn node_shape(&'a self, _node: &CrateId) -> Option<LabelText<'a>> {
+    fn node_shape(&'a self, _node: &Crate) -> Option<LabelText<'a>> {
         Some(LabelText::LabelStr("box".into()))
     }
 
-    fn node_label(&'a self, n: &CrateId) -> LabelText<'a> {
-        let name =
-            self.graph[*n].display_name.as_ref().map_or("(unnamed crate)", |name| name.as_str());
+    fn node_label(&'a self, n: &Crate) -> LabelText<'a> {
+        let name = self.crates_to_render[n]
+            .1
+            .display_name
+            .as_ref()
+            .map_or("(unnamed crate)", |name| name.as_str());
         LabelText::LabelStr(name.into())
     }
 }
diff --git a/src/tools/rust-analyzer/crates/ide/src/view_hir.rs b/src/tools/rust-analyzer/crates/ide/src/view_hir.rs
index bfdf9d0f3374e..ec5e993f5a67d 100644
--- a/src/tools/rust-analyzer/crates/ide/src/view_hir.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/view_hir.rs
@@ -1,6 +1,6 @@
-use hir::{DefWithBody, Semantics};
+use hir::Semantics;
 use ide_db::{FilePosition, RootDatabase};
-use syntax::{algo::ancestors_at_offset, ast, AstNode};
+use syntax::AstNode;
 
 // Feature: View Hir
 //
@@ -10,21 +10,10 @@ use syntax::{algo::ancestors_at_offset, ast, AstNode};
 //
 // ![View Hir](https://user-images.githubusercontent.com/48062697/113065588-068bdb80-91b1-11eb-9a78-0b4ef1e972fb.gif)
 pub(crate) fn view_hir(db: &RootDatabase, position: FilePosition) -> String {
-    body_hir(db, position).unwrap_or_else(|| "Not inside a function body".to_owned())
-}
-
-fn body_hir(db: &RootDatabase, position: FilePosition) -> Option<String> {
-    let sema = Semantics::new(db);
-    let source_file = sema.parse_guess_edition(position.file_id);
-
-    let item = ancestors_at_offset(source_file.syntax(), position.offset)
-        .filter(|it| !ast::MacroCall::can_cast(it.kind()))
-        .find_map(ast::Item::cast)?;
-    let def: DefWithBody = match item {
-        ast::Item::Fn(it) => sema.to_def(&it)?.into(),
-        ast::Item::Const(it) => sema.to_def(&it)?.into(),
-        ast::Item::Static(it) => sema.to_def(&it)?.into(),
-        _ => return None,
-    };
-    Some(def.debug_hir(db))
+    (|| {
+        let sema = Semantics::new(db);
+        let source_file = sema.parse_guess_edition(position.file_id);
+        sema.debug_hir_at(source_file.syntax().token_at_offset(position.offset).next()?)
+    })()
+    .unwrap_or_else(|| "Not inside a lowerable item".to_owned())
 }
diff --git a/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs b/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs
index 67c241cbb9153..2cd751463bdb8 100644
--- a/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs
@@ -1,6 +1,5 @@
-use hir::{db::DefDatabase, Semantics};
+use hir::{EditionedFileId, Semantics, db::DefDatabase};
 use ide_db::{FileId, RootDatabase};
-use span::EditionedFileId;
 
 // Feature: Debug ItemTree
 //
@@ -13,6 +12,6 @@ pub(crate) fn view_item_tree(db: &RootDatabase, file_id: FileId) -> String {
     let sema = Semantics::new(db);
     let file_id = sema
         .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition(file_id));
-    db.file_item_tree(file_id.into()).pretty_print(db, file_id.edition())
+        .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
+    db.file_item_tree(file_id.into()).pretty_print(db, file_id.edition(db))
 }
diff --git a/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs b/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs
index 34bca7bce12cf..140ae4265be7d 100644
--- a/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs
@@ -2,9 +2,9 @@ use std::fmt;
 
 use hir::{DisplayTarget, Field, HirDisplay, Layout, Semantics, Type};
 use ide_db::{
+    RootDatabase,
     defs::Definition,
     helpers::{get_definition, pick_best_token},
-    RootDatabase,
 };
 use syntax::{AstNode, SyntaxKind};
 
@@ -83,7 +83,7 @@ pub(crate) fn view_memory_layout(
 ) -> Option<RecursiveMemoryLayout> {
     let sema = Semantics::new(db);
     let file = sema.parse_guess_edition(position.file_id);
-    let display_target = sema.first_crate_or_default(position.file_id).to_display_target(db);
+    let display_target = sema.first_crate(position.file_id)?.to_display_target(db);
     let token =
         pick_best_token(file.syntax().token_at_offset(position.offset), |kind| match kind {
             SyntaxKind::IDENT => 3,
diff --git a/src/tools/rust-analyzer/crates/ide/src/view_mir.rs b/src/tools/rust-analyzer/crates/ide/src/view_mir.rs
index aa4ff64a819e1..6ca231c7a81a6 100644
--- a/src/tools/rust-analyzer/crates/ide/src/view_mir.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/view_mir.rs
@@ -1,6 +1,6 @@
 use hir::{DefWithBody, Semantics};
 use ide_db::{FilePosition, RootDatabase};
-use syntax::{algo::ancestors_at_offset, ast, AstNode};
+use syntax::{AstNode, algo::ancestors_at_offset, ast};
 
 // Feature: View Mir
 //
diff --git a/src/tools/rust-analyzer/crates/ide/src/view_syntax_tree.rs b/src/tools/rust-analyzer/crates/ide/src/view_syntax_tree.rs
index 407720864bfdb..ecd93e8b28190 100644
--- a/src/tools/rust-analyzer/crates/ide/src/view_syntax_tree.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/view_syntax_tree.rs
@@ -1,13 +1,13 @@
 use hir::Semantics;
 use ide_db::{
-    line_index::{LineCol, LineIndex},
     FileId, LineIndexDatabase, RootDatabase,
+    line_index::{LineCol, LineIndex},
 };
 use span::{TextRange, TextSize};
 use stdx::format_to;
 use syntax::{
-    ast::{self, IsString},
     AstNode, AstToken, NodeOrToken, SourceFile, SyntaxNode, SyntaxToken, WalkEvent,
+    ast::{self, IsString},
 };
 use triomphe::Arc;
 
diff --git a/src/tools/rust-analyzer/crates/intern/Cargo.toml b/src/tools/rust-analyzer/crates/intern/Cargo.toml
index 397eba0929673..9ff656cb744e4 100644
--- a/src/tools/rust-analyzer/crates/intern/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/intern/Cargo.toml
@@ -13,7 +13,6 @@ rust-version.workspace = true
 
 
 [dependencies]
-# We need to freeze the version of the crate, as the raw-api feature is considered unstable
 dashmap.workspace = true
 hashbrown.workspace = true
 rustc-hash.workspace = true
diff --git a/src/tools/rust-analyzer/crates/intern/src/lib.rs b/src/tools/rust-analyzer/crates/intern/src/lib.rs
index 58327419f6314..398d224c07ad2 100644
--- a/src/tools/rust-analyzer/crates/intern/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/intern/src/lib.rs
@@ -3,79 +3,84 @@
 //! Eventually this should probably be replaced with salsa-based interning.
 
 use std::{
+    borrow::Borrow,
     fmt::{self, Debug, Display},
-    hash::{BuildHasherDefault, Hash, Hasher},
+    hash::{BuildHasher, BuildHasherDefault, Hash, Hasher},
     ops::Deref,
     sync::OnceLock,
 };
 
 use dashmap::{DashMap, SharedValue};
-use hashbrown::{hash_map::RawEntryMut, HashMap};
+use hashbrown::raw::RawTable;
 use rustc_hash::FxHasher;
 use triomphe::Arc;
 
 type InternMap<T> = DashMap<Arc<T>, (), BuildHasherDefault<FxHasher>>;
-type Guard<T> = dashmap::RwLockWriteGuard<
-    'static,
-    HashMap<Arc<T>, SharedValue<()>, BuildHasherDefault<FxHasher>>,
->;
+type Guard<T> = dashmap::RwLockWriteGuard<'static, RawTable<(Arc<T>, SharedValue<()>)>>;
 
 mod symbol;
-pub use self::symbol::{symbols as sym, Symbol};
+pub use self::symbol::{Symbol, symbols as sym};
 
 pub struct Interned<T: Internable + ?Sized> {
     arc: Arc<T>,
 }
 
 impl<T: Internable> Interned<T> {
+    #[inline]
     pub fn new(obj: T) -> Self {
-        let (mut shard, hash) = Self::select(&obj);
-        // Atomically,
-        // - check if `obj` is already in the map
-        //   - if so, clone its `Arc` and return it
-        //   - if not, box it up, insert it, and return a clone
-        // This needs to be atomic (locking the shard) to avoid races with other thread, which could
-        // insert the same object between us looking it up and inserting it.
-        match shard.raw_entry_mut().from_key_hashed_nocheck(hash, &obj) {
-            RawEntryMut::Occupied(occ) => Self { arc: occ.key().clone() },
-            RawEntryMut::Vacant(vac) => Self {
-                arc: vac.insert_hashed_nocheck(hash, Arc::new(obj), SharedValue::new(())).0.clone(),
-            },
-        }
+        Self::new_generic(obj)
     }
 }
 
 impl Interned<str> {
+    #[inline]
     pub fn new_str(s: &str) -> Self {
-        let (mut shard, hash) = Self::select(s);
+        Self::new_generic(s)
+    }
+}
+
+impl<T: Internable + ?Sized> Interned<T> {
+    #[inline]
+    pub fn new_generic<U>(obj: U) -> Self
+    where
+        U: Borrow<T>,
+        Arc<T>: From<U>,
+    {
+        let storage = T::storage().get();
+        let (mut shard, hash) = Self::select(storage, obj.borrow());
         // Atomically,
         // - check if `obj` is already in the map
         //   - if so, clone its `Arc` and return it
         //   - if not, box it up, insert it, and return a clone
         // This needs to be atomic (locking the shard) to avoid races with other thread, which could
         // insert the same object between us looking it up and inserting it.
-        match shard.raw_entry_mut().from_key_hashed_nocheck(hash, s) {
-            RawEntryMut::Occupied(occ) => Self { arc: occ.key().clone() },
-            RawEntryMut::Vacant(vac) => Self {
-                arc: vac.insert_hashed_nocheck(hash, Arc::from(s), SharedValue::new(())).0.clone(),
+        let bucket = match shard.find_or_find_insert_slot(
+            hash,
+            |(other, _)| **other == *obj.borrow(),
+            |(x, _)| Self::hash(storage, x),
+        ) {
+            Ok(bucket) => bucket,
+            // SAFETY: The slot came from `find_or_find_insert_slot()`, and the table hasn't been modified since then.
+            Err(insert_slot) => unsafe {
+                shard.insert_in_slot(hash, insert_slot, (Arc::from(obj), SharedValue::new(())))
             },
-        }
+        };
+        // SAFETY: We just retrieved/inserted this bucket.
+        unsafe { Self { arc: bucket.as_ref().0.clone() } }
     }
-}
 
-impl<T: Internable + ?Sized> Interned<T> {
     #[inline]
-    fn select(obj: &T) -> (Guard<T>, u64) {
-        let storage = T::storage().get();
-        let hash = {
-            let mut hasher = std::hash::BuildHasher::build_hasher(storage.hasher());
-            obj.hash(&mut hasher);
-            hasher.finish()
-        };
+    fn select(storage: &'static InternMap<T>, obj: &T) -> (Guard<T>, u64) {
+        let hash = Self::hash(storage, obj);
         let shard_idx = storage.determine_shard(hash as usize);
         let shard = &storage.shards()[shard_idx];
         (shard.write(), hash)
     }
+
+    #[inline]
+    fn hash(storage: &'static InternMap<T>, obj: &T) -> u64 {
+        storage.hasher().hash_one(obj)
+    }
 }
 
 impl<T: Internable + ?Sized> Drop for Interned<T> {
@@ -93,21 +98,20 @@ impl<T: Internable + ?Sized> Drop for Interned<T> {
 impl<T: Internable + ?Sized> Interned<T> {
     #[cold]
     fn drop_slow(&mut self) {
-        let (mut shard, hash) = Self::select(&self.arc);
+        let storage = T::storage().get();
+        let (mut shard, hash) = Self::select(storage, &self.arc);
 
         if Arc::count(&self.arc) != 2 {
             // Another thread has interned another copy
             return;
         }
 
-        match shard.raw_entry_mut().from_key_hashed_nocheck(hash, &self.arc) {
-            RawEntryMut::Occupied(occ) => occ.remove(),
-            RawEntryMut::Vacant(_) => unreachable!(),
-        };
+        shard.remove_entry(hash, |(other, _)| **other == *self.arc);
 
         // Shrink the backing storage if the shard is less than 50% occupied.
         if shard.len() * 2 < shard.capacity() {
-            shard.shrink_to_fit();
+            let len = shard.len();
+            shard.shrink_to(len, |(x, _)| Self::hash(storage, x));
         }
     }
 }
@@ -177,7 +181,10 @@ pub struct InternStorage<T: ?Sized> {
     map: OnceLock<InternMap<T>>,
 }
 
-#[allow(clippy::new_without_default)] // this a const fn, so it can't be default
+#[allow(
+    clippy::new_without_default,
+    reason = "this a const fn, so it can't be default yet. See <https://github.com/rust-lang/rust/issues/63065>"
+)]
 impl<T: ?Sized> InternStorage<T> {
     pub const fn new() -> Self {
         Self { map: OnceLock::new() }
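The `Interned::new_generic` rewrite above keeps the whole lookup-or-insert inside a single shard write lock, now going through hashbrown's raw-table API (`find_or_find_insert_slot` / `insert_in_slot`) instead of the removed raw-entry API. As a rough sketch of the same check-then-insert-under-one-lock idea, here is a simplified interner built only on the standard library; a single `Mutex<HashSet<Arc<str>>>` stands in for the sharded `DashMap`, so this illustrates the pattern rather than the crate's actual implementation:

```rust
use std::collections::HashSet;
use std::sync::{Arc, Mutex, OnceLock};

// A single global table; one mutex stands in for DashMap's per-shard locks.
static INTERNED: OnceLock<Mutex<HashSet<Arc<str>>>> = OnceLock::new();

/// Returns a shared `Arc<str>` for `s`, reusing an existing allocation if one is interned.
fn intern(s: &str) -> Arc<str> {
    let table = INTERNED.get_or_init(|| Mutex::new(HashSet::new()));
    // Holding the lock across both the lookup and the insert is what makes this
    // race-free: no other thread can intern the same string in between.
    let mut guard = table.lock().unwrap();
    if let Some(existing) = guard.get(s) {
        return Arc::clone(existing);
    }
    let arc: Arc<str> = Arc::from(s);
    guard.insert(Arc::clone(&arc));
    arc
}

fn main() {
    let a = intern("hello");
    let b = intern("hello");
    // Both handles point at the same allocation.
    assert!(Arc::ptr_eq(&a, &b));
}
```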
diff --git a/src/tools/rust-analyzer/crates/intern/src/symbol.rs b/src/tools/rust-analyzer/crates/intern/src/symbol.rs
index 0fa6701ca3fa3..89c3be96fcb96 100644
--- a/src/tools/rust-analyzer/crates/intern/src/symbol.rs
+++ b/src/tools/rust-analyzer/crates/intern/src/symbol.rs
@@ -2,16 +2,15 @@
 //! supporting compile time declaration of symbols that will never be freed.
 
 use std::{
-    borrow::Borrow,
     fmt,
-    hash::{BuildHasherDefault, Hash, Hasher},
+    hash::{BuildHasher, BuildHasherDefault, Hash},
     mem::{self, ManuallyDrop},
     ptr::NonNull,
     sync::OnceLock,
 };
 
 use dashmap::{DashMap, SharedValue};
-use hashbrown::{hash_map::RawEntryMut, HashMap};
+use hashbrown::raw::RawTable;
 use rustc_hash::FxHasher;
 use triomphe::Arc;
 
@@ -127,31 +126,39 @@ impl fmt::Debug for Symbol {
 const _: () = assert!(size_of::<Symbol>() == size_of::<NonNull<()>>());
 const _: () = assert!(align_of::<Symbol>() == align_of::<NonNull<()>>());
 
-static MAP: OnceLock<DashMap<SymbolProxy, (), BuildHasherDefault<FxHasher>>> = OnceLock::new();
+type Map = DashMap<Symbol, (), BuildHasherDefault<FxHasher>>;
+static MAP: OnceLock<Map> = OnceLock::new();
 
 impl Symbol {
     pub fn intern(s: &str) -> Self {
-        let (mut shard, hash) = Self::select_shard(s);
+        let storage = MAP.get_or_init(symbols::prefill);
+        let (mut shard, hash) = Self::select_shard(storage, s);
         // Atomically,
         // - check if `obj` is already in the map
         //   - if so, copy out its entry, conditionally bumping the backing Arc and return it
         //   - if not, put it into a box and then into an Arc, insert it, bump the ref-count and return the copy
         // This needs to be atomic (locking the shard) to avoid races with other thread, which could
         // insert the same object between us looking it up and inserting it.
-        match shard.raw_entry_mut().from_key_hashed_nocheck(hash, s) {
-            RawEntryMut::Occupied(occ) => Self { repr: increase_arc_refcount(occ.key().0) },
-            RawEntryMut::Vacant(vac) => Self {
-                repr: increase_arc_refcount(
-                    vac.insert_hashed_nocheck(
-                        hash,
-                        SymbolProxy(TaggedArcPtr::arc(Arc::new(Box::<str>::from(s)))),
+        let bucket = match shard.find_or_find_insert_slot(
+            hash,
+            |(other, _)| other.as_str() == s,
+            |(x, _)| Self::hash(storage, x.as_str()),
+        ) {
+            Ok(bucket) => bucket,
+            // SAFETY: The slot came from `find_or_find_insert_slot()`, and the table hasn't been modified since then.
+            Err(insert_slot) => unsafe {
+                shard.insert_in_slot(
+                    hash,
+                    insert_slot,
+                    (
+                        Symbol { repr: TaggedArcPtr::arc(Arc::new(Box::<str>::from(s))) },
                         SharedValue::new(()),
-                    )
-                    .0
-                     .0,
-                ),
+                    ),
+                )
             },
-        }
+        };
+        // SAFETY: We just retrieved/inserted this bucket.
+        unsafe { bucket.as_ref().0.clone() }
     }
 
     pub fn integer(i: usize) -> Self {
@@ -180,38 +187,34 @@ impl Symbol {
         symbols::__empty.clone()
     }
 
+    #[inline]
     pub fn as_str(&self) -> &str {
         self.repr.as_str()
     }
 
     #[inline]
     fn select_shard(
+        storage: &'static Map,
         s: &str,
-    ) -> (
-        dashmap::RwLockWriteGuard<
-            'static,
-            HashMap<SymbolProxy, SharedValue<()>, BuildHasherDefault<FxHasher>>,
-        >,
-        u64,
-    ) {
-        let storage = MAP.get_or_init(symbols::prefill);
-        let hash = {
-            let mut hasher = std::hash::BuildHasher::build_hasher(storage.hasher());
-            s.hash(&mut hasher);
-            hasher.finish()
-        };
+    ) -> (dashmap::RwLockWriteGuard<'static, RawTable<(Symbol, SharedValue<()>)>>, u64) {
+        let hash = Self::hash(storage, s);
         let shard_idx = storage.determine_shard(hash as usize);
         let shard = &storage.shards()[shard_idx];
         (shard.write(), hash)
     }
 
+    #[inline]
+    fn hash(storage: &'static Map, s: &str) -> u64 {
+        storage.hasher().hash_one(s)
+    }
+
     #[cold]
     fn drop_slow(arc: &Arc<Box<str>>) {
-        let (mut shard, hash) = Self::select_shard(arc);
+        let storage = MAP.get_or_init(symbols::prefill);
+        let (mut shard, hash) = Self::select_shard(storage, arc);
 
         match Arc::count(arc) {
-            0 => unreachable!(),
-            1 => unreachable!(),
+            0 | 1 => unreachable!(),
             2 => (),
             _ => {
                 // Another thread has interned another copy
@@ -219,19 +222,17 @@ impl Symbol {
             }
         }
 
-        let ptr = match shard.raw_entry_mut().from_key_hashed_nocheck::<str>(hash, arc.as_ref()) {
-            RawEntryMut::Occupied(occ) => occ.remove_entry(),
-            RawEntryMut::Vacant(_) => unreachable!(),
-        }
-        .0
-         .0;
+        let s = &***arc;
+        let (ptr, _) = shard.remove_entry(hash, |(x, _)| x.as_str() == s).unwrap();
+        let ptr = ManuallyDrop::new(ptr);
         // SAFETY: We're dropping, we have ownership.
-        ManuallyDrop::into_inner(unsafe { ptr.try_as_arc_owned().unwrap() });
+        ManuallyDrop::into_inner(unsafe { ptr.repr.try_as_arc_owned().unwrap() });
         debug_assert_eq!(Arc::count(arc), 1);
 
         // Shrink the backing storage if the shard is less than 50% occupied.
         if shard.len() * 2 < shard.capacity() {
-            shard.shrink_to_fit();
+            let len = shard.len();
+            shard.shrink_to(len, |(x, _)| Self::hash(storage, x.as_str()));
         }
     }
 }
@@ -276,22 +277,6 @@ impl fmt::Display for Symbol {
     }
 }
 
-// only exists so we can use `from_key_hashed_nocheck` with a &str
-#[derive(Debug, PartialEq, Eq)]
-struct SymbolProxy(TaggedArcPtr);
-
-impl Hash for SymbolProxy {
-    fn hash<H: Hasher>(&self, state: &mut H) {
-        self.0.as_str().hash(state);
-    }
-}
-
-impl Borrow<str> for SymbolProxy {
-    fn borrow(&self) -> &str {
-        self.0.as_str()
-    }
-}
-
 #[cfg(test)]
 mod tests {
     use super::*;
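Both this file and `intern/src/lib.rs` replace the manual `build_hasher()` / `hash()` / `finish()` sequence with `BuildHasher::hash_one`, which performs the same three steps in one call. A small standalone illustration with the standard library's `RandomState` (the crate itself uses `BuildHasherDefault<FxHasher>`, but the trait method behaves the same):

```rust
use std::collections::hash_map::RandomState;
use std::hash::{BuildHasher, Hash, Hasher};

fn main() {
    let build_hasher = RandomState::new();

    // The old pattern: build a hasher, feed the value, then finish.
    let mut hasher = build_hasher.build_hasher();
    "rust-analyzer".hash(&mut hasher);
    let manual = hasher.finish();

    // The one-shot equivalent used throughout this diff.
    let one_shot = build_hasher.hash_one("rust-analyzer");

    assert_eq!(manual, one_shot);
}
```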
diff --git a/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs b/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs
index 6b77c72cee897..abde48d151271 100644
--- a/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs
+++ b/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs
@@ -1,72 +1,47 @@
 //! Module defining all known symbols required by the rest of rust-analyzer.
 #![allow(non_upper_case_globals)]
 
-use std::hash::{BuildHasherDefault, Hash as _, Hasher as _};
+use std::hash::{BuildHasher, BuildHasherDefault};
 
 use dashmap::{DashMap, SharedValue};
 use rustc_hash::FxHasher;
 
-use crate::{
-    symbol::{SymbolProxy, TaggedArcPtr},
-    Symbol,
-};
+use crate::{Symbol, symbol::TaggedArcPtr};
 
 macro_rules! define_symbols {
     (@WITH_NAME: $($alias:ident = $value:literal,)* @PLAIN: $($name:ident,)*) => {
-        // We define symbols as both `const`s and `static`s because some const code requires const symbols,
-        // but code from before the transition relies on the lifetime of the predefined symbols and making them
-        // `const`s make it error (because now they're temporaries). In the future we probably should only
-        // use consts.
-
-        /// Predefined symbols as `const`s (instead of the default `static`s).
-        pub mod consts {
-            use super::{Symbol, TaggedArcPtr};
-
-            // The strings should be in `static`s so that symbol equality holds.
-            $(
-                pub const $name: Symbol = {
-                    static SYMBOL_STR: &str = stringify!($name);
-                    Symbol { repr: TaggedArcPtr::non_arc(&SYMBOL_STR) }
-                };
-            )*
-            $(
-                pub const $alias: Symbol = {
-                    static SYMBOL_STR: &str = $value;
-                    Symbol { repr: TaggedArcPtr::non_arc(&SYMBOL_STR) }
-                };
-            )*
-        }
-
+        // The strings should be in `static`s so that symbol equality holds.
         $(
-            pub static $name: Symbol = consts::$name;
+            pub const $name: Symbol = {
+                static SYMBOL_STR: &str = stringify!($name);
+                Symbol { repr: TaggedArcPtr::non_arc(&SYMBOL_STR) }
+            };
         )*
         $(
-            pub static $alias: Symbol = consts::$alias;
+            pub const $alias: Symbol = {
+                static SYMBOL_STR: &str = $value;
+                Symbol { repr: TaggedArcPtr::non_arc(&SYMBOL_STR) }
+            };
         )*
 
 
-        pub(super) fn prefill() -> DashMap<SymbolProxy, (), BuildHasherDefault<FxHasher>> {
-            let mut dashmap_ = <DashMap<SymbolProxy, (), BuildHasherDefault<FxHasher>>>::with_hasher(BuildHasherDefault::default());
+        pub(super) fn prefill() -> DashMap<Symbol, (), BuildHasherDefault<FxHasher>> {
+            let mut dashmap_ = <DashMap<Symbol, (), BuildHasherDefault<FxHasher>>>::with_hasher(BuildHasherDefault::default());
 
-            let hash_thing_ = |hasher_: &BuildHasherDefault<FxHasher>, it_: &SymbolProxy| {
-                let mut hasher_ = std::hash::BuildHasher::build_hasher(hasher_);
-                it_.hash(&mut hasher_);
-                hasher_.finish()
-            };
+            let hasher_ = dashmap_.hasher().clone();
+            let hash_one = |it_: &str| hasher_.hash_one(it_);
             {
                 $(
-
-                    let proxy_ = SymbolProxy($name.repr);
-                    let hash_ = hash_thing_(dashmap_.hasher(), &proxy_);
+                    let s = stringify!($name);
+                    let hash_ = hash_one(s);
                     let shard_idx_ = dashmap_.determine_shard(hash_ as usize);
-                    dashmap_.shards_mut()[shard_idx_].get_mut().raw_entry_mut().from_hash(hash_, |k| k == &proxy_).insert(proxy_, SharedValue::new(()));
+                    dashmap_.shards_mut()[shard_idx_].get_mut().insert(hash_, ($name, SharedValue::new(())), |(x, _)| hash_one(x.as_str()));
                 )*
                 $(
-
-                    let proxy_ = SymbolProxy($alias.repr);
-                    let hash_ = hash_thing_(dashmap_.hasher(), &proxy_);
+                    let s = $value;
+                    let hash_ = hash_one(s);
                     let shard_idx_ = dashmap_.determine_shard(hash_ as usize);
-                    dashmap_.shards_mut()[shard_idx_].get_mut().raw_entry_mut().from_hash(hash_, |k| k == &proxy_).insert(proxy_, SharedValue::new(()));
+                    dashmap_.shards_mut()[shard_idx_].get_mut().insert(hash_, ($alias, SharedValue::new(())), |(x, _)| hash_one(x.as_str()));
                 )*
             }
             dashmap_
@@ -161,6 +136,7 @@ define_symbols! {
     bitxor_assign,
     bitxor,
     bool,
+    bootstrap,
     box_free,
     Box,
     boxed,
@@ -511,6 +487,7 @@ define_symbols! {
     unreachable_2021,
     unreachable,
     unsafe_cell,
+    unsafe_pinned,
     unsize,
     unstable,
     usize,
@@ -521,4 +498,12 @@ define_symbols! {
     win64,
     array,
     boxed_slice,
+    completions,
+    ignore_flyimport,
+    ignore_flyimport_methods,
+    ignore_methods,
+    position,
+    flags,
+    precision,
+    width,
 }
diff --git a/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml b/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml
index 23fd50a05644c..91b012e05071f 100644
--- a/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml
@@ -21,7 +21,6 @@ tracing.workspace = true
 
 hir-expand.workspace = true
 ide-db.workspace = true
-paths.workspace = true
 proc-macro-api.workspace = true
 project-model.workspace = true
 span.workspace = true
diff --git a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs
index 72ca85c6a2fde..3e52dbaea6549 100644
--- a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs
@@ -2,25 +2,26 @@
 //! for incorporating changes.
 // Note, don't remove any public api from this. This API is consumed by external tools
 // to run rust-analyzer as a library.
-use std::{collections::hash_map::Entry, iter, mem, path::Path, sync};
+use std::{collections::hash_map::Entry, mem, path::Path, sync};
 
-use crossbeam_channel::{unbounded, Receiver};
+use crossbeam_channel::{Receiver, unbounded};
 use hir_expand::proc_macro::{
     ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind, ProcMacroLoadResult,
-    ProcMacros,
+    ProcMacrosBuilder,
 };
 use ide_db::{
-    base_db::{CrateGraph, CrateWorkspaceData, Env, SourceRoot, SourceRootId},
-    prime_caches, ChangeWithProcMacros, FxHashMap, RootDatabase,
+    ChangeWithProcMacros, FxHashMap, RootDatabase,
+    base_db::{CrateGraphBuilder, Env, SourceRoot, SourceRootId},
+    prime_caches,
 };
 use itertools::Itertools;
 use proc_macro_api::{MacroDylib, ProcMacroClient};
 use project_model::{CargoConfig, PackageRoot, ProjectManifest, ProjectWorkspace};
 use span::Span;
 use vfs::{
+    AbsPath, AbsPathBuf, VfsPath,
     file_set::FileSetConfig,
     loader::{Handle, LoadingProgress},
-    AbsPath, AbsPathBuf, VfsPath,
 };
 
 #[derive(Debug)]
@@ -65,7 +66,7 @@ pub fn load_workspace_at(
 
 pub fn load_workspace(
     ws: ProjectWorkspace,
-    extra_env: &FxHashMap<String, String>,
+    extra_env: &FxHashMap<String, Option<String>>,
     load_config: &LoadCargoConfig,
 ) -> anyhow::Result<(RootDatabase, vfs::Vfs, Option<ProcMacroClient>)> {
     let (sender, receiver) = unbounded();
@@ -139,7 +140,6 @@ pub fn load_workspace(
     });
 
     let db = load_crate_graph(
-        &ws,
         crate_graph,
         proc_macros,
         project_folders.source_root_config,
@@ -418,18 +418,15 @@ pub fn load_proc_macro(
 }
 
 fn load_crate_graph(
-    ws: &ProjectWorkspace,
-    crate_graph: CrateGraph,
-    proc_macros: ProcMacros,
+    crate_graph: CrateGraphBuilder,
+    proc_macros: ProcMacrosBuilder,
     source_root_config: SourceRootConfig,
     vfs: &mut vfs::Vfs,
     receiver: &Receiver<vfs::loader::Message>,
 ) -> RootDatabase {
-    let ProjectWorkspace { toolchain, target_layout, .. } = ws;
-
     let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<u16>().ok());
     let mut db = RootDatabase::new(lru_cap);
-    let mut analysis_change = ChangeWithProcMacros::new();
+    let mut analysis_change = ChangeWithProcMacros::default();
 
     db.enable_proc_attr_macros();
 
@@ -461,14 +458,7 @@ fn load_crate_graph(
     let source_roots = source_root_config.partition(vfs);
     analysis_change.set_roots(source_roots);
 
-    let ws_data = crate_graph
-        .iter()
-        .zip(iter::repeat(From::from(CrateWorkspaceData {
-            data_layout: target_layout.clone(),
-            toolchain: toolchain.clone(),
-        })))
-        .collect();
-    analysis_change.set_crate_graph(crate_graph, ws_data);
+    analysis_change.set_crate_graph(crate_graph);
     analysis_change.set_proc_macros(proc_macros);
 
     db.apply_change(analysis_change);
@@ -494,7 +484,7 @@ fn expander_to_proc_macro(
     }
 }
 
-#[derive(Debug)]
+#[derive(Debug, PartialEq, Eq)]
 struct Expander(proc_macro_api::ProcMacro);
 
 impl ProcMacroExpander for Expander {
@@ -506,7 +496,7 @@ impl ProcMacroExpander for Expander {
         def_site: Span,
         call_site: Span,
         mixed_site: Span,
-        current_dir: Option<String>,
+        current_dir: String,
     ) -> Result<tt::TopSubtree<Span>, ProcMacroExpansionError> {
         match self.0.expand(
             subtree.view(),
@@ -522,11 +512,15 @@ impl ProcMacroExpander for Expander {
             Err(err) => Err(ProcMacroExpansionError::System(err.to_string())),
         }
     }
+
+    fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
+        other.as_any().downcast_ref::<Self>().is_some_and(|other| self == other)
+    }
 }
 
 #[cfg(test)]
 mod tests {
-    use ide_db::base_db::SourceDatabase;
+    use ide_db::base_db::RootQueryDb;
     use vfs::file_set::FileSetConfigBuilder;
 
     use super::*;
@@ -543,7 +537,7 @@ mod tests {
         let (db, _vfs, _proc_macro) =
             load_workspace_at(path, &cargo_config, &load_cargo_config, &|_| {}).unwrap();
 
-        let n_crates = db.crate_graph().iter().count();
+        let n_crates = db.all_crates().len();
         // RA has quite a few crates, but the exact count doesn't matter
         assert!(n_crates > 20);
     }
@@ -633,7 +627,7 @@ mod tests {
         let fsc = builder.build();
         let src = SourceRootConfig { fsc, local_filesets: vec![0, 1, 2, 3] };
         let mut vc = src.source_root_parent_map().into_iter().collect::<Vec<_>>();
-        vc.sort_by(|x, y| x.0 .0.cmp(&y.0 .0));
+        vc.sort_by(|x, y| x.0.0.cmp(&y.0.0));
 
         assert_eq!(vc, vec![(SourceRootId(2), SourceRootId(1)), (SourceRootId(3), SourceRootId(1))])
     }
@@ -648,7 +642,7 @@ mod tests {
         let fsc = builder.build();
         let src = SourceRootConfig { fsc, local_filesets: vec![0, 1, 3] };
         let mut vc = src.source_root_parent_map().into_iter().collect::<Vec<_>>();
-        vc.sort_by(|x, y| x.0 .0.cmp(&y.0 .0));
+        vc.sort_by(|x, y| x.0.0.cmp(&y.0.0));
 
         assert_eq!(vc, vec![(SourceRootId(3), SourceRootId(1)),])
     }
@@ -663,7 +657,7 @@ mod tests {
         let fsc = builder.build();
         let src = SourceRootConfig { fsc, local_filesets: vec![0, 1, 3] };
         let mut vc = src.source_root_parent_map().into_iter().collect::<Vec<_>>();
-        vc.sort_by(|x, y| x.0 .0.cmp(&y.0 .0));
+        vc.sort_by(|x, y| x.0.0.cmp(&y.0.0));
 
         assert_eq!(vc, vec![(SourceRootId(3), SourceRootId(1)),])
     }
@@ -679,7 +673,7 @@ mod tests {
         let fsc = builder.build();
         let src = SourceRootConfig { fsc, local_filesets: vec![0, 1] };
         let mut vc = src.source_root_parent_map().into_iter().collect::<Vec<_>>();
-        vc.sort_by(|x, y| x.0 .0.cmp(&y.0 .0));
+        vc.sort_by(|x, y| x.0.0.cmp(&y.0.0));
 
         assert_eq!(vc, vec![(SourceRootId(1), SourceRootId(0)),])
     }
@@ -695,7 +689,7 @@ mod tests {
         let fsc = builder.build();
         let src = SourceRootConfig { fsc, local_filesets: vec![0, 1] };
         let mut vc = src.source_root_parent_map().into_iter().collect::<Vec<_>>();
-        vc.sort_by(|x, y| x.0 .0.cmp(&y.0 .0));
+        vc.sort_by(|x, y| x.0.0.cmp(&y.0.0));
 
         assert_eq!(vc, vec![(SourceRootId(1), SourceRootId(0)),])
     }
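`Expander` now derives `PartialEq`/`Eq` and implements the new `eq_dyn` hook by downcasting the other expander through `Any`, which is the usual way to recover equality for trait objects. A hedged, self-contained sketch of that pattern (the trait and type names below are illustrative, not the actual `ProcMacroExpander` API):

```rust
use std::any::Any;

// Illustrative stand-in for `ProcMacroExpander`: object-safe equality is
// recovered by downcasting the other side to the concrete type.
trait MacroExpander: Any {
    fn as_any(&self) -> &dyn Any;
    fn eq_dyn(&self, other: &dyn MacroExpander) -> bool;
}

#[derive(PartialEq, Eq)]
struct IdentityExpander(u32);

impl MacroExpander for IdentityExpander {
    fn as_any(&self) -> &dyn Any {
        self
    }
    fn eq_dyn(&self, other: &dyn MacroExpander) -> bool {
        // If `other` is a different concrete type, the downcast fails and they compare unequal.
        other.as_any().downcast_ref::<Self>().is_some_and(|other| self == other)
    }
}

fn main() {
    let a: Box<dyn MacroExpander> = Box::new(IdentityExpander(1));
    let b: Box<dyn MacroExpander> = Box::new(IdentityExpander(1));
    let c: Box<dyn MacroExpander> = Box::new(IdentityExpander(2));
    assert!(a.eq_dyn(&*b));
    assert!(!a.eq_dyn(&*c));
}
```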
diff --git a/src/tools/rust-analyzer/crates/mbe/Cargo.toml b/src/tools/rust-analyzer/crates/mbe/Cargo.toml
index e6fbb298ebdb7..f3ab093bae08a 100644
--- a/src/tools/rust-analyzer/crates/mbe/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/mbe/Cargo.toml
@@ -12,15 +12,13 @@ rust-version.workspace = true
 [lib]
 
 [dependencies]
-cov-mark = "2.0.0-pre.1"
+cov-mark = "2.0.0"
 rustc-hash.workspace = true
 smallvec.workspace = true
-tracing.workspace = true
 arrayvec.workspace = true
 ra-ap-rustc_lexer.workspace = true
 
 # local deps
-syntax.workspace = true
 parser.workspace = true
 tt.workspace = true
 stdx.workspace = true
@@ -31,9 +29,10 @@ syntax-bridge.workspace = true
 [dev-dependencies]
 test-utils.workspace = true
 expect-test.workspace = true
+syntax.workspace = true
 
 [features]
-in-rust-tree = ["parser/in-rust-tree", "tt/in-rust-tree", "syntax/in-rust-tree"]
+in-rust-tree = ["parser/in-rust-tree", "tt/in-rust-tree"]
 
 [lints]
 workspace = true
diff --git a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs
index 89c300300379c..db75dceae1cb9 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs
@@ -5,18 +5,19 @@ use rustc_hash::FxHashMap;
 use span::{Edition, Span};
 use stdx::itertools::Itertools;
 use syntax::{
-    ast::{self, HasName},
     AstNode,
+    ast::{self, HasName},
 };
 use syntax_bridge::{
-    dummy_test_span_utils::{DummyTestSpanMap, DUMMY},
-    syntax_node_to_token_tree, DocCommentDesugarMode,
+    DocCommentDesugarMode,
+    dummy_test_span_utils::{DUMMY, DummyTestSpanMap},
+    syntax_node_to_token_tree,
 };
 use test_utils::{bench, bench_fixture, skip_slow_tests};
 
 use crate::{
-    parser::{MetaVarKind, Op, RepeatKind, Separator},
     DeclarativeMacro,
+    parser::{MetaVarKind, Op, RepeatKind, Separator},
 };
 
 #[test]
@@ -53,7 +54,7 @@ fn benchmark_expand_macro_rules() {
             .map(|(id, tt)| {
                 let res = rules[&id].expand(&tt, |_| (), DUMMY, Edition::CURRENT);
                 assert!(res.err.is_none());
-                res.value.0 .0.len()
+                res.value.0.0.len()
             })
             .sum()
     };
diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander.rs b/src/tools/rust-analyzer/crates/mbe/src/expander.rs
index 5539a88c707d1..f910f9f9d753f 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/expander.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/expander.rs
@@ -9,7 +9,7 @@ use intern::Symbol;
 use rustc_hash::FxHashMap;
 use span::{Edition, Span};
 
-use crate::{parser::MetaVarKind, ExpandError, ExpandErrorKind, ExpandResult, MatchedArmIndex};
+use crate::{ExpandError, ExpandErrorKind, ExpandResult, MatchedArmIndex, parser::MetaVarKind};
 
 pub(crate) fn expand_rules(
     rules: &[crate::Rule],
diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs
index b7f25aa380961..940aaacb02ed5 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs
@@ -61,19 +61,19 @@
 
 use std::{rc::Rc, sync::Arc};
 
-use intern::{sym, Symbol};
-use smallvec::{smallvec, SmallVec};
+use intern::{Symbol, sym};
+use smallvec::{SmallVec, smallvec};
 use span::{Edition, Span};
 use tt::{
-    iter::{TtElement, TtIter},
     DelimSpan,
+    iter::{TtElement, TtIter},
 };
 
 use crate::{
+    ExpandError, ExpandErrorKind, MetaTemplate, ValueResult,
     expander::{Binding, Bindings, ExpandResult, Fragment},
     expect_fragment,
     parser::{ExprKind, MetaVarKind, Op, RepeatKind, Separator},
-    ExpandError, ExpandErrorKind, MetaTemplate, ValueResult,
 };
 
 impl<'a> Bindings<'a> {
diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs
index 7710ea7938951..ec277ba72e90e 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs
@@ -1,14 +1,14 @@
 //! Transcriber takes a template, like `fn $ident() {}`, a set of bindings like
 //! `$ident => foo`, interpolates variables in the template, to get `fn foo() {}`
 
-use intern::{sym, Symbol};
+use intern::{Symbol, sym};
 use span::{Edition, Span};
-use tt::{iter::TtElement, Delimiter, TopSubtreeBuilder};
+use tt::{Delimiter, TopSubtreeBuilder, iter::TtElement};
 
 use crate::{
+    ExpandError, ExpandErrorKind, ExpandResult, MetaTemplate,
     expander::{Binding, Bindings, Fragment},
     parser::{ConcatMetaVarExprElem, MetaVarKind, Op, RepeatKind, Separator},
-    ExpandError, ExpandErrorKind, ExpandResult, MetaTemplate,
 };
 
 impl<'t> Bindings<'t> {
@@ -80,7 +80,7 @@ impl<'t> Bindings<'t> {
                     | MetaVarKind::Expr(_)
                     | MetaVarKind::Ident => {
                         builder.push(tt::Leaf::Ident(tt::Ident {
-                            sym: sym::missing.clone(),
+                            sym: sym::missing,
                             span,
                             is_raw: tt::IdentIsRaw::No,
                         }));
@@ -93,7 +93,7 @@ impl<'t> Bindings<'t> {
                                 spacing: tt::Spacing::Joint,
                             }),
                             tt::Leaf::Ident(tt::Ident {
-                                sym: sym::missing.clone(),
+                                sym: sym::missing,
                                 span,
                                 is_raw: tt::IdentIsRaw::No,
                             }),
@@ -101,7 +101,7 @@ impl<'t> Bindings<'t> {
                     }
                     MetaVarKind::Literal => {
                         builder.push(tt::Leaf::Ident(tt::Ident {
-                            sym: sym::missing.clone(),
+                            sym: sym::missing,
                             span,
                             is_raw: tt::IdentIsRaw::No,
                         }));
@@ -210,8 +210,11 @@ fn expand_subtree(
             }
             Op::Ignore { name, id } => {
                 // Expand the variable, but ignore the result. This registers the repetition count.
-                // FIXME: Any emitted errors are dropped.
-                let _ = ctx.bindings.get_fragment(name, *id, &mut ctx.nesting, marker);
+                let e = ctx.bindings.get_fragment(name, *id, &mut ctx.nesting, marker).err();
+                // FIXME: The error gets dropped if there were any previous errors.
+                // This should be reworked in a way where the errors can be combined
+                // and reported rather than storing the first error encountered.
+                err = err.or(e);
             }
             Op::Index { depth } => {
                 let index =
@@ -239,9 +242,7 @@ fn expand_subtree(
                 let mut binding = match ctx.bindings.get(name, ctx.call_site) {
                     Ok(b) => b,
                     Err(e) => {
-                        if err.is_none() {
-                            err = Some(e);
-                        }
+                        err = err.or(Some(e));
                         continue;
                     }
                 };
@@ -331,7 +332,10 @@ fn expand_subtree(
                                 }
                                 _ => {
                                     if err.is_none() {
-                                        err = Some(ExpandError::binding_error(var.span, "metavariables of `${concat(..)}` must be of type `ident`, `literal` or `tt`"))
+                                        err = Some(ExpandError::binding_error(
+                                            var.span,
+                                            "metavariables of `${concat(..)}` must be of type `ident`, `literal` or `tt`",
+                                        ))
                                     }
                                     continue;
                                 }
@@ -386,8 +390,13 @@ fn expand_var(
     match ctx.bindings.get_fragment(v, id, &mut ctx.nesting, marker) {
         Ok(fragment) => {
             match fragment {
-                Fragment::Tokens(tt) => builder.extend_with_tt(tt.strip_invisible()),
-                Fragment::TokensOwned(tt) => builder.extend_with_tt(tt.view().strip_invisible()),
+                // rustc spacing is not like ours. Ours is like proc macros': it dictates how puncts will actually be joined.
+                // rustc uses spacing mostly for pretty printing. So we have to deviate a bit from what rustc does here.
+                // Basically, a metavariable can never be joined with whatever comes after it.
+                Fragment::Tokens(tt) => builder.extend_with_tt_alone(tt.strip_invisible()),
+                Fragment::TokensOwned(tt) => {
+                    builder.extend_with_tt_alone(tt.view().strip_invisible())
+                }
                 Fragment::Expr(sub) => {
                     let sub = sub.strip_invisible();
                     let mut span = id;
@@ -399,7 +408,7 @@ fn expand_var(
                     if wrap_in_parens {
                         builder.open(tt::DelimiterKind::Parenthesis, span);
                     }
-                    builder.extend_with_tt(sub);
+                    builder.extend_with_tt_alone(sub);
                     if wrap_in_parens {
                         builder.close(span);
                     }
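The switch to `extend_with_tt_alone` hinges on the proc-macro meaning of `Spacing`: a punct marked `Joint` glues onto the next token when the stream is consumed again, so a metavariable's trailing punct has to end up `Alone`. A small sketch of that effect using the `proc-macro2` crate (an assumed dependency here; rust-analyzer's own `tt` types are different, but the spacing semantics are analogous):

```rust
use proc_macro2::{Punct, Spacing, TokenStream, TokenTree};

// Build two ':' puncts; the spacing of the first decides whether they act as `::`.
fn colons(first: Spacing) -> TokenStream {
    [
        TokenTree::Punct(Punct::new(':', first)),
        TokenTree::Punct(Punct::new(':', Spacing::Alone)),
    ]
    .into_iter()
    .collect()
}

fn main() {
    // With `Joint`, downstream consumers treat the pair as the single `::` operator;
    // with `Alone`, they remain two separate ':' tokens.
    println!("{}", colons(Spacing::Joint));
    println!("{}", colons(Spacing::Alone));
}
```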
diff --git a/src/tools/rust-analyzer/crates/mbe/src/lib.rs b/src/tools/rust-analyzer/crates/mbe/src/lib.rs
index bebd29ef74700..9f9fa36abd46a 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/lib.rs
@@ -21,10 +21,10 @@ mod benchmark;
 #[cfg(test)]
 mod tests;
 
-use span::{Edition, Span, SyntaxContextId};
+use span::{Edition, Span, SyntaxContext};
 use syntax_bridge::to_parser_input;
-use tt::iter::TtIter;
 use tt::DelimSpan;
+use tt::iter::TtIter;
 
 use std::fmt;
 use std::sync::Arc;
@@ -149,7 +149,7 @@ impl DeclarativeMacro {
     /// The old, `macro_rules! m {}` flavor.
     pub fn parse_macro_rules(
         tt: &tt::TopSubtree<Span>,
-        ctx_edition: impl Copy + Fn(SyntaxContextId) -> Edition,
+        ctx_edition: impl Copy + Fn(SyntaxContext) -> Edition,
     ) -> DeclarativeMacro {
         // Note: this parsing can be implemented using mbe machinery itself, by
         // matching against `$($lhs:tt => $rhs:tt);*` pattern, but implementing
@@ -189,7 +189,7 @@ impl DeclarativeMacro {
     pub fn parse_macro2(
         args: Option<&tt::TopSubtree<Span>>,
         body: &tt::TopSubtree<Span>,
-        ctx_edition: impl Copy + Fn(SyntaxContextId) -> Edition,
+        ctx_edition: impl Copy + Fn(SyntaxContext) -> Edition,
     ) -> DeclarativeMacro {
         let mut rules = Vec::new();
         let mut err = None;
@@ -262,7 +262,7 @@ impl DeclarativeMacro {
 
 impl Rule {
     fn parse(
-        edition: impl Copy + Fn(SyntaxContextId) -> Edition,
+        edition: impl Copy + Fn(SyntaxContext) -> Edition,
         src: &mut TtIter<'_, Span>,
     ) -> Result<Self, ParseError> {
         let (_, lhs) =
diff --git a/src/tools/rust-analyzer/crates/mbe/src/parser.rs b/src/tools/rust-analyzer/crates/mbe/src/parser.rs
index 0a670053c9882..fbc353d610348 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/parser.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/parser.rs
@@ -4,9 +4,12 @@
 use std::sync::Arc;
 
 use arrayvec::ArrayVec;
-use intern::{sym, Symbol};
-use span::{Edition, Span, SyntaxContextId};
-use tt::iter::{TtElement, TtIter};
+use intern::{Symbol, sym};
+use span::{Edition, Span, SyntaxContext};
+use tt::{
+    MAX_GLUED_PUNCT_LEN,
+    iter::{TtElement, TtIter},
+};
 
 use crate::ParseError;
 
@@ -28,14 +31,14 @@ pub(crate) struct MetaTemplate(pub(crate) Box<[Op]>);
 
 impl MetaTemplate {
     pub(crate) fn parse_pattern(
-        edition: impl Copy + Fn(SyntaxContextId) -> Edition,
+        edition: impl Copy + Fn(SyntaxContext) -> Edition,
         pattern: TtIter<'_, Span>,
     ) -> Result<Self, ParseError> {
         MetaTemplate::parse(edition, pattern, Mode::Pattern)
     }
 
     pub(crate) fn parse_template(
-        edition: impl Copy + Fn(SyntaxContextId) -> Edition,
+        edition: impl Copy + Fn(SyntaxContext) -> Edition,
         template: TtIter<'_, Span>,
     ) -> Result<Self, ParseError> {
         MetaTemplate::parse(edition, template, Mode::Template)
@@ -46,7 +49,7 @@ impl MetaTemplate {
     }
 
     fn parse(
-        edition: impl Copy + Fn(SyntaxContextId) -> Edition,
+        edition: impl Copy + Fn(SyntaxContext) -> Edition,
         mut src: TtIter<'_, Span>,
         mode: Mode,
     ) -> Result<Self, ParseError> {
@@ -96,7 +99,7 @@ pub(crate) enum Op {
         delimiter: tt::Delimiter<Span>,
     },
     Literal(tt::Literal<Span>),
-    Punct(Box<ArrayVec<tt::Punct<Span>, 3>>),
+    Punct(Box<ArrayVec<tt::Punct<Span>, MAX_GLUED_PUNCT_LEN>>),
     Ident(tt::Ident<Span>),
 }
 
@@ -151,7 +154,7 @@ pub(crate) enum MetaVarKind {
 pub(crate) enum Separator {
     Literal(tt::Literal<Span>),
     Ident(tt::Ident<Span>),
-    Puncts(ArrayVec<tt::Punct<Span>, 3>),
+    Puncts(ArrayVec<tt::Punct<Span>, MAX_GLUED_PUNCT_LEN>),
 }
 
 // Note that when we compare a Separator, we just care about its textual value.
@@ -179,7 +182,7 @@ enum Mode {
 }
 
 fn next_op(
-    edition: impl Copy + Fn(SyntaxContextId) -> Edition,
+    edition: impl Copy + Fn(SyntaxContext) -> Edition,
     first_peeked: TtElement<'_, Span>,
     src: &mut TtIter<'_, Span>,
     mode: Mode,
@@ -194,7 +197,7 @@ fn next_op(
                         let mut res = ArrayVec::new();
                         res.push(*p);
                         Box::new(res)
-                    }))
+                    }));
                 }
                 Some(it) => it,
             };
@@ -212,20 +215,20 @@ fn next_op(
                         Mode::Pattern => {
                             return Err(ParseError::unexpected(
                                 "`${}` metavariable expressions are not allowed in matchers",
-                            ))
+                            ));
                         }
                     },
                     _ => {
                         return Err(ParseError::expected(
                             "expected `$()` repetition or `${}` expression",
-                        ))
+                        ));
                     }
                 },
                 TtElement::Leaf(leaf) => match leaf {
                     tt::Leaf::Ident(ident) if ident.sym == sym::crate_ => {
                         // We simply produce identifier `$crate` here. And it will be resolved when lowering ast to Path.
                         Op::Ident(tt::Ident {
-                            sym: sym::dollar_crate.clone(),
+                            sym: sym::dollar_crate,
                             span: ident.span,
                             is_raw: tt::IdentIsRaw::No,
                         })
@@ -246,7 +249,7 @@ fn next_op(
                         Mode::Pattern => {
                             return Err(ParseError::unexpected(
                                 "`$$` is not allowed on the pattern side",
-                            ))
+                            ));
                         }
                         Mode::Template => Op::Punct({
                             let mut res = ArrayVec::new();
@@ -255,7 +258,7 @@ fn next_op(
                         }),
                     },
                     tt::Leaf::Punct(_) | tt::Leaf::Literal(_) => {
-                        return Err(ParseError::expected("expected ident"))
+                        return Err(ParseError::expected("expected ident"));
                     }
                 },
             }
@@ -287,7 +290,7 @@ fn next_op(
 }
 
 fn eat_fragment_kind(
-    edition: impl Copy + Fn(SyntaxContextId) -> Edition,
+    edition: impl Copy + Fn(SyntaxContext) -> Edition,
     src: &mut TtIter<'_, Span>,
     mode: Mode,
 ) -> Result<Option<MetaVarKind>, ParseError> {
@@ -348,7 +351,7 @@ fn parse_repeat(src: &mut TtIter<'_, Span>) -> Result<(Option<Separator>, Repeat
         };
         match tt {
             tt::Leaf::Ident(_) | tt::Leaf::Literal(_) if has_sep => {
-                return Err(ParseError::InvalidRepeat)
+                return Err(ParseError::InvalidRepeat);
             }
             tt::Leaf::Ident(ident) => separator = Separator::Ident(ident.clone()),
             tt::Leaf::Literal(lit) => separator = Separator::Literal(lit.clone()),
diff --git a/src/tools/rust-analyzer/crates/mbe/src/tests.rs b/src/tools/rust-analyzer/crates/mbe/src/tests.rs
index 4a73b6fa05a88..a5672e4e0504b 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/tests.rs
@@ -3,7 +3,7 @@
 // FIXME: Move more of the nameres independent tests from
 // crates\hir-def\src\macro_expansion_tests\mod.rs to this
 use expect_test::expect;
-use span::{Edition, EditionedFileId, ErasedFileAstId, FileId, Span, SpanAnchor, SyntaxContextId};
+use span::{Edition, EditionedFileId, ErasedFileAstId, FileId, Span, SpanAnchor, SyntaxContext};
 use stdx::format_to;
 use tt::{TextRange, TextSize};
 
@@ -26,7 +26,7 @@ fn check_(
             file_id: EditionedFileId::new(FileId::from_raw(0), def_edition),
             ast_id: ErasedFileAstId::from_raw(0),
         },
-        SyntaxContextId::root(Edition::CURRENT),
+        SyntaxContext::root(Edition::CURRENT),
         decl,
     )
     .unwrap();
@@ -42,7 +42,7 @@ fn check_(
     let arg_tt = syntax_bridge::parse_to_token_tree(
         call_edition,
         call_anchor,
-        SyntaxContextId::root(Edition::CURRENT),
+        SyntaxContext::root(Edition::CURRENT),
         arg,
     )
     .unwrap();
@@ -52,7 +52,7 @@ fn check_(
         Span {
             range: TextRange::up_to(TextSize::of(arg)),
             anchor: call_anchor,
-            ctx: SyntaxContextId::root(Edition::CURRENT),
+            ctx: SyntaxContext::root(Edition::CURRENT),
         },
         def_edition,
     );
@@ -109,8 +109,8 @@ fn unbalanced_brace() {
 "#,
         r#""#,
         expect![[r#"
-            SUBTREE $$ 1:0@0..0#2 1:0@0..0#2
-              SUBTREE {} 0:0@9..10#2 0:0@11..12#2
+            SUBTREE $$ 1:0@0..0#ROOT2024 1:0@0..0#ROOT2024
+              SUBTREE {} 0:0@9..10#ROOT2024 0:0@11..12#ROOT2024
 
             {}"#]],
     );
@@ -132,25 +132,25 @@ fn token_mapping_smoke_test() {
 struct MyTraitMap2
 "#,
         expect![[r#"
-            SUBTREE $$ 1:0@0..20#2 1:0@0..20#2
-              IDENT   struct 0:0@34..40#2
-              IDENT   MyTraitMap2 1:0@8..19#2
-              SUBTREE {} 0:0@48..49#2 0:0@100..101#2
-                IDENT   map 0:0@58..61#2
-                PUNCH   : [alone] 0:0@61..62#2
-                PUNCH   : [joint] 0:0@63..64#2
-                PUNCH   : [alone] 0:0@64..65#2
-                IDENT   std 0:0@65..68#2
-                PUNCH   : [joint] 0:0@68..69#2
-                PUNCH   : [alone] 0:0@69..70#2
-                IDENT   collections 0:0@70..81#2
-                PUNCH   : [joint] 0:0@81..82#2
-                PUNCH   : [alone] 0:0@82..83#2
-                IDENT   HashSet 0:0@83..90#2
-                PUNCH   < [alone] 0:0@90..91#2
-                SUBTREE () 0:0@91..92#2 0:0@92..93#2
-                PUNCH   > [joint] 0:0@93..94#2
-                PUNCH   , [alone] 0:0@94..95#2
+            SUBTREE $$ 1:0@0..20#ROOT2024 1:0@0..20#ROOT2024
+              IDENT   struct 0:0@34..40#ROOT2024
+              IDENT   MyTraitMap2 1:0@8..19#ROOT2024
+              SUBTREE {} 0:0@48..49#ROOT2024 0:0@100..101#ROOT2024
+                IDENT   map 0:0@58..61#ROOT2024
+                PUNCH   : [alone] 0:0@61..62#ROOT2024
+                PUNCH   : [joint] 0:0@63..64#ROOT2024
+                PUNCH   : [alone] 0:0@64..65#ROOT2024
+                IDENT   std 0:0@65..68#ROOT2024
+                PUNCH   : [joint] 0:0@68..69#ROOT2024
+                PUNCH   : [alone] 0:0@69..70#ROOT2024
+                IDENT   collections 0:0@70..81#ROOT2024
+                PUNCH   : [joint] 0:0@81..82#ROOT2024
+                PUNCH   : [alone] 0:0@82..83#ROOT2024
+                IDENT   HashSet 0:0@83..90#ROOT2024
+                PUNCH   < [alone] 0:0@90..91#ROOT2024
+                SUBTREE () 0:0@91..92#ROOT2024 0:0@92..93#ROOT2024
+                PUNCH   > [joint] 0:0@93..94#ROOT2024
+                PUNCH   , [alone] 0:0@94..95#ROOT2024
 
             struct MyTraitMap2 {
                 map: ::std::collections::HashSet<()>,
@@ -179,28 +179,28 @@ fn main() {
 }
 "#,
         expect![[r#"
-            SUBTREE $$ 1:0@0..63#2 1:0@0..63#2
-              IDENT   fn 1:0@1..3#2
-              IDENT   main 1:0@4..8#2
-              SUBTREE () 1:0@8..9#2 1:0@9..10#2
-              SUBTREE {} 1:0@11..12#2 1:0@61..62#2
-                LITERAL Integer 1 1:0@17..18#2
-                PUNCH   ; [alone] 1:0@18..19#2
-                LITERAL Float 1.0 1:0@24..27#2
-                PUNCH   ; [alone] 1:0@27..28#2
-                SUBTREE () 1:0@33..34#2 1:0@39..40#2
-                  SUBTREE () 1:0@34..35#2 1:0@37..38#2
-                    LITERAL Integer 1 1:0@35..36#2
-                    PUNCH   , [alone] 1:0@36..37#2
-                  PUNCH   , [alone] 1:0@38..39#2
-                PUNCH   . [alone] 1:0@40..41#2
-                LITERAL Float 0.0 1:0@41..44#2
-                PUNCH   ; [alone] 1:0@44..45#2
-                IDENT   let 1:0@50..53#2
-                IDENT   x 1:0@54..55#2
-                PUNCH   = [alone] 1:0@56..57#2
-                LITERAL Integer 1 1:0@58..59#2
-                PUNCH   ; [alone] 1:0@59..60#2
+            SUBTREE $$ 1:0@0..63#ROOT2024 1:0@0..63#ROOT2024
+              IDENT   fn 1:0@1..3#ROOT2024
+              IDENT   main 1:0@4..8#ROOT2024
+              SUBTREE () 1:0@8..9#ROOT2024 1:0@9..10#ROOT2024
+              SUBTREE {} 1:0@11..12#ROOT2024 1:0@61..62#ROOT2024
+                LITERAL Integer 1 1:0@17..18#ROOT2024
+                PUNCH   ; [alone] 1:0@18..19#ROOT2024
+                LITERAL Float 1.0 1:0@24..27#ROOT2024
+                PUNCH   ; [alone] 1:0@27..28#ROOT2024
+                SUBTREE () 1:0@33..34#ROOT2024 1:0@39..40#ROOT2024
+                  SUBTREE () 1:0@34..35#ROOT2024 1:0@37..38#ROOT2024
+                    LITERAL Integer 1 1:0@35..36#ROOT2024
+                    PUNCH   , [alone] 1:0@36..37#ROOT2024
+                  PUNCH   , [alone] 1:0@38..39#ROOT2024
+                PUNCH   . [alone] 1:0@40..41#ROOT2024
+                LITERAL Float 0.0 1:0@41..44#ROOT2024
+                PUNCH   ; [alone] 1:0@44..45#ROOT2024
+                IDENT   let 1:0@50..53#ROOT2024
+                IDENT   x 1:0@54..55#ROOT2024
+                PUNCH   = [alone] 1:0@56..57#ROOT2024
+                LITERAL Integer 1 1:0@58..59#ROOT2024
+                PUNCH   ; [alone] 1:0@59..60#ROOT2024
 
             fn main(){
                 1;
@@ -226,14 +226,14 @@ fn expr_2021() {
     const { 1 },
 "#,
         expect![[r#"
-            SUBTREE $$ 1:0@0..25#2 1:0@0..25#2
-              IDENT   _ 1:0@5..6#2
-              PUNCH   ; [joint] 0:0@36..37#2
-              SUBTREE () 0:0@34..35#2 0:0@34..35#2
-                IDENT   const 1:0@12..17#2
-                SUBTREE {} 1:0@18..19#2 1:0@22..23#2
-                  LITERAL Integer 1 1:0@20..21#2
-              PUNCH   ; [alone] 0:0@39..40#2
+            SUBTREE $$ 1:0@0..25#ROOT2024 1:0@0..25#ROOT2024
+              IDENT   _ 1:0@5..6#ROOT2024
+              PUNCH   ; [joint] 0:0@36..37#ROOT2024
+              SUBTREE () 0:0@34..35#ROOT2024 0:0@34..35#ROOT2024
+                IDENT   const 1:0@12..17#ROOT2024
+                SUBTREE {} 1:0@18..19#ROOT2024 1:0@22..23#ROOT2024
+                  LITERAL Integer 1 1:0@20..21#ROOT2024
+              PUNCH   ; [alone] 0:0@39..40#ROOT2024
 
             _;
             (const  {
@@ -254,13 +254,13 @@ fn expr_2021() {
         expect![[r#"
             ExpandError {
                 inner: (
-                    1:0@5..6#2,
+                    1:0@5..6#ROOT2024,
                     NoMatchingRule,
                 ),
             }
 
-            SUBTREE $$ 1:0@0..8#2 1:0@0..8#2
-              PUNCH   ; [alone] 0:0@39..40#2
+            SUBTREE $$ 1:0@0..8#ROOT2024 1:0@0..8#ROOT2024
+              PUNCH   ; [alone] 0:0@39..40#ROOT2024
 
             ;"#]],
     );
@@ -278,13 +278,13 @@ fn expr_2021() {
         expect![[r#"
             ExpandError {
                 inner: (
-                    1:0@5..10#2,
+                    1:0@5..10#ROOT2024,
                     NoMatchingRule,
                 ),
             }
 
-            SUBTREE $$ 1:0@0..18#2 1:0@0..18#2
-              PUNCH   ; [alone] 0:0@39..40#2
+            SUBTREE $$ 1:0@0..18#ROOT2024 1:0@0..18#ROOT2024
+              PUNCH   ; [alone] 0:0@39..40#ROOT2024
 
             ;"#]],
     );
@@ -304,26 +304,26 @@ fn expr_2021() {
     break 'foo bar,
 "#,
         expect![[r#"
-            SUBTREE $$ 1:0@0..76#2 1:0@0..76#2
-              LITERAL Integer 4 1:0@5..6#2
-              PUNCH   ; [joint] 0:0@41..42#2
-              LITERAL Str literal 1:0@12..21#2
-              PUNCH   ; [joint] 0:0@41..42#2
-              SUBTREE () 0:0@39..40#2 0:0@39..40#2
-                IDENT   funcall 1:0@27..34#2
-                SUBTREE () 1:0@34..35#2 1:0@35..36#2
-              PUNCH   ; [joint] 0:0@41..42#2
-              SUBTREE () 0:0@39..40#2 0:0@39..40#2
-                IDENT   future 1:0@42..48#2
-                PUNCH   . [alone] 1:0@48..49#2
-                IDENT   await 1:0@49..54#2
-              PUNCH   ; [joint] 0:0@41..42#2
-              SUBTREE () 0:0@39..40#2 0:0@39..40#2
-                IDENT   break 1:0@60..65#2
-                PUNCH   ' [joint] 1:0@66..67#2
-                IDENT   foo 1:0@67..70#2
-                IDENT   bar 1:0@71..74#2
-              PUNCH   ; [alone] 0:0@44..45#2
+            SUBTREE $$ 1:0@0..76#ROOT2024 1:0@0..76#ROOT2024
+              LITERAL Integer 4 1:0@5..6#ROOT2024
+              PUNCH   ; [joint] 0:0@41..42#ROOT2024
+              LITERAL Str literal 1:0@12..21#ROOT2024
+              PUNCH   ; [joint] 0:0@41..42#ROOT2024
+              SUBTREE () 0:0@39..40#ROOT2024 0:0@39..40#ROOT2024
+                IDENT   funcall 1:0@27..34#ROOT2024
+                SUBTREE () 1:0@34..35#ROOT2024 1:0@35..36#ROOT2024
+              PUNCH   ; [joint] 0:0@41..42#ROOT2024
+              SUBTREE () 0:0@39..40#ROOT2024 0:0@39..40#ROOT2024
+                IDENT   future 1:0@42..48#ROOT2024
+                PUNCH   . [alone] 1:0@48..49#ROOT2024
+                IDENT   await 1:0@49..54#ROOT2024
+              PUNCH   ; [joint] 0:0@41..42#ROOT2024
+              SUBTREE () 0:0@39..40#ROOT2024 0:0@39..40#ROOT2024
+                IDENT   break 1:0@60..65#ROOT2024
+                PUNCH   ' [joint] 1:0@66..67#ROOT2024
+                IDENT   foo 1:0@67..70#ROOT2024
+                IDENT   bar 1:0@71..74#ROOT2024
+              PUNCH   ; [alone] 0:0@44..45#ROOT2024
 
             4;
             "literal";
@@ -345,13 +345,13 @@ fn expr_2021() {
         expect![[r#"
             ExpandError {
                 inner: (
-                    1:0@5..6#2,
+                    1:0@5..6#ROOT2024,
                     NoMatchingRule,
                 ),
             }
 
-            SUBTREE $$ 1:0@0..8#2 1:0@0..8#2
-              PUNCH   ; [alone] 0:0@44..45#2
+            SUBTREE $$ 1:0@0..8#ROOT2024 1:0@0..8#ROOT2024
+              PUNCH   ; [alone] 0:0@44..45#ROOT2024
 
             ;"#]],
     );
diff --git a/src/tools/rust-analyzer/crates/parser/Cargo.toml b/src/tools/rust-analyzer/crates/parser/Cargo.toml
index 114a66add63bd..c80510eedfb8a 100644
--- a/src/tools/rust-analyzer/crates/parser/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/parser/Cargo.toml
@@ -20,7 +20,7 @@ tracing = { workspace = true, optional = true }
 edition.workspace = true
 
 [dev-dependencies]
-expect-test = "1.4.0"
+expect-test = "1.5.1"
 
 stdx.workspace = true
 
diff --git a/src/tools/rust-analyzer/crates/parser/src/event.rs b/src/tools/rust-analyzer/crates/parser/src/event.rs
index b197b086f377a..5be9cb2a24699 100644
--- a/src/tools/rust-analyzer/crates/parser/src/event.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/event.rs
@@ -5,8 +5,8 @@
 use std::mem;
 
 use crate::{
-    output::Output,
     SyntaxKind::{self, *},
+    output::Output,
 };
 
 /// `Parser` produces a flat list of `Event`s.
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar.rs b/src/tools/rust-analyzer/crates/parser/src/grammar.rs
index fe6b904bd889a..8ddf50db043a6 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar.rs
@@ -39,9 +39,9 @@ mod patterns;
 mod types;
 
 use crate::{
-    parser::{CompletedMarker, Marker, Parser},
     SyntaxKind::{self, *},
-    TokenSet, T,
+    T, TokenSet,
+    parser::{CompletedMarker, Marker, Parser},
 };
 
 pub(crate) mod entry {
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs
index fe1316c9bfde3..34dcf2a182296 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs
@@ -4,8 +4,8 @@ use crate::grammar::attributes::ATTRIBUTE_FIRST;
 
 use super::*;
 
+pub(super) use atom::{LITERAL_FIRST, literal};
 pub(crate) use atom::{block_expr, match_arm_list};
-pub(super) use atom::{literal, LITERAL_FIRST};
 
 #[derive(PartialEq, Eq)]
 pub(super) enum Semicolon {
@@ -58,7 +58,7 @@ pub(super) fn stmt(p: &mut Parser<'_>, semicolon: Semicolon) {
     // }
     attributes::outer_attrs(p);
 
-    if p.at(T![let]) {
+    if p.at(T![let]) || (p.at(T![super]) && p.nth_at(1, T![let])) {
         let_stmt(p, semicolon);
         m.complete(p, LET_STMT);
         return;
@@ -113,8 +113,9 @@ pub(super) fn stmt(p: &mut Parser<'_>, semicolon: Semicolon) {
 }
 
 // test let_stmt
-// fn f() { let x: i32 = 92; }
+// fn f() { let x: i32 = 92; super let y; super::foo; }
 pub(super) fn let_stmt(p: &mut Parser<'_>, with_semi: Semicolon) {
+    p.eat(T![super]);
     p.bump(T![let]);
     patterns::pattern(p);
     if p.at(T![:]) {
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs
index 407320e1d0825..c66afed91c51a 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs
@@ -258,6 +258,15 @@ fn builtin_expr(p: &mut Parser<'_>) -> Option<CompletedMarker> {
         p.expect(T!['(']);
         type_(p);
         p.expect(T![,]);
+        // Due to our incomplete handling of macro groups, especially
+        // those with empty delimiters, we sometimes wrap `expr` fragments
+        // in parentheses. Since `offset_of` is a macro and takes `expr`,
+        // the field names could be wrapped in parentheses.
+        let wrapped_in_parens = p.eat(T!['(']);
+        // test offset_of_parens
+        // fn foo() {
+        //     builtin#offset_of(Foo, (bar.baz.0));
+        // }
         while !p.at(EOF) && !p.at(T![')']) {
             name_ref_mod_path_or_index(p);
             if !p.at(T![')']) {
@@ -265,6 +274,9 @@ fn builtin_expr(p: &mut Parser<'_>) -> Option<CompletedMarker> {
             }
         }
         p.expect(T![')']);
+        if wrapped_in_parens {
+            p.expect(T![')']);
+        }
         Some(m.complete(p, OFFSET_OF_EXPR))
     } else if p.at_contextual_kw(T![format_args]) {
         p.bump_remap(T![format_args]);
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs
index 0ac11371c5436..f5f003be48918 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs
@@ -132,7 +132,7 @@ pub(super) fn opt_item(p: &mut Parser<'_>, m: Marker, is_in_extern: bool) -> Res
         has_mods = true;
     }
 
-    // test_err gen_fn
+    // test_err gen_fn 2021
     // gen fn gen_fn() {}
     // async gen fn async_gen_fn() {}
     if p.at(T![gen]) && p.nth(1) == T![fn] {
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/items/adt.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/items/adt.rs
index 9a16c9db6daf1..a37569614028a 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar/items/adt.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/items/adt.rs
@@ -107,7 +107,7 @@ pub(crate) fn variant_list(p: &mut Parser<'_>) {
 }
 
 // test record_field_list
-// struct S { a: i32, b: f32 }
+// struct S { a: i32, b: f32, unsafe c: u8 }
 pub(crate) fn record_field_list(p: &mut Parser<'_>) {
     assert!(p.at(T!['{']));
     let m = p.start();
@@ -131,6 +131,7 @@ pub(crate) fn record_field_list(p: &mut Parser<'_>) {
         // struct S { #[attr] f: f32 }
         attributes::outer_attrs(p);
         opt_visibility(p, false);
+        p.eat(T![unsafe]);
         if p.at(IDENT) {
             name(p);
             p.expect(T![:]);
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/items/consts.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/items/consts.rs
index 9549ec9b4005e..8e255985a205d 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar/items/consts.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/items/consts.rs
@@ -24,6 +24,18 @@ fn const_or_static(p: &mut Parser<'_>, m: Marker, is_const: bool) {
         name(p);
     }
 
+    // FIXME: Recover on statics with generic params/where clause.
+    if is_const {
+        // test generic_const
+        // const C<i32>: u32 = 0;
+        // impl Foo {
+        //     const C<'a>: &'a () = &();
+        // }
+        generic_params::opt_generic_param_list(p);
+    }
+    // test_err generic_static
+    // static C<i32>: u32 = 0;
+
     if p.at(T![:]) {
         types::ascription(p);
     } else {
@@ -32,6 +44,20 @@ fn const_or_static(p: &mut Parser<'_>, m: Marker, is_const: bool) {
     if p.eat(T![=]) {
         expressions::expr(p);
     }
+
+    if is_const {
+        // test const_where_clause
+        // const C<i32>: u32 = 0
+        // where i32: Copy;
+        // trait Foo {
+        //     const C: i32 where i32: Copy;
+        // }
+        generic_params::opt_where_clause(p);
+    }
+    // test_err static_where_clause
+    // static C: u32 = 0
+    // where i32: Copy;
+
     p.expect(T![;]);
     m.complete(p, if is_const { CONST } else { STATIC });
 }
diff --git a/src/tools/rust-analyzer/crates/parser/src/input.rs b/src/tools/rust-analyzer/crates/parser/src/input.rs
index cabdff214df35..4490956f97046 100644
--- a/src/tools/rust-analyzer/crates/parser/src/input.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/input.rs
@@ -12,7 +12,6 @@ type bits = u64;
 /// `Tokens` doesn't include whitespace and comments. Main input to the parser.
 ///
 /// Struct of arrays internally, but this shouldn't really matter.
-#[derive(Default)]
 pub struct Input {
     kind: Vec<SyntaxKind>,
     joint: Vec<bits>,
@@ -21,6 +20,14 @@ pub struct Input {
 
 /// `pub` impl used by callers to create `Tokens`.
 impl Input {
+    #[inline]
+    pub fn with_capacity(capacity: usize) -> Self {
+        Self {
+            kind: Vec::with_capacity(capacity),
+            joint: Vec::with_capacity(capacity / size_of::<bits>()),
+            contextual_kind: Vec::with_capacity(capacity),
+        }
+    }
     #[inline]
     pub fn push(&mut self, kind: SyntaxKind) {
         self.push_impl(kind, SyntaxKind::EOF)
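
The new `with_capacity` constructor pre-sizes `Input`'s struct-of-arrays from the lexed token count so that `LexedStr::to_input` (see the `shortcuts.rs` hunk below) can fill it without reallocating; the patch itself sizes `joint` as `capacity / size_of::<bits>()`. A minimal self-contained sketch of the same pattern, using assumed stand-in names rather than the rust-analyzer types and sizing the bitset at exactly one flag per token for illustration:

```rust
// Sketch only: stand-ins for Input/SyntaxKind, not the rust-analyzer API.
// `kind` holds one entry per token; `joint` packs one "is joint with the
// next token" flag per token into 64-bit words.
type Bits = u64;

struct TokenBuf {
    kind: Vec<u16>,   // stand-in for SyntaxKind
    joint: Vec<Bits>, // one bit per token, 64 flags per word
}

impl TokenBuf {
    fn with_capacity(n_tokens: usize) -> Self {
        TokenBuf {
            kind: Vec::with_capacity(n_tokens),
            // enough 64-bit words to hold one flag per token
            joint: Vec::with_capacity(n_tokens.div_ceil(Bits::BITS as usize)),
        }
    }

    fn push(&mut self, kind: u16, joint: bool) {
        let idx = self.kind.len();
        self.kind.push(kind);
        let (word, bit) = (idx / 64, idx % 64);
        if word == self.joint.len() {
            self.joint.push(0);
        }
        if joint {
            self.joint[word] |= 1 << bit;
        }
    }
}

fn main() {
    let mut buf = TokenBuf::with_capacity(3);
    buf.push(1, true);
    buf.push(2, false);
    buf.push(3, false);
    assert_eq!(buf.joint[0] & 1, 1);
}
```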
diff --git a/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs
index b0bbc2fa5ff1f..585e7ffb1aeff 100644
--- a/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs
@@ -10,7 +10,9 @@
 
 use std::ops;
 
-use rustc_literal_escaper::{EscapeError, Mode, unescape_byte, unescape_char, unescape_mixed, unescape_unicode};
+use rustc_literal_escaper::{
+    EscapeError, Mode, unescape_byte, unescape_char, unescape_mixed, unescape_unicode,
+};
 
 use crate::{
     Edition,
diff --git a/src/tools/rust-analyzer/crates/parser/src/lib.rs b/src/tools/rust-analyzer/crates/parser/src/lib.rs
index 398ad7cf66ce6..7963f00bb25ce 100644
--- a/src/tools/rust-analyzer/crates/parser/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/lib.rs
@@ -35,6 +35,8 @@ mod shortcuts;
 mod syntax_kind;
 mod token_set;
 
+pub use T_ as T;
+
 #[cfg(test)]
 mod tests;
 
diff --git a/src/tools/rust-analyzer/crates/parser/src/parser.rs b/src/tools/rust-analyzer/crates/parser/src/parser.rs
index b058686276444..36a363afe93a7 100644
--- a/src/tools/rust-analyzer/crates/parser/src/parser.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/parser.rs
@@ -5,11 +5,11 @@ use std::cell::Cell;
 use drop_bomb::DropBomb;
 
 use crate::{
-    event::Event,
-    input::Input,
     Edition,
     SyntaxKind::{self, EOF, ERROR, TOMBSTONE},
-    TokenSet, T,
+    T, TokenSet,
+    event::Event,
+    input::Input,
 };
 
 /// `Parser` struct provides the low-level API for
diff --git a/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs b/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs
index 32569d5c3fe92..e2baec890c3a6 100644
--- a/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs
@@ -27,7 +27,7 @@ pub enum StrStep<'a> {
 impl LexedStr<'_> {
     pub fn to_input(&self, edition: Edition) -> crate::Input {
         let _p = tracing::info_span!("LexedStr::to_input").entered();
-        let mut res = crate::Input::default();
+        let mut res = crate::Input::with_capacity(self.len());
         let mut was_joint = false;
         for i in 0..self.len() {
             let kind = self.kind(i);
diff --git a/src/tools/rust-analyzer/crates/parser/src/syntax_kind.rs b/src/tools/rust-analyzer/crates/parser/src/syntax_kind.rs
index 6a8cca9ccc79d..7311947525ed9 100644
--- a/src/tools/rust-analyzer/crates/parser/src/syntax_kind.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/syntax_kind.rs
@@ -1,6 +1,7 @@
 //! Defines [`SyntaxKind`] -- a fieldless enum of all possible syntactic
 //! constructs of the Rust language.
 
+#[rustfmt::skip]
 mod generated;
 
 use crate::Edition;
diff --git a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs
index 1ff0bbea8b1db..e6f93a1fbda57 100644
--- a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs
@@ -3,7 +3,7 @@
 #![allow(bad_style, missing_docs, unreachable_pub)]
 use crate::Edition;
 #[doc = r" The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT`."]
-#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
+#[derive(Debug)]
 #[repr(u16)]
 pub enum SyntaxKind {
     #[doc(hidden)]
@@ -156,7 +156,6 @@ pub enum SyntaxKind {
     SHEBANG,
     WHITESPACE,
     ABI,
-    ADT,
     ARG_LIST,
     ARRAY_EXPR,
     ARRAY_TYPE,
@@ -165,16 +164,13 @@ pub enum SyntaxKind {
     ASM_DIR_SPEC,
     ASM_EXPR,
     ASM_LABEL,
-    ASM_OPERAND,
     ASM_OPERAND_EXPR,
     ASM_OPERAND_NAMED,
     ASM_OPTION,
     ASM_OPTIONS,
-    ASM_PIECE,
     ASM_REG_OPERAND,
     ASM_REG_SPEC,
     ASM_SYM,
-    ASSOC_ITEM,
     ASSOC_ITEM_LIST,
     ASSOC_TYPE_ARG,
     ATTR,
@@ -195,23 +191,18 @@ pub enum SyntaxKind {
     CONTINUE_EXPR,
     DYN_TRAIT_TYPE,
     ENUM,
-    EXPR,
     EXPR_STMT,
     EXTERN_BLOCK,
     EXTERN_CRATE,
-    EXTERN_ITEM,
     EXTERN_ITEM_LIST,
     FIELD_EXPR,
-    FIELD_LIST,
     FN,
     FN_PTR_TYPE,
     FORMAT_ARGS_ARG,
     FORMAT_ARGS_EXPR,
     FOR_EXPR,
     FOR_TYPE,
-    GENERIC_ARG,
     GENERIC_ARG_LIST,
-    GENERIC_PARAM,
     GENERIC_PARAM_LIST,
     IDENT_PAT,
     IF_EXPR,
@@ -219,7 +210,6 @@ pub enum SyntaxKind {
     IMPL_TRAIT_TYPE,
     INDEX_EXPR,
     INFER_TYPE,
-    ITEM,
     ITEM_LIST,
     LABEL,
     LET_ELSE,
@@ -257,7 +247,6 @@ pub enum SyntaxKind {
     PAREN_EXPR,
     PAREN_PAT,
     PAREN_TYPE,
-    PAT,
     PATH,
     PATH_EXPR,
     PATH_PAT,
@@ -288,7 +277,6 @@ pub enum SyntaxKind {
     SLICE_TYPE,
     SOURCE_FILE,
     STATIC,
-    STMT,
     STMT_LIST,
     STRUCT,
     TOKEN_TREE,
@@ -301,7 +289,6 @@ pub enum SyntaxKind {
     TUPLE_PAT,
     TUPLE_STRUCT_PAT,
     TUPLE_TYPE,
-    TYPE,
     TYPE_ALIAS,
     TYPE_ARG,
     TYPE_BOUND,
@@ -310,12 +297,10 @@ pub enum SyntaxKind {
     UNDERSCORE_EXPR,
     UNION,
     USE,
-    USE_BOUND_GENERIC_ARG,
     USE_BOUND_GENERIC_ARGS,
     USE_TREE,
     USE_TREE_LIST,
     VARIANT,
-    VARIANT_DEF,
     VARIANT_LIST,
     VISIBILITY,
     WHERE_CLAUSE,
@@ -343,7 +328,6 @@ impl SyntaxKind {
             | INT_NUMBER
             | STRING
             | ABI
-            | ADT
             | ARG_LIST
             | ARRAY_EXPR
             | ARRAY_TYPE
@@ -352,16 +336,13 @@ impl SyntaxKind {
             | ASM_DIR_SPEC
             | ASM_EXPR
             | ASM_LABEL
-            | ASM_OPERAND
             | ASM_OPERAND_EXPR
             | ASM_OPERAND_NAMED
             | ASM_OPTION
             | ASM_OPTIONS
-            | ASM_PIECE
             | ASM_REG_OPERAND
             | ASM_REG_SPEC
             | ASM_SYM
-            | ASSOC_ITEM
             | ASSOC_ITEM_LIST
             | ASSOC_TYPE_ARG
             | ATTR
@@ -382,23 +363,18 @@ impl SyntaxKind {
             | CONTINUE_EXPR
             | DYN_TRAIT_TYPE
             | ENUM
-            | EXPR
             | EXPR_STMT
             | EXTERN_BLOCK
             | EXTERN_CRATE
-            | EXTERN_ITEM
             | EXTERN_ITEM_LIST
             | FIELD_EXPR
-            | FIELD_LIST
             | FN
             | FN_PTR_TYPE
             | FORMAT_ARGS_ARG
             | FORMAT_ARGS_EXPR
             | FOR_EXPR
             | FOR_TYPE
-            | GENERIC_ARG
             | GENERIC_ARG_LIST
-            | GENERIC_PARAM
             | GENERIC_PARAM_LIST
             | IDENT_PAT
             | IF_EXPR
@@ -406,7 +382,6 @@ impl SyntaxKind {
             | IMPL_TRAIT_TYPE
             | INDEX_EXPR
             | INFER_TYPE
-            | ITEM
             | ITEM_LIST
             | LABEL
             | LET_ELSE
@@ -444,7 +419,6 @@ impl SyntaxKind {
             | PAREN_EXPR
             | PAREN_PAT
             | PAREN_TYPE
-            | PAT
             | PATH
             | PATH_EXPR
             | PATH_PAT
@@ -475,7 +449,6 @@ impl SyntaxKind {
             | SLICE_TYPE
             | SOURCE_FILE
             | STATIC
-            | STMT
             | STMT_LIST
             | STRUCT
             | TOKEN_TREE
@@ -488,7 +461,6 @@ impl SyntaxKind {
             | TUPLE_PAT
             | TUPLE_STRUCT_PAT
             | TUPLE_TYPE
-            | TYPE
             | TYPE_ALIAS
             | TYPE_ARG
             | TYPE_BOUND
@@ -497,12 +469,10 @@ impl SyntaxKind {
             | UNDERSCORE_EXPR
             | UNION
             | USE
-            | USE_BOUND_GENERIC_ARG
             | USE_BOUND_GENERIC_ARGS
             | USE_TREE
             | USE_TREE_LIST
             | VARIANT
-            | VARIANT_DEF
             | VARIANT_LIST
             | VISIBILITY
             | WHERE_CLAUSE
@@ -1024,4 +994,29 @@ impl SyntaxKind {
     }
 }
 #[macro_export]
-macro_rules ! T { [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] => { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] => { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [abstract] => { $ crate :: SyntaxKind :: ABSTRACT_KW } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [become] => { $ crate :: SyntaxKind :: BECOME_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [do] => { $ crate :: SyntaxKind :: DO_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [final] => { $ crate :: SyntaxKind :: FINAL_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] => { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; [macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ crate :: SyntaxKind :: 
MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [override] => { $ crate :: SyntaxKind :: OVERRIDE_KW } ; [priv] => { $ crate :: SyntaxKind :: PRIV_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [typeof] => { $ crate :: SyntaxKind :: TYPEOF_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [unsized] => { $ crate :: SyntaxKind :: UNSIZED_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [virtual] => { $ crate :: SyntaxKind :: VIRTUAL_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: SyntaxKind :: YIELD_KW } ; [asm] => { $ crate :: SyntaxKind :: ASM_KW } ; [att_syntax] => { $ crate :: SyntaxKind :: ATT_SYNTAX_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [builtin] => { $ crate :: SyntaxKind :: BUILTIN_KW } ; [clobber_abi] => { $ crate :: SyntaxKind :: CLOBBER_ABI_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [format_args] => { $ crate :: SyntaxKind :: FORMAT_ARGS_KW } ; [inlateout] => { $ crate :: SyntaxKind :: INLATEOUT_KW } ; [inout] => { $ crate :: SyntaxKind :: INOUT_KW } ; [label] => { $ crate :: SyntaxKind :: LABEL_KW } ; [lateout] => { $ crate :: SyntaxKind :: LATEOUT_KW } ; [macro_rules] => { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; [may_unwind] => { $ crate :: SyntaxKind :: MAY_UNWIND_KW } ; [nomem] => { $ crate :: SyntaxKind :: NOMEM_KW } ; [noreturn] => { $ crate :: SyntaxKind :: NORETURN_KW } ; [nostack] => { $ crate :: SyntaxKind :: NOSTACK_KW } ; [offset_of] => { $ crate :: SyntaxKind :: OFFSET_OF_KW } ; [options] => { $ crate :: SyntaxKind :: OPTIONS_KW } ; [out] => { $ crate :: SyntaxKind :: OUT_KW } ; [preserves_flags] => { $ crate :: SyntaxKind :: PRESERVES_FLAGS_KW } ; [pure] => { $ crate :: SyntaxKind :: PURE_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [readonly] => { $ crate :: SyntaxKind :: READONLY_KW } ; [safe] => { $ crate :: SyntaxKind :: SAFE_KW } ; [sym] => { $ crate :: SyntaxKind :: SYM_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [yeet] => { $ crate :: SyntaxKind :: YEET_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [gen] => { $ crate :: SyntaxKind :: GEN_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [int_number] => { $ crate :: SyntaxKind :: INT_NUMBER } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [string] => { $ crate :: SyntaxKind :: STRING } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; }
+macro_rules ! T_ { [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] => { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] => { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [abstract] => { $ crate :: SyntaxKind :: ABSTRACT_KW } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [become] => { $ crate :: SyntaxKind :: BECOME_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [do] => { $ crate :: SyntaxKind :: DO_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [final] => { $ crate :: SyntaxKind :: FINAL_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] => { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; [macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ crate :: SyntaxKind 
:: MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [override] => { $ crate :: SyntaxKind :: OVERRIDE_KW } ; [priv] => { $ crate :: SyntaxKind :: PRIV_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [typeof] => { $ crate :: SyntaxKind :: TYPEOF_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [unsized] => { $ crate :: SyntaxKind :: UNSIZED_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [virtual] => { $ crate :: SyntaxKind :: VIRTUAL_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: SyntaxKind :: YIELD_KW } ; [asm] => { $ crate :: SyntaxKind :: ASM_KW } ; [att_syntax] => { $ crate :: SyntaxKind :: ATT_SYNTAX_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [builtin] => { $ crate :: SyntaxKind :: BUILTIN_KW } ; [clobber_abi] => { $ crate :: SyntaxKind :: CLOBBER_ABI_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [format_args] => { $ crate :: SyntaxKind :: FORMAT_ARGS_KW } ; [inlateout] => { $ crate :: SyntaxKind :: INLATEOUT_KW } ; [inout] => { $ crate :: SyntaxKind :: INOUT_KW } ; [label] => { $ crate :: SyntaxKind :: LABEL_KW } ; [lateout] => { $ crate :: SyntaxKind :: LATEOUT_KW } ; [macro_rules] => { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; [may_unwind] => { $ crate :: SyntaxKind :: MAY_UNWIND_KW } ; [nomem] => { $ crate :: SyntaxKind :: NOMEM_KW } ; [noreturn] => { $ crate :: SyntaxKind :: NORETURN_KW } ; [nostack] => { $ crate :: SyntaxKind :: NOSTACK_KW } ; [offset_of] => { $ crate :: SyntaxKind :: OFFSET_OF_KW } ; [options] => { $ crate :: SyntaxKind :: OPTIONS_KW } ; [out] => { $ crate :: SyntaxKind :: OUT_KW } ; [preserves_flags] => { $ crate :: SyntaxKind :: PRESERVES_FLAGS_KW } ; [pure] => { $ crate :: SyntaxKind :: PURE_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [readonly] => { $ crate :: SyntaxKind :: READONLY_KW } ; [safe] => { $ crate :: SyntaxKind :: SAFE_KW } ; [sym] => { $ crate :: SyntaxKind :: SYM_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [yeet] => { $ crate :: SyntaxKind :: YEET_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [gen] => { $ crate :: SyntaxKind :: GEN_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [int_number] => { $ crate :: SyntaxKind :: INT_NUMBER } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [string] => { $ crate :: SyntaxKind :: STRING } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; }
+impl ::core::marker::Copy for SyntaxKind {}
+impl ::core::clone::Clone for SyntaxKind {
+    #[inline]
+    fn clone(&self) -> Self { *self }
+}
+impl ::core::cmp::PartialEq for SyntaxKind {
+    #[inline]
+    fn eq(&self, other: &Self) -> bool { (*self as u16) == (*other as u16) }
+}
+impl ::core::cmp::Eq for SyntaxKind {}
+impl ::core::cmp::PartialOrd for SyntaxKind {
+    #[inline]
+    fn partial_cmp(&self, other: &Self) -> core::option::Option<core::cmp::Ordering> {
+        Some(self.cmp(other))
+    }
+}
+impl ::core::cmp::Ord for SyntaxKind {
+    #[inline]
+    fn cmp(&self, other: &Self) -> core::cmp::Ordering { (*self as u16).cmp(&(*other as u16)) }
+}
+impl ::core::hash::Hash for SyntaxKind {
+    fn hash<H: ::core::hash::Hasher>(&self, state: &mut H) {
+        ::core::mem::discriminant(self).hash(state);
+    }
+}
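
The `#[derive(...)]` list dropped above is replaced by the hand-written impls just shown, which compare the `#[repr(u16)]` discriminant directly. A small self-contained check, using an assumed toy enum rather than `SyntaxKind` itself, that the `as u16` comparison agrees with what the derived `Ord` would produce on a fieldless `#[repr(u16)]` enum:

```rust
// Assumed toy enum standing in for SyntaxKind: fieldless and #[repr(u16)],
// so casting with `as u16` yields the declaration-order discriminant.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
#[repr(u16)]
enum Kind {
    Tombstone,
    Eof,
    Ident,
}

fn cmp_via_repr(a: Kind, b: Kind) -> std::cmp::Ordering {
    (a as u16).cmp(&(b as u16))
}

fn main() {
    let kinds = [Kind::Tombstone, Kind::Eof, Kind::Ident];
    for &a in &kinds {
        for &b in &kinds {
            // Declaration order and discriminant order agree, so the derived
            // comparison and the repr-based comparison produce the same result.
            assert_eq!(a.cmp(&b), cmp_via_repr(a, b));
        }
    }
    println!("ok");
}
```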
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs b/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs
index 1a747731587c7..6c9d02aaa8f8c 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs
+++ b/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs
@@ -139,6 +139,10 @@ mod ok {
         run_and_expect_no_errors("test_data/parser/inline/ok/const_trait_bound.rs");
     }
     #[test]
+    fn const_where_clause() {
+        run_and_expect_no_errors("test_data/parser/inline/ok/const_where_clause.rs");
+    }
+    #[test]
     fn continue_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/continue_expr.rs"); }
     #[test]
     fn crate_path() { run_and_expect_no_errors("test_data/parser/inline/ok/crate_path.rs"); }
@@ -278,6 +282,8 @@ mod ok {
         run_and_expect_no_errors("test_data/parser/inline/ok/generic_arg_bounds.rs");
     }
     #[test]
+    fn generic_const() { run_and_expect_no_errors("test_data/parser/inline/ok/generic_const.rs"); }
+    #[test]
     fn generic_param_attribute() {
         run_and_expect_no_errors("test_data/parser/inline/ok/generic_param_attribute.rs");
     }
@@ -416,6 +422,10 @@ mod ok {
         run_and_expect_no_errors("test_data/parser/inline/ok/nocontentexpr_after_item.rs");
     }
     #[test]
+    fn offset_of_parens() {
+        run_and_expect_no_errors("test_data/parser/inline/ok/offset_of_parens.rs");
+    }
+    #[test]
     fn or_pattern() { run_and_expect_no_errors("test_data/parser/inline/ok/or_pattern.rs"); }
     #[test]
     fn param_list() { run_and_expect_no_errors("test_data/parser/inline/ok/param_list.rs"); }
@@ -745,7 +755,12 @@ mod err {
         run_and_expect_errors("test_data/parser/inline/err/fn_pointer_type_missing_fn.rs");
     }
     #[test]
-    fn gen_fn() { run_and_expect_errors("test_data/parser/inline/err/gen_fn.rs"); }
+    fn gen_fn() {
+        run_and_expect_errors_with_edition(
+            "test_data/parser/inline/err/gen_fn.rs",
+            crate::Edition::Edition2021,
+        );
+    }
     #[test]
     fn generic_arg_list_recover() {
         run_and_expect_errors("test_data/parser/inline/err/generic_arg_list_recover.rs");
@@ -759,6 +774,8 @@ mod err {
         run_and_expect_errors("test_data/parser/inline/err/generic_param_list_recover.rs");
     }
     #[test]
+    fn generic_static() { run_and_expect_errors("test_data/parser/inline/err/generic_static.rs"); }
+    #[test]
     fn impl_type() { run_and_expect_errors("test_data/parser/inline/err/impl_type.rs"); }
     #[test]
     fn let_else_right_curly_brace() {
@@ -831,6 +848,10 @@ mod err {
         run_and_expect_errors("test_data/parser/inline/err/recover_from_missing_const_default.rs");
     }
     #[test]
+    fn static_where_clause() {
+        run_and_expect_errors("test_data/parser/inline/err/static_where_clause.rs");
+    }
+    #[test]
     fn struct_field_recover() {
         run_and_expect_errors("test_data/parser/inline/err/struct_field_recover.rs");
     }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/gen_fn.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/gen_fn.rast
index f8a7d0e552cac..b6fd5a5d99bdf 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/gen_fn.rast
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/gen_fn.rast
@@ -1,4 +1,6 @@
 SOURCE_FILE
+  COMMENT "// 2021"
+  WHITESPACE "\n"
   ERROR
     PATH
       PATH_SEGMENT
@@ -42,6 +44,6 @@ SOURCE_FILE
         L_CURLY "{"
         R_CURLY "}"
   WHITESPACE "\n"
-error 3: expected an item
-error 24: expected fn, trait or impl
-error 28: expected an item
+error 11: expected an item
+error 32: expected fn, trait or impl
+error 36: expected an item
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/gen_fn.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/gen_fn.rs
index 80882e0a4044a..778693ca9570b 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/gen_fn.rs
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/gen_fn.rs
@@ -1,2 +1,3 @@
+// 2021
 gen fn gen_fn() {}
 async gen fn async_gen_fn() {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/generic_static.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/generic_static.rast
new file mode 100644
index 0000000000000..485ad11f233ac
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/generic_static.rast
@@ -0,0 +1,42 @@
+SOURCE_FILE
+  STATIC
+    STATIC_KW "static"
+    WHITESPACE " "
+    NAME
+      IDENT "C"
+  ERROR
+    L_ANGLE "<"
+  ERROR
+    PATH
+      PATH_SEGMENT
+        NAME_REF
+          IDENT "i32"
+  ERROR
+    R_ANGLE ">"
+  ERROR
+    COLON ":"
+  WHITESPACE " "
+  ERROR
+    PATH
+      PATH_SEGMENT
+        NAME_REF
+          IDENT "u32"
+  WHITESPACE " "
+  ERROR
+    EQ "="
+  WHITESPACE " "
+  ERROR
+    INT_NUMBER "0"
+  ERROR
+    SEMICOLON ";"
+  WHITESPACE "\n"
+error 8: missing type for `const` or `static`
+error 8: expected SEMICOLON
+error 8: expected an item
+error 12: expected an item
+error 12: expected an item
+error 13: expected an item
+error 18: expected an item
+error 19: expected an item
+error 21: expected an item
+error 22: expected an item
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/generic_static.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/generic_static.rs
new file mode 100644
index 0000000000000..d76aa7a205bc7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/generic_static.rs
@@ -0,0 +1 @@
+static C<i32>: u32 = 0;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/static_where_clause.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/static_where_clause.rast
new file mode 100644
index 0000000000000..cde3e47ad5c3b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/static_where_clause.rast
@@ -0,0 +1,44 @@
+SOURCE_FILE
+  STATIC
+    STATIC_KW "static"
+    WHITESPACE " "
+    NAME
+      IDENT "C"
+    COLON ":"
+    WHITESPACE " "
+    PATH_TYPE
+      PATH
+        PATH_SEGMENT
+          NAME_REF
+            IDENT "u32"
+    WHITESPACE " "
+    EQ "="
+    WHITESPACE " "
+    LITERAL
+      INT_NUMBER "0"
+  WHITESPACE "\n"
+  ERROR
+    WHERE_KW "where"
+  WHITESPACE " "
+  ERROR
+    PATH
+      PATH_SEGMENT
+        NAME_REF
+          IDENT "i32"
+  ERROR
+    COLON ":"
+  WHITESPACE " "
+  ERROR
+    PATH
+      PATH_SEGMENT
+        NAME_REF
+          IDENT "Copy"
+  ERROR
+    SEMICOLON ";"
+  WHITESPACE "\n"
+error 17: expected SEMICOLON
+error 18: expected an item
+error 27: expected an item
+error 27: expected an item
+error 33: expected an item
+error 33: expected an item
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/static_where_clause.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/static_where_clause.rs
new file mode 100644
index 0000000000000..c330f35da2430
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/static_where_clause.rs
@@ -0,0 +1,2 @@
+static C: u32 = 0
+where i32: Copy;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_where_clause.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_where_clause.rast
new file mode 100644
index 0000000000000..12148f6afe4bc
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_where_clause.rast
@@ -0,0 +1,89 @@
+SOURCE_FILE
+  CONST
+    CONST_KW "const"
+    WHITESPACE " "
+    NAME
+      IDENT "C"
+    GENERIC_PARAM_LIST
+      L_ANGLE "<"
+      TYPE_PARAM
+        NAME
+          IDENT "i32"
+      R_ANGLE ">"
+    COLON ":"
+    WHITESPACE " "
+    PATH_TYPE
+      PATH
+        PATH_SEGMENT
+          NAME_REF
+            IDENT "u32"
+    WHITESPACE " "
+    EQ "="
+    WHITESPACE " "
+    LITERAL
+      INT_NUMBER "0"
+    WHITESPACE "\n"
+    WHERE_CLAUSE
+      WHERE_KW "where"
+      WHITESPACE " "
+      WHERE_PRED
+        PATH_TYPE
+          PATH
+            PATH_SEGMENT
+              NAME_REF
+                IDENT "i32"
+        COLON ":"
+        WHITESPACE " "
+        TYPE_BOUND_LIST
+          TYPE_BOUND
+            PATH_TYPE
+              PATH
+                PATH_SEGMENT
+                  NAME_REF
+                    IDENT "Copy"
+    SEMICOLON ";"
+  WHITESPACE "\n"
+  TRAIT
+    TRAIT_KW "trait"
+    WHITESPACE " "
+    NAME
+      IDENT "Foo"
+    WHITESPACE " "
+    ASSOC_ITEM_LIST
+      L_CURLY "{"
+      WHITESPACE "\n    "
+      CONST
+        CONST_KW "const"
+        WHITESPACE " "
+        NAME
+          IDENT "C"
+        COLON ":"
+        WHITESPACE " "
+        PATH_TYPE
+          PATH
+            PATH_SEGMENT
+              NAME_REF
+                IDENT "i32"
+        WHITESPACE " "
+        WHERE_CLAUSE
+          WHERE_KW "where"
+          WHITESPACE " "
+          WHERE_PRED
+            PATH_TYPE
+              PATH
+                PATH_SEGMENT
+                  NAME_REF
+                    IDENT "i32"
+            COLON ":"
+            WHITESPACE " "
+            TYPE_BOUND_LIST
+              TYPE_BOUND
+                PATH_TYPE
+                  PATH
+                    PATH_SEGMENT
+                      NAME_REF
+                        IDENT "Copy"
+        SEMICOLON ";"
+      WHITESPACE "\n"
+      R_CURLY "}"
+  WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_where_clause.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_where_clause.rs
new file mode 100644
index 0000000000000..5ad4b2fe83234
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_where_clause.rs
@@ -0,0 +1,5 @@
+const C<i32>: u32 = 0
+where i32: Copy;
+trait Foo {
+    const C: i32 where i32: Copy;
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/generic_const.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/generic_const.rast
new file mode 100644
index 0000000000000..bf432b99b9de9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/generic_const.rast
@@ -0,0 +1,71 @@
+SOURCE_FILE
+  CONST
+    CONST_KW "const"
+    WHITESPACE " "
+    NAME
+      IDENT "C"
+    GENERIC_PARAM_LIST
+      L_ANGLE "<"
+      TYPE_PARAM
+        NAME
+          IDENT "i32"
+      R_ANGLE ">"
+    COLON ":"
+    WHITESPACE " "
+    PATH_TYPE
+      PATH
+        PATH_SEGMENT
+          NAME_REF
+            IDENT "u32"
+    WHITESPACE " "
+    EQ "="
+    WHITESPACE " "
+    LITERAL
+      INT_NUMBER "0"
+    SEMICOLON ";"
+  WHITESPACE "\n"
+  IMPL
+    IMPL_KW "impl"
+    WHITESPACE " "
+    PATH_TYPE
+      PATH
+        PATH_SEGMENT
+          NAME_REF
+            IDENT "Foo"
+    WHITESPACE " "
+    ASSOC_ITEM_LIST
+      L_CURLY "{"
+      WHITESPACE "\n    "
+      CONST
+        CONST_KW "const"
+        WHITESPACE " "
+        NAME
+          IDENT "C"
+        GENERIC_PARAM_LIST
+          L_ANGLE "<"
+          LIFETIME_PARAM
+            LIFETIME
+              LIFETIME_IDENT "'a"
+          R_ANGLE ">"
+        COLON ":"
+        WHITESPACE " "
+        REF_TYPE
+          AMP "&"
+          LIFETIME
+            LIFETIME_IDENT "'a"
+          WHITESPACE " "
+          TUPLE_TYPE
+            L_PAREN "("
+            R_PAREN ")"
+        WHITESPACE " "
+        EQ "="
+        WHITESPACE " "
+        REF_EXPR
+          AMP "&"
+          TUPLE_EXPR
+            L_PAREN "("
+            R_PAREN ")"
+        SEMICOLON ";"
+      WHITESPACE "\n"
+      R_CURLY "}"
+  WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/generic_const.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/generic_const.rs
new file mode 100644
index 0000000000000..ce718a46288d5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/generic_const.rs
@@ -0,0 +1,4 @@
+const C<i32>: u32 = 0;
+impl Foo {
+    const C<'a>: &'a () = &();
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_stmt.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_stmt.rast
index de9d0fc19ee1f..d99dad4cedd17 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_stmt.rast
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_stmt.rast
@@ -32,5 +32,28 @@ SOURCE_FILE
             INT_NUMBER "92"
           SEMICOLON ";"
         WHITESPACE " "
+        LET_STMT
+          SUPER_KW "super"
+          WHITESPACE " "
+          LET_KW "let"
+          WHITESPACE " "
+          IDENT_PAT
+            NAME
+              IDENT "y"
+          SEMICOLON ";"
+        WHITESPACE " "
+        EXPR_STMT
+          PATH_EXPR
+            PATH
+              PATH
+                PATH_SEGMENT
+                  NAME_REF
+                    SUPER_KW "super"
+              COLON2 "::"
+              PATH_SEGMENT
+                NAME_REF
+                  IDENT "foo"
+          SEMICOLON ";"
+        WHITESPACE " "
         R_CURLY "}"
   WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_stmt.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_stmt.rs
index 8003999fd08f7..d4cc1be4aec77 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_stmt.rs
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_stmt.rs
@@ -1 +1 @@
-fn f() { let x: i32 = 92; }
+fn f() { let x: i32 = 92; super let y; super::foo; }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/offset_of_parens.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/offset_of_parens.rast
new file mode 100644
index 0000000000000..4e23455cfcc32
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/offset_of_parens.rast
@@ -0,0 +1,42 @@
+SOURCE_FILE
+  FN
+    FN_KW "fn"
+    WHITESPACE " "
+    NAME
+      IDENT "foo"
+    PARAM_LIST
+      L_PAREN "("
+      R_PAREN ")"
+    WHITESPACE " "
+    BLOCK_EXPR
+      STMT_LIST
+        L_CURLY "{"
+        WHITESPACE "\n    "
+        EXPR_STMT
+          OFFSET_OF_EXPR
+            BUILTIN_KW "builtin"
+            POUND "#"
+            OFFSET_OF_KW "offset_of"
+            L_PAREN "("
+            PATH_TYPE
+              PATH
+                PATH_SEGMENT
+                  NAME_REF
+                    IDENT "Foo"
+            COMMA ","
+            WHITESPACE " "
+            L_PAREN "("
+            NAME_REF
+              IDENT "bar"
+            DOT "."
+            NAME_REF
+              IDENT "baz"
+            DOT "."
+            NAME_REF
+              INT_NUMBER "0"
+            R_PAREN ")"
+            R_PAREN ")"
+          SEMICOLON ";"
+        WHITESPACE "\n"
+        R_CURLY "}"
+  WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/offset_of_parens.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/offset_of_parens.rs
new file mode 100644
index 0000000000000..a797d5c8206ea
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/offset_of_parens.rs
@@ -0,0 +1,3 @@
+fn foo() {
+    builtin#offset_of(Foo, (bar.baz.0));
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_list.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_list.rast
index 065d7e7e81f24..07686f509c1bc 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_list.rast
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_list.rast
@@ -30,6 +30,20 @@ SOURCE_FILE
             PATH_SEGMENT
               NAME_REF
                 IDENT "f32"
+      COMMA ","
+      WHITESPACE " "
+      RECORD_FIELD
+        UNSAFE_KW "unsafe"
+        WHITESPACE " "
+        NAME
+          IDENT "c"
+        COLON ":"
+        WHITESPACE " "
+        PATH_TYPE
+          PATH
+            PATH_SEGMENT
+              NAME_REF
+                IDENT "u8"
       WHITESPACE " "
       R_CURLY "}"
   WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_list.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_list.rs
index a3bd7787db77c..1f4612f53913b 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_list.rs
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_list.rs
@@ -1 +1 @@
-struct S { a: i32, b: f32 }
+struct S { a: i32, b: f32, unsafe c: u8 }
diff --git a/src/tools/rust-analyzer/crates/paths/src/lib.rs b/src/tools/rust-analyzer/crates/paths/src/lib.rs
index 3d722b1ff1155..2c6a82bf0c3fc 100644
--- a/src/tools/rust-analyzer/crates/paths/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/paths/src/lib.rs
@@ -248,7 +248,9 @@ impl AbsPath {
     }
 
     pub fn canonicalize(&self) -> ! {
-        panic!("We explicitly do not provide canonicalization API, as that is almost always a wrong solution, see #14430")
+        panic!(
+            "We explicitly do not provide canonicalization API, as that is almost always a wrong solution, see #14430"
+        )
     }
 
     /// Equivalent of [`Utf8Path::strip_prefix`] for `AbsPath`.
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/json.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/json.rs
index ec89f6a9e65d2..c8f774031b584 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/json.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/json.rs
@@ -1,6 +1,7 @@
 //! Protocol functions for json.
 use std::io::{self, BufRead, Write};
 
+/// Reads a JSON message from the input stream.
 pub fn read_json<'a>(
     inp: &mut impl BufRead,
     buf: &'a mut String,
@@ -26,10 +27,10 @@ pub fn read_json<'a>(
     }
 }
 
+/// Writes a JSON message to the output stream.
 pub fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> {
     tracing::debug!("> {}", msg);
     out.write_all(msg.as_bytes())?;
     out.write_all(b"\n")?;
-    out.flush()?;
-    Ok(())
+    out.flush()
 }
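
The doc comments added here describe the line-delimited framing used by the legacy protocol: each message is one JSON object terminated by a newline, and `write_json` now returns the result of the final `flush` directly. A minimal sketch of the same framing with assumed helper names (not the actual `read_json`/`write_json` implementations):

```rust
use std::io::{self, BufRead, Write};

// Assumed helpers mirroring the shape of read_json/write_json above;
// sketches only, not the rust-analyzer implementations.
fn write_msg(out: &mut impl Write, msg: &str) -> io::Result<()> {
    out.write_all(msg.as_bytes())?;
    out.write_all(b"\n")?;
    out.flush()
}

fn read_msg<'a>(inp: &mut impl BufRead, buf: &'a mut String) -> io::Result<Option<&'a String>> {
    buf.clear();
    if inp.read_line(buf)? == 0 {
        return Ok(None); // EOF: the other side closed the stream
    }
    if buf.ends_with('\n') {
        buf.truncate(buf.len() - 1); // strip the message terminator
    }
    Ok(Some(buf))
}

fn main() -> io::Result<()> {
    let mut wire = Vec::new();
    write_msg(&mut wire, r#"{"ApiVersionCheck":{}}"#)?;

    let mut reader = io::Cursor::new(wire);
    let mut buf = String::new();
    let msg = read_msg(&mut reader, &mut buf)?;
    assert_eq!(msg.map(String::as_str), Some(r#"{"ApiVersionCheck":{}}"#));
    Ok(())
}
```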
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg.rs
index 4b831e4acebb9..55185aa492ded 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg.rs
@@ -10,7 +10,7 @@ use serde_derive::{Deserialize, Serialize};
 use crate::ProcMacroKind;
 
 pub use self::flat::{
-    deserialize_span_data_index_map, serialize_span_data_index_map, FlatTree, SpanDataIndexMap,
+    FlatTree, SpanDataIndexMap, deserialize_span_data_index_map, serialize_span_data_index_map,
 };
 pub use span::TokenId;
 
@@ -20,69 +20,103 @@ pub const VERSION_CHECK_VERSION: u32 = 1;
 pub const ENCODE_CLOSE_SPAN_VERSION: u32 = 2;
 pub const HAS_GLOBAL_SPANS: u32 = 3;
 pub const RUST_ANALYZER_SPAN_SUPPORT: u32 = 4;
-/// Whether literals encode their kind as an additional u32 field and idents their rawness as a u32 field
+/// Whether literals encode their kind as an additional u32 field and idents their rawness as a u32 field.
 pub const EXTENDED_LEAF_DATA: u32 = 5;
 
+/// Current API version of the proc-macro protocol.
 pub const CURRENT_API_VERSION: u32 = EXTENDED_LEAF_DATA;
 
+/// Represents requests sent from the client to the proc-macro-srv.
 #[derive(Debug, Serialize, Deserialize)]
 pub enum Request {
+    /// Retrieves a list of macros from a given dynamic library.
     /// Since [`NO_VERSION_CHECK_VERSION`]
     ListMacros { dylib_path: Utf8PathBuf },
+
+    /// Expands a procedural macro.
     /// Since [`NO_VERSION_CHECK_VERSION`]
     ExpandMacro(Box<ExpandMacro>),
+
+    /// Performs an API version check between the client and the server.
     /// Since [`VERSION_CHECK_VERSION`]
     ApiVersionCheck {},
+
+    /// Sets server-specific configurations.
     /// Since [`RUST_ANALYZER_SPAN_SUPPORT`]
     SetConfig(ServerConfig),
 }
 
+/// Defines the mode used for handling span data.
 #[derive(Copy, Clone, Default, Debug, Serialize, Deserialize)]
 pub enum SpanMode {
+    /// Default mode, where spans are identified by an ID.
     #[default]
     Id,
+
+    /// Rust Analyzer-specific span handling mode.
     RustAnalyzer,
 }
 
+/// Represents responses sent from the proc-macro-srv to the client.
 #[derive(Debug, Serialize, Deserialize)]
 pub enum Response {
+    /// Returns a list of available macros in a dynamic library.
     /// Since [`NO_VERSION_CHECK_VERSION`]
     ListMacros(Result<Vec<(String, ProcMacroKind)>, String>),
+
+    /// Returns the result of a macro expansion.
     /// Since [`NO_VERSION_CHECK_VERSION`]
     ExpandMacro(Result<FlatTree, PanicMessage>),
+
+    /// Returns the API version supported by the server.
     /// Since [`NO_VERSION_CHECK_VERSION`]
     ApiVersionCheck(u32),
+
+    /// Confirms the application of a configuration update.
     /// Since [`RUST_ANALYZER_SPAN_SUPPORT`]
     SetConfig(ServerConfig),
+
+    /// Returns the result of a macro expansion, including extended span data.
     /// Since [`RUST_ANALYZER_SPAN_SUPPORT`]
     ExpandMacroExtended(Result<ExpandMacroExtended, PanicMessage>),
 }
 
+/// Configuration settings for the proc-macro-srv.
 #[derive(Debug, Serialize, Deserialize, Default)]
 #[serde(default)]
 pub struct ServerConfig {
+    /// Defines how span data should be handled.
     pub span_mode: SpanMode,
 }
 
+/// Represents an extended macro expansion response, including span data mappings.
 #[derive(Debug, Serialize, Deserialize)]
 pub struct ExpandMacroExtended {
+    /// The expanded syntax tree.
     pub tree: FlatTree,
+    /// Additional span data mappings.
     pub span_data_table: Vec<u32>,
 }
 
+/// Represents an error message when a macro expansion results in a panic.
 #[derive(Debug, Serialize, Deserialize)]
 pub struct PanicMessage(pub String);
 
+/// Represents a macro expansion request sent from the client.
 #[derive(Debug, Serialize, Deserialize)]
 pub struct ExpandMacro {
+    /// The path to the dynamic library containing the macro.
     pub lib: Utf8PathBuf,
     /// Environment variables to set during macro expansion.
     pub env: Vec<(String, String)>,
+    /// The current working directory for the macro expansion.
     pub current_dir: Option<String>,
+    /// Macro expansion data, including the macro body, name and attributes.
     #[serde(flatten)]
     pub data: ExpandMacroData,
 }
 
+/// Represents the input data required for expanding a macro.
 #[derive(Debug, Serialize, Deserialize)]
 pub struct ExpandMacroData {
     /// Argument of macro call.
@@ -103,18 +137,24 @@ pub struct ExpandMacroData {
     #[serde(skip_serializing_if = "ExpnGlobals::skip_serializing_if")]
     #[serde(default)]
     pub has_global_spans: ExpnGlobals,
+    /// Table of additional span data.
     #[serde(skip_serializing_if = "Vec::is_empty")]
     #[serde(default)]
     pub span_data_table: Vec<u32>,
 }
 
+/// Represents global expansion settings, including span resolution.
 #[derive(Copy, Clone, Default, Debug, Serialize, Deserialize)]
 pub struct ExpnGlobals {
+    /// Determines whether to serialize the expansion settings.
     #[serde(skip_serializing)]
     #[serde(default)]
     pub serialize: bool,
+    /// Defines the `def_site` span location.
     pub def_site: usize,
+    /// Defines the `call_site` span location.
     pub call_site: usize,
+    /// Defines the `mixed_site` span location.
     pub mixed_site: usize,
 }
 
@@ -150,16 +190,18 @@ pub trait Message: serde::Serialize + DeserializeOwned {
 impl Message for Request {}
 impl Message for Response {}
 
+/// Type alias for a function that reads protocol messages from a buffered input stream.
 #[allow(type_alias_bounds)]
 type ProtocolRead<R: BufRead> =
     for<'i, 'buf> fn(inp: &'i mut R, buf: &'buf mut String) -> io::Result<Option<&'buf String>>;
+/// Type alias for a function that writes protocol messages to an output stream.
 #[allow(type_alias_bounds)]
 type ProtocolWrite<W: Write> = for<'o, 'msg> fn(out: &'o mut W, msg: &'msg str) -> io::Result<()>;
 
 #[cfg(test)]
 mod tests {
-    use intern::{sym, Symbol};
-    use span::{Edition, ErasedFileAstId, Span, SpanAnchor, SyntaxContextId, TextRange, TextSize};
+    use intern::{Symbol, sym};
+    use span::{Edition, ErasedFileAstId, Span, SpanAnchor, SyntaxContext, TextRange, TextSize};
     use tt::{
         Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, TopSubtree,
         TopSubtreeBuilder,
@@ -180,12 +222,12 @@ mod tests {
             open: Span {
                 range: TextRange::empty(TextSize::new(0)),
                 anchor,
-                ctx: SyntaxContextId::root(Edition::CURRENT),
+                ctx: SyntaxContext::root(Edition::CURRENT),
             },
             close: Span {
                 range: TextRange::empty(TextSize::new(19)),
                 anchor,
-                ctx: SyntaxContextId::root(Edition::CURRENT),
+                ctx: SyntaxContext::root(Edition::CURRENT),
             },
             kind: DelimiterKind::Invisible,
         });
@@ -196,7 +238,7 @@ mod tests {
                 span: Span {
                     range: TextRange::at(TextSize::new(0), TextSize::of("struct")),
                     anchor,
-                    ctx: SyntaxContextId::root(Edition::CURRENT),
+                    ctx: SyntaxContext::root(Edition::CURRENT),
                 },
                 is_raw: tt::IdentIsRaw::No,
             }
@@ -208,7 +250,7 @@ mod tests {
                 span: Span {
                     range: TextRange::at(TextSize::new(5), TextSize::of("r#Foo")),
                     anchor,
-                    ctx: SyntaxContextId::root(Edition::CURRENT),
+                    ctx: SyntaxContext::root(Edition::CURRENT),
                 },
                 is_raw: tt::IdentIsRaw::Yes,
             }
@@ -219,7 +261,7 @@ mod tests {
             span: Span {
                 range: TextRange::at(TextSize::new(10), TextSize::of("\"Foo\"")),
                 anchor,
-                ctx: SyntaxContextId::root(Edition::CURRENT),
+                ctx: SyntaxContext::root(Edition::CURRENT),
             },
             kind: tt::LitKind::Str,
             suffix: None,
@@ -229,7 +271,7 @@ mod tests {
             span: Span {
                 range: TextRange::at(TextSize::new(13), TextSize::of('@')),
                 anchor,
-                ctx: SyntaxContextId::root(Edition::CURRENT),
+                ctx: SyntaxContext::root(Edition::CURRENT),
             },
             spacing: Spacing::Joint,
         }));
@@ -238,23 +280,23 @@ mod tests {
             Span {
                 range: TextRange::at(TextSize::new(14), TextSize::of('{')),
                 anchor,
-                ctx: SyntaxContextId::root(Edition::CURRENT),
+                ctx: SyntaxContext::root(Edition::CURRENT),
             },
         );
         builder.push(Leaf::Literal(Literal {
-            symbol: sym::INTEGER_0.clone(),
+            symbol: sym::INTEGER_0,
             span: Span {
                 range: TextRange::at(TextSize::new(15), TextSize::of("0u32")),
                 anchor,
-                ctx: SyntaxContextId::root(Edition::CURRENT),
+                ctx: SyntaxContext::root(Edition::CURRENT),
             },
             kind: tt::LitKind::Integer,
-            suffix: Some(sym::u32.clone()),
+            suffix: Some(sym::u32),
         }));
         builder.close(Span {
             range: TextRange::at(TextSize::new(19), TextSize::of('}')),
             anchor,
-            ctx: SyntaxContextId::root(Edition::CURRENT),
+            ctx: SyntaxContext::root(Edition::CURRENT),
         });
 
         builder.build()
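
The `Request` and `Response` enums documented above derive serde's `Serialize`/`Deserialize` with no `#[serde(tag = ...)]` attribute, so the wire format is serde's default externally tagged JSON. A hedged sketch with stand-in enums (assuming `serde` with the `derive` feature and `serde_json` are available; these are not the real protocol types), showing what a version-check exchange could look like:

```rust
use serde::{Deserialize, Serialize};

// Stand-ins for the protocol enums above, not the real types; just enough
// to show the externally tagged encoding that serde produces by default.
#[derive(Serialize, Deserialize, Debug)]
enum Request {
    ListMacros { dylib_path: String },
    ApiVersionCheck {},
}

#[derive(Serialize, Deserialize, Debug, PartialEq)]
enum Response {
    ApiVersionCheck(u32),
}

fn main() -> serde_json::Result<()> {
    // Client -> server: an empty struct variant serializes to an empty map.
    let req = Request::ApiVersionCheck {};
    assert_eq!(serde_json::to_string(&req)?, r#"{"ApiVersionCheck":{}}"#);

    // Client -> server: a struct variant carries its fields as a nested map.
    let req = Request::ListMacros { dylib_path: "/tmp/libmacros.so".to_owned() };
    assert_eq!(
        serde_json::to_string(&req)?,
        r#"{"ListMacros":{"dylib_path":"/tmp/libmacros.so"}}"#
    );

    // Server -> client: a newtype variant carries its payload directly
    // (5 matching CURRENT_API_VERSION = EXTENDED_LEAF_DATA above).
    let resp: Response = serde_json::from_str(r#"{"ApiVersionCheck":5}"#)?;
    assert_eq!(resp, Response::ApiVersionCheck(5));
    Ok(())
}
```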
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg/flat.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg/flat.rs
index c194f301714fc..597ffa05d203e 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg/flat.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg/flat.rs
@@ -40,9 +40,7 @@ use std::collections::VecDeque;
 use intern::Symbol;
 use rustc_hash::FxHashMap;
 use serde_derive::{Deserialize, Serialize};
-use span::{
-    EditionedFileId, ErasedFileAstId, Span, SpanAnchor, SyntaxContextId, TextRange, TokenId,
-};
+use span::{EditionedFileId, ErasedFileAstId, Span, SpanAnchor, SyntaxContext, TextRange, TokenId};
 
 use crate::legacy_protocol::msg::{ENCODE_CLOSE_SPAN_VERSION, EXTENDED_LEAF_DATA};
 
@@ -74,7 +72,9 @@ pub fn deserialize_span_data_index_map(map: &[u32]) -> SpanDataIndexMap {
                     ast_id: ErasedFileAstId::from_raw(ast_id),
                 },
                 range: TextRange::new(start.into(), end.into()),
-                ctx: SyntaxContextId::from_u32(e),
+                // SAFETY: We only receive spans from the server. If someone messes up the
+                // communication, UB can happen, but that will be their problem.
+                ctx: unsafe { SyntaxContext::from_u32(e) },
             }
         })
         .collect()
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
index dc3328ebcda48..25c30b6db4a4c 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
@@ -13,20 +13,23 @@ mod process;
 
 use paths::{AbsPath, AbsPathBuf};
 use span::Span;
-use std::{fmt, io, sync::Arc};
+use std::{fmt, io, sync::Arc, time::SystemTime};
 
 use crate::{
     legacy_protocol::msg::{
-        deserialize_span_data_index_map, flat::serialize_span_data_index_map, ExpandMacro,
-        ExpandMacroData, ExpnGlobals, FlatTree, PanicMessage, Request, Response, SpanDataIndexMap,
-        HAS_GLOBAL_SPANS, RUST_ANALYZER_SPAN_SUPPORT,
+        ExpandMacro, ExpandMacroData, ExpnGlobals, FlatTree, HAS_GLOBAL_SPANS, PanicMessage,
+        RUST_ANALYZER_SPAN_SUPPORT, Request, Response, SpanDataIndexMap,
+        deserialize_span_data_index_map, flat::serialize_span_data_index_map,
     },
     process::ProcMacroServerProcess,
 };
 
+/// Represents different kinds of procedural macros that can be expanded by the external server.
 #[derive(Copy, Clone, Eq, PartialEq, Debug, serde_derive::Serialize, serde_derive::Deserialize)]
 pub enum ProcMacroKind {
+    /// A macro that derives implementations for a struct or enum.
     CustomDerive,
+    /// An attribute-like procedural macro.
     Attr,
     // This used to be called FuncLike, so that's what the server expects currently.
     #[serde(alias = "Bang")]
@@ -46,11 +49,13 @@ pub struct ProcMacroClient {
     path: AbsPathBuf,
 }
 
+/// Represents a dynamically loaded library containing procedural macros.
 pub struct MacroDylib {
     path: AbsPathBuf,
 }
 
 impl MacroDylib {
+    /// Creates a new MacroDylib instance with the given path.
     pub fn new(path: AbsPathBuf) -> MacroDylib {
         MacroDylib { path }
     }
@@ -66,6 +71,7 @@ pub struct ProcMacro {
     dylib_path: Arc<AbsPathBuf>,
     name: Box<str>,
     kind: ProcMacroKind,
+    dylib_last_modified: Option<SystemTime>,
 }
 
 impl Eq for ProcMacro {}
@@ -73,11 +79,13 @@ impl PartialEq for ProcMacro {
     fn eq(&self, other: &Self) -> bool {
         self.name == other.name
             && self.kind == other.kind
-            && Arc::ptr_eq(&self.dylib_path, &other.dylib_path)
+            && self.dylib_path == other.dylib_path
+            && self.dylib_last_modified == other.dylib_last_modified
             && Arc::ptr_eq(&self.process, &other.process)
     }
 }
 
+/// Represents errors encountered when communicating with the proc-macro server.
 #[derive(Clone, Debug)]
 pub struct ServerError {
     pub message: String,
@@ -97,15 +105,17 @@ impl fmt::Display for ServerError {
 
 impl ProcMacroClient {
     /// Spawns an external process as the proc macro server and returns a client connected to it.
-    pub fn spawn(
+    pub fn spawn<'a>(
         process_path: &AbsPath,
-        env: impl IntoIterator<Item = (impl AsRef<std::ffi::OsStr>, impl AsRef<std::ffi::OsStr>)>
-            + Clone,
+        env: impl IntoIterator<
+            Item = (impl AsRef<std::ffi::OsStr>, &'a Option<impl 'a + AsRef<std::ffi::OsStr>>),
+        > + Clone,
     ) -> io::Result<ProcMacroClient> {
         let process = ProcMacroServerProcess::run(process_path, env)?;
         Ok(ProcMacroClient { process: Arc::new(process), path: process_path.to_owned() })
     }
 
+    /// Returns the absolute path to the proc-macro server.
     pub fn server_path(&self) -> &AbsPath {
         &self.path
     }
@@ -116,6 +126,9 @@ impl ProcMacroClient {
         let macros = self.process.find_proc_macros(&dylib.path)?;
 
         let dylib_path = Arc::new(dylib.path);
+        let dylib_last_modified = std::fs::metadata(dylib_path.as_path())
+            .ok()
+            .and_then(|metadata| metadata.modified().ok());
         match macros {
             Ok(macros) => Ok(macros
                 .into_iter()
@@ -124,26 +137,32 @@ impl ProcMacroClient {
                     name: name.into(),
                     kind,
                     dylib_path: dylib_path.clone(),
+                    dylib_last_modified,
                 })
                 .collect()),
             Err(message) => Err(ServerError { message, io: None }),
         }
     }
 
+    /// Checks if the proc-macro server has exited.
     pub fn exited(&self) -> Option<&ServerError> {
         self.process.exited()
     }
 }
 
 impl ProcMacro {
+    /// Returns the name of the procedural macro.
     pub fn name(&self) -> &str {
         &self.name
     }
 
+    /// Returns the type of procedural macro.
     pub fn kind(&self) -> ProcMacroKind {
         self.kind
     }
 
+    /// Expands the procedural macro by sending an expansion request to the server.
+    /// The request carries span information along with environment variables and the working directory.
     pub fn expand(
         &self,
         subtree: tt::SubtreeView<'_, Span>,
@@ -152,7 +171,7 @@ impl ProcMacro {
         def_site: Span,
         call_site: Span,
         mixed_site: Span,
-        current_dir: Option<String>,
+        current_dir: String,
     ) -> Result<Result<tt::TopSubtree<Span>, PanicMessage>, ServerError> {
         let version = self.process.version();
 
@@ -180,7 +199,7 @@ impl ProcMacro {
             },
             lib: self.dylib_path.to_path_buf().into(),
             env,
-            current_dir,
+            current_dir: Some(current_dir),
         };
 
         let response = self.process.send_task(Request::ExpandMacro(Box::new(task)))?;
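
For reference, the `dylib_last_modified` value introduced above comes from the standard filesystem metadata APIs. A minimal sketch of capturing and comparing a file's modification time in the same best-effort way (the path is a placeholder, not the real dylib path):

```rust
use std::{fs, path::Path, time::SystemTime};

/// Best-effort modification time of a file; `None` if the file or its
/// timestamp is unavailable (mirrors the `.ok()` chaining in the diff).
fn mtime(path: &Path) -> Option<SystemTime> {
    fs::metadata(path).ok().and_then(|m| m.modified().ok())
}

fn main() {
    // Placeholder path: any file on disk works for the demonstration.
    let before = mtime(Path::new("Cargo.toml"));
    let after = mtime(Path::new("Cargo.toml"));
    // Two reads of the same, unchanged file compare equal, which is what the
    // extended `PartialEq` impl for `ProcMacro` relies on.
    assert_eq!(before, after);
}
```
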
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
index d998b23d3bbef..fcea75ef672a1 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
@@ -11,16 +11,17 @@ use paths::AbsPath;
 use stdx::JodChild;
 
 use crate::{
+    ProcMacroKind, ServerError,
     legacy_protocol::{
         json::{read_json, write_json},
         msg::{
-            Message, Request, Response, ServerConfig, SpanMode, CURRENT_API_VERSION,
-            RUST_ANALYZER_SPAN_SUPPORT,
+            CURRENT_API_VERSION, Message, RUST_ANALYZER_SPAN_SUPPORT, Request, Response,
+            ServerConfig, SpanMode,
         },
     },
-    ProcMacroKind, ServerError,
 };
 
+/// Represents a process handling proc-macro communication.
 #[derive(Debug)]
 pub(crate) struct ProcMacroServerProcess {
     /// The state of the proc-macro server process, the protocol is currently strictly sequential
@@ -32,6 +33,7 @@ pub(crate) struct ProcMacroServerProcess {
     exited: OnceLock<AssertUnwindSafe<ServerError>>,
 }
 
+/// Maintains the state of the proc-macro server process.
 #[derive(Debug)]
 struct ProcessSrvState {
     process: Process,
@@ -40,10 +42,12 @@ struct ProcessSrvState {
 }
 
 impl ProcMacroServerProcess {
-    pub(crate) fn run(
+    /// Starts the proc-macro server and performs a version check
+    pub(crate) fn run<'a>(
         process_path: &AbsPath,
-        env: impl IntoIterator<Item = (impl AsRef<std::ffi::OsStr>, impl AsRef<std::ffi::OsStr>)>
-            + Clone,
+        env: impl IntoIterator<
+            Item = (impl AsRef<std::ffi::OsStr>, &'a Option<impl 'a + AsRef<std::ffi::OsStr>>),
+        > + Clone,
     ) -> io::Result<ProcMacroServerProcess> {
         let create_srv = || {
             let mut process = Process::run(process_path, env.clone())?;
@@ -59,8 +63,7 @@ impl ProcMacroServerProcess {
         let mut srv = create_srv()?;
         tracing::info!("sending proc-macro server version check");
         match srv.version_check() {
-            Ok(v) if v > CURRENT_API_VERSION => Err(io::Error::new(
-                io::ErrorKind::Other,
+            Ok(v) if v > CURRENT_API_VERSION => Err(io::Error::other(
                 format!( "The version of the proc-macro server ({v}) in your Rust toolchain is newer than the version supported by your rust-analyzer ({CURRENT_API_VERSION}).
             This will prevent proc-macro expansion from working. Please consider updating your rust-analyzer to ensure compatibility with your current toolchain."
                 ),
@@ -79,20 +82,23 @@ impl ProcMacroServerProcess {
             Err(e) => {
                 tracing::info!(%e, "proc-macro version check failed");
                 Err(
-                    io::Error::new(io::ErrorKind::Other, format!("proc-macro server version check failed: {e}")),
+                    io::Error::other(format!("proc-macro server version check failed: {e}")),
                 )
             }
         }
     }
 
+    /// Returns the server error if the process has exited.
     pub(crate) fn exited(&self) -> Option<&ServerError> {
         self.exited.get().map(|it| &it.0)
     }
 
+    /// Retrieves the API version of the proc-macro server.
     pub(crate) fn version(&self) -> u32 {
         self.version
     }
 
+    /// Checks the API version of the running proc-macro server.
     fn version_check(&self) -> Result<u32, ServerError> {
         let request = Request::ApiVersionCheck {};
         let response = self.send_task(request)?;
@@ -103,6 +109,7 @@ impl ProcMacroServerProcess {
         }
     }
 
+    /// Enables rust-analyzer span mode if the server supports it.
     fn enable_rust_analyzer_spans(&self) -> Result<SpanMode, ServerError> {
         let request = Request::SetConfig(ServerConfig { span_mode: SpanMode::RustAnalyzer });
         let response = self.send_task(request)?;
@@ -113,6 +120,7 @@ impl ProcMacroServerProcess {
         }
     }
 
+    /// Finds proc-macros in a given dynamic library.
     pub(crate) fn find_proc_macros(
         &self,
         dylib_path: &AbsPath,
@@ -127,6 +135,7 @@ impl ProcMacroServerProcess {
         }
     }
 
+    /// Sends a request to the proc-macro server and waits for a response.
     pub(crate) fn send_task(&self, req: Request) -> Result<Response, ServerError> {
         if let Some(server_error) = self.exited.get() {
             return Err(server_error.0.clone());
@@ -177,20 +186,25 @@ impl ProcMacroServerProcess {
     }
 }
 
+/// Manages the execution of the proc-macro server process.
 #[derive(Debug)]
 struct Process {
     child: JodChild,
 }
 
 impl Process {
-    fn run(
+    /// Runs a new proc-macro server process with the specified environment variables.
+    fn run<'a>(
         path: &AbsPath,
-        env: impl IntoIterator<Item = (impl AsRef<std::ffi::OsStr>, impl AsRef<std::ffi::OsStr>)>,
+        env: impl IntoIterator<
+            Item = (impl AsRef<std::ffi::OsStr>, &'a Option<impl 'a + AsRef<std::ffi::OsStr>>),
+        >,
     ) -> io::Result<Process> {
         let child = JodChild(mk_child(path, env)?);
         Ok(Process { child })
     }
 
+    /// Retrieves stdin and stdout handles for the process.
     fn stdio(&mut self) -> Option<(ChildStdin, BufReader<ChildStdout>)> {
         let stdin = self.child.stdin.take()?;
         let stdout = self.child.stdout.take()?;
@@ -200,14 +214,22 @@ impl Process {
     }
 }
 
-fn mk_child(
+/// Creates and configures a new child process for the proc-macro server.
+fn mk_child<'a>(
     path: &AbsPath,
-    env: impl IntoIterator<Item = (impl AsRef<std::ffi::OsStr>, impl AsRef<std::ffi::OsStr>)>,
+    extra_env: impl IntoIterator<
+        Item = (impl AsRef<std::ffi::OsStr>, &'a Option<impl 'a + AsRef<std::ffi::OsStr>>),
+    >,
 ) -> io::Result<Child> {
     #[allow(clippy::disallowed_methods)]
     let mut cmd = Command::new(path);
-    cmd.envs(env)
-        .env("RUST_ANALYZER_INTERNALS_DO_NOT_USE", "this is unstable")
+    for env in extra_env {
+        match env {
+            (key, Some(val)) => cmd.env(key, val),
+            (key, None) => cmd.env_remove(key),
+        };
+    }
+    cmd.env("RUST_ANALYZER_INTERNALS_DO_NOT_USE", "this is unstable")
         .stdin(Stdio::piped())
         .stdout(Stdio::piped())
         .stderr(Stdio::inherit());
@@ -221,6 +243,7 @@ fn mk_child(
     cmd.spawn()
 }
 
+/// Sends a request to the server and reads the response.
 fn send_request(
     mut writer: &mut impl Write,
     mut reader: &mut impl BufRead,
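
The new `extra_env` item type makes `None` mean "remove this variable from the child's environment" rather than "set it to an empty value". A self-contained sketch of the same set-or-remove handling with `std::process::Command`; the variable names and the `printenv` invocation are only for demonstration:

```rust
use std::process::Command;

fn apply_extra_env<'a>(
    cmd: &mut Command,
    extra_env: impl IntoIterator<Item = (&'a str, Option<&'a str>)>,
) {
    for (key, val) in extra_env {
        match val {
            // Present: override (or introduce) the variable for the child.
            Some(val) => cmd.env(key, val),
            // Absent: scrub the variable even if it is set in our own environment.
            None => cmd.env_remove(key),
        };
    }
}

fn main() {
    let mut cmd = Command::new("printenv");
    apply_extra_env(&mut cmd, [("EXAMPLE_VAR", Some("1")), ("UNWANTED_VAR", None)]);
    // `printenv` is used only as a convenient way to observe the child's env;
    // errors are ignored on platforms where it is not available.
    let _ = cmd.status();
}
```
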
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml
index 57a28b00365f6..ab421021b8bfd 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml
@@ -8,6 +8,7 @@ authors.workspace = true
 edition.workspace = true
 license.workspace = true
 rust-version.workspace = true
+publish = false
 
 [dependencies]
 proc-macro-srv.workspace = true
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs
index de59e88aac40c..c47ed053254bf 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs
@@ -1,6 +1,7 @@
 //! A standalone binary for `proc-macro-srv`.
 //! Driver for proc macro server
 #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
+#![cfg_attr(not(feature = "sysroot-abi"), allow(unused_crate_dependencies))]
 #![allow(clippy::print_stderr)]
 
 #[cfg(feature = "in-rust-tree")]
@@ -14,7 +15,9 @@ use main_loop::run;
 fn main() -> std::io::Result<()> {
     let v = std::env::var("RUST_ANALYZER_INTERNALS_DO_NOT_USE");
     if v.is_err() {
-        eprintln!("This is an IDE implementation detail, you can use this tool by exporting RUST_ANALYZER_INTERNALS_DO_NOT_USE.");
+        eprintln!(
+            "This is an IDE implementation detail, you can use this tool by exporting RUST_ANALYZER_INTERNALS_DO_NOT_USE."
+        );
         eprintln!(
             "Note that this tool's API is highly unstable and may break without prior notice"
         );
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs
index 569070766f1c6..f54dff1f2d822 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs
@@ -4,8 +4,8 @@ use std::io;
 use proc_macro_api::legacy_protocol::{
     json::{read_json, write_json},
     msg::{
-        self, deserialize_span_data_index_map, serialize_span_data_index_map, ExpandMacroData,
-        ExpnGlobals, Message, SpanMode, TokenId, CURRENT_API_VERSION,
+        self, CURRENT_API_VERSION, ExpandMacroData, ExpnGlobals, Message, SpanMode, TokenId,
+        deserialize_span_data_index_map, serialize_span_data_index_map,
     },
 };
 use proc_macro_srv::EnvSnapshot;
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml
index d3b56b402ea83..8fd675d0d31f4 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml
@@ -13,11 +13,9 @@ rust-version.workspace = true
 
 [dependencies]
 object.workspace = true
-libc.workspace = true
 libloading.workspace = true
 memmap2.workspace = true
 
-stdx.workspace = true
 tt.workspace = true
 syntax-bridge.workspace = true
 paths.workspace = true
@@ -27,6 +25,9 @@ intern.workspace = true
 
 ra-ap-rustc_lexer.workspace = true
 
+[target.'cfg(unix)'.dependencies]
+libc.workspace = true
+
 [dev-dependencies]
 expect-test.workspace = true
 
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/Cargo.toml
index 16fcc92962072..eddefb33c0ff1 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/Cargo.toml
@@ -3,10 +3,10 @@ name = "proc-macro-test"
 version = "0.0.0"
 publish = false
 
-edition = "2021"
+edition = "2024"
 license = "MIT OR Apache-2.0"
 
 [lib]
 
 [build-dependencies]
-cargo_metadata = "0.18.1"
+cargo_metadata = "0.19.2"
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/build.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/build.rs
index d3d58a6df0115..b97569d4dbdf1 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/build.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/build.rs
@@ -110,7 +110,7 @@ fn main() {
     let mut artifact_path = None;
     for message in Message::parse_stream(output.stdout.as_slice()) {
         if let Message::CompilerArtifact(artifact) = message.unwrap() {
-            if artifact.target.kind.contains(&"proc-macro".to_string())
+            if artifact.target.kind.contains(&cargo_metadata::TargetKind::ProcMacro)
                 && (artifact.package_id.repr.starts_with(&repr)
                     || artifact.package_id.repr == pkgid)
             {
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/.gitignore b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/.gitignore
index 2c96eb1b6517f..2f7896d1d1365 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/.gitignore
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/.gitignore
@@ -1,2 +1 @@
 target/
-Cargo.lock
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/Cargo.lock b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/Cargo.lock
new file mode 100644
index 0000000000000..99c7ca10affb2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/Cargo.lock
@@ -0,0 +1,7 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 4
+
+[[package]]
+name = "proc-macro-test-impl"
+version = "0.0.0"
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/Cargo.toml
index fb98d758a8b7b..33b7c2bb0ad66 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/Cargo.toml
@@ -2,7 +2,7 @@
 name = "proc-macro-test-impl"
 version = "0.0.0"
 license = "MIT OR Apache-2.0"
-edition = "2021"
+edition = "2024"
 publish = false
 
 [lib]
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs
index 749a7760592b5..dfdbb4c95fcac 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs
@@ -90,7 +90,7 @@ pub fn attr_error(args: TokenStream, item: TokenStream) -> TokenStream {
 
 #[proc_macro_derive(DeriveEmpty)]
 pub fn derive_empty(_item: TokenStream) -> TokenStream {
-    TokenStream::new()
+    TokenStream::default()
 }
 
 #[proc_macro_derive(DerivePanic)]
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs
index cbf7a277bfae6..c49159df9916d 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs
@@ -9,7 +9,7 @@ use libloading::Library;
 use object::Object;
 use paths::{Utf8Path, Utf8PathBuf};
 
-use crate::{proc_macros::ProcMacros, server_impl::TopSubtree, ProcMacroKind, ProcMacroSrvSpan};
+use crate::{ProcMacroKind, ProcMacroSrvSpan, proc_macros::ProcMacros, server_impl::TopSubtree};
 
 /// Loads dynamic library in platform dependent manner.
 ///
@@ -21,13 +21,32 @@ use crate::{proc_macros::ProcMacros, server_impl::TopSubtree, ProcMacroKind, Pro
 /// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample/issues/1)
 ///
 /// It seems that on Windows that behaviour is default, so we do nothing in that case.
+///
+/// # Safety
+///
+/// The caller is responsible for ensuring that the path is a valid proc-macro library.
 #[cfg(windows)]
-fn load_library(file: &Utf8Path) -> Result<Library, libloading::Error> {
+unsafe fn load_library(file: &Utf8Path) -> Result<Library, libloading::Error> {
+    // SAFETY: The caller is responsible for ensuring that the path is a valid proc-macro library.
     unsafe { Library::new(file) }
 }
 
+/// Loads dynamic library in platform dependent manner.
+///
+/// For unix, you have to use the RTLD_DEEPBIND flag to escape problems described
+/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample)
+/// and [here](https://github.com/rust-lang/rust/issues/60593).
+///
+/// Usage of RTLD_DEEPBIND
+/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample/issues/1)
+///
+/// It seems that on Windows that behaviour is default, so we do nothing in that case.
+///
+/// # Safety
+///
+/// The caller is responsible for ensuring that the path is a valid proc-macro library.
 #[cfg(unix)]
-fn load_library(file: &Utf8Path) -> Result<Library, libloading::Error> {
+unsafe fn load_library(file: &Utf8Path) -> Result<Library, libloading::Error> {
     // not defined by POSIX, different values on mips vs other targets
     #[cfg(target_env = "gnu")]
     use libc::RTLD_DEEPBIND;
@@ -39,6 +58,7 @@ fn load_library(file: &Utf8Path) -> Result<Library, libloading::Error> {
     #[cfg(not(target_env = "gnu"))]
     const RTLD_DEEPBIND: std::os::raw::c_int = 0x0;
 
+    // SAFETY: The caller is responsible for ensuring that the path is a valid proc-macro library.
     unsafe { UnixLibrary::open(Some(file), RTLD_NOW | RTLD_DEEPBIND).map(|lib| lib.into()) }
 }
 
@@ -84,26 +104,32 @@ struct ProcMacroLibrary {
 impl ProcMacroLibrary {
     fn open(path: &Utf8Path) -> Result<Self, LoadProcMacroDylibError> {
         let file = fs::File::open(path)?;
+        #[allow(clippy::undocumented_unsafe_blocks)] // FIXME
         let file = unsafe { memmap2::Mmap::map(&file) }?;
         let obj = object::File::parse(&*file)
             .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
         let version_info = version::read_dylib_info(&obj)?;
+        if version_info.version_string != crate::RUSTC_VERSION_STRING {
+            return Err(LoadProcMacroDylibError::AbiMismatch(version_info.version_string));
+        }
+
         let symbol_name =
             find_registrar_symbol(&obj).map_err(invalid_data_err)?.ok_or_else(|| {
                 invalid_data_err(format!("Cannot find registrar symbol in file {path}"))
             })?;
 
-        let lib = load_library(path).map_err(invalid_data_err)?;
-        let proc_macros = unsafe {
-            // SAFETY: We extend the lifetime here to avoid referential borrow problems
-            // We never reveal proc_macros to the outside and drop it before _lib
-            std::mem::transmute::<&ProcMacros, &'static ProcMacros>(ProcMacros::from_lib(
-                &lib,
-                symbol_name,
-                &version_info.version_string,
-            )?)
-        };
-        Ok(ProcMacroLibrary { _lib: lib, proc_macros })
+        // SAFETY: We have verified the validity of the dylib as a proc-macro library
+        let lib = unsafe { load_library(path) }.map_err(invalid_data_err)?;
+        // SAFETY: We have verified the validity of the dylib as a proc-macro library.
+        // The 'static lifetime is a lie; it is actually the lifetime of the library, but that is
+        // unavoidable due to the self-referentiality. We make sure that the library is not
+        // dropped before the symbol is.
+        let proc_macros =
+            unsafe { lib.get::<&'static &'static ProcMacros>(symbol_name.as_bytes()) };
+        match proc_macros {
+            Ok(proc_macros) => Ok(ProcMacroLibrary { proc_macros: *proc_macros, _lib: lib }),
+            Err(e) => Err(e.into()),
+        }
     }
 }
 
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib/version.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib/version.rs
index 7668f419040cf..3b2551f08c480 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib/version.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib/version.rs
@@ -27,7 +27,7 @@ pub fn read_dylib_info(obj: &object::File<'_>) -> io::Result<RustCInfo> {
     let mut items = ver_str.split_whitespace();
     let tag = items.next().ok_or_else(|| err!("version format error"))?;
     if tag != "rustc" {
-        return Err(err!("version format error (No rustc tag)"));
+        return Err(err!("no rustc tag"));
     }
 
     let version_part = items.next().ok_or_else(|| err!("no version string"))?;
@@ -83,7 +83,7 @@ fn read_section<'a>(obj: &object::File<'a>, section_name: &str) -> io::Result<&'
 /// A proc macro crate binary's ".rustc" section has following byte layout:
 /// * [b'r',b'u',b's',b't',0,0,0,5] is the first 8 bytes
 /// * ff060000 734e6150 is followed, it's the snappy format magic bytes,
-///   means bytes from here(including this sequence) are compressed in
+///   means bytes from here (including this sequence) are compressed in
 ///   snappy compression format. Version info is inside here, so decompress
 ///   this.
 ///
@@ -110,7 +110,7 @@ pub fn read_version(obj: &object::File<'_>) -> io::Result<String> {
         ));
     }
     let version = u32::from_be_bytes([dot_rustc[4], dot_rustc[5], dot_rustc[6], dot_rustc[7]]);
-    // Last breaking version change is:
+    // Last version with breaking changes is:
     // https://github.com/rust-lang/rust/commit/b94cfefc860715fb2adf72a6955423d384c69318
     let (mut metadata_portion, bytes_before_version) = match version {
         8 => {
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs
index f28821b4afc5c..223c5a54b7034 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs
@@ -11,10 +11,11 @@
 //!   rustc rather than `unstable`. (Although in general ABI compatibility is still an issue)…
 
 #![cfg(any(feature = "sysroot-abi", rust_analyzer))]
+#![cfg_attr(not(feature = "sysroot-abi"), allow(unused_crate_dependencies))]
 #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
 #![feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)]
 #![allow(unreachable_pub, internal_features, clippy::disallowed_types, clippy::print_stderr)]
-#![deny(deprecated_safe)]
+#![deny(deprecated_safe, clippy::undocumented_unsafe_blocks)]
 
 extern crate proc_macro;
 #[cfg(feature = "in-rust-tree")]
@@ -30,7 +31,7 @@ mod proc_macros;
 mod server_impl;
 
 use std::{
-    collections::{hash_map::Entry, HashMap},
+    collections::{HashMap, hash_map::Entry},
     env,
     ffi::OsString,
     fs,
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/proc_macros.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/proc_macros.rs
index 58f5e80dc4ea6..18532706c4aaa 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/proc_macros.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/proc_macros.rs
@@ -2,11 +2,7 @@
 
 use proc_macro::bridge;
 
-use libloading::Library;
-
-use crate::{
-    dylib::LoadProcMacroDylibError, server_impl::TopSubtree, ProcMacroKind, ProcMacroSrvSpan,
-};
+use crate::{ProcMacroKind, ProcMacroSrvSpan, server_impl::TopSubtree};
 
 #[repr(transparent)]
 pub(crate) struct ProcMacros([bridge::client::ProcMacro]);
@@ -18,28 +14,6 @@ impl From<bridge::PanicMessage> for crate::PanicMessage {
 }
 
 impl ProcMacros {
-    /// Load a new ABI.
-    ///
-    /// # Arguments
-    ///
-    /// *`lib` - The dynamic library containing the macro implementations
-    /// *`symbol_name` - The symbol name the macros can be found attributes
-    /// *`info` - RustCInfo about the compiler that was used to compile the
-    ///           macro crate. This is the information we use to figure out
-    ///           which ABI to return
-    pub(crate) fn from_lib<'l>(
-        lib: &'l Library,
-        symbol_name: String,
-        version_string: &str,
-    ) -> Result<&'l ProcMacros, LoadProcMacroDylibError> {
-        if version_string != crate::RUSTC_VERSION_STRING {
-            return Err(LoadProcMacroDylibError::AbiMismatch(version_string.to_owned()));
-        }
-        unsafe { lib.get::<&'l &'l ProcMacros>(symbol_name.as_bytes()) }
-            .map(|it| **it)
-            .map_err(Into::into)
-    }
-
     pub(crate) fn expand<S: ProcMacroSrvSpan>(
         &self,
         macro_name: &str,
@@ -52,7 +26,7 @@ impl ProcMacros {
         let parsed_body = crate::server_impl::TokenStream::with_subtree(macro_body);
 
         let parsed_attributes = attributes
-            .map_or_else(crate::server_impl::TokenStream::new, |attr| {
+            .map_or_else(crate::server_impl::TokenStream::default, |attr| {
                 crate::server_impl::TokenStream::with_subtree(attr)
             });
 
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
index 80f6d85a3d05c..47555a5db2f74 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
@@ -11,10 +11,10 @@ use std::{
 
 use intern::Symbol;
 use proc_macro::bridge::{self, server};
-use span::{Span, FIXUP_ERASED_FILE_AST_ID_MARKER};
+use span::{FIXUP_ERASED_FILE_AST_ID_MARKER, Span};
 use tt::{TextRange, TextSize};
 
-use crate::server_impl::{literal_kind_to_internal, token_stream::TokenStreamBuilder, TopSubtree};
+use crate::server_impl::{TopSubtree, literal_kind_to_internal, token_stream::TokenStreamBuilder};
 mod tt {
     pub use tt::*;
 
@@ -207,7 +207,7 @@ impl server::TokenStream for RaSpanServer {
         base: Option<Self::TokenStream>,
         trees: Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>>,
     ) -> Self::TokenStream {
-        let mut builder = TokenStreamBuilder::new();
+        let mut builder = TokenStreamBuilder::default();
         if let Some(base) = base {
             builder.push(base);
         }
@@ -222,7 +222,7 @@ impl server::TokenStream for RaSpanServer {
         base: Option<Self::TokenStream>,
         streams: Vec<Self::TokenStream>,
     ) -> Self::TokenStream {
-        let mut builder = TokenStreamBuilder::new();
+        let mut builder = TokenStreamBuilder::default();
         if let Some(base) = base {
             builder.push(base);
         }
@@ -415,7 +415,7 @@ impl server::Server for RaSpanServer {
 
 #[cfg(test)]
 mod tests {
-    use span::{EditionedFileId, FileId, SyntaxContextId};
+    use span::{EditionedFileId, FileId, SyntaxContext};
 
     use super::*;
 
@@ -427,7 +427,7 @@ mod tests {
                 file_id: EditionedFileId::current_edition(FileId::from_raw(0)),
                 ast_id: span::ErasedFileAstId::from_raw(0),
             },
-            ctx: SyntaxContextId::root(span::Edition::CURRENT),
+            ctx: SyntaxContext::root(span::Edition::CURRENT),
         };
         let s = TokenStream {
             token_trees: vec![
@@ -469,7 +469,7 @@ mod tests {
                 file_id: EditionedFileId::current_edition(FileId::from_raw(0)),
                 ast_id: span::ErasedFileAstId::from_raw(0),
             },
-            ctx: SyntaxContextId::root(span::Edition::CURRENT),
+            ctx: SyntaxContext::root(span::Edition::CURRENT),
         };
         let subtree_paren_a = vec![
             tt::TokenTree::Subtree(tt::Subtree {
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_id.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_id.rs
index 4d7c7c46766b0..c002be4be6ffd 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_id.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_id.rs
@@ -5,7 +5,7 @@ use std::ops::{Bound, Range};
 use intern::Symbol;
 use proc_macro::bridge::{self, server};
 
-use crate::server_impl::{literal_kind_to_internal, token_stream::TokenStreamBuilder, TopSubtree};
+use crate::server_impl::{TopSubtree, literal_kind_to_internal, token_stream::TokenStreamBuilder};
 mod tt {
     pub use span::TokenId;
 
@@ -187,7 +187,7 @@ impl server::TokenStream for TokenIdServer {
         base: Option<Self::TokenStream>,
         trees: Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>>,
     ) -> Self::TokenStream {
-        let mut builder = TokenStreamBuilder::new();
+        let mut builder = TokenStreamBuilder::default();
         if let Some(base) = base {
             builder.push(base);
         }
@@ -202,7 +202,7 @@ impl server::TokenStream for TokenIdServer {
         base: Option<Self::TokenStream>,
         streams: Vec<Self::TokenStream>,
     ) -> Self::TokenStream {
-        let mut builder = TokenStreamBuilder::new();
+        let mut builder = TokenStreamBuilder::default();
         if let Some(base) = base {
             builder.push(base);
         }
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_stream.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_stream.rs
index 645f7e7c59a32..4946a4f2a6218 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_stream.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_stream.rs
@@ -2,13 +2,20 @@
 
 use proc_macro::bridge;
 
-use crate::server_impl::{delim_to_external, literal_kind_to_external, TopSubtree};
+use crate::server_impl::{TopSubtree, delim_to_external, literal_kind_to_external};
 
 #[derive(Clone)]
 pub struct TokenStream<S> {
     pub(super) token_trees: Vec<tt::TokenTree<S>>,
 }
 
+// #[derive(Default)] would mean that `S: Default`.
+impl<S> Default for TokenStream<S> {
+    fn default() -> Self {
+        Self { token_trees: Default::default() }
+    }
+}
+
 impl<S: std::fmt::Debug + Copy> std::fmt::Debug for TokenStream<S> {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         f.debug_struct("TokenStream")
@@ -17,17 +24,7 @@ impl<S: std::fmt::Debug + Copy> std::fmt::Debug for TokenStream<S> {
     }
 }
 
-impl<S> Default for TokenStream<S> {
-    fn default() -> Self {
-        Self { token_trees: vec![] }
-    }
-}
-
 impl<S: Copy> TokenStream<S> {
-    pub(crate) fn new() -> Self {
-        TokenStream::default()
-    }
-
     pub(crate) fn with_subtree(subtree: TopSubtree<S>) -> Self {
         let delimiter_kind = subtree.top_subtree().delimiter.kind;
         let mut token_trees = subtree.0;
@@ -145,10 +142,6 @@ pub(super) mod token_stream_impls {
 }
 
 impl<S: Copy> TokenStreamBuilder<S> {
-    pub(super) fn new() -> TokenStreamBuilder<S> {
-        TokenStreamBuilder { acc: TokenStream::new() }
-    }
-
     pub(super) fn push(&mut self, stream: TokenStream<S>) {
         self.acc.token_trees.extend(stream.token_trees)
     }
@@ -157,3 +150,9 @@ impl<S: Copy> TokenStreamBuilder<S> {
         self.acc
     }
 }
+
+impl<S: Copy> Default for TokenStreamBuilder<S> {
+    fn default() -> Self {
+        Self { acc: TokenStream::default() }
+    }
+}
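
The `// #[derive(Default)] would mean that `S: Default`.` comment above points at a common generics pitfall: the derive adds an `S: Default` bound even though the field never needs one. A standalone illustration (the `Stream`/`NoDefault` names are invented for the example):

```rust
// Deriving Default here would generate `impl<S: Default> Default for Stream<S>`,
// so `Stream<NoDefault>` would not get a `Default` impl even though `Vec<S>` is
// always default-constructible.
struct Stream<S> {
    items: Vec<S>,
}

// Manual impl: no bound on `S`, matching the impls added in the diff above.
impl<S> Default for Stream<S> {
    fn default() -> Self {
        Stream { items: Vec::new() }
    }
}

// A type that deliberately has no `Default` impl.
struct NoDefault;

fn main() {
    let s: Stream<NoDefault> = Stream::default();
    assert!(s.items.is_empty());
}
```
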
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs
index 4bd365be7ca8b..011221459657a 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs
@@ -12,7 +12,7 @@ fn test_derive_empty() {
         "DeriveEmpty",
         r#"struct S;"#,
         expect!["SUBTREE $$ 1 1"],
-        expect!["SUBTREE $$ 42:2@0..100#2 42:2@0..100#2"],
+        expect!["SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024"],
     );
 }
 
@@ -29,12 +29,12 @@ fn test_derive_error() {
                 LITERAL Str #[derive(DeriveError)] struct S ; 1
               PUNCH   ; [alone] 1"#]],
         expect![[r#"
-            SUBTREE $$ 42:2@0..100#2 42:2@0..100#2
-              IDENT   compile_error 42:2@0..100#2
-              PUNCH   ! [alone] 42:2@0..100#2
-              SUBTREE () 42:2@0..100#2 42:2@0..100#2
-                LITERAL Str #[derive(DeriveError)] struct S ; 42:2@0..100#2
-              PUNCH   ; [alone] 42:2@0..100#2"#]],
+            SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
+              IDENT   compile_error 42:2@0..100#ROOT2024
+              PUNCH   ! [alone] 42:2@0..100#ROOT2024
+              SUBTREE () 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
+                LITERAL Str #[derive(DeriveError)] struct S ; 42:2@0..100#ROOT2024
+              PUNCH   ; [alone] 42:2@0..100#ROOT2024"#]],
     );
 }
 
@@ -53,14 +53,14 @@ fn test_fn_like_macro_noop() {
               PUNCH   , [alone] 1
               SUBTREE [] 1 1"#]],
         expect![[r#"
-            SUBTREE $$ 42:2@0..100#2 42:2@0..100#2
-              IDENT   ident 42:2@0..5#2
-              PUNCH   , [alone] 42:2@5..6#2
-              LITERAL Integer 0 42:2@7..8#2
-              PUNCH   , [alone] 42:2@8..9#2
-              LITERAL Integer 1 42:2@10..11#2
-              PUNCH   , [alone] 42:2@11..12#2
-              SUBTREE [] 42:2@13..14#2 42:2@14..15#2"#]],
+            SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
+              IDENT   ident 42:2@0..5#ROOT2024
+              PUNCH   , [alone] 42:2@5..6#ROOT2024
+              LITERAL Integer 0 42:2@7..8#ROOT2024
+              PUNCH   , [alone] 42:2@8..9#ROOT2024
+              LITERAL Integer 1 42:2@10..11#ROOT2024
+              PUNCH   , [alone] 42:2@11..12#ROOT2024
+              SUBTREE [] 42:2@13..14#ROOT2024 42:2@14..15#ROOT2024"#]],
     );
 }
 
@@ -75,10 +75,10 @@ fn test_fn_like_macro_clone_ident_subtree() {
               PUNCH   , [alone] 1
               SUBTREE [] 1 1"#]],
         expect![[r#"
-            SUBTREE $$ 42:2@0..100#2 42:2@0..100#2
-              IDENT   ident 42:2@0..5#2
-              PUNCH   , [alone] 42:2@5..6#2
-              SUBTREE [] 42:2@7..8#2 42:2@7..8#2"#]],
+            SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
+              IDENT   ident 42:2@0..5#ROOT2024
+              PUNCH   , [alone] 42:2@5..6#ROOT2024
+              SUBTREE [] 42:2@7..8#ROOT2024 42:2@7..8#ROOT2024"#]],
     );
 }
 
@@ -91,8 +91,8 @@ fn test_fn_like_macro_clone_raw_ident() {
             SUBTREE $$ 1 1
               IDENT   r#async 1"#]],
         expect![[r#"
-            SUBTREE $$ 42:2@0..100#2 42:2@0..100#2
-              IDENT   r#async 42:2@0..7#2"#]],
+            SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
+              IDENT   r#async 42:2@0..7#ROOT2024"#]],
     );
 }
 
@@ -106,8 +106,8 @@ fn test_fn_like_fn_like_span_join() {
             SUBTREE $$ 1 1
               IDENT   r#joined 1"#]],
         expect![[r#"
-            SUBTREE $$ 42:2@0..100#2 42:2@0..100#2
-              IDENT   r#joined 42:2@0..11#2"#]],
+            SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
+              IDENT   r#joined 42:2@0..11#ROOT2024"#]],
     );
 }
 
@@ -123,10 +123,10 @@ fn test_fn_like_fn_like_span_ops() {
               IDENT   resolved_at_def_site 1
               IDENT   start_span 1"#]],
         expect![[r#"
-            SUBTREE $$ 42:2@0..100#2 42:2@0..100#2
-              IDENT   set_def_site 41:1@0..150#2
-              IDENT   resolved_at_def_site 42:2@13..33#2
-              IDENT   start_span 42:2@34..34#2"#]],
+            SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
+              IDENT   set_def_site 41:1@0..150#ROOT2024
+              IDENT   resolved_at_def_site 42:2@13..33#ROOT2024
+              IDENT   start_span 42:2@34..34#ROOT2024"#]],
     );
 }
 
@@ -145,14 +145,14 @@ fn test_fn_like_mk_literals() {
               LITERAL Integer 123i64 1
               LITERAL Integer 123 1"#]],
         expect![[r#"
-            SUBTREE $$ 42:2@0..100#2 42:2@0..100#2
-              LITERAL ByteStr byte_string 42:2@0..100#2
-              LITERAL Char c 42:2@0..100#2
-              LITERAL Str string 42:2@0..100#2
-              LITERAL Float 3.14f64 42:2@0..100#2
-              LITERAL Float 3.14 42:2@0..100#2
-              LITERAL Integer 123i64 42:2@0..100#2
-              LITERAL Integer 123 42:2@0..100#2"#]],
+            SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
+              LITERAL ByteStr byte_string 42:2@0..100#ROOT2024
+              LITERAL Char c 42:2@0..100#ROOT2024
+              LITERAL Str string 42:2@0..100#ROOT2024
+              LITERAL Float 3.14f64 42:2@0..100#ROOT2024
+              LITERAL Float 3.14 42:2@0..100#ROOT2024
+              LITERAL Integer 123i64 42:2@0..100#ROOT2024
+              LITERAL Integer 123 42:2@0..100#ROOT2024"#]],
     );
 }
 
@@ -166,9 +166,9 @@ fn test_fn_like_mk_idents() {
               IDENT   standard 1
               IDENT   r#raw 1"#]],
         expect![[r#"
-            SUBTREE $$ 42:2@0..100#2 42:2@0..100#2
-              IDENT   standard 42:2@0..100#2
-              IDENT   r#raw 42:2@0..100#2"#]],
+            SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
+              IDENT   standard 42:2@0..100#ROOT2024
+              IDENT   r#raw 42:2@0..100#ROOT2024"#]],
     );
 }
 
@@ -200,27 +200,27 @@ fn test_fn_like_macro_clone_literals() {
               PUNCH   , [alone] 1
               LITERAL CStr null 1"#]],
         expect![[r#"
-            SUBTREE $$ 42:2@0..100#2 42:2@0..100#2
-              LITERAL Integer 1u16 42:2@0..4#2
-              PUNCH   , [alone] 42:2@4..5#2
-              LITERAL Integer 2_u32 42:2@6..11#2
-              PUNCH   , [alone] 42:2@11..12#2
-              PUNCH   - [alone] 42:2@13..14#2
-              LITERAL Integer 4i64 42:2@14..18#2
-              PUNCH   , [alone] 42:2@18..19#2
-              LITERAL Float 3.14f32 42:2@20..27#2
-              PUNCH   , [alone] 42:2@27..28#2
-              LITERAL Str hello bridge 42:2@29..43#2
-              PUNCH   , [alone] 42:2@43..44#2
-              LITERAL Str suffixedsuffix 42:2@45..61#2
-              PUNCH   , [alone] 42:2@61..62#2
-              LITERAL StrRaw(2) raw 42:2@63..73#2
-              PUNCH   , [alone] 42:2@73..74#2
-              LITERAL Char a 42:2@75..78#2
-              PUNCH   , [alone] 42:2@78..79#2
-              LITERAL Byte b 42:2@80..84#2
-              PUNCH   , [alone] 42:2@84..85#2
-              LITERAL CStr null 42:2@86..93#2"#]],
+            SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
+              LITERAL Integer 1u16 42:2@0..4#ROOT2024
+              PUNCH   , [alone] 42:2@4..5#ROOT2024
+              LITERAL Integer 2_u32 42:2@6..11#ROOT2024
+              PUNCH   , [alone] 42:2@11..12#ROOT2024
+              PUNCH   - [alone] 42:2@13..14#ROOT2024
+              LITERAL Integer 4i64 42:2@14..18#ROOT2024
+              PUNCH   , [alone] 42:2@18..19#ROOT2024
+              LITERAL Float 3.14f32 42:2@20..27#ROOT2024
+              PUNCH   , [alone] 42:2@27..28#ROOT2024
+              LITERAL Str hello bridge 42:2@29..43#ROOT2024
+              PUNCH   , [alone] 42:2@43..44#ROOT2024
+              LITERAL Str suffixedsuffix 42:2@45..61#ROOT2024
+              PUNCH   , [alone] 42:2@61..62#ROOT2024
+              LITERAL StrRaw(2) raw 42:2@63..73#ROOT2024
+              PUNCH   , [alone] 42:2@73..74#ROOT2024
+              LITERAL Char a 42:2@75..78#ROOT2024
+              PUNCH   , [alone] 42:2@78..79#ROOT2024
+              LITERAL Byte b 42:2@80..84#ROOT2024
+              PUNCH   , [alone] 42:2@84..85#ROOT2024
+              LITERAL CStr null 42:2@86..93#ROOT2024"#]],
     );
 }
 
@@ -241,12 +241,12 @@ fn test_attr_macro() {
                 LITERAL Str #[attr_error(some arguments)] mod m {} 1
               PUNCH   ; [alone] 1"#]],
         expect![[r#"
-            SUBTREE $$ 42:2@0..100#2 42:2@0..100#2
-              IDENT   compile_error 42:2@0..100#2
-              PUNCH   ! [alone] 42:2@0..100#2
-              SUBTREE () 42:2@0..100#2 42:2@0..100#2
-                LITERAL Str #[attr_error(some arguments)] mod m {} 42:2@0..100#2
-              PUNCH   ; [alone] 42:2@0..100#2"#]],
+            SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
+              IDENT   compile_error 42:2@0..100#ROOT2024
+              PUNCH   ! [alone] 42:2@0..100#ROOT2024
+              SUBTREE () 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
+                LITERAL Str #[attr_error(some arguments)] mod m {} 42:2@0..100#ROOT2024
+              PUNCH   ; [alone] 42:2@0..100#ROOT2024"#]],
     );
 }
 
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs
index 1b085520d5656..a476a70a74095 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs
@@ -1,10 +1,10 @@
 //! utils used in proc-macro tests
 
 use expect_test::Expect;
-use span::{EditionedFileId, ErasedFileAstId, FileId, Span, SpanAnchor, SyntaxContextId, TokenId};
+use span::{EditionedFileId, ErasedFileAstId, FileId, Span, SpanAnchor, SyntaxContext, TokenId};
 use tt::TextRange;
 
-use crate::{dylib, proc_macro_test_dylib_path, EnvSnapshot, ProcMacroSrv};
+use crate::{EnvSnapshot, ProcMacroSrv, dylib, proc_macro_test_dylib_path};
 
 fn parse_string(call_site: TokenId, src: &str) -> crate::server_impl::TokenStream<TokenId> {
     crate::server_impl::TokenStream::with_subtree(crate::server_impl::TopSubtree(
@@ -17,7 +17,7 @@ fn parse_string(call_site: TokenId, src: &str) -> crate::server_impl::TokenStrea
 
 fn parse_string_spanned(
     anchor: SpanAnchor,
-    call_site: SyntaxContextId,
+    call_site: SyntaxContext,
     src: &str,
 ) -> crate::server_impl::TokenStream<Span> {
     crate::server_impl::TokenStream::with_subtree(crate::server_impl::TopSubtree(
@@ -81,7 +81,7 @@ fn assert_expand_impl(
             file_id: EditionedFileId::current_edition(FileId::from_raw(41)),
             ast_id: ErasedFileAstId::from_raw(1),
         },
-        ctx: SyntaxContextId::root(span::Edition::CURRENT),
+        ctx: SyntaxContext::root(span::Edition::CURRENT),
     };
     let call_site = Span {
         range: TextRange::new(0.into(), 100.into()),
@@ -89,7 +89,7 @@ fn assert_expand_impl(
             file_id: EditionedFileId::current_edition(FileId::from_raw(42)),
             ast_id: ErasedFileAstId::from_raw(2),
         },
-        ctx: SyntaxContextId::root(span::Edition::CURRENT),
+        ctx: SyntaxContext::root(span::Edition::CURRENT),
     };
     let mixed_site = call_site;
 
diff --git a/src/tools/rust-analyzer/crates/profile/Cargo.toml b/src/tools/rust-analyzer/crates/profile/Cargo.toml
index 9384fe265584f..1fb13832720e6 100644
--- a/src/tools/rust-analyzer/crates/profile/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/profile/Cargo.toml
@@ -13,12 +13,14 @@ rust-version.workspace = true
 
 [dependencies]
 cfg-if = "1.0.0"
-libc.workspace = true
-jemalloc-ctl = { version = "0.5.0", package = "tikv-jemalloc-ctl", optional = true }
+jemalloc-ctl = { version = "0.5.4", package = "tikv-jemalloc-ctl", optional = true }
 
 [target.'cfg(all(target_os = "linux", not(target_env = "ohos")))'.dependencies]
 perf-event = "=0.4.7"
 
+[target.'cfg(all(target_os = "linux", target_env = "gnu"))'.dependencies]
+libc.workspace = true
+
 [target.'cfg(windows)'.dependencies]
 windows-sys = { version = "0.59", features = [
     "Win32_System_Threading",
diff --git a/src/tools/rust-analyzer/crates/profile/src/memory_usage.rs b/src/tools/rust-analyzer/crates/profile/src/memory_usage.rs
index f5b8eca060f96..1462259d627b3 100644
--- a/src/tools/rust-analyzer/crates/profile/src/memory_usage.rs
+++ b/src/tools/rust-analyzer/crates/profile/src/memory_usage.rs
@@ -78,7 +78,8 @@ fn memusage_linux() -> MemoryUsage {
         let alloc = unsafe { libc::mallinfo() }.uordblks as isize;
         MemoryUsage { allocated: Bytes(alloc) }
     } else {
-        let mallinfo2: fn() -> libc::mallinfo2 = unsafe { std::mem::transmute(mallinfo2) };
+        let mallinfo2: extern "C" fn() -> libc::mallinfo2 =
+            unsafe { std::mem::transmute(mallinfo2) };
         let alloc = mallinfo2().uordblks as isize;
         MemoryUsage { allocated: Bytes(alloc) }
     }
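
The fix above matters because `fn() -> T` and `extern "C" fn() -> T` are distinct pointer types with different ABIs, so a transmute target has to spell the ABI out. A tiny illustration of the type distinction, no transmute required:

```rust
extern "C" fn answer() -> i32 {
    42
}

fn main() {
    // The function item coerces to a C-ABI function pointer...
    let c_ptr: extern "C" fn() -> i32 = answer;
    // ...but not to a plain Rust-ABI pointer; this would not compile:
    // let rust_ptr: fn() -> i32 = answer;
    assert_eq!(c_ptr(), 42);
}
```
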
diff --git a/src/tools/rust-analyzer/crates/project-model/Cargo.toml b/src/tools/rust-analyzer/crates/project-model/Cargo.toml
index 83def0e6b2a91..64ea75922fbe4 100644
--- a/src/tools/rust-analyzer/crates/project-model/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/project-model/Cargo.toml
@@ -34,7 +34,7 @@ stdx.workspace = true
 toolchain.workspace = true
 
 [dev-dependencies]
-expect-test = "1.4.0"
+expect-test = "1.5.1"
 
 [lints]
 workspace = true
diff --git a/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs b/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs
index b0939229f93e2..e0c38ccf3331c 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs
@@ -9,7 +9,7 @@
 use std::{cell::RefCell, io, mem, process::Command};
 
 use base_db::Env;
-use cargo_metadata::{camino::Utf8Path, Message};
+use cargo_metadata::{Message, camino::Utf8Path};
 use cfg::CfgAtom;
 use itertools::Itertools;
 use la_arena::ArenaMap;
@@ -19,8 +19,8 @@ use serde::Deserialize as _;
 use toolchain::Tool;
 
 use crate::{
-    utf8_stdout, CargoConfig, CargoFeatures, CargoWorkspace, InvocationStrategy, ManifestPath,
-    Package, Sysroot, TargetKind,
+    CargoConfig, CargoFeatures, CargoWorkspace, InvocationStrategy, ManifestPath, Package, Sysroot,
+    TargetKind, utf8_stdout,
 };
 
 /// Output of the build script and proc-macro building steps for a workspace.
@@ -163,7 +163,7 @@ impl WorkspaceBuildScripts {
     pub(crate) fn rustc_crates(
         rustc: &CargoWorkspace,
         current_dir: &AbsPath,
-        extra_env: &FxHashMap<String, String>,
+        extra_env: &FxHashMap<String, Option<String>>,
         sysroot: &Sysroot,
     ) -> Self {
         let mut bs = WorkspaceBuildScripts::default();
@@ -172,16 +172,14 @@ impl WorkspaceBuildScripts {
         }
         let res = (|| {
             let target_libdir = (|| {
-                let mut cargo_config = sysroot.tool(Tool::Cargo, current_dir);
-                cargo_config.envs(extra_env);
+                let mut cargo_config = sysroot.tool(Tool::Cargo, current_dir, extra_env);
                 cargo_config
                     .args(["rustc", "-Z", "unstable-options", "--print", "target-libdir"])
                     .env("RUSTC_BOOTSTRAP", "1");
                 if let Ok(it) = utf8_stdout(&mut cargo_config) {
                     return Ok(it);
                 }
-                let mut cmd = sysroot.tool(Tool::Rustc, current_dir);
-                cmd.envs(extra_env);
+                let mut cmd = sysroot.tool(Tool::Rustc, current_dir, extra_env);
                 cmd.args(["--print", "target-libdir"]);
                 utf8_stdout(&mut cmd)
             })()?;
@@ -343,7 +341,8 @@ impl WorkspaceBuildScripts {
                     Message::CompilerArtifact(message) => {
                         with_output_for(&message.package_id.repr, &mut |name, data| {
                             progress(format!("building proc-macros: {name}"));
-                            if message.target.kind.iter().any(|k| k == "proc-macro") {
+                            if message.target.kind.contains(&cargo_metadata::TargetKind::ProcMacro)
+                            {
                                 // Skip rmeta file
                                 if let Some(filename) =
                                     message.filenames.iter().find(|file| is_dylib(file))
@@ -389,12 +388,12 @@ impl WorkspaceBuildScripts {
     ) -> io::Result<Command> {
         let mut cmd = match config.run_build_script_command.as_deref() {
             Some([program, args @ ..]) => {
-                let mut cmd = toolchain::command(program, current_dir);
+                let mut cmd = toolchain::command(program, current_dir, &config.extra_env);
                 cmd.args(args);
                 cmd
             }
             _ => {
-                let mut cmd = sysroot.tool(Tool::Cargo, current_dir);
+                let mut cmd = sysroot.tool(Tool::Cargo, current_dir, &config.extra_env);
 
                 cmd.args(["check", "--quiet", "--workspace", "--message-format=json"]);
                 cmd.args(&config.extra_args);
@@ -447,7 +446,6 @@ impl WorkspaceBuildScripts {
             }
         };
 
-        cmd.envs(&config.extra_env);
         if config.wrap_rustc_in_build_scripts {
             // Setup RUSTC_WRAPPER to point to `rust-analyzer` binary itself. We use
             // that to compile only proc macros and build scripts during the initial
diff --git a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs
index 014028a0b63e2..6e730b1aea266 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs
@@ -35,6 +35,8 @@ pub struct CargoWorkspace {
     target_directory: AbsPathBuf,
     manifest_path: ManifestPath,
     is_virtual_workspace: bool,
+    /// Whether this workspace represents the sysroot workspace.
+    is_sysroot: bool,
     /// Environment variables set in the `.cargo/config` file.
     config_env: Env,
 }
@@ -102,11 +104,14 @@ pub struct CargoConfig {
     /// Extra args to pass to the cargo command.
     pub extra_args: Vec<String>,
     /// Extra env vars to set when invoking the cargo command
-    pub extra_env: FxHashMap<String, String>,
+    pub extra_env: FxHashMap<String, Option<String>>,
     pub invocation_strategy: InvocationStrategy,
     /// Optional path to use instead of `target` when building
     pub target_dir: Option<Utf8PathBuf>,
+    /// Gate `#[test]` behind `#[cfg(test)]`
     pub set_test: bool,
+    /// Load the project without any dependencies
+    pub no_deps: bool,
 }
 
 pub type Package = Idx<PackageData>;
@@ -224,21 +229,26 @@ pub enum TargetKind {
     Example,
     Test,
     Bench,
+    /// Cargo calls this kind `custom-build`
     BuildScript,
     Other,
 }
 
 impl TargetKind {
-    fn new(kinds: &[String]) -> TargetKind {
+    fn new(kinds: &[cargo_metadata::TargetKind]) -> TargetKind {
         for kind in kinds {
-            return match kind.as_str() {
-                "bin" => TargetKind::Bin,
-                "test" => TargetKind::Test,
-                "bench" => TargetKind::Bench,
-                "example" => TargetKind::Example,
-                "custom-build" => TargetKind::BuildScript,
-                "proc-macro" => TargetKind::Lib { is_proc_macro: true },
-                _ if kind.contains("lib") => TargetKind::Lib { is_proc_macro: false },
+            return match kind {
+                cargo_metadata::TargetKind::Bin => TargetKind::Bin,
+                cargo_metadata::TargetKind::Test => TargetKind::Test,
+                cargo_metadata::TargetKind::Bench => TargetKind::Bench,
+                cargo_metadata::TargetKind::Example => TargetKind::Example,
+                cargo_metadata::TargetKind::CustomBuild => TargetKind::BuildScript,
+                cargo_metadata::TargetKind::ProcMacro => TargetKind::Lib { is_proc_macro: true },
+                cargo_metadata::TargetKind::Lib
+                | cargo_metadata::TargetKind::DyLib
+                | cargo_metadata::TargetKind::CDyLib
+                | cargo_metadata::TargetKind::StaticLib
+                | cargo_metadata::TargetKind::RLib => TargetKind::Lib { is_proc_macro: false },
                 _ => continue,
             };
         }
@@ -252,6 +262,22 @@ impl TargetKind {
     pub fn is_proc_macro(self) -> bool {
         matches!(self, TargetKind::Lib { is_proc_macro: true })
     }
+
+    /// If this is a valid cargo target, returns the name cargo uses in command line arguments
+    /// and output; otherwise returns `None`.
+    /// <https://docs.rs/cargo_metadata/latest/cargo_metadata/enum.TargetKind.html>
+    pub fn as_cargo_target(self) -> Option<&'static str> {
+        match self {
+            TargetKind::Bin => Some("bin"),
+            TargetKind::Lib { is_proc_macro: true } => Some("proc-macro"),
+            TargetKind::Lib { is_proc_macro: false } => Some("lib"),
+            TargetKind::Example => Some("example"),
+            TargetKind::Test => Some("test"),
+            TargetKind::Bench => Some("bench"),
+            TargetKind::BuildScript => Some("custom-build"),
+            TargetKind::Other => None,
+        }
+    }
 }
 
 #[derive(Default, Clone, Debug, PartialEq, Eq)]
@@ -263,7 +289,7 @@ pub struct CargoMetadataConfig {
     /// Extra args to pass to the cargo command.
     pub extra_args: Vec<String>,
     /// Extra env vars to set when invoking the cargo command
-    pub extra_env: FxHashMap<String, String>,
+    pub extra_env: FxHashMap<String, Option<String>>,
 }
 
 // Deserialize helper for the cargo metadata
@@ -285,6 +311,7 @@ impl CargoWorkspace {
         current_dir: &AbsPath,
         config: &CargoMetadataConfig,
         sysroot: &Sysroot,
+        no_deps: bool,
         locked: bool,
         progress: &dyn Fn(String),
     ) -> anyhow::Result<(cargo_metadata::Metadata, Option<anyhow::Error>)> {
@@ -293,8 +320,8 @@ impl CargoWorkspace {
             current_dir,
             config,
             sysroot,
+            no_deps,
             locked,
-            false,
             progress,
         );
         if let Ok((_, Some(ref e))) = res {
@@ -312,15 +339,14 @@ impl CargoWorkspace {
         current_dir: &AbsPath,
         config: &CargoMetadataConfig,
         sysroot: &Sysroot,
-        locked: bool,
         no_deps: bool,
+        locked: bool,
         progress: &dyn Fn(String),
     ) -> anyhow::Result<(cargo_metadata::Metadata, Option<anyhow::Error>)> {
-        let cargo = sysroot.tool(Tool::Cargo, current_dir);
+        let cargo = sysroot.tool(Tool::Cargo, current_dir, &config.extra_env);
         let mut meta = MetadataCommand::new();
         meta.cargo_path(cargo.get_program());
         cargo.get_envs().for_each(|(var, val)| _ = meta.env(var, val.unwrap_or_default()));
-        config.extra_env.iter().for_each(|(var, val)| _ = meta.env(var, val));
         meta.manifest_path(cargo_toml.to_path_buf());
         match &config.features {
             CargoFeatures::All => {
@@ -418,6 +444,7 @@ impl CargoWorkspace {
         mut meta: cargo_metadata::Metadata,
         ws_manifest_path: ManifestPath,
         cargo_config_env: Env,
+        is_sysroot: bool,
     ) -> CargoWorkspace {
         let mut pkg_by_id = FxHashMap::default();
         let mut packages = Arena::default();
@@ -456,7 +483,7 @@ impl CargoWorkspace {
                 cargo_metadata::Edition::E2015 => Edition::Edition2015,
                 cargo_metadata::Edition::E2018 => Edition::Edition2018,
                 cargo_metadata::Edition::E2021 => Edition::Edition2021,
-                cargo_metadata::Edition::_E2024 => Edition::Edition2024,
+                cargo_metadata::Edition::E2024 => Edition::Edition2024,
                 _ => {
                     tracing::error!("Unsupported edition `{:?}`", edition);
                     Edition::CURRENT
@@ -539,6 +566,7 @@ impl CargoWorkspace {
             target_directory,
             manifest_path: ws_manifest_path,
             is_virtual_workspace,
+            is_sysroot,
             config_env: cargo_config_env,
         }
     }
@@ -596,7 +624,7 @@ impl CargoWorkspace {
         // this pkg is inside this cargo workspace, fallback to workspace root
         if found {
             return Some(vec![
-                ManifestPath::try_from(self.workspace_root().join("Cargo.toml")).ok()?
+                ManifestPath::try_from(self.workspace_root().join("Cargo.toml")).ok()?,
             ]);
         }
 
@@ -632,4 +660,8 @@ impl CargoWorkspace {
     pub fn env(&self) -> &Env {
         &self.config_env
     }
+
+    pub fn is_sysroot(&self) -> bool {
+        self.is_sysroot
+    }
 }
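
The switch of `extra_env` from `FxHashMap<String, String>` to `FxHashMap<String, Option<String>>` lets a configured entry unset an inherited environment variable as well as set one. A minimal sketch of that behaviour, assuming `Some` sets and `None` removes, with a plain `HashMap` and `std::process::Command` standing in for the plumbing that `toolchain::command` does with these maps:

use std::collections::HashMap;
use std::process::Command;

// Apply an extra_env-style map to a command: `Some(value)` sets the variable,
// `None` removes it from the child's environment.
fn apply_extra_env(cmd: &mut Command, extra_env: &HashMap<String, Option<String>>) {
    for (key, value) in extra_env {
        match value {
            Some(value) => {
                cmd.env(key, value);
            }
            None => {
                cmd.env_remove(key);
            }
        }
    }
}

fn main() {
    let mut extra_env: HashMap<String, Option<String>> = HashMap::new();
    extra_env.insert("RUSTC_BOOTSTRAP".to_owned(), Some("1".to_owned()));
    extra_env.insert("RUSTUP_TOOLCHAIN".to_owned(), None); // explicitly unset for the child

    let mut cmd = Command::new("cargo");
    cmd.arg("--version");
    apply_extra_env(&mut cmd, &extra_env);
    // `cmd` can now be spawned with the adjusted environment.
}
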
diff --git a/src/tools/rust-analyzer/crates/project-model/src/env.rs b/src/tools/rust-analyzer/crates/project-model/src/env.rs
index 37fffba295590..f2e5df171ae32 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/env.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/env.rs
@@ -4,7 +4,7 @@ use paths::Utf8Path;
 use rustc_hash::FxHashMap;
 use toolchain::Tool;
 
-use crate::{utf8_stdout, ManifestPath, PackageData, Sysroot, TargetKind};
+use crate::{ManifestPath, PackageData, Sysroot, TargetKind, utf8_stdout};
 
 /// Recreates the compile-time environment variables that Cargo sets.
 ///
@@ -62,11 +62,10 @@ pub(crate) fn inject_rustc_tool_env(env: &mut Env, cargo_name: &str, kind: Targe
 
 pub(crate) fn cargo_config_env(
     manifest: &ManifestPath,
-    extra_env: &FxHashMap<String, String>,
+    extra_env: &FxHashMap<String, Option<String>>,
     sysroot: &Sysroot,
 ) -> Env {
-    let mut cargo_config = sysroot.tool(Tool::Cargo, manifest.parent());
-    cargo_config.envs(extra_env);
+    let mut cargo_config = sysroot.tool(Tool::Cargo, manifest.parent(), extra_env);
     cargo_config
         .args(["-Z", "unstable-options", "config", "get", "env"])
         .env("RUSTC_BOOTSTRAP", "1");
diff --git a/src/tools/rust-analyzer/crates/project-model/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/src/lib.rs
index 21a993c5a5ed1..436af64cf1326 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/lib.rs
@@ -48,12 +48,12 @@ mod tests;
 
 use std::{
     fmt,
-    fs::{self, read_dir, ReadDir},
+    fs::{self, ReadDir, read_dir},
     io,
     process::Command,
 };
 
-use anyhow::{bail, format_err, Context};
+use anyhow::{Context, bail, format_err};
 use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
 use rustc_hash::FxHashSet;
 
@@ -102,7 +102,9 @@ impl ProjectManifest {
         if path.extension().unwrap_or_default() == "rs" {
             return Ok(ProjectManifest::CargoScript(path));
         }
-        bail!("project root must point to a Cargo.toml, rust-project.json or <script>.rs file: {path}");
+        bail!(
+            "project root must point to a Cargo.toml, rust-project.json or <script>.rs file: {path}"
+        );
     }
 
     pub fn discover_single(path: &AbsPath) -> anyhow::Result<ProjectManifest> {
diff --git a/src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs b/src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs
index 73a4e6e121691..4f43be2f38fdb 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs
@@ -21,11 +21,7 @@ impl TryFrom<AbsPathBuf> for ManifestPath {
     type Error = AbsPathBuf;
 
     fn try_from(file: AbsPathBuf) -> Result<Self, Self::Error> {
-        if file.parent().is_none() {
-            Err(file)
-        } else {
-            Ok(ManifestPath { file })
-        }
+        if file.parent().is_none() { Err(file) } else { Ok(ManifestPath { file }) }
     }
 }
 
diff --git a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs
index b2df8e4703ab3..d3e1f11d78881 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs
@@ -53,7 +53,7 @@ use base_db::{CrateDisplayName, CrateName};
 use cfg::CfgAtom;
 use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
 use rustc_hash::{FxHashMap, FxHashSet};
-use serde::{de, Deserialize, Serialize};
+use serde::{Deserialize, Serialize, de};
 use span::Edition;
 
 use crate::{ManifestPath, TargetKind};
@@ -85,8 +85,7 @@ impl ProjectJson {
     ///
     /// * `manifest` - The path to the `rust-project.json`.
     /// * `base` - The path to the workspace root (i.e. the folder containing `rust-project.json`)
-    /// * `data` - The parsed contents of `rust-project.json`, or project json that's passed via
-    ///   configuration.
+    /// * `data` - The parsed contents of `rust-project.json`, or project json that's passed via configuration.
     pub fn new(
         manifest: Option<ManifestPath>,
         base: &AbsPath,
@@ -452,7 +451,7 @@ pub enum TargetKindData {
 }
 /// Identifies a crate by position in the crates array.
 ///
-/// This will differ from `CrateId` when multiple `ProjectJson`
+/// This will differ from `Crate` when multiple `ProjectJson`
 /// workspaces are loaded.
 #[derive(Serialize, Deserialize, Debug, Clone, Copy, Eq, PartialEq, Hash)]
 #[serde(transparent)]
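
As a concrete illustration of `CrateArrayIdx`: in `rust-project.json`, a crate's `deps` entries refer to other crates by their position in the top-level `crates` array. A trimmed example, with field names as in the rust-analyzer manual and placeholder paths:

// Trimmed rust-project.json: the second crate depends on the first by index.
const EXAMPLE_RUST_PROJECT_JSON: &str = r#"
{
  "sysroot_src": "/sysroot/lib/rustlib/src/rust/library",
  "crates": [
    { "root_module": "lib/src/lib.rs", "edition": "2021", "deps": [] },
    {
      "root_module": "app/src/main.rs",
      "edition": "2021",
      "deps": [{ "crate": 0, "name": "lib" }]
    }
  ]
}
"#;
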
diff --git a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs
index 13812e96fe7a5..c7c1b04318677 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs
@@ -6,16 +6,16 @@
 
 use std::{env, fs, ops::Not, path::Path, process::Command};
 
-use anyhow::{format_err, Result};
+use anyhow::{Result, format_err};
 use itertools::Itertools;
 use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
 use rustc_hash::FxHashMap;
 use stdx::format_to;
-use toolchain::{probe_for_binary, Tool};
+use toolchain::{Tool, probe_for_binary};
 
 use crate::{
-    cargo_workspace::CargoMetadataConfig, utf8_stdout, CargoWorkspace, ManifestPath, ProjectJson,
-    RustSourceWorkspaceConfig,
+    CargoWorkspace, ManifestPath, ProjectJson, RustSourceWorkspaceConfig,
+    cargo_workspace::CargoMetadataConfig, utf8_stdout,
 };
 
 #[derive(Debug, Clone, PartialEq, Eq)]
@@ -86,7 +86,7 @@ impl Sysroot {
 
 impl Sysroot {
     /// Attempts to discover the toolchain's sysroot from the given `dir`.
-    pub fn discover(dir: &AbsPath, extra_env: &FxHashMap<String, String>) -> Sysroot {
+    pub fn discover(dir: &AbsPath, extra_env: &FxHashMap<String, Option<String>>) -> Sysroot {
         let sysroot_dir = discover_sysroot_dir(dir, extra_env);
         let rust_lib_src_dir = sysroot_dir.as_ref().ok().map(|sysroot_dir| {
             discover_rust_lib_src_dir_or_add_component(sysroot_dir, dir, extra_env)
@@ -96,7 +96,7 @@ impl Sysroot {
 
     pub fn discover_with_src_override(
         current_dir: &AbsPath,
-        extra_env: &FxHashMap<String, String>,
+        extra_env: &FxHashMap<String, Option<String>>,
         rust_lib_src_dir: AbsPathBuf,
     ) -> Sysroot {
         let sysroot_dir = discover_sysroot_dir(current_dir, extra_env);
@@ -118,7 +118,12 @@ impl Sysroot {
     }
 
     /// Returns a command to run a tool preferring the cargo proxies if the sysroot exists.
-    pub fn tool(&self, tool: Tool, current_dir: impl AsRef<Path>) -> Command {
+    pub fn tool(
+        &self,
+        tool: Tool,
+        current_dir: impl AsRef<Path>,
+        envs: &FxHashMap<String, Option<String>>,
+    ) -> Command {
         match self.root() {
             Some(root) => {
                 // special case rustc, we can look that up directly in the sysroot's bin folder
@@ -127,15 +132,15 @@ impl Sysroot {
                     if let Some(path) =
                         probe_for_binary(root.join("bin").join(Tool::Rustc.name()).into())
                     {
-                        return toolchain::command(path, current_dir);
+                        return toolchain::command(path, current_dir, envs);
                     }
                 }
 
-                let mut cmd = toolchain::command(tool.prefer_proxy(), current_dir);
+                let mut cmd = toolchain::command(tool.prefer_proxy(), current_dir, envs);
                 cmd.env("RUSTUP_TOOLCHAIN", AsRef::<std::path::Path>::as_ref(root));
                 cmd
             }
-            _ => toolchain::command(tool.path(), current_dir),
+            _ => toolchain::command(tool.path(), current_dir, envs),
         }
     }
 
@@ -292,7 +297,7 @@ impl Sysroot {
         // the sysroot uses `public-dependency`, so we make cargo think it's a nightly
         cargo_config.extra_env.insert(
             "__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS".to_owned(),
-            "nightly".to_owned(),
+            Some("nightly".to_owned()),
         );
 
         let (mut res, _) = match CargoWorkspace::fetch_metadata(
@@ -300,6 +305,7 @@ impl Sysroot {
             rust_lib_src_dir,
             &cargo_config,
             self,
+            false,
             // Make sure we never attempt to write to the sysroot
             true,
             &|_| (),
@@ -360,17 +366,16 @@ impl Sysroot {
             res.packages.remove(idx);
         });
 
-        let cargo_workspace = CargoWorkspace::new(res, library_manifest, Default::default());
+        let cargo_workspace = CargoWorkspace::new(res, library_manifest, Default::default(), true);
         Some(RustLibSrcWorkspace::Workspace(cargo_workspace))
     }
 }
 
 fn discover_sysroot_dir(
     current_dir: &AbsPath,
-    extra_env: &FxHashMap<String, String>,
+    extra_env: &FxHashMap<String, Option<String>>,
 ) -> Result<AbsPathBuf> {
-    let mut rustc = toolchain::command(Tool::Rustc.path(), current_dir);
-    rustc.envs(extra_env);
+    let mut rustc = toolchain::command(Tool::Rustc.path(), current_dir, extra_env);
     rustc.current_dir(current_dir).args(["--print", "sysroot"]);
     tracing::debug!("Discovering sysroot by {:?}", rustc);
     let stdout = utf8_stdout(&mut rustc)?;
@@ -397,12 +402,11 @@ fn discover_rust_lib_src_dir(sysroot_path: &AbsPathBuf) -> Option<AbsPathBuf> {
 fn discover_rust_lib_src_dir_or_add_component(
     sysroot_path: &AbsPathBuf,
     current_dir: &AbsPath,
-    extra_env: &FxHashMap<String, String>,
+    extra_env: &FxHashMap<String, Option<String>>,
 ) -> Result<AbsPathBuf> {
     discover_rust_lib_src_dir(sysroot_path)
         .or_else(|| {
-            let mut rustup = toolchain::command(Tool::Rustup.prefer_proxy(), current_dir);
-            rustup.envs(extra_env);
+            let mut rustup = toolchain::command(Tool::Rustup.prefer_proxy(), current_dir, extra_env);
             rustup.args(["component", "add", "rust-src"]);
             tracing::info!("adding rust-src component by {:?}", rustup);
             utf8_stdout(&mut rustup).ok()?;
@@ -424,21 +428,13 @@ fn get_rustc_src(sysroot_path: &AbsPath) -> Option<ManifestPath> {
     let rustc_src = sysroot_path.join("lib/rustlib/rustc-src/rust/compiler/rustc/Cargo.toml");
     let rustc_src = ManifestPath::try_from(rustc_src).ok()?;
     tracing::debug!("checking for rustc source code: {rustc_src}");
-    if fs::metadata(&rustc_src).is_ok() {
-        Some(rustc_src)
-    } else {
-        None
-    }
+    if fs::metadata(&rustc_src).is_ok() { Some(rustc_src) } else { None }
 }
 
 fn get_rust_lib_src(sysroot_path: &AbsPath) -> Option<AbsPathBuf> {
     let rust_lib_src = sysroot_path.join("lib/rustlib/src/rust/library");
     tracing::debug!("checking sysroot library: {rust_lib_src}");
-    if fs::metadata(&rust_lib_src).is_ok() {
-        Some(rust_lib_src)
-    } else {
-        None
-    }
+    if fs::metadata(&rust_lib_src).is_ok() { Some(rust_lib_src) } else { None }
 }
 
 // FIXME: Remove this, that will bump our project MSRV to 1.82
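
`discover_sysroot_dir` still boils down to running `rustc --print sysroot` and trimming the output; only the environment handling moved into `toolchain::command`. A self-contained sketch of that underlying query using `std::process::Command` directly:

use std::path::PathBuf;
use std::process::Command;

// Ask rustc where its sysroot lives and trim the trailing newline. The real
// code routes through `toolchain::command` with the configured extra env and
// `utf8_stdout`; this only shows the underlying invocation.
fn discover_sysroot() -> Option<PathBuf> {
    let output = Command::new("rustc").args(["--print", "sysroot"]).output().ok()?;
    if !output.status.success() {
        return None;
    }
    let stdout = String::from_utf8(output.stdout).ok()?;
    Some(PathBuf::from(stdout.trim()))
}

fn main() {
    if let Some(sysroot) = discover_sysroot() {
        println!("sysroot: {}", sysroot.display());
    }
}
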
diff --git a/src/tools/rust-analyzer/crates/project-model/src/tests.rs b/src/tools/rust-analyzer/crates/project-model/src/tests.rs
index 837406227323e..c69891b7463de 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/tests.rs
@@ -1,7 +1,7 @@
-use base_db::{CrateGraph, ProcMacroPaths};
+use base_db::{CrateGraphBuilder, ProcMacroPaths};
 use cargo_metadata::Metadata;
 use cfg::{CfgAtom, CfgDiff};
-use expect_test::{expect_file, ExpectFile};
+use expect_test::{ExpectFile, expect_file};
 use intern::sym;
 use paths::{AbsPath, AbsPathBuf, Utf8Path, Utf8PathBuf};
 use rustc_hash::FxHashMap;
@@ -10,12 +10,12 @@ use span::FileId;
 use triomphe::Arc;
 
 use crate::{
-    sysroot::RustLibSrcWorkspace, workspace::ProjectWorkspaceKind, CargoWorkspace, CfgOverrides,
-    ManifestPath, ProjectJson, ProjectJsonData, ProjectWorkspace, RustSourceWorkspaceConfig,
-    Sysroot, WorkspaceBuildScripts,
+    CargoWorkspace, CfgOverrides, ManifestPath, ProjectJson, ProjectJsonData, ProjectWorkspace,
+    RustSourceWorkspaceConfig, Sysroot, WorkspaceBuildScripts, sysroot::RustLibSrcWorkspace,
+    workspace::ProjectWorkspaceKind,
 };
 
-fn load_cargo(file: &str) -> (CrateGraph, ProcMacroPaths) {
+fn load_cargo(file: &str) -> (CrateGraphBuilder, ProcMacroPaths) {
     let project_workspace = load_workspace_from_metadata(file);
     to_crate_graph(project_workspace, &mut Default::default())
 }
@@ -23,7 +23,7 @@ fn load_cargo(file: &str) -> (CrateGraph, ProcMacroPaths) {
 fn load_cargo_with_overrides(
     file: &str,
     cfg_overrides: CfgOverrides,
-) -> (CrateGraph, ProcMacroPaths) {
+) -> (CrateGraphBuilder, ProcMacroPaths) {
     let project_workspace =
         ProjectWorkspace { cfg_overrides, ..load_workspace_from_metadata(file) };
     to_crate_graph(project_workspace, &mut Default::default())
@@ -33,7 +33,7 @@ fn load_workspace_from_metadata(file: &str) -> ProjectWorkspace {
     let meta: Metadata = get_test_json_file(file);
     let manifest_path =
         ManifestPath::try_from(AbsPathBuf::try_from(meta.workspace_root.clone()).unwrap()).unwrap();
-    let cargo_workspace = CargoWorkspace::new(meta, manifest_path, Default::default());
+    let cargo_workspace = CargoWorkspace::new(meta, manifest_path, Default::default(), false);
     ProjectWorkspace {
         kind: ProjectWorkspaceKind::Cargo {
             cargo: cargo_workspace,
@@ -51,10 +51,10 @@ fn load_workspace_from_metadata(file: &str) -> ProjectWorkspace {
     }
 }
 
-fn load_rust_project(file: &str) -> (CrateGraph, ProcMacroPaths) {
+fn load_rust_project(file: &str) -> (CrateGraphBuilder, ProcMacroPaths) {
     let data = get_test_json_file(file);
     let project = rooted_project_json(data);
-    let sysroot = get_fake_sysroot();
+    let sysroot = Sysroot::empty();
     let project_workspace = ProjectWorkspace {
         kind: ProjectWorkspaceKind::Json(project),
         sysroot,
@@ -101,36 +101,11 @@ fn replace_root(s: &mut String, direction: bool) {
     }
 }
 
-fn replace_fake_sys_root(s: &mut String) {
-    let fake_sysroot_path = get_test_path("fake-sysroot");
-    let fake_sysroot_path = if cfg!(windows) {
-        let normalized_path = fake_sysroot_path.as_str().replace('\\', r#"\\"#);
-        format!(r#"{normalized_path}\\"#)
-    } else {
-        format!("{}/", fake_sysroot_path.as_str())
-    };
-    *s = s.replace(&fake_sysroot_path, "$FAKESYSROOT$")
-}
-
 fn get_test_path(file: &str) -> Utf8PathBuf {
     let base = Utf8PathBuf::from(env!("CARGO_MANIFEST_DIR"));
     base.join("test_data").join(file)
 }
 
-fn get_fake_sysroot() -> Sysroot {
-    let sysroot_path = get_test_path("fake-sysroot");
-    // there's no `libexec/` directory with a `proc-macro-srv` binary in that
-    // fake sysroot, so we give them both the same path:
-    let sysroot_dir = AbsPathBuf::assert(sysroot_path);
-    let sysroot_src_dir = sysroot_dir.clone();
-    let mut sysroot = Sysroot::new(Some(sysroot_dir), Some(sysroot_src_dir));
-    let loaded_sysroot = sysroot.load_workspace(&RustSourceWorkspaceConfig::default_cargo());
-    if let Some(loaded_sysroot) = loaded_sysroot {
-        sysroot.set_workspace(loaded_sysroot);
-    }
-    sysroot
-}
-
 fn rooted_project_json(data: ProjectJsonData) -> ProjectJson {
     let mut root = "$ROOT$".to_owned();
     replace_root(&mut root, true);
@@ -142,7 +117,7 @@ fn rooted_project_json(data: ProjectJsonData) -> ProjectJson {
 fn to_crate_graph(
     project_workspace: ProjectWorkspace,
     file_map: &mut FxHashMap<AbsPathBuf, FileId>,
-) -> (CrateGraph, ProcMacroPaths) {
+) -> (CrateGraphBuilder, ProcMacroPaths) {
     project_workspace.to_crate_graph(
         &mut {
             |path| {
@@ -154,19 +129,18 @@ fn to_crate_graph(
     )
 }
 
-fn check_crate_graph(crate_graph: CrateGraph, expect: ExpectFile) {
+fn check_crate_graph(crate_graph: CrateGraphBuilder, expect: ExpectFile) {
     let mut crate_graph = format!("{crate_graph:#?}");
 
     replace_root(&mut crate_graph, false);
     replace_cargo(&mut crate_graph);
-    replace_fake_sys_root(&mut crate_graph);
     expect.assert_eq(&crate_graph);
 }
 
 #[test]
 fn cargo_hello_world_project_model_with_wildcard_overrides() {
     let cfg_overrides = CfgOverrides {
-        global: CfgDiff::new(Vec::new(), vec![CfgAtom::Flag(sym::test.clone())]),
+        global: CfgDiff::new(Vec::new(), vec![CfgAtom::Flag(sym::test)]),
         selective: Default::default(),
     };
     let (crate_graph, _proc_macros) =
@@ -185,7 +159,7 @@ fn cargo_hello_world_project_model_with_selective_overrides() {
         global: Default::default(),
         selective: std::iter::once((
             "libc".to_owned(),
-            CfgDiff::new(Vec::new(), vec![CfgAtom::Flag(sym::test.clone())]),
+            CfgDiff::new(Vec::new(), vec![CfgAtom::Flag(sym::test)]),
         ))
         .collect(),
     };
@@ -256,7 +230,7 @@ fn smoke_test_real_sysroot_cargo() {
     let meta: Metadata = get_test_json_file("hello-world-metadata.json");
     let manifest_path =
         ManifestPath::try_from(AbsPathBuf::try_from(meta.workspace_root.clone()).unwrap()).unwrap();
-    let cargo_workspace = CargoWorkspace::new(meta, manifest_path, Default::default());
+    let cargo_workspace = CargoWorkspace::new(meta, manifest_path, Default::default(), false);
     let mut sysroot = Sysroot::discover(
         AbsPath::assert(Utf8Path::new(env!("CARGO_MANIFEST_DIR"))),
         &Default::default(),
diff --git a/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/rustc_cfg.rs b/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/rustc_cfg.rs
index e472da0c89b0d..a77f76797fca2 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/rustc_cfg.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/rustc_cfg.rs
@@ -11,7 +11,7 @@ use crate::{toolchain_info::QueryConfig, utf8_stdout};
 pub fn get(
     config: QueryConfig<'_>,
     target: Option<&str>,
-    extra_env: &FxHashMap<String, String>,
+    extra_env: &FxHashMap<String, Option<String>>,
 ) -> Vec<CfgAtom> {
     let _p = tracing::info_span!("rustc_cfg::get").entered();
 
@@ -58,14 +58,13 @@ pub fn get(
 
 fn rustc_print_cfg(
     target: Option<&str>,
-    extra_env: &FxHashMap<String, String>,
+    extra_env: &FxHashMap<String, Option<String>>,
     config: QueryConfig<'_>,
 ) -> anyhow::Result<String> {
     const RUSTC_ARGS: [&str; 2] = ["--print", "cfg"];
     let (sysroot, current_dir) = match config {
         QueryConfig::Cargo(sysroot, cargo_toml) => {
-            let mut cmd = sysroot.tool(Tool::Cargo, cargo_toml.parent());
-            cmd.envs(extra_env);
+            let mut cmd = sysroot.tool(Tool::Cargo, cargo_toml.parent(), extra_env);
             cmd.args(["rustc", "-Z", "unstable-options"]).args(RUSTC_ARGS);
             if let Some(target) = target {
                 cmd.args(["--target", target]);
@@ -86,8 +85,7 @@ fn rustc_print_cfg(
         QueryConfig::Rustc(sysroot, current_dir) => (sysroot, current_dir),
     };
 
-    let mut cmd = sysroot.tool(Tool::Rustc, current_dir);
-    cmd.envs(extra_env);
+    let mut cmd = sysroot.tool(Tool::Rustc, current_dir, extra_env);
     cmd.args(RUSTC_ARGS);
     cmd.arg("-O");
     if let Some(target) = target {
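
`rustc_print_cfg` returns the raw output of `rustc --print cfg`, one cfg per line. A small sketch of how such lines split into bare flags and key="value" atoms (illustrative parsing only, not the crate's `CfgAtom` machinery):

// Each line of `rustc --print cfg` is either a bare flag (`unix`) or a
// key/value pair (`target_os="linux"`).
fn parse_cfg_line(line: &str) -> (&str, Option<&str>) {
    match line.split_once('=') {
        Some((key, value)) => (key, Some(value.trim_matches('"'))),
        None => (line, None),
    }
}

fn main() {
    assert_eq!(parse_cfg_line("unix"), ("unix", None));
    assert_eq!(parse_cfg_line(r#"target_os="linux""#), ("target_os", Some("linux")));
    assert_eq!(parse_cfg_line(r#"panic="unwind""#), ("panic", Some("unwind")));
}
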
diff --git a/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/target_data_layout.rs b/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/target_data_layout.rs
index 94645a91f65ba..a4d0ec69537ae 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/target_data_layout.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/target_data_layout.rs
@@ -4,13 +4,13 @@ use anyhow::Context;
 use rustc_hash::FxHashMap;
 use toolchain::Tool;
 
-use crate::{toolchain_info::QueryConfig, utf8_stdout, Sysroot};
+use crate::{Sysroot, toolchain_info::QueryConfig, utf8_stdout};
 
 /// Uses `rustc --print target-spec-json`.
 pub fn get(
     config: QueryConfig<'_>,
     target: Option<&str>,
-    extra_env: &FxHashMap<String, String>,
+    extra_env: &FxHashMap<String, Option<String>>,
 ) -> anyhow::Result<String> {
     const RUSTC_ARGS: [&str; 2] = ["--print", "target-spec-json"];
     let process = |output: String| {
@@ -21,8 +21,7 @@ pub fn get(
     };
     let (sysroot, current_dir) = match config {
         QueryConfig::Cargo(sysroot, cargo_toml) => {
-            let mut cmd = sysroot.tool(Tool::Cargo, cargo_toml.parent());
-            cmd.envs(extra_env);
+            let mut cmd = sysroot.tool(Tool::Cargo, cargo_toml.parent(), extra_env);
             cmd.env("RUSTC_BOOTSTRAP", "1");
             cmd.args(["rustc", "-Z", "unstable-options"]).args(RUSTC_ARGS).args([
                 "--",
@@ -43,11 +42,8 @@ pub fn get(
         QueryConfig::Rustc(sysroot, current_dir) => (sysroot, current_dir),
     };
 
-    let mut cmd = Sysroot::tool(sysroot, Tool::Rustc, current_dir);
-    cmd.envs(extra_env)
-        .env("RUSTC_BOOTSTRAP", "1")
-        .args(["-Z", "unstable-options"])
-        .args(RUSTC_ARGS);
+    let mut cmd = Sysroot::tool(sysroot, Tool::Rustc, current_dir, extra_env);
+    cmd.env("RUSTC_BOOTSTRAP", "1").args(["-Z", "unstable-options"]).args(RUSTC_ARGS);
     if let Some(target) = target {
         cmd.args(["--target", target]);
     }
diff --git a/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/target_tuple.rs b/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/target_tuple.rs
index 0476de58f2309..f6ab853219784 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/target_tuple.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/target_tuple.rs
@@ -5,14 +5,14 @@ use anyhow::Context;
 use rustc_hash::FxHashMap;
 use toolchain::Tool;
 
-use crate::{toolchain_info::QueryConfig, utf8_stdout, ManifestPath, Sysroot};
+use crate::{ManifestPath, Sysroot, toolchain_info::QueryConfig, utf8_stdout};
 
 /// For cargo, runs `cargo -Zunstable-options config get build.target` to get the configured project target(s).
 /// For rustc, runs `rustc -vV` to get the host target.
 pub fn get(
     config: QueryConfig<'_>,
     target: Option<&str>,
-    extra_env: &FxHashMap<String, String>,
+    extra_env: &FxHashMap<String, Option<String>>,
 ) -> anyhow::Result<Vec<String>> {
     let _p = tracing::info_span!("target_tuple::get").entered();
     if let Some(target) = target {
@@ -32,12 +32,11 @@ pub fn get(
 }
 
 fn rustc_discover_host_tuple(
-    extra_env: &FxHashMap<String, String>,
+    extra_env: &FxHashMap<String, Option<String>>,
     sysroot: &Sysroot,
     current_dir: &Path,
 ) -> anyhow::Result<String> {
-    let mut cmd = sysroot.tool(Tool::Rustc, current_dir);
-    cmd.envs(extra_env);
+    let mut cmd = sysroot.tool(Tool::Rustc, current_dir, extra_env);
     cmd.arg("-vV");
     let stdout = utf8_stdout(&mut cmd)
         .with_context(|| format!("unable to discover host platform via `{cmd:?}`"))?;
@@ -53,11 +52,10 @@ fn rustc_discover_host_tuple(
 
 fn cargo_config_build_target(
     cargo_toml: &ManifestPath,
-    extra_env: &FxHashMap<String, String>,
+    extra_env: &FxHashMap<String, Option<String>>,
     sysroot: &Sysroot,
 ) -> Option<Vec<String>> {
-    let mut cmd = sysroot.tool(Tool::Cargo, cargo_toml.parent());
-    cmd.envs(extra_env);
+    let mut cmd = sysroot.tool(Tool::Cargo, cargo_toml.parent(), extra_env);
     cmd.current_dir(cargo_toml.parent()).env("RUSTC_BOOTSTRAP", "1");
     cmd.args(["-Z", "unstable-options", "config", "get", "build.target"]);
     // if successful we receive `build.target = "target-tuple"`
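
`rustc_discover_host_tuple` reads the `host:` line from `rustc -vV` output. A minimal sketch of that extraction on a made-up output string:

// `rustc -vV` output contains a line of the form `host: <target-tuple>`.
fn host_tuple(verbose_version: &str) -> Option<&str> {
    verbose_version.lines().find_map(|line| line.strip_prefix("host: ")).map(str::trim)
}

fn main() {
    let out = "rustc 1.0.0 (0000000 2000-01-01)\nhost: x86_64-unknown-linux-gnu\nrelease: 1.0.0\n";
    assert_eq!(host_tuple(out), Some("x86_64-unknown-linux-gnu"));
}
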
diff --git a/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/version.rs b/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/version.rs
index e795fdf1d64fa..91ba859859149 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/version.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/version.rs
@@ -9,17 +9,16 @@ use crate::{toolchain_info::QueryConfig, utf8_stdout};
 
 pub(crate) fn get(
     config: QueryConfig<'_>,
-    extra_env: &FxHashMap<String, String>,
+    extra_env: &FxHashMap<String, Option<String>>,
 ) -> Result<Option<Version>, anyhow::Error> {
     let (mut cmd, prefix) = match config {
         QueryConfig::Cargo(sysroot, cargo_toml) => {
-            (sysroot.tool(Tool::Cargo, cargo_toml.parent()), "cargo ")
+            (sysroot.tool(Tool::Cargo, cargo_toml.parent(), extra_env), "cargo ")
         }
         QueryConfig::Rustc(sysroot, current_dir) => {
-            (sysroot.tool(Tool::Rustc, current_dir), "rustc ")
+            (sysroot.tool(Tool::Rustc, current_dir, extra_env), "rustc ")
         }
     };
-    cmd.envs(extra_env);
     cmd.arg("--version");
     let out = utf8_stdout(&mut cmd).with_context(|| format!("Failed to query rust toolchain version via `{cmd:?}`, is your toolchain setup correctly?"))?;
 
diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs
index 62c13c7d9ec8d..c6e0cf36aff2a 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs
@@ -6,11 +6,12 @@ use std::{collections::VecDeque, fmt, fs, iter, ops::Deref, sync, thread};
 
 use anyhow::Context;
 use base_db::{
-    CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Env,
-    LangCrateOrigin, ProcMacroPaths, TargetLayoutLoadResult,
+    CrateBuilderId, CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin,
+    CrateWorkspaceData, DependencyBuilder, Env, LangCrateOrigin, ProcMacroPaths,
+    TargetLayoutLoadResult,
 };
 use cfg::{CfgAtom, CfgDiff, CfgOptions};
-use intern::{sym, Symbol};
+use intern::{Symbol, sym};
 use paths::{AbsPath, AbsPathBuf};
 use rustc_hash::{FxHashMap, FxHashSet};
 use semver::Version;
@@ -19,15 +20,15 @@ use tracing::instrument;
 use triomphe::Arc;
 
 use crate::{
+    CargoConfig, CargoWorkspace, CfgOverrides, InvocationStrategy, ManifestPath, Package,
+    ProjectJson, ProjectManifest, RustSourceWorkspaceConfig, Sysroot, TargetData, TargetKind,
+    WorkspaceBuildScripts,
     build_dependencies::BuildScriptOutput,
     cargo_workspace::{CargoMetadataConfig, DepKind, PackageData, RustLibSource},
     env::{cargo_config_env, inject_cargo_env, inject_cargo_package_env, inject_rustc_tool_env},
     project_json::{Crate, CrateArrayIdx},
     sysroot::RustLibSrcWorkspace,
-    toolchain_info::{rustc_cfg, target_data_layout, target_tuple, version, QueryConfig},
-    CargoConfig, CargoWorkspace, CfgOverrides, InvocationStrategy, ManifestPath, Package,
-    ProjectJson, ProjectManifest, RustSourceWorkspaceConfig, Sysroot, TargetData, TargetKind,
-    WorkspaceBuildScripts,
+    toolchain_info::{QueryConfig, rustc_cfg, target_data_layout, target_tuple, version},
 };
 use tracing::{debug, error, info};
 
@@ -219,6 +220,7 @@ impl ProjectWorkspace {
             sysroot,
             sysroot_src,
             target,
+            no_deps,
             ..
         } = config;
         let mut sysroot = match (sysroot, sysroot_src) {
@@ -300,12 +302,17 @@ impl ProjectWorkspace {
                             extra_env: extra_env.clone(),
                         },
                         &sysroot,
+                        *no_deps,
                         false,
                         &|_| (),
                     ) {
                         Ok((meta, _error)) => {
-                            let workspace =
-                                CargoWorkspace::new(meta, cargo_toml.clone(), Env::default());
+                            let workspace = CargoWorkspace::new(
+                                meta,
+                                cargo_toml.clone(),
+                                Env::default(),
+                                false,
+                            );
                             let build_scripts = WorkspaceBuildScripts::rustc_crates(
                                 &workspace,
                                 workspace_dir,
@@ -320,8 +327,8 @@ impl ProjectWorkspace {
                                 "Failed to read Cargo metadata from rustc source at {rustc_dir}",
                             );
                             Err(Some(format!(
-                            "Failed to read Cargo metadata from rustc source at {rustc_dir}: {e}"
-                        )))
+                                "Failed to read Cargo metadata from rustc source at {rustc_dir}: {e}"
+                            )))
                         }
                     }
                 })
@@ -338,6 +345,7 @@ impl ProjectWorkspace {
                         extra_env: extra_env.clone(),
                     },
                     &sysroot,
+                    *no_deps,
                     false,
                     &|_| (),
                 )
@@ -378,7 +386,7 @@ impl ProjectWorkspace {
                 "Failed to read Cargo metadata from Cargo.toml file {cargo_toml}, {toolchain:?}",
             )
         })?;
-        let cargo = CargoWorkspace::new(meta, cargo_toml.clone(), cargo_config_extra_env);
+        let cargo = CargoWorkspace::new(meta, cargo_toml.clone(), cargo_config_extra_env, false);
         if let Some(loaded_sysroot) = loaded_sysroot {
             sysroot.set_workspace(loaded_sysroot);
         }
@@ -506,6 +514,7 @@ impl ProjectWorkspace {
                 extra_env: config.extra_env.clone(),
             },
             &sysroot,
+            config.no_deps,
             false,
             &|_| (),
         )
@@ -514,7 +523,7 @@ impl ProjectWorkspace {
             let cargo_config_extra_env =
                 cargo_config_env(detached_file, &config.extra_env, &sysroot);
             (
-                CargoWorkspace::new(ws, detached_file.clone(), cargo_config_extra_env),
+                CargoWorkspace::new(ws, detached_file.clone(), cargo_config_extra_env, false),
                 WorkspaceBuildScripts::default(),
                 error.map(Arc::new),
             )
@@ -847,11 +856,15 @@ impl ProjectWorkspace {
     pub fn to_crate_graph(
         &self,
         load: FileLoader<'_>,
-        extra_env: &FxHashMap<String, String>,
-    ) -> (CrateGraph, ProcMacroPaths) {
+        extra_env: &FxHashMap<String, Option<String>>,
+    ) -> (CrateGraphBuilder, ProcMacroPaths) {
         let _p = tracing::info_span!("ProjectWorkspace::to_crate_graph").entered();
 
         let Self { kind, sysroot, cfg_overrides, rustc_cfg, .. } = self;
+        let crate_ws_data = Arc::new(CrateWorkspaceData {
+            toolchain: self.toolchain.clone(),
+            data_layout: self.target_layout.clone(),
+        });
         let (crate_graph, proc_macros) = match kind {
             ProjectWorkspaceKind::Json(project) => project_json_to_crate_graph(
                 rustc_cfg.clone(),
@@ -861,6 +874,8 @@ impl ProjectWorkspace {
                 extra_env,
                 cfg_overrides,
                 self.set_test,
+                false,
+                crate_ws_data,
             ),
             ProjectWorkspaceKind::Cargo { cargo, rustc, build_scripts, error: _ } => {
                 cargo_to_crate_graph(
@@ -872,6 +887,7 @@ impl ProjectWorkspace {
                     cfg_overrides,
                     build_scripts,
                     self.set_test,
+                    crate_ws_data,
                 )
             }
             ProjectWorkspaceKind::DetachedFile { file, cargo: cargo_script, .. } => {
@@ -885,6 +901,7 @@ impl ProjectWorkspace {
                         cfg_overrides,
                         build_scripts,
                         self.set_test,
+                        crate_ws_data,
                     )
                 } else {
                     detached_file_to_crate_graph(
@@ -894,6 +911,7 @@ impl ProjectWorkspace {
                         sysroot,
                         cfg_overrides,
                         self.set_test,
+                        crate_ws_data,
                     )
                 }
             }
@@ -956,18 +974,27 @@ fn project_json_to_crate_graph(
     load: FileLoader<'_>,
     project: &ProjectJson,
     sysroot: &Sysroot,
-    extra_env: &FxHashMap<String, String>,
+    extra_env: &FxHashMap<String, Option<String>>,
     override_cfg: &CfgOverrides,
     set_test: bool,
-) -> (CrateGraph, ProcMacroPaths) {
-    let mut res = (CrateGraph::default(), ProcMacroPaths::default());
+    is_sysroot: bool,
+    crate_ws_data: Arc<CrateWorkspaceData>,
+) -> (CrateGraphBuilder, ProcMacroPaths) {
+    let mut res = (CrateGraphBuilder::default(), ProcMacroPaths::default());
     let (crate_graph, proc_macros) = &mut res;
-    let (public_deps, libproc_macro) =
-        sysroot_to_crate_graph(crate_graph, sysroot, rustc_cfg.clone(), load);
+    let (public_deps, libproc_macro) = sysroot_to_crate_graph(
+        crate_graph,
+        sysroot,
+        rustc_cfg.clone(),
+        load,
+        // FIXME: This looks incorrect but I don't think this matters.
+        crate_ws_data.clone(),
+    );
 
     let mut cfg_cache: FxHashMap<&str, Vec<CfgAtom>> = FxHashMap::default();
+    let project_root = Arc::new(project.project_root().to_path_buf());
 
-    let idx_to_crate_id: FxHashMap<CrateArrayIdx, CrateId> = project
+    let idx_to_crate_id: FxHashMap<CrateArrayIdx, _> = project
         .crates()
         .filter_map(|(idx, krate)| Some((idx, krate, load(&krate.root_module)?)))
         .map(
@@ -1007,11 +1034,11 @@ fn project_json_to_crate_graph(
                         target_cfgs.iter().chain(cfg.iter()).cloned().collect();
 
                     if *is_workspace_member {
-                        if set_test {
+                        if set_test && !is_sysroot {
                             // Add test cfg for local crates
-                            cfg_options.insert_atom(sym::test.clone());
+                            cfg_options.insert_atom(sym::test);
                         }
-                        cfg_options.insert_atom(sym::rust_analyzer.clone());
+                        cfg_options.insert_atom(sym::rust_analyzer);
                     }
 
                     override_cfg.apply(
@@ -1029,19 +1056,27 @@ fn project_json_to_crate_graph(
                     *edition,
                     display_name.clone(),
                     version.clone(),
-                    Arc::new(cfg_options),
+                    cfg_options,
                     None,
                     env,
                     if let Some(name) = display_name.clone() {
-                        CrateOrigin::Local {
-                            repo: repository.clone(),
-                            name: Some(name.canonical_name().to_owned()),
+                        if is_sysroot {
+                            CrateOrigin::Lang(LangCrateOrigin::from(name.canonical_name().as_str()))
+                        } else {
+                            CrateOrigin::Local {
+                                repo: repository.clone(),
+                                name: Some(name.canonical_name().to_owned()),
+                            }
                         }
                     } else {
                         CrateOrigin::Local { repo: None, name: None }
                     },
                     *is_proc_macro,
-                    proc_macro_cwd.clone(),
+                    match proc_macro_cwd {
+                        Some(path) => Arc::new(path.clone()),
+                        None => project_root.clone(),
+                    },
+                    crate_ws_data.clone(),
                 );
                 debug!(
                     ?crate_graph_crate_id,
@@ -1092,12 +1127,18 @@ fn cargo_to_crate_graph(
     override_cfg: &CfgOverrides,
     build_scripts: &WorkspaceBuildScripts,
     set_test: bool,
-) -> (CrateGraph, ProcMacroPaths) {
+    crate_ws_data: Arc<CrateWorkspaceData>,
+) -> (CrateGraphBuilder, ProcMacroPaths) {
     let _p = tracing::info_span!("cargo_to_crate_graph").entered();
-    let mut res = (CrateGraph::default(), ProcMacroPaths::default());
+    let mut res = (CrateGraphBuilder::default(), ProcMacroPaths::default());
     let (crate_graph, proc_macros) = &mut res;
-    let (public_deps, libproc_macro) =
-        sysroot_to_crate_graph(crate_graph, sysroot, rustc_cfg.clone(), load);
+    let (public_deps, libproc_macro) = sysroot_to_crate_graph(
+        crate_graph,
+        sysroot,
+        rustc_cfg.clone(),
+        load,
+        crate_ws_data.clone(),
+    );
 
     let cfg_options = CfgOptions::from_iter(rustc_cfg);
 
@@ -1106,6 +1147,7 @@ fn cargo_to_crate_graph(
     let mut pkg_crates = FxHashMap::default();
     // Does any crate signal to rust-analyzer that they need the rustc_private crates?
     let mut has_private = false;
+    let workspace_proc_macro_cwd = Arc::new(cargo.workspace_root().to_path_buf());
 
     // Next, create crates for each package, target pair
     for pkg in cargo.packages() {
@@ -1115,11 +1157,11 @@ fn cargo_to_crate_graph(
             let mut cfg_options = cfg_options.clone();
 
             if cargo[pkg].is_local {
-                if set_test {
+                if set_test && !cargo.is_sysroot() {
                     // Add test cfg for local crates
-                    cfg_options.insert_atom(sym::test.clone());
+                    cfg_options.insert_atom(sym::test);
                 }
-                cfg_options.insert_atom(sym::rust_analyzer.clone());
+                cfg_options.insert_atom(sym::rust_analyzer);
             }
 
             override_cfg.apply(&mut cfg_options, &cargo[pkg].name);
@@ -1128,7 +1170,10 @@ fn cargo_to_crate_graph(
 
         let mut lib_tgt = None;
         for &tgt in cargo[pkg].targets.iter() {
-            if !matches!(cargo[tgt].kind, TargetKind::Lib { .. }) && !cargo[pkg].is_member {
+            let pkg_data = &cargo[pkg];
+            if !matches!(cargo[tgt].kind, TargetKind::Lib { .. })
+                && (!pkg_data.is_member || cargo.is_sysroot())
+            {
                 // For non-workspace-members, Cargo does not resolve dev-dependencies, so we don't
                 // add any targets except the library target, since those will not work correctly if
                 // they use dev-dependencies.
@@ -1141,7 +1186,6 @@ fn cargo_to_crate_graph(
             let Some(file_id) = load(root) else { continue };
 
             let build_data = build_scripts.get_output(pkg);
-            let pkg_data = &cargo[pkg];
             let crate_id = add_target_crate_root(
                 crate_graph,
                 proc_macros,
@@ -1153,9 +1197,13 @@ fn cargo_to_crate_graph(
                 name,
                 kind,
                 if pkg_data.is_local {
-                    CrateOrigin::Local {
-                        repo: pkg_data.repository.clone(),
-                        name: Some(Symbol::intern(&pkg_data.name)),
+                    if cargo.is_sysroot() {
+                        CrateOrigin::Lang(LangCrateOrigin::from(&*pkg_data.name))
+                    } else {
+                        CrateOrigin::Local {
+                            repo: pkg_data.repository.clone(),
+                            name: Some(Symbol::intern(&pkg_data.name)),
+                        }
                     }
                 } else {
                     CrateOrigin::Library {
@@ -1163,6 +1211,12 @@ fn cargo_to_crate_graph(
                         name: Symbol::intern(&pkg_data.name),
                     }
                 },
+                crate_ws_data.clone(),
+                if pkg_data.is_member {
+                    workspace_proc_macro_cwd.clone()
+                } else {
+                    Arc::new(pkg_data.manifest.parent().to_path_buf())
+                },
             );
             if let TargetKind::Lib { .. } = kind {
                 lib_tgt = Some((crate_id, name.clone()));
@@ -1267,6 +1321,8 @@ fn cargo_to_crate_graph(
                 } else {
                     rustc_build_scripts
                 },
+                // FIXME: This looks incorrect but I don't think this causes problems.
+                crate_ws_data,
             );
         }
     }
@@ -1280,19 +1336,26 @@ fn detached_file_to_crate_graph(
     sysroot: &Sysroot,
     override_cfg: &CfgOverrides,
     set_test: bool,
-) -> (CrateGraph, ProcMacroPaths) {
+    crate_ws_data: Arc<CrateWorkspaceData>,
+) -> (CrateGraphBuilder, ProcMacroPaths) {
     let _p = tracing::info_span!("detached_file_to_crate_graph").entered();
-    let mut crate_graph = CrateGraph::default();
-    let (public_deps, _libproc_macro) =
-        sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load);
+    let mut crate_graph = CrateGraphBuilder::default();
+    let (public_deps, _libproc_macro) = sysroot_to_crate_graph(
+        &mut crate_graph,
+        sysroot,
+        rustc_cfg.clone(),
+        load,
+        // FIXME: This looks incorrect but I don't think this causes problems.
+        crate_ws_data.clone(),
+    );
 
     let mut cfg_options = CfgOptions::from_iter(rustc_cfg);
     if set_test {
-        cfg_options.insert_atom(sym::test.clone());
+        cfg_options.insert_atom(sym::test);
     }
-    cfg_options.insert_atom(sym::rust_analyzer.clone());
+    cfg_options.insert_atom(sym::rust_analyzer);
     override_cfg.apply(&mut cfg_options, "");
-    let cfg_options = Arc::new(cfg_options);
+    let cfg_options = cfg_options;
 
     let file_id = match load(detached_file) {
         Some(file_id) => file_id,
@@ -1315,31 +1378,35 @@ fn detached_file_to_crate_graph(
             name: display_name.map(|n| n.canonical_name().to_owned()),
         },
         false,
-        None,
+        Arc::new(detached_file.parent().to_path_buf()),
+        crate_ws_data,
     );
 
     public_deps.add_to_crate_graph(&mut crate_graph, detached_file_crate);
     (crate_graph, FxHashMap::default())
 }
 
+// FIXME: There shouldn't really be a need for duplicating all of this?
 fn handle_rustc_crates(
-    crate_graph: &mut CrateGraph,
+    crate_graph: &mut CrateGraphBuilder,
     proc_macros: &mut ProcMacroPaths,
-    pkg_to_lib_crate: &mut FxHashMap<Package, CrateId>,
+    pkg_to_lib_crate: &mut FxHashMap<Package, CrateBuilderId>,
     load: FileLoader<'_>,
     rustc_workspace: &CargoWorkspace,
     cargo: &CargoWorkspace,
     public_deps: &SysrootPublicDeps,
-    libproc_macro: Option<CrateId>,
-    pkg_crates: &FxHashMap<Package, Vec<(CrateId, TargetKind)>>,
+    libproc_macro: Option<CrateBuilderId>,
+    pkg_crates: &FxHashMap<Package, Vec<(CrateBuilderId, TargetKind)>>,
     cfg_options: &CfgOptions,
     override_cfg: &CfgOverrides,
     build_scripts: &WorkspaceBuildScripts,
+    crate_ws_data: Arc<CrateWorkspaceData>,
 ) {
     let mut rustc_pkg_crates = FxHashMap::default();
     // The root package of the rustc-dev component is rustc_driver, so we match that
     let root_pkg =
         rustc_workspace.packages().find(|&package| rustc_workspace[package].name == "rustc_driver");
+    let workspace_proc_macro_cwd = Arc::new(cargo.workspace_root().to_path_buf());
     // The rustc workspace might be incomplete (such as if rustc-dev is not
     // installed for the current toolchain) and `rustc_source` is set to discover.
     if let Some(root_pkg) = root_pkg {
@@ -1353,14 +1420,15 @@ fn handle_rustc_crates(
             if rustc_pkg_crates.contains_key(&pkg) {
                 continue;
             }
-            for dep in &rustc_workspace[pkg].dependencies {
+            let pkg_data = &rustc_workspace[pkg];
+            for dep in &pkg_data.dependencies {
                 queue.push_back(dep.pkg);
             }
 
             let mut cfg_options = cfg_options.clone();
-            override_cfg.apply(&mut cfg_options, &rustc_workspace[pkg].name);
+            override_cfg.apply(&mut cfg_options, &pkg_data.name);
 
-            for &tgt in rustc_workspace[pkg].targets.iter() {
+            for &tgt in pkg_data.targets.iter() {
                 let kind @ TargetKind::Lib { is_proc_macro } = rustc_workspace[tgt].kind else {
                     continue;
                 };
@@ -1370,13 +1438,19 @@ fn handle_rustc_crates(
                         crate_graph,
                         proc_macros,
                         rustc_workspace,
-                        &rustc_workspace[pkg],
+                        pkg_data,
                         build_scripts.get_output(pkg).zip(Some(build_scripts.error().is_some())),
                         cfg_options.clone(),
                         file_id,
                         &rustc_workspace[tgt].name,
                         kind,
-                        CrateOrigin::Rustc { name: Symbol::intern(&rustc_workspace[pkg].name) },
+                        CrateOrigin::Rustc { name: Symbol::intern(&pkg_data.name) },
+                        crate_ws_data.clone(),
+                        if pkg_data.is_member {
+                            workspace_proc_macro_cwd.clone()
+                        } else {
+                            Arc::new(pkg_data.manifest.parent().to_path_buf())
+                        },
                     );
                     pkg_to_lib_crate.insert(pkg, crate_id);
                     // Add dependencies on core / std / alloc for this crate
@@ -1417,7 +1491,7 @@ fn handle_rustc_crates(
                     // This avoids the situation where `from` depends on e.g. `arrayvec`, but
                     // `rust_analyzer` thinks that it should use the one from the `rustc_source`
                     // instead of the one from `crates.io`
-                    if !crate_graph[*from].dependencies.iter().any(|d| d.name == name) {
+                    if !crate_graph[*from].basic.dependencies.iter().any(|d| d.name == name) {
                         add_dep(crate_graph, *from, name.clone(), to);
                     }
                 }
@@ -1427,7 +1501,7 @@ fn handle_rustc_crates(
 }
 
 fn add_target_crate_root(
-    crate_graph: &mut CrateGraph,
+    crate_graph: &mut CrateGraphBuilder,
     proc_macros: &mut ProcMacroPaths,
     cargo: &CargoWorkspace,
     pkg: &PackageData,
@@ -1437,22 +1511,25 @@ fn add_target_crate_root(
     cargo_name: &str,
     kind: TargetKind,
     origin: CrateOrigin,
-) -> CrateId {
+    crate_ws_data: Arc<CrateWorkspaceData>,
+    proc_macro_cwd: Arc<AbsPathBuf>,
+) -> CrateBuilderId {
     let edition = pkg.edition;
     let potential_cfg_options = if pkg.features.is_empty() {
         None
     } else {
         let mut potential_cfg_options = cfg_options.clone();
-        potential_cfg_options.extend(pkg.features.iter().map(|feat| CfgAtom::KeyValue {
-            key: sym::feature.clone(),
-            value: Symbol::intern(feat.0),
-        }));
+        potential_cfg_options.extend(
+            pkg.features
+                .iter()
+                .map(|feat| CfgAtom::KeyValue { key: sym::feature, value: Symbol::intern(feat.0) }),
+        );
         Some(potential_cfg_options)
     };
     let cfg_options = {
         let mut opts = cfg_options;
         for feature in pkg.active_features.iter() {
-            opts.insert_key_value(sym::feature.clone(), Symbol::intern(feature));
+            opts.insert_key_value(sym::feature, Symbol::intern(feature));
         }
         if let Some(cfgs) = build_data.map(|(it, _)| &it.cfgs) {
             opts.extend(cfgs.iter().cloned());
@@ -1473,16 +1550,13 @@ fn add_target_crate_root(
         edition,
         Some(CrateDisplayName::from_canonical_name(cargo_name)),
         Some(pkg.version.to_string()),
-        Arc::new(cfg_options),
-        potential_cfg_options.map(Arc::new),
+        cfg_options,
+        potential_cfg_options,
         env,
         origin,
         matches!(kind, TargetKind::Lib { is_proc_macro: true }),
-        Some(if pkg.is_member {
-            cargo.workspace_root().to_path_buf()
-        } else {
-            pkg.manifest.parent().to_path_buf()
-        }),
+        proc_macro_cwd,
+        crate_ws_data,
     );
     if let TargetKind::Lib { is_proc_macro: true } = kind {
         let proc_macro = match build_data {
@@ -1503,12 +1577,12 @@ fn add_target_crate_root(
 
 #[derive(Default, Debug)]
 struct SysrootPublicDeps {
-    deps: Vec<(CrateName, CrateId, bool)>,
+    deps: Vec<(CrateName, CrateBuilderId, bool)>,
 }
 
 impl SysrootPublicDeps {
     /// Makes `from` depend on the public sysroot crates.
-    fn add_to_crate_graph(&self, crate_graph: &mut CrateGraph, from: CrateId) {
+    fn add_to_crate_graph(&self, crate_graph: &mut CrateGraphBuilder, from: CrateBuilderId) {
         for (name, krate, prelude) in &self.deps {
             add_dep_with_prelude(crate_graph, from, name.clone(), *krate, *prelude, true);
         }
@@ -1516,22 +1590,14 @@ impl SysrootPublicDeps {
 }
 
 fn extend_crate_graph_with_sysroot(
-    crate_graph: &mut CrateGraph,
-    mut sysroot_crate_graph: CrateGraph,
+    crate_graph: &mut CrateGraphBuilder,
+    mut sysroot_crate_graph: CrateGraphBuilder,
     mut sysroot_proc_macros: ProcMacroPaths,
-) -> (SysrootPublicDeps, Option<CrateId>) {
+) -> (SysrootPublicDeps, Option<CrateBuilderId>) {
     let mut pub_deps = vec![];
     let mut libproc_macro = None;
-    let diff = CfgDiff::new(vec![], vec![CfgAtom::Flag(sym::test.clone())]);
-    for (cid, c) in sysroot_crate_graph.iter_mut() {
-        // uninject `test` flag so `core` keeps working.
-        Arc::make_mut(&mut c.cfg_options).apply_diff(diff.clone());
-        // patch the origin
-        if c.origin.is_local() {
-            let lang_crate = LangCrateOrigin::from(
-                c.display_name.as_ref().map_or("", |it| it.canonical_name().as_str()),
-            );
-            c.origin = CrateOrigin::Lang(lang_crate);
+    for cid in sysroot_crate_graph.iter() {
+        if let CrateOrigin::Lang(lang_crate) = sysroot_crate_graph[cid].basic.origin {
             match lang_crate {
                 LangCrateOrigin::Test
                 | LangCrateOrigin::Alloc
@@ -1579,15 +1645,16 @@ fn extend_crate_graph_with_sysroot(
 }
 
 fn sysroot_to_crate_graph(
-    crate_graph: &mut CrateGraph,
+    crate_graph: &mut CrateGraphBuilder,
     sysroot: &Sysroot,
     rustc_cfg: Vec<CfgAtom>,
     load: FileLoader<'_>,
-) -> (SysrootPublicDeps, Option<CrateId>) {
+    crate_ws_data: Arc<CrateWorkspaceData>,
+) -> (SysrootPublicDeps, Option<CrateBuilderId>) {
     let _p = tracing::info_span!("sysroot_to_crate_graph").entered();
     match sysroot.workspace() {
         RustLibSrcWorkspace::Workspace(cargo) => {
-            let (cg, pm) = cargo_to_crate_graph(
+            let (sysroot_cg, sysroot_pm) = cargo_to_crate_graph(
                 load,
                 None,
                 cargo,
@@ -1596,21 +1663,23 @@ fn sysroot_to_crate_graph(
                 &CfgOverrides {
                     global: CfgDiff::new(
                         vec![
-                            CfgAtom::Flag(sym::debug_assertions.clone()),
-                            CfgAtom::Flag(sym::miri.clone()),
+                            CfgAtom::Flag(sym::debug_assertions),
+                            CfgAtom::Flag(sym::miri),
+                            CfgAtom::Flag(sym::bootstrap),
                         ],
-                        vec![],
+                        vec![CfgAtom::Flag(sym::test)],
                     ),
                     ..Default::default()
                 },
                 &WorkspaceBuildScripts::default(),
                 false,
+                crate_ws_data,
             );
 
-            extend_crate_graph_with_sysroot(crate_graph, cg, pm)
+            extend_crate_graph_with_sysroot(crate_graph, sysroot_cg, sysroot_pm)
         }
         RustLibSrcWorkspace::Json(project_json) => {
-            let (cg, pm) = project_json_to_crate_graph(
+            let (sysroot_cg, sysroot_pm) = project_json_to_crate_graph(
                 rustc_cfg,
                 load,
                 project_json,
@@ -1618,50 +1687,51 @@ fn sysroot_to_crate_graph(
                 &FxHashMap::default(),
                 &CfgOverrides {
                     global: CfgDiff::new(
-                        vec![
-                            CfgAtom::Flag(sym::debug_assertions.clone()),
-                            CfgAtom::Flag(sym::miri.clone()),
-                        ],
+                        vec![CfgAtom::Flag(sym::debug_assertions), CfgAtom::Flag(sym::miri)],
                         vec![],
                     ),
                     ..Default::default()
                 },
                 false,
+                true,
+                crate_ws_data,
             );
 
-            extend_crate_graph_with_sysroot(crate_graph, cg, pm)
+            extend_crate_graph_with_sysroot(crate_graph, sysroot_cg, sysroot_pm)
         }
         RustLibSrcWorkspace::Stitched(stitched) => {
-            let cfg_options = Arc::new({
+            let cfg_options = {
                 let mut cfg_options = CfgOptions::default();
                 cfg_options.extend(rustc_cfg);
-                cfg_options.insert_atom(sym::debug_assertions.clone());
-                cfg_options.insert_atom(sym::miri.clone());
+                cfg_options.insert_atom(sym::debug_assertions);
+                cfg_options.insert_atom(sym::miri);
                 cfg_options
-            });
-            let sysroot_crates: FxHashMap<crate::sysroot::stitched::RustLibSrcCrate, CrateId> =
-                stitched
-                    .crates()
-                    .filter_map(|krate| {
-                        let file_id = load(&stitched[krate].root)?;
-
-                        let display_name =
-                            CrateDisplayName::from_canonical_name(&stitched[krate].name);
-                        let crate_id = crate_graph.add_crate_root(
-                            file_id,
-                            Edition::CURRENT_FIXME,
-                            Some(display_name),
-                            None,
-                            cfg_options.clone(),
-                            None,
-                            Env::default(),
-                            CrateOrigin::Lang(LangCrateOrigin::from(&*stitched[krate].name)),
-                            false,
-                            None,
-                        );
-                        Some((krate, crate_id))
-                    })
-                    .collect();
+            };
+            let sysroot_crates: FxHashMap<
+                crate::sysroot::stitched::RustLibSrcCrate,
+                CrateBuilderId,
+            > = stitched
+                .crates()
+                .filter_map(|krate| {
+                    let file_id = load(&stitched[krate].root)?;
+
+                    let display_name = CrateDisplayName::from_canonical_name(&stitched[krate].name);
+                    let crate_id = crate_graph.add_crate_root(
+                        file_id,
+                        Edition::CURRENT_FIXME,
+                        Some(display_name),
+                        None,
+                        cfg_options.clone(),
+                        None,
+                        Env::default(),
+                        CrateOrigin::Lang(LangCrateOrigin::from(&*stitched[krate].name)),
+                        false,
+                        Arc::new(stitched[krate].root.parent().to_path_buf()),
+                        crate_ws_data.clone(),
+                    );
+                    Some((krate, crate_id))
+                })
+                .collect();
 
             for from in stitched.crates() {
                 for &to in stitched[from].deps.iter() {
@@ -1691,22 +1761,32 @@ fn sysroot_to_crate_graph(
     }
 }
 
-fn add_dep(graph: &mut CrateGraph, from: CrateId, name: CrateName, to: CrateId) {
-    add_dep_inner(graph, from, Dependency::new(name, to))
+fn add_dep(
+    graph: &mut CrateGraphBuilder,
+    from: CrateBuilderId,
+    name: CrateName,
+    to: CrateBuilderId,
+) {
+    add_dep_inner(graph, from, DependencyBuilder::new(name, to))
 }
 
 fn add_dep_with_prelude(
-    graph: &mut CrateGraph,
-    from: CrateId,
+    graph: &mut CrateGraphBuilder,
+    from: CrateBuilderId,
     name: CrateName,
-    to: CrateId,
+    to: CrateBuilderId,
     prelude: bool,
     sysroot: bool,
 ) {
-    add_dep_inner(graph, from, Dependency::with_prelude(name, to, prelude, sysroot))
+    add_dep_inner(graph, from, DependencyBuilder::with_prelude(name, to, prelude, sysroot))
 }
 
-fn add_proc_macro_dep(crate_graph: &mut CrateGraph, from: CrateId, to: CrateId, prelude: bool) {
+fn add_proc_macro_dep(
+    crate_graph: &mut CrateGraphBuilder,
+    from: CrateBuilderId,
+    to: CrateBuilderId,
+    prelude: bool,
+) {
     add_dep_with_prelude(
         crate_graph,
         from,
@@ -1717,14 +1797,14 @@ fn add_proc_macro_dep(crate_graph: &mut CrateGraph, from: CrateId, to: CrateId,
     );
 }
 
-fn add_dep_inner(graph: &mut CrateGraph, from: CrateId, dep: Dependency) {
+fn add_dep_inner(graph: &mut CrateGraphBuilder, from: CrateBuilderId, dep: DependencyBuilder) {
     if let Err(err) = graph.add_dep(from, dep) {
         tracing::warn!("{}", err)
     }
 }
 
 fn sysroot_metadata_config(
-    extra_env: &FxHashMap<String, String>,
+    extra_env: &FxHashMap<String, Option<String>>,
     targets: &[String],
 ) -> CargoMetadataConfig {
     CargoMetadataConfig {
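
Illustrative sketch, not part of the patch: the helpers above (`add_dep`, `add_dep_with_prelude`, `add_dep_inner`) all funnel edge insertion through one place so that a rejected edge is logged rather than aborting graph construction. The stand-in types `MiniGraph`, `MiniId`, and `MiniDep` below are hypothetical substitutes for `CrateGraphBuilder`, `CrateBuilderId`, and `DependencyBuilder`; only the error-logging pattern mirrors the code being changed.

    use std::collections::HashMap;

    #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
    struct MiniId(usize);

    #[derive(Debug)]
    struct MiniDep {
        name: String,
        to: MiniId,
        prelude: bool,
        sysroot: bool,
    }

    #[derive(Default)]
    struct MiniGraph {
        deps: HashMap<MiniId, Vec<MiniDep>>,
    }

    impl MiniGraph {
        // Fallible insertion, like the builder's `add_dep`: reject duplicate
        // edges and let the caller decide how to report the error.
        fn add_dep(&mut self, from: MiniId, dep: MiniDep) -> Result<(), String> {
            let edges = self.deps.entry(from).or_default();
            if edges.iter().any(|d| d.name == dep.name) {
                return Err(format!("duplicate dependency `{}`", dep.name));
            }
            edges.push(dep);
            Ok(())
        }
    }

    // Single funnel for all the convenience wrappers: a bad edge is logged,
    // never fatal (the real code uses tracing::warn!).
    fn add_dep_inner(graph: &mut MiniGraph, from: MiniId, dep: MiniDep) {
        if let Err(err) = graph.add_dep(from, dep) {
            eprintln!("warn: {err}");
        }
    }

    fn add_dep_with_prelude(
        graph: &mut MiniGraph,
        from: MiniId,
        name: &str,
        to: MiniId,
        prelude: bool,
        sysroot: bool,
    ) {
        add_dep_inner(graph, from, MiniDep { name: name.to_owned(), to, prelude, sysroot });
    }

    fn main() {
        let mut graph = MiniGraph::default();
        let (a, b) = (MiniId(0), MiniId(1));
        add_dep_with_prelude(&mut graph, a, "core", b, true, true);
        add_dep_with_prelude(&mut graph, a, "core", b, true, true); // logged, not a panic
        println!("{:?}", graph.deps[&a]);
    }
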
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/alloc/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/alloc/src/lib.rs
deleted file mode 100644
index e69de29bb2d1d..0000000000000
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/core/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/core/src/lib.rs
deleted file mode 100644
index e69de29bb2d1d..0000000000000
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/panic_abort/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/panic_abort/src/lib.rs
deleted file mode 100644
index e69de29bb2d1d..0000000000000
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/panic_unwind/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/panic_unwind/src/lib.rs
deleted file mode 100644
index e69de29bb2d1d..0000000000000
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/proc_macro/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/proc_macro/src/lib.rs
deleted file mode 100644
index e69de29bb2d1d..0000000000000
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/profiler_builtins/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/profiler_builtins/src/lib.rs
deleted file mode 100644
index e69de29bb2d1d..0000000000000
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/std/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/std/src/lib.rs
deleted file mode 100644
index e69de29bb2d1d..0000000000000
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/stdarch/crates/std_detect/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/stdarch/crates/std_detect/src/lib.rs
deleted file mode 100644
index e69de29bb2d1d..0000000000000
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/term/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/term/src/lib.rs
deleted file mode 100644
index e69de29bb2d1d..0000000000000
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/test/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/test/src/lib.rs
deleted file mode 100644
index e69de29bb2d1d..0000000000000
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/unwind/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/test_data/fake-sysroot/unwind/src/lib.rs
deleted file mode 100644
index e69de29bb2d1d..0000000000000
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model.txt
index fae0b6fcca4dd..4ef9d81611974 100644
--- a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model.txt
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model.txt
@@ -1,20 +1,45 @@
 {
-    0: CrateData {
-        root_file_id: FileId(
-            1,
-        ),
-        edition: Edition2018,
-        version: Some(
-            "0.1.0",
-        ),
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "hello_world",
+    0: CrateBuilder {
+        basic: CrateData {
+            root_file_id: FileId(
+                1,
+            ),
+            edition: Edition2018,
+            dependencies: [
+                Dependency {
+                    crate_id: Idx::<CrateBuilder>(4),
+                    name: CrateName(
+                        "libc",
+                    ),
+                    prelude: true,
+                    sysroot: false,
+                },
+            ],
+            origin: Local {
+                repo: None,
+                name: Some(
+                    "hello-world",
                 ),
-                canonical_name: "hello-world",
             },
-        ),
+            is_proc_macro: false,
+            proc_macro_cwd: AbsPathBuf(
+                "$ROOT$hello-world",
+            ),
+        },
+        extra: ExtraCrateData {
+            version: Some(
+                "0.1.0",
+            ),
+            display_name: Some(
+                CrateDisplayName {
+                    crate_name: CrateName(
+                        "hello_world",
+                    ),
+                    canonical_name: "hello-world",
+                },
+            ),
+            potential_cfg_options: None,
+        },
         cfg_options: CfgOptions(
             [
                 "rust_analyzer",
@@ -22,7 +47,6 @@
                 "true",
             ],
         ),
-        potential_cfg_options: None,
         env: Env {
             entries: {
                 "CARGO": "$CARGO$",
@@ -44,45 +68,62 @@
                 "CARGO_PKG_VERSION_PRE": "",
             },
         },
-        dependencies: [
-            Dependency {
-                crate_id: Idx::<CrateData>(4),
-                name: CrateName(
-                    "libc",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-        ],
-        origin: Local {
-            repo: None,
-            name: Some(
-                "hello-world",
+        ws_data: CrateWorkspaceData {
+            data_layout: Err(
+                "target_data_layout not loaded",
             ),
+            toolchain: None,
         },
-        is_proc_macro: false,
-        proc_macro_cwd: Some(
-            AbsPathBuf(
-                "$ROOT$hello-world",
-            ),
-        ),
     },
-    1: CrateData {
-        root_file_id: FileId(
-            2,
-        ),
-        edition: Edition2018,
-        version: Some(
-            "0.1.0",
-        ),
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "hello_world",
+    1: CrateBuilder {
+        basic: CrateData {
+            root_file_id: FileId(
+                2,
+            ),
+            edition: Edition2018,
+            dependencies: [
+                Dependency {
+                    crate_id: Idx::<CrateBuilder>(0),
+                    name: CrateName(
+                        "hello_world",
+                    ),
+                    prelude: true,
+                    sysroot: false,
+                },
+                Dependency {
+                    crate_id: Idx::<CrateBuilder>(4),
+                    name: CrateName(
+                        "libc",
+                    ),
+                    prelude: true,
+                    sysroot: false,
+                },
+            ],
+            origin: Local {
+                repo: None,
+                name: Some(
+                    "hello-world",
                 ),
-                canonical_name: "hello-world",
             },
-        ),
+            is_proc_macro: false,
+            proc_macro_cwd: AbsPathBuf(
+                "$ROOT$hello-world",
+            ),
+        },
+        extra: ExtraCrateData {
+            version: Some(
+                "0.1.0",
+            ),
+            display_name: Some(
+                CrateDisplayName {
+                    crate_name: CrateName(
+                        "hello_world",
+                    ),
+                    canonical_name: "hello-world",
+                },
+            ),
+            potential_cfg_options: None,
+        },
         cfg_options: CfgOptions(
             [
                 "rust_analyzer",
@@ -90,7 +131,6 @@
                 "true",
             ],
         ),
-        potential_cfg_options: None,
         env: Env {
             entries: {
                 "CARGO": "$CARGO$",
@@ -112,53 +152,62 @@
                 "CARGO_PKG_VERSION_PRE": "",
             },
         },
-        dependencies: [
-            Dependency {
-                crate_id: Idx::<CrateData>(0),
-                name: CrateName(
-                    "hello_world",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(4),
-                name: CrateName(
-                    "libc",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-        ],
-        origin: Local {
-            repo: None,
-            name: Some(
-                "hello-world",
+        ws_data: CrateWorkspaceData {
+            data_layout: Err(
+                "target_data_layout not loaded",
             ),
+            toolchain: None,
         },
-        is_proc_macro: false,
-        proc_macro_cwd: Some(
-            AbsPathBuf(
-                "$ROOT$hello-world",
-            ),
-        ),
     },
-    2: CrateData {
-        root_file_id: FileId(
-            3,
-        ),
-        edition: Edition2018,
-        version: Some(
-            "0.1.0",
-        ),
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "an_example",
+    2: CrateBuilder {
+        basic: CrateData {
+            root_file_id: FileId(
+                3,
+            ),
+            edition: Edition2018,
+            dependencies: [
+                Dependency {
+                    crate_id: Idx::<CrateBuilder>(0),
+                    name: CrateName(
+                        "hello_world",
+                    ),
+                    prelude: true,
+                    sysroot: false,
+                },
+                Dependency {
+                    crate_id: Idx::<CrateBuilder>(4),
+                    name: CrateName(
+                        "libc",
+                    ),
+                    prelude: true,
+                    sysroot: false,
+                },
+            ],
+            origin: Local {
+                repo: None,
+                name: Some(
+                    "hello-world",
                 ),
-                canonical_name: "an-example",
             },
-        ),
+            is_proc_macro: false,
+            proc_macro_cwd: AbsPathBuf(
+                "$ROOT$hello-world",
+            ),
+        },
+        extra: ExtraCrateData {
+            version: Some(
+                "0.1.0",
+            ),
+            display_name: Some(
+                CrateDisplayName {
+                    crate_name: CrateName(
+                        "an_example",
+                    ),
+                    canonical_name: "an-example",
+                },
+            ),
+            potential_cfg_options: None,
+        },
         cfg_options: CfgOptions(
             [
                 "rust_analyzer",
@@ -166,7 +215,6 @@
                 "true",
             ],
         ),
-        potential_cfg_options: None,
         env: Env {
             entries: {
                 "CARGO": "$CARGO$",
@@ -188,53 +236,62 @@
                 "CARGO_PKG_VERSION_PRE": "",
             },
         },
-        dependencies: [
-            Dependency {
-                crate_id: Idx::<CrateData>(0),
-                name: CrateName(
-                    "hello_world",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(4),
-                name: CrateName(
-                    "libc",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-        ],
-        origin: Local {
-            repo: None,
-            name: Some(
-                "hello-world",
+        ws_data: CrateWorkspaceData {
+            data_layout: Err(
+                "target_data_layout not loaded",
             ),
+            toolchain: None,
         },
-        is_proc_macro: false,
-        proc_macro_cwd: Some(
-            AbsPathBuf(
-                "$ROOT$hello-world",
-            ),
-        ),
     },
-    3: CrateData {
-        root_file_id: FileId(
-            4,
-        ),
-        edition: Edition2018,
-        version: Some(
-            "0.1.0",
-        ),
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "it",
+    3: CrateBuilder {
+        basic: CrateData {
+            root_file_id: FileId(
+                4,
+            ),
+            edition: Edition2018,
+            dependencies: [
+                Dependency {
+                    crate_id: Idx::<CrateBuilder>(0),
+                    name: CrateName(
+                        "hello_world",
+                    ),
+                    prelude: true,
+                    sysroot: false,
+                },
+                Dependency {
+                    crate_id: Idx::<CrateBuilder>(4),
+                    name: CrateName(
+                        "libc",
+                    ),
+                    prelude: true,
+                    sysroot: false,
+                },
+            ],
+            origin: Local {
+                repo: None,
+                name: Some(
+                    "hello-world",
                 ),
-                canonical_name: "it",
             },
-        ),
+            is_proc_macro: false,
+            proc_macro_cwd: AbsPathBuf(
+                "$ROOT$hello-world",
+            ),
+        },
+        extra: ExtraCrateData {
+            version: Some(
+                "0.1.0",
+            ),
+            display_name: Some(
+                CrateDisplayName {
+                    crate_name: CrateName(
+                        "it",
+                    ),
+                    canonical_name: "it",
+                },
+            ),
+            potential_cfg_options: None,
+        },
         cfg_options: CfgOptions(
             [
                 "rust_analyzer",
@@ -242,7 +299,6 @@
                 "true",
             ],
         ),
-        potential_cfg_options: None,
         env: Env {
             entries: {
                 "CARGO": "$CARGO$",
@@ -264,53 +320,58 @@
                 "CARGO_PKG_VERSION_PRE": "",
             },
         },
-        dependencies: [
-            Dependency {
-                crate_id: Idx::<CrateData>(0),
-                name: CrateName(
-                    "hello_world",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(4),
-                name: CrateName(
-                    "libc",
+        ws_data: CrateWorkspaceData {
+            data_layout: Err(
+                "target_data_layout not loaded",
+            ),
+            toolchain: None,
+        },
+    },
+    4: CrateBuilder {
+        basic: CrateData {
+            root_file_id: FileId(
+                5,
+            ),
+            edition: Edition2015,
+            dependencies: [],
+            origin: Library {
+                repo: Some(
+                    "https://github.com/rust-lang/libc",
                 ),
-                prelude: true,
-                sysroot: false,
+                name: "libc",
             },
-        ],
-        origin: Local {
-            repo: None,
-            name: Some(
-                "hello-world",
+            is_proc_macro: false,
+            proc_macro_cwd: AbsPathBuf(
+                "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
             ),
         },
-        is_proc_macro: false,
-        proc_macro_cwd: Some(
-            AbsPathBuf(
-                "$ROOT$hello-world",
+        extra: ExtraCrateData {
+            version: Some(
+                "0.2.98",
             ),
-        ),
-    },
-    4: CrateData {
-        root_file_id: FileId(
-            5,
-        ),
-        edition: Edition2015,
-        version: Some(
-            "0.2.98",
-        ),
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "libc",
+            display_name: Some(
+                CrateDisplayName {
+                    crate_name: CrateName(
+                        "libc",
+                    ),
+                    canonical_name: "libc",
+                },
+            ),
+            potential_cfg_options: Some(
+                CfgOptions(
+                    [
+                        "feature=align",
+                        "feature=const-extern-fn",
+                        "feature=default",
+                        "feature=extra_traits",
+                        "feature=rustc-dep-of-std",
+                        "feature=std",
+                        "feature=use_std",
+                        "true",
+                    ],
                 ),
-                canonical_name: "libc",
-            },
-        ),
+            ),
+        },
         cfg_options: CfgOptions(
             [
                 "feature=default",
@@ -318,20 +379,6 @@
                 "true",
             ],
         ),
-        potential_cfg_options: Some(
-            CfgOptions(
-                [
-                    "feature=align",
-                    "feature=const-extern-fn",
-                    "feature=default",
-                    "feature=extra_traits",
-                    "feature=rustc-dep-of-std",
-                    "feature=std",
-                    "feature=use_std",
-                    "true",
-                ],
-            ),
-        ),
         env: Env {
             entries: {
                 "CARGO": "$CARGO$",
@@ -353,18 +400,11 @@
                 "CARGO_PKG_VERSION_PRE": "",
             },
         },
-        dependencies: [],
-        origin: Library {
-            repo: Some(
-                "https://github.com/rust-lang/libc",
+        ws_data: CrateWorkspaceData {
+            data_layout: Err(
+                "target_data_layout not loaded",
             ),
-            name: "libc",
+            toolchain: None,
         },
-        is_proc_macro: false,
-        proc_macro_cwd: Some(
-            AbsPathBuf(
-                "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
-            ),
-        ),
     },
 }
\ No newline at end of file
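
Hypothetical sketch, not part of the patch: the updated snapshot above prints a per-crate entry split into an always-needed `basic` part, rarely-needed `extra` metadata, and shared `ws_data`. The struct and field names below mirror the snapshot output only; the real rust-analyzer definitions differ.

    use std::path::PathBuf;
    use std::sync::Arc;

    #[derive(Debug)]
    struct CrateBuilderSketch {
        basic: BasicCrateData,
        extra: ExtraCrateData,
        cfg_options: Vec<String>,
        ws_data: Arc<CrateWorkspaceDataSketch>,
    }

    #[derive(Debug)]
    struct BasicCrateData {
        root_file_id: u32,
        edition: &'static str,
        dependencies: Vec<(usize, String)>, // (crate index, dependency name)
        is_proc_macro: bool,
        proc_macro_cwd: PathBuf,
    }

    #[derive(Debug)]
    struct ExtraCrateData {
        version: Option<String>,
        display_name: Option<String>,
        potential_cfg_options: Option<Vec<String>>,
    }

    #[derive(Debug)]
    struct CrateWorkspaceDataSketch {
        data_layout: Result<String, String>,
        toolchain: Option<String>,
    }

    fn main() {
        // One shared workspace-data value can be reused by every crate entry.
        let ws_data = Arc::new(CrateWorkspaceDataSketch {
            data_layout: Err("target_data_layout not loaded".into()),
            toolchain: None,
        });
        let hello_world = CrateBuilderSketch {
            basic: BasicCrateData {
                root_file_id: 1,
                edition: "2018",
                dependencies: vec![(4, "libc".into())],
                is_proc_macro: false,
                proc_macro_cwd: PathBuf::from("hello-world"),
            },
            extra: ExtraCrateData {
                version: Some("0.1.0".into()),
                display_name: Some("hello_world".into()),
                potential_cfg_options: None,
            },
            cfg_options: vec!["rust_analyzer".into(), "true".into()],
            ws_data: Arc::clone(&ws_data),
        };
        println!("{hello_world:#?}");
    }
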
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt
index fae0b6fcca4dd..4ef9d81611974 100644
--- a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt
@@ -1,20 +1,45 @@
 {
-    0: CrateData {
-        root_file_id: FileId(
-            1,
-        ),
-        edition: Edition2018,
-        version: Some(
-            "0.1.0",
-        ),
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "hello_world",
+    0: CrateBuilder {
+        basic: CrateData {
+            root_file_id: FileId(
+                1,
+            ),
+            edition: Edition2018,
+            dependencies: [
+                Dependency {
+                    crate_id: Idx::<CrateBuilder>(4),
+                    name: CrateName(
+                        "libc",
+                    ),
+                    prelude: true,
+                    sysroot: false,
+                },
+            ],
+            origin: Local {
+                repo: None,
+                name: Some(
+                    "hello-world",
                 ),
-                canonical_name: "hello-world",
             },
-        ),
+            is_proc_macro: false,
+            proc_macro_cwd: AbsPathBuf(
+                "$ROOT$hello-world",
+            ),
+        },
+        extra: ExtraCrateData {
+            version: Some(
+                "0.1.0",
+            ),
+            display_name: Some(
+                CrateDisplayName {
+                    crate_name: CrateName(
+                        "hello_world",
+                    ),
+                    canonical_name: "hello-world",
+                },
+            ),
+            potential_cfg_options: None,
+        },
         cfg_options: CfgOptions(
             [
                 "rust_analyzer",
@@ -22,7 +47,6 @@
                 "true",
             ],
         ),
-        potential_cfg_options: None,
         env: Env {
             entries: {
                 "CARGO": "$CARGO$",
@@ -44,45 +68,62 @@
                 "CARGO_PKG_VERSION_PRE": "",
             },
         },
-        dependencies: [
-            Dependency {
-                crate_id: Idx::<CrateData>(4),
-                name: CrateName(
-                    "libc",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-        ],
-        origin: Local {
-            repo: None,
-            name: Some(
-                "hello-world",
+        ws_data: CrateWorkspaceData {
+            data_layout: Err(
+                "target_data_layout not loaded",
             ),
+            toolchain: None,
         },
-        is_proc_macro: false,
-        proc_macro_cwd: Some(
-            AbsPathBuf(
-                "$ROOT$hello-world",
-            ),
-        ),
     },
-    1: CrateData {
-        root_file_id: FileId(
-            2,
-        ),
-        edition: Edition2018,
-        version: Some(
-            "0.1.0",
-        ),
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "hello_world",
+    1: CrateBuilder {
+        basic: CrateData {
+            root_file_id: FileId(
+                2,
+            ),
+            edition: Edition2018,
+            dependencies: [
+                Dependency {
+                    crate_id: Idx::<CrateBuilder>(0),
+                    name: CrateName(
+                        "hello_world",
+                    ),
+                    prelude: true,
+                    sysroot: false,
+                },
+                Dependency {
+                    crate_id: Idx::<CrateBuilder>(4),
+                    name: CrateName(
+                        "libc",
+                    ),
+                    prelude: true,
+                    sysroot: false,
+                },
+            ],
+            origin: Local {
+                repo: None,
+                name: Some(
+                    "hello-world",
                 ),
-                canonical_name: "hello-world",
             },
-        ),
+            is_proc_macro: false,
+            proc_macro_cwd: AbsPathBuf(
+                "$ROOT$hello-world",
+            ),
+        },
+        extra: ExtraCrateData {
+            version: Some(
+                "0.1.0",
+            ),
+            display_name: Some(
+                CrateDisplayName {
+                    crate_name: CrateName(
+                        "hello_world",
+                    ),
+                    canonical_name: "hello-world",
+                },
+            ),
+            potential_cfg_options: None,
+        },
         cfg_options: CfgOptions(
             [
                 "rust_analyzer",
@@ -90,7 +131,6 @@
                 "true",
             ],
         ),
-        potential_cfg_options: None,
         env: Env {
             entries: {
                 "CARGO": "$CARGO$",
@@ -112,53 +152,62 @@
                 "CARGO_PKG_VERSION_PRE": "",
             },
         },
-        dependencies: [
-            Dependency {
-                crate_id: Idx::<CrateData>(0),
-                name: CrateName(
-                    "hello_world",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(4),
-                name: CrateName(
-                    "libc",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-        ],
-        origin: Local {
-            repo: None,
-            name: Some(
-                "hello-world",
+        ws_data: CrateWorkspaceData {
+            data_layout: Err(
+                "target_data_layout not loaded",
             ),
+            toolchain: None,
         },
-        is_proc_macro: false,
-        proc_macro_cwd: Some(
-            AbsPathBuf(
-                "$ROOT$hello-world",
-            ),
-        ),
     },
-    2: CrateData {
-        root_file_id: FileId(
-            3,
-        ),
-        edition: Edition2018,
-        version: Some(
-            "0.1.0",
-        ),
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "an_example",
+    2: CrateBuilder {
+        basic: CrateData {
+            root_file_id: FileId(
+                3,
+            ),
+            edition: Edition2018,
+            dependencies: [
+                Dependency {
+                    crate_id: Idx::<CrateBuilder>(0),
+                    name: CrateName(
+                        "hello_world",
+                    ),
+                    prelude: true,
+                    sysroot: false,
+                },
+                Dependency {
+                    crate_id: Idx::<CrateBuilder>(4),
+                    name: CrateName(
+                        "libc",
+                    ),
+                    prelude: true,
+                    sysroot: false,
+                },
+            ],
+            origin: Local {
+                repo: None,
+                name: Some(
+                    "hello-world",
                 ),
-                canonical_name: "an-example",
             },
-        ),
+            is_proc_macro: false,
+            proc_macro_cwd: AbsPathBuf(
+                "$ROOT$hello-world",
+            ),
+        },
+        extra: ExtraCrateData {
+            version: Some(
+                "0.1.0",
+            ),
+            display_name: Some(
+                CrateDisplayName {
+                    crate_name: CrateName(
+                        "an_example",
+                    ),
+                    canonical_name: "an-example",
+                },
+            ),
+            potential_cfg_options: None,
+        },
         cfg_options: CfgOptions(
             [
                 "rust_analyzer",
@@ -166,7 +215,6 @@
                 "true",
             ],
         ),
-        potential_cfg_options: None,
         env: Env {
             entries: {
                 "CARGO": "$CARGO$",
@@ -188,53 +236,62 @@
                 "CARGO_PKG_VERSION_PRE": "",
             },
         },
-        dependencies: [
-            Dependency {
-                crate_id: Idx::<CrateData>(0),
-                name: CrateName(
-                    "hello_world",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(4),
-                name: CrateName(
-                    "libc",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-        ],
-        origin: Local {
-            repo: None,
-            name: Some(
-                "hello-world",
+        ws_data: CrateWorkspaceData {
+            data_layout: Err(
+                "target_data_layout not loaded",
             ),
+            toolchain: None,
         },
-        is_proc_macro: false,
-        proc_macro_cwd: Some(
-            AbsPathBuf(
-                "$ROOT$hello-world",
-            ),
-        ),
     },
-    3: CrateData {
-        root_file_id: FileId(
-            4,
-        ),
-        edition: Edition2018,
-        version: Some(
-            "0.1.0",
-        ),
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "it",
+    3: CrateBuilder {
+        basic: CrateData {
+            root_file_id: FileId(
+                4,
+            ),
+            edition: Edition2018,
+            dependencies: [
+                Dependency {
+                    crate_id: Idx::<CrateBuilder>(0),
+                    name: CrateName(
+                        "hello_world",
+                    ),
+                    prelude: true,
+                    sysroot: false,
+                },
+                Dependency {
+                    crate_id: Idx::<CrateBuilder>(4),
+                    name: CrateName(
+                        "libc",
+                    ),
+                    prelude: true,
+                    sysroot: false,
+                },
+            ],
+            origin: Local {
+                repo: None,
+                name: Some(
+                    "hello-world",
                 ),
-                canonical_name: "it",
             },
-        ),
+            is_proc_macro: false,
+            proc_macro_cwd: AbsPathBuf(
+                "$ROOT$hello-world",
+            ),
+        },
+        extra: ExtraCrateData {
+            version: Some(
+                "0.1.0",
+            ),
+            display_name: Some(
+                CrateDisplayName {
+                    crate_name: CrateName(
+                        "it",
+                    ),
+                    canonical_name: "it",
+                },
+            ),
+            potential_cfg_options: None,
+        },
         cfg_options: CfgOptions(
             [
                 "rust_analyzer",
@@ -242,7 +299,6 @@
                 "true",
             ],
         ),
-        potential_cfg_options: None,
         env: Env {
             entries: {
                 "CARGO": "$CARGO$",
@@ -264,53 +320,58 @@
                 "CARGO_PKG_VERSION_PRE": "",
             },
         },
-        dependencies: [
-            Dependency {
-                crate_id: Idx::<CrateData>(0),
-                name: CrateName(
-                    "hello_world",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(4),
-                name: CrateName(
-                    "libc",
+        ws_data: CrateWorkspaceData {
+            data_layout: Err(
+                "target_data_layout not loaded",
+            ),
+            toolchain: None,
+        },
+    },
+    4: CrateBuilder {
+        basic: CrateData {
+            root_file_id: FileId(
+                5,
+            ),
+            edition: Edition2015,
+            dependencies: [],
+            origin: Library {
+                repo: Some(
+                    "https://github.com/rust-lang/libc",
                 ),
-                prelude: true,
-                sysroot: false,
+                name: "libc",
             },
-        ],
-        origin: Local {
-            repo: None,
-            name: Some(
-                "hello-world",
+            is_proc_macro: false,
+            proc_macro_cwd: AbsPathBuf(
+                "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
             ),
         },
-        is_proc_macro: false,
-        proc_macro_cwd: Some(
-            AbsPathBuf(
-                "$ROOT$hello-world",
+        extra: ExtraCrateData {
+            version: Some(
+                "0.2.98",
             ),
-        ),
-    },
-    4: CrateData {
-        root_file_id: FileId(
-            5,
-        ),
-        edition: Edition2015,
-        version: Some(
-            "0.2.98",
-        ),
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "libc",
+            display_name: Some(
+                CrateDisplayName {
+                    crate_name: CrateName(
+                        "libc",
+                    ),
+                    canonical_name: "libc",
+                },
+            ),
+            potential_cfg_options: Some(
+                CfgOptions(
+                    [
+                        "feature=align",
+                        "feature=const-extern-fn",
+                        "feature=default",
+                        "feature=extra_traits",
+                        "feature=rustc-dep-of-std",
+                        "feature=std",
+                        "feature=use_std",
+                        "true",
+                    ],
                 ),
-                canonical_name: "libc",
-            },
-        ),
+            ),
+        },
         cfg_options: CfgOptions(
             [
                 "feature=default",
@@ -318,20 +379,6 @@
                 "true",
             ],
         ),
-        potential_cfg_options: Some(
-            CfgOptions(
-                [
-                    "feature=align",
-                    "feature=const-extern-fn",
-                    "feature=default",
-                    "feature=extra_traits",
-                    "feature=rustc-dep-of-std",
-                    "feature=std",
-                    "feature=use_std",
-                    "true",
-                ],
-            ),
-        ),
         env: Env {
             entries: {
                 "CARGO": "$CARGO$",
@@ -353,18 +400,11 @@
                 "CARGO_PKG_VERSION_PRE": "",
             },
         },
-        dependencies: [],
-        origin: Library {
-            repo: Some(
-                "https://github.com/rust-lang/libc",
+        ws_data: CrateWorkspaceData {
+            data_layout: Err(
+                "target_data_layout not loaded",
             ),
-            name: "libc",
+            toolchain: None,
         },
-        is_proc_macro: false,
-        proc_macro_cwd: Some(
-            AbsPathBuf(
-                "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
-            ),
-        ),
     },
 }
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt
index 566174882ddac..52089d1dbc2ce 100644
--- a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt
@@ -1,27 +1,51 @@
 {
-    0: CrateData {
-        root_file_id: FileId(
-            1,
-        ),
-        edition: Edition2018,
-        version: Some(
-            "0.1.0",
-        ),
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "hello_world",
+    0: CrateBuilder {
+        basic: CrateData {
+            root_file_id: FileId(
+                1,
+            ),
+            edition: Edition2018,
+            dependencies: [
+                Dependency {
+                    crate_id: Idx::<CrateBuilder>(4),
+                    name: CrateName(
+                        "libc",
+                    ),
+                    prelude: true,
+                    sysroot: false,
+                },
+            ],
+            origin: Local {
+                repo: None,
+                name: Some(
+                    "hello-world",
                 ),
-                canonical_name: "hello-world",
             },
-        ),
+            is_proc_macro: false,
+            proc_macro_cwd: AbsPathBuf(
+                "$ROOT$hello-world",
+            ),
+        },
+        extra: ExtraCrateData {
+            version: Some(
+                "0.1.0",
+            ),
+            display_name: Some(
+                CrateDisplayName {
+                    crate_name: CrateName(
+                        "hello_world",
+                    ),
+                    canonical_name: "hello-world",
+                },
+            ),
+            potential_cfg_options: None,
+        },
         cfg_options: CfgOptions(
             [
                 "rust_analyzer",
                 "true",
             ],
         ),
-        potential_cfg_options: None,
         env: Env {
             entries: {
                 "CARGO": "$CARGO$",
@@ -43,52 +67,68 @@
                 "CARGO_PKG_VERSION_PRE": "",
             },
         },
-        dependencies: [
-            Dependency {
-                crate_id: Idx::<CrateData>(4),
-                name: CrateName(
-                    "libc",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-        ],
-        origin: Local {
-            repo: None,
-            name: Some(
-                "hello-world",
+        ws_data: CrateWorkspaceData {
+            data_layout: Err(
+                "target_data_layout not loaded",
             ),
+            toolchain: None,
         },
-        is_proc_macro: false,
-        proc_macro_cwd: Some(
-            AbsPathBuf(
-                "$ROOT$hello-world",
-            ),
-        ),
     },
-    1: CrateData {
-        root_file_id: FileId(
-            2,
-        ),
-        edition: Edition2018,
-        version: Some(
-            "0.1.0",
-        ),
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "hello_world",
+    1: CrateBuilder {
+        basic: CrateData {
+            root_file_id: FileId(
+                2,
+            ),
+            edition: Edition2018,
+            dependencies: [
+                Dependency {
+                    crate_id: Idx::<CrateBuilder>(0),
+                    name: CrateName(
+                        "hello_world",
+                    ),
+                    prelude: true,
+                    sysroot: false,
+                },
+                Dependency {
+                    crate_id: Idx::<CrateBuilder>(4),
+                    name: CrateName(
+                        "libc",
+                    ),
+                    prelude: true,
+                    sysroot: false,
+                },
+            ],
+            origin: Local {
+                repo: None,
+                name: Some(
+                    "hello-world",
                 ),
-                canonical_name: "hello-world",
             },
-        ),
+            is_proc_macro: false,
+            proc_macro_cwd: AbsPathBuf(
+                "$ROOT$hello-world",
+            ),
+        },
+        extra: ExtraCrateData {
+            version: Some(
+                "0.1.0",
+            ),
+            display_name: Some(
+                CrateDisplayName {
+                    crate_name: CrateName(
+                        "hello_world",
+                    ),
+                    canonical_name: "hello-world",
+                },
+            ),
+            potential_cfg_options: None,
+        },
         cfg_options: CfgOptions(
             [
                 "rust_analyzer",
                 "true",
             ],
         ),
-        potential_cfg_options: None,
         env: Env {
             entries: {
                 "CARGO": "$CARGO$",
@@ -110,60 +150,68 @@
                 "CARGO_PKG_VERSION_PRE": "",
             },
         },
-        dependencies: [
-            Dependency {
-                crate_id: Idx::<CrateData>(0),
-                name: CrateName(
-                    "hello_world",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(4),
-                name: CrateName(
-                    "libc",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-        ],
-        origin: Local {
-            repo: None,
-            name: Some(
-                "hello-world",
+        ws_data: CrateWorkspaceData {
+            data_layout: Err(
+                "target_data_layout not loaded",
             ),
+            toolchain: None,
         },
-        is_proc_macro: false,
-        proc_macro_cwd: Some(
-            AbsPathBuf(
-                "$ROOT$hello-world",
-            ),
-        ),
     },
-    2: CrateData {
-        root_file_id: FileId(
-            3,
-        ),
-        edition: Edition2018,
-        version: Some(
-            "0.1.0",
-        ),
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "an_example",
+    2: CrateBuilder {
+        basic: CrateData {
+            root_file_id: FileId(
+                3,
+            ),
+            edition: Edition2018,
+            dependencies: [
+                Dependency {
+                    crate_id: Idx::<CrateBuilder>(0),
+                    name: CrateName(
+                        "hello_world",
+                    ),
+                    prelude: true,
+                    sysroot: false,
+                },
+                Dependency {
+                    crate_id: Idx::<CrateBuilder>(4),
+                    name: CrateName(
+                        "libc",
+                    ),
+                    prelude: true,
+                    sysroot: false,
+                },
+            ],
+            origin: Local {
+                repo: None,
+                name: Some(
+                    "hello-world",
                 ),
-                canonical_name: "an-example",
             },
-        ),
+            is_proc_macro: false,
+            proc_macro_cwd: AbsPathBuf(
+                "$ROOT$hello-world",
+            ),
+        },
+        extra: ExtraCrateData {
+            version: Some(
+                "0.1.0",
+            ),
+            display_name: Some(
+                CrateDisplayName {
+                    crate_name: CrateName(
+                        "an_example",
+                    ),
+                    canonical_name: "an-example",
+                },
+            ),
+            potential_cfg_options: None,
+        },
         cfg_options: CfgOptions(
             [
                 "rust_analyzer",
                 "true",
             ],
         ),
-        potential_cfg_options: None,
         env: Env {
             entries: {
                 "CARGO": "$CARGO$",
@@ -185,60 +233,68 @@
                 "CARGO_PKG_VERSION_PRE": "",
             },
         },
-        dependencies: [
-            Dependency {
-                crate_id: Idx::<CrateData>(0),
-                name: CrateName(
-                    "hello_world",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(4),
-                name: CrateName(
-                    "libc",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-        ],
-        origin: Local {
-            repo: None,
-            name: Some(
-                "hello-world",
+        ws_data: CrateWorkspaceData {
+            data_layout: Err(
+                "target_data_layout not loaded",
             ),
+            toolchain: None,
         },
-        is_proc_macro: false,
-        proc_macro_cwd: Some(
-            AbsPathBuf(
-                "$ROOT$hello-world",
-            ),
-        ),
     },
-    3: CrateData {
-        root_file_id: FileId(
-            4,
-        ),
-        edition: Edition2018,
-        version: Some(
-            "0.1.0",
-        ),
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "it",
+    3: CrateBuilder {
+        basic: CrateData {
+            root_file_id: FileId(
+                4,
+            ),
+            edition: Edition2018,
+            dependencies: [
+                Dependency {
+                    crate_id: Idx::<CrateBuilder>(0),
+                    name: CrateName(
+                        "hello_world",
+                    ),
+                    prelude: true,
+                    sysroot: false,
+                },
+                Dependency {
+                    crate_id: Idx::<CrateBuilder>(4),
+                    name: CrateName(
+                        "libc",
+                    ),
+                    prelude: true,
+                    sysroot: false,
+                },
+            ],
+            origin: Local {
+                repo: None,
+                name: Some(
+                    "hello-world",
                 ),
-                canonical_name: "it",
             },
-        ),
+            is_proc_macro: false,
+            proc_macro_cwd: AbsPathBuf(
+                "$ROOT$hello-world",
+            ),
+        },
+        extra: ExtraCrateData {
+            version: Some(
+                "0.1.0",
+            ),
+            display_name: Some(
+                CrateDisplayName {
+                    crate_name: CrateName(
+                        "it",
+                    ),
+                    canonical_name: "it",
+                },
+            ),
+            potential_cfg_options: None,
+        },
         cfg_options: CfgOptions(
             [
                 "rust_analyzer",
                 "true",
             ],
         ),
-        potential_cfg_options: None,
         env: Env {
             entries: {
                 "CARGO": "$CARGO$",
@@ -260,53 +316,58 @@
                 "CARGO_PKG_VERSION_PRE": "",
             },
         },
-        dependencies: [
-            Dependency {
-                crate_id: Idx::<CrateData>(0),
-                name: CrateName(
-                    "hello_world",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(4),
-                name: CrateName(
-                    "libc",
+        ws_data: CrateWorkspaceData {
+            data_layout: Err(
+                "target_data_layout not loaded",
+            ),
+            toolchain: None,
+        },
+    },
+    4: CrateBuilder {
+        basic: CrateData {
+            root_file_id: FileId(
+                5,
+            ),
+            edition: Edition2015,
+            dependencies: [],
+            origin: Library {
+                repo: Some(
+                    "https://github.com/rust-lang/libc",
                 ),
-                prelude: true,
-                sysroot: false,
+                name: "libc",
             },
-        ],
-        origin: Local {
-            repo: None,
-            name: Some(
-                "hello-world",
+            is_proc_macro: false,
+            proc_macro_cwd: AbsPathBuf(
+                "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
             ),
         },
-        is_proc_macro: false,
-        proc_macro_cwd: Some(
-            AbsPathBuf(
-                "$ROOT$hello-world",
+        extra: ExtraCrateData {
+            version: Some(
+                "0.2.98",
             ),
-        ),
-    },
-    4: CrateData {
-        root_file_id: FileId(
-            5,
-        ),
-        edition: Edition2015,
-        version: Some(
-            "0.2.98",
-        ),
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "libc",
+            display_name: Some(
+                CrateDisplayName {
+                    crate_name: CrateName(
+                        "libc",
+                    ),
+                    canonical_name: "libc",
+                },
+            ),
+            potential_cfg_options: Some(
+                CfgOptions(
+                    [
+                        "feature=align",
+                        "feature=const-extern-fn",
+                        "feature=default",
+                        "feature=extra_traits",
+                        "feature=rustc-dep-of-std",
+                        "feature=std",
+                        "feature=use_std",
+                        "true",
+                    ],
                 ),
-                canonical_name: "libc",
-            },
-        ),
+            ),
+        },
         cfg_options: CfgOptions(
             [
                 "feature=default",
@@ -314,20 +375,6 @@
                 "true",
             ],
         ),
-        potential_cfg_options: Some(
-            CfgOptions(
-                [
-                    "feature=align",
-                    "feature=const-extern-fn",
-                    "feature=default",
-                    "feature=extra_traits",
-                    "feature=rustc-dep-of-std",
-                    "feature=std",
-                    "feature=use_std",
-                    "true",
-                ],
-            ),
-        ),
         env: Env {
             entries: {
                 "CARGO": "$CARGO$",
@@ -349,18 +396,11 @@
                 "CARGO_PKG_VERSION_PRE": "",
             },
         },
-        dependencies: [],
-        origin: Library {
-            repo: Some(
-                "https://github.com/rust-lang/libc",
+        ws_data: CrateWorkspaceData {
+            data_layout: Err(
+                "target_data_layout not loaded",
             ),
-            name: "libc",
+            toolchain: None,
         },
-        is_proc_macro: false,
-        proc_macro_cwd: Some(
-            AbsPathBuf(
-                "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
-            ),
-        ),
     },
 }
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_cfg_groups.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_cfg_groups.txt
index 9b4be19c41c83..98fe598eb3a32 100644
--- a/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_cfg_groups.txt
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_cfg_groups.txt
@@ -1,429 +1,34 @@
 {
-    0: CrateData {
-        root_file_id: FileId(
-            1,
-        ),
-        edition: Edition2021,
-        version: None,
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "alloc",
-                ),
-                canonical_name: "alloc",
-            },
-        ),
-        cfg_options: CfgOptions(
-            [
-                "debug_assertions",
-                "miri",
-                "true",
-            ],
-        ),
-        potential_cfg_options: None,
-        env: Env {
-            entries: {},
-        },
-        dependencies: [
-            Dependency {
-                crate_id: Idx::<CrateData>(1),
-                name: CrateName(
-                    "core",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-        ],
-        origin: Lang(
-            Alloc,
-        ),
-        is_proc_macro: false,
-        proc_macro_cwd: None,
-    },
-    1: CrateData {
-        root_file_id: FileId(
-            2,
-        ),
-        edition: Edition2021,
-        version: None,
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "core",
-                ),
-                canonical_name: "core",
-            },
-        ),
-        cfg_options: CfgOptions(
-            [
-                "debug_assertions",
-                "miri",
-                "true",
-            ],
-        ),
-        potential_cfg_options: None,
-        env: Env {
-            entries: {},
-        },
-        dependencies: [],
-        origin: Lang(
-            Core,
-        ),
-        is_proc_macro: false,
-        proc_macro_cwd: None,
-    },
-    2: CrateData {
-        root_file_id: FileId(
-            3,
-        ),
-        edition: Edition2021,
-        version: None,
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "panic_abort",
-                ),
-                canonical_name: "panic_abort",
-            },
-        ),
-        cfg_options: CfgOptions(
-            [
-                "debug_assertions",
-                "miri",
-                "true",
-            ],
-        ),
-        potential_cfg_options: None,
-        env: Env {
-            entries: {},
-        },
-        dependencies: [],
-        origin: Lang(
-            Other,
-        ),
-        is_proc_macro: false,
-        proc_macro_cwd: None,
-    },
-    3: CrateData {
-        root_file_id: FileId(
-            4,
-        ),
-        edition: Edition2021,
-        version: None,
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "panic_unwind",
-                ),
-                canonical_name: "panic_unwind",
-            },
-        ),
-        cfg_options: CfgOptions(
-            [
-                "debug_assertions",
-                "miri",
-                "true",
-            ],
-        ),
-        potential_cfg_options: None,
-        env: Env {
-            entries: {},
-        },
-        dependencies: [],
-        origin: Lang(
-            Other,
-        ),
-        is_proc_macro: false,
-        proc_macro_cwd: None,
-    },
-    4: CrateData {
-        root_file_id: FileId(
-            5,
-        ),
-        edition: Edition2021,
-        version: None,
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "proc_macro",
-                ),
-                canonical_name: "proc_macro",
-            },
-        ),
-        cfg_options: CfgOptions(
-            [
-                "debug_assertions",
-                "miri",
-                "true",
-            ],
-        ),
-        potential_cfg_options: None,
-        env: Env {
-            entries: {},
-        },
-        dependencies: [
-            Dependency {
-                crate_id: Idx::<CrateData>(6),
-                name: CrateName(
-                    "std",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(1),
-                name: CrateName(
-                    "core",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-        ],
-        origin: Lang(
-            ProcMacro,
-        ),
-        is_proc_macro: false,
-        proc_macro_cwd: None,
-    },
-    5: CrateData {
-        root_file_id: FileId(
-            6,
-        ),
-        edition: Edition2021,
-        version: None,
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "profiler_builtins",
-                ),
-                canonical_name: "profiler_builtins",
-            },
-        ),
-        cfg_options: CfgOptions(
-            [
-                "debug_assertions",
-                "miri",
-                "true",
-            ],
-        ),
-        potential_cfg_options: None,
-        env: Env {
-            entries: {},
-        },
-        dependencies: [],
-        origin: Lang(
-            Other,
-        ),
-        is_proc_macro: false,
-        proc_macro_cwd: None,
-    },
-    6: CrateData {
-        root_file_id: FileId(
-            7,
-        ),
-        edition: Edition2021,
-        version: None,
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "std",
-                ),
-                canonical_name: "std",
-            },
-        ),
-        cfg_options: CfgOptions(
-            [
-                "debug_assertions",
-                "miri",
-                "true",
-            ],
-        ),
-        potential_cfg_options: None,
-        env: Env {
-            entries: {},
-        },
-        dependencies: [
-            Dependency {
-                crate_id: Idx::<CrateData>(0),
-                name: CrateName(
-                    "alloc",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(3),
-                name: CrateName(
-                    "panic_unwind",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(2),
-                name: CrateName(
-                    "panic_abort",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(1),
-                name: CrateName(
-                    "core",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(5),
-                name: CrateName(
-                    "profiler_builtins",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(9),
-                name: CrateName(
-                    "unwind",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(7),
-                name: CrateName(
-                    "std_detect",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(8),
-                name: CrateName(
-                    "test",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-        ],
-        origin: Lang(
-            Std,
-        ),
-        is_proc_macro: false,
-        proc_macro_cwd: None,
-    },
-    7: CrateData {
-        root_file_id: FileId(
-            8,
-        ),
-        edition: Edition2021,
-        version: None,
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "std_detect",
-                ),
-                canonical_name: "std_detect",
-            },
-        ),
-        cfg_options: CfgOptions(
-            [
-                "debug_assertions",
-                "miri",
-                "true",
-            ],
-        ),
-        potential_cfg_options: None,
-        env: Env {
-            entries: {},
-        },
-        dependencies: [],
-        origin: Lang(
-            Other,
-        ),
-        is_proc_macro: false,
-        proc_macro_cwd: None,
-    },
-    8: CrateData {
-        root_file_id: FileId(
-            9,
-        ),
-        edition: Edition2021,
-        version: None,
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "test",
+    0: CrateBuilder {
+        basic: CrateData {
+            root_file_id: FileId(
+                1,
+            ),
+            edition: Edition2018,
+            dependencies: [],
+            origin: Local {
+                repo: None,
+                name: Some(
+                    "hello_world",
                 ),
-                canonical_name: "test",
             },
-        ),
-        cfg_options: CfgOptions(
-            [
-                "debug_assertions",
-                "miri",
-                "true",
-            ],
-        ),
-        potential_cfg_options: None,
-        env: Env {
-            entries: {},
+            is_proc_macro: false,
+            proc_macro_cwd: AbsPathBuf(
+                "$ROOT$",
+            ),
         },
-        dependencies: [],
-        origin: Lang(
-            Test,
-        ),
-        is_proc_macro: false,
-        proc_macro_cwd: None,
-    },
-    9: CrateData {
-        root_file_id: FileId(
-            10,
-        ),
-        edition: Edition2021,
-        version: None,
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "unwind",
-                ),
-                canonical_name: "unwind",
-            },
-        ),
-        cfg_options: CfgOptions(
-            [
-                "debug_assertions",
-                "miri",
-                "true",
-            ],
-        ),
-        potential_cfg_options: None,
-        env: Env {
-            entries: {},
+        extra: ExtraCrateData {
+            version: None,
+            display_name: Some(
+                CrateDisplayName {
+                    crate_name: CrateName(
+                        "hello_world",
+                    ),
+                    canonical_name: "hello_world",
+                },
+            ),
+            potential_cfg_options: None,
         },
-        dependencies: [],
-        origin: Lang(
-            Other,
-        ),
-        is_proc_macro: false,
-        proc_macro_cwd: None,
-    },
-    10: CrateData {
-        root_file_id: FileId(
-            11,
-        ),
-        edition: Edition2018,
-        version: None,
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "hello_world",
-                ),
-                canonical_name: "hello_world",
-            },
-        ),
         cfg_options: CfgOptions(
             [
                 "group1_cfg=some_config",
@@ -434,75 +39,46 @@
                 "true",
             ],
         ),
-        potential_cfg_options: None,
         env: Env {
             entries: {},
         },
-        dependencies: [
-            Dependency {
-                crate_id: Idx::<CrateData>(1),
-                name: CrateName(
-                    "core",
-                ),
-                prelude: true,
-                sysroot: true,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(0),
-                name: CrateName(
-                    "alloc",
-                ),
-                prelude: false,
-                sysroot: true,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(6),
-                name: CrateName(
-                    "std",
-                ),
-                prelude: true,
-                sysroot: true,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(8),
-                name: CrateName(
-                    "test",
-                ),
-                prelude: false,
-                sysroot: true,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(4),
-                name: CrateName(
-                    "proc_macro",
-                ),
-                prelude: false,
-                sysroot: true,
-            },
-        ],
-        origin: Local {
-            repo: None,
-            name: Some(
-                "hello_world",
+        ws_data: CrateWorkspaceData {
+            data_layout: Err(
+                "test has no data layout",
             ),
+            toolchain: None,
         },
-        is_proc_macro: false,
-        proc_macro_cwd: None,
     },
-    11: CrateData {
-        root_file_id: FileId(
-            11,
-        ),
-        edition: Edition2018,
-        version: None,
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
+    1: CrateBuilder {
+        basic: CrateData {
+            root_file_id: FileId(
+                1,
+            ),
+            edition: Edition2018,
+            dependencies: [],
+            origin: Local {
+                repo: None,
+                name: Some(
                     "other_crate",
                 ),
-                canonical_name: "other_crate",
             },
-        ),
+            is_proc_macro: false,
+            proc_macro_cwd: AbsPathBuf(
+                "$ROOT$",
+            ),
+        },
+        extra: ExtraCrateData {
+            version: None,
+            display_name: Some(
+                CrateDisplayName {
+                    crate_name: CrateName(
+                        "other_crate",
+                    ),
+                    canonical_name: "other_crate",
+                },
+            ),
+            potential_cfg_options: None,
+        },
         cfg_options: CfgOptions(
             [
                 "group2_cfg=fourth_config",
@@ -513,59 +89,14 @@
                 "unrelated_cfg",
             ],
         ),
-        potential_cfg_options: None,
         env: Env {
             entries: {},
         },
-        dependencies: [
-            Dependency {
-                crate_id: Idx::<CrateData>(1),
-                name: CrateName(
-                    "core",
-                ),
-                prelude: true,
-                sysroot: true,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(0),
-                name: CrateName(
-                    "alloc",
-                ),
-                prelude: false,
-                sysroot: true,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(6),
-                name: CrateName(
-                    "std",
-                ),
-                prelude: true,
-                sysroot: true,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(8),
-                name: CrateName(
-                    "test",
-                ),
-                prelude: false,
-                sysroot: true,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(4),
-                name: CrateName(
-                    "proc_macro",
-                ),
-                prelude: false,
-                sysroot: true,
-            },
-        ],
-        origin: Local {
-            repo: None,
-            name: Some(
-                "other_crate",
+        ws_data: CrateWorkspaceData {
+            data_layout: Err(
+                "test has no data layout",
             ),
+            toolchain: None,
         },
-        is_proc_macro: false,
-        proc_macro_cwd: None,
     },
 }
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt
index 4c8e66e8e968b..0dc373b5b47ed 100644
--- a/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt
@@ -1,429 +1,34 @@
 {
-    0: CrateData {
-        root_file_id: FileId(
-            1,
-        ),
-        edition: Edition2021,
-        version: None,
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "alloc",
-                ),
-                canonical_name: "alloc",
-            },
-        ),
-        cfg_options: CfgOptions(
-            [
-                "debug_assertions",
-                "miri",
-                "true",
-            ],
-        ),
-        potential_cfg_options: None,
-        env: Env {
-            entries: {},
-        },
-        dependencies: [
-            Dependency {
-                crate_id: Idx::<CrateData>(1),
-                name: CrateName(
-                    "core",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-        ],
-        origin: Lang(
-            Alloc,
-        ),
-        is_proc_macro: false,
-        proc_macro_cwd: None,
-    },
-    1: CrateData {
-        root_file_id: FileId(
-            2,
-        ),
-        edition: Edition2021,
-        version: None,
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "core",
-                ),
-                canonical_name: "core",
-            },
-        ),
-        cfg_options: CfgOptions(
-            [
-                "debug_assertions",
-                "miri",
-                "true",
-            ],
-        ),
-        potential_cfg_options: None,
-        env: Env {
-            entries: {},
-        },
-        dependencies: [],
-        origin: Lang(
-            Core,
-        ),
-        is_proc_macro: false,
-        proc_macro_cwd: None,
-    },
-    2: CrateData {
-        root_file_id: FileId(
-            3,
-        ),
-        edition: Edition2021,
-        version: None,
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "panic_abort",
-                ),
-                canonical_name: "panic_abort",
-            },
-        ),
-        cfg_options: CfgOptions(
-            [
-                "debug_assertions",
-                "miri",
-                "true",
-            ],
-        ),
-        potential_cfg_options: None,
-        env: Env {
-            entries: {},
-        },
-        dependencies: [],
-        origin: Lang(
-            Other,
-        ),
-        is_proc_macro: false,
-        proc_macro_cwd: None,
-    },
-    3: CrateData {
-        root_file_id: FileId(
-            4,
-        ),
-        edition: Edition2021,
-        version: None,
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "panic_unwind",
-                ),
-                canonical_name: "panic_unwind",
-            },
-        ),
-        cfg_options: CfgOptions(
-            [
-                "debug_assertions",
-                "miri",
-                "true",
-            ],
-        ),
-        potential_cfg_options: None,
-        env: Env {
-            entries: {},
-        },
-        dependencies: [],
-        origin: Lang(
-            Other,
-        ),
-        is_proc_macro: false,
-        proc_macro_cwd: None,
-    },
-    4: CrateData {
-        root_file_id: FileId(
-            5,
-        ),
-        edition: Edition2021,
-        version: None,
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "proc_macro",
-                ),
-                canonical_name: "proc_macro",
-            },
-        ),
-        cfg_options: CfgOptions(
-            [
-                "debug_assertions",
-                "miri",
-                "true",
-            ],
-        ),
-        potential_cfg_options: None,
-        env: Env {
-            entries: {},
-        },
-        dependencies: [
-            Dependency {
-                crate_id: Idx::<CrateData>(6),
-                name: CrateName(
-                    "std",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(1),
-                name: CrateName(
-                    "core",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-        ],
-        origin: Lang(
-            ProcMacro,
-        ),
-        is_proc_macro: false,
-        proc_macro_cwd: None,
-    },
-    5: CrateData {
-        root_file_id: FileId(
-            6,
-        ),
-        edition: Edition2021,
-        version: None,
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "profiler_builtins",
-                ),
-                canonical_name: "profiler_builtins",
-            },
-        ),
-        cfg_options: CfgOptions(
-            [
-                "debug_assertions",
-                "miri",
-                "true",
-            ],
-        ),
-        potential_cfg_options: None,
-        env: Env {
-            entries: {},
-        },
-        dependencies: [],
-        origin: Lang(
-            Other,
-        ),
-        is_proc_macro: false,
-        proc_macro_cwd: None,
-    },
-    6: CrateData {
-        root_file_id: FileId(
-            7,
-        ),
-        edition: Edition2021,
-        version: None,
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "std",
-                ),
-                canonical_name: "std",
-            },
-        ),
-        cfg_options: CfgOptions(
-            [
-                "debug_assertions",
-                "miri",
-                "true",
-            ],
-        ),
-        potential_cfg_options: None,
-        env: Env {
-            entries: {},
-        },
-        dependencies: [
-            Dependency {
-                crate_id: Idx::<CrateData>(0),
-                name: CrateName(
-                    "alloc",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(3),
-                name: CrateName(
-                    "panic_unwind",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(2),
-                name: CrateName(
-                    "panic_abort",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(1),
-                name: CrateName(
-                    "core",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(5),
-                name: CrateName(
-                    "profiler_builtins",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(9),
-                name: CrateName(
-                    "unwind",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(7),
-                name: CrateName(
-                    "std_detect",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(8),
-                name: CrateName(
-                    "test",
-                ),
-                prelude: true,
-                sysroot: false,
-            },
-        ],
-        origin: Lang(
-            Std,
-        ),
-        is_proc_macro: false,
-        proc_macro_cwd: None,
-    },
-    7: CrateData {
-        root_file_id: FileId(
-            8,
-        ),
-        edition: Edition2021,
-        version: None,
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "std_detect",
-                ),
-                canonical_name: "std_detect",
-            },
-        ),
-        cfg_options: CfgOptions(
-            [
-                "debug_assertions",
-                "miri",
-                "true",
-            ],
-        ),
-        potential_cfg_options: None,
-        env: Env {
-            entries: {},
-        },
-        dependencies: [],
-        origin: Lang(
-            Other,
-        ),
-        is_proc_macro: false,
-        proc_macro_cwd: None,
-    },
-    8: CrateData {
-        root_file_id: FileId(
-            9,
-        ),
-        edition: Edition2021,
-        version: None,
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "test",
+    0: CrateBuilder {
+        basic: CrateData {
+            root_file_id: FileId(
+                1,
+            ),
+            edition: Edition2018,
+            dependencies: [],
+            origin: Local {
+                repo: None,
+                name: Some(
+                    "hello_world",
                 ),
-                canonical_name: "test",
             },
-        ),
-        cfg_options: CfgOptions(
-            [
-                "debug_assertions",
-                "miri",
-                "true",
-            ],
-        ),
-        potential_cfg_options: None,
-        env: Env {
-            entries: {},
+            is_proc_macro: false,
+            proc_macro_cwd: AbsPathBuf(
+                "$ROOT$",
+            ),
         },
-        dependencies: [],
-        origin: Lang(
-            Test,
-        ),
-        is_proc_macro: false,
-        proc_macro_cwd: None,
-    },
-    9: CrateData {
-        root_file_id: FileId(
-            10,
-        ),
-        edition: Edition2021,
-        version: None,
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "unwind",
-                ),
-                canonical_name: "unwind",
-            },
-        ),
-        cfg_options: CfgOptions(
-            [
-                "debug_assertions",
-                "miri",
-                "true",
-            ],
-        ),
-        potential_cfg_options: None,
-        env: Env {
-            entries: {},
+        extra: ExtraCrateData {
+            version: None,
+            display_name: Some(
+                CrateDisplayName {
+                    crate_name: CrateName(
+                        "hello_world",
+                    ),
+                    canonical_name: "hello_world",
+                },
+            ),
+            potential_cfg_options: None,
         },
-        dependencies: [],
-        origin: Lang(
-            Other,
-        ),
-        is_proc_macro: false,
-        proc_macro_cwd: None,
-    },
-    10: CrateData {
-        root_file_id: FileId(
-            11,
-        ),
-        edition: Edition2018,
-        version: None,
-        display_name: Some(
-            CrateDisplayName {
-                crate_name: CrateName(
-                    "hello_world",
-                ),
-                canonical_name: "hello_world",
-            },
-        ),
         cfg_options: CfgOptions(
             [
                 "rust_analyzer",
@@ -431,59 +36,14 @@
                 "true",
             ],
         ),
-        potential_cfg_options: None,
         env: Env {
             entries: {},
         },
-        dependencies: [
-            Dependency {
-                crate_id: Idx::<CrateData>(1),
-                name: CrateName(
-                    "core",
-                ),
-                prelude: true,
-                sysroot: true,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(0),
-                name: CrateName(
-                    "alloc",
-                ),
-                prelude: false,
-                sysroot: true,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(6),
-                name: CrateName(
-                    "std",
-                ),
-                prelude: true,
-                sysroot: true,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(8),
-                name: CrateName(
-                    "test",
-                ),
-                prelude: false,
-                sysroot: true,
-            },
-            Dependency {
-                crate_id: Idx::<CrateData>(4),
-                name: CrateName(
-                    "proc_macro",
-                ),
-                prelude: false,
-                sysroot: true,
-            },
-        ],
-        origin: Local {
-            repo: None,
-            name: Some(
-                "hello_world",
+        ws_data: CrateWorkspaceData {
+            data_layout: Err(
+                "test has no data layout",
             ),
+            toolchain: None,
         },
-        is_proc_macro: false,
-        proc_macro_cwd: None,
     },
 }
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/query-group-macro/Cargo.toml b/src/tools/rust-analyzer/crates/query-group-macro/Cargo.toml
new file mode 100644
index 0000000000000..8aeb262942319
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/query-group-macro/Cargo.toml
@@ -0,0 +1,22 @@
+[package]
+name = "query-group-macro"
+version = "0.0.0"
+repository.workspace = true
+description = "A macro mimicking the `#[salsa::query_group]` macro for migrating to new Salsa"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
+
+[lib]
+proc-macro = true
+
+[dependencies]
+proc-macro2 = "1.0"
+quote = "1.0"
+syn = { version = "2.0", features = ["full", "extra-traits", "visit-mut"] }
+
+[dev-dependencies]
+expect-test = "1.5.1"
+salsa.workspace = true
diff --git a/src/tools/rust-analyzer/crates/query-group-macro/src/lib.rs b/src/tools/rust-analyzer/crates/query-group-macro/src/lib.rs
new file mode 100644
index 0000000000000..f4f316c1ac1e3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/query-group-macro/src/lib.rs
@@ -0,0 +1,522 @@
+//! A macro that mimics the old Salsa-style `#[query_group]` macro.
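+//!
+//! A minimal usage sketch, adapted from the tests in this crate (trait and
+//! function names are illustrative):
+//!
+//! ```ignore
+//! use query_group_macro::query_group;
+//!
+//! #[query_group]
+//! pub trait ExampleDb: salsa::Database {
+//!     // An input query: also generates `set_input_string` and
+//!     // `set_input_string_with_durability` setters.
+//!     #[salsa::input]
+//!     fn input_string(&self) -> String;
+//!
+//!     // A tracked query dispatching to the free function `shout` below.
+//!     #[salsa::invoke_interned(shout)]
+//!     fn shout(&self, key: ()) -> String;
+//! }
+//!
+//! fn shout(db: &dyn ExampleDb, _key: ()) -> String {
+//!     db.input_string().to_uppercase()
+//! }
+//! ```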
+
+use core::fmt;
+use std::vec;
+
+use proc_macro::TokenStream;
+use proc_macro2::Span;
+use queries::{
+    GeneratedInputStruct, InputQuery, InputSetter, InputSetterWithDurability, Intern, Lookup,
+    Queries, SetterKind, TrackedQuery, Transparent,
+};
+use quote::{ToTokens, format_ident, quote};
+use syn::parse::{Parse, ParseStream};
+use syn::punctuated::Punctuated;
+use syn::spanned::Spanned;
+use syn::visit_mut::VisitMut;
+use syn::{
+    Attribute, FnArg, ItemTrait, Path, Token, TraitItem, TraitItemFn, parse_quote,
+    parse_quote_spanned,
+};
+
+mod queries;
+
+#[proc_macro_attribute]
+pub fn query_group(args: TokenStream, input: TokenStream) -> TokenStream {
+    match query_group_impl(args, input.clone()) {
+        Ok(tokens) => tokens,
+        Err(e) => token_stream_with_error(input, e),
+    }
+}
+
+#[derive(Debug)]
+struct InputStructField {
+    name: proc_macro2::TokenStream,
+    ty: proc_macro2::TokenStream,
+}
+
+impl fmt::Display for InputStructField {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}", self.name)
+    }
+}
+
+struct SalsaAttr {
+    name: String,
+    tts: TokenStream,
+    span: Span,
+}
+
+impl std::fmt::Debug for SalsaAttr {
+    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(fmt, "{:?}", self.name)
+    }
+}
+
+impl TryFrom<syn::Attribute> for SalsaAttr {
+    type Error = syn::Attribute;
+
+    fn try_from(attr: syn::Attribute) -> Result<SalsaAttr, syn::Attribute> {
+        if is_not_salsa_attr_path(attr.path()) {
+            return Err(attr);
+        }
+
+        let span = attr.span();
+
+        let name = attr.path().segments[1].ident.to_string();
+        let tts = match attr.meta {
+            syn::Meta::Path(path) => path.into_token_stream(),
+            syn::Meta::List(ref list) => {
+                let tts = list
+                    .into_token_stream()
+                    .into_iter()
+                    .skip(attr.path().to_token_stream().into_iter().count());
+                proc_macro2::TokenStream::from_iter(tts)
+            }
+            syn::Meta::NameValue(nv) => nv.into_token_stream(),
+        }
+        .into();
+
+        Ok(SalsaAttr { name, tts, span })
+    }
+}
+
+fn is_not_salsa_attr_path(path: &syn::Path) -> bool {
+    path.segments.first().map(|s| s.ident != "salsa").unwrap_or(true) || path.segments.len() != 2
+}
+
+fn filter_attrs(attrs: Vec<Attribute>) -> (Vec<Attribute>, Vec<SalsaAttr>) {
+    let mut other = vec![];
+    let mut salsa = vec![];
+    // Leave non-salsa attributes untouched. These are
+    // attributes that don't start with `salsa::` or don't have
+    // exactly two segments in their path.
+    for attr in attrs {
+        match SalsaAttr::try_from(attr) {
+            Ok(it) => salsa.push(it),
+            Err(it) => other.push(it),
+        }
+    }
+    (other, salsa)
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+enum QueryKind {
+    Input,
+    Tracked,
+    TrackedWithSalsaStruct,
+    Transparent,
+    Interned,
+}
+
+#[derive(Default, Debug, Clone)]
+struct Cycle {
+    cycle_fn: Option<(syn::Ident, Path)>,
+    cycle_initial: Option<(syn::Ident, Path)>,
+    cycle_result: Option<(syn::Ident, Path)>,
+}
+
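+// Parses the contents of `#[salsa::cycle(...)]`: a comma-separated list of
+// `name = path` options, where `name` is `cycle_fn`, `cycle_initial`, or
+// `cycle_result`, each given at most once.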
+impl Parse for Cycle {
+    fn parse(input: ParseStream<'_>) -> syn::Result<Self> {
+        let options = Punctuated::<Option, Token![,]>::parse_terminated(input)?;
+        let mut cycle_fn = None;
+        let mut cycle_initial = None;
+        let mut cycle_result = None;
+        for option in options {
+            let name = option.name.to_string();
+            match &*name {
+                "cycle_fn" => {
+                    if cycle_fn.is_some() {
+                        return Err(syn::Error::new_spanned(&option.name, "duplicate option"));
+                    }
+                    cycle_fn = Some((option.name, option.value));
+                }
+                "cycle_initial" => {
+                    if cycle_initial.is_some() {
+                        return Err(syn::Error::new_spanned(&option.name, "duplicate option"));
+                    }
+                    cycle_initial = Some((option.name, option.value));
+                }
+                "cycle_result" => {
+                    if cycle_result.is_some() {
+                        return Err(syn::Error::new_spanned(&option.name, "duplicate option"));
+                    }
+                    cycle_result = Some((option.name, option.value));
+                }
+                _ => {
+                    return Err(syn::Error::new_spanned(
+                        &option.name,
+                        "unknown cycle option. Accepted values: `cycle_result`, `cycle_fn`, `cycle_initial`",
+                    ));
+                }
+            }
+        }
+        return Ok(Self { cycle_fn, cycle_initial, cycle_result });
+
+        struct Option {
+            name: syn::Ident,
+            value: Path,
+        }
+
+        impl Parse for Option {
+            fn parse(input: ParseStream<'_>) -> syn::Result<Self> {
+                let name = input.parse()?;
+                input.parse::<Token![=]>()?;
+                let value = input.parse()?;
+                Ok(Self { name, value })
+            }
+        }
+    }
+}
+
+pub(crate) fn query_group_impl(
+    _args: proc_macro::TokenStream,
+    input: proc_macro::TokenStream,
+) -> Result<proc_macro::TokenStream, syn::Error> {
+    let mut item_trait = syn::parse::<ItemTrait>(input)?;
+
+    let supertraits = &item_trait.supertraits;
+
+    let db_attr: Attribute = parse_quote! {
+        #[salsa::db]
+    };
+    item_trait.attrs.push(db_attr);
+
+    let trait_name_ident = &item_trait.ident.clone();
+    let input_struct_name = format_ident!("{}Data", trait_name_ident);
+    let create_data_ident = format_ident!("create_data_{}", trait_name_ident);
+
+    let mut input_struct_fields: Vec<InputStructField> = vec![];
+    let mut trait_methods = vec![];
+    let mut setter_trait_methods = vec![];
+    let mut lookup_signatures = vec![];
+    let mut lookup_methods = vec![];
+
+    for item in &mut item_trait.items {
+        if let syn::TraitItem::Fn(method) = item {
+            let method_name = &method.sig.ident;
+            let signature = &method.sig;
+
+            let (_attrs, salsa_attrs) = filter_attrs(method.attrs.clone());
+
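+            // Unless one of the salsa attributes below overrides it, a method
+            // is treated as a tracked query keyed by a salsa struct.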
+            let mut query_kind = QueryKind::TrackedWithSalsaStruct;
+            let mut invoke = None;
+            let mut cycle = None;
+            let mut interned_struct_path = None;
+            let mut lru = None;
+
+            let params: Vec<FnArg> = signature.inputs.clone().into_iter().collect();
+            let pat_and_tys = params
+                .into_iter()
+                .filter(|fn_arg| matches!(fn_arg, FnArg::Typed(_)))
+                .map(|fn_arg| match fn_arg {
+                    FnArg::Typed(pat_type) => pat_type.clone(),
+                    FnArg::Receiver(_) => unreachable!("this should have been filtered out"),
+                })
+                .collect::<Vec<syn::PatType>>();
+
+            for SalsaAttr { name, tts, span } in salsa_attrs {
+                match name.as_str() {
+                    "cycle" => {
+                        let c = syn::parse::<Parenthesized<Cycle>>(tts)?;
+                        cycle = Some(c.0);
+                    }
+                    "input" => {
+                        if !pat_and_tys.is_empty() {
+                            return Err(syn::Error::new(
+                                span,
+                                "input methods cannot have a parameter",
+                            ));
+                        }
+                        query_kind = QueryKind::Input;
+                    }
+                    "interned" => {
+                        let syn::ReturnType::Type(_, ty) = &signature.output else {
+                            return Err(syn::Error::new(
+                                span,
+                                "interned queries must have a return type",
+                            ));
+                        };
+                        let syn::Type::Path(path) = &**ty else {
+                            return Err(syn::Error::new(
+                                span,
+                                "interned queries must have a return type",
+                            ));
+                        };
+                        interned_struct_path = Some(path.path.clone());
+                        query_kind = QueryKind::Interned;
+                    }
+                    "invoke_interned" => {
+                        let path = syn::parse::<Parenthesized<Path>>(tts)?;
+                        invoke = Some(path.0.clone());
+                        query_kind = QueryKind::Tracked;
+                    }
+                    "invoke" => {
+                        let path = syn::parse::<Parenthesized<Path>>(tts)?;
+                        invoke = Some(path.0.clone());
+                        if query_kind != QueryKind::Transparent {
+                            query_kind = QueryKind::TrackedWithSalsaStruct;
+                        }
+                    }
+                    "tracked" if method.default.is_some() => {
+                        query_kind = QueryKind::TrackedWithSalsaStruct;
+                    }
+                    "lru" => {
+                        let lru_count = syn::parse::<Parenthesized<syn::LitInt>>(tts)?;
+                        let lru_count = lru_count.0.base10_parse::<u32>()?;
+
+                        lru = Some(lru_count);
+                    }
+                    "transparent" => {
+                        query_kind = QueryKind::Transparent;
+                    }
+                    _ => return Err(syn::Error::new(span, format!("unknown attribute `{name}`"))),
+                }
+            }
+
+            let syn::ReturnType::Type(_, return_ty) = signature.output.clone() else {
+                return Err(syn::Error::new(signature.span(), "Queries must have a return type"));
+            };
+
+            if let syn::Type::Path(ref ty_path) = *return_ty {
+                if matches!(query_kind, QueryKind::Input) {
+                    let field = InputStructField {
+                        name: method_name.to_token_stream(),
+                        ty: ty_path.path.to_token_stream(),
+                    };
+
+                    input_struct_fields.push(field);
+                }
+            }
+
+            if let Some(block) = &mut method.default {
+                SelfToDbRewriter.visit_block_mut(block);
+            }
+
+            match (query_kind, invoke) {
+                // input
+                (QueryKind::Input, None) => {
+                    let query = InputQuery {
+                        signature: method.sig.clone(),
+                        create_data_ident: create_data_ident.clone(),
+                    };
+                    let value = Queries::InputQuery(query);
+                    trait_methods.push(value);
+
+                    let setter = InputSetter {
+                        signature: method.sig.clone(),
+                        return_type: *return_ty.clone(),
+                        create_data_ident: create_data_ident.clone(),
+                    };
+                    setter_trait_methods.push(SetterKind::Plain(setter));
+
+                    let setter = InputSetterWithDurability {
+                        signature: method.sig.clone(),
+                        return_type: *return_ty.clone(),
+                        create_data_ident: create_data_ident.clone(),
+                    };
+                    setter_trait_methods.push(SetterKind::WithDurability(setter));
+                }
+                (QueryKind::Interned, None) => {
+                    let interned_struct_path = interned_struct_path.unwrap();
+                    let method = Intern {
+                        signature: signature.clone(),
+                        pat_and_tys: pat_and_tys.clone(),
+                        interned_struct_path: interned_struct_path.clone(),
+                    };
+
+                    trait_methods.push(Queries::Intern(method));
+
+                    let mut method = Lookup {
+                        signature: signature.clone(),
+                        pat_and_tys: pat_and_tys.clone(),
+                        return_ty: *return_ty,
+                        interned_struct_path,
+                    };
+                    method.prepare_signature();
+
+                    lookup_signatures
+                        .push(TraitItem::Fn(make_trait_method(method.signature.clone())));
+                    lookup_methods.push(method);
+                }
+                // Tracked query; it might or might not have an explicit invoke path.
+                (QueryKind::Tracked, invoke) => {
+                    let method = TrackedQuery {
+                        trait_name: trait_name_ident.clone(),
+                        generated_struct: Some(GeneratedInputStruct {
+                            input_struct_name: input_struct_name.clone(),
+                            create_data_ident: create_data_ident.clone(),
+                        }),
+                        signature: signature.clone(),
+                        pat_and_tys: pat_and_tys.clone(),
+                        invoke,
+                        cycle,
+                        lru,
+                        default: method.default.take(),
+                    };
+
+                    trait_methods.push(Queries::TrackedQuery(method));
+                }
+                (QueryKind::TrackedWithSalsaStruct, invoke) => {
+                    let method = TrackedQuery {
+                        trait_name: trait_name_ident.clone(),
+                        generated_struct: None,
+                        signature: signature.clone(),
+                        pat_and_tys: pat_and_tys.clone(),
+                        invoke,
+                        cycle,
+                        lru,
+                        default: method.default.take(),
+                    };
+
+                    trait_methods.push(Queries::TrackedQuery(method))
+                }
+                (QueryKind::Transparent, invoke) => {
+                    let method = Transparent {
+                        signature: method.sig.clone(),
+                        pat_and_tys: pat_and_tys.clone(),
+                        invoke,
+                        default: method.default.take(),
+                    };
+                    trait_methods.push(Queries::Transparent(method));
+                }
+                // error/invalid constructions
+                (QueryKind::Interned, Some(path)) => {
+                    return Err(syn::Error::new(
+                        path.span(),
+                        "Interned queries cannot be used with an `#[invoke]`".to_string(),
+                    ));
+                }
+                (QueryKind::Input, Some(path)) => {
+                    return Err(syn::Error::new(
+                        path.span(),
+                        "Inputs cannot be used with an `#[invoke]`".to_string(),
+                    ));
+                }
+            }
+        }
+    }
+
+    let fields = input_struct_fields
+        .into_iter()
+        .map(|input| {
+            let name = input.name;
+            let ret = input.ty;
+            quote! { #name: Option<#ret> }
+        })
+        .collect::<Vec<proc_macro2::TokenStream>>();
+
+    let input_struct = quote! {
+        #[salsa::input]
+        pub(crate) struct #input_struct_name {
+            #(#fields),*
+        }
+    };
+
+    let field_params = std::iter::repeat_n(quote! { None }, fields.len())
+        .collect::<Vec<proc_macro2::TokenStream>>();
+
+    let create_data_method = quote! {
+        #[allow(non_snake_case)]
+        #[salsa::tracked]
+        fn #create_data_ident(db: &dyn #trait_name_ident) -> #input_struct_name {
+            #input_struct_name::new(db, #(#field_params),*)
+        }
+    };
+
+    let mut setter_signatures = vec![];
+    let mut setter_methods = vec![];
+    for trait_item in setter_trait_methods
+        .iter()
+        .map(|method| method.to_token_stream())
+        .map(|tokens| syn::parse2::<syn::TraitItemFn>(tokens).unwrap())
+    {
+        let mut methods_sans_body = trait_item.clone();
+        methods_sans_body.default = None;
+        methods_sans_body.semi_token = Some(syn::Token![;](trait_item.span()));
+
+        setter_signatures.push(TraitItem::Fn(methods_sans_body));
+        setter_methods.push(TraitItem::Fn(trait_item));
+    }
+
+    item_trait.items.append(&mut setter_signatures);
+    item_trait.items.append(&mut lookup_signatures);
+
+    let trait_impl = quote! {
+        #[salsa::db]
+        impl<DB> #trait_name_ident for DB
+        where
+            DB: #supertraits,
+        {
+            #(#trait_methods)*
+
+            #(#setter_methods)*
+
+            #(#lookup_methods)*
+        }
+    };
+    RemoveAttrsFromTraitMethods.visit_item_trait_mut(&mut item_trait);
+
+    let out = quote! {
+        #item_trait
+
+        #trait_impl
+
+        #input_struct
+
+        #create_data_method
+    }
+    .into();
+
+    Ok(out)
+}
+
+/// Parses a `T` wrapped in parentheses, e.g. the `(path)` in `#[salsa::invoke(path)]`.
+pub(crate) struct Parenthesized<T>(pub(crate) T);
+
+impl<T> syn::parse::Parse for Parenthesized<T>
+where
+    T: syn::parse::Parse,
+{
+    fn parse(input: ParseStream<'_>) -> syn::Result<Self> {
+        let content;
+        syn::parenthesized!(content in input);
+        content.parse::<T>().map(Parenthesized)
+    }
+}
+
+fn make_trait_method(sig: syn::Signature) -> TraitItemFn {
+    TraitItemFn {
+        attrs: vec![],
+        sig: sig.clone(),
+        semi_token: Some(syn::Token![;](sig.span())),
+        default: None,
+    }
+}
+
+struct RemoveAttrsFromTraitMethods;
+
+impl VisitMut for RemoveAttrsFromTraitMethods {
+    fn visit_item_trait_mut(&mut self, i: &mut syn::ItemTrait) {
+        for item in &mut i.items {
+            if let TraitItem::Fn(trait_item_fn) = item {
+                trait_item_fn.attrs = vec![];
+            }
+        }
+    }
+}
+
+pub(crate) fn token_stream_with_error(mut tokens: TokenStream, error: syn::Error) -> TokenStream {
+    tokens.extend(TokenStream::from(error.into_compile_error()));
+    tokens
+}
+
+struct SelfToDbRewriter;
+
+impl VisitMut for SelfToDbRewriter {
+    fn visit_expr_path_mut(&mut self, i: &mut syn::ExprPath) {
+        if i.path.is_ident("self") {
+            i.path = parse_quote_spanned!(i.path.span() => db);
+        }
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/query-group-macro/src/queries.rs b/src/tools/rust-analyzer/crates/query-group-macro/src/queries.rs
new file mode 100644
index 0000000000000..d4d40588bfc73
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/query-group-macro/src/queries.rs
@@ -0,0 +1,354 @@
+//! The IR of the `#[query_group]` macro.
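+//!
+//! Each type here corresponds to one way a `#[query_group]` trait method can
+//! be lowered: tracked queries, input queries (plus their generated setters),
+//! interned queries with their `lookup_*` counterparts, and transparent
+//! (unmemoized) queries.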
+
+use quote::{ToTokens, format_ident, quote, quote_spanned};
+use syn::{FnArg, Ident, PatType, Path, Receiver, ReturnType, Type, parse_quote, spanned::Spanned};
+
+use crate::Cycle;
+
+pub(crate) struct TrackedQuery {
+    pub(crate) trait_name: Ident,
+    pub(crate) signature: syn::Signature,
+    pub(crate) pat_and_tys: Vec<PatType>,
+    pub(crate) invoke: Option<Path>,
+    pub(crate) default: Option<syn::Block>,
+    pub(crate) cycle: Option<Cycle>,
+    pub(crate) lru: Option<u32>,
+    pub(crate) generated_struct: Option<GeneratedInputStruct>,
+}
+
+pub(crate) struct GeneratedInputStruct {
+    pub(crate) input_struct_name: Ident,
+    pub(crate) create_data_ident: Ident,
+}
+
+impl ToTokens for TrackedQuery {
+    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
+        let sig = &self.signature;
+        let trait_name = &self.trait_name;
+
+        let ret = &sig.output;
+
+        let invoke = match &self.invoke {
+            Some(path) => path.to_token_stream(),
+            None => sig.ident.to_token_stream(),
+        };
+
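+        // The generated trait method defines a nested `#[salsa::tracked]`
+        // free function named `<query>_shim` and immediately calls it,
+        // passing `self` as the database (plus the generated input struct,
+        // when there is one).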
+        let fn_ident = &sig.ident;
+        let shim: Ident = format_ident!("{}_shim", fn_ident);
+
+        let options = self
+            .cycle
+            .as_ref()
+            .map(|Cycle { cycle_fn, cycle_initial, cycle_result }| {
+                let cycle_fn = cycle_fn.as_ref().map(|(ident, path)| quote!(#ident=#path));
+                let cycle_initial =
+                    cycle_initial.as_ref().map(|(ident, path)| quote!(#ident=#path));
+                let cycle_result = cycle_result.as_ref().map(|(ident, path)| quote!(#ident=#path));
+                let options = cycle_fn.into_iter().chain(cycle_initial).chain(cycle_result);
+                quote!(#(#options),*)
+            })
+            .into_iter()
+            .chain(self.lru.map(|lru| quote!(lru = #lru)));
+        let annotation = quote!(#[salsa::tracked( #(#options),* )]);
+
+        let pat_and_tys = &self.pat_and_tys;
+        let params = self
+            .pat_and_tys
+            .iter()
+            .map(|pat_type| pat_type.pat.clone())
+            .collect::<Vec<Box<syn::Pat>>>();
+
+        let invoke_block = match &self.default {
+            Some(default) => quote! { #default },
+            None => {
+                let invoke_params: proc_macro2::TokenStream = quote! {db, #(#params),*};
+                quote_spanned! { invoke.span() =>  {#invoke(#invoke_params)}}
+            }
+        };
+
+        let method = match &self.generated_struct {
+            Some(generated_struct) => {
+                let input_struct_name = &generated_struct.input_struct_name;
+                let create_data_ident = &generated_struct.create_data_ident;
+
+                quote! {
+                    #sig {
+                        #annotation
+                        fn #shim(
+                            db: &dyn #trait_name,
+                            _input: #input_struct_name,
+                            #(#pat_and_tys),*
+                        ) #ret
+                            #invoke_block
+                        #shim(self, #create_data_ident(self), #(#params),*)
+                    }
+                }
+            }
+            None => {
+                quote! {
+                    #sig {
+                        #annotation
+                        fn #shim(
+                            db: &dyn #trait_name,
+                            #(#pat_and_tys),*
+                        ) #ret
+                            #invoke_block
+
+                        #shim(self, #(#params),*)
+                    }
+                }
+            }
+        };
+
+        method.to_tokens(tokens);
+    }
+}
+
+pub(crate) struct InputQuery {
+    pub(crate) signature: syn::Signature,
+    pub(crate) create_data_ident: Ident,
+}
+
+impl ToTokens for InputQuery {
+    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
+        let sig = &self.signature;
+        let fn_ident = &sig.ident;
+        let create_data_ident = &self.create_data_ident;
+
+        let method = quote! {
+            #sig {
+                let data = #create_data_ident(self);
+                data.#fn_ident(self).unwrap()
+            }
+        };
+        method.to_tokens(tokens);
+    }
+}
+
+pub(crate) struct InputSetter {
+    pub(crate) signature: syn::Signature,
+    pub(crate) return_type: syn::Type,
+    pub(crate) create_data_ident: Ident,
+}
+
+impl ToTokens for InputSetter {
+    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
+        let sig = &mut self.signature.clone();
+
+        let ty = &self.return_type;
+        let fn_ident = &sig.ident;
+        let create_data_ident = &self.create_data_ident;
+
+        let setter_ident = format_ident!("set_{}", fn_ident);
+        sig.ident = setter_ident.clone();
+
+        let value_argument: PatType = parse_quote!(__value: #ty);
+        sig.inputs.push(FnArg::Typed(value_argument.clone()));
+
+        // make `&self` `&mut self` instead.
+        let mut_receiver: Receiver = parse_quote!(&mut self);
+        if let Some(og) = sig.inputs.first_mut() {
+            *og = FnArg::Receiver(mut_receiver)
+        }
+
+        // remove the return value.
+        sig.output = ReturnType::Default;
+
+        let value = &value_argument.pat;
+        let method = quote! {
+            #sig {
+                use salsa::Setter;
+                let data = #create_data_ident(self);
+                data.#setter_ident(self).to(Some(#value));
+            }
+        };
+        method.to_tokens(tokens);
+    }
+}
+
+pub(crate) struct InputSetterWithDurability {
+    pub(crate) signature: syn::Signature,
+    pub(crate) return_type: syn::Type,
+    pub(crate) create_data_ident: Ident,
+}
+
+impl ToTokens for InputSetterWithDurability {
+    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
+        let sig = &mut self.signature.clone();
+
+        let ty = &self.return_type;
+        let fn_ident = &sig.ident;
+        let setter_ident = format_ident!("set_{}", fn_ident);
+
+        let create_data_ident = &self.create_data_ident;
+
+        sig.ident = format_ident!("set_{}_with_durability", fn_ident);
+
+        let value_argument: PatType = parse_quote!(__value: #ty);
+        sig.inputs.push(FnArg::Typed(value_argument.clone()));
+
+        let durability_argument: PatType = parse_quote!(durability: salsa::Durability);
+        sig.inputs.push(FnArg::Typed(durability_argument.clone()));
+
+        // make `&self` `&mut self` instead.
+        let mut_receiver: Receiver = parse_quote!(&mut self);
+        if let Some(og) = sig.inputs.first_mut() {
+            *og = FnArg::Receiver(mut_receiver)
+        }
+
+        // remove the return value.
+        sig.output = ReturnType::Default;
+
+        let value = &value_argument.pat;
+        let durability = &durability_argument.pat;
+        let method = quote! {
+            #sig {
+                use salsa::Setter;
+                let data = #create_data_ident(self);
+                data.#setter_ident(self)
+                    .with_durability(#durability)
+                    .to(Some(#value));
+            }
+        };
+        method.to_tokens(tokens);
+    }
+}
+
+pub(crate) enum SetterKind {
+    Plain(InputSetter),
+    WithDurability(InputSetterWithDurability),
+}
+
+impl ToTokens for SetterKind {
+    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
+        match self {
+            SetterKind::Plain(input_setter) => input_setter.to_tokens(tokens),
+            SetterKind::WithDurability(input_setter_with_durability) => {
+                input_setter_with_durability.to_tokens(tokens)
+            }
+        }
+    }
+}
+
+pub(crate) struct Transparent {
+    pub(crate) signature: syn::Signature,
+    pub(crate) pat_and_tys: Vec<PatType>,
+    pub(crate) invoke: Option<Path>,
+    pub(crate) default: Option<syn::Block>,
+}
+
+impl ToTokens for Transparent {
+    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
+        let sig = &self.signature;
+
+        let ty = self
+            .pat_and_tys
+            .iter()
+            .map(|pat_type| pat_type.pat.clone())
+            .collect::<Vec<Box<syn::Pat>>>();
+
+        let invoke = match &self.invoke {
+            Some(path) => path.to_token_stream(),
+            None => sig.ident.to_token_stream(),
+        };
+
+        let method = match &self.default {
+            Some(default) => quote! {
+                #sig { let db = self; #default }
+            },
+            None => quote! {
+                #sig {
+                    #invoke(self, #(#ty),*)
+                }
+            },
+        };
+
+        method.to_tokens(tokens);
+    }
+}
+pub(crate) struct Intern {
+    pub(crate) signature: syn::Signature,
+    pub(crate) pat_and_tys: Vec<PatType>,
+    pub(crate) interned_struct_path: Path,
+}
+
+impl ToTokens for Intern {
+    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
+        let sig = &self.signature;
+
+        let ty = self.pat_and_tys.to_vec();
+
+        let interned_pat = ty.first().expect("at least one pat; this is a bug");
+        let interned_pat = &interned_pat.pat;
+
+        let wrapper_struct = self.interned_struct_path.to_token_stream();
+
+        let method = quote! {
+            #sig {
+                #wrapper_struct::new(self, #interned_pat)
+            }
+        };
+
+        method.to_tokens(tokens);
+    }
+}
+
+pub(crate) struct Lookup {
+    pub(crate) signature: syn::Signature,
+    pub(crate) pat_and_tys: Vec<PatType>,
+    pub(crate) return_ty: Type,
+    pub(crate) interned_struct_path: Path,
+}
+
+impl Lookup {
+    pub(crate) fn prepare_signature(&mut self) {
+        let sig = &self.signature;
+
+        let ident = format_ident!("lookup_{}", sig.ident);
+
+        let ty = self.pat_and_tys.to_vec();
+
+        let interned_key = &self.return_ty;
+
+        let interned_pat = ty.first().expect("at least one pat; this is a bug");
+        let interned_return_ty = &interned_pat.ty;
+
+        self.signature = parse_quote!(
+            fn #ident(&self, id: #interned_key) -> #interned_return_ty
+        );
+    }
+}
+
+impl ToTokens for Lookup {
+    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
+        let sig = &self.signature;
+
+        let wrapper_struct = self.interned_struct_path.to_token_stream();
+        let method = quote! {
+            #sig {
+                #wrapper_struct::ingredient(self).data(self.as_dyn_database(), id.as_id()).0.clone()
+            }
+        };
+
+        method.to_tokens(tokens);
+    }
+}
+
+#[allow(clippy::large_enum_variant)]
+pub(crate) enum Queries {
+    TrackedQuery(TrackedQuery),
+    InputQuery(InputQuery),
+    Intern(Intern),
+    Transparent(Transparent),
+}
+
+impl ToTokens for Queries {
+    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
+        match self {
+            Queries::TrackedQuery(tracked_query) => tracked_query.to_tokens(tokens),
+            Queries::InputQuery(input_query) => input_query.to_tokens(tokens),
+            Queries::Transparent(transparent) => transparent.to_tokens(tokens),
+            Queries::Intern(intern) => intern.to_tokens(tokens),
+        }
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/query-group-macro/tests/arity.rs b/src/tools/rust-analyzer/crates/query-group-macro/tests/arity.rs
new file mode 100644
index 0000000000000..f1b29612a173f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/query-group-macro/tests/arity.rs
@@ -0,0 +1,32 @@
+use query_group_macro::query_group;
+
+#[query_group]
+pub trait ArityDb: salsa::Database {
+    #[salsa::invoke_interned(one)]
+    fn one(&self, a: ()) -> String;
+
+    #[salsa::invoke_interned(two)]
+    fn two(&self, a: (), b: ()) -> String;
+
+    #[salsa::invoke_interned(three)]
+    fn three(&self, a: (), b: (), c: ()) -> String;
+
+    #[salsa::invoke_interned(none)]
+    fn none(&self) -> String;
+}
+
+fn one(_db: &dyn ArityDb, _a: ()) -> String {
+    String::new()
+}
+
+fn two(_db: &dyn ArityDb, _a: (), _b: ()) -> String {
+    String::new()
+}
+
+fn three(_db: &dyn ArityDb, _a: (), _b: (), _c: ()) -> String {
+    String::new()
+}
+
+fn none(_db: &dyn ArityDb) -> String {
+    String::new()
+}
diff --git a/src/tools/rust-analyzer/crates/query-group-macro/tests/hello_world.rs b/src/tools/rust-analyzer/crates/query-group-macro/tests/hello_world.rs
new file mode 100644
index 0000000000000..b0aec8dc53b28
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/query-group-macro/tests/hello_world.rs
@@ -0,0 +1,129 @@
+use expect_test::expect;
+use query_group_macro::query_group;
+
+mod logger_db;
+use logger_db::LoggerDb;
+
+#[query_group]
+pub trait HelloWorldDatabase: salsa::Database {
+    // input query
+    // (no params)
+    #[salsa::input]
+    fn input_string(&self) -> String;
+
+    // unadorned query
+    #[salsa::invoke_interned(length_query)]
+    fn length_query(&self, key: ()) -> usize;
+
+    // unadorned query
+    fn length_query_with_no_params(&self) -> usize;
+
+    // renamed/invoke query
+    #[salsa::invoke_interned(invoke_length_query_actual)]
+    fn invoke_length_query(&self, key: ()) -> usize;
+
+    // not a query; should not be invoked
+    #[salsa::transparent]
+    fn transparent_length(&self, key: ()) -> usize;
+
+    #[salsa::transparent]
+    #[salsa::invoke_interned(transparent_and_invoke_length_actual)]
+    fn transparent_and_invoke_length(&self, key: ()) -> usize;
+}
+
+fn length_query(db: &dyn HelloWorldDatabase, _key: ()) -> usize {
+    db.input_string().len()
+}
+
+fn length_query_with_no_params(db: &dyn HelloWorldDatabase) -> usize {
+    db.input_string().len()
+}
+
+fn invoke_length_query_actual(db: &dyn HelloWorldDatabase, _key: ()) -> usize {
+    db.input_string().len()
+}
+
+fn transparent_length(db: &dyn HelloWorldDatabase, _key: ()) -> usize {
+    db.input_string().len()
+}
+
+fn transparent_and_invoke_length_actual(db: &dyn HelloWorldDatabase, _key: ()) -> usize {
+    db.input_string().len()
+}
+
+#[test]
+fn unadorned_query() {
+    let mut db = LoggerDb::default();
+
+    db.set_input_string(String::from("Hello, world!"));
+    let len = db.length_query(());
+
+    assert_eq!(len, 13);
+    db.assert_logs(expect![[r#"
+        [
+            "salsa_event(WillCheckCancellation)",
+            "salsa_event(WillExecute { database_key: create_data_HelloWorldDatabase(Id(0)) })",
+            "salsa_event(WillCheckCancellation)",
+            "salsa_event(DidValidateMemoizedValue { database_key: create_data_HelloWorldDatabase(Id(0)) })",
+            "salsa_event(WillCheckCancellation)",
+            "salsa_event(WillExecute { database_key: length_query_shim(Id(800)) })",
+            "salsa_event(WillCheckCancellation)",
+        ]"#]]);
+}
+
+#[test]
+fn invoke_query() {
+    let mut db = LoggerDb::default();
+
+    db.set_input_string(String::from("Hello, world!"));
+    let len = db.invoke_length_query(());
+
+    assert_eq!(len, 13);
+    db.assert_logs(expect![[r#"
+        [
+            "salsa_event(WillCheckCancellation)",
+            "salsa_event(WillExecute { database_key: create_data_HelloWorldDatabase(Id(0)) })",
+            "salsa_event(WillCheckCancellation)",
+            "salsa_event(DidValidateMemoizedValue { database_key: create_data_HelloWorldDatabase(Id(0)) })",
+            "salsa_event(WillCheckCancellation)",
+            "salsa_event(WillExecute { database_key: invoke_length_query_shim(Id(800)) })",
+            "salsa_event(WillCheckCancellation)",
+        ]"#]]);
+}
+
+#[test]
+fn transparent() {
+    let mut db = LoggerDb::default();
+
+    db.set_input_string(String::from("Hello, world!"));
+    let len = db.transparent_length(());
+
+    assert_eq!(len, 13);
+    db.assert_logs(expect![[r#"
+        [
+            "salsa_event(WillCheckCancellation)",
+            "salsa_event(WillExecute { database_key: create_data_HelloWorldDatabase(Id(0)) })",
+            "salsa_event(WillCheckCancellation)",
+            "salsa_event(DidValidateMemoizedValue { database_key: create_data_HelloWorldDatabase(Id(0)) })",
+        ]"#]]);
+}
+
+#[test]
+fn transparent_invoke() {
+    let mut db = LoggerDb::default();
+
+    db.set_input_string(String::from("Hello, world!"));
+    let len = db.transparent_and_invoke_length(());
+
+    assert_eq!(len, 13);
+    db.assert_logs(expect![[r#"
+        [
+            "salsa_event(WillCheckCancellation)",
+            "salsa_event(WillExecute { database_key: create_data_HelloWorldDatabase(Id(0)) })",
+            "salsa_event(WillCheckCancellation)",
+            "salsa_event(DidValidateMemoizedValue { database_key: create_data_HelloWorldDatabase(Id(0)) })",
+            "salsa_event(WillCheckCancellation)",
+            "salsa_event(WillExecute { database_key: transparent_and_invoke_length_shim(Id(800)) })",
+            "salsa_event(WillCheckCancellation)",
+        ]"#]]);
+}
diff --git a/src/tools/rust-analyzer/crates/query-group-macro/tests/interned.rs b/src/tools/rust-analyzer/crates/query-group-macro/tests/interned.rs
new file mode 100644
index 0000000000000..26ed316122a53
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/query-group-macro/tests/interned.rs
@@ -0,0 +1,50 @@
+use query_group_macro::query_group;
+
+use expect_test::expect;
+use salsa::plumbing::AsId;
+
+mod logger_db;
+use logger_db::LoggerDb;
+
+#[salsa::interned(no_lifetime)]
+pub struct InternedString {
+    data: String,
+}
+
+#[query_group]
+pub trait InternedDB: salsa::Database {
+    #[salsa::interned]
+    fn intern_string(&self, data: String) -> InternedString;
+
+    fn interned_len(&self, id: InternedString) -> usize;
+}
+
+fn interned_len(db: &dyn InternedDB, id: InternedString) -> usize {
+    db.lookup_intern_string(id).len()
+}
+
+#[test]
+fn intern_round_trip() {
+    let db = LoggerDb::default();
+
+    let id = db.intern_string(String::from("Hello, world!"));
+    let s = db.lookup_intern_string(id);
+
+    assert_eq!(s.len(), 13);
+    db.assert_logs(expect![[r#"[]"#]]);
+}
+
+#[test]
+fn intern_with_query() {
+    let db = LoggerDb::default();
+
+    let id = db.intern_string(String::from("Hello, world!"));
+    let len = db.interned_len(id);
+
+    assert_eq!(len, 13);
+    db.assert_logs(expect![[r#"
+        [
+            "salsa_event(WillCheckCancellation)",
+            "salsa_event(WillExecute { database_key: interned_len_shim(Id(0)) })",
+        ]"#]]);
+}
diff --git a/src/tools/rust-analyzer/crates/query-group-macro/tests/logger_db.rs b/src/tools/rust-analyzer/crates/query-group-macro/tests/logger_db.rs
new file mode 100644
index 0000000000000..5cf9be36f70ca
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/query-group-macro/tests/logger_db.rs
@@ -0,0 +1,60 @@
+use std::sync::{Arc, Mutex};
+
+#[salsa::db]
+#[derive(Default, Clone)]
+pub(crate) struct LoggerDb {
+    storage: salsa::Storage<Self>,
+    logger: Logger,
+}
+
+#[derive(Default, Clone)]
+struct Logger {
+    logs: Arc<Mutex<Vec<String>>>,
+}
+
+#[salsa::db]
+impl salsa::Database for LoggerDb {
+    fn salsa_event(&self, event: &dyn Fn() -> salsa::Event) {
+        let event = event();
+        match event.kind {
+            salsa::EventKind::WillExecute { .. }
+            | salsa::EventKind::WillCheckCancellation
+            | salsa::EventKind::DidValidateMemoizedValue { .. }
+            | salsa::EventKind::WillDiscardStaleOutput { .. }
+            | salsa::EventKind::DidDiscard { .. } => {
+                self.push_log(format!("salsa_event({:?})", event.kind));
+            }
+            _ => {}
+        }
+    }
+}
+
+impl LoggerDb {
+    /// Log an event from inside a tracked function.
+    pub(crate) fn push_log(&self, string: String) {
+        self.logger.logs.lock().unwrap().push(string);
+    }
+
+    /// Asserts that the (formatted) logs match the expected value,
+    /// clearing the logged events afterwards. It is meant to be run
+    /// from outside any tracked functions.
+    pub(crate) fn assert_logs(&self, expected: expect_test::Expect) {
+        let logs = std::mem::take(&mut *self.logger.logs.lock().unwrap());
+        expected.assert_eq(&format!("{:#?}", logs));
+    }
+}
+
+/// Test the logger database.
+///
+/// This test isn't very interesting, but it *does* remove a dead code warning.
+#[test]
+fn test_logger_db() {
+    let db = LoggerDb::default();
+    db.push_log("test".to_string());
+    db.assert_logs(expect_test::expect![
+        r#"
+        [
+            "test",
+        ]"#
+    ]);
+}
diff --git a/src/tools/rust-analyzer/crates/query-group-macro/tests/lru.rs b/src/tools/rust-analyzer/crates/query-group-macro/tests/lru.rs
new file mode 100644
index 0000000000000..f56dd5c2f9b17
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/query-group-macro/tests/lru.rs
@@ -0,0 +1,68 @@
+use expect_test::expect;
+
+mod logger_db;
+use logger_db::LoggerDb;
+use query_group_macro::query_group;
+
+#[query_group]
+pub trait LruDB: salsa::Database {
+    // input with no params
+    #[salsa::input]
+    fn input_string(&self) -> String;
+
+    #[salsa::lru(16)]
+    #[salsa::invoke_interned(length_query)]
+    fn length_query(&self, key: ()) -> usize;
+
+    #[salsa::lru(16)]
+    #[salsa::invoke_interned(invoked_query)]
+    fn length_query_invoke(&self, key: ()) -> usize;
+}
+
+fn length_query(db: &dyn LruDB, _key: ()) -> usize {
+    db.input_string().len()
+}
+
+fn invoked_query(db: &dyn LruDB, _key: ()) -> usize {
+    db.input_string().len()
+}
+
+#[test]
+fn plain_lru() {
+    let mut db = LoggerDb::default();
+
+    db.set_input_string(String::from("Hello, world!"));
+    let len = db.length_query(());
+
+    assert_eq!(len, 13);
+    db.assert_logs(expect![[r#"
+        [
+            "salsa_event(WillCheckCancellation)",
+            "salsa_event(WillExecute { database_key: create_data_LruDB(Id(0)) })",
+            "salsa_event(WillCheckCancellation)",
+            "salsa_event(DidValidateMemoizedValue { database_key: create_data_LruDB(Id(0)) })",
+            "salsa_event(WillCheckCancellation)",
+            "salsa_event(WillExecute { database_key: length_query_shim(Id(800)) })",
+            "salsa_event(WillCheckCancellation)",
+        ]"#]]);
+}
+
+#[test]
+fn invoke_lru() {
+    let mut db = LoggerDb::default();
+
+    db.set_input_string(String::from("Hello, world!"));
+    let len = db.length_query_invoke(());
+
+    assert_eq!(len, 13);
+    db.assert_logs(expect![[r#"
+        [
+            "salsa_event(WillCheckCancellation)",
+            "salsa_event(WillExecute { database_key: create_data_LruDB(Id(0)) })",
+            "salsa_event(WillCheckCancellation)",
+            "salsa_event(DidValidateMemoizedValue { database_key: create_data_LruDB(Id(0)) })",
+            "salsa_event(WillCheckCancellation)",
+            "salsa_event(WillExecute { database_key: length_query_invoke_shim(Id(800)) })",
+            "salsa_event(WillCheckCancellation)",
+        ]"#]]);
+}
diff --git a/src/tools/rust-analyzer/crates/query-group-macro/tests/multiple_dbs.rs b/src/tools/rust-analyzer/crates/query-group-macro/tests/multiple_dbs.rs
new file mode 100644
index 0000000000000..f36e7fdbebf7f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/query-group-macro/tests/multiple_dbs.rs
@@ -0,0 +1,25 @@
+use query_group_macro::query_group;
+
+#[query_group]
+pub trait DatabaseOne: salsa::Database {
+    #[salsa::input]
+    fn input_string(&self) -> String;
+
+    // unadorned query
+    #[salsa::invoke_interned(length)]
+    fn length(&self, key: ()) -> usize;
+}
+
+#[query_group]
+pub trait DatabaseTwo: DatabaseOne {
+    #[salsa::invoke_interned(second_length)]
+    fn second_length(&self, key: ()) -> usize;
+}
+
+fn length(db: &dyn DatabaseOne, _key: ()) -> usize {
+    db.input_string().len()
+}
+
+fn second_length(db: &dyn DatabaseTwo, _key: ()) -> usize {
+    db.input_string().len()
+}
diff --git a/src/tools/rust-analyzer/crates/query-group-macro/tests/old_and_new.rs b/src/tools/rust-analyzer/crates/query-group-macro/tests/old_and_new.rs
new file mode 100644
index 0000000000000..a18b23a7d8a93
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/query-group-macro/tests/old_and_new.rs
@@ -0,0 +1,109 @@
+use expect_test::expect;
+
+mod logger_db;
+use logger_db::LoggerDb;
+use query_group_macro::query_group;
+
+#[salsa::input]
+struct Input {
+    str: String,
+}
+
+#[query_group]
+trait PartialMigrationDatabase: salsa::Database {
+    fn length_query(&self, input: Input) -> usize;
+
+    // renamed/invoke query
+    #[salsa::invoke(invoke_length_query_actual)]
+    fn invoke_length_query(&self, input: Input) -> usize;
+
+    // invoke tracked function
+    #[salsa::invoke(invoke_length_tracked_actual)]
+    fn invoke_length_tracked(&self, input: Input) -> usize;
+}
+
+fn length_query(db: &dyn PartialMigrationDatabase, input: Input) -> usize {
+    input.str(db).len()
+}
+
+fn invoke_length_query_actual(db: &dyn PartialMigrationDatabase, input: Input) -> usize {
+    input.str(db).len()
+}
+
+#[salsa::tracked]
+fn invoke_length_tracked_actual(db: &dyn PartialMigrationDatabase, input: Input) -> usize {
+    input.str(db).len()
+}
+
+#[test]
+fn unadorned_query() {
+    let db = LoggerDb::default();
+
+    let input = Input::new(&db, String::from("Hello, world!"));
+    let len = db.length_query(input);
+
+    assert_eq!(len, 13);
+    db.assert_logs(expect![[r#"
+        [
+            "salsa_event(WillCheckCancellation)",
+            "salsa_event(WillExecute { database_key: length_query_shim(Id(0)) })",
+        ]"#]]);
+}
+
+#[test]
+fn invoke_query() {
+    let db = LoggerDb::default();
+
+    let input = Input::new(&db, String::from("Hello, world!"));
+    let len = db.invoke_length_query(input);
+
+    assert_eq!(len, 13);
+    db.assert_logs(expect![[r#"
+        [
+            "salsa_event(WillCheckCancellation)",
+            "salsa_event(WillExecute { database_key: invoke_length_query_shim(Id(0)) })",
+        ]"#]]);
+}
+
+// todo: does this even make sense?
+#[test]
+fn invoke_tracked_query() {
+    let db = LoggerDb::default();
+
+    let input = Input::new(&db, String::from("Hello, world!"));
+    let len = db.invoke_length_tracked(input);
+
+    assert_eq!(len, 13);
+    db.assert_logs(expect![[r#"
+        [
+            "salsa_event(WillCheckCancellation)",
+            "salsa_event(WillExecute { database_key: invoke_length_tracked_shim(Id(0)) })",
+            "salsa_event(WillCheckCancellation)",
+            "salsa_event(WillExecute { database_key: invoke_length_tracked_actual(Id(0)) })",
+        ]"#]]);
+}
+
+#[test]
+fn new_salsa_baseline() {
+    let db = LoggerDb::default();
+
+    #[salsa::input]
+    struct Input {
+        str: String,
+    }
+
+    #[salsa::tracked]
+    fn new_salsa_length_query(db: &dyn PartialMigrationDatabase, input: Input) -> usize {
+        input.str(db).len()
+    }
+
+    let input = Input::new(&db, String::from("Hello, world!"));
+    let len = new_salsa_length_query(&db, input);
+
+    assert_eq!(len, 13);
+    db.assert_logs(expect![[r#"
+        [
+            "salsa_event(WillCheckCancellation)",
+            "salsa_event(WillExecute { database_key: new_salsa_length_query(Id(0)) })",
+        ]"#]]);
+}
diff --git a/src/tools/rust-analyzer/crates/query-group-macro/tests/result.rs b/src/tools/rust-analyzer/crates/query-group-macro/tests/result.rs
new file mode 100644
index 0000000000000..06f7f403c7e3a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/query-group-macro/tests/result.rs
@@ -0,0 +1,52 @@
+mod logger_db;
+use expect_test::expect;
+use logger_db::LoggerDb;
+
+use query_group_macro::query_group;
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct Error;
+
+#[query_group]
+pub trait ResultDatabase: salsa::Database {
+    #[salsa::input]
+    fn input_string(&self) -> String;
+
+    #[salsa::invoke_interned(length)]
+    fn length(&self, key: ()) -> Result<usize, Error>;
+
+    #[salsa::invoke_interned(length2)]
+    fn length2(&self, key: ()) -> Result<usize, Error>;
+}
+
+fn length(db: &dyn ResultDatabase, _key: ()) -> Result<usize, Error> {
+    Ok(db.input_string().len())
+}
+
+fn length2(db: &dyn ResultDatabase, _key: ()) -> Result<usize, Error> {
+    Ok(db.input_string().len())
+}
+
+#[test]
+fn test_queries_with_results() {
+    let mut db = LoggerDb::default();
+    let input = "hello";
+    db.set_input_string(input.to_owned());
+    assert_eq!(db.length(()), Ok(input.len()));
+    assert_eq!(db.length2(()), Ok(input.len()));
+
+    db.assert_logs(expect![[r#"
+        [
+            "salsa_event(WillCheckCancellation)",
+            "salsa_event(WillExecute { database_key: create_data_ResultDatabase(Id(0)) })",
+            "salsa_event(WillCheckCancellation)",
+            "salsa_event(DidValidateMemoizedValue { database_key: create_data_ResultDatabase(Id(0)) })",
+            "salsa_event(WillCheckCancellation)",
+            "salsa_event(WillExecute { database_key: length_shim(Id(800)) })",
+            "salsa_event(WillCheckCancellation)",
+            "salsa_event(WillCheckCancellation)",
+            "salsa_event(WillCheckCancellation)",
+            "salsa_event(WillExecute { database_key: length2_shim(Id(c00)) })",
+            "salsa_event(WillCheckCancellation)",
+        ]"#]]);
+}
diff --git a/src/tools/rust-analyzer/crates/query-group-macro/tests/supertrait.rs b/src/tools/rust-analyzer/crates/query-group-macro/tests/supertrait.rs
new file mode 100644
index 0000000000000..70073ac1de323
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/query-group-macro/tests/supertrait.rs
@@ -0,0 +1,20 @@
+use query_group_macro::query_group;
+
+#[salsa::db]
+pub trait SourceDb: salsa::Database {
+    /// Text of the file.
+    fn file_text(&self, id: usize) -> String;
+}
+
+#[query_group]
+pub trait RootDb: SourceDb {
+    #[salsa::invoke_interned(parse)]
+    fn parse(&self, id: usize) -> String;
+}
+
+fn parse(db: &dyn RootDb, id: usize) -> String {
+    // this is the test: does the following compile?
+    db.file_text(id);
+
+    String::new()
+}
diff --git a/src/tools/rust-analyzer/crates/query-group-macro/tests/tuples.rs b/src/tools/rust-analyzer/crates/query-group-macro/tests/tuples.rs
new file mode 100644
index 0000000000000..af0e852695e32
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/query-group-macro/tests/tuples.rs
@@ -0,0 +1,39 @@
+use query_group_macro::query_group;
+
+mod logger_db;
+use expect_test::expect;
+use logger_db::LoggerDb;
+
+#[query_group]
+pub trait HelloWorldDatabase: salsa::Database {
+    #[salsa::input]
+    fn input_string(&self) -> String;
+
+    #[salsa::invoke_interned(length_query)]
+    fn length_query(&self, key: ()) -> (usize, usize);
+}
+
+fn length_query(db: &dyn HelloWorldDatabase, _key: ()) -> (usize, usize) {
+    let len = db.input_string().len();
+    (len, len)
+}
+
+#[test]
+fn query() {
+    let mut db = LoggerDb::default();
+
+    db.set_input_string(String::from("Hello, world!"));
+    let len = db.length_query(());
+
+    assert_eq!(len, (13, 13));
+    db.assert_logs(expect![[r#"
+        [
+            "salsa_event(WillCheckCancellation)",
+            "salsa_event(WillExecute { database_key: create_data_HelloWorldDatabase(Id(0)) })",
+            "salsa_event(WillCheckCancellation)",
+            "salsa_event(DidValidateMemoizedValue { database_key: create_data_HelloWorldDatabase(Id(0)) })",
+            "salsa_event(WillCheckCancellation)",
+            "salsa_event(WillExecute { database_key: length_query_shim(Id(800)) })",
+            "salsa_event(WillCheckCancellation)",
+        ]"#]]);
+}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/Cargo.toml b/src/tools/rust-analyzer/crates/ra-salsa/Cargo.toml
deleted file mode 100644
index 57a20be0cadd7..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/Cargo.toml
+++ /dev/null
@@ -1,35 +0,0 @@
-[package]
-name = "salsa"
-version = "0.0.0"
-authors = ["Salsa developers"]
-edition = "2021"
-license = "Apache-2.0 OR MIT"
-repository = "https://github.com/salsa-rs/salsa"
-description = "A generic framework for on-demand, incrementalized computation (experimental)"
-
-rust-version.workspace = true
-
-[lib]
-name = "ra_salsa"
-
-[dependencies]
-indexmap = "2.1.0"
-lock_api = "0.4"
-tracing = "0.1"
-parking_lot = "0.12.1"
-rustc-hash = "2.0.0"
-smallvec = "1.0.0"
-oorandom = "11"
-triomphe.workspace = true
-itertools.workspace = true
-
-ra-salsa-macros = { version = "0.0.0", path = "ra-salsa-macros", package = "salsa-macros" }
-
-[dev-dependencies]
-linked-hash-map = "0.5.6"
-rand = "0.8.5"
-expect-test = "1.4.0"
-dissimilar = "1.0.7"
-
-[lints]
-workspace = true
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/FAQ.md b/src/tools/rust-analyzer/crates/ra-salsa/FAQ.md
deleted file mode 100644
index 9c9f6f92da990..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/FAQ.md
+++ /dev/null
@@ -1,34 +0,0 @@
-# Frequently asked questions
-
-## Why is it called salsa?
-
-I like salsa! Don't you?! Well, ok, there's a bit more to it. The
-underlying algorithm for figuring out which bits of code need to be
-re-executed after any given change is based on the algorithm used in
-rustc. Michael Woerister and I first described the rustc algorithm in
-terms of two colors, red and green, and hence we called it the
-"red-green algorithm". This made me think of the New Mexico State
-Question --- ["Red or green?"][nm] --- which refers to chile
-(salsa). Although this version no longer uses colors (we borrowed
-revision counters from Glimmer, instead), I still like the name.
-
-[nm]: https://www.sos.state.nm.us/about-new-mexico/state-question/
-
-## What is the relationship between salsa and an Entity-Component System (ECS)?
-
-You may have noticed that Salsa "feels" a lot like an ECS in some
-ways. That's true -- Salsa's queries are a bit like *components* (and
-the keys to the queries are a bit like *entities*). But there is one
-big difference: **ECS is -- at its heart -- a mutable system**. You
-can get or set a component of some entity whenever you like. In
-contrast, salsa's queries **define "derived values" via pure
-computations**.
-
-Partly as a consequence, ECS doesn't handle incremental updates for
-you. When you update some component of some entity, you have to ensure
-that other entities' components are updated appropriately.
-
-Finally, ECS offers interesting metadata and "aspect-like" facilities,
-such as iterating over all entities that share certain components.
-Salsa has no analogue to that.
-
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/LICENSE-APACHE b/src/tools/rust-analyzer/crates/ra-salsa/LICENSE-APACHE
deleted file mode 100644
index 1b5ec8b78e237..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/LICENSE-APACHE
+++ /dev/null
@@ -1,176 +0,0 @@
-                              Apache License
-                        Version 2.0, January 2004
-                     http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-1. Definitions.
-
-   "License" shall mean the terms and conditions for use, reproduction,
-   and distribution as defined by Sections 1 through 9 of this document.
-
-   "Licensor" shall mean the copyright owner or entity authorized by
-   the copyright owner that is granting the License.
-
-   "Legal Entity" shall mean the union of the acting entity and all
-   other entities that control, are controlled by, or are under common
-   control with that entity. For the purposes of this definition,
-   "control" means (i) the power, direct or indirect, to cause the
-   direction or management of such entity, whether by contract or
-   otherwise, or (ii) ownership of fifty percent (50%) or more of the
-   outstanding shares, or (iii) beneficial ownership of such entity.
-
-   "You" (or "Your") shall mean an individual or Legal Entity
-   exercising permissions granted by this License.
-
-   "Source" form shall mean the preferred form for making modifications,
-   including but not limited to software source code, documentation
-   source, and configuration files.
-
-   "Object" form shall mean any form resulting from mechanical
-   transformation or translation of a Source form, including but
-   not limited to compiled object code, generated documentation,
-   and conversions to other media types.
-
-   "Work" shall mean the work of authorship, whether in Source or
-   Object form, made available under the License, as indicated by a
-   copyright notice that is included in or attached to the work
-   (an example is provided in the Appendix below).
-
-   "Derivative Works" shall mean any work, whether in Source or Object
-   form, that is based on (or derived from) the Work and for which the
-   editorial revisions, annotations, elaborations, or other modifications
-   represent, as a whole, an original work of authorship. For the purposes
-   of this License, Derivative Works shall not include works that remain
-   separable from, or merely link (or bind by name) to the interfaces of,
-   the Work and Derivative Works thereof.
-
-   "Contribution" shall mean any work of authorship, including
-   the original version of the Work and any modifications or additions
-   to that Work or Derivative Works thereof, that is intentionally
-   submitted to Licensor for inclusion in the Work by the copyright owner
-   or by an individual or Legal Entity authorized to submit on behalf of
-   the copyright owner. For the purposes of this definition, "submitted"
-   means any form of electronic, verbal, or written communication sent
-   to the Licensor or its representatives, including but not limited to
-   communication on electronic mailing lists, source code control systems,
-   and issue tracking systems that are managed by, or on behalf of, the
-   Licensor for the purpose of discussing and improving the Work, but
-   excluding communication that is conspicuously marked or otherwise
-   designated in writing by the copyright owner as "Not a Contribution."
-
-   "Contributor" shall mean Licensor and any individual or Legal Entity
-   on behalf of whom a Contribution has been received by Licensor and
-   subsequently incorporated within the Work.
-
-2. Grant of Copyright License. Subject to the terms and conditions of
-   this License, each Contributor hereby grants to You a perpetual,
-   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-   copyright license to reproduce, prepare Derivative Works of,
-   publicly display, publicly perform, sublicense, and distribute the
-   Work and such Derivative Works in Source or Object form.
-
-3. Grant of Patent License. Subject to the terms and conditions of
-   this License, each Contributor hereby grants to You a perpetual,
-   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-   (except as stated in this section) patent license to make, have made,
-   use, offer to sell, sell, import, and otherwise transfer the Work,
-   where such license applies only to those patent claims licensable
-   by such Contributor that are necessarily infringed by their
-   Contribution(s) alone or by combination of their Contribution(s)
-   with the Work to which such Contribution(s) was submitted. If You
-   institute patent litigation against any entity (including a
-   cross-claim or counterclaim in a lawsuit) alleging that the Work
-   or a Contribution incorporated within the Work constitutes direct
-   or contributory patent infringement, then any patent licenses
-   granted to You under this License for that Work shall terminate
-   as of the date such litigation is filed.
-
-4. Redistribution. You may reproduce and distribute copies of the
-   Work or Derivative Works thereof in any medium, with or without
-   modifications, and in Source or Object form, provided that You
-   meet the following conditions:
-
-   (a) You must give any other recipients of the Work or
-       Derivative Works a copy of this License; and
-
-   (b) You must cause any modified files to carry prominent notices
-       stating that You changed the files; and
-
-   (c) You must retain, in the Source form of any Derivative Works
-       that You distribute, all copyright, patent, trademark, and
-       attribution notices from the Source form of the Work,
-       excluding those notices that do not pertain to any part of
-       the Derivative Works; and
-
-   (d) If the Work includes a "NOTICE" text file as part of its
-       distribution, then any Derivative Works that You distribute must
-       include a readable copy of the attribution notices contained
-       within such NOTICE file, excluding those notices that do not
-       pertain to any part of the Derivative Works, in at least one
-       of the following places: within a NOTICE text file distributed
-       as part of the Derivative Works; within the Source form or
-       documentation, if provided along with the Derivative Works; or,
-       within a display generated by the Derivative Works, if and
-       wherever such third-party notices normally appear. The contents
-       of the NOTICE file are for informational purposes only and
-       do not modify the License. You may add Your own attribution
-       notices within Derivative Works that You distribute, alongside
-       or as an addendum to the NOTICE text from the Work, provided
-       that such additional attribution notices cannot be construed
-       as modifying the License.
-
-   You may add Your own copyright statement to Your modifications and
-   may provide additional or different license terms and conditions
-   for use, reproduction, or distribution of Your modifications, or
-   for any such Derivative Works as a whole, provided Your use,
-   reproduction, and distribution of the Work otherwise complies with
-   the conditions stated in this License.
-
-5. Submission of Contributions. Unless You explicitly state otherwise,
-   any Contribution intentionally submitted for inclusion in the Work
-   by You to the Licensor shall be under the terms and conditions of
-   this License, without any additional terms or conditions.
-   Notwithstanding the above, nothing herein shall supersede or modify
-   the terms of any separate license agreement you may have executed
-   with Licensor regarding such Contributions.
-
-6. Trademarks. This License does not grant permission to use the trade
-   names, trademarks, service marks, or product names of the Licensor,
-   except as required for reasonable and customary use in describing the
-   origin of the Work and reproducing the content of the NOTICE file.
-
-7. Disclaimer of Warranty. Unless required by applicable law or
-   agreed to in writing, Licensor provides the Work (and each
-   Contributor provides its Contributions) on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-   implied, including, without limitation, any warranties or conditions
-   of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-   PARTICULAR PURPOSE. You are solely responsible for determining the
-   appropriateness of using or redistributing the Work and assume any
-   risks associated with Your exercise of permissions under this License.
-
-8. Limitation of Liability. In no event and under no legal theory,
-   whether in tort (including negligence), contract, or otherwise,
-   unless required by applicable law (such as deliberate and grossly
-   negligent acts) or agreed to in writing, shall any Contributor be
-   liable to You for damages, including any direct, indirect, special,
-   incidental, or consequential damages of any character arising as a
-   result of this License or out of the use or inability to use the
-   Work (including but not limited to damages for loss of goodwill,
-   work stoppage, computer failure or malfunction, or any and all
-   other commercial damages or losses), even if such Contributor
-   has been advised of the possibility of such damages.
-
-9. Accepting Warranty or Additional Liability. While redistributing
-   the Work or Derivative Works thereof, You may choose to offer,
-   and charge a fee for, acceptance of support, warranty, indemnity,
-   or other liability obligations and/or rights consistent with this
-   License. However, in accepting such obligations, You may act only
-   on Your own behalf and on Your sole responsibility, not on behalf
-   of any other Contributor, and only if You agree to indemnify,
-   defend, and hold each Contributor harmless for any liability
-   incurred by, or claims asserted against, such Contributor by reason
-   of your accepting any such warranty or additional liability.
-
-END OF TERMS AND CONDITIONS
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/LICENSE-MIT b/src/tools/rust-analyzer/crates/ra-salsa/LICENSE-MIT
deleted file mode 100644
index 31aa79387f27e..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/LICENSE-MIT
+++ /dev/null
@@ -1,23 +0,0 @@
-Permission is hereby granted, free of charge, to any
-person obtaining a copy of this software and associated
-documentation files (the "Software"), to deal in the
-Software without restriction, including without
-limitation the rights to use, copy, modify, merge,
-publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice
-shall be included in all copies or substantial portions
-of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
-TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/README.md b/src/tools/rust-analyzer/crates/ra-salsa/README.md
deleted file mode 100644
index 4a8d9f8c7317a..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/README.md
+++ /dev/null
@@ -1,42 +0,0 @@
-# salsa
-
-*A generic framework for on-demand, incrementalized computation.*
-
-## Obligatory warning
-
-This is a fork of https://github.com/salsa-rs/salsa/ adjusted to rust-analyzer's needs.
-
-## Credits
-
-This system is heavily inspired by [adapton](http://adapton.org/), [glimmer](https://github.com/glimmerjs/glimmer-vm), and rustc's query
-system. So credit goes to Eduard-Mihai Burtescu, Matthew Hammer,
-Yehuda Katz, and Michael Woerister.
-
-## Key idea
-
-The key idea of `salsa` is that you define your program as a set of
-**queries**. Every query is used like function `K -> V` that maps from
-some key of type `K` to a value of type `V`. Queries come in two basic
-varieties:
-
-- **Inputs**: the base inputs to your system. You can change these
-  whenever you like.
-- **Functions**: pure functions (no side effects) that transform your
-  inputs into other values. The results of queries is memoized to
-  avoid recomputing them a lot. When you make changes to the inputs,
-  we'll figure out (fairly intelligently) when we can re-use these
-  memoized values and when we have to recompute them.
-
-## Want to learn more?
-
-To learn more about Salsa, try one of the following:
-
-- read the [heavily commented `hello_world` example](https://github.com/salsa-rs/salsa/blob/master/examples/hello_world/main.rs);
-- check out the [Salsa book](https://salsa-rs.github.io/salsa);
-- watch one of our [videos](https://salsa-rs.github.io/salsa/videos.html).
-
-## Getting in touch
-
-The bulk of the discussion happens in the [issues](https://github.com/salsa-rs/salsa/issues)
-and [pull requests](https://github.com/salsa-rs/salsa/pulls),
-but we have a [zulip chat](https://salsa.zulipchat.com/) as well.
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/ra-salsa-macros/Cargo.toml b/src/tools/rust-analyzer/crates/ra-salsa/ra-salsa-macros/Cargo.toml
deleted file mode 100644
index 5613d75c75225..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/ra-salsa-macros/Cargo.toml
+++ /dev/null
@@ -1,23 +0,0 @@
-[package]
-name = "salsa-macros"
-version = "0.0.0"
-authors = ["Salsa developers"]
-edition = "2021"
-license = "Apache-2.0 OR MIT"
-repository = "https://github.com/salsa-rs/salsa"
-description = "Procedural macros for the salsa crate"
-
-rust-version.workspace = true
-
-[lib]
-proc-macro = true
-name = "ra_salsa_macros"
-
-[dependencies]
-heck = "0.4"
-proc-macro2 = "1.0"
-quote = "1.0"
-syn = { version = "2.0", features = ["full", "extra-traits"] }
-
-[lints]
-workspace = true
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/ra-salsa-macros/LICENSE-APACHE b/src/tools/rust-analyzer/crates/ra-salsa/ra-salsa-macros/LICENSE-APACHE
deleted file mode 100644
index 0bf2cad6488f5..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/ra-salsa-macros/LICENSE-APACHE
+++ /dev/null
@@ -1 +0,0 @@
-../LICENSE-APACHE
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/ra-salsa-macros/LICENSE-MIT b/src/tools/rust-analyzer/crates/ra-salsa/ra-salsa-macros/LICENSE-MIT
deleted file mode 100644
index d99cce5f72063..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/ra-salsa-macros/LICENSE-MIT
+++ /dev/null
@@ -1 +0,0 @@
-../LICENSE-MIT
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/ra-salsa-macros/README.md b/src/tools/rust-analyzer/crates/ra-salsa/ra-salsa-macros/README.md
deleted file mode 100644
index 94389aee61a0e..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/ra-salsa-macros/README.md
+++ /dev/null
@@ -1 +0,0 @@
-../README.md
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/ra-salsa-macros/src/database_storage.rs b/src/tools/rust-analyzer/crates/ra-salsa/ra-salsa-macros/src/database_storage.rs
deleted file mode 100644
index 63ab84a621e7e..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/ra-salsa-macros/src/database_storage.rs
+++ /dev/null
@@ -1,243 +0,0 @@
-//! Implementation for `[ra_salsa::database]` decorator.
-
-use heck::ToSnakeCase;
-use proc_macro::TokenStream;
-use syn::parse::{Parse, ParseStream};
-use syn::punctuated::Punctuated;
-use syn::{Ident, ItemStruct, Path, Token};
-
-type PunctuatedQueryGroups = Punctuated<QueryGroup, Token![,]>;
-
-pub(crate) fn database(args: TokenStream, input: TokenStream) -> TokenStream {
-    let args = syn::parse_macro_input!(args as QueryGroupList);
-    let input = syn::parse_macro_input!(input as ItemStruct);
-
-    let query_groups = &args.query_groups;
-    let database_name = &input.ident;
-    let visibility = &input.vis;
-    let db_storage_field = quote! { storage };
-
-    let mut output = proc_macro2::TokenStream::new();
-    output.extend(quote! { #input });
-
-    let query_group_names_snake: Vec<_> = query_groups
-        .iter()
-        .map(|query_group| {
-            let group_name = query_group.name();
-            Ident::new(&group_name.to_string().to_snake_case(), group_name.span())
-        })
-        .collect();
-
-    let query_group_storage_names: Vec<_> = query_groups
-        .iter()
-        .map(|QueryGroup { group_path }| {
-            quote! {
-                <#group_path as ra_salsa::plumbing::QueryGroup>::GroupStorage
-            }
-        })
-        .collect();
-
-    // For each query group `foo::MyGroup` create a link to its
-    // `foo::MyGroupGroupStorage`
-    let mut storage_fields = proc_macro2::TokenStream::new();
-    let mut storage_initializers = proc_macro2::TokenStream::new();
-    let mut has_group_impls = proc_macro2::TokenStream::new();
-    for (((query_group, group_name_snake), group_storage), group_index) in query_groups
-        .iter()
-        .zip(&query_group_names_snake)
-        .zip(&query_group_storage_names)
-        .zip(0_u16..)
-    {
-        let group_path = &query_group.group_path;
-
-        // rewrite the last identifier (`MyGroup`, above) to
-        // (e.g.) `MyGroupGroupStorage`.
-        storage_fields.extend(quote! {
-            #group_name_snake: #group_storage,
-        });
-
-        // rewrite the last identifier (`MyGroup`, above) to
-        // (e.g.) `MyGroupGroupStorage`.
-        storage_initializers.extend(quote! {
-            #group_name_snake: #group_storage::new(#group_index),
-        });
-
-        // ANCHOR:HasQueryGroup
-        has_group_impls.extend(quote! {
-            impl ra_salsa::plumbing::HasQueryGroup<#group_path> for #database_name {
-                fn group_storage(&self) -> &#group_storage {
-                    &self.#db_storage_field.query_store().#group_name_snake
-                }
-
-                fn group_storage_mut(&mut self) -> (&#group_storage, &mut ra_salsa::Runtime) {
-                    let (query_store_mut, runtime) = self.#db_storage_field.query_store_mut();
-                    (&query_store_mut.#group_name_snake, runtime)
-                }
-            }
-        });
-        // ANCHOR_END:HasQueryGroup
-    }
-
-    // create group storage wrapper struct
-    output.extend(quote! {
-        #[doc(hidden)]
-        #visibility struct __SalsaDatabaseStorage {
-            #storage_fields
-        }
-
-        impl Default for __SalsaDatabaseStorage {
-            fn default() -> Self {
-                Self {
-                    #storage_initializers
-                }
-            }
-        }
-    });
-
-    // Create a tuple (D1, D2, ...) where Di is the data for a given query group.
-    let mut database_data = vec![];
-    for QueryGroup { group_path } in query_groups {
-        database_data.push(quote! {
-            <#group_path as ra_salsa::plumbing::QueryGroup>::GroupData
-        });
-    }
-
-    // ANCHOR:DatabaseStorageTypes
-    output.extend(quote! {
-        impl ra_salsa::plumbing::DatabaseStorageTypes for #database_name {
-            type DatabaseStorage = __SalsaDatabaseStorage;
-        }
-    });
-    // ANCHOR_END:DatabaseStorageTypes
-
-    // ANCHOR:DatabaseOps
-    let mut fmt_ops = proc_macro2::TokenStream::new();
-    let mut maybe_changed_ops = proc_macro2::TokenStream::new();
-    let mut cycle_recovery_strategy_ops = proc_macro2::TokenStream::new();
-    let mut for_each_ops = proc_macro2::TokenStream::new();
-    for ((QueryGroup { group_path }, group_storage), group_index) in
-        query_groups.iter().zip(&query_group_storage_names).zip(0_u16..)
-    {
-        fmt_ops.extend(quote! {
-            #group_index => {
-                let storage: &#group_storage =
-                    <Self as ra_salsa::plumbing::HasQueryGroup<#group_path>>::group_storage(self);
-                storage.fmt_index(self, input, fmt)
-            }
-        });
-        maybe_changed_ops.extend(quote! {
-            #group_index => {
-                let storage: &#group_storage =
-                    <Self as ra_salsa::plumbing::HasQueryGroup<#group_path>>::group_storage(self);
-                storage.maybe_changed_after(self, input, revision)
-            }
-        });
-        cycle_recovery_strategy_ops.extend(quote! {
-            #group_index => {
-                let storage: &#group_storage =
-                    <Self as ra_salsa::plumbing::HasQueryGroup<#group_path>>::group_storage(self);
-                storage.cycle_recovery_strategy(self, input)
-            }
-        });
-        for_each_ops.extend(quote! {
-            let storage: &#group_storage =
-                <Self as ra_salsa::plumbing::HasQueryGroup<#group_path>>::group_storage(self);
-            storage.for_each_query(runtime, &mut op);
-        });
-    }
-    output.extend(quote! {
-        impl ra_salsa::plumbing::DatabaseOps for #database_name {
-            fn ops_database(&self) -> &dyn ra_salsa::Database {
-                self
-            }
-
-            fn ops_salsa_runtime(&self) -> &ra_salsa::Runtime {
-                self.#db_storage_field.salsa_runtime()
-            }
-
-            fn synthetic_write(&mut self, durability: ra_salsa::Durability) {
-                self.#db_storage_field.salsa_runtime_mut().synthetic_write(durability)
-            }
-
-            fn fmt_index(
-                &self,
-                input: ra_salsa::DatabaseKeyIndex,
-                fmt: &mut std::fmt::Formatter<'_>,
-            ) -> std::fmt::Result {
-                match input.group_index() {
-                    #fmt_ops
-                    i => panic!("ra_salsa: invalid group index {}", i)
-                }
-            }
-
-            fn maybe_changed_after(
-                &self,
-                input: ra_salsa::DatabaseKeyIndex,
-                revision: ra_salsa::Revision
-            ) -> bool {
-                match input.group_index() {
-                    #maybe_changed_ops
-                    i => panic!("ra_salsa: invalid group index {}", i)
-                }
-            }
-
-            fn cycle_recovery_strategy(
-                &self,
-                input: ra_salsa::DatabaseKeyIndex,
-            ) -> ra_salsa::plumbing::CycleRecoveryStrategy {
-                match input.group_index() {
-                    #cycle_recovery_strategy_ops
-                    i => panic!("ra_salsa: invalid group index {}", i)
-                }
-            }
-
-            fn for_each_query(
-                &self,
-                mut op: &mut dyn FnMut(&dyn ra_salsa::plumbing::QueryStorageMassOps),
-            ) {
-                let runtime = ra_salsa::Database::salsa_runtime(self);
-                #for_each_ops
-            }
-        }
-    });
-    // ANCHOR_END:DatabaseOps
-
-    output.extend(has_group_impls);
-
-    output.into()
-}
-
-#[derive(Clone, Debug)]
-struct QueryGroupList {
-    query_groups: PunctuatedQueryGroups,
-}
-
-impl Parse for QueryGroupList {
-    fn parse(input: ParseStream<'_>) -> syn::Result<Self> {
-        let query_groups: PunctuatedQueryGroups =
-            input.parse_terminated(QueryGroup::parse, Token![,])?;
-        Ok(QueryGroupList { query_groups })
-    }
-}
-
-#[derive(Clone, Debug)]
-struct QueryGroup {
-    group_path: Path,
-}
-
-impl QueryGroup {
-    /// The name of the query group trait.
-    fn name(&self) -> Ident {
-        self.group_path.segments.last().unwrap().ident.clone()
-    }
-}
-
-impl Parse for QueryGroup {
-    /// ```ignore
-    ///         impl HelloWorldDatabase;
-    /// ```
-    fn parse(input: ParseStream<'_>) -> syn::Result<Self> {
-        let group_path: Path = input.parse()?;
-        Ok(QueryGroup { group_path })
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/ra-salsa-macros/src/lib.rs b/src/tools/rust-analyzer/crates/ra-salsa/ra-salsa-macros/src/lib.rs
deleted file mode 100644
index d3e17c5ebf1d4..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/ra-salsa-macros/src/lib.rs
+++ /dev/null
@@ -1,125 +0,0 @@
-//! This crate provides salsa's macros and attributes.
-
-#![recursion_limit = "256"]
-
-#[macro_use]
-extern crate quote;
-
-use proc_macro::TokenStream;
-
-mod database_storage;
-mod parenthesized;
-mod query_group;
-
-/// The decorator that defines a salsa "query group" trait. This is a
-/// trait that defines everything that a block of queries need to
-/// execute, as well as defining the queries themselves that are
-/// exported for others to use.
-///
-/// This macro declares the "prototype" for a group of queries. It will
-/// expand into a trait and a set of structs, one per query.
-///
-/// For each query, you give the name of the accessor method to invoke
-/// the query (e.g., `my_query`, below), as well as its parameter
-/// types and the output type. You also give the name for a query type
-/// (e.g., `MyQuery`, below) that represents the query, and optionally
-/// other details, such as its storage.
-///
-/// # Examples
-///
-/// The simplest example is something like this:
-///
-/// ```ignore
-/// #[salsa::query_group]
-/// trait TypeckDatabase {
-///     #[salsa::input] // see below for other legal attributes
-///     fn my_query(&self, input: u32) -> u64;
-///
-///     /// Queries can have any number of inputs (including zero); if there
-///     /// is not exactly one input, then the key type will be
-///     /// a tuple of the input types, so in this case `(u32, f32)`.
-///     fn other_query(&self, input1: u32, input2: f32) -> u64;
-/// }
-/// ```
-///
-/// Here is a list of legal `salsa::XXX` attributes:
-///
-/// - Storage attributes: control how the query data is stored and set. These
-///   are described in detail in the section below.
-///   - `#[salsa::input]`
-///   - `#[salsa::memoized]`
-///   - `#[salsa::dependencies]`
-/// - Query execution:
-///   - `#[salsa::invoke(path::to::my_fn)]` -- for a non-input, this
-///     indicates the function to call when a query must be
-///     recomputed. The default is to call a function in the same
-///     module with the same name as the query.
-///   - `#[query_type(MyQueryTypeName)]` specifies the name of the
-///     dummy struct created for the query. Default is the name of the
-///     query, in camel case, plus the word "Query" (e.g.,
-///     `MyQueryQuery` and `OtherQueryQuery` in the examples above).
-///
-/// # Storage attributes
-///
-/// Here are the possible storage values for each query.  The default
-/// is `storage memoized`.
-///
-/// ## Input queries
-///
-/// Specifying `storage input` will give you an **input
-/// query**. Unlike derived queries, whose value is given by a
-/// function, input queries are explicitly set by doing
-/// `db.query(QueryType).set(key, value)` (where `QueryType` is the
-/// `type` specified for the query). Accessing a value that has not
-/// yet been set will panic. Each time you invoke `set`, we assume the
-/// value has changed, and so we will potentially re-execute derived
-/// queries that read (transitively) from this input.
-///
-/// ## Derived queries
-///
-/// Derived queries are specified by a function.
-///
-/// - `#[salsa::memoized]` (the default) -- The result is memoized
-///   between calls.  If the inputs have changed, we will recompute
-///   the value, but then compare against the old memoized value,
-///   which can significantly reduce the amount of recomputation
-///   required in new revisions. This does require that the value
-///   implements `Eq`.
-/// - `#[salsa::dependencies]` -- does not cache the value, so it will
-///   be recomputed every time it is needed. We do track the inputs, however,
-///   so if they have not changed, then things that rely on this query
-///   may be known not to have changed.
-///
-/// ## Attribute combinations
-///
-/// Some attributes are mutually exclusive. For example, it is an error to add
-/// multiple storage specifiers or to annotate a function to `invoke` on an
-/// `input` query.
-#[proc_macro_attribute]
-pub fn query_group(args: TokenStream, input: TokenStream) -> TokenStream {
-    query_group::query_group(args, input)
-}
-
-/// This attribute is placed on your database struct. It takes a list of the
-/// query groups that your database supports. The format looks like so:
-///
-/// ```rust,ignore
-/// #[salsa::database(MyQueryGroup1, MyQueryGroup2)]
-/// struct MyDatabase {
-///     runtime: salsa::Runtime<MyDatabase>, // <-- your database will need this field, too
-/// }
-/// ```
-///
-/// Here, the struct `MyDatabase` would support the two query groups
-/// `MyQueryGroup1` and `MyQueryGroup2`. In addition to the `database`
-/// attribute, the struct needs to have a `runtime` field (of type
-/// [`salsa::Runtime`]) and to implement the `salsa::Database` trait.
-///
-/// See [the `hello_world` example][hw] for more details.
-///
-/// [`salsa::Runtime`]: struct.Runtime.html
-/// [hw]: https://github.com/salsa-rs/salsa/tree/master/examples/hello_world
-#[proc_macro_attribute]
-pub fn database(args: TokenStream, input: TokenStream) -> TokenStream {
-    database_storage::database(args, input)
-}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/ra-salsa-macros/src/parenthesized.rs b/src/tools/rust-analyzer/crates/ra-salsa/ra-salsa-macros/src/parenthesized.rs
deleted file mode 100644
index 5ecd1b8a05838..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/ra-salsa-macros/src/parenthesized.rs
+++ /dev/null
@@ -1,13 +0,0 @@
-//! Parenthesis helper
-pub(crate) struct Parenthesized<T>(pub(crate) T);
-
-impl<T> syn::parse::Parse for Parenthesized<T>
-where
-    T: syn::parse::Parse,
-{
-    fn parse(input: syn::parse::ParseStream<'_>) -> syn::Result<Self> {
-        let content;
-        syn::parenthesized!(content in input);
-        content.parse::<T>().map(Parenthesized)
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/ra-salsa-macros/src/query_group.rs b/src/tools/rust-analyzer/crates/ra-salsa/ra-salsa-macros/src/query_group.rs
deleted file mode 100644
index d761a5e798e89..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/ra-salsa-macros/src/query_group.rs
+++ /dev/null
@@ -1,753 +0,0 @@
-//! Implementation for `[ra_salsa::query_group]` decorator.
-
-use crate::parenthesized::Parenthesized;
-use heck::ToUpperCamelCase;
-use proc_macro::TokenStream;
-use proc_macro2::Span;
-use quote::ToTokens;
-use syn::{
-    parse_macro_input, parse_quote, spanned::Spanned, Attribute, Error, FnArg, Ident, ItemTrait,
-    ReturnType, TraitItem, Type,
-};
-
-/// Implementation for `[ra_salsa::query_group]` decorator.
-pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream {
-    let group_struct = parse_macro_input!(args as Ident);
-    let input: ItemTrait = parse_macro_input!(input as ItemTrait);
-    // println!("args: {:#?}", args);
-    // println!("input: {:#?}", input);
-
-    let input_span = input.span();
-    let (trait_attrs, salsa_attrs) = filter_attrs(input.attrs);
-    if !salsa_attrs.is_empty() {
-        return Error::new(input_span, format!("unsupported attributes: {salsa_attrs:?}"))
-            .to_compile_error()
-            .into();
-    }
-
-    let trait_vis = input.vis;
-    let trait_name = input.ident;
-    let _generics = input.generics.clone();
-    let dyn_db = quote! { dyn #trait_name };
-
-    // Decompose the trait into the corresponding queries.
-    let mut queries = vec![];
-    for item in input.items {
-        if let TraitItem::Fn(method) = item {
-            let query_name = method.sig.ident.to_string();
-
-            let mut storage = QueryStorage::Memoized;
-            let mut cycle = None;
-            let mut invoke = None;
-
-            let mut query_type =
-                format_ident!("{}Query", query_name.to_string().to_upper_camel_case());
-            let mut num_storages = 0;
-
-            // Extract attributes.
-            let (attrs, salsa_attrs) = filter_attrs(method.attrs);
-            for SalsaAttr { name, tts, span } in salsa_attrs {
-                match name.as_str() {
-                    "memoized" => {
-                        storage = QueryStorage::Memoized;
-                        num_storages += 1;
-                    }
-                    "dependencies" => {
-                        storage = QueryStorage::LruDependencies;
-                        num_storages += 1;
-                    }
-                    "lru" => {
-                        storage = QueryStorage::LruMemoized;
-                        num_storages += 1;
-                    }
-                    "input" => {
-                        storage = QueryStorage::Input;
-                        num_storages += 1;
-                    }
-                    "interned" => {
-                        storage = QueryStorage::Interned;
-                        num_storages += 1;
-                    }
-                    "cycle" => {
-                        cycle = Some(parse_macro_input!(tts as Parenthesized<syn::Path>).0);
-                    }
-                    "invoke" => {
-                        invoke = Some(parse_macro_input!(tts as Parenthesized<syn::Path>).0);
-                    }
-                    "query_type" => {
-                        query_type = parse_macro_input!(tts as Parenthesized<Ident>).0;
-                    }
-                    "transparent" => {
-                        storage = QueryStorage::Transparent;
-                        num_storages += 1;
-                    }
-                    _ => {
-                        return Error::new(span, format!("unknown ra_salsa attribute `{name}`"))
-                            .to_compile_error()
-                            .into();
-                    }
-                }
-            }
-
-            let sig_span = method.sig.span();
-            // Check attribute combinations.
-            if num_storages > 1 {
-                return Error::new(sig_span, "multiple storage attributes specified")
-                    .to_compile_error()
-                    .into();
-            }
-            match &invoke {
-                Some(invoke) if storage == QueryStorage::Input => {
-                    return Error::new(
-                        invoke.span(),
-                        "#[ra_salsa::invoke] cannot be set on #[ra_salsa::input] queries",
-                    )
-                    .to_compile_error()
-                    .into();
-                }
-                _ => {}
-            }
-
-            // Extract keys.
-            let mut iter = method.sig.inputs.iter();
-            let self_receiver = match iter.next() {
-                Some(FnArg::Receiver(sr)) if sr.mutability.is_none() => sr,
-                _ => {
-                    return Error::new(
-                        sig_span,
-                        format!("first argument of query `{query_name}` must be `&self`"),
-                    )
-                    .to_compile_error()
-                    .into();
-                }
-            };
-            let mut keys: Vec<(Ident, Type)> = vec![];
-            for (idx, arg) in iter.enumerate() {
-                match arg {
-                    FnArg::Typed(syn::PatType { pat, ty, .. }) => keys.push((
-                        match pat.as_ref() {
-                            syn::Pat::Ident(ident_pat) => ident_pat.ident.clone(),
-                            _ => format_ident!("key{}", idx),
-                        },
-                        Type::clone(ty),
-                    )),
-                    arg => {
-                        return Error::new(
-                            arg.span(),
-                            format!("unsupported argument `{arg:?}` of `{query_name}`",),
-                        )
-                        .to_compile_error()
-                        .into();
-                    }
-                }
-            }
-
-            // Extract value.
-            let value = match method.sig.output {
-                ReturnType::Type(_, ref ty) => ty.as_ref().clone(),
-                ref ret => {
-                    return Error::new(
-                        ret.span(),
-                        format!("unsupported return type `{ret:?}` of `{query_name}`"),
-                    )
-                    .to_compile_error()
-                    .into();
-                }
-            };
-
-            // For `#[ra_salsa::interned]` keys, we create a "lookup key" automatically.
-            //
-            // For a query like:
-            //
-            //     fn foo(&self, x: Key1, y: Key2) -> u32
-            //
-            // we would create
-            //
-            //     fn lookup_foo(&self, x: u32) -> (Key1, Key2)
-            let lookup_query = if let QueryStorage::Interned = storage {
-                let lookup_query_type =
-                    format_ident!("{}LookupQuery", query_name.to_string().to_upper_camel_case());
-                let lookup_fn_name = format_ident!("lookup_{}", query_name);
-                let keys = keys.iter().map(|(_, ty)| ty);
-                let lookup_value: Type = parse_quote!((#(#keys),*));
-                let lookup_keys = vec![(parse_quote! { key }, value.clone())];
-                Some(Query {
-                    query_type: lookup_query_type,
-                    query_name: format!("{lookup_fn_name}"),
-                    fn_name: lookup_fn_name,
-                    receiver: self_receiver.clone(),
-                    attrs: vec![], // FIXME -- some automatically generated docs on this method?
-                    storage: QueryStorage::InternedLookup { intern_query_type: query_type.clone() },
-                    keys: lookup_keys,
-                    value: lookup_value,
-                    invoke: None,
-                    cycle: cycle.clone(),
-                })
-            } else {
-                None
-            };
-
-            queries.push(Query {
-                query_type,
-                query_name,
-                fn_name: method.sig.ident,
-                receiver: self_receiver.clone(),
-                attrs,
-                storage,
-                keys,
-                value,
-                invoke,
-                cycle,
-            });
-
-            queries.extend(lookup_query);
-        }
-    }
-
-    let group_storage = format_ident!("{}GroupStorage__", trait_name, span = Span::call_site());
-
-    let mut query_fn_declarations = proc_macro2::TokenStream::new();
-    let mut query_fn_definitions = proc_macro2::TokenStream::new();
-    let mut storage_fields = proc_macro2::TokenStream::new();
-    let mut queries_with_storage = vec![];
-    for query in &queries {
-        #[allow(clippy::map_identity)]
-        // clippy is incorrect here, this is not the identity function due to match ergonomics
-        let (key_names, keys): (Vec<_>, Vec<_>) = query.keys.iter().map(|(a, b)| (a, b)).unzip();
-        let value = &query.value;
-        let fn_name = &query.fn_name;
-        let qt = &query.query_type;
-        let attrs = &query.attrs;
-        let self_receiver = &query.receiver;
-
-        query_fn_declarations.extend(quote! {
-            #(#attrs)*
-            fn #fn_name(#self_receiver, #(#key_names: #keys),*) -> #value;
-        });
-
-        // Special case: transparent queries don't create actual storage,
-        // just inline the definition
-        if let QueryStorage::Transparent = query.storage {
-            let invoke = query.invoke_tt();
-            query_fn_definitions.extend(quote! {
-                fn #fn_name(&self, #(#key_names: #keys),*) -> #value {
-                    #invoke(self, #(#key_names),*)
-                }
-            });
-            continue;
-        }
-
-        queries_with_storage.push(fn_name);
-
-        let tracing = if let QueryStorage::Memoized | QueryStorage::LruMemoized = query.storage {
-            let s = format!("{trait_name}::{fn_name}");
-            Some(quote! {
-                let _p = tracing::trace_span!(#s, #(#key_names = tracing::field::debug(&#key_names)),*).entered();
-            })
-        } else {
-            None
-        }
-        .into_iter();
-
-        query_fn_definitions.extend(quote! {
-            fn #fn_name(&self, #(#key_names: #keys),*) -> #value {
-                #(#tracing),*
-                // Create a shim to force the code to be monomorphized in the
-                // query crate. Our experiments revealed that this makes a big
-                // difference in total compilation time in rust-analyzer, though
-                // it's not totally obvious why that should be.
-                fn __shim(db: &(dyn #trait_name + '_), #(#key_names: #keys),*) -> #value {
-                    ra_salsa::plumbing::get_query_table::<#qt>(db).get((#(#key_names),*))
-                }
-                __shim(self, #(#key_names),*)
-
-            }
-        });
-
-        // For input queries, we need `set_foo` etc
-        if let QueryStorage::Input = query.storage {
-            let set_fn_name = format_ident!("set_{}", fn_name);
-            let set_with_durability_fn_name = format_ident!("set_{}_with_durability", fn_name);
-
-            let set_fn_docs = format!(
-                "
-                Set the value of the `{fn_name}` input.
-
-                See `{fn_name}` for details.
-
-                *Note:* Setting values will trigger cancellation
-                of any ongoing queries; this method blocks until
-                those queries have been cancelled.
-            "
-            );
-
-            let set_constant_fn_docs = format!(
-                "
-                Set the value of the `{fn_name}` input with a
-                specific durability instead of the default of
-                `Durability::LOW`. You can use `Durability::MAX`
-                to promise that its value will never change again.
-
-                See `{fn_name}` for details.
-
-                *Note:* Setting values will trigger cancellation
-                of any ongoing queries; this method blocks until
-                those queries have been cancelled.
-            "
-            );
-
-            query_fn_declarations.extend(quote! {
-                # [doc = #set_fn_docs]
-                fn #set_fn_name(&mut self, #(#key_names: #keys,)* value__: #value);
-
-
-                # [doc = #set_constant_fn_docs]
-                fn #set_with_durability_fn_name(&mut self, #(#key_names: #keys,)* value__: #value, durability__: ra_salsa::Durability);
-            });
-
-            query_fn_definitions.extend(quote! {
-                fn #set_fn_name(&mut self, #(#key_names: #keys,)* value__: #value) {
-                    fn __shim(db: &mut dyn #trait_name, #(#key_names: #keys,)* value__: #value) {
-                        ra_salsa::plumbing::get_query_table_mut::<#qt>(db).set((#(#key_names),*), value__)
-                    }
-                    __shim(self, #(#key_names,)* value__)
-                }
-
-                fn #set_with_durability_fn_name(&mut self, #(#key_names: #keys,)* value__: #value, durability__: ra_salsa::Durability) {
-                    fn __shim(db: &mut dyn #trait_name, #(#key_names: #keys,)* value__: #value, durability__: ra_salsa::Durability) {
-                        ra_salsa::plumbing::get_query_table_mut::<#qt>(db).set_with_durability((#(#key_names),*), value__, durability__)
-                    }
-                    __shim(self, #(#key_names,)* value__ ,durability__)
-                }
-            });
-        }
-
-        // A field for the storage struct
-        storage_fields.extend(quote! {
-            #fn_name: std::sync::Arc<<#qt as ra_salsa::Query>::Storage>,
-        });
-    }
-
-    // Emit the trait itself.
-    let mut output = {
-        let bounds = &input.supertraits;
-        quote! {
-            #(#trait_attrs)*
-            #trait_vis trait #trait_name :
-            ra_salsa::Database +
-            ra_salsa::plumbing::HasQueryGroup<#group_struct> +
-            #bounds
-            {
-                #query_fn_declarations
-            }
-        }
-    };
-
-    // Emit the query group struct and impl of `QueryGroup`.
-    output.extend(quote! {
-        /// Representative struct for the query group.
-        #trait_vis struct #group_struct { }
-
-        impl ra_salsa::plumbing::QueryGroup for #group_struct
-        {
-            type DynDb = #dyn_db;
-            type GroupStorage = #group_storage;
-        }
-    });
-
-    // Emit an impl of the trait
-    output.extend({
-        let bounds = input.supertraits;
-        quote! {
-            impl<DB> #trait_name for DB
-            where
-                DB: #bounds,
-                DB: ra_salsa::Database,
-                DB: ra_salsa::plumbing::HasQueryGroup<#group_struct>,
-            {
-                #query_fn_definitions
-            }
-        }
-    });
-
-    let non_transparent_queries =
-        || queries.iter().filter(|q| !matches!(q.storage, QueryStorage::Transparent));
-
-    // Emit the query types.
-    for (query, query_index) in non_transparent_queries().zip(0_u16..) {
-        let fn_name = &query.fn_name;
-        let qt = &query.query_type;
-
-        let storage = match &query.storage {
-            QueryStorage::Memoized => quote!(ra_salsa::plumbing::MemoizedStorage<Self>),
-            QueryStorage::LruMemoized => quote!(ra_salsa::plumbing::LruMemoizedStorage<Self>),
-            QueryStorage::LruDependencies => {
-                quote!(ra_salsa::plumbing::LruDependencyStorage<Self>)
-            }
-            QueryStorage::Input if query.keys.is_empty() => {
-                quote!(ra_salsa::plumbing::UnitInputStorage<Self>)
-            }
-            QueryStorage::Input => quote!(ra_salsa::plumbing::InputStorage<Self>),
-            QueryStorage::Interned => quote!(ra_salsa::plumbing::InternedStorage<Self>),
-            QueryStorage::InternedLookup { intern_query_type } => {
-                quote!(ra_salsa::plumbing::LookupInternedStorage<Self, #intern_query_type>)
-            }
-            QueryStorage::Transparent => panic!("should have been filtered"),
-        };
-        let keys = query.keys.iter().map(|(_, ty)| ty);
-        let value = &query.value;
-        let query_name = &query.query_name;
-
-        // Emit the query struct and implement the Query trait on it.
-        output.extend(quote! {
-            #[derive(Default, Debug)]
-            #trait_vis struct #qt;
-        });
-
-        output.extend(quote! {
-            impl #qt {
-                /// Get access to extra methods pertaining to this query.
-                /// You can also use it to invoke this query.
-                #trait_vis fn in_db(self, db: &#dyn_db) -> ra_salsa::QueryTable<'_, Self>
-                {
-                    ra_salsa::plumbing::get_query_table::<#qt>(db)
-                }
-            }
-        });
-
-        output.extend(quote! {
-            impl #qt {
-                /// Like `in_db`, but gives access to methods for setting the
-                /// value of an input. Not applicable to derived queries.
-                ///
-                /// # Threads, cancellation, and blocking
-                ///
-                /// Mutating the value of a query cannot be done while there are
-                /// still other queries executing. If you are using your database
-                /// within a single thread, this is not a problem: you only have
-                /// `&self` access to the database, but this method requires `&mut
-                /// self`.
-                ///
-                /// However, if you have used `snapshot` to create other threads,
-                /// then attempts to `set` will **block the current thread** until
-                /// those snapshots are dropped (usually when those threads
-                /// complete). This also implies that if you create a snapshot but
-                /// do not send it to another thread, then invoking `set` will
-                /// deadlock.
-                ///
-                /// Before blocking, the thread that is attempting to `set` will
-                /// also set a cancellation flag. This will cause any query
-                /// invocations in other threads to unwind with a `Cancelled`
-                /// sentinel value and eventually let the `set` succeed once all
-                /// threads have unwound past the ra_salsa invocation.
-                ///
-                /// If your query implementations are performing expensive
-                /// operations without invoking another query, you can also use
-                /// the `Runtime::unwind_if_cancelled` method to check for an
-                /// ongoing cancellation and bring those operations to a close,
-                /// thus allowing the `set` to succeed. Otherwise, long-running
-                /// computations may lead to "starvation", meaning that the
-                /// thread attempting to `set` has to wait a long, long time. =)
-                #trait_vis fn in_db_mut(self, db: &mut #dyn_db) -> ra_salsa::QueryTableMut<'_, Self>
-                {
-                    ra_salsa::plumbing::get_query_table_mut::<#qt>(db)
-                }
-            }
-
-            impl<'d> ra_salsa::QueryDb<'d> for #qt
-            {
-                type DynDb = #dyn_db + 'd;
-                type Group = #group_struct;
-                type GroupStorage = #group_storage;
-            }
-
-            // ANCHOR:Query_impl
-            impl ra_salsa::Query for #qt
-            {
-                type Key = (#(#keys),*);
-                type Value = #value;
-                type Storage = #storage;
-
-                const QUERY_INDEX: u16 = #query_index;
-
-                const QUERY_NAME: &'static str = #query_name;
-
-                fn query_storage<'a>(
-                    group_storage: &'a <Self as ra_salsa::QueryDb<'_>>::GroupStorage,
-                ) -> &'a std::sync::Arc<Self::Storage> {
-                    &group_storage.#fn_name
-                }
-
-                fn query_storage_mut<'a>(
-                    group_storage: &'a <Self as ra_salsa::QueryDb<'_>>::GroupStorage,
-                ) -> &'a std::sync::Arc<Self::Storage> {
-                    &group_storage.#fn_name
-                }
-            }
-            // ANCHOR_END:Query_impl
-        });
-
-        // Implement the QueryFunction trait for queries which need it.
-        if query.storage.needs_query_function() {
-            let span = query.fn_name.span();
-
-            let key_names: Vec<_> = query.keys.iter().map(|(pat, _)| pat).collect();
-            let key_pattern = if query.keys.len() == 1 {
-                quote! { #(#key_names),* }
-            } else {
-                quote! { (#(#key_names),*) }
-            };
-            let invoke = query.invoke_tt();
-
-            let recover = if let Some(cycle_recovery_fn) = &query.cycle {
-                quote! {
-                    const CYCLE_STRATEGY: ra_salsa::plumbing::CycleRecoveryStrategy =
-                        ra_salsa::plumbing::CycleRecoveryStrategy::Fallback;
-                    fn cycle_fallback(db: &<Self as ra_salsa::QueryDb<'_>>::DynDb, cycle: &ra_salsa::Cycle, #key_pattern: &<Self as ra_salsa::Query>::Key)
-                        -> <Self as ra_salsa::Query>::Value {
-                        #cycle_recovery_fn(
-                                db,
-                                cycle,
-                                #(#key_names),*
-                        )
-                    }
-                }
-            } else {
-                quote! {
-                    const CYCLE_STRATEGY: ra_salsa::plumbing::CycleRecoveryStrategy =
-                        ra_salsa::plumbing::CycleRecoveryStrategy::Panic;
-                }
-            };
-
-            output.extend(quote_spanned! {span=>
-                // ANCHOR:QueryFunction_impl
-                impl ra_salsa::plumbing::QueryFunction for #qt
-                {
-                    fn execute(db: &<Self as ra_salsa::QueryDb<'_>>::DynDb, #key_pattern: <Self as ra_salsa::Query>::Key)
-                        -> <Self as ra_salsa::Query>::Value {
-                        #invoke(db, #(#key_names),*)
-                    }
-
-                    #recover
-                }
-                // ANCHOR_END:QueryFunction_impl
-            });
-        }
-    }
-
-    let mut fmt_ops = proc_macro2::TokenStream::new();
-    for (Query { fn_name, .. }, query_index) in non_transparent_queries().zip(0_u16..) {
-        fmt_ops.extend(quote! {
-            #query_index => {
-                ra_salsa::plumbing::QueryStorageOps::fmt_index(
-                    &*self.#fn_name, db, input.key_index(), fmt,
-                )
-            }
-        });
-    }
-
-    let mut maybe_changed_ops = proc_macro2::TokenStream::new();
-    for (Query { fn_name, .. }, query_index) in non_transparent_queries().zip(0_u16..) {
-        maybe_changed_ops.extend(quote! {
-            #query_index => {
-                ra_salsa::plumbing::QueryStorageOps::maybe_changed_after(
-                    &*self.#fn_name, db, input.key_index(), revision
-                )
-            }
-        });
-    }
-
-    let mut cycle_recovery_strategy_ops = proc_macro2::TokenStream::new();
-    for (Query { fn_name, .. }, query_index) in non_transparent_queries().zip(0_u16..) {
-        cycle_recovery_strategy_ops.extend(quote! {
-            #query_index => {
-                ra_salsa::plumbing::QueryStorageOps::cycle_recovery_strategy(
-                    &*self.#fn_name
-                )
-            }
-        });
-    }
-
-    let mut for_each_ops = proc_macro2::TokenStream::new();
-    for Query { fn_name, .. } in non_transparent_queries() {
-        for_each_ops.extend(quote! {
-            op(&*self.#fn_name);
-        });
-    }
-
-    // Emit query group storage struct
-    output.extend(quote! {
-        #trait_vis struct #group_storage {
-            #storage_fields
-        }
-
-        // ANCHOR:group_storage_new
-        impl #group_storage {
-            #trait_vis fn new(group_index: u16) -> Self {
-                #group_storage {
-                    #(
-                        #queries_with_storage:
-                        std::sync::Arc::new(ra_salsa::plumbing::QueryStorageOps::new(group_index)),
-                    )*
-                }
-            }
-        }
-        // ANCHOR_END:group_storage_new
-
-        // ANCHOR:group_storage_methods
-        impl #group_storage {
-            #trait_vis fn fmt_index(
-                &self,
-                db: &(#dyn_db + '_),
-                input: ra_salsa::DatabaseKeyIndex,
-                fmt: &mut std::fmt::Formatter<'_>,
-            ) -> std::fmt::Result {
-                match input.query_index() {
-                    #fmt_ops
-                    i => panic!("ra_salsa: impossible query index {}", i),
-                }
-            }
-
-            #trait_vis fn maybe_changed_after(
-                &self,
-                db: &(#dyn_db + '_),
-                input: ra_salsa::DatabaseKeyIndex,
-                revision: ra_salsa::Revision,
-            ) -> bool {
-                match input.query_index() {
-                    #maybe_changed_ops
-                    i => panic!("ra_salsa: impossible query index {}", i),
-                }
-            }
-
-            #trait_vis fn cycle_recovery_strategy(
-                &self,
-                db: &(#dyn_db + '_),
-                input: ra_salsa::DatabaseKeyIndex,
-            ) -> ra_salsa::plumbing::CycleRecoveryStrategy {
-                match input.query_index() {
-                    #cycle_recovery_strategy_ops
-                    i => panic!("ra_salsa: impossible query index {}", i),
-                }
-            }
-
-            #trait_vis fn for_each_query(
-                &self,
-                _runtime: &ra_salsa::Runtime,
-                mut op: &mut dyn FnMut(&dyn ra_salsa::plumbing::QueryStorageMassOps),
-            ) {
-                #for_each_ops
-            }
-        }
-        // ANCHOR_END:group_storage_methods
-    });
-    output.into()
-}
-
-struct SalsaAttr {
-    name: String,
-    tts: TokenStream,
-    span: Span,
-}
-
-impl std::fmt::Debug for SalsaAttr {
-    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(fmt, "{:?}", self.name)
-    }
-}
-
-impl TryFrom<syn::Attribute> for SalsaAttr {
-    type Error = syn::Attribute;
-
-    fn try_from(attr: syn::Attribute) -> Result<SalsaAttr, syn::Attribute> {
-        if is_not_salsa_attr_path(attr.path()) {
-            return Err(attr);
-        }
-
-        let span = attr.span();
-        let name = attr.path().segments[1].ident.to_string();
-        let tts = match attr.meta {
-            syn::Meta::Path(path) => path.into_token_stream(),
-            syn::Meta::List(ref list) => {
-                let tts = list
-                    .into_token_stream()
-                    .into_iter()
-                    .skip(attr.path().to_token_stream().into_iter().count());
-                proc_macro2::TokenStream::from_iter(tts)
-            }
-            syn::Meta::NameValue(nv) => nv.into_token_stream(),
-        }
-        .into();
-
-        Ok(SalsaAttr { name, tts, span })
-    }
-}
-
-fn is_not_salsa_attr_path(path: &syn::Path) -> bool {
-    path.segments.first().map(|s| s.ident != "ra_salsa").unwrap_or(true) || path.segments.len() != 2
-}
-
-fn filter_attrs(attrs: Vec<Attribute>) -> (Vec<Attribute>, Vec<SalsaAttr>) {
-    let mut other = vec![];
-    let mut ra_salsa = vec![];
-    // Leave non-ra_salsa attributes untouched. These are
-    // attributes that don't start with `ra_salsa::` or don't have
-    // exactly two segments in their path.
-    // Keep the ra_salsa attributes around.
-    for attr in attrs {
-        match SalsaAttr::try_from(attr) {
-            Ok(it) => ra_salsa.push(it),
-            Err(it) => other.push(it),
-        }
-    }
-    (other, ra_salsa)
-}
-
-#[derive(Debug)]
-struct Query {
-    fn_name: Ident,
-    receiver: syn::Receiver,
-    query_name: String,
-    attrs: Vec<syn::Attribute>,
-    query_type: Ident,
-    storage: QueryStorage,
-    keys: Vec<(Ident, syn::Type)>,
-    value: syn::Type,
-    invoke: Option<syn::Path>,
-    cycle: Option<syn::Path>,
-}
-
-impl Query {
-    fn invoke_tt(&self) -> proc_macro2::TokenStream {
-        match &self.invoke {
-            Some(i) => i.into_token_stream(),
-            None => self.fn_name.clone().into_token_stream(),
-        }
-    }
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-enum QueryStorage {
-    Memoized,
-    LruDependencies,
-    LruMemoized,
-    Input,
-    Interned,
-    InternedLookup { intern_query_type: Ident },
-    Transparent,
-}
-
-impl QueryStorage {
-    /// Do we need a `QueryFunction` impl for this type of query?
-    fn needs_query_function(&self) -> bool {
-        match self {
-            QueryStorage::Input
-            | QueryStorage::Interned
-            | QueryStorage::InternedLookup { .. }
-            | QueryStorage::Transparent => false,
-            QueryStorage::Memoized | QueryStorage::LruMemoized | QueryStorage::LruDependencies => {
-                true
-            }
-        }
-    }
-}
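
For orientation, here is a hedged sketch of the caller-facing API this macro generated, pieced together from the `quote!` blocks above. `Source`, `SourceStorage`, `text`, and `line_count` are hypothetical names, and the exact attribute paths and re-exports depend on how `ra_salsa` exposed these macros; this is not the literal expansion.

```rust,ignore
// Hypothetical query group; storage attributes are mutually exclusive,
// and `#[ra_salsa::invoke]` is rejected on `#[ra_salsa::input]` queries.
#[ra_salsa::query_group(SourceStorage)]
trait Source: ra_salsa::Database {
    #[ra_salsa::input]
    fn text(&self, file: u32) -> std::sync::Arc<String>;

    // No storage attribute: defaults to memoized, invoking the free
    // function `line_count` below.
    fn line_count(&self, file: u32) -> usize;
}

fn line_count(db: &dyn Source, file: u32) -> usize {
    db.text(file).lines().count()
}

fn demo(db: &mut impl Source) {
    // Generated setter for the input query; per the generated docs, setting
    // a value cancels in-flight queries and blocks until they unwind.
    db.set_text(0, std::sync::Arc::new("one\ntwo".to_owned()));
    // Variant that pins a durability instead of the default `Durability::LOW`.
    db.set_text_with_durability(
        1,
        std::sync::Arc::new(String::new()),
        ra_salsa::Durability::MAX,
    );
    assert_eq!(db.line_count(0), 2);
}
```

The derived `line_count` query falls back to invoking the free function of the same name because no `#[ra_salsa::invoke]` override is given, matching `Query::invoke_tt` above.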
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/src/debug.rs b/src/tools/rust-analyzer/crates/ra-salsa/src/debug.rs
deleted file mode 100644
index 5f113541f04cf..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/src/debug.rs
+++ /dev/null
@@ -1,65 +0,0 @@
-//! Debugging APIs: these are meant for use when unit-testing or
-//! debugging your application but aren't ordinarily needed.
-
-use crate::durability::Durability;
-use crate::plumbing::QueryStorageOps;
-use crate::Query;
-use crate::QueryTable;
-
-/// Additional methods on queries that can be used to "peek into"
-/// their current state. These methods are meant for debugging and
-/// observing the effects of garbage collection etc.
-pub trait DebugQueryTable {
-    /// Key of this query.
-    type Key;
-
-    /// Value of this query.
-    type Value;
-
-    /// Returns a lower bound on the durability for the given key.
-    /// This is typically the minimum durability of all values that
-    /// the query accessed, but we may return a lower durability in
-    /// some cases.
-    fn durability(&self, key: Self::Key) -> Durability;
-
-    /// Get the (current) set of the entries in the query table.
-    fn entries<C>(&self) -> C
-    where
-        C: FromIterator<TableEntry<Self::Key, Self::Value>>;
-}
-
-/// An entry from a query table, for debugging and inspecting the table state.
-#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
-#[non_exhaustive]
-pub struct TableEntry<K, V> {
-    /// key of the query
-    pub key: K,
-    /// value of the query, if it is stored
-    pub value: Option<V>,
-}
-
-impl<K, V> TableEntry<K, V> {
-    pub(crate) fn new(key: K, value: Option<V>) -> TableEntry<K, V> {
-        TableEntry { key, value }
-    }
-}
-
-impl<Q> DebugQueryTable for QueryTable<'_, Q>
-where
-    Q: Query,
-    Q::Storage: QueryStorageOps<Q>,
-{
-    type Key = Q::Key;
-    type Value = Q::Value;
-
-    fn durability(&self, key: Q::Key) -> Durability {
-        self.storage.durability(self.db, &key)
-    }
-
-    fn entries<C>(&self) -> C
-    where
-        C: FromIterator<TableEntry<Self::Key, Self::Value>>,
-    {
-        self.storage.entries(self.db)
-    }
-}
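
A hedged sketch of how this trait was reachable from a macro-generated query type via the `in_db` method emitted by the macro above; `LineCountQuery` and the key/value types reuse the hypothetical `Source` group from the earlier sketch, and the `ra_salsa::debug` module path is assumed.

```rust,ignore
use ra_salsa::debug::{DebugQueryTable, TableEntry};

// `LineCountQuery` stands in for a macro-generated query struct.
fn dump_line_count_table(db: &dyn Source) {
    let table = LineCountQuery.in_db(db);
    // `entries` collects into any `FromIterator` container.
    let entries: Vec<TableEntry<u32, usize>> = table.entries();
    for TableEntry { key, value, .. } in entries {
        let durability = table.durability(key);
        println!("line_count({key}) = {value:?} (durability {durability:?})");
    }
}
```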
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/src/derived.rs b/src/tools/rust-analyzer/crates/ra-salsa/src/derived.rs
deleted file mode 100644
index 8b2fdd6b19cc8..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/src/derived.rs
+++ /dev/null
@@ -1,163 +0,0 @@
-use crate::debug::TableEntry;
-use crate::durability::Durability;
-use crate::hash::FxIndexMap;
-use crate::plumbing::DerivedQueryStorageOps;
-use crate::plumbing::QueryFunction;
-use crate::plumbing::QueryStorageMassOps;
-use crate::plumbing::QueryStorageOps;
-use crate::runtime::StampedValue;
-use crate::Runtime;
-use crate::{Database, DatabaseKeyIndex, QueryDb, Revision};
-use parking_lot::RwLock;
-use std::borrow::Borrow;
-use std::hash::Hash;
-use triomphe::Arc;
-
-mod slot;
-use slot::Slot;
-
-/// Memoized queries store the result plus a list of the other queries
-/// that they invoked. This means we can avoid recomputing them when
-/// none of those inputs have changed.
-pub type MemoizedStorage<Q> = DerivedStorage<Q>;
-
-/// Handles storage where the value is 'derived' by executing a
-/// function (in contrast to "inputs").
-pub struct DerivedStorage<Q>
-where
-    Q: QueryFunction,
-{
-    group_index: u16,
-    slot_map: RwLock<FxIndexMap<Q::Key, Arc<Slot<Q>>>>,
-}
-
-impl<Q> std::panic::RefUnwindSafe for DerivedStorage<Q>
-where
-    Q: QueryFunction,
-
-    Q::Key: std::panic::RefUnwindSafe,
-    Q::Value: std::panic::RefUnwindSafe,
-{
-}
-
-impl<Q> DerivedStorage<Q>
-where
-    Q: QueryFunction,
-    Q::Value: Eq,
-{
-    fn slot(&self, key: &Q::Key) -> Arc<Slot<Q>> {
-        if let Some(v) = self.slot_map.read().get(key) {
-            return v.clone();
-        }
-
-        let mut write = self.slot_map.write();
-        let entry = write.entry(key.clone());
-        let key_index = entry.index() as u32;
-        let database_key_index = DatabaseKeyIndex {
-            group_index: self.group_index,
-            query_index: Q::QUERY_INDEX,
-            key_index,
-        };
-        entry.or_insert_with(|| Arc::new(Slot::new(database_key_index))).clone()
-    }
-}
-
-impl<Q> QueryStorageOps<Q> for DerivedStorage<Q>
-where
-    Q: QueryFunction,
-    Q::Value: Eq,
-{
-    const CYCLE_STRATEGY: crate::plumbing::CycleRecoveryStrategy = Q::CYCLE_STRATEGY;
-
-    fn new(group_index: u16) -> Self {
-        DerivedStorage { group_index, slot_map: RwLock::new(FxIndexMap::default()) }
-    }
-
-    fn fmt_index(
-        &self,
-        _db: &<Q as QueryDb<'_>>::DynDb,
-        index: u32,
-        fmt: &mut std::fmt::Formatter<'_>,
-    ) -> std::fmt::Result {
-        let slot_map = self.slot_map.read();
-        let key = slot_map.get_index(index as usize).unwrap().0;
-        write!(fmt, "{}::{}({:?})", std::any::type_name::<Q>(), Q::QUERY_NAME, key)
-    }
-
-    fn maybe_changed_after(
-        &self,
-        db: &<Q as QueryDb<'_>>::DynDb,
-        index: u32,
-        revision: Revision,
-    ) -> bool {
-        debug_assert!(revision < db.salsa_runtime().current_revision());
-        let (key, slot) = {
-            let read = self.slot_map.read();
-            let Some((key, slot)) = read.get_index(index as usize) else {
-                return false;
-            };
-            (key.clone(), slot.clone())
-        };
-        slot.maybe_changed_after(db, revision, &key)
-    }
-
-    fn fetch(&self, db: &<Q as QueryDb<'_>>::DynDb, key: &Q::Key) -> Q::Value {
-        db.unwind_if_cancelled();
-
-        let slot = self.slot(key);
-        let StampedValue { value, durability, changed_at } = slot.read(db, key);
-
-        db.salsa_runtime().report_query_read_and_unwind_if_cycle_resulted(
-            slot.database_key_index(),
-            durability,
-            changed_at,
-        );
-
-        value
-    }
-
-    fn durability(&self, db: &<Q as QueryDb<'_>>::DynDb, key: &Q::Key) -> Durability {
-        self.slot_map.read().get(key).map_or(Durability::LOW, |slot| slot.durability(db))
-    }
-
-    fn entries<C>(&self, _db: &<Q as QueryDb<'_>>::DynDb) -> C
-    where
-        C: std::iter::FromIterator<TableEntry<Q::Key, Q::Value>>,
-    {
-        let slot_map = self.slot_map.read();
-        slot_map.iter().filter_map(|(key, slot)| slot.as_table_entry(key)).collect()
-    }
-}
-
-impl<Q> QueryStorageMassOps for DerivedStorage<Q>
-where
-    Q: QueryFunction,
-{
-    fn purge(&self) {
-        *self.slot_map.write() = Default::default();
-    }
-}
-
-impl<Q> DerivedQueryStorageOps<Q> for DerivedStorage<Q>
-where
-    Q: QueryFunction,
-    Q::Value: Eq,
-{
-    fn invalidate<S>(&self, runtime: &mut Runtime, key: &S)
-    where
-        S: Eq + Hash,
-        Q::Key: Borrow<S>,
-    {
-        runtime.with_incremented_revision(|new_revision| {
-            let map_read = self.slot_map.read();
-
-            if let Some(slot) = map_read.get(key) {
-                if let Some(durability) = slot.invalidate(new_revision) {
-                    return Some(durability);
-                }
-            }
-
-            None
-        })
-    }
-}
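
`DerivedStorage::slot` above uses a read-then-write locking pattern: check under the cheap read lock first, and only take the write lock to create a missing slot, going through the entry API so a racing insert by another thread is still handled. A self-contained sketch of that pattern, using `std::collections::HashMap` in place of the crate's `FxIndexMap` and illustrative names:

```rust,ignore
use std::collections::HashMap;
use std::hash::Hash;
use std::sync::Arc;

use parking_lot::RwLock;

struct SlotMap<K, V> {
    inner: RwLock<HashMap<K, Arc<V>>>,
}

impl<K: Hash + Eq + Clone, V> SlotMap<K, V> {
    fn get_or_insert_with(&self, key: &K, make: impl FnOnce() -> V) -> Arc<V> {
        // Fast path: most lookups find an existing slot under the read lock.
        if let Some(slot) = self.inner.read().get(key) {
            return slot.clone();
        }
        // Slow path: take the write lock. Going through `entry` keeps this
        // correct even if another thread inserted the slot between the two
        // lock acquisitions.
        self.inner.write().entry(key.clone()).or_insert_with(|| Arc::new(make())).clone()
    }
}
```

The original additionally derives the slot's `DatabaseKeyIndex` from the entry's position in the `FxIndexMap`, which is why an index-preserving map is used there.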
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/src/derived/slot.rs b/src/tools/rust-analyzer/crates/ra-salsa/src/derived/slot.rs
deleted file mode 100644
index cfe2c48f411f1..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/src/derived/slot.rs
+++ /dev/null
@@ -1,782 +0,0 @@
-use crate::debug::TableEntry;
-use crate::durability::Durability;
-use crate::plumbing::{DatabaseOps, QueryFunction};
-use crate::revision::Revision;
-use crate::runtime::local_state::ActiveQueryGuard;
-use crate::runtime::local_state::QueryRevisions;
-use crate::runtime::Runtime;
-use crate::runtime::RuntimeId;
-use crate::runtime::StampedValue;
-use crate::runtime::WaitResult;
-use crate::Cycle;
-use crate::{Database, DatabaseKeyIndex, Event, EventKind, QueryDb};
-use parking_lot::{RawRwLock, RwLock};
-use std::ops::Deref;
-use std::sync::atomic::{AtomicBool, Ordering};
-use tracing::trace;
-
-pub(super) struct Slot<Q>
-where
-    Q: QueryFunction,
-{
-    key_index: u32,
-    // FIXME: Yeet this
-    group_index: u16,
-    state: RwLock<QueryState<Q>>,
-}
-
-/// Defines the "current state" of a query's memoized results.
-enum QueryState<Q>
-where
-    Q: QueryFunction,
-{
-    NotComputed,
-
-    /// The runtime with the given id is currently computing the
-    /// result of this query.
-    InProgress {
-        id: RuntimeId,
-
-        /// Set to true if any other queries are blocked,
-        /// waiting for this query to complete.
-        anyone_waiting: AtomicBool,
-    },
-
-    /// We have computed the query already, and here is the result.
-    Memoized(Memo<Q::Value>),
-}
-
-struct Memo<V> {
-    /// The result of the query, if we decide to memoize it.
-    value: V,
-
-    /// Last revision when this memo was verified; this begins
-    /// as the current revision.
-    pub(crate) verified_at: Revision,
-
-    /// Revision information
-    revisions: QueryRevisions,
-}
-
-/// Return value of `probe` helper.
-enum ProbeState<V, G> {
-    /// Another thread was active but has completed.
-    /// Try again!
-    Retry,
-
-    /// No entry for this key at all.
-    NotComputed(G),
-
-    /// There is an entry, but its contents have not been
-    /// verified in this revision.
-    Stale(G),
-
-    /// There is an entry which has been verified,
-    /// and it has the following value-- or, we blocked
-    /// on another thread, and that resulted in a cycle.
-    UpToDate(V),
-}
-
-/// Return value of `maybe_changed_after_probe` helper.
-enum MaybeChangedSinceProbeState<G> {
-    /// Another thread was active but has completed.
-    /// Try again!
-    Retry,
-
-    /// Value may have changed in the given revision.
-    ChangedAt(Revision),
-
-    /// There is a stale cache entry that has not been
-    /// verified in this revision, so we can't say.
-    Stale(G),
-}
-
-impl<Q> Slot<Q>
-where
-    Q: QueryFunction,
-    Q::Value: Eq,
-{
-    pub(super) fn new(database_key_index: DatabaseKeyIndex) -> Self {
-        Self {
-            key_index: database_key_index.key_index,
-            group_index: database_key_index.group_index,
-            state: RwLock::new(QueryState::NotComputed),
-        }
-    }
-
-    pub(super) fn database_key_index(&self) -> DatabaseKeyIndex {
-        DatabaseKeyIndex {
-            group_index: self.group_index,
-            query_index: Q::QUERY_INDEX,
-            key_index: self.key_index,
-        }
-    }
-
-    pub(super) fn read(
-        &self,
-        db: &<Q as QueryDb<'_>>::DynDb,
-        key: &Q::Key,
-    ) -> StampedValue<Q::Value> {
-        let runtime = db.salsa_runtime();
-
-        // NB: We don't need to worry about people modifying the
-        // revision out from under our feet. Either `db` is a frozen
-        // database, in which case there is a lock, or the mutator
-        // thread is the current thread, and it will be prevented from
-        // doing any `set` invocations while the query function runs.
-        let revision_now = runtime.current_revision();
-
-        trace!("{:?}: invoked at {:?}", self, revision_now,);
-
-        // First, do a check with a read-lock.
-        loop {
-            match self.probe(db, self.state.read(), runtime, revision_now) {
-                ProbeState::UpToDate(v) => return v,
-                ProbeState::Stale(..) | ProbeState::NotComputed(..) => break,
-                ProbeState::Retry => continue,
-            }
-        }
-
-        self.read_upgrade(db, key, revision_now)
-    }
-
-    /// Second phase of a read operation: acquires an upgradable-read
-    /// and -- if needed -- validates whether inputs have changed,
-    /// recomputes value, etc. This is invoked after our initial probe
-    /// shows a potentially out of date value.
-    fn read_upgrade(
-        &self,
-        db: &<Q as QueryDb<'_>>::DynDb,
-        key: &Q::Key,
-        revision_now: Revision,
-    ) -> StampedValue<Q::Value> {
-        let runtime = db.salsa_runtime();
-
-        trace!("{:?}: read_upgrade(revision_now={:?})", self, revision_now,);
-
-        // Check with an upgradable read to see if there is a value
-        // already. (This permits other readers but prevents anyone
-        // else from running `read_upgrade` at the same time.)
-        let mut old_memo = loop {
-            match self.probe(db, self.state.upgradable_read(), runtime, revision_now) {
-                ProbeState::UpToDate(v) => return v,
-                ProbeState::Stale(state) | ProbeState::NotComputed(state) => {
-                    type RwLockUpgradableReadGuard<'a, T> =
-                        lock_api::RwLockUpgradableReadGuard<'a, RawRwLock, T>;
-
-                    let mut state = RwLockUpgradableReadGuard::upgrade(state);
-                    match std::mem::replace(&mut *state, QueryState::in_progress(runtime.id())) {
-                        QueryState::Memoized(old_memo) => break Some(old_memo),
-                        QueryState::InProgress { .. } => unreachable!(),
-                        QueryState::NotComputed => break None,
-                    }
-                }
-                ProbeState::Retry => continue,
-            }
-        };
-
-        let panic_guard = PanicGuard::new(self, runtime);
-        let active_query = runtime.push_query(self.database_key_index());
-
-        // If we have an old-value, it *may* now be stale, since there
-        // has been a new revision since the last time we checked. So,
-        // first things first, let's walk over each of our previous
-        // inputs and check whether they are out of date.
-        if let Some(memo) = &mut old_memo {
-            if let Some(value) = memo.verify_value(db.ops_database(), revision_now, &active_query) {
-                trace!("{:?}: validated old memoized value", self,);
-
-                db.salsa_event(Event {
-                    runtime_id: runtime.id(),
-                    kind: EventKind::DidValidateMemoizedValue {
-                        database_key: self.database_key_index(),
-                    },
-                });
-
-                panic_guard.proceed(old_memo);
-
-                return value;
-            }
-        }
-
-        self.execute(db, runtime, revision_now, active_query, panic_guard, old_memo, key)
-    }
-
-    fn execute(
-        &self,
-        db: &<Q as QueryDb<'_>>::DynDb,
-        runtime: &Runtime,
-        revision_now: Revision,
-        active_query: ActiveQueryGuard<'_>,
-        panic_guard: PanicGuard<'_, Q>,
-        old_memo: Option<Memo<Q::Value>>,
-        key: &Q::Key,
-    ) -> StampedValue<Q::Value> {
-        tracing::trace!("{:?}: executing query", self.database_key_index().debug(db));
-
-        db.salsa_event(Event {
-            runtime_id: db.salsa_runtime().id(),
-            kind: EventKind::WillExecute { database_key: self.database_key_index() },
-        });
-
-        // Query was not previously executed, or value is potentially
-        // stale, or value is absent. Let's execute!
-        let value = match Cycle::catch(|| Q::execute(db, key.clone())) {
-            Ok(v) => v,
-            Err(cycle) => {
-                tracing::trace!(
-                    "{:?}: caught cycle {:?}, have strategy {:?}",
-                    self.database_key_index().debug(db),
-                    cycle,
-                    Q::CYCLE_STRATEGY,
-                );
-                match Q::CYCLE_STRATEGY {
-                    crate::plumbing::CycleRecoveryStrategy::Panic => {
-                        panic_guard.proceed(None);
-                        cycle.throw()
-                    }
-                    crate::plumbing::CycleRecoveryStrategy::Fallback => {
-                        if let Some(c) = active_query.take_cycle() {
-                            assert!(c.is(&cycle));
-                            Q::cycle_fallback(db, &cycle, key)
-                        } else {
-                            // we are not a participant in this cycle
-                            debug_assert!(!cycle
-                                .participant_keys()
-                                .any(|k| k == self.database_key_index()));
-                            cycle.throw()
-                        }
-                    }
-                }
-            }
-        };
-
-        let mut revisions = active_query.pop();
-
-        // We assume that query is side-effect free -- that is, does
-        // not mutate the "inputs" to the query system. Sanity check
-        // that assumption here, at least to the best of our ability.
-        assert_eq!(
-            runtime.current_revision(),
-            revision_now,
-            "revision altered during query execution",
-        );
-
-        // If the new value is equal to the old one, then it didn't
-        // really change, even if some of its inputs have. So we can
-        // "backdate" its `changed_at` revision to be the same as the
-        // old value.
-        if let Some(old_memo) = &old_memo {
-            // Careful: if the value became less durable than it
-            // used to be, that is a "breaking change" that our
-            // consumers must be aware of. Becoming *more* durable
-            // is not. See the test `constant_to_non_constant`.
-            if revisions.durability >= old_memo.revisions.durability && old_memo.value == value {
-                trace!(
-                    "read_upgrade({:?}): value is equal, back-dating to {:?}",
-                    self,
-                    old_memo.revisions.changed_at,
-                );
-
-                assert!(old_memo.revisions.changed_at <= revisions.changed_at);
-                revisions.changed_at = old_memo.revisions.changed_at;
-            }
-        }
-
-        let new_value = StampedValue {
-            value,
-            durability: revisions.durability,
-            changed_at: revisions.changed_at,
-        };
-
-        let memo_value = new_value.value.clone();
-
-        trace!("read_upgrade({:?}): result.revisions = {:#?}", self, revisions,);
-
-        panic_guard.proceed(Some(Memo { value: memo_value, verified_at: revision_now, revisions }));
-
-        new_value
-    }
-
-    /// Helper for `read` that does a shallow check (not recursive) if we have an up-to-date value.
-    ///
-    /// Invoked with the guard `state` corresponding to the `QueryState` of some `Slot` (the guard
-    /// can be either read or write). Returns a suitable `ProbeState`:
-    ///
-    /// - `ProbeState::UpToDate(r)` if the table has an up-to-date value (or we blocked on another
-    ///   thread that produced such a value).
-    /// - `ProbeState::Stale(g)` or `ProbeState::NotComputed(g)` if either (a) there is no memo
-    ///   for this key, or (b) the memo has not been verified at the current revision.
-    ///
-    /// Note that in case `ProbeState::UpToDate`, the lock will have been released.
-    fn probe<StateGuard>(
-        &self,
-        db: &<Q as QueryDb<'_>>::DynDb,
-        state: StateGuard,
-        runtime: &Runtime,
-        revision_now: Revision,
-    ) -> ProbeState<StampedValue<Q::Value>, StateGuard>
-    where
-        StateGuard: Deref<Target = QueryState<Q>>,
-    {
-        match &*state {
-            QueryState::NotComputed => ProbeState::NotComputed(state),
-
-            QueryState::InProgress { id, anyone_waiting } => {
-                let other_id = *id;
-
-                // NB: `Ordering::Relaxed` is sufficient here,
-                // as there are no loads that are "gated" on this
-                // value. Everything that is written is also protected
-                // by a lock that must be acquired. The role of this
-                // boolean is to decide *whether* to acquire the lock,
-                // not to gate future atomic reads.
-                anyone_waiting.store(true, Ordering::Relaxed);
-
-                self.block_on_or_unwind(db, runtime, other_id, state);
-
-                // Other thread completed normally, so our value may be available now.
-                ProbeState::Retry
-            }
-
-            QueryState::Memoized(memo) => {
-                trace!(
-                    "{:?}: found memoized value, verified_at={:?}, changed_at={:?}",
-                    self,
-                    memo.verified_at,
-                    memo.revisions.changed_at,
-                );
-
-                if memo.verified_at < revision_now {
-                    return ProbeState::Stale(state);
-                }
-
-                let value = &memo.value;
-                let value = StampedValue {
-                    durability: memo.revisions.durability,
-                    changed_at: memo.revisions.changed_at,
-                    value: value.clone(),
-                };
-
-                trace!("{:?}: returning memoized value changed at {:?}", self, value.changed_at);
-
-                ProbeState::UpToDate(value)
-            }
-        }
-    }
-
-    pub(super) fn durability(&self, db: &<Q as QueryDb<'_>>::DynDb) -> Durability {
-        match &*self.state.read() {
-            QueryState::NotComputed => Durability::LOW,
-            QueryState::InProgress { .. } => panic!("query in progress"),
-            QueryState::Memoized(memo) => {
-                if memo.check_durability(db.salsa_runtime()) {
-                    memo.revisions.durability
-                } else {
-                    Durability::LOW
-                }
-            }
-        }
-    }
-
-    pub(super) fn as_table_entry(&self, key: &Q::Key) -> Option<TableEntry<Q::Key, Q::Value>> {
-        match &*self.state.read() {
-            QueryState::NotComputed => None,
-            QueryState::InProgress { .. } => Some(TableEntry::new(key.clone(), None)),
-            QueryState::Memoized(memo) => {
-                Some(TableEntry::new(key.clone(), Some(memo.value.clone())))
-            }
-        }
-    }
-
-    pub(super) fn invalidate(&self, new_revision: Revision) -> Option<Durability> {
-        tracing::trace!("Slot::invalidate(new_revision = {:?})", new_revision);
-        match &mut *self.state.write() {
-            QueryState::Memoized(memo) => {
-                memo.revisions.untracked = true;
-                memo.revisions.inputs = None;
-                memo.revisions.changed_at = new_revision;
-                Some(memo.revisions.durability)
-            }
-            QueryState::NotComputed => None,
-            QueryState::InProgress { .. } => unreachable!(),
-        }
-    }
-
-    pub(super) fn maybe_changed_after(
-        &self,
-        db: &<Q as QueryDb<'_>>::DynDb,
-        revision: Revision,
-        key: &Q::Key,
-    ) -> bool {
-        let runtime = db.salsa_runtime();
-        let revision_now = runtime.current_revision();
-
-        db.unwind_if_cancelled();
-
-        trace!(
-            "maybe_changed_after({:?}) called with revision={:?}, revision_now={:?}",
-            self,
-            revision,
-            revision_now,
-        );
-
-        // Do an initial probe with just the read-lock.
-        //
-        // If we find that a cache entry for the value is present
-        // but hasn't been verified in this revision, we'll have to
-        // do more.
-        loop {
-            match self.maybe_changed_after_probe(db, self.state.read(), runtime, revision_now) {
-                MaybeChangedSinceProbeState::Retry => continue,
-                MaybeChangedSinceProbeState::ChangedAt(changed_at) => return changed_at > revision,
-                MaybeChangedSinceProbeState::Stale(state) => {
-                    drop(state);
-                    return self.maybe_changed_after_upgrade(db, revision, key);
-                }
-            }
-        }
-    }
-
-    fn maybe_changed_after_probe<StateGuard>(
-        &self,
-        db: &<Q as QueryDb<'_>>::DynDb,
-        state: StateGuard,
-        runtime: &Runtime,
-        revision_now: Revision,
-    ) -> MaybeChangedSinceProbeState<StateGuard>
-    where
-        StateGuard: Deref<Target = QueryState<Q>>,
-    {
-        match self.probe(db, state, runtime, revision_now) {
-            ProbeState::Retry => MaybeChangedSinceProbeState::Retry,
-
-            ProbeState::Stale(state) => MaybeChangedSinceProbeState::Stale(state),
-
-            // If we know when the value last changed, we can return right away.
-            // Note that we don't need the actual value to be available.
-            ProbeState::UpToDate(StampedValue { value: _, durability: _, changed_at }) => {
-                MaybeChangedSinceProbeState::ChangedAt(changed_at)
-            }
-
-            // If we have nothing cached, then value may have changed.
-            ProbeState::NotComputed(_) => MaybeChangedSinceProbeState::ChangedAt(revision_now),
-        }
-    }
-
-    fn maybe_changed_after_upgrade(
-        &self,
-        db: &<Q as QueryDb<'_>>::DynDb,
-        revision: Revision,
-        key: &Q::Key,
-    ) -> bool {
-        let runtime = db.salsa_runtime();
-        let revision_now = runtime.current_revision();
-
-        // Get an upgradable read lock, which permits other reads but no writers.
-        // Probe again. If the value is stale (needs to be verified), then upgrade
-        // to a write lock and swap it with InProgress while we work.
-        let mut old_memo = match self.maybe_changed_after_probe(
-            db,
-            self.state.upgradable_read(),
-            runtime,
-            revision_now,
-        ) {
-            MaybeChangedSinceProbeState::ChangedAt(changed_at) => return changed_at > revision,
-
-            // If another thread was active, then the cache line is going to be
-            // either verified or cleared out. Just recurse to figure out which.
-            // Note that we don't need an upgradable read.
-            MaybeChangedSinceProbeState::Retry => {
-                return self.maybe_changed_after(db, revision, key)
-            }
-
-            MaybeChangedSinceProbeState::Stale(state) => {
-                type RwLockUpgradableReadGuard<'a, T> =
-                    lock_api::RwLockUpgradableReadGuard<'a, RawRwLock, T>;
-
-                let mut state = RwLockUpgradableReadGuard::upgrade(state);
-                match std::mem::replace(&mut *state, QueryState::in_progress(runtime.id())) {
-                    QueryState::Memoized(old_memo) => old_memo,
-                    QueryState::NotComputed | QueryState::InProgress { .. } => unreachable!(),
-                }
-            }
-        };
-
-        let panic_guard = PanicGuard::new(self, runtime);
-        let active_query = runtime.push_query(self.database_key_index());
-
-        if old_memo.verify_revisions(db.ops_database(), revision_now, &active_query) {
-            let maybe_changed = old_memo.revisions.changed_at > revision;
-            panic_guard.proceed(Some(old_memo));
-            maybe_changed
-        } else {
-            // We found that this memoized value may have changed
-            // but we have an old value. We can re-run the code and
-            // actually *check* if it has changed.
-            let StampedValue { changed_at, .. } = self.execute(
-                db,
-                runtime,
-                revision_now,
-                active_query,
-                panic_guard,
-                Some(old_memo),
-                key,
-            );
-            changed_at > revision
-        }
-    }
-
-    /// Helper: see [`Runtime::try_block_on_or_unwind`].
-    fn block_on_or_unwind<MutexGuard>(
-        &self,
-        db: &<Q as QueryDb<'_>>::DynDb,
-        runtime: &Runtime,
-        other_id: RuntimeId,
-        mutex_guard: MutexGuard,
-    ) {
-        runtime.block_on_or_unwind(
-            db.ops_database(),
-            self.database_key_index(),
-            other_id,
-            mutex_guard,
-        )
-    }
-}
-
-impl<Q> QueryState<Q>
-where
-    Q: QueryFunction,
-{
-    fn in_progress(id: RuntimeId) -> Self {
-        QueryState::InProgress { id, anyone_waiting: Default::default() }
-    }
-}
-
-struct PanicGuard<'me, Q>
-where
-    Q: QueryFunction,
-    Q::Value: Eq,
-{
-    slot: &'me Slot<Q>,
-    runtime: &'me Runtime,
-}
-
-impl<'me, Q> PanicGuard<'me, Q>
-where
-    Q: QueryFunction,
-    Q::Value: Eq,
-{
-    fn new(slot: &'me Slot<Q>, runtime: &'me Runtime) -> Self {
-        Self { slot, runtime }
-    }
-
-    /// Indicates that we have concluded normally (without panicking).
-    /// If `opt_memo` is some, then this memo is installed as the new
-    /// memoized value. If `opt_memo` is `None`, then the slot is cleared
-    /// and has no value.
-    fn proceed(mut self, opt_memo: Option<Memo<Q::Value>>) {
-        self.overwrite_placeholder(WaitResult::Completed, opt_memo);
-        std::mem::forget(self)
-    }
-
-    /// Overwrites the `InProgress` placeholder for `key` that we
-    /// inserted; if others were blocked, waiting for us to finish,
-    /// then notify them.
-    fn overwrite_placeholder(&mut self, wait_result: WaitResult, opt_memo: Option<Memo<Q::Value>>) {
-        let old_value = {
-            let mut write = self.slot.state.write();
-            match opt_memo {
-                // Replace the `InProgress` marker that we installed with the new
-                // memo, thus releasing our unique access to this key.
-                Some(memo) => std::mem::replace(&mut *write, QueryState::Memoized(memo)),
-
-                // We had installed an `InProgress` marker, but we panicked before
-                // it could be removed. At this point, we therefore "own" unique
-                // access to our slot, so we can just remove the key.
-                None => std::mem::replace(&mut *write, QueryState::NotComputed),
-            }
-        };
-
-        match old_value {
-            QueryState::InProgress { id, anyone_waiting } => {
-                assert_eq!(id, self.runtime.id());
-
-                // NB: As noted at the corresponding `store`, `Ordering::Relaxed` is
-                // sufficient here. This boolean only tells us whether to
-                // acquire a mutex; the mutex will guarantee that all writes
-                // we are interested in are visible.
-                if anyone_waiting.load(Ordering::Relaxed) {
-                    self.runtime
-                        .unblock_queries_blocked_on(self.slot.database_key_index(), wait_result);
-                }
-            }
-            _ => panic!(
-                "\
-Unexpected panic during query evaluation, aborting the process.
-
-Please report this bug to https://github.com/salsa-rs/salsa/issues."
-            ),
-        }
-    }
-}
-
-impl<Q> Drop for PanicGuard<'_, Q>
-where
-    Q: QueryFunction,
-    Q::Value: Eq,
-{
-    fn drop(&mut self) {
-        if std::thread::panicking() {
-            // We panicked before we could proceed and need to remove `key`.
-            self.overwrite_placeholder(WaitResult::Panicked, None)
-        } else {
-            // If no panic occurred, then the panic guard ought to have been
-            // "forgotten", and so this Drop code should never run.
-            panic!(".forget() was not called")
-        }
-    }
-}
-
-impl<V> Memo<V>
-where
-    V: Clone,
-{
-    /// Determines whether the value stored in this memo (if any) is still
-    /// valid in the current revision. If so, returns a stamped value.
-    ///
-    /// If needed, this will walk each dependency and
-    /// recursively invoke `maybe_changed_after`, which may in turn
-    /// re-execute the dependency. This can cause cycles to occur,
-    /// so the current query must be pushed onto the
-    /// stack to permit cycle detection and recovery; therefore, this
-    /// method takes the `active_query` argument as evidence.
-    fn verify_value(
-        &mut self,
-        db: &dyn Database,
-        revision_now: Revision,
-        active_query: &ActiveQueryGuard<'_>,
-    ) -> Option<StampedValue<V>> {
-        if self.verify_revisions(db, revision_now, active_query) {
-            Some(StampedValue {
-                durability: self.revisions.durability,
-                changed_at: self.revisions.changed_at,
-                value: self.value.clone(),
-            })
-        } else {
-            None
-        }
-    }
-
-    /// Determines whether the value represented by this memo is still
-    /// valid in the current revision; note that the value itself is
-    /// not needed for this check. If needed, this will walk each
-    /// dependency and recursively invoke `maybe_changed_after`, which
-    /// may in turn re-execute the dependency. This can cause cycles to occur,
-    /// so the current query must be pushed onto the
-    /// stack to permit cycle detection and recovery; therefore, this
-    /// method takes the `active_query` argument as evidence.
-    fn verify_revisions(
-        &mut self,
-        db: &dyn Database,
-        revision_now: Revision,
-        _active_query: &ActiveQueryGuard<'_>,
-    ) -> bool {
-        assert!(self.verified_at != revision_now);
-        let verified_at = self.verified_at;
-
-        trace!(
-            "verify_revisions: verified_at={:?}, revision_now={:?}, inputs={:#?}",
-            verified_at,
-            revision_now,
-            self.revisions.inputs
-        );
-
-        if self.check_durability(db.salsa_runtime()) {
-            return self.mark_value_as_verified(revision_now);
-        }
-
-        match &self.revisions.inputs {
-            // We can't validate values that had untracked inputs; just have to
-            // re-execute.
-            None if self.revisions.untracked => return false,
-            None => {}
-
-            // Check whether any of our inputs changed since the
-            // **last point where we were verified** (not since we
-            // last changed). This is important: if we have
-            // memoized values, then an input may have changed in
-            // revision R2, but we found that *our* value was the
-            // same regardless, so our change date is still
-            // R1. But our *verification* date will be R2, and we
-            // are only interested in finding out whether the
-            // input changed *again*.
-            Some(inputs) => {
-                let changed_input =
-                    inputs.slice.iter().find(|&&input| db.maybe_changed_after(input, verified_at));
-                if let Some(input) = changed_input {
-                    trace!("validate_memoized_value: `{:?}` may have changed", input);
-
-                    return false;
-                }
-            }
-        };
-
-        self.mark_value_as_verified(revision_now)
-    }
-
-    /// True if this memo is known not to have changed based on its durability.
-    fn check_durability(&self, runtime: &Runtime) -> bool {
-        let last_changed = runtime.last_changed_revision(self.revisions.durability);
-        trace!(
-            "check_durability(last_changed={:?} <= verified_at={:?}) = {:?}",
-            last_changed,
-            self.verified_at,
-            last_changed <= self.verified_at,
-        );
-        last_changed <= self.verified_at
-    }
-
-    fn mark_value_as_verified(&mut self, revision_now: Revision) -> bool {
-        self.verified_at = revision_now;
-        true
-    }
-}
-
-impl<Q> std::fmt::Debug for Slot<Q>
-where
-    Q: QueryFunction,
-{
-    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(fmt, "{:?}", Q::default())
-    }
-}
-
-/// Check that `Slot<Q>: Send + Sync` as long as
-/// `DB::DatabaseData: Send + Sync`, which in turn implies that
-/// `Q::Key: Send + Sync`, `Q::Value: Send + Sync`.
-#[allow(dead_code)]
-fn check_send_sync<Q>()
-where
-    Q: QueryFunction,
-
-    Q::Key: Send + Sync,
-    Q::Value: Send + Sync,
-{
-    fn is_send_sync<T: Send + Sync>() {}
-    is_send_sync::<Slot<Q>>();
-}
-
-/// Check that `Slot<Q>: 'static` as long as
-/// `DB::DatabaseData: 'static`, which in turn implies that
-/// `Q::Key: 'static`, `Q::Value: 'static`.
-#[allow(dead_code)]
-fn check_static<Q>()
-where
-    Q: QueryFunction + 'static,
-    Q::Key: 'static,
-    Q::Value: 'static,
-{
-    fn is_static<T: 'static>() {}
-    is_static::<Slot<Q>>();
-}
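
For reference, `PanicGuard` follows a common Rust shape: install an `InProgress` placeholder, do the work, and arm a guard whose `Drop` impl restores the slot if the work unwinds; on success the guard is explicitly disarmed with `std::mem::forget`. A minimal sketch of that shape, with illustrative names rather than the deleted salsa API:

struct CleanupGuard<F: FnMut()> {
    on_unwind: F,
}

impl<F: FnMut()> CleanupGuard<F> {
    fn new(on_unwind: F) -> Self {
        Self { on_unwind }
    }

    /// Normal completion: disarm the guard so `Drop` never runs.
    fn proceed(self) {
        std::mem::forget(self);
    }
}

impl<F: FnMut()> Drop for CleanupGuard<F> {
    fn drop(&mut self) {
        // Only reached if `proceed` was never called, i.e. the work unwound.
        (self.on_unwind)();
    }
}

fn compute_guarded() -> u32 {
    let guard = CleanupGuard::new(|| eprintln!("unwound: clearing the InProgress placeholder"));
    let value = 2 + 2; // stand-in for `Q::execute(db, key)`
    guard.proceed();
    value
}
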
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/src/derived_lru.rs b/src/tools/rust-analyzer/crates/ra-salsa/src/derived_lru.rs
deleted file mode 100644
index bdb448e2412ee..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/src/derived_lru.rs
+++ /dev/null
@@ -1,233 +0,0 @@
-use crate::debug::TableEntry;
-use crate::durability::Durability;
-use crate::hash::FxIndexMap;
-use crate::lru::Lru;
-use crate::plumbing::DerivedQueryStorageOps;
-use crate::plumbing::LruQueryStorageOps;
-use crate::plumbing::QueryFunction;
-use crate::plumbing::QueryStorageMassOps;
-use crate::plumbing::QueryStorageOps;
-use crate::runtime::StampedValue;
-use crate::Runtime;
-use crate::{Database, DatabaseKeyIndex, QueryDb, Revision};
-use parking_lot::RwLock;
-use std::borrow::Borrow;
-use std::hash::Hash;
-use std::marker::PhantomData;
-use triomphe::Arc;
-
-mod slot;
-use slot::Slot;
-
-/// Memoized queries store the result plus a list of the other queries
-/// that they invoked. This means we can avoid recomputing them when
-/// none of those inputs have changed.
-pub type MemoizedStorage<Q> = DerivedStorage<Q, AlwaysMemoizeValue>;
-
-/// "Dependency" queries just track their dependencies and not the
-/// actual value (which they produce on demand). This lessens the
-/// storage requirements.
-pub type DependencyStorage<Q> = DerivedStorage<Q, NeverMemoizeValue>;
-
-/// Handles storage where the value is 'derived' by executing a
-/// function (in contrast to "inputs").
-pub struct DerivedStorage<Q, MP>
-where
-    Q: QueryFunction,
-    MP: MemoizationPolicy<Q>,
-{
-    group_index: u16,
-    lru_list: Lru<Slot<Q, MP>>,
-    slot_map: RwLock<FxIndexMap<Q::Key, Arc<Slot<Q, MP>>>>,
-    policy: PhantomData<MP>,
-}
-
-impl<Q, MP> std::panic::RefUnwindSafe for DerivedStorage<Q, MP>
-where
-    Q: QueryFunction,
-    MP: MemoizationPolicy<Q>,
-    Q::Key: std::panic::RefUnwindSafe,
-    Q::Value: std::panic::RefUnwindSafe,
-{
-}
-
-pub trait MemoizationPolicy<Q>: Send + Sync
-where
-    Q: QueryFunction,
-{
-    fn should_memoize_value(key: &Q::Key) -> bool;
-
-    fn memoized_value_eq(old_value: &Q::Value, new_value: &Q::Value) -> bool;
-}
-
-pub enum AlwaysMemoizeValue {}
-impl<Q> MemoizationPolicy<Q> for AlwaysMemoizeValue
-where
-    Q: QueryFunction,
-    Q::Value: Eq,
-{
-    fn should_memoize_value(_key: &Q::Key) -> bool {
-        true
-    }
-
-    fn memoized_value_eq(old_value: &Q::Value, new_value: &Q::Value) -> bool {
-        old_value == new_value
-    }
-}
-
-pub enum NeverMemoizeValue {}
-impl<Q> MemoizationPolicy<Q> for NeverMemoizeValue
-where
-    Q: QueryFunction,
-{
-    fn should_memoize_value(_key: &Q::Key) -> bool {
-        false
-    }
-
-    fn memoized_value_eq(_old_value: &Q::Value, _new_value: &Q::Value) -> bool {
-        panic!("cannot reach since we never memoize")
-    }
-}
-
-impl<Q, MP> DerivedStorage<Q, MP>
-where
-    Q: QueryFunction,
-    MP: MemoizationPolicy<Q>,
-{
-    fn slot(&self, key: &Q::Key) -> Arc<Slot<Q, MP>> {
-        if let Some(v) = self.slot_map.read().get(key) {
-            return v.clone();
-        }
-
-        let mut write = self.slot_map.write();
-        let entry = write.entry(key.clone());
-        let key_index = entry.index() as u32;
-        let database_key_index = DatabaseKeyIndex {
-            group_index: self.group_index,
-            query_index: Q::QUERY_INDEX,
-            key_index,
-        };
-        entry.or_insert_with(|| Arc::new(Slot::new(database_key_index))).clone()
-    }
-}
-
-impl<Q, MP> QueryStorageOps<Q> for DerivedStorage<Q, MP>
-where
-    Q: QueryFunction,
-    MP: MemoizationPolicy<Q>,
-{
-    const CYCLE_STRATEGY: crate::plumbing::CycleRecoveryStrategy = Q::CYCLE_STRATEGY;
-
-    fn new(group_index: u16) -> Self {
-        DerivedStorage {
-            group_index,
-            slot_map: RwLock::new(FxIndexMap::default()),
-            lru_list: Default::default(),
-            policy: PhantomData,
-        }
-    }
-
-    fn fmt_index(
-        &self,
-        _db: &<Q as QueryDb<'_>>::DynDb,
-        index: u32,
-        fmt: &mut std::fmt::Formatter<'_>,
-    ) -> std::fmt::Result {
-        let slot_map = self.slot_map.read();
-        let key = slot_map.get_index(index as usize).unwrap().0;
-        write!(fmt, "{}::{}({:?})", std::any::type_name::<Q>(), Q::QUERY_NAME, key)
-    }
-
-    fn maybe_changed_after(
-        &self,
-        db: &<Q as QueryDb<'_>>::DynDb,
-        index: u32,
-        revision: Revision,
-    ) -> bool {
-        debug_assert!(revision < db.salsa_runtime().current_revision());
-        let (key, slot) = {
-            let read = self.slot_map.read();
-            let Some((key, slot)) = read.get_index(index as usize) else {
-                return false;
-            };
-            (key.clone(), slot.clone())
-        };
-        slot.maybe_changed_after(db, revision, &key)
-    }
-
-    fn fetch(&self, db: &<Q as QueryDb<'_>>::DynDb, key: &Q::Key) -> Q::Value {
-        db.unwind_if_cancelled();
-
-        let slot = self.slot(key);
-        let StampedValue { value, durability, changed_at } = slot.read(db, key);
-
-        if let Some(evicted) = self.lru_list.record_use(&slot) {
-            evicted.evict();
-        }
-
-        db.salsa_runtime().report_query_read_and_unwind_if_cycle_resulted(
-            slot.database_key_index(),
-            durability,
-            changed_at,
-        );
-
-        value
-    }
-
-    fn durability(&self, db: &<Q as QueryDb<'_>>::DynDb, key: &Q::Key) -> Durability {
-        self.slot(key).durability(db)
-    }
-
-    fn entries<C>(&self, _db: &<Q as QueryDb<'_>>::DynDb) -> C
-    where
-        C: std::iter::FromIterator<TableEntry<Q::Key, Q::Value>>,
-    {
-        let slot_map = self.slot_map.read();
-        slot_map.iter().filter_map(|(key, slot)| slot.as_table_entry(key)).collect()
-    }
-}
-
-impl<Q, MP> QueryStorageMassOps for DerivedStorage<Q, MP>
-where
-    Q: QueryFunction,
-    MP: MemoizationPolicy<Q>,
-{
-    fn purge(&self) {
-        self.lru_list.purge();
-        *self.slot_map.write() = Default::default();
-    }
-}
-
-impl<Q, MP> LruQueryStorageOps for DerivedStorage<Q, MP>
-where
-    Q: QueryFunction,
-    MP: MemoizationPolicy<Q>,
-{
-    fn set_lru_capacity(&self, new_capacity: u16) {
-        self.lru_list.set_lru_capacity(new_capacity);
-    }
-}
-
-impl<Q, MP> DerivedQueryStorageOps<Q> for DerivedStorage<Q, MP>
-where
-    Q: QueryFunction,
-    MP: MemoizationPolicy<Q>,
-{
-    fn invalidate<S>(&self, runtime: &mut Runtime, key: &S)
-    where
-        S: Eq + Hash,
-        Q::Key: Borrow<S>,
-    {
-        runtime.with_incremented_revision(|new_revision| {
-            let map_read = self.slot_map.read();
-
-            if let Some(slot) = map_read.get(key) {
-                if let Some(durability) = slot.invalidate(new_revision) {
-                    return Some(durability);
-                }
-            }
-
-            None
-        })
-    }
-}
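
As background on the storage above, `AlwaysMemoizeValue` and `NeverMemoizeValue` are uninhabited policy types: the behaviour is chosen at compile time through the `MP` type parameter and has no runtime representation. A rough sketch of the same pattern, using hypothetical names rather than the deleted traits:

use std::marker::PhantomData;

trait StoragePolicy {
    fn keep_value() -> bool;
}

enum AlwaysKeep {}
impl StoragePolicy for AlwaysKeep {
    fn keep_value() -> bool { true }
}

enum NeverKeep {}
impl StoragePolicy for NeverKeep {
    fn keep_value() -> bool { false }
}

struct Storage<P: StoragePolicy> {
    value: Option<u32>,
    _policy: PhantomData<P>,
}

impl<P: StoragePolicy> Storage<P> {
    fn new() -> Self {
        Storage { value: None, _policy: PhantomData }
    }

    fn record(&mut self, computed: u32) {
        // Statically resolved; the branch is optimized away per instantiation.
        self.value = if P::keep_value() { Some(computed) } else { None };
    }
}

fn demo_policy() {
    let mut memoized: Storage<AlwaysKeep> = Storage::new();
    let mut dependency_only: Storage<NeverKeep> = Storage::new();
    memoized.record(42);
    dependency_only.record(42);
    assert_eq!(memoized.value, Some(42));
    assert_eq!(dependency_only.value, None);
}
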
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/src/derived_lru/slot.rs b/src/tools/rust-analyzer/crates/ra-salsa/src/derived_lru/slot.rs
deleted file mode 100644
index 73a5e07aa05ab..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/src/derived_lru/slot.rs
+++ /dev/null
@@ -1,856 +0,0 @@
-use crate::debug::TableEntry;
-use crate::derived_lru::MemoizationPolicy;
-use crate::durability::Durability;
-use crate::lru::LruIndex;
-use crate::lru::LruNode;
-use crate::plumbing::{DatabaseOps, QueryFunction};
-use crate::revision::Revision;
-use crate::runtime::local_state::ActiveQueryGuard;
-use crate::runtime::local_state::QueryRevisions;
-use crate::runtime::Runtime;
-use crate::runtime::RuntimeId;
-use crate::runtime::StampedValue;
-use crate::runtime::WaitResult;
-use crate::Cycle;
-use crate::{Database, DatabaseKeyIndex, Event, EventKind, QueryDb};
-use parking_lot::{RawRwLock, RwLock};
-use std::marker::PhantomData;
-use std::ops::Deref;
-use std::sync::atomic::{AtomicBool, Ordering};
-use tracing::trace;
-
-pub(super) struct Slot<Q, MP>
-where
-    Q: QueryFunction,
-    MP: MemoizationPolicy<Q>,
-{
-    key_index: u32,
-    group_index: u16,
-    state: RwLock<QueryState<Q>>,
-    lru_index: LruIndex,
-    policy: PhantomData<MP>,
-}
-
-/// Defines the "current state" of query's memoized results.
-enum QueryState<Q>
-where
-    Q: QueryFunction,
-{
-    NotComputed,
-
-    /// The runtime with the given id is currently computing the
-    /// result of this query.
-    InProgress {
-        id: RuntimeId,
-
-        /// Set to true if any other queries are blocked,
-        /// waiting for this query to complete.
-        anyone_waiting: AtomicBool,
-    },
-
-    /// We have computed the query already, and here is the result.
-    Memoized(Memo<Q::Value>),
-}
-
-struct Memo<V> {
-    /// The result of the query, if we decide to memoize it.
-    value: Option<V>,
-
-    /// Last revision when this memo was verified; this begins
-    /// as the current revision.
-    pub(crate) verified_at: Revision,
-
-    /// Revision information
-    revisions: QueryRevisions,
-}
-
-/// Return value of `probe` helper.
-enum ProbeState<V, G> {
-    /// Another thread was active but has completed.
-    /// Try again!
-    Retry,
-
-    /// No entry for this key at all.
-    NotComputed(G),
-
-    /// There is an entry, but its contents have not been
-    /// verified in this revision.
-    Stale(G),
-
-    /// There is an entry, and it has been verified
-    /// in this revision, but it has no cached
-    /// value. The `Revision` is the revision where the
-    /// value last changed (if we were to recompute it).
-    NoValue(G, Revision),
-
-    /// There is an entry which has been verified,
-    /// and it has the following value-- or, we blocked
-    /// on another thread, and that resulted in a cycle.
-    UpToDate(V),
-}
-
-/// Return value of `maybe_changed_after_probe` helper.
-enum MaybeChangedSinceProbeState<G> {
-    /// Another thread was active but has completed.
-    /// Try again!
-    Retry,
-
-    /// Value may have changed in the given revision.
-    ChangedAt(Revision),
-
-    /// There is a stale cache entry that has not been
-    /// verified in this revision, so we can't say.
-    Stale(G),
-}
-
-impl<Q, MP> Slot<Q, MP>
-where
-    Q: QueryFunction,
-    MP: MemoizationPolicy<Q>,
-{
-    pub(super) fn new(database_key_index: DatabaseKeyIndex) -> Self {
-        Self {
-            key_index: database_key_index.key_index,
-            group_index: database_key_index.group_index,
-            state: RwLock::new(QueryState::NotComputed),
-            lru_index: LruIndex::default(),
-            policy: PhantomData,
-        }
-    }
-
-    pub(super) fn database_key_index(&self) -> DatabaseKeyIndex {
-        DatabaseKeyIndex {
-            group_index: self.group_index,
-            query_index: Q::QUERY_INDEX,
-            key_index: self.key_index,
-        }
-    }
-
-    pub(super) fn read(
-        &self,
-        db: &<Q as QueryDb<'_>>::DynDb,
-        key: &Q::Key,
-    ) -> StampedValue<Q::Value> {
-        let runtime = db.salsa_runtime();
-
-        // NB: We don't need to worry about people modifying the
-        // revision out from under our feet. Either `db` is a frozen
-        // database, in which case there is a lock, or the mutator
-        // thread is the current thread, and it will be prevented from
-        // doing any `set` invocations while the query function runs.
-        let revision_now = runtime.current_revision();
-
-        trace!("{:?}: invoked at {:?}", self, revision_now,);
-
-        // First, do a check with a read-lock.
-        loop {
-            match self.probe(db, self.state.read(), runtime, revision_now) {
-                ProbeState::UpToDate(v) => return v,
-                ProbeState::Stale(..) | ProbeState::NoValue(..) | ProbeState::NotComputed(..) => {
-                    break
-                }
-                ProbeState::Retry => continue,
-            }
-        }
-
-        self.read_upgrade(db, key, revision_now)
-    }
-
-    /// Second phase of a read operation: acquires an upgradable-read
-    /// and -- if needed -- validates whether inputs have changed,
-    /// recomputes the value, etc. This is invoked after our initial probe
-    /// shows a potentially out-of-date value.
-    fn read_upgrade(
-        &self,
-        db: &<Q as QueryDb<'_>>::DynDb,
-        key: &Q::Key,
-        revision_now: Revision,
-    ) -> StampedValue<Q::Value> {
-        let runtime = db.salsa_runtime();
-
-        trace!("{:?}: read_upgrade(revision_now={:?})", self, revision_now,);
-
-        // Check with an upgradable read to see if there is a value
-        // already. (This permits other readers but prevents anyone
-        // else from running `read_upgrade` at the same time.)
-        let mut old_memo = loop {
-            match self.probe(db, self.state.upgradable_read(), runtime, revision_now) {
-                ProbeState::UpToDate(v) => return v,
-                ProbeState::Stale(state)
-                | ProbeState::NotComputed(state)
-                | ProbeState::NoValue(state, _) => {
-                    type RwLockUpgradableReadGuard<'a, T> =
-                        lock_api::RwLockUpgradableReadGuard<'a, RawRwLock, T>;
-
-                    let mut state = RwLockUpgradableReadGuard::upgrade(state);
-                    match std::mem::replace(&mut *state, QueryState::in_progress(runtime.id())) {
-                        QueryState::Memoized(old_memo) => break Some(old_memo),
-                        QueryState::InProgress { .. } => unreachable!(),
-                        QueryState::NotComputed => break None,
-                    }
-                }
-                ProbeState::Retry => continue,
-            }
-        };
-
-        let panic_guard = PanicGuard::new(self, runtime);
-        let active_query = runtime.push_query(self.database_key_index());
-
-        // If we have an old value, it *may* now be stale, since there
-        // has been a new revision since the last time we checked. So,
-        // first things first, let's walk over each of our previous
-        // inputs and check whether they are out of date.
-        if let Some(memo) = &mut old_memo {
-            if let Some(value) = memo.verify_value(db.ops_database(), revision_now, &active_query) {
-                trace!("{:?}: validated old memoized value", self,);
-
-                db.salsa_event(Event {
-                    runtime_id: runtime.id(),
-                    kind: EventKind::DidValidateMemoizedValue {
-                        database_key: self.database_key_index(),
-                    },
-                });
-
-                panic_guard.proceed(old_memo);
-
-                return value;
-            }
-        }
-
-        self.execute(db, runtime, revision_now, active_query, panic_guard, old_memo, key)
-    }
-
-    fn execute(
-        &self,
-        db: &<Q as QueryDb<'_>>::DynDb,
-        runtime: &Runtime,
-        revision_now: Revision,
-        active_query: ActiveQueryGuard<'_>,
-        panic_guard: PanicGuard<'_, Q, MP>,
-        old_memo: Option<Memo<Q::Value>>,
-        key: &Q::Key,
-    ) -> StampedValue<Q::Value> {
-        tracing::trace!("{:?}: executing query", self.database_key_index().debug(db));
-
-        db.salsa_event(Event {
-            runtime_id: db.salsa_runtime().id(),
-            kind: EventKind::WillExecute { database_key: self.database_key_index() },
-        });
-
-        // Query was not previously executed, or value is potentially
-        // stale, or value is absent. Let's execute!
-        let value = match Cycle::catch(|| Q::execute(db, key.clone())) {
-            Ok(v) => v,
-            Err(cycle) => {
-                tracing::trace!(
-                    "{:?}: caught cycle {:?}, have strategy {:?}",
-                    self.database_key_index().debug(db),
-                    cycle,
-                    Q::CYCLE_STRATEGY,
-                );
-                match Q::CYCLE_STRATEGY {
-                    crate::plumbing::CycleRecoveryStrategy::Panic => {
-                        panic_guard.proceed(None);
-                        cycle.throw()
-                    }
-                    crate::plumbing::CycleRecoveryStrategy::Fallback => {
-                        if let Some(c) = active_query.take_cycle() {
-                            assert!(c.is(&cycle));
-                            Q::cycle_fallback(db, &cycle, key)
-                        } else {
-                            // we are not a participant in this cycle
-                            debug_assert!(!cycle
-                                .participant_keys()
-                                .any(|k| k == self.database_key_index()));
-                            cycle.throw()
-                        }
-                    }
-                }
-            }
-        };
-
-        let mut revisions = active_query.pop();
-
-        // We assume that the query is side-effect free -- that is, it does
-        // not mutate the "inputs" to the query system. Sanity check
-        // that assumption here, at least to the best of our ability.
-        assert_eq!(
-            runtime.current_revision(),
-            revision_now,
-            "revision altered during query execution",
-        );
-
-        // If the new value is equal to the old one, then it didn't
-        // really change, even if some of its inputs have. So we can
-        // "backdate" its `changed_at` revision to be the same as the
-        // old value.
-        if let Some(old_memo) = &old_memo {
-            if let Some(old_value) = &old_memo.value {
-                // Careful: if the value became less durable than it
-                // used to be, that is a "breaking change" that our
-                // consumers must be aware of. Becoming *more* durable
-                // is not. See the test `constant_to_non_constant`.
-                if revisions.durability >= old_memo.revisions.durability
-                    && MP::memoized_value_eq(old_value, &value)
-                {
-                    trace!(
-                        "read_upgrade({:?}): value is equal, back-dating to {:?}",
-                        self,
-                        old_memo.revisions.changed_at,
-                    );
-
-                    assert!(old_memo.revisions.changed_at <= revisions.changed_at);
-                    revisions.changed_at = old_memo.revisions.changed_at;
-                }
-            }
-        }
-
-        let new_value = StampedValue {
-            value,
-            durability: revisions.durability,
-            changed_at: revisions.changed_at,
-        };
-
-        let memo_value =
-            if self.should_memoize_value(key) { Some(new_value.value.clone()) } else { None };
-
-        trace!("read_upgrade({:?}): result.revisions = {:#?}", self, revisions,);
-
-        panic_guard.proceed(Some(Memo { value: memo_value, verified_at: revision_now, revisions }));
-
-        new_value
-    }
-
-    /// Helper for `read` that does a shallow (non-recursive) check of whether we have an up-to-date value.
-    ///
-    /// Invoked with the guard `state` corresponding to the `QueryState` of some `Slot` (the guard
-    /// can be either read or write). Returns a suitable `ProbeState`:
-    ///
-    /// - `ProbeState::UpToDate(r)` if the table has an up-to-date value (or we blocked on another
-    ///   thread that produced such a value).
-    /// - `ProbeState::NotComputed(g)`, `ProbeState::NoValue(g, _)`, or `ProbeState::Stale(g)` if,
-    ///   respectively, (a) there is no memo for this key, (b) the memo has no value, or (c) the
-    ///   memo has not been verified at the current revision.
-    ///
-    /// Note that in case `ProbeState::UpToDate`, the lock will have been released.
-    fn probe<StateGuard>(
-        &self,
-        db: &<Q as QueryDb<'_>>::DynDb,
-        state: StateGuard,
-        runtime: &Runtime,
-        revision_now: Revision,
-    ) -> ProbeState<StampedValue<Q::Value>, StateGuard>
-    where
-        StateGuard: Deref<Target = QueryState<Q>>,
-    {
-        match &*state {
-            QueryState::NotComputed => ProbeState::NotComputed(state),
-
-            QueryState::InProgress { id, anyone_waiting } => {
-                let other_id = *id;
-
-                // NB: `Ordering::Relaxed` is sufficient here,
-                // as there are no loads that are "gated" on this
-                // value. Everything that is written is also protected
-                // by a lock that must be acquired. The role of this
-                // boolean is to decide *whether* to acquire the lock,
-                // not to gate future atomic reads.
-                anyone_waiting.store(true, Ordering::Relaxed);
-
-                self.block_on_or_unwind(db, runtime, other_id, state);
-
-                // Other thread completed normally, so our value may be available now.
-                ProbeState::Retry
-            }
-
-            QueryState::Memoized(memo) => {
-                trace!(
-                    "{:?}: found memoized value, verified_at={:?}, changed_at={:?}",
-                    self,
-                    memo.verified_at,
-                    memo.revisions.changed_at,
-                );
-
-                if memo.verified_at < revision_now {
-                    return ProbeState::Stale(state);
-                }
-
-                if let Some(value) = &memo.value {
-                    let value = StampedValue {
-                        durability: memo.revisions.durability,
-                        changed_at: memo.revisions.changed_at,
-                        value: value.clone(),
-                    };
-
-                    trace!(
-                        "{:?}: returning memoized value changed at {:?}",
-                        self,
-                        value.changed_at
-                    );
-
-                    ProbeState::UpToDate(value)
-                } else {
-                    let changed_at = memo.revisions.changed_at;
-                    ProbeState::NoValue(state, changed_at)
-                }
-            }
-        }
-    }
-
-    pub(super) fn durability(&self, db: &<Q as QueryDb<'_>>::DynDb) -> Durability {
-        match &*self.state.read() {
-            QueryState::NotComputed => Durability::LOW,
-            QueryState::InProgress { .. } => panic!("query in progress"),
-            QueryState::Memoized(memo) => {
-                if memo.check_durability(db.salsa_runtime()) {
-                    memo.revisions.durability
-                } else {
-                    Durability::LOW
-                }
-            }
-        }
-    }
-
-    pub(super) fn as_table_entry(&self, key: &Q::Key) -> Option<TableEntry<Q::Key, Q::Value>> {
-        match &*self.state.read() {
-            QueryState::NotComputed => None,
-            QueryState::InProgress { .. } => Some(TableEntry::new(key.clone(), None)),
-            QueryState::Memoized(memo) => Some(TableEntry::new(key.clone(), memo.value.clone())),
-        }
-    }
-
-    pub(super) fn evict(&self) {
-        let mut state = self.state.write();
-        if let QueryState::Memoized(memo) = &mut *state {
-            // Evicting a value with an untracked input could
-            // lead to inconsistencies. Note that we can't check
-            // `has_untracked_input` when we add the value to the cache,
-            // because inputs can become untracked in the next revision.
-            if memo.has_untracked_input() {
-                return;
-            }
-            memo.value = None;
-        }
-    }
-
-    pub(super) fn invalidate(&self, new_revision: Revision) -> Option<Durability> {
-        tracing::trace!("Slot::invalidate(new_revision = {:?})", new_revision);
-        match &mut *self.state.write() {
-            QueryState::Memoized(memo) => {
-                memo.revisions.untracked = true;
-                memo.revisions.inputs = None;
-                memo.revisions.changed_at = new_revision;
-                Some(memo.revisions.durability)
-            }
-            QueryState::NotComputed => None,
-            QueryState::InProgress { .. } => unreachable!(),
-        }
-    }
-
-    pub(super) fn maybe_changed_after(
-        &self,
-        db: &<Q as QueryDb<'_>>::DynDb,
-        revision: Revision,
-        key: &Q::Key,
-    ) -> bool {
-        let runtime = db.salsa_runtime();
-        let revision_now = runtime.current_revision();
-
-        db.unwind_if_cancelled();
-
-        trace!(
-            "maybe_changed_after({:?}) called with revision={:?}, revision_now={:?}",
-            self,
-            revision,
-            revision_now,
-        );
-
-        // Do an initial probe with just the read-lock.
-        //
-        // If we find that a cache entry for the value is present
-        // but hasn't been verified in this revision, we'll have to
-        // do more.
-        loop {
-            match self.maybe_changed_after_probe(db, self.state.read(), runtime, revision_now) {
-                MaybeChangedSinceProbeState::Retry => continue,
-                MaybeChangedSinceProbeState::ChangedAt(changed_at) => return changed_at > revision,
-                MaybeChangedSinceProbeState::Stale(state) => {
-                    drop(state);
-                    return self.maybe_changed_after_upgrade(db, revision, key);
-                }
-            }
-        }
-    }
-
-    fn maybe_changed_after_probe<StateGuard>(
-        &self,
-        db: &<Q as QueryDb<'_>>::DynDb,
-        state: StateGuard,
-        runtime: &Runtime,
-        revision_now: Revision,
-    ) -> MaybeChangedSinceProbeState<StateGuard>
-    where
-        StateGuard: Deref<Target = QueryState<Q>>,
-    {
-        match self.probe(db, state, runtime, revision_now) {
-            ProbeState::Retry => MaybeChangedSinceProbeState::Retry,
-
-            ProbeState::Stale(state) => MaybeChangedSinceProbeState::Stale(state),
-
-            // If we know when value last changed, we can return right away.
-            // Note that we don't need the actual value to be available.
-            ProbeState::NoValue(_, changed_at)
-            | ProbeState::UpToDate(StampedValue { value: _, durability: _, changed_at }) => {
-                MaybeChangedSinceProbeState::ChangedAt(changed_at)
-            }
-
-            // If we have nothing cached, then value may have changed.
-            ProbeState::NotComputed(_) => MaybeChangedSinceProbeState::ChangedAt(revision_now),
-        }
-    }
-
-    fn maybe_changed_after_upgrade(
-        &self,
-        db: &<Q as QueryDb<'_>>::DynDb,
-        revision: Revision,
-        key: &Q::Key,
-    ) -> bool {
-        let runtime = db.salsa_runtime();
-        let revision_now = runtime.current_revision();
-
-        // Get an upgradable read lock, which permits other reads but no writers.
-        // Probe again. If the value is stale (needs to be verified), then upgrade
-        // to a write lock and swap it with InProgress while we work.
-        let mut old_memo = match self.maybe_changed_after_probe(
-            db,
-            self.state.upgradable_read(),
-            runtime,
-            revision_now,
-        ) {
-            MaybeChangedSinceProbeState::ChangedAt(changed_at) => return changed_at > revision,
-
-            // If another thread was active, then the cache line is going to be
-            // either verified or cleared out. Just recurse to figure out which.
-            // Note that we don't need an upgradable read.
-            MaybeChangedSinceProbeState::Retry => {
-                return self.maybe_changed_after(db, revision, key)
-            }
-
-            MaybeChangedSinceProbeState::Stale(state) => {
-                type RwLockUpgradableReadGuard<'a, T> =
-                    lock_api::RwLockUpgradableReadGuard<'a, RawRwLock, T>;
-
-                let mut state = RwLockUpgradableReadGuard::upgrade(state);
-                match std::mem::replace(&mut *state, QueryState::in_progress(runtime.id())) {
-                    QueryState::Memoized(old_memo) => old_memo,
-                    QueryState::NotComputed | QueryState::InProgress { .. } => unreachable!(),
-                }
-            }
-        };
-
-        let panic_guard = PanicGuard::new(self, runtime);
-        let active_query = runtime.push_query(self.database_key_index());
-
-        if old_memo.verify_revisions(db.ops_database(), revision_now, &active_query) {
-            let maybe_changed = old_memo.revisions.changed_at > revision;
-            panic_guard.proceed(Some(old_memo));
-            maybe_changed
-        } else if old_memo.value.is_some() {
-            // We found that this memoized value may have changed
-            // but we have an old value. We can re-run the code and
-            // actually *check* if it has changed.
-            let StampedValue { changed_at, .. } = self.execute(
-                db,
-                runtime,
-                revision_now,
-                active_query,
-                panic_guard,
-                Some(old_memo),
-                key,
-            );
-            changed_at > revision
-        } else {
-            // We found that inputs to this memoized value may have changed
-            // but we don't have an old value to compare against or re-use.
-            // No choice but to drop the memo and say that its value may have changed.
-            panic_guard.proceed(None);
-            true
-        }
-    }
-
-    /// Helper: see [`Runtime::try_block_on_or_unwind`].
-    fn block_on_or_unwind<MutexGuard>(
-        &self,
-        db: &<Q as QueryDb<'_>>::DynDb,
-        runtime: &Runtime,
-        other_id: RuntimeId,
-        mutex_guard: MutexGuard,
-    ) {
-        runtime.block_on_or_unwind(
-            db.ops_database(),
-            self.database_key_index(),
-            other_id,
-            mutex_guard,
-        )
-    }
-
-    fn should_memoize_value(&self, key: &Q::Key) -> bool {
-        MP::should_memoize_value(key)
-    }
-}
-
-impl<Q> QueryState<Q>
-where
-    Q: QueryFunction,
-{
-    fn in_progress(id: RuntimeId) -> Self {
-        QueryState::InProgress { id, anyone_waiting: Default::default() }
-    }
-}
-
-struct PanicGuard<'me, Q, MP>
-where
-    Q: QueryFunction,
-    MP: MemoizationPolicy<Q>,
-{
-    slot: &'me Slot<Q, MP>,
-    runtime: &'me Runtime,
-}
-
-impl<'me, Q, MP> PanicGuard<'me, Q, MP>
-where
-    Q: QueryFunction,
-    MP: MemoizationPolicy<Q>,
-{
-    fn new(slot: &'me Slot<Q, MP>, runtime: &'me Runtime) -> Self {
-        Self { slot, runtime }
-    }
-
-    /// Indicates that we have concluded normally (without panicking).
-    /// If `opt_memo` is some, then this memo is installed as the new
-    /// memoized value. If `opt_memo` is `None`, then the slot is cleared
-    /// and has no value.
-    fn proceed(mut self, opt_memo: Option<Memo<Q::Value>>) {
-        self.overwrite_placeholder(WaitResult::Completed, opt_memo);
-        std::mem::forget(self)
-    }
-
-    /// Overwrites the `InProgress` placeholder for `key` that we
-    /// inserted; if others were blocked, waiting for us to finish,
-    /// then notify them.
-    fn overwrite_placeholder(&mut self, wait_result: WaitResult, opt_memo: Option<Memo<Q::Value>>) {
-        let old_value = {
-            let mut write = self.slot.state.write();
-            match opt_memo {
-                // Replace the `InProgress` marker that we installed with the new
-                // memo, thus releasing our unique access to this key.
-                Some(memo) => std::mem::replace(&mut *write, QueryState::Memoized(memo)),
-
-                // We had installed an `InProgress` marker, but we panicked before
-                // it could be removed. At this point, we therefore "own" unique
-                // access to our slot, so we can just remove the key.
-                None => std::mem::replace(&mut *write, QueryState::NotComputed),
-            }
-        };
-
-        match old_value {
-            QueryState::InProgress { id, anyone_waiting } => {
-                assert_eq!(id, self.runtime.id());
-
-                // NB: As noted at the corresponding `store`, `Ordering::Relaxed` is
-                // sufficient here. This boolean only tells us whether to
-                // acquire a mutex; the mutex will guarantee that all writes
-                // we are interested in are visible.
-                if anyone_waiting.load(Ordering::Relaxed) {
-                    self.runtime
-                        .unblock_queries_blocked_on(self.slot.database_key_index(), wait_result);
-                }
-            }
-            _ => panic!(
-                "\
-Unexpected panic during query evaluation, aborting the process.
-
-Please report this bug to https://github.com/salsa-rs/salsa/issues."
-            ),
-        }
-    }
-}
-
-impl<Q, MP> Drop for PanicGuard<'_, Q, MP>
-where
-    Q: QueryFunction,
-    MP: MemoizationPolicy<Q>,
-{
-    fn drop(&mut self) {
-        if std::thread::panicking() {
-            // We panicked before we could proceed and need to remove `key`.
-            self.overwrite_placeholder(WaitResult::Panicked, None)
-        } else {
-            // If no panic occurred, then the panic guard ought to have been
-            // "forgotten", and so this Drop code should never run.
-            panic!(".forget() was not called")
-        }
-    }
-}
-
-impl<V> Memo<V>
-where
-    V: Clone,
-{
-    /// Determines whether the value stored in this memo (if any) is still
-    /// valid in the current revision. If so, returns a stamped value.
-    ///
-    /// If needed, this will walk each dependency and
-    /// recursively invoke `maybe_changed_after`, which may in turn
-    /// re-execute the dependency. This can cause cycles to occur,
-    /// so the current query must be pushed onto the
-    /// stack to permit cycle detection and recovery; therefore, this
-    /// method takes the `active_query` argument as evidence.
-    fn verify_value(
-        &mut self,
-        db: &dyn Database,
-        revision_now: Revision,
-        active_query: &ActiveQueryGuard<'_>,
-    ) -> Option<StampedValue<V>> {
-        // If we don't have a memoized value, nothing to validate.
-        if self.value.is_none() {
-            return None;
-        }
-        if self.verify_revisions(db, revision_now, active_query) {
-            self.value.clone().map(|value| StampedValue {
-                durability: self.revisions.durability,
-                changed_at: self.revisions.changed_at,
-                value,
-            })
-        } else {
-            None
-        }
-    }
-
-    /// Determines whether the value represented by this memo is still
-    /// valid in the current revision; note that the value itself is
-    /// not needed for this check. If needed, this will walk each
-    /// dependency and recursively invoke `maybe_changed_after`, which
-    /// may in turn re-execute the dependency. This can cause cycles to occur,
-    /// so the current query must be pushed onto the
-    /// stack to permit cycle detection and recovery; therefore, this
-    /// method takes the `active_query` argument as evidence.
-    fn verify_revisions(
-        &mut self,
-        db: &dyn Database,
-        revision_now: Revision,
-        _active_query: &ActiveQueryGuard<'_>,
-    ) -> bool {
-        assert!(self.verified_at != revision_now);
-        let verified_at = self.verified_at;
-
-        trace!(
-            "verify_revisions: verified_at={:?}, revision_now={:?}, inputs={:#?}",
-            verified_at,
-            revision_now,
-            self.revisions.inputs
-        );
-
-        if self.check_durability(db.salsa_runtime()) {
-            return self.mark_value_as_verified(revision_now);
-        }
-
-        match &self.revisions.inputs {
-            // We can't validate values that had untracked inputs; just have to
-            // re-execute.
-            None if self.revisions.untracked => return false,
-            None => {}
-
-            // Check whether any of our inputs changed since the
-            // **last point where we were verified** (not since we
-            // last changed). This is important: if we have
-            // memoized values, then an input may have changed in
-            // revision R2, but we found that *our* value was the
-            // same regardless, so our change date is still
-            // R1. But our *verification* date will be R2, and we
-            // are only interested in finding out whether the
-            // input changed *again*.
-            Some(inputs) => {
-                let changed_input =
-                    inputs.slice.iter().find(|&&input| db.maybe_changed_after(input, verified_at));
-                if let Some(input) = changed_input {
-                    trace!("validate_memoized_value: `{:?}` may have changed", input);
-
-                    return false;
-                }
-            }
-        };
-
-        self.mark_value_as_verified(revision_now)
-    }
-
-    /// True if this memo is known not to have changed based on its durability.
-    fn check_durability(&self, runtime: &Runtime) -> bool {
-        let last_changed = runtime.last_changed_revision(self.revisions.durability);
-        trace!(
-            "check_durability(last_changed={:?} <= verified_at={:?}) = {:?}",
-            last_changed,
-            self.verified_at,
-            last_changed <= self.verified_at,
-        );
-        last_changed <= self.verified_at
-    }
-
-    fn mark_value_as_verified(&mut self, revision_now: Revision) -> bool {
-        self.verified_at = revision_now;
-        true
-    }
-
-    fn has_untracked_input(&self) -> bool {
-        self.revisions.untracked
-    }
-}
-
-impl<Q, MP> std::fmt::Debug for Slot<Q, MP>
-where
-    Q: QueryFunction,
-    MP: MemoizationPolicy<Q>,
-{
-    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(fmt, "{:?}", Q::default())
-    }
-}
-
-impl<Q, MP> LruNode for Slot<Q, MP>
-where
-    Q: QueryFunction,
-    MP: MemoizationPolicy<Q>,
-{
-    fn lru_index(&self) -> &LruIndex {
-        &self.lru_index
-    }
-}
-
-/// Check that `Slot<Q, MP>: Send + Sync` as long as
-/// `DB::DatabaseData: Send + Sync`, which in turn implies that
-/// `Q::Key: Send + Sync`, `Q::Value: Send + Sync`.
-#[allow(dead_code)]
-fn check_send_sync<Q, MP>()
-where
-    Q: QueryFunction,
-    MP: MemoizationPolicy<Q>,
-    Q::Key: Send + Sync,
-    Q::Value: Send + Sync,
-{
-    fn is_send_sync<T: Send + Sync>() {}
-    is_send_sync::<Slot<Q, MP>>();
-}
-
-/// Check that `Slot<Q, MP>: 'static` as long as
-/// `DB::DatabaseData: 'static`, which in turn implies that
-/// `Q::Key: 'static`, `Q::Value: 'static`.
-#[allow(dead_code)]
-fn check_static<Q, MP>()
-where
-    Q: QueryFunction + 'static,
-    MP: MemoizationPolicy<Q> + 'static,
-    Q::Key: 'static,
-    Q::Value: 'static,
-{
-    fn is_static<T: 'static>() {}
-    is_static::<Slot<Q, MP>>();
-}
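
The `read`/`read_upgrade` split above is a double-checked locking scheme built on parking_lot's upgradable read locks: probe under a shared read lock first, and only take the upgradable lock (exclusive among writers and other upgraders) once the memo actually needs to be verified or recomputed. A simplified sketch of that locking shape, assuming the parking_lot crate and a plain `Option<u32>` cache standing in for `QueryState`:

use parking_lot::RwLock;

fn get_or_compute(cache: &RwLock<Option<u32>>, compute: impl Fn() -> u32) -> u32 {
    // Fast path: shared read lock.
    if let Some(v) = *cache.read() {
        return v;
    }

    // Slow path: an upgradable read still allows plain readers, but only one
    // thread can hold it, so only one thread recomputes.
    let guard = cache.upgradable_read();
    if let Some(v) = *guard {
        return v;
    }
    let mut write = parking_lot::RwLockUpgradableReadGuard::upgrade(guard);
    let v = compute();
    *write = Some(v);
    v
}

Only the slow path pays for exclusivity, and the re-check under the upgradable guard covers the race where another thread filled the cache between the two probes.
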
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/src/durability.rs b/src/tools/rust-analyzer/crates/ra-salsa/src/durability.rs
deleted file mode 100644
index 9116f1606faf1..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/src/durability.rs
+++ /dev/null
@@ -1,49 +0,0 @@
-/// Describes how likely a value is to change -- how "durable" it is.
-/// By default, inputs have `Durability::LOW` and interned values have
-/// `Durability::HIGH`. But inputs can be explicitly set with other
-/// durabilities.
-///
-/// We use durabilities to optimize the work of "revalidating" a query
-/// after some input has changed. Ordinarily, in a new revision,
-/// queries have to trace all their inputs back to the base inputs to
-/// determine if any of those inputs have changed. But if we know that
-/// the only changes were to inputs of low durability (the common
-/// case), and we know that the query only used inputs of medium
-/// durability or higher, then we can skip that enumeration.
-///
-/// Typically, one assigns low durabilities to inputs that the user is
-/// frequently editing. Medium or high durabilities are used for
-/// configuration, source code from library crates, or other things
-/// that are unlikely to be edited.
-#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Durability(u8);
-
-impl Durability {
-    /// Low durability: things that change frequently.
-    ///
-    /// Example: part of the crate being edited
-    pub const LOW: Durability = Durability(0);
-
-    /// Medium durability: things that change sometimes, but rarely.
-    ///
-    /// Example: a Cargo.toml file
-    pub const MEDIUM: Durability = Durability(1);
-
-    /// High durability: things that are not expected to change under
-    /// common usage.
-    ///
-    /// Example: the standard library or something from crates.io
-    pub const HIGH: Durability = Durability(2);
-
-    /// The maximum possible durability; equivalent to HIGH but
-    /// "conceptually" distinct (i.e., if we add more durability
-    /// levels, this could change).
-    pub(crate) const MAX: Durability = Self::HIGH;
-
-    /// Number of durability levels.
-    pub(crate) const LEN: usize = Self::MAX.index() + 1;
-
-    pub(crate) const fn index(self) -> usize {
-        self.0 as usize
-    }
-}
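
To make the revalidation shortcut concrete: a memo records the minimum durability of its inputs, and the runtime tracks, per durability level, the last revision in which any input of at least that durability changed. If nothing at the memo's durability level has changed since the memo was last verified, the entire input walk can be skipped. A hand-wavy sketch with made-up names (this is not the deleted `Runtime` API):

#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug)]
struct Revision(u32);

#[derive(Copy, Clone)]
enum Durability {
    Low = 0,
    Medium = 1,
    High = 2,
}

struct RevisionTracker {
    // last_changed[d] = last revision in which any input with durability >= d changed.
    last_changed: [Revision; 3],
}

impl RevisionTracker {
    fn record_input_change(&mut self, durability: Durability, now: Revision) {
        // A change at durability d invalidates every level <= d.
        for level in 0..=durability as usize {
            self.last_changed[level] = now;
        }
    }

    fn shortcut_applies(&self, memo_durability: Durability, verified_at: Revision) -> bool {
        self.last_changed[memo_durability as usize] <= verified_at
    }
}
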
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/src/hash.rs b/src/tools/rust-analyzer/crates/ra-salsa/src/hash.rs
deleted file mode 100644
index 3b2d7df3fbeaa..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/src/hash.rs
+++ /dev/null
@@ -1,3 +0,0 @@
-pub(crate) type FxHasher = std::hash::BuildHasherDefault<rustc_hash::FxHasher>;
-pub(crate) type FxIndexSet<K> = indexmap::IndexSet<K, FxHasher>;
-pub(crate) type FxIndexMap<K, V> = indexmap::IndexMap<K, V, FxHasher>;
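
These aliases pair indexmap with the rustc-hash hasher; the property the storages above depend on is that an `IndexMap` assigns each key a stable insertion index, which is what gets packed into `DatabaseKeyIndex::key_index` and later resolved back via `get_index`. A small example using the indexmap and rustc-hash crates directly:

use indexmap::IndexMap;
use std::hash::BuildHasherDefault;

type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<rustc_hash::FxHasher>>;

fn demo_index_map() {
    let mut slots: FxIndexMap<&str, u32> = FxIndexMap::default();

    // `entry` exposes the index the key will occupy, before insertion.
    let entry = slots.entry("parse_module");
    let key_index = entry.index() as u32;
    entry.or_insert(0);

    assert_eq!(key_index, 0);
    // The index can later be turned back into the key, as `fmt_index` does.
    assert_eq!(slots.get_index(0).map(|(k, _)| *k), Some("parse_module"));
}
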
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/src/input.rs b/src/tools/rust-analyzer/crates/ra-salsa/src/input.rs
deleted file mode 100644
index 4992a0c7271cc..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/src/input.rs
+++ /dev/null
@@ -1,371 +0,0 @@
-use crate::debug::TableEntry;
-use crate::durability::Durability;
-use crate::hash::FxIndexMap;
-use crate::plumbing::CycleRecoveryStrategy;
-use crate::plumbing::InputQueryStorageOps;
-use crate::plumbing::QueryStorageMassOps;
-use crate::plumbing::QueryStorageOps;
-use crate::revision::Revision;
-use crate::runtime::StampedValue;
-use crate::Database;
-use crate::Query;
-use crate::Runtime;
-use crate::{DatabaseKeyIndex, QueryDb};
-use indexmap::map::Entry;
-use parking_lot::RwLock;
-use std::iter;
-use tracing::trace;
-
-/// Input queries store values that are set directly from outside the
-/// query system via `set`, together with the revision in which each
-/// value last changed and its durability.
-pub struct InputStorage<Q>
-where
-    Q: Query,
-{
-    group_index: u16,
-    slots: RwLock<FxIndexMap<Q::Key, Slot<Q::Value>>>,
-}
-
-struct Slot<V> {
-    key_index: u32,
-    stamped_value: RwLock<StampedValue<V>>,
-}
-
-impl<Q> std::panic::RefUnwindSafe for InputStorage<Q>
-where
-    Q: Query,
-    Q::Key: std::panic::RefUnwindSafe,
-    Q::Value: std::panic::RefUnwindSafe,
-{
-}
-
-impl<Q> QueryStorageOps<Q> for InputStorage<Q>
-where
-    Q: Query,
-{
-    const CYCLE_STRATEGY: crate::plumbing::CycleRecoveryStrategy = CycleRecoveryStrategy::Panic;
-
-    fn new(group_index: u16) -> Self {
-        InputStorage { group_index, slots: Default::default() }
-    }
-
-    fn fmt_index(
-        &self,
-        _db: &<Q as QueryDb<'_>>::DynDb,
-        index: u32,
-        fmt: &mut std::fmt::Formatter<'_>,
-    ) -> std::fmt::Result {
-        let slot_map = self.slots.read();
-        let key = slot_map.get_index(index as usize).unwrap().0;
-        write!(fmt, "{}({:?})", Q::QUERY_NAME, key)
-    }
-
-    fn maybe_changed_after(
-        &self,
-        db: &<Q as QueryDb<'_>>::DynDb,
-        index: u32,
-        revision: Revision,
-    ) -> bool {
-        debug_assert!(revision < db.salsa_runtime().current_revision());
-        let slots = &self.slots.read();
-        let Some((_, slot)) = slots.get_index(index as usize) else {
-            return true;
-        };
-
-        trace!("maybe_changed_after(slot={:?}, revision={:?})", Q::default(), revision,);
-
-        let changed_at = slot.stamped_value.read().changed_at;
-
-        trace!("maybe_changed_after: changed_at = {:?}", changed_at);
-
-        changed_at > revision
-    }
-
-    fn fetch(&self, db: &<Q as QueryDb<'_>>::DynDb, key: &Q::Key) -> Q::Value {
-        db.unwind_if_cancelled();
-
-        let slots = &self.slots.read();
-        let slot = slots
-            .get(key)
-            .unwrap_or_else(|| panic!("no value set for {:?}({:?})", Q::default(), key));
-
-        let StampedValue { value, durability, changed_at } = slot.stamped_value.read().clone();
-
-        db.salsa_runtime().report_query_read_and_unwind_if_cycle_resulted(
-            DatabaseKeyIndex {
-                group_index: self.group_index,
-                query_index: Q::QUERY_INDEX,
-                key_index: slot.key_index,
-            },
-            durability,
-            changed_at,
-        );
-
-        value
-    }
-
-    fn durability(&self, _db: &<Q as QueryDb<'_>>::DynDb, key: &Q::Key) -> Durability {
-        match self.slots.read().get(key) {
-            Some(slot) => slot.stamped_value.read().durability,
-            None => panic!("no value set for {:?}({:?})", Q::default(), key),
-        }
-    }
-
-    fn entries<C>(&self, _db: &<Q as QueryDb<'_>>::DynDb) -> C
-    where
-        C: std::iter::FromIterator<TableEntry<Q::Key, Q::Value>>,
-    {
-        let slots = self.slots.read();
-        slots
-            .iter()
-            .map(|(key, slot)| {
-                TableEntry::new(key.clone(), Some(slot.stamped_value.read().value.clone()))
-            })
-            .collect()
-    }
-}
-
-impl<Q> QueryStorageMassOps for InputStorage<Q>
-where
-    Q: Query,
-{
-    fn purge(&self) {
-        *self.slots.write() = Default::default();
-    }
-}
-
-impl<Q> InputQueryStorageOps<Q> for InputStorage<Q>
-where
-    Q: Query,
-{
-    fn set(&self, runtime: &mut Runtime, key: &Q::Key, value: Q::Value, durability: Durability) {
-        tracing::trace!("{:?}({:?}) = {:?} ({:?})", Q::default(), key, value, durability);
-
-        // The value is changing, so we need a new revision (*). We also
-        // need to update the 'last changed' revision for the durability of
-        // the modified input.
-        //
-        // CAREFUL: This will block until the global revision lock can
-        // be acquired. If there are still queries executing, they may
-        // need to read from this input. Therefore, we wait to acquire
-        // the lock on `map` until we also hold the global query write
-        // lock.
-        //
-        // (*) Technically, since you can't presently access an input
-        // for a non-existent key, and you can't enumerate the set of
-        // keys, we only need a new revision if the key used to
-        // exist. But we may add such methods in the future and this
-        // case doesn't generally seem worth optimizing for.
-        runtime.with_incremented_revision(|next_revision| {
-            let mut slots = self.slots.write();
-
-            // Do this *after* we acquire the lock, so that we are not
-            // racing with somebody else to modify this same cell.
-            // (Otherwise, someone else might write a *newer* revision
-            // into the same cell while we block on the lock.)
-            let stamped_value = StampedValue { value, durability, changed_at: next_revision };
-
-            match slots.entry(key.clone()) {
-                Entry::Occupied(entry) => {
-                    let mut slot_stamped_value = entry.get().stamped_value.write();
-                    let old_durability = slot_stamped_value.durability;
-                    *slot_stamped_value = stamped_value;
-                    Some(old_durability)
-                }
-
-                Entry::Vacant(entry) => {
-                    let key_index = entry.index() as u32;
-                    entry.insert(Slot { key_index, stamped_value: RwLock::new(stamped_value) });
-                    None
-                }
-            }
-        });
-    }
-}
-
-/// Same as `InputStorage`, but optimized for queries whose key is `()` (i.e., that take no arguments).
-pub struct UnitInputStorage<Q>
-where
-    Q: Query<Key = ()>,
-{
-    slot: UnitSlot<Q::Value>,
-}
-
-struct UnitSlot<V> {
-    database_key_index: DatabaseKeyIndex,
-    stamped_value: RwLock<Option<StampedValue<V>>>,
-}
-
-impl<Q> std::panic::RefUnwindSafe for UnitInputStorage<Q>
-where
-    Q: Query<Key = ()>,
-    Q::Key: std::panic::RefUnwindSafe,
-    Q::Value: std::panic::RefUnwindSafe,
-{
-}
-
-impl<Q> QueryStorageOps<Q> for UnitInputStorage<Q>
-where
-    Q: Query<Key = ()>,
-{
-    const CYCLE_STRATEGY: crate::plumbing::CycleRecoveryStrategy = CycleRecoveryStrategy::Panic;
-
-    fn new(group_index: u16) -> Self {
-        let database_key_index =
-            DatabaseKeyIndex { group_index, query_index: Q::QUERY_INDEX, key_index: 0 };
-        UnitInputStorage { slot: UnitSlot { database_key_index, stamped_value: RwLock::new(None) } }
-    }
-
-    fn fmt_index(
-        &self,
-        _db: &<Q as QueryDb<'_>>::DynDb,
-        _index: u32,
-        fmt: &mut std::fmt::Formatter<'_>,
-    ) -> std::fmt::Result {
-        write!(fmt, "{}", Q::QUERY_NAME)
-    }
-
-    fn maybe_changed_after(
-        &self,
-        db: &<Q as QueryDb<'_>>::DynDb,
-        _index: u32,
-        revision: Revision,
-    ) -> bool {
-        debug_assert!(revision < db.salsa_runtime().current_revision());
-
-        trace!("maybe_changed_after(slot={:?}, revision={:?})", Q::default(), revision,);
-
-        let Some(value) = &*self.slot.stamped_value.read() else {
-            return true;
-        };
-        let changed_at = value.changed_at;
-
-        trace!("maybe_changed_after: changed_at = {:?}", changed_at);
-
-        changed_at > revision
-    }
-
-    fn fetch(&self, db: &<Q as QueryDb<'_>>::DynDb, &(): &Q::Key) -> Q::Value {
-        db.unwind_if_cancelled();
-
-        let StampedValue { value, durability, changed_at } = self
-            .slot
-            .stamped_value
-            .read()
-            .clone()
-            .unwrap_or_else(|| panic!("no value set for {:?}", Q::default()));
-
-        db.salsa_runtime().report_query_read_and_unwind_if_cycle_resulted(
-            self.slot.database_key_index,
-            durability,
-            changed_at,
-        );
-
-        value
-    }
-
-    fn durability(&self, _db: &<Q as QueryDb<'_>>::DynDb, &(): &Q::Key) -> Durability {
-        match &*self.slot.stamped_value.read() {
-            Some(stamped_value) => stamped_value.durability,
-            None => panic!("no value set for {:?}", Q::default(),),
-        }
-    }
-
-    fn entries<C>(&self, _db: &<Q as QueryDb<'_>>::DynDb) -> C
-    where
-        C: std::iter::FromIterator<TableEntry<Q::Key, Q::Value>>,
-    {
-        iter::once(TableEntry::new(
-            (),
-            self.slot.stamped_value.read().as_ref().map(|it| it.value.clone()),
-        ))
-        .collect()
-    }
-}
-
-impl<Q> QueryStorageMassOps for UnitInputStorage<Q>
-where
-    Q: Query<Key = ()>,
-{
-    fn purge(&self) {
-        *self.slot.stamped_value.write() = Default::default();
-    }
-}
-
-impl<Q> InputQueryStorageOps<Q> for UnitInputStorage<Q>
-where
-    Q: Query<Key = ()>,
-{
-    fn set(&self, runtime: &mut Runtime, (): &Q::Key, value: Q::Value, durability: Durability) {
-        tracing::trace!("{:?} = {:?} ({:?})", Q::default(), value, durability);
-
-        // The value is changing, so we need a new revision (*). We also
-        // need to update the 'last changed' revision by invoking
-        // `guard.mark_durability_as_changed`.
-        //
-        // CAREFUL: This will block until the global revision lock can
-        // be acquired. If there are still queries executing, they may
-        // need to read from this input. Therefore, we wait to acquire
-        // the lock on `map` until we also hold the global query write
-        // lock.
-        //
-        // (*) Technically, since you can't presently access an input
-        // for a non-existent key, and you can't enumerate the set of
-        // keys, we only need a new revision if the key used to
-        // exist. But we may add such methods in the future and this
-        // case doesn't generally seem worth optimizing for.
-        runtime.with_incremented_revision(|next_revision| {
-            let mut stamped_value_slot = self.slot.stamped_value.write();
-
-            // Do this *after* we acquire the lock, so that we are not
-            // racing with somebody else to modify this same cell.
-            // (Otherwise, someone else might write a *newer* revision
-            // into the same cell while we block on the lock.)
-            let stamped_value = StampedValue { value, durability, changed_at: next_revision };
-
-            match &mut *stamped_value_slot {
-                Some(slot_stamped_value) => {
-                    let old_durability = slot_stamped_value.durability;
-                    *slot_stamped_value = stamped_value;
-                    Some(old_durability)
-                }
-
-                stamped_value_slot @ None => {
-                    *stamped_value_slot = Some(stamped_value);
-                    None
-                }
-            }
-        });
-    }
-}
-
-/// Check that `Slot<Q, MP>: Send + Sync` as long as
-/// `DB::DatabaseData: Send + Sync`, which in turn implies that
-/// `Q::Key: Send + Sync`, `Q::Value: Send + Sync`.
-#[allow(dead_code)]
-fn check_send_sync<Q>()
-where
-    Q: Query,
-    Q::Key: Send + Sync,
-    Q::Value: Send + Sync,
-{
-    fn is_send_sync<T: Send + Sync>() {}
-    is_send_sync::<Slot<Q::Value>>();
-    is_send_sync::<UnitSlot<Q::Value>>();
-}
-
-/// Check that `Slot<Q, MP>: 'static` as long as
-/// `DB::DatabaseData: 'static`, which in turn implies that
-/// `Q::Key: 'static`, `Q::Value: 'static`.
-#[allow(dead_code)]
-fn check_static<Q>()
-where
-    Q: Query + 'static,
-    Q::Key: 'static,
-    Q::Value: 'static,
-{
-    fn is_static<T: 'static>() {}
-    is_static::<Slot<Q::Value>>();
-    is_static::<UnitSlot<Q::Value>>();
-}
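The `maybe_changed_after` implementations above reduce to a single comparison: an input stamped with `changed_at` may have changed relative to revision `R` exactly when `changed_at > R`. A tiny self-contained model of that check (local `Revision` and `StampedValue` stand-ins, not the salsa types):

```rust
// Minimal model of the stamping used by `maybe_changed_after`: an input
// records the revision at which it last changed, and a reader that last
// verified at revision `R` only needs to re-check if the stamp is newer.
#[derive(Clone, Copy, PartialEq, PartialOrd, Debug)]
struct Revision(u64);

struct StampedValue<V> {
    value: V,
    changed_at: Revision,
}

impl<V> StampedValue<V> {
    fn maybe_changed_after(&self, revision: Revision) -> bool {
        self.changed_at > revision
    }
}

fn main() {
    let input = StampedValue { value: 42, changed_at: Revision(3) };
    assert!(!input.maybe_changed_after(Revision(5))); // stamped before R5: still valid
    assert!(input.maybe_changed_after(Revision(2))); // stamped after R2: must re-verify
    let _ = input.value;
}
```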
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/src/intern_id.rs b/src/tools/rust-analyzer/crates/ra-salsa/src/intern_id.rs
deleted file mode 100644
index 35b495998e16a..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/src/intern_id.rs
+++ /dev/null
@@ -1,131 +0,0 @@
-use std::fmt;
-use std::num::NonZeroU32;
-
-/// The "raw-id" is used for interned keys in salsa -- it is basically
-/// a newtype'd u32. Typically, it is wrapped in a type of your own
-/// devising. For more information about interned keys, see [the
-/// interned key RFC][rfc].
-///
-/// # Creating an `InternId`
-///
-/// InternId values can be constructed using the `From` impls,
-/// which are implemented for `u32` and `usize`:
-///
-/// ```
-/// # use ra_salsa::InternId;
-/// let intern_id1 = InternId::from(22_u32);
-/// let intern_id2 = InternId::from(22_usize);
-/// assert_eq!(intern_id1, intern_id2);
-/// ```
-///
-/// # Converting to a u32 or usize
-///
-/// Normally, there should be no need to access the underlying integer
-/// in an `InternId`. But if you do need to do so, you can convert it to a
-/// `u32` or `usize` using the `as_u32` or `as_usize` methods or the `From` impls.
-///
-/// ```
-/// # use ra_salsa::InternId;
-/// let intern_id = InternId::from(22_u32);
-/// let value = u32::from(intern_id);
-/// assert_eq!(value, 22);
-/// ```
-///
-/// ## Illegal values
-///
-/// Be warned, however, that `InternId` values cannot be created from
-/// *arbitrary* values -- in particular, values at or above
-/// `InternId::MAX` will panic. Those large values are reserved so that
-/// the Rust compiler can use them as sentinel values, which means
-/// that (for example) `Option<InternId>` is represented in a single
-/// word.
-///
-/// ```should_panic
-/// # use ra_salsa::InternId;
-/// InternId::from(InternId::MAX);
-/// ```
-///
-/// [rfc]: https://github.com/salsa-rs/salsa-rfcs/pull/2
-#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub struct InternId {
-    value: NonZeroU32,
-}
-
-impl InternId {
-    /// The maximum allowed `InternId`. This value can grow between
-    /// releases without affecting semver.
-    pub const MAX: u32 = 0xFFFF_FF00;
-
-    /// Creates a new InternId.
-    ///
-    /// # Safety
-    ///
-    /// `value` must be less than `MAX`
-    pub const unsafe fn new_unchecked(value: u32) -> Self {
-        debug_assert!(value < InternId::MAX);
-        let value = unsafe { NonZeroU32::new_unchecked(value + 1) };
-        InternId { value }
-    }
-
-    /// Convert this raw-id into a u32 value.
-    ///
-    /// ```
-    /// # use ra_salsa::InternId;
-    /// let intern_id = InternId::from(22_u32);
-    /// let value = intern_id.as_u32();
-    /// assert_eq!(value, 22);
-    /// ```
-    pub fn as_u32(self) -> u32 {
-        self.value.get() - 1
-    }
-
-    /// Convert this raw-id into a usize value.
-    ///
-    /// ```
-    /// # use ra_salsa::InternId;
-    /// let intern_id = InternId::from(22_u32);
-    /// let value = intern_id.as_usize();
-    /// assert_eq!(value, 22);
-    /// ```
-    pub fn as_usize(self) -> usize {
-        self.as_u32() as usize
-    }
-}
-
-impl From<InternId> for u32 {
-    fn from(raw: InternId) -> u32 {
-        raw.as_u32()
-    }
-}
-
-impl From<InternId> for usize {
-    fn from(raw: InternId) -> usize {
-        raw.as_usize()
-    }
-}
-
-impl From<u32> for InternId {
-    fn from(id: u32) -> InternId {
-        assert!(id < InternId::MAX);
-        unsafe { InternId::new_unchecked(id) }
-    }
-}
-
-impl From<usize> for InternId {
-    fn from(id: usize) -> InternId {
-        assert!(id < (InternId::MAX as usize));
-        unsafe { InternId::new_unchecked(id as u32) }
-    }
-}
-
-impl fmt::Debug for InternId {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        self.as_usize().fmt(f)
-    }
-}
-
-impl fmt::Display for InternId {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        self.as_usize().fmt(f)
-    }
-}
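The `InternId` docs above lean on `NonZeroU32`'s niche: because the all-zero bit pattern is never valid, `Option<InternId>` fits in a single `u32`-sized word, which is why values at or above `InternId::MAX` are reserved. A stand-alone check of that layout, using a local newtype shaped like `InternId` rather than the real type from `ra_salsa`:

```rust
use std::mem::size_of;
use std::num::NonZeroU32;

// Local stand-in with the same shape as `InternId`; the real type lives in
// `ra_salsa` and is constructed through its `From` impls instead.
struct RawId {
    value: NonZeroU32,
}

fn main() {
    // The all-zero bit pattern is never used, so `Option` can claim it for
    // `None`: no extra discriminant word is needed.
    assert_eq!(size_of::<RawId>(), 4);
    assert_eq!(size_of::<Option<RawId>>(), 4);

    // The `+ 1` in `new_unchecked` / `- 1` in `as_u32` is what keeps the
    // externally visible index 0 usable despite the non-zero requirement.
    let raw = RawId { value: NonZeroU32::new(22 + 1).unwrap() };
    assert_eq!(raw.value.get() - 1, 22);
}
```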
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/src/interned.rs b/src/tools/rust-analyzer/crates/ra-salsa/src/interned.rs
deleted file mode 100644
index 42c398d697de6..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/src/interned.rs
+++ /dev/null
@@ -1,510 +0,0 @@
-use crate::debug::TableEntry;
-use crate::durability::Durability;
-use crate::intern_id::InternId;
-use crate::plumbing::CycleRecoveryStrategy;
-use crate::plumbing::HasQueryGroup;
-use crate::plumbing::QueryStorageMassOps;
-use crate::plumbing::QueryStorageOps;
-use crate::revision::Revision;
-use crate::Query;
-use crate::QueryTable;
-use crate::{Database, DatabaseKeyIndex, QueryDb};
-use parking_lot::RwLock;
-use rustc_hash::FxHashMap;
-use std::collections::hash_map::Entry;
-use std::fmt::Debug;
-use std::hash::Hash;
-use triomphe::Arc;
-
-const INTERN_DURABILITY: Durability = Durability::HIGH;
-
-/// Handles storage for interned queries, where the key is mapped to a
-/// small integer id (in contrast to "inputs" and "derived" queries).
-pub struct InternedStorage<Q>
-where
-    Q: Query,
-    Q::Key: InternValue,
-    Q::Value: InternKey,
-{
-    group_index: u16,
-    tables: RwLock<InternTables<MappedKey<Q>, Q::Key>>,
-}
-
-/// Storage for looking up interned things.
-pub struct LookupInternedStorage<Q, IQ>
-where
-    Q: Query,
-    Q::Key: InternKey,
-    Q::Value: InternValue,
-{
-    phantom: std::marker::PhantomData<(Q::Key, IQ)>,
-}
-
-struct InternTables<K, V> {
-    /// Map from the key to the corresponding intern-index.
-    map: FxHashMap<K, InternId>,
-
-    /// For each valid intern-index, stores the interned value.
-    values: Vec<Arc<Slot<V>>>,
-}
-
-/// Trait implemented for the "key" that results from a
-/// `#[salsa::intern]` query.  This is basically meant to be a
-/// "newtype"'d `u32`.
-pub trait InternKey {
-    /// Create an instance of the intern-key from a `u32` value.
-    fn from_intern_id(v: InternId) -> Self;
-
-    /// Extract the `u32` with which the intern-key was created.
-    fn as_intern_id(&self) -> InternId;
-}
-
-impl InternKey for InternId {
-    fn from_intern_id(v: InternId) -> InternId {
-        v
-    }
-
-    fn as_intern_id(&self) -> InternId {
-        *self
-    }
-}
-
-/// Trait implemented for the "value" that is being interned.
-pub trait InternValue {
-    /// The key used to intern this value.
-    type Key: Eq + Hash + Debug + Clone;
-    /// Maps the value to a key that will be used to intern it.
-    fn into_key(&self) -> Self::Key;
-    /// Calls the given function with the key that was used to intern this value.
-    ///
-    /// This is mainly used to prevent frequent cloning of the key when doing a lookup.
-    #[inline]
-    fn with_key<F: FnOnce(&Self::Key) -> T, T>(&self, f: F) -> T {
-        f(&self.into_key())
-    }
-}
-
-impl<A: InternValue + Eq + Hash + Debug + Clone, B: InternValue + Eq + Hash + Debug + Clone>
-    InternValue for (A, B)
-{
-    type Key = Self;
-    #[inline]
-    fn into_key(&self) -> Self::Key {
-        self.clone()
-    }
-    #[inline]
-    fn with_key<F: FnOnce(&Self::Key) -> T, T>(&self, f: F) -> T {
-        f(self)
-    }
-}
-
-pub trait InternValueTrivial
-where
-    Self: Eq + Hash + Debug + Clone,
-{
-}
-
-/// Implement [`InternValue`] trivially, that is without actually mapping at all.
-impl<V: InternValueTrivial> InternValue for V {
-    type Key = Self;
-    #[inline]
-    fn into_key(&self) -> Self::Key {
-        self.clone()
-    }
-    #[inline]
-    fn with_key<F: FnOnce(&Self::Key) -> T, T>(&self, f: F) -> T {
-        f(self)
-    }
-}
-
-impl InternValueTrivial for String {}
-
-#[derive(Debug)]
-struct Slot<V> {
-    /// key index for this slot.
-    key_index: u32,
-
-    /// Value that was interned.
-    value: V,
-
-    /// When was this intern'd?
-    ///
-    /// (This informs the "changed-at" result)
-    interned_at: Revision,
-}
-
-impl<Q> std::panic::RefUnwindSafe for InternedStorage<Q>
-where
-    Q: Query,
-    Q::Key: InternValue,
-    Q::Key: std::panic::RefUnwindSafe,
-    Q::Value: InternKey,
-    Q::Value: std::panic::RefUnwindSafe,
-{
-}
-
-impl<K: Debug + Hash + Eq, V> InternTables<K, V> {
-    /// Returns the slot for the given key.
-    fn slot_for_key(&self, key: &K) -> Option<(Arc<Slot<V>>, InternId)> {
-        let &index = self.map.get(key)?;
-        Some((self.slot_for_index(index), index))
-    }
-
-    /// Returns the slot at the given index.
-    fn slot_for_index(&self, index: InternId) -> Arc<Slot<V>> {
-        let slot = &self.values[index.as_usize()];
-        slot.clone()
-    }
-}
-
-impl<K, V> Default for InternTables<K, V>
-where
-    K: Eq + Hash,
-{
-    fn default() -> Self {
-        Self { map: Default::default(), values: Default::default() }
-    }
-}
-
-type MappedKey<Q> = <<Q as Query>::Key as InternValue>::Key;
-
-impl<Q> InternedStorage<Q>
-where
-    Q: Query,
-    Q::Key: InternValue,
-    Q::Value: InternKey,
-{
-    /// Creates a new slot.
-    fn intern_index(
-        &self,
-        db: &<Q as QueryDb<'_>>::DynDb,
-        mapped_key: MappedKey<Q>,
-        insert: impl FnOnce(Q::Value) -> Q::Key,
-    ) -> (Arc<Slot<Q::Key>>, InternId) {
-        let revision_now = db.salsa_runtime().current_revision();
-
-        let mut tables = self.tables.write();
-        let tables = &mut *tables;
-        let entry = match tables.map.entry(mapped_key) {
-            Entry::Vacant(entry) => entry,
-            Entry::Occupied(entry) => {
-                // Somebody inserted this key while we were waiting
-                // for the write lock. In this case, we don't need to
-                // update the `accessed_at` field because they should
-                // have already done so!
-                let index = *entry.get();
-                let slot = &tables.values[index.as_usize()];
-                return (slot.clone(), index);
-            }
-        };
-
-        let create_slot = |index: InternId| {
-            Arc::new(Slot {
-                key_index: index.as_u32(),
-                value: insert(Q::Value::from_intern_id(index)),
-                interned_at: revision_now,
-            })
-        };
-
-        let index = InternId::from(tables.values.len());
-        let slot = create_slot(index);
-        tables.values.push(slot.clone());
-        entry.insert(index);
-
-        (slot, index)
-    }
-
-    fn intern_check(&self, key: &MappedKey<Q>) -> Option<(Arc<Slot<Q::Key>>, InternId)> {
-        self.tables.read().slot_for_key(key)
-    }
-
-    /// Given an index, look up and clone its slot.
-    fn lookup_value(&self, index: InternId) -> Arc<Slot<Q::Key>> {
-        self.tables.read().slot_for_index(index)
-    }
-
-    fn fetch_or_insert(
-        &self,
-        db: &<Q as QueryDb<'_>>::DynDb,
-        key: MappedKey<Q>,
-        insert: impl FnOnce(Q::Value) -> Q::Key,
-    ) -> Q::Value {
-        db.unwind_if_cancelled();
-        let (slot, index) = match self.intern_check(&key) {
-            Some(i) => i,
-            None => self.intern_index(db, key, insert),
-        };
-        let changed_at = slot.interned_at;
-        db.salsa_runtime().report_query_read_and_unwind_if_cycle_resulted(
-            DatabaseKeyIndex {
-                group_index: self.group_index,
-                query_index: Q::QUERY_INDEX,
-                key_index: slot.key_index,
-            },
-            INTERN_DURABILITY,
-            changed_at,
-        );
-        <Q::Value>::from_intern_id(index)
-    }
-}
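`intern_index`, `intern_check`, and `fetch_or_insert` above implement a classic two-sided interner: a hash map from the (mapped) key to an index, plus a dense `Vec` from index back to the stored slot. A simplified, single-threaded sketch of that shape, without the slots, revisions, or locking:

```rust
use std::collections::HashMap;

// Simplified single-threaded interner: the same two-sided shape as
// `InternTables` (map for key -> index, dense vec for index -> value).
#[derive(Default)]
struct Interner {
    map: HashMap<String, u32>,
    values: Vec<String>,
}

impl Interner {
    fn intern(&mut self, key: &str) -> u32 {
        if let Some(&index) = self.map.get(key) {
            // Already interned (the `Entry::Occupied` case above): reuse it.
            return index;
        }
        let index = self.values.len() as u32;
        self.values.push(key.to_owned());
        self.map.insert(key.to_owned(), index);
        index
    }

    fn lookup(&self, index: u32) -> &str {
        &self.values[index as usize]
    }
}

fn main() {
    let mut interner = Interner::default();
    let a = interner.intern("hello");
    let b = interner.intern("hello");
    assert_eq!(a, b); // interning the same key twice yields the same id
    assert_eq!(interner.lookup(a), "hello");
}
```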
-
-impl<Q> QueryStorageOps<Q> for InternedStorage<Q>
-where
-    Q: Query,
-    Q::Key: InternValue,
-    Q::Value: InternKey,
-{
-    const CYCLE_STRATEGY: crate::plumbing::CycleRecoveryStrategy = CycleRecoveryStrategy::Panic;
-
-    fn new(group_index: u16) -> Self {
-        InternedStorage { group_index, tables: RwLock::new(InternTables::default()) }
-    }
-
-    fn fmt_index(
-        &self,
-        _db: &<Q as QueryDb<'_>>::DynDb,
-        index: u32,
-        fmt: &mut std::fmt::Formatter<'_>,
-    ) -> std::fmt::Result {
-        let intern_id = InternId::from(index);
-        let slot = self.lookup_value(intern_id);
-        write!(fmt, "{}({:?})", Q::QUERY_NAME, slot.value)
-    }
-
-    fn maybe_changed_after(
-        &self,
-        db: &<Q as QueryDb<'_>>::DynDb,
-        input: u32,
-        revision: Revision,
-    ) -> bool {
-        debug_assert!(revision < db.salsa_runtime().current_revision());
-        let intern_id = InternId::from(input);
-        let slot = self.lookup_value(intern_id);
-        slot.maybe_changed_after(revision)
-    }
-
-    fn fetch(&self, db: &<Q as QueryDb<'_>>::DynDb, key: &Q::Key) -> Q::Value {
-        db.unwind_if_cancelled();
-
-        let (slot, index) = match key.with_key(|key| self.intern_check(key)) {
-            Some(i) => i,
-            None => self.intern_index(db, key.into_key(), |_| key.clone()),
-        };
-        let changed_at = slot.interned_at;
-        db.salsa_runtime().report_query_read_and_unwind_if_cycle_resulted(
-            DatabaseKeyIndex {
-                group_index: self.group_index,
-                query_index: Q::QUERY_INDEX,
-                key_index: slot.key_index,
-            },
-            INTERN_DURABILITY,
-            changed_at,
-        );
-        <Q::Value>::from_intern_id(index)
-    }
-
-    fn durability(&self, _db: &<Q as QueryDb<'_>>::DynDb, _key: &Q::Key) -> Durability {
-        INTERN_DURABILITY
-    }
-
-    fn entries<C>(&self, _db: &<Q as QueryDb<'_>>::DynDb) -> C
-    where
-        C: std::iter::FromIterator<TableEntry<Q::Key, Q::Value>>,
-    {
-        let tables = self.tables.read();
-        tables
-            .map
-            .values()
-            .map(|index| {
-                TableEntry::new(
-                    tables.values[index.as_usize()].value.clone(),
-                    Some(<Q::Value>::from_intern_id(*index)),
-                )
-            })
-            .collect()
-    }
-}
-
-impl<Q> QueryStorageMassOps for InternedStorage<Q>
-where
-    Q: Query,
-    Q::Key: InternValue,
-    Q::Value: InternKey,
-{
-    fn purge(&self) {
-        *self.tables.write() = Default::default();
-    }
-}
-
-// Workaround for
-// ```
-// IQ: for<'d> QueryDb<
-//     'd,
-//     DynDb = <Q as QueryDb<'d>>::DynDb,
-//     Group = <Q as QueryDb<'d>>::Group,
-//     GroupStorage = <Q as QueryDb<'d>>::GroupStorage,
-// >,
-// ```
-// not working to make rustc know DynDb, Group and GroupStorage being the same in `Q` and `IQ`
-#[doc(hidden)]
-pub trait EqualDynDb<'d, IQ>: QueryDb<'d>
-where
-    IQ: QueryDb<'d>,
-{
-    fn convert_db(d: &Self::DynDb) -> &IQ::DynDb;
-    fn convert_group_storage(d: &Self::GroupStorage) -> &IQ::GroupStorage;
-}
-
-impl<'d, IQ, Q> EqualDynDb<'d, IQ> for Q
-where
-    Q: QueryDb<'d, DynDb = IQ::DynDb, Group = IQ::Group, GroupStorage = IQ::GroupStorage>,
-    Q::DynDb: HasQueryGroup<Q::Group>,
-    IQ: QueryDb<'d>,
-{
-    fn convert_db(d: &Self::DynDb) -> &IQ::DynDb {
-        d
-    }
-    fn convert_group_storage(d: &Self::GroupStorage) -> &IQ::GroupStorage {
-        d
-    }
-}
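The `EqualDynDb` workaround above is a general trick: when rustc cannot see that two queries' associated types coincide, a helper trait with a blanket impl whose bounds force the equality provides an explicit (identity) conversion the type checker accepts. A toy, self-contained version of the same pattern; `HasOutput`, `SameOutput`, `Meters`, and `Feet` are all made up for illustration:

```rust
// A helper trait whose blanket impl only applies when the associated types
// actually coincide, mirroring the shape of the `EqualDynDb` workaround.
trait HasOutput {
    type Output;
}

trait SameOutput<Other: HasOutput>: HasOutput {
    fn convert(x: Self::Output) -> Other::Output;
}

impl<A, B> SameOutput<B> for A
where
    A: HasOutput<Output = B::Output>,
    B: HasOutput,
{
    fn convert(x: Self::Output) -> B::Output {
        x
    }
}

struct Meters;
struct Feet;
impl HasOutput for Meters {
    type Output = f64;
}
impl HasOutput for Feet {
    type Output = f64;
}

fn main() {
    // Both `Output`s are `f64`, so the conversion compiles and is a no-op.
    let x: f64 = <Meters as SameOutput<Feet>>::convert(1.5);
    assert_eq!(x, 1.5);
}
```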
-
-impl<Q, IQ> QueryStorageOps<Q> for LookupInternedStorage<Q, IQ>
-where
-    Q: Query,
-    Q::Key: InternKey,
-    Q::Value: InternValue,
-    IQ: Query<Key = Q::Value, Value = Q::Key, Storage = InternedStorage<IQ>>,
-    for<'d> Q: EqualDynDb<'d, IQ>,
-{
-    const CYCLE_STRATEGY: CycleRecoveryStrategy = CycleRecoveryStrategy::Panic;
-
-    fn new(_group_index: u16) -> Self {
-        LookupInternedStorage { phantom: std::marker::PhantomData }
-    }
-
-    fn fmt_index(
-        &self,
-        db: &<Q as QueryDb<'_>>::DynDb,
-        index: u32,
-        fmt: &mut std::fmt::Formatter<'_>,
-    ) -> std::fmt::Result {
-        let group_storage =
-            <<Q as QueryDb<'_>>::DynDb as HasQueryGroup<Q::Group>>::group_storage(db);
-        let interned_storage = IQ::query_storage(Q::convert_group_storage(group_storage));
-        interned_storage.fmt_index(Q::convert_db(db), index, fmt)
-    }
-
-    fn maybe_changed_after(
-        &self,
-        db: &<Q as QueryDb<'_>>::DynDb,
-        input: u32,
-        revision: Revision,
-    ) -> bool {
-        let group_storage =
-            <<Q as QueryDb<'_>>::DynDb as HasQueryGroup<Q::Group>>::group_storage(db);
-        let interned_storage = IQ::query_storage(Q::convert_group_storage(group_storage));
-        interned_storage.maybe_changed_after(Q::convert_db(db), input, revision)
-    }
-
-    fn fetch(&self, db: &<Q as QueryDb<'_>>::DynDb, key: &Q::Key) -> Q::Value {
-        let index = key.as_intern_id();
-        let group_storage =
-            <<Q as QueryDb<'_>>::DynDb as HasQueryGroup<Q::Group>>::group_storage(db);
-        let interned_storage = IQ::query_storage(Q::convert_group_storage(group_storage));
-        let slot = interned_storage.lookup_value(index);
-        let value = slot.value.clone();
-        let interned_at = slot.interned_at;
-        db.salsa_runtime().report_query_read_and_unwind_if_cycle_resulted(
-            DatabaseKeyIndex {
-                group_index: interned_storage.group_index,
-                query_index: Q::QUERY_INDEX,
-                key_index: slot.key_index,
-            },
-            INTERN_DURABILITY,
-            interned_at,
-        );
-        value
-    }
-
-    fn durability(&self, _db: &<Q as QueryDb<'_>>::DynDb, _key: &Q::Key) -> Durability {
-        INTERN_DURABILITY
-    }
-
-    fn entries<C>(&self, db: &<Q as QueryDb<'_>>::DynDb) -> C
-    where
-        C: std::iter::FromIterator<TableEntry<Q::Key, Q::Value>>,
-    {
-        let group_storage =
-            <<Q as QueryDb<'_>>::DynDb as HasQueryGroup<Q::Group>>::group_storage(db);
-        let interned_storage = IQ::query_storage(Q::convert_group_storage(group_storage));
-        let tables = interned_storage.tables.read();
-        tables
-            .map
-            .values()
-            .map(|index| {
-                TableEntry::new(
-                    <Q::Key>::from_intern_id(*index),
-                    Some(tables.values[index.as_usize()].value.clone()),
-                )
-            })
-            .collect()
-    }
-}
-
-impl<Q, IQ> QueryStorageMassOps for LookupInternedStorage<Q, IQ>
-where
-    Q: Query,
-    Q::Key: InternKey,
-    Q::Value: InternValue,
-    IQ: Query<Key = Q::Value, Value = Q::Key>,
-{
-    fn purge(&self) {}
-}
-
-impl<K> Slot<K> {
-    fn maybe_changed_after(&self, revision: Revision) -> bool {
-        self.interned_at > revision
-    }
-}
-
-/// Check that `Slot<Q, MP>: Send + Sync` as long as
-/// `DB::DatabaseData: Send + Sync`, which in turn implies that
-/// `Q::Key: Send + Sync`, `Q::Value: Send + Sync`.
-#[allow(dead_code)]
-fn check_send_sync<K>()
-where
-    K: Send + Sync,
-{
-    fn is_send_sync<T: Send + Sync>() {}
-    is_send_sync::<Slot<K>>();
-}
-
-/// Check that `Slot<Q, MP>: 'static` as long as
-/// `DB::DatabaseData: 'static`, which in turn implies that
-/// `Q::Key: 'static`, `Q::Value: 'static`.
-#[allow(dead_code)]
-fn check_static<K>()
-where
-    K: 'static,
-{
-    fn is_static<T: 'static>() {}
-    is_static::<Slot<K>>();
-}
-
-impl<Q> QueryTable<'_, Q>
-where
-    Q: Query<Storage = InternedStorage<Q>>,
-    Q::Key: InternValue,
-    Q::Value: InternKey,
-{
-    /// Fetches the intern id for the given key or inserts it if it does not exist.
-    pub fn get_or_insert(
-        &self,
-        key: MappedKey<Q>,
-        insert: impl FnOnce(Q::Value) -> Q::Key,
-    ) -> Q::Value {
-        self.storage.fetch_or_insert(self.db, key, insert)
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/src/lib.rs b/src/tools/rust-analyzer/crates/ra-salsa/src/lib.rs
deleted file mode 100644
index 843b6d31f0c33..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/src/lib.rs
+++ /dev/null
@@ -1,750 +0,0 @@
-#![allow(clippy::type_complexity)]
-#![allow(clippy::question_mark)]
-#![allow(missing_docs)]
-#![warn(rust_2018_idioms)]
-
-//! The salsa crate is a framework for incremental recomputation.  It
-//! permits you to define a "database" of queries with both inputs and
-//! values derived from those inputs; as you set the inputs, you can
-//! re-execute the derived queries and it will try to re-use results
-//! from previous invocations as appropriate.
-
-mod derived;
-mod derived_lru;
-mod durability;
-mod hash;
-mod input;
-mod intern_id;
-mod interned;
-mod lru;
-mod revision;
-mod runtime;
-mod storage;
-
-pub mod debug;
-/// Items in this module are public for implementation reasons,
-/// and are exempt from the SemVer guarantees.
-#[doc(hidden)]
-pub mod plumbing;
-
-use crate::plumbing::CycleRecoveryStrategy;
-use crate::plumbing::DerivedQueryStorageOps;
-use crate::plumbing::InputQueryStorageOps;
-use crate::plumbing::LruQueryStorageOps;
-use crate::plumbing::QueryStorageMassOps;
-use crate::plumbing::QueryStorageOps;
-pub use crate::revision::Revision;
-use std::fmt::{self, Debug};
-use std::hash::Hash;
-use std::panic::AssertUnwindSafe;
-use std::panic::{self, UnwindSafe};
-
-pub use crate::durability::Durability;
-pub use crate::intern_id::InternId;
-pub use crate::interned::{InternKey, InternValue, InternValueTrivial};
-pub use crate::runtime::Runtime;
-pub use crate::runtime::RuntimeId;
-pub use crate::storage::Storage;
-
-/// The base trait which your "query context" must implement. Gives
-/// access to the salsa runtime, which you must embed into your query
-/// context (along with whatever other state you may require).
-pub trait Database: plumbing::DatabaseOps {
-    /// This function is invoked at key points in the salsa
-    /// runtime. It permits the database to be customized and to
-    /// inject logging or other custom behavior.
-    fn salsa_event(&self, event_fn: Event) {
-        _ = event_fn;
-    }
-
-    /// Starts unwinding the stack if the current revision is cancelled.
-    ///
-    /// This method can be called by query implementations that perform
-    /// potentially expensive computations, in order to speed up propagation of
-    /// cancellation.
-    ///
-    /// Cancellation will automatically be triggered by salsa on any query
-    /// invocation.
-    ///
-    /// This method should not be overridden by `Database` implementors. A
-    /// `salsa_event` is emitted when this method is called, so that should be
-    /// used instead.
-    #[inline]
-    fn unwind_if_cancelled(&self) {
-        let runtime = self.salsa_runtime();
-        self.salsa_event(Event {
-            runtime_id: runtime.id(),
-            kind: EventKind::WillCheckCancellation,
-        });
-
-        let current_revision = runtime.current_revision();
-        let pending_revision = runtime.pending_revision();
-        tracing::trace!(
-            "unwind_if_cancelled: current_revision={:?}, pending_revision={:?}",
-            current_revision,
-            pending_revision
-        );
-        if pending_revision > current_revision {
-            runtime.unwind_cancelled();
-        }
-    }
-
-    /// Gives access to the underlying salsa runtime.
-    ///
-    /// This method should not be overridden by `Database` implementors.
-    fn salsa_runtime(&self) -> &Runtime {
-        self.ops_salsa_runtime()
-    }
-
-    /// A "synthetic write" causes the system to act *as though* some
-    /// input of durability `durability` has changed. This is mostly
-    /// useful for profiling scenarios.
-    ///
-    /// **WARNING:** Just like an ordinary write, this method triggers
-    /// cancellation. If you invoke it while a snapshot exists, it
-    /// will block until that snapshot is dropped -- if that snapshot
-    /// is owned by the current thread, this could trigger deadlock.
-    fn synthetic_write(&mut self, durability: Durability) {
-        plumbing::DatabaseOps::synthetic_write(self, durability)
-    }
-}
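The `unwind_if_cancelled` doc above boils down to: compare the pending revision with the current one and resume-unwind with a cancellation payload if a write is waiting. Below is a minimal stand-alone model of how that check propagates out of a long-running query; the `AtomicBool` stands in for the runtime's revision comparison, and `Cancelled` here is a local type, not salsa's.

```rust
use std::panic;
use std::sync::atomic::{AtomicBool, Ordering};

// Stand-in for "pending_revision > current_revision": a flag a writer would
// set while it waits for in-flight queries to finish.
static PENDING_WRITE: AtomicBool = AtomicBool::new(false);

#[derive(Debug)]
struct Cancelled;

fn unwind_if_cancelled() {
    if PENDING_WRITE.load(Ordering::Acquire) {
        // resume_unwind, not panic!, so no panic hook or backtrace runs --
        // the same choice the real `Cancelled::throw` makes.
        panic::resume_unwind(Box::new(Cancelled));
    }
}

fn long_running_query() -> u64 {
    let mut acc = 0u64;
    for i in 0..1_000_000u64 {
        if i % 10_000 == 0 {
            // Give cancellation a chance to propagate between chunks of work.
            unwind_if_cancelled();
        }
        acc = acc.wrapping_add(i);
    }
    acc
}

fn main() {
    PENDING_WRITE.store(true, Ordering::Release);
    let result = panic::catch_unwind(long_running_query);
    assert!(result.is_err()); // cancelled before completing
}
```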
-
-/// The `Event` struct identifies various notable things that can
-/// occur during salsa execution. Instances of this struct are given
-/// to `salsa_event`.
-pub struct Event {
-    /// The id of the snapshot that triggered the event.  Usually
-    /// 1-to-1 with a thread, as well.
-    pub runtime_id: RuntimeId,
-
-    /// What sort of event was it.
-    pub kind: EventKind,
-}
-
-impl Event {
-    /// Returns a type that gives a user-readable debug output.
-    /// Use like `println!("{:?}", index.debug(db))`.
-    pub fn debug<'me, D>(&'me self, db: &'me D) -> impl std::fmt::Debug + 'me
-    where
-        D: ?Sized + plumbing::DatabaseOps,
-    {
-        EventDebug { event: self, db }
-    }
-}
-
-impl fmt::Debug for Event {
-    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
-        fmt.debug_struct("Event")
-            .field("runtime_id", &self.runtime_id)
-            .field("kind", &self.kind)
-            .finish()
-    }
-}
-
-struct EventDebug<'me, D: ?Sized>
-where
-    D: plumbing::DatabaseOps,
-{
-    event: &'me Event,
-    db: &'me D,
-}
-
-impl<D: ?Sized> fmt::Debug for EventDebug<'_, D>
-where
-    D: plumbing::DatabaseOps,
-{
-    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
-        fmt.debug_struct("Event")
-            .field("runtime_id", &self.event.runtime_id)
-            .field("kind", &self.event.kind.debug(self.db))
-            .finish()
-    }
-}
-
-/// An enum identifying the various kinds of events that can occur.
-pub enum EventKind {
-    /// Occurs when we found that all inputs to a memoized value are
-    /// up-to-date and hence the value can be re-used without
-    /// executing the closure.
-    ///
-    /// Executes before the "re-used" value is returned.
-    DidValidateMemoizedValue {
-        /// The database-key for the affected value. Implements `Debug`.
-        database_key: DatabaseKeyIndex,
-    },
-
-    /// Indicates that another thread (with id `other_runtime_id`) is processing the
-    /// given query (`database_key`), so we will block until they
-    /// finish.
-    ///
-    /// Executes after we have registered with the other thread but
-    /// before they have answered us.
-    ///
-    /// (NB: you can find the `id` of the current thread via the
-    /// `salsa_runtime`)
-    WillBlockOn {
-        /// The id of the runtime we will block on.
-        other_runtime_id: RuntimeId,
-
-        /// The database-key for the affected value. Implements `Debug`.
-        database_key: DatabaseKeyIndex,
-    },
-
-    /// Indicates that the function for this query will be executed.
-    /// This is either because it has never executed before or because
-    /// its inputs may be out of date.
-    WillExecute {
-        /// The database-key for the affected value. Implements `Debug`.
-        database_key: DatabaseKeyIndex,
-    },
-
-    /// Indicates that `unwind_if_cancelled` was called and salsa will check if
-    /// the current revision has been cancelled.
-    WillCheckCancellation,
-}
-
-impl EventKind {
-    /// Returns a type that gives a user-readable debug output.
-    /// Use like `println!("{:?}", index.debug(db))`.
-    pub fn debug<'me, D>(&'me self, db: &'me D) -> impl std::fmt::Debug + 'me
-    where
-        D: ?Sized + plumbing::DatabaseOps,
-    {
-        EventKindDebug { kind: self, db }
-    }
-}
-
-impl fmt::Debug for EventKind {
-    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
-        match self {
-            EventKind::DidValidateMemoizedValue { database_key } => fmt
-                .debug_struct("DidValidateMemoizedValue")
-                .field("database_key", database_key)
-                .finish(),
-            EventKind::WillBlockOn { other_runtime_id, database_key } => fmt
-                .debug_struct("WillBlockOn")
-                .field("other_runtime_id", other_runtime_id)
-                .field("database_key", database_key)
-                .finish(),
-            EventKind::WillExecute { database_key } => {
-                fmt.debug_struct("WillExecute").field("database_key", database_key).finish()
-            }
-            EventKind::WillCheckCancellation => fmt.debug_struct("WillCheckCancellation").finish(),
-        }
-    }
-}
-
-struct EventKindDebug<'me, D: ?Sized>
-where
-    D: plumbing::DatabaseOps,
-{
-    kind: &'me EventKind,
-    db: &'me D,
-}
-
-impl<D: ?Sized> fmt::Debug for EventKindDebug<'_, D>
-where
-    D: plumbing::DatabaseOps,
-{
-    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
-        match self.kind {
-            EventKind::DidValidateMemoizedValue { database_key } => fmt
-                .debug_struct("DidValidateMemoizedValue")
-                .field("database_key", &database_key.debug(self.db))
-                .finish(),
-            EventKind::WillBlockOn { other_runtime_id, database_key } => fmt
-                .debug_struct("WillBlockOn")
-                .field("other_runtime_id", &other_runtime_id)
-                .field("database_key", &database_key.debug(self.db))
-                .finish(),
-            EventKind::WillExecute { database_key } => fmt
-                .debug_struct("WillExecute")
-                .field("database_key", &database_key.debug(self.db))
-                .finish(),
-            EventKind::WillCheckCancellation => fmt.debug_struct("WillCheckCancellation").finish(),
-        }
-    }
-}
-
-/// Indicates a database that also supports parallel query
-/// evaluation. All of Salsa's base query support is capable of
-/// parallel execution, but for it to work, your query key/value types
-/// must also be `Send`, as must any additional data in your database.
-pub trait ParallelDatabase: Database + Send {
-    /// Creates a second handle to the database that holds the
-    /// database fixed at a particular revision. So long as this
-    /// "frozen" handle exists, any attempt to [`set`] an input will
-    /// block.
-    ///
-    /// [`set`]: struct.QueryTable.html#method.set
-    ///
-    /// This is the method you are meant to use most of the time in a
-    /// parallel setting where modifications may arise asynchronously
-    /// (e.g., a language server). In this context, it is common to
-    /// wish to "fork off" a snapshot of the database performing some
-    /// series of queries in parallel and arranging the results. Using
-    /// this method for that purpose ensures that those queries will
-    /// see a consistent view of the database (it is also advisable
-    /// for those queries to use the [`Database::unwind_if_cancelled`]
-    /// method to check for cancellation).
-    ///
-    /// # Panics
-    ///
-    /// It is not permitted to create a snapshot from inside of a
-    /// query. Attempting to do so will panic.
-    ///
-    /// # Deadlock warning
-    ///
-    /// The intended pattern for snapshots is that, once created, they
-    /// are sent to another thread and used from there. As such, the
-    /// `snapshot` acquires a "read lock" on the database --
-    /// therefore, so long as the `snapshot` is not dropped, any
-    /// attempt to `set` a value in the database will block. If the
-    /// `snapshot` is owned by the same thread that is attempting to
-    /// `set`, this will cause a problem.
-    ///
-    /// # How to implement this
-    ///
-    /// Typically, this method will create a second copy of your
-    /// database type (`MyDatabaseType`, in the example below),
-    /// cloning over each of the fields from `self` into this new
-    /// copy. For the field that stores the salsa runtime, you should
-    /// use [the `Runtime::snapshot` method][rfm] to create a snapshot of the
-    /// runtime. Finally, package up the result using `Snapshot::new`,
-    /// which is a simple wrapper type that only gives `&self` access
-    /// to the database within (thus preventing the use of methods
-    /// that may mutate the inputs):
-    ///
-    /// [rfm]: struct.Runtime.html#method.snapshot
-    ///
-    /// ```rust,ignore
-    /// impl ParallelDatabase for MyDatabaseType {
-    ///     fn snapshot(&self) -> Snapshot<Self> {
-    ///         Snapshot::new(
-    ///             MyDatabaseType {
-    ///                 runtime: self.runtime.snapshot(self),
-    ///                 other_field: self.other_field.clone(),
-    ///             }
-    ///         )
-    ///     }
-    /// }
-    /// ```
-    fn snapshot(&self) -> Snapshot<Self>;
-}
-
-/// Simple wrapper struct that takes ownership of a database `DB` and
-/// only gives `&self` access to it. See [the `snapshot` method][fm]
-/// for more details.
-///
-/// [fm]: trait.ParallelDatabase.html#method.snapshot
-#[derive(Debug)]
-pub struct Snapshot<DB: ?Sized>
-where
-    DB: ParallelDatabase,
-{
-    db: DB,
-}
-
-impl<DB> Snapshot<DB>
-where
-    DB: ParallelDatabase,
-{
-    /// Creates a `Snapshot` that wraps the given database handle
-    /// `db`. From this point forward, only shared references to `db`
-    /// will be possible.
-    pub fn new(db: DB) -> Self {
-        Snapshot { db }
-    }
-}
-
-impl<DB> std::ops::Deref for Snapshot<DB>
-where
-    DB: ParallelDatabase,
-{
-    type Target = DB;
-
-    fn deref(&self) -> &DB {
-        &self.db
-    }
-}
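`Snapshot` is a small but load-bearing newtype: by implementing `Deref` but not `DerefMut`, it hands out only `&self` access, which is what makes it safe to ship a frozen database view to another thread. A stripped-down illustration of that pattern with a hypothetical `Counter` in place of a database:

```rust
use std::ops::Deref;

// The same wrapper idea, minus the database: `Deref` but no `DerefMut`,
// so only `&self` methods of the wrapped value are reachable.
struct Snapshot<DB> {
    db: DB,
}

impl<DB> Snapshot<DB> {
    fn new(db: DB) -> Self {
        Snapshot { db }
    }
}

impl<DB> Deref for Snapshot<DB> {
    type Target = DB;
    fn deref(&self) -> &DB {
        &self.db
    }
}

struct Counter {
    value: u32,
}

impl Counter {
    fn get(&self) -> u32 {
        self.value
    }
    #[allow(dead_code)] // present only to show that `&mut self` is unreachable
    fn set(&mut self, value: u32) {
        self.value = value;
    }
}

fn main() {
    let snap = Snapshot::new(Counter { value: 7 });
    assert_eq!(snap.get(), 7); // `&self` methods work through `Deref`
    // snap.set(8);            // would not compile: `Snapshot` never hands out `&mut`
}
```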
-
-/// An integer that uniquely identifies a particular query instance within the
-/// database. Used to track dependencies between queries. Fully ordered and
-/// equatable but those orderings are arbitrary, and meant to be used only for
-/// inserting into maps and the like.
-#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
-pub struct DatabaseKeyIndex {
-    group_index: u16,
-    query_index: u16,
-    key_index: u32,
-}
-
-impl DatabaseKeyIndex {
-    /// Returns the index of the query group containing this key.
-    #[inline]
-    pub fn group_index(self) -> u16 {
-        self.group_index
-    }
-
-    /// Returns the index of the query within its query group.
-    #[inline]
-    pub fn query_index(self) -> u16 {
-        self.query_index
-    }
-
-    /// Returns the index of this particular query key within the query.
-    #[inline]
-    pub fn key_index(self) -> u32 {
-        self.key_index
-    }
-
-    /// Returns a type that gives a user-readable debug output.
-    /// Use like `println!("{:?}", index.debug(db))`.
-    pub fn debug<D>(self, db: &D) -> impl std::fmt::Debug + '_
-    where
-        D: ?Sized + plumbing::DatabaseOps,
-    {
-        DatabaseKeyIndexDebug { index: self, db }
-    }
-}
-
-/// Helper type for `DatabaseKeyIndex::debug`
-struct DatabaseKeyIndexDebug<'me, D: ?Sized>
-where
-    D: plumbing::DatabaseOps,
-{
-    index: DatabaseKeyIndex,
-    db: &'me D,
-}
-
-impl<D: ?Sized> std::fmt::Debug for DatabaseKeyIndexDebug<'_, D>
-where
-    D: plumbing::DatabaseOps,
-{
-    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        self.db.fmt_index(self.index, fmt)
-    }
-}
-
-/// Trait implemented by all of the "special types" associated with
-/// each of your queries.
-///
-/// Base trait of `Query` that has a lifetime parameter to allow the `DynDb` to be non-'static.
-pub trait QueryDb<'d>: Sized {
-    /// Dyn version of the associated trait for this query group.
-    type DynDb: ?Sized + Database + HasQueryGroup<Self::Group> + 'd;
-
-    /// Associate query group struct.
-    type Group: plumbing::QueryGroup<GroupStorage = Self::GroupStorage>;
-
-    /// Generated struct that contains storage for all queries in a group.
-    type GroupStorage;
-}
-
-/// Trait implemented by all of the "special types" associated with
-/// each of your queries.
-pub trait Query: Debug + Default + Sized + for<'d> QueryDb<'d> {
-    /// Type that you give as a parameter -- for queries with zero
-    /// or more than one input, this will be a tuple.
-    type Key: Clone + Debug + Hash + Eq;
-
-    /// What value does the query return?
-    type Value: Clone + Debug;
-
-    /// Internal struct storing the values for the query.
-    // type Storage: plumbing::QueryStorageOps<Self>;
-    type Storage;
-
-    /// A unique index identifying this query within the group.
-    const QUERY_INDEX: u16;
-
-    /// Name of the query method (e.g., `foo`)
-    const QUERY_NAME: &'static str;
-
-    /// Extract storage for this query from the storage for its group.
-    fn query_storage<'a>(
-        group_storage: &'a <Self as QueryDb<'_>>::GroupStorage,
-    ) -> &'a std::sync::Arc<Self::Storage>;
-
-    /// Extract storage for this query from the storage for its group.
-    fn query_storage_mut<'a>(
-        group_storage: &'a <Self as QueryDb<'_>>::GroupStorage,
-    ) -> &'a std::sync::Arc<Self::Storage>;
-}
-
-/// Return value from [the `query` method] on `Database`.
-/// Gives access to various less common operations on queries.
-///
-/// [the `query` method]: trait.Database.html#method.query
-pub struct QueryTable<'me, Q>
-where
-    Q: Query,
-{
-    db: &'me <Q as QueryDb<'me>>::DynDb,
-    storage: &'me Q::Storage,
-}
-
-impl<'me, Q> QueryTable<'me, Q>
-where
-    Q: Query,
-    Q::Storage: QueryStorageOps<Q>,
-{
-    /// Constructs a new `QueryTable`.
-    pub fn new(db: &'me <Q as QueryDb<'me>>::DynDb, storage: &'me Q::Storage) -> Self {
-        Self { db, storage }
-    }
-
-    /// Execute the query on a given input. Usually it's easier to
-    /// invoke the trait method directly. Note that for variadic
-    /// queries (those with no inputs, or those with more than one
-    /// input) the key will be a tuple.
-    pub fn get(&self, key: Q::Key) -> Q::Value {
-        self.storage.fetch(self.db, &key)
-    }
-
-    /// Completely clears the storage for this query.
-    ///
-    /// This method breaks internal invariants of salsa, so any further queries
-    /// might return nonsense results. It is useful only in very specific
-    /// circumstances -- for example, when one wants to observe which values
-    /// are dropped together with the table.
-    pub fn purge(&self)
-    where
-        Q::Storage: plumbing::QueryStorageMassOps,
-    {
-        self.storage.purge();
-    }
-
-    pub fn storage(&self) -> &<Q as Query>::Storage {
-        self.storage
-    }
-}
-
-/// Return value from [the `query_mut` method] on `Database`.
-/// Gives access to the `set` method, notably, that is used to
-/// set the value of an input query.
-///
-/// [the `query_mut` method]: trait.Database.html#method.query_mut
-pub struct QueryTableMut<'me, Q>
-where
-    Q: Query + 'me,
-{
-    runtime: &'me mut Runtime,
-    storage: &'me Q::Storage,
-}
-
-impl<'me, Q> QueryTableMut<'me, Q>
-where
-    Q: Query,
-{
-    /// Constructs a new `QueryTableMut`.
-    pub fn new(runtime: &'me mut Runtime, storage: &'me Q::Storage) -> Self {
-        Self { runtime, storage }
-    }
-
-    /// Assign a value to an "input query". Must be used outside of
-    /// an active query computation.
-    ///
-    /// If you are using `snapshot`, see the notes on blocking
-    /// and cancellation on [the `query_mut` method].
-    ///
-    /// [the `query_mut` method]: trait.Database.html#method.query_mut
-    pub fn set(&mut self, key: Q::Key, value: Q::Value)
-    where
-        Q::Storage: plumbing::InputQueryStorageOps<Q>,
-    {
-        self.set_with_durability(key, value, Durability::LOW);
-    }
-
-    /// Assign a value to an "input query", with the additional
-    /// promise that this value will **never change**. Must be used
-    /// outside of an active query computation.
-    ///
-    /// If you are using `snapshot`, see the notes on blocking
-    /// and cancellation on [the `query_mut` method].
-    ///
-    /// [the `query_mut` method]: trait.Database.html#method.query_mut
-    pub fn set_with_durability(&mut self, key: Q::Key, value: Q::Value, durability: Durability)
-    where
-        Q::Storage: plumbing::InputQueryStorageOps<Q>,
-    {
-        self.storage.set(self.runtime, &key, value, durability);
-    }
-
-    /// Sets the size of LRU cache of values for this query table.
-    ///
-    /// That is, at most `cap` values will be present in the table at the same
-    /// time. This helps with keeping maximum memory usage under control, at the
-    /// cost of potential extra recalculations of evicted values.
-    ///
-    /// If `cap` is zero, all values are preserved; this is the default.
-    pub fn set_lru_capacity(&self, cap: u16)
-    where
-        Q::Storage: plumbing::LruQueryStorageOps,
-    {
-        self.storage.set_lru_capacity(cap);
-    }
-
-    /// Marks the computed value as outdated.
-    ///
-    /// This causes salsa to re-execute the query function on the next access to
-    /// the query, even if all dependencies are up to date.
-    ///
-    /// This is most commonly used as part of the [on-demand input
-    /// pattern](https://salsa-rs.github.io/salsa/common_patterns/on_demand_inputs.html).
-    pub fn invalidate(&mut self, key: &Q::Key)
-    where
-        Q::Storage: plumbing::DerivedQueryStorageOps<Q>,
-    {
-        self.storage.invalidate(self.runtime, key)
-    }
-}
-
-/// A panic payload indicating that execution of a salsa query was cancelled.
-///
-/// This can occur for a couple of reasons:
-/// * the query was running against a revision that has a pending write
-///   (see [`Cancelled::PendingWrite`]), or
-/// * the query was blocked on another thread that panicked
-///   (see [`Cancelled::PropagatedPanic`]).
-#[derive(Debug)]
-#[non_exhaustive]
-pub enum Cancelled {
-    /// The query was operating on revision R, but there is a pending write to move to revision R+1.
-    #[non_exhaustive]
-    PendingWrite,
-
-    /// The query was blocked on another thread, and that thread panicked.
-    #[non_exhaustive]
-    PropagatedPanic,
-}
-
-impl Cancelled {
-    fn throw(self) -> ! {
-        // We use resume and not panic here to avoid running the panic
-        // hook (that is, to avoid collecting and printing backtrace).
-        std::panic::resume_unwind(Box::new(self));
-    }
-
-    /// Runs `f`, and catches any salsa cancellation.
-    pub fn catch<F, T>(f: F) -> Result<T, Cancelled>
-    where
-        F: FnOnce() -> T + UnwindSafe,
-    {
-        match panic::catch_unwind(f) {
-            Ok(t) => Ok(t),
-            Err(payload) => match payload.downcast() {
-                Ok(cancelled) => Err(*cancelled),
-                Err(payload) => panic::resume_unwind(payload),
-            },
-        }
-    }
-}
-
-impl std::fmt::Display for Cancelled {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        let why = match self {
-            Cancelled::PendingWrite => "pending write",
-            Cancelled::PropagatedPanic => "propagated panic",
-        };
-        f.write_str("cancelled because of ")?;
-        f.write_str(why)
-    }
-}
-
-impl std::error::Error for Cancelled {}
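`Cancelled::catch` above (and `Cycle::catch` later in this file) both use the same idiom: catch the unwind, try to downcast the payload to the expected type, and re-raise anything else so unrelated panics keep propagating. A self-contained version of that idiom with a local payload type; `MyCancelled` is illustrative, not salsa's `Cancelled`:

```rust
use std::panic::{self, UnwindSafe};

#[derive(Debug)]
struct MyCancelled;

// Catch only our own payload; any other panic is resumed untouched.
fn catch<F, T>(f: F) -> Result<T, MyCancelled>
where
    F: FnOnce() -> T + UnwindSafe,
{
    match panic::catch_unwind(f) {
        Ok(t) => Ok(t),
        Err(payload) => match payload.downcast::<MyCancelled>() {
            Ok(cancelled) => Err(*cancelled),
            Err(other) => panic::resume_unwind(other),
        },
    }
}

fn main() {
    // A computation that "cancels" itself via resume_unwind, as salsa does.
    let result = catch(|| -> u32 { panic::resume_unwind(Box::new(MyCancelled)) });
    assert!(result.is_err());

    // A normal value passes straight through.
    let ok = catch(|| 41 + 1);
    assert_eq!(ok.unwrap(), 42);
}
```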
-
-/// Captures the participants of a cycle that occurred when executing a query.
-///
-/// This type is meant to be used to help give meaningful error messages to the
-/// user or to help salsa developers figure out why their program is resulting
-/// in a computation cycle.
-///
-/// It is used in a few ways:
-///
-/// * During [cycle recovery](https://salsa-rs.github.io/salsa/cycles/fallback.html),
-///   where it is given to the fallback function.
-/// * As the panic value when an unexpected cycle (i.e., a cycle where one or more participants
-///   lacks cycle recovery information) occurs.
-///
-/// You can read more about cycle handling in
-/// the [salsa book](https://salsa-rs.github.io/salsa/cycles.html).
-#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub struct Cycle {
-    participants: plumbing::CycleParticipants,
-}
-
-impl Cycle {
-    pub(crate) fn new(participants: plumbing::CycleParticipants) -> Self {
-        Self { participants }
-    }
-
-    /// True if two `Cycle` values represent the same cycle.
-    pub(crate) fn is(&self, cycle: &Cycle) -> bool {
-        triomphe::Arc::ptr_eq(&self.participants, &cycle.participants)
-    }
-
-    pub(crate) fn throw(self) -> ! {
-        tracing::trace!("throwing cycle {:?}", self);
-        std::panic::resume_unwind(Box::new(self))
-    }
-
-    pub(crate) fn catch<T>(execute: impl FnOnce() -> T) -> Result<T, Cycle> {
-        match std::panic::catch_unwind(AssertUnwindSafe(execute)) {
-            Ok(v) => Ok(v),
-            Err(err) => match err.downcast::<Cycle>() {
-                Ok(cycle) => Err(*cycle),
-                Err(other) => std::panic::resume_unwind(other),
-            },
-        }
-    }
-
-    /// Iterate over the [`DatabaseKeyIndex`] for each query participating
-    /// in the cycle. The start point of this iteration within the cycle
-    /// is arbitrary but deterministic, but the ordering is otherwise determined
-    /// by the execution.
-    pub fn participant_keys(&self) -> impl Iterator<Item = DatabaseKeyIndex> + '_ {
-        self.participants.iter().copied()
-    }
-
-    /// Returns a vector with the debug information for
-    /// all the participants in the cycle.
-    pub fn all_participants<DB: ?Sized + Database>(&self, db: &DB) -> Vec<String> {
-        self.participant_keys().map(|d| format!("{:?}", d.debug(db))).collect()
-    }
-
-    /// Returns a vector with the debug information for
-    /// those participants in the cycle that lacked recovery
-    /// information.
-    pub fn unexpected_participants<DB: ?Sized + Database>(&self, db: &DB) -> Vec<String> {
-        self.participant_keys()
-            .filter(|&d| db.cycle_recovery_strategy(d) == CycleRecoveryStrategy::Panic)
-            .map(|d| format!("{:?}", d.debug(db)))
-            .collect()
-    }
-
-    /// Returns a "debug" view onto this strict that can be used to print out information.
-    pub fn debug<'me, DB: ?Sized + Database>(&'me self, db: &'me DB) -> impl std::fmt::Debug + 'me {
-        struct UnexpectedCycleDebug<'me> {
-            c: &'me Cycle,
-            db: &'me dyn Database,
-        }
-
-        impl std::fmt::Debug for UnexpectedCycleDebug<'_> {
-            fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-                fmt.debug_struct("UnexpectedCycle")
-                    .field("all_participants", &self.c.all_participants(self.db))
-                    .field("unexpected_participants", &self.c.unexpected_participants(self.db))
-                    .finish()
-            }
-        }
-
-        UnexpectedCycleDebug { c: self, db: db.ops_database() }
-    }
-}
-
-// Re-export the procedural macros.
-#[allow(unused_imports)]
-#[macro_use]
-extern crate ra_salsa_macros;
-use plumbing::HasQueryGroup;
-pub use ra_salsa_macros::*;
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/src/lru.rs b/src/tools/rust-analyzer/crates/ra-salsa/src/lru.rs
deleted file mode 100644
index 7fbd42f92627a..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/src/lru.rs
+++ /dev/null
@@ -1,325 +0,0 @@
-use oorandom::Rand64;
-use parking_lot::Mutex;
-use std::fmt::Debug;
-use std::sync::atomic::AtomicU16;
-use std::sync::atomic::Ordering;
-use triomphe::Arc;
-
-/// A simple and approximate concurrent lru list.
-///
-/// We assume but do not verify that each node is only used with one
-/// list. If this is not the case, it is not *unsafe*, but panics and
-/// weird results will ensue.
-///
-/// Each "node" in the list is of type `Node` and must implement
-/// `LruNode`, which is a trait that gives access to a field that
-/// stores the index in the list. This index gives us a rough idea of
-/// how recently the node has been used.
-#[derive(Debug)]
-pub(crate) struct Lru<Node>
-where
-    Node: LruNode,
-{
-    green_zone: AtomicU16,
-    data: Mutex<LruData<Node>>,
-}
-
-#[derive(Debug)]
-struct LruData<Node> {
-    end_red_zone: u16,
-    end_yellow_zone: u16,
-    end_green_zone: u16,
-    rng: Rand64,
-    entries: Vec<Arc<Node>>,
-}
-
-pub(crate) trait LruNode: Sized + Debug {
-    fn lru_index(&self) -> &LruIndex;
-}
-
-#[derive(Debug)]
-pub(crate) struct LruIndex {
-    /// Index in the appropriate LRU list, or std::u16::MAX if not a
-    /// member.
-    index: AtomicU16,
-}
-
-impl<Node> Default for Lru<Node>
-where
-    Node: LruNode,
-{
-    fn default() -> Self {
-        Lru::new()
-    }
-}
-
-// We always use a fixed seed for our randomness so that we have
-// predictable results.
-const LRU_SEED: &str = "Hello, Rustaceans";
-
-impl<Node> Lru<Node>
-where
-    Node: LruNode,
-{
-    /// Creates a new LRU list where LRU caching is disabled.
-    pub(crate) fn new() -> Self {
-        Self::with_seed(LRU_SEED)
-    }
-
-    #[cfg_attr(not(test), allow(dead_code))]
-    fn with_seed(seed: &str) -> Self {
-        Lru { green_zone: AtomicU16::new(0), data: Mutex::new(LruData::with_seed(seed)) }
-    }
-
-    /// Adjust the total number of nodes permitted to have a value at
-    /// once.  If `len` is zero, this disables LRU caching completely.
-    pub(crate) fn set_lru_capacity(&self, len: u16) {
-        let mut data = self.data.lock();
-
-        // We require each zone to have at least 1 slot. Therefore,
-        // the length cannot be just 1 or 2.
-        if len == 0 {
-            self.green_zone.store(0, Ordering::Release);
-            data.resize(0, 0, 0);
-        } else {
-            let len = std::cmp::max(len, 3);
-
-            // Top 10% is the green zone. This must be at least length 1.
-            let green_zone = std::cmp::max(len / 10, 1);
-
-            // Next 20% is the yellow zone.
-            let yellow_zone = std::cmp::max(len / 5, 1);
-
-            // Remaining 70% is the red zone.
-            let red_zone = len - yellow_zone - green_zone;
-
-            // We need quick access to the green zone.
-            self.green_zone.store(green_zone, Ordering::Release);
-
-            // Resize existing array.
-            data.resize(green_zone, yellow_zone, red_zone);
-        }
-    }
-
-    /// Records that `node` was used. This may displace an old node (if the
-    /// LRU limits are exceeded), in which case the displaced node is returned.
-    pub(crate) fn record_use(&self, node: &Arc<Node>) -> Option<Arc<Node>> {
-        tracing::trace!("record_use(node={:?})", node);
-
-        // Load green zone length and check if the LRU cache is even enabled.
-        let green_zone = self.green_zone.load(Ordering::Acquire);
-        tracing::trace!("record_use: green_zone={}", green_zone);
-        if green_zone == 0 {
-            return None;
-        }
-
-        // Find current index of list (if any) and the current length
-        // of our green zone.
-        let index = node.lru_index().load();
-        tracing::trace!("record_use: index={}", index);
-
-        // Already a member of the list, and in the green zone -- nothing to do!
-        if index < green_zone {
-            return None;
-        }
-
-        self.data.lock().record_use(node)
-    }
-
-    pub(crate) fn purge(&self) {
-        self.green_zone.store(0, Ordering::SeqCst);
-        *self.data.lock() = LruData::with_seed(LRU_SEED);
-    }
-}
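The zone sizing in `set_lru_capacity` above is simple integer arithmetic: roughly 10% green, 20% yellow, and the remaining ~70% red, with each zone at least one slot and the total clamped to at least 3. A small worked check of those numbers (a plain restatement of the formula, not the salsa code itself):

```rust
// Mirrors the arithmetic in `set_lru_capacity` for a non-zero capacity.
fn zones(len: u16) -> (u16, u16, u16) {
    let len = std::cmp::max(len, 3);
    let green = std::cmp::max(len / 10, 1); // ~10%, at least 1
    let yellow = std::cmp::max(len / 5, 1); // ~20%, at least 1
    let red = len - yellow - green; // the rest
    (green, yellow, red)
}

fn main() {
    assert_eq!(zones(100), (10, 20, 70));
    assert_eq!(zones(3), (1, 1, 1)); // the minimum: one slot per zone
    assert_eq!(zones(1), (1, 1, 1)); // capacities below 3 are rounded up
}
```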
-
-impl<Node> LruData<Node>
-where
-    Node: LruNode,
-{
-    fn with_seed(seed_str: &str) -> Self {
-        Self::with_rng(rng_with_seed(seed_str))
-    }
-
-    fn with_rng(rng: Rand64) -> Self {
-        LruData { end_yellow_zone: 0, end_green_zone: 0, end_red_zone: 0, entries: Vec::new(), rng }
-    }
-
-    fn green_zone(&self) -> std::ops::Range<u16> {
-        0..self.end_green_zone
-    }
-
-    fn yellow_zone(&self) -> std::ops::Range<u16> {
-        self.end_green_zone..self.end_yellow_zone
-    }
-
-    fn red_zone(&self) -> std::ops::Range<u16> {
-        self.end_yellow_zone..self.end_red_zone
-    }
-
-    fn resize(&mut self, len_green_zone: u16, len_yellow_zone: u16, len_red_zone: u16) {
-        self.end_green_zone = len_green_zone;
-        self.end_yellow_zone = self.end_green_zone + len_yellow_zone;
-        self.end_red_zone = self.end_yellow_zone + len_red_zone;
-        let entries =
-            std::mem::replace(&mut self.entries, Vec::with_capacity(self.end_red_zone as usize));
-
-        tracing::trace!("green_zone = {:?}", self.green_zone());
-        tracing::trace!("yellow_zone = {:?}", self.yellow_zone());
-        tracing::trace!("red_zone = {:?}", self.red_zone());
-
-        // We expect to resize when the LRU cache is basically empty.
-        // So just forget all the old LRU indices to start.
-        for entry in entries {
-            entry.lru_index().clear();
-        }
-    }
-
-    /// Records that a node was used. If it is already a member of the
-    /// LRU list, it is promoted to the green zone (unless it's
-    /// already there). Otherwise, it is added to the list first and
-    /// *then* promoted to the green zone. Adding a new node to the
-    /// list may displace an old member of the red zone, in which case
-    /// that is returned.
-    fn record_use(&mut self, node: &Arc<Node>) -> Option<Arc<Node>> {
-        tracing::trace!("record_use(node={:?})", node);
-
-        // NB: When this is invoked, we have typically already loaded
-        // the LRU index (to check if it is in green zone). But that
-        // check was done outside the lock and -- for all we know --
-        // the index may have changed since. So we always reload.
-        let index = node.lru_index().load();
-
-        if index < self.end_green_zone {
-            None
-        } else if index < self.end_yellow_zone {
-            self.promote_yellow_to_green(node, index);
-            None
-        } else if index < self.end_red_zone {
-            self.promote_red_to_green(node, index);
-            None
-        } else {
-            self.insert_new(node)
-        }
-    }
-
-    /// Inserts a node that is not yet a member of the LRU list. If
-    /// the list is at capacity, this can displace an existing member.
-    fn insert_new(&mut self, node: &Arc<Node>) -> Option<Arc<Node>> {
-        debug_assert!(!node.lru_index().is_in_lru());
-
-        // Easy case: we still have capacity. Push it, and then promote
-        // it up to the appropriate zone.
-        let len = self.entries.len() as u16;
-        if len < self.end_red_zone {
-            self.entries.push(node.clone());
-            node.lru_index().store(len);
-            tracing::trace!("inserted node {:?} at {}", node, len);
-            return self.record_use(node);
-        }
-
-        // Harder case: no capacity. Create some by evicting somebody from red
-        // zone and then promoting.
-        let victim_index = self.pick_index(self.red_zone());
-        let victim_node = std::mem::replace(&mut self.entries[victim_index as usize], node.clone());
-        tracing::trace!("evicting red node {:?} from {}", victim_node, victim_index);
-        victim_node.lru_index().clear();
-        self.promote_red_to_green(node, victim_index);
-        Some(victim_node)
-    }
-
-    /// Promotes the node `node`, stored at `red_index` (in the red
-    /// zone), into a green index, demoting yellow/green nodes at
-    /// random.
-    ///
-    /// NB: It is not required that `node.lru_index()` is up-to-date
-    /// when entering this method.
-    fn promote_red_to_green(&mut self, node: &Arc<Node>, red_index: u16) {
-        debug_assert!(self.red_zone().contains(&red_index));
-
-        // Pick a yellow at random and switch places with it.
-        //
-        // Subtle: we do not update `node.lru_index` *yet* -- we're
-        // going to invoke `self.promote_yellow_to_green` next, and it will get
-        // updated then.
-        let yellow_index = self.pick_index(self.yellow_zone());
-        tracing::trace!(
-            "demoting yellow node {:?} from {} to red at {}",
-            self.entries[yellow_index as usize],
-            yellow_index,
-            red_index,
-        );
-        self.entries.swap(yellow_index as usize, red_index as usize);
-        self.entries[red_index as usize].lru_index().store(red_index);
-
-        // Now move ourselves up into the green zone.
-        self.promote_yellow_to_green(node, yellow_index);
-    }
-
-    /// Promotes the node `node`, stored at `yellow_index` (in the
-    /// yellow zone), into a green index, demoting a green node at
-    /// random to replace it.
-    ///
-    /// NB: It is not required that `node.lru_index()` is up-to-date
-    /// when entering this method.
-    fn promote_yellow_to_green(&mut self, node: &Arc<Node>, yellow_index: u16) {
-        debug_assert!(self.yellow_zone().contains(&yellow_index));
-
-        // Pick a green at random and switch places with it.
-        let green_index = self.pick_index(self.green_zone());
-        tracing::trace!(
-            "demoting green node {:?} from {} to yellow at {}",
-            self.entries[green_index as usize],
-            green_index,
-            yellow_index
-        );
-        self.entries.swap(green_index as usize, yellow_index as usize);
-        self.entries[yellow_index as usize].lru_index().store(yellow_index);
-        node.lru_index().store(green_index);
-
-        tracing::trace!("promoted {:?} to green index {}", node, green_index);
-    }
-
-    fn pick_index(&mut self, zone: std::ops::Range<u16>) -> u16 {
-        let end_index = std::cmp::min(zone.end, self.entries.len() as u16);
-        self.rng.rand_range(zone.start as u64..end_index as u64) as u16
-    }
-}
-
-impl Default for LruIndex {
-    fn default() -> Self {
-        Self { index: AtomicU16::new(u16::MAX) }
-    }
-}
-
-impl LruIndex {
-    fn load(&self) -> u16 {
-        self.index.load(Ordering::Acquire) // see note on ordering below
-    }
-
-    fn store(&self, value: u16) {
-        self.index.store(value, Ordering::Release) // see note on ordering below
-    }
-
-    fn clear(&self) {
-        self.store(u16::MAX);
-    }
-
-    fn is_in_lru(&self) -> bool {
-        self.load() != u16::MAX
-    }
-}
-
-fn rng_with_seed(seed_str: &str) -> Rand64 {
-    let mut seed: [u8; 16] = [0; 16];
-    for (i, &b) in seed_str.as_bytes().iter().take(16).enumerate() {
-        seed[i] = b;
-    }
-    Rand64::new(u128::from_le_bytes(seed))
-}
-
-// A note on ordering:
-//
-// I chose acquire/release ordering, but I don't think it's strictly
-// needed. All writes occur under a lock, so they are ordered with
-// respect to one another. As for the reads, they can occur outside
-// the lock, but they don't themselves enable dependent reads -- if a
-// read shows the node is outside the green zone, we acquire the lock
-// before acting on it.
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/src/plumbing.rs b/src/tools/rust-analyzer/crates/ra-salsa/src/plumbing.rs
deleted file mode 100644
index e96b9daa979fc..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/src/plumbing.rs
+++ /dev/null
@@ -1,245 +0,0 @@
-#![allow(missing_docs)]
-
-use crate::debug::TableEntry;
-use crate::durability::Durability;
-use crate::Cycle;
-use crate::Database;
-use crate::Query;
-use crate::QueryTable;
-use crate::QueryTableMut;
-use std::borrow::Borrow;
-use std::fmt::Debug;
-use std::hash::Hash;
-use triomphe::Arc;
-
-pub use crate::derived::MemoizedStorage;
-pub use crate::derived_lru::DependencyStorage as LruDependencyStorage;
-pub use crate::derived_lru::MemoizedStorage as LruMemoizedStorage;
-pub use crate::input::{InputStorage, UnitInputStorage};
-pub use crate::interned::InternedStorage;
-pub use crate::interned::LookupInternedStorage;
-pub use crate::{revision::Revision, DatabaseKeyIndex, QueryDb, Runtime};
-
-/// Defines various associated types. An impl of this
-/// should be generated for your query-context type automatically by
-/// the `database_storage` macro, so you shouldn't need to mess
-/// with this trait directly.
-pub trait DatabaseStorageTypes: Database {
-    /// Defines the "storage type", where all the query data is kept.
-    /// This type is defined by the `database_storage` macro.
-    type DatabaseStorage: Default;
-}
-
-/// Internal operations that the runtime uses to operate on the database.
-pub trait DatabaseOps {
-    /// Upcast this type to a `dyn Database`.
-    fn ops_database(&self) -> &dyn Database;
-
-    /// Gives access to the underlying salsa runtime.
-    fn ops_salsa_runtime(&self) -> &Runtime;
-
-    /// A "synthetic write" causes the system to act *as though* some
-    /// input of durability `durability` has changed. This is mostly
-    /// useful for profiling scenarios.
-    ///
-    /// **WARNING:** Just like an ordinary write, this method triggers
-    /// cancellation. If you invoke it while a snapshot exists, it
-    /// will block until that snapshot is dropped -- if that snapshot
-    /// is owned by the current thread, this could trigger deadlock.
-    fn synthetic_write(&mut self, durability: Durability);
-
-    /// Formats a database key index in a human readable fashion.
-    fn fmt_index(
-        &self,
-        index: DatabaseKeyIndex,
-        fmt: &mut std::fmt::Formatter<'_>,
-    ) -> std::fmt::Result;
-
-    /// True if the computed value for `input` may have changed since `revision`.
-    fn maybe_changed_after(&self, input: DatabaseKeyIndex, revision: Revision) -> bool;
-
-    /// Find the `CycleRecoveryStrategy` for a given input.
-    fn cycle_recovery_strategy(&self, input: DatabaseKeyIndex) -> CycleRecoveryStrategy;
-
-    /// Executes the callback for each kind of query.
-    fn for_each_query(&self, op: &mut dyn FnMut(&dyn QueryStorageMassOps));
-}
-
-/// Internal operations performed on the query storage as a whole
-/// (note that these ops do not need to know the identity of the
-/// query, unlike `QueryStorageOps`).
-pub trait QueryStorageMassOps {
-    fn purge(&self);
-}
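A minimal sketch (not part of the deleted file) of how `DatabaseOps` and `QueryStorageMassOps` compose: since `for_each_query` hands each storage out as a `&dyn QueryStorageMassOps`, a database-wide purge helper can be written without knowing any concrete query type.

    fn purge_all_memoized(db: &impl DatabaseOps) {
        // Visit every query storage registered in the database and clear it.
        db.for_each_query(&mut |storage| storage.purge());
    }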
-
-pub trait DatabaseKey: Clone + Debug + Eq + Hash {}
-
-pub trait QueryFunction: Query {
-    /// See `CycleRecoveryStrategy`
-    const CYCLE_STRATEGY: CycleRecoveryStrategy;
-
-    fn execute(db: &<Self as QueryDb<'_>>::DynDb, key: Self::Key) -> Self::Value;
-
-    fn cycle_fallback(
-        db: &<Self as QueryDb<'_>>::DynDb,
-        cycle: &Cycle,
-        key: &Self::Key,
-    ) -> Self::Value {
-        let _ = (db, cycle, key);
-        panic!("query `{:?}` doesn't support cycle fallback", Self::default())
-    }
-}
-
-/// Cycle recovery strategy: Is this query capable of recovering from
-/// a cycle that results from executing the function? If so, how?
-#[derive(Copy, Clone, Debug, PartialEq, Eq)]
-pub enum CycleRecoveryStrategy {
-    /// Cannot recover from cycles: panic.
-    ///
-    /// This is the default. It is also what happens if a cycle
-    /// occurs and the queries involved have different recovery
-    /// strategies.
-    ///
-    /// In the case of a failure due to a cycle, the panic
-    /// value will be XXX (FIXME).
-    Panic,
-
-    /// Recovers from cycles by storing a sentinel value.
-    ///
-    /// This value is computed by the `QueryFunction::cycle_fallback`
-    /// function.
-    Fallback,
-}
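A toy model (not part of the deleted file, using a hypothetical helper) of what the two strategies mean for a query whose computation hits a cycle: `Panic` propagates the cycle as a panic, while `Fallback` substitutes the value produced by `QueryFunction::cycle_fallback`.

    fn resolve_cycle<V>(strategy: CycleRecoveryStrategy, fallback: impl FnOnce() -> V) -> V {
        match strategy {
            // No recovery: the cycle unwinds as a panic (cf. `Cycle::throw`).
            CycleRecoveryStrategy::Panic => panic!("cycle detected with no recovery"),
            // Recovery: the sentinel value from `cycle_fallback` is used instead.
            CycleRecoveryStrategy::Fallback => fallback(),
        }
    }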
-
-/// Create a query table, which has access to the storage for the query
-/// and offers methods like `get`.
-pub fn get_query_table<'me, Q>(db: &'me <Q as QueryDb<'me>>::DynDb) -> QueryTable<'me, Q>
-where
-    Q: Query + 'me,
-    Q::Storage: QueryStorageOps<Q>,
-{
-    let group_storage: &Q::GroupStorage = HasQueryGroup::group_storage(db);
-    let query_storage: &Q::Storage = Q::query_storage(group_storage);
-    QueryTable::new(db, query_storage)
-}
-
-/// Create a mutable query table, which has access to the storage
-/// for the query and offers methods like `set`.
-pub fn get_query_table_mut<'me, Q>(db: &'me mut <Q as QueryDb<'me>>::DynDb) -> QueryTableMut<'me, Q>
-where
-    Q: Query,
-{
-    let (group_storage, runtime) = HasQueryGroup::group_storage_mut(db);
-    let query_storage = Q::query_storage_mut(group_storage);
-    QueryTableMut::new(runtime, &**query_storage)
-}
-
-pub trait QueryGroup: Sized {
-    type GroupStorage;
-
-    /// Dyn version of the associated database trait.
-    type DynDb: ?Sized + Database + HasQueryGroup<Self>;
-}
-
-/// Trait implemented by a database for each group that it supports.
-/// `G` is the marker type identifying the query group.
-pub trait HasQueryGroup<G>: Database
-where
-    G: QueryGroup,
-{
-    /// Access the group storage struct from the database.
-    fn group_storage(&self) -> &G::GroupStorage;
-
-    /// Access the group storage struct from the database.
-    /// Also returns a ref to the `Runtime`, since otherwise
-    /// the database is borrowed and one cannot get access to it.
-    fn group_storage_mut(&mut self) -> (&G::GroupStorage, &mut Runtime);
-}
-
-// ANCHOR:QueryStorageOps
-pub trait QueryStorageOps<Q>
-where
-    Self: QueryStorageMassOps,
-    Q: Query,
-{
-    // ANCHOR_END:QueryStorageOps
-
-    /// See CycleRecoveryStrategy
-    const CYCLE_STRATEGY: CycleRecoveryStrategy;
-
-    fn new(group_index: u16) -> Self;
-
-    /// Format a database key index in a suitable way.
-    fn fmt_index(
-        &self,
-        db: &<Q as QueryDb<'_>>::DynDb,
-        index: u32,
-        fmt: &mut std::fmt::Formatter<'_>,
-    ) -> std::fmt::Result;
-
-    // ANCHOR:maybe_changed_after
-    /// True if the value of `input`, which must be from this query, may have
-    /// changed after the given revision ended.
-    ///
-    /// This function should only be invoked with a revision less than the current
-    /// revision.
-    fn maybe_changed_after(
-        &self,
-        db: &<Q as QueryDb<'_>>::DynDb,
-        index: u32,
-        revision: Revision,
-    ) -> bool;
-    // ANCHOR_END:maybe_changed_after
-
-    fn cycle_recovery_strategy(&self) -> CycleRecoveryStrategy {
-        Self::CYCLE_STRATEGY
-    }
-
-    // ANCHOR:fetch
-    /// Execute the query, returning the result (often, the result
-    /// will be memoized).  This is the "main method" for
-    /// queries.
-    ///
-    /// Unwinds in the event of a cycle, meaning that computing
-    /// the value for this `key` is recursively attempting to fetch
-    /// itself.
-    fn fetch(&self, db: &<Q as QueryDb<'_>>::DynDb, key: &Q::Key) -> Q::Value;
-    // ANCHOR_END:fetch
-
-    /// Returns the durability associated with a given key.
-    fn durability(&self, db: &<Q as QueryDb<'_>>::DynDb, key: &Q::Key) -> Durability;
-
-    /// Get the (current) set of entries in the query storage.
-    fn entries<C>(&self, db: &<Q as QueryDb<'_>>::DynDb) -> C
-    where
-        C: std::iter::FromIterator<TableEntry<Q::Key, Q::Value>>;
-}
-
-/// An optional trait that is implemented for "user mutable" storage:
-/// that is, storage whose value is not derived from other storage but
-/// is set independently.
-pub trait InputQueryStorageOps<Q>
-where
-    Q: Query,
-{
-    fn set(&self, runtime: &mut Runtime, key: &Q::Key, new_value: Q::Value, durability: Durability);
-}
-
-/// An optional trait that is implemented for storage with LRU-bounded
-/// memoization: it allows adjusting how many memoized values are kept
-/// alive at once.
-pub trait LruQueryStorageOps {
-    fn set_lru_capacity(&self, new_capacity: u16);
-}
-
-pub trait DerivedQueryStorageOps<Q>
-where
-    Q: Query,
-{
-    fn invalidate<S>(&self, runtime: &mut Runtime, key: &S)
-    where
-        S: Eq + Hash,
-        Q::Key: Borrow<S>;
-}
-
-pub type CycleParticipants = Arc<Vec<DatabaseKeyIndex>>;
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/src/revision.rs b/src/tools/rust-analyzer/crates/ra-salsa/src/revision.rs
deleted file mode 100644
index 7f4c333fb1995..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/src/revision.rs
+++ /dev/null
@@ -1,66 +0,0 @@
-use std::num::NonZeroU32;
-use std::sync::atomic::{AtomicU32, Ordering};
-
-/// Value of the initial revision, as a u32. We don't use 0
-/// because we want to use a `NonZeroU32`.
-const START: u32 = 1;
-
-/// A unique identifier for the current version of the database; each
-/// time an input is changed, the revision number is incremented.
-/// `Revision` is used internally to track which values may need to be
-/// recomputed, but is not something you should have to interact with
-/// directly as a user of salsa.
-#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
-pub struct Revision {
-    generation: NonZeroU32,
-}
-
-impl Revision {
-    pub(crate) fn start() -> Self {
-        Self::from(START)
-    }
-
-    pub(crate) fn from(g: u32) -> Self {
-        Self { generation: NonZeroU32::new(g).unwrap() }
-    }
-
-    pub(crate) fn next(self) -> Revision {
-        Self::from(self.generation.get() + 1)
-    }
-
-    fn as_u32(self) -> u32 {
-        self.generation.get()
-    }
-}
-
-impl std::fmt::Debug for Revision {
-    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(fmt, "R{}", self.generation)
-    }
-}
-
-#[derive(Debug)]
-pub(crate) struct AtomicRevision {
-    data: AtomicU32,
-}
-
-impl AtomicRevision {
-    pub(crate) const fn start() -> Self {
-        Self { data: AtomicU32::new(START) }
-    }
-
-    pub(crate) fn load(&self) -> Revision {
-        Revision::from(self.data.load(Ordering::SeqCst))
-    }
-
-    pub(crate) fn store(&self, r: Revision) {
-        self.data.store(r.as_u32(), Ordering::SeqCst);
-    }
-
-    /// Increment by 1, returning previous value.
-    pub(crate) fn fetch_then_increment(&self) -> Revision {
-        let v = self.data.fetch_add(1, Ordering::SeqCst);
-        assert!(v != u32::MAX, "revision overflow");
-        Revision::from(v)
-    }
-}
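An illustrative sketch (not part of the deleted file) of the counter semantics above: revisions start at `R1`, `next` moves forward by one, and `fetch_then_increment` returns the value *before* the bump.

    let pending = AtomicRevision::start();
    assert_eq!(pending.load(), Revision::start());                 // R1
    assert_eq!(pending.fetch_then_increment(), Revision::start()); // returns R1 ...
    assert_eq!(pending.load(), Revision::start().next());          // ... leaving R2 behind
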
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/src/runtime.rs b/src/tools/rust-analyzer/crates/ra-salsa/src/runtime.rs
deleted file mode 100644
index cb16ba0044dfd..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/src/runtime.rs
+++ /dev/null
@@ -1,668 +0,0 @@
-use crate::durability::Durability;
-use crate::hash::FxIndexSet;
-use crate::plumbing::CycleRecoveryStrategy;
-use crate::revision::{AtomicRevision, Revision};
-use crate::{Cancelled, Cycle, Database, DatabaseKeyIndex, Event, EventKind};
-use itertools::Itertools;
-use parking_lot::lock_api::{RawRwLock, RawRwLockRecursive};
-use parking_lot::{Mutex, RwLock};
-use std::hash::Hash;
-use std::panic::panic_any;
-use std::sync::atomic::{AtomicU32, Ordering};
-use tracing::trace;
-use triomphe::{Arc, ThinArc};
-
-mod dependency_graph;
-use dependency_graph::DependencyGraph;
-
-pub(crate) mod local_state;
-use local_state::LocalState;
-
-use self::local_state::{ActiveQueryGuard, QueryRevisions};
-
-/// The salsa runtime stores the storage for all queries as well as
-/// tracking the query stack and dependencies between queries.
-///
-/// Each new runtime you create (e.g., via `Runtime::new` or
-/// `Runtime::default`) will have an independent set of query storage
-/// associated with it. Normally, therefore, you only do this once, at
-/// the start of your application.
-pub struct Runtime {
-    /// Our unique runtime id.
-    id: RuntimeId,
-
-    /// If this is a "forked" runtime, then the `revision_guard` will
-    /// be `Some`; this guard holds a read-lock on the global query
-    /// lock.
-    revision_guard: Option<RevisionGuard>,
-
-    /// Local state that is specific to this runtime (thread).
-    local_state: LocalState,
-
-    /// Shared state that is accessible via all runtimes.
-    shared_state: Arc<SharedState>,
-}
-
-#[derive(Clone, Debug)]
-pub(crate) enum WaitResult {
-    Completed,
-    Panicked,
-    Cycle(Cycle),
-}
-
-impl Default for Runtime {
-    fn default() -> Self {
-        Runtime {
-            id: RuntimeId { counter: 0 },
-            revision_guard: None,
-            shared_state: Default::default(),
-            local_state: Default::default(),
-        }
-    }
-}
-
-impl std::fmt::Debug for Runtime {
-    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        fmt.debug_struct("Runtime")
-            .field("id", &self.id())
-            .field("forked", &self.revision_guard.is_some())
-            .field("shared_state", &self.shared_state)
-            .finish()
-    }
-}
-
-impl Runtime {
-    /// Create a new runtime; equivalent to `Self::default`. This is
-    /// used when creating a new database.
-    pub fn new() -> Self {
-        Self::default()
-    }
-
-    /// See [`crate::storage::Storage::snapshot`].
-    pub(crate) fn snapshot(&self) -> Self {
-        if self.local_state.query_in_progress() {
-            panic!("it is not legal to `snapshot` during a query (see salsa-rs/salsa#80)");
-        }
-
-        let revision_guard = RevisionGuard::new(&self.shared_state);
-
-        let id = RuntimeId { counter: self.shared_state.next_id.fetch_add(1, Ordering::SeqCst) };
-
-        Runtime {
-            id,
-            revision_guard: Some(revision_guard),
-            shared_state: self.shared_state.clone(),
-            local_state: Default::default(),
-        }
-    }
-
-    /// A "synthetic write" causes the system to act *as though* some
-    /// input of durability `durability` has changed. This is mostly
-    /// useful for profiling scenarios.
-    ///
-    /// **WARNING:** Just like an ordinary write, this method triggers
-    /// cancellation. If you invoke it while a snapshot exists, it
-    /// will block until that snapshot is dropped -- if that snapshot
-    /// is owned by the current thread, this could trigger deadlock.
-    pub fn synthetic_write(&mut self, durability: Durability) {
-        self.with_incremented_revision(|_next_revision| Some(durability));
-    }
-
-    /// The unique identifier attached to this `Runtime`. Each
-    /// snapshotted runtime has a distinct identifier.
-    #[inline]
-    pub fn id(&self) -> RuntimeId {
-        self.id
-    }
-
-    /// Returns the database-key for the query that this thread is
-    /// actively executing (if any).
-    pub fn active_query(&self) -> Option<DatabaseKeyIndex> {
-        self.local_state.active_query()
-    }
-
-    /// Read current value of the revision counter.
-    #[inline]
-    pub(crate) fn current_revision(&self) -> Revision {
-        self.shared_state.revisions[0].load()
-    }
-
-    /// The revision in which values with durability `d` may have last
-    /// changed.  For D0, this is just the current revision. But for
-    /// higher levels of durability, this value may lag behind the
-    /// current revision. If we encounter a value of durability Di,
-    /// then we can check this function to get a "bound" on when the
-    /// value may have changed, which allows us to skip walking its
-    /// dependencies.
-    #[inline]
-    pub(crate) fn last_changed_revision(&self, d: Durability) -> Revision {
-        self.shared_state.revisions[d.index()].load()
-    }
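A hedged usage sketch (not part of the deleted file) of the bound described above; `verified_at` and `durability` stand in for the bookkeeping a memoized value would carry.

    fn can_skip_dependency_walk(
        runtime: &Runtime,
        verified_at: Revision,
        durability: Durability,
    ) -> bool {
        // If nothing with durability `durability` (or higher) has changed since we
        // last verified, none of the memo's inputs can have changed either.
        runtime.last_changed_revision(durability) <= verified_at
    }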
-
-    /// Read the current value of the pending revision counter.
-    #[inline]
-    pub(crate) fn pending_revision(&self) -> Revision {
-        self.shared_state.pending_revision.load()
-    }
-
-    #[cold]
-    pub(crate) fn unwind_cancelled(&self) {
-        self.report_untracked_read();
-        Cancelled::PendingWrite.throw();
-    }
-
-    /// Acquires the **global query write lock** (ensuring that no queries are
-    /// executing) and then increments the current revision counter; invokes
-    /// `op` with the global query write lock still held.
-    ///
-    /// While we wait to acquire the global query write lock, this method will
-    /// also increment `pending_revision`, thus signalling to queries
-    /// that their results are "cancelled" and they should abort as expeditiously
-    /// as possible.
-    ///
-    /// The `op` closure should actually perform the writes needed. It is given
-    /// the new revision as an argument, and its return value indicates whether
-    /// any pre-existing value was modified:
-    ///
-    /// - returning `None` means that no pre-existing value was modified (this
-    ///   could occur e.g. when setting some key on an input that was never set
-    ///   before)
-    /// - returning `Some(d)` indicates that a pre-existing value was modified
-    ///   and it had the durability `d`. This will update the records for when
-    ///   values with each durability were modified.
-    ///
-    /// Note that, given our writer model, we can assume that only one thread is
-    /// attempting to increment the global revision at a time.
-    pub(crate) fn with_incremented_revision<F>(&mut self, op: F)
-    where
-        F: FnOnce(Revision) -> Option<Durability>,
-    {
-        tracing::trace!("increment_revision()");
-
-        if !self.permits_increment() {
-            panic!("increment_revision invoked during a query computation");
-        }
-
-        // Set the `pending_revision` field so that people
-        // know the current revision is cancelled.
-        let current_revision = self.shared_state.pending_revision.fetch_then_increment();
-
-        // To modify the revision, we need the lock.
-        let shared_state = self.shared_state.clone();
-        let _lock = shared_state.query_lock.write();
-
-        let old_revision = self.shared_state.revisions[0].fetch_then_increment();
-        assert_eq!(current_revision, old_revision);
-
-        let new_revision = current_revision.next();
-
-        trace!("increment_revision: incremented to {:?}", new_revision);
-
-        if let Some(d) = op(new_revision) {
-            for rev in &self.shared_state.revisions[1..=d.index()] {
-                rev.store(new_revision);
-            }
-        }
-    }
-
-    pub(crate) fn permits_increment(&self) -> bool {
-        self.revision_guard.is_none() && !self.local_state.query_in_progress()
-    }
-
-    #[inline]
-    pub(crate) fn push_query(&self, database_key_index: DatabaseKeyIndex) -> ActiveQueryGuard<'_> {
-        self.local_state.push_query(database_key_index)
-    }
-
-    /// Reports that the currently active query read the result from
-    /// another query.
-    ///
-    /// Also checks whether the "cycle participant" flag is set on
-    /// the current stack frame -- if so, panics with the `Cycle` as the
-    /// value, which should be caught by the code executing the query.
-    ///
-    /// # Parameters
-    ///
-    /// - `input`: the query whose result was read
-    /// - `changed_at`: the last revision in which the result of that
-    ///   query had changed
-    pub(crate) fn report_query_read_and_unwind_if_cycle_resulted(
-        &self,
-        input: DatabaseKeyIndex,
-        durability: Durability,
-        changed_at: Revision,
-    ) {
-        self.local_state
-            .report_query_read_and_unwind_if_cycle_resulted(input, durability, changed_at);
-    }
-
-    /// Reports that the query depends on some state unknown to salsa.
-    ///
-    /// Queries which report untracked reads will be re-executed in the next
-    /// revision.
-    pub fn report_untracked_read(&self) {
-        self.local_state.report_untracked_read(self.current_revision());
-    }
-
-    /// Acts as though the current query had read an input with the given durability; this will force the current query's durability to be at most `durability`.
-    ///
-    /// This is mostly useful to control the durability level for [on-demand inputs](https://salsa-rs.github.io/salsa/common_patterns/on_demand_inputs.html).
-    pub fn report_synthetic_read(&self, durability: Durability) {
-        let changed_at = self.last_changed_revision(durability);
-        self.local_state.report_synthetic_read(durability, changed_at);
-    }
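A hedged sketch (not part of the deleted file) of the on-demand-input pattern the link above describes; the function and its parameters are hypothetical.

    fn read_config_file(runtime: &Runtime, path: &std::path::Path, durability: Durability) -> String {
        // Act as though we read an input of the given durability: the query is
        // re-checked when values of that durability change, rather than after
        // every new revision (which is what an untracked read would cause).
        runtime.report_synthetic_read(durability);
        std::fs::read_to_string(path).unwrap_or_default()
    }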
-
-    /// Handles a cycle in the dependency graph that was detected when the
-    /// current thread tried to block on `database_key_index` which is being
-    /// executed by `to_id`. If this function returns, then `to_id` no longer
-    /// depends on the current thread, and so we should continue executing
-    /// as normal. Otherwise, the function will throw a `Cycle` which is expected
-    /// to be caught by some frame on our stack. This occurs either if there is
-    /// a frame on our stack with cycle recovery (possibly the top one!) or if there
-    /// is no cycle recovery at all.
-    fn unblock_cycle_and_maybe_throw(
-        &self,
-        db: &dyn Database,
-        dg: &mut DependencyGraph,
-        database_key_index: DatabaseKeyIndex,
-        to_id: RuntimeId,
-    ) {
-        trace!("unblock_cycle_and_maybe_throw(database_key={:?})", database_key_index);
-
-        let mut from_stack = self.local_state.take_query_stack();
-        let from_id = self.id();
-
-        // Make a "dummy stack frame". As we iterate through the cycle, we will collect the
-        // inputs from each participant. Then, if we are participating in cycle recovery, we
-        // will propagate those results to all participants.
-        let mut cycle_query = ActiveQuery::new(database_key_index);
-
-        // Identify the cycle participants:
-        let cycle = {
-            let mut v = vec![];
-            dg.for_each_cycle_participant(
-                from_id,
-                &mut from_stack,
-                database_key_index,
-                to_id,
-                |aqs| {
-                    aqs.iter_mut().for_each(|aq| {
-                        cycle_query.add_from(aq);
-                        v.push(aq.database_key_index);
-                    });
-                },
-            );
-
-            // We want to report the participants in a deterministic order
-            // (at least for this execution, not necessarily across executions),
-            // no matter where the cycle started on the stack. Find the minimum
-            // key and rotate it to the front.
-            let index = v.iter().position_min().unwrap_or_default();
-            v.rotate_left(index);
-
-            // No need to store extra memory.
-            v.shrink_to_fit();
-
-            Cycle::new(Arc::new(v))
-        };
-        trace!("cycle {:?}, cycle_query {:#?}", cycle.debug(db), cycle_query,);
-
-        // We can remove the cycle participants from the list of dependencies;
-        // they are a strongly connected component (SCC) and we only care about
-        // dependencies to things outside the SCC that control whether it will
-        // form again.
-        cycle_query.remove_cycle_participants(&cycle);
-
-        // Mark each cycle participant that has recovery set, along with
-        // any frames that come after them on the same thread. Those frames
-        // are going to be unwound so that fallback can occur.
-        dg.for_each_cycle_participant(from_id, &mut from_stack, database_key_index, to_id, |aqs| {
-            aqs.iter_mut()
-                .skip_while(|aq| match db.cycle_recovery_strategy(aq.database_key_index) {
-                    CycleRecoveryStrategy::Panic => true,
-                    CycleRecoveryStrategy::Fallback => false,
-                })
-                .for_each(|aq| {
-                    trace!("marking {:?} for fallback", aq.database_key_index.debug(db));
-                    aq.take_inputs_from(&cycle_query);
-                    assert!(aq.cycle.is_none());
-                    aq.cycle = Some(cycle.clone());
-                });
-        });
-
-        // Unblock every thread that has cycle recovery with a `WaitResult::Cycle`.
-        // They will throw the cycle, which will be caught by the frame that has
-        // cycle recovery so that it can execute that recovery.
-        let (me_recovered, others_recovered) =
-            dg.maybe_unblock_runtimes_in_cycle(from_id, &from_stack, database_key_index, to_id);
-
-        self.local_state.restore_query_stack(from_stack);
-
-        if me_recovered {
-            // If the current thread has recovery, we want to throw
-            // so that it can begin.
-            cycle.throw()
-        } else if others_recovered {
-            // If other threads have recovery but we didn't: return and we will block on them.
-        } else {
-            // if nobody has recovery, then we panic
-            panic_any(cycle);
-        }
-    }
-
-    /// Block until `other_id` completes executing `database_key`;
-    /// panic or unwind in the case of a cycle.
-    ///
-    /// `query_mutex_guard` is the guard for the current query's state;
-    /// it will be dropped after we have successfully registered the
-    /// dependency.
-    ///
-    /// # Propagating panics
-    ///
-    /// If the thread `other_id` panics, then our thread is considered
-    /// cancelled, so this function will panic with a `Cancelled` value.
-    ///
-    /// # Cycle handling
-    ///
-    /// If the thread `other_id` already depends on the current thread,
-    /// and hence there is a cycle in the query graph, then this function
-    /// will unwind instead of returning normally. The method of unwinding
-    /// depends on the [`CycleRecoveryStrategy`] of the cycle
-    /// participants:
-    ///
-    /// * [`CycleRecoveryStrategy::Panic`]: panic with the [`Cycle`] as the value.
-    /// * [`CycleRecoveryStrategy::Fallback`]: unwind with the [`Cycle`] so that a frame
-    ///   with recovery can catch it and compute the fallback value.
-    pub(crate) fn block_on_or_unwind<QueryMutexGuard>(
-        &self,
-        db: &dyn Database,
-        database_key: DatabaseKeyIndex,
-        other_id: RuntimeId,
-        query_mutex_guard: QueryMutexGuard,
-    ) {
-        let mut dg = self.shared_state.dependency_graph.lock();
-
-        if dg.depends_on(other_id, self.id()) {
-            self.unblock_cycle_and_maybe_throw(db, &mut dg, database_key, other_id);
-
-            // If the above fn returns, then (via cycle recovery) it has unblocked the
-            // cycle, so we can continue.
-            assert!(!dg.depends_on(other_id, self.id()));
-        }
-
-        db.salsa_event(Event {
-            runtime_id: self.id(),
-            kind: EventKind::WillBlockOn { other_runtime_id: other_id, database_key },
-        });
-
-        let stack = self.local_state.take_query_stack();
-
-        let (stack, result) = DependencyGraph::block_on(
-            dg,
-            self.id(),
-            database_key,
-            other_id,
-            stack,
-            query_mutex_guard,
-        );
-
-        self.local_state.restore_query_stack(stack);
-
-        match result {
-            WaitResult::Completed => (),
-
-            // If the other thread panicked, then we consider this thread
-            // cancelled. The assumption is that the panic will be detected
-            // by the other thread and responded to appropriately.
-            WaitResult::Panicked => Cancelled::PropagatedPanic.throw(),
-
-            WaitResult::Cycle(c) => c.throw(),
-        }
-    }
-
-    /// Invoked when this runtime completed computing `database_key` with
-    /// the given result `wait_result` (which should be `WaitResult::Panicked` if
-    /// computing `database_key` panicked and could not complete).
-    /// This function unblocks any dependent queries and allows them
-    /// to continue executing.
-    pub(crate) fn unblock_queries_blocked_on(
-        &self,
-        database_key: DatabaseKeyIndex,
-        wait_result: WaitResult,
-    ) {
-        self.shared_state
-            .dependency_graph
-            .lock()
-            .unblock_runtimes_blocked_on(database_key, wait_result);
-    }
-}
-
-/// State that will be common to all threads (when we support multiple threads)
-struct SharedState {
-    /// Stores the next id to use for a snapshotted runtime (starts at 1).
-    next_id: AtomicU32,
-
-    /// Whenever derived queries are executing, they acquire this lock
-    /// in read mode. Mutating inputs (and thus creating a new
-    /// revision) requires a write lock (thus guaranteeing that no
-    /// derived queries are in progress). Note that this is not needed
-    /// to prevent **race conditions** -- the revision counter itself
-    /// is stored in an atomic (`AtomicRevision`) so it can be cheaply read
-    /// without acquiring the lock.  Rather, the `query_lock` is used
-    /// to ensure a higher-level consistency property.
-    query_lock: RwLock<()>,
-
-    /// This is typically equal to the current revision -- set to its successor
-    /// when a new revision is pending (which implies that the current
-    /// revision is cancelled).
-    pending_revision: AtomicRevision,
-
-    /// Stores the "last change" revision for values of each Durability.
-    /// This array is always of length at least 1 (for Durability 0)
-    /// but its total length depends on the number of Durabilities. The
-    /// element at index 0 is special as it represents the "current
-    /// revision".  In general, we have the invariant that revisions
-    /// in here are *declining* -- that is, `revisions[i] >=
-    /// revisions[i + 1]`, for all `i`. This is because when you
-    /// modify a value with durability D, that implies that values
-    /// with durability less than D may have changed too.
-    revisions: [AtomicRevision; Durability::LEN],
-
-    /// The dependency graph tracks which runtimes are blocked on one
-    /// another, waiting for queries to terminate.
-    dependency_graph: Mutex<DependencyGraph>,
-}
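A small sketch (not part of the deleted file) of the invariant documented on `revisions`: a write of durability `d` bumps entries `0..=d.index()` (see `with_incremented_revision`), so the array stays non-increasing.

    fn assert_revisions_declining(revisions: &[AtomicRevision]) {
        for pair in revisions.windows(2) {
            // revisions[i] >= revisions[i + 1], for all i
            assert!(pair[0].load() >= pair[1].load());
        }
    }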
-
-impl std::panic::RefUnwindSafe for SharedState {}
-
-impl Default for SharedState {
-    fn default() -> Self {
-        #[allow(clippy::declare_interior_mutable_const)]
-        const START: AtomicRevision = AtomicRevision::start();
-        SharedState {
-            next_id: AtomicU32::new(1),
-            query_lock: Default::default(),
-            revisions: [START; Durability::LEN],
-            pending_revision: START,
-            dependency_graph: Default::default(),
-        }
-    }
-}
-
-impl std::fmt::Debug for SharedState {
-    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        let query_lock = if self.query_lock.is_locked_exclusive() {
-            "<wlocked>"
-        } else if self.query_lock.is_locked() {
-            "<rlocked>"
-        } else {
-            "<unlocked>"
-        };
-        fmt.debug_struct("SharedState")
-            .field("query_lock", &query_lock)
-            .field("revisions", &self.revisions)
-            .field("pending_revision", &self.pending_revision)
-            .finish()
-    }
-}
-
-#[derive(Debug)]
-struct ActiveQuery {
-    /// What query is executing
-    database_key_index: DatabaseKeyIndex,
-
-    /// Minimum durability of inputs observed so far.
-    durability: Durability,
-
-    /// Maximum revision of all inputs observed. If we observe an
-    /// untracked read, this will be set to the most recent revision.
-    changed_at: Revision,
-
-    /// Set of subqueries that were accessed thus far, or `None` if
-    /// there was an untracked read.
-    dependencies: Option<FxIndexSet<DatabaseKeyIndex>>,
-
-    /// Stores the entire cycle, if one is found and this query is part of it.
-    cycle: Option<Cycle>,
-}
-
-impl ActiveQuery {
-    fn new(database_key_index: DatabaseKeyIndex) -> Self {
-        ActiveQuery {
-            database_key_index,
-            durability: Durability::MAX,
-            changed_at: Revision::start(),
-            dependencies: Some(FxIndexSet::default()),
-            cycle: None,
-        }
-    }
-
-    fn add_read(&mut self, input: DatabaseKeyIndex, durability: Durability, revision: Revision) {
-        if let Some(set) = &mut self.dependencies {
-            set.insert(input);
-        }
-
-        self.durability = self.durability.min(durability);
-        self.changed_at = self.changed_at.max(revision);
-    }
-
-    fn add_untracked_read(&mut self, changed_at: Revision) {
-        self.dependencies = None;
-        self.durability = Durability::LOW;
-        self.changed_at = changed_at;
-    }
-
-    fn add_synthetic_read(&mut self, durability: Durability, revision: Revision) {
-        self.dependencies = None;
-        self.durability = self.durability.min(durability);
-        self.changed_at = self.changed_at.max(revision);
-    }
-
-    pub(crate) fn revisions(&self) -> QueryRevisions {
-        let (inputs, untracked) = match &self.dependencies {
-            None => (None, true),
-
-            Some(dependencies) => (
-                if dependencies.is_empty() {
-                    None
-                } else {
-                    Some(ThinArc::from_header_and_iter((), dependencies.iter().copied()))
-                },
-                false,
-            ),
-        };
-
-        QueryRevisions {
-            changed_at: self.changed_at,
-            inputs,
-            untracked,
-            durability: self.durability,
-        }
-    }
-
-    /// Adds any dependencies from `other` into `self`.
-    /// Used during cycle recovery, see [`Runtime::unblock_cycle_and_maybe_throw`].
-    fn add_from(&mut self, other: &ActiveQuery) {
-        self.changed_at = self.changed_at.max(other.changed_at);
-        self.durability = self.durability.min(other.durability);
-        if let Some(other_dependencies) = &other.dependencies {
-            if let Some(my_dependencies) = &mut self.dependencies {
-                my_dependencies.extend(other_dependencies.iter().copied());
-            }
-        } else {
-            self.dependencies = None;
-        }
-    }
-
-    /// Removes the participants in `cycle` from my dependencies.
-    /// Used during cycle recovery, see [`Runtime::unblock_cycle_and_maybe_throw`].
-    fn remove_cycle_participants(&mut self, cycle: &Cycle) {
-        if let Some(my_dependencies) = &mut self.dependencies {
-            for p in cycle.participant_keys() {
-                my_dependencies.swap_remove(&p);
-            }
-        }
-    }
-
-    /// Copy the changed-at, durability, and dependencies from `cycle_query`.
-    /// Used during cycle recovery, see [`Runtime::unblock_cycle_and_maybe_throw`].
-    pub(crate) fn take_inputs_from(&mut self, cycle_query: &ActiveQuery) {
-        self.changed_at = cycle_query.changed_at;
-        self.durability = cycle_query.durability;
-        self.dependencies.clone_from(&cycle_query.dependencies);
-    }
-}
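An illustrative toy (not part of the deleted file) of the folding rule `add_read` applies: a query is only as durable as its least durable input, and its `changed_at` is the most recent change among its inputs. Plain integers stand in for `Durability` and `Revision`.

    fn fold_reads(reads: &[(u8 /* durability */, u32 /* changed_at */)]) -> (u8, u32) {
        reads
            .iter()
            .fold((u8::MAX, 0), |(dur, changed), &(d, c)| (dur.min(d), changed.max(c)))
    }

    // fold_reads(&[(3, 10), (1, 42), (2, 7)]) == (1, 42)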
-
-/// A unique identifier for a particular runtime. Each time you create
-/// a snapshot, a fresh `RuntimeId` is generated. Once a snapshot is
-/// complete, its `RuntimeId` may potentially be re-used.
-#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
-pub struct RuntimeId {
-    counter: u32,
-}
-
-#[derive(Clone, Debug)]
-pub(crate) struct StampedValue<V> {
-    pub(crate) value: V,
-    pub(crate) durability: Durability,
-    pub(crate) changed_at: Revision,
-}
-
-struct RevisionGuard {
-    shared_state: Arc<SharedState>,
-}
-
-impl RevisionGuard {
-    fn new(shared_state: &Arc<SharedState>) -> Self {
-        // Subtle: we use a "recursive" lock here so that it is not an
-        // error to acquire a read-lock when one is already held (this
-        // happens when a query uses `snapshot` to spawn off parallel
-        // workers, for example).
-        //
-        // This has the side-effect that we are responsible to ensure
-        // that people contending for the write lock do not starve,
-        // but this is what we achieve via the cancellation mechanism.
-        //
-        // (In particular, since we only ever have one "mutating
-        // handle" to the database, the only contention for the global
-        // query lock occurs when there are "futures" evaluating
-        // queries in parallel, and those futures hold a read-lock
-        // already, so the starvation problem is more about them bringing
-        // themselves to a close, versus preventing other people from
-        // *starting* work).
-        unsafe {
-            shared_state.query_lock.raw().lock_shared_recursive();
-        }
-
-        Self { shared_state: shared_state.clone() }
-    }
-}
-
-impl Drop for RevisionGuard {
-    fn drop(&mut self) {
-        // Release our read-lock without using RAII. As documented in
-        // `RevisionGuard::new` above, this requires the unsafe keyword.
-        unsafe {
-            self.shared_state.query_lock.raw().unlock_shared();
-        }
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/src/runtime/dependency_graph.rs b/src/tools/rust-analyzer/crates/ra-salsa/src/runtime/dependency_graph.rs
deleted file mode 100644
index ed1d499f63735..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/src/runtime/dependency_graph.rs
+++ /dev/null
@@ -1,250 +0,0 @@
-use triomphe::Arc;
-
-use crate::{DatabaseKeyIndex, RuntimeId};
-use parking_lot::{Condvar, MutexGuard};
-use rustc_hash::FxHashMap;
-use smallvec::SmallVec;
-
-use super::{ActiveQuery, WaitResult};
-
-type QueryStack = Vec<ActiveQuery>;
-
-#[derive(Debug, Default)]
-pub(super) struct DependencyGraph {
-    /// A `(K -> V)` pair in this map indicates that the runtime
-    /// `K` is blocked on some query executing in the runtime `V`.
-    /// This encodes a graph that must be acyclic (or else deadlock
-    /// will result).
-    edges: FxHashMap<RuntimeId, Edge>,
-
-    /// Encodes the `RuntimeId` that are blocked waiting for the result
-    /// of a given query.
-    query_dependents: FxHashMap<DatabaseKeyIndex, SmallVec<[RuntimeId; 4]>>,
-
-    /// When a key K that had dependent queries Qs blocked on it completes,
-    /// its `WaitResult` is stored here. As they wake up, each query Q in Qs
-    /// comes here to fetch its result.
-    wait_results: FxHashMap<RuntimeId, (QueryStack, WaitResult)>,
-}
-
-#[derive(Debug)]
-struct Edge {
-    blocked_on_id: RuntimeId,
-    blocked_on_key: DatabaseKeyIndex,
-    stack: QueryStack,
-
-    /// Signalled whenever a query with dependents completes.
-    /// Allows those dependents to check if they are ready to unblock.
-    condvar: Arc<parking_lot::Condvar>,
-}
-
-impl DependencyGraph {
-    /// True if `from_id` depends on `to_id`.
-    ///
-    /// (i.e., there is a path from `from_id` to `to_id` in the graph.)
-    pub(super) fn depends_on(&mut self, from_id: RuntimeId, to_id: RuntimeId) -> bool {
-        let mut p = from_id;
-        while let Some(q) = self.edges.get(&p).map(|edge| edge.blocked_on_id) {
-            if q == to_id {
-                return true;
-            }
-
-            p = q;
-        }
-        p == to_id
-    }
-
-    /// Invokes `closure` with a `&mut ActiveQuery` for each query that participates in the cycle.
-    /// The cycle runs as follows:
-    ///
-    /// 1. The runtime `from_id`, which has the stack `from_stack`, would like to invoke `database_key`...
-    /// 2. ...but `database_key` is already being executed by `to_id`...
-    /// 3. ...and `to_id` is transitively dependent on something which is present on `from_stack`.
-    pub(super) fn for_each_cycle_participant(
-        &mut self,
-        from_id: RuntimeId,
-        from_stack: &mut QueryStack,
-        database_key: DatabaseKeyIndex,
-        to_id: RuntimeId,
-        mut closure: impl FnMut(&mut [ActiveQuery]),
-    ) {
-        debug_assert!(self.depends_on(to_id, from_id));
-
-        // To understand this algorithm, consider this [drawing](https://is.gd/TGLI9v):
-        //
-        //    database_key = QB2
-        //    from_id = A
-        //    to_id = B
-        //    from_stack = [QA1, QA2, QA3]
-        //
-        //    self.edges[B] = { C, QC2, [QB1..QB3] }
-        //    self.edges[C] = { A, QA2, [QC1..QC3] }
-        //
-        //         The cyclic
-        //         edge we have
-        //         failed to add.
-        //           :
-        //    A      :    B         C
-        //           :
-        //    QA1    v    QB1       QC1
-        // ┌► QA2    ┌──► QB2   ┌─► QC2
-        // │  QA3 ───┘    QB3 ──┘   QC3 ───┐
-        // │                               │
-        // └───────────────────────────────┘
-        //
-        // Final output: [QB2, QB3, QC2, QC3, QA2, QA3]
-
-        let mut id = to_id;
-        let mut key = database_key;
-        while id != from_id {
-            // Looking at the diagram above, the idea is to
-            // take the edge from `to_id` starting at `key`
-            // (inclusive) and down to the end. We can then
-            // load up the next thread (i.e., we start at B/QB2,
-            // and then load up the dependency on C/QC2).
-            let edge = self.edges.get_mut(&id).unwrap();
-            let prefix = edge.stack.iter_mut().take_while(|p| p.database_key_index != key).count();
-            closure(&mut edge.stack[prefix..]);
-            id = edge.blocked_on_id;
-            key = edge.blocked_on_key;
-        }
-
-        // Finally, we copy in the results from `from_stack`.
-        let prefix = from_stack.iter_mut().take_while(|p| p.database_key_index != key).count();
-        closure(&mut from_stack[prefix..]);
-    }
-
-    /// Unblock each blocked runtime (excluding the current one) if some
-    /// query executing in that runtime is participating in cycle fallback.
-    ///
-    /// Returns a pair of booleans `(current, others)` where:
-    /// * `current` is true if the current runtime has cycle participants
-    ///   with fallback;
-    /// * `others` is true if other runtimes were unblocked.
-    pub(super) fn maybe_unblock_runtimes_in_cycle(
-        &mut self,
-        from_id: RuntimeId,
-        from_stack: &QueryStack,
-        database_key: DatabaseKeyIndex,
-        to_id: RuntimeId,
-    ) -> (bool, bool) {
-        // See diagram in `for_each_cycle_participant`.
-        let mut id = to_id;
-        let mut key = database_key;
-        let mut others_unblocked = false;
-        while id != from_id {
-            let edge = self.edges.get(&id).unwrap();
-            let prefix = edge.stack.iter().take_while(|p| p.database_key_index != key).count();
-            let next_id = edge.blocked_on_id;
-            let next_key = edge.blocked_on_key;
-
-            if let Some(cycle) = edge.stack[prefix..].iter().rev().find_map(|aq| aq.cycle.clone()) {
-                // Remove `id` from the list of runtimes blocked on `next_key`:
-                self.query_dependents.get_mut(&next_key).unwrap().retain(|r| *r != id);
-
-                // Unblock runtime so that it can resume execution once lock is released:
-                self.unblock_runtime(id, WaitResult::Cycle(cycle));
-
-                others_unblocked = true;
-            }
-
-            id = next_id;
-            key = next_key;
-        }
-
-        let prefix = from_stack.iter().take_while(|p| p.database_key_index != key).count();
-        let this_unblocked = from_stack[prefix..].iter().any(|aq| aq.cycle.is_some());
-
-        (this_unblocked, others_unblocked)
-    }
-
-    /// Modifies the graph so that `from_id` is blocked
-    /// on `database_key`, which is being computed by
-    /// `to_id`.
-    ///
-    /// For this to be reasonable, the lock on the
-    /// results table for `database_key` must be held.
-    /// This ensures that computing `database_key` doesn't
-    /// complete before `block_on` executes.
-    ///
-    /// Preconditions:
-    /// * No path from `to_id` to `from_id`
-    ///   (i.e., `me.depends_on(to_id, from_id)` is false)
-    /// * `held_mutex` is a read lock (or stronger) on `database_key`
-    pub(super) fn block_on<QueryMutexGuard>(
-        mut me: MutexGuard<'_, Self>,
-        from_id: RuntimeId,
-        database_key: DatabaseKeyIndex,
-        to_id: RuntimeId,
-        from_stack: QueryStack,
-        query_mutex_guard: QueryMutexGuard,
-    ) -> (QueryStack, WaitResult) {
-        let condvar = me.add_edge(from_id, database_key, to_id, from_stack);
-
-        // Release the mutex that prevents `database_key`
-        // from completing, now that the edge has been added.
-        drop(query_mutex_guard);
-
-        loop {
-            if let Some(stack_and_result) = me.wait_results.remove(&from_id) {
-                debug_assert!(!me.edges.contains_key(&from_id));
-                return stack_and_result;
-            }
-            condvar.wait(&mut me);
-        }
-    }
-
-    /// Helper for `block_on`: performs actual graph modification
-    /// to add a dependency edge from `from_id` to `to_id`, which is
-    /// computing `database_key`.
-    fn add_edge(
-        &mut self,
-        from_id: RuntimeId,
-        database_key: DatabaseKeyIndex,
-        to_id: RuntimeId,
-        from_stack: QueryStack,
-    ) -> Arc<parking_lot::Condvar> {
-        assert_ne!(from_id, to_id);
-        debug_assert!(!self.edges.contains_key(&from_id));
-        debug_assert!(!self.depends_on(to_id, from_id));
-
-        let condvar = Arc::new(Condvar::new());
-        self.edges.insert(
-            from_id,
-            Edge {
-                blocked_on_id: to_id,
-                blocked_on_key: database_key,
-                stack: from_stack,
-                condvar: condvar.clone(),
-            },
-        );
-        self.query_dependents.entry(database_key).or_default().push(from_id);
-        condvar
-    }
-
-    /// Invoked when runtime `to_id` completes executing
-    /// `database_key`.
-    pub(super) fn unblock_runtimes_blocked_on(
-        &mut self,
-        database_key: DatabaseKeyIndex,
-        wait_result: WaitResult,
-    ) {
-        let dependents = self.query_dependents.remove(&database_key).unwrap_or_default();
-
-        for from_id in dependents {
-            self.unblock_runtime(from_id, wait_result.clone());
-        }
-    }
-
-    /// Unblock the runtime with the given id with the given wait-result.
-    /// This will cause it to resume execution (though it will have to grab
-    /// the lock on this data structure first, to recover the wait result).
-    fn unblock_runtime(&mut self, id: RuntimeId, wait_result: WaitResult) {
-        let edge = self.edges.remove(&id).expect("not blocked");
-        self.wait_results.insert(id, (edge.stack, wait_result));
-
-        // Now that we have inserted the `wait_results`,
-        // notify the thread.
-        edge.condvar.notify_one();
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/src/runtime/local_state.rs b/src/tools/rust-analyzer/crates/ra-salsa/src/runtime/local_state.rs
deleted file mode 100644
index 4ab4bad0cc508..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/src/runtime/local_state.rs
+++ /dev/null
@@ -1,204 +0,0 @@
-use tracing::trace;
-use triomphe::ThinArc;
-
-use crate::durability::Durability;
-use crate::runtime::ActiveQuery;
-use crate::runtime::Revision;
-use crate::Cycle;
-use crate::DatabaseKeyIndex;
-use std::cell::RefCell;
-
-/// State that is specific to a single execution thread.
-///
-/// Internally, this type uses ref-cells.
-///
-/// **Note also that all mutations to the database handle (and hence
-/// to the local-state) must be undone during unwinding.**
-pub(super) struct LocalState {
-    /// Vector of active queries.
-    ///
-    /// This is normally `Some`, but it is set to `None`
-    /// while the query is blocked waiting for a result.
-    ///
-    /// Unwinding note: pushes onto this vector must be popped -- even
-    /// during unwinding.
-    query_stack: RefCell<Option<Vec<ActiveQuery>>>,
-}
-
-/// Summarizes "all the inputs that a query used"
-#[derive(Debug, Clone)]
-pub(crate) struct QueryRevisions {
-    /// The most recent revision in which some input changed.
-    pub(crate) changed_at: Revision,
-
-    /// Minimum durability of the inputs to this query.
-    pub(crate) durability: Durability,
-
-    /// Whether the query performed an untracked read.
-    /// Invariant: if `untracked`, `inputs` is `None`.
-    /// Why is this encoded like this and not as a proper enum? Struct size: it saves us 8 bytes.
-    pub(crate) untracked: bool,
-
-    /// The inputs that went into our query, if we are tracking them.
-    pub(crate) inputs: Option<ThinArc<(), DatabaseKeyIndex>>,
-}
-
-impl Default for LocalState {
-    fn default() -> Self {
-        LocalState { query_stack: RefCell::new(Some(Vec::new())) }
-    }
-}
-
-impl LocalState {
-    #[inline]
-    pub(super) fn push_query(&self, database_key_index: DatabaseKeyIndex) -> ActiveQueryGuard<'_> {
-        let mut query_stack = self.query_stack.borrow_mut();
-        let query_stack = query_stack.as_mut().expect("local stack taken");
-        query_stack.push(ActiveQuery::new(database_key_index));
-        ActiveQueryGuard { local_state: self, database_key_index, push_len: query_stack.len() }
-    }
-
-    fn with_query_stack<R>(&self, c: impl FnOnce(&mut Vec<ActiveQuery>) -> R) -> R {
-        c(self.query_stack.borrow_mut().as_mut().expect("query stack taken"))
-    }
-
-    pub(super) fn query_in_progress(&self) -> bool {
-        self.with_query_stack(|stack| !stack.is_empty())
-    }
-
-    pub(super) fn active_query(&self) -> Option<DatabaseKeyIndex> {
-        self.with_query_stack(|stack| {
-            stack.last().map(|active_query| active_query.database_key_index)
-        })
-    }
-
-    pub(super) fn report_query_read_and_unwind_if_cycle_resulted(
-        &self,
-        input: DatabaseKeyIndex,
-        durability: Durability,
-        changed_at: Revision,
-    ) {
-        trace!(
-            "report_query_read_and_unwind_if_cycle_resulted(input={:?}, durability={:?}, changed_at={:?})",
-            input, durability, changed_at
-        );
-        self.with_query_stack(|stack| {
-            if let Some(top_query) = stack.last_mut() {
-                top_query.add_read(input, durability, changed_at);
-
-                // We are a cycle participant:
-                //
-                //     C0 --> ... --> Ci --> Ci+1 -> ... -> Cn --> C0
-                //                        ^   ^
-                //                        :   |
-                //         This edge -----+   |
-                //                            |
-                //                            |
-                //                            N0
-                //
-                // In this case, the value we have just read from `Ci+1`
-                // is actually the cycle fallback value and not especially
-                // interesting. We unwind now with `CycleParticipant` to avoid
-                // executing the rest of our query function. This unwinding
-                // will be caught and our own fallback value will be used.
-                //
-                // Note that `Ci+1` may have *other* callers who are not
-                // participants in the cycle (e.g., N0 in the graph above).
-                // They will not have the `cycle` marker set in their
-                // stack frames, so they will just read the fallback value
-                // from `Ci+1` and continue on their merry way.
-                if let Some(cycle) = &top_query.cycle {
-                    cycle.clone().throw()
-                }
-            }
-        })
-    }
-
-    pub(super) fn report_untracked_read(&self, current_revision: Revision) {
-        self.with_query_stack(|stack| {
-            if let Some(top_query) = stack.last_mut() {
-                top_query.add_untracked_read(current_revision);
-            }
-        })
-    }
-
-    /// Update the top query on the stack to act as though it read a value
-    /// of durability `durability` which changed in `revision`.
-    pub(super) fn report_synthetic_read(&self, durability: Durability, revision: Revision) {
-        self.with_query_stack(|stack| {
-            if let Some(top_query) = stack.last_mut() {
-                top_query.add_synthetic_read(durability, revision);
-            }
-        })
-    }
-
-    /// Takes the query stack and returns it. This is used when
-    /// the current thread is blocking. The stack must be restored
-    /// with [`Self::restore_query_stack`] when the thread unblocks.
-    pub(super) fn take_query_stack(&self) -> Vec<ActiveQuery> {
-        self.query_stack.take().expect("query stack already taken")
-    }
-
-    /// Restores a query stack taken with [`Self::take_query_stack`] once
-    /// the thread unblocks.
-    pub(super) fn restore_query_stack(&self, stack: Vec<ActiveQuery>) {
-        assert!(self.query_stack.borrow().is_none(), "query stack not taken");
-        self.query_stack.replace(Some(stack));
-    }
-}
-
-impl std::panic::RefUnwindSafe for LocalState {}
-
-/// When a query is pushed onto the `active_query` stack, this guard
-/// is returned to represent its slot. The guard can be used to pop
-/// the query from the stack -- in the case of unwinding, the guard's
-/// destructor will also remove the query.
-pub(crate) struct ActiveQueryGuard<'me> {
-    local_state: &'me LocalState,
-    push_len: usize,
-    database_key_index: DatabaseKeyIndex,
-}
-
-impl ActiveQueryGuard<'_> {
-    fn pop_helper(&self) -> ActiveQuery {
-        self.local_state.with_query_stack(|stack| {
-            // Sanity check: pushes and pops should be balanced.
-            assert_eq!(stack.len(), self.push_len);
-            debug_assert_eq!(stack.last().unwrap().database_key_index, self.database_key_index);
-            stack.pop().unwrap()
-        })
-    }
-
-    /// Invoked when the query has successfully completed execution.
-    pub(super) fn complete(self) -> ActiveQuery {
-        let query = self.pop_helper();
-        std::mem::forget(self);
-        query
-    }
-
-    /// Pops an active query from the stack. Returns the [`QueryRevisions`]
-    /// which summarizes the other queries that were accessed during this
-    /// query's execution.
-    #[inline]
-    pub(crate) fn pop(self) -> QueryRevisions {
-        // Extract accumulated inputs.
-        let popped_query = self.complete();
-
-        // If this frame were a cycle participant, it would have unwound.
-        assert!(popped_query.cycle.is_none());
-
-        popped_query.revisions()
-    }
-
-    /// If the active query is registered as a cycle participant, remove and
-    /// return that cycle.
-    pub(crate) fn take_cycle(&self) -> Option<Cycle> {
-        self.local_state.with_query_stack(|stack| stack.last_mut()?.cycle.take())
-    }
-}
-
-impl Drop for ActiveQueryGuard<'_> {
-    fn drop(&mut self) {
-        self.pop_helper();
-    }
-}
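The `complete`/`Drop` pair above is the load-bearing part of `ActiveQueryGuard`: the pop happens exactly once whether the query finishes normally (`complete` pops, then `mem::forget` skips the destructor) or unwinds (the destructor pops). A minimal standalone sketch of that pattern, using hypothetical `Stack`/`FrameGuard` names and no salsa types:

```rust
use std::cell::RefCell;
use std::mem;
use std::panic::{self, AssertUnwindSafe};

struct Stack {
    frames: RefCell<Vec<u32>>,
}

struct FrameGuard<'a> {
    stack: &'a Stack,
}

impl Stack {
    // Push a frame and return a guard that pops it again on drop.
    fn push(&self, frame: u32) -> FrameGuard<'_> {
        self.frames.borrow_mut().push(frame);
        FrameGuard { stack: self }
    }
}

impl FrameGuard<'_> {
    // Normal completion: pop the frame, then skip the destructor so the
    // pop does not happen a second time.
    fn complete(self) -> u32 {
        let frame = self.stack.frames.borrow_mut().pop().unwrap();
        mem::forget(self);
        frame
    }
}

impl Drop for FrameGuard<'_> {
    // Unwinding path: the guard is dropped without `complete`, and the
    // frame is still popped, keeping pushes and pops balanced.
    fn drop(&mut self) {
        self.stack.frames.borrow_mut().pop();
    }
}

fn main() {
    let stack = Stack { frames: RefCell::new(Vec::new()) };

    // Normal path: push + complete leaves the stack empty.
    let guard = stack.push(1);
    assert_eq!(guard.complete(), 1);
    assert!(stack.frames.borrow().is_empty());

    // Panic path: the destructor pops the frame during unwinding.
    let result = panic::catch_unwind(AssertUnwindSafe(|| {
        let _guard = stack.push(2);
        panic!("simulated query failure");
    }));
    assert!(result.is_err());
    assert!(stack.frames.borrow().is_empty());
}
```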
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/src/storage.rs b/src/tools/rust-analyzer/crates/ra-salsa/src/storage.rs
deleted file mode 100644
index e0acf44041b49..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/src/storage.rs
+++ /dev/null
@@ -1,53 +0,0 @@
-use crate::{plumbing::DatabaseStorageTypes, Runtime};
-use triomphe::Arc;
-
-/// Stores the cached results and dependency information for all the queries
-/// defined on your salsa database. Also embeds a [`Runtime`] which is used to
-/// manage query execution. Every database must include a `storage:
-/// Storage<Self>` field.
-pub struct Storage<DB: DatabaseStorageTypes> {
-    query_store: Arc<DB::DatabaseStorage>,
-    runtime: Runtime,
-}
-
-impl<DB: DatabaseStorageTypes> Default for Storage<DB> {
-    fn default() -> Self {
-        Self { query_store: Default::default(), runtime: Default::default() }
-    }
-}
-
-impl<DB: DatabaseStorageTypes> Storage<DB> {
-    /// Gives access to the underlying salsa runtime.
-    pub fn salsa_runtime(&self) -> &Runtime {
-        &self.runtime
-    }
-
-    /// Gives access to the underlying salsa runtime.
-    pub fn salsa_runtime_mut(&mut self) -> &mut Runtime {
-        &mut self.runtime
-    }
-
-    /// Access the query storage tables. Not meant to be used directly by end
-    /// users.
-    pub fn query_store(&self) -> &DB::DatabaseStorage {
-        &self.query_store
-    }
-
-    /// Access the query storage tables. Not meant to be used directly by end
-    /// users.
-    pub fn query_store_mut(&mut self) -> (&DB::DatabaseStorage, &mut Runtime) {
-        (&self.query_store, &mut self.runtime)
-    }
-
-    /// Returns a "snapshotted" storage, suitable for use in a forked database.
-    /// This snapshot holds a read-lock on the global state, which means that any
-    /// attempt to `set` an input will block until the forked runtime is
-    /// dropped. See `ParallelDatabase::snapshot` for more information.
-    ///
-    /// **Warning.** This second handle is intended to be used from a separate
-    /// thread. Using two database handles from the **same thread** can lead to
-    /// deadlock.
-    pub fn snapshot(&self) -> Self {
-        Storage { query_store: self.query_store.clone(), runtime: self.runtime.snapshot() }
-    }
-}
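The structure of `Storage::snapshot` is: the query store is shared between handles behind an `Arc`, while each handle owns its own runtime. The sketch below only illustrates that ownership shape with hypothetical `Store`/`Handle` types; it does not model the read-lock or deadlock behaviour described in the doc comment above:

```rust
use std::sync::Arc;
use std::thread;

// Shared query store; in the real type this holds the per-query tables.
#[derive(Default)]
struct Store;

// Per-handle state; the real `Runtime` also tracks revisions and locks.
#[derive(Default)]
struct Runtime {
    is_snapshot: bool,
}

struct Handle {
    store: Arc<Store>,
    runtime: Runtime,
}

impl Handle {
    fn new() -> Self {
        Handle { store: Arc::new(Store), runtime: Runtime::default() }
    }

    // Mirrors `Storage::snapshot`: the store is shared via `Arc::clone`,
    // while the fork gets its own runtime.
    fn snapshot(&self) -> Handle {
        Handle { store: Arc::clone(&self.store), runtime: Runtime { is_snapshot: true } }
    }
}

fn main() {
    let main_handle = Handle::new();
    let forked = main_handle.snapshot();

    // Both handles point at the same underlying store...
    assert!(Arc::ptr_eq(&main_handle.store, &forked.store));
    // ...but only the fork is marked as a snapshot.
    assert!(forked.runtime.is_snapshot && !main_handle.runtime.is_snapshot);

    // As the warning above says, the fork is meant to move to another thread.
    thread::spawn(move || drop(forked)).join().unwrap();
}
```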
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/cycles.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/cycles.rs
deleted file mode 100644
index 3c3931e658538..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/cycles.rs
+++ /dev/null
@@ -1,492 +0,0 @@
-use std::panic::UnwindSafe;
-
-use expect_test::expect;
-use ra_salsa::{Durability, ParallelDatabase, Snapshot};
-
-// Axes:
-//
-// Threading
-// * Intra-thread
-// * Cross-thread -- part of cycle is on one thread, part on another
-//
-// Recovery strategies:
-// * Panic
-// * Fallback
-// * Mixed -- multiple strategies within cycle participants
-//
-// Across revisions:
-// * N/A -- only one revision
-// * Present in new revision, not old
-// * Present in old revision, not new
-// * Present in both revisions
-//
-// Dependencies
-// * Tracked
-// * Untracked -- cycle participant(s) contain untracked reads
-//
-// Layers
-// * Direct -- cycle participant is directly invoked from test
-// * Indirect -- invoked a query that invokes the cycle
-//
-//
-// | Thread | Recovery | Old, New | Dep style | Layers   | Test Name      |
-// | ------ | -------- | -------- | --------- | ------   | ---------      |
-// | Intra  | Panic    | N/A      | Tracked   | direct   | cycle_memoized |
-// | Intra  | Panic    | N/A      | Untracked | direct   | cycle_volatile |
-// | Intra  | Fallback | N/A      | Tracked   | direct   | cycle_cycle  |
-// | Intra  | Fallback | N/A      | Tracked   | indirect | inner_cycle |
-// | Intra  | Fallback | Both     | Tracked   | direct   | cycle_revalidate |
-// | Intra  | Fallback | New      | Tracked   | direct   | cycle_appears |
-// | Intra  | Fallback | Old      | Tracked   | direct   | cycle_disappears |
-// | Intra  | Fallback | Old      | Tracked   | direct   | cycle_disappears_durability |
-// | Intra  | Mixed    | N/A      | Tracked   | direct   | cycle_mixed_1 |
-// | Intra  | Mixed    | N/A      | Tracked   | direct   | cycle_mixed_2 |
-// | Cross  | Fallback | N/A      | Tracked   | both     | parallel/cycles.rs: recover_parallel_cycle |
-// | Cross  | Panic    | N/A      | Tracked   | both     | parallel/cycles.rs: panic_parallel_cycle |
-
-#[derive(PartialEq, Eq, Hash, Clone, Debug)]
-struct Error {
-    cycle: Vec<String>,
-}
-
-#[ra_salsa::database(GroupStruct)]
-#[derive(Default)]
-struct DatabaseImpl {
-    storage: ra_salsa::Storage<Self>,
-}
-
-impl ra_salsa::Database for DatabaseImpl {}
-
-impl ParallelDatabase for DatabaseImpl {
-    fn snapshot(&self) -> Snapshot<Self> {
-        Snapshot::new(DatabaseImpl { storage: self.storage.snapshot() })
-    }
-}
-
-/// The queries A, B, and C in `Database` can be configured
-/// to invoke one another in arbitrary ways using this
-/// enum.
-#[derive(Debug, Copy, Clone, PartialEq, Eq)]
-enum CycleQuery {
-    None,
-    A,
-    B,
-    C,
-    AthenC,
-}
-
-#[ra_salsa::query_group(GroupStruct)]
-trait Database: ra_salsa::Database {
-    // `a` and `b` depend on each other and form a cycle
-    fn memoized_a(&self) -> ();
-    fn memoized_b(&self) -> ();
-    fn volatile_a(&self) -> ();
-    fn volatile_b(&self) -> ();
-
-    #[ra_salsa::input]
-    fn a_invokes(&self) -> CycleQuery;
-
-    #[ra_salsa::input]
-    fn b_invokes(&self) -> CycleQuery;
-
-    #[ra_salsa::input]
-    fn c_invokes(&self) -> CycleQuery;
-
-    #[ra_salsa::cycle(recover_a)]
-    fn cycle_a(&self) -> Result<(), Error>;
-
-    #[ra_salsa::cycle(recover_b)]
-    fn cycle_b(&self) -> Result<(), Error>;
-
-    fn cycle_c(&self) -> Result<(), Error>;
-}
-
-fn recover_a(db: &dyn Database, cycle: &ra_salsa::Cycle) -> Result<(), Error> {
-    Err(Error { cycle: cycle.all_participants(db) })
-}
-
-fn recover_b(db: &dyn Database, cycle: &ra_salsa::Cycle) -> Result<(), Error> {
-    Err(Error { cycle: cycle.all_participants(db) })
-}
-
-fn memoized_a(db: &dyn Database) {
-    db.memoized_b()
-}
-
-fn memoized_b(db: &dyn Database) {
-    db.memoized_a()
-}
-
-fn volatile_a(db: &dyn Database) {
-    db.salsa_runtime().report_untracked_read();
-    db.volatile_b()
-}
-
-fn volatile_b(db: &dyn Database) {
-    db.salsa_runtime().report_untracked_read();
-    db.volatile_a()
-}
-
-impl CycleQuery {
-    fn invoke(self, db: &dyn Database) -> Result<(), Error> {
-        match self {
-            CycleQuery::A => db.cycle_a(),
-            CycleQuery::B => db.cycle_b(),
-            CycleQuery::C => db.cycle_c(),
-            CycleQuery::AthenC => {
-                let _ = db.cycle_a();
-                db.cycle_c()
-            }
-            CycleQuery::None => Ok(()),
-        }
-    }
-}
-
-fn cycle_a(db: &dyn Database) -> Result<(), Error> {
-    db.a_invokes().invoke(db)
-}
-
-fn cycle_b(db: &dyn Database) -> Result<(), Error> {
-    db.b_invokes().invoke(db)
-}
-
-fn cycle_c(db: &dyn Database) -> Result<(), Error> {
-    db.c_invokes().invoke(db)
-}
-
-#[track_caller]
-fn extract_cycle(f: impl FnOnce() + UnwindSafe) -> ra_salsa::Cycle {
-    let v = std::panic::catch_unwind(f);
-    if let Err(d) = &v {
-        if let Some(cycle) = d.downcast_ref::<ra_salsa::Cycle>() {
-            return cycle.clone();
-        }
-    }
-    panic!("unexpected value: {v:?}")
-}
-
-#[test]
-fn cycle_memoized() {
-    let db = DatabaseImpl::default();
-    let cycle = extract_cycle(|| db.memoized_a());
-    expect![[r#"
-        [
-            "cycles::MemoizedAQuery::memoized_a(())",
-            "cycles::MemoizedBQuery::memoized_b(())",
-        ]
-    "#]]
-    .assert_debug_eq(&cycle.unexpected_participants(&db));
-}
-
-#[test]
-fn cycle_volatile() {
-    let db = DatabaseImpl::default();
-    let cycle = extract_cycle(|| db.volatile_a());
-    expect![[r#"
-        [
-            "cycles::VolatileAQuery::volatile_a(())",
-            "cycles::VolatileBQuery::volatile_b(())",
-        ]
-    "#]]
-    .assert_debug_eq(&cycle.unexpected_participants(&db));
-}
-
-#[test]
-fn cycle_cycle() {
-    let mut query = DatabaseImpl::default();
-
-    //     A --> B
-    //     ^     |
-    //     +-----+
-
-    query.set_a_invokes(CycleQuery::B);
-    query.set_b_invokes(CycleQuery::A);
-
-    assert!(query.cycle_a().is_err());
-}
-
-#[test]
-fn inner_cycle() {
-    let mut query = DatabaseImpl::default();
-
-    //     A --> B <-- C
-    //     ^     |
-    //     +-----+
-
-    query.set_a_invokes(CycleQuery::B);
-    query.set_b_invokes(CycleQuery::A);
-    query.set_c_invokes(CycleQuery::B);
-
-    let err = query.cycle_c();
-    assert!(err.is_err());
-    let cycle = err.unwrap_err().cycle;
-    expect![[r#"
-        [
-            "cycles::CycleAQuery::cycle_a(())",
-            "cycles::CycleBQuery::cycle_b(())",
-        ]
-    "#]]
-    .assert_debug_eq(&cycle);
-}
-
-#[test]
-fn cycle_revalidate() {
-    let mut db = DatabaseImpl::default();
-
-    //     A --> B
-    //     ^     |
-    //     +-----+
-    db.set_a_invokes(CycleQuery::B);
-    db.set_b_invokes(CycleQuery::A);
-
-    assert!(db.cycle_a().is_err());
-    db.set_b_invokes(CycleQuery::A); // same value as default
-    assert!(db.cycle_a().is_err());
-}
-
-#[test]
-fn cycle_revalidate_unchanged_twice() {
-    let mut db = DatabaseImpl::default();
-
-    //     A --> B
-    //     ^     |
-    //     +-----+
-    db.set_a_invokes(CycleQuery::B);
-    db.set_b_invokes(CycleQuery::A);
-
-    assert!(db.cycle_a().is_err());
-    db.set_c_invokes(CycleQuery::A); // force new revision
-
-    // on this run
-    expect![[r#"
-        Err(
-            Error {
-                cycle: [
-                    "cycles::CycleAQuery::cycle_a(())",
-                    "cycles::CycleBQuery::cycle_b(())",
-                ],
-            },
-        )
-    "#]]
-    .assert_debug_eq(&db.cycle_a());
-}
-
-#[test]
-fn cycle_appears() {
-    let mut db = DatabaseImpl::default();
-
-    //     A --> B
-    db.set_a_invokes(CycleQuery::B);
-    db.set_b_invokes(CycleQuery::None);
-    assert!(db.cycle_a().is_ok());
-
-    //     A --> B
-    //     ^     |
-    //     +-----+
-    db.set_b_invokes(CycleQuery::A);
-    tracing::debug!("Set Cycle Leaf");
-    assert!(db.cycle_a().is_err());
-}
-
-#[test]
-fn cycle_disappears() {
-    let mut db = DatabaseImpl::default();
-
-    //     A --> B
-    //     ^     |
-    //     +-----+
-    db.set_a_invokes(CycleQuery::B);
-    db.set_b_invokes(CycleQuery::A);
-    assert!(db.cycle_a().is_err());
-
-    //     A --> B
-    db.set_b_invokes(CycleQuery::None);
-    assert!(db.cycle_a().is_ok());
-}
-
-/// A variant on `cycle_disappears` in which the values of
-/// `a_invokes` and `b_invokes` are set with durability values.
-/// If we are not careful, this could cause us to overlook
-/// the fact that the cycle will no longer occur.
-#[test]
-fn cycle_disappears_durability() {
-    let mut db = DatabaseImpl::default();
-    db.set_a_invokes_with_durability(CycleQuery::B, Durability::LOW);
-    db.set_b_invokes_with_durability(CycleQuery::A, Durability::HIGH);
-
-    let res = db.cycle_a();
-    assert!(res.is_err());
-
-    // At this point, `a` read `LOW` input, and `b` read `HIGH` input. However,
-    // because `b` participates in the same cycle as `a`, its final durability
-    // should be `LOW`.
-    //
-    // Check that setting a `LOW` input causes us to re-execute `b` query, and
-    // observe that the cycle goes away.
-    db.set_a_invokes_with_durability(CycleQuery::None, Durability::LOW);
-
-    let res = db.cycle_b();
-    assert!(res.is_ok());
-}
-
-#[test]
-fn cycle_mixed_1() {
-    let mut db = DatabaseImpl::default();
-    //     A --> B <-- C
-    //           |     ^
-    //           +-----+
-    db.set_a_invokes(CycleQuery::B);
-    db.set_b_invokes(CycleQuery::C);
-    db.set_c_invokes(CycleQuery::B);
-
-    let u = db.cycle_c();
-    expect![[r#"
-        Err(
-            Error {
-                cycle: [
-                    "cycles::CycleBQuery::cycle_b(())",
-                    "cycles::CycleCQuery::cycle_c(())",
-                ],
-            },
-        )
-    "#]]
-    .assert_debug_eq(&u);
-}
-
-#[test]
-fn cycle_mixed_2() {
-    let mut db = DatabaseImpl::default();
-
-    // Configuration:
-    //
-    //     A --> B --> C
-    //     ^           |
-    //     +-----------+
-    db.set_a_invokes(CycleQuery::B);
-    db.set_b_invokes(CycleQuery::C);
-    db.set_c_invokes(CycleQuery::A);
-
-    let u = db.cycle_a();
-    expect![[r#"
-        Err(
-            Error {
-                cycle: [
-                    "cycles::CycleAQuery::cycle_a(())",
-                    "cycles::CycleBQuery::cycle_b(())",
-                    "cycles::CycleCQuery::cycle_c(())",
-                ],
-            },
-        )
-    "#]]
-    .assert_debug_eq(&u);
-}
-
-#[test]
-fn cycle_deterministic_order() {
-    // No matter whether we start from A or B, we get the same set of participants:
-    let db = || {
-        let mut db = DatabaseImpl::default();
-        //     A --> B
-        //     ^     |
-        //     +-----+
-        db.set_a_invokes(CycleQuery::B);
-        db.set_b_invokes(CycleQuery::A);
-        db
-    };
-    let a = db().cycle_a();
-    let b = db().cycle_b();
-    expect![[r#"
-        (
-            Err(
-                Error {
-                    cycle: [
-                        "cycles::CycleAQuery::cycle_a(())",
-                        "cycles::CycleBQuery::cycle_b(())",
-                    ],
-                },
-            ),
-            Err(
-                Error {
-                    cycle: [
-                        "cycles::CycleAQuery::cycle_a(())",
-                        "cycles::CycleBQuery::cycle_b(())",
-                    ],
-                },
-            ),
-        )
-    "#]]
-    .assert_debug_eq(&(a, b));
-}
-
-#[test]
-fn cycle_multiple() {
-    // No matter whether we start from A or B, we get the same set of participants:
-    let mut db = DatabaseImpl::default();
-
-    // Configuration:
-    //
-    //     A --> B <-- C
-    //     ^     |     ^
-    //     +-----+     |
-    //           |     |
-    //           +-----+
-    //
-    // Here, conceptually, B encounters a cycle with A and then
-    // recovers.
-    db.set_a_invokes(CycleQuery::B);
-    db.set_b_invokes(CycleQuery::AthenC);
-    db.set_c_invokes(CycleQuery::B);
-
-    let c = db.cycle_c();
-    let b = db.cycle_b();
-    let a = db.cycle_a();
-    expect![[r#"
-        (
-            Err(
-                Error {
-                    cycle: [
-                        "cycles::CycleAQuery::cycle_a(())",
-                        "cycles::CycleBQuery::cycle_b(())",
-                    ],
-                },
-            ),
-            Err(
-                Error {
-                    cycle: [
-                        "cycles::CycleAQuery::cycle_a(())",
-                        "cycles::CycleBQuery::cycle_b(())",
-                    ],
-                },
-            ),
-            Err(
-                Error {
-                    cycle: [
-                        "cycles::CycleAQuery::cycle_a(())",
-                        "cycles::CycleBQuery::cycle_b(())",
-                    ],
-                },
-            ),
-        )
-    "#]]
-    .assert_debug_eq(&(a, b, c));
-}
-
-#[test]
-fn cycle_recovery_set_but_not_participating() {
-    let mut db = DatabaseImpl::default();
-
-    //     A --> C -+
-    //           ^  |
-    //           +--+
-    db.set_a_invokes(CycleQuery::C);
-    db.set_c_invokes(CycleQuery::C);
-
-    // Here we expect C to panic and A not to recover:
-    let r = extract_cycle(|| drop(db.cycle_a()));
-    expect![[r#"
-        [
-            "cycles::CycleCQuery::cycle_c(())",
-        ]
-    "#]]
-    .assert_debug_eq(&r.all_participants(&db));
-}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/dyn_trait.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/dyn_trait.rs
deleted file mode 100644
index 6075ae5c11e32..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/dyn_trait.rs
+++ /dev/null
@@ -1,28 +0,0 @@
-//! Test that you can implement a query using a `dyn Trait` setup.
-
-#[ra_salsa::database(DynTraitStorage)]
-#[derive(Default)]
-struct DynTraitDatabase {
-    storage: ra_salsa::Storage<Self>,
-}
-
-impl ra_salsa::Database for DynTraitDatabase {}
-
-#[ra_salsa::query_group(DynTraitStorage)]
-trait DynTrait {
-    #[ra_salsa::input]
-    fn input(&self, x: u32) -> u32;
-
-    fn output(&self, x: u32) -> u32;
-}
-
-fn output(db: &dyn DynTrait, x: u32) -> u32 {
-    db.input(x) * 2
-}
-
-#[test]
-fn dyn_trait() {
-    let mut query = DynTraitDatabase::default();
-    query.set_input(22, 23);
-    assert_eq!(query.output(22), 46);
-}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/incremental/constants.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/incremental/constants.rs
deleted file mode 100644
index 6e51545b60a15..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/incremental/constants.rs
+++ /dev/null
@@ -1,145 +0,0 @@
-use crate::implementation::{TestContext, TestContextImpl};
-use ra_salsa::debug::DebugQueryTable;
-use ra_salsa::Durability;
-
-#[ra_salsa::query_group(Constants)]
-pub(crate) trait ConstantsDatabase: TestContext {
-    #[ra_salsa::input]
-    fn input(&self, key: char) -> usize;
-
-    fn add(&self, key1: char, key2: char) -> usize;
-
-    fn add3(&self, key1: char, key2: char, key3: char) -> usize;
-}
-
-fn add(db: &dyn ConstantsDatabase, key1: char, key2: char) -> usize {
-    db.log().add(format!("add({key1}, {key2})"));
-    db.input(key1) + db.input(key2)
-}
-
-fn add3(db: &dyn ConstantsDatabase, key1: char, key2: char, key3: char) -> usize {
-    db.log().add(format!("add3({key1}, {key2}, {key3})"));
-    db.add(key1, key2) + db.input(key3)
-}
-
-// Test that we can assign a constant and things will be correctly
-// recomputed afterwards.
-#[test]
-fn invalidate_constant() {
-    let db = &mut TestContextImpl::default();
-    db.set_input_with_durability('a', 44, Durability::HIGH);
-    db.set_input_with_durability('b', 22, Durability::HIGH);
-    assert_eq!(db.add('a', 'b'), 66);
-
-    db.set_input_with_durability('a', 66, Durability::HIGH);
-    assert_eq!(db.add('a', 'b'), 88);
-}
-
-#[test]
-fn invalidate_constant_1() {
-    let db = &mut TestContextImpl::default();
-
-    // Not constant:
-    db.set_input('a', 44);
-    assert_eq!(db.add('a', 'a'), 88);
-
-    // Becomes constant:
-    db.set_input_with_durability('a', 44, Durability::HIGH);
-    assert_eq!(db.add('a', 'a'), 88);
-
-    // Invalidates:
-    db.set_input_with_durability('a', 33, Durability::HIGH);
-    assert_eq!(db.add('a', 'a'), 66);
-}
-
-// Test cases where we assign the same value to 'a' after declaring it a
-// constant.
-#[test]
-fn set_after_constant_same_value() {
-    let db = &mut TestContextImpl::default();
-    db.set_input_with_durability('a', 44, Durability::HIGH);
-    db.set_input_with_durability('a', 44, Durability::HIGH);
-    db.set_input('a', 44);
-}
-
-#[test]
-fn not_constant() {
-    let mut db = TestContextImpl::default();
-
-    db.set_input('a', 22);
-    db.set_input('b', 44);
-    assert_eq!(db.add('a', 'b'), 66);
-    assert_eq!(Durability::LOW, AddQuery.in_db(&db).durability(('a', 'b')));
-}
-
-#[test]
-fn durability() {
-    let mut db = TestContextImpl::default();
-
-    db.set_input_with_durability('a', 22, Durability::HIGH);
-    db.set_input_with_durability('b', 44, Durability::HIGH);
-    assert_eq!(db.add('a', 'b'), 66);
-    assert_eq!(Durability::HIGH, AddQuery.in_db(&db).durability(('a', 'b')));
-}
-
-#[test]
-fn mixed_constant() {
-    let mut db = TestContextImpl::default();
-
-    db.set_input_with_durability('a', 22, Durability::HIGH);
-    db.set_input('b', 44);
-    assert_eq!(db.add('a', 'b'), 66);
-    assert_eq!(Durability::LOW, AddQuery.in_db(&db).durability(('a', 'b')));
-}
-
-#[test]
-fn becomes_constant_with_change() {
-    let mut db = TestContextImpl::default();
-
-    db.set_input('a', 22);
-    db.set_input('b', 44);
-    assert_eq!(db.add('a', 'b'), 66);
-    assert_eq!(Durability::LOW, AddQuery.in_db(&db).durability(('a', 'b')));
-
-    db.set_input_with_durability('a', 23, Durability::HIGH);
-    assert_eq!(db.add('a', 'b'), 67);
-    assert_eq!(Durability::LOW, AddQuery.in_db(&db).durability(('a', 'b')));
-
-    db.set_input_with_durability('b', 45, Durability::HIGH);
-    assert_eq!(db.add('a', 'b'), 68);
-    assert_eq!(Durability::HIGH, AddQuery.in_db(&db).durability(('a', 'b')));
-
-    db.set_input_with_durability('b', 45, Durability::MEDIUM);
-    assert_eq!(db.add('a', 'b'), 68);
-    assert_eq!(Durability::MEDIUM, AddQuery.in_db(&db).durability(('a', 'b')));
-}
-
-// Test a subtle case in which an input changes from constant to
-// non-constant, but its value doesn't change. If we're not careful,
-// this can cause us to incorrectly consider derived values as still
-// being constant.
-#[test]
-fn constant_to_non_constant() {
-    let mut db = TestContextImpl::default();
-
-    db.set_input_with_durability('a', 11, Durability::HIGH);
-    db.set_input_with_durability('b', 22, Durability::HIGH);
-    db.set_input_with_durability('c', 33, Durability::HIGH);
-
-    // Here, `add3` invokes `add`, which yields 33. Both calls are
-    // constant.
-    assert_eq!(db.add3('a', 'b', 'c'), 66);
-
-    db.set_input('a', 11);
-
-    // Here, `add3` invokes `add`, which *still* yields 33, but which
-    // is no longer constant. Since the value didn't change, we might
-    // preserve `add3` unchanged, not noticing that it is no longer
-    // constant.
-    assert_eq!(db.add3('a', 'b', 'c'), 66);
-
-    // In that case, we would not get the correct result here, when
-    // 'a' changes *again*.
-    db.set_input('a', 22);
-    assert_eq!(db.add3('a', 'b', 'c'), 77);
-}
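What these tests pin down is the durability rule for derived queries: a derived value is only as durable as the least durable input it read, so a single `LOW` input keeps `add`'s durability at `LOW` even when the other input is `HIGH`. A standalone model of just that rule, with a hypothetical `Durability` enum rather than salsa's:

```rust
// Ordered so that Low < Medium < High, matching how a derived value's
// durability is dragged down by its weakest input in the tests above.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
enum Durability {
    Low,
    Medium,
    High,
}

// A derived query is only as durable as the least durable input it read.
fn derived_durability(inputs: &[Durability]) -> Durability {
    inputs.iter().copied().min().unwrap_or(Durability::High)
}

fn main() {
    use Durability::*;

    // Mirrors `not_constant` / `mixed_constant`: one LOW input keeps the result LOW.
    assert_eq!(derived_durability(&[Low, Low]), Low);
    assert_eq!(derived_durability(&[High, Low]), Low);

    // Mirrors `durability`: all-HIGH inputs give a HIGH result.
    assert_eq!(derived_durability(&[High, High]), High);

    // Mirrors the last step of `becomes_constant_with_change`.
    assert_eq!(derived_durability(&[High, Medium]), Medium);
}
```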
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/incremental/counter.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/incremental/counter.rs
deleted file mode 100644
index c04857e24c9e8..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/incremental/counter.rs
+++ /dev/null
@@ -1,14 +0,0 @@
-use std::cell::Cell;
-
-#[derive(Default)]
-pub(crate) struct Counter {
-    value: Cell<usize>,
-}
-
-impl Counter {
-    pub(crate) fn increment(&self) -> usize {
-        let v = self.value.get();
-        self.value.set(v + 1);
-        v
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/incremental/implementation.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/incremental/implementation.rs
deleted file mode 100644
index e9a59c4630451..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/incremental/implementation.rs
+++ /dev/null
@@ -1,59 +0,0 @@
-use crate::constants;
-use crate::counter::Counter;
-use crate::log::Log;
-use crate::memoized_dep_inputs;
-use crate::memoized_inputs;
-use crate::memoized_volatile;
-
-pub(crate) trait TestContext: ra_salsa::Database {
-    fn clock(&self) -> &Counter;
-    fn log(&self) -> &Log;
-}
-
-#[ra_salsa::database(
-    constants::Constants,
-    memoized_dep_inputs::MemoizedDepInputs,
-    memoized_inputs::MemoizedInputs,
-    memoized_volatile::MemoizedVolatile
-)]
-#[derive(Default)]
-pub(crate) struct TestContextImpl {
-    storage: ra_salsa::Storage<TestContextImpl>,
-    clock: Counter,
-    log: Log,
-}
-
-impl TestContextImpl {
-    #[track_caller]
-    pub(crate) fn assert_log(&self, expected_log: &[&str]) {
-        let expected_text = &format!("{expected_log:#?}");
-        let actual_text = &format!("{:#?}", self.log().take());
-
-        if expected_text == actual_text {
-            return;
-        }
-
-        #[allow(clippy::print_stdout)]
-        for diff in dissimilar::diff(expected_text, actual_text) {
-            match diff {
-                dissimilar::Chunk::Delete(l) => println!("-{l}"),
-                dissimilar::Chunk::Equal(l) => println!(" {l}"),
-                dissimilar::Chunk::Insert(r) => println!("+{r}"),
-            }
-        }
-
-        panic!("incorrect log results");
-    }
-}
-
-impl TestContext for TestContextImpl {
-    fn clock(&self) -> &Counter {
-        &self.clock
-    }
-
-    fn log(&self) -> &Log {
-        &self.log
-    }
-}
-
-impl ra_salsa::Database for TestContextImpl {}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/incremental/log.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/incremental/log.rs
deleted file mode 100644
index 1ee57fe667d5f..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/incremental/log.rs
+++ /dev/null
@@ -1,16 +0,0 @@
-use std::cell::RefCell;
-
-#[derive(Default)]
-pub(crate) struct Log {
-    data: RefCell<Vec<String>>,
-}
-
-impl Log {
-    pub(crate) fn add(&self, text: impl Into<String>) {
-        self.data.borrow_mut().push(text.into());
-    }
-
-    pub(crate) fn take(&self) -> Vec<String> {
-        self.data.take()
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/incremental/main.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/incremental/main.rs
deleted file mode 100644
index bcd13c75f7157..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/incremental/main.rs
+++ /dev/null
@@ -1,9 +0,0 @@
-mod constants;
-mod counter;
-mod implementation;
-mod log;
-mod memoized_dep_inputs;
-mod memoized_inputs;
-mod memoized_volatile;
-
-fn main() {}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/incremental/memoized_dep_inputs.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/incremental/memoized_dep_inputs.rs
deleted file mode 100644
index 0043bb45745d8..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/incremental/memoized_dep_inputs.rs
+++ /dev/null
@@ -1,60 +0,0 @@
-use crate::implementation::{TestContext, TestContextImpl};
-
-#[ra_salsa::query_group(MemoizedDepInputs)]
-pub(crate) trait MemoizedDepInputsContext: TestContext {
-    fn dep_memoized2(&self) -> usize;
-    fn dep_memoized1(&self) -> usize;
-    #[ra_salsa::dependencies]
-    fn dep_derived1(&self) -> usize;
-    #[ra_salsa::input]
-    fn dep_input1(&self) -> usize;
-    #[ra_salsa::input]
-    fn dep_input2(&self) -> usize;
-}
-
-fn dep_memoized2(db: &dyn MemoizedDepInputsContext) -> usize {
-    db.log().add("Memoized2 invoked");
-    db.dep_memoized1()
-}
-
-fn dep_memoized1(db: &dyn MemoizedDepInputsContext) -> usize {
-    db.log().add("Memoized1 invoked");
-    db.dep_derived1() * 2
-}
-
-fn dep_derived1(db: &dyn MemoizedDepInputsContext) -> usize {
-    db.log().add("Derived1 invoked");
-    db.dep_input1() / 2
-}
-
-#[test]
-fn revalidate() {
-    let db = &mut TestContextImpl::default();
-
-    db.set_dep_input1(0);
-
-    // Initial run starts from Memoized2:
-    let v = db.dep_memoized2();
-    assert_eq!(v, 0);
-    db.assert_log(&["Memoized2 invoked", "Memoized1 invoked", "Derived1 invoked"]);
-
-    // After that, we first try to validate Memoized1 but wind up
-    // running Memoized2. Note that we don't try to validate
-    // Derived1, so it is invoked by Memoized1.
-    db.set_dep_input1(44);
-    let v = db.dep_memoized2();
-    assert_eq!(v, 44);
-    db.assert_log(&["Memoized1 invoked", "Derived1 invoked", "Memoized2 invoked"]);
-
-    // Here validation of Memoized1 succeeds so Memoized2 never runs.
-    db.set_dep_input1(45);
-    let v = db.dep_memoized2();
-    assert_eq!(v, 44);
-    db.assert_log(&["Memoized1 invoked", "Derived1 invoked"]);
-
-    // Here, a change to input2 doesn't affect us, so nothing runs.
-    db.set_dep_input2(45);
-    let v = db.dep_memoized2();
-    assert_eq!(v, 44);
-    db.assert_log(&[]);
-}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/incremental/memoized_inputs.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/incremental/memoized_inputs.rs
deleted file mode 100644
index 007dc3db95a34..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/incremental/memoized_inputs.rs
+++ /dev/null
@@ -1,76 +0,0 @@
-use crate::implementation::{TestContext, TestContextImpl};
-
-#[ra_salsa::query_group(MemoizedInputs)]
-pub(crate) trait MemoizedInputsContext: TestContext {
-    fn max(&self) -> usize;
-    #[ra_salsa::input]
-    fn input1(&self) -> usize;
-    #[ra_salsa::input]
-    fn input2(&self) -> usize;
-}
-
-fn max(db: &dyn MemoizedInputsContext) -> usize {
-    db.log().add("Max invoked");
-    std::cmp::max(db.input1(), db.input2())
-}
-
-#[test]
-fn revalidate() {
-    let db = &mut TestContextImpl::default();
-
-    db.set_input1(0);
-    db.set_input2(0);
-
-    let v = db.max();
-    assert_eq!(v, 0);
-    db.assert_log(&["Max invoked"]);
-
-    let v = db.max();
-    assert_eq!(v, 0);
-    db.assert_log(&[]);
-
-    db.set_input1(44);
-    db.assert_log(&[]);
-
-    let v = db.max();
-    assert_eq!(v, 44);
-    db.assert_log(&["Max invoked"]);
-
-    let v = db.max();
-    assert_eq!(v, 44);
-    db.assert_log(&[]);
-
-    db.set_input1(44);
-    db.assert_log(&[]);
-    db.set_input2(66);
-    db.assert_log(&[]);
-    db.set_input1(64);
-    db.assert_log(&[]);
-
-    let v = db.max();
-    assert_eq!(v, 66);
-    db.assert_log(&["Max invoked"]);
-
-    let v = db.max();
-    assert_eq!(v, 66);
-    db.assert_log(&[]);
-}
-
-/// Test that invoking `set` on an input with the same value still
-/// triggers a new revision.
-#[test]
-fn set_after_no_change() {
-    let db = &mut TestContextImpl::default();
-
-    db.set_input2(0);
-
-    db.set_input1(44);
-    let v = db.max();
-    assert_eq!(v, 44);
-    db.assert_log(&["Max invoked"]);
-
-    db.set_input1(44);
-    let v = db.max();
-    assert_eq!(v, 44);
-    db.assert_log(&["Max invoked"]);
-}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/incremental/memoized_volatile.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/incremental/memoized_volatile.rs
deleted file mode 100644
index cd00cc2e6ccb6..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/incremental/memoized_volatile.rs
+++ /dev/null
@@ -1,77 +0,0 @@
-use crate::implementation::{TestContext, TestContextImpl};
-use ra_salsa::{Database, Durability};
-
-#[ra_salsa::query_group(MemoizedVolatile)]
-pub(crate) trait MemoizedVolatileContext: TestContext {
-    // Queries for testing a "volatile" value wrapped by
-    // memoization.
-    fn memoized2(&self) -> usize;
-    fn memoized1(&self) -> usize;
-    fn volatile(&self) -> usize;
-}
-
-fn memoized2(db: &dyn MemoizedVolatileContext) -> usize {
-    db.log().add("Memoized2 invoked");
-    db.memoized1()
-}
-
-fn memoized1(db: &dyn MemoizedVolatileContext) -> usize {
-    db.log().add("Memoized1 invoked");
-    let v = db.volatile();
-    v / 2
-}
-
-fn volatile(db: &dyn MemoizedVolatileContext) -> usize {
-    db.log().add("Volatile invoked");
-    db.salsa_runtime().report_untracked_read();
-    db.clock().increment()
-}
-
-#[test]
-fn volatile_x2() {
-    let query = TestContextImpl::default();
-
-    // Invoking volatile twice doesn't execute twice, because volatile
-    // queries are memoized by default.
-    query.volatile();
-    query.volatile();
-    query.assert_log(&["Volatile invoked"]);
-}
-
-/// Test that:
-///
-/// - On the first run of R0, we recompute everything.
-/// - On the second run of R0, we recompute nothing.
-/// - On the first run of R1, we recompute Memoized1 but not Memoized2 (since Memoized1 result
-///   did not change).
-/// - On the second run of R1, we recompute nothing.
-/// - On the first run of R2, we recompute everything (since Memoized1 result *did* change).
-#[test]
-fn revalidate() {
-    let mut query = TestContextImpl::default();
-
-    query.memoized2();
-    query.assert_log(&["Memoized2 invoked", "Memoized1 invoked", "Volatile invoked"]);
-
-    query.memoized2();
-    query.assert_log(&[]);
-
-    // Second generation: volatile will change (to 1) but memoized1
-    // will not (still 0, as 1/2 = 0)
-    query.synthetic_write(Durability::LOW);
-    query.memoized2();
-    query.assert_log(&["Volatile invoked", "Memoized1 invoked"]);
-    query.memoized2();
-    query.assert_log(&[]);
-
-    // Third generation: volatile will change (to 2) and memoized1
-    // will too (to 1).  Therefore, after validating that Memoized1
-    // changed, we now invoke Memoized2.
-    query.synthetic_write(Durability::LOW);
-
-    query.memoized2();
-    query.assert_log(&["Volatile invoked", "Memoized1 invoked", "Memoized2 invoked"]);
-
-    query.memoized2();
-    query.assert_log(&[]);
-}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/interned.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/interned.rs
deleted file mode 100644
index 108b129fa3f76..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/interned.rs
+++ /dev/null
@@ -1,90 +0,0 @@
-//! Test that you can intern values and keys using `#[ra_salsa::interned]` queries.
-
-use ra_salsa::InternId;
-
-#[ra_salsa::database(InternStorage)]
-#[derive(Default)]
-struct Database {
-    storage: ra_salsa::Storage<Self>,
-}
-
-impl ra_salsa::Database for Database {}
-
-impl ra_salsa::ParallelDatabase for Database {
-    fn snapshot(&self) -> ra_salsa::Snapshot<Self> {
-        ra_salsa::Snapshot::new(Database { storage: self.storage.snapshot() })
-    }
-}
-
-#[ra_salsa::query_group(InternStorage)]
-trait Intern {
-    #[ra_salsa::interned]
-    fn intern1(&self, x: String) -> InternId;
-
-    #[ra_salsa::interned]
-    fn intern2(&self, x: String, y: String) -> InternId;
-
-    #[ra_salsa::interned]
-    fn intern_key(&self, x: String) -> InternKey;
-}
-
-#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
-pub struct InternKey(InternId);
-
-impl ra_salsa::InternKey for InternKey {
-    fn from_intern_id(v: InternId) -> Self {
-        InternKey(v)
-    }
-
-    fn as_intern_id(&self) -> InternId {
-        self.0
-    }
-}
-
-#[test]
-fn test_intern1() {
-    let db = Database::default();
-    let foo0 = db.intern1("foo".to_owned());
-    let bar0 = db.intern1("bar".to_owned());
-    let foo1 = db.intern1("foo".to_owned());
-    let bar1 = db.intern1("bar".to_owned());
-
-    assert_eq!(foo0, foo1);
-    assert_eq!(bar0, bar1);
-    assert_ne!(foo0, bar0);
-
-    assert_eq!("foo".to_owned(), db.lookup_intern1(foo0));
-    assert_eq!("bar".to_owned(), db.lookup_intern1(bar0));
-}
-
-#[test]
-fn test_intern2() {
-    let db = Database::default();
-    let foo0 = db.intern2("x".to_owned(), "foo".to_owned());
-    let bar0 = db.intern2("x".to_owned(), "bar".to_owned());
-    let foo1 = db.intern2("x".to_owned(), "foo".to_owned());
-    let bar1 = db.intern2("x".to_owned(), "bar".to_owned());
-
-    assert_eq!(foo0, foo1);
-    assert_eq!(bar0, bar1);
-    assert_ne!(foo0, bar0);
-
-    assert_eq!(("x".to_owned(), "foo".to_owned()), db.lookup_intern2(foo0));
-    assert_eq!(("x".to_owned(), "bar".to_owned()), db.lookup_intern2(bar0));
-}
-
-#[test]
-fn test_intern_key() {
-    let db = Database::default();
-    let foo0 = db.intern_key("foo".to_owned());
-    let bar0 = db.intern_key("bar".to_owned());
-    let foo1 = db.intern_key("foo".to_owned());
-    let bar1 = db.intern_key("bar".to_owned());
-
-    assert_eq!(foo0, foo1);
-    assert_eq!(bar0, bar1);
-    assert_ne!(foo0, bar0);
-
-    assert_eq!("foo".to_owned(), db.lookup_intern_key(foo0));
-    assert_eq!("bar".to_owned(), db.lookup_intern_key(bar0));
-}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/lru.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/lru.rs
deleted file mode 100644
index f351f24246868..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/lru.rs
+++ /dev/null
@@ -1,104 +0,0 @@
-//! Test that setting an LRU capacity actually limits the number of things in the database.
-use std::sync::{
-    atomic::{AtomicUsize, Ordering},
-    Arc,
-};
-
-#[derive(Debug, PartialEq, Eq)]
-struct HotPotato(u32);
-
-static N_POTATOES: AtomicUsize = AtomicUsize::new(0);
-
-impl HotPotato {
-    fn new(id: u32) -> HotPotato {
-        N_POTATOES.fetch_add(1, Ordering::SeqCst);
-        HotPotato(id)
-    }
-}
-
-impl Drop for HotPotato {
-    fn drop(&mut self) {
-        N_POTATOES.fetch_sub(1, Ordering::SeqCst);
-    }
-}
-
-#[ra_salsa::query_group(QueryGroupStorage)]
-trait QueryGroup: ra_salsa::Database {
-    #[ra_salsa::lru]
-    fn get(&self, x: u32) -> Arc<HotPotato>;
-    #[ra_salsa::lru]
-    fn get_volatile(&self, x: u32) -> usize;
-}
-
-fn get(_db: &dyn QueryGroup, x: u32) -> Arc<HotPotato> {
-    Arc::new(HotPotato::new(x))
-}
-
-fn get_volatile(db: &dyn QueryGroup, _x: u32) -> usize {
-    static COUNTER: AtomicUsize = AtomicUsize::new(0);
-    db.salsa_runtime().report_untracked_read();
-    COUNTER.fetch_add(1, Ordering::SeqCst)
-}
-
-#[ra_salsa::database(QueryGroupStorage)]
-#[derive(Default)]
-struct Database {
-    storage: ra_salsa::Storage<Self>,
-}
-
-impl ra_salsa::Database for Database {}
-
-#[test]
-fn lru_works() {
-    let mut db = Database::default();
-    GetQuery.in_db_mut(&mut db).set_lru_capacity(32);
-    assert_eq!(N_POTATOES.load(Ordering::SeqCst), 0);
-
-    for i in 0..128u32 {
-        let p = db.get(i);
-        assert_eq!(p.0, i)
-    }
-    assert_eq!(N_POTATOES.load(Ordering::SeqCst), 32);
-
-    for i in 0..128u32 {
-        let p = db.get(i);
-        assert_eq!(p.0, i)
-    }
-    assert_eq!(N_POTATOES.load(Ordering::SeqCst), 32);
-
-    GetQuery.in_db_mut(&mut db).set_lru_capacity(32);
-    assert_eq!(N_POTATOES.load(Ordering::SeqCst), 32);
-
-    GetQuery.in_db_mut(&mut db).set_lru_capacity(64);
-    assert_eq!(N_POTATOES.load(Ordering::SeqCst), 32);
-    for i in 0..128u32 {
-        let p = db.get(i);
-        assert_eq!(p.0, i)
-    }
-    assert_eq!(N_POTATOES.load(Ordering::SeqCst), 64);
-
-    // Special case: setting capacity to zero disables LRU
-    GetQuery.in_db_mut(&mut db).set_lru_capacity(0);
-    assert_eq!(N_POTATOES.load(Ordering::SeqCst), 64);
-    for i in 0..128u32 {
-        let p = db.get(i);
-        assert_eq!(p.0, i)
-    }
-    assert_eq!(N_POTATOES.load(Ordering::SeqCst), 128);
-
-    drop(db);
-    assert_eq!(N_POTATOES.load(Ordering::SeqCst), 0);
-}
-
-#[test]
-fn lru_doesnt_break_volatile_queries() {
-    let mut db = Database::default();
-    GetVolatileQuery.in_db_mut(&mut db).set_lru_capacity(32);
-    // Here, we check that we execute each volatile query at most once, despite
-    // LRU. That does mean that we have more values in DB than the LRU capacity,
-    // but it's much better than inconsistent results from volatile queries!
-    for i in (0..3).flat_map(|_| 0..128usize) {
-        let x = db.get_volatile(i as u32);
-        assert_eq!(x, i)
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/macros.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/macros.rs
deleted file mode 100644
index 7bb6369b500c1..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/macros.rs
+++ /dev/null
@@ -1,12 +0,0 @@
-#[ra_salsa::query_group(MyStruct)]
-trait MyDatabase: ra_salsa::Database {
-    #[ra_salsa::invoke(another_module::another_name)]
-    fn my_query(&self, key: ()) -> ();
-}
-
-mod another_module {
-    #[allow(dead_code)]
-    pub(crate) fn another_name(_: &dyn crate::MyDatabase, (): ()) {}
-}
-
-fn main() {}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/no_send_sync.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/no_send_sync.rs
deleted file mode 100644
index 56bd3f4a7ed18..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/no_send_sync.rs
+++ /dev/null
@@ -1,31 +0,0 @@
-use std::rc::Rc;
-
-#[ra_salsa::query_group(NoSendSyncStorage)]
-trait NoSendSyncDatabase: ra_salsa::Database {
-    fn no_send_sync_value(&self, key: bool) -> Rc<bool>;
-    fn no_send_sync_key(&self, key: Rc<bool>) -> bool;
-}
-
-fn no_send_sync_value(_db: &dyn NoSendSyncDatabase, key: bool) -> Rc<bool> {
-    Rc::new(key)
-}
-
-fn no_send_sync_key(_db: &dyn NoSendSyncDatabase, key: Rc<bool>) -> bool {
-    *key
-}
-
-#[ra_salsa::database(NoSendSyncStorage)]
-#[derive(Default)]
-struct DatabaseImpl {
-    storage: ra_salsa::Storage<Self>,
-}
-
-impl ra_salsa::Database for DatabaseImpl {}
-
-#[test]
-fn no_send_sync() {
-    let db = DatabaseImpl::default();
-
-    assert_eq!(db.no_send_sync_value(true), Rc::new(true));
-    assert!(!db.no_send_sync_key(Rc::new(false)));
-}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/on_demand_inputs.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/on_demand_inputs.rs
deleted file mode 100644
index 4d7832f9ba05b..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/on_demand_inputs.rs
+++ /dev/null
@@ -1,147 +0,0 @@
-//! Test that the "on-demand" input pattern works.
-//!
-//! On-demand inputs are inputs computed lazily on the fly. They are simulated
-//! via a query with zero inputs, which uses `add_synthetic_read` to
-//! tweak durability and `invalidate` to clear the input.
-
-#![allow(clippy::disallowed_types, clippy::type_complexity)]
-
-use std::{cell::RefCell, collections::HashMap, rc::Rc};
-
-use ra_salsa::{Database as _, Durability, EventKind};
-
-#[ra_salsa::query_group(QueryGroupStorage)]
-trait QueryGroup: ra_salsa::Database + AsRef<HashMap<u32, u32>> {
-    fn a(&self, x: u32) -> u32;
-    fn b(&self, x: u32) -> u32;
-    fn c(&self, x: u32) -> u32;
-}
-
-fn a(db: &dyn QueryGroup, x: u32) -> u32 {
-    let durability = if x % 2 == 0 { Durability::LOW } else { Durability::HIGH };
-    db.salsa_runtime().report_synthetic_read(durability);
-    let external_state: &HashMap<u32, u32> = db.as_ref();
-    external_state[&x]
-}
-
-fn b(db: &dyn QueryGroup, x: u32) -> u32 {
-    db.a(x)
-}
-
-fn c(db: &dyn QueryGroup, x: u32) -> u32 {
-    db.b(x)
-}
-
-#[ra_salsa::database(QueryGroupStorage)]
-#[derive(Default)]
-struct Database {
-    storage: ra_salsa::Storage<Self>,
-    external_state: HashMap<u32, u32>,
-    on_event: Option<Box<dyn Fn(&Database, ra_salsa::Event)>>,
-}
-
-impl ra_salsa::Database for Database {
-    fn salsa_event(&self, event: ra_salsa::Event) {
-        if let Some(cb) = &self.on_event {
-            cb(self, event)
-        }
-    }
-}
-
-impl AsRef<HashMap<u32, u32>> for Database {
-    fn as_ref(&self) -> &HashMap<u32, u32> {
-        &self.external_state
-    }
-}
-
-#[test]
-fn on_demand_input_works() {
-    let mut db = Database::default();
-
-    db.external_state.insert(1, 10);
-    assert_eq!(db.b(1), 10);
-    assert_eq!(db.a(1), 10);
-
-    // We changed external state, but haven't signaled about this yet,
-    // so we expect to see the old answer
-    db.external_state.insert(1, 92);
-    assert_eq!(db.b(1), 10);
-    assert_eq!(db.a(1), 10);
-
-    AQuery.in_db_mut(&mut db).invalidate(&1);
-    assert_eq!(db.b(1), 92);
-    assert_eq!(db.a(1), 92);
-
-    // Downstream queries should also be rerun if we call `a` first.
-    db.external_state.insert(1, 50);
-    AQuery.in_db_mut(&mut db).invalidate(&1);
-    assert_eq!(db.a(1), 50);
-    assert_eq!(db.b(1), 50);
-}
-
-#[test]
-fn on_demand_input_durability() {
-    let mut db = Database::default();
-
-    let events = Rc::new(RefCell::new(vec![]));
-    db.on_event = Some(Box::new({
-        let events = events.clone();
-        move |db, event| {
-            if let EventKind::WillCheckCancellation = event.kind {
-                // these events are not interesting
-            } else {
-                events.borrow_mut().push(format!("{:?}", event.debug(db)))
-            }
-        }
-    }));
-
-    events.replace(vec![]);
-    db.external_state.insert(1, 10);
-    db.external_state.insert(2, 20);
-    assert_eq!(db.b(1), 10);
-    assert_eq!(db.b(2), 20);
-    expect_test::expect![[r#"
-        RefCell {
-            value: [
-                "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: on_demand_inputs::BQuery::b(1) } }",
-                "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: on_demand_inputs::AQuery::a(1) } }",
-                "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: on_demand_inputs::BQuery::b(2) } }",
-                "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: on_demand_inputs::AQuery::a(2) } }",
-            ],
-        }
-    "#]].assert_debug_eq(&events);
-
-    db.synthetic_write(Durability::LOW);
-    events.replace(vec![]);
-    assert_eq!(db.c(1), 10);
-    assert_eq!(db.c(2), 20);
-    // Re-execute `a(2)` because that has low durability, but not `a(1)`
-    expect_test::expect![[r#"
-        RefCell {
-            value: [
-                "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: on_demand_inputs::CQuery::c(1) } }",
-                "Event { runtime_id: RuntimeId { counter: 0 }, kind: DidValidateMemoizedValue { database_key: on_demand_inputs::BQuery::b(1) } }",
-                "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: on_demand_inputs::CQuery::c(2) } }",
-                "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: on_demand_inputs::AQuery::a(2) } }",
-                "Event { runtime_id: RuntimeId { counter: 0 }, kind: DidValidateMemoizedValue { database_key: on_demand_inputs::BQuery::b(2) } }",
-            ],
-        }
-    "#]].assert_debug_eq(&events);
-
-    db.synthetic_write(Durability::HIGH);
-    events.replace(vec![]);
-    assert_eq!(db.c(1), 10);
-    assert_eq!(db.c(2), 20);
-    // Re-execute both `a(1)` and `a(2)`, but we don't re-execute any `b` queries as the
-    // result didn't actually change.
-    expect_test::expect![[r#"
-        RefCell {
-            value: [
-                "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: on_demand_inputs::AQuery::a(1) } }",
-                "Event { runtime_id: RuntimeId { counter: 0 }, kind: DidValidateMemoizedValue { database_key: on_demand_inputs::CQuery::c(1) } }",
-                "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: on_demand_inputs::AQuery::a(2) } }",
-                "Event { runtime_id: RuntimeId { counter: 0 }, kind: DidValidateMemoizedValue { database_key: on_demand_inputs::CQuery::c(2) } }",
-            ],
-        }
-    "#]].assert_debug_eq(&events);
-}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/panic_safely.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/panic_safely.rs
deleted file mode 100644
index 047a50eb4b2b4..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/panic_safely.rs
+++ /dev/null
@@ -1,93 +0,0 @@
-use ra_salsa::{Database, ParallelDatabase, Snapshot};
-use std::panic::{self, AssertUnwindSafe};
-use std::sync::atomic::{AtomicU32, Ordering::SeqCst};
-
-#[ra_salsa::query_group(PanicSafelyStruct)]
-trait PanicSafelyDatabase: ra_salsa::Database {
-    #[ra_salsa::input]
-    fn one(&self) -> usize;
-
-    fn panic_safely(&self) -> ();
-
-    fn outer(&self) -> ();
-}
-
-fn panic_safely(db: &dyn PanicSafelyDatabase) {
-    assert_eq!(db.one(), 1);
-}
-
-static OUTER_CALLS: AtomicU32 = AtomicU32::new(0);
-
-fn outer(db: &dyn PanicSafelyDatabase) {
-    OUTER_CALLS.fetch_add(1, SeqCst);
-    db.panic_safely();
-}
-
-#[ra_salsa::database(PanicSafelyStruct)]
-#[derive(Default)]
-struct DatabaseStruct {
-    storage: ra_salsa::Storage<Self>,
-}
-
-impl ra_salsa::Database for DatabaseStruct {}
-
-impl ra_salsa::ParallelDatabase for DatabaseStruct {
-    fn snapshot(&self) -> Snapshot<Self> {
-        Snapshot::new(DatabaseStruct { storage: self.storage.snapshot() })
-    }
-}
-
-#[test]
-fn should_panic_safely() {
-    let mut db = DatabaseStruct::default();
-    db.set_one(0);
-
-    // Invoke `db.panic_safely()` without having set `db.one`. `db.one` will
-    // return 0 and we should catch the panic.
-    let result = panic::catch_unwind(AssertUnwindSafe({
-        let db = db.snapshot();
-        move || db.panic_safely()
-    }));
-    assert!(result.is_err());
-
-    // Set `db.one` to 1 and assert ok
-    db.set_one(1);
-    let result = panic::catch_unwind(AssertUnwindSafe(|| db.panic_safely()));
-    assert!(result.is_ok());
-
-    // Check that memoized `outer` is not invalidated by a panic
-    {
-        assert_eq!(OUTER_CALLS.load(SeqCst), 0);
-        db.outer();
-        assert_eq!(OUTER_CALLS.load(SeqCst), 1);
-
-        db.set_one(0);
-        let result = panic::catch_unwind(AssertUnwindSafe(|| db.outer()));
-        assert!(result.is_err());
-        assert_eq!(OUTER_CALLS.load(SeqCst), 1);
-
-        db.set_one(1);
-        db.outer();
-        assert_eq!(OUTER_CALLS.load(SeqCst), 2);
-    }
-}
-
-#[test]
-fn storages_are_unwind_safe() {
-    fn check_unwind_safe<T: std::panic::UnwindSafe>() {}
-    check_unwind_safe::<&DatabaseStruct>();
-}
-
-#[test]
-fn panics_clear_query_stack() {
-    let db = DatabaseStruct::default();
-
-    // Invoke `db.panic_safely()` without having set `db.one`. `db.one`
-    // will default to 0 and we should catch the panic.
-    let result = panic::catch_unwind(AssertUnwindSafe(|| db.panic_safely()));
-    assert!(result.is_err());
-
-    // The panic must not leave a stale frame behind: the query stack
-    // should be empty again.
-    assert_eq!(db.salsa_runtime().active_query(), None);
-}
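`storages_are_unwind_safe` uses a common compile-time assertion idiom: a generic function whose only purpose is its trait bound, so instantiating it proves the bound holds. A standalone sketch of the same idiom with hypothetical types (the `impl RefUnwindSafe` opt-in mirrors the one on `LocalState` in the deleted module above):

```rust
use std::cell::RefCell;
use std::panic::{RefUnwindSafe, UnwindSafe};

// Instantiating these functions is the whole assertion: the code only
// compiles if the type argument satisfies the bound.
fn check_unwind_safe<T: UnwindSafe>() {}
fn check_send<T: Send>() {}

// `RefCell` is not `RefUnwindSafe`, so a wrapper holding one is not
// unwind-safe by reference unless we opt in explicitly, the way the
// deleted `LocalState` module does with its `impl RefUnwindSafe`.
#[allow(dead_code)]
struct Wrapper {
    cell: RefCell<u32>,
}

impl RefUnwindSafe for Wrapper {}

fn main() {
    // Both checks compile, which is all this test style needs.
    check_unwind_safe::<&Wrapper>();
    check_send::<Vec<u32>>();
}
```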
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/cancellation.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/cancellation.rs
deleted file mode 100644
index e47a8ef9aa813..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/cancellation.rs
+++ /dev/null
@@ -1,132 +0,0 @@
-use crate::setup::{CancellationFlag, Knobs, ParDatabase, ParDatabaseImpl, WithValue};
-use ra_salsa::{Cancelled, ParallelDatabase};
-
-macro_rules! assert_cancelled {
-    ($thread:expr) => {
-        match $thread.join() {
-            Ok(value) => panic!("expected cancellation, got {:?}", value),
-            Err(payload) => match payload.downcast::<Cancelled>() {
-                Ok(_) => {}
-                Err(payload) => ::std::panic::resume_unwind(payload),
-            },
-        }
-    };
-}
-
-/// Test that a call to `sum` which is cancelled by a simultaneous
-/// write is recomputed in the next revision, even though none of the
-/// inputs have changed.
-#[test]
-fn in_par_get_set_cancellation_immediate() {
-    let mut db = ParDatabaseImpl::default();
-
-    db.set_input('a', 100);
-    db.set_input('b', 10);
-    db.set_input('c', 1);
-    db.set_input('d', 0);
-
-    let thread1 = std::thread::spawn({
-        let db = db.snapshot();
-        move || {
-            // This will not return until it sees cancellation is
-            // signaled.
-            db.knobs().sum_signal_on_entry.with_value(1, || {
-                db.knobs()
-                    .sum_wait_for_cancellation
-                    .with_value(CancellationFlag::Panic, || db.sum("abc"))
-            })
-        }
-    });
-
-    // Wait until we have entered `sum` in the other thread.
-    db.wait_for(1);
-
-    // Try to set the input. This will signal cancellation.
-    db.set_input('d', 1000);
-
-    // This should re-compute the value (even though no input has changed).
-    let thread2 = std::thread::spawn({
-        let db = db.snapshot();
-        move || db.sum("abc")
-    });
-
-    assert_eq!(db.sum("d"), 1000);
-    assert_cancelled!(thread1);
-    assert_eq!(thread2.join().unwrap(), 111);
-}
-
-/// Here, we check that `sum`'s cancellation is propagated
-/// to `sum2` properly.
-#[test]
-fn in_par_get_set_cancellation_transitive() {
-    let mut db = ParDatabaseImpl::default();
-
-    db.set_input('a', 100);
-    db.set_input('b', 10);
-    db.set_input('c', 1);
-    db.set_input('d', 0);
-
-    let thread1 = std::thread::spawn({
-        let db = db.snapshot();
-        move || {
-            // This will not return until it sees cancellation is
-            // signaled.
-            db.knobs().sum_signal_on_entry.with_value(1, || {
-                db.knobs()
-                    .sum_wait_for_cancellation
-                    .with_value(CancellationFlag::Panic, || db.sum2("abc"))
-            })
-        }
-    });
-
-    // Wait until we have entered `sum` in the other thread.
-    db.wait_for(1);
-
-    // Try to set the input. This will signal cancellation.
-    db.set_input('d', 1000);
-
-    // This should re-compute the value (even though no input has changed).
-    let thread2 = std::thread::spawn({
-        let db = db.snapshot();
-        move || db.sum2("abc")
-    });
-
-    assert_eq!(db.sum2("d"), 1000);
-    assert_cancelled!(thread1);
-    assert_eq!(thread2.join().unwrap(), 111);
-}
-
-/// https://github.com/ra_salsa-rs/ra_salsa/issues/66
-#[test]
-fn no_back_dating_in_cancellation() {
-    let mut db = ParDatabaseImpl::default();
-
-    db.set_input('a', 1);
-    let thread1 = std::thread::spawn({
-        let db = db.snapshot();
-        move || {
-            // Here we compute a long-chain of queries,
-            // but the last one gets cancelled.
-            db.knobs().sum_signal_on_entry.with_value(1, || {
-                db.knobs()
-                    .sum_wait_for_cancellation
-                    .with_value(CancellationFlag::Panic, || db.sum3("a"))
-            })
-        }
-    });
-
-    db.wait_for(1);
-
-    // Set unrelated input to bump revision
-    db.set_input('b', 2);
-
-    // Here we should recompute the whole chain again, clearing the cancellation
-    // state. If we get `usize::max()` here, it is a bug!
-    assert_eq!(db.sum3("a"), 1);
-
-    assert_cancelled!(thread1);
-
-    db.set_input('a', 3);
-    db.set_input('a', 4);
-    assert_eq!(db.sum3("ab"), 6);
-}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/frozen.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/frozen.rs
deleted file mode 100644
index 9e42e26151750..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/frozen.rs
+++ /dev/null
@@ -1,57 +0,0 @@
-use crate::setup::{ParDatabase, ParDatabaseImpl};
-use crate::signal::Signal;
-use ra_salsa::{Database, ParallelDatabase};
-use std::{
-    panic::{catch_unwind, AssertUnwindSafe},
-    sync::Arc,
-};
-
-/// Test where a call to `sum` is cancelled by a simultaneous
-/// write. Check that we recompute the result in the next revision, even
-/// though none of the inputs have changed.
-#[test]
-fn in_par_get_set_cancellation() {
-    let mut db = ParDatabaseImpl::default();
-
-    db.set_input('a', 1);
-
-    let signal = Arc::new(Signal::default());
-
-    let thread1 = std::thread::spawn({
-        let db = db.snapshot();
-        let signal = signal.clone();
-        move || {
-            // Check that cancellation flag is not yet set, because
-            // `set` cannot have been called yet.
-            catch_unwind(AssertUnwindSafe(|| db.unwind_if_cancelled())).unwrap();
-
-            // Signal other thread to proceed.
-            signal.signal(1);
-
-            // Wait for other thread to signal cancellation
-            catch_unwind(AssertUnwindSafe(|| loop {
-                db.unwind_if_cancelled();
-                std::thread::yield_now();
-            }))
-            .unwrap_err();
-        }
-    });
-
-    let thread2 = std::thread::spawn({
-        move || {
-            // Wait until thread 1 has asserted that it is not cancelled
-            // before we invoke `set`.
-            signal.wait_for(1);
-
-            // This will block until thread1 drops the revision lock.
-            db.set_input('a', 2);
-
-            db.input('a')
-        }
-    });
-
-    thread1.join().unwrap();
-
-    let c = thread2.join().unwrap();
-    assert_eq!(c, 2);
-}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/independent.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/independent.rs
deleted file mode 100644
index cbbac0608d194..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/independent.rs
+++ /dev/null
@@ -1,29 +0,0 @@
-use crate::setup::{ParDatabase, ParDatabaseImpl};
-use ra_salsa::ParallelDatabase;
-
-/// Test two `sum` queries (on distinct keys) executing in different
-/// threads. Really just a test that `snapshot` etc compiles.
-#[test]
-fn in_par_two_independent_queries() {
-    let mut db = ParDatabaseImpl::default();
-
-    db.set_input('a', 100);
-    db.set_input('b', 10);
-    db.set_input('c', 1);
-    db.set_input('d', 200);
-    db.set_input('e', 20);
-    db.set_input('f', 2);
-
-    let thread1 = std::thread::spawn({
-        let db = db.snapshot();
-        move || db.sum("abc")
-    });
-
-    let thread2 = std::thread::spawn({
-        let db = db.snapshot();
-        move || db.sum("def")
-    });
-
-    assert_eq!(thread1.join().unwrap(), 111);
-    assert_eq!(thread2.join().unwrap(), 222);
-}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/main.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/main.rs
deleted file mode 100644
index 31c0da1837570..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/main.rs
+++ /dev/null
@@ -1,13 +0,0 @@
-mod setup;
-
-mod cancellation;
-mod frozen;
-mod independent;
-mod parallel_cycle_all_recover;
-mod parallel_cycle_mid_recover;
-mod parallel_cycle_none_recover;
-mod parallel_cycle_one_recovers;
-mod race;
-mod signal;
-mod stress;
-mod true_parallel;
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/parallel_cycle_all_recover.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/parallel_cycle_all_recover.rs
deleted file mode 100644
index dabdb3babc00b..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/parallel_cycle_all_recover.rs
+++ /dev/null
@@ -1,109 +0,0 @@
-//! Test for cycle recover spread across two threads.
-//! See `../cycles.rs` for a complete listing of cycle tests,
-//! both intra and cross thread.
-
-use crate::setup::{Knobs, ParDatabaseImpl};
-use ra_salsa::ParallelDatabase;
-
-// Recover cycle test:
-//
-// The pattern is as follows.
-//
-// Thread A                   Thread B
-// --------                   --------
-// a1                         b1
-// |                          wait for stage 1 (blocks)
-// signal stage 1             |
-// wait for stage 2 (blocks)  (unblocked)
-// |                          signal stage 2
-// (unblocked)                wait for stage 3 (blocks)
-// a2                         |
-// b1 (blocks -> stage 3)     |
-// |                          (unblocked)
-// |                          b2
-// |                          a1 (cycle detected, recovers)
-// |                          b2 completes, recovers
-// |                          b1 completes, recovers
-// a2 sees cycle, recovers
-// a1 completes, recovers
-
-#[test]
-fn parallel_cycle_all_recover() {
-    let db = ParDatabaseImpl::default();
-    db.knobs().signal_on_will_block.set(3);
-
-    let thread_a = std::thread::spawn({
-        let db = db.snapshot();
-        move || db.a1(1)
-    });
-
-    let thread_b = std::thread::spawn({
-        let db = db.snapshot();
-        move || db.b1(1)
-    });
-
-    assert_eq!(thread_a.join().unwrap(), 11);
-    assert_eq!(thread_b.join().unwrap(), 21);
-}
-
-#[ra_salsa::query_group(ParallelCycleAllRecover)]
-pub(crate) trait TestDatabase: Knobs {
-    #[ra_salsa::cycle(recover_a1)]
-    fn a1(&self, key: i32) -> i32;
-
-    #[ra_salsa::cycle(recover_a2)]
-    fn a2(&self, key: i32) -> i32;
-
-    #[ra_salsa::cycle(recover_b1)]
-    fn b1(&self, key: i32) -> i32;
-
-    #[ra_salsa::cycle(recover_b2)]
-    fn b2(&self, key: i32) -> i32;
-}
-
-fn recover_a1(_db: &dyn TestDatabase, _cycle: &ra_salsa::Cycle, key: &i32) -> i32 {
-    tracing::debug!("recover_a1");
-    key * 10 + 1
-}
-
-fn recover_a2(_db: &dyn TestDatabase, _cycle: &ra_salsa::Cycle, key: &i32) -> i32 {
-    tracing::debug!("recover_a2");
-    key * 10 + 2
-}
-
-fn recover_b1(_db: &dyn TestDatabase, _cycle: &ra_salsa::Cycle, key: &i32) -> i32 {
-    tracing::debug!("recover_b1");
-    key * 20 + 1
-}
-
-fn recover_b2(_db: &dyn TestDatabase, _cycle: &ra_salsa::Cycle, key: &i32) -> i32 {
-    tracing::debug!("recover_b2");
-    key * 20 + 2
-}
-
-fn a1(db: &dyn TestDatabase, key: i32) -> i32 {
-    // Wait to create the cycle until both threads have entered
-    db.signal(1);
-    db.wait_for(2);
-
-    db.a2(key)
-}
-
-fn a2(db: &dyn TestDatabase, key: i32) -> i32 {
-    db.b1(key)
-}
-
-fn b1(db: &dyn TestDatabase, key: i32) -> i32 {
-    // Wait to create the cycle until both threads have entered
-    db.wait_for(1);
-    db.signal(2);
-
-    // Wait for thread A to block on this thread
-    db.wait_for(3);
-
-    db.b2(key)
-}
-
-fn b2(db: &dyn TestDatabase, key: i32) -> i32 {
-    db.a1(key)
-}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/parallel_cycle_mid_recover.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/parallel_cycle_mid_recover.rs
deleted file mode 100644
index 20c508e0b8bf9..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/parallel_cycle_mid_recover.rs
+++ /dev/null
@@ -1,109 +0,0 @@
-//! Test for cycle recover spread across two threads.
-//! See `../cycles.rs` for a complete listing of cycle tests,
-//! both intra and cross thread.
-
-use crate::setup::{Knobs, ParDatabaseImpl};
-use ra_salsa::ParallelDatabase;
-
-// Recover cycle test:
-//
-// The pattern is as follows.
-//
-// Thread A                   Thread B
-// --------                   --------
-// a1                         b1
-// |                          wait for stage 1 (blocks)
-// signal stage 1             |
-// wait for stage 2 (blocks)  (unblocked)
-// |                          |
-// |                          b2
-// |                          b3
-// |                          a1 (blocks -> stage 2)
-// (unblocked)                |
-// a2 (cycle detected)        |
-//                            b3 recovers
-//                            b2 resumes
-//                            b1 panics because of a bug
-
-#[test]
-fn parallel_cycle_mid_recovers() {
-    let db = ParDatabaseImpl::default();
-    db.knobs().signal_on_will_block.set(2);
-
-    let thread_a = std::thread::spawn({
-        let db = db.snapshot();
-        move || db.a1(1)
-    });
-
-    let thread_b = std::thread::spawn({
-        let db = db.snapshot();
-        move || db.b1(1)
-    });
-
-    // We expect that the recovery function yields
-    // `1 * 20 + 2`, which is returned (and forwarded)
-    // to b1, and from there to a2 and a1.
-    assert_eq!(thread_a.join().unwrap(), 22);
-    assert_eq!(thread_b.join().unwrap(), 22);
-}
-
-#[ra_salsa::query_group(ParallelCycleMidRecovers)]
-pub(crate) trait TestDatabase: Knobs {
-    fn a1(&self, key: i32) -> i32;
-
-    fn a2(&self, key: i32) -> i32;
-
-    #[ra_salsa::cycle(recover_b1)]
-    fn b1(&self, key: i32) -> i32;
-
-    fn b2(&self, key: i32) -> i32;
-
-    #[ra_salsa::cycle(recover_b3)]
-    fn b3(&self, key: i32) -> i32;
-}
-
-fn recover_b1(_db: &dyn TestDatabase, _cycle: &ra_salsa::Cycle, key: &i32) -> i32 {
-    tracing::debug!("recover_b1");
-    key * 20 + 2
-}
-
-fn recover_b3(_db: &dyn TestDatabase, _cycle: &ra_salsa::Cycle, key: &i32) -> i32 {
-    tracing::debug!("recover_b1");
-    key * 200 + 2
-}
-
-fn a1(db: &dyn TestDatabase, key: i32) -> i32 {
-    // tell thread b we have started
-    db.signal(1);
-
-    // wait for thread b to block on a1
-    db.wait_for(2);
-
-    db.a2(key)
-}
-
-fn a2(db: &dyn TestDatabase, key: i32) -> i32 {
-    // create the cycle
-    db.b1(key)
-}
-
-fn b1(db: &dyn TestDatabase, key: i32) -> i32 {
-    // wait for thread a to have started
-    db.wait_for(1);
-
-    db.b2(key);
-
-    0
-}
-
-fn b2(db: &dyn TestDatabase, key: i32) -> i32 {
-    // will encounter a cycle but recover
-    db.b3(key);
-    db.b1(key); // hasn't recovered yet
-    0
-}
-
-fn b3(db: &dyn TestDatabase, key: i32) -> i32 {
-    // will block on thread a, signaling stage 2
-    db.a1(key)
-}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/parallel_cycle_none_recover.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/parallel_cycle_none_recover.rs
deleted file mode 100644
index 88d5fee0a22a4..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/parallel_cycle_none_recover.rs
+++ /dev/null
@@ -1,68 +0,0 @@
-//! Test a cycle that occurs across threads where no queries recover.
-//! See `../cycles.rs` for a complete listing of cycle tests,
-//! both intra and cross thread.
-
-use crate::setup::{Knobs, ParDatabaseImpl};
-use expect_test::expect;
-use ra_salsa::ParallelDatabase;
-
-#[test]
-fn parallel_cycle_none_recover() {
-    let db = ParDatabaseImpl::default();
-    db.knobs().signal_on_will_block.set(3);
-
-    let thread_a = std::thread::spawn({
-        let db = db.snapshot();
-        move || db.a(-1)
-    });
-
-    let thread_b = std::thread::spawn({
-        let db = db.snapshot();
-        move || db.b(-1)
-    });
-
-    // We expect B to panic because it detects a cycle (it is the one that calls A, ultimately).
-    // Right now, it panics with a string.
-    let err_b = thread_b.join().unwrap_err();
-    if let Some(c) = err_b.downcast_ref::<ra_salsa::Cycle>() {
-        expect![[r#"
-            [
-                "parallel::parallel_cycle_none_recover::AQuery::a(-1)",
-                "parallel::parallel_cycle_none_recover::BQuery::b(-1)",
-            ]
-        "#]]
-        .assert_debug_eq(&c.unexpected_participants(&db));
-    } else {
-        panic!("b failed in an unexpected way: {err_b:?}");
-    }
-
-    // We expect A to propagate a panic, which causes us to use the sentinel
-    // type `Cancelled`.
-    assert!(thread_a.join().unwrap_err().downcast_ref::<ra_salsa::Cycle>().is_some());
-}
-
-#[ra_salsa::query_group(ParallelCycleNoneRecover)]
-pub(crate) trait TestDatabase: Knobs {
-    fn a(&self, key: i32) -> i32;
-    fn b(&self, key: i32) -> i32;
-}
-
-fn a(db: &dyn TestDatabase, key: i32) -> i32 {
-    // Wait to create the cycle until both threads have entered
-    db.signal(1);
-    db.wait_for(2);
-
-    db.b(key)
-}
-
-fn b(db: &dyn TestDatabase, key: i32) -> i32 {
-    // Wait to create the cycle until both threads have entered
-    db.wait_for(1);
-    db.signal(2);
-
-    // Wait for thread A to block on this thread
-    db.wait_for(3);
-
-    // Now try to execute A
-    db.a(key)
-}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/parallel_cycle_one_recovers.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/parallel_cycle_one_recovers.rs
deleted file mode 100644
index 074ed1bd349de..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/parallel_cycle_one_recovers.rs
+++ /dev/null
@@ -1,94 +0,0 @@
-//! Test for cycle recover spread across two threads.
-//! See `../cycles.rs` for a complete listing of cycle tests,
-//! both intra and cross thread.
-
-use crate::setup::{Knobs, ParDatabaseImpl};
-use ra_salsa::ParallelDatabase;
-
-// Recover cycle test:
-//
-// The pattern is as follows.
-//
-// Thread A                   Thread B
-// --------                   --------
-// a1                         b1
-// |                          wait for stage 1 (blocks)
-// signal stage 1             |
-// wait for stage 2 (blocks)  (unblocked)
-// |                          signal stage 2
-// (unblocked)                wait for stage 3 (blocks)
-// a2                         |
-// b1 (blocks -> stage 3)     |
-// |                          (unblocked)
-// |                          b2
-// |                          a1 (cycle detected)
-// a2 recovery fn executes    |
-// a1 completes normally      |
-//                            b2 completes, recovers
-//                            b1 completes, recovers
-
-#[test]
-fn parallel_cycle_one_recovers() {
-    let db = ParDatabaseImpl::default();
-    db.knobs().signal_on_will_block.set(3);
-
-    let thread_a = std::thread::spawn({
-        let db = db.snapshot();
-        move || db.a1(1)
-    });
-
-    let thread_b = std::thread::spawn({
-        let db = db.snapshot();
-        move || db.b1(1)
-    });
-
-    // We expect that the recovery function yields
-    // `1 * 20 + 2`, which is returned (and forwarded)
-    // to b1, and from there to a2 and a1.
-    assert_eq!(thread_a.join().unwrap(), 22);
-    assert_eq!(thread_b.join().unwrap(), 22);
-}
-
-#[ra_salsa::query_group(ParallelCycleOneRecovers)]
-pub(crate) trait TestDatabase: Knobs {
-    fn a1(&self, key: i32) -> i32;
-
-    #[ra_salsa::cycle(recover)]
-    fn a2(&self, key: i32) -> i32;
-
-    fn b1(&self, key: i32) -> i32;
-
-    fn b2(&self, key: i32) -> i32;
-}
-
-fn recover(_db: &dyn TestDatabase, _cycle: &ra_salsa::Cycle, key: &i32) -> i32 {
-    tracing::debug!("recover");
-    key * 20 + 2
-}
-
-fn a1(db: &dyn TestDatabase, key: i32) -> i32 {
-    // Wait to create the cycle until both threads have entered
-    db.signal(1);
-    db.wait_for(2);
-
-    db.a2(key)
-}
-
-fn a2(db: &dyn TestDatabase, key: i32) -> i32 {
-    db.b1(key)
-}
-
-fn b1(db: &dyn TestDatabase, key: i32) -> i32 {
-    // Wait to create the cycle until both threads have entered
-    db.wait_for(1);
-    db.signal(2);
-
-    // Wait for thread A to block on this thread
-    db.wait_for(3);
-
-    db.b2(key)
-}
-
-fn b2(db: &dyn TestDatabase, key: i32) -> i32 {
-    db.a1(key)
-}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/race.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/race.rs
deleted file mode 100644
index 7aa6d4530b4a7..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/race.rs
+++ /dev/null
@@ -1,37 +0,0 @@
-use std::panic::AssertUnwindSafe;
-
-use crate::setup::{ParDatabase, ParDatabaseImpl};
-use ra_salsa::{Cancelled, ParallelDatabase};
-
-/// Test where a read and a set are racing with one another.
-/// Should be atomic.
-#[test]
-fn in_par_get_set_race() {
-    let mut db = ParDatabaseImpl::default();
-
-    db.set_input('a', 100);
-    db.set_input('b', 10);
-    db.set_input('c', 1);
-
-    let thread1 = std::thread::spawn({
-        let db = db.snapshot();
-        move || Cancelled::catch(AssertUnwindSafe(|| db.sum("abc")))
-    });
-
-    let thread2 = std::thread::spawn(move || {
-        db.set_input('a', 1000);
-        db.sum("a")
-    });
-
-    // If the 1st thread runs first, you get 111, otherwise you get
-    // 1011; if they run concurrently and the 1st thread observes the
-    // cancellation, it'll unwind.
-    let result1 = thread1.join().unwrap();
-    if let Ok(value1) = result1 {
-        assert!(value1 == 111 || value1 == 1011, "illegal result {value1}");
-    }
-
-    // thread2 can not observe a cancellation because it performs a
-    // database write before running any other queries.
-    assert_eq!(thread2.join().unwrap(), 1000);
-}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/setup.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/setup.rs
deleted file mode 100644
index fd1f51326e320..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/setup.rs
+++ /dev/null
@@ -1,197 +0,0 @@
-use crate::signal::Signal;
-use ra_salsa::Database;
-use ra_salsa::ParallelDatabase;
-use ra_salsa::Snapshot;
-use std::sync::Arc;
-use std::{
-    cell::Cell,
-    panic::{catch_unwind, resume_unwind, AssertUnwindSafe},
-};
-
-#[ra_salsa::query_group(Par)]
-pub(crate) trait ParDatabase: Knobs {
-    #[ra_salsa::input]
-    fn input(&self, key: char) -> usize;
-
-    fn sum(&self, key: &'static str) -> usize;
-
-    /// Invokes `sum`
-    fn sum2(&self, key: &'static str) -> usize;
-
-    /// Invokes `sum` but doesn't really care about the result.
-    fn sum2_drop_sum(&self, key: &'static str) -> usize;
-
-    /// Invokes `sum2`
-    fn sum3(&self, key: &'static str) -> usize;
-
-    /// Invokes `sum2_drop_sum`
-    fn sum3_drop_sum(&self, key: &'static str) -> usize;
-}
-
-/// Various "knobs" and utilities used by tests to force
-/// a certain behavior.
-pub(crate) trait Knobs {
-    fn knobs(&self) -> &KnobsStruct;
-
-    fn signal(&self, stage: usize);
-
-    fn wait_for(&self, stage: usize);
-}
-
-pub(crate) trait WithValue<T> {
-    fn with_value<R>(&self, value: T, closure: impl FnOnce() -> R) -> R;
-}
-
-impl<T> WithValue<T> for Cell<T> {
-    fn with_value<R>(&self, value: T, closure: impl FnOnce() -> R) -> R {
-        let old_value = self.replace(value);
-
-        let result = catch_unwind(AssertUnwindSafe(closure));
-
-        self.set(old_value);
-
-        match result {
-            Ok(r) => r,
-            Err(payload) => resume_unwind(payload),
-        }
-    }
-}
-
-#[derive(Default, Clone, Copy, PartialEq, Eq)]
-pub(crate) enum CancellationFlag {
-    #[default]
-    Down,
-    Panic,
-}
-
-/// Various "knobs" that can be used to customize how the queries
-/// behave on one specific thread. Note that this state is
-/// intentionally thread-local (apart from `signal`).
-#[derive(Clone, Default)]
-pub(crate) struct KnobsStruct {
-    /// A kind of flexible barrier used to coordinate execution across
-    /// threads to ensure we reach various weird states.
-    pub(crate) signal: Arc<Signal>,
-
-    /// When this database is about to block, send a signal.
-    pub(crate) signal_on_will_block: Cell<usize>,
-
-    /// Invocations of `sum` will signal this stage on entry.
-    pub(crate) sum_signal_on_entry: Cell<usize>,
-
-    /// Invocations of `sum` will wait for this stage on entry.
-    pub(crate) sum_wait_for_on_entry: Cell<usize>,
-
-    /// If true, invocations of `sum` will panic before they exit.
-    pub(crate) sum_should_panic: Cell<bool>,
-
-    /// If set to `Panic`, invocations of `sum` will wait for cancellation before
-    /// they exit.
-    pub(crate) sum_wait_for_cancellation: Cell<CancellationFlag>,
-
-    /// Invocations of `sum` will wait for this stage prior to exiting.
-    pub(crate) sum_wait_for_on_exit: Cell<usize>,
-
-    /// Invocations of `sum` will signal this stage prior to exiting.
-    pub(crate) sum_signal_on_exit: Cell<usize>,
-
-    /// Invocations of `sum3_drop_sum` will panic unconditionally
-    pub(crate) sum3_drop_sum_should_panic: Cell<bool>,
-}
-
-fn sum(db: &dyn ParDatabase, key: &'static str) -> usize {
-    let mut sum = 0;
-
-    db.signal(db.knobs().sum_signal_on_entry.get());
-
-    db.wait_for(db.knobs().sum_wait_for_on_entry.get());
-
-    if db.knobs().sum_should_panic.get() {
-        panic!("query set to panic before exit")
-    }
-
-    for ch in key.chars() {
-        sum += db.input(ch);
-    }
-
-    match db.knobs().sum_wait_for_cancellation.get() {
-        CancellationFlag::Down => (),
-        CancellationFlag::Panic => {
-            tracing::debug!("waiting for cancellation");
-            loop {
-                db.unwind_if_cancelled();
-                std::thread::yield_now();
-            }
-        }
-    }
-
-    db.wait_for(db.knobs().sum_wait_for_on_exit.get());
-
-    db.signal(db.knobs().sum_signal_on_exit.get());
-
-    sum
-}
-
-fn sum2(db: &dyn ParDatabase, key: &'static str) -> usize {
-    db.sum(key)
-}
-
-fn sum2_drop_sum(db: &dyn ParDatabase, key: &'static str) -> usize {
-    let _ = db.sum(key);
-    22
-}
-
-fn sum3(db: &dyn ParDatabase, key: &'static str) -> usize {
-    db.sum2(key)
-}
-
-fn sum3_drop_sum(db: &dyn ParDatabase, key: &'static str) -> usize {
-    if db.knobs().sum3_drop_sum_should_panic.get() {
-        panic!("sum3_drop_sum executed")
-    }
-    db.sum2_drop_sum(key)
-}
-
-#[ra_salsa::database(
-    Par,
-    crate::parallel_cycle_all_recover::ParallelCycleAllRecover,
-    crate::parallel_cycle_none_recover::ParallelCycleNoneRecover,
-    crate::parallel_cycle_mid_recover::ParallelCycleMidRecovers,
-    crate::parallel_cycle_one_recovers::ParallelCycleOneRecovers
-)]
-#[derive(Default)]
-pub(crate) struct ParDatabaseImpl {
-    storage: ra_salsa::Storage<Self>,
-    knobs: KnobsStruct,
-}
-
-impl Database for ParDatabaseImpl {
-    fn salsa_event(&self, event: ra_salsa::Event) {
-        if let ra_salsa::EventKind::WillBlockOn { .. } = event.kind {
-            self.signal(self.knobs().signal_on_will_block.get());
-        }
-    }
-}
-
-impl ParallelDatabase for ParDatabaseImpl {
-    fn snapshot(&self) -> Snapshot<Self> {
-        Snapshot::new(ParDatabaseImpl {
-            storage: self.storage.snapshot(),
-            knobs: self.knobs.clone(),
-        })
-    }
-}
-
-impl Knobs for ParDatabaseImpl {
-    fn knobs(&self) -> &KnobsStruct {
-        &self.knobs
-    }
-
-    fn signal(&self, stage: usize) {
-        self.knobs.signal.signal(stage);
-    }
-
-    fn wait_for(&self, stage: usize) {
-        self.knobs.signal.wait_for(stage);
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/signal.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/signal.rs
deleted file mode 100644
index 0af7b66e4826d..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/signal.rs
+++ /dev/null
@@ -1,40 +0,0 @@
-use parking_lot::{Condvar, Mutex};
-
-#[derive(Default)]
-pub(crate) struct Signal {
-    value: Mutex<usize>,
-    cond_var: Condvar,
-}
-
-impl Signal {
-    pub(crate) fn signal(&self, stage: usize) {
-        tracing::debug!("signal({})", stage);
-
-        // This check avoids acquiring the lock for things that will
-        // clearly be a no-op. Not *necessary* but helps to ensure we
-        // are more likely to encounter weird race conditions;
-        // otherwise calls to `sum` will tend to be unnecessarily
-        // synchronous.
-        if stage > 0 {
-            let mut v = self.value.lock();
-            if stage > *v {
-                *v = stage;
-                self.cond_var.notify_all();
-            }
-        }
-    }
-
-    /// Waits until the signal value has reached at least the
-    /// given stage.
-    pub(crate) fn wait_for(&self, stage: usize) {
-        tracing::debug!("wait_for({})", stage);
-
-        // As above, avoid lock if clearly a no-op.
-        if stage > 0 {
-            let mut v = self.value.lock();
-            while *v < stage {
-                self.cond_var.wait(&mut v);
-            }
-        }
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/stress.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/stress.rs
deleted file mode 100644
index f3a435b47f147..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/stress.rs
+++ /dev/null
@@ -1,168 +0,0 @@
-use rand::seq::SliceRandom;
-use rand::Rng;
-
-use ra_salsa::ParallelDatabase;
-use ra_salsa::Snapshot;
-use ra_salsa::{Cancelled, Database};
-
-// Number of operations the mutator and each reader thread perform
-const N_MUTATOR_OPS: usize = 100;
-const N_READER_OPS: usize = 100;
-
-#[ra_salsa::query_group(Stress)]
-trait StressDatabase: ra_salsa::Database {
-    #[ra_salsa::input]
-    fn a(&self, key: usize) -> usize;
-
-    fn b(&self, key: usize) -> usize;
-
-    fn c(&self, key: usize) -> usize;
-}
-
-fn b(db: &dyn StressDatabase, key: usize) -> usize {
-    db.unwind_if_cancelled();
-    db.a(key)
-}
-
-fn c(db: &dyn StressDatabase, key: usize) -> usize {
-    db.b(key)
-}
-
-#[ra_salsa::database(Stress)]
-#[derive(Default)]
-struct StressDatabaseImpl {
-    storage: ra_salsa::Storage<Self>,
-}
-
-impl ra_salsa::Database for StressDatabaseImpl {}
-
-impl ra_salsa::ParallelDatabase for StressDatabaseImpl {
-    fn snapshot(&self) -> Snapshot<StressDatabaseImpl> {
-        Snapshot::new(StressDatabaseImpl { storage: self.storage.snapshot() })
-    }
-}
-
-#[derive(Clone, Copy, Debug)]
-enum Query {
-    A,
-    B,
-    C,
-}
-
-enum MutatorOp {
-    WriteOp(WriteOp),
-    LaunchReader { ops: Vec<ReadOp>, check_cancellation: bool },
-}
-
-#[derive(Debug)]
-enum WriteOp {
-    SetA(usize, usize),
-}
-
-#[derive(Debug)]
-enum ReadOp {
-    Get(Query, usize),
-}
-
-impl rand::distributions::Distribution<Query> for rand::distributions::Standard {
-    fn sample<R: rand::Rng + ?Sized>(&self, rng: &mut R) -> Query {
-        *[Query::A, Query::B, Query::C].choose(rng).unwrap()
-    }
-}
-
-impl rand::distributions::Distribution<MutatorOp> for rand::distributions::Standard {
-    fn sample<R: rand::Rng + ?Sized>(&self, rng: &mut R) -> MutatorOp {
-        if rng.gen_bool(0.5) {
-            MutatorOp::WriteOp(rng.gen())
-        } else {
-            MutatorOp::LaunchReader {
-                ops: (0..N_READER_OPS).map(|_| rng.gen()).collect(),
-                check_cancellation: rng.gen(),
-            }
-        }
-    }
-}
-
-impl rand::distributions::Distribution<WriteOp> for rand::distributions::Standard {
-    fn sample<R: rand::Rng + ?Sized>(&self, rng: &mut R) -> WriteOp {
-        let key = rng.gen::<usize>() % 10;
-        let value = rng.gen::<usize>() % 10;
-        WriteOp::SetA(key, value)
-    }
-}
-
-impl rand::distributions::Distribution<ReadOp> for rand::distributions::Standard {
-    fn sample<R: rand::Rng + ?Sized>(&self, rng: &mut R) -> ReadOp {
-        let query = rng.gen::<Query>();
-        let key = rng.gen::<usize>() % 10;
-        ReadOp::Get(query, key)
-    }
-}
-
-fn db_reader_thread(db: &StressDatabaseImpl, ops: Vec<ReadOp>, check_cancellation: bool) {
-    for op in ops {
-        if check_cancellation {
-            db.unwind_if_cancelled();
-        }
-        op.execute(db);
-    }
-}
-
-impl WriteOp {
-    fn execute(self, db: &mut StressDatabaseImpl) {
-        match self {
-            WriteOp::SetA(key, value) => {
-                db.set_a(key, value);
-            }
-        }
-    }
-}
-
-impl ReadOp {
-    fn execute(self, db: &StressDatabaseImpl) {
-        match self {
-            ReadOp::Get(query, key) => match query {
-                Query::A => {
-                    db.a(key);
-                }
-                Query::B => {
-                    let _ = db.b(key);
-                }
-                Query::C => {
-                    let _ = db.c(key);
-                }
-            },
-        }
-    }
-}
-
-#[test]
-fn stress_test() {
-    let mut db = StressDatabaseImpl::default();
-    for i in 0..10 {
-        db.set_a(i, i);
-    }
-
-    let mut rng = rand::thread_rng();
-
-    // generate the ops that the mutator thread will perform
-    let write_ops: Vec<MutatorOp> = (0..N_MUTATOR_OPS).map(|_| rng.gen()).collect();
-
-    // execute the "main thread", which sometimes snapshots off other threads
-    let mut all_threads = vec![];
-    for op in write_ops {
-        match op {
-            MutatorOp::WriteOp(w) => w.execute(&mut db),
-            MutatorOp::LaunchReader { ops, check_cancellation } => {
-                all_threads.push(std::thread::spawn({
-                    let db = db.snapshot();
-                    move || Cancelled::catch(|| db_reader_thread(&db, ops, check_cancellation))
-                }))
-            }
-        }
-    }
-
-    for thread in all_threads {
-        thread.join().unwrap().ok();
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/true_parallel.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/true_parallel.rs
deleted file mode 100644
index 44db17bd85253..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/parallel/true_parallel.rs
+++ /dev/null
@@ -1,125 +0,0 @@
-use crate::setup::{Knobs, ParDatabase, ParDatabaseImpl, WithValue};
-use ra_salsa::ParallelDatabase;
-use std::panic::{self, AssertUnwindSafe};
-
-/// Test where two threads are executing sum. We show that they can
-/// both be executing sum in parallel by having thread1 wait for
-/// thread2 to send a signal before it leaves (similarly, thread2
-/// waits for thread1 to send a signal before it enters).
-#[test]
-fn true_parallel_different_keys() {
-    let mut db = ParDatabaseImpl::default();
-
-    db.set_input('a', 100);
-    db.set_input('b', 10);
-    db.set_input('c', 1);
-
-    // Thread 1 will signal stage 1 when it enters and wait for stage 2.
-    let thread1 = std::thread::spawn({
-        let db = db.snapshot();
-        move || {
-            let v = db
-                .knobs()
-                .sum_signal_on_entry
-                .with_value(1, || db.knobs().sum_wait_for_on_exit.with_value(2, || db.sum("a")));
-            v
-        }
-    });
-
-    // Thread 2 will wait_for stage 1 when it enters and signal stage 2
-    // when it leaves.
-    let thread2 = std::thread::spawn({
-        let db = db.snapshot();
-        move || {
-            let v = db
-                .knobs()
-                .sum_wait_for_on_entry
-                .with_value(1, || db.knobs().sum_signal_on_exit.with_value(2, || db.sum("b")));
-            v
-        }
-    });
-
-    assert_eq!(thread1.join().unwrap(), 100);
-    assert_eq!(thread2.join().unwrap(), 10);
-}
-
-/// Test that tries to trigger a conflict, where we fetch
-/// `sum("abc")` from two threads simultaneously, and one of them
-/// therefore has to block.
-#[test]
-fn true_parallel_same_keys() {
-    let mut db = ParDatabaseImpl::default();
-
-    db.set_input('a', 100);
-    db.set_input('b', 10);
-    db.set_input('c', 1);
-
-    // Thread 1 will wait_for a barrier at the start of `sum`
-    let thread1 = std::thread::spawn({
-        let db = db.snapshot();
-        move || {
-            let v = db
-                .knobs()
-                .sum_signal_on_entry
-                .with_value(1, || db.knobs().sum_wait_for_on_entry.with_value(2, || db.sum("abc")));
-            v
-        }
-    });
-
-    // Thread 2 will wait until Thread 1 has entered sum and then --
-    // once it has set itself to block -- signal Thread 1 to
-    // continue. This way, we test out the mechanism of one thread
-    // blocking on another.
-    let thread2 = std::thread::spawn({
-        let db = db.snapshot();
-        move || {
-            db.knobs().signal.wait_for(1);
-            db.knobs().signal_on_will_block.set(2);
-            db.sum("abc")
-        }
-    });
-
-    assert_eq!(thread1.join().unwrap(), 111);
-    assert_eq!(thread2.join().unwrap(), 111);
-}
-
-/// Test that tries to trigger a conflict, where we fetch `sum("a")`
-/// from two threads simultaneously. After `thread2` begins blocking,
-/// we force `thread1` to panic and should see the panic propagate to `thread2`.
-#[test]
-fn true_parallel_propagate_panic() {
-    let mut db = ParDatabaseImpl::default();
-
-    db.set_input('a', 1);
-
-    // `thread1` will wait_for a barrier at the start of `sum`. Once it can
-    // continue, it will panic.
-    let thread1 = std::thread::spawn({
-        let db = db.snapshot();
-        move || {
-            let v = db.knobs().sum_signal_on_entry.with_value(1, || {
-                db.knobs()
-                    .sum_wait_for_on_entry
-                    .with_value(2, || db.knobs().sum_should_panic.with_value(true, || db.sum("a")))
-            });
-            v
-        }
-    });
-
-    // `thread2` will wait until `thread1` has entered sum and then -- once it
-    // has set itself to block -- signal `thread1` to continue.
-    let thread2 = std::thread::spawn({
-        let db = db.snapshot();
-        move || {
-            db.knobs().signal.wait_for(1);
-            db.knobs().signal_on_will_block.set(2);
-            db.sum("a")
-        }
-    });
-
-    let result1 = panic::catch_unwind(AssertUnwindSafe(|| thread1.join().unwrap()));
-    let result2 = panic::catch_unwind(AssertUnwindSafe(|| thread2.join().unwrap()));
-
-    assert!(result1.is_err());
-    assert!(result2.is_err());
-}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/storage_varieties/implementation.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/storage_varieties/implementation.rs
deleted file mode 100644
index 39b2befd15b71..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/storage_varieties/implementation.rs
+++ /dev/null
@@ -1,19 +0,0 @@
-use crate::queries;
-use std::cell::Cell;
-
-#[ra_salsa::database(queries::GroupStruct)]
-#[derive(Default)]
-pub(crate) struct DatabaseImpl {
-    storage: ra_salsa::Storage<Self>,
-    counter: Cell<usize>,
-}
-
-impl queries::Counter for DatabaseImpl {
-    fn increment(&self) -> usize {
-        let v = self.counter.get();
-        self.counter.set(v + 1);
-        v
-    }
-}
-
-impl ra_salsa::Database for DatabaseImpl {}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/storage_varieties/main.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/storage_varieties/main.rs
deleted file mode 100644
index e92c61740e0c8..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/storage_varieties/main.rs
+++ /dev/null
@@ -1,5 +0,0 @@
-mod implementation;
-mod queries;
-mod tests;
-
-fn main() {}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/storage_varieties/queries.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/storage_varieties/queries.rs
deleted file mode 100644
index bc9b10ae7bb24..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/storage_varieties/queries.rs
+++ /dev/null
@@ -1,22 +0,0 @@
-pub(crate) trait Counter: ra_salsa::Database {
-    fn increment(&self) -> usize;
-}
-
-#[ra_salsa::query_group(GroupStruct)]
-pub(crate) trait Database: Counter {
-    fn memoized(&self) -> usize;
-    fn volatile(&self) -> usize;
-}
-
-/// Because this query is memoized, we only increment the counter
-/// the first time it is invoked.
-fn memoized(db: &dyn Database) -> usize {
-    db.volatile()
-}
-
-/// Because this query is volatile, each time it is invoked,
-/// we will increment the counter.
-fn volatile(db: &dyn Database) -> usize {
-    db.salsa_runtime().report_untracked_read();
-    db.increment()
-}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/storage_varieties/tests.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/storage_varieties/tests.rs
deleted file mode 100644
index 7c33bbfc7475b..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/storage_varieties/tests.rs
+++ /dev/null
@@ -1,49 +0,0 @@
-#![cfg(test)]
-
-use crate::implementation::DatabaseImpl;
-use crate::queries::Database;
-use ra_salsa::Database as _Database;
-use ra_salsa::Durability;
-
-#[test]
-fn memoized_twice() {
-    let db = DatabaseImpl::default();
-    let v1 = db.memoized();
-    let v2 = db.memoized();
-    assert_eq!(v1, v2);
-}
-
-#[test]
-fn volatile_twice() {
-    let mut db = DatabaseImpl::default();
-    let v1 = db.volatile();
-    let v2 = db.volatile(); // volatiles are cached, so 2nd read returns the same
-    assert_eq!(v1, v2);
-
-    db.synthetic_write(Durability::LOW); // clears volatile caches
-
-    let v3 = db.volatile(); // will re-increment the counter
-    let v4 = db.volatile(); // second call will be cached
-    assert_eq!(v1 + 1, v3);
-    assert_eq!(v3, v4);
-}
-
-#[test]
-fn intermingled() {
-    let mut db = DatabaseImpl::default();
-    let v1 = db.volatile();
-    let v2 = db.memoized();
-    let v3 = db.volatile(); // cached
-    let v4 = db.memoized(); // cached
-
-    assert_eq!(v1, v2);
-    assert_eq!(v1, v3);
-    assert_eq!(v2, v4);
-
-    db.synthetic_write(Durability::LOW); // clears volatile caches
-
-    let v5 = db.memoized(); // re-executes volatile, caches new result
-    let v6 = db.memoized(); // re-use cached result
-    assert_eq!(v4 + 1, v5);
-    assert_eq!(v5, v6);
-}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/transparent.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/transparent.rs
deleted file mode 100644
index 886f46410658b..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/transparent.rs
+++ /dev/null
@@ -1,39 +0,0 @@
-//! Test that transparent (uncached) queries work
-
-#[ra_salsa::query_group(QueryGroupStorage)]
-trait QueryGroup {
-    #[ra_salsa::input]
-    fn input(&self, x: u32) -> u32;
-    #[ra_salsa::transparent]
-    fn wrap(&self, x: u32) -> u32;
-    fn get(&self, x: u32) -> u32;
-}
-
-fn wrap(db: &dyn QueryGroup, x: u32) -> u32 {
-    db.input(x)
-}
-
-fn get(db: &dyn QueryGroup, x: u32) -> u32 {
-    db.wrap(x)
-}
-
-#[ra_salsa::database(QueryGroupStorage)]
-#[derive(Default)]
-struct Database {
-    storage: ra_salsa::Storage<Self>,
-}
-
-impl ra_salsa::Database for Database {}
-
-#[test]
-fn transparent_queries_work() {
-    let mut db = Database::default();
-
-    db.set_input(1, 10);
-    assert_eq!(db.get(1), 10);
-    assert_eq!(db.get(1), 10);
-
-    db.set_input(1, 92);
-    assert_eq!(db.get(1), 92);
-    assert_eq!(db.get(1), 92);
-}
diff --git a/src/tools/rust-analyzer/crates/ra-salsa/tests/variadic.rs b/src/tools/rust-analyzer/crates/ra-salsa/tests/variadic.rs
deleted file mode 100644
index 11a6d13ebe214..0000000000000
--- a/src/tools/rust-analyzer/crates/ra-salsa/tests/variadic.rs
+++ /dev/null
@@ -1,51 +0,0 @@
-#[ra_salsa::query_group(HelloWorld)]
-trait HelloWorldDatabase: ra_salsa::Database {
-    #[ra_salsa::input]
-    fn input(&self, a: u32, b: u32) -> u32;
-
-    fn none(&self) -> u32;
-
-    fn one(&self, k: u32) -> u32;
-
-    fn two(&self, a: u32, b: u32) -> u32;
-
-    fn trailing(&self, a: u32, b: u32) -> u32;
-}
-
-fn none(_db: &dyn HelloWorldDatabase) -> u32 {
-    22
-}
-
-fn one(_db: &dyn HelloWorldDatabase, k: u32) -> u32 {
-    k * 2
-}
-
-fn two(_db: &dyn HelloWorldDatabase, a: u32, b: u32) -> u32 {
-    a * b
-}
-
-fn trailing(_db: &dyn HelloWorldDatabase, a: u32, b: u32) -> u32 {
-    a - b
-}
-
-#[ra_salsa::database(HelloWorld)]
-#[derive(Default)]
-struct DatabaseStruct {
-    storage: ra_salsa::Storage<Self>,
-}
-
-impl ra_salsa::Database for DatabaseStruct {}
-
-#[test]
-fn execute() {
-    let mut db = DatabaseStruct::default();
-
-    // test what happens with inputs:
-    db.set_input(1, 2, 3);
-    assert_eq!(db.input(1, 2), 3);
-
-    assert_eq!(db.none(), 22);
-    assert_eq!(db.one(11), 22);
-    assert_eq!(db.two(11, 2), 22);
-    assert_eq!(db.trailing(24, 2), 22);
-}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml
index 6c81c238fd3f2..b59d06838e0f2 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml
@@ -4,7 +4,7 @@ version = "0.0.0"
 homepage = "https://rust-analyzer.github.io/"
 repository.workspace = true
 description = "A language server for the Rust programming language"
-documentation = "https://rust-analyzer.github.io/manual.html"
+documentation = "https://rust-analyzer.github.io/book/"
 autobins = false
 
 authors.workspace = true
@@ -22,34 +22,34 @@ path = "src/bin/main.rs"
 anyhow.workspace = true
 base64 = "0.22"
 crossbeam-channel.workspace = true
-dirs = "5.0.1"
+dirs = "6.0.0"
 dissimilar.workspace = true
 ide-completion.workspace = true
+indexmap.workspace = true
 itertools.workspace = true
-scip = "0.5.1"
+scip = "0.5.2"
 lsp-types = { version = "=0.95.0", features = ["proposed"] }
-parking_lot = "0.12.1"
-xflags = "0.3.0"
-oorandom = "11.1.3"
+parking_lot = "0.12.3"
+xflags = "0.3.2"
+oorandom = "11.1.5"
 rayon.workspace = true
 rustc-hash.workspace = true
 serde_json = { workspace = true, features = ["preserve_order"] }
 serde.workspace = true
 serde_derive.workspace = true
 tenthash = "1.0.0"
-num_cpus = "1.15.0"
-mimalloc = { version = "0.1.30", default-features = false, optional = true }
+num_cpus = "1.16.0"
+mimalloc = { version = "0.1.44", default-features = false, optional = true }
 lsp-server.workspace = true
 tracing.workspace = true
 tracing-subscriber.workspace = true
 tracing-tree.workspace = true
 triomphe.workspace = true
-toml = "0.8.8"
+toml = "0.8.20"
 nohash-hasher.workspace = true
-always-assert = "0.2.0"
-walkdir = "2.3.2"
+walkdir = "2.5.0"
 semver.workspace = true
-memchr = "2.7.1"
+memchr = "2.7.4"
 cargo_metadata.workspace = true
 process-wrap.workspace = true
 
@@ -81,10 +81,10 @@ windows-sys = { version = "0.59", features = [
 ] }
 
 [target.'cfg(not(target_env = "msvc"))'.dependencies]
-jemallocator = { version = "0.5.0", package = "tikv-jemallocator", optional = true }
+jemallocator = { version = "0.5.4", package = "tikv-jemallocator", optional = true }
 
 [dev-dependencies]
-expect-test = "1.4.0"
+expect-test = "1.5.1"
 xshell.workspace = true
 
 test-utils.workspace = true
@@ -94,9 +94,7 @@ syntax-bridge.workspace = true
 [features]
 jemalloc = ["jemallocator", "profile/jemalloc"]
 force-always-assert = ["stdx/force-always-assert"]
-sysroot-abi = []
 in-rust-tree = [
-  "sysroot-abi",
   "syntax/in-rust-tree",
   "parser/in-rust-tree",
   "hir/in-rust-tree",
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs
index 1a9cdef256d28..ea5a5eaa6a449 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs
@@ -88,6 +88,7 @@ fn actual_main() -> anyhow::Result<ExitCode> {
         flags::RustAnalyzerCmd::Scip(cmd) => cmd.run()?,
         flags::RustAnalyzerCmd::RunTests(cmd) => cmd.run()?,
         flags::RustAnalyzerCmd::RustcTests(cmd) => cmd.run()?,
+        flags::RustAnalyzerCmd::PrimeCaches(cmd) => cmd.run()?,
     }
     Ok(ExitCode::SUCCESS)
 }
@@ -123,13 +124,19 @@ fn setup_logging(log_file_flag: Option<PathBuf>) -> anyhow::Result<()> {
         // https://docs.microsoft.com/en-us/windows/win32/api/dbghelp/nf-dbghelp-syminitialize
         if let Ok(path) = env::current_exe() {
             if let Some(path) = path.parent() {
-                env::set_var("_NT_SYMBOL_PATH", path);
+                // SAFETY: This is safe because this is single-threaded.
+                unsafe {
+                    env::set_var("_NT_SYMBOL_PATH", path);
+                }
             }
         }
     }
 
     if env::var("RUST_BACKTRACE").is_err() {
-        env::set_var("RUST_BACKTRACE", "short");
+        // SAFETY: This is safe because this is single-threaded.
+        unsafe {
+            env::set_var("RUST_BACKTRACE", "short");
+        }
     }
 
     let log_file = env::var("RA_LOG_FILE").ok().map(PathBuf::from).or(log_file_flag);
@@ -253,8 +260,8 @@ fn run_server() -> anyhow::Result<()> {
 
         if !error_sink.is_empty() {
             use lsp_types::{
-                notification::{Notification, ShowMessage},
                 MessageType, ShowMessageParams,
+                notification::{Notification, ShowMessage},
             };
             let not = lsp_server::Notification::new(
                 ShowMessage::METHOD.to_owned(),
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs
index a7ec5af89fcbf..6643037220ae4 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs
@@ -8,6 +8,7 @@ pub mod flags;
 mod highlight;
 mod lsif;
 mod parse;
+mod prime_caches;
 mod run_tests;
 mod rustc_tests;
 mod scip;
@@ -86,6 +87,6 @@ fn full_name_of_item(db: &dyn HirDatabase, module: Module, name: Name) -> String
         .rev()
         .filter_map(|it| it.name(db))
         .chain(Some(name))
-        .map(|it| it.display(db.upcast(), Edition::LATEST).to_string())
+        .map(|it| it.display(db, Edition::LATEST).to_string())
         .join("::")
 }
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
index dee76ee15c3df..a62005e3c085f 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -2,20 +2,20 @@
 //! errors.
 
 use std::{
-    env,
+    env, fmt,
+    ops::AddAssign,
     time::{SystemTime, UNIX_EPOCH},
 };
 
 use cfg::{CfgAtom, CfgDiff};
 use hir::{
+    Adt, AssocItem, Crate, DefWithBody, HasSource, HirDisplay, ImportPathConfig, ModuleDef, Name,
     db::{DefDatabase, ExpandDatabase, HirDatabase},
-    Adt, AssocItem, Crate, DefWithBody, HasSource, HirDisplay, HirFileIdExt, ImportPathConfig,
-    ModuleDef, Name,
 };
 use hir_def::{
+    SyntheticSyntax,
     expr_store::BodySourceMap,
     hir::{ExprId, PatId},
-    SyntheticSyntax,
 };
 use hir_ty::{Interner, Substitution, TyExt, TypeFlags};
 use ide::{
@@ -23,37 +23,27 @@ use ide::{
     InlayHintsConfig, LineCol, RootDatabase,
 };
 use ide_db::{
-    base_db::{
-        ra_salsa::{self, debug::DebugQueryTable, ParallelDatabase},
-        SourceDatabase, SourceRootDatabase,
-    },
     EditionedFileId, LineIndexDatabase, SnippetCap,
+    base_db::{SourceDatabase, salsa::Database},
 };
 use itertools::Itertools;
-use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice};
+use load_cargo::{LoadCargoConfig, ProcMacroServerChoice, load_workspace};
 use oorandom::Rand32;
-use profile::{Bytes, StopWatch};
+use profile::StopWatch;
 use project_model::{CargoConfig, CfgOverrides, ProjectManifest, ProjectWorkspace, RustLibSource};
 use rayon::prelude::*;
 use rustc_hash::{FxHashMap, FxHashSet};
-use syntax::{AstNode, SyntaxNode};
+use syntax::AstNode;
 use vfs::{AbsPathBuf, Vfs, VfsPath};
 
 use crate::cli::{
+    Verbosity,
     flags::{self, OutputFormat},
     full_name_of_item, print_memory_usage,
     progress_report::ProgressReport,
-    report_metric, Verbosity,
+    report_metric,
 };
 
-/// Need to wrap Snapshot to provide `Clone` impl for `map_with`
-struct Snap<DB>(DB);
-impl<DB: ParallelDatabase> Clone for Snap<ra_salsa::Snapshot<DB>> {
-    fn clone(&self) -> Snap<ra_salsa::Snapshot<DB>> {
-        Snap(self.0.snapshot())
-    }
-}
-
 impl flags::AnalysisStats {
     pub fn run(self, verbosity: Verbosity) -> anyhow::Result<()> {
         let mut rng = {
@@ -69,7 +59,7 @@ impl flags::AnalysisStats {
             all_targets: true,
             set_test: !self.no_test,
             cfg_overrides: CfgOverrides {
-                global: CfgDiff::new(vec![CfgAtom::Flag(hir::sym::miri.clone())], vec![]),
+                global: CfgDiff::new(vec![CfgAtom::Flag(hir::sym::miri)], vec![]),
                 selective: Default::default(),
             },
             ..Default::default()
@@ -117,7 +107,7 @@ impl flags::AnalysisStats {
         }
         eprintln!(")");
 
-        let host = AnalysisHost::with_database(db);
+        let mut host = AnalysisHost::with_database(db);
         let db = host.raw_database();
 
         let mut analysis_sw = self.stop_watch();
@@ -128,26 +118,84 @@ impl flags::AnalysisStats {
         }
 
         let mut item_tree_sw = self.stop_watch();
-        let mut num_item_trees = 0;
-        let source_roots =
-            krates.iter().cloned().map(|krate| db.file_source_root(krate.root_file(db))).unique();
+        let source_roots = krates
+            .iter()
+            .cloned()
+            .map(|krate| db.file_source_root(krate.root_file(db)).source_root_id(db))
+            .unique();
+
+        let mut dep_loc = 0;
+        let mut workspace_loc = 0;
+        let mut dep_item_trees = 0;
+        let mut workspace_item_trees = 0;
+
+        let mut workspace_item_stats = PrettyItemStats::default();
+        let mut dep_item_stats = PrettyItemStats::default();
+
         for source_root_id in source_roots {
-            let source_root = db.source_root(source_root_id);
-            if !source_root.is_library || self.with_deps {
-                for file_id in source_root.iter() {
-                    if let Some(p) = source_root.path_for_file(&file_id) {
-                        if let Some((_, Some("rs"))) = p.name_and_extension() {
-                            db.file_item_tree(EditionedFileId::current_edition(file_id).into());
-                            num_item_trees += 1;
+            let source_root = db.source_root(source_root_id).source_root(db);
+            for file_id in source_root.iter() {
+                if let Some(p) = source_root.path_for_file(&file_id) {
+                    if let Some((_, Some("rs"))) = p.name_and_extension() {
+                        // measure workspace/project code
+                        if !source_root.is_library || self.with_deps {
+                            let length = db.file_text(file_id).text(db).lines().count();
+                            let item_stats = db
+                                .file_item_tree(
+                                    EditionedFileId::current_edition(db, file_id).into(),
+                                )
+                                .item_tree_stats()
+                                .into();
+
+                            workspace_loc += length;
+                            workspace_item_trees += 1;
+                            workspace_item_stats += item_stats;
+                        } else {
+                            let length = db.file_text(file_id).text(db).lines().count();
+                            let item_stats = db
+                                .file_item_tree(
+                                    EditionedFileId::current_edition(db, file_id).into(),
+                                )
+                                .item_tree_stats()
+                                .into();
+
+                            dep_loc += length;
+                            dep_item_trees += 1;
+                            dep_item_stats += item_stats;
                         }
                     }
                 }
             }
         }
-        eprintln!("  item trees: {num_item_trees}");
+        eprintln!("  item trees: {workspace_item_trees}");
         let item_tree_time = item_tree_sw.elapsed();
+
+        eprintln!(
+            "  dependency lines of code: {}, item trees: {}",
+            UsizeWithUnderscore(dep_loc),
+            UsizeWithUnderscore(dep_item_trees),
+        );
+        eprintln!("  dependency item stats: {}", dep_item_stats);
+
+        // FIXME(salsa-transition): bring back stats for ParseQuery (file size)
+        // and ParseMacroExpansionQuery (macro expansion "file") size once Salsa's
+        // memory usage tracking works with tracked functions.
+
+        // let mut total_file_size = Bytes::default();
+        // for e in ide_db::base_db::ParseQuery.in_db(db).entries::<Vec<_>>() {
+        //     total_file_size += syntax_len(db.parse(e.key).syntax_node())
+        // }
+
+        // let mut total_macro_file_size = Bytes::default();
+        // for e in hir::db::ParseMacroExpansionQuery.in_db(db).entries::<Vec<_>>() {
+        //     let val = db.parse_macro_expansion(e.key).value.0;
+        //     total_macro_file_size += syntax_len(val.syntax_node())
+        // }
+        // eprintln!("source files: {total_file_size}, macro files: {total_macro_file_size}");
+
         eprintln!("{:<20} {}", "Item Tree Collection:", item_tree_time);
         report_metric("item tree time", item_tree_time.time.as_millis() as u64, "ms");
+        eprintln!("  Total Statistics:");
 
         let mut crate_def_map_sw = self.stop_watch();
         let mut num_crates = 0;
@@ -157,8 +205,9 @@ impl flags::AnalysisStats {
             let module = krate.root_module();
             let file_id = module.definition_source_file_id(db);
             let file_id = file_id.original_file(db);
-            let source_root = db.file_source_root(file_id.into());
-            let source_root = db.source_root(source_root);
+
+            let source_root = db.file_source_root(file_id.file_id(db)).source_root_id(db);
+            let source_root = db.source_root(source_root).source_root(db);
             if !source_root.is_library || self.with_deps {
                 num_crates += 1;
                 visit_queue.push(module);
@@ -169,11 +218,16 @@ impl flags::AnalysisStats {
             shuffle(&mut rng, &mut visit_queue);
         }
 
-        eprint!("  crates: {num_crates}");
+        eprint!("    crates: {num_crates}");
         let mut num_decls = 0;
         let mut bodies = Vec::new();
         let mut adts = Vec::new();
         let mut file_ids = Vec::new();
+
+        let mut num_traits = 0;
+        let mut num_macro_rules_macros = 0;
+        let mut num_proc_macros = 0;
+
         while let Some(module) = visit_queue.pop() {
             if visited_modules.insert(module) {
                 file_ids.extend(module.as_source_file_id(db));
@@ -195,6 +249,14 @@ impl flags::AnalysisStats {
                             bodies.push(DefWithBody::from(c));
                         }
                         ModuleDef::Static(s) => bodies.push(DefWithBody::from(s)),
+                        ModuleDef::Trait(_) => num_traits += 1,
+                        ModuleDef::Macro(m) => match m.kind(db) {
+                            hir::MacroKind::Declarative => num_macro_rules_macros += 1,
+                            hir::MacroKind::Derive
+                            | hir::MacroKind::Attr
+                            | hir::MacroKind::ProcMacro => num_proc_macros += 1,
+                            _ => (),
+                        },
                         _ => (),
                     };
                 }
@@ -223,6 +285,26 @@ impl flags::AnalysisStats {
                 .filter(|it| matches!(it, DefWithBody::Const(_) | DefWithBody::Static(_)))
                 .count(),
         );
+
+        eprintln!("  Workspace:");
+        eprintln!(
+            "    traits: {num_traits}, macro_rules macros: {num_macro_rules_macros}, proc_macros: {num_proc_macros}"
+        );
+        eprintln!(
+            "    lines of code: {}, item trees: {}",
+            UsizeWithUnderscore(workspace_loc),
+            UsizeWithUnderscore(workspace_item_trees),
+        );
+        eprintln!("    usages: {}", workspace_item_stats);
+
+        eprintln!("  Dependencies:");
+        eprintln!(
+            "    lines of code: {}, item trees: {}",
+            UsizeWithUnderscore(dep_loc),
+            UsizeWithUnderscore(dep_item_trees),
+        );
+        eprintln!("    declarations: {}", dep_item_stats);
+
         let crate_def_map_time = crate_def_map_sw.elapsed();
         eprintln!("{:<20} {}", "Item Collection:", crate_def_map_time);
         report_metric("crate def map time", crate_def_map_time.time.as_millis() as u64, "ms");
@@ -259,6 +341,9 @@ impl flags::AnalysisStats {
             self.run_term_search(&workspace, db, &vfs, file_ids, verbosity);
         }
 
+        let db = host.raw_database_mut();
+        db.trigger_lru_eviction();
+
         let total_span = analysis_sw.elapsed();
         eprintln!("{:<20} {total_span}", "Total:");
         report_metric("total time", total_span.time.as_millis() as u64, "ms");
@@ -267,20 +352,6 @@ impl flags::AnalysisStats {
         }
         report_metric("total memory", total_span.memory.allocated.megabytes() as u64, "MB");
 
-        if self.source_stats {
-            let mut total_file_size = Bytes::default();
-            for e in ide_db::base_db::ParseQuery.in_db(db).entries::<Vec<_>>() {
-                total_file_size += syntax_len(db.parse(e.key).syntax_node())
-            }
-
-            let mut total_macro_file_size = Bytes::default();
-            for e in hir::db::ParseMacroExpansionQuery.in_db(db).entries::<Vec<_>>() {
-                let val = db.parse_macro_expansion(e.key).value.0;
-                total_macro_file_size += syntax_len(val.syntax_node())
-            }
-            eprintln!("source files: {total_file_size}, macro files: {total_macro_file_size}");
-        }
-
         if verbosity.is_verbose() {
             print_memory_usage(host, vfs);
         }
@@ -369,7 +440,7 @@ impl flags::AnalysisStats {
         let mut bar = match verbosity {
             Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
             _ if self.parallel || self.output.is_some() => ProgressReport::hidden(),
-            _ => ProgressReport::new(file_ids.len() as u64),
+            _ => ProgressReport::new(file_ids.len()),
         };
 
         file_ids.sort();
@@ -389,9 +460,12 @@ impl flags::AnalysisStats {
         let mut sw = self.stop_watch();
 
         for &file_id in &file_ids {
+            let file_id = file_id.editioned_file_id(db);
             let sema = hir::Semantics::new(db);
-            let display_target =
-                sema.first_crate_or_default(file_id.file_id()).to_display_target(db);
+            let display_target = match sema.first_crate(file_id.file_id()) {
+                Some(krate) => krate.to_display_target(sema.db),
+                None => continue,
+            };
 
             let parse = sema.parse_guess_edition(file_id.into());
             let file_txt = db.file_text(file_id.into());
@@ -423,6 +497,7 @@ impl flags::AnalysisStats {
                 let range = sema.original_range(expected_tail.syntax()).range;
                 let original_text: String = db
                     .file_text(file_id.into())
+                    .text(db)
                     .chars()
                     .skip(usize::from(range.start()))
                     .take(usize::from(range.end()) - usize::from(range.start()))
@@ -475,7 +550,7 @@ impl flags::AnalysisStats {
                     syntax_hit_found |= trim(&original_text) == trim(&generated);
 
                     // Validate if type-checks
-                    let mut txt = file_txt.to_string();
+                    let mut txt = file_txt.text(db).to_string();
 
                     let edit = ide::TextEdit::replace(range, generated.clone());
                     edit.apply(&mut txt);
@@ -530,7 +605,7 @@ impl flags::AnalysisStats {
             }
             // Revert file back to original state
             if self.validate_term_search {
-                std::fs::write(path, file_txt.to_string()).unwrap();
+                std::fs::write(path, file_txt.text(db).to_string()).unwrap();
             }
 
             bar.inc(1);
@@ -572,6 +647,11 @@ impl flags::AnalysisStats {
     }
 
     fn run_mir_lowering(&self, db: &RootDatabase, bodies: &[DefWithBody], verbosity: Verbosity) {
+        let mut bar = match verbosity {
+            Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
+            _ if self.parallel || self.output.is_some() => ProgressReport::hidden(),
+            _ => ProgressReport::new(bodies.len()),
+        };
         let mut sw = self.stop_watch();
         let mut all = 0;
         let mut fail = 0;
@@ -593,11 +673,13 @@ impl flags::AnalysisStats {
                     .chain(Some(body.name(db).unwrap_or_else(Name::missing)))
                     .map(|it| it.display(db, Edition::LATEST).to_string())
                     .join("::");
-                println!("Mir body for {full_name} failed due {e:?}");
+                bar.println(format!("Mir body for {full_name} failed due to {e:?}"));
             }
             fail += 1;
+            bar.tick();
         }
         let mir_lowering_time = sw.elapsed();
+        bar.finish_and_clear();
         eprintln!("{:<20} {}", "MIR lowering:", mir_lowering_time);
         eprintln!("Mir failed bodies: {fail} ({}%)", percentage(fail, all));
         report_metric("mir failed bodies", fail, "#");
@@ -614,17 +696,17 @@ impl flags::AnalysisStats {
         let mut bar = match verbosity {
             Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
             _ if self.parallel || self.output.is_some() => ProgressReport::hidden(),
-            _ => ProgressReport::new(bodies.len() as u64),
+            _ => ProgressReport::new(bodies.len()),
         };
 
         if self.parallel {
             let mut inference_sw = self.stop_watch();
-            let snap = Snap(db.snapshot());
+            let snap = db.snapshot();
             bodies
                 .par_iter()
                 .map_with(snap, |snap, &body| {
-                    snap.0.body(body.into());
-                    snap.0.infer(body.into());
+                    snap.body(body.into());
+                    snap.infer(body.into());
                 })
                 .count();
             eprintln!("{:<20} {}", "Parallel Inference:", inference_sw.elapsed());
@@ -675,11 +757,10 @@ impl flags::AnalysisStats {
                         DefWithBody::Static(it) => it.source(db).map(|it| it.syntax().cloned()),
                         DefWithBody::Const(it) => it.source(db).map(|it| it.syntax().cloned()),
                         DefWithBody::Variant(it) => it.source(db).map(|it| it.syntax().cloned()),
-                        DefWithBody::InTypeConst(_) => unimplemented!(),
                     };
                     if let Some(src) = source {
                         let original_file = src.file_id.original_file(db);
-                        let path = vfs.file_path(original_file.into());
+                        let path = vfs.file_path(original_file.file_id(db));
                         let syntax_range = src.text_range();
                         format!("processing: {} ({} {:?})", full_name(), path, syntax_range)
                     } else {
@@ -946,7 +1027,7 @@ impl flags::AnalysisStats {
         let mut bar = match verbosity {
             Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
             _ if self.output.is_some() => ProgressReport::hidden(),
-            _ => ProgressReport::new(bodies.len() as u64),
+            _ => ProgressReport::new(bodies.len()),
         };
 
         let mut sw = self.stop_watch();
@@ -989,11 +1070,10 @@ impl flags::AnalysisStats {
                         DefWithBody::Static(it) => it.source(db).map(|it| it.syntax().cloned()),
                         DefWithBody::Const(it) => it.source(db).map(|it| it.syntax().cloned()),
                         DefWithBody::Variant(it) => it.source(db).map(|it| it.syntax().cloned()),
-                        DefWithBody::InTypeConst(_) => unimplemented!(),
                     };
                     if let Some(src) = source {
                         let original_file = src.file_id.original_file(db);
-                        let path = vfs.file_path(original_file.into());
+                        let path = vfs.file_path(original_file.file_id(db));
                         let syntax_range = src.text_range();
                         format!("processing: {} ({} {:?})", full_name(), path, syntax_range)
                     } else {
@@ -1047,7 +1127,7 @@ impl flags::AnalysisStats {
                     term_search_borrowck: true,
                 },
                 ide::AssistResolveStrategy::All,
-                file_id.into(),
+                analysis.editioned_file_id_to_vfs(file_id),
             );
         }
         for &file_id in &file_ids {
@@ -1082,7 +1162,7 @@ impl flags::AnalysisStats {
                     fields_to_resolve: InlayFieldsToResolve::empty(),
                     range_exclusive_hints: true,
                 },
-                file_id.into(),
+                analysis.editioned_file_id_to_vfs(file_id),
                 None,
             );
         }
@@ -1098,7 +1178,7 @@ impl flags::AnalysisStats {
                         annotate_enum_variant_references: false,
                         location: ide::AnnotationLocation::AboveName,
                     },
-                    file_id.into(),
+                    analysis.editioned_file_id_to_vfs(file_id),
                 )
                 .unwrap()
                 .into_iter()
@@ -1123,8 +1203,8 @@ fn location_csv_expr(db: &RootDatabase, vfs: &Vfs, sm: &BodySourceMap, expr_id:
     let root = db.parse_or_expand(src.file_id);
     let node = src.map(|e| e.to_node(&root).syntax().clone());
     let original_range = node.as_ref().original_file_range_rooted(db);
-    let path = vfs.file_path(original_range.file_id.into());
-    let line_index = db.line_index(original_range.file_id.into());
+    let path = vfs.file_path(original_range.file_id.file_id(db));
+    let line_index = db.line_index(original_range.file_id.file_id(db));
     let text_range = original_range.range;
     let (start, end) =
         (line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
@@ -1139,8 +1219,8 @@ fn location_csv_pat(db: &RootDatabase, vfs: &Vfs, sm: &BodySourceMap, pat_id: Pa
     let root = db.parse_or_expand(src.file_id);
     let node = src.map(|e| e.to_node(&root).syntax().clone());
     let original_range = node.as_ref().original_file_range_rooted(db);
-    let path = vfs.file_path(original_range.file_id.into());
-    let line_index = db.line_index(original_range.file_id.into());
+    let path = vfs.file_path(original_range.file_id.file_id(db));
+    let line_index = db.line_index(original_range.file_id.file_id(db));
     let text_range = original_range.range;
     let (start, end) =
         (line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
@@ -1158,8 +1238,8 @@ fn expr_syntax_range<'a>(
         let root = db.parse_or_expand(src.file_id);
         let node = src.map(|e| e.to_node(&root).syntax().clone());
         let original_range = node.as_ref().original_file_range_rooted(db);
-        let path = vfs.file_path(original_range.file_id.into());
-        let line_index = db.line_index(original_range.file_id.into());
+        let path = vfs.file_path(original_range.file_id.file_id(db));
+        let line_index = db.line_index(original_range.file_id.file_id(db));
         let text_range = original_range.range;
         let (start, end) =
             (line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
@@ -1179,8 +1259,8 @@ fn pat_syntax_range<'a>(
         let root = db.parse_or_expand(src.file_id);
         let node = src.map(|e| e.to_node(&root).syntax().clone());
         let original_range = node.as_ref().original_file_range_rooted(db);
-        let path = vfs.file_path(original_range.file_id.into());
-        let line_index = db.line_index(original_range.file_id.into());
+        let path = vfs.file_path(original_range.file_id.file_id(db));
+        let line_index = db.line_index(original_range.file_id.file_id(db));
         let text_range = original_range.range;
         let (start, end) =
             (line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
@@ -1206,8 +1286,82 @@ fn percentage(n: u64, total: u64) -> u64 {
     (n * 100).checked_div(total).unwrap_or(100)
 }
 
-fn syntax_len(node: SyntaxNode) -> usize {
-    // Macro expanded code doesn't contain whitespace, so erase *all* whitespace
-    // to make macro and non-macro code comparable.
-    node.to_string().replace(|it: char| it.is_ascii_whitespace(), "").len()
+#[derive(Default, Debug, Eq, PartialEq)]
+struct UsizeWithUnderscore(usize);
+
+impl fmt::Display for UsizeWithUnderscore {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let num_str = self.0.to_string();
+
+        if num_str.len() <= 3 {
+            return write!(f, "{}", num_str);
+        }
+
+        let mut result = String::new();
+
+        for (count, ch) in num_str.chars().rev().enumerate() {
+            if count > 0 && count % 3 == 0 {
+                result.push('_');
+            }
+            result.push(ch);
+        }
+
+        let result = result.chars().rev().collect::<String>();
+        write!(f, "{}", result)
+    }
 }
+
+impl std::ops::AddAssign for UsizeWithUnderscore {
+    fn add_assign(&mut self, other: UsizeWithUnderscore) {
+        self.0 += other.0;
+    }
+}
+
+#[derive(Default, Debug, Eq, PartialEq)]
+struct PrettyItemStats {
+    traits: UsizeWithUnderscore,
+    impls: UsizeWithUnderscore,
+    mods: UsizeWithUnderscore,
+    macro_calls: UsizeWithUnderscore,
+    macro_rules: UsizeWithUnderscore,
+}
+
+impl From<hir_def::item_tree::ItemTreeDataStats> for PrettyItemStats {
+    fn from(value: hir_def::item_tree::ItemTreeDataStats) -> Self {
+        Self {
+            traits: UsizeWithUnderscore(value.traits),
+            impls: UsizeWithUnderscore(value.impls),
+            mods: UsizeWithUnderscore(value.mods),
+            macro_calls: UsizeWithUnderscore(value.macro_calls),
+            macro_rules: UsizeWithUnderscore(value.macro_rules),
+        }
+    }
+}
+
+impl AddAssign for PrettyItemStats {
+    fn add_assign(&mut self, rhs: Self) {
+        self.traits += rhs.traits;
+        self.impls += rhs.impls;
+        self.mods += rhs.mods;
+        self.macro_calls += rhs.macro_calls;
+        self.macro_rules += rhs.macro_rules;
+    }
+}
+
+impl fmt::Display for PrettyItemStats {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(
+            f,
+            "traits: {}, impl: {}, mods: {}, macro calls: {}, macro rules: {}",
+            self.traits, self.impls, self.mods, self.macro_calls, self.macro_rules
+        )
+    }
+}
+
+// FIXME(salsa-transition): bring this back once Salsa's memory usage
+// tracking works with tracked functions.
+// fn syntax_len(node: SyntaxNode) -> usize {
+//     // Macro expanded code doesn't contain whitespace, so erase *all* whitespace
+//     // to make macro and non-macro code comparable.
+//     node.to_string().replace(|it: char| it.is_ascii_whitespace(), "").len()
+// }
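
The `UsizeWithUnderscore` wrapper added above formats a count with `_` as a thousands separator by walking the digits from the least significant end. A minimal standalone sketch of the same technique (the `WithUnderscores` name and the `main` driver are illustrative, not part of the patch):

```rust
use std::fmt;

/// Illustrative re-implementation of the digit-grouping formatter above.
struct WithUnderscores(usize);

impl fmt::Display for WithUnderscores {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let digits = self.0.to_string();
        // Walk the digits from the least significant end, inserting an
        // underscore before every group of three, then reverse back.
        let grouped: String = digits
            .chars()
            .rev()
            .enumerate()
            .flat_map(|(i, c)| {
                let sep = if i > 0 && i % 3 == 0 { Some('_') } else { None };
                sep.into_iter().chain(std::iter::once(c))
            })
            .collect::<String>()
            .chars()
            .rev()
            .collect();
        write!(f, "{grouped}")
    }
}

fn main() {
    assert_eq!(WithUnderscores(950).to_string(), "950");
    assert_eq!(WithUnderscores(1_234_567).to_string(), "1_234_567");
    println!("{}", WithUnderscores(1_234_567)); // prints 1_234_567
}
```
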
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs
index 6a3ceb640b91a..7c4eeebdfa31f 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs
@@ -4,10 +4,10 @@
 use project_model::{CargoConfig, RustLibSource};
 use rustc_hash::FxHashSet;
 
-use hir::{db::HirDatabase, sym, Crate, HirFileIdExt, Module};
+use hir::{Crate, Module, db::HirDatabase, sym};
 use ide::{AnalysisHost, AssistResolveStrategy, Diagnostic, DiagnosticsConfig, Severity};
-use ide_db::{base_db::SourceRootDatabase, LineIndexDatabase};
-use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
+use ide_db::{LineIndexDatabase, base_db::SourceDatabase};
+use load_cargo::{LoadCargoConfig, ProcMacroServerChoice, load_workspace_at};
 
 use crate::cli::flags;
 
@@ -51,8 +51,8 @@ impl flags::Diagnostics {
 
         let work = all_modules(db).into_iter().filter(|module| {
             let file_id = module.definition_source_file_id(db).original_file(db);
-            let source_root = db.file_source_root(file_id.into());
-            let source_root = db.source_root(source_root);
+            let source_root = db.file_source_root(file_id.file_id(db)).source_root_id(db);
+            let source_root = db.source_root(source_root).source_root(db);
             !source_root.is_library
         });
 
@@ -63,13 +63,13 @@ impl flags::Diagnostics {
                     module.krate().display_name(db).as_deref().unwrap_or(&sym::unknown).to_owned();
                 println!(
                     "processing crate: {crate_name}, module: {}",
-                    _vfs.file_path(file_id.into())
+                    _vfs.file_path(file_id.file_id(db))
                 );
                 for diagnostic in analysis
                     .full_diagnostics(
                         &DiagnosticsConfig::test_sample(),
                         AssistResolveStrategy::None,
-                        file_id.into(),
+                        file_id.file_id(db),
                     )
                     .unwrap()
                 {
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs
index ff24602144a9d..57f95d114d9d7 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs
@@ -53,7 +53,7 @@ xflags::xflags! {
 
         /// Batch typecheck project and print summary statistics
         cmd analysis-stats {
-            /// Directory with Cargo.toml.
+            /// Directory with Cargo.toml or rust-project.json.
             required path: PathBuf
 
             optional --output format: OutputFormat
@@ -62,8 +62,6 @@ xflags::xflags! {
             optional --randomize
             /// Run type inference in parallel.
             optional --parallel
-            /// Print the total length of all source and macro files (whitespace is not counted).
-            optional --source-stats
 
             /// Only analyze items matching this path.
             optional -o, --only path: String
@@ -76,7 +74,7 @@ xflags::xflags! {
 
             /// Don't run build scripts or load `OUT_DIR` values by running `cargo check` before analysis.
             optional --disable-build-scripts
-            /// Don't use expand proc macros.
+            /// Don't expand proc macros.
             optional --disable-proc-macros
             /// Run the proc-macro-srv binary at the specified path.
             optional --proc-macro-srv path: PathBuf
@@ -103,7 +101,7 @@ xflags::xflags! {
 
         /// Run unit tests of the project using mir interpreter
         cmd run-tests {
-            /// Directory with Cargo.toml.
+            /// Directory with Cargo.toml or rust-project.json.
             required path: PathBuf
         }
 
@@ -117,12 +115,12 @@ xflags::xflags! {
         }
 
         cmd diagnostics {
-            /// Directory with Cargo.toml.
+            /// Directory with Cargo.toml or rust-project.json.
             required path: PathBuf
 
             /// Don't run build scripts or load `OUT_DIR` values by running `cargo check` before analysis.
             optional --disable-build-scripts
-            /// Don't use expand proc macros.
+            /// Don't expand proc macros.
             optional --disable-proc-macros
             /// Run the proc-macro-srv binary at the specified path.
             optional --proc-macro-srv path: PathBuf
@@ -130,17 +128,32 @@ xflags::xflags! {
 
         /// Report unresolved references
         cmd unresolved-references {
-            /// Directory with Cargo.toml.
+            /// Directory with Cargo.toml or rust-project.json.
             required path: PathBuf
 
             /// Don't run build scripts or load `OUT_DIR` values by running `cargo check` before analysis.
             optional --disable-build-scripts
-            /// Don't use expand proc macros.
+            /// Don't expand proc macros.
             optional --disable-proc-macros
             /// Run the proc-macro-srv binary at the specified path.
             optional --proc-macro-srv path: PathBuf
         }
 
+        /// Prime caches, as rust-analyzer typically does at startup in interactive sessions.
+        cmd prime-caches {
+            /// Directory with Cargo.toml or rust-project.json.
+            required path: PathBuf
+
+            /// Don't run build scripts or load `OUT_DIR` values by running `cargo check` before analysis.
+            optional --disable-build-scripts
+            /// Don't expand proc macros.
+            optional --disable-proc-macros
+            /// Run the proc-macro-srv binary at the specified path.
+            optional --proc-macro-srv path: PathBuf
+            /// Run cache priming in parallel.
+            optional --parallel
+        }
+
         cmd ssr {
             /// A structured search replace rule (`$a.foo($b) ==>> bar($a, $b)`)
             repeated rule: SsrRule
@@ -199,6 +212,7 @@ pub enum RustAnalyzerCmd {
     RustcTests(RustcTests),
     Diagnostics(Diagnostics),
     UnresolvedReferences(UnresolvedReferences),
+    PrimeCaches(PrimeCaches),
     Ssr(Ssr),
     Search(Search),
     Lsif(Lsif),
@@ -231,7 +245,6 @@ pub struct AnalysisStats {
     pub output: Option<OutputFormat>,
     pub randomize: bool,
     pub parallel: bool,
-    pub source_stats: bool,
     pub only: Option<String>,
     pub with_deps: bool,
     pub no_sysroot: bool,
@@ -279,6 +292,16 @@ pub struct UnresolvedReferences {
     pub proc_macro_srv: Option<PathBuf>,
 }
 
+#[derive(Debug)]
+pub struct PrimeCaches {
+    pub path: PathBuf,
+
+    pub disable_build_scripts: bool,
+    pub disable_proc_macros: bool,
+    pub proc_macro_srv: Option<PathBuf>,
+    pub parallel: bool,
+}
+
 #[derive(Debug)]
 pub struct Ssr {
     pub rule: Vec<SsrRule>,
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs
index eb5c44418b72d..f3b0699d55157 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs
@@ -8,8 +8,8 @@ use ide::{
     RootDatabase, StaticIndex, StaticIndexedFile, TokenId, TokenStaticData,
     VendoredLibrariesConfig,
 };
-use ide_db::{line_index::WideEncoding, LineIndexDatabase};
-use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice};
+use ide_db::{LineIndexDatabase, line_index::WideEncoding};
+use load_cargo::{LoadCargoConfig, ProcMacroServerChoice, load_workspace};
 use lsp_types::lsif;
 use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource};
 use rustc_hash::FxHashMap;
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/prime_caches.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/prime_caches.rs
new file mode 100644
index 0000000000000..46fb701ab4265
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/prime_caches.rs
@@ -0,0 +1,67 @@
+//! Load the project and run cache priming.
+//!
+//! Unlike `analysis-stats`, this command is intended to be used for
+//! benchmarking rust-analyzer's default startup configuration. It *does not*
+//! attempt to simulate the full IDE experience through the lifetime of
+//! an editing session.
+
+use load_cargo::{LoadCargoConfig, ProcMacroServerChoice, load_workspace};
+use profile::StopWatch;
+use project_model::{ProjectManifest, ProjectWorkspace};
+use vfs::AbsPathBuf;
+
+use crate::cli::flags;
+
+impl flags::PrimeCaches {
+    pub fn run(self) -> anyhow::Result<()> {
+        let root =
+            vfs::AbsPathBuf::assert_utf8(std::env::current_dir()?.join(&self.path)).normalize();
+        let config = crate::config::Config::new(
+            root.clone(),
+            lsp_types::ClientCapabilities::default(),
+            vec![],
+            None,
+        );
+        let mut stop_watch = StopWatch::start();
+
+        let cargo_config = config.cargo(None);
+        let with_proc_macro_server = if let Some(p) = &self.proc_macro_srv {
+            let path = vfs::AbsPathBuf::assert_utf8(std::env::current_dir()?.join(p));
+            ProcMacroServerChoice::Explicit(path)
+        } else {
+            ProcMacroServerChoice::Sysroot
+        };
+        let load_cargo_config = LoadCargoConfig {
+            load_out_dirs_from_check: !self.disable_build_scripts,
+            with_proc_macro_server,
+            // while this command is nominally focused on cache priming,
+            // we want to ensure that this command, not `load_workspace_at`,
+            // is responsible for that work.
+            prefill_caches: false,
+        };
+
+        let root = AbsPathBuf::assert_utf8(std::env::current_dir()?.join(root));
+        let root = ProjectManifest::discover_single(&root)?;
+        let workspace = ProjectWorkspace::load(root, &cargo_config, &|_| {})?;
+
+        let (db, _, _) = load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?;
+        let elapsed = stop_watch.elapsed();
+        eprintln!(
+            "Load time: {:?}ms, memory allocated: {}MB",
+            elapsed.time.as_millis(),
+            elapsed.memory.allocated.megabytes() as u64
+        );
+
+        let threads = if self.parallel { num_cpus::get() } else { 1 };
+        ide_db::prime_caches::parallel_prime_caches(&db, threads, &|_| ());
+
+        let elapsed = stop_watch.elapsed();
+        eprintln!(
+            "Cache priming time: {:?}ms, total memory allocated: {}MB",
+            elapsed.time.as_millis(),
+            elapsed.memory.allocated.megabytes() as u64
+        );
+
+        Ok(())
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/progress_report.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/progress_report.rs
index 8b143daf2aea4..1b9b870a7c74c 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/progress_report.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/progress_report.rs
@@ -9,13 +9,13 @@ pub(crate) struct ProgressReport<'a> {
     text: String,
     hidden: bool,
 
-    len: u64,
+    len: usize,
     pos: u64,
     msg: Option<Box<dyn Fn() -> String + 'a>>,
 }
 
 impl<'a> ProgressReport<'a> {
-    pub(crate) fn new(len: u64) -> ProgressReport<'a> {
+    pub(crate) fn new(len: usize) -> ProgressReport<'a> {
         ProgressReport { curr: 0.0, text: String::new(), hidden: false, len, pos: 0, msg: None }
     }
 
@@ -79,8 +79,8 @@ impl<'a> ProgressReport<'a> {
         // Backtrack to the first differing character
         let mut output = String::new();
         output += &'\x08'.to_string().repeat(self.text.len() - common_prefix_length);
-        // Output new suffix
-        output += &text[common_prefix_length..text.len()];
+        // Output the new suffix, iterating over chars() so we never slice inside a multi-byte character
+        output.extend(text.chars().skip(common_prefix_length));
 
         // If the new text is shorter than the old one: delete overlapping characters
         if let Some(overlap_count) = self.text.len().checked_sub(text.len()) {
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs
index 7398b9a9ef0b0..60b33f0a30869 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs
@@ -2,14 +2,14 @@
 
 use hir::{Crate, Module};
 use hir_ty::db::HirDatabase;
-use ide_db::{base_db::SourceRootDatabase, LineIndexDatabase};
+use ide_db::{LineIndexDatabase, base_db::SourceDatabase};
 use profile::StopWatch;
 use project_model::{CargoConfig, RustLibSource};
 use syntax::TextRange;
 
-use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
+use load_cargo::{LoadCargoConfig, ProcMacroServerChoice, load_workspace_at};
 
-use crate::cli::{flags, full_name_of_item, Result};
+use crate::cli::{Result, flags, full_name_of_item};
 
 impl flags::RunTests {
     pub fn run(self) -> Result<()> {
@@ -40,10 +40,10 @@ impl flags::RunTests {
                 None => " (unknown line col)".to_owned(),
                 Some(x) => format!("#{}:{}", x.line + 1, x.col),
             };
-            let path = &db
-                .source_root(db.file_source_root(file_id))
-                .path_for_file(&file_id)
-                .map(|x| x.to_string());
+            let source_root = db.file_source_root(file_id).source_root_id(db);
+            let source_root = db.source_root(source_root).source_root(db);
+
+            let path = source_root.path_for_file(&file_id).map(|x| x.to_string());
             let path = path.as_deref().unwrap_or("<unknown file>");
             format!("file://{path}{line_col}")
         };
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs
index e9ca12deaf6e6..c042c26bd1883 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs
@@ -10,19 +10,19 @@ use ide::{AnalysisHost, DiagnosticCode, DiagnosticsConfig};
 use itertools::Either;
 use paths::Utf8PathBuf;
 use profile::StopWatch;
-use project_model::toolchain_info::{target_data_layout, QueryConfig};
+use project_model::toolchain_info::{QueryConfig, target_data_layout};
 use project_model::{
     CargoConfig, ManifestPath, ProjectWorkspace, ProjectWorkspaceKind, RustLibSource,
     RustSourceWorkspaceConfig, Sysroot,
 };
 
-use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice};
+use load_cargo::{LoadCargoConfig, ProcMacroServerChoice, load_workspace};
 use rustc_hash::FxHashMap;
 use triomphe::Arc;
 use vfs::{AbsPathBuf, FileId};
 use walkdir::WalkDir;
 
-use crate::cli::{flags, report_metric, Result};
+use crate::cli::{Result, flags, report_metric};
 
 struct Tester {
     host: AnalysisHost,
@@ -139,7 +139,7 @@ impl Tester {
             FxHashMap::default()
         };
         let text = read_to_string(&p).unwrap();
-        let mut change = ChangeWithProcMacros::new();
+        let mut change = ChangeWithProcMacros::default();
         // Ignore unstable tests, since they move too fast and we do not intend to support all of them.
         let mut ignore_test = text.contains("#![feature");
         // Ignore test with extern crates, as this infra don't support them yet.
@@ -164,13 +164,13 @@ impl Tester {
                     let analysis = self.host.analysis();
                     let root_file = self.root_file;
                     move || {
-                        let res = std::panic::catch_unwind(move || {
+                        let res = std::panic::catch_unwind(AssertUnwindSafe(move || {
                             analysis.full_diagnostics(
                                 diagnostic_config,
                                 ide::AssistResolveStrategy::None,
                                 root_file,
                             )
-                        });
+                        }));
                         main.unpark();
                         res
                     }
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
index fe75872105aec..2062294f807ce 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
@@ -8,7 +8,7 @@ use ide::{
     TokenStaticData, VendoredLibrariesConfig,
 };
 use ide_db::LineIndexDatabase;
-use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
+use load_cargo::{LoadCargoConfig, ProcMacroServerChoice, load_workspace_at};
 use rustc_hash::{FxHashMap, FxHashSet};
 use scip::types::{self as scip_types, SymbolInformation};
 use tracing::error;
@@ -128,7 +128,7 @@ impl flags::Scip {
             };
 
         // Generates symbols from token monikers.
-        let mut symbol_generator = SymbolGenerator::new();
+        let mut symbol_generator = SymbolGenerator::default();
 
         for StaticIndexedFile { file_id, tokens, .. } in si.files {
             symbol_generator.clear_document_local_state();
@@ -417,16 +417,13 @@ struct TokenSymbols {
     is_inherent_impl: bool,
 }
 
+#[derive(Default)]
 struct SymbolGenerator {
     token_to_symbols: FxHashMap<TokenId, Option<TokenSymbols>>,
     local_count: usize,
 }
 
 impl SymbolGenerator {
-    fn new() -> Self {
-        SymbolGenerator { token_to_symbols: FxHashMap::default(), local_count: 0 }
-    }
-
     fn clear_document_local_state(&mut self) {
         self.local_count = 0;
     }
@@ -517,12 +514,13 @@ mod test {
 
     fn position(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (AnalysisHost, FilePosition) {
         let mut host = AnalysisHost::default();
-        let change_fixture = ChangeFixture::parse(ra_fixture);
+        let change_fixture = ChangeFixture::parse(host.raw_database(), ra_fixture);
         host.raw_database_mut().apply_change(change_fixture.change);
         let (file_id, range_or_offset) =
             change_fixture.file_position.expect("expected a marker ()");
         let offset = range_or_offset.expect_offset();
-        (host, FilePosition { file_id: file_id.into(), offset })
+        let position = FilePosition { file_id: file_id.file_id(host.raw_database()), offset };
+        (host, position)
     }
 
     /// If expected == "", then assert that there are no symbols (this is basically local symbol)
@@ -872,7 +870,7 @@ pub mod example_mod {
         let s = "/// foo\nfn bar() {}";
 
         let mut host = AnalysisHost::default();
-        let change_fixture = ChangeFixture::parse(s);
+        let change_fixture = ChangeFixture::parse(host.raw_database(), s);
         host.raw_database_mut().apply_change(change_fixture.change);
 
         let analysis = host.analysis();
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
index c03688e8009cb..e3e3a143de03a 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
@@ -1,9 +1,9 @@
 //! Applies structured search replace rules from the command line.
 
 use anyhow::Context;
-use ide_db::{base_db::SourceDatabase, EditionedFileId};
+use ide_db::{EditionedFileId, base_db::SourceDatabase};
 use ide_ssr::MatchFinder;
-use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
+use load_cargo::{LoadCargoConfig, ProcMacroServerChoice, load_workspace_at};
 use project_model::{CargoConfig, RustLibSource};
 
 use crate::cli::flags;
@@ -34,7 +34,7 @@ impl flags::Ssr {
         let edits = match_finder.edits();
         for (file_id, edit) in edits {
             if let Some(path) = vfs.file_path(file_id).as_path() {
-                let mut contents = db.file_text(file_id).to_string();
+                let mut contents = db.file_text(file_id).text(db).to_string();
                 edit.apply(&mut contents);
                 std::fs::write(path, contents)
                     .with_context(|| format!("failed to write {path}"))?;
@@ -49,7 +49,7 @@ impl flags::Search {
     /// `debug_snippet`. This is intended for debugging and probably isn't in it's current form useful
     /// for much else.
     pub fn run(self) -> anyhow::Result<()> {
-        use ide_db::base_db::SourceRootDatabase;
+        use ide_db::base_db::SourceDatabase;
         use ide_db::symbol_index::SymbolsDatabase;
         let cargo_config =
             CargoConfig { all_targets: true, set_test: true, ..CargoConfig::default() };
@@ -70,10 +70,10 @@ impl flags::Search {
         }
         if let Some(debug_snippet) = &self.debug {
             for &root in db.local_roots().iter() {
-                let sr = db.source_root(root);
+                let sr = db.source_root(root).source_root(db);
                 for file_id in sr.iter() {
                     for debug_info in match_finder.debug_where_text_equal(
-                        EditionedFileId::current_edition(file_id),
+                        EditionedFileId::current_edition(db, file_id),
                         debug_snippet,
                     ) {
                         println!("{debug_info:#?}");
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/unresolved_references.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/unresolved_references.rs
index 021b1bff39301..bca7c8a098c29 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/unresolved_references.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/unresolved_references.rs
@@ -1,14 +1,12 @@
 //! Reports references in code that the IDE layer cannot resolve.
-use hir::{db::HirDatabase, sym, AnyDiagnostic, Crate, HirFileIdExt as _, Module, Semantics};
+use hir::{AnyDiagnostic, Crate, Module, Semantics, db::HirDatabase, sym};
 use ide::{AnalysisHost, RootDatabase, TextRange};
 use ide_db::{
-    base_db::{SourceDatabase, SourceRootDatabase},
-    defs::NameRefClass,
-    EditionedFileId, FxHashSet, LineIndexDatabase as _,
+    EditionedFileId, FxHashSet, LineIndexDatabase as _, base_db::SourceDatabase, defs::NameRefClass,
 };
-use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
+use load_cargo::{LoadCargoConfig, ProcMacroServerChoice, load_workspace_at};
 use parser::SyntaxKind;
-use syntax::{ast, AstNode, WalkEvent};
+use syntax::{AstNode, WalkEvent, ast};
 use vfs::FileId;
 
 use crate::cli::flags;
@@ -57,27 +55,28 @@ impl flags::UnresolvedReferences {
 
         let work = all_modules(db).into_iter().filter(|module| {
             let file_id = module.definition_source_file_id(db).original_file(db);
-            let source_root = db.file_source_root(file_id.into());
-            let source_root = db.source_root(source_root);
+            let source_root = db.file_source_root(file_id.file_id(db)).source_root_id(db);
+            let source_root = db.source_root(source_root).source_root(db);
             !source_root.is_library
         });
 
         for module in work {
             let file_id = module.definition_source_file_id(db).original_file(db);
+            let file_id = file_id.file_id(db);
             if !visited_files.contains(&file_id) {
                 let crate_name =
                     module.krate().display_name(db).as_deref().unwrap_or(&sym::unknown).to_owned();
-                let file_path = vfs.file_path(file_id.into());
+                let file_path = vfs.file_path(file_id);
                 eprintln!("processing crate: {crate_name}, module: {file_path}",);
 
-                let line_index = db.line_index(file_id.into());
-                let file_text = db.file_text(file_id.into());
+                let line_index = db.line_index(file_id);
+                let file_text = db.file_text(file_id);
 
-                for range in find_unresolved_references(db, &sema, file_id.into(), &module) {
+                for range in find_unresolved_references(db, &sema, file_id, &module) {
                     let line_col = line_index.line_col(range.start());
                     let line = line_col.line + 1;
                     let col = line_col.col + 1;
-                    let text = &file_text[range];
+                    let text = &file_text.text(db)[range];
                     println!("{file_path}:{line}:{col}: {text}");
                 }
 
@@ -124,7 +123,7 @@ fn find_unresolved_references(
         let node = inactive_code.node;
         let range = node.map(|it| it.text_range()).original_node_file_range_rooted(db);
 
-        if range.file_id != file_id {
+        if range.file_id.file_id(db) != file_id {
             continue;
         }
 
@@ -140,7 +139,7 @@ fn all_unresolved_references(
 ) -> Vec<TextRange> {
     let file_id = sema
         .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition(file_id));
+        .unwrap_or_else(|| EditionedFileId::current_edition(sema.db, file_id));
     let file = sema.parse(file_id);
     let root = file.syntax();
 
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/command.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/command.rs
index b19a1b8d16700..0035d941e2c60 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/command.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/command.rs
@@ -13,24 +13,33 @@ use crossbeam_channel::Sender;
 use process_wrap::std::{StdChildWrapper, StdCommandWrap};
 use stdx::process::streaming_output;
 
-/// Cargo output is structured as a one JSON per line. This trait abstracts parsing one line of
-/// cargo output into a Rust data type.
-pub(crate) trait ParseFromLine: Sized + Send + 'static {
-    fn from_line(line: &str, error: &mut String) -> Option<Self>;
-    fn from_eof() -> Option<Self>;
+/// Cargo output is structured as one JSON object per line. This trait abstracts parsing one
+/// line of cargo output into a Rust data type.
+pub(crate) trait CargoParser<T>: Send + 'static {
+    fn from_line(&self, line: &str, error: &mut String) -> Option<T>;
+    fn from_eof(&self) -> Option<T>;
 }
 
 struct CargoActor<T> {
+    parser: Box<dyn CargoParser<T>>,
     sender: Sender<T>,
     stdout: ChildStdout,
     stderr: ChildStderr,
 }
 
-impl<T: ParseFromLine> CargoActor<T> {
-    fn new(sender: Sender<T>, stdout: ChildStdout, stderr: ChildStderr) -> Self {
-        CargoActor { sender, stdout, stderr }
+impl<T: Sized + Send + 'static> CargoActor<T> {
+    fn new(
+        parser: impl CargoParser<T>,
+        sender: Sender<T>,
+        stdout: ChildStdout,
+        stderr: ChildStderr,
+    ) -> Self {
+        let parser = Box::new(parser);
+        CargoActor { parser, sender, stdout, stderr }
     }
+}
 
+impl<T: Sized + Send + 'static> CargoActor<T> {
     fn run(self) -> io::Result<(bool, String)> {
         // We manually read a line at a time, instead of using serde's
         // stream deserializers, because the deserializer cannot recover
@@ -47,7 +56,7 @@ impl<T: ParseFromLine> CargoActor<T> {
         let mut read_at_least_one_stderr_message = false;
         let process_line = |line: &str, error: &mut String| {
             // Try to deserialize a message from Cargo or Rustc.
-            if let Some(t) = T::from_line(line, error) {
+            if let Some(t) = self.parser.from_line(line, error) {
                 self.sender.send(t).unwrap();
                 true
             } else {
@@ -68,7 +77,7 @@ impl<T: ParseFromLine> CargoActor<T> {
                 }
             },
             &mut || {
-                if let Some(t) = T::from_eof() {
+                if let Some(t) = self.parser.from_eof() {
                     self.sender.send(t).unwrap();
                 }
             },
@@ -116,8 +125,12 @@ impl<T> fmt::Debug for CommandHandle<T> {
     }
 }
 
-impl<T: ParseFromLine> CommandHandle<T> {
-    pub(crate) fn spawn(mut command: Command, sender: Sender<T>) -> std::io::Result<Self> {
+impl<T: Sized + Send + 'static> CommandHandle<T> {
+    pub(crate) fn spawn(
+        mut command: Command,
+        parser: impl CargoParser<T>,
+        sender: Sender<T>,
+    ) -> std::io::Result<Self> {
         command.stdout(Stdio::piped()).stderr(Stdio::piped()).stdin(Stdio::null());
 
         let program = command.get_program().into();
@@ -134,7 +147,7 @@ impl<T: ParseFromLine> CommandHandle<T> {
         let stdout = child.0.stdout().take().unwrap();
         let stderr = child.0.stderr().take().unwrap();
 
-        let actor = CargoActor::<T>::new(sender, stdout, stderr);
+        let actor = CargoActor::<T>::new(parser, sender, stdout, stderr);
         let thread = stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker)
             .name("CommandHandle".to_owned())
             .spawn(move || actor.run())
@@ -153,9 +166,9 @@ impl<T: ParseFromLine> CommandHandle<T> {
         if read_at_least_one_message || exit_status.success() {
             Ok(())
         } else {
-            Err(io::Error::new(io::ErrorKind::Other, format!(
-            "Cargo watcher failed, the command produced no valid metadata (exit code: {exit_status:?}):\n{error}"
-        )))
+            Err(io::Error::other(format!(
+                "Cargo watcher failed, the command produced no valid metadata (exit code: {exit_status:?}):\n{error}"
+            )))
         }
     }
 }
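
The `command.rs` hunk above replaces the static `ParseFromLine` trait with an object-safe `CargoParser<T>` that the actor stores as `Box<dyn CargoParser<T>>` and calls through `&self`, so a parser can carry its own state. A minimal sketch of an implementor under that shape (the trait signature mirrors the hunk; `TestMessage` and `LineParser` are illustrative, not part of the patch):

```rust
// Trait shape as introduced in the hunk above.
trait CargoParser<T>: Send + 'static {
    fn from_line(&self, line: &str, error: &mut String) -> Option<T>;
    fn from_eof(&self) -> Option<T>;
}

// Illustrative message type and parser; not part of the patch.
#[derive(Debug)]
enum TestMessage {
    Line(String),
    Eof,
}

struct LineParser;

impl CargoParser<TestMessage> for LineParser {
    fn from_line(&self, line: &str, error: &mut String) -> Option<TestMessage> {
        if line.trim().is_empty() {
            // Collect anything unparsable into the shared error buffer.
            error.push_str(line);
            error.push('\n');
            None
        } else {
            Some(TestMessage::Line(line.to_owned()))
        }
    }

    fn from_eof(&self) -> Option<TestMessage> {
        Some(TestMessage::Eof)
    }
}

fn main() {
    // The actor boxes the parser and invokes it once per output line.
    let parser: Box<dyn CargoParser<TestMessage>> = Box::new(LineParser);
    let mut error = String::new();
    let msg = parser.from_line(r#"{"reason":"compiler-message"}"#, &mut error);
    println!("{msg:?}, eof: {:?}", parser.from_eof());
}
```
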
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
index 45ac68339b38c..dd827949a9c22 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
@@ -15,8 +15,8 @@ use ide::{
     Snippet, SnippetScope, SourceRootId,
 };
 use ide_db::{
-    imports::insert_use::{ImportGranularity, InsertUseConfig, PrefixKind},
     SnippetCap,
+    imports::insert_use::{ImportGranularity, InsertUseConfig, PrefixKind},
 };
 use itertools::{Either, Itertools};
 use paths::{Utf8Path, Utf8PathBuf};
@@ -27,8 +27,8 @@ use project_model::{
 use rustc_hash::{FxHashMap, FxHashSet};
 use semver::Version;
 use serde::{
-    de::{DeserializeOwned, Error},
     Deserialize, Serialize,
+    de::{DeserializeOwned, Error},
 };
 use stdx::format_to_acc;
 use triomphe::Arc;
@@ -41,6 +41,8 @@ use crate::{
     lsp_ext::{WorkspaceSymbolSearchKind, WorkspaceSymbolSearchScope},
 };
 
+type FxIndexMap<K, V> = indexmap::IndexMap<K, V, rustc_hash::FxBuildHasher>;
+
 mod patch_old_style;
 
 // Conventions for configuration keys to preserve maximal extendability without breakage:
@@ -81,7 +83,7 @@ config_data! {
         cachePriming_numThreads: NumThreads = NumThreads::Physical,
 
         /// Custom completion snippets.
-        completion_snippets_custom: FxHashMap<String, SnippetDef> = Config::completion_snippets_default(),
+        completion_snippets_custom: FxIndexMap<String, SnippetDef> = Config::completion_snippets_default(),
 
 
         /// These paths (file/directories) will be ignored by rust-analyzer. They are
@@ -424,7 +426,7 @@ config_data! {
         ///
         /// Similarly, the JSON representation of `DiscoverArgument::Buildfile` is:
         ///
-        /// ```
+        /// ```json
         /// {
         ///     "buildfile": "BUILD"
         /// }
@@ -532,7 +534,7 @@ config_data! {
         imports_granularity_enforce: bool              = false,
         /// How imports should be grouped into use statements.
         imports_granularity_group: ImportGranularityDef  = ImportGranularityDef::Crate,
-        /// Group inserted imports by the [following order](https://rust-analyzer.github.io/manual.html#auto-import). Groups are separated by newlines.
+        /// Group inserted imports by the [following order](https://rust-analyzer.github.io/book/features.html#auto-import). Groups are separated by newlines.
         imports_group_enable: bool                           = true,
         /// Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`.
         imports_merge_glob: bool           = true,
@@ -600,13 +602,16 @@ config_data! {
         cargo_extraArgs: Vec<String> = vec![],
         /// Extra environment variables that will be set when running cargo, rustc
         /// or other commands within the workspace. Useful for setting RUSTFLAGS.
-        cargo_extraEnv: FxHashMap<String, String> = FxHashMap::default(),
+        cargo_extraEnv: FxHashMap<String, Option<String>> = FxHashMap::default(),
         /// List of features to activate.
         ///
         /// Set this to `"all"` to pass `--all-features` to cargo.
         cargo_features: CargoFeaturesDef      = CargoFeaturesDef::Selected(vec![]),
         /// Whether to pass `--no-default-features` to cargo.
         cargo_noDefaultFeatures: bool    = false,
+        /// Whether to skip fetching dependencies. If set to "true", the analysis is performed
+        /// entirely offline, and Cargo metadata for dependencies is not fetched.
+        cargo_noDeps: bool = false,
         /// Relative path to the sysroot, or "discover" to try to automatically find it via
         /// "rustc --print sysroot".
         ///
@@ -647,7 +652,7 @@ config_data! {
         check_extraArgs | checkOnSave_extraArgs: Vec<String>             = vec![],
         /// Extra environment variables that will be set when running `cargo check`.
         /// Extends `#rust-analyzer.cargo.extraEnv#`.
-        check_extraEnv | checkOnSave_extraEnv: FxHashMap<String, String> = FxHashMap::default(),
+        check_extraEnv | checkOnSave_extraEnv: FxHashMap<String, Option<String>> = FxHashMap::default(),
         /// List of features to activate. Defaults to
         /// `#rust-analyzer.cargo.features#`.
         ///
@@ -916,10 +921,9 @@ impl Config {
             tracing::info!("updating config from JSON: {:#}", json);
 
             if !(json.is_null() || json.as_object().is_some_and(|it| it.is_empty())) {
-                let mut json_errors = vec![];
                 let detached_files = get_field_json::<Vec<Utf8PathBuf>>(
                     &mut json,
-                    &mut json_errors,
+                    &mut Vec::new(),
                     "detachedFiles",
                     None,
                 )
@@ -931,17 +935,19 @@ impl Config {
                 patch_old_style::patch_json_for_outdated_configs(&mut json);
 
                 let mut json_errors = vec![];
-                let snips = get_field_json::<FxHashMap<String, SnippetDef>>(
-                    &mut json,
-                    &mut json_errors,
-                    "completion_snippets_custom",
-                    None,
-                )
-                .unwrap_or(self.completion_snippets_custom().to_owned());
+
+                let input = FullConfigInput::from_json(json, &mut json_errors);
 
                 // IMPORTANT : This holds as long as ` completion_snippets_custom` is declared `client`.
                 config.snippets.clear();
 
+                let snips = input
+                    .global
+                    .completion_snippets_custom
+                    .as_ref()
+                    .unwrap_or(&self.default_config.global.completion_snippets_custom);
+                #[allow(dead_code)]
+                let _ = Self::completion_snippets_custom;
                 for (name, def) in snips.iter() {
                     if def.prefix.is_empty() && def.postfix.is_empty() {
                         continue;
@@ -968,8 +974,9 @@ impl Config {
                         )),
                     }
                 }
+
                 config.client_config = (
-                    FullConfigInput::from_json(json, &mut json_errors),
+                    input,
                     ConfigErrors(
                         json_errors
                             .into_iter()
@@ -1102,10 +1109,10 @@ impl Config {
             config
                 .client_config
                 .1
-                 .0
+                .0
                 .iter()
-                .chain(config.user_config.as_ref().into_iter().flat_map(|it| it.1 .0.iter()))
-                .chain(config.ratoml_file.values().flat_map(|it| it.1 .0.iter()))
+                .chain(config.user_config.as_ref().into_iter().flat_map(|it| it.1.0.iter()))
+                .chain(config.ratoml_file.values().flat_map(|it| it.1.0.iter()))
                 .chain(config.validation_errors.0.iter())
                 .cloned()
                 .collect(),
@@ -1377,18 +1384,21 @@ impl ConfigErrors {
 
 impl fmt::Display for ConfigErrors {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        let errors = self.0.iter().format_with("\n", |inner, f| match &**inner {
-            ConfigErrorInner::Json { config_key: key, error: e } => {
-                f(key)?;
-                f(&": ")?;
-                f(e)
-            }
-            ConfigErrorInner::Toml { config_key: key, error: e } => {
-                f(key)?;
-                f(&": ")?;
-                f(e)
-            }
-            ConfigErrorInner::ParseError { reason } => f(reason),
+        let errors = self.0.iter().format_with("\n", |inner, f| {
+            match &**inner {
+                ConfigErrorInner::Json { config_key: key, error: e } => {
+                    f(key)?;
+                    f(&": ")?;
+                    f(e)
+                }
+                ConfigErrorInner::Toml { config_key: key, error: e } => {
+                    f(key)?;
+                    f(&": ")?;
+                    f(e)
+                }
+                ConfigErrorInner::ParseError { reason } => f(reason),
+            }?;
+            f(&";")
         });
         write!(f, "invalid config value{}:\n{}", if self.0.len() == 1 { "" } else { "s" }, errors)
     }
@@ -1874,7 +1884,10 @@ impl Config {
         self.cargo_extraArgs(source_root)
     }
 
-    pub fn extra_env(&self, source_root: Option<SourceRootId>) -> &FxHashMap<String, String> {
+    pub fn extra_env(
+        &self,
+        source_root: Option<SourceRootId>,
+    ) -> &FxHashMap<String, Option<String>> {
         self.cargo_extraEnv(source_root)
     }
 
@@ -1884,7 +1897,10 @@ impl Config {
         extra_args
     }
 
-    pub fn check_extra_env(&self, source_root: Option<SourceRootId>) -> FxHashMap<String, String> {
+    pub fn check_extra_env(
+        &self,
+        source_root: Option<SourceRootId>,
+    ) -> FxHashMap<String, Option<String>> {
         let mut extra_env = self.cargo_extraEnv(source_root).clone();
         extra_env.extend(self.check_extraEnv(source_root).clone());
         extra_env
@@ -2025,6 +2041,7 @@ impl Config {
             extra_env: self.cargo_extraEnv(source_root).clone(),
             target_dir: self.target_dir_from_config(source_root),
             set_test: *self.cfg_setTest(source_root),
+            no_deps: *self.cargo_noDeps(source_root),
         }
     }
 
@@ -2032,21 +2049,13 @@ impl Config {
         *self.cfg_setTest(source_root)
     }
 
-    pub(crate) fn completion_snippets_default() -> FxHashMap<String, SnippetDef> {
+    pub(crate) fn completion_snippets_default() -> FxIndexMap<String, SnippetDef> {
         serde_json::from_str(
             r#"{
-            "Arc::new": {
-                "postfix": "arc",
-                "body": "Arc::new(${receiver})",
-                "requires": "std::sync::Arc",
-                "description": "Put the expression into an `Arc`",
-                "scope": "expr"
-            },
-            "Rc::new": {
-                "postfix": "rc",
-                "body": "Rc::new(${receiver})",
-                "requires": "std::rc::Rc",
-                "description": "Put the expression into an `Rc`",
+            "Ok": {
+                "postfix": "ok",
+                "body": "Ok(${receiver})",
+                "description": "Wrap the expression in a `Result::Ok`",
                 "scope": "expr"
             },
             "Box::pin": {
@@ -2056,10 +2065,17 @@ impl Config {
                 "description": "Put the expression into a pinned `Box`",
                 "scope": "expr"
             },
-            "Ok": {
-                "postfix": "ok",
-                "body": "Ok(${receiver})",
-                "description": "Wrap the expression in a `Result::Ok`",
+            "Arc::new": {
+                "postfix": "arc",
+                "body": "Arc::new(${receiver})",
+                "requires": "std::sync::Arc",
+                "description": "Put the expression into an `Arc`",
+                "scope": "expr"
+            },
+            "Some": {
+                "postfix": "some",
+                "body": "Some(${receiver})",
+                "description": "Wrap the expression in an `Option::Some`",
                 "scope": "expr"
             },
             "Err": {
@@ -2068,10 +2084,11 @@ impl Config {
                 "description": "Wrap the expression in a `Result::Err`",
                 "scope": "expr"
             },
-            "Some": {
-                "postfix": "some",
-                "body": "Some(${receiver})",
-                "description": "Wrap the expression in an `Option::Some`",
+            "Rc::new": {
+                "postfix": "rc",
+                "body": "Rc::new(${receiver})",
+                "requires": "std::rc::Rc",
+                "description": "Put the expression into an `Rc`",
                 "scope": "expr"
             }
         }"#,
@@ -2719,10 +2736,6 @@ pub enum NumThreads {
 }
 
 macro_rules! _default_val {
-    (@verbatim: $s:literal, $ty:ty) => {{
-        let default_: $ty = serde_json::from_str(&$s).unwrap();
-        default_
-    }};
     ($default:expr, $ty:ty) => {{
         let default_: $ty = $default;
         default_
@@ -2731,9 +2744,6 @@ macro_rules! _default_val {
 use _default_val as default_val;
 
 macro_rules! _default_str {
-    (@verbatim: $s:literal, $_ty:ty) => {
-        $s.to_owned()
-    };
     ($default:expr, $ty:ty) => {{
         let val = default_val!($default, $ty);
         serde_json::to_string_pretty(&val).unwrap()
@@ -2874,7 +2884,7 @@ macro_rules! _config_data {
     ($(#[doc=$dox:literal])* $modname:ident: struct $name:ident <- $input:ident -> {
         $(
             $(#[doc=$doc:literal])*
-            $vis:vis $field:ident $(| $alias:ident)*: $ty:ty = $(@$marker:ident: )? $default:expr,
+            $vis:vis $field:ident $(| $alias:ident)*: $ty:ty = $default:expr,
         )*
     }) => {
         /// Default config values for this grouping.
@@ -2911,7 +2921,7 @@ macro_rules! _config_data {
         impl Default for $name {
             fn default() -> Self {
                 $name {$(
-                    $field: default_val!($(@$marker:)? $default, $ty),
+                    $field: default_val!($default, $ty),
                 )*}
             }
         }
@@ -2947,7 +2957,7 @@ macro_rules! _config_data {
                     $({
                         let field = stringify!($field);
                         let ty = stringify!($ty);
-                        let default = default_str!($(@$marker:)? $default, $ty);
+                        let default = default_str!($default, $ty);
 
                         (field, ty, &[$($doc),*], default)
                     },)*
@@ -3085,8 +3095,7 @@ fn get_field_json<T: DeserializeOwned>(
             json.pointer_mut(&pointer)
                 .map(|it| serde_json::from_value(it.take()).map_err(|e| (e, pointer)))
         })
-        .find(Result::is_ok)
-        .and_then(|res| match res {
+        .flat_map(|res| match res {
             Ok(it) => Some(it),
             Err((e, pointer)) => {
                 tracing::warn!("Failed to deserialize config field at {}: {:?}", pointer, e);
@@ -3094,6 +3103,7 @@ fn get_field_json<T: DeserializeOwned>(
                 None
             }
         })
+        .next()
 }
 
 fn get_field_toml<T: DeserializeOwned>(
@@ -3210,7 +3220,7 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
         "FxHashMap<Box<str>, Box<[Box<str>]>>" => set! {
             "type": "object",
         },
-        "FxHashMap<String, SnippetDef>" => set! {
+        "FxIndexMap<String, SnippetDef>" => set! {
             "type": "object",
         },
         "FxHashMap<String, String>" => set! {
@@ -3646,12 +3656,16 @@ fn validate_toml_table(
 #[cfg(test)]
 fn manual(fields: &[SchemaField]) -> String {
     fields.iter().fold(String::new(), |mut acc, (field, _ty, doc, default)| {
-        let name = format!("rust-analyzer.{}", field.replace('_', "."));
+        let id = field.replace('_', ".");
+        let name = format!("rust-analyzer.{id}");
         let doc = doc_comment_to_string(doc);
         if default.contains('\n') {
-            format_to_acc!(acc, " **{name}**\n\nDefault:\n\n```{default}\n\n```\n\n {doc}\n\n ")
+            format_to_acc!(
+                acc,
+                "## {name} {{#{id}}}\n\nDefault:\n```json\n{default}\n```\n\n{doc}\n\n"
+            )
         } else {
-            format_to_acc!(acc, "**{name}** (default: {default})\n\n {doc}\n\n")
+            format_to_acc!(acc, "## {name} {{#{id}}}\n\nDefault: `{default}`\n\n{doc}\n\n")
         }
     })
 }
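
The `get_field_json` hunk above replaces `.find(Result::is_ok).and_then(..)` with `.flat_map(..).next()`, so every deserialization failure encountered before the first successful config pointer now gets a warning instead of being skipped silently. A minimal, self-contained sketch of the same iterator idiom (plain `i64` parsing and `eprintln!` stand in for the config machinery and `tracing`):

```rust
// Walk candidate values in priority order, log every failed parse, and stop at
// the first success. Names here are illustrative only.
fn first_valid(candidates: &[&str]) -> Option<i64> {
    candidates
        .iter()
        .flat_map(|raw| match raw.parse::<i64>() {
            Ok(value) => Some(value),
            Err(err) => {
                // In rust-analyzer this is a `tracing::warn!`; eprintln! stands in here.
                eprintln!("failed to parse {raw:?}: {err}");
                None
            }
        })
        .next()
}

fn main() {
    // Logs the failure for "oops", then returns the first value that parses.
    assert_eq!(first_valid(&["oops", "42", "7"]), Some(42));
    assert_eq!(first_valid(&["nope"]), None);
}
```

The happy path is unchanged: the first `Ok` value still wins. `filter_map` would work equally well; `flat_map` is kept to mirror the diff.
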
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config/patch_old_style.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config/patch_old_style.rs
index 92c0c0d048ab2..95857dd8f3b4b 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config/patch_old_style.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config/patch_old_style.rs
@@ -1,5 +1,5 @@
 //! See [`patch_json_for_outdated_configs`]
-use serde_json::{json, Value};
+use serde_json::{Value, json};
 
 /// This function patches the json config to the new expected keys.
 /// That is we try to load old known config keys here and convert them to the new ones.
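
Only the import order changes in patch_old_style.rs, but since its doc comment describes migrating old config keys to new ones, here is a rough sketch of what such a key rename can look like with `serde_json` (the key names are invented; the real function handles many specific keys and nesting rules):

```rust
use serde_json::{Value, json};

/// Illustrative only: move a value from an old config key to its new location
/// unless the new key is already set, mirroring the kind of work
/// `patch_json_for_outdated_configs` does (actual keys and rules differ).
fn patch_renamed_key(config: &mut Value, old: &str, new: &str) {
    let Some(obj) = config.as_object_mut() else { return };
    if obj.contains_key(new) {
        return; // an explicit new-style setting wins
    }
    if let Some(value) = obj.remove(old) {
        obj.insert(new.to_owned(), value);
    }
}

fn main() {
    let mut config = json!({ "oldKey": true });
    patch_renamed_key(&mut config, "oldKey", "newKey");
    assert_eq!(config, json!({ "newKey": true }));
}
```
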
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs
index 0b51dd87fea0c..9b1463b1126bf 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs
@@ -7,7 +7,6 @@ use cargo_metadata::PackageId;
 use ide::FileId;
 use ide_db::FxHashMap;
 use itertools::Itertools;
-use nohash_hasher::{IntMap, IntSet};
 use rustc_hash::FxHashSet;
 use stdx::iter_eq_by;
 use triomphe::Arc;
@@ -15,7 +14,7 @@ use triomphe::Arc;
 use crate::{global_state::GlobalStateSnapshot, lsp, lsp_ext, main_loop::DiagnosticsTaskKind};
 
 pub(crate) type CheckFixes =
-    Arc<IntMap<usize, FxHashMap<Option<Arc<PackageId>>, IntMap<FileId, Vec<Fix>>>>>;
+    Arc<Vec<FxHashMap<Option<Arc<PackageId>>, FxHashMap<FileId, Vec<Fix>>>>>;
 
 #[derive(Debug, Default, Clone)]
 pub struct DiagnosticsMapConfig {
@@ -29,16 +28,16 @@ pub(crate) type DiagnosticsGeneration = usize;
 
 #[derive(Debug, Default, Clone)]
 pub(crate) struct DiagnosticCollection {
-    // FIXME: should be IntMap<FileId, Vec<ra_id::Diagnostic>>
-    pub(crate) native_syntax: IntMap<FileId, (DiagnosticsGeneration, Vec<lsp_types::Diagnostic>)>,
-    pub(crate) native_semantic: IntMap<FileId, (DiagnosticsGeneration, Vec<lsp_types::Diagnostic>)>,
+    // FIXME: should be FxHashMap<FileId, Vec<ra_id::Diagnostic>>
+    pub(crate) native_syntax:
+        FxHashMap<FileId, (DiagnosticsGeneration, Vec<lsp_types::Diagnostic>)>,
+    pub(crate) native_semantic:
+        FxHashMap<FileId, (DiagnosticsGeneration, Vec<lsp_types::Diagnostic>)>,
     // FIXME: should be Vec<flycheck::Diagnostic>
-    pub(crate) check: IntMap<
-        usize,
-        FxHashMap<Option<Arc<PackageId>>, IntMap<FileId, Vec<lsp_types::Diagnostic>>>,
-    >,
+    pub(crate) check:
+        Vec<FxHashMap<Option<Arc<PackageId>>, FxHashMap<FileId, Vec<lsp_types::Diagnostic>>>>,
     pub(crate) check_fixes: CheckFixes,
-    changes: IntSet<FileId>,
+    changes: FxHashSet<FileId>,
     /// Counter for supplying a new generation number for diagnostics.
     /// This is used to keep track of when to clear the diagnostics for a given file as we compute
     /// diagnostics on multiple worker threads simultaneously which may result in multiple diagnostics
@@ -55,11 +54,11 @@ pub(crate) struct Fix {
 
 impl DiagnosticCollection {
     pub(crate) fn clear_check(&mut self, flycheck_id: usize) {
-        let Some(check) = self.check.get_mut(&flycheck_id) else {
+        let Some(check) = self.check.get_mut(flycheck_id) else {
             return;
         };
         self.changes.extend(check.drain().flat_map(|(_, v)| v.into_keys()));
-        if let Some(fixes) = Arc::make_mut(&mut self.check_fixes).get_mut(&flycheck_id) {
+        if let Some(fixes) = Arc::make_mut(&mut self.check_fixes).get_mut(flycheck_id) {
             fixes.clear();
         }
     }
@@ -67,7 +66,7 @@ impl DiagnosticCollection {
     pub(crate) fn clear_check_all(&mut self) {
         Arc::make_mut(&mut self.check_fixes).clear();
         self.changes.extend(
-            self.check.values_mut().flat_map(|it| it.drain().flat_map(|(_, v)| v.into_keys())),
+            self.check.iter_mut().flat_map(|it| it.drain().flat_map(|(_, v)| v.into_keys())),
         )
     }
 
@@ -76,14 +75,14 @@ impl DiagnosticCollection {
         flycheck_id: usize,
         package_id: Arc<PackageId>,
     ) {
-        let Some(check) = self.check.get_mut(&flycheck_id) else {
+        let Some(check) = self.check.get_mut(flycheck_id) else {
             return;
         };
         let package_id = Some(package_id);
         if let Some(checks) = check.remove(&package_id) {
             self.changes.extend(checks.into_keys());
         }
-        if let Some(fixes) = Arc::make_mut(&mut self.check_fixes).get_mut(&flycheck_id) {
+        if let Some(fixes) = Arc::make_mut(&mut self.check_fixes).get_mut(flycheck_id) {
             fixes.remove(&package_id);
         }
     }
@@ -102,10 +101,10 @@ impl DiagnosticCollection {
         diagnostic: lsp_types::Diagnostic,
         fix: Option<Box<Fix>>,
     ) {
-        let diagnostics = self
-            .check
-            .entry(flycheck_id)
-            .or_default()
+        if self.check.len() <= flycheck_id {
+            self.check.resize_with(flycheck_id + 1, Default::default);
+        }
+        let diagnostics = self.check[flycheck_id]
             .entry(package_id.clone())
             .or_default()
             .entry(file_id)
@@ -118,9 +117,10 @@ impl DiagnosticCollection {
 
         if let Some(fix) = fix {
             let check_fixes = Arc::make_mut(&mut self.check_fixes);
-            check_fixes
-                .entry(flycheck_id)
-                .or_default()
+            if check_fixes.len() <= flycheck_id {
+                check_fixes.resize_with(flycheck_id + 1, Default::default);
+            }
+            check_fixes[flycheck_id]
                 .entry(package_id.clone())
                 .or_default()
                 .entry(file_id)
@@ -176,14 +176,14 @@ impl DiagnosticCollection {
         let native_semantic = self.native_semantic.get(&file_id).into_iter().flat_map(|(_, d)| d);
         let check = self
             .check
-            .values()
+            .iter()
             .flat_map(|it| it.values())
             .filter_map(move |it| it.get(&file_id))
             .flatten();
         native_syntax.chain(native_semantic).chain(check)
     }
 
-    pub(crate) fn take_changes(&mut self) -> Option<IntSet<FileId>> {
+    pub(crate) fn take_changes(&mut self) -> Option<FxHashSet<FileId>> {
         if self.changes.is_empty() {
             return None;
         }
@@ -258,7 +258,7 @@ pub(crate) fn fetch_native_diagnostics(
     for (file_id, group) in odd_ones
         .into_iter()
         .sorted_by_key(|it| it.range.file_id)
-        .group_by(|it| it.range.file_id)
+        .chunk_by(|it| it.range.file_id)
         .into_iter()
     {
         if !subscriptions.contains(&file_id) {
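
The diagnostics.rs changes drop `nohash_hasher` and replace the `IntMap<usize, _>` keyed by flycheck id with a plain `Vec` indexed by that id, grown on demand via `resize_with`. A small stand-alone sketch of that grow-on-index pattern, with the element type simplified to `Vec<String>`:

```rust
// Sketch of the Vec-indexed-by-id pattern from `DiagnosticCollection::add_check_diagnostic`:
// flycheck ids are small dense integers, so a Vec that grows on demand can replace a map.
fn push_diagnostic(slots: &mut Vec<Vec<String>>, flycheck_id: usize, diagnostic: String) {
    if slots.len() <= flycheck_id {
        // Fill the gap with empty buckets so the indexing below cannot panic.
        slots.resize_with(flycheck_id + 1, Default::default);
    }
    slots[flycheck_id].push(diagnostic);
}

fn main() {
    let mut slots = Vec::new();
    push_diagnostic(&mut slots, 2, "warning: unused variable".to_owned());
    assert_eq!(slots.len(), 3);
    assert!(slots[0].is_empty());
    assert_eq!(slots[2], ["warning: unused variable"]);
}
```
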
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs
index fafffa043f988..79d8f678de4d6 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs
@@ -455,11 +455,7 @@ pub(crate) fn map_rust_diagnostic_to_lsp(
                             .cloned()
                             .chain(subdiagnostics.iter().map(|sub| sub.related.clone()))
                             .collect::<Vec<_>>();
-                        if info.is_empty() {
-                            None
-                        } else {
-                            Some(info)
-                        }
+                        if info.is_empty() { None } else { Some(info) }
                     },
                     tags: if tags.is_empty() { None } else { Some(tags.clone()) },
                     data: Some(serde_json::json!({ "rendered": rd.rendered })),
@@ -528,7 +524,7 @@ mod tests {
 
     use super::*;
 
-    use expect_test::{expect_file, ExpectFile};
+    use expect_test::{ExpectFile, expect_file};
     use lsp_types::ClientCapabilities;
     use paths::Utf8Path;
 
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/discover.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/discover.rs
index 0c111319bb41b..67ddc41f3b21a 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/discover.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/discover.rs
@@ -3,13 +3,13 @@
 use std::{io, path::Path};
 
 use crossbeam_channel::Sender;
+use ide_db::FxHashMap;
 use paths::{AbsPathBuf, Utf8Path, Utf8PathBuf};
 use project_model::ProjectJsonData;
 use serde::{Deserialize, Serialize};
-use serde_json::Value;
 use tracing::{info_span, span::EnteredSpan};
 
-use crate::command::{CommandHandle, ParseFromLine};
+use crate::command::{CargoParser, CommandHandle};
 
 pub(crate) const ARG_PLACEHOLDER: &str = "{arg}";
 
@@ -62,11 +62,12 @@ impl DiscoverCommand {
             })
             .collect();
 
-        let mut cmd = toolchain::command(command, current_dir);
+        // TODO: are we sure the extra env should be empty?
+        let mut cmd = toolchain::command(command, current_dir, &FxHashMap::default());
         cmd.args(args);
 
         Ok(DiscoverHandle {
-            _handle: CommandHandle::spawn(cmd, self.sender.clone())?,
+            _handle: CommandHandle::spawn(cmd, DiscoverProjectParser, self.sender.clone())?,
             span: info_span!("discover_command").entered(),
         })
     }
@@ -115,23 +116,26 @@ impl DiscoverProjectMessage {
     }
 }
 
-impl ParseFromLine for DiscoverProjectMessage {
-    fn from_line(line: &str, _error: &mut String) -> Option<Self> {
-        // can the line even be deserialized as JSON?
-        let Ok(data) = serde_json::from_str::<Value>(line) else {
-            let err = DiscoverProjectData::Error { error: line.to_owned(), source: None };
-            return Some(DiscoverProjectMessage::new(err));
-        };
+struct DiscoverProjectParser;
 
-        let Ok(data) = serde_json::from_value::<DiscoverProjectData>(data) else {
-            return None;
-        };
-
-        let msg = DiscoverProjectMessage::new(data);
-        Some(msg)
+impl CargoParser<DiscoverProjectMessage> for DiscoverProjectParser {
+    fn from_line(&self, line: &str, _error: &mut String) -> Option<DiscoverProjectMessage> {
+        match serde_json::from_str::<DiscoverProjectData>(line) {
+            Ok(data) => {
+                let msg = DiscoverProjectMessage::new(data);
+                Some(msg)
+            }
+            Err(err) => {
+                let err = DiscoverProjectData::Error {
+                    error: format!("{:#?}\n{}", err, line),
+                    source: None,
+                };
+                Some(DiscoverProjectMessage::new(err))
+            }
+        }
     }
 
-    fn from_eof() -> Option<Self> {
+    fn from_eof(&self) -> Option<DiscoverProjectMessage> {
         None
     }
 }
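
Both discover.rs and flycheck.rs move from the `ParseFromLine` trait (associated functions with no receiver) to a `CargoParser<T>` trait whose methods take `&self`, so a parser is an ordinary value handed to `CommandHandle::spawn`. A simplified sketch of that shape (trait and types are illustrative, not the real definitions in command.rs):

```rust
// Simplified stand-in for the object-style parser used by `CommandHandle::spawn`.
trait LineParser<T> {
    fn from_line(&self, line: &str) -> Option<T>;
    fn from_eof(&self) -> Option<T>;
}

struct NumberParser;

impl LineParser<u64> for NumberParser {
    fn from_line(&self, line: &str) -> Option<u64> {
        line.trim().parse().ok()
    }
    fn from_eof(&self) -> Option<u64> {
        None
    }
}

// Because the parser is a value rather than a type-level function, callers can
// hand different parser instances (possibly carrying state) to the same spawner.
fn drain(parser: &impl LineParser<u64>, input: &str) -> Vec<u64> {
    input.lines().filter_map(|line| parser.from_line(line)).collect()
}

fn main() {
    assert_eq!(drain(&NumberParser, "1\nnope\n3\n"), vec![1, 3]);
}
```
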
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs
index 7529e7c188f8a..2778b311e1e34 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs
@@ -4,7 +4,7 @@
 use std::{fmt, io, process::Command, time::Duration};
 
 use cargo_metadata::PackageId;
-use crossbeam_channel::{select_biased, unbounded, Receiver, Sender};
+use crossbeam_channel::{Receiver, Sender, select_biased, unbounded};
 use ide_db::FxHashSet;
 use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
 use rustc_hash::FxHashMap;
@@ -17,7 +17,7 @@ pub(crate) use cargo_metadata::diagnostic::{
 use toolchain::Tool;
 use triomphe::Arc;
 
-use crate::command::{CommandHandle, ParseFromLine};
+use crate::command::{CargoParser, CommandHandle};
 
 #[derive(Clone, Debug, Default, PartialEq, Eq)]
 pub(crate) enum InvocationStrategy {
@@ -35,7 +35,7 @@ pub(crate) struct CargoOptions {
     pub(crate) features: Vec<String>,
     pub(crate) extra_args: Vec<String>,
     pub(crate) extra_test_bin_args: Vec<String>,
-    pub(crate) extra_env: FxHashMap<String, String>,
+    pub(crate) extra_env: FxHashMap<String, Option<String>>,
     pub(crate) target_dir: Option<Utf8PathBuf>,
 }
 
@@ -69,7 +69,6 @@ impl CargoOptions {
         if let Some(target_dir) = &self.target_dir {
             cmd.arg("--target-dir").arg(target_dir);
         }
-        cmd.envs(&self.extra_env);
     }
 }
 
@@ -83,7 +82,7 @@ pub(crate) enum FlycheckConfig {
     CustomCommand {
         command: String,
         args: Vec<String>,
-        extra_env: FxHashMap<String, String>,
+        extra_env: FxHashMap<String, Option<String>>,
         invocation_strategy: InvocationStrategy,
     },
 }
@@ -329,7 +328,7 @@ impl FlycheckActor {
 
                     tracing::debug!(?command, "will restart flycheck");
                     let (sender, receiver) = unbounded();
-                    match CommandHandle::spawn(command, sender) {
+                    match CommandHandle::spawn(command, CargoCheckParser, sender) {
                         Ok(command_handle) => {
                             tracing::debug!(command = formatted_command, "did restart flycheck");
                             self.command_handle = Some(command_handle);
@@ -401,7 +400,9 @@ impl FlycheckActor {
                             package_id = package_id.as_ref().map(|it| &it.repr),
                             "diagnostic received"
                         );
-                        self.diagnostics_received = DiagnosticsReceived::Yes;
+                        if self.diagnostics_received == DiagnosticsReceived::No {
+                            self.diagnostics_received = DiagnosticsReceived::Yes;
+                        }
                         if let Some(package_id) = &package_id {
                             if self.diagnostics_cleared_for.insert(package_id.clone()) {
                                 tracing::trace!(
@@ -466,7 +467,8 @@ impl FlycheckActor {
     ) -> Option<Command> {
         match &self.config {
             FlycheckConfig::CargoCommand { command, options, ansi_color_output } => {
-                let mut cmd = toolchain::command(Tool::Cargo.path(), &*self.root);
+                let mut cmd =
+                    toolchain::command(Tool::Cargo.path(), &*self.root, &options.extra_env);
                 if let Some(sysroot_root) = &self.sysroot_root {
                     cmd.env("RUSTUP_TOOLCHAIN", AsRef::<std::path::Path>::as_ref(sysroot_root));
                 }
@@ -514,8 +516,7 @@ impl FlycheckActor {
                         &*self.root
                     }
                 };
-                let mut cmd = toolchain::command(command, root);
-                cmd.envs(extra_env);
+                let mut cmd = toolchain::command(command, root, extra_env);
 
                 // If the custom command has a $saved_file placeholder, and
                 // we're saving a file, replace the placeholder in the arguments.
@@ -556,8 +557,10 @@ enum CargoCheckMessage {
     Diagnostic { diagnostic: Diagnostic, package_id: Option<Arc<PackageId>> },
 }
 
-impl ParseFromLine for CargoCheckMessage {
-    fn from_line(line: &str, error: &mut String) -> Option<Self> {
+struct CargoCheckParser;
+
+impl CargoParser<CargoCheckMessage> for CargoCheckParser {
+    fn from_line(&self, line: &str, error: &mut String) -> Option<CargoCheckMessage> {
         let mut deserializer = serde_json::Deserializer::from_str(line);
         deserializer.disable_recursion_limit();
         if let Ok(message) = JsonMessage::deserialize(&mut deserializer) {
@@ -586,7 +589,7 @@ impl ParseFromLine for CargoCheckMessage {
         None
     }
 
-    fn from_eof() -> Option<Self> {
+    fn from_eof(&self) -> Option<CargoCheckMessage> {
         None
     }
 }
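
The flycheck hunks change `extra_env` to `FxHashMap<String, Option<String>>` and route it through `toolchain::command` instead of calling `cmd.envs(..)` directly. Assuming `None` means "remove this variable from the child environment" (the `toolchain::command` signature is not shown in this diff), applying such a map to a `std::process::Command` could look like this:

```rust
use std::collections::HashMap;
use std::process::Command;

// Assumed semantics for `Option<String>` env values: `Some(v)` sets the variable,
// `None` strips it from the spawned process' environment.
fn apply_extra_env(cmd: &mut Command, extra_env: &HashMap<String, Option<String>>) {
    for (key, value) in extra_env {
        match value {
            Some(value) => {
                cmd.env(key, value);
            }
            None => {
                cmd.env_remove(key);
            }
        }
    }
}

fn main() {
    let mut extra_env = HashMap::new();
    extra_env.insert("RUSTC_BOOTSTRAP".to_owned(), Some("1".to_owned()));
    extra_env.insert("RUSTUP_TOOLCHAIN".to_owned(), None);

    let mut cmd = Command::new("cargo");
    apply_extra_env(&mut cmd, &extra_env);
    // `cmd` now sets RUSTC_BOOTSTRAP=1 and removes any inherited RUSTUP_TOOLCHAIN.
}
```
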
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs
index 54670b675984d..820276e8aea2c 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs
@@ -5,14 +5,13 @@
 
 use std::{ops::Not as _, time::Instant};
 
-use crossbeam_channel::{unbounded, Receiver, Sender};
+use crossbeam_channel::{Receiver, Sender, unbounded};
 use hir::ChangeWithProcMacros;
 use ide::{Analysis, AnalysisHost, Cancellable, FileId, SourceRootId};
-use ide_db::base_db::{CrateId, ProcMacroPaths, SourceDatabase, SourceRootDatabase};
+use ide_db::base_db::{Crate, ProcMacroPaths, SourceDatabase};
 use itertools::Itertools;
 use load_cargo::SourceRootConfig;
 use lsp_types::{SemanticTokens, Url};
-use nohash_hasher::IntMap;
 use parking_lot::{
     MappedRwLockReadGuard, Mutex, RwLock, RwLockReadGuard, RwLockUpgradableReadGuard,
     RwLockWriteGuard,
@@ -20,7 +19,7 @@ use parking_lot::{
 use proc_macro_api::ProcMacroClient;
 use project_model::{ManifestPath, ProjectWorkspace, ProjectWorkspaceKind, WorkspaceBuildScripts};
 use rustc_hash::{FxHashMap, FxHashSet};
-use tracing::{span, trace, Level};
+use tracing::{Level, span, trace};
 use triomphe::Arc;
 use vfs::{AbsPathBuf, AnchoredPathBuf, ChangeKind, Vfs, VfsPath};
 
@@ -117,7 +116,7 @@ pub(crate) struct GlobalState {
 
     // VFS
     pub(crate) loader: Handle<Box<dyn vfs::loader::Handle>, Receiver<vfs::loader::Message>>,
-    pub(crate) vfs: Arc<RwLock<(vfs::Vfs, IntMap<FileId, LineEndings>)>>,
+    pub(crate) vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
     pub(crate) vfs_config_version: u32,
     pub(crate) vfs_progress_config_version: u32,
     pub(crate) vfs_done: bool,
@@ -158,7 +157,7 @@ pub(crate) struct GlobalState {
     // op queues
     pub(crate) fetch_workspaces_queue: OpQueue<FetchWorkspaceRequest, FetchWorkspaceResponse>,
     pub(crate) fetch_build_data_queue: OpQueue<(), FetchBuildDataResponse>,
-    pub(crate) fetch_proc_macros_queue: OpQueue<Vec<ProcMacroPaths>, bool>,
+    pub(crate) fetch_proc_macros_queue: OpQueue<(ChangeWithProcMacros, Vec<ProcMacroPaths>), bool>,
     pub(crate) prime_caches_queue: OpQueue,
     pub(crate) discover_workspace_queue: OpQueue,
 
@@ -181,7 +180,7 @@ pub(crate) struct GlobalStateSnapshot {
     pub(crate) check_fixes: CheckFixes,
     mem_docs: MemDocs,
     pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
-    vfs: Arc<RwLock<(vfs::Vfs, IntMap<FileId, LineEndings>)>>,
+    vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
     pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
     // used to signal semantic highlighting to fall back to syntax based highlighting until
     // proc-macros have been loaded
@@ -265,7 +264,7 @@ impl GlobalState {
             discover_sender,
             discover_receiver,
 
-            vfs: Arc::new(RwLock::new((vfs::Vfs::default(), IntMap::default()))),
+            vfs: Arc::new(RwLock::new((vfs::Vfs::default(), Default::default()))),
             vfs_config_version: 0,
             vfs_progress_config_version: 0,
             vfs_span: None,
@@ -300,7 +299,7 @@ impl GlobalState {
             FxHashMap::default();
 
         let (change, modified_rust_files, workspace_structure_change) = {
-            let mut change = ChangeWithProcMacros::new();
+            let mut change = ChangeWithProcMacros::default();
             let mut guard = self.vfs.write();
             let changed_files = guard.0.take_changes();
             if changed_files.is_empty() {
@@ -426,43 +425,48 @@ impl GlobalState {
                     tracing::info!(%vfs_path, ?change_kind, "Processing rust-analyzer.toml changes");
                     if vfs_path.as_path() == user_config_abs_path {
                         tracing::info!(%vfs_path, ?change_kind, "Use config rust-analyzer.toml changes");
-                        change.change_user_config(Some(db.file_text(file_id)));
-                        continue;
+                        change.change_user_config(Some(db.file_text(file_id).text(db)));
                     }
 
                     // If a change has been made to a ratoml file that
                     // belongs to a non-local source root, we will ignore it.
-                    let sr_id = db.file_source_root(file_id);
-                    let sr = db.source_root(sr_id);
+                    let source_root_id = db.file_source_root(file_id).source_root_id(db);
+                    let source_root = db.source_root(source_root_id).source_root(db);
 
-                    if !sr.is_library {
+                    if !source_root.is_library {
                         let entry = if workspace_ratoml_paths.contains(&vfs_path) {
-                            tracing::info!(%vfs_path, ?sr_id, "workspace rust-analyzer.toml changes");
+                            tracing::info!(%vfs_path, ?source_root_id, "workspace rust-analyzer.toml changes");
                             change.change_workspace_ratoml(
-                                sr_id,
+                                source_root_id,
                                 vfs_path.clone(),
-                                Some(db.file_text(file_id)),
+                                Some(db.file_text(file_id).text(db)),
                             )
                         } else {
-                            tracing::info!(%vfs_path, ?sr_id, "crate rust-analyzer.toml changes");
+                            tracing::info!(%vfs_path, ?source_root_id, "crate rust-analyzer.toml changes");
                             change.change_ratoml(
-                                sr_id,
+                                source_root_id,
                                 vfs_path.clone(),
-                                Some(db.file_text(file_id)),
+                                Some(db.file_text(file_id).text(db)),
                             )
                         };
 
                         if let Some((kind, old_path, old_text)) = entry {
                             // The SourceRoot has more than one RATOML file. In this case the lexicographically smaller path wins.
                             if old_path < vfs_path {
-                                tracing::error!("Two `rust-analyzer.toml` files were found inside the same crate. {vfs_path} has no effect.");
+                                tracing::error!(
+                                    "Two `rust-analyzer.toml` files were found inside the same crate. {vfs_path} has no effect."
+                                );
                                 // Put the old one back in.
                                 match kind {
                                     RatomlFileKind::Crate => {
-                                        change.change_ratoml(sr_id, old_path, old_text);
+                                        change.change_ratoml(source_root_id, old_path, old_text);
                                     }
                                     RatomlFileKind::Workspace => {
-                                        change.change_workspace_ratoml(sr_id, old_path, old_text);
+                                        change.change_workspace_ratoml(
+                                            source_root_id,
+                                            old_path,
+                                            old_text,
+                                        );
                                     }
                                 }
                             }
@@ -711,7 +715,7 @@ impl GlobalStateSnapshot {
         self.vfs_read().file_path(file_id).clone()
     }
 
-    pub(crate) fn target_spec_for_crate(&self, crate_id: CrateId) -> Option<TargetSpec> {
+    pub(crate) fn target_spec_for_crate(&self, crate_id: Crate) -> Option<TargetSpec> {
         let file_id = self.analysis.crate_root(crate_id).ok()?;
         let path = self.vfs_read().file_path(file_id).clone();
         let path = path.as_path()?;
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/hack_recover_crate_name.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/hack_recover_crate_name.rs
deleted file mode 100644
index d7285653c5fa5..0000000000000
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/hack_recover_crate_name.rs
+++ /dev/null
@@ -1,25 +0,0 @@
-//! Currently cargo does not emit crate name in the `cargo test --format=json`, which needs to be changed. This
-//! module contains a way to recover crate names in a very hacky and wrong way.
-
-// FIXME(hack_recover_crate_name): Remove this module.
-
-use std::sync::{Mutex, MutexGuard, OnceLock};
-
-use ide_db::FxHashMap;
-
-static STORAGE: OnceLock<Mutex<FxHashMap<String, String>>> = OnceLock::new();
-
-fn get_storage() -> MutexGuard<'static, FxHashMap<String, String>> {
-    STORAGE.get_or_init(|| Mutex::new(FxHashMap::default())).lock().unwrap()
-}
-
-pub(crate) fn insert_name(name_with_crate: String) {
-    let Some((_, name_without_crate)) = name_with_crate.split_once("::") else {
-        return;
-    };
-    get_storage().insert(name_without_crate.to_owned(), name_with_crate);
-}
-
-pub(crate) fn lookup_name(name_without_crate: String) -> Option<String> {
-    get_storage().get(&name_without_crate).cloned()
-}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs
index 4683877db69b0..3b76edf528b69 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs
@@ -4,10 +4,9 @@ use std::{
     panic, thread,
 };
 
-use ide::Cancelled;
-use ide_db::base_db::ra_salsa::Cycle;
+use ide_db::base_db::salsa::{self, Cancelled};
 use lsp_server::{ExtractError, Response, ResponseError};
-use serde::{de::DeserializeOwned, Serialize};
+use serde::{Serialize, de::DeserializeOwned};
 use stdx::thread::ThreadIntent;
 
 use crate::{
@@ -310,14 +309,12 @@ impl RequestDispatcher<'_> {
 
 #[derive(Debug)]
 enum HandlerCancelledError {
-    PropagatedPanic,
-    Inner(ide::Cancelled),
+    Inner(salsa::Cancelled),
 }
 
 impl std::error::Error for HandlerCancelledError {
     fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
         match self {
-            HandlerCancelledError::PropagatedPanic => None,
             HandlerCancelledError::Inner(cancelled) => Some(cancelled),
         }
     }
@@ -350,9 +347,6 @@ where
             if let Some(panic_message) = panic_message {
                 message.push_str(": ");
                 message.push_str(panic_message)
-            } else if let Some(cycle) = panic.downcast_ref::<Cycle>() {
-                tracing::error!("Cycle propagated out of salsa! This is a bug: {cycle:?}");
-                return Err(HandlerCancelledError::PropagatedPanic);
             } else if let Ok(cancelled) = panic.downcast::<Cancelled>() {
                 tracing::error!("Cancellation propagated out of salsa! This is a bug");
                 return Err(HandlerCancelledError::Inner(*cancelled));
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs
index 55344a4d6ac60..a30e5d8ce268e 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs
@@ -3,6 +3,7 @@
 
 use std::ops::{Deref, Not as _};
 
+use ide_db::base_db::salsa::Cancelled;
 use itertools::Itertools;
 use lsp_types::{
     CancelParams, DidChangeConfigurationParams, DidChangeTextDocumentParams,
@@ -305,7 +306,7 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool {
         let invocation_strategy_once = state.config.flycheck(None).invocation_strategy_once();
         let may_flycheck_workspace = state.config.flycheck_workspace(None);
         let mut updated = false;
-        let task = move || -> std::result::Result<(), ide::Cancelled> {
+        let task = move || -> std::result::Result<(), Cancelled> {
             if invocation_strategy_once {
                 let saved_file = vfs_path.as_path().map(|p| p.to_owned());
                 world.flycheck[0].restart_workspace(saved_file.clone());
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs
index 68b2d6b696289..e08dd80973a73 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs
@@ -5,7 +5,7 @@ use std::{fs, io::Write as _, ops::Not, process::Stdio};
 
 use anyhow::Context;
 
-use base64::{prelude::BASE64_STANDARD, Engine};
+use base64::{Engine, prelude::BASE64_STANDARD};
 use ide::{
     AnnotationConfig, AssistKind, AssistResolveStrategy, Cancellable, CompletionFieldsToResolve,
     FilePosition, FileRange, HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query,
@@ -35,17 +35,15 @@ use crate::{
     config::{Config, RustfmtConfig, WorkspaceSymbolConfig},
     diagnostics::convert_diagnostic,
     global_state::{FetchWorkspaceRequest, GlobalState, GlobalStateSnapshot},
-    hack_recover_crate_name,
     line_index::LineEndings,
     lsp::{
-        completion_item_hash,
+        LspError, completion_item_hash,
         ext::{
             InternalTestingFetchConfigOption, InternalTestingFetchConfigParams,
             InternalTestingFetchConfigResponse,
         },
         from_proto, to_proto,
         utils::{all_edits_are_disjoint, invalid_params_error},
-        LspError,
     },
     lsp_ext::{
         self, CrateInfoResult, ExternalDocsPair, ExternalDocsResponse, FetchDependencyListParams,
@@ -196,20 +194,38 @@ pub(crate) fn handle_view_item_tree(
     Ok(res)
 }
 
-// cargo test requires the real package name which might contain hyphens but
-// the test identifier passed to this function is the namespace form where hyphens
-// are replaced with underscores so we have to reverse this and find the real package name
-fn find_package_name(namespace_root: &str, cargo: &CargoWorkspace) -> Option<String> {
-    cargo.packages().find_map(|p| {
-        let package_name = &cargo[p].name;
-        if package_name.replace('-', "_") == namespace_root {
-            Some(package_name.clone())
-        } else {
-            None
-        }
+// cargo test requires:
+// - the package to be a member of the workspace
+// - the target in the package to not be a build script (custom-build)
+// - the package name: the root of the test identifier supplied to this handler can be
+//   a package or a target inside a package
+// - the target name: if the test identifier is a target, it is needed in addition to the
+//   package name to run the right test
+// - real names: the test identifier uses the namespace form where hyphens are replaced with
+//   underscores, but cargo test requires the real name
+// - the target kind, e.g. bin or lib
+fn all_test_targets(cargo: &CargoWorkspace) -> impl Iterator<Item = TestTarget> {
+    cargo.packages().filter(|p| cargo[*p].is_member).flat_map(|p| {
+        let package = &cargo[p];
+        package.targets.iter().filter_map(|t| {
+            let target = &cargo[*t];
+            if target.kind == TargetKind::BuildScript {
+                None
+            } else {
+                Some(TestTarget {
+                    package: package.name.clone(),
+                    target: target.name.clone(),
+                    kind: target.kind,
+                })
+            }
+        })
     })
 }
 
+fn find_test_target(namespace_root: &str, cargo: &CargoWorkspace) -> Option<TestTarget> {
+    all_test_targets(cargo).find(|t| namespace_root == t.target.replace('-', "_"))
+}
+
 pub(crate) fn handle_run_test(
     state: &mut GlobalState,
     params: lsp_ext::RunTestParams,
@@ -217,53 +233,41 @@ pub(crate) fn handle_run_test(
     if let Some(_session) = state.test_run_session.take() {
         state.send_notification::<lsp_ext::EndRunTest>(());
     }
-    // We detect the lowest common ancestor of all included tests, and
-    // run it. We ignore excluded tests for now, the client will handle
-    // it for us.
-    let lca = match params.include {
-        Some(tests) => tests
-            .into_iter()
-            .reduce(|x, y| {
-                let mut common_prefix = "".to_owned();
-                for (xc, yc) in x.chars().zip(y.chars()) {
-                    if xc != yc {
-                        break;
-                    }
-                    common_prefix.push(xc);
-                }
-                common_prefix
-            })
-            .unwrap_or_default(),
-        None => "".to_owned(),
-    };
-    let (namespace_root, test_path) = if lca.is_empty() {
-        (None, None)
-    } else if let Some((namespace_root, path)) = lca.split_once("::") {
-        (Some(namespace_root), Some(path))
-    } else {
-        (Some(lca.as_str()), None)
-    };
+
     let mut handles = vec![];
     for ws in &*state.workspaces {
         if let ProjectWorkspaceKind::Cargo { cargo, .. } = &ws.kind {
-            let test_target = if let Some(namespace_root) = namespace_root {
-                if let Some(package_name) = find_package_name(namespace_root, cargo) {
-                    TestTarget::Package(package_name)
-                } else {
-                    TestTarget::Workspace
-                }
-            } else {
-                TestTarget::Workspace
+            // need to deduplicate `include` to avoid redundant test runs
+            let tests = match params.include {
+                Some(ref include) => include
+                    .iter()
+                    .unique()
+                    .filter_map(|test| {
+                        let (root, remainder) = match test.split_once("::") {
+                            Some((root, remainder)) => (root.to_owned(), Some(remainder)),
+                            None => (test.clone(), None),
+                        };
+                        if let Some(target) = find_test_target(&root, cargo) {
+                            Some((target, remainder))
+                        } else {
+                            tracing::error!("Test target not found for: {test}");
+                            None
+                        }
+                    })
+                    .collect_vec(),
+                None => all_test_targets(cargo).map(|target| (target, None)).collect(),
             };
 
-            let handle = CargoTestHandle::new(
-                test_path,
-                state.config.cargo_test_options(None),
-                cargo.workspace_root(),
-                test_target,
-                state.test_run_sender.clone(),
-            )?;
-            handles.push(handle);
+            for (target, path) in tests {
+                let handle = CargoTestHandle::new(
+                    path,
+                    state.config.cargo_test_options(None),
+                    cargo.workspace_root(),
+                    target,
+                    state.test_run_sender.clone(),
+                )?;
+                handles.push(handle);
+            }
         }
     }
     // Each process sends the finished signal twice, once for stdout and once for stderr
@@ -287,9 +291,7 @@ pub(crate) fn handle_discover_test(
         }
         None => (snap.analysis.discover_test_roots()?, None),
     };
-    for t in &tests {
-        hack_recover_crate_name::insert_name(t.id.clone());
-    }
+
     Ok(lsp_ext::DiscoverTestResults {
         tests: tests
             .into_iter()
@@ -502,6 +504,7 @@ pub(crate) fn handle_document_diagnostics(
     if !snap.analysis.is_local_source_root(source_root)? {
         return Ok(empty_diagnostic_report());
     }
+    let source_root = snap.analysis.source_root_id(file_id)?;
     let config = snap.config.diagnostics(Some(source_root));
     if !config.enabled {
         return Ok(empty_diagnostic_report());
@@ -930,6 +933,18 @@ pub(crate) fn handle_parent_module(
     Ok(Some(res))
 }
 
+pub(crate) fn handle_child_modules(
+    snap: GlobalStateSnapshot,
+    params: lsp_types::TextDocumentPositionParams,
+) -> anyhow::Result<Option<lsp_types::GotoDefinitionResponse>> {
+    let _p = tracing::info_span!("handle_child_modules").entered();
+    // locate child module by semantics
+    let position = try_default!(from_proto::file_position(&snap, params)?);
+    let navs = snap.analysis.child_modules(position)?;
+    let res = to_proto::goto_definition_response(&snap, None, navs)?;
+    Ok(Some(res))
+}
+
 pub(crate) fn handle_runnables(
     snap: GlobalStateSnapshot,
     params: lsp_ext::RunnablesParams,
@@ -1068,7 +1083,11 @@ pub(crate) fn handle_related_tests(
 
 pub(crate) fn handle_completion(
     snap: GlobalStateSnapshot,
-    lsp_types::CompletionParams { text_document_position, context,.. }: lsp_types::CompletionParams,
+    lsp_types::CompletionParams {
+        text_document_position,
+        context,
+        ..
+    }: lsp_types::CompletionParams,
 ) -> anyhow::Result<Option<lsp_types::CompletionResponse>> {
     let _p = tracing::info_span!("handle_completion").entered();
     let mut position =
@@ -1117,7 +1136,9 @@ pub(crate) fn handle_completion_resolve(
         .into());
     }
 
-    let Some(data) = original_completion.data.take() else { return Ok(original_completion) };
+    let Some(data) = original_completion.data.take() else {
+        return Ok(original_completion);
+    };
 
     let resolve_data: lsp_ext::CompletionResolveData = serde_json::from_value(data)?;
 
@@ -1446,7 +1467,7 @@ pub(crate) fn handle_code_action(
     // Fixes from `cargo check`.
     for fix in snap
         .check_fixes
-        .values()
+        .iter()
         .flat_map(|it| it.values())
         .filter_map(|it| it.get(&frange.file_id))
         .flatten()
@@ -1473,7 +1494,7 @@ pub(crate) fn handle_code_action_resolve(
 ) -> anyhow::Result<lsp_ext::CodeAction> {
     let _p = tracing::info_span!("handle_code_action_resolve").entered();
     let Some(params) = code_action.data.take() else {
-        return Err(invalid_params_error("code action without data".to_owned()).into());
+        return Ok(code_action);
     };
 
     let file_id = from_proto::file_id(&snap, &params.code_action_params.text_document.uri)?
@@ -1500,7 +1521,7 @@ pub(crate) fn handle_code_action_resolve(
                 "Failed to parse action id string '{}': {e}",
                 params.id
             ))
-            .into())
+            .into());
         }
     };
 
@@ -1549,13 +1570,21 @@ pub(crate) fn handle_code_action_resolve(
 fn parse_action_id(action_id: &str) -> anyhow::Result<(usize, SingleResolve), String> {
     let id_parts = action_id.split(':').collect::<Vec<_>>();
     match id_parts.as_slice() {
-        [assist_id_string, assist_kind_string, index_string] => {
+        [assist_id_string, assist_kind_string, index_string, subtype_str] => {
             let assist_kind: AssistKind = assist_kind_string.parse()?;
             let index: usize = match index_string.parse() {
                 Ok(index) => index,
                 Err(e) => return Err(format!("Incorrect index string: {e}")),
             };
-            Ok((index, SingleResolve { assist_id: assist_id_string.to_string(), assist_kind }))
+            let assist_subtype = subtype_str.parse::<usize>().ok();
+            Ok((
+                index,
+                SingleResolve {
+                    assist_id: assist_id_string.to_string(),
+                    assist_kind,
+                    assist_subtype,
+                },
+            ))
         }
         _ => Err("Action id contains incorrect number of segments".to_owned()),
     }
@@ -1608,7 +1637,9 @@ pub(crate) fn handle_code_lens_resolve(
     snap: GlobalStateSnapshot,
     mut code_lens: CodeLens,
 ) -> anyhow::Result<CodeLens> {
-    let Some(data) = code_lens.data.take() else { return Ok(code_lens) };
+    let Some(data) = code_lens.data.take() else {
+        return Ok(code_lens);
+    };
     let resolve = serde_json::from_value::<lsp_ext::CodeLensResolveData>(data)?;
     let Some(annotation) = from_proto::annotation(&snap, code_lens.range, resolve)? else {
         return Ok(code_lens);
@@ -1662,11 +1693,13 @@ pub(crate) fn handle_ssr(
     params: lsp_ext::SsrParams,
 ) -> anyhow::Result<lsp_types::WorkspaceEdit> {
     let _p = tracing::info_span!("handle_ssr").entered();
-    let selections = try_default!(params
-        .selections
-        .iter()
-        .map(|range| from_proto::file_range(&snap, &params.position.text_document, *range))
-        .collect::<Result<Option<Vec<_>>, _>>()?);
+    let selections = try_default!(
+        params
+            .selections
+            .iter()
+            .map(|range| from_proto::file_range(&snap, &params.position.text_document, *range))
+            .collect::<Result<Option<Vec<_>>, _>>()?
+    );
     let position = try_default!(from_proto::file_position(&snap, params.position)?);
     let source_change = snap.analysis.structural_search_replace(
         &params.query,
@@ -1718,14 +1751,18 @@ pub(crate) fn handle_inlay_hints_resolve(
 ) -> anyhow::Result<InlayHint> {
     let _p = tracing::info_span!("handle_inlay_hints_resolve").entered();
 
-    let Some(data) = original_hint.data.take() else { return Ok(original_hint) };
+    let Some(data) = original_hint.data.take() else {
+        return Ok(original_hint);
+    };
     let resolve_data: lsp_ext::InlayHintResolveData = serde_json::from_value(data)?;
     let file_id = FileId::from_raw(resolve_data.file_id);
     if resolve_data.version != snap.file_version(file_id) {
         tracing::warn!("Inlay hint resolve data is outdated");
         return Ok(original_hint);
     }
-    let Some(hash) = resolve_data.hash.parse().ok() else { return Ok(original_hint) };
+    let Some(hash) = resolve_data.hash.parse().ok() else {
+        return Ok(original_hint);
+    };
     anyhow::ensure!(snap.file_exists(file_id), "Invalid LSP resolve data");
 
     let line_index = snap.file_line_index(file_id)?;
@@ -2264,11 +2301,7 @@ fn run_rustfmt(
     let current_dir = match text_document.uri.to_file_path() {
         Ok(mut path) => {
             // pop off file name
-            if path.pop() && path.is_dir() {
-                path
-            } else {
-                std::env::current_dir()?
-            }
+            if path.pop() && path.is_dir() { path } else { std::env::current_dir()? }
         }
         Err(_) => {
             tracing::error!(
@@ -2282,8 +2315,11 @@ fn run_rustfmt(
     let mut command = match snap.config.rustfmt(source_root_id) {
         RustfmtConfig::Rustfmt { extra_args, enable_range_formatting } => {
             // FIXME: Set RUSTUP_TOOLCHAIN
-            let mut cmd = toolchain::command(toolchain::Tool::Rustfmt.path(), current_dir);
-            cmd.envs(snap.config.extra_env(source_root_id));
+            let mut cmd = toolchain::command(
+                toolchain::Tool::Rustfmt.path(),
+                current_dir,
+                snap.config.extra_env(source_root_id),
+            );
             cmd.args(extra_args);
 
             if let Some(edition) = edition {
@@ -2325,6 +2361,7 @@ fn run_rustfmt(
         RustfmtConfig::CustomCommand { command, args } => {
             let cmd = Utf8PathBuf::from(&command);
             let target_spec = TargetSpec::for_file(snap, file_id)?;
+            let extra_env = snap.config.extra_env(source_root_id);
             let mut cmd = match target_spec {
                 Some(TargetSpec::Cargo(_)) => {
                     // approach: if the command name contains a path separator, join it with the project root.
@@ -2337,12 +2374,11 @@ fn run_rustfmt(
                     } else {
                         cmd
                     };
-                    toolchain::command(cmd_path, current_dir)
+                    toolchain::command(cmd_path, current_dir, extra_env)
                 }
-                _ => toolchain::command(cmd, current_dir),
+                _ => toolchain::command(cmd, current_dir, extra_env),
             };
 
-            cmd.envs(snap.config.extra_env(source_root_id));
             cmd.args(args);
             cmd
         }
@@ -2385,7 +2421,11 @@ fn run_rustfmt(
                 Ok(None)
             }
             // rustfmt panicked at lexing/parsing the file
-            Some(101) if !rustfmt_not_installed && captured_stderr.starts_with("error[") => {
+            Some(101)
+                if !rustfmt_not_installed
+                    && (captured_stderr.starts_with("error[")
+                        || captured_stderr.starts_with("error:")) =>
+            {
                 Ok(None)
             }
             _ => {
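
The rewritten `handle_run_test` no longer reduces the included tests to their longest common prefix; instead each (deduplicated) test id is split into a namespace root, which is resolved to a concrete cargo target, and a remaining test path. A tiny sketch of that splitting and deduplication step, independent of the cargo workspace types:

```rust
use std::collections::HashSet;

// Sketch of how each included test id is split into a namespace root (later
// resolved to a cargo target by `find_test_target`) and the remaining test path.
fn split_test_id(test: &str) -> (String, Option<&str>) {
    match test.split_once("::") {
        Some((root, remainder)) => (root.to_owned(), Some(remainder)),
        None => (test.to_owned(), None),
    }
}

fn main() {
    assert_eq!(
        split_test_id("my_crate::module::works"),
        ("my_crate".to_owned(), Some("module::works"))
    );
    assert_eq!(split_test_id("my_crate"), ("my_crate".to_owned(), None));

    // Deduplication mirrors the `.unique()` call in the handler: identical ids
    // should spawn only one `cargo test` run.
    let include = ["my_crate::a", "my_crate::a", "my_crate::b"];
    let unique: HashSet<_> = include.iter().collect();
    assert_eq!(unique.len(), 2);
}
```
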
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs
index c6aa8ba170778..49ebffa909acd 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs
@@ -16,14 +16,14 @@ use ide::{
     FilePosition, TextSize,
 };
 use ide_db::{
-    imports::insert_use::{ImportGranularity, InsertUseConfig},
     SnippetCap,
+    imports::insert_use::{ImportGranularity, InsertUseConfig},
 };
 use project_model::CargoConfig;
 use test_utils::project_root;
 use vfs::{AbsPathBuf, VfsPath};
 
-use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
+use load_cargo::{LoadCargoConfig, ProcMacroServerChoice, load_workspace_at};
 
 #[track_caller]
 fn file_id(vfs: &vfs::Vfs, path: &VfsPath) -> vfs::FileId {
@@ -86,7 +86,7 @@ fn integrated_highlighting_benchmark() {
             "self.data.cargo_buildScripts_rebuildOnSave",
             "self. data. cargo_buildScripts_rebuildOnSave",
         );
-        let mut change = ChangeWithProcMacros::new();
+        let mut change = ChangeWithProcMacros::default();
         change.change_file(file_id, Some(text));
         host.apply_change(change);
     }
@@ -149,7 +149,7 @@ fn integrated_completion_benchmark() {
         let completion_offset =
             patch(&mut text, "db.struct_data(self.id)", "sel;\ndb.struct_data(self.id)")
                 + "sel".len();
-        let mut change = ChangeWithProcMacros::new();
+        let mut change = ChangeWithProcMacros::default();
         change.change_file(file_id, Some(text));
         host.apply_change(change);
         completion_offset
@@ -200,7 +200,7 @@ fn integrated_completion_benchmark() {
         let completion_offset =
             patch(&mut text, "sel;\ndb.struct_data(self.id)", ";sel;\ndb.struct_data(self.id)")
                 + ";sel".len();
-        let mut change = ChangeWithProcMacros::new();
+        let mut change = ChangeWithProcMacros::default();
         change.change_file(file_id, Some(text));
         host.apply_change(change);
         completion_offset
@@ -250,7 +250,7 @@ fn integrated_completion_benchmark() {
         let completion_offset =
             patch(&mut text, "sel;\ndb.struct_data(self.id)", "self.;\ndb.struct_data(self.id)")
                 + "self.".len();
-        let mut change = ChangeWithProcMacros::new();
+        let mut change = ChangeWithProcMacros::default();
         change.change_file(file_id, Some(text));
         host.apply_change(change);
         completion_offset
@@ -367,7 +367,7 @@ fn integrated_diagnostics_benchmark() {
         let _it = stdx::timeit("change");
         let mut text = host.analysis().file_text(file_id).unwrap().to_string();
         patch(&mut text, "db.struct_data(self.id)", "();\ndb.struct_data(self.id)");
-        let mut change = ChangeWithProcMacros::new();
+        let mut change = ChangeWithProcMacros::default();
         change.change_file(file_id, Some(text));
         host.apply_change(change);
     };
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs
index a0d6a0d6da6f8..0dea285e97bd4 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs
@@ -24,7 +24,6 @@ mod command;
 mod diagnostics;
 mod discover;
 mod flycheck;
-mod hack_recover_crate_name;
 mod line_index;
 mod main_loop;
 mod mem_docs;
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/capabilities.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/capabilities.rs
index 9c6b69d731290..418fe957590dc 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/capabilities.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/capabilities.rs
@@ -1,6 +1,6 @@
 //! Advertises the capabilities of the LSP Server.
 use ide::{CompletionFieldsToResolve, InlayFieldsToResolve};
-use ide_db::{line_index::WideEncoding, FxHashSet};
+use ide_db::{FxHashSet, line_index::WideEncoding};
 use lsp_types::{
     CallHierarchyServerCapability, CodeActionKind, CodeActionOptions, CodeActionProviderCapability,
     CodeLensOptions, CompletionOptions, CompletionOptionsCompletionItem, DeclarationCapability,
@@ -157,6 +157,7 @@ pub fn server_capabilities(config: &Config) -> ServerCapabilities {
             "onEnter": true,
             "openCargoToml": true,
             "parentModule": true,
+            "childModules": true,
             "runnables": {
                 "kinds": [ "cargo" ],
             },
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs
index ca4372aa83f8d..b132323bec5b1 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs
@@ -8,11 +8,11 @@
 
 use std::ops;
 
-use lsp_types::request::Request;
 use lsp_types::Url;
+use lsp_types::request::Request;
 use lsp_types::{
-    notification::Notification, CodeActionKind, DocumentOnTypeFormattingParams,
-    PartialResultParams, Position, Range, TextDocumentIdentifier, WorkDoneProgressParams,
+    CodeActionKind, DocumentOnTypeFormattingParams, PartialResultParams, Position, Range,
+    TextDocumentIdentifier, WorkDoneProgressParams, notification::Notification,
 };
 use paths::Utf8PathBuf;
 use rustc_hash::FxHashMap;
@@ -399,6 +399,14 @@ impl Request for ParentModule {
     const METHOD: &'static str = "experimental/parentModule";
 }
 
+pub enum ChildModules {}
+
+impl Request for ChildModules {
+    type Params = lsp_types::TextDocumentPositionParams;
+    type Result = Option<lsp_types::GotoDefinitionResponse>;
+    const METHOD: &'static str = "experimental/childModules";
+}
+
 pub enum JoinLines {}
 
 impl Request for JoinLines {
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/from_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/from_proto.rs
index 6375a1a054b7d..fb8a98382905c 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/from_proto.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/from_proto.rs
@@ -1,7 +1,7 @@
 //! Conversion lsp_types types to rust-analyzer specific ones.
 use anyhow::format_err;
 use ide::{Annotation, AnnotationKind, AssistKind, LineCol};
-use ide_db::{line_index::WideLineCol, FileId, FilePosition, FileRange};
+use ide_db::{FileId, FilePosition, FileRange, line_index::WideLineCol};
 use paths::Utf8PathBuf;
 use syntax::{TextRange, TextSize};
 use vfs::AbsPathBuf;
@@ -103,7 +103,6 @@ pub(crate) fn file_range_uri(
 
 pub(crate) fn assist_kind(kind: lsp_types::CodeActionKind) -> Option<AssistKind> {
     let assist_kind = match &kind {
-        k if k == &lsp_types::CodeActionKind::EMPTY => AssistKind::None,
         k if k == &lsp_types::CodeActionKind::QUICKFIX => AssistKind::QuickFix,
         k if k == &lsp_types::CodeActionKind::REFACTOR => AssistKind::Refactor,
         k if k == &lsp_types::CodeActionKind::REFACTOR_EXTRACT => AssistKind::RefactorExtract,
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs
index 6db7bcb11102c..4efe330f16ac1 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs
@@ -6,7 +6,7 @@ use std::{
     sync::atomic::{AtomicU32, Ordering},
 };
 
-use base64::{prelude::BASE64_STANDARD, Engine};
+use base64::{Engine, prelude::BASE64_STANDARD};
 use ide::{
     Annotation, AnnotationKind, Assist, AssistKind, Cancellable, CompletionFieldsToResolve,
     CompletionItem, CompletionItemKind, CompletionRelevance, Documentation, FileId, FileRange,
@@ -16,7 +16,7 @@ use ide::{
     SnippetEdit, SourceChange, StructureNodeKind, SymbolKind, TextEdit, TextRange, TextSize,
     UpdateTest,
 };
-use ide_db::{assists, rust_doc::format_docs, FxHasher};
+use ide_db::{FxHasher, assists, rust_doc::format_docs, source_change::ChangeAnnotationId};
 use itertools::Itertools;
 use paths::{Utf8Component, Utf8Prefix};
 use semver::VersionReq;
@@ -28,11 +28,10 @@ use crate::{
     global_state::GlobalStateSnapshot,
     line_index::{LineEndings, LineIndex, PositionEncoding},
     lsp::{
-        completion_item_hash,
+        LspError, completion_item_hash,
         ext::ShellRunnableArgs,
         semantic_tokens::{self, standard_fallback_type},
         utils::invalid_params_error,
-        LspError,
     },
     lsp_ext::{self, SnippetTextEdit},
     target_spec::{CargoTargetSpec, TargetSpec},
@@ -200,10 +199,10 @@ pub(crate) fn snippet_text_edit(
     line_index: &LineIndex,
     is_snippet: bool,
     indel: Indel,
+    annotation: Option<ChangeAnnotationId>,
     client_supports_annotations: bool,
 ) -> lsp_ext::SnippetTextEdit {
-    let annotation_id =
-        indel.annotation.filter(|_| client_supports_annotations).map(|it| it.to_string());
+    let annotation_id = annotation.filter(|_| client_supports_annotations).map(|it| it.to_string());
     let text_edit = text_edit(line_index, indel);
     let insert_text_format =
         if is_snippet { Some(lsp_types::InsertTextFormat::SNIPPET) } else { None };
@@ -228,10 +227,17 @@ pub(crate) fn snippet_text_edit_vec(
     text_edit: TextEdit,
     clients_support_annotations: bool,
 ) -> Vec<lsp_ext::SnippetTextEdit> {
+    let annotation = text_edit.change_annotation();
     text_edit
         .into_iter()
         .map(|indel| {
-            self::snippet_text_edit(line_index, is_snippet, indel, clients_support_annotations)
+            self::snippet_text_edit(
+                line_index,
+                is_snippet,
+                indel,
+                annotation,
+                clients_support_annotations,
+            )
         })
         .collect()
 }
@@ -740,7 +746,7 @@ pub(crate) fn semantic_tokens(
                 | HlTag::None
                     if highlight_range.highlight.mods.is_empty() =>
                 {
-                    continue
+                    continue;
                 }
                 _ => (),
             }
@@ -1082,6 +1088,7 @@ fn merge_text_and_snippet_edits(
 ) -> Vec<SnippetTextEdit> {
     let mut edits: Vec<SnippetTextEdit> = vec![];
     let mut snippets = snippet_edit.into_edit_ranges().into_iter().peekable();
+    let annotation = edit.change_annotation();
     let text_edits = edit.into_iter();
     // offset to go from the final source location to the original source location
     let mut source_text_offset = 0i32;
@@ -1127,11 +1134,8 @@ fn merge_text_and_snippet_edits(
             edits.push(snippet_text_edit(
                 line_index,
                 true,
-                Indel {
-                    insert: format!("${snippet_index}"),
-                    delete: snippet_range,
-                    annotation: None,
-                },
+                Indel { insert: format!("${snippet_index}"), delete: snippet_range },
+                annotation,
                 client_supports_annotations,
             ))
         }
@@ -1190,11 +1194,8 @@ fn merge_text_and_snippet_edits(
             edits.push(snippet_text_edit(
                 line_index,
                 true,
-                Indel {
-                    insert: new_text,
-                    delete: current_indel.delete,
-                    annotation: current_indel.annotation,
-                },
+                Indel { insert: new_text, delete: current_indel.delete },
+                annotation,
                 client_supports_annotations,
             ))
         } else {
@@ -1204,6 +1205,7 @@ fn merge_text_and_snippet_edits(
                 line_index,
                 false,
                 current_indel,
+                annotation,
                 client_supports_annotations,
             ));
         }
@@ -1230,7 +1232,8 @@ fn merge_text_and_snippet_edits(
         snippet_text_edit(
             line_index,
             true,
-            Indel { insert: format!("${snippet_index}"), delete: snippet_range, annotation: None },
+            Indel { insert: format!("${snippet_index}"), delete: snippet_range },
+            annotation,
             client_supports_annotations,
         )
     }));
@@ -1251,8 +1254,17 @@ pub(crate) fn snippet_text_document_edit(
     let mut edits = if let Some(snippet_edit) = snippet_edit {
         merge_text_and_snippet_edits(&line_index, edit, snippet_edit, client_supports_annotations)
     } else {
+        let annotation = edit.change_annotation();
         edit.into_iter()
-            .map(|it| snippet_text_edit(&line_index, is_snippet, it, client_supports_annotations))
+            .map(|it| {
+                snippet_text_edit(
+                    &line_index,
+                    is_snippet,
+                    it,
+                    annotation,
+                    client_supports_annotations,
+                )
+            })
             .collect()
     };
 
@@ -1465,7 +1477,7 @@ pub(crate) fn call_hierarchy_item(
 
 pub(crate) fn code_action_kind(kind: AssistKind) -> lsp_types::CodeActionKind {
     match kind {
-        AssistKind::None | AssistKind::Generate => lsp_types::CodeActionKind::EMPTY,
+        AssistKind::Generate => lsp_types::CodeActionKind::EMPTY,
         AssistKind::QuickFix => lsp_types::CodeActionKind::QUICKFIX,
         AssistKind::Refactor => lsp_types::CodeActionKind::REFACTOR,
         AssistKind::RefactorExtract => lsp_types::CodeActionKind::REFACTOR_EXTRACT,
@@ -1502,7 +1514,12 @@ pub(crate) fn code_action(
         (Some(it), _) => res.edit = Some(snippet_workspace_edit(snap, it)?),
         (None, Some((index, code_action_params, version))) => {
             res.data = Some(lsp_ext::CodeActionData {
-                id: format!("{}:{}:{index}", assist.id.0, assist.id.1.name()),
+                id: format!(
+                    "{}:{}:{index}:{}",
+                    assist.id.0,
+                    assist.id.1.name(),
+                    assist.id.2.map(|x| x.to_string()).unwrap_or("".to_owned())
+                ),
                 code_action_params,
                 version,
             });
@@ -1536,7 +1553,7 @@ pub(crate) fn runnable(
             );
 
             let cwd = match runnable.kind {
-                ide::RunnableKind::Bin { .. } => workspace_root.clone(),
+                ide::RunnableKind::Bin => workspace_root.clone(),
                 _ => spec.cargo_toml.parent().to_owned(),
             };
 
@@ -1927,19 +1944,11 @@ pub(crate) fn make_update_runnable(
 }
 
 pub(crate) fn implementation_title(count: usize) -> String {
-    if count == 1 {
-        "1 implementation".into()
-    } else {
-        format!("{count} implementations")
-    }
+    if count == 1 { "1 implementation".into() } else { format!("{count} implementations") }
 }
 
 pub(crate) fn reference_title(count: usize) -> String {
-    if count == 1 {
-        "1 reference".into()
-    } else {
-        format!("{count} references")
-    }
+    if count == 1 { "1 reference".into() } else { format!("{count} references") }
 }
 
 pub(crate) fn markup_content(
@@ -1962,7 +1971,7 @@ pub(crate) fn rename_error(err: RenameError) -> LspError {
 
 #[cfg(test)]
 mod tests {
-    use expect_test::{expect, Expect};
+    use expect_test::{Expect, expect};
     use ide::{Analysis, FilePosition};
     use ide_db::source_change::Snippet;
     use test_utils::extract_offset;
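
Change annotations now live on the `TextEdit` rather than on each `Indel`: `snippet_text_edit` takes the annotation explicitly, and each caller reads it once via `edit.change_annotation()`. A minimal sketch of the filtering step follows, with a plain `u32` standing in for `ide_db::source_change::ChangeAnnotationId`.

// `client_supports_annotations` gates whether the id is sent at all; the
// annotation itself is resolved once per TextEdit by the caller.
fn annotation_id_for_client(
    annotation: Option<u32>, // stand-in for ChangeAnnotationId
    client_supports_annotations: bool,
) -> Option<String> {
    annotation.filter(|_| client_supports_annotations).map(|it| it.to_string())
}

fn main() {
    assert_eq!(annotation_id_for_client(Some(7), true), Some("7".to_owned()));
    assert_eq!(annotation_id_for_client(Some(7), false), None);
    assert_eq!(annotation_id_for_client(None, true), None);
}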
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/utils.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/utils.rs
index 9a9e66be51ce3..673eaa5952f0b 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/utils.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/utils.rs
@@ -8,7 +8,7 @@ use triomphe::Arc;
 use crate::{
     global_state::GlobalState,
     line_index::{LineEndings, LineIndex, PositionEncoding},
-    lsp::{from_proto, LspError},
+    lsp::{LspError, from_proto},
     lsp_ext,
 };
 
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
index f5d9469f2622f..bd213ffa57a16 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
@@ -8,36 +8,34 @@ use std::{
     time::{Duration, Instant},
 };
 
-use always_assert::always;
-use crossbeam_channel::{select, Receiver};
-use ide_db::base_db::{SourceDatabase, SourceRootDatabase, VfsPath};
+use crossbeam_channel::{Receiver, select};
+use ide_db::base_db::{SourceDatabase, VfsPath, salsa::Database as _};
 use lsp_server::{Connection, Notification, Request};
-use lsp_types::{notification::Notification as _, TextDocumentIdentifier};
+use lsp_types::{TextDocumentIdentifier, notification::Notification as _};
 use stdx::thread::ThreadIntent;
-use tracing::{error, span, Level};
-use vfs::{loader::LoadingProgress, AbsPathBuf, FileId};
+use tracing::{Level, error, span};
+use vfs::{AbsPathBuf, FileId, loader::LoadingProgress};
 
 use crate::{
     config::Config,
-    diagnostics::{fetch_native_diagnostics, DiagnosticsGeneration, NativeDiagnosticsFetchKind},
+    diagnostics::{DiagnosticsGeneration, NativeDiagnosticsFetchKind, fetch_native_diagnostics},
     discover::{DiscoverArgument, DiscoverCommand, DiscoverProjectMessage},
     flycheck::{self, FlycheckMessage},
     global_state::{
-        file_id_to_url, url_to_file_id, FetchBuildDataResponse, FetchWorkspaceRequest,
-        FetchWorkspaceResponse, GlobalState,
+        FetchBuildDataResponse, FetchWorkspaceRequest, FetchWorkspaceResponse, GlobalState,
+        file_id_to_url, url_to_file_id,
     },
-    hack_recover_crate_name,
     handlers::{
         dispatch::{NotificationDispatcher, RequestDispatcher},
         request::empty_diagnostic_report,
     },
     lsp::{
         from_proto, to_proto,
-        utils::{notification_is, Progress},
+        utils::{Progress, notification_is},
     },
     lsp_ext,
     reload::{BuildDataProgress, ProcMacroProgress, ProjectWorkspaceProgress},
-    test_runner::{CargoTestMessage, TestState},
+    test_runner::{CargoTestMessage, CargoTestOutput, TestState},
 };
 
 pub fn main_loop(config: Config, connection: Connection) -> anyhow::Result<()> {
@@ -365,6 +363,7 @@ impl GlobalState {
                             fraction = 1.0;
                             title = "Indexing";
 
+                            self.analysis_host.raw_database_mut().trigger_lru_eviction();
                             self.prime_caches_queue.op_completed(());
                             if cancelled {
                                 self.prime_caches_queue
@@ -504,8 +503,10 @@ impl GlobalState {
         if !self.fetch_workspaces_queue.op_in_progress() {
             if let Some((cause, ())) = self.fetch_build_data_queue.should_start_op() {
                 self.fetch_build_data(cause);
-            } else if let Some((cause, paths)) = self.fetch_proc_macros_queue.should_start_op() {
-                self.fetch_proc_macros(cause, paths);
+            } else if let Some((cause, (change, paths))) =
+                self.fetch_proc_macros_queue.should_start_op()
+            {
+                self.fetch_proc_macros(cause, change, paths);
             }
         }
 
@@ -517,7 +518,9 @@ impl GlobalState {
 
         let loop_duration = loop_start.elapsed();
         if loop_duration > Duration::from_millis(100) && was_quiescent {
-            tracing::warn!("overly long loop turn took {loop_duration:?} (event handling took {event_handling_duration:?}): {event_dbg_msg}");
+            tracing::warn!(
+                "overly long loop turn took {loop_duration:?} (event handling took {event_handling_duration:?}): {event_dbg_msg}"
+            );
             self.poke_rust_analyzer_developer(format!(
                 "overly long loop turn took {loop_duration:?} (event handling took {event_handling_duration:?}): {event_dbg_msg}"
             ));
@@ -529,7 +532,7 @@ impl GlobalState {
         let num_worker_threads = self.config.prime_caches_num_threads();
 
         self.task_pool.handle.spawn_with_sender(ThreadIntent::Worker, {
-            let analysis = self.snapshot().analysis;
+            let analysis = AssertUnwindSafe(self.snapshot().analysis);
             move |sender| {
                 sender.send(Task::PrimeCaches(PrimeCachesProgress::Begin)).unwrap();
                 let res = analysis.parallel_prime_caches(num_worker_threads, |progress| {
@@ -555,13 +558,14 @@ impl GlobalState {
                     (excluded == vfs::FileExcluded::No).then_some(file_id)
                 })
                 .filter(|&file_id| {
-                    let source_root = db.file_source_root(file_id);
+                    let source_root_id = db.file_source_root(file_id).source_root_id(db);
+                    let source_root = db.source_root(source_root_id).source_root(db);
                     // Only publish diagnostics for files in the workspace, not from crates.io deps
                     // or the sysroot.
                     // While theoretically these should never have errors, we have quite a few false
                     // positives particularly in the stdlib, and those diagnostics would stay around
                     // forever if we emitted them here.
-                    !db.source_root(source_root).is_library
+                    !source_root.is_library
                 })
                 .collect::<std::sync::Arc<_>>()
         };
@@ -642,8 +646,9 @@ impl GlobalState {
                 (excluded == vfs::FileExcluded::No).then_some(file_id)
             })
             .filter(|&file_id| {
-                let source_root = db.file_source_root(file_id);
-                !db.source_root(source_root).is_library
+                let source_root_id = db.file_source_root(file_id).source_root_id(db);
+                let source_root = db.source_root(source_root_id).source_root(db);
+                !source_root.is_library
             })
             .collect::<Vec<_>>();
         tracing::trace!("updating tests for {:?}", subscriptions);
@@ -659,9 +664,7 @@ impl GlobalState {
                     .filter_map(|f| snapshot.analysis.discover_tests_in_file(f).ok())
                     .flatten()
                     .collect::<Vec<_>>();
-                for t in &tests {
-                    hack_recover_crate_name::insert_name(t.id.clone());
-                }
+
                 Task::DiscoverTest(lsp_ext::DiscoverTestResults {
                     tests: tests
                         .into_iter()
@@ -802,9 +805,10 @@ impl GlobalState {
                 let (state, msg) = match progress {
                     ProcMacroProgress::Begin => (Some(Progress::Begin), None),
                     ProcMacroProgress::Report(msg) => (Some(Progress::Report), Some(msg)),
-                    ProcMacroProgress::End(proc_macro_load_result) => {
+                    ProcMacroProgress::End(change) => {
                         self.fetch_proc_macros_queue.op_completed(true);
-                        self.set_proc_macros(proc_macro_load_result);
+                        self.analysis_host.apply_change(change);
+                        self.finish_loading_crate_graph();
                         (Some(Progress::End), None)
                     }
                 };
@@ -840,7 +844,7 @@ impl GlobalState {
             }
             vfs::loader::Message::Progress { n_total, n_done, dir, config_version } => {
                 let _p = span!(Level::INFO, "GlobalState::handle_vfs_msg/progress").entered();
-                always!(config_version <= self.vfs_config_version);
+                stdx::always!(config_version <= self.vfs_config_version);
 
                 let (n_done, state) = match n_done {
                     LoadingProgress::Started => {
@@ -907,16 +911,15 @@ impl GlobalState {
                 });
             }
             QueuedTask::CheckProcMacroSources(modified_rust_files) => {
-                let crate_graph = self.analysis_host.raw_database().crate_graph();
-                let snap = self.snapshot();
+                let analysis = AssertUnwindSafe(self.snapshot().analysis);
                 self.task_pool.handle.spawn_with_sender(stdx::thread::ThreadIntent::Worker, {
                     move |sender| {
                         if modified_rust_files.into_iter().any(|file_id| {
                             // FIXME: Check whether these files could be build script related
-                            match snap.analysis.crates_for(file_id) {
-                                Ok(crates) => {
-                                    crates.iter().any(|&krate| crate_graph[krate].is_proc_macro)
-                                }
+                            match analysis.crates_for(file_id) {
+                                Ok(crates) => crates.iter().any(|&krate| {
+                                    analysis.is_proc_macro_crate(krate).is_ok_and(|it| it)
+                                }),
                                 _ => false,
                             }
                         }) {
@@ -958,30 +961,31 @@ impl GlobalState {
     }
 
     fn handle_cargo_test_msg(&mut self, message: CargoTestMessage) {
-        match message {
-            CargoTestMessage::Test { name, state } => {
+        match message.output {
+            CargoTestOutput::Test { name, state } => {
                 let state = match state {
                     TestState::Started => lsp_ext::TestState::Started,
                     TestState::Ignored => lsp_ext::TestState::Skipped,
                     TestState::Ok => lsp_ext::TestState::Passed,
                     TestState::Failed { stdout } => lsp_ext::TestState::Failed { message: stdout },
                 };
-                let Some(test_id) = hack_recover_crate_name::lookup_name(name) else {
-                    return;
-                };
+
+                // The notification requires the namespace form (with underscores) of the target
+                let test_id = format!("{}::{name}", message.target.target.replace('-', "_"));
+
                 self.send_notification::<lsp_ext::ChangeTestState>(
                     lsp_ext::ChangeTestStateParams { test_id, state },
                 );
             }
-            CargoTestMessage::Suite => (),
-            CargoTestMessage::Finished => {
+            CargoTestOutput::Suite => (),
+            CargoTestOutput::Finished => {
                 self.test_run_remaining_jobs = self.test_run_remaining_jobs.saturating_sub(1);
                 if self.test_run_remaining_jobs == 0 {
                     self.send_notification::<lsp_ext::EndRunTest>(());
                     self.test_run_session = None;
                 }
             }
-            CargoTestMessage::Custom { text } => {
+            CargoTestOutput::Custom { text } => {
                 self.send_notification::<lsp_ext::AppendOutputToRunTest>(text);
             }
         }
@@ -1170,6 +1174,7 @@ impl GlobalState {
             .on::<NO_RETRY, lsp_ext::InterpretFunction>(handlers::handle_interpret_function)
             .on::<NO_RETRY, lsp_ext::ExpandMacro>(handlers::handle_expand_macro)
             .on::<NO_RETRY, lsp_ext::ParentModule>(handlers::handle_parent_module)
+            .on::<NO_RETRY, lsp_ext::ChildModules>(handlers::handle_child_modules)
             .on::<NO_RETRY, lsp_ext::Runnables>(handlers::handle_runnables)
             .on::<NO_RETRY, lsp_ext::RelatedTests>(handlers::handle_related_tests)
             .on::<NO_RETRY, lsp_ext::CodeActionRequest>(handlers::handle_code_action)
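
With the crate-name recovery hack removed, test ids are now derived from the cargo target that produced the message. A minimal sketch of that normalization (the function name is illustrative):

// The namespace form replaces dashes in the cargo target name with
// underscores before prefixing the test path reported in the JSON output.
fn lsp_test_id(cargo_target: &str, test_name: &str) -> String {
    format!("{}::{test_name}", cargo_target.replace('-', "_"))
}

fn main() {
    assert_eq!(lsp_test_id("my-package", "module::func"), "my_package::module::func");
}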
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
index dffaa88240b1a..55ed1923653f9 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
@@ -15,14 +15,13 @@
 // FIXME: This is a mess that needs some untangling work
 use std::{iter, mem};
 
-use hir::{db::DefDatabase, ChangeWithProcMacros, ProcMacros, ProcMacrosBuilder};
-use ide::CrateId;
+use hir::{ChangeWithProcMacros, ProcMacrosBuilder, db::DefDatabase};
 use ide_db::{
-    base_db::{ra_salsa::Durability, CrateGraph, CrateWorkspaceData, ProcMacroPaths},
     FxHashMap,
+    base_db::{CrateGraphBuilder, ProcMacroPaths, salsa::Durability},
 };
 use itertools::Itertools;
-use load_cargo::{load_proc_macro, ProjectFolders};
+use load_cargo::{ProjectFolders, load_proc_macro};
 use lsp_types::FileSystemWatcher;
 use proc_macro_api::ProcMacroClient;
 use project_model::{ManifestPath, ProjectWorkspace, ProjectWorkspaceKind, WorkspaceBuildScripts};
@@ -60,7 +59,7 @@ pub(crate) enum BuildDataProgress {
 pub(crate) enum ProcMacroProgress {
     Begin,
     Report(String),
-    End(ProcMacros),
+    End(ChangeWithProcMacros),
 }
 
 impl GlobalState {
@@ -142,6 +141,7 @@ impl GlobalState {
         if self.fetch_build_data_error().is_err() {
             status.health |= lsp_ext::Health::Warning;
             message.push_str("Failed to run build scripts of some packages.\n\n");
+            message.push_str("Please refer to the logs for more details on the errors.");
         }
         if let Some(err) = &self.config_errors {
             status.health |= lsp_ext::Health::Warning;
@@ -158,7 +158,7 @@ impl GlobalState {
         {
             status.health |= lsp_ext::Health::Warning;
             message.push_str("Failed to discover workspace.\n");
-            message.push_str("Consider adding the `Cargo.toml` of the workspace to the [`linkedProjects`](https://rust-analyzer.github.io/manual.html#rust-analyzer.linkedProjects) setting.\n\n");
+            message.push_str("Consider adding the `Cargo.toml` of the workspace to the [`linkedProjects`](https://rust-analyzer.github.io/book/configuration.html#linkedProjects) setting.\n\n");
         }
         if self.fetch_workspace_error().is_err() {
             status.health |= lsp_ext::Health::Error;
@@ -386,7 +386,12 @@ impl GlobalState {
         });
     }
 
-    pub(crate) fn fetch_proc_macros(&mut self, cause: Cause, paths: Vec<ProcMacroPaths>) {
+    pub(crate) fn fetch_proc_macros(
+        &mut self,
+        cause: Cause,
+        mut change: ChangeWithProcMacros,
+        paths: Vec<ProcMacroPaths>,
+    ) {
         info!(%cause, "will load proc macros");
         let ignored_proc_macros = self.config.ignored_proc_macros(None).clone();
         let proc_macro_clients = self.proc_macro_clients.clone();
@@ -403,52 +408,38 @@ impl GlobalState {
             };
 
             let mut builder = ProcMacrosBuilder::default();
-            let chain = proc_macro_clients
+            let proc_macro_clients = proc_macro_clients
                 .iter()
                 .map(|res| res.as_ref().map_err(|e| e.to_string()))
                 .chain(iter::repeat_with(|| Err("proc-macro-srv is not running".into())));
-            for (client, paths) in chain.zip(paths) {
-                paths
-                    .into_iter()
-                    .map(move |(crate_id, res)| {
-                        (
-                            crate_id,
-                            res.map_or_else(
-                                |e| Err((e, true)),
-                                |(crate_name, path)| {
-                                    progress(path.to_string());
-                                    client.as_ref().map_err(|it| (it.clone(), true)).and_then(
-                                        |client| {
-                                            load_proc_macro(
-                                                client,
-                                                &path,
-                                                ignored_proc_macros
-                                                    .iter()
-                                                    .find_map(|(name, macros)| {
-                                                        eq_ignore_underscore(name, &crate_name)
-                                                            .then_some(&**macros)
-                                                    })
-                                                    .unwrap_or_default(),
-                                            )
-                                        },
-                                    )
-                                },
-                            ),
-                        )
-                    })
-                    .for_each(|(krate, res)| builder.insert(krate, res));
+            for (client, paths) in proc_macro_clients.zip(paths) {
+                for (crate_id, res) in paths.iter() {
+                    let expansion_res = match client {
+                        Ok(client) => match res {
+                            Ok((crate_name, path)) => {
+                                progress(path.to_string());
+                                let ignored_proc_macros = ignored_proc_macros
+                                    .iter()
+                                    .find_map(|(name, macros)| {
+                                        eq_ignore_underscore(name, crate_name).then_some(&**macros)
+                                    })
+                                    .unwrap_or_default();
+
+                                load_proc_macro(client, path, ignored_proc_macros)
+                            }
+                            Err(e) => Err((e.clone(), true)),
+                        },
+                        Err(ref e) => Err((e.clone(), true)),
+                    };
+                    builder.insert(*crate_id, expansion_res)
+                }
             }
 
-            sender.send(Task::LoadProcMacros(ProcMacroProgress::End(builder.build()))).unwrap();
+            change.set_proc_macros(builder);
+            sender.send(Task::LoadProcMacros(ProcMacroProgress::End(change))).unwrap();
         });
     }
 
-    pub(crate) fn set_proc_macros(&mut self, proc_macros: ProcMacros) {
-        let mut change = ChangeWithProcMacros::new();
-        change.set_proc_macros(proc_macros);
-        self.analysis_host.apply_change(change);
-    }
-
     pub(crate) fn switch_workspaces(&mut self, cause: Cause) {
         let _p = tracing::info_span!("GlobalState::switch_workspaces").entered();
         tracing::info!(%cause, "will switch workspaces");
@@ -458,11 +449,12 @@ impl GlobalState {
         else {
             return;
         };
+        let switching_from_empty_workspace = self.workspaces.is_empty();
 
-        info!(%cause, ?force_crate_graph_reload);
-        if self.fetch_workspace_error().is_err() && !self.workspaces.is_empty() {
+        info!(%cause, ?force_crate_graph_reload, %switching_from_empty_workspace);
+        if self.fetch_workspace_error().is_err() && !switching_from_empty_workspace {
             if *force_crate_graph_reload {
-                self.recreate_crate_graph(cause);
+                self.recreate_crate_graph(cause, false);
             }
             // It only makes sense to switch to a partially broken workspace
             // if we don't have any workspace at all yet.
@@ -479,36 +471,44 @@ impl GlobalState {
                 .all(|(l, r)| l.eq_ignore_build_data(r));
 
         if same_workspaces {
-            let (workspaces, build_scripts) = match self.fetch_build_data_queue.last_op_result() {
-                Some(FetchBuildDataResponse { workspaces, build_scripts }) => {
-                    (workspaces.clone(), build_scripts.as_slice())
-                }
-                None => (Default::default(), Default::default()),
-            };
-
-            if Arc::ptr_eq(&workspaces, &self.workspaces) {
-                info!("set build scripts to workspaces");
+            if switching_from_empty_workspace {
+                // Switching from empty to empty is a no-op
+                return;
+            }
+            if let Some(FetchBuildDataResponse { workspaces, build_scripts }) =
+                self.fetch_build_data_queue.last_op_result()
+            {
+                if Arc::ptr_eq(workspaces, &self.workspaces) {
+                    info!("set build scripts to workspaces");
+
+                    let workspaces = workspaces
+                        .iter()
+                        .cloned()
+                        .zip(build_scripts)
+                        .map(|(mut ws, bs)| {
+                            ws.set_build_scripts(bs.as_ref().ok().cloned().unwrap_or_default());
+                            ws
+                        })
+                        .collect::<Vec<_>>();
+                    // Workspaces are the same, but we've updated build data.
+                    info!("same workspace, but new build data");
+                    self.workspaces = Arc::new(workspaces);
+                } else {
+                    info!("build scripts do not match the version of the active workspace");
+                    if *force_crate_graph_reload {
+                        self.recreate_crate_graph(cause, switching_from_empty_workspace);
+                    }
 
-                let workspaces = workspaces
-                    .iter()
-                    .cloned()
-                    .zip(build_scripts)
-                    .map(|(mut ws, bs)| {
-                        ws.set_build_scripts(bs.as_ref().ok().cloned().unwrap_or_default());
-                        ws
-                    })
-                    .collect::<Vec<_>>();
-                // Workspaces are the same, but we've updated build data.
-                info!("same workspace, but new build data");
-                self.workspaces = Arc::new(workspaces);
+                    // Current build scripts do not match the version of the active
+                    // workspace, so there's nothing for us to update.
+                    return;
+                }
             } else {
-                info!("build scripts do not match the version of the active workspace");
                 if *force_crate_graph_reload {
-                    self.recreate_crate_graph(cause);
+                    self.recreate_crate_graph(cause, switching_from_empty_workspace);
                 }
 
-                // Current build scripts do not match the version of the active
-                // workspace, so there's nothing for us to update.
+                // No build scripts but unchanged workspaces, nothing to do here
                 return;
             }
         } else {
@@ -527,6 +527,13 @@ impl GlobalState {
             if self.config.run_build_scripts(None) {
                 self.build_deps_changed = false;
                 self.fetch_build_data_queue.request_op("workspace updated".to_owned(), ());
+
+                if !switching_from_empty_workspace {
+                    // `switch_workspaces()` will be called again when build scripts already run, which should
+                    // take a short time. If we update the workspace now we will invalidate proc macros and cfgs,
+                    // and then when build scripts complete we will invalidate them again.
+                    return;
+                }
             }
         }
 
@@ -629,7 +636,7 @@ impl GlobalState {
             Config::user_config_dir_path().as_deref(),
         );
 
-        if (self.proc_macro_clients.is_empty() || !same_workspaces)
+        if (self.proc_macro_clients.len() < self.workspaces.len() || !same_workspaces)
             && self.config.expand_proc_macros()
         {
             info!("Spawning proc-macro servers");
@@ -645,12 +652,14 @@ impl GlobalState {
                     | ProjectWorkspaceKind::DetachedFile { cargo: Some((cargo, ..)), .. } => cargo
                         .env()
                         .into_iter()
-                        .chain(self.config.extra_env(None))
-                        .map(|(a, b)| (a.clone(), b.clone()))
+                        .map(|(k, v)| (k.clone(), Some(v.clone())))
+                        .chain(
+                            self.config.extra_env(None).iter().map(|(k, v)| (k.clone(), v.clone())),
+                        )
                         .chain(
                             ws.sysroot
                                 .root()
-                                .map(|it| ("RUSTUP_TOOLCHAIN".to_owned(), it.to_string())),
+                                .map(|it| ("RUSTUP_TOOLCHAIN".to_owned(), Some(it.to_string()))),
                         )
                         .collect(),
 
@@ -683,12 +692,12 @@ impl GlobalState {
         self.local_roots_parent_map = Arc::new(self.source_root_config.source_root_parent_map());
 
         info!(?cause, "recreating the crate graph");
-        self.recreate_crate_graph(cause);
+        self.recreate_crate_graph(cause, switching_from_empty_workspace);
 
         info!("did switch workspaces");
     }
 
-    fn recreate_crate_graph(&mut self, cause: String) {
+    fn recreate_crate_graph(&mut self, cause: String, initial_build: bool) {
         info!(?cause, "Building Crate Graph");
         self.report_progress(
             "Building CrateGraph",
@@ -710,7 +719,7 @@ impl GlobalState {
             })
             .collect();
 
-        let (crate_graph, proc_macro_paths, ws_data) = {
+        let (crate_graph, proc_macro_paths) = {
             // Create crate graph from all the workspaces
             let vfs = &self.vfs.read().0;
             let load = |path: &AbsPath| {
@@ -723,25 +732,33 @@ impl GlobalState {
 
             ws_to_crate_graph(&self.workspaces, self.config.extra_env(None), load)
         };
-        let mut change = ChangeWithProcMacros::new();
-        if self.config.expand_proc_macros() {
-            change.set_proc_macros(
-                crate_graph
-                    .iter()
-                    .map(|id| (id, Err(("proc-macro has not been built yet".to_owned(), true))))
-                    .collect(),
-            );
-            self.fetch_proc_macros_queue.request_op(cause, proc_macro_paths);
+        let mut change = ChangeWithProcMacros::default();
+        if initial_build || !self.config.expand_proc_macros() {
+            if self.config.expand_proc_macros() {
+                change.set_proc_macros(
+                    crate_graph
+                        .iter()
+                        .map(|id| (id, Err(("proc-macro has not been built yet".to_owned(), true))))
+                        .collect(),
+                );
+            } else {
+                change.set_proc_macros(
+                    crate_graph
+                        .iter()
+                        .map(|id| (id, Err(("proc-macro expansion is disabled".to_owned(), false))))
+                        .collect(),
+                );
+            }
+
+            change.set_crate_graph(crate_graph);
+            self.analysis_host.apply_change(change);
+
+            self.finish_loading_crate_graph();
         } else {
-            change.set_proc_macros(
-                crate_graph
-                    .iter()
-                    .map(|id| (id, Err(("proc-macro expansion is disabled".to_owned(), false))))
-                    .collect(),
-            );
+            change.set_crate_graph(crate_graph);
+            self.fetch_proc_macros_queue.request_op(cause, (change, proc_macro_paths));
         }
-        change.set_crate_graph(crate_graph, ws_data);
-        self.analysis_host.apply_change(change);
+
         self.report_progress(
             "Building CrateGraph",
             crate::lsp::utils::Progress::End,
@@ -749,7 +766,9 @@ impl GlobalState {
             None,
             None,
         );
+    }
 
+    pub(crate) fn finish_loading_crate_graph(&mut self) {
         self.process_changes();
         self.reload_flycheck();
     }
@@ -801,11 +820,7 @@ impl GlobalState {
             }
         }
 
-        if buf.is_empty() {
-            Ok(())
-        } else {
-            Err(buf)
-        }
+        if buf.is_empty() { Ok(()) } else { Err(buf) }
     }
 
     fn reload_flycheck(&mut self) {
@@ -880,28 +895,21 @@ impl GlobalState {
 // FIXME: Move this into load-cargo?
 pub fn ws_to_crate_graph(
     workspaces: &[ProjectWorkspace],
-    extra_env: &FxHashMap<String, String>,
+    extra_env: &FxHashMap<String, Option<String>>,
     mut load: impl FnMut(&AbsPath) -> Option<vfs::FileId>,
-) -> (CrateGraph, Vec<ProcMacroPaths>, FxHashMap<CrateId, Arc<CrateWorkspaceData>>) {
-    let mut crate_graph = CrateGraph::default();
+) -> (CrateGraphBuilder, Vec<ProcMacroPaths>) {
+    let mut crate_graph = CrateGraphBuilder::default();
     let mut proc_macro_paths = Vec::default();
-    let mut ws_data = FxHashMap::default();
     for ws in workspaces {
         let (other, mut crate_proc_macros) = ws.to_crate_graph(&mut load, extra_env);
-        let ProjectWorkspace { toolchain, target_layout, .. } = ws;
-
-        let mapping = crate_graph.extend(other, &mut crate_proc_macros);
-        // Populate the side tables for the newly merged crates
-        ws_data.extend(mapping.values().copied().zip(iter::repeat(Arc::new(CrateWorkspaceData {
-            toolchain: toolchain.clone(),
-            data_layout: target_layout.clone(),
-        }))));
+
+        crate_graph.extend(other, &mut crate_proc_macros);
         proc_macro_paths.push(crate_proc_macros);
     }
 
     crate_graph.shrink_to_fit();
     proc_macro_paths.shrink_to_fit();
-    (crate_graph, proc_macro_paths, ws_data)
+    (crate_graph, proc_macro_paths)
 }
 
 pub(crate) fn should_refresh_for_change(
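
The rewritten proc-macro loading loop matches ignored proc-macro entries against crate names via `eq_ignore_underscore` (defined elsewhere in this file). The sketch below illustrates the idea of such a comparison, treating `-` and `_` as interchangeable; it is an assumption for illustration, not the helper's actual implementation.

// Compare two crate/package names, considering `-` and `_` equal, since
// cargo package names map to crate names with dashes turned into underscores.
fn eq_ignore_underscore_sketch(a: &str, b: &str) -> bool {
    a.len() == b.len()
        && a.chars().zip(b.chars()).all(|(x, y)| {
            x == y || ((x == '-' || x == '_') && (y == '-' || y == '_'))
        })
}

fn main() {
    assert!(eq_ignore_underscore_sketch("serde-derive", "serde_derive"));
    assert!(!eq_ignore_underscore_sketch("serde", "serde_json"));
}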
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs
index b28567fe09b58..7132e09146ebc 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs
@@ -4,7 +4,7 @@ use std::mem;
 
 use cfg::{CfgAtom, CfgExpr};
 use hir::sym;
-use ide::{Cancellable, CrateId, FileId, RunnableKind, TestId};
+use ide::{Cancellable, Crate, FileId, RunnableKind, TestId};
 use project_model::project_json::Runnable;
 use project_model::{CargoFeatures, ManifestPath, TargetKind};
 use rustc_hash::FxHashSet;
@@ -54,7 +54,7 @@ pub(crate) struct CargoTargetSpec {
     pub(crate) package: String,
     pub(crate) target: String,
     pub(crate) target_kind: TargetKind,
-    pub(crate) crate_id: CrateId,
+    pub(crate) crate_id: Crate,
     pub(crate) required_features: Vec<String>,
     pub(crate) features: FxHashSet<String>,
     pub(crate) sysroot_root: Option<vfs::AbsPathBuf>,
@@ -264,12 +264,13 @@ mod tests {
 
     use ide::Edition;
     use syntax::{
-        ast::{self, AstNode},
         SmolStr,
+        ast::{self, AstNode},
     };
     use syntax_bridge::{
-        dummy_test_span_utils::{DummyTestSpanMap, DUMMY},
-        syntax_node_to_token_tree, DocCommentDesugarMode,
+        DocCommentDesugarMode,
+        dummy_test_span_utils::{DUMMY, DummyTestSpanMap},
+        syntax_node_to_token_tree,
     };
 
     fn check(cfg: &str, expected_features: &[&str]) {
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/test_runner.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/test_runner.rs
index 3edfb812cf5cf..9c0bc33af6463 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/test_runner.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/test_runner.rs
@@ -3,12 +3,13 @@
 
 use crossbeam_channel::Sender;
 use paths::AbsPath;
+use project_model::TargetKind;
 use serde::Deserialize as _;
 use serde_derive::Deserialize;
 use toolchain::Tool;
 
 use crate::{
-    command::{CommandHandle, ParseFromLine},
+    command::{CargoParser, CommandHandle},
     flycheck::CargoOptions,
 };
 
@@ -25,9 +26,15 @@ pub(crate) enum TestState {
     },
 }
 
+#[derive(Debug)]
+pub(crate) struct CargoTestMessage {
+    pub target: TestTarget,
+    pub output: CargoTestOutput,
+}
+
 #[derive(Debug, Deserialize)]
 #[serde(tag = "type", rename_all = "camelCase")]
-pub(crate) enum CargoTestMessage {
+pub(crate) enum CargoTestOutput {
     Test {
         name: String,
         #[serde(flatten)]
@@ -40,19 +47,33 @@ pub(crate) enum CargoTestMessage {
     },
 }
 
-impl ParseFromLine for CargoTestMessage {
-    fn from_line(line: &str, _: &mut String) -> Option<Self> {
+pub(crate) struct CargoTestOutputParser {
+    pub target: TestTarget,
+}
+
+impl CargoTestOutputParser {
+    pub(crate) fn new(test_target: &TestTarget) -> Self {
+        Self { target: test_target.clone() }
+    }
+}
+
+impl CargoParser<CargoTestMessage> for CargoTestOutputParser {
+    fn from_line(&self, line: &str, _error: &mut String) -> Option<CargoTestMessage> {
         let mut deserializer = serde_json::Deserializer::from_str(line);
         deserializer.disable_recursion_limit();
-        if let Ok(message) = CargoTestMessage::deserialize(&mut deserializer) {
-            return Some(message);
-        }
 
-        Some(CargoTestMessage::Custom { text: line.to_owned() })
+        Some(CargoTestMessage {
+            target: self.target.clone(),
+            output: if let Ok(message) = CargoTestOutput::deserialize(&mut deserializer) {
+                message
+            } else {
+                CargoTestOutput::Custom { text: line.to_owned() }
+            },
+        })
     }
 
-    fn from_eof() -> Option<Self> {
-        Some(CargoTestMessage::Finished)
+    fn from_eof(&self) -> Option<CargoTestMessage> {
+        Some(CargoTestMessage { target: self.target.clone(), output: CargoTestOutput::Finished })
     }
 }
 
@@ -62,14 +83,14 @@ pub(crate) struct CargoTestHandle {
 }
 
 // Example of a cargo test command:
-// cargo test --workspace --no-fail-fast -- -Z unstable-options --format=json
-// or
-// cargo test --package my-package --no-fail-fast -- module::func -Z unstable-options --format=json
+//
+// cargo test --package my-package --bin my_bin --no-fail-fast -- module::func -Z unstable-options --format=json
 
-#[derive(Debug)]
-pub(crate) enum TestTarget {
-    Workspace,
-    Package(String),
+#[derive(Debug, Clone)]
+pub(crate) struct TestTarget {
+    pub package: String,
+    pub target: String,
+    pub kind: TargetKind,
 }
 
 impl CargoTestHandle {
@@ -80,19 +101,22 @@ impl CargoTestHandle {
         test_target: TestTarget,
         sender: Sender<CargoTestMessage>,
     ) -> std::io::Result<Self> {
-        let mut cmd = toolchain::command(Tool::Cargo.path(), root);
+        let mut cmd = toolchain::command(Tool::Cargo.path(), root, &options.extra_env);
         cmd.env("RUSTC_BOOTSTRAP", "1");
         cmd.arg("test");
 
-        match &test_target {
-            TestTarget::Package(package) => {
-                cmd.arg("--package");
-                cmd.arg(package);
-            }
-            TestTarget::Workspace => {
-                cmd.arg("--workspace");
-            }
-        };
+        cmd.arg("--package");
+        cmd.arg(&test_target.package);
+
+        if let TargetKind::Lib { .. } = test_target.kind {
+            // no name required with lib because there can only be one lib target per package
+            cmd.arg("--lib");
+        } else if let Some(cargo_target) = test_target.kind.as_cargo_target() {
+            cmd.arg(format!("--{cargo_target}"));
+            cmd.arg(&test_target.target);
+        } else {
+            tracing::warn!("Running test for unknown cargo target {:?}", test_target.kind);
+        }
 
         // --no-fail-fast is needed to ensure that all requested tests will run
         cmd.arg("--no-fail-fast");
@@ -110,6 +134,8 @@ impl CargoTestHandle {
             cmd.arg(extra_arg);
         }
 
-        Ok(Self { _handle: CommandHandle::spawn(cmd, sender)? })
+        Ok(Self {
+            _handle: CommandHandle::spawn(cmd, CargoTestOutputParser::new(&test_target), sender)?,
+        })
     }
 }
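
Test runs are now scoped to a single cargo target instead of `--workspace` or a bare `--package`. A minimal sketch of the flag selection, using a simplified stand-in for `project_model::TargetKind`:

// `--lib` needs no target name because a package has at most one lib target;
// the other kinds pass the target name explicitly (e.g. `--bin my_bin`).
enum TargetKindSketch { Lib, Bin, Example, Test, Bench, Other }

fn target_args(kind: &TargetKindSketch, target: &str) -> Vec<String> {
    match kind {
        TargetKindSketch::Lib => vec!["--lib".to_owned()],
        TargetKindSketch::Bin => vec!["--bin".to_owned(), target.to_owned()],
        TargetKindSketch::Example => vec!["--example".to_owned(), target.to_owned()],
        TargetKindSketch::Test => vec!["--test".to_owned(), target.to_owned()],
        TargetKindSketch::Bench => vec!["--bench".to_owned(), target.to_owned()],
        // Unknown kinds fall back to testing the whole package.
        TargetKindSketch::Other => vec![],
    }
}

fn main() {
    assert_eq!(target_args(&TargetKindSketch::Bin, "my_bin"), ["--bin", "my_bin"]);
    assert_eq!(target_args(&TargetKindSketch::Lib, "ignored"), ["--lib"]);
}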
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/config.rs
index 4f208b6c5dd63..ca897aeb3eed6 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/config.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/config.rs
@@ -6,10 +6,10 @@ use std::io::{self};
 use anyhow::Context;
 use tracing::level_filters::LevelFilter;
 use tracing_subscriber::{
-    filter::{filter_fn, Targets},
-    fmt::{time, MakeWriter},
-    layer::SubscriberExt,
     Layer, Registry,
+    filter::{Targets, filter_fn},
+    fmt::{MakeWriter, time},
+    layer::SubscriberExt,
 };
 use tracing_tree::HierarchicalLayer;
 
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/hprof.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/hprof.rs
index 5b18762bb9741..e95b8d3e60865 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/hprof.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/hprof.rs
@@ -41,15 +41,14 @@ use std::{
 
 use rustc_hash::FxHashSet;
 use tracing::{
+    Event, Id, Level, Subscriber,
     field::{Field, Visit},
     span::Attributes,
-    Event, Id, Level, Subscriber,
 };
 use tracing_subscriber::{
-    filter,
+    Layer, Registry, filter,
     layer::{Context, SubscriberExt},
     registry::LookupSpan,
-    Layer, Registry,
 };
 
 pub fn init(spec: &str) -> tracing::subscriber::DefaultGuard {
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/json.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/json.rs
index f5394d023a104..1b0d0f0d59933 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/json.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/json.rs
@@ -12,10 +12,10 @@ use std::{io::Write as _, marker::PhantomData, time::Instant};
 
 use ide_db::FxHashSet;
 use tracing::{
-    span::{Attributes, Id},
     Event, Subscriber,
+    span::{Attributes, Id},
 };
-use tracing_subscriber::{fmt::MakeWriter, layer::Context, registry::LookupSpan, Layer};
+use tracing_subscriber::{Layer, fmt::MakeWriter, layer::Context, registry::LookupSpan};
 
 struct JsonData {
     name: &'static str,
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs
index 6f26bdc2cf026..96c2ceef6ba14 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs
@@ -18,17 +18,17 @@ mod testdir;
 use std::{collections::HashMap, path::PathBuf, time::Instant};
 
 use lsp_types::{
+    CodeActionContext, CodeActionParams, CompletionParams, DidOpenTextDocumentParams,
+    DocumentFormattingParams, DocumentRangeFormattingParams, FileRename, FormattingOptions,
+    GotoDefinitionParams, HoverParams, InlayHint, InlayHintLabel, InlayHintParams,
+    PartialResultParams, Position, Range, RenameFilesParams, TextDocumentItem,
+    TextDocumentPositionParams, WorkDoneProgressParams,
     notification::DidOpenTextDocument,
     request::{
         CodeActionRequest, Completion, Formatting, GotoTypeDefinition, HoverRequest,
         InlayHintRequest, InlayHintResolveRequest, RangeFormatting, WillRenameFiles,
         WorkspaceSymbolRequest,
     },
-    CodeActionContext, CodeActionParams, CompletionParams, DidOpenTextDocumentParams,
-    DocumentFormattingParams, DocumentRangeFormattingParams, FileRename, FormattingOptions,
-    GotoDefinitionParams, HoverParams, InlayHint, InlayHintLabel, InlayHintParams,
-    PartialResultParams, Position, Range, RenameFilesParams, TextDocumentItem,
-    TextDocumentPositionParams, WorkDoneProgressParams,
 };
 use rust_analyzer::lsp::ext::{OnEnter, Runnables, RunnablesParams};
 use serde_json::json;
@@ -37,7 +37,7 @@ use stdx::format_to_acc;
 use test_utils::skip_slow_tests;
 use testdir::TestDir;
 
-use crate::support::{project, Project};
+use crate::support::{Project, project};
 
 #[test]
 fn completes_items_from_standard_library() {
@@ -1140,26 +1140,12 @@ fn root_contains_symlink_out_dirs_check() {
 }
 
 #[test]
-#[cfg(any(feature = "sysroot-abi", rust_analyzer))]
 fn resolve_proc_macro() {
     use expect_test::expect;
-    use vfs::AbsPathBuf;
     if skip_slow_tests() {
         return;
     }
 
-    let mut sysroot = project_model::Sysroot::discover(
-        &AbsPathBuf::assert_utf8(std::env::current_dir().unwrap()),
-        &Default::default(),
-    );
-    let loaded_sysroot =
-        sysroot.load_workspace(&project_model::RustSourceWorkspaceConfig::default_cargo());
-    if let Some(loaded_sysroot) = loaded_sysroot {
-        sysroot.set_workspace(loaded_sysroot);
-    }
-
-    let proc_macro_server_path = sysroot.discover_proc_macro_srv().unwrap();
-
     let server = Project::with_fixture(
         r###"
 //- /foo/Cargo.toml
@@ -1171,12 +1157,8 @@ edition = "2021"
 bar = {path = "../bar"}
 
 //- /foo/src/main.rs
-#![allow(internal_features)]
-#![feature(rustc_attrs, decl_macro)]
 use bar::Bar;
 
-#[rustc_builtin_macro]
-macro derive($item:item) {}
 trait Bar {
   fn bar();
 }
@@ -1233,11 +1215,10 @@ pub fn foo(_input: TokenStream) -> TokenStream {
             "buildScripts": {
                 "enable": true
             },
-            "sysroot": null,
+            "sysroot": "discover",
         },
         "procMacro": {
             "enable": true,
-            "server": proc_macro_server_path.as_path().as_str(),
         }
     }))
     .root("foo")
@@ -1248,7 +1229,7 @@ pub fn foo(_input: TokenStream) -> TokenStream {
     let res = server.send_request::<HoverRequest>(HoverParams {
         text_document_position_params: TextDocumentPositionParams::new(
             server.doc_id("foo/src/main.rs"),
-            Position::new(12, 9),
+            Position::new(8, 9),
         ),
         work_done_progress_params: Default::default(),
     });
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/ratoml.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/ratoml.rs
index 5dfaf0d36503e..3f313b7e57fe0 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/ratoml.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/ratoml.rs
@@ -1,10 +1,10 @@
 use crate::support::{Project, Server};
 use crate::testdir::TestDir;
 use lsp_types::{
-    notification::{DidChangeTextDocument, DidOpenTextDocument, DidSaveTextDocument},
     DidChangeTextDocumentParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams,
     TextDocumentContentChangeEvent, TextDocumentIdentifier, TextDocumentItem, Url,
     VersionedTextDocumentIdentifier,
+    notification::{DidChangeTextDocument, DidOpenTextDocument, DidSaveTextDocument},
 };
 use paths::Utf8PathBuf;
 
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs
index 1f52f366c5469..3f97952365f3c 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs
@@ -5,10 +5,10 @@ use std::{
     time::Duration,
 };
 
-use crossbeam_channel::{after, select, Receiver};
+use crossbeam_channel::{Receiver, after, select};
 use itertools::Itertools;
 use lsp_server::{Connection, Message, Notification, Request};
-use lsp_types::{notification::Exit, request::Shutdown, TextDocumentIdentifier, Url};
+use lsp_types::{TextDocumentIdentifier, Url, notification::Exit, request::Shutdown};
 use parking_lot::{Mutex, MutexGuard};
 use paths::{Utf8Path, Utf8PathBuf};
 use rust_analyzer::{
@@ -17,7 +17,7 @@ use rust_analyzer::{
     lsp, main_loop,
 };
 use serde::Serialize;
-use serde_json::{json, to_string_pretty, Value};
+use serde_json::{Value, json, to_string_pretty};
 use test_utils::FixtureWithProjectMeta;
 use tracing_subscriber::fmt::TestWriter;
 use vfs::AbsPathBuf;
@@ -88,11 +88,7 @@ impl Project<'_> {
 
     pub(crate) fn run_lsif(self) -> String {
         let tmp_dir = self.tmp_dir.unwrap_or_else(|| {
-            if self.root_dir_contains_symlink {
-                TestDir::new_symlink()
-            } else {
-                TestDir::new()
-            }
+            if self.root_dir_contains_symlink { TestDir::new_symlink() } else { TestDir::new() }
         });
 
         let FixtureWithProjectMeta {
@@ -148,7 +144,10 @@ impl Project<'_> {
                 let guard = CONFIG_DIR_LOCK.lock();
                 let test_dir = TestDir::new();
                 let value = test_dir.path().to_owned();
-                env::set_var("__TEST_RA_USER_CONFIG_DIR", &value);
+                // SAFETY: This is safe because this code runs single-threaded.
+                unsafe {
+                    env::set_var("__TEST_RA_USER_CONFIG_DIR", &value);
+                }
                 (guard, test_dir)
             })
         } else {
@@ -156,11 +155,7 @@ impl Project<'_> {
         };
 
         let tmp_dir = self.tmp_dir.unwrap_or_else(|| {
-            if self.root_dir_contains_symlink {
-                TestDir::new_symlink()
-            } else {
-                TestDir::new()
-            }
+            if self.root_dir_contains_symlink { TestDir::new_symlink() } else { TestDir::new() }
         });
 
         static INIT: Once = Once::new();
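
The test helper now wraps `env::set_var` in `unsafe` because, under the 2024 edition, mutating the process environment is an unsafe operation. A minimal sketch of the pattern (the function name is illustrative):

use std::path::Path;

fn set_test_config_dir(value: &Path) {
    // SAFETY: sound only while no other thread reads or writes the environment;
    // the test setup here is assumed to be effectively single-threaded.
    unsafe {
        std::env::set_var("__TEST_RA_USER_CONFIG_DIR", value);
    }
}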
diff --git a/src/tools/rust-analyzer/crates/span/Cargo.toml b/src/tools/rust-analyzer/crates/span/Cargo.toml
index 097a056c99a45..b3b401c3db44a 100644
--- a/src/tools/rust-analyzer/crates/span/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/span/Cargo.toml
@@ -12,7 +12,7 @@ authors.workspace = true
 
 [dependencies]
 la-arena.workspace = true
-ra-salsa = { workspace = true, optional = true }
+salsa = { workspace = true, optional = true }
 rustc-hash.workspace = true
 hashbrown.workspace = true
 text-size.workspace = true
@@ -23,7 +23,7 @@ syntax.workspace = true
 stdx.workspace = true
 
 [features]
-default = ["ra-salsa"]
+default = ["salsa"]
 
 [lints]
 workspace = true
diff --git a/src/tools/rust-analyzer/crates/span/src/ast_id.rs b/src/tools/rust-analyzer/crates/span/src/ast_id.rs
index 1d81d684511cd..228fba1fa0966 100644
--- a/src/tools/rust-analyzer/crates/span/src/ast_id.rs
+++ b/src/tools/rust-analyzer/crates/span/src/ast_id.rs
@@ -14,7 +14,7 @@ use std::{
 
 use la_arena::{Arena, Idx, RawIdx};
 use rustc_hash::FxHasher;
-use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};
+use syntax::{AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, ast};
 
 /// See crates\hir-expand\src\ast_id_map.rs
 /// This is a type erased FileAstId.
@@ -132,7 +132,7 @@ pub struct AstIdMap {
     /// Maps stable id to unstable ptr.
     arena: Arena<SyntaxNodePtr>,
     /// Reverse: map ptr to id.
-    map: hashbrown::HashMap<Idx<SyntaxNodePtr>, (), ()>,
+    map: hashbrown::HashTable<Idx<SyntaxNodePtr>>,
 }
 
 impl fmt::Debug for AstIdMap {
@@ -169,13 +169,13 @@ impl AstIdMap {
                 TreeOrder::DepthFirst
             }
         });
-        res.map = hashbrown::HashMap::with_capacity_and_hasher(res.arena.len(), ());
+        res.map = hashbrown::HashTable::with_capacity(res.arena.len());
         for (idx, ptr) in res.arena.iter() {
             let hash = hash_ptr(ptr);
-            match res.map.raw_entry_mut().from_hash(hash, |idx2| *idx2 == idx) {
-                hashbrown::hash_map::RawEntryMut::Occupied(_) => unreachable!(),
-                hashbrown::hash_map::RawEntryMut::Vacant(entry) => {
-                    entry.insert_with_hasher(hash, idx, (), |&idx| hash_ptr(&res.arena[idx]));
+            match res.map.entry(hash, |&idx2| idx2 == idx, |&idx| hash_ptr(&res.arena[idx])) {
+                hashbrown::hash_table::Entry::Occupied(_) => unreachable!(),
+                hashbrown::hash_table::Entry::Vacant(entry) => {
+                    entry.insert(idx);
                 }
             }
         }
@@ -196,8 +196,8 @@ impl AstIdMap {
     pub fn ast_id_for_ptr<N: AstIdNode>(&self, ptr: AstPtr<N>) -> FileAstId<N> {
         let ptr = ptr.syntax_node_ptr();
         let hash = hash_ptr(&ptr);
-        match self.map.raw_entry().from_hash(hash, |&idx| self.arena[idx] == ptr) {
-            Some((&raw, &())) => FileAstId {
+        match self.map.find(hash, |&idx| self.arena[idx] == ptr) {
+            Some(&raw) => FileAstId {
                 raw: ErasedFileAstId(raw.into_raw().into_u32()),
                 covariant: PhantomData,
             },
@@ -221,8 +221,8 @@ impl AstIdMap {
     fn erased_ast_id(&self, item: &SyntaxNode) -> ErasedFileAstId {
         let ptr = SyntaxNodePtr::new(item);
         let hash = hash_ptr(&ptr);
-        match self.map.raw_entry().from_hash(hash, |&idx| self.arena[idx] == ptr) {
-            Some((&idx, &())) => ErasedFileAstId(idx.into_raw().into_u32()),
+        match self.map.find(hash, |&idx| self.arena[idx] == ptr) {
+            Some(&idx) => ErasedFileAstId(idx.into_raw().into_u32()),
             None => panic!(
                 "Can't find {:?} in AstIdMap:\n{:?}\n source text: {}",
                 item,
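
The hunks above migrate `AstIdMap`'s reverse lookup from hashbrown's raw-entry API to the plain `HashTable` type while keeping the same precomputed-hash discipline. A self-contained sketch of that pattern, using a `Vec<String>` in place of the `Arena<SyntaxNodePtr>`; it assumes a hashbrown version that provides `HashTable`, and the helper names and the `DefaultHasher` choice are illustrative, not taken from the patch:

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

fn hash_str(s: &str) -> u64 {
    let mut h = DefaultHasher::new();
    s.hash(&mut h);
    h.finish()
}

// Index `items` by hash, storing only indices in the table; the side table owns the data.
fn build_index(items: &[String]) -> hashbrown::HashTable<usize> {
    let mut map = hashbrown::HashTable::with_capacity(items.len());
    for (idx, item) in items.iter().enumerate() {
        let hash = hash_str(item);
        // `entry` takes the precomputed hash, an equality check on stored entries,
        // and a re-hasher used when the table grows.
        match map.entry(hash, |&other| other == idx, |&i| hash_str(&items[i])) {
            hashbrown::hash_table::Entry::Occupied(_) => unreachable!("indices are unique"),
            hashbrown::hash_table::Entry::Vacant(entry) => {
                entry.insert(idx);
            }
        }
    }
    map
}

// `find` replaces the old raw-entry `from_hash` lookup: hash once, then compare
// candidates by following the stored index back into the side table.
fn lookup(map: &hashbrown::HashTable<usize>, items: &[String], needle: &str) -> Option<usize> {
    map.find(hash_str(needle), |&i| items[i] == needle).copied()
}

fn main() {
    let items = vec!["foo".to_owned(), "bar".to_owned()];
    let map = build_index(&items);
    assert_eq!(lookup(&map, &items, "bar"), Some(1));
    assert_eq!(lookup(&map, &items, "baz"), None);
}
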
diff --git a/src/tools/rust-analyzer/crates/span/src/hygiene.rs b/src/tools/rust-analyzer/crates/span/src/hygiene.rs
index 6becc8e41ed00..b21102f2db716 100644
--- a/src/tools/rust-analyzer/crates/span/src/hygiene.rs
+++ b/src/tools/rust-analyzer/crates/span/src/hygiene.rs
@@ -21,44 +21,276 @@
 //! `ExpnData::call_site` in rustc, [`MacroCallLoc::call_site`] in rust-analyzer.
 use std::fmt;
 
-#[cfg(not(feature = "ra-salsa"))]
-use crate::InternId;
-#[cfg(feature = "ra-salsa")]
-use ra_salsa::{InternId, InternValue};
+use crate::Edition;
 
-use crate::{Edition, MacroCallId};
+/// A syntax context describes a hierarchy that tracks the order of macro definitions.
+#[cfg(feature = "salsa")]
+#[derive(Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
+pub struct SyntaxContext(
+    /// # Invariant
+    ///
+    /// This is either a valid `salsa::Id` or a root `SyntaxContext`.
+    u32,
+    std::marker::PhantomData<&'static salsa::plumbing::interned::Value<SyntaxContext>>,
+);
 
-/// Interned [`SyntaxContextData`].
-#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub struct SyntaxContextId(InternId);
+#[cfg(feature = "salsa")]
+const _: () = {
+    use crate::MacroCallId;
+    use salsa::plumbing as zalsa_;
+    use salsa::plumbing::interned as zalsa_struct_;
 
-impl fmt::Debug for SyntaxContextId {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        if f.alternate() {
-            write!(f, "{}", self.0.as_u32())
-        } else {
-            f.debug_tuple("SyntaxContextId").field(&self.0).finish()
+    #[derive(Clone, Eq, Debug)]
+    pub struct SyntaxContextData {
+        outer_expn: Option<MacroCallId>,
+        outer_transparency: Transparency,
+        edition: Edition,
+        parent: SyntaxContext,
+        opaque: SyntaxContext,
+        opaque_and_semitransparent: SyntaxContext,
+    }
+
+    impl PartialEq for SyntaxContextData {
+        fn eq(&self, other: &Self) -> bool {
+            self.outer_expn == other.outer_expn
+                && self.outer_transparency == other.outer_transparency
+                && self.edition == other.edition
+                && self.parent == other.parent
         }
     }
-}
 
-#[cfg(feature = "ra-salsa")]
-impl ra_salsa::InternKey for SyntaxContextId {
-    fn from_intern_id(v: ra_salsa::InternId) -> Self {
-        SyntaxContextId(v)
+    impl std::hash::Hash for SyntaxContextData {
+        fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+            self.outer_expn.hash(state);
+            self.outer_transparency.hash(state);
+            self.edition.hash(state);
+            self.parent.hash(state);
+        }
     }
-    fn as_intern_id(&self) -> ra_salsa::InternId {
-        self.0
+    /// Key to use during hash lookups. Each field is some type that implements `Lookup<T>`
+    /// for the owned type. This permits interning with an `&str` when a `String` is required and so forth.
+    #[derive(Hash)]
+    struct StructKey<'db, T0, T1, T2, T3>(T0, T1, T2, T3, std::marker::PhantomData<&'db ()>);
+
+    impl<'db, T0, T1, T2, T3> zalsa_::interned::HashEqLike<StructKey<'db, T0, T1, T2, T3>>
+        for SyntaxContextData
+    where
+        Option<MacroCallId>: zalsa_::interned::HashEqLike<T0>,
+        Transparency: zalsa_::interned::HashEqLike<T1>,
+        Edition: zalsa_::interned::HashEqLike<T2>,
+        SyntaxContext: zalsa_::interned::HashEqLike<T3>,
+    {
+        fn hash<H: std::hash::Hasher>(&self, h: &mut H) {
+            zalsa_::interned::HashEqLike::<T0>::hash(&self.outer_expn, &mut *h);
+            zalsa_::interned::HashEqLike::<T1>::hash(&self.outer_transparency, &mut *h);
+            zalsa_::interned::HashEqLike::<T2>::hash(&self.edition, &mut *h);
+            zalsa_::interned::HashEqLike::<T3>::hash(&self.parent, &mut *h);
+        }
+        fn eq(&self, data: &StructKey<'db, T0, T1, T2, T3>) -> bool {
+            zalsa_::interned::HashEqLike::<T0>::eq(&self.outer_expn, &data.0)
+                && zalsa_::interned::HashEqLike::<T1>::eq(&self.outer_transparency, &data.1)
+                && zalsa_::interned::HashEqLike::<T2>::eq(&self.edition, &data.2)
+                && zalsa_::interned::HashEqLike::<T3>::eq(&self.parent, &data.3)
+        }
     }
-}
+    impl zalsa_struct_::Configuration for SyntaxContext {
+        const DEBUG_NAME: &'static str = "SyntaxContextData";
+        type Fields<'a> = SyntaxContextData;
+        type Struct<'a> = SyntaxContext;
+        fn struct_from_id<'db>(id: salsa::Id) -> Self::Struct<'db> {
+            SyntaxContext::from_salsa_id(id)
+        }
+        fn deref_struct(s: Self::Struct<'_>) -> salsa::Id {
+            s.as_salsa_id()
+                .expect("`SyntaxContext::deref_struct()` called on a root `SyntaxContext`")
+        }
+    }
+    impl SyntaxContext {
+        pub fn ingredient<Db>(db: &Db) -> &zalsa_struct_::IngredientImpl<Self>
+        where
+            Db: ?Sized + zalsa_::Database,
+        {
+            static CACHE: zalsa_::IngredientCache<zalsa_struct_::IngredientImpl<SyntaxContext>> =
+                zalsa_::IngredientCache::new();
+            CACHE.get_or_create(db.zalsa(), || {
+                db.zalsa().add_or_lookup_jar_by_type::<zalsa_struct_::JarImpl<SyntaxContext>>()
+            })
+        }
+    }
+    impl zalsa_::AsId for SyntaxContext {
+        fn as_id(&self) -> salsa::Id {
+            self.as_salsa_id().expect("`SyntaxContext::as_id()` called on a root `SyntaxContext`")
+        }
+    }
+    impl zalsa_::FromId for SyntaxContext {
+        fn from_id(id: salsa::Id) -> Self {
+            Self::from_salsa_id(id)
+        }
+    }
+    unsafe impl Send for SyntaxContext {}
 
-impl fmt::Display for SyntaxContextId {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "{}", self.0.as_u32())
+    unsafe impl Sync for SyntaxContext {}
+
+    impl zalsa_::SalsaStructInDb for SyntaxContext {
+        type MemoIngredientMap = salsa::plumbing::MemoIngredientSingletonIndex;
+
+        fn lookup_or_create_ingredient_index(
+            aux: &salsa::plumbing::Zalsa,
+        ) -> salsa::plumbing::IngredientIndices {
+            aux.add_or_lookup_jar_by_type::<zalsa_struct_::JarImpl<SyntaxContext>>().into()
+        }
+
+        #[inline]
+        fn cast(id: salsa::Id, type_id: std::any::TypeId) -> Option<Self> {
+            if type_id == std::any::TypeId::of::<SyntaxContext>() {
+                Some(<Self as salsa::plumbing::FromId>::from_id(id))
+            } else {
+                None
+            }
+        }
+    }
+
+    unsafe impl salsa::plumbing::Update for SyntaxContext {
+        unsafe fn maybe_update(old_pointer: *mut Self, new_value: Self) -> bool {
+            if unsafe { *old_pointer } != new_value {
+                unsafe { *old_pointer = new_value };
+                true
+            } else {
+                false
+            }
+        }
+    }
+    impl<'db> SyntaxContext {
+        pub fn new<
+            Db,
+            T0: zalsa_::interned::Lookup<Option<MacroCallId>> + std::hash::Hash,
+            T1: zalsa_::interned::Lookup<Transparency> + std::hash::Hash,
+            T2: zalsa_::interned::Lookup<Edition> + std::hash::Hash,
+            T3: zalsa_::interned::Lookup<SyntaxContext> + std::hash::Hash,
+        >(
+            db: &'db Db,
+            outer_expn: T0,
+            outer_transparency: T1,
+            edition: T2,
+            parent: T3,
+            opaque: impl FnOnce(SyntaxContext) -> SyntaxContext,
+            opaque_and_semitransparent: impl FnOnce(SyntaxContext) -> SyntaxContext,
+        ) -> Self
+        where
+            Db: ?Sized + salsa::Database,
+            Option<MacroCallId>: zalsa_::interned::HashEqLike<T0>,
+            Transparency: zalsa_::interned::HashEqLike<T1>,
+            Edition: zalsa_::interned::HashEqLike<T2>,
+            SyntaxContext: zalsa_::interned::HashEqLike<T3>,
+        {
+            SyntaxContext::ingredient(db).intern(
+                db.as_dyn_database(),
+                StructKey::<'db>(
+                    outer_expn,
+                    outer_transparency,
+                    edition,
+                    parent,
+                    std::marker::PhantomData,
+                ),
+                |id, data| SyntaxContextData {
+                    outer_expn: zalsa_::interned::Lookup::into_owned(data.0),
+                    outer_transparency: zalsa_::interned::Lookup::into_owned(data.1),
+                    edition: zalsa_::interned::Lookup::into_owned(data.2),
+                    parent: zalsa_::interned::Lookup::into_owned(data.3),
+                    opaque: opaque(zalsa_::FromId::from_id(id)),
+                    opaque_and_semitransparent: opaque_and_semitransparent(
+                        zalsa_::FromId::from_id(id),
+                    ),
+                },
+            )
+        }
+
+        /// Invariant: Only [`SyntaxContext::ROOT`] has a [`None`] outer expansion.
+        // FIXME: The None case needs to encode the context crate id. We can encode that, as the MSB of
+        // `MacroCallId` is reserved anyway, so we can do bit tagging here just fine.
+        // The bigger issue is that this will cause interning to create completely separate chains
+        // per crate, though that is likely not a problem as `MacroCallId`s already depend on the calling crate.
+        pub fn outer_expn<Db>(self, db: &'db Db) -> Option<MacroCallId>
+        where
+            Db: ?Sized + zalsa_::Database,
+        {
+            let id = self.as_salsa_id()?;
+            let fields = SyntaxContext::ingredient(db).data(db.as_dyn_database(), id);
+            fields.outer_expn
+        }
+
+        pub fn outer_transparency<Db>(self, db: &'db Db) -> Transparency
+        where
+            Db: ?Sized + zalsa_::Database,
+        {
+            let Some(id) = self.as_salsa_id() else { return Transparency::Opaque };
+            let fields = SyntaxContext::ingredient(db).data(db.as_dyn_database(), id);
+            fields.outer_transparency
+        }
+
+        pub fn edition<Db>(self, db: &'db Db) -> Edition
+        where
+            Db: ?Sized + zalsa_::Database,
+        {
+            match self.as_salsa_id() {
+                Some(id) => {
+                    let fields = SyntaxContext::ingredient(db).data(db.as_dyn_database(), id);
+                    fields.edition
+                }
+                None => Edition::from_u32(SyntaxContext::MAX_ID - self.into_u32()),
+            }
+        }
+
+        pub fn parent<Db>(self, db: &'db Db) -> SyntaxContext
+        where
+            Db: ?Sized + zalsa_::Database,
+        {
+            match self.as_salsa_id() {
+                Some(id) => {
+                    let fields = SyntaxContext::ingredient(db).data(db.as_dyn_database(), id);
+                    fields.parent
+                }
+                None => self,
+            }
+        }
+
+        /// This context, but with all transparent and semi-transparent expansions filtered away.
+        pub fn opaque<Db>(self, db: &'db Db) -> SyntaxContext
+        where
+            Db: ?Sized + zalsa_::Database,
+        {
+            match self.as_salsa_id() {
+                Some(id) => {
+                    let fields = SyntaxContext::ingredient(db).data(db.as_dyn_database(), id);
+                    fields.opaque
+                }
+                None => self,
+            }
+        }
+
+        /// This context, but with all transparent expansions filtered away.
+        pub fn opaque_and_semitransparent<Db>(self, db: &'db Db) -> SyntaxContext
+        where
+            Db: ?Sized + zalsa_::Database,
+        {
+            match self.as_salsa_id() {
+                Some(id) => {
+                    let fields = SyntaxContext::ingredient(db).data(db.as_dyn_database(), id);
+                    fields.opaque_and_semitransparent
+                }
+                None => self,
+            }
+        }
+    }
+};
+
+impl SyntaxContext {
+    #[inline]
+    pub fn is_root(self) -> bool {
+        (SyntaxContext::MAX_ID - Edition::LATEST as u32) <= self.into_u32()
+            && self.into_u32() <= (SyntaxContext::MAX_ID - Edition::Edition2015 as u32)
     }
-}
 
-impl SyntaxContextId {
     #[inline]
     pub fn remove_root_edition(&mut self) {
         if self.is_root() {
@@ -67,76 +299,70 @@ impl SyntaxContextId {
     }
 
     /// The root context, which is the parent of all other contexts. All [`FileId`]s have this context.
+    #[inline]
     pub const fn root(edition: Edition) -> Self {
-        SyntaxContextId(unsafe { InternId::new_unchecked(edition as u32) })
+        let edition = edition as u32;
+        // SAFETY: Roots are valid `SyntaxContext`s
+        unsafe { SyntaxContext::from_u32(SyntaxContext::MAX_ID - edition) }
     }
+}
 
-    pub fn is_root(self) -> bool {
-        self.into_u32() <= Edition::LATEST as u32
+#[cfg(feature = "salsa")]
+impl SyntaxContext {
+    const MAX_ID: u32 = salsa::Id::MAX_U32 - 1;
+
+    #[inline]
+    pub const fn into_u32(self) -> u32 {
+        self.0
     }
 
-    /// Deconstruct a `SyntaxContextId` into a raw `u32`.
-    /// This should only be used for deserialization purposes for the proc-macro server.
-    pub fn into_u32(self) -> u32 {
-        self.0.as_u32()
+    /// # Safety
+    ///
+    /// The ID must be a valid `SyntaxContext`.
+    #[inline]
+    pub const unsafe fn from_u32(u32: u32) -> Self {
+        // INVARIANT: Our precondition.
+        Self(u32, std::marker::PhantomData)
     }
 
-    /// Constructs a `SyntaxContextId` from a raw `u32`.
-    /// This should only be used for serialization purposes for the proc-macro server.
-    pub fn from_u32(u32: u32) -> Self {
-        Self(InternId::from(u32))
+    #[inline]
+    fn as_salsa_id(self) -> Option<salsa::Id> {
+        if self.is_root() {
+            None
+        } else {
+            // SAFETY: By our invariant, this is either a root (which we verified it's not) or a valid `salsa::Id`.
+            unsafe { Some(salsa::Id::from_u32(self.0)) }
+        }
     }
-}
 
-/// A syntax context describes a hierarchy tracking order of macro definitions.
-#[derive(Copy, Clone, Hash, PartialEq, Eq)]
-pub struct SyntaxContextData {
-    /// Invariant: Only [`SyntaxContextId::ROOT`] has a [`None`] outer expansion.
-    // FIXME: The None case needs to encode the context crate id. We can encode that as the MSB of
-    // MacroCallId is reserved anyways so we can do bit tagging here just fine.
-    // The bigger issue is that this will cause interning to now create completely separate chains
-    // per crate. Though that is likely not a problem as `MacroCallId`s are already crate calling dependent.
-    pub outer_expn: Option<MacroCallId>,
-    pub outer_transparency: Transparency,
-    pub edition: Edition,
-    pub parent: SyntaxContextId,
-    /// This context, but with all transparent and semi-transparent expansions filtered away.
-    pub opaque: SyntaxContextId,
-    /// This context, but with all transparent expansions filtered away.
-    pub opaque_and_semitransparent: SyntaxContextId,
+    #[inline]
+    fn from_salsa_id(id: salsa::Id) -> Self {
+        // SAFETY: This comes from a Salsa ID.
+        unsafe { Self::from_u32(id.as_u32()) }
+    }
 }
+#[cfg(not(feature = "salsa"))]
+#[derive(Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
+pub struct SyntaxContext(u32);
 
-#[cfg(feature = "ra-salsa")]
-impl InternValue for SyntaxContextData {
-    type Key = (SyntaxContextId, Option<MacroCallId>, Transparency, Edition);
+#[allow(dead_code)]
+const SALSA_MAX_ID_MIRROR: u32 = u32::MAX - 0xFF;
+#[cfg(feature = "salsa")]
+const _: () = assert!(salsa::Id::MAX_U32 == SALSA_MAX_ID_MIRROR);
 
-    fn into_key(&self) -> Self::Key {
-        (self.parent, self.outer_expn, self.outer_transparency, self.edition)
-    }
-}
+#[cfg(not(feature = "salsa"))]
+impl SyntaxContext {
+    const MAX_ID: u32 = SALSA_MAX_ID_MIRROR - 1;
 
-impl std::fmt::Debug for SyntaxContextData {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        f.debug_struct("SyntaxContextData")
-            .field("outer_expn", &self.outer_expn)
-            .field("outer_transparency", &self.outer_transparency)
-            .field("parent", &self.parent)
-            .field("opaque", &self.opaque)
-            .field("opaque_and_semitransparent", &self.opaque_and_semitransparent)
-            .finish()
+    pub const fn into_u32(self) -> u32 {
+        self.0
     }
-}
 
-impl SyntaxContextData {
-    pub fn root(edition: Edition) -> Self {
-        SyntaxContextData {
-            outer_expn: None,
-            outer_transparency: Transparency::Opaque,
-            parent: SyntaxContextId::root(edition),
-            opaque: SyntaxContextId::root(edition),
-            opaque_and_semitransparent: SyntaxContextId::root(edition),
-            edition,
-        }
+    /// # Safety
+    ///
+    /// None. This is always safe to call without the `salsa` feature; it is only marked `unsafe` to match the `salsa`-enabled signature.
+    pub const unsafe fn from_u32(u32: u32) -> Self {
+        Self(u32)
     }
 }
 
@@ -167,3 +393,23 @@ impl Transparency {
         matches!(self, Self::Opaque)
     }
 }
+
+impl fmt::Display for SyntaxContext {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        if self.is_root() {
+            write!(f, "ROOT{}", Edition::from_u32(SyntaxContext::MAX_ID - self.into_u32()).number())
+        } else {
+            write!(f, "{}", self.into_u32())
+        }
+    }
+}
+
+impl std::fmt::Debug for SyntaxContext {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        if f.alternate() {
+            fmt::Display::fmt(self, f)
+        } else {
+            f.debug_tuple("SyntaxContext").field(&self.0).finish()
+        }
+    }
+}
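
Root `SyntaxContext`s are no longer interned; as the hunks above show, they are encoded arithmetically at the top of the id space, one slot per edition, and `as_salsa_id()` simply refuses to map them. A self-contained sketch of that encoding, with a stand-in `MAX_ID` and a simplified `Edition` rather than the real types:

#[allow(dead_code)]
#[derive(Clone, Copy, Debug)]
enum Edition {
    Edition2015 = 0,
    Edition2018 = 1,
    Edition2021 = 2,
    Edition2024 = 3,
}

impl Edition {
    const LATEST: Edition = Edition::Edition2024;
}

// Stand-in for `salsa::Id::MAX_U32 - 1`; the exact value does not matter for the scheme.
const MAX_ID: u32 = u32::MAX - 0x100;

#[derive(Clone, Copy, Debug)]
struct Ctx(u32);

impl Ctx {
    // Roots occupy the top of the id space, one slot per edition.
    const fn root(edition: Edition) -> Ctx {
        Ctx(MAX_ID - edition as u32)
    }

    // Everything in [MAX_ID - LATEST, MAX_ID - Edition2015] is a root.
    fn is_root(self) -> bool {
        (MAX_ID - Edition::LATEST as u32) <= self.0
            && self.0 <= (MAX_ID - Edition::Edition2015 as u32)
    }

    // Roots are never interned, so only non-roots map to an interned id.
    fn as_salsa_id(self) -> Option<u32> {
        if self.is_root() { None } else { Some(self.0) }
    }
}

fn main() {
    let root = Ctx::root(Edition::Edition2021);
    assert!(root.is_root());
    assert_eq!(root.as_salsa_id(), None);
    assert!(!Ctx(42).is_root());
    println!("root = {root:?}");
}
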
diff --git a/src/tools/rust-analyzer/crates/span/src/lib.rs b/src/tools/rust-analyzer/crates/span/src/lib.rs
index 8dc957350381c..54f90908f3672 100644
--- a/src/tools/rust-analyzer/crates/span/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/span/src/lib.rs
@@ -1,16 +1,13 @@
 //! File and span related types.
 use std::fmt::{self, Write};
 
-#[cfg(feature = "ra-salsa")]
-use ra_salsa::InternId;
-
 mod ast_id;
 mod hygiene;
 mod map;
 
 pub use self::{
     ast_id::{AstIdMap, AstIdNode, ErasedFileAstId, FileAstId},
-    hygiene::{SyntaxContextData, SyntaxContextId, Transparency},
+    hygiene::{SyntaxContext, Transparency},
     map::{RealSpanMap, SpanMap},
 };
 
@@ -31,7 +28,7 @@ pub const FIXUP_ERASED_FILE_AST_ID_MARKER: ErasedFileAstId =
     // is required to be stable for the proc-macro-server
     ErasedFileAstId::from_raw(!0 - 1);
 
-pub type Span = SpanData<SyntaxContextId>;
+pub type Span = SpanData<SyntaxContext>;
 
 impl Span {
     pub fn cover(self, other: Span) -> Span {
@@ -183,6 +180,12 @@ impl EditionedFileId {
     }
 }
 
+#[cfg(not(feature = "salsa"))]
+mod salsa {
+    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
+    pub struct Id(u32);
+}
+
 /// Input to the analyzer is a set of files, where each file is identified by
 /// `FileId` and contains source code. However, another source of source code in
 /// Rust are macros: each macro can be thought of as producing a "temporary
@@ -197,166 +200,16 @@ impl EditionedFileId {
 /// (`MacroCallId` uses the location interning. You can check details here:
 /// <https://en.wikipedia.org/wiki/String_interning>).
 ///
-/// The two variants are encoded in a single u32 which are differentiated by the MSB.
-/// If the MSB is 0, the value represents a `FileId`, otherwise the remaining 31 bits represent a
-/// `MacroCallId`.
+/// Internally this holds a `salsa::Id`, but the interned definition itself cannot
+/// live here, as it references things from base-db and hir-expand.
 // FIXME: Give this a better fitting name
 #[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
-pub struct HirFileId(u32);
-
-impl From<HirFileId> for u32 {
-    fn from(value: HirFileId) -> Self {
-        value.0
-    }
-}
-
-impl From<MacroCallId> for HirFileId {
-    fn from(value: MacroCallId) -> Self {
-        value.as_file()
-    }
-}
-
-impl fmt::Debug for HirFileId {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        self.repr().fmt(f)
-    }
-}
-
-impl PartialEq<FileId> for HirFileId {
-    fn eq(&self, &other: &FileId) -> bool {
-        self.file_id().map(EditionedFileId::file_id) == Some(other)
-    }
-}
-impl PartialEq<HirFileId> for FileId {
-    fn eq(&self, other: &HirFileId) -> bool {
-        other.file_id().map(EditionedFileId::file_id) == Some(*self)
-    }
-}
-
-impl PartialEq<EditionedFileId> for HirFileId {
-    fn eq(&self, &other: &EditionedFileId) -> bool {
-        *self == HirFileId::from(other)
-    }
-}
-impl PartialEq<HirFileId> for EditionedFileId {
-    fn eq(&self, &other: &HirFileId) -> bool {
-        other == HirFileId::from(*self)
-    }
-}
-impl PartialEq<EditionedFileId> for FileId {
-    fn eq(&self, &other: &EditionedFileId) -> bool {
-        *self == FileId::from(other)
-    }
-}
-impl PartialEq<FileId> for EditionedFileId {
-    fn eq(&self, &other: &FileId) -> bool {
-        other == FileId::from(*self)
-    }
-}
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct MacroFileId {
-    pub macro_call_id: MacroCallId,
-}
+pub struct HirFileId(pub salsa::Id);
 
 /// `MacroCallId` identifies a particular macro invocation, like
 /// `println!("Hello, {}", world)`.
 #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub struct MacroCallId(InternId);
-
-#[cfg(feature = "ra-salsa")]
-impl ra_salsa::InternKey for MacroCallId {
-    fn from_intern_id(v: ra_salsa::InternId) -> Self {
-        MacroCallId(v)
-    }
-    fn as_intern_id(&self) -> ra_salsa::InternId {
-        self.0
-    }
-}
-
-impl MacroCallId {
-    pub const MAX_ID: u32 = 0x7fff_ffff;
-
-    pub fn as_file(self) -> HirFileId {
-        MacroFileId { macro_call_id: self }.into()
-    }
-
-    pub fn as_macro_file(self) -> MacroFileId {
-        MacroFileId { macro_call_id: self }
-    }
-}
-
-#[derive(Clone, Copy, PartialEq, Eq, Hash)]
-pub enum HirFileIdRepr {
-    FileId(EditionedFileId),
-    MacroFile(MacroFileId),
-}
-
-impl fmt::Debug for HirFileIdRepr {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        match self {
-            Self::FileId(arg0) => arg0.fmt(f),
-            Self::MacroFile(arg0) => {
-                f.debug_tuple("MacroFile").field(&arg0.macro_call_id.0).finish()
-            }
-        }
-    }
-}
-
-impl From<EditionedFileId> for HirFileId {
-    #[allow(clippy::let_unit_value)]
-    fn from(id: EditionedFileId) -> Self {
-        assert!(id.as_u32() <= Self::MAX_HIR_FILE_ID, "FileId index {} is too large", id.as_u32());
-        HirFileId(id.as_u32())
-    }
-}
-
-impl From<MacroFileId> for HirFileId {
-    #[allow(clippy::let_unit_value)]
-    fn from(MacroFileId { macro_call_id: MacroCallId(id) }: MacroFileId) -> Self {
-        let id = id.as_u32();
-        assert!(id <= Self::MAX_HIR_FILE_ID, "MacroCallId index {id} is too large");
-        HirFileId(id | Self::MACRO_FILE_TAG_MASK)
-    }
-}
-
-impl HirFileId {
-    const MAX_HIR_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK;
-    const MACRO_FILE_TAG_MASK: u32 = 1 << 31;
-
-    #[inline]
-    pub fn is_macro(self) -> bool {
-        self.0 & Self::MACRO_FILE_TAG_MASK != 0
-    }
-
-    #[inline]
-    pub fn macro_file(self) -> Option<MacroFileId> {
-        match self.0 & Self::MACRO_FILE_TAG_MASK {
-            0 => None,
-            _ => Some(MacroFileId {
-                macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)),
-            }),
-        }
-    }
-
-    #[inline]
-    pub fn file_id(self) -> Option<EditionedFileId> {
-        match self.0 & Self::MACRO_FILE_TAG_MASK {
-            0 => Some(EditionedFileId(self.0)),
-            _ => None,
-        }
-    }
-
-    #[inline]
-    pub fn repr(self) -> HirFileIdRepr {
-        match self.0 & Self::MACRO_FILE_TAG_MASK {
-            0 => HirFileIdRepr::FileId(EditionedFileId(self.0)),
-            _ => HirFileIdRepr::MacroFile(MacroFileId {
-                macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)),
-            }),
-        }
-    }
-}
+pub struct MacroCallId(pub salsa::Id);
 
 /// Legacy span type, only defined here as it is still used by the proc-macro server.
 /// While rust-analyzer doesn't use this anymore at all, RustRover relies on the legacy type for
@@ -369,72 +222,3 @@ impl std::fmt::Debug for TokenId {
         self.0.fmt(f)
     }
 }
-
-#[cfg(not(feature = "ra-salsa"))]
-mod intern_id_proxy {
-    use std::fmt;
-    use std::num::NonZeroU32;
-
-    #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
-    pub(super) struct InternId {
-        value: NonZeroU32,
-    }
-
-    impl InternId {
-        pub(super) const MAX: u32 = 0xFFFF_FF00;
-
-        pub(super) const unsafe fn new_unchecked(value: u32) -> Self {
-            debug_assert!(value < InternId::MAX);
-            let value = unsafe { NonZeroU32::new_unchecked(value + 1) };
-            InternId { value }
-        }
-
-        pub(super) fn as_u32(self) -> u32 {
-            self.value.get() - 1
-        }
-
-        pub(super) fn as_usize(self) -> usize {
-            self.as_u32() as usize
-        }
-    }
-
-    impl From<InternId> for u32 {
-        fn from(raw: InternId) -> u32 {
-            raw.as_u32()
-        }
-    }
-
-    impl From<InternId> for usize {
-        fn from(raw: InternId) -> usize {
-            raw.as_usize()
-        }
-    }
-
-    impl From<u32> for InternId {
-        fn from(id: u32) -> InternId {
-            assert!(id < InternId::MAX);
-            unsafe { InternId::new_unchecked(id) }
-        }
-    }
-
-    impl From<usize> for InternId {
-        fn from(id: usize) -> InternId {
-            assert!(id < (InternId::MAX as usize));
-            unsafe { InternId::new_unchecked(id as u32) }
-        }
-    }
-
-    impl fmt::Debug for InternId {
-        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-            self.as_usize().fmt(f)
-        }
-    }
-
-    impl fmt::Display for InternId {
-        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-            self.as_usize().fmt(f)
-        }
-    }
-}
-#[cfg(not(feature = "ra-salsa"))]
-use intern_id_proxy::InternId;
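
The small `mod salsa` fallback above is what lets `HirFileId(pub salsa::Id)` and `MacroCallId(pub salsa::Id)` keep a single definition whether or not the `salsa` feature is enabled. A sketch of that cfg-gated stand-in pattern in isolation; the `from_raw` helper is hypothetical and exists only so the sketch runs, while the real shim exposes no constructor:

#[cfg(not(feature = "salsa"))]
mod salsa {
    // Mirror only what the surrounding crate needs from the real `salsa::Id`.
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
    pub struct Id(u32);

    impl Id {
        // Hypothetical constructor for this sketch only.
        pub fn from_raw(raw: u32) -> Id {
            Id(raw)
        }
    }
}

// Same shape with or without the feature: downstream code only ever sees `salsa::Id`.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct HirFileId(pub salsa::Id);

fn main() {
    let file = HirFileId(salsa::Id::from_raw(0));
    println!("{file:?}");
}
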
diff --git a/src/tools/rust-analyzer/crates/span/src/map.rs b/src/tools/rust-analyzer/crates/span/src/map.rs
index dc35de67fd8db..cc7a886643a9b 100644
--- a/src/tools/rust-analyzer/crates/span/src/map.rs
+++ b/src/tools/rust-analyzer/crates/span/src/map.rs
@@ -6,8 +6,8 @@ use std::{fmt, hash::Hash};
 use stdx::{always, itertools::Itertools};
 
 use crate::{
-    EditionedFileId, ErasedFileAstId, Span, SpanAnchor, SpanData, SyntaxContextId, TextRange,
-    TextSize, ROOT_ERASED_FILE_AST_ID,
+    EditionedFileId, ErasedFileAstId, ROOT_ERASED_FILE_AST_ID, Span, SpanAnchor, SpanData,
+    SyntaxContext, TextRange, TextSize,
 };
 
 /// Maps absolute text ranges for the corresponding file to the relevant span data.
@@ -208,7 +208,7 @@ impl RealSpanMap {
         Span {
             range: range - offset,
             anchor: SpanAnchor { file_id: self.file_id, ast_id },
-            ctx: SyntaxContextId::root(self.file_id.edition()),
+            ctx: SyntaxContext::root(self.file_id.edition()),
         }
     }
 }
diff --git a/src/tools/rust-analyzer/crates/stdx/Cargo.toml b/src/tools/rust-analyzer/crates/stdx/Cargo.toml
index 3727d0c9562e6..7bda106764b9f 100644
--- a/src/tools/rust-analyzer/crates/stdx/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/stdx/Cargo.toml
@@ -12,14 +12,16 @@ rust-version.workspace = true
 [lib]
 
 [dependencies]
-backtrace = { version = "0.3.67", optional = true }
-jod-thread = "0.1.2"
-libc.workspace = true
+backtrace = { version = "0.3.74", optional = true }
+jod-thread = "1.0.0"
 crossbeam-channel.workspace = true
 itertools.workspace = true
 tracing.workspace = true
 # Think twice before adding anything here
 
+[target.'cfg(unix)'.dependencies]
+libc.workspace = true
+
 [target.'cfg(windows)'.dependencies]
 miow = "0.6.0"
 windows-sys = { version = "0.59", features = ["Win32_Foundation"] }
diff --git a/src/tools/rust-analyzer/crates/stdx/src/anymap.rs b/src/tools/rust-analyzer/crates/stdx/src/anymap.rs
index faf2e6c71789f..a3f6ab89510ed 100644
--- a/src/tools/rust-analyzer/crates/stdx/src/anymap.rs
+++ b/src/tools/rust-analyzer/crates/stdx/src/anymap.rs
@@ -1,4 +1,5 @@
 //! This file is a port of only the necessary features from <https://github.com/chris-morgan/anymap> version 1.0.0-beta.2 for use within rust-analyzer.
+//!
 //! Copyright © 2014–2022 Chris Morgan.
 //! COPYING: <https://github.com/chris-morgan/anymap/blob/master/COPYING>
 //! Note that the license is changed from Blue Oak Model 1.0.0 or MIT or Apache-2.0 to MIT OR Apache-2.0
@@ -20,14 +21,14 @@
 
 use core::hash::Hasher;
 
-/// A hasher designed to eke a little more speed out, given `TypeId`’s known characteristics.
+/// A hasher designed to eke a little more speed out, given `TypeId`'s known characteristics.
 ///
-/// Specifically, this is a no-op hasher that expects to be fed a u64’s worth of
+/// Specifically, this is a no-op hasher that expects to be fed a u64's worth of
 /// randomly-distributed bits. It works well for `TypeId` (eliminating start-up time, so that my
-/// get_missing benchmark is ~30ns rather than ~900ns, and being a good deal faster after that, so
-/// that my insert_and_get_on_260_types benchmark is ~12μs instead of ~21.5μs), but will
+/// `get_missing` benchmark is ~30ns rather than ~900ns, and being a good deal faster after that, so
+/// that my `insert_and_get_on_260_types` benchmark is ~12μs instead of ~21.5μs), but will
 /// panic in debug mode and always emit zeros in release mode for any other sorts of inputs, so
-/// yeah, don’t use it! 😀
+/// yeah, don't use it! 😀
 #[derive(Default)]
 pub struct TypeIdHasher {
     value: u64,
@@ -36,9 +37,9 @@ pub struct TypeIdHasher {
 impl Hasher for TypeIdHasher {
     #[inline]
     fn write(&mut self, bytes: &[u8]) {
-        // This expects to receive exactly one 64-bit value, and there’s no realistic chance of
-        // that changing, but I don’t want to depend on something that isn’t expressly part of the
-        // contract for safety. But I’m OK with release builds putting everything in one bucket
+        // This expects to receive exactly one 64-bit value, and there's no realistic chance of
+        // that changing, but I don't want to depend on something that isn't expressly part of the
+        // contract for safety. But I'm OK with release builds putting everything in one bucket
         // if it *did* change (and debug builds panicking).
         debug_assert_eq!(bytes.len(), 8);
         let _ = bytes.try_into().map(|array| self.value = u64::from_ne_bytes(array));
@@ -59,7 +60,7 @@ use ::std::collections::hash_map;
 /// Raw access to the underlying `HashMap`.
 ///
 /// This alias is provided for convenience because of the ugly third generic parameter.
-#[allow(clippy::disallowed_types)] // Uses a custom hasher
+#[expect(clippy::disallowed_types, reason = "Uses a custom hasher")]
 pub type RawMap<A> = hash_map::HashMap<TypeId, Box<A>, BuildHasherDefault<TypeIdHasher>>;
 
 /// A collection containing zero or one values for any given type and allowing convenient,
@@ -73,19 +74,20 @@ pub type RawMap<A> = hash_map::HashMap<TypeId, Box<A>, BuildHasherDefault<TypeId
 ///
 /// Cumulatively, there are thus six forms of map:
 ///
-/// - <code>[Map]&lt;dyn [core::any::Any]&gt;</code>,
+/// - `[Map]<dyn [core::any::Any]>`,
 ///   also spelled [`AnyMap`] for convenience.
-/// - <code>[Map]&lt;dyn [core::any::Any] + Send&gt;</code>
-/// - <code>[Map]&lt;dyn [core::any::Any] + Send + Sync&gt;</code>
+/// - `[Map]<dyn [core::any::Any] + Send>`
+/// - `[Map]<dyn [core::any::Any] + Send + Sync>`
 ///
 /// ## Example
 ///
-/// (Here using the [`AnyMap`] convenience alias; the first line could use
-/// <code>[anymap::Map][Map]::&lt;[core::any::Any]&gt;::new()</code> instead if desired.)
+/// (Here, the [`AnyMap`] convenience alias is used;
+/// the first line could use `[anymap::Map][Map]::<[core::any::Any]>::default()`
+/// instead if desired.)
 ///
 /// ```
 /// # use stdx::anymap;
-#[doc = "let mut data = anymap::AnyMap::new();"]
+/// let mut data = anymap::AnyMap::default();
 /// assert_eq!(data.get(), None::<&i32>);
 /// ```
 ///
@@ -95,29 +97,25 @@ pub struct Map<A: ?Sized + Downcast = dyn Any> {
     raw: RawMap<A>,
 }
 
-/// The most common type of `Map`: just using `Any`; <code>[Map]&lt;dyn [Any]&gt;</code>.
+/// The most common type of `Map`: just using `Any`; `[Map]<dyn [Any]>`.
 ///
 /// Why is this a separate type alias rather than a default value for `Map<A>`?
-/// `Map::new()` doesn’t seem to be happy to infer that it should go with the default
-/// value. It’s a bit sad, really. Ah well, I guess this approach will do.
+/// `Map::default()` doesn't seem to be happy to infer that it should go with the default
+/// value. It's a bit sad, really. Ah well, I guess this approach will do.
 pub type AnyMap = Map<dyn Any>;
+
 impl<A: ?Sized + Downcast> Default for Map<A> {
     #[inline]
     fn default() -> Map<A> {
-        Map::new()
+        Map { raw: RawMap::with_hasher(Default::default()) }
     }
 }
 
 impl<A: ?Sized + Downcast> Map<A> {
-    /// Create an empty collection.
-    #[inline]
-    pub fn new() -> Map<A> {
-        Map { raw: RawMap::with_hasher(Default::default()) }
-    }
-
     /// Returns a reference to the value stored in the collection for the type `T`,
     /// if it exists.
     #[inline]
+    #[must_use]
     pub fn get<T: IntoBox<A>>(&self) -> Option<&T> {
         self.raw.get(&TypeId::of::<T>()).map(|any| unsafe { any.downcast_ref_unchecked::<T>() })
     }
@@ -137,30 +135,30 @@ impl<A: ?Sized + Downcast> Map<A> {
 }
 
 /// A view into a single occupied location in an `Map`.
-pub struct OccupiedEntry<'a, A: ?Sized + Downcast, V: 'a> {
-    inner: hash_map::OccupiedEntry<'a, TypeId, Box<A>>,
+pub struct OccupiedEntry<'map, A: ?Sized + Downcast, V: 'map> {
+    inner: hash_map::OccupiedEntry<'map, TypeId, Box<A>>,
     type_: PhantomData<V>,
 }
 
 /// A view into a single empty location in an `Map`.
-pub struct VacantEntry<'a, A: ?Sized + Downcast, V: 'a> {
-    inner: hash_map::VacantEntry<'a, TypeId, Box<A>>,
+pub struct VacantEntry<'map, A: ?Sized + Downcast, V: 'map> {
+    inner: hash_map::VacantEntry<'map, TypeId, Box<A>>,
     type_: PhantomData<V>,
 }
 
 /// A view into a single location in an `Map`, which may be vacant or occupied.
-pub enum Entry<'a, A: ?Sized + Downcast, V> {
+pub enum Entry<'map, A: ?Sized + Downcast, V> {
     /// An occupied Entry
-    Occupied(OccupiedEntry<'a, A, V>),
+    Occupied(OccupiedEntry<'map, A, V>),
     /// A vacant Entry
-    Vacant(VacantEntry<'a, A, V>),
+    Vacant(VacantEntry<'map, A, V>),
 }
 
-impl<'a, A: ?Sized + Downcast, V: IntoBox<A>> Entry<'a, A, V> {
+impl<'map, A: ?Sized + Downcast, V: IntoBox<A>> Entry<'map, A, V> {
     /// Ensures a value is in the entry by inserting the result of the default function if
     /// empty, and returns a mutable reference to the value in the entry.
     #[inline]
-    pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V {
+    pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'map mut V {
         match self {
             Entry::Occupied(inner) => inner.into_mut(),
             Entry::Vacant(inner) => inner.insert(default()),
@@ -168,20 +166,21 @@ impl<'a, A: ?Sized + Downcast, V: IntoBox<A>> Entry<'a, A, V> {
     }
 }
 
-impl<'a, A: ?Sized + Downcast, V: IntoBox<A>> OccupiedEntry<'a, A, V> {
-    /// Converts the OccupiedEntry into a mutable reference to the value in the entry
+impl<'map, A: ?Sized + Downcast, V: IntoBox<A>> OccupiedEntry<'map, A, V> {
+    /// Converts the `OccupiedEntry` into a mutable reference to the value in the entry
     /// with a lifetime bound to the collection itself
     #[inline]
-    pub fn into_mut(self) -> &'a mut V {
+    #[must_use]
+    pub fn into_mut(self) -> &'map mut V {
         unsafe { self.inner.into_mut().downcast_mut_unchecked() }
     }
 }
 
-impl<'a, A: ?Sized + Downcast, V: IntoBox<A>> VacantEntry<'a, A, V> {
-    /// Sets the value of the entry with the VacantEntry's key,
+impl<'map, A: ?Sized + Downcast, V: IntoBox<A>> VacantEntry<'map, A, V> {
+    /// Sets the value of the entry with the `VacantEntry`'s key,
     /// and returns a mutable reference to it
     #[inline]
-    pub fn insert(self, value: V) -> &'a mut V {
+    pub fn insert(self, value: V) -> &'map mut V {
         unsafe { self.inner.insert(value.into_box()).downcast_mut_unchecked() }
     }
 }
@@ -206,14 +205,13 @@ mod tests {
     #[test]
     fn type_id_hasher() {
         use core::any::TypeId;
-        use core::hash::Hash;
+        use core::hash::Hash as _;
         fn verify_hashing_with(type_id: TypeId) {
             let mut hasher = TypeIdHasher::default();
             type_id.hash(&mut hasher);
-            // SAFETY: u64 is valid for all bit patterns.
-            let _ = hasher.finish();
+            _ = hasher.finish();
         }
-        // Pick a variety of types, just to demonstrate it’s all sane. Normal, zero-sized, unsized, &c.
+        // Pick a variety of types, just to demonstrate it's all sane. Normal, zero-sized, unsized, &c.
         verify_hashing_with(TypeId::of::<usize>());
         verify_hashing_with(TypeId::of::<()>());
         verify_hashing_with(TypeId::of::<str>());
@@ -225,34 +223,34 @@ mod tests {
 /// Methods for downcasting from an `Any`-like trait object.
 ///
 /// This should only be implemented on trait objects for subtraits of `Any`, though you can
-/// implement it for other types and it’ll work fine, so long as your implementation is correct.
+/// implement it for other types and it'll work fine, so long as your implementation is correct.
 pub trait Downcast {
     /// Gets the `TypeId` of `self`.
     fn type_id(&self) -> TypeId;
 
     // Note the bound through these downcast methods is 'static, rather than the inexpressible
     // concept of Self-but-as-a-trait (where Self is `dyn Trait`). This is sufficient, exceeding
-    // TypeId’s requirements. Sure, you *can* do CloneAny.downcast_unchecked::<NotClone>() and the
-    // type system won’t protect you, but that doesn’t introduce any unsafety: the method is
+    // TypeId's requirements. Sure, you *can* do CloneAny.downcast_unchecked::<NotClone>() and the
+    // type system won't protect you, but that doesn't introduce any unsafety: the method is
     // already unsafe because you can specify the wrong type, and if this were exposing safe
     // downcasting, CloneAny.downcast::<NotClone>() would just return an error, which is just as
     // correct.
     //
-    // Now in theory we could also add T: ?Sized, but that doesn’t play nicely with the common
-    // implementation, so I’m doing without it.
+    // Now in theory we could also add T: ?Sized, but that doesn't play nicely with the common
+    // implementation, so I'm doing without it.
 
     /// Downcast from `&Any` to `&T`, without checking the type matches.
     ///
     /// # Safety
     ///
-    /// The caller must ensure that `T` matches the trait object, on pain of *undefined behaviour*.
+    /// The caller must ensure that `T` matches the trait object, on pain of *undefined behavior*.
     unsafe fn downcast_ref_unchecked<T: 'static>(&self) -> &T;
 
     /// Downcast from `&mut Any` to `&mut T`, without checking the type matches.
     ///
     /// # Safety
     ///
-    /// The caller must ensure that `T` matches the trait object, on pain of *undefined behaviour*.
+    /// The caller must ensure that `T` matches the trait object, on pain of *undefined behavior*.
     unsafe fn downcast_mut_unchecked<T: 'static>(&mut self) -> &mut T;
 }
 
@@ -272,12 +270,12 @@ macro_rules! implement {
 
             #[inline]
             unsafe fn downcast_ref_unchecked<T: 'static>(&self) -> &T {
-                unsafe { &*(self as *const Self as *const T) }
+                unsafe { &*std::ptr::from_ref::<Self>(self).cast::<T>() }
             }
 
             #[inline]
             unsafe fn downcast_mut_unchecked<T: 'static>(&mut self) -> &mut T {
-                unsafe { &mut *(self as *mut Self as *mut T) }
+                unsafe { &mut *std::ptr::from_mut::<Self>(self).cast::<T>() }
             }
         }
 
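
With `Map::new()` folded into the `Default` impl, the doc example above becomes the way to build an `AnyMap`. A short usage sketch along those lines; it assumes the stdx port keeps upstream anymap's `entry::<T>()` accessor, which is what the `Entry`/`or_insert_with` API shown above serves:

use stdx::anymap;

#[derive(Debug, PartialEq)]
struct Config {
    verbose: bool,
}

fn main() {
    // `AnyMap::default()` replaces the removed `Map::new()` constructor.
    let mut data = anymap::AnyMap::default();
    assert_eq!(data.get(), None::<&Config>);

    // Assumed accessor: upstream anymap's `entry` returning the Entry type above.
    data.entry::<Config>().or_insert_with(|| Config { verbose: true });
    assert_eq!(data.get(), Some(&Config { verbose: true }));
}
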
diff --git a/src/tools/rust-analyzer/crates/stdx/src/lib.rs b/src/tools/rust-analyzer/crates/stdx/src/lib.rs
index 8313e1871f138..9a292eacd7f79 100644
--- a/src/tools/rust-analyzer/crates/stdx/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/stdx/src/lib.rs
@@ -12,13 +12,12 @@ pub mod non_empty_vec;
 pub mod panic_context;
 pub mod process;
 pub mod rand;
-pub mod thin_vec;
 pub mod thread;
 
 pub use itertools;
 
 #[inline(always)]
-pub fn is_ci() -> bool {
+pub const fn is_ci() -> bool {
     option_env!("CI").is_some()
 }
 
@@ -27,14 +26,14 @@ pub fn hash_once<Hasher: std::hash::Hasher + Default>(thing: impl std::hash::Has
 }
 
 #[must_use]
-#[allow(clippy::print_stderr)]
+#[expect(clippy::print_stderr, reason = "only visible to developers")]
 pub fn timeit(label: &'static str) -> impl Drop {
     let start = Instant::now();
-    defer(move || eprintln!("{}: {:.2?}", label, start.elapsed()))
+    defer(move || eprintln!("{}: {:.2}", label, start.elapsed().as_nanos()))
 }
 
 /// Prints backtrace to stderr, useful for debugging.
-#[allow(clippy::print_stderr)]
+#[expect(clippy::print_stderr, reason = "only visible to developers")]
 pub fn print_backtrace() {
     #[cfg(feature = "backtrace")]
     eprintln!("{:?}", backtrace::Backtrace::new());
@@ -127,6 +126,7 @@ where
 }
 
 // Taken from rustc.
+#[must_use]
 pub fn to_camel_case(ident: &str) -> String {
     ident
         .trim_matches('_')
@@ -157,7 +157,7 @@ pub fn to_camel_case(ident: &str) -> String {
 
             camel_cased_component
         })
-        .fold((String::new(), None), |(acc, prev): (_, Option<String>), next| {
+        .fold((String::new(), None), |(mut acc, prev): (_, Option<String>), next| {
             // separate two components with an underscore if their boundary cannot
             // be distinguished using an uppercase/lowercase case distinction
             let join = prev
@@ -167,16 +167,20 @@ pub fn to_camel_case(ident: &str) -> String {
                     Some(!char_has_case(l) && !char_has_case(f))
                 })
                 .unwrap_or(false);
-            (acc + if join { "_" } else { "" } + &next, Some(next))
+            acc.push_str(if join { "_" } else { "" });
+            acc.push_str(&next);
+            (acc, Some(next))
         })
         .0
 }
 
 // Taken from rustc.
-pub fn char_has_case(c: char) -> bool {
+#[must_use]
+pub const fn char_has_case(c: char) -> bool {
     c.is_lowercase() || c.is_uppercase()
 }
 
+#[must_use]
 pub fn is_upper_snake_case(s: &str) -> bool {
     s.chars().all(|c| c.is_uppercase() || c == '_' || c.is_numeric())
 }
@@ -189,6 +193,7 @@ pub fn replace(buf: &mut String, from: char, to: &str) {
     *buf = buf.replace(from, to);
 }
 
+#[must_use]
 pub fn trim_indent(mut text: &str) -> String {
     if text.starts_with('\n') {
         text = &text[1..];
@@ -202,11 +207,7 @@ pub fn trim_indent(mut text: &str) -> String {
     text.split_inclusive('\n')
         .map(
             |line| {
-                if line.len() <= indent {
-                    line.trim_start_matches(' ')
-                } else {
-                    &line[indent..]
-                }
+                if line.len() <= indent { line.trim_start_matches(' ') } else { &line[indent..] }
             },
         )
         .collect()
@@ -254,8 +255,8 @@ impl ops::DerefMut for JodChild {
 
 impl Drop for JodChild {
     fn drop(&mut self) {
-        let _ = self.0.kill();
-        let _ = self.0.wait();
+        _ = self.0.kill();
+        _ = self.0.wait();
     }
 }
 
@@ -264,12 +265,11 @@ impl JodChild {
         command.spawn().map(Self)
     }
 
+    #[must_use]
+    #[cfg(not(target_arch = "wasm32"))]
     pub fn into_inner(self) -> std::process::Child {
-        if cfg!(target_arch = "wasm32") {
-            panic!("no processes on wasm");
-        }
         // SAFETY: repr transparent, except on WASM
-        unsafe { std::mem::transmute::<JodChild, std::process::Child>(self) }
+        unsafe { std::mem::transmute::<Self, std::process::Child>(self) }
     }
 }
 
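
The `to_camel_case` hunk above replaces `acc + "_" + &next`, which builds a fresh `String` on every fold step, with in-place `push_str` on a mutable accumulator. The same pattern in isolation, as a toy helper that is not part of the patch:

fn join_snake(parts: &[&str]) -> String {
    parts.iter().fold(String::new(), |mut acc, part| {
        if !acc.is_empty() {
            // Grow the accumulator in place instead of allocating a new String per step.
            acc.push('_');
        }
        acc.push_str(part);
        acc
    })
}

fn main() {
    assert_eq!(join_snake(&["foo", "bar", "baz"]), "foo_bar_baz");
}
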
diff --git a/src/tools/rust-analyzer/crates/stdx/src/non_empty_vec.rs b/src/tools/rust-analyzer/crates/stdx/src/non_empty_vec.rs
index 342194c7838c6..faa322d9e746c 100644
--- a/src/tools/rust-analyzer/crates/stdx/src/non_empty_vec.rs
+++ b/src/tools/rust-analyzer/crates/stdx/src/non_empty_vec.rs
@@ -8,8 +8,8 @@ pub struct NonEmptyVec<T> {
 
 impl<T> NonEmptyVec<T> {
     #[inline]
-    pub fn new(first: T) -> Self {
-        NonEmptyVec { first, rest: Vec::new() }
+    pub const fn new(first: T) -> Self {
+        Self { first, rest: Vec::new() }
     }
 
     #[inline]
@@ -24,7 +24,7 @@ impl<T> NonEmptyVec<T> {
 
     #[inline]
     pub fn push(&mut self, value: T) {
-        self.rest.push(value)
+        self.rest.push(value);
     }
 
     #[inline]
diff --git a/src/tools/rust-analyzer/crates/stdx/src/panic_context.rs b/src/tools/rust-analyzer/crates/stdx/src/panic_context.rs
index a35d50b78dfb6..b220451c45a07 100644
--- a/src/tools/rust-analyzer/crates/stdx/src/panic_context.rs
+++ b/src/tools/rust-analyzer/crates/stdx/src/panic_context.rs
@@ -16,7 +16,7 @@ impl Drop for PanicContext {
 }
 
 pub fn enter(frame: String) -> PanicContext {
-    #[allow(clippy::print_stderr)]
+    #[expect(clippy::print_stderr, reason = "already panicking anyway")]
     fn set_hook() {
         let default_hook = panic::take_hook();
         panic::set_hook(Box::new(move |panic_info| {
diff --git a/src/tools/rust-analyzer/crates/stdx/src/process.rs b/src/tools/rust-analyzer/crates/stdx/src/process.rs
index 75ae064db9a4d..2efeed45e44e0 100644
--- a/src/tools/rust-analyzer/crates/stdx/src/process.rs
+++ b/src/tools/rust-analyzer/crates/stdx/src/process.rs
@@ -54,6 +54,9 @@ pub fn streaming_output(
     Ok((stdout, stderr))
 }
 
+/// # Panics
+///
+/// Panics if `cmd` is not configured to have `stdout` and `stderr` as `piped`.
 pub fn spawn_with_streaming_output(
     mut cmd: Command,
     on_stdout_line: &mut dyn FnMut(&str),
@@ -158,9 +161,9 @@ mod imp {
     };
 
     use miow::{
+        Overlapped,
         iocp::{CompletionPort, CompletionStatus},
         pipe::NamedPipe,
-        Overlapped,
     };
     use windows_sys::Win32::Foundation::ERROR_BROKEN_PIPE;
 
diff --git a/src/tools/rust-analyzer/crates/stdx/src/rand.rs b/src/tools/rust-analyzer/crates/stdx/src/rand.rs
index 115a073dab337..e028990900af6 100644
--- a/src/tools/rust-analyzer/crates/stdx/src/rand.rs
+++ b/src/tools/rust-analyzer/crates/stdx/src/rand.rs
@@ -1,8 +1,7 @@
-//! We don't use `rand`, as that's too many things for us.
+//! We don't use `rand` because that is too many things for us.
 //!
-//! We currently use oorandom instead, but it's missing these two utilities.
-//! Perhaps we should switch to `fastrand`, or our own small PRNG, it's not like
-//! we need anything more complicated than xor-shift.
+//! `oorandom` is used instead, but it's missing these two utilities.
+//! Switching to `fastrand` or our own small PRNG may be good because only xor-shift is needed.
 
 pub fn shuffle<T>(slice: &mut [T], mut rand_index: impl FnMut(usize) -> usize) {
     let mut remaining = slice.len() - 1;
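
As the module docs above say, an xor-shift-grade generator is all that is needed here. A sketch of driving `stdx::rand::shuffle` (from this workspace crate) with one; the seed is arbitrary, and the `% upper.max(1)` reduction keeps the returned index in range without assuming anything about the values `shuffle` passes in:

fn main() {
    // Minimal xor-shift state; any non-zero seed works.
    let mut state: u64 = 0x853c_49e6_748f_ea9b;
    let mut xorshift = move || {
        state ^= state << 13;
        state ^= state >> 7;
        state ^= state << 17;
        state
    };

    let mut items = [1, 2, 3, 4, 5];
    stdx::rand::shuffle(&mut items, |upper| (xorshift() % upper.max(1) as u64) as usize);
    println!("{items:?}");
}
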
diff --git a/src/tools/rust-analyzer/crates/stdx/src/thin_vec.rs b/src/tools/rust-analyzer/crates/stdx/src/thin_vec.rs
deleted file mode 100644
index 700220e1d3e52..0000000000000
--- a/src/tools/rust-analyzer/crates/stdx/src/thin_vec.rs
+++ /dev/null
@@ -1,472 +0,0 @@
-use std::alloc::{dealloc, handle_alloc_error, Layout};
-use std::fmt;
-use std::hash::{Hash, Hasher};
-use std::marker::PhantomData;
-use std::ops::{Deref, DerefMut};
-use std::ptr::{addr_of_mut, slice_from_raw_parts_mut, NonNull};
-
-/// A type that is functionally equivalent to `(Header, Box<[Item]>)`,
-/// but all data is stored in one heap allocation and the pointer is thin,
-/// so the whole thing's size is like a pointer.
-pub struct ThinVecWithHeader<Header, Item> {
-    /// INVARIANT: Points to a valid heap allocation that contains `ThinVecInner<Header>`,
-    /// followed by (suitably aligned) `len` `Item`s.
-    ptr: NonNull<ThinVecInner<Header>>,
-    _marker: PhantomData<(Header, Box<[Item]>)>,
-}
-
-// SAFETY: We essentially own both the header and the items.
-unsafe impl<Header: Send, Item: Send> Send for ThinVecWithHeader<Header, Item> {}
-unsafe impl<Header: Sync, Item: Sync> Sync for ThinVecWithHeader<Header, Item> {}
-
-#[derive(Clone)]
-struct ThinVecInner<Header> {
-    header: Header,
-    len: usize,
-}
-
-impl<Header, Item> ThinVecWithHeader<Header, Item> {
-    /// # Safety
-    ///
-    /// The iterator must produce `len` elements.
-    #[inline]
-    unsafe fn from_trusted_len_iter(
-        header: Header,
-        len: usize,
-        items: impl Iterator<Item = Item>,
-    ) -> Self {
-        let (ptr, layout, items_offset) = Self::allocate(len);
-
-        struct DeallocGuard(*mut u8, Layout);
-        impl Drop for DeallocGuard {
-            fn drop(&mut self) {
-                // SAFETY: We allocated this above.
-                unsafe {
-                    dealloc(self.0, self.1);
-                }
-            }
-        }
-        let _dealloc_guard = DeallocGuard(ptr.as_ptr().cast::<u8>(), layout);
-
-        // INVARIANT: Between `0..1` there are only initialized items.
-        struct ItemsGuard<Item>(*mut Item, *mut Item);
-        impl<Item> Drop for ItemsGuard<Item> {
-            fn drop(&mut self) {
-                // SAFETY: Our invariant.
-                unsafe {
-                    slice_from_raw_parts_mut(self.0, self.1.offset_from(self.0) as usize)
-                        .drop_in_place();
-                }
-            }
-        }
-
-        // SAFETY: We allocated enough space.
-        let mut items_ptr = unsafe { ptr.as_ptr().byte_add(items_offset).cast::<Item>() };
-        // INVARIANT: There are zero elements in this range.
-        let mut items_guard = ItemsGuard(items_ptr, items_ptr);
-        items.for_each(|item| {
-            // SAFETY: Our precondition guarantee we won't get more than `len` items, and we allocated
-            // enough space for `len` items.
-            unsafe {
-                items_ptr.write(item);
-                items_ptr = items_ptr.add(1);
-            }
-            // INVARIANT: We just initialized this item.
-            items_guard.1 = items_ptr;
-        });
-
-        // SAFETY: We allocated enough space.
-        unsafe {
-            ptr.write(ThinVecInner { header, len });
-        }
-
-        std::mem::forget(items_guard);
-
-        std::mem::forget(_dealloc_guard);
-
-        // INVARIANT: We allocated and initialized all fields correctly.
-        Self { ptr, _marker: PhantomData }
-    }
-
-    #[inline]
-    fn allocate(len: usize) -> (NonNull<ThinVecInner<Header>>, Layout, usize) {
-        let (layout, items_offset) = Self::layout(len);
-        // SAFETY: We always have `len`, so our allocation cannot be zero-sized.
-        let ptr = unsafe { std::alloc::alloc(layout).cast::<ThinVecInner<Header>>() };
-        let Some(ptr) = NonNull::<ThinVecInner<Header>>::new(ptr) else {
-            handle_alloc_error(layout);
-        };
-        (ptr, layout, items_offset)
-    }
-
-    #[inline]
-    #[allow(clippy::should_implement_trait)]
-    pub fn from_iter<I>(header: Header, items: I) -> Self
-    where
-        I: IntoIterator,
-        I::IntoIter: TrustedLen<Item = Item>,
-    {
-        let items = items.into_iter();
-        // SAFETY: `TrustedLen` guarantees the iterator length is exact.
-        unsafe { Self::from_trusted_len_iter(header, items.len(), items) }
-    }
-
-    #[inline]
-    fn items_offset(&self) -> usize {
-        // SAFETY: We `pad_to_align()` in `layout()`, so at most where accessing past the end of the allocation,
-        // which is allowed.
-        unsafe {
-            Layout::new::<ThinVecInner<Header>>().extend(Layout::new::<Item>()).unwrap_unchecked().1
-        }
-    }
-
-    #[inline]
-    fn header_and_len(&self) -> &ThinVecInner<Header> {
-        // SAFETY: By `ptr`'s invariant, it is correctly allocated and initialized.
-        unsafe { &*self.ptr.as_ptr() }
-    }
-
-    #[inline]
-    fn items_ptr(&self) -> *mut [Item] {
-        let len = self.header_and_len().len;
-        // SAFETY: `items_offset()` returns the correct offset of the items, where they are allocated.
-        let ptr = unsafe { self.ptr.as_ptr().byte_add(self.items_offset()).cast::<Item>() };
-        slice_from_raw_parts_mut(ptr, len)
-    }
-
-    #[inline]
-    pub fn header(&self) -> &Header {
-        &self.header_and_len().header
-    }
-
-    #[inline]
-    pub fn header_mut(&mut self) -> &mut Header {
-        // SAFETY: By `ptr`'s invariant, it is correctly allocated and initialized.
-        unsafe { &mut *addr_of_mut!((*self.ptr.as_ptr()).header) }
-    }
-
-    #[inline]
-    pub fn items(&self) -> &[Item] {
-        // SAFETY: `items_ptr()` gives a valid pointer.
-        unsafe { &*self.items_ptr() }
-    }
-
-    #[inline]
-    pub fn items_mut(&mut self) -> &mut [Item] {
-        // SAFETY: `items_ptr()` gives a valid pointer.
-        unsafe { &mut *self.items_ptr() }
-    }
-
-    #[inline]
-    pub fn len(&self) -> usize {
-        self.header_and_len().len
-    }
-
-    #[inline]
-    fn layout(len: usize) -> (Layout, usize) {
-        let (layout, items_offset) = Layout::new::<ThinVecInner<Header>>()
-            .extend(Layout::array::<Item>(len).expect("too big `ThinVec` requested"))
-            .expect("too big `ThinVec` requested");
-        let layout = layout.pad_to_align();
-        (layout, items_offset)
-    }
-}
-
-/// # Safety
-///
-/// The length reported must be exactly the number of items yielded.
-pub unsafe trait TrustedLen: ExactSizeIterator {}
-
-unsafe impl<T> TrustedLen for std::vec::IntoIter<T> {}
-unsafe impl<T> TrustedLen for std::slice::Iter<'_, T> {}
-unsafe impl<'a, T: Clone + 'a, I: TrustedLen<Item = &'a T>> TrustedLen for std::iter::Cloned<I> {}
-unsafe impl<T, I: TrustedLen, F: FnMut(I::Item) -> T> TrustedLen for std::iter::Map<I, F> {}
-unsafe impl<T> TrustedLen for std::vec::Drain<'_, T> {}
-unsafe impl<T, const N: usize> TrustedLen for std::array::IntoIter<T, N> {}
-
-impl<Header: Clone, Item: Clone> Clone for ThinVecWithHeader<Header, Item> {
-    #[inline]
-    fn clone(&self) -> Self {
-        Self::from_iter(self.header().clone(), self.items().iter().cloned())
-    }
-}
-
-impl<Header, Item> Drop for ThinVecWithHeader<Header, Item> {
-    #[inline]
-    fn drop(&mut self) {
-        // This must come before we drop `header`, because after that we cannot make a reference to it in `len()`.
-        let len = self.len();
-
-        // SAFETY: The contents are allocated and initialized.
-        unsafe {
-            addr_of_mut!((*self.ptr.as_ptr()).header).drop_in_place();
-            self.items_ptr().drop_in_place();
-        }
-
-        let (layout, _) = Self::layout(len);
-        // SAFETY: This was allocated in `new()` with the same layout calculation.
-        unsafe {
-            dealloc(self.ptr.as_ptr().cast::<u8>(), layout);
-        }
-    }
-}
-
-impl<Header: fmt::Debug, Item: fmt::Debug> fmt::Debug for ThinVecWithHeader<Header, Item> {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_struct("ThinVecWithHeader")
-            .field("header", self.header())
-            .field("items", &self.items())
-            .finish()
-    }
-}
-
-impl<Header: PartialEq, Item: PartialEq> PartialEq for ThinVecWithHeader<Header, Item> {
-    #[inline]
-    fn eq(&self, other: &Self) -> bool {
-        self.header() == other.header() && self.items() == other.items()
-    }
-}
-
-impl<Header: Eq, Item: Eq> Eq for ThinVecWithHeader<Header, Item> {}
-
-impl<Header: Hash, Item: Hash> Hash for ThinVecWithHeader<Header, Item> {
-    #[inline]
-    fn hash<H: Hasher>(&self, state: &mut H) {
-        self.header().hash(state);
-        self.items().hash(state);
-    }
-}
-
-#[derive(Clone, PartialEq, Eq, Hash)]
-pub struct ThinVec<T>(ThinVecWithHeader<(), T>);
-
-impl<T> ThinVec<T> {
-    #[inline]
-    #[allow(clippy::should_implement_trait)]
-    pub fn from_iter<I>(values: I) -> Self
-    where
-        I: IntoIterator,
-        I::IntoIter: TrustedLen<Item = T>,
-    {
-        Self(ThinVecWithHeader::from_iter((), values))
-    }
-
-    #[inline]
-    pub fn len(&self) -> usize {
-        self.0.len()
-    }
-
-    #[inline]
-    pub fn iter(&self) -> std::slice::Iter<'_, T> {
-        (**self).iter()
-    }
-
-    #[inline]
-    pub fn iter_mut(&mut self) -> std::slice::IterMut<'_, T> {
-        (**self).iter_mut()
-    }
-}
-
-impl<T> Deref for ThinVec<T> {
-    type Target = [T];
-
-    #[inline]
-    fn deref(&self) -> &Self::Target {
-        self.0.items()
-    }
-}
-
-impl<T> DerefMut for ThinVec<T> {
-    #[inline]
-    fn deref_mut(&mut self) -> &mut Self::Target {
-        self.0.items_mut()
-    }
-}
-
-impl<'a, T> IntoIterator for &'a ThinVec<T> {
-    type IntoIter = std::slice::Iter<'a, T>;
-    type Item = &'a T;
-
-    #[inline]
-    fn into_iter(self) -> Self::IntoIter {
-        self.iter()
-    }
-}
-
-impl<'a, T> IntoIterator for &'a mut ThinVec<T> {
-    type IntoIter = std::slice::IterMut<'a, T>;
-    type Item = &'a mut T;
-
-    #[inline]
-    fn into_iter(self) -> Self::IntoIter {
-        self.iter_mut()
-    }
-}
-
-impl<T: fmt::Debug> fmt::Debug for ThinVec<T> {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_list().entries(&**self).finish()
-    }
-}
-
-/// A [`ThinVec`] that requires no allocation for the empty case.
-#[derive(Clone, PartialEq, Eq, Hash)]
-pub struct EmptyOptimizedThinVec<T>(Option<ThinVec<T>>);
-
-impl<T> EmptyOptimizedThinVec<T> {
-    #[inline]
-    #[allow(clippy::should_implement_trait)]
-    pub fn from_iter<I>(values: I) -> Self
-    where
-        I: IntoIterator,
-        I::IntoIter: TrustedLen<Item = T>,
-    {
-        let values = values.into_iter();
-        if values.len() == 0 {
-            Self::empty()
-        } else {
-            Self(Some(ThinVec::from_iter(values)))
-        }
-    }
-
-    #[inline]
-    pub fn empty() -> Self {
-        Self(None)
-    }
-
-    #[inline]
-    pub fn len(&self) -> usize {
-        self.0.as_ref().map_or(0, ThinVec::len)
-    }
-
-    #[inline]
-    pub fn iter(&self) -> std::slice::Iter<'_, T> {
-        (**self).iter()
-    }
-
-    #[inline]
-    pub fn iter_mut(&mut self) -> std::slice::IterMut<'_, T> {
-        (**self).iter_mut()
-    }
-}
-
-impl<T> Default for EmptyOptimizedThinVec<T> {
-    #[inline]
-    fn default() -> Self {
-        Self::empty()
-    }
-}
-
-impl<T> Deref for EmptyOptimizedThinVec<T> {
-    type Target = [T];
-
-    #[inline]
-    fn deref(&self) -> &Self::Target {
-        self.0.as_deref().unwrap_or_default()
-    }
-}
-
-impl<T> DerefMut for EmptyOptimizedThinVec<T> {
-    #[inline]
-    fn deref_mut(&mut self) -> &mut Self::Target {
-        self.0.as_deref_mut().unwrap_or_default()
-    }
-}
-
-impl<'a, T> IntoIterator for &'a EmptyOptimizedThinVec<T> {
-    type IntoIter = std::slice::Iter<'a, T>;
-    type Item = &'a T;
-
-    #[inline]
-    fn into_iter(self) -> Self::IntoIter {
-        self.iter()
-    }
-}
-
-impl<'a, T> IntoIterator for &'a mut EmptyOptimizedThinVec<T> {
-    type IntoIter = std::slice::IterMut<'a, T>;
-    type Item = &'a mut T;
-
-    #[inline]
-    fn into_iter(self) -> Self::IntoIter {
-        self.iter_mut()
-    }
-}
-
-impl<T: fmt::Debug> fmt::Debug for EmptyOptimizedThinVec<T> {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_list().entries(&**self).finish()
-    }
-}
-
-/// Syntax:
-///
-/// ```ignore
-/// thin_vec_with_header_struct! {
-///     pub new(pub(crate)) struct MyCoolStruct, MyCoolStructHeader {
-///         pub(crate) variable_length: [Ty],
-///         pub field1: CopyTy,
-///         pub field2: NonCopyTy; ref,
-///     }
-/// }
-/// ```
-#[doc(hidden)]
-#[macro_export]
-macro_rules! thin_vec_with_header_struct_ {
-    (@maybe_ref (ref) $($t:tt)*) => { &$($t)* };
-    (@maybe_ref () $($t:tt)*) => { $($t)* };
-    (
-        $vis:vis new($new_vis:vis) struct $struct:ident, $header:ident {
-            $items_vis:vis $items:ident : [$items_ty:ty],
-            $( $header_var_vis:vis $header_var:ident : $header_var_ty:ty $(; $ref:ident)?, )+
-        }
-    ) => {
-        #[derive(Debug, Clone, Eq, PartialEq, Hash)]
-        struct $header {
-            $( $header_var : $header_var_ty, )+
-        }
-
-        #[derive(Clone, Eq, PartialEq, Hash)]
-        $vis struct $struct($crate::thin_vec::ThinVecWithHeader<$header, $items_ty>);
-
-        impl $struct {
-            #[inline]
-            #[allow(unused)]
-            $new_vis fn new<I>(
-                $( $header_var: $header_var_ty, )+
-                $items: I,
-            ) -> Self
-            where
-                I: ::std::iter::IntoIterator,
-                I::IntoIter: $crate::thin_vec::TrustedLen<Item = $items_ty>,
-            {
-                Self($crate::thin_vec::ThinVecWithHeader::from_iter(
-                    $header { $( $header_var, )+ },
-                    $items,
-                ))
-            }
-
-            #[inline]
-            $items_vis fn $items(&self) -> &[$items_ty] {
-                self.0.items()
-            }
-
-            $(
-                #[inline]
-                $header_var_vis fn $header_var(&self) -> $crate::thin_vec_with_header_struct_!(@maybe_ref ($($ref)?) $header_var_ty) {
-                    $crate::thin_vec_with_header_struct_!(@maybe_ref ($($ref)?) self.0.header().$header_var)
-                }
-            )+
-        }
-
-        impl ::std::fmt::Debug for $struct {
-            fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
-                f.debug_struct(stringify!($struct))
-                    $( .field(stringify!($header_var), &self.$header_var()) )*
-                    .field(stringify!($items), &self.$items())
-                    .finish()
-            }
-        }
-    };
-}
-pub use crate::thin_vec_with_header_struct_ as thin_vec_with_header_struct;
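// Illustrative sketch (not part of the patch above): the layout arithmetic the removed
// `ThinVecWithHeader` performed, with its internal `ThinVecInner` (header plus length)
// collapsed into a plain `Header` type parameter for brevity. A single allocation holds
// the header followed by `len` items; `Layout::extend` yields the byte offset at which
// the items start, and padding to the combined alignment mirrors the removed
// `pad_to_align()` call in `layout()`.
use std::alloc::Layout;

fn header_and_items_layout<Header, Item>(len: usize) -> (Layout, usize) {
    let (layout, items_offset) = Layout::new::<Header>()
        .extend(Layout::array::<Item>(len).expect("requested capacity overflows a Layout"))
        .expect("requested capacity overflows a Layout");
    (layout.pad_to_align(), items_offset)
}

fn main() {
    let (layout, offset) = header_and_items_layout::<u64, u8>(3);
    // The items start at or after the end of the header, and the allocation covers them.
    assert!(offset >= std::mem::size_of::<u64>());
    assert!(layout.size() >= offset + 3);
}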
diff --git a/src/tools/rust-analyzer/crates/stdx/src/thread.rs b/src/tools/rust-analyzer/crates/stdx/src/thread.rs
index e577eb4313714..6c742fecf1b68 100644
--- a/src/tools/rust-analyzer/crates/stdx/src/thread.rs
+++ b/src/tools/rust-analyzer/crates/stdx/src/thread.rs
@@ -1,12 +1,12 @@
 //! A utility module for working with threads that automatically joins threads upon drop
-//! and abstracts over operating system quality of service (QoS) APIs
+//! and abstracts over operating system quality of service (`QoS`) APIs
 //! through the concept of a “thread intent”.
 //!
 //! The intent of a thread is frozen at thread creation time,
 //! i.e. there is no API to change the intent of a thread once it has been spawned.
 //!
 //! As a system, rust-analyzer should have the property that
-//! old manual scheduling APIs are replaced entirely by QoS.
+//! old manual scheduling APIs are replaced entirely by `QoS`.
 //! To maintain this invariant, we panic when it is clear that
 //! old scheduling APIs have been used.
 //!
@@ -23,10 +23,12 @@ mod pool;
 pub use intent::ThreadIntent;
 pub use pool::Pool;
 
+/// # Panics
+///
+/// Panics if the thread fails to spawn.
 pub fn spawn<F, T>(intent: ThreadIntent, f: F) -> JoinHandle<T>
 where
-    F: FnOnce() -> T,
-    F: Send + 'static,
+    F: (FnOnce() -> T) + Send + 'static,
     T: Send + 'static,
 {
     Builder::new(intent).spawn(f).expect("failed to spawn thread")
@@ -39,26 +41,29 @@ pub struct Builder {
 }
 
 impl Builder {
-    pub fn new(intent: ThreadIntent) -> Builder {
-        Builder { intent, inner: jod_thread::Builder::new(), allow_leak: false }
+    #[must_use]
+    pub fn new(intent: ThreadIntent) -> Self {
+        Self { intent, inner: jod_thread::Builder::new(), allow_leak: false }
     }
 
-    pub fn name(self, name: String) -> Builder {
-        Builder { inner: self.inner.name(name), ..self }
+    #[must_use]
+    pub fn name(self, name: String) -> Self {
+        Self { inner: self.inner.name(name), ..self }
     }
 
-    pub fn stack_size(self, size: usize) -> Builder {
-        Builder { inner: self.inner.stack_size(size), ..self }
+    #[must_use]
+    pub fn stack_size(self, size: usize) -> Self {
+        Self { inner: self.inner.stack_size(size), ..self }
     }
 
-    pub fn allow_leak(self, b: bool) -> Builder {
-        Builder { allow_leak: b, ..self }
+    #[must_use]
+    pub fn allow_leak(self, allow_leak: bool) -> Self {
+        Self { allow_leak, ..self }
     }
 
     pub fn spawn<F, T>(self, f: F) -> std::io::Result<JoinHandle<T>>
     where
-        F: FnOnce() -> T,
-        F: Send + 'static,
+        F: (FnOnce() -> T) + Send + 'static,
         T: Send + 'static,
     {
         let inner_handle = self.inner.spawn(move || {
@@ -78,6 +83,10 @@ pub struct JoinHandle<T = ()> {
 }
 
 impl<T> JoinHandle<T> {
+    /// # Panics
+    ///
+    /// Panics if there is no thread to join.
+    #[must_use]
     pub fn join(mut self) -> T {
         self.inner.take().unwrap().join()
     }
@@ -95,6 +104,7 @@ impl<T> Drop for JoinHandle<T> {
     }
 }
 
+#[expect(clippy::min_ident_chars, reason = "trait impl")]
 impl<T> fmt::Debug for JoinHandle<T> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         f.pad("JoinHandle { .. }")
diff --git a/src/tools/rust-analyzer/crates/stdx/src/thread/intent.rs b/src/tools/rust-analyzer/crates/stdx/src/thread/intent.rs
index 7b65db30cc5b0..1203bfc385411 100644
--- a/src/tools/rust-analyzer/crates/stdx/src/thread/intent.rs
+++ b/src/tools/rust-analyzer/crates/stdx/src/thread/intent.rs
@@ -1,9 +1,9 @@
-//! An opaque façade around platform-specific QoS APIs.
+//! An opaque façade around platform-specific `QoS` APIs.
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
 // Please maintain order from least to most priority for the derived `Ord` impl.
 pub enum ThreadIntent {
-    /// Any thread which does work that isn’t in the critical path of the user typing
+    /// Any thread which does work that isn't in the critical path of the user typing
     /// (e.g. processing Go To Definition).
     Worker,
 
@@ -34,6 +34,7 @@ use imp::QoSClass;
 
 const IS_QOS_AVAILABLE: bool = imp::IS_QOS_AVAILABLE;
 
+#[expect(clippy::semicolon_if_nothing_returned, reason = "thin wrapper")]
 fn set_current_thread_qos_class(class: QoSClass) {
     imp::set_current_thread_qos_class(class)
 }
@@ -63,7 +64,7 @@ mod imp {
         ///
         /// * **You do not care about how long it takes for work to finish.**
         /// * **You do not care about work being deferred temporarily.**
-        ///   (e.g. if the device’s battery is in a critical state)
+        ///   (e.g. if the device's battery is in a critical state)
         ///
         /// Examples:
         ///
@@ -84,7 +85,7 @@ mod imp {
         /// All other work is prioritized over background tasks.
         Background,
 
-        /// TLDR: tasks that don’t block using your app
+        /// TLDR: tasks that don't block using your app
         ///
         /// Contract:
         ///
@@ -110,7 +111,7 @@ mod imp {
         /// for tasks using this class.
         ///
         /// This QoS class provides a balance between
-        /// performance, responsiveness and efficiency.
+        /// performance, responsiveness, and efficiency.
         Utility,
 
         /// TLDR: tasks that block using your app
@@ -126,10 +127,10 @@ mod imp {
         /// * in a video editor:
         ///   opening a saved project
         /// * in a browser:
-        ///   loading a list of the user’s bookmarks and top sites
+        ///   loading a list of the user's bookmarks and top sites
         ///   when a new tab is created
         /// * in a collaborative word processor:
-        ///   running a search on the document’s content
+        ///   running a search on the document's content
         ///
         /// Use this QoS class for tasks which were initiated by the user
         /// and block the usage of your app while they are in progress.
@@ -208,7 +209,7 @@ mod imp {
             }
 
             _ => {
-                // `pthread_set_qos_class_self_np`’s documentation
+                // `pthread_set_qos_class_self_np`'s documentation
                 // does not mention any other errors.
                 unreachable!("`pthread_set_qos_class_self_np` returned unexpected error {errno}")
             }
@@ -223,7 +224,7 @@ mod imp {
         };
 
         if code != 0 {
-            // `pthread_get_qos_class_np`’s documentation states that
+            // `pthread_get_qos_class_np`'s documentation states that
             // an error value is placed into errno if the return code is not zero.
             // However, it never states what errors are possible.
             // Inspecting the source[0] shows that, as of this writing, it always returns zero.
diff --git a/src/tools/rust-analyzer/crates/stdx/src/thread/pool.rs b/src/tools/rust-analyzer/crates/stdx/src/thread/pool.rs
index 9acc1de922af9..074cd747dacc6 100644
--- a/src/tools/rust-analyzer/crates/stdx/src/thread/pool.rs
+++ b/src/tools/rust-analyzer/crates/stdx/src/thread/pool.rs
@@ -10,8 +10,8 @@
 use std::{
     panic::{self, UnwindSafe},
     sync::{
-        atomic::{AtomicUsize, Ordering},
         Arc,
+        atomic::{AtomicUsize, Ordering},
     },
 };
 
@@ -38,7 +38,11 @@ struct Job {
 }
 
 impl Pool {
-    pub fn new(threads: usize) -> Pool {
+    /// # Panics
+    ///
+    /// Panics if a job panics.
+    #[must_use]
+    pub fn new(threads: usize) -> Self {
         const STACK_SIZE: usize = 8 * 1024 * 1024;
         const INITIAL_INTENT: ThreadIntent = ThreadIntent::Worker;
 
@@ -63,7 +67,7 @@ impl Pool {
                             }
                             extant_tasks.fetch_add(1, Ordering::SeqCst);
                             // discard the panic, we should've logged the backtrace already
-                            _ = panic::catch_unwind(job.f);
+                            drop(panic::catch_unwind(job.f));
                             extant_tasks.fetch_sub(1, Ordering::SeqCst);
                         }
                     }
@@ -73,9 +77,12 @@ impl Pool {
             handles.push(handle);
         }
 
-        Pool { _handles: handles.into_boxed_slice(), extant_tasks, job_sender }
+        Self { _handles: handles.into_boxed_slice(), extant_tasks, job_sender }
     }
 
+    /// # Panics
+    ///
+    /// Panics if a job panics.
     pub fn spawn<F>(&self, intent: ThreadIntent, f: F)
     where
         F: FnOnce() + Send + UnwindSafe + 'static,
@@ -84,14 +91,20 @@ impl Pool {
             if cfg!(debug_assertions) {
                 intent.assert_is_used_on_current_thread();
             }
-            f()
+            f();
         });
 
         let job = Job { requested_intent: intent, f };
         self.job_sender.send(job).unwrap();
     }
 
+    #[must_use]
     pub fn len(&self) -> usize {
         self.extant_tasks.load(Ordering::SeqCst)
     }
+
+    #[must_use]
+    pub fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
 }
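// Illustrative sketch (not part of the patch above): hypothetical use of the `Pool` API
// after this hunk, assuming a crate that depends on `stdx`. `len()` counts tasks
// currently executing, so any snapshot of it (or of the new `is_empty()`) is inherently
// racy and only shown here for illustration.
use stdx::thread::{Pool, ThreadIntent};

fn pool_example() {
    let pool = Pool::new(4);
    for i in 0..8u64 {
        // Jobs must be `FnOnce() + Send + UnwindSafe + 'static`; the worker loop
        // catches panics and discards the results.
        pool.spawn(ThreadIntent::Worker, move || {
            let _ = i * i;
        });
    }
    // The new `is_empty()` is simply `len() == 0`.
    let _still_busy = !pool.is_empty();
}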
diff --git a/src/tools/rust-analyzer/crates/syntax-bridge/Cargo.toml b/src/tools/rust-analyzer/crates/syntax-bridge/Cargo.toml
index 3e663422a04ec..cccd41d542991 100644
--- a/src/tools/rust-analyzer/crates/syntax-bridge/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/syntax-bridge/Cargo.toml
@@ -13,7 +13,6 @@ rust-version.workspace = true
 
 [dependencies]
 rustc-hash.workspace = true
-tracing.workspace = true
 
 # local deps
 syntax.workspace = true
diff --git a/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs b/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs
index a59a3270c9d9d..d59229952f527 100644
--- a/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs
@@ -7,12 +7,13 @@ use rustc_hash::{FxHashMap, FxHashSet};
 use span::{Edition, SpanAnchor, SpanData, SpanMap};
 use stdx::{format_to, never};
 use syntax::{
-    ast::{self, make::tokens::doc_comment},
-    format_smolstr, AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement,
+    AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement,
     SyntaxKind::{self, *},
-    SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, WalkEvent, T,
+    SyntaxNode, SyntaxToken, SyntaxTreeBuilder, T, TextRange, TextSize, WalkEvent,
+    ast::{self, make::tokens::doc_comment},
+    format_smolstr,
 };
-use tt::{buffer::Cursor, token_to_literal, Punct};
+use tt::{Punct, buffer::Cursor, token_to_literal};
 
 pub mod prettify_macro_expansion;
 mod to_parser_input;
@@ -45,7 +46,7 @@ impl<S: Copy, SM: SpanMapper<S>> SpanMapper<S> for &SM {
 /// Dummy things for testing where spans don't matter.
 pub mod dummy_test_span_utils {
 
-    use span::{Span, SyntaxContextId};
+    use span::{Span, SyntaxContext};
 
     use super::*;
 
@@ -58,7 +59,7 @@ pub mod dummy_test_span_utils {
             ),
             ast_id: span::ROOT_ERASED_FILE_AST_ID,
         },
-        ctx: SyntaxContextId::root(Edition::CURRENT),
+        ctx: SyntaxContext::root(Edition::CURRENT),
     };
 
     pub struct DummyTestSpanMap;
@@ -74,7 +75,7 @@ pub mod dummy_test_span_utils {
                     ),
                     ast_id: span::ROOT_ERASED_FILE_AST_ID,
                 },
-                ctx: SyntaxContextId::root(Edition::CURRENT),
+                ctx: SyntaxContext::root(Edition::CURRENT),
             }
         }
     }
diff --git a/src/tools/rust-analyzer/crates/syntax-bridge/src/prettify_macro_expansion.rs b/src/tools/rust-analyzer/crates/syntax-bridge/src/prettify_macro_expansion.rs
index fc7caaa988658..e815e07d80a4e 100644
--- a/src/tools/rust-analyzer/crates/syntax-bridge/src/prettify_macro_expansion.rs
+++ b/src/tools/rust-analyzer/crates/syntax-bridge/src/prettify_macro_expansion.rs
@@ -1,10 +1,10 @@
 //! Utilities for formatting macro expanded nodes until we get a proper formatter.
 use syntax::{
-    ast::make,
-    ted::{self, Position},
     NodeOrToken,
     SyntaxKind::{self, *},
-    SyntaxNode, SyntaxToken, WalkEvent, T,
+    SyntaxNode, SyntaxToken, T, WalkEvent,
+    ast::make,
+    ted::{self, Position},
 };
 
 /// Renders a [`SyntaxNode`] with whitespace inserted between tokens that require them.
diff --git a/src/tools/rust-analyzer/crates/syntax-bridge/src/tests.rs b/src/tools/rust-analyzer/crates/syntax-bridge/src/tests.rs
index d37cb508de19d..8871bf56a5df7 100644
--- a/src/tools/rust-analyzer/crates/syntax-bridge/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/syntax-bridge/src/tests.rs
@@ -1,12 +1,13 @@
 use rustc_hash::FxHashMap;
 use span::Span;
-use syntax::{ast, AstNode};
+use syntax::{AstNode, ast};
 use test_utils::extract_annotations;
-use tt::{buffer::Cursor, Leaf, Punct, Spacing};
+use tt::{Leaf, Punct, Spacing, buffer::Cursor};
 
 use crate::{
-    dummy_test_span_utils::{DummyTestSpanMap, DUMMY},
-    syntax_node_to_token_tree, DocCommentDesugarMode,
+    DocCommentDesugarMode,
+    dummy_test_span_utils::{DUMMY, DummyTestSpanMap},
+    syntax_node_to_token_tree,
 };
 
 fn check_punct_spacing(fixture: &str) {
diff --git a/src/tools/rust-analyzer/crates/syntax-bridge/src/to_parser_input.rs b/src/tools/rust-analyzer/crates/syntax-bridge/src/to_parser_input.rs
index 0dcb2be316c38..021dc6595f9b9 100644
--- a/src/tools/rust-analyzer/crates/syntax-bridge/src/to_parser_input.rs
+++ b/src/tools/rust-analyzer/crates/syntax-bridge/src/to_parser_input.rs
@@ -12,7 +12,7 @@ pub fn to_parser_input<Ctx: Copy + fmt::Debug + PartialEq + Eq + Hash>(
     buffer: tt::TokenTreesView<'_, SpanData<Ctx>>,
     span_to_edition: &mut dyn FnMut(Ctx) -> Edition,
 ) -> parser::Input {
-    let mut res = parser::Input::default();
+    let mut res = parser::Input::with_capacity(buffer.len());
 
     let mut current = buffer.cursor();
     let mut syntax_context_to_edition_cache = FxHashMap::default();
diff --git a/src/tools/rust-analyzer/crates/syntax/Cargo.toml b/src/tools/rust-analyzer/crates/syntax/Cargo.toml
index 6b356398204e1..510d44d00917a 100644
--- a/src/tools/rust-analyzer/crates/syntax/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/syntax/Cargo.toml
@@ -12,13 +12,11 @@ rust-version.workspace = true
 [lib]
 
 [dependencies]
-cov-mark = "2.0.0-pre.1"
 either.workspace = true
 itertools.workspace = true
 rowan = "=0.15.15"
 rustc-hash.workspace = true
 rustc-literal-escaper.workspace = true
-indexmap.workspace = true
 smol_str.workspace = true
 triomphe.workspace = true
 tracing.workspace = true
@@ -28,8 +26,8 @@ stdx.workspace = true
 
 [dev-dependencies]
 rayon.workspace = true
-expect-test = "1.4.0"
-rustc_apfloat = "0.2.0"
+expect-test = "1.5.1"
+rustc_apfloat = "0.2.2"
 
 test-utils.workspace = true
 
diff --git a/src/tools/rust-analyzer/crates/syntax/fuzz/Cargo.toml b/src/tools/rust-analyzer/crates/syntax/fuzz/Cargo.toml
index c2c6dac72de1e..8910911ff0259 100644
--- a/src/tools/rust-analyzer/crates/syntax/fuzz/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/syntax/fuzz/Cargo.toml
@@ -2,8 +2,8 @@
 name = "syntax-fuzz"
 version = "0.0.1"
 publish = false
-edition = "2021"
-rust-version = "1.78"
+edition = "2024"
+rust-version = "1.85"
 
 [package.metadata]
 cargo-fuzz = true
diff --git a/src/tools/rust-analyzer/crates/syntax/rust.ungram b/src/tools/rust-analyzer/crates/syntax/rust.ungram
index 70a91af6c4790..a0ae0d68581a3 100644
--- a/src/tools/rust-analyzer/crates/syntax/rust.ungram
+++ b/src/tools/rust-analyzer/crates/syntax/rust.ungram
@@ -240,7 +240,7 @@ RecordFieldList =
  '{' fields:(RecordField (',' RecordField)* ','?)? '}'
 
 RecordField =
-  Attr* Visibility?
+  Attr* Visibility? 'unsafe'?
   Name ':' Type ('=' Expr)?
 
 TupleFieldList =
@@ -287,8 +287,9 @@ VariantDef =
 Const =
   Attr* Visibility?
   'default'?
-  'const' (Name | '_') ':' Type
-  ('=' body:Expr)? ';'
+  'const' (Name | '_') GenericParamList? ':' Type
+  ('=' body:Expr)?
+  WhereClause? ';'
 
 Static =
   Attr* Visibility?
@@ -348,7 +349,7 @@ Stmt =
 | LetStmt
 
 LetStmt =
-  Attr* 'let' Pat (':' Type)?
+  Attr* 'super'? 'let' Pat (':' Type)?
   '=' initializer:Expr
   LetElse?
   ';'
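// Illustrative sketch (not part of the patch above): the surface syntax the grammar
// changes in rust.ungram are meant to cover. All three are experimental forms that the
// parser should now accept; none of this compiles on stable Rust today.

// RecordField now admits an `unsafe` qualifier:
struct Example {
    unsafe field: u32,
}

// Const items may now carry generic parameters and a trailing where clause, matching
// the `GenericParamList? ... WhereClause? ';'` rule:
const DEFAULT<T>: Option<T> = None where T: Copy;

// LetStmt now admits a `super` qualifier:
fn scoped() {
    super let tmp = 1;
    let _ = tmp;
}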
diff --git a/src/tools/rust-analyzer/crates/syntax/src/algo.rs b/src/tools/rust-analyzer/crates/syntax/src/algo.rs
index a8a83893946a3..3ab9c902625f5 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/algo.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/algo.rs
@@ -3,8 +3,8 @@
 use itertools::Itertools;
 
 use crate::{
-    syntax_editor::Element, AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode,
-    SyntaxToken, TextRange, TextSize,
+    AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange,
+    TextSize, syntax_editor::Element,
 };
 
 /// Returns ancestors of the node at the offset, sorted by length. This should
@@ -116,3 +116,19 @@ pub fn neighbor<T: AstNode>(me: &T, direction: Direction) -> Option<T> {
 pub fn has_errors(node: &SyntaxNode) -> bool {
     node.children().any(|it| it.kind() == SyntaxKind::ERROR)
 }
+
+pub fn previous_non_trivia_token(e: impl Into<SyntaxElement>) -> Option<SyntaxToken> {
+    let mut token = match e.into() {
+        SyntaxElement::Node(n) => n.first_token()?,
+        SyntaxElement::Token(t) => t,
+    }
+    .prev_token();
+    while let Some(inner) = token {
+        if !inner.kind().is_trivia() {
+            return Some(inner);
+        } else {
+            token = inner.prev_token();
+        }
+    }
+    None
+}
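// Illustrative sketch (not part of the patch above): what the new
// `algo::previous_non_trivia_token` helper returns -- the first non-whitespace,
// non-comment token before a node or token. The helper's signature comes from the
// diff; the parsing entry point and the `Edition` path used here are assumptions.
use syntax::{algo::previous_non_trivia_token, ast, AstNode, Edition, SourceFile};

fn previous_token_example() {
    let file = SourceFile::parse("fn f() { /* gap */ 1; }", Edition::CURRENT).tree();
    let literal = file.syntax().descendants().find_map(ast::Literal::cast).unwrap();
    // Walks back over the whitespace and the block comment and lands on `{`.
    let prev = previous_non_trivia_token(literal.syntax().clone());
    assert_eq!(prev.map(|t| t.to_string()), Some("{".to_owned()));
}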
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast.rs b/src/tools/rust-analyzer/crates/syntax/src/ast.rs
index 72a46f2f9f00b..d787fd076fc91 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast.rs
@@ -17,8 +17,8 @@ use std::marker::PhantomData;
 use either::Either;
 
 use crate::{
-    syntax_node::{SyntaxNode, SyntaxNodeChildren, SyntaxToken},
     SyntaxKind,
+    syntax_node::{SyntaxNode, SyntaxNodeChildren, SyntaxToken},
 };
 
 pub use self::{
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs
index 052d018e5c9e3..37cb4a434f3a9 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs
@@ -4,8 +4,9 @@
 use std::{fmt, iter, ops};
 
 use crate::{
-    ast::{self, make, AstNode},
-    ted, AstToken, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxToken,
+    AstToken, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxToken,
+    ast::{self, AstNode, make},
+    ted,
 };
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq)]
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs
index aedf810b79431..da0bfd4f37f53 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs
@@ -5,12 +5,12 @@ use std::iter::{empty, once, successors};
 use parser::{SyntaxKind, T};
 
 use crate::{
-    algo::{self, neighbor},
-    ast::{self, edit::IndentLevel, make, HasGenericArgs, HasGenericParams},
-    ted::{self, Position},
     AstNode, AstToken, Direction, SyntaxElement,
     SyntaxKind::{ATTR, COMMENT, WHITESPACE},
     SyntaxNode, SyntaxToken,
+    algo::{self, neighbor},
+    ast::{self, HasGenericArgs, HasGenericParams, edit::IndentLevel, make},
+    ted::{self, Position},
 };
 
 use super::{GenericParam, HasArgList, HasName};
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs
index 93faeb40c32ba..db66995381388 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs
@@ -3,15 +3,15 @@
 //! These methods should only do simple, shallow tasks related to the syntax of the node itself.
 
 use crate::{
-    ast::{
-        self,
-        operators::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering, RangeOp, UnaryOp},
-        support, ArgList, AstChildren, AstNode, BlockExpr, ClosureExpr, Const, Expr, Fn,
-        FormatArgsArg, FormatArgsExpr, MacroDef, Static, TokenTree,
-    },
     AstToken,
     SyntaxKind::{self, *},
     SyntaxNode, SyntaxToken, T,
+    ast::{
+        self, ArgList, AstChildren, AstNode, BlockExpr, ClosureExpr, Const, Expr, Fn,
+        FormatArgsArg, FormatArgsExpr, MacroDef, Static, TokenTree,
+        operators::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering, RangeOp, UnaryOp},
+        support,
+    },
 };
 
 use super::RangeItem;
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
index ebee5e9af2c4d..1243f6418fe2f 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
@@ -6,8 +6,7 @@ use crate::{
     SyntaxKind::{self, *},
     SyntaxNode, SyntaxToken, T,
 };
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+use std::{fmt, hash};
 pub struct Abi {
     pub(crate) syntax: SyntaxNode,
 }
@@ -17,8 +16,6 @@ impl Abi {
     #[inline]
     pub fn string_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![string]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ArgList {
     pub(crate) syntax: SyntaxNode,
 }
@@ -28,8 +25,6 @@ impl ArgList {
     #[inline]
     pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ArrayExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -46,8 +41,6 @@ impl ArrayExpr {
     #[inline]
     pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ArrayType {
     pub(crate) syntax: SyntaxNode,
 }
@@ -63,8 +56,6 @@ impl ArrayType {
     #[inline]
     pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct AsmClobberAbi {
     pub(crate) syntax: SyntaxNode,
 }
@@ -80,8 +71,6 @@ impl AsmClobberAbi {
     #[inline]
     pub fn string_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![string]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct AsmConst {
     pub(crate) syntax: SyntaxNode,
 }
@@ -91,8 +80,6 @@ impl AsmConst {
     #[inline]
     pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct AsmDirSpec {
     pub(crate) syntax: SyntaxNode,
 }
@@ -110,8 +97,6 @@ impl AsmDirSpec {
     #[inline]
     pub fn out_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![out]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct AsmExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -134,8 +119,6 @@ impl AsmExpr {
     #[inline]
     pub fn builtin_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![builtin]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct AsmLabel {
     pub(crate) syntax: SyntaxNode,
 }
@@ -145,8 +128,6 @@ impl AsmLabel {
     #[inline]
     pub fn label_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![label]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct AsmOperandExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -158,8 +139,6 @@ impl AsmOperandExpr {
     #[inline]
     pub fn fat_arrow_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=>]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct AsmOperandNamed {
     pub(crate) syntax: SyntaxNode,
 }
@@ -170,8 +149,6 @@ impl AsmOperandNamed {
     #[inline]
     pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct AsmOption {
     pub(crate) syntax: SyntaxNode,
 }
@@ -205,8 +182,6 @@ impl AsmOption {
         support::token(&self.syntax, T![readonly])
     }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct AsmOptions {
     pub(crate) syntax: SyntaxNode,
 }
@@ -222,8 +197,6 @@ impl AsmOptions {
     #[inline]
     pub fn options_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![options]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct AsmRegOperand {
     pub(crate) syntax: SyntaxNode,
 }
@@ -239,8 +212,6 @@ impl AsmRegOperand {
     #[inline]
     pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct AsmRegSpec {
     pub(crate) syntax: SyntaxNode,
 }
@@ -250,8 +221,6 @@ impl AsmRegSpec {
     #[inline]
     pub fn string_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![string]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct AsmSym {
     pub(crate) syntax: SyntaxNode,
 }
@@ -261,8 +230,6 @@ impl AsmSym {
     #[inline]
     pub fn sym_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![sym]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct AssocItemList {
     pub(crate) syntax: SyntaxNode,
 }
@@ -275,8 +242,6 @@ impl AssocItemList {
     #[inline]
     pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct AssocTypeArg {
     pub(crate) syntax: SyntaxNode,
 }
@@ -298,8 +263,6 @@ impl AssocTypeArg {
     #[inline]
     pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Attr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -315,8 +278,6 @@ impl Attr {
     #[inline]
     pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct AwaitExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -329,8 +290,6 @@ impl AwaitExpr {
     #[inline]
     pub fn await_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![await]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct BecomeExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -341,15 +300,11 @@ impl BecomeExpr {
     #[inline]
     pub fn become_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![become]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct BinExpr {
     pub(crate) syntax: SyntaxNode,
 }
 impl ast::HasAttrs for BinExpr {}
 impl BinExpr {}
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct BlockExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -372,8 +327,6 @@ impl BlockExpr {
     #[inline]
     pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct BoxPat {
     pub(crate) syntax: SyntaxNode,
 }
@@ -383,8 +336,6 @@ impl BoxPat {
     #[inline]
     pub fn box_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![box]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct BreakExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -397,8 +348,6 @@ impl BreakExpr {
     #[inline]
     pub fn break_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![break]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct CallExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -408,8 +357,6 @@ impl CallExpr {
     #[inline]
     pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct CastExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -422,8 +369,6 @@ impl CastExpr {
     #[inline]
     pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ClosureBinder {
     pub(crate) syntax: SyntaxNode,
 }
@@ -433,8 +378,6 @@ impl ClosureBinder {
     #[inline]
     pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ClosureExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -457,13 +400,12 @@ impl ClosureExpr {
     #[inline]
     pub fn static_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![static]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Const {
     pub(crate) syntax: SyntaxNode,
 }
 impl ast::HasAttrs for Const {}
 impl ast::HasDocComments for Const {}
+impl ast::HasGenericParams for Const {}
 impl ast::HasName for Const {}
 impl ast::HasVisibility for Const {}
 impl Const {
@@ -482,8 +424,6 @@ impl Const {
     #[inline]
     pub fn default_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![default]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ConstArg {
     pub(crate) syntax: SyntaxNode,
 }
@@ -491,8 +431,6 @@ impl ConstArg {
     #[inline]
     pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ConstBlockPat {
     pub(crate) syntax: SyntaxNode,
 }
@@ -502,8 +440,6 @@ impl ConstBlockPat {
     #[inline]
     pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ConstParam {
     pub(crate) syntax: SyntaxNode,
 }
@@ -521,8 +457,6 @@ impl ConstParam {
     #[inline]
     pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ContinueExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -535,8 +469,6 @@ impl ContinueExpr {
         support::token(&self.syntax, T![continue])
     }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct DynTraitType {
     pub(crate) syntax: SyntaxNode,
 }
@@ -546,8 +478,6 @@ impl DynTraitType {
     #[inline]
     pub fn dyn_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![dyn]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Enum {
     pub(crate) syntax: SyntaxNode,
 }
@@ -562,8 +492,6 @@ impl Enum {
     #[inline]
     pub fn enum_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![enum]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ExprStmt {
     pub(crate) syntax: SyntaxNode,
 }
@@ -573,8 +501,6 @@ impl ExprStmt {
     #[inline]
     pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ExternBlock {
     pub(crate) syntax: SyntaxNode,
 }
@@ -588,8 +514,6 @@ impl ExternBlock {
     #[inline]
     pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ExternCrate {
     pub(crate) syntax: SyntaxNode,
 }
@@ -608,8 +532,6 @@ impl ExternCrate {
     #[inline]
     pub fn extern_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![extern]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ExternItemList {
     pub(crate) syntax: SyntaxNode,
 }
@@ -622,8 +544,6 @@ impl ExternItemList {
     #[inline]
     pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct FieldExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -636,8 +556,6 @@ impl FieldExpr {
     #[inline]
     pub fn dot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![.]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Fn {
     pub(crate) syntax: SyntaxNode,
 }
@@ -670,8 +588,6 @@ impl Fn {
     #[inline]
     pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct FnPtrType {
     pub(crate) syntax: SyntaxNode,
 }
@@ -691,8 +607,6 @@ impl FnPtrType {
     #[inline]
     pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ForExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -705,8 +619,6 @@ impl ForExpr {
     #[inline]
     pub fn in_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![in]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ForType {
     pub(crate) syntax: SyntaxNode,
 }
@@ -718,8 +630,6 @@ impl ForType {
     #[inline]
     pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct FormatArgsArg {
     pub(crate) syntax: SyntaxNode,
 }
@@ -730,8 +640,6 @@ impl FormatArgsArg {
     #[inline]
     pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct FormatArgsExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -754,8 +662,6 @@ impl FormatArgsExpr {
         support::token(&self.syntax, T![format_args])
     }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct GenericArgList {
     pub(crate) syntax: SyntaxNode,
 }
@@ -769,8 +675,6 @@ impl GenericArgList {
     #[inline]
     pub fn r_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![>]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct GenericParamList {
     pub(crate) syntax: SyntaxNode,
 }
@@ -782,8 +686,6 @@ impl GenericParamList {
     #[inline]
     pub fn r_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![>]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct IdentPat {
     pub(crate) syntax: SyntaxNode,
 }
@@ -799,8 +701,6 @@ impl IdentPat {
     #[inline]
     pub fn ref_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![ref]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct IfExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -811,8 +711,6 @@ impl IfExpr {
     #[inline]
     pub fn if_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![if]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Impl {
     pub(crate) syntax: SyntaxNode,
 }
@@ -836,8 +734,6 @@ impl Impl {
     #[inline]
     pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ImplTraitType {
     pub(crate) syntax: SyntaxNode,
 }
@@ -847,8 +743,6 @@ impl ImplTraitType {
     #[inline]
     pub fn impl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![impl]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct IndexExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -859,8 +753,6 @@ impl IndexExpr {
     #[inline]
     pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct InferType {
     pub(crate) syntax: SyntaxNode,
 }
@@ -868,8 +760,6 @@ impl InferType {
     #[inline]
     pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ItemList {
     pub(crate) syntax: SyntaxNode,
 }
@@ -881,8 +771,6 @@ impl ItemList {
     #[inline]
     pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Label {
     pub(crate) syntax: SyntaxNode,
 }
@@ -892,8 +780,6 @@ impl Label {
     #[inline]
     pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct LetElse {
     pub(crate) syntax: SyntaxNode,
 }
@@ -903,8 +789,6 @@ impl LetElse {
     #[inline]
     pub fn else_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![else]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct LetExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -919,8 +803,6 @@ impl LetExpr {
     #[inline]
     pub fn let_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![let]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct LetStmt {
     pub(crate) syntax: SyntaxNode,
 }
@@ -942,9 +824,9 @@ impl LetStmt {
     pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
     #[inline]
     pub fn let_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![let]) }
+    #[inline]
+    pub fn super_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![super]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Lifetime {
     pub(crate) syntax: SyntaxNode,
 }
@@ -954,8 +836,6 @@ impl Lifetime {
         support::token(&self.syntax, T![lifetime_ident])
     }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct LifetimeArg {
     pub(crate) syntax: SyntaxNode,
 }
@@ -963,8 +843,6 @@ impl LifetimeArg {
     #[inline]
     pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct LifetimeParam {
     pub(crate) syntax: SyntaxNode,
 }
@@ -974,15 +852,11 @@ impl LifetimeParam {
     #[inline]
     pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Literal {
     pub(crate) syntax: SyntaxNode,
 }
 impl ast::HasAttrs for Literal {}
 impl Literal {}
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct LiteralPat {
     pub(crate) syntax: SyntaxNode,
 }
@@ -992,8 +866,6 @@ impl LiteralPat {
     #[inline]
     pub fn minus_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![-]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct LoopExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1003,8 +875,6 @@ impl LoopExpr {
     #[inline]
     pub fn loop_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![loop]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct MacroCall {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1020,8 +890,6 @@ impl MacroCall {
     #[inline]
     pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct MacroDef {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1033,8 +901,6 @@ impl MacroDef {
     #[inline]
     pub fn macro_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![macro]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct MacroExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1042,15 +908,11 @@ impl MacroExpr {
     #[inline]
     pub fn macro_call(&self) -> Option<MacroCall> { support::child(&self.syntax) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct MacroItems {
     pub(crate) syntax: SyntaxNode,
 }
 impl ast::HasModuleItem for MacroItems {}
 impl MacroItems {}
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct MacroPat {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1058,8 +920,6 @@ impl MacroPat {
     #[inline]
     pub fn macro_call(&self) -> Option<MacroCall> { support::child(&self.syntax) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct MacroRules {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1077,8 +937,6 @@ impl MacroRules {
         support::token(&self.syntax, T![macro_rules])
     }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct MacroStmts {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1088,8 +946,6 @@ impl MacroStmts {
     #[inline]
     pub fn statements(&self) -> AstChildren<Stmt> { support::children(&self.syntax) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct MacroType {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1097,8 +953,6 @@ impl MacroType {
     #[inline]
     pub fn macro_call(&self) -> Option<MacroCall> { support::child(&self.syntax) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct MatchArm {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1115,8 +969,6 @@ impl MatchArm {
     #[inline]
     pub fn fat_arrow_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=>]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct MatchArmList {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1129,8 +981,6 @@ impl MatchArmList {
     #[inline]
     pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct MatchExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1143,8 +993,6 @@ impl MatchExpr {
     #[inline]
     pub fn match_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![match]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct MatchGuard {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1152,8 +1000,6 @@ impl MatchGuard {
     #[inline]
     pub fn if_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![if]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Meta {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1173,8 +1019,6 @@ impl Meta {
     #[inline]
     pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct MethodCallExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1189,8 +1033,6 @@ impl MethodCallExpr {
     #[inline]
     pub fn dot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![.]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Module {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1206,8 +1048,6 @@ impl Module {
     #[inline]
     pub fn mod_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mod]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Name {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1217,8 +1057,6 @@ impl Name {
     #[inline]
     pub fn self_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![self]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct NameRef {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1238,8 +1076,6 @@ impl NameRef {
     #[inline]
     pub fn super_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![super]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct NeverType {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1247,8 +1083,6 @@ impl NeverType {
     #[inline]
     pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![!]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct OffsetOfExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1273,8 +1107,6 @@ impl OffsetOfExpr {
         support::token(&self.syntax, T![offset_of])
     }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct OrPat {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1284,8 +1116,6 @@ impl OrPat {
     #[inline]
     pub fn pipe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![|]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Param {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1300,8 +1130,6 @@ impl Param {
     #[inline]
     pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ParamList {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1319,8 +1147,6 @@ impl ParamList {
     #[inline]
     pub fn pipe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![|]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ParenExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1333,8 +1159,6 @@ impl ParenExpr {
     #[inline]
     pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ParenPat {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1346,8 +1170,6 @@ impl ParenPat {
     #[inline]
     pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ParenType {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1359,8 +1181,6 @@ impl ParenType {
     #[inline]
     pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ParenthesizedArgList {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1374,8 +1194,6 @@ impl ParenthesizedArgList {
     #[inline]
     pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Path {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1387,8 +1205,6 @@ impl Path {
     #[inline]
     pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct PathExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1397,8 +1213,6 @@ impl PathExpr {
     #[inline]
     pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct PathPat {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1406,8 +1220,6 @@ impl PathPat {
     #[inline]
     pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct PathSegment {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1436,8 +1248,6 @@ impl PathSegment {
     #[inline]
     pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct PathType {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1445,8 +1255,6 @@ impl PathType {
     #[inline]
     pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct PrefixExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1455,8 +1263,6 @@ impl PrefixExpr {
     #[inline]
     pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct PtrType {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1470,21 +1276,15 @@ impl PtrType {
     #[inline]
     pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct RangeExpr {
     pub(crate) syntax: SyntaxNode,
 }
 impl ast::HasAttrs for RangeExpr {}
 impl RangeExpr {}
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct RangePat {
     pub(crate) syntax: SyntaxNode,
 }
 impl RangePat {}
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct RecordExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1496,8 +1296,6 @@ impl RecordExpr {
         support::child(&self.syntax)
     }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct RecordExprField {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1510,8 +1308,6 @@ impl RecordExprField {
     #[inline]
     pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct RecordExprFieldList {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1528,8 +1324,6 @@ impl RecordExprFieldList {
     #[inline]
     pub fn dotdot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![..]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct RecordField {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1546,9 +1340,9 @@ impl RecordField {
     pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
     #[inline]
     pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+    #[inline]
+    pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct RecordFieldList {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1560,8 +1354,6 @@ impl RecordFieldList {
     #[inline]
     pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct RecordPat {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1573,8 +1365,6 @@ impl RecordPat {
         support::child(&self.syntax)
     }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct RecordPatField {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1587,8 +1377,6 @@ impl RecordPatField {
     #[inline]
     pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct RecordPatFieldList {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1602,8 +1390,6 @@ impl RecordPatFieldList {
     #[inline]
     pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct RefExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1620,8 +1406,6 @@ impl RefExpr {
     #[inline]
     pub fn raw_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![raw]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct RefPat {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1633,8 +1417,6 @@ impl RefPat {
     #[inline]
     pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct RefType {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1648,8 +1430,6 @@ impl RefType {
     #[inline]
     pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Rename {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1660,8 +1440,6 @@ impl Rename {
     #[inline]
     pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct RestPat {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1670,8 +1448,6 @@ impl RestPat {
     #[inline]
     pub fn dotdot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![..]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct RetType {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1681,8 +1457,6 @@ impl RetType {
     #[inline]
     pub fn thin_arrow_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![->]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ReturnExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1693,8 +1467,6 @@ impl ReturnExpr {
     #[inline]
     pub fn return_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![return]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ReturnTypeSyntax {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1706,8 +1478,6 @@ impl ReturnTypeSyntax {
     #[inline]
     pub fn dotdot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![..]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct SelfParam {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1725,8 +1495,6 @@ impl SelfParam {
     #[inline]
     pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct SlicePat {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1738,8 +1506,6 @@ impl SlicePat {
     #[inline]
     pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct SliceType {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1751,8 +1517,6 @@ impl SliceType {
     #[inline]
     pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct SourceFile {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1763,8 +1527,6 @@ impl SourceFile {
     #[inline]
     pub fn shebang_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![shebang]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Static {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1790,8 +1552,6 @@ impl Static {
     #[inline]
     pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct StmtList {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1806,8 +1566,6 @@ impl StmtList {
     #[inline]
     pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Struct {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1824,8 +1582,6 @@ impl Struct {
     #[inline]
     pub fn struct_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![struct]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct TokenTree {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1843,8 +1599,6 @@ impl TokenTree {
     #[inline]
     pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Trait {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1864,8 +1618,6 @@ impl Trait {
     #[inline]
     pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct TraitAlias {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1884,8 +1636,6 @@ impl TraitAlias {
     #[inline]
     pub fn trait_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![trait]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct TryExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1896,8 +1646,6 @@ impl TryExpr {
     #[inline]
     pub fn question_mark_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![?]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct TupleExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1910,8 +1658,6 @@ impl TupleExpr {
     #[inline]
     pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct TupleField {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1922,8 +1668,6 @@ impl TupleField {
     #[inline]
     pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct TupleFieldList {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1935,8 +1679,6 @@ impl TupleFieldList {
     #[inline]
     pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct TuplePat {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1948,8 +1690,6 @@ impl TuplePat {
     #[inline]
     pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct TupleStructPat {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1963,8 +1703,6 @@ impl TupleStructPat {
     #[inline]
     pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct TupleType {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1976,8 +1714,6 @@ impl TupleType {
     #[inline]
     pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct TypeAlias {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1999,8 +1735,6 @@ impl TypeAlias {
     #[inline]
     pub fn type_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![type]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct TypeArg {
     pub(crate) syntax: SyntaxNode,
 }
@@ -2008,8 +1742,6 @@ impl TypeArg {
     #[inline]
     pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct TypeBound {
     pub(crate) syntax: SyntaxNode,
 }
@@ -2033,8 +1765,6 @@ impl TypeBound {
     #[inline]
     pub fn tilde_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![~]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct TypeBoundList {
     pub(crate) syntax: SyntaxNode,
 }
@@ -2042,8 +1772,6 @@ impl TypeBoundList {
     #[inline]
     pub fn bounds(&self) -> AstChildren<TypeBound> { support::children(&self.syntax) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct TypeParam {
     pub(crate) syntax: SyntaxNode,
 }
@@ -2056,8 +1784,6 @@ impl TypeParam {
     #[inline]
     pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct UnderscoreExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -2066,8 +1792,6 @@ impl UnderscoreExpr {
     #[inline]
     pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Union {
     pub(crate) syntax: SyntaxNode,
 }
@@ -2082,8 +1806,6 @@ impl Union {
     #[inline]
     pub fn union_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![union]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Use {
     pub(crate) syntax: SyntaxNode,
 }
@@ -2098,8 +1820,6 @@ impl Use {
     #[inline]
     pub fn use_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![use]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct UseBoundGenericArgs {
     pub(crate) syntax: SyntaxNode,
 }
@@ -2113,8 +1833,6 @@ impl UseBoundGenericArgs {
     #[inline]
     pub fn r_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![>]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct UseTree {
     pub(crate) syntax: SyntaxNode,
 }
@@ -2130,8 +1848,6 @@ impl UseTree {
     #[inline]
     pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct UseTreeList {
     pub(crate) syntax: SyntaxNode,
 }
@@ -2143,8 +1859,6 @@ impl UseTreeList {
     #[inline]
     pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Variant {
     pub(crate) syntax: SyntaxNode,
 }
@@ -2160,8 +1874,6 @@ impl Variant {
     #[inline]
     pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct VariantList {
     pub(crate) syntax: SyntaxNode,
 }
@@ -2173,8 +1885,6 @@ impl VariantList {
     #[inline]
     pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Visibility {
     pub(crate) syntax: SyntaxNode,
 }
@@ -2190,8 +1900,6 @@ impl Visibility {
     #[inline]
     pub fn pub_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![pub]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct WhereClause {
     pub(crate) syntax: SyntaxNode,
 }
@@ -2201,8 +1909,6 @@ impl WhereClause {
     #[inline]
     pub fn where_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![where]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct WherePred {
     pub(crate) syntax: SyntaxNode,
 }
@@ -2217,8 +1923,6 @@ impl WherePred {
     #[inline]
     pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct WhileExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -2227,8 +1931,6 @@ impl WhileExpr {
     #[inline]
     pub fn while_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![while]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct WildcardPat {
     pub(crate) syntax: SyntaxNode,
 }
@@ -2236,8 +1938,6 @@ impl WildcardPat {
     #[inline]
     pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct YeetExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -2250,8 +1950,6 @@ impl YeetExpr {
     #[inline]
     pub fn yeet_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![yeet]) }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct YieldExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -2456,66 +2154,96 @@ impl ast::HasAttrs for VariantDef {}
 impl ast::HasDocComments for VariantDef {}
 impl ast::HasName for VariantDef {}
 impl ast::HasVisibility for VariantDef {}
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct AnyHasArgList {
     pub(crate) syntax: SyntaxNode,
 }
-impl ast::HasArgList for AnyHasArgList {}
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+impl AnyHasArgList {
+    #[inline]
+    pub fn new<T: ast::HasArgList>(node: T) -> AnyHasArgList {
+        AnyHasArgList { syntax: node.syntax().clone() }
+    }
+}
 pub struct AnyHasAttrs {
     pub(crate) syntax: SyntaxNode,
 }
-impl ast::HasAttrs for AnyHasAttrs {}
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+impl AnyHasAttrs {
+    #[inline]
+    pub fn new<T: ast::HasAttrs>(node: T) -> AnyHasAttrs {
+        AnyHasAttrs { syntax: node.syntax().clone() }
+    }
+}
 pub struct AnyHasDocComments {
     pub(crate) syntax: SyntaxNode,
 }
-impl ast::HasDocComments for AnyHasDocComments {}
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+impl AnyHasDocComments {
+    #[inline]
+    pub fn new<T: ast::HasDocComments>(node: T) -> AnyHasDocComments {
+        AnyHasDocComments { syntax: node.syntax().clone() }
+    }
+}
 pub struct AnyHasGenericArgs {
     pub(crate) syntax: SyntaxNode,
 }
-impl ast::HasGenericArgs for AnyHasGenericArgs {}
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+impl AnyHasGenericArgs {
+    #[inline]
+    pub fn new<T: ast::HasGenericArgs>(node: T) -> AnyHasGenericArgs {
+        AnyHasGenericArgs { syntax: node.syntax().clone() }
+    }
+}
 pub struct AnyHasGenericParams {
     pub(crate) syntax: SyntaxNode,
 }
-impl ast::HasGenericParams for AnyHasGenericParams {}
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+impl AnyHasGenericParams {
+    #[inline]
+    pub fn new<T: ast::HasGenericParams>(node: T) -> AnyHasGenericParams {
+        AnyHasGenericParams { syntax: node.syntax().clone() }
+    }
+}
 pub struct AnyHasLoopBody {
     pub(crate) syntax: SyntaxNode,
 }
-impl ast::HasLoopBody for AnyHasLoopBody {}
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+impl AnyHasLoopBody {
+    #[inline]
+    pub fn new<T: ast::HasLoopBody>(node: T) -> AnyHasLoopBody {
+        AnyHasLoopBody { syntax: node.syntax().clone() }
+    }
+}
 pub struct AnyHasModuleItem {
     pub(crate) syntax: SyntaxNode,
 }
-impl ast::HasModuleItem for AnyHasModuleItem {}
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+impl AnyHasModuleItem {
+    #[inline]
+    pub fn new<T: ast::HasModuleItem>(node: T) -> AnyHasModuleItem {
+        AnyHasModuleItem { syntax: node.syntax().clone() }
+    }
+}
 pub struct AnyHasName {
     pub(crate) syntax: SyntaxNode,
 }
-impl ast::HasName for AnyHasName {}
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+impl AnyHasName {
+    #[inline]
+    pub fn new<T: ast::HasName>(node: T) -> AnyHasName {
+        AnyHasName { syntax: node.syntax().clone() }
+    }
+}
 pub struct AnyHasTypeBounds {
     pub(crate) syntax: SyntaxNode,
 }
-impl ast::HasTypeBounds for AnyHasTypeBounds {}
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+impl AnyHasTypeBounds {
+    #[inline]
+    pub fn new<T: ast::HasTypeBounds>(node: T) -> AnyHasTypeBounds {
+        AnyHasTypeBounds { syntax: node.syntax().clone() }
+    }
+}
 pub struct AnyHasVisibility {
     pub(crate) syntax: SyntaxNode,
 }
-impl ast::HasVisibility for AnyHasVisibility {}
+impl AnyHasVisibility {
+    #[inline]
+    pub fn new<T: ast::HasVisibility>(node: T) -> AnyHasVisibility {
+        AnyHasVisibility { syntax: node.syntax().clone() }
+    }
+}
 impl AstNode for Abi {
     #[inline]
     fn kind() -> SyntaxKind
@@ -2537,6 +2265,21 @@ impl AstNode for Abi {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for Abi {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for Abi {}
+impl PartialEq for Abi {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for Abi {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for Abi {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Abi").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for ArgList {
     #[inline]
     fn kind() -> SyntaxKind
@@ -2558,6 +2301,21 @@ impl AstNode for ArgList {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for ArgList {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for ArgList {}
+impl PartialEq for ArgList {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for ArgList {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for ArgList {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("ArgList").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for ArrayExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -2579,6 +2337,21 @@ impl AstNode for ArrayExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for ArrayExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for ArrayExpr {}
+impl PartialEq for ArrayExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for ArrayExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for ArrayExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("ArrayExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for ArrayType {
     #[inline]
     fn kind() -> SyntaxKind
@@ -2600,6 +2373,21 @@ impl AstNode for ArrayType {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for ArrayType {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for ArrayType {}
+impl PartialEq for ArrayType {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for ArrayType {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for ArrayType {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("ArrayType").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for AsmClobberAbi {
     #[inline]
     fn kind() -> SyntaxKind
@@ -2621,6 +2409,21 @@ impl AstNode for AsmClobberAbi {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for AsmClobberAbi {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for AsmClobberAbi {}
+impl PartialEq for AsmClobberAbi {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for AsmClobberAbi {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for AsmClobberAbi {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("AsmClobberAbi").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for AsmConst {
     #[inline]
     fn kind() -> SyntaxKind
@@ -2642,6 +2445,21 @@ impl AstNode for AsmConst {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for AsmConst {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for AsmConst {}
+impl PartialEq for AsmConst {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for AsmConst {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for AsmConst {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("AsmConst").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for AsmDirSpec {
     #[inline]
     fn kind() -> SyntaxKind
@@ -2663,6 +2481,21 @@ impl AstNode for AsmDirSpec {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for AsmDirSpec {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for AsmDirSpec {}
+impl PartialEq for AsmDirSpec {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for AsmDirSpec {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for AsmDirSpec {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("AsmDirSpec").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for AsmExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -2684,6 +2517,21 @@ impl AstNode for AsmExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for AsmExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for AsmExpr {}
+impl PartialEq for AsmExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for AsmExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for AsmExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("AsmExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for AsmLabel {
     #[inline]
     fn kind() -> SyntaxKind
@@ -2705,6 +2553,21 @@ impl AstNode for AsmLabel {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for AsmLabel {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for AsmLabel {}
+impl PartialEq for AsmLabel {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for AsmLabel {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for AsmLabel {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("AsmLabel").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for AsmOperandExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -2726,6 +2589,21 @@ impl AstNode for AsmOperandExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for AsmOperandExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for AsmOperandExpr {}
+impl PartialEq for AsmOperandExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for AsmOperandExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for AsmOperandExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("AsmOperandExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for AsmOperandNamed {
     #[inline]
     fn kind() -> SyntaxKind
@@ -2747,6 +2625,21 @@ impl AstNode for AsmOperandNamed {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for AsmOperandNamed {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for AsmOperandNamed {}
+impl PartialEq for AsmOperandNamed {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for AsmOperandNamed {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for AsmOperandNamed {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("AsmOperandNamed").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for AsmOption {
     #[inline]
     fn kind() -> SyntaxKind
@@ -2768,6 +2661,21 @@ impl AstNode for AsmOption {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for AsmOption {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for AsmOption {}
+impl PartialEq for AsmOption {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for AsmOption {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for AsmOption {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("AsmOption").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for AsmOptions {
     #[inline]
     fn kind() -> SyntaxKind
@@ -2789,6 +2697,21 @@ impl AstNode for AsmOptions {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for AsmOptions {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for AsmOptions {}
+impl PartialEq for AsmOptions {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for AsmOptions {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for AsmOptions {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("AsmOptions").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for AsmRegOperand {
     #[inline]
     fn kind() -> SyntaxKind
@@ -2810,6 +2733,21 @@ impl AstNode for AsmRegOperand {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for AsmRegOperand {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for AsmRegOperand {}
+impl PartialEq for AsmRegOperand {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for AsmRegOperand {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for AsmRegOperand {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("AsmRegOperand").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for AsmRegSpec {
     #[inline]
     fn kind() -> SyntaxKind
@@ -2831,6 +2769,21 @@ impl AstNode for AsmRegSpec {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for AsmRegSpec {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for AsmRegSpec {}
+impl PartialEq for AsmRegSpec {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for AsmRegSpec {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for AsmRegSpec {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("AsmRegSpec").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for AsmSym {
     #[inline]
     fn kind() -> SyntaxKind
@@ -2852,6 +2805,21 @@ impl AstNode for AsmSym {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for AsmSym {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for AsmSym {}
+impl PartialEq for AsmSym {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for AsmSym {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for AsmSym {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("AsmSym").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for AssocItemList {
     #[inline]
     fn kind() -> SyntaxKind
@@ -2873,6 +2841,21 @@ impl AstNode for AssocItemList {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for AssocItemList {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for AssocItemList {}
+impl PartialEq for AssocItemList {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for AssocItemList {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for AssocItemList {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("AssocItemList").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for AssocTypeArg {
     #[inline]
     fn kind() -> SyntaxKind
@@ -2894,6 +2877,21 @@ impl AstNode for AssocTypeArg {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for AssocTypeArg {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for AssocTypeArg {}
+impl PartialEq for AssocTypeArg {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for AssocTypeArg {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for AssocTypeArg {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("AssocTypeArg").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for Attr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -2915,6 +2913,21 @@ impl AstNode for Attr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for Attr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for Attr {}
+impl PartialEq for Attr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for Attr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for Attr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Attr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for AwaitExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -2936,6 +2949,21 @@ impl AstNode for AwaitExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for AwaitExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for AwaitExpr {}
+impl PartialEq for AwaitExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for AwaitExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for AwaitExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("AwaitExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for BecomeExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -2957,6 +2985,21 @@ impl AstNode for BecomeExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for BecomeExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for BecomeExpr {}
+impl PartialEq for BecomeExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for BecomeExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for BecomeExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("BecomeExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for BinExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -2978,6 +3021,21 @@ impl AstNode for BinExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for BinExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for BinExpr {}
+impl PartialEq for BinExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for BinExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for BinExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("BinExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for BlockExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -2999,6 +3057,21 @@ impl AstNode for BlockExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for BlockExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for BlockExpr {}
+impl PartialEq for BlockExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for BlockExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for BlockExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("BlockExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for BoxPat {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3020,6 +3093,21 @@ impl AstNode for BoxPat {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for BoxPat {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for BoxPat {}
+impl PartialEq for BoxPat {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for BoxPat {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for BoxPat {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("BoxPat").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for BreakExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3041,6 +3129,21 @@ impl AstNode for BreakExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for BreakExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for BreakExpr {}
+impl PartialEq for BreakExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for BreakExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for BreakExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("BreakExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for CallExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3059,8 +3162,23 @@ impl AstNode for CallExpr {
             None
         }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for CallExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for CallExpr {}
+impl PartialEq for CallExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for CallExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for CallExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("CallExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for CastExpr {
     #[inline]
@@ -3083,6 +3201,21 @@ impl AstNode for CastExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for CastExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for CastExpr {}
+impl PartialEq for CastExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for CastExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for CastExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("CastExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for ClosureBinder {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3104,6 +3237,21 @@ impl AstNode for ClosureBinder {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for ClosureBinder {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for ClosureBinder {}
+impl PartialEq for ClosureBinder {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for ClosureBinder {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for ClosureBinder {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("ClosureBinder").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for ClosureExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3125,6 +3273,21 @@ impl AstNode for ClosureExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for ClosureExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for ClosureExpr {}
+impl PartialEq for ClosureExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for ClosureExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for ClosureExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("ClosureExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for Const {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3146,6 +3309,21 @@ impl AstNode for Const {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for Const {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for Const {}
+impl PartialEq for Const {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for Const {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for Const {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Const").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for ConstArg {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3167,6 +3345,21 @@ impl AstNode for ConstArg {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for ConstArg {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for ConstArg {}
+impl PartialEq for ConstArg {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for ConstArg {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for ConstArg {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("ConstArg").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for ConstBlockPat {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3188,6 +3381,21 @@ impl AstNode for ConstBlockPat {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for ConstBlockPat {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for ConstBlockPat {}
+impl PartialEq for ConstBlockPat {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for ConstBlockPat {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for ConstBlockPat {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("ConstBlockPat").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for ConstParam {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3209,6 +3417,21 @@ impl AstNode for ConstParam {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for ConstParam {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for ConstParam {}
+impl PartialEq for ConstParam {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for ConstParam {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for ConstParam {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("ConstParam").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for ContinueExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3230,6 +3453,21 @@ impl AstNode for ContinueExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for ContinueExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for ContinueExpr {}
+impl PartialEq for ContinueExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for ContinueExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for ContinueExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("ContinueExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for DynTraitType {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3251,6 +3489,21 @@ impl AstNode for DynTraitType {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for DynTraitType {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for DynTraitType {}
+impl PartialEq for DynTraitType {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for DynTraitType {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for DynTraitType {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("DynTraitType").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for Enum {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3272,6 +3525,21 @@ impl AstNode for Enum {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for Enum {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for Enum {}
+impl PartialEq for Enum {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for Enum {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for Enum {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Enum").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for ExprStmt {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3293,6 +3561,21 @@ impl AstNode for ExprStmt {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for ExprStmt {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for ExprStmt {}
+impl PartialEq for ExprStmt {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for ExprStmt {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for ExprStmt {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("ExprStmt").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for ExternBlock {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3314,6 +3597,21 @@ impl AstNode for ExternBlock {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for ExternBlock {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for ExternBlock {}
+impl PartialEq for ExternBlock {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for ExternBlock {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for ExternBlock {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("ExternBlock").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for ExternCrate {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3335,6 +3633,21 @@ impl AstNode for ExternCrate {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for ExternCrate {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for ExternCrate {}
+impl PartialEq for ExternCrate {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for ExternCrate {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for ExternCrate {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("ExternCrate").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for ExternItemList {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3356,6 +3669,21 @@ impl AstNode for ExternItemList {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for ExternItemList {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for ExternItemList {}
+impl PartialEq for ExternItemList {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for ExternItemList {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for ExternItemList {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("ExternItemList").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for FieldExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3377,6 +3705,21 @@ impl AstNode for FieldExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for FieldExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for FieldExpr {}
+impl PartialEq for FieldExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for FieldExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for FieldExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("FieldExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for Fn {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3398,6 +3741,21 @@ impl AstNode for Fn {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for Fn {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for Fn {}
+impl PartialEq for Fn {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for Fn {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for Fn {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Fn").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for FnPtrType {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3419,6 +3777,21 @@ impl AstNode for FnPtrType {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for FnPtrType {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for FnPtrType {}
+impl PartialEq for FnPtrType {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for FnPtrType {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for FnPtrType {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("FnPtrType").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for ForExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3440,6 +3813,21 @@ impl AstNode for ForExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for ForExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for ForExpr {}
+impl PartialEq for ForExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for ForExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for ForExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("ForExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for ForType {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3461,6 +3849,21 @@ impl AstNode for ForType {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for ForType {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for ForType {}
+impl PartialEq for ForType {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for ForType {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for ForType {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("ForType").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for FormatArgsArg {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3482,6 +3885,21 @@ impl AstNode for FormatArgsArg {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for FormatArgsArg {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for FormatArgsArg {}
+impl PartialEq for FormatArgsArg {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for FormatArgsArg {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for FormatArgsArg {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("FormatArgsArg").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for FormatArgsExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3503,6 +3921,21 @@ impl AstNode for FormatArgsExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for FormatArgsExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for FormatArgsExpr {}
+impl PartialEq for FormatArgsExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for FormatArgsExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for FormatArgsExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("FormatArgsExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for GenericArgList {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3524,6 +3957,21 @@ impl AstNode for GenericArgList {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for GenericArgList {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for GenericArgList {}
+impl PartialEq for GenericArgList {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for GenericArgList {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for GenericArgList {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("GenericArgList").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for GenericParamList {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3545,6 +3993,21 @@ impl AstNode for GenericParamList {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for GenericParamList {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for GenericParamList {}
+impl PartialEq for GenericParamList {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for GenericParamList {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for GenericParamList {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("GenericParamList").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for IdentPat {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3566,6 +4029,21 @@ impl AstNode for IdentPat {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for IdentPat {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for IdentPat {}
+impl PartialEq for IdentPat {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for IdentPat {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for IdentPat {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("IdentPat").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for IfExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3587,6 +4065,21 @@ impl AstNode for IfExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for IfExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for IfExpr {}
+impl PartialEq for IfExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for IfExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for IfExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("IfExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for Impl {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3608,6 +4101,21 @@ impl AstNode for Impl {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for Impl {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for Impl {}
+impl PartialEq for Impl {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for Impl {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for Impl {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Impl").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for ImplTraitType {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3629,6 +4137,21 @@ impl AstNode for ImplTraitType {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for ImplTraitType {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for ImplTraitType {}
+impl PartialEq for ImplTraitType {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for ImplTraitType {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for ImplTraitType {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("ImplTraitType").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for IndexExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3650,6 +4173,21 @@ impl AstNode for IndexExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for IndexExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for IndexExpr {}
+impl PartialEq for IndexExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for IndexExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for IndexExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("IndexExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for InferType {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3671,6 +4209,21 @@ impl AstNode for InferType {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for InferType {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for InferType {}
+impl PartialEq for InferType {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for InferType {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for InferType {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("InferType").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for ItemList {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3692,6 +4245,21 @@ impl AstNode for ItemList {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for ItemList {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for ItemList {}
+impl PartialEq for ItemList {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for ItemList {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for ItemList {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("ItemList").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for Label {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3713,6 +4281,21 @@ impl AstNode for Label {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for Label {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for Label {}
+impl PartialEq for Label {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for Label {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for Label {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Label").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for LetElse {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3734,6 +4317,21 @@ impl AstNode for LetElse {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for LetElse {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for LetElse {}
+impl PartialEq for LetElse {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for LetElse {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for LetElse {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("LetElse").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for LetExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3755,6 +4353,21 @@ impl AstNode for LetExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for LetExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for LetExpr {}
+impl PartialEq for LetExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for LetExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for LetExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("LetExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for LetStmt {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3776,6 +4389,21 @@ impl AstNode for LetStmt {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for LetStmt {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for LetStmt {}
+impl PartialEq for LetStmt {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for LetStmt {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for LetStmt {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("LetStmt").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for Lifetime {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3797,6 +4425,21 @@ impl AstNode for Lifetime {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for Lifetime {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for Lifetime {}
+impl PartialEq for Lifetime {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for Lifetime {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for Lifetime {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Lifetime").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for LifetimeArg {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3818,6 +4461,21 @@ impl AstNode for LifetimeArg {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for LifetimeArg {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for LifetimeArg {}
+impl PartialEq for LifetimeArg {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for LifetimeArg {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for LifetimeArg {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("LifetimeArg").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for LifetimeParam {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3839,6 +4497,21 @@ impl AstNode for LifetimeParam {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for LifetimeParam {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for LifetimeParam {}
+impl PartialEq for LifetimeParam {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for LifetimeParam {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for LifetimeParam {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("LifetimeParam").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for Literal {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3860,6 +4533,21 @@ impl AstNode for Literal {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for Literal {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for Literal {}
+impl PartialEq for Literal {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for Literal {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for Literal {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Literal").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for LiteralPat {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3881,6 +4569,21 @@ impl AstNode for LiteralPat {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for LiteralPat {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for LiteralPat {}
+impl PartialEq for LiteralPat {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for LiteralPat {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for LiteralPat {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("LiteralPat").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for LoopExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3902,6 +4605,21 @@ impl AstNode for LoopExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for LoopExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for LoopExpr {}
+impl PartialEq for LoopExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for LoopExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for LoopExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("LoopExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for MacroCall {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3923,6 +4641,21 @@ impl AstNode for MacroCall {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for MacroCall {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for MacroCall {}
+impl PartialEq for MacroCall {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for MacroCall {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for MacroCall {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("MacroCall").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for MacroDef {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3944,6 +4677,21 @@ impl AstNode for MacroDef {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for MacroDef {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for MacroDef {}
+impl PartialEq for MacroDef {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for MacroDef {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for MacroDef {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("MacroDef").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for MacroExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3965,6 +4713,21 @@ impl AstNode for MacroExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for MacroExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for MacroExpr {}
+impl PartialEq for MacroExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for MacroExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for MacroExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("MacroExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for MacroItems {
     #[inline]
     fn kind() -> SyntaxKind
@@ -3986,6 +4749,21 @@ impl AstNode for MacroItems {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for MacroItems {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for MacroItems {}
+impl PartialEq for MacroItems {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for MacroItems {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for MacroItems {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("MacroItems").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for MacroPat {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4007,6 +4785,21 @@ impl AstNode for MacroPat {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for MacroPat {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for MacroPat {}
+impl PartialEq for MacroPat {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for MacroPat {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for MacroPat {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("MacroPat").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for MacroRules {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4028,6 +4821,21 @@ impl AstNode for MacroRules {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for MacroRules {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for MacroRules {}
+impl PartialEq for MacroRules {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for MacroRules {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for MacroRules {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("MacroRules").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for MacroStmts {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4049,6 +4857,21 @@ impl AstNode for MacroStmts {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for MacroStmts {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for MacroStmts {}
+impl PartialEq for MacroStmts {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for MacroStmts {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for MacroStmts {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("MacroStmts").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for MacroType {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4070,6 +4893,21 @@ impl AstNode for MacroType {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for MacroType {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for MacroType {}
+impl PartialEq for MacroType {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for MacroType {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for MacroType {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("MacroType").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for MatchArm {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4091,6 +4929,21 @@ impl AstNode for MatchArm {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for MatchArm {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for MatchArm {}
+impl PartialEq for MatchArm {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for MatchArm {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for MatchArm {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("MatchArm").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for MatchArmList {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4112,6 +4965,21 @@ impl AstNode for MatchArmList {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for MatchArmList {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for MatchArmList {}
+impl PartialEq for MatchArmList {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for MatchArmList {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for MatchArmList {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("MatchArmList").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for MatchExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4133,6 +5001,21 @@ impl AstNode for MatchExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for MatchExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for MatchExpr {}
+impl PartialEq for MatchExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for MatchExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for MatchExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("MatchExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for MatchGuard {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4154,6 +5037,21 @@ impl AstNode for MatchGuard {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for MatchGuard {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for MatchGuard {}
+impl PartialEq for MatchGuard {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for MatchGuard {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for MatchGuard {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("MatchGuard").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for Meta {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4175,6 +5073,21 @@ impl AstNode for Meta {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for Meta {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for Meta {}
+impl PartialEq for Meta {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for Meta {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for Meta {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Meta").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for MethodCallExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4196,6 +5109,21 @@ impl AstNode for MethodCallExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for MethodCallExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for MethodCallExpr {}
+impl PartialEq for MethodCallExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for MethodCallExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for MethodCallExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("MethodCallExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for Module {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4217,6 +5145,21 @@ impl AstNode for Module {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for Module {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for Module {}
+impl PartialEq for Module {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for Module {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for Module {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Module").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for Name {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4238,6 +5181,21 @@ impl AstNode for Name {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for Name {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for Name {}
+impl PartialEq for Name {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for Name {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for Name {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Name").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for NameRef {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4259,6 +5217,21 @@ impl AstNode for NameRef {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for NameRef {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for NameRef {}
+impl PartialEq for NameRef {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for NameRef {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for NameRef {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("NameRef").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for NeverType {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4280,6 +5253,21 @@ impl AstNode for NeverType {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for NeverType {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for NeverType {}
+impl PartialEq for NeverType {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for NeverType {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for NeverType {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("NeverType").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for OffsetOfExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4301,6 +5289,21 @@ impl AstNode for OffsetOfExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for OffsetOfExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for OffsetOfExpr {}
+impl PartialEq for OffsetOfExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for OffsetOfExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for OffsetOfExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("OffsetOfExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for OrPat {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4322,6 +5325,21 @@ impl AstNode for OrPat {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for OrPat {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for OrPat {}
+impl PartialEq for OrPat {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for OrPat {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for OrPat {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("OrPat").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for Param {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4343,6 +5361,21 @@ impl AstNode for Param {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for Param {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for Param {}
+impl PartialEq for Param {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for Param {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for Param {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Param").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for ParamList {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4364,6 +5397,21 @@ impl AstNode for ParamList {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for ParamList {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for ParamList {}
+impl PartialEq for ParamList {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for ParamList {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for ParamList {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("ParamList").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for ParenExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4385,6 +5433,21 @@ impl AstNode for ParenExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for ParenExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for ParenExpr {}
+impl PartialEq for ParenExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for ParenExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for ParenExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("ParenExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for ParenPat {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4406,6 +5469,21 @@ impl AstNode for ParenPat {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for ParenPat {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for ParenPat {}
+impl PartialEq for ParenPat {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for ParenPat {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for ParenPat {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("ParenPat").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for ParenType {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4427,6 +5505,21 @@ impl AstNode for ParenType {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for ParenType {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for ParenType {}
+impl PartialEq for ParenType {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for ParenType {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for ParenType {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("ParenType").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for ParenthesizedArgList {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4448,6 +5541,21 @@ impl AstNode for ParenthesizedArgList {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for ParenthesizedArgList {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for ParenthesizedArgList {}
+impl PartialEq for ParenthesizedArgList {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for ParenthesizedArgList {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for ParenthesizedArgList {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("ParenthesizedArgList").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for Path {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4469,6 +5577,21 @@ impl AstNode for Path {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for Path {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for Path {}
+impl PartialEq for Path {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for Path {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for Path {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Path").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for PathExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4490,6 +5613,21 @@ impl AstNode for PathExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for PathExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for PathExpr {}
+impl PartialEq for PathExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for PathExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for PathExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("PathExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for PathPat {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4511,6 +5649,21 @@ impl AstNode for PathPat {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for PathPat {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for PathPat {}
+impl PartialEq for PathPat {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for PathPat {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for PathPat {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("PathPat").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for PathSegment {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4532,6 +5685,21 @@ impl AstNode for PathSegment {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for PathSegment {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for PathSegment {}
+impl PartialEq for PathSegment {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for PathSegment {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for PathSegment {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("PathSegment").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for PathType {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4553,6 +5721,21 @@ impl AstNode for PathType {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for PathType {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for PathType {}
+impl PartialEq for PathType {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for PathType {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for PathType {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("PathType").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for PrefixExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4574,6 +5757,21 @@ impl AstNode for PrefixExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for PrefixExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for PrefixExpr {}
+impl PartialEq for PrefixExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for PrefixExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for PrefixExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("PrefixExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for PtrType {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4595,6 +5793,21 @@ impl AstNode for PtrType {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for PtrType {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for PtrType {}
+impl PartialEq for PtrType {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for PtrType {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for PtrType {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("PtrType").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for RangeExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4616,6 +5829,21 @@ impl AstNode for RangeExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for RangeExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for RangeExpr {}
+impl PartialEq for RangeExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for RangeExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for RangeExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("RangeExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for RangePat {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4637,6 +5865,21 @@ impl AstNode for RangePat {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for RangePat {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for RangePat {}
+impl PartialEq for RangePat {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for RangePat {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for RangePat {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("RangePat").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for RecordExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4658,6 +5901,21 @@ impl AstNode for RecordExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for RecordExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for RecordExpr {}
+impl PartialEq for RecordExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for RecordExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for RecordExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("RecordExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for RecordExprField {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4679,6 +5937,21 @@ impl AstNode for RecordExprField {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for RecordExprField {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for RecordExprField {}
+impl PartialEq for RecordExprField {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for RecordExprField {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for RecordExprField {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("RecordExprField").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for RecordExprFieldList {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4700,6 +5973,21 @@ impl AstNode for RecordExprFieldList {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for RecordExprFieldList {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for RecordExprFieldList {}
+impl PartialEq for RecordExprFieldList {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for RecordExprFieldList {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for RecordExprFieldList {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("RecordExprFieldList").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for RecordField {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4721,6 +6009,21 @@ impl AstNode for RecordField {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for RecordField {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for RecordField {}
+impl PartialEq for RecordField {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for RecordField {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for RecordField {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("RecordField").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for RecordFieldList {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4742,6 +6045,21 @@ impl AstNode for RecordFieldList {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for RecordFieldList {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for RecordFieldList {}
+impl PartialEq for RecordFieldList {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for RecordFieldList {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for RecordFieldList {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("RecordFieldList").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for RecordPat {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4763,6 +6081,21 @@ impl AstNode for RecordPat {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for RecordPat {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for RecordPat {}
+impl PartialEq for RecordPat {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for RecordPat {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for RecordPat {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("RecordPat").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for RecordPatField {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4784,6 +6117,21 @@ impl AstNode for RecordPatField {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for RecordPatField {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for RecordPatField {}
+impl PartialEq for RecordPatField {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for RecordPatField {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for RecordPatField {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("RecordPatField").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for RecordPatFieldList {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4805,6 +6153,21 @@ impl AstNode for RecordPatFieldList {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for RecordPatFieldList {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for RecordPatFieldList {}
+impl PartialEq for RecordPatFieldList {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for RecordPatFieldList {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for RecordPatFieldList {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("RecordPatFieldList").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for RefExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4826,6 +6189,21 @@ impl AstNode for RefExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for RefExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for RefExpr {}
+impl PartialEq for RefExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for RefExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for RefExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("RefExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for RefPat {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4847,6 +6225,21 @@ impl AstNode for RefPat {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for RefPat {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for RefPat {}
+impl PartialEq for RefPat {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for RefPat {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for RefPat {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("RefPat").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for RefType {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4868,6 +6261,21 @@ impl AstNode for RefType {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for RefType {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for RefType {}
+impl PartialEq for RefType {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for RefType {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for RefType {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("RefType").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for Rename {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4889,6 +6297,21 @@ impl AstNode for Rename {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for Rename {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for Rename {}
+impl PartialEq for Rename {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for Rename {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for Rename {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Rename").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for RestPat {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4910,6 +6333,21 @@ impl AstNode for RestPat {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for RestPat {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for RestPat {}
+impl PartialEq for RestPat {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for RestPat {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for RestPat {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("RestPat").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for RetType {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4931,6 +6369,21 @@ impl AstNode for RetType {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for RetType {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for RetType {}
+impl PartialEq for RetType {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for RetType {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for RetType {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("RetType").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for ReturnExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4952,6 +6405,21 @@ impl AstNode for ReturnExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for ReturnExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for ReturnExpr {}
+impl PartialEq for ReturnExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for ReturnExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for ReturnExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("ReturnExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for ReturnTypeSyntax {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4973,6 +6441,21 @@ impl AstNode for ReturnTypeSyntax {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for ReturnTypeSyntax {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for ReturnTypeSyntax {}
+impl PartialEq for ReturnTypeSyntax {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for ReturnTypeSyntax {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for ReturnTypeSyntax {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("ReturnTypeSyntax").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for SelfParam {
     #[inline]
     fn kind() -> SyntaxKind
@@ -4994,6 +6477,21 @@ impl AstNode for SelfParam {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for SelfParam {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for SelfParam {}
+impl PartialEq for SelfParam {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for SelfParam {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for SelfParam {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("SelfParam").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for SlicePat {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5015,6 +6513,21 @@ impl AstNode for SlicePat {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for SlicePat {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for SlicePat {}
+impl PartialEq for SlicePat {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for SlicePat {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for SlicePat {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("SlicePat").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for SliceType {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5036,6 +6549,21 @@ impl AstNode for SliceType {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for SliceType {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for SliceType {}
+impl PartialEq for SliceType {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for SliceType {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for SliceType {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("SliceType").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for SourceFile {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5057,6 +6585,21 @@ impl AstNode for SourceFile {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for SourceFile {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for SourceFile {}
+impl PartialEq for SourceFile {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for SourceFile {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for SourceFile {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("SourceFile").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for Static {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5078,6 +6621,21 @@ impl AstNode for Static {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for Static {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for Static {}
+impl PartialEq for Static {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for Static {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for Static {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Static").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for StmtList {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5099,6 +6657,21 @@ impl AstNode for StmtList {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for StmtList {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for StmtList {}
+impl PartialEq for StmtList {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for StmtList {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for StmtList {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("StmtList").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for Struct {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5120,6 +6693,21 @@ impl AstNode for Struct {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for Struct {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for Struct {}
+impl PartialEq for Struct {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for Struct {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for Struct {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Struct").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for TokenTree {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5141,6 +6729,21 @@ impl AstNode for TokenTree {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for TokenTree {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for TokenTree {}
+impl PartialEq for TokenTree {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for TokenTree {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for TokenTree {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("TokenTree").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for Trait {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5162,6 +6765,21 @@ impl AstNode for Trait {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for Trait {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for Trait {}
+impl PartialEq for Trait {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for Trait {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for Trait {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Trait").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for TraitAlias {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5183,6 +6801,21 @@ impl AstNode for TraitAlias {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for TraitAlias {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for TraitAlias {}
+impl PartialEq for TraitAlias {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for TraitAlias {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for TraitAlias {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("TraitAlias").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for TryExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5201,8 +6834,23 @@ impl AstNode for TryExpr {
             None
         }
     }
-    #[inline]
-    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+    #[inline]
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl hash::Hash for TryExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for TryExpr {}
+impl PartialEq for TryExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for TryExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for TryExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("TryExpr").field("syntax", &self.syntax).finish()
+    }
 }
 impl AstNode for TupleExpr {
     #[inline]
@@ -5225,6 +6873,21 @@ impl AstNode for TupleExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for TupleExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for TupleExpr {}
+impl PartialEq for TupleExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for TupleExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for TupleExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("TupleExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for TupleField {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5246,6 +6909,21 @@ impl AstNode for TupleField {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for TupleField {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for TupleField {}
+impl PartialEq for TupleField {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for TupleField {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for TupleField {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("TupleField").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for TupleFieldList {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5267,6 +6945,21 @@ impl AstNode for TupleFieldList {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for TupleFieldList {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for TupleFieldList {}
+impl PartialEq for TupleFieldList {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for TupleFieldList {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for TupleFieldList {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("TupleFieldList").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for TuplePat {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5288,6 +6981,21 @@ impl AstNode for TuplePat {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for TuplePat {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for TuplePat {}
+impl PartialEq for TuplePat {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for TuplePat {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for TuplePat {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("TuplePat").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for TupleStructPat {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5309,6 +7017,21 @@ impl AstNode for TupleStructPat {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for TupleStructPat {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for TupleStructPat {}
+impl PartialEq for TupleStructPat {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for TupleStructPat {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for TupleStructPat {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("TupleStructPat").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for TupleType {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5330,6 +7053,21 @@ impl AstNode for TupleType {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for TupleType {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for TupleType {}
+impl PartialEq for TupleType {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for TupleType {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for TupleType {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("TupleType").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for TypeAlias {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5351,6 +7089,21 @@ impl AstNode for TypeAlias {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for TypeAlias {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for TypeAlias {}
+impl PartialEq for TypeAlias {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for TypeAlias {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for TypeAlias {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("TypeAlias").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for TypeArg {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5372,6 +7125,21 @@ impl AstNode for TypeArg {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for TypeArg {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for TypeArg {}
+impl PartialEq for TypeArg {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for TypeArg {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for TypeArg {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("TypeArg").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for TypeBound {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5393,6 +7161,21 @@ impl AstNode for TypeBound {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for TypeBound {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for TypeBound {}
+impl PartialEq for TypeBound {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for TypeBound {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for TypeBound {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("TypeBound").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for TypeBoundList {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5414,6 +7197,21 @@ impl AstNode for TypeBoundList {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for TypeBoundList {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for TypeBoundList {}
+impl PartialEq for TypeBoundList {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for TypeBoundList {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for TypeBoundList {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("TypeBoundList").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for TypeParam {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5435,6 +7233,21 @@ impl AstNode for TypeParam {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for TypeParam {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for TypeParam {}
+impl PartialEq for TypeParam {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for TypeParam {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for TypeParam {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("TypeParam").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for UnderscoreExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5456,6 +7269,21 @@ impl AstNode for UnderscoreExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for UnderscoreExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for UnderscoreExpr {}
+impl PartialEq for UnderscoreExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for UnderscoreExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for UnderscoreExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("UnderscoreExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for Union {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5477,6 +7305,21 @@ impl AstNode for Union {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for Union {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for Union {}
+impl PartialEq for Union {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for Union {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for Union {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Union").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for Use {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5498,6 +7341,21 @@ impl AstNode for Use {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for Use {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for Use {}
+impl PartialEq for Use {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for Use {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for Use {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Use").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for UseBoundGenericArgs {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5519,6 +7377,21 @@ impl AstNode for UseBoundGenericArgs {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for UseBoundGenericArgs {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for UseBoundGenericArgs {}
+impl PartialEq for UseBoundGenericArgs {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for UseBoundGenericArgs {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for UseBoundGenericArgs {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("UseBoundGenericArgs").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for UseTree {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5540,6 +7413,21 @@ impl AstNode for UseTree {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for UseTree {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for UseTree {}
+impl PartialEq for UseTree {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for UseTree {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for UseTree {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("UseTree").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for UseTreeList {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5561,6 +7449,21 @@ impl AstNode for UseTreeList {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for UseTreeList {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for UseTreeList {}
+impl PartialEq for UseTreeList {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for UseTreeList {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for UseTreeList {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("UseTreeList").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for Variant {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5582,6 +7485,21 @@ impl AstNode for Variant {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for Variant {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for Variant {}
+impl PartialEq for Variant {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for Variant {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for Variant {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Variant").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for VariantList {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5603,6 +7521,21 @@ impl AstNode for VariantList {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for VariantList {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for VariantList {}
+impl PartialEq for VariantList {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for VariantList {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for VariantList {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("VariantList").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for Visibility {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5624,6 +7557,21 @@ impl AstNode for Visibility {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for Visibility {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for Visibility {}
+impl PartialEq for Visibility {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for Visibility {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for Visibility {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Visibility").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for WhereClause {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5645,6 +7593,21 @@ impl AstNode for WhereClause {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for WhereClause {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for WhereClause {}
+impl PartialEq for WhereClause {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for WhereClause {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for WhereClause {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("WhereClause").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for WherePred {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5666,6 +7629,21 @@ impl AstNode for WherePred {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for WherePred {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for WherePred {}
+impl PartialEq for WherePred {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for WherePred {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for WherePred {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("WherePred").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for WhileExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5687,6 +7665,21 @@ impl AstNode for WhileExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for WhileExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for WhileExpr {}
+impl PartialEq for WhileExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for WhileExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for WhileExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("WhileExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for WildcardPat {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5708,6 +7701,21 @@ impl AstNode for WildcardPat {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for WildcardPat {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for WildcardPat {}
+impl PartialEq for WildcardPat {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for WildcardPat {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for WildcardPat {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("WildcardPat").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for YeetExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5729,6 +7737,21 @@ impl AstNode for YeetExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for YeetExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for YeetExpr {}
+impl PartialEq for YeetExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for YeetExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for YeetExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("YeetExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl AstNode for YieldExpr {
     #[inline]
     fn kind() -> SyntaxKind
@@ -5750,6 +7773,21 @@ impl AstNode for YieldExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for YieldExpr {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for YieldExpr {}
+impl PartialEq for YieldExpr {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for YieldExpr {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for YieldExpr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("YieldExpr").field("syntax", &self.syntax).finish()
+    }
+}
 impl From<Enum> for Adt {
     #[inline]
     fn from(node: Enum) -> Adt { Adt::Enum(node) }
@@ -6783,12 +8821,7 @@ impl AstNode for VariantDef {
         }
     }
 }
-impl AnyHasArgList {
-    #[inline]
-    pub fn new<T: ast::HasArgList>(node: T) -> AnyHasArgList {
-        AnyHasArgList { syntax: node.syntax().clone() }
-    }
-}
+impl ast::HasArgList for AnyHasArgList {}
 impl AstNode for AnyHasArgList {
     #[inline]
     fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, CALL_EXPR | METHOD_CALL_EXPR) }
@@ -6799,6 +8832,21 @@ impl AstNode for AnyHasArgList {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for AnyHasArgList {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for AnyHasArgList {}
+impl PartialEq for AnyHasArgList {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for AnyHasArgList {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for AnyHasArgList {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("AnyHasArgList").field("syntax", &self.syntax).finish()
+    }
+}
 impl From<CallExpr> for AnyHasArgList {
     #[inline]
     fn from(node: CallExpr) -> AnyHasArgList { AnyHasArgList { syntax: node.syntax } }
@@ -6807,12 +8855,7 @@ impl From<MethodCallExpr> for AnyHasArgList {
     #[inline]
     fn from(node: MethodCallExpr) -> AnyHasArgList { AnyHasArgList { syntax: node.syntax } }
 }
-impl AnyHasAttrs {
-    #[inline]
-    pub fn new<T: ast::HasAttrs>(node: T) -> AnyHasAttrs {
-        AnyHasAttrs { syntax: node.syntax().clone() }
-    }
-}
+impl ast::HasAttrs for AnyHasAttrs {}
 impl AstNode for AnyHasAttrs {
     #[inline]
     fn can_cast(kind: SyntaxKind) -> bool {
@@ -6899,6 +8942,21 @@ impl AstNode for AnyHasAttrs {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for AnyHasAttrs {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for AnyHasAttrs {}
+impl PartialEq for AnyHasAttrs {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for AnyHasAttrs {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for AnyHasAttrs {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("AnyHasAttrs").field("syntax", &self.syntax).finish()
+    }
+}
 impl From<ArrayExpr> for AnyHasAttrs {
     #[inline]
     fn from(node: ArrayExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } }
@@ -7187,12 +9245,7 @@ impl From<YieldExpr> for AnyHasAttrs {
     #[inline]
     fn from(node: YieldExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } }
 }
-impl AnyHasDocComments {
-    #[inline]
-    pub fn new<T: ast::HasDocComments>(node: T) -> AnyHasDocComments {
-        AnyHasDocComments { syntax: node.syntax().clone() }
-    }
-}
+impl ast::HasDocComments for AnyHasDocComments {}
 impl AstNode for AnyHasDocComments {
     #[inline]
     fn can_cast(kind: SyntaxKind) -> bool {
@@ -7228,6 +9281,21 @@ impl AstNode for AnyHasDocComments {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for AnyHasDocComments {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for AnyHasDocComments {}
+impl PartialEq for AnyHasDocComments {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for AnyHasDocComments {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for AnyHasDocComments {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("AnyHasDocComments").field("syntax", &self.syntax).finish()
+    }
+}
 impl From<Const> for AnyHasDocComments {
     #[inline]
     fn from(node: Const) -> AnyHasDocComments { AnyHasDocComments { syntax: node.syntax } }
@@ -7312,12 +9380,7 @@ impl From<Variant> for AnyHasDocComments {
     #[inline]
     fn from(node: Variant) -> AnyHasDocComments { AnyHasDocComments { syntax: node.syntax } }
 }
-impl AnyHasGenericArgs {
-    #[inline]
-    pub fn new<T: ast::HasGenericArgs>(node: T) -> AnyHasGenericArgs {
-        AnyHasGenericArgs { syntax: node.syntax().clone() }
-    }
-}
+impl ast::HasGenericArgs for AnyHasGenericArgs {}
 impl AstNode for AnyHasGenericArgs {
     #[inline]
     fn can_cast(kind: SyntaxKind) -> bool {
@@ -7330,6 +9393,21 @@ impl AstNode for AnyHasGenericArgs {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for AnyHasGenericArgs {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for AnyHasGenericArgs {}
+impl PartialEq for AnyHasGenericArgs {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for AnyHasGenericArgs {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for AnyHasGenericArgs {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("AnyHasGenericArgs").field("syntax", &self.syntax).finish()
+    }
+}
 impl From<AssocTypeArg> for AnyHasGenericArgs {
     #[inline]
     fn from(node: AssocTypeArg) -> AnyHasGenericArgs { AnyHasGenericArgs { syntax: node.syntax } }
@@ -7342,16 +9420,11 @@ impl From<PathSegment> for AnyHasGenericArgs {
     #[inline]
     fn from(node: PathSegment) -> AnyHasGenericArgs { AnyHasGenericArgs { syntax: node.syntax } }
 }
-impl AnyHasGenericParams {
-    #[inline]
-    pub fn new<T: ast::HasGenericParams>(node: T) -> AnyHasGenericParams {
-        AnyHasGenericParams { syntax: node.syntax().clone() }
-    }
-}
+impl ast::HasGenericParams for AnyHasGenericParams {}
 impl AstNode for AnyHasGenericParams {
     #[inline]
     fn can_cast(kind: SyntaxKind) -> bool {
-        matches!(kind, ENUM | FN | IMPL | STRUCT | TRAIT | TRAIT_ALIAS | TYPE_ALIAS | UNION)
+        matches!(kind, CONST | ENUM | FN | IMPL | STRUCT | TRAIT | TRAIT_ALIAS | TYPE_ALIAS | UNION)
     }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
@@ -7360,6 +9433,25 @@ impl AstNode for AnyHasGenericParams {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for AnyHasGenericParams {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for AnyHasGenericParams {}
+impl PartialEq for AnyHasGenericParams {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for AnyHasGenericParams {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for AnyHasGenericParams {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("AnyHasGenericParams").field("syntax", &self.syntax).finish()
+    }
+}
+impl From<Const> for AnyHasGenericParams {
+    #[inline]
+    fn from(node: Const) -> AnyHasGenericParams { AnyHasGenericParams { syntax: node.syntax } }
+}
 impl From<Enum> for AnyHasGenericParams {
     #[inline]
     fn from(node: Enum) -> AnyHasGenericParams { AnyHasGenericParams { syntax: node.syntax } }
@@ -7392,12 +9484,7 @@ impl From<Union> for AnyHasGenericParams {
     #[inline]
     fn from(node: Union) -> AnyHasGenericParams { AnyHasGenericParams { syntax: node.syntax } }
 }
-impl AnyHasLoopBody {
-    #[inline]
-    pub fn new<T: ast::HasLoopBody>(node: T) -> AnyHasLoopBody {
-        AnyHasLoopBody { syntax: node.syntax().clone() }
-    }
-}
+impl ast::HasLoopBody for AnyHasLoopBody {}
 impl AstNode for AnyHasLoopBody {
     #[inline]
     fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, FOR_EXPR | LOOP_EXPR | WHILE_EXPR) }
@@ -7408,6 +9495,21 @@ impl AstNode for AnyHasLoopBody {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for AnyHasLoopBody {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for AnyHasLoopBody {}
+impl PartialEq for AnyHasLoopBody {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for AnyHasLoopBody {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for AnyHasLoopBody {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("AnyHasLoopBody").field("syntax", &self.syntax).finish()
+    }
+}
 impl From<ForExpr> for AnyHasLoopBody {
     #[inline]
     fn from(node: ForExpr) -> AnyHasLoopBody { AnyHasLoopBody { syntax: node.syntax } }
@@ -7420,12 +9522,7 @@ impl From<WhileExpr> for AnyHasLoopBody {
     #[inline]
     fn from(node: WhileExpr) -> AnyHasLoopBody { AnyHasLoopBody { syntax: node.syntax } }
 }
-impl AnyHasModuleItem {
-    #[inline]
-    pub fn new<T: ast::HasModuleItem>(node: T) -> AnyHasModuleItem {
-        AnyHasModuleItem { syntax: node.syntax().clone() }
-    }
-}
+impl ast::HasModuleItem for AnyHasModuleItem {}
 impl AstNode for AnyHasModuleItem {
     #[inline]
     fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, ITEM_LIST | MACRO_ITEMS | SOURCE_FILE) }
@@ -7436,6 +9533,21 @@ impl AstNode for AnyHasModuleItem {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for AnyHasModuleItem {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for AnyHasModuleItem {}
+impl PartialEq for AnyHasModuleItem {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for AnyHasModuleItem {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for AnyHasModuleItem {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("AnyHasModuleItem").field("syntax", &self.syntax).finish()
+    }
+}
 impl From<ItemList> for AnyHasModuleItem {
     #[inline]
     fn from(node: ItemList) -> AnyHasModuleItem { AnyHasModuleItem { syntax: node.syntax } }
@@ -7448,12 +9560,7 @@ impl From<SourceFile> for AnyHasModuleItem {
     #[inline]
     fn from(node: SourceFile) -> AnyHasModuleItem { AnyHasModuleItem { syntax: node.syntax } }
 }
-impl AnyHasName {
-    #[inline]
-    pub fn new<T: ast::HasName>(node: T) -> AnyHasName {
-        AnyHasName { syntax: node.syntax().clone() }
-    }
-}
+impl ast::HasName for AnyHasName {}
 impl AstNode for AnyHasName {
     #[inline]
     fn can_cast(kind: SyntaxKind) -> bool {
@@ -7489,6 +9596,21 @@ impl AstNode for AnyHasName {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for AnyHasName {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for AnyHasName {}
+impl PartialEq for AnyHasName {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for AnyHasName {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for AnyHasName {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("AnyHasName").field("syntax", &self.syntax).finish()
+    }
+}
 impl From<AsmOperandNamed> for AnyHasName {
     #[inline]
     fn from(node: AsmOperandNamed) -> AnyHasName { AnyHasName { syntax: node.syntax } }
@@ -7573,12 +9695,7 @@ impl From<Variant> for AnyHasName {
     #[inline]
     fn from(node: Variant) -> AnyHasName { AnyHasName { syntax: node.syntax } }
 }
-impl AnyHasTypeBounds {
-    #[inline]
-    pub fn new<T: ast::HasTypeBounds>(node: T) -> AnyHasTypeBounds {
-        AnyHasTypeBounds { syntax: node.syntax().clone() }
-    }
-}
+impl ast::HasTypeBounds for AnyHasTypeBounds {}
 impl AstNode for AnyHasTypeBounds {
     #[inline]
     fn can_cast(kind: SyntaxKind) -> bool {
@@ -7594,6 +9711,21 @@ impl AstNode for AnyHasTypeBounds {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for AnyHasTypeBounds {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for AnyHasTypeBounds {}
+impl PartialEq for AnyHasTypeBounds {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for AnyHasTypeBounds {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for AnyHasTypeBounds {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("AnyHasTypeBounds").field("syntax", &self.syntax).finish()
+    }
+}
 impl From<AssocTypeArg> for AnyHasTypeBounds {
     #[inline]
     fn from(node: AssocTypeArg) -> AnyHasTypeBounds { AnyHasTypeBounds { syntax: node.syntax } }
@@ -7618,12 +9750,7 @@ impl From<WherePred> for AnyHasTypeBounds {
     #[inline]
     fn from(node: WherePred) -> AnyHasTypeBounds { AnyHasTypeBounds { syntax: node.syntax } }
 }
-impl AnyHasVisibility {
-    #[inline]
-    pub fn new<T: ast::HasVisibility>(node: T) -> AnyHasVisibility {
-        AnyHasVisibility { syntax: node.syntax().clone() }
-    }
-}
+impl ast::HasVisibility for AnyHasVisibility {}
 impl AstNode for AnyHasVisibility {
     #[inline]
     fn can_cast(kind: SyntaxKind) -> bool {
@@ -7656,6 +9783,21 @@ impl AstNode for AnyHasVisibility {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl hash::Hash for AnyHasVisibility {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for AnyHasVisibility {}
+impl PartialEq for AnyHasVisibility {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for AnyHasVisibility {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for AnyHasVisibility {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("AnyHasVisibility").field("syntax", &self.syntax).finish()
+    }
+}
 impl From<Const> for AnyHasVisibility {
     #[inline]
     fn from(node: Const) -> AnyHasVisibility { AnyHasVisibility { syntax: node.syntax } }
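
The generated `nodes.rs` wrappers above trade their former `#[derive(Debug, Clone, PartialEq, Eq, Hash)]` for hand-written impls that all delegate to the wrapped `syntax` field, the `AnyHas*::new` helpers give way to direct marker impls of the matching `ast::Has*` traits, and `AnyHasGenericParams` now also accepts `CONST` nodes. Observable behaviour is unchanged: equality and hashing remain identity of the underlying syntax node. A minimal, self-contained sketch of the delegation pattern, using a hypothetical `Node` stand-in rather than the real `SyntaxNode`:

    use std::fmt;
    use std::hash::{Hash, Hasher};

    // Hypothetical stand-in for `SyntaxNode`, only so the sketch compiles on its own.
    #[derive(Clone, Debug, PartialEq, Eq, Hash)]
    struct Node(u32);

    // Shape of a generated wrapper such as `TuplePat { syntax: SyntaxNode }`.
    struct Wrapper { syntax: Node }

    impl Clone for Wrapper {
        fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
    }
    impl PartialEq for Wrapper {
        fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
    }
    impl Eq for Wrapper {}
    impl Hash for Wrapper {
        fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
    }
    impl fmt::Debug for Wrapper {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            f.debug_struct("Wrapper").field("syntax", &self.syntax).finish()
        }
    }

    fn main() {
        let a = Wrapper { syntax: Node(1) };
        let b = a.clone();
        // Same result the derives used to give: comparison and hashing are
        // decided entirely by the wrapped syntax node.
        assert_eq!(a, b);
        println!("{a:?}");
    }
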
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/tokens.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/tokens.rs
index df2e9619db1c3..b2f56c0b1dbf2 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/tokens.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/tokens.rs
@@ -5,8 +5,7 @@ use crate::{
     SyntaxKind::{self, *},
     SyntaxToken,
 };
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+use std::{fmt, hash};
 pub struct Byte {
     pub(crate) syntax: SyntaxToken,
 }
@@ -26,8 +25,21 @@ impl AstToken for Byte {
     }
     fn syntax(&self) -> &SyntaxToken { &self.syntax }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+impl fmt::Debug for Byte {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Byte").field("syntax", &self.syntax).finish()
+    }
+}
+impl Clone for Byte {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl hash::Hash for Byte {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for Byte {}
+impl PartialEq for Byte {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
 pub struct ByteString {
     pub(crate) syntax: SyntaxToken,
 }
@@ -47,8 +59,21 @@ impl AstToken for ByteString {
     }
     fn syntax(&self) -> &SyntaxToken { &self.syntax }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+impl fmt::Debug for ByteString {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("ByteString").field("syntax", &self.syntax).finish()
+    }
+}
+impl Clone for ByteString {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl hash::Hash for ByteString {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for ByteString {}
+impl PartialEq for ByteString {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
 pub struct CString {
     pub(crate) syntax: SyntaxToken,
 }
@@ -68,8 +93,21 @@ impl AstToken for CString {
     }
     fn syntax(&self) -> &SyntaxToken { &self.syntax }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+impl fmt::Debug for CString {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("CString").field("syntax", &self.syntax).finish()
+    }
+}
+impl Clone for CString {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl hash::Hash for CString {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for CString {}
+impl PartialEq for CString {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
 pub struct Char {
     pub(crate) syntax: SyntaxToken,
 }
@@ -89,8 +127,21 @@ impl AstToken for Char {
     }
     fn syntax(&self) -> &SyntaxToken { &self.syntax }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+impl fmt::Debug for Char {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Char").field("syntax", &self.syntax).finish()
+    }
+}
+impl Clone for Char {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl hash::Hash for Char {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for Char {}
+impl PartialEq for Char {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
 pub struct Comment {
     pub(crate) syntax: SyntaxToken,
 }
@@ -110,8 +161,21 @@ impl AstToken for Comment {
     }
     fn syntax(&self) -> &SyntaxToken { &self.syntax }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+impl fmt::Debug for Comment {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Comment").field("syntax", &self.syntax).finish()
+    }
+}
+impl Clone for Comment {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl hash::Hash for Comment {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for Comment {}
+impl PartialEq for Comment {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
 pub struct FloatNumber {
     pub(crate) syntax: SyntaxToken,
 }
@@ -131,8 +195,21 @@ impl AstToken for FloatNumber {
     }
     fn syntax(&self) -> &SyntaxToken { &self.syntax }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+impl fmt::Debug for FloatNumber {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("FloatNumber").field("syntax", &self.syntax).finish()
+    }
+}
+impl Clone for FloatNumber {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl hash::Hash for FloatNumber {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for FloatNumber {}
+impl PartialEq for FloatNumber {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
 pub struct Ident {
     pub(crate) syntax: SyntaxToken,
 }
@@ -152,8 +229,21 @@ impl AstToken for Ident {
     }
     fn syntax(&self) -> &SyntaxToken { &self.syntax }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+impl fmt::Debug for Ident {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Ident").field("syntax", &self.syntax).finish()
+    }
+}
+impl Clone for Ident {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl hash::Hash for Ident {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for Ident {}
+impl PartialEq for Ident {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
 pub struct IntNumber {
     pub(crate) syntax: SyntaxToken,
 }
@@ -173,8 +263,21 @@ impl AstToken for IntNumber {
     }
     fn syntax(&self) -> &SyntaxToken { &self.syntax }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+impl fmt::Debug for IntNumber {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("IntNumber").field("syntax", &self.syntax).finish()
+    }
+}
+impl Clone for IntNumber {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl hash::Hash for IntNumber {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for IntNumber {}
+impl PartialEq for IntNumber {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
 pub struct String {
     pub(crate) syntax: SyntaxToken,
 }
@@ -194,8 +297,21 @@ impl AstToken for String {
     }
     fn syntax(&self) -> &SyntaxToken { &self.syntax }
 }
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+impl fmt::Debug for String {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("String").field("syntax", &self.syntax).finish()
+    }
+}
+impl Clone for String {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl hash::Hash for String {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for String {}
+impl PartialEq for String {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
 pub struct Whitespace {
     pub(crate) syntax: SyntaxToken,
 }
@@ -215,3 +331,18 @@ impl AstToken for Whitespace {
     }
     fn syntax(&self) -> &SyntaxToken { &self.syntax }
 }
+impl fmt::Debug for Whitespace {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Whitespace").field("syntax", &self.syntax).finish()
+    }
+}
+impl Clone for Whitespace {
+    fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl hash::Hash for Whitespace {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for Whitespace {}
+impl PartialEq for Whitespace {
+    fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
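
The token wrappers in `tokens.rs` get the same treatment: the shared `#[derive(...)]` attributes are replaced by explicit `fmt::Debug`, `Clone`, `hash::Hash`, `Eq` and `PartialEq` impls over the inner `SyntaxToken`, backed by a single `use std::{fmt, hash};` at the top of the file. Code that keys collections on these wrappers keeps working unchanged; a short sketch with a hypothetical stand-in token type (the derives below merely stand in for the equivalent manual impls):

    use std::collections::HashSet;

    // Hypothetical stand-in for `SyntaxToken`; identity is just an id here.
    #[derive(Clone, Debug, PartialEq, Eq, Hash)]
    struct Token(u32);

    // Token wrapper with the same bounds the manual impls provide.
    #[derive(Clone, Debug, PartialEq, Eq, Hash)]
    struct Ident { syntax: Token }

    fn main() {
        let mut seen = HashSet::new();
        let first = Ident { syntax: Token(7) };
        assert!(seen.insert(first.clone())); // first occurrence is inserted
        assert!(!seen.insert(first));        // duplicate: Eq/Hash delegate to the token
    }
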
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs
index 231c21c38f85f..d608a35effa1f 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs
@@ -19,9 +19,9 @@ use rowan::NodeOrToken;
 use stdx::{format_to, format_to_acc, never};
 
 use crate::{
-    ast::{self, make::quote::quote, Param},
-    utils::is_raw_identifier,
     AstNode, SourceFile, SyntaxKind, SyntaxToken,
+    ast::{self, Param, make::quote::quote},
+    utils::is_raw_identifier,
 };
 
 /// While the parent module defines basic atomic "constructors", the `ext`
@@ -32,12 +32,9 @@ pub mod ext {
     use super::*;
 
     pub fn simple_ident_pat(name: ast::Name) -> ast::IdentPat {
-        return from_text(&name.text());
-
-        fn from_text(text: &str) -> ast::IdentPat {
-            ast_from_text(&format!("fn f({text}: ())"))
-        }
+        ast_from_text(&format!("fn f({}: ())", name.text()))
     }
+
     pub fn ident_path(ident: &str) -> ast::Path {
         path_unqualified(path_segment(name_ref(ident)))
     }
@@ -81,7 +78,6 @@ pub mod ext {
     pub fn expr_self() -> ast::Expr {
         expr_from_text("self")
     }
-
     pub fn zero_number() -> ast::Expr {
         expr_from_text("0")
     }
@@ -116,6 +112,10 @@ pub mod ext {
     pub fn ty_result(t: ast::Type, e: ast::Type) -> ast::Type {
         ty_from_text(&format!("Result<{t}, {e}>"))
     }
+
+    pub fn token_tree_from_node(node: &ast::SyntaxNode) -> ast::TokenTree {
+        ast_from_text(&format!("todo!{node}"))
+    }
 }
 
 pub fn name(name: &str) -> ast::Name {
@@ -131,11 +131,7 @@ pub fn name_ref(name_ref: &str) -> ast::NameRef {
     }
 }
 fn raw_ident_esc(ident: &str) -> &'static str {
-    if is_raw_identifier(ident, Edition::CURRENT) {
-        "r#"
-    } else {
-        ""
-    }
+    if is_raw_identifier(ident, Edition::CURRENT) { "r#" } else { "" }
 }
 
 pub fn lifetime(text: &str) -> ast::Lifetime {
@@ -328,7 +324,9 @@ pub fn impl_trait(
         None => String::new(),
     };
 
-    ast_from_text(&format!("{is_unsafe}impl{gen_params} {is_negative}{path_type}{trait_gen_args} for {ty}{type_gen_args}{where_clause}{{{body_newline}{body}}}"))
+    ast_from_text(&format!(
+        "{is_unsafe}impl{gen_params} {is_negative}{path_type}{trait_gen_args} for {ty}{type_gen_args}{where_clause}{{{body_newline}{body}}}"
+    ))
 }
 
 pub fn impl_trait_type(bounds: ast::TypeBoundList) -> ast::ImplTraitType {
@@ -623,6 +621,10 @@ pub fn expr_for_loop(pat: ast::Pat, expr: ast::Expr, block: ast::BlockExpr) -> a
     expr_from_text(&format!("for {pat} in {expr} {block}"))
 }
 
+pub fn expr_while_loop(condition: ast::Expr, block: ast::BlockExpr) -> ast::WhileExpr {
+    expr_from_text(&format!("while {condition} {block}"))
+}
+
 pub fn expr_loop(block: ast::BlockExpr) -> ast::Expr {
     expr_from_text(&format!("loop {block}"))
 }
@@ -631,18 +633,18 @@ pub fn expr_prefix(op: SyntaxKind, expr: ast::Expr) -> ast::PrefixExpr {
     let token = token(op);
     expr_from_text(&format!("{token}{expr}"))
 }
-pub fn expr_call(f: ast::Expr, arg_list: ast::ArgList) -> ast::Expr {
+pub fn expr_call(f: ast::Expr, arg_list: ast::ArgList) -> ast::CallExpr {
     expr_from_text(&format!("{f}{arg_list}"))
 }
 pub fn expr_method_call(
     receiver: ast::Expr,
     method: ast::NameRef,
     arg_list: ast::ArgList,
-) -> ast::Expr {
+) -> ast::MethodCallExpr {
     expr_from_text(&format!("{receiver}.{method}{arg_list}"))
 }
-pub fn expr_macro_call(f: ast::Expr, arg_list: ast::ArgList) -> ast::Expr {
-    expr_from_text(&format!("{f}!{arg_list}"))
+pub fn expr_macro(path: ast::Path, tt: ast::TokenTree) -> ast::MacroExpr {
+    expr_from_text(&format!("{path}!{tt}"))
 }
 pub fn expr_ref(expr: ast::Expr, exclusive: bool) -> ast::Expr {
     expr_from_text(&if exclusive { format!("&mut {expr}") } else { format!("&{expr}") })
@@ -650,14 +652,17 @@ pub fn expr_ref(expr: ast::Expr, exclusive: bool) -> ast::Expr {
 pub fn expr_reborrow(expr: ast::Expr) -> ast::Expr {
     expr_from_text(&format!("&mut *{expr}"))
 }
-pub fn expr_closure(pats: impl IntoIterator<Item = ast::Param>, expr: ast::Expr) -> ast::Expr {
+pub fn expr_closure(
+    pats: impl IntoIterator<Item = ast::Param>,
+    expr: ast::Expr,
+) -> ast::ClosureExpr {
     let params = pats.into_iter().join(", ");
     expr_from_text(&format!("|{params}| {expr}"))
 }
 pub fn expr_field(receiver: ast::Expr, field: &str) -> ast::Expr {
     expr_from_text(&format!("{receiver}.{field}"))
 }
-pub fn expr_paren(expr: ast::Expr) -> ast::Expr {
+pub fn expr_paren(expr: ast::Expr) -> ast::ParenExpr {
     expr_from_text(&format!("({expr})"))
 }
 pub fn expr_tuple(elements: impl IntoIterator<Item = ast::Expr>) -> ast::TupleExpr {
@@ -1221,7 +1226,7 @@ pub fn meta_path(path: ast::Path) -> ast::Meta {
 
 pub fn token_tree(
     delimiter: SyntaxKind,
-    tt: Vec<NodeOrToken<ast::TokenTree, SyntaxToken>>,
+    tt: impl IntoIterator<Item = NodeOrToken<ast::TokenTree, SyntaxToken>>,
 ) -> ast::TokenTree {
     let (l_delimiter, r_delimiter) = match delimiter {
         T!['('] => ('(', ')'),
@@ -1270,11 +1275,12 @@ pub mod tokens {
 
     use parser::Edition;
 
-    use crate::{ast, AstNode, Parse, SourceFile, SyntaxKind::*, SyntaxToken};
+    use crate::{AstNode, Parse, SourceFile, SyntaxKind::*, SyntaxToken, ast};
 
     pub(super) static SOURCE_FILE: LazyLock<Parse<SourceFile>> = LazyLock::new(|| {
         SourceFile::parse(
-            "use crate::foo; const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p, async { let _ @ [] })\n;\n\nimpl A for B where: {}", Edition::CURRENT,
+            "use crate::foo; const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p, async { let _ @ [] })\n;\n\nunsafe impl A for B where: {}",
+            Edition::CURRENT,
         )
     });
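
Several `make` constructors above now return the precise node kind (`CallExpr`, `MethodCallExpr`, `ParenExpr`, `ClosureExpr`), `expr_macro_call` is replaced by `expr_macro(path, token_tree)`, and `expr_while_loop` plus `ext::token_tree_from_node` are new. A rough call-site sketch of the sharper return types; `make::expr_path`, `make::arg_list`, `make::expr_literal` and `make::name_ref` are assumed to keep their existing shapes and are not part of this diff:

    use syntax::ast::{self, make};

    // Builds `frobnicate(42)`; no `ast::Expr::CallExpr(..)` destructuring is needed any
    // more, because `make::expr_call` now returns `ast::CallExpr` directly.
    fn build_call() -> ast::CallExpr {
        let callee = make::expr_path(make::ext::ident_path("frobnicate"));
        let args = make::arg_list([make::expr_literal("42").into()]);
        make::expr_call(callee, args)
    }

    // Builds `receiver.collect(...)` as a typed `MethodCallExpr`.
    fn build_method_call(receiver: ast::Expr, args: ast::ArgList) -> ast::MethodCallExpr {
        make::expr_method_call(receiver, make::name_ref("collect"), args)
    }
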
 
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
index 56f94b965e320..b9ccd34cff064 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
@@ -10,11 +10,12 @@ use parser::SyntaxKind;
 use rowan::{GreenNodeData, GreenTokenData};
 
 use crate::{
+    NodeOrToken, SmolStr, SyntaxElement, SyntaxToken, T, TokenText,
     ast::{
-        self, support, AstNode, AstToken, HasAttrs, HasGenericArgs, HasGenericParams, HasName,
-        SyntaxNode,
+        self, AstNode, AstToken, HasAttrs, HasGenericArgs, HasGenericParams, HasName, SyntaxNode,
+        support,
     },
-    ted, NodeOrToken, SmolStr, SyntaxElement, SyntaxToken, TokenText, T,
+    ted,
 };
 
 use super::{GenericParam, RangeItem, RangeOp};
@@ -35,6 +36,16 @@ impl ast::NameRef {
     pub fn text(&self) -> TokenText<'_> {
         text_of_first_token(self.syntax())
     }
+    pub fn text_non_mutable(&self) -> &str {
+        fn first_token(green_ref: &GreenNodeData) -> &GreenTokenData {
+            green_ref.children().next().and_then(NodeOrToken::into_token).unwrap()
+        }
+
+        match self.syntax().green() {
+            Cow::Borrowed(green_ref) => first_token(green_ref).text(),
+            Cow::Owned(_) => unreachable!(),
+        }
+    }
 
     pub fn as_tuple_field(&self) -> Option<usize> {
         self.text().parse().ok()
@@ -317,11 +328,7 @@ impl ast::Path {
         let path_range = self.syntax().text_range();
         successors(self.first_segment(), move |p| {
             p.parent_path().parent_path().and_then(|p| {
-                if path_range.contains_range(p.syntax().text_range()) {
-                    p.segment()
-                } else {
-                    None
-                }
+                if path_range.contains_range(p.syntax().text_range()) { p.segment() } else { None }
             })
         })
     }
@@ -506,11 +513,7 @@ impl ast::Union {
 impl ast::RecordExprField {
     pub fn for_field_name(field_name: &ast::NameRef) -> Option<ast::RecordExprField> {
         let candidate = Self::for_name_ref(field_name)?;
-        if candidate.field_name().as_ref() == Some(field_name) {
-            Some(candidate)
-        } else {
-            None
-        }
+        if candidate.field_name().as_ref() == Some(field_name) { Some(candidate) } else { None }
     }
 
     pub fn for_name_ref(name_ref: &ast::NameRef) -> Option<ast::RecordExprField> {
@@ -785,11 +788,7 @@ pub enum SelfParamKind {
 impl ast::SelfParam {
     pub fn kind(&self) -> SelfParamKind {
         if self.amp_token().is_some() {
-            if self.mut_token().is_some() {
-                SelfParamKind::MutRef
-            } else {
-                SelfParamKind::Ref
-            }
+            if self.mut_token().is_some() { SelfParamKind::MutRef } else { SelfParamKind::Ref }
         } else {
             SelfParamKind::Owned
         }
@@ -1066,7 +1065,7 @@ impl ast::GenericParamList {
             ast::GenericParam::TypeParam(_) | ast::GenericParam::ConstParam(_) => None,
         })
     }
-    pub fn type_or_const_params(&self) -> impl Iterator<Item = ast::TypeOrConstParam> {
+    pub fn type_or_const_params(&self) -> impl Iterator<Item = ast::TypeOrConstParam> + use<> {
         self.generic_params().filter_map(|param| match param {
             ast::GenericParam::TypeParam(it) => Some(ast::TypeOrConstParam::Type(it)),
             ast::GenericParam::LifetimeParam(_) => None,
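
Two substantive additions sit among the formatting churn in `node_ext.rs`: `ast::NameRef::text_non_mutable`, which hands out `&str` straight from the borrowed green token (treating an owned green node as unreachable), and the `+ use<>` bound on `type_or_const_params`, which opts the returned iterator out of the Rust 2024 capture-everything default so it does not hold a borrow of the parameter list. A self-contained illustration of what `+ use<>` buys, with hypothetical types unrelated to rust-analyzer (needs a toolchain with precise capturing, 1.82+):

    struct List {
        items: Vec<u32>,
    }

    impl List {
        // Under the 2024 capture rules a plain `impl Iterator<...>` here would capture the
        // elided `&self` lifetime, keeping `self` borrowed for as long as the iterator lives.
        // `use<>` says "capture nothing", which is fine because the body returns owned data.
        fn evens(&self) -> impl Iterator<Item = u32> + use<> {
            let owned: Vec<u32> = self.items.iter().copied().filter(|n| n % 2 == 0).collect();
            owned.into_iter()
        }
    }

    fn main() {
        let mut list = List { items: vec![1, 2, 3, 4] };
        let evens = list.evens(); // does not borrow `list`
        list.items.push(6);       // so mutating while the iterator is alive is allowed
        assert_eq!(evens.collect::<Vec<_>>(), vec![2, 4]);
    }
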
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/prec.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/prec.rs
index 4f0e2cad1746f..00750bff0ba20 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/prec.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/prec.rs
@@ -3,8 +3,9 @@
 use stdx::always;
 
 use crate::{
+    AstNode, SyntaxNode,
     ast::{self, BinaryOp, Expr, HasArgList, RangeItem},
-    match_ast, AstNode, SyntaxNode,
+    match_ast,
 };
 
 #[derive(Debug, Clone, Copy, PartialEq, PartialOrd)]
@@ -283,7 +284,7 @@ impl Expr {
                             .map(|op| matches!(op, BinaryOp::LogicOp(_)))
                             .unwrap_or(false) =>
                     {
-                        return true
+                        return true;
                     }
                     _ if self.clone().trailing_brace().is_some() => return true,
                     _ => {}
@@ -446,8 +447,8 @@ impl Expr {
     }
 
     fn is_ordered_before_parent_in_place_of(&self, parent: &Expr, place_of: &SyntaxNode) -> bool {
-        use rowan::TextSize;
         use Expr::*;
+        use rowan::TextSize;
 
         let self_range = self.syntax().text_range();
         let place_of_range = place_of.text_range();
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory.rs
index 1c517ac2c77ed..7142e4f6e1bc8 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory.rs
@@ -19,8 +19,8 @@ pub struct SyntaxFactory {
 
 impl SyntaxFactory {
     /// Creates a new [`SyntaxFactory`], generating mappings between input nodes and generated nodes.
-    pub fn new() -> Self {
-        Self { mappings: Some(RefCell::new(SyntaxMapping::new())) }
+    pub fn with_mappings() -> Self {
+        Self { mappings: Some(RefCell::new(SyntaxMapping::default())) }
     }
 
     /// Creates a [`SyntaxFactory`] without generating mappings.
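
`SyntaxFactory::new` becomes `SyntaxFactory::with_mappings`, so call sites state explicitly that this constructor records input-to-output node mappings (the backing `SyntaxMapping` now comes from `Default`). A minimal call-site sketch; the `syntax::ast::syntax_factory` import path is inferred from the file location rather than shown in this diff:

    use syntax::ast::syntax_factory::SyntaxFactory;

    fn tracking_factory() -> SyntaxFactory {
        // Previously `SyntaxFactory::new()`; the new name makes the mapping bookkeeping explicit.
        SyntaxFactory::with_mappings()
    }
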
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs
index 44f13041c244f..1854000d3db29 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs
@@ -1,11 +1,11 @@
 //! Wrappers over [`make`] constructors
 use crate::{
+    AstNode, NodeOrToken, SyntaxKind, SyntaxNode, SyntaxToken,
     ast::{
-        self, make, HasArgList, HasGenericArgs, HasGenericParams, HasName, HasTypeBounds,
-        HasVisibility,
+        self, HasArgList, HasGenericArgs, HasGenericParams, HasLoopBody, HasName, HasTypeBounds,
+        HasVisibility, make,
     },
     syntax_editor::SyntaxMappingBuilder,
-    AstNode, NodeOrToken, SyntaxKind, SyntaxNode, SyntaxToken,
 };
 
 use super::SyntaxFactory;
@@ -241,7 +241,7 @@ impl SyntaxFactory {
         ast
     }
 
-    pub fn record_pat_field(self, name_ref: ast::NameRef, pat: ast::Pat) -> ast::RecordPatField {
+    pub fn record_pat_field(&self, name_ref: ast::NameRef, pat: ast::Pat) -> ast::RecordPatField {
         let ast = make::record_pat_field(name_ref.clone(), pat.clone()).clone_for_update();
 
         if let Some(mut mapping) = self.mappings() {
@@ -290,6 +290,10 @@ impl SyntaxFactory {
         ast
     }
 
+    pub fn rest_pat(&self) -> ast::RestPat {
+        make::rest_pat().clone_for_update()
+    }
+
     pub fn block_expr(
         &self,
         statements: impl IntoIterator<Item = ast::Stmt>,
@@ -328,10 +332,7 @@ impl SyntaxFactory {
     }
 
     pub fn expr_paren(&self, expr: ast::Expr) -> ast::ParenExpr {
-        // FIXME: `make::expr_paren` should return a `ParenExpr`, not just an `Expr`
-        let ast::Expr::ParenExpr(ast) = make::expr_paren(expr.clone()).clone_for_update() else {
-            unreachable!()
-        };
+        let ast = make::expr_paren(expr.clone()).clone_for_update();
 
         if let Some(mut mapping) = self.mappings() {
             let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
@@ -403,12 +404,7 @@ impl SyntaxFactory {
     }
 
     pub fn expr_call(&self, expr: ast::Expr, arg_list: ast::ArgList) -> ast::CallExpr {
-        // FIXME: `make::expr_call`` should return a `CallExpr`, not just an `Expr`
-        let ast::Expr::CallExpr(ast) =
-            make::expr_call(expr.clone(), arg_list.clone()).clone_for_update()
-        else {
-            unreachable!()
-        };
+        let ast = make::expr_call(expr.clone(), arg_list.clone()).clone_for_update();
 
         if let Some(mut mapping) = self.mappings() {
             let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
@@ -426,13 +422,8 @@ impl SyntaxFactory {
         method: ast::NameRef,
         arg_list: ast::ArgList,
     ) -> ast::MethodCallExpr {
-        // FIXME: `make::expr_method_call` should return a `MethodCallExpr`, not just an `Expr`
-        let ast::Expr::MethodCallExpr(ast) =
-            make::expr_method_call(receiver.clone(), method.clone(), arg_list.clone())
-                .clone_for_update()
-        else {
-            unreachable!()
-        };
+        let ast = make::expr_method_call(receiver.clone(), method.clone(), arg_list.clone())
+            .clone_for_update();
 
         if let Some(mut mapping) = self.mappings() {
             let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
@@ -479,11 +470,7 @@ impl SyntaxFactory {
         expr: ast::Expr,
     ) -> ast::ClosureExpr {
         let (args, input) = iterator_input(pats);
-        // FIXME: `make::expr_paren` should return a `ClosureExpr`, not just an `Expr`
-        let ast::Expr::ClosureExpr(ast) = make::expr_closure(args, expr.clone()).clone_for_update()
-        else {
-            unreachable!()
-        };
+        let ast = make::expr_closure(args, expr.clone()).clone_for_update();
 
         if let Some(mut mapping) = self.mappings() {
             let mut builder = SyntaxMappingBuilder::new(ast.syntax.clone());
@@ -543,6 +530,19 @@ impl SyntaxFactory {
         ast
     }
 
+    pub fn expr_while_loop(&self, condition: ast::Expr, body: ast::BlockExpr) -> ast::WhileExpr {
+        let ast = make::expr_while_loop(condition.clone(), body.clone()).clone_for_update();
+
+        if let Some(mut mapping) = self.mappings() {
+            let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+            builder.map_node(condition.syntax().clone(), ast.condition().unwrap().syntax().clone());
+            builder.map_node(body.syntax().clone(), ast.loop_body().unwrap().syntax().clone());
+            builder.finish(&mut mapping);
+        }
+
+        ast
+    }
+
     pub fn expr_let(&self, pattern: ast::Pat, expr: ast::Expr) -> ast::LetExpr {
         let ast = make::expr_let(pattern.clone(), expr.clone()).clone_for_update();
 
@@ -584,6 +584,21 @@ impl SyntaxFactory {
         ast
     }
 
+    pub fn expr_macro(&self, path: ast::Path, tt: ast::TokenTree) -> ast::MacroExpr {
+        let ast = make::expr_macro(path.clone(), tt.clone()).clone_for_update();
+
+        if let Some(mut mapping) = self.mappings() {
+            let macro_call = ast.macro_call().unwrap();
+            let mut builder = SyntaxMappingBuilder::new(macro_call.syntax().clone());
+            builder.map_node(path.syntax().clone(), macro_call.path().unwrap().syntax().clone());
+            builder
+                .map_node(tt.syntax().clone(), macro_call.token_tree().unwrap().syntax().clone());
+            builder.finish(&mut mapping);
+        }
+
+        ast
+    }
+
     pub fn match_arm(
         &self,
         pat: ast::Pat,
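
On the `SyntaxFactory` side, the FIXME workarounds that destructured `make::expr_*` results disappear now that `make` returns typed nodes, `record_pat_field` only needs `&self`, and `rest_pat`, `expr_while_loop` and `expr_macro` are new constructors that register mappings for their child nodes. A rough sketch of the two new expression constructors, reusing nodes the caller already has (same import-path assumption as above):

    use syntax::ast::{self, syntax_factory::SyntaxFactory};

    // `while <cond> <body>`, with condition and loop body mapped back to the inputs.
    fn lower_to_while(f: &SyntaxFactory, cond: ast::Expr, body: ast::BlockExpr) -> ast::WhileExpr {
        f.expr_while_loop(cond, body)
    }

    // `<path>!<tt>`, e.g. a `todo!(..)` placeholder built from an existing token tree.
    fn macro_placeholder(f: &SyntaxFactory, path: ast::Path, tt: ast::TokenTree) -> ast::MacroExpr {
        f.expr_macro(path, tt)
    }
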
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs
index 08bffb9e3aad3..ced3b713d8d57 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs
@@ -3,13 +3,13 @@
 use std::{borrow::Cow, num::ParseIntError};
 
 use rustc_literal_escaper::{
-    unescape_byte, unescape_char, unescape_mixed, unescape_unicode, EscapeError, MixedUnit, Mode,
+    EscapeError, MixedUnit, Mode, unescape_byte, unescape_char, unescape_mixed, unescape_unicode,
 };
 use stdx::always;
 
 use crate::{
-    ast::{self, AstToken},
     TextRange, TextSize,
+    ast::{self, AstToken},
 };
 
 impl ast::Comment {
@@ -383,11 +383,7 @@ impl ast::IntNumber {
 
     pub fn suffix(&self) -> Option<&str> {
         let (_, _, suffix) = self.split_into_parts();
-        if suffix.is_empty() {
-            None
-        } else {
-            Some(suffix)
-        }
+        if suffix.is_empty() { None } else { Some(suffix) }
     }
 
     pub fn value_string(&self) -> String {
@@ -422,11 +418,7 @@ impl ast::FloatNumber {
 
     pub fn suffix(&self) -> Option<&str> {
         let (_, suffix) = self.split_into_parts();
-        if suffix.is_empty() {
-            None
-        } else {
-            Some(suffix)
-        }
+        if suffix.is_empty() { None } else { Some(suffix) }
     }
 
     pub fn value_string(&self) -> String {
@@ -491,7 +483,7 @@ impl ast::Byte {
 mod tests {
     use rustc_apfloat::ieee::Quad as f128;
 
-    use crate::ast::{self, make, FloatNumber, IntNumber};
+    use crate::ast::{self, FloatNumber, IntNumber, make};
 
     fn check_float_suffix<'a>(lit: &str, expected: impl Into<Option<&'a str>>) {
         assert_eq!(FloatNumber { syntax: make::tokens::literal(lit) }.suffix(), expected.into());
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs
index 5d6aa4331b02c..5290f32dd27db 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs
@@ -4,9 +4,9 @@
 use either::Either;
 
 use crate::{
-    ast::{self, support, AstChildren, AstNode, AstToken},
-    syntax_node::SyntaxElementChildren,
     SyntaxElement, SyntaxToken, T,
+    ast::{self, AstChildren, AstNode, AstToken, support},
+    syntax_node::SyntaxElementChildren,
 };
 
 pub trait HasName: AstNode {
@@ -121,11 +121,7 @@ impl DocCommentIter {
             &mut self.filter_map(|comment| comment.doc_comment().map(ToOwned::to_owned)),
             "\n",
         );
-        if docs.is_empty() {
-            None
-        } else {
-            Some(docs)
-        }
+        if docs.is_empty() { None } else { Some(docs) }
     }
 }
 
diff --git a/src/tools/rust-analyzer/crates/syntax/src/fuzz.rs b/src/tools/rust-analyzer/crates/syntax/src/fuzz.rs
index fd20e603edc3c..9b5cd0135e83a 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/fuzz.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/fuzz.rs
@@ -6,7 +6,7 @@ use std::str::{self, FromStr};
 
 use parser::Edition;
 
-use crate::{validation, AstNode, SourceFile, TextRange};
+use crate::{AstNode, SourceFile, TextRange, validation};
 
 fn check_file_invariants(file: &SourceFile) {
     let root = file.syntax();
diff --git a/src/tools/rust-analyzer/crates/syntax/src/hacks.rs b/src/tools/rust-analyzer/crates/syntax/src/hacks.rs
index 2184359f1d0c4..51b878aff7f11 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/hacks.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/hacks.rs
@@ -4,7 +4,7 @@
 
 use parser::Edition;
 
-use crate::{ast, AstNode};
+use crate::{AstNode, ast};
 
 pub fn parse_expr_from_str(s: &str, edition: Edition) -> Option<ast::Expr> {
     let s = s.trim();
diff --git a/src/tools/rust-analyzer/crates/syntax/src/lib.rs b/src/tools/rust-analyzer/crates/syntax/src/lib.rs
index 21f1ea5f913a7..a3c19f71fbaa7 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/lib.rs
@@ -54,11 +54,11 @@ pub use crate::{
 };
 pub use parser::{Edition, SyntaxKind, T};
 pub use rowan::{
-    api::Preorder, Direction, GreenNode, NodeOrToken, SyntaxText, TextRange, TextSize,
-    TokenAtOffset, WalkEvent,
+    Direction, GreenNode, NodeOrToken, SyntaxText, TextRange, TextSize, TokenAtOffset, WalkEvent,
+    api::Preorder,
 };
 pub use rustc_literal_escaper as unescape;
-pub use smol_str::{format_smolstr, SmolStr, SmolStrBuilder, ToSmolStr};
+pub use smol_str::{SmolStr, SmolStrBuilder, ToSmolStr, format_smolstr};
 
 /// `Parse` is the result of the parsing: a syntax tree and a collection of
 /// errors.
diff --git a/src/tools/rust-analyzer/crates/syntax/src/parsing.rs b/src/tools/rust-analyzer/crates/syntax/src/parsing.rs
index 2c7828c05246b..9e286edc5f988 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/parsing.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/parsing.rs
@@ -5,7 +5,7 @@ mod reparsing;
 
 use rowan::TextRange;
 
-use crate::{syntax_node::GreenNode, SyntaxError, SyntaxTreeBuilder};
+use crate::{SyntaxError, SyntaxTreeBuilder, syntax_node::GreenNode};
 
 pub(crate) use crate::parsing::reparsing::incremental_reparse;
 
diff --git a/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs b/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs
index f2eab18c27963..c54f14366fa19 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs
@@ -11,11 +11,11 @@ use std::ops::Range;
 use parser::{Edition, Reparser};
 
 use crate::{
-    parsing::build_tree,
-    syntax_node::{GreenNode, GreenToken, NodeOrToken, SyntaxElement, SyntaxNode},
     SyntaxError,
     SyntaxKind::*,
-    TextRange, TextSize, T,
+    T, TextRange, TextSize,
+    parsing::build_tree,
+    syntax_node::{GreenNode, GreenToken, NodeOrToken, SyntaxElement, SyntaxNode},
 };
 
 pub(crate) fn incremental_reparse(
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ptr.rs b/src/tools/rust-analyzer/crates/syntax/src/ptr.rs
index 11b79e4e0ed7b..34c07598d2001 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ptr.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ptr.rs
@@ -16,7 +16,7 @@ use std::{
 
 use rowan::TextRange;
 
-use crate::{syntax_node::RustLanguage, AstNode, SyntaxNode};
+use crate::{AstNode, SyntaxNode, syntax_node::RustLanguage};
 
 /// A "pointer" to a [`SyntaxNode`], via location in the source code.
 pub type SyntaxNodePtr = rowan::ast::SyntaxNodePtr<RustLanguage>;
@@ -118,7 +118,7 @@ impl<N: AstNode> From<AstPtr<N>> for SyntaxNodePtr {
 
 #[test]
 fn test_local_syntax_ptr() {
-    use crate::{ast, AstNode, SourceFile};
+    use crate::{AstNode, SourceFile, ast};
 
     let file = SourceFile::parse("struct Foo { f: u32, }", parser::Edition::CURRENT).ok().unwrap();
     let field = file.syntax().descendants().find_map(ast::RecordField::cast).unwrap();
diff --git a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs
index 48c160b9a9acd..58200189c46b2 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs
@@ -33,7 +33,7 @@ pub struct SyntaxEditor {
 impl SyntaxEditor {
     /// Creates a syntax editor to start editing from `root`
     pub fn new(root: SyntaxNode) -> Self {
-        Self { root, changes: vec![], mappings: SyntaxMapping::new(), annotations: vec![] }
+        Self { root, changes: vec![], mappings: SyntaxMapping::default(), annotations: vec![] }
     }
 
     pub fn add_annotation(&mut self, element: impl Element, annotation: SyntaxAnnotation) {
@@ -151,9 +151,8 @@ impl SyntaxEdit {
 #[repr(transparent)]
 pub struct SyntaxAnnotation(NonZeroU32);
 
-impl SyntaxAnnotation {
-    /// Creates a unique syntax annotation to attach data to.
-    pub fn new() -> Self {
+impl Default for SyntaxAnnotation {
+    fn default() -> Self {
         static COUNTER: AtomicU32 = AtomicU32::new(1);
 
         // Only consistency within a thread matters, as SyntaxElements are !Send
@@ -163,12 +162,6 @@ impl SyntaxAnnotation {
     }
 }
 
-impl Default for SyntaxAnnotation {
-    fn default() -> Self {
-        Self::new()
-    }
-}
-
 /// Position describing where to insert elements
 #[derive(Debug)]
 pub struct Position {
@@ -385,8 +378,8 @@ mod tests {
     use expect_test::expect;
 
     use crate::{
+        AstNode,
         ast::{self, make, syntax_factory::SyntaxFactory},
-        AstNode, SyntaxKind,
     };
 
     use super::*;
@@ -411,12 +404,12 @@ mod tests {
         let to_replace = root.syntax().descendants().find_map(ast::BinExpr::cast).unwrap();
 
         let mut editor = SyntaxEditor::new(root.syntax().clone());
-        let make = SyntaxFactory::new();
+        let make = SyntaxFactory::with_mappings();
 
         let name = make::name("var_name");
         let name_ref = make::name_ref("var_name").clone_for_update();
 
-        let placeholder_snippet = SyntaxAnnotation::new();
+        let placeholder_snippet = SyntaxAnnotation::default();
         editor.add_annotation(name.syntax(), placeholder_snippet);
         editor.add_annotation(name_ref.syntax(), placeholder_snippet);
 
@@ -445,11 +438,12 @@ mod tests {
         expect.assert_eq(&edit.new_root.to_string());
 
         assert_eq!(edit.find_annotation(placeholder_snippet).len(), 2);
-        assert!(edit
-            .annotations
-            .iter()
-            .flat_map(|(_, elements)| elements)
-            .all(|element| element.ancestors().any(|it| &it == edit.new_root())))
+        assert!(
+            edit.annotations
+                .iter()
+                .flat_map(|(_, elements)| elements)
+                .all(|element| element.ancestors().any(|it| &it == edit.new_root()))
+        )
     }
 
     #[test]
@@ -521,7 +515,7 @@ mod tests {
         let second_let = root.syntax().descendants().find_map(ast::LetStmt::cast).unwrap();
 
         let mut editor = SyntaxEditor::new(root.syntax().clone());
-        let make = SyntaxFactory::new();
+        let make = SyntaxFactory::with_mappings();
 
         let new_block_expr = make.block_expr([], Some(ast::Expr::BlockExpr(inner_block.clone())));
 
@@ -573,7 +567,7 @@ mod tests {
         let inner_block = root.clone();
 
         let mut editor = SyntaxEditor::new(root.syntax().clone());
-        let make = SyntaxFactory::new();
+        let make = SyntaxFactory::with_mappings();
 
         let new_block_expr = make.block_expr([], Some(ast::Expr::BlockExpr(inner_block.clone())));
 
@@ -630,20 +624,12 @@ mod tests {
         }
 
         if let Some(tail) = parent_fn.body().unwrap().tail_expr() {
-            // FIXME: We do this because `xtask tidy` will not allow us to have trailing whitespace in the expect string.
-            if let Some(SyntaxElement::Token(token)) = tail.syntax().prev_sibling_or_token() {
-                if let SyntaxKind::WHITESPACE = token.kind() {
-                    editor.delete(token);
-                }
-            }
             editor.delete(tail.syntax().clone());
         }
 
         let edit = editor.finish();
 
-        let expect = expect![[r#"
-fn it() {
-}"#]];
+        let expect = expect![["fn it() {\n    \n}"]];
         expect.assert_eq(&edit.new_root.to_string());
     }
 }
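
The hunk above folds `SyntaxAnnotation::new()` into the `Default` impl, so fresh annotations are now minted with `SyntaxAnnotation::default()` (the tests above attach one to two nodes and recover both via `find_annotation`). As a rough standalone sketch of that counter-backed `Default` pattern — the type name and the `Relaxed` ordering are chosen only for illustration, not taken from the crate:

```rust
use std::num::NonZeroU32;
use std::sync::atomic::{AtomicU32, Ordering};

/// Illustrative stand-in for a unique annotation handle; not the crate's type.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Annotation(NonZeroU32);

impl Default for Annotation {
    fn default() -> Self {
        // A process-wide counter hands out fresh ids; `Relaxed` is enough for
        // this sketch, since only the uniqueness of the returned value matters.
        static COUNTER: AtomicU32 = AtomicU32::new(1);
        let id = COUNTER.fetch_add(1, Ordering::Relaxed);
        Annotation(NonZeroU32::new(id).expect("annotation counter overflowed"))
    }
}

fn main() {
    let a = Annotation::default();
    let b = Annotation::default();
    assert_ne!(a, b);
    println!("minted {a:?} and {b:?}");
}
```
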
diff --git a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/edit_algo.rs b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/edit_algo.rs
index fa51fb6eef42a..6a9c88b55d7e1 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/edit_algo.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/edit_algo.rs
@@ -11,8 +11,8 @@ use rustc_hash::FxHashMap;
 use stdx::format_to;
 
 use crate::{
-    syntax_editor::{mapping::MissingMapping, Change, ChangeKind, PositionRepr},
     SyntaxElement, SyntaxNode, SyntaxNodePtr,
+    syntax_editor::{Change, ChangeKind, PositionRepr, mapping::MissingMapping},
 };
 
 use super::{SyntaxEdit, SyntaxEditor};
@@ -208,18 +208,26 @@ pub(super) fn apply_edits(editor: SyntaxEditor) -> SyntaxEdit {
             }
         };
 
-        let upmap_target_node = |target: &SyntaxNode| {
-            match mappings.upmap_child(target, &input_ancestor, &output_ancestor) {
-                Ok(it) => it,
-                Err(MissingMapping(current)) => unreachable!("no mappings exist between {current:?} (ancestor of {input_ancestor:?}) and {output_ancestor:?}"),
-            }
+        let upmap_target_node = |target: &SyntaxNode| match mappings.upmap_child(
+            target,
+            &input_ancestor,
+            &output_ancestor,
+        ) {
+            Ok(it) => it,
+            Err(MissingMapping(current)) => unreachable!(
+                "no mappings exist between {current:?} (ancestor of {input_ancestor:?}) and {output_ancestor:?}"
+            ),
         };
 
-        let upmap_target = |target: &SyntaxElement| {
-            match mappings.upmap_child_element(target, &input_ancestor, &output_ancestor) {
-                Ok(it) => it,
-                Err(MissingMapping(current)) => unreachable!("no mappings exist between {current:?} (ancestor of {input_ancestor:?}) and {output_ancestor:?}"),
-            }
+        let upmap_target = |target: &SyntaxElement| match mappings.upmap_child_element(
+            target,
+            &input_ancestor,
+            &output_ancestor,
+        ) {
+            Ok(it) => it,
+            Err(MissingMapping(current)) => unreachable!(
+                "no mappings exist between {current:?} (ancestor of {input_ancestor:?}) and {output_ancestor:?}"
+            ),
         };
 
         match &mut changes[child as usize] {
diff --git a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/edits.rs b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/edits.rs
index 450d601615ee9..350cb3e2544f6 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/edits.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/edits.rs
@@ -1,12 +1,12 @@
 //! Structural editing for ast using `SyntaxEditor`
 
 use crate::{
+    Direction, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, T,
     ast::{
-        self, edit::IndentLevel, make, syntax_factory::SyntaxFactory, AstNode, Fn, GenericParam,
-        HasGenericParams, HasName,
+        self, AstNode, Fn, GenericParam, HasGenericParams, HasName, edit::IndentLevel, make,
+        syntax_factory::SyntaxFactory,
     },
     syntax_editor::{Position, SyntaxEditor},
-    Direction, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, T,
 };
 
 impl SyntaxEditor {
diff --git a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/mapping.rs b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/mapping.rs
index f71925a79558a..1eaef03197c5d 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/mapping.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/mapping.rs
@@ -20,10 +20,6 @@ pub struct SyntaxMapping {
 }
 
 impl SyntaxMapping {
-    pub fn new() -> Self {
-        Self::default()
-    }
-
     /// Like [`SyntaxMapping::upmap_child`] but for syntax elements.
     pub fn upmap_child_element(
         &self,
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ted.rs b/src/tools/rust-analyzer/crates/syntax/src/ted.rs
index 8592df1597551..64d5ea084c1b0 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ted.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ted.rs
@@ -7,8 +7,8 @@ use std::{mem, ops::RangeInclusive};
 use parser::T;
 
 use crate::{
-    ast::{self, edit::IndentLevel, make, AstNode},
     SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken,
+    ast::{self, AstNode, edit::IndentLevel, make},
 };
 
 /// Utility trait to allow calling `ted` functions with references or owned
diff --git a/src/tools/rust-analyzer/crates/syntax/src/tests.rs b/src/tools/rust-analyzer/crates/syntax/src/tests.rs
index b50489c6f0f2a..e5beb44f42ec7 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/tests.rs
@@ -10,7 +10,7 @@ use rayon::prelude::*;
 use stdx::format_to_acc;
 use test_utils::{bench, bench_fixture, project_root};
 
-use crate::{ast, fuzz, AstNode, SourceFile, SyntaxError};
+use crate::{AstNode, SourceFile, SyntaxError, ast, fuzz};
 
 #[test]
 fn parse_smoke_test() {
diff --git a/src/tools/rust-analyzer/crates/syntax/src/validation.rs b/src/tools/rust-analyzer/crates/syntax/src/validation.rs
index 71c5f9a946db0..5bfeb3bff87a6 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/validation.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/validation.rs
@@ -4,15 +4,16 @@
 
 mod block;
 
+use itertools::Itertools;
 use rowan::Direction;
-use rustc_literal_escaper::{unescape_mixed, unescape_unicode, EscapeError, Mode};
+use rustc_literal_escaper::{self, EscapeError, Mode, unescape_mixed, unescape_unicode};
 
 use crate::{
-    algo,
-    ast::{self, HasAttrs, HasVisibility, IsString, RangeItem},
-    match_ast, AstNode, SyntaxError,
+    AstNode, SyntaxError,
     SyntaxKind::{CONST, FN, INT_NUMBER, TYPE_ALIAS},
-    SyntaxNode, SyntaxToken, TextSize, T,
+    SyntaxNode, SyntaxToken, T, TextSize, algo,
+    ast::{self, HasAttrs, HasVisibility, IsString, RangeItem},
+    match_ast,
 };
 
 pub(crate) fn validate(root: &SyntaxNode, errors: &mut Vec<SyntaxError>) {
@@ -37,7 +38,8 @@ pub(crate) fn validate(root: &SyntaxNode, errors: &mut Vec<SyntaxError>) {
                 ast::FnPtrType(it) => validate_trait_object_fn_ptr_ret_ty(it, errors),
                 ast::MacroRules(it) => validate_macro_rules(it, errors),
                 ast::LetExpr(it) => validate_let_expr(it, errors),
-                ast::ImplTraitType(it) => validate_impl_object_ty(it, errors),
+                ast::DynTraitType(it) => errors.extend(validate_trait_object_ty(it)),
+                ast::ImplTraitType(it) => errors.extend(validate_impl_object_ty(it)),
                 _ => (),
             }
         }
@@ -316,58 +318,104 @@ fn validate_path_keywords(segment: ast::PathSegment, errors: &mut Vec<SyntaxErro
 }
 
 fn validate_trait_object_ref_ty(ty: ast::RefType, errors: &mut Vec<SyntaxError>) {
-    if let Some(ast::Type::DynTraitType(ty)) = ty.ty() {
-        if let Some(err) = validate_trait_object_ty(ty) {
-            errors.push(err);
+    match ty.ty() {
+        Some(ast::Type::DynTraitType(ty)) => {
+            if let Some(err) = validate_trait_object_ty_plus(ty) {
+                errors.push(err);
+            }
+        }
+        Some(ast::Type::ImplTraitType(ty)) => {
+            if let Some(err) = validate_impl_object_ty_plus(ty) {
+                errors.push(err);
+            }
         }
+        _ => (),
     }
 }
 
 fn validate_trait_object_ptr_ty(ty: ast::PtrType, errors: &mut Vec<SyntaxError>) {
-    if let Some(ast::Type::DynTraitType(ty)) = ty.ty() {
-        if let Some(err) = validate_trait_object_ty(ty) {
-            errors.push(err);
+    match ty.ty() {
+        Some(ast::Type::DynTraitType(ty)) => {
+            if let Some(err) = validate_trait_object_ty_plus(ty) {
+                errors.push(err);
+            }
+        }
+        Some(ast::Type::ImplTraitType(ty)) => {
+            if let Some(err) = validate_impl_object_ty_plus(ty) {
+                errors.push(err);
+            }
         }
+        _ => (),
     }
 }
 
 fn validate_trait_object_fn_ptr_ret_ty(ty: ast::FnPtrType, errors: &mut Vec<SyntaxError>) {
-    if let Some(ast::Type::DynTraitType(ty)) = ty.ret_type().and_then(|ty| ty.ty()) {
-        if let Some(err) = validate_trait_object_ty(ty) {
-            errors.push(err);
+    match ty.ret_type().and_then(|ty| ty.ty()) {
+        Some(ast::Type::DynTraitType(ty)) => {
+            if let Some(err) = validate_trait_object_ty_plus(ty) {
+                errors.push(err);
+            }
+        }
+        Some(ast::Type::ImplTraitType(ty)) => {
+            if let Some(err) = validate_impl_object_ty_plus(ty) {
+                errors.push(err);
+            }
         }
+        _ => (),
     }
 }
 
 fn validate_trait_object_ty(ty: ast::DynTraitType) -> Option<SyntaxError> {
     let tbl = ty.type_bound_list()?;
-    let bounds_count = tbl.bounds().count();
+    let no_bounds = tbl.bounds().filter_map(|it| it.ty()).next().is_none();
 
-    match bounds_count {
-        0 => Some(SyntaxError::new(
+    match no_bounds {
+        true => Some(SyntaxError::new(
             "At least one trait is required for an object type",
             ty.syntax().text_range(),
         )),
-        _ if bounds_count > 1 => {
-            let dyn_token = ty.dyn_token()?;
-            let preceding_token =
-                algo::skip_trivia_token(dyn_token.prev_token()?, Direction::Prev)?;
-
-            if !matches!(preceding_token.kind(), T!['('] | T![<] | T![=]) {
-                return Some(SyntaxError::new("ambiguous `+` in a type", ty.syntax().text_range()));
-            }
-            None
-        }
-        _ => None,
+        false => None,
     }
 }
 
-fn validate_impl_object_ty(ty: ast::ImplTraitType, errors: &mut Vec<SyntaxError>) {
-    if ty.type_bound_list().map_or(0, |tbl| tbl.bounds().count()) == 0 {
-        errors.push(SyntaxError::new(
-            "At least one trait must be specified",
+fn validate_impl_object_ty(ty: ast::ImplTraitType) -> Option<SyntaxError> {
+    let tbl = ty.type_bound_list()?;
+    let no_bounds = tbl.bounds().filter_map(|it| it.ty()).next().is_none();
+
+    match no_bounds {
+        true => Some(SyntaxError::new(
+            "At least one trait is required for an object type",
             ty.syntax().text_range(),
-        ));
+        )),
+        false => None,
+    }
+}
+
+// FIXME: This is not a validation error, this is a context dependent parse error
+fn validate_trait_object_ty_plus(ty: ast::DynTraitType) -> Option<SyntaxError> {
+    let dyn_token = ty.dyn_token()?;
+    let preceding_token = algo::skip_trivia_token(dyn_token.prev_token()?, Direction::Prev)?;
+    let tbl = ty.type_bound_list()?;
+    let more_than_one_bound = tbl.bounds().next_tuple::<(_, _)>().is_some();
+
+    if more_than_one_bound && !matches!(preceding_token.kind(), T!['('] | T![<] | T![=]) {
+        Some(SyntaxError::new("ambiguous `+` in a type", ty.syntax().text_range()))
+    } else {
+        None
+    }
+}
+
+// FIXME: This is not a validation error, this is a context dependent parse error
+fn validate_impl_object_ty_plus(ty: ast::ImplTraitType) -> Option<SyntaxError> {
+    let dyn_token = ty.impl_token()?;
+    let preceding_token = algo::skip_trivia_token(dyn_token.prev_token()?, Direction::Prev)?;
+    let tbl = ty.type_bound_list()?;
+    let more_than_one_bound = tbl.bounds().next_tuple::<(_, _)>().is_some();
+
+    if more_than_one_bound && !matches!(preceding_token.kind(), T!['('] | T![<] | T![=]) {
+        Some(SyntaxError::new("ambiguous `+` in a type", ty.syntax().text_range()))
+    } else {
+        None
     }
 }
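
The rewritten validators separate two diagnostics: a bound list with no trait now reports "At least one trait is required for an object type" for both `dyn` and `impl`, while the ambiguous `+` check moves into the `*_plus` helpers and also fires for `impl` behind `&` and `*`. A plain-Rust illustration of inputs the checks accept and reject — the trait names are made up, and the rejected forms are left commented out:

```rust
// Illustrative inputs for the reworked checks; `Draw` is an invented trait.
trait Draw {}

// Parenthesizing the bound list keeps the `+` unambiguous behind a `&` or `*`.
fn ok_dyn(_x: &(dyn Draw + Send)) {}
fn ok_impl(_x: &(impl Draw + Send)) {}

// These now trip "ambiguous `+` in a type": it is unclear whether `+ Send`
// binds to the pointee or to the surrounding reference type.
// fn bad_dyn(_x: &dyn Draw + Send) {}
// fn bad_impl(_x: &impl Draw + Send) {}

// A lifetime-only bound list contains no trait, so (per the new test data)
// it reports "At least one trait is required for an object type".
// fn bad_lifetime_only(_x: &impl 'a) {}

fn main() {}
```
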
 
diff --git a/src/tools/rust-analyzer/crates/syntax/src/validation/block.rs b/src/tools/rust-analyzer/crates/syntax/src/validation/block.rs
index fe3d61bef169d..1f815f6eac738 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/validation/block.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/validation/block.rs
@@ -1,9 +1,9 @@
 //! Logic for validating block expressions i.e. `ast::BlockExpr`.
 
 use crate::{
-    ast::{self, AstNode, HasAttrs},
     SyntaxError,
     SyntaxKind::*,
+    ast::{self, AstNode, HasAttrs},
 };
 
 pub(crate) fn validate_block_expr(block: ast::BlockExpr, errors: &mut Vec<SyntaxError>) {
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0261_dangling_impl_undeclared_lifetime.rast b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0261_dangling_impl_undeclared_lifetime.rast
new file mode 100644
index 0000000000000..bd71c61cadaea
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0261_dangling_impl_undeclared_lifetime.rast
@@ -0,0 +1,38 @@
+SOURCE_FILE@0..28
+  FN@0..28
+    FN_KW@0..2 "fn"
+    WHITESPACE@2..3 " "
+    NAME@3..4
+      IDENT@3..4 "f"
+    PARAM_LIST@4..25
+      L_PAREN@4..5 "("
+      PARAM@5..24
+        WILDCARD_PAT@5..6
+          UNDERSCORE@5..6 "_"
+        COLON@6..7 ":"
+        WHITESPACE@7..8 " "
+        REF_TYPE@8..24
+          AMP@8..9 "&"
+          IMPL_TRAIT_TYPE@9..24
+            IMPL_KW@9..13 "impl"
+            WHITESPACE@13..14 " "
+            TYPE_BOUND_LIST@14..24
+              TYPE_BOUND@14..16
+                LIFETIME@14..16
+                  LIFETIME_IDENT@14..16 "'a"
+              WHITESPACE@16..17 " "
+              PLUS@17..18 "+"
+              WHITESPACE@18..19 " "
+              TYPE_BOUND@19..24
+                PATH_TYPE@19..24
+                  PATH@19..24
+                    PATH_SEGMENT@19..24
+                      NAME_REF@19..24
+                        IDENT@19..24 "Sized"
+      R_PAREN@24..25 ")"
+    WHITESPACE@25..26 " "
+    BLOCK_EXPR@26..28
+      STMT_LIST@26..28
+        L_CURLY@26..27 "{"
+        R_CURLY@27..28 "}"
+error 9..24: ambiguous `+` in a type
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0261_dangling_impl_undeclared_lifetime.rs b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0261_dangling_impl_undeclared_lifetime.rs
new file mode 100644
index 0000000000000..670a6f0ea7c4d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0261_dangling_impl_undeclared_lifetime.rs
@@ -0,0 +1 @@
+fn f(_: &impl 'a + Sized) {}
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/dangling_impl.rast b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/dangling_impl.rast
index 2db07ae12a98e..c337ee8bbf4cf 100644
--- a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/dangling_impl.rast
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/dangling_impl.rast
@@ -20,4 +20,4 @@ SOURCE_FILE@0..16
       STMT_LIST@14..16
         L_CURLY@14..15 "{"
         R_CURLY@15..16 "}"
-error 8..12: At least one trait must be specified
+error 8..12: At least one trait is required for an object type
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/dangling_impl_reference.rast b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/dangling_impl_reference.rast
index dbe6535ac665b..cb73cb4e058b8 100644
--- a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/dangling_impl_reference.rast
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/dangling_impl_reference.rast
@@ -22,4 +22,4 @@ SOURCE_FILE@0..17
       STMT_LIST@15..17
         L_CURLY@15..16 "{"
         R_CURLY@16..17 "}"
-error 9..13: At least one trait must be specified
+error 9..13: At least one trait is required for an object type
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/impl_trait_lifetime_only.rast b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/impl_trait_lifetime_only.rast
new file mode 100644
index 0000000000000..ed938d63db449
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/impl_trait_lifetime_only.rast
@@ -0,0 +1,29 @@
+SOURCE_FILE@0..20
+  FN@0..20
+    FN_KW@0..2 "fn"
+    WHITESPACE@2..3 " "
+    NAME@3..4
+      IDENT@3..4 "f"
+    PARAM_LIST@4..17
+      L_PAREN@4..5 "("
+      PARAM@5..16
+        WILDCARD_PAT@5..6
+          UNDERSCORE@5..6 "_"
+        COLON@6..7 ":"
+        WHITESPACE@7..8 " "
+        REF_TYPE@8..16
+          AMP@8..9 "&"
+          IMPL_TRAIT_TYPE@9..16
+            IMPL_KW@9..13 "impl"
+            WHITESPACE@13..14 " "
+            TYPE_BOUND_LIST@14..16
+              TYPE_BOUND@14..16
+                LIFETIME@14..16
+                  LIFETIME_IDENT@14..16 "'a"
+      R_PAREN@16..17 ")"
+    WHITESPACE@17..18 " "
+    BLOCK_EXPR@18..20
+      STMT_LIST@18..20
+        L_CURLY@18..19 "{"
+        R_CURLY@19..20 "}"
+error 9..16: At least one trait is required for an object type
diff --git a/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/impl_trait_lifetime_only.rs b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/impl_trait_lifetime_only.rs
new file mode 100644
index 0000000000000..5b18a54ef5a07
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/impl_trait_lifetime_only.rs
@@ -0,0 +1 @@
+fn f(_: &impl 'a) {}
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/test-fixture/Cargo.toml b/src/tools/rust-analyzer/crates/test-fixture/Cargo.toml
index 95f4cb9d67e25..353d4c312dba8 100644
--- a/src/tools/rust-analyzer/crates/test-fixture/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/test-fixture/Cargo.toml
@@ -18,6 +18,8 @@ rustc-hash.workspace = true
 span.workspace = true
 stdx.workspace = true
 intern.workspace = true
+triomphe.workspace = true
+paths.workspace = true
 
 [lints]
 workspace = true
diff --git a/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs
index 37dfb87721c83..2f379d419e8ff 100644
--- a/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs
@@ -1,12 +1,14 @@
 //! A set of high-level utility fixture methods to use in tests.
-use std::{iter, mem, str::FromStr, sync};
+use std::{mem, str::FromStr, sync};
 
 use base_db::{
-    CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, CrateWorkspaceData, Dependency,
-    Env, FileChange, FileSet, LangCrateOrigin, SourceRoot, SourceRootDatabase, Version, VfsPath,
+    Crate, CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin, CrateWorkspaceData,
+    DependencyBuilder, Env, FileChange, FileSet, LangCrateOrigin, SourceDatabase, SourceRoot,
+    Version, VfsPath, salsa,
 };
 use cfg::CfgOptions;
 use hir_expand::{
+    EditionedFileId, FileRange,
     change::ChangeWithProcMacros,
     db::ExpandDatabase,
     files::FilePosition,
@@ -15,26 +17,27 @@ use hir_expand::{
     },
     quote,
     tt::{Leaf, TokenTree, TopSubtree, TopSubtreeBuilder, TtElement, TtIter},
-    FileRange,
 };
-use intern::{sym, Symbol};
+use intern::{Symbol, sym};
+use paths::AbsPathBuf;
 use rustc_hash::FxHashMap;
-use span::{Edition, EditionedFileId, FileId, Span};
+use span::{Edition, FileId, Span};
 use stdx::itertools::Itertools;
 use test_utils::{
-    extract_range_or_offset, Fixture, FixtureWithProjectMeta, RangeOrOffset, CURSOR_MARKER,
-    ESCAPED_CURSOR_MARKER,
+    CURSOR_MARKER, ESCAPED_CURSOR_MARKER, Fixture, FixtureWithProjectMeta, RangeOrOffset,
+    extract_range_or_offset,
 };
+use triomphe::Arc;
 
 pub const WORKSPACE: base_db::SourceRootId = base_db::SourceRootId(0);
 
-pub trait WithFixture: Default + ExpandDatabase + SourceRootDatabase + 'static {
+pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
     #[track_caller]
     fn with_single_file(
         #[rust_analyzer::rust_fixture] ra_fixture: &str,
     ) -> (Self, EditionedFileId) {
-        let fixture = ChangeFixture::parse(ra_fixture);
         let mut db = Self::default();
+        let fixture = ChangeFixture::parse(&db, ra_fixture);
         fixture.change.apply(&mut db);
         assert_eq!(fixture.files.len(), 1, "Multiple file found in the fixture");
         (db, fixture.files[0])
@@ -44,8 +47,8 @@ pub trait WithFixture: Default + ExpandDatabase + SourceRootDatabase + 'static {
     fn with_many_files(
         #[rust_analyzer::rust_fixture] ra_fixture: &str,
     ) -> (Self, Vec<EditionedFileId>) {
-        let fixture = ChangeFixture::parse(ra_fixture);
         let mut db = Self::default();
+        let fixture = ChangeFixture::parse(&db, ra_fixture);
         fixture.change.apply(&mut db);
         assert!(fixture.file_position.is_none());
         (db, fixture.files)
@@ -53,8 +56,8 @@ pub trait WithFixture: Default + ExpandDatabase + SourceRootDatabase + 'static {
 
     #[track_caller]
     fn with_files(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> Self {
-        let fixture = ChangeFixture::parse(ra_fixture);
         let mut db = Self::default();
+        let fixture = ChangeFixture::parse(&db, ra_fixture);
         fixture.change.apply(&mut db);
         assert!(fixture.file_position.is_none());
         db
@@ -65,8 +68,8 @@ pub trait WithFixture: Default + ExpandDatabase + SourceRootDatabase + 'static {
         #[rust_analyzer::rust_fixture] ra_fixture: &str,
         proc_macros: Vec<(String, ProcMacro)>,
     ) -> Self {
-        let fixture = ChangeFixture::parse_with_proc_macros(ra_fixture, proc_macros);
         let mut db = Self::default();
+        let fixture = ChangeFixture::parse_with_proc_macros(&db, ra_fixture, proc_macros);
         fixture.change.apply(&mut db);
         assert!(fixture.file_position.is_none());
         db
@@ -90,8 +93,8 @@ pub trait WithFixture: Default + ExpandDatabase + SourceRootDatabase + 'static {
     fn with_range_or_offset(
         #[rust_analyzer::rust_fixture] ra_fixture: &str,
     ) -> (Self, EditionedFileId, RangeOrOffset) {
-        let fixture = ChangeFixture::parse(ra_fixture);
         let mut db = Self::default();
+        let fixture = ChangeFixture::parse(&db, ra_fixture);
         fixture.change.apply(&mut db);
 
         let (file_id, range_or_offset) = fixture
@@ -100,18 +103,12 @@ pub trait WithFixture: Default + ExpandDatabase + SourceRootDatabase + 'static {
         (db, file_id, range_or_offset)
     }
 
-    fn test_crate(&self) -> CrateId {
-        let crate_graph = self.crate_graph();
-        let mut it = crate_graph.iter();
-        let mut res = it.next().unwrap();
-        while crate_graph[res].origin.is_lang() {
-            res = it.next().unwrap();
-        }
-        res
+    fn test_crate(&self) -> Crate {
+        self.all_crates().iter().copied().find(|&krate| !krate.data(self).origin.is_lang()).unwrap()
     }
 }
 
-impl<DB: ExpandDatabase + SourceRootDatabase + Default + 'static> WithFixture for DB {}
+impl<DB: ExpandDatabase + SourceDatabase + Default + 'static> WithFixture for DB {}
 
 pub struct ChangeFixture {
     pub file_position: Option<(EditionedFileId, RangeOrOffset)>,
@@ -122,11 +119,15 @@ pub struct ChangeFixture {
 const SOURCE_ROOT_PREFIX: &str = "/";
 
 impl ChangeFixture {
-    pub fn parse(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> ChangeFixture {
-        Self::parse_with_proc_macros(ra_fixture, Vec::new())
+    pub fn parse(
+        db: &dyn salsa::Database,
+        #[rust_analyzer::rust_fixture] ra_fixture: &str,
+    ) -> ChangeFixture {
+        Self::parse_with_proc_macros(db, ra_fixture, Vec::new())
     }
 
     pub fn parse_with_proc_macros(
+        db: &dyn salsa::Database,
         #[rust_analyzer::rust_fixture] ra_fixture: &str,
         mut proc_macro_defs: Vec<(String, ProcMacro)>,
     ) -> ChangeFixture {
@@ -142,13 +143,14 @@ impl ChangeFixture {
             let channel = toolchain.as_deref().unwrap_or("stable");
             Version::parse(&format!("1.76.0-{channel}")).unwrap()
         });
-        let mut source_change = FileChange::new();
+        let mut source_change = FileChange::default();
 
         let mut files = Vec::new();
-        let mut crate_graph = CrateGraph::default();
+        let mut crate_graph = CrateGraphBuilder::default();
         let mut crates = FxHashMap::default();
         let mut crate_deps = Vec::new();
         let mut default_crate_root: Option<FileId> = None;
+        let mut default_edition = Edition::CURRENT;
         let mut default_cfg = CfgOptions::default();
         let mut default_env = Env::from_iter([(
             String::from("__ra_is_test_fixture"),
@@ -162,6 +164,12 @@ impl ChangeFixture {
 
         let mut file_position = None;
 
+        let crate_ws_data =
+            Arc::new(CrateWorkspaceData { data_layout: target_data_layout, toolchain });
+
+        // FIXME: This is less than ideal
+        let proc_macro_cwd = Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap()));
+
         for entry in fixture {
             let mut range_or_offset = None;
             let text = if entry.text.contains(CURSOR_MARKER) {
@@ -180,7 +188,7 @@ impl ChangeFixture {
             let meta = FileMeta::from_fixture(entry, current_source_root_kind);
             if let Some(range_or_offset) = range_or_offset {
                 file_position =
-                    Some((EditionedFileId::new(file_id, meta.edition), range_or_offset));
+                    Some((EditionedFileId::new(db, file_id, meta.edition), range_or_offset));
             }
 
             assert!(meta.path.starts_with(SOURCE_ROOT_PREFIX));
@@ -208,12 +216,13 @@ impl ChangeFixture {
                     meta.edition,
                     Some(crate_name.clone().into()),
                     version,
-                    From::from(meta.cfg.clone()),
-                    Some(From::from(meta.cfg)),
+                    meta.cfg.clone(),
+                    Some(meta.cfg),
                     meta.env,
                     origin,
                     false,
-                    None,
+                    proc_macro_cwd.clone(),
+                    crate_ws_data.clone(),
                 );
                 let prev = crates.insert(crate_name.clone(), crate_id);
                 assert!(prev.is_none(), "multiple crates with same name: {crate_name}");
@@ -228,6 +237,7 @@ impl ChangeFixture {
             } else if meta.path == "/main.rs" || meta.path == "/lib.rs" {
                 assert!(default_crate_root.is_none());
                 default_crate_root = Some(file_id);
+                default_edition = meta.edition;
                 default_cfg.extend(meta.cfg.into_iter());
                 default_env.extend_from_other(&meta.env);
             }
@@ -235,7 +245,7 @@ impl ChangeFixture {
             source_change.change_file(file_id, Some(text));
             let path = VfsPath::new_virtual_path(meta.path);
             file_set.insert(file_id, path);
-            files.push(EditionedFileId::new(file_id, meta.edition));
+            files.push(EditionedFileId::new(db, file_id, meta.edition));
             file_id = FileId::from_raw(file_id.index() + 1);
         }
 
@@ -244,23 +254,27 @@ impl ChangeFixture {
                 .expect("missing default crate root, specify a main.rs or lib.rs");
             crate_graph.add_crate_root(
                 crate_root,
-                Edition::CURRENT,
+                default_edition,
                 Some(CrateName::new("ra_test_fixture").unwrap().into()),
                 None,
-                From::from(default_cfg.clone()),
-                Some(From::from(default_cfg)),
+                default_cfg.clone(),
+                Some(default_cfg),
                 default_env,
                 CrateOrigin::Local { repo: None, name: None },
                 false,
-                None,
+                proc_macro_cwd.clone(),
+                crate_ws_data.clone(),
             );
         } else {
             for (from, to, prelude) in crate_deps {
                 let from_id = crates[&from];
                 let to_id = crates[&to];
-                let sysroot = crate_graph[to_id].origin.is_lang();
+                let sysroot = crate_graph[to_id].basic.origin.is_lang();
                 crate_graph
-                    .add_dep(from_id, Dependency::with_prelude(to.clone(), to_id, prelude, sysroot))
+                    .add_dep(
+                        from_id,
+                        DependencyBuilder::with_prelude(to.clone(), to_id, prelude, sysroot),
+                    )
                     .unwrap();
             }
         }
@@ -275,7 +289,7 @@ impl ChangeFixture {
 
             source_change.change_file(core_file, Some(mini_core.source_code()));
 
-            let all_crates = crate_graph.crates_in_topological_order();
+            let all_crates = crate_graph.iter().collect::<Vec<_>>();
 
             let core_crate = crate_graph.add_crate_root(
                 core_file,
@@ -290,14 +304,15 @@ impl ChangeFixture {
                 )]),
                 CrateOrigin::Lang(LangCrateOrigin::Core),
                 false,
-                None,
+                proc_macro_cwd.clone(),
+                crate_ws_data.clone(),
             );
 
             for krate in all_crates {
                 crate_graph
                     .add_dep(
                         krate,
-                        Dependency::with_prelude(
+                        DependencyBuilder::with_prelude(
                             CrateName::new("core").unwrap(),
                             core_crate,
                             true,
@@ -323,7 +338,7 @@ impl ChangeFixture {
 
             source_change.change_file(proc_lib_file, Some(source));
 
-            let all_crates = crate_graph.crates_in_topological_order();
+            let all_crates = crate_graph.iter().collect::<Vec<_>>();
 
             let proc_macros_crate = crate_graph.add_crate_root(
                 proc_lib_file,
@@ -338,7 +353,8 @@ impl ChangeFixture {
                 )]),
                 CrateOrigin::Local { repo: None, name: None },
                 true,
-                None,
+                proc_macro_cwd.clone(),
+                crate_ws_data,
             );
             proc_macros.insert(proc_macros_crate, Ok(proc_macro));
 
@@ -346,7 +362,10 @@ impl ChangeFixture {
                 crate_graph
                     .add_dep(
                         krate,
-                        Dependency::new(CrateName::new("proc_macros").unwrap(), proc_macros_crate),
+                        DependencyBuilder::new(
+                            CrateName::new("proc_macros").unwrap(),
+                            proc_macros_crate,
+                        ),
                     )
                     .unwrap();
             }
@@ -358,19 +377,9 @@ impl ChangeFixture {
         };
         roots.push(root);
 
-        let mut change =
-            ChangeWithProcMacros { source_change, proc_macros: Some(proc_macros.build()) };
+        let mut change = ChangeWithProcMacros { source_change, proc_macros: Some(proc_macros) };
 
         change.source_change.set_roots(roots);
-        change.source_change.set_ws_data(
-            crate_graph
-                .iter()
-                .zip(iter::repeat(From::from(CrateWorkspaceData {
-                    data_layout: target_data_layout,
-                    toolchain,
-                })))
-                .collect(),
-        );
         change.source_change.set_crate_graph(crate_graph);
 
         ChangeFixture { file_position, files, change }
@@ -649,10 +658,14 @@ impl ProcMacroExpander for IdentityProcMacroExpander {
         _: Span,
         _: Span,
         _: Span,
-        _: Option<String>,
+        _: String,
     ) -> Result<TopSubtree, ProcMacroExpansionError> {
         Ok(subtree.clone())
     }
+
+    fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
+        other.as_any().type_id() == std::any::TypeId::of::<Self>()
+    }
 }
 
 // Expands to a macro_rules! macro, for issue #18089.
@@ -667,7 +680,7 @@ impl ProcMacroExpander for Issue18089ProcMacroExpander {
         _: Span,
         call_site: Span,
         _: Span,
-        _: Option<String>,
+        _: String,
     ) -> Result<TopSubtree, ProcMacroExpansionError> {
         let tt::TokenTree::Leaf(macro_name) = &subtree.0[2] else {
             return Err(ProcMacroExpansionError::Panic("incorrect input".to_owned()));
@@ -684,6 +697,10 @@ impl ProcMacroExpander for Issue18089ProcMacroExpander {
             #subtree
         })
     }
+
+    fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
+        other.as_any().type_id() == std::any::TypeId::of::<Self>()
+    }
 }
 
 // Pastes the attribute input as its output
@@ -698,12 +715,16 @@ impl ProcMacroExpander for AttributeInputReplaceProcMacroExpander {
         _: Span,
         _: Span,
         _: Span,
-        _: Option<String>,
+        _: String,
     ) -> Result<TopSubtree, ProcMacroExpansionError> {
         attrs
             .cloned()
             .ok_or_else(|| ProcMacroExpansionError::Panic("Expected attribute input".into()))
     }
+
+    fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
+        other.as_any().type_id() == std::any::TypeId::of::<Self>()
+    }
 }
 
 #[derive(Debug)]
@@ -717,7 +738,7 @@ impl ProcMacroExpander for Issue18840ProcMacroExpander {
         def_site: Span,
         _: Span,
         _: Span,
-        _: Option<String>,
+        _: String,
     ) -> Result<TopSubtree, ProcMacroExpansionError> {
         // Input:
         // ```
@@ -735,6 +756,10 @@ impl ProcMacroExpander for Issue18840ProcMacroExpander {
         top_subtree_delimiter_mut.close = def_site;
         Ok(result)
     }
+
+    fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
+        other.as_any().type_id() == std::any::TypeId::of::<Self>()
+    }
 }
 
 #[derive(Debug)]
@@ -748,7 +773,7 @@ impl ProcMacroExpander for MirrorProcMacroExpander {
         _: Span,
         _: Span,
         _: Span,
-        _: Option<String>,
+        _: String,
     ) -> Result<TopSubtree, ProcMacroExpansionError> {
         fn traverse(builder: &mut TopSubtreeBuilder, iter: TtIter<'_>) {
             for tt in iter.collect_vec().into_iter().rev() {
@@ -766,6 +791,10 @@ impl ProcMacroExpander for MirrorProcMacroExpander {
         traverse(&mut builder, input.iter());
         Ok(builder.build())
     }
+
+    fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
+        other.as_any().type_id() == std::any::TypeId::of::<Self>()
+    }
 }
 
 // Replaces every literal with an empty string literal and every identifier with its first letter,
@@ -782,7 +811,7 @@ impl ProcMacroExpander for ShortenProcMacroExpander {
         _: Span,
         _: Span,
         _: Span,
-        _: Option<String>,
+        _: String,
     ) -> Result<TopSubtree, ProcMacroExpansionError> {
         let mut result = input.0.clone();
         for it in &mut result {
@@ -806,6 +835,10 @@ impl ProcMacroExpander for ShortenProcMacroExpander {
             }
         }
     }
+
+    fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
+        other.as_any().type_id() == std::any::TypeId::of::<Self>()
+    }
 }
 
 // Reads ident type within string quotes, for issue #17479.
@@ -820,7 +853,7 @@ impl ProcMacroExpander for Issue17479ProcMacroExpander {
         _: Span,
         _: Span,
         _: Span,
-        _: Option<String>,
+        _: String,
     ) -> Result<TopSubtree, ProcMacroExpansionError> {
         let TokenTree::Leaf(Leaf::Literal(lit)) = &subtree.0[1] else {
             return Err(ProcMacroExpansionError::Panic("incorrect Input".into()));
@@ -831,6 +864,10 @@ impl ProcMacroExpander for Issue17479ProcMacroExpander {
             #symbol()
         })
     }
+
+    fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
+        other.as_any().type_id() == std::any::TypeId::of::<Self>()
+    }
 }
 
 // Reads ident type within string quotes, for issue #17479.
@@ -845,7 +882,7 @@ impl ProcMacroExpander for Issue18898ProcMacroExpander {
         def_site: Span,
         _: Span,
         _: Span,
-        _: Option<String>,
+        _: String,
     ) -> Result<TopSubtree, ProcMacroExpansionError> {
         let span = subtree
             .token_trees()
@@ -882,6 +919,10 @@ impl ProcMacroExpander for Issue18898ProcMacroExpander {
             }
         })
     }
+
+    fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
+        other.as_any().type_id() == std::any::TypeId::of::<Self>()
+    }
 }
 
 // Reads ident type within string quotes, for issue #17479.
@@ -896,7 +937,7 @@ impl ProcMacroExpander for DisallowCfgProcMacroExpander {
         _: Span,
         _: Span,
         _: Span,
-        _: Option<String>,
+        _: String,
     ) -> Result<TopSubtree, ProcMacroExpansionError> {
         for tt in subtree.token_trees().flat_tokens() {
             if let tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) = tt {
@@ -909,4 +950,8 @@ impl ProcMacroExpander for DisallowCfgProcMacroExpander {
         }
         Ok(subtree.clone())
     }
+
+    fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
+        other.as_any().type_id() == std::any::TypeId::of::<Self>()
+    }
 }
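
Each test expander now also implements `eq_dyn`, comparing expanders by the concrete type behind the trait object. A minimal standalone sketch of that pattern, with an invented `Expander` trait standing in for `ProcMacroExpander`:

```rust
use std::any::{Any, TypeId};

// Minimal sketch of the `eq_dyn` pattern the expanders gain here; the trait
// and the two structs are invented for illustration.
trait Expander {
    fn as_any(&self) -> &dyn Any;
    fn eq_dyn(&self, other: &dyn Expander) -> bool;
}

struct Identity;
struct Mirror;

impl Expander for Identity {
    fn as_any(&self) -> &dyn Any {
        self
    }
    fn eq_dyn(&self, other: &dyn Expander) -> bool {
        // Equal iff `other` erases the same concrete type as `Self`.
        other.as_any().type_id() == TypeId::of::<Self>()
    }
}

impl Expander for Mirror {
    fn as_any(&self) -> &dyn Any {
        self
    }
    fn eq_dyn(&self, other: &dyn Expander) -> bool {
        other.as_any().type_id() == TypeId::of::<Self>()
    }
}

fn main() {
    let a: &dyn Expander = &Identity;
    let b: &dyn Expander = &Identity;
    let c: &dyn Expander = &Mirror;
    assert!(a.eq_dyn(b));
    assert!(!a.eq_dyn(c));
}
```
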
diff --git a/src/tools/rust-analyzer/crates/test-utils/Cargo.toml b/src/tools/rust-analyzer/crates/test-utils/Cargo.toml
index b99a2c4bd75c5..c27e850ce7feb 100644
--- a/src/tools/rust-analyzer/crates/test-utils/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/test-utils/Cargo.toml
@@ -13,9 +13,8 @@ rust-version.workspace = true
 
 [dependencies]
 # Avoid adding deps here, this crate is widely used in tests it should compile fast!
-dissimilar = "1.0.7"
+dissimilar = "1.0.10"
 text-size.workspace = true
-tracing.workspace = true
 rustc-hash.workspace = true
 
 paths.workspace = true
diff --git a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
index bf53e58d70cef..4bdd791eb1671 100644
--- a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
+++ b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
@@ -70,6 +70,7 @@
 //!     unimplemented: panic
 //!     column:
 //!     addr_of:
+//!     offset_of:
 
 #![rustc_coherence_is_core]
 
@@ -414,6 +415,13 @@ pub mod mem {
     use crate::marker::DiscriminantKind;
     pub struct Discriminant<T>(<T as DiscriminantKind>::Discriminant);
     // endregion:discriminant
+
+    // region:offset_of
+    pub macro offset_of($Container:ty, $($fields:expr)+ $(,)?) {
+        // The `{}` is for better error messages
+        {builtin # offset_of($Container, $($fields)+)}
+    }
+    // endregion:offset_of
 }
 
 pub mod ptr {
@@ -423,10 +431,12 @@ pub mod ptr {
         unsafe { drop_in_place(to_drop) }
     }
     pub const unsafe fn read<T>(src: *const T) -> T {
-        *src
+        unsafe { *src }
     }
     pub const unsafe fn write<T>(dst: *mut T, src: T) {
-        *dst = src;
+        unsafe {
+            *dst = src;
+        }
     }
     // endregion:drop
 
@@ -1061,7 +1071,7 @@ pub mod cmp {
 // region:fmt
 pub mod fmt {
     pub struct Error;
-    pub type Result = Result<(), Error>;
+    pub type Result = crate::result::Result<(), Error>;
     pub struct Formatter<'a>;
     pub struct DebugTuple;
     pub struct DebugStruct;
@@ -1480,9 +1490,9 @@ pub mod iter {
                 }
             }
         }
-        pub use self::repeat::{repeat, Repeat};
+        pub use self::repeat::{Repeat, repeat};
     }
-    pub use self::sources::{repeat, Repeat};
+    pub use self::sources::{Repeat, repeat};
     // endregion:iterators
 
     mod traits {
@@ -1811,11 +1821,7 @@ pub mod num {
 #[lang = "bool"]
 impl bool {
     pub fn then<T, F: FnOnce() -> T>(self, f: F) -> Option<T> {
-        if self {
-            Some(f())
-        } else {
-            None
-        }
+        if self { Some(f()) } else { None }
     }
 }
 // endregion:bool_impl
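
minicore gains an `offset_of:` region so fixtures can exercise the `builtin # offset_of` expansion. For reference, this is roughly how the real `core::mem::offset_of!` behaves; the `Packet` struct is made up:

```rust
use core::mem::offset_of;

#[repr(C)]
struct Packet {
    kind: u8,
    len: u16,
    payload: [u8; 4],
}

fn main() {
    // `offset_of!` expands to a `usize` known at compile time.
    const LEN_OFFSET: usize = offset_of!(Packet, len);
    assert_eq!(LEN_OFFSET, 2); // u8 at 0, one padding byte, u16 at 2 under repr(C)
    assert_eq!(offset_of!(Packet, payload), 4);
    println!("len sits {LEN_OFFSET} bytes into Packet");
}
```
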
diff --git a/src/tools/rust-analyzer/crates/toolchain/Cargo.toml b/src/tools/rust-analyzer/crates/toolchain/Cargo.toml
index 38daacdf951a7..315a3a2890f19 100644
--- a/src/tools/rust-analyzer/crates/toolchain/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/toolchain/Cargo.toml
@@ -12,7 +12,7 @@ rust-version.workspace = true
 [lib]
 
 [dependencies]
-home = "0.5.4"
+home = "0.5.11"
 camino.workspace = true
 
 [lints]
diff --git a/src/tools/rust-analyzer/crates/toolchain/src/lib.rs b/src/tools/rust-analyzer/crates/toolchain/src/lib.rs
index 325b94cc33bae..8b7bf1a806415 100644
--- a/src/tools/rust-analyzer/crates/toolchain/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/toolchain/src/lib.rs
@@ -27,14 +27,14 @@ impl Tool {
     ///
     /// The current implementation checks three places for an executable to use:
     /// 1) `$CARGO_HOME/bin/<executable_name>`
-    ///      where $CARGO_HOME defaults to ~/.cargo (see <https://doc.rust-lang.org/cargo/guide/cargo-home.html>)
-    ///      example: for cargo, this tries $CARGO_HOME/bin/cargo, or ~/.cargo/bin/cargo if $CARGO_HOME is unset.
-    ///      It seems that this is a reasonable place to try for cargo, rustc, and rustup
+    ///    where $CARGO_HOME defaults to ~/.cargo (see <https://doc.rust-lang.org/cargo/guide/cargo-home.html>)
+    ///    example: for cargo, this tries $CARGO_HOME/bin/cargo, or ~/.cargo/bin/cargo if $CARGO_HOME is unset.
+    ///    It seems that this is a reasonable place to try for cargo, rustc, and rustup
     /// 2) Appropriate environment variable (erroring if this is set but not a usable executable)
-    ///      example: for cargo, this checks $CARGO environment variable; for rustc, $RUSTC; etc
+    ///    example: for cargo, this checks $CARGO environment variable; for rustc, $RUSTC; etc
     /// 3) $PATH/`<executable_name>`
-    ///      example: for cargo, this tries all paths in $PATH with appended `cargo`, returning the
-    ///      first that exists
+    ///    example: for cargo, this tries all paths in $PATH with appended `cargo`, returning the
+    ///    first that exists
     /// 4) If all else fails, we just try to use the executable name directly
     pub fn prefer_proxy(self) -> Utf8PathBuf {
         invoke(&[cargo_proxy, lookup_as_env_var, lookup_in_path], self.name())
@@ -44,14 +44,14 @@ impl Tool {
     ///
     /// The current implementation checks three places for an executable to use:
     /// 1) Appropriate environment variable (erroring if this is set but not a usable executable)
-    ///      example: for cargo, this checks $CARGO environment variable; for rustc, $RUSTC; etc
+    ///    example: for cargo, this checks $CARGO environment variable; for rustc, $RUSTC; etc
     /// 2) $PATH/`<executable_name>`
-    ///      example: for cargo, this tries all paths in $PATH with appended `cargo`, returning the
-    ///      first that exists
+    ///    example: for cargo, this tries all paths in $PATH with appended `cargo`, returning the
+    ///    first that exists
     /// 3) `$CARGO_HOME/bin/<executable_name>`
-    ///      where $CARGO_HOME defaults to ~/.cargo (see <https://doc.rust-lang.org/cargo/guide/cargo-home.html>)
-    ///      example: for cargo, this tries $CARGO_HOME/bin/cargo, or ~/.cargo/bin/cargo if $CARGO_HOME is unset.
-    ///      It seems that this is a reasonable place to try for cargo, rustc, and rustup
+    ///    where $CARGO_HOME defaults to ~/.cargo (see <https://doc.rust-lang.org/cargo/guide/cargo-home.html>)
+    ///    example: for cargo, this tries $CARGO_HOME/bin/cargo, or ~/.cargo/bin/cargo if $CARGO_HOME is unset.
+    ///    It seems that this is a reasonable place to try for cargo, rustc, and rustup
     /// 4) If all else fails, we just try to use the executable name directly
     pub fn path(self) -> Utf8PathBuf {
         invoke(&[lookup_as_env_var, lookup_in_path, cargo_proxy], self.name())
@@ -71,11 +71,22 @@ impl Tool {
     }
 }
 
-pub fn command(cmd: impl AsRef<OsStr>, working_directory: impl AsRef<Path>) -> Command {
+#[allow(clippy::disallowed_types)] /* generic parameter allows for FxHashMap */
+pub fn command<H>(
+    cmd: impl AsRef<OsStr>,
+    working_directory: impl AsRef<Path>,
+    extra_env: &std::collections::HashMap<String, Option<String>, H>,
+) -> Command {
     // we are `toolchain::command`
     #[allow(clippy::disallowed_methods)]
     let mut cmd = Command::new(cmd);
     cmd.current_dir(working_directory);
+    for env in extra_env {
+        match env {
+            (key, Some(val)) => cmd.env(key, val),
+            (key, None) => cmd.env_remove(key),
+        };
+    }
     cmd
 }
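
`toolchain::command` now takes an `extra_env` map where `Some(val)` sets a variable and `None` removes it before the command is spawned. A hedged sketch of the same convention using only the standard library — the helper name and the example variables are invented:

```rust
use std::collections::HashMap;
use std::process::Command;

// Minimal sketch of the `Some(val)` = set, `None` = remove convention used by
// the new `toolchain::command` signature; not the crate's actual function.
fn command_with_env(program: &str, extra_env: &HashMap<String, Option<String>>) -> Command {
    let mut cmd = Command::new(program);
    for (key, val) in extra_env {
        match val {
            Some(val) => cmd.env(key, val),
            None => cmd.env_remove(key),
        };
    }
    cmd
}

fn main() {
    let mut env = HashMap::new();
    env.insert("RUSTFLAGS".to_owned(), Some("-Cdebuginfo=0".to_owned()));
    env.insert("RUSTC_WRAPPER".to_owned(), None); // explicitly unset
    let mut cmd = command_with_env("cargo", &env);
    cmd.arg("--version");
    println!("{cmd:?}");
}
```
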
 
diff --git a/src/tools/rust-analyzer/crates/tt/src/iter.rs b/src/tools/rust-analyzer/crates/tt/src/iter.rs
index 1d88218810de6..0418c00174bdb 100644
--- a/src/tools/rust-analyzer/crates/tt/src/iter.rs
+++ b/src/tools/rust-analyzer/crates/tt/src/iter.rs
@@ -6,7 +6,7 @@ use std::fmt;
 use arrayvec::ArrayVec;
 use intern::sym;
 
-use crate::{Ident, Leaf, Punct, Spacing, Subtree, TokenTree, TokenTreesView};
+use crate::{Ident, Leaf, MAX_GLUED_PUNCT_LEN, Punct, Spacing, Subtree, TokenTree, TokenTreesView};
 
 #[derive(Clone)]
 pub struct TtIter<'a, S> {
@@ -111,7 +111,7 @@ impl<'a, S: Copy> TtIter<'a, S> {
     ///
     /// This method currently may return a single quotation, which is part of lifetime ident and
     /// conceptually not a punct in the context of mbe. Callers should handle this.
-    pub fn expect_glued_punct(&mut self) -> Result<ArrayVec<Punct<S>, 3>, ()> {
+    pub fn expect_glued_punct(&mut self) -> Result<ArrayVec<Punct<S>, MAX_GLUED_PUNCT_LEN>, ()> {
         let TtElement::Leaf(&Leaf::Punct(first)) = self.next().ok_or(())? else {
             return Err(());
         };
@@ -145,7 +145,6 @@ impl<'a, S: Copy> TtIter<'a, S> {
             }
             ('-' | '!' | '*' | '/' | '&' | '%' | '^' | '+' | '<' | '=' | '>' | '|', '=', _)
             | ('-' | '=' | '>', '>', _)
-            | (_, _, Some(';'))
             | ('<', '-', _)
             | (':', ':', _)
             | ('.', '.', _)
diff --git a/src/tools/rust-analyzer/crates/tt/src/lib.rs b/src/tools/rust-analyzer/crates/tt/src/lib.rs
index 1cfead54f192d..36ccb67f3b8df 100644
--- a/src/tools/rust-analyzer/crates/tt/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/tt/src/lib.rs
@@ -22,6 +22,8 @@ use stdx::{impl_from, itertools::Itertools as _};
 
 pub use text_size::{TextRange, TextSize};
 
+pub const MAX_GLUED_PUNCT_LEN: usize = 3;
+
 #[derive(Clone, PartialEq, Debug)]
 pub struct Lit {
     pub kind: LitKind,
@@ -243,6 +245,23 @@ impl<S: Copy> TopSubtreeBuilder<S> {
         self.token_trees.extend(tt.0.iter().cloned());
     }
 
+    /// Like [`Self::extend_with_tt()`], but makes sure the new tokens will never be
+    /// joint with whatever comes after them.
+    pub fn extend_with_tt_alone(&mut self, tt: TokenTreesView<'_, S>) {
+        if let Some((last, before_last)) = tt.0.split_last() {
+            self.token_trees.reserve(tt.0.len());
+            self.token_trees.extend(before_last.iter().cloned());
+            let last = if let TokenTree::Leaf(Leaf::Punct(last)) = last {
+                let mut last = *last;
+                last.spacing = Spacing::Alone;
+                TokenTree::Leaf(Leaf::Punct(last))
+            } else {
+                last.clone()
+            };
+            self.token_trees.push(last);
+        }
+    }
+
     pub fn expected_delimiters(&self) -> impl Iterator<Item = &Delimiter<S>> {
         self.unclosed_subtree_indices.iter().rev().map(|&subtree_idx| {
             let TokenTree::Subtree(subtree) = &self.token_trees[subtree_idx] else {
@@ -363,7 +382,8 @@ impl<'a, S: Copy> TokenTreesView<'a, S> {
     ) -> impl Iterator<Item = TokenTreesView<'a, S>> {
         let mut subtree_iter = self.iter();
         let mut need_to_yield_even_if_empty = true;
-        let result = std::iter::from_fn(move || {
+
+        std::iter::from_fn(move || {
             if subtree_iter.is_empty() && !need_to_yield_even_if_empty {
                 return None;
             };
@@ -379,8 +399,7 @@ impl<'a, S: Copy> TokenTreesView<'a, S> {
                 result = subtree_iter.from_savepoint(savepoint);
             }
             Some(result)
-        });
-        result
+        })
     }
 }
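
`extend_with_tt_alone` copies a token-tree view but rewrites the final punct's spacing to `Alone`, so nothing appended afterwards can be glued onto it. A simplified standalone sketch of that split-last-and-adjust pattern — the `Punct` and `Spacing` types below are stand-ins, not the crate's:

```rust
// Copy all but the last token, then push an adjusted copy of the last one.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Spacing {
    Joint,
    Alone,
}

#[derive(Clone, Debug)]
struct Punct {
    ch: char,
    spacing: Spacing,
}

fn extend_alone(out: &mut Vec<Punct>, src: &[Punct]) {
    if let Some((last, before_last)) = src.split_last() {
        out.reserve(src.len());
        out.extend(before_last.iter().cloned());
        let mut last = last.clone();
        // Whatever gets appended later must not be glued onto this token.
        last.spacing = Spacing::Alone;
        out.push(last);
    }
}

fn main() {
    let arrow = vec![
        Punct { ch: '=', spacing: Spacing::Joint },
        Punct { ch: '>', spacing: Spacing::Joint },
    ];
    let mut out = Vec::new();
    extend_alone(&mut out, &arrow);
    assert_eq!(out.iter().map(|p| p.ch).collect::<String>(), "=>");
    assert_eq!(out.last().unwrap().spacing, Spacing::Alone);
    println!("{out:?}");
}
```
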
 
diff --git a/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml b/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml
index 48b4d22de2f0b..9b32ee17abcf3 100644
--- a/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml
@@ -13,7 +13,7 @@ rust-version.workspace = true
 
 [dependencies]
 tracing.workspace = true
-walkdir = "2.3.2"
+walkdir = "2.5.0"
 crossbeam-channel.workspace = true
 notify = "8.0.0"
 rayon = "1.10.0"
diff --git a/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs b/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs
index 320033417640a..e918fd0887db9 100644
--- a/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs
@@ -13,7 +13,7 @@ use std::{
     sync::atomic::AtomicUsize,
 };
 
-use crossbeam_channel::{select, unbounded, Receiver, Sender};
+use crossbeam_channel::{Receiver, Sender, select, unbounded};
 use notify::{Config, EventKind, RecommendedWatcher, RecursiveMode, Watcher};
 use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
 use rayon::iter::{IndexedParallelIterator as _, IntoParallelIterator as _, ParallelIterator};
diff --git a/src/tools/rust-analyzer/crates/vfs/src/file_set.rs b/src/tools/rust-analyzer/crates/vfs/src/file_set.rs
index d7d283c3eb85e..1228e2e1774db 100644
--- a/src/tools/rust-analyzer/crates/vfs/src/file_set.rs
+++ b/src/tools/rust-analyzer/crates/vfs/src/file_set.rs
@@ -211,11 +211,7 @@ impl fst::Automaton for PrefixOf<'_> {
         state != !0
     }
     fn accept(&self, &state: &usize, byte: u8) -> usize {
-        if self.prefix_of.get(state) == Some(&byte) {
-            state + 1
-        } else {
-            !0
-        }
+        if self.prefix_of.get(state) == Some(&byte) { state + 1 } else { !0 }
     }
 }
 
diff --git a/src/tools/rust-analyzer/crates/vfs/src/lib.rs b/src/tools/rust-analyzer/crates/vfs/src/lib.rs
index 3feca512e55a5..50e388d780022 100644
--- a/src/tools/rust-analyzer/crates/vfs/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/vfs/src/lib.rs
@@ -52,12 +52,12 @@ pub use crate::{
     anchored_path::{AnchoredPath, AnchoredPathBuf},
     vfs_path::VfsPath,
 };
-use indexmap::{map::Entry, IndexMap};
+use indexmap::{IndexMap, map::Entry};
 pub use paths::{AbsPath, AbsPathBuf};
 
 use rustc_hash::FxHasher;
 use stdx::hash_once;
-use tracing::{span, Level};
+use tracing::{Level, span};
 
 /// Handle to a file in [`Vfs`]
 ///
diff --git a/src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs b/src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs
index cce8595cc4a95..c35b7f229f64e 100644
--- a/src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs
+++ b/src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs
@@ -97,11 +97,7 @@ impl VfsPath {
     /// Returns [`None`] if the path is a root or prefix.
     pub fn parent(&self) -> Option<VfsPath> {
         let mut parent = self.clone();
-        if parent.pop() {
-            Some(parent)
-        } else {
-            None
-        }
+        if parent.pop() { Some(parent) } else { None }
     }
 
     /// Returns `self`'s base name and file extension.
diff --git a/src/tools/rust-analyzer/docs/book/README.md b/src/tools/rust-analyzer/docs/book/README.md
index 043524b2341b7..464ea02512f74 100644
--- a/src/tools/rust-analyzer/docs/book/README.md
+++ b/src/tools/rust-analyzer/docs/book/README.md
@@ -8,6 +8,7 @@ To run the documentation site locally:
 
 ```shell
 cargo install mdbook
+cargo xtask codegen
 cd docs/book
 mdbook serve
 # make changes to documentation files in doc/book/src
diff --git a/src/tools/rust-analyzer/docs/book/book.toml b/src/tools/rust-analyzer/docs/book/book.toml
index a6f6a6ed784db..edf11fadf0831 100644
--- a/src/tools/rust-analyzer/docs/book/book.toml
+++ b/src/tools/rust-analyzer/docs/book/book.toml
@@ -6,7 +6,7 @@ src = "src"
 title = "rust-analyzer"
 
 [rust]
-edition = "2021"
+edition = "2024"
 
 [output.html]
 edit-url-template = "https://github.com/rust-lang/rust-analyzer/edit/master/docs/book/{path}"
diff --git a/src/tools/rust-analyzer/docs/book/src/assists_generated.md b/src/tools/rust-analyzer/docs/book/src/assists_generated.md
deleted file mode 100644
index 9a80185179279..0000000000000
--- a/src/tools/rust-analyzer/docs/book/src/assists_generated.md
+++ /dev/null
@@ -1,3890 +0,0 @@
-//! Generated by `cargo xtask codegen assists-doc-tests`, do not edit by hand.
-
-### `add_braces`
-**Source:**  [add_braces.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/add_braces.rs#L8) 
-
-Adds braces to lambda and match arm expressions.
-
-#### Before
-```rust
-fn foo(n: i32) -> i32 {
-    match n {
-        1 =>┃ n + 1,
-        _ => 0
-    }
-}
-```
-
-#### After
-```rust
-fn foo(n: i32) -> i32 {
-    match n {
-        1 => {
-            n + 1
-        },
-        _ => 0
-    }
-}
-```
-
-
-### `add_explicit_enum_discriminant`
-**Source:**  [add_explicit_enum_discriminant.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/add_explicit_enum_discriminant.rs#L11) 
-
-Adds explicit discriminant to all enum variants.
-
-#### Before
-```rust
-enum TheEnum┃ {
-    Foo,
-    Bar,
-    Baz = 42,
-    Quux,
-}
-```
-
-#### After
-```rust
-enum TheEnum {
-    Foo = 0,
-    Bar = 1,
-    Baz = 42,
-    Quux = 43,
-}
-```
-
-
-### `add_explicit_type`
-**Source:**  [add_explicit_type.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/add_explicit_type.rs#L7) 
-
-Specify type for a let binding.
-
-#### Before
-```rust
-fn main() {
-    let x┃ = 92;
-}
-```
-
-#### After
-```rust
-fn main() {
-    let x: i32 = 92;
-}
-```
-
-
-### `add_hash`
-**Source:**  [raw_string.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/raw_string.rs#L89) 
-
-Adds a hash to a raw string literal.
-
-#### Before
-```rust
-fn main() {
-    r#"Hello,┃ World!"#;
-}
-```
-
-#### After
-```rust
-fn main() {
-    r##"Hello, World!"##;
-}
-```
-
-
-### `add_impl_default_members`
-**Source:**  [add_missing_impl_members.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/add_missing_impl_members.rs#L58) 
-
-Adds scaffold for overriding default impl members.
-
-#### Before
-```rust
-trait Trait {
-    type X;
-    fn foo(&self);
-    fn bar(&self) {}
-}
-
-impl Trait for () {
-    type X = ();
-    fn foo(&self) {}┃
-}
-```
-
-#### After
-```rust
-trait Trait {
-    type X;
-    fn foo(&self);
-    fn bar(&self) {}
-}
-
-impl Trait for () {
-    type X = ();
-    fn foo(&self) {}
-
-    ┃fn bar(&self) {}
-}
-```
-
-
-### `add_impl_missing_members`
-**Source:**  [add_missing_impl_members.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/add_missing_impl_members.rs#L16) 
-
-Adds scaffold for required impl members.
-
-#### Before
-```rust
-trait Trait<T> {
-    type X;
-    fn foo(&self) -> T;
-    fn bar(&self) {}
-}
-
-impl Trait<u32> for () {┃
-
-}
-```
-
-#### After
-```rust
-trait Trait<T> {
-    type X;
-    fn foo(&self) -> T;
-    fn bar(&self) {}
-}
-
-impl Trait<u32> for () {
-    ┃type X;
-
-    fn foo(&self) -> u32 {
-        todo!()
-    }
-}
-```
-
-
-### `add_label_to_loop`
-**Source:**  [add_label_to_loop.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/add_label_to_loop.rs#L9) 
-
-Adds a label to a loop.
-
-#### Before
-```rust
-fn main() {
-    loop┃ {
-        break;
-        continue;
-    }
-}
-```
-
-#### After
-```rust
-fn main() {
-    'l: loop {
-        break 'l;
-        continue 'l;
-    }
-}
-```
-
-
-### `add_lifetime_to_type`
-**Source:**  [add_lifetime_to_type.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/add_lifetime_to_type.rs#L5) 
-
-Adds a new lifetime to a struct, enum or union.
-
-#### Before
-```rust
-struct Point {
-    x: &┃u32,
-    y: u32,
-}
-```
-
-#### After
-```rust
-struct Point<'a> {
-    x: &'a u32,
-    y: u32,
-}
-```
-
-
-### `add_missing_match_arms`
-**Source:**  [add_missing_match_arms.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/add_missing_match_arms.rs#L16) 
-
-Adds missing clauses to a `match` expression.
-
-#### Before
-```rust
-enum Action { Move { distance: u32 }, Stop }
-
-fn handle(action: Action) {
-    match action {
-        ┃
-    }
-}
-```
-
-#### After
-```rust
-enum Action { Move { distance: u32 }, Stop }
-
-fn handle(action: Action) {
-    match action {
-        Action::Move { distance } => ${1:todo!()},
-        Action::Stop => ${2:todo!()},┃
-    }
-}
-```
-
-
-### `add_return_type`
-**Source:**  [add_return_type.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/add_return_type.rs#L6) 
-
-Adds the return type to a function or closure, inferred from its tail expression, if it doesn't have a return
-type specified. This assist is usable in a function's or closure's tail expression or return type position.
-
-#### Before
-```rust
-fn foo() { 4┃2i32 }
-```
-
-#### After
-```rust
-fn foo() -> i32 { 42i32 }
-```
-
-
-### `add_turbo_fish`
-**Source:**  [add_turbo_fish.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/add_turbo_fish.rs#L14) 
-
-Adds `::<_>` to a call of a generic method or function.
-
-#### Before
-```rust
-fn make<T>() -> T { todo!() }
-fn main() {
-    let x = make┃();
-}
-```
-
-#### After
-```rust
-fn make<T>() -> T { todo!() }
-fn main() {
-    let x = make::<${0:_}>();
-}
-```
-
-
-### `apply_demorgan`
-**Source:**  [apply_demorgan.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/apply_demorgan.rs#L23) 
-
-Apply [De Morgan's law](https://en.wikipedia.org/wiki/De_Morgan%27s_laws).
-This transforms expressions of the form `!l || !r` into `!(l && r)`.
-This also works with `&&`. This assist can only be applied with the cursor
-on either `||` or `&&`.
-
-#### Before
-```rust
-fn main() {
-    if x != 4 ||┃ y < 3.14 {}
-}
-```
-
-#### After
-```rust
-fn main() {
-    if !(x == 4 && y >= 3.14) {}
-}
-```
-
-
-### `apply_demorgan_iterator`
-**Source:**  [apply_demorgan.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/apply_demorgan.rs#L156) 
-
-Apply [De Morgan's law](https://en.wikipedia.org/wiki/De_Morgan%27s_laws) to
-`Iterator::all` and `Iterator::any`.
-
-This transforms expressions of the form `!iter.any(|x| predicate(x))` into
-`iter.all(|x| !predicate(x))`, and vice versa: `!iter.all(|x| predicate(x))`
-becomes `iter.any(|x| !predicate(x))`.
-
-#### Before
-```rust
-fn main() {
-    let arr = [1, 2, 3];
-    if !arr.into_iter().┃any(|num| num == 4) {
-        println!("foo");
-    }
-}
-```
-
-#### After
-```rust
-fn main() {
-    let arr = [1, 2, 3];
-    if arr.into_iter().all(|num| num != 4) {
-        println!("foo");
-    }
-}
-```
-
-
-### `auto_import`
-**Source:**  [auto_import.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/auto_import.rs#L73) 
-
-If the name is unresolved, provides all possible imports for it.
-
-#### Before
-```rust
-fn main() {
-    let map = HashMap┃::new();
-}
-```
-
-#### After
-```rust
-use std::collections::HashMap;
-
-fn main() {
-    let map = HashMap::new();
-}
-```
-
-
-### `bind_unused_param`
-**Source:**  [bind_unused_param.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/bind_unused_param.rs#L12) 
-
-Binds unused function parameter to an underscore.
-
-#### Before
-```rust
-fn some_function(x: i32┃) {}
-```
-
-#### After
-```rust
-fn some_function(x: i32) {
-    let _ = x;
-}
-```
-
-
-### `change_visibility`
-**Source:**  [change_visibility.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/change_visibility.rs#L13) 
-
-Adds or changes existing visibility specifier.
-
-#### Before
-```rust
-┃fn frobnicate() {}
-```
-
-#### After
-```rust
-pub(crate) fn frobnicate() {}
-```
-
-
-### `comment_to_doc`
-**Source:**  [convert_comment_from_or_to_doc.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_comment_from_or_to_doc.rs#L9) 
-
-Converts comments to documentation.
-
-#### Before
-```rust
-// Wow what ┃a nice module
-// I sure hope this shows up when I hover over it
-```
-
-#### After
-```rust
-//! Wow what a nice module
-//! I sure hope this shows up when I hover over it
-```
-
-
-### `convert_bool_then_to_if`
-**Source:**  [convert_bool_then.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_bool_then.rs#L143) 
-
-Converts a `bool::then` method call to an equivalent if expression.
-
-#### Before
-```rust
-fn main() {
-    (0 == 0).then┃(|| val)
-}
-```
-
-#### After
-```rust
-fn main() {
-    if 0 == 0 {
-        Some(val)
-    } else {
-        None
-    }
-}
-```
-
-
-### `convert_bool_to_enum`
-**Source:**  [convert_bool_to_enum.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_bool_to_enum.rs#L29) 
-
-This converts boolean local variables, fields, constants, and statics into a new
-enum with two variants `Bool::True` and `Bool::False`, as well as replacing
-all assignments with the variants and replacing all usages with `== Bool::True` or
-`== Bool::False`.
-
-#### Before
-```rust
-fn main() {
-    let ┃bool = true;
-
-    if bool {
-        println!("foo");
-    }
-}
-```
-
-#### After
-```rust
-#[derive(PartialEq, Eq)]
-enum Bool { True, False }
-
-fn main() {
-    let bool = Bool::True;
-
-    if bool == Bool::True {
-        println!("foo");
-    }
-}
-```
-
-
-### `convert_closure_to_fn`
-**Source:**  [convert_closure_to_fn.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_closure_to_fn.rs#L25) 
-
-This converts a closure to a freestanding function, changing all captures to parameters.
-
-#### Before
-```rust
-fn main() {
-    let mut s = String::new();
-    let closure = |┃a| s.push_str(a);
-    closure("abc");
-}
-```
-
-#### After
-```rust
-fn main() {
-    let mut s = String::new();
-    fn closure(a: &str, s: &mut String) {
-        s.push_str(a)
-    }
-    closure("abc", &mut s);
-}
-```
-
-
-### `convert_for_loop_with_for_each`
-**Source:**  [convert_iter_for_each_to_for.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs#L76) 
-
-Converts a for loop into a for_each loop on the Iterator.
-
-#### Before
-```rust
-fn main() {
-    let x = vec![1, 2, 3];
-    for┃ v in x {
-        let y = v * 2;
-    }
-}
-```
-
-#### After
-```rust
-fn main() {
-    let x = vec![1, 2, 3];
-    x.into_iter().for_each(|v| {
-        let y = v * 2;
-    });
-}
-```
-
-
-### `convert_from_to_tryfrom`
-**Source:**  [convert_from_to_tryfrom.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs#L10) 
-
-Converts a From impl to a TryFrom impl, wrapping returns in `Ok`.
-
-#### Before
-```rust
-impl ┃From<usize> for Thing {
-    fn from(val: usize) -> Self {
-        Thing {
-            b: val.to_string(),
-            a: val
-        }
-    }
-}
-```
-
-#### After
-```rust
-impl TryFrom<usize> for Thing {
-    type Error = ${0:()};
-
-    fn try_from(val: usize) -> Result<Self, Self::Error> {
-        Ok(Thing {
-            b: val.to_string(),
-            a: val
-        })
-    }
-}
-```
-
-
-### `convert_if_to_bool_then`
-**Source:**  [convert_bool_then.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_bool_then.rs#L21) 
-
-Converts an if expression into a corresponding `bool::then` call.
-
-#### Before
-```rust
-fn main() {
-    if┃ cond {
-        Some(val)
-    } else {
-        None
-    }
-}
-```
-
-#### After
-```rust
-fn main() {
-    cond.then(|| val)
-}
-```
-
-
-### `convert_integer_literal`
-**Source:**  [convert_integer_literal.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_integer_literal.rs#L5) 
-
-Converts the base of integer literals to other bases.
-
-#### Before
-```rust
-const _: i32 = 10┃;
-```
-
-#### After
-```rust
-const _: i32 = 0b1010;
-```
-
-
-### `convert_into_to_from`
-**Source:**  [convert_into_to_from.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_into_to_from.rs#L8) 
-
-Converts an Into impl to an equivalent From impl.
-
-#### Before
-```rust
-impl ┃Into<Thing> for usize {
-    fn into(self) -> Thing {
-        Thing {
-            b: self.to_string(),
-            a: self
-        }
-    }
-}
-```
-
-#### After
-```rust
-impl From<usize> for Thing {
-    fn from(val: usize) -> Self {
-        Thing {
-            b: val.to_string(),
-            a: val
-        }
-    }
-}
-```
-
-
-### `convert_iter_for_each_to_for`
-**Source:**  [convert_iter_for_each_to_for.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs#L11) 
-
-Converts an Iterator::for_each function into a for loop.
-
-#### Before
-```rust
-fn main() {
-    let iter = iter::repeat((9, 2));
-    iter.for_each┃(|(x, y)| {
-        println!("x: {}, y: {}", x, y);
-    });
-}
-```
-
-#### After
-```rust
-fn main() {
-    let iter = iter::repeat((9, 2));
-    for (x, y) in iter {
-        println!("x: {}, y: {}", x, y);
-    }
-}
-```
-
-
-### `convert_let_else_to_match`
-**Source:**  [convert_let_else_to_match.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_let_else_to_match.rs#L9) 
-
-Converts let-else statement to let statement and match expression.
-
-#### Before
-```rust
-fn main() {
-    let Ok(mut x) = f() else┃ { return };
-}
-```
-
-#### After
-```rust
-fn main() {
-    let mut x = match f() {
-        Ok(x) => x,
-        _ => return,
-    };
-}
-```
-
-
-### `convert_match_to_let_else`
-**Source:**  [convert_match_to_let_else.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_match_to_let_else.rs#L12) 
-
-Converts let statement with match initializer to let-else statement.
-
-#### Before
-```rust
-fn foo(opt: Option<()>) {
-    let val┃ = match opt {
-        Some(it) => it,
-        None => return,
-    };
-}
-```
-
-#### After
-```rust
-fn foo(opt: Option<()>) {
-    let Some(val) = opt else { return };
-}
-```
-
-
-### `convert_named_struct_to_tuple_struct`
-**Source:**  [convert_named_struct_to_tuple_struct.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs#L11) 
-
-Converts struct with named fields to tuple struct, and analogously for enum variants with named
-fields.
-
-#### Before
-```rust
-struct Point┃ { x: f32, y: f32 }
-
-impl Point {
-    pub fn new(x: f32, y: f32) -> Self {
-        Point { x, y }
-    }
-
-    pub fn x(&self) -> f32 {
-        self.x
-    }
-
-    pub fn y(&self) -> f32 {
-        self.y
-    }
-}
-```
-
-#### After
-```rust
-struct Point(f32, f32);
-
-impl Point {
-    pub fn new(x: f32, y: f32) -> Self {
-        Point(x, y)
-    }
-
-    pub fn x(&self) -> f32 {
-        self.0
-    }
-
-    pub fn y(&self) -> f32 {
-        self.1
-    }
-}
-```
-
-
-### `convert_nested_function_to_closure`
-**Source:**  [convert_nested_function_to_closure.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_nested_function_to_closure.rs#L7) 
-
-Converts a function that is defined within the body of another function into a closure.
-
-#### Before
-```rust
-fn main() {
-    fn fo┃o(label: &str, number: u64) {
-        println!("{}: {}", label, number);
-    }
-
-    foo("Bar", 100);
-}
-```
-
-#### After
-```rust
-fn main() {
-    let foo = |label: &str, number: u64| {
-        println!("{}: {}", label, number);
-    };
-
-    foo("Bar", 100);
-}
-```
-
-
-### `convert_to_guarded_return`
-**Source:**  [convert_to_guarded_return.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_to_guarded_return.rs#L24) 
-
-Replace a large conditional with a guarded return.
-
-#### Before
-```rust
-fn main() {
-    ┃if cond {
-        foo();
-        bar();
-    }
-}
-```
-
-#### After
-```rust
-fn main() {
-    if !cond {
-        return;
-    }
-    foo();
-    bar();
-}
-```
-
-
-### `convert_tuple_return_type_to_struct`
-**Source:**  [convert_tuple_return_type_to_struct.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs#L20) 
-
-This converts the return type of a function from a tuple type
-into a tuple struct and updates the body accordingly.
-
-#### Before
-```rust
-fn bar() {
-    let (a, b, c) = foo();
-}
-
-fn foo() -> (┃u32, u32, u32) {
-    (1, 2, 3)
-}
-```
-
-#### After
-```rust
-fn bar() {
-    let FooResult(a, b, c) = foo();
-}
-
-struct FooResult(u32, u32, u32);
-
-fn foo() -> FooResult {
-    FooResult(1, 2, 3)
-}
-```
-
-
-### `convert_tuple_struct_to_named_struct`
-**Source:**  [convert_tuple_struct_to_named_struct.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs#L10) 
-
-Converts tuple struct to struct with named fields, and analogously for tuple enum variants.
-
-#### Before
-```rust
-struct Point┃(f32, f32);
-
-impl Point {
-    pub fn new(x: f32, y: f32) -> Self {
-        Point(x, y)
-    }
-
-    pub fn x(&self) -> f32 {
-        self.0
-    }
-
-    pub fn y(&self) -> f32 {
-        self.1
-    }
-}
-```
-
-#### After
-```rust
-struct Point { field1: f32, field2: f32 }
-
-impl Point {
-    pub fn new(x: f32, y: f32) -> Self {
-        Point { field1: x, field2: y }
-    }
-
-    pub fn x(&self) -> f32 {
-        self.field1
-    }
-
-    pub fn y(&self) -> f32 {
-        self.field2
-    }
-}
-```
-
-
-### `convert_two_arm_bool_match_to_matches_macro`
-**Source:**  [convert_two_arm_bool_match_to_matches_macro.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs#L8) 
-
-Converts a 2-arm match that evaluates to a boolean into the equivalent `matches!` invocation.
-
-#### Before
-```rust
-fn main() {
-    match scrutinee┃ {
-        Some(val) if val.cond() => true,
-        _ => false,
-    }
-}
-```
-
-#### After
-```rust
-fn main() {
-    matches!(scrutinee, Some(val) if val.cond())
-}
-```
-
-
-### `convert_while_to_loop`
-**Source:**  [convert_while_to_loop.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_while_to_loop.rs#L20) 
-
-Replace a while with a loop.
-
-#### Before
-```rust
-fn main() {
-    ┃while cond {
-        foo();
-    }
-}
-```
-
-#### After
-```rust
-fn main() {
-    loop {
-        if !cond {
-            break;
-        }
-        foo();
-    }
-}
-```
-
-
-### `destructure_struct_binding`
-**Source:**  [destructure_struct_binding.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/destructure_struct_binding.rs#L18) 
-
-Destructures a struct binding in place.
-
-#### Before
-```rust
-struct Foo {
-    bar: i32,
-    baz: i32,
-}
-fn main() {
-    let ┃foo = Foo { bar: 1, baz: 2 };
-    let bar2 = foo.bar;
-    let baz2 = &foo.baz;
-}
-```
-
-#### After
-```rust
-struct Foo {
-    bar: i32,
-    baz: i32,
-}
-fn main() {
-    let Foo { bar, baz } = Foo { bar: 1, baz: 2 };
-    let bar2 = bar;
-    let baz2 = &baz;
-}
-```
-
-
-### `destructure_tuple_binding`
-**Source:**  [destructure_tuple_binding.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/destructure_tuple_binding.rs#L19) 
-
-Destructures a tuple binding in place.
-
-#### Before
-```rust
-fn main() {
-    let ┃t = (1,2);
-    let v = t.0;
-}
-```
-
-#### After
-```rust
-fn main() {
-    let (┃_0, _1) = (1,2);
-    let v = _0;
-}
-```
-
-
-### `desugar_async_into_impl_future`
-**Source:**  [toggle_async_sugar.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/toggle_async_sugar.rs#L103) 
-
-Rewrites an asynchronous function from `async fn` into `-> impl Future`.
-This action does not touch the function body, so the `0` body below
-does not become `async { 0 }`.
-
-#### Before
-```rust
-pub as┃ync fn foo() -> usize {
-    0
-}
-```
-
-#### After
-```rust
-pub fn foo() -> impl core::future::Future<Output = usize> {
-    0
-}
-```
-
-
-### `desugar_doc_comment`
-**Source:**  [desugar_doc_comment.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/desugar_doc_comment.rs#L14) 
-
-Desugars doc-comments to the attribute form.
-
-#### Before
-```rust
-/// Multi-line┃
-/// comment
-```
-
-#### After
-```rust
-#[doc = r"Multi-line
-comment"]
-```
-
-
-### `expand_glob_import`
-**Source:**  [expand_glob_import.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/expand_glob_import.rs#L19) 
-
-Expands glob imports.
-
-#### Before
-```rust
-mod foo {
-    pub struct Bar;
-    pub struct Baz;
-}
-
-use foo::*┃;
-
-fn qux(bar: Bar, baz: Baz) {}
-```
-
-#### After
-```rust
-mod foo {
-    pub struct Bar;
-    pub struct Baz;
-}
-
-use foo::{Bar, Baz};
-
-fn qux(bar: Bar, baz: Baz) {}
-```
-
-
-### `expand_glob_reexport`
-**Source:**  [expand_glob_import.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/expand_glob_import.rs#L81) 
-
-Expands non-private glob imports.
-
-#### Before
-```rust
-mod foo {
-    pub struct Bar;
-    pub struct Baz;
-}
-
-pub use foo::*┃;
-```
-
-#### After
-```rust
-mod foo {
-    pub struct Bar;
-    pub struct Baz;
-}
-
-pub use foo::{Bar, Baz};
-```
-
-
-### `expand_record_rest_pattern`
-**Source:**  [expand_rest_pattern.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/expand_rest_pattern.rs#L26) 
-
-Fills fields by replacing rest pattern in record patterns.
-
-#### Before
-```rust
-struct Bar { y: Y, z: Z }
-
-fn foo(bar: Bar) {
-    let Bar { ..┃ } = bar;
-}
-```
-
-#### After
-```rust
-struct Bar { y: Y, z: Z }
-
-fn foo(bar: Bar) {
-    let Bar { y, z  } = bar;
-}
-```
-
-
-### `expand_tuple_struct_rest_pattern`
-**Source:**  [expand_rest_pattern.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/expand_rest_pattern.rs#L82) 
-
-Fills fields by replacing rest pattern in tuple struct patterns.
-
-#### Before
-```rust
-struct Bar(Y, Z);
-
-fn foo(bar: Bar) {
-    let Bar(..┃) = bar;
-}
-```
-
-#### After
-```rust
-struct Bar(Y, Z);
-
-fn foo(bar: Bar) {
-    let Bar(_0, _1) = bar;
-}
-```
-
-
-### `extract_constant`
-**Source:**  [extract_variable.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/extract_variable.rs#L35) 
-
-Extracts subexpression into a constant.
-
-#### Before
-```rust
-fn main() {
-    ┃(1 + 2)┃ * 4;
-}
-```
-
-#### After
-```rust
-fn main() {
-    const ┃VAR_NAME: i32 = 1 + 2;
-    VAR_NAME * 4;
-}
-```
-
-
-### `extract_expressions_from_format_string`
-**Source:**  [extract_expressions_from_format_string.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs#L14) 
-
-Move an expression out of a format string.
-
-#### Before
-```rust
-fn main() {
-    print!("{var} {x + 1}┃");
-}
-```
-
-#### After
-```rust
-fn main() {
-    print!("{var} {}"┃, x + 1);
-}
-```
-
-
-### `extract_function`
-**Source:**  [extract_function.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/extract_function.rs#L39) 
-
-Extracts selected statements and comments into new function.
-
-#### Before
-```rust
-fn main() {
-    let n = 1;
-    ┃let m = n + 2;
-    // calculate
-    let k = m + n;┃
-    let g = 3;
-}
-```
-
-#### After
-```rust
-fn main() {
-    let n = 1;
-    fun_name(n);
-    let g = 3;
-}
-
-fn ┃fun_name(n: i32) {
-    let m = n + 2;
-    // calculate
-    let k = m + n;
-}
-```
-
-
-### `extract_module`
-**Source:**  [extract_module.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/extract_module.rs#L29) 
-
-Extracts a selected region as a separate module. All the references, visibility and imports are
-resolved.
-
-#### Before
-```rust
-┃fn foo(name: i32) -> i32 {
-    name + 1
-}┃
-
-fn bar(name: i32) -> i32 {
-    name + 2
-}
-```
-
-#### After
-```rust
-mod modname {
-    pub(crate) fn foo(name: i32) -> i32 {
-        name + 1
-    }
-}
-
-fn bar(name: i32) -> i32 {
-    name + 2
-}
-```
-
-
-### `extract_static`
-**Source:**  [extract_variable.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/extract_variable.rs#L52) 
-
-Extracts subexpression into a static.
-
-#### Before
-```rust
-fn main() {
-    ┃(1 + 2)┃ * 4;
-}
-```
-
-#### After
-```rust
-fn main() {
-    static ┃VAR_NAME: i32 = 1 + 2;
-    VAR_NAME * 4;
-}
-```
-
-
-### `extract_struct_from_enum_variant`
-**Source:**  [extract_struct_from_enum_variant.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs#L26) 
-
-Extracts a struct from enum variant.
-
-#### Before
-```rust
-enum A { ┃One(u32, u32) }
-```
-
-#### After
-```rust
-struct One(u32, u32);
-
-enum A { One(One) }
-```
-
-
-### `extract_type_alias`
-**Source:**  [extract_type_alias.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/extract_type_alias.rs#L10) 
-
-Extracts the selected type as a type alias.
-
-#### Before
-```rust
-struct S {
-    field: ┃(u8, u8, u8)┃,
-}
-```
-
-#### After
-```rust
-type ┃Type = (u8, u8, u8);
-
-struct S {
-    field: Type,
-}
-```
-
-
-### `extract_variable`
-**Source:**  [extract_variable.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/extract_variable.rs#L18) 
-
-Extracts subexpression into a variable.
-
-#### Before
-```rust
-fn main() {
-    ┃(1 + 2)┃ * 4;
-}
-```
-
-#### After
-```rust
-fn main() {
-    let ┃var_name = 1 + 2;
-    var_name * 4;
-}
-```
-
-
-### `fix_visibility`
-**Source:**  [fix_visibility.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/fix_visibility.rs#L14) 
-
-Makes inaccessible item public.
-
-#### Before
-```rust
-mod m {
-    fn frobnicate() {}
-}
-fn main() {
-    m::frobnicate┃();
-}
-```
-
-#### After
-```rust
-mod m {
-    ┃pub(crate) fn frobnicate() {}
-}
-fn main() {
-    m::frobnicate();
-}
-```
-
-
-### `flip_binexpr`
-**Source:**  [flip_binexpr.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/flip_binexpr.rs#L8) 
-
-Flips operands of a binary expression.
-
-#### Before
-```rust
-fn main() {
-    let _ = 90 +┃ 2;
-}
-```
-
-#### After
-```rust
-fn main() {
-    let _ = 2 + 90;
-}
-```
-
-
-### `flip_comma`
-**Source:**  [flip_comma.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/flip_comma.rs#L10) 
-
-Flips two comma-separated items.
-
-#### Before
-```rust
-fn main() {
-    ((1, 2),┃ (3, 4));
-}
-```
-
-#### After
-```rust
-fn main() {
-    ((3, 4), (1, 2));
-}
-```
-
-
-### `flip_or_pattern`
-**Source:**  [flip_or_pattern.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/flip_or_pattern.rs#L9) 
-
-Flips two patterns in an or-pattern.
-
-#### Before
-```rust
-fn foo() {
-    let (a |┃ b) = 1;
-}
-```
-
-#### After
-```rust
-fn foo() {
-    let (b | a) = 1;
-}
-```
-
-
-### `flip_trait_bound`
-**Source:**  [flip_trait_bound.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/flip_trait_bound.rs#L9) 
-
-Flips two trait bounds.
-
-#### Before
-```rust
-fn foo<T: Clone +┃ Copy>() { }
-```
-
-#### After
-```rust
-fn foo<T: Copy + Clone>() { }
-```
-
-
-### `generate_constant`
-**Source:**  [generate_constant.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_constant.rs#L14) 
-
-Generate a named constant.
-
-#### Before
-```rust
-struct S { i: usize }
-impl S { pub fn new(n: usize) {} }
-fn main() {
-    let v = S::new(CAPA┃CITY);
-}
-```
-
-#### After
-```rust
-struct S { i: usize }
-impl S { pub fn new(n: usize) {} }
-fn main() {
-    const CAPACITY: usize = ┃;
-    let v = S::new(CAPACITY);
-}
-```
-
-
-### `generate_default_from_enum_variant`
-**Source:**  [generate_default_from_enum_variant.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_default_from_enum_variant.rs#L6) 
-
-Adds a Default impl for an enum using a variant.
-
-#### Before
-```rust
-enum Version {
- Undefined,
- Minor┃,
- Major,
-}
-```
-
-#### After
-```rust
-enum Version {
- Undefined,
- Minor,
- Major,
-}
-
-impl Default for Version {
-    fn default() -> Self {
-        Self::Minor
-    }
-}
-```
-
-
-### `generate_default_from_new`
-**Source:**  [generate_default_from_new.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_default_from_new.rs#L13) 
-
-Generates default implementation from new method.
-
-#### Before
-```rust
-struct Example { _inner: () }
-
-impl Example {
-    pub fn n┃ew() -> Self {
-        Self { _inner: () }
-    }
-}
-```
-
-#### After
-```rust
-struct Example { _inner: () }
-
-impl Example {
-    pub fn new() -> Self {
-        Self { _inner: () }
-    }
-}
-
-impl Default for Example {
-    fn default() -> Self {
-        Self::new()
-    }
-}
-```
-
-
-### `generate_delegate_methods`
-**Source:**  [generate_delegate_methods.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_delegate_methods.rs#L15) 
-
-Generate delegate methods.
-
-#### Before
-```rust
-struct Age(u8);
-impl Age {
-    fn age(&self) -> u8 {
-        self.0
-    }
-}
-
-struct Person {
-    ag┃e: Age,
-}
-```
-
-#### After
-```rust
-struct Age(u8);
-impl Age {
-    fn age(&self) -> u8 {
-        self.0
-    }
-}
-
-struct Person {
-    age: Age,
-}
-
-impl Person {
-    ┃fn age(&self) -> u8 {
-        self.age.age()
-    }
-}
-```
-
-
-### `generate_delegate_trait`
-**Source:**  [generate_delegate_trait.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_delegate_trait.rs#L29) 
-
-Generate delegate trait implementation for `StructField`s.
-
-#### Before
-```rust
-trait SomeTrait {
-    type T;
-    fn fn_(arg: u32) -> u32;
-    fn method_(&mut self) -> bool;
-}
-struct A;
-impl SomeTrait for A {
-    type T = u32;
-
-    fn fn_(arg: u32) -> u32 {
-        42
-    }
-
-    fn method_(&mut self) -> bool {
-        false
-    }
-}
-struct B {
-    a┃: A,
-}
-```
-
-#### After
-```rust
-trait SomeTrait {
-    type T;
-    fn fn_(arg: u32) -> u32;
-    fn method_(&mut self) -> bool;
-}
-struct A;
-impl SomeTrait for A {
-    type T = u32;
-
-    fn fn_(arg: u32) -> u32 {
-        42
-    }
-
-    fn method_(&mut self) -> bool {
-        false
-    }
-}
-struct B {
-    a: A,
-}
-
-impl SomeTrait for B {
-    type T = <A as SomeTrait>::T;
-
-    fn fn_(arg: u32) -> u32 {
-        <A as SomeTrait>::fn_(arg)
-    }
-
-    fn method_(&mut self) -> bool {
-        <A as SomeTrait>::method_(&mut self.a)
-    }
-}
-```
-
-
-### `generate_deref`
-**Source:**  [generate_deref.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_deref.rs#L16) 
-
-Generate `Deref` impl using the given struct field.
-
-#### Before
-```rust
-struct A;
-struct B {
-   ┃a: A
-}
-```
-
-#### After
-```rust
-struct A;
-struct B {
-   a: A
-}
-
-impl core::ops::Deref for B {
-    type Target = A;
-
-    fn deref(&self) -> &Self::Target {
-        &self.a
-    }
-}
-```
-
-
-### `generate_derive`
-**Source:**  [generate_derive.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_derive.rs#L8) 
-
-Adds a new `#[derive()]` clause to a struct or enum.
-
-#### Before
-```rust
-struct Point {
-    x: u32,
-    y: u32,┃
-}
-```
-
-#### After
-```rust
-#[derive(┃)]
-struct Point {
-    x: u32,
-    y: u32,
-}
-```
-
-
-### `generate_doc_example`
-**Source:**  [generate_documentation_template.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_documentation_template.rs#L76) 
-
-Generates a rustdoc example when editing an item's documentation.
-
-#### Before
-```rust
-/// Adds two numbers.┃
-pub fn add(a: i32, b: i32) -> i32 { a + b }
-```
-
-#### After
-```rust
-/// Adds two numbers.
-///
-/// # Examples
-///
-/// ```
-/// use ra_test_fixture::add;
-///
-/// assert_eq!(add(a, b), );
-/// ```
-pub fn add(a: i32, b: i32) -> i32 { a + b }
-```
-
-
-### `generate_documentation_template`
-**Source:**  [generate_documentation_template.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_documentation_template.rs#L13) 
-
-Adds a documentation template above a function definition / declaration.
-
-#### Before
-```rust
-pub struct S;
-impl S {
-    pub unsafe fn set_len┃(&mut self, len: usize) -> Result<(), std::io::Error> {
-        /* ... */
-    }
-}
-```
-
-#### After
-```rust
-pub struct S;
-impl S {
-    /// Sets the length of this [`S`].
-    ///
-    /// # Errors
-    ///
-    /// This function will return an error if .
-    ///
-    /// # Safety
-    ///
-    /// .
-    pub unsafe fn set_len(&mut self, len: usize) -> Result<(), std::io::Error> {
-        /* ... */
-    }
-}
-```
-
-
-### `generate_enum_as_method`
-**Source:**  [generate_enum_projection_method.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_enum_projection_method.rs#L59) 
-
-Generate an `as_` method for this enum variant.
-
-#### Before
-```rust
-enum Value {
- Number(i32),
- Text(String)┃,
-}
-```
-
-#### After
-```rust
-enum Value {
- Number(i32),
- Text(String),
-}
-
-impl Value {
-    fn as_text(&self) -> Option<&String> {
-        if let Self::Text(v) = self {
-            Some(v)
-        } else {
-            None
-        }
-    }
-}
-```
-
-
-### `generate_enum_is_method`
-**Source:**  [generate_enum_is_method.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_enum_is_method.rs#L11) 
-
-Generate an `is_` method for this enum variant.
-
-#### Before
-```rust
-enum Version {
- Undefined,
- Minor┃,
- Major,
-}
-```
-
-#### After
-```rust
-enum Version {
- Undefined,
- Minor,
- Major,
-}
-
-impl Version {
-    /// Returns `true` if the version is [`Minor`].
-    ///
-    /// [`Minor`]: Version::Minor
-    #[must_use]
-    fn is_minor(&self) -> bool {
-        matches!(self, Self::Minor)
-    }
-}
-```
-
-
-### `generate_enum_try_into_method`
-**Source:**  [generate_enum_projection_method.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_enum_projection_method.rs#L12) 
-
-Generate a `try_into_` method for this enum variant.
-
-#### Before
-```rust
-enum Value {
- Number(i32),
- Text(String)┃,
-}
-```
-
-#### After
-```rust
-enum Value {
- Number(i32),
- Text(String),
-}
-
-impl Value {
-    fn try_into_text(self) -> Result<String, Self> {
-        if let Self::Text(v) = self {
-            Ok(v)
-        } else {
-            Err(self)
-        }
-    }
-}
-```
-
-
-### `generate_enum_variant`
-**Source:**  [generate_enum_variant.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_enum_variant.rs#L10) 
-
-Adds a variant to an enum.
-
-#### Before
-```rust
-enum Countries {
-    Ghana,
-}
-
-fn main() {
-    let country = Countries::Lesotho┃;
-}
-```
-
-#### After
-```rust
-enum Countries {
-    Ghana,
-    Lesotho,
-}
-
-fn main() {
-    let country = Countries::Lesotho;
-}
-```
-
-
-### `generate_fn_type_alias_named`
-**Source:**  [generate_fn_type_alias.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_fn_type_alias.rs#L10) 
-
-Generate a type alias for the function with named parameters.
-
-#### Before
-```rust
-unsafe fn fo┃o(n: i32) -> i32 { 42i32 }
-```
-
-#### After
-```rust
-type ${0:FooFn} = unsafe fn(n: i32) -> i32;
-
-unsafe fn foo(n: i32) -> i32 { 42i32 }
-```
-
-
-### `generate_fn_type_alias_unnamed`
-**Source:**  [generate_fn_type_alias.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_fn_type_alias.rs#L24) 
-
-Generate a type alias for the function with unnamed parameters.
-
-#### Before
-```rust
-unsafe fn fo┃o(n: i32) -> i32 { 42i32 }
-```
-
-#### After
-```rust
-type ${0:FooFn} = unsafe fn(i32) -> i32;
-
-unsafe fn foo(n: i32) -> i32 { 42i32 }
-```
-
-
-### `generate_from_impl_for_enum`
-**Source:**  [generate_from_impl_for_enum.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs#L8) 
-
-Adds a From impl for this enum variant with one tuple field.
-
-#### Before
-```rust
-enum A { ┃One(u32) }
-```
-
-#### After
-```rust
-enum A { One(u32) }
-
-impl From<u32> for A {
-    fn from(v: u32) -> Self {
-        Self::One(v)
-    }
-}
-```
-
-
-### `generate_function`
-**Source:**  [generate_function.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_function.rs#L28) 
-
-Adds a stub function with a signature matching the function under the cursor.
-
-#### Before
-```rust
-struct Baz;
-fn baz() -> Baz { Baz }
-fn foo() {
-    bar┃("", baz());
-}
-
-```
-
-#### After
-```rust
-struct Baz;
-fn baz() -> Baz { Baz }
-fn foo() {
-    bar("", baz());
-}
-
-fn bar(arg: &str, baz: Baz) ${0:-> _} {
-    todo!()
-}
-
-```
-
-
-### `generate_getter`
-**Source:**  [generate_getter_or_setter.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_getter_or_setter.rs#L73) 
-
-Generate a getter method.
-
-#### Before
-```rust
-struct Person {
-    nam┃e: String,
-}
-```
-
-#### After
-```rust
-struct Person {
-    name: String,
-}
-
-impl Person {
-    fn ┃name(&self) -> &str {
-        &self.name
-    }
-}
-```
-
-
-### `generate_getter_mut`
-**Source:**  [generate_getter_or_setter.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_getter_or_setter.rs#L127) 
-
-Generate a mut getter method.
-
-#### Before
-```rust
-struct Person {
-    nam┃e: String,
-}
-```
-
-#### After
-```rust
-struct Person {
-    name: String,
-}
-
-impl Person {
-    fn ┃name_mut(&mut self) -> &mut String {
-        &mut self.name
-    }
-}
-```
-
-
-### `generate_impl`
-**Source:**  [generate_impl.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_impl.rs#L20) 
-
-Adds a new inherent impl for a type.
-
-#### Before
-```rust
-struct Ctx┃<T: Clone> {
-    data: T,
-}
-```
-
-#### After
-```rust
-struct Ctx<T: Clone> {
-    data: T,
-}
-
-impl<T: Clone> Ctx<T> {┃}
-```
-
-
-### `generate_is_empty_from_len`
-**Source:**  [generate_is_empty_from_len.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs#L12) 
-
-Generates is_empty implementation from the len method.
-
-#### Before
-```rust
-struct MyStruct { data: Vec<String> }
-
-impl MyStruct {
-    #[must_use]
-    p┃ub fn len(&self) -> usize {
-        self.data.len()
-    }
-}
-```
-
-#### After
-```rust
-struct MyStruct { data: Vec<String> }
-
-impl MyStruct {
-    #[must_use]
-    pub fn len(&self) -> usize {
-        self.data.len()
-    }
-
-    #[must_use]
-    pub fn is_empty(&self) -> bool {
-        self.len() == 0
-    }
-}
-```
-
-
-### `generate_mut_trait_impl`
-**Source:**  [generate_mut_trait_impl.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs#L12) 
-
-Adds an `IndexMut` impl from the `Index` trait.
-
-#### Before
-```rust
-pub enum Axis { X = 0, Y = 1, Z = 2 }
-
-impl<T> core::ops::Index┃<Axis> for [T; 3] {
-    type Output = T;
-
-    fn index(&self, index: Axis) -> &Self::Output {
-        &self[index as usize]
-    }
-}
-```
-
-#### After
-```rust
-pub enum Axis { X = 0, Y = 1, Z = 2 }
-
-┃impl<T> core::ops::IndexMut<Axis> for [T; 3] {
-    fn index_mut(&mut self, index: Axis) -> &mut Self::Output {
-        &self[index as usize]
-    }
-}
-
-impl<T> core::ops::Index<Axis> for [T; 3] {
-    type Output = T;
-
-    fn index(&self, index: Axis) -> &Self::Output {
-        &self[index as usize]
-    }
-}
-```
-
-
-### `generate_new`
-**Source:**  [generate_new.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_new.rs#L14) 
-
-Adds a `fn new` for a type.
-
-#### Before
-```rust
-struct Ctx<T: Clone> {
-     data: T,┃
-}
-```
-
-#### After
-```rust
-struct Ctx<T: Clone> {
-     data: T,
-}
-
-impl<T: Clone> Ctx<T> {
-    fn ┃new(data: T) -> Self {
-        Self { data }
-    }
-}
-```
-
-
-### `generate_setter`
-**Source:**  [generate_getter_or_setter.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_getter_or_setter.rs#L13) 
-
-Generate a setter method.
-
-#### Before
-```rust
-struct Person {
-    nam┃e: String,
-}
-```
-
-#### After
-```rust
-struct Person {
-    name: String,
-}
-
-impl Person {
-    fn ┃set_name(&mut self, name: String) {
-        self.name = name;
-    }
-}
-```
-
-
-### `generate_trait_from_impl`
-**Source:**  [generate_trait_from_impl.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_trait_from_impl.rs#L18) 
-
-Generate trait for an already defined inherent impl and convert impl to a trait impl.
-
-#### Before
-```rust
-struct Foo<const N: usize>([i32; N]);
-
-macro_rules! const_maker {
-    ($t:ty, $v:tt) => {
-        const CONST: $t = $v;
-    };
-}
-
-impl<const N: usize> Fo┃o<N> {
-    // Used as an associated constant.
-    const CONST_ASSOC: usize = N * 4;
-
-    fn create() -> Option<()> {
-        Some(())
-    }
-
-    const_maker! {i32, 7}
-}
-```
-
-#### After
-```rust
-struct Foo<const N: usize>([i32; N]);
-
-macro_rules! const_maker {
-    ($t:ty, $v:tt) => {
-        const CONST: $t = $v;
-    };
-}
-
-trait ${0:NewTrait}<const N: usize> {
-    // Used as an associated constant.
-    const CONST_ASSOC: usize = N * 4;
-
-    fn create() -> Option<()>;
-
-    const_maker! {i32, 7}
-}
-
-impl<const N: usize> ${0:NewTrait}<N> for Foo<N> {
-    // Used as an associated constant.
-    const CONST_ASSOC: usize = N * 4;
-
-    fn create() -> Option<()> {
-        Some(())
-    }
-
-    const_maker! {i32, 7}
-}
-```
-
-
-### `generate_trait_impl`
-**Source:**  [generate_impl.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/generate_impl.rs#L66) 
-
-Adds a new trait impl for a type.
-
-#### Before
-```rust
-struct ┃Ctx<T: Clone> {
-    data: T,
-}
-```
-
-#### After
-```rust
-struct Ctx<T: Clone> {
-    data: T,
-}
-
-impl<T: Clone> ${1:_} for Ctx<T> {┃}
-```
-
-
-### `inline_call`
-**Source:**  [inline_call.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/inline_call.rs#L170) 
-
-Inlines a function or method body, creating a `let` statement per parameter unless the parameter
-can be inlined. The parameter will be inlined either if the supplied argument is a simple local
-or if the parameter is only accessed inside the function body once.
-
-#### Before
-```rust
-fn foo(name: Option<&str>) {
-    let name = name.unwrap┃();
-}
-```
-
-#### After
-```rust
-fn foo(name: Option<&str>) {
-    let name = match name {
-            Some(val) => val,
-            None => panic!("called `Option::unwrap()` on a `None` value"),
-        };
-}
-```
-
-
-### `inline_const_as_literal`
-**Source:**  [inline_const_as_literal.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/inline_const_as_literal.rs#L6) 
-
-Evaluate and inline const variable as literal.
-
-#### Before
-```rust
-const STRING: &str = "Hello, World!";
-
-fn something() -> &'static str {
-    STRING┃
-}
-```
-
-#### After
-```rust
-const STRING: &str = "Hello, World!";
-
-fn something() -> &'static str {
-    "Hello, World!"
-}
-```
-
-
-### `inline_into_callers`
-**Source:**  [inline_call.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/inline_call.rs#L32) 
-
-Inlines a function or method body into all of its callers where possible, creating a `let` statement per parameter
-unless the parameter can be inlined. The parameter will be inlined either if the supplied argument is a simple local
-or if the parameter is only accessed inside the function body once.
-If all calls can be inlined, the function will be removed.
-
-#### Before
-```rust
-fn print(_: &str) {}
-fn foo┃(word: &str) {
-    if !word.is_empty() {
-        print(word);
-    }
-}
-fn bar() {
-    foo("안녕하세요");
-    foo("여러분");
-}
-```
-
-#### After
-```rust
-fn print(_: &str) {}
-
-fn bar() {
-    {
-        let word: &str = "안녕하세요";
-        if !word.is_empty() {
-            print(word);
-        }
-    };
-    {
-        let word: &str = "여러분";
-        if !word.is_empty() {
-            print(word);
-        }
-    };
-}
-```
-
-
-### `inline_local_variable`
-**Source:**  [inline_local_variable.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/inline_local_variable.rs#L17) 
-
-Inlines a local variable.
-
-#### Before
-```rust
-fn main() {
-    let x┃ = 1 + 2;
-    x * 4;
-}
-```
-
-#### After
-```rust
-fn main() {
-    (1 + 2) * 4;
-}
-```
-
-
-### `inline_macro`
-**Source:**  [inline_macro.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/inline_macro.rs#L7) 
-
-Takes a macro and inlines it one step.
-
-#### Before
-```rust
-macro_rules! num {
-    (+$($t:tt)+) => (1 + num!($($t )+));
-    (-$($t:tt)+) => (-1 + num!($($t )+));
-    (+) => (1);
-    (-) => (-1);
-}
-
-fn main() {
-    let number = num┃!(+ + + - + +);
-    println!("{number}");
-}
-```
-
-#### After
-```rust
-macro_rules! num {
-    (+$($t:tt)+) => (1 + num!($($t )+));
-    (-$($t:tt)+) => (-1 + num!($($t )+));
-    (+) => (1);
-    (-) => (-1);
-}
-
-fn main() {
-    let number = 1+num!(+ + - + +);
-    println!("{number}");
-}
-```
-
-
-### `inline_type_alias`
-**Source:**  [inline_type_alias.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/inline_type_alias.rs#L106) 
-
-Replace a type alias with its concrete type.
-
-#### Before
-```rust
-type A<T = u32> = Vec<T>;
-
-fn main() {
-    let a: ┃A;
-}
-```
-
-#### After
-```rust
-type A<T = u32> = Vec<T>;
-
-fn main() {
-    let a: Vec<u32>;
-}
-```
-
-
-### `inline_type_alias_uses`
-**Source:**  [inline_type_alias.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/inline_type_alias.rs#L24) 
-
-Inline a type alias into all of its uses where possible.
-
-#### Before
-```rust
-type ┃A = i32;
-fn id(x: A) -> A {
-    x
-};
-fn foo() {
-    let _: A = 3;
-}
-```
-
-#### After
-```rust
-
-fn id(x: i32) -> i32 {
-    x
-};
-fn foo() {
-    let _: i32 = 3;
-}
-```
-
-
-### `into_to_qualified_from`
-**Source:**  [into_to_qualified_from.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/into_to_qualified_from.rs#L10) 
-
-Convert an `into` method call to a fully qualified `from` call.
-
-#### Before
-```rust
-//- minicore: from
-struct B;
-impl From<i32> for B {
-    fn from(a: i32) -> Self {
-       B
-    }
-}
-
-fn main() -> () {
-    let a = 3;
-    let b: B = a.in┃to();
-}
-```
-
-#### After
-```rust
-struct B;
-impl From<i32> for B {
-    fn from(a: i32) -> Self {
-       B
-    }
-}
-
-fn main() -> () {
-    let a = 3;
-    let b: B = B::from(a);
-}
-```
-
-
-### `introduce_named_lifetime`
-**Source:**  [introduce_named_lifetime.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/introduce_named_lifetime.rs#L13) 
-
-Change an anonymous lifetime to a named lifetime.
-
-#### Before
-```rust
-impl Cursor<'_┃> {
-    fn node(self) -> &SyntaxNode {
-        match self {
-            Cursor::Replace(node) | Cursor::Before(node) => node,
-        }
-    }
-}
-```
-
-#### After
-```rust
-impl<'a> Cursor<'a> {
-    fn node(self) -> &SyntaxNode {
-        match self {
-            Cursor::Replace(node) | Cursor::Before(node) => node,
-        }
-    }
-}
-```
-
-
-### `introduce_named_type_parameter`
-**Source:**  [introduce_named_type_parameter.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/introduce_named_type_parameter.rs#L7) 
-
-Replaces `impl Trait` function argument with the named generic.
-
-#### Before
-```rust
-fn foo(bar: ┃impl Bar) {}
-```
-
-#### After
-```rust
-fn foo<┃B: Bar>(bar: B) {}
-```
-
-
-### `invert_if`
-**Source:**  [invert_if.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/invert_if.rs#L13) 
-
-This transforms if expressions of the form `if !x {A} else {B}` into `if x {B} else {A}`
-This also works with `!=`. This assist can only be applied with the cursor on `if`.
-
-#### Before
-```rust
-fn main() {
-    if┃ !y { A } else { B }
-}
-```
-
-#### After
-```rust
-fn main() {
-    if y { B } else { A }
-}
-```
-
-
-### `line_to_block`
-**Source:**  [convert_comment_block.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/convert_comment_block.rs#L9) 
-
-Converts comments between block and single-line form.
-
-#### Before
-```rust
-   // Multi-line┃
-   // comment
-```
-
-#### After
-```rust
-  /*
-  Multi-line
-  comment
-  */
-```
-
-
-### `make_raw_string`
-**Source:**  [raw_string.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/raw_string.rs#L7) 
-
-Adds `r#` to a plain string literal.
-
-#### Before
-```rust
-fn main() {
-    "Hello,┃ World!";
-}
-```
-
-#### After
-```rust
-fn main() {
-    r#"Hello, World!"#;
-}
-```
-
-
-### `make_usual_string`
-**Source:**  [raw_string.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/raw_string.rs#L47) 
-
-Turns a raw string into a plain string.
-
-#### Before
-```rust
-fn main() {
-    r#"Hello,┃ "World!""#;
-}
-```
-
-#### After
-```rust
-fn main() {
-    "Hello, \"World!\"";
-}
-```
-
-
-### `merge_imports`
-**Source:**  [merge_imports.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/merge_imports.rs#L21) 
-
-Merges neighbor imports with a common prefix.
-
-#### Before
-```rust
-use std::┃fmt::Formatter;
-use std::io;
-```
-
-#### After
-```rust
-use std::{fmt::Formatter, io};
-```
-
-
-### `merge_match_arms`
-**Source:**  [merge_match_arms.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/merge_match_arms.rs#L12) 
-
-Merges the current match arm with the following if their bodies are identical.
-
-#### Before
-```rust
-enum Action { Move { distance: u32 }, Stop }
-
-fn handle(action: Action) {
-    match action {
-        ┃Action::Move(..) => foo(),
-        Action::Stop => foo(),
-    }
-}
-```
-
-#### After
-```rust
-enum Action { Move { distance: u32 }, Stop }
-
-fn handle(action: Action) {
-    match action {
-        Action::Move(..) | Action::Stop => foo(),
-    }
-}
-```
-
-
-### `merge_nested_if`
-**Source:**  [merge_nested_if.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/merge_nested_if.rs#L11) 
-
-This transforms if expressions of the form `if x { if y {A} }` into `if x && y {A}`
-This assist can only be applied with the cursor on `if`.
-
-#### Before
-```rust
-fn main() {
-   i┃f x == 3 { if y == 4 { 1 } }
-}
-```
-
-#### After
-```rust
-fn main() {
-   if x == 3 && y == 4 { 1 }
-}
-```
-
-
-### `move_arm_cond_to_match_guard`
-**Source:**  [move_guard.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/move_guard.rs#L69) 
-
-Moves if expression from match arm body into a guard.
-
-#### Before
-```rust
-enum Action { Move { distance: u32 }, Stop }
-
-fn handle(action: Action) {
-    match action {
-        Action::Move { distance } => ┃if distance > 10 { foo() },
-        _ => (),
-    }
-}
-```
-
-#### After
-```rust
-enum Action { Move { distance: u32 }, Stop }
-
-fn handle(action: Action) {
-    match action {
-        Action::Move { distance } if distance > 10 => foo(),
-        _ => (),
-    }
-}
-```
-
-
-### `move_bounds_to_where_clause`
-**Source:**  [move_bounds.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/move_bounds.rs#L12) 
-
-Moves inline type bounds to a where clause.
-
-#### Before
-```rust
-fn apply<T, U, ┃F: FnOnce(T) -> U>(f: F, x: T) -> U {
-    f(x)
-}
-```
-
-#### After
-```rust
-fn apply<T, U, F>(f: F, x: T) -> U where F: FnOnce(T) -> U {
-    f(x)
-}
-```
-
-
-### `move_const_to_impl`
-**Source:**  [move_const_to_impl.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/move_const_to_impl.rs#L14) 
-
-Move a local constant item in a method to impl's associated constant. All the references will be
-qualified with `Self::`.
-
-#### Before
-```rust
-struct S;
-impl S {
-    fn foo() -> usize {
-        /// The answer.
-        const C┃: usize = 42;
-
-        C * C
-    }
-}
-```
-
-#### After
-```rust
-struct S;
-impl S {
-    /// The answer.
-    const C: usize = 42;
-
-    fn foo() -> usize {
-        Self::C * Self::C
-    }
-}
-```
-
-
-### `move_from_mod_rs`
-**Source:**  [move_from_mod_rs.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/move_from_mod_rs.rs#L12) 
-
-Moves xxx/mod.rs to xxx.rs.
-
-#### Before
-```rust
-//- /main.rs
-mod a;
-//- /a/mod.rs
-┃fn t() {}┃
-```
-
-#### After
-```rust
-fn t() {}
-```
-
-
-### `move_guard_to_arm_body`
-**Source:**  [move_guard.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/move_guard.rs#L8) 
-
-Moves a match guard into the match arm body.
-
-#### Before
-```rust
-enum Action { Move { distance: u32 }, Stop }
-
-fn handle(action: Action) {
-    match action {
-        Action::Move { distance } ┃if distance > 10 => foo(),
-        _ => (),
-    }
-}
-```
-
-#### After
-```rust
-enum Action { Move { distance: u32 }, Stop }
-
-fn handle(action: Action) {
-    match action {
-        Action::Move { distance } => if distance > 10 {
-            foo()
-        },
-        _ => (),
-    }
-}
-```
-
-
-### `move_module_to_file`
-**Source:**  [move_module_to_file.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/move_module_to_file.rs#L15) 
-
-Moves inline module's contents to a separate file.
-
-#### Before
-```rust
-mod ┃foo {
-    fn t() {}
-}
-```
-
-#### After
-```rust
-mod foo;
-```
-
-
-### `move_to_mod_rs`
-**Source:**  [move_to_mod_rs.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/move_to_mod_rs.rs#L12) 
-
-Moves xxx.rs to xxx/mod.rs.
-
-#### Before
-```rust
-//- /main.rs
-mod a;
-//- /a.rs
-┃fn t() {}┃
-```
-
-#### After
-```rust
-fn t() {}
-```
-
-
-### `normalize_import`
-**Source:**  [normalize_import.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/normalize_import.rs#L9) 
-
-Normalizes an import.
-
-#### Before
-```rust
-use┃ std::{io, {fmt::Formatter}};
-```
-
-#### After
-```rust
-use std::{fmt::Formatter, io};
-```
-
-
-### `promote_local_to_const`
-**Source:**  [promote_local_to_const.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/promote_local_to_const.rs#L17) 
-
-Promotes a local variable to a const item, changing its name to a `SCREAMING_SNAKE_CASE` variant,
-if the local uses no non-const expressions.
-
-#### Before
-```rust
-fn main() {
-    let foo┃ = true;
-
-    if foo {
-        println!("It's true");
-    } else {
-        println!("It's false");
-    }
-}
-```
-
-#### After
-```rust
-fn main() {
-    const ┃FOO: bool = true;
-
-    if FOO {
-        println!("It's true");
-    } else {
-        println!("It's false");
-    }
-}
-```
-
-
-### `pull_assignment_up`
-**Source:**  [pull_assignment_up.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/pull_assignment_up.rs#L11) 
-
-Extracts a variable assignment to outside an `if` or `match` statement.
-
-#### Before
-```rust
-fn main() {
-    let mut foo = 6;
-
-    if true {
-        ┃foo = 5;
-    } else {
-        foo = 4;
-    }
-}
-```
-
-#### After
-```rust
-fn main() {
-    let mut foo = 6;
-
-    foo = if true {
-        5
-    } else {
-        4
-    };
-}
-```
-
-
-### `qualify_method_call`
-**Source:**  [qualify_method_call.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/qualify_method_call.rs#L10) 
-
-Replaces the method call with a qualified function call.
-
-#### Before
-```rust
-struct Foo;
-impl Foo {
-    fn foo(&self) {}
-}
-fn main() {
-    let foo = Foo;
-    foo.fo┃o();
-}
-```
-
-#### After
-```rust
-struct Foo;
-impl Foo {
-    fn foo(&self) {}
-}
-fn main() {
-    let foo = Foo;
-    Foo::foo(&foo);
-}
-```
-
-
-### `qualify_path`
-**Source:**  [qualify_path.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/qualify_path.rs#L24) 
-
-If the name is unresolved, provides all possible qualified paths for it.
-
-#### Before
-```rust
-fn main() {
-    let map = HashMap┃::new();
-}
-```
-
-#### After
-```rust
-fn main() {
-    let map = std::collections::HashMap::new();
-}
-```
-
-
-### `reformat_number_literal`
-**Source:**  [number_representation.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/number_representation.rs#L7) 
-
-Adds or removes separators from an integer literal.
-
-#### Before
-```rust
-const _: i32 = 1012345┃;
-```
-
-#### After
-```rust
-const _: i32 = 1_012_345;
-```
-
-
-### `remove_dbg`
-**Source:**  [remove_dbg.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/remove_dbg.rs#L9) 
-
-Removes `dbg!()` macro call.
-
-#### Before
-```rust
-fn main() {
-    let x = ┃dbg!(42 * dbg!(4 + 2));┃
-}
-```
-
-#### After
-```rust
-fn main() {
-    let x = 42 * (4 + 2);
-}
-```
-
-
-### `remove_hash`
-**Source:**  [raw_string.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/raw_string.rs#L117) 
-
-Removes a hash from a raw string literal.
-
-#### Before
-```rust
-fn main() {
-    r#"Hello,┃ World!"#;
-}
-```
-
-#### After
-```rust
-fn main() {
-    r"Hello, World!";
-}
-```
-
-
-### `remove_mut`
-**Source:**  [remove_mut.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/remove_mut.rs#L5) 
-
-Removes the `mut` keyword.
-
-#### Before
-```rust
-impl Walrus {
-    fn feed(&mut┃ self, amount: u32) {}
-}
-```
-
-#### After
-```rust
-impl Walrus {
-    fn feed(&self, amount: u32) {}
-}
-```
-
-
-### `remove_parentheses`
-**Source:**  [remove_parentheses.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/remove_parentheses.rs#L9) 
-
-Removes redundant parentheses.
-
-#### Before
-```rust
-fn main() {
-    _ = ┃(2) + 2;
-}
-```
-
-#### After
-```rust
-fn main() {
-    _ = 2 + 2;
-}
-```
-
-
-### `remove_unused_imports`
-**Source:**  [remove_unused_imports.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/remove_unused_imports.rs#L17) 
-
-Removes any use statements in the current selection that are unused.
-
-#### Before
-```rust
-struct X();
-mod foo {
-    use super::X┃;
-}
-```
-
-#### After
-```rust
-struct X();
-mod foo {
-}
-```
-
-
-### `remove_unused_param`
-**Source:**  [remove_unused_param.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/remove_unused_param.rs#L16) 
-
-Removes an unused function parameter.
-
-#### Before
-```rust
-fn frobnicate(x: i32┃) {}
-
-fn main() {
-    frobnicate(92);
-}
-```
-
-#### After
-```rust
-fn frobnicate() {}
-
-fn main() {
-    frobnicate();
-}
-```
-
-
-### `reorder_fields`
-**Source:**  [reorder_fields.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/reorder_fields.rs#L8) 
-
-Reorders the fields of record literals and record patterns to match
-the order of the definition.
-
-#### Before
-```rust
-struct Foo {foo: i32, bar: i32};
-const test: Foo = ┃Foo {bar: 0, foo: 1}
-```
-
-#### After
-```rust
-struct Foo {foo: i32, bar: i32};
-const test: Foo = Foo {foo: 1, bar: 0}
-```
-
-
-### `reorder_impl_items`
-**Source:**  [reorder_impl_items.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/reorder_impl_items.rs#L11) 
-
-Reorders the items of an `impl Trait` block to match
-the order of the trait definition.
-
-#### Before
-```rust
-trait Foo {
-    type A;
-    const B: u8;
-    fn c();
-}
-
-struct Bar;
-┃impl Foo for Bar┃ {
-    const B: u8 = 17;
-    fn c() {}
-    type A = String;
-}
-```
-
-#### After
-```rust
-trait Foo {
-    type A;
-    const B: u8;
-    fn c();
-}
-
-struct Bar;
-impl Foo for Bar {
-    type A = String;
-    const B: u8 = 17;
-    fn c() {}
-}
-```
-
-
-### `replace_arith_with_checked`
-**Source:**  [replace_arith_op.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/replace_arith_op.rs#L9) 
-
-Replaces arithmetic on integers with the `checked_*` equivalent.
-
-#### Before
-```rust
-fn main() {
-  let x = 1 ┃+ 2;
-}
-```
-
-#### After
-```rust
-fn main() {
-  let x = 1.checked_add(2);
-}
-```
-
-
-### `replace_arith_with_saturating`
-**Source:**  [replace_arith_op.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/replace_arith_op.rs#L28) 
-
-Replaces arithmetic on integers with the `saturating_*` equivalent.
-
-#### Before
-```rust
-fn main() {
-  let x = 1 ┃+ 2;
-}
-```
-
-#### After
-```rust
-fn main() {
-  let x = 1.saturating_add(2);
-}
-```
-
-
-### `replace_arith_with_wrapping`
-**Source:**  [replace_arith_op.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/replace_arith_op.rs#L50) 
-
-Replaces arithmetic on integers with the `wrapping_*` equivalent.
-
-#### Before
-```rust
-fn main() {
-  let x = 1 ┃+ 2;
-}
-```
-
-#### After
-```rust
-fn main() {
-  let x = 1.wrapping_add(2);
-}
-```
-
-
-### `replace_char_with_string`
-**Source:**  [replace_string_with_char.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/replace_string_with_char.rs#L51) 
-
-Replaces a char literal with a string literal.
-
-#### Before
-```rust
-fn main() {
-    find('{┃');
-}
-```
-
-#### After
-```rust
-fn main() {
-    find("{");
-}
-```
-
-
-### `replace_derive_with_manual_impl`
-**Source:**  [replace_derive_with_manual_impl.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs#L20) 
-
-Converts a `derive` impl into a manual one.
-
-#### Before
-```rust
-#[derive(Deb┃ug, Display)]
-struct S;
-```
-
-#### After
-```rust
-#[derive(Display)]
-struct S;
-
-impl Debug for S {
-    ┃fn fmt(&self, f: &mut Formatter) -> Result<()> {
-        f.debug_struct("S").finish()
-    }
-}
-```
-
-
-### `replace_if_let_with_match`
-**Source:**  [replace_if_let_with_match.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/replace_if_let_with_match.rs#L20) 
-
-Replaces an `if let` expression with a `match` expression.
-
-#### Before
-```rust
-enum Action { Move { distance: u32 }, Stop }
-
-fn handle(action: Action) {
-    ┃if let Action::Move { distance } = action {
-        foo(distance)
-    } else {
-        bar()
-    }
-}
-```
-
-#### After
-```rust
-enum Action { Move { distance: u32 }, Stop }
-
-fn handle(action: Action) {
-    match action {
-        Action::Move { distance } => foo(distance),
-        _ => bar(),
-    }
-}
-```
-
-
-### `replace_is_some_with_if_let_some`
-**Source:**  [replace_is_method_with_if_let_method.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs#L9) 
-
-Replace `if x.is_some()` with `if let Some(_tmp) = x` or `if x.is_ok()` with `if let Ok(_tmp) = x`.
-
-#### Before
-```rust
-fn main() {
-    let x = Some(1);
-    if x.is_som┃e() {}
-}
-```
-
-#### After
-```rust
-fn main() {
-    let x = Some(1);
-    if let Some(${0:x1}) = x {}
-}
-```
-
-
-### `replace_let_with_if_let`
-**Source:**  [replace_let_with_if_let.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/replace_let_with_if_let.rs#L9) 
-
-Replaces `let` with an `if let`.
-
-#### Before
-```rust
-
-fn main(action: Action) {
-    ┃let x = compute();
-}
-
-fn compute() -> Option<i32> { None }
-```
-
-#### After
-```rust
-
-fn main(action: Action) {
-    if let Some(x) = compute() {
-    }
-}
-
-fn compute() -> Option<i32> { None }
-```
-
-
-### `replace_match_with_if_let`
-**Source:**  [replace_if_let_with_match.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/replace_if_let_with_match.rs#L188) 
-
-Replaces a two-arm `match` with a wildcard arm and no guards with an `if let` expression.
-
-#### Before
-```rust
-enum Action { Move { distance: u32 }, Stop }
-
-fn handle(action: Action) {
-    ┃match action {
-        Action::Move { distance } => foo(distance),
-        _ => bar(),
-    }
-}
-```
-
-#### After
-```rust
-enum Action { Move { distance: u32 }, Stop }
-
-fn handle(action: Action) {
-    if let Action::Move { distance } = action {
-        foo(distance)
-    } else {
-        bar()
-    }
-}
-```
-
-
-### `replace_named_generic_with_impl`
-**Source:**  [replace_named_generic_with_impl.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs#L18) 
-
-Replaces a named generic parameter with `impl Trait` in a function argument.
-
-#### Before
-```rust
-fn new<P┃: AsRef<Path>>(location: P) -> Self {}
-```
-
-#### After
-```rust
-fn new(location: impl AsRef<Path>) -> Self {}
-```
-
-
-### `replace_qualified_name_with_use`
-**Source:**  [replace_qualified_name_with_use.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs#L13) 
-
-Adds a use statement for a given fully-qualified name.
-
-#### Before
-```rust
-fn process(map: std::collections::┃HashMap<String, String>) {}
-```
-
-#### After
-```rust
-use std::collections::HashMap;
-
-fn process(map: HashMap<String, String>) {}
-```
-
-
-### `replace_string_with_char`
-**Source:**  [replace_string_with_char.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/replace_string_with_char.rs#L11) 
-
-Replaces a string literal with a char literal.
-
-#### Before
-```rust
-fn main() {
-    find("{┃");
-}
-```
-
-#### After
-```rust
-fn main() {
-    find('{');
-}
-```
-
-
-### `replace_try_expr_with_match`
-**Source:**  [replace_try_expr_with_match.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/replace_try_expr_with_match.rs#L18) 
-
-Replaces a `try` expression with a `match` expression.
-
-#### Before
-```rust
-fn handle() {
-    let pat = Some(true)┃?;
-}
-```
-
-#### After
-```rust
-fn handle() {
-    let pat = match Some(true) {
-        Some(it) => it,
-        None => return None,
-    };
-}
-```
-
-
-### `replace_turbofish_with_explicit_type`
-**Source:**  [replace_turbofish_with_explicit_type.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs#L12) 
-
-Converts a turbofish `::<_>` into an explicit type annotation.
-
-#### Before
-```rust
-fn make<T>() -> T { todo!() }
-fn main() {
-    let a = make┃::<i32>();
-}
-```
-
-#### After
-```rust
-fn make<T>() -> T { todo!() }
-fn main() {
-    let a: i32 = make();
-}
-```
-
-
-### `replace_with_eager_method`
-**Source:**  [replace_method_eager_lazy.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/replace_method_eager_lazy.rs#L89) 
-
-Replace `unwrap_or_else` with `unwrap_or` and `ok_or_else` with `ok_or`.
-
-#### Before
-```rust
-fn foo() {
-    let a = Some(1);
-    a.unwra┃p_or_else(|| 2);
-}
-```
-
-#### After
-```rust
-fn foo() {
-    let a = Some(1);
-    a.unwrap_or(2);
-}
-```
-
-
-### `replace_with_lazy_method`
-**Source:**  [replace_method_eager_lazy.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/replace_method_eager_lazy.rs#L9) 
-
-Replace `unwrap_or` with `unwrap_or_else` and `ok_or` with `ok_or_else`.
-
-#### Before
-```rust
-fn foo() {
-    let a = Some(1);
-    a.unwra┃p_or(2);
-}
-```
-
-#### After
-```rust
-fn foo() {
-    let a = Some(1);
-    a.unwrap_or_else(|| 2);
-}
-```
-
-
-### `sort_items`
-**Source:**  [sort_items.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/sort_items.rs#L12) 
-
-Sorts item members alphabetically: fields, enum variants and methods.
-
-#### Before
-```rust
-struct ┃Foo┃ { second: u32, first: String }
-```
-
-#### After
-```rust
-struct Foo { first: String, second: u32 }
-```
-
----
-
-#### Before
-```rust
-trait ┃Bar┃ {
-    fn second(&self) -> u32;
-    fn first(&self) -> String;
-}
-```
-
-#### After
-```rust
-trait Bar {
-    fn first(&self) -> String;
-    fn second(&self) -> u32;
-}
-```
-
----
-
-#### Before
-```rust
-struct Baz;
-impl ┃Baz┃ {
-    fn second(&self) -> u32;
-    fn first(&self) -> String;
-}
-```
-
-#### After
-```rust
-struct Baz;
-impl Baz {
-    fn first(&self) -> String;
-    fn second(&self) -> u32;
-}
-```
-
----
-There is a difference between sorting enum variants:
-
-#### Before
-```rust
-enum ┃Animal┃ {
-  Dog(String, f64),
-  Cat { weight: f64, name: String },
-}
-```
-
-#### After
-```rust
-enum Animal {
-  Cat { weight: f64, name: String },
-  Dog(String, f64),
-}
-```
-
-and sorting a single enum struct variant:
-
-#### Before
-```rust
-enum Animal {
-  Dog(String, f64),
-  Cat ┃{ weight: f64, name: String }┃,
-}
-```
-
-#### After
-```rust
-enum Animal {
-  Dog(String, f64),
-  Cat { name: String, weight: f64 },
-}
-```
-
-
-### `split_import`
-**Source:**  [split_import.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/split_import.rs#L5) 
-
-Wraps the tail of the import into braces.
-
-#### Before
-```rust
-use std::┃collections::HashMap;
-```
-
-#### After
-```rust
-use std::{collections::HashMap};
-```
-
-
-### `sugar_impl_future_into_async`
-**Source:**  [toggle_async_sugar.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/toggle_async_sugar.rs#L13) 
-
-Rewrites an asynchronous function from `-> impl Future` into `async fn`.
-This action does not touch the function body, so an `async { 0 }`
-block is not transformed to just `0`.
-
-#### Before
-```rust
-pub fn foo() -> impl core::future::F┃uture<Output = usize> {
-    async { 0 }
-}
-```
-
-#### After
-```rust
-pub async fn foo() -> usize {
-    async { 0 }
-}
-```
-
-
-### `toggle_ignore`
-**Source:**  [toggle_ignore.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/toggle_ignore.rs#L8) 
-
-Adds `#[ignore]` attribute to the test.
-
-#### Before
-```rust
-┃#[test]
-fn arithmetics() {
-    assert_eq!(2 + 2, 5);
-}
-```
-
-#### After
-```rust
-#[test]
-#[ignore]
-fn arithmetics() {
-    assert_eq!(2 + 2, 5);
-}
-```
-
-
-### `toggle_macro_delimiter`
-**Source:**  [toggle_macro_delimiter.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs#L9) 
-
-Changes macro delimiters, cycling through `( -> { -> [ -> (`.
-
-#### Before
-```rust
-macro_rules! sth {
-    () => {};
-}
-
-sth!┃( );
-```
-
-#### After
-```rust
-macro_rules! sth {
-    () => {};
-}
-
-sth!{ }
-```
-
-
-### `unmerge_match_arm`
-**Source:**  [unmerge_match_arm.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/unmerge_match_arm.rs#L10) 
-
-Splits the current match arm with a `|` pattern into two arms with identical bodies.
-
-#### Before
-```rust
-enum Action { Move { distance: u32 }, Stop }
-
-fn handle(action: Action) {
-    match action {
-        Action::Move(..) ┃| Action::Stop => foo(),
-    }
-}
-```
-
-#### After
-```rust
-enum Action { Move { distance: u32 }, Stop }
-
-fn handle(action: Action) {
-    match action {
-        Action::Move(..) => foo(),
-        Action::Stop => foo(),
-    }
-}
-```
-
-
-### `unmerge_use`
-**Source:**  [unmerge_use.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/unmerge_use.rs#L12) 
-
-Extracts a single use item from a use list.
-
-#### Before
-```rust
-use std::fmt::{Debug, Display┃};
-```
-
-#### After
-```rust
-use std::fmt::{Debug};
-use std::fmt::Display;
-```
-
-
-### `unnecessary_async`
-**Source:**  [unnecessary_async.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/unnecessary_async.rs#L17) 
-
-Removes the `async` keyword from functions which have no `.await` in their body.
-Looks for calls to the function and removes the `.await` at the call site.
-
-#### Before
-```rust
-pub asy┃nc fn foo() {}
-pub async fn bar() { foo().await }
-```
-
-#### After
-```rust
-pub fn foo() {}
-pub async fn bar() { foo() }
-```
-
-
-### `unqualify_method_call`
-**Source:**  [unqualify_method_call.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/unqualify_method_call.rs#L9) 
-
-Transforms universal function call syntax into a method call.
-
-#### Before
-```rust
-fn main() {
-    std::ops::Add::add┃(1, 2);
-}
-```
-
-#### After
-```rust
-use std::ops::Add;
-
-fn main() {
-    1.add(2);
-}
-```
-
-
-### `unwrap_block`
-**Source:**  [unwrap_block.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/unwrap_block.rs#L12) 
-
-This assist removes `if...else`, `for`, `while` and `loop` control statements, keeping only the body.
-
-#### Before
-```rust
-fn foo() {
-    if true {┃
-        println!("foo");
-    }
-}
-```
-
-#### After
-```rust
-fn foo() {
-    println!("foo");
-}
-```
-
-
-### `unwrap_option_return_type`
-**Source:**  [unwrap_return_type.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/unwrap_return_type.rs#L13) 
-
-Unwraps the function's return type.
-
-#### Before
-```rust
-fn foo() -> Option<i32>┃ { Some(42i32) }
-```
-
-#### After
-```rust
-fn foo() -> i32 { 42i32 }
-```
-
-
-### `unwrap_result_return_type`
-**Source:**  [unwrap_return_type.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/unwrap_return_type.rs#L26) 
-
-Unwraps the function's return type.
-
-#### Before
-```rust
-fn foo() -> Result<i32>┃ { Ok(42i32) }
-```
-
-#### After
-```rust
-fn foo() -> i32 { 42i32 }
-```
-
-
-### `unwrap_tuple`
-**Source:**  [unwrap_tuple.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/unwrap_tuple.rs#L8) 
-
-Unwraps the tuple into separate variables.
-
-#### Before
-```rust
-fn main() {
-    ┃let (foo, bar) = ("Foo", "Bar");
-}
-```
-
-#### After
-```rust
-fn main() {
-    let foo = "Foo";
-    let bar = "Bar";
-}
-```
-
-
-### `wrap_return_type_in_option`
-**Source:**  [wrap_return_type.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/wrap_return_type.rs#L16) 
-
-Wraps the function's return type in `Option`.
-
-#### Before
-```rust
-fn foo() -> i32┃ { 42i32 }
-```
-
-#### After
-```rust
-fn foo() -> Option<i32> { Some(42i32) }
-```
-
-
-### `wrap_return_type_in_result`
-**Source:**  [wrap_return_type.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/wrap_return_type.rs#L29) 
-
-Wraps the function's return type in `Result`.
-
-#### Before
-```rust
-fn foo() -> i32┃ { 42i32 }
-```
-
-#### After
-```rust
-fn foo() -> Result<i32, ${0:_}> { Ok(42i32) }
-```
-
-
-### `wrap_unwrap_cfg_attr`
-**Source:**  [wrap_unwrap_cfg_attr.rs](https://github.com/rust-lang/rust-analyzer/blob/master/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs#L12) 
-
-Wraps an attribute in a `cfg_attr` attribute, or unwraps a `cfg_attr` attribute into its inner attributes.
-
-#### Before
-```rust
-#[derive┃(Debug)]
-struct S {
-   field: i32
-}
-```
-
-#### After
-```rust
-#[cfg_attr(┃, derive(Debug))]
-struct S {
-   field: i32
-}
-```
diff --git a/src/tools/rust-analyzer/docs/book/src/configuration_generated.md b/src/tools/rust-analyzer/docs/book/src/configuration_generated.md
index 0a612d20b9cc1..2ae73df61d0af 100644
--- a/src/tools/rust-analyzer/docs/book/src/configuration_generated.md
+++ b/src/tools/rust-analyzer/docs/book/src/configuration_generated.md
@@ -1,53 +1,73 @@
-**rust-analyzer.assist.emitMustUse** (default: false)
+## rust-analyzer.assist.emitMustUse {#assist.emitMustUse}
 
- Whether to insert #[must_use] when generating `as_` methods
+Default: `false`
+
+Whether to insert #[must_use] when generating `as_` methods
 for enum variants.
 
 
-**rust-analyzer.assist.expressionFillDefault** (default: "todo")
+## rust-analyzer.assist.expressionFillDefault {#assist.expressionFillDefault}
+
+Default: `"todo"`
+
+Placeholder expression to use for missing expressions in assists.
+
+
+## rust-analyzer.assist.termSearch.borrowcheck {#assist.termSearch.borrowcheck}
+
+Default: `true`
 
- Placeholder expression to use for missing expressions in assists.
+Enable borrow checking for term search code assists. If set to false, there will be more suggestions, but some of them may not borrow-check.
 
 
-**rust-analyzer.assist.termSearch.borrowcheck** (default: true)
+## rust-analyzer.assist.termSearch.fuel {#assist.termSearch.fuel}
 
- Enable borrow checking for term search code assists. If set to false, also there will be more suggestions, but some of them may not borrow-check.
+Default: `1800`
 
+Term search fuel in "units of work" for assists (Defaults to 1800).
 
-**rust-analyzer.assist.termSearch.fuel** (default: 1800)
 
- Term search fuel in "units of work" for assists (Defaults to 1800).
+## rust-analyzer.cachePriming.enable {#cachePriming.enable}
 
+Default: `true`
 
-**rust-analyzer.cachePriming.enable** (default: true)
+Warm up caches on project load.
 
- Warm up caches on project load.
 
+## rust-analyzer.cachePriming.numThreads {#cachePriming.numThreads}
 
-**rust-analyzer.cachePriming.numThreads** (default: "physical")
+Default: `"physical"`
 
- How many worker threads to handle priming caches. The default `0` means to pick automatically.
+How many worker threads to use for priming caches. A value of `0` means to pick automatically.
 
 
-**rust-analyzer.cargo.allTargets** (default: true)
+## rust-analyzer.cargo.allTargets {#cargo.allTargets}
 
- Pass `--all-targets` to cargo invocation.
+Default: `true`
 
+Pass `--all-targets` to cargo invocation.
 
-**rust-analyzer.cargo.autoreload** (default: true)
 
- Automatically refresh project info via `cargo metadata` on
+## rust-analyzer.cargo.autoreload {#cargo.autoreload}
+
+Default: `true`
+
+Automatically refresh project info via `cargo metadata` on
 `Cargo.toml` or `.cargo/config.toml` changes.
 
 
-**rust-analyzer.cargo.buildScripts.enable** (default: true)
+## rust-analyzer.cargo.buildScripts.enable {#cargo.buildScripts.enable}
+
+Default: `true`
 
- Run build scripts (`build.rs`) for more precise code analysis.
+Run build scripts (`build.rs`) for more precise code analysis.
 
 
-**rust-analyzer.cargo.buildScripts.invocationStrategy** (default: "per_workspace")
+## rust-analyzer.cargo.buildScripts.invocationStrategy {#cargo.buildScripts.invocationStrategy}
 
- Specifies the invocation strategy to use when running the build scripts command.
+Default: `"per_workspace"`
+
+Specifies the invocation strategy to use when running the build scripts command.
 If `per_workspace` is set, the command will be executed for each Rust workspace with the
 workspace as the working directory.
 If `once` is set, the command will be executed once with the opened project as the
@@ -56,9 +76,11 @@ This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideC
 is set.
 
 
-**rust-analyzer.cargo.buildScripts.overrideCommand** (default: null)
+## rust-analyzer.cargo.buildScripts.overrideCommand {#cargo.buildScripts.overrideCommand}
+
+Default: `null`
 
- Override the command rust-analyzer uses to run build scripts and
+Override the command rust-analyzer uses to run build scripts and
 build procedural macros. The command is required to output json
 and should therefore include `--message-format=json` or a similar
 option.
@@ -77,62 +99,83 @@ cargo check --quiet --workspace --message-format=json --all-targets --keep-going
 .
 
 
-**rust-analyzer.cargo.buildScripts.rebuildOnSave** (default: true)
+## rust-analyzer.cargo.buildScripts.rebuildOnSave {#cargo.buildScripts.rebuildOnSave}
+
+Default: `true`
 
- Rerun proc-macros building/build-scripts running when proc-macro
+Rerun proc-macros building/build-scripts running when proc-macro
 or build-script sources change and are saved.
 
 
-**rust-analyzer.cargo.buildScripts.useRustcWrapper** (default: true)
+## rust-analyzer.cargo.buildScripts.useRustcWrapper {#cargo.buildScripts.useRustcWrapper}
 
- Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to
+Default: `true`
+
+Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to
 avoid checking unnecessary things.
 
 
- **rust-analyzer.cargo.cfgs**
+## rust-analyzer.cargo.cfgs {#cargo.cfgs}
 
 Default:
-
-```[
+```json
+[
   "debug_assertions",
   "miri"
 ]
-
 ```
 
- List of cfg options to enable with the given values.
+List of cfg options to enable with the given values.
 
 To enable a name without a value, use `"key"`.
 To enable a name with a value, use `"key=value"`.
 To disable, prefix the entry with a `!`.
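+
+For illustration, a possible value combining the three entry forms described above (the cfg names other than `debug_assertions` are placeholders):
+
+```json
+[
+  "debug_assertions",
+  "my_cfg=my_value",
+  "!test"
+]
+```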
 
 
- **rust-analyzer.cargo.extraArgs** (default: [])
+## rust-analyzer.cargo.extraArgs {#cargo.extraArgs}
+
+Default: `[]`
+
+Extra arguments that are passed to every cargo invocation.
 
- Extra arguments that are passed to every cargo invocation.
 
+## rust-analyzer.cargo.extraEnv {#cargo.extraEnv}
 
-**rust-analyzer.cargo.extraEnv** (default: {})
+Default: `{}`
 
- Extra environment variables that will be set when running cargo, rustc
+Extra environment variables that will be set when running cargo, rustc
 or other commands within the workspace. Useful for setting RUSTFLAGS.
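+
+A minimal illustrative value (the variable and its value are only an example):
+
+```json
+{
+  "RUSTFLAGS": "-C debuginfo=0"
+}
+```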
 
 
-**rust-analyzer.cargo.features** (default: [])
+## rust-analyzer.cargo.features {#cargo.features}
 
- List of features to activate.
+Default: `[]`
+
+List of features to activate.
 
 Set this to `"all"` to pass `--all-features` to cargo.
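+
+For example, an explicit list of (placeholder) feature names:
+
+```json
+["serde", "derive"]
+```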
 
 
-**rust-analyzer.cargo.noDefaultFeatures** (default: false)
+## rust-analyzer.cargo.noDefaultFeatures {#cargo.noDefaultFeatures}
+
+Default: `false`
+
+Whether to pass `--no-default-features` to cargo.
+
+
+## rust-analyzer.cargo.noDeps {#cargo.noDeps}
 
- Whether to pass `--no-default-features` to cargo.
+Default: `false`
 
+Whether to skip fetching dependencies. If set to `true`, the analysis is performed
+entirely offline, and Cargo metadata for dependencies is not fetched.
 
-**rust-analyzer.cargo.sysroot** (default: "discover")
 
- Relative path to the sysroot, or "discover" to try to automatically find it via
+## rust-analyzer.cargo.sysroot {#cargo.sysroot}
+
+Default: `"discover"`
+
+Relative path to the sysroot, or "discover" to try to automatically find it via
 "rustc --print sysroot".
 
 Unsetting this disables sysroot loading.
@@ -140,22 +183,28 @@ Unsetting this disables sysroot loading.
 This option does not take effect until rust-analyzer is restarted.
 
 
-**rust-analyzer.cargo.sysrootSrc** (default: null)
+## rust-analyzer.cargo.sysrootSrc {#cargo.sysrootSrc}
+
+Default: `null`
 
- Relative path to the sysroot library sources. If left unset, this will default to
+Relative path to the sysroot library sources. If left unset, this will default to
 `{cargo.sysroot}/lib/rustlib/src/rust/library`.
 
 This option does not take effect until rust-analyzer is restarted.
 
 
-**rust-analyzer.cargo.target** (default: null)
+## rust-analyzer.cargo.target {#cargo.target}
 
- Compilation target override (target tuple).
+Default: `null`
 
+Compilation target override (target tuple).
 
-**rust-analyzer.cargo.targetDir** (default: null)
 
- Optional path to a rust-analyzer specific target directory.
+## rust-analyzer.cargo.targetDir {#cargo.targetDir}
+
+Default: `null`
+
+Optional path to a rust-analyzer specific target directory.
 This prevents rust-analyzer's `cargo check` and initial build-script and proc-macro
 building from locking the `Cargo.lock` at the expense of duplicating build artifacts.
 
@@ -163,71 +212,93 @@ Set to `true` to use a subdirectory of the existing target directory or
 set to a path relative to the workspace to use that path.
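+
+For example, a workspace-relative path (the directory name is only illustrative):
+
+```json
+"target/rust-analyzer"
+```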
 
 
-**rust-analyzer.cfg.setTest** (default: true)
+## rust-analyzer.cfg.setTest {#cfg.setTest}
+
+Default: `true`
 
- Set `cfg(test)` for local crates. Defaults to true.
+Set `cfg(test)` for local crates. Defaults to true.
 
 
-**rust-analyzer.checkOnSave** (default: true)
+## rust-analyzer.checkOnSave {#checkOnSave}
 
- Run the check command for diagnostics on save.
+Default: `true`
 
+Run the check command for diagnostics on save.
 
-**rust-analyzer.check.allTargets** (default: null)
 
- Check all targets and tests (`--all-targets`). Defaults to
+## rust-analyzer.check.allTargets {#check.allTargets}
+
+Default: `null`
+
+Check all targets and tests (`--all-targets`). Defaults to
 `#rust-analyzer.cargo.allTargets#`.
 
 
-**rust-analyzer.check.command** (default: "check")
+## rust-analyzer.check.command {#check.command}
+
+Default: `"check"`
 
- Cargo command to use for `cargo check`.
+Cargo command to use for `cargo check`.
 
 
-**rust-analyzer.check.extraArgs** (default: [])
+## rust-analyzer.check.extraArgs {#check.extraArgs}
 
- Extra arguments for `cargo check`.
+Default: `[]`
 
+Extra arguments for `cargo check`.
 
-**rust-analyzer.check.extraEnv** (default: {})
 
- Extra environment variables that will be set when running `cargo check`.
+## rust-analyzer.check.extraEnv {#check.extraEnv}
+
+Default: `{}`
+
+Extra environment variables that will be set when running `cargo check`.
 Extends `#rust-analyzer.cargo.extraEnv#`.
 
 
-**rust-analyzer.check.features** (default: null)
+## rust-analyzer.check.features {#check.features}
+
+Default: `null`
 
- List of features to activate. Defaults to
+List of features to activate. Defaults to
 `#rust-analyzer.cargo.features#`.
 
 Set to `"all"` to pass `--all-features` to Cargo.
 
 
-**rust-analyzer.check.ignore** (default: [])
+## rust-analyzer.check.ignore {#check.ignore}
 
- List of `cargo check` (or other command specified in `check.command`) diagnostics to ignore.
+Default: `[]`
+
+List of `cargo check` (or other command specified in `check.command`) diagnostics to ignore.
 
 For example for `cargo check`: `dead_code`, `unused_imports`, `unused_variables`,...
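+
+As a concrete value using the lints mentioned above:
+
+```json
+["dead_code", "unused_imports", "unused_variables"]
+```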
 
 
-**rust-analyzer.check.invocationStrategy** (default: "per_workspace")
+## rust-analyzer.check.invocationStrategy {#check.invocationStrategy}
+
+Default: `"per_workspace"`
 
- Specifies the invocation strategy to use when running the check command.
+Specifies the invocation strategy to use when running the check command.
 If `per_workspace` is set, the command will be executed for each workspace.
 If `once` is set, the command will be executed once.
 This config only has an effect when `#rust-analyzer.check.overrideCommand#`
 is set.
 
 
-**rust-analyzer.check.noDefaultFeatures** (default: null)
+## rust-analyzer.check.noDefaultFeatures {#check.noDefaultFeatures}
+
+Default: `null`
 
- Whether to pass `--no-default-features` to Cargo. Defaults to
+Whether to pass `--no-default-features` to Cargo. Defaults to
 `#rust-analyzer.cargo.noDefaultFeatures#`.
 
 
-**rust-analyzer.check.overrideCommand** (default: null)
+## rust-analyzer.check.overrideCommand {#check.overrideCommand}
 
- Override the command rust-analyzer uses instead of `cargo check` for
+Default: `null`
+
+Override the command rust-analyzer uses instead of `cargo check` for
 diagnostics on save. The command is required to output json and
 should therefore include `--message-format=json` or a similar option
 (if your client supports the `colorDiagnosticOutput` experimental
@@ -255,9 +326,11 @@ cargo check --workspace --message-format=json --all-targets
 .
 
 
-**rust-analyzer.check.targets** (default: null)
+## rust-analyzer.check.targets {#check.targets}
+
+Default: `null`
 
- Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty.
+Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty.
 
 Can be a single target, e.g. `"x86_64-unknown-linux-gnu"` or a list of targets, e.g.
 `["aarch64-apple-darwin", "x86_64-apple-darwin"]`.
@@ -265,41 +338,51 @@ Can be a single target, e.g. `"x86_64-unknown-linux-gnu"` or a list of targets,
 Aliased as `"checkOnSave.targets"`.
 
 
-**rust-analyzer.check.workspace** (default: true)
+## rust-analyzer.check.workspace {#check.workspace}
+
+Default: `true`
 
- Whether `--workspace` should be passed to `cargo check`.
+Whether `--workspace` should be passed to `cargo check`.
 If false, `-p <package>` will be passed instead if applicable. In case it is not, no
 check will be performed.
 
 
-**rust-analyzer.completion.addSemicolonToUnit** (default: true)
+## rust-analyzer.completion.addSemicolonToUnit {#completion.addSemicolonToUnit}
 
- Whether to automatically add a semicolon when completing unit-returning functions.
+Default: `true`
+
+Whether to automatically add a semicolon when completing unit-returning functions.
 
 In `match` arms it completes a comma instead.
 
 
-**rust-analyzer.completion.autoAwait.enable** (default: true)
+## rust-analyzer.completion.autoAwait.enable {#completion.autoAwait.enable}
+
+Default: `true`
+
+Toggles the additional completions that automatically show method calls and field accesses with `await` prefixed to them when completing on a future.
 
- Toggles the additional completions that automatically show method calls and field accesses with `await` prefixed to them when completing on a future.
 
+## rust-analyzer.completion.autoIter.enable {#completion.autoIter.enable}
 
-**rust-analyzer.completion.autoIter.enable** (default: true)
+Default: `true`
 
- Toggles the additional completions that automatically show method calls with `iter()` or `into_iter()` prefixed to them when completing on a type that has them.
+Toggles the additional completions that automatically show method calls with `iter()` or `into_iter()` prefixed to them when completing on a type that has them.
 
 
-**rust-analyzer.completion.autoimport.enable** (default: true)
+## rust-analyzer.completion.autoimport.enable {#completion.autoimport.enable}
 
- Toggles the additional completions that automatically add imports when completed.
+Default: `true`
+
+Toggles the additional completions that automatically add imports when completed.
 Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled.
 
 
- **rust-analyzer.completion.autoimport.exclude**
+## rust-analyzer.completion.autoimport.exclude {#completion.autoimport.exclude}
 
 Default:
-
-```[
+```json
+[
   {
     "path": "core::borrow::Borrow",
     "type": "methods"
@@ -309,10 +392,9 @@ Default:
     "type": "methods"
   }
 ]
-
 ```
 
- A list of full paths to items to exclude from auto-importing completions.
+A list of full paths to items to exclude from auto-importing completions.
 
 Traits in this list won't have their methods suggested in completions unless the trait
 is in scope.
@@ -325,56 +407,72 @@ For traits the type "methods" can be used to only exclude the methods but not th
 This setting also inherits `#rust-analyzer.completion.excludeTraits#`.
 
 
- **rust-analyzer.completion.autoself.enable** (default: true)
+## rust-analyzer.completion.autoself.enable {#completion.autoself.enable}
+
+Default: `true`
 
- Toggles the additional completions that automatically show method calls and field accesses
+Toggles the additional completions that automatically show method calls and field accesses
 with `self` prefixed to them when inside a method.
 
 
-**rust-analyzer.completion.callable.snippets** (default: "fill_arguments")
+## rust-analyzer.completion.callable.snippets {#completion.callable.snippets}
 
- Whether to add parenthesis and argument snippets when completing function.
+Default: `"fill_arguments"`
 
+Whether to add parentheses and argument snippets when completing a function.
 
-**rust-analyzer.completion.excludeTraits** (default: [])
 
- A list of full paths to traits whose methods to exclude from completion.
+## rust-analyzer.completion.excludeTraits {#completion.excludeTraits}
+
+Default: `[]`
+
+A list of full paths to traits whose methods to exclude from completion.
 
 Methods from these traits won't be completed, even if the trait is in scope. However, they will still be suggested on expressions whose type is `dyn Trait`, `impl Trait` or `T where T: Trait`.
 
 Note that the trait themselves can still be completed.
 
 
-**rust-analyzer.completion.fullFunctionSignatures.enable** (default: false)
+## rust-analyzer.completion.fullFunctionSignatures.enable {#completion.fullFunctionSignatures.enable}
 
- Whether to show full function/method signatures in completion docs.
+Default: `false`
 
+Whether to show full function/method signatures in completion docs.
 
-**rust-analyzer.completion.hideDeprecated** (default: false)
 
- Whether to omit deprecated items from autocompletion. By default they are marked as deprecated but not hidden.
+## rust-analyzer.completion.hideDeprecated {#completion.hideDeprecated}
 
+Default: `false`
 
-**rust-analyzer.completion.limit** (default: null)
+Whether to omit deprecated items from autocompletion. By default they are marked as deprecated but not hidden.
 
- Maximum number of completions to return. If `None`, the limit is infinite.
 
+## rust-analyzer.completion.limit {#completion.limit}
 
-**rust-analyzer.completion.postfix.enable** (default: true)
+Default: `null`
 
- Whether to show postfix snippets like `dbg`, `if`, `not`, etc.
+Maximum number of completions to return. If `None`, the limit is infinite.
 
 
-**rust-analyzer.completion.privateEditable.enable** (default: false)
+## rust-analyzer.completion.postfix.enable {#completion.postfix.enable}
 
- Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position.
+Default: `true`
 
+Whether to show postfix snippets like `dbg`, `if`, `not`, etc.
 
- **rust-analyzer.completion.snippets.custom**
 
-Default:
+## rust-analyzer.completion.privateEditable.enable {#completion.privateEditable.enable}
 
-```{
+Default: `false`
+
+Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position.
+
+
+## rust-analyzer.completion.snippets.custom {#completion.snippets.custom}
+
+Default:
+```json
+{
   "Ok": {
     "postfix": "ok",
     "body": "Ok(${receiver})",
@@ -415,478 +513,651 @@ Default:
     "scope": "expr"
   }
 }
-
 ```
 
- Custom completion snippets.
+Custom completion snippets.
+
+
+## rust-analyzer.completion.termSearch.enable {#completion.termSearch.enable}
+
+Default: `false`
+
+Whether to enable term search based snippets like `Some(foo.bar().baz())`.
 
 
- **rust-analyzer.completion.termSearch.enable** (default: false)
+## rust-analyzer.completion.termSearch.fuel {#completion.termSearch.fuel}
 
- Whether to enable term search based snippets like `Some(foo.bar().baz())`.
+Default: `1000`
 
+Term search fuel in "units of work" for autocompletion (Defaults to 1000).
 
-**rust-analyzer.completion.termSearch.fuel** (default: 1000)
 
- Term search fuel in "units of work" for autocompletion (Defaults to 1000).
+## rust-analyzer.diagnostics.disabled {#diagnostics.disabled}
 
+Default: `[]`
 
-**rust-analyzer.diagnostics.disabled** (default: [])
+List of rust-analyzer diagnostics to disable.
 
- List of rust-analyzer diagnostics to disable.
 
+## rust-analyzer.diagnostics.enable {#diagnostics.enable}
 
-**rust-analyzer.diagnostics.enable** (default: true)
+Default: `true`
 
- Whether to show native rust-analyzer diagnostics.
+Whether to show native rust-analyzer diagnostics.
 
 
-**rust-analyzer.diagnostics.experimental.enable** (default: false)
+## rust-analyzer.diagnostics.experimental.enable {#diagnostics.experimental.enable}
 
- Whether to show experimental rust-analyzer diagnostics that might
+Default: `false`
+
+Whether to show experimental rust-analyzer diagnostics that might
 have more false positives than usual.
 
 
-**rust-analyzer.diagnostics.remapPrefix** (default: {})
+## rust-analyzer.diagnostics.remapPrefix {#diagnostics.remapPrefix}
+
+Default: `{}`
 
- Map of prefixes to be substituted when parsing diagnostic file paths.
+Map of prefixes to be substituted when parsing diagnostic file paths.
 This should be the reverse mapping of what is passed to `rustc` as `--remap-path-prefix`.
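+
+For instance (paths are placeholders): if `rustc` is invoked with `--remap-path-prefix=/home/user/project=/src`, the reverse mapping would be:
+
+```json
+{
+  "/src": "/home/user/project"
+}
+```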
 
 
-**rust-analyzer.diagnostics.styleLints.enable** (default: false)
+## rust-analyzer.diagnostics.styleLints.enable {#diagnostics.styleLints.enable}
+
+Default: `false`
 
- Whether to run additional style lints.
+Whether to run additional style lints.
 
 
-**rust-analyzer.diagnostics.warningsAsHint** (default: [])
+## rust-analyzer.diagnostics.warningsAsHint {#diagnostics.warningsAsHint}
 
- List of warnings that should be displayed with hint severity.
+Default: `[]`
+
+List of warnings that should be displayed with hint severity.
 
 The warnings will be indicated by faded text or three dots in code
 and will not show up in the `Problems Panel`.
 
 
-**rust-analyzer.diagnostics.warningsAsInfo** (default: [])
+## rust-analyzer.diagnostics.warningsAsInfo {#diagnostics.warningsAsInfo}
+
+Default: `[]`
 
- List of warnings that should be displayed with info severity.
+List of warnings that should be displayed with info severity.
 
 The warnings will be indicated by a blue squiggly underline in code
 and a blue icon in the `Problems Panel`.
 
 
-**rust-analyzer.files.exclude** (default: [])
+## rust-analyzer.files.exclude {#files.exclude}
 
- These paths (file/directories) will be ignored by rust-analyzer. They are
+Default: `[]`
+
+These paths (files/directories) will be ignored by rust-analyzer. They are
 relative to the workspace root, and globs are not supported. You may
 also need to add the folders to Code's `files.watcherExclude`.
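+
+An illustrative value with placeholder directory names, relative to the workspace root:
+
+```json
+["target", "generated"]
+```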
 
 
-**rust-analyzer.files.watcher** (default: "client")
+## rust-analyzer.files.watcher {#files.watcher}
+
+Default: `"client"`
+
+Controls the file watching implementation.
+
+
+## rust-analyzer.highlightRelated.breakPoints.enable {#highlightRelated.breakPoints.enable}
+
+Default: `true`
 
- Controls file watching implementation.
+Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords.
 
 
-**rust-analyzer.highlightRelated.breakPoints.enable** (default: true)
+## rust-analyzer.highlightRelated.closureCaptures.enable {#highlightRelated.closureCaptures.enable}
 
- Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords.
+Default: `true`
 
+Enables highlighting of all captures of a closure while the cursor is on the `|` or move keyword of a closure.
 
-**rust-analyzer.highlightRelated.closureCaptures.enable** (default: true)
 
- Enables highlighting of all captures of a closure while the cursor is on the `|` or move keyword of a closure.
+## rust-analyzer.highlightRelated.exitPoints.enable {#highlightRelated.exitPoints.enable}
 
+Default: `true`
 
-**rust-analyzer.highlightRelated.exitPoints.enable** (default: true)
+Enables highlighting of all exit points while the cursor is on any `return`, `?`, `fn`, or return type arrow (`->`).
 
- Enables highlighting of all exit points while the cursor is on any `return`, `?`, `fn`, or return type arrow (`->`).
 
+## rust-analyzer.highlightRelated.references.enable {#highlightRelated.references.enable}
 
-**rust-analyzer.highlightRelated.references.enable** (default: true)
+Default: `true`
 
- Enables highlighting of related references while the cursor is on any identifier.
+Enables highlighting of related references while the cursor is on any identifier.
 
 
-**rust-analyzer.highlightRelated.yieldPoints.enable** (default: true)
+## rust-analyzer.highlightRelated.yieldPoints.enable {#highlightRelated.yieldPoints.enable}
 
- Enables highlighting of all break points for a loop or block context while the cursor is on any `async` or `await` keywords.
+Default: `true`
 
+Enables highlighting of all yield points of an async context while the cursor is on any `async` or `await` keywords.
 
-**rust-analyzer.hover.actions.debug.enable** (default: true)
 
- Whether to show `Debug` action. Only applies when
+## rust-analyzer.hover.actions.debug.enable {#hover.actions.debug.enable}
+
+Default: `true`
+
+Whether to show `Debug` action. Only applies when
 `#rust-analyzer.hover.actions.enable#` is set.
 
 
-**rust-analyzer.hover.actions.enable** (default: true)
+## rust-analyzer.hover.actions.enable {#hover.actions.enable}
+
+Default: `true`
 
- Whether to show HoverActions in Rust files.
+Whether to show HoverActions in Rust files.
 
 
-**rust-analyzer.hover.actions.gotoTypeDef.enable** (default: true)
+## rust-analyzer.hover.actions.gotoTypeDef.enable {#hover.actions.gotoTypeDef.enable}
 
- Whether to show `Go to Type Definition` action. Only applies when
+Default: `true`
+
+Whether to show `Go to Type Definition` action. Only applies when
 `#rust-analyzer.hover.actions.enable#` is set.
 
 
-**rust-analyzer.hover.actions.implementations.enable** (default: true)
+## rust-analyzer.hover.actions.implementations.enable {#hover.actions.implementations.enable}
+
+Default: `true`
 
- Whether to show `Implementations` action. Only applies when
+Whether to show `Implementations` action. Only applies when
 `#rust-analyzer.hover.actions.enable#` is set.
 
 
-**rust-analyzer.hover.actions.references.enable** (default: false)
+## rust-analyzer.hover.actions.references.enable {#hover.actions.references.enable}
+
+Default: `false`
 
- Whether to show `References` action. Only applies when
+Whether to show `References` action. Only applies when
 `#rust-analyzer.hover.actions.enable#` is set.
 
 
-**rust-analyzer.hover.actions.run.enable** (default: true)
+## rust-analyzer.hover.actions.run.enable {#hover.actions.run.enable}
 
- Whether to show `Run` action. Only applies when
+Default: `true`
+
+Whether to show `Run` action. Only applies when
 `#rust-analyzer.hover.actions.enable#` is set.
 
 
-**rust-analyzer.hover.actions.updateTest.enable** (default: true)
+## rust-analyzer.hover.actions.updateTest.enable {#hover.actions.updateTest.enable}
+
+Default: `true`
 
- Whether to show `Update Test` action. Only applies when
+Whether to show `Update Test` action. Only applies when
 `#rust-analyzer.hover.actions.enable#` and `#rust-analyzer.hover.actions.run.enable#` are set.
 
 
-**rust-analyzer.hover.documentation.enable** (default: true)
+## rust-analyzer.hover.documentation.enable {#hover.documentation.enable}
+
+Default: `true`
 
- Whether to show documentation on hover.
+Whether to show documentation on hover.
 
 
-**rust-analyzer.hover.documentation.keywords.enable** (default: true)
+## rust-analyzer.hover.documentation.keywords.enable {#hover.documentation.keywords.enable}
 
- Whether to show keyword hover popups. Only applies when
+Default: `true`
+
+Whether to show keyword hover popups. Only applies when
 `#rust-analyzer.hover.documentation.enable#` is set.
 
 
-**rust-analyzer.hover.dropGlue.enable** (default: true)
+## rust-analyzer.hover.dropGlue.enable {#hover.dropGlue.enable}
+
+Default: `true`
+
+Whether to show drop glue information on hover.
 
- Whether to show drop glue information on hover.
 
+## rust-analyzer.hover.links.enable {#hover.links.enable}
 
-**rust-analyzer.hover.links.enable** (default: true)
+Default: `true`
 
- Use markdown syntax for links on hover.
+Use markdown syntax for links on hover.
 
 
-**rust-analyzer.hover.maxSubstitutionLength** (default: 20)
+## rust-analyzer.hover.maxSubstitutionLength {#hover.maxSubstitutionLength}
 
- Whether to show what types are used as generic arguments in calls etc. on hover, and what is their max length to show such types, beyond it they will be shown with ellipsis.
+Default: `20`
+
+Whether to show what types are used as generic arguments in calls etc. on hover, and the maximum length at which to show such types; beyond that they will be shown with an ellipsis.
 
 This can take three values: `null` means "unlimited", the string `"hide"` means to not show generic substitutions at all, and a number means to limit them to X characters.
 
 The default is 20 characters.
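+
+For example, to hide generic substitutions entirely, one could set (e.g. in a client's `settings.json`; shown here only as an illustration):
+
+```json
+{ "rust-analyzer.hover.maxSubstitutionLength": "hide" }
+```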
 
 
-**rust-analyzer.hover.memoryLayout.alignment** (default: "hexadecimal")
+## rust-analyzer.hover.memoryLayout.alignment {#hover.memoryLayout.alignment}
+
+Default: `"hexadecimal"`
+
+How to render the align information in a memory layout hover.
+
+
+## rust-analyzer.hover.memoryLayout.enable {#hover.memoryLayout.enable}
 
- How to render the align information in a memory layout hover.
+Default: `true`
 
+Whether to show memory layout data on hover.
 
-**rust-analyzer.hover.memoryLayout.enable** (default: true)
 
- Whether to show memory layout data on hover.
+## rust-analyzer.hover.memoryLayout.niches {#hover.memoryLayout.niches}
 
+Default: `false`
 
-**rust-analyzer.hover.memoryLayout.niches** (default: false)
+How to render the niche information in a memory layout hover.
 
- How to render the niche information in a memory layout hover.
 
+## rust-analyzer.hover.memoryLayout.offset {#hover.memoryLayout.offset}
 
-**rust-analyzer.hover.memoryLayout.offset** (default: "hexadecimal")
+Default: `"hexadecimal"`
 
- How to render the offset information in a memory layout hover.
+How to render the offset information in a memory layout hover.
 
 
-**rust-analyzer.hover.memoryLayout.size** (default: "both")
+## rust-analyzer.hover.memoryLayout.size {#hover.memoryLayout.size}
 
- How to render the size information in a memory layout hover.
+Default: `"both"`
 
+How to render the size information in a memory layout hover.
 
-**rust-analyzer.hover.show.enumVariants** (default: 5)
 
- How many variants of an enum to display when hovering on. Show none if empty.
+## rust-analyzer.hover.show.enumVariants {#hover.show.enumVariants}
 
+Default: `5`
 
-**rust-analyzer.hover.show.fields** (default: 5)
+How many variants of an enum to display when hovering over it. Show none if empty.
 
- How many fields of a struct, variant or union to display when hovering on. Show none if empty.
 
+## rust-analyzer.hover.show.fields {#hover.show.fields}
 
-**rust-analyzer.hover.show.traitAssocItems** (default: null)
+Default: `5`
 
- How many associated items of a trait to display when hovering a trait.
+How many fields of a struct, variant or union to display when hovering over it. Show none if empty.
 
 
-**rust-analyzer.imports.granularity.enforce** (default: false)
+## rust-analyzer.hover.show.traitAssocItems {#hover.show.traitAssocItems}
 
- Whether to enforce the import granularity setting for all files. If set to false rust-analyzer will try to keep import styles consistent per file.
+Default: `null`
 
+How many associated items of a trait to display when hovering over a trait.
 
-**rust-analyzer.imports.granularity.group** (default: "crate")
 
- How imports should be grouped into use statements.
+## rust-analyzer.imports.granularity.enforce {#imports.granularity.enforce}
 
+Default: `false`
 
-**rust-analyzer.imports.group.enable** (default: true)
+Whether to enforce the import granularity setting for all files. If set to false, rust-analyzer will try to keep import styles consistent per file.
 
- Group inserted imports by the [following order](https://rust-analyzer.github.io/manual.html#auto-import). Groups are separated by newlines.
 
+## rust-analyzer.imports.granularity.group {#imports.granularity.group}
 
-**rust-analyzer.imports.merge.glob** (default: true)
+Default: `"crate"`
 
- Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`.
+How imports should be grouped into use statements.
 
 
-**rust-analyzer.imports.preferNoStd** (default: false)
+## rust-analyzer.imports.group.enable {#imports.group.enable}
 
- Prefer to unconditionally use imports of the core and alloc crate, over the std crate.
+Default: `true`
 
+Group inserted imports by the [following order](https://rust-analyzer.github.io/book/features.html#auto-import). Groups are separated by newlines.
 
-**rust-analyzer.imports.preferPrelude** (default: false)
 
- Whether to prefer import paths containing a `prelude` module.
+## rust-analyzer.imports.merge.glob {#imports.merge.glob}
 
+Default: `true`
 
-**rust-analyzer.imports.prefix** (default: "plain")
+Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`.
 
- The path structure for newly inserted paths to use.
 
+## rust-analyzer.imports.preferNoStd {#imports.preferNoStd}
 
-**rust-analyzer.imports.prefixExternPrelude** (default: false)
+Default: `false`
 
- Whether to prefix external (including std, core) crate imports with `::`. e.g. "use ::std::io::Read;".
+Prefer to unconditionally use imports of the `core` and `alloc` crates over the `std` crate.
 
 
-**rust-analyzer.inlayHints.bindingModeHints.enable** (default: false)
+## rust-analyzer.imports.preferPrelude {#imports.preferPrelude}
 
- Whether to show inlay type hints for binding modes.
+Default: `false`
 
+Whether to prefer import paths containing a `prelude` module.
 
-**rust-analyzer.inlayHints.chainingHints.enable** (default: true)
 
- Whether to show inlay type hints for method chains.
+## rust-analyzer.imports.prefix {#imports.prefix}
 
+Default: `"plain"`
 
-**rust-analyzer.inlayHints.closingBraceHints.enable** (default: true)
+The path structure for newly inserted paths to use.
 
- Whether to show inlay hints after a closing `}` to indicate what item it belongs to.
 
+## rust-analyzer.imports.prefixExternPrelude {#imports.prefixExternPrelude}
 
-**rust-analyzer.inlayHints.closingBraceHints.minLines** (default: 25)
+Default: `false`
 
- Minimum number of lines required before the `}` until the hint is shown (set to 0 or 1
+Whether to prefix external (including std, core) crate imports with `::`, e.g. `use ::std::io::Read;`.
+
+
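+As a quick illustration, here is a minimal sketch of how a few of the `imports.*` options above can be combined, e.g. in VS Code's `settings.json` (the particular values are just one of the documented choices, not a recommendation):
+
+```json
+{
+  "rust-analyzer.imports.granularity.group": "module",
+  "rust-analyzer.imports.granularity.enforce": true,
+  "rust-analyzer.imports.prefix": "self"
+}
+```
+
+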
+## rust-analyzer.inlayHints.bindingModeHints.enable {#inlayHints.bindingModeHints.enable}
+
+Default: `false`
+
+Whether to show inlay type hints for binding modes.
+
+
+## rust-analyzer.inlayHints.chainingHints.enable {#inlayHints.chainingHints.enable}
+
+Default: `true`
+
+Whether to show inlay type hints for method chains.
+
+
+## rust-analyzer.inlayHints.closingBraceHints.enable {#inlayHints.closingBraceHints.enable}
+
+Default: `true`
+
+Whether to show inlay hints after a closing `}` to indicate what item it belongs to.
+
+
+## rust-analyzer.inlayHints.closingBraceHints.minLines {#inlayHints.closingBraceHints.minLines}
+
+Default: `25`
+
+Minimum number of lines required before the `}` until the hint is shown (set to 0 or 1
 to always show them).
 
 
-**rust-analyzer.inlayHints.closureCaptureHints.enable** (default: false)
+## rust-analyzer.inlayHints.closureCaptureHints.enable {#inlayHints.closureCaptureHints.enable}
+
+Default: `false`
+
+Whether to show inlay hints for closure captures.
+
+
+## rust-analyzer.inlayHints.closureReturnTypeHints.enable {#inlayHints.closureReturnTypeHints.enable}
+
+Default: `"never"`
+
+Whether to show inlay type hints for return types of closures.
+
 
- Whether to show inlay hints for closure captures.
+## rust-analyzer.inlayHints.closureStyle {#inlayHints.closureStyle}
 
+Default: `"impl_fn"`
 
-**rust-analyzer.inlayHints.closureReturnTypeHints.enable** (default: "never")
+Closure notation in type and chaining inlay hints.
 
- Whether to show inlay type hints for return types of closures.
 
+## rust-analyzer.inlayHints.discriminantHints.enable {#inlayHints.discriminantHints.enable}
 
-**rust-analyzer.inlayHints.closureStyle** (default: "impl_fn")
+Default: `"never"`
 
- Closure notation in type and chaining inlay hints.
+Whether to show enum variant discriminant hints.
 
 
-**rust-analyzer.inlayHints.discriminantHints.enable** (default: "never")
+## rust-analyzer.inlayHints.expressionAdjustmentHints.enable {#inlayHints.expressionAdjustmentHints.enable}
 
- Whether to show enum variant discriminant hints.
+Default: `"never"`
 
+Whether to show inlay hints for type adjustments.
 
-**rust-analyzer.inlayHints.expressionAdjustmentHints.enable** (default: "never")
 
- Whether to show inlay hints for type adjustments.
+## rust-analyzer.inlayHints.expressionAdjustmentHints.hideOutsideUnsafe {#inlayHints.expressionAdjustmentHints.hideOutsideUnsafe}
 
+Default: `false`
 
-**rust-analyzer.inlayHints.expressionAdjustmentHints.hideOutsideUnsafe** (default: false)
+Whether to hide inlay hints for type adjustments outside of `unsafe` blocks.
 
- Whether to hide inlay hints for type adjustments outside of `unsafe` blocks.
 
+## rust-analyzer.inlayHints.expressionAdjustmentHints.mode {#inlayHints.expressionAdjustmentHints.mode}
 
-**rust-analyzer.inlayHints.expressionAdjustmentHints.mode** (default: "prefix")
+Default: `"prefix"`
 
- Whether to show inlay hints as postfix ops (`.*` instead of `*`, etc).
+Whether to show inlay hints as postfix ops (`.*` instead of `*`, etc).
 
 
-**rust-analyzer.inlayHints.genericParameterHints.const.enable** (default: true)
+## rust-analyzer.inlayHints.genericParameterHints.const.enable {#inlayHints.genericParameterHints.const.enable}
 
- Whether to show const generic parameter name inlay hints.
+Default: `true`
 
+Whether to show const generic parameter name inlay hints.
 
-**rust-analyzer.inlayHints.genericParameterHints.lifetime.enable** (default: false)
 
- Whether to show generic lifetime parameter name inlay hints.
+## rust-analyzer.inlayHints.genericParameterHints.lifetime.enable {#inlayHints.genericParameterHints.lifetime.enable}
 
+Default: `false`
 
-**rust-analyzer.inlayHints.genericParameterHints.type.enable** (default: false)
+Whether to show generic lifetime parameter name inlay hints.
 
- Whether to show generic type parameter name inlay hints.
 
+## rust-analyzer.inlayHints.genericParameterHints.type.enable {#inlayHints.genericParameterHints.type.enable}
 
-**rust-analyzer.inlayHints.implicitDrops.enable** (default: false)
+Default: `false`
 
- Whether to show implicit drop hints.
+Whether to show generic type parameter name inlay hints.
 
 
-**rust-analyzer.inlayHints.implicitSizedBoundHints.enable** (default: false)
+## rust-analyzer.inlayHints.implicitDrops.enable {#inlayHints.implicitDrops.enable}
 
- Whether to show inlay hints for the implied type parameter `Sized` bound.
+Default: `false`
 
+Whether to show implicit drop hints.
 
-**rust-analyzer.inlayHints.lifetimeElisionHints.enable** (default: "never")
 
- Whether to show inlay type hints for elided lifetimes in function signatures.
+## rust-analyzer.inlayHints.implicitSizedBoundHints.enable {#inlayHints.implicitSizedBoundHints.enable}
 
+Default: `false`
 
-**rust-analyzer.inlayHints.lifetimeElisionHints.useParameterNames** (default: false)
+Whether to show inlay hints for the implied type parameter `Sized` bound.
 
- Whether to prefer using parameter names as the name for elided lifetime hints if possible.
 
+## rust-analyzer.inlayHints.lifetimeElisionHints.enable {#inlayHints.lifetimeElisionHints.enable}
 
-**rust-analyzer.inlayHints.maxLength** (default: 25)
+Default: `"never"`
 
- Maximum length for inlay hints. Set to null to have an unlimited length.
+Whether to show inlay type hints for elided lifetimes in function signatures.
 
 
-**rust-analyzer.inlayHints.parameterHints.enable** (default: true)
+## rust-analyzer.inlayHints.lifetimeElisionHints.useParameterNames {#inlayHints.lifetimeElisionHints.useParameterNames}
 
- Whether to show function parameter name inlay hints at the call
+Default: `false`
+
+Whether to prefer using parameter names as the name for elided lifetime hints if possible.
+
+
+## rust-analyzer.inlayHints.maxLength {#inlayHints.maxLength}
+
+Default: `25`
+
+Maximum length for inlay hints. Set to null to have an unlimited length.
+
+
+## rust-analyzer.inlayHints.parameterHints.enable {#inlayHints.parameterHints.enable}
+
+Default: `true`
+
+Whether to show function parameter name inlay hints at the call
 site.
 
 
-**rust-analyzer.inlayHints.rangeExclusiveHints.enable** (default: false)
+## rust-analyzer.inlayHints.rangeExclusiveHints.enable {#inlayHints.rangeExclusiveHints.enable}
 
- Whether to show exclusive range inlay hints.
+Default: `false`
 
+Whether to show exclusive range inlay hints.
 
-**rust-analyzer.inlayHints.reborrowHints.enable** (default: "never")
 
- Whether to show inlay hints for compiler inserted reborrows.
+## rust-analyzer.inlayHints.reborrowHints.enable {#inlayHints.reborrowHints.enable}
+
+Default: `"never"`
+
+Whether to show inlay hints for compiler inserted reborrows.
 This setting is deprecated in favor of #rust-analyzer.inlayHints.expressionAdjustmentHints.enable#.
 
 
-**rust-analyzer.inlayHints.renderColons** (default: true)
+## rust-analyzer.inlayHints.renderColons {#inlayHints.renderColons}
+
+Default: `true`
 
- Whether to render leading colons for type hints, and trailing colons for parameter hints.
+Whether to render leading colons for type hints, and trailing colons for parameter hints.
 
 
-**rust-analyzer.inlayHints.typeHints.enable** (default: true)
+## rust-analyzer.inlayHints.typeHints.enable {#inlayHints.typeHints.enable}
 
- Whether to show inlay type hints for variables.
+Default: `true`
 
+Whether to show inlay type hints for variables.
 
-**rust-analyzer.inlayHints.typeHints.hideClosureInitialization** (default: false)
 
- Whether to hide inlay type hints for `let` statements that initialize to a closure.
+## rust-analyzer.inlayHints.typeHints.hideClosureInitialization {#inlayHints.typeHints.hideClosureInitialization}
+
+Default: `false`
+
+Whether to hide inlay type hints for `let` statements that initialize to a closure.
 Only applies to closures with blocks, same as `#rust-analyzer.inlayHints.closureReturnTypeHints.enable#`.
 
 
-**rust-analyzer.inlayHints.typeHints.hideClosureParameter** (default: false)
+## rust-analyzer.inlayHints.typeHints.hideClosureParameter {#inlayHints.typeHints.hideClosureParameter}
+
+Default: `false`
+
+Whether to hide inlay parameter type hints for closures.
+
+
+## rust-analyzer.inlayHints.typeHints.hideNamedConstructor {#inlayHints.typeHints.hideNamedConstructor}
 
- Whether to hide inlay parameter type hints for closures.
+Default: `false`
 
+Whether to hide inlay type hints for constructors.
 
-**rust-analyzer.inlayHints.typeHints.hideNamedConstructor** (default: false)
 
- Whether to hide inlay type hints for constructors.
+## rust-analyzer.interpret.tests {#interpret.tests}
 
+Default: `false`
 
-**rust-analyzer.interpret.tests** (default: false)
+Enables the experimental support for interpreting tests.
 
- Enables the experimental support for interpreting tests.
 
+## rust-analyzer.joinLines.joinAssignments {#joinLines.joinAssignments}
 
-**rust-analyzer.joinLines.joinAssignments** (default: true)
+Default: `true`
 
- Join lines merges consecutive declaration and initialization of an assignment.
+Join lines merges consecutive declaration and initialization of an assignment.
 
 
-**rust-analyzer.joinLines.joinElseIf** (default: true)
+## rust-analyzer.joinLines.joinElseIf {#joinLines.joinElseIf}
 
- Join lines inserts else between consecutive ifs.
+Default: `true`
 
+Join lines inserts else between consecutive ifs.
 
-**rust-analyzer.joinLines.removeTrailingComma** (default: true)
 
- Join lines removes trailing commas.
+## rust-analyzer.joinLines.removeTrailingComma {#joinLines.removeTrailingComma}
 
+Default: `true`
 
-**rust-analyzer.joinLines.unwrapTrivialBlock** (default: true)
+Join lines removes trailing commas.
 
- Join lines unwraps trivial blocks.
 
+## rust-analyzer.joinLines.unwrapTrivialBlock {#joinLines.unwrapTrivialBlock}
 
-**rust-analyzer.lens.debug.enable** (default: true)
+Default: `true`
 
- Whether to show `Debug` lens. Only applies when
+Join lines unwraps trivial blocks.
+
+
+## rust-analyzer.lens.debug.enable {#lens.debug.enable}
+
+Default: `true`
+
+Whether to show `Debug` lens. Only applies when
 `#rust-analyzer.lens.enable#` is set.
 
 
-**rust-analyzer.lens.enable** (default: true)
+## rust-analyzer.lens.enable {#lens.enable}
+
+Default: `true`
 
- Whether to show CodeLens in Rust files.
+Whether to show CodeLens in Rust files.
 
 
-**rust-analyzer.lens.implementations.enable** (default: true)
+## rust-analyzer.lens.implementations.enable {#lens.implementations.enable}
 
- Whether to show `Implementations` lens. Only applies when
+Default: `true`
+
+Whether to show `Implementations` lens. Only applies when
 `#rust-analyzer.lens.enable#` is set.
 
 
-**rust-analyzer.lens.location** (default: "above_name")
+## rust-analyzer.lens.location {#lens.location}
+
+Default: `"above_name"`
+
+Where to render annotations.
 
- Where to render annotations.
 
+## rust-analyzer.lens.references.adt.enable {#lens.references.adt.enable}
 
-**rust-analyzer.lens.references.adt.enable** (default: false)
+Default: `false`
 
- Whether to show `References` lens for Struct, Enum, and Union.
+Whether to show `References` lens for Struct, Enum, and Union.
 Only applies when `#rust-analyzer.lens.enable#` is set.
 
 
-**rust-analyzer.lens.references.enumVariant.enable** (default: false)
+## rust-analyzer.lens.references.enumVariant.enable {#lens.references.enumVariant.enable}
 
- Whether to show `References` lens for Enum Variants.
+Default: `false`
+
+Whether to show `References` lens for Enum Variants.
 Only applies when `#rust-analyzer.lens.enable#` is set.
 
 
-**rust-analyzer.lens.references.method.enable** (default: false)
+## rust-analyzer.lens.references.method.enable {#lens.references.method.enable}
+
+Default: `false`
 
- Whether to show `Method References` lens. Only applies when
+Whether to show `Method References` lens. Only applies when
 `#rust-analyzer.lens.enable#` is set.
 
 
-**rust-analyzer.lens.references.trait.enable** (default: false)
+## rust-analyzer.lens.references.trait.enable {#lens.references.trait.enable}
+
+Default: `false`
 
- Whether to show `References` lens for Trait.
+Whether to show `References` lens for Trait.
 Only applies when `#rust-analyzer.lens.enable#` is set.
 
 
-**rust-analyzer.lens.run.enable** (default: true)
+## rust-analyzer.lens.run.enable {#lens.run.enable}
 
- Whether to show `Run` lens. Only applies when
+Default: `true`
+
+Whether to show `Run` lens. Only applies when
 `#rust-analyzer.lens.enable#` is set.
 
 
-**rust-analyzer.lens.updateTest.enable** (default: true)
+## rust-analyzer.lens.updateTest.enable {#lens.updateTest.enable}
+
+Default: `true`
 
- Whether to show `Update Test` lens. Only applies when
+Whether to show `Update Test` lens. Only applies when
 `#rust-analyzer.lens.enable#` and `#rust-analyzer.lens.run.enable#` are set.
 
 
-**rust-analyzer.linkedProjects** (default: [])
+## rust-analyzer.linkedProjects {#linkedProjects}
+
+Default: `[]`
 
- Disable project auto-discovery in favor of explicitly specified set
+Disable project auto-discovery in favor of explicitly specified set
 of projects.
 
 Elements must be paths pointing to `Cargo.toml`,
@@ -894,80 +1165,103 @@ Elements must be paths pointing to `Cargo.toml`,
 objects in `rust-project.json` format.
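+
+For example, a sketch that pins analysis to two hypothetical workspace members by their `Cargo.toml` paths:
+
+```json
+{
+  "rust-analyzer.linkedProjects": [
+    "./crates/app/Cargo.toml",
+    "./tools/xtask/Cargo.toml"
+  ]
+}
+```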
 
 
-**rust-analyzer.lru.capacity** (default: null)
+## rust-analyzer.lru.capacity {#lru.capacity}
+
+Default: `null`
+
+Number of syntax trees rust-analyzer keeps in memory. Defaults to 128.
+
 
- Number of syntax trees rust-analyzer keeps in memory. Defaults to 128.
+## rust-analyzer.lru.query.capacities {#lru.query.capacities}
 
+Default: `{}`
 
-**rust-analyzer.lru.query.capacities** (default: {})
+Sets the LRU capacity of the specified queries.
 
- Sets the LRU capacity of the specified queries.
 
+## rust-analyzer.notifications.cargoTomlNotFound {#notifications.cargoTomlNotFound}
 
-**rust-analyzer.notifications.cargoTomlNotFound** (default: true)
+Default: `true`
 
- Whether to show `can't find Cargo.toml` error message.
+Whether to show `can't find Cargo.toml` error message.
 
 
-**rust-analyzer.numThreads** (default: null)
+## rust-analyzer.numThreads {#numThreads}
 
- How many worker threads in the main loop. The default `null` means to pick automatically.
+Default: `null`
 
+How many worker threads in the main loop. The default `null` means to pick automatically.
 
-**rust-analyzer.procMacro.attributes.enable** (default: true)
 
- Expand attribute macros. Requires `#rust-analyzer.procMacro.enable#` to be set.
+## rust-analyzer.procMacro.attributes.enable {#procMacro.attributes.enable}
 
+Default: `true`
 
-**rust-analyzer.procMacro.enable** (default: true)
+Expand attribute macros. Requires `#rust-analyzer.procMacro.enable#` to be set.
 
- Enable support for procedural macros, implies `#rust-analyzer.cargo.buildScripts.enable#`.
 
+## rust-analyzer.procMacro.enable {#procMacro.enable}
 
-**rust-analyzer.procMacro.ignored** (default: {})
+Default: `true`
 
- These proc-macros will be ignored when trying to expand them.
+Enable support for procedural macros, implies `#rust-analyzer.cargo.buildScripts.enable#`.
+
+
+## rust-analyzer.procMacro.ignored {#procMacro.ignored}
+
+Default: `{}`
+
+These proc-macros will be ignored when trying to expand them.
 
 This config takes a map of crate names with the exported proc-macro names to ignore as values.
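+
+For instance, a hypothetical entry that skips expanding the `Serialize` and `Deserialize` derives exported by `serde_derive` might look like this (the crate and macro names are placeholders for whatever you need to ignore):
+
+```json
+{
+  "rust-analyzer.procMacro.ignored": {
+    "serde_derive": ["Serialize", "Deserialize"]
+  }
+}
+```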
 
 
-**rust-analyzer.procMacro.server** (default: null)
+## rust-analyzer.procMacro.server {#procMacro.server}
+
+Default: `null`
+
+Internal config, path to proc-macro server executable.
 
- Internal config, path to proc-macro server executable.
 
+## rust-analyzer.references.excludeImports {#references.excludeImports}
 
-**rust-analyzer.references.excludeImports** (default: false)
+Default: `false`
 
- Exclude imports from find-all-references.
+Exclude imports from find-all-references.
 
 
-**rust-analyzer.references.excludeTests** (default: false)
+## rust-analyzer.references.excludeTests {#references.excludeTests}
 
- Exclude tests from find-all-references and call-hierarchy.
+Default: `false`
 
+Exclude tests from find-all-references and call-hierarchy.
 
-**rust-analyzer.runnables.command** (default: null)
 
- Command to be executed instead of 'cargo' for runnables.
+## rust-analyzer.runnables.command {#runnables.command}
 
+Default: `null`
 
-**rust-analyzer.runnables.extraArgs** (default: [])
+Command to be executed instead of 'cargo' for runnables.
 
- Additional arguments to be passed to cargo for runnables such as
+
+## rust-analyzer.runnables.extraArgs {#runnables.extraArgs}
+
+Default: `[]`
+
+Additional arguments to be passed to cargo for runnables such as
 tests or binaries. For example, it may be `--release`.
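+
+A minimal sketch of that `--release` example, e.g. in VS Code's `settings.json`:
+
+```json
+{
+  "rust-analyzer.runnables.extraArgs": ["--release"]
+}
+```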
 
 
- **rust-analyzer.runnables.extraTestBinaryArgs**
+## rust-analyzer.runnables.extraTestBinaryArgs {#runnables.extraTestBinaryArgs}
 
 Default:
-
-```[
+```json
+[
   "--show-output"
 ]
-
 ```
 
- Additional arguments to be passed through Cargo to launched tests, benchmarks, or
+Additional arguments to be passed through Cargo to launched tests, benchmarks, or
 doc-tests.
 
 Unless the launched target uses a
@@ -976,9 +1270,11 @@ they will end up being interpreted as options to
 [`rustc`’s built-in test harness (“libtest”)](https://doc.rust-lang.org/rustc/tests/index.html#cli-arguments).
 
 
- **rust-analyzer.rustc.source** (default: null)
+## rust-analyzer.rustc.source {#rustc.source}
+
+Default: `null`
 
- Path to the Cargo.toml of the rust compiler workspace, for usage in rustc_private
+Path to the Cargo.toml of the rust compiler workspace, for usage in rustc_private
 projects, or "discover" to try to automatically find it if the `rustc-dev` component
 is installed.
 
@@ -988,100 +1284,128 @@ crates must set `[package.metadata.rust-analyzer] rustc_private=true` to use it.
 This option does not take effect until rust-analyzer is restarted.
 
 
-**rust-analyzer.rustfmt.extraArgs** (default: [])
+## rust-analyzer.rustfmt.extraArgs {#rustfmt.extraArgs}
+
+Default: `[]`
 
- Additional arguments to `rustfmt`.
+Additional arguments to `rustfmt`.
 
 
-**rust-analyzer.rustfmt.overrideCommand** (default: null)
+## rust-analyzer.rustfmt.overrideCommand {#rustfmt.overrideCommand}
 
- Advanced option, fully override the command rust-analyzer uses for
+Default: `null`
+
+Advanced option, fully override the command rust-analyzer uses for
 formatting. This should be the equivalent of `rustfmt` here, and
 not that of `cargo fmt`. The file contents will be passed on the
 standard input and the formatted result will be read from the
 standard output.
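+
+Assuming the value takes the usual "command as a list of strings" shape used by rust-analyzer's override settings, a hypothetical override pinning the rustfmt edition could look like:
+
+```json
+{
+  "rust-analyzer.rustfmt.overrideCommand": ["rustfmt", "--edition", "2021"]
+}
+```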
 
 
-**rust-analyzer.rustfmt.rangeFormatting.enable** (default: false)
+## rust-analyzer.rustfmt.rangeFormatting.enable {#rustfmt.rangeFormatting.enable}
+
+Default: `false`
 
- Enables the use of rustfmt's unstable range formatting command for the
+Enables the use of rustfmt's unstable range formatting command for the
 `textDocument/rangeFormatting` request. The rustfmt option is unstable and only
 available on a nightly build.
 
 
-**rust-analyzer.semanticHighlighting.doc.comment.inject.enable** (default: true)
+## rust-analyzer.semanticHighlighting.doc.comment.inject.enable {#semanticHighlighting.doc.comment.inject.enable}
+
+Default: `true`
 
- Inject additional highlighting into doc comments.
+Inject additional highlighting into doc comments.
 
 When enabled, rust-analyzer will highlight rust source in doc comments as well as intra
 doc links.
 
 
-**rust-analyzer.semanticHighlighting.nonStandardTokens** (default: true)
+## rust-analyzer.semanticHighlighting.nonStandardTokens {#semanticHighlighting.nonStandardTokens}
 
- Whether the server is allowed to emit non-standard tokens and modifiers.
+Default: `true`
 
+Whether the server is allowed to emit non-standard tokens and modifiers.
 
-**rust-analyzer.semanticHighlighting.operator.enable** (default: true)
 
- Use semantic tokens for operators.
+## rust-analyzer.semanticHighlighting.operator.enable {#semanticHighlighting.operator.enable}
+
+Default: `true`
+
+Use semantic tokens for operators.
 
 When disabled, rust-analyzer will emit semantic tokens only for operator tokens when
 they are tagged with modifiers.
 
 
-**rust-analyzer.semanticHighlighting.operator.specialization.enable** (default: false)
+## rust-analyzer.semanticHighlighting.operator.specialization.enable {#semanticHighlighting.operator.specialization.enable}
+
+Default: `false`
 
- Use specialized semantic tokens for operators.
+Use specialized semantic tokens for operators.
 
 When enabled, rust-analyzer will emit special token types for operator tokens instead
 of the generic `operator` token type.
 
 
-**rust-analyzer.semanticHighlighting.punctuation.enable** (default: false)
+## rust-analyzer.semanticHighlighting.punctuation.enable {#semanticHighlighting.punctuation.enable}
 
- Use semantic tokens for punctuation.
+Default: `false`
+
+Use semantic tokens for punctuation.
 
 When disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when
 they are tagged with modifiers or have a special role.
 
 
-**rust-analyzer.semanticHighlighting.punctuation.separate.macro.bang** (default: false)
+## rust-analyzer.semanticHighlighting.punctuation.separate.macro.bang {#semanticHighlighting.punctuation.separate.macro.bang}
+
+Default: `false`
 
- When enabled, rust-analyzer will emit a punctuation semantic token for the `!` of macro
+When enabled, rust-analyzer will emit a punctuation semantic token for the `!` of macro
 calls.
 
 
-**rust-analyzer.semanticHighlighting.punctuation.specialization.enable** (default: false)
+## rust-analyzer.semanticHighlighting.punctuation.specialization.enable {#semanticHighlighting.punctuation.specialization.enable}
+
+Default: `false`
 
- Use specialized semantic tokens for punctuation.
+Use specialized semantic tokens for punctuation.
 
 When enabled, rust-analyzer will emit special token types for punctuation tokens instead
 of the generic `punctuation` token type.
 
 
-**rust-analyzer.semanticHighlighting.strings.enable** (default: true)
+## rust-analyzer.semanticHighlighting.strings.enable {#semanticHighlighting.strings.enable}
 
- Use semantic tokens for strings.
+Default: `true`
+
+Use semantic tokens for strings.
 
 In some editors (e.g. vscode) semantic tokens override other highlighting grammars.
 By disabling semantic tokens for strings, other grammars can be used to highlight
 their contents.
 
 
-**rust-analyzer.signatureInfo.detail** (default: "full")
+## rust-analyzer.signatureInfo.detail {#signatureInfo.detail}
+
+Default: `"full"`
+
+Show full signature of the callable. Only shows parameters if disabled.
 
- Show full signature of the callable. Only shows parameters if disabled.
 
+## rust-analyzer.signatureInfo.documentation.enable {#signatureInfo.documentation.enable}
 
-**rust-analyzer.signatureInfo.documentation.enable** (default: true)
+Default: `true`
 
- Show documentation.
+Show documentation.
 
 
-**rust-analyzer.typing.triggerChars** (default: "=.")
+## rust-analyzer.typing.triggerChars {#typing.triggerChars}
 
- Specify the characters allowed to invoke special on typing triggers.
+Default: `"=."`
+
+Specify the characters allowed to invoke special on typing triggers.
 - typing `=` after `let` tries to smartly add `;` if `=` is followed by an existing expression
 - typing `=` between two expressions adds `;` when in statement position
 - typing `=` to turn an assignment into an equality comparison removes `;` when in expression position
@@ -1092,16 +1416,20 @@ their contents.
 - typing `<` in a path or type position inserts a closing `>` after the path or type.
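+
+Assuming the value is a plain string of trigger characters, as the `"=."` default suggests, a sketch that keeps only the `=` behaviors and drops the rest would be:
+
+```json
+{
+  "rust-analyzer.typing.triggerChars": "="
+}
+```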
 
 
-**rust-analyzer.vfs.extraIncludes** (default: [])
+## rust-analyzer.vfs.extraIncludes {#vfs.extraIncludes}
+
+Default: `[]`
 
- Additional paths to include in the VFS. Generally for code that is
+Additional paths to include in the VFS. Generally for code that is
 generated or otherwise managed by a build system outside of Cargo,
 though Cargo might be the eventual consumer.
 
 
-**rust-analyzer.workspace.discoverConfig** (default: null)
+## rust-analyzer.workspace.discoverConfig {#workspace.discoverConfig}
+
+Default: `null`
 
- Enables automatic discovery of projects using [`DiscoverWorkspaceConfig::command`].
+Enables automatic discovery of projects using [`DiscoverWorkspaceConfig::command`].
 
 [`DiscoverWorkspaceConfig`] also requires setting `progress_label` and `files_to_watch`.
 `progress_label` is used for the title in progress indicators, whereas `files_to_watch`
@@ -1183,7 +1511,7 @@ The JSON representation of `DiscoverArgument::Path` is:
 
 Similarly, the JSON representation of `DiscoverArgument::Buildfile` is:
 
-```
+```json
 {
     "buildfile": "BUILD"
 }
@@ -1196,20 +1524,26 @@ buck2's `rust-project` will likely be useful:
 https://github.com/facebook/buck2/tree/main/integrations/rust-project.
 
 
-**rust-analyzer.workspace.symbol.search.kind** (default: "only_types")
+## rust-analyzer.workspace.symbol.search.kind {#workspace.symbol.search.kind}
+
+Default: `"only_types"`
 
- Workspace symbol search kind.
+Workspace symbol search kind.
 
 
-**rust-analyzer.workspace.symbol.search.limit** (default: 128)
+## rust-analyzer.workspace.symbol.search.limit {#workspace.symbol.search.limit}
 
- Limits the number of items returned from a workspace symbol search (Defaults to 128).
+Default: `128`
+
+Limits the number of items returned from a workspace symbol search (Defaults to 128).
 Some clients like vs-code issue new searches on result filtering and don't require all results to be returned in the initial search.
 Other clients requires all results upfront and might require a higher limit.
 
 
-**rust-analyzer.workspace.symbol.search.scope** (default: "workspace")
+## rust-analyzer.workspace.symbol.search.scope {#workspace.symbol.search.scope}
+
+Default: `"workspace"`
 
- Workspace symbol search scope.
+Workspace symbol search scope.
 
 
diff --git a/src/tools/rust-analyzer/docs/book/src/contributing/README.md b/src/tools/rust-analyzer/docs/book/src/contributing/README.md
index cbbf6acf3e590..05286b5429244 100644
--- a/src/tools/rust-analyzer/docs/book/src/contributing/README.md
+++ b/src/tools/rust-analyzer/docs/book/src/contributing/README.md
@@ -3,7 +3,7 @@
 rust-analyzer is an ordinary Rust project, which is organized as a Cargo workspace, builds on stable and doesn't depend on C libraries.
 So, just
 
-```
+```bash
 $ cargo test
 ```
 
@@ -140,22 +140,25 @@ By default, log goes to stderr, but the stderr itself is processed by VS Code.
 `--log-file <PATH>` CLI argument allows logging to file.
 Setting the `RA_LOG_FILE=<PATH>` environment variable will also log to file, it will also override `--log-file`.
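+
+For example, a quick way to collect logs in a file while reproducing an issue (the filter and path are only illustrative):
+
+```bash
+env RA_LOG=info RA_LOG_FILE=/tmp/rust-analyzer.log code .
+```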
 
-To see stderr in the running VS Code instance, go to the "Output" tab of the panel and select `rust-analyzer`.
+To see the server stderr output in the running VS Code instance, go to the "Output" tab of the panel
+and select `rust-analyzer Language Server`.
 This shows `eprintln!` as well.
-Note that `stdout` is used for the actual protocol, so `println!` will break things.
+Note that `stdout` is used for LSP messages, so using `println!` (or anything else that writes to `stdout`) will break rust-analyzer!
 
 To log all communication between the server and the client, there are two choices:
 
 * You can log on the server side, by running something like
 
-  ```
+  ```bash
   env RA_LOG=lsp_server=debug code .
   ```
 
 * You can log on the client side, by the `rust-analyzer: Toggle LSP Logs` command or enabling `"rust-analyzer.trace.server": "verbose"` workspace setting.
-  These logs are shown in a separate tab in the output and could be used with LSP inspector.
+  These logs are shown in a separate output tab named `rust-analyzer LSP Trace` and can be used with an LSP inspector.
   Kudos to [@DJMcNab](https://github.com/DJMcNab) for setting this awesome infra up!
 
+Finally, there are the logs of the VS Code extension itself, which go into the `rust-analyzer Extension` output tab.
+
 There are also several VS Code commands which might be of interest:
 
 * `rust-analyzer: Status` shows some memory-usage statistics.
@@ -180,7 +183,7 @@ There are also several VS Code commands which might be of interest:
 
 We have a built-in hierarchical profiler, you can enable it by using `RA_PROFILE` env-var:
 
-```
+```bash
 RA_PROFILE=*             // dump everything
 RA_PROFILE=foo|bar|baz   // enabled only selected entries
 RA_PROFILE=*@3>10        // dump everything, up to depth 3, if it takes more than 10 ms
@@ -191,7 +194,7 @@ Some rust-analyzer contributors have `export RA_PROFILE='*>10'` in my shell prof
 For machine-readable JSON output, we have the `RA_PROFILE_JSON` env variable. We support
 filtering only by span name:
 
-```
+```bash
 RA_PROFILE=* // dump everything
 RA_PROFILE_JSON="vfs_load|parallel_prime_caches|discover_command" // dump selected spans
 ```
@@ -201,13 +204,13 @@ It is enabled by `RA_COUNT=1`.
 
 To measure time for from-scratch analysis, use something like this:
 
-```
+```bash
 $ cargo run --release -p rust-analyzer -- analysis-stats ../chalk/
 ```
 
 For measuring time of incremental analysis, use either of these:
 
-```
+```bash
 $ cargo run --release -p rust-analyzer -- analysis-bench ../chalk/ --highlight ../chalk/chalk-engine/src/logic.rs
 $ cargo run --release -p rust-analyzer -- analysis-bench ../chalk/ --complete ../chalk/chalk-engine/src/logic.rs:94:0
 ```
@@ -220,7 +223,7 @@ Release process is handled by `release`, `dist`, `publish-release-notes` and `pr
 
 `release` assumes that you have checkouts of `rust-analyzer`, `rust-analyzer.github.io`, and `rust-lang/rust` in the same directory:
 
-```
+```bash
 ./rust-analyzer
 ./rust-analyzer.github.io
 ./rust-rust-analyzer  # Note the name!
diff --git a/src/tools/rust-analyzer/docs/book/src/contributing/lsp-extensions.md b/src/tools/rust-analyzer/docs/book/src/contributing/lsp-extensions.md
index 14a3fd1ebd110..1ada1cb24c2ce 100644
--- a/src/tools/rust-analyzer/docs/book/src/contributing/lsp-extensions.md
+++ b/src/tools/rust-analyzer/docs/book/src/contributing/lsp-extensions.md
@@ -1,5 +1,5 @@
 <!---
-lsp/ext.rs hash: af70cce5d6905e39
+lsp/ext.rs hash: 78e87a78de8f288e
 
 If you need to change the above hash to make the test pass, please check if you
 need to adjust this doc as well and ping this issue:
diff --git a/src/tools/rust-analyzer/docs/book/src/diagnostics_generated.md b/src/tools/rust-analyzer/docs/book/src/diagnostics_generated.md
deleted file mode 100644
index d34c459ad0258..0000000000000
--- a/src/tools/rust-analyzer/docs/book/src/diagnostics_generated.md
+++ /dev/null
@@ -1,516 +0,0 @@
-//! Generated by `cargo xtask codegen diagnostics-docs`, do not edit by hand.
-
-#### attribute-expansion-disabled
-
-Source:  [macro_error.rs](crates/ide-diagnostics/src/handlers/macro_error.rs#7) 
-
-
-This diagnostic is shown for attribute proc macros when attribute expansions have been disabled.
-
-
-
-
-#### await-outside-of-async
-
-Source:  [await_outside_of_async.rs](crates/ide-diagnostics/src/handlers/await_outside_of_async.rs#3) 
-
-
-This diagnostic is triggered if the `await` keyword is used outside of an async function or block
-
-
-
-
-#### break-outside-of-loop
-
-Source:  [break_outside_of_loop.rs](crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs#3) 
-
-
-This diagnostic is triggered if the `break` keyword is used outside of a loop.
-
-
-
-
-#### cast-to-unsized
-
-Source:  [invalid_cast.rs](crates/ide-diagnostics/src/handlers/invalid_cast.rs#106) 
-
-
-This diagnostic is triggered when casting to an unsized type
-
-
-
-
-#### expected-function
-
-Source:  [expected_function.rs](crates/ide-diagnostics/src/handlers/expected_function.rs#5) 
-
-
-This diagnostic is triggered if a call is made on something that is not callable.
-
-
-
-
-#### generic-args-prohibited
-
-Source:  [generic_args_prohibited.rs](crates/ide-diagnostics/src/handlers/generic_args_prohibited.rs#10) 
-
-
-This diagnostic is shown when generic arguments are provided for a type that does not accept
-generic arguments.
-
-
-
-
-#### inactive-code
-
-Source:  [inactive_code.rs](crates/ide-diagnostics/src/handlers/inactive_code.rs#6) 
-
-
-This diagnostic is shown for code with inactive `#[cfg]` attributes.
-
-
-
-
-#### incoherent-impl
-
-Source:  [incoherent_impl.rs](crates/ide-diagnostics/src/handlers/incoherent_impl.rs#6) 
-
-
-This diagnostic is triggered if the targe type of an impl is from a foreign crate.
-
-
-
-
-#### incorrect-ident-case
-
-Source:  [incorrect_case.rs](crates/ide-diagnostics/src/handlers/incorrect_case.rs#13) 
-
-
-This diagnostic is triggered if an item name doesn't follow [Rust naming convention](https://doc.rust-lang.org/1.0.0/style/style/naming/README.html).
-
-
-
-
-#### invalid-cast
-
-Source:  [invalid_cast.rs](crates/ide-diagnostics/src/handlers/invalid_cast.rs#18) 
-
-
-This diagnostic is triggered if the code contains an illegal cast
-
-
-
-
-#### invalid-derive-target
-
-Source:  [invalid_derive_target.rs](crates/ide-diagnostics/src/handlers/invalid_derive_target.rs#3) 
-
-
-This diagnostic is shown when the derive attribute is used on an item other than a `struct`,
-`enum` or `union`.
-
-
-
-
-#### macro-def-error
-
-Source:  [macro_error.rs](crates/ide-diagnostics/src/handlers/macro_error.rs#24) 
-
-
-This diagnostic is shown for macro expansion errors.
-
-
-
-
-#### macro-error
-
-Source:  [macro_error.rs](crates/ide-diagnostics/src/handlers/macro_error.rs#3) 
-
-
-This diagnostic is shown for macro expansion errors.
-
-
-
-
-#### malformed-derive
-
-Source:  [malformed_derive.rs](crates/ide-diagnostics/src/handlers/malformed_derive.rs#3) 
-
-
-This diagnostic is shown when the derive attribute has invalid input.
-
-
-
-
-#### mismatched-arg-count
-
-Source:  [mismatched_arg_count.rs](crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs#31) 
-
-
-This diagnostic is triggered if a function is invoked with an incorrect amount of arguments.
-
-
-
-
-#### mismatched-tuple-struct-pat-arg-count
-
-Source:  [mismatched_arg_count.rs](crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs#11) 
-
-
-This diagnostic is triggered if a function is invoked with an incorrect amount of arguments.
-
-
-
-
-#### missing-fields
-
-Source:  [missing_fields.rs](crates/ide-diagnostics/src/handlers/missing_fields.rs#19) 
-
-
-This diagnostic is triggered if record lacks some fields that exist in the corresponding structure.
-
-Example:
-
-```rust
-struct A { a: u8, b: u8 }
-
-let a = A { a: 10 };
-```
-
-
-
-
-#### missing-match-arm
-
-Source:  [missing_match_arms.rs](crates/ide-diagnostics/src/handlers/missing_match_arms.rs#3) 
-
-
-This diagnostic is triggered if `match` block is missing one or more match arms.
-
-
-
-
-#### missing-unsafe
-
-Source:  [missing_unsafe.rs](crates/ide-diagnostics/src/handlers/missing_unsafe.rs#10) 
-
-
-This diagnostic is triggered if an operation marked as `unsafe` is used outside of an `unsafe` function or block.
-
-
-
-
-#### moved-out-of-ref
-
-Source:  [moved_out_of_ref.rs](crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs#4) 
-
-
-This diagnostic is triggered on moving non copy things out of references.
-
-
-
-
-#### need-mut
-
-Source:  [mutability_errors.rs](crates/ide-diagnostics/src/handlers/mutability_errors.rs#8) 
-
-
-This diagnostic is triggered on mutating an immutable variable.
-
-
-
-
-#### no-such-field
-
-Source:  [no_such_field.rs](crates/ide-diagnostics/src/handlers/no_such_field.rs#12) 
-
-
-This diagnostic is triggered if created structure does not have field provided in record.
-
-
-
-
-#### non-exhaustive-let
-
-Source:  [non_exhaustive_let.rs](crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs#3) 
-
-
-This diagnostic is triggered if a `let` statement without an `else` branch has a non-exhaustive
-pattern.
-
-
-
-
-#### private-assoc-item
-
-Source:  [private_assoc_item.rs](crates/ide-diagnostics/src/handlers/private_assoc_item.rs#3) 
-
-
-This diagnostic is triggered if the referenced associated item is not visible from the current
-module.
-
-
-
-
-#### private-field
-
-Source:  [private_field.rs](crates/ide-diagnostics/src/handlers/private_field.rs#3) 
-
-
-This diagnostic is triggered if the accessed field is not visible from the current module.
-
-
-
-
-#### proc-macro-disabled
-
-Source:  [macro_error.rs](crates/ide-diagnostics/src/handlers/macro_error.rs#11) 
-
-
-This diagnostic is shown for proc macros that have been specifically disabled via `rust-analyzer.procMacro.ignored`.
-
-
-
-
-#### remove-trailing-return
-
-Source:  [remove_trailing_return.rs](crates/ide-diagnostics/src/handlers/remove_trailing_return.rs#8) 
-
-
-This diagnostic is triggered when there is a redundant `return` at the end of a function
-or closure.
-
-
-
-
-#### remove-unnecessary-else
-
-Source:  [remove_unnecessary_else.rs](crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs#17) 
-
-
-This diagnostic is triggered when there is an `else` block for an `if` expression whose
-then branch diverges (e.g. ends with a `return`, `continue`, `break` e.t.c).
-
-
-
-
-#### replace-filter-map-next-with-find-map
-
-Source:  [replace_filter_map_next_with_find_map.rs](crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs#11) 
-
-
-This diagnostic is triggered when `.filter_map(..).next()` is used, rather than the more concise `.find_map(..)`.
-
-
-
-
-#### trait-impl-incorrect-safety
-
-Source:  [trait_impl_incorrect_safety.rs](crates/ide-diagnostics/src/handlers/trait_impl_incorrect_safety.rs#6) 
-
-
-Diagnoses incorrect safety annotations of trait impls.
-
-
-
-
-#### trait-impl-missing-assoc_item
-
-Source:  [trait_impl_missing_assoc_item.rs](crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs#7) 
-
-
-Diagnoses missing trait items in a trait impl.
-
-
-
-
-#### trait-impl-orphan
-
-Source:  [trait_impl_orphan.rs](crates/ide-diagnostics/src/handlers/trait_impl_orphan.rs#5) 
-
-
-Only traits defined in the current crate can be implemented for arbitrary types
-
-
-
-
-#### trait-impl-redundant-assoc_item
-
-Source:  [trait_impl_redundant_assoc_item.rs](crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs#12) 
-
-
-Diagnoses redundant trait items in a trait impl.
-
-
-
-
-#### type-mismatch
-
-Source:  [type_mismatch.rs](crates/ide-diagnostics/src/handlers/type_mismatch.rs#20) 
-
-
-This diagnostic is triggered when the type of an expression or pattern does not match
-the expected type.
-
-
-
-
-#### typed-hole
-
-Source:  [typed_hole.rs](crates/ide-diagnostics/src/handlers/typed_hole.rs#18) 
-
-
-This diagnostic is triggered when an underscore expression is used in an invalid position.
-
-
-
-
-#### undeclared-label
-
-Source:  [undeclared_label.rs](crates/ide-diagnostics/src/handlers/undeclared_label.rs#3) 
-
-
-
-
-
-
-#### unimplemented-builtin-macro
-
-Source:  [unimplemented_builtin_macro.rs](crates/ide-diagnostics/src/handlers/unimplemented_builtin_macro.rs#3) 
-
-
-This diagnostic is shown for builtin macros which are not yet implemented by rust-analyzer
-
-
-
-
-#### unlinked-file
-
-Source:  [unlinked_file.rs](crates/ide-diagnostics/src/handlers/unlinked_file.rs#20) 
-
-
-This diagnostic is shown for files that are not included in any crate, or files that are part of
-crates rust-analyzer failed to discover. The file will not have IDE features available.
-
-
-
-
-#### unnecessary-braces
-
-Source:  [useless_braces.rs](crates/ide-diagnostics/src/handlers/useless_braces.rs#9) 
-
-
-Diagnostic for unnecessary braces in `use` items.
-
-
-
-
-#### unreachable-label
-
-Source:  [unreachable_label.rs](crates/ide-diagnostics/src/handlers/unreachable_label.rs#3) 
-
-
-
-
-
-
-#### unresolved-assoc-item
-
-Source:  [unresolved_assoc_item.rs](crates/ide-diagnostics/src/handlers/unresolved_assoc_item.rs#3) 
-
-
-This diagnostic is triggered if the referenced associated item does not exist.
-
-
-
-
-#### unresolved-extern-crate
-
-Source:  [unresolved_extern_crate.rs](crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs#3) 
-
-
-This diagnostic is triggered if rust-analyzer is unable to discover referred extern crate.
-
-
-
-
-#### unresolved-field
-
-Source:  [unresolved_field.rs](crates/ide-diagnostics/src/handlers/unresolved_field.rs#23) 
-
-
-This diagnostic is triggered if a field does not exist on a given type.
-
-
-
-
-#### unresolved-ident
-
-Source:  [unresolved_ident.rs](crates/ide-diagnostics/src/handlers/unresolved_ident.rs#3) 
-
-
-This diagnostic is triggered if an expr-position ident is invalid.
-
-
-
-
-#### unresolved-import
-
-Source:  [unresolved_import.rs](crates/ide-diagnostics/src/handlers/unresolved_import.rs#3) 
-
-
-This diagnostic is triggered if rust-analyzer is unable to resolve a path in
-a `use` declaration.
-
-
-
-
-#### unresolved-macro-call
-
-Source:  [unresolved_macro_call.rs](crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs#3) 
-
-
-This diagnostic is triggered if rust-analyzer is unable to resolve the path
-to a macro in a macro invocation.
-
-
-
-
-#### unresolved-method
-
-Source:  [unresolved_method.rs](crates/ide-diagnostics/src/handlers/unresolved_method.rs#15) 
-
-
-This diagnostic is triggered if a method does not exist on a given type.
-
-
-
-
-#### unresolved-module
-
-Source:  [unresolved_module.rs](crates/ide-diagnostics/src/handlers/unresolved_module.rs#8) 
-
-
-This diagnostic is triggered if rust-analyzer is unable to discover referred module.
-
-
-
-
-#### unused-mut
-
-Source:  [mutability_errors.rs](crates/ide-diagnostics/src/handlers/mutability_errors.rs#62) 
-
-
-This diagnostic is triggered when a mutable variable isn't actually mutated.
-
-
-
-
-#### unused-variables
-
-Source:  [unused_variables.rs](crates/ide-diagnostics/src/handlers/unused_variables.rs#13) 
-
-
-This diagnostic is triggered when a local variable is not used.
-
-
diff --git a/src/tools/rust-analyzer/docs/book/src/features_generated.md b/src/tools/rust-analyzer/docs/book/src/features_generated.md
deleted file mode 100644
index 2c5829b1f54c0..0000000000000
--- a/src/tools/rust-analyzer/docs/book/src/features_generated.md
+++ /dev/null
@@ -1,940 +0,0 @@
-//! Generated by `cargo xtask codegen feature-docs`, do not edit by hand.
-
-### Annotations
-**Source:**  [annotations.rs](crates/ide/src/annotations.rs#19) 
-
-Provides user with annotations above items for looking up references or impl blocks
-and running/debugging binaries.
-
-![Annotations](https://user-images.githubusercontent.com/48062697/113020672-b7c34f00-917a-11eb-8f6e-858735660a0e.png)
-
-
-### Auto Import
-**Source:**  [auto_import.rs](crates/ide-assists/src/handlers/auto_import.rs#15) 
-
-Using the `auto-import` assist it is possible to insert missing imports for unresolved items.
-When inserting an import it will do so in a structured manner by keeping imports grouped,
-separated by a newline in the following order:
-
-- `std` and `core`
-- External Crates
-- Current Crate, paths prefixed by `crate`
-- Current Module, paths prefixed by `self`
-- Super Module, paths prefixed by `super`
-
-Example:
-```rust
-use std::fs::File;
-
-use itertools::Itertools;
-use syntax::ast;
-
-use crate::utils::insert_use;
-
-use self::auto_import;
-
-use super::AssistContext;
-```
-
-#### Import Granularity
-
-It is possible to configure how use-trees are merged with the `imports.granularity.group` setting.
-It has the following configurations:
-
-- `crate`: Merge imports from the same crate into a single use statement. This kind of
- nesting is only supported in Rust versions later than 1.24.
-- `module`: Merge imports from the same module into a single use statement.
-- `item`: Don't merge imports at all, creating one import per item.
-- `preserve`: Do not change the granularity of any imports. For auto-import this has the same
- effect as `item`.
-- `one`: Merge all imports into a single use statement as long as they have the same visibility
- and attributes.
-
-In `VS Code` the configuration for this is `rust-analyzer.imports.granularity.group`.
-
-#### Import Prefix
-
-The style of imports in the same crate is configurable through the `imports.prefix` setting.
-It has the following configurations:
-
-- `crate`: This setting will force paths to be always absolute, starting with the `crate`
- prefix, unless the item is defined outside of the current crate.
-- `self`: This setting will force paths that are relative to the current module to always
- start with `self`. This will result in paths that always start with either `crate`, `self`,
- `super` or an extern crate identifier.
-- `plain`: This setting does not impose any restrictions in imports.
-
-In `VS Code` the configuration for this is `rust-analyzer.imports.prefix`.
-
-![Auto Import](https://user-images.githubusercontent.com/48062697/113020673-b85be580-917a-11eb-9022-59585f35d4f8.gif)
-
-
-### Completion With Autoimport
-**Source:**  [flyimport.rs](crates/ide-completion/src/completions/flyimport.rs#20) 
-
-When completing names in the current scope, proposes additional imports from other modules or crates,
-if they can be qualified in the scope, and their name contains all symbols from the completion input.
-
-To be considered applicable, the name must contain all input symbols in the given order, not necessarily adjacent.
-If any input symbol is not lowercased, the name must contain all symbols in exact case; otherwise the containing is checked case-insensitively.
-
-```
-fn main() {
-    pda$0
-}
-# pub mod std { pub mod marker { pub struct PhantomData { } } }
-```
-->
-```
-use std::marker::PhantomData;
-
-fn main() {
-    PhantomData
-}
-# pub mod std { pub mod marker { pub struct PhantomData { } } }
-```
-
-Also completes associated items, that require trait imports.
-If any unresolved and/or partially-qualified path precedes the input, it will be taken into account.
-Currently, only the imports with their import path ending with the whole qualifier will be proposed
-(no fuzzy matching for qualifier).
-
-```
-mod foo {
-    pub mod bar {
-        pub struct Item;
-
-        impl Item {
-            pub const TEST_ASSOC: usize = 3;
-        }
-    }
-}
-
-fn main() {
-    bar::Item::TEST_A$0
-}
-```
-->
-```
-use foo::bar;
-
-mod foo {
-    pub mod bar {
-        pub struct Item;
-
-        impl Item {
-            pub const TEST_ASSOC: usize = 3;
-        }
-    }
-}
-
-fn main() {
-    bar::Item::TEST_ASSOC
-}
-```
-
-NOTE: currently, if an assoc item comes from a trait that's not currently imported, and it also has an unresolved and/or partially-qualified path,
-no imports will be proposed.
-
-#### Fuzzy search details
-
-To avoid an excessive amount of the results returned, completion input is checked for inclusion in the names only
-(i.e. in `HashMap` in the `std::collections::HashMap` path).
-For the same reasons, avoids searching for any path imports for inputs with their length less than 2 symbols
-(but shows all associated items for any input length).
-
-#### Import configuration
-
-It is possible to configure how use-trees are merged with the `imports.granularity.group` setting.
-Mimics the corresponding behavior of the `Auto Import` feature.
-
-#### LSP and performance implications
-
-The feature is enabled only if the LSP client supports LSP protocol version 3.16+ and reports the `additionalTextEdits`
-(case-sensitive) resolve client capability in its client capabilities.
-This way the server is able to defer the costly computations, doing them for a selected completion item only.
-For clients with no such support, all edits have to be calculated on the completion request, including the fuzzy search completion ones,
-which might be slow ergo the feature is automatically disabled.
-
-#### Feature toggle
-
-The feature can be forcefully turned off in the settings with the `rust-analyzer.completion.autoimport.enable` flag.
-Note that having this flag set to `true` does not guarantee that the feature is enabled: your client needs to have the corresponding
-capability enabled.
-
-
-### Debug ItemTree
-**Source:**  [view_item_tree.rs](crates/ide/src/view_item_tree.rs#5) 
-
-Displays the ItemTree of the currently open file, for debugging.
-
-| Editor  | Action Name |
-|---------|-------------|
-| VS Code | **rust-analyzer: Debug ItemTree** |
-
-
-### Expand Macro Recursively
-**Source:**  [expand_macro.rs](crates/ide/src/expand_macro.rs#18) 
-
-Shows the full macro expansion of the macro at the current caret position.
-
-| Editor  | Action Name |
-|---------|-------------|
-| VS Code | **rust-analyzer: Expand macro recursively at caret** |
-
-![Expand Macro Recursively](https://user-images.githubusercontent.com/48062697/113020648-b3973180-917a-11eb-84a9-ecb921293dc5.gif)
-
-
-### Expand and Shrink Selection
-**Source:**  [extend_selection.rs](crates/ide/src/extend_selection.rs#15) 
-
-Extends or shrinks the current selection to the encompassing syntactic construct
-(expression, statement, item, module, etc). It works with multiple cursors.
-
-| Editor  | Shortcut |
-|---------|----------|
-| VS Code | <kbd>Alt+Shift+→</kbd>, <kbd>Alt+Shift+←</kbd> |
-
-![Expand and Shrink Selection](https://user-images.githubusercontent.com/48062697/113020651-b42fc800-917a-11eb-8a4f-cf1a07859fac.gif)
-
-
-### File Structure
-**Source:**  [file_structure.rs](crates/ide/src/file_structure.rs#26) 
-
-Provides a tree of the symbols defined in the file. Can be used to
-
-* fuzzy search symbol in a file (super useful)
-* draw breadcrumbs to describe the context around the cursor
-* draw outline of the file
-
-| Editor  | Shortcut |
-|---------|----------|
-| VS Code | <kbd>Ctrl+Shift+O</kbd> |
-
-![File Structure](https://user-images.githubusercontent.com/48062697/113020654-b42fc800-917a-11eb-8388-e7dc4d92b02e.gif)
-
-
-### Find All References
-**Source:**  [references.rs](crates/ide/src/references.rs#42) 
-
-Shows all references of the item at the cursor location
-
-| Editor  | Shortcut |
-|---------|----------|
-| VS Code | <kbd>Shift+Alt+F12</kbd> |
-
-![Find All References](https://user-images.githubusercontent.com/48062697/113020670-b7c34f00-917a-11eb-8003-370ac5f2b3cb.gif)
-
-
-### Folding
-**Source:**  [folding_ranges.rs](crates/ide/src/folding_ranges.rs#36) 
-
-Defines folding regions for curly braced blocks, runs of consecutive use, mod, const or static
-items, and `region` / `endregion` comment markers.
-
-
-### Format String Completion
-**Source:**  [format_like.rs](crates/ide-completion/src/completions/postfix/format_like.rs#0) 
-
-`"Result {result} is {2 + 2}"` is expanded to the `"Result {} is {}", result, 2 + 2`.
-
-The following postfix snippets are available:
-
-* `format` -> `format!(...)`
-* `panic` -> `panic!(...)`
-* `println` -> `println!(...)`
-* `log`:
-** `logd` -> `log::debug!(...)`
-** `logt` -> `log::trace!(...)`
-** `logi` -> `log::info!(...)`
-** `logw` -> `log::warn!(...)`
-** `loge` -> `log::error!(...)`
-
-![Format String Completion](https://user-images.githubusercontent.com/48062697/113020656-b560f500-917a-11eb-87de-02991f61beb8.gif)
-
-
-### Go to Declaration
-**Source:**  [goto_declaration.rs](crates/ide/src/goto_declaration.rs#13) 
-
-Navigates to the declaration of an identifier.
-
-This is the same as `Go to Definition` with the following exceptions:
-- outline modules will navigate to the `mod name;` item declaration
-- trait assoc items will navigate to the assoc item of the trait declaration as opposed to the trait impl
-- fields in patterns will navigate to the field declaration of the struct, union or variant
-
-
-### Go to Definition
-**Source:**  [goto_definition.rs](crates/ide/src/goto_definition.rs#28) 
-
-Navigates to the definition of an identifier.
-
-For outline modules, this will navigate to the source file of the module.
-
-| Editor  | Shortcut |
-|---------|----------|
-| VS Code | <kbd>F12</kbd> |
-
-![Go to Definition](https://user-images.githubusercontent.com/48062697/113065563-025fbe00-91b1-11eb-83e4-a5a703610b23.gif)
-
-
-### Go to Implementation
-**Source:**  [goto_implementation.rs](crates/ide/src/goto_implementation.rs#11) 
-
-Navigates to the impl items of types.
-
-| Editor  | Shortcut |
-|---------|----------|
-| VS Code | <kbd>Ctrl+F12</kbd>
-
-![Go to Implementation](https://user-images.githubusercontent.com/48062697/113065566-02f85480-91b1-11eb-9288-aaad8abd8841.gif)
-
-
-### Go to Type Definition
-**Source:**  [goto_type_definition.rs](crates/ide/src/goto_type_definition.rs#7) 
-
-Navigates to the type of an identifier.
-
-| Editor  | Action Name |
-|---------|-------------|
-| VS Code | **Go to Type Definition** |
-
-![Go to Type Definition](https://user-images.githubusercontent.com/48062697/113020657-b560f500-917a-11eb-9007-0f809733a338.gif)
-
-
-### Highlight Related
-**Source:**  [highlight_related.rs](crates/ide/src/highlight_related.rs#42) 
-
-Highlights constructs related to the thing under the cursor:
-
-1. if on an identifier, highlights all references to that identifier in the current file
-     * additionally, if the identifier is a trait in a where clause, type parameter trait bound or use item, highlights all references to that trait's assoc items in the corresponding scope
-1. if on an `async` or `await` token, highlights all yield points for that async context
-1. if on a `return` or `fn` keyword, `?` character or `->` return type arrow, highlights all exit points for that context
-1. if on a `break`, `loop`, `while` or `for` token, highlights all break points for that loop or block context
-1. if on a `move` or `|` token that belongs to a closure, highlights all captures of the closure.
-
-Note: `?`, `|` and `->` do not currently trigger this behavior in the VSCode editor.
-
-
-### Hover
-**Source:**  [hover.rs](crates/ide/src/hover.rs#116) 
-
-Shows additional information, like the type of an expression or the documentation for a definition when "focusing" code.
-Focusing is usually hovering with a mouse, but can also be triggered with a shortcut.
-
-![Hover](https://user-images.githubusercontent.com/48062697/113020658-b5f98b80-917a-11eb-9f88-3dbc27320c95.gif)
-
-
-### Inlay Hints
-**Source:**  [inlay_hints.rs](crates/ide/src/inlay_hints.rs#41) 
-
-rust-analyzer shows additional information inline with the source code.
-Editors usually render this using read-only virtual text snippets interspersed with code.
-
-rust-analyzer by default shows hints for
-
-* types of local variables
-* names of function arguments
-* names of const generic parameters
-* types of chained expressions
-
-Optionally, one can enable additional hints for
-
-* return types of closure expressions
-* elided lifetimes
-* compiler inserted reborrows
-* names of generic type and lifetime parameters
-
-Note: inlay hints for function argument names are heuristically omitted to reduce noise and will not appear if
-any of the
-[following criteria](https://github.com/rust-lang/rust-analyzer/blob/6b8b8ff4c56118ddee6c531cde06add1aad4a6af/crates/ide/src/inlay_hints/param_name.rs#L92-L99)
-are met:
-
-* the parameter name is a suffix of the function's name
-* the argument is a qualified constructing or call expression where the qualifier is an ADT
-* exact argument<->parameter match(ignoring leading underscore) or parameter is a prefix/suffix
-  of argument with _ splitting it off
-* the parameter name starts with `ra_fixture`
-* the parameter name is a
-[well known name](https://github.com/rust-lang/rust-analyzer/blob/6b8b8ff4c56118ddee6c531cde06add1aad4a6af/crates/ide/src/inlay_hints/param_name.rs#L200)
-in a unary function
-* the parameter name is a
-[single character](https://github.com/rust-lang/rust-analyzer/blob/6b8b8ff4c56118ddee6c531cde06add1aad4a6af/crates/ide/src/inlay_hints/param_name.rs#L201)
-in a unary function
-
-![Inlay hints](https://user-images.githubusercontent.com/48062697/113020660-b5f98b80-917a-11eb-8d70-3be3fd558cdd.png)
-
-
-### Interpret A Function, Static Or Const.
-**Source:**  [interpret.rs](crates/ide/src/interpret.rs#8) 
-
-| Editor  | Action Name |
-|---------|-------------|
-| VS Code | **rust-analyzer: Interpret** |
-
-
-### Join Lines
-**Source:**  [join_lines.rs](crates/ide/src/join_lines.rs#20) 
-
-Join selected lines into one, smartly fixing up whitespace, trailing commas, and braces.
-
-See [this gif](https://user-images.githubusercontent.com/1711539/124515923-4504e800-dde9-11eb-8d58-d97945a1a785.gif) for the cases handled specially by joined lines.
-
-| Editor  | Action Name |
-|---------|-------------|
-| VS Code | **rust-analyzer: Join lines** |
-
-![Join Lines](https://user-images.githubusercontent.com/48062697/113020661-b6922200-917a-11eb-87c4-b75acc028f11.gif)
-
-
-### Magic Completions
-**Source:**  [lib.rs](crates/ide-completion/src/lib.rs#78) 
-
-In addition to usual reference completion, rust-analyzer provides some ✨magic✨
-completions as well:
-
-Keywords like `if`, `else`, `while`, and `loop` are completed with braces, and the cursor
-is placed at the appropriate position. Even though `if` is easy to type, you
-still want to complete it, to get ` { }` for free! `return` is inserted with a
-space or `;` depending on the return type of the function.
-
-When completing a function call, `()` are automatically inserted. If a function
-takes arguments, the cursor is positioned inside the parentheses.
-
-There are postfix completions, which can be triggered by typing something like
-`foo().if`. The word after `.` determines the postfix completion. Possible variants are:
-
-- `expr.if` -> `if expr {}` or `if let ... {}` for `Option` or `Result`
-- `expr.match` -> `match expr {}`
-- `expr.while` -> `while expr {}` or `while let ... {}` for `Option` or `Result`
-- `expr.ref` -> `&expr`
-- `expr.refm` -> `&mut expr`
-- `expr.let` -> `let $0 = expr;`
-- `expr.lete` -> `let $1 = expr else { $0 };`
-- `expr.letm` -> `let mut $0 = expr;`
-- `expr.not` -> `!expr`
-- `expr.dbg` -> `dbg!(expr)`
-- `expr.dbgr` -> `dbg!(&expr)`
-- `expr.call` -> `(expr)`
-
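-For instance, `x.dbg` and `x.ref` expand roughly as in the following sketch (the `take` helper is purely illustrative, not generated output):
-
-```rust
-fn take(value: &i32) {
-    let _ = value;
-}
-
-fn main() {
-    let x = 1 + 2;
-    // `x.dbg` expands to `dbg!(x)`
-    let y = dbg!(x);
-    // `x.ref` expands to `&x`
-    take(&x);
-    let _ = y;
-}
-```
-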
-There are also snippet completions:
-
-#### Expressions
-
-- `pd` -> `eprintln!(" = {:?}", );`
-- `ppd` -> `eprintln!(" = {:#?}", );`
-
-#### Items
-
-- `tfn` -> `#[test] fn feature(){}`
-- `tmod` ->
-```rust
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn test_name() {}
-}
-```
-
-There are also auto-import completions, enabled with the `rust-analyzer.completion.autoimport.enable` setting and the corresponding LSP client capabilities.
-These are additional completion options that insert the required `use` statement automatically, offering
-importable items from the whole project, fuzzy matched against the completion input.
-
-![Magic Completions](https://user-images.githubusercontent.com/48062697/113020667-b72ab880-917a-11eb-8778-716cf26a0eb3.gif)
-
-
-### Matching Brace
-**Source:**  [matching_brace.rs](crates/ide/src/matching_brace.rs#6) 
-
-If the cursor is on any brace (`<>(){}[]||`) that is part of a brace pair,
-moves the cursor to the matching brace. It uses the actual parser to determine
-braces, so it won't confuse generics with comparisons.
-
-| Editor  | Action Name |
-|---------|-------------|
-| VS Code | **rust-analyzer: Find matching brace** |
-
-![Matching Brace](https://user-images.githubusercontent.com/48062697/113065573-04298180-91b1-11eb-8dec-d4e2a202f304.gif)
-
-
-### Memory Usage
-**Source:**  [apply_change.rs](crates/ide-db/src/apply_change.rs#43) 
-
-Clears rust-analyzer's internal database and prints memory usage statistics.
-
-| Editor  | Action Name |
-|---------|-------------|
-| VS Code | **rust-analyzer: Memory Usage (Clears Database)**
-
-
-### Move Item
-**Source:**  [move_item.rs](crates/ide/src/move_item.rs#16) 
-
-Move item under cursor or selection up and down.
-
-| Editor  | Action Name |
-|---------|-------------|
-| VS Code | **rust-analyzer: Move item up**
-| VS Code | **rust-analyzer: Move item down**
-
-![Move Item](https://user-images.githubusercontent.com/48062697/113065576-04298180-91b1-11eb-91ce-4505e99ed598.gif)
-
-
-### On Enter
-**Source:**  [on_enter.rs](crates/ide/src/typing/on_enter.rs#17) 
-
-rust-analyzer can override the <kbd>Enter</kbd> key to make it smarter:
-
-- <kbd>Enter</kbd> inside triple-slash comments automatically inserts `///`
-- <kbd>Enter</kbd> in the middle or after a trailing space in `//` inserts `//`
-- <kbd>Enter</kbd> inside `//!` doc comments automatically inserts `//!`
-- <kbd>Enter</kbd> after `{` indents contents and closing `}` of a single-line block
-
-This action needs to be assigned to a shortcut explicitly.
-
-Note that, depending on the other installed extensions, this feature can visibly slow down typing.
-Similarly, if rust-analyzer crashes or stops responding, `Enter` might not work.
-In that case, you can still press `Shift-Enter` to insert a newline.
-
-#### VS Code
-
-Add the following to `keybindings.json`:
-```json
-{
-  "key": "Enter",
-  "command": "rust-analyzer.onEnter",
-  "when": "editorTextFocus && !suggestWidgetVisible && editorLangId == rust"
-}
-```
-
-When using the Vim plugin:
-```json
-{
-  "key": "Enter",
-  "command": "rust-analyzer.onEnter",
-  "when": "editorTextFocus && !suggestWidgetVisible && editorLangId == rust && vim.mode == 'Insert'"
-}
-```
-
-![On Enter](https://user-images.githubusercontent.com/48062697/113065578-04c21800-91b1-11eb-82b8-22b8c481e645.gif)
-
-
-### On Typing Assists
-**Source:**  [typing.rs](crates/ide/src/typing.rs#42) 
-
-Some features trigger on typing certain characters:
-
-- typing `let =` tries to smartly add `;` if `=` is followed by an existing expression
-- typing `=` between two expressions adds `;` when in statement position
-- typing `=` to turn an assignment into an equality comparison removes `;` when in expression position
-- typing `.` in a chain method call auto-indents
-- typing `{` or `(` in front of an expression inserts a closing `}` or `)` after the expression
-- typing `{` in a use item adds a closing `}` in the right place
-- typing `>` to complete a return type `->` will insert a space after it
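-
-As a concrete illustration of the first assist (a hedged sketch; `compute` is just a placeholder name): starting from a bare call expression, typing a `let` binding with `=` in front of it makes rust-analyzer append the missing `;`.
-
-```rust
-fn compute() -> i32 {
-    92
-}
-
-fn main() {
-    // Before: the line was just `compute()` with no semicolon.
-    // After typing `let value = ` in front of it, the trailing `;` is added
-    // automatically so the statement stays well-formed:
-    let value = compute();
-    println!("{value}");
-}
-```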
-
-#### VS Code
-
-Add the following to `settings.json`:
-```json
-"editor.formatOnType": true,
-```
-
-![On Typing Assists](https://user-images.githubusercontent.com/48062697/113166163-69758500-923a-11eb-81ee-eb33ec380399.gif)
-![On Typing Assists](https://user-images.githubusercontent.com/48062697/113171066-105c2000-923f-11eb-87ab-f4a263346567.gif)
-
-
-### Open Docs
-**Source:**  [doc_links.rs](crates/ide/src/doc_links.rs#118) 
-
-Retrieves a link to the documentation for the given symbol.
-
-The simplest way to use this feature is via the context menu. Right-click on
-the selected item. The context menu opens. Select **Open Docs**.
-
-| Editor  | Action Name |
-|---------|-------------|
-| VS Code | **rust-analyzer: Open Docs** |
-
-
-### Parent Module
-**Source:**  [parent_module.rs](crates/ide/src/parent_module.rs#14) 
-
-Navigates to the parent module of the current module.
-
-| Editor  | Action Name |
-|---------|-------------|
-| VS Code | **rust-analyzer: Locate parent module** |
-
-![Parent Module](https://user-images.githubusercontent.com/48062697/113065580-04c21800-91b1-11eb-9a32-00086161c0bd.gif)
-
-
-### Related Tests
-**Source:**  [runnables.rs](crates/ide/src/runnables.rs#202) 
-
-Provides a sneak peek of all tests where the current item is used.
-
-The simplest way to use this feature is via the context menu. Right-click on
-the selected item. The context menu opens. Select **Peek Related Tests**.
-
-| Editor  | Action Name |
-|---------|-------------|
-| VS Code | **rust-analyzer: Peek Related Tests** |
-
-
-### Rename
-**Source:**  [rename.rs](crates/ide/src/rename.rs#70) 
-
-Renames the item below the cursor and all of its references.
-
-| Editor  | Shortcut |
-|---------|----------|
-| VS Code | <kbd>F2</kbd> |
-
-![Rename](https://user-images.githubusercontent.com/48062697/113065582-055aae80-91b1-11eb-8ade-2b58e6d81883.gif)
-
-
-### Run
-**Source:**  [runnables.rs](crates/ide/src/runnables.rs#116) 
-
-Shows a popup suggesting to run a test/benchmark/binary **at the current cursor
-location**. Super useful for repeatedly running just a single test. Do bind this
-to a shortcut!
-
-| Editor  | Action Name |
-|---------|-------------|
-| VS Code | **rust-analyzer: Run** |
-
-![Run](https://user-images.githubusercontent.com/48062697/113065583-055aae80-91b1-11eb-958f-d67efcaf6a2f.gif)
-
-
-### Semantic Syntax Highlighting
-**Source:**  [syntax_highlighting.rs](crates/ide/src/syntax_highlighting.rs#68) 
-
-rust-analyzer highlights the code semantically.
-For example, `Bar` in `foo::Bar` might be colored differently depending on whether `Bar` is an enum or a trait.
-rust-analyzer does not specify colors directly; instead, it assigns a tag (like `struct`) and a set of modifiers (like `declaration`) to each token.
-It's up to the client to map those to specific colors.
-
-The general rule is that a reference to an entity gets colored the same way as the entity itself.
-We also give a special modifier to `mut` and `&mut` local variables.
-
-
-#### Token Tags
-
-Rust-analyzer currently emits the following token tags:
-
-- For items:
-
-|           |                                |
-|-----------|--------------------------------|
-| attribute |  Emitted for attribute macros. |
-|enum| Emitted for enums. |
-|function| Emitted for free-standing functions. |
-|derive| Emitted for derive macros. |
-|macro| Emitted for function-like macros. |
-|method| Emitted for associated functions, also known as methods. |
-|namespace| Emitted for modules. |
-|struct| Emitted for structs.|
-|trait| Emitted for traits.|
-|typeAlias| Emitted for type aliases and `Self` in `impl`s.|
-|union| Emitted for unions.|
-
-- For literals:
-
-|           |                                |
-|-----------|--------------------------------|
-| boolean|  Emitted for the boolean literals `true` and `false`.|
-| character| Emitted for character literals.|
-| number| Emitted for numeric literals.|
-| string| Emitted for string literals.|
-| escapeSequence| Emitted for escape sequences inside strings like `\n`.|
-| formatSpecifier| Emitted for format specifiers `{:?}` in `format!`-like macros.|
-
-- For operators:
-
-|           |                                |
-|-----------|--------------------------------|
-|operator| Emitted for general operators.|
-|arithmetic| Emitted for the arithmetic operators `+`, `-`, `*`, `/`, `+=`, `-=`, `*=`, `/=`.|
-|bitwise| Emitted for the bitwise operators `\|`, `&`, `!`, `^`, `\|=`, `&=`, `^=`.|
-|comparison| Emitted for the comparison operators `>`, `<`, `==`, `>=`, `<=`, `!=`.|
-|logical| Emitted for the logical operators `\|\|`, `&&`, `!`.|
-
-- For punctuation:
-
-|           |                                |
-|-----------|--------------------------------|
-|punctuation| Emitted for general punctuation.|
-|attributeBracket| Emitted for attribute invocation brackets, that is the `#[` and `]` tokens.|
-|angle| Emitted for `<>` angle brackets.|
-|brace| Emitted for `{}` braces.|
-|bracket| Emitted for `[]` brackets.|
-|parenthesis| Emitted for `()` parentheses.|
-|colon| Emitted for the `:` token.|
-|comma| Emitted for the `,` token.|
-|dot| Emitted for the `.` token.|
-|semi| Emitted for the `;` token.|
-|macroBang| Emitted for the `!` token in macro calls.|
-
-- For the remaining token kinds:
-
-|           |                                |
-|-----------|--------------------------------|
-|builtinAttribute| Emitted for names of builtin attributes in attribute paths, for example the `repr` in `#[repr(u8)]`.|
-|builtinType| Emitted for builtin types like `u32`, `str` and `f32`.|
-|comment| Emitted for comments.|
-|constParameter| Emitted for const parameters.|
-|deriveHelper| Emitted for derive helper attributes.|
-|enumMember| Emitted for enum variants.|
-|generic| Emitted for generic tokens that have no mapping.|
-|keyword| Emitted for keywords.|
-|label| Emitted for labels.|
-|lifetime| Emitted for lifetimes.|
-|parameter| Emitted for non-self function parameters.|
-|property| Emitted for struct and union fields.|
-|selfKeyword| Emitted for the `self` function parameter and `self` path specifier.|
-|selfTypeKeyword| Emitted for the `Self` type parameter.|
-|toolModule| Emitted for tool modules.|
-|typeParameter| Emitted for type parameters.|
-|unresolvedReference| Emitted for unresolved references, names that rust-analyzer can't find the definition of.|
-|variable| Emitted for locals, constants and statics.|
-
-
-#### Token Modifiers
-
-Token modifiers allow styling some elements in the source code more precisely.
-
-Rust-analyzer currently emits the following token modifiers:
-
-|           |                                |
-|-----------|--------------------------------|
-|async| Emitted for async functions and the `async` and `await` keywords.|
-|attribute| Emitted for tokens inside attributes.|
-|callable| Emitted for locals whose types implement one of the `Fn*` traits.|
-|constant| Emitted for consts.|
-|consuming| Emitted for locals that are being consumed when used in a function call.|
-|controlFlow| Emitted for control-flow related tokens; this includes the `?` operator.|
-|crateRoot| Emitted for crate names, like `serde` and `crate`.|
-|declaration| Emitted for names of definitions, like `foo` in `fn foo(){}`.|
-|defaultLibrary| Emitted for items from built-in crates (std, core, alloc, test and proc_macro).|
-|documentation| Emitted for documentation comments.|
-|injected| Emitted for doc-string injected highlighting like rust source blocks in documentation.|
-|intraDocLink| Emitted for intra doc links in doc-strings.|
-|library| Emitted for items that are defined outside of the current crate.|
-|macro|  Emitted for tokens inside macro calls.|
-|mutable| Emitted for mutable locals and statics as well as functions taking `&mut self`.|
-|public| Emitted for items that are from the current crate and are `pub`.|
-|reference| Emitted for locals behind a reference and functions taking `self` by reference.|
-|static| Emitted for "static" functions, also known as functions that do not take a `self` param, as well as statics and consts.|
-|trait| Emitted for associated trait items.|
-|unsafe| Emitted for unsafe operations, like unsafe function calls, as well as the `unsafe` token.|
-
-![Semantic Syntax Highlighting](https://user-images.githubusercontent.com/48062697/113164457-06cfb980-9239-11eb-819b-0f93e646acf8.png)
-![Semantic Syntax Highlighting](https://user-images.githubusercontent.com/48062697/113187625-f7f50100-9250-11eb-825e-91c58f236071.png)
-
-
-### Show Dependency Tree
-**Source:**  [fetch_crates.rs](crates/ide/src/fetch_crates.rs#13) 
-
-Shows a tree view with all the dependencies of this project.
-
-| Editor  | Panel Name |
-|---------|------------|
-| VS Code | **Rust Dependencies** |
-
-![Show Dependency Tree](https://user-images.githubusercontent.com/5748995/229394139-2625beab-f4c9-484b-84ed-ad5dee0b1e1a.png)
-
-
-### Show Syntax Tree
-**Source:**  [view_syntax_tree.rs](crates/ide/src/view_syntax_tree.rs#14) 
-
-Shows a tree view with the syntax tree of the current file.
-
-| Editor  | Panel Name |
-|---------|-------------|
-| VS Code | **Rust Syntax Tree** |
-
-
-### Status
-**Source:**  [status.rs](crates/ide/src/status.rs#28) 
-
-Shows internal statistics about memory usage of rust-analyzer.
-
-| Editor  | Action Name |
-|---------|-------------|
-| VS Code | **rust-analyzer: Status** |
-
-![Status](https://user-images.githubusercontent.com/48062697/113065584-05f34500-91b1-11eb-98cc-5c196f76be7f.gif)
-
-
-### Structural Search and Replace
-**Source:**  [lib.rs](crates/ide-ssr/src/lib.rs#6) 
-
-Search and replace with named wildcards that will match any expression, type, path, pattern or item.
-The syntax for a structural search replace command is `<search_pattern> ==>> <replace_pattern>`.
-A `$<name>` placeholder in the search pattern will match any AST node and `$<name>` will reference it in the replacement.
-Within a macro call, a placeholder will match up until whatever token follows the placeholder.
-
-All paths in both the search pattern and the replacement template must resolve in the context
-in which this command is invoked. Paths in the search pattern will then match the code if they
-resolve to the same item, even if they're written differently. For example if we invoke the
-command in the module `foo` with a pattern of `Bar`, then code in the parent module that refers
-to `foo::Bar` will match.
-
-Paths in the replacement template will be rendered appropriately for the context in which the
-replacement occurs. For example if our replacement template is `foo::Bar` and we match some
-code in the `foo` module, we'll insert just `Bar`.
-
-Inherent method calls should generally be written in UFCS form. e.g. `foo::Bar::baz($s, $a)` will
-match `$s.baz($a)`, provided the method call `baz` resolves to the method `foo::Bar::baz`. When a
-placeholder is the receiver of a method call in the search pattern (e.g. `$s.foo()`), but not in
-the replacement template (e.g. `bar($s)`), then `*`, `&` and `&mut` will be added as needed to mirror
-whatever autoderef and autoref was happening implicitly in the matched code.
-
-The scope of the search / replace will be restricted to the current selection if any, otherwise
-it will apply to the whole workspace.
-
-Placeholders may be given constraints by writing them as `${<name>:<constraint1>:<constraint2>...}`.
-
-Supported constraints:
-
-| Constraint    | Restricts placeholder |
-|---------------|------------------------|
-| kind(literal) | Is a literal (e.g. `42` or `"forty two"`) |
-| not(a)        | Negates the constraint `a` |
-
-Available via the command `rust-analyzer.ssr`.
-
-```rust
-// Using structural search replace command [foo($a, $b) ==>> ($a).foo($b)]
-
-// BEFORE
-String::from(foo(y + 5, z))
-
-// AFTER
-String::from((y + 5).foo(z))
-```
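-
-Placeholder constraints use the same rule syntax. A hypothetical rule restricting the placeholder to literal arguments (the function names are illustrative, not part of any real API):
-
-```rust
-// Using structural search replace command [foo(${a:kind(literal)}) ==>> foo_const($a)]
-
-// BEFORE
-foo(42);
-foo(x);
-
-// AFTER (only the call with a literal argument matches)
-foo_const(42);
-foo(x);
-```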
-
-| Editor  | Action Name |
-|---------|--------------|
-| VS Code | **rust-analyzer: Structural Search Replace** |
-
-Also available as an assist, by writing a comment containing the structural
-search and replace rule. You will only see the assist if the comment can
-be parsed as a valid structural search and replace rule.
-
-```rust
-// Place the cursor on the line below to see the assist 💡.
-// foo($a, $b) ==>> ($a).foo($b)
-```
-
-
-### User Snippet Completions
-**Source:**  [snippet.rs](crates/ide-completion/src/snippet.rs#5) 
-
-rust-analyzer allows the user to define custom (postfix) snippets that are only applicable if the items they depend on are accessible in the current scope.
-
-A custom snippet can be defined by adding it to the `rust-analyzer.completion.snippets.custom` object.
-
-```json
-{
-  "rust-analyzer.completion.snippets.custom": {
-    "thread spawn": {
-      "prefix": ["spawn", "tspawn"],
-      "body": [
-        "thread::spawn(move || {",
-        "\t$0",
-        "});",
-      ],
-      "description": "Insert a thread::spawn call",
-      "requires": "std::thread",
-      "scope": "expr",
-    }
-  }
-}
-```
-
-In the example above:
-
-* `"thread spawn"` is the name of the snippet.
-
-* `prefix` defines one or more trigger words that will trigger the snippet completion.
-Using `postfix` will instead create a postfix snippet.
-
-* `body` is one or more lines of content joined via newlines for the final output.
-
-* `description` is an optional description of the snippet; if unset, the snippet name will be used.
-
-* `requires` is an optional list of item paths that have to be resolvable in the current crate where the completion is rendered.
-
-
-### View Crate Graph
-**Source:**  [view_crate_graph.rs](crates/ide/src/view_crate_graph.rs#8) 
-
-Renders the currently loaded crate graph as an SVG graphic. Requires the `dot` tool, which
-is part of graphviz, to be installed.
-
-Only workspace crates are included, no crates.io dependencies or sysroot crates.
-
-| Editor  | Action Name |
-|---------|-------------|
-| VS Code | **rust-analyzer: View Crate Graph** |
-
-
-### View Hir
-**Source:**  [view_hir.rs](crates/ide/src/view_hir.rs#5) 
-
-| Editor  | Action Name |
-|---------|--------------|
-| VS Code | **rust-analyzer: View Hir**
-
-![View Hir](https://user-images.githubusercontent.com/48062697/113065588-068bdb80-91b1-11eb-9a78-0b4ef1e972fb.gif)
-
-
-### View Memory Layout
-**Source:**  [view_memory_layout.rs](crates/ide/src/view_memory_layout.rs#74) 
-
-Displays the recursive memory layout of a datatype.
-
-| Editor  | Action Name |
-|---------|-------------|
-| VS Code | **rust-analyzer: View Memory Layout** |
-
-
-### View Mir
-**Source:**  [view_mir.rs](crates/ide/src/view_mir.rs#5) 
-
-| Editor  | Action Name |
-|---------|-------------|
-| VS Code | **rust-analyzer: View Mir**
-
-
-### Workspace Symbol
-**Source:**  [symbol_index.rs](crates/ide-db/src/symbol_index.rs#174) 
-
-Uses fuzzy-search to find types, modules and functions by name across your
-project and dependencies. This is **the** most useful feature, which improves code
-navigation tremendously. It mostly works on top of the built-in LSP
-functionality; however, `#` and `*` symbols can be used to narrow down the
-search. Specifically,
-
-- `Foo` searches for `Foo` type in the current workspace
-- `foo#` searches for `foo` function in the current workspace
-- `Foo*` searches for `Foo` type among dependencies, including `stdlib`
-- `foo#*` searches for `foo` function among dependencies
-
-That is, `#` switches from "types" to all symbols, `*` switches from the current
-workspace to dependencies.
-
-Note that filtering does not currently work in VSCode due to the editor never
-sending the special symbols to the language server. Instead, you can configure
-the filtering via the `rust-analyzer.workspace.symbol.search.scope` and
-`rust-analyzer.workspace.symbol.search.kind` settings. Symbols prefixed
-with `__` are hidden from the search results unless configured otherwise.
-
-| Editor  | Shortcut |
-|---------|-----------|
-| VS Code | <kbd>Ctrl+T</kbd>
diff --git a/src/tools/rust-analyzer/editors/code/README.md b/src/tools/rust-analyzer/editors/code/README.md
index 36ab98188220e..c02882b4982e8 100644
--- a/src/tools/rust-analyzer/editors/code/README.md
+++ b/src/tools/rust-analyzer/editors/code/README.md
@@ -15,19 +15,19 @@ It is recommended over and replaces `rust-lang.rust`.
 - apply suggestions from errors
 - ... and many more, check out the [manual] to see them all
 
-[code completion]: https://rust-analyzer.github.io/manual.html#magic-completions
-[imports insertion]: https://rust-analyzer.github.io/manual.html#completion-with-autoimport
-[definition]: https://rust-analyzer.github.io/manual.html#go-to-definition
-[implementation]: https://rust-analyzer.github.io/manual.html#go-to-implementation
-[type definition]: https://rust-analyzer.github.io/manual.html#go-to-type-definition
-[find all references]: https://rust-analyzer.github.io/manual.html#find-all-references
-[workspace symbol search]: https://rust-analyzer.github.io/manual.html#workspace-symbol
-[symbol renaming]: https://rust-analyzer.github.io/manual.html#rename
-[types and documentation on hover]: https://rust-analyzer.github.io/manual.html#hover
-[inlay hints]: https://rust-analyzer.github.io/manual.html#inlay-hints
-[semantic syntax highlighting]: https://rust-analyzer.github.io/manual.html#semantic-syntax-highlighting
-[assists (code actions)]: https://rust-analyzer.github.io/manual.html#assists-code-actions
-[manual]: https://rust-analyzer.github.io/manual.html
+[code completion]: https://rust-analyzer.github.io/book/features.html#magic-completions
+[imports insertion]: https://rust-analyzer.github.io/book/features.html#completion-with-autoimport
+[definition]: https://rust-analyzer.github.io/book/features.html#go-to-definition
+[implementation]: https://rust-analyzer.github.io/book/features.html#go-to-implementation
+[type definition]: https://rust-analyzer.github.io/book/features.html#go-to-type-definition
+[find all references]: https://rust-analyzer.github.io/book/features.html#find-all-references
+[workspace symbol search]: https://rust-analyzer.github.io/book/features.html#workspace-symbol
+[symbol renaming]: https://rust-analyzer.github.io/book/features.html#rename
+[types and documentation on hover]: https://rust-analyzer.github.io/book/features.html#hover
+[inlay hints]: https://rust-analyzer.github.io/book/features.html#inlay-hints
+[semantic syntax highlighting]: https://rust-analyzer.github.io/book/features.html#semantic-syntax-highlighting
+[assists (code actions)]: https://rust-analyzer.github.io/book/assists.html
+[manual]: https://rust-analyzer.github.io/book/features.html
 
 ## Quick start
 
@@ -41,7 +41,7 @@ It is recommended over and replaces `rust-lang.rust`.
 
 This extension provides configurations through VSCode's configuration settings. All configurations are under `rust-analyzer.*`.
 
-See [the manual](https://rust-analyzer.github.io/manual.html#vs-code-2) for more information on VSCode specific configurations.
+See [the manual](https://rust-analyzer.github.io/book/editor_features.html#vs-code) for more information on VSCode specific configurations.
 
 ## Communication
 
diff --git a/src/tools/rust-analyzer/editors/code/language-configuration.json b/src/tools/rust-analyzer/editors/code/language-configuration.json
index 6619d0c85c5f9..aacd48b0f830c 100644
--- a/src/tools/rust-analyzer/editors/code/language-configuration.json
+++ b/src/tools/rust-analyzer/editors/code/language-configuration.json
@@ -14,9 +14,9 @@
         ["(", ")"]
     ],
     "autoClosingPairs": [
-        { "open": "{", "close": "}" },
-        { "open": "[", "close": "]" },
-        { "open": "(", "close": ")" },
+        { "open": "{", "close": "}", "notIn": ["string"] },
+        { "open": "[", "close": "]", "notIn": ["string"] },
+        { "open": "(", "close": ")", "notIn": ["string"] },
         { "open": "\"", "close": "\"", "notIn": ["string"] },
         { "open": "/*", "close": " */", "notIn": ["string"] },
         { "open": "`", "close": "`", "notIn": ["string"] },
diff --git a/src/tools/rust-analyzer/editors/code/package.json b/src/tools/rust-analyzer/editors/code/package.json
index 9df41c7487c35..a282eea99973c 100644
--- a/src/tools/rust-analyzer/editors/code/package.json
+++ b/src/tools/rust-analyzer/editors/code/package.json
@@ -170,6 +170,11 @@
                 "title": "Locate parent module",
                 "category": "rust-analyzer"
             },
+            {
+                "command": "rust-analyzer.childModules",
+                "title": "Locate child modules",
+                "category": "rust-analyzer"
+            },
             {
                 "command": "rust-analyzer.joinLines",
                 "title": "Join lines",
@@ -538,7 +543,8 @@
                         "additionalProperties": {
                             "type": [
                                 "string",
-                                "number"
+                                "number",
+                                "null"
                             ]
                         },
                         "default": null,
@@ -606,11 +612,6 @@
                             "/rustc/<id>": "${env:USERPROFILE}/.rustup/toolchains/<toolchain-id>/lib/rustlib/src/rust"
                         }
                     },
-                    "rust-analyzer.debug.openDebugPane": {
-                        "markdownDescription": "Whether to open up the `Debug Panel` on debugging start.",
-                        "type": "boolean",
-                        "default": false
-                    },
                     "rust-analyzer.debug.buildBeforeRestart": {
                         "markdownDescription": "Whether to rebuild the project modules before debugging the same test again",
                         "type": "boolean",
@@ -896,6 +897,16 @@
                     }
                 }
             },
+            {
+                "title": "cargo",
+                "properties": {
+                    "rust-analyzer.cargo.noDeps": {
+                        "markdownDescription": "Whether to skip fetching dependencies. If set to \"true\", the analysis is performed\nentirely offline, and Cargo metadata for dependencies is not fetched.",
+                        "default": false,
+                        "type": "boolean"
+                    }
+                }
+            },
             {
                 "title": "cargo",
                 "properties": {
@@ -1875,7 +1886,7 @@
                 "title": "imports",
                 "properties": {
                     "rust-analyzer.imports.group.enable": {
-                        "markdownDescription": "Group inserted imports by the [following order](https://rust-analyzer.github.io/manual.html#auto-import). Groups are separated by newlines.",
+                        "markdownDescription": "Group inserted imports by the [following order](https://rust-analyzer.github.io/book/features.html#auto-import). Groups are separated by newlines.",
                         "default": true,
                         "type": "boolean"
                     }
@@ -2823,7 +2834,7 @@
                 "title": "workspace",
                 "properties": {
                     "rust-analyzer.workspace.discoverConfig": {
-                        "markdownDescription": "Enables automatic discovery of projects using [`DiscoverWorkspaceConfig::command`].\n\n[`DiscoverWorkspaceConfig`] also requires setting `progress_label` and `files_to_watch`.\n`progress_label` is used for the title in progress indicators, whereas `files_to_watch`\nis used to determine which build system-specific files should be watched in order to\nreload rust-analyzer.\n\nBelow is an example of a valid configuration:\n```json\n\"rust-analyzer.workspace.discoverConfig\": {\n        \"command\": [\n                \"rust-project\",\n                \"develop-json\"\n        ],\n        \"progressLabel\": \"rust-analyzer\",\n        \"filesToWatch\": [\n                \"BUCK\"\n        ]\n}\n```\n\n## On `DiscoverWorkspaceConfig::command`\n\n**Warning**: This format is provisional and subject to change.\n\n[`DiscoverWorkspaceConfig::command`] *must* return a JSON object\ncorresponding to `DiscoverProjectData::Finished`:\n\n```norun\n#[derive(Debug, Clone, Deserialize, Serialize)]\n#[serde(tag = \"kind\")]\n#[serde(rename_all = \"snake_case\")]\nenum DiscoverProjectData {\n        Finished { buildfile: Utf8PathBuf, project: ProjectJsonData },\n        Error { error: String, source: Option<String> },\n        Progress { message: String },\n}\n```\n\nAs JSON, `DiscoverProjectData::Finished` is:\n\n```json\n{\n        // the internally-tagged representation of the enum.\n        \"kind\": \"finished\",\n        // the file used by a non-Cargo build system to define\n        // a package or target.\n        \"buildfile\": \"rust-analyzer/BUILD\",\n        // the contents of a rust-project.json, elided for brevity\n        \"project\": {\n                \"sysroot\": \"foo\",\n                \"crates\": []\n        }\n}\n```\n\nIt is encouraged, but not required, to use the other variants on\n`DiscoverProjectData` to provide a more polished end-user experience.\n\n`DiscoverWorkspaceConfig::command` may *optionally* include an `{arg}`,\nwhich will be substituted with the JSON-serialized form of the following\nenum:\n\n```norun\n#[derive(PartialEq, Clone, Debug, Serialize)]\n#[serde(rename_all = \"camelCase\")]\npub enum DiscoverArgument {\n     Path(AbsPathBuf),\n     Buildfile(AbsPathBuf),\n}\n```\n\nThe JSON representation of `DiscoverArgument::Path` is:\n\n```json\n{\n        \"path\": \"src/main.rs\"\n}\n```\n\nSimilarly, the JSON representation of `DiscoverArgument::Buildfile` is:\n\n```\n{\n        \"buildfile\": \"BUILD\"\n}\n```\n\n`DiscoverArgument::Path` is used to find and generate a `rust-project.json`,\nand therefore, a workspace, whereas `DiscoverArgument::buildfile` is used to\nto update an existing workspace. As a reference for implementors,\nbuck2's `rust-project` will likely be useful:\nhttps://github.com/facebook/buck2/tree/main/integrations/rust-project.",
+                        "markdownDescription": "Enables automatic discovery of projects using [`DiscoverWorkspaceConfig::command`].\n\n[`DiscoverWorkspaceConfig`] also requires setting `progress_label` and `files_to_watch`.\n`progress_label` is used for the title in progress indicators, whereas `files_to_watch`\nis used to determine which build system-specific files should be watched in order to\nreload rust-analyzer.\n\nBelow is an example of a valid configuration:\n```json\n\"rust-analyzer.workspace.discoverConfig\": {\n        \"command\": [\n                \"rust-project\",\n                \"develop-json\"\n        ],\n        \"progressLabel\": \"rust-analyzer\",\n        \"filesToWatch\": [\n                \"BUCK\"\n        ]\n}\n```\n\n## On `DiscoverWorkspaceConfig::command`\n\n**Warning**: This format is provisional and subject to change.\n\n[`DiscoverWorkspaceConfig::command`] *must* return a JSON object\ncorresponding to `DiscoverProjectData::Finished`:\n\n```norun\n#[derive(Debug, Clone, Deserialize, Serialize)]\n#[serde(tag = \"kind\")]\n#[serde(rename_all = \"snake_case\")]\nenum DiscoverProjectData {\n        Finished { buildfile: Utf8PathBuf, project: ProjectJsonData },\n        Error { error: String, source: Option<String> },\n        Progress { message: String },\n}\n```\n\nAs JSON, `DiscoverProjectData::Finished` is:\n\n```json\n{\n        // the internally-tagged representation of the enum.\n        \"kind\": \"finished\",\n        // the file used by a non-Cargo build system to define\n        // a package or target.\n        \"buildfile\": \"rust-analyzer/BUILD\",\n        // the contents of a rust-project.json, elided for brevity\n        \"project\": {\n                \"sysroot\": \"foo\",\n                \"crates\": []\n        }\n}\n```\n\nIt is encouraged, but not required, to use the other variants on\n`DiscoverProjectData` to provide a more polished end-user experience.\n\n`DiscoverWorkspaceConfig::command` may *optionally* include an `{arg}`,\nwhich will be substituted with the JSON-serialized form of the following\nenum:\n\n```norun\n#[derive(PartialEq, Clone, Debug, Serialize)]\n#[serde(rename_all = \"camelCase\")]\npub enum DiscoverArgument {\n     Path(AbsPathBuf),\n     Buildfile(AbsPathBuf),\n}\n```\n\nThe JSON representation of `DiscoverArgument::Path` is:\n\n```json\n{\n        \"path\": \"src/main.rs\"\n}\n```\n\nSimilarly, the JSON representation of `DiscoverArgument::Buildfile` is:\n\n```json\n{\n        \"buildfile\": \"BUILD\"\n}\n```\n\n`DiscoverArgument::Path` is used to find and generate a `rust-project.json`,\nand therefore, a workspace, whereas `DiscoverArgument::buildfile` is used to\nto update an existing workspace. As a reference for implementors,\nbuck2's `rust-project` will likely be useful:\nhttps://github.com/facebook/buck2/tree/main/integrations/rust-project.",
                         "default": null,
                         "anyOf": [
                             {
@@ -3368,6 +3379,10 @@
                     "command": "rust-analyzer.parentModule",
                     "when": "inRustProject"
                 },
+                {
+                    "command": "rust-analyzer.childModules",
+                    "when": "inRustProject"
+                },
                 {
                     "command": "rust-analyzer.joinLines",
                     "when": "inRustProject"
@@ -3488,6 +3503,7 @@
         "views": {
             "explorer": [
                 {
+                    "icon": "$(package)",
                     "id": "rustDependencies",
                     "name": "Rust Dependencies",
                     "when": "inRustProject && config.rust-analyzer.showDependenciesExplorer"
@@ -3495,6 +3511,7 @@
             ],
             "rustSyntaxTreeContainer": [
                 {
+                    "icon": "$(file-code)",
                     "id": "rustSyntaxTree",
                     "name": "Rust Syntax Tree",
                     "when": "inRustProject && config.rust-analyzer.showSyntaxTree"
@@ -3537,13 +3554,13 @@
                     {
                         "id": "docs",
                         "title": "Visit the docs!",
-                        "description": "Confused about configurations? Want to learn more about rust-analyzer? Visit the [User Manual](https://rust-analyzer.github.io/manual.html)!",
+                        "description": "Confused about configurations? Want to learn more about rust-analyzer? Visit the [User Manual](https://rust-analyzer.github.io/book/)!",
                         "media": {
                             "image": "./icon.png",
                             "altText": "rust-analyzer logo"
                         },
                         "completionEvents": [
-                            "onLink:https://rust-analyzer.github.io/manual.html"
+                            "onLink:https://rust-analyzer.github.io/book/"
                         ]
                     },
                     {
diff --git a/src/tools/rust-analyzer/editors/code/src/bootstrap.ts b/src/tools/rust-analyzer/editors/code/src/bootstrap.ts
index bccae73c9a783..bddf195803d0b 100644
--- a/src/tools/rust-analyzer/editors/code/src/bootstrap.ts
+++ b/src/tools/rust-analyzer/editors/code/src/bootstrap.ts
@@ -15,7 +15,7 @@ export async function bootstrap(
     if (!path) {
         throw new Error(
             "rust-analyzer Language Server is not available. " +
-                "Please, ensure its [proper installation](https://rust-analyzer.github.io/manual.html#installation).",
+                "Please, ensure its [proper installation](https://rust-analyzer.github.io/book/installation.html).",
         );
     }
 
@@ -187,8 +187,16 @@ async function hasToolchainFileWithRaDeclared(uri: vscode.Uri): Promise<boolean>
 export async function isValidExecutable(path: string, extraEnv: Env): Promise<boolean> {
     log.debug("Checking availability of a binary at", path);
 
+    const newEnv = { ...process.env };
+    for (const [k, v] of Object.entries(extraEnv)) {
+        if (v) {
+            newEnv[k] = v;
+        } else if (k in newEnv) {
+            delete newEnv[k];
+        }
+    }
     const res = await spawnAsync(path, ["--version"], {
-        env: { ...process.env, ...extraEnv },
+        env: newEnv,
     });
 
     if (res.error) {
diff --git a/src/tools/rust-analyzer/editors/code/src/commands.ts b/src/tools/rust-analyzer/editors/code/src/commands.ts
index 4e614d3205714..3ac1a933d9ec9 100644
--- a/src/tools/rust-analyzer/editors/code/src/commands.ts
+++ b/src/tools/rust-analyzer/editors/code/src/commands.ts
@@ -266,6 +266,43 @@ export function parentModule(ctx: CtxInit): Cmd {
     };
 }
 
+export function childModules(ctx: CtxInit): Cmd {
+    return async () => {
+        const editor = vscode.window.activeTextEditor;
+        if (!editor) return;
+        if (!(isRustDocument(editor.document) || isCargoTomlDocument(editor.document))) return;
+
+        const client = ctx.client;
+
+        const locations = await client.sendRequest(ra.childModules, {
+            textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(editor.document),
+            position: client.code2ProtocolConverter.asPosition(editor.selection.active),
+        });
+        if (!locations) return;
+
+        if (locations.length === 1) {
+            const loc = unwrapUndefinable(locations[0]);
+
+            const uri = client.protocol2CodeConverter.asUri(loc.targetUri);
+            const range = client.protocol2CodeConverter.asRange(loc.targetRange);
+
+            const doc = await vscode.workspace.openTextDocument(uri);
+            const e = await vscode.window.showTextDocument(doc);
+            e.selection = new vscode.Selection(range.start, range.start);
+            e.revealRange(range, vscode.TextEditorRevealType.InCenter);
+        } else {
+            const uri = editor.document.uri.toString();
+            const position = client.code2ProtocolConverter.asPosition(editor.selection.active);
+            await showReferencesImpl(
+                client,
+                uri,
+                position,
+                locations.map((loc) => lc.Location.create(loc.targetUri, loc.targetRange)),
+            );
+        }
+    };
+}
+
 export function openCargoToml(ctx: CtxInit): Cmd {
     return async () => {
         const editor = ctx.activeRustEditor;
diff --git a/src/tools/rust-analyzer/editors/code/src/config.ts b/src/tools/rust-analyzer/editors/code/src/config.ts
index 896b3c10cbf5d..f36e18a73da00 100644
--- a/src/tools/rust-analyzer/editors/code/src/config.ts
+++ b/src/tools/rust-analyzer/editors/code/src/config.ts
@@ -2,7 +2,7 @@ import * as Is from "vscode-languageclient/lib/common/utils/is";
 import * as os from "os";
 import * as path from "path";
 import * as vscode from "vscode";
-import { expectNotUndefined, log, unwrapUndefinable } from "./util";
+import { expectNotUndefined, log, normalizeDriveLetter, unwrapUndefinable } from "./util";
 import type { Env } from "./util";
 import type { Disposable } from "vscode";
 
@@ -213,12 +213,13 @@ export class Config {
 
     get serverExtraEnv(): Env {
         const extraEnv =
-            this.get<{ [key: string]: string | number } | null>("server.extraEnv") ?? {};
+            this.get<{ [key: string]: { toString(): string } | null } | null>("server.extraEnv") ??
+            {};
         return substituteVariablesInEnv(
             Object.fromEntries(
                 Object.entries(extraEnv).map(([k, v]) => [
                     k,
-                    typeof v !== "string" ? v.toString() : v,
+                    typeof v === "string" ? v : v?.toString(),
                 ]),
             ),
         );
@@ -323,7 +324,6 @@ export class Config {
         return {
             engine: this.get<string>("debug.engine"),
             engineSettings: this.get<object>("debug.engineSettings") ?? {},
-            openDebugPane: this.get<boolean>("debug.openDebugPane"),
             buildBeforeRestart: this.get<boolean>("debug.buildBeforeRestart"),
             sourceFileMap: sourceFileMap,
         };
@@ -399,6 +399,7 @@ export function prepareVSCodeConfig<T>(resp: T): T {
 
 // FIXME: Merge this with `substituteVSCodeVariables` above
 export function substituteVariablesInEnv(env: Env): Env {
+    const depRe = new RegExp(/\${(?<depName>.+?)}/g);
     const missingDeps = new Set<string>();
     // vscode uses `env:ENV_NAME` for env vars resolution, and it's easier
     // to follow the same convention for our dependency tracking
@@ -406,15 +407,16 @@ export function substituteVariablesInEnv(env: Env): Env {
     const envWithDeps = Object.fromEntries(
         Object.entries(env).map(([key, value]) => {
             const deps = new Set<string>();
-            const depRe = new RegExp(/\${(?<depName>.+?)}/g);
-            let match = undefined;
-            while ((match = depRe.exec(value))) {
-                const depName = unwrapUndefinable(match.groups?.["depName"]);
-                deps.add(depName);
-                // `depName` at this point can have a form of `expression` or
-                // `prefix:expression`
-                if (!definedEnvKeys.has(depName)) {
-                    missingDeps.add(depName);
+            if (value) {
+                let match = undefined;
+                while ((match = depRe.exec(value))) {
+                    const depName = unwrapUndefinable(match.groups?.["depName"]);
+                    deps.add(depName);
+                    // `depName` at this point can have a form of `expression` or
+                    // `prefix:expression`
+                    if (!definedEnvKeys.has(depName)) {
+                        missingDeps.add(depName);
+                    }
                 }
             }
             return [`env:${key}`, { deps: [...deps], value }];
@@ -455,11 +457,10 @@ export function substituteVariablesInEnv(env: Env): Env {
     do {
         leftToResolveSize = toResolve.size;
         for (const key of toResolve) {
-            const item = unwrapUndefinable(envWithDeps[key]);
-            if (item.deps.every((dep) => resolved.has(dep))) {
-                item.value = item.value.replace(/\${(?<depName>.+?)}/g, (_wholeMatch, depName) => {
-                    const item = unwrapUndefinable(envWithDeps[depName]);
-                    return item.value;
+            const item = envWithDeps[key];
+            if (item && item.deps.every((dep) => resolved.has(dep))) {
+                item.value = item.value?.replace(/\${(?<depName>.+?)}/g, (_wholeMatch, depName) => {
+                    return envWithDeps[depName]?.value ?? "";
                 });
                 resolved.add(key);
                 toResolve.delete(key);
@@ -499,7 +500,7 @@ function computeVscodeVar(varName: string): string | null {
                   // user has opened on Editor startup. Could lead to
                   // unpredictable workspace selection in practice.
                   // It's better to pick the first one
-                  folder.uri.fsPath;
+                  normalizeDriveLetter(folder.uri.fsPath);
         return fsPath;
     };
     // https://code.visualstudio.com/docs/editor/variables-reference
diff --git a/src/tools/rust-analyzer/editors/code/src/ctx.ts b/src/tools/rust-analyzer/editors/code/src/ctx.ts
index 37a2ee236915c..e55754fb9f048 100644
--- a/src/tools/rust-analyzer/editors/code/src/ctx.ts
+++ b/src/tools/rust-analyzer/editors/code/src/ctx.ts
@@ -190,11 +190,11 @@ export class Ctx implements RustAnalyzerExtensionApi {
         }
 
         if (!this.traceOutputChannel) {
-            this.traceOutputChannel = new LazyOutputChannel("Rust Analyzer Language Server Trace");
+            this.traceOutputChannel = new LazyOutputChannel("rust-analyzer LSP Trace");
             this.pushExtCleanup(this.traceOutputChannel);
         }
         if (!this.outputChannel) {
-            this.outputChannel = vscode.window.createOutputChannel("Rust Analyzer Language Server");
+            this.outputChannel = vscode.window.createOutputChannel("rust-analyzer Language Server");
             this.pushExtCleanup(this.outputChannel);
         }
 
@@ -213,7 +213,14 @@ export class Ctx implements RustAnalyzerExtensionApi {
                     this.refreshServerStatus();
                 },
             );
-            const newEnv = Object.assign({}, process.env, this.config.serverExtraEnv);
+            const newEnv = { ...process.env };
+            for (const [k, v] of Object.entries(this.config.serverExtraEnv)) {
+                if (v) {
+                    newEnv[k] = v;
+                } else if (k in newEnv) {
+                    delete newEnv[k];
+                }
+            }
             const run: lc.Executable = {
                 command: this._serverPath,
                 options: { env: newEnv },
diff --git a/src/tools/rust-analyzer/editors/code/src/debug.ts b/src/tools/rust-analyzer/editors/code/src/debug.ts
index 72a9aabc04342..adb75c23c70cc 100644
--- a/src/tools/rust-analyzer/editors/code/src/debug.ts
+++ b/src/tools/rust-analyzer/editors/code/src/debug.ts
@@ -6,11 +6,9 @@ import type * as ra from "./lsp_ext";
 import { Cargo } from "./toolchain";
 import type { Ctx } from "./ctx";
 import { createTaskFromRunnable, prepareEnv } from "./run";
-import { execute, isCargoRunnableArgs, unwrapUndefinable } from "./util";
+import { execute, isCargoRunnableArgs, unwrapUndefinable, log, normalizeDriveLetter } from "./util";
 import type { Config } from "./config";
 
-const debugOutput = vscode.window.createOutputChannel("Debug");
-
 // Here we want to keep track on everything that's currently running
 const activeDebugSessionIds: string[] = [];
 
@@ -56,15 +54,14 @@ export async function startDebugSession(ctx: Ctx, runnable: ra.Runnable): Promis
     if (-1 !== index) {
         debugConfig = configurations[index];
         message = " (from launch.json)";
-        debugOutput.clear();
     } else {
         debugConfig = await getDebugConfiguration(ctx.config, runnable);
     }
 
     if (!debugConfig) return false;
 
-    debugOutput.appendLine(`Launching debug configuration${message}:`);
-    debugOutput.appendLine(JSON.stringify(debugConfig, null, 2));
+    log.debug(`Launching debug configuration${message}:`);
+    log.debug(JSON.stringify(debugConfig, null, 2));
     return vscode.debug.startDebugging(undefined, debugConfig);
 }
 
@@ -118,10 +115,6 @@ async function getDebugConfiguration(
         return;
     }
 
-    debugOutput.clear();
-    if (config.debug.openDebugPane) {
-        debugOutput.show(true);
-    }
     // folder exists or RA is not active.
 
     const workspaceFolders = vscode.workspace.workspaceFolders!;
@@ -134,20 +127,14 @@ async function getDebugConfiguration(
               firstWorkspace;
 
     const workspace = unwrapUndefinable(maybeWorkspace);
-    let wsFolder = path.normalize(workspace.uri.fsPath);
-    if (os.platform() === "win32") {
-        // in windows, the drive letter can vary in casing for VSCode, so we gotta normalize that first
-        wsFolder = wsFolder.replace(/^[a-z]:\\/, (c) => c.toUpperCase());
-    }
+    const wsFolder = normalizeDriveLetter(path.normalize(workspace.uri.fsPath));
 
     const workspaceQualifier = isMultiFolderWorkspace ? `:${workspace.name}` : "";
     function simplifyPath(p: string): string {
         // in windows, the drive letter can vary in casing for VSCode, so we gotta normalize that first
-        if (os.platform() === "win32") {
-            p = p.replace(/^[a-z]:\\/, (c) => c.toUpperCase());
-        }
+        p = normalizeDriveLetter(path.normalize(p));
         // see https://github.com/rust-lang/rust-analyzer/pull/5513#issuecomment-663458818 for why this is needed
-        return path.normalize(p).replace(wsFolder, `\${workspaceFolder${workspaceQualifier}}`);
+        return p.replace(wsFolder, `\${workspaceFolder${workspaceQualifier}}`);
     }
 
     const executable = await getDebugExecutable(
@@ -232,7 +219,7 @@ async function discoverSourceFileMap(
         const commitHash = rx.exec(data)?.[1];
         if (commitHash) {
             const rustlib = path.normalize(sysroot + "/lib/rustlib/src/rust");
-            return { source: rustlib, destination: rustlib };
+            return { source: "/rustc/" + commitHash, destination: rustlib };
         }
     }
 
@@ -321,7 +308,7 @@ async function getDebugExecutable(
     runnableArgs: ra.CargoRunnableArgs,
     env: Record<string, string>,
 ): Promise<string> {
-    const cargo = new Cargo(runnableArgs.workspaceRoot || ".", debugOutput, env);
+    const cargo = new Cargo(runnableArgs.workspaceRoot || ".", env);
     const executable = await cargo.executableFromArgs(runnableArgs);
 
     // if we are here, there were no compilation errors.
diff --git a/src/tools/rust-analyzer/editors/code/src/lsp_ext.ts b/src/tools/rust-analyzer/editors/code/src/lsp_ext.ts
index af5129ac96358..20952e93ccc48 100644
--- a/src/tools/rust-analyzer/editors/code/src/lsp_ext.ts
+++ b/src/tools/rust-analyzer/editors/code/src/lsp_ext.ts
@@ -194,6 +194,11 @@ export const parentModule = new lc.RequestType<
     lc.LocationLink[] | null,
     void
 >("experimental/parentModule");
+export const childModules = new lc.RequestType<
+    lc.TextDocumentPositionParams,
+    lc.LocationLink[] | null,
+    void
+>("experimental/childModules");
 export const runnables = new lc.RequestType<RunnablesParams, Runnable[], void>(
     "experimental/runnables",
 );
diff --git a/src/tools/rust-analyzer/editors/code/src/main.ts b/src/tools/rust-analyzer/editors/code/src/main.ts
index 451294e26f6c7..5e500730693fc 100644
--- a/src/tools/rust-analyzer/editors/code/src/main.ts
+++ b/src/tools/rust-analyzer/editors/code/src/main.ts
@@ -158,6 +158,7 @@ function createCommands(): Record<string, CommandFactory> {
         matchingBrace: { enabled: commands.matchingBrace },
         joinLines: { enabled: commands.joinLines },
         parentModule: { enabled: commands.parentModule },
+        childModules: { enabled: commands.childModules },
         viewHir: { enabled: commands.viewHir },
         viewMir: { enabled: commands.viewMir },
         interpretFunction: { enabled: commands.interpretFunction },
@@ -187,7 +188,9 @@ function createCommands(): Record<string, CommandFactory> {
         openWalkthrough: { enabled: commands.openWalkthrough },
         // Internal commands which are invoked by the server.
         applyActionGroup: { enabled: commands.applyActionGroup },
-        applySnippetWorkspaceEdit: { enabled: commands.applySnippetWorkspaceEditCommand },
+        applySnippetWorkspaceEdit: {
+            enabled: commands.applySnippetWorkspaceEditCommand,
+        },
         debugSingle: { enabled: commands.debugSingle },
         gotoLocation: { enabled: commands.gotoLocation },
         hoverRefCommandProxy: { enabled: commands.hoverRefCommandProxy },
@@ -200,8 +203,12 @@ function createCommands(): Record<string, CommandFactory> {
         revealDependency: { enabled: commands.revealDependency },
         syntaxTreeReveal: { enabled: commands.syntaxTreeReveal },
         syntaxTreeCopy: { enabled: commands.syntaxTreeCopy },
-        syntaxTreeHideWhitespace: { enabled: commands.syntaxTreeHideWhitespace },
-        syntaxTreeShowWhitespace: { enabled: commands.syntaxTreeShowWhitespace },
+        syntaxTreeHideWhitespace: {
+            enabled: commands.syntaxTreeHideWhitespace,
+        },
+        syntaxTreeShowWhitespace: {
+            enabled: commands.syntaxTreeShowWhitespace,
+        },
     };
 }
 
diff --git a/src/tools/rust-analyzer/editors/code/src/toolchain.ts b/src/tools/rust-analyzer/editors/code/src/toolchain.ts
index bb06144295314..a859ce6ff0070 100644
--- a/src/tools/rust-analyzer/editors/code/src/toolchain.ts
+++ b/src/tools/rust-analyzer/editors/code/src/toolchain.ts
@@ -37,7 +37,6 @@ interface CompilerMessage {
 export class Cargo {
     constructor(
         readonly rootFolder: string,
-        readonly output: vscode.OutputChannel,
         readonly env: Record<string, string>,
     ) {}
 
@@ -93,14 +92,14 @@ export class Cargo {
                             });
                         }
                     } else if (message.reason === "compiler-message") {
-                        this.output.append(message.message.rendered);
+                        log.info(message.message.rendered);
                     }
                 },
-                (stderr) => this.output.append(stderr),
+                (stderr) => log.error(stderr),
                 env,
             );
         } catch (err) {
-            this.output.show(true);
+            log.error(`Cargo invocation has failed: ${err}`);
             throw new Error(`Cargo invocation has failed: ${err}`);
         }
 
diff --git a/src/tools/rust-analyzer/editors/code/src/util.ts b/src/tools/rust-analyzer/editors/code/src/util.ts
index 93c7bf8d73e7b..410b055100a1b 100644
--- a/src/tools/rust-analyzer/editors/code/src/util.ts
+++ b/src/tools/rust-analyzer/editors/code/src/util.ts
@@ -14,11 +14,11 @@ export function assert(condition: boolean, explanation: string): asserts conditi
 }
 
 export type Env = {
-    [name: string]: string;
+    [name: string]: string | undefined;
 };
 
 class Log {
-    private readonly output = vscode.window.createOutputChannel("Rust Analyzer Client", {
+    private readonly output = vscode.window.createOutputChannel("rust-analyzer Extension", {
         log: true,
     });
 
@@ -299,3 +299,32 @@ export async function spawnAsync(
         };
     }
 }
+
+export const isWindows = process.platform === "win32";
+
+export function isWindowsDriveLetter(code: number): boolean {
+    // Copied from https://github.com/microsoft/vscode/blob/02c2dba5f2669b924fd290dff7d2ff3460791996/src/vs/base/common/extpath.ts#L265-L267
+    return (
+        (code >= /* CharCode.A */ 65 && code <= /* CharCode.Z */ 90) ||
+        (code >= /* CharCode.a */ 97 && code <= /* CharCode.z */ 122)
+    );
+}
+export function hasDriveLetter(path: string, isWindowsOS: boolean = isWindows): boolean {
+    // Copied from https://github.com/microsoft/vscode/blob/02c2dba5f2669b924fd290dff7d2ff3460791996/src/vs/base/common/extpath.ts#L324-L330
+    if (isWindowsOS) {
+        return (
+            isWindowsDriveLetter(path.charCodeAt(0)) &&
+            path.charCodeAt(1) === /* CharCode.Colon */ 58
+        );
+    }
+
+    return false;
+}
+export function normalizeDriveLetter(path: string, isWindowsOS: boolean = isWindows): string {
+    // Copied from https://github.com/microsoft/vscode/blob/02c2dba5f2669b924fd290dff7d2ff3460791996/src/vs/base/common/labels.ts#L140-L146
+    if (hasDriveLetter(path, isWindowsOS)) {
+        return path.charAt(0).toUpperCase() + path.slice(1);
+    }
+
+    return path;
+}
diff --git a/src/tools/rust-analyzer/lib/la-arena/Cargo.toml b/src/tools/rust-analyzer/lib/la-arena/Cargo.toml
index 589d026142bb7..1c330e0e37e17 100644
--- a/src/tools/rust-analyzer/lib/la-arena/Cargo.toml
+++ b/src/tools/rust-analyzer/lib/la-arena/Cargo.toml
@@ -6,8 +6,8 @@ license = "MIT OR Apache-2.0"
 repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/la-arena"
 documentation = "https://docs.rs/la-arena"
 categories = ["data-structures", "memory-management", "rust-patterns"]
-edition = "2021"
-rust-version = "1.56"
+edition = "2024"
+rust-version = "1.85"
 
 [lints]
 workspace = true
diff --git a/src/tools/rust-analyzer/lib/line-index/Cargo.toml b/src/tools/rust-analyzer/lib/line-index/Cargo.toml
index 14196ba3d0973..81cd364cc22b0 100644
--- a/src/tools/rust-analyzer/lib/line-index/Cargo.toml
+++ b/src/tools/rust-analyzer/lib/line-index/Cargo.toml
@@ -4,14 +4,14 @@ version = "0.1.2"
 description = "Maps flat `TextSize` offsets to/from `(line, column)` representation."
 license = "MIT OR Apache-2.0"
 repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/line-index"
-edition = "2021"
+edition = "2024"
 
 [dependencies]
 text-size = "1.1.1"
 nohash-hasher = "0.2.0"
 
 [dev-dependencies]
-oorandom = "11.1.3"
+oorandom = "11.1.5"
 
 [lints]
 workspace = true
diff --git a/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml b/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml
index 2fa3272e6593f..1dc6d3ce5db8f 100644
--- a/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml
+++ b/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml
@@ -4,18 +4,18 @@ version = "0.7.8"
 description = "Generic LSP server scaffold."
 license = "MIT OR Apache-2.0"
 repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/lsp-server"
-edition = "2021"
+edition = "2024"
 
 [dependencies]
-log = "0.4.17"
-serde_json = "1.0.108"
-serde = { version = "1.0.216" }
-serde_derive = { version = "1.0.216" }
+log = "0.4.26"
+serde_json = "1.0.140"
+serde = { version = "1.0.219" }
+serde_derive = { version = "1.0.219" }
 crossbeam-channel.workspace = true
 
 [dev-dependencies]
 lsp-types = "=0.95"
-ctrlc = "3.4.1"
+ctrlc = "3.4.5"
 
 [lints]
 workspace = true
diff --git a/src/tools/rust-analyzer/lib/lsp-server/examples/goto_def.rs b/src/tools/rust-analyzer/lib/lsp-server/examples/goto_def.rs
index 6ad144b85fee4..6b3acda7bcdd1 100644
--- a/src/tools/rust-analyzer/lib/lsp-server/examples/goto_def.rs
+++ b/src/tools/rust-analyzer/lib/lsp-server/examples/goto_def.rs
@@ -48,7 +48,7 @@ use std::error::Error;
 
 use lsp_types::OneOf;
 use lsp_types::{
-    request::GotoDefinition, GotoDefinitionResponse, InitializeParams, ServerCapabilities,
+    GotoDefinitionResponse, InitializeParams, ServerCapabilities, request::GotoDefinition,
 };
 
 use lsp_server::{Connection, ExtractError, Message, Request, RequestId, Response};
diff --git a/src/tools/rust-analyzer/lib/lsp-server/src/lib.rs b/src/tools/rust-analyzer/lib/lsp-server/src/lib.rs
index 4069e6f2c09a5..c8cdb86681c6e 100644
--- a/src/tools/rust-analyzer/lib/lsp-server/src/lib.rs
+++ b/src/tools/rust-analyzer/lib/lsp-server/src/lib.rs
@@ -356,17 +356,17 @@ impl Connection {
             Ok(msg) => {
                 return Err(ProtocolError::new(format!(
                     "unexpected message during shutdown: {msg:?}"
-                )))
+                )));
             }
             Err(RecvTimeoutError::Timeout) => {
                 return Err(ProtocolError::new(
                     "timed out waiting for exit notification".to_owned(),
-                ))
+                ));
             }
             Err(RecvTimeoutError::Disconnected) => {
                 return Err(ProtocolError::new(
                     "channel disconnected waiting for exit notification".to_owned(),
-                ))
+                ));
             }
         }
         Ok(true)
diff --git a/src/tools/rust-analyzer/lib/lsp-server/src/socket.rs b/src/tools/rust-analyzer/lib/lsp-server/src/socket.rs
index 48400abf2295c..793073d1f72d5 100644
--- a/src/tools/rust-analyzer/lib/lsp-server/src/socket.rs
+++ b/src/tools/rust-analyzer/lib/lsp-server/src/socket.rs
@@ -4,11 +4,11 @@ use std::{
     thread,
 };
 
-use crossbeam_channel::{bounded, Receiver, Sender};
+use crossbeam_channel::{Receiver, Sender, bounded};
 
 use crate::{
-    stdio::{make_io_threads, IoThreads},
     Message,
+    stdio::{IoThreads, make_io_threads},
 };
 
 pub(crate) fn socket_transport(
diff --git a/src/tools/rust-analyzer/lib/lsp-server/src/stdio.rs b/src/tools/rust-analyzer/lib/lsp-server/src/stdio.rs
index 8344c9f56b534..c558b6c6e7708 100644
--- a/src/tools/rust-analyzer/lib/lsp-server/src/stdio.rs
+++ b/src/tools/rust-analyzer/lib/lsp-server/src/stdio.rs
@@ -5,7 +5,7 @@ use std::{
 
 use log::debug;
 
-use crossbeam_channel::{bounded, Receiver, Sender};
+use crossbeam_channel::{Receiver, Sender, bounded};
 
 use crate::Message;
 
@@ -40,7 +40,7 @@ pub(crate) fn stdio_transport() -> (Sender<Message>, Receiver<Message>, IoThread
 
                 debug!("sending message {:#?}", msg);
                 if let Err(e) = reader_sender.send(msg) {
-                    return Err(io::Error::new(io::ErrorKind::Other, e));
+                    return Err(io::Error::other(e));
                 }
 
                 if is_exit {
diff --git a/src/tools/rust-analyzer/rust-version b/src/tools/rust-analyzer/rust-version
index 0db4be8ff639b..09c127f6bcd84 100644
--- a/src/tools/rust-analyzer/rust-version
+++ b/src/tools/rust-analyzer/rust-version
@@ -1 +1 @@
-2c6a12ec44d0426c8939123c2f2cf27d2217de13
+21079f53a359d9fc82668d4175d49dafdb600563
diff --git a/src/tools/rust-analyzer/triagebot.toml b/src/tools/rust-analyzer/triagebot.toml
index 1138035d0e82e..2201b5a5e7c90 100644
--- a/src/tools/rust-analyzer/triagebot.toml
+++ b/src/tools/rust-analyzer/triagebot.toml
@@ -21,3 +21,9 @@ exclude_titles = [ # exclude syncs from subtree in rust-lang/rust
 labels = ["has-merge-commits", "S-waiting-on-author"]
 
 [transfer]
+
+# Canonicalize issue numbers to avoid closing the wrong issue when upstreaming this subtree
+[canonicalize-issue-links]
+
+# Prevents mentions in commits to avoid users being spammed
+[no-mentions]
diff --git a/src/tools/rust-analyzer/xtask/Cargo.toml b/src/tools/rust-analyzer/xtask/Cargo.toml
index ebd8903ad8ac3..6195de5d20255 100644
--- a/src/tools/rust-analyzer/xtask/Cargo.toml
+++ b/src/tools/rust-analyzer/xtask/Cargo.toml
@@ -3,21 +3,21 @@ name = "xtask"
 version = "0.1.0"
 publish = false
 license = "MIT OR Apache-2.0"
-edition = "2021"
+edition = "2024"
 rust-version.workspace = true
 
 [dependencies]
 anyhow.workspace = true
-directories = "5.0"
-flate2 = "1.0.24"
-write-json = "0.1.2"
+directories = "6.0"
+flate2 = "1.1.0"
+write-json = "0.1.4"
 xshell.workspace = true
-xflags = "0.3.0"
+xflags = "0.3.2"
 time = { version = "0.3", default-features = false }
-zip = { version = "0.6", default-features = false, features = ["deflate", "time"] }
+zip = { version = "2.4", default-features = false, features = ["deflate-flate2", "flate2", "time"] }
 stdx.workspace = true
-proc-macro2 = "1.0.93"
-quote = "1.0.20"
+proc-macro2 = "1.0.94"
+quote = "1.0.40"
 ungrammar = "1.16.1"
 either.workspace = true
 itertools.workspace = true
diff --git a/src/tools/rust-analyzer/xtask/src/codegen.rs b/src/tools/rust-analyzer/xtask/src/codegen.rs
index 8165a2a12b084..bba7ad73f3895 100644
--- a/src/tools/rust-analyzer/xtask/src/codegen.rs
+++ b/src/tools/rust-analyzer/xtask/src/codegen.rs
@@ -3,7 +3,7 @@ use std::{
     path::{Path, PathBuf},
 };
 
-use xshell::{cmd, Shell};
+use xshell::{Shell, cmd};
 
 use crate::{
     flags::{self, CodegenType},
@@ -21,7 +21,7 @@ impl flags::Codegen {
     pub(crate) fn run(self, _sh: &Shell) -> anyhow::Result<()> {
         match self.codegen_type.unwrap_or_default() {
             flags::CodegenType::All => {
-                diagnostics_docs::generate(self.check);
+                grammar::generate(self.check);
                 assists_doc_tests::generate(self.check);
                 parser_inline_tests::generate(self.check);
                 feature_docs::generate(self.check)
diff --git a/src/tools/rust-analyzer/xtask/src/codegen/assists_doc_tests.rs b/src/tools/rust-analyzer/xtask/src/codegen/assists_doc_tests.rs
index 0bb18c73cfc66..dd55d06bd3471 100644
--- a/src/tools/rust-analyzer/xtask/src/codegen/assists_doc_tests.rs
+++ b/src/tools/rust-analyzer/xtask/src/codegen/assists_doc_tests.rs
@@ -5,7 +5,7 @@ use std::{fmt, fs, path::Path};
 use stdx::format_to_acc;
 
 use crate::{
-    codegen::{add_preamble, ensure_file_contents, reformat, CommentBlock, Location},
+    codegen::{CommentBlock, Location, add_preamble, ensure_file_contents, reformat},
     project_root,
     util::list_rust_files,
 };
@@ -53,6 +53,11 @@ r#####"
         );
     }
 
+    // Do not generate assists manual when run with `--check`
+    if check {
+        return;
+    }
+
     {
         // Generate assists manual. Note that we do _not_ commit manual to the
         // git repo. Instead, `cargo xtask release` runs this test before making
diff --git a/src/tools/rust-analyzer/xtask/src/codegen/diagnostics_docs.rs b/src/tools/rust-analyzer/xtask/src/codegen/diagnostics_docs.rs
index cf8f97be009ba..4a47a5f6aa4b9 100644
--- a/src/tools/rust-analyzer/xtask/src/codegen/diagnostics_docs.rs
+++ b/src/tools/rust-analyzer/xtask/src/codegen/diagnostics_docs.rs
@@ -3,20 +3,22 @@
 use std::{fmt, fs, io, path::PathBuf};
 
 use crate::{
-    codegen::{add_preamble, CommentBlock, Location},
+    codegen::{CommentBlock, Location, add_preamble},
     project_root,
     util::list_rust_files,
 };
 
 pub(crate) fn generate(check: bool) {
     let diagnostics = Diagnostic::collect().unwrap();
-    if !check {
-        let contents =
-            diagnostics.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n");
-        let contents = add_preamble(crate::flags::CodegenType::DiagnosticsDocs, contents);
-        let dst = project_root().join("docs/book/src/diagnostics_generated.md");
-        fs::write(dst, contents).unwrap();
+    // Do not generate docs when run with `--check`
+    if check {
+        return;
     }
+    let contents =
+        diagnostics.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n");
+    let contents = add_preamble(crate::flags::CodegenType::DiagnosticsDocs, contents);
+    let dst = project_root().join("docs/book/src/diagnostics_generated.md");
+    fs::write(dst, contents).unwrap();
 }
 
 #[derive(Debug)]
diff --git a/src/tools/rust-analyzer/xtask/src/codegen/feature_docs.rs b/src/tools/rust-analyzer/xtask/src/codegen/feature_docs.rs
index 51ff13aba81e2..170de5db9a7ae 100644
--- a/src/tools/rust-analyzer/xtask/src/codegen/feature_docs.rs
+++ b/src/tools/rust-analyzer/xtask/src/codegen/feature_docs.rs
@@ -3,13 +3,17 @@
 use std::{fmt, fs, io, path::PathBuf};
 
 use crate::{
-    codegen::{add_preamble, CommentBlock, Location},
+    codegen::{CommentBlock, Location, add_preamble},
     project_root,
     util::list_rust_files,
 };
 
-pub(crate) fn generate(_check: bool) {
+pub(crate) fn generate(check: bool) {
     let features = Feature::collect().unwrap();
+    // Do not generate docs when run with `--check`
+    if check {
+        return;
+    }
     let contents = features.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n");
     let contents = add_preamble(crate::flags::CodegenType::FeatureDocs, contents);
     let dst = project_root().join("docs/book/src/features_generated.md");
diff --git a/src/tools/rust-analyzer/xtask/src/codegen/grammar.rs b/src/tools/rust-analyzer/xtask/src/codegen/grammar.rs
index e20dda7fec3bb..82df78c1a898a 100644
--- a/src/tools/rust-analyzer/xtask/src/codegen/grammar.rs
+++ b/src/tools/rust-analyzer/xtask/src/codegen/grammar.rs
@@ -67,7 +67,6 @@ fn generate_tokens(grammar: &AstSrc) -> String {
         let name = format_ident!("{}", token);
         let kind = format_ident!("{}", to_upper_snake_case(token));
         quote! {
-            #[derive(Debug, Clone, PartialEq, Eq, Hash)]
             pub struct #name {
                 pub(crate) syntax: SyntaxToken,
             }
@@ -83,6 +82,29 @@ fn generate_tokens(grammar: &AstSrc) -> String {
                 }
                 fn syntax(&self) -> &SyntaxToken { &self.syntax }
             }
+
+            impl fmt::Debug for #name {
+                fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+                    f.debug_struct(#token).field("syntax", &self.syntax).finish()
+                }
+            }
+            impl Clone for #name {
+                fn clone(&self) -> Self {
+                    Self { syntax: self.syntax.clone() }
+                }
+            }
+            impl hash::Hash for #name {
+                fn hash<H: hash::Hasher>(&self, state: &mut H) {
+                    self.syntax.hash(state);
+                }
+            }
+
+            impl Eq for #name {}
+            impl PartialEq for #name {
+                fn eq(&self, other: &Self) -> bool {
+                    self.syntax == other.syntax
+                }
+            }
         }
     });
 
@@ -90,7 +112,10 @@ fn generate_tokens(grammar: &AstSrc) -> String {
         crate::flags::CodegenType::Grammar,
         reformat(
             quote! {
+                use std::{fmt, hash};
+
                 use crate::{SyntaxKind::{self, *}, SyntaxToken, ast::AstToken};
+
                 #(#tokens)*
             }
             .to_string(),
@@ -104,6 +129,7 @@ fn generate_nodes(kinds: KindsSrc, grammar: &AstSrc) -> String {
         .nodes
         .iter()
         .map(|node| {
+            let node_str_name = &node.name;
             let name = format_ident!("{}", node.name);
             let kind = format_ident!("{}", to_upper_snake_case(&node.name));
             let traits = node
@@ -149,7 +175,6 @@ fn generate_nodes(kinds: KindsSrc, grammar: &AstSrc) -> String {
             (
                 quote! {
                     #[pretty_doc_comment_placeholder_workaround]
-                    #[derive(Debug, Clone, PartialEq, Eq, Hash)]
                     pub struct #name {
                         pub(crate) syntax: SyntaxNode,
                     }
@@ -180,6 +205,31 @@ fn generate_nodes(kinds: KindsSrc, grammar: &AstSrc) -> String {
                         #[inline]
                         fn syntax(&self) -> &SyntaxNode { &self.syntax }
                     }
+
+                    impl hash::Hash for #name {
+                        fn hash<H: hash::Hasher>(&self, state: &mut H) {
+                            self.syntax.hash(state);
+                        }
+                    }
+
+                    impl Eq for #name {}
+                    impl PartialEq for #name {
+                        fn eq(&self, other: &Self) -> bool {
+                            self.syntax == other.syntax
+                        }
+                    }
+
+                    impl Clone for #name {
+                        fn clone(&self) -> Self {
+                            Self { syntax: self.syntax.clone() }
+                        }
+                    }
+
+                    impl fmt::Debug for #name {
+                        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+                            f.debug_struct(#node_str_name).field("syntax", &self.syntax).finish()
+                        }
+                    }
                 },
             )
         })
@@ -265,6 +315,7 @@ fn generate_nodes(kinds: KindsSrc, grammar: &AstSrc) -> String {
         .sorted_by_key(|(name, _)| *name)
         .map(|(trait_name, nodes)| {
             let name = format_ident!("Any{}", trait_name);
+            let node_str_name = name.to_string();
             let trait_name = format_ident!("{}", trait_name);
             let kinds: Vec<_> = nodes
                 .iter()
@@ -274,13 +325,9 @@ fn generate_nodes(kinds: KindsSrc, grammar: &AstSrc) -> String {
             (
                 quote! {
                     #[pretty_doc_comment_placeholder_workaround]
-                    #[derive(Debug, Clone, PartialEq, Eq, Hash)]
                     pub struct #name {
                         pub(crate) syntax: SyntaxNode,
                     }
-                    impl ast::#trait_name for #name {}
-                },
-                quote! {
                     impl #name {
                         #[inline]
                         pub fn new<T: ast::#trait_name>(node: T) -> #name {
@@ -289,6 +336,9 @@ fn generate_nodes(kinds: KindsSrc, grammar: &AstSrc) -> String {
                             }
                         }
                     }
+                },
+                quote! {
+                    impl ast::#trait_name for #name {}
                     impl AstNode for #name {
                         #[inline]
                         fn can_cast(kind: SyntaxKind) -> bool {
@@ -304,6 +354,31 @@ fn generate_nodes(kinds: KindsSrc, grammar: &AstSrc) -> String {
                         }
                     }
 
+                    impl hash::Hash for #name {
+                        fn hash<H: hash::Hasher>(&self, state: &mut H) {
+                            self.syntax.hash(state);
+                        }
+                    }
+
+                    impl Eq for #name {}
+                    impl PartialEq for #name {
+                        fn eq(&self, other: &Self) -> bool {
+                            self.syntax == other.syntax
+                        }
+                    }
+
+                    impl Clone for #name {
+                        fn clone(&self) -> Self {
+                            Self { syntax: self.syntax.clone() }
+                        }
+                    }
+
+                    impl fmt::Debug for #name {
+                        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+                            f.debug_struct(#node_str_name).field("syntax", &self.syntax).finish()
+                        }
+                    }
+
                     #(
                         impl From<#nodes> for #name {
                             #[inline]
@@ -339,13 +414,14 @@ fn generate_nodes(kinds: KindsSrc, grammar: &AstSrc) -> String {
         .map(|kind| to_pascal_case(kind))
         .filter(|name| !defined_nodes.iter().any(|&it| it == name))
     {
-        drop(node)
-        // FIXME: restore this
-        // eprintln!("Warning: node {} not defined in ast source", node);
+        eprintln!("Warning: node {} not defined in AST source", node);
+        drop(node);
     }
 
     let ast = quote! {
         #![allow(non_snake_case)]
+        use std::{fmt, hash};
+
         use crate::{
             SyntaxNode, SyntaxToken, SyntaxKind::{self, *},
             ast::{self, AstNode, AstChildren, support},
@@ -476,7 +552,6 @@ fn generate_syntax_kinds(grammar: KindsSrc) -> String {
 
     let tokens = grammar.tokens.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();
 
-    // FIXME: This generates enum kinds?
     let nodes = grammar.nodes.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();
 
     let ast = quote! {
@@ -484,7 +559,7 @@ fn generate_syntax_kinds(grammar: KindsSrc) -> String {
         use crate::Edition;
 
         /// The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT`.
-        #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
+        #[derive(Debug)]
         #[repr(u16)]
         pub enum SyntaxKind {
             // Technical SyntaxKinds: they appear temporally during parsing,
@@ -585,7 +660,7 @@ fn generate_syntax_kinds(grammar: KindsSrc) -> String {
         }
 
         #[macro_export]
-        macro_rules! T {
+        macro_rules! T_ {
             #([#punctuation_values] => { $crate::SyntaxKind::#punctuation };)*
             #([#strict_keywords_tokens] => { $crate::SyntaxKind::#strict_keywords_variants };)*
             #([#contextual_keywords_tokens] => { $crate::SyntaxKind::#contextual_keywords_variants };)*
@@ -596,6 +671,38 @@ fn generate_syntax_kinds(grammar: KindsSrc) -> String {
             [string] => { $crate::SyntaxKind::STRING };
             [shebang] => { $crate::SyntaxKind::SHEBANG };
         }
+
+        impl ::core::marker::Copy for SyntaxKind {}
+        impl ::core::clone::Clone for SyntaxKind {
+            #[inline]
+            fn clone(&self) -> Self {
+                *self
+            }
+        }
+        impl ::core::cmp::PartialEq for SyntaxKind {
+            #[inline]
+            fn eq(&self, other: &Self) -> bool {
+                (*self as u16) == (*other as u16)
+            }
+        }
+        impl ::core::cmp::Eq for SyntaxKind {}
+        impl ::core::cmp::PartialOrd for SyntaxKind {
+            #[inline]
+            fn partial_cmp(&self, other: &Self) -> core::option::Option<core::cmp::Ordering> {
+                Some(self.cmp(other))
+            }
+        }
+        impl ::core::cmp::Ord for SyntaxKind {
+            #[inline]
+            fn cmp(&self, other: &Self) -> core::cmp::Ordering {
+                (*self as u16).cmp(&(*other as u16))
+            }
+        }
+        impl ::core::hash::Hash for SyntaxKind {
+            fn hash<H: ::core::hash::Hasher>(&self, state: &mut H) {
+                ::core::mem::discriminant(self).hash(state);
+            }
+        }
     };
 
     add_preamble(crate::flags::CodegenType::Grammar, reformat(ast.to_string()))
@@ -718,11 +825,7 @@ impl Field {
 
 fn clean_token_name(name: &str) -> String {
     let cleaned = name.trim_start_matches(['@', '#', '?']);
-    if cleaned.is_empty() {
-        name.to_owned()
-    } else {
-        cleaned.to_owned()
-    }
+    if cleaned.is_empty() { name.to_owned() } else { cleaned.to_owned() }
 }
 
 fn lower(grammar: &Grammar) -> AstSrc {
diff --git a/src/tools/rust-analyzer/xtask/src/codegen/grammar/ast_src.rs b/src/tools/rust-analyzer/xtask/src/codegen/grammar/ast_src.rs
index 9269d15423534..d8cbf894520ac 100644
--- a/src/tools/rust-analyzer/xtask/src/codegen/grammar/ast_src.rs
+++ b/src/tools/rust-analyzer/xtask/src/codegen/grammar/ast_src.rs
@@ -12,6 +12,7 @@ pub(crate) struct KindsSrc {
     pub(crate) literals: &'static [&'static str],
     pub(crate) tokens: &'static [&'static str],
     pub(crate) nodes: &'static [&'static str],
+    pub(crate) _enums: &'static [&'static str],
     pub(crate) edition_dependent_keywords: &'static [(&'static str, Edition)],
 }
 
@@ -206,13 +207,21 @@ pub(crate) fn generate_kind_src(
     let nodes = nodes
         .iter()
         .map(|it| &it.name)
-        .chain(enums.iter().map(|it| &it.name))
         .map(|it| to_upper_snake_case(it))
         .map(String::leak)
         .map(|it| &*it)
         .collect();
     let nodes = Vec::leak(nodes);
     nodes.sort();
+    let enums = enums
+        .iter()
+        .map(|it| &it.name)
+        .map(|it| to_upper_snake_case(it))
+        .map(String::leak)
+        .map(|it| &*it)
+        .collect();
+    let enums = Vec::leak(enums);
+    enums.sort();
     let keywords = Vec::leak(keywords);
     let contextual_keywords = Vec::leak(contextual_keywords);
     let edition_dependent_keywords = Vec::leak(edition_dependent_keywords);
@@ -224,6 +233,7 @@ pub(crate) fn generate_kind_src(
     KindsSrc {
         punct: PUNCT,
         nodes,
+        _enums: enums,
         keywords,
         contextual_keywords,
         edition_dependent_keywords,
diff --git a/src/tools/rust-analyzer/xtask/src/codegen/lints.rs b/src/tools/rust-analyzer/xtask/src/codegen/lints.rs
index b1a7c2fb27ed1..3b4c2e8da3c37 100644
--- a/src/tools/rust-analyzer/xtask/src/codegen/lints.rs
+++ b/src/tools/rust-analyzer/xtask/src/codegen/lints.rs
@@ -3,7 +3,7 @@
 #![allow(clippy::disallowed_types)]
 
 use std::{
-    collections::{hash_map, HashMap},
+    collections::{HashMap, hash_map},
     fs,
     path::Path,
     str::FromStr,
@@ -11,7 +11,7 @@ use std::{
 
 use edition::Edition;
 use stdx::format_to;
-use xshell::{cmd, Shell};
+use xshell::{Shell, cmd};
 
 use crate::{
     codegen::{add_preamble, ensure_file_contents, reformat},
diff --git a/src/tools/rust-analyzer/xtask/src/codegen/parser_inline_tests.rs b/src/tools/rust-analyzer/xtask/src/codegen/parser_inline_tests.rs
index 88732cebe727e..f3b786b9d867b 100644
--- a/src/tools/rust-analyzer/xtask/src/codegen/parser_inline_tests.rs
+++ b/src/tools/rust-analyzer/xtask/src/codegen/parser_inline_tests.rs
@@ -13,7 +13,7 @@ use anyhow::Result;
 use itertools::Itertools as _;
 
 use crate::{
-    codegen::{ensure_file_contents, reformat, CommentBlock},
+    codegen::{CommentBlock, ensure_file_contents, reformat},
     project_root,
     util::list_rust_files,
 };
diff --git a/src/tools/rust-analyzer/xtask/src/dist.rs b/src/tools/rust-analyzer/xtask/src/dist.rs
index 99483f4a5dc2d..b3d6f06b073fa 100644
--- a/src/tools/rust-analyzer/xtask/src/dist.rs
+++ b/src/tools/rust-analyzer/xtask/src/dist.rs
@@ -1,15 +1,18 @@
+use anyhow::Context;
+use flate2::{Compression, write::GzEncoder};
+use std::env::consts::EXE_EXTENSION;
+use std::ffi::OsStr;
 use std::{
     env,
     fs::File,
     io::{self, BufWriter},
     path::{Path, PathBuf},
 };
-
-use flate2::{write::GzEncoder, Compression};
 use time::OffsetDateTime;
-use xshell::{cmd, Shell};
-use zip::{write::FileOptions, DateTime, ZipWriter};
+use xshell::{Cmd, Shell, cmd};
+use zip::{DateTime, ZipWriter, write::SimpleFileOptions};
 
+use crate::flags::PgoTrainingCrate;
 use crate::{
     date_iso,
     flags::{self, Malloc},
@@ -38,11 +41,18 @@ impl flags::Dist {
                 // A hack to make VS Code prefer nightly over stable.
                 format!("{VERSION_NIGHTLY}.{patch_version}")
             };
-            dist_server(sh, &format!("{version}-standalone"), &target, allocator, self.zig)?;
+            dist_server(
+                sh,
+                &format!("{version}-standalone"),
+                &target,
+                allocator,
+                self.zig,
+                self.pgo,
+            )?;
             let release_tag = if stable { date_iso(sh)? } else { "nightly".to_owned() };
             dist_client(sh, &version, &release_tag, &target)?;
         } else {
-            dist_server(sh, "0.0.0-standalone", &target, allocator, self.zig)?;
+            dist_server(sh, "0.0.0-standalone", &target, allocator, self.zig, self.pgo)?;
         }
         Ok(())
     }
@@ -84,6 +94,7 @@ fn dist_server(
     target: &Target,
     allocator: Malloc,
     zig: bool,
+    pgo: Option<PgoTrainingCrate>,
 ) -> anyhow::Result<()> {
     let _e = sh.push_env("CFG_RELEASE", release);
     let _e = sh.push_env("CARGO_PROFILE_RELEASE_LTO", "thin");
@@ -100,7 +111,23 @@ fn dist_server(
     };
     let features = allocator.to_features();
     let command = if linux_target && zig { "zigbuild" } else { "build" };
-    cmd!(sh, "cargo {command} --manifest-path ./crates/rust-analyzer/Cargo.toml --bin rust-analyzer --target {target_name} {features...} --release").run()?;
+
+    let pgo_profile = if let Some(train_crate) = pgo {
+        Some(gather_pgo_profile(
+            sh,
+            build_command(sh, command, &target_name, features),
+            &target_name,
+            train_crate,
+        )?)
+    } else {
+        None
+    };
+
+    let mut cmd = build_command(sh, command, &target_name, features);
+    if let Some(profile) = pgo_profile {
+        cmd = cmd.env("RUSTFLAGS", format!("-Cprofile-use={}", profile.to_str().unwrap()));
+    }
+    cmd.run().context("cannot build Rust Analyzer")?;
 
     let dst = Path::new("dist").join(&target.artifact_name);
     if target_name.contains("-windows-") {
@@ -112,6 +139,97 @@ fn dist_server(
     Ok(())
 }
 
+fn build_command<'a>(
+    sh: &'a Shell,
+    command: &str,
+    target_name: &str,
+    features: &[&str],
+) -> Cmd<'a> {
+    cmd!(
+        sh,
+        "cargo {command} --manifest-path ./crates/rust-analyzer/Cargo.toml --bin rust-analyzer --target {target_name} {features...} --release"
+    )
+}
+
+/// Decorates `ra_build_cmd` to add PGO instrumentation, and then runs the PGO-instrumented
+/// rust-analyzer on a training workload to gather a PGO profile.
+fn gather_pgo_profile<'a>(
+    sh: &'a Shell,
+    ra_build_cmd: Cmd<'a>,
+    target: &str,
+    train_crate: PgoTrainingCrate,
+) -> anyhow::Result<PathBuf> {
+    let pgo_dir = std::path::absolute("rust-analyzer-pgo")?;
+    // Clear out any stale profiles
+    if pgo_dir.is_dir() {
+        std::fs::remove_dir_all(&pgo_dir)?;
+    }
+    std::fs::create_dir_all(&pgo_dir)?;
+
+    // Figure out a path to `llvm-profdata`
+    let target_libdir = cmd!(sh, "rustc --print=target-libdir")
+        .read()
+        .context("cannot resolve target-libdir from rustc")?;
+    let target_bindir = PathBuf::from(target_libdir).parent().unwrap().join("bin");
+    let llvm_profdata = target_bindir.join("llvm-profdata").with_extension(EXE_EXTENSION);
+
+    // Build RA with PGO instrumentation
+    let cmd_gather =
+        ra_build_cmd.env("RUSTFLAGS", format!("-Cprofile-generate={}", pgo_dir.to_str().unwrap()));
+    cmd_gather.run().context("cannot build rust-analyzer with PGO instrumentation")?;
+
+    let (train_path, label) = match &train_crate {
+        PgoTrainingCrate::RustAnalyzer => (PathBuf::from("."), "itself"),
+        PgoTrainingCrate::GitHub(repo) => {
+            (download_crate_for_training(sh, &pgo_dir, repo)?, repo.as_str())
+        }
+    };
+
+    // Run RA either on itself or on a downloaded crate
+    eprintln!("Training RA on {label}...");
+    cmd!(
+        sh,
+        "target/{target}/release/rust-analyzer analysis-stats -q --run-all-ide-things {train_path}"
+    )
+    .run()
+    .context("cannot generate PGO profiles")?;
+
+    // Merge profiles into a single file
+    let merged_profile = pgo_dir.join("merged.profdata");
+    let profile_files = std::fs::read_dir(pgo_dir)?.filter_map(|entry| {
+        let entry = entry.ok()?;
+        if entry.path().extension() == Some(OsStr::new("profraw")) {
+            Some(entry.path().to_str().unwrap().to_owned())
+        } else {
+            None
+        }
+    });
+    cmd!(sh, "{llvm_profdata} merge {profile_files...} -o {merged_profile}").run().context(
+        "cannot merge PGO profiles. Do you have the rustup `llvm-tools` component installed?",
+    )?;
+
+    Ok(merged_profile)
+}
+
+/// Downloads a crate from GitHub, stores it into `pgo_dir` and returns a path to it.
+fn download_crate_for_training(sh: &Shell, pgo_dir: &Path, repo: &str) -> anyhow::Result<PathBuf> {
+    let mut it = repo.splitn(2, '@');
+    let repo = it.next().unwrap();
+    let revision = it.next();
+
+    // FIXME: switch to `--revision` here around 2035 or so
+    let revision =
+        if let Some(revision) = revision { &["--branch", revision] as &[&str] } else { &[] };
+
+    let normalized_path = repo.replace("/", "-");
+    let target_path = pgo_dir.join(normalized_path);
+    cmd!(sh, "git clone --depth 1 https://github.com/{repo} {revision...} {target_path}")
+        .run()
+        .with_context(|| "cannot download PGO training crate from {repo}")?;
+
+    Ok(target_path)
+}
+
 fn gzip(src_path: &Path, dest_path: &Path) -> anyhow::Result<()> {
     let mut encoder = GzEncoder::new(File::create(dest_path)?, Compression::best());
     let mut input = io::BufReader::new(File::open(src_path)?);
@@ -125,7 +243,7 @@ fn zip(src_path: &Path, symbols_path: Option<&PathBuf>, dest_path: &Path) -> any
     let mut writer = ZipWriter::new(BufWriter::new(file));
     writer.start_file(
         src_path.file_name().unwrap().to_str().unwrap(),
-        FileOptions::default()
+        SimpleFileOptions::default()
             .last_modified_time(
                 DateTime::try_from(OffsetDateTime::from(std::fs::metadata(src_path)?.modified()?))
                     .unwrap(),
@@ -139,7 +257,7 @@ fn zip(src_path: &Path, symbols_path: Option<&PathBuf>, dest_path: &Path) -> any
     if let Some(symbols_path) = symbols_path {
         writer.start_file(
             symbols_path.file_name().unwrap().to_str().unwrap(),
-            FileOptions::default()
+            SimpleFileOptions::default()
                 .last_modified_time(
                     DateTime::try_from(OffsetDateTime::from(
                         std::fs::metadata(src_path)?.modified()?,
diff --git a/src/tools/rust-analyzer/xtask/src/flags.rs b/src/tools/rust-analyzer/xtask/src/flags.rs
index d03e2f8437ece..700806d178c33 100644
--- a/src/tools/rust-analyzer/xtask/src/flags.rs
+++ b/src/tools/rust-analyzer/xtask/src/flags.rs
@@ -59,6 +59,8 @@ xflags::xflags! {
             optional --client-patch-version version: String
             /// Use cargo-zigbuild
             optional --zig
+            /// Apply PGO optimizations
+            optional --pgo pgo: PgoTrainingCrate
         }
         /// Read a changelog AsciiDoc file and update the GitHub Releases entry in Markdown.
         cmd publish-release-notes {
@@ -141,12 +143,32 @@ pub struct RustcPush {
     pub branch: Option<String>,
 }
 
+#[derive(Debug)]
+pub enum PgoTrainingCrate {
+    // Use RA's own sources for PGO training
+    RustAnalyzer,
+    // Download a Rust crate from `https://github.com/{0}` and use it for PGO training.
+    GitHub(String),
+}
+
+impl FromStr for PgoTrainingCrate {
+    type Err = String;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        match s {
+            "rust-analyzer" => Ok(Self::RustAnalyzer),
+            url => Ok(Self::GitHub(url.to_owned())),
+        }
+    }
+}
+
 #[derive(Debug)]
 pub struct Dist {
     pub mimalloc: bool,
     pub jemalloc: bool,
     pub client_patch_version: Option<String>,
     pub zig: bool,
+    pub pgo: Option<PgoTrainingCrate>,
 }
 
 #[derive(Debug)]
diff --git a/src/tools/rust-analyzer/xtask/src/install.rs b/src/tools/rust-analyzer/xtask/src/install.rs
index eb33d6f9be696..4e2093f0691bb 100644
--- a/src/tools/rust-analyzer/xtask/src/install.rs
+++ b/src/tools/rust-analyzer/xtask/src/install.rs
@@ -2,8 +2,8 @@
 
 use std::{env, path::PathBuf, str};
 
-use anyhow::{bail, format_err, Context};
-use xshell::{cmd, Shell};
+use anyhow::{Context, bail, format_err};
+use xshell::{Shell, cmd};
 
 use crate::flags::{self, Malloc};
 
@@ -135,7 +135,10 @@ fn install_server(sh: &Shell, opts: ServerOpt) -> anyhow::Result<()> {
     let features = opts.malloc.to_features();
     let profile = if opts.dev_rel { "dev-rel" } else { "release" };
 
-    let cmd = cmd!(sh, "cargo install --path crates/rust-analyzer --profile={profile} --locked --force --features force-always-assert {features...}");
+    let cmd = cmd!(
+        sh,
+        "cargo install --path crates/rust-analyzer --profile={profile} --locked --force --features force-always-assert {features...}"
+    );
     cmd.run()?;
     Ok(())
 }
@@ -143,7 +146,10 @@ fn install_server(sh: &Shell, opts: ServerOpt) -> anyhow::Result<()> {
 fn install_proc_macro_server(sh: &Shell, opts: ProcMacroServerOpt) -> anyhow::Result<()> {
     let profile = if opts.dev_rel { "dev-rel" } else { "release" };
 
-    let cmd = cmd!(sh, "cargo +nightly install --path crates/proc-macro-srv-cli --profile={profile} --locked --force --features sysroot-abi");
+    let cmd = cmd!(
+        sh,
+        "cargo +nightly install --path crates/proc-macro-srv-cli --profile={profile} --locked --force --features sysroot-abi"
+    );
     cmd.run()?;
     Ok(())
 }
diff --git a/src/tools/rust-analyzer/xtask/src/main.rs b/src/tools/rust-analyzer/xtask/src/main.rs
index 1e723b90a5ead..52ea896c734d7 100644
--- a/src/tools/rust-analyzer/xtask/src/main.rs
+++ b/src/tools/rust-analyzer/xtask/src/main.rs
@@ -29,7 +29,7 @@ mod util;
 
 use anyhow::bail;
 use std::{env, path::PathBuf};
-use xshell::{cmd, Shell};
+use xshell::{Shell, cmd};
 
 fn main() -> anyhow::Result<()> {
     let flags = flags::Xtask::from_env_or_exit();
diff --git a/src/tools/rust-analyzer/xtask/src/metrics.rs b/src/tools/rust-analyzer/xtask/src/metrics.rs
index 6555f225415f6..6ff6a1b15310a 100644
--- a/src/tools/rust-analyzer/xtask/src/metrics.rs
+++ b/src/tools/rust-analyzer/xtask/src/metrics.rs
@@ -7,7 +7,7 @@ use std::{
 };
 
 use anyhow::format_err;
-use xshell::{cmd, Shell};
+use xshell::{Shell, cmd};
 
 use crate::flags::{self, MeasurementType};
 
diff --git a/src/tools/rust-analyzer/xtask/src/publish.rs b/src/tools/rust-analyzer/xtask/src/publish.rs
index f5d765d7c9821..fea078391a3c8 100644
--- a/src/tools/rust-analyzer/xtask/src/publish.rs
+++ b/src/tools/rust-analyzer/xtask/src/publish.rs
@@ -3,7 +3,7 @@ mod notes;
 use crate::flags;
 use anyhow::bail;
 use std::env;
-use xshell::{cmd, Shell};
+use xshell::{Shell, cmd};
 
 impl flags::PublishReleaseNotes {
     pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> {
@@ -73,7 +73,9 @@ fn create_original_changelog_url(file_name: &str) -> String {
 fn update_release(sh: &Shell, tag_name: &str, release_notes: &str) -> anyhow::Result<()> {
     let token = match env::var("GITHUB_TOKEN") {
         Ok(token) => token,
-        Err(_) => bail!("Please obtain a personal access token from https://github.com/settings/tokens and set the `GITHUB_TOKEN` environment variable."),
+        Err(_) => bail!(
+            "Please obtain a personal access token from https://github.com/settings/tokens and set the `GITHUB_TOKEN` environment variable."
+        ),
     };
     let accept = "Accept: application/vnd.github+json";
     let authorization = format!("Authorization: Bearer {token}");
diff --git a/src/tools/rust-analyzer/xtask/src/publish/notes.rs b/src/tools/rust-analyzer/xtask/src/publish/notes.rs
index 7245ce2431187..93592d4986f8a 100644
--- a/src/tools/rust-analyzer/xtask/src/publish/notes.rs
+++ b/src/tools/rust-analyzer/xtask/src/publish/notes.rs
@@ -85,7 +85,7 @@ impl<'a, 'b, R: BufRead> Converter<'a, 'b, R> {
     }
 
     fn process_list(&mut self) -> anyhow::Result<()> {
-        let mut nesting = ListNesting::new();
+        let mut nesting = ListNesting::default();
         while let Some(line) = self.iter.peek() {
             let line = line.as_deref().map_err(|e| anyhow!("{e}"))?;
 
@@ -385,10 +385,6 @@ fn parse_media_block<'a>(line: &'a str, prefix: &str) -> Option<(&'a str, &'a st
 struct ListNesting(Vec<ListMarker>);
 
 impl ListNesting {
-    fn new() -> Self {
-        Self(Vec::<ListMarker>::with_capacity(6))
-    }
-
     fn current(&mut self) -> Option<&ListMarker> {
         self.0.last()
     }
@@ -417,6 +413,12 @@ impl ListNesting {
     }
 }
 
+impl Default for ListNesting {
+    fn default() -> Self {
+        Self(Vec::<ListMarker>::with_capacity(6))
+    }
+}
+
 #[derive(Debug, PartialEq, Eq)]
 enum ListMarker {
     Asterisk(usize),
diff --git a/src/tools/rust-analyzer/xtask/src/release.rs b/src/tools/rust-analyzer/xtask/src/release.rs
index 9f65c40295366..e41f4ceb4350f 100644
--- a/src/tools/rust-analyzer/xtask/src/release.rs
+++ b/src/tools/rust-analyzer/xtask/src/release.rs
@@ -4,10 +4,10 @@ use std::process::{Command, Stdio};
 use std::thread;
 use std::time::Duration;
 
-use anyhow::{bail, Context as _};
+use anyhow::{Context as _, bail};
 use directories::ProjectDirs;
 use stdx::JodChild;
-use xshell::{cmd, Shell};
+use xshell::{Shell, cmd};
 
 use crate::{date_iso, flags, is_release_tag, project_root};
 
@@ -188,7 +188,9 @@ impl flags::RustcPush {
                 Expected {head}, got {fetch_head}."
             );
         }
-        println!("Confirmed that the push round-trips back to rust-analyzer properly. Please create a rustc PR:");
+        println!(
+            "Confirmed that the push round-trips back to rust-analyzer properly. Please create a rustc PR:"
+        );
         // https://github.com/github-linguist/linguist/compare/master...octocat:linguist:master
         let fork_path = rust_fork.replace('/', ":");
         println!(
@@ -201,8 +203,7 @@ impl flags::RustcPush {
 }
 
 /// Used for rustc syncs.
-const JOSH_FILTER: &str =
-    ":rev(55d9a533b309119c8acd13061581b43ae8840823:prefix=src/tools/rust-analyzer):/src/tools/rust-analyzer";
+const JOSH_FILTER: &str = ":rev(55d9a533b309119c8acd13061581b43ae8840823:prefix=src/tools/rust-analyzer):/src/tools/rust-analyzer";
 const JOSH_PORT: &str = "42042";
 
 fn start_josh() -> anyhow::Result<impl Drop> {
diff --git a/src/tools/rust-analyzer/xtask/src/release/changelog.rs b/src/tools/rust-analyzer/xtask/src/release/changelog.rs
index 343a9efbbc818..391a23eb29418 100644
--- a/src/tools/rust-analyzer/xtask/src/release/changelog.rs
+++ b/src/tools/rust-analyzer/xtask/src/release/changelog.rs
@@ -2,7 +2,7 @@ use std::fmt::Write;
 use std::{env, iter};
 
 use anyhow::bail;
-use xshell::{cmd, Shell};
+use xshell::{Shell, cmd};
 
 pub(crate) fn get_changelog(
     sh: &Shell,
@@ -13,7 +13,9 @@ pub(crate) fn get_changelog(
 ) -> anyhow::Result<String> {
     let token = match env::var("GITHUB_TOKEN") {
         Ok(token) => token,
-        Err(_) => bail!("Please obtain a personal access token from https://github.com/settings/tokens and set the `GITHUB_TOKEN` environment variable."),
+        Err(_) => bail!(
+            "Please obtain a personal access token from https://github.com/settings/tokens and set the `GITHUB_TOKEN` environment variable."
+        ),
     };
 
     let git_log = cmd!(sh, "git log {prev_tag}..HEAD --reverse").read()?;
@@ -134,11 +136,7 @@ fn parse_pr_number(s: &str) -> Option<u32> {
         let s = if let Some(space) = s.find(' ') { &s[..space] } else { s };
         s.parse().ok()
     } else if let Some(s) = s.strip_prefix(HOMU_PREFIX) {
-        if let Some(space) = s.find(' ') {
-            s[..space].parse().ok()
-        } else {
-            None
-        }
+        if let Some(space) = s.find(' ') { s[..space].parse().ok() } else { None }
     } else {
         None
     }
diff --git a/src/tools/rust-analyzer/xtask/src/tidy.rs b/src/tools/rust-analyzer/xtask/src/tidy.rs
index b500b251ed355..f91192b0076ba 100644
--- a/src/tools/rust-analyzer/xtask/src/tidy.rs
+++ b/src/tools/rust-analyzer/xtask/src/tidy.rs
@@ -4,6 +4,7 @@ use std::{
     path::{Path, PathBuf},
 };
 
+use itertools::Itertools;
 use xshell::Shell;
 
 use xshell::cmd;
@@ -46,7 +47,7 @@ lsp/ext.rs was changed without touching lsp-extensions.md.
 Expected hash: {expected_hash:x}
 Actual hash:   {actual_hash:x}
 
-Please adjust docs/dev/lsp-extensions.md.
+Please adjust docs/book/src/contributing/lsp-extensions.md.
 "
         )
     }
@@ -126,32 +127,28 @@ fn check_cargo_toml(path: &Path, text: String) {
 }
 
 fn check_licenses(sh: &Shell) {
-    let expected = "
-(MIT OR Apache-2.0) AND Unicode-DFS-2016
-0BSD OR MIT OR Apache-2.0
-Apache-2.0
-Apache-2.0 OR BSL-1.0
-Apache-2.0 OR MIT
-Apache-2.0 WITH LLVM-exception
-Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT
-Apache-2.0/MIT
-BSD-2-Clause OR Apache-2.0 OR MIT
-CC0-1.0
-ISC
-MIT
-MIT / Apache-2.0
-MIT OR Apache-2.0
-MIT OR Apache-2.0 OR Zlib
-MIT OR Zlib OR Apache-2.0
-MIT/Apache-2.0
-MPL-2.0
-Unlicense OR MIT
-Unlicense/MIT
-Zlib OR Apache-2.0 OR MIT
-"
-    .lines()
-    .filter(|it| !it.is_empty())
-    .collect::<Vec<_>>();
+    const EXPECTED: [&str; 20] = [
+        "(MIT OR Apache-2.0) AND Unicode-3.0",
+        "0BSD OR MIT OR Apache-2.0",
+        "Apache-2.0",
+        "Apache-2.0 OR BSL-1.0",
+        "Apache-2.0 OR MIT",
+        "Apache-2.0 WITH LLVM-exception",
+        "Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT",
+        "Apache-2.0/MIT",
+        "CC0-1.0",
+        "ISC",
+        "MIT",
+        "MIT / Apache-2.0",
+        "MIT OR Apache-2.0",
+        "MIT OR Zlib OR Apache-2.0",
+        "MIT/Apache-2.0",
+        "MPL-2.0",
+        "Unicode-3.0",
+        "Unlicense OR MIT",
+        "Unlicense/MIT",
+        "Zlib",
+    ];
 
     let meta = cmd!(sh, "cargo metadata --format-version 1").read().unwrap();
     let mut licenses = meta
@@ -162,18 +159,18 @@ Zlib OR Apache-2.0 OR MIT
         .collect::<Vec<_>>();
     licenses.sort_unstable();
     licenses.dedup();
-    if licenses != expected {
+    if licenses != EXPECTED {
         let mut diff = String::new();
 
         diff.push_str("New Licenses:\n");
         for &l in licenses.iter() {
-            if !expected.contains(&l) {
+            if !EXPECTED.contains(&l) {
                 diff += &format!("  {l}\n")
             }
         }
 
         diff.push_str("\nMissing Licenses:\n");
-        for &l in expected.iter() {
+        for l in EXPECTED {
             if !licenses.contains(&l) {
                 diff += &format!("  {l}\n")
             }
@@ -181,12 +178,11 @@ Zlib OR Apache-2.0 OR MIT
 
         panic!("different set of licenses!\n{diff}");
     }
-    assert_eq!(licenses, expected);
+    assert_eq!(licenses, EXPECTED);
 }
 
 fn check_test_attrs(path: &Path, text: &str) {
-    let panic_rule =
-        "https://github.com/rust-lang/rust-analyzer/blob/master/docs/book/src/contributing/style.md#should_panic";
+    let panic_rule = "https://github.com/rust-lang/rust-analyzer/blob/master/docs/book/src/contributing/style.md#should_panic";
     let need_panic: &[&str] = &[
         // This file.
         "slow-tests/tidy.rs",
@@ -194,9 +190,17 @@ fn check_test_attrs(path: &Path, text: &str) {
         // Generated code from lints contains doc tests in string literals.
         "ide-db/src/generated/lints.rs",
     ];
-    if text.contains("#[should_panic") && !need_panic.iter().any(|p| path.ends_with(p)) {
+    if need_panic.iter().any(|p| path.ends_with(p)) {
+        return;
+    }
+    if let Some((line, _)) = text
+        .lines()
+        .tuple_windows()
+        .enumerate()
+        .find(|(_, (a, b))| b.contains("#[should_panic") && !a.contains("FIXME"))
+    {
         panic!(
-            "\ndon't add `#[should_panic]` tests, see:\n\n    {}\n\n   {}\n",
+            "\ndon't add `#[should_panic]` tests, see:\n\n    {}\n\n   {}:{line}\n",
             panic_rule,
             path.display(),
         )
@@ -223,7 +227,7 @@ struct TidyDocs {
 impl TidyDocs {
     fn visit(&mut self, path: &Path, text: &str) {
         // Tests and diagnostic fixes don't need module level comments.
-        if is_exclude_dir(path, &["tests", "test_data", "fixes", "grammar", "ra-salsa", "stdx"]) {
+        if is_exclude_dir(path, &["tests", "test_data", "fixes", "grammar", "stdx"]) {
             return;
         }
 
@@ -256,7 +260,7 @@ impl TidyDocs {
             d.file_name()
                 .unwrap_or_default()
                 .to_str()
-                .map(|f_n| file_names.iter().any(|name| *name == f_n))
+                .map(|f_n| file_names.contains(&f_n))
                 .unwrap_or(false)
         }
     }
diff --git a/src/tools/rustfmt/src/pairs.rs b/src/tools/rustfmt/src/pairs.rs
index 9c51298416b0d..17ff041d77528 100644
--- a/src/tools/rustfmt/src/pairs.rs
+++ b/src/tools/rustfmt/src/pairs.rs
@@ -1,4 +1,4 @@
-use rustc_ast::ast;
+use rustc_ast::{ast, token};
 use rustc_span::Span;
 
 use crate::config::IndentStyle;
@@ -272,13 +272,17 @@ struct PairList<'a, 'b, T: Rewrite> {
     span: Span,
 }
 
-fn is_ident(expr: &ast::Expr) -> bool {
+fn is_ident_or_bool_lit(expr: &ast::Expr) -> bool {
     match &expr.kind {
         ast::ExprKind::Path(None, path) if path.segments.len() == 1 => true,
+        ast::ExprKind::Lit(token::Lit {
+            kind: token::LitKind::Bool,
+            ..
+        }) => true,
         ast::ExprKind::Unary(_, expr)
         | ast::ExprKind::AddrOf(_, _, expr)
         | ast::ExprKind::Paren(expr)
-        | ast::ExprKind::Try(expr) => is_ident(expr),
+        | ast::ExprKind::Try(expr) => is_ident_or_bool_lit(expr),
         _ => false,
     }
 }
@@ -296,10 +300,10 @@ impl<'a, 'b> PairList<'a, 'b, ast::Expr> {
             return false;
         }
 
-        let fist_item_is_ident = is_ident(self.list[0].0);
+        let first_item_is_ident_or_bool_lit = is_ident_or_bool_lit(self.list[0].0);
         let second_item_is_let_chain = matches!(self.list[1].0.kind, ast::ExprKind::Let(..));
 
-        fist_item_is_ident && second_item_is_let_chain
+        first_item_is_ident_or_bool_lit && second_item_is_let_chain
     }
 }
 
diff --git a/src/tools/rustfmt/tests/source/let_chains.rs b/src/tools/rustfmt/tests/source/let_chains.rs
index b7c1f811096c8..0c4d8aa85ea90 100644
--- a/src/tools/rustfmt/tests/source/let_chains.rs
+++ b/src/tools/rustfmt/tests/source/let_chains.rs
@@ -20,6 +20,11 @@ fn test_single_line_let_chain() {
     if a && let Some(b) = foo() {
     }
 
+    // first item in let-chain is a bool literal
+    if true && let Some(x) = y {
+
+    }
+
     // first item in let-chain is a unary ! with an ident
     let unary_not = if !from_hir_call
         && let Some(p) = parent
@@ -94,11 +99,6 @@ fn test_multi_line_let_chain() {
 
     }
 
-    // bool literal
-    if true && let Some(x) = y {
-
-    }
-
     // cast to a bool
     if 1 as bool && let Some(x) = y {
 
diff --git a/src/tools/rustfmt/tests/target/let_chains.rs b/src/tools/rustfmt/tests/target/let_chains.rs
index 1ceecac8abcb7..204937b4cac95 100644
--- a/src/tools/rustfmt/tests/target/let_chains.rs
+++ b/src/tools/rustfmt/tests/target/let_chains.rs
@@ -50,6 +50,9 @@ fn test_single_line_let_chain() {
     // first item in let-chain is an ident
     if a && let Some(b) = foo() {}
 
+    // first item in let-chain is a bool literal
+    if true && let Some(x) = y {}
+
     // first item in let-chain is a unary ! with an ident
     let unary_not = if !from_hir_call && let Some(p) = parent {};
 
@@ -102,11 +105,6 @@ fn test_multi_line_let_chain() {
         && let Some(x) = y
     {}
 
-    // bool literal
-    if true
-        && let Some(x) = y
-    {}
-
     // cast to a bool
     if 1 as bool
         && let Some(x) = y
diff --git a/src/tools/tidy/src/deps.rs b/src/tools/tidy/src/deps.rs
index dd7f9c6b146a7..3c3ebf556c52f 100644
--- a/src/tools/tidy/src/deps.rs
+++ b/src/tools/tidy/src/deps.rs
@@ -148,6 +148,7 @@ const EXCEPTIONS_CARGO: ExceptionList = &[
 const EXCEPTIONS_RUST_ANALYZER: ExceptionList = &[
     // tidy-alphabetical-start
     ("dissimilar", "Apache-2.0"),
+    ("foldhash", "Zlib"),
     ("notify", "CC0-1.0"),
     ("option-ext", "MPL-2.0"),
     ("pulldown-cmark-to-cmark", "Apache-2.0"),
diff --git a/tests/ui/async-await/async-drop/partly-dropped-tuple.rs b/tests/ui/async-await/async-drop/partly-dropped-tuple.rs
new file mode 100644
index 0000000000000..147caaf4cfd1d
--- /dev/null
+++ b/tests/ui/async-await/async-drop/partly-dropped-tuple.rs
@@ -0,0 +1,11 @@
+//@ edition: 2024
+//@ build-pass
+#![crate_type = "lib"]
+#![allow(incomplete_features)]
+#![feature(async_drop)]
+async fn move_part_await_return_rest_tuple() -> Vec<usize> {
+    let x = (vec![3], vec![4, 4]);
+    drop(x.1);
+
+    x.0
+}
diff --git a/tests/ui/auxiliary/default-ty-param-cross-crate-crate.rs b/tests/ui/auxiliary/default-ty-param-cross-crate-crate.rs
deleted file mode 100644
index d722b78768a47..0000000000000
--- a/tests/ui/auxiliary/default-ty-param-cross-crate-crate.rs
+++ /dev/null
@@ -1,9 +0,0 @@
-#![crate_type = "lib"]
-#![crate_name = "default_param_test"]
-#![feature(default_type_parameter_fallback)]
-
-use std::marker::PhantomData;
-
-pub struct Foo<A, B>(PhantomData<(A, B)>);
-
-pub fn bleh<A=i32, X=char>() -> Foo<A, X> { Foo(PhantomData) }
diff --git a/tests/ui/auxiliary/edition-kw-macro-2015.rs b/tests/ui/auxiliary/edition-kw-macro-2015.rs
deleted file mode 100644
index 7f479fa93708a..0000000000000
--- a/tests/ui/auxiliary/edition-kw-macro-2015.rs
+++ /dev/null
@@ -1,26 +0,0 @@
-//@ edition:2015
-
-#[macro_export]
-macro_rules! produces_async {
-    () => (pub fn async() {})
-}
-
-#[macro_export]
-macro_rules! produces_async_raw {
-    () => (pub fn r#async() {})
-}
-
-#[macro_export]
-macro_rules! consumes_async {
-    (async) => (1)
-}
-
-#[macro_export]
-macro_rules! consumes_async_raw {
-    (r#async) => (1)
-}
-
-#[macro_export]
-macro_rules! passes_ident {
-    ($i: ident) => ($i)
-}
diff --git a/tests/ui/auxiliary/edition-kw-macro-2018.rs b/tests/ui/auxiliary/edition-kw-macro-2018.rs
deleted file mode 100644
index ba8ecc4d83b3f..0000000000000
--- a/tests/ui/auxiliary/edition-kw-macro-2018.rs
+++ /dev/null
@@ -1,26 +0,0 @@
-//@ edition:2018
-
-#[macro_export]
-macro_rules! produces_async {
-    () => (pub fn async() {})
-}
-
-#[macro_export]
-macro_rules! produces_async_raw {
-    () => (pub fn r#async() {})
-}
-
-#[macro_export]
-macro_rules! consumes_async {
-    (async) => (1)
-}
-
-#[macro_export]
-macro_rules! consumes_async_raw {
-    (r#async) => (1)
-}
-
-#[macro_export]
-macro_rules! passes_ident {
-    ($i: ident) => ($i)
-}
diff --git a/tests/ui/auxiliary/removing-extern-crate.rs b/tests/ui/auxiliary/removing-extern-crate.rs
deleted file mode 100644
index 65e2cc340450f..0000000000000
--- a/tests/ui/auxiliary/removing-extern-crate.rs
+++ /dev/null
@@ -1 +0,0 @@
-// intentionally blank
diff --git a/tests/ui/catch-unwind-bang.rs b/tests/ui/catch-unwind-bang.rs
deleted file mode 100644
index c874c649f3330..0000000000000
--- a/tests/ui/catch-unwind-bang.rs
+++ /dev/null
@@ -1,10 +0,0 @@
-//@ run-pass
-//@ needs-unwind
-
-fn worker() -> ! {
-    panic!()
-}
-
-fn main() {
-    std::panic::catch_unwind(worker).unwrap_err();
-}
diff --git a/tests/ui/cfguard-run.rs b/tests/ui/codegen/cfguard-run.rs
similarity index 100%
rename from tests/ui/cfguard-run.rs
rename to tests/ui/codegen/cfguard-run.rs
diff --git a/tests/ui/const-generics/const_eval_unchecked_doesnt_fire_patterns.rs b/tests/ui/const-generics/const_eval_unchecked_doesnt_fire_patterns.rs
new file mode 100644
index 0000000000000..fae2d16f43029
--- /dev/null
+++ b/tests/ui/const-generics/const_eval_unchecked_doesnt_fire_patterns.rs
@@ -0,0 +1,23 @@
+//@ check-pass
+
+// Previously the `CONST_EVALUATABLE_UNCHECKED` FCW would fire on const evaluation of
+// associated consts. This is unnecessary as the FCW only needs to apply for repeat expr
+// counts which are anon consts with generic parameters provided. #140447
+
+pub struct Foo<const N: usize>;
+
+impl<const N: usize> Foo<N> {
+    const UNUSED_PARAM: usize = {
+        let _: [(); N];
+        3
+    };
+
+    pub fn bar() {
+        match 1 {
+            Self::UNUSED_PARAM => (),
+            _ => (),
+        }
+    }
+}
+
+fn main() {}
diff --git a/tests/ui/error-festival.rs b/tests/ui/error-emitter/error-festival.rs
similarity index 86%
rename from tests/ui/error-festival.rs
rename to tests/ui/error-emitter/error-festival.rs
index 356564e54077a..ebb5882352cba 100644
--- a/tests/ui/error-festival.rs
+++ b/tests/ui/error-emitter/error-festival.rs
@@ -1,3 +1,5 @@
+//! Check that if there are a lot of errors we truncate the list of errors appropriately
+
 enum Question {
     Yes,
     No,
diff --git a/tests/ui/error-festival.stderr b/tests/ui/error-emitter/error-festival.stderr
similarity index 86%
rename from tests/ui/error-festival.stderr
rename to tests/ui/error-emitter/error-festival.stderr
index 9db9536379141..be484bc8094fe 100644
--- a/tests/ui/error-festival.stderr
+++ b/tests/ui/error-emitter/error-festival.stderr
@@ -1,5 +1,5 @@
 error[E0425]: cannot find value `y` in this scope
-  --> $DIR/error-festival.rs:14:5
+  --> $DIR/error-festival.rs:16:5
    |
 LL |     y = 2;
    |     ^
@@ -15,19 +15,19 @@ LL |     let y = 2;
    |     +++
 
 error[E0603]: constant `FOO` is private
-  --> $DIR/error-festival.rs:22:10
+  --> $DIR/error-festival.rs:24:10
    |
 LL |     foo::FOO;
    |          ^^^ private constant
    |
 note: the constant `FOO` is defined here
-  --> $DIR/error-festival.rs:7:5
+  --> $DIR/error-festival.rs:9:5
    |
 LL |     const FOO: u32 = 0;
    |     ^^^^^^^^^^^^^^^^^^^
 
 error[E0368]: binary assignment operation `+=` cannot be applied to type `&str`
-  --> $DIR/error-festival.rs:12:5
+  --> $DIR/error-festival.rs:14:5
    |
 LL |     x += 2;
    |     -^^^^^
@@ -35,19 +35,19 @@ LL |     x += 2;
    |     cannot use `+=` on type `&str`
 
 error[E0599]: no method named `z` found for reference `&str` in the current scope
-  --> $DIR/error-festival.rs:16:7
+  --> $DIR/error-festival.rs:18:7
    |
 LL |     x.z();
    |       ^ method not found in `&str`
 
 error[E0600]: cannot apply unary operator `!` to type `Question`
-  --> $DIR/error-festival.rs:19:5
+  --> $DIR/error-festival.rs:21:5
    |
 LL |     !Question::Yes;
    |     ^^^^^^^^^^^^^^ cannot apply unary operator `!`
    |
 note: an implementation of `Not` might be missing for `Question`
-  --> $DIR/error-festival.rs:1:1
+  --> $DIR/error-festival.rs:3:1
    |
 LL | enum Question {
    | ^^^^^^^^^^^^^ must implement `Not`
@@ -55,7 +55,7 @@ note: the trait `Not` must be implemented
   --> $SRC_DIR/core/src/ops/bit.rs:LL:COL
 
 error[E0604]: only `u8` can be cast as `char`, not `u32`
-  --> $DIR/error-festival.rs:25:5
+  --> $DIR/error-festival.rs:27:5
    |
 LL |     0u32 as char;
    |     ^^^^^^^^^^^^
@@ -64,13 +64,13 @@ LL |     0u32 as char;
    |     help: try `char::from_u32` instead: `char::from_u32(0u32)`
 
 error[E0605]: non-primitive cast: `u8` as `Vec<u8>`
-  --> $DIR/error-festival.rs:29:5
+  --> $DIR/error-festival.rs:31:5
    |
 LL |     x as Vec<u8>;
    |     ^^^^^^^^^^^^ an `as` expression can only be used to convert between primitive types or to coerce to a specific trait object
 
 error[E0054]: cannot cast `{integer}` as `bool`
-  --> $DIR/error-festival.rs:33:24
+  --> $DIR/error-festival.rs:35:24
    |
 LL |     let x_is_nonzero = x as bool;
    |                        ^^^^^^^^^
@@ -82,7 +82,7 @@ LL +     let x_is_nonzero = x != 0;
    |
 
 error[E0606]: casting `&u8` as `u32` is invalid
-  --> $DIR/error-festival.rs:37:18
+  --> $DIR/error-festival.rs:39:18
    |
 LL |     let y: u32 = x as u32;
    |                  ^^^^^^^^
@@ -93,7 +93,7 @@ LL |     let y: u32 = *x as u32;
    |                  +
 
 error[E0607]: cannot cast thin pointer `*const u8` to wide pointer `*const [u8]`
-  --> $DIR/error-festival.rs:41:5
+  --> $DIR/error-festival.rs:43:5
    |
 LL |     v as *const [u8];
    |     ^^^^^^^^^^^^^^^^
diff --git a/tests/ui/non-copyable-void.rs b/tests/ui/non-copyable-void.rs
deleted file mode 100644
index 55bad82bc339d..0000000000000
--- a/tests/ui/non-copyable-void.rs
+++ /dev/null
@@ -1,10 +0,0 @@
-use std::ffi::c_void;
-
-fn main() {
-    let x : *const Vec<isize> = &vec![1,2,3];
-    let y : *const c_void = x as *const c_void;
-    unsafe {
-        let _z = (*y).clone();
-        //~^ ERROR no method named `clone` found
-    }
-}
diff --git a/tests/ui/non-copyable-void.stderr b/tests/ui/non-copyable-void.stderr
deleted file mode 100644
index 373557fa01a4b..0000000000000
--- a/tests/ui/non-copyable-void.stderr
+++ /dev/null
@@ -1,9 +0,0 @@
-error[E0599]: no method named `clone` found for enum `c_void` in the current scope
-  --> $DIR/non-copyable-void.rs:7:23
-   |
-LL |         let _z = (*y).clone();
-   |                       ^^^^^ method not found in `c_void`
-
-error: aborting due to 1 previous error
-
-For more information about this error, try `rustc --explain E0599`.
diff --git a/tests/ui/panics/catch-unwind-bang.rs b/tests/ui/panics/catch-unwind-bang.rs
new file mode 100644
index 0000000000000..80eb377e5ca13
--- /dev/null
+++ b/tests/ui/panics/catch-unwind-bang.rs
@@ -0,0 +1,15 @@
+//! Check that the unwind machinery handles uninhabited types correctly.
+//! It used to call `std::mem::uninitialized::<!>();` at some point...
+//!
+//! See <https://github.com/rust-lang/rust/issues/39432>
+
+//@ run-pass
+//@ needs-unwind
+
+fn worker() -> ! {
+    panic!()
+}
+
+fn main() {
+    std::panic::catch_unwind(worker).unwrap_err();
+}
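
A minimal standalone sketch (not taken from this patch) of the behaviour the new test relies on: because `worker` never returns, the success type of `catch_unwind` is uninhabited, so the only possible outcome is `Err(payload)`.

    use std::panic;

    fn worker() -> ! {
        panic!("boom")
    }

    fn main() {
        // The `Ok` variant can never exist, so `unwrap_err` cannot fail here.
        // (The default panic hook still prints the panic message to stderr.)
        let payload = panic::catch_unwind(worker).unwrap_err();
        // `panic!` with a plain string literal carries a `&'static str` payload.
        assert_eq!(payload.downcast_ref::<&str>(), Some(&"boom"));
    }
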
diff --git a/tests/ui/rust-2018/auxiliary/dummy-crate.rs b/tests/ui/rust-2018/auxiliary/dummy-crate.rs
new file mode 100644
index 0000000000000..c9e8881600dea
--- /dev/null
+++ b/tests/ui/rust-2018/auxiliary/dummy-crate.rs
@@ -0,0 +1,2 @@
+// Intentionally blank; we only need an extern crate for
+// `removing-extern-crate.rs` and don't care about what's in it.
diff --git a/tests/ui/removing-extern-crate.fixed b/tests/ui/rust-2018/removing-extern-crate.fixed
similarity index 86%
rename from tests/ui/removing-extern-crate.fixed
rename to tests/ui/rust-2018/removing-extern-crate.fixed
index 477161fba804d..e88a84cc93e2a 100644
--- a/tests/ui/removing-extern-crate.fixed
+++ b/tests/ui/rust-2018/removing-extern-crate.fixed
@@ -1,5 +1,5 @@
 //@ edition:2018
-//@ aux-build:removing-extern-crate.rs
+//@ aux-build:dummy-crate.rs
 //@ run-rustfix
 //@ check-pass
 
diff --git a/tests/ui/removing-extern-crate.rs b/tests/ui/rust-2018/removing-extern-crate.rs
similarity index 52%
rename from tests/ui/removing-extern-crate.rs
rename to tests/ui/rust-2018/removing-extern-crate.rs
index 0b819482c7101..844377945e02f 100644
--- a/tests/ui/removing-extern-crate.rs
+++ b/tests/ui/rust-2018/removing-extern-crate.rs
@@ -1,15 +1,15 @@
 //@ edition:2018
-//@ aux-build:removing-extern-crate.rs
+//@ aux-build:dummy-crate.rs
 //@ run-rustfix
 //@ check-pass
 
 #![warn(rust_2018_idioms)]
 
-extern crate removing_extern_crate as foo; //~ WARNING unused extern crate
+extern crate dummy_crate as foo; //~ WARNING unused extern crate
 extern crate core; //~ WARNING unused extern crate
 
 mod another {
-    extern crate removing_extern_crate as foo; //~ WARNING unused extern crate
+    extern crate dummy_crate as foo; //~ WARNING unused extern crate
     extern crate core; //~ WARNING unused extern crate
 }
 
diff --git a/tests/ui/removing-extern-crate.stderr b/tests/ui/rust-2018/removing-extern-crate.stderr
similarity index 75%
rename from tests/ui/removing-extern-crate.stderr
rename to tests/ui/rust-2018/removing-extern-crate.stderr
index 4dddf160ce27b..573125426404d 100644
--- a/tests/ui/removing-extern-crate.stderr
+++ b/tests/ui/rust-2018/removing-extern-crate.stderr
@@ -1,8 +1,8 @@
 warning: unused extern crate
   --> $DIR/removing-extern-crate.rs:8:1
    |
-LL | extern crate removing_extern_crate as foo;
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove it
+LL | extern crate dummy_crate as foo;
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove it
    |
 note: the lint level is defined here
   --> $DIR/removing-extern-crate.rs:6:9
@@ -20,8 +20,8 @@ LL | extern crate core;
 warning: unused extern crate
   --> $DIR/removing-extern-crate.rs:12:5
    |
-LL |     extern crate removing_extern_crate as foo;
-   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove it
+LL |     extern crate dummy_crate as foo;
+   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove it
 
 warning: unused extern crate
   --> $DIR/removing-extern-crate.rs:13:5
diff --git a/tests/ui/sanitizer/asan_odr_windows.rs b/tests/ui/sanitizer/asan_odr_windows.rs
index c618ac02a66d8..28c2471676155 100644
--- a/tests/ui/sanitizer/asan_odr_windows.rs
+++ b/tests/ui/sanitizer/asan_odr_windows.rs
@@ -5,6 +5,8 @@
 //@ compile-flags:-Zsanitizer=address
 //@ aux-build: asan_odr_win-2.rs
 //@ only-windows-msvc
+//@ needs-sanitizer-support
+//@ needs-sanitizer-address
 
 extern crate othercrate;
 
diff --git a/tests/ui/target-feature/abi-incompatible-target-feature-attribute.riscv.stderr b/tests/ui/target-feature/abi-incompatible-target-feature-attribute.riscv.stderr
new file mode 100644
index 0000000000000..49c5479275f39
--- /dev/null
+++ b/tests/ui/target-feature/abi-incompatible-target-feature-attribute.riscv.stderr
@@ -0,0 +1,8 @@
+error: target feature `d` cannot be enabled with `#[target_feature]`: this feature is incompatible with the target ABI
+  --> $DIR/abi-incompatible-target-feature-attribute.rs:15:90
+   |
+LL | #[cfg_attr(x86, target_feature(enable = "soft-float"))] #[cfg_attr(riscv, target_feature(enable = "d"))]
+   |                                                                                          ^^^^^^^^^^^^
+
+error: aborting due to 1 previous error
+
diff --git a/tests/ui/target-feature/abi-incompatible-target-feature-attribute.rs b/tests/ui/target-feature/abi-incompatible-target-feature-attribute.rs
new file mode 100644
index 0000000000000..a873344075920
--- /dev/null
+++ b/tests/ui/target-feature/abi-incompatible-target-feature-attribute.rs
@@ -0,0 +1,17 @@
+//! Ensure ABI-incompatible features cannot be enabled via `#[target_feature]`.
+// ignore-tidy-linelength
+//@ compile-flags: --crate-type=lib
+//@ revisions: x86 riscv
+//@[x86] compile-flags: --target=x86_64-unknown-linux-gnu
+//@[x86] needs-llvm-components: x86
+//@[riscv] compile-flags: --target=riscv32e-unknown-none-elf
+//@[riscv] needs-llvm-components: riscv
+#![feature(no_core, lang_items, riscv_target_feature, x87_target_feature)]
+#![no_core]
+
+#[lang = "sized"]
+pub trait Sized {}
+
+#[cfg_attr(x86, target_feature(enable = "soft-float"))] #[cfg_attr(riscv, target_feature(enable = "d"))]
+//~^ERROR: cannot be enabled with
+pub unsafe fn my_fun() {}
diff --git a/tests/ui/target-feature/abi-incompatible-target-feature-attribute.x86.stderr b/tests/ui/target-feature/abi-incompatible-target-feature-attribute.x86.stderr
new file mode 100644
index 0000000000000..81471fd7e303e
--- /dev/null
+++ b/tests/ui/target-feature/abi-incompatible-target-feature-attribute.x86.stderr
@@ -0,0 +1,8 @@
+error: target feature `soft-float` cannot be enabled with `#[target_feature]`: this feature is incompatible with the target ABI
+  --> $DIR/abi-incompatible-target-feature-attribute.rs:15:32
+   |
+LL | #[cfg_attr(x86, target_feature(enable = "soft-float"))] #[cfg_attr(riscv, target_feature(enable = "d"))]
+   |                                ^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 1 previous error
+
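
For readers wondering why `d` (RISC-V) and `soft-float` (x86) are singled out: these features affect how floating-point values are passed between functions, so enabling one of them on a single function could make that function disagree with its callers about the calling convention. A rough illustration of the rejected pattern, mirroring the deleted `forbidden-hardfloat-target-feature-attribute.rs` test elsewhere in this patch (sketch only, with an illustrative function name; it needs the unstable `riscv_target_feature` gate and `--target=riscv32e-unknown-none-elf`):

    #![feature(riscv_target_feature)]

    // Rejected with: error: target feature `d` cannot be enabled with
    // `#[target_feature]`: this feature is incompatible with the target ABI
    #[target_feature(enable = "d")]
    pub unsafe fn takes_a_double(x: f64) -> f64 {
        x
    }
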
diff --git a/tests/ui/target-feature/abi-incompatible-target-feature-flag-enable.riscv.stderr b/tests/ui/target-feature/abi-incompatible-target-feature-flag-enable.riscv.stderr
new file mode 100644
index 0000000000000..2dca0c220332b
--- /dev/null
+++ b/tests/ui/target-feature/abi-incompatible-target-feature-flag-enable.riscv.stderr
@@ -0,0 +1,19 @@
+warning: target feature `d` must be disabled to ensure that the ABI of the current target can be implemented correctly
+   |
+   = note: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+   = note: for more information, see issue #116344 <https://github.com/rust-lang/rust/issues/116344>
+
+warning: unstable feature specified for `-Ctarget-feature`: `d`
+   |
+   = note: this feature is not stably supported; its behavior can change in the future
+
+warning: unstable feature specified for `-Ctarget-feature`: `f`
+   |
+   = note: this feature is not stably supported; its behavior can change in the future
+
+warning: unstable feature specified for `-Ctarget-feature`: `zicsr`
+   |
+   = note: this feature is not stably supported; its behavior can change in the future
+
+warning: 4 warnings emitted
+
diff --git a/tests/ui/target-feature/abi-incompatible-target-feature-flag-enable.rs b/tests/ui/target-feature/abi-incompatible-target-feature-flag-enable.rs
new file mode 100644
index 0000000000000..68e1d3b9ddc61
--- /dev/null
+++ b/tests/ui/target-feature/abi-incompatible-target-feature-flag-enable.rs
@@ -0,0 +1,22 @@
+//! Ensure ABI-incompatible features cannot be enabled via `-Ctarget-feature`.
+// These are just warnings for now.
+//@ check-pass
+//@ compile-flags: --crate-type=lib
+//@ revisions: x86 riscv
+//@[x86] compile-flags: --target=x86_64-unknown-linux-gnu -Ctarget-feature=+soft-float
+//@[x86] needs-llvm-components: x86
+//@[riscv] compile-flags: --target=riscv32e-unknown-none-elf -Ctarget-feature=+d
+//@[riscv] needs-llvm-components: riscv
+
+#![feature(no_core, lang_items, riscv_target_feature)]
+#![no_core]
+
+#[lang = "sized"]
+pub trait Sized {}
+#[lang = "freeze"]
+pub trait Freeze {}
+
+//~? WARN must be disabled to ensure that the ABI of the current target can be implemented correctly
+//~? WARN unstable feature specified for `-Ctarget-feature`
+//[riscv]~? WARN unstable feature specified for `-Ctarget-feature`
+//[riscv]~? WARN unstable feature specified for `-Ctarget-feature`
diff --git a/tests/ui/target-feature/forbidden-hardfloat-target-feature-flag.stderr b/tests/ui/target-feature/abi-incompatible-target-feature-flag-enable.x86.stderr
similarity index 100%
rename from tests/ui/target-feature/forbidden-hardfloat-target-feature-flag.stderr
rename to tests/ui/target-feature/abi-incompatible-target-feature-flag-enable.x86.stderr
diff --git a/tests/ui/target-feature/allowed-softfloat-target-feature-flag-disable.rs b/tests/ui/target-feature/abi-irrelevant-target-feature-flag-disable.rs
similarity index 59%
rename from tests/ui/target-feature/allowed-softfloat-target-feature-flag-disable.rs
rename to tests/ui/target-feature/abi-irrelevant-target-feature-flag-disable.rs
index 7368ef120fa6e..0013d033b9c5e 100644
--- a/tests/ui/target-feature/allowed-softfloat-target-feature-flag-disable.rs
+++ b/tests/ui/target-feature/abi-irrelevant-target-feature-flag-disable.rs
@@ -1,3 +1,6 @@
+//! `x87` is a required target feature on some x86 targets, but not on this one, since it
+//! uses soft-floats. So ensure that disabling the target feature here (which is a no-op)
+//! does not trigger a warning.
 //@ compile-flags: --target=x86_64-unknown-none --crate-type=lib
 //@ needs-llvm-components: x86
 //@ compile-flags: -Ctarget-feature=-x87
diff --git a/tests/ui/target-feature/allowed-softfloat-target-feature-flag-disable.stderr b/tests/ui/target-feature/abi-irrelevant-target-feature-flag-disable.stderr
similarity index 100%
rename from tests/ui/target-feature/allowed-softfloat-target-feature-flag-disable.stderr
rename to tests/ui/target-feature/abi-irrelevant-target-feature-flag-disable.stderr
diff --git a/tests/ui/target-feature/allowed-softfloat-target-feature-attribute.rs b/tests/ui/target-feature/abi-required-target-feature-attribute.rs
similarity index 69%
rename from tests/ui/target-feature/allowed-softfloat-target-feature-attribute.rs
rename to tests/ui/target-feature/abi-required-target-feature-attribute.rs
index 8b60820cc9b68..95723c57f94d4 100644
--- a/tests/ui/target-feature/allowed-softfloat-target-feature-attribute.rs
+++ b/tests/ui/target-feature/abi-required-target-feature-attribute.rs
@@ -1,3 +1,5 @@
+//! Enabling a target feature that is required anyway changes nothing, so this is allowed
+//! for `#[target_feature]`.
 //@ compile-flags: --target=x86_64-unknown-none --crate-type=lib
 //@ needs-llvm-components: x86
 //@ build-pass
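
The same rule can be seen with a stable feature on a hard-float target (my own example, not taken from this patch; the function name is illustrative): `sse2` is already required by the x86-64 ABI, so re-enabling it changes nothing and is accepted.

    // Build this for an ordinary x86-64 target; the attribute is a no-op there
    // because `sse2` is already part of the required baseline.
    #[target_feature(enable = "sse2")]
    unsafe fn noop_enable() {}

    fn main() {
        unsafe { noop_enable() }
    }
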
diff --git a/tests/ui/target-feature/forbidden-hardfloat-target-feature-flag-disable-neon.stderr b/tests/ui/target-feature/abi-required-target-feature-flag-disable.aarch64.stderr
similarity index 100%
rename from tests/ui/target-feature/forbidden-hardfloat-target-feature-flag-disable-neon.stderr
rename to tests/ui/target-feature/abi-required-target-feature-flag-disable.aarch64.stderr
diff --git a/tests/ui/target-feature/abi-required-target-feature-flag-disable.loongarch.stderr b/tests/ui/target-feature/abi-required-target-feature-flag-disable.loongarch.stderr
new file mode 100644
index 0000000000000..e8df521e1bd91
--- /dev/null
+++ b/tests/ui/target-feature/abi-required-target-feature-flag-disable.loongarch.stderr
@@ -0,0 +1,13 @@
+warning: target feature `d` must be enabled to ensure that the ABI of the current target can be implemented correctly
+   |
+   = note: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+   = note: for more information, see issue #116344 <https://github.com/rust-lang/rust/issues/116344>
+
+warning: unstable feature specified for `-Ctarget-feature`: `d`
+   |
+   = note: this feature is not stably supported; its behavior can change in the future
+
+warning: both target-abi and the triple-implied ABI are invalid, ignoring and using feature-implied ABI
+warning: 'lp64f' has not been standardized
+warning: 2 warnings emitted
+
diff --git a/tests/ui/target-feature/abi-required-target-feature-flag-disable.riscv.stderr b/tests/ui/target-feature/abi-required-target-feature-flag-disable.riscv.stderr
new file mode 100644
index 0000000000000..919c49dcf9657
--- /dev/null
+++ b/tests/ui/target-feature/abi-required-target-feature-flag-disable.riscv.stderr
@@ -0,0 +1,12 @@
+warning: target feature `d` must be enabled to ensure that the ABI of the current target can be implemented correctly
+   |
+   = note: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+   = note: for more information, see issue #116344 <https://github.com/rust-lang/rust/issues/116344>
+
+warning: unstable feature specified for `-Ctarget-feature`: `d`
+   |
+   = note: this feature is not stably supported; its behavior can change in the future
+
+Hard-float 'd' ABI can't be used for a target that doesn't support the D instruction set extension (ignoring target-abi)
+warning: 2 warnings emitted
+
diff --git a/tests/ui/target-feature/abi-required-target-feature-flag-disable.rs b/tests/ui/target-feature/abi-required-target-feature-flag-disable.rs
new file mode 100644
index 0000000000000..1788c0bf5b303
--- /dev/null
+++ b/tests/ui/target-feature/abi-required-target-feature-flag-disable.rs
@@ -0,0 +1,27 @@
+//! Ensure ABI-required features cannot be disabled via `-Ctarget-feature`.
+//! Also covers the case of a feature indirectly disabling another via feature implications.
+//@ compile-flags: --crate-type=lib
+//@ revisions: x86 x86-implied aarch64 riscv loongarch
+//@[x86] compile-flags: --target=x86_64-unknown-linux-gnu -Ctarget-feature=-x87
+//@[x86] needs-llvm-components: x86
+//@[x86-implied] compile-flags: --target=x86_64-unknown-linux-gnu -Ctarget-feature=-sse
+//@[x86-implied] needs-llvm-components: x86
+//@[aarch64] compile-flags: --target=aarch64-unknown-linux-gnu -Ctarget-feature=-neon
+//@[aarch64] needs-llvm-components: aarch64
+//@[riscv] compile-flags: --target=riscv64gc-unknown-none-elf -Ctarget-feature=-d
+//@[riscv] needs-llvm-components: riscv
+//@[riscv] min-llvm-version: 20
+//@[loongarch] compile-flags: --target=loongarch64-unknown-none -Ctarget-feature=-d
+//@[loongarch] needs-llvm-components: loongarch
+//@[loongarch] min-llvm-version: 20
+// For now this is just a warning.
+//@ build-pass
+
+#![feature(no_core, lang_items)]
+#![no_core]
+
+#[lang = "sized"]
+pub trait Sized {}
+
+//~? WARN must be enabled to ensure that the ABI of the current target can be implemented correctly
+//[x86,riscv,loongarch]~? WARN unstable feature specified for `-Ctarget-feature`
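
As a concrete illustration of why these features are ABI-relevant (my own sketch, not part of this patch; the function name is illustrative): on `x86_64-unknown-linux-gnu` the C calling convention returns floating-point values in an SSE register, so code built with `-Ctarget-feature=-sse` (which, through feature implications, also disables `sse2`) could no longer honour that ABI. That is what the warning above guards against.

    // Builds and runs on an ordinary x86-64 host; the point is only that the
    // ABI of this function depends on SSE being available.
    pub extern "C" fn returns_in_xmm0() -> f64 {
        1.0
    }

    fn main() {
        assert_eq!(returns_in_xmm0(), 1.0);
    }
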
diff --git a/tests/ui/target-feature/forbidden-hardfloat-target-feature-flag-disable-implied.stderr b/tests/ui/target-feature/abi-required-target-feature-flag-disable.x86-implied.stderr
similarity index 100%
rename from tests/ui/target-feature/forbidden-hardfloat-target-feature-flag-disable-implied.stderr
rename to tests/ui/target-feature/abi-required-target-feature-flag-disable.x86-implied.stderr
diff --git a/tests/ui/target-feature/forbidden-hardfloat-target-feature-flag-disable.stderr b/tests/ui/target-feature/abi-required-target-feature-flag-disable.x86.stderr
similarity index 100%
rename from tests/ui/target-feature/forbidden-hardfloat-target-feature-flag-disable.stderr
rename to tests/ui/target-feature/abi-required-target-feature-flag-disable.x86.stderr
diff --git a/tests/ui/target-feature/forbidden-hardfloat-target-feature-attribute.rs b/tests/ui/target-feature/forbidden-hardfloat-target-feature-attribute.rs
deleted file mode 100644
index 215e64979f736..0000000000000
--- a/tests/ui/target-feature/forbidden-hardfloat-target-feature-attribute.rs
+++ /dev/null
@@ -1,12 +0,0 @@
-//! Ensure ABI-incompatible features cannot be enabled via `#[target_feature]`.
-//@ compile-flags: --target=riscv32e-unknown-none-elf --crate-type=lib
-//@ needs-llvm-components: riscv
-#![feature(no_core, lang_items, riscv_target_feature)]
-#![no_core]
-
-#[lang = "sized"]
-pub trait Sized {}
-
-#[target_feature(enable = "d")]
-//~^ERROR: cannot be enabled with
-pub unsafe fn my_fun() {}
diff --git a/tests/ui/target-feature/forbidden-hardfloat-target-feature-attribute.stderr b/tests/ui/target-feature/forbidden-hardfloat-target-feature-attribute.stderr
deleted file mode 100644
index bfe767e5ffb07..0000000000000
--- a/tests/ui/target-feature/forbidden-hardfloat-target-feature-attribute.stderr
+++ /dev/null
@@ -1,8 +0,0 @@
-error: target feature `d` cannot be enabled with `#[target_feature]`: this feature is incompatible with the target ABI
-  --> $DIR/forbidden-hardfloat-target-feature-attribute.rs:10:18
-   |
-LL | #[target_feature(enable = "d")]
-   |                  ^^^^^^^^^^^^
-
-error: aborting due to 1 previous error
-
diff --git a/tests/ui/target-feature/forbidden-hardfloat-target-feature-flag-disable-implied.rs b/tests/ui/target-feature/forbidden-hardfloat-target-feature-flag-disable-implied.rs
deleted file mode 100644
index 12e7e3bc45b31..0000000000000
--- a/tests/ui/target-feature/forbidden-hardfloat-target-feature-flag-disable-implied.rs
+++ /dev/null
@@ -1,15 +0,0 @@
-//! Ensure that if disabling a target feature implies disabling an ABI-required target feature,
-//! we complain.
-//@ compile-flags: --target=x86_64-unknown-linux-gnu --crate-type=lib
-//@ needs-llvm-components: x86
-//@ compile-flags: -Ctarget-feature=-sse
-// For now this is just a warning.
-//@ build-pass
-
-#![feature(no_core, lang_items)]
-#![no_core]
-
-#[lang = "sized"]
-pub trait Sized {}
-
-//~? WARN target feature `sse2` must be enabled to ensure that the ABI of the current target can be implemented correctly
diff --git a/tests/ui/target-feature/forbidden-hardfloat-target-feature-flag-disable-neon.rs b/tests/ui/target-feature/forbidden-hardfloat-target-feature-flag-disable-neon.rs
deleted file mode 100644
index 33e4f12694f36..0000000000000
--- a/tests/ui/target-feature/forbidden-hardfloat-target-feature-flag-disable-neon.rs
+++ /dev/null
@@ -1,13 +0,0 @@
-//@ compile-flags: --target=aarch64-unknown-linux-gnu --crate-type=lib
-//@ needs-llvm-components: aarch64
-//@ compile-flags: -Ctarget-feature=-neon
-// For now this is just a warning.
-//@ build-pass
-
-#![feature(no_core, lang_items)]
-#![no_core]
-
-#[lang = "sized"]
-pub trait Sized {}
-
-//~? WARN target feature `neon` must be enabled to ensure that the ABI of the current target can be implemented correctly
diff --git a/tests/ui/target-feature/forbidden-hardfloat-target-feature-flag-disable.rs b/tests/ui/target-feature/forbidden-hardfloat-target-feature-flag-disable.rs
deleted file mode 100644
index e1bd25ffad1e9..0000000000000
--- a/tests/ui/target-feature/forbidden-hardfloat-target-feature-flag-disable.rs
+++ /dev/null
@@ -1,15 +0,0 @@
-//! Ensure ABI-required features cannot be disabled via `-Ctarget-feature`.
-//@ compile-flags: --target=x86_64-unknown-linux-gnu --crate-type=lib
-//@ needs-llvm-components: x86
-//@ compile-flags: -Ctarget-feature=-x87
-// For now this is just a warning.
-//@ build-pass
-
-#![feature(no_core, lang_items)]
-#![no_core]
-
-#[lang = "sized"]
-pub trait Sized {}
-
-//~? WARN target feature `x87` must be enabled to ensure that the ABI of the current target can be implemented correctly
-//~? WARN unstable feature specified for `-Ctarget-feature`: `x87`
diff --git a/tests/ui/target-feature/forbidden-hardfloat-target-feature-flag.rs b/tests/ui/target-feature/forbidden-hardfloat-target-feature-flag.rs
deleted file mode 100644
index 4ccc6e0e941f1..0000000000000
--- a/tests/ui/target-feature/forbidden-hardfloat-target-feature-flag.rs
+++ /dev/null
@@ -1,15 +0,0 @@
-//! Ensure ABI-incompatible features cannot be enabled via `-Ctarget-feature`.
-//@ compile-flags: --target=x86_64-unknown-linux-gnu --crate-type=lib
-//@ needs-llvm-components: x86
-//@ compile-flags: -Ctarget-feature=+soft-float
-// For now this is just a warning.
-//@ build-pass
-
-#![feature(no_core, lang_items, riscv_target_feature)]
-#![no_core]
-
-#[lang = "sized"]
-pub trait Sized {}
-
-//~? WARN target feature `soft-float` must be disabled to ensure that the ABI of the current target can be implemented correctl
-//~? WARN unstable feature specified for `-Ctarget-feature`: `soft-float`
diff --git a/tests/ui/transmutability/unions/extension.rs b/tests/ui/transmutability/unions/extension.rs
new file mode 100644
index 0000000000000..eb4dcd4dff3df
--- /dev/null
+++ b/tests/ui/transmutability/unions/extension.rs
@@ -0,0 +1,12 @@
+#![crate_type = "lib"]
+#![feature(transmutability)]
+use std::mem::{Assume, MaybeUninit, TransmuteFrom};
+
+pub fn is_maybe_transmutable<Src, Dst>()
+    where Dst: TransmuteFrom<Src, { Assume::VALIDITY.and(Assume::SAFETY) }>
+{}
+
+fn extension() {
+    is_maybe_transmutable::<(), MaybeUninit<u8>>();
+    is_maybe_transmutable::<MaybeUninit<u8>, [u8; 2]>(); //~ ERROR  `MaybeUninit<u8>` cannot be safely transmuted into `[u8; 2]`
+}
diff --git a/tests/ui/transmutability/unions/extension.stderr b/tests/ui/transmutability/unions/extension.stderr
new file mode 100644
index 0000000000000..c99e46f3d12b7
--- /dev/null
+++ b/tests/ui/transmutability/unions/extension.stderr
@@ -0,0 +1,17 @@
+error[E0277]: `MaybeUninit<u8>` cannot be safely transmuted into `[u8; 2]`
+  --> $DIR/extension.rs:11:46
+   |
+LL |     is_maybe_transmutable::<MaybeUninit<u8>, [u8; 2]>();
+   |                                              ^^^^^^^ the size of `MaybeUninit<u8>` is smaller than the size of `[u8; 2]`
+   |
+note: required by a bound in `is_maybe_transmutable`
+  --> $DIR/extension.rs:6:16
+   |
+LL | pub fn is_maybe_transmutable<Src, Dst>()
+   |        --------------------- required by a bound in this function
+LL |     where Dst: TransmuteFrom<Src, { Assume::VALIDITY.and(Assume::SAFETY) }>
+   |                ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `is_maybe_transmutable`
+
+error: aborting due to 1 previous error
+
+For more information about this error, try `rustc --explain E0277`.
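
A small standalone check of the size argument the diagnostic above makes (my own sketch, not part of this patch):

    use std::mem::{size_of, MaybeUninit};

    fn main() {
        // The rejection is purely about sizes: the source is one byte, the
        // destination is two.
        assert_eq!(size_of::<MaybeUninit<u8>>(), 1);
        assert_eq!(size_of::<[u8; 2]>(), 2);
    }
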
diff --git a/tests/ui/transmutability/unions/init_as_uninit.rs b/tests/ui/transmutability/unions/init_as_uninit.rs
new file mode 100644
index 0000000000000..d14eca800ef5d
--- /dev/null
+++ b/tests/ui/transmutability/unions/init_as_uninit.rs
@@ -0,0 +1,26 @@
+//@ check-pass
+// Regression test for issue #140337.
+#![crate_type = "lib"]
+#![feature(transmutability)]
+#![allow(dead_code)]
+use std::mem::{Assume, MaybeUninit, TransmuteFrom};
+
+pub fn is_transmutable<Src, Dst>()
+where
+    Dst: TransmuteFrom<Src, { Assume::SAFETY }>
+{}
+
+#[derive(Copy, Clone)]
+#[repr(u8)]
+pub enum B0 { Value = 0 }
+
+#[derive(Copy, Clone)]
+#[repr(u8)]
+pub enum B1 { Value = 1 }
+
+fn main() {
+    is_transmutable::<(B0, B0), MaybeUninit<(B0, B0)>>();
+    is_transmutable::<(B0, B0), MaybeUninit<(B0, B1)>>();
+    is_transmutable::<(B0, B0), MaybeUninit<(B1, B0)>>();
+    is_transmutable::<(B0, B0), MaybeUninit<(B1, B1)>>();
+}
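
The property this regression test pins down can also be seen outside the trait system (my own sketch, not part of this patch): `MaybeUninit<T>` places no validity requirement on its bytes, so a fully initialized value of the right size is always an acceptable source, even when its bytes would not form a valid `T`.

    use std::mem::{self, MaybeUninit};

    #[allow(dead_code)]
    #[repr(u8)]
    enum B1 {
        Value = 1,
    }

    fn main() {
        // The byte `0` is not a valid `B1`, but it is a perfectly valid
        // `MaybeUninit<B1>`, because `MaybeUninit` has no validity invariant.
        let dst: MaybeUninit<B1> = unsafe { mem::transmute(0u8) };
        let _ = dst;
    }
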
diff --git a/tests/ui/transmutability/unions/should_permit_intersecting_if_validity_is_assumed.rs b/tests/ui/transmutability/unions/should_permit_intersecting_if_validity_is_assumed.rs
index 359ba51543981..24c6fa2e6ac0f 100644
--- a/tests/ui/transmutability/unions/should_permit_intersecting_if_validity_is_assumed.rs
+++ b/tests/ui/transmutability/unions/should_permit_intersecting_if_validity_is_assumed.rs
@@ -34,4 +34,19 @@ fn test() {
 
     assert::is_maybe_transmutable::<A, B>();
     assert::is_maybe_transmutable::<B, A>();
+
+    #[repr(C)]
+    struct C {
+        a: Ox00,
+        b: Ox00,
+    }
+
+    #[repr(C, align(2))]
+    struct D {
+        a: Ox00,
+    }
+
+    assert::is_maybe_transmutable::<C, D>();
+    // With Assume::VALIDITY a padding byte can hold any value.
+    assert::is_maybe_transmutable::<D, C>();
 }
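
A layout sanity check for the new structs (my own sketch, with plain `u8` fields standing in for the test's `Ox00` type, not part of this patch): `D` is padded up to its alignment, so both types occupy two bytes, and the second byte of `D` is padding, which `Assume::VALIDITY` allows to hold any value.

    use std::mem::{align_of, size_of};

    #[allow(dead_code)]
    #[repr(C)]
    struct C {
        a: u8,
        b: u8,
    }

    #[allow(dead_code)]
    #[repr(C, align(2))]
    struct D {
        a: u8,
    }

    fn main() {
        assert_eq!(size_of::<C>(), 2);
        assert_eq!((size_of::<D>(), align_of::<D>()), (2, 2));
    }
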
diff --git a/tests/ui/unpretty/expanded-exhaustive.stdout b/tests/ui/unpretty/exhaustive.expanded.stdout
similarity index 97%
rename from tests/ui/unpretty/expanded-exhaustive.stdout
rename to tests/ui/unpretty/exhaustive.expanded.stdout
index 841edf63c9191..ad536bd34f178 100644
--- a/tests/ui/unpretty/expanded-exhaustive.stdout
+++ b/tests/ui/unpretty/exhaustive.expanded.stdout
@@ -1,7 +1,13 @@
 #![feature(prelude_import)]
-//@ compile-flags: -Zunpretty=expanded
+//@ revisions: expanded hir
+//@[expanded]compile-flags: -Zunpretty=expanded
+//@[expanded]check-pass
+//@[hir]compile-flags: -Zunpretty=hir
+//@[hir]check-fail
 //@ edition:2024
-//@ check-pass
+
+// Note: the HIR revision includes a `.stderr` file because there are some
+// errors that only occur once we get past the AST.
 
 #![feature(auto_traits)]
 #![feature(box_patterns)]
@@ -211,7 +217,10 @@ mod expressions {
     }
 
     /// ExprKind::Await
-    fn expr_await() { let fut; fut.await; }
+    fn expr_await() {
+        let fut;
+        fut.await;
+    }
 
     /// ExprKind::TryBlock
     fn expr_try_block() { try {} try { return; } }
@@ -242,7 +251,9 @@ mod expressions {
     }
 
     /// ExprKind::Underscore
-    fn expr_underscore() { _; }
+    fn expr_underscore() {
+        _;
+    }
 
     /// ExprKind::Path
     fn expr_path() {
@@ -300,65 +311,12 @@ mod expressions {
 
 
 
-
-
-
-
-
-
         // ...
 
 
 
 
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
         // concat_idents is deprecated
 
 
diff --git a/tests/ui/unpretty/exhaustive.hir.stderr b/tests/ui/unpretty/exhaustive.hir.stderr
new file mode 100644
index 0000000000000..83e24ec3f66f0
--- /dev/null
+++ b/tests/ui/unpretty/exhaustive.hir.stderr
@@ -0,0 +1,172 @@
+error[E0697]: closures cannot be static
+  --> $DIR/exhaustive.rs:211:9
+   |
+LL |         static || value;
+   |         ^^^^^^^^^
+
+error[E0697]: closures cannot be static
+  --> $DIR/exhaustive.rs:212:9
+   |
+LL |         static move || value;
+   |         ^^^^^^^^^^^^^^
+
+error[E0728]: `await` is only allowed inside `async` functions and blocks
+  --> $DIR/exhaustive.rs:241:13
+   |
+LL |     fn expr_await() {
+   |     --------------- this is not `async`
+LL |         let fut;
+LL |         fut.await;
+   |             ^^^^^ only allowed inside `async` functions and blocks
+
+error: in expressions, `_` can only be used on the left-hand side of an assignment
+  --> $DIR/exhaustive.rs:290:9
+   |
+LL |         _;
+   |         ^ `_` not allowed here
+
+error[E0214]: parenthesized type parameters may only be used with a `Fn` trait
+  --> $DIR/exhaustive.rs:300:9
+   |
+LL |         x::();
+   |         ^^^^^ only `Fn` traits may use parentheses
+
+error[E0214]: parenthesized type parameters may only be used with a `Fn` trait
+  --> $DIR/exhaustive.rs:301:9
+   |
+LL |         x::(T, T) -> T;
+   |         ^^^^^^^^^^^^^^ only `Fn` traits may use parentheses
+   |
+help: use angle brackets instead
+   |
+LL -         x::(T, T) -> T;
+LL +         x::<T, T> -> T;
+   |
+
+error[E0214]: parenthesized type parameters may only be used with a `Fn` trait
+  --> $DIR/exhaustive.rs:302:9
+   |
+LL |         crate::() -> ()::expressions::() -> ()::expr_path;
+   |         ^^^^^^^^^^^^^^^ only `Fn` traits may use parentheses
+
+error[E0214]: parenthesized type parameters may only be used with a `Fn` trait
+  --> $DIR/exhaustive.rs:302:26
+   |
+LL |         crate::() -> ()::expressions::() -> ()::expr_path;
+   |                          ^^^^^^^^^^^^^^^^^^^^^ only `Fn` traits may use parentheses
+
+error[E0214]: parenthesized type parameters may only be used with a `Fn` trait
+  --> $DIR/exhaustive.rs:305:9
+   |
+LL |         core::()::marker::()::PhantomData;
+   |         ^^^^^^^^ only `Fn` traits may use parentheses
+
+error[E0214]: parenthesized type parameters may only be used with a `Fn` trait
+  --> $DIR/exhaustive.rs:305:19
+   |
+LL |         core::()::marker::()::PhantomData;
+   |                   ^^^^^^^^^^ only `Fn` traits may use parentheses
+
+error: `yield` can only be used in `#[coroutine]` closures, or `gen` blocks
+  --> $DIR/exhaustive.rs:403:9
+   |
+LL |         yield;
+   |         ^^^^^
+   |
+help: use `#[coroutine]` to make this closure a coroutine
+   |
+LL |     #[coroutine] fn expr_yield() {
+   |     ++++++++++++
+
+error[E0703]: invalid ABI: found `C++`
+  --> $DIR/exhaustive.rs:483:23
+   |
+LL |         unsafe extern "C++" {}
+   |                       ^^^^^ invalid ABI
+   |
+   = note: invoke `rustc --print=calling-conventions` for a full list of supported calling conventions
+
+error: `..` patterns are not allowed here
+  --> $DIR/exhaustive.rs:693:13
+   |
+LL |         let ..;
+   |             ^^
+   |
+   = note: only allowed in tuple, tuple struct, and slice patterns
+
+error[E0214]: parenthesized type parameters may only be used with a `Fn` trait
+  --> $DIR/exhaustive.rs:808:16
+   |
+LL |         let _: T() -> !;
+   |                ^^^^^^^^ only `Fn` traits may use parentheses
+
+error[E0562]: `impl Trait` is not allowed in the type of variable bindings
+  --> $DIR/exhaustive.rs:823:16
+   |
+LL |         let _: impl Send;
+   |                ^^^^^^^^^
+   |
+   = note: `impl Trait` is only allowed in arguments and return types of functions and methods
+   = note: see issue #63065 <https://github.com/rust-lang/rust/issues/63065> for more information
+   = help: add `#![feature(impl_trait_in_bindings)]` to the crate attributes to enable
+   = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
+
+error[E0562]: `impl Trait` is not allowed in the type of variable bindings
+  --> $DIR/exhaustive.rs:824:16
+   |
+LL |         let _: impl Send + 'static;
+   |                ^^^^^^^^^^^^^^^^^^^
+   |
+   = note: `impl Trait` is only allowed in arguments and return types of functions and methods
+   = note: see issue #63065 <https://github.com/rust-lang/rust/issues/63065> for more information
+   = help: add `#![feature(impl_trait_in_bindings)]` to the crate attributes to enable
+   = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
+
+error[E0562]: `impl Trait` is not allowed in the type of variable bindings
+  --> $DIR/exhaustive.rs:825:16
+   |
+LL |         let _: impl 'static + Send;
+   |                ^^^^^^^^^^^^^^^^^^^
+   |
+   = note: `impl Trait` is only allowed in arguments and return types of functions and methods
+   = note: see issue #63065 <https://github.com/rust-lang/rust/issues/63065> for more information
+   = help: add `#![feature(impl_trait_in_bindings)]` to the crate attributes to enable
+   = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
+
+error[E0562]: `impl Trait` is not allowed in the type of variable bindings
+  --> $DIR/exhaustive.rs:826:16
+   |
+LL |         let _: impl ?Sized;
+   |                ^^^^^^^^^^^
+   |
+   = note: `impl Trait` is only allowed in arguments and return types of functions and methods
+   = note: see issue #63065 <https://github.com/rust-lang/rust/issues/63065> for more information
+   = help: add `#![feature(impl_trait_in_bindings)]` to the crate attributes to enable
+   = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
+
+error[E0562]: `impl Trait` is not allowed in the type of variable bindings
+  --> $DIR/exhaustive.rs:827:16
+   |
+LL |         let _: impl ~const Clone;
+   |                ^^^^^^^^^^^^^^^^^
+   |
+   = note: `impl Trait` is only allowed in arguments and return types of functions and methods
+   = note: see issue #63065 <https://github.com/rust-lang/rust/issues/63065> for more information
+   = help: add `#![feature(impl_trait_in_bindings)]` to the crate attributes to enable
+   = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
+
+error[E0562]: `impl Trait` is not allowed in the type of variable bindings
+  --> $DIR/exhaustive.rs:828:16
+   |
+LL |         let _: impl for<'a> Send;
+   |                ^^^^^^^^^^^^^^^^^
+   |
+   = note: `impl Trait` is only allowed in arguments and return types of functions and methods
+   = note: see issue #63065 <https://github.com/rust-lang/rust/issues/63065> for more information
+   = help: add `#![feature(impl_trait_in_bindings)]` to the crate attributes to enable
+   = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
+
+error: aborting due to 20 previous errors
+
+Some errors have detailed explanations: E0214, E0562, E0697, E0703, E0728.
+For more information about an error, try `rustc --explain E0214`.
diff --git a/tests/ui/unpretty/exhaustive.hir.stdout b/tests/ui/unpretty/exhaustive.hir.stdout
new file mode 100644
index 0000000000000..7741475e05d99
--- /dev/null
+++ b/tests/ui/unpretty/exhaustive.hir.stdout
@@ -0,0 +1,715 @@
+//@ revisions: expanded hir
+//@[expanded]compile-flags: -Zunpretty=expanded
+//@[expanded]check-pass
+//@[hir]compile-flags: -Zunpretty=hir
+//@[hir]check-fail
+//@ edition:2024
+
+// Note: the HIR revision includes a `.stderr` file because there are some
+// errors that only occur once we get past the AST.
+
+#![feature(auto_traits)]#![feature(box_patterns)]#![feature(builtin_syntax)]#![feature(concat_idents)]#![feature(const_trait_impl)]#![feature(decl_macro)]#![feature(deref_patterns)]#![feature(dyn_star)]#![feature(explicit_tail_calls)]#![feature(gen_blocks)]#![feature(more_qualified_paths)]#![feature(never_patterns)]#![feature(never_type)]#![feature(pattern_types)]#![feature(pattern_type_macro)]#![feature(prelude_import)]#![feature(specialization)]#![feature(trace_macros)]#![feature(trait_alias)]#![feature(try_blocks)]#![feature(yeet_expr)]#![allow(incomplete_features)]
+#[prelude_import]
+use std::prelude::rust_2024::*;
+#[macro_use]
+extern crate std;
+
+#[prelude_import]
+use self::prelude::*;
+
+mod prelude {
+    use std::prelude::rust_2024::*;
+
+    type T = _;
+
+    trait Trait {
+        const
+        CONST:
+        ();
+    }
+}
+
+//! inner single-line doc comment
+/*!
+     * inner multi-line doc comment
+     */
+#[doc = "inner doc attribute"]#[allow(dead_code, unused_variables)]#[no_std]
+mod attributes {//! inner single-line doc comment
+    /*!
+     * inner multi-line doc comment
+     */
+    #![doc =
+    "inner doc attribute"]#![allow(dead_code, unused_variables)]#![no_std]
+
+    /// outer single-line doc comment
+    /**
+     * outer multi-line doc comment
+     */
+    #[doc =
+    "outer doc attribute"]#[doc = "macro"]#[allow()]#[attr = Repr([ReprC])]
+    struct Struct;
+}
+
+mod expressions {
+    /// ExprKind::Array
+    fn expr_array() {
+        [];
+        [true];
+        [true];
+        [true, true];
+        ["long........................................................................"];
+        ["long............................................................",
+                true];
+    }
+
+    /// ExprKind::ConstBlock
+    fn expr_const_block() {
+        const { };
+        const { 1 };
+        const
+                {
+                    struct S;
+                };
+    }
+
+    /// ExprKind::Call
+    fn expr_call() {
+        let f;
+        f();
+        f::<u8>();
+        f::<1>();
+        f::<'static, u8, 1>();
+        f(true);
+        f(true);
+        ()();
+    }
+
+    /// ExprKind::MethodCall
+    fn expr_method_call() {
+        let x;
+        x.f();
+        x.f::<u8>();
+        x.collect::<Vec<_>>();
+    }
+
+    /// ExprKind::Tup
+    fn expr_tup() { (); (true,); (true, false); (true, false); }
+
+    /// ExprKind::Binary
+    fn expr_binary() {
+        let (a, b, c, d, x, y);
+        true || false;
+        true || false && false;
+        a < 1 && 2 < b && c > 3 && 4 > d;
+        a & b & !c;
+        a + b * c - d + -1 * -2 - -3;
+        x = !y;
+    }
+
+    /// ExprKind::Unary
+    fn expr_unary() { let expr; *expr; !expr; -expr; }
+
+    /// ExprKind::Lit
+    fn expr_lit() { 'x'; 1000i8; 1.00000000000000000000001; }
+
+    /// ExprKind::Cast
+    fn expr_cast() { let expr; expr as T; expr as T<u8>; }
+
+    /// ExprKind::Type
+    fn expr_type() { let expr; type_ascribe!(expr, T); }
+
+    /// ExprKind::Let
+    fn expr_let() {
+        let b;
+        if let Some(a) = b { }
+        if let _ = true && false { }
+        if let _ = (true && false) { }
+    }
+
+    /// ExprKind::If
+    fn expr_if() {
+        if true { }
+        if !true { }
+        if let true = true { } else { }
+        if true { } else if false { }
+        if true { } else if false { } else { }
+        if true { return; } else if false { 0 } else { 0 }
+    }
+
+    /// ExprKind::While
+    fn expr_while() {
+        loop { if false { } else { break; } }
+        'a: loop { if false { } else { break; } }
+        loop { if let true = true { } else { break; } }
+    }
+
+    /// ExprKind::ForLoop
+    fn expr_for_loop() {
+        let x;
+        {
+                let _t =
+                    match #[lang = "into_iter"](x) {
+                            mut iter =>
+                                loop {
+                                        match #[lang = "next"](&mut iter) {
+                                                #[lang = "None"] {} => break,
+                                                #[lang = "Some"] {  0: _ } => { }
+                                            }
+                                    },
+                        };
+                _t
+            };
+        {
+                let _t =
+                    match #[lang = "into_iter"](x) {
+                            mut iter =>
+                                'a:
+                                    loop {
+                                        match #[lang = "next"](&mut iter) {
+                                                #[lang = "None"] {} => break,
+                                                #[lang = "Some"] {  0: _ } => { }
+                                            }
+                                    },
+                        };
+                _t
+            }
+    }
+
+    /// ExprKind::Loop
+    fn expr_loop() { loop { } 'a: loop { } }
+
+    /// ExprKind::Match
+    fn expr_match() {
+        let value;
+        match value { }
+        match value { ok => 1, }
+        match value { ok => 1, err => 0, }
+    }
+
+    /// ExprKind::Closure
+    fn expr_closure() {
+        let value;
+        || { };
+        |x| { };
+        |x: u8| { };
+        || ();
+        move || value;
+        || |mut _task_context: ResumeTy| { { let _t = value; _t } };
+        move || |mut _task_context: ResumeTy| { { let _t = value; _t } };
+        || value;
+        move || value;
+        || |mut _task_context: ResumeTy| { { let _t = value; _t } };
+        move || |mut _task_context: ResumeTy| { { let _t = value; _t } };
+        || -> u8 { value };
+        1 + (|| { });
+    }
+
+    /// ExprKind::Block
+    fn expr_block() {
+        { }
+        unsafe { }
+        'a: { }
+        #[allow()]
+        { }
+        #[allow()]
+        { }
+    }
+
+    /// ExprKind::Gen
+    fn expr_gen() {
+        |mut _task_context: ResumeTy| { };
+        move |mut _task_context: ResumeTy| { };
+        || { };
+        move || { };
+        |mut _task_context: ResumeTy| { };
+        move |mut _task_context: ResumeTy| { };
+    }
+
+    /// ExprKind::Await
+    fn expr_await() {
+        let fut;
+        {
+            fut;
+            (/*ERROR*/)
+        };
+    }
+
+    /// ExprKind::TryBlock
+    fn expr_try_block() {
+        { #[lang = "from_output"](()) }
+        { return; #[lang = "from_output"](()) }
+    }
+
+    /// ExprKind::Assign
+    fn expr_assign() { let expr; expr = true; }
+
+    /// ExprKind::AssignOp
+    fn expr_assign_op() { let expr; expr += true; }
+
+    /// ExprKind::Field
+    fn expr_field() { let expr; expr.field; expr.0; }
+
+    /// ExprKind::Index
+    fn expr_index() { let expr; expr[true]; }
+
+    /// ExprKind::Range
+    fn expr_range() {
+        let (lo, hi);
+        #[lang = "RangeFull"] {  };
+        #[lang = "RangeTo"] { end: hi };
+        #[lang = "RangeFrom"] { start: lo };
+        #[lang = "Range"] { start: lo, end: hi };
+        #[lang = "Range"] { start: lo, end: hi };
+        #[lang = "RangeToInclusive"] { end: hi };
+        #[lang = "range_inclusive_new"](lo, hi);
+        #[lang = "range_inclusive_new"](-2, -1);
+    }
+
+    /// ExprKind::Underscore
+    fn expr_underscore() {
+        (/*ERROR*/);
+    }
+
+    /// ExprKind::Path
+    fn expr_path() {
+        let x;
+        crate::expressions::expr_path;
+        crate::expressions::expr_path::<'static>;
+        <T as Default>::default;
+        <T as ::core::default::Default>::default;
+        x;
+        x::<T, T>;
+        crate::expressions::expr_path;
+        core::marker::PhantomData;
+    }
+
+    /// ExprKind::AddrOf
+    fn expr_addr_of() {
+        let expr;
+        &expr;
+        &mut expr;
+        &raw const expr;
+        &raw mut expr;
+    }
+
+    /// ExprKind::Break
+    fn expr_break() { 'a: { break; break 'a; break true; break 'a true; } }
+
+    /// ExprKind::Continue
+    fn expr_continue() { 'a: { continue; continue 'a; } }
+
+    /// ExprKind::Ret
+    fn expr_ret() { return; return true; }
+
+    /// ExprKind::InlineAsm
+    fn expr_inline_asm() {
+        let x;
+        asm!("mov {1}, {0}\nshl {1}, 1\nshl {0}, 2\nadd {0}, {1}",
+            inout(reg)
+            x,
+            out(reg)
+            _);
+    }
+
+    /// ExprKind::OffsetOf
+    fn expr_offset_of() {
+
+
+
+
+
+
+
+
+
+
+
+
+
+        // ...
+
+
+
+
+
+        // concat_idents is deprecated
+
+
+
+
+        { offset_of!(T, field) };
+    }
+    /// ExprKind::MacCall
+    fn expr_mac_call() { "..."; "..."; "..."; }
+    /// ExprKind::Struct
+    fn expr_struct() {
+        struct Struct {
+        }
+        let (x, base);
+        Struct {  };
+        <Struct as ToOwned>::Owned {  };
+        Struct { .. };
+        Struct { ..base };
+        Struct { x };
+        Struct { x, ..base };
+        Struct { x: true };
+        Struct { x: true, .. };
+        Struct { x: true, ..base };
+        Struct { 0: true, ..base };
+    }
+    /// ExprKind::Repeat
+    fn expr_repeat() { [(); 0]; }
+    /// ExprKind::Paren
+    fn expr_paren() { let expr; expr; }
+    /// ExprKind::Try
+    fn expr_try() {
+        let expr;
+        match #[lang = "branch"](expr) {
+                #[lang = "Break"] {  0: residual } =>
+                    #[allow(unreachable_code)]
+                    return #[lang = "from_residual"](residual),
+                #[lang = "Continue"] {  0: val } => #[allow(unreachable_code)]
+                    val,
+            };
+    }
+    /// ExprKind::Yield
+    fn expr_yield() { yield (); yield true; }
+    /// ExprKind::Yeet
+    fn expr_yeet() {
+        return #[lang = "from_yeet"](());
+        return #[lang = "from_yeet"](0);
+    }
+    /// ExprKind::Become
+    fn expr_become() { become true; }
+    /// ExprKind::IncludedBytes
+    fn expr_include_bytes() {
+        b"data for include_bytes in ../expanded-exhaustive.rs\n";
+    }
+    /// ExprKind::FormatArgs
+    fn expr_format_args() {
+        let expr;
+        format_arguments::new_const(&[]);
+        format_arguments::new_v1(&[""],
+            &[format_argument::new_display(&expr)]);
+    }
+}
+mod items {
+    /// ItemKind::ExternCrate
+    mod item_extern_crate {/// ItemKind::ExternCrate
+        extern crate core;
+        extern crate self as unpretty;
+        extern crate core as _;
+    }
+    /// ItemKind::Use
+    mod item_use {/// ItemKind::Use
+        use ::{};
+        use crate::expressions;
+        use crate::items::item_use;
+        use core::*;
+    }
+    /// ItemKind::Static
+    mod item_static {/// ItemKind::Static
+        static A: () = { };
+        static mut B: () = { };
+    }
+    /// ItemKind::Const
+    mod item_const {/// ItemKind::Const
+        const A: () = { };
+        trait TraitItems {
+            const
+            B:
+            ();
+            const
+            C:
+            ()
+            =
+            { };
+        }
+    }
+    /// ItemKind::Fn
+    mod item_fn {/// ItemKind::Fn
+        const unsafe extern "C" fn f() { }
+        async unsafe extern "C" fn g()
+            ->
+                /*impl Trait*/ |mut _task_context: ResumeTy|
+            { { let _t = { }; _t } }
+        fn h<'a, T>() where T: 'a { }
+        trait TraitItems {
+            unsafe extern "C" fn f();
+        }
+        impl TraitItems for _ {
+            unsafe extern "C" fn f() { }
+        }
+    }
+    /// ItemKind::Mod
+    mod item_mod {/// ItemKind::Mod
+    }
+    /// ItemKind::ForeignMod
+    mod item_foreign_mod {/// ItemKind::ForeignMod
+        extern "Rust" { }
+        extern "C" { }
+    }
+    /// ItemKind::GlobalAsm
+    mod item_global_asm {/// ItemKind::GlobalAsm
+        global_asm! (".globl my_asm_func") }
+        /// ItemKind::TyAlias
+        mod item_ty_alias {/// ItemKind::TyAlias
+            type Type<'a> where T: 'a = T;
+        }
+        /// ItemKind::Enum
+        mod item_enum {/// ItemKind::Enum
+            enum Void { }
+            enum Empty {
+                Unit,
+                Tuple(),
+                Struct {
+                    },
+            }
+            enum Generic<'a, T> where T: 'a {
+                Tuple(T),
+                Struct {
+                        t: T,
+                    },
+            }
+        }
+        /// ItemKind::Struct
+        mod item_struct {/// ItemKind::Struct
+            struct Unit;
+            struct Tuple();
+            struct Newtype(Unit);
+            struct Struct {
+            }
+            struct Generic<'a, T> where T: 'a {
+                t: T,
+            }
+        }
+        /// ItemKind::Union
+        mod item_union {/// ItemKind::Union
+            union Generic<'a, T> where T: 'a {
+                t: T,
+            }
+        }
+        /// ItemKind::Trait
+        mod item_trait {/// ItemKind::Trait
+            auto unsafe trait Send { }
+            trait Trait<'a>: Sized where Self: 'a { }
+        }
+        /// ItemKind::TraitAlias
+        mod item_trait_alias {/// ItemKind::TraitAlias
+            trait Trait<T> = Sized where for<'a> T: 'a;
+        }
+        /// ItemKind::Impl
+        mod item_impl {/// ItemKind::Impl
+            impl () { }
+            impl <T> () { }
+            impl Default for () { }
+            impl const <T> Default for () { }
+        }
+        /// ItemKind::MacCall
+        mod item_mac_call {/// ItemKind::MacCall
+        }
+        /// ItemKind::MacroDef
+        mod item_macro_def {/// ItemKind::MacroDef
+            macro_rules! mac { () => { ... }; }
+            macro stringify { () => {} }
+        }
+        /// ItemKind::Delegation
+        /*! FIXME: todo */
+        mod item_delegation {/// ItemKind::Delegation
+            /*! FIXME: todo */
+        }
+        /// ItemKind::DelegationMac
+        /*! FIXME: todo */
+        mod item_delegation_mac {/// ItemKind::DelegationMac
+            /*! FIXME: todo */
+        }
+    }
+    mod patterns {
+        /// PatKind::Missing
+        fn pat_missing() { let _: for fn(u32, T, &'_ str); }
+        /// PatKind::Wild
+        fn pat_wild() { let _; }
+        /// PatKind::Ident
+        fn pat_ident() {
+            let x;
+            let ref x;
+            let mut x;
+            let ref mut x;
+            let ref mut x@_;
+        }
+        /// PatKind::Struct
+        fn pat_struct() {
+            let T {};
+            let T::<T> {};
+            let T::<'static> {};
+            let T {  x };
+            let T {  x: _x };
+            let T { .. };
+            let T {  x, .. };
+            let T {  x: _x, .. };
+            let T {  0: _x, .. };
+            let <T as ToOwned>::Owned {};
+        }
+        /// PatKind::TupleStruct
+        fn pat_tuple_struct() {
+            struct Tuple();
+            let Tuple();
+            let Tuple::<T>();
+            let Tuple::<'static>();
+            let Tuple(x);
+            let Tuple(..);
+            let Tuple(x, ..);
+        }
+        /// PatKind::Or
+        fn pat_or() { let true | false; let true; let true | false; }
+        /// PatKind::Path
+        fn pat_path() {
+            let core::marker::PhantomData;
+            let core::marker::PhantomData::<T>;
+            let core::marker::PhantomData::<'static>;
+            let <T as Trait>::CONST;
+        }
+        /// PatKind::Tuple
+        fn pat_tuple() { let (); let (true,); let (true, false); }
+        /// PatKind::Box
+        fn pat_box() { let box pat; }
+        /// PatKind::Deref
+        fn pat_deref() { let deref!(pat); }
+        /// PatKind::Ref
+        fn pat_ref() { let &pat; let &mut pat; }
+        /// PatKind::Expr
+        fn pat_expr() { let 1000i8; let -""; }
+        /// PatKind::Range
+        fn pat_range() {
+            let ..1;
+            let 0...;
+            let 0..1;
+            let 0...1;
+            let -2...-1;
+        }
+        /// PatKind::Slice
+        fn pat_slice() { let []; let [true]; let [true]; let [true, false]; }
+        /// PatKind::Rest
+        fn pat_rest() { let _; }
+        /// PatKind::Never
+        fn pat_never() { let !; let Some(!); }
+        /// PatKind::Paren
+        fn pat_paren() { let pat; }
+        /// PatKind::MacCall
+        fn pat_mac_call() { let ""; let ""; let ""; }
+    }
+    mod statements {
+        /// StmtKind::Let
+        fn stmt_let() {
+            let _;
+            let _ = true;
+            let _: T = true;
+            let _ = true else { return; };
+        }
+        /// StmtKind::Item
+        fn stmt_item() {
+            struct Struct {
+            }
+            struct Unit;
+        }
+        /// StmtKind::Expr
+        fn stmt_expr() { () }
+        /// StmtKind::Semi
+        fn stmt_semi() { 1 + 1; }
+        /// StmtKind::Empty
+        fn stmt_empty() { }
+        /// StmtKind::MacCall
+        fn stmt_mac_call() { "..."; "..."; "..."; }
+    }
+    mod types {
+        /// TyKind::Slice
+        fn ty_slice() { let _: [T]; }
+        /// TyKind::Array
+        fn ty_array() { let _: [T; 0]; }
+        /// TyKind::Ptr
+        fn ty_ptr() { let _: *const T; let _: *mut T; }
+        /// TyKind::Ref
+        fn ty_ref() {
+            let _: &T;
+            let _: &mut T;
+            let _: &'static T;
+            let _: &'static mut [T];
+            let _: &T<T<T<T<T>>>>;
+            let _: &T<T<T<T<T>>>>;
+        }
+        /// TyKind::BareFn
+        fn ty_bare_fn() {
+            let _: fn();
+            let _: fn() -> ();
+            let _: fn(T);
+            let _: fn(t: T);
+            let _: fn();
+            let _: for<'a> fn();
+        }
+        /// TyKind::Never
+        fn ty_never() { let _: !; }
+        /// TyKind::Tup
+        fn ty_tup() { let _: (); let _: (T,); let _: (T, T); }
+        /// TyKind::Path
+        fn ty_path() {
+            let _: T;
+            let _: T<'static>;
+            let _: T<T>;
+            let _: T<T>;
+            let _: T;
+            let _: <T as ToOwned>::Owned;
+        }
+        /// TyKind::TraitObject
+        fn ty_trait_object() {
+            let _: dyn Send;
+            let _: dyn Send + 'static;
+            let _: dyn Send + 'static;
+            let _: dyn for<'a> Send;
+            let _: dyn* Send;
+        }
+        /// TyKind::ImplTrait
+        const fn ty_impl_trait() {
+            let _: (/*ERROR*/);
+            let _: (/*ERROR*/);
+            let _: (/*ERROR*/);
+            let _: (/*ERROR*/);
+            let _: (/*ERROR*/);
+            let _: (/*ERROR*/);
+        }
+        /// TyKind::Paren
+        fn ty_paren() { let _: T; }
+        /// TyKind::Typeof
+        /*! unused for now */
+        fn ty_typeof() { }
+        /// TyKind::Infer
+        fn ty_infer() { let _: _; }
+        /// TyKind::ImplicitSelf
+        /*! there is no syntax for this */
+        fn ty_implicit_self() { }
+        /// TyKind::MacCall
+        #[expect(deprecated)]
+        fn ty_mac_call() { let _: T; let _: T; let _: T; }
+        /// TyKind::CVarArgs
+        /*! FIXME: todo */
+        fn ty_c_var_args() { }
+        /// TyKind::Pat
+        fn ty_pat() { let _: u32 is 1..=RangeMax; }
+    }
+    mod visibilities {
+        /// VisibilityKind::Public
+        mod visibility_public {/// VisibilityKind::Public
+            struct Pub;
+        }
+        /// VisibilityKind::Restricted
+        mod visibility_restricted {/// VisibilityKind::Restricted
+            struct PubCrate;
+            struct PubSelf;
+            struct PubSuper;
+            struct PubInCrate;
+            struct PubInSelf;
+            struct PubInSuper;
+            struct PubInCrateVisibilities;
+            struct PubInSelfSuper;
+            struct PubInSuperMod;
+        }
+    }
diff --git a/tests/ui/unpretty/expanded-exhaustive.rs b/tests/ui/unpretty/exhaustive.rs
similarity index 91%
rename from tests/ui/unpretty/expanded-exhaustive.rs
rename to tests/ui/unpretty/exhaustive.rs
index 5697f615b9791..40807eafd7f92 100644
--- a/tests/ui/unpretty/expanded-exhaustive.rs
+++ b/tests/ui/unpretty/exhaustive.rs
@@ -1,6 +1,12 @@
-//@ compile-flags: -Zunpretty=expanded
+//@ revisions: expanded hir
+//@[expanded]compile-flags: -Zunpretty=expanded
+//@[expanded]check-pass
+//@[hir]compile-flags: -Zunpretty=hir
+//@[hir]check-fail
 //@ edition:2024
-//@ check-pass
+
+// Note: the HIR revision includes a `.stderr` file because there are some
+// errors that only occur once we get past the AST.
 
 #![feature(auto_traits)]
 #![feature(box_patterns)]
@@ -202,8 +208,8 @@ mod expressions {
         move || value;
         async || value;
         async move || value;
-        static || value;
-        static move || value;
+        static || value;            //[hir]~ closures cannot be static
+        static move || value;       //[hir]~ closures cannot be static
         (static async || value);
         (static async move || value);
         || -> u8 { value };
@@ -232,7 +238,7 @@ mod expressions {
     /// ExprKind::Await
     fn expr_await() {
         let fut;
-        fut.await;
+        fut.await;  //[hir]~ `await` is only allowed
     }
 
     /// ExprKind::TryBlock
@@ -281,7 +287,7 @@ mod expressions {
 
     /// ExprKind::Underscore
     fn expr_underscore() {
-        _;
+        _;      //[hir]~ in expressions, `_` can only
     }
 
     /// ExprKind::Path
@@ -291,10 +297,14 @@ mod expressions {
         crate::expressions::expr_path::<'static>;
         <T as Default>::default;
         <T as ::core::default::Default>::default::<>;
-        x::();
-        x::(T, T) -> T;
+        x::();            //[hir]~ parenthesized type parameters
+        x::(T, T) -> T;   //[hir]~ parenthesized type parameters
         crate::() -> ()::expressions::() -> ()::expr_path;
+        //[hir]~^ parenthesized type parameters
+        //[hir]~| parenthesized type parameters
         core::()::marker::()::PhantomData;
+        //[hir]~^ parenthesized type parameters
+        //[hir]~| parenthesized type parameters
     }
 
     /// ExprKind::AddrOf
@@ -390,7 +400,7 @@ mod expressions {
 
     /// ExprKind::Yield
     fn expr_yield() {
-        yield;
+        yield;          //[hir]~ `yield` can only be used
         yield true;
     }
 
@@ -470,7 +480,7 @@ mod items {
 
     /// ItemKind::ForeignMod
     mod item_foreign_mod {
-        unsafe extern "C++" {}
+        unsafe extern "C++" {}  //[hir]~ invalid ABI
         unsafe extern "C" {}
     }
 
@@ -680,7 +690,7 @@ mod patterns {
 
     /// PatKind::Rest
     fn pat_rest() {
-        let ..;
+        let ..;     //[hir]~ `..` patterns are not allowed here
     }
 
     /// PatKind::Never
@@ -795,7 +805,7 @@ mod types {
         let _: T<'static>;
         let _: T<T>;
         let _: T::<T>;
-        let _: T() -> !;
+        let _: T() -> !;    //[hir]~ parenthesized type parameters
         let _: <T as ToOwned>::Owned;
     }
 
@@ -810,12 +820,12 @@ mod types {
 
     /// TyKind::ImplTrait
     const fn ty_impl_trait() {
-        let _: impl Send;
-        let _: impl Send + 'static;
-        let _: impl 'static + Send;
-        let _: impl ?Sized;
-        let _: impl ~const Clone;
-        let _: impl for<'a> Send;
+        let _: impl Send;               //[hir]~ `impl Trait` is not allowed
+        let _: impl Send + 'static;     //[hir]~ `impl Trait` is not allowed
+        let _: impl 'static + Send;     //[hir]~ `impl Trait` is not allowed
+        let _: impl ?Sized;             //[hir]~ `impl Trait` is not allowed
+        let _: impl ~const Clone;       //[hir]~ `impl Trait` is not allowed
+        let _: impl for<'a> Send;       //[hir]~ `impl Trait` is not allowed
     }
 
     /// TyKind::Paren
diff --git a/tests/ui/unpretty/expanded-interpolation.rs b/tests/ui/unpretty/interpolation-expanded.rs
similarity index 100%
rename from tests/ui/unpretty/expanded-interpolation.rs
rename to tests/ui/unpretty/interpolation-expanded.rs
diff --git a/tests/ui/unpretty/expanded-interpolation.stdout b/tests/ui/unpretty/interpolation-expanded.stdout
similarity index 100%
rename from tests/ui/unpretty/expanded-interpolation.stdout
rename to tests/ui/unpretty/interpolation-expanded.stdout