From 7a7ab993bbf4519b6e0e4c43e7b910c678cc1e94 Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Wed, 22 Oct 2025 19:19:13 +0300 Subject: [PATCH] Revert "internal: Rewrite attribute handling" --- Cargo.lock | 6 +- Cargo.toml | 5 +- crates/base-db/src/editioned_file_id.rs | 291 --- crates/base-db/src/input.rs | 7 +- crates/base-db/src/lib.rs | 39 +- crates/cfg/Cargo.toml | 1 - crates/cfg/src/cfg_expr.rs | 59 - crates/cfg/src/tests.rs | 42 +- crates/hir-def/Cargo.toml | 4 +- crates/hir-def/src/attr.rs | 901 +++++++++ crates/hir-def/src/attrs.rs | 1613 ----------------- crates/hir-def/src/db.rs | 71 +- crates/hir-def/src/expr_store/expander.rs | 14 +- crates/hir-def/src/expr_store/lower.rs | 22 +- crates/hir-def/src/expr_store/pretty.rs | 19 +- .../src/expr_store/tests/body/block.rs | 4 +- .../src/expr_store/tests/signatures.rs | 14 +- crates/hir-def/src/import_map.rs | 34 +- crates/hir-def/src/item_tree.rs | 40 +- crates/hir-def/src/item_tree/attrs.rs | 220 --- crates/hir-def/src/item_tree/lower.rs | 35 +- crates/hir-def/src/item_tree/pretty.rs | 12 +- crates/hir-def/src/item_tree/tests.rs | 9 +- crates/hir-def/src/lang_item.rs | 7 +- crates/hir-def/src/lib.rs | 94 +- .../hir-def/src/macro_expansion_tests/mbe.rs | 43 +- .../hir-def/src/macro_expansion_tests/mod.rs | 15 - .../src/macro_expansion_tests/proc_macros.rs | 96 +- crates/hir-def/src/nameres.rs | 15 +- crates/hir-def/src/nameres/assoc.rs | 39 +- crates/hir-def/src/nameres/attr_resolution.rs | 10 +- crates/hir-def/src/nameres/collector.rs | 179 +- crates/hir-def/src/nameres/diagnostics.rs | 14 +- crates/hir-def/src/nameres/mod_resolution.rs | 5 +- crates/hir-def/src/nameres/proc_macro.rs | 24 +- crates/hir-def/src/signatures.rs | 130 +- crates/hir-def/src/src.rs | 9 +- crates/hir-def/src/test_db.rs | 33 +- crates/hir-expand/Cargo.toml | 2 - crates/hir-expand/src/attrs.rs | 808 ++++----- crates/hir-expand/src/builtin/fn_macro.rs | 2 +- crates/hir-expand/src/cfg_process.rs | 638 +++---- 
crates/hir-expand/src/db.rs | 181 +- crates/hir-expand/src/declarative.rs | 58 +- crates/hir-expand/src/files.rs | 33 +- crates/hir-expand/src/fixup.rs | 5 +- crates/hir-expand/src/lib.rs | 165 +- crates/hir-expand/src/mod_path.rs | 59 +- crates/hir-expand/src/span_map.rs | 13 +- crates/hir-ty/src/consteval.rs | 3 +- crates/hir-ty/src/diagnostics/decl_check.rs | 6 +- .../diagnostics/match_check/pat_analysis.rs | 6 +- crates/hir-ty/src/diagnostics/unsafe_check.rs | 4 +- crates/hir-ty/src/infer.rs | 14 +- crates/hir-ty/src/infer/coerce.rs | 14 +- crates/hir-ty/src/infer/expr.rs | 14 +- crates/hir-ty/src/layout.rs | 4 +- crates/hir-ty/src/layout/adt.rs | 35 +- crates/hir-ty/src/method_resolution.rs | 5 +- crates/hir-ty/src/mir/eval/shim.rs | 45 +- crates/hir-ty/src/next_solver/interner.rs | 59 +- crates/hir-ty/src/target_feature.rs | 46 +- crates/hir-ty/src/tests/incremental.rs | 49 +- crates/hir-ty/src/utils.rs | 8 +- crates/hir/src/attrs.rs | 256 +-- crates/hir/src/diagnostics.rs | 13 +- crates/hir/src/lib.rs | 267 ++- crates/hir/src/semantics.rs | 71 +- crates/hir/src/semantics/child_by_source.rs | 13 +- crates/hir/src/symbols.rs | 8 +- .../src/handlers/add_missing_match_arms.rs | 6 +- .../handlers/destructure_struct_binding.rs | 4 +- .../src/handlers/move_module_to_file.rs | 10 +- crates/ide-assists/src/lib.rs | 4 +- crates/ide-assists/src/tests.rs | 4 +- crates/ide-assists/src/utils.rs | 13 +- .../src/completions/attribute/lint.rs | 2 +- .../src/completions/flyimport.rs | 4 +- .../ide-completion/src/completions/postfix.rs | 2 +- .../ide-completion/src/completions/snippet.rs | 2 +- crates/ide-completion/src/context.rs | 22 +- crates/ide-completion/src/item.rs | 12 +- crates/ide-completion/src/render.rs | 13 +- crates/ide-completion/src/render/literal.rs | 2 +- crates/ide-completion/src/render/pattern.rs | 2 +- crates/ide-completion/src/render/variant.rs | 6 +- crates/ide-completion/src/tests.rs | 4 +- crates/ide-db/src/defs.rs | 38 +- 
crates/ide-db/src/documentation.rs | 351 +++- crates/ide-db/src/ra_fixture.rs | 12 +- crates/ide-db/src/rust_doc.rs | 2 +- crates/ide-db/src/search.rs | 16 +- .../ide-db/src/test_data/test_doc_alias.txt | 30 +- .../test_symbol_index_collection.txt | 134 +- .../test_symbols_exclude_imports.txt | 2 +- .../test_data/test_symbols_with_imports.txt | 4 +- crates/ide-db/src/traits.rs | 6 +- .../src/handlers/inactive_code.rs | 3 +- .../src/handlers/invalid_derive_target.rs | 4 +- .../src/handlers/macro_error.rs | 22 +- .../src/handlers/malformed_derive.rs | 4 +- .../src/handlers/unresolved_macro_call.rs | 5 +- crates/ide-diagnostics/src/lib.rs | 43 +- crates/ide-ssr/src/from_comment.rs | 2 +- crates/ide-ssr/src/lib.rs | 6 +- crates/ide-ssr/src/search.rs | 8 +- crates/ide/src/doc_links.rs | 38 +- crates/ide/src/doc_links/tests.rs | 73 +- crates/ide/src/fixture.rs | 32 +- crates/ide/src/goto_implementation.rs | 2 +- crates/ide/src/highlight_related.rs | 2 +- crates/ide/src/hover/render.rs | 52 +- crates/ide/src/inlay_hints.rs | 4 +- crates/ide/src/lib.rs | 13 +- crates/ide/src/navigation_target.rs | 38 +- crates/ide/src/references.rs | 5 +- crates/ide/src/runnables.rs | 50 +- crates/ide/src/signature_help.rs | 36 +- crates/ide/src/static_index.rs | 6 +- crates/ide/src/syntax_highlighting.rs | 2 +- crates/ide/src/syntax_highlighting/html.rs | 2 +- crates/ide/src/syntax_highlighting/inject.rs | 189 +- .../test_data/highlight_doctest.html | 72 +- crates/ide/src/typing.rs | 5 +- crates/ide/src/typing/on_enter.rs | 2 +- crates/ide/src/view_item_tree.rs | 2 +- .../rust-analyzer/src/cli/analysis_stats.rs | 8 +- crates/rust-analyzer/src/cli/scip.rs | 6 +- crates/rust-analyzer/src/cli/ssr.rs | 2 +- .../src/cli/unresolved_references.rs | 2 +- crates/rust-analyzer/src/lsp/to_proto.rs | 4 +- crates/syntax-bridge/src/lib.rs | 68 +- crates/syntax/src/ast.rs | 4 +- crates/syntax/src/ast/node_ext.rs | 38 +- crates/syntax/src/ast/token_ext.rs | 6 +- crates/syntax/src/ast/traits.rs | 67 +- 
crates/test-fixture/src/lib.rs | 45 +- 137 files changed, 3805 insertions(+), 4953 deletions(-) delete mode 100644 crates/base-db/src/editioned_file_id.rs create mode 100644 crates/hir-def/src/attr.rs delete mode 100644 crates/hir-def/src/attrs.rs delete mode 100644 crates/hir-def/src/item_tree/attrs.rs diff --git a/Cargo.lock b/Cargo.lock index d31d233dc4b6..ea8d1a781dcc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -725,7 +725,6 @@ dependencies = [ name = "hir-expand" version = "0.0.0" dependencies = [ - "arrayvec", "base-db", "cfg", "cov-mark", @@ -744,7 +743,6 @@ dependencies = [ "stdx", "syntax", "syntax-bridge", - "thin-vec", "tracing", "triomphe", "tt", @@ -1993,9 +1991,9 @@ dependencies = [ [[package]] name = "rowan" -version = "0.15.17" +version = "0.15.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4f1e4a001f863f41ea8d0e6a0c34b356d5b733db50dadab3efef640bafb779b" +checksum = "32a58fa8a7ccff2aec4f39cc45bf5f985cec7125ab271cf681c279fd00192b49" dependencies = [ "countme", "hashbrown 0.14.5", diff --git a/Cargo.toml b/Cargo.toml index 767dbcae9031..8a108974681a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -52,7 +52,7 @@ debug = 2 # local crates macros = { path = "./crates/macros", version = "0.0.0" } base-db = { path = "./crates/base-db", version = "0.0.0" } -cfg = { path = "./crates/cfg", version = "0.0.0", features = ["tt", "syntax"] } +cfg = { path = "./crates/cfg", version = "0.0.0", features = ["tt"] } hir = { path = "./crates/hir", version = "0.0.0" } hir-def = { path = "./crates/hir-def", version = "0.0.0" } hir-expand = { path = "./crates/hir-expand", version = "0.0.0" } @@ -131,7 +131,7 @@ process-wrap = { version = "8.2.1", features = ["std"] } pulldown-cmark-to-cmark = "10.0.4" pulldown-cmark = { version = "0.9.6", default-features = false } rayon = "1.10.0" -rowan = "=0.15.17" +rowan = "=0.15.15" # Ideally we'd not enable the macros feature but unfortunately the `tracked` attribute does not work # on impls without it 
salsa = { version = "0.24.0", default-features = true, features = [ @@ -167,7 +167,6 @@ tracing-subscriber = { version = "0.3.20", default-features = false, features = triomphe = { version = "0.1.14", default-features = false, features = ["std"] } url = "2.5.4" xshell = "0.2.7" -thin-vec = "0.2.14" petgraph = { version = "0.8.2", default-features = false } # We need to freeze the version of the crate, as the raw-api feature is considered unstable diff --git a/crates/base-db/src/editioned_file_id.rs b/crates/base-db/src/editioned_file_id.rs deleted file mode 100644 index 2f8969c0ea33..000000000000 --- a/crates/base-db/src/editioned_file_id.rs +++ /dev/null @@ -1,291 +0,0 @@ -//! Defines [`EditionedFileId`], an interned wrapper around [`span::EditionedFileId`] that -//! is interned (so queries can take it) and remembers its crate. - -use core::fmt; -use std::hash::{Hash, Hasher}; - -use span::Edition; -use vfs::FileId; - -use crate::{Crate, RootQueryDb}; - -#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub struct EditionedFileId( - salsa::Id, - std::marker::PhantomData<&'static salsa::plumbing::interned::Value>, -); - -const _: () = { - use salsa::plumbing as zalsa_; - use zalsa_::interned as zalsa_struct_; - type Configuration_ = EditionedFileId; - - #[derive(Debug, Clone, PartialEq, Eq)] - pub struct EditionedFileIdData { - editioned_file_id: span::EditionedFileId, - krate: Crate, - } - - /// We like to include the origin crate in an `EditionedFileId` (for use in the item tree), - /// but this poses us a problem. - /// - /// Spans contain `EditionedFileId`s, and we don't want to make them store the crate too - /// because that will increase their size, which will increase memory usage significantly. - /// Furthermore, things using spans do not generally need the crate: they are using the - /// file id for queries like `ast_id_map` or `parse`, which do not care about the crate. 
- /// - /// To solve this, we hash **only the `span::EditionedFileId`**, but on still compare - /// the crate in equality check. This preserves the invariant of `Hash` and `Eq` - - /// although same hashes can be used for different items, same file ids used for multiple - /// crates is a rare thing, and different items always have different hashes. Then, - /// when we only have a `span::EditionedFileId`, we use the `intern()` method to - /// reuse existing file ids, and create new one only if needed. See [`from_span_guess_origin`]. - /// - /// See this for more info: https://rust-lang.zulipchat.com/#narrow/channel/185405-t-compiler.2Frust-analyzer/topic/Letting.20EditionedFileId.20know.20its.20crate/near/530189401 - /// - /// [`from_span_guess_origin`]: EditionedFileId::from_span_guess_origin - #[derive(Hash, PartialEq, Eq)] - struct WithoutCrate { - editioned_file_id: span::EditionedFileId, - } - - impl Hash for EditionedFileIdData { - #[inline] - fn hash(&self, state: &mut H) { - let EditionedFileIdData { editioned_file_id, krate: _ } = *self; - editioned_file_id.hash(state); - } - } - - impl zalsa_struct_::HashEqLike for EditionedFileIdData { - #[inline] - fn hash(&self, state: &mut H) { - Hash::hash(self, state); - } - - #[inline] - fn eq(&self, data: &WithoutCrate) -> bool { - let EditionedFileIdData { editioned_file_id, krate: _ } = *self; - editioned_file_id == data.editioned_file_id - } - } - - impl zalsa_::HasJar for EditionedFileId { - type Jar = zalsa_struct_::JarImpl; - const KIND: zalsa_::JarKind = zalsa_::JarKind::Struct; - } - - zalsa_::register_jar! 
{ - zalsa_::ErasedJar::erase::() - } - - impl zalsa_struct_::Configuration for EditionedFileId { - const LOCATION: salsa::plumbing::Location = - salsa::plumbing::Location { file: file!(), line: line!() }; - const DEBUG_NAME: &'static str = "EditionedFileId"; - const REVISIONS: std::num::NonZeroUsize = std::num::NonZeroUsize::MAX; - const PERSIST: bool = false; - - type Fields<'a> = EditionedFileIdData; - type Struct<'db> = EditionedFileId; - - fn serialize(_: &Self::Fields<'_>, _: S) -> Result - where - S: zalsa_::serde::Serializer, - { - unimplemented!("attempted to serialize value that set `PERSIST` to false") - } - - fn deserialize<'de, D>(_: D) -> Result, D::Error> - where - D: zalsa_::serde::Deserializer<'de>, - { - unimplemented!("attempted to deserialize value that cannot set `PERSIST` to false"); - } - } - - impl Configuration_ { - pub fn ingredient(zalsa: &zalsa_::Zalsa) -> &zalsa_struct_::IngredientImpl { - static CACHE: zalsa_::IngredientCache> = - zalsa_::IngredientCache::new(); - - // SAFETY: `lookup_jar_by_type` returns a valid ingredient index, and the only - // ingredient created by our jar is the struct ingredient. 
- unsafe { - CACHE.get_or_create(zalsa, || { - zalsa.lookup_jar_by_type::>() - }) - } - } - } - - impl zalsa_::AsId for EditionedFileId { - fn as_id(&self) -> salsa::Id { - self.0.as_id() - } - } - impl zalsa_::FromId for EditionedFileId { - fn from_id(id: salsa::Id) -> Self { - Self(::from_id(id), std::marker::PhantomData) - } - } - - unsafe impl Send for EditionedFileId {} - unsafe impl Sync for EditionedFileId {} - - impl std::fmt::Debug for EditionedFileId { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - Self::default_debug_fmt(*self, f) - } - } - - impl zalsa_::SalsaStructInDb for EditionedFileId { - type MemoIngredientMap = salsa::plumbing::MemoIngredientSingletonIndex; - - fn lookup_ingredient_index(aux: &zalsa_::Zalsa) -> salsa::plumbing::IngredientIndices { - aux.lookup_jar_by_type::>().into() - } - - fn entries(zalsa: &zalsa_::Zalsa) -> impl Iterator + '_ { - let _ingredient_index = - zalsa.lookup_jar_by_type::>(); - ::ingredient(zalsa).entries(zalsa).map(|entry| entry.key()) - } - - #[inline] - fn cast(id: salsa::Id, type_id: std::any::TypeId) -> Option { - if type_id == std::any::TypeId::of::() { - Some(::from_id(id)) - } else { - None - } - } - - #[inline] - unsafe fn memo_table( - zalsa: &zalsa_::Zalsa, - id: zalsa_::Id, - current_revision: zalsa_::Revision, - ) -> zalsa_::MemoTableWithTypes<'_> { - // SAFETY: Guaranteed by caller. 
- unsafe { - zalsa.table().memos::>(id, current_revision) - } - } - } - - unsafe impl zalsa_::Update for EditionedFileId { - unsafe fn maybe_update(old_pointer: *mut Self, new_value: Self) -> bool { - if unsafe { *old_pointer } != new_value { - unsafe { *old_pointer = new_value }; - true - } else { - false - } - } - } - - impl EditionedFileId { - pub fn from_span( - db: &(impl salsa::Database + ?Sized), - editioned_file_id: span::EditionedFileId, - krate: Crate, - ) -> Self { - let (zalsa, zalsa_local) = db.zalsas(); - Configuration_::ingredient(zalsa).intern( - zalsa, - zalsa_local, - EditionedFileIdData { editioned_file_id, krate }, - |_, data| data, - ) - } - - /// Guesses the crate for the file. - /// - /// Only use this if you cannot precisely determine the origin. This can happen in one of two cases: - /// - /// 1. The file is not in the module tree. - /// 2. You are latency sensitive and cannot afford calling the def map to precisely compute the origin - /// (e.g. on enter feature, folding, etc.). - pub fn from_span_guess_origin( - db: &dyn RootQueryDb, - editioned_file_id: span::EditionedFileId, - ) -> Self { - let (zalsa, zalsa_local) = db.zalsas(); - Configuration_::ingredient(zalsa).intern( - zalsa, - zalsa_local, - WithoutCrate { editioned_file_id }, - |_, _| { - // FileId not in the database. 
- let krate = db - .relevant_crates(editioned_file_id.file_id()) - .first() - .copied() - .unwrap_or_else(|| db.all_crates()[0]); - EditionedFileIdData { editioned_file_id, krate } - }, - ) - } - - pub fn editioned_file_id(self, db: &dyn salsa::Database) -> span::EditionedFileId { - let zalsa = db.zalsa(); - let fields = Configuration_::ingredient(zalsa).fields(zalsa, self); - fields.editioned_file_id - } - - pub fn krate(self, db: &dyn salsa::Database) -> Crate { - let zalsa = db.zalsa(); - let fields = Configuration_::ingredient(zalsa).fields(zalsa, self); - fields.krate - } - - /// Default debug formatting for this struct (may be useful if you define your own `Debug` impl) - pub fn default_debug_fmt(this: Self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - zalsa_::with_attached_database(|db| { - let zalsa = db.zalsa(); - let fields = Configuration_::ingredient(zalsa).fields(zalsa, this); - fmt::Debug::fmt(fields, f) - }) - .unwrap_or_else(|| { - f.debug_tuple("EditionedFileId").field(&zalsa_::AsId::as_id(&this)).finish() - }) - } - } -}; - -impl EditionedFileId { - #[inline] - pub fn new(db: &dyn salsa::Database, file_id: FileId, edition: Edition, krate: Crate) -> Self { - EditionedFileId::from_span(db, span::EditionedFileId::new(file_id, edition), krate) - } - - /// Attaches the current edition and guesses the crate for the file. - /// - /// Only use this if you cannot precisely determine the origin. This can happen in one of two cases: - /// - /// 1. The file is not in the module tree. - /// 2. You are latency sensitive and cannot afford calling the def map to precisely compute the origin - /// (e.g. on enter feature, folding, etc.). 
- #[inline] - pub fn current_edition_guess_origin(db: &dyn RootQueryDb, file_id: FileId) -> Self { - Self::from_span_guess_origin(db, span::EditionedFileId::current_edition(file_id)) - } - - #[inline] - pub fn file_id(self, db: &dyn salsa::Database) -> vfs::FileId { - let id = self.editioned_file_id(db); - id.file_id() - } - - #[inline] - pub fn unpack(self, db: &dyn salsa::Database) -> (vfs::FileId, span::Edition) { - let id = self.editioned_file_id(db); - (id.file_id(), id.edition()) - } - - #[inline] - pub fn edition(self, db: &dyn salsa::Database) -> Edition { - self.editioned_file_id(db).edition() - } -} diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs index 28539d59825f..cac74778a26b 100644 --- a/crates/base-db/src/input.rs +++ b/crates/base-db/src/input.rs @@ -829,10 +829,9 @@ pub(crate) fn transitive_rev_deps(db: &dyn RootQueryDb, of: Crate) -> FxHashSet< rev_deps } -impl Crate { - pub fn root_file_id(self, db: &dyn salsa::Database) -> EditionedFileId { - let data = self.data(db); - EditionedFileId::new(db, data.root_file_id, data.edition, self) +impl BuiltCrateData { + pub fn root_file_id(&self, db: &dyn salsa::Database) -> EditionedFileId { + EditionedFileId::new(db, self.root_file_id, self.edition) } } diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs index 32909af5d78d..0e411bcfae60 100644 --- a/crates/base-db/src/lib.rs +++ b/crates/base-db/src/lib.rs @@ -5,7 +5,6 @@ pub use salsa_macros; // FIXME: Rename this crate, base db is non descriptive mod change; -mod editioned_file_id; mod input; pub mod target; @@ -18,7 +17,6 @@ use std::{ pub use crate::{ change::FileChange, - editioned_file_id::EditionedFileId, input::{ BuiltCrateData, BuiltDependency, Crate, CrateBuilder, CrateBuilderId, CrateDataBuilder, CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin, CratesIdMap, CratesMap, @@ -31,6 +29,7 @@ pub use query_group::{self}; use rustc_hash::{FxHashSet, FxHasher}; use salsa::{Durability, Setter}; pub use 
semver::{BuildMetadata, Prerelease, Version, VersionReq}; +use span::Edition; use syntax::{Parse, SyntaxError, ast}; use triomphe::Arc; pub use vfs::{AnchoredPath, AnchoredPathBuf, FileId, VfsPath, file_set::FileSet}; @@ -176,6 +175,42 @@ impl Files { } } +#[salsa_macros::interned(no_lifetime, debug, constructor=from_span, revisions = usize::MAX)] +#[derive(PartialOrd, Ord)] +pub struct EditionedFileId { + pub editioned_file_id: span::EditionedFileId, +} + +impl EditionedFileId { + // Salsa already uses the name `new`... + #[inline] + pub fn new(db: &dyn salsa::Database, file_id: FileId, edition: Edition) -> Self { + EditionedFileId::from_span(db, span::EditionedFileId::new(file_id, edition)) + } + + #[inline] + pub fn current_edition(db: &dyn salsa::Database, file_id: FileId) -> Self { + EditionedFileId::new(db, file_id, Edition::CURRENT) + } + + #[inline] + pub fn file_id(self, db: &dyn salsa::Database) -> vfs::FileId { + let id = self.editioned_file_id(db); + id.file_id() + } + + #[inline] + pub fn unpack(self, db: &dyn salsa::Database) -> (vfs::FileId, span::Edition) { + let id = self.editioned_file_id(db); + (id.file_id(), id.edition()) + } + + #[inline] + pub fn edition(self, db: &dyn SourceDatabase) -> Edition { + self.editioned_file_id(db).edition() + } +} + #[salsa_macros::input(debug)] pub struct FileText { #[returns(ref)] diff --git a/crates/cfg/Cargo.toml b/crates/cfg/Cargo.toml index 9e2a95dbf32c..e17969bd82d4 100644 --- a/crates/cfg/Cargo.toml +++ b/crates/cfg/Cargo.toml @@ -18,7 +18,6 @@ tracing.workspace = true # locals deps tt = { workspace = true, optional = true } -syntax = { workspace = true, optional = true } intern.workspace = true [dev-dependencies] diff --git a/crates/cfg/src/cfg_expr.rs b/crates/cfg/src/cfg_expr.rs index 76e0aba859e6..7a21015e14be 100644 --- a/crates/cfg/src/cfg_expr.rs +++ b/crates/cfg/src/cfg_expr.rs @@ -63,8 +63,6 @@ impl From for CfgExpr { } impl CfgExpr { - // FIXME: Parsing from `tt` is only used in a handful of 
places, reconsider - // if we should switch them to AST. #[cfg(feature = "tt")] pub fn parse(tt: &tt::TopSubtree) -> CfgExpr { next_cfg_expr(&mut tt.iter()).unwrap_or(CfgExpr::Invalid) @@ -75,13 +73,6 @@ impl CfgExpr { next_cfg_expr(tt).unwrap_or(CfgExpr::Invalid) } - #[cfg(feature = "syntax")] - pub fn parse_from_ast( - ast: &mut std::iter::Peekable, - ) -> CfgExpr { - next_cfg_expr_from_ast(ast).unwrap_or(CfgExpr::Invalid) - } - /// Fold the cfg by querying all basic `Atom` and `KeyValue` predicates. pub fn fold(&self, query: &dyn Fn(&CfgAtom) -> bool) -> Option { match self { @@ -98,56 +89,6 @@ impl CfgExpr { } } -#[cfg(feature = "syntax")] -fn next_cfg_expr_from_ast( - it: &mut std::iter::Peekable, -) -> Option { - use intern::sym; - use syntax::{NodeOrToken, SyntaxKind, T, ast}; - - let name = match it.next() { - None => return None, - Some(NodeOrToken::Token(ident)) if ident.kind().is_any_identifier() => { - Symbol::intern(ident.text()) - } - Some(_) => return Some(CfgExpr::Invalid), - }; - - let ret = match it.peek() { - Some(NodeOrToken::Token(eq)) if eq.kind() == T![=] => { - it.next(); - if let Some(NodeOrToken::Token(literal)) = it.peek() - && matches!(literal.kind(), SyntaxKind::STRING) - { - let literal = tt::token_to_literal(literal.text(), ()).symbol; - it.next(); - CfgAtom::KeyValue { key: name, value: literal.clone() }.into() - } else { - return Some(CfgExpr::Invalid); - } - } - Some(NodeOrToken::Node(subtree)) => { - let mut subtree_iter = ast::TokenTreeChildren::new(subtree).peekable(); - it.next(); - let mut subs = std::iter::from_fn(|| next_cfg_expr_from_ast(&mut subtree_iter)); - match name { - s if s == sym::all => CfgExpr::All(subs.collect()), - s if s == sym::any => CfgExpr::Any(subs.collect()), - s if s == sym::not => { - CfgExpr::Not(Box::new(subs.next().unwrap_or(CfgExpr::Invalid))) - } - _ => CfgExpr::Invalid, - } - } - _ => CfgAtom::Flag(name).into(), - }; - - // Eat comma separator - while it.next().is_some_and(|it| 
it.as_token().is_none_or(|it| it.kind() != T![,])) {} - - Some(ret) -} - #[cfg(feature = "tt")] fn next_cfg_expr(it: &mut tt::iter::TtIter<'_, S>) -> Option { use intern::sym; diff --git a/crates/cfg/src/tests.rs b/crates/cfg/src/tests.rs index 52c581dbbd3a..6766748097f0 100644 --- a/crates/cfg/src/tests.rs +++ b/crates/cfg/src/tests.rs @@ -1,10 +1,7 @@ use arbitrary::{Arbitrary, Unstructured}; use expect_test::{Expect, expect}; use intern::Symbol; -use syntax::{ - AstNode, Edition, - ast::{self, TokenTreeChildren}, -}; +use syntax::{AstNode, Edition, ast}; use syntax_bridge::{ DocCommentDesugarMode, dummy_test_span_utils::{DUMMY, DummyTestSpanMap}, @@ -13,33 +10,24 @@ use syntax_bridge::{ use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr}; -#[track_caller] -fn parse_ast_cfg(tt: &ast::TokenTree) -> CfgExpr { - CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(tt).peekable()) -} - -#[track_caller] fn assert_parse_result(input: &str, expected: CfgExpr) { let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap(); - let tt_ast = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); + let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); let tt = syntax_node_to_token_tree( - tt_ast.syntax(), + tt.syntax(), DummyTestSpanMap, DUMMY, DocCommentDesugarMode::ProcMacro, ); let cfg = CfgExpr::parse(&tt); assert_eq!(cfg, expected); - let cfg = parse_ast_cfg(&tt_ast); - assert_eq!(cfg, expected); } -#[track_caller] fn check_dnf(input: &str, expect: Expect) { let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap(); - let tt_ast = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); + let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); let tt = syntax_node_to_token_tree( - tt_ast.syntax(), + tt.syntax(), DummyTestSpanMap, DUMMY, DocCommentDesugarMode::ProcMacro, @@ -47,17 +35,13 @@ fn check_dnf(input: &str, expect: Expect) { 
let cfg = CfgExpr::parse(&tt); let actual = format!("#![cfg({})]", DnfExpr::new(&cfg)); expect.assert_eq(&actual); - let cfg = parse_ast_cfg(&tt_ast); - let actual = format!("#![cfg({})]", DnfExpr::new(&cfg)); - expect.assert_eq(&actual); } -#[track_caller] fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) { let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap(); - let tt_ast = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); + let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); let tt = syntax_node_to_token_tree( - tt_ast.syntax(), + tt.syntax(), DummyTestSpanMap, DUMMY, DocCommentDesugarMode::ProcMacro, @@ -66,18 +50,14 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) { let dnf = DnfExpr::new(&cfg); let why_inactive = dnf.why_inactive(opts).unwrap().to_string(); expect.assert_eq(&why_inactive); - let cfg = parse_ast_cfg(&tt_ast); - let dnf = DnfExpr::new(&cfg); - let why_inactive = dnf.why_inactive(opts).unwrap().to_string(); - expect.assert_eq(&why_inactive); } #[track_caller] fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) { let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap(); - let tt_ast = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); + let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); let tt = syntax_node_to_token_tree( - tt_ast.syntax(), + tt.syntax(), DummyTestSpanMap, DUMMY, DocCommentDesugarMode::ProcMacro, @@ -86,10 +66,6 @@ fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) { let dnf = DnfExpr::new(&cfg); let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::>(); assert_eq!(hints, expected_hints); - let cfg = parse_ast_cfg(&tt_ast); - let dnf = DnfExpr::new(&cfg); - let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::>(); 
- assert_eq!(hints, expected_hints); } #[test] diff --git a/crates/hir-def/Cargo.toml b/crates/hir-def/Cargo.toml index e1f60742d324..abb4819a7672 100644 --- a/crates/hir-def/Cargo.toml +++ b/crates/hir-def/Cargo.toml @@ -45,8 +45,7 @@ mbe.workspace = true cfg.workspace = true tt.workspace = true span.workspace = true -thin-vec.workspace = true -syntax-bridge.workspace = true +thin-vec = "0.2.14" [dev-dependencies] expect-test.workspace = true @@ -54,6 +53,7 @@ expect-test.workspace = true # local deps test-utils.workspace = true test-fixture.workspace = true +syntax-bridge.workspace = true [features] in-rust-tree = ["hir-expand/in-rust-tree"] diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs new file mode 100644 index 000000000000..b4fcfa11aea7 --- /dev/null +++ b/crates/hir-def/src/attr.rs @@ -0,0 +1,901 @@ +//! A higher level attributes based on TokenTree, with also some shortcuts. + +use std::{borrow::Cow, convert::identity, hash::Hash, ops}; + +use base_db::Crate; +use cfg::{CfgExpr, CfgOptions}; +use either::Either; +use hir_expand::{ + HirFileId, InFile, + attrs::{Attr, AttrId, RawAttrs, collect_attrs}, + span_map::SpanMapRef, +}; +use intern::{Symbol, sym}; +use la_arena::{ArenaMap, Idx, RawIdx}; +use mbe::DelimiterKind; +use rustc_abi::ReprOptions; +use span::AstIdNode; +use syntax::{ + AstPtr, + ast::{self, HasAttrs}, +}; +use triomphe::Arc; +use tt::iter::{TtElement, TtIter}; + +use crate::{ + AdtId, AstIdLoc, AttrDefId, GenericParamId, HasModule, LocalFieldId, Lookup, MacroId, + VariantId, + db::DefDatabase, + item_tree::block_item_tree_query, + lang_item::LangItem, + nameres::{ModuleOrigin, ModuleSource}, + src::{HasChildSource, HasSource}, +}; + +/// Desugared attributes of an item post `cfg_attr` expansion. 
+#[derive(Default, Debug, Clone, PartialEq, Eq)] +pub struct Attrs(RawAttrs); + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct AttrsWithOwner { + attrs: Attrs, + owner: AttrDefId, +} + +impl Attrs { + pub fn new( + db: &dyn DefDatabase, + owner: &dyn ast::HasAttrs, + span_map: SpanMapRef<'_>, + cfg_options: &CfgOptions, + ) -> Self { + Attrs(RawAttrs::new_expanded(db, owner, span_map, cfg_options)) + } + + pub fn get(&self, id: AttrId) -> Option<&Attr> { + (**self).iter().find(|attr| attr.id == id) + } + + pub(crate) fn expand_cfg_attr( + db: &dyn DefDatabase, + krate: Crate, + raw_attrs: RawAttrs, + ) -> Attrs { + Attrs(raw_attrs.expand_cfg_attr(db, krate)) + } + + pub(crate) fn is_cfg_enabled_for( + db: &dyn DefDatabase, + owner: &dyn ast::HasAttrs, + span_map: SpanMapRef<'_>, + cfg_options: &CfgOptions, + ) -> Result<(), CfgExpr> { + RawAttrs::attrs_iter_expanded::(db, owner, span_map, cfg_options) + .filter_map(|attr| attr.cfg()) + .find_map(|cfg| match cfg_options.check(&cfg).is_none_or(identity) { + true => None, + false => Some(cfg), + }) + .map_or(Ok(()), Err) + } +} + +impl ops::Deref for Attrs { + type Target = [Attr]; + + fn deref(&self) -> &[Attr] { + &self.0 + } +} + +impl ops::Deref for AttrsWithOwner { + type Target = Attrs; + + fn deref(&self) -> &Attrs { + &self.attrs + } +} + +impl Attrs { + pub const EMPTY: Self = Self(RawAttrs::EMPTY); + + pub(crate) fn fields_attrs_query( + db: &dyn DefDatabase, + v: VariantId, + ) -> Arc> { + let _p = tracing::info_span!("fields_attrs_query").entered(); + let mut res = ArenaMap::default(); + let (fields, file_id, krate) = match v { + VariantId::EnumVariantId(it) => { + let loc = it.lookup(db); + let krate = loc.parent.lookup(db).container.krate; + let source = loc.source(db); + (source.value.field_list(), source.file_id, krate) + } + VariantId::StructId(it) => { + let loc = it.lookup(db); + let krate = loc.container.krate; + let source = loc.source(db); + (source.value.field_list(), source.file_id, krate) 
+ } + VariantId::UnionId(it) => { + let loc = it.lookup(db); + let krate = loc.container.krate; + let source = loc.source(db); + ( + source.value.record_field_list().map(ast::FieldList::RecordFieldList), + source.file_id, + krate, + ) + } + }; + let Some(fields) = fields else { + return Arc::new(res); + }; + + let cfg_options = krate.cfg_options(db); + let span_map = db.span_map(file_id); + + match fields { + ast::FieldList::RecordFieldList(fields) => { + let mut idx = 0; + for field in fields.fields() { + let attrs = + Attrs(RawAttrs::new_expanded(db, &field, span_map.as_ref(), cfg_options)); + if attrs.is_cfg_enabled(cfg_options).is_ok() { + res.insert(Idx::from_raw(RawIdx::from(idx)), attrs); + idx += 1; + } + } + } + ast::FieldList::TupleFieldList(fields) => { + let mut idx = 0; + for field in fields.fields() { + let attrs = + Attrs(RawAttrs::new_expanded(db, &field, span_map.as_ref(), cfg_options)); + if attrs.is_cfg_enabled(cfg_options).is_ok() { + res.insert(Idx::from_raw(RawIdx::from(idx)), attrs); + idx += 1; + } + } + } + } + + res.shrink_to_fit(); + Arc::new(res) + } +} + +impl Attrs { + #[inline] + pub fn by_key(&self, key: Symbol) -> AttrQuery<'_> { + AttrQuery { attrs: self, key } + } + + #[inline] + pub fn rust_analyzer_tool(&self) -> impl Iterator { + self.iter() + .filter(|&attr| attr.path.segments().first().is_some_and(|s| *s == sym::rust_analyzer)) + } + + #[inline] + pub fn cfg(&self) -> Option { + let mut cfgs = self.by_key(sym::cfg).tt_values().map(CfgExpr::parse); + let first = cfgs.next()?; + match cfgs.next() { + Some(second) => { + let cfgs = [first, second].into_iter().chain(cfgs); + Some(CfgExpr::All(cfgs.collect())) + } + None => Some(first), + } + } + + #[inline] + pub fn cfgs(&self) -> impl Iterator + '_ { + self.by_key(sym::cfg).tt_values().map(CfgExpr::parse) + } + + #[inline] + pub(crate) fn is_cfg_enabled(&self, cfg_options: &CfgOptions) -> Result<(), CfgExpr> { + self.cfgs().try_for_each(|cfg| { + if cfg_options.check(&cfg) != 
Some(false) { Ok(()) } else { Err(cfg) } + }) + } + + #[inline] + pub fn lang(&self) -> Option<&Symbol> { + self.by_key(sym::lang).string_value() + } + + #[inline] + pub fn lang_item(&self) -> Option { + self.by_key(sym::lang).string_value().and_then(LangItem::from_symbol) + } + + #[inline] + pub fn has_doc_hidden(&self) -> bool { + self.by_key(sym::doc).tt_values().any(|tt| { + tt.top_subtree().delimiter.kind == DelimiterKind::Parenthesis && + matches!(tt.token_trees().flat_tokens(), [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::hidden) + }) + } + + #[inline] + pub fn has_doc_notable_trait(&self) -> bool { + self.by_key(sym::doc).tt_values().any(|tt| { + tt.top_subtree().delimiter.kind == DelimiterKind::Parenthesis && + matches!(tt.token_trees().flat_tokens(), [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::notable_trait) + }) + } + + #[inline] + pub fn doc_exprs(&self) -> impl Iterator + '_ { + self.by_key(sym::doc).tt_values().map(DocExpr::parse) + } + + #[inline] + pub fn doc_aliases(&self) -> impl Iterator + '_ { + self.doc_exprs().flat_map(|doc_expr| doc_expr.aliases().to_vec()) + } + + #[inline] + pub fn export_name(&self) -> Option<&Symbol> { + self.by_key(sym::export_name).string_value() + } + + #[inline] + pub fn is_proc_macro(&self) -> bool { + self.by_key(sym::proc_macro).exists() + } + + #[inline] + pub fn is_proc_macro_attribute(&self) -> bool { + self.by_key(sym::proc_macro_attribute).exists() + } + + #[inline] + pub fn is_proc_macro_derive(&self) -> bool { + self.by_key(sym::proc_macro_derive).exists() + } + + #[inline] + pub fn is_test(&self) -> bool { + self.iter().any(|it| { + it.path() + .segments() + .iter() + .rev() + .zip([sym::core, sym::prelude, sym::v1, sym::test].iter().rev()) + .all(|it| it.0 == it.1) + }) + } + + #[inline] + pub fn is_ignore(&self) -> bool { + self.by_key(sym::ignore).exists() + } + + #[inline] + pub fn is_bench(&self) -> bool { + self.by_key(sym::bench).exists() + } + + #[inline] 
+ pub fn is_unstable(&self) -> bool { + self.by_key(sym::unstable).exists() + } + + #[inline] + pub fn rustc_legacy_const_generics(&self) -> Option>> { + self.by_key(sym::rustc_legacy_const_generics) + .tt_values() + .next() + .map(parse_rustc_legacy_const_generics) + .filter(|it| !it.is_empty()) + .map(Box::new) + } + + #[inline] + pub fn repr(&self) -> Option { + self.by_key(sym::repr).tt_values().filter_map(parse_repr_tt).fold(None, |acc, repr| { + acc.map_or(Some(repr), |mut acc| { + merge_repr(&mut acc, repr); + Some(acc) + }) + }) + } +} + +fn parse_rustc_legacy_const_generics(tt: &crate::tt::TopSubtree) -> Box<[u32]> { + let mut indices = Vec::new(); + let mut iter = tt.iter(); + while let (Some(first), second) = (iter.next(), iter.next()) { + match first { + TtElement::Leaf(tt::Leaf::Literal(lit)) => match lit.symbol.as_str().parse() { + Ok(index) => indices.push(index), + Err(_) => break, + }, + _ => break, + } + + if let Some(comma) = second { + match comma { + TtElement::Leaf(tt::Leaf::Punct(punct)) if punct.char == ',' => {} + _ => break, + } + } + } + + indices.into_boxed_slice() +} + +fn merge_repr(this: &mut ReprOptions, other: ReprOptions) { + let ReprOptions { int, align, pack, flags, field_shuffle_seed: _ } = this; + flags.insert(other.flags); + *align = (*align).max(other.align); + *pack = match (*pack, other.pack) { + (Some(pack), None) | (None, Some(pack)) => Some(pack), + _ => (*pack).min(other.pack), + }; + if other.int.is_some() { + *int = other.int; + } +} + +fn parse_repr_tt(tt: &crate::tt::TopSubtree) -> Option { + use crate::builtin_type::{BuiltinInt, BuiltinUint}; + use rustc_abi::{Align, Integer, IntegerType, ReprFlags, ReprOptions}; + + match tt.top_subtree().delimiter { + tt::Delimiter { kind: DelimiterKind::Parenthesis, .. 
} => {} + _ => return None, + } + + let mut acc = ReprOptions::default(); + let mut tts = tt.iter(); + while let Some(tt) = tts.next() { + let TtElement::Leaf(tt::Leaf::Ident(ident)) = tt else { + continue; + }; + let repr = match &ident.sym { + s if *s == sym::packed => { + let pack = if let Some(TtElement::Subtree(_, mut tt_iter)) = tts.peek() { + tts.next(); + if let Some(TtElement::Leaf(tt::Leaf::Literal(lit))) = tt_iter.next() { + lit.symbol.as_str().parse().unwrap_or_default() + } else { + 0 + } + } else { + 0 + }; + let pack = Some(Align::from_bytes(pack).unwrap_or(Align::ONE)); + ReprOptions { pack, ..Default::default() } + } + s if *s == sym::align => { + let mut align = None; + if let Some(TtElement::Subtree(_, mut tt_iter)) = tts.peek() { + tts.next(); + if let Some(TtElement::Leaf(tt::Leaf::Literal(lit))) = tt_iter.next() + && let Ok(a) = lit.symbol.as_str().parse() + { + align = Align::from_bytes(a).ok(); + } + } + ReprOptions { align, ..Default::default() } + } + s if *s == sym::C => ReprOptions { flags: ReprFlags::IS_C, ..Default::default() }, + s if *s == sym::transparent => { + ReprOptions { flags: ReprFlags::IS_TRANSPARENT, ..Default::default() } + } + s if *s == sym::simd => ReprOptions { flags: ReprFlags::IS_SIMD, ..Default::default() }, + repr => { + let mut int = None; + if let Some(builtin) = BuiltinInt::from_suffix_sym(repr) + .map(Either::Left) + .or_else(|| BuiltinUint::from_suffix_sym(repr).map(Either::Right)) + { + int = Some(match builtin { + Either::Left(bi) => match bi { + BuiltinInt::Isize => IntegerType::Pointer(true), + BuiltinInt::I8 => IntegerType::Fixed(Integer::I8, true), + BuiltinInt::I16 => IntegerType::Fixed(Integer::I16, true), + BuiltinInt::I32 => IntegerType::Fixed(Integer::I32, true), + BuiltinInt::I64 => IntegerType::Fixed(Integer::I64, true), + BuiltinInt::I128 => IntegerType::Fixed(Integer::I128, true), + }, + Either::Right(bu) => match bu { + BuiltinUint::Usize => IntegerType::Pointer(false), + BuiltinUint::U8 => 
IntegerType::Fixed(Integer::I8, false), + BuiltinUint::U16 => IntegerType::Fixed(Integer::I16, false), + BuiltinUint::U32 => IntegerType::Fixed(Integer::I32, false), + BuiltinUint::U64 => IntegerType::Fixed(Integer::I64, false), + BuiltinUint::U128 => IntegerType::Fixed(Integer::I128, false), + }, + }); + } + ReprOptions { int, ..Default::default() } + } + }; + merge_repr(&mut acc, repr); + } + + Some(acc) +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum DocAtom { + /// eg. `#[doc(hidden)]` + Flag(Symbol), + /// eg. `#[doc(alias = "it")]` + /// + /// Note that a key can have multiple values that are all considered "active" at the same time. + /// For example, `#[doc(alias = "x")]` and `#[doc(alias = "y")]`. + KeyValue { key: Symbol, value: Symbol }, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum DocExpr { + Invalid, + /// eg. `#[doc(hidden)]`, `#[doc(alias = "x")]` + Atom(DocAtom), + /// eg. `#[doc(alias("x", "y"))]` + Alias(Vec), +} + +impl From for DocExpr { + fn from(atom: DocAtom) -> Self { + DocExpr::Atom(atom) + } +} + +impl DocExpr { + fn parse(tt: &tt::TopSubtree) -> DocExpr { + next_doc_expr(tt.iter()).unwrap_or(DocExpr::Invalid) + } + + pub fn aliases(&self) -> &[Symbol] { + match self { + DocExpr::Atom(DocAtom::KeyValue { key, value }) if *key == sym::alias => { + std::slice::from_ref(value) + } + DocExpr::Alias(aliases) => aliases, + _ => &[], + } + } +} + +fn next_doc_expr(mut it: TtIter<'_, S>) -> Option { + let name = match it.next() { + None => return None, + Some(TtElement::Leaf(tt::Leaf::Ident(ident))) => ident.sym.clone(), + Some(_) => return Some(DocExpr::Invalid), + }; + + // Peek + let ret = match it.peek() { + Some(TtElement::Leaf(tt::Leaf::Punct(punct))) if punct.char == '=' => { + it.next(); + match it.next() { + Some(TtElement::Leaf(tt::Leaf::Literal(tt::Literal { + symbol: text, + kind: tt::LitKind::Str, + .. 
+ }))) => DocAtom::KeyValue { key: name, value: text.clone() }.into(), + _ => return Some(DocExpr::Invalid), + } + } + Some(TtElement::Subtree(_, subtree_iter)) => { + it.next(); + let subs = parse_comma_sep(subtree_iter); + match &name { + s if *s == sym::alias => DocExpr::Alias(subs), + _ => DocExpr::Invalid, + } + } + _ => DocAtom::Flag(name).into(), + }; + Some(ret) +} + +fn parse_comma_sep(iter: TtIter<'_, S>) -> Vec { + iter.filter_map(|tt| match tt { + TtElement::Leaf(tt::Leaf::Literal(tt::Literal { + kind: tt::LitKind::Str, symbol, .. + })) => Some(symbol.clone()), + _ => None, + }) + .collect() +} + +impl AttrsWithOwner { + pub fn new(db: &dyn DefDatabase, owner: AttrDefId) -> Self { + Self { attrs: db.attrs(owner), owner } + } + + pub(crate) fn attrs_query(db: &dyn DefDatabase, def: AttrDefId) -> Attrs { + let _p = tracing::info_span!("attrs_query").entered(); + // FIXME: this should use `Trace` to avoid duplication in `source_map` below + match def { + AttrDefId::ModuleId(module) => { + let def_map = module.def_map(db); + let mod_data = &def_map[module.local_id]; + + let raw_attrs = match mod_data.origin { + ModuleOrigin::File { definition, declaration_tree_id, declaration, .. } => { + let decl_attrs = declaration_tree_id + .item_tree(db) + .raw_attrs(declaration.upcast()) + .clone(); + let tree = db.file_item_tree(definition.into()); + let def_attrs = tree.top_level_raw_attrs().clone(); + decl_attrs.merge(def_attrs) + } + ModuleOrigin::CrateRoot { definition } => { + let tree = db.file_item_tree(definition.into()); + tree.top_level_raw_attrs().clone() + } + ModuleOrigin::Inline { definition_tree_id, definition } => { + definition_tree_id.item_tree(db).raw_attrs(definition.upcast()).clone() + } + ModuleOrigin::BlockExpr { id, .. 
} => { + let tree = block_item_tree_query(db, id); + tree.top_level_raw_attrs().clone() + } + }; + Attrs::expand_cfg_attr(db, module.krate, raw_attrs) + } + AttrDefId::FieldId(it) => db.fields_attrs(it.parent)[it.local_id].clone(), + AttrDefId::EnumVariantId(it) => attrs_from_ast_id_loc(db, it), + AttrDefId::AdtId(it) => match it { + AdtId::StructId(it) => attrs_from_ast_id_loc(db, it), + AdtId::EnumId(it) => attrs_from_ast_id_loc(db, it), + AdtId::UnionId(it) => attrs_from_ast_id_loc(db, it), + }, + AttrDefId::TraitId(it) => attrs_from_ast_id_loc(db, it), + AttrDefId::MacroId(it) => match it { + MacroId::Macro2Id(it) => attrs_from_ast_id_loc(db, it), + MacroId::MacroRulesId(it) => attrs_from_ast_id_loc(db, it), + MacroId::ProcMacroId(it) => attrs_from_ast_id_loc(db, it), + }, + AttrDefId::ImplId(it) => attrs_from_ast_id_loc(db, it), + AttrDefId::ConstId(it) => attrs_from_ast_id_loc(db, it), + AttrDefId::StaticId(it) => attrs_from_ast_id_loc(db, it), + AttrDefId::FunctionId(it) => attrs_from_ast_id_loc(db, it), + AttrDefId::TypeAliasId(it) => attrs_from_ast_id_loc(db, it), + AttrDefId::GenericParamId(it) => match it { + GenericParamId::ConstParamId(it) => { + let src = it.parent().child_source(db); + // FIXME: We should be never getting `None` here. + Attrs(match src.value.get(it.local_id()) { + Some(val) => RawAttrs::new_expanded( + db, + val, + db.span_map(src.file_id).as_ref(), + def.krate(db).cfg_options(db), + ), + None => RawAttrs::EMPTY, + }) + } + GenericParamId::TypeParamId(it) => { + let src = it.parent().child_source(db); + // FIXME: We should be never getting `None` here. + Attrs(match src.value.get(it.local_id()) { + Some(val) => RawAttrs::new_expanded( + db, + val, + db.span_map(src.file_id).as_ref(), + def.krate(db).cfg_options(db), + ), + None => RawAttrs::EMPTY, + }) + } + GenericParamId::LifetimeParamId(it) => { + let src = it.parent.child_source(db); + // FIXME: We should be never getting `None` here. 
+ Attrs(match src.value.get(it.local_id) { + Some(val) => RawAttrs::new_expanded( + db, + val, + db.span_map(src.file_id).as_ref(), + def.krate(db).cfg_options(db), + ), + None => RawAttrs::EMPTY, + }) + } + }, + AttrDefId::ExternBlockId(it) => attrs_from_ast_id_loc(db, it), + AttrDefId::ExternCrateId(it) => attrs_from_ast_id_loc(db, it), + AttrDefId::UseId(it) => attrs_from_ast_id_loc(db, it), + } + } + + pub fn source_map(&self, db: &dyn DefDatabase) -> AttrSourceMap { + let owner = match self.owner { + AttrDefId::ModuleId(module) => { + // Modules can have 2 attribute owners (the `mod x;` item, and the module file itself). + + let def_map = module.def_map(db); + let mod_data = &def_map[module.local_id]; + match mod_data.declaration_source(db) { + Some(it) => { + let mut map = AttrSourceMap::new(InFile::new(it.file_id, &it.value)); + if let InFile { file_id, value: ModuleSource::SourceFile(file) } = + mod_data.definition_source(db) + { + map.append_module_inline_attrs(AttrSourceMap::new(InFile::new( + file_id, &file, + ))); + } + return map; + } + None => { + let InFile { file_id, value } = mod_data.definition_source(db); + let attrs_owner = match &value { + ModuleSource::SourceFile(file) => file as &dyn ast::HasAttrs, + ModuleSource::Module(module) => module as &dyn ast::HasAttrs, + ModuleSource::BlockExpr(block) => block as &dyn ast::HasAttrs, + }; + return AttrSourceMap::new(InFile::new(file_id, attrs_owner)); + } + } + } + AttrDefId::FieldId(id) => { + let map = db.fields_attrs_source_map(id.parent); + let file_id = id.parent.file_id(db); + let root = db.parse_or_expand(file_id); + let owner = ast::AnyHasAttrs::new(map[id.local_id].to_node(&root)); + InFile::new(file_id, owner) + } + AttrDefId::AdtId(adt) => match adt { + AdtId::StructId(id) => any_has_attrs(db, id), + AdtId::UnionId(id) => any_has_attrs(db, id), + AdtId::EnumId(id) => any_has_attrs(db, id), + }, + AttrDefId::FunctionId(id) => any_has_attrs(db, id), + AttrDefId::EnumVariantId(id) => 
any_has_attrs(db, id), + AttrDefId::StaticId(id) => any_has_attrs(db, id), + AttrDefId::ConstId(id) => any_has_attrs(db, id), + AttrDefId::TraitId(id) => any_has_attrs(db, id), + AttrDefId::TypeAliasId(id) => any_has_attrs(db, id), + AttrDefId::MacroId(id) => match id { + MacroId::Macro2Id(id) => any_has_attrs(db, id), + MacroId::MacroRulesId(id) => any_has_attrs(db, id), + MacroId::ProcMacroId(id) => any_has_attrs(db, id), + }, + AttrDefId::ImplId(id) => any_has_attrs(db, id), + AttrDefId::GenericParamId(id) => match id { + GenericParamId::ConstParamId(id) => id + .parent() + .child_source(db) + .map(|source| ast::AnyHasAttrs::new(source[id.local_id()].clone())), + GenericParamId::TypeParamId(id) => id + .parent() + .child_source(db) + .map(|source| ast::AnyHasAttrs::new(source[id.local_id()].clone())), + GenericParamId::LifetimeParamId(id) => id + .parent + .child_source(db) + .map(|source| ast::AnyHasAttrs::new(source[id.local_id].clone())), + }, + AttrDefId::ExternBlockId(id) => any_has_attrs(db, id), + AttrDefId::ExternCrateId(id) => any_has_attrs(db, id), + AttrDefId::UseId(id) => any_has_attrs(db, id), + }; + + AttrSourceMap::new(owner.as_ref().map(|node| node as &dyn HasAttrs)) + } +} + +#[derive(Debug)] +pub struct AttrSourceMap { + source: Vec>, + file_id: HirFileId, + /// If this map is for a module, this will be the [`HirFileId`] of the module's definition site, + /// while `file_id` will be the one of the module declaration site. + /// The usize is the index into `source` from which point on the entries reside in the def site + /// file. 
+ mod_def_site_file_id: Option<(HirFileId, usize)>, +} + +impl AttrSourceMap { + fn new(owner: InFile<&dyn ast::HasAttrs>) -> Self { + Self { + source: collect_attrs(owner.value).map(|(_, it)| it).collect(), + file_id: owner.file_id, + mod_def_site_file_id: None, + } + } + + /// Append a second source map to this one, this is required for modules, whose outline and inline + /// attributes can reside in different files + fn append_module_inline_attrs(&mut self, other: Self) { + assert!(self.mod_def_site_file_id.is_none() && other.mod_def_site_file_id.is_none()); + let len = self.source.len(); + self.source.extend(other.source); + if other.file_id != self.file_id { + self.mod_def_site_file_id = Some((other.file_id, len)); + } + } + + /// Maps the lowered `Attr` back to its original syntax node. + /// + /// `attr` must come from the `owner` used for AttrSourceMap + /// + /// Note that the returned syntax node might be a `#[cfg_attr]`, or a doc comment, instead of + /// the attribute represented by `Attr`. 
+ pub fn source_of(&self, attr: &Attr) -> InFile<&Either> { + self.source_of_id(attr.id) + } + + pub fn source_of_id(&self, id: AttrId) -> InFile<&Either> { + let ast_idx = id.ast_index(); + let file_id = match self.mod_def_site_file_id { + Some((file_id, def_site_cut)) if def_site_cut <= ast_idx => file_id, + _ => self.file_id, + }; + + self.source + .get(ast_idx) + .map(|it| InFile::new(file_id, it)) + .unwrap_or_else(|| panic!("cannot find attr at index {id:?}")) + } +} + +#[derive(Debug, Clone)] +pub struct AttrQuery<'attr> { + attrs: &'attr Attrs, + key: Symbol, +} + +impl<'attr> AttrQuery<'attr> { + #[inline] + pub fn tt_values(self) -> impl Iterator { + self.attrs().filter_map(|attr| attr.token_tree_value()) + } + + #[inline] + pub fn string_value(self) -> Option<&'attr Symbol> { + self.attrs().find_map(|attr| attr.string_value()) + } + + #[inline] + pub fn string_value_with_span(self) -> Option<(&'attr Symbol, span::Span)> { + self.attrs().find_map(|attr| attr.string_value_with_span()) + } + + #[inline] + pub fn string_value_unescape(self) -> Option> { + self.attrs().find_map(|attr| attr.string_value_unescape()) + } + + #[inline] + pub fn exists(self) -> bool { + self.attrs().next().is_some() + } + + #[inline] + pub fn attrs(self) -> impl Iterator + Clone { + let key = self.key; + self.attrs.iter().filter(move |attr| attr.path.as_ident().is_some_and(|s| *s == key)) + } + + /// Find string value for a specific key inside token tree + /// + /// ```ignore + /// #[doc(html_root_url = "url")] + /// ^^^^^^^^^^^^^ key + /// ``` + #[inline] + pub fn find_string_value_in_tt(self, key: Symbol) -> Option<&'attr str> { + self.tt_values().find_map(|tt| { + let name = tt.iter() + .skip_while(|tt| !matches!(tt, TtElement::Leaf(tt::Leaf::Ident(tt::Ident { sym, ..} )) if *sym == key)) + .nth(2); + + match name { + Some(TtElement::Leaf(tt::Leaf::Literal(tt::Literal{ symbol: text, kind: tt::LitKind::Str | tt::LitKind::StrRaw(_) , ..}))) => Some(text.as_str()), + _ => None + } 
+ }) + } +} + +fn any_has_attrs<'db>( + db: &(dyn DefDatabase + 'db), + id: impl Lookup>, +) -> InFile { + id.lookup(db).source(db).map(ast::AnyHasAttrs::new) +} + +fn attrs_from_ast_id_loc<'db, N: AstIdNode + HasAttrs>( + db: &(dyn DefDatabase + 'db), + lookup: impl Lookup + HasModule>, +) -> Attrs { + let loc = lookup.lookup(db); + let source = loc.source(db); + let span_map = db.span_map(source.file_id); + let cfg_options = loc.krate(db).cfg_options(db); + Attrs(RawAttrs::new_expanded(db, &source.value, span_map.as_ref(), cfg_options)) +} + +pub(crate) fn fields_attrs_source_map( + db: &dyn DefDatabase, + def: VariantId, +) -> Arc>>> { + let mut res = ArenaMap::default(); + let child_source = def.child_source(db); + + for (idx, variant) in child_source.value.iter() { + res.insert( + idx, + variant + .as_ref() + .either(|l| AstPtr::new(l).wrap_left(), |r| AstPtr::new(r).wrap_right()), + ); + } + + Arc::new(res) +} + +#[cfg(test)] +mod tests { + //! This module contains tests for doc-expression parsing. + //! Currently, it tests `#[doc(hidden)]` and `#[doc(alias)]`. 
+ + use intern::Symbol; + use span::EditionedFileId; + use triomphe::Arc; + + use hir_expand::span_map::{RealSpanMap, SpanMap}; + use span::FileId; + use syntax::{AstNode, TextRange, ast}; + use syntax_bridge::{DocCommentDesugarMode, syntax_node_to_token_tree}; + + use crate::attr::{DocAtom, DocExpr}; + + fn assert_parse_result(input: &str, expected: DocExpr) { + let source_file = ast::SourceFile::parse(input, span::Edition::CURRENT).ok().unwrap(); + let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); + let map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute( + EditionedFileId::current_edition(FileId::from_raw(0)), + ))); + let tt = syntax_node_to_token_tree( + tt.syntax(), + map.as_ref(), + map.span_for_range(TextRange::empty(0.into())), + DocCommentDesugarMode::ProcMacro, + ); + let cfg = DocExpr::parse(&tt); + assert_eq!(cfg, expected); + } + + #[test] + fn test_doc_expr_parser() { + assert_parse_result("#![doc(hidden)]", DocAtom::Flag(Symbol::intern("hidden")).into()); + + assert_parse_result( + r#"#![doc(alias = "foo")]"#, + DocAtom::KeyValue { key: Symbol::intern("alias"), value: Symbol::intern("foo") }.into(), + ); + + assert_parse_result( + r#"#![doc(alias("foo"))]"#, + DocExpr::Alias([Symbol::intern("foo")].into()), + ); + assert_parse_result( + r#"#![doc(alias("foo", "bar", "baz"))]"#, + DocExpr::Alias( + [Symbol::intern("foo"), Symbol::intern("bar"), Symbol::intern("baz")].into(), + ), + ); + + assert_parse_result( + r#" + #[doc(alias("Bar", "Qux"))] + struct Foo;"#, + DocExpr::Alias([Symbol::intern("Bar"), Symbol::intern("Qux")].into()), + ); + } +} diff --git a/crates/hir-def/src/attrs.rs b/crates/hir-def/src/attrs.rs deleted file mode 100644 index 1897cb5205aa..000000000000 --- a/crates/hir-def/src/attrs.rs +++ /dev/null @@ -1,1613 +0,0 @@ -//! Attributes for anything that is not name resolution. -//! -//! The fundamental idea of this module stems from the observation that most "interesting" -//! 
attributes have a more memory-compact form than storing their full syntax, and -//! that most of the attributes are flags, and those that are not are rare. Therefore, -//! this module defines [`AttrFlags`], which is a bitflag enum that contains only a yes/no -//! answer to whether an attribute is present on an item. For most attributes, that's all -//! that is interesting us; for the rest of them, we define another query that extracts -//! their data. A key part is that every one of those queries will have a wrapper method -//! that queries (or is given) the `AttrFlags` and checks for the presence of the attribute; -//! if it is not present, we do not call the query, to prevent Salsa from needing to record -//! its value. This way, queries are only called on items that have the attribute, which is -//! usually only a few. -//! -//! An exception to this model that is also defined in this module is documentation (doc -//! comments and `#[doc = "..."]` attributes). But it also has a more compact form than -//! the attribute: a concatenated string of the full docs as well as a source map -//! to map it back to AST (which is needed for things like resolving links in doc comments -//! and highlight injection). The lowering and upmapping of doc comments is a bit complicated, -//! but it is encapsulated in the [`Docs`] struct. 
- -use std::{ - convert::Infallible, - iter::Peekable, - ops::{ControlFlow, Range}, -}; - -use base_db::Crate; -use cfg::{CfgExpr, CfgOptions}; -use either::Either; -use hir_expand::{ - HirFileId, InFile, Lookup, - attrs::{Meta, expand_cfg_attr, expand_cfg_attr_with_doc_comments}, -}; -use intern::Symbol; -use itertools::Itertools; -use la_arena::ArenaMap; -use rustc_abi::ReprOptions; -use rustc_hash::FxHashSet; -use smallvec::SmallVec; -use syntax::{ - AstNode, AstToken, NodeOrToken, SmolStr, SyntaxNode, SyntaxToken, T, - ast::{self, AttrDocCommentIter, HasAttrs, IsString, TokenTreeChildren}, -}; -use tt::{TextRange, TextSize}; - -use crate::{ - AdtId, AstIdLoc, AttrDefId, FieldId, FunctionId, GenericDefId, HasModule, InternedModuleId, - LifetimeParamId, LocalFieldId, MacroId, TypeOrConstParamId, VariantId, - db::DefDatabase, - hir::generics::{GenericParams, LocalLifetimeParamId, LocalTypeOrConstParamId}, - lang_item::LangItem, - nameres::ModuleOrigin, - src::{HasChildSource, HasSource}, -}; - -#[inline] -fn attrs_from_ast_id_loc>( - db: &dyn DefDatabase, - lookup: impl Lookup + HasModule>, -) -> (InFile, Crate) { - let loc = lookup.lookup(db); - let source = loc.source(db); - let krate = loc.krate(db); - (source.map(|it| it.into()), krate) -} - -#[inline] -fn extract_doc_tt_attr(attr_flags: &mut AttrFlags, tt: ast::TokenTree) { - for atom in DocAtom::parse(tt) { - match atom { - DocAtom::Flag(flag) => match &*flag { - "notable_trait" => attr_flags.insert(AttrFlags::IS_DOC_NOTABLE_TRAIT), - "hidden" => attr_flags.insert(AttrFlags::IS_DOC_HIDDEN), - _ => {} - }, - DocAtom::KeyValue { key, value: _ } => match &*key { - "alias" => attr_flags.insert(AttrFlags::HAS_DOC_ALIASES), - "keyword" => attr_flags.insert(AttrFlags::HAS_DOC_KEYWORD), - _ => {} - }, - DocAtom::Alias(_) => attr_flags.insert(AttrFlags::HAS_DOC_ALIASES), - } - } -} - -fn extract_ra_completions(attr_flags: &mut AttrFlags, tt: ast::TokenTree) { - let tt = TokenTreeChildren::new(&tt); - if let 
Ok(NodeOrToken::Token(option)) = tt.exactly_one() - && option.kind().is_any_identifier() - { - match option.text() { - "ignore_flyimport" => attr_flags.insert(AttrFlags::COMPLETE_IGNORE_FLYIMPORT), - "ignore_methods" => attr_flags.insert(AttrFlags::COMPLETE_IGNORE_METHODS), - "ignore_flyimport_methods" => { - attr_flags.insert(AttrFlags::COMPLETE_IGNORE_FLYIMPORT_METHODS) - } - _ => {} - } - } -} - -fn extract_rustc_skip_during_method_dispatch(attr_flags: &mut AttrFlags, tt: ast::TokenTree) { - let iter = TokenTreeChildren::new(&tt); - for kind in iter { - if let NodeOrToken::Token(kind) = kind - && kind.kind().is_any_identifier() - { - match kind.text() { - "array" => attr_flags.insert(AttrFlags::RUSTC_SKIP_ARRAY_DURING_METHOD_DISPATCH), - "boxed_slice" => { - attr_flags.insert(AttrFlags::RUSTC_SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH) - } - _ => {} - } - } - } -} - -#[inline] -fn match_attr_flags(attr_flags: &mut AttrFlags, attr: Meta) -> ControlFlow { - match attr { - Meta::NamedKeyValue { name: Some(name), value, .. 
} => match name.text() { - "deprecated" => attr_flags.insert(AttrFlags::IS_DEPRECATED), - "lang" => attr_flags.insert(AttrFlags::LANG_ITEM), - "path" => attr_flags.insert(AttrFlags::HAS_PATH), - "unstable" => attr_flags.insert(AttrFlags::IS_UNSTABLE), - "export_name" => { - if let Some(value) = value - && let Some(value) = ast::String::cast(value) - && let Ok(value) = value.value() - && *value == *"main" - { - attr_flags.insert(AttrFlags::IS_EXPORT_NAME_MAIN); - } - } - _ => {} - }, - Meta::TokenTree { path, tt } => match path.segments.len() { - 1 => match path.segments[0].text() { - "deprecated" => attr_flags.insert(AttrFlags::IS_DEPRECATED), - "cfg" => attr_flags.insert(AttrFlags::HAS_CFG), - "doc" => extract_doc_tt_attr(attr_flags, tt), - "repr" => attr_flags.insert(AttrFlags::HAS_REPR), - "target_feature" => attr_flags.insert(AttrFlags::HAS_TARGET_FEATURE), - "proc_macro_derive" | "rustc_builtin_macro" => { - attr_flags.insert(AttrFlags::IS_DERIVE_OR_BUILTIN_MACRO) - } - "unstable" => attr_flags.insert(AttrFlags::IS_UNSTABLE), - "rustc_layout_scalar_valid_range_start" | "rustc_layout_scalar_valid_range_end" => { - attr_flags.insert(AttrFlags::RUSTC_LAYOUT_SCALAR_VALID_RANGE) - } - "rustc_legacy_const_generics" => { - attr_flags.insert(AttrFlags::HAS_LEGACY_CONST_GENERICS) - } - "rustc_skip_during_method_dispatch" => { - extract_rustc_skip_during_method_dispatch(attr_flags, tt) - } - _ => {} - }, - 2 => match path.segments[0].text() { - "rust_analyzer" => match path.segments[1].text() { - "completions" => extract_ra_completions(attr_flags, tt), - _ => {} - }, - _ => {} - }, - _ => {} - }, - Meta::Path { path } => { - match path.segments.len() { - 1 => match path.segments[0].text() { - "rustc_has_incoherent_inherent_impls" => { - attr_flags.insert(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) - } - "rustc_allow_incoherent_impl" => { - attr_flags.insert(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL) - } - "fundamental" => attr_flags.insert(AttrFlags::FUNDAMENTAL), - 
"no_std" => attr_flags.insert(AttrFlags::IS_NO_STD), - "may_dangle" => attr_flags.insert(AttrFlags::MAY_DANGLE), - "rustc_paren_sugar" => attr_flags.insert(AttrFlags::RUSTC_PAREN_SUGAR), - "rustc_coinductive" => attr_flags.insert(AttrFlags::RUSTC_COINDUCTIVE), - "rustc_force_inline" => attr_flags.insert(AttrFlags::RUSTC_FORCE_INLINE), - "unstable" => attr_flags.insert(AttrFlags::IS_UNSTABLE), - "deprecated" => attr_flags.insert(AttrFlags::IS_DEPRECATED), - "macro_export" => attr_flags.insert(AttrFlags::IS_MACRO_EXPORT), - "no_mangle" => attr_flags.insert(AttrFlags::NO_MANGLE), - "non_exhaustive" => attr_flags.insert(AttrFlags::NON_EXHAUSTIVE), - "ignore" => attr_flags.insert(AttrFlags::IS_IGNORE), - "bench" => attr_flags.insert(AttrFlags::IS_BENCH), - "rustc_const_panic_str" => attr_flags.insert(AttrFlags::RUSTC_CONST_PANIC_STR), - "rustc_intrinsic" => attr_flags.insert(AttrFlags::RUSTC_INTRINSIC), - "rustc_safe_intrinsic" => attr_flags.insert(AttrFlags::RUSTC_SAFE_INTRINSIC), - "rustc_intrinsic_must_be_overridden" => { - attr_flags.insert(AttrFlags::RUSTC_INTRINSIC_MUST_BE_OVERRIDDEN) - } - "rustc_allocator" => attr_flags.insert(AttrFlags::RUSTC_ALLOCATOR), - "rustc_deallocator" => attr_flags.insert(AttrFlags::RUSTC_DEALLOCATOR), - "rustc_reallocator" => attr_flags.insert(AttrFlags::RUSTC_REALLOCATOR), - "rustc_allocator_zeroed" => { - attr_flags.insert(AttrFlags::RUSTC_ALLOCATOR_ZEROED) - } - "rustc_reservation_impl" => { - attr_flags.insert(AttrFlags::RUSTC_RESERVATION_IMPL) - } - "rustc_deprecated_safe_2024" => { - attr_flags.insert(AttrFlags::RUSTC_DEPRECATED_SAFE_2024) - } - "rustc_skip_array_during_method_dispatch" => { - attr_flags.insert(AttrFlags::RUSTC_SKIP_ARRAY_DURING_METHOD_DISPATCH) - } - _ => {} - }, - 2 => match path.segments[0].text() { - "rust_analyzer" => match path.segments[1].text() { - "skip" => attr_flags.insert(AttrFlags::RUST_ANALYZER_SKIP), - _ => {} - }, - _ => {} - }, - _ => {} - } - - if path.is_test { - 
attr_flags.insert(AttrFlags::IS_TEST); - } - } - _ => {} - }; - ControlFlow::Continue(()) -} - -bitflags::bitflags! { - #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] - pub struct AttrFlags: u64 { - const RUST_ANALYZER_SKIP = 1 << 0; - - const LANG_ITEM = 1 << 1; - - const HAS_DOC_ALIASES = 1 << 2; - const HAS_DOC_KEYWORD = 1 << 3; - const IS_DOC_NOTABLE_TRAIT = 1 << 4; - const IS_DOC_HIDDEN = 1 << 5; - - const RUSTC_HAS_INCOHERENT_INHERENT_IMPLS = 1 << 6; - const RUSTC_ALLOW_INCOHERENT_IMPL = 1 << 7; - const FUNDAMENTAL = 1 << 8; - const RUSTC_SKIP_ARRAY_DURING_METHOD_DISPATCH = 1 << 9; - const RUSTC_SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH = 1 << 10; - const HAS_REPR = 1 << 11; - const HAS_TARGET_FEATURE = 1 << 12; - const RUSTC_DEPRECATED_SAFE_2024 = 1 << 13; - const HAS_LEGACY_CONST_GENERICS = 1 << 14; - const NO_MANGLE = 1 << 15; - const NON_EXHAUSTIVE = 1 << 16; - const RUSTC_RESERVATION_IMPL = 1 << 17; - const RUSTC_CONST_PANIC_STR = 1 << 18; - const MAY_DANGLE = 1 << 19; - - const RUSTC_INTRINSIC = 1 << 20; - const RUSTC_SAFE_INTRINSIC = 1 << 21; - const RUSTC_INTRINSIC_MUST_BE_OVERRIDDEN = 1 << 22; - const RUSTC_ALLOCATOR = 1 << 23; - const RUSTC_DEALLOCATOR = 1 << 24; - const RUSTC_REALLOCATOR = 1 << 25; - const RUSTC_ALLOCATOR_ZEROED = 1 << 26; - - const IS_UNSTABLE = 1 << 27; - const IS_IGNORE = 1 << 28; - // FIXME: `IS_TEST` and `IS_BENCH` should be based on semantic information, not textual match. 
- const IS_BENCH = 1 << 29; - const IS_TEST = 1 << 30; - const IS_EXPORT_NAME_MAIN = 1 << 31; - const IS_MACRO_EXPORT = 1 << 32; - const IS_NO_STD = 1 << 33; - const IS_DERIVE_OR_BUILTIN_MACRO = 1 << 34; - const IS_DEPRECATED = 1 << 35; - const HAS_PATH = 1 << 36; - const HAS_CFG = 1 << 37; - - const COMPLETE_IGNORE_FLYIMPORT = 1 << 38; - const COMPLETE_IGNORE_FLYIMPORT_METHODS = 1 << 39; - const COMPLETE_IGNORE_METHODS = 1 << 40; - - const RUSTC_LAYOUT_SCALAR_VALID_RANGE = 1 << 41; - const RUSTC_PAREN_SUGAR = 1 << 42; - const RUSTC_COINDUCTIVE = 1 << 43; - const RUSTC_FORCE_INLINE = 1 << 44; - } -} - -fn attrs_source( - db: &dyn DefDatabase, - owner: AttrDefId, -) -> (InFile, Option>, Crate) { - let (owner, krate) = match owner { - AttrDefId::ModuleId(id) => { - let id = id.loc(db); - let def_map = id.def_map(db); - let (definition, declaration) = match def_map[id.local_id].origin { - ModuleOrigin::CrateRoot { definition } => { - let file = db.parse(definition).tree(); - (InFile::new(definition.into(), ast::AnyHasAttrs::from(file)), None) - } - ModuleOrigin::File { declaration, declaration_tree_id, definition, .. } => { - let declaration = InFile::new(declaration_tree_id.file_id(), declaration); - let declaration = declaration.with_value(declaration.to_node(db)); - let definition_source = db.parse(definition).tree(); - (InFile::new(definition.into(), definition_source.into()), Some(declaration)) - } - ModuleOrigin::Inline { definition_tree_id, definition } => { - let definition = InFile::new(definition_tree_id.file_id(), definition); - let definition = definition.with_value(definition.to_node(db).into()); - (definition, None) - } - ModuleOrigin::BlockExpr { block, .. 
} => { - let definition = block.to_node(db); - (block.with_value(definition.into()), None) - } - }; - return (definition, declaration, id.krate); - } - AttrDefId::AdtId(AdtId::StructId(it)) => attrs_from_ast_id_loc(db, it), - AttrDefId::AdtId(AdtId::UnionId(it)) => attrs_from_ast_id_loc(db, it), - AttrDefId::AdtId(AdtId::EnumId(it)) => attrs_from_ast_id_loc(db, it), - AttrDefId::FunctionId(it) => attrs_from_ast_id_loc(db, it), - AttrDefId::EnumVariantId(it) => attrs_from_ast_id_loc(db, it), - AttrDefId::StaticId(it) => attrs_from_ast_id_loc(db, it), - AttrDefId::ConstId(it) => attrs_from_ast_id_loc(db, it), - AttrDefId::TraitId(it) => attrs_from_ast_id_loc(db, it), - AttrDefId::TypeAliasId(it) => attrs_from_ast_id_loc(db, it), - AttrDefId::MacroId(MacroId::MacroRulesId(it)) => attrs_from_ast_id_loc(db, it), - AttrDefId::MacroId(MacroId::Macro2Id(it)) => attrs_from_ast_id_loc(db, it), - AttrDefId::MacroId(MacroId::ProcMacroId(it)) => attrs_from_ast_id_loc(db, it), - AttrDefId::ImplId(it) => attrs_from_ast_id_loc(db, it), - AttrDefId::ExternBlockId(it) => attrs_from_ast_id_loc(db, it), - AttrDefId::ExternCrateId(it) => attrs_from_ast_id_loc(db, it), - AttrDefId::UseId(it) => attrs_from_ast_id_loc(db, it), - }; - (owner, None, krate) -} - -fn collect_attrs( - db: &dyn DefDatabase, - owner: AttrDefId, - mut callback: impl FnMut(Meta) -> ControlFlow, -) -> Option { - let (source, outer_mod_decl, krate) = attrs_source(db, owner); - - let mut cfg_options = None; - expand_cfg_attr( - outer_mod_decl - .into_iter() - .flat_map(|it| it.value.attrs()) - .chain(ast::attrs_including_inner(&source.value)), - || cfg_options.get_or_insert_with(|| krate.cfg_options(db)), - move |meta, _, _, _| callback(meta), - ) -} - -fn collect_field_attrs( - db: &dyn DefDatabase, - variant: VariantId, - mut field_attrs: impl FnMut(&CfgOptions, InFile) -> T, -) -> ArenaMap { - let (variant_syntax, krate) = match variant { - VariantId::EnumVariantId(it) => attrs_from_ast_id_loc(db, it), - 
VariantId::StructId(it) => attrs_from_ast_id_loc(db, it), - VariantId::UnionId(it) => attrs_from_ast_id_loc(db, it), - }; - let cfg_options = krate.cfg_options(db); - let variant_syntax = variant_syntax - .with_value(ast::VariantDef::cast(variant_syntax.value.syntax().clone()).unwrap()); - let fields = match &variant_syntax.value { - ast::VariantDef::Struct(it) => it.field_list(), - ast::VariantDef::Union(it) => it.record_field_list().map(ast::FieldList::RecordFieldList), - ast::VariantDef::Variant(it) => it.field_list(), - }; - let Some(fields) = fields else { - return ArenaMap::new(); - }; - - let mut result = ArenaMap::new(); - let mut idx = 0; - match fields { - ast::FieldList::RecordFieldList(fields) => { - for field in fields.fields() { - if AttrFlags::is_cfg_enabled_for(&field, cfg_options).is_ok() { - result.insert( - la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(idx)), - field_attrs(cfg_options, variant_syntax.with_value(field.into())), - ); - idx += 1; - } - } - } - ast::FieldList::TupleFieldList(fields) => { - for field in fields.fields() { - if AttrFlags::is_cfg_enabled_for(&field, cfg_options).is_ok() { - result.insert( - la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(idx)), - field_attrs(cfg_options, variant_syntax.with_value(field.into())), - ); - idx += 1; - } - } - } - } - result.shrink_to_fit(); - result -} - -#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash)] -pub struct RustcLayoutScalarValidRange { - pub start: Option, - pub end: Option, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -struct DocsSourceMapLine { - /// The offset in [`Docs::docs`]. - string_offset: TextSize, - /// The offset in the AST of the text. - ast_offset: TextSize, -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct Docs { - /// The concatenated string of all `#[doc = "..."]` attributes and documentation comments. - docs: String, - /// A sorted map from an offset in `docs` to an offset in the source code. 
- docs_source_map: Vec, - /// If the item is an outlined module (`mod foo;`), `docs_source_map` store the concatenated - /// list of the outline and inline docs (outline first). Then, this field contains the [`HirFileId`] - /// of the outline declaration, and the index in `docs` from which the inline docs - /// begin. - outline_mod: Option<(HirFileId, usize)>, - inline_file: HirFileId, - /// The size the prepended prefix, which does not map to real doc comments. - prefix_len: TextSize, - /// The offset in `docs` from which the docs are inner attributes/comments. - inline_inner_docs_start: Option, - /// Like `inline_inner_docs_start`, but for `outline_mod`. This can happen only when merging `Docs` - /// (as outline modules don't have inner attributes). - outline_inner_docs_start: Option, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum IsInnerDoc { - No, - Yes, -} - -impl IsInnerDoc { - #[inline] - pub fn yes(self) -> bool { - self == IsInnerDoc::Yes - } -} - -impl Docs { - #[inline] - pub fn docs(&self) -> &str { - &self.docs - } - - #[inline] - pub fn into_docs(self) -> String { - self.docs - } - - pub fn find_ast_range( - &self, - mut string_range: TextRange, - ) -> Option<(InFile, IsInnerDoc)> { - if string_range.start() < self.prefix_len { - return None; - } - string_range -= self.prefix_len; - - let mut file = self.inline_file; - let mut inner_docs_start = self.inline_inner_docs_start; - // Check whether the range is from the outline, the inline, or both. - let source_map = if let Some((outline_mod_file, outline_mod_end)) = self.outline_mod { - if let Some(first_inline) = self.docs_source_map.get(outline_mod_end) { - if string_range.end() <= first_inline.string_offset { - // The range is completely in the outline. - file = outline_mod_file; - inner_docs_start = self.outline_inner_docs_start; - &self.docs_source_map[..outline_mod_end] - } else if string_range.start() >= first_inline.string_offset { - // The range is completely in the inline. 
- &self.docs_source_map[outline_mod_end..] - } else { - // The range is combined from the outline and the inline - cannot map it back. - return None; - } - } else { - // There is no inline. - file = outline_mod_file; - inner_docs_start = self.outline_inner_docs_start; - &self.docs_source_map - } - } else { - // There is no outline. - &self.docs_source_map - }; - - let after_range = - source_map.partition_point(|line| line.string_offset <= string_range.start()) - 1; - let after_range = &source_map[after_range..]; - let line = after_range.first()?; - if after_range.get(1).is_some_and(|next_line| next_line.string_offset < string_range.end()) - { - // The range is combined from two lines - cannot map it back. - return None; - } - let ast_range = string_range - line.string_offset + line.ast_offset; - let is_inner = if inner_docs_start - .is_some_and(|inner_docs_start| string_range.start() >= inner_docs_start) - { - IsInnerDoc::Yes - } else { - IsInnerDoc::No - }; - Some((InFile::new(file, ast_range), is_inner)) - } - - #[inline] - pub fn shift_by(&mut self, offset: TextSize) { - self.prefix_len += offset; - } - - pub fn prepend_str(&mut self, s: &str) { - self.prefix_len += TextSize::of(s); - self.docs.insert_str(0, s); - } - - pub fn append_str(&mut self, s: &str) { - self.docs.push_str(s); - } - - pub fn append(&mut self, other: &Docs) { - let other_offset = TextSize::of(&self.docs); - - assert!( - self.outline_mod.is_none() && other.outline_mod.is_none(), - "cannot merge `Docs` that have `outline_mod` set" - ); - self.outline_mod = Some((self.inline_file, self.docs_source_map.len())); - self.inline_file = other.inline_file; - self.outline_inner_docs_start = self.inline_inner_docs_start; - self.inline_inner_docs_start = other.inline_inner_docs_start.map(|it| it + other_offset); - - self.docs.push_str(&other.docs); - self.docs_source_map.extend(other.docs_source_map.iter().map( - |&DocsSourceMapLine { string_offset, ast_offset }| DocsSourceMapLine { - ast_offset, - 
string_offset: string_offset + other_offset, - }, - )); - } - - fn extend_with_doc_comment(&mut self, comment: ast::Comment, indent: &mut usize) { - let Some((doc, offset)) = comment.doc_comment() else { return }; - self.extend_with_doc_str(doc, comment.syntax().text_range().start() + offset, indent); - } - - fn extend_with_doc_attr(&mut self, value: SyntaxToken, indent: &mut usize) { - let Some(value) = ast::String::cast(value) else { return }; - let Some(value_offset) = value.text_range_between_quotes() else { return }; - let value_offset = value_offset.start(); - let Ok(value) = value.value() else { return }; - // FIXME: Handle source maps for escaped text. - self.extend_with_doc_str(&value, value_offset, indent); - } - - fn extend_with_doc_str(&mut self, doc: &str, mut offset_in_ast: TextSize, indent: &mut usize) { - for line in doc.split('\n') { - self.docs_source_map.push(DocsSourceMapLine { - string_offset: TextSize::of(&self.docs), - ast_offset: offset_in_ast, - }); - offset_in_ast += TextSize::of(line) + TextSize::of("\n"); - - let line = line.trim_end(); - if let Some(line_indent) = line.chars().position(|ch| !ch.is_whitespace()) { - // Empty lines are handled because `position()` returns `None` for them. - *indent = std::cmp::min(*indent, line_indent); - } - self.docs.push_str(line); - self.docs.push('\n'); - } - } - - fn remove_indent(&mut self, indent: usize, start_source_map_index: usize) { - /// In case of panics, we want to avoid corrupted UTF-8 in `self.docs`, so we clear it. - struct Guard<'a>(&'a mut Docs); - impl Drop for Guard<'_> { - fn drop(&mut self) { - let Docs { - docs, - docs_source_map, - outline_mod, - inline_file: _, - prefix_len: _, - inline_inner_docs_start: _, - outline_inner_docs_start: _, - } = self.0; - // Don't use `String::clear()` here because it's not guaranteed to not do UTF-8-dependent things, - // and we may have temporarily broken the string's encoding. 
- unsafe { docs.as_mut_vec() }.clear(); - // This is just to avoid panics down the road. - docs_source_map.clear(); - *outline_mod = None; - } - } - - if self.docs.is_empty() { - return; - } - - let guard = Guard(self); - let source_map = &mut guard.0.docs_source_map[start_source_map_index..]; - let Some(&DocsSourceMapLine { string_offset: mut copy_into, .. }) = source_map.first() - else { - return; - }; - // We basically want to remove multiple ranges from a string. Doing this efficiently (without O(N^2) - // or allocations) requires unsafe. Basically, for each line, we copy the line minus the indent into - // consecutive to the previous line (which may have moved). Then at the end we truncate. - let mut accumulated_offset = TextSize::new(0); - for idx in 0..source_map.len() { - let string_end_offset = source_map - .get(idx + 1) - .map_or_else(|| TextSize::of(&guard.0.docs), |next_attr| next_attr.string_offset); - let line_source = &mut source_map[idx]; - let line_docs = - &guard.0.docs[TextRange::new(line_source.string_offset, string_end_offset)]; - let line_docs_len = TextSize::of(line_docs); - let indent_size = line_docs.char_indices().nth(indent).map_or_else( - || TextSize::of(line_docs) - TextSize::of("\n"), - |(offset, _)| TextSize::new(offset as u32), - ); - unsafe { guard.0.docs.as_bytes_mut() }.copy_within( - Range::::from(TextRange::new( - line_source.string_offset + indent_size, - string_end_offset, - )), - copy_into.into(), - ); - copy_into += line_docs_len - indent_size; - - if let Some(inner_attrs_start) = &mut guard.0.inline_inner_docs_start - && *inner_attrs_start == line_source.string_offset - { - *inner_attrs_start -= accumulated_offset; - } - // The removals in the string accumulate, but in the AST not, because it already points - // to the beginning of each attribute. 
- // Also, we need to shift the AST offset of every line, but the string offset of the first - // line should not get shifted (in general, the shift for the string offset is by the - // number of lines until the current one, excluding the current one). - line_source.string_offset -= accumulated_offset; - line_source.ast_offset += indent_size; - - accumulated_offset += indent_size; - } - // Don't use `String::truncate()` here because it's not guaranteed to not do UTF-8-dependent things, - // and we may have temporarily broken the string's encoding. - unsafe { guard.0.docs.as_mut_vec() }.truncate(copy_into.into()); - - std::mem::forget(guard); - } - - fn remove_last_newline(&mut self) { - self.docs.truncate(self.docs.len().saturating_sub(1)); - } - - fn shrink_to_fit(&mut self) { - let Docs { - docs, - docs_source_map, - outline_mod: _, - inline_file: _, - prefix_len: _, - inline_inner_docs_start: _, - outline_inner_docs_start: _, - } = self; - docs.shrink_to_fit(); - docs_source_map.shrink_to_fit(); - } -} - -#[derive(Debug, PartialEq, Eq, Hash)] -pub struct DeriveInfo { - pub trait_name: Symbol, - pub helpers: Box<[Symbol]>, -} - -fn extract_doc_aliases(result: &mut Vec, attr: Meta) -> ControlFlow { - if let Meta::TokenTree { path, tt } = attr - && path.is1("doc") - { - for atom in DocAtom::parse(tt) { - match atom { - DocAtom::Alias(aliases) => { - result.extend(aliases.into_iter().map(|alias| Symbol::intern(&alias))) - } - DocAtom::KeyValue { key, value } if key == "alias" => { - result.push(Symbol::intern(&value)) - } - _ => {} - } - } - } - ControlFlow::Continue(()) -} - -fn extract_cfgs(result: &mut Vec, attr: Meta) -> ControlFlow { - if let Meta::TokenTree { path, tt } = attr - && path.is1("cfg") - { - result.push(CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(&tt).peekable())); - } - ControlFlow::Continue(()) -} - -fn extract_docs<'a>( - get_cfg_options: &dyn Fn() -> &'a CfgOptions, - source: InFile, - outer_mod_decl: Option>, - inner_attrs_node: 
Option, -) -> Option> { - let mut result = Docs { - docs: String::new(), - docs_source_map: Vec::new(), - outline_mod: None, - inline_file: source.file_id, - prefix_len: TextSize::new(0), - inline_inner_docs_start: None, - outline_inner_docs_start: None, - }; - - let mut cfg_options = None; - let mut extend_with_attrs = - |result: &mut Docs, node: &SyntaxNode, expect_inner_attrs, indent: &mut usize| { - expand_cfg_attr_with_doc_comments::<_, Infallible>( - AttrDocCommentIter::from_syntax_node(node).filter(|attr| match attr { - Either::Left(attr) => attr.kind().is_inner() == expect_inner_attrs, - Either::Right(comment) => comment.kind().doc.is_some_and(|kind| { - (kind == ast::CommentPlacement::Inner) == expect_inner_attrs - }), - }), - || cfg_options.get_or_insert_with(get_cfg_options), - |attr| { - match attr { - Either::Right(doc_comment) => { - result.extend_with_doc_comment(doc_comment, indent) - } - Either::Left((attr, _, _, _)) => match attr { - // FIXME: Handle macros: `#[doc = concat!("foo", "bar")]`. - Meta::NamedKeyValue { - name: Some(name), value: Some(value), .. 
- } if name.text() == "doc" => { - result.extend_with_doc_attr(value, indent); - } - _ => {} - }, - } - ControlFlow::Continue(()) - }, - ); - }; - - if let Some(outer_mod_decl) = outer_mod_decl { - let mut indent = usize::MAX; - extend_with_attrs(&mut result, outer_mod_decl.value.syntax(), false, &mut indent); - result.remove_indent(indent, 0); - result.outline_mod = Some((outer_mod_decl.file_id, result.docs_source_map.len())); - } - - let inline_source_map_start = result.docs_source_map.len(); - let mut indent = usize::MAX; - extend_with_attrs(&mut result, source.value.syntax(), false, &mut indent); - if let Some(inner_attrs_node) = &inner_attrs_node { - result.inline_inner_docs_start = Some(TextSize::of(&result.docs)); - extend_with_attrs(&mut result, inner_attrs_node, true, &mut indent); - } - result.remove_indent(indent, inline_source_map_start); - - result.remove_last_newline(); - - result.shrink_to_fit(); - - if result.docs.is_empty() { None } else { Some(Box::new(result)) } -} - -#[salsa::tracked] -impl AttrFlags { - #[salsa::tracked] - pub fn query(db: &dyn DefDatabase, owner: AttrDefId) -> AttrFlags { - let mut attr_flags = AttrFlags::empty(); - collect_attrs(db, owner, |attr| match_attr_flags(&mut attr_flags, attr)); - attr_flags - } - - #[inline] - pub fn query_field(db: &dyn DefDatabase, field: FieldId) -> AttrFlags { - return field_attr_flags(db, field.parent) - .get(field.local_id) - .copied() - .unwrap_or_else(AttrFlags::empty); - - #[salsa::tracked(returns(ref))] - fn field_attr_flags( - db: &dyn DefDatabase, - variant: VariantId, - ) -> ArenaMap { - collect_field_attrs(db, variant, |cfg_options, field| { - let mut attr_flags = AttrFlags::empty(); - expand_cfg_attr( - field.value.attrs(), - || cfg_options, - |attr, _, _, _| match_attr_flags(&mut attr_flags, attr), - ); - attr_flags - }) - } - } - - #[inline] - pub fn query_generic_params( - db: &dyn DefDatabase, - def: GenericDefId, - ) -> &(ArenaMap, ArenaMap) - { - let generic_params = 
GenericParams::new(db, def); - let params_count_excluding_self = - generic_params.len() - usize::from(generic_params.trait_self_param().is_some()); - if params_count_excluding_self == 0 { - return const { &(ArenaMap::new(), ArenaMap::new()) }; - } - return generic_params_attr_flags(db, def); - - #[salsa::tracked(returns(ref))] - fn generic_params_attr_flags( - db: &dyn DefDatabase, - def: GenericDefId, - ) -> (ArenaMap, ArenaMap) - { - let mut lifetimes = ArenaMap::new(); - let mut type_and_consts = ArenaMap::new(); - - let mut cfg_options = None; - let mut cfg_options = - || *cfg_options.get_or_insert_with(|| def.krate(db).cfg_options(db)); - - let lifetimes_source = HasChildSource::::child_source(&def, db); - for (lifetime_id, lifetime) in lifetimes_source.value.iter() { - let mut attr_flags = AttrFlags::empty(); - expand_cfg_attr(lifetime.attrs(), &mut cfg_options, |attr, _, _, _| { - match_attr_flags(&mut attr_flags, attr) - }); - if !attr_flags.is_empty() { - lifetimes.insert(lifetime_id, attr_flags); - } - } - - let type_and_consts_source = - HasChildSource::::child_source(&def, db); - for (type_or_const_id, type_or_const) in type_and_consts_source.value.iter() { - let mut attr_flags = AttrFlags::empty(); - expand_cfg_attr(type_or_const.attrs(), &mut cfg_options, |attr, _, _, _| { - match_attr_flags(&mut attr_flags, attr) - }); - if !attr_flags.is_empty() { - type_and_consts.insert(type_or_const_id, attr_flags); - } - } - - lifetimes.shrink_to_fit(); - type_and_consts.shrink_to_fit(); - (lifetimes, type_and_consts) - } - } - - #[inline] - pub fn query_lifetime_param(db: &dyn DefDatabase, owner: LifetimeParamId) -> AttrFlags { - AttrFlags::query_generic_params(db, owner.parent) - .0 - .get(owner.local_id) - .copied() - .unwrap_or_else(AttrFlags::empty) - } - #[inline] - pub fn query_type_or_const_param(db: &dyn DefDatabase, owner: TypeOrConstParamId) -> AttrFlags { - AttrFlags::query_generic_params(db, owner.parent) - .1 - .get(owner.local_id) - .copied() - 
.unwrap_or_else(AttrFlags::empty) - } - - pub(crate) fn is_cfg_enabled_for( - owner: &dyn HasAttrs, - cfg_options: &CfgOptions, - ) -> Result<(), CfgExpr> { - let attrs = ast::attrs_including_inner(owner); - let result = expand_cfg_attr( - attrs, - || cfg_options, - |attr, _, _, _| { - if let Meta::TokenTree { path, tt } = attr - && path.is1("cfg") - && let cfg = - CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(&tt).peekable()) - && cfg_options.check(&cfg) == Some(false) - { - ControlFlow::Break(cfg) - } else { - ControlFlow::Continue(()) - } - }, - ); - match result { - Some(cfg) => Err(cfg), - None => Ok(()), - } - } - - #[inline] - pub fn lang_item(db: &dyn DefDatabase, owner: AttrDefId) -> Option { - AttrFlags::query(db, owner).lang_item_with_attrs(db, owner) - } - - #[inline] - pub fn lang_item_with_attrs(self, db: &dyn DefDatabase, owner: AttrDefId) -> Option { - if !self.contains(AttrFlags::LANG_ITEM) { - // Don't create the query in case this is not a lang item, this wastes memory. - return None; - } - - return lang_item(db, owner); - - #[salsa::tracked] - fn lang_item(db: &dyn DefDatabase, owner: AttrDefId) -> Option { - collect_attrs(db, owner, |attr| { - if let Meta::NamedKeyValue { name: Some(name), value: Some(value), .. } = attr - && name.text() == "lang" - && let Some(value) = ast::String::cast(value) - && let Ok(value) = value.value() - && let symbol = Symbol::intern(&value) - && let Some(lang_item) = LangItem::from_symbol(&symbol) - { - ControlFlow::Break(lang_item) - } else { - ControlFlow::Continue(()) - } - }) - } - } - - #[inline] - pub fn repr(db: &dyn DefDatabase, owner: AdtId) -> Option { - if !AttrFlags::query(db, owner.into()).contains(AttrFlags::HAS_REPR) { - // Don't create the query in case this has no repr, this wastes memory. 
- return None; - } - - return repr(db, owner); - - #[salsa::tracked] - fn repr(db: &dyn DefDatabase, owner: AdtId) -> Option { - let mut result = None; - collect_attrs::(db, owner.into(), |attr| { - if let Meta::TokenTree { path, tt } = attr - && path.is1("repr") - && let Some(repr) = parse_repr_tt(&tt) - { - match &mut result { - Some(existing) => merge_repr(existing, repr), - None => result = Some(repr), - } - } - ControlFlow::Continue(()) - }); - result - } - } - - /// Call this only if there are legacy const generics, to save memory. - #[salsa::tracked(returns(ref))] - pub(crate) fn legacy_const_generic_indices( - db: &dyn DefDatabase, - owner: FunctionId, - ) -> Option> { - let result = collect_attrs(db, owner.into(), |attr| { - if let Meta::TokenTree { path, tt } = attr - && path.is1("rustc_legacy_const_generics") - { - let result = parse_rustc_legacy_const_generics(tt); - ControlFlow::Break(result) - } else { - ControlFlow::Continue(()) - } - }); - result.filter(|it| !it.is_empty()) - } - - // There aren't typically many crates, so it's okay to always make this a query without a flag. 
- #[salsa::tracked(returns(ref))] - pub fn doc_html_root_url(db: &dyn DefDatabase, krate: Crate) -> Option { - let root_file_id = krate.root_file_id(db); - let syntax = db.parse(root_file_id).tree(); - - let mut cfg_options = None; - expand_cfg_attr( - syntax.attrs(), - || cfg_options.get_or_insert(krate.cfg_options(db)), - |attr, _, _, _| { - if let Meta::TokenTree { path, tt } = attr - && path.is1("doc") - && let Some(result) = DocAtom::parse(tt).into_iter().find_map(|atom| { - if let DocAtom::KeyValue { key, value } = atom - && key == "html_root_url" - { - Some(value) - } else { - None - } - }) - { - ControlFlow::Break(result) - } else { - ControlFlow::Continue(()) - } - }, - ) - } - - #[inline] - pub fn target_features(db: &dyn DefDatabase, owner: FunctionId) -> &FxHashSet { - if !AttrFlags::query(db, owner.into()).contains(AttrFlags::HAS_TARGET_FEATURE) { - return const { &FxHashSet::with_hasher(rustc_hash::FxBuildHasher) }; - } - - return target_features(db, owner); - - #[salsa::tracked(returns(ref))] - fn target_features(db: &dyn DefDatabase, owner: FunctionId) -> FxHashSet { - let mut result = FxHashSet::default(); - collect_attrs::(db, owner.into(), |attr| { - if let Meta::TokenTree { path, tt } = attr - && path.is1("target_feature") - && let mut tt = TokenTreeChildren::new(&tt) - && let Some(NodeOrToken::Token(enable_ident)) = tt.next() - && enable_ident.text() == "enable" - && let Some(NodeOrToken::Token(eq_token)) = tt.next() - && eq_token.kind() == T![=] - && let Some(NodeOrToken::Token(features)) = tt.next() - && let Some(features) = ast::String::cast(features) - && let Ok(features) = features.value() - && tt.next().is_none() - { - result.extend(features.split(',').map(Symbol::intern)); - } - ControlFlow::Continue(()) - }); - result.shrink_to_fit(); - result - } - } - - #[inline] - pub fn rustc_layout_scalar_valid_range( - db: &dyn DefDatabase, - owner: AdtId, - ) -> RustcLayoutScalarValidRange { - if !AttrFlags::query(db, 
owner.into()).contains(AttrFlags::RUSTC_LAYOUT_SCALAR_VALID_RANGE) - { - return RustcLayoutScalarValidRange::default(); - } - - return rustc_layout_scalar_valid_range(db, owner); - - #[salsa::tracked] - fn rustc_layout_scalar_valid_range( - db: &dyn DefDatabase, - owner: AdtId, - ) -> RustcLayoutScalarValidRange { - let mut result = RustcLayoutScalarValidRange::default(); - collect_attrs::(db, owner.into(), |attr| { - if let Meta::TokenTree { path, tt } = attr - && (path.is1("rustc_layout_scalar_valid_range_start") - || path.is1("rustc_layout_scalar_valid_range_end")) - && let tt = TokenTreeChildren::new(&tt) - && let Ok(NodeOrToken::Token(value)) = tt.exactly_one() - && let Some(value) = ast::IntNumber::cast(value) - && let Ok(value) = value.value() - { - if path.is1("rustc_layout_scalar_valid_range_start") { - result.start = Some(value) - } else { - result.end = Some(value); - } - } - ControlFlow::Continue(()) - }); - result - } - } - - #[inline] - pub fn doc_aliases(self, db: &dyn DefDatabase, owner: Either) -> &[Symbol] { - if !self.contains(AttrFlags::HAS_DOC_ALIASES) { - return &[]; - } - return match owner { - Either::Left(it) => doc_aliases(db, it), - Either::Right(field) => fields_doc_aliases(db, field.parent) - .get(field.local_id) - .map(|it| &**it) - .unwrap_or_default(), - }; - - #[salsa::tracked(returns(ref))] - fn doc_aliases(db: &dyn DefDatabase, owner: AttrDefId) -> Box<[Symbol]> { - let mut result = Vec::new(); - collect_attrs::(db, owner, |attr| extract_doc_aliases(&mut result, attr)); - result.into_boxed_slice() - } - - #[salsa::tracked(returns(ref))] - fn fields_doc_aliases( - db: &dyn DefDatabase, - variant: VariantId, - ) -> ArenaMap> { - collect_field_attrs(db, variant, |cfg_options, field| { - let mut result = Vec::new(); - expand_cfg_attr( - field.value.attrs(), - || cfg_options, - |attr, _, _, _| extract_doc_aliases(&mut result, attr), - ); - result.into_boxed_slice() - }) - } - } - - #[inline] - pub fn cfgs(self, db: &dyn DefDatabase, 
owner: Either) -> Option<&CfgExpr> { - if !self.contains(AttrFlags::HAS_CFG) { - return None; - } - return match owner { - Either::Left(it) => cfgs(db, it).as_ref(), - Either::Right(field) => { - fields_cfgs(db, field.parent).get(field.local_id).and_then(|it| it.as_ref()) - } - }; - - // We LRU this query because it is only used by IDE. - #[salsa::tracked(returns(ref), lru = 250)] - fn cfgs(db: &dyn DefDatabase, owner: AttrDefId) -> Option { - let mut result = Vec::new(); - collect_attrs::(db, owner, |attr| extract_cfgs(&mut result, attr)); - match result.len() { - 0 => None, - 1 => result.into_iter().next(), - _ => Some(CfgExpr::All(result.into_boxed_slice())), - } - } - - // We LRU this query because it is only used by IDE. - #[salsa::tracked(returns(ref), lru = 50)] - fn fields_cfgs( - db: &dyn DefDatabase, - variant: VariantId, - ) -> ArenaMap> { - collect_field_attrs(db, variant, |cfg_options, field| { - let mut result = Vec::new(); - expand_cfg_attr( - field.value.attrs(), - || cfg_options, - |attr, _, _, _| extract_cfgs(&mut result, attr), - ); - match result.len() { - 0 => None, - 1 => result.into_iter().next(), - _ => Some(CfgExpr::All(result.into_boxed_slice())), - } - }) - } - } - - #[inline] - pub fn doc_keyword(db: &dyn DefDatabase, owner: InternedModuleId) -> Option { - if !AttrFlags::query(db, AttrDefId::ModuleId(owner)).contains(AttrFlags::HAS_DOC_KEYWORD) { - return None; - } - return doc_keyword(db, owner); - - #[salsa::tracked] - fn doc_keyword(db: &dyn DefDatabase, owner: InternedModuleId) -> Option { - collect_attrs(db, AttrDefId::ModuleId(owner), |attr| { - if let Meta::TokenTree { path, tt } = attr - && path.is1("doc") - { - for atom in DocAtom::parse(tt) { - if let DocAtom::KeyValue { key, value } = atom - && key == "keyword" - { - return ControlFlow::Break(Symbol::intern(&value)); - } - } - } - ControlFlow::Continue(()) - }) - } - } - - // We LRU this query because it is only used by IDE. 
- #[salsa::tracked(returns(ref), lru = 250)] - pub fn docs(db: &dyn DefDatabase, owner: AttrDefId) -> Option> { - let (source, outer_mod_decl, krate) = attrs_source(db, owner); - let inner_attrs_node = source.value.inner_attributes_node(); - extract_docs(&|| krate.cfg_options(db), source, outer_mod_decl, inner_attrs_node) - } - - #[inline] - pub fn field_docs(db: &dyn DefDatabase, field: FieldId) -> Option<&Docs> { - return fields_docs(db, field.parent).get(field.local_id).and_then(|it| it.as_deref()); - - // We LRU this query because it is only used by IDE. - #[salsa::tracked(returns(ref), lru = 50)] - pub fn fields_docs( - db: &dyn DefDatabase, - variant: VariantId, - ) -> ArenaMap>> { - collect_field_attrs(db, variant, |cfg_options, field| { - extract_docs(&|| cfg_options, field, None, None) - }) - } - } - - #[inline] - pub fn derive_info(db: &dyn DefDatabase, owner: MacroId) -> Option<&DeriveInfo> { - if !AttrFlags::query(db, owner.into()).contains(AttrFlags::IS_DERIVE_OR_BUILTIN_MACRO) { - return None; - } - - return derive_info(db, owner).as_ref(); - - #[salsa::tracked(returns(ref))] - fn derive_info(db: &dyn DefDatabase, owner: MacroId) -> Option { - collect_attrs(db, owner.into(), |attr| { - if let Meta::TokenTree { path, tt } = attr - && path.segments.len() == 1 - && matches!( - path.segments[0].text(), - "proc_macro_derive" | "rustc_builtin_macro" - ) - && let mut tt = TokenTreeChildren::new(&tt) - && let Some(NodeOrToken::Token(trait_name)) = tt.next() - && trait_name.kind().is_any_identifier() - { - let trait_name = Symbol::intern(trait_name.text()); - - let helpers = if let Some(NodeOrToken::Token(comma)) = tt.next() - && comma.kind() == T![,] - && let Some(NodeOrToken::Token(attributes)) = tt.next() - && attributes.text() == "attributes" - && let Some(NodeOrToken::Node(attributes)) = tt.next() - { - attributes - .syntax() - .children_with_tokens() - .filter_map(NodeOrToken::into_token) - .filter(|it| it.kind().is_any_identifier()) - .map(|it| 
Symbol::intern(it.text())) - .collect::>() - } else { - Box::new([]) - }; - - ControlFlow::Break(DeriveInfo { trait_name, helpers }) - } else { - ControlFlow::Continue(()) - } - }) - } - } -} - -fn merge_repr(this: &mut ReprOptions, other: ReprOptions) { - let ReprOptions { int, align, pack, flags, field_shuffle_seed: _ } = this; - flags.insert(other.flags); - *align = (*align).max(other.align); - *pack = match (*pack, other.pack) { - (Some(pack), None) | (None, Some(pack)) => Some(pack), - _ => (*pack).min(other.pack), - }; - if other.int.is_some() { - *int = other.int; - } -} - -fn parse_repr_tt(tt: &ast::TokenTree) -> Option { - use crate::builtin_type::{BuiltinInt, BuiltinUint}; - use rustc_abi::{Align, Integer, IntegerType, ReprFlags, ReprOptions}; - - let mut tts = TokenTreeChildren::new(tt).peekable(); - - let mut acc = ReprOptions::default(); - while let Some(tt) = tts.next() { - let NodeOrToken::Token(ident) = tt else { - continue; - }; - if !ident.kind().is_any_identifier() { - continue; - } - let repr = match ident.text() { - "packed" => { - let pack = if let Some(NodeOrToken::Node(tt)) = tts.peek() { - let tt = tt.clone(); - tts.next(); - let mut tt_iter = TokenTreeChildren::new(&tt); - if let Some(NodeOrToken::Token(lit)) = tt_iter.next() - && let Some(lit) = ast::IntNumber::cast(lit) - && let Ok(lit) = lit.value() - && let Ok(lit) = lit.try_into() - { - lit - } else { - 0 - } - } else { - 0 - }; - let pack = Some(Align::from_bytes(pack).unwrap_or(Align::ONE)); - ReprOptions { pack, ..Default::default() } - } - "align" => { - let mut align = None; - if let Some(NodeOrToken::Node(tt)) = tts.peek() { - let tt = tt.clone(); - tts.next(); - let mut tt_iter = TokenTreeChildren::new(&tt); - if let Some(NodeOrToken::Token(lit)) = tt_iter.next() - && let Some(lit) = ast::IntNumber::cast(lit) - && let Ok(lit) = lit.value() - && let Ok(lit) = lit.try_into() - { - align = Align::from_bytes(lit).ok(); - } - } - ReprOptions { align, ..Default::default() } - } - "C" 
=> ReprOptions { flags: ReprFlags::IS_C, ..Default::default() }, - "transparent" => ReprOptions { flags: ReprFlags::IS_TRANSPARENT, ..Default::default() }, - "simd" => ReprOptions { flags: ReprFlags::IS_SIMD, ..Default::default() }, - repr => { - let mut int = None; - if let Some(builtin) = BuiltinInt::from_suffix(repr) - .map(Either::Left) - .or_else(|| BuiltinUint::from_suffix(repr).map(Either::Right)) - { - int = Some(match builtin { - Either::Left(bi) => match bi { - BuiltinInt::Isize => IntegerType::Pointer(true), - BuiltinInt::I8 => IntegerType::Fixed(Integer::I8, true), - BuiltinInt::I16 => IntegerType::Fixed(Integer::I16, true), - BuiltinInt::I32 => IntegerType::Fixed(Integer::I32, true), - BuiltinInt::I64 => IntegerType::Fixed(Integer::I64, true), - BuiltinInt::I128 => IntegerType::Fixed(Integer::I128, true), - }, - Either::Right(bu) => match bu { - BuiltinUint::Usize => IntegerType::Pointer(false), - BuiltinUint::U8 => IntegerType::Fixed(Integer::I8, false), - BuiltinUint::U16 => IntegerType::Fixed(Integer::I16, false), - BuiltinUint::U32 => IntegerType::Fixed(Integer::I32, false), - BuiltinUint::U64 => IntegerType::Fixed(Integer::I64, false), - BuiltinUint::U128 => IntegerType::Fixed(Integer::I128, false), - }, - }); - } - ReprOptions { int, ..Default::default() } - } - }; - merge_repr(&mut acc, repr); - } - - Some(acc) -} - -fn parse_rustc_legacy_const_generics(tt: ast::TokenTree) -> Box<[u32]> { - TokenTreeChildren::new(&tt) - .filter_map(|param| { - ast::IntNumber::cast(param.into_token()?)?.value().ok()?.try_into().ok() - }) - .collect() -} - -#[derive(Debug)] -enum DocAtom { - /// eg. `#[doc(hidden)]` - Flag(SmolStr), - /// eg. `#[doc(alias = "it")]` - /// - /// Note that a key can have multiple values that are all considered "active" at the same time. - /// For example, `#[doc(alias = "x")]` and `#[doc(alias = "y")]`. - KeyValue { key: SmolStr, value: SmolStr }, - /// eg. 
`#[doc(alias("x", "y"))]` - Alias(Vec), -} - -impl DocAtom { - fn parse(tt: ast::TokenTree) -> SmallVec<[DocAtom; 1]> { - let mut iter = TokenTreeChildren::new(&tt).peekable(); - let mut result = SmallVec::new(); - while iter.peek().is_some() { - if let Some(expr) = next_doc_expr(&mut iter) { - result.push(expr); - } - } - result - } -} - -fn next_doc_expr(it: &mut Peekable) -> Option { - let name = match it.next() { - Some(NodeOrToken::Token(token)) if token.kind().is_any_identifier() => { - SmolStr::new(token.text()) - } - _ => return None, - }; - - let ret = match it.peek() { - Some(NodeOrToken::Token(eq)) if eq.kind() == T![=] => { - it.next(); - if let Some(NodeOrToken::Token(value)) = it.next() - && let Some(value) = ast::String::cast(value) - && let Ok(value) = value.value() - { - DocAtom::KeyValue { key: name, value: SmolStr::new(&*value) } - } else { - return None; - } - } - Some(NodeOrToken::Node(subtree)) => { - if name != "alias" { - return None; - } - let aliases = TokenTreeChildren::new(subtree) - .filter_map(|alias| { - Some(SmolStr::new(&*ast::String::cast(alias.into_token()?)?.value().ok()?)) - }) - .collect(); - it.next(); - DocAtom::Alias(aliases) - } - _ => DocAtom::Flag(name), - }; - Some(ret) -} - -#[cfg(test)] -mod tests { - use expect_test::expect; - use hir_expand::InFile; - use test_fixture::WithFixture; - use tt::{TextRange, TextSize}; - - use crate::attrs::IsInnerDoc; - use crate::{attrs::Docs, test_db::TestDB}; - - #[test] - fn docs() { - let (_db, file_id) = TestDB::with_single_file(""); - let mut docs = Docs { - docs: String::new(), - docs_source_map: Vec::new(), - outline_mod: None, - inline_file: file_id.into(), - prefix_len: TextSize::new(0), - inline_inner_docs_start: None, - outline_inner_docs_start: None, - }; - let mut indent = usize::MAX; - - let outer = " foo\n\tbar baz"; - let mut ast_offset = TextSize::new(123); - for line in outer.split('\n') { - docs.extend_with_doc_str(line, ast_offset, &mut indent); - ast_offset += 
TextSize::of(line) + TextSize::of("\n"); - } - - docs.inline_inner_docs_start = Some(TextSize::of(&docs.docs)); - ast_offset += TextSize::new(123); - let inner = " bar \n baz"; - for line in inner.split('\n') { - docs.extend_with_doc_str(line, ast_offset, &mut indent); - ast_offset += TextSize::of(line) + TextSize::of("\n"); - } - - assert_eq!(indent, 1); - expect![[r#" - [ - DocsSourceMapLine { - string_offset: 0, - ast_offset: 123, - }, - DocsSourceMapLine { - string_offset: 5, - ast_offset: 128, - }, - DocsSourceMapLine { - string_offset: 15, - ast_offset: 261, - }, - DocsSourceMapLine { - string_offset: 20, - ast_offset: 267, - }, - ] - "#]] - .assert_debug_eq(&docs.docs_source_map); - - docs.remove_indent(indent, 0); - - assert_eq!(docs.inline_inner_docs_start, Some(TextSize::new(13))); - - assert_eq!(docs.docs, "foo\nbar baz\nbar\nbaz\n"); - expect![[r#" - [ - DocsSourceMapLine { - string_offset: 0, - ast_offset: 124, - }, - DocsSourceMapLine { - string_offset: 4, - ast_offset: 129, - }, - DocsSourceMapLine { - string_offset: 13, - ast_offset: 262, - }, - DocsSourceMapLine { - string_offset: 17, - ast_offset: 268, - }, - ] - "#]] - .assert_debug_eq(&docs.docs_source_map); - - docs.append(&docs.clone()); - docs.prepend_str("prefix---"); - assert_eq!(docs.docs, "prefix---foo\nbar baz\nbar\nbaz\nfoo\nbar baz\nbar\nbaz\n"); - expect![[r#" - [ - DocsSourceMapLine { - string_offset: 0, - ast_offset: 124, - }, - DocsSourceMapLine { - string_offset: 4, - ast_offset: 129, - }, - DocsSourceMapLine { - string_offset: 13, - ast_offset: 262, - }, - DocsSourceMapLine { - string_offset: 17, - ast_offset: 268, - }, - DocsSourceMapLine { - string_offset: 21, - ast_offset: 124, - }, - DocsSourceMapLine { - string_offset: 25, - ast_offset: 129, - }, - DocsSourceMapLine { - string_offset: 34, - ast_offset: 262, - }, - DocsSourceMapLine { - string_offset: 38, - ast_offset: 268, - }, - ] - "#]] - .assert_debug_eq(&docs.docs_source_map); - - let range = |start, end| 
TextRange::new(TextSize::new(start), TextSize::new(end)); - let in_file = |range| InFile::new(file_id.into(), range); - assert_eq!(docs.find_ast_range(range(0, 2)), None); - assert_eq!(docs.find_ast_range(range(8, 10)), None); - assert_eq!( - docs.find_ast_range(range(9, 10)), - Some((in_file(range(124, 125)), IsInnerDoc::No)) - ); - assert_eq!(docs.find_ast_range(range(20, 23)), None); - assert_eq!( - docs.find_ast_range(range(23, 25)), - Some((in_file(range(263, 265)), IsInnerDoc::Yes)) - ); - } -} diff --git a/crates/hir-def/src/db.rs b/crates/hir-def/src/db.rs index ad2990087672..4e1d598623ab 100644 --- a/crates/hir-def/src/db.rs +++ b/crates/hir-def/src/db.rs @@ -1,21 +1,23 @@ //! Defines database & queries for name resolution. use base_db::{Crate, RootQueryDb, SourceDatabase}; +use either::Either; use hir_expand::{ EditionedFileId, HirFileId, InFile, Lookup, MacroCallId, MacroDefId, MacroDefKind, db::ExpandDatabase, }; +use intern::sym; use la_arena::ArenaMap; +use syntax::{AstPtr, ast}; use triomphe::Arc; use crate::{ - AssocItemId, AttrDefId, BlockId, BlockLoc, ConstId, ConstLoc, CrateRootModuleId, DefWithBodyId, - EnumId, EnumLoc, EnumVariantId, EnumVariantLoc, ExternBlockId, ExternBlockLoc, ExternCrateId, - ExternCrateLoc, FunctionId, FunctionLoc, GenericDefId, HasModule, ImplId, ImplLoc, - InternedModuleId, LocalFieldId, Macro2Id, Macro2Loc, MacroExpander, MacroId, MacroRulesId, - MacroRulesLoc, MacroRulesLocFlags, ProcMacroId, ProcMacroLoc, StaticId, StaticLoc, StructId, - StructLoc, TraitId, TraitLoc, TypeAliasId, TypeAliasLoc, UnionId, UnionLoc, UseId, UseLoc, - VariantId, - attrs::AttrFlags, + AssocItemId, AttrDefId, BlockId, BlockLoc, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc, + EnumVariantId, EnumVariantLoc, ExternBlockId, ExternBlockLoc, ExternCrateId, ExternCrateLoc, + FunctionId, FunctionLoc, GenericDefId, ImplId, ImplLoc, LocalFieldId, Macro2Id, Macro2Loc, + MacroExpander, MacroId, MacroRulesId, MacroRulesLoc, MacroRulesLocFlags, 
ProcMacroId, + ProcMacroLoc, StaticId, StaticLoc, StructId, StructLoc, TraitId, TraitLoc, TypeAliasId, + TypeAliasLoc, UnionId, UnionLoc, UseId, UseLoc, VariantId, + attr::{Attrs, AttrsWithOwner}, expr_store::{ Body, BodySourceMap, ExpressionStore, ExpressionStoreSourceMap, scope::ExprScopes, }, @@ -28,6 +30,7 @@ use crate::{ ConstSignature, EnumSignature, FunctionSignature, ImplSignature, StaticSignature, StructSignature, TraitSignature, TypeAliasSignature, UnionSignature, }, + tt, visibility::{self, Visibility}, }; @@ -235,11 +238,28 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase { def: GenericDefId, ) -> (Arc, Arc, Arc); - // FIXME: Get rid of this, call `AttrFlags::lang_item()` directly. + // region:attrs + + #[salsa::invoke(Attrs::fields_attrs_query)] + fn fields_attrs(&self, def: VariantId) -> Arc>; + + // should this really be a query? + #[salsa::invoke(crate::attr::fields_attrs_source_map)] + fn fields_attrs_source_map( + &self, + def: VariantId, + ) -> Arc>>>; + + // FIXME: Make this a non-interned query. 
+ #[salsa::invoke_interned(AttrsWithOwner::attrs_query)] + fn attrs(&self, def: AttrDefId) -> Attrs; + #[salsa::transparent] #[salsa::invoke(lang_item::lang_attr)] fn lang_attr(&self, def: AttrDefId) -> Option; + // endregion:attrs + #[salsa::invoke(ImportMap::import_map_query)] fn import_map(&self, krate: Crate) -> Arc; @@ -283,9 +303,36 @@ fn include_macro_invoc( } fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: Crate) -> bool { - let root_module = CrateRootModuleId::from(crate_id).module(db); - let attrs = AttrFlags::query(db, AttrDefId::ModuleId(InternedModuleId::new(db, root_module))); - attrs.contains(AttrFlags::IS_NO_STD) + let file = crate_id.data(db).root_file_id(db); + let item_tree = db.file_item_tree(file.into()); + let attrs = item_tree.top_level_raw_attrs(); + for attr in &**attrs { + match attr.path().as_ident() { + Some(ident) if *ident == sym::no_std => return true, + Some(ident) if *ident == sym::cfg_attr => {} + _ => continue, + } + + // This is a `cfg_attr`; check if it could possibly expand to `no_std`. 
+ // Syntax is: `#[cfg_attr(condition(cfg, style), attr0, attr1, <...>)]` + let tt = match attr.token_tree_value() { + Some(tt) => tt.token_trees(), + None => continue, + }; + + let segments = + tt.split(|tt| matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(p)) if p.char == ',')); + for output in segments.skip(1) { + match output.flat_tokens() { + [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::no_std => { + return true; + } + _ => {} + } + } + } + + false } fn macro_def(db: &dyn DefDatabase, id: MacroId) -> MacroDefId { diff --git a/crates/hir-def/src/expr_store/expander.rs b/crates/hir-def/src/expr_store/expander.rs index 6a2f06b0a6f6..23b9712d1e6c 100644 --- a/crates/hir-def/src/expr_store/expander.rs +++ b/crates/hir-def/src/expr_store/expander.rs @@ -17,10 +17,11 @@ use syntax::{AstNode, Parse, ast}; use triomphe::Arc; use tt::TextRange; -use crate::{ - MacroId, UnresolvedMacro, attrs::AttrFlags, db::DefDatabase, expr_store::HygieneId, - macro_call_as_call_id, nameres::DefMap, -}; +use crate::attr::Attrs; +use crate::expr_store::HygieneId; +use crate::macro_call_as_call_id; +use crate::nameres::DefMap; +use crate::{MacroId, UnresolvedMacro, db::DefDatabase}; #[derive(Debug)] pub(super) struct Expander { @@ -69,10 +70,11 @@ impl Expander { pub(super) fn is_cfg_enabled( &self, - owner: &dyn HasAttrs, + db: &dyn DefDatabase, + has_attrs: &dyn HasAttrs, cfg_options: &CfgOptions, ) -> Result<(), cfg::CfgExpr> { - AttrFlags::is_cfg_enabled_for(owner, cfg_options) + Attrs::is_cfg_enabled_for(db, has_attrs, self.span_map.as_ref(), cfg_options) } pub(super) fn call_syntax_ctx(&self) -> SyntaxContext { diff --git a/crates/hir-def/src/expr_store/lower.rs b/crates/hir-def/src/expr_store/lower.rs index fbe0b1ab9596..3794cb18e936 100644 --- a/crates/hir-def/src/expr_store/lower.rs +++ b/crates/hir-def/src/expr_store/lower.rs @@ -12,6 +12,7 @@ use cfg::CfgOptions; use either::Either; use hir_expand::{ HirFileId, InFile, MacroDefId, + mod_path::tool_path, 
name::{AsName, Name}, span_map::SpanMapRef, }; @@ -33,7 +34,6 @@ use tt::TextRange; use crate::{ AdtId, BlockId, BlockLoc, DefWithBodyId, FunctionId, GenericDefId, ImplId, MacroId, ModuleDefId, ModuleId, TraitId, TypeAliasId, UnresolvedMacro, - attrs::AttrFlags, builtin_type::BuiltinUint, db::DefDatabase, expr_store::{ @@ -87,16 +87,14 @@ pub(super) fn lower_body( let mut params = vec![]; let mut collector = ExprCollector::new(db, module, current_file_id); - let skip_body = AttrFlags::query( - db, - match owner { - DefWithBodyId::FunctionId(it) => it.into(), - DefWithBodyId::StaticId(it) => it.into(), - DefWithBodyId::ConstId(it) => it.into(), - DefWithBodyId::VariantId(it) => it.into(), - }, - ) - .contains(AttrFlags::RUST_ANALYZER_SKIP); + let skip_body = match owner { + DefWithBodyId::FunctionId(it) => db.attrs(it.into()), + DefWithBodyId::StaticId(it) => db.attrs(it.into()), + DefWithBodyId::ConstId(it) => db.attrs(it.into()), + DefWithBodyId::VariantId(it) => db.attrs(it.into()), + } + .rust_analyzer_tool() + .any(|attr| *attr.path() == tool_path![skip]); // If #[rust_analyzer::skip] annotated, only construct enough information for the signature // and skip the body. if skip_body { @@ -2487,7 +2485,7 @@ impl ExprCollector<'_> { /// Returns `None` (and emits diagnostics) when `owner` if `#[cfg]`d out, and `Some(())` when /// not. 
fn check_cfg(&mut self, owner: &dyn ast::HasAttrs) -> bool { - let enabled = self.expander.is_cfg_enabled(owner, self.cfg_options); + let enabled = self.expander.is_cfg_enabled(self.db, owner, self.cfg_options); match enabled { Ok(()) => true, Err(cfg) => { diff --git a/crates/hir-def/src/expr_store/pretty.rs b/crates/hir-def/src/expr_store/pretty.rs index e386e8d0c596..5b9da3c5e668 100644 --- a/crates/hir-def/src/expr_store/pretty.rs +++ b/crates/hir-def/src/expr_store/pretty.rs @@ -12,8 +12,7 @@ use span::Edition; use syntax::ast::HasName; use crate::{ - AdtId, DefWithBodyId, FunctionId, GenericDefId, StructId, TypeParamId, VariantId, - attrs::AttrFlags, + AdtId, DefWithBodyId, GenericDefId, TypeParamId, VariantId, expr_store::path::{GenericArg, GenericArgs}, hir::{ Array, BindingAnnotation, CaptureBy, ClosureKind, Literal, Movability, Statement, @@ -168,7 +167,7 @@ pub fn print_signature(db: &dyn DefDatabase, owner: GenericDefId, edition: Editi GenericDefId::AdtId(id) => match id { AdtId::StructId(id) => { let signature = db.struct_signature(id); - print_struct(db, id, &signature, edition) + print_struct(db, &signature, edition) } AdtId::UnionId(id) => { format!("unimplemented {id:?}") @@ -180,7 +179,7 @@ pub fn print_signature(db: &dyn DefDatabase, owner: GenericDefId, edition: Editi GenericDefId::ConstId(id) => format!("unimplemented {id:?}"), GenericDefId::FunctionId(id) => { let signature = db.function_signature(id); - print_function(db, id, &signature, edition) + print_function(db, &signature, edition) } GenericDefId::ImplId(id) => format!("unimplemented {id:?}"), GenericDefId::StaticId(id) => format!("unimplemented {id:?}"), @@ -209,8 +208,7 @@ pub fn print_path( pub fn print_struct( db: &dyn DefDatabase, - id: StructId, - StructSignature { name, generic_params, store, flags, shape }: &StructSignature, + StructSignature { name, generic_params, store, flags, shape, repr }: &StructSignature, edition: Edition, ) -> String { let mut p = Printer { @@ -221,7 
+219,7 @@ pub fn print_struct( line_format: LineFormat::Newline, edition, }; - if let Some(repr) = AttrFlags::repr(db, id.into()) { + if let Some(repr) = repr { if repr.c() { wln!(p, "#[repr(C)]"); } @@ -257,8 +255,7 @@ pub fn print_struct( pub fn print_function( db: &dyn DefDatabase, - id: FunctionId, - signature @ FunctionSignature { + FunctionSignature { name, generic_params, store, @@ -266,10 +263,10 @@ pub fn print_function( ret_type, abi, flags, + legacy_const_generics_indices, }: &FunctionSignature, edition: Edition, ) -> String { - let legacy_const_generics_indices = signature.legacy_const_generics_indices(db, id); let mut p = Printer { db, store, @@ -301,7 +298,7 @@ pub fn print_function( if i != 0 { w!(p, ", "); } - if legacy_const_generics_indices.is_some_and(|idx| idx.contains(&(i as u32))) { + if legacy_const_generics_indices.as_ref().is_some_and(|idx| idx.contains(&(i as u32))) { w!(p, "const: "); } p.print_type_ref(*param); diff --git a/crates/hir-def/src/expr_store/tests/body/block.rs b/crates/hir-def/src/expr_store/tests/body/block.rs index 0cb9325b502e..c7707378a5b3 100644 --- a/crates/hir-def/src/expr_store/tests/body/block.rs +++ b/crates/hir-def/src/expr_store/tests/body/block.rs @@ -189,8 +189,8 @@ fn f() { } "#, expect![[r#" - BlockId(3801) in BlockRelativeModuleId { block: Some(BlockId(3800)), local_id: Idx::(1) } - BlockId(3800) in BlockRelativeModuleId { block: None, local_id: Idx::(0) } + BlockId(3c01) in BlockRelativeModuleId { block: Some(BlockId(3c00)), local_id: Idx::(1) } + BlockId(3c00) in BlockRelativeModuleId { block: None, local_id: Idx::(0) } crate scope "#]], ); diff --git a/crates/hir-def/src/expr_store/tests/signatures.rs b/crates/hir-def/src/expr_store/tests/signatures.rs index 2dac4e7fc84b..b68674c7a74f 100644 --- a/crates/hir-def/src/expr_store/tests/signatures.rs +++ b/crates/hir-def/src/expr_store/tests/signatures.rs @@ -38,24 +38,14 @@ fn lower_and_print(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expe 
match def { GenericDefId::AdtId(adt_id) => match adt_id { crate::AdtId::StructId(struct_id) => { - out += &print_struct( - &db, - struct_id, - &db.struct_signature(struct_id), - Edition::CURRENT, - ); + out += &print_struct(&db, &db.struct_signature(struct_id), Edition::CURRENT); } crate::AdtId::UnionId(_id) => (), crate::AdtId::EnumId(_id) => (), }, GenericDefId::ConstId(_id) => (), GenericDefId::FunctionId(function_id) => { - out += &print_function( - &db, - function_id, - &db.function_signature(function_id), - Edition::CURRENT, - ) + out += &print_function(&db, &db.function_signature(function_id), Edition::CURRENT) } GenericDefId::ImplId(_id) => (), diff --git a/crates/hir-def/src/import_map.rs b/crates/hir-def/src/import_map.rs index 67cf466276c5..f31f355cfa5d 100644 --- a/crates/hir-def/src/import_map.rs +++ b/crates/hir-def/src/import_map.rs @@ -13,8 +13,7 @@ use stdx::format_to; use triomphe::Arc; use crate::{ - AssocItemId, AttrDefId, Complete, FxIndexMap, InternedModuleId, ModuleDefId, ModuleId, TraitId, - attrs::AttrFlags, + AssocItemId, AttrDefId, Complete, FxIndexMap, ModuleDefId, ModuleId, TraitId, db::DefDatabase, item_scope::{ImportOrExternCrate, ItemInNs}, nameres::{DefMap, assoc::TraitItems, crate_def_map}, @@ -166,34 +165,17 @@ impl ImportMap { } } else { match item { - ItemInNs::Types(id) | ItemInNs::Values(id) => match id { - ModuleDefId::ModuleId(it) => { - Some(AttrDefId::ModuleId(InternedModuleId::new(db, it))) - } - ModuleDefId::FunctionId(it) => Some(it.into()), - ModuleDefId::AdtId(it) => Some(it.into()), - ModuleDefId::EnumVariantId(it) => Some(it.into()), - ModuleDefId::ConstId(it) => Some(it.into()), - ModuleDefId::StaticId(it) => Some(it.into()), - ModuleDefId::TraitId(it) => Some(it.into()), - ModuleDefId::TypeAliasId(it) => Some(it.into()), - ModuleDefId::MacroId(it) => Some(it.into()), - ModuleDefId::BuiltinType(_) => None, - }, + ItemInNs::Types(id) | ItemInNs::Values(id) => id.try_into().ok(), ItemInNs::Macros(id) => 
Some(id.into()), } }; let (is_doc_hidden, is_unstable, do_not_complete) = match attr_id { None => (false, false, Complete::Yes), Some(attr_id) => { - let attrs = AttrFlags::query(db, attr_id); + let attrs = db.attrs(attr_id); let do_not_complete = - Complete::extract(matches!(attr_id, AttrDefId::TraitId(_)), attrs); - ( - attrs.contains(AttrFlags::IS_DOC_HIDDEN), - attrs.contains(AttrFlags::IS_UNSTABLE), - do_not_complete, - ) + Complete::extract(matches!(attr_id, AttrDefId::TraitId(_)), &attrs); + (attrs.has_doc_hidden(), attrs.is_unstable(), do_not_complete) } }; @@ -257,15 +239,15 @@ impl ImportMap { }; let attr_id = item.into(); - let attrs = AttrFlags::query(db, attr_id); + let attrs = &db.attrs(attr_id); let item_do_not_complete = Complete::extract(false, attrs); let do_not_complete = Complete::for_trait_item(trait_import_info.complete, item_do_not_complete); let assoc_item_info = ImportInfo { container: trait_import_info.container, name: assoc_item_name.clone(), - is_doc_hidden: attrs.contains(AttrFlags::IS_DOC_HIDDEN), - is_unstable: attrs.contains(AttrFlags::IS_UNSTABLE), + is_doc_hidden: attrs.has_doc_hidden(), + is_unstable: attrs.is_unstable(), complete: do_not_complete, }; diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs index 2a104fff2b92..f35df8d3a7e1 100644 --- a/crates/hir-def/src/item_tree.rs +++ b/crates/hir-def/src/item_tree.rs @@ -30,7 +30,6 @@ //! In general, any item in the `ItemTree` stores its `AstId`, which allows mapping it back to its //! surface syntax. 
-mod attrs; mod lower; mod pretty; #[cfg(test)] @@ -44,8 +43,10 @@ use std::{ }; use ast::{AstNode, StructKind}; +use base_db::Crate; use hir_expand::{ ExpandTo, HirFileId, + attrs::RawAttrs, mod_path::{ModPath, PathKind}, name::Name, }; @@ -58,12 +59,9 @@ use syntax::{SyntaxKind, ast, match_ast}; use thin_vec::ThinVec; use triomphe::Arc; -use crate::{BlockId, Lookup, db::DefDatabase}; +use crate::{BlockId, Lookup, attr::Attrs, db::DefDatabase}; -pub(crate) use crate::item_tree::{ - attrs::*, - lower::{lower_use_tree, visibility_from_ast}, -}; +pub(crate) use crate::item_tree::lower::{lower_use_tree, visibility_from_ast}; #[derive(Copy, Clone, Eq, PartialEq)] pub(crate) struct RawVisibilityId(u32); @@ -98,7 +96,7 @@ pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> let mut item_tree = match_ast! { match syntax { ast::SourceFile(file) => { - let top_attrs = ctx.lower_attrs(&file); + let top_attrs = RawAttrs::new(db, &file, ctx.span_map()); let mut item_tree = ctx.lower_module_items(&file); item_tree.top_attrs = top_attrs; item_tree @@ -134,7 +132,7 @@ pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> attrs: FxHashMap::default(), small_data: FxHashMap::default(), big_data: FxHashMap::default(), - top_attrs: AttrsOrCfg::empty(), + top_attrs: RawAttrs::EMPTY, vis: ItemVisibilities { arena: ThinVec::new() }, }) }) @@ -170,7 +168,7 @@ pub(crate) fn block_item_tree_query(db: &dyn DefDatabase, block: BlockId) -> Arc attrs: FxHashMap::default(), small_data: FxHashMap::default(), big_data: FxHashMap::default(), - top_attrs: AttrsOrCfg::empty(), + top_attrs: RawAttrs::EMPTY, vis: ItemVisibilities { arena: ThinVec::new() }, }) }) @@ -184,8 +182,8 @@ pub(crate) fn block_item_tree_query(db: &dyn DefDatabase, block: BlockId) -> Arc #[derive(Debug, Default, Eq, PartialEq)] pub struct ItemTree { top_level: Box<[ModItemId]>, - top_attrs: AttrsOrCfg, - attrs: FxHashMap, AttrsOrCfg>, + top_attrs: RawAttrs, + attrs: FxHashMap, 
RawAttrs>, vis: ItemVisibilities, big_data: FxHashMap, BigModItem>, small_data: FxHashMap, SmallModItem>, @@ -199,12 +197,26 @@ impl ItemTree { } /// Returns the inner attributes of the source file. - pub(crate) fn top_level_attrs(&self) -> &AttrsOrCfg { + pub(crate) fn top_level_raw_attrs(&self) -> &RawAttrs { &self.top_attrs } - pub(crate) fn attrs(&self, of: FileAstId) -> Option<&AttrsOrCfg> { - self.attrs.get(&of) + /// Returns the inner attributes of the source file. + pub(crate) fn top_level_attrs(&self, db: &dyn DefDatabase, krate: Crate) -> Attrs { + Attrs::expand_cfg_attr(db, krate, self.top_attrs.clone()) + } + + pub(crate) fn raw_attrs(&self, of: FileAstId) -> &RawAttrs { + self.attrs.get(&of).unwrap_or(&RawAttrs::EMPTY) + } + + pub(crate) fn attrs( + &self, + db: &dyn DefDatabase, + krate: Crate, + of: FileAstId, + ) -> Attrs { + Attrs::expand_cfg_attr(db, krate, self.raw_attrs(of).clone()) } /// Returns a count of a few, expensive items. diff --git a/crates/hir-def/src/item_tree/attrs.rs b/crates/hir-def/src/item_tree/attrs.rs deleted file mode 100644 index 5c635a4b3831..000000000000 --- a/crates/hir-def/src/item_tree/attrs.rs +++ /dev/null @@ -1,220 +0,0 @@ -//! Defines attribute helpers for name resolution. -//! -//! Notice we don't preserve all attributes for name resolution, to save space: -//! for example, we skip doc comments (desugared to `#[doc = "..."]` attributes) -//! and `#[inline]`. The filtered attributes are listed in [`hir_expand::attrs`]. 
- -use std::{ - borrow::Cow, - convert::Infallible, - ops::{self, ControlFlow}, -}; - -use cfg::{CfgExpr, CfgOptions}; -use either::Either; -use hir_expand::{ - attrs::{Attr, AttrId, AttrInput, Meta, collect_item_tree_attrs}, - mod_path::ModPath, - name::Name, - span_map::SpanMapRef, -}; -use intern::{Interned, Symbol, sym}; -use syntax::{AstNode, T, ast}; -use syntax_bridge::DocCommentDesugarMode; -use tt::token_to_literal; - -use crate::{db::DefDatabase, item_tree::lower::Ctx}; - -#[derive(Debug, PartialEq, Eq)] -pub(crate) enum AttrsOrCfg { - Enabled { - attrs: AttrsOwned, - }, - /// This only collects the attributes up to the disabled `cfg` (this is what needed for crate-level attributes.) - CfgDisabled(Box<(CfgExpr, AttrsOwned)>), -} - -impl Default for AttrsOrCfg { - #[inline] - fn default() -> Self { - AttrsOrCfg::Enabled { attrs: AttrsOwned(Box::new([])) } - } -} - -impl AttrsOrCfg { - pub(crate) fn lower<'a>( - db: &dyn DefDatabase, - owner: &dyn ast::HasAttrs, - cfg_options: &dyn Fn() -> &'a CfgOptions, - span_map: SpanMapRef<'_>, - ) -> AttrsOrCfg { - let mut attrs = Vec::new(); - let result = - collect_item_tree_attrs::(owner, cfg_options, |meta, container, _, _| { - // NOTE: We cannot early return from this function, *every* attribute must be pushed, otherwise we'll mess the `AttrId` - // tracking. 
- let (span, path_range, input) = match meta { - Meta::NamedKeyValue { path_range, name: _, value } => { - let span = span_map.span_for_range(path_range); - let input = value.map(|value| { - Box::new(AttrInput::Literal(token_to_literal( - value.text(), - span_map.span_for_range(value.text_range()), - ))) - }); - (span, path_range, input) - } - Meta::TokenTree { path, tt } => { - let span = span_map.span_for_range(path.range); - let tt = syntax_bridge::syntax_node_to_token_tree( - tt.syntax(), - span_map, - span, - DocCommentDesugarMode::ProcMacro, - ); - let input = Some(Box::new(AttrInput::TokenTree(tt))); - (span, path.range, input) - } - Meta::Path { path } => { - let span = span_map.span_for_range(path.range); - (span, path.range, None) - } - }; - - let path = container.token_at_offset(path_range.start()).right_biased().and_then( - |first_path_token| { - let is_abs = matches!(first_path_token.kind(), T![:] | T![::]); - let segments = - std::iter::successors(Some(first_path_token), |it| it.next_token()) - .take_while(|it| it.text_range().end() <= path_range.end()) - .filter(|it| it.kind().is_any_identifier()); - ModPath::from_tokens( - db, - &mut |range| span_map.span_for_range(range).ctx, - is_abs, - segments, - ) - }, - ); - let path = path.unwrap_or_else(|| Name::missing().into()); - - attrs.push(Attr { path: Interned::new(path), input, ctxt: span.ctx }); - ControlFlow::Continue(()) - }); - let attrs = AttrsOwned(attrs.into_boxed_slice()); - match result { - Some(Either::Right(cfg)) => AttrsOrCfg::CfgDisabled(Box::new((cfg, attrs))), - None => AttrsOrCfg::Enabled { attrs }, - } - } -} - -#[derive(Debug, PartialEq, Eq)] -pub(crate) struct AttrsOwned(Box<[Attr]>); - -#[derive(Debug, Clone, Copy)] -pub(crate) struct Attrs<'a>(&'a [Attr]); - -impl ops::Deref for Attrs<'_> { - type Target = [Attr]; - - #[inline] - fn deref(&self) -> &Self::Target { - self.0 - } -} - -impl Ctx<'_> { - #[inline] - pub(super) fn lower_attrs(&self, owner: &dyn ast::HasAttrs) -> 
AttrsOrCfg { - AttrsOrCfg::lower(self.db, owner, &|| self.cfg_options(), self.span_map()) - } -} - -impl AttrsOwned { - #[inline] - pub(crate) fn as_ref(&self) -> Attrs<'_> { - Attrs(&self.0) - } -} - -impl<'a> Attrs<'a> { - pub(crate) const EMPTY: Self = Attrs(&[]); - - #[inline] - pub(crate) fn by_key(self, key: Symbol) -> AttrQuery<'a> { - AttrQuery { attrs: self, key } - } - - #[inline] - pub(crate) fn iter(self) -> impl Iterator { - self.0.iter().enumerate().map(|(id, attr)| (AttrId::from_item_tree_index(id as u32), attr)) - } - - #[inline] - pub(crate) fn iter_after( - self, - after: Option, - ) -> impl Iterator { - let skip = after.map_or(0, |after| after.item_tree_index() + 1); - self.0[skip as usize..] - .iter() - .enumerate() - .map(move |(id, attr)| (AttrId::from_item_tree_index(id as u32 + skip), attr)) - } - - #[inline] - pub(crate) fn is_proc_macro(&self) -> bool { - self.by_key(sym::proc_macro).exists() - } - - #[inline] - pub(crate) fn is_proc_macro_attribute(&self) -> bool { - self.by_key(sym::proc_macro_attribute).exists() - } -} -#[derive(Debug, Clone)] -pub(crate) struct AttrQuery<'attr> { - attrs: Attrs<'attr>, - key: Symbol, -} - -impl<'attr> AttrQuery<'attr> { - #[inline] - pub(crate) fn tt_values(self) -> impl Iterator { - self.attrs().filter_map(|attr| attr.token_tree_value()) - } - - #[inline] - pub(crate) fn string_value_with_span(self) -> Option<(&'attr Symbol, span::Span)> { - self.attrs().find_map(|attr| attr.string_value_with_span()) - } - - #[inline] - pub(crate) fn string_value_unescape(self) -> Option> { - self.attrs().find_map(|attr| attr.string_value_unescape()) - } - - #[inline] - pub(crate) fn exists(self) -> bool { - self.attrs().next().is_some() - } - - #[inline] - pub(crate) fn attrs(self) -> impl Iterator + Clone { - let key = self.key; - self.attrs.0.iter().filter(move |attr| attr.path.as_ident().is_some_and(|s| *s == key)) - } -} - -impl AttrsOrCfg { - #[inline] - pub(super) fn empty() -> Self { - AttrsOrCfg::Enabled { 
attrs: AttrsOwned(Box::new([])) } - } - - #[inline] - pub(super) fn is_empty(&self) -> bool { - matches!(self, AttrsOrCfg::Enabled { attrs } if attrs.as_ref().is_empty()) - } -} diff --git a/crates/hir-def/src/item_tree/lower.rs b/crates/hir-def/src/item_tree/lower.rs index b50a75169158..454e06399583 100644 --- a/crates/hir-def/src/item_tree/lower.rs +++ b/crates/hir-def/src/item_tree/lower.rs @@ -1,9 +1,8 @@ //! AST -> `ItemTree` lowering code. -use std::cell::OnceCell; +use std::{cell::OnceCell, collections::hash_map::Entry}; use base_db::FxIndexSet; -use cfg::CfgOptions; use hir_expand::{ HirFileId, mod_path::PathKind, @@ -23,19 +22,18 @@ use crate::{ item_tree::{ BigModItem, Const, Enum, ExternBlock, ExternCrate, FieldsShape, Function, Impl, ImportAlias, Interned, ItemTree, ItemTreeAstId, Macro2, MacroCall, MacroRules, Mod, - ModItemId, ModKind, ModPath, RawVisibility, RawVisibilityId, SmallModItem, Static, Struct, - StructKind, Trait, TypeAlias, Union, Use, UseTree, UseTreeKind, VisibilityExplicitness, - attrs::AttrsOrCfg, + ModItemId, ModKind, ModPath, RawAttrs, RawVisibility, RawVisibilityId, SmallModItem, + Static, Struct, StructKind, Trait, TypeAlias, Union, Use, UseTree, UseTreeKind, + VisibilityExplicitness, }, }; pub(super) struct Ctx<'a> { - pub(super) db: &'a dyn DefDatabase, + db: &'a dyn DefDatabase, tree: ItemTree, source_ast_id_map: Arc, span_map: OnceCell, file: HirFileId, - cfg_options: OnceCell<&'a CfgOptions>, top_level: Vec, visibilities: FxIndexSet, } @@ -47,18 +45,12 @@ impl<'a> Ctx<'a> { tree: ItemTree::default(), source_ast_id_map: db.ast_id_map(file), file, - cfg_options: OnceCell::new(), span_map: OnceCell::new(), visibilities: FxIndexSet::default(), top_level: Vec::new(), } } - #[inline] - pub(super) fn cfg_options(&self) -> &'a CfgOptions { - self.cfg_options.get_or_init(|| self.file.krate(self.db).cfg_options(self.db)) - } - pub(super) fn span_map(&self) -> SpanMapRef<'_> { self.span_map.get_or_init(|| 
self.db.span_map(self.file)).as_ref() } @@ -106,7 +98,7 @@ impl<'a> Ctx<'a> { } pub(super) fn lower_block(mut self, block: &ast::BlockExpr) -> ItemTree { - self.tree.top_attrs = self.lower_attrs(block); + self.tree.top_attrs = RawAttrs::new(self.db, block, self.span_map()); self.top_level = block .statements() .filter_map(|stmt| match stmt { @@ -152,15 +144,22 @@ impl<'a> Ctx<'a> { // FIXME: Handle `global_asm!()`. ast::Item::AsmExpr(_) => return None, }; - let attrs = self.lower_attrs(item); + let attrs = RawAttrs::new(self.db, item, self.span_map()); self.add_attrs(mod_item.ast_id(), attrs); Some(mod_item) } - fn add_attrs(&mut self, item: FileAstId, attrs: AttrsOrCfg) { + fn add_attrs(&mut self, item: FileAstId, attrs: RawAttrs) { if !attrs.is_empty() { - self.tree.attrs.insert(item, attrs); + match self.tree.attrs.entry(item) { + Entry::Occupied(mut entry) => { + *entry.get_mut() = entry.get().merge(attrs); + } + Entry::Vacant(entry) => { + entry.insert(attrs); + } + } } } @@ -353,7 +352,7 @@ impl<'a> Ctx<'a> { ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(ty)?.into(), ast::ExternItem::MacroCall(call) => self.lower_macro_call(call)?.into(), }; - let attrs = self.lower_attrs(&item); + let attrs = RawAttrs::new(self.db, &item, self.span_map()); self.add_attrs(mod_item.ast_id(), attrs); Some(mod_item) }) diff --git a/crates/hir-def/src/item_tree/pretty.rs b/crates/hir-def/src/item_tree/pretty.rs index 66a2d14a734f..94a6cce3ce33 100644 --- a/crates/hir-def/src/item_tree/pretty.rs +++ b/crates/hir-def/src/item_tree/pretty.rs @@ -7,8 +7,8 @@ use span::{Edition, ErasedFileAstId}; use crate::{ item_tree::{ Const, DefDatabase, Enum, ExternBlock, ExternCrate, FieldsShape, Function, Impl, ItemTree, - Macro2, MacroCall, MacroRules, Mod, ModItemId, ModKind, RawVisibilityId, Static, Struct, - Trait, TypeAlias, Union, Use, UseTree, UseTreeKind, attrs::AttrsOrCfg, + Macro2, MacroCall, MacroRules, Mod, ModItemId, ModKind, RawAttrs, RawVisibilityId, Static, + Struct, 
Trait, TypeAlias, Union, Use, UseTree, UseTreeKind, }, visibility::RawVisibility, }; @@ -85,13 +85,9 @@ impl Printer<'_> { } } - fn print_attrs(&mut self, attrs: &AttrsOrCfg, inner: bool, separated_by: &str) { - let AttrsOrCfg::Enabled { attrs } = attrs else { - w!(self, "#[cfg(false)]{separated_by}"); - return; - }; + fn print_attrs(&mut self, attrs: &RawAttrs, inner: bool, separated_by: &str) { let inner = if inner { "!" } else { "" }; - for attr in &*attrs.as_ref() { + for attr in &**attrs { w!( self, "#{}[{}{}]{}", diff --git a/crates/hir-def/src/item_tree/tests.rs b/crates/hir-def/src/item_tree/tests.rs index a57432f33c3d..91b42bef8f79 100644 --- a/crates/hir-def/src/item_tree/tests.rs +++ b/crates/hir-def/src/item_tree/tests.rs @@ -30,8 +30,10 @@ use crate::{A, B}; use a::{c, d::{e}}; "#, - expect![[r#" + expect![[r##" + #![doc = " file comment"] #![no_std] + #![doc = " another file comment"] // AstId: ExternCrate[070B, 0] pub(self) extern crate self as renamed; @@ -45,12 +47,13 @@ use a::{c, d::{e}}; // AstId: Use[0000, 1] pub(self) use globs::*; + #[doc = " docs on import"] // AstId: Use[0000, 2] pub(self) use crate::{A, B}; // AstId: Use[0000, 3] pub(self) use a::{c, d::{e}}; - "#]], + "##]], ); } @@ -192,6 +195,8 @@ mod inline { mod outline; "#, expect![[r##" + #[doc = " outer"] + #[doc = " inner"] // AstId: Module[03AE, 0] pub(self) mod inline { // AstId: Use[0000, 0] diff --git a/crates/hir-def/src/lang_item.rs b/crates/hir-def/src/lang_item.rs index 4f97baadd183..df0705bf90cb 100644 --- a/crates/hir-def/src/lang_item.rs +++ b/crates/hir-def/src/lang_item.rs @@ -10,7 +10,6 @@ use triomphe::Arc; use crate::{ AdtId, AssocItemId, AttrDefId, Crate, EnumId, EnumVariantId, FunctionId, ImplId, ModuleDefId, StaticId, StructId, TraitId, TypeAliasId, UnionId, - attrs::AttrFlags, db::DefDatabase, expr_store::path::Path, nameres::{assoc::TraitItems, crate_def_map, crate_local_def_map}, @@ -214,14 +213,14 @@ impl LangItems { T: Into + Copy, { let _p = 
tracing::info_span!("collect_lang_item").entered(); - if let Some(lang_item) = AttrFlags::lang_item(db, item.into()) { + if let Some(lang_item) = lang_attr(db, item.into()) { self.items.entry(lang_item).or_insert_with(|| constructor(item)); } } } pub(crate) fn lang_attr(db: &dyn DefDatabase, item: AttrDefId) -> Option { - AttrFlags::lang_item(db, item) + db.attrs(item).lang_item() } pub(crate) fn notable_traits_in_deps(db: &dyn DefDatabase, krate: Crate) -> Arc<[Arc<[TraitId]>]> { @@ -241,7 +240,7 @@ pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: Crate) -> Option for (_, module_data) in crate_def_map.modules() { for def in module_data.scope.declarations() { if let ModuleDefId::TraitId(trait_) = def - && AttrFlags::query(db, trait_.into()).contains(AttrFlags::IS_DOC_NOTABLE_TRAIT) + && db.attrs(trait_.into()).has_doc_notable_trait() { traits.push(trait_); } diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs index c3c9fc75252d..e5c213ca937c 100644 --- a/crates/hir-def/src/lib.rs +++ b/crates/hir-def/src/lib.rs @@ -19,7 +19,7 @@ extern crate ra_ap_rustc_abi as rustc_abi; pub mod db; -pub mod attrs; +pub mod attr; pub mod builtin_type; pub mod item_scope; pub mod per_ns; @@ -45,7 +45,7 @@ pub mod find_path; pub mod import_map; pub mod visibility; -use intern::{Interned, Symbol}; +use intern::{Interned, Symbol, sym}; pub use rustc_abi as layout; use thin_vec::ThinVec; use triomphe::Arc; @@ -80,7 +80,7 @@ use syntax::{AstNode, ast}; pub use hir_expand::{Intern, Lookup, tt}; use crate::{ - attrs::AttrFlags, + attr::Attrs, builtin_type::BuiltinType, db::DefDatabase, expr_store::ExpressionStoreSourceMap, @@ -956,16 +956,10 @@ impl CallableDefId { } } -// FIXME: We probably should use this in more places. -/// This is used to avoid interning the whole `AttrDefId`, so we intern just modules and not everything. 
-#[salsa_macros::interned(debug, no_lifetime)] -pub struct InternedModuleId { - pub loc: ModuleId, -} - -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, salsa_macros::Supertype)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub enum AttrDefId { - ModuleId(InternedModuleId), + ModuleId(ModuleId), + FieldId(FieldId), AdtId(AdtId), FunctionId(FunctionId), EnumVariantId(EnumVariantId), @@ -975,12 +969,15 @@ pub enum AttrDefId { TypeAliasId(TypeAliasId), MacroId(MacroId), ImplId(ImplId), + GenericParamId(GenericParamId), ExternBlockId(ExternBlockId), ExternCrateId(ExternCrateId), UseId(UseId), } impl_from!( + ModuleId, + FieldId, AdtId(StructId, EnumId, UnionId), EnumVariantId, StaticId, @@ -990,11 +987,41 @@ impl_from!( TypeAliasId, MacroId(Macro2Id, MacroRulesId, ProcMacroId), ImplId, + GenericParamId, ExternCrateId, UseId for AttrDefId ); +impl TryFrom for AttrDefId { + type Error = (); + + fn try_from(value: ModuleDefId) -> Result { + match value { + ModuleDefId::ModuleId(it) => Ok(it.into()), + ModuleDefId::FunctionId(it) => Ok(it.into()), + ModuleDefId::AdtId(it) => Ok(it.into()), + ModuleDefId::EnumVariantId(it) => Ok(it.into()), + ModuleDefId::ConstId(it) => Ok(it.into()), + ModuleDefId::StaticId(it) => Ok(it.into()), + ModuleDefId::TraitId(it) => Ok(it.into()), + ModuleDefId::TypeAliasId(it) => Ok(it.into()), + ModuleDefId::MacroId(id) => Ok(id.into()), + ModuleDefId::BuiltinType(_) => Err(()), + } + } +} + +impl From for AttrDefId { + fn from(acid: ItemContainerId) -> Self { + match acid { + ItemContainerId::ModuleId(mid) => AttrDefId::ModuleId(mid), + ItemContainerId::ImplId(iid) => AttrDefId::ImplId(iid), + ItemContainerId::TraitId(tid) => AttrDefId::TraitId(tid), + ItemContainerId::ExternBlockId(id) => AttrDefId::ExternBlockId(id), + } + } +} impl From for AttrDefId { fn from(assoc: AssocItemId) -> Self { match assoc { @@ -1235,7 +1262,8 @@ impl HasModule for GenericDefId { impl HasModule for AttrDefId { fn module(&self, db: &dyn DefDatabase) -> 
ModuleId { match self { - AttrDefId::ModuleId(it) => it.loc(db), + AttrDefId::ModuleId(it) => *it, + AttrDefId::FieldId(it) => it.parent.module(db), AttrDefId::AdtId(it) => it.module(db), AttrDefId::FunctionId(it) => it.module(db), AttrDefId::EnumVariantId(it) => it.module(db), @@ -1245,6 +1273,12 @@ impl HasModule for AttrDefId { AttrDefId::TypeAliasId(it) => it.module(db), AttrDefId::ImplId(it) => it.module(db), AttrDefId::ExternBlockId(it) => it.module(db), + AttrDefId::GenericParamId(it) => match it { + GenericParamId::TypeParamId(it) => it.parent(), + GenericParamId::ConstParamId(it) => it.parent(), + GenericParamId::LifetimeParamId(it) => it.parent, + } + .module(db), AttrDefId::MacroId(it) => it.module(db), AttrDefId::ExternCrateId(it) => it.module(db), AttrDefId::UseId(it) => it.module(db), @@ -1368,18 +1402,32 @@ pub enum Complete { } impl Complete { - #[inline] - pub fn extract(is_trait: bool, attrs: AttrFlags) -> Complete { - if attrs.contains(AttrFlags::COMPLETE_IGNORE_FLYIMPORT) { - return Complete::IgnoreFlyimport; - } else if is_trait { - if attrs.contains(AttrFlags::COMPLETE_IGNORE_METHODS) { - return Complete::IgnoreMethods; - } else if attrs.contains(AttrFlags::COMPLETE_IGNORE_FLYIMPORT_METHODS) { - return Complete::IgnoreFlyimportMethods; + pub fn extract(is_trait: bool, attrs: &Attrs) -> Complete { + let mut do_not_complete = Complete::Yes; + for ra_attr in attrs.rust_analyzer_tool() { + let segments = ra_attr.path.segments(); + if segments.len() != 2 { + continue; + } + let action = segments[1].symbol(); + if *action == sym::completions { + match ra_attr.token_tree_value().map(|tt| tt.token_trees().flat_tokens()) { + Some([tt::TokenTree::Leaf(tt::Leaf::Ident(ident))]) => { + if ident.sym == sym::ignore_flyimport { + do_not_complete = Complete::IgnoreFlyimport; + } else if is_trait { + if ident.sym == sym::ignore_methods { + do_not_complete = Complete::IgnoreMethods; + } else if ident.sym == sym::ignore_flyimport_methods { + do_not_complete = 
Complete::IgnoreFlyimportMethods; + } + } + } + _ => {} + } } } - Complete::Yes + do_not_complete } #[inline] diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs index 115b487b7ac8..c489c1f7c1da 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs @@ -300,21 +300,21 @@ fn match_by_first_token_literally() { check( r#" macro_rules! m { - ($i:ident) => ( enum $i {} ); + ($i:ident) => ( mod $i {} ); (= $i:ident) => ( fn $i() {} ); (+ $i:ident) => ( struct $i; ) } -m! { Foo } +m! { foo } m! { = bar } m! { + Baz } "#, expect![[r#" macro_rules! m { - ($i:ident) => ( enum $i {} ); + ($i:ident) => ( mod $i {} ); (= $i:ident) => ( fn $i() {} ); (+ $i:ident) => ( struct $i; ) } -enum Foo {} +mod foo {} fn bar() {} struct Baz; "#]], @@ -326,21 +326,21 @@ fn match_by_last_token_literally() { check( r#" macro_rules! m { - ($i:ident) => ( enum $i {} ); + ($i:ident) => ( mod $i {} ); ($i:ident =) => ( fn $i() {} ); ($i:ident +) => ( struct $i; ) } -m! { Foo } +m! { foo } m! { bar = } m! { Baz + } "#, expect![[r#" macro_rules! m { - ($i:ident) => ( enum $i {} ); + ($i:ident) => ( mod $i {} ); ($i:ident =) => ( fn $i() {} ); ($i:ident +) => ( struct $i; ) } -enum Foo {} +mod foo {} fn bar() {} struct Baz; "#]], @@ -352,21 +352,21 @@ fn match_by_ident() { check( r#" macro_rules! m { - ($i:ident) => ( enum $i {} ); + ($i:ident) => ( mod $i {} ); (spam $i:ident) => ( fn $i() {} ); (eggs $i:ident) => ( struct $i; ) } -m! { Foo } +m! { foo } m! { spam bar } m! { eggs Baz } "#, expect![[r#" macro_rules! m { - ($i:ident) => ( enum $i {} ); + ($i:ident) => ( mod $i {} ); (spam $i:ident) => ( fn $i() {} ); (eggs $i:ident) => ( struct $i; ) } -enum Foo {} +mod foo {} fn bar() {} struct Baz; "#]], @@ -378,12 +378,12 @@ fn match_by_separator_token() { check( r#" macro_rules! 
m { - ($($i:ident),*) => ($(enum $i {} )*); + ($($i:ident),*) => ($(mod $i {} )*); ($($i:ident)#*) => ($(fn $i() {} )*); ($i:ident ,# $ j:ident) => ( struct $i; struct $ j; ) } -m! { Baz, Qux } +m! { foo, bar } m! { foo# bar } @@ -391,13 +391,13 @@ m! { Foo,# Bar } "#, expect![[r#" macro_rules! m { - ($($i:ident),*) => ($(enum $i {} )*); + ($($i:ident),*) => ($(mod $i {} )*); ($($i:ident)#*) => ($(fn $i() {} )*); ($i:ident ,# $ j:ident) => ( struct $i; struct $ j; ) } -enum Baz {} -enum Qux {} +mod foo {} +mod bar {} fn foo() {} fn bar() {} @@ -1114,11 +1114,11 @@ fn test_single_item() { check( r#" macro_rules! m { ($i:item) => ( $i ) } -m! { struct C {} } +m! { mod c {} } "#, expect![[r#" macro_rules! m { ($i:item) => ( $i ) } -struct C {} +mod c {} "#]], ) } @@ -1144,7 +1144,6 @@ m! { type T = u8; } "#, - // The modules are counted twice, once because of the module and once because of the macro call. expect![[r#" macro_rules! m { ($($i:item)*) => ($($i )*) } extern crate a; @@ -1162,9 +1161,7 @@ trait J {} fn h() {} extern {} type T = u8; - -mod b; -mod c {}"#]], +"#]], ); } diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs index 74393411054e..e8ae499d27b2 100644 --- a/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -245,21 +245,6 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream } } - for (_, module) in def_map.modules() { - let Some(src) = module.declaration_source(&db) else { - continue; - }; - if let Some(macro_file) = src.file_id.macro_file() { - let pp = pretty_print_macro_expansion( - src.value.syntax().clone(), - db.span_map(macro_file.into()).as_ref(), - false, - false, - ); - format_to!(expanded_text, "\n{}", pp) - } - } - for impl_id in def_map[local_id].scope.impls() { let src = impl_id.lookup(&db).source(&db); if let Some(macro_file) = src.file_id.macro_file() diff --git 
a/crates/hir-def/src/macro_expansion_tests/proc_macros.rs b/crates/hir-def/src/macro_expansion_tests/proc_macros.rs index 3f0afe61e0b8..6952a9da1013 100644 --- a/crates/hir-def/src/macro_expansion_tests/proc_macros.rs +++ b/crates/hir-def/src/macro_expansion_tests/proc_macros.rs @@ -9,93 +9,37 @@ use crate::macro_expansion_tests::{check, check_errors}; #[test] fn attribute_macro_attr_censoring() { + cov_mark::check!(attribute_macro_attr_censoring); check( r#" //- proc_macros: identity -//- minicore: derive -#[attr1] #[derive()] #[proc_macros::identity] #[attr2] +#[attr1] #[proc_macros::identity] #[attr2] struct S; - -/// Foo -#[cfg_attr(false, doc = "abc...", attr1)] -mod foo { - #![cfg_attr(true, cfg_attr(true, foo, cfg_attr(false, bar), proc_macros::identity))] - #![cfg_attr(true, doc = "123...", attr2)] - #![attr3] - - #[cfg_attr(true, cfg(false))] - fn foo() {} - - #[cfg(true)] - fn bar() {} -} "#, - expect![[r##" -#[attr1] #[derive()] #[proc_macros::identity] #[attr2] + expect![[r#" +#[attr1] #[proc_macros::identity] #[attr2] struct S; -/// Foo -#[cfg_attr(false, doc = "abc...", attr1)] -mod foo { - #![cfg_attr(true, cfg_attr(true, foo, cfg_attr(false, bar), proc_macros::identity))] - #![cfg_attr(true, doc = "123...", attr2)] - #![attr3] - - #[cfg_attr(true, cfg(false))] - fn foo() {} - - #[cfg(true)] - fn bar() {} -} - #[attr1] -#[attr2] struct S; -#[doc = " Foo"] mod foo { - # ![foo] - # ![doc = "123..."] - # ![attr2] - # ![attr3] - #[cfg_attr(true , cfg(false ))] fn foo() {} - #[cfg(true )] fn bar() {} -}"##]], +#[attr2] struct S;"#]], ); } #[test] fn derive_censoring() { + cov_mark::check!(derive_censoring); check( r#" //- proc_macros: derive_identity //- minicore:derive -use derive as my_cool_derive; #[attr1] #[derive(Foo)] #[derive(proc_macros::DeriveIdentity)] #[derive(Bar)] #[attr2] struct S; - -#[my_cool_derive()] -#[cfg_attr(true, derive(), attr1, derive(proc_macros::DeriveIdentity))] -#[my_cool_derive()] -struct Foo { - #[cfg_attr(false, cfg(false), 
attr2)] - v1: i32, - #[cfg_attr(true, cfg(false), attr2)] - v1: i32, - #[cfg_attr(true, attr3)] - v2: fn(#[cfg(false)] param: i32, #[cfg_attr(true, attr4)] param2: u32), - v3: Foo<{ - #[cfg(false)] - let foo = 123; - 456 - }>, - #[cfg(false)] - v4: bool // No comma here -} "#, expect![[r#" -use derive as my_cool_derive; #[attr1] #[derive(Foo)] #[derive(proc_macros::DeriveIdentity)] @@ -103,32 +47,6 @@ use derive as my_cool_derive; #[attr2] struct S; -#[my_cool_derive()] -#[cfg_attr(true, derive(), attr1, derive(proc_macros::DeriveIdentity))] -#[my_cool_derive()] -struct Foo { - #[cfg_attr(false, cfg(false), attr2)] - v1: i32, - #[cfg_attr(true, cfg(false), attr2)] - v1: i32, - #[cfg_attr(true, attr3)] - v2: fn(#[cfg(false)] param: i32, #[cfg_attr(true, attr4)] param2: u32), - v3: Foo<{ - #[cfg(false)] - let foo = 123; - 456 - }>, - #[cfg(false)] - v4: bool // No comma here -} - -#[attr1] -#[my_cool_derive()] struct Foo { - v1: i32, #[attr3]v2: fn(#[attr4]param2: u32), v3: Foo< { - 456 - } - >, -} #[attr1] #[derive(Bar)] #[attr2] struct S;"#]], @@ -169,7 +87,7 @@ fn foo() { bar.; blub } fn foo() { bar.; blub } fn foo() { - bar.; + bar. 
; blub }"#]], ); diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs index e4b95a5a77a5..7d5e627964eb 100644 --- a/crates/hir-def/src/nameres.rs +++ b/crates/hir-def/src/nameres.rs @@ -391,14 +391,19 @@ pub(crate) fn crate_local_def_map(db: &dyn DefDatabase, crate_id: Crate) -> DefM ) .entered(); - let root_file_id = crate_id.root_file_id(db); - let module_data = - ModuleData::new(ModuleOrigin::CrateRoot { definition: root_file_id }, Visibility::Public); + let module_data = ModuleData::new( + ModuleOrigin::CrateRoot { definition: krate.root_file_id(db) }, + Visibility::Public, + ); let def_map = DefMap::empty(crate_id, Arc::new(DefMapCrateData::new(krate.edition)), module_data, None); - let (def_map, local_def_map) = - collector::collect_defs(db, def_map, TreeId::new(root_file_id.into(), None), None); + let (def_map, local_def_map) = collector::collect_defs( + db, + def_map, + TreeId::new(krate.root_file_id(db).into(), None), + None, + ); DefMapPair::new(db, def_map, local_def_map) } diff --git a/crates/hir-def/src/nameres/assoc.rs b/crates/hir-def/src/nameres/assoc.rs index b67853347bde..8d2a386de8ec 100644 --- a/crates/hir-def/src/nameres/assoc.rs +++ b/crates/hir-def/src/nameres/assoc.rs @@ -4,8 +4,7 @@ use std::mem; use cfg::CfgOptions; use hir_expand::{ - AstId, AttrMacroAttrIds, ExpandTo, HirFileId, InFile, Intern, Lookup, MacroCallKind, - MacroDefKind, + AstId, ExpandTo, HirFileId, InFile, Intern, Lookup, MacroCallKind, MacroDefKind, mod_path::ModPath, name::{AsName, Name}, span_map::SpanMap, @@ -22,8 +21,8 @@ use triomphe::Arc; use crate::{ AssocItemId, AstIdWithPath, ConstLoc, FunctionId, FunctionLoc, ImplId, ItemContainerId, ItemLoc, MacroCallId, ModuleId, TraitId, TypeAliasId, TypeAliasLoc, + attr::Attrs, db::DefDatabase, - item_tree::AttrsOrCfg, macro_call_as_call_id, nameres::{ DefMap, LocalDefMap, MacroSubNs, @@ -192,22 +191,19 @@ impl<'a> AssocItemCollector<'a> { fn collect_item(&mut self, item: ast::AssocItem) { let ast_id = 
self.ast_id_map.ast_id(&item); - let attrs = - match AttrsOrCfg::lower(self.db, &item, &|| self.cfg_options, self.span_map.as_ref()) { - AttrsOrCfg::Enabled { attrs } => attrs, - AttrsOrCfg::CfgDisabled(cfg) => { - self.diagnostics.push(DefDiagnostic::unconfigured_code( - self.module_id.local_id, - InFile::new(self.file_id, ast_id.erase()), - cfg.0, - self.cfg_options.clone(), - )); - return; - } - }; + let attrs = Attrs::new(self.db, &item, self.span_map.as_ref(), self.cfg_options); + if let Err(cfg) = attrs.is_cfg_enabled(self.cfg_options) { + self.diagnostics.push(DefDiagnostic::unconfigured_code( + self.module_id.local_id, + InFile::new(self.file_id, ast_id.erase()), + cfg, + self.cfg_options.clone(), + )); + return; + } let ast_id = InFile::new(self.file_id, ast_id.upcast()); - 'attrs: for (attr_id, attr) in attrs.as_ref().iter() { + 'attrs: for attr in &*attrs { let ast_id_with_path = AstIdWithPath { path: attr.path.clone(), ast_id }; match self.def_map.resolve_attr_macro( @@ -216,7 +212,6 @@ impl<'a> AssocItemCollector<'a> { self.module_id.local_id, ast_id_with_path, attr, - attr_id, ) { Ok(ResolvedAttr::Macro(call_id)) => { let loc = self.db.lookup_intern_macro_call(call_id); @@ -245,12 +240,8 @@ impl<'a> AssocItemCollector<'a> { Err(_) => { self.diagnostics.push(DefDiagnostic::unresolved_macro_call( self.module_id.local_id, - MacroCallKind::Attr { - ast_id, - attr_args: None, - censored_attr_ids: AttrMacroAttrIds::from_one(attr_id), - }, - (*attr.path).clone(), + MacroCallKind::Attr { ast_id, attr_args: None, invoc_attr_index: attr.id }, + attr.path().clone(), )); } } diff --git a/crates/hir-def/src/nameres/attr_resolution.rs b/crates/hir-def/src/nameres/attr_resolution.rs index fb755026c3e0..2f56d608fcbf 100644 --- a/crates/hir-def/src/nameres/attr_resolution.rs +++ b/crates/hir-def/src/nameres/attr_resolution.rs @@ -2,7 +2,7 @@ use base_db::Crate; use hir_expand::{ - AttrMacroAttrIds, MacroCallId, MacroCallKind, MacroDefId, + MacroCallId, MacroCallKind, 
MacroDefId, attrs::{Attr, AttrId, AttrInput}, inert_attr_macro::find_builtin_attr_idx, mod_path::{ModPath, PathKind}, @@ -28,7 +28,6 @@ pub enum ResolvedAttr { } impl DefMap { - /// This cannot be used to resolve items that allow derives. pub(crate) fn resolve_attr_macro( &self, local_def_map: &LocalDefMap, @@ -36,7 +35,6 @@ impl DefMap { original_module: LocalModuleId, ast_id: AstIdWithPath, attr: &Attr, - attr_id: AttrId, ) -> Result { // NB: does not currently work for derive helpers as they aren't recorded in the `DefMap` @@ -70,9 +68,6 @@ impl DefMap { db, &ast_id, attr, - // There aren't any active attributes before this one, because attribute macros - // replace their input, and derive macros are not allowed in this function. - AttrMacroAttrIds::from_one(attr_id), self.krate, db.macro_def(def), ))) @@ -107,7 +102,6 @@ pub(super) fn attr_macro_as_call_id( db: &dyn DefDatabase, item_attr: &AstIdWithPath, macro_attr: &Attr, - censored_attr_ids: AttrMacroAttrIds, krate: Crate, def: MacroDefId, ) -> MacroCallId { @@ -127,7 +121,7 @@ pub(super) fn attr_macro_as_call_id( MacroCallKind::Attr { ast_id: item_attr.ast_id, attr_args: arg.map(Arc::new), - censored_attr_ids, + invoc_attr_index: macro_attr.id, }, macro_attr.ctxt, ) diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index c3b272b403bb..a2ce53835651 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -3,14 +3,14 @@ //! `DefCollector::collect` contains the fixed-point iteration loop which //! resolves imports and expands macros. 
-use std::{cmp::Ordering, iter, mem}; +use std::{cmp::Ordering, iter, mem, ops::Not}; use base_db::{BuiltDependency, Crate, CrateOrigin, LangCrateOrigin}; use cfg::{CfgAtom, CfgExpr, CfgOptions}; use either::Either; use hir_expand::{ - AttrMacroAttrIds, EditionedFileId, ErasedAstId, ExpandTo, HirFileId, InFile, MacroCallId, - MacroCallKind, MacroDefId, MacroDefKind, + EditionedFileId, ErasedAstId, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, + MacroDefId, MacroDefKind, attrs::{Attr, AttrId}, builtin::{find_builtin_attr, find_builtin_derive, find_builtin_macro}, mod_path::{ModPath, PathKind}, @@ -18,10 +18,9 @@ use hir_expand::{ proc_macro::CustomProcMacroExpander, }; use intern::{Interned, sym}; -use itertools::izip; +use itertools::{Itertools, izip}; use la_arena::Idx; use rustc_hash::{FxHashMap, FxHashSet}; -use smallvec::SmallVec; use span::{Edition, FileAstId, SyntaxContext}; use syntax::ast; use triomphe::Arc; @@ -33,11 +32,12 @@ use crate::{ MacroRulesId, MacroRulesLoc, MacroRulesLocFlags, ModuleDefId, ModuleId, ProcMacroId, ProcMacroLoc, StaticLoc, StructLoc, TraitLoc, TypeAliasLoc, UnionLoc, UnresolvedMacro, UseId, UseLoc, + attr::Attrs, db::DefDatabase, item_scope::{GlobId, ImportId, ImportOrExternCrate, PerNsGlobImports}, item_tree::{ - self, Attrs, AttrsOrCfg, FieldsShape, ImportAlias, ImportKind, ItemTree, ItemTreeAstId, - Macro2, MacroCall, MacroRules, Mod, ModItemId, ModKind, TreeId, + self, FieldsShape, ImportAlias, ImportKind, ItemTree, ItemTreeAstId, Macro2, MacroCall, + MacroRules, Mod, ModItemId, ModKind, TreeId, }, macro_call_as_call_id, nameres::{ @@ -102,7 +102,6 @@ pub(super) fn collect_defs( proc_macros, from_glob_import: Default::default(), skip_attrs: Default::default(), - prev_active_attrs: Default::default(), unresolved_extern_crates: Default::default(), is_proc_macro: krate.is_proc_macro, }; @@ -207,7 +206,6 @@ enum MacroDirectiveKind<'db> { }, Attr { ast_id: AstIdWithPath, - attr_id: AttrId, attr: Attr, mod_item: ModItemId, 
/* is this needed? */ tree: TreeId, @@ -248,27 +246,28 @@ struct DefCollector<'db> { /// This also stores the attributes to skip when we resolve derive helpers and non-macro /// non-builtin attributes in general. // FIXME: There has to be a better way to do this - skip_attrs: FxHashMap, AttrId>, - /// When we expand attributes, we need to censor all previous active attributes - /// on the same item. Therefore, this holds all active attributes that we already - /// expanded. - prev_active_attrs: FxHashMap, SmallVec<[AttrId; 1]>>, + skip_attrs: FxHashMap>, AttrId>, } impl<'db> DefCollector<'db> { fn seed_with_top_level(&mut self) { let _p = tracing::info_span!("seed_with_top_level").entered(); - let file_id = self.def_map.krate.root_file_id(self.db); + let file_id = self.def_map.krate.data(self.db).root_file_id(self.db); let item_tree = self.db.file_item_tree(file_id.into()); - let attrs = match item_tree.top_level_attrs() { - AttrsOrCfg::Enabled { attrs } => attrs.as_ref(), - AttrsOrCfg::CfgDisabled(it) => it.1.as_ref(), - }; + let attrs = item_tree.top_level_attrs(self.db, self.def_map.krate); let crate_data = Arc::get_mut(&mut self.def_map.data).unwrap(); + let mut process = true; + // Process other crate-level attributes. 
for attr in &*attrs { + if let Some(cfg) = attr.cfg() + && self.cfg_options.check(&cfg) == Some(false) + { + process = false; + break; + } let Some(attr_name) = attr.path.as_ident() else { continue }; match () { @@ -292,7 +291,7 @@ impl<'db> DefCollector<'db> { () if *attr_name == sym::feature => { let features = attr.parse_path_comma_token_tree(self.db).into_iter().flatten().filter_map( - |(feat, _, _)| match feat.segments() { + |(feat, _)| match feat.segments() { [name] => Some(name.symbol().clone()), _ => None, }, @@ -345,7 +344,7 @@ impl<'db> DefCollector<'db> { self.inject_prelude(); - if matches!(item_tree.top_level_attrs(), AttrsOrCfg::CfgDisabled(_)) { + if !process { return; } @@ -363,7 +362,10 @@ impl<'db> DefCollector<'db> { fn seed_with_inner(&mut self, tree_id: TreeId) { let item_tree = tree_id.item_tree(self.db); - let is_cfg_enabled = matches!(item_tree.top_level_attrs(), AttrsOrCfg::Enabled { .. }); + let is_cfg_enabled = item_tree + .top_level_attrs(self.db, self.def_map.krate) + .cfg() + .is_none_or(|cfg| self.cfg_options.check(&cfg) != Some(false)); if is_cfg_enabled { self.inject_prelude(); @@ -454,18 +456,18 @@ impl<'db> DefCollector<'db> { self.unresolved_macros.iter().enumerate().find_map(|(idx, directive)| match &directive .kind { - MacroDirectiveKind::Attr { ast_id, mod_item, attr_id, attr, tree, item_tree } => { + MacroDirectiveKind::Attr { ast_id, mod_item, attr, tree, item_tree } => { self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call( directive.module_id, MacroCallKind::Attr { ast_id: ast_id.ast_id, attr_args: None, - censored_attr_ids: AttrMacroAttrIds::from_one(*attr_id), + invoc_attr_index: attr.id, }, - (*attr.path).clone(), + attr.path().clone(), )); - self.skip_attrs.insert(ast_id.ast_id.with_value(mod_item.ast_id()), *attr_id); + self.skip_attrs.insert(ast_id.ast_id.with_value(mod_item.ast_id()), attr.id); Some((idx, directive, *mod_item, *tree, *item_tree)) } @@ -1348,7 +1350,6 @@ impl<'db> DefCollector<'db> { 
MacroDirectiveKind::Attr { ast_id: file_ast_id, mod_item, - attr_id, attr, tree, item_tree, @@ -1361,7 +1362,7 @@ impl<'db> DefCollector<'db> { let mod_dir = collector.mod_dirs[&directive.module_id].clone(); collector .skip_attrs - .insert(InFile::new(file_id, mod_item.ast_id()), *attr_id); + .insert(InFile::new(file_id, mod_item.ast_id()), attr.id); ModCollector { def_collector: collector, @@ -1397,6 +1398,7 @@ impl<'db> DefCollector<'db> { // being cfg'ed out). // Ideally we will just expand them to nothing here. But we are only collecting macro calls, // not expanding them, so we have no way to do that. + // If you add an ignored attribute here, also add it to `Semantics::might_be_inside_macro_call()`. if matches!( def.kind, MacroDefKind::BuiltInAttr(_, expander) @@ -1408,18 +1410,8 @@ impl<'db> DefCollector<'db> { } } - let mut call_id = || { - let active_attrs = self.prev_active_attrs.entry(ast_id).or_default(); - active_attrs.push(*attr_id); - - attr_macro_as_call_id( - self.db, - file_ast_id, - attr, - AttrMacroAttrIds::from_many(active_attrs), - self.def_map.krate, - def, - ) + let call_id = || { + attr_macro_as_call_id(self.db, file_ast_id, attr, self.def_map.krate, def) }; if matches!(def, MacroDefId { kind: MacroDefKind::BuiltInAttr(_, exp), .. 
} @@ -1437,7 +1429,7 @@ impl<'db> DefCollector<'db> { let diag = DefDiagnostic::invalid_derive_target( directive.module_id, ast_id, - *attr_id, + attr.id, ); self.def_map.diagnostics.push(diag); return recollect_without(self); @@ -1450,7 +1442,7 @@ impl<'db> DefCollector<'db> { Some(derive_macros) => { let call_id = call_id(); let mut len = 0; - for (idx, (path, call_site, _)) in derive_macros.enumerate() { + for (idx, (path, call_site)) in derive_macros.enumerate() { let ast_id = AstIdWithPath::new( file_id, ast_id.value, @@ -1461,7 +1453,7 @@ impl<'db> DefCollector<'db> { depth: directive.depth + 1, kind: MacroDirectiveKind::Derive { ast_id, - derive_attr: *attr_id, + derive_attr: attr.id, derive_pos: idx, ctxt: call_site.ctx, derive_macro_id: call_id, @@ -1477,13 +1469,13 @@ impl<'db> DefCollector<'db> { // Check the comment in [`builtin_attr_macro`]. self.def_map.modules[directive.module_id] .scope - .init_derive_attribute(ast_id, *attr_id, call_id, len + 1); + .init_derive_attribute(ast_id, attr.id, call_id, len + 1); } None => { let diag = DefDiagnostic::malformed_derive( directive.module_id, ast_id, - *attr_id, + attr.id, ); self.def_map.diagnostics.push(diag); } @@ -1720,17 +1712,16 @@ impl ModCollector<'_, '_> { }; let mut process_mod_item = |item: ModItemId| { - let attrs = match self.item_tree.attrs(item.ast_id()) { - Some(AttrsOrCfg::Enabled { attrs }) => attrs.as_ref(), - None => Attrs::EMPTY, - Some(AttrsOrCfg::CfgDisabled(cfg)) => { - let ast_id = item.ast_id().erase(); - self.emit_unconfigured_diagnostic(InFile::new(self.file_id(), ast_id), &cfg.0); - return; - } - }; + let attrs = self.item_tree.attrs(db, krate, item.ast_id()); + if let Some(cfg) = attrs.cfg() + && !self.is_cfg_enabled(&cfg) + { + let ast_id = item.ast_id().erase(); + self.emit_unconfigured_diagnostic(InFile::new(self.file_id(), ast_id), &cfg); + return; + } - if let Err(()) = self.resolve_attributes(attrs, item, container) { + if let Err(()) = self.resolve_attributes(&attrs, item, 
container) { // Do not process the item. It has at least one non-builtin attribute, so the // fixed-point algorithm is required to resolve the rest of them. return; @@ -1742,7 +1733,7 @@ impl ModCollector<'_, '_> { self.def_collector.crate_local_def_map.unwrap_or(&self.def_collector.local_def_map); match item { - ModItemId::Mod(m) => self.collect_module(m, attrs), + ModItemId::Mod(m) => self.collect_module(m, &attrs), ModItemId::Use(item_tree_id) => { let id = UseLoc { container: module, id: InFile::new(self.file_id(), item_tree_id) } @@ -2015,7 +2006,7 @@ impl ModCollector<'_, '_> { ); return; }; - for (path, _, _) in paths { + for (path, _) in paths { if let Some(name) = path.as_ident() { single_imports.push(name.clone()); } @@ -2029,7 +2020,7 @@ impl ModCollector<'_, '_> { ); } - fn collect_module(&mut self, module_ast_id: ItemTreeAstId, attrs: Attrs<'_>) { + fn collect_module(&mut self, module_ast_id: ItemTreeAstId, attrs: &Attrs) { let path_attr = attrs.by_key(sym::path).string_value_unescape(); let is_macro_use = attrs.by_key(sym::macro_use).exists(); let module = &self.item_tree[module_ast_id]; @@ -2070,18 +2061,23 @@ impl ModCollector<'_, '_> { self.file_id(), &module.name, path_attr.as_deref(), - self.def_collector.def_map.krate, ) { Ok((file_id, is_mod_rs, mod_dir)) => { let item_tree = db.file_item_tree(file_id.into()); - match item_tree.top_level_attrs() { - AttrsOrCfg::CfgDisabled(cfg) => { + let krate = self.def_collector.def_map.krate; + let is_enabled = item_tree + .top_level_attrs(db, krate) + .cfg() + .and_then(|cfg| self.is_cfg_enabled(&cfg).not().then_some(cfg)) + .map_or(Ok(()), Err); + match is_enabled { + Err(cfg) => { self.emit_unconfigured_diagnostic( InFile::new(self.file_id(), module_ast_id.erase()), - &cfg.0, + &cfg, ); } - AttrsOrCfg::Enabled { attrs } => { + Ok(()) => { let module_id = self.push_child_module( module.name.clone(), ast_id.value, @@ -2097,8 +2093,11 @@ impl ModCollector<'_, '_> { mod_dir, } 
.collect_in_top_module(item_tree.top_level_items()); - let is_macro_use = - is_macro_use || attrs.as_ref().by_key(sym::macro_use).exists(); + let is_macro_use = is_macro_use + || item_tree + .top_level_attrs(db, krate) + .by_key(sym::macro_use) + .exists(); if is_macro_use { self.import_all_legacy_macros(module_id); } @@ -2186,16 +2185,36 @@ impl ModCollector<'_, '_> { /// assumed to be resolved already. fn resolve_attributes( &mut self, - attrs: Attrs<'_>, + attrs: &Attrs, mod_item: ModItemId, container: ItemContainerId, ) -> Result<(), ()> { - let ignore_up_to = self + let mut ignore_up_to = self .def_collector .skip_attrs .get(&InFile::new(self.file_id(), mod_item.ast_id())) .copied(); - for (attr_id, attr) in attrs.iter_after(ignore_up_to) { + let iter = attrs + .iter() + .dedup_by(|a, b| { + // FIXME: this should not be required, all attributes on an item should have a + // unique ID! + // Still, this occurs because `#[cfg_attr]` can "expand" to multiple attributes: + // #[cfg_attr(not(off), unresolved, unresolved)] + // struct S; + // We should come up with a different way to ID attributes. 
+ a.id == b.id + }) + .skip_while(|attr| match ignore_up_to { + Some(id) if attr.id == id => { + ignore_up_to = None; + true + } + Some(_) => true, + None => false, + }); + + for attr in iter { if self.def_collector.def_map.is_builtin_or_registered_attr(&attr.path) { continue; } @@ -2210,7 +2229,6 @@ impl ModCollector<'_, '_> { depth: self.macro_depth + 1, kind: MacroDirectiveKind::Attr { ast_id, - attr_id, attr: attr.clone(), mod_item, tree: self.tree_id, @@ -2226,14 +2244,9 @@ impl ModCollector<'_, '_> { } fn collect_macro_rules(&mut self, ast_id: ItemTreeAstId, module: ModuleId) { + let krate = self.def_collector.def_map.krate; let mac = &self.item_tree[ast_id]; - let attrs = match self.item_tree.attrs(ast_id.upcast()) { - Some(AttrsOrCfg::Enabled { attrs }) => attrs.as_ref(), - None => Attrs::EMPTY, - Some(AttrsOrCfg::CfgDisabled(_)) => { - unreachable!("we only get here if the macro is not cfg'ed out") - } - }; + let attrs = self.item_tree.attrs(self.def_collector.db, krate, ast_id.upcast()); let f_ast_id = InFile::new(self.file_id(), ast_id.upcast()); let export_attr = || attrs.by_key(sym::macro_export); @@ -2313,14 +2326,9 @@ impl ModCollector<'_, '_> { } fn collect_macro_def(&mut self, ast_id: ItemTreeAstId, module: ModuleId) { + let krate = self.def_collector.def_map.krate; let mac = &self.item_tree[ast_id]; - let attrs = match self.item_tree.attrs(ast_id.upcast()) { - Some(AttrsOrCfg::Enabled { attrs }) => attrs.as_ref(), - None => Attrs::EMPTY, - Some(AttrsOrCfg::CfgDisabled(_)) => { - unreachable!("we only get here if the macro is not cfg'ed out") - } - }; + let attrs = self.item_tree.attrs(self.def_collector.db, krate, ast_id.upcast()); let f_ast_id = InFile::new(self.file_id(), ast_id.upcast()); // Case 1: builtin macros @@ -2506,6 +2514,10 @@ impl ModCollector<'_, '_> { Some((a, b)) } + fn is_cfg_enabled(&self, cfg: &CfgExpr) -> bool { + self.def_collector.cfg_options.check(cfg) != Some(false) + } + fn emit_unconfigured_diagnostic(&mut self, ast_id: 
ErasedAstId, cfg: &CfgExpr) { self.def_collector.def_map.diagnostics.push(DefDiagnostic::unconfigured_code( self.module_id, @@ -2545,7 +2557,6 @@ mod tests { proc_macros: Default::default(), from_glob_import: Default::default(), skip_attrs: Default::default(), - prev_active_attrs: Default::default(), is_proc_macro: false, unresolved_extern_crates: Default::default(), }; diff --git a/crates/hir-def/src/nameres/diagnostics.rs b/crates/hir-def/src/nameres/diagnostics.rs index 6a07c56aeebe..c495a0744919 100644 --- a/crates/hir-def/src/nameres/diagnostics.rs +++ b/crates/hir-def/src/nameres/diagnostics.rs @@ -17,8 +17,8 @@ pub enum DefDiagnosticKind { UnconfiguredCode { ast_id: ErasedAstId, cfg: CfgExpr, opts: CfgOptions }, UnresolvedMacroCall { ast: MacroCallKind, path: ModPath }, UnimplementedBuiltinMacro { ast: AstId }, - InvalidDeriveTarget { ast: AstId, id: AttrId }, - MalformedDerive { ast: AstId, id: AttrId }, + InvalidDeriveTarget { ast: AstId, id: usize }, + MalformedDerive { ast: AstId, id: usize }, MacroDefError { ast: AstId, message: String }, MacroError { ast: AstId, path: ModPath, err: ExpandErrorKind }, } @@ -119,7 +119,10 @@ impl DefDiagnostic { ast: AstId, id: AttrId, ) -> Self { - Self { in_module: container, kind: DefDiagnosticKind::InvalidDeriveTarget { ast, id } } + Self { + in_module: container, + kind: DefDiagnosticKind::InvalidDeriveTarget { ast, id: id.ast_index() }, + } } pub(super) fn malformed_derive( @@ -127,6 +130,9 @@ impl DefDiagnostic { ast: AstId, id: AttrId, ) -> Self { - Self { in_module: container, kind: DefDiagnosticKind::MalformedDerive { ast, id } } + Self { + in_module: container, + kind: DefDiagnosticKind::MalformedDerive { ast, id: id.ast_index() }, + } } } diff --git a/crates/hir-def/src/nameres/mod_resolution.rs b/crates/hir-def/src/nameres/mod_resolution.rs index 140b77ac002f..0c50f13edfb6 100644 --- a/crates/hir-def/src/nameres/mod_resolution.rs +++ b/crates/hir-def/src/nameres/mod_resolution.rs @@ -1,6 +1,6 @@ //! 
This module resolves `mod foo;` declaration to file. use arrayvec::ArrayVec; -use base_db::{AnchoredPath, Crate}; +use base_db::AnchoredPath; use hir_expand::{EditionedFileId, name::Name}; use crate::{HirFileId, db::DefDatabase}; @@ -62,7 +62,6 @@ impl ModDir { file_id: HirFileId, name: &Name, attr_path: Option<&str>, - krate: Crate, ) -> Result<(EditionedFileId, bool, ModDir), Box<[String]>> { let name = name.as_str(); @@ -92,7 +91,7 @@ impl ModDir { if let Some(mod_dir) = self.child(dir_path, !root_dir_owner) { return Ok(( // FIXME: Edition, is this rightr? - EditionedFileId::new(db, file_id, orig_file_id.edition(db), krate), + EditionedFileId::new(db, file_id, orig_file_id.edition(db)), is_mod_rs, mod_dir, )); diff --git a/crates/hir-def/src/nameres/proc_macro.rs b/crates/hir-def/src/nameres/proc_macro.rs index cd45afe57d7c..cd8882183bb4 100644 --- a/crates/hir-def/src/nameres/proc_macro.rs +++ b/crates/hir-def/src/nameres/proc_macro.rs @@ -3,10 +3,8 @@ use hir_expand::name::{AsName, Name}; use intern::sym; -use crate::{ - item_tree::Attrs, - tt::{Leaf, TokenTree, TopSubtree, TtElement}, -}; +use crate::attr::Attrs; +use crate::tt::{Leaf, TokenTree, TopSubtree, TtElement}; #[derive(Debug, PartialEq, Eq)] pub struct ProcMacroDef { @@ -31,8 +29,8 @@ impl ProcMacroKind { } } -impl Attrs<'_> { - pub(crate) fn parse_proc_macro_decl(&self, func_name: &Name) -> Option { +impl Attrs { + pub fn parse_proc_macro_decl(&self, func_name: &Name) -> Option { if self.is_proc_macro() { Some(ProcMacroDef { name: func_name.clone(), kind: ProcMacroKind::Bang }) } else if self.is_proc_macro_attribute() { @@ -53,10 +51,15 @@ impl Attrs<'_> { } } - pub(crate) fn parse_proc_macro_derive(&self) -> Option<(Name, Box<[Name]>)> { + pub fn parse_proc_macro_derive(&self) -> Option<(Name, Box<[Name]>)> { let derive = self.by_key(sym::proc_macro_derive).tt_values().next()?; parse_macro_name_and_helper_attrs(derive) } + + pub fn parse_rustc_builtin_macro(&self) -> Option<(Name, Box<[Name]>)> { 
+ let derive = self.by_key(sym::rustc_builtin_macro).tt_values().next()?; + parse_macro_name_and_helper_attrs(derive) + } } // This fn is intended for `#[proc_macro_derive(..)]` and `#[rustc_builtin_macro(..)]`, which have @@ -81,11 +84,14 @@ pub(crate) fn parse_macro_name_and_helper_attrs(tt: &TopSubtree) -> Option<(Name let helpers = tt::TokenTreesView::new(&tt.token_trees().flat_tokens()[3..]).try_into_subtree()?; let helpers = helpers .iter() - .filter_map(|tt| match tt { + .filter( + |tt| !matches!(tt, TtElement::Leaf(Leaf::Punct(comma)) if comma.char == ','), + ) + .map(|tt| match tt { TtElement::Leaf(Leaf::Ident(helper)) => Some(helper.as_name()), _ => None, }) - .collect::>(); + .collect::>>()?; Some((trait_name.as_name(), helpers)) } diff --git a/crates/hir-def/src/signatures.rs b/crates/hir-def/src/signatures.rs index c9e8955ad68c..ebbf87cad668 100644 --- a/crates/hir-def/src/signatures.rs +++ b/crates/hir-def/src/signatures.rs @@ -21,7 +21,7 @@ use triomphe::Arc; use crate::{ ConstId, EnumId, EnumVariantId, EnumVariantLoc, ExternBlockId, FunctionId, HasModule, ImplId, ItemContainerId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, UnionId, VariantId, - attrs::AttrFlags, + attr::Attrs, db::DefDatabase, expr_store::{ ExpressionStore, ExpressionStoreSourceMap, @@ -48,13 +48,12 @@ pub struct StructSignature { pub store: Arc, pub flags: StructFlags, pub shape: FieldsShape, + pub repr: Option, } bitflags! { #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub struct StructFlags: u8 { - /// Indicates whether this struct has `#[repr]`. - const HAS_REPR = 1 << 0; /// Indicates whether the struct has a `#[rustc_has_incoherent_inherent_impls]` attribute. const RUSTC_HAS_INCOHERENT_INHERENT_IMPLS = 1 << 1; /// Indicates whether the struct has a `#[fundamental]` attribute. 
@@ -76,19 +75,16 @@ impl StructSignature { pub fn query(db: &dyn DefDatabase, id: StructId) -> (Arc, Arc) { let loc = id.lookup(db); let InFile { file_id, value: source } = loc.source(db); - let attrs = AttrFlags::query(db, id.into()); + let attrs = db.attrs(id.into()); let mut flags = StructFlags::empty(); - if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) { + if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() { flags |= StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS; } - if attrs.contains(AttrFlags::FUNDAMENTAL) { + if attrs.by_key(sym::fundamental).exists() { flags |= StructFlags::FUNDAMENTAL; } - if attrs.contains(AttrFlags::HAS_REPR) { - flags |= StructFlags::HAS_REPR; - } - if let Some(lang) = attrs.lang_item_with_attrs(db, id.into()) { + if let Some(lang) = attrs.lang_item() { match lang { LangItem::PhantomData => flags |= StructFlags::IS_PHANTOM_DATA, LangItem::OwnedBox => flags |= StructFlags::IS_BOX, @@ -98,6 +94,7 @@ impl StructSignature { _ => (), } } + let repr = attrs.repr(); let shape = adt_shape(source.kind()); let (store, generic_params, source_map) = lower_generic_params( @@ -115,19 +112,11 @@ impl StructSignature { flags, shape, name: as_name_opt(source.name()), + repr, }), Arc::new(source_map), ) } - - #[inline] - pub fn repr(&self, db: &dyn DefDatabase, id: StructId) -> Option { - if self.flags.contains(StructFlags::HAS_REPR) { - AttrFlags::repr(db, id.into()) - } else { - None - } - } } #[inline] @@ -145,22 +134,22 @@ pub struct UnionSignature { pub generic_params: Arc, pub store: Arc, pub flags: StructFlags, + pub repr: Option, } impl UnionSignature { pub fn query(db: &dyn DefDatabase, id: UnionId) -> (Arc, Arc) { let loc = id.lookup(db); - let attrs = AttrFlags::query(db, id.into()); + let attrs = db.attrs(id.into()); let mut flags = StructFlags::empty(); - if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) { + if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() { flags |= 
StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS; } - if attrs.contains(AttrFlags::FUNDAMENTAL) { + if attrs.by_key(sym::fundamental).exists() { flags |= StructFlags::FUNDAMENTAL; } - if attrs.contains(AttrFlags::HAS_REPR) { - flags |= StructFlags::HAS_REPR; - } + + let repr = attrs.repr(); let InFile { file_id, value: source } = loc.source(db); let (store, generic_params, source_map) = lower_generic_params( @@ -176,6 +165,7 @@ impl UnionSignature { generic_params, store, flags, + repr, name: as_name_opt(source.name()), }), Arc::new(source_map), @@ -196,17 +186,20 @@ pub struct EnumSignature { pub generic_params: Arc, pub store: Arc, pub flags: EnumFlags, + pub repr: Option, } impl EnumSignature { pub fn query(db: &dyn DefDatabase, id: EnumId) -> (Arc, Arc) { let loc = id.lookup(db); - let attrs = AttrFlags::query(db, id.into()); + let attrs = db.attrs(id.into()); let mut flags = EnumFlags::empty(); - if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) { + if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() { flags |= EnumFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS; } + let repr = attrs.repr(); + let InFile { file_id, value: source } = loc.source(db); let (store, generic_params, source_map) = lower_generic_params( db, @@ -222,14 +215,15 @@ impl EnumSignature { generic_params, store, flags, + repr, name: as_name_opt(source.name()), }), Arc::new(source_map), ) } - pub fn variant_body_type(db: &dyn DefDatabase, id: EnumId) -> IntegerType { - match AttrFlags::repr(db, id.into()) { + pub fn variant_body_type(&self) -> IntegerType { + match self.repr { Some(ReprOptions { int: Some(builtin), .. 
}) => builtin, _ => IntegerType::Pointer(true), } @@ -257,9 +251,9 @@ impl ConstSignature { let loc = id.lookup(db); let module = loc.container.module(db); - let attrs = AttrFlags::query(db, id.into()); + let attrs = db.attrs(id.into()); let mut flags = ConstFlags::empty(); - if attrs.contains(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL) { + if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() { flags |= ConstFlags::RUSTC_ALLOW_INCOHERENT_IMPL; } let source = loc.source(db); @@ -312,9 +306,9 @@ impl StaticSignature { let loc = id.lookup(db); let module = loc.container.module(db); - let attrs = AttrFlags::query(db, id.into()); + let attrs = db.attrs(id.into()); let mut flags = StaticFlags::empty(); - if attrs.contains(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL) { + if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() { flags |= StaticFlags::RUSTC_ALLOW_INCOHERENT_IMPL; } @@ -439,7 +433,7 @@ impl TraitSignature { let loc = id.lookup(db); let mut flags = TraitFlags::empty(); - let attrs = AttrFlags::query(db, id.into()); + let attrs = db.attrs(id.into()); let source = loc.source(db); if source.value.auto_token().is_some() { flags.insert(TraitFlags::AUTO); @@ -450,23 +444,34 @@ impl TraitSignature { if source.value.eq_token().is_some() { flags.insert(TraitFlags::ALIAS); } - if attrs.contains(AttrFlags::FUNDAMENTAL) { + if attrs.by_key(sym::fundamental).exists() { flags |= TraitFlags::FUNDAMENTAL; } - if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) { + if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() { flags |= TraitFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS; } - if attrs.contains(AttrFlags::RUSTC_PAREN_SUGAR) { + if attrs.by_key(sym::rustc_paren_sugar).exists() { flags |= TraitFlags::RUSTC_PAREN_SUGAR; } - if attrs.contains(AttrFlags::RUSTC_COINDUCTIVE) { + if attrs.by_key(sym::rustc_coinductive).exists() { flags |= TraitFlags::COINDUCTIVE; } + let mut skip_array_during_method_dispatch = + 
attrs.by_key(sym::rustc_skip_array_during_method_dispatch).exists(); + let mut skip_boxed_slice_during_method_dispatch = false; + for tt in attrs.by_key(sym::rustc_skip_during_method_dispatch).tt_values() { + for tt in tt.iter() { + if let tt::iter::TtElement::Leaf(tt::Leaf::Ident(ident)) = tt { + skip_array_during_method_dispatch |= ident.sym == sym::array; + skip_boxed_slice_during_method_dispatch |= ident.sym == sym::boxed_slice; + } + } + } - if attrs.contains(AttrFlags::RUSTC_SKIP_ARRAY_DURING_METHOD_DISPATCH) { + if skip_array_during_method_dispatch { flags |= TraitFlags::SKIP_ARRAY_DURING_METHOD_DISPATCH; } - if attrs.contains(AttrFlags::RUSTC_SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH) { + if skip_boxed_slice_during_method_dispatch { flags |= TraitFlags::SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH; } @@ -498,8 +503,7 @@ bitflags! { const HAS_TARGET_FEATURE = 1 << 9; const DEPRECATED_SAFE_2024 = 1 << 10; const EXPLICIT_SAFE = 1 << 11; - const HAS_LEGACY_CONST_GENERICS = 1 << 12; - const RUSTC_INTRINSIC = 1 << 13; + const RUSTC_INTRINSIC = 1 << 12; } } @@ -512,6 +516,8 @@ pub struct FunctionSignature { pub ret_type: Option, pub abi: Option, pub flags: FnFlags, + // FIXME: we should put this behind a fn flags + query to avoid bloating the struct + pub legacy_const_generics_indices: Option>>, } impl FunctionSignature { @@ -523,26 +529,23 @@ impl FunctionSignature { let module = loc.container.module(db); let mut flags = FnFlags::empty(); - let attrs = AttrFlags::query(db, id.into()); - if attrs.contains(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL) { + let attrs = db.attrs(id.into()); + if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() { flags.insert(FnFlags::RUSTC_ALLOW_INCOHERENT_IMPL); } - if attrs.contains(AttrFlags::HAS_TARGET_FEATURE) { + if attrs.by_key(sym::target_feature).exists() { flags.insert(FnFlags::HAS_TARGET_FEATURE); } - - if attrs.contains(AttrFlags::RUSTC_INTRINSIC) { + if attrs.by_key(sym::rustc_intrinsic).exists() { 
flags.insert(FnFlags::RUSTC_INTRINSIC); } - if attrs.contains(AttrFlags::HAS_LEGACY_CONST_GENERICS) { - flags.insert(FnFlags::HAS_LEGACY_CONST_GENERICS); - } + let legacy_const_generics_indices = attrs.rustc_legacy_const_generics(); let source = loc.source(db); if source.value.unsafe_token().is_some() { - if attrs.contains(AttrFlags::RUSTC_DEPRECATED_SAFE_2024) { + if attrs.by_key(sym::rustc_deprecated_safe_2024).exists() { flags.insert(FnFlags::DEPRECATED_SAFE_2024); } else { flags.insert(FnFlags::UNSAFE); @@ -584,6 +587,7 @@ impl FunctionSignature { ret_type, abi, flags, + legacy_const_generics_indices, name, }), Arc::new(source_map), @@ -632,19 +636,6 @@ impl FunctionSignature { self.flags.contains(FnFlags::HAS_TARGET_FEATURE) } - #[inline] - pub fn legacy_const_generics_indices<'db>( - &self, - db: &'db dyn DefDatabase, - id: FunctionId, - ) -> Option<&'db [u32]> { - if !self.flags.contains(FnFlags::HAS_LEGACY_CONST_GENERICS) { - return None; - } - - AttrFlags::legacy_const_generic_indices(db, id).as_deref() - } - pub fn is_intrinsic(db: &dyn DefDatabase, id: FunctionId) -> bool { let data = db.function_signature(id); data.flags.contains(FnFlags::RUSTC_INTRINSIC) @@ -688,11 +679,11 @@ impl TypeAliasSignature { let loc = id.lookup(db); let mut flags = TypeAliasFlags::empty(); - let attrs = AttrFlags::query(db, id.into()); - if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) { + let attrs = db.attrs(id.into()); + if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() { flags.insert(TypeAliasFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPL); } - if attrs.contains(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL) { + if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() { flags.insert(TypeAliasFlags::RUSTC_ALLOW_INCOHERENT_IMPL); } if matches!(loc.container, ItemContainerId::ExternBlockId(_)) { @@ -875,7 +866,7 @@ fn lower_fields( let mut has_fields = false; for (ty, field) in fields.value { has_fields = true; - match 
AttrFlags::is_cfg_enabled_for(&field, cfg_options) { + match Attrs::is_cfg_enabled_for(db, &field, col.span_map(), cfg_options) { Ok(()) => { let type_ref = col.lower_type_ref_opt(ty, &mut ExprCollector::impl_trait_error_allocator); @@ -937,6 +928,7 @@ impl EnumVariants { let loc = e.lookup(db); let source = loc.source(db); let ast_id_map = db.ast_id_map(source.file_id); + let span_map = db.span_map(source.file_id); let mut diagnostics = ThinVec::new(); let cfg_options = loc.container.krate.cfg_options(db); @@ -948,7 +940,7 @@ impl EnumVariants { .variants() .filter_map(|variant| { let ast_id = ast_id_map.ast_id(&variant); - match AttrFlags::is_cfg_enabled_for(&variant, cfg_options) { + match Attrs::is_cfg_enabled_for(db, &variant, span_map.as_ref(), cfg_options) { Ok(()) => { let enum_variant = EnumVariantLoc { id: source.with_value(ast_id), parent: e, index } diff --git a/crates/hir-def/src/src.rs b/crates/hir-def/src/src.rs index 153fd195f0ad..367b543cf908 100644 --- a/crates/hir-def/src/src.rs +++ b/crates/hir-def/src/src.rs @@ -7,7 +7,7 @@ use syntax::{AstNode, AstPtr, ast}; use crate::{ AstIdLoc, GenericDefId, LocalFieldId, LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup, - UseId, VariantId, attrs::AttrFlags, db::DefDatabase, + UseId, VariantId, attr::Attrs, db::DefDatabase, }; pub trait HasSource { @@ -145,13 +145,15 @@ impl HasChildSource for VariantId { (lookup.source(db).map(|it| it.kind()), lookup.container) } }; + let span_map = db.span_map(src.file_id); let mut map = ArenaMap::new(); match &src.value { ast::StructKind::Tuple(fl) => { let cfg_options = container.krate.cfg_options(db); let mut idx = 0; for fd in fl.fields() { - let enabled = AttrFlags::is_cfg_enabled_for(&fd, cfg_options).is_ok(); + let enabled = + Attrs::is_cfg_enabled_for(db, &fd, span_map.as_ref(), cfg_options).is_ok(); if !enabled { continue; } @@ -166,7 +168,8 @@ impl HasChildSource for VariantId { let cfg_options = container.krate.cfg_options(db); let mut idx = 0; for fd in 
fl.fields() { - let enabled = AttrFlags::is_cfg_enabled_for(&fd, cfg_options).is_ok(); + let enabled = + Attrs::is_cfg_enabled_for(db, &fd, span_map.as_ref(), cfg_options).is_ok(); if !enabled { continue; } diff --git a/crates/hir-def/src/test_db.rs b/crates/hir-def/src/test_db.rs index 3bb9c361b3c8..12a1c1554cc1 100644 --- a/crates/hir-def/src/test_db.rs +++ b/crates/hir-def/src/test_db.rs @@ -190,15 +190,7 @@ impl TestDB { let mut res = DefMap::ROOT; for (module, data) in def_map.modules() { let src = data.definition_source(self); - // We're not comparing the `base_db::EditionedFileId`, but rather the VFS `FileId`, because - // `position.file_id` is created before the def map, causing it to have to wrong crate - // attached often, which means it won't compare equal. This should not be a problem in real - // r-a session, only in tests, because in real r-a we only guess the crate on syntactic-only - // (e.g. on-enter) handlers. The rest pick the `EditionedFileId` from the def map. - let Some(file_id) = src.file_id.file_id() else { - continue; - }; - if file_id.file_id(self) != position.file_id.file_id(self) { + if src.file_id != position.file_id { continue; } @@ -238,15 +230,7 @@ impl TestDB { let mut fn_def = None; for (_, module) in def_map.modules() { let file_id = module.definition_source(self).file_id; - // We're not comparing the `base_db::EditionedFileId`, but rather the VFS `FileId`, because - // `position.file_id` is created before the def map, causing it to have to wrong crate - // attached often, which means it won't compare equal. This should not be a problem in real - // r-a session, only in tests, because in real r-a we only guess the crate on syntactic-only - // (e.g. on-enter) handlers. The rest pick the `EditionedFileId` from the def map. 
- let Some(file_id) = file_id.file_id() else { - continue; - }; - if file_id.file_id(self) != position.file_id.file_id(self) { + if file_id != position.file_id { continue; } for decl in module.scope.declarations() { @@ -269,25 +253,26 @@ impl TestDB { }; if size != Some(new_size) { size = Some(new_size); - fn_def = Some((it, file_id)); + fn_def = Some(it); } } } } // Find the innermost block expression that has a `DefMap`. - let (def_with_body, file_id) = fn_def?; - let def_with_body = def_with_body.into(); + let def_with_body = fn_def?.into(); let source_map = self.body_with_source_map(def_with_body).1; let scopes = self.expr_scopes(def_with_body); - let root_syntax_node = self.parse(file_id).syntax_node(); + let root_syntax_node = self.parse(position.file_id).syntax_node(); let scope_iter = algo::ancestors_at_offset(&root_syntax_node, position.offset).filter_map(|node| { let block = ast::BlockExpr::cast(node)?; let expr = ast::Expr::from(block); - let expr_id = - source_map.node_expr(InFile::new(file_id.into(), &expr))?.as_expr().unwrap(); + let expr_id = source_map + .node_expr(InFile::new(position.file_id.into(), &expr))? + .as_expr() + .unwrap(); let scope = scopes.scope_for(expr_id).unwrap(); Some(scope) }); diff --git a/crates/hir-expand/Cargo.toml b/crates/hir-expand/Cargo.toml index 4fa476afb64a..80a3c0848653 100644 --- a/crates/hir-expand/Cargo.toml +++ b/crates/hir-expand/Cargo.toml @@ -23,8 +23,6 @@ triomphe.workspace = true query-group.workspace = true salsa.workspace = true salsa-macros.workspace = true -arrayvec.workspace = true -thin-vec.workspace = true # local deps stdx.workspace = true diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs index e1807cd2e1e9..986f8764f5c9 100644 --- a/crates/hir-expand/src/attrs.rs +++ b/crates/hir-expand/src/attrs.rs @@ -1,397 +1,200 @@ -//! Defines the basics of attributes lowering. -//! -//! The heart and soul of this module is [`expand_cfg_attr()`], alongside its sibling -//! 
[`expand_cfg_attr_with_doc_comments()`]. It is used to implement all attribute lowering -//! in r-a. Its basic job is to list attributes; however, attributes do not necessarily map -//! into [`ast::Attr`], because `cfg_attr` can map to zero, one, or more attributes -//! (`#[cfg_attr(predicate, attr1, attr2, ...)]`). To bridge this gap, this module defines -//! [`Meta`], which represents a desugared attribute. Various bits of r-a need different -//! things from [`Meta`], therefore it contains many parts. The basic idea is: -//! -//! - There are three kinds of attributes, `path = value`, `path`, and `path(token_tree)`. -//! - Most bits of rust-analyzer only need to deal with some paths. Therefore, we keep -//! the path only if it has up to 2 segments, or one segment for `path = value`. -//! We also only keep the value in `path = value` if it is a literal. However, we always -//! save the all relevant ranges of attributes (the path range, and the full attribute range) -//! for parts of r-a (e.g. name resolution) that need a faithful representation of the -//! attribute. -//! -//! [`expand_cfg_attr()`] expands `cfg_attr`s as it goes (as its name implies), to list -//! all attributes. -//! -//! Another thing to note is that we need to be able to map an attribute back to a range -//! (for diagnostic purposes etc.). This is only ever needed for attributes that participate -//! in name resolution. An attribute is mapped back by its [`AttrId`], which is just an -//! index into the item tree attributes list. To minimize the risk of bugs, we have one -//! place (here) and one function ([`is_item_tree_filtered_attr()`]) that decides whether -//! an attribute participate in name resolution. - -use std::{ - borrow::Cow, cell::OnceCell, convert::Infallible, fmt, iter::Peekable, ops::ControlFlow, -}; +//! A higher level attributes based on TokenTree, with also some shortcuts. 
+use std::iter; +use std::{borrow::Cow, fmt, ops}; -use ::tt::{TextRange, TextSize}; -use arrayvec::ArrayVec; use base_db::Crate; use cfg::{CfgExpr, CfgOptions}; use either::Either; -use intern::{Interned, Symbol}; +use intern::{Interned, Symbol, sym}; + use mbe::{DelimiterKind, Punct}; -use parser::T; -use smallvec::SmallVec; -use span::{RealSpanMap, Span, SyntaxContext}; -use syntax::{ - AstNode, NodeOrToken, SyntaxNode, SyntaxToken, - ast::{self, TokenTreeChildren}, - unescape, -}; -use syntax_bridge::DocCommentDesugarMode; +use smallvec::{SmallVec, smallvec}; +use span::{Span, SyntaxContext}; +use syntax::unescape; +use syntax::{AstNode, AstToken, SyntaxNode, ast, match_ast}; +use syntax_bridge::{DocCommentDesugarMode, desugar_doc_comment_text, syntax_node_to_token_tree}; +use triomphe::ThinArc; use crate::{ - AstId, db::ExpandDatabase, mod_path::ModPath, + name::Name, span_map::SpanMapRef, - tt::{self, TopSubtree}, + tt::{self, TopSubtree, token_to_literal}, }; -#[derive(Debug)] -pub struct AttrPath { - /// This can be empty if the path is not of 1 or 2 segments exactly. - pub segments: ArrayVec, - pub range: TextRange, - // FIXME: This shouldn't be textual, `#[test]` needs name resolution. - // And if textual, it shouldn't be here, it should be in hir-def/src/attrs.rs. But some macros - // fully qualify `test` as `core::prelude::vX::test`, and this is more than 2 segments, so hir-def - // attrs can't find it. But this will mean we have to push every up-to-4-segments path, which - // may impact perf. So it was easier to just hack it here. - pub is_test: bool, +/// Syntactical attributes, without filtering of `cfg_attr`s. +#[derive(Default, Debug, Clone, PartialEq, Eq)] +pub struct RawAttrs { + // FIXME: This can become `Box<[Attr]>` if https://internals.rust-lang.org/t/layout-of-dst-box/21728?u=chrefr is accepted. 
+ entries: Option>, } -impl AttrPath { - #[inline] - fn extract(path: &ast::Path) -> Self { - let mut is_test = false; - let segments = (|| { - let mut segments = ArrayVec::new(); - let segment2 = path.segment()?.name_ref()?.syntax().first_token()?; - if segment2.text() == "test" { - // `#[test]` or `#[core::prelude::vX::test]`. - is_test = true; - } - let segment1 = path.qualifier(); - if let Some(segment1) = segment1 { - if segment1.qualifier().is_some() { - None - } else { - let segment1 = segment1.segment()?.name_ref()?.syntax().first_token()?; - segments.push(segment1); - segments.push(segment2); - Some(segments) - } - } else { - segments.push(segment2); - Some(segments) - } - })(); - AttrPath { - segments: segments.unwrap_or(ArrayVec::new()), - range: path.syntax().text_range(), - is_test, +impl ops::Deref for RawAttrs { + type Target = [Attr]; + + fn deref(&self) -> &[Attr] { + match &self.entries { + Some(it) => &it.slice, + None => &[], } } +} - #[inline] - pub fn is1(&self, segment: &str) -> bool { - self.segments.len() == 1 && self.segments[0].text() == segment +impl RawAttrs { + pub const EMPTY: Self = Self { entries: None }; + + pub fn new( + db: &dyn ExpandDatabase, + owner: &dyn ast::HasAttrs, + span_map: SpanMapRef<'_>, + ) -> Self { + let entries: Vec<_> = Self::attrs_iter::(db, owner, span_map).collect(); + + let entries = if entries.is_empty() { + None + } else { + Some(ThinArc::from_header_and_iter((), entries.into_iter())) + }; + + RawAttrs { entries } } -} -#[derive(Debug)] -pub enum Meta { - /// `name` is `None` if not a single token. `value` is a literal or `None`. - NamedKeyValue { - path_range: TextRange, - name: Option, - value: Option, - }, - TokenTree { - path: AttrPath, - tt: ast::TokenTree, - }, - Path { - path: AttrPath, - }, -} + /// A [`RawAttrs`] that has its `#[cfg_attr(...)]` attributes expanded. 
+ pub fn new_expanded( + db: &dyn ExpandDatabase, + owner: &dyn ast::HasAttrs, + span_map: SpanMapRef<'_>, + cfg_options: &CfgOptions, + ) -> Self { + let entries: Vec<_> = + Self::attrs_iter_expanded::(db, owner, span_map, cfg_options).collect(); + + let entries = if entries.is_empty() { + None + } else { + Some(ThinArc::from_header_and_iter((), entries.into_iter())) + }; -impl Meta { - #[inline] - pub fn path_range(&self) -> TextRange { - match self { - Meta::NamedKeyValue { path_range, .. } => *path_range, - Meta::TokenTree { path, .. } | Meta::Path { path } => path.range, - } + RawAttrs { entries } + } + + pub fn attrs_iter( + db: &dyn ExpandDatabase, + owner: &dyn ast::HasAttrs, + span_map: SpanMapRef<'_>, + ) -> impl Iterator { + collect_attrs(owner).filter_map(move |(id, attr)| match attr { + Either::Left(attr) => { + attr.meta().and_then(|meta| Attr::from_src(db, meta, span_map, id)) + } + Either::Right(comment) if DESUGAR_COMMENTS => comment.doc_comment().map(|doc| { + let span = span_map.span_for_range(comment.syntax().text_range()); + let (text, kind) = desugar_doc_comment_text(doc, DocCommentDesugarMode::ProcMacro); + Attr { + id, + input: Some(Box::new(AttrInput::Literal(tt::Literal { + symbol: text, + span, + kind, + suffix: None, + }))), + path: Interned::new(ModPath::from(Name::new_symbol(sym::doc, span.ctx))), + ctxt: span.ctx, + } + }), + Either::Right(_) => None, + }) } - fn extract(iter: &mut Peekable) -> Option<(Self, TextSize)> { - let mut start_offset = None; - if let Some(NodeOrToken::Token(colon1)) = iter.peek() - && colon1.kind() == T![:] - { - start_offset = Some(colon1.text_range().start()); - iter.next(); - iter.next_if(|it| it.as_token().is_some_and(|it| it.kind() == T![:])); + pub fn attrs_iter_expanded( + db: &dyn ExpandDatabase, + owner: &dyn ast::HasAttrs, + span_map: SpanMapRef<'_>, + cfg_options: &CfgOptions, + ) -> impl Iterator { + Self::attrs_iter::(db, owner, span_map) + .flat_map(|attr| attr.expand_cfg_attr(db, cfg_options)) 
+ } + + pub fn merge(&self, other: Self) -> Self { + match (&self.entries, other.entries) { + (None, None) => Self::EMPTY, + (None, entries @ Some(_)) => Self { entries }, + (Some(entries), None) => Self { entries: Some(entries.clone()) }, + (Some(a), Some(b)) => { + let last_ast_index = a.slice.last().map_or(0, |it| it.id.ast_index() + 1); + let items = a + .slice + .iter() + .cloned() + .chain(b.slice.iter().map(|it| { + let mut it = it.clone(); + let id = it.id.ast_index() + last_ast_index; + it.id = AttrId::new(id, it.id.is_inner_attr()); + it + })) + .collect::>(); + Self { entries: Some(ThinArc::from_header_and_iter((), items.into_iter())) } + } } - let first_segment = iter - .next_if(|it| it.as_token().is_some_and(|it| it.kind().is_any_identifier()))? - .into_token()?; - let mut is_test = first_segment.text() == "test"; - let start_offset = start_offset.unwrap_or_else(|| first_segment.text_range().start()); - - let mut segments_len = 1; - let mut second_segment = None; - let mut path_range = first_segment.text_range(); - while iter.peek().and_then(NodeOrToken::as_token).is_some_and(|it| it.kind() == T![:]) - && let _ = iter.next() - && iter.peek().and_then(NodeOrToken::as_token).is_some_and(|it| it.kind() == T![:]) - && let _ = iter.next() - && let Some(NodeOrToken::Token(segment)) = iter.peek() - && segment.kind().is_any_identifier() - { - segments_len += 1; - is_test = segment.text() == "test"; - second_segment = Some(segment.clone()); - path_range = TextRange::new(path_range.start(), segment.text_range().end()); - iter.next(); + } + + /// Processes `cfg_attr`s + pub fn expand_cfg_attr(self, db: &dyn ExpandDatabase, krate: Crate) -> RawAttrs { + let has_cfg_attrs = + self.iter().any(|attr| attr.path.as_ident().is_some_and(|name| *name == sym::cfg_attr)); + if !has_cfg_attrs { + return self; } - let segments = |first, second| { - let mut segments = ArrayVec::new(); - if segments_len <= 2 { - segments.push(first); - if let Some(second) = second { - 
segments.push(second); - } - } - segments - }; - let meta = match iter.peek() { - Some(NodeOrToken::Token(eq)) if eq.kind() == T![=] => { - iter.next(); - let value = match iter.peek() { - Some(NodeOrToken::Token(token)) if token.kind().is_literal() => { - // No need to consume it, it will be consumed by `extract_and_eat_comma()`. - Some(token.clone()) - } - _ => None, - }; - let name = if second_segment.is_none() { Some(first_segment) } else { None }; - Meta::NamedKeyValue { path_range, name, value } - } - Some(NodeOrToken::Node(tt)) => Meta::TokenTree { - path: AttrPath { - segments: segments(first_segment, second_segment), - range: path_range, - is_test, - }, - tt: tt.clone(), - }, - _ => Meta::Path { - path: AttrPath { - segments: segments(first_segment, second_segment), - range: path_range, - is_test, - }, - }, + let cfg_options = krate.cfg_options(db); + let new_attrs = self + .iter() + .cloned() + .flat_map(|attr| attr.expand_cfg_attr(db, cfg_options)) + .collect::>(); + let entries = if new_attrs.is_empty() { + None + } else { + Some(ThinArc::from_header_and_iter((), new_attrs.into_iter())) }; - Some((meta, start_offset)) + RawAttrs { entries } } - fn extract_possibly_unsafe( - iter: &mut Peekable, - container: &ast::TokenTree, - ) -> Option<(Self, TextRange)> { - if iter.peek().is_some_and(|it| it.as_token().is_some_and(|it| it.kind() == T![unsafe])) { - iter.next(); - let tt = iter.next()?.into_node()?; - let result = Self::extract(&mut TokenTreeChildren::new(&tt).peekable()).map( - |(meta, start_offset)| (meta, TextRange::new(start_offset, tt_end_offset(&tt))), - ); - while iter.next().is_some_and(|it| it.as_token().is_none_or(|it| it.kind() != T![,])) {} - result - } else { - Self::extract(iter).map(|(meta, start_offset)| { - let end_offset = 'find_end_offset: { - for it in iter { - if let NodeOrToken::Token(it) = it - && it.kind() == T![,] - { - break 'find_end_offset it.text_range().start(); - } - } - tt_end_offset(container) - }; - (meta, 
TextRange::new(start_offset, end_offset)) - }) - } + pub fn is_empty(&self) -> bool { + self.entries.is_none() } } -fn tt_end_offset(tt: &ast::TokenTree) -> TextSize { - tt.syntax().last_token().unwrap().text_range().start() +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct AttrId { + id: u32, } -/// The callback is passed a desugared form of the attribute ([`Meta`]), a [`SyntaxNode`] fully containing it -/// (note: it may not be the direct parent), the range within the [`SyntaxNode`] bounding the attribute, -/// and the outermost `ast::Attr`. Note that one node may map to multiple [`Meta`]s due to `cfg_attr`. -#[inline] -pub fn expand_cfg_attr<'a, BreakValue>( - attrs: impl Iterator, - cfg_options: impl FnMut() -> &'a CfgOptions, - mut callback: impl FnMut(Meta, &SyntaxNode, TextRange, &ast::Attr) -> ControlFlow, -) -> Option { - expand_cfg_attr_with_doc_comments::( - attrs.map(Either::Left), - cfg_options, - move |Either::Left((meta, container, range, top_attr))| { - callback(meta, container, range, top_attr) - }, - ) -} +// FIXME: This only handles a single level of cfg_attr nesting +// that is `#[cfg_attr(all(), cfg_attr(all(), cfg(any())))]` breaks again +impl AttrId { + const INNER_ATTR_SET_BIT: u32 = 1 << 31; -#[inline] -pub fn expand_cfg_attr_with_doc_comments<'a, DocComment, BreakValue>( - mut attrs: impl Iterator>, - mut cfg_options: impl FnMut() -> &'a CfgOptions, - mut callback: impl FnMut( - Either<(Meta, &SyntaxNode, TextRange, &ast::Attr), DocComment>, - ) -> ControlFlow, -) -> Option { - let mut stack = SmallVec::<[_; 1]>::new(); - let result = attrs.try_for_each(|top_attr| { - let top_attr = match top_attr { - Either::Left(it) => it, - Either::Right(comment) => return callback(Either::Right(comment)), - }; - if let Some((attr_name, tt)) = top_attr.as_simple_call() - && attr_name == "cfg_attr" - { - let mut tt_iter = TokenTreeChildren::new(&tt).peekable(); - let cfg = cfg::CfgExpr::parse_from_ast(&mut tt_iter); - if 
cfg_options().check(&cfg) != Some(false) { - stack.push((tt_iter, tt)); - while let Some((tt_iter, tt)) = stack.last_mut() { - let Some((attr, range)) = Meta::extract_possibly_unsafe(tt_iter, tt) else { - stack.pop(); - continue; - }; - if let Meta::TokenTree { path, tt: nested_tt } = &attr - && path.is1("cfg_attr") - { - let mut nested_tt_iter = TokenTreeChildren::new(nested_tt).peekable(); - let cfg = cfg::CfgExpr::parse_from_ast(&mut nested_tt_iter); - if cfg_options().check(&cfg) != Some(false) { - stack.push((nested_tt_iter, nested_tt.clone())); - } - } else { - callback(Either::Left((attr, tt.syntax(), range, &top_attr)))?; - } - } - } - } else if let Some(ast_meta) = top_attr.meta() - && let Some(path) = ast_meta.path() - { - let path = AttrPath::extract(&path); - let meta = if let Some(tt) = ast_meta.token_tree() { - Meta::TokenTree { path, tt } - } else if let Some(value) = ast_meta.expr() { - let value = - if let ast::Expr::Literal(value) = value { Some(value.token()) } else { None }; - let name = - if path.segments.len() == 1 { Some(path.segments[0].clone()) } else { None }; - Meta::NamedKeyValue { name, value, path_range: path.range } - } else { - Meta::Path { path } - }; - callback(Either::Left(( - meta, - ast_meta.syntax(), - ast_meta.syntax().text_range(), - &top_attr, - )))?; - } - ControlFlow::Continue(()) - }); - result.break_value() -} + pub fn new(id: usize, is_inner: bool) -> Self { + assert!(id <= !Self::INNER_ATTR_SET_BIT as usize); + let id = id as u32; + Self { id: if is_inner { id | Self::INNER_ATTR_SET_BIT } else { id } } + } -#[inline] -pub(crate) fn is_item_tree_filtered_attr(name: &str) -> bool { - matches!( - name, - "doc" - | "stable" - | "unstable" - | "target_feature" - | "allow" - | "expect" - | "warn" - | "deny" - | "forbid" - | "repr" - | "inline" - | "track_caller" - | "must_use" - ) -} + pub fn ast_index(&self) -> usize { + (self.id & !Self::INNER_ATTR_SET_BIT) as usize + } -/// This collects attributes exactly as the item 
tree needs them. This is used for the item tree, -/// as well as for resolving [`AttrId`]s. -pub fn collect_item_tree_attrs<'a, BreakValue>( - owner: &dyn ast::HasAttrs, - cfg_options: impl Fn() -> &'a CfgOptions, - mut on_attr: impl FnMut(Meta, &SyntaxNode, &ast::Attr, TextRange) -> ControlFlow, -) -> Option> { - let attrs = ast::attrs_including_inner(owner); - expand_cfg_attr( - attrs, - || cfg_options(), - |attr, container, range, top_attr| { - // We filter builtin attributes that we don't need for nameres, because this saves memory. - // I only put the most common attributes, but if some attribute becomes common feel free to add it. - // Notice, however: for an attribute to be filtered out, it *must* not be shadowable with a macro! - let filter = match &attr { - Meta::NamedKeyValue { name: Some(name), .. } => { - is_item_tree_filtered_attr(name.text()) - } - Meta::TokenTree { path, tt } if path.segments.len() == 1 => { - let name = path.segments[0].text(); - if name == "cfg" { - let cfg = - CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(tt).peekable()); - if cfg_options().check(&cfg) == Some(false) { - return ControlFlow::Break(Either::Right(cfg)); - } - true - } else { - is_item_tree_filtered_attr(name) - } - } - Meta::Path { path } => { - path.segments.len() == 1 && is_item_tree_filtered_attr(path.segments[0].text()) - } - _ => false, - }; - if !filter && let ControlFlow::Break(v) = on_attr(attr, container, top_attr, range) { - return ControlFlow::Break(Either::Left(v)); - } - ControlFlow::Continue(()) - }, - ) + pub fn is_inner_attr(&self) -> bool { + self.id & Self::INNER_ATTR_SET_BIT != 0 + } } #[derive(Debug, Clone, PartialEq, Eq)] pub struct Attr { + pub id: AttrId, pub path: Interned, pub input: Option>, pub ctxt: SyntaxContext, @@ -414,6 +217,131 @@ impl fmt::Display for AttrInput { } } +impl Attr { + fn from_src( + db: &dyn ExpandDatabase, + ast: ast::Meta, + span_map: SpanMapRef<'_>, + id: AttrId, + ) -> Option { + let path = ast.path()?; + let 
range = path.syntax().text_range(); + let path = Interned::new(ModPath::from_src(db, path, &mut |range| { + span_map.span_for_range(range).ctx + })?); + let span = span_map.span_for_range(range); + let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() { + let token = lit.token(); + Some(Box::new(AttrInput::Literal(token_to_literal(token.text(), span)))) + } else if let Some(tt) = ast.token_tree() { + let tree = syntax_node_to_token_tree( + tt.syntax(), + span_map, + span, + DocCommentDesugarMode::ProcMacro, + ); + Some(Box::new(AttrInput::TokenTree(tree))) + } else { + None + }; + Some(Attr { id, path, input, ctxt: span.ctx }) + } + + fn from_tt( + db: &dyn ExpandDatabase, + mut tt: tt::TokenTreesView<'_>, + id: AttrId, + ) -> Option { + if matches!(tt.flat_tokens(), + [tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { sym, .. })), ..] + if *sym == sym::unsafe_ + ) { + match tt.iter().nth(1) { + Some(tt::TtElement::Subtree(_, iter)) => tt = iter.remaining(), + _ => return None, + } + } + let first = tt.flat_tokens().first()?; + let ctxt = first.first_span().ctx; + let (path, input) = { + let mut iter = tt.iter(); + let start = iter.savepoint(); + let mut input = tt::TokenTreesView::new(&[]); + let mut path = iter.from_savepoint(start); + let mut path_split_savepoint = iter.savepoint(); + while let Some(tt) = iter.next() { + path = iter.from_savepoint(start); + if !matches!( + tt, + tt::TtElement::Leaf( + tt::Leaf::Punct(tt::Punct { char: ':' | '$', .. }) | tt::Leaf::Ident(_), + ) + ) { + input = path_split_savepoint.remaining(); + break; + } + path_split_savepoint = iter.savepoint(); + } + (path, input) + }; + + let path = Interned::new(ModPath::from_tt(db, path)?); + + let input = match (input.flat_tokens().first(), input.try_into_subtree()) { + (_, Some(tree)) => { + Some(Box::new(AttrInput::TokenTree(tt::TopSubtree::from_subtree(tree)))) + } + (Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. 
}))), _) => { + match input.flat_tokens().get(1) { + Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) => { + Some(Box::new(AttrInput::Literal(lit.clone()))) + } + _ => None, + } + } + _ => None, + }; + Some(Attr { id, path, input, ctxt }) + } + + pub fn path(&self) -> &ModPath { + &self.path + } + + pub fn expand_cfg_attr( + self, + db: &dyn ExpandDatabase, + cfg_options: &CfgOptions, + ) -> impl IntoIterator { + let is_cfg_attr = self.path.as_ident().is_some_and(|name| *name == sym::cfg_attr); + if !is_cfg_attr { + return smallvec![self]; + } + + let subtree = match self.token_tree_value() { + Some(it) => it, + _ => return smallvec![self.clone()], + }; + + let (cfg, parts) = match parse_cfg_attr_input(subtree) { + Some(it) => it, + None => return smallvec![self.clone()], + }; + let index = self.id; + let attrs = parts.filter_map(|attr| Attr::from_tt(db, attr, index)); + + let cfg = TopSubtree::from_token_trees(subtree.top_subtree().delimiter, cfg); + let cfg = CfgExpr::parse(&cfg); + if cfg_options.check(&cfg) == Some(false) { + smallvec![] + } else { + cov_mark::hit!(cfg_attr_active); + + attrs.collect::>() + } + } +} + impl Attr { /// #[path = "string"] pub fn string_value(&self) -> Option<&Symbol> { @@ -475,26 +403,30 @@ impl Attr { pub fn parse_path_comma_token_tree<'a>( &'a self, db: &'a dyn ExpandDatabase, - ) -> Option)> + 'a> { + ) -> Option + 'a> { let args = self.token_tree_value()?; if args.top_subtree().delimiter.kind != DelimiterKind::Parenthesis { return None; } - Some(parse_path_comma_token_tree(db, args)) + let paths = args + .token_trees() + .split(|tt| matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(Punct { char: ',', .. 
})))) + .filter_map(move |tts| { + let span = tts.flat_tokens().first()?.first_span(); + Some((ModPath::from_tt(db, tts)?, span)) + }); + + Some(paths) } -} -fn parse_path_comma_token_tree<'a>( - db: &'a dyn ExpandDatabase, - args: &'a tt::TopSubtree, -) -> impl Iterator)> { - args.token_trees() - .split(|tt| matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(Punct { char: ',', .. })))) - .filter_map(move |tts| { - let span = tts.flat_tokens().first()?.first_span(); - Some((ModPath::from_tt(db, tts)?, span, tts)) - }) + pub fn cfg(&self) -> Option { + if *self.path.as_ident()? == sym::cfg { + self.token_tree_value().map(CfgExpr::parse) + } else { + None + } + } } fn unescape(s: &str) -> Option> { @@ -523,104 +455,58 @@ fn unescape(s: &str) -> Option> { } } -/// This is an index of an attribute *that always points to the item tree attributes*. -/// -/// Outer attributes are counted first, then inner attributes. This does not support -/// out-of-line modules, which may have attributes spread across 2 files! 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct AttrId { - id: u32, +pub fn collect_attrs( + owner: &dyn ast::HasAttrs, +) -> impl Iterator)> { + let inner_attrs = + inner_attributes(owner.syntax()).into_iter().flatten().zip(iter::repeat(true)); + let outer_attrs = ast::AttrDocCommentIter::from_syntax_node(owner.syntax()) + .filter(|el| match el { + Either::Left(attr) => attr.kind().is_outer(), + Either::Right(comment) => comment.is_outer(), + }) + .zip(iter::repeat(false)); + outer_attrs + .chain(inner_attrs) + .enumerate() + .map(|(id, (attr, is_inner))| (AttrId::new(id, is_inner), attr)) } -impl AttrId { - #[inline] - pub fn from_item_tree_index(id: u32) -> Self { - Self { id } - } - - #[inline] - pub fn item_tree_index(self) -> u32 { - self.id - } - - /// Returns the containing `ast::Attr` (note that it may contain other attributes as well due - /// to `cfg_attr`), a `SyntaxNode` guaranteed to contain the attribute, the full range of the - /// attribute, and its desugared [`Meta`]. - pub fn find_attr_range( - self, - db: &dyn ExpandDatabase, - krate: Crate, - owner: AstId, - ) -> (ast::Attr, SyntaxNode, TextRange, Meta) { - self.find_attr_range_with_source(db, krate, &owner.to_node(db)) - } - - /// Returns the containing `ast::Attr` (note that it may contain other attributes as well due - /// to `cfg_attr`), a `SyntaxNode` guaranteed to contain the attribute, the full range of the - /// attribute, and its desugared [`Meta`]. 
- pub fn find_attr_range_with_source( - self, - db: &dyn ExpandDatabase, - krate: Crate, - owner: &dyn ast::HasAttrs, - ) -> (ast::Attr, SyntaxNode, TextRange, Meta) { - let cfg_options = OnceCell::new(); - let mut index = 0; - let result = collect_item_tree_attrs( - owner, - || cfg_options.get_or_init(|| krate.cfg_options(db)), - |meta, container, top_attr, range| { - if index == self.id { - return ControlFlow::Break((top_attr.clone(), container.clone(), range, meta)); +fn inner_attributes( + syntax: &SyntaxNode, +) -> Option>> { + let node = match_ast! { + match syntax { + ast::SourceFile(_) => syntax.clone(), + ast::ExternBlock(it) => it.extern_item_list()?.syntax().clone(), + ast::Fn(it) => it.body()?.stmt_list()?.syntax().clone(), + ast::Impl(it) => it.assoc_item_list()?.syntax().clone(), + ast::Module(it) => it.item_list()?.syntax().clone(), + ast::BlockExpr(it) => { + if !it.may_carry_attributes() { + return None } - index += 1; - ControlFlow::Continue(()) + syntax.clone() }, - ); - match result { - Some(Either::Left(it)) => it, - _ => { - panic!("used an incorrect `AttrId`; crate={krate:?}, attr_id={self:?}"); - } + _ => return None, } - } + }; - pub fn find_derive_range( - self, - db: &dyn ExpandDatabase, - krate: Crate, - owner: AstId, - derive_index: u32, - ) -> TextRange { - let (_, _, derive_attr_range, derive_attr) = self.find_attr_range(db, krate, owner); - let Meta::TokenTree { tt, .. } = derive_attr else { - return derive_attr_range; - }; - // Fake the span map, as we don't really need spans here, just the offsets of the node in the file. 
- let span_map = RealSpanMap::absolute(span::EditionedFileId::current_edition( - span::FileId::from_raw(0), - )); - let tt = syntax_bridge::syntax_node_to_token_tree( - tt.syntax(), - SpanMapRef::RealSpanMap(&span_map), - span_map.span_for_range(tt.syntax().text_range()), - DocCommentDesugarMode::ProcMacro, - ); - let Some((_, _, derive_tts)) = - parse_path_comma_token_tree(db, &tt).nth(derive_index as usize) - else { - return derive_attr_range; - }; - let (Some(first_tt), Some(last_tt)) = - (derive_tts.flat_tokens().first(), derive_tts.flat_tokens().last()) - else { - return derive_attr_range; - }; - let start = first_tt.first_span().range.start(); - let end = match last_tt { - tt::TokenTree::Leaf(it) => it.span().range.end(), - tt::TokenTree::Subtree(it) => it.delimiter.close.range.end(), - }; - TextRange::new(start, end) - } + let attrs = ast::AttrDocCommentIter::from_syntax_node(&node).filter(|el| match el { + Either::Left(attr) => attr.kind().is_inner(), + Either::Right(comment) => comment.is_inner(), + }); + Some(attrs) +} + +// Input subtree is: `(cfg, $(attr),+)` +// Split it up into a `cfg` subtree and the `attr` subtrees. +fn parse_cfg_attr_input( + subtree: &TopSubtree, +) -> Option<(tt::TokenTreesView<'_>, impl Iterator>)> { + let mut parts = subtree + .token_trees() + .split(|tt| matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(Punct { char: ',', .. 
})))); + let cfg = parts.next()?; + Some((cfg, parts.filter(|it| !it.is_empty()))) } diff --git a/crates/hir-expand/src/builtin/fn_macro.rs b/crates/hir-expand/src/builtin/fn_macro.rs index 92bcd378149e..6fe63f249cd4 100644 --- a/crates/hir-expand/src/builtin/fn_macro.rs +++ b/crates/hir-expand/src/builtin/fn_macro.rs @@ -772,7 +772,7 @@ fn relative_file( if res == call_site && !allow_recursion { Err(ExpandError::other(err_span, format!("recursive inclusion of `{path_str}`"))) } else { - Ok(EditionedFileId::new(db, res, lookup.krate.data(db).edition, lookup.krate)) + Ok(EditionedFileId::new(db, res, lookup.krate.data(db).edition)) } } diff --git a/crates/hir-expand/src/cfg_process.rs b/crates/hir-expand/src/cfg_process.rs index 8b82671ed4a0..d5ebd6ee19f5 100644 --- a/crates/hir-expand/src/cfg_process.rs +++ b/crates/hir-expand/src/cfg_process.rs @@ -1,343 +1,373 @@ //! Processes out #[cfg] and #[cfg_attr] attributes from the input for the derive macro -use std::{cell::OnceCell, ops::ControlFlow}; +use std::iter::Peekable; -use ::tt::TextRange; use base_db::Crate; -use cfg::CfgExpr; -use parser::T; -use smallvec::SmallVec; +use cfg::{CfgAtom, CfgExpr}; +use intern::{Symbol, sym}; +use rustc_hash::FxHashSet; use syntax::{ - AstNode, PreorderWithTokens, SyntaxElement, SyntaxNode, SyntaxToken, WalkEvent, - ast::{self, HasAttrs, TokenTreeChildren}, + AstNode, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, T, + ast::{self, Attr, HasAttrs, Meta, TokenTree, VariantList}, }; -use syntax_bridge::DocCommentDesugarMode; +use tracing::{debug, warn}; -use crate::{ - attrs::{AttrId, Meta, expand_cfg_attr, is_item_tree_filtered_attr}, - db::ExpandDatabase, - fixup::{self, SyntaxFixupUndoInfo}, - span_map::SpanMapRef, - tt::{self, DelimSpan, Span}, -}; - -struct ItemIsCfgedOut; +use crate::{MacroCallLoc, MacroDefKind, db::ExpandDatabase, proc_macro::ProcMacroKind}; -#[derive(Debug)] -struct ExpandedAttrToProcess { - range: TextRange, +fn check_cfg(db: &dyn ExpandDatabase, 
attr: &Attr, krate: Crate) -> Option { + if !attr.simple_name().as_deref().map(|v| v == "cfg")? { + return None; + } + let cfg = parse_from_attr_token_tree(&attr.meta()?.token_tree()?)?; + let enabled = krate.cfg_options(db).check(&cfg) != Some(false); + Some(enabled) } -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -enum NextExpandedAttrState { - NotStarted, - InTheMiddle, +fn check_cfg_attr(db: &dyn ExpandDatabase, attr: &Attr, krate: Crate) -> Option { + if !attr.simple_name().as_deref().map(|v| v == "cfg_attr")? { + return None; + } + check_cfg_attr_value(db, &attr.token_tree()?, krate) } -#[derive(Debug)] -struct AstAttrToProcess { - range: TextRange, - expanded_attrs: SmallVec<[ExpandedAttrToProcess; 1]>, - expanded_attrs_idx: usize, - next_expanded_attr: NextExpandedAttrState, - pound_span: Span, - brackets_span: DelimSpan, - /// If `Some`, this is an inner attribute. - excl_span: Option, +pub fn check_cfg_attr_value( + db: &dyn ExpandDatabase, + attr: &TokenTree, + krate: Crate, +) -> Option { + let cfg_expr = parse_from_attr_token_tree(attr)?; + let enabled = krate.cfg_options(db).check(&cfg_expr) != Some(false); + Some(enabled) } -fn macro_input_callback( +fn process_has_attrs_with_possible_comma( db: &dyn ExpandDatabase, - is_derive: bool, - censor_item_tree_attr_ids: &[AttrId], + items: impl Iterator, krate: Crate, - default_span: Span, - span_map: SpanMapRef<'_>, -) -> impl FnMut(&mut PreorderWithTokens, &WalkEvent) -> (bool, Vec) { - let cfg_options = OnceCell::new(); - let cfg_options = move || *cfg_options.get_or_init(|| krate.cfg_options(db)); - - let mut should_strip_attr = { - let mut item_tree_attr_id = 0; - let mut censor_item_tree_attr_ids_index = 0; - move || { - let mut result = false; - if let Some(&next_censor_attr_id) = - censor_item_tree_attr_ids.get(censor_item_tree_attr_ids_index) - && next_censor_attr_id.item_tree_index() == item_tree_attr_id - { - censor_item_tree_attr_ids_index += 1; - result = true; + remove: &mut FxHashSet, +) -> 
Option<()> { + for item in items { + let field_attrs = item.attrs(); + 'attrs: for attr in field_attrs { + if let Some(enabled) = check_cfg(db, &attr, krate) { + if enabled { + debug!("censoring {:?}", attr.syntax()); + remove.insert(attr.syntax().clone().into()); + } else { + debug!("censoring {:?}", item.syntax()); + remove.insert(item.syntax().clone().into()); + // We need to remove the , as well + remove_possible_comma(&item, remove); + break 'attrs; + } } - item_tree_attr_id += 1; - result - } - }; - let mut attrs = Vec::new(); - let mut attrs_idx = 0; - let mut has_inner_attrs_owner = false; - let mut in_attr = false; - let mut done_with_attrs = false; - let mut did_top_attrs = false; - move |preorder, event| { - match event { - WalkEvent::Enter(SyntaxElement::Node(node)) => { - if done_with_attrs { - return (true, Vec::new()); + if let Some(enabled) = check_cfg_attr(db, &attr, krate) { + if enabled { + debug!("Removing cfg_attr tokens {:?}", attr); + let meta = attr.meta()?; + let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?; + remove.extend(removes_from_cfg_attr); + } else { + debug!("censoring type cfg_attr {:?}", item.syntax()); + remove.insert(attr.syntax().clone().into()); } + } + } + } + Some(()) +} - if ast::Attr::can_cast(node.kind()) { - in_attr = true; - let node_range = node.text_range(); - while attrs - .get(attrs_idx) - .is_some_and(|it: &AstAttrToProcess| it.range != node_range) - { - attrs_idx += 1; - } - } else if let Some(has_attrs) = ast::AnyHasAttrs::cast(node.clone()) { - if has_inner_attrs_owner { - has_inner_attrs_owner = false; - return (true, Vec::new()); - } - - if did_top_attrs && !is_derive { - // Derives need all attributes handled, but attribute macros need only the top attributes handled. 
- done_with_attrs = true; - return (true, Vec::new()); - } - did_top_attrs = true; - - if let Some(inner_attrs_node) = has_attrs.inner_attributes_node() - && inner_attrs_node != *node - { - has_inner_attrs_owner = true; - } - - let node_attrs = ast::attrs_including_inner(&has_attrs); - - attrs.clear(); - node_attrs.clone().for_each(|attr| { - let span_for = |token: Option| { - token - .map(|token| span_map.span_for_range(token.text_range())) - .unwrap_or(default_span) - }; - attrs.push(AstAttrToProcess { - range: attr.syntax().text_range(), - pound_span: span_for(attr.pound_token()), - brackets_span: DelimSpan { - open: span_for(attr.l_brack_token()), - close: span_for(attr.r_brack_token()), - }, - excl_span: attr - .excl_token() - .map(|token| span_map.span_for_range(token.text_range())), - expanded_attrs: SmallVec::new(), - expanded_attrs_idx: 0, - next_expanded_attr: NextExpandedAttrState::NotStarted, - }); - }); - - attrs_idx = 0; - let strip_current_item = expand_cfg_attr( - node_attrs, - &cfg_options, - |attr, _container, range, top_attr| { - // Find the attr. - while attrs[attrs_idx].range != top_attr.syntax().text_range() { - attrs_idx += 1; - } - - let mut strip_current_attr = false; - match attr { - Meta::NamedKeyValue { name, .. 
} => { - if name - .is_none_or(|name| !is_item_tree_filtered_attr(name.text())) - { - strip_current_attr = should_strip_attr(); - } - } - Meta::TokenTree { path, tt } => { - if path.segments.len() != 1 - || !is_item_tree_filtered_attr(path.segments[0].text()) - { - strip_current_attr = should_strip_attr(); - } - - if path.segments.len() == 1 { - let name = path.segments[0].text(); - - if name == "cfg" { - let cfg_expr = CfgExpr::parse_from_ast( - &mut TokenTreeChildren::new(&tt).peekable(), - ); - if cfg_options().check(&cfg_expr) == Some(false) { - return ControlFlow::Break(ItemIsCfgedOut); - } - strip_current_attr = true; - } - } - } - Meta::Path { path } => { - if path.segments.len() != 1 - || !is_item_tree_filtered_attr(path.segments[0].text()) - { - strip_current_attr = should_strip_attr(); - } - } - } - - if !strip_current_attr { - attrs[attrs_idx] - .expanded_attrs - .push(ExpandedAttrToProcess { range }); - } - - ControlFlow::Continue(()) - }, - ); - attrs_idx = 0; - - if strip_current_item.is_some() { - preorder.skip_subtree(); - attrs.clear(); +#[derive(Debug, PartialEq, Eq, Clone, Copy)] +enum CfgExprStage { + /// Stripping the CFGExpr part of the attribute + StrippigCfgExpr, + /// Found the comma after the CFGExpr. Will keep all tokens until the next comma or the end of the attribute + FoundComma, + /// Everything following the attribute. This could be another attribute or the end of the attribute. + // FIXME: cfg_attr with multiple attributes will not be handled correctly. We will only keep the first attribute + // Related Issue: https://github.com/rust-lang/rust-analyzer/issues/10110 + EverythingElse, +} - 'eat_comma: { - // If there is a comma after this node, eat it too. 
- let mut events_until_comma = 0; - for event in preorder.clone() { - match event { - WalkEvent::Enter(SyntaxElement::Node(_)) - | WalkEvent::Leave(_) => {} - WalkEvent::Enter(SyntaxElement::Token(token)) => { - let kind = token.kind(); - if kind == T![,] { - break; - } else if !kind.is_trivia() { - break 'eat_comma; - } - } - } - events_until_comma += 1; - } - preorder.nth(events_until_comma); - } +/// This function creates its own set of tokens to remove. To help prevent malformed syntax as input. +fn remove_tokens_within_cfg_attr(meta: Meta) -> Option> { + let mut remove: FxHashSet = FxHashSet::default(); + debug!("Enabling attribute {}", meta); + let meta_path = meta.path()?; + debug!("Removing {:?}", meta_path.syntax()); + remove.insert(meta_path.syntax().clone().into()); - return (false, Vec::new()); - } + let meta_tt = meta.token_tree()?; + debug!("meta_tt {}", meta_tt); + let mut stage = CfgExprStage::StrippigCfgExpr; + for tt in meta_tt.token_trees_and_tokens() { + debug!("Checking {:?}. 
Stage: {:?}", tt, stage); + match (stage, tt) { + (CfgExprStage::StrippigCfgExpr, syntax::NodeOrToken::Node(node)) => { + remove.insert(node.syntax().clone().into()); + } + (CfgExprStage::StrippigCfgExpr, syntax::NodeOrToken::Token(token)) => { + if token.kind() == T![,] { + stage = CfgExprStage::FoundComma; } + remove.insert(token.into()); + } + (CfgExprStage::FoundComma, syntax::NodeOrToken::Token(token)) + if (token.kind() == T![,] || token.kind() == T![')']) => + { + // The end of the attribute or separator for the next attribute + stage = CfgExprStage::EverythingElse; + remove.insert(token.into()); } - WalkEvent::Leave(SyntaxElement::Node(node)) => { - if ast::Attr::can_cast(node.kind()) { - in_attr = false; - attrs_idx += 1; + (CfgExprStage::EverythingElse, syntax::NodeOrToken::Node(node)) => { + remove.insert(node.syntax().clone().into()); + } + (CfgExprStage::EverythingElse, syntax::NodeOrToken::Token(token)) => { + remove.insert(token.into()); + } + // This is an actual attribute + _ => {} + } + } + if stage != CfgExprStage::EverythingElse { + warn!("Invalid cfg_attr attribute. {:?}", meta_tt); + return None; + } + Some(remove) +} +/// Removes a possible comma after the [AstNode] +fn remove_possible_comma(item: &impl AstNode, res: &mut FxHashSet) { + if let Some(comma) = item.syntax().next_sibling_or_token().filter(|it| it.kind() == T![,]) { + res.insert(comma); + } +} +fn process_enum( + db: &dyn ExpandDatabase, + variants: VariantList, + krate: Crate, + remove: &mut FxHashSet, +) -> Option<()> { + 'variant: for variant in variants.variants() { + for attr in variant.attrs() { + if let Some(enabled) = check_cfg(db, &attr, krate) { + if enabled { + debug!("censoring {:?}", attr.syntax()); + remove.insert(attr.syntax().clone().into()); + } else { + // Rustc does not strip the attribute if it is enabled. 
So we will leave it + debug!("censoring type {:?}", variant.syntax()); + remove.insert(variant.syntax().clone().into()); + // We need to remove the , as well + remove_possible_comma(&variant, remove); + continue 'variant; + } + } + + if let Some(enabled) = check_cfg_attr(db, &attr, krate) { + if enabled { + debug!("Removing cfg_attr tokens {:?}", attr); + let meta = attr.meta()?; + let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?; + remove.extend(removes_from_cfg_attr); + } else { + debug!("censoring type cfg_attr {:?}", variant.syntax()); + remove.insert(attr.syntax().clone().into()); } } - WalkEvent::Enter(SyntaxElement::Token(token)) => { - if !in_attr { - return (true, Vec::new()); + } + if let Some(fields) = variant.field_list() { + match fields { + ast::FieldList::RecordFieldList(fields) => { + process_has_attrs_with_possible_comma(db, fields.fields(), krate, remove)?; + } + ast::FieldList::TupleFieldList(fields) => { + process_has_attrs_with_possible_comma(db, fields.fields(), krate, remove)?; } + } + } + } + Some(()) +} - let Some(ast_attr) = attrs.get_mut(attrs_idx) else { - return (true, Vec::new()); - }; - let token_range = token.text_range(); - let Some(expanded_attr) = ast_attr.expanded_attrs.get(ast_attr.expanded_attrs_idx) - else { - // No expanded attributes in this `ast::Attr`, or we finished them all already, either way - // the remaining tokens should be discarded. - return (false, Vec::new()); - }; - match ast_attr.next_expanded_attr { - NextExpandedAttrState::NotStarted => { - if token_range.start() >= expanded_attr.range.start() { - // We started the next attribute. 
- let mut insert_tokens = Vec::with_capacity(3); - insert_tokens.push(tt::Leaf::Punct(tt::Punct { - char: '#', - spacing: tt::Spacing::Alone, - span: ast_attr.pound_span, - })); - if let Some(span) = ast_attr.excl_span { - insert_tokens.push(tt::Leaf::Punct(tt::Punct { - char: '!', - spacing: tt::Spacing::Alone, - span, - })); - } - insert_tokens.push(tt::Leaf::Punct(tt::Punct { - char: '[', - spacing: tt::Spacing::Alone, - span: ast_attr.brackets_span.open, - })); +pub(crate) fn process_cfg_attrs( + db: &dyn ExpandDatabase, + node: &SyntaxNode, + loc: &MacroCallLoc, +) -> Option> { + // FIXME: #[cfg_eval] is not implemented. But it is not stable yet + let is_derive = match loc.def.kind { + MacroDefKind::BuiltInDerive(..) + | MacroDefKind::ProcMacro(_, _, ProcMacroKind::CustomDerive) => true, + MacroDefKind::BuiltInAttr(_, expander) => expander.is_derive(), + _ => false, + }; + let mut remove = FxHashSet::default(); - ast_attr.next_expanded_attr = NextExpandedAttrState::InTheMiddle; + let item = ast::Item::cast(node.clone())?; + for attr in item.attrs() { + if let Some(enabled) = check_cfg_attr(db, &attr, loc.krate) { + if enabled { + debug!("Removing cfg_attr tokens {:?}", attr); + let meta = attr.meta()?; + let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?; + remove.extend(removes_from_cfg_attr); + } else { + debug!("Removing type cfg_attr {:?}", item.syntax()); + remove.insert(attr.syntax().clone().into()); + } + } + } - return (true, insert_tokens); - } else { - // Before any attribute or between the attributes. - return (false, Vec::new()); - } - } - NextExpandedAttrState::InTheMiddle => { - if token_range.start() >= expanded_attr.range.end() { - // Finished the current attribute. 
- let insert_tokens = vec![tt::Leaf::Punct(tt::Punct { - char: ']', - spacing: tt::Spacing::Alone, - span: ast_attr.brackets_span.close, - })]; + if is_derive { + // Only derives get their code cfg-clean, normal attribute macros process only the cfg at their level + // (cfg_attr is handled above, cfg is handled in the def map). + match item { + ast::Item::Struct(it) => match it.field_list()? { + ast::FieldList::RecordFieldList(fields) => { + process_has_attrs_with_possible_comma( + db, + fields.fields(), + loc.krate, + &mut remove, + )?; + } + ast::FieldList::TupleFieldList(fields) => { + process_has_attrs_with_possible_comma( + db, + fields.fields(), + loc.krate, + &mut remove, + )?; + } + }, + ast::Item::Enum(it) => { + process_enum(db, it.variant_list()?, loc.krate, &mut remove)?; + } + ast::Item::Union(it) => { + process_has_attrs_with_possible_comma( + db, + it.record_field_list()?.fields(), + loc.krate, + &mut remove, + )?; + } + // FIXME: Implement for other items if necessary. As we do not support #[cfg_eval] yet, we do not need to implement it for now + _ => {} + } + } + Some(remove) +} +/// Parses a `cfg` attribute from the meta +fn parse_from_attr_token_tree(tt: &TokenTree) -> Option { + let mut iter = tt + .token_trees_and_tokens() + .filter(is_not_whitespace) + .skip(1) + .take_while(is_not_closing_paren) + .peekable(); + next_cfg_expr_from_syntax(&mut iter) +} - ast_attr.next_expanded_attr = NextExpandedAttrState::NotStarted; - ast_attr.expanded_attrs_idx += 1; +fn is_not_closing_paren(element: &NodeOrToken) -> bool { + !matches!(element, NodeOrToken::Token(token) if (token.kind() == syntax::T![')'])) +} +fn is_not_whitespace(element: &NodeOrToken) -> bool { + !matches!(element, NodeOrToken::Token(token) if (token.kind() == SyntaxKind::WHITESPACE)) +} - // It's safe to ignore the current token because between attributes - // there is always at least one token we skip - either the closing bracket - // in `#[]` or the comma in case of multiple attrs in 
`cfg_attr` expansion. - return (false, insert_tokens); - } else { - // Still in the middle. - return (true, Vec::new()); - } - } +fn next_cfg_expr_from_syntax(iter: &mut Peekable) -> Option +where + I: Iterator>, +{ + let name = match iter.next() { + None => return None, + Some(NodeOrToken::Token(element)) => match element.kind() { + syntax::T![ident] => Symbol::intern(element.text()), + _ => return Some(CfgExpr::Invalid), + }, + Some(_) => return Some(CfgExpr::Invalid), + }; + let result = match &name { + s if [&sym::all, &sym::any, &sym::not].contains(&s) => { + let mut preds = Vec::new(); + let Some(NodeOrToken::Node(tree)) = iter.next() else { + return Some(CfgExpr::Invalid); + }; + let mut tree_iter = tree + .token_trees_and_tokens() + .filter(is_not_whitespace) + .skip(1) + .take_while(is_not_closing_paren) + .peekable(); + while tree_iter.peek().is_some() { + let pred = next_cfg_expr_from_syntax(&mut tree_iter); + if let Some(pred) = pred { + preds.push(pred); } } - WalkEvent::Leave(SyntaxElement::Token(_)) => {} + let group = match &name { + s if *s == sym::all => CfgExpr::All(preds.into_boxed_slice()), + s if *s == sym::any => CfgExpr::Any(preds.into_boxed_slice()), + s if *s == sym::not => { + CfgExpr::Not(Box::new(preds.pop().unwrap_or(CfgExpr::Invalid))) + } + _ => unreachable!(), + }; + Some(group) } - (true, Vec::new()) + _ => match iter.peek() { + Some(NodeOrToken::Token(element)) if (element.kind() == syntax::T![=]) => { + iter.next(); + match iter.next() { + Some(NodeOrToken::Token(value_token)) + if (value_token.kind() == syntax::SyntaxKind::STRING) => + { + let value = value_token.text(); + Some(CfgExpr::Atom(CfgAtom::KeyValue { + key: name, + value: Symbol::intern(value.trim_matches('"')), + })) + } + _ => None, + } + } + _ => Some(CfgExpr::Atom(CfgAtom::Flag(name))), + }, + }; + if let Some(NodeOrToken::Token(element)) = iter.peek() + && element.kind() == syntax::T![,] + { + iter.next(); } + result } +#[cfg(test)] +mod tests { + use 
cfg::DnfExpr; + use expect_test::{Expect, expect}; + use syntax::{AstNode, SourceFile, ast::Attr}; -pub(crate) fn attr_macro_input_to_token_tree( - db: &dyn ExpandDatabase, - node: &SyntaxNode, - span_map: SpanMapRef<'_>, - span: Span, - is_derive: bool, - censor_item_tree_attr_ids: &[AttrId], - krate: Crate, -) -> (tt::TopSubtree, SyntaxFixupUndoInfo) { - let fixups = fixup::fixup_syntax(span_map, node, span, DocCommentDesugarMode::ProcMacro); - ( - syntax_bridge::syntax_node_to_token_tree_modified( - node, - span_map, - fixups.append, - fixups.remove, - span, - DocCommentDesugarMode::ProcMacro, - macro_input_callback(db, is_derive, censor_item_tree_attr_ids, krate, span, span_map), - ), - fixups.undo_info, - ) -} + use crate::cfg_process::parse_from_attr_token_tree; -pub fn check_cfg_attr_value( - db: &dyn ExpandDatabase, - attr: &ast::TokenTree, - krate: Crate, -) -> Option { - let cfg_expr = CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(attr).peekable()); - krate.cfg_options(db).check(&cfg_expr) + fn check_dnf_from_syntax(input: &str, expect: Expect) { + let parse = SourceFile::parse(input, span::Edition::CURRENT); + let node = match parse.tree().syntax().descendants().find_map(Attr::cast) { + Some(it) => it, + None => { + let node = std::any::type_name::(); + panic!("Failed to make ast node `{node}` from text {input}") + } + }; + let node = node.clone_subtree(); + assert_eq!(node.syntax().text_range().start(), 0.into()); + + let cfg = parse_from_attr_token_tree(&node.meta().unwrap().token_tree().unwrap()).unwrap(); + let actual = format!("#![cfg({})]", DnfExpr::new(&cfg)); + expect.assert_eq(&actual); + } + #[test] + fn cfg_from_attr() { + check_dnf_from_syntax(r#"#[cfg(test)]"#, expect![[r#"#![cfg(test)]"#]]); + check_dnf_from_syntax(r#"#[cfg(not(never))]"#, expect![[r#"#![cfg(not(never))]"#]]); + } } diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index 6b5aa39fa6bf..888c1405a6bb 100644 --- a/crates/hir-expand/src/db.rs +++ 
b/crates/hir-expand/src/db.rs @@ -1,9 +1,11 @@ //! Defines database & queries for macro expansion. use base_db::{Crate, RootQueryDb}; +use either::Either; use mbe::MatchedArmIndex; +use rustc_hash::FxHashSet; use span::{AstIdMap, Edition, Span, SyntaxContext}; -use syntax::{AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T, ast}; +use syntax::{AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T, ast}; use syntax_bridge::{DocCommentDesugarMode, syntax_node_to_token_tree}; use triomphe::Arc; @@ -11,9 +13,9 @@ use crate::{ AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, EagerExpander, EditionedFileId, ExpandError, ExpandResult, ExpandTo, HirFileId, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, - attrs::Meta, + attrs::{AttrId, AttrInput, RawAttrs, collect_attrs}, builtin::pseudo_derive_attr_expansion, - cfg_process::attr_macro_input_to_token_tree, + cfg_process, declarative::DeclarativeMacroExpander, fixup::{self, SyntaxFixupUndoInfo}, hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt}, @@ -175,7 +177,7 @@ pub fn expand_speculative( let span_map = SpanMapRef::RealSpanMap(&span_map); // Build the subtree and token mapping for the speculative args - let (mut tt, undo_info) = match &loc.kind { + let (mut tt, undo_info) = match loc.kind { MacroCallKind::FnLike { .. } => ( syntax_bridge::syntax_node_to_token_tree( speculative_args, @@ -198,35 +200,48 @@ pub fn expand_speculative( ), SyntaxFixupUndoInfo::NONE, ), - MacroCallKind::Derive { derive_macro_id, .. } => { - let MacroCallKind::Attr { censored_attr_ids: attr_ids, .. } = - &derive_macro_id.loc(db).kind - else { - unreachable!("`derive_macro_id` should be `MacroCallKind::Attr`"); + MacroCallKind::Derive { derive_attr_index: index, .. } + | MacroCallKind::Attr { invoc_attr_index: index, .. } => { + let censor = if let MacroCallKind::Derive { .. 
} = loc.kind { + censor_derive_input(index, &ast::Adt::cast(speculative_args.clone())?) + } else { + attr_source(index, &ast::Item::cast(speculative_args.clone())?) + .into_iter() + .map(|it| it.syntax().clone().into()) + .collect() }; - attr_macro_input_to_token_tree( - db, - speculative_args, + + let censor_cfg = + cfg_process::process_cfg_attrs(db, speculative_args, &loc).unwrap_or_default(); + let mut fixups = fixup::fixup_syntax( span_map, + speculative_args, span, - true, - attr_ids, - loc.krate, + DocCommentDesugarMode::ProcMacro, + ); + fixups.append.retain(|it, _| match it { + syntax::NodeOrToken::Token(_) => true, + it => !censor.contains(it) && !censor_cfg.contains(it), + }); + fixups.remove.extend(censor); + fixups.remove.extend(censor_cfg); + + ( + syntax_bridge::syntax_node_to_token_tree_modified( + speculative_args, + span_map, + fixups.append, + fixups.remove, + span, + DocCommentDesugarMode::ProcMacro, + ), + fixups.undo_info, ) } - MacroCallKind::Attr { censored_attr_ids: attr_ids, .. } => attr_macro_input_to_token_tree( - db, - speculative_args, - span_map, - span, - false, - attr_ids, - loc.krate, - ), }; - let attr_arg = match &loc.kind { - MacroCallKind::Attr { censored_attr_ids: attr_ids, .. } => { + let attr_arg = match loc.kind { + MacroCallKind::Attr { invoc_attr_index, .. } => { if loc.def.is_attribute_derive() { // for pseudo-derive expansion we actually pass the attribute itself only ast::Attr::cast(speculative_args.clone()).and_then(|attr| attr.token_tree()).map( @@ -245,21 +260,18 @@ pub fn expand_speculative( // Attributes may have an input token tree, build the subtree and map for this as well // then try finding a token id for our token if it is inside this input subtree. let item = ast::Item::cast(speculative_args.clone())?; - let (_, _, _, meta) = - attr_ids.invoc_attr().find_attr_range_with_source(db, loc.krate, &item); - match meta { - Meta::TokenTree { tt, .. 
} => { - let mut attr_arg = syntax_bridge::syntax_node_to_token_tree( - tt.syntax(), - span_map, - span, - DocCommentDesugarMode::ProcMacro, - ); - attr_arg.top_subtree_delimiter_mut().kind = tt::DelimiterKind::Invisible; - Some(attr_arg) + let attrs = RawAttrs::new_expanded(db, &item, span_map, loc.krate.cfg_options(db)); + attrs.iter().find(|attr| attr.id == invoc_attr_index).and_then(|attr| { + match attr.input.as_deref()? { + AttrInput::TokenTree(tt) => { + let mut attr_arg = tt.clone(); + attr_arg.top_subtree_delimiter_mut().kind = + tt::DelimiterKind::Invisible; + Some(attr_arg) + } + AttrInput::Literal(_) => None, } - _ => None, - } + }) } } _ => None, @@ -421,7 +433,7 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult { let (parse, map) = parse_with_map(db, loc.kind.file_id()); let root = parse.syntax_node(); - let (is_derive, censor_item_tree_attr_ids, item_node, span) = match &loc.kind { + let (censor, item_node, span) = match loc.kind { MacroCallKind::FnLike { ast_id, .. } => { let node = &ast_id.to_ptr(db).to_node(&root); let path_range = node @@ -489,29 +501,53 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult { MacroCallKind::Derive { .. } => { unreachable!("`ExpandDatabase::macro_arg` called with `MacroCallKind::Derive`") } - MacroCallKind::Attr { ast_id, censored_attr_ids: attr_ids, .. } => { + MacroCallKind::Attr { ast_id, invoc_attr_index, .. 
} => { let node = ast_id.to_ptr(db).to_node(&root); - let range = attr_ids - .invoc_attr() - .find_attr_range_with_source(db, loc.krate, &node) - .3 - .path_range(); - let span = map.span_for_range(range); - - let is_derive = matches!(loc.def.kind, MacroDefKind::BuiltInAttr(_, expander) if expander.is_derive()); - (is_derive, &**attr_ids, node, span) + let attr_source = attr_source(invoc_attr_index, &node); + + let span = map.span_for_range( + attr_source + .as_ref() + .and_then(|it| it.path()) + .map_or_else(|| node.syntax().text_range(), |it| it.syntax().text_range()), + ); + // If derive attribute we need to censor the derive input + if matches!(loc.def.kind, MacroDefKind::BuiltInAttr(_, expander) if expander.is_derive()) + && ast::Adt::can_cast(node.syntax().kind()) + { + let adt = ast::Adt::cast(node.syntax().clone()).unwrap(); + let censor_derive_input = censor_derive_input(invoc_attr_index, &adt); + (censor_derive_input, node, span) + } else { + (attr_source.into_iter().map(|it| it.syntax().clone().into()).collect(), node, span) + } } }; - let (mut tt, undo_info) = attr_macro_input_to_token_tree( - db, - item_node.syntax(), - map.as_ref(), - span, - is_derive, - censor_item_tree_attr_ids, - loc.krate, - ); + let (mut tt, undo_info) = { + let syntax = item_node.syntax(); + let censor_cfg = cfg_process::process_cfg_attrs(db, syntax, &loc).unwrap_or_default(); + let mut fixups = + fixup::fixup_syntax(map.as_ref(), syntax, span, DocCommentDesugarMode::ProcMacro); + fixups.append.retain(|it, _| match it { + syntax::NodeOrToken::Token(_) => true, + it => !censor.contains(it) && !censor_cfg.contains(it), + }); + fixups.remove.extend(censor); + fixups.remove.extend(censor_cfg); + + ( + syntax_bridge::syntax_node_to_token_tree_modified( + syntax, + map, + fixups.append, + fixups.remove, + span, + DocCommentDesugarMode::ProcMacro, + ), + fixups.undo_info, + ) + }; if loc.def.is_proc_macro() { // proc macros expect their inputs without parentheses, MBEs expect it with 
them included @@ -521,6 +557,31 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult { (Arc::new(tt), undo_info, span) } +// FIXME: Censoring info should be calculated by the caller! Namely by name resolution +/// Derives expect all `#[derive(..)]` invocations up to (and including) the currently invoked one to be stripped +fn censor_derive_input(derive_attr_index: AttrId, node: &ast::Adt) -> FxHashSet { + // FIXME: handle `cfg_attr` + cov_mark::hit!(derive_censoring); + collect_attrs(node) + .take(derive_attr_index.ast_index() + 1) + .filter_map(|(_, attr)| Either::left(attr)) + // FIXME, this resolution should not be done syntactically + // derive is a proper macro now, no longer builtin + // But we do not have resolution at this stage, this means + // we need to know about all macro calls for the given ast item here + // so we require some kind of mapping... + .filter(|attr| attr.simple_name().as_deref() == Some("derive")) + .map(|it| it.syntax().clone().into()) + .collect() +} + +/// Attributes expect the invoking attribute to be stripped +fn attr_source(invoc_attr_index: AttrId, node: &ast::Item) -> Option { + // FIXME: handle `cfg_attr` + cov_mark::hit!(attribute_macro_attr_censoring); + collect_attrs(node).nth(invoc_attr_index.ast_index()).and_then(|(_, attr)| Either::left(attr)) +} + impl TokenExpander { fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander { match id.kind { diff --git a/crates/hir-expand/src/declarative.rs b/crates/hir-expand/src/declarative.rs index 3fb9aca9649e..0d100c1364ab 100644 --- a/crates/hir-expand/src/declarative.rs +++ b/crates/hir-expand/src/declarative.rs @@ -1,20 +1,16 @@ //! 
Compiled declarative macro expanders (`macro_rules!` and `macro`) -use std::{cell::OnceCell, ops::ControlFlow}; - use base_db::Crate; +use intern::sym; use span::{Edition, Span, SyntaxContext}; use stdx::TupleExt; -use syntax::{ - AstNode, AstToken, - ast::{self, HasAttrs}, -}; +use syntax::{AstNode, ast}; use syntax_bridge::DocCommentDesugarMode; use triomphe::Arc; use crate::{ AstId, ExpandError, ExpandErrorKind, ExpandResult, HirFileId, Lookup, MacroCallId, - attrs::{Meta, expand_cfg_attr}, + attrs::RawAttrs, db::ExpandDatabase, hygiene::{Transparency, apply_mark}, tt, @@ -84,28 +80,29 @@ impl DeclarativeMacroExpander { let (root, map) = crate::db::parse_with_map(db, id.file_id); let root = root.syntax_node(); - let transparency = |node: ast::AnyHasAttrs| { - let cfg_options = OnceCell::new(); - expand_cfg_attr( - node.attrs(), - || cfg_options.get_or_init(|| def_crate.cfg_options(db)), - |attr, _, _, _| { - if let Meta::NamedKeyValue { name: Some(name), value, .. } = attr - && name.text() == "rustc_macro_transparency" - && let Some(value) = value.and_then(ast::String::cast) - && let Ok(value) = value.value() - { - match &*value { - "transparent" => ControlFlow::Break(Transparency::Transparent), - "semitransparent" => ControlFlow::Break(Transparency::SemiTransparent), - "opaque" => ControlFlow::Break(Transparency::Opaque), - _ => ControlFlow::Continue(()), - } - } else { - ControlFlow::Continue(()) - } + let transparency = |node| { + // ... would be nice to have the item tree here + let attrs = RawAttrs::new_expanded(db, node, map.as_ref(), def_crate.cfg_options(db)); + match attrs + .iter() + .find(|it| { + it.path + .as_ident() + .map(|it| *it == sym::rustc_macro_transparency) + .unwrap_or(false) + })? + .token_tree_value()? + .token_trees() + .flat_tokens() + { + [tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] 
=> match &i.sym { + s if *s == sym::transparent => Some(Transparency::Transparent), + s if *s == sym::semitransparent => Some(Transparency::SemiTransparent), + s if *s == sym::opaque => Some(Transparency::Opaque), + _ => None, }, - ) + _ => None, + } }; let ctx_edition = |ctx: SyntaxContext| { if ctx.is_root() { @@ -136,8 +133,7 @@ impl DeclarativeMacroExpander { "expected a token tree".into(), )), }, - transparency(ast::AnyHasAttrs::from(macro_rules)) - .unwrap_or(Transparency::SemiTransparent), + transparency(¯o_rules).unwrap_or(Transparency::SemiTransparent), ), ast::Macro::MacroDef(macro_def) => ( match macro_def.body() { @@ -165,7 +161,7 @@ impl DeclarativeMacroExpander { "expected a token tree".into(), )), }, - transparency(macro_def.into()).unwrap_or(Transparency::Opaque), + transparency(¯o_def).unwrap_or(Transparency::Opaque), ), }; let edition = ctx_edition(match id.file_id { diff --git a/crates/hir-expand/src/files.rs b/crates/hir-expand/src/files.rs index fe557d68023d..a7f3e27a4553 100644 --- a/crates/hir-expand/src/files.rs +++ b/crates/hir-expand/src/files.rs @@ -55,6 +55,30 @@ impl From for HirFilePosition { } } +impl FilePositionWrapper { + pub fn with_edition(self, db: &dyn ExpandDatabase, edition: span::Edition) -> FilePosition { + FilePositionWrapper { + file_id: EditionedFileId::new(db, self.file_id, edition), + offset: self.offset, + } + } +} + +impl FileRangeWrapper { + pub fn with_edition(self, db: &dyn ExpandDatabase, edition: span::Edition) -> FileRange { + FileRangeWrapper { + file_id: EditionedFileId::new(db, self.file_id, edition), + range: self.range, + } + } +} + +impl InFileWrapper { + pub fn with_edition(self, db: &dyn ExpandDatabase, edition: span::Edition) -> InRealFile { + InRealFile { file_id: EditionedFileId::new(db, self.file_id, edition), value: self.value } + } +} + impl HirFileRange { pub fn file_range(self) -> Option { Some(FileRange { file_id: self.file_id.file_id()?, range: self.range }) @@ -383,7 +407,7 @@ impl InFile { 
// Fall back to whole macro call. let loc = db.lookup_intern_macro_call(mac_file); - loc.kind.original_call_range(db, loc.krate) + loc.kind.original_call_range(db) } } } @@ -429,10 +453,7 @@ impl InFile { Some(it) => it, None => { let loc = db.lookup_intern_macro_call(mac_file); - ( - loc.kind.original_call_range(db, loc.krate), - SyntaxContext::root(loc.def.edition), - ) + (loc.kind.original_call_range(db), SyntaxContext::root(loc.def.edition)) } } } @@ -447,7 +468,7 @@ impl InFile { Some(it) => it, _ => { let loc = db.lookup_intern_macro_call(mac_file); - loc.kind.original_call_range(db, loc.krate) + loc.kind.original_call_range(db) } } } diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs index cba1c7c1d4b0..fe77e1565987 100644 --- a/crates/hir-expand/src/fixup.rs +++ b/crates/hir-expand/src/fixup.rs @@ -523,7 +523,6 @@ mod tests { fixups.remove, span_map.span_for_range(TextRange::empty(0.into())), DocCommentDesugarMode::Mbe, - |_, _| (true, Vec::new()), ); let actual = format!("{tt}\n"); @@ -699,7 +698,7 @@ fn foo() { } "#, expect![[r#" -fn foo () {a .__ra_fixup ;} +fn foo () {a . __ra_fixup ;} "#]], ) } @@ -714,7 +713,7 @@ fn foo() { } "#, expect![[r#" -fn foo () {a .__ra_fixup ; bar () ;} +fn foo () {a . 
__ra_fixup ; bar () ;} "#]], ) } diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index e1103ef43e0f..472ec83ffef5 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -25,17 +25,18 @@ mod cfg_process; mod fixup; mod prettify_macro_expansion_; +use attrs::collect_attrs; +use rustc_hash::FxHashMap; use salsa::plumbing::{AsId, FromId}; use stdx::TupleExt; -use thin_vec::ThinVec; use triomphe::Arc; use core::fmt; -use std::{hash::Hash, ops}; +use std::hash::Hash; use base_db::Crate; use either::Either; -use span::{Edition, ErasedFileAstId, FileAstId, Span, SyntaxContext}; +use span::{Edition, ErasedFileAstId, FileAstId, Span, SpanAnchor, SyntaxContext}; use syntax::{ SyntaxNode, SyntaxToken, TextRange, TextSize, ast::{self, AstNode}, @@ -316,6 +317,9 @@ pub enum MacroCallKind { Derive { ast_id: AstId, /// Syntactical index of the invoking `#[derive]` attribute. + /// + /// Outer attributes are counted first, then inner attributes. This does not support + /// out-of-line modules, which may have attributes spread across 2 files! derive_attr_index: AttrId, /// Index of the derive macro in the derive attribute derive_index: u32, @@ -325,68 +329,17 @@ pub enum MacroCallKind { }, Attr { ast_id: AstId, - // FIXME: This shouldn't be here, we can derive this from `invoc_attr_index`. + // FIXME: This shouldn't be here, we can derive this from `invoc_attr_index` + // but we need to fix the `cfg_attr` handling first. attr_args: Option>, - /// This contains the list of all *active* attributes (derives and attr macros) preceding this - /// attribute, including this attribute. You can retrieve the [`AttrId`] of the current attribute - /// by calling [`invoc_attr()`] on this. - /// - /// The macro should not see the attributes here. + /// Syntactical index of the invoking `#[attribute]`. 
/// - /// [`invoc_attr()`]: AttrMacroAttrIds::invoc_attr - censored_attr_ids: AttrMacroAttrIds, + /// Outer attributes are counted first, then inner attributes. This does not support + /// out-of-line modules, which may have attributes spread across 2 files! + invoc_attr_index: AttrId, }, } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct AttrMacroAttrIds(AttrMacroAttrIdsRepr); - -impl AttrMacroAttrIds { - #[inline] - pub fn from_one(id: AttrId) -> Self { - Self(AttrMacroAttrIdsRepr::One(id)) - } - - #[inline] - pub fn from_many(ids: &[AttrId]) -> Self { - if let &[id] = ids { - Self(AttrMacroAttrIdsRepr::One(id)) - } else { - Self(AttrMacroAttrIdsRepr::ManyDerives(ids.iter().copied().collect())) - } - } -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -enum AttrMacroAttrIdsRepr { - One(AttrId), - ManyDerives(ThinVec), -} - -impl ops::Deref for AttrMacroAttrIds { - type Target = [AttrId]; - - #[inline] - fn deref(&self) -> &Self::Target { - match &self.0 { - AttrMacroAttrIdsRepr::One(one) => std::slice::from_ref(one), - AttrMacroAttrIdsRepr::ManyDerives(many) => many, - } - } -} - -impl AttrMacroAttrIds { - #[inline] - pub fn invoc_attr(&self) -> AttrId { - match &self.0 { - AttrMacroAttrIdsRepr::One(it) => *it, - AttrMacroAttrIdsRepr::ManyDerives(it) => { - *it.last().expect("should always have at least one `AttrId`") - } - } - } -} - impl HirFileId { pub fn edition(self, db: &dyn ExpandDatabase) -> Edition { match self { @@ -630,20 +583,34 @@ impl MacroDefId { impl MacroCallLoc { pub fn to_node(&self, db: &dyn ExpandDatabase) -> InFile { - match &self.kind { + match self.kind { MacroCallKind::FnLike { ast_id, .. } => { ast_id.with_value(ast_id.to_node(db).syntax().clone()) } MacroCallKind::Derive { ast_id, derive_attr_index, .. 
} => { // FIXME: handle `cfg_attr` - let (attr, _, _, _) = derive_attr_index.find_attr_range(db, self.krate, *ast_id); - ast_id.with_value(attr.syntax().clone()) + ast_id.with_value(ast_id.to_node(db)).map(|it| { + collect_attrs(&it) + .nth(derive_attr_index.ast_index()) + .and_then(|it| match it.1 { + Either::Left(attr) => Some(attr.syntax().clone()), + Either::Right(_) => None, + }) + .unwrap_or_else(|| it.syntax().clone()) + }) } - MacroCallKind::Attr { ast_id, censored_attr_ids: attr_ids, .. } => { + MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => { if self.def.is_attribute_derive() { - let (attr, _, _, _) = - attr_ids.invoc_attr().find_attr_range(db, self.krate, *ast_id); - ast_id.with_value(attr.syntax().clone()) + // FIXME: handle `cfg_attr` + ast_id.with_value(ast_id.to_node(db)).map(|it| { + collect_attrs(&it) + .nth(invoc_attr_index.ast_index()) + .and_then(|it| match it.1 { + Either::Left(attr) => Some(attr.syntax().clone()), + Either::Right(_) => None, + }) + .unwrap_or_else(|| it.syntax().clone()) + }) } else { ast_id.with_value(ast_id.to_node(db).syntax().clone()) } @@ -748,7 +715,7 @@ impl MacroCallKind { /// Here we try to roughly match what rustc does to improve diagnostics: fn-like macros /// get the macro path (rustc shows the whole `ast::MacroCall`), attribute macros get the /// attribute's range, and derives get only the specific derive that is being referred to. - pub fn original_call_range(self, db: &dyn ExpandDatabase, krate: Crate) -> FileRange { + pub fn original_call_range(self, db: &dyn ExpandDatabase) -> FileRange { let mut kind = self; let file_id = loop { match kind.file_id() { @@ -770,11 +737,24 @@ impl MacroCallKind { } MacroCallKind::Derive { ast_id, derive_attr_index, .. 
} => { // FIXME: should be the range of the macro name, not the whole derive - derive_attr_index.find_attr_range(db, krate, ast_id).2 + // FIXME: handle `cfg_attr` + collect_attrs(&ast_id.to_node(db)) + .nth(derive_attr_index.ast_index()) + .expect("missing derive") + .1 + .expect_left("derive is a doc comment?") + .syntax() + .text_range() } // FIXME: handle `cfg_attr` - MacroCallKind::Attr { ast_id, censored_attr_ids: attr_ids, .. } => { - attr_ids.invoc_attr().find_attr_range(db, krate, ast_id).2 + MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => { + collect_attrs(&ast_id.to_node(db)) + .nth(invoc_attr_index.ast_index()) + .expect("missing attribute") + .1 + .expect_left("attribute macro is a doc comment?") + .syntax() + .text_range() } }; @@ -893,8 +873,7 @@ impl ExpansionInfo { let span = self.exp_map.span_at(token.start()); match &self.arg_map { SpanMap::RealSpanMap(_) => { - let file_id = - EditionedFileId::from_span_guess_origin(db, span.anchor.file_id).into(); + let file_id = EditionedFileId::from_span(db, span.anchor.file_id).into(); let anchor_offset = db.ast_id_map(file_id).get_erased(span.anchor.ast_id).text_range().start(); InFile { file_id, value: smallvec::smallvec![span.range + anchor_offset] } @@ -950,7 +929,7 @@ pub fn map_node_range_up_rooted( start = start.min(span.range.start()); end = end.max(span.range.end()); } - let file_id = EditionedFileId::from_span_guess_origin(db, anchor.file_id); + let file_id = EditionedFileId::from_span(db, anchor.file_id); let anchor_offset = db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start(); Some(FileRange { file_id, range: TextRange::new(start, end) + anchor_offset }) @@ -976,12 +955,36 @@ pub fn map_node_range_up( start = start.min(span.range.start()); end = end.max(span.range.end()); } - let file_id = EditionedFileId::from_span_guess_origin(db, anchor.file_id); + let file_id = EditionedFileId::from_span(db, anchor.file_id); let anchor_offset = 
db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start(); Some((FileRange { file_id, range: TextRange::new(start, end) + anchor_offset }, ctx)) } +/// Maps up the text range out of the expansion hierarchy back into the original file its from. +/// This version will aggregate the ranges of all spans with the same anchor and syntax context. +pub fn map_node_range_up_aggregated( + db: &dyn ExpandDatabase, + exp_map: &ExpansionSpanMap, + range: TextRange, +) -> FxHashMap<(SpanAnchor, SyntaxContext), TextRange> { + let mut map = FxHashMap::default(); + for span in exp_map.spans_for_range(range) { + let range = map.entry((span.anchor, span.ctx)).or_insert_with(|| span.range); + *range = TextRange::new( + range.start().min(span.range.start()), + range.end().max(span.range.end()), + ); + } + for ((anchor, _), range) in &mut map { + let file_id = EditionedFileId::from_span(db, anchor.file_id); + let anchor_offset = + db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start(); + *range += anchor_offset; + } + map +} + /// Looks up the span at the given offset. 
pub fn span_for_offset( db: &dyn ExpandDatabase, @@ -989,7 +992,7 @@ pub fn span_for_offset( offset: TextSize, ) -> (FileRange, SyntaxContext) { let span = exp_map.span_at(offset); - let file_id = EditionedFileId::from_span_guess_origin(db, span.anchor.file_id); + let file_id = EditionedFileId::from_span(db, span.anchor.file_id); let anchor_offset = db.ast_id_map(file_id.into()).get_erased(span.anchor.ast_id).text_range().start(); (FileRange { file_id, range: span.range + anchor_offset }, span.ctx) @@ -1059,7 +1062,7 @@ impl ExpandTo { } } -intern::impl_internable!(ModPath); +intern::impl_internable!(ModPath, attrs::AttrInput); #[salsa_macros::interned(no_lifetime, debug, revisions = usize::MAX)] #[doc(alias = "MacroFileId")] @@ -1122,14 +1125,6 @@ impl HirFileId { HirFileId::MacroFile(_) => None, } } - - #[inline] - pub fn krate(self, db: &dyn ExpandDatabase) -> Crate { - match self { - HirFileId::FileId(it) => it.krate(db), - HirFileId::MacroFile(it) => it.loc(db).krate, - } - } } impl PartialEq for HirFileId { diff --git a/crates/hir-expand/src/mod_path.rs b/crates/hir-expand/src/mod_path.rs index e9805e3f86b8..d84d978cdb7e 100644 --- a/crates/hir-expand/src/mod_path.rs +++ b/crates/hir-expand/src/mod_path.rs @@ -2,7 +2,7 @@ use std::{ fmt::{self, Display as _}, - iter::{self, Peekable}, + iter, }; use crate::{ @@ -12,11 +12,10 @@ use crate::{ tt, }; use base_db::Crate; -use intern::{Symbol, sym}; -use parser::T; +use intern::sym; use smallvec::SmallVec; use span::{Edition, SyntaxContext}; -use syntax::{AstNode, SyntaxToken, ast}; +use syntax::{AstNode, ast}; #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct ModPath { @@ -65,58 +64,6 @@ impl ModPath { ModPath { kind, segments: SmallVec::new_const() } } - pub fn from_tokens( - db: &dyn ExpandDatabase, - span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContext, - is_abs: bool, - segments: impl Iterator, - ) -> Option { - let mut segments = segments.peekable(); - let mut result = 
SmallVec::new_const(); - let path_kind = if is_abs { - PathKind::Abs - } else { - let first = segments.next()?; - match first.kind() { - T![crate] => PathKind::Crate, - T![self] => PathKind::Super(handle_super(&mut segments)), - T![super] => PathKind::Super(1 + handle_super(&mut segments)), - T![ident] => { - let first_text = first.text(); - if first_text == "$crate" { - let ctxt = span_for_range(first.text_range()); - resolve_crate_root(db, ctxt) - .map(PathKind::DollarCrate) - .unwrap_or(PathKind::Crate) - } else { - result.push(Name::new_symbol_root(Symbol::intern(first_text))); - PathKind::Plain - } - } - _ => return None, - } - }; - for segment in segments { - if segment.kind() != T![ident] { - return None; - } - result.push(Name::new_symbol_root(Symbol::intern(segment.text()))); - } - if result.is_empty() { - return None; - } - result.shrink_to_fit(); - return Some(ModPath { kind: path_kind, segments: result }); - - fn handle_super(segments: &mut Peekable>) -> u8 { - let mut result = 0; - while segments.next_if(|it| it.kind() == T![super]).is_some() { - result += 1; - } - result - } - } - pub fn segments(&self) -> &[Name] { &self.segments } diff --git a/crates/hir-expand/src/span_map.rs b/crates/hir-expand/src/span_map.rs index 8b0c0d72cd49..e5a778a95c7c 100644 --- a/crates/hir-expand/src/span_map.rs +++ b/crates/hir-expand/src/span_map.rs @@ -1,12 +1,13 @@ //! Span maps for real files and macro expansions. use span::{Span, SyntaxContext}; +use stdx::TupleExt; use syntax::{AstNode, TextRange, ast}; use triomphe::Arc; pub use span::RealSpanMap; -use crate::{HirFileId, MacroCallId, db::ExpandDatabase}; +use crate::{HirFileId, MacroCallId, attrs::collect_attrs, db::ExpandDatabase}; pub type ExpansionSpanMap = span::SpanMap; @@ -109,24 +110,26 @@ pub(crate) fn real_span_map( // them anchors too, but only if they have no attributes attached, as those might be proc-macros // and using different anchors inside of them will prevent spans from being joinable. 
tree.items().for_each(|item| match &item { - ast::Item::ExternBlock(it) if ast::attrs_including_inner(it).next().is_none() => { + ast::Item::ExternBlock(it) + if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) => + { if let Some(extern_item_list) = it.extern_item_list() { pairs.extend( extern_item_list.extern_items().map(ast::Item::from).map(item_to_entry), ); } } - ast::Item::Impl(it) if ast::attrs_including_inner(it).next().is_none() => { + ast::Item::Impl(it) if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) => { if let Some(assoc_item_list) = it.assoc_item_list() { pairs.extend(assoc_item_list.assoc_items().map(ast::Item::from).map(item_to_entry)); } } - ast::Item::Module(it) if ast::attrs_including_inner(it).next().is_none() => { + ast::Item::Module(it) if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) => { if let Some(item_list) = it.item_list() { pairs.extend(item_list.items().map(item_to_entry)); } } - ast::Item::Trait(it) if ast::attrs_including_inner(it).next().is_none() => { + ast::Item::Trait(it) if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) => { if let Some(assoc_item_list) = it.assoc_item_list() { pairs.extend(assoc_item_list.assoc_items().map(ast::Item::from).map(item_to_entry)); } diff --git a/crates/hir-ty/src/consteval.rs b/crates/hir-ty/src/consteval.rs index 0a6458562e15..18ebe7d7a539 100644 --- a/crates/hir-ty/src/consteval.rs +++ b/crates/hir-ty/src/consteval.rs @@ -6,7 +6,6 @@ mod tests; use base_db::Crate; use hir_def::{ EnumVariantId, GeneralConstId, HasModule, StaticId, - attrs::AttrFlags, expr_store::Body, hir::{Expr, ExprId}, type_ref::LiteralConstRef, @@ -199,7 +198,7 @@ pub(crate) fn const_eval_discriminant_variant<'db>( return Ok(value); } - let repr = AttrFlags::repr(db, loc.parent.into()); + let repr = db.enum_signature(loc.parent).repr; let is_signed = repr.and_then(|repr| repr.int).is_none_or(|int| int.is_signed()); let mir_body = db.monomorphized_mir_body( diff --git 
a/crates/hir-ty/src/diagnostics/decl_check.rs b/crates/hir-ty/src/diagnostics/decl_check.rs index c0e223380bca..0815e62f87ee 100644 --- a/crates/hir-ty/src/diagnostics/decl_check.rs +++ b/crates/hir-ty/src/diagnostics/decl_check.rs @@ -17,8 +17,8 @@ use std::fmt; use hir_def::{ AdtId, ConstId, EnumId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup, - ModuleDefId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, attrs::AttrFlags, - db::DefDatabase, hir::Pat, item_tree::FieldsShape, signatures::StaticFlags, src::HasSource, + ModuleDefId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, db::DefDatabase, hir::Pat, + item_tree::FieldsShape, signatures::StaticFlags, src::HasSource, }; use hir_expand::{ HirFileId, @@ -201,7 +201,7 @@ impl<'a> DeclValidator<'a> { // Don't run the lint on extern "[not Rust]" fn items with the // #[no_mangle] attribute. - let no_mangle = AttrFlags::query(self.db, func.into()).contains(AttrFlags::NO_MANGLE); + let no_mangle = self.db.attrs(func.into()).by_key(sym::no_mangle).exists(); if no_mangle && data.abi.as_ref().is_some_and(|abi| *abi != sym::Rust) { cov_mark::hit!(extern_func_no_mangle_ignored); } else { diff --git a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs index c70c6b611944..fb942e336e65 100644 --- a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs +++ b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs @@ -2,9 +2,7 @@ use std::{cell::LazyCell, fmt}; -use hir_def::{ - EnumId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId, attrs::AttrFlags, -}; +use hir_def::{EnumId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId}; use intern::sym; use rustc_pattern_analysis::{ IndexVec, PatCx, PrivateUninhabitedField, @@ -120,7 +118,7 @@ impl<'a, 'db> MatchCheckCtx<'a, 'db> { /// Returns whether the given ADT is from another crate declared `#[non_exhaustive]`. 
fn is_foreign_non_exhaustive(&self, adt: hir_def::AdtId) -> bool { let is_local = adt.krate(self.db) == self.module.krate(); - !is_local && AttrFlags::query(self.db, adt.into()).contains(AttrFlags::NON_EXHAUSTIVE) + !is_local && self.db.attrs(adt.into()).by_key(sym::non_exhaustive).exists() } fn variant_id_for_adt( diff --git a/crates/hir-ty/src/diagnostics/unsafe_check.rs b/crates/hir-ty/src/diagnostics/unsafe_check.rs index 8ac7ab19cd3b..53524d66a33c 100644 --- a/crates/hir-ty/src/diagnostics/unsafe_check.rs +++ b/crates/hir-ty/src/diagnostics/unsafe_check.rs @@ -144,7 +144,7 @@ struct UnsafeVisitor<'db> { inside_assignment: bool, inside_union_destructure: bool, callback: &'db mut dyn FnMut(UnsafeDiagnostic), - def_target_features: TargetFeatures<'db>, + def_target_features: TargetFeatures, // FIXME: This needs to be the edition of the span of each call. edition: Edition, /// On some targets (WASM), calling safe functions with `#[target_feature]` is always safe, even when @@ -162,7 +162,7 @@ impl<'db> UnsafeVisitor<'db> { ) -> Self { let resolver = def.resolver(db); let def_target_features = match def { - DefWithBodyId::FunctionId(func) => TargetFeatures::from_fn(db, func), + DefWithBodyId::FunctionId(func) => TargetFeatures::from_attrs(&db.attrs(func.into())), _ => TargetFeatures::default(), }; let krate = resolver.module().krate(); diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index 03ae970acaa7..9891f3f248bd 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -38,7 +38,7 @@ use hir_def::{ lang_item::{LangItem, LangItemTarget, lang_item}, layout::Integer, resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs}, - signatures::{ConstSignature, EnumSignature, StaticSignature}, + signatures::{ConstSignature, StaticSignature}, type_ref::{ConstRef, LifetimeRefId, TypeRefId}, }; use hir_expand::{mod_path::ModPath, name::Name}; @@ -104,7 +104,7 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: 
DefWithBodyId) -> Arc ctx.collect_const(c, &db.const_signature(c)), DefWithBodyId::StaticId(s) => ctx.collect_static(&db.static_signature(s)), DefWithBodyId::VariantId(v) => { - ctx.return_ty = match EnumSignature::variant_body_type(db, v.lookup(db).parent) { + ctx.return_ty = match db.enum_signature(v.lookup(db).parent).variant_body_type() { hir_def::layout::IntegerType::Pointer(signed) => match signed { true => ctx.types.isize, false => ctx.types.usize, @@ -759,7 +759,7 @@ pub(crate) struct InferenceContext<'body, 'db> { /// Generally you should not resolve things via this resolver. Instead create a TyLoweringContext /// and resolve the path via its methods. This will ensure proper error reporting. pub(crate) resolver: Resolver<'db>, - target_features: OnceCell<(TargetFeatures<'db>, TargetFeatureIsSafeInTarget)>, + target_features: OnceCell<(TargetFeatures, TargetFeatureIsSafeInTarget)>, pub(crate) generic_def: GenericDefId, table: unify::InferenceTable<'db>, /// The traits in scope, disregarding block modules. This is used for caching purposes. 
@@ -903,14 +903,14 @@ impl<'body, 'db> InferenceContext<'body, 'db> { } fn target_features<'a>( - db: &'db dyn HirDatabase, - target_features: &'a OnceCell<(TargetFeatures<'db>, TargetFeatureIsSafeInTarget)>, + db: &dyn HirDatabase, + target_features: &'a OnceCell<(TargetFeatures, TargetFeatureIsSafeInTarget)>, owner: DefWithBodyId, krate: Crate, - ) -> (&'a TargetFeatures<'db>, TargetFeatureIsSafeInTarget) { + ) -> (&'a TargetFeatures, TargetFeatureIsSafeInTarget) { let (target_features, target_feature_is_safe) = target_features.get_or_init(|| { let target_features = match owner { - DefWithBodyId::FunctionId(id) => TargetFeatures::from_fn(db, id), + DefWithBodyId::FunctionId(id) => TargetFeatures::from_attrs(&db.attrs(id.into())), _ => TargetFeatures::default(), }; let target_feature_is_safe = match &krate.workspace_data(db).target { diff --git a/crates/hir-ty/src/infer/coerce.rs b/crates/hir-ty/src/infer/coerce.rs index 9b95eef0e0d6..78889ccb89a2 100644 --- a/crates/hir-ty/src/infer/coerce.rs +++ b/crates/hir-ty/src/infer/coerce.rs @@ -37,11 +37,11 @@ use hir_def::{ CallableDefId, - attrs::AttrFlags, hir::{ExprId, ExprOrPatId}, lang_item::LangItem, signatures::FunctionSignature, }; +use intern::sym; use rustc_ast_ir::Mutability; use rustc_type_ir::{ BoundVar, TypeAndMut, @@ -76,7 +76,7 @@ use crate::{ struct Coerce<'a, 'b, 'db> { table: &'a mut InferenceTable<'db>, has_errors: &'a mut bool, - target_features: &'a mut dyn FnMut() -> (&'b TargetFeatures<'db>, TargetFeatureIsSafeInTarget), + target_features: &'a mut dyn FnMut() -> (&'b TargetFeatures, TargetFeatureIsSafeInTarget), use_lub: bool, /// Determines whether or not allow_two_phase_borrow is set on any /// autoref adjustments we create while coercing. 
We don't want to @@ -864,14 +864,14 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> { return Err(TypeError::IntrinsicCast); } - let attrs = AttrFlags::query(self.table.db, def_id.into()); - if attrs.contains(AttrFlags::RUSTC_FORCE_INLINE) { + let attrs = self.table.db.attrs(def_id.into()); + if attrs.by_key(sym::rustc_force_inline).exists() { return Err(TypeError::ForceInlineCast); } - if b_hdr.safety.is_safe() && attrs.contains(AttrFlags::HAS_TARGET_FEATURE) { + if b_hdr.safety.is_safe() && attrs.by_key(sym::target_feature).exists() { let fn_target_features = - TargetFeatures::from_fn_no_implications(self.table.db, def_id); + TargetFeatures::from_attrs_no_implications(&attrs); // Allow the coercion if the current function has all the features that would be // needed to call the coercee safely. let (target_features, target_feature_is_safe) = @@ -1056,7 +1056,7 @@ impl<'db> InferenceContext<'_, 'db> { let is_force_inline = |ty: Ty<'db>| { if let TyKind::FnDef(CallableIdWrapper(CallableDefId::FunctionId(did)), _) = ty.kind() { - AttrFlags::query(self.db, did.into()).contains(AttrFlags::RUSTC_FORCE_INLINE) + self.db.attrs(did.into()).by_key(sym::rustc_force_inline).exists() } else { false } diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index a1d99a45287d..efb7244ff637 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -2365,11 +2365,9 @@ impl<'db> InferenceContext<'_, 'db> { }; let data = self.db.function_signature(func); - let Some(legacy_const_generics_indices) = data.legacy_const_generics_indices(self.db, func) - else { + let Some(legacy_const_generics_indices) = &data.legacy_const_generics_indices else { return Default::default(); }; - let mut legacy_const_generics_indices = Box::<[u32]>::from(legacy_const_generics_indices); // only use legacy const generics if the param count matches with them if data.params.len() + legacy_const_generics_indices.len() != args.len() { @@ -2378,8 +2376,9 @@ impl<'db> 
InferenceContext<'_, 'db> { } else { // there are more parameters than there should be without legacy // const params; use them - legacy_const_generics_indices.sort_unstable(); - return legacy_const_generics_indices; + let mut indices = legacy_const_generics_indices.as_ref().clone(); + indices.sort(); + return indices; } } @@ -2392,8 +2391,9 @@ impl<'db> InferenceContext<'_, 'db> { self.infer_expr(args[arg_idx as usize], &expected, ExprIsRead::Yes); // FIXME: evaluate and unify with the const } - legacy_const_generics_indices.sort_unstable(); - legacy_const_generics_indices + let mut indices = legacy_const_generics_indices.as_ref().clone(); + indices.sort(); + indices } /// Dereferences a single level of immutable referencing. diff --git a/crates/hir-ty/src/layout.rs b/crates/hir-ty/src/layout.rs index b650f5c1a16a..fc0b9d30b333 100644 --- a/crates/hir-ty/src/layout.rs +++ b/crates/hir-ty/src/layout.rs @@ -4,7 +4,6 @@ use std::fmt; use hir_def::{ AdtId, LocalFieldId, StructId, - attrs::AttrFlags, layout::{LayoutCalculatorError, LayoutData}, }; use la_arena::{Idx, RawIdx}; @@ -175,7 +174,8 @@ pub fn layout_of_ty_query<'db>( TyKind::Adt(def, args) => { match def.inner().id { hir_def::AdtId::StructId(s) => { - let repr = AttrFlags::repr(db, s.into()).unwrap_or_default(); + let data = db.struct_signature(s); + let repr = data.repr.unwrap_or_default(); if repr.simd() { return layout_of_simd_ty(db, s, repr.packed(), &args, trait_env, &target); } diff --git a/crates/hir-ty/src/layout/adt.rs b/crates/hir-ty/src/layout/adt.rs index ecebf7935d06..a8f04bf8c132 100644 --- a/crates/hir-ty/src/layout/adt.rs +++ b/crates/hir-ty/src/layout/adt.rs @@ -4,9 +4,9 @@ use std::{cmp, ops::Bound}; use hir_def::{ AdtId, VariantId, - attrs::AttrFlags, signatures::{StructFlags, VariantFields}, }; +use intern::sym; use rustc_abi::{Integer, ReprOptions, TargetDataLayout}; use rustc_index::IndexVec; use smallvec::SmallVec; @@ -44,15 +44,15 @@ pub fn layout_of_adt_query<'db>( 
r.push(handle_variant(s.into(), s.fields(db))?); ( r, - AttrFlags::repr(db, s.into()).unwrap_or_default(), + sig.repr.unwrap_or_default(), sig.flags.intersects(StructFlags::IS_UNSAFE_CELL | StructFlags::IS_UNSAFE_PINNED), ) } AdtId::UnionId(id) => { - let repr = AttrFlags::repr(db, id.into()); + let data = db.union_signature(id); let mut r = SmallVec::new(); r.push(handle_variant(id.into(), id.fields(db))?); - (r, repr.unwrap_or_default(), false) + (r, data.repr.unwrap_or_default(), false) } AdtId::EnumId(e) => { let variants = e.enum_variants(db); @@ -61,7 +61,7 @@ pub fn layout_of_adt_query<'db>( .iter() .map(|&(v, _, _)| handle_variant(v.into(), v.fields(db))) .collect::, _>>()?; - (r, AttrFlags::repr(db, e.into()).unwrap_or_default(), false) + (r, db.enum_signature(e).repr.unwrap_or_default(), false) } }; let variants = variants @@ -105,12 +105,27 @@ pub(crate) fn layout_of_adt_cycle_result<'db>( } fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound, Bound) { - let range = AttrFlags::rustc_layout_scalar_valid_range(db, def); - let get = |value| match value { - Some(it) => Bound::Included(it), - None => Bound::Unbounded, + let attrs = db.attrs(def.into()); + let get = |name| { + let attr = attrs.by_key(name).tt_values(); + for tree in attr { + if let Some(it) = tree.iter().next_as_view() { + let text = it.to_string().replace('_', ""); + let (text, base) = match text.as_bytes() { + [b'0', b'x', ..] => (&text[2..], 16), + [b'0', b'o', ..] => (&text[2..], 8), + [b'0', b'b', ..] 
=> (&text[2..], 2), + _ => (&*text, 10), + }; + + if let Ok(it) = u128::from_str_radix(text, base) { + return Bound::Included(it); + } + } + } + Bound::Unbounded }; - (get(range.start), get(range.end)) + (get(sym::rustc_layout_scalar_valid_range_start), get(sym::rustc_layout_scalar_valid_range_end)) } /// Finds the appropriate Integer type and signedness for the given diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs index 1b5f4595ca3c..cec63566338f 100644 --- a/crates/hir-ty/src/method_resolution.rs +++ b/crates/hir-ty/src/method_resolution.rs @@ -8,11 +8,11 @@ use base_db::Crate; use hir_def::{ AdtId, AssocItemId, BlockId, ConstId, FunctionId, HasModule, ImplId, ItemContainerId, Lookup, ModuleId, TraitId, TypeAliasId, - attrs::AttrFlags, nameres::{DefMap, block_def_map, crate_def_map}, signatures::{ConstFlags, EnumFlags, FnFlags, StructFlags, TraitFlags, TypeAliasFlags}, }; use hir_expand::name::Name; +use intern::sym; use rustc_ast_ir::Mutability; use rustc_hash::{FxHashMap, FxHashSet}; use rustc_type_ir::{ @@ -230,8 +230,7 @@ impl TraitImpls { // FIXME: Reservation impls should be considered during coherence checks. If we are // (ever) to implement coherence checks, this filtering should be done by the trait // solver. - if AttrFlags::query(db, impl_id.into()).contains(AttrFlags::RUSTC_RESERVATION_IMPL) - { + if db.attrs(impl_id.into()).by_key(sym::rustc_reservation_impl).exists() { continue; } let target_trait = match db.impl_trait(impl_id) { diff --git a/crates/hir-ty/src/mir/eval/shim.rs b/crates/hir-ty/src/mir/eval/shim.rs index d4aab2d09496..4b1adecf8c87 100644 --- a/crates/hir-ty/src/mir/eval/shim.rs +++ b/crates/hir-ty/src/mir/eval/shim.rs @@ -3,11 +3,9 @@ //! 
use std::cmp::{self, Ordering}; -use hir_def::{ - CrateRootModuleId, attrs::AttrFlags, resolver::HasResolver, signatures::FunctionSignature, -}; +use hir_def::{CrateRootModuleId, resolver::HasResolver, signatures::FunctionSignature}; use hir_expand::name::Name; -use intern::sym; +use intern::{Symbol, sym}; use rustc_type_ir::inherent::{AdtDef, IntoKind, SliceLike, Ty as _}; use stdx::never; @@ -55,7 +53,7 @@ impl<'db> Evaluator<'db> { } let function_data = self.db.function_signature(def); - let attrs = AttrFlags::query(self.db, def.into()); + let attrs = self.db.attrs(def.into()); let is_intrinsic = FunctionSignature::is_intrinsic(self.db, def); if is_intrinsic { @@ -67,7 +65,7 @@ impl<'db> Evaluator<'db> { locals, span, !function_data.has_body() - || attrs.contains(AttrFlags::RUSTC_INTRINSIC_MUST_BE_OVERRIDDEN), + || attrs.by_key(sym::rustc_intrinsic_must_be_overridden).exists(), ); } let is_extern_c = match def.lookup(self.db).container { @@ -87,13 +85,18 @@ impl<'db> Evaluator<'db> { .map(|()| true); } - if attrs.intersects( - AttrFlags::RUSTC_ALLOCATOR - | AttrFlags::RUSTC_DEALLOCATOR - | AttrFlags::RUSTC_REALLOCATOR - | AttrFlags::RUSTC_ALLOCATOR_ZEROED, - ) { - self.exec_alloc_fn(attrs, args, destination)?; + let alloc_fn = + attrs.iter().filter_map(|it| it.path().as_ident()).map(|it| it.symbol()).find(|it| { + [ + &sym::rustc_allocator, + &sym::rustc_deallocator, + &sym::rustc_reallocator, + &sym::rustc_allocator_zeroed, + ] + .contains(it) + }); + if let Some(alloc_fn) = alloc_fn { + self.exec_alloc_fn(alloc_fn, args, destination)?; return Ok(true); } if let Some(it) = self.detect_lang_function(def) { @@ -242,14 +245,12 @@ impl<'db> Evaluator<'db> { fn exec_alloc_fn( &mut self, - alloc_fn: AttrFlags, + alloc_fn: &Symbol, args: &[IntervalAndTy<'db>], destination: Interval, ) -> Result<'db, ()> { match alloc_fn { - _ if alloc_fn - .intersects(AttrFlags::RUSTC_ALLOCATOR_ZEROED | AttrFlags::RUSTC_ALLOCATOR) => - { + _ if *alloc_fn == sym::rustc_allocator_zeroed 
|| *alloc_fn == sym::rustc_allocator => { let [size, align] = args else { return Err(MirEvalError::InternalError( "rustc_allocator args are not provided".into(), @@ -260,8 +261,8 @@ impl<'db> Evaluator<'db> { let result = self.heap_allocate(size, align)?; destination.write_from_bytes(self, &result.to_bytes())?; } - _ if alloc_fn.contains(AttrFlags::RUSTC_DEALLOCATOR) => { /* no-op for now */ } - _ if alloc_fn.contains(AttrFlags::RUSTC_REALLOCATOR) => { + _ if *alloc_fn == sym::rustc_deallocator => { /* no-op for now */ } + _ if *alloc_fn == sym::rustc_reallocator => { let [ptr, old_size, align, new_size] = args else { return Err(MirEvalError::InternalError( "rustc_allocator args are not provided".into(), @@ -287,14 +288,14 @@ impl<'db> Evaluator<'db> { fn detect_lang_function(&self, def: FunctionId) -> Option { use LangItem::*; - let attrs = AttrFlags::query(self.db, def.into()); + let attrs = self.db.attrs(def.into()); - if attrs.contains(AttrFlags::RUSTC_CONST_PANIC_STR) { + if attrs.by_key(sym::rustc_const_panic_str).exists() { // `#[rustc_const_panic_str]` is treated like `lang = "begin_panic"` by rustc CTFE. return Some(LangItem::BeginPanic); } - let candidate = attrs.lang_item_with_attrs(self.db, def.into())?; + let candidate = attrs.lang_item()?; // We want to execute these functions with special logic // `PanicFmt` is not detected here as it's redirected later. 
if [BeginPanic, SliceLen, DropInPlace].contains(&candidate) { diff --git a/crates/hir-ty/src/next_solver/interner.rs b/crates/hir-ty/src/next_solver/interner.rs index 7f7d596be9fb..ce8b76837a3c 100644 --- a/crates/hir-ty/src/next_solver/interner.rs +++ b/crates/hir-ty/src/next_solver/interner.rs @@ -9,7 +9,6 @@ use base_db::Crate; use hir_def::{ AdtId, AttrDefId, BlockId, CallableDefId, EnumVariantId, ItemContainerId, StructId, UnionId, VariantId, - attrs::AttrFlags, lang_item::LangItem, signatures::{FieldData, FnFlags, ImplFlags, StructFlags, TraitFlags}, }; @@ -468,28 +467,28 @@ impl AdtDef { let variants = vec![(VariantIdx(0), VariantDef::Struct(struct_id))]; - let data_repr = data.repr(db, struct_id); + let mut repr = ReprOptions::default(); + repr.align = data.repr.and_then(|r| r.align); + repr.pack = data.repr.and_then(|r| r.pack); + repr.int = data.repr.and_then(|r| r.int); + let mut repr_flags = ReprFlags::empty(); if flags.is_box { repr_flags.insert(ReprFlags::IS_LINEAR); } - if data_repr.is_some_and(|r| r.c()) { + if data.repr.is_some_and(|r| r.c()) { repr_flags.insert(ReprFlags::IS_C); } - if data_repr.is_some_and(|r| r.simd()) { + if data.repr.is_some_and(|r| r.simd()) { repr_flags.insert(ReprFlags::IS_SIMD); } - let repr = ReprOptions { - align: data_repr.and_then(|r| r.align), - pack: data_repr.and_then(|r| r.pack), - int: data_repr.and_then(|r| r.int), - flags: repr_flags, - ..ReprOptions::default() - }; + repr.flags = repr_flags; (flags, variants, repr) } AdtId::UnionId(union_id) => { + let data = db.union_signature(union_id); + let flags = AdtFlags { is_enum: false, is_union: true, @@ -502,24 +501,22 @@ impl AdtDef { let variants = vec![(VariantIdx(0), VariantDef::Union(union_id))]; - let data_repr = AttrFlags::repr(db, union_id.into()); + let mut repr = ReprOptions::default(); + repr.align = data.repr.and_then(|r| r.align); + repr.pack = data.repr.and_then(|r| r.pack); + repr.int = data.repr.and_then(|r| r.int); + let mut repr_flags = 
ReprFlags::empty(); if flags.is_box { repr_flags.insert(ReprFlags::IS_LINEAR); } - if data_repr.is_some_and(|r| r.c()) { + if data.repr.is_some_and(|r| r.c()) { repr_flags.insert(ReprFlags::IS_C); } - if data_repr.is_some_and(|r| r.simd()) { + if data.repr.is_some_and(|r| r.simd()) { repr_flags.insert(ReprFlags::IS_SIMD); } - let repr = ReprOptions { - align: data_repr.and_then(|r| r.align), - pack: data_repr.and_then(|r| r.pack), - int: data_repr.and_then(|r| r.int), - flags: repr_flags, - ..ReprOptions::default() - }; + repr.flags = repr_flags; (flags, variants, repr) } @@ -543,26 +540,24 @@ impl AdtDef { .map(|(idx, v)| (idx, VariantDef::Enum(v.0))) .collect(); - let data_repr = AttrFlags::repr(db, enum_id.into()); + let data = db.enum_signature(enum_id); + + let mut repr = ReprOptions::default(); + repr.align = data.repr.and_then(|r| r.align); + repr.pack = data.repr.and_then(|r| r.pack); + repr.int = data.repr.and_then(|r| r.int); let mut repr_flags = ReprFlags::empty(); if flags.is_box { repr_flags.insert(ReprFlags::IS_LINEAR); } - if data_repr.is_some_and(|r| r.c()) { + if data.repr.is_some_and(|r| r.c()) { repr_flags.insert(ReprFlags::IS_C); } - if data_repr.is_some_and(|r| r.simd()) { + if data.repr.is_some_and(|r| r.simd()) { repr_flags.insert(ReprFlags::IS_SIMD); } - - let repr = ReprOptions { - align: data_repr.and_then(|r| r.align), - pack: data_repr.and_then(|r| r.pack), - int: data_repr.and_then(|r| r.int), - flags: repr_flags, - ..ReprOptions::default() - }; + repr.flags = repr_flags; (flags, variants, repr) } diff --git a/crates/hir-ty/src/target_feature.rs b/crates/hir-ty/src/target_feature.rs index 2bd675ba124e..0a8ed2cf0cab 100644 --- a/crates/hir-ty/src/target_feature.rs +++ b/crates/hir-ty/src/target_feature.rs @@ -1,35 +1,31 @@ //! Stuff for handling `#[target_feature]` (needed for unsafe check). 
-use std::borrow::Cow; use std::sync::LazyLock; -use hir_def::FunctionId; -use hir_def::attrs::AttrFlags; -use intern::Symbol; +use hir_def::attr::Attrs; +use hir_def::tt; +use intern::{Symbol, sym}; use rustc_hash::{FxHashMap, FxHashSet}; -use crate::db::HirDatabase; - #[derive(Debug, Default, Clone)] -pub struct TargetFeatures<'db> { - pub(crate) enabled: Cow<'db, FxHashSet>, +pub struct TargetFeatures { + pub(crate) enabled: FxHashSet, } -impl<'db> TargetFeatures<'db> { - pub fn from_fn(db: &'db dyn HirDatabase, owner: FunctionId) -> Self { - let mut result = TargetFeatures::from_fn_no_implications(db, owner); +impl TargetFeatures { + pub fn from_attrs(attrs: &Attrs) -> Self { + let mut result = TargetFeatures::from_attrs_no_implications(attrs); result.expand_implications(); result } fn expand_implications(&mut self) { let all_implications = LazyLock::force(&TARGET_FEATURE_IMPLICATIONS); - let enabled = self.enabled.to_mut(); - let mut queue = enabled.iter().cloned().collect::>(); + let mut queue = self.enabled.iter().cloned().collect::>(); while let Some(feature) = queue.pop() { if let Some(implications) = all_implications.get(&feature) { for implication in implications { - if enabled.insert(implication.clone()) { + if self.enabled.insert(implication.clone()) { queue.push(implication.clone()); } } @@ -38,9 +34,25 @@ impl<'db> TargetFeatures<'db> { } /// Retrieves the target features from the attributes, and does not expand the target features implied by them. - pub(crate) fn from_fn_no_implications(db: &'db dyn HirDatabase, owner: FunctionId) -> Self { - let enabled = AttrFlags::target_features(db, owner); - Self { enabled: Cow::Borrowed(enabled) } + pub(crate) fn from_attrs_no_implications(attrs: &Attrs) -> Self { + let enabled = attrs + .by_key(sym::target_feature) + .tt_values() + .filter_map(|tt| match tt.token_trees().flat_tokens() { + [ + tt::TokenTree::Leaf(tt::Leaf::Ident(enable_ident)), + tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. 
})), + tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { + kind: tt::LitKind::Str, + symbol: features, + .. + })), + ] if enable_ident.sym == sym::enable => Some(features), + _ => None, + }) + .flat_map(|features| features.as_str().split(',').map(Symbol::intern)) + .collect(); + Self { enabled } } } diff --git a/crates/hir-ty/src/tests/incremental.rs b/crates/hir-ty/src/tests/incremental.rs index 50625c1c26d5..bc4701970c76 100644 --- a/crates/hir-ty/src/tests/incremental.rs +++ b/crates/hir-ty/src/tests/incremental.rs @@ -31,6 +31,7 @@ fn foo() -> i32 { &[("infer_shim", 1)], expect_test::expect![[r#" [ + "source_root_crates_shim", "crate_local_def_map", "file_item_tree_query", "ast_id_map_shim", @@ -39,7 +40,7 @@ fn foo() -> i32 { "infer_shim", "function_signature_shim", "function_signature_with_source_map_shim", - "AttrFlags::query_", + "attrs_shim", "body_shim", "body_with_source_map_shim", "trait_environment_shim", @@ -78,7 +79,7 @@ fn foo() -> i32 { "ast_id_map_shim", "file_item_tree_query", "real_span_map_shim", - "AttrFlags::query_", + "attrs_shim", "function_signature_with_source_map_shim", "function_signature_shim", "body_with_source_map_shim", @@ -117,6 +118,7 @@ fn baz() -> i32 { &[("infer_shim", 3)], expect_test::expect![[r#" [ + "source_root_crates_shim", "crate_local_def_map", "file_item_tree_query", "ast_id_map_shim", @@ -125,7 +127,7 @@ fn baz() -> i32 { "infer_shim", "function_signature_shim", "function_signature_with_source_map_shim", - "AttrFlags::query_", + "attrs_shim", "body_shim", "body_with_source_map_shim", "trait_environment_shim", @@ -133,8 +135,8 @@ fn baz() -> i32 { "expr_scopes_shim", "lang_item", "crate_lang_items", - "AttrFlags::query_", - "AttrFlags::query_", + "attrs_shim", + "attrs_shim", "infer_shim", "function_signature_shim", "function_signature_with_source_map_shim", @@ -187,13 +189,13 @@ fn baz() -> i32 { "ast_id_map_shim", "file_item_tree_query", "real_span_map_shim", - "AttrFlags::query_", + "attrs_shim", 
"function_signature_with_source_map_shim", "function_signature_shim", "body_with_source_map_shim", "body_shim", - "AttrFlags::query_", - "AttrFlags::query_", + "attrs_shim", + "attrs_shim", "function_signature_with_source_map_shim", "function_signature_shim", "body_with_source_map_shim", @@ -233,6 +235,7 @@ $0", &[("trait_impls_in_crate_shim", 1)], expect_test::expect![[r#" [ + "source_root_crates_shim", "crate_local_def_map", "file_item_tree_query", "ast_id_map_shim", @@ -304,6 +307,7 @@ $0", &[("trait_impls_in_crate_shim", 1)], expect_test::expect![[r#" [ + "source_root_crates_shim", "crate_local_def_map", "file_item_tree_query", "ast_id_map_shim", @@ -376,6 +380,7 @@ $0", &[("trait_impls_in_crate_shim", 1)], expect_test::expect![[r#" [ + "source_root_crates_shim", "crate_local_def_map", "file_item_tree_query", "ast_id_map_shim", @@ -449,6 +454,7 @@ $0", &[("trait_impls_in_crate_shim", 1)], expect_test::expect![[r#" [ + "source_root_crates_shim", "crate_local_def_map", "file_item_tree_query", "ast_id_map_shim", @@ -497,14 +503,14 @@ impl SomeStruct { "real_span_map_shim", "crate_local_def_map", "trait_impls_in_crate_shim", - "AttrFlags::query_", + "attrs_shim", "impl_trait_with_diagnostics_shim", "impl_signature_shim", "impl_signature_with_source_map_shim", "impl_self_ty_with_diagnostics_shim", "struct_signature_shim", "struct_signature_with_source_map_shim", - "AttrFlags::query_", + "attrs_shim", ] "#]], ); @@ -554,6 +560,7 @@ fn main() { &[("trait_solve_shim", 0)], expect_test::expect![[r#" [ + "source_root_crates_shim", "crate_local_def_map", "file_item_tree_query", "ast_id_map_shim", @@ -562,22 +569,22 @@ fn main() { "TraitItems::query_with_diagnostics_", "body_shim", "body_with_source_map_shim", - "AttrFlags::query_", + "attrs_shim", "ImplItems::of_", "infer_shim", "trait_signature_shim", "trait_signature_with_source_map_shim", - "AttrFlags::query_", + "attrs_shim", "function_signature_shim", "function_signature_with_source_map_shim", - "AttrFlags::query_", 
+ "attrs_shim", "body_shim", "body_with_source_map_shim", "trait_environment_shim", "lang_item", "crate_lang_items", - "AttrFlags::query_", - "AttrFlags::query_", + "attrs_shim", + "attrs_shim", "generic_predicates_shim", "return_type_impl_traits_shim", "infer_shim", @@ -659,22 +666,22 @@ fn main() { "crate_local_def_map", "TraitItems::query_with_diagnostics_", "body_with_source_map_shim", - "AttrFlags::query_", + "attrs_shim", "body_shim", "ImplItems::of_", "infer_shim", - "AttrFlags::query_", + "attrs_shim", "trait_signature_with_source_map_shim", - "AttrFlags::query_", + "attrs_shim", "function_signature_with_source_map_shim", "function_signature_shim", "body_with_source_map_shim", "body_shim", "trait_environment_shim", "crate_lang_items", - "AttrFlags::query_", - "AttrFlags::query_", - "AttrFlags::query_", + "attrs_shim", + "attrs_shim", + "attrs_shim", "generic_predicates_shim", "return_type_impl_traits_shim", "infer_shim", diff --git a/crates/hir-ty/src/utils.rs b/crates/hir-ty/src/utils.rs index 41dc4dc53375..ca5e33fe6ad0 100644 --- a/crates/hir-ty/src/utils.rs +++ b/crates/hir-ty/src/utils.rs @@ -9,7 +9,6 @@ use base_db::{ }; use hir_def::{ EnumId, EnumVariantId, FunctionId, Lookup, TraitId, - attrs::AttrFlags, db::DefDatabase, hir::generics::WherePredicate, lang_item::LangItem, @@ -120,7 +119,7 @@ pub fn target_feature_is_safe_in_target(target: &TargetData) -> TargetFeatureIsS pub fn is_fn_unsafe_to_call( db: &dyn HirDatabase, func: FunctionId, - caller_target_features: &TargetFeatures<'_>, + caller_target_features: &TargetFeatures, call_edition: Edition, target_feature_is_safe: TargetFeatureIsSafeInTarget, ) -> Unsafety { @@ -131,7 +130,8 @@ pub fn is_fn_unsafe_to_call( if data.has_target_feature() && target_feature_is_safe == TargetFeatureIsSafeInTarget::No { // RFC 2396 . 
- let callee_target_features = TargetFeatures::from_fn_no_implications(db, func); + let callee_target_features = + TargetFeatures::from_attrs_no_implications(&db.attrs(func.into())); if !caller_target_features.enabled.is_superset(&callee_target_features.enabled) { return Unsafety::Unsafe; } @@ -152,7 +152,7 @@ pub fn is_fn_unsafe_to_call( if is_intrinsic_block { // legacy intrinsics // extern "rust-intrinsic" intrinsics are unsafe unless they have the rustc_safe_intrinsic attribute - if AttrFlags::query(db, func.into()).contains(AttrFlags::RUSTC_SAFE_INTRINSIC) { + if db.attrs(func.into()).by_key(sym::rustc_safe_intrinsic).exists() { Unsafety::Safe } else { Unsafety::Unsafe diff --git a/crates/hir/src/attrs.rs b/crates/hir/src/attrs.rs index 3376c51fe5c9..147f1b8653be 100644 --- a/crates/hir/src/attrs.rs +++ b/crates/hir/src/attrs.rs @@ -2,12 +2,9 @@ use std::ops::ControlFlow; -use cfg::CfgExpr; -use either::Either; use hir_def::{ - AssocItemId, AttrDefId, FieldId, InternedModuleId, LifetimeParamId, ModuleDefId, - TypeOrConstParamId, - attrs::{AttrFlags, Docs, IsInnerDoc}, + AssocItemId, AttrDefId, ModuleDefId, + attr::AttrsWithOwner, expr_store::path::Path, item_scope::ItemInNs, per_ns::Namespace, @@ -18,7 +15,6 @@ use hir_expand::{ name::Name, }; use hir_ty::{db::HirDatabase, method_resolution}; -use intern::Symbol; use crate::{ Adt, AsAssocItem, AssocItem, BuiltinType, Const, ConstParam, DocLinkDef, Enum, ExternCrateDecl, @@ -26,161 +22,28 @@ use crate::{ Struct, Trait, Type, TypeAlias, TypeParam, Union, Variant, VariantDef, }; -#[derive(Debug, Clone, Copy)] -pub enum AttrsOwner { - AttrDef(AttrDefId), - Field(FieldId), - LifetimeParam(LifetimeParamId), - TypeOrConstParam(TypeOrConstParamId), -} - -impl AttrsOwner { - #[inline] - fn attr_def(&self) -> Option { - match self { - AttrsOwner::AttrDef(it) => Some(*it), - _ => None, - } - } -} - -#[derive(Debug, Clone)] -pub struct AttrsWithOwner { - pub(crate) attrs: AttrFlags, - owner: AttrsOwner, -} - -impl 
AttrsWithOwner { - fn new(db: &dyn HirDatabase, owner: AttrDefId) -> Self { - Self { attrs: AttrFlags::query(db, owner), owner: AttrsOwner::AttrDef(owner) } - } - - fn new_field(db: &dyn HirDatabase, owner: FieldId) -> Self { - Self { attrs: AttrFlags::query_field(db, owner), owner: AttrsOwner::Field(owner) } - } - - fn new_lifetime_param(db: &dyn HirDatabase, owner: LifetimeParamId) -> Self { - Self { - attrs: AttrFlags::query_lifetime_param(db, owner), - owner: AttrsOwner::LifetimeParam(owner), - } - } - fn new_type_or_const_param(db: &dyn HirDatabase, owner: TypeOrConstParamId) -> Self { - Self { - attrs: AttrFlags::query_type_or_const_param(db, owner), - owner: AttrsOwner::TypeOrConstParam(owner), - } - } - - #[inline] - pub fn is_unstable(&self) -> bool { - self.attrs.contains(AttrFlags::IS_UNSTABLE) - } - - #[inline] - pub fn is_macro_export(&self) -> bool { - self.attrs.contains(AttrFlags::IS_MACRO_EXPORT) - } - - #[inline] - pub fn is_doc_notable_trait(&self) -> bool { - self.attrs.contains(AttrFlags::IS_DOC_NOTABLE_TRAIT) - } - - #[inline] - pub fn is_doc_hidden(&self) -> bool { - self.attrs.contains(AttrFlags::IS_DOC_HIDDEN) - } - - #[inline] - pub fn is_deprecated(&self) -> bool { - self.attrs.contains(AttrFlags::IS_DEPRECATED) - } - - #[inline] - pub fn is_non_exhaustive(&self) -> bool { - self.attrs.contains(AttrFlags::NON_EXHAUSTIVE) - } - - #[inline] - pub fn is_test(&self) -> bool { - self.attrs.contains(AttrFlags::IS_TEST) - } - - #[inline] - pub fn lang(&self, db: &dyn HirDatabase) -> Option<&'static str> { - self.owner - .attr_def() - .and_then(|owner| self.attrs.lang_item_with_attrs(db, owner)) - .map(|lang| lang.name()) - } - - #[inline] - pub fn doc_aliases<'db>(&self, db: &'db dyn HirDatabase) -> &'db [Symbol] { - let owner = match self.owner { - AttrsOwner::AttrDef(it) => Either::Left(it), - AttrsOwner::Field(it) => Either::Right(it), - AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => return &[], - }; - 
self.attrs.doc_aliases(db, owner) - } - - #[inline] - pub fn cfgs<'db>(&self, db: &'db dyn HirDatabase) -> Option<&'db CfgExpr> { - let owner = match self.owner { - AttrsOwner::AttrDef(it) => Either::Left(it), - AttrsOwner::Field(it) => Either::Right(it), - AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => return None, - }; - self.attrs.cfgs(db, owner) - } - - #[inline] - pub fn hir_docs<'db>(&self, db: &'db dyn HirDatabase) -> Option<&'db Docs> { - match self.owner { - AttrsOwner::AttrDef(it) => AttrFlags::docs(db, it).as_deref(), - AttrsOwner::Field(it) => AttrFlags::field_docs(db, it), - AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => None, - } - } -} - -pub trait HasAttrs: Sized { - #[inline] - fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner { - match self.attr_id(db) { - AttrsOwner::AttrDef(it) => AttrsWithOwner::new(db, it), - AttrsOwner::Field(it) => AttrsWithOwner::new_field(db, it), - AttrsOwner::LifetimeParam(it) => AttrsWithOwner::new_lifetime_param(db, it), - AttrsOwner::TypeOrConstParam(it) => AttrsWithOwner::new_type_or_const_param(db, it), - } - } - +pub trait HasAttrs { + fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner; #[doc(hidden)] - fn attr_id(self, db: &dyn HirDatabase) -> AttrsOwner; - - #[inline] - fn hir_docs(self, db: &dyn HirDatabase) -> Option<&Docs> { - match self.attr_id(db) { - AttrsOwner::AttrDef(it) => AttrFlags::docs(db, it).as_deref(), - AttrsOwner::Field(it) => AttrFlags::field_docs(db, it), - AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => None, - } - } + fn attr_id(self) -> AttrDefId; } macro_rules! 
impl_has_attrs { ($(($def:ident, $def_id:ident),)*) => {$( impl HasAttrs for $def { - #[inline] - fn attr_id(self, _db: &dyn HirDatabase) -> AttrsOwner { - AttrsOwner::AttrDef(AttrDefId::$def_id(self.into())) + fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner { + let def = AttrDefId::$def_id(self.into()); + AttrsWithOwner::new(db, def) + } + fn attr_id(self) -> AttrDefId { + AttrDefId::$def_id(self.into()) } } )*}; } impl_has_attrs![ + (Field, FieldId), (Variant, EnumVariantId), (Static, StaticId), (Const, ConstId), @@ -189,6 +52,8 @@ impl_has_attrs![ (Macro, MacroId), (Function, FunctionId), (Adt, AdtId), + (Module, ModuleId), + (GenericParam, GenericParamId), (Impl, ImplId), (ExternCrateDecl, ExternCrateId), ]; @@ -196,9 +61,11 @@ impl_has_attrs![ macro_rules! impl_has_attrs_enum { ($($variant:ident),* for $enum:ident) => {$( impl HasAttrs for $variant { - #[inline] - fn attr_id(self, db: &dyn HirDatabase) -> AttrsOwner { - $enum::$variant(self).attr_id(db) + fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner { + $enum::$variant(self).attrs(db) + } + fn attr_id(self) -> AttrDefId { + $enum::$variant(self).attr_id() } } )*}; @@ -207,46 +74,30 @@ macro_rules! 
impl_has_attrs_enum { impl_has_attrs_enum![Struct, Union, Enum for Adt]; impl_has_attrs_enum![TypeParam, ConstParam, LifetimeParam for GenericParam]; -impl HasAttrs for Module { - #[inline] - fn attr_id(self, db: &dyn HirDatabase) -> AttrsOwner { - AttrsOwner::AttrDef(AttrDefId::ModuleId(InternedModuleId::new(db, self.id))) - } -} - -impl HasAttrs for GenericParam { - #[inline] - fn attr_id(self, _db: &dyn HirDatabase) -> AttrsOwner { +impl HasAttrs for AssocItem { + fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner { match self { - GenericParam::TypeParam(it) => AttrsOwner::TypeOrConstParam(it.merge().into()), - GenericParam::ConstParam(it) => AttrsOwner::TypeOrConstParam(it.merge().into()), - GenericParam::LifetimeParam(it) => AttrsOwner::LifetimeParam(it.into()), + AssocItem::Function(it) => it.attrs(db), + AssocItem::Const(it) => it.attrs(db), + AssocItem::TypeAlias(it) => it.attrs(db), } } -} - -impl HasAttrs for AssocItem { - #[inline] - fn attr_id(self, db: &dyn HirDatabase) -> AttrsOwner { + fn attr_id(self) -> AttrDefId { match self { - AssocItem::Function(it) => it.attr_id(db), - AssocItem::Const(it) => it.attr_id(db), - AssocItem::TypeAlias(it) => it.attr_id(db), + AssocItem::Function(it) => it.attr_id(), + AssocItem::Const(it) => it.attr_id(), + AssocItem::TypeAlias(it) => it.attr_id(), } } } impl HasAttrs for crate::Crate { - #[inline] - fn attr_id(self, db: &dyn HirDatabase) -> AttrsOwner { - self.root_module().attr_id(db) + fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner { + let def = AttrDefId::ModuleId(self.root_module().id); + AttrsWithOwner::new(db, def) } -} - -impl HasAttrs for Field { - #[inline] - fn attr_id(self, _db: &dyn HirDatabase) -> AttrsOwner { - AttrsOwner::Field(self.into()) + fn attr_id(self) -> AttrDefId { + AttrDefId::ModuleId(self.root_module().id) } } @@ -256,22 +107,21 @@ pub fn resolve_doc_path_on( def: impl HasAttrs + Copy, link: &str, ns: Option, - is_inner_doc: IsInnerDoc, + is_inner_doc: bool, ) -> Option { - 
resolve_doc_path_on_(db, link, def.attr_id(db), ns, is_inner_doc) + resolve_doc_path_on_(db, link, def.attr_id(), ns, is_inner_doc) } fn resolve_doc_path_on_( db: &dyn HirDatabase, link: &str, - attr_id: AttrsOwner, + attr_id: AttrDefId, ns: Option, - is_inner_doc: IsInnerDoc, + is_inner_doc: bool, ) -> Option { let resolver = match attr_id { - AttrsOwner::AttrDef(AttrDefId::ModuleId(it)) => { - let it = it.loc(db); - if is_inner_doc.yes() { + AttrDefId::ModuleId(it) => { + if is_inner_doc { it.resolver(db) } else if let Some(parent) = Module::from(it).parent(db) { parent.id.resolver(db) @@ -279,20 +129,20 @@ fn resolve_doc_path_on_( it.resolver(db) } } - AttrsOwner::AttrDef(AttrDefId::AdtId(it)) => it.resolver(db), - AttrsOwner::AttrDef(AttrDefId::FunctionId(it)) => it.resolver(db), - AttrsOwner::AttrDef(AttrDefId::EnumVariantId(it)) => it.resolver(db), - AttrsOwner::AttrDef(AttrDefId::StaticId(it)) => it.resolver(db), - AttrsOwner::AttrDef(AttrDefId::ConstId(it)) => it.resolver(db), - AttrsOwner::AttrDef(AttrDefId::TraitId(it)) => it.resolver(db), - AttrsOwner::AttrDef(AttrDefId::TypeAliasId(it)) => it.resolver(db), - AttrsOwner::AttrDef(AttrDefId::ImplId(it)) => it.resolver(db), - AttrsOwner::AttrDef(AttrDefId::ExternBlockId(it)) => it.resolver(db), - AttrsOwner::AttrDef(AttrDefId::UseId(it)) => it.resolver(db), - AttrsOwner::AttrDef(AttrDefId::MacroId(it)) => it.resolver(db), - AttrsOwner::AttrDef(AttrDefId::ExternCrateId(it)) => it.resolver(db), - AttrsOwner::Field(it) => it.parent.resolver(db), - AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => return None, + AttrDefId::FieldId(it) => it.parent.resolver(db), + AttrDefId::AdtId(it) => it.resolver(db), + AttrDefId::FunctionId(it) => it.resolver(db), + AttrDefId::EnumVariantId(it) => it.resolver(db), + AttrDefId::StaticId(it) => it.resolver(db), + AttrDefId::ConstId(it) => it.resolver(db), + AttrDefId::TraitId(it) => it.resolver(db), + AttrDefId::TypeAliasId(it) => it.resolver(db), + 
AttrDefId::ImplId(it) => it.resolver(db), + AttrDefId::ExternBlockId(it) => it.resolver(db), + AttrDefId::UseId(it) => it.resolver(db), + AttrDefId::MacroId(it) => it.resolver(db), + AttrDefId::ExternCrateId(it) => it.resolver(db), + AttrDefId::GenericParamId(_) => return None, }; let mut modpath = doc_modpath_from_str(link)?; diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs index 6ef6ea272e58..a6d67e8fb4fb 100644 --- a/crates/hir/src/diagnostics.rs +++ b/crates/hir/src/diagnostics.rs @@ -153,7 +153,8 @@ pub struct UnresolvedImport { #[derive(Debug, Clone, Eq, PartialEq)] pub struct UnresolvedMacroCall { - pub range: InFile, + pub macro_call: InFile, + pub precise_location: Option, pub path: ModPath, pub is_bang: bool, } @@ -184,7 +185,8 @@ pub struct InactiveCode { #[derive(Debug, Clone, Eq, PartialEq)] pub struct MacroError { - pub range: InFile, + pub node: InFile, + pub precise_location: Option, pub message: String, pub error: bool, pub kind: &'static str, @@ -192,7 +194,8 @@ pub struct MacroError { #[derive(Debug, Clone, Eq, PartialEq)] pub struct MacroExpansionParseError { - pub range: InFile, + pub node: InFile, + pub precise_location: Option, pub errors: Arc<[SyntaxError]>, } @@ -210,12 +213,12 @@ pub struct UnimplementedBuiltinMacro { #[derive(Debug)] pub struct InvalidDeriveTarget { - pub range: InFile, + pub node: InFile, } #[derive(Debug)] pub struct MalformedDerive { - pub range: InFile, + pub node: InFile, } #[derive(Debug)] diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 1b24aad103b4..941890312317 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -45,12 +45,11 @@ use arrayvec::ArrayVec; use base_db::{CrateDisplayName, CrateOrigin, LangCrateOrigin}; use either::Either; use hir_def::{ - AdtId, AssocItemId, AssocItemLoc, CallableDefId, ConstId, ConstParamId, CrateRootModuleId, - DefWithBodyId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId, FunctionId, GenericDefId, - GenericParamId, 
HasModule, ImplId, InternedModuleId, ItemContainerId, LifetimeParamId, + AdtId, AssocItemId, AssocItemLoc, AttrDefId, CallableDefId, ConstId, ConstParamId, + CrateRootModuleId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId, + FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, ItemContainerId, LifetimeParamId, LocalFieldId, Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId, SyntheticSyntax, TupleId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, - attrs::AttrFlags, expr_store::{ExpressionStoreDiagnostics, ExpressionStoreSourceMap}, hir::{ BindingAnnotation, BindingId, Expr, ExprId, ExprOrPatId, LabelId, Pat, @@ -64,12 +63,13 @@ use hir_def::{ }, per_ns::PerNs, resolver::{HasResolver, Resolver}, - signatures::{EnumSignature, ImplFlags, StaticFlags, StructFlags, TraitFlags, VariantFields}, + signatures::{ImplFlags, StaticFlags, StructFlags, TraitFlags, VariantFields}, src::HasSource as _, visibility::visibility_from_ast, }; use hir_expand::{ - AstId, MacroCallKind, RenderedExpandError, ValueResult, proc_macro::ProcMacroKind, + AstId, MacroCallKind, RenderedExpandError, ValueResult, attrs::collect_attrs, + proc_macro::ProcMacroKind, }; use hir_ty::{ TraitEnvironment, TyDefId, TyLoweringDiagnostic, ValueTyDefId, all_super_traits, autoderef, @@ -98,8 +98,8 @@ use smallvec::SmallVec; use span::{AstIdNode, Edition, FileId}; use stdx::{format_to, impl_from, never}; use syntax::{ - AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, TextRange, ToSmolStr, - ast::{self, HasName, HasVisibility as _}, + AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, T, TextRange, ToSmolStr, + ast::{self, HasAttrs as _, HasName, HasVisibility as _}, format_smolstr, }; use triomphe::{Arc, ThinArc}; @@ -107,7 +107,7 @@ use triomphe::{Arc, ThinArc}; use crate::db::{DefDatabase, HirDatabase}; pub use crate::{ - attrs::{AttrsWithOwner, HasAttrs, resolve_doc_path_on}, + attrs::{HasAttrs, resolve_doc_path_on}, diagnostics::*, 
has_source::HasSource, semantics::{ @@ -130,7 +130,7 @@ pub use { hir_def::{ Complete, FindPathConfig, - attrs::{Docs, IsInnerDoc}, + attr::{AttrSourceMap, Attrs, AttrsWithOwner}, find_path::PrefixKind, import_map, lang_item::LangItem, @@ -144,6 +144,7 @@ pub use { }, hir_expand::{ EditionedFileId, ExpandResult, HirFileId, MacroCallId, MacroKind, + attrs::{Attr, AttrId}, change::ChangeWithProcMacros, files::{ FilePosition, FilePositionWrapper, FileRange, FileRangeWrapper, HirFilePosition, @@ -290,10 +291,11 @@ impl Crate { } /// Try to get the root URL of the documentation of a crate. - pub fn get_html_root_url(self, db: &dyn HirDatabase) -> Option { + pub fn get_html_root_url(self: &Crate, db: &dyn HirDatabase) -> Option { // Look for #![doc(html_root_url = "...")] - let doc_url = AttrFlags::doc_html_root_url(db, self.id); - doc_url.as_ref().map(|s| s.trim_matches('"').trim_end_matches('/').to_owned() + "/") + let attrs = db.attrs(AttrDefId::ModuleId(self.root_module().into())); + let doc_url = attrs.by_key(sym::doc).find_string_value_in_tt(sym::html_root_url); + doc_url.map(|s| s.trim_matches('"').trim_end_matches('/').to_owned() + "/") } pub fn cfg<'db>(&self, db: &'db dyn HirDatabase) -> &'db CfgOptions { @@ -638,7 +640,7 @@ impl Module { // FIXME: This is accidentally quadratic. 
continue; } - emit_def_diagnostic(db, acc, diag, edition, def_map.krate()); + emit_def_diagnostic(db, acc, diag, edition); } if !self.id.is_block_module() { @@ -657,9 +659,8 @@ impl Module { acc.extend(def.diagnostics(db, style_lints)) } ModuleDef::Trait(t) => { - let krate = t.krate(db); for diag in TraitItems::query_with_diagnostics(db, t.id).1.iter() { - emit_def_diagnostic(db, acc, diag, edition, krate.id); + emit_def_diagnostic(db, acc, diag, edition); } for item in t.items(db) { @@ -777,7 +778,7 @@ impl Module { let ast_id_map = db.ast_id_map(file_id); for diag in impl_def.id.impl_items_with_diagnostics(db).1.iter() { - emit_def_diagnostic(db, acc, diag, edition, loc.container.krate()); + emit_def_diagnostic(db, acc, diag, edition); } if inherent_impls.invalid_impls().contains(&impl_def.id) { @@ -808,10 +809,21 @@ impl Module { return None; } let parent = impl_def.id.into(); - let (lifetimes_attrs, type_and_consts_attrs) = - AttrFlags::query_generic_params(db, parent); - let res = lifetimes_attrs.values().any(|it| it.contains(AttrFlags::MAY_DANGLE)) - || type_and_consts_attrs.values().any(|it| it.contains(AttrFlags::MAY_DANGLE)); + let generic_params = db.generic_params(parent); + let lifetime_params = generic_params.iter_lt().map(|(local_id, _)| { + GenericParamId::LifetimeParamId(LifetimeParamId { parent, local_id }) + }); + let type_params = generic_params + .iter_type_or_consts() + .filter(|(_, it)| it.type_param().is_some()) + .map(|(local_id, _)| { + GenericParamId::TypeParamId(TypeParamId::from_unchecked( + TypeOrConstParamId { parent, local_id }, + )) + }); + let res = type_params.chain(lifetime_params).any(|p| { + db.attrs(AttrDefId::GenericParamId(p)).by_key(sym::may_dangle).exists() + }); Some(res) })() .unwrap_or(false); @@ -972,17 +984,6 @@ impl Module { ) -> Option { hir_def::find_path::find_path(db, item.into().into(), self.into(), prefix_kind, true, cfg) } - - #[inline] - pub fn doc_keyword(self, db: &dyn HirDatabase) -> Option { - 
AttrFlags::doc_keyword(db, InternedModuleId::new(db, self.id)) - } - - /// Whether it has `#[path = "..."]` attribute. - #[inline] - pub fn has_path(&self, db: &dyn HirDatabase) -> bool { - self.attrs(db).attrs.contains(AttrFlags::HAS_PATH) - } } fn macro_call_diagnostics<'db>( @@ -997,19 +998,31 @@ fn macro_call_diagnostics<'db>( if let Some(err) = err { let loc = db.lookup_intern_macro_call(macro_call_id); let file_id = loc.kind.file_id(); - let mut range = precise_macro_call_location(&loc.kind, db, loc.krate); + let node = + InFile::new(file_id, db.ast_id_map(file_id).get_erased(loc.kind.erased_ast_id())); let RenderedExpandError { message, error, kind } = err.render_to_string(db); - if Some(err.span().anchor.file_id) == file_id.file_id().map(|it| it.editioned_file_id(db)) { - range.value = err.span().range - + db.ast_id_map(file_id).get_erased(err.span().anchor.ast_id).text_range().start(); - } - acc.push(MacroError { range, message, error, kind }.into()); + let editioned_file_id = EditionedFileId::from_span(db, err.span().anchor.file_id); + let precise_location = if editioned_file_id == file_id { + Some( + err.span().range + + db.ast_id_map(editioned_file_id.into()) + .get_erased(err.span().anchor.ast_id) + .text_range() + .start(), + ) + } else { + None + }; + acc.push(MacroError { node, precise_location, message, error, kind }.into()); } if !parse_errors.is_empty() { let loc = db.lookup_intern_macro_call(macro_call_id); - let range = precise_macro_call_location(&loc.kind, db, loc.krate); - acc.push(MacroExpansionParseError { range, errors: parse_errors.clone() }.into()) + let (node, precise_location) = precise_macro_call_location(&loc.kind, db); + acc.push( + MacroExpansionParseError { node, precise_location, errors: parse_errors.clone() } + .into(), + ) } } @@ -1033,7 +1046,6 @@ fn emit_macro_def_diagnostics<'db>( acc, &DefDiagnosticKind::MacroDefError { ast, message: e.to_string() }, edition, - m.krate(db).id, ); } } @@ -1043,9 +1055,8 @@ fn 
emit_def_diagnostic<'db>( acc: &mut Vec>, diag: &DefDiagnostic, edition: Edition, - krate: base_db::Crate, ) { - emit_def_diagnostic_(db, acc, &diag.kind, edition, krate) + emit_def_diagnostic_(db, acc, &diag.kind, edition) } fn emit_def_diagnostic_<'db>( @@ -1053,7 +1064,6 @@ fn emit_def_diagnostic_<'db>( acc: &mut Vec>, diag: &DefDiagnosticKind, edition: Edition, - krate: base_db::Crate, ) { match diag { DefDiagnosticKind::UnresolvedModule { ast: declaration, candidates } => { @@ -1076,7 +1086,8 @@ fn emit_def_diagnostic_<'db>( let RenderedExpandError { message, error, kind } = err.render_to_string(db); acc.push( MacroError { - range: InFile::new(ast.file_id, item.text_range()), + node: InFile::new(ast.file_id, item.syntax_node_ptr()), + precise_location: None, message: format!("{}: {message}", path.display(db, edition)), error, kind, @@ -1106,10 +1117,11 @@ fn emit_def_diagnostic_<'db>( ); } DefDiagnosticKind::UnresolvedMacroCall { ast, path } => { - let location = precise_macro_call_location(ast, db, krate); + let (node, precise_location) = precise_macro_call_location(ast, db); acc.push( UnresolvedMacroCall { - range: location, + macro_call: node, + precise_location, path: path.clone(), is_bang: matches!(ast, MacroCallKind::FnLike { .. 
}), } @@ -1128,12 +1140,34 @@ fn emit_def_diagnostic_<'db>( ); } DefDiagnosticKind::InvalidDeriveTarget { ast, id } => { - let derive = id.find_attr_range(db, krate, *ast).3.path_range(); - acc.push(InvalidDeriveTarget { range: ast.with_value(derive) }.into()); + let node = ast.to_node(db); + let derive = node.attrs().nth(*id); + match derive { + Some(derive) => { + acc.push( + InvalidDeriveTarget { + node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&derive))), + } + .into(), + ); + } + None => stdx::never!("derive diagnostic on item without derive attribute"), + } } DefDiagnosticKind::MalformedDerive { ast, id } => { - let derive = id.find_attr_range(db, krate, *ast).2; - acc.push(MalformedDerive { range: ast.with_value(derive) }.into()); + let node = ast.to_node(db); + let derive = node.attrs().nth(*id); + match derive { + Some(derive) => { + acc.push( + MalformedDerive { + node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&derive))), + } + .into(), + ); + } + None => stdx::never!("derive diagnostic on item without derive attribute"), + } } DefDiagnosticKind::MacroDefError { ast, message } => { let node = ast.to_node(db); @@ -1152,28 +1186,61 @@ fn emit_def_diagnostic_<'db>( fn precise_macro_call_location( ast: &MacroCallKind, db: &dyn HirDatabase, - krate: base_db::Crate, -) -> InFile { +) -> (InFile, Option) { // FIXME: maybe we actually want slightly different ranges for the different macro diagnostics // - e.g. the full attribute for macro errors, but only the name for name resolution match ast { MacroCallKind::FnLike { ast_id, .. 
} => { let node = ast_id.to_node(db); - let range = node - .path() - .and_then(|it| it.segment()) - .and_then(|it| it.name_ref()) - .map(|it| it.syntax().text_range()); - let range = range.unwrap_or_else(|| node.syntax().text_range()); - ast_id.with_value(range) + ( + ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))), + node.path() + .and_then(|it| it.segment()) + .and_then(|it| it.name_ref()) + .map(|it| it.syntax().text_range()), + ) } MacroCallKind::Derive { ast_id, derive_attr_index, derive_index, .. } => { - let range = derive_attr_index.find_derive_range(db, krate, *ast_id, *derive_index); - ast_id.with_value(range) + let node = ast_id.to_node(db); + // Compute the precise location of the macro name's token in the derive + // list. + let token = (|| { + let derive_attr = collect_attrs(&node) + .nth(derive_attr_index.ast_index()) + .and_then(|x| Either::left(x.1))?; + let token_tree = derive_attr.meta()?.token_tree()?; + let chunk_by = token_tree + .syntax() + .children_with_tokens() + .filter_map(|elem| match elem { + syntax::NodeOrToken::Token(tok) => Some(tok), + _ => None, + }) + .chunk_by(|t| t.kind() == T![,]); + let (_, mut group) = chunk_by + .into_iter() + .filter(|&(comma, _)| !comma) + .nth(*derive_index as usize)?; + group.find(|t| t.kind() == T![ident]) + })(); + ( + ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))), + token.as_ref().map(|tok| tok.text_range()), + ) } - MacroCallKind::Attr { ast_id, censored_attr_ids: attr_ids, .. } => { - let attr_range = attr_ids.invoc_attr().find_attr_range(db, krate, *ast_id).2; - ast_id.with_value(attr_range) + MacroCallKind::Attr { ast_id, invoc_attr_index, .. 
} => { + let node = ast_id.to_node(db); + let attr = collect_attrs(&node) + .nth(invoc_attr_index.ast_index()) + .and_then(|x| Either::left(x.1)) + .unwrap_or_else(|| { + panic!("cannot find attribute #{}", invoc_attr_index.ast_index()) + }); + + ( + ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&attr))), + Some(attr.syntax().text_range()), + ) } } } @@ -1371,7 +1438,7 @@ impl Struct { } pub fn repr(self, db: &dyn HirDatabase) -> Option { - AttrFlags::repr(db, self.id.into()) + db.struct_signature(self.id).repr } pub fn kind(self, db: &dyn HirDatabase) -> StructKind { @@ -1387,7 +1454,7 @@ impl Struct { } pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { - AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_UNSTABLE) + db.attrs(self.id.into()).is_unstable() } pub fn instantiate_infer<'db>(self, infer_ctxt: &InferCtxt<'db>) -> InstantiatedStruct<'db> { @@ -1476,7 +1543,7 @@ impl Union { .collect() } pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { - AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_UNSTABLE) + db.attrs(self.id.into()).is_unstable() } } @@ -1511,7 +1578,7 @@ impl Enum { } pub fn repr(self, db: &dyn HirDatabase) -> Option { - AttrFlags::repr(db, self.id.into()) + db.enum_signature(self.id).repr } pub fn ty<'db>(self, db: &'db dyn HirDatabase) -> Type<'db> { @@ -1527,7 +1594,7 @@ impl Enum { let interner = DbInterner::new_with(db, None, None); Type::new_for_crate( self.id.lookup(db).container.krate(), - match EnumSignature::variant_body_type(db, self.id) { + match db.enum_signature(self.id).variant_body_type() { layout::IntegerType::Pointer(sign) => match sign { true => Ty::new_int(interner, rustc_type_ir::IntTy::Isize), false => Ty::new_uint(interner, rustc_type_ir::UintTy::Usize), @@ -1568,7 +1635,7 @@ impl Enum { } pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { - AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_UNSTABLE) + db.attrs(self.id.into()).is_unstable() } } @@ -1669,7 +1736,7 @@ impl 
Variant { } pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { - AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_UNSTABLE) + db.attrs(self.id.into()).is_unstable() } pub fn instantiate_infer<'db>(self, infer_ctxt: &InferCtxt<'db>) -> InstantiatedVariant<'db> { @@ -2154,7 +2221,8 @@ fn expr_store_diagnostics<'db>( InactiveCode { node: *node, cfg: cfg.clone(), opts: opts.clone() }.into() } ExpressionStoreDiagnostics::UnresolvedMacroCall { node, path } => UnresolvedMacroCall { - range: node.map(|ptr| ptr.text_range()), + macro_call: (*node).map(|ast_ptr| ast_ptr.into()), + precise_location: None, path: path.clone(), is_bang: true, } @@ -2379,33 +2447,33 @@ impl Function { /// Does this function have `#[test]` attribute? pub fn is_test(self, db: &dyn HirDatabase) -> bool { - self.attrs(db).is_test() + db.attrs(self.id.into()).is_test() } /// is this a `fn main` or a function with an `export_name` of `main`? pub fn is_main(self, db: &dyn HirDatabase) -> bool { - self.exported_main(db) + db.attrs(self.id.into()).export_name() == Some(&sym::main) || self.module(db).is_crate_root() && db.function_signature(self.id).name == sym::main } /// Is this a function with an `export_name` of `main`? pub fn exported_main(self, db: &dyn HirDatabase) -> bool { - AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_EXPORT_NAME_MAIN) + db.attrs(self.id.into()).export_name() == Some(&sym::main) } /// Does this function have the ignore attribute? pub fn is_ignore(self, db: &dyn HirDatabase) -> bool { - AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_IGNORE) + db.attrs(self.id.into()).is_ignore() } /// Does this function have `#[bench]` attribute? pub fn is_bench(self, db: &dyn HirDatabase) -> bool { - AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_BENCH) + db.attrs(self.id.into()).is_bench() } /// Is this function marked as unstable with `#[feature]` attribute? 
pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { - AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_UNSTABLE) + db.attrs(self.id.into()).is_unstable() } pub fn is_unsafe_to_call( @@ -2416,7 +2484,8 @@ impl Function { ) -> bool { let (target_features, target_feature_is_safe_in_target) = caller .map(|caller| { - let target_features = hir_ty::TargetFeatures::from_fn(db, caller.id); + let target_features = + hir_ty::TargetFeatures::from_attrs(&db.attrs(caller.id.into())); let target_feature_is_safe_in_target = match &caller.krate(db).id.workspace_data(db).target { Ok(target) => hir_ty::target_feature_is_safe_in_target(target), @@ -2447,6 +2516,14 @@ impl Function { } pub fn as_proc_macro(self, db: &dyn HirDatabase) -> Option { + let attrs = db.attrs(self.id.into()); + // FIXME: Store this in FunctionData flags? + if !(attrs.is_proc_macro() + || attrs.is_proc_macro_attribute() + || attrs.is_proc_macro_derive()) + { + return None; + } let def_map = crate_def_map(db, HasModule::krate(&self.id, db)); def_map.fn_as_proc_macro(self.id).map(|id| Macro { id: id.into() }) } @@ -2899,7 +2976,7 @@ impl Trait { /// `#[rust_analyzer::completions(...)]` mode. 
pub fn complete(self, db: &dyn HirDatabase) -> Complete { - Complete::extract(true, self.attrs(db).attrs) + Complete::extract(true, &self.attrs(db)) } } @@ -3070,10 +3147,10 @@ impl Macro { let loc = id.lookup(db); let source = loc.source(db); match loc.kind { - ProcMacroKind::CustomDerive => AttrFlags::derive_info(db, self.id).map_or_else( - || as_name_opt(source.value.name()), - |info| Name::new_symbol_root(info.trait_name.clone()), - ), + ProcMacroKind::CustomDerive => db + .attrs(id.into()) + .parse_proc_macro_derive() + .map_or_else(|| as_name_opt(source.value.name()), |(it, _)| it), ProcMacroKind::Bang | ProcMacroKind::Attr => as_name_opt(source.value.name()), } } @@ -3081,7 +3158,7 @@ impl Macro { } pub fn is_macro_export(self, db: &dyn HirDatabase) -> bool { - matches!(self.id, MacroId::MacroRulesId(_) if AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_MACRO_EXPORT)) + matches!(self.id, MacroId::MacroRulesId(_) if db.attrs(self.id.into()).by_key(sym::macro_export).exists()) } pub fn is_proc_macro(self) -> bool { @@ -3905,10 +3982,18 @@ impl DeriveHelper { } pub fn name(&self, db: &dyn HirDatabase) -> Name { - AttrFlags::derive_info(db, self.derive) - .and_then(|it| it.helpers.get(self.idx as usize)) - .map(|helper| Name::new_symbol_root(helper.clone())) - .unwrap_or_else(Name::missing) + match self.derive { + makro @ MacroId::Macro2Id(_) => db + .attrs(makro.into()) + .parse_rustc_builtin_macro() + .and_then(|(_, helpers)| helpers.get(self.idx as usize).cloned()), + MacroId::MacroRulesId(_) => None, + makro @ MacroId::ProcMacroId(_) => db + .attrs(makro.into()) + .parse_proc_macro_derive() + .and_then(|(_, helpers)| helpers.get(self.idx as usize).cloned()), + } + .unwrap_or_else(Name::missing) } } @@ -4129,7 +4214,7 @@ impl TypeParam { } pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { - self.attrs(db).is_unstable() + db.attrs(GenericParamId::from(self.id).into()).is_unstable() } } diff --git a/crates/hir/src/semantics.rs 
b/crates/hir/src/semantics.rs index 8eb1c9725cd2..62ce3daab75d 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -21,6 +21,7 @@ use hir_def::{ }; use hir_expand::{ EditionedFileId, ExpandResult, FileRange, HirFileId, InMacroFile, MacroCallId, + attrs::collect_attrs, builtin::{BuiltinFnLikeExpander, EagerExpander}, db::ExpandDatabase, files::{FileRangeWrapper, HirFileRange, InRealFile}, @@ -35,7 +36,7 @@ use intern::{Interned, Symbol, sym}; use itertools::Itertools; use rustc_hash::{FxHashMap, FxHashSet}; use smallvec::{SmallVec, smallvec}; -use span::{FileId, SyntaxContext}; +use span::{Edition, FileId, SyntaxContext}; use stdx::{TupleExt, always}; use syntax::{ AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, @@ -385,14 +386,17 @@ impl<'db> SemanticsImpl<'db> { } pub fn attach_first_edition(&self, file: FileId) -> Option { - let krate = self.file_to_module_defs(file).next()?.krate(); - Some(EditionedFileId::new(self.db, file, krate.edition(self.db), krate.id)) + Some(EditionedFileId::new( + self.db, + file, + self.file_to_module_defs(file).next()?.krate().edition(self.db), + )) } pub fn parse_guess_edition(&self, file_id: FileId) -> ast::SourceFile { let file_id = self .attach_first_edition(file_id) - .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(self.db, file_id)); + .unwrap_or_else(|| EditionedFileId::new(self.db, file_id, Edition::CURRENT)); let tree = self.db.parse(file_id).tree(); self.cache(tree.syntax().clone(), file_id.into()); @@ -1193,34 +1197,33 @@ impl<'db> SemanticsImpl<'db> { .zip(Some(item)) }) .map(|(call_id, item)| { - let item_range = item.syntax().text_range(); - let loc = db.lookup_intern_macro_call(call_id); - let text_range = match loc.kind { + let attr_id = match db.lookup_intern_macro_call(call_id).kind { hir_expand::MacroCallKind::Attr { - censored_attr_ids: attr_ids, - .. 
- } => { - // FIXME: here, the attribute's text range is used to strip away all - // entries from the start of the attribute "list" up the invoking - // attribute. But in - // ``` - // mod foo { - // #![inner] - // } - // ``` - // we don't wanna strip away stuff in the `mod foo {` range, that is - // here if the id corresponds to an inner attribute we got strip all - // text ranges of the outer ones, and then all of the inner ones up - // to the invoking attribute so that the inbetween is ignored. - // FIXME: Should cfg_attr be handled differently? - let (attr, _, _, _) = attr_ids - .invoc_attr() - .find_attr_range_with_source(db, loc.krate, &item); - let start = attr.syntax().text_range().start(); - TextRange::new(start, item_range.end()) - } - _ => item_range, + invoc_attr_index, .. + } => invoc_attr_index.ast_index(), + _ => 0, }; + // FIXME: here, the attribute's text range is used to strip away all + // entries from the start of the attribute "list" up the invoking + // attribute. But in + // ``` + // mod foo { + // #![inner] + // } + // ``` + // we don't wanna strip away stuff in the `mod foo {` range, that is + // here if the id corresponds to an inner attribute we got strip all + // text ranges of the outer ones, and then all of the inner ones up + // to the invoking attribute so that the inbetween is ignored. 
+ let text_range = item.syntax().text_range(); + let start = collect_attrs(&item) + .nth(attr_id) + .map(|attr| match attr.1 { + Either::Left(it) => it.syntax().text_range().start(), + Either::Right(it) => it.syntax().text_range().start(), + }) + .unwrap_or_else(|| text_range.start()); + let text_range = TextRange::new(start, text_range.end()); filter_duplicates(tokens, text_range); process_expansion_for_token(ctx, &mut stack, call_id) }) @@ -1470,14 +1473,6 @@ impl<'db> SemanticsImpl<'db> { FileRangeWrapper { file_id: file_id.file_id(self.db), range } } - pub fn diagnostics_display_range_for_range( - &self, - src: InFile, - ) -> FileRangeWrapper { - let FileRange { file_id, range } = src.original_node_file_range_rooted(self.db); - FileRangeWrapper { file_id: file_id.file_id(self.db), range } - } - fn token_ancestors_with_macros( &self, token: SyntaxToken, diff --git a/crates/hir/src/semantics/child_by_source.rs b/crates/hir/src/semantics/child_by_source.rs index 165ac7e4a08d..5019a5987e51 100644 --- a/crates/hir/src/semantics/child_by_source.rs +++ b/crates/hir/src/semantics/child_by_source.rs @@ -5,7 +5,7 @@ //! node for a *child*, and get its hir. use either::Either; -use hir_expand::HirFileId; +use hir_expand::{HirFileId, attrs::collect_attrs}; use span::AstIdNode; use syntax::{AstPtr, ast}; @@ -94,7 +94,6 @@ impl ChildBySource for ModuleId { impl ChildBySource for ItemScope { fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) { - let krate = file_id.krate(db); self.declarations().for_each(|item| add_module_def(db, res, file_id, item)); self.impls().for_each(|imp| insert_item_loc(db, res, file_id, imp, keys::IMPL)); self.extern_blocks().for_each(|extern_block| { @@ -124,10 +123,12 @@ impl ChildBySource for ItemScope { |(ast_id, calls)| { let adt = ast_id.to_node(db); calls.for_each(|(attr_id, call_id, calls)| { - // FIXME: Fix cfg_attr handling. 
- let (attr, _, _, _) = attr_id.find_attr_range_with_source(db, krate, &adt); - res[keys::DERIVE_MACRO_CALL] - .insert(AstPtr::new(&attr), (attr_id, call_id, calls.into())); + if let Some((_, Either::Left(attr))) = + collect_attrs(&adt).nth(attr_id.ast_index()) + { + res[keys::DERIVE_MACRO_CALL] + .insert(AstPtr::new(&attr), (attr_id, call_id, calls.into())); + } }); }, ); diff --git a/crates/hir/src/symbols.rs b/crates/hir/src/symbols.rs index 9059c88ad66a..d8c624e5c689 100644 --- a/crates/hir/src/symbols.rs +++ b/crates/hir/src/symbols.rs @@ -392,12 +392,12 @@ impl<'a> SymbolCollector<'a> { let mut do_not_complete = Complete::Yes; if let Some(attrs) = def.attrs(self.db) { - do_not_complete = Complete::extract(matches!(def, ModuleDef::Trait(_)), attrs.attrs); + do_not_complete = Complete::extract(matches!(def, ModuleDef::Trait(_)), &attrs); if let Some(trait_do_not_complete) = trait_do_not_complete { do_not_complete = Complete::for_trait_item(trait_do_not_complete, do_not_complete); } - for alias in attrs.doc_aliases(self.db) { + for alias in attrs.doc_aliases() { self.symbols.insert(FileSymbol { name: alias.clone(), def, @@ -441,9 +441,9 @@ impl<'a> SymbolCollector<'a> { let mut do_not_complete = Complete::Yes; if let Some(attrs) = def.attrs(self.db) { - do_not_complete = Complete::extract(matches!(def, ModuleDef::Trait(_)), attrs.attrs); + do_not_complete = Complete::extract(matches!(def, ModuleDef::Trait(_)), &attrs); - for alias in attrs.doc_aliases(self.db) { + for alias in attrs.doc_aliases() { self.symbols.insert(FileSymbol { name: alias.clone(), def, diff --git a/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/crates/ide-assists/src/handlers/add_missing_match_arms.rs index e06c534e3c51..7843ab9e8f25 100644 --- a/crates/ide-assists/src/handlers/add_missing_match_arms.rs +++ b/crates/ide-assists/src/handlers/add_missing_match_arms.rs @@ -1,7 +1,7 @@ use std::iter::{self, Peekable}; use either::Either; -use hir::{Adt, AsAssocItem, Crate, 
FindPathConfig, HasAttrs, ModuleDef, Semantics}; +use hir::{Adt, AsAssocItem, Crate, FindPathConfig, HasAttrs, ModuleDef, Semantics, sym}; use ide_db::RootDatabase; use ide_db::assists::ExprFillDefaultMode; use ide_db::syntax_helpers::suggest_name; @@ -401,7 +401,7 @@ impl ExtendedVariant { fn should_be_hidden(self, db: &RootDatabase, krate: Crate) -> bool { match self { ExtendedVariant::Variant { variant: var, .. } => { - var.attrs(db).is_doc_hidden() && var.module(db).krate() != krate + var.attrs(db).has_doc_hidden() && var.module(db).krate() != krate } _ => false, } @@ -424,7 +424,7 @@ impl ExtendedEnum { fn is_non_exhaustive(&self, db: &RootDatabase, krate: Crate) -> bool { match self { ExtendedEnum::Enum { enum_: e, .. } => { - e.attrs(db).is_non_exhaustive() && e.module(db).krate() != krate + e.attrs(db).by_key(sym::non_exhaustive).exists() && e.module(db).krate() != krate } _ => false, } diff --git a/crates/ide-assists/src/handlers/destructure_struct_binding.rs b/crates/ide-assists/src/handlers/destructure_struct_binding.rs index 46f210804da3..8b24d33bf996 100644 --- a/crates/ide-assists/src/handlers/destructure_struct_binding.rs +++ b/crates/ide-assists/src/handlers/destructure_struct_binding.rs @@ -1,4 +1,4 @@ -use hir::HasVisibility; +use hir::{HasVisibility, sym}; use ide_db::{ FxHashMap, FxHashSet, assists::AssistId, @@ -93,7 +93,7 @@ fn collect_data(ident_pat: ast::IdentPat, ctx: &AssistContext<'_>) -> Option) -> let mut buf = String::from("./"); let db = ctx.db(); match parent_module.name(db) { - Some(name) if !parent_module.is_mod_rs(db) && !parent_module.has_path(db) => { + Some(name) + if !parent_module.is_mod_rs(db) + && parent_module + .attrs(db) + .by_key(sym::path) + .string_value_unescape() + .is_none() => + { format_to!(buf, "{}/", name.as_str()) } _ => (), diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs index eb7553222a68..2977f8b8c2e7 100644 --- a/crates/ide-assists/src/lib.rs +++ b/crates/ide-assists/src/lib.rs 
@@ -68,7 +68,7 @@ pub mod utils; use hir::Semantics; use ide_db::{EditionedFileId, RootDatabase}; -use syntax::TextRange; +use syntax::{Edition, TextRange}; pub(crate) use crate::assist_context::{AssistContext, Assists}; @@ -90,7 +90,7 @@ pub fn assists( let sema = Semantics::new(db); let file_id = sema .attach_first_edition(range.file_id) - .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, range.file_id)); + .unwrap_or_else(|| EditionedFileId::new(db, range.file_id, Edition::CURRENT)); let ctx = AssistContext::new(sema, config, hir::FileRange { file_id, range: range.range }); let mut acc = Assists::new(&ctx, resolve); handlers::all().iter().for_each(|handler| { diff --git a/crates/ide-assists/src/tests.rs b/crates/ide-assists/src/tests.rs index 2e220b129fe1..ade60691b57b 100644 --- a/crates/ide-assists/src/tests.rs +++ b/crates/ide-assists/src/tests.rs @@ -321,13 +321,11 @@ fn check_with_config( let _tracing = setup_tracing(); let (mut db, file_with_caret_id, range_or_offset) = RootDatabase::with_range_or_offset(before); db.enable_proc_attr_macros(); - let sema = Semantics::new(&db); - let file_with_caret_id = - sema.attach_first_edition(file_with_caret_id.file_id(&db)).unwrap_or(file_with_caret_id); let text_without_caret = db.file_text(file_with_caret_id.file_id(&db)).text(&db).to_string(); let frange = hir::FileRange { file_id: file_with_caret_id, range: range_or_offset.into() }; + let sema = Semantics::new(&db); let ctx = AssistContext::new(sema, &config, frange); let resolve = match expected { ExpectedResult::Unresolved => AssistResolveStrategy::None, diff --git a/crates/ide-assists/src/utils.rs b/crates/ide-assists/src/utils.rs index 7a86339c1c9c..5a3c5a39dac7 100644 --- a/crates/ide-assists/src/utils.rs +++ b/crates/ide-assists/src/utils.rs @@ -93,7 +93,16 @@ pub fn test_related_attribute_syn(fn_def: &ast::Fn) -> Option { } pub fn has_test_related_attribute(attrs: &hir::AttrsWithOwner) -> bool { - attrs.is_test() + attrs.iter().any(|attr| 
{ + let path = attr.path(); + (|| { + Some( + path.segments().first()?.as_str().starts_with("test") + || path.segments().last()?.as_str().ends_with("test"), + ) + })() + .unwrap_or_default() + }) } #[derive(Clone, Copy, PartialEq)] @@ -119,7 +128,7 @@ pub fn filter_assoc_items( .copied() .filter(|assoc_item| { if ignore_items == IgnoreAssocItems::DocHiddenAttrPresent - && assoc_item.attrs(sema.db).is_doc_hidden() + && assoc_item.attrs(sema.db).has_doc_hidden() { if let hir::AssocItem::Function(f) = assoc_item && !f.has_body(sema.db) diff --git a/crates/ide-completion/src/completions/attribute/lint.rs b/crates/ide-completion/src/completions/attribute/lint.rs index df577b8ed02e..c87c46d98127 100644 --- a/crates/ide-completion/src/completions/attribute/lint.rs +++ b/crates/ide-completion/src/completions/attribute/lint.rs @@ -56,7 +56,7 @@ pub(super) fn complete_lint( }; let mut item = CompletionItem::new(SymbolKind::Attribute, ctx.source_range(), label, ctx.edition); - item.documentation(Documentation::new_owned(description.to_owned())); + item.documentation(Documentation::new(description.to_owned())); item.add_to(acc, ctx.db) } } diff --git a/crates/ide-completion/src/completions/flyimport.rs b/crates/ide-completion/src/completions/flyimport.rs index 20d01485a45a..d1e05a4359f1 100644 --- a/crates/ide-completion/src/completions/flyimport.rs +++ b/crates/ide-completion/src/completions/flyimport.rs @@ -266,7 +266,7 @@ fn import_on_the_fly( let original_item = &import.original_item; !ctx.is_item_hidden(&import.item_to_import) && !ctx.is_item_hidden(original_item) - && ctx.check_stability(original_item.attrs(ctx.db).as_ref()) + && ctx.check_stability(original_item.attrs(ctx.db).as_deref()) }) .filter(|import| filter_excluded_flyimport(ctx, import)) .sorted_by(|a, b| { @@ -313,7 +313,7 @@ fn import_on_the_fly_pat_( let original_item = &import.original_item; !ctx.is_item_hidden(&import.item_to_import) && !ctx.is_item_hidden(original_item) - && 
ctx.check_stability(original_item.attrs(ctx.db).as_ref()) + && ctx.check_stability(original_item.attrs(ctx.db).as_deref()) }) .sorted_by(|a, b| { let key = |import_path| { diff --git a/crates/ide-completion/src/completions/postfix.rs b/crates/ide-completion/src/completions/postfix.rs index 4474d6181c20..73cbe3f0aaab 100644 --- a/crates/ide-completion/src/completions/postfix.rs +++ b/crates/ide-completion/src/completions/postfix.rs @@ -440,7 +440,7 @@ fn add_custom_postfix_completions( let body = snippet.postfix_snippet(receiver_text); let mut builder = postfix_snippet(trigger, snippet.description.as_deref().unwrap_or_default(), &body); - builder.documentation(Documentation::new_owned(format!("```rust\n{body}\n```"))); + builder.documentation(Documentation::new(format!("```rust\n{body}\n```"))); for import in imports.into_iter() { builder.add_import(import); } diff --git a/crates/ide-completion/src/completions/snippet.rs b/crates/ide-completion/src/completions/snippet.rs index 04450aea75bf..ead9852eff53 100644 --- a/crates/ide-completion/src/completions/snippet.rs +++ b/crates/ide-completion/src/completions/snippet.rs @@ -139,7 +139,7 @@ fn add_custom_completions( }; let body = snip.snippet(); let mut builder = snippet(ctx, cap, trigger, &body); - builder.documentation(Documentation::new_owned(format!("```rust\n{body}\n```"))); + builder.documentation(Documentation::new(format!("```rust\n{body}\n```"))); for import in imports.into_iter() { builder.add_import(import); } diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs index c95b83ef8a02..fc2cc3b796ec 100644 --- a/crates/ide-completion/src/context.rs +++ b/crates/ide-completion/src/context.rs @@ -558,7 +558,7 @@ impl CompletionContext<'_> { I: hir::HasAttrs + Copy, { let attrs = item.attrs(self.db); - attrs.doc_aliases(self.db).iter().map(|it| it.as_str().into()).collect() + attrs.doc_aliases().map(|it| it.as_str().into()).collect() } /// Check if an item is `#[doc(hidden)]`. 
@@ -572,7 +572,7 @@ impl CompletionContext<'_> { } /// Checks whether this item should be listed in regards to stability. Returns `true` if we should. - pub(crate) fn check_stability(&self, attrs: Option<&hir::AttrsWithOwner>) -> bool { + pub(crate) fn check_stability(&self, attrs: Option<&hir::Attrs>) -> bool { let Some(attrs) = attrs else { return true; }; @@ -590,15 +590,15 @@ impl CompletionContext<'_> { /// Whether the given trait is an operator trait or not. pub(crate) fn is_ops_trait(&self, trait_: hir::Trait) -> bool { - match trait_.attrs(self.db).lang(self.db) { - Some(lang) => OP_TRAIT_LANG_NAMES.contains(&lang), + match trait_.attrs(self.db).lang() { + Some(lang) => OP_TRAIT_LANG_NAMES.contains(&lang.as_str()), None => false, } } /// Whether the given trait has `#[doc(notable_trait)]` pub(crate) fn is_doc_notable_trait(&self, trait_: hir::Trait) -> bool { - trait_.attrs(self.db).is_doc_notable_trait() + trait_.attrs(self.db).has_doc_notable_trait() } /// Returns the traits in scope, with the [`Drop`] trait removed. @@ -662,7 +662,7 @@ impl CompletionContext<'_> { fn is_visible_impl( &self, vis: &hir::Visibility, - attrs: &hir::AttrsWithOwner, + attrs: &hir::Attrs, defining_crate: hir::Crate, ) -> Visible { if !self.check_stability(Some(attrs)) { @@ -684,18 +684,14 @@ impl CompletionContext<'_> { if self.is_doc_hidden(attrs, defining_crate) { Visible::No } else { Visible::Yes } } - pub(crate) fn is_doc_hidden( - &self, - attrs: &hir::AttrsWithOwner, - defining_crate: hir::Crate, - ) -> bool { + pub(crate) fn is_doc_hidden(&self, attrs: &hir::Attrs, defining_crate: hir::Crate) -> bool { // `doc(hidden)` items are only completed within the defining crate. 
- self.krate != defining_crate && attrs.is_doc_hidden() + self.krate != defining_crate && attrs.has_doc_hidden() } pub(crate) fn doc_aliases_in_scope(&self, scope_def: ScopeDef) -> Vec { if let Some(attrs) = scope_def.attrs(self.db) { - attrs.doc_aliases(self.db).iter().map(|it| it.as_str().into()).collect() + attrs.doc_aliases().map(|it| it.as_str().into()).collect() } else { vec![] } diff --git a/crates/ide-completion/src/item.rs b/crates/ide-completion/src/item.rs index c526c7f070bf..303c71230d60 100644 --- a/crates/ide-completion/src/item.rs +++ b/crates/ide-completion/src/item.rs @@ -57,8 +57,7 @@ pub struct CompletionItem { /// Additional info to show in the UI pop up. pub detail: Option, - // FIXME: Make this with `'db` lifetime. - pub documentation: Option>, + pub documentation: Option, /// Whether this item is marked as deprecated pub deprecated: bool, @@ -489,8 +488,7 @@ pub(crate) struct Builder { insert_text: Option, is_snippet: bool, detail: Option, - // FIXME: Make this with `'db` lifetime. 
- documentation: Option>, + documentation: Option, lookup: Option, kind: CompletionItemKind, text_edit: Option, @@ -646,11 +644,11 @@ impl Builder { self } #[allow(unused)] - pub(crate) fn documentation(&mut self, docs: Documentation<'_>) -> &mut Builder { + pub(crate) fn documentation(&mut self, docs: Documentation) -> &mut Builder { self.set_documentation(Some(docs)) } - pub(crate) fn set_documentation(&mut self, docs: Option>) -> &mut Builder { - self.documentation = docs.map(Documentation::into_owned); + pub(crate) fn set_documentation(&mut self, docs: Option) -> &mut Builder { + self.documentation = docs; self } pub(crate) fn set_deprecated(&mut self, deprecated: bool) -> &mut Builder { diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs index 77a2a3a3a9a0..094e679501fc 100644 --- a/crates/ide-completion/src/render.rs +++ b/crates/ide-completion/src/render.rs @@ -10,7 +10,7 @@ pub(crate) mod type_alias; pub(crate) mod union_literal; pub(crate) mod variant; -use hir::{AsAssocItem, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type}; +use hir::{AsAssocItem, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type, sym}; use ide_db::text_edit::TextEdit; use ide_db::{ RootDatabase, SnippetCap, SymbolKind, @@ -91,7 +91,8 @@ impl<'a> RenderContext<'a> { } fn is_deprecated(&self, def: impl HasAttrs) -> bool { - def.attrs(self.db()).is_deprecated() + let attrs = def.attrs(self.db()); + attrs.by_key(sym::deprecated).exists() } fn is_deprecated_assoc_item(&self, as_assoc_item: impl AsAssocItem) -> bool { @@ -114,7 +115,7 @@ impl<'a> RenderContext<'a> { } // FIXME: remove this - fn docs(&self, def: impl HasDocs) -> Option> { + fn docs(&self, def: impl HasDocs) -> Option { def.docs(self.db()) } } @@ -320,9 +321,7 @@ pub(crate) fn render_expr( ); let edit = TextEdit::replace(source_range, snippet); item.snippet_edit(ctx.config.snippet_cap?, edit); - item.documentation(Documentation::new_owned(String::from( - "Autogenerated expression by term search", 
- ))); + item.documentation(Documentation::new(String::from("Autogenerated expression by term search"))); item.set_relevance(crate::CompletionRelevance { type_match: compute_type_match(ctx, &expr.ty(ctx.db)), ..Default::default() @@ -555,7 +554,7 @@ fn res_to_kind(resolution: ScopeDef) -> CompletionItemKind { } } -fn scope_def_docs(db: &RootDatabase, resolution: ScopeDef) -> Option> { +fn scope_def_docs(db: &RootDatabase, resolution: ScopeDef) -> Option { use hir::ModuleDef::*; match resolution { ScopeDef::ModuleDef(Module(it)) => it.docs(db), diff --git a/crates/ide-completion/src/render/literal.rs b/crates/ide-completion/src/render/literal.rs index 8b14f05b72b2..6c89e49f94e8 100644 --- a/crates/ide-completion/src/render/literal.rs +++ b/crates/ide-completion/src/render/literal.rs @@ -180,7 +180,7 @@ impl Variant { } } - fn docs(self, db: &dyn HirDatabase) -> Option> { + fn docs(self, db: &dyn HirDatabase) -> Option { match self { Variant::Struct(it) => it.docs(db), Variant::EnumVariant(it) => it.docs(db), diff --git a/crates/ide-completion/src/render/pattern.rs b/crates/ide-completion/src/render/pattern.rs index 60474a31b4d3..312d3bd426f9 100644 --- a/crates/ide-completion/src/render/pattern.rs +++ b/crates/ide-completion/src/render/pattern.rs @@ -108,7 +108,7 @@ fn build_completion( label: SmolStr, lookup: SmolStr, pat: String, - def: impl HasDocs, + def: impl HasDocs + Copy, adt_ty: hir::Type<'_>, // Missing in context of match statement completions is_variant_missing: bool, diff --git a/crates/ide-completion/src/render/variant.rs b/crates/ide-completion/src/render/variant.rs index cfd6340f1eea..37d0fa18c497 100644 --- a/crates/ide-completion/src/render/variant.rs +++ b/crates/ide-completion/src/render/variant.rs @@ -1,7 +1,7 @@ //! Code common to structs, unions, and enum variants. 
use crate::context::CompletionContext; -use hir::{HasAttrs, HasCrate, HasVisibility, HirDisplay, StructKind}; +use hir::{HasAttrs, HasCrate, HasVisibility, HirDisplay, StructKind, sym}; use ide_db::SnippetCap; use itertools::Itertools; use syntax::SmolStr; @@ -105,8 +105,8 @@ pub(crate) fn visible_fields( .copied() .collect::>(); let has_invisible_field = n_fields - fields.len() > 0; - let is_foreign_non_exhaustive = - item.attrs(ctx.db).is_non_exhaustive() && item.krate(ctx.db) != module.krate(); + let is_foreign_non_exhaustive = item.attrs(ctx.db).by_key(sym::non_exhaustive).exists() + && item.krate(ctx.db) != module.krate(); let fields_omitted = has_invisible_field || is_foreign_non_exhaustive; Some((fields, fields_omitted)) } diff --git a/crates/ide-completion/src/tests.rs b/crates/ide-completion/src/tests.rs index 36d739455030..b32a89545726 100644 --- a/crates/ide-completion/src/tests.rs +++ b/crates/ide-completion/src/tests.rs @@ -160,12 +160,12 @@ pub(crate) fn position( #[rust_analyzer::rust_fixture] ra_fixture: &str, ) -> (RootDatabase, FilePosition) { let mut database = RootDatabase::default(); - let change_fixture = ChangeFixture::parse(ra_fixture); + let change_fixture = ChangeFixture::parse(&database, ra_fixture); database.enable_proc_attr_macros(); database.apply_change(change_fixture.change); let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); let offset = range_or_offset.expect_offset(); - let position = FilePosition { file_id: file_id.file_id(), offset }; + let position = FilePosition { file_id: file_id.file_id(&database), offset }; (database, position) } diff --git a/crates/ide-db/src/defs.rs b/crates/ide-db/src/defs.rs index 9ce85b2bf330..c051fd863de6 100644 --- a/crates/ide-db/src/defs.rs +++ b/crates/ide-db/src/defs.rs @@ -5,10 +5,8 @@ // FIXME: this badly needs rename/rewrite (matklad, 2020-02-06). 
-use std::borrow::Cow; - use crate::RootDatabase; -use crate::documentation::{Documentation, HasDocs}; +use crate::documentation::{DocsRangeMap, Documentation, HasDocs}; use crate::famous_defs::FamousDefs; use arrayvec::ArrayVec; use either::Either; @@ -23,7 +21,7 @@ use hir::{ use span::Edition; use stdx::{format_to, impl_from}; use syntax::{ - SyntaxKind, SyntaxNode, SyntaxToken, + SyntaxKind, SyntaxNode, SyntaxToken, TextSize, ast::{self, AstNode}, match_ast, }; @@ -201,25 +199,21 @@ impl Definition { Some(name) } - pub fn docs<'db>( + pub fn docs( &self, - db: &'db RootDatabase, + db: &RootDatabase, famous_defs: Option<&FamousDefs<'_, '_>>, display_target: DisplayTarget, - ) -> Option> { - self.docs_with_rangemap(db, famous_defs, display_target).map(|docs| match docs { - Either::Left(Cow::Borrowed(docs)) => Documentation::new_borrowed(docs.docs()), - Either::Left(Cow::Owned(docs)) => Documentation::new_owned(docs.into_docs()), - Either::Right(docs) => docs, - }) + ) -> Option { + self.docs_with_rangemap(db, famous_defs, display_target).map(|(docs, _)| docs) } - pub fn docs_with_rangemap<'db>( + pub fn docs_with_rangemap( &self, - db: &'db RootDatabase, + db: &RootDatabase, famous_defs: Option<&FamousDefs<'_, '_>>, display_target: DisplayTarget, - ) -> Option, Documentation<'db>>> { + ) -> Option<(Documentation, Option)> { let docs = match self { Definition::Macro(it) => it.docs_with_rangemap(db), Definition::Field(it) => it.docs_with_rangemap(db), @@ -235,13 +229,15 @@ impl Definition { it.docs_with_rangemap(db).or_else(|| { // docs are missing, try to fall back to the docs of the aliased item. 
let adt = it.ty(db).as_adt()?; - let mut docs = adt.docs_with_rangemap(db)?.into_owned(); + let (docs, range_map) = adt.docs_with_rangemap(db)?; let header_docs = format!( "*This is the documentation for* `{}`\n\n", adt.display(db, display_target) ); - docs.prepend_str(&header_docs); - Some(Cow::Owned(docs)) + let offset = TextSize::new(header_docs.len() as u32); + let range_map = range_map.shift_docstring_line_range(offset); + let docs = header_docs + docs.as_str(); + Some((Documentation::new(docs), range_map)) }) } Definition::BuiltinType(it) => { @@ -250,7 +246,7 @@ impl Definition { let primitive_mod = format!("prim_{}", it.name().display(fd.0.db, display_target.edition)); let doc_owner = find_std_module(fd, &primitive_mod, display_target.edition)?; - doc_owner.docs_with_rangemap(db) + doc_owner.docs_with_rangemap(fd.0.db) }) } Definition::BuiltinLifetime(StaticLifetime) => None, @@ -286,7 +282,7 @@ impl Definition { ); } - return Some(Either::Right(Documentation::new_owned(docs.replace('*', "\\*")))); + return Some((Documentation::new(docs.replace('*', "\\*")), None)); } Definition::ToolModule(_) => None, Definition::DeriveHelper(_) => None, @@ -303,7 +299,7 @@ impl Definition { let item = trait_.items(db).into_iter().find(|it| it.name(db) == name)?; item.docs_with_rangemap(db) }) - .map(Either::Left) + .map(|(docs, range_map)| (docs, Some(range_map))) } pub fn label(&self, db: &RootDatabase, display_target: DisplayTarget) -> String { diff --git a/crates/ide-db/src/documentation.rs b/crates/ide-db/src/documentation.rs index 4c4691cca2ca..cab19aadfd01 100644 --- a/crates/ide-db/src/documentation.rs +++ b/crates/ide-db/src/documentation.rs @@ -1,100 +1,337 @@ //! Documentation attribute related utilities. 
-use std::borrow::Cow; - -use hir::{HasAttrs, db::HirDatabase, resolve_doc_path_on}; +use either::Either; +use hir::{ + AttrId, AttrSourceMap, AttrsWithOwner, HasAttrs, InFile, + db::{DefDatabase, HirDatabase}, + resolve_doc_path_on, sym, +}; +use itertools::Itertools; +use span::{TextRange, TextSize}; +use syntax::{ + AstToken, + ast::{self, IsString}, +}; /// Holds documentation #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct Documentation<'db>(Cow<'db, str>); - -impl<'db> Documentation<'db> { - #[inline] - pub fn new_owned(s: String) -> Self { - Documentation(Cow::Owned(s)) - } +pub struct Documentation(String); - #[inline] - pub fn new_borrowed(s: &'db str) -> Self { - Documentation(Cow::Borrowed(s)) +impl Documentation { + pub fn new(s: String) -> Self { + Documentation(s) } - #[inline] - pub fn into_owned(self) -> Documentation<'static> { - Documentation::new_owned(self.0.into_owned()) - } - - #[inline] pub fn as_str(&self) -> &str { &self.0 } } -pub trait HasDocs: HasAttrs + Copy { - fn docs(self, db: &dyn HirDatabase) -> Option> { - let docs = match self.docs_with_rangemap(db)? { - Cow::Borrowed(docs) => Documentation::new_borrowed(docs.docs()), - Cow::Owned(docs) => Documentation::new_owned(docs.into_docs()), - }; - Some(docs) - } - fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option> { - self.hir_docs(db).map(Cow::Borrowed) +impl From for String { + fn from(Documentation(string): Documentation) -> Self { + string } +} + +pub trait HasDocs: HasAttrs { + fn docs(self, db: &dyn HirDatabase) -> Option; + fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option<(Documentation, DocsRangeMap)>; fn resolve_doc_path( self, db: &dyn HirDatabase, link: &str, ns: Option, - is_inner_doc: hir::IsInnerDoc, - ) -> Option { - resolve_doc_path_on(db, self, link, ns, is_inner_doc) + is_inner_doc: bool, + ) -> Option; +} +/// A struct to map text ranges from [`Documentation`] back to TextRanges in the syntax tree. 
+#[derive(Debug)] +pub struct DocsRangeMap { + source_map: AttrSourceMap, + // (docstring-line-range, attr_index, attr-string-range) + // a mapping from the text range of a line of the [`Documentation`] to the attribute index and + // the original (untrimmed) syntax doc line + mapping: Vec<(TextRange, AttrId, TextRange)>, +} + +impl DocsRangeMap { + /// Maps a [`TextRange`] relative to the documentation string back to its AST range + pub fn map(&self, range: TextRange) -> Option<(InFile, AttrId)> { + let found = self.mapping.binary_search_by(|(probe, ..)| probe.ordering(range)).ok()?; + let (line_docs_range, idx, original_line_src_range) = self.mapping[found]; + if !line_docs_range.contains_range(range) { + return None; + } + + let relative_range = range - line_docs_range.start(); + + let InFile { file_id, value: source } = self.source_map.source_of_id(idx); + match source { + Either::Left(attr) => { + let string = get_doc_string_in_attr(attr)?; + let text_range = string.open_quote_text_range()?; + let range = TextRange::at( + text_range.end() + original_line_src_range.start() + relative_range.start(), + string.syntax().text_range().len().min(range.len()), + ); + Some((InFile { file_id, value: range }, idx)) + } + Either::Right(comment) => { + let text_range = comment.syntax().text_range(); + let range = TextRange::at( + text_range.start() + + TextSize::try_from(comment.prefix().len()).ok()? 
+ + original_line_src_range.start() + + relative_range.start(), + text_range.len().min(range.len()), + ); + Some((InFile { file_id, value: range }, idx)) + } + } } + + pub fn shift_docstring_line_range(self, offset: TextSize) -> DocsRangeMap { + let mapping = self + .mapping + .into_iter() + .map(|(buf_offset, id, base_offset)| { + let buf_offset = buf_offset.checked_add(offset).unwrap(); + (buf_offset, id, base_offset) + }) + .collect_vec(); + DocsRangeMap { source_map: self.source_map, mapping } + } +} + +pub fn docs_with_rangemap( + db: &dyn DefDatabase, + attrs: &AttrsWithOwner, +) -> Option<(Documentation, DocsRangeMap)> { + let docs = attrs + .by_key(sym::doc) + .attrs() + .filter_map(|attr| attr.string_value_unescape().map(|s| (s, attr.id))); + let indent = doc_indent(attrs); + let mut buf = String::new(); + let mut mapping = Vec::new(); + for (doc, idx) in docs { + if !doc.is_empty() { + let mut base_offset = 0; + for raw_line in doc.split('\n') { + let line = raw_line.trim_end(); + let line_len = line.len(); + let (offset, line) = match line.char_indices().nth(indent) { + Some((offset, _)) => (offset, &line[offset..]), + None => (0, line), + }; + let buf_offset = buf.len(); + buf.push_str(line); + mapping.push(( + TextRange::new(buf_offset.try_into().ok()?, buf.len().try_into().ok()?), + idx, + TextRange::at( + (base_offset + offset).try_into().ok()?, + line_len.try_into().ok()?, + ), + )); + buf.push('\n'); + base_offset += raw_line.len() + 1; + } + } else { + buf.push('\n'); + } + } + buf.pop(); + if buf.is_empty() { + None + } else { + Some((Documentation(buf), DocsRangeMap { mapping, source_map: attrs.source_map(db) })) + } +} + +pub fn docs_from_attrs(attrs: &hir::Attrs) -> Option { + let docs = attrs.by_key(sym::doc).attrs().filter_map(|attr| attr.string_value_unescape()); + let indent = doc_indent(attrs); + let mut buf = String::new(); + for doc in docs { + // str::lines doesn't yield anything for the empty string + if !doc.is_empty() { + // We 
don't trim trailing whitespace from doc comments as multiple trailing spaces + // indicates a hard line break in Markdown. + let lines = doc.lines().map(|line| { + line.char_indices().nth(indent).map_or(line, |(offset, _)| &line[offset..]) + }); + + buf.extend(Itertools::intersperse(lines, "\n")); + } + buf.push('\n'); + } + buf.pop(); + if buf.is_empty() { None } else { Some(buf) } } macro_rules! impl_has_docs { ($($def:ident,)*) => {$( - impl HasDocs for hir::$def {} + impl HasDocs for hir::$def { + fn docs(self, db: &dyn HirDatabase) -> Option { + docs_from_attrs(&self.attrs(db)).map(Documentation) + } + fn docs_with_rangemap( + self, + db: &dyn HirDatabase, + ) -> Option<(Documentation, DocsRangeMap)> { + docs_with_rangemap(db, &self.attrs(db)) + } + fn resolve_doc_path( + self, + db: &dyn HirDatabase, + link: &str, + ns: Option, + is_inner_doc: bool, + ) -> Option { + resolve_doc_path_on(db, self, link, ns, is_inner_doc) + } + } )*}; } impl_has_docs![ Variant, Field, Static, Const, Trait, TypeAlias, Macro, Function, Adt, Module, Impl, Crate, - AssocItem, Struct, Union, Enum, ]; +macro_rules! 
impl_has_docs_enum { + ($($variant:ident),* for $enum:ident) => {$( + impl HasDocs for hir::$variant { + fn docs(self, db: &dyn HirDatabase) -> Option { + hir::$enum::$variant(self).docs(db) + } + + fn docs_with_rangemap( + self, + db: &dyn HirDatabase, + ) -> Option<(Documentation, DocsRangeMap)> { + hir::$enum::$variant(self).docs_with_rangemap(db) + } + fn resolve_doc_path( + self, + db: &dyn HirDatabase, + link: &str, + ns: Option, + is_inner_doc: bool, + ) -> Option { + hir::$enum::$variant(self).resolve_doc_path(db, link, ns, is_inner_doc) + } + } + )*}; +} + +impl_has_docs_enum![Struct, Union, Enum for Adt]; + +impl HasDocs for hir::AssocItem { + fn docs(self, db: &dyn HirDatabase) -> Option { + match self { + hir::AssocItem::Function(it) => it.docs(db), + hir::AssocItem::Const(it) => it.docs(db), + hir::AssocItem::TypeAlias(it) => it.docs(db), + } + } + + fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option<(Documentation, DocsRangeMap)> { + match self { + hir::AssocItem::Function(it) => it.docs_with_rangemap(db), + hir::AssocItem::Const(it) => it.docs_with_rangemap(db), + hir::AssocItem::TypeAlias(it) => it.docs_with_rangemap(db), + } + } + + fn resolve_doc_path( + self, + db: &dyn HirDatabase, + link: &str, + ns: Option, + is_inner_doc: bool, + ) -> Option { + match self { + hir::AssocItem::Function(it) => it.resolve_doc_path(db, link, ns, is_inner_doc), + hir::AssocItem::Const(it) => it.resolve_doc_path(db, link, ns, is_inner_doc), + hir::AssocItem::TypeAlias(it) => it.resolve_doc_path(db, link, ns, is_inner_doc), + } + } +} + impl HasDocs for hir::ExternCrateDecl { - fn docs(self, db: &dyn HirDatabase) -> Option> { - let crate_docs = self.resolved_crate(db)?.hir_docs(db); - let decl_docs = self.hir_docs(db); + fn docs(self, db: &dyn HirDatabase) -> Option { + let crate_docs = docs_from_attrs(&self.resolved_crate(db)?.root_module().attrs(db)); + let decl_docs = docs_from_attrs(&self.attrs(db)); match (decl_docs, crate_docs) { (None, None) => None, 
- (Some(docs), None) | (None, Some(docs)) => { - Some(Documentation::new_borrowed(docs.docs())) - } - (Some(decl_docs), Some(crate_docs)) => { - let mut docs = String::with_capacity( - decl_docs.docs().len() + "\n\n".len() + crate_docs.docs().len(), - ); - docs.push_str(decl_docs.docs()); - docs.push_str("\n\n"); - docs.push_str(crate_docs.docs()); - Some(Documentation::new_owned(docs)) + (Some(decl_docs), None) => Some(decl_docs), + (None, Some(crate_docs)) => Some(crate_docs), + (Some(mut decl_docs), Some(crate_docs)) => { + decl_docs.push('\n'); + decl_docs.push('\n'); + decl_docs += &crate_docs; + Some(decl_docs) } } + .map(Documentation::new) } - fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option> { - let crate_docs = self.resolved_crate(db)?.hir_docs(db); - let decl_docs = self.hir_docs(db); + fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option<(Documentation, DocsRangeMap)> { + let crate_docs = docs_with_rangemap(db, &self.resolved_crate(db)?.root_module().attrs(db)); + let decl_docs = docs_with_rangemap(db, &self.attrs(db)); match (decl_docs, crate_docs) { (None, None) => None, - (Some(docs), None) | (None, Some(docs)) => Some(Cow::Borrowed(docs)), - (Some(decl_docs), Some(crate_docs)) => { - let mut docs = decl_docs.clone(); - docs.append_str("\n\n"); - docs.append(crate_docs); - Some(Cow::Owned(docs)) + (Some(decl_docs), None) => Some(decl_docs), + (None, Some(crate_docs)) => Some(crate_docs), + ( + Some((Documentation(mut decl_docs), mut decl_range_map)), + Some((Documentation(crate_docs), crate_range_map)), + ) => { + decl_docs.push('\n'); + decl_docs.push('\n'); + let offset = TextSize::new(decl_docs.len() as u32); + decl_docs += &crate_docs; + let crate_range_map = crate_range_map.shift_docstring_line_range(offset); + decl_range_map.mapping.extend(crate_range_map.mapping); + Some((Documentation(decl_docs), decl_range_map)) } } } + fn resolve_doc_path( + self, + db: &dyn HirDatabase, + link: &str, + ns: Option, + is_inner_doc: bool, + 
) -> Option { + resolve_doc_path_on(db, self, link, ns, is_inner_doc) + } +} + +fn get_doc_string_in_attr(it: &ast::Attr) -> Option { + match it.expr() { + // #[doc = lit] + Some(ast::Expr::Literal(lit)) => match lit.kind() { + ast::LiteralKind::String(it) => Some(it), + _ => None, + }, + // #[cfg_attr(..., doc = "", ...)] + None => { + // FIXME: See highlight injection for what to do here + None + } + _ => None, + } +} + +fn doc_indent(attrs: &hir::Attrs) -> usize { + let mut min = !0; + for val in attrs.by_key(sym::doc).attrs().filter_map(|attr| attr.string_value_unescape()) { + if let Some(m) = + val.lines().filter_map(|line| line.chars().position(|c| !c.is_whitespace())).min() + { + min = min.min(m); + } + } + min } diff --git a/crates/ide-db/src/ra_fixture.rs b/crates/ide-db/src/ra_fixture.rs index cd86e7765196..1f056a835bc6 100644 --- a/crates/ide-db/src/ra_fixture.rs +++ b/crates/ide-db/src/ra_fixture.rs @@ -25,14 +25,18 @@ impl RootDatabase { // We don't want a mistake in the fixture to crash r-a, so we wrap this in `catch_unwind()`. 
std::panic::catch_unwind(|| { let mut db = RootDatabase::default(); - let fixture = - test_fixture::ChangeFixture::parse_with_proc_macros(text, minicore.0, Vec::new()); + let fixture = test_fixture::ChangeFixture::parse_with_proc_macros( + &db, + text, + minicore.0, + Vec::new(), + ); db.apply_change(fixture.change); let files = fixture .files .into_iter() .zip(fixture.file_lines) - .map(|(file_id, range)| (file_id.file_id(), range)) + .map(|(file_id, range)| (file_id.file_id(&db), range)) .collect(); (db, files, fixture.sysroot_files) }) @@ -521,7 +525,7 @@ impl_empty_upmap_from_ra_fixture!( &str, String, SmolStr, - Documentation<'_>, + Documentation, SymbolKind, CfgExpr, ReferenceCategory, diff --git a/crates/ide-db/src/rust_doc.rs b/crates/ide-db/src/rust_doc.rs index 36a6938af6b8..eacd9b9b4d2f 100644 --- a/crates/ide-db/src/rust_doc.rs +++ b/crates/ide-db/src/rust_doc.rs @@ -33,7 +33,7 @@ pub fn is_rust_fence(s: &str) -> bool { const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"]; -pub fn format_docs(src: &Documentation<'_>) -> String { +pub fn format_docs(src: &Documentation) -> String { format_docs_(src.as_str()) } diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs index 8b53cea7e6d3..f1d076e874d5 100644 --- a/crates/ide-db/src/search.rs +++ b/crates/ide-db/src/search.rs @@ -12,7 +12,7 @@ use either::Either; use hir::{ Adt, AsAssocItem, DefWithBody, EditionedFileId, FileRange, FileRangeWrapper, HasAttrs, HasContainer, HasSource, InFile, InFileWrapper, InRealFile, InlineAsmOperand, ItemContainer, - ModuleSource, PathResolution, Semantics, Visibility, + ModuleSource, PathResolution, Semantics, Visibility, sym, }; use memchr::memmem::Finder; use parser::SyntaxKind; @@ -169,7 +169,7 @@ impl SearchScope { entries.extend( source_root .iter() - .map(|id| (EditionedFileId::new(db, id, crate_data.edition, krate), None)), + .map(|id| (EditionedFileId::new(db, id, crate_data.edition), None)), ); } SearchScope { entries } @@ -183,9 +183,11 @@ impl 
SearchScope { let source_root = db.file_source_root(root_file).source_root_id(db); let source_root = db.source_root(source_root).source_root(db); - entries.extend(source_root.iter().map(|id| { - (EditionedFileId::new(db, id, rev_dep.edition(db), rev_dep.into()), None) - })); + entries.extend( + source_root + .iter() + .map(|id| (EditionedFileId::new(db, id, rev_dep.edition(db)), None)), + ); } SearchScope { entries } } @@ -199,7 +201,7 @@ impl SearchScope { SearchScope { entries: source_root .iter() - .map(|id| (EditionedFileId::new(db, id, of.edition(db), of.into()), None)) + .map(|id| (EditionedFileId::new(db, id, of.edition(db)), None)) .collect(), } } @@ -366,7 +368,7 @@ impl Definition { if let Definition::Macro(macro_def) = self { return match macro_def.kind(db) { hir::MacroKind::Declarative => { - if macro_def.attrs(db).is_macro_export() { + if macro_def.attrs(db).by_key(sym::macro_export).exists() { SearchScope::reverse_dependencies(db, module.krate()) } else { SearchScope::krate(db, module.krate()) diff --git a/crates/ide-db/src/test_data/test_doc_alias.txt b/crates/ide-db/src/test_data/test_doc_alias.txt index 427a51055948..30d1df4f8e55 100644 --- a/crates/ide-db/src/test_data/test_doc_alias.txt +++ b/crates/ide-db/src/test_data/test_doc_alias.txt @@ -3,7 +3,7 @@ Module { id: ModuleId { krate: Crate( - Id(2c00), + Id(3000), ), block: None, local_id: Idx::(0), @@ -16,7 +16,7 @@ Struct( Struct { id: StructId( - 3801, + 3401, ), }, ), @@ -24,7 +24,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { @@ -50,7 +50,7 @@ Struct( Struct { id: StructId( - 3800, + 3400, ), }, ), @@ -58,7 +58,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { @@ -84,7 +84,7 @@ Struct( Struct { id: StructId( - 3800, + 3400, ), }, ), @@ -92,7 +92,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: 
SyntaxNodePtr { @@ -118,7 +118,7 @@ Struct( Struct { id: StructId( - 3800, + 3400, ), }, ), @@ -126,7 +126,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { @@ -152,7 +152,7 @@ Struct( Struct { id: StructId( - 3800, + 3400, ), }, ), @@ -160,7 +160,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { @@ -186,7 +186,7 @@ Struct( Struct { id: StructId( - 3801, + 3401, ), }, ), @@ -194,7 +194,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { @@ -220,7 +220,7 @@ Struct( Struct { id: StructId( - 3800, + 3400, ), }, ), @@ -228,7 +228,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { diff --git a/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/crates/ide-db/src/test_data/test_symbol_index_collection.txt index ce93fa59e258..973256c470f3 100644 --- a/crates/ide-db/src/test_data/test_symbol_index_collection.txt +++ b/crates/ide-db/src/test_data/test_symbol_index_collection.txt @@ -3,7 +3,7 @@ Module { id: ModuleId { krate: Crate( - Id(2c00), + Id(3000), ), block: None, local_id: Idx::(0), @@ -22,7 +22,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { @@ -49,14 +49,14 @@ def: TypeAlias( TypeAlias { id: TypeAliasId( - 6c00, + 6800, ), }, ), loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { @@ -88,7 +88,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { @@ -115,14 +115,14 @@ def: Const( Const { id: ConstId( - 6400, + 6000, ), }, ), loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { @@ -147,14 +147,14 @@ def: Const( Const 
{ id: ConstId( - 6402, + 6002, ), }, ), loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { @@ -180,7 +180,7 @@ Enum( Enum { id: EnumId( - 5000, + 4c00, ), }, ), @@ -188,7 +188,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { @@ -214,7 +214,7 @@ Macro { id: Macro2Id( Macro2Id( - 4c00, + 4800, ), ), }, @@ -222,7 +222,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { @@ -248,7 +248,7 @@ Macro { id: Macro2Id( Macro2Id( - 4c00, + 4800, ), ), }, @@ -256,7 +256,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { @@ -281,14 +281,14 @@ def: Static( Static { id: StaticId( - 6800, + 6400, ), }, ), loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { @@ -314,7 +314,7 @@ Struct( Struct { id: StructId( - 4801, + 4401, ), }, ), @@ -322,7 +322,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { @@ -348,7 +348,7 @@ Struct( Struct { id: StructId( - 4800, + 4400, ), }, ), @@ -356,7 +356,7 @@ loc: DeclarationLocation { hir_file_id: MacroFile( MacroCallId( - Id(3c00), + Id(3800), ), ), ptr: SyntaxNodePtr { @@ -382,7 +382,7 @@ Struct( Struct { id: StructId( - 4805, + 4405, ), }, ), @@ -390,7 +390,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { @@ -418,7 +418,7 @@ Struct( Struct { id: StructId( - 4806, + 4406, ), }, ), @@ -426,7 +426,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { @@ -454,7 +454,7 @@ Struct( Struct { id: StructId( - 4807, + 4407, ), }, ), @@ -462,7 +462,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - 
Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { @@ -488,7 +488,7 @@ Struct( Struct { id: StructId( - 4802, + 4402, ), }, ), @@ -496,7 +496,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { @@ -521,14 +521,14 @@ def: Trait( Trait { id: TraitId( - 5c00, + 5800, ), }, ), loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { @@ -554,7 +554,7 @@ Macro { id: Macro2Id( Macro2Id( - 4c00, + 4800, ), ), }, @@ -562,7 +562,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { @@ -588,7 +588,7 @@ Union( Union { id: UnionId( - 5400, + 5000, ), }, ), @@ -596,7 +596,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { @@ -622,7 +622,7 @@ Module { id: ModuleId { krate: Crate( - Id(2c00), + Id(3000), ), block: None, local_id: Idx::(1), @@ -632,7 +632,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { @@ -658,7 +658,7 @@ Module { id: ModuleId { krate: Crate( - Id(2c00), + Id(3000), ), block: None, local_id: Idx::(2), @@ -668,7 +668,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { @@ -694,7 +694,7 @@ Macro { id: MacroRulesId( MacroRulesId( - 3801, + 3401, ), ), }, @@ -702,7 +702,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { @@ -727,14 +727,14 @@ def: Function( Function { id: FunctionId( - 6002, + 5c02, ), }, ), loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { @@ -761,14 +761,14 @@ def: Function( Function { id: FunctionId( - 6001, + 5c01, ), }, ), loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), 
ptr: SyntaxNodePtr { @@ -796,7 +796,7 @@ Macro { id: MacroRulesId( MacroRulesId( - 3800, + 3400, ), ), }, @@ -804,7 +804,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { @@ -829,14 +829,14 @@ def: Function( Function { id: FunctionId( - 6000, + 5c00, ), }, ), loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { @@ -862,7 +862,7 @@ Macro { id: MacroRulesId( MacroRulesId( - 3801, + 3401, ), ), }, @@ -870,7 +870,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { @@ -895,14 +895,14 @@ def: Function( Function { id: FunctionId( - 6003, + 5c03, ), }, ), loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { @@ -930,7 +930,7 @@ Module { id: ModuleId { krate: Crate( - Id(2c00), + Id(3000), ), block: None, local_id: Idx::(1), @@ -943,7 +943,7 @@ Struct( Struct { id: StructId( - 4803, + 4403, ), }, ), @@ -951,7 +951,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { @@ -977,7 +977,7 @@ Module { id: ModuleId { krate: Crate( - Id(2c00), + Id(3000), ), block: None, local_id: Idx::(2), @@ -989,14 +989,14 @@ def: Trait( Trait { id: TraitId( - 5c00, + 5800, ), }, ), loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3001), + Id(2001), ), ), ptr: SyntaxNodePtr { @@ -1022,7 +1022,7 @@ Macro { id: Macro2Id( Macro2Id( - 4c00, + 4800, ), ), }, @@ -1030,7 +1030,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3001), + Id(2001), ), ), ptr: SyntaxNodePtr { @@ -1056,7 +1056,7 @@ Struct( Struct { id: StructId( - 4804, + 4404, ), }, ), @@ -1064,7 +1064,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3001), + Id(2001), ), ), ptr: SyntaxNodePtr { @@ -1090,7 +1090,7 @@ Macro { id: Macro2Id( 
Macro2Id( - 4c00, + 4800, ), ), }, @@ -1098,7 +1098,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3001), + Id(2001), ), ), ptr: SyntaxNodePtr { @@ -1124,7 +1124,7 @@ Struct( Struct { id: StructId( - 4804, + 4404, ), }, ), @@ -1132,7 +1132,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3001), + Id(2001), ), ), ptr: SyntaxNodePtr { diff --git a/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt b/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt index 3ab837aa613f..22872b577f71 100644 --- a/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt +++ b/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt @@ -13,7 +13,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3001), + Id(2001), ), ), ptr: SyntaxNodePtr { diff --git a/crates/ide-db/src/test_data/test_symbols_with_imports.txt b/crates/ide-db/src/test_data/test_symbols_with_imports.txt index a6a808d616a7..9f98bf87e2e8 100644 --- a/crates/ide-db/src/test_data/test_symbols_with_imports.txt +++ b/crates/ide-db/src/test_data/test_symbols_with_imports.txt @@ -13,7 +13,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3001), + Id(2001), ), ), ptr: SyntaxNodePtr { @@ -47,7 +47,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(3000), + Id(2000), ), ), ptr: SyntaxNodePtr { diff --git a/crates/ide-db/src/traits.rs b/crates/ide-db/src/traits.rs index 7b9fdb1e1cf3..61e28386d072 100644 --- a/crates/ide-db/src/traits.rs +++ b/crates/ide-db/src/traits.rs @@ -114,7 +114,8 @@ fn assoc_item_of_trait( #[cfg(test)] mod tests { use expect_test::{Expect, expect}; - use hir::{EditionedFileId, FilePosition, Semantics}; + use hir::FilePosition; + use hir::Semantics; use span::Edition; use syntax::ast::{self, AstNode}; use test_fixture::ChangeFixture; @@ -126,11 +127,10 @@ mod tests { #[rust_analyzer::rust_fixture] ra_fixture: &str, ) -> (RootDatabase, FilePosition) { let mut 
database = RootDatabase::default(); - let change_fixture = ChangeFixture::parse(ra_fixture); + let change_fixture = ChangeFixture::parse(&database, ra_fixture); database.apply_change(change_fixture.change); let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); - let file_id = EditionedFileId::from_span_guess_origin(&database, file_id); let offset = range_or_offset.expect_offset(); (database, FilePosition { file_id, offset }) } diff --git a/crates/ide-diagnostics/src/handlers/inactive_code.rs b/crates/ide-diagnostics/src/handlers/inactive_code.rs index dfa9639f6eb9..8611ef653b02 100644 --- a/crates/ide-diagnostics/src/handlers/inactive_code.rs +++ b/crates/ide-diagnostics/src/handlers/inactive_code.rs @@ -95,7 +95,7 @@ fn f() { //^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: no is disabled #[cfg(no)] #[cfg(no2)] mod m; - //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: no is disabled + //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: no and no2 are disabled #[cfg(all(not(a), b))] enum E {} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: b is disabled @@ -130,6 +130,7 @@ trait Bar { /// Tests that `cfg` attributes behind `cfg_attr` is handled properly. 
#[test] fn inactive_via_cfg_attr() { + cov_mark::check!(cfg_attr_active); check( r#" #[cfg_attr(not(never), cfg(no))] fn f() {} diff --git a/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs b/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs index 9aa7aed16964..8b708f229d00 100644 --- a/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs +++ b/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs @@ -8,7 +8,7 @@ pub(crate) fn invalid_derive_target( ctx: &DiagnosticsContext<'_>, d: &hir::InvalidDeriveTarget, ) -> Diagnostic { - let display_range = ctx.sema.diagnostics_display_range_for_range(d.range); + let display_range = ctx.sema.diagnostics_display_range(d.node); Diagnostic::new( DiagnosticCode::RustcHardError("E0774"), @@ -29,7 +29,7 @@ mod tests { //- minicore:derive mod __ { #[derive()] - // ^^^^^^ error: `derive` may only be applied to `struct`s, `enum`s and `union`s + //^^^^^^^^^^^ error: `derive` may only be applied to `struct`s, `enum`s and `union`s fn main() {} } "#, diff --git a/crates/ide-diagnostics/src/handlers/macro_error.rs b/crates/ide-diagnostics/src/handlers/macro_error.rs index a44b043f433c..6a1ecae65150 100644 --- a/crates/ide-diagnostics/src/handlers/macro_error.rs +++ b/crates/ide-diagnostics/src/handlers/macro_error.rs @@ -13,7 +13,7 @@ use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, Severity}; // This diagnostic is shown for proc macros that have been specifically disabled via `rust-analyzer.procMacro.ignored`. pub(crate) fn macro_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroError) -> Diagnostic { // Use more accurate position if available. 
- let display_range = ctx.sema.diagnostics_display_range_for_range(d.range); + let display_range = ctx.resolve_precise_location(&d.node, d.precise_location); Diagnostic::new( DiagnosticCode::Ra(d.kind, if d.error { Severity::Error } else { Severity::WeakWarning }), d.message.clone(), @@ -27,10 +27,8 @@ pub(crate) fn macro_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroError) -> // This diagnostic is shown for macro expansion errors. pub(crate) fn macro_def_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroDefError) -> Diagnostic { // Use more accurate position if available. - let display_range = match d.name { - Some(name) => ctx.sema.diagnostics_display_range_for_range(d.node.with_value(name)), - None => ctx.sema.diagnostics_display_range(d.node.map(|it| it.syntax_node_ptr())), - }; + let display_range = + ctx.resolve_precise_location(&d.node.map(|it| it.syntax_node_ptr()), d.name); Diagnostic::new( DiagnosticCode::Ra("macro-def-error", Severity::Error), d.message.clone(), @@ -137,12 +135,10 @@ macro_rules! env { () => {} } #[rustc_builtin_macro] macro_rules! 
concat { () => {} } - include!(concat!( - // ^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run - env!( - //^^^ error: `OUT_DIR` not set, build scripts may have failed to run - "OUT_DIR"), "/out.rs")); - //^^^^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run + include!(concat!(env!("OUT_DIR"), "/out.rs")); + //^^^^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run + //^^^^^^^^^^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run + //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run "#, ); } @@ -186,7 +182,7 @@ fn main() { //^^^^^^^^^^^^^^^^ error: failed to load file `does not exist` include!(concat!("does ", "not ", "exist")); - // ^^^^^^^^^^^^^^^^^^^^^^^^ error: failed to load file `does not exist` + //^^^^^^^^^^^^^^^^^^^^^^^^^^ error: failed to load file `does not exist` env!(invalid); //^^^^^^^ error: expected string literal @@ -293,7 +289,7 @@ include!("include-me.rs"); //- /include-me.rs /// long doc that pushes the diagnostic range beyond the first file's text length #[err] - // ^^^ error: unresolved macro `err` +//^^^^^^error: unresolved macro `err` mod prim_never {} "#, ); diff --git a/crates/ide-diagnostics/src/handlers/malformed_derive.rs b/crates/ide-diagnostics/src/handlers/malformed_derive.rs index 7d0c71f4fa7c..701b30b9b593 100644 --- a/crates/ide-diagnostics/src/handlers/malformed_derive.rs +++ b/crates/ide-diagnostics/src/handlers/malformed_derive.rs @@ -7,7 +7,7 @@ pub(crate) fn malformed_derive( ctx: &DiagnosticsContext<'_>, d: &hir::MalformedDerive, ) -> Diagnostic { - let display_range = ctx.sema.diagnostics_display_range_for_range(d.range); + let display_range = ctx.sema.diagnostics_display_range(d.node); Diagnostic::new( DiagnosticCode::RustcHardError("E0777"), @@ -28,7 +28,7 @@ mod tests { //- minicore:derive mod __ { #[derive = "aaaa"] - // ^^^^^^^^^^^^^^^ error: malformed derive input, derive attributes are of the form 
`#[derive(Derive1, Derive2, ...)]` + //^^^^^^^^^^^^^^^^^^ error: malformed derive input, derive attributes are of the form `#[derive(Derive1, Derive2, ...)]` struct Foo; } "#, diff --git a/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs b/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs index 030c82ca0ba7..a87b8c42ac1d 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs @@ -8,7 +8,8 @@ pub(crate) fn unresolved_macro_call( ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMacroCall, ) -> Diagnostic { - let display_range = ctx.sema.diagnostics_display_range_for_range(d.range); + // Use more accurate position if available. + let display_range = ctx.resolve_precise_location(&d.macro_call, d.precise_location); let bang = if d.is_bang { "!" } else { "" }; Diagnostic::new( DiagnosticCode::RustcHardError("unresolved-macro-call"), @@ -75,7 +76,7 @@ self::m!(); self::m2!(); r#" mod _test_inner { #![empty_attr] - // ^^^^^^^^^^ error: unresolved macro `empty_attr` + //^^^^^^^^^^^^^^ error: unresolved macro `empty_attr` } "#, ); diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs index 5c8f030de4de..1530e6465246 100644 --- a/crates/ide-diagnostics/src/lib.rs +++ b/crates/ide-diagnostics/src/lib.rs @@ -102,7 +102,7 @@ use ide_db::{ use itertools::Itertools; use syntax::{ AstPtr, Edition, NodeOrToken, SmolStr, SyntaxKind, SyntaxNode, SyntaxNodePtr, T, TextRange, - ast::{self, AstNode}, + ast::{self, AstNode, HasAttrs}, }; // FIXME: Make this an enum @@ -277,6 +277,31 @@ struct DiagnosticsContext<'a> { is_nightly: bool, } +impl DiagnosticsContext<'_> { + fn resolve_precise_location( + &self, + node: &InFile, + precise_location: Option, + ) -> FileRange { + let sema = &self.sema; + (|| { + let precise_location = precise_location?; + let root = sema.parse_or_expand(node.file_id); + match root.covering_element(precise_location) { + 
syntax::NodeOrToken::Node(it) => Some(sema.original_range(&it)), + syntax::NodeOrToken::Token(it) => { + node.with_value(it).original_file_range_opt(sema.db) + } + } + })() + .map(|frange| ide_db::FileRange { + file_id: frange.file_id.file_id(self.sema.db), + range: frange.range, + }) + .unwrap_or_else(|| sema.diagnostics_display_range(*node)) + } +} + /// Request parser level diagnostics for the given [`FileId`]. pub fn syntax_diagnostics( db: &RootDatabase, @@ -292,7 +317,7 @@ pub fn syntax_diagnostics( let sema = Semantics::new(db); let editioned_file_id = sema .attach_first_edition(file_id) - .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, file_id)); + .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id)); let (file_id, _) = editioned_file_id.unpack(db); @@ -323,7 +348,7 @@ pub fn semantic_diagnostics( let sema = Semantics::new(db); let editioned_file_id = sema .attach_first_edition(file_id) - .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, file_id)); + .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id)); let (file_id, edition) = editioned_file_id.unpack(db); let mut res = Vec::new(); @@ -401,7 +426,7 @@ pub fn semantic_diagnostics( Diagnostic::new( DiagnosticCode::SyntaxError, format!("Syntax Error in Expansion: {err}"), - ctx.sema.diagnostics_display_range_for_range(d.range), + ctx.resolve_precise_location(&d.node.clone(), d.precise_location), ) })); continue; @@ -652,7 +677,7 @@ fn find_outline_mod_lint_severity( let lint_groups = lint_groups(&diag.code, edition); lint_attrs( sema, - &ast::AnyHasAttrs::cast(module_source_file.value).expect("SourceFile always has attrs"), + ast::AnyHasAttrs::cast(module_source_file.value).expect("SourceFile always has attrs"), edition, ) .for_each(|(lint, severity)| { @@ -673,7 +698,7 @@ fn lint_severity_at( .ancestors() .filter_map(ast::AnyHasAttrs::cast) .find_map(|ancestor| { - lint_attrs(sema, &ancestor, edition) + lint_attrs(sema, ancestor, edition) 
.find_map(|(lint, severity)| lint_groups.contains(&lint).then_some(severity)) }) .or_else(|| { @@ -681,13 +706,13 @@ fn lint_severity_at( }) } -// FIXME: Switch this to analysis' `expand_cfg_attr`. fn lint_attrs<'a>( sema: &'a Semantics<'a, RootDatabase>, - ancestor: &'a ast::AnyHasAttrs, + ancestor: ast::AnyHasAttrs, edition: Edition, ) -> impl Iterator + 'a { - ast::attrs_including_inner(ancestor) + ancestor + .attrs_including_inner() .filter_map(|attr| { attr.as_simple_call().and_then(|(name, value)| match &*name { "allow" | "expect" => Some(Either::Left(iter::once((Severity::Allow, value)))), diff --git a/crates/ide-ssr/src/from_comment.rs b/crates/ide-ssr/src/from_comment.rs index de26879c2959..181cc74a51d4 100644 --- a/crates/ide-ssr/src/from_comment.rs +++ b/crates/ide-ssr/src/from_comment.rs @@ -17,7 +17,7 @@ pub fn ssr_from_comment( frange: FileRange, ) -> Option<(MatchFinder<'_>, TextRange)> { let comment = { - let file_id = EditionedFileId::current_edition_guess_origin(db, frange.file_id); + let file_id = EditionedFileId::current_edition(db, frange.file_id); let file = db.parse(file_id); file.tree().syntax().token_at_offset(frange.range.start()).find_map(ast::Comment::cast) diff --git a/crates/ide-ssr/src/lib.rs b/crates/ide-ssr/src/lib.rs index 7b2142a9f348..43ad12c1f699 100644 --- a/crates/ide-ssr/src/lib.rs +++ b/crates/ide-ssr/src/lib.rs @@ -125,9 +125,9 @@ impl<'db> MatchFinder<'db> { ) -> Result, SsrError> { restrict_ranges.retain(|range| !range.range.is_empty()); let sema = Semantics::new(db); - let file_id = sema.attach_first_edition(lookup_context.file_id).unwrap_or_else(|| { - EditionedFileId::current_edition_guess_origin(db, lookup_context.file_id) - }); + let file_id = sema + .attach_first_edition(lookup_context.file_id) + .unwrap_or_else(|| EditionedFileId::current_edition(db, lookup_context.file_id)); let resolution_scope = resolving::ResolutionScope::new( &sema, hir::FilePosition { file_id, offset: lookup_context.offset }, diff --git 
a/crates/ide-ssr/src/search.rs b/crates/ide-ssr/src/search.rs index d23d22b4e898..72f857ceda90 100644 --- a/crates/ide-ssr/src/search.rs +++ b/crates/ide-ssr/src/search.rs @@ -135,9 +135,11 @@ impl<'db> MatchFinder<'db> { // seems to get put into a single source root. let mut files = Vec::new(); self.search_files_do(|file_id| { - files.push(self.sema.attach_first_edition(file_id).unwrap_or_else(|| { - EditionedFileId::current_edition_guess_origin(self.sema.db, file_id) - })); + files.push( + self.sema + .attach_first_edition(file_id) + .unwrap_or_else(|| EditionedFileId::current_edition(self.sema.db, file_id)), + ); }); SearchScope::files(&files) } diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs index 0ed91cf7f588..c197d559aa89 100644 --- a/crates/ide/src/doc_links.rs +++ b/crates/ide/src/doc_links.rs @@ -13,13 +13,13 @@ use stdx::format_to; use url::Url; use hir::{ - Adt, AsAssocItem, AssocItem, AssocItemContainer, AttrsWithOwner, HasAttrs, db::HirDatabase, + Adt, AsAssocItem, AssocItem, AssocItemContainer, AttrsWithOwner, HasAttrs, db::HirDatabase, sym, }; use ide_db::{ RootDatabase, base_db::{CrateOrigin, LangCrateOrigin, ReleaseChannel, RootQueryDb}, defs::{Definition, NameClass, NameRefClass}, - documentation::{Documentation, HasDocs}, + documentation::{DocsRangeMap, Documentation, HasDocs, docs_with_rangemap}, helpers::pick_best_token, }; use syntax::{ @@ -54,7 +54,7 @@ pub(crate) fn rewrite_links( db: &RootDatabase, markdown: &str, definition: Definition, - range_map: Option<&hir::Docs>, + range_map: Option, ) -> String { let mut cb = broken_link_clone_cb; let doc = Parser::new_with_broken_link_callback(markdown, MARKDOWN_OPTIONS, Some(&mut cb)) @@ -74,9 +74,9 @@ pub(crate) fn rewrite_links( TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap()); let is_inner_doc = range_map .as_ref() - .and_then(|range_map| range_map.find_ast_range(text_range)) - .map(|(_, is_inner)| is_inner) - .unwrap_or(hir::IsInnerDoc::No); 
+ .and_then(|range_map| range_map.map(text_range)) + .map(|(_, attr_id)| attr_id.is_inner_attr()) + .unwrap_or(false); if let Some((target, title)) = rewrite_intra_doc_link(db, definition, target, title, is_inner_doc, link_type) { @@ -187,7 +187,7 @@ pub(crate) fn external_docs( /// Extracts all links from a given markdown text returning the definition text range, link-text /// and the namespace if known. pub(crate) fn extract_definitions_from_docs( - docs: &Documentation<'_>, + docs: &Documentation, ) -> Vec<(TextRange, String, Option)> { Parser::new_with_broken_link_callback( docs.as_str(), @@ -214,7 +214,7 @@ pub(crate) fn resolve_doc_path_for_def( def: Definition, link: &str, ns: Option, - is_inner_doc: hir::IsInnerDoc, + is_inner_doc: bool, ) -> Option { match def { Definition::Module(it) => it.resolve_doc_path(db, link, ns, is_inner_doc), @@ -324,11 +324,11 @@ impl DocCommentToken { let token_start = t.text_range().start(); let abs_in_expansion_offset = token_start + relative_comment_offset + descended_prefix_len; let (attributes, def) = Self::doc_attributes(sema, &node, is_inner)?; - let doc_mapping = attributes.hir_docs(sema.db)?; + let (docs, doc_mapping) = docs_with_rangemap(sema.db, &attributes)?; let (in_expansion_range, link, ns, is_inner) = - extract_definitions_from_docs(&Documentation::new_borrowed(doc_mapping.docs())).into_iter().find_map(|(range, link, ns)| { - let (mapped, is_inner) = doc_mapping.find_ast_range(range)?; - (mapped.value.contains(abs_in_expansion_offset)).then_some((mapped.value, link, ns, is_inner)) + extract_definitions_from_docs(&docs).into_iter().find_map(|(range, link, ns)| { + let (mapped, idx) = doc_mapping.map(range)?; + (mapped.value.contains(abs_in_expansion_offset)).then_some((mapped.value, link, ns, idx.is_inner_attr())) })?; // get the relative range to the doc/attribute in the expansion let in_expansion_relative_range = in_expansion_range - descended_prefix_len - token_start; @@ -416,7 +416,7 @@ fn 
rewrite_intra_doc_link( def: Definition, target: &str, title: &str, - is_inner_doc: hir::IsInnerDoc, + is_inner_doc: bool, link_type: LinkType, ) -> Option<(String, String)> { let (link, ns) = parse_intra_doc_link(target); @@ -659,12 +659,14 @@ fn filename_and_frag_for_def( Definition::Crate(_) => String::from("index.html"), Definition::Module(m) => match m.name(db) { // `#[doc(keyword = "...")]` is internal used only by rust compiler - Some(name) => match m.doc_keyword(db) { - Some(kw) => { - format!("keyword.{kw}.html") + Some(name) => { + match m.attrs(db).by_key(sym::doc).find_string_value_in_tt(sym::keyword) { + Some(kw) => { + format!("keyword.{kw}.html") + } + None => format!("{}/index.html", name.as_str()), } - None => format!("{}/index.html", name.as_str()), - }, + } None => String::from("index.html"), }, Definition::Trait(t) => { diff --git a/crates/ide/src/doc_links/tests.rs b/crates/ide/src/doc_links/tests.rs index 34ffc11c4b5f..3fd885535a23 100644 --- a/crates/ide/src/doc_links/tests.rs +++ b/crates/ide/src/doc_links/tests.rs @@ -1,11 +1,11 @@ -use std::{borrow::Cow, iter}; +use std::iter; use expect_test::{Expect, expect}; use hir::Semantics; use ide_db::{ FilePosition, FileRange, RootDatabase, defs::Definition, - documentation::{Documentation, HasDocs}, + documentation::{DocsRangeMap, Documentation, HasDocs}, }; use itertools::Itertools; use syntax::{AstNode, SyntaxNode, ast, match_ast}; @@ -45,9 +45,9 @@ fn check_external_docs( fn check_rewrite(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) { let (analysis, position) = fixture::position(ra_fixture); let sema = &Semantics::new(&analysis.db); - let (cursor_def, docs) = def_under_cursor(sema, &position); + let (cursor_def, docs, range) = def_under_cursor(sema, &position); let res = - hir::attach_db(sema.db, || rewrite_links(sema.db, docs.docs(), cursor_def, Some(&docs))); + hir::attach_db(sema.db, || rewrite_links(sema.db, docs.as_str(), cursor_def, Some(range))); 
expect.assert_eq(&res) } @@ -57,36 +57,33 @@ fn check_doc_links(#[rust_analyzer::rust_fixture] ra_fixture: &str) { let (analysis, position, mut expected) = fixture::annotations(ra_fixture); expected.sort_by_key(key_fn); let sema = &Semantics::new(&analysis.db); - hir::attach_db(sema.db, || { - let (cursor_def, docs) = def_under_cursor(sema, &position); - let defs = extract_definitions_from_docs(&Documentation::new_borrowed(docs.docs())); - let actual: Vec<_> = defs - .into_iter() - .flat_map(|(text_range, link, ns)| { - let attr = docs.find_ast_range(text_range); - let is_inner_attr = - attr.map(|(_file, is_inner)| is_inner).unwrap_or(hir::IsInnerDoc::No); - let def = resolve_doc_path_for_def(sema.db, cursor_def, &link, ns, is_inner_attr) - .unwrap_or_else(|| panic!("Failed to resolve {link}")); - def.try_to_nav(sema).unwrap().into_iter().zip(iter::repeat(link)) - }) - .map(|(nav_target, link)| { - let range = FileRange { - file_id: nav_target.file_id, - range: nav_target.focus_or_full_range(), - }; - (range, link) - }) - .sorted_by_key(key_fn) - .collect(); - assert_eq!(expected, actual); - }); -} - -fn def_under_cursor<'db>( - sema: &Semantics<'db, RootDatabase>, + let (cursor_def, docs, range) = def_under_cursor(sema, &position); + let defs = extract_definitions_from_docs(&docs); + let actual: Vec<_> = defs + .into_iter() + .flat_map(|(text_range, link, ns)| { + let attr = range.map(text_range); + let is_inner_attr = attr.map(|(_file, attr)| attr.is_inner_attr()).unwrap_or(false); + let def = hir::attach_db(sema.db, || { + resolve_doc_path_for_def(sema.db, cursor_def, &link, ns, is_inner_attr) + .unwrap_or_else(|| panic!("Failed to resolve {link}")) + }); + def.try_to_nav(sema).unwrap().into_iter().zip(iter::repeat(link)) + }) + .map(|(nav_target, link)| { + let range = + FileRange { file_id: nav_target.file_id, range: nav_target.focus_or_full_range() }; + (range, link) + }) + .sorted_by_key(key_fn) + .collect(); + assert_eq!(expected, actual); +} + +fn 
def_under_cursor( + sema: &Semantics<'_, RootDatabase>, position: &FilePosition, -) -> (Definition, Cow<'db, hir::Docs>) { +) -> (Definition, Documentation, DocsRangeMap) { let (docs, def) = sema .parse_guess_edition(position.file_id) .syntax() @@ -97,14 +94,14 @@ fn def_under_cursor<'db>( .find_map(|it| node_to_def(sema, &it)) .expect("no def found") .unwrap(); - let docs = docs.expect("no docs found for cursor def"); - (def, docs) + let (docs, range) = docs.expect("no docs found for cursor def"); + (def, docs, range) } -fn node_to_def<'db>( - sema: &Semantics<'db, RootDatabase>, +fn node_to_def( + sema: &Semantics<'_, RootDatabase>, node: &SyntaxNode, -) -> Option>, Definition)>> { +) -> Option, Definition)>> { Some(match_ast! { match node { ast::SourceFile(it) => sema.to_def(&it).map(|def| (def.docs_with_rangemap(sema.db), Definition::Module(def))), diff --git a/crates/ide/src/fixture.rs b/crates/ide/src/fixture.rs index 1a8591d25dca..fbf89042fae1 100644 --- a/crates/ide/src/fixture.rs +++ b/crates/ide/src/fixture.rs @@ -7,10 +7,10 @@ use crate::{Analysis, AnalysisHost, FileId, FilePosition, FileRange}; /// Creates analysis for a single file. pub(crate) fn file(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (Analysis, FileId) { let mut host = AnalysisHost::default(); - let change_fixture = ChangeFixture::parse(ra_fixture); + let change_fixture = ChangeFixture::parse(&host.db, ra_fixture); host.db.enable_proc_attr_macros(); host.db.apply_change(change_fixture.change); - (host.analysis(), change_fixture.files[0].file_id()) + (host.analysis(), change_fixture.files[0].file_id(&host.db)) } /// Creates analysis from a multi-file fixture, returns positions marked with $0. 
@@ -18,23 +18,23 @@ pub(crate) fn position( #[rust_analyzer::rust_fixture] ra_fixture: &str, ) -> (Analysis, FilePosition) { let mut host = AnalysisHost::default(); - let change_fixture = ChangeFixture::parse(ra_fixture); + let change_fixture = ChangeFixture::parse(&host.db, ra_fixture); host.db.enable_proc_attr_macros(); host.db.apply_change(change_fixture.change); let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); let offset = range_or_offset.expect_offset(); - (host.analysis(), FilePosition { file_id: file_id.file_id(), offset }) + (host.analysis(), FilePosition { file_id: file_id.file_id(&host.db), offset }) } /// Creates analysis for a single file, returns range marked with a pair of $0. pub(crate) fn range(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (Analysis, FileRange) { let mut host = AnalysisHost::default(); - let change_fixture = ChangeFixture::parse(ra_fixture); + let change_fixture = ChangeFixture::parse(&host.db, ra_fixture); host.db.enable_proc_attr_macros(); host.db.apply_change(change_fixture.change); let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); let range = range_or_offset.expect_range(); - (host.analysis(), FileRange { file_id: file_id.file_id(), range }) + (host.analysis(), FileRange { file_id: file_id.file_id(&host.db), range }) } /// Creates analysis for a single file, returns range marked with a pair of $0 or a position marked with $0. 
@@ -42,11 +42,11 @@ pub(crate) fn range_or_position( #[rust_analyzer::rust_fixture] ra_fixture: &str, ) -> (Analysis, FileId, RangeOrOffset) { let mut host = AnalysisHost::default(); - let change_fixture = ChangeFixture::parse(ra_fixture); + let change_fixture = ChangeFixture::parse(&host.db, ra_fixture); host.db.enable_proc_attr_macros(); host.db.apply_change(change_fixture.change); let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); - (host.analysis(), file_id.file_id(), range_or_offset) + (host.analysis(), file_id.file_id(&host.db), range_or_offset) } /// Creates analysis from a multi-file fixture, returns positions marked with $0. @@ -54,24 +54,25 @@ pub(crate) fn annotations( #[rust_analyzer::rust_fixture] ra_fixture: &str, ) -> (Analysis, FilePosition, Vec<(FileRange, String)>) { let mut host = AnalysisHost::default(); - let change_fixture = ChangeFixture::parse(ra_fixture); + let change_fixture = ChangeFixture::parse(&host.db, ra_fixture); host.db.enable_proc_attr_macros(); host.db.apply_change(change_fixture.change); let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); let offset = range_or_offset.expect_offset(); + let db = &host.db; let annotations = change_fixture .files .iter() .flat_map(|&file_id| { - let file_text = host.analysis().file_text(file_id.file_id()).unwrap(); + let file_text = host.analysis().file_text(file_id.file_id(&host.db)).unwrap(); let annotations = extract_annotations(&file_text); annotations .into_iter() - .map(move |(range, data)| (FileRange { file_id: file_id.file_id(), range }, data)) + .map(move |(range, data)| (FileRange { file_id: file_id.file_id(db), range }, data)) }) .collect(); - (host.analysis(), FilePosition { file_id: file_id.file_id(), offset }, annotations) + (host.analysis(), FilePosition { file_id: file_id.file_id(&host.db), offset }, annotations) } /// Creates analysis from a multi-file fixture with annotations without $0 @@ 
-79,19 +80,20 @@ pub(crate) fn annotations_without_marker( #[rust_analyzer::rust_fixture] ra_fixture: &str, ) -> (Analysis, Vec<(FileRange, String)>) { let mut host = AnalysisHost::default(); - let change_fixture = ChangeFixture::parse(ra_fixture); + let change_fixture = ChangeFixture::parse(&host.db, ra_fixture); host.db.enable_proc_attr_macros(); host.db.apply_change(change_fixture.change); + let db = &host.db; let annotations = change_fixture .files .iter() .flat_map(|&file_id| { - let file_text = host.analysis().file_text(file_id.file_id()).unwrap(); + let file_text = host.analysis().file_text(file_id.file_id(db)).unwrap(); let annotations = extract_annotations(&file_text); annotations .into_iter() - .map(move |(range, data)| (FileRange { file_id: file_id.file_id(), range }, data)) + .map(move |(range, data)| (FileRange { file_id: file_id.file_id(db), range }, data)) }) .collect(); (host.analysis(), annotations) diff --git a/crates/ide/src/goto_implementation.rs b/crates/ide/src/goto_implementation.rs index cc333d66caf3..875403c4e32a 100644 --- a/crates/ide/src/goto_implementation.rs +++ b/crates/ide/src/goto_implementation.rs @@ -355,7 +355,7 @@ trait Bar {} fn test() { #[derive(Copy)] - // ^^^^^^^^^^^^ + //^^^^^^^^^^^^^^^ struct Foo$0; impl Foo {} diff --git a/crates/ide/src/highlight_related.rs b/crates/ide/src/highlight_related.rs index f7870032ea28..04ce5a7567f3 100644 --- a/crates/ide/src/highlight_related.rs +++ b/crates/ide/src/highlight_related.rs @@ -62,7 +62,7 @@ pub(crate) fn highlight_related( let _p = tracing::info_span!("highlight_related").entered(); let file_id = sema .attach_first_edition(file_id) - .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(sema.db, file_id)); + .unwrap_or_else(|| EditionedFileId::current_edition(sema.db, file_id)); let syntax = sema.parse(file_id).syntax().clone(); let token = pick_best_token(syntax.token_at_offset(offset), |kind| match kind { diff --git a/crates/ide/src/hover/render.rs 
b/crates/ide/src/hover/render.rs index 5bdfb5735658..a1eff3aaee78 100644 --- a/crates/ide/src/hover/render.rs +++ b/crates/ide/src/hover/render.rs @@ -1,5 +1,5 @@ //! Logic for rendering the different hover messages -use std::{borrow::Cow, env, mem, ops::Not}; +use std::{env, mem, ops::Not}; use either::Either; use hir::{ @@ -11,7 +11,7 @@ use hir::{ use ide_db::{ RootDatabase, defs::{Definition, find_std_module}, - documentation::{Documentation, HasDocs}, + documentation::{DocsRangeMap, HasDocs}, famous_defs::FamousDefs, generated::lints::{CLIPPY_LINTS, DEFAULT_LINTS, FEATURES}, syntax_helpers::prettify_macro_expansion, @@ -278,9 +278,9 @@ pub(super) fn keyword( keyword_hints(sema, token, parent, edition, display_target); let doc_owner = find_std_module(&famous_defs, &keyword_mod, edition)?; - let docs = doc_owner.docs_with_rangemap(sema.db)?; + let (docs, range_map) = doc_owner.docs_with_rangemap(sema.db)?; let (markup, range_map) = - markup(Some(Either::Left(docs)), description, None, None, String::new()); + markup(Some(docs.into()), Some(range_map), description, None, None, String::new()); let markup = process_markup(sema.db, Definition::Module(doc_owner), &markup, range_map, config); Some(HoverResult { markup, actions }) } @@ -370,12 +370,12 @@ pub(super) fn process_markup( db: &RootDatabase, def: Definition, markup: &Markup, - markup_range_map: Option, + markup_range_map: Option, config: &HoverConfig<'_>, ) -> Markup { let markup = markup.as_str(); let markup = if config.links_in_hover { - rewrite_links(db, markup, def, markup_range_map.as_ref()) + rewrite_links(db, markup, def, markup_range_map) } else { remove_links(markup) }; @@ -484,7 +484,7 @@ pub(super) fn definition( config: &HoverConfig<'_>, edition: Edition, display_target: DisplayTarget, -) -> (Markup, Option) { +) -> (Markup, Option) { let mod_path = definition_path(db, &def, edition); let label = match def { Definition::Trait(trait_) => trait_ @@ -520,7 +520,12 @@ pub(super) fn definition( } _ => 
def.label(db, display_target), }; - let docs = def.docs_with_rangemap(db, famous_defs, display_target); + let (docs, range_map) = + if let Some((docs, doc_range)) = def.docs_with_rangemap(db, famous_defs, display_target) { + (Some(docs), doc_range) + } else { + (None, None) + }; let value = || match def { Definition::Variant(it) => { if !it.parent_enum(db).is_data_carrying(db) { @@ -837,7 +842,14 @@ pub(super) fn definition( } }; - markup(docs, desc, extra.is_empty().not().then_some(extra), mod_path, subst_types) + markup( + docs.map(Into::into), + range_map, + desc, + extra.is_empty().not().then_some(extra), + mod_path, + subst_types, + ) } #[derive(Debug)] @@ -1112,12 +1124,13 @@ fn definition_path(db: &RootDatabase, &def: &Definition, edition: Edition) -> Op } fn markup( - docs: Option, Documentation<'_>>>, + docs: Option, + range_map: Option, rust: String, extra: Option, mod_path: Option, subst_types: String, -) -> (Markup, Option) { +) -> (Markup, Option) { let mut buf = String::new(); if let Some(mod_path) = mod_path @@ -1138,21 +1151,10 @@ fn markup( if let Some(doc) = docs { format_to!(buf, "\n___\n\n"); let offset = TextSize::new(buf.len() as u32); - let docs_str = match &doc { - Either::Left(docs) => docs.docs(), - Either::Right(docs) => docs.as_str(), - }; - format_to!(buf, "{}", docs_str); - let range_map = match doc { - Either::Left(range_map) => { - let mut range_map = range_map.into_owned(); - range_map.shift_by(offset); - Some(range_map) - } - Either::Right(_) => None, - }; + let buf_range_map = range_map.map(|range_map| range_map.shift_docstring_line_range(offset)); + format_to!(buf, "{}", doc); - (buf.into(), range_map) + (buf.into(), buf_range_map) } else { (buf.into(), None) } diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index d474e50d3c2c..21550d5e6665 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs @@ -90,7 +90,7 @@ pub(crate) fn inlay_hints( let sema = Semantics::new(db); let 
file_id = sema .attach_first_edition(file_id) - .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, file_id)); + .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id)); let file = sema.parse(file_id); let file = file.syntax(); @@ -143,7 +143,7 @@ pub(crate) fn inlay_hints_resolve( let sema = Semantics::new(db); let file_id = sema .attach_first_edition(file_id) - .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, file_id)); + .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id)); let file = sema.parse(file_id); let file = file.syntax(); diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs index a633877adb4e..857252832ffe 100644 --- a/crates/ide/src/lib.rs +++ b/crates/ide/src/lib.rs @@ -331,8 +331,7 @@ impl Analysis { pub fn parse(&self, file_id: FileId) -> Cancellable { // FIXME edition self.with_db(|db| { - let editioned_file_id_wrapper = - EditionedFileId::current_edition_guess_origin(&self.db, file_id); + let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id); db.parse(editioned_file_id_wrapper).tree() }) @@ -361,7 +360,7 @@ impl Analysis { /// supported). 
pub fn matching_brace(&self, position: FilePosition) -> Cancellable> { self.with_db(|db| { - let file_id = EditionedFileId::current_edition_guess_origin(&self.db, position.file_id); + let file_id = EditionedFileId::current_edition(&self.db, position.file_id); let parse = db.parse(file_id); let file = parse.tree(); matching_brace::matching_brace(&file, position.offset) @@ -422,7 +421,7 @@ impl Analysis { pub fn join_lines(&self, config: &JoinLinesConfig, frange: FileRange) -> Cancellable { self.with_db(|db| { let editioned_file_id_wrapper = - EditionedFileId::current_edition_guess_origin(&self.db, frange.file_id); + EditionedFileId::current_edition(&self.db, frange.file_id); let parse = db.parse(editioned_file_id_wrapper); join_lines::join_lines(config, &parse.tree(), frange.range) }) @@ -463,8 +462,7 @@ impl Analysis { ) -> Cancellable> { // FIXME: Edition self.with_db(|db| { - let editioned_file_id_wrapper = - EditionedFileId::current_edition_guess_origin(&self.db, file_id); + let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id); let source_file = db.parse(editioned_file_id_wrapper).tree(); file_structure::file_structure(&source_file, config) }) @@ -495,8 +493,7 @@ impl Analysis { /// Returns the set of folding ranges. pub fn folding_ranges(&self, file_id: FileId) -> Cancellable> { self.with_db(|db| { - let editioned_file_id_wrapper = - EditionedFileId::current_edition_guess_origin(&self.db, file_id); + let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id); folding_ranges::folding_ranges(&db.parse(editioned_file_id_wrapper).tree()) }) diff --git a/crates/ide/src/navigation_target.rs b/crates/ide/src/navigation_target.rs index 8e73ddf8bfc3..b222ff3eec0b 100644 --- a/crates/ide/src/navigation_target.rs +++ b/crates/ide/src/navigation_target.rs @@ -54,8 +54,7 @@ pub struct NavigationTarget { // FIXME: Symbol pub container_name: Option, pub description: Option, - // FIXME: Use the database lifetime here. 
- pub docs: Option>, + pub docs: Option, /// In addition to a `name` field, a `NavigationTarget` may also be aliased /// In such cases we want a `NavigationTarget` to be accessible by its alias // FIXME: Symbol @@ -164,7 +163,7 @@ impl NavigationTarget { full_range, SymbolKind::Module, ); - res.docs = module.docs(db).map(Documentation::into_owned); + res.docs = module.docs(db); res.description = Some( module.display(db, module.krate().to_display_target(db)).to_string(), ); @@ -438,7 +437,7 @@ where D::KIND, ) .map(|mut res| { - res.docs = self.docs(db).map(Documentation::into_owned); + res.docs = self.docs(db); res.description = hir::attach_db(db, || { Some(self.display(db, self.krate(db).to_display_target(db)).to_string()) }); @@ -537,7 +536,7 @@ impl TryToNav for hir::ExternCrateDecl { SymbolKind::Module, ); - res.docs = self.docs(db).map(Documentation::into_owned); + res.docs = self.docs(db); res.description = Some(self.display(db, krate.to_display_target(db)).to_string()); res.container_name = container_name(db, *self, edition); res @@ -559,9 +558,10 @@ impl TryToNav for hir::Field { FieldSource::Named(it) => { NavigationTarget::from_named(db, src.with_value(it), SymbolKind::Field).map( |mut res| { - res.docs = self.docs(db).map(Documentation::into_owned); - res.description = - Some(self.display(db, krate.to_display_target(db)).to_string()); + res.docs = self.docs(db); + res.description = hir::attach_db(db, || { + Some(self.display(db, krate.to_display_target(db)).to_string()) + }); res }, ) @@ -600,7 +600,7 @@ impl TryToNav for hir::Macro { self.kind(db).into(), ) .map(|mut res| { - res.docs = self.docs(db).map(Documentation::into_owned); + res.docs = self.docs(db); res }), ) @@ -939,7 +939,7 @@ pub(crate) fn orig_range_with_focus_r( ) -> UpmappingResult<(FileRange, Option)> { let Some(name) = focus_range else { return orig_range_r(db, hir_file, value) }; - let call = || db.lookup_intern_macro_call(hir_file.macro_file().unwrap()); + let call_kind = || 
db.lookup_intern_macro_call(hir_file.macro_file().unwrap()).kind; let def_range = || db.lookup_intern_macro_call(hir_file.macro_file().unwrap()).def.definition_range(db); @@ -965,8 +965,7 @@ pub(crate) fn orig_range_with_focus_r( // name lies outside the node, so instead point to the macro call which // *should* contain the name _ => { - let call = call(); - let kind = call.kind; + let kind = call_kind(); let range = kind.clone().original_call_range_with_input(db); //If the focus range is in the attribute/derive body, we // need to point the call site to the entire body, if not, fall back @@ -978,7 +977,7 @@ pub(crate) fn orig_range_with_focus_r( { range } else { - kind.original_call_range(db, call.krate) + kind.original_call_range(db) } } }, @@ -1007,14 +1006,11 @@ pub(crate) fn orig_range_with_focus_r( }, ), // node is in macro def, just show the focus - _ => { - let call = call(); - ( - // show the macro call - (call.kind.original_call_range(db, call.krate), None), - Some((focus_range, Some(focus_range))), - ) - } + _ => ( + // show the macro call + (call_kind().original_call_range(db), None), + Some((focus_range, Some(focus_range))), + ), } } // lost name? 
can't happen for single tokens diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs index c4dcd588d693..a53a19299727 100644 --- a/crates/ide/src/references.rs +++ b/crates/ide/src/references.rs @@ -1124,10 +1124,7 @@ pub(super) struct Foo$0 { check_with_scope( code, Some(&mut |db| { - SearchScope::single_file(EditionedFileId::current_edition_guess_origin( - db, - FileId::from_raw(2), - )) + SearchScope::single_file(EditionedFileId::current_edition(db, FileId::from_raw(2))) }), expect![[r#" quux Function FileId(0) 19..35 26..30 diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs index 4b475dac87b5..494701d97def 100644 --- a/crates/ide/src/runnables.rs +++ b/crates/ide/src/runnables.rs @@ -3,13 +3,17 @@ use std::{fmt, sync::OnceLock}; use arrayvec::ArrayVec; use ast::HasName; use cfg::{CfgAtom, CfgExpr}; -use hir::{AsAssocItem, HasAttrs, HasCrate, HasSource, Semantics, Symbol, db::HirDatabase, sym}; +use hir::{ + AsAssocItem, AttrsWithOwner, HasAttrs, HasCrate, HasSource, Semantics, Symbol, db::HirDatabase, + sym, +}; use ide_assists::utils::{has_test_related_attribute, test_related_attribute_syn}; use ide_db::impl_empty_upmap_from_ra_fixture; use ide_db::{ FilePosition, FxHashMap, FxIndexMap, FxIndexSet, RootDatabase, SymbolKind, base_db::RootQueryDb, defs::Definition, + documentation::docs_from_attrs, helpers::visit_file_defs, search::{FileReferenceNode, SearchScope}, }; @@ -319,7 +323,7 @@ pub(crate) fn runnable_fn( def: hir::Function, ) -> Option { let edition = def.krate(sema.db).edition(sema.db); - let under_cfg_test = has_cfg_test(def.module(sema.db).attrs(sema.db).cfgs(sema.db)); + let under_cfg_test = has_cfg_test(def.module(sema.db).attrs(sema.db)); let kind = if !under_cfg_test && def.is_main(sema.db) { RunnableKind::Bin } else { @@ -354,7 +358,7 @@ pub(crate) fn runnable_fn( let file_range = fn_source.syntax().original_file_range_with_macro_call_input(sema.db); let update_test = UpdateTest::find_snapshot_macro(sema, 
file_range); - let cfg = def.attrs(sema.db).cfgs(sema.db).cloned(); + let cfg = def.attrs(sema.db).cfg(); Some(Runnable { use_name_in_title: false, nav, kind, cfg, update_test }) } @@ -362,8 +366,8 @@ pub(crate) fn runnable_mod( sema: &Semantics<'_, RootDatabase>, def: hir::Module, ) -> Option { - let cfg = def.attrs(sema.db).cfgs(sema.db); - if !has_test_function_or_multiple_test_submodules(sema, &def, has_cfg_test(cfg)) { + if !has_test_function_or_multiple_test_submodules(sema, &def, has_cfg_test(def.attrs(sema.db))) + { return None; } let path = def @@ -377,7 +381,8 @@ pub(crate) fn runnable_mod( }) .join("::"); - let cfg = cfg.cloned(); + let attrs = def.attrs(sema.db); + let cfg = attrs.cfg(); let nav = NavigationTarget::from_module_to_decl(sema.db, def).call_site(); let module_source = sema.module_definition_node(def); @@ -404,10 +409,10 @@ pub(crate) fn runnable_impl( let display_target = def.module(sema.db).krate().to_display_target(sema.db); let edition = display_target.edition; let attrs = def.attrs(sema.db); - if !has_runnable_doc_test(sema.db, &attrs) { + if !has_runnable_doc_test(&attrs) { return None; } - let cfg = attrs.cfgs(sema.db).cloned(); + let cfg = attrs.cfg(); let nav = def.try_to_nav(sema)?.call_site(); let ty = def.self_ty(sema.db); let adt_name = ty.as_adt()?.name(sema.db); @@ -437,16 +442,8 @@ pub(crate) fn runnable_impl( }) } -fn has_cfg_test(cfg: Option<&CfgExpr>) -> bool { - return cfg.is_some_and(has_cfg_test_impl); - - fn has_cfg_test_impl(cfg: &CfgExpr) -> bool { - match cfg { - CfgExpr::Atom(CfgAtom::Flag(s)) => *s == sym::test, - CfgExpr::Any(cfgs) | CfgExpr::All(cfgs) => cfgs.iter().any(has_cfg_test_impl), - _ => false, - } - } +fn has_cfg_test(attrs: AttrsWithOwner) -> bool { + attrs.cfgs().any(|cfg| matches!(&cfg, CfgExpr::Atom(CfgAtom::Flag(s)) if *s == sym::test)) } /// Creates a test mod runnable for outline modules at the top of their definition. 
@@ -456,8 +453,8 @@ fn runnable_mod_outline_definition( ) -> Option { def.as_source_file_id(sema.db)?; - let cfg = def.attrs(sema.db).cfgs(sema.db); - if !has_test_function_or_multiple_test_submodules(sema, &def, has_cfg_test(cfg)) { + if !has_test_function_or_multiple_test_submodules(sema, &def, has_cfg_test(def.attrs(sema.db))) + { return None; } let path = def @@ -471,7 +468,8 @@ fn runnable_mod_outline_definition( }) .join("::"); - let cfg = cfg.cloned(); + let attrs = def.attrs(sema.db); + let cfg = attrs.cfg(); let mod_source = sema.module_definition_node(def); let mod_syntax = mod_source.file_syntax(sema.db); @@ -510,7 +508,7 @@ fn module_def_doctest(sema: &Semantics<'_, RootDatabase>, def: Definition) -> Op let display_target = krate .unwrap_or_else(|| (*db.all_crates().last().expect("no crate graph present")).into()) .to_display_target(db); - if !has_runnable_doc_test(db, &attrs) { + if !has_runnable_doc_test(&attrs) { return None; } let def_name = def.name(db)?; @@ -556,7 +554,7 @@ fn module_def_doctest(sema: &Semantics<'_, RootDatabase>, def: Definition) -> Op use_name_in_title: false, nav, kind: RunnableKind::DocTest { test_id }, - cfg: attrs.cfgs(db).cloned(), + cfg: attrs.cfg(), update_test: UpdateTest::default(), }; Some(res) @@ -573,15 +571,15 @@ impl TestAttr { } } -fn has_runnable_doc_test(db: &RootDatabase, attrs: &hir::AttrsWithOwner) -> bool { +fn has_runnable_doc_test(attrs: &hir::Attrs) -> bool { const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"]; const RUSTDOC_CODE_BLOCK_ATTRIBUTES_RUNNABLE: &[&str] = &["", "rust", "should_panic", "edition2015", "edition2018", "edition2021"]; - attrs.hir_docs(db).is_some_and(|doc| { + docs_from_attrs(attrs).is_some_and(|doc| { let mut in_code_block = false; - for line in doc.docs().lines() { + for line in doc.lines() { if let Some(header) = RUSTDOC_FENCES.into_iter().find_map(|fence| line.strip_prefix(fence)) { diff --git a/crates/ide/src/signature_help.rs b/crates/ide/src/signature_help.rs index 
a8fc57a431b4..5f7e12cf53f8 100644 --- a/crates/ide/src/signature_help.rs +++ b/crates/ide/src/signature_help.rs @@ -31,7 +31,7 @@ use crate::RootDatabase; /// edited. #[derive(Debug)] pub struct SignatureHelp { - pub doc: Option>, + pub doc: Option, pub signature: String, pub active_parameter: Option, parameters: Vec, @@ -174,7 +174,7 @@ fn signature_help_for_call( let mut fn_params = None; match callable.kind() { hir::CallableKind::Function(func) => { - res.doc = func.docs(db).map(Documentation::into_owned); + res.doc = func.docs(db); format_to!(res.signature, "fn {}", func.name(db).display(db, edition)); let generic_params = GenericDef::Function(func) @@ -196,7 +196,7 @@ fn signature_help_for_call( }); } hir::CallableKind::TupleStruct(strukt) => { - res.doc = strukt.docs(db).map(Documentation::into_owned); + res.doc = strukt.docs(db); format_to!(res.signature, "struct {}", strukt.name(db).display(db, edition)); let generic_params = GenericDef::Adt(strukt.into()) @@ -209,7 +209,7 @@ fn signature_help_for_call( } } hir::CallableKind::TupleEnumVariant(variant) => { - res.doc = variant.docs(db).map(Documentation::into_owned); + res.doc = variant.docs(db); format_to!( res.signature, "enum {}", @@ -314,33 +314,33 @@ fn signature_help_for_generics( let db = sema.db; match generics_def { hir::GenericDef::Function(it) => { - res.doc = it.docs(db).map(Documentation::into_owned); + res.doc = it.docs(db); format_to!(res.signature, "fn {}", it.name(db).display(db, edition)); } hir::GenericDef::Adt(hir::Adt::Enum(it)) => { - res.doc = it.docs(db).map(Documentation::into_owned); + res.doc = it.docs(db); format_to!(res.signature, "enum {}", it.name(db).display(db, edition)); if let Some(variant) = variant { // In paths, generics of an enum can be specified *after* one of its variants. // eg. `None::` // We'll use the signature of the enum, but include the docs of the variant. 
- res.doc = variant.docs(db).map(Documentation::into_owned); + res.doc = variant.docs(db); } } hir::GenericDef::Adt(hir::Adt::Struct(it)) => { - res.doc = it.docs(db).map(Documentation::into_owned); + res.doc = it.docs(db); format_to!(res.signature, "struct {}", it.name(db).display(db, edition)); } hir::GenericDef::Adt(hir::Adt::Union(it)) => { - res.doc = it.docs(db).map(Documentation::into_owned); + res.doc = it.docs(db); format_to!(res.signature, "union {}", it.name(db).display(db, edition)); } hir::GenericDef::Trait(it) => { - res.doc = it.docs(db).map(Documentation::into_owned); + res.doc = it.docs(db); format_to!(res.signature, "trait {}", it.name(db).display(db, edition)); } hir::GenericDef::TypeAlias(it) => { - res.doc = it.docs(db).map(Documentation::into_owned); + res.doc = it.docs(db); format_to!(res.signature, "type {}", it.name(db).display(db, edition)); } // These don't have generic args that can be specified @@ -495,7 +495,7 @@ fn signature_help_for_tuple_struct_pat( let fields: Vec<_> = if let PathResolution::Def(ModuleDef::Variant(variant)) = path_res { let en = variant.parent_enum(db); - res.doc = en.docs(db).map(Documentation::into_owned); + res.doc = en.docs(db); format_to!( res.signature, "enum {}::{} (", @@ -512,7 +512,7 @@ fn signature_help_for_tuple_struct_pat( match adt { hir::Adt::Struct(it) => { - res.doc = it.docs(db).map(Documentation::into_owned); + res.doc = it.docs(db); format_to!(res.signature, "struct {} (", it.name(db).display(db, edition)); it.fields(db) } @@ -622,7 +622,7 @@ fn signature_help_for_record_<'db>( fields = variant.fields(db); let en = variant.parent_enum(db); - res.doc = en.docs(db).map(Documentation::into_owned); + res.doc = en.docs(db); format_to!( res.signature, "enum {}::{} {{ ", @@ -639,12 +639,12 @@ fn signature_help_for_record_<'db>( match adt { hir::Adt::Struct(it) => { fields = it.fields(db); - res.doc = it.docs(db).map(Documentation::into_owned); + res.doc = it.docs(db); format_to!(res.signature, "struct 
{} {{ ", it.name(db).display(db, edition)); } hir::Adt::Union(it) => { fields = it.fields(db); - res.doc = it.docs(db).map(Documentation::into_owned); + res.doc = it.docs(db); format_to!(res.signature, "union {} {{ ", it.name(db).display(db, edition)); } _ => return None, @@ -740,12 +740,12 @@ mod tests { #[rust_analyzer::rust_fixture] ra_fixture: &str, ) -> (RootDatabase, FilePosition) { let mut database = RootDatabase::default(); - let change_fixture = ChangeFixture::parse(ra_fixture); + let change_fixture = ChangeFixture::parse(&database, ra_fixture); database.apply_change(change_fixture.change); let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); let offset = range_or_offset.expect_offset(); - let position = FilePosition { file_id: file_id.file_id(), offset }; + let position = FilePosition { file_id: file_id.file_id(&database), offset }; (database, position) } diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs index ec8292968dbf..e261928c413f 100644 --- a/crates/ide/src/static_index.rs +++ b/crates/ide/src/static_index.rs @@ -42,8 +42,7 @@ pub struct ReferenceData { #[derive(Debug)] pub struct TokenStaticData { - // FIXME: Make this have the lifetime of the database. 
- pub documentation: Option>, + pub documentation: Option, pub hover: Option, pub definition: Option, pub references: Vec, @@ -110,7 +109,7 @@ fn documentation_for_definition( sema: &Semantics<'_, RootDatabase>, def: Definition, scope_node: &SyntaxNode, -) -> Option> { +) -> Option { let famous_defs = match &def { Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(scope_node)?.krate())), _ => None, @@ -125,7 +124,6 @@ fn documentation_for_definition( }) .to_display_target(sema.db), ) - .map(Documentation::into_owned) } // FIXME: This is a weird function diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs index 782a73d20ca3..66895cb0b053 100644 --- a/crates/ide/src/syntax_highlighting.rs +++ b/crates/ide/src/syntax_highlighting.rs @@ -199,7 +199,7 @@ pub(crate) fn highlight( let sema = Semantics::new(db); let file_id = sema .attach_first_edition(file_id) - .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, file_id)); + .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id)); // Determine the root based on the given range. 
let (root, range_to_highlight) = { diff --git a/crates/ide/src/syntax_highlighting/html.rs b/crates/ide/src/syntax_highlighting/html.rs index 597550b482cd..75e46b8ebfde 100644 --- a/crates/ide/src/syntax_highlighting/html.rs +++ b/crates/ide/src/syntax_highlighting/html.rs @@ -20,7 +20,7 @@ pub(crate) fn highlight_as_html_with_config( let sema = Semantics::new(db); let file_id = sema .attach_first_edition(file_id) - .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, file_id)); + .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id)); let file = sema.parse(file_id); let file = file.syntax(); fn rainbowify(seed: u64) -> String { diff --git a/crates/ide/src/syntax_highlighting/inject.rs b/crates/ide/src/syntax_highlighting/inject.rs index 26d2bb5e0288..7955f5ac0de9 100644 --- a/crates/ide/src/syntax_highlighting/inject.rs +++ b/crates/ide/src/syntax_highlighting/inject.rs @@ -1,13 +1,16 @@ //! "Recursive" Syntax highlighting for code in doctests and fixtures. -use hir::{EditionedFileId, HirFileId, InFile, Semantics}; +use std::mem; + +use either::Either; +use hir::{EditionedFileId, HirFileId, InFile, Semantics, sym}; +use ide_db::range_mapper::RangeMapper; use ide_db::{ - SymbolKind, defs::Definition, documentation::Documentation, range_mapper::RangeMapper, - rust_doc::is_rust_fence, + SymbolKind, defs::Definition, documentation::docs_with_rangemap, rust_doc::is_rust_fence, }; use syntax::{ - SyntaxNode, TextRange, TextSize, - ast::{self, IsString}, + AstToken, NodeOrToken, SyntaxNode, TextRange, TextSize, + ast::{self, AstNode, IsString, QuoteOffsets}, }; use crate::{ @@ -93,79 +96,118 @@ pub(super) fn doc_comment( None => return, }; let src_file_id: HirFileId = src_file_id.into(); - let Some(docs) = attributes.hir_docs(sema.db) else { return }; // Extract intra-doc links and emit highlights for them. 
- extract_definitions_from_docs(&Documentation::new_borrowed(docs.docs())) - .into_iter() - .filter_map(|(range, link, ns)| { - docs.find_ast_range(range) - .filter(|(mapping, _)| mapping.file_id == src_file_id) - .and_then(|(InFile { value: mapped_range, .. }, is_inner)| { - Some(mapped_range) - .zip(resolve_doc_path_for_def(sema.db, def, &link, ns, is_inner)) + if let Some((docs, doc_mapping)) = docs_with_rangemap(sema.db, &attributes) { + extract_definitions_from_docs(&docs) + .into_iter() + .filter_map(|(range, link, ns)| { + doc_mapping + .map(range) + .filter(|(mapping, _)| mapping.file_id == src_file_id) + .and_then(|(InFile { value: mapped_range, .. }, attr_id)| { + Some(mapped_range).zip(resolve_doc_path_for_def( + sema.db, + def, + &link, + ns, + attr_id.is_inner_attr(), + )) + }) + }) + .for_each(|(range, def)| { + hl.add(HlRange { + range, + highlight: module_def_to_hl_tag(def) + | HlMod::Documentation + | HlMod::Injected + | HlMod::IntraDocLink, + binding_hash: None, }) - }) - .for_each(|(range, def)| { - hl.add(HlRange { - range, - highlight: module_def_to_hl_tag(def) - | HlMod::Documentation - | HlMod::Injected - | HlMod::IntraDocLink, - binding_hash: None, }) - }); + } // Extract doc-test sources from the docs and calculate highlighting for them. 
let mut inj = RangeMapper::default(); inj.add_unmapped("fn doctest() {\n"); + let attrs_source_map = attributes.source_map(sema.db); + let mut is_codeblock = false; let mut is_doctest = false; - let mut has_doctests = false; - - let mut docs_offset = TextSize::new(0); - for mut line in docs.docs().split('\n') { - let mut line_docs_offset = docs_offset; - docs_offset += TextSize::of(line) + TextSize::of("\n"); - - match RUSTDOC_FENCES.into_iter().find_map(|fence| line.find(fence)) { - Some(idx) => { - is_codeblock = !is_codeblock; - // Check whether code is rust by inspecting fence guards - let guards = &line[idx + RUSTDOC_FENCE_LENGTH..]; - let is_rust = is_rust_fence(guards); - is_doctest = is_codeblock && is_rust; - continue; - } - None if !is_doctest => continue, - None => (), - } - - // lines marked with `#` should be ignored in output, we skip the `#` char - if line.starts_with('#') { - line_docs_offset += TextSize::of("#"); - line = &line["#".len()..]; - } + let mut new_comments = Vec::new(); + let mut string; - let Some((InFile { file_id, value: mapped_range }, _)) = - docs.find_ast_range(TextRange::at(line_docs_offset, TextSize::of(line))) - else { - continue; - }; + for attr in attributes.by_key(sym::doc).attrs() { + let InFile { file_id, value: src } = attrs_source_map.source_of(attr); if file_id != src_file_id { continue; } + let (line, range) = match &src { + Either::Left(it) => { + string = match find_doc_string_in_attr(attr, it) { + Some(it) => it, + None => continue, + }; + let text = string.text(); + let text_range = string.syntax().text_range(); + match string.quote_offsets() { + Some(QuoteOffsets { contents, .. 
}) => { + (&text[contents - text_range.start()], contents) + } + None => (text, text_range), + } + } + Either::Right(comment) => { + let value = comment.prefix().len(); + let range = comment.syntax().text_range(); + ( + &comment.text()[value..], + TextRange::new(range.start() + TextSize::try_from(value).unwrap(), range.end()), + ) + } + }; + + let mut range_start = range.start(); + for line in line.split('\n') { + let line_len = TextSize::from(line.len() as u32); + let prev_range_start = { + let next_range_start = range_start + line_len + TextSize::from(1); + mem::replace(&mut range_start, next_range_start) + }; + let mut pos = TextSize::from(0); + + match RUSTDOC_FENCES.into_iter().find_map(|fence| line.find(fence)) { + Some(idx) => { + is_codeblock = !is_codeblock; + // Check whether code is rust by inspecting fence guards + let guards = &line[idx + RUSTDOC_FENCE_LENGTH..]; + let is_rust = is_rust_fence(guards); + is_doctest = is_codeblock && is_rust; + continue; + } + None if !is_doctest => continue, + None => (), + } - has_doctests = true; - inj.add(line, mapped_range); - inj.add_unmapped("\n"); + // whitespace after comment is ignored + if let Some(ws) = line[pos.into()..].chars().next().filter(|c| c.is_whitespace()) { + pos += TextSize::of(ws); + } + // lines marked with `#` should be ignored in output, we skip the `#` char + if line[pos.into()..].starts_with('#') { + pos += TextSize::of('#'); + } + + new_comments.push(TextRange::at(prev_range_start, pos)); + inj.add(&line[pos.into()..], TextRange::new(pos, line_len) + prev_range_start); + inj.add_unmapped("\n"); + } } - if !has_doctests { + if new_comments.is_empty() { return; // no need to run an analysis on an empty file } @@ -198,6 +240,37 @@ pub(super) fn doc_comment( } } } + + for range in new_comments { + hl.add(HlRange { + range, + highlight: HlTag::Comment | HlMod::Documentation, + binding_hash: None, + }); + } +} + +fn find_doc_string_in_attr(attr: &hir::Attr, it: &ast::Attr) -> Option { + match 
it.expr() { + // #[doc = lit] + Some(ast::Expr::Literal(lit)) => match lit.kind() { + ast::LiteralKind::String(it) => Some(it), + _ => None, + }, + // #[cfg_attr(..., doc = "", ...)] + None => { + // We gotta hunt the string token manually here + let text = attr.string_value()?.as_str(); + // FIXME: We just pick the first string literal that has the same text as the doc attribute + // This means technically we might highlight the wrong one + it.syntax() + .descendants_with_tokens() + .filter_map(NodeOrToken::into_token) + .filter_map(ast::String::cast) + .find(|string| string.text().get(1..string.text().len() - 1) == Some(text)) + } + _ => None, + } } fn module_def_to_hl_tag(def: Definition) -> HlTag { diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html b/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html index 53750ae0bac0..d00f279c8299 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html @@ -42,21 +42,21 @@
//! This is a module to test doc injection.
 //! ```
-//! fn test() {}
+//! fn test() {}
 //! ```
 
 //! Syntactic name ref highlighting testing
 //! ```rust
-//! extern crate self;
-//! extern crate other as otter;
-//! extern crate core;
-//! trait T { type Assoc; }
-//! fn f<Arg>() -> use<Arg> where (): T<Assoc = ()> {}
+//! extern crate self;
+//! extern crate other as otter;
+//! extern crate core;
+//! trait T { type Assoc; }
+//! fn f<Arg>() -> use<Arg> where (): T<Assoc = ()> {}
 //! ```
 mod outline_module;
 
 /// ```
-/// let _ = "early doctests should not go boom";
+/// let _ = "early doctests should not go boom";
 /// ```
 struct Foo {
     bar: bool,
@@ -65,15 +65,15 @@
 /// This is an impl of [`Foo`] with a code block.
 ///
 /// ```
-/// fn foo() {
+/// fn foo() {
 ///
-/// }
+/// }
 /// ```
 impl Foo {
     /// ```
-    /// let _ = "Call me
+    /// let _ = "Call me
     //    KILLER WHALE
-    ///     Ishmael.";
+    ///     Ishmael.";
     /// ```
     pub const bar: bool = true;
 
@@ -82,8 +82,8 @@
     /// # Examples
     ///
     /// ```
-    /// # #![allow(unused_mut)]
-    /// let mut foo: Foo = Foo::new();
+    /// # #![allow(unused_mut)]
+    /// let mut foo: Foo = Foo::new();
     /// ```
     pub const fn new() -> Foo {
         Foo { bar: true }
@@ -94,38 +94,38 @@
     /// # Examples
     ///
     /// ```
-    /// use x::y;
+    /// use x::y;
     ///
-    /// let foo = Foo::new();
+    /// let foo = Foo::new();
     ///
-    /// // calls bar on foo
-    /// assert!(foo.bar());
+    /// // calls bar on foo
+    /// assert!(foo.bar());
     ///
-    /// let bar = foo.bar || Foo::bar;
+    /// let bar = foo.bar || Foo::bar;
     ///
-    /// /* multi-line
-    ///        comment */
+    /// /* multi-line
+    ///        comment */
     ///
-    /// let multi_line_string = "Foo
-    ///   bar\n
-    ///          ";
+    /// let multi_line_string = "Foo
+    ///   bar\n
+    ///          ";
     ///
     /// ```
     ///
     /// ```rust,no_run
-    /// let foobar = Foo::new().bar();
+    /// let foobar = Foo::new().bar();
     /// ```
     ///
     /// ~~~rust,no_run
-    /// // code block with tilde.
-    /// let foobar = Foo::new().bar();
+    /// // code block with tilde.
+    /// let foobar = Foo::new().bar();
     /// ~~~
     ///
     /// ```
-    /// // functions
-    /// fn foo<T, const X: usize>(arg: i32) {
-    ///     let x: T = X;
-    /// }
+    /// // functions
+    /// fn foo<T, const X: usize>(arg: i32) {
+    ///     let x: T = X;
+    /// }
     /// ```
     ///
     /// ```sh
@@ -150,8 +150,8 @@
 }
 
 /// ```
-/// macro_rules! noop { ($expr:expr) => { $expr }}
-/// noop!(1);
+/// macro_rules! noop { ($expr:expr) => { $expr }}
+/// noop!(1);
 /// ```
 macro_rules! noop {
     ($expr:expr) => {
@@ -160,18 +160,18 @@
 }
 
 /// ```rust
-/// let _ = example(&[1, 2, 3]);
+/// let _ = example(&[1, 2, 3]);
 /// ```
 ///
 /// ```
-/// loop {}
+/// loop {}
 #[cfg_attr(not(feature = "false"), doc = "loop {}")]
 #[doc = "loop {}"]
 /// ```
 ///
 #[cfg_attr(feature = "alloc", doc = "```rust")]
 #[cfg_attr(not(feature = "alloc"), doc = "```ignore")]
-/// let _ = example(&alloc::vec![1, 2, 3]);
+/// let _ = example(&alloc::vec![1, 2, 3]);
 /// ```
 pub fn mix_and_match() {}
 
@@ -187,7 +187,7 @@
 /**
     Really, I don't get it
     ```rust
-    let _ = example(&[1, 2, 3]);
+    let _ = example(&[1, 2, 3]);
     ```
     [`block_comments`] tests these without indentation
 */
diff --git a/crates/ide/src/typing.rs b/crates/ide/src/typing.rs
index 0381865fed45..ed55ac5bf04b 100644
--- a/crates/ide/src/typing.rs
+++ b/crates/ide/src/typing.rs
@@ -75,10 +75,7 @@ pub(crate) fn on_char_typed(
     // FIXME: We are hitting the database here, if we are unlucky this call might block momentarily
     // causing the editor to feel sluggish!
     let edition = Edition::CURRENT_FIXME;
-    let editioned_file_id_wrapper = EditionedFileId::from_span_guess_origin(
-        db,
-        span::EditionedFileId::new(position.file_id, edition),
-    );
+    let editioned_file_id_wrapper = EditionedFileId::new(db, position.file_id, edition);
     let file = &db.parse(editioned_file_id_wrapper);
     let char_matches_position =
         file.tree().syntax().text().char_at(position.offset) == Some(char_typed);
diff --git a/crates/ide/src/typing/on_enter.rs b/crates/ide/src/typing/on_enter.rs
index 76a2802d294c..fdc583a15cc7 100644
--- a/crates/ide/src/typing/on_enter.rs
+++ b/crates/ide/src/typing/on_enter.rs
@@ -51,7 +51,7 @@ use ide_db::text_edit::TextEdit;
 // ![On Enter](https://user-images.githubusercontent.com/48062697/113065578-04c21800-91b1-11eb-82b8-22b8c481e645.gif)
 pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option {
     let editioned_file_id_wrapper =
-        ide_db::base_db::EditionedFileId::current_edition_guess_origin(db, position.file_id);
+        ide_db::base_db::EditionedFileId::current_edition(db, position.file_id);
     let parse = db.parse(editioned_file_id_wrapper);
     let file = parse.tree();
     let token = file.syntax().token_at_offset(position.offset).left_biased()?;
diff --git a/crates/ide/src/view_item_tree.rs b/crates/ide/src/view_item_tree.rs
index c9a2f31696f4..2cd751463bdb 100644
--- a/crates/ide/src/view_item_tree.rs
+++ b/crates/ide/src/view_item_tree.rs
@@ -12,6 +12,6 @@ pub(crate) fn view_item_tree(db: &RootDatabase, file_id: FileId) -> String {
     let sema = Semantics::new(db);
     let file_id = sema
         .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, file_id));
+        .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
     db.file_item_tree(file_id.into()).pretty_print(db, file_id.edition(db))
 }
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs
index 54ad9603ba03..de24bc09ff0f 100644
--- a/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -145,9 +145,7 @@ impl flags::AnalysisStats {
                     if !source_root.is_library || self.with_deps {
                         let length = db.file_text(file_id).text(db).lines().count();
                         let item_stats = db
-                            .file_item_tree(
-                                EditionedFileId::current_edition_guess_origin(db, file_id).into(),
-                            )
+                            .file_item_tree(EditionedFileId::current_edition(db, file_id).into())
                             .item_tree_stats()
                             .into();
 
@@ -157,9 +155,7 @@ impl flags::AnalysisStats {
                     } else {
                         let length = db.file_text(file_id).text(db).lines().count();
                         let item_stats = db
-                            .file_item_tree(
-                                EditionedFileId::current_edition_guess_origin(db, file_id).into(),
-                            )
+                            .file_item_tree(EditionedFileId::current_edition(db, file_id).into())
                             .item_tree_stats()
                             .into();
 
diff --git a/crates/rust-analyzer/src/cli/scip.rs b/crates/rust-analyzer/src/cli/scip.rs
index 92bb2c1ce4fa..37f83f6dee67 100644
--- a/crates/rust-analyzer/src/cli/scip.rs
+++ b/crates/rust-analyzer/src/cli/scip.rs
@@ -514,12 +514,12 @@ mod test {
 
     fn position(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (AnalysisHost, FilePosition) {
         let mut host = AnalysisHost::default();
-        let change_fixture = ChangeFixture::parse(ra_fixture);
+        let change_fixture = ChangeFixture::parse(host.raw_database(), ra_fixture);
         host.raw_database_mut().apply_change(change_fixture.change);
         let (file_id, range_or_offset) =
             change_fixture.file_position.expect("expected a marker ()");
         let offset = range_or_offset.expect_offset();
-        let position = FilePosition { file_id: file_id.file_id(), offset };
+        let position = FilePosition { file_id: file_id.file_id(host.raw_database()), offset };
         (host, position)
     }
 
@@ -870,7 +870,7 @@ pub mod example_mod {
         let s = "/// foo\nfn bar() {}";
 
         let mut host = AnalysisHost::default();
-        let change_fixture = ChangeFixture::parse(s);
+        let change_fixture = ChangeFixture::parse(host.raw_database(), s);
         host.raw_database_mut().apply_change(change_fixture.change);
 
         let analysis = host.analysis();
diff --git a/crates/rust-analyzer/src/cli/ssr.rs b/crates/rust-analyzer/src/cli/ssr.rs
index cc2ab0f07ca0..e3e3a143de03 100644
--- a/crates/rust-analyzer/src/cli/ssr.rs
+++ b/crates/rust-analyzer/src/cli/ssr.rs
@@ -73,7 +73,7 @@ impl flags::Search {
                 let sr = db.source_root(root).source_root(db);
                 for file_id in sr.iter() {
                     for debug_info in match_finder.debug_where_text_equal(
-                        EditionedFileId::current_edition_guess_origin(db, file_id),
+                        EditionedFileId::current_edition(db, file_id),
                         debug_snippet,
                     ) {
                         println!("{debug_info:#?}");
diff --git a/crates/rust-analyzer/src/cli/unresolved_references.rs b/crates/rust-analyzer/src/cli/unresolved_references.rs
index 2cb0fe9eefad..0362e13b88b7 100644
--- a/crates/rust-analyzer/src/cli/unresolved_references.rs
+++ b/crates/rust-analyzer/src/cli/unresolved_references.rs
@@ -141,7 +141,7 @@ fn all_unresolved_references(
 ) -> Vec {
     let file_id = sema
         .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(sema.db, file_id));
+        .unwrap_or_else(|| EditionedFileId::current_edition(sema.db, file_id));
     let file = sema.parse(file_id);
     let root = file.syntax();
 
diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs
index 5a42cbd933f9..04b20033062e 100644
--- a/crates/rust-analyzer/src/lsp/to_proto.rs
+++ b/crates/rust-analyzer/src/lsp/to_proto.rs
@@ -119,7 +119,7 @@ pub(crate) fn diagnostic_severity(severity: Severity) -> lsp_types::DiagnosticSe
     }
 }
 
-pub(crate) fn documentation(documentation: Documentation<'_>) -> lsp_types::Documentation {
+pub(crate) fn documentation(documentation: Documentation) -> lsp_types::Documentation {
     let value = format_docs(&documentation);
     let markup_content = lsp_types::MarkupContent { kind: lsp_types::MarkupKind::Markdown, value };
     lsp_types::Documentation::MarkupContent(markup_content)
@@ -1970,7 +1970,7 @@ pub(crate) fn markup_content(
         ide::HoverDocFormat::Markdown => lsp_types::MarkupKind::Markdown,
         ide::HoverDocFormat::PlainText => lsp_types::MarkupKind::PlainText,
     };
-    let value = format_docs(&Documentation::new_owned(markup.into()));
+    let value = format_docs(&Documentation::new(markup.into()));
     lsp_types::MarkupContent { kind, value }
 }
 
diff --git a/crates/syntax-bridge/src/lib.rs b/crates/syntax-bridge/src/lib.rs
index 2d1955d1f651..4e525be3fe3c 100644
--- a/crates/syntax-bridge/src/lib.rs
+++ b/crates/syntax-bridge/src/lib.rs
@@ -1,6 +1,6 @@
 //! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].
 
-use std::{collections::VecDeque, fmt, hash::Hash};
+use std::{fmt, hash::Hash};
 
 use intern::Symbol;
 use rustc_hash::{FxHashMap, FxHashSet};
@@ -102,34 +102,26 @@ where
     SpanData: Copy + fmt::Debug,
     SpanMap: SpanMapper>,
 {
-    let mut c =
-        Converter::new(node, map, Default::default(), Default::default(), span, mode, |_, _| {
-            (true, Vec::new())
-        });
+    let mut c = Converter::new(node, map, Default::default(), Default::default(), span, mode);
     convert_tokens(&mut c)
 }
 
 /// Converts a syntax tree to a [`tt::Subtree`] using the provided span map to populate the
 /// subtree's spans. Additionally using the append and remove parameters, the additional tokens can
 /// be injected or hidden from the output.
-pub fn syntax_node_to_token_tree_modified<Ctx, SpanMap, OnEvent>(
+pub fn syntax_node_to_token_tree_modified<Ctx, SpanMap>(
     node: &SyntaxNode,
     map: SpanMap,
     append: FxHashMap<SyntaxElement, Vec<tt::Leaf<SpanData<Ctx>>>>,
     remove: FxHashSet<SyntaxElement>,
     call_site: SpanData<Ctx>,
     mode: DocCommentDesugarMode,
-    on_enter: OnEvent,
 ) -> tt::TopSubtree<SpanData<Ctx>>
 where
     SpanMap: SpanMapper<SpanData<Ctx>>,
     SpanData<Ctx>: Copy + fmt::Debug,
-    OnEvent: FnMut(
-        &mut PreorderWithTokens,
-        &WalkEvent<SyntaxElement>,
-    ) -> (bool, Vec<tt::Leaf<SpanData<Ctx>>>),
 {
-    let mut c = Converter::new(node, map, append, remove, call_site, mode, on_enter);
+    let mut c = Converter::new(node, map, append, remove, call_site, mode);
     convert_tokens(&mut c)
 }
 
@@ -632,9 +624,9 @@ where
     }
 }
 
-struct Converter<SpanMap, S, OnEvent> {
+struct Converter<SpanMap, S> {
     current: Option<SyntaxToken>,
-    current_leaves: VecDeque<tt::Leaf<S>>,
+    current_leaves: Vec<tt::Leaf<S>>,
     preorder: PreorderWithTokens,
     range: TextRange,
     punct_offset: Option<(SyntaxToken, TextSize)>,
@@ -644,13 +636,9 @@ struct Converter {
     remove: FxHashSet,
     call_site: S,
     mode: DocCommentDesugarMode,
-    on_event: OnEvent,
 }
 
-impl<SpanMap, S, OnEvent> Converter<SpanMap, S, OnEvent>
-where
-    OnEvent: FnMut(&mut PreorderWithTokens, &WalkEvent<SyntaxElement>) -> (bool, Vec<tt::Leaf<S>>),
-{
+impl<SpanMap, S> Converter<SpanMap, S> {
     fn new(
         node: &SyntaxNode,
         map: SpanMap,
@@ -658,9 +646,8 @@ where
         remove: FxHashSet,
         call_site: S,
         mode: DocCommentDesugarMode,
-        on_enter: OnEvent,
     ) -> Self {
-        let mut converter = Converter {
+        let mut this = Converter {
             current: None,
             preorder: node.preorder_with_tokens(),
             range: node.text_range(),
@@ -669,21 +656,16 @@ where
             append,
             remove,
             call_site,
-            current_leaves: VecDeque::new(),
+            current_leaves: vec![],
             mode,
-            on_event: on_enter,
         };
-        converter.current = converter.next_token();
-        converter
+        let first = this.next_token();
+        this.current = first;
+        this
     }
 
     fn next_token(&mut self) -> Option {
         while let Some(ev) = self.preorder.next() {
-            let (keep_event, insert_leaves) = (self.on_event)(&mut self.preorder, &ev);
-            self.current_leaves.extend(insert_leaves);
-            if !keep_event {
-                continue;
-            }
             match ev {
                 WalkEvent::Enter(token) => {
                     if self.remove.contains(&token) {
@@ -693,9 +675,10 @@ where
                             }
                             node => {
                                 self.preorder.skip_subtree();
-                                if let Some(v) = self.append.remove(&node) {
+                                if let Some(mut v) = self.append.remove(&node) {
+                                    v.reverse();
                                     self.current_leaves.extend(v);
-                                    continue;
+                                    return None;
                                 }
                             }
                         }
@@ -704,9 +687,10 @@ where
                     }
                 }
                 WalkEvent::Leave(ele) => {
-                    if let Some(v) = self.append.remove(&ele) {
+                    if let Some(mut v) = self.append.remove(&ele) {
+                        v.reverse();
                         self.current_leaves.extend(v);
-                        continue;
+                        return None;
                     }
                 }
             }
@@ -731,8 +715,8 @@ impl SynToken {
     }
 }
 
-impl SrcToken, S> for SynToken {
-    fn kind(&self, _ctx: &Converter) -> SyntaxKind {
+impl SrcToken, S> for SynToken {
+    fn kind(&self, _ctx: &Converter) -> SyntaxKind {
         match self {
             SynToken::Ordinary(token) => token.kind(),
             SynToken::Punct { token, offset: i } => {
@@ -744,14 +728,14 @@ impl SrcToken, S> for SynTok
             }
         }
     }
-    fn to_char(&self, _ctx: &Converter) -> Option {
+    fn to_char(&self, _ctx: &Converter) -> Option {
         match self {
             SynToken::Ordinary(_) => None,
             SynToken::Punct { token: it, offset: i } => it.text().chars().nth(*i),
             SynToken::Leaf(_) => None,
         }
     }
-    fn to_text(&self, _ctx: &Converter) -> SmolStr {
+    fn to_text(&self, _ctx: &Converter) -> SmolStr {
         match self {
             SynToken::Ordinary(token) | SynToken::Punct { token, offset: _ } => token.text().into(),
             SynToken::Leaf(_) => {
@@ -768,11 +752,10 @@ impl SrcToken, S> for SynTok
     }
 }
 
-impl<S, SpanMap, OnEvent> TokenConverter<S> for Converter<SpanMap, S, OnEvent>
+impl<S, SpanMap> TokenConverter<S> for Converter<SpanMap, S>
 where
     S: Copy,
     SpanMap: SpanMapper<S>,
-    OnEvent: FnMut(&mut PreorderWithTokens, &WalkEvent<SyntaxElement>) -> (bool, Vec<tt::Leaf<S>>),
 {
     type Token = SynToken;
     fn convert_doc_comment(
@@ -798,7 +781,10 @@ where
             ));
         }
 
-        if let Some(leaf) = self.current_leaves.pop_front() {
+        if let Some(leaf) = self.current_leaves.pop() {
+            if self.current_leaves.is_empty() {
+                self.current = self.next_token();
+            }
             return Some((SynToken::Leaf(leaf), TextRange::empty(TextSize::new(0))));
         }
 
diff --git a/crates/syntax/src/ast.rs b/crates/syntax/src/ast.rs
index 5d67fd449175..aea99a4389b9 100644
--- a/crates/syntax/src/ast.rs
+++ b/crates/syntax/src/ast.rs
@@ -26,8 +26,7 @@ pub use self::{
     generated::{nodes::*, tokens::*},
     node_ext::{
         AttrKind, FieldKind, Macro, NameLike, NameOrNameRef, PathSegmentKind, SelfParamKind,
-        SlicePatComponents, StructKind, TokenTreeChildren, TypeBoundKind, TypeOrConstParam,
-        VisibilityKind,
+        SlicePatComponents, StructKind, TypeBoundKind, TypeOrConstParam, VisibilityKind,
     },
     operators::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering, RangeOp, UnaryOp},
     token_ext::{
@@ -36,7 +35,6 @@ pub use self::{
     traits::{
         AttrDocCommentIter, DocCommentIter, HasArgList, HasAttrs, HasDocComments, HasGenericArgs,
         HasGenericParams, HasLoopBody, HasModuleItem, HasName, HasTypeBounds, HasVisibility,
-        attrs_including_inner,
     },
 };
 
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs
index 901d17bb1491..af741d100f68 100644
--- a/crates/syntax/src/ast/node_ext.rs
+++ b/crates/syntax/src/ast/node_ext.rs
@@ -10,7 +10,7 @@ use parser::SyntaxKind;
 use rowan::{GreenNodeData, GreenTokenData};
 
 use crate::{
-    NodeOrToken, SmolStr, SyntaxElement, SyntaxElementChildren, SyntaxToken, T, TokenText,
+    NodeOrToken, SmolStr, SyntaxElement, SyntaxToken, T, TokenText,
     ast::{
         self, AstNode, AstToken, HasAttrs, HasGenericArgs, HasGenericParams, HasName,
         HasTypeBounds, SyntaxNode, support,
@@ -1114,39 +1114,3 @@ impl ast::OrPat {
             .filter(|it| it.kind() == T![|])
     }
 }
-
-/// An iterator over the elements in an [`ast::TokenTree`].
-///
-/// Does not yield trivia or the delimiters.
-#[derive(Clone)]
-pub struct TokenTreeChildren {
-    iter: SyntaxElementChildren,
-}
-
-impl TokenTreeChildren {
-    #[inline]
-    pub fn new(tt: &ast::TokenTree) -> Self {
-        let mut iter = tt.syntax.children_with_tokens();
-        iter.next(); // Bump the opening delimiter.
-        Self { iter }
-    }
-}
-
-impl Iterator for TokenTreeChildren {
-    type Item = NodeOrToken;
-
-    #[inline]
-    fn next(&mut self) -> Option {
-        self.iter.find_map(|item| match item {
-            NodeOrToken::Node(node) => ast::TokenTree::cast(node).map(NodeOrToken::Node),
-            NodeOrToken::Token(token) => {
-                let kind = token.kind();
-                (!matches!(
-                    kind,
-                    SyntaxKind::WHITESPACE | SyntaxKind::COMMENT | T![')'] | T![']'] | T!['}']
-                ))
-                .then_some(NodeOrToken::Token(token))
-            }
-        })
-    }
-}
diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs
index 83ab87c1c687..e1a9f3ac0341 100644
--- a/crates/syntax/src/ast/token_ext.rs
+++ b/crates/syntax/src/ast/token_ext.rs
@@ -40,8 +40,8 @@ impl ast::Comment {
     }
 
     /// Returns the textual content of a doc comment node as a single string with prefix and suffix
-    /// removed, plus the offset of the returned string from the beginning of the comment.
-    pub fn doc_comment(&self) -> Option<(&str, TextSize)> {
+    /// removed.
+    pub fn doc_comment(&self) -> Option<&str> {
         let kind = self.kind();
         match kind {
             CommentKind { shape, doc: Some(_) } => {
@@ -52,7 +52,7 @@ impl ast::Comment {
                 } else {
                     text
                 };
-                Some((text, TextSize::of(prefix)))
+                Some(text)
             }
             _ => None,
         }
diff --git a/crates/syntax/src/ast/traits.rs b/crates/syntax/src/ast/traits.rs
index 2f4109a2c976..5290f32dd27d 100644
--- a/crates/syntax/src/ast/traits.rs
+++ b/crates/syntax/src/ast/traits.rs
@@ -4,9 +4,8 @@
 use either::Either;
 
 use crate::{
-    SyntaxElement, SyntaxNode, SyntaxToken, T,
+    SyntaxElement, SyntaxToken, T,
     ast::{self, AstChildren, AstNode, AstToken, support},
-    match_ast,
     syntax_node::SyntaxElementChildren,
 };
 
@@ -77,44 +76,34 @@ pub trait HasAttrs: AstNode {
         self.attrs().filter_map(|x| x.as_simple_atom()).any(|x| x == atom)
     }
 
-    /// This may return the same node as called with (with `SourceFile`). The caller has the responsibility
-    /// to avoid duplicate attributes.
-    fn inner_attributes_node(&self) -> Option {
-        let syntax = self.syntax();
-        Some(match_ast! {
-            match syntax {
-                // A `SourceFile` contains the inner attributes of itself.
-                ast::SourceFile(_) => syntax.clone(),
-                ast::ExternBlock(it) => it.extern_item_list()?.syntax().clone(),
-                ast::Fn(it) => it.body()?.stmt_list()?.syntax().clone(),
-                ast::MatchExpr(it) => it.match_arm_list()?.syntax().clone(),
-                ast::Impl(it) => it.assoc_item_list()?.syntax().clone(),
-                ast::Trait(it) => it.assoc_item_list()?.syntax().clone(),
-                ast::Module(it) => it.item_list()?.syntax().clone(),
-                ast::BlockExpr(it) => {
-                    if !it.may_carry_attributes() {
-                        return None;
-                    }
-                    syntax.clone()
-                },
-                _ => return None,
-            }
-        })
+    /// Returns all attributes of this node, including inner attributes that may not be directly under this node
+    /// but under a child.
+    fn attrs_including_inner(self) -> impl Iterator
+    where
+        Self: Sized,
+    {
+        let inner_attrs_node = if let Some(it) =
+            support::child::(self.syntax()).and_then(|it| it.stmt_list())
+        {
+            Some(it.syntax)
+        } else if let Some(it) = support::child::(self.syntax()) {
+            Some(it.syntax)
+        } else if let Some(it) = support::child::(self.syntax()) {
+            Some(it.syntax)
+        } else if let Some(it) = support::child::(self.syntax()) {
+            Some(it.syntax)
+        } else if let Some(it) = support::child::(self.syntax()) {
+            Some(it.syntax)
+        } else if let Some(it) = support::child::(self.syntax()) {
+            Some(it.syntax)
+        } else {
+            None
+        };
+
+        self.attrs().chain(inner_attrs_node.into_iter().flat_map(|it| support::children(&it)))
     }
 }
 
-/// Returns all attributes of this node, including inner attributes that may not be directly under this node
-/// but under a child.
-pub fn attrs_including_inner(owner: &dyn HasAttrs) -> impl Iterator + Clone {
-    owner.attrs().filter(|attr| attr.kind().is_outer()).chain(
-        owner
-            .inner_attributes_node()
-            .into_iter()
-            .flat_map(|node| support::children::(&node))
-            .filter(|attr| attr.kind().is_inner()),
-    )
-}
-
 pub trait HasDocComments: HasAttrs {
     fn doc_comments(&self) -> DocCommentIter {
         DocCommentIter { iter: self.syntax().children_with_tokens() }
@@ -129,7 +118,7 @@ impl DocCommentIter {
     #[cfg(test)]
     pub fn doc_comment_text(self) -> Option {
         let docs = itertools::Itertools::join(
-            &mut self.filter_map(|comment| comment.doc_comment().map(|it| it.0.to_owned())),
+            &mut self.filter_map(|comment| comment.doc_comment().map(ToOwned::to_owned)),
             "\n",
         );
         if docs.is_empty() { None } else { Some(docs) }
@@ -162,7 +151,7 @@ impl AttrDocCommentIter {
 impl Iterator for AttrDocCommentIter {
     type Item = Either;
     fn next(&mut self) -> Option {
-        self.iter.find_map(|el| match el {
+        self.iter.by_ref().find_map(|el| match el {
             SyntaxElement::Node(node) => ast::Attr::cast(node).map(Either::Left),
             SyntaxElement::Token(tok) => {
                 ast::Comment::cast(tok).filter(ast::Comment::is_doc).map(Either::Right)
diff --git a/crates/test-fixture/src/lib.rs b/crates/test-fixture/src/lib.rs
index 2b05add55216..aefe81f83e29 100644
--- a/crates/test-fixture/src/lib.rs
+++ b/crates/test-fixture/src/lib.rs
@@ -5,7 +5,7 @@ use base_db::target::TargetData;
 use base_db::{
     Crate, CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin, CrateWorkspaceData,
     DependencyBuilder, Env, FileChange, FileSet, FxIndexMap, LangCrateOrigin, SourceDatabase,
-    SourceRoot, Version, VfsPath,
+    SourceRoot, Version, VfsPath, salsa,
 };
 use cfg::CfgOptions;
 use hir_expand::{
@@ -37,11 +37,10 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
         #[rust_analyzer::rust_fixture] ra_fixture: &str,
     ) -> (Self, EditionedFileId) {
         let mut db = Self::default();
-        let fixture = ChangeFixture::parse(ra_fixture);
+        let fixture = ChangeFixture::parse(&db, ra_fixture);
         fixture.change.apply(&mut db);
         assert_eq!(fixture.files.len(), 1, "Multiple file found in the fixture");
-        let file = EditionedFileId::from_span_guess_origin(&db, fixture.files[0]);
-        (db, file)
+        (db, fixture.files[0])
     }
 
     #[track_caller]
@@ -49,21 +48,16 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
         #[rust_analyzer::rust_fixture] ra_fixture: &str,
     ) -> (Self, Vec) {
         let mut db = Self::default();
-        let fixture = ChangeFixture::parse(ra_fixture);
+        let fixture = ChangeFixture::parse(&db, ra_fixture);
         fixture.change.apply(&mut db);
         assert!(fixture.file_position.is_none());
-        let files = fixture
-            .files
-            .into_iter()
-            .map(|file| EditionedFileId::from_span_guess_origin(&db, file))
-            .collect();
-        (db, files)
+        (db, fixture.files)
     }
 
     #[track_caller]
     fn with_files(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> Self {
         let mut db = Self::default();
-        let fixture = ChangeFixture::parse(ra_fixture);
+        let fixture = ChangeFixture::parse(&db, ra_fixture);
         fixture.change.apply(&mut db);
         assert!(fixture.file_position.is_none());
         db
@@ -75,8 +69,12 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
         proc_macros: Vec<(String, ProcMacro)>,
     ) -> Self {
         let mut db = Self::default();
-        let fixture =
-            ChangeFixture::parse_with_proc_macros(ra_fixture, MiniCore::RAW_SOURCE, proc_macros);
+        let fixture = ChangeFixture::parse_with_proc_macros(
+            &db,
+            ra_fixture,
+            MiniCore::RAW_SOURCE,
+            proc_macros,
+        );
         fixture.change.apply(&mut db);
         assert!(fixture.file_position.is_none());
         db
@@ -101,13 +99,12 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
         #[rust_analyzer::rust_fixture] ra_fixture: &str,
     ) -> (Self, EditionedFileId, RangeOrOffset) {
         let mut db = Self::default();
-        let fixture = ChangeFixture::parse(ra_fixture);
+        let fixture = ChangeFixture::parse(&db, ra_fixture);
         fixture.change.apply(&mut db);
 
         let (file_id, range_or_offset) = fixture
             .file_position
             .expect("Could not find file position in fixture. Did you forget to add an `$0`?");
-        let file_id = EditionedFileId::from_span_guess_origin(&db, file_id);
         (db, file_id, range_or_offset)
     }
 
@@ -119,9 +116,9 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
 impl WithFixture for DB {}
 
 pub struct ChangeFixture {
-    pub file_position: Option<(span::EditionedFileId, RangeOrOffset)>,
+    pub file_position: Option<(EditionedFileId, RangeOrOffset)>,
     pub file_lines: Vec,
-    pub files: Vec,
+    pub files: Vec,
     pub change: ChangeWithProcMacros,
     pub sysroot_files: Vec,
 }
@@ -129,11 +126,15 @@ pub struct ChangeFixture {
 const SOURCE_ROOT_PREFIX: &str = "/";
 
 impl ChangeFixture {
-    pub fn parse(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> ChangeFixture {
-        Self::parse_with_proc_macros(ra_fixture, MiniCore::RAW_SOURCE, Vec::new())
+    pub fn parse(
+        db: &dyn salsa::Database,
+        #[rust_analyzer::rust_fixture] ra_fixture: &str,
+    ) -> ChangeFixture {
+        Self::parse_with_proc_macros(db, ra_fixture, MiniCore::RAW_SOURCE, Vec::new())
     }
 
     pub fn parse_with_proc_macros(
+        db: &dyn salsa::Database,
         #[rust_analyzer::rust_fixture] ra_fixture: &str,
         minicore_raw: &str,
         mut proc_macro_defs: Vec<(String, ProcMacro)>,
@@ -201,7 +202,7 @@ impl ChangeFixture {
             let meta = FileMeta::from_fixture(entry, current_source_root_kind);
             if let Some(range_or_offset) = range_or_offset {
                 file_position =
-                    Some((span::EditionedFileId::new(file_id, meta.edition), range_or_offset));
+                    Some((EditionedFileId::new(db, file_id, meta.edition), range_or_offset));
             }
 
             assert!(meta.path.starts_with(SOURCE_ROOT_PREFIX));
@@ -258,7 +259,7 @@ impl ChangeFixture {
             source_change.change_file(file_id, Some(text));
             let path = VfsPath::new_virtual_path(meta.path);
             file_set.insert(file_id, path);
-            files.push(span::EditionedFileId::new(file_id, meta.edition));
+            files.push(EditionedFileId::new(db, file_id, meta.edition));
             file_id = FileId::from_raw(file_id.index() + 1);
         }