diff --git a/src/tools/rust-analyzer/.github/workflows/ci.yaml b/src/tools/rust-analyzer/.github/workflows/ci.yaml index be830415f9cb7..07fa85a61cbd0 100644 --- a/src/tools/rust-analyzer/.github/workflows/ci.yaml +++ b/src/tools/rust-analyzer/.github/workflows/ci.yaml @@ -90,7 +90,7 @@ jobs: - name: Switch to stable toolchain run: | rustup update --no-self-update stable - rustup component add --toolchain stable rust-src + rustup component add --toolchain stable rust-src clippy rustup default stable - name: Run analysis-stats on rust-analyzer @@ -103,6 +103,10 @@ jobs: RUSTC_BOOTSTRAP: 1 run: target/${{ matrix.target }}/debug/rust-analyzer analysis-stats --with-deps $(rustc --print sysroot)/lib/rustlib/src/rust/library/std + - name: clippy + if: matrix.os == 'ubuntu-latest' + run: cargo clippy --all-targets + # Weird targets to catch non-portable code rust-cross: if: github.repository == 'rust-lang/rust-analyzer' @@ -203,11 +207,25 @@ jobs: working-directory: ./editors/code if: needs.changes.outputs.typescript == 'true' + typo-check: + name: Typo Check + runs-on: ubuntu-latest + timeout-minutes: 10 + env: + FORCE_COLOR: 1 + TYPOS_VERSION: v1.18.0 + steps: + - name: download typos + run: curl -LsSf https://github.com/crate-ci/typos/releases/download/$TYPOS_VERSION/typos-$TYPOS_VERSION-x86_64-unknown-linux-musl.tar.gz | tar zxf - -C ${CARGO_HOME:-~/.cargo}/bin + + - name: check for typos + run: typos + end-success: name: bors build finished if: github.event.pusher.name == 'bors' && success() runs-on: ubuntu-latest - needs: [rust, rust-cross, typescript] + needs: [rust, rust-cross, typescript, typo-check] steps: - name: Mark the job as successful run: exit 0 @@ -216,7 +234,7 @@ jobs: name: bors build finished if: github.event.pusher.name == 'bors' && !success() runs-on: ubuntu-latest - needs: [rust, rust-cross, typescript] + needs: [rust, rust-cross, typescript, typo-check] steps: - name: Mark the job as a failure run: exit 1 diff --git a/src/tools/rust-analyzer/.github/workflows/release.yaml b/src/tools/rust-analyzer/.github/workflows/release.yaml index 6a3cdfe3a3c7f..9077a9ac21eb6 100644 --- a/src/tools/rust-analyzer/.github/workflows/release.yaml +++ b/src/tools/rust-analyzer/.github/workflows/release.yaml @@ -43,10 +43,10 @@ jobs: - os: ubuntu-20.04 target: arm-unknown-linux-gnueabihf code-target: linux-armhf - - os: macos-11 + - os: macos-12 target: x86_64-apple-darwin code-target: darwin-x64 - - os: macos-11 + - os: macos-12 target: aarch64-apple-darwin code-target: darwin-arm64 diff --git a/src/tools/rust-analyzer/.typos.toml b/src/tools/rust-analyzer/.typos.toml new file mode 100644 index 0000000000000..e638a3e648d68 --- /dev/null +++ b/src/tools/rust-analyzer/.typos.toml @@ -0,0 +1,31 @@ +[default.extend-identifiers] +AnserStyle = "AnserStyle" +datas = "datas" +impl_froms = "impl_froms" +selfs = "selfs" + +[default.extend-words] +anser = "anser" +ba = "ba" +fo = "fo" +ket = "ket" +makro = "makro" +raison = "raison" +trivias = "trivias" +TOOD = "TOOD" + +[default] +extend-ignore-re = [ + # ignore string which contains $x (x is a num), which use widely in test + ".*\\$\\d.*", + # ignore generated content like `boxed....nner()`, `Defaul...efault` + "\\w*\\.{3,4}\\w*", +] + +[files] +extend-exclude = [ + "*.json", + "*.rast", + "crates/parser/test_data/lexer/err/*", + "bench_data/*", +] diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock index 7513abf17c8c4..e9492ce0202b2 100644 --- a/src/tools/rust-analyzer/Cargo.lock +++ 
b/src/tools/rust-analyzer/Cargo.lock @@ -19,11 +19,11 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] name = "always-assert" -version = "0.1.3" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4436e0292ab1bb631b42973c61205e704475fe8126af845c8d923c0996328127" +checksum = "a1078fa1ce1e34b1872d8611ad921196d76bdd7027e949fbe31231abde201892" dependencies = [ - "log", + "tracing", ] [[package]] @@ -78,6 +78,7 @@ dependencies = [ "span", "stdx", "syntax", + "tracing", "triomphe", "vfs", ] @@ -494,8 +495,10 @@ dependencies = [ "profile", "rustc-hash", "smallvec", + "span", "stdx", "syntax", + "tracing", "triomphe", "tt", ] @@ -592,7 +595,7 @@ dependencies = [ "profile", "project-model", "ra-ap-rustc_abi", - "ra-ap-rustc_index", + "ra-ap-rustc_index 0.35.0", "ra-ap-rustc_pattern_analysis", "rustc-hash", "scoped-tls", @@ -670,6 +673,7 @@ dependencies = [ "test-fixture", "test-utils", "text-edit", + "tracing", ] [[package]] @@ -690,6 +694,7 @@ dependencies = [ "test-fixture", "test-utils", "text-edit", + "tracing", ] [[package]] @@ -747,6 +752,7 @@ dependencies = [ "test-fixture", "test-utils", "text-edit", + "tracing", ] [[package]] @@ -1342,6 +1348,7 @@ dependencies = [ "once_cell", "perf-event", "tikv-jemalloc-ctl", + "tracing", "winapi", ] @@ -1419,12 +1426,12 @@ dependencies = [ [[package]] name = "ra-ap-rustc_abi" -version = "0.33.0" +version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ce9100fc66c6c60aeeb076868ead9c2eaa65d6a5a90404f08c242327a92ff4b" +checksum = "3c0baa423a2c2bfd6e4bd40e7215f7ddebd12a649ce0b65078a38b91068895aa" dependencies = [ "bitflags 2.4.1", - "ra-ap-rustc_index", + "ra-ap-rustc_index 0.35.0", "tracing", ] @@ -1435,7 +1442,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e5313d7f243b63ef9e58d94355b11aa8499f1328055f1f58adf0a5ea7d2faca" dependencies = [ "arrayvec", - "ra-ap-rustc_index_macros", + "ra-ap-rustc_index_macros 0.33.0", + "smallvec", +] + +[[package]] +name = "ra-ap-rustc_index" +version = "0.35.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "322b751895cc4a0a2ee0c6ab36ec80bc8abf5f8d76254c482f96f03c27c92ebe" +dependencies = [ + "arrayvec", + "ra-ap-rustc_index_macros 0.35.0", "smallvec", ] @@ -1451,11 +1469,23 @@ dependencies = [ "synstructure", ] +[[package]] +name = "ra-ap-rustc_index_macros" +version = "0.35.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "054e25eac52f0506c1309ca4317c11ad4925d7b99eb897f71aa7c3cbafb46c2b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.39", + "synstructure", +] + [[package]] name = "ra-ap-rustc_lexer" -version = "0.33.0" +version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2d221356e5717595e8a0afa5fba1620dcb4032ab784dc4d98fdc7284e3feb66" +checksum = "c8da0fa51a1a97ba4296a1c78fa454815a153b472e2546b6338a0902ad59e015" dependencies = [ "unicode-properties", "unicode-xid", @@ -1463,11 +1493,11 @@ dependencies = [ [[package]] name = "ra-ap-rustc_parse_format" -version = "0.33.0" +version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab62fc925612374103b4f178da347b535b35d9eb1ff5ba42105c990b2e25a164" +checksum = "3851f930a54adcb76889983dcd5c00a0c4e206e190e1384dbc00d49b82dfb45e" dependencies = [ - "ra-ap-rustc_index", + "ra-ap-rustc_index 0.35.0", "ra-ap-rustc_lexer", ] @@ -1478,7 +1508,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "6c4085e0c771fd4b883930b599ef42966b855762bbe4052c17673b3253421a6d" dependencies = [ "derivative", - "ra-ap-rustc_index", + "ra-ap-rustc_index 0.33.0", "rustc-hash", "rustc_apfloat", "smallvec", @@ -1581,7 +1611,6 @@ dependencies = [ "tikv-jemallocator", "toolchain", "tracing", - "tracing-log", "tracing-subscriber", "tracing-tree", "triomphe", @@ -1595,26 +1624,26 @@ dependencies = [ [[package]] name = "rust-analyzer-salsa" -version = "0.17.0-pre.5" +version = "0.17.0-pre.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca9d387a9801f4fb9b366789ad1bfc08448cafc49cf148d907cfcd88ab665d7f" +checksum = "719825638c59fd26a55412a24561c7c5bcf54364c88b9a7a04ba08a6eafaba8d" dependencies = [ "indexmap", "lock_api", - "log", "oorandom", "parking_lot", "rust-analyzer-salsa-macros", "rustc-hash", "smallvec", + "tracing", "triomphe", ] [[package]] name = "rust-analyzer-salsa-macros" -version = "0.17.0-pre.5" +version = "0.17.0-pre.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2035f385d7fae31e9b086f40b272ee1d79c484472f31c9a10348a406e841eaf" +checksum = "4d96498e9684848c6676c399032ebc37c52da95ecbefa83d71ccc53b9f8a4a8e" dependencies = [ "heck", "proc-macro2", @@ -1661,9 +1690,9 @@ dependencies = [ [[package]] name = "scip" -version = "0.3.1" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e84d21062a3ba08d58870c8c36b0c005b2b2261c6ad1bf7042585427c781883" +checksum = "e5dc1bd66649133af84ab62436ddd2856c2605182b02dec2cd197f684dfe15ef" dependencies = [ "protobuf", ] @@ -1863,6 +1892,7 @@ dependencies = [ "stdx", "test-utils", "text-edit", + "tracing", "triomphe", "ungrammar", ] @@ -1890,6 +1920,7 @@ dependencies = [ "rustc-hash", "stdx", "text-size", + "tracing", ] [[package]] diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml index 56db5a28c08a3..3fb5d9aa7a869 100644 --- a/src/tools/rust-analyzer/Cargo.toml +++ b/src/tools/rust-analyzer/Cargo.toml @@ -79,10 +79,10 @@ tt = { path = "./crates/tt", version = "0.0.0" } vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" } vfs = { path = "./crates/vfs", version = "0.0.0" } -ra-ap-rustc_lexer = { version = "0.33.0", default-features = false } -ra-ap-rustc_parse_format = { version = "0.33.0", default-features = false } -ra-ap-rustc_index = { version = "0.33.0", default-features = false } -ra-ap-rustc_abi = { version = "0.33.0", default-features = false } +ra-ap-rustc_lexer = { version = "0.35.0", default-features = false } +ra-ap-rustc_parse_format = { version = "0.35.0", default-features = false } +ra-ap-rustc_index = { version = "0.35.0", default-features = false } +ra-ap-rustc_abi = { version = "0.35.0", default-features = false } ra-ap-rustc_pattern_analysis = { version = "0.33.0", default-features = false } # local crates that aren't published to crates.io. These should not have versions. 
@@ -113,7 +113,7 @@ itertools = "0.12.0" libc = "0.2.150" nohash-hasher = "0.2.0" rayon = "1.8.0" -rust-analyzer-salsa = "0.17.0-pre.5" +rust-analyzer-salsa = "0.17.0-pre.6" rustc-hash = "1.1.0" semver = "1.0.14" serde = { version = "1.0.192", features = ["derive"] } @@ -128,9 +128,9 @@ text-size = "1.1.1" tracing = "0.1.40" tracing-tree = "0.3.0" tracing-subscriber = { version = "0.3.18", default-features = false, features = [ - "registry", - "fmt", - "tracing-log", + "registry", + "fmt", + "tracing-log", ] } triomphe = { version = "0.1.10", default-features = false, features = ["std"] } xshell = "0.2.5" @@ -167,29 +167,14 @@ new_ret_no_self = "allow" ## Following lints should be tackled at some point borrowed_box = "allow" -borrow_deref_ref = "allow" -derivable_impls = "allow" derived_hash_with_manual_eq = "allow" -field_reassign_with_default = "allow" forget_non_drop = "allow" -format_collect = "allow" -large_enum_variant = "allow" needless_doctest_main = "allow" -new_without_default = "allow" non_canonical_clone_impl = "allow" non_canonical_partial_ord_impl = "allow" self_named_constructors = "allow" -skip_while_next = "allow" too_many_arguments = "allow" -toplevel_ref_arg = "allow" type_complexity = "allow" -unnecessary_cast = "allow" -unnecessary_filter_map = "allow" -unnecessary_lazy_evaluations = "allow" -unnecessary_mut_passed = "allow" -useless_conversion = "allow" -useless_format = "allow" -wildcard_in_or_patterns = "allow" wrong_self_convention = "allow" ## warn at following lints diff --git a/src/tools/rust-analyzer/crates/base-db/Cargo.toml b/src/tools/rust-analyzer/crates/base-db/Cargo.toml index 1aa43175f90b2..485ba78846a85 100644 --- a/src/tools/rust-analyzer/crates/base-db/Cargo.toml +++ b/src/tools/rust-analyzer/crates/base-db/Cargo.toml @@ -17,6 +17,7 @@ rust-analyzer-salsa.workspace = true rustc-hash.workspace = true triomphe.workspace = true semver.workspace = true +tracing.workspace = true # local deps cfg.workspace = true @@ -27,4 +28,4 @@ vfs.workspace = true span.workspace = true [lints] -workspace = true \ No newline at end of file +workspace = true diff --git a/src/tools/rust-analyzer/crates/base-db/src/change.rs b/src/tools/rust-analyzer/crates/base-db/src/change.rs index 4332e572e2092..003ffb24d9d0d 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/change.rs +++ b/src/tools/rust-analyzer/crates/base-db/src/change.rs @@ -51,7 +51,7 @@ impl FileChange { } pub fn apply(self, db: &mut dyn SourceDatabaseExt) { - let _p = profile::span("RootDatabase::apply_change"); + let _p = tracing::span!(tracing::Level::INFO, "RootDatabase::apply_change").entered(); if let Some(roots) = self.roots { for (idx, root) in roots.into_iter().enumerate() { let root_id = SourceRootId(idx as u32); diff --git a/src/tools/rust-analyzer/crates/base-db/src/input.rs b/src/tools/rust-analyzer/crates/base-db/src/input.rs index 852f36ea712a6..51e6fdb9510a8 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/input.rs +++ b/src/tools/rust-analyzer/crates/base-db/src/input.rs @@ -494,7 +494,7 @@ impl CrateGraph { from: CrateId, dep: Dependency, ) -> Result<(), CyclicDependenciesError> { - let _p = profile::span("add_dep"); + let _p = tracing::span!(tracing::Level::INFO, "add_dep").entered(); self.check_cycle_after_dependency(from, dep.crate_id)?; diff --git a/src/tools/rust-analyzer/crates/base-db/src/lib.rs b/src/tools/rust-analyzer/crates/base-db/src/lib.rs index 90da7efd4a8b2..d7fc9d4c95cd6 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/lib.rs +++ 
b/src/tools/rust-analyzer/crates/base-db/src/lib.rs @@ -65,7 +65,7 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug { } fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse { - let _p = profile::span("parse_query").detail(|| format!("{file_id:?}")); + let _p = tracing::span!(tracing::Level::INFO, "parse_query", ?file_id).entered(); let text = db.file_text(file_id); SourceFile::parse(&text) } @@ -116,7 +116,7 @@ impl FileLoader for FileLoaderDelegate<&'_ T> { } fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]> { - let _p = profile::span("relevant_crates"); + let _p = tracing::span!(tracing::Level::INFO, "relevant_crates").entered(); let source_root = self.0.file_source_root(file_id); self.0.source_root_crates(source_root) } diff --git a/src/tools/rust-analyzer/crates/flycheck/src/lib.rs b/src/tools/rust-analyzer/crates/flycheck/src/lib.rs index 68faca51e8263..22603842a1b6b 100644 --- a/src/tools/rust-analyzer/crates/flycheck/src/lib.rs +++ b/src/tools/rust-analyzer/crates/flycheck/src/lib.rs @@ -493,7 +493,9 @@ impl CargoActor { // Skip certain kinds of messages to only spend time on what's useful JsonMessage::Cargo(message) => match message { cargo_metadata::Message::CompilerArtifact(artifact) if !artifact.fresh => { - self.sender.send(CargoMessage::CompilerArtifact(artifact)).unwrap(); + self.sender + .send(CargoMessage::CompilerArtifact(Box::new(artifact))) + .unwrap(); } cargo_metadata::Message::CompilerMessage(msg) => { self.sender.send(CargoMessage::Diagnostic(msg.message)).unwrap(); @@ -538,7 +540,7 @@ impl CargoActor { } enum CargoMessage { - CompilerArtifact(cargo_metadata::Artifact), + CompilerArtifact(Box), Diagnostic(Diagnostic), } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs index a8e081705e324..bee6f0083b1e4 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs @@ -75,7 +75,7 @@ impl Attrs { db: &dyn DefDatabase, v: VariantId, ) -> Arc> { - let _p = profile::span("fields_attrs_query"); + let _p = tracing::span!(tracing::Level::INFO, "fields_attrs_query").entered(); // FIXME: There should be some proper form of mapping between item tree field ids and hir field ids let mut res = ArenaMap::default(); @@ -322,7 +322,7 @@ impl AttrsWithOwner { } pub(crate) fn attrs_query(db: &dyn DefDatabase, def: AttrDefId) -> Attrs { - let _p = profile::span("attrs_query"); + let _p = tracing::span!(tracing::Level::INFO, "attrs_query").entered(); // FIXME: this should use `Trace` to avoid duplication in `source_map` below let raw_attrs = match def { AttrDefId::ModuleId(module) => { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body.rs b/src/tools/rust-analyzer/crates/hir-def/src/body.rs index e4308c6b7f1f2..ce8a9eab14a9d 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body.rs @@ -122,7 +122,7 @@ impl Body { db: &dyn DefDatabase, def: DefWithBodyId, ) -> (Arc, Arc) { - let _p = profile::span("body_with_source_map_query"); + let _p = tracing::span!(tracing::Level::INFO, "body_with_source_map_query").entered(); let mut params = None; let mut is_async_fn = false; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs index 0f2b279670c17..b821b91b8959e 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs @@ 
-33,7 +33,7 @@ pub(super) fn print_body_hir(db: &dyn DefDatabase, body: &Body, owner: DefWithBo } ) }), - DefWithBodyId::InTypeConstId(_) => format!("In type const = "), + DefWithBodyId::InTypeConstId(_) => "In type const = ".to_string(), DefWithBodyId::VariantId(it) => { let loc = it.lookup(db); let enum_loc = loc.parent.lookup(db); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs index a76ddffb41166..a27ffe216750e 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs @@ -256,7 +256,7 @@ impl SsrError { "##, ); - assert_eq!(db.body_with_source_map(def.into()).1.diagnostics(), &[]); + assert_eq!(db.body_with_source_map(def).1.diagnostics(), &[]); expect![[r#" fn main() { _ = $crate::error::SsrError::new( @@ -309,7 +309,7 @@ fn f() { "#, ); - let (_, source_map) = db.body_with_source_map(def.into()); + let (_, source_map) = db.body_with_source_map(def); assert_eq!(source_map.diagnostics(), &[]); for (_, def_map) in body.blocks(&db) { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/data.rs b/src/tools/rust-analyzer/crates/hir-def/src/data.rs index ca02b5d68e2e2..7ce05b64d022e 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/data.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/data.rs @@ -340,7 +340,7 @@ impl ImplData { db: &dyn DefDatabase, id: ImplId, ) -> (Arc, DefDiagnostics) { - let _p = profile::span("impl_data_with_diagnostics_query"); + let _p = tracing::span!(tracing::Level::INFO, "impl_data_with_diagnostics_query").entered(); let ItemLoc { container: module_id, id: tree_id } = id.lookup(db); let item_tree = tree_id.item_tree(db); @@ -782,7 +782,7 @@ impl<'a> AssocItemCollector<'a> { self.diagnostics.push(DefDiagnostic::macro_expansion_parse_error( self.module_id.local_id, error_call_kind(), - errors.into(), + errors, )); } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/db.rs b/src/tools/rust-analyzer/crates/hir-def/src/db.rs index 708abb5369377..68f57600ec450 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/db.rs @@ -1,9 +1,10 @@ //! Defines database & queries for name resolution. 
-use base_db::{salsa, CrateId, SourceDatabase, Upcast}; +use base_db::{salsa, CrateId, FileId, SourceDatabase, Upcast}; use either::Either; use hir_expand::{db::ExpandDatabase, HirFileId, MacroDefId}; use intern::Interned; use la_arena::ArenaMap; +use span::MacroCallId; use syntax::{ast, AstPtr}; use triomphe::Arc; @@ -234,10 +235,26 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast Option>; fn crate_supports_no_std(&self, crate_id: CrateId) -> bool; + + fn include_macro_invoc(&self, crate_id: CrateId) -> Vec<(MacroCallId, FileId)>; +} + +// return: macro call id and include file id +fn include_macro_invoc(db: &dyn DefDatabase, krate: CrateId) -> Vec<(MacroCallId, FileId)> { + db.crate_def_map(krate) + .modules + .values() + .flat_map(|m| m.scope.iter_macro_invoc()) + .filter_map(|invoc| { + db.lookup_intern_macro_call(*invoc.1) + .include_file_id(db.upcast(), *invoc.1) + .map(|x| (*invoc.1, x)) + }) + .collect() } fn crate_def_map_wait(db: &dyn DefDatabase, krate: CrateId) -> Arc { - let _p = profile::span("crate_def_map:wait"); + let _p = tracing::span!(tracing::Level::INFO, "crate_def_map:wait").entered(); db.crate_def_map_query(krate) } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs index efda8abf4b8bb..1a44c319de58e 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs @@ -13,7 +13,7 @@ use crate::{ item_scope::ItemInNs, nameres::DefMap, path::{ModPath, PathKind}, - visibility::{Visibility, VisibilityExplicity}, + visibility::{Visibility, VisibilityExplicitness}, CrateRootModuleId, ModuleDefId, ModuleId, }; @@ -26,7 +26,7 @@ pub fn find_path( prefer_no_std: bool, prefer_prelude: bool, ) -> Option { - let _p = profile::span("find_path"); + let _p = tracing::span!(tracing::Level::INFO, "find_path").entered(); find_path_inner(FindPathCtx { db, prefixed: None, prefer_no_std, prefer_prelude }, item, from) } @@ -38,7 +38,7 @@ pub fn find_path_prefixed( prefer_no_std: bool, prefer_prelude: bool, ) -> Option { - let _p = profile::span("find_path_prefixed"); + let _p = tracing::span!(tracing::Level::INFO, "find_path_prefixed").entered(); find_path_inner( FindPathCtx { db, prefixed: Some(prefix_kind), prefer_no_std, prefer_prelude }, item, @@ -497,7 +497,7 @@ fn find_local_import_locations( item: ItemInNs, from: ModuleId, ) -> Vec<(ModuleId, Name)> { - let _p = profile::span("find_local_import_locations"); + let _p = tracing::span!(tracing::Level::INFO, "find_local_import_locations").entered(); // `from` can import anything below `from` with visibility of at least `from`, and anything // above `from` with any visibility. 
That means we do not need to descend into private siblings @@ -544,11 +544,11 @@ fn find_local_import_locations( if let Some((name, vis, declared)) = data.scope.name_of(item) { if vis.is_visible_from(db, from) { let is_pub_or_explicit = match vis { - Visibility::Module(_, VisibilityExplicity::Explicit) => { + Visibility::Module(_, VisibilityExplicitness::Explicit) => { cov_mark::hit!(explicit_private_imports); true } - Visibility::Module(_, VisibilityExplicity::Implicit) => { + Visibility::Module(_, VisibilityExplicitness::Implicit) => { cov_mark::hit!(discount_private_imports); false } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/generics.rs b/src/tools/rust-analyzer/crates/hir-def/src/generics.rs index 7daae821f88d5..349d327aaae92 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/generics.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/generics.rs @@ -373,7 +373,7 @@ impl GenericParams { db: &dyn DefDatabase, def: GenericDefId, ) -> Interned { - let _p = profile::span("generic_params_query"); + let _p = tracing::span!(tracing::Level::INFO, "generic_params_query").entered(); let krate = def.module(db).krate; let cfg_options = db.crate_graph(); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs index c0d1738b5048e..b097a721c7586 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs @@ -166,6 +166,7 @@ enum PositionUsedAs { } use PositionUsedAs::*; +#[allow(clippy::unnecessary_lazy_evaluations)] pub(crate) fn parse( s: &ast::String, fmt_snippet: Option, @@ -177,9 +178,9 @@ pub(crate) fn parse( let text = s.text_without_quotes(); let str_style = match s.quote_offsets() { Some(offsets) => { - let raw = u32::from(offsets.quotes.0.len()) - 1; + let raw = usize::from(offsets.quotes.0.len()) - 1; // subtract 1 for the `r` prefix - (raw != 0).then(|| raw as usize - 1) + (raw != 0).then(|| raw - 1) } None => None, }; @@ -214,7 +215,7 @@ pub(crate) fn parse( let mut used = vec![false; args.explicit_args().len()]; let mut invalid_refs = Vec::new(); - let mut numeric_refences_to_named_arg = Vec::new(); + let mut numeric_references_to_named_arg = Vec::new(); enum ArgRef<'a> { Index(usize), @@ -231,7 +232,7 @@ pub(crate) fn parse( used[index] = true; if arg.kind.ident().is_some() { // This was a named argument, but it was used as a positional argument. 
- numeric_refences_to_named_arg.push((index, span, used_as)); + numeric_references_to_named_arg.push((index, span, used_as)); } Ok(index) } else { @@ -432,7 +433,7 @@ pub(crate) fn parse( } } -#[derive(Debug, Clone, PartialEq, Eq)] +#[derive(Clone, Debug, Default, Eq, PartialEq)] pub struct FormatArgumentsCollector { arguments: Vec, num_unnamed_args: usize, @@ -451,7 +452,7 @@ impl FormatArgumentsCollector { } pub fn new() -> Self { - Self { arguments: vec![], names: vec![], num_unnamed_args: 0, num_explicit_args: 0 } + Default::default() } pub fn add(&mut self, arg: FormatArgument) -> usize { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs index 15c127f15628c..c698510ca99b4 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs @@ -75,7 +75,7 @@ impl ImportMap { } pub(crate) fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc { - let _p = profile::span("import_map_query"); + let _p = tracing::span!(tracing::Level::INFO, "import_map_query").entered(); let map = Self::collect_import_map(db, krate); @@ -126,7 +126,7 @@ impl ImportMap { } fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> ImportMapIndex { - let _p = profile::span("collect_import_map"); + let _p = tracing::span!(tracing::Level::INFO, "collect_import_map").entered(); let def_map = db.crate_def_map(krate); let mut map = FxIndexMap::default(); @@ -216,7 +216,7 @@ impl ImportMap { is_type_in_ns: bool, trait_import_info: &ImportInfo, ) { - let _p = profile::span("collect_trait_assoc_items"); + let _p = tracing::span!(tracing::Level::INFO, "collect_trait_assoc_items").entered(); for &(ref assoc_item_name, item) in &db.trait_data(tr).items { let module_def_id = match item { AssocItemId::FunctionId(f) => ModuleDefId::from(f), @@ -297,7 +297,7 @@ impl SearchMode { SearchMode::Exact => candidate.eq_ignore_ascii_case(query), SearchMode::Prefix => { query.len() <= candidate.len() && { - let prefix = &candidate[..query.len() as usize]; + let prefix = &candidate[..query.len()]; if case_sensitive { prefix == query } else { @@ -396,9 +396,9 @@ impl Query { pub fn search_dependencies( db: &dyn DefDatabase, krate: CrateId, - ref query: Query, + query: &Query, ) -> FxHashSet { - let _p = profile::span("search_dependencies").detail(|| format!("{query:?}")); + let _p = tracing::span!(tracing::Level::INFO, "search_dependencies", ?query).entered(); let graph = db.crate_graph(); @@ -446,7 +446,7 @@ fn search_maps( let end = (value & 0xFFFF_FFFF) as usize; let start = (value >> 32) as usize; let ImportMap { item_to_info_map, importables, .. 
} = &*import_maps[import_map_idx]; - let importables = &importables[start as usize..end]; + let importables = &importables[start..end]; let iter = importables .iter() @@ -516,7 +516,7 @@ mod tests { }) .expect("could not find crate"); - let actual = search_dependencies(db.upcast(), krate, query) + let actual = search_dependencies(db.upcast(), krate, &query) .into_iter() .filter_map(|dependency| { let dependency_krate = dependency.krate(db.upcast())?; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs index 168ee4acffbef..0b0c838bedb53 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs @@ -17,7 +17,7 @@ use syntax::ast; use crate::{ db::DefDatabase, per_ns::PerNs, - visibility::{Visibility, VisibilityExplicity}, + visibility::{Visibility, VisibilityExplicitness}, AdtId, BuiltinType, ConstId, ExternCrateId, HasModule, ImplId, LocalModuleId, Lookup, MacroId, ModuleDefId, ModuleId, TraitId, UseId, }; @@ -336,6 +336,12 @@ impl ItemScope { pub(crate) fn macro_invoc(&self, call: AstId) -> Option { self.macro_invocations.get(&call).copied() } + + pub(crate) fn iter_macro_invoc( + &self, + ) -> impl Iterator, &MacroCallId)> { + self.macro_invocations.iter() + } } impl ItemScope { @@ -647,14 +653,16 @@ impl ItemScope { .map(|(_, vis, _)| vis) .chain(self.values.values_mut().map(|(_, vis, _)| vis)) .chain(self.unnamed_trait_imports.values_mut().map(|(vis, _)| vis)) - .for_each(|vis| *vis = Visibility::Module(this_module, VisibilityExplicity::Implicit)); + .for_each(|vis| { + *vis = Visibility::Module(this_module, VisibilityExplicitness::Implicit) + }); for (mac, vis, import) in self.macros.values_mut() { if matches!(mac, MacroId::ProcMacroId(_) if import.is_none()) { continue; } - *vis = Visibility::Module(this_module, VisibilityExplicity::Implicit); + *vis = Visibility::Module(this_module, VisibilityExplicitness::Implicit); } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs index c37cf52155102..336e0de7fd66f 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs @@ -69,7 +69,7 @@ use crate::{ generics::{GenericParams, LifetimeParamData, TypeOrConstParamData}, path::{path, AssociatedTypeBinding, GenericArgs, ImportAlias, ModPath, Path, PathKind}, type_ref::{Mutability, TraitRef, TypeBound, TypeRef}, - visibility::{RawVisibility, VisibilityExplicity}, + visibility::{RawVisibility, VisibilityExplicitness}, BlockId, Lookup, }; @@ -109,7 +109,8 @@ pub struct ItemTree { impl ItemTree { pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc { - let _p = profile::span("file_item_tree_query").detail(|| format!("{file_id:?}")); + let _p = tracing::span!(tracing::Level::INFO, "file_item_tree_query", ?file_id).entered(); + let syntax = db.parse_or_expand(file_id); let ctx = lower::Ctx::new(db, file_id); @@ -252,10 +253,10 @@ impl ItemVisibilities { RawVisibility::Public => RawVisibilityId::PUB, RawVisibility::Module(path, explicitiy) if path.segments().is_empty() => { match (&path.kind, explicitiy) { - (PathKind::Super(0), VisibilityExplicity::Explicit) => { + (PathKind::Super(0), VisibilityExplicitness::Explicit) => { RawVisibilityId::PRIV_EXPLICIT } - (PathKind::Super(0), VisibilityExplicity::Implicit) => { + (PathKind::Super(0), VisibilityExplicitness::Implicit) => { 
RawVisibilityId::PRIV_IMPLICIT } (PathKind::Crate, _) => RawVisibilityId::PUB_CRATE, @@ -269,11 +270,11 @@ impl ItemVisibilities { static VIS_PUB: RawVisibility = RawVisibility::Public; static VIS_PRIV_IMPLICIT: RawVisibility = - RawVisibility::Module(ModPath::from_kind(PathKind::Super(0)), VisibilityExplicity::Implicit); + RawVisibility::Module(ModPath::from_kind(PathKind::Super(0)), VisibilityExplicitness::Implicit); static VIS_PRIV_EXPLICIT: RawVisibility = - RawVisibility::Module(ModPath::from_kind(PathKind::Super(0)), VisibilityExplicity::Explicit); + RawVisibility::Module(ModPath::from_kind(PathKind::Super(0)), VisibilityExplicitness::Explicit); static VIS_PUB_CRATE: RawVisibility = - RawVisibility::Module(ModPath::from_kind(PathKind::Crate), VisibilityExplicity::Explicit); + RawVisibility::Module(ModPath::from_kind(PathKind::Crate), VisibilityExplicitness::Explicit); #[derive(Default, Debug, Eq, PartialEq)] struct ItemTreeData { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs index 60c8baf424dbb..7d98f6cfe88c1 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs @@ -91,7 +91,7 @@ impl LangItems { db: &dyn DefDatabase, krate: CrateId, ) -> Option> { - let _p = profile::span("crate_lang_items_query"); + let _p = tracing::span!(tracing::Level::INFO, "crate_lang_items_query").entered(); let mut lang_items = LangItems::default(); @@ -163,7 +163,7 @@ impl LangItems { start_crate: CrateId, item: LangItem, ) -> Option { - let _p = profile::span("lang_item_query"); + let _p = tracing::span!(tracing::Level::INFO, "lang_item_query").entered(); if let Some(target) = db.crate_lang_items(start_crate).and_then(|it| it.items.get(&item).copied()) { @@ -183,7 +183,7 @@ impl LangItems { ) where T: Into + Copy, { - let _p = profile::span("collect_lang_item"); + let _p = tracing::span!(tracing::Level::INFO, "collect_lang_item").entered(); if let Some(lang_item) = lang_attr(db, item.into()) { self.items.entry(lang_item).or_insert_with(|| constructor(item)); } @@ -199,7 +199,7 @@ pub(crate) fn notable_traits_in_deps( db: &dyn DefDatabase, krate: CrateId, ) -> Arc<[Arc<[TraitId]>]> { - let _p = profile::span("notable_traits_in_deps").detail(|| format!("{krate:?}")); + let _p = tracing::span!(tracing::Level::INFO, "notable_traits_in_deps", ?krate).entered(); let crate_graph = db.crate_graph(); Arc::from_iter( @@ -208,7 +208,7 @@ pub(crate) fn notable_traits_in_deps( } pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: CrateId) -> Option> { - let _p = profile::span("crate_notable_traits").detail(|| format!("{krate:?}")); + let _p = tracing::span!(tracing::Level::INFO, "crate_notable_traits", ?krate).entered(); let mut traits = Vec::new(); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs index 243de663977fd..71bc52133361f 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs @@ -745,7 +745,7 @@ impl InTypeConstId { } } -/// A constant, which might appears as a const item, an annonymous const block in expressions +/// A constant, which might appears as a const item, an anonymous const block in expressions /// or patterns, or as a constant in types with const generics. 
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum GeneralConstId { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs index 4690ca5d363cd..9596100b60e13 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs @@ -460,13 +460,13 @@ fn test_concat_expand() { #[rustc_builtin_macro] macro_rules! concat {} -fn main() { concat!("foo", "r", 0, r#"bar"#, "\n", false, '"', '\0'); } +fn main() { concat!("fo", "o", 0, r#"bar"#, "\n", false, '"', '\0'); } "##, expect![[r##" #[rustc_builtin_macro] macro_rules! concat {} -fn main() { "foor0bar\nfalse\"\u{0}"; } +fn main() { "foo0bar\nfalse\"\u{0}"; } "##]], ); } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs index 71ba49721746d..6717ee1aa5fdf 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs @@ -72,7 +72,7 @@ fn main() { } "#]], ); - // FIXME we should ahev testing infra for multi level expansion tests + // FIXME we should have testing infra for multi level expansion tests check( r#" macro_rules! __rust_force_expr { @@ -544,11 +544,11 @@ fn test_proptest_arbitrary() { check( r#" macro_rules! arbitrary { - ([$($bounds : tt)*] $typ: ty, $strat: ty, $params: ty; + ([$($bounds : tt)*] $typ: ty, $strategy: ty, $params: ty; $args: ident => $logic: expr) => { impl<$($bounds)*> $crate::arbitrary::Arbitrary for $typ { type Parameters = $params; - type Strategy = $strat; + type Strategy = $strategy; fn arbitrary_with($args: Self::Parameters) -> Self::Strategy { $logic } @@ -569,11 +569,11 @@ arbitrary!( "#, expect![[r#" macro_rules! 
arbitrary { - ([$($bounds : tt)*] $typ: ty, $strat: ty, $params: ty; + ([$($bounds : tt)*] $typ: ty, $strategy: ty, $params: ty; $args: ident => $logic: expr) => { impl<$($bounds)*> $crate::arbitrary::Arbitrary for $typ { type Parameters = $params; - type Strategy = $strat; + type Strategy = $strategy; fn arbitrary_with($args: Self::Parameters) -> Self::Strategy { $logic } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs index ec2994053877e..e315414e9bdd6 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -25,7 +25,7 @@ use hir_expand::{ InFile, MacroFileId, MacroFileIdExt, }; use span::Span; -use stdx::format_to; +use stdx::{format_to, format_to_acc}; use syntax::{ ast::{self, edit::IndentLevel}, AstNode, @@ -149,8 +149,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream if tree { let tree = format!("{:#?}", parse.syntax_node()) .split_inclusive('\n') - .map(|line| format!("// {line}")) - .collect::(); + .fold(String::new(), |mut acc, line| format_to_acc!(acc, "// {line}")); format_to!(expn_text, "\n{}", tree) } let range = call.syntax().text_range(); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs index 2295df16fdcc4..2a9390e797808 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs @@ -79,7 +79,7 @@ use crate::{ nameres::{diagnostics::DefDiagnostic, path_resolution::ResolveMode}, path::ModPath, per_ns::PerNs, - visibility::{Visibility, VisibilityExplicity}, + visibility::{Visibility, VisibilityExplicitness}, AstId, BlockId, BlockLoc, CrateRootModuleId, EnumId, EnumVariantId, ExternCrateId, FunctionId, LocalModuleId, Lookup, MacroExpander, MacroId, ModuleId, ProcMacroId, UseId, }; @@ -306,9 +306,10 @@ impl DefMap { pub const ROOT: LocalModuleId = LocalModuleId::from_raw(la_arena::RawIdx::from_u32(0)); pub(crate) fn crate_def_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc { - let _p = profile::span("crate_def_map_query").detail(|| { - db.crate_graph()[krate].display_name.as_deref().unwrap_or_default().to_string() - }); + let crate_graph = db.crate_graph(); + let krate_name = crate_graph[krate].display_name.as_deref().unwrap_or_default(); + + let _p = tracing::span!(tracing::Level::INFO, "crate_def_map_query", ?krate_name).entered(); let crate_graph = db.crate_graph(); @@ -335,7 +336,7 @@ impl DefMap { // this visibility for anything outside IDE, so that's probably OK. 
let visibility = Visibility::Module( ModuleId { krate, local_id, block: None }, - VisibilityExplicity::Implicit, + VisibilityExplicitness::Implicit, ); let module_data = ModuleData::new( ModuleOrigin::BlockExpr { block: block.ast_id, id: block_id }, diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs index 248d3213d5d75..fb6fd867a1618 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs @@ -273,7 +273,7 @@ struct DefCollector<'a> { impl DefCollector<'_> { fn seed_with_top_level(&mut self) { - let _p = profile::span("seed_with_top_level"); + let _p = tracing::span!(tracing::Level::INFO, "seed_with_top_level").entered(); let file_id = self.db.crate_graph()[self.def_map.krate].root_file_id; let item_tree = self.db.file_item_tree(file_id.into()); @@ -401,7 +401,7 @@ impl DefCollector<'_> { } fn resolution_loop(&mut self) { - let _p = profile::span("DefCollector::resolution_loop"); + let _p = tracing::span!(tracing::Level::INFO, "DefCollector::resolution_loop").entered(); // main name resolution fixed-point loop. let mut i = 0; @@ -410,7 +410,7 @@ impl DefCollector<'_> { self.db.unwind_if_cancelled(); { - let _p = profile::span("resolve_imports loop"); + let _p = tracing::span!(tracing::Level::INFO, "resolve_imports loop").entered(); 'resolve_imports: loop { if self.resolve_imports() == ReachedFixedPoint::Yes { @@ -436,7 +436,7 @@ impl DefCollector<'_> { } fn collect(&mut self) { - let _p = profile::span("DefCollector::collect"); + let _p = tracing::span!(tracing::Level::INFO, "DefCollector::collect").entered(); self.resolution_loop(); @@ -792,8 +792,8 @@ impl DefCollector<'_> { } fn resolve_import(&self, module_id: LocalModuleId, import: &Import) -> PartialResolvedImport { - let _p = profile::span("resolve_import") - .detail(|| format!("{}", import.path.display(self.db.upcast()))); + let _p = tracing::span!(tracing::Level::INFO, "resolve_import", import_path = %import.path.display(self.db.upcast())) + .entered(); tracing::debug!("resolving import: {:?} ({:?})", import, self.def_map.data.edition); match import.source { ImportSource::ExternCrate { .. } => { @@ -856,7 +856,7 @@ impl DefCollector<'_> { } fn record_resolved_import(&mut self, directive: &ImportDirective) { - let _p = profile::span("record_resolved_import"); + let _p = tracing::span!(tracing::Level::INFO, "record_resolved_import").entered(); let module_id = directive.module_id; let import = &directive.import; @@ -1430,7 +1430,7 @@ impl DefCollector<'_> { fn finish(mut self) -> DefMap { // Emit diagnostics for all remaining unexpanded macros. 
- let _p = profile::span("DefCollector::finish"); + let _p = tracing::span!(tracing::Level::INFO, "DefCollector::finish").entered(); for directive in &self.unresolved_macros { match &directive.kind { @@ -1924,7 +1924,7 @@ impl ModCollector<'_, '_> { item_tree: self.item_tree, mod_dir, } - .collect_in_top_module(&*items); + .collect_in_top_module(items); if is_macro_use { self.import_all_legacy_macros(module_id); } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs index 01f79f042f789..1e13f7f8fd018 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs @@ -87,7 +87,7 @@ impl DefMap { within_impl: bool, ) -> Option { let mut vis = match visibility { - RawVisibility::Module(path, explicity) => { + RawVisibility::Module(path, explicitness) => { let (result, remaining) = self.resolve_path(db, original_module, path, BuiltinShadowMode::Module, None); if remaining.is_some() { @@ -95,7 +95,7 @@ impl DefMap { } let types = result.take_types()?; match types { - ModuleDefId::ModuleId(m) => Visibility::Module(m, *explicity), + ModuleDefId::ModuleId(m) => Visibility::Module(m, *explicitness), // error: visibility needs to refer to module _ => { return None; @@ -269,7 +269,7 @@ impl DefMap { stdx::never!(module.is_block_module()); if self.block != def_map.block { - // If we have a different `DefMap` from `self` (the orignal `DefMap` we started + // If we have a different `DefMap` from `self` (the original `DefMap` we started // with), resolve the remaining path segments in that `DefMap`. let path = ModPath::from_segments(PathKind::Super(0), path.segments().iter().cloned()); @@ -475,7 +475,7 @@ impl DefMap { let macro_use_prelude = || { self.macro_use_prelude.get(name).map_or(PerNs::none(), |&(it, _extern_crate)| { PerNs::macros( - it.into(), + it, Visibility::Public, // FIXME? None, // extern_crate.map(ImportOrExternCrate::ExternCrate), @@ -540,7 +540,7 @@ impl DefMap { } } -/// Given a block module, returns its nearest non-block module and the `DefMap` it blongs to. +/// Given a block module, returns its nearest non-block module and the `DefMap` it belongs to. 
fn adjust_to_nearest_non_block_module( db: &dyn DefDatabase, def_map: &DefMap, diff --git a/src/tools/rust-analyzer/crates/hir-def/src/per_ns.rs b/src/tools/rust-analyzer/crates/hir-def/src/per_ns.rs index 14890364d0bd1..36ab62d0f7f1b 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/per_ns.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/per_ns.rs @@ -16,19 +16,13 @@ pub enum Namespace { Macros, } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Clone, Copy, Debug, Default, Eq, Hash, PartialEq)] pub struct PerNs { pub types: Option<(ModuleDefId, Visibility, Option)>, pub values: Option<(ModuleDefId, Visibility, Option)>, pub macros: Option<(MacroId, Visibility, Option)>, } -impl Default for PerNs { - fn default() -> Self { - PerNs { types: None, values: None, macros: None } - } -} - impl PerNs { pub fn none() -> PerNs { PerNs { types: None, values: None, macros: None } @@ -92,7 +86,7 @@ impl PerNs { } pub fn filter_visibility(self, mut f: impl FnMut(Visibility) -> bool) -> PerNs { - let _p = profile::span("PerNs::filter_visibility"); + let _p = tracing::span!(tracing::Level::INFO, "PerNs::filter_visibility").entered(); PerNs { types: self.types.filter(|&(_, v, _)| f(v)), values: self.values.filter(|&(_, v, _)| f(v)), @@ -125,19 +119,17 @@ impl PerNs { } pub fn iter_items(self) -> impl Iterator)> { - let _p = profile::span("PerNs::iter_items"); + let _p = tracing::span!(tracing::Level::INFO, "PerNs::iter_items").entered(); self.types .map(|it| (ItemInNs::Types(it.0), it.2)) .into_iter() .chain( self.values - .map(|it| (ItemInNs::Values(it.0), it.2.map(ImportOrExternCrate::Import))) - .into_iter(), + .map(|it| (ItemInNs::Values(it.0), it.2.map(ImportOrExternCrate::Import))), ) .chain( self.macros - .map(|it| (ItemInNs::Macros(it.0), it.2.map(ImportOrExternCrate::Import))) - .into_iter(), + .map(|it| (ItemInNs::Macros(it.0), it.2.map(ImportOrExternCrate::Import))), ) } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs index c992c3c920486..9edb03c7cab90 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs @@ -41,13 +41,13 @@ impl Default for TestDB { impl Upcast for TestDB { fn upcast(&self) -> &(dyn ExpandDatabase + 'static) { - &*self + self } } impl Upcast for TestDB { fn upcast(&self) -> &(dyn DefDatabase + 'static) { - &*self + self } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs b/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs index 3294ce29a4a05..b9676179a5fdd 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs @@ -20,14 +20,17 @@ use crate::{ pub enum RawVisibility { /// `pub(in module)`, `pub(crate)` or `pub(super)`. Also private, which is /// equivalent to `pub(self)`. - Module(ModPath, VisibilityExplicity), + Module(ModPath, VisibilityExplicitness), /// `pub`. 
Public, } impl RawVisibility { pub(crate) const fn private() -> RawVisibility { - RawVisibility::Module(ModPath::from_kind(PathKind::Super(0)), VisibilityExplicity::Implicit) + RawVisibility::Module( + ModPath::from_kind(PathKind::Super(0)), + VisibilityExplicitness::Implicit, + ) } pub(crate) fn from_ast( @@ -53,19 +56,19 @@ impl RawVisibility { None => return RawVisibility::private(), Some(path) => path, }; - RawVisibility::Module(path, VisibilityExplicity::Explicit) + RawVisibility::Module(path, VisibilityExplicitness::Explicit) } ast::VisibilityKind::PubCrate => { let path = ModPath::from_kind(PathKind::Crate); - RawVisibility::Module(path, VisibilityExplicity::Explicit) + RawVisibility::Module(path, VisibilityExplicitness::Explicit) } ast::VisibilityKind::PubSuper => { let path = ModPath::from_kind(PathKind::Super(1)); - RawVisibility::Module(path, VisibilityExplicity::Explicit) + RawVisibility::Module(path, VisibilityExplicitness::Explicit) } ast::VisibilityKind::PubSelf => { let path = ModPath::from_kind(PathKind::Super(0)); - RawVisibility::Module(path, VisibilityExplicity::Explicit) + RawVisibility::Module(path, VisibilityExplicitness::Explicit) } ast::VisibilityKind::Pub => RawVisibility::Public, } @@ -85,7 +88,7 @@ impl RawVisibility { #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum Visibility { /// Visibility is restricted to a certain module. - Module(ModuleId, VisibilityExplicity), + Module(ModuleId, VisibilityExplicitness), /// Visibility is unrestricted. Public, } @@ -206,12 +209,12 @@ impl Visibility { /// Whether the item was imported through `pub(crate) use` or just `use`. #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub enum VisibilityExplicity { +pub enum VisibilityExplicitness { Explicit, Implicit, } -impl VisibilityExplicity { +impl VisibilityExplicitness { pub fn is_explicit(&self) -> bool { matches!(self, Self::Explicit) } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs index 30d38299d99d4..c20c1639e1a49 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs @@ -230,12 +230,12 @@ impl Attr { ) ) }) - .unwrap_or_else(|| tt.len()); + .unwrap_or(tt.len()); let (path, input) = tt.split_at(path_end); let path = Interned::new(ModPath::from_tt(db, path)?); - let input = match input.get(0) { + let input = match input.first() { Some(tt::TokenTree::Subtree(tree)) => { Some(Interned::new(AttrInput::TokenTree(Box::new(tree.clone())))) } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs index 8c43017971fb0..f220284fae7c8 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs @@ -80,6 +80,9 @@ pub trait ExpandDatabase: SourceDatabase { #[salsa::invoke(SpanMap::new)] fn span_map(&self, file_id: HirFileId) -> SpanMap; + #[salsa::transparent] + #[salsa::invoke(crate::span_map::expansion_span_map)] + fn expansion_span_map(&self, file_id: MacroFileId) -> Arc; #[salsa::invoke(crate::span_map::real_span_map)] fn real_span_map(&self, file_id: FileId) -> Arc; @@ -280,7 +283,7 @@ fn parse_macro_expansion( db: &dyn ExpandDatabase, macro_file: MacroFileId, ) -> ExpandResult<(Parse, Arc)> { - let _p = profile::span("parse_macro_expansion"); + let _p = tracing::span!(tracing::Level::INFO, "parse_macro_expansion").entered(); let loc = db.lookup_intern_macro_call(macro_file.macro_call_id); let 
expand_to = loc.expand_to(); let mbe::ValueResult { value: tt, err } = macro_expand(db, macro_file.macro_call_id, loc); @@ -501,7 +504,7 @@ fn macro_expand( macro_call_id: MacroCallId, loc: MacroCallLoc, ) -> ExpandResult> { - let _p = profile::span("macro_expand"); + let _p = tracing::span!(tracing::Level::INFO, "macro_expand").entered(); let ExpandResult { value: tt, mut err } = match loc.def.kind { MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id).map(CowArc::Arc), diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/files.rs b/src/tools/rust-analyzer/crates/hir-expand/src/files.rs index d0a1bef11c3b7..707daf040242d 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/files.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/files.rs @@ -5,7 +5,7 @@ use either::Either; use span::{FileId, FileRange, HirFileId, HirFileIdRepr, MacroFileId, SyntaxContextId}; use syntax::{AstNode, SyntaxNode, SyntaxToken, TextRange, TextSize}; -use crate::{db, ExpansionInfo, MacroFileIdExt}; +use crate::{db, map_node_range_up, span_for_offset, MacroFileIdExt}; /// `InFile` stores a value of `T` inside a particular file/syntax tree. /// @@ -147,7 +147,7 @@ impl InFile<&SyntaxNode> { HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, HirFileIdRepr::MacroFile(mac_file) => { if let Some((res, ctxt)) = - ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range()) + map_node_range_up(db, &db.expansion_span_map(mac_file), self.value.text_range()) { // FIXME: Figure out an API that makes proper use of ctx, this only exists to // keep pre-token map rewrite behaviour. @@ -163,12 +163,15 @@ impl InFile<&SyntaxNode> { } /// Falls back to the macro call range if the node cannot be mapped up fully. - pub fn original_file_range_full(self, db: &dyn db::ExpandDatabase) -> FileRange { + pub fn original_file_range_with_macro_call_body( + self, + db: &dyn db::ExpandDatabase, + ) -> FileRange { match self.file_id.repr() { HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, HirFileIdRepr::MacroFile(mac_file) => { if let Some((res, ctxt)) = - ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range()) + map_node_range_up(db, &db.expansion_span_map(mac_file), self.value.text_range()) { // FIXME: Figure out an API that makes proper use of ctx, this only exists to // keep pre-token map rewrite behaviour. @@ -193,7 +196,7 @@ impl InFile<&SyntaxNode> { Some((FileRange { file_id, range: self.value.text_range() }, SyntaxContextId::ROOT)) } HirFileIdRepr::MacroFile(mac_file) => { - ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range()) + map_node_range_up(db, &db.expansion_span_map(mac_file), self.value.text_range()) } } } @@ -215,7 +218,7 @@ impl InFile<&SyntaxNode> { } let (FileRange { file_id, range }, ctx) = - ExpansionInfo::new(db, file_id).map_node_range_up(db, self.value.text_range())?; + map_node_range_up(db, &db.expansion_span_map(file_id), self.value.text_range())?; // FIXME: Figure out an API that makes proper use of ctx, this only exists to // keep pre-token map rewrite behaviour. 
@@ -246,8 +249,11 @@ impl InFile { match self.file_id.repr() { HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, HirFileIdRepr::MacroFile(mac_file) => { - let (range, ctxt) = ExpansionInfo::new(db, mac_file) - .span_for_offset(db, self.value.text_range().start()); + let (range, ctxt) = span_for_offset( + db, + &db.expansion_span_map(mac_file), + self.value.text_range().start(), + ); // FIXME: Figure out an API that makes proper use of ctx, this only exists to // keep pre-token map rewrite behaviour. @@ -269,8 +275,11 @@ impl InFile { Some(FileRange { file_id, range: self.value.text_range() }) } HirFileIdRepr::MacroFile(mac_file) => { - let (range, ctxt) = ExpansionInfo::new(db, mac_file) - .span_for_offset(db, self.value.text_range().start()); + let (range, ctxt) = span_for_offset( + db, + &db.expansion_span_map(mac_file), + self.value.text_range().start(), + ); // FIXME: Figure out an API that makes proper use of ctx, this only exists to // keep pre-token map rewrite behaviour. @@ -286,7 +295,7 @@ impl InFile { impl InMacroFile { pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> (FileRange, SyntaxContextId) { - ExpansionInfo::new(db, self.file_id).span_for_offset(db, self.value) + span_for_offset(db, &db.expansion_span_map(self.file_id), self.value) } } @@ -300,7 +309,7 @@ impl InFile { (FileRange { file_id, range: self.value }, SyntaxContextId::ROOT) } HirFileIdRepr::MacroFile(mac_file) => { - match ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value) { + match map_node_range_up(db, &db.expansion_span_map(mac_file), self.value) { Some(it) => it, None => { let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); @@ -315,7 +324,7 @@ impl InFile { match self.file_id.repr() { HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value }, HirFileIdRepr::MacroFile(mac_file) => { - match ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value) { + match map_node_range_up(db, &db.expansion_span_map(mac_file), self.value) { Some((it, SyntaxContextId::ROOT)) => it, _ => { let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); @@ -335,7 +344,7 @@ impl InFile { Some((FileRange { file_id, range: self.value }, SyntaxContextId::ROOT)) } HirFileIdRepr::MacroFile(mac_file) => { - ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value) + map_node_range_up(db, &db.expansion_span_map(mac_file), self.value) } } } @@ -355,8 +364,11 @@ impl InFile { return None; } - let (FileRange { file_id, range }, ctx) = ExpansionInfo::new(db, file_id) - .map_node_range_up(db, self.value.syntax().text_range())?; + let (FileRange { file_id, range }, ctx) = map_node_range_up( + db, + &db.expansion_span_map(file_id), + self.value.syntax().text_range(), + )?; // FIXME: Figure out an API that makes proper use of ctx, this only exists to // keep pre-token map rewrite behaviour. 
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs index 05f12527a4400..bd25052490360 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs @@ -523,6 +523,24 @@ impl MacroCallLoc { } } } + + pub fn include_file_id( + &self, + db: &dyn ExpandDatabase, + macro_call_id: MacroCallId, + ) -> Option { + if self.def.is_include() { + if let Some(eager) = &self.eager { + if let Ok(it) = + builtin_fn_macro::include_input_to_file_id(db, macro_call_id, &eager.arg) + { + return Some(it); + } + } + } + + None + } } impl MacroCallKind { @@ -659,6 +677,10 @@ impl ExpansionInfo { Some(self.arg.with_value(self.arg.value.as_ref()?.parent()?)) } + pub fn call_file(&self) -> HirFileId { + self.arg.file_id + } + /// Maps the passed in file range down into a macro expansion if it is the input to a macro call. pub fn map_range_down( &self, @@ -679,13 +701,7 @@ impl ExpansionInfo { offset: TextSize, ) -> (FileRange, SyntaxContextId) { debug_assert!(self.expanded.value.text_range().contains(offset)); - let span = self.exp_map.span_at(offset); - let anchor_offset = db - .ast_id_map(span.anchor.file_id.into()) - .get_erased(span.anchor.ast_id) - .text_range() - .start(); - (FileRange { file_id: span.anchor.file_id, range: span.range + anchor_offset }, span.ctx) + span_for_offset(db, &self.exp_map, offset) } /// Maps up the text range out of the expansion hierarchy back into the original file its from. @@ -695,27 +711,7 @@ impl ExpansionInfo { range: TextRange, ) -> Option<(FileRange, SyntaxContextId)> { debug_assert!(self.expanded.value.text_range().contains_range(range)); - let mut spans = self.exp_map.spans_for_range(range); - let Span { range, anchor, ctx } = spans.next()?; - let mut start = range.start(); - let mut end = range.end(); - - for span in spans { - if span.anchor != anchor || span.ctx != ctx { - return None; - } - start = start.min(span.range.start()); - end = end.max(span.range.end()); - } - let anchor_offset = - db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start(); - Some(( - FileRange { - file_id: anchor.file_id, - range: TextRange::new(start, end) + anchor_offset, - }, - ctx, - )) + map_node_range_up(db, &self.exp_map, range) } /// Maps up the text range out of the expansion into is macro call. @@ -804,6 +800,47 @@ impl ExpansionInfo { } } +/// Maps up the text range out of the expansion hierarchy back into the original file its from. +pub fn map_node_range_up( + db: &dyn ExpandDatabase, + exp_map: &ExpansionSpanMap, + range: TextRange, +) -> Option<(FileRange, SyntaxContextId)> { + let mut spans = exp_map.spans_for_range(range); + let Span { range, anchor, ctx } = spans.next()?; + let mut start = range.start(); + let mut end = range.end(); + + for span in spans { + if span.anchor != anchor || span.ctx != ctx { + return None; + } + start = start.min(span.range.start()); + end = end.max(span.range.end()); + } + let anchor_offset = + db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start(); + Some(( + FileRange { file_id: anchor.file_id, range: TextRange::new(start, end) + anchor_offset }, + ctx, + )) +} + +/// Looks up the span at the given offset. 
+pub fn span_for_offset( + db: &dyn ExpandDatabase, + exp_map: &ExpansionSpanMap, + offset: TextSize, +) -> (FileRange, SyntaxContextId) { + let span = exp_map.span_at(offset); + let anchor_offset = db + .ast_id_map(span.anchor.file_id.into()) + .get_erased(span.anchor.ast_id) + .text_range() + .start(); + (FileRange { file_id: span.anchor.file_id, range: span.range + anchor_offset }, span.ctx) +} + /// In Rust, macros expand token trees to token trees. When we want to turn a /// token tree into an AST node, we need to figure out what kind of AST node we /// want: something like `foo` can be a type, an expression, or a pattern. diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs index 0eb1fc1eb5009..b64c3549e421e 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs @@ -232,7 +232,7 @@ fn convert_path( ast::PathSegmentKind::SuperKw => { let mut deg = 1; let mut next_segment = None; - while let Some(segment) = segments.next() { + for segment in segments.by_ref() { match segment.kind()? { ast::PathSegmentKind::SuperKw => deg += 1, ast::PathSegmentKind::Name(name) => { @@ -284,13 +284,13 @@ fn convert_path( } fn convert_path_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree]) -> Option { - let mut leafs = tt.iter().filter_map(|tt| match tt { + let mut leaves = tt.iter().filter_map(|tt| match tt { tt::TokenTree::Leaf(leaf) => Some(leaf), tt::TokenTree::Subtree(_) => None, }); let mut segments = smallvec::smallvec![]; - let kind = match leafs.next()? { - tt::Leaf::Punct(tt::Punct { char: ':', .. }) => match leafs.next()? { + let kind = match leaves.next()? { + tt::Leaf::Punct(tt::Punct { char: ':', .. }) => match leaves.next()? { tt::Leaf::Punct(tt::Punct { char: ':', .. }) => PathKind::Abs, _ => return None, }, @@ -300,7 +300,7 @@ fn convert_path_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree]) -> Option PathKind::Super(0), tt::Leaf::Ident(tt::Ident { text, .. }) if text == "super" => { let mut deg = 1; - while let Some(tt::Leaf::Ident(tt::Ident { text, .. })) = leafs.next() { + while let Some(tt::Leaf::Ident(tt::Ident { text, .. })) = leaves.next() { if text != "super" { segments.push(Name::new_text_dont_use(text.clone())); break; @@ -316,7 +316,7 @@ fn convert_path_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree]) -> Option return None, }; - segments.extend(leafs.filter_map(|leaf| match leaf { + segments.extend(leaves.filter_map(|leaf| match leaf { ::tt::Leaf::Ident(ident) => Some(Name::new_text_dont_use(ident.text.clone())), _ => None, })); diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs index 25c78fade824f..70b47fc54b11c 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs @@ -1,4 +1,4 @@ -//! Proc Macro Expander stub +//! Proc Macro Expander stuff use core::fmt; use std::{panic::RefUnwindSafe, sync}; diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs b/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs index 8e624f5585d48..4a60a9485608a 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs @@ -1,5 +1,5 @@ //! Span maps for real files and macro expansions. 
-use span::{FileId, HirFileId, HirFileIdRepr, Span}; +use span::{FileId, HirFileId, HirFileIdRepr, MacroFileId, Span}; use syntax::{AstNode, TextRange}; use triomphe::Arc; @@ -94,3 +94,10 @@ pub(crate) fn real_span_map(db: &dyn ExpandDatabase, file_id: FileId) -> Arc Arc { + db.parse_macro_expansion(file_id).value.1 +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs index 4625a3b01a36c..991fd2f91df85 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs @@ -142,7 +142,7 @@ pub(crate) fn deref_by_trait( table @ &mut InferenceTable { db, .. }: &mut InferenceTable<'_>, ty: Ty, ) -> Option { - let _p = profile::span("deref_by_trait"); + let _p = tracing::span!(tracing::Level::INFO, "deref_by_trait").entered(); if table.resolve_ty_shallow(&ty).inference_var(Interner).is_some() { // don't try to deref unknown variables return None; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs index 4d509f20d01ca..7e460f9f867ae 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs @@ -689,7 +689,7 @@ pub(crate) fn impl_datum_query( krate: CrateId, impl_id: ImplId, ) -> Arc { - let _p = profile::span("impl_datum"); + let _p = tracing::span!(tracing::Level::INFO, "impl_datum").entered(); debug!("impl_datum {:?}", impl_id); let impl_: hir_def::ImplId = from_chalk(db, impl_id); impl_def_datum(db, krate, impl_id, impl_) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs index aa7ca48b18ba6..21679150b3428 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs @@ -118,7 +118,7 @@ pub trait HirDatabase: DefDatabase + Upcast { fn layout_of_ty(&self, ty: Ty, env: Arc) -> Result, LayoutError>; #[salsa::invoke(crate::layout::target_data_layout_query)] - fn target_data_layout(&self, krate: CrateId) -> Option>; + fn target_data_layout(&self, krate: CrateId) -> Result, Arc>; #[salsa::invoke(crate::method_resolution::lookup_impl_method_query)] fn lookup_impl_method( @@ -281,7 +281,7 @@ pub trait HirDatabase: DefDatabase + Upcast { } fn infer_wait(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc { - let _p = profile::span("infer:wait").detail(|| match def { + let detail = match def { DefWithBodyId::FunctionId(it) => db.function_data(it).name.display(db.upcast()).to_string(), DefWithBodyId::StaticId(it) => { db.static_data(it).name.clone().display(db.upcast()).to_string() @@ -297,7 +297,8 @@ fn infer_wait(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc db.enum_variant_data(it).name.display(db.upcast()).to_string() } DefWithBodyId::InTypeConstId(it) => format!("in type const {it:?}"), - }); + }; + let _p = tracing::span!(tracing::Level::INFO, "infer:wait", ?detail).entered(); db.infer_query(def) } @@ -307,7 +308,7 @@ fn trait_solve_wait( block: Option, goal: crate::Canonical>, ) -> Option { - let _p = profile::span("trait_solve::wait"); + let _p = tracing::span!(tracing::Level::INFO, "trait_solve::wait").entered(); db.trait_solve_query(krate, block, goal) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs index a37dba480564e..78f2005e676c7 100644 --- 
a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs @@ -45,7 +45,7 @@ mod allow { } pub fn incorrect_case(db: &dyn HirDatabase, owner: ModuleDefId) -> Vec { - let _p = profile::span("validate_module_item"); + let _p = tracing::span!(tracing::Level::INFO, "validate_module_item").entered(); let mut validator = DeclValidator::new(db); validator.validate_item(owner); validator.sink diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs index 530608292e6fd..eda8f2371c9cd 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs @@ -48,7 +48,8 @@ pub enum BodyValidationDiagnostic { impl BodyValidationDiagnostic { pub fn collect(db: &dyn HirDatabase, owner: DefWithBodyId) -> Vec { - let _p = profile::span("BodyValidationDiagnostic::collect"); + let _p = + tracing::span!(tracing::Level::INFO, "BodyValidationDiagnostic::collect").entered(); let infer = db.infer(owner); let mut validator = ExprValidator::new(owner, infer); validator.validate_body(db); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs index cd67ca599310c..0b595042cd567 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs @@ -237,7 +237,7 @@ impl<'p> MatchCheckCtx<'p> { ctor = Or; // Collect here because `Arena::alloc_extend` panics on reentrancy. let subpats: SmallVec<[_; 2]> = - pats.into_iter().map(|pat| self.lower_pat(pat)).collect(); + pats.iter().map(|pat| self.lower_pat(pat)).collect(); fields = self.pattern_arena.alloc_extend(subpats); } } @@ -460,7 +460,8 @@ impl<'p> TypeCx for MatchCheckCtx<'p> { _f: &mut fmt::Formatter<'_>, _pat: &rustc_pattern_analysis::pat::DeconstructedPat<'_, Self>, ) -> fmt::Result { - unimplemented!() + // FIXME: implement this, as using `unimplemented!()` causes panics in `tracing`. + Ok(()) } fn bug(&self, fmt: fmt::Arguments<'_>) -> ! { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs index 96c7949e3d626..2327c8df1b44c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs @@ -605,8 +605,11 @@ fn render_const_scalar( write!(f, "{}", f.db.union_data(u).name.display(f.db.upcast())) } hir_def::AdtId::EnumId(e) => { + let Ok(target_data_layout) = f.db.target_data_layout(trait_env.krate) else { + return f.write_str(""); + }; let Some((var_id, var_layout)) = - detect_variant_from_bytes(&layout, f.db, trait_env, b, e) + detect_variant_from_bytes(&layout, f.db, &target_data_layout, b, e) else { return f.write_str(""); }; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs index 0d89269b32509..71c3f89716d82 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs @@ -75,7 +75,7 @@ pub(crate) use closure::{CaptureKind, CapturedItem, CapturedItemWithoutTy}; /// The entry point of type inference. 
pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc { - let _p = profile::span("infer_query"); + let _p = tracing::span!(tracing::Level::INFO, "infer_query").entered(); let resolver = def.resolver(db.upcast()); let body = db.body(def); let mut ctx = InferenceContext::new(db, def, &body, resolver); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs index 9c415400775a2..61c82339508de 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs @@ -509,7 +509,8 @@ impl<'a> InferenceTable<'a> { } pub(crate) fn resolve_obligations_as_possible(&mut self) { - let _span = profile::span("resolve_obligations_as_possible"); + let _span = + tracing::span!(tracing::Level::INFO, "resolve_obligations_as_possible").entered(); let mut changed = true; let mut obligations = mem::take(&mut self.resolve_obligations_buffer); while mem::take(&mut changed) { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs b/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs index a63556f450dda..532b650e8ff27 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs @@ -84,8 +84,7 @@ impl TypeVisitor for UninhabitedFrom<'_> { Some(0) | None => CONTINUE_OPAQUELY_INHABITED, Some(1..) => item_ty.super_visit_with(self, outer_binder), }, - - TyKind::Ref(..) | _ => CONTINUE_OPAQUELY_INHABITED, + _ => CONTINUE_OPAQUELY_INHABITED, }; self.recursive_ty.remove(ty); self.max_depth += 1; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs index 2b84cb6b13d2c..310c4cc9ffeca 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs @@ -198,7 +198,7 @@ pub fn layout_of_ty_query( trait_env: Arc, ) -> Result, LayoutError> { let krate = trait_env.krate; - let Some(target) = db.target_data_layout(krate) else { + let Ok(target) = db.target_data_layout(krate) else { return Err(LayoutError::TargetLayoutNotAvailable); }; let cx = LayoutCx { target: &target }; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs index 47930358a11d6..4cc7dffc24ebb 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs @@ -32,7 +32,7 @@ pub fn layout_of_adt_query( trait_env: Arc, ) -> Result, LayoutError> { let krate = trait_env.krate; - let Some(target) = db.target_data_layout(krate) else { + let Ok(target) = db.target_data_layout(krate) else { return Err(LayoutError::TargetLayoutNotAvailable); }; let cx = LayoutCx { target: &target }; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/target.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/target.rs index b2185a03ea2dd..5bfe7bf010f1c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/target.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/target.rs @@ -2,6 +2,7 @@ use base_db::CrateId; use hir_def::layout::TargetDataLayout; +use rustc_abi::{AlignFromBytesError, TargetDataLayoutErrors}; use triomphe::Arc; use crate::db::HirDatabase; @@ -9,15 +10,40 @@ use crate::db::HirDatabase; pub fn target_data_layout_query( db: &dyn HirDatabase, krate: CrateId, -) -> Option> { +) -> Result, Arc> { let crate_graph = db.crate_graph(); - let 
target_layout = crate_graph[krate].target_layout.as_ref().ok()?; - let res = TargetDataLayout::parse_from_llvm_datalayout_string(target_layout); - if let Err(_e) = &res { - // FIXME: Print the error here once it implements debug/display - // also logging here is somewhat wrong, but unfortunately this is the earliest place we can - // parse that doesn't impose a dependency to the rust-abi crate for project-model - tracing::error!("Failed to parse target data layout for {krate:?}"); + let res = crate_graph[krate].target_layout.as_deref(); + match res { + Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(it) { + Ok(it) => Ok(Arc::new(it)), + Err(e) => { + Err(match e { + TargetDataLayoutErrors::InvalidAddressSpace { addr_space, cause, err } => { + format!( + r#"invalid address space `{addr_space}` for `{cause}` in "data-layout": {err}"# + ) + } + TargetDataLayoutErrors::InvalidBits { kind, bit, cause, err } => format!(r#"invalid {kind} `{bit}` for `{cause}` in "data-layout": {err}"#), + TargetDataLayoutErrors::MissingAlignment { cause } => format!(r#"missing alignment for `{cause}` in "data-layout""#), + TargetDataLayoutErrors::InvalidAlignment { cause, err } => format!( + r#"invalid alignment for `{cause}` in "data-layout": `{align}` is {err_kind}"#, + align = err.align(), + err_kind = match err { + AlignFromBytesError::NotPowerOfTwo(_) => "not a power of two", + AlignFromBytesError::TooLarge(_) => "too large", + } + ), + TargetDataLayoutErrors::InconsistentTargetArchitecture { dl, target } => { + format!(r#"inconsistent target specification: "data-layout" claims architecture is {dl}-endian, while "target-endian" is `{target}`"#) + } + TargetDataLayoutErrors::InconsistentTargetPointerWidth { + pointer_size, + target, + } => format!(r#"inconsistent target specification: "data-layout" claims pointers are {pointer_size}-bit, while "target-pointer-width" is `{target}`"#), + TargetDataLayoutErrors::InvalidBitsSize { err } => err, + }.into()) + } + }, + Err(e) => Err(Arc::from(&**e)), } - res.ok().map(Arc::new) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs index 54e91e7b29a66..288c42405d6a3 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs @@ -363,7 +363,6 @@ has_interner!(CallableSig); pub enum FnAbi { Aapcs, AapcsUnwind, - AmdgpuKernel, AvrInterrupt, AvrNonBlockingInterrupt, C, @@ -422,7 +421,6 @@ impl FnAbi { match s { "aapcs-unwind" => FnAbi::AapcsUnwind, "aapcs" => FnAbi::Aapcs, - "amdgpu-kernel" => FnAbi::AmdgpuKernel, "avr-interrupt" => FnAbi::AvrInterrupt, "avr-non-blocking-interrupt" => FnAbi::AvrNonBlockingInterrupt, "C-cmse-nonsecure-call" => FnAbi::CCmseNonsecureCall, @@ -465,7 +463,6 @@ impl FnAbi { match self { FnAbi::Aapcs => "aapcs", FnAbi::AapcsUnwind => "aapcs-unwind", - FnAbi::AmdgpuKernel => "amdgpu-kernel", FnAbi::AvrInterrupt => "avr-interrupt", FnAbi::AvrNonBlockingInterrupt => "avr-non-blocking-interrupt", FnAbi::C => "C", diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs index f8ce3008f1a96..1c068bf684ae0 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs @@ -143,7 +143,8 @@ pub struct TraitImpls { impl TraitImpls { pub(crate) fn trait_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc { - let _p = 
profile::span("trait_impls_in_crate_query").detail(|| format!("{krate:?}")); + let _p = + tracing::span!(tracing::Level::INFO, "trait_impls_in_crate_query", ?krate).entered(); let mut impls = FxHashMap::default(); Self::collect_def_map(db, &mut impls, &db.crate_def_map(krate)); @@ -155,7 +156,7 @@ impl TraitImpls { db: &dyn HirDatabase, block: BlockId, ) -> Option> { - let _p = profile::span("trait_impls_in_block_query"); + let _p = tracing::span!(tracing::Level::INFO, "trait_impls_in_block_query").entered(); let mut impls = FxHashMap::default(); Self::collect_def_map(db, &mut impls, &db.block_def_map(block)); @@ -171,7 +172,8 @@ impl TraitImpls { db: &dyn HirDatabase, krate: CrateId, ) -> Arc<[Arc]> { - let _p = profile::span("trait_impls_in_deps_query").detail(|| format!("{krate:?}")); + let _p = + tracing::span!(tracing::Level::INFO, "trait_impls_in_deps_query", ?krate).entered(); let crate_graph = db.crate_graph(); Arc::from_iter( @@ -272,7 +274,8 @@ pub struct InherentImpls { impl InherentImpls { pub(crate) fn inherent_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc { - let _p = profile::span("inherent_impls_in_crate_query").detail(|| format!("{krate:?}")); + let _p = + tracing::span!(tracing::Level::INFO, "inherent_impls_in_crate_query", ?krate).entered(); let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() }; let crate_def_map = db.crate_def_map(krate); @@ -286,7 +289,7 @@ impl InherentImpls { db: &dyn HirDatabase, block: BlockId, ) -> Option> { - let _p = profile::span("inherent_impls_in_block_query"); + let _p = tracing::span!(tracing::Level::INFO, "inherent_impls_in_block_query").entered(); let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() }; let block_def_map = db.block_def_map(block); @@ -359,7 +362,7 @@ pub(crate) fn incoherent_inherent_impl_crates( krate: CrateId, fp: TyFingerprint, ) -> SmallVec<[CrateId; 2]> { - let _p = profile::span("inherent_impl_crates_query"); + let _p = tracing::span!(tracing::Level::INFO, "inherent_impl_crates_query").entered(); let mut res = SmallVec::new(); let crate_graph = db.crate_graph(); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs index f7d043fc4e6a8..ea4e60cad3006 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs @@ -71,7 +71,7 @@ pub fn borrowck_query( db: &dyn HirDatabase, def: DefWithBodyId, ) -> Result, MirLowerError> { - let _p = profile::span("borrowck_query"); + let _p = tracing::span!(tracing::Level::INFO, "borrowck_query").entered(); let mut res = vec![]; all_mir_bodies(db, def, |body| { res.push(BorrowckResult { @@ -444,7 +444,7 @@ fn mutability_of_locals( } if destination.projection.lookup(&body.projection_store).is_empty() { if ever_init_map.get(destination.local).copied().unwrap_or_default() { - push_mut_span(destination.local, MirSpan::Unknown, &mut result); + push_mut_span(destination.local, terminator.span, &mut result); } else { ever_init_map.insert(destination.local, true); } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs index 8143dc05c38d4..50c4d00660b7e 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs @@ -17,6 +17,7 @@ use hir_def::{ use hir_expand::{mod_path::ModPath, HirFileIdExt, InFile}; use intern::Interned; use 
la_arena::ArenaMap; +use rustc_abi::TargetDataLayout; use rustc_hash::{FxHashMap, FxHashSet}; use stdx::never; use syntax::{SyntaxNodePtr, TextRange}; @@ -51,7 +52,7 @@ macro_rules! from_bytes { ($ty:tt, $value:expr) => { ($ty::from_le_bytes(match ($value).try_into() { Ok(it) => it, - Err(_) => return Err(MirEvalError::TypeError(stringify!(mismatched size in constructing $ty))), + Err(_) => return Err(MirEvalError::InternalError(stringify!(mismatched size in constructing $ty).into())), })) }; } @@ -145,6 +146,7 @@ enum MirOrDynIndex { pub struct Evaluator<'a> { db: &'a dyn HirDatabase, trait_env: Arc, + target_data_layout: Arc, stack: Vec, heap: Vec, code_stack: Vec, @@ -316,12 +318,12 @@ impl Address { pub enum MirEvalError { ConstEvalError(String, Box), LayoutError(LayoutError, Ty), - /// Means that code had type errors (or mismatched args) and we shouldn't generate mir in first place. - TypeError(&'static str), + TargetDataLayoutNotAvailable(Arc), /// Means that code had undefined behavior. We don't try to actively detect UB, but if it was detected /// then use this type of error. UndefinedBehavior(String), Panic(String), + // FIXME: This should be folded into ConstEvalError? MirLowerError(FunctionId, MirLowerError), MirLowerErrorForClosure(ClosureId, MirLowerError), TypeIsUnsized(Ty, &'static str), @@ -330,11 +332,12 @@ pub enum MirEvalError { InFunction(Box, Vec<(Either, MirSpan, DefWithBodyId)>), ExecutionLimitExceeded, StackOverflow, - TargetDataLayoutNotAvailable, + /// FIXME: Fold this into InternalError InvalidVTableId(usize), + /// ? CoerceUnsizedError(Ty), - LangItemNotFound(LangItem), - BrokenLayout(Box), + /// These should not occur, usually indicates a bug in mir lowering. + InternalError(Box), } impl MirEvalError { @@ -359,8 +362,8 @@ impl MirEvalError { func )?; } - Either::Right(clos) => { - writeln!(f, "In {:?}", clos)?; + Either::Right(closure) => { + writeln!(f, "In {:?}", closure)?; } } let source_map = db.body_with_source_map(*def).1; @@ -406,8 +409,8 @@ impl MirEvalError { span_formatter, )?; } - MirEvalError::TypeError(_) - | MirEvalError::UndefinedBehavior(_) + MirEvalError::UndefinedBehavior(_) + | MirEvalError::TargetDataLayoutNotAvailable(_) | MirEvalError::Panic(_) | MirEvalError::MirLowerErrorForClosure(_, _) | MirEvalError::TypeIsUnsized(_, _) @@ -415,10 +418,8 @@ impl MirEvalError { | MirEvalError::InvalidConst(_) | MirEvalError::ExecutionLimitExceeded | MirEvalError::StackOverflow - | MirEvalError::TargetDataLayoutNotAvailable | MirEvalError::CoerceUnsizedError(_) - | MirEvalError::LangItemNotFound(_) - | MirEvalError::BrokenLayout(_) + | MirEvalError::InternalError(_) | MirEvalError::InvalidVTableId(_) => writeln!(f, "{:?}", err)?, } Ok(()) @@ -431,16 +432,16 @@ impl std::fmt::Debug for MirEvalError { Self::ConstEvalError(arg0, arg1) => { f.debug_tuple("ConstEvalError").field(arg0).field(arg1).finish() } - Self::LangItemNotFound(arg0) => f.debug_tuple("LangItemNotFound").field(arg0).finish(), Self::LayoutError(arg0, arg1) => { f.debug_tuple("LayoutError").field(arg0).field(arg1).finish() } - Self::TypeError(arg0) => f.debug_tuple("TypeError").field(arg0).finish(), Self::UndefinedBehavior(arg0) => { f.debug_tuple("UndefinedBehavior").field(arg0).finish() } Self::Panic(msg) => write!(f, "Panic with message:\n{msg:?}"), - Self::TargetDataLayoutNotAvailable => write!(f, "TargetDataLayoutNotAvailable"), + Self::TargetDataLayoutNotAvailable(arg0) => { + f.debug_tuple("TargetDataLayoutNotAvailable").field(arg0).finish() + } Self::TypeIsUnsized(ty, it) => 
write!(f, "{ty:?} is unsized. {it} should be sized."), Self::ExecutionLimitExceeded => write!(f, "execution limit exceeded"), Self::StackOverflow => write!(f, "stack overflow"), @@ -453,7 +454,7 @@ impl std::fmt::Debug for MirEvalError { Self::CoerceUnsizedError(arg0) => { f.debug_tuple("CoerceUnsizedError").field(arg0).finish() } - Self::BrokenLayout(arg0) => f.debug_tuple("BrokenLayout").field(arg0).finish(), + Self::InternalError(arg0) => f.debug_tuple("InternalError").field(arg0).finish(), Self::InvalidVTableId(arg0) => f.debug_tuple("InvalidVTableId").field(arg0).finish(), Self::NotSupported(arg0) => f.debug_tuple("NotSupported").field(arg0).finish(), Self::InvalidConst(arg0) => { @@ -530,7 +531,11 @@ pub fn interpret_mir( trait_env: Option>, ) -> (Result, MirOutput) { let ty = body.locals[return_slot()].ty.clone(); - let mut evaluator = Evaluator::new(db, body.owner, assert_placeholder_ty_is_unused, trait_env); + let mut evaluator = + match Evaluator::new(db, body.owner, assert_placeholder_ty_is_unused, trait_env) { + Ok(it) => it, + Err(e) => return (Err(e), MirOutput { stdout: vec![], stderr: vec![] }), + }; let it: Result = (|| { if evaluator.ptr_size() != std::mem::size_of::() { not_supported!("targets with different pointer size from host"); @@ -566,9 +571,15 @@ impl Evaluator<'_> { owner: DefWithBodyId, assert_placeholder_ty_is_unused: bool, trait_env: Option>, - ) -> Evaluator<'_> { + ) -> Result> { let crate_id = owner.module(db.upcast()).krate(); - Evaluator { + let target_data_layout = match db.target_data_layout(crate_id) { + Ok(target_data_layout) => target_data_layout, + Err(e) => return Err(MirEvalError::TargetDataLayoutNotAvailable(e)), + }; + let cached_ptr_size = target_data_layout.pointer_size.bytes_usize(); + Ok(Evaluator { + target_data_layout, stack: vec![0], heap: vec![0], code_stack: vec![], @@ -590,10 +601,7 @@ impl Evaluator<'_> { not_special_fn_cache: RefCell::new(Default::default()), mir_or_dyn_index_cache: RefCell::new(Default::default()), unused_locals_store: RefCell::new(Default::default()), - cached_ptr_size: match db.target_data_layout(crate_id) { - Some(it) => it.pointer_size.bytes_usize(), - None => 8, - }, + cached_ptr_size, cached_fn_trait_func: db .lang_item(crate_id, LangItem::Fn) .and_then(|x| x.as_trait()) @@ -606,7 +614,7 @@ impl Evaluator<'_> { .lang_item(crate_id, LangItem::FnOnce) .and_then(|x| x.as_trait()) .and_then(|x| db.trait_data(x).method_by_name(&name![call_once])), - } + }) } fn place_addr(&self, p: &Place, locals: &Locals) -> Result
{ @@ -754,8 +762,8 @@ impl Evaluator<'_> { RustcEnumVariantIdx(it.lookup(self.db.upcast()).index as usize) } _ => { - return Err(MirEvalError::TypeError( - "Multivariant layout only happens for enums", + return Err(MirEvalError::InternalError( + "mismatched layout".into(), )) } }] @@ -993,12 +1001,12 @@ impl Evaluator<'_> { IntervalOrOwned::Borrowed(value) => interval.write_from_interval(self, value)?, } if remain_args == 0 { - return Err(MirEvalError::TypeError("more arguments provided")); + return Err(MirEvalError::InternalError("too many arguments".into())); } remain_args -= 1; } if remain_args > 0 { - return Err(MirEvalError::TypeError("not enough arguments provided")); + return Err(MirEvalError::InternalError("too few arguments".into())); } Ok(()) } @@ -1071,8 +1079,8 @@ impl Evaluator<'_> { match metadata { Some(m) => m, None => { - return Err(MirEvalError::TypeError( - "type without metadata is used for Rvalue::Len", + return Err(MirEvalError::InternalError( + "type without metadata is used for Rvalue::Len".into(), )); } } @@ -1312,7 +1320,7 @@ impl Evaluator<'_> { } AggregateKind::Tuple(ty) => { let layout = self.layout(ty)?; - Owned(self.make_by_layout( + Owned(self.construct_with_layout( layout.size.bytes_usize(), &layout, None, @@ -1334,7 +1342,7 @@ impl Evaluator<'_> { AggregateKind::Adt(it, subst) => { let (size, variant_layout, tag) = self.layout_of_variant(*it, subst.clone(), locals)?; - Owned(self.make_by_layout( + Owned(self.construct_with_layout( size, &variant_layout, tag, @@ -1343,7 +1351,7 @@ impl Evaluator<'_> { } AggregateKind::Closure(ty) => { let layout = self.layout(ty)?; - Owned(self.make_by_layout( + Owned(self.construct_with_layout( layout.size.bytes_usize(), &layout, None, @@ -1415,10 +1423,7 @@ impl Evaluator<'_> { Ok(r) } Variants::Multiple { tag, tag_encoding, variants, .. } => { - let Some(target_data_layout) = self.db.target_data_layout(self.crate_id) else { - not_supported!("missing target data layout"); - }; - let size = tag.size(&*target_data_layout).bytes_usize(); + let size = tag.size(&*self.target_data_layout).bytes_usize(); let offset = layout.fields.offset(0).bytes_usize(); // The only field on enum variants is the tag field match tag_encoding { TagEncoding::Direct => { @@ -1458,9 +1463,8 @@ impl Evaluator<'_> { if let TyKind::Adt(id, subst) = kind { if let AdtId::StructId(struct_id) = id.0 { let field_types = self.db.field_types(struct_id.into()); - let mut field_types = field_types.iter(); if let Some(ty) = - field_types.next().map(|it| it.1.clone().substitute(Interner, subst)) + field_types.iter().last().map(|it| it.1.clone().substitute(Interner, subst)) { return self.coerce_unsized_look_through_fields(&ty, goal); } @@ -1578,10 +1582,6 @@ impl Evaluator<'_> { Ok(match &layout.variants { Variants::Single { .. } => (layout.size.bytes_usize(), layout, None), Variants::Multiple { variants, tag, tag_encoding, .. 
} => { - let cx = self - .db - .target_data_layout(self.crate_id) - .ok_or(MirEvalError::TargetDataLayoutNotAvailable)?; let enum_variant_id = match it { VariantId::EnumVariantId(it) => it, _ => not_supported!("multi variant layout for non-enums"), @@ -1612,7 +1612,7 @@ impl Evaluator<'_> { if have_tag { Some(( layout.fields.offset(0).bytes_usize(), - tag.size(&*cx).bytes_usize(), + tag.size(&*self.target_data_layout).bytes_usize(), discriminant, )) } else { @@ -1623,7 +1623,7 @@ impl Evaluator<'_> { }) } - fn make_by_layout( + fn construct_with_layout( &mut self, size: usize, // Not necessarily equal to variant_layout.size variant_layout: &Layout, @@ -1634,7 +1634,14 @@ impl Evaluator<'_> { if let Some((offset, size, value)) = tag { match result.get_mut(offset..offset + size) { Some(it) => it.copy_from_slice(&value.to_le_bytes()[0..size]), - None => return Err(MirEvalError::BrokenLayout(Box::new(variant_layout.clone()))), + None => { + return Err(MirEvalError::InternalError( + format!( + "encoded tag ({offset}, {size}, {value}) is out of bounds 0..{size}" + ) + .into(), + )) + } } } for (i, op) in values.enumerate() { @@ -1642,7 +1649,11 @@ impl Evaluator<'_> { let op = op.get(self)?; match result.get_mut(offset..offset + op.len()) { Some(it) => it.copy_from_slice(op), - None => return Err(MirEvalError::BrokenLayout(Box::new(variant_layout.clone()))), + None => { + return Err(MirEvalError::InternalError( + format!("field offset ({offset}) is out of bounds 0..{size}").into(), + )) + } } } Ok(result) @@ -1695,28 +1706,29 @@ impl Evaluator<'_> { } ConstScalar::Unknown => not_supported!("evaluating unknown const"), }; - let mut v: Cow<'_, [u8]> = Cow::Borrowed(v); let patch_map = memory_map.transform_addresses(|b, align| { let addr = self.heap_allocate(b.len(), align)?; self.write_memory(addr, b)?; Ok(addr.to_usize()) })?; let (size, align) = self.size_align_of(ty, locals)?.unwrap_or((v.len(), 1)); - if size != v.len() { + let v: Cow<'_, [u8]> = if size != v.len() { // Handle self enum if size == 16 && v.len() < 16 { - v = Cow::Owned(pad16(&v, false).to_vec()); + Cow::Owned(pad16(v, false).to_vec()) } else if size < 16 && v.len() == 16 { - v = Cow::Owned(v[0..size].to_vec()); + Cow::Borrowed(&v[0..size]) } else { return Err(MirEvalError::InvalidConst(konst.clone())); } - } + } else { + Cow::Borrowed(v) + }; let addr = self.heap_allocate(size, align)?; self.write_memory(addr, &v)?; self.patch_addresses( &patch_map, - |bytes| match &memory_map { + |bytes| match memory_map { MemoryMap::Empty | MemoryMap::Simple(_) => { Err(MirEvalError::InvalidVTableId(from_bytes!(usize, bytes))) } @@ -2000,7 +2012,7 @@ impl Evaluator<'_> { if let Some((v, l)) = detect_variant_from_bytes( &layout, this.db, - this.trait_env.clone(), + &this.target_data_layout, bytes, e, ) { @@ -2079,7 +2091,7 @@ impl Evaluator<'_> { if let Some((ev, layout)) = detect_variant_from_bytes( &layout, self.db, - self.trait_env.clone(), + &self.target_data_layout, self.read_memory(addr, layout.size.bytes_usize())?, e, ) { @@ -2153,14 +2165,14 @@ impl Evaluator<'_> { ) -> Result> { let id = from_bytes!(usize, bytes.get(self)?); let next_ty = self.vtable_map.ty(id)?.clone(); - match &next_ty.kind(Interner) { + match next_ty.kind(Interner) { TyKind::FnDef(def, generic_args) => { self.exec_fn_def(*def, generic_args, destination, args, locals, target_bb, span) } TyKind::Closure(id, subst) => { self.exec_closure(*id, bytes.slice(0..0), subst, destination, args, locals, span) } - _ => Err(MirEvalError::TypeError("function pointer to non 
function")), + _ => Err(MirEvalError::InternalError("function pointer to non function".into())), } } @@ -2241,7 +2253,7 @@ impl Evaluator<'_> { CallableDefId::StructId(id) => { let (size, variant_layout, tag) = self.layout_of_variant(id.into(), generic_args, locals)?; - let result = self.make_by_layout( + let result = self.construct_with_layout( size, &variant_layout, tag, @@ -2253,7 +2265,7 @@ impl Evaluator<'_> { CallableDefId::EnumVariantId(id) => { let (size, variant_layout, tag) = self.layout_of_variant(id.into(), generic_args, locals)?; - let result = self.make_by_layout( + let result = self.construct_with_layout( size, &variant_layout, tag, @@ -2407,7 +2419,9 @@ impl Evaluator<'_> { target_bb: Option, span: MirSpan, ) -> Result> { - let func = args.first().ok_or(MirEvalError::TypeError("fn trait with no arg"))?; + let func = args + .first() + .ok_or_else(|| MirEvalError::InternalError("fn trait with no arg".into()))?; let mut func_ty = func.ty.clone(); let mut func_data = func.interval; while let TyKind::Ref(_, _, z) = func_ty.kind(Interner) { @@ -2450,7 +2464,7 @@ impl Evaluator<'_> { ) .intern(Interner); let layout = self.layout(&ty)?; - let result = self.make_by_layout( + let result = self.construct_with_layout( layout.size.bytes_usize(), &layout, None, @@ -2634,7 +2648,7 @@ pub fn render_const_using_debug_impl( owner: ConstId, c: &Const, ) -> Result { - let mut evaluator = Evaluator::new(db, owner.into(), false, None); + let mut evaluator = Evaluator::new(db, owner.into(), false, None)?; let locals = &Locals { ptr: ArenaMap::new(), body: db @@ -2699,12 +2713,7 @@ pub fn render_const_using_debug_impl( pub fn pad16(it: &[u8], is_signed: bool) -> [u8; 16] { let is_negative = is_signed && it.last().unwrap_or(&0) > &127; - let fill_with = if is_negative { 255 } else { 0 }; - it.iter() - .copied() - .chain(iter::repeat(fill_with)) - .take(16) - .collect::>() - .try_into() - .expect("iterator take is not working") + let mut res = [if is_negative { 255 } else { 0 }; 16]; + res[..it.len()].copy_from_slice(it); + res } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs index 4336e1e53b7f5..b4fb99acae778 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs @@ -18,7 +18,7 @@ macro_rules! 
from_bytes { ($ty:tt, $value:expr) => { ($ty::from_le_bytes(match ($value).try_into() { Ok(it) => it, - Err(_) => return Err(MirEvalError::TypeError("mismatched size")), + Err(_) => return Err(MirEvalError::InternalError("mismatched size".into())), })) }; } @@ -249,7 +249,9 @@ impl Evaluator<'_> { match alloc_fn { "rustc_allocator_zeroed" | "rustc_allocator" => { let [size, align] = args else { - return Err(MirEvalError::TypeError("rustc_allocator args are not provided")); + return Err(MirEvalError::InternalError( + "rustc_allocator args are not provided".into(), + )); }; let size = from_bytes!(usize, size.get(self)?); let align = from_bytes!(usize, align.get(self)?); @@ -259,7 +261,9 @@ impl Evaluator<'_> { "rustc_deallocator" => { /* no-op for now */ } "rustc_reallocator" => { let [ptr, old_size, align, new_size] = args else { - return Err(MirEvalError::TypeError("rustc_allocator args are not provided")); + return Err(MirEvalError::InternalError( + "rustc_allocator args are not provided".into(), + )); }; let old_size = from_bytes!(usize, old_size.get(self)?); let new_size = from_bytes!(usize, new_size.get(self)?); @@ -339,22 +343,22 @@ impl Evaluator<'_> { Err(MirEvalError::Panic(message)) } SliceLen => { - let arg = args - .next() - .ok_or(MirEvalError::TypeError("argument of <[T]>::len() is not provided"))?; + let arg = args.next().ok_or(MirEvalError::InternalError( + "argument of <[T]>::len() is not provided".into(), + ))?; let ptr_size = arg.len() / 2; Ok(arg[ptr_size..].into()) } DropInPlace => { let ty = generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner)).ok_or( - MirEvalError::TypeError( - "generic argument of drop_in_place is not provided", + MirEvalError::InternalError( + "generic argument of drop_in_place is not provided".into(), ), )?; - let arg = args - .next() - .ok_or(MirEvalError::TypeError("argument of drop_in_place is not provided"))?; + let arg = args.next().ok_or(MirEvalError::InternalError( + "argument of drop_in_place is not provided".into(), + ))?; self.run_drop_glue_deep( ty.clone(), locals, @@ -380,7 +384,9 @@ impl Evaluator<'_> { 318 => { // SYS_getrandom let [buf, len, _flags] = args else { - return Err(MirEvalError::TypeError("SYS_getrandom args are not provided")); + return Err(MirEvalError::InternalError( + "SYS_getrandom args are not provided".into(), + )); }; let addr = Address::from_bytes(buf.get(self)?)?; let size = from_bytes!(usize, len.get(self)?); @@ -408,7 +414,7 @@ impl Evaluator<'_> { match as_str { "memcmp" => { let [ptr1, ptr2, size] = args else { - return Err(MirEvalError::TypeError("memcmp args are not provided")); + return Err(MirEvalError::InternalError("memcmp args are not provided".into())); }; let addr1 = Address::from_bytes(ptr1.get(self)?)?; let addr2 = Address::from_bytes(ptr2.get(self)?)?; @@ -424,7 +430,9 @@ impl Evaluator<'_> { } "write" => { let [fd, ptr, len] = args else { - return Err(MirEvalError::TypeError("libc::write args are not provided")); + return Err(MirEvalError::InternalError( + "libc::write args are not provided".into(), + )); }; let fd = u128::from_le_bytes(pad16(fd.get(self)?, false)); let interval = Interval { @@ -446,14 +454,16 @@ impl Evaluator<'_> { "pthread_key_create" => { let key = self.thread_local_storage.create_key(); let Some(arg0) = args.first() else { - return Err(MirEvalError::TypeError("pthread_key_create arg0 is not provided")); + return Err(MirEvalError::InternalError( + "pthread_key_create arg0 is not provided".into(), + )); }; let arg0_addr = 
Address::from_bytes(arg0.get(self)?)?; let key_ty = if let Some((ty, ..)) = arg0.ty.as_reference_or_ptr() { ty } else { - return Err(MirEvalError::TypeError( - "pthread_key_create arg0 is not a pointer", + return Err(MirEvalError::InternalError( + "pthread_key_create arg0 is not a pointer".into(), )); }; let arg0_interval = Interval::new( @@ -467,8 +477,8 @@ impl Evaluator<'_> { } "pthread_getspecific" => { let Some(arg0) = args.first() else { - return Err(MirEvalError::TypeError( - "pthread_getspecific arg0 is not provided", + return Err(MirEvalError::InternalError( + "pthread_getspecific arg0 is not provided".into(), )); }; let key = from_bytes!(usize, &pad16(arg0.get(self)?, false)[0..8]); @@ -478,14 +488,14 @@ impl Evaluator<'_> { } "pthread_setspecific" => { let Some(arg0) = args.first() else { - return Err(MirEvalError::TypeError( - "pthread_setspecific arg0 is not provided", + return Err(MirEvalError::InternalError( + "pthread_setspecific arg0 is not provided".into(), )); }; let key = from_bytes!(usize, &pad16(arg0.get(self)?, false)[0..8]); let Some(arg1) = args.get(1) else { - return Err(MirEvalError::TypeError( - "pthread_setspecific arg1 is not provided", + return Err(MirEvalError::InternalError( + "pthread_setspecific arg1 is not provided".into(), )); }; let value = from_bytes!(u128, pad16(arg1.get(self)?, false)); @@ -502,14 +512,16 @@ impl Evaluator<'_> { } "syscall" => { let Some((id, rest)) = args.split_first() else { - return Err(MirEvalError::TypeError("syscall arg1 is not provided")); + return Err(MirEvalError::InternalError("syscall arg1 is not provided".into())); }; let id = from_bytes!(i64, id.get(self)?); self.exec_syscall(id, rest, destination, locals, span) } "sched_getaffinity" => { let [_pid, _set_size, set] = args else { - return Err(MirEvalError::TypeError("libc::write args are not provided")); + return Err(MirEvalError::InternalError( + "libc::write args are not provided".into(), + )); }; let set = Address::from_bytes(set.get(self)?)?; // Only enable core 0 (we are single threaded anyway), which is bitset 0x0000001 @@ -520,7 +532,9 @@ impl Evaluator<'_> { } "getenv" => { let [name] = args else { - return Err(MirEvalError::TypeError("libc::write args are not provided")); + return Err(MirEvalError::InternalError( + "libc::write args are not provided".into(), + )); }; let mut name_buf = vec![]; let name = { @@ -586,8 +600,8 @@ impl Evaluator<'_> { "sqrt" | "sin" | "cos" | "exp" | "exp2" | "log" | "log10" | "log2" | "fabs" | "floor" | "ceil" | "trunc" | "rint" | "nearbyint" | "round" | "roundeven" => { let [arg] = args else { - return Err(MirEvalError::TypeError( - "f64 intrinsic signature doesn't match fn (f64) -> f64", + return Err(MirEvalError::InternalError( + "f64 intrinsic signature doesn't match fn (f64) -> f64".into(), )); }; let arg = from_bytes!(f64, arg.get(self)?); @@ -614,8 +628,8 @@ impl Evaluator<'_> { } "pow" | "minnum" | "maxnum" | "copysign" => { let [arg1, arg2] = args else { - return Err(MirEvalError::TypeError( - "f64 intrinsic signature doesn't match fn (f64, f64) -> f64", + return Err(MirEvalError::InternalError( + "f64 intrinsic signature doesn't match fn (f64, f64) -> f64".into(), )); }; let arg1 = from_bytes!(f64, arg1.get(self)?); @@ -630,8 +644,8 @@ impl Evaluator<'_> { } "powi" => { let [arg1, arg2] = args else { - return Err(MirEvalError::TypeError( - "powif64 signature doesn't match fn (f64, i32) -> f64", + return Err(MirEvalError::InternalError( + "powif64 signature doesn't match fn (f64, i32) -> f64".into(), )); }; let arg1 = 
from_bytes!(f64, arg1.get(self)?); @@ -640,8 +654,8 @@ impl Evaluator<'_> { } "fma" => { let [arg1, arg2, arg3] = args else { - return Err(MirEvalError::TypeError( - "fmaf64 signature doesn't match fn (f64, f64, f64) -> f64", + return Err(MirEvalError::InternalError( + "fmaf64 signature doesn't match fn (f64, f64, f64) -> f64".into(), )); }; let arg1 = from_bytes!(f64, arg1.get(self)?); @@ -658,8 +672,8 @@ impl Evaluator<'_> { "sqrt" | "sin" | "cos" | "exp" | "exp2" | "log" | "log10" | "log2" | "fabs" | "floor" | "ceil" | "trunc" | "rint" | "nearbyint" | "round" | "roundeven" => { let [arg] = args else { - return Err(MirEvalError::TypeError( - "f32 intrinsic signature doesn't match fn (f32) -> f32", + return Err(MirEvalError::InternalError( + "f32 intrinsic signature doesn't match fn (f32) -> f32".into(), )); }; let arg = from_bytes!(f32, arg.get(self)?); @@ -686,8 +700,8 @@ impl Evaluator<'_> { } "pow" | "minnum" | "maxnum" | "copysign" => { let [arg1, arg2] = args else { - return Err(MirEvalError::TypeError( - "f32 intrinsic signature doesn't match fn (f32, f32) -> f32", + return Err(MirEvalError::InternalError( + "f32 intrinsic signature doesn't match fn (f32, f32) -> f32".into(), )); }; let arg1 = from_bytes!(f32, arg1.get(self)?); @@ -702,8 +716,8 @@ impl Evaluator<'_> { } "powi" => { let [arg1, arg2] = args else { - return Err(MirEvalError::TypeError( - "powif32 signature doesn't match fn (f32, i32) -> f32", + return Err(MirEvalError::InternalError( + "powif32 signature doesn't match fn (f32, i32) -> f32".into(), )); }; let arg1 = from_bytes!(f32, arg1.get(self)?); @@ -712,8 +726,8 @@ impl Evaluator<'_> { } "fma" => { let [arg1, arg2, arg3] = args else { - return Err(MirEvalError::TypeError( - "fmaf32 signature doesn't match fn (f32, f32, f32) -> f32", + return Err(MirEvalError::InternalError( + "fmaf32 signature doesn't match fn (f32, f32, f32) -> f32".into(), )); }; let arg1 = from_bytes!(f32, arg1.get(self)?); @@ -730,7 +744,9 @@ impl Evaluator<'_> { let Some(ty) = generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner)) else { - return Err(MirEvalError::TypeError("size_of generic arg is not provided")); + return Err(MirEvalError::InternalError( + "size_of generic arg is not provided".into(), + )); }; let size = self.size_of_sized(ty, locals, "size_of arg")?; destination.write_from_bytes(self, &size.to_le_bytes()[0..destination.size]) @@ -739,7 +755,9 @@ impl Evaluator<'_> { let Some(ty) = generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner)) else { - return Err(MirEvalError::TypeError("align_of generic arg is not provided")); + return Err(MirEvalError::InternalError( + "align_of generic arg is not provided".into(), + )); }; let align = self.layout(ty)?.align.abi.bytes(); destination.write_from_bytes(self, &align.to_le_bytes()[0..destination.size]) @@ -748,10 +766,14 @@ impl Evaluator<'_> { let Some(ty) = generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner)) else { - return Err(MirEvalError::TypeError("size_of_val generic arg is not provided")); + return Err(MirEvalError::InternalError( + "size_of_val generic arg is not provided".into(), + )); }; let [arg] = args else { - return Err(MirEvalError::TypeError("size_of_val args are not provided")); + return Err(MirEvalError::InternalError( + "size_of_val args are not provided".into(), + )); }; if let Some((size, _)) = self.size_align_of(ty, locals)? 
{ destination.write_from_bytes(self, &size.to_le_bytes()) @@ -765,12 +787,14 @@ impl Evaluator<'_> { let Some(ty) = generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner)) else { - return Err(MirEvalError::TypeError( - "min_align_of_val generic arg is not provided", + return Err(MirEvalError::InternalError( + "min_align_of_val generic arg is not provided".into(), )); }; let [arg] = args else { - return Err(MirEvalError::TypeError("min_align_of_val args are not provided")); + return Err(MirEvalError::InternalError( + "min_align_of_val args are not provided".into(), + )); }; if let Some((_, align)) = self.size_align_of(ty, locals)? { destination.write_from_bytes(self, &align.to_le_bytes()) @@ -784,7 +808,9 @@ impl Evaluator<'_> { let Some(ty) = generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner)) else { - return Err(MirEvalError::TypeError("type_name generic arg is not provided")); + return Err(MirEvalError::InternalError( + "type_name generic arg is not provided".into(), + )); }; let ty_name = match ty.display_source_code( self.db, @@ -808,7 +834,9 @@ impl Evaluator<'_> { let Some(ty) = generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner)) else { - return Err(MirEvalError::TypeError("size_of generic arg is not provided")); + return Err(MirEvalError::InternalError( + "size_of generic arg is not provided".into(), + )); }; let result = !ty.clone().is_copy(self.db, locals.body.owner); destination.write_from_bytes(self, &[u8::from(result)]) @@ -817,14 +845,18 @@ impl Evaluator<'_> { // FIXME: this is wrong for const eval, it should return 2 in some // cases. let [lhs, rhs] = args else { - return Err(MirEvalError::TypeError("wrapping_add args are not provided")); + return Err(MirEvalError::InternalError( + "wrapping_add args are not provided".into(), + )); }; let ans = lhs.get(self)? == rhs.get(self)?; destination.write_from_bytes(self, &[u8::from(ans)]) } "saturating_add" | "saturating_sub" => { let [lhs, rhs] = args else { - return Err(MirEvalError::TypeError("saturating_add args are not provided")); + return Err(MirEvalError::InternalError( + "saturating_add args are not provided".into(), + )); }; let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false)); let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false)); @@ -844,7 +876,9 @@ impl Evaluator<'_> { } "wrapping_add" | "unchecked_add" => { let [lhs, rhs] = args else { - return Err(MirEvalError::TypeError("wrapping_add args are not provided")); + return Err(MirEvalError::InternalError( + "wrapping_add args are not provided".into(), + )); }; let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false)); let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false)); @@ -853,7 +887,9 @@ impl Evaluator<'_> { } "ptr_offset_from_unsigned" | "ptr_offset_from" => { let [lhs, rhs] = args else { - return Err(MirEvalError::TypeError("wrapping_sub args are not provided")); + return Err(MirEvalError::InternalError( + "wrapping_sub args are not provided".into(), + )); }; let lhs = i128::from_le_bytes(pad16(lhs.get(self)?, false)); let rhs = i128::from_le_bytes(pad16(rhs.get(self)?, false)); @@ -861,8 +897,8 @@ impl Evaluator<'_> { let Some(ty) = generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner)) else { - return Err(MirEvalError::TypeError( - "ptr_offset_from generic arg is not provided", + return Err(MirEvalError::InternalError( + "ptr_offset_from generic arg is not provided".into(), )); }; let size = self.size_of_sized(ty, locals, "ptr_offset_from arg")? 
as i128; @@ -871,7 +907,9 @@ impl Evaluator<'_> { } "wrapping_sub" | "unchecked_sub" => { let [lhs, rhs] = args else { - return Err(MirEvalError::TypeError("wrapping_sub args are not provided")); + return Err(MirEvalError::InternalError( + "wrapping_sub args are not provided".into(), + )); }; let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false)); let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false)); @@ -880,7 +918,9 @@ impl Evaluator<'_> { } "wrapping_mul" | "unchecked_mul" => { let [lhs, rhs] = args else { - return Err(MirEvalError::TypeError("wrapping_mul args are not provided")); + return Err(MirEvalError::InternalError( + "wrapping_mul args are not provided".into(), + )); }; let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false)); let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false)); @@ -890,7 +930,9 @@ impl Evaluator<'_> { "wrapping_shl" | "unchecked_shl" => { // FIXME: signed let [lhs, rhs] = args else { - return Err(MirEvalError::TypeError("unchecked_shl args are not provided")); + return Err(MirEvalError::InternalError( + "unchecked_shl args are not provided".into(), + )); }; let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false)); let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false)); @@ -900,7 +942,9 @@ impl Evaluator<'_> { "wrapping_shr" | "unchecked_shr" => { // FIXME: signed let [lhs, rhs] = args else { - return Err(MirEvalError::TypeError("unchecked_shr args are not provided")); + return Err(MirEvalError::InternalError( + "unchecked_shr args are not provided".into(), + )); }; let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false)); let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false)); @@ -910,7 +954,9 @@ impl Evaluator<'_> { "unchecked_rem" => { // FIXME: signed let [lhs, rhs] = args else { - return Err(MirEvalError::TypeError("unchecked_rem args are not provided")); + return Err(MirEvalError::InternalError( + "unchecked_rem args are not provided".into(), + )); }; let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false)); let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false)); @@ -922,7 +968,9 @@ impl Evaluator<'_> { "unchecked_div" | "exact_div" => { // FIXME: signed let [lhs, rhs] = args else { - return Err(MirEvalError::TypeError("unchecked_div args are not provided")); + return Err(MirEvalError::InternalError( + "unchecked_div args are not provided".into(), + )); }; let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false)); let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false)); @@ -933,7 +981,9 @@ impl Evaluator<'_> { } "add_with_overflow" | "sub_with_overflow" | "mul_with_overflow" => { let [lhs, rhs] = args else { - return Err(MirEvalError::TypeError("const_eval_select args are not provided")); + return Err(MirEvalError::InternalError( + "const_eval_select args are not provided".into(), + )); }; let result_ty = TyKind::Tuple( 2, @@ -954,7 +1004,7 @@ impl Evaluator<'_> { || ans.to_le_bytes()[op_size..].iter().any(|&it| it != 0 && it != 255); let is_overflow = vec![u8::from(is_overflow)]; let layout = self.layout(&result_ty)?; - let result = self.make_by_layout( + let result = self.construct_with_layout( layout.size.bytes_usize(), &layout, None, @@ -966,15 +1016,15 @@ impl Evaluator<'_> { } "copy" | "copy_nonoverlapping" => { let [src, dst, offset] = args else { - return Err(MirEvalError::TypeError( - "copy_nonoverlapping args are not provided", + return Err(MirEvalError::InternalError( + "copy_nonoverlapping args are not provided".into(), )); }; let Some(ty) = 
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner)) else { - return Err(MirEvalError::TypeError( - "copy_nonoverlapping generic arg is not provided", + return Err(MirEvalError::InternalError( + "copy_nonoverlapping generic arg is not provided".into(), )); }; let src = Address::from_bytes(src.get(self)?)?; @@ -988,18 +1038,22 @@ impl Evaluator<'_> { } "offset" | "arith_offset" => { let [ptr, offset] = args else { - return Err(MirEvalError::TypeError("offset args are not provided")); + return Err(MirEvalError::InternalError("offset args are not provided".into())); }; let ty = if name == "offset" { let Some(ty0) = generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner)) else { - return Err(MirEvalError::TypeError("offset generic arg is not provided")); + return Err(MirEvalError::InternalError( + "offset generic arg is not provided".into(), + )); }; let Some(ty1) = generic_args.as_slice(Interner).get(1).and_then(|it| it.ty(Interner)) else { - return Err(MirEvalError::TypeError("offset generic arg is not provided")); + return Err(MirEvalError::InternalError( + "offset generic arg is not provided".into(), + )); }; if !matches!( ty1.as_builtin(), @@ -1008,15 +1062,15 @@ impl Evaluator<'_> { | BuiltinType::Uint(BuiltinUint::Usize) ) ) { - return Err(MirEvalError::TypeError( - "offset generic arg is not usize or isize", + return Err(MirEvalError::InternalError( + "offset generic arg is not usize or isize".into(), )); } match ty0.as_raw_ptr() { Some((ty, _)) => ty, None => { - return Err(MirEvalError::TypeError( - "offset generic arg is not a raw pointer", + return Err(MirEvalError::InternalError( + "offset generic arg is not a raw pointer".into(), )); } } @@ -1024,8 +1078,8 @@ impl Evaluator<'_> { let Some(ty) = generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner)) else { - return Err(MirEvalError::TypeError( - "arith_offset generic arg is not provided", + return Err(MirEvalError::InternalError( + "arith_offset generic arg is not provided".into(), )); }; ty @@ -1046,19 +1100,21 @@ impl Evaluator<'_> { } "transmute" => { let [arg] = args else { - return Err(MirEvalError::TypeError("transmute arg is not provided")); + return Err(MirEvalError::InternalError( + "transmute arg is not provided".into(), + )); }; destination.write_from_interval(self, arg.interval) } "likely" | "unlikely" => { let [arg] = args else { - return Err(MirEvalError::TypeError("likely arg is not provided")); + return Err(MirEvalError::InternalError("likely arg is not provided".into())); }; destination.write_from_interval(self, arg.interval) } "ctpop" => { let [arg] = args else { - return Err(MirEvalError::TypeError("ctpop arg is not provided")); + return Err(MirEvalError::InternalError("ctpop arg is not provided".into())); }; let result = u128::from_le_bytes(pad16(arg.get(self)?, false)).count_ones(); destination @@ -1066,7 +1122,7 @@ impl Evaluator<'_> { } "ctlz" | "ctlz_nonzero" => { let [arg] = args else { - return Err(MirEvalError::TypeError("ctlz arg is not provided")); + return Err(MirEvalError::InternalError("ctlz arg is not provided".into())); }; let result = u128::from_le_bytes(pad16(arg.get(self)?, false)).leading_zeros() as usize; @@ -1076,7 +1132,7 @@ impl Evaluator<'_> { } "cttz" | "cttz_nonzero" => { let [arg] = args else { - return Err(MirEvalError::TypeError("cttz arg is not provided")); + return Err(MirEvalError::InternalError("cttz arg is not provided".into())); }; let result = u128::from_le_bytes(pad16(arg.get(self)?, false)).trailing_zeros(); destination @@ 
-1084,7 +1140,9 @@ impl Evaluator<'_> { } "rotate_left" => { let [lhs, rhs] = args else { - return Err(MirEvalError::TypeError("rotate_left args are not provided")); + return Err(MirEvalError::InternalError( + "rotate_left args are not provided".into(), + )); }; let lhs = &lhs.get(self)?[0..destination.size]; let rhs = rhs.get(self)?[0] as u32; @@ -1114,7 +1172,9 @@ impl Evaluator<'_> { } "rotate_right" => { let [lhs, rhs] = args else { - return Err(MirEvalError::TypeError("rotate_right args are not provided")); + return Err(MirEvalError::InternalError( + "rotate_right args are not provided".into(), + )); }; let lhs = &lhs.get(self)?[0..destination.size]; let rhs = rhs.get(self)?[0] as u32; @@ -1144,13 +1204,15 @@ impl Evaluator<'_> { } "discriminant_value" => { let [arg] = args else { - return Err(MirEvalError::TypeError("discriminant_value arg is not provided")); + return Err(MirEvalError::InternalError( + "discriminant_value arg is not provided".into(), + )); }; let Some(ty) = generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner)) else { - return Err(MirEvalError::TypeError( - "discriminant_value generic arg is not provided", + return Err(MirEvalError::InternalError( + "discriminant_value generic arg is not provided".into(), )); }; let addr = Address::from_bytes(arg.get(self)?)?; @@ -1161,11 +1223,15 @@ impl Evaluator<'_> { } "const_eval_select" => { let [tuple, const_fn, _] = args else { - return Err(MirEvalError::TypeError("const_eval_select args are not provided")); + return Err(MirEvalError::InternalError( + "const_eval_select args are not provided".into(), + )); }; let mut args = vec![const_fn.clone()]; let TyKind::Tuple(_, fields) = tuple.ty.kind(Interner) else { - return Err(MirEvalError::TypeError("const_eval_select arg[0] is not a tuple")); + return Err(MirEvalError::InternalError( + "const_eval_select arg[0] is not a tuple".into(), + )); }; let layout = self.layout(&tuple.ty)?; for (i, field) in fields.iter(Interner).enumerate() { @@ -1196,21 +1262,25 @@ impl Evaluator<'_> { } "read_via_copy" | "volatile_load" => { let [arg] = args else { - return Err(MirEvalError::TypeError("read_via_copy args are not provided")); + return Err(MirEvalError::InternalError( + "read_via_copy args are not provided".into(), + )); }; let addr = Address::from_bytes(arg.interval.get(self)?)?; destination.write_from_interval(self, Interval { addr, size: destination.size }) } "write_via_move" => { let [ptr, val] = args else { - return Err(MirEvalError::TypeError("write_via_move args are not provided")); + return Err(MirEvalError::InternalError( + "write_via_move args are not provided".into(), + )); }; let dst = Address::from_bytes(ptr.get(self)?)?; let Some(ty) = generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner)) else { - return Err(MirEvalError::TypeError( - "write_via_copy generic arg is not provided", + return Err(MirEvalError::InternalError( + "write_via_copy generic arg is not provided".into(), )); }; let size = self.size_of_sized(ty, locals, "write_via_move ptr type")?; @@ -1219,14 +1289,18 @@ impl Evaluator<'_> { } "write_bytes" => { let [dst, val, count] = args else { - return Err(MirEvalError::TypeError("write_bytes args are not provided")); + return Err(MirEvalError::InternalError( + "write_bytes args are not provided".into(), + )); }; let count = from_bytes!(usize, count.get(self)?); let val = from_bytes!(u8, val.get(self)?); let Some(ty) = generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner)) else { - return 
Err(MirEvalError::TypeError("write_bytes generic arg is not provided")); + return Err(MirEvalError::InternalError( + "write_bytes generic arg is not provided".into(), + )); }; let dst = Address::from_bytes(dst.get(self)?)?; let size = self.size_of_sized(ty, locals, "copy_nonoverlapping ptr type")?; @@ -1310,10 +1384,14 @@ impl Evaluator<'_> { let Some(ty) = generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner)) else { - return Err(MirEvalError::TypeError("atomic intrinsic generic arg is not provided")); + return Err(MirEvalError::InternalError( + "atomic intrinsic generic arg is not provided".into(), + )); }; let Some(arg0) = args.first() else { - return Err(MirEvalError::TypeError("atomic intrinsic arg0 is not provided")); + return Err(MirEvalError::InternalError( + "atomic intrinsic arg0 is not provided".into(), + )); }; let arg0_addr = Address::from_bytes(arg0.get(self)?)?; let arg0_interval = @@ -1322,7 +1400,9 @@ impl Evaluator<'_> { return destination.write_from_interval(self, arg0_interval); } let Some(arg1) = args.get(1) else { - return Err(MirEvalError::TypeError("atomic intrinsic arg1 is not provided")); + return Err(MirEvalError::InternalError( + "atomic intrinsic arg1 is not provided".into(), + )); }; if name.starts_with("store_") { return arg0_interval.write_from_interval(self, arg1.interval); @@ -1374,7 +1454,9 @@ impl Evaluator<'_> { return arg0_interval.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]); } let Some(arg2) = args.get(2) else { - return Err(MirEvalError::TypeError("atomic intrinsic arg2 is not provided")); + return Err(MirEvalError::InternalError( + "atomic intrinsic arg2 is not provided".into(), + )); }; if name.starts_with("cxchg_") || name.starts_with("cxchgweak_") { let dest = if arg1.get(self)? == arg0_interval.get(self)? { @@ -1389,7 +1471,7 @@ impl Evaluator<'_> { ) .intern(Interner); let layout = self.layout(&result_ty)?; - let result = self.make_by_layout( + let result = self.construct_with_layout( layout.size.bytes_usize(), &layout, None, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs index f9156417f249f..eddfd0acfb98c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs @@ -10,7 +10,7 @@ macro_rules! 
from_bytes { ($ty:tt, $value:expr) => { ($ty::from_le_bytes(match ($value).try_into() { Ok(it) => it, - Err(_) => return Err(MirEvalError::TypeError("mismatched size")), + Err(_) => return Err(MirEvalError::InternalError("mismatched size".into())), })) }; } @@ -40,7 +40,9 @@ impl Evaluator<'_> { .substitute(Interner, subst); return Ok((fields.len(), field_ty)); } - return Err(MirEvalError::TypeError("simd type with no len param")); + return Err(MirEvalError::InternalError( + "simd type with no len param".into(), + )); } }; match try_const_usize(self.db, len) { @@ -48,14 +50,18 @@ impl Evaluator<'_> { let Some(ty) = subst.as_slice(Interner).first().and_then(|it| it.ty(Interner)) else { - return Err(MirEvalError::TypeError("simd type with no ty param")); + return Err(MirEvalError::InternalError( + "simd type with no ty param".into(), + )); }; Ok((len as usize, ty.clone())) } - None => Err(MirEvalError::TypeError("simd type with unevaluatable len param")), + None => Err(MirEvalError::InternalError( + "simd type with unevaluatable len param".into(), + )), } } - _ => Err(MirEvalError::TypeError("simd type which is not a struct")), + _ => Err(MirEvalError::InternalError("simd type which is not a struct".into())), } } @@ -71,7 +77,9 @@ impl Evaluator<'_> { match name { "and" | "or" | "xor" => { let [left, right] = args else { - return Err(MirEvalError::TypeError("simd bit op args are not provided")); + return Err(MirEvalError::InternalError( + "simd bit op args are not provided".into(), + )); }; let result = left .get(self)? @@ -88,7 +96,7 @@ impl Evaluator<'_> { } "eq" | "ne" | "lt" | "le" | "gt" | "ge" => { let [left, right] = args else { - return Err(MirEvalError::TypeError("simd args are not provided")); + return Err(MirEvalError::InternalError("simd args are not provided".into())); }; let (len, ty) = self.detect_simd_ty(&left.ty)?; let is_signed = matches!(ty.as_builtin(), Some(BuiltinType::Int(_))); @@ -125,7 +133,9 @@ impl Evaluator<'_> { } "bitmask" => { let [op] = args else { - return Err(MirEvalError::TypeError("simd_bitmask args are not provided")); + return Err(MirEvalError::InternalError( + "simd_bitmask args are not provided".into(), + )); }; let (op_len, _) = self.detect_simd_ty(&op.ty)?; let op_count = op.interval.size / op_len; @@ -139,18 +149,20 @@ impl Evaluator<'_> { } "shuffle" => { let [left, right, index] = args else { - return Err(MirEvalError::TypeError("simd_shuffle args are not provided")); + return Err(MirEvalError::InternalError( + "simd_shuffle args are not provided".into(), + )); }; let TyKind::Array(_, index_len) = index.ty.kind(Interner) else { - return Err(MirEvalError::TypeError( - "simd_shuffle index argument has non-array type", + return Err(MirEvalError::InternalError( + "simd_shuffle index argument has non-array type".into(), )); }; let index_len = match try_const_usize(self.db, index_len) { Some(it) => it as usize, None => { - return Err(MirEvalError::TypeError( - "simd type with unevaluatable len param", + return Err(MirEvalError::InternalError( + "simd type with unevaluatable len param".into(), )) } }; @@ -164,8 +176,8 @@ impl Evaluator<'_> { let val = match vector.clone().nth(index) { Some(it) => it, None => { - return Err(MirEvalError::TypeError( - "out of bound access in simd shuffle", + return Err(MirEvalError::InternalError( + "out of bound access in simd shuffle".into(), )) } }; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs index 99930798e87ab..28d26c6c8ae6e 100644 --- 
a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs @@ -97,7 +97,7 @@ pub enum MirLowerError { MutatingRvalue, UnresolvedLabel, UnresolvedUpvar(Place), - UnaccessableLocal, + InaccessibleLocal, // monomorphization errors: GenericArgNotProvided(TypeOrConstParamId, Substitution), @@ -116,7 +116,7 @@ impl DropScopeToken { ctx.pop_drop_scope_internal(current, span) } - /// It is useful when we want a drop scope is syntaxically closed, but we don't want to execute any drop + /// It is useful when we want a drop scope is syntactically closed, but we don't want to execute any drop /// code. Either when the control flow is diverging (so drop code doesn't reached) or when drop is handled /// for us (for example a block that ended with a return statement. Return will drop everything, so the block shouldn't /// do anything) @@ -186,7 +186,7 @@ impl MirLowerError { | MirLowerError::UnsizedTemporary(_) | MirLowerError::IncompleteExpr | MirLowerError::IncompletePattern - | MirLowerError::UnaccessableLocal + | MirLowerError::InaccessibleLocal | MirLowerError::TraitFunctionDefinition(_, _) | MirLowerError::UnresolvedName(_) | MirLowerError::RecordLiteralWithoutPath @@ -939,7 +939,7 @@ impl<'ctx> MirLowerCtx<'ctx> { Ok(Some(current)) } Expr::BinaryOp { lhs, rhs, op } => { - let op = op.ok_or(MirLowerError::IncompleteExpr)?; + let op: BinaryOp = op.ok_or(MirLowerError::IncompleteExpr)?; let is_builtin = 'b: { // Without adjust here is a hack. We assume that we know every possible adjustment // for binary operator, and use without adjust to simplify our conditions. @@ -1843,8 +1843,8 @@ impl<'ctx> MirLowerCtx<'ctx> { None => { // FIXME: It should never happens, but currently it will happen in `const_dependent_on_local` test, which // is a hir lowering problem IMO. - // never!("Using unaccessable local for binding is always a bug"); - Err(MirLowerError::UnaccessableLocal) + // never!("Using inaccessible local for binding is always a bug"); + Err(MirLowerError::InaccessibleLocal) } } } @@ -2068,7 +2068,7 @@ pub fn mir_body_for_closure_query( } pub fn mir_body_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Result> { - let _p = profile::span("mir_body_query").detail(|| match def { + let detail = match def { DefWithBodyId::FunctionId(it) => db.function_data(it).name.display(db.upcast()).to_string(), DefWithBodyId::StaticId(it) => db.static_data(it).name.display(db.upcast()).to_string(), DefWithBodyId::ConstId(it) => db @@ -2082,7 +2082,8 @@ pub fn mir_body_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Result format!("in type const {it:?}"), - }); + }; + let _p = tracing::span!(tracing::Level::INFO, "mir_body_query", ?detail).entered(); let body = db.body(def); let infer = db.infer(def); let mut result = lower_to_mir(db, def, &body, &infer, body.body_expr)?; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs index 65ab12929dd2c..8202bac532f7a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs @@ -114,7 +114,7 @@ impl MirLowerCtx<'_> { index: i as u32, })) }), - &mut cond_place, + &cond_place, mode, )? 
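The MIR evaluator hunks above uniformly replace `MirEvalError::TypeError("…")` with `MirEvalError::InternalError("…".into())` inside `let … else` argument checks. A minimal, self-contained sketch of that pattern follows; `EvalError` and `eval_wrapping_sub` are invented stand-ins for the evaluator's real error enum and intrinsic handlers, and the owned-string payload is an assumption (the diff only shows the `.into()` call sites).

```rust
// Hedged sketch only: not the rust-analyzer types.
#[derive(Debug)]
enum EvalError {
    // An owned message (built with `.into()` at the call site) instead of a
    // `&'static str`, so call sites can carry formatted detail without new lifetimes.
    InternalError(String),
}

fn eval_wrapping_sub(args: &[u128]) -> Result<u128, EvalError> {
    // `let … else` bails out when the argument slice has the wrong arity,
    // mirroring the guards shown in the hunks above.
    let &[lhs, rhs] = args else {
        return Err(EvalError::InternalError("wrapping_sub args are not provided".into()));
    };
    Ok(lhs.wrapping_sub(rhs))
}

fn main() {
    assert!(eval_wrapping_sub(&[1]).is_err());
    assert_eq!(eval_wrapping_sub(&[1, 2]).unwrap(), u128::MAX);
}
```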
} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs index e4756ee9e29b5..db14addaf185b 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs @@ -3424,7 +3424,7 @@ fn bin_op_with_rhs_is_self_for_assoc_bound() { fn repro(t: T) -> bool where T: Request, - T::Output: Convertable, + T::Output: Convertible, { let a = execute(&t).convert(); let b = execute(&t).convert(); @@ -3439,7 +3439,7 @@ where { ::output() } -trait Convertable { +trait Convertible { type TraitSelf: PartialEq; type AssocAsDefaultSelf: PartialEq; fn convert(self) -> Self::AssocAsDefaultSelf; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs index b6bc76bc98d53..3a1a4e63ea121 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs @@ -100,13 +100,14 @@ pub(crate) fn trait_solve_query( block: Option, goal: Canonical>, ) -> Option { - let _p = profile::span("trait_solve_query").detail(|| match &goal.value.goal.data(Interner) { + let detail = match &goal.value.goal.data(Interner) { GoalData::DomainGoal(DomainGoal::Holds(WhereClause::Implemented(it))) => { db.trait_data(it.hir_trait_id()).name.display(db.upcast()).to_string() } GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(_))) => "alias_eq".to_string(), _ => "??".to_string(), - }); + }; + let _p = tracing::span!(tracing::Level::INFO, "trait_solve_query", ?detail).entered(); tracing::info!("trait_solve_query({:?})", goal.value.goal); if let GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(AliasEq { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs index 2cdee5a15a88a..c150314138ade 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs @@ -24,18 +24,18 @@ use hir_def::{ }; use hir_expand::name::Name; use intern::Interned; +use rustc_abi::TargetDataLayout; use rustc_hash::FxHashSet; use smallvec::{smallvec, SmallVec}; use stdx::never; -use triomphe::Arc; use crate::{ consteval::unknown_const, db::HirDatabase, layout::{Layout, TagEncoding}, mir::pad16, - ChalkTraitId, Const, ConstScalar, GenericArg, Interner, Substitution, TraitEnvironment, - TraitRef, TraitRefExt, Ty, WhereClause, + ChalkTraitId, Const, ConstScalar, GenericArg, Interner, Substitution, TraitRef, TraitRefExt, + Ty, WhereClause, }; pub(crate) fn fn_traits( @@ -192,7 +192,7 @@ pub(crate) fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics { /// and it doesn't store the closure types and fields. /// /// Codes should not assume this ordering, and should always use methods available -/// on this struct for retriving, and `TyBuilder::substs_for_closure` for creating. +/// on this struct for retrieving, and `TyBuilder::substs_for_closure` for creating. 
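`mir_body_query` and `trait_solve_query` above both drop the old `profile::span(..).detail(..)` guard in favour of an entered `tracing` span that records the detail as a field. A hedged, standalone sketch of that span style; the function, field value, and the `tracing-subscriber` setup are placeholders, not rust-analyzer's actual code:

```rust
fn solve_query(goal_name: &str) -> usize {
    let detail = goal_name.to_owned();
    // `?detail` records the value with its Debug impl; `entered()` returns a guard,
    // so the span stays open until `_p` is dropped, like the old profiling guard.
    let _p = tracing::span!(tracing::Level::INFO, "solve_query", ?detail).entered();
    goal_name.len()
}

fn main() {
    // Any subscriber works; `tracing-subscriber` is assumed as a dependency here.
    tracing_subscriber::fmt().with_max_level(tracing::Level::INFO).init();
    let _ = solve_query("Implemented(Sized)");
}
```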
pub(crate) struct ClosureSubst<'a>(pub(crate) &'a Substitution); impl<'a> ClosureSubst<'a> { @@ -431,18 +431,16 @@ impl FallibleTypeFolder for UnevaluatedConstEvaluatorFolder<'_> { pub(crate) fn detect_variant_from_bytes<'a>( layout: &'a Layout, db: &dyn HirDatabase, - trait_env: Arc, + target_data_layout: &TargetDataLayout, b: &[u8], e: EnumId, ) -> Option<(EnumVariantId, &'a Layout)> { - let krate = trait_env.krate; let (var_id, var_layout) = match &layout.variants { hir_def::layout::Variants::Single { index } => { (db.enum_data(e).variants[index.0].0, layout) } hir_def::layout::Variants::Multiple { tag, tag_encoding, variants, .. } => { - let target_data_layout = db.target_data_layout(krate)?; - let size = tag.size(&*target_data_layout).bytes_usize(); + let size = tag.size(target_data_layout).bytes_usize(); let offset = layout.fields.offset(0).bytes_usize(); // The only field on enum variants is the tag field let tag = i128::from_le_bytes(pad16(&b[offset..offset + size], false)); match tag_encoding { diff --git a/src/tools/rust-analyzer/crates/hir/Cargo.toml b/src/tools/rust-analyzer/crates/hir/Cargo.toml index e4e4bcea6108d..7fea8372876ee 100644 --- a/src/tools/rust-analyzer/crates/hir/Cargo.toml +++ b/src/tools/rust-analyzer/crates/hir/Cargo.toml @@ -17,6 +17,7 @@ either.workspace = true arrayvec.workspace = true itertools.workspace = true smallvec.workspace = true +tracing.workspace = true triomphe.workspace = true once_cell = "1.17.1" @@ -30,9 +31,10 @@ profile.workspace = true stdx.workspace = true syntax.workspace = true tt.workspace = true +span.workspace = true [features] in-rust-tree = [] [lints] -workspace = true \ No newline at end of file +workspace = true diff --git a/src/tools/rust-analyzer/crates/hir/src/attrs.rs b/src/tools/rust-analyzer/crates/hir/src/attrs.rs index fc0a196df7cbe..5c369f42e6e7d 100644 --- a/src/tools/rust-analyzer/crates/hir/src/attrs.rs +++ b/src/tools/rust-analyzer/crates/hir/src/attrs.rs @@ -239,10 +239,9 @@ fn resolve_impl_trait_item( ) -> Option { let canonical = ty.canonical(); let krate = ty.krate(db); - let environment = resolver.generic_def().map_or_else( - || crate::TraitEnvironment::empty(krate.id).into(), - |d| db.trait_environment(d), - ); + let environment = resolver + .generic_def() + .map_or_else(|| crate::TraitEnvironment::empty(krate.id), |d| db.trait_environment(d)); let traits_in_scope = resolver.traits_in_scope(db.upcast()); let mut result = None; @@ -297,7 +296,7 @@ fn as_module_def_if_namespace_matches( AssocItem::TypeAlias(it) => (ModuleDef::TypeAlias(it), Namespace::Types), }; - (ns.unwrap_or(expected_ns) == expected_ns).then(|| DocLinkDef::ModuleDef(def)) + (ns.unwrap_or(expected_ns) == expected_ns).then_some(DocLinkDef::ModuleDef(def)) } fn modpath_from_str(link: &str) -> Option { @@ -311,7 +310,7 @@ fn modpath_from_str(link: &str) -> Option { "self" => PathKind::Super(0), "super" => { let mut deg = 1; - while let Some(segment) = parts.next() { + for segment in parts.by_ref() { if segment == "super" { deg += 1; } else { diff --git a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs index bf29a53913d1d..2d8f1dbad51a6 100644 --- a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs @@ -4,11 +4,12 @@ //! This probably isn't the best way to do this -- ideally, diagnostics should //! be expressed in terms of hir types themselves. 
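Two of the small cleanups in `hir/src/attrs.rs` above are routine clippy-style rewrites: `bool::then(|| v)` becomes `then_some(v)` when the value needs no lazy construction, and `while let Some(x) = iter.next()` becomes `for x in iter.by_ref()` so the iterator remains usable after the loop. A toy demonstration with made-up data:

```rust
fn main() {
    let expected_ns = "types";
    let ns: Option<&str> = None;
    // `then_some` avoids a closure when the value is already constructed.
    let def = (ns.unwrap_or(expected_ns) == expected_ns).then_some("ModuleDef");
    assert_eq!(def, Some("ModuleDef"));

    let mut parts = "super::super::foo::Bar".split("::");
    let mut deg = 1;
    // `by_ref()` lets the `for` loop borrow the iterator, so the segments left over
    // after `break` ("Bar" here) can still be read below.
    for segment in parts.by_ref() {
        if segment == "super" {
            deg += 1;
        } else {
            break;
        }
    }
    assert_eq!(deg, 3);
    assert_eq!(parts.next(), Some("Bar"));
}
```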
pub use hir_ty::diagnostics::{CaseType, IncorrectCase}; +use hir_ty::{db::HirDatabase, diagnostics::BodyValidationDiagnostic, InferenceDiagnostic}; use base_db::CrateId; use cfg::{CfgExpr, CfgOptions}; use either::Either; -use hir_def::{path::ModPath, AssocItemId}; +use hir_def::{body::SyntheticSyntax, hir::ExprOrPatId, path::ModPath, AssocItemId, DefWithBodyId}; use hir_expand::{name::Name, HirFileId, InFile}; use syntax::{ast, AstPtr, SyntaxError, SyntaxNodePtr, TextRange}; @@ -30,14 +31,28 @@ macro_rules! diagnostics { )* }; } +// FIXME Accept something like the following in the macro call instead +// diagnostics![ +// pub struct BreakOutsideOfLoop { +// pub expr: InFile>, +// pub is_break: bool, +// pub bad_value_break: bool, +// }, ... +// or more concisely +// BreakOutsideOfLoop { +// expr: InFile>, +// is_break: bool, +// bad_value_break: bool, +// }, ... +// ] diagnostics![ BreakOutsideOfLoop, ExpectedFunction, InactiveCode, + IncoherentImpl, IncorrectCase, InvalidDeriveTarget, - IncoherentImpl, MacroDefError, MacroError, MacroExpansionParseError, @@ -55,8 +70,8 @@ diagnostics![ ReplaceFilterMapNextWithFindMap, TraitImplIncorrectSafety, TraitImplMissingAssocItems, - TraitImplRedundantAssocItems, TraitImplOrphan, + TraitImplRedundantAssocItems, TypedHole, TypeMismatch, UndeclaredLabel, @@ -326,3 +341,219 @@ pub struct TraitImplRedundantAssocItems { pub impl_: AstPtr, pub assoc_item: (Name, AssocItem), } + +impl AnyDiagnostic { + pub(crate) fn body_validation_diagnostic( + db: &dyn HirDatabase, + diagnostic: BodyValidationDiagnostic, + source_map: &hir_def::body::BodySourceMap, + ) -> Option { + match diagnostic { + BodyValidationDiagnostic::RecordMissingFields { record, variant, missed_fields } => { + let variant_data = variant.variant_data(db.upcast()); + let missed_fields = missed_fields + .into_iter() + .map(|idx| variant_data.fields()[idx].name.clone()) + .collect(); + + match record { + Either::Left(record_expr) => match source_map.expr_syntax(record_expr) { + Ok(source_ptr) => { + let root = source_ptr.file_syntax(db.upcast()); + if let ast::Expr::RecordExpr(record_expr) = + source_ptr.value.to_node(&root) + { + if record_expr.record_expr_field_list().is_some() { + let field_list_parent_path = + record_expr.path().map(|path| AstPtr::new(&path)); + return Some( + MissingFields { + file: source_ptr.file_id, + field_list_parent: AstPtr::new(&Either::Left( + record_expr, + )), + field_list_parent_path, + missed_fields, + } + .into(), + ); + } + } + } + Err(SyntheticSyntax) => (), + }, + Either::Right(record_pat) => match source_map.pat_syntax(record_pat) { + Ok(source_ptr) => { + if let Some(ptr) = source_ptr.value.cast::() { + let root = source_ptr.file_syntax(db.upcast()); + let record_pat = ptr.to_node(&root); + if record_pat.record_pat_field_list().is_some() { + let field_list_parent_path = + record_pat.path().map(|path| AstPtr::new(&path)); + return Some( + MissingFields { + file: source_ptr.file_id, + field_list_parent: AstPtr::new(&Either::Right( + record_pat, + )), + field_list_parent_path, + missed_fields, + } + .into(), + ); + } + } + } + Err(SyntheticSyntax) => (), + }, + } + } + BodyValidationDiagnostic::ReplaceFilterMapNextWithFindMap { method_call_expr } => { + if let Ok(next_source_ptr) = source_map.expr_syntax(method_call_expr) { + return Some( + ReplaceFilterMapNextWithFindMap { + file: next_source_ptr.file_id, + next_expr: next_source_ptr.value, + } + .into(), + ); + } + } + BodyValidationDiagnostic::MissingMatchArms { match_expr, uncovered_patterns } => { + 
match source_map.expr_syntax(match_expr) { + Ok(source_ptr) => { + let root = source_ptr.file_syntax(db.upcast()); + if let ast::Expr::MatchExpr(match_expr) = &source_ptr.value.to_node(&root) { + match match_expr.expr() { + Some(scrut_expr) if match_expr.match_arm_list().is_some() => { + return Some( + MissingMatchArms { + scrutinee_expr: InFile::new( + source_ptr.file_id, + AstPtr::new(&scrut_expr), + ), + uncovered_patterns, + } + .into(), + ); + } + _ => {} + } + } + } + Err(SyntheticSyntax) => (), + } + } + } + None + } + + pub(crate) fn inference_diagnostic( + db: &dyn HirDatabase, + def: DefWithBodyId, + d: &InferenceDiagnostic, + source_map: &hir_def::body::BodySourceMap, + ) -> Option { + let expr_syntax = |expr| source_map.expr_syntax(expr).expect("unexpected synthetic"); + let pat_syntax = |pat| source_map.pat_syntax(pat).expect("unexpected synthetic"); + Some(match d { + &InferenceDiagnostic::NoSuchField { field: expr, private } => { + let expr_or_pat = match expr { + ExprOrPatId::ExprId(expr) => { + source_map.field_syntax(expr).map(AstPtr::wrap_left) + } + ExprOrPatId::PatId(pat) => { + source_map.pat_field_syntax(pat).map(AstPtr::wrap_right) + } + }; + NoSuchField { field: expr_or_pat, private }.into() + } + &InferenceDiagnostic::MismatchedArgCount { call_expr, expected, found } => { + MismatchedArgCount { call_expr: expr_syntax(call_expr), expected, found }.into() + } + &InferenceDiagnostic::PrivateField { expr, field } => { + let expr = expr_syntax(expr); + let field = field.into(); + PrivateField { expr, field }.into() + } + &InferenceDiagnostic::PrivateAssocItem { id, item } => { + let expr_or_pat = match id { + ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left), + ExprOrPatId::PatId(pat) => pat_syntax(pat).map(AstPtr::wrap_right), + }; + let item = item.into(); + PrivateAssocItem { expr_or_pat, item }.into() + } + InferenceDiagnostic::ExpectedFunction { call_expr, found } => { + let call_expr = expr_syntax(*call_expr); + ExpectedFunction { call: call_expr, found: Type::new(db, def, found.clone()) } + .into() + } + InferenceDiagnostic::UnresolvedField { + expr, + receiver, + name, + method_with_same_name_exists, + } => { + let expr = expr_syntax(*expr); + UnresolvedField { + expr, + name: name.clone(), + receiver: Type::new(db, def, receiver.clone()), + method_with_same_name_exists: *method_with_same_name_exists, + } + .into() + } + InferenceDiagnostic::UnresolvedMethodCall { + expr, + receiver, + name, + field_with_same_name, + assoc_func_with_same_name, + } => { + let expr = expr_syntax(*expr); + UnresolvedMethodCall { + expr, + name: name.clone(), + receiver: Type::new(db, def, receiver.clone()), + field_with_same_name: field_with_same_name + .clone() + .map(|ty| Type::new(db, def, ty)), + assoc_func_with_same_name: *assoc_func_with_same_name, + } + .into() + } + &InferenceDiagnostic::UnresolvedAssocItem { id } => { + let expr_or_pat = match id { + ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left), + ExprOrPatId::PatId(pat) => pat_syntax(pat).map(AstPtr::wrap_right), + }; + UnresolvedAssocItem { expr_or_pat }.into() + } + &InferenceDiagnostic::BreakOutsideOfLoop { expr, is_break, bad_value_break } => { + let expr = expr_syntax(expr); + BreakOutsideOfLoop { expr, is_break, bad_value_break }.into() + } + InferenceDiagnostic::TypedHole { expr, expected } => { + let expr = expr_syntax(*expr); + TypedHole { expr, expected: Type::new(db, def, expected.clone()) }.into() + } + &InferenceDiagnostic::MismatchedTupleStructPatArgCount { pat, 
expected, found } => { + let expr_or_pat = match pat { + ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left), + ExprOrPatId::PatId(pat) => { + let InFile { file_id, value } = + source_map.pat_syntax(pat).expect("unexpected synthetic"); + + // cast from Either -> Either<_, Pat> + let Some(ptr) = AstPtr::try_from_raw(value.syntax_node_ptr()) else { + return None; + }; + InFile { file_id, value: ptr } + } + }; + MismatchedTupleStructPatArgCount { expr_or_pat, expected, found }.into() + } + }) + } +} diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs index c332ab0050cfb..1e21045e9818b 100644 --- a/src/tools/rust-analyzer/crates/hir/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs @@ -61,7 +61,7 @@ use hir_def::{ use hir_expand::{attrs::collect_attrs, name::name, proc_macro::ProcMacroKind, MacroCallKind}; use hir_ty::{ all_super_traits, autoderef, check_orphan_rules, - consteval::{try_const_usize, unknown_const_as_generic, ConstEvalError, ConstExt}, + consteval::{try_const_usize, unknown_const_as_generic, ConstExt}, diagnostics::BodyValidationDiagnostic, known_const_to_ast, layout::{Layout as TyLayout, RustcEnumVariantIdx, RustcFieldIdx, TagEncoding}, @@ -70,9 +70,9 @@ use hir_ty::{ primitive::UintTy, traits::FnTrait, AliasTy, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId, GenericArg, - GenericArgData, InferenceDiagnostic, Interner, ParamKind, QuantifiedWhereClause, Scalar, - Substitution, TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyDefId, TyExt, TyKind, - ValueTyDefId, WhereClause, + GenericArgData, Interner, ParamKind, QuantifiedWhereClause, Scalar, Substitution, + TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyDefId, TyExt, TyKind, ValueTyDefId, + WhereClause, }; use itertools::Itertools; use nameres::diagnostics::DefDiagnosticKind; @@ -131,8 +131,10 @@ pub use { MacroFileIdExt, }, hir_ty::{ + consteval::ConstEvalError, display::{ClosureStyle, HirDisplay, HirDisplayError, HirWrite}, layout::LayoutError, + mir::{MirEvalError, MirLowerError}, PointerCast, Safety, }, // FIXME: Properly encapsulate mir @@ -233,8 +235,8 @@ impl Crate { db: &dyn DefDatabase, query: import_map::Query, ) -> impl Iterator> { - let _p = profile::span("query_external_importables"); - import_map::search_dependencies(db, self.into(), query).into_iter().map(|item| { + let _p = tracing::span!(tracing::Level::INFO, "query_external_importables"); + import_map::search_dependencies(db, self.into(), &query).into_iter().map(|item| { match ItemInNs::from(item) { ItemInNs::Types(mod_id) | ItemInNs::Values(mod_id) => Either::Left(mod_id), ItemInNs::Macros(mac_id) => Either::Right(mac_id), @@ -537,13 +539,8 @@ impl Module { /// Fills `acc` with the module's diagnostics. 
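The new `AnyDiagnostic::body_validation_diagnostic` and `AnyDiagnostic::inference_diagnostic` constructors above return an `Option`, which is what lets the `DefWithBody::diagnostics` call sites later in the patch collapse to `acc.extend(..)`. A minimal sketch of that shape, using invented types rather than the hir crate's real ones:

```rust
// Invented stand-ins for the real diagnostic types.
enum RawDiagnostic {
    MissingField { name: String },
    Synthetic, // has no source to point at, so conversion yields nothing
}

#[derive(Debug, PartialEq)]
enum AnyDiagnostic {
    MissingFields { name: String },
}

impl AnyDiagnostic {
    fn from_raw(raw: RawDiagnostic) -> Option<AnyDiagnostic> {
        match raw {
            RawDiagnostic::MissingField { name } => Some(AnyDiagnostic::MissingFields { name }),
            // Returning `None` replaces the inline `Err(SyntheticSyntax) => ()` arms.
            RawDiagnostic::Synthetic => None,
        }
    }
}

fn main() {
    let mut acc: Vec<AnyDiagnostic> = Vec::new();
    for raw in [RawDiagnostic::Synthetic, RawDiagnostic::MissingField { name: "x".into() }] {
        // `Option` iterates over zero or one items, so `extend` mirrors the
        // `acc.extend(AnyDiagnostic::inference_diagnostic(..))` call sites.
        acc.extend(AnyDiagnostic::from_raw(raw));
    }
    assert_eq!(acc, vec![AnyDiagnostic::MissingFields { name: "x".into() }]);
}
```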
pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec) { - let _p = profile::span("Module::diagnostics").detail(|| { - format!( - "{:?}", - self.name(db) - .map_or("".into(), |name| name.display(db.upcast()).to_string()) - ) - }); + let name = self.name(db); + let _p = tracing::span!(tracing::Level::INFO, "Module::diagnostics", ?name); let def_map = self.id.def_map(db.upcast()); for diag in def_map.diagnostics() { if diag.in_module != self.id.local_id { @@ -906,7 +903,7 @@ fn emit_def_diagnostic_( } DefDiagnosticKind::InvalidDeriveTarget { ast, id } => { let node = ast.to_node(db.upcast()); - let derive = node.attrs().nth(*id as usize); + let derive = node.attrs().nth(*id); match derive { Some(derive) => { acc.push( @@ -921,7 +918,7 @@ fn emit_def_diagnostic_( } DefDiagnosticKind::MalformedDerive { ast, id } => { let node = ast.to_node(db.upcast()); - let derive = node.attrs().nth(*id as usize); + let derive = node.attrs().nth(*id); match derive { Some(derive) => { acc.push( @@ -1626,116 +1623,8 @@ impl DefWithBody { } let infer = db.infer(self.into()); - let expr_syntax = |expr| source_map.expr_syntax(expr).expect("unexpected synthetic"); - let pat_syntax = |pat| source_map.pat_syntax(pat).expect("unexpected synthetic"); for d in &infer.diagnostics { - acc.push(match d { - &InferenceDiagnostic::NoSuchField { field: expr, private } => { - let expr_or_pat = match expr { - ExprOrPatId::ExprId(expr) => { - source_map.field_syntax(expr).map(AstPtr::wrap_left) - } - ExprOrPatId::PatId(pat) => { - source_map.pat_field_syntax(pat).map(AstPtr::wrap_right) - } - }; - NoSuchField { field: expr_or_pat, private }.into() - } - &InferenceDiagnostic::MismatchedArgCount { call_expr, expected, found } => { - MismatchedArgCount { call_expr: expr_syntax(call_expr), expected, found }.into() - } - &InferenceDiagnostic::PrivateField { expr, field } => { - let expr = expr_syntax(expr); - let field = field.into(); - PrivateField { expr, field }.into() - } - &InferenceDiagnostic::PrivateAssocItem { id, item } => { - let expr_or_pat = match id { - ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left), - ExprOrPatId::PatId(pat) => pat_syntax(pat).map(AstPtr::wrap_right), - }; - let item = item.into(); - PrivateAssocItem { expr_or_pat, item }.into() - } - InferenceDiagnostic::ExpectedFunction { call_expr, found } => { - let call_expr = expr_syntax(*call_expr); - ExpectedFunction { - call: call_expr, - found: Type::new(db, DefWithBodyId::from(self), found.clone()), - } - .into() - } - InferenceDiagnostic::UnresolvedField { - expr, - receiver, - name, - method_with_same_name_exists, - } => { - let expr = expr_syntax(*expr); - UnresolvedField { - expr, - name: name.clone(), - receiver: Type::new(db, DefWithBodyId::from(self), receiver.clone()), - method_with_same_name_exists: *method_with_same_name_exists, - } - .into() - } - InferenceDiagnostic::UnresolvedMethodCall { - expr, - receiver, - name, - field_with_same_name, - assoc_func_with_same_name, - } => { - let expr = expr_syntax(*expr); - UnresolvedMethodCall { - expr, - name: name.clone(), - receiver: Type::new(db, DefWithBodyId::from(self), receiver.clone()), - field_with_same_name: field_with_same_name - .clone() - .map(|ty| Type::new(db, DefWithBodyId::from(self), ty)), - assoc_func_with_same_name: *assoc_func_with_same_name, - } - .into() - } - &InferenceDiagnostic::UnresolvedAssocItem { id } => { - let expr_or_pat = match id { - ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left), - ExprOrPatId::PatId(pat) => 
pat_syntax(pat).map(AstPtr::wrap_right), - }; - UnresolvedAssocItem { expr_or_pat }.into() - } - &InferenceDiagnostic::BreakOutsideOfLoop { expr, is_break, bad_value_break } => { - let expr = expr_syntax(expr); - BreakOutsideOfLoop { expr, is_break, bad_value_break }.into() - } - InferenceDiagnostic::TypedHole { expr, expected } => { - let expr = expr_syntax(*expr); - - TypedHole { - expr, - expected: Type::new(db, DefWithBodyId::from(self), expected.clone()), - } - .into() - } - &InferenceDiagnostic::MismatchedTupleStructPatArgCount { pat, expected, found } => { - let expr_or_pat = match pat { - ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left), - ExprOrPatId::PatId(pat) => { - let InFile { file_id, value } = - source_map.pat_syntax(pat).expect("unexpected synthetic"); - - // cast from Either -> Either<_, Pat> - let Some(ptr) = AstPtr::try_from_raw(value.syntax_node_ptr()) else { - continue; - }; - InFile { file_id, value: ptr } - } - }; - MismatchedTupleStructPatArgCount { expr_or_pat, expected, found }.into() - } - }); + acc.extend(AnyDiagnostic::inference_diagnostic(db, self.into(), d, &source_map)); } for (pat_or_expr, mismatch) in infer.type_mismatches() { let expr_or_pat = match pat_or_expr { @@ -1857,109 +1746,7 @@ impl DefWithBody { } for diagnostic in BodyValidationDiagnostic::collect(db, self.into()) { - match diagnostic { - BodyValidationDiagnostic::RecordMissingFields { - record, - variant, - missed_fields, - } => { - let variant_data = variant.variant_data(db.upcast()); - let missed_fields = missed_fields - .into_iter() - .map(|idx| variant_data.fields()[idx].name.clone()) - .collect(); - - match record { - Either::Left(record_expr) => match source_map.expr_syntax(record_expr) { - Ok(source_ptr) => { - let root = source_ptr.file_syntax(db.upcast()); - if let ast::Expr::RecordExpr(record_expr) = - source_ptr.value.to_node(&root) - { - if record_expr.record_expr_field_list().is_some() { - let field_list_parent_path = - record_expr.path().map(|path| AstPtr::new(&path)); - acc.push( - MissingFields { - file: source_ptr.file_id, - field_list_parent: AstPtr::new(&Either::Left( - record_expr, - )), - field_list_parent_path, - missed_fields, - } - .into(), - ) - } - } - } - Err(SyntheticSyntax) => (), - }, - Either::Right(record_pat) => match source_map.pat_syntax(record_pat) { - Ok(source_ptr) => { - if let Some(ptr) = source_ptr.value.cast::() { - let root = source_ptr.file_syntax(db.upcast()); - let record_pat = ptr.to_node(&root); - if record_pat.record_pat_field_list().is_some() { - let field_list_parent_path = - record_pat.path().map(|path| AstPtr::new(&path)); - acc.push( - MissingFields { - file: source_ptr.file_id, - field_list_parent: AstPtr::new(&Either::Right( - record_pat, - )), - field_list_parent_path, - missed_fields, - } - .into(), - ) - } - } - } - Err(SyntheticSyntax) => (), - }, - } - } - BodyValidationDiagnostic::ReplaceFilterMapNextWithFindMap { method_call_expr } => { - if let Ok(next_source_ptr) = source_map.expr_syntax(method_call_expr) { - acc.push( - ReplaceFilterMapNextWithFindMap { - file: next_source_ptr.file_id, - next_expr: next_source_ptr.value, - } - .into(), - ); - } - } - BodyValidationDiagnostic::MissingMatchArms { match_expr, uncovered_patterns } => { - match source_map.expr_syntax(match_expr) { - Ok(source_ptr) => { - let root = source_ptr.file_syntax(db.upcast()); - if let ast::Expr::MatchExpr(match_expr) = - &source_ptr.value.to_node(&root) - { - match match_expr.expr() { - Some(scrut_expr) if 
match_expr.match_arm_list().is_some() => { - acc.push( - MissingMatchArms { - scrutinee_expr: InFile::new( - source_ptr.file_id, - AstPtr::new(&scrut_expr), - ), - uncovered_patterns, - } - .into(), - ); - } - _ => {} - } - } - } - Err(SyntheticSyntax) => (), - } - } - } + acc.extend(AnyDiagnostic::body_validation_diagnostic(db, diagnostic, &source_map)); } let def: ModuleDef = match self { @@ -1975,7 +1762,6 @@ impl DefWithBody { } } } - #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct Function { pub(crate) id: FunctionId, @@ -4266,7 +4052,7 @@ impl Type { name: Option<&Name>, mut callback: impl FnMut(Function) -> Option, ) -> Option { - let _p = profile::span("iterate_method_candidates"); + let _p = tracing::span!(tracing::Level::INFO, "iterate_method_candidates"); let mut slot = None; self.iterate_method_candidates_dyn( @@ -4345,7 +4131,7 @@ impl Type { name: Option<&Name>, mut callback: impl FnMut(AssocItem) -> Option, ) -> Option { - let _p = profile::span("iterate_path_candidates"); + let _p = tracing::span!(tracing::Level::INFO, "iterate_path_candidates"); let mut slot = None; self.iterate_path_candidates_dyn( db, @@ -4411,7 +4197,7 @@ impl Type { &'a self, db: &'a dyn HirDatabase, ) -> impl Iterator + 'a { - let _p = profile::span("applicable_inherent_traits"); + let _p = tracing::span!(tracing::Level::INFO, "applicable_inherent_traits"); self.autoderef_(db) .filter_map(|ty| ty.dyn_trait()) .flat_map(move |dyn_trait_id| hir_ty::all_super_traits(db.upcast(), dyn_trait_id)) @@ -4419,7 +4205,7 @@ impl Type { } pub fn env_traits<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator + 'a { - let _p = profile::span("env_traits"); + let _p = tracing::span!(tracing::Level::INFO, "env_traits"); self.autoderef_(db) .filter(|ty| matches!(ty.kind(Interner), TyKind::Placeholder(_))) .flat_map(|ty| { diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs index fdb94a6d5a74d..a869029d09663 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs @@ -25,6 +25,7 @@ use hir_expand::{ use itertools::Itertools; use rustc_hash::{FxHashMap, FxHashSet}; use smallvec::{smallvec, SmallVec}; +use span::{Span, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}; use stdx::TupleExt; use syntax::{ algo::skip_trivia_token, @@ -131,6 +132,7 @@ pub struct SemanticsImpl<'db> { /// Rootnode to HirFileId cache cache: RefCell>, // These 2 caches are mainly useful for semantic highlighting as nothing else descends a lot of tokens + // So we might wanna move them out into something specific for semantic highlighting expansion_info_cache: RefCell>, /// MacroCall to its expansion's MacroFileId cache macro_call_cache: RefCell, MacroFileId>>, @@ -607,29 +609,102 @@ impl<'db> SemanticsImpl<'db> { res } - fn descend_into_macros_impl( + // return: + // SourceAnalyzer(file_id that original call include!) + // macro file id + // token in include! 
macro mapped from token in params + // span for the mapped token + fn is_from_include_file( &self, token: SyntaxToken, - f: &mut dyn FnMut(InFile) -> ControlFlow<()>, - ) { - let _p = profile::span("descend_into_macros"); - let sa = match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) { - Some(it) => it, - None => return, - }; + ) -> Option<(SourceAnalyzer, HirFileId, SyntaxToken, Span)> { + let parent = token.parent()?; + let file_id = self.find_file(&parent).file_id.file_id()?; + + let mut cache = self.expansion_info_cache.borrow_mut(); + + // iterate related crates and find all include! invocations that include_file_id matches + for (invoc, _) in self + .db + .relevant_crates(file_id) + .iter() + .flat_map(|krate| self.db.include_macro_invoc(*krate)) + .filter(|&(_, include_file_id)| include_file_id == file_id) + { + let macro_file = invoc.as_macro_file(); + let expansion_info = cache + .entry(macro_file) + .or_insert_with(|| macro_file.expansion_info(self.db.upcast())); - let span = match sa.file_id.file_id() { - Some(file_id) => self.db.real_span_map(file_id).span_for_range(token.text_range()), - None => { - stdx::never!(); - return; + // Create the source analyzer for the macro call scope + let Some(sa) = self.analyze_no_infer(&self.parse_or_expand(expansion_info.call_file())) + else { + continue; + }; + { + let InMacroFile { file_id: macro_file, value } = expansion_info.expanded(); + self.cache(value, macro_file.into()); } - }; + + // get mapped token in the include! macro file + let span = span::SpanData { + range: token.text_range(), + anchor: span::SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, + ctx: SyntaxContextId::ROOT, + }; + let Some(InMacroFile { file_id, value: mut mapped_tokens }) = + expansion_info.map_range_down(span) + else { + continue; + }; + + // if we find one, then return + if let Some(t) = mapped_tokens.next() { + return Some((sa, file_id.into(), t, span)); + } + } + + None + } + + fn descend_into_macros_impl( + &self, + mut token: SyntaxToken, + f: &mut dyn FnMut(InFile) -> ControlFlow<()>, + ) { + let _p = tracing::span!(tracing::Level::INFO, "descend_into_macros"); + let (sa, span, file_id) = + match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) { + Some(sa) => match sa.file_id.file_id() { + Some(file_id) => ( + sa, + self.db.real_span_map(file_id).span_for_range(token.text_range()), + file_id.into(), + ), + None => { + stdx::never!(); + return; + } + }, + None => { + // if we cannot find a source analyzer for this token, then we try to find out + // whether this file is an included file and treat that as the include input + let Some((it, macro_file_id, mapped_token, s)) = + self.is_from_include_file(token) + else { + return; + }; + token = mapped_token; + (it, s, macro_file_id) + } + }; let mut cache = self.expansion_info_cache.borrow_mut(); let mut mcache = self.macro_call_cache.borrow_mut(); let def_map = sa.resolver.def_map(); + let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(file_id, smallvec![token])]; + let mut process_expansion_for_token = |stack: &mut Vec<_>, macro_file| { let expansion_info = cache .entry(macro_file) @@ -651,8 +726,6 @@ impl<'db> SemanticsImpl<'db> { res }; - let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(sa.file_id, smallvec![token])]; - while let Some((file_id, mut tokens)) = stack.pop() { while let Some(token) = tokens.pop() { let was_not_remapped = (|| { @@ -1222,7 +1295,7 @@ impl<'db> SemanticsImpl<'db> { offset: Option, infer_body: bool, ) -> Option { - let _p = 
profile::span("Semantics::analyze_impl"); + let _p = tracing::span!(tracing::Level::INFO, "Semantics::analyze_impl"); let node = self.find_file(node); let container = self.with_ctx(|ctx| ctx.find_container(node))?; diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs index f60b3749b0805..14dbe6924032f 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs @@ -117,7 +117,7 @@ pub(super) struct SourceToDefCtx<'a, 'b> { impl SourceToDefCtx<'_, '_> { pub(super) fn file_to_def(&self, file: FileId) -> SmallVec<[ModuleId; 1]> { - let _p = profile::span("SourceBinder::to_module_def"); + let _p = tracing::span!(tracing::Level::INFO, "SourceBinder::to_module_def"); let mut mods = SmallVec::new(); for &crate_id in self.db.relevant_crates(file).iter() { // FIXME: inner items @@ -132,7 +132,7 @@ impl SourceToDefCtx<'_, '_> { } pub(super) fn module_to_def(&self, src: InFile) -> Option { - let _p = profile::span("module_to_def"); + let _p = tracing::span!(tracing::Level::INFO, "module_to_def"); let parent_declaration = src .syntax() .ancestors_with_macros_skip_attr_item(self.db.upcast()) @@ -153,7 +153,7 @@ impl SourceToDefCtx<'_, '_> { } pub(super) fn source_file_to_def(&self, src: InFile) -> Option { - let _p = profile::span("source_file_to_def"); + let _p = tracing::span!(tracing::Level::INFO, "source_file_to_def"); let file_id = src.file_id.original_file(self.db.upcast()); self.file_to_def(file_id).first().copied() } diff --git a/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml b/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml index 4d4bac5fb9664..98961a18de257 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml @@ -17,6 +17,7 @@ cov-mark = "2.0.0-pre.1" itertools.workspace = true either.workspace = true smallvec.workspace = true +tracing.workspace = true # local deps stdx.workspace = true @@ -38,4 +39,4 @@ sourcegen.workspace = true in-rust-tree = [] [lints] -workspace = true \ No newline at end of file +workspace = true diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs index 410c623109ea0..c1b95bb1e253e 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs @@ -105,7 +105,7 @@ fn add_missing_impl_members_inner( assist_id: &'static str, label: &'static str, ) -> Option<()> { - let _p = profile::span("add_missing_impl_members_inner"); + let _p = tracing::span!(tracing::Level::INFO, "add_missing_impl_members_inner"); let impl_def = ctx.find_node_at_offset::()?; let impl_ = ctx.sema.to_def(&impl_def)?; @@ -370,17 +370,17 @@ impl Foo for S { add_missing_impl_members, r#" pub trait Trait<'a, 'b, A, B, C> { - fn foo(&self, one: &'a A, anoter: &'b B) -> &'a C; + fn foo(&self, one: &'a A, another: &'b B) -> &'a C; } impl<'x, 'y, T, V, U> Trait<'x, 'y, T, V, U> for () {$0}"#, r#" pub trait Trait<'a, 'b, A, B, C> { - fn foo(&self, one: &'a A, anoter: &'b B) -> &'a C; + fn foo(&self, one: &'a A, another: &'b B) -> &'a C; } impl<'x, 'y, T, V, U> Trait<'x, 'y, T, V, U> for () { - fn foo(&self, one: &'x T, anoter: &'y V) -> &'x U { + fn foo(&self, one: &'x T, another: &'y V) -> &'x 
U { ${0:todo!()} } }"#, @@ -393,7 +393,7 @@ impl<'x, 'y, T, V, U> Trait<'x, 'y, T, V, U> for () { add_missing_default_members, r#" pub trait Trait<'a, 'b, A, B, C: Default> { - fn foo(&self, _one: &'a A, _anoter: &'b B) -> (C, &'a i32) { + fn foo(&self, _one: &'a A, _another: &'b B) -> (C, &'a i32) { let value: &'a i32 = &0; (C::default(), value) } @@ -402,14 +402,14 @@ pub trait Trait<'a, 'b, A, B, C: Default> { impl<'x, 'y, T, V, U: Default> Trait<'x, 'y, T, V, U> for () {$0}"#, r#" pub trait Trait<'a, 'b, A, B, C: Default> { - fn foo(&self, _one: &'a A, _anoter: &'b B) -> (C, &'a i32) { + fn foo(&self, _one: &'a A, _another: &'b B) -> (C, &'a i32) { let value: &'a i32 = &0; (C::default(), value) } } impl<'x, 'y, T, V, U: Default> Trait<'x, 'y, T, V, U> for () { - $0fn foo(&self, _one: &'x T, _anoter: &'y V) -> (U, &'x i32) { + $0fn foo(&self, _one: &'x T, _another: &'y V) -> (U, &'x i32) { let value: &'x i32 = &0; (::default(), value) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs index 0f23b69908d6c..22d16cf6b36e4 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs @@ -163,7 +163,7 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op block } else { // `expr_replace` is a descendant of `to_wrap`, so both steps need to be - // handled seperately, otherwise we wrap the wrong expression + // handled separately, otherwise we wrap the wrong expression let to_wrap = edit.make_mut(to_wrap); // Replace the target expr first so that we don't need to find where diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs index 7e4f140a28faa..dc27af5cbed20 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs @@ -418,7 +418,7 @@ where } #[test] - fn new_function_with_generics_and_wheres() { + fn new_function_with_generics_and_where() { check_assist( generate_default_from_new, r#" diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs index 154a1f59c722c..898bd01291a25 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs @@ -295,7 +295,7 @@ fn generate_impl( // those in strukt. // // These generics parameters will also be used in `field_ty` and - // `where_clauses`, so we should substitude arguments in them as well. + // `where_clauses`, so we should substitute arguments in them as well. let strukt_params = resolve_name_conflicts(strukt_params, &old_impl_params); let (field_ty, ty_where_clause) = match &strukt_params { Some(strukt_params) => { @@ -491,7 +491,7 @@ fn remove_useless_where_clauses(trait_ty: &ast::Type, self_ty: &ast::Type, wc: a // Generate generic args that should be apply to current impl. // -// For exmaple, say we have implementation `impl Trait for B`, +// For example, say we have implementation `impl Trait for B`, // and `b: B` in struct `S`. Then the `A` should be instantiated to `T`. 
// While the last two generic args `B` and `C` doesn't change, it remains // ``. So we apply `` as generic arguments to impl. @@ -637,7 +637,7 @@ fn const_assoc_item(item: syntax::ast::Const, qual_path_ty: ast::Path) -> Option let path_expr_segment = make::path_from_text(item.name()?.to_string().as_str()); // We want rhs of the const assignment to be a qualified path - // The general case for const assigment can be found [here](`https://doc.rust-lang.org/reference/items/constant-items.html`) + // The general case for const assignment can be found [here](`https://doc.rust-lang.org/reference/items/constant-items.html`) // The qualified will have the following generic syntax : // >::ConstName; // FIXME : We can't rely on `make::path_qualified` for now but it would be nice to replace the following with it. @@ -779,7 +779,7 @@ impl Trait for Base {} #[test] fn test_self_ty() { - // trait whith `Self` type cannot be delegated + // trait with `Self` type cannot be delegated // // See the function `fn f() -> Self`. // It should be `fn f() -> Base` in `Base`, and `fn f() -> S` in `S` diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs index d90d366ffe4c4..91eaa96b6cb0d 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs @@ -6,7 +6,7 @@ use syntax::{ use crate::{AssistContext, AssistId, AssistKind, Assists}; -// FIXME: Generate proper `index_mut` method body refer to `index` method body may impossible due to the unpredicable case [#15581]. +// FIXME: Generate proper `index_mut` method body refer to `index` method body may impossible due to the unpredictable case [#15581]. // Here just leave the `index_mut` method body be same as `index` method body, user can modify it manually to meet their need. // Assist: generate_mut_trait_impl diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs index a8817436ba1fc..8881aa69f29d3 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs @@ -183,7 +183,7 @@ fn remove_items_visibility(item: &ast::AssocItem) { fn strip_body(item: &ast::AssocItem) { if let ast::AssocItem::Fn(f) = item { if let Some(body) = f.body() { - // In constrast to function bodies, we want to see no ws before a semicolon. + // In contrast to function bodies, we want to see no ws before a semicolon. // So let's remove them if we see any. 
if let Some(prev) = body.syntax().prev_sibling_or_token() { if prev.kind() == SyntaxKind::WHITESPACE { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/into_to_qualified_from.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/into_to_qualified_from.rs index 965e4aa786e7a..f7da88b2c1838 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/into_to_qualified_from.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/into_to_qualified_from.rs @@ -120,7 +120,7 @@ fn main() -> () { } #[test] - fn fromed_in_child_mod_imported() { + fn from_in_child_mod_imported() { check_assist( into_to_qualified_from, r#" @@ -168,7 +168,7 @@ fn main() -> () { } #[test] - fn fromed_in_child_mod_not_imported() { + fn from_in_child_mod_not_imported() { check_assist( into_to_qualified_from, r#" diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs index 2beab26dce738..797c5c0653321 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs @@ -1,8 +1,9 @@ use either::Either; use ide_db::imports::{ insert_use::{ImportGranularity, InsertUseConfig}, - merge_imports::{try_merge_imports, try_merge_trees, MergeBehavior}, + merge_imports::{try_merge_imports, try_merge_trees, try_normalize_use_tree, MergeBehavior}, }; +use itertools::Itertools; use syntax::{ algo::neighbor, ast::{self, edit_in_place::Removable}, @@ -32,24 +33,13 @@ use Edit::*; pub(crate) fn merge_imports(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let (target, edits) = if ctx.has_empty_selection() { // Merge a neighbor - let mut tree: ast::UseTree = ctx.find_node_at_offset()?; - if ctx.config.insert_use.granularity == ImportGranularity::One - && tree.parent_use_tree_list().is_some() - { - cov_mark::hit!(resolve_top_use_tree_for_import_one); - tree = tree.top_use_tree(); - } + cov_mark::hit!(merge_with_use_item_neighbors); + let tree = ctx.find_node_at_offset::()?.top_use_tree(); let target = tree.syntax().text_range(); - let edits = if let Some(use_item) = tree.syntax().parent().and_then(ast::Use::cast) { - cov_mark::hit!(merge_with_use_item_neighbors); - let mut neighbor = next_prev().find_map(|dir| neighbor(&use_item, dir)).into_iter(); - use_item.try_merge_from(&mut neighbor, &ctx.config.insert_use) - } else { - cov_mark::hit!(merge_with_use_tree_neighbors); - let mut neighbor = next_prev().find_map(|dir| neighbor(&tree, dir)).into_iter(); - tree.clone().try_merge_from(&mut neighbor, &ctx.config.insert_use) - }; + let use_item = tree.syntax().parent().and_then(ast::Use::cast)?; + let mut neighbor = next_prev().find_map(|dir| neighbor(&use_item, dir)).into_iter(); + let edits = use_item.try_merge_from(&mut neighbor, &ctx.config.insert_use); (target, edits?) } else { // Merge selected @@ -94,7 +84,35 @@ pub(crate) fn merge_imports(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio for edit in edits_mut { match edit { Remove(it) => it.as_ref().either(Removable::remove, Removable::remove), - Replace(old, new) => ted::replace(old, new), + Replace(old, new) => { + ted::replace(old, &new); + + // If there's a selection and we're replacing a use tree in a tree list, + // normalize the parent use tree if it only contains the merged subtree. 
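The normalization step added to `merge_imports` just below checks "does this tree list contain exactly one use tree?" with itertools' `collect_tuple::<(_,)>()`. A short demonstration of that idiom (assumes the `itertools` crate as a dependency):

```rust
use itertools::Itertools;

fn main() {
    // `collect_tuple::<(_,)>()` succeeds only when the iterator yields exactly one item,
    // which is how the hunk below tests that a tree list holds a single use tree.
    assert!(["std::fmt"].iter().collect_tuple::<(_,)>().is_some());
    assert!(["std::fmt", "std::io"].iter().collect_tuple::<(_,)>().is_none());
    assert!(std::iter::empty::<&str>().collect_tuple::<(_,)>().is_none());
}
```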
+ if !ctx.has_empty_selection() { + let normalized_use_tree = ast::UseTree::cast(new) + .as_ref() + .and_then(ast::UseTree::parent_use_tree_list) + .and_then(|use_tree_list| { + if use_tree_list.use_trees().collect_tuple::<(_,)>().is_some() { + Some(use_tree_list.parent_use_tree()) + } else { + None + } + }) + .and_then(|target_tree| { + try_normalize_use_tree( + &target_tree, + ctx.config.insert_use.granularity.into(), + ) + .map(|top_use_tree_flat| (target_tree, top_use_tree_flat)) + }); + if let Some((old_tree, new_tree)) = normalized_use_tree { + cov_mark::hit!(replace_parent_with_normalized_use_tree); + ted::replace(old_tree.syntax(), new_tree.syntax()); + } + } + } } } }, @@ -201,20 +219,17 @@ use std::fmt$0::{Display, Debug}; use std::fmt::{Display, Debug}; ", r" -use std::fmt::{Display, Debug}; +use std::fmt::{Debug, Display}; ", ); // The assist macro below calls `check_assist_import_one` 4 times with different input - // use item variations based on the first 2 input parameters, but only 2 calls - // contain `use {std::fmt$0::{Display, Debug}};` for which the top use tree will need - // to be resolved. - cov_mark::check_count!(resolve_top_use_tree_for_import_one, 2); + // use item variations based on the first 2 input parameters. cov_mark::check_count!(merge_with_use_item_neighbors, 4); check_assist_import_one_variations!( "std::fmt$0::{Display, Debug}", "std::fmt::{Display, Debug}", - "use {std::fmt::{Display, Debug}};" + "use {std::fmt::{Debug, Display}};" ); } @@ -257,7 +272,7 @@ use std::fmt::{Debug, Display}; } #[test] - fn merge_self1() { + fn merge_self() { check_assist( merge_imports, r" @@ -276,21 +291,8 @@ use std::fmt::{self, Display}; } #[test] - fn merge_self2() { - check_assist( - merge_imports, - r" -use std::{fmt, $0fmt::Display}; -", - r" -use std::{fmt::{self, Display}}; -", - ); - } - - #[test] - fn not_applicable_to_single_one_style_import() { - cov_mark::check!(resolve_top_use_tree_for_import_one); + fn not_applicable_to_single_import() { + check_assist_not_applicable(merge_imports, "use std::{fmt, $0fmt::Display};"); check_assist_not_applicable_for_import_one( merge_imports, "use {std::{fmt, $0fmt::Display}};", @@ -385,14 +387,14 @@ pub(in this::path) use std::fmt::{Debug, Display}; #[test] fn test_merge_nested() { - cov_mark::check!(merge_with_use_tree_neighbors); check_assist( merge_imports, r" -use std::{fmt$0::Debug, fmt::Display}; +use std::{fmt$0::Debug, fmt::Error}; +use std::{fmt::Write, fmt::Display}; ", r" -use std::{fmt::{Debug, Display}}; +use std::fmt::{Debug, Display, Error, Write}; ", ); } @@ -402,10 +404,11 @@ use std::{fmt::{Debug, Display}}; check_assist( merge_imports, r" -use std::{fmt::Debug, fmt$0::Display}; +use std::{fmt::Debug, fmt$0::Error}; +use std::{fmt::Write, fmt::Display}; ", r" -use std::{fmt::{Debug, Display}}; +use std::fmt::{Debug, Display, Error, Write}; ", ); } @@ -419,13 +422,13 @@ use std$0::{fmt::{Write, Display}}; use std::{fmt::{self, Debug}}; ", r" -use std::{fmt::{self, Debug, Display, Write}}; +use std::fmt::{self, Debug, Display, Write}; ", ); check_assist_import_one_variations!( "std$0::{fmt::{Write, Display}}", "std::{fmt::{self, Debug}}", - "use {std::{fmt::{self, Debug, Display, Write}}};" + "use {std::fmt::{self, Debug, Display, Write}};" ); } @@ -438,26 +441,13 @@ use std$0::{fmt::{self, Debug}}; use std::{fmt::{Write, Display}}; ", r" -use std::{fmt::{self, Debug, Display, Write}}; +use std::fmt::{self, Debug, Display, Write}; ", ); check_assist_import_one_variations!( "std$0::{fmt::{self, Debug}}", 
"std::{fmt::{Write, Display}}", - "use {std::{fmt::{self, Debug, Display, Write}}};" - ); - } - - #[test] - fn test_merge_self_with_nested_self_item() { - check_assist( - merge_imports, - r" -use std::{fmt$0::{self, Debug}, fmt::{Write, Display}}; -", - r" -use std::{fmt::{self, Debug, Display, Write}}; -", + "use {std::fmt::{self, Debug, Display, Write}};" ); } @@ -470,13 +460,13 @@ use foo::$0{bar::{self}}; use foo::{bar}; ", r" -use foo::{bar::{self}}; +use foo::bar; ", ); check_assist_import_one_variations!( "foo::$0{bar::{self}}", "foo::{bar}", - "use {foo::{bar::{self}}};" + "use {foo::bar};" ); } @@ -489,13 +479,13 @@ use foo::$0{bar}; use foo::{bar::{self}}; ", r" -use foo::{bar::{self}}; +use foo::bar; ", ); check_assist_import_one_variations!( "foo::$0{bar}", "foo::{bar::{self}}", - "use {foo::{bar::{self}}};" + "use {foo::bar};" ); } @@ -508,13 +498,13 @@ use std$0::{fmt::*}; use std::{fmt::{self, Display}}; ", r" -use std::{fmt::{self, Display, *}}; +use std::fmt::{self, Display, *}; ", ); check_assist_import_one_variations!( "std$0::{fmt::*}", "std::{fmt::{self, Display}}", - "use {std::{fmt::{self, Display, *}}};" + "use {std::fmt::{self, Display, *}};" ); } @@ -579,29 +569,27 @@ use foo::{bar, baz}; check_assist( merge_imports, r" -use { - foo$0::bar, - foo::baz, +use foo$0::{ + bar, baz, }; +use foo::qux; ", r" -use { - foo::{bar, baz}, +use foo::{ + bar, baz, qux, }; ", ); check_assist( merge_imports, r" -use { - foo::baz, - foo$0::bar, +use foo::{ + baz, bar, }; +use foo$0::qux; ", r" -use { - foo::{bar, baz}, -}; +use foo::{bar, baz, qux}; ", ); } @@ -711,12 +699,19 @@ use std::{ };", ); - // FIXME: Remove redundant braces. See also unnecessary-braces diagnostic. cov_mark::check!(merge_with_selected_use_tree_neighbors); + check_assist( + merge_imports, + r"use std::{fmt::Result, $0fmt::Display, fmt::Debug$0};", + r"use std::{fmt::Result, fmt::{Debug, Display}};", + ); + + cov_mark::check!(merge_with_selected_use_tree_neighbors); + cov_mark::check!(replace_parent_with_normalized_use_tree); check_assist( merge_imports, r"use std::$0{fmt::Display, fmt::Debug}$0;", - r"use std::{fmt::{Debug, Display}};", + r"use std::fmt::{Debug, Display};", ); } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/normalize_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/normalize_import.rs new file mode 100644 index 0000000000000..7d003efe721d1 --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/normalize_import.rs @@ -0,0 +1,219 @@ +use ide_db::imports::merge_imports::try_normalize_import; +use syntax::{ast, AstNode}; + +use crate::{ + assist_context::{AssistContext, Assists}, + AssistId, AssistKind, +}; + +// Assist: normalize_import +// +// Normalizes an import. +// +// ``` +// use$0 std::{io, {fmt::Formatter}}; +// ``` +// -> +// ``` +// use std::{fmt::Formatter, io}; +// ``` +pub(crate) fn normalize_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let use_item = if ctx.has_empty_selection() { + ctx.find_node_at_offset()? + } else { + ctx.covering_element().ancestors().find_map(ast::Use::cast)? 
+ }; + + let target = use_item.syntax().text_range(); + let normalized_use_item = + try_normalize_import(&use_item, ctx.config.insert_use.granularity.into())?; + + acc.add( + AssistId("normalize_import", AssistKind::RefactorRewrite), + "Normalize import", + target, + |builder| { + builder.replace_ast(use_item, normalized_use_item); + }, + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::{ + check_assist, check_assist_import_one, check_assist_not_applicable, + check_assist_not_applicable_for_import_one, + }; + + use super::*; + + macro_rules! check_assist_variations { + ($fixture: literal, $expected: literal) => { + check_assist( + normalize_import, + concat!("use $0", $fixture, ";"), + concat!("use ", $expected, ";"), + ); + check_assist( + normalize_import, + concat!("$0use ", $fixture, ";"), + concat!("use ", $expected, ";"), + ); + + check_assist_import_one( + normalize_import, + concat!("use $0", $fixture, ";"), + concat!("use {", $expected, "};"), + ); + check_assist_import_one( + normalize_import, + concat!("$0use ", $fixture, ";"), + concat!("use {", $expected, "};"), + ); + + check_assist_import_one( + normalize_import, + concat!("use $0{", $fixture, "};"), + concat!("use {", $expected, "};"), + ); + check_assist_import_one( + normalize_import, + concat!("$0use {", $fixture, "};"), + concat!("use {", $expected, "};"), + ); + + check_assist( + normalize_import, + concat!("use $0", $fixture, "$0;"), + concat!("use ", $expected, ";"), + ); + check_assist( + normalize_import, + concat!("$0use ", $fixture, ";$0"), + concat!("use ", $expected, ";"), + ); + }; + } + + macro_rules! check_assist_not_applicable_variations { + ($fixture: literal) => { + check_assist_not_applicable(normalize_import, concat!("use $0", $fixture, ";")); + check_assist_not_applicable(normalize_import, concat!("$0use ", $fixture, ";")); + + check_assist_not_applicable_for_import_one( + normalize_import, + concat!("use $0{", $fixture, "};"), + ); + check_assist_not_applicable_for_import_one( + normalize_import, + concat!("$0use {", $fixture, "};"), + ); + }; + } + + #[test] + fn test_order() { + check_assist_variations!( + "foo::{*, Qux, bar::{Quux, Bar}, baz, FOO_BAZ, self, Baz}", + "foo::{self, bar::{Bar, Quux}, baz, Baz, Qux, FOO_BAZ, *}" + ); + } + + #[test] + fn test_redundant_braces() { + check_assist_variations!("foo::{bar::{baz, Qux}}", "foo::bar::{baz, Qux}"); + check_assist_variations!("foo::{bar::{self}}", "foo::bar"); + check_assist_variations!("foo::{bar::{*}}", "foo::bar::*"); + check_assist_variations!("foo::{bar::{Qux as Quux}}", "foo::bar::Qux as Quux"); + check_assist_variations!( + "foo::bar::{{FOO_BAZ, Qux, self}, {*, baz}}", + "foo::bar::{self, baz, Qux, FOO_BAZ, *}" + ); + check_assist_variations!( + "foo::bar::{{{FOO_BAZ}, {{Qux}, {self}}}, {{*}, {baz}}}", + "foo::bar::{self, baz, Qux, FOO_BAZ, *}" + ); + } + + #[test] + fn test_merge() { + check_assist_variations!( + "foo::{*, bar, {FOO_BAZ, qux}, bar::{*, baz}, {Quux}}", + "foo::{bar::{self, baz, *}, qux, Quux, FOO_BAZ, *}" + ); + check_assist_variations!( + "foo::{*, bar, {FOO_BAZ, qux}, bar::{*, baz}, {Quux, bar::{baz::Foo}}}", + "foo::{bar::{self, baz::{self, Foo}, *}, qux, Quux, FOO_BAZ, *}" + ); + } + + #[test] + fn test_merge_self() { + check_assist_variations!("std::{fmt, fmt::Display}", "std::fmt::{self, Display}"); + } + + #[test] + fn test_merge_nested() { + check_assist_variations!("std::{fmt::Debug, fmt::Display}", "std::fmt::{Debug, Display}"); + } + + #[test] + fn test_merge_nested2() { + 
check_assist_variations!("std::{fmt::Debug, fmt::Display}", "std::fmt::{Debug, Display}"); + } + + #[test] + fn test_merge_self_with_nested_self_item() { + check_assist_variations!( + "std::{fmt::{self, Debug}, fmt::{Write, Display}}", + "std::fmt::{self, Debug, Display, Write}" + ); + } + + #[test] + fn works_with_trailing_comma() { + check_assist( + normalize_import, + r" +use $0{ + foo::bar, + foo::baz, +}; + ", + r" +use foo::{bar, baz}; + ", + ); + check_assist_import_one( + normalize_import, + r" +use $0{ + foo::bar, + foo::baz, +}; +", + r" +use { + foo::{bar, baz}, +}; +", + ); + } + + #[test] + fn not_applicable_to_normalized_import() { + check_assist_not_applicable_variations!("foo::bar"); + check_assist_not_applicable_variations!("foo::bar::*"); + check_assist_not_applicable_variations!("foo::bar::Qux as Quux"); + check_assist_not_applicable_variations!("foo::bar::{self, baz, Qux, FOO_BAZ, *}"); + check_assist_not_applicable_variations!( + "foo::{self, bar::{Bar, Quux}, baz, Baz, Qux, FOO_BAZ, *}" + ); + check_assist_not_applicable_variations!( + "foo::{bar::{self, baz, *}, qux, Quux, FOO_BAZ, *}" + ); + check_assist_not_applicable_variations!( + "foo::{bar::{self, baz::{self, Foo}, *}, qux, Quux, FOO_BAZ, *}" + ); + } +} diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs index edcf52a9b38eb..2fec104323dc7 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs @@ -183,6 +183,7 @@ mod handlers { mod move_guard; mod move_module_to_file; mod move_to_mod_rs; + mod normalize_import; mod number_representation; mod promote_local_to_const; mod pull_assignment_up; @@ -300,6 +301,7 @@ mod handlers { move_module_to_file::move_module_to_file, move_to_mod_rs::move_to_mod_rs, move_from_mod_rs::move_from_mod_rs, + normalize_import::normalize_import, number_representation::reformat_number_literal, pull_assignment_up::pull_assignment_up, promote_local_to_const::promote_local_to_const, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs index 0ce89ae0a9a5e..8d7c49d52c23d 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs @@ -2217,6 +2217,19 @@ fn t() {} ) } +#[test] +fn doctest_normalize_import() { + check_doc_test( + "normalize_import", + r#####" +use$0 std::{io, {fmt::Formatter}}; +"#####, + r#####" +use std::{fmt::Formatter, io}; +"#####, + ) +} + #[test] fn doctest_promote_local_to_const() { check_doc_test( diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests/sourcegen.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests/sourcegen.rs index ad5ec83287529..088d93f9a6ba4 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/tests/sourcegen.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests/sourcegen.rs @@ -2,6 +2,7 @@ use std::{fmt, fs, path::Path}; +use stdx::format_to_acc; use test_utils::project_root; #[test] @@ -172,8 +173,7 @@ impl fmt::Display for Assist { fn hide_hash_comments(text: &str) -> String { text.split('\n') // want final newline .filter(|&it| !(it.starts_with("# ") || it == "#")) - .map(|it| format!("{it}\n")) - .collect() + .fold(String::new(), |mut acc, it| format_to_acc!(acc, "{it}\n")) } fn reveal_hash_comments(text: &str) -> String { @@ -187,6 +187,5 @@ fn reveal_hash_comments(text: &str) -> String { it } 
}) - .map(|it| format!("{it}\n")) - .collect() + .fold(String::new(), |mut acc, it| format_to_acc!(acc, "{it}\n")) } diff --git a/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml b/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml index 7fbcf3d19e0f1..f2a11276ba2b8 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml @@ -14,6 +14,7 @@ doctest = false [dependencies] cov-mark = "2.0.0-pre.1" itertools.workspace = true +tracing.workspace = true once_cell = "1.17.0" smallvec.workspace = true @@ -38,4 +39,4 @@ test-utils.workspace = true test-fixture.workspace = true [lints] -workspace = true \ No newline at end of file +workspace = true diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs index e5fdac327cdb6..00135a6d202cb 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs @@ -31,7 +31,7 @@ pub(crate) fn complete_dot( } let is_field_access = matches!(dot_access.kind, DotAccessKind::Field { .. }); - let is_method_acces_with_parens = + let is_method_access_with_parens = matches!(dot_access.kind, DotAccessKind::Method { has_parens: true }); complete_fields( @@ -41,7 +41,7 @@ pub(crate) fn complete_dot( |acc, field, ty| acc.add_field(ctx, dot_access, None, field, &ty), |acc, field, ty| acc.add_tuple_field(ctx, None, field, &ty), is_field_access, - is_method_acces_with_parens, + is_method_access_with_parens, ); complete_methods(ctx, receiver_ty, |func| acc.add_method(ctx, dot_access, func, None, None)); @@ -114,14 +114,14 @@ fn complete_fields( mut named_field: impl FnMut(&mut Completions, hir::Field, hir::Type), mut tuple_index: impl FnMut(&mut Completions, usize, hir::Type), is_field_access: bool, - is_method_acess_with_parens: bool, + is_method_access_with_parens: bool, ) { let mut seen_names = FxHashSet::default(); for receiver in receiver.autoderef(ctx.db) { for (field, ty) in receiver.fields(ctx.db) { if seen_names.insert(field.name(ctx.db)) && (is_field_access - || (is_method_acess_with_parens && (ty.is_fn() || ty.is_closure()))) + || (is_method_access_with_parens && (ty.is_fn() || ty.is_closure()))) { named_field(acc, field, ty); } @@ -131,7 +131,7 @@ fn complete_fields( // already seen without inserting into the hashset. if !seen_names.contains(&hir::Name::new_tuple_field(i)) && (is_field_access - || (is_method_acess_with_parens && (ty.is_fn() || ty.is_closure()))) + || (is_method_access_with_parens && (ty.is_fn() || ty.is_closure()))) { // Tuple fields are always public (tuple struct fields are handled above). tuple_index(acc, i, ty); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs index d3c817d4b43ad..1433216d6111d 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs @@ -15,7 +15,7 @@ pub(crate) fn complete_expr_path( path_ctx @ PathCompletionCtx { qualified, .. 
}: &PathCompletionCtx, expr_ctx: &ExprCtx, ) { - let _p = profile::span("complete_expr_path"); + let _p = tracing::span!(tracing::Level::INFO, "complete_expr_path").entered(); if !ctx.qualifier_ctx.none() { return; } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs index 75017cf66f8db..b5d5604c7519f 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs @@ -26,7 +26,6 @@ const SUPPORTED_CALLING_CONVENTIONS: &[&str] = &[ "ptx-kernel", "msp430-interrupt", "x86-interrupt", - "amdgpu-kernel", "efiapi", "avr-interrupt", "avr-non-blocking-interrupt", diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs index e330430d6b90b..0e04ad35d33d1 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs @@ -207,7 +207,8 @@ fn import_on_the_fly( position: SyntaxNode, potential_import_name: String, ) -> Option<()> { - let _p = profile::span("import_on_the_fly").detail(|| potential_import_name.clone()); + let _p = + tracing::span!(tracing::Level::INFO, "import_on_the_fly", ?potential_import_name).entered(); ImportScope::find_insert_use_container(&position, &ctx.sema)?; @@ -293,7 +294,8 @@ fn import_on_the_fly_pat_( position: SyntaxNode, potential_import_name: String, ) -> Option<()> { - let _p = profile::span("import_on_the_fly_pat").detail(|| potential_import_name.clone()); + let _p = tracing::span!(tracing::Level::INFO, "import_on_the_fly_pat", ?potential_import_name) + .entered(); ImportScope::find_insert_use_container(&position, &ctx.sema)?; @@ -343,7 +345,9 @@ fn import_on_the_fly_method( position: SyntaxNode, potential_import_name: String, ) -> Option<()> { - let _p = profile::span("import_on_the_fly_method").detail(|| potential_import_name.clone()); + let _p = + tracing::span!(tracing::Level::INFO, "import_on_the_fly_method", ?potential_import_name) + .entered(); ImportScope::find_insert_use_container(&position, &ctx.sema)?; diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list.rs index 4de15ab759629..addd9dac1a766 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list.rs @@ -28,7 +28,7 @@ pub(crate) fn complete_item_list( path_ctx @ PathCompletionCtx { qualified, .. 
}: &PathCompletionCtx, kind: &ItemListKind, ) { - let _p = profile::span("complete_item_list"); + let _p = tracing::span!(tracing::Level::INFO, "complete_item_list").entered(); if path_ctx.is_trivial_path() { add_keywords(acc, ctx, Some(kind)); } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs index 5d138eea46f4b..ecf5b29e2c0c3 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs @@ -21,7 +21,7 @@ pub(crate) fn complete_mod( return None; } - let _p = profile::span("completion::complete_mod"); + let _p = tracing::span!(tracing::Level::INFO, "completion::complete_mod").entered(); let mut current_module = ctx.module; // For `mod $0`, `ctx.module` is its parent, but for `mod f$0`, it's `mod f` itself, but we're diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs index a846ffe10e636..af83d4104f76e 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs @@ -84,6 +84,13 @@ pub(crate) fn complete_postfix( ) .add_to(acc, ctx.db); + postfix_snippet( + "lete", + "let Ok else {}", + &format!("let Ok($1) = {receiver_text} else {{\n $2\n}};\n$0"), + ) + .add_to(acc, ctx.db); + postfix_snippet( "while", "while let Ok {}", @@ -99,6 +106,13 @@ pub(crate) fn complete_postfix( ) .add_to(acc, ctx.db); + postfix_snippet( + "lete", + "let Some else {}", + &format!("let Some($1) = {receiver_text} else {{\n $2\n}};\n$0"), + ) + .add_to(acc, ctx.db); + postfix_snippet( "while", "while let Some {}", @@ -469,6 +483,29 @@ fn main() { ); } + #[test] + fn option_letelse() { + check_edit( + "lete", + r#" +//- minicore: option +fn main() { + let bar = Some(true); + bar.$0 +} +"#, + r#" +fn main() { + let bar = Some(true); + let Some($1) = bar else { + $2 +}; +$0 +} +"#, + ); + } + #[test] fn result_match() { check_edit( diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs index a30fd13b1d5f3..e6a4335c3fec8 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs @@ -15,7 +15,7 @@ pub(crate) fn complete_type_path( path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx, location: &TypeLocation, ) { - let _p = profile::span("complete_type_path"); + let _p = tracing::span!(tracing::Level::INFO, "complete_type_path").entered(); let scope_def_applicable = |def| { use hir::{GenericParam::*, ModuleDef::*}; diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs index 575f524209c70..2c0370c58f70a 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs @@ -568,7 +568,8 @@ impl CompletionContext<'_> { /// A version of [`SemanticsScope::process_all_names`] that filters out `#[doc(hidden)]` items and /// passes all doc-aliases along, to funnel it into [`Completions::add_path_resolution`]. 
pub(crate) fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef, Vec)) { - let _p = profile::span("CompletionContext::process_all_names"); + let _p = + tracing::span!(tracing::Level::INFO, "CompletionContext::process_all_names").entered(); self.scope.process_all_names(&mut |name, def| { if self.is_scope_def_hidden(def) { return; @@ -579,7 +580,8 @@ impl CompletionContext<'_> { } pub(crate) fn process_all_names_raw(&self, f: &mut dyn FnMut(Name, ScopeDef)) { - let _p = profile::span("CompletionContext::process_all_names_raw"); + let _p = tracing::span!(tracing::Level::INFO, "CompletionContext::process_all_names_raw") + .entered(); self.scope.process_all_names(f); } @@ -637,7 +639,7 @@ impl<'a> CompletionContext<'a> { position @ FilePosition { file_id, offset }: FilePosition, config: &'a CompletionConfig, ) -> Option<(CompletionContext<'a>, CompletionAnalysis)> { - let _p = profile::span("CompletionContext::new"); + let _p = tracing::span!(tracing::Level::INFO, "CompletionContext::new").entered(); let sema = Semantics::new(db); let original_file = sema.parse(file_id); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs index 8a4ac00de9105..c06b64df1c525 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs @@ -72,7 +72,7 @@ fn expand( mut fake_ident_token: SyntaxToken, relative_offset: TextSize, ) -> ExpansionResult { - let _p = profile::span("CompletionContext::expand"); + let _p = tracing::span!(tracing::Level::INFO, "CompletionContext::expand").entered(); let mut derive_ctx = None; 'expansion: loop { @@ -211,7 +211,7 @@ fn analyze( original_token: &SyntaxToken, self_token: &SyntaxToken, ) -> Option<(CompletionAnalysis, (Option, Option), QualifierCtx)> { - let _p = profile::span("CompletionContext::analyze"); + let _p = tracing::span!(tracing::Level::INFO, "CompletionContext::analyze").entered(); let ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx } = expansion_result; @@ -1267,8 +1267,7 @@ fn pattern_context_for( pat .syntax() .ancestors() - .skip_while(|it| ast::Pat::can_cast(it.kind())) - .next() + .find(|it| !ast::Pat::can_cast(it.kind())) .map_or((PatternRefutability::Irrefutable, false), |node| { let refutability = match_ast! 
{ match node { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs index 864b993f7136e..bcf169f46530d 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs @@ -433,7 +433,7 @@ impl Builder { } pub(crate) fn build(self, db: &RootDatabase) -> CompletionItem { - let _p = profile::span("item::Builder::build"); + let _p = tracing::span!(tracing::Level::INFO, "item::Builder::build").entered(); let label = self.label; let mut label_detail = None; diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs index d26b6f431b5b4..733523d369424 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs @@ -236,7 +236,7 @@ pub fn resolve_completion_edits( FilePosition { file_id, offset }: FilePosition, imports: impl IntoIterator, ) -> Option> { - let _p = profile::span("resolve_completion_edits"); + let _p = tracing::span!(tracing::Level::INFO, "resolve_completion_edits").entered(); let sema = hir::Semantics::new(db); let original_file = sema.parse(file_id); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs index ad26280ae748f..4d49d2f4987f1 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs @@ -292,7 +292,7 @@ fn render_resolution_pat( import_to_add: Option, resolution: ScopeDef, ) -> Builder { - let _p = profile::span("render_resolution"); + let _p = tracing::span!(tracing::Level::INFO, "render_resolution").entered(); use hir::ModuleDef::*; if let ScopeDef::ModuleDef(Macro(mac)) = resolution { @@ -310,7 +310,7 @@ fn render_resolution_path( import_to_add: Option, resolution: ScopeDef, ) -> Builder { - let _p = profile::span("render_resolution"); + let _p = tracing::span!(tracing::Level::INFO, "render_resolution").entered(); use hir::ModuleDef::*; match resolution { @@ -418,7 +418,7 @@ fn render_resolution_simple_( import_to_add: Option, resolution: ScopeDef, ) -> Builder { - let _p = profile::span("render_resolution"); + let _p = tracing::span!(tracing::Level::INFO, "render_resolution").entered(); let db = ctx.db(); let ctx = ctx.import_to_add(import_to_add); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs index 3c73983c39a2b..a2bfac994ff0b 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs @@ -6,7 +6,7 @@ use ide_db::SymbolKind; use crate::{item::CompletionItem, render::RenderContext}; pub(crate) fn render_const(ctx: RenderContext<'_>, const_: hir::Const) -> Option { - let _p = profile::span("render_const"); + let _p = tracing::span!(tracing::Level::INFO, "render_const").entered(); render(ctx, const_) } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs index 0f2608d1325a9..4ae7ea861c75b 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs @@ -25,7 +25,7 @@ pub(crate) fn render_fn( local_name: Option, func: hir::Function, ) -> 
Builder { - let _p = profile::span("render_fn"); + let _p = tracing::span!(tracing::Level::INFO, "render_fn").entered(); render(ctx, local_name, func, FuncKind::Function(path_ctx)) } @@ -36,7 +36,7 @@ pub(crate) fn render_method( local_name: Option, func: hir::Function, ) -> Builder { - let _p = profile::span("render_method"); + let _p = tracing::span!(tracing::Level::INFO, "render_method").entered(); render(ctx, local_name, func, FuncKind::Method(dot_access, receiver)) } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs index f2d67df01d31b..f52a5f7625557 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs @@ -27,7 +27,7 @@ pub(crate) fn render_variant_lit( variant: hir::Variant, path: Option, ) -> Option { - let _p = profile::span("render_enum_variant"); + let _p = tracing::span!(tracing::Level::INFO, "render_enum_variant").entered(); let db = ctx.db(); let name = local_name.unwrap_or_else(|| variant.name(db)); @@ -41,7 +41,7 @@ pub(crate) fn render_struct_literal( path: Option, local_name: Option, ) -> Option { - let _p = profile::span("render_struct_literal"); + let _p = tracing::span!(tracing::Level::INFO, "render_struct_literal").entered(); let db = ctx.db(); let name = local_name.unwrap_or_else(|| strukt.name(db)); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs index 915a245ab6b4c..540cfd03d6061 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs @@ -17,7 +17,7 @@ pub(crate) fn render_macro( name: hir::Name, macro_: hir::Macro, ) -> Builder { - let _p = profile::span("render_macro"); + let _p = tracing::span!(tracing::Level::INFO, "render_macro").entered(); render(ctx, *kind == PathKind::Use, *has_macro_bang, *has_call_parens, name, macro_) } @@ -27,7 +27,7 @@ pub(crate) fn render_macro_pat( name: hir::Name, macro_: hir::Macro, ) -> Builder { - let _p = profile::span("render_macro"); + let _p = tracing::span!(tracing::Level::INFO, "render_macro").entered(); render(ctx, false, false, false, name, macro_) } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs index 6f998119b7cac..a5f851566cb08 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs @@ -20,7 +20,7 @@ pub(crate) fn render_struct_pat( strukt: hir::Struct, local_name: Option, ) -> Option { - let _p = profile::span("render_struct_pat"); + let _p = tracing::span!(tracing::Level::INFO, "render_struct_pat").entered(); let fields = strukt.fields(ctx.db()); let (visible_fields, fields_omitted) = visible_fields(ctx.completion, &fields, strukt)?; @@ -50,7 +50,7 @@ pub(crate) fn render_variant_pat( local_name: Option, path: Option<&hir::ModPath>, ) -> Option { - let _p = profile::span("render_variant_pat"); + let _p = tracing::span!(tracing::Level::INFO, "render_variant_pat").entered(); let fields = variant.fields(ctx.db()); let (visible_fields, fields_omitted) = visible_fields(ctx.completion, &fields, variant)?; diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs 
b/src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs index 343ba7e28d8e0..b192309e93f34 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs @@ -10,7 +10,7 @@ pub(crate) fn render_type_alias( ctx: RenderContext<'_>, type_alias: hir::TypeAlias, ) -> Option { - let _p = profile::span("render_type_alias"); + let _p = tracing::span!(tracing::Level::INFO, "render_type_alias").entered(); render(ctx, type_alias, false) } @@ -18,7 +18,7 @@ pub(crate) fn render_type_alias_with_eq( ctx: RenderContext<'_>, type_alias: hir::TypeAlias, ) -> Option { - let _p = profile::span("render_type_alias_with_eq"); + let _p = tracing::span!(tracing::Level::INFO, "render_type_alias_with_eq").entered(); render(ctx, type_alias, true) } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs index c421be51a0d07..154b69875aea8 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs @@ -210,23 +210,14 @@ pub(crate) fn check_edit_with_config( let mut combined_edit = completion.text_edit.clone(); - resolve_completion_edits( - &db, - &config, - position, - completion - .import_to_add - .iter() - .cloned() - .filter_map(|(import_path, import_name)| Some((import_path, import_name))), - ) - .into_iter() - .flatten() - .for_each(|text_edit| { - combined_edit.union(text_edit).expect( - "Failed to apply completion resolve changes: change ranges overlap, but should not", - ) - }); + resolve_completion_edits(&db, &config, position, completion.import_to_add.iter().cloned()) + .into_iter() + .flatten() + .for_each(|text_edit| { + combined_edit.union(text_edit).expect( + "Failed to apply completion resolve changes: change ranges overlap, but should not", + ) + }); combined_edit.apply(&mut actual); assert_eq_text!(&ra_fixture_after, &actual) diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs index b4f936b35aead..758c254a88289 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs @@ -105,7 +105,7 @@ fn func(param0 @ (param1, param2): (i32, i32)) { fn completes_all_the_things_in_fn_body() { check( r#" -use non_existant::Unresolved; +use non_existent::Unresolved; mod qualified { pub enum Enum { Variant } } impl Unit { @@ -170,7 +170,7 @@ impl Unit { ); check( r#" -use non_existant::Unresolved; +use non_existent::Unresolved; mod qualified { pub enum Enum { Variant } } impl Unit { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs index 1af16ef857d03..eaa1bebc03c7e 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs @@ -106,7 +106,7 @@ fn main() { } "#, r#" -use dep::{FirstStruct, some_module::{SecondStruct, ThirdStruct}}; +use dep::{some_module::{SecondStruct, ThirdStruct}, FirstStruct}; fn main() { ThirdStruct diff --git a/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs index 766d1c1e43dba..296253aa1ee19 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs +++ 
b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs @@ -15,12 +15,13 @@ use crate::{symbol_index::SymbolsDatabase, Change, RootDatabase}; impl RootDatabase { pub fn request_cancellation(&mut self) { - let _p = profile::span("RootDatabase::request_cancellation"); + let _p = + tracing::span!(tracing::Level::INFO, "RootDatabase::request_cancellation").entered(); self.salsa_runtime_mut().synthetic_write(Durability::LOW); } pub fn apply_change(&mut self, change: Change) { - let _p = profile::span("RootDatabase::apply_change"); + let _p = tracing::span!(tracing::Level::INFO, "RootDatabase::apply_change").entered(); self.request_cancellation(); tracing::trace!("apply_change {:?}", change); if let Some(roots) = &change.source_change.roots { diff --git a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs index 5995b318e8eff..81f2f87d96236 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs @@ -404,7 +404,7 @@ impl NameClass { } pub fn classify(sema: &Semantics<'_, RootDatabase>, name: &ast::Name) -> Option { - let _p = profile::span("classify_name"); + let _p = tracing::span!(tracing::Level::INFO, "classify_name").entered(); let parent = name.syntax().parent()?; @@ -496,7 +496,7 @@ impl NameClass { sema: &Semantics<'_, RootDatabase>, lifetime: &ast::Lifetime, ) -> Option { - let _p = profile::span("classify_lifetime").detail(|| lifetime.to_string()); + let _p = tracing::span!(tracing::Level::INFO, "classify_lifetime", ?lifetime).entered(); let parent = lifetime.syntax().parent()?; if let Some(it) = ast::LifetimeParam::cast(parent.clone()) { @@ -587,7 +587,7 @@ impl NameRefClass { sema: &Semantics<'_, RootDatabase>, name_ref: &ast::NameRef, ) -> Option { - let _p = profile::span("classify_name_ref").detail(|| name_ref.to_string()); + let _p = tracing::span!(tracing::Level::INFO, "classify_name_ref", ?name_ref).entered(); let parent = name_ref.syntax().parent()?; @@ -686,7 +686,7 @@ impl NameRefClass { sema: &Semantics<'_, RootDatabase>, lifetime: &ast::Lifetime, ) -> Option { - let _p = profile::span("classify_lifetime_ref").detail(|| lifetime.to_string()); + let _p = tracing::span!(tracing::Level::INFO, "classify_lifetime_ref", ?lifetime).entered(); let parent = lifetime.syntax().parent()?; match parent.kind() { SyntaxKind::BREAK_EXPR | SyntaxKind::CONTINUE_EXPR => { diff --git a/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs b/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs index cc8e8431708ab..72ca354365e7b 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs @@ -1,4 +1,4 @@ -//! Documentation attribute related utilties. +//! Documentation attribute related utilities. 
use either::Either; use hir::{ db::{DefDatabase, HirDatabase}, diff --git a/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs b/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs index f160def0aff00..677c8fd54c058 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs @@ -22,10 +22,6 @@ pub const DEFAULT_LINTS: &[Lint] = &[ description: r##"detects certain glob imports that require reporting an ambiguity error"##, }, Lint { label: "ambiguous_glob_reexports", description: r##"ambiguous glob re-exports"## }, - Lint { - label: "ambiguous_wide_pointer_comparisons", - description: r##"detects ambiguous wide pointer comparisons"##, - }, Lint { label: "anonymous_parameters", description: r##"detects anonymous parameters"## }, Lint { label: "arithmetic_overflow", description: r##"arithmetic operation overflows"## }, Lint { @@ -110,7 +106,7 @@ pub const DEFAULT_LINTS: &[Lint] = &[ }, Lint { label: "deref_into_dyn_supertrait", - description: r##"`Deref` implementation usage with a supertrait trait object for output are shadow by implicit coercion"##, + description: r##"`Deref` implementation usage with a supertrait trait object for output might be shadowed in the future"##, }, Lint { label: "deref_nullptr", @@ -180,7 +176,7 @@ pub const DEFAULT_LINTS: &[Lint] = &[ }, Lint { label: "future_incompatible", - description: r##"lint group for: ambiguous-associated-items, ambiguous-glob-imports, byte-slice-in-packed-struct-with-derive, cenum-impl-drop-cast, coherence-leak-check, coinductive-overlap-in-coherence, conflicting-repr-hints, const-evaluatable-unchecked, const-patterns-without-partial-eq, deprecated-cfg-attr-crate-type-name, elided-lifetimes-in-associated-constant, forbidden-lint-groups, ill-formed-attribute-input, illegal-floating-point-literal-pattern, indirect-structural-match, invalid-doc-attributes, invalid-type-param-default, late-bound-lifetime-arguments, legacy-derive-helpers, macro-expanded-macro-exports-accessed-by-absolute-paths, missing-fragment-specifier, nontrivial-structural-match, order-dependent-trait-objects, patterns-in-fns-without-body, pointer-structural-match, proc-macro-back-compat, proc-macro-derive-resolution-fallback, pub-use-of-private-extern-crate, repr-transparent-external-private-fields, semicolon-in-expressions-from-macros, soft-unstable, suspicious-auto-trait-impls, uninhabited-static, unstable-name-collisions, unstable-syntax-pre-expansion, unsupported-calling-conventions, where-clauses-object-safety, writes-through-immutable-pointer"##, + description: r##"lint group for: deref-into-dyn-supertrait, ambiguous-associated-items, ambiguous-glob-imports, byte-slice-in-packed-struct-with-derive, cenum-impl-drop-cast, coherence-leak-check, coinductive-overlap-in-coherence, conflicting-repr-hints, const-evaluatable-unchecked, const-patterns-without-partial-eq, deprecated-cfg-attr-crate-type-name, elided-lifetimes-in-associated-constant, forbidden-lint-groups, ill-formed-attribute-input, illegal-floating-point-literal-pattern, implied-bounds-entailment, indirect-structural-match, invalid-doc-attributes, invalid-type-param-default, late-bound-lifetime-arguments, legacy-derive-helpers, macro-expanded-macro-exports-accessed-by-absolute-paths, missing-fragment-specifier, nontrivial-structural-match, order-dependent-trait-objects, patterns-in-fns-without-body, pointer-structural-match, proc-macro-back-compat, proc-macro-derive-resolution-fallback, 
pub-use-of-private-extern-crate, repr-transparent-external-private-fields, semicolon-in-expressions-from-macros, soft-unstable, suspicious-auto-trait-impls, uninhabited-static, unstable-name-collisions, unstable-syntax-pre-expansion, unsupported-calling-conventions, where-clauses-object-safety"##, }, Lint { label: "fuzzy_provenance_casts", @@ -198,6 +194,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[ label: "illegal_floating_point_literal_pattern", description: r##"floating-point literals cannot be used in patterns"##, }, + Lint { + label: "implied_bounds_entailment", + description: r##"impl method assumes more implied bounds than its corresponding trait method"##, + }, Lint { label: "improper_ctypes", description: r##"proper use of libc types in foreign modules"##, @@ -579,10 +579,6 @@ pub const DEFAULT_LINTS: &[Lint] = &[ description: r##"enabling track_caller on an async fn is a no-op unless the async_fn_track_caller feature is enabled"##, }, Lint { label: "uninhabited_static", description: r##"uninhabited static"## }, - Lint { - label: "unit_bindings", - description: r##"binding is useless because it has the unit `()` type"##, - }, Lint { label: "unknown_crate_types", description: r##"unknown crate type found in `#[crate_type]` directive"##, @@ -740,19 +736,16 @@ pub const DEFAULT_LINTS: &[Lint] = &[ label: "while_true", description: r##"suggest using `loop { }` instead of `while true { }`"##, }, - Lint { - label: "writes_through_immutable_pointer", - description: r##"shared references are immutable, and pointers derived from them must not be written to"##, - }, ]; pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &[ LintGroup { lint: Lint { label: "future_incompatible", - description: r##"lint group for: ambiguous-associated-items, ambiguous-glob-imports, byte-slice-in-packed-struct-with-derive, cenum-impl-drop-cast, coherence-leak-check, coinductive-overlap-in-coherence, conflicting-repr-hints, const-evaluatable-unchecked, const-patterns-without-partial-eq, deprecated-cfg-attr-crate-type-name, elided-lifetimes-in-associated-constant, forbidden-lint-groups, ill-formed-attribute-input, illegal-floating-point-literal-pattern, indirect-structural-match, invalid-doc-attributes, invalid-type-param-default, late-bound-lifetime-arguments, legacy-derive-helpers, macro-expanded-macro-exports-accessed-by-absolute-paths, missing-fragment-specifier, nontrivial-structural-match, order-dependent-trait-objects, patterns-in-fns-without-body, pointer-structural-match, proc-macro-back-compat, proc-macro-derive-resolution-fallback, pub-use-of-private-extern-crate, repr-transparent-external-private-fields, semicolon-in-expressions-from-macros, soft-unstable, suspicious-auto-trait-impls, uninhabited-static, unstable-name-collisions, unstable-syntax-pre-expansion, unsupported-calling-conventions, where-clauses-object-safety, writes-through-immutable-pointer"##, + description: r##"lint group for: deref-into-dyn-supertrait, ambiguous-associated-items, ambiguous-glob-imports, byte-slice-in-packed-struct-with-derive, cenum-impl-drop-cast, coherence-leak-check, coinductive-overlap-in-coherence, conflicting-repr-hints, const-evaluatable-unchecked, const-patterns-without-partial-eq, deprecated-cfg-attr-crate-type-name, elided-lifetimes-in-associated-constant, forbidden-lint-groups, ill-formed-attribute-input, illegal-floating-point-literal-pattern, implied-bounds-entailment, indirect-structural-match, invalid-doc-attributes, invalid-type-param-default, late-bound-lifetime-arguments, legacy-derive-helpers, 
macro-expanded-macro-exports-accessed-by-absolute-paths, missing-fragment-specifier, nontrivial-structural-match, order-dependent-trait-objects, patterns-in-fns-without-body, pointer-structural-match, proc-macro-back-compat, proc-macro-derive-resolution-fallback, pub-use-of-private-extern-crate, repr-transparent-external-private-fields, semicolon-in-expressions-from-macros, soft-unstable, suspicious-auto-trait-impls, uninhabited-static, unstable-name-collisions, unstable-syntax-pre-expansion, unsupported-calling-conventions, where-clauses-object-safety"##, }, children: &[ + "deref_into_dyn_supertrait", "ambiguous_associated_items", "ambiguous_glob_imports", "byte_slice_in_packed_struct_with_derive", @@ -767,6 +760,7 @@ pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &[ "forbidden_lint_groups", "ill_formed_attribute_input", "illegal_floating_point_literal_pattern", + "implied_bounds_entailment", "indirect_structural_match", "invalid_doc_attributes", "invalid_type_param_default", @@ -790,7 +784,6 @@ pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &[ "unstable_syntax_pre_expansion", "unsupported_calling_conventions", "where_clauses_object_safety", - "writes_through_immutable_pointer", ], }, LintGroup { @@ -963,17 +956,6 @@ The tracking issue for this feature is: [#44839] [#44839]: https://github.com/rust-lang/rust/issues/44839 ------------------------- -"##, - }, - Lint { - label: "abi_amdgpu_kernel", - description: r##"# `abi_amdgpu_kernel` - -The tracking issue for this feature is: [#51575] - -[#51575]: https://github.com/rust-lang/rust/issues/51575 - ------------------------ "##, }, @@ -1403,17 +1385,6 @@ The tracking issue for this feature is: [#91583] [#91583]: https://github.com/rust-lang/rust/issues/91583 ------------------------- -"##, - }, - Lint { - label: "array_methods", - description: r##"# `array_methods` - -The tracking issue for this feature is: [#76118] - -[#76118]: https://github.com/rust-lang/rust/issues/76118 - ------------------------ "##, }, @@ -1586,7 +1557,7 @@ This feature tracks `asm!` and `global_asm!` support for the following architect | M68k | `reg_data` | None | `i8`, `i16`, `i32` | | CSKY | `reg` | None | `i8`, `i16`, `i32` | | CSKY | `freg` | None | `f32`, | -| s390x | `reg` | None | `i8`, `i16`, `i32`, `i64` | +| s390x | `reg`, `reg_addr` | None | `i8`, `i16`, `i32`, `i64` | | s390x | `freg` | None | `f32`, `f64` | ## Register aliases @@ -1660,9 +1631,10 @@ This feature tracks `asm!` and `global_asm!` support for the following architect | NVPTX | `reg64` | None | `rd0` | None | | Hexagon | `reg` | None | `r0` | None | | PowerPC | `reg` | None | `0` | None | -| PowerPC | `reg_nonzero` | None | `3` | `b` | +| PowerPC | `reg_nonzero` | None | `3` | None | | PowerPC | `freg` | None | `0` | None | | s390x | `reg` | None | `%r0` | None | +| s390x | `reg_addr` | None | `%r1` | None | | s390x | `freg` | None | `%f0` | None | | CSKY | `reg` | None | `r0` | None | | CSKY | `freg` | None | `f0` | None | @@ -1756,6 +1728,15 @@ The tracking issue for this feature is: [#110011] [#110011]: https://github.com/rust-lang/rust/issues/110011 +------------------------ +"##, + }, + Lint { + label: "async_fn_traits", + description: r##"# `async_fn_traits` + +This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. 
+ ------------------------ "##, }, @@ -2007,17 +1988,6 @@ The tracking issue for this feature is: [#80996] [#80996]: https://github.com/rust-lang/rust/issues/80996 ------------------------- -"##, - }, - Lint { - label: "bound_map", - description: r##"# `bound_map` - -The tracking issue for this feature is: [#86026] - -[#86026]: https://github.com/rust-lang/rust/issues/86026 - ------------------------ "##, }, @@ -2397,6 +2367,17 @@ fn b() { } } ``` +"##, + }, + Lint { + label: "cfg_sanitizer_cfi", + description: r##"# `cfg_sanitizer_cfi` + +The tracking issue for this feature is: [#89653] + +[#89653]: https://github.com/rust-lang/rust/issues/89653 + +------------------------ "##, }, Lint { @@ -2829,17 +2810,6 @@ The tracking issue for this feature is: [#91583] [#91583]: https://github.com/rust-lang/rust/issues/91583 ------------------------- -"##, - }, - Lint { - label: "const_assume", - description: r##"# `const_assume` - -The tracking issue for this feature is: [#76972] - -[#76972]: https://github.com/rust-lang/rust/issues/76972 - ------------------------ "##, }, @@ -3114,6 +3084,17 @@ The tracking issue for this feature is: [#79597] [#79597]: https://github.com/rust-lang/rust/issues/79597 +------------------------ +"##, + }, + Lint { + label: "const_hint_assert_unchecked", + description: r##"# `const_hint_assert_unchecked` + +The tracking issue for this feature is: [#119131] + +[#119131]: https://github.com/rust-lang/rust/issues/119131 + ------------------------ "##, }, @@ -3518,6 +3499,17 @@ The tracking issue for this feature is: [#83570] [#83570]: https://github.com/rust-lang/rust/issues/83570 +------------------------ +"##, + }, + Lint { + label: "const_slice_first_last_chunk", + description: r##"# `const_slice_first_last_chunk` + +The tracking issue for this feature is: [#111774] + +[#111774]: https://github.com/rust-lang/rust/issues/111774 + ------------------------ "##, }, @@ -3593,6 +3585,17 @@ The tracking issue for this feature is: [#101804] [#101804]: https://github.com/rust-lang/rust/issues/101804 +------------------------ +"##, + }, + Lint { + label: "const_str_from_raw_parts_mut", + description: r##"# `const_str_from_raw_parts_mut` + +The tracking issue for this feature is: [#119206] + +[#119206]: https://github.com/rust-lang/rust/issues/119206 + ------------------------ "##, }, @@ -3615,6 +3618,17 @@ The tracking issue for this feature is: [#91005] [#91005]: https://github.com/rust-lang/rust/issues/91005 +------------------------ +"##, + }, + Lint { + label: "const_strict_overflow_ops", + description: r##"# `const_strict_overflow_ops` + +The tracking issue for this feature is: [#118260] + +[#118260]: https://github.com/rust-lang/rust/issues/118260 + ------------------------ "##, }, @@ -3754,15 +3768,6 @@ The tracking issue for this feature is: [#117693] [#117693]: https://github.com/rust-lang/rust/issues/117693 ------------------------- -"##, - }, - Lint { - label: "core_panic", - description: r##"# `core_panic` - -This feature is internal to the Rust compiler and is not intended for general use. 
- ------------------------ "##, }, @@ -5316,6 +5321,17 @@ The tracking issue for this feature is: [#113521] [#113521]: https://github.com/rust-lang/rust/issues/113521 +------------------------ +"##, + }, + Lint { + label: "generic_nonzero", + description: r##"# `generic_nonzero` + +The tracking issue for this feature is: [#120257] + +[#120257]: https://github.com/rust-lang/rust/issues/120257 + ------------------------ "##, }, @@ -5436,6 +5452,17 @@ The tracking issue for this feature is: [#44839] [#44839]: https://github.com/rust-lang/rust/issues/44839 +------------------------ +"##, + }, + Lint { + label: "hint_assert_unchecked", + description: r##"# `hint_assert_unchecked` + +The tracking issue for this feature is: [#119131] + +[#119131]: https://github.com/rust-lang/rust/issues/119131 + ------------------------ "##, }, @@ -5759,6 +5786,15 @@ The tracking issue for this feature is: [#53485] Add the methods `is_sorted`, `is_sorted_by` and `is_sorted_by_key` to `[T]`; add the methods `is_sorted`, `is_sorted_by` and `is_sorted_by_key` to `Iterator`. +"##, + }, + Lint { + label: "is_val_statically_known", + description: r##"# `is_val_statically_known` + +This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. + +------------------------ "##, }, Lint { @@ -6417,6 +6453,17 @@ The tracking issue for this feature is: [#98262] [#98262]: https://github.com/rust-lang/rust/issues/98262 +------------------------ +"##, + }, + Lint { + label: "min_exhaustive_patterns", + description: r##"# `min_exhaustive_patterns` + +The tracking issue for this feature is: [#119612] + +[#119612]: https://github.com/rust-lang/rust/issues/119612 + ------------------------ "##, }, @@ -6514,17 +6561,6 @@ The tracking issue for this feature is: [#81872] [#81872]: https://github.com/rust-lang/rust/issues/81872 ------------------------- -"##, - }, - Lint { - label: "mutex_unpoison", - description: r##"# `mutex_unpoison` - -The tracking issue for this feature is: [#96469] - -[#96469]: https://github.com/rust-lang/rust/issues/96469 - ------------------------ "##, }, @@ -6532,9 +6568,9 @@ The tracking issue for this feature is: [#96469] label: "naked_functions", description: r##"# `naked_functions` -The tracking issue for this feature is: [#32408] +The tracking issue for this feature is: [#90957] -[#32408]: https://github.com/rust-lang/rust/issues/32408 +[#90957]: https://github.com/rust-lang/rust/issues/90957 ------------------------ "##, @@ -6760,6 +6796,37 @@ The tracking issue for this feature is: [#117691] [#117691]: https://github.com/rust-lang/rust/issues/117691 +------------------------ +"##, + }, + Lint { + label: "non_zero_count_ones", + description: r##"# `non_zero_count_ones` + +The tracking issue for this feature is: [#120287] + +[#120287]: https://github.com/rust-lang/rust/issues/120287 + +------------------------ +"##, + }, + Lint { + label: "nonzero_from_mut", + description: r##"# `nonzero_from_mut` + +The tracking issue for this feature is: [#106290] + +[#106290]: https://github.com/rust-lang/rust/issues/106290 + +------------------------ +"##, + }, + Lint { + label: "nonzero_internals", + description: r##"# `nonzero_internals` + +This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. 
+ ------------------------ "##, }, @@ -6817,23 +6884,23 @@ The tracking issue for this feature is: [#43561] "##, }, Lint { - label: "offset_of", - description: r##"# `offset_of` + label: "offset_of_enum", + description: r##"# `offset_of_enum` -The tracking issue for this feature is: [#106655] +The tracking issue for this feature is: [#120141] -[#106655]: https://github.com/rust-lang/rust/issues/106655 +[#120141]: https://github.com/rust-lang/rust/issues/120141 ------------------------ "##, }, Lint { - label: "offset_of_enum", - description: r##"# `offset_of_enum` + label: "offset_of_nested", + description: r##"# `offset_of_nested` -The tracking issue for this feature is: [#106655] +The tracking issue for this feature is: [#120140] -[#106655]: https://github.com/rust-lang/rust/issues/106655 +[#120140]: https://github.com/rust-lang/rust/issues/120140 ------------------------ "##, @@ -7164,6 +7231,17 @@ The tracking issue for this feature is: [#115268] [#115268]: https://github.com/rust-lang/rust/issues/115268 +------------------------ +"##, + }, + Lint { + label: "proc_macro_c_str_literals", + description: r##"# `proc_macro_c_str_literals` + +The tracking issue for this feature is: [#119750] + +[#119750]: https://github.com/rust-lang/rust/issues/119750 + ------------------------ "##, }, @@ -7383,17 +7461,6 @@ The tracking issue for this feature is: [#107792] [#107792]: https://github.com/rust-lang/rust/issues/107792 ------------------------- -"##, - }, - Lint { - label: "raw_os_nonzero", - description: r##"# `raw_os_nonzero` - -The tracking issue for this feature is: [#82363] - -[#82363]: https://github.com/rust-lang/rust/issues/82363 - ------------------------ "##, }, @@ -7553,17 +7620,6 @@ The tracking issue for this feature is: [#93743] [#93743]: https://github.com/rust-lang/rust/issues/93743 ------------------------- -"##, - }, - Lint { - label: "round_ties_even", - description: r##"# `round_ties_even` - -The tracking issue for this feature is: [#96710] - -[#96710]: https://github.com/rust-lang/rust/issues/96710 - ------------------------ "##, }, @@ -7813,17 +7869,6 @@ The tracking issue for this feature is: [#27747] [#27747]: https://github.com/rust-lang/rust/issues/27747 ------------------------- -"##, - }, - Lint { - label: "slice_first_last_chunk", - description: r##"# `slice_first_last_chunk` - -The tracking issue for this feature is: [#111774] - -[#111774]: https://github.com/rust-lang/rust/issues/111774 - ------------------------ "##, }, @@ -7846,17 +7891,6 @@ The tracking issue for this feature is: [#89792] [#89792]: https://github.com/rust-lang/rust/issues/89792 ------------------------- -"##, - }, - Lint { - label: "slice_group_by", - description: r##"# `slice_group_by` - -The tracking issue for this feature is: [#80552] - -[#80552]: https://github.com/rust-lang/rust/issues/80552 - ------------------------ "##, }, @@ -8056,6 +8090,17 @@ The tracking issue for this feature is: [#96137] [#96137]: https://github.com/rust-lang/rust/issues/96137 +------------------------ +"##, + }, + Lint { + label: "split_at_checked", + description: r##"# `split_at_checked` + +The tracking issue for this feature is: [#119128] + +[#119128]: https://github.com/rust-lang/rust/issues/119128 + ------------------------ "##, }, @@ -8192,6 +8237,17 @@ The tracking issue for this feature is: [#15701] [#15701]: https://github.com/rust-lang/rust/issues/15701 +------------------------ +"##, + }, + Lint { + label: "str_from_raw_parts", + description: r##"# `str_from_raw_parts` + +The tracking issue for this 
feature is: [#119206] + +[#119206]: https://github.com/rust-lang/rust/issues/119206 + ------------------------ "##, }, @@ -8212,6 +8268,17 @@ The tracking issue for this feature is: [#116258] This feature is internal to the Rust compiler and is not intended for general use. +------------------------ +"##, + }, + Lint { + label: "str_lines_remainder", + description: r##"# `str_lines_remainder` + +The tracking issue for this feature is: [#77998] + +[#77998]: https://github.com/rust-lang/rust/issues/77998 + ------------------------ "##, }, @@ -8245,6 +8312,17 @@ The tracking issue for this feature is: [#77998] [#77998]: https://github.com/rust-lang/rust/issues/77998 +------------------------ +"##, + }, + Lint { + label: "strict_overflow_ops", + description: r##"# `strict_overflow_ops` + +The tracking issue for this feature is: [#118260] + +[#118260]: https://github.com/rust-lang/rust/issues/118260 + ------------------------ "##, }, @@ -8758,6 +8836,37 @@ pub fn main() { println!("{:?}", b); } ``` +"##, + }, + Lint { + label: "trait_upcasting", + description: r##"# `trait_upcasting` + +The tracking issue for this feature is: [#65991] + +[#65991]: https://github.com/rust-lang/rust/issues/65991 + +------------------------ + +The `trait_upcasting` feature adds support for trait upcasting coercion. This allows a +trait object of type `dyn Bar` to be cast to a trait object of type `dyn Foo` +so long as `Bar: Foo`. + +```rust,edition2018 +#![feature(trait_upcasting)] +#![allow(incomplete_features)] + +trait Foo {} + +trait Bar: Foo {} + +impl Foo for i32 {} + +impl Bar for T {} + +let bar: &dyn Bar = &123; +let foo: &dyn Foo = bar; +``` "##, }, Lint { @@ -10492,6 +10601,11 @@ differing by an underscore."##, label: "clippy::duration_subsec", description: r##"Checks for calculation of subsecond microseconds or milliseconds from other `Duration` methods."##, + }, + Lint { + label: "clippy::eager_transmute", + description: r##"Checks for integer validity checks, followed by a transmute that is (incorrectly) evaluated +eagerly (e.g. using `bool::then_some`)."##, }, Lint { label: "clippy::else_if_without_else", @@ -10510,6 +10624,10 @@ As of this writing, the `never_type` is still a nightly-only experimental API. 
Therefore, this lint is only triggered if the `never_type` is enabled."##, }, + Lint { + label: "clippy::empty_enum_variants_with_brackets", + description: r##"Finds enum variants without fields that are declared with empty brackets."##, + }, Lint { label: "clippy::empty_line_after_doc_comments", description: r##"Checks for empty lines after documentation comments."##, }, @@ -10776,7 +10894,7 @@ multithreaded executors are likely to be used for running these Futures."##, Lint { label: "clippy::get_first", description: r##"Checks for usage of `x.get(0)` instead of -`x.first()`."##, +`x.first()` or `x.front()`."##, }, Lint { label: "clippy::get_last_with_len", @@ -10876,6 +10994,11 @@ because `Deref` is a supertrait of `DerefMut`)"##, description: r##"Looks for floating-point expressions that can be expressed using built-in methods to improve accuracy at the cost of performance."##, + }, + Lint { + label: "clippy::incompatible_msrv", + description: r##"This lint checks that no function newer than the defined MSRV (minimum +supported rust version) is used in the crate."##, }, Lint { label: "clippy::inconsistent_digit_grouping", @@ -11028,6 +11151,16 @@ create a `Vec`."##, label: "clippy::iter_count", description: r##"Checks for the use of `.iter().count()`."##, }, + Lint { + label: "clippy::iter_filter_is_ok", + description: r##"Checks for usage of `.filter(Result::is_ok)` that may be replaced with a `.flatten()` call. +This lint will require additional changes to the follow-up calls as it affects the type."##, + }, + Lint { + label: "clippy::iter_filter_is_some", + description: r##"Checks for usage of `.filter(Option::is_some)` that may be replaced with a `.flatten()` call. +This lint will require additional changes to the follow-up calls as it affects the type."##, + }, Lint { label: "clippy::iter_kv_map", description: r##"Checks for iterating a map (`HashMap` or `BTreeMap`) and @@ -11196,6 +11329,14 @@ when `lines` has type `std::io::Lines`."##, label: "clippy::linkedlist", description: r##"Checks for usage of any `LinkedList`, suggesting to use a `Vec` or a `VecDeque` (formerly called `RingBuf`)."##, + }, + Lint { + label: "clippy::lint_groups_priority", + description: r##"Checks for lint groups with the same priority as lints in the `Cargo.toml` +[`[lints]` table](https://doc.rust-lang.org/cargo/reference/manifest.html#the-lints-section). + +This lint will be removed once [cargo#12918](https://github.com/rust-lang/cargo/issues/12918) +is resolved."##, }, Lint { label: "clippy::little_endian_bytes", @@ -11280,6 +11421,7 @@ ascii range"##, description: r##"Checks for manual `is_infinite` reimplementations (i.e., `x == ::INFINITY || x == ::NEG_INFINITY`)."##, }, + Lint { label: "clippy::manual_is_variant_and", description: r##""## }, Lint { label: "clippy::manual_let_else", description: r##"Warn of cases where `let...else` could be used"##, @@ -11556,10 +11698,10 @@ is greater than the largest index used to index into the slice."##, Lint { label: "clippy::missing_enforced_import_renames", description: r##"Checks for imports that do not rename the item as specified -in the `enforce-import-renames` config option.
Note: Even though this lint is warn-by-default, it will only trigger if -import renames are defined in the clippy.toml file."##, +import renames are defined in the `clippy.toml` file."##, }, Lint { label: "clippy::missing_errors_doc", @@ -11947,6 +12089,10 @@ character escapes in C."##, taken to satisfy a bound and suggests to dereference the other argument instead"##, }, + Lint { + label: "clippy::option_as_ref_cloned", + description: r##"Checks for usage of `.as_ref().cloned()` and `.as_mut().cloned()` on `Option`s"##, + }, Lint { label: "clippy::option_as_ref_deref", description: r##"Checks for usage of `_.as_ref().map(Deref::deref)` or its aliases (such as String::as_str)."##, @@ -11958,7 +12104,7 @@ suggests usage of the `env!` macro."##, }, Lint { label: "clippy::option_filter_map", - description: r##"Checks for iterators of `Option`s using ``.filter(Option::is_some).map(Option::unwrap)` that may + description: r##"Checks for iterators of `Option`s using `.filter(Option::is_some).map(Option::unwrap)` that may be replaced with a `.flatten()` call."##, }, Lint { @@ -12144,6 +12290,11 @@ namely `*const T` to `*const U` and `*mut T` to `*mut U`."##, label: "clippy::pub_enum_variant_names", description: r##"Nothing. This lint has been deprecated."##, }, + Lint { + label: "clippy::pub_underscore_fields", + description: r##"Checks whether any field of the struct is prefixed with an `_` (underscore) and also marked +`pub` (public)"##, + }, Lint { label: "clippy::pub_use", description: r##"Restricts the usage of `pub use ...`"## }, Lint { label: "clippy::pub_with_shorthand", @@ -12239,8 +12390,8 @@ value that is going to be dropped without further use."##, Lint { label: "clippy::redundant_closure", description: r##"Checks for closures which just call another function where -the function can be called directly. `unsafe` functions or calls where types -get adjusted are ignored."##, +the function can be called directly. `unsafe` functions, calls where types +get adjusted or where the callee is marked `#[track_caller]` are ignored."##, }, Lint { label: "clippy::redundant_closure_call", @@ -12348,7 +12499,7 @@ they are equivalent to `1`. (Related discussion in [rust-clippy#7306](https://gi }, Lint { label: "clippy::result_filter_map", - description: r##"Checks for iterators of `Result`s using ``.filter(Result::is_ok).map(Result::unwrap)` that may + description: r##"Checks for iterators of `Result`s using `.filter(Result::is_ok).map(Result::unwrap)` that may be replaced with a `.flatten()` call."##, }, Lint { @@ -12443,7 +12594,7 @@ see the `unseparated_literal_suffix` lint."##, }, Lint { label: "clippy::serde_api_misuse", - description: r##"Checks for mis-uses of the serde API."##, + description: r##"Checks for misuses of the serde API."##, }, Lint { label: "clippy::shadow_reuse", @@ -12580,6 +12731,12 @@ use an unstable sort than a stable sort."##, label: "clippy::std_instead_of_core", description: r##"Finds items imported through `std` when available through `core`."##, }, + Lint { + label: "clippy::str_split_at_newline", + description: r##"Checks for usages of `str.trim().split(\ +)` and `str.trim().split(\\ +)`."##, + }, Lint { label: "clippy::str_to_string", description: r##"This lint checks for `.to_string()` method calls on values of type `&str`."##, @@ -12674,6 +12831,11 @@ is followed immediately by a newline or the `else` seems to be missing."##, label: "clippy::suspicious_op_assign_impl", description: r##"Lints for suspicious operations in impls of OpAssign, e.g. 
subtracting elements in an AddAssign impl."##, + }, + Lint { + label: "clippy::suspicious_open_options", + description: r##"Checks for the suspicious use of `OpenOptions::create()` +without an explicit `OpenOptions::truncate()`."##, }, Lint { label: "clippy::suspicious_operation_groupings", @@ -12724,6 +12886,10 @@ either `ignore`, `no_run` or `compile_fail`."##, description: r##"Triggers when a testing function (marked with the `#[test]` attribute) isn't inside a testing module (marked with `#[cfg(test)]`)."##, }, + Lint { + label: "clippy::thread_local_initializer_can_be_made_const", + description: r##"Suggests to use `const` in `thread_local!` macro if possible."##, + }, Lint { label: "clippy::to_digit_is_some", description: r##"Checks for `.to_digit(..).is_some()` on `char`s."##, @@ -12734,6 +12900,10 @@ either `ignore`, `no_run` or `compile_fail`."##, applied to a type that implements [`Display`](https://doc.rust-lang.org/std/fmt/trait.Display.html) in a macro that does formatting."##, }, + Lint { + label: "clippy::to_string_trait_impl", + description: r##"Checks for direct implementations of `ToString`."##, + }, Lint { label: "clippy::todo", description: r##"Checks for usage of `todo!`."## }, Lint { label: "clippy::too_many_arguments", @@ -12847,7 +13017,7 @@ declarations above a certain complexity threshold."##, }, Lint { label: "clippy::unchecked_duration_subtraction", - description: r##"Lints subtraction between an [`Instant`] and a [`Duration`]."##, + description: r##"Lints subtraction between an `Instant` and a `Duration`."##, }, Lint { label: "clippy::unconditional_recursion", @@ -13001,6 +13171,10 @@ sub-expression."##, label: "clippy::unnecessary_owned_empty_strings", description: r##"Detects cases of owned empty strings being passed as an argument to a function expecting `&str`"##, }, + Lint { + label: "clippy::unnecessary_result_map_or_else", + description: r##"Checks for usage of `.map_or_else()` map closure for `Result` type."##, + }, Lint { label: "clippy::unnecessary_safety_comment", description: r##"Checks for `// SAFETY: ` comments on safe code."##, @@ -13510,7 +13684,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ LintGroup { lint: Lint { label: "clippy::correctness", - description: r##"lint group for: clippy::absurd_extreme_comparisons, clippy::almost_swapped, clippy::approx_constant, clippy::async_yields_async, clippy::bad_bit_mask, clippy::cast_slice_different_sizes, clippy::deprecated_semver, clippy::derive_ord_xor_partial_ord, clippy::derived_hash_with_manual_eq, clippy::enum_clike_unportable_variant, clippy::eq_op, clippy::erasing_op, clippy::fn_address_comparisons, clippy::if_let_mutex, clippy::ifs_same_cond, clippy::impl_hash_borrow_with_str_and_bytes, clippy::impossible_comparisons, clippy::ineffective_bit_mask, clippy::infinite_iter, clippy::inherent_to_string_shadow_display, clippy::inline_fn_without_body, clippy::invalid_null_ptr_usage, clippy::invalid_regex, clippy::invisible_characters, clippy::iter_next_loop, clippy::iter_skip_zero, clippy::iterator_step_by_zero, clippy::let_underscore_lock, clippy::match_str_case_mismatch, clippy::mem_replace_with_uninit, clippy::min_max, clippy::mismatched_target_os, clippy::mistyped_literal_suffixes, clippy::modulo_one, clippy::mut_from_ref, clippy::never_loop, clippy::non_octal_unix_permissions, clippy::nonsensical_open_options, clippy::not_unsafe_ptr_arg_deref, clippy::option_env_unwrap, clippy::out_of_bounds_indexing, clippy::overly_complex_bool_expr, clippy::panicking_unwrap, clippy::possible_missing_comma, 
clippy::read_line_without_trim, clippy::recursive_format_impl, clippy::redundant_comparisons, clippy::redundant_locals, clippy::reversed_empty_ranges, clippy::self_assignment, clippy::serde_api_misuse, clippy::size_of_in_element_count, clippy::suspicious_splitn, clippy::transmute_null_to_fn, clippy::transmuting_null, clippy::uninit_assumed_init, clippy::uninit_vec, clippy::unit_cmp, clippy::unit_hash, clippy::unit_return_expecting_ord, clippy::unsound_collection_transmute, clippy::unused_io_amount, clippy::useless_attribute, clippy::vec_resize_to_zero, clippy::while_immutable_condition, clippy::wrong_transmute, clippy::zst_offset"##, + description: r##"lint group for: clippy::absurd_extreme_comparisons, clippy::almost_swapped, clippy::approx_constant, clippy::async_yields_async, clippy::bad_bit_mask, clippy::cast_slice_different_sizes, clippy::deprecated_semver, clippy::derive_ord_xor_partial_ord, clippy::derived_hash_with_manual_eq, clippy::eager_transmute, clippy::enum_clike_unportable_variant, clippy::eq_op, clippy::erasing_op, clippy::fn_address_comparisons, clippy::if_let_mutex, clippy::ifs_same_cond, clippy::impl_hash_borrow_with_str_and_bytes, clippy::impossible_comparisons, clippy::ineffective_bit_mask, clippy::infinite_iter, clippy::inherent_to_string_shadow_display, clippy::inline_fn_without_body, clippy::invalid_null_ptr_usage, clippy::invalid_regex, clippy::invisible_characters, clippy::iter_next_loop, clippy::iter_skip_zero, clippy::iterator_step_by_zero, clippy::let_underscore_lock, clippy::lint_groups_priority, clippy::match_str_case_mismatch, clippy::mem_replace_with_uninit, clippy::min_max, clippy::mismatched_target_os, clippy::mistyped_literal_suffixes, clippy::modulo_one, clippy::mut_from_ref, clippy::never_loop, clippy::non_octal_unix_permissions, clippy::nonsensical_open_options, clippy::not_unsafe_ptr_arg_deref, clippy::option_env_unwrap, clippy::out_of_bounds_indexing, clippy::overly_complex_bool_expr, clippy::panicking_unwrap, clippy::possible_missing_comma, clippy::read_line_without_trim, clippy::recursive_format_impl, clippy::redundant_comparisons, clippy::redundant_locals, clippy::reversed_empty_ranges, clippy::self_assignment, clippy::serde_api_misuse, clippy::size_of_in_element_count, clippy::suspicious_splitn, clippy::transmute_null_to_fn, clippy::transmuting_null, clippy::uninit_assumed_init, clippy::uninit_vec, clippy::unit_cmp, clippy::unit_hash, clippy::unit_return_expecting_ord, clippy::unsound_collection_transmute, clippy::unused_io_amount, clippy::useless_attribute, clippy::vec_resize_to_zero, clippy::while_immutable_condition, clippy::wrong_transmute, clippy::zst_offset"##, }, children: &[ "clippy::absurd_extreme_comparisons", @@ -13522,6 +13696,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::deprecated_semver", "clippy::derive_ord_xor_partial_ord", "clippy::derived_hash_with_manual_eq", + "clippy::eager_transmute", "clippy::enum_clike_unportable_variant", "clippy::eq_op", "clippy::erasing_op", @@ -13541,6 +13716,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::iter_skip_zero", "clippy::iterator_step_by_zero", "clippy::let_underscore_lock", + "clippy::lint_groups_priority", "clippy::match_str_case_mismatch", "clippy::mem_replace_with_uninit", "clippy::min_max", @@ -13665,7 +13841,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ LintGroup { lint: Lint { label: "clippy::pedantic", - description: r##"lint group for: clippy::bool_to_int_with_if, clippy::borrow_as_ptr, clippy::case_sensitive_file_extension_comparisons, 
clippy::cast_lossless, clippy::cast_possible_truncation, clippy::cast_possible_wrap, clippy::cast_precision_loss, clippy::cast_ptr_alignment, clippy::cast_sign_loss, clippy::checked_conversions, clippy::cloned_instead_of_copied, clippy::copy_iterator, clippy::default_trait_access, clippy::doc_link_with_quotes, clippy::doc_markdown, clippy::empty_enum, clippy::enum_glob_use, clippy::expl_impl_clone_on_copy, clippy::explicit_deref_methods, clippy::explicit_into_iter_loop, clippy::explicit_iter_loop, clippy::filter_map_next, clippy::flat_map_option, clippy::float_cmp, clippy::fn_params_excessive_bools, clippy::from_iter_instead_of_collect, clippy::if_not_else, clippy::ignored_unit_patterns, clippy::implicit_clone, clippy::implicit_hasher, clippy::inconsistent_struct_constructor, clippy::index_refutable_slice, clippy::inefficient_to_string, clippy::inline_always, clippy::into_iter_without_iter, clippy::invalid_upcast_comparisons, clippy::items_after_statements, clippy::iter_not_returning_iterator, clippy::iter_without_into_iter, clippy::large_digit_groups, clippy::large_futures, clippy::large_stack_arrays, clippy::large_types_passed_by_value, clippy::linkedlist, clippy::macro_use_imports, clippy::manual_assert, clippy::manual_instant_elapsed, clippy::manual_let_else, clippy::manual_ok_or, clippy::manual_string_new, clippy::many_single_char_names, clippy::map_unwrap_or, clippy::match_bool, clippy::match_on_vec_items, clippy::match_same_arms, clippy::match_wild_err_arm, clippy::match_wildcard_for_single_variants, clippy::maybe_infinite_iter, clippy::mismatching_type_param_order, clippy::missing_errors_doc, clippy::missing_fields_in_debug, clippy::missing_panics_doc, clippy::module_name_repetitions, clippy::must_use_candidate, clippy::mut_mut, clippy::naive_bytecount, clippy::needless_bitwise_bool, clippy::needless_continue, clippy::needless_for_each, clippy::needless_pass_by_value, clippy::needless_raw_string_hashes, clippy::no_effect_underscore_binding, clippy::no_mangle_with_rust_abi, clippy::option_option, clippy::ptr_as_ptr, clippy::ptr_cast_constness, clippy::range_minus_one, clippy::range_plus_one, clippy::redundant_closure_for_method_calls, clippy::redundant_else, clippy::ref_binding_to_reference, clippy::ref_option_ref, clippy::return_self_not_must_use, clippy::same_functions_in_if_condition, clippy::semicolon_if_nothing_returned, clippy::should_panic_without_expect, clippy::similar_names, clippy::single_match_else, clippy::stable_sort_primitive, clippy::string_add_assign, clippy::struct_excessive_bools, clippy::struct_field_names, clippy::too_many_lines, clippy::transmute_ptr_to_ptr, clippy::trivially_copy_pass_by_ref, clippy::unchecked_duration_subtraction, clippy::unicode_not_nfc, clippy::uninlined_format_args, clippy::unnecessary_box_returns, clippy::unnecessary_join, clippy::unnecessary_wraps, clippy::unnested_or_patterns, clippy::unreadable_literal, clippy::unsafe_derive_deserialize, clippy::unused_async, clippy::unused_self, clippy::used_underscore_binding, clippy::verbose_bit_mask, clippy::wildcard_imports, clippy::zero_sized_map_values"##, + description: r##"lint group for: clippy::bool_to_int_with_if, clippy::borrow_as_ptr, clippy::case_sensitive_file_extension_comparisons, clippy::cast_lossless, clippy::cast_possible_truncation, clippy::cast_possible_wrap, clippy::cast_precision_loss, clippy::cast_ptr_alignment, clippy::cast_sign_loss, clippy::checked_conversions, clippy::cloned_instead_of_copied, clippy::copy_iterator, clippy::default_trait_access, 
clippy::doc_link_with_quotes, clippy::doc_markdown, clippy::empty_enum, clippy::enum_glob_use, clippy::expl_impl_clone_on_copy, clippy::explicit_deref_methods, clippy::explicit_into_iter_loop, clippy::explicit_iter_loop, clippy::filter_map_next, clippy::flat_map_option, clippy::float_cmp, clippy::fn_params_excessive_bools, clippy::from_iter_instead_of_collect, clippy::if_not_else, clippy::ignored_unit_patterns, clippy::implicit_clone, clippy::implicit_hasher, clippy::inconsistent_struct_constructor, clippy::index_refutable_slice, clippy::inefficient_to_string, clippy::inline_always, clippy::into_iter_without_iter, clippy::invalid_upcast_comparisons, clippy::items_after_statements, clippy::iter_filter_is_ok, clippy::iter_filter_is_some, clippy::iter_not_returning_iterator, clippy::iter_without_into_iter, clippy::large_digit_groups, clippy::large_futures, clippy::large_stack_arrays, clippy::large_types_passed_by_value, clippy::linkedlist, clippy::macro_use_imports, clippy::manual_assert, clippy::manual_instant_elapsed, clippy::manual_is_variant_and, clippy::manual_let_else, clippy::manual_ok_or, clippy::manual_string_new, clippy::many_single_char_names, clippy::map_unwrap_or, clippy::match_bool, clippy::match_on_vec_items, clippy::match_same_arms, clippy::match_wild_err_arm, clippy::match_wildcard_for_single_variants, clippy::maybe_infinite_iter, clippy::mismatching_type_param_order, clippy::missing_errors_doc, clippy::missing_fields_in_debug, clippy::missing_panics_doc, clippy::module_name_repetitions, clippy::must_use_candidate, clippy::mut_mut, clippy::naive_bytecount, clippy::needless_bitwise_bool, clippy::needless_continue, clippy::needless_for_each, clippy::needless_pass_by_value, clippy::needless_raw_string_hashes, clippy::no_effect_underscore_binding, clippy::no_mangle_with_rust_abi, clippy::option_as_ref_cloned, clippy::option_option, clippy::ptr_as_ptr, clippy::ptr_cast_constness, clippy::pub_underscore_fields, clippy::range_minus_one, clippy::range_plus_one, clippy::redundant_closure_for_method_calls, clippy::redundant_else, clippy::ref_binding_to_reference, clippy::ref_option_ref, clippy::return_self_not_must_use, clippy::same_functions_in_if_condition, clippy::semicolon_if_nothing_returned, clippy::should_panic_without_expect, clippy::similar_names, clippy::single_match_else, clippy::stable_sort_primitive, clippy::str_split_at_newline, clippy::string_add_assign, clippy::struct_excessive_bools, clippy::struct_field_names, clippy::too_many_lines, clippy::transmute_ptr_to_ptr, clippy::trivially_copy_pass_by_ref, clippy::unchecked_duration_subtraction, clippy::unicode_not_nfc, clippy::uninlined_format_args, clippy::unnecessary_box_returns, clippy::unnecessary_join, clippy::unnecessary_wraps, clippy::unnested_or_patterns, clippy::unreadable_literal, clippy::unsafe_derive_deserialize, clippy::unused_async, clippy::unused_self, clippy::used_underscore_binding, clippy::verbose_bit_mask, clippy::wildcard_imports, clippy::zero_sized_map_values"##, }, children: &[ "clippy::bool_to_int_with_if", @@ -13705,6 +13881,8 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::into_iter_without_iter", "clippy::invalid_upcast_comparisons", "clippy::items_after_statements", + "clippy::iter_filter_is_ok", + "clippy::iter_filter_is_some", "clippy::iter_not_returning_iterator", "clippy::iter_without_into_iter", "clippy::large_digit_groups", @@ -13715,6 +13893,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::macro_use_imports", "clippy::manual_assert", "clippy::manual_instant_elapsed", 
+ "clippy::manual_is_variant_and", "clippy::manual_let_else", "clippy::manual_ok_or", "clippy::manual_string_new", @@ -13741,9 +13920,11 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::needless_raw_string_hashes", "clippy::no_effect_underscore_binding", "clippy::no_mangle_with_rust_abi", + "clippy::option_as_ref_cloned", "clippy::option_option", "clippy::ptr_as_ptr", "clippy::ptr_cast_constness", + "clippy::pub_underscore_fields", "clippy::range_minus_one", "clippy::range_plus_one", "clippy::redundant_closure_for_method_calls", @@ -13757,6 +13938,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::similar_names", "clippy::single_match_else", "clippy::stable_sort_primitive", + "clippy::str_split_at_newline", "clippy::string_add_assign", "clippy::struct_excessive_bools", "clippy::struct_field_names", @@ -13783,7 +13965,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ LintGroup { lint: Lint { label: "clippy::perf", - description: r##"lint group for: clippy::box_collection, clippy::box_default, clippy::boxed_local, clippy::cmp_owned, clippy::collapsible_str_replace, clippy::drain_collect, clippy::expect_fun_call, clippy::extend_with_drain, clippy::format_collect, clippy::format_in_format_args, clippy::iter_nth, clippy::iter_overeager_cloned, clippy::large_const_arrays, clippy::large_enum_variant, clippy::manual_memcpy, clippy::manual_retain, clippy::manual_str_repeat, clippy::manual_try_fold, clippy::map_entry, clippy::missing_spin_loop, clippy::redundant_allocation, clippy::result_large_err, clippy::single_char_pattern, clippy::slow_vector_initialization, clippy::to_string_in_format_args, clippy::unnecessary_to_owned, clippy::useless_vec, clippy::vec_init_then_push, clippy::waker_clone_wake"##, + description: r##"lint group for: clippy::box_collection, clippy::box_default, clippy::boxed_local, clippy::cmp_owned, clippy::collapsible_str_replace, clippy::drain_collect, clippy::expect_fun_call, clippy::extend_with_drain, clippy::format_collect, clippy::format_in_format_args, clippy::iter_nth, clippy::iter_overeager_cloned, clippy::large_const_arrays, clippy::large_enum_variant, clippy::manual_memcpy, clippy::manual_retain, clippy::manual_str_repeat, clippy::manual_try_fold, clippy::map_entry, clippy::missing_spin_loop, clippy::redundant_allocation, clippy::result_large_err, clippy::single_char_pattern, clippy::slow_vector_initialization, clippy::thread_local_initializer_can_be_made_const, clippy::to_string_in_format_args, clippy::unnecessary_to_owned, clippy::useless_vec, clippy::vec_init_then_push, clippy::waker_clone_wake"##, }, children: &[ "clippy::box_collection", @@ -13810,6 +13992,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::result_large_err", "clippy::single_char_pattern", "clippy::slow_vector_initialization", + "clippy::thread_local_initializer_can_be_made_const", "clippy::to_string_in_format_args", "clippy::unnecessary_to_owned", "clippy::useless_vec", @@ -13820,7 +14003,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ LintGroup { lint: Lint { label: "clippy::restriction", - description: r##"lint group for: clippy::absolute_paths, clippy::alloc_instead_of_core, clippy::allow_attributes, clippy::allow_attributes_without_reason, clippy::arithmetic_side_effects, clippy::as_conversions, clippy::as_underscore, clippy::assertions_on_result_states, clippy::big_endian_bytes, clippy::clone_on_ref_ptr, clippy::create_dir, clippy::dbg_macro, clippy::decimal_literal_representation, clippy::default_numeric_fallback, 
clippy::default_union_representation, clippy::deref_by_slicing, clippy::disallowed_script_idents, clippy::else_if_without_else, clippy::empty_drop, clippy::empty_structs_with_brackets, clippy::error_impl_error, clippy::exhaustive_enums, clippy::exhaustive_structs, clippy::exit, clippy::expect_used, clippy::filetype_is_file, clippy::float_arithmetic, clippy::float_cmp_const, clippy::fn_to_numeric_cast_any, clippy::format_push_string, clippy::get_unwrap, clippy::host_endian_bytes, clippy::if_then_some_else_none, clippy::impl_trait_in_params, clippy::implicit_return, clippy::indexing_slicing, clippy::infinite_loop, clippy::inline_asm_x86_att_syntax, clippy::inline_asm_x86_intel_syntax, clippy::integer_division, clippy::iter_over_hash_type, clippy::large_include_file, clippy::let_underscore_must_use, clippy::let_underscore_untyped, clippy::little_endian_bytes, clippy::lossy_float_literal, clippy::map_err_ignore, clippy::mem_forget, clippy::min_ident_chars, clippy::missing_assert_message, clippy::missing_asserts_for_indexing, clippy::missing_docs_in_private_items, clippy::missing_inline_in_public_items, clippy::missing_trait_methods, clippy::mixed_read_write_in_expression, clippy::mod_module_files, clippy::modulo_arithmetic, clippy::multiple_inherent_impl, clippy::multiple_unsafe_ops_per_block, clippy::mutex_atomic, clippy::needless_raw_strings, clippy::non_ascii_literal, clippy::panic, clippy::panic_in_result_fn, clippy::partial_pub_fields, clippy::pattern_type_mismatch, clippy::print_stderr, clippy::print_stdout, clippy::pub_use, clippy::pub_with_shorthand, clippy::pub_without_shorthand, clippy::question_mark_used, clippy::rc_buffer, clippy::rc_mutex, clippy::redundant_type_annotations, clippy::ref_patterns, clippy::rest_pat_in_fully_bound_structs, clippy::same_name_method, clippy::self_named_module_files, clippy::semicolon_inside_block, clippy::semicolon_outside_block, clippy::separated_literal_suffix, clippy::shadow_reuse, clippy::shadow_same, clippy::shadow_unrelated, clippy::single_call_fn, clippy::single_char_lifetime_names, clippy::std_instead_of_alloc, clippy::std_instead_of_core, clippy::str_to_string, clippy::string_add, clippy::string_lit_chars_any, clippy::string_slice, clippy::string_to_string, clippy::suspicious_xor_used_as_pow, clippy::tests_outside_test_module, clippy::todo, clippy::try_err, clippy::undocumented_unsafe_blocks, clippy::unimplemented, clippy::unnecessary_safety_comment, clippy::unnecessary_safety_doc, clippy::unnecessary_self_imports, clippy::unneeded_field_pattern, clippy::unreachable, clippy::unseparated_literal_suffix, clippy::unwrap_in_result, clippy::unwrap_used, clippy::use_debug, clippy::verbose_file_reads, clippy::wildcard_enum_match_arm"##, + description: r##"lint group for: clippy::absolute_paths, clippy::alloc_instead_of_core, clippy::allow_attributes, clippy::allow_attributes_without_reason, clippy::arithmetic_side_effects, clippy::as_conversions, clippy::as_underscore, clippy::assertions_on_result_states, clippy::big_endian_bytes, clippy::clone_on_ref_ptr, clippy::create_dir, clippy::dbg_macro, clippy::decimal_literal_representation, clippy::default_numeric_fallback, clippy::default_union_representation, clippy::deref_by_slicing, clippy::disallowed_script_idents, clippy::else_if_without_else, clippy::empty_drop, clippy::empty_enum_variants_with_brackets, clippy::empty_structs_with_brackets, clippy::error_impl_error, clippy::exhaustive_enums, clippy::exhaustive_structs, clippy::exit, clippy::expect_used, clippy::filetype_is_file, 
clippy::float_arithmetic, clippy::float_cmp_const, clippy::fn_to_numeric_cast_any, clippy::format_push_string, clippy::get_unwrap, clippy::host_endian_bytes, clippy::if_then_some_else_none, clippy::impl_trait_in_params, clippy::implicit_return, clippy::indexing_slicing, clippy::infinite_loop, clippy::inline_asm_x86_att_syntax, clippy::inline_asm_x86_intel_syntax, clippy::integer_division, clippy::iter_over_hash_type, clippy::large_include_file, clippy::let_underscore_must_use, clippy::let_underscore_untyped, clippy::little_endian_bytes, clippy::lossy_float_literal, clippy::map_err_ignore, clippy::mem_forget, clippy::min_ident_chars, clippy::missing_assert_message, clippy::missing_asserts_for_indexing, clippy::missing_docs_in_private_items, clippy::missing_inline_in_public_items, clippy::missing_trait_methods, clippy::mixed_read_write_in_expression, clippy::mod_module_files, clippy::modulo_arithmetic, clippy::multiple_inherent_impl, clippy::multiple_unsafe_ops_per_block, clippy::mutex_atomic, clippy::needless_raw_strings, clippy::non_ascii_literal, clippy::panic, clippy::panic_in_result_fn, clippy::partial_pub_fields, clippy::pattern_type_mismatch, clippy::print_stderr, clippy::print_stdout, clippy::pub_use, clippy::pub_with_shorthand, clippy::pub_without_shorthand, clippy::question_mark_used, clippy::rc_buffer, clippy::rc_mutex, clippy::redundant_type_annotations, clippy::ref_patterns, clippy::rest_pat_in_fully_bound_structs, clippy::same_name_method, clippy::self_named_module_files, clippy::semicolon_inside_block, clippy::semicolon_outside_block, clippy::separated_literal_suffix, clippy::shadow_reuse, clippy::shadow_same, clippy::shadow_unrelated, clippy::single_call_fn, clippy::single_char_lifetime_names, clippy::std_instead_of_alloc, clippy::std_instead_of_core, clippy::str_to_string, clippy::string_add, clippy::string_lit_chars_any, clippy::string_slice, clippy::string_to_string, clippy::suspicious_xor_used_as_pow, clippy::tests_outside_test_module, clippy::todo, clippy::try_err, clippy::undocumented_unsafe_blocks, clippy::unimplemented, clippy::unnecessary_safety_comment, clippy::unnecessary_safety_doc, clippy::unnecessary_self_imports, clippy::unneeded_field_pattern, clippy::unreachable, clippy::unseparated_literal_suffix, clippy::unwrap_in_result, clippy::unwrap_used, clippy::use_debug, clippy::verbose_file_reads, clippy::wildcard_enum_match_arm"##, }, children: &[ "clippy::absolute_paths", @@ -13842,6 +14025,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::disallowed_script_idents", "clippy::else_if_without_else", "clippy::empty_drop", + "clippy::empty_enum_variants_with_brackets", "clippy::empty_structs_with_brackets", "clippy::error_impl_error", "clippy::exhaustive_enums", @@ -13939,7 +14123,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ LintGroup { lint: Lint { label: "clippy::style", - description: r##"lint group for: clippy::assertions_on_constants, clippy::assign_op_pattern, clippy::blocks_in_conditions, clippy::bool_assert_comparison, clippy::borrow_interior_mutable_const, clippy::builtin_type_shadow, clippy::bytes_nth, clippy::chars_last_cmp, clippy::chars_next_cmp, clippy::cmp_null, clippy::collapsible_else_if, clippy::collapsible_if, clippy::collapsible_match, clippy::comparison_chain, clippy::comparison_to_empty, clippy::declare_interior_mutable_const, clippy::default_instead_of_iter_empty, clippy::disallowed_macros, clippy::disallowed_methods, clippy::disallowed_names, clippy::disallowed_types, clippy::double_must_use, clippy::double_neg, 
clippy::duplicate_underscore_argument, clippy::enum_variant_names, clippy::err_expect, clippy::excessive_precision, clippy::field_reassign_with_default, clippy::filter_map_bool_then, clippy::fn_to_numeric_cast, clippy::fn_to_numeric_cast_with_truncation, clippy::for_kv_map, clippy::from_over_into, clippy::from_str_radix_10, clippy::get_first, clippy::if_same_then_else, clippy::implicit_saturating_add, clippy::implicit_saturating_sub, clippy::inconsistent_digit_grouping, clippy::infallible_destructuring_match, clippy::inherent_to_string, clippy::init_numbered_fields, clippy::into_iter_on_ref, clippy::is_digit_ascii_radix, clippy::items_after_test_module, clippy::iter_cloned_collect, clippy::iter_next_slice, clippy::iter_nth_zero, clippy::iter_skip_next, clippy::just_underscores_and_digits, clippy::len_without_is_empty, clippy::len_zero, clippy::let_and_return, clippy::let_unit_value, clippy::main_recursion, clippy::manual_async_fn, clippy::manual_bits, clippy::manual_is_ascii_check, clippy::manual_is_finite, clippy::manual_is_infinite, clippy::manual_map, clippy::manual_next_back, clippy::manual_non_exhaustive, clippy::manual_range_contains, clippy::manual_saturating_arithmetic, clippy::manual_while_let_some, clippy::map_clone, clippy::map_collect_result_unit, clippy::match_like_matches_macro, clippy::match_overlapping_arm, clippy::match_ref_pats, clippy::match_result_ok, clippy::mem_replace_option_with_none, clippy::mem_replace_with_default, clippy::missing_enforced_import_renames, clippy::missing_safety_doc, clippy::mixed_case_hex_literals, clippy::module_inception, clippy::must_use_unit, clippy::mut_mutex_lock, clippy::needless_borrow, clippy::needless_borrows_for_generic_args, clippy::needless_doctest_main, clippy::needless_else, clippy::needless_late_init, clippy::needless_parens_on_range_literals, clippy::needless_pub_self, clippy::needless_range_loop, clippy::needless_return, clippy::needless_return_with_question_mark, clippy::neg_multiply, clippy::new_ret_no_self, clippy::new_without_default, clippy::non_minimal_cfg, clippy::obfuscated_if_else, clippy::ok_expect, clippy::op_ref, clippy::option_map_or_err_ok, clippy::option_map_or_none, clippy::partialeq_to_none, clippy::print_literal, clippy::print_with_newline, clippy::println_empty_string, clippy::ptr_arg, clippy::ptr_eq, clippy::question_mark, clippy::redundant_closure, clippy::redundant_field_names, clippy::redundant_pattern, clippy::redundant_pattern_matching, clippy::redundant_static_lifetimes, clippy::result_map_or_into_option, clippy::result_unit_err, clippy::same_item_push, clippy::self_named_constructors, clippy::should_implement_trait, clippy::single_char_add_str, clippy::single_component_path_imports, clippy::single_match, clippy::string_extend_chars, clippy::tabs_in_doc_comments, clippy::to_digit_is_some, clippy::toplevel_ref_arg, clippy::trim_split_whitespace, clippy::unnecessary_fallible_conversions, clippy::unnecessary_fold, clippy::unnecessary_lazy_evaluations, clippy::unnecessary_mut_passed, clippy::unnecessary_owned_empty_strings, clippy::unsafe_removed_from_name, clippy::unused_enumerate_index, clippy::unused_unit, clippy::unusual_byte_groupings, clippy::unwrap_or_default, clippy::upper_case_acronyms, clippy::while_let_on_iterator, clippy::write_literal, clippy::write_with_newline, clippy::writeln_empty_string, clippy::wrong_self_convention, clippy::zero_ptr"##, + description: r##"lint group for: clippy::assertions_on_constants, clippy::assign_op_pattern, clippy::blocks_in_conditions, 
clippy::bool_assert_comparison, clippy::borrow_interior_mutable_const, clippy::builtin_type_shadow, clippy::bytes_nth, clippy::chars_last_cmp, clippy::chars_next_cmp, clippy::cmp_null, clippy::collapsible_else_if, clippy::collapsible_if, clippy::collapsible_match, clippy::comparison_chain, clippy::comparison_to_empty, clippy::declare_interior_mutable_const, clippy::default_instead_of_iter_empty, clippy::disallowed_macros, clippy::disallowed_methods, clippy::disallowed_names, clippy::disallowed_types, clippy::double_must_use, clippy::double_neg, clippy::duplicate_underscore_argument, clippy::enum_variant_names, clippy::err_expect, clippy::excessive_precision, clippy::field_reassign_with_default, clippy::filter_map_bool_then, clippy::fn_to_numeric_cast, clippy::fn_to_numeric_cast_with_truncation, clippy::for_kv_map, clippy::from_over_into, clippy::from_str_radix_10, clippy::get_first, clippy::if_same_then_else, clippy::implicit_saturating_add, clippy::implicit_saturating_sub, clippy::inconsistent_digit_grouping, clippy::infallible_destructuring_match, clippy::inherent_to_string, clippy::init_numbered_fields, clippy::into_iter_on_ref, clippy::is_digit_ascii_radix, clippy::items_after_test_module, clippy::iter_cloned_collect, clippy::iter_next_slice, clippy::iter_nth_zero, clippy::iter_skip_next, clippy::just_underscores_and_digits, clippy::len_without_is_empty, clippy::len_zero, clippy::let_and_return, clippy::let_unit_value, clippy::main_recursion, clippy::manual_async_fn, clippy::manual_bits, clippy::manual_is_ascii_check, clippy::manual_is_finite, clippy::manual_is_infinite, clippy::manual_map, clippy::manual_next_back, clippy::manual_non_exhaustive, clippy::manual_range_contains, clippy::manual_saturating_arithmetic, clippy::manual_while_let_some, clippy::map_clone, clippy::map_collect_result_unit, clippy::match_like_matches_macro, clippy::match_overlapping_arm, clippy::match_ref_pats, clippy::match_result_ok, clippy::mem_replace_option_with_none, clippy::mem_replace_with_default, clippy::missing_enforced_import_renames, clippy::missing_safety_doc, clippy::mixed_case_hex_literals, clippy::module_inception, clippy::must_use_unit, clippy::mut_mutex_lock, clippy::needless_borrow, clippy::needless_borrows_for_generic_args, clippy::needless_doctest_main, clippy::needless_else, clippy::needless_late_init, clippy::needless_parens_on_range_literals, clippy::needless_pub_self, clippy::needless_range_loop, clippy::needless_return, clippy::needless_return_with_question_mark, clippy::neg_multiply, clippy::new_ret_no_self, clippy::new_without_default, clippy::non_minimal_cfg, clippy::obfuscated_if_else, clippy::ok_expect, clippy::op_ref, clippy::option_map_or_err_ok, clippy::option_map_or_none, clippy::partialeq_to_none, clippy::print_literal, clippy::print_with_newline, clippy::println_empty_string, clippy::ptr_arg, clippy::ptr_eq, clippy::question_mark, clippy::redundant_closure, clippy::redundant_field_names, clippy::redundant_pattern, clippy::redundant_pattern_matching, clippy::redundant_static_lifetimes, clippy::result_map_or_into_option, clippy::result_unit_err, clippy::same_item_push, clippy::self_named_constructors, clippy::should_implement_trait, clippy::single_char_add_str, clippy::single_component_path_imports, clippy::single_match, clippy::string_extend_chars, clippy::tabs_in_doc_comments, clippy::to_digit_is_some, clippy::to_string_trait_impl, clippy::toplevel_ref_arg, clippy::trim_split_whitespace, clippy::unnecessary_fallible_conversions, clippy::unnecessary_fold, 
clippy::unnecessary_lazy_evaluations, clippy::unnecessary_mut_passed, clippy::unnecessary_owned_empty_strings, clippy::unsafe_removed_from_name, clippy::unused_enumerate_index, clippy::unused_unit, clippy::unusual_byte_groupings, clippy::unwrap_or_default, clippy::upper_case_acronyms, clippy::while_let_on_iterator, clippy::write_literal, clippy::write_with_newline, clippy::writeln_empty_string, clippy::wrong_self_convention, clippy::zero_ptr"##, }, children: &[ "clippy::assertions_on_constants", @@ -14064,6 +14248,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::string_extend_chars", "clippy::tabs_in_doc_comments", "clippy::to_digit_is_some", + "clippy::to_string_trait_impl", "clippy::toplevel_ref_arg", "clippy::trim_split_whitespace", "clippy::unnecessary_fallible_conversions", @@ -14088,7 +14273,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ LintGroup { lint: Lint { label: "clippy::suspicious", - description: r##"lint group for: clippy::almost_complete_range, clippy::arc_with_non_send_sync, clippy::await_holding_invalid_type, clippy::await_holding_lock, clippy::await_holding_refcell_ref, clippy::blanket_clippy_restriction_lints, clippy::cast_abs_to_unsigned, clippy::cast_enum_constructor, clippy::cast_enum_truncation, clippy::cast_nan_to_int, clippy::cast_slice_from_raw_parts, clippy::crate_in_macro_def, clippy::drop_non_drop, clippy::duplicate_mod, clippy::empty_loop, clippy::float_equality_without_abs, clippy::forget_non_drop, clippy::four_forward_slashes, clippy::from_raw_with_void_ptr, clippy::ineffective_open_options, clippy::iter_out_of_bounds, clippy::join_absolute_paths, clippy::let_underscore_future, clippy::lines_filter_map_ok, clippy::maybe_misused_cfg, clippy::misnamed_getters, clippy::misrefactored_assign_op, clippy::multi_assignments, clippy::mut_range_bound, clippy::mutable_key_type, clippy::no_effect_replace, clippy::non_canonical_clone_impl, clippy::non_canonical_partial_ord_impl, clippy::octal_escapes, clippy::path_ends_with_ext, clippy::permissions_set_readonly_false, clippy::print_in_format_impl, clippy::rc_clone_in_vec_init, clippy::repeat_vec_with_capacity, clippy::single_range_in_vec_init, clippy::size_of_ref, clippy::suspicious_arithmetic_impl, clippy::suspicious_assignment_formatting, clippy::suspicious_command_arg_space, clippy::suspicious_doc_comments, clippy::suspicious_else_formatting, clippy::suspicious_map, clippy::suspicious_op_assign_impl, clippy::suspicious_to_owned, clippy::suspicious_unary_op_formatting, clippy::swap_ptr_to_ref, clippy::test_attr_in_doctest, clippy::type_id_on_box, clippy::unconditional_recursion"##, + description: r##"lint group for: clippy::almost_complete_range, clippy::arc_with_non_send_sync, clippy::await_holding_invalid_type, clippy::await_holding_lock, clippy::await_holding_refcell_ref, clippy::blanket_clippy_restriction_lints, clippy::cast_abs_to_unsigned, clippy::cast_enum_constructor, clippy::cast_enum_truncation, clippy::cast_nan_to_int, clippy::cast_slice_from_raw_parts, clippy::crate_in_macro_def, clippy::drop_non_drop, clippy::duplicate_mod, clippy::empty_loop, clippy::float_equality_without_abs, clippy::forget_non_drop, clippy::four_forward_slashes, clippy::from_raw_with_void_ptr, clippy::incompatible_msrv, clippy::ineffective_open_options, clippy::iter_out_of_bounds, clippy::join_absolute_paths, clippy::let_underscore_future, clippy::lines_filter_map_ok, clippy::maybe_misused_cfg, clippy::misnamed_getters, clippy::misrefactored_assign_op, clippy::multi_assignments, clippy::mut_range_bound, 
clippy::mutable_key_type, clippy::no_effect_replace, clippy::non_canonical_clone_impl, clippy::non_canonical_partial_ord_impl, clippy::octal_escapes, clippy::path_ends_with_ext, clippy::permissions_set_readonly_false, clippy::print_in_format_impl, clippy::rc_clone_in_vec_init, clippy::repeat_vec_with_capacity, clippy::single_range_in_vec_init, clippy::size_of_ref, clippy::suspicious_arithmetic_impl, clippy::suspicious_assignment_formatting, clippy::suspicious_command_arg_space, clippy::suspicious_doc_comments, clippy::suspicious_else_formatting, clippy::suspicious_map, clippy::suspicious_op_assign_impl, clippy::suspicious_open_options, clippy::suspicious_to_owned, clippy::suspicious_unary_op_formatting, clippy::swap_ptr_to_ref, clippy::test_attr_in_doctest, clippy::type_id_on_box, clippy::unconditional_recursion, clippy::unnecessary_result_map_or_else"##, }, children: &[ "clippy::almost_complete_range", @@ -14110,6 +14295,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::forget_non_drop", "clippy::four_forward_slashes", "clippy::from_raw_with_void_ptr", + "clippy::incompatible_msrv", "clippy::ineffective_open_options", "clippy::iter_out_of_bounds", "clippy::join_absolute_paths", @@ -14139,12 +14325,14 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::suspicious_else_formatting", "clippy::suspicious_map", "clippy::suspicious_op_assign_impl", + "clippy::suspicious_open_options", "clippy::suspicious_to_owned", "clippy::suspicious_unary_op_formatting", "clippy::swap_ptr_to_ref", "clippy::test_attr_in_doctest", "clippy::type_id_on_box", "clippy::unconditional_recursion", + "clippy::unnecessary_result_map_or_else", ], }, ]; diff --git a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs index 9363bdfa14b2a..0b5ad7060e043 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs @@ -35,7 +35,7 @@ pub fn pick_token(mut tokens: TokenAtOffset) -> Option /// Converts the mod path struct into its ast representation. 
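// --- Editorial sketch, not part of the patch ------------------------------------
// The ide-db hunks below all apply the same mechanical migration: the old
// `profile::span` guard becomes an entered `tracing` span. Schematically:
//
//     let _p = profile::span("mod_path_to_ast");                        // before
//     let _p = tracing::span!(tracing::Level::INFO, "mod_path_to_ast")  // after
//         .entered();
//
// Binding the `EnteredSpan` guard to `_p` keeps the span entered until it is
// dropped at the end of the enclosing scope, mirroring the old profiler guard.
// ---------------------------------------------------------------------------------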
pub fn mod_path_to_ast(path: &hir::ModPath) -> ast::Path { - let _p = profile::span("mod_path_to_ast"); + let _p = tracing::span!(tracing::Level::INFO, "mod_path_to_ast").entered(); let mut segments = Vec::new(); let mut is_abs = false; diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs index 089bd44c2a5da..cb3f01f345825 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs @@ -208,7 +208,8 @@ impl ImportAssets { prefer_no_std: bool, prefer_prelude: bool, ) -> impl Iterator { - let _p = profile::span("import_assets::search_for_imports"); + let _p = + tracing::span!(tracing::Level::INFO, "import_assets::search_for_imports").entered(); self.search_for(sema, Some(prefix_kind), prefer_no_std, prefer_prelude) } @@ -219,7 +220,8 @@ impl ImportAssets { prefer_no_std: bool, prefer_prelude: bool, ) -> impl Iterator { - let _p = profile::span("import_assets::search_for_relative_paths"); + let _p = tracing::span!(tracing::Level::INFO, "import_assets::search_for_relative_paths") + .entered(); self.search_for(sema, None, prefer_no_std, prefer_prelude) } @@ -260,7 +262,7 @@ impl ImportAssets { prefer_no_std: bool, prefer_prelude: bool, ) -> impl Iterator { - let _p = profile::span("import_assets::search_for"); + let _p = tracing::span!(tracing::Level::INFO, "import_assets::search_for").entered(); let scope = match sema.scope(&self.candidate_node) { Some(it) => it, @@ -305,7 +307,7 @@ impl ImportAssets { } fn scope_definitions(&self, sema: &Semantics<'_, RootDatabase>) -> FxHashSet { - let _p = profile::span("import_assets::scope_definitions"); + let _p = tracing::span!(tracing::Level::INFO, "import_assets::scope_definitions").entered(); let mut scope_definitions = FxHashSet::default(); if let Some(scope) = sema.scope(&self.candidate_node) { scope.process_all_names(&mut |_, scope_def| { @@ -323,7 +325,8 @@ fn path_applicable_imports( mod_path: impl Fn(ItemInNs) -> Option + Copy, scope_filter: impl Fn(ItemInNs) -> bool + Copy, ) -> FxHashSet { - let _p = profile::span("import_assets::path_applicable_imports"); + let _p = + tracing::span!(tracing::Level::INFO, "import_assets::path_applicable_imports").entered(); match &path_candidate.qualifier { None => { @@ -370,7 +373,7 @@ fn import_for_item( original_item: ItemInNs, scope_filter: impl Fn(ItemInNs) -> bool, ) -> Option { - let _p = profile::span("import_assets::import_for_item"); + let _p = tracing::span!(tracing::Level::INFO, "import_assets::import_for_item").entered(); let [first_segment, ..] 
= unresolved_qualifier else { return None }; let item_as_assoc = item_as_assoc(db, original_item); @@ -504,7 +507,8 @@ fn trait_applicable_items( mod_path: impl Fn(ItemInNs) -> Option, scope_filter: impl Fn(hir::Trait) -> bool, ) -> FxHashSet { - let _p = profile::span("import_assets::trait_applicable_items"); + let _p = + tracing::span!(tracing::Level::INFO, "import_assets::trait_applicable_items").entered(); let db = sema.db; diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs index 09b4a1c1baac0..f29f91eea84f5 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs @@ -17,6 +17,7 @@ use syntax::{ use crate::{ imports::merge_imports::{ common_prefix, eq_attrs, eq_visibility, try_merge_imports, use_tree_cmp, MergeBehavior, + NormalizationStyle, }, RootDatabase, }; @@ -40,6 +41,15 @@ pub enum ImportGranularity { One, } +impl From for NormalizationStyle { + fn from(granularity: ImportGranularity) -> Self { + match granularity { + ImportGranularity::One => NormalizationStyle::One, + _ => NormalizationStyle::Default, + } + } +} + #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub struct InsertUseConfig { pub granularity: ImportGranularity, @@ -184,7 +194,7 @@ fn insert_use_with_alias_option( cfg: &InsertUseConfig, alias: Option, ) { - let _p = profile::span("insert_use"); + let _p = tracing::span!(tracing::Level::INFO, "insert_use").entered(); let mut mb = match cfg.granularity { ImportGranularity::Crate => Some(MergeBehavior::Crate), ImportGranularity::Module => Some(MergeBehavior::Module), diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs index 2ed6069887138..6b0fecae26758 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs @@ -635,7 +635,7 @@ use std::io;", check_one( "std::io", r"use {std::fmt::{Result, Display}};", - r"use {std::{fmt::{Result, Display}, io}};", + r"use {std::{fmt::{Display, Result}, io}};", ); } @@ -650,12 +650,12 @@ fn merge_groups_full() { check_crate( "std::io", r"use std::fmt::{Result, Display};", - r"use std::{fmt::{Result, Display}, io};", + r"use std::{fmt::{Display, Result}, io};", ); check_one( "std::io", r"use {std::fmt::{Result, Display}};", - r"use {std::{fmt::{Result, Display}, io}};", + r"use {std::{fmt::{Display, Result}, io}};", ); } @@ -749,12 +749,12 @@ fn merge_groups_full_nested_deep() { check_crate( "std::foo::bar::quux::Baz", r"use std::foo::bar::{Qux, quux::{Fez, Fizz}};", - r"use std::foo::bar::{Qux, quux::{Baz, Fez, Fizz}};", + r"use std::foo::bar::{quux::{Baz, Fez, Fizz}, Qux};", ); check_one( "std::foo::bar::quux::Baz", r"use {std::foo::bar::{Qux, quux::{Fez, Fizz}}};", - r"use {std::foo::bar::{Qux, quux::{Baz, Fez, Fizz}}};", + r"use {std::foo::bar::{quux::{Baz, Fez, Fizz}, Qux}};", ); } @@ -763,7 +763,7 @@ fn merge_groups_full_nested_long() { check_crate( "std::foo::bar::Baz", r"use std::{foo::bar::Qux};", - r"use std::{foo::bar::{Baz, Qux}};", + r"use std::foo::bar::{Baz, Qux};", ); } @@ -772,12 +772,12 @@ fn merge_groups_last_nested_long() { check_crate( "std::foo::bar::Baz", r"use std::{foo::bar::Qux};", - r"use std::{foo::bar::{Baz, Qux}};", + r"use std::foo::bar::{Baz, Qux};", ); check_one( "std::foo::bar::Baz", r"use {std::{foo::bar::Qux}};", - r"use 
{std::{foo::bar::{Baz, Qux}}};", + r"use {std::foo::bar::{Baz, Qux}};", ); } @@ -898,7 +898,7 @@ fn merge_glob() { r" use syntax::{SyntaxKind::*};", r" -use syntax::{SyntaxKind::{self, *}};", +use syntax::SyntaxKind::{self, *};", ) } @@ -907,7 +907,7 @@ fn merge_glob_nested() { check_crate( "foo::bar::quux::Fez", r"use foo::bar::{Baz, quux::*};", - r"use foo::bar::{Baz, quux::{Fez, *}};", + r"use foo::bar::{quux::{Fez, *}, Baz};", ) } @@ -1211,7 +1211,7 @@ fn insert_with_renamed_import_complex_use() { use self::foo::{self, Foo as _, Bar}; "#, r#" -use self::foo::{self, Foo, Bar}; +use self::foo::{self, Bar, Foo}; "#, &InsertUseConfig { granularity: ImportGranularity::Crate, diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs index 7ec38c317df30..b153aafa0e179 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs @@ -1,15 +1,17 @@ //! Handle syntactic aspects of merging UseTrees. use std::cmp::Ordering; -use std::iter::empty; use itertools::{EitherOrBoth, Itertools}; use parser::T; use stdx::is_upper_snake_case; use syntax::{ algo, - ast::{self, make, AstNode, HasAttrs, HasName, HasVisibility, PathSegmentKind}, + ast::{ + self, edit_in_place::Removable, make, AstNode, HasAttrs, HasName, HasVisibility, + PathSegmentKind, + }, ted::{self, Position}, - Direction, + Direction, SyntaxElement, }; use crate::syntax_helpers::node_ext::vis_eq; @@ -58,6 +60,10 @@ pub fn try_merge_imports( let lhs_tree = lhs.use_tree()?; let rhs_tree = rhs.use_tree()?; try_merge_trees_mut(&lhs_tree, &rhs_tree, merge_behavior)?; + + // Ignore `None` result because normalization should not affect the merge result. + try_normalize_use_tree_mut(&lhs_tree, merge_behavior.into()); + Some(lhs) } @@ -71,6 +77,10 @@ pub fn try_merge_trees( let lhs = lhs.clone_subtree().clone_for_update(); let rhs = rhs.clone_subtree().clone_for_update(); try_merge_trees_mut(&lhs, &rhs, merge)?; + + // Ignore `None` result because normalization should not affect the merge result. + try_normalize_use_tree_mut(&lhs, merge.into()); + Some(lhs) } @@ -173,61 +183,301 @@ fn recursive_merge(lhs: &ast::UseTree, rhs: &ast::UseTree, merge: MergeBehavior) } Err(insert_idx) => { use_trees.insert(insert_idx, rhs_t.clone()); - match lhs.use_tree_list() { - // Creates a new use tree list with the item. - None => lhs.get_or_create_use_tree_list().add_use_tree(rhs_t), - // Recreates the use tree list with sorted items (see `use_tree_cmp` doc). - Some(use_tree_list) => { - if use_tree_list.l_curly_token().is_none() { - ted::insert_raw( - Position::first_child_of(use_tree_list.syntax()), - make::token(T!['{']), - ); - } - if use_tree_list.r_curly_token().is_none() { - ted::insert_raw( - Position::last_child_of(use_tree_list.syntax()), - make::token(T!['}']), - ); - } + // We simply add the use tree to the end of tree list. Ordering of use trees + // and imports is done by the `try_normalize_*` functions. The sorted `use_trees` + // vec is only used for binary search. + lhs.get_or_create_use_tree_list().add_use_tree(rhs_t); + } + } + } + Some(()) +} - let mut elements = Vec::new(); - for (idx, tree) in use_trees.iter().enumerate() { - if idx > 0 { - elements.push(make::token(T![,]).into()); - elements.push(make::tokens::single_space().into()); - } - elements.push(tree.syntax().clone().into()); - } +/// Style to follow when normalizing a use tree. 
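// --- Editorial sketch, not part of the patch ------------------------------------
// With normalization wired into the merge entry points above, the caller-visible
// result of `try_merge_imports` matches the updated insert_use test expectations
// earlier in this patch (single-child braces unwrapped, subtrees ordered). A
// minimal sketch, relying on this module's existing `syntax::ast`/`AstNode`
// imports; the `parse_use` helper is a hypothetical stand-in, and only
// `try_merge_imports` and `MergeBehavior` come from this module:
fn merge_example() -> Option<ast::Use> {
    // Hypothetical helper: pull the first `use` item out of a parsed file.
    let parse_use = |src: &str| -> Option<ast::Use> {
        let file = ast::SourceFile::parse(src).tree();
        file.syntax().descendants().find_map(ast::Use::cast)
    };
    let lhs = parse_use("use std::fmt::{Result, Display};")?;
    let rhs = parse_use("use std::io;")?;
    // Expected result (per the updated `merge_groups_full` test):
    //     use std::{fmt::{Display, Result}, io};
    try_merge_imports(&lhs, &rhs, MergeBehavior::Crate)
}
// ---------------------------------------------------------------------------------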
+#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum NormalizationStyle { + /// Merges all descendant use tree lists with only one child use tree into their parent use tree. + /// + /// Examples: + /// - `foo::{bar::{Qux}}` -> `foo::bar::Qux` + /// - `foo::{bar::{self}}` -> `foo::bar` + /// - `{foo::bar}` -> `foo::bar` + Default, + /// Same as default but wraps the root use tree in a use tree list. + /// + /// Examples: + /// - `foo::{bar::{Qux}}` -> `{foo::bar::Qux}` + /// - `foo::{bar::{self}}` -> `{foo::bar}` + /// - `{foo::bar}` -> `{foo::bar}` + One, +} + +impl From for NormalizationStyle { + fn from(mb: MergeBehavior) -> Self { + match mb { + MergeBehavior::One => NormalizationStyle::One, + _ => NormalizationStyle::Default, + } + } +} + +/// Normalizes a use item by: +/// - Ordering all use trees +/// - Merging use trees with common prefixes +/// - Removing redundant braces based on the specified normalization style +/// (see [`NormalizationStyle`] doc) +/// +/// Examples: +/// +/// Using the "Default" normalization style +/// +/// - `foo::{bar::Qux, bar::{self}}` -> `foo::bar::{self, Qux}` +/// - `foo::bar::{self}` -> `foo::bar` +/// - `{foo::bar}` -> `foo::bar` +/// +/// Using the "One" normalization style +/// +/// - `foo::{bar::Qux, bar::{self}}` -> `{foo::bar::{self, Qux}}` +/// - `foo::bar::{self}` -> `{foo::bar}` +/// - `foo::bar` -> `{foo::bar}` +pub fn try_normalize_import(use_item: &ast::Use, style: NormalizationStyle) -> Option { + let use_item = use_item.clone_subtree().clone_for_update(); + try_normalize_use_tree_mut(&use_item.use_tree()?, style)?; + Some(use_item) +} + +/// Normalizes a use tree (see [`try_normalize_import`] doc). +pub fn try_normalize_use_tree( + use_tree: &ast::UseTree, + style: NormalizationStyle, +) -> Option { + let use_tree = use_tree.clone_subtree().clone_for_update(); + try_normalize_use_tree_mut(&use_tree, style)?; + Some(use_tree) +} + +pub fn try_normalize_use_tree_mut( + use_tree: &ast::UseTree, + style: NormalizationStyle, +) -> Option<()> { + if style == NormalizationStyle::One { + let mut modified = false; + modified |= use_tree.wrap_in_tree_list().is_some(); + modified |= recursive_normalize(use_tree, style).is_some(); + if !modified { + // Either the use tree was already normalized or its semantically empty. + return None; + } + } else { + recursive_normalize(use_tree, NormalizationStyle::Default)?; + } + Some(()) +} - let start = use_tree_list - .l_curly_token() - .and_then(|l_curly| { - algo::non_trivia_sibling(l_curly.into(), Direction::Next) - }) - .filter(|it| it.kind() != T!['}']); - let end = use_tree_list - .r_curly_token() - .and_then(|r_curly| { - algo::non_trivia_sibling(r_curly.into(), Direction::Prev) - }) - .filter(|it| it.kind() != T!['{']); - if let Some((start, end)) = start.zip(end) { - // Attempt to insert elements while preserving preceding and trailing trivia. - ted::replace_all(start..=end, elements); +/// Recursively normalizes a use tree and its subtrees (if any). 
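// --- Editorial usage note, not part of the patch ---------------------------------
// `try_normalize_import` above is the public entry point. A hedged sketch of the
// two styles, reusing the hypothetical `parse_use` helper from the earlier sketch:
//
//     let import = parse_use("use foo::{bar::{self}};")?;
//     try_normalize_import(&import, NormalizationStyle::Default);
//     // -> Some(use foo::bar;)
//     try_normalize_import(&import, NormalizationStyle::One);
//     // -> Some(use {foo::bar};)
//
// Callers that insert imports pick the style via the `From<ImportGranularity>`
// impl added in insert_use.rs: `ImportGranularity::One` maps to
// `NormalizationStyle::One`, everything else to `Default`.
// ---------------------------------------------------------------------------------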
+fn recursive_normalize(use_tree: &ast::UseTree, style: NormalizationStyle) -> Option<()> { + let use_tree_list = use_tree.use_tree_list()?; + let merge_subtree_into_parent_tree = |single_subtree: &ast::UseTree| { + let merged_path = match (use_tree.path(), single_subtree.path()) { + (None, None) => None, + (Some(outer), None) => Some(outer), + (None, Some(inner)) if path_is_self(&inner) => None, + (None, Some(inner)) => Some(inner), + (Some(outer), Some(inner)) if path_is_self(&inner) => Some(outer), + (Some(outer), Some(inner)) => Some(make::path_concat(outer, inner).clone_for_update()), + }; + if merged_path.is_some() + || single_subtree.use_tree_list().is_some() + || single_subtree.star_token().is_some() + { + ted::remove_all_iter(use_tree.syntax().children_with_tokens()); + if let Some(path) = merged_path { + ted::insert_raw(Position::first_child_of(use_tree.syntax()), path.syntax()); + if single_subtree.use_tree_list().is_some() || single_subtree.star_token().is_some() + { + ted::insert_raw( + Position::last_child_of(use_tree.syntax()), + make::token(T![::]), + ); + } + } + if let Some(inner_use_tree_list) = single_subtree.use_tree_list() { + ted::insert_raw( + Position::last_child_of(use_tree.syntax()), + inner_use_tree_list.syntax(), + ); + } else if single_subtree.star_token().is_some() { + ted::insert_raw(Position::last_child_of(use_tree.syntax()), make::token(T![*])); + } else if let Some(rename) = single_subtree.rename() { + ted::insert_raw( + Position::last_child_of(use_tree.syntax()), + make::tokens::single_space(), + ); + ted::insert_raw(Position::last_child_of(use_tree.syntax()), rename.syntax()); + } + Some(()) + } else { + // Bail on semantically empty use trees. + None + } + }; + let one_style_tree_list = |subtree: &ast::UseTree| match ( + subtree.path().is_none() && subtree.star_token().is_none() && subtree.rename().is_none(), + subtree.use_tree_list(), + ) { + (true, tree_list) => tree_list, + _ => None, + }; + let add_element_to_list = |elem: SyntaxElement, elements: &mut Vec| { + if !elements.is_empty() { + elements.push(make::token(T![,]).into()); + elements.push(make::tokens::single_space().into()); + } + elements.push(elem); + }; + if let Some((single_subtree,)) = use_tree_list.use_trees().collect_tuple() { + if style == NormalizationStyle::One { + // Only normalize descendant subtrees if the normalization style is "one". + recursive_normalize(&single_subtree, NormalizationStyle::Default)?; + } else { + // Otherwise, merge the single subtree into it's parent (if possible) + // and then normalize the result. + merge_subtree_into_parent_tree(&single_subtree)?; + recursive_normalize(use_tree, style); + } + } else { + // Tracks whether any changes have been made to the use tree. + let mut modified = false; + + // Recursively un-nests (if necessary) and then normalizes each subtree in the tree list. 
+ for subtree in use_tree_list.use_trees() { + if let Some(one_tree_list) = one_style_tree_list(&subtree) { + let mut elements = Vec::new(); + let mut one_tree_list_iter = one_tree_list.use_trees(); + let mut prev_skipped = Vec::new(); + loop { + let mut prev_skipped_iter = prev_skipped.into_iter(); + let mut curr_skipped = Vec::new(); + + while let Some(sub_sub_tree) = + one_tree_list_iter.next().or(prev_skipped_iter.next()) + { + if let Some(sub_one_tree_list) = one_style_tree_list(&sub_sub_tree) { + curr_skipped.extend(sub_one_tree_list.use_trees()); } else { - let new_use_tree_list = make::use_tree_list(empty()).clone_for_update(); - let trees_pos = match new_use_tree_list.l_curly_token() { - Some(l_curly) => Position::after(l_curly), - None => Position::last_child_of(new_use_tree_list.syntax()), - }; - ted::insert_all_raw(trees_pos, elements); - ted::replace(use_tree_list.syntax(), new_use_tree_list.syntax()); + modified |= + recursive_normalize(&sub_sub_tree, NormalizationStyle::Default) + .is_some(); + add_element_to_list( + sub_sub_tree.syntax().clone().into(), + &mut elements, + ); } } + + if curr_skipped.is_empty() { + // Un-nesting is complete. + break; + } + prev_skipped = curr_skipped; + } + + // Either removes the subtree (if its semantically empty) or replaces it with + // the un-nested elements. + if elements.is_empty() { + subtree.remove(); + } else { + ted::replace_with_many(subtree.syntax(), elements); } + modified = true; + } else { + modified |= recursive_normalize(&subtree, NormalizationStyle::Default).is_some(); } } + + // Merge all merge-able subtrees. + let mut tree_list_iter = use_tree_list.use_trees(); + let mut anchor = tree_list_iter.next()?; + let mut prev_skipped = Vec::new(); + loop { + let mut has_merged = false; + let mut prev_skipped_iter = prev_skipped.into_iter(); + let mut next_anchor = None; + let mut curr_skipped = Vec::new(); + + while let Some(candidate) = tree_list_iter.next().or(prev_skipped_iter.next()) { + let result = try_merge_trees_mut(&anchor, &candidate, MergeBehavior::Crate); + if result.is_some() { + // Remove merged subtree. + candidate.remove(); + has_merged = true; + } else if next_anchor.is_none() { + next_anchor = Some(candidate); + } else { + curr_skipped.push(candidate); + } + } + + if has_merged { + // Normalize the merge result. + recursive_normalize(&anchor, NormalizationStyle::Default); + modified = true; + } + + let (Some(next_anchor), true) = (next_anchor, !curr_skipped.is_empty()) else { + // Merging is complete. + break; + }; + + // Try to merge the remaining subtrees in the next iteration. + anchor = next_anchor; + prev_skipped = curr_skipped; + } + + let mut subtrees: Vec<_> = use_tree_list.use_trees().collect(); + // Merge the remaining subtree into its parent, if its only one and + // the normalization style is not "one". + if subtrees.len() == 1 && style != NormalizationStyle::One { + modified |= merge_subtree_into_parent_tree(&subtrees[0]).is_some(); + } + // Order the remaining subtrees (if necessary). 
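// [Editorial aside, not part of the patch] The loop above un-nests brace-only subtrees with a
// `prev_skipped`/`curr_skipped` worklist instead of recursion. A simplified sketch of the same
// worklist idea on a toy tree; `Item` is a hypothetical type, not the rust-analyzer AST, and
// ordering is deliberately left to a later pass, as in the patch.
#[derive(Debug)]
enum Item {
    Leaf(&'static str),
    // A braced group with no path of its own, like the inner `{bar, baz}` in `use foo::{{bar, baz}, qux};`
    List(Vec<Item>),
}

// Drains nested lists round by round, keeping only the leaves.
fn flatten(items: Vec<Item>) -> Vec<&'static str> {
    let mut leaves = Vec::new();
    let mut worklist = items;
    while !worklist.is_empty() {
        let mut next_round = Vec::new(); // plays the role of `curr_skipped`
        for item in worklist {
            match item {
                Item::Leaf(name) => leaves.push(name),
                Item::List(inner) => next_round.extend(inner),
            }
        }
        worklist = next_round; // plays the role of `prev_skipped`
    }
    leaves
}

fn main() {
    let tree = vec![
        Item::List(vec![Item::Leaf("bar"), Item::List(vec![Item::Leaf("baz")])]),
        Item::Leaf("qux"),
    ];
    // Deferred items surface in later rounds; sorting happens separately.
    assert_eq!(flatten(tree), ["qux", "bar", "baz"]);
}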
+ if subtrees.len() > 1 { + let mut did_sort = false; + subtrees.sort_unstable_by(|a, b| { + let order = use_tree_cmp_bin_search(a, b); + if !did_sort && order == Ordering::Less { + did_sort = true; + } + order + }); + if did_sort { + let start = use_tree_list + .l_curly_token() + .and_then(|l_curly| algo::non_trivia_sibling(l_curly.into(), Direction::Next)) + .filter(|it| it.kind() != T!['}']); + let end = use_tree_list + .r_curly_token() + .and_then(|r_curly| algo::non_trivia_sibling(r_curly.into(), Direction::Prev)) + .filter(|it| it.kind() != T!['{']); + if let Some((start, end)) = start.zip(end) { + // Attempt to insert elements while preserving preceding and trailing trivia. + let mut elements = Vec::new(); + for subtree in subtrees { + add_element_to_list(subtree.syntax().clone().into(), &mut elements); + } + ted::replace_all(start..=end, elements); + } else { + let new_use_tree_list = make::use_tree_list(subtrees).clone_for_update(); + ted::replace(use_tree_list.syntax(), new_use_tree_list.syntax()); + } + modified = true; + } + } + + if !modified { + // Either the use tree was already normalized or its semantically empty. + return None; + } } Some(()) } @@ -280,7 +530,7 @@ fn use_tree_cmp_bin_search(lhs: &ast::UseTree, rhs: &ast::UseTree) -> Ordering { /// and `crate` first, then identifier imports with lowercase ones first and upper snake case /// (e.g. UPPER_SNAKE_CASE) ones last, then glob imports, and at last list imports. /// -/// Example foo::{self, foo, baz, Baz, Qux, FOO_BAZ, *, {Bar}} +/// Example: `foo::{self, baz, foo, Baz, Qux, FOO_BAZ, *, {Bar}}` /// Ref: . pub(super) fn use_tree_cmp(a: &ast::UseTree, b: &ast::UseTree) -> Ordering { let a_is_simple_path = a.is_simple_path() && a.rename().is_none(); diff --git a/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs b/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs index 432f1d745d205..1b6f650768b9e 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs @@ -20,14 +20,9 @@ pub fn items_with_name<'a>( name: NameToImport, assoc_item_search: AssocSearchMode, ) -> impl Iterator + 'a { - let _p = profile::span("items_with_name").detail(|| { - format!( - "Name: {}, crate: {:?}, assoc items: {:?}", - name.text(), - assoc_item_search, - krate.display_name(sema.db).map(|name| name.to_string()), - ) - }); + let krate_name = krate.display_name(sema.db).map(|name| name.to_string()); + let _p = tracing::span!(tracing::Level::INFO, "items_with_name", name = name.text(), assoc_item_search = ?assoc_item_search, crate = ?krate_name) + .entered(); let prefix = matches!(name, NameToImport::Prefix(..)); let (local_query, external_query) = match name { @@ -77,7 +72,7 @@ fn find_items<'a>( local_query: symbol_index::Query, external_query: import_map::Query, ) -> impl Iterator + 'a { - let _p = profile::span("find_items"); + let _p = tracing::span!(tracing::Level::INFO, "find_items").entered(); let db = sema.db; // NOTE: `external_query` includes `assoc_item_search`, so we don't need to diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs index 7769d8fba100c..006d8882c11e0 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs @@ -134,6 +134,7 @@ pub enum ReferenceCategory { // FIXME: Some day should be able to search in doc comments. Would probably // need to switch from enum to bitflags then? 
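// [Editorial aside, not part of the patch] The `use_tree_cmp` doc above orders keyword segments
// (`self`, `super`, `crate`) first, then identifiers with lowercase before UPPER_SNAKE_CASE, then
// globs, then nested lists. A reduced sketch of that ranking over plain strings; `rank` is a
// hypothetical helper, not the real comparator (which works on `ast::UseTree`s).
use std::cmp::Ordering;

fn rank(segment: &str) -> u8 {
    let first = segment.chars().next().unwrap_or(' ');
    match segment {
        "self" | "super" | "crate" => 0,
        "*" => 4,
        _ if first == '{' => 5, // nested list comes last
        _ if segment.chars().all(|c| c.is_ascii_uppercase() || c == '_') => 3, // UPPER_SNAKE_CASE
        _ if first.is_ascii_lowercase() => 1,
        _ => 2, // other identifiers
    }
}

fn cmp_segments(a: &str, b: &str) -> Ordering {
    rank(a).cmp(&rank(b)).then_with(|| a.cmp(b))
}

fn main() {
    let mut trees = vec!["{Bar}", "FOO_BAZ", "foo", "*", "Baz", "self", "baz", "Qux"];
    trees.sort_by(|a, b| cmp_segments(a, b));
    // Matches the example from the doc comment: foo::{self, baz, foo, Baz, Qux, FOO_BAZ, *, {Bar}}
    assert_eq!(trees, ["self", "baz", "foo", "Baz", "Qux", "FOO_BAZ", "*", "{Bar}"]);
}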
// DocComment + Test, } /// Generally, `search_scope` returns files that might contain references for the element. @@ -273,7 +274,7 @@ impl IntoIterator for SearchScope { impl Definition { fn search_scope(&self, db: &RootDatabase) -> SearchScope { - let _p = profile::span("search_scope"); + let _p = tracing::span!(tracing::Level::INFO, "search_scope").entered(); if let Definition::BuiltinType(_) = self { return SearchScope::crate_graph(db); @@ -303,14 +304,18 @@ impl Definition { DefWithBody::InTypeConst(_) => return SearchScope::empty(), }; return match def { - Some(def) => SearchScope::file_range(def.as_ref().original_file_range_full(db)), + Some(def) => SearchScope::file_range( + def.as_ref().original_file_range_with_macro_call_body(db), + ), None => SearchScope::single_file(file_id), }; } if let Definition::SelfType(impl_) = self { return match impl_.source(db).map(|src| src.syntax().cloned()) { - Some(def) => SearchScope::file_range(def.as_ref().original_file_range_full(db)), + Some(def) => SearchScope::file_range( + def.as_ref().original_file_range_with_macro_call_body(db), + ), None => SearchScope::single_file(file_id), }; } @@ -327,7 +332,9 @@ impl Definition { hir::GenericDef::Const(it) => it.source(db).map(|src| src.syntax().cloned()), }; return match def { - Some(def) => SearchScope::file_range(def.as_ref().original_file_range_full(db)), + Some(def) => SearchScope::file_range( + def.as_ref().original_file_range_with_macro_call_body(db), + ), None => SearchScope::single_file(file_id), }; } @@ -435,7 +442,7 @@ impl<'a> FindUsages<'a> { } pub fn search(&self, sink: &mut dyn FnMut(FileId, FileReference) -> bool) { - let _p = profile::span("FindUsages:search"); + let _p = tracing::span!(tracing::Level::INFO, "FindUsages:search").entered(); let sema = self.sema; let search_scope = { @@ -743,7 +750,7 @@ impl<'a> FindUsages<'a> { let reference = FileReference { range, name: FileReferenceNode::NameRef(name_ref.clone()), - category: ReferenceCategory::new(&def, name_ref), + category: ReferenceCategory::new(self.sema, &def, name_ref), }; sink(file_id, reference) } @@ -759,7 +766,7 @@ impl<'a> FindUsages<'a> { let reference = FileReference { range, name: FileReferenceNode::NameRef(name_ref.clone()), - category: ReferenceCategory::new(&def, name_ref), + category: ReferenceCategory::new(self.sema, &def, name_ref), }; sink(file_id, reference) } @@ -769,7 +776,7 @@ impl<'a> FindUsages<'a> { let reference = FileReference { range, name: FileReferenceNode::NameRef(name_ref.clone()), - category: ReferenceCategory::new(&def, name_ref), + category: ReferenceCategory::new(self.sema, &def, name_ref), }; sink(file_id, reference) } else { @@ -783,10 +790,10 @@ impl<'a> FindUsages<'a> { let local = Definition::Local(local); let access = match self.def { Definition::Field(_) if field == self.def => { - ReferenceCategory::new(&field, name_ref) + ReferenceCategory::new(self.sema, &field, name_ref) } Definition::Local(_) if local == self.def => { - ReferenceCategory::new(&local, name_ref) + ReferenceCategory::new(self.sema, &local, name_ref) } _ => return false, }; @@ -871,7 +878,15 @@ fn def_to_ty(sema: &Semantics<'_, RootDatabase>, def: &Definition) -> Option Option { + fn new( + sema: &Semantics<'_, RootDatabase>, + def: &Definition, + r: &ast::NameRef, + ) -> Option { + if is_name_ref_in_test(sema, r) { + return Some(ReferenceCategory::Test); + } + // Only Locals and Fields have accesses for now. 
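// [Editorial aside, not part of the patch] Throughout this diff, `let _p = profile::span("...")`
// becomes an entered `tracing` span, as in `search_scope` and `FindUsages::search` above. A minimal
// stand-alone sketch of the new pattern; it assumes the `tracing` and `tracing-subscriber` crates,
// and the function and field names are illustrative rather than rust-analyzer's.
use tracing_subscriber::fmt::format::FmtSpan;

fn find_usages(query: &str) -> usize {
    // The guard keeps the span entered until it is dropped at the end of the function,
    // much like the old profiler guard did.
    let _p = tracing::span!(tracing::Level::INFO, "find_usages", query = ?query).entered();
    query.len() // stand-in for the real work
}

fn main() {
    // Emit a log line when each span closes so the INFO span above shows up in the output.
    tracing_subscriber::fmt()
        .with_span_events(FmtSpan::CLOSE)
        .with_max_level(tracing::Level::INFO)
        .init();
    let _ = find_usages("Definition::Local");
}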
if !matches!(def, Definition::Local(_) | Definition::Field(_)) { return is_name_ref_in_import(r).then_some(ReferenceCategory::Import); @@ -910,3 +925,10 @@ fn is_name_ref_in_import(name_ref: &ast::NameRef) -> bool { .and_then(|it| it.parent_path().top_path().syntax().parent()) .map_or(false, |it| it.kind() == SyntaxKind::USE_TREE) } + +fn is_name_ref_in_test(sema: &Semantics<'_, RootDatabase>, name_ref: &ast::NameRef) -> bool { + name_ref.syntax().ancestors().any(|node| match ast::Fn::cast(node) { + Some(it) => sema.to_def(&it).map_or(false, |func| func.is_test(sema.db)), + None => false, + }) +} diff --git a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs index 7774b0834dca5..92c09089e1f13 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs @@ -124,7 +124,7 @@ pub trait SymbolsDatabase: HirDatabase + SourceDatabaseExt + Upcast Arc { - let _p = profile::span("library_symbols"); + let _p = tracing::span!(tracing::Level::INFO, "library_symbols").entered(); let mut symbol_collector = SymbolCollector::new(db.upcast()); @@ -142,14 +142,14 @@ fn library_symbols(db: &dyn SymbolsDatabase, source_root_id: SourceRootId) -> Ar } fn module_symbols(db: &dyn SymbolsDatabase, module: Module) -> Arc { - let _p = profile::span("module_symbols"); + let _p = tracing::span!(tracing::Level::INFO, "module_symbols").entered(); let symbols = SymbolCollector::collect_module(db.upcast(), module); Arc::new(SymbolIndex::new(symbols)) } pub fn crate_symbols(db: &dyn SymbolsDatabase, krate: Crate) -> Box<[Arc]> { - let _p = profile::span("crate_symbols"); + let _p = tracing::span!(tracing::Level::INFO, "crate_symbols").entered(); krate.modules(db.upcast()).into_iter().map(|module| db.module_symbols(module)).collect() } @@ -200,7 +200,7 @@ impl std::ops::Deref for Snap { // | VS Code | kbd:[Ctrl+T] // |=== pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec { - let _p = profile::span("world_symbols").detail(|| query.query.clone()); + let _p = tracing::span!(tracing::Level::INFO, "world_symbols", query = ?query.query).entered(); let indices: Vec<_> = if query.libs { db.library_roots() @@ -320,7 +320,7 @@ impl Query { indices: &'sym [Arc], cb: impl FnMut(&'sym FileSymbol), ) { - let _p = profile::span("symbol_index::Query::search"); + let _p = tracing::span!(tracing::Level::INFO, "symbol_index::Query::search").entered(); let mut op = fst::map::OpBuilder::new(); match self.mode { SearchMode::Exact => { diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml index 3ed48457a2842..69768041389ac 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml @@ -16,6 +16,7 @@ cov-mark = "2.0.0-pre.1" either.workspace = true itertools.workspace = true serde_json.workspace = true +tracing.workspace = true once_cell = "1.17.0" # local deps @@ -39,4 +40,4 @@ sourcegen.workspace = true in-rust-tree = [] [lints] -workspace = true \ No newline at end of file +workspace = true diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs index e75d89737281c..66ebf593505d2 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs +++ 
b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs @@ -6,7 +6,7 @@ use syntax::{ AstNode, AstPtr, }; -use crate::{adjusted_display_range_new, Diagnostic, DiagnosticCode, DiagnosticsContext}; +use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext}; // Diagnostic: mismatched-tuple-struct-pat-arg-count // @@ -50,7 +50,7 @@ fn invalid_args_range( expected: usize, found: usize, ) -> FileRange { - adjusted_display_range_new(ctx, source, &|expr| { + adjusted_display_range(ctx, source, &|expr| { let (text_range, r_paren_token, expected_arg) = match expr { Either::Left(ast::Expr::CallExpr(call)) => { let arg_list = call.arg_list()?; diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_incorrect_safety.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_incorrect_safety.rs index 251a645292ed5..6be2c54e6030e 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_incorrect_safety.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_incorrect_safety.rs @@ -19,7 +19,7 @@ pub(crate) fn trait_impl_incorrect_safety( }, adjusted_display_range::( ctx, - InFile { file_id: d.file_id, value: d.impl_.syntax_node_ptr() }, + InFile { file_id: d.file_id, value: d.impl_ }, &|impl_| { if d.should_be_safe { Some(match (impl_.unsafe_token(), impl_.impl_token()) { diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs index 56188cddf0b2e..58d1b7f31d2fe 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs @@ -25,7 +25,7 @@ pub(crate) fn trait_impl_missing_assoc_item( format!("not all trait items implemented, missing: {missing}"), adjusted_display_range::( ctx, - InFile { file_id: d.file_id, value: d.impl_.syntax_node_ptr() }, + InFile { file_id: d.file_id, value: d.impl_ }, &|impl_| impl_.trait_().map(|t| t.syntax().text_range()), ), ) diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs index 23042e222b584..750189beecb1d 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs @@ -1,3 +1,4 @@ +use either::Either; use hir::{db::ExpandDatabase, ClosureStyle, HirDisplay, HirFileIdExt, InFile, Type}; use ide_db::{famous_defs::FamousDefs, source_change::SourceChange}; use syntax::{ @@ -13,33 +14,24 @@ use crate::{adjusted_display_range, fix, Assist, Diagnostic, DiagnosticCode, Dia // This diagnostic is triggered when the type of an expression or pattern does not match // the expected type. 
pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch) -> Diagnostic { - let display_range = match &d.expr_or_pat.value { - expr if ast::Expr::can_cast(expr.kind()) => adjusted_display_range::( - ctx, - InFile { file_id: d.expr_or_pat.file_id, value: expr.syntax_node_ptr() }, - &|expr| { - let salient_token_range = match expr { - ast::Expr::IfExpr(it) => it.if_token()?.text_range(), - ast::Expr::LoopExpr(it) => it.loop_token()?.text_range(), - ast::Expr::ForExpr(it) => it.for_token()?.text_range(), - ast::Expr::WhileExpr(it) => it.while_token()?.text_range(), - ast::Expr::BlockExpr(it) => it.stmt_list()?.r_curly_token()?.text_range(), - ast::Expr::MatchExpr(it) => it.match_token()?.text_range(), - ast::Expr::MethodCallExpr(it) => it.name_ref()?.ident_token()?.text_range(), - ast::Expr::FieldExpr(it) => it.name_ref()?.ident_token()?.text_range(), - ast::Expr::AwaitExpr(it) => it.await_token()?.text_range(), - _ => return None, - }; - - cov_mark::hit!(type_mismatch_range_adjustment); - Some(salient_token_range) - }, - ), - pat => ctx.sema.diagnostics_display_range(InFile { - file_id: d.expr_or_pat.file_id, - value: pat.syntax_node_ptr(), - }), - }; + let display_range = adjusted_display_range(ctx, d.expr_or_pat, &|node| { + let Either::Left(expr) = node else { return None }; + let salient_token_range = match expr { + ast::Expr::IfExpr(it) => it.if_token()?.text_range(), + ast::Expr::LoopExpr(it) => it.loop_token()?.text_range(), + ast::Expr::ForExpr(it) => it.for_token()?.text_range(), + ast::Expr::WhileExpr(it) => it.while_token()?.text_range(), + ast::Expr::BlockExpr(it) => it.stmt_list()?.r_curly_token()?.text_range(), + ast::Expr::MatchExpr(it) => it.match_token()?.text_range(), + ast::Expr::MethodCallExpr(it) => it.name_ref()?.ident_token()?.text_range(), + ast::Expr::FieldExpr(it) => it.name_ref()?.ident_token()?.text_range(), + ast::Expr::AwaitExpr(it) => it.await_token()?.text_range(), + _ => return None, + }; + + cov_mark::hit!(type_mismatch_range_adjustment); + Some(salient_token_range) + }); let mut diag = Diagnostic::new( DiagnosticCode::RustcHardError("E0308"), format!( diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs index 321459412182f..0e7a5720d4d25 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs @@ -8,7 +8,7 @@ use ide_db::{ use syntax::{ast, AstNode, AstPtr}; use text_edit::TextEdit; -use crate::{adjusted_display_range_new, Diagnostic, DiagnosticCode, DiagnosticsContext}; +use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext}; // Diagnostic: unresolved-field // @@ -29,7 +29,7 @@ pub(crate) fn unresolved_field( d.name.display(ctx.sema.db), d.receiver.display(ctx.sema.db) ), - adjusted_display_range_new(ctx, d.expr, &|expr| { + adjusted_display_range(ctx, d.expr, &|expr| { Some( match expr { ast::Expr::MethodCallExpr(it) => it.name_ref(), diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs index 41fb67290852c..9f8fee67f31c1 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs @@ -11,7 +11,7 @@ use syntax::{ }; use 
text_edit::TextEdit; -use crate::{adjusted_display_range_new, Diagnostic, DiagnosticCode, DiagnosticsContext}; +use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext}; // Diagnostic: unresolved-method // @@ -34,7 +34,7 @@ pub(crate) fn unresolved_method( d.name.display(ctx.sema.db), d.receiver.display(ctx.sema.db) ), - adjusted_display_range_new(ctx, d.expr, &|expr| { + adjusted_display_range(ctx, d.expr, &|expr| { Some( match expr { ast::Expr::MethodCallExpr(it) => it.name_ref(), diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs index f35fc5b533a66..5ad7069e317a9 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs @@ -89,7 +89,6 @@ use ide_db::{ use once_cell::sync::Lazy; use stdx::never; use syntax::{ - algo::find_node_at_range, ast::{self, AstNode}, AstPtr, SyntaxNode, SyntaxNodePtr, TextRange, }; @@ -293,7 +292,7 @@ pub fn diagnostics( resolve: &AssistResolveStrategy, file_id: FileId, ) -> Vec { - let _p = profile::span("diagnostics"); + let _p = tracing::span!(tracing::Level::INFO, "diagnostics").entered(); let sema = Semantics::new(db); let parse = db.parse(file_id); let mut res = Vec::new(); @@ -571,24 +570,6 @@ fn unresolved_fix(id: &'static str, label: &str, target: TextRange) -> Assist { } fn adjusted_display_range( - ctx: &DiagnosticsContext<'_>, - diag_ptr: InFile, - adj: &dyn Fn(N) -> Option, -) -> FileRange { - let FileRange { file_id, range } = ctx.sema.diagnostics_display_range(diag_ptr); - - let source_file = ctx.sema.db.parse(file_id); - FileRange { - file_id, - range: find_node_at_range::(&source_file.syntax_node(), range) - .filter(|it| it.syntax().text_range() == range) - .and_then(adj) - .unwrap_or(range), - } -} - -// FIXME Replace the one above with this one? 
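// [Editorial aside, not part of the patch] The rewritten `type_mismatch` above keys on
// `Either::Left` with a let-else and simply gives up on patterns. A tiny sketch of that shape using
// the `either` crate; `salient_len` and its string payloads are made up for illustration.
use either::Either;

fn salient_len(node: Either<&str, &str>) -> Option<usize> {
    // Expressions (Left) get a salient range; patterns (Right) fall through to the default.
    let Either::Left(expr) = node else { return None };
    Some(expr.len())
}

fn main() {
    assert_eq!(salient_len(Either::Left("if cond { .. }")), Some(14));
    assert_eq!(salient_len(Either::Right("Some(x)")), None);
}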
-fn adjusted_display_range_new( ctx: &DiagnosticsContext<'_>, diag_ptr: InFile>, adj: &dyn Fn(N) -> Option, diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs index 742db32564de3..f394a491b5124 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs @@ -173,7 +173,7 @@ fn minicore_smoke_test() { fn check(minicore: MiniCore) { let source = minicore.source_code(); let mut config = DiagnosticsConfig::test_sample(); - // This should be ignored since we conditionaly remove code which creates single item use with braces + // This should be ignored since we conditionally remove code which creates single item use with braces config.disabled.insert("unused_braces".to_string()); check_diagnostics_with_config(config, &source); } diff --git a/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs b/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs index b863e144f0aca..2bc0721123178 100755 --- a/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs +++ b/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs @@ -494,7 +494,7 @@ fn main() { 2, 3, ], - strustS => StructS { + structS => StructS { a: 31, }, } diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs index d64295bdd6907..4fed1f9158ce2 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs @@ -226,6 +226,7 @@ mod tests { .map(|(FileRange { file_id, range }, _)| FileRange { file_id, range }) .sorted_by_key(cmp) .collect::>(); + assert_eq!(expected, navs); } @@ -236,6 +237,60 @@ mod tests { assert!(navs.is_empty(), "didn't expect this to resolve anywhere: {navs:?}") } + #[test] + fn goto_def_in_included_file() { + check( + r#" +//- minicore:include +//- /main.rs + +include!("a.rs"); + +fn main() { + foo(); +} + +//- /a.rs +fn func_in_include() { + //^^^^^^^^^^^^^^^ +} + +fn foo() { + func_in_include$0(); +} +"#, + ); + } + + #[test] + fn goto_def_in_included_file_nested() { + check( + r#" +//- minicore:include +//- /main.rs + +macro_rules! 
passthrough { + ($($tt:tt)*) => { $($tt)* } +} + +passthrough!(include!("a.rs")); + +fn main() { + foo(); +} + +//- /a.rs +fn func_in_include() { + //^^^^^^^^^^^^^^^ +} + +fn foo() { + func_in_include$0(); +} +"#, + ); + } + #[test] fn goto_def_if_items_same_name() { check( diff --git a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs index c3a403b10704b..979ca4575d067 100644 --- a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs +++ b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs @@ -55,7 +55,7 @@ pub(crate) fn highlight_related( config: HighlightRelatedConfig, pos @ FilePosition { offset, file_id }: FilePosition, ) -> Option> { - let _p = profile::span("highlight_related"); + let _p = tracing::span!(tracing::Level::INFO, "highlight_related").entered(); let syntax = sema.parse(file_id).syntax().clone(); let token = pick_best_token(syntax.token_at_offset(offset), |kind| match kind { @@ -519,6 +519,7 @@ mod tests { ReferenceCategory::Read => "read", ReferenceCategory::Write => "write", ReferenceCategory::Import => "import", + ReferenceCategory::Test => "test", } .to_string() }), diff --git a/src/tools/rust-analyzer/crates/ide/src/hover.rs b/src/tools/rust-analyzer/crates/ide/src/hover.rs index 77a06a97e22d2..19b181ae3b61e 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover.rs @@ -120,6 +120,7 @@ pub(crate) fn hover( Some(res) } +#[allow(clippy::field_reassign_with_default)] fn hover_simple( sema: &Semantics<'_, RootDatabase>, FilePosition { file_id, offset }: FilePosition, diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs index 348308d7100ad..9f4427090e9e8 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs @@ -7196,8 +7196,8 @@ impl Iterator for S { file_id: FileId( 1, ), - full_range: 6012..6220, - focus_range: 6077..6083, + full_range: 6156..6364, + focus_range: 6221..6227, name: "Future", kind: Trait, container_name: "future", @@ -7210,8 +7210,8 @@ impl Iterator for S { file_id: FileId( 1, ), - full_range: 6850..7316, - focus_range: 6894..6902, + full_range: 6994..7460, + focus_range: 7038..7046, name: "Iterator", kind: Trait, container_name: "iterator", diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs index 46e5901852ae5..8311e770b4b41 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs @@ -454,7 +454,7 @@ pub(crate) fn inlay_hints( range_limit: Option, config: &InlayHintsConfig, ) -> Vec { - let _p = profile::span("inlay_hints"); + let _p = tracing::span!(tracing::Level::INFO, "inlay_hints").entered(); let sema = Semantics::new(db); let file = sema.parse(file_id); let file = file.syntax(); diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs index 5a206643acf6a..3104b85768f47 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs @@ -177,7 +177,7 @@ mod tests { #[test] fn try_operator() { - // We currently show drop inlay hint for every `?` operator that may potentialy drop something. 
We probably need to + // We currently show drop inlay hint for every `?` operator that may potentially drop something. We probably need to // make it configurable as it doesn't seem very useful. check_with_config( ONLY_DROP_CONFIG, diff --git a/src/tools/rust-analyzer/crates/ide/src/prime_caches.rs b/src/tools/rust-analyzer/crates/ide/src/prime_caches.rs index d704d12a05b23..a95d1771ce0b3 100644 --- a/src/tools/rust-analyzer/crates/ide/src/prime_caches.rs +++ b/src/tools/rust-analyzer/crates/ide/src/prime_caches.rs @@ -33,7 +33,7 @@ pub(crate) fn parallel_prime_caches( num_worker_threads: u8, cb: &(dyn Fn(ParallelPrimeCachesProgress) + Sync), ) { - let _p = profile::span("prime_caches"); + let _p = tracing::span!(tracing::Level::INFO, "prime_caches").entered(); let graph = db.crate_graph(); let mut crates_to_prime = { diff --git a/src/tools/rust-analyzer/crates/ide/src/references.rs b/src/tools/rust-analyzer/crates/ide/src/references.rs index 78fe84f70d3a6..bdda25a111f81 100644 --- a/src/tools/rust-analyzer/crates/ide/src/references.rs +++ b/src/tools/rust-analyzer/crates/ide/src/references.rs @@ -57,7 +57,7 @@ pub(crate) fn find_all_refs( position: FilePosition, search_scope: Option, ) -> Option> { - let _p = profile::span("find_all_refs"); + let _p = tracing::span!(tracing::Level::INFO, "find_all_refs").entered(); let syntax = sema.parse(position.file_id).syntax().clone(); let make_searcher = |literal_search: bool| { move |def: Definition| { @@ -307,6 +307,51 @@ mod tests { use crate::{fixture, SearchScope}; + #[test] + fn exclude_tests() { + check( + r#" +fn test_func() {} + +fn func() { + test_func$0(); +} + +#[test] +fn test() { + test_func(); +} +"#, + expect![[r#" + test_func Function FileId(0) 0..17 3..12 + + FileId(0) 35..44 + FileId(0) 75..84 Test + "#]], + ); + + check( + r#" +fn test_func() {} + +fn func() { + test_func$0(); +} + +#[::core::prelude::v1::test] +fn test() { + test_func(); +} +"#, + expect![[r#" + test_func Function FileId(0) 0..17 3..12 + + FileId(0) 35..44 + FileId(0) 96..105 Test + "#]], + ); + } + #[test] fn test_struct_literal_after_space() { check( @@ -454,6 +499,7 @@ fn main() { "#]], ); } + #[test] fn test_variant_tuple_before_paren() { check( @@ -1435,7 +1481,7 @@ fn test$0() { expect![[r#" test Function FileId(0) 0..33 11..15 - FileId(0) 24..28 + FileId(0) 24..28 Test "#]], ); } diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs index 8c6f5e2e9cb50..dfcbaf54d4f92 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs @@ -186,7 +186,7 @@ pub(crate) fn highlight( file_id: FileId, range_to_highlight: Option, ) -> Vec { - let _p = profile::span("highlight"); + let _p = tracing::span!(tracing::Level::INFO, "highlight").entered(); let sema = Semantics::new(db); // Determine the root based on the given range. diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs index 5b7a25408a828..800bc994adab2 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs @@ -292,7 +292,7 @@ fn expand_subtree( Err(e) => { // XXX: It *might* make sense to emit a dummy integer value like `0` here. 
// That would type inference a bit more robust in cases like - // `v[${count(t)}]` where index doesn't matter, but also coult also lead to + // `v[${count(t)}]` where index doesn't matter, but also could lead to // wrong infefrence for cases like `tup.${count(t)}` where index itself // does matter. if err.is_none() { diff --git a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs index 4513732902db7..d6c3bd1892693 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs @@ -622,7 +622,7 @@ where struct Converter { current: Option, - current_leafs: Vec>, + current_leaves: Vec>, preorder: PreorderWithTokens, range: TextRange, punct_offset: Option<(SyntaxToken, TextSize)>, @@ -650,7 +650,7 @@ impl Converter { append, remove, call_site, - current_leafs: vec![], + current_leaves: vec![], }; let first = this.next_token(); this.current = first; @@ -665,7 +665,7 @@ impl Converter { self.preorder.skip_subtree(); if let Some(mut v) = self.append.remove(&n.into()) { v.reverse(); - self.current_leafs.extend(v); + self.current_leaves.extend(v); return None; } } @@ -673,7 +673,7 @@ impl Converter { WalkEvent::Leave(ele) => { if let Some(mut v) = self.append.remove(&ele) { v.reverse(); - self.current_leafs.extend(v); + self.current_leaves.extend(v); return None; } } @@ -758,8 +758,8 @@ where } } - if let Some(leaf) = self.current_leafs.pop() { - if self.current_leafs.is_empty() { + if let Some(leaf) = self.current_leaves.pop() { + if self.current_leaves.is_empty() { self.current = self.next_token(); } return Some((SynToken::Leaf(leaf), TextRange::empty(TextSize::new(0)))); diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs index 208051113a7d2..379d184dd684e 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs @@ -113,7 +113,7 @@ impl ProcMacroServer { } pub fn load_dylib(&self, dylib: MacroDylib) -> Result, ServerError> { - let _p = profile::span("ProcMacroClient::load_dylib"); + let _p = tracing::span!(tracing::Level::INFO, "ProcMacroClient::load_dylib").entered(); let macros = self.process.lock().unwrap_or_else(|e| e.into_inner()).find_proc_macros(&dylib.path)?; @@ -184,7 +184,7 @@ impl ProcMacro { .process .lock() .unwrap_or_else(|e| e.into_inner()) - .send_task(msg::Request::ExpandMacro(task))?; + .send_task(msg::Request::ExpandMacro(Box::new(task)))?; match response { msg::Response::ExpandMacro(it) => { diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs index 557ddba5c78fe..e28fe387b84bd 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs @@ -29,7 +29,7 @@ pub enum Request { /// Since [`NO_VERSION_CHECK_VERSION`] ListMacros { dylib_path: PathBuf }, /// Since [`NO_VERSION_CHECK_VERSION`] - ExpandMacro(ExpandMacro), + ExpandMacro(Box), /// Since [`VERSION_CHECK_VERSION`] ApiVersionCheck {}, /// Since [`RUST_ANALYZER_SPAN_SUPPORT`] diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs index 5ff1f36c545e6..5f81c0a96d967 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs @@ -106,9 +106,9 
@@ fn read_section<'a>(dylib_binary: &'a [u8], section_name: &str) -> io::Result<&' /// pub fn read_version(dylib_path: &AbsPath) -> io::Result { let dylib_file = File::open(dylib_path)?; - let dylib_mmaped = unsafe { Mmap::map(&dylib_file) }?; + let dylib_mmapped = unsafe { Mmap::map(&dylib_file) }?; - let dot_rustc = read_section(&dylib_mmaped, ".rustc")?; + let dot_rustc = read_section(&dylib_mmapped, ".rustc")?; // check if magic is valid if &dot_rustc[0..4] != b"rust" { diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs index af9a03826ffc0..a36200cdb4c35 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs @@ -45,9 +45,11 @@ fn run() -> io::Result<()> { msg::Response::ListMacros(srv.list_macros(&dylib_path)) } msg::Request::ExpandMacro(task) => match srv.span_mode() { - msg::SpanMode::Id => msg::Response::ExpandMacro(srv.expand(task).map(|(it, _)| it)), + msg::SpanMode::Id => { + msg::Response::ExpandMacro(srv.expand(*task).map(|(it, _)| it)) + } msg::SpanMode::RustAnalyzer => msg::Response::ExpandMacroExtended( - srv.expand(task).map(|(tree, span_data_table)| msg::ExpandMacroExtended { + srv.expand(*task).map(|(tree, span_data_table)| msg::ExpandMacroExtended { tree, span_data_table, }), diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/build.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/build.rs index c9b605a808765..ff62980e4ffe3 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/build.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/build.rs @@ -109,11 +109,11 @@ fn main() { let mut artifact_path = None; for message in Message::parse_stream(output.stdout.as_slice()) { if let Message::CompilerArtifact(artifact) = message.unwrap() { - if artifact.target.kind.contains(&"proc-macro".to_string()) { - if artifact.package_id.repr.starts_with(&repr) || artifact.package_id.repr == pkgid - { - artifact_path = Some(PathBuf::from(&artifact.filenames[0])); - } + if artifact.target.kind.contains(&"proc-macro".to_string()) + && (artifact.package_id.repr.starts_with(&repr) + || artifact.package_id.repr == pkgid) + { + artifact_path = Some(PathBuf::from(&artifact.filenames[0])); } } } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server/rust_analyzer_span.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server/rust_analyzer_span.rs index bcf3600d27366..b864a5e4fd63e 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server/rust_analyzer_span.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server/rust_analyzer_span.rs @@ -1,4 +1,4 @@ -//! proc-macro server backend based on rust-analyzer's internal span represention +//! proc-macro server backend based on rust-analyzer's internal span representation //! This backend is used solely by rust-analyzer as it ties into rust-analyzer internals. //! //! 
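// [Editorial aside, not part of the patch] Wrapping the request payload as
// `Request::ExpandMacro(Box<ExpandMacro>)` above is the usual "box the large variant" trick: the
// enum itself stays small and only the expand path pays for the allocation (hence the `*task`
// deref when handling it). A sketch with hypothetical types, not the real proc-macro-api messages:
use std::mem::size_of;

#[allow(dead_code)]
struct BigPayload {
    buf: [u8; 1024],
}

#[allow(dead_code)]
enum SlimRequest {
    Ping,
    Expand(Box<BigPayload>), // boxed: this variant only stores a pointer
}

#[allow(dead_code)]
enum FatRequest {
    Ping,
    Expand(BigPayload), // inline: every FatRequest is as large as its largest variant
}

fn main() {
    assert!(size_of::<SlimRequest>() < size_of::<FatRequest>());
    println!("slim = {} bytes, fat = {} bytes", size_of::<SlimRequest>(), size_of::<FatRequest>());
}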
It is an unfortunate result of how the proc-macro API works that we need to look into the diff --git a/src/tools/rust-analyzer/crates/profile/Cargo.toml b/src/tools/rust-analyzer/crates/profile/Cargo.toml index 5350023c88fac..a87b67f5c69f3 100644 --- a/src/tools/rust-analyzer/crates/profile/Cargo.toml +++ b/src/tools/rust-analyzer/crates/profile/Cargo.toml @@ -13,6 +13,7 @@ doctest = false [dependencies] once_cell = "1.17.0" +tracing.workspace = true cfg-if = "1.0.0" la-arena.workspace = true libc.workspace = true @@ -33,4 +34,4 @@ jemalloc = ["jemalloc-ctl"] # default = [ "cpu_profiler" ] [lints] -workspace = true \ No newline at end of file +workspace = true diff --git a/src/tools/rust-analyzer/crates/profile/src/hprof.rs b/src/tools/rust-analyzer/crates/profile/src/hprof.rs deleted file mode 100644 index ea89a89c5c5ca..0000000000000 --- a/src/tools/rust-analyzer/crates/profile/src/hprof.rs +++ /dev/null @@ -1,326 +0,0 @@ -//! Simple hierarchical profiler -use std::{ - cell::RefCell, - collections::{BTreeMap, HashSet}, - env, fmt, - io::{stderr, Write}, - sync::{ - atomic::{AtomicBool, Ordering}, - RwLock, - }, - time::{Duration, Instant}, -}; - -use once_cell::sync::Lazy; - -use crate::tree::{Idx, Tree}; - -/// Filtering syntax -/// env RA_PROFILE=* // dump everything -/// env RA_PROFILE=foo|bar|baz // enabled only selected entries -/// env RA_PROFILE=*@3>10 // dump everything, up to depth 3, if it takes more than 10 ms -pub fn init() { - countme::enable(env::var("RA_COUNT").is_ok()); - let spec = env::var("RA_PROFILE").unwrap_or_default(); - init_from(&spec); -} - -pub fn init_from(spec: &str) { - let filter = if spec.is_empty() { Filter::disabled() } else { Filter::from_spec(spec) }; - filter.install(); -} - -type Label = &'static str; - -/// This function starts a profiling scope in the current execution stack with a given description. -/// It returns a `Profile` struct that measures elapsed time between this method invocation and `Profile` struct drop. -/// It supports nested profiling scopes in case when this function is invoked multiple times at the execution stack. -/// In this case the profiling information will be nested at the output. -/// Profiling information is being printed in the stderr. 
-/// -/// # Example -/// ``` -/// profile::init_from("profile1|profile2@2"); -/// profiling_function1(); -/// -/// fn profiling_function1() { -/// let _p = profile::span("profile1"); -/// profiling_function2(); -/// } -/// -/// fn profiling_function2() { -/// let _p = profile::span("profile2"); -/// } -/// ``` -/// This will print in the stderr the following: -/// ```text -/// 0ms - profile -/// 0ms - profile2 -/// ``` -#[inline] -pub fn span(label: Label) -> ProfileSpan { - debug_assert!(!label.is_empty()); - - let enabled = PROFILING_ENABLED.load(Ordering::Relaxed); - if enabled && with_profile_stack(|stack| stack.push(label)) { - ProfileSpan(Some(ProfilerImpl { label, detail: None })) - } else { - ProfileSpan(None) - } -} - -#[inline] -pub fn heartbeat_span() -> HeartbeatSpan { - let enabled = PROFILING_ENABLED.load(Ordering::Relaxed); - HeartbeatSpan::new(enabled) -} - -#[inline] -pub fn heartbeat() { - let enabled = PROFILING_ENABLED.load(Ordering::Relaxed); - if enabled { - with_profile_stack(|it| it.heartbeat(1)); - } -} - -pub struct ProfileSpan(Option); - -struct ProfilerImpl { - label: Label, - detail: Option, -} - -impl ProfileSpan { - pub fn detail(mut self, detail: impl FnOnce() -> String) -> ProfileSpan { - if let Some(profiler) = &mut self.0 { - profiler.detail = Some(detail()); - } - self - } -} - -impl Drop for ProfilerImpl { - #[inline] - fn drop(&mut self) { - with_profile_stack(|it| it.pop(self.label, self.detail.take())); - } -} - -pub struct HeartbeatSpan { - enabled: bool, -} - -impl HeartbeatSpan { - #[inline] - pub fn new(enabled: bool) -> Self { - if enabled { - with_profile_stack(|it| it.heartbeats(true)); - } - Self { enabled } - } -} - -impl Drop for HeartbeatSpan { - fn drop(&mut self) { - if self.enabled { - with_profile_stack(|it| it.heartbeats(false)); - } - } -} - -static PROFILING_ENABLED: AtomicBool = AtomicBool::new(false); -static FILTER: Lazy> = Lazy::new(Default::default); - -fn with_profile_stack(f: impl FnOnce(&mut ProfileStack) -> T) -> T { - thread_local!(static STACK: RefCell = RefCell::new(ProfileStack::new())); - STACK.with(|it| f(&mut it.borrow_mut())) -} - -#[derive(Default, Clone, Debug)] -struct Filter { - depth: usize, - allowed: HashSet, - longer_than: Duration, - heartbeat_longer_than: Duration, - version: usize, -} - -impl Filter { - fn disabled() -> Filter { - Filter::default() - } - - fn from_spec(mut spec: &str) -> Filter { - let longer_than = if let Some(idx) = spec.rfind('>') { - let longer_than = spec[idx + 1..].parse().expect("invalid profile longer_than"); - spec = &spec[..idx]; - Duration::from_millis(longer_than) - } else { - Duration::new(0, 0) - }; - let heartbeat_longer_than = longer_than; - - let depth = if let Some(idx) = spec.rfind('@') { - let depth: usize = spec[idx + 1..].parse().expect("invalid profile depth"); - spec = &spec[..idx]; - depth - } else { - 999 - }; - let allowed = - if spec == "*" { HashSet::new() } else { spec.split('|').map(String::from).collect() }; - Filter { depth, allowed, longer_than, heartbeat_longer_than, version: 0 } - } - - fn install(mut self) { - PROFILING_ENABLED.store(self.depth > 0, Ordering::SeqCst); - let mut old = FILTER.write().unwrap(); - self.version = old.version + 1; - *old = self; - } -} - -struct ProfileStack { - frames: Vec, - filter: Filter, - messages: Tree, - heartbeats: bool, -} - -struct Frame { - t: Instant, - heartbeats: u32, -} - -#[derive(Default)] -struct Message { - duration: Duration, - label: Label, - detail: Option, -} - -impl ProfileStack { - fn new() -> 
ProfileStack { - ProfileStack { - frames: Vec::new(), - messages: Tree::default(), - filter: Default::default(), - heartbeats: false, - } - } - - fn push(&mut self, label: Label) -> bool { - if self.frames.is_empty() { - if let Ok(f) = FILTER.try_read() { - if f.version > self.filter.version { - self.filter = f.clone(); - } - }; - } - if self.frames.len() > self.filter.depth { - return false; - } - let allowed = &self.filter.allowed; - if self.frames.is_empty() && !allowed.is_empty() && !allowed.contains(label) { - return false; - } - - self.frames.push(Frame { t: Instant::now(), heartbeats: 0 }); - self.messages.start(); - true - } - - fn pop(&mut self, label: Label, detail: Option) { - let frame = self.frames.pop().unwrap(); - let duration = frame.t.elapsed(); - - if self.heartbeats { - self.heartbeat(frame.heartbeats); - let avg_span = duration / (frame.heartbeats + 1); - if avg_span > self.filter.heartbeat_longer_than { - eprintln!("Too few heartbeats {label} ({}/{duration:?})?", frame.heartbeats); - } - } - - self.messages.finish(Message { duration, label, detail }); - if self.frames.is_empty() { - let longer_than = self.filter.longer_than; - // Convert to millis for comparison to avoid problems with rounding - // (otherwise we could print `0ms` despite user's `>0` filter when - // `duration` is just a few nanos). - if duration.as_millis() > longer_than.as_millis() { - if let Some(root) = self.messages.root() { - print(&self.messages, root, 0, longer_than, &mut stderr().lock()); - } - } - self.messages.clear(); - } - } - - fn heartbeats(&mut self, yes: bool) { - self.heartbeats = yes; - } - fn heartbeat(&mut self, n: u32) { - if let Some(frame) = self.frames.last_mut() { - frame.heartbeats += n; - } - } -} - -fn print( - tree: &Tree, - curr: Idx, - level: u32, - longer_than: Duration, - out: &mut impl Write, -) { - let current_indent = " ".repeat(level as usize); - let detail = tree[curr].detail.as_ref().map(|it| format!(" @ {it}")).unwrap_or_default(); - writeln!( - out, - "{}{} - {}{}", - current_indent, - ms(tree[curr].duration), - tree[curr].label, - detail, - ) - .expect("printing profiling info"); - - let mut accounted_for = Duration::default(); - let mut short_children = BTreeMap::new(); // Use `BTreeMap` to get deterministic output. 
- for child in tree.children(curr) { - accounted_for += tree[child].duration; - - if tree[child].duration.as_millis() > longer_than.as_millis() { - print(tree, child, level + 1, longer_than, out); - } else { - let (total_duration, cnt) = - short_children.entry(tree[child].label).or_insert((Duration::default(), 0)); - *total_duration += tree[child].duration; - *cnt += 1; - } - } - - for (child_msg, (duration, count)) in &short_children { - writeln!(out, " {current_indent}{} - {child_msg} ({count} calls)", ms(*duration)) - .expect("printing profiling info"); - } - - let unaccounted = tree[curr].duration - accounted_for; - if tree.children(curr).next().is_some() && unaccounted > longer_than { - writeln!(out, " {current_indent}{} - ???", ms(unaccounted)) - .expect("printing profiling info"); - } -} - -#[allow(non_camel_case_types)] -struct ms(Duration); - -impl fmt::Display for ms { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self.0.as_millis() { - 0 => f.write_str(" 0 "), - n => write!(f, "{n:5}ms"), - } - } -} diff --git a/src/tools/rust-analyzer/crates/profile/src/lib.rs b/src/tools/rust-analyzer/crates/profile/src/lib.rs index d86aa0c414406..38c5b3fc9c725 100644 --- a/src/tools/rust-analyzer/crates/profile/src/lib.rs +++ b/src/tools/rust-analyzer/crates/profile/src/lib.rs @@ -4,15 +4,12 @@ #[cfg(feature = "cpu_profiler")] mod google_cpu_profiler; -mod hprof; mod memory_usage; mod stop_watch; -mod tree; use std::cell::RefCell; pub use crate::{ - hprof::{heartbeat, heartbeat_span, init, init_from, span}, memory_usage::{Bytes, MemoryUsage}, stop_watch::{StopWatch, StopWatchSpan}, }; diff --git a/src/tools/rust-analyzer/crates/profile/src/tree.rs b/src/tools/rust-analyzer/crates/profile/src/tree.rs deleted file mode 100644 index 1290fba36fab3..0000000000000 --- a/src/tools/rust-analyzer/crates/profile/src/tree.rs +++ /dev/null @@ -1,84 +0,0 @@ -//! A simple tree implementation which tries to not allocate all over the place. 
-use std::ops; - -use la_arena::Arena; - -#[derive(Default)] -pub(crate) struct Tree { - nodes: Arena>, - current_path: Vec<(Idx, Option>)>, -} - -pub(crate) type Idx = la_arena::Idx>; - -impl Tree { - pub(crate) fn start(&mut self) - where - T: Default, - { - let me = self.nodes.alloc(Node::new(T::default())); - if let Some((parent, last_child)) = self.current_path.last_mut() { - let slot = match *last_child { - Some(last_child) => &mut self.nodes[last_child].next_sibling, - None => &mut self.nodes[*parent].first_child, - }; - let prev = slot.replace(me); - assert!(prev.is_none()); - *last_child = Some(me); - } - - self.current_path.push((me, None)); - } - - pub(crate) fn finish(&mut self, data: T) { - let (me, _last_child) = self.current_path.pop().unwrap(); - self.nodes[me].data = data; - } - - pub(crate) fn root(&self) -> Option> { - self.nodes.iter().next().map(|(idx, _)| idx) - } - - pub(crate) fn children(&self, idx: Idx) -> impl Iterator> + '_ { - NodeIter { nodes: &self.nodes, next: self.nodes[idx].first_child } - } - pub(crate) fn clear(&mut self) { - self.nodes.clear(); - self.current_path.clear(); - } -} - -impl ops::Index> for Tree { - type Output = T; - fn index(&self, index: Idx) -> &T { - &self.nodes[index].data - } -} - -pub(crate) struct Node { - data: T, - first_child: Option>, - next_sibling: Option>, -} - -impl Node { - fn new(data: T) -> Node { - Node { data, first_child: None, next_sibling: None } - } -} - -struct NodeIter<'a, T> { - nodes: &'a Arena>, - next: Option>, -} - -impl Iterator for NodeIter<'_, T> { - type Item = Idx; - - fn next(&mut self) -> Option> { - self.next.map(|next| { - self.next = self.nodes[next].next_sibling; - next - }) - } -} diff --git a/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs b/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs index c5d55f7d2171f..cf12d5b71df56 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs @@ -26,7 +26,7 @@ pub(crate) fn get( extra_env: &FxHashMap, config: RustcCfgConfig<'_>, ) -> Vec { - let _p = profile::span("rustc_cfg::get"); + let _p = tracing::span!(tracing::Level::INFO, "rustc_cfg::get").entered(); let mut res = Vec::with_capacity(6 * 2 + 1); // Some nightly-only cfgs, which are required for stdlib diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs index 88974e889e8bc..8c5ea0619ac7e 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs @@ -60,7 +60,7 @@ pub enum ProjectWorkspace { cargo: CargoWorkspace, build_scripts: WorkspaceBuildScripts, sysroot: Result>, - rustc: Result<(CargoWorkspace, WorkspaceBuildScripts), Option>, + rustc: Result, Option>, /// Holds cfg flags for the current target. We get those by running /// `rustc --print cfg`. 
/// @@ -119,7 +119,7 @@ impl fmt::Debug for ProjectWorkspace { .field("sysroot", &sysroot.is_ok()) .field( "n_rustc_compiler_crates", - &rustc.as_ref().map_or(0, |(rc, _)| rc.packages().len()), + &rustc.as_ref().map(|a| a.as_ref()).map_or(0, |(rc, _)| rc.packages().len()), ) .field("n_rustc_cfg", &rustc_cfg.len()) .field("n_cfg_overrides", &cfg_overrides.len()) @@ -265,7 +265,7 @@ impl ProjectWorkspace { cargo_toml.parent(), &config.extra_env, ); - Ok((workspace, buildscripts)) + Ok(Box::new((workspace, buildscripts))) } Err(e) => { tracing::error!( @@ -603,7 +603,7 @@ impl ProjectWorkspace { PackageRoot { is_local, include, exclude } }) .chain(mk_sysroot(sysroot.as_ref(), Some(cargo.workspace_root()))) - .chain(rustc.iter().flat_map(|(rustc, _)| { + .chain(rustc.iter().map(|a| a.as_ref()).flat_map(|(rustc, _)| { rustc.packages().map(move |krate| PackageRoot { is_local: false, include: vec![rustc[krate].manifest.parent().to_path_buf()], @@ -631,7 +631,8 @@ impl ProjectWorkspace { sysroot_package_len + project.n_crates() } ProjectWorkspace::Cargo { cargo, sysroot, rustc, .. } => { - let rustc_package_len = rustc.as_ref().map_or(0, |(it, _)| it.packages().len()); + let rustc_package_len = + rustc.as_ref().map(|a| a.as_ref()).map_or(0, |(it, _)| it.packages().len()); let sysroot_package_len = sysroot.as_ref().map_or(0, |it| it.num_packages()); cargo.packages().len() + sysroot_package_len + rustc_package_len } @@ -647,7 +648,7 @@ impl ProjectWorkspace { load: &mut dyn FnMut(&AbsPath) -> Option, extra_env: &FxHashMap, ) -> (CrateGraph, ProcMacroPaths) { - let _p = profile::span("ProjectWorkspace::to_crate_graph"); + let _p = tracing::span!(tracing::Level::INFO, "ProjectWorkspace::to_crate_graph").entered(); let (mut crate_graph, proc_macros) = match self { ProjectWorkspace::Json { project, sysroot, rustc_cfg, toolchain } => { @@ -672,7 +673,7 @@ impl ProjectWorkspace { target_layout, } => cargo_to_crate_graph( load, - rustc.as_ref().ok(), + rustc.as_ref().map(|a| a.as_ref()).ok(), cargo, sysroot.as_ref().ok(), rustc_cfg.clone(), @@ -891,7 +892,7 @@ fn cargo_to_crate_graph( target_layout: TargetLayoutLoadResult, toolchain: Option<&Version>, ) -> (CrateGraph, ProcMacroPaths) { - let _p = profile::span("cargo_to_crate_graph"); + let _p = tracing::span!(tracing::Level::INFO, "cargo_to_crate_graph").entered(); let mut res = (CrateGraph::default(), ProcMacroPaths::default()); let crate_graph = &mut res.0; let proc_macros = &mut res.1; @@ -1088,7 +1089,7 @@ fn detached_files_to_crate_graph( sysroot: Option<&Sysroot>, target_layout: TargetLayoutLoadResult, ) -> (CrateGraph, ProcMacroPaths) { - let _p = profile::span("detached_files_to_crate_graph"); + let _p = tracing::span!(tracing::Level::INFO, "detached_files_to_crate_graph").entered(); let mut crate_graph = CrateGraph::default(); let (public_deps, _libproc_macro) = match sysroot { Some(sysroot) => sysroot_to_crate_graph( @@ -1384,7 +1385,7 @@ fn sysroot_to_crate_graph( load: &mut dyn FnMut(&AbsPath) -> Option, toolchain: Option<&Version>, ) -> (SysrootPublicDeps, Option) { - let _p = profile::span("sysroot_to_crate_graph"); + let _p = tracing::span!(tracing::Level::INFO, "sysroot_to_crate_graph").entered(); match sysroot.mode() { SysrootMode::Workspace(cargo) => { let (mut cg, mut pm) = cargo_to_crate_graph( diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml index db5cabaf76941..a212041e66b46 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml +++ 
b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml @@ -23,7 +23,7 @@ anyhow.workspace = true crossbeam-channel = "0.5.5" dissimilar.workspace = true itertools.workspace = true -scip = "0.3.1" +scip = "0.3.3" lsp-types = { version = "=0.95.0", features = ["proposed"] } parking_lot = "0.12.1" xflags = "0.3.0" @@ -37,11 +37,10 @@ mimalloc = { version = "0.1.30", default-features = false, optional = true } lsp-server.workspace = true tracing.workspace = true tracing-subscriber.workspace = true -tracing-log = "0.2.0" tracing-tree.workspace = true triomphe.workspace = true nohash-hasher.workspace = true -always-assert = "0.1.2" +always-assert = "0.2.0" walkdir = "2.3.2" cfg.workspace = true diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/logger.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/logger.rs deleted file mode 100644 index 1f923f6cf8dda..0000000000000 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/logger.rs +++ /dev/null @@ -1,137 +0,0 @@ -//! Simple logger that logs either to stderr or to a file, using `tracing_subscriber` -//! filter syntax and `tracing_appender` for non blocking output. - -use std::{ - fmt, - fs::File, - io::{self, Stderr}, - sync::Arc, -}; - -use anyhow::Context; -use tracing::{level_filters::LevelFilter, Event, Subscriber}; -use tracing_log::NormalizeEvent; -use tracing_subscriber::{ - filter::Targets, - fmt::{ - format::Writer, writer::BoxMakeWriter, FmtContext, FormatEvent, FormatFields, - FormattedFields, MakeWriter, - }, - layer::SubscriberExt, - registry::LookupSpan, - util::SubscriberInitExt, - Registry, -}; -use tracing_tree::HierarchicalLayer; - -pub(crate) struct LoggerConfig { - pub(crate) log_file: Option, - pub(crate) filter: String, - pub(crate) chalk_filter: Option, -} - -struct MakeWriterStderr; - -impl MakeWriter<'_> for MakeWriterStderr { - type Writer = Stderr; - - fn make_writer(&self) -> Self::Writer { - io::stderr() - } -} - -impl LoggerConfig { - pub(crate) fn init(self) -> anyhow::Result<()> { - let mut filter: Targets = self - .filter - .parse() - .with_context(|| format!("invalid log filter: `{}`", self.filter))?; - - let mut chalk_layer = None; - if let Some(chalk_filter) = self.chalk_filter { - let level: LevelFilter = - chalk_filter.parse().with_context(|| "invalid chalk log filter")?; - chalk_layer = Some( - HierarchicalLayer::default() - .with_indent_lines(true) - .with_ansi(false) - .with_indent_amount(2) - .with_writer(io::stderr), - ); - filter = filter - .with_target("chalk_solve", level) - .with_target("chalk_ir", level) - .with_target("chalk_recursive", level); - }; - - let writer = match self.log_file { - Some(file) => BoxMakeWriter::new(Arc::new(file)), - None => BoxMakeWriter::new(io::stderr), - }; - let ra_fmt_layer = - tracing_subscriber::fmt::layer().event_format(LoggerFormatter).with_writer(writer); - - let registry = Registry::default().with(filter).with(ra_fmt_layer); - match chalk_layer { - Some(chalk_layer) => registry.with(chalk_layer).init(), - None => registry.init(), - } - Ok(()) - } -} - -#[derive(Debug)] -struct LoggerFormatter; - -impl FormatEvent for LoggerFormatter -where - S: Subscriber + for<'a> LookupSpan<'a>, - N: for<'a> FormatFields<'a> + 'static, -{ - fn format_event( - &self, - ctx: &FmtContext<'_, S, N>, - mut writer: Writer<'_>, - event: &Event<'_>, - ) -> fmt::Result { - // Write level and target - let level = *event.metadata().level(); - - // If this event is issued from `log` crate, then the value of target is - // always "log". 
`tracing-log` has hard coded it for some reason, so we - // need to extract it using `normalized_metadata` method which is part of - // `tracing_log::NormalizeEvent`. - let target = match event.normalized_metadata() { - // This event is issued from `log` crate - Some(log) => log.target(), - None => event.metadata().target(), - }; - write!(writer, "[{level} {target}] ")?; - - // Write spans and fields of each span - ctx.visit_spans(|span| { - write!(writer, "{}", span.name())?; - - let ext = span.extensions(); - - // `FormattedFields` is a formatted representation of the span's - // fields, which is stored in its extensions by the `fmt` layer's - // `new_span` method. The fields will have been formatted - // by the same field formatter that's provided to the event - // formatter in the `FmtContext`. - let fields = &ext.get::>().expect("will never be `None`"); - - if !fields.is_empty() { - write!(writer, "{{{fields}}}")?; - } - write!(writer, ": ")?; - - Ok(()) - })?; - - // Write fields on the event - ctx.field_format().format_fields(writer.by_ref(), event)?; - - writeln!(writer) - } -} diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs index 04387291907b7..66b680571a97d 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs @@ -7,14 +7,14 @@ #[cfg(feature = "in-rust-tree")] extern crate rustc_driver as _; -mod logger; mod rustc_wrapper; -use std::{env, fs, path::PathBuf, process}; +use std::{env, fs, path::PathBuf, process, sync::Arc}; use anyhow::Context; use lsp_server::Connection; use rust_analyzer::{cli::flags, config::Config, from_json}; +use tracing_subscriber::fmt::writer::BoxMakeWriter; use vfs::AbsPathBuf; #[cfg(feature = "mimalloc")] @@ -123,26 +123,21 @@ fn setup_logging(log_file_flag: Option) -> anyhow::Result<()> { None => None, }; - logger::LoggerConfig { - log_file, + let writer = match log_file { + Some(file) => BoxMakeWriter::new(Arc::new(file)), + None => BoxMakeWriter::new(std::io::stderr), + }; + + rust_analyzer::tracing::Config { + writer, // Deliberately enable all `error` logs if the user has not set RA_LOG, as there is usually // useful information in there for debugging. filter: env::var("RA_LOG").ok().unwrap_or_else(|| "error".to_string()), - // The meaning of CHALK_DEBUG I suspected is to tell chalk crates - // (i.e. chalk-solve, chalk-ir, chalk-recursive) how to filter tracing - // logs. But now we can only have just one filter, which means we have to - // merge chalk filter to our main filter (from RA_LOG env). - // - // The acceptable syntax of CHALK_DEBUG is `target[span{field=value}]=level`. - // As the value should only affect chalk crates, we'd better manually - // specify the target. And for simplicity, CHALK_DEBUG only accept the value - // that specify level. 
chalk_filter: env::var("CHALK_DEBUG").ok(), + profile_filter: env::var("RA_PROFILE").ok(), } .init()?; - profile::init(); - Ok(()) } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs index f42e14f2e51ea..31bdd2a0e82b1 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -58,12 +58,14 @@ impl flags::AnalysisStats { Rand32::new(seed) }; - let mut cargo_config = CargoConfig::default(); - cargo_config.sysroot = match self.no_sysroot { - true => None, - false => Some(RustLibSource::Discover), + let cargo_config = CargoConfig { + sysroot: match self.no_sysroot { + true => None, + false => Some(RustLibSource::Discover), + }, + sysroot_query_metadata: self.query_sysroot_metadata, + ..Default::default() }; - cargo_config.sysroot_query_metadata = self.query_sysroot_metadata; let no_progress = &|_| (); let mut db_load_sw = self.stop_watch(); @@ -302,13 +304,13 @@ impl flags::AnalysisStats { let mut fail = 0; for &c in consts { all += 1; - let Err(e) = c.render_eval(db) else { + let Err(error) = c.render_eval(db) else { continue; }; if verbosity.is_spammy() { let full_name = full_name_of_item(db, c.module(db), c.name(db).unwrap_or(Name::missing())); - println!("Const eval for {full_name} failed due {e:?}"); + println!("Const eval for {full_name} failed due {error:?}"); } fail += 1; } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs index 0182cf5402e56..6d2e97be20e50 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs @@ -13,8 +13,8 @@ use crate::cli::flags; impl flags::Diagnostics { pub fn run(self) -> anyhow::Result<()> { - let mut cargo_config = CargoConfig::default(); - cargo_config.sysroot = Some(RustLibSource::Discover); + let cargo_config = + CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() }; let with_proc_macro_server = if let Some(p) = &self.proc_macro_srv { let path = vfs::AbsPathBuf::assert(std::env::current_dir()?.join(p)); ProcMacroServerChoice::Explicit(path) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs index cc9e2a7ce26f8..252b1e1a48581 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs @@ -71,7 +71,7 @@ xflags::xflags! { optional --with-deps /// Don't load sysroot crates (`std`, `core` & friends). optional --no-sysroot - /// Run cargo metadata on the sysroot to analyze its third-pary dependencies. + /// Run cargo metadata on the sysroot to analyze its third-party dependencies. /// Requires --no-sysroot to not be set. 
optional --query-sysroot-metadata diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs index 2138ecead53f4..64f965e22ac70 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs @@ -287,8 +287,8 @@ impl flags::Lsif { pub fn run(self) -> anyhow::Result<()> { eprintln!("Generating LSIF started..."); let now = Instant::now(); - let mut cargo_config = CargoConfig::default(); - cargo_config.sysroot = Some(RustLibSource::Discover); + let cargo_config = + CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() }; let no_progress = &|_| (); let load_cargo_config = LoadCargoConfig { load_out_dirs_from_check: true, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/parse.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/parse.rs index 5ef8cdff4cf25..757f2dd70cad9 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/parse.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/parse.rs @@ -5,7 +5,7 @@ use crate::cli::{flags, read_stdin}; impl flags::Parse { pub fn run(self) -> anyhow::Result<()> { - let _p = profile::span("parsing"); + let _p = tracing::span!(tracing::Level::INFO, "parsing").entered(); let text = read_stdin()?; let file = SourceFile::parse(&text).tree(); if !self.no_dump { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs index e1704199151a6..d07dcdec25105 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs @@ -13,8 +13,8 @@ use crate::cli::{flags, full_name_of_item, Result}; impl flags::RunTests { pub fn run(self) -> Result<()> { - let mut cargo_config = CargoConfig::default(); - cargo_config.sysroot = Some(RustLibSource::Discover); + let cargo_config = + CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() }; let load_cargo_config = LoadCargoConfig { load_out_dirs_from_check: true, with_proc_macro_server: ProcMacroServerChoice::Sysroot, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs index 522eb53128fb7..be7e434acac25 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs @@ -59,8 +59,8 @@ impl Tester { path.push("ra-rustc-test.rs"); let tmp_file = AbsPathBuf::try_from(path).unwrap(); std::fs::write(&tmp_file, "")?; - let mut cargo_config = CargoConfig::default(); - cargo_config.sysroot = Some(RustLibSource::Discover); + let cargo_config = + CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() }; let workspace = ProjectWorkspace::DetachedFiles { files: vec![tmp_file.clone()], sysroot: Ok(Sysroot::discover( diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs index c9cf40db3a4cb..81622a4617ab9 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs @@ -143,11 +143,14 @@ impl flags::Scip { .map(|hover| hover.markup.as_str()) .filter(|it| !it.is_empty()) .map(|it| vec![it.to_owned()]); + let position_encoding = + 
scip_types::PositionEncoding::UTF8CodeUnitOffsetFromLineStart.into(); let signature_documentation = token.signature.clone().map(|text| scip_types::Document { relative_path: relative_path.clone(), language: "rust".to_string(), text, + position_encoding, ..Default::default() }); let symbol_info = scip_types::SymbolInformation { @@ -181,13 +184,16 @@ impl flags::Scip { continue; } + let position_encoding = + scip_types::PositionEncoding::UTF8CodeUnitOffsetFromLineStart.into(); documents.push(scip_types::Document { relative_path, language: "rust".to_string(), occurrences, symbols, - special_fields: Default::default(), text: String::new(), + position_encoding, + special_fields: Default::default(), }); } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs index f87dcb889a48b..8f11d82f8fd93 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs @@ -10,8 +10,8 @@ use crate::cli::flags; impl flags::Ssr { pub fn run(self) -> anyhow::Result<()> { use ide_db::base_db::SourceDatabaseExt; - let mut cargo_config = CargoConfig::default(); - cargo_config.sysroot = Some(RustLibSource::Discover); + let cargo_config = + CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() }; let load_cargo_config = LoadCargoConfig { load_out_dirs_from_check: true, with_proc_macro_server: ProcMacroServerChoice::Sysroot, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs index 3c1b464c3c1f5..815f6ea12e86c 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs @@ -32,6 +32,7 @@ use project_model::{ }; use rustc_hash::{FxHashMap, FxHashSet}; use serde::{de::DeserializeOwned, Deserialize}; +use stdx::format_to_acc; use vfs::{AbsPath, AbsPathBuf}; use crate::{ @@ -494,6 +495,9 @@ config_data! { /// Exclude imports from find-all-references. references_excludeImports: bool = "false", + /// Exclude tests from find-all-references. + references_excludeTests: bool = "false", + /// Allow renaming of items not belonging to the loaded workspaces. 
rename_allowExternalItems: bool = "false", @@ -1545,6 +1549,10 @@ impl Config { self.data.references_excludeImports } + pub fn find_all_refs_exclude_tests(&self) -> bool { + self.data.references_excludeTests + } + pub fn snippet_cap(&self) -> bool { self.experimental("snippetTextEdit") } @@ -1737,7 +1745,7 @@ impl Config { } pub fn main_loop_num_threads(&self) -> usize { - self.data.numThreads.unwrap_or(num_cpus::get_physical().try_into().unwrap_or(1)) + self.data.numThreads.unwrap_or(num_cpus::get_physical()) } pub fn typing_autoclose_angle(&self) -> bool { @@ -2556,14 +2564,13 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json #[cfg(test)] fn manual(fields: &[(&'static str, &'static str, &[&str], &str)]) -> String { - fields - .iter() - .map(|(field, _ty, doc, default)| { - let name = format!("rust-analyzer.{}", field.replace('_', ".")); - let doc = doc_comment_to_string(doc); - if default.contains('\n') { - format!( - r#"[[{name}]]{name}:: + fields.iter().fold(String::new(), |mut acc, (field, _ty, doc, default)| { + let name = format!("rust-analyzer.{}", field.replace('_', ".")); + let doc = doc_comment_to_string(doc); + if default.contains('\n') { + format_to_acc!( + acc, + r#"[[{name}]]{name}:: + -- Default: @@ -2573,16 +2580,17 @@ Default: {doc} -- "# - ) - } else { - format!("[[{name}]]{name} (default: `{default}`)::\n+\n--\n{doc}--\n") - } - }) - .collect::() + ) + } else { + format_to_acc!(acc, "[[{name}]]{name} (default: `{default}`)::\n+\n--\n{doc}--\n") + } + }) } fn doc_comment_to_string(doc: &[&str]) -> String { - doc.iter().map(|it| it.strip_prefix(' ').unwrap_or(it)).map(|it| format!("{it}\n")).collect() + doc.iter() + .map(|it| it.strip_prefix(' ').unwrap_or(it)) + .fold(String::new(), |mut acc, it| format_to_acc!(acc, "{it}\n")) } #[cfg(test)] diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs index ab3881f438b2a..c91b22999dede 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs @@ -128,7 +128,7 @@ pub(crate) fn fetch_native_diagnostics( snapshot: GlobalStateSnapshot, subscriptions: Vec, ) -> Vec<(FileId, Vec)> { - let _p = profile::span("fetch_native_diagnostics"); + let _p = tracing::span!(tracing::Level::INFO, "fetch_native_diagnostics").entered(); let _ctx = stdx::panic_context::enter("fetch_native_diagnostics".to_owned()); let convert_diagnostic = diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs index 7da4311888181..fa856a796a8ef 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs @@ -1,5 +1,8 @@ //! See [RequestDispatcher]. 
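// Illustrative aside, not part of the patch: the `fold` + `format_to_acc!`
// rewrites in config.rs above avoid allocating a fresh `String` per element,
// which is what `.map(|it| format!(...)).collect::<String>()` does (the shape
// flagged by clippy's `format_collect` lint). Assuming `format_to_acc!` is the
// stdx helper wrapping this pattern, the plain-std equivalent looks roughly like:
use std::fmt::Write;
fn join_lines(items: &[&str]) -> String {
    items.iter().fold(String::new(), |mut acc, it| {
        // `writeln!` appends into the existing buffer instead of building a new String.
        let _ = writeln!(acc, "{it}");
        acc
    })
}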
-use std::{fmt, panic, thread}; +use std::{ + fmt::{self, Debug}, + panic, thread, +}; use ide::Cancelled; use lsp_server::ExtractError; @@ -49,6 +52,8 @@ impl RequestDispatcher<'_> { Some(it) => it, None => return self, }; + let _guard = tracing::span!(tracing::Level::INFO, "request", method = ?req.method, "request_id" = ?req.id).entered(); + tracing::debug!(?params); let result = { let _pctx = stdx::panic_context::enter(panic_context); f(self.global_state, params) @@ -74,6 +79,8 @@ impl RequestDispatcher<'_> { Some(it) => it, None => return self, }; + let _guard = tracing::span!(tracing::Level::INFO, "request", method = ?req.method, "request_id" = ?req.id).entered(); + tracing::debug!(?params); let global_state_snapshot = self.global_state.snapshot(); let result = panic::catch_unwind(move || { @@ -192,6 +199,8 @@ impl RequestDispatcher<'_> { Some(it) => it, None => return self, }; + let _guard = tracing::span!(tracing::Level::INFO, "request", method = ?req.method, "request_id" = ?req.id).entered(); + tracing::debug!(?params); let world = self.global_state.snapshot(); if MAIN_POOL { @@ -313,12 +322,16 @@ impl NotificationDispatcher<'_> { ) -> anyhow::Result<&mut Self> where N: lsp_types::notification::Notification, - N::Params: DeserializeOwned + Send, + N::Params: DeserializeOwned + Send + Debug, { let not = match self.not.take() { Some(it) => it, None => return Ok(self), }; + + let _guard = + tracing::span!(tracing::Level::INFO, "notification", method = ?not.method).entered(); + let params = match not.extract::(N::METHOD) { Ok(it) => it, Err(ExtractError::JsonError { method, error }) => { @@ -329,6 +342,9 @@ impl NotificationDispatcher<'_> { return Ok(self); } }; + + tracing::debug!(?params); + let _pctx = stdx::panic_context::enter(format!( "\nversion: {}\nnotification: {}", version(), diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs index 232c03ae6c4fe..2f226d01155bd 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs @@ -215,7 +215,7 @@ impl GlobalState { } pub(crate) fn process_changes(&mut self) -> bool { - let _p = profile::span("GlobalState::process_changes"); + let _p = tracing::span!(tracing::Level::INFO, "GlobalState::process_changes").entered(); let mut file_changes = FxHashMap::<_, (bool, ChangedFile)>::default(); let (change, modified_rust_files, workspace_structure_change) = { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs index c556fdee504b8..1f24e95010571 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs @@ -54,7 +54,7 @@ pub(crate) fn handle_did_open_text_document( state: &mut GlobalState, params: DidOpenTextDocumentParams, ) -> anyhow::Result<()> { - let _p = profile::span("handle_did_open_text_document"); + let _p = tracing::span!(tracing::Level::INFO, "handle_did_open_text_document").entered(); if let Ok(path) = from_proto::vfs_path(¶ms.text_document.uri) { let already_exists = state @@ -79,7 +79,7 @@ pub(crate) fn handle_did_change_text_document( state: &mut GlobalState, params: DidChangeTextDocumentParams, ) -> anyhow::Result<()> { - let _p = profile::span("handle_did_change_text_document"); + let _p = tracing::span!(tracing::Level::INFO, 
"handle_did_change_text_document").entered(); if let Ok(path) = from_proto::vfs_path(¶ms.text_document.uri) { let data = match state.mem_docs.get_mut(&path) { @@ -113,7 +113,7 @@ pub(crate) fn handle_did_close_text_document( state: &mut GlobalState, params: DidCloseTextDocumentParams, ) -> anyhow::Result<()> { - let _p = profile::span("handle_did_close_text_document"); + let _p = tracing::span!(tracing::Level::INFO, "handle_did_close_text_document").entered(); if let Ok(path) = from_proto::vfs_path(¶ms.text_document.uri) { if state.mem_docs.remove(&path).is_err() { @@ -247,7 +247,7 @@ pub(crate) fn handle_did_change_watched_files( } fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { - let _p = profile::span("run_flycheck"); + let _p = tracing::span!(tracing::Level::INFO, "run_flycheck").entered(); let file_id = state.vfs.read().0.file_id(&vfs_path); if let Some(file_id) = file_id { @@ -326,13 +326,13 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { } pub(crate) fn handle_cancel_flycheck(state: &mut GlobalState, _: ()) -> anyhow::Result<()> { - let _p = profile::span("handle_stop_flycheck"); + let _p = tracing::span!(tracing::Level::INFO, "handle_stop_flycheck").entered(); state.flycheck.iter().for_each(|flycheck| flycheck.cancel()); Ok(()) } pub(crate) fn handle_clear_flycheck(state: &mut GlobalState, _: ()) -> anyhow::Result<()> { - let _p = profile::span("handle_clear_flycheck"); + let _p = tracing::span!(tracing::Level::INFO, "handle_clear_flycheck").entered(); state.diagnostics.clear_check_all(); Ok(()) } @@ -341,7 +341,7 @@ pub(crate) fn handle_run_flycheck( state: &mut GlobalState, params: RunFlycheckParams, ) -> anyhow::Result<()> { - let _p = profile::span("handle_run_flycheck"); + let _p = tracing::span!(tracing::Level::INFO, "handle_run_flycheck").entered(); if let Some(text_document) = params.text_document { if let Ok(vfs_path) = from_proto::vfs_path(&text_document.uri) { if run_flycheck(state, vfs_path) { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs index 1a55dcebc1350..2be2ba5c44657 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs @@ -70,7 +70,7 @@ pub(crate) fn handle_analyzer_status( snap: GlobalStateSnapshot, params: lsp_ext::AnalyzerStatusParams, ) -> anyhow::Result { - let _p = profile::span("handle_analyzer_status"); + let _p = tracing::span!(tracing::Level::INFO, "handle_analyzer_status").entered(); let mut buf = String::new(); @@ -114,7 +114,7 @@ pub(crate) fn handle_analyzer_status( } pub(crate) fn handle_memory_usage(state: &mut GlobalState, _: ()) -> anyhow::Result { - let _p = profile::span("handle_memory_usage"); + let _p = tracing::span!(tracing::Level::INFO, "handle_memory_usage").entered(); let mem = state.analysis_host.per_query_memory_usage(); let mut out = String::new(); @@ -135,7 +135,7 @@ pub(crate) fn handle_syntax_tree( snap: GlobalStateSnapshot, params: lsp_ext::SyntaxTreeParams, ) -> anyhow::Result { - let _p = profile::span("handle_syntax_tree"); + let _p = tracing::span!(tracing::Level::INFO, "handle_syntax_tree").entered(); let id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let line_index = snap.file_line_index(id)?; let text_range = params.range.and_then(|r| from_proto::text_range(&line_index, r).ok()); @@ -147,7 +147,7 @@ pub(crate) fn handle_view_hir( snap: GlobalStateSnapshot, 
params: lsp_types::TextDocumentPositionParams, ) -> anyhow::Result { - let _p = profile::span("handle_view_hir"); + let _p = tracing::span!(tracing::Level::INFO, "handle_view_hir").entered(); let position = from_proto::file_position(&snap, params)?; let res = snap.analysis.view_hir(position)?; Ok(res) @@ -157,7 +157,7 @@ pub(crate) fn handle_view_mir( snap: GlobalStateSnapshot, params: lsp_types::TextDocumentPositionParams, ) -> anyhow::Result { - let _p = profile::span("handle_view_mir"); + let _p = tracing::span!(tracing::Level::INFO, "handle_view_mir").entered(); let position = from_proto::file_position(&snap, params)?; let res = snap.analysis.view_mir(position)?; Ok(res) @@ -167,7 +167,7 @@ pub(crate) fn handle_interpret_function( snap: GlobalStateSnapshot, params: lsp_types::TextDocumentPositionParams, ) -> anyhow::Result { - let _p = profile::span("handle_interpret_function"); + let _p = tracing::span!(tracing::Level::INFO, "handle_interpret_function").entered(); let position = from_proto::file_position(&snap, params)?; let res = snap.analysis.interpret_function(position)?; Ok(res) @@ -185,7 +185,7 @@ pub(crate) fn handle_view_item_tree( snap: GlobalStateSnapshot, params: lsp_ext::ViewItemTreeParams, ) -> anyhow::Result { - let _p = profile::span("handle_view_item_tree"); + let _p = tracing::span!(tracing::Level::INFO, "handle_view_item_tree").entered(); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let res = snap.analysis.view_item_tree(file_id)?; Ok(res) @@ -195,7 +195,7 @@ pub(crate) fn handle_view_crate_graph( snap: GlobalStateSnapshot, params: ViewCrateGraphParams, ) -> anyhow::Result { - let _p = profile::span("handle_view_crate_graph"); + let _p = tracing::span!(tracing::Level::INFO, "handle_view_crate_graph").entered(); let dot = snap.analysis.view_crate_graph(params.full)?.map_err(anyhow::Error::msg)?; Ok(dot) } @@ -204,7 +204,7 @@ pub(crate) fn handle_expand_macro( snap: GlobalStateSnapshot, params: lsp_ext::ExpandMacroParams, ) -> anyhow::Result> { - let _p = profile::span("handle_expand_macro"); + let _p = tracing::span!(tracing::Level::INFO, "handle_expand_macro").entered(); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let line_index = snap.file_line_index(file_id)?; let offset = from_proto::offset(&line_index, params.position)?; @@ -217,7 +217,7 @@ pub(crate) fn handle_selection_range( snap: GlobalStateSnapshot, params: lsp_types::SelectionRangeParams, ) -> anyhow::Result>> { - let _p = profile::span("handle_selection_range"); + let _p = tracing::span!(tracing::Level::INFO, "handle_selection_range").entered(); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let line_index = snap.file_line_index(file_id)?; let res: anyhow::Result> = params @@ -260,7 +260,7 @@ pub(crate) fn handle_matching_brace( snap: GlobalStateSnapshot, params: lsp_ext::MatchingBraceParams, ) -> anyhow::Result> { - let _p = profile::span("handle_matching_brace"); + let _p = tracing::span!(tracing::Level::INFO, "handle_matching_brace").entered(); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let line_index = snap.file_line_index(file_id)?; params @@ -283,7 +283,7 @@ pub(crate) fn handle_join_lines( snap: GlobalStateSnapshot, params: lsp_ext::JoinLinesParams, ) -> anyhow::Result> { - let _p = profile::span("handle_join_lines"); + let _p = tracing::span!(tracing::Level::INFO, "handle_join_lines").entered(); let config = snap.config.join_lines(); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; @@ -308,7 +308,7 
@@ pub(crate) fn handle_on_enter( snap: GlobalStateSnapshot, params: lsp_types::TextDocumentPositionParams, ) -> anyhow::Result>> { - let _p = profile::span("handle_on_enter"); + let _p = tracing::span!(tracing::Level::INFO, "handle_on_enter").entered(); let position = from_proto::file_position(&snap, params)?; let edit = match snap.analysis.on_enter(position)? { None => return Ok(None), @@ -323,7 +323,7 @@ pub(crate) fn handle_on_type_formatting( snap: GlobalStateSnapshot, params: lsp_types::DocumentOnTypeFormattingParams, ) -> anyhow::Result>> { - let _p = profile::span("handle_on_type_formatting"); + let _p = tracing::span!(tracing::Level::INFO, "handle_on_type_formatting").entered(); let mut position = from_proto::file_position(&snap, params.text_document_position)?; let line_index = snap.file_line_index(position.file_id)?; @@ -364,7 +364,7 @@ pub(crate) fn handle_document_symbol( snap: GlobalStateSnapshot, params: lsp_types::DocumentSymbolParams, ) -> anyhow::Result> { - let _p = profile::span("handle_document_symbol"); + let _p = tracing::span!(tracing::Level::INFO, "handle_document_symbol").entered(); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let line_index = snap.file_line_index(file_id)?; @@ -453,7 +453,7 @@ pub(crate) fn handle_workspace_symbol( snap: GlobalStateSnapshot, params: WorkspaceSymbolParams, ) -> anyhow::Result> { - let _p = profile::span("handle_workspace_symbol"); + let _p = tracing::span!(tracing::Level::INFO, "handle_workspace_symbol").entered(); let config = snap.config.workspace_symbol(); let (all_symbols, libs) = decide_search_scope_and_kind(¶ms, &config); @@ -545,7 +545,7 @@ pub(crate) fn handle_will_rename_files( snap: GlobalStateSnapshot, params: lsp_types::RenameFilesParams, ) -> anyhow::Result> { - let _p = profile::span("handle_will_rename_files"); + let _p = tracing::span!(tracing::Level::INFO, "handle_will_rename_files").entered(); let source_changes: Vec = params .files @@ -607,7 +607,7 @@ pub(crate) fn handle_goto_definition( snap: GlobalStateSnapshot, params: lsp_types::GotoDefinitionParams, ) -> anyhow::Result> { - let _p = profile::span("handle_goto_definition"); + let _p = tracing::span!(tracing::Level::INFO, "handle_goto_definition").entered(); let position = from_proto::file_position(&snap, params.text_document_position_params)?; let nav_info = match snap.analysis.goto_definition(position)? { None => return Ok(None), @@ -622,7 +622,7 @@ pub(crate) fn handle_goto_declaration( snap: GlobalStateSnapshot, params: lsp_types::request::GotoDeclarationParams, ) -> anyhow::Result> { - let _p = profile::span("handle_goto_declaration"); + let _p = tracing::span!(tracing::Level::INFO, "handle_goto_declaration").entered(); let position = from_proto::file_position(&snap, params.text_document_position_params.clone())?; let nav_info = match snap.analysis.goto_declaration(position)? { None => return handle_goto_definition(snap, params), @@ -637,7 +637,7 @@ pub(crate) fn handle_goto_implementation( snap: GlobalStateSnapshot, params: lsp_types::request::GotoImplementationParams, ) -> anyhow::Result> { - let _p = profile::span("handle_goto_implementation"); + let _p = tracing::span!(tracing::Level::INFO, "handle_goto_implementation").entered(); let position = from_proto::file_position(&snap, params.text_document_position_params)?; let nav_info = match snap.analysis.goto_implementation(position)? 
{ None => return Ok(None), @@ -652,7 +652,7 @@ pub(crate) fn handle_goto_type_definition( snap: GlobalStateSnapshot, params: lsp_types::request::GotoTypeDefinitionParams, ) -> anyhow::Result> { - let _p = profile::span("handle_goto_type_definition"); + let _p = tracing::span!(tracing::Level::INFO, "handle_goto_type_definition").entered(); let position = from_proto::file_position(&snap, params.text_document_position_params)?; let nav_info = match snap.analysis.goto_type_definition(position)? { None => return Ok(None), @@ -667,7 +667,7 @@ pub(crate) fn handle_parent_module( snap: GlobalStateSnapshot, params: lsp_types::TextDocumentPositionParams, ) -> anyhow::Result> { - let _p = profile::span("handle_parent_module"); + let _p = tracing::span!(tracing::Level::INFO, "handle_parent_module").entered(); if let Ok(file_path) = ¶ms.text_document.uri.to_file_path() { if file_path.file_name().unwrap_or_default() == "Cargo.toml" { // search workspaces for parent packages or fallback to workspace root @@ -734,7 +734,7 @@ pub(crate) fn handle_runnables( snap: GlobalStateSnapshot, params: lsp_ext::RunnablesParams, ) -> anyhow::Result> { - let _p = profile::span("handle_runnables"); + let _p = tracing::span!(tracing::Level::INFO, "handle_runnables").entered(); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let line_index = snap.file_line_index(file_id)?; let offset = params.position.and_then(|it| from_proto::offset(&line_index, it).ok()); @@ -829,7 +829,7 @@ pub(crate) fn handle_related_tests( snap: GlobalStateSnapshot, params: lsp_types::TextDocumentPositionParams, ) -> anyhow::Result> { - let _p = profile::span("handle_related_tests"); + let _p = tracing::span!(tracing::Level::INFO, "handle_related_tests").entered(); let position = from_proto::file_position(&snap, params)?; let tests = snap.analysis.related_tests(position, None)?; @@ -847,7 +847,7 @@ pub(crate) fn handle_completion( snap: GlobalStateSnapshot, params: lsp_types::CompletionParams, ) -> anyhow::Result> { - let _p = profile::span("handle_completion"); + let _p = tracing::span!(tracing::Level::INFO, "handle_completion").entered(); let text_document_position = params.text_document_position.clone(); let position = from_proto::file_position(&snap, params.text_document_position)?; let completion_trigger_character = @@ -875,7 +875,7 @@ pub(crate) fn handle_completion_resolve( snap: GlobalStateSnapshot, mut original_completion: CompletionItem, ) -> anyhow::Result { - let _p = profile::span("handle_completion_resolve"); + let _p = tracing::span!(tracing::Level::INFO, "handle_completion_resolve").entered(); if !all_edits_are_disjoint(&original_completion, &[]) { return Err(invalid_params_error( @@ -931,7 +931,7 @@ pub(crate) fn handle_folding_range( snap: GlobalStateSnapshot, params: FoldingRangeParams, ) -> anyhow::Result>> { - let _p = profile::span("handle_folding_range"); + let _p = tracing::span!(tracing::Level::INFO, "handle_folding_range").entered(); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let folds = snap.analysis.folding_ranges(file_id)?; let text = snap.analysis.file_text(file_id)?; @@ -948,7 +948,7 @@ pub(crate) fn handle_signature_help( snap: GlobalStateSnapshot, params: lsp_types::SignatureHelpParams, ) -> anyhow::Result> { - let _p = profile::span("handle_signature_help"); + let _p = tracing::span!(tracing::Level::INFO, "handle_signature_help").entered(); let position = from_proto::file_position(&snap, params.text_document_position_params)?; let help = match 
snap.analysis.signature_help(position)? { Some(it) => it, @@ -963,7 +963,7 @@ pub(crate) fn handle_hover( snap: GlobalStateSnapshot, params: lsp_ext::HoverParams, ) -> anyhow::Result> { - let _p = profile::span("handle_hover"); + let _p = tracing::span!(tracing::Level::INFO, "handle_hover").entered(); let range = match params.position { PositionOrRange::Position(position) => Range::new(position, position), PositionOrRange::Range(range) => range, @@ -1000,7 +1000,7 @@ pub(crate) fn handle_prepare_rename( snap: GlobalStateSnapshot, params: lsp_types::TextDocumentPositionParams, ) -> anyhow::Result> { - let _p = profile::span("handle_prepare_rename"); + let _p = tracing::span!(tracing::Level::INFO, "handle_prepare_rename").entered(); let position = from_proto::file_position(&snap, params)?; let change = snap.analysis.prepare_rename(position)?.map_err(to_proto::rename_error)?; @@ -1014,7 +1014,7 @@ pub(crate) fn handle_rename( snap: GlobalStateSnapshot, params: RenameParams, ) -> anyhow::Result> { - let _p = profile::span("handle_rename"); + let _p = tracing::span!(tracing::Level::INFO, "handle_rename").entered(); let position = from_proto::file_position(&snap, params.text_document_position)?; let mut change = snap @@ -1051,10 +1051,11 @@ pub(crate) fn handle_references( snap: GlobalStateSnapshot, params: lsp_types::ReferenceParams, ) -> anyhow::Result>> { - let _p = profile::span("handle_references"); + let _p = tracing::span!(tracing::Level::INFO, "handle_references").entered(); let position = from_proto::file_position(&snap, params.text_document_position)?; let exclude_imports = snap.config.find_all_refs_exclude_imports(); + let exclude_tests = snap.config.find_all_refs_exclude_tests(); let refs = match snap.analysis.find_all_refs(position, None)? { None => return Ok(None), @@ -1078,7 +1079,8 @@ pub(crate) fn handle_references( .flat_map(|(file_id, refs)| { refs.into_iter() .filter(|&(_, category)| { - !exclude_imports || category != Some(ReferenceCategory::Import) + (!exclude_imports || category != Some(ReferenceCategory::Import)) + && (!exclude_tests || category != Some(ReferenceCategory::Test)) }) .map(move |(range, _)| FileRange { file_id, range }) }) @@ -1094,7 +1096,7 @@ pub(crate) fn handle_formatting( snap: GlobalStateSnapshot, params: lsp_types::DocumentFormattingParams, ) -> anyhow::Result>> { - let _p = profile::span("handle_formatting"); + let _p = tracing::span!(tracing::Level::INFO, "handle_formatting").entered(); run_rustfmt(&snap, params.text_document, None) } @@ -1103,7 +1105,7 @@ pub(crate) fn handle_range_formatting( snap: GlobalStateSnapshot, params: lsp_types::DocumentRangeFormattingParams, ) -> anyhow::Result>> { - let _p = profile::span("handle_range_formatting"); + let _p = tracing::span!(tracing::Level::INFO, "handle_range_formatting").entered(); run_rustfmt(&snap, params.text_document, Some(params.range)) } @@ -1112,7 +1114,7 @@ pub(crate) fn handle_code_action( snap: GlobalStateSnapshot, params: lsp_types::CodeActionParams, ) -> anyhow::Result>> { - let _p = profile::span("handle_code_action"); + let _p = tracing::span!(tracing::Level::INFO, "handle_code_action").entered(); if !snap.config.code_action_literals() { // We intentionally don't support command-based actions, as those either @@ -1186,7 +1188,7 @@ pub(crate) fn handle_code_action_resolve( snap: GlobalStateSnapshot, mut code_action: lsp_ext::CodeAction, ) -> anyhow::Result { - let _p = profile::span("handle_code_action_resolve"); + let _p = tracing::span!(tracing::Level::INFO, 
"handle_code_action_resolve").entered(); let params = match code_action.data.take() { Some(it) => it, None => return Err(invalid_params_error("code action without data".to_string()).into()), @@ -1276,7 +1278,7 @@ pub(crate) fn handle_code_lens( snap: GlobalStateSnapshot, params: lsp_types::CodeLensParams, ) -> anyhow::Result>> { - let _p = profile::span("handle_code_lens"); + let _p = tracing::span!(tracing::Level::INFO, "handle_code_lens").entered(); let lens_config = snap.config.lens(); if lens_config.none() { @@ -1346,7 +1348,7 @@ pub(crate) fn handle_document_highlight( snap: GlobalStateSnapshot, params: lsp_types::DocumentHighlightParams, ) -> anyhow::Result>> { - let _p = profile::span("handle_document_highlight"); + let _p = tracing::span!(tracing::Level::INFO, "handle_document_highlight").entered(); let position = from_proto::file_position(&snap, params.text_document_position_params)?; let line_index = snap.file_line_index(position.file_id)?; @@ -1368,7 +1370,7 @@ pub(crate) fn handle_ssr( snap: GlobalStateSnapshot, params: lsp_ext::SsrParams, ) -> anyhow::Result { - let _p = profile::span("handle_ssr"); + let _p = tracing::span!(tracing::Level::INFO, "handle_ssr").entered(); let selections = params .selections .iter() @@ -1388,7 +1390,7 @@ pub(crate) fn handle_inlay_hints( snap: GlobalStateSnapshot, params: InlayHintParams, ) -> anyhow::Result>> { - let _p = profile::span("handle_inlay_hints"); + let _p = tracing::span!(tracing::Level::INFO, "handle_inlay_hints").entered(); let document_uri = ¶ms.text_document.uri; let FileRange { file_id, range } = from_proto::file_range( &snap, @@ -1418,7 +1420,7 @@ pub(crate) fn handle_inlay_hints_resolve( snap: GlobalStateSnapshot, mut original_hint: InlayHint, ) -> anyhow::Result { - let _p = profile::span("handle_inlay_hints_resolve"); + let _p = tracing::span!(tracing::Level::INFO, "handle_inlay_hints_resolve").entered(); let data = match original_hint.data.take() { Some(it) => it, @@ -1465,7 +1467,7 @@ pub(crate) fn handle_call_hierarchy_prepare( snap: GlobalStateSnapshot, params: CallHierarchyPrepareParams, ) -> anyhow::Result>> { - let _p = profile::span("handle_call_hierarchy_prepare"); + let _p = tracing::span!(tracing::Level::INFO, "handle_call_hierarchy_prepare").entered(); let position = from_proto::file_position(&snap, params.text_document_position_params)?; let nav_info = match snap.analysis.call_hierarchy(position)? 
{ @@ -1487,7 +1489,7 @@ pub(crate) fn handle_call_hierarchy_incoming( snap: GlobalStateSnapshot, params: CallHierarchyIncomingCallsParams, ) -> anyhow::Result>> { - let _p = profile::span("handle_call_hierarchy_incoming"); + let _p = tracing::span!(tracing::Level::INFO, "handle_call_hierarchy_incoming").entered(); let item = params.item; let doc = TextDocumentIdentifier::new(item.uri); @@ -1522,7 +1524,7 @@ pub(crate) fn handle_call_hierarchy_outgoing( snap: GlobalStateSnapshot, params: CallHierarchyOutgoingCallsParams, ) -> anyhow::Result>> { - let _p = profile::span("handle_call_hierarchy_outgoing"); + let _p = tracing::span!(tracing::Level::INFO, "handle_call_hierarchy_outgoing").entered(); let item = params.item; let doc = TextDocumentIdentifier::new(item.uri); @@ -1557,7 +1559,7 @@ pub(crate) fn handle_semantic_tokens_full( snap: GlobalStateSnapshot, params: SemanticTokensParams, ) -> anyhow::Result> { - let _p = profile::span("handle_semantic_tokens_full"); + let _p = tracing::span!(tracing::Level::INFO, "handle_semantic_tokens_full").entered(); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let text = snap.analysis.file_text(file_id)?; @@ -1587,7 +1589,7 @@ pub(crate) fn handle_semantic_tokens_full_delta( snap: GlobalStateSnapshot, params: SemanticTokensDeltaParams, ) -> anyhow::Result> { - let _p = profile::span("handle_semantic_tokens_full_delta"); + let _p = tracing::span!(tracing::Level::INFO, "handle_semantic_tokens_full_delta").entered(); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let text = snap.analysis.file_text(file_id)?; @@ -1630,7 +1632,7 @@ pub(crate) fn handle_semantic_tokens_range( snap: GlobalStateSnapshot, params: SemanticTokensRangeParams, ) -> anyhow::Result> { - let _p = profile::span("handle_semantic_tokens_range"); + let _p = tracing::span!(tracing::Level::INFO, "handle_semantic_tokens_range").entered(); let frange = from_proto::file_range(&snap, ¶ms.text_document, params.range)?; let text = snap.analysis.file_text(frange.file_id)?; @@ -1656,7 +1658,7 @@ pub(crate) fn handle_open_docs( snap: GlobalStateSnapshot, params: lsp_types::TextDocumentPositionParams, ) -> anyhow::Result { - let _p = profile::span("handle_open_docs"); + let _p = tracing::span!(tracing::Level::INFO, "handle_open_docs").entered(); let position = from_proto::file_position(&snap, params)?; let ws_and_sysroot = snap.workspaces.iter().find_map(|ws| match ws { @@ -1695,7 +1697,7 @@ pub(crate) fn handle_open_cargo_toml( snap: GlobalStateSnapshot, params: lsp_ext::OpenCargoTomlParams, ) -> anyhow::Result> { - let _p = profile::span("handle_open_cargo_toml"); + let _p = tracing::span!(tracing::Level::INFO, "handle_open_cargo_toml").entered(); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let cargo_spec = match CargoTargetSpec::for_file(&snap, file_id)? 
{ @@ -1713,7 +1715,7 @@ pub(crate) fn handle_move_item( snap: GlobalStateSnapshot, params: lsp_ext::MoveItemParams, ) -> anyhow::Result> { - let _p = profile::span("handle_move_item"); + let _p = tracing::span!(tracing::Level::INFO, "handle_move_item").entered(); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let range = from_proto::file_range(&snap, ¶ms.text_document, params.range)?; @@ -1735,7 +1737,7 @@ pub(crate) fn handle_view_recursive_memory_layout( snap: GlobalStateSnapshot, params: lsp_types::TextDocumentPositionParams, ) -> anyhow::Result> { - let _p = profile::span("view_recursive_memory_layout"); + let _p = tracing::span!(tracing::Level::INFO, "view_recursive_memory_layout").entered(); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let line_index = snap.file_line_index(file_id)?; let offset = from_proto::offset(&line_index, params.position)?; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs index d94f7cefa60ee..acc02d6447c63 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs @@ -60,7 +60,7 @@ fn integrated_highlighting_benchmark() { analysis.highlight_as_html(file_id, false).unwrap(); } - profile::init_from("*>100"); + crate::tracing::hprof::init("*>100"); { let _it = stdx::timeit("change"); @@ -152,8 +152,7 @@ fn integrated_completion_benchmark() { analysis.completions(&config, position, None).unwrap(); } - profile::init_from("*>5"); - // let _s = profile::heartbeat_span(); + crate::tracing::hprof::init("*>5"); let completion_offset = { let _it = stdx::timeit("change"); @@ -168,7 +167,7 @@ fn integrated_completion_benchmark() { }; { - let _p = profile::span("unqualified path completion"); + let _p = tracing::span!(tracing::Level::INFO, "unqualified path completion").entered(); let _span = profile::cpu_span(); let analysis = host.analysis(); let config = CompletionConfig { @@ -209,7 +208,7 @@ fn integrated_completion_benchmark() { }; { - let _p = profile::span("dot completion"); + let _p = tracing::span!(tracing::Level::INFO, "dot completion").entered(); let _span = profile::cpu_span(); let analysis = host.analysis(); let config = CompletionConfig { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs index 29bc0b80d8a1d..b1809f58ae700 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs @@ -13,11 +13,6 @@ pub mod cli; -#[allow(unused)] -macro_rules! 
eprintln { - ($($tt:tt)*) => { stdx::eprintln!($($tt)*) }; -} - mod caps; mod cargo_target_spec; mod diagnostics; @@ -37,6 +32,12 @@ mod handlers { pub(crate) mod request; } +pub mod tracing { + pub mod config; + pub use config::Config; + pub mod hprof; +} + pub mod config; pub mod lsp; use self::lsp::ext as lsp_ext; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs index fe381fbeb3f03..d363ac69fdc33 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs @@ -92,6 +92,7 @@ pub(crate) fn document_highlight_kind( ReferenceCategory::Read => Some(lsp_types::DocumentHighlightKind::READ), ReferenceCategory::Write => Some(lsp_types::DocumentHighlightKind::WRITE), ReferenceCategory::Import => None, + ReferenceCategory::Test => None, } } @@ -311,16 +312,14 @@ fn completion_item( set_score(&mut lsp_item, max_relevance, item.relevance); if config.completion().enable_imports_on_the_fly && !item.import_to_add.is_empty() { - let imports: Vec<_> = item + let imports = item .import_to_add .into_iter() - .filter_map(|(import_path, import_name)| { - Some(lsp_ext::CompletionImport { - full_import_path: import_path, - imported_name: import_name, - }) + .map(|(import_path, import_name)| lsp_ext::CompletionImport { + full_import_path: import_path, + imported_name: import_name, }) - .collect(); + .collect::>(); if !imports.is_empty() { let data = lsp_ext::CompletionResolveData { position: tdpp.clone(), imports }; lsp_item.data = Some(to_value(data).unwrap()); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs index 0173805d44779..f3ead6d04f7d7 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs @@ -60,6 +60,17 @@ enum Event { Flycheck(flycheck::Message), } +impl fmt::Display for Event { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Event::Lsp(_) => write!(f, "Event::Lsp"), + Event::Task(_) => write!(f, "Event::Task"), + Event::Vfs(_) => write!(f, "Event::Vfs"), + Event::Flycheck(_) => write!(f, "Event::Flycheck"), + } + } +} + #[derive(Debug)] pub(crate) enum Task { Response(lsp_server::Response), @@ -196,7 +207,8 @@ impl GlobalState { fn handle_event(&mut self, event: Event) -> anyhow::Result<()> { let loop_start = Instant::now(); // NOTE: don't count blocking select! 
call as a loop-turn time - let _p = profile::span("GlobalState::handle_event"); + let _p = tracing::span!(tracing::Level::INFO, "GlobalState::handle_event", event = %event) + .entered(); let event_dbg_msg = format!("{event:?}"); tracing::debug!("{:?} handle_event({})", loop_start, event_dbg_msg); @@ -215,7 +227,8 @@ impl GlobalState { lsp_server::Message::Response(resp) => self.complete_request(resp), }, Event::Task(task) => { - let _p = profile::span("GlobalState::handle_event/task"); + let _p = tracing::span!(tracing::Level::INFO, "GlobalState::handle_event/task") + .entered(); let mut prime_caches_progress = Vec::new(); self.handle_task(&mut prime_caches_progress, task); @@ -269,7 +282,8 @@ impl GlobalState { } } Event::Vfs(message) => { - let _p = profile::span("GlobalState::handle_event/vfs"); + let _p = + tracing::span!(tracing::Level::INFO, "GlobalState::handle_event/vfs").entered(); self.handle_vfs_msg(message); // Coalesce many VFS event into a single loop turn while let Ok(message) = self.loader.receiver.try_recv() { @@ -277,7 +291,8 @@ impl GlobalState { } } Event::Flycheck(message) => { - let _p = profile::span("GlobalState::handle_event/flycheck"); + let _p = tracing::span!(tracing::Level::INFO, "GlobalState::handle_event/flycheck") + .entered(); self.handle_flycheck_msg(message); // Coalesce many flycheck updates into a single loop turn while let Ok(message) = self.flycheck_receiver.try_recv() { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs index 969211f44007c..65c00cc08d1a1 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs @@ -70,7 +70,8 @@ impl GlobalState { } pub(crate) fn update_configuration(&mut self, config: Config) { - let _p = profile::span("GlobalState::update_configuration"); + let _p = + tracing::span!(tracing::Level::INFO, "GlobalState::update_configuration").entered(); let old_config = mem::replace(&mut self.config, Arc::new(config)); if self.config.lru_parse_query_capacity() != old_config.lru_parse_query_capacity() { self.analysis_host.update_lru_capacity(self.config.lru_parse_query_capacity()); @@ -355,7 +356,7 @@ impl GlobalState { } pub(crate) fn switch_workspaces(&mut self, cause: Cause) { - let _p = profile::span("GlobalState::switch_workspaces"); + let _p = tracing::span!(tracing::Level::INFO, "GlobalState::switch_workspaces").entered(); tracing::info!(%cause, "will switch workspaces"); let Some((workspaces, force_reload_crate_graph)) = @@ -502,7 +503,7 @@ impl GlobalState { let mut crate_graph_file_dependencies = FxHashSet::default(); let mut load = |path: &AbsPath| { - let _p = profile::span("switch_workspaces::load"); + let _p = tracing::span!(tracing::Level::INFO, "switch_workspaces::load").entered(); let vfs_path = vfs::VfsPath::from(path.to_path_buf()); crate_graph_file_dependencies.insert(vfs_path.clone()); match vfs.file_id(&vfs_path) { @@ -585,7 +586,7 @@ impl GlobalState { } fn reload_flycheck(&mut self) { - let _p = profile::span("GlobalState::reload_flycheck"); + let _p = tracing::span!(tracing::Level::INFO, "GlobalState::reload_flycheck").entered(); let config = self.config.flycheck(); let sender = self.flycheck_sender.clone(); let invocation_strategy = match config { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/config.rs new file mode 100644 index 0000000000000..fcdbd1e6d9b56 
--- /dev/null +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/config.rs @@ -0,0 +1,108 @@ +//! Simple logger that logs either to stderr or to a file, using `tracing_subscriber` +//! filter syntax and `tracing_appender` for non blocking output. + +use std::io; + +use anyhow::Context; +use tracing::{level_filters::LevelFilter, Level}; +use tracing_subscriber::{ + filter::{self, Targets}, + fmt::{format::FmtSpan, MakeWriter}, + layer::SubscriberExt, + util::SubscriberInitExt, + Layer, Registry, +}; +use tracing_tree::HierarchicalLayer; + +use crate::tracing::hprof; + +pub struct Config { + pub writer: T, + pub filter: String, + /// The meaning of CHALK_DEBUG is to tell chalk crates + /// (i.e. chalk-solve, chalk-ir, chalk-recursive) how to filter tracing + /// logs. But now we can only have just one filter, which means we have to + /// merge chalk filter to our main filter (from RA_LOG env). + /// + /// The acceptable syntax of CHALK_DEBUG is `target[span{field=value}]=level`. + /// As the value should only affect chalk crates, we'd better manually + /// specify the target. And for simplicity, CHALK_DEBUG only accept the value + /// that specify level. + pub chalk_filter: Option, + /// Filtering syntax, set in a shell: + /// ``` + /// env RA_PROFILE=* // dump everything + /// env RA_PROFILE=foo|bar|baz // enabled only selected entries + /// env RA_PROFILE=*@3>10 // dump everything, up to depth 3, if it takes more than 10 + /// ``` + pub profile_filter: Option, +} + +impl Config +where + T: for<'writer> MakeWriter<'writer> + Send + Sync + 'static, +{ + pub fn init(self) -> anyhow::Result<()> { + let filter: Targets = self + .filter + .parse() + .with_context(|| format!("invalid log filter: `{}`", self.filter))?; + + let writer = self.writer; + + let ra_fmt_layer = tracing_subscriber::fmt::layer() + .with_span_events(FmtSpan::CLOSE) + .with_writer(writer) + .with_filter(filter); + + let mut chalk_layer = None; + if let Some(chalk_filter) = self.chalk_filter { + let level: LevelFilter = + chalk_filter.parse().with_context(|| "invalid chalk log filter")?; + + let chalk_filter = Targets::new() + .with_target("chalk_solve", level) + .with_target("chalk_ir", level) + .with_target("chalk_recursive", level); + chalk_layer = Some( + HierarchicalLayer::default() + .with_indent_lines(true) + .with_ansi(false) + .with_indent_amount(2) + .with_writer(io::stderr) + .with_filter(chalk_filter), + ); + }; + + let mut profiler_layer = None; + if let Some(spec) = self.profile_filter { + let (write_filter, allowed_names) = hprof::WriteFilter::from_spec(&spec); + + // this filter the first pass for `tracing`: these are all the "profiling" spans, but things like + // span depth or duration are not filtered here: that only occurs at write time. 
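// Illustrative aside, not part of the patch: a worked reading of the RA_PROFILE
// spec grammar documented above and parsed by `WriteFilter::from_spec` later in
// this diff (example values hypothetical):
//
//   "*"             -> every profiling span, any depth, no duration cutoff
//   "*>10"          -> every span, but only printed if it took longer than 10ms
//   "foo|bar@2>500" -> only spans named `foo` or `bar`, printed at most two
//                      levels deep, and only when they took longer than 500ms
//
// The name list is applied here, in the `tracing` filter; the depth and the
// `>ms` cutoff only take effect at write time, when the span tree is printed.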
+ let profile_filter = filter::filter_fn(move |metadata| { + let allowed = match &allowed_names { + Some(names) => names.contains(metadata.name()), + None => true, + }; + + metadata.is_span() + && allowed + && metadata.level() >= &Level::INFO + && !metadata.target().starts_with("salsa") + && !metadata.target().starts_with("chalk") + }); + + let layer = hprof::SpanTree::default() + .aggregate(true) + .spec_filter(write_filter) + .with_filter(profile_filter); + + profiler_layer = Some(layer); + } + + Registry::default().with(ra_fmt_layer).with(chalk_layer).with(profiler_layer).try_init()?; + + Ok(()) + } +} diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/hprof.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/hprof.rs new file mode 100644 index 0000000000000..c99b551df8526 --- /dev/null +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/hprof.rs @@ -0,0 +1,272 @@ +//! Consumer of `tracing` data, which prints a hierarchical profile. +//! +//! Based on https://github.com/davidbarsky/tracing-tree, but does less, while +//! actually printing timings for spans by default. The code here is vendored from +//! https://github.com/matklad/tracing-span-tree. +//! +//! Usage: +//! +//! ```rust +//! let layer = hprof::SpanTree::default(); +//! Registry::default().with(layer).init(); +//! ``` +//! +//! Example output: +//! +//! ```text +//! 8.37ms top_level +//! 1.09ms middle +//! 1.06ms leaf +//! 1.06ms middle +//! 3.12ms middle +//! 1.06ms leaf +//! 3.06ms middle +//! ``` +//! +//! Same data, but with `.aggregate(true)`: +//! +//! ```text +//! 8.39ms top_level +//! 8.35ms 4 middle +//! 2.13ms 2 leaf +//! ``` + +use std::{ + fmt::Write, + mem, + time::{Duration, Instant}, +}; + +use rustc_hash::FxHashSet; +use tracing::{ + field::{Field, Visit}, + span::Attributes, + Event, Id, Level, Subscriber, +}; +use tracing_subscriber::{ + filter, + layer::{Context, SubscriberExt}, + registry::LookupSpan, + Layer, Registry, +}; + +use crate::tracing::hprof; + +pub fn init(spec: &str) { + let (write_filter, allowed_names) = WriteFilter::from_spec(spec); + + // this filter the first pass for `tracing`: these are all the "profiling" spans, but things like + // span depth or duration are not filtered here: that only occurs at write time. + let profile_filter = filter::filter_fn(move |metadata| { + let allowed = match &allowed_names { + Some(names) => names.contains(metadata.name()), + None => true, + }; + + metadata.is_span() + && allowed + && metadata.level() >= &Level::INFO + && !metadata.target().starts_with("salsa") + && !metadata.target().starts_with("chalk") + }); + + let layer = hprof::SpanTree::default() + .aggregate(true) + .spec_filter(write_filter) + .with_filter(profile_filter); + + let subscriber = Registry::default().with(layer); + tracing::subscriber::set_global_default(subscriber).unwrap(); +} + +#[derive(Default, Debug)] +pub(crate) struct SpanTree { + aggregate: bool, + write_filter: WriteFilter, +} + +impl SpanTree { + /// Merge identical sibling spans together. + pub(crate) fn aggregate(self, yes: bool) -> SpanTree { + SpanTree { aggregate: yes, ..self } + } + + /// Add a write-time filter for span duration or tree depth. 
+    pub(crate) fn spec_filter(self, write_filter: WriteFilter) -> SpanTree {
+        SpanTree { write_filter, ..self }
+    }
+}
+
+struct Data {
+    start: Instant,
+    children: Vec<Node>,
+    fields: String,
+}
+
+impl Data {
+    fn new(attrs: &Attributes<'_>) -> Self {
+        let mut data = Self { start: Instant::now(), children: Vec::new(), fields: String::new() };
+
+        let mut visitor = DataVisitor { string: &mut data.fields };
+        attrs.record(&mut visitor);
+        data
+    }
+
+    fn into_node(self, name: &'static str) -> Node {
+        Node {
+            name,
+            fields: self.fields,
+            count: 1,
+            duration: self.start.elapsed(),
+            children: self.children,
+        }
+    }
+}
+
+pub struct DataVisitor<'a> {
+    string: &'a mut String,
+}
+
+impl<'a> Visit for DataVisitor<'a> {
+    fn record_debug(&mut self, field: &Field, value: &dyn std::fmt::Debug) {
+        write!(self.string, "{} = {:?} ", field.name(), value).unwrap();
+    }
+}
+
+impl<S> Layer<S> for SpanTree
+where
+    S: Subscriber + for<'span> LookupSpan<'span>,
+{
+    fn on_new_span(&self, attrs: &Attributes<'_>, id: &Id, ctx: Context<'_, S>) {
+        let span = ctx.span(id).unwrap();
+
+        let data = Data::new(attrs);
+        span.extensions_mut().insert(data);
+    }
+
+    fn on_event(&self, _event: &Event<'_>, _ctx: Context<'_, S>) {}
+
+    fn on_close(&self, id: Id, ctx: Context<'_, S>) {
+        let span = ctx.span(&id).unwrap();
+        let data = span.extensions_mut().remove::<Data>().unwrap();
+        let mut node = data.into_node(span.name());
+
+        match span.parent() {
+            Some(parent_span) => {
+                parent_span.extensions_mut().get_mut::<Data>().unwrap().children.push(node);
+            }
+            None => {
+                if self.aggregate {
+                    node.aggregate()
+                }
+                node.print(&self.write_filter)
+            }
+        }
+    }
+}
+
+#[derive(Default)]
+struct Node {
+    name: &'static str,
+    fields: String,
+    count: u32,
+    duration: Duration,
+    children: Vec<Node>,
+}
+
+impl Node {
+    fn print(&self, filter: &WriteFilter) {
+        self.go(0, filter)
+    }
+
+    fn go(&self, level: usize, filter: &WriteFilter) {
+        if self.duration > filter.longer_than && level < filter.depth {
+            let duration = ms(self.duration);
+            let current_indent = level * 2;
+
+            let mut out = String::new();
+            let _ = write!(out, "{:current_indent$} {duration} {:<6}", "", self.name);
+
+            if !self.fields.is_empty() {
+                let _ = write!(out, " @ {}", self.fields);
+            }
+
+            if self.count > 1 {
+                let _ = write!(out, " ({} calls)", self.count);
+            }
+
+            eprintln!("{}", out);
+
+            for child in &self.children {
+                child.go(level + 1, filter)
+            }
+        }
+    }
+
+    fn aggregate(&mut self) {
+        if self.children.is_empty() {
+            return;
+        }
+
+        self.children.sort_by_key(|it| it.name);
+        let mut idx = 0;
+        for i in 1..self.children.len() {
+            if self.children[idx].name == self.children[i].name {
+                let child = mem::take(&mut self.children[i]);
+                self.children[idx].duration += child.duration;
+                self.children[idx].count += child.count;
+                self.children[idx].children.extend(child.children);
+            } else {
+                idx += 1;
+                assert!(idx <= i);
+                self.children.swap(idx, i);
+            }
+        }
+        self.children.truncate(idx + 1);
+        for child in &mut self.children {
+            child.aggregate()
+        }
+    }
+}
+
+#[derive(Default, Clone, Debug)]
+pub(crate) struct WriteFilter {
+    depth: usize,
+    longer_than: Duration,
+}
+
+impl WriteFilter {
+    pub(crate) fn from_spec(mut spec: &str) -> (WriteFilter, Option<FxHashSet<String>>) {
+        let longer_than = if let Some(idx) = spec.rfind('>') {
+            let longer_than = spec[idx + 1..].parse().expect("invalid profile longer_than");
+            spec = &spec[..idx];
+            Duration::from_millis(longer_than)
+        } else {
+            Duration::new(0, 0)
+        };
+
+        let depth = if let Some(idx) = spec.rfind('@') {
+            let depth: usize =
spec[idx + 1..].parse().expect("invalid profile depth"); + spec = &spec[..idx]; + depth + } else { + 999 + }; + let allowed = if spec == "*" { + None + } else { + Some(FxHashSet::from_iter(spec.split('|').map(String::from))) + }; + (WriteFilter { depth, longer_than }, allowed) + } +} + +#[allow(non_camel_case_types)] +struct ms(Duration); + +impl std::fmt::Display for ms { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let n = self.0.as_millis(); + write!(f, "{n:5}ms") + } +} diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs index 58a99cc4471a9..19890110d53db 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs @@ -31,6 +31,7 @@ use lsp_types::{ }; use rust_analyzer::lsp::ext::{OnEnter, Runnables, RunnablesParams}; use serde_json::json; +use stdx::format_to_acc; use test_utils::skip_slow_tests; use crate::{ @@ -38,9 +39,6 @@ use crate::{ testdir::TestDir, }; -const PROFILE: &str = ""; -// const PROFILE: &'static str = "*@3>100"; - #[test] fn completes_items_from_standard_library() { if skip_slow_tests() { @@ -594,8 +592,10 @@ fn diagnostics_dont_block_typing() { return; } - let librs: String = (0..10).map(|i| format!("mod m{i};")).collect(); - let libs: String = (0..10).map(|i| format!("//- /src/m{i}.rs\nfn foo() {{}}\n\n")).collect(); + let librs: String = (0..10).fold(String::new(), |mut acc, i| format_to_acc!(acc, "mod m{i};")); + let libs: String = (0..10).fold(String::new(), |mut acc, i| { + format_to_acc!(acc, "//- /src/m{i}.rs\nfn foo() {{}}\n\n") + }); let server = Project::with_fixture(&format!( r#" //- /Cargo.toml diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs index e16990eabd092..d699374f9cde6 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs @@ -9,11 +9,11 @@ use std::{ use crossbeam_channel::{after, select, Receiver}; use lsp_server::{Connection, Message, Notification, Request}; use lsp_types::{notification::Exit, request::Shutdown, TextDocumentIdentifier, Url}; -use rust_analyzer::{config::Config, lsp, main_loop}; +use rust_analyzer::{config::Config, lsp, main_loop, tracing}; use serde::Serialize; use serde_json::{json, to_string_pretty, Value}; use test_utils::FixtureWithProjectMeta; -use tracing_subscriber::{prelude::*, Layer}; +use tracing_subscriber::fmt::TestWriter; use vfs::AbsPathBuf; use crate::testdir::TestDir; @@ -91,12 +91,14 @@ impl Project<'_> { static INIT: Once = Once::new(); INIT.call_once(|| { - let filter: tracing_subscriber::filter::Targets = - std::env::var("RA_LOG").ok().and_then(|it| it.parse().ok()).unwrap_or_default(); - let layer = - tracing_subscriber::fmt::Layer::new().with_test_writer().with_filter(filter); - tracing_subscriber::Registry::default().with(layer).init(); - profile::init_from(crate::PROFILE); + let _ = tracing::Config { + writer: TestWriter::default(), + // Deliberately enable all `error` logs if the user has not set RA_LOG, as there is usually + // useful information in there for debugging. 
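Back in `hprof::WriteFilter::from_spec` above, the spec string is peeled from the right: an optional `>N` millisecond cut-off, then an optional `@N` depth limit, and finally either `*` or a `|`-separated list of span names. A hypothetical test, not part of this patch, that could sit at the bottom of hprof.rs makes that grammar concrete:

```rust
#[cfg(test)]
mod spec_tests {
    use super::*;
    use std::time::Duration;

    #[test]
    fn parses_profile_specs() {
        // Only spans named `render` or `flycheck`, at most 2 levels deep,
        // and only if they take longer than 5 ms.
        let (filter, allowed) = WriteFilter::from_spec("render|flycheck@2>5");
        assert_eq!(filter.depth, 2);
        assert_eq!(filter.longer_than, Duration::from_millis(5));
        assert!(allowed.unwrap().contains("render"));

        // `*` keeps every span name, with no duration cut-off and an
        // effectively unlimited (999) depth.
        let (filter, allowed) = WriteFilter::from_spec("*");
        assert_eq!(filter.depth, 999);
        assert_eq!(filter.longer_than, Duration::new(0, 0));
        assert!(allowed.is_none());
    }
}
```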
+ filter: std::env::var("RA_LOG").ok().unwrap_or_else(|| "error".to_string()), + chalk_filter: std::env::var("CHALK_DEBUG").ok(), + profile_filter: std::env::var("RA_PROFILE").ok(), + }; }); let FixtureWithProjectMeta { fixture, mini_core, proc_macro_names, toolchain } = diff --git a/src/tools/rust-analyzer/crates/stdx/Cargo.toml b/src/tools/rust-analyzer/crates/stdx/Cargo.toml index e6014cf812e56..2e3f9113b066a 100644 --- a/src/tools/rust-analyzer/crates/stdx/Cargo.toml +++ b/src/tools/rust-analyzer/crates/stdx/Cargo.toml @@ -13,7 +13,7 @@ doctest = false [dependencies] backtrace = { version = "0.3.67", optional = true } -always-assert = { version = "0.1.2", features = ["log"] } +always-assert = { version = "0.2.0", features = ["tracing"] } jod-thread = "0.1.2" libc.workspace = true crossbeam-channel = "0.5.5" diff --git a/src/tools/rust-analyzer/crates/stdx/src/macros.rs b/src/tools/rust-analyzer/crates/stdx/src/macros.rs index d71e418c89bc6..85d9008fe123e 100644 --- a/src/tools/rust-analyzer/crates/stdx/src/macros.rs +++ b/src/tools/rust-analyzer/crates/stdx/src/macros.rs @@ -1,15 +1,5 @@ //! Convenience macros. -#[macro_export] -macro_rules! eprintln { - ($($tt:tt)*) => {{ - if $crate::is_ci() { - panic!("Forgot to remove debug-print?") - } - std::eprintln!($($tt)*) - }} -} - /// Appends formatted string to a `String`. #[macro_export] macro_rules! format_to { @@ -24,6 +14,22 @@ macro_rules! format_to { }; } +/// Appends formatted string to a `String` and returns the `String`. +/// +/// Useful for folding iterators into a `String`. +#[macro_export] +macro_rules! format_to_acc { + ($buf:expr, $lit:literal $($arg:tt)*) => { + { + use ::std::fmt::Write as _; + // We can't do ::std::fmt::Write::write_fmt($buf, format_args!($lit $($arg)*)) + // unfortunately, as that loses out on autoref behavior. + _ = $buf.write_fmt(format_args!($lit $($arg)*)); + $buf + } + }; +} + /// Generates `From` impls for `Enum E { Foo(Foo), Bar(Bar) }` enums /// /// # Example diff --git a/src/tools/rust-analyzer/crates/syntax/Cargo.toml b/src/tools/rust-analyzer/crates/syntax/Cargo.toml index 9f78614bba66b..a0fd73ee13f57 100644 --- a/src/tools/rust-analyzer/crates/syntax/Cargo.toml +++ b/src/tools/rust-analyzer/crates/syntax/Cargo.toml @@ -22,6 +22,7 @@ once_cell = "1.17.0" indexmap.workspace = true smol_str.workspace = true triomphe.workspace = true +tracing.workspace = true ra-ap-rustc_lexer.workspace = true diff --git a/src/tools/rust-analyzer/crates/syntax/src/algo.rs b/src/tools/rust-analyzer/crates/syntax/src/algo.rs index c4548b1647472..01f2af419ed89 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/algo.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/algo.rs @@ -120,7 +120,7 @@ pub struct TreeDiff { impl TreeDiff { pub fn into_text_edit(&self, builder: &mut TextEditBuilder) { - let _p = profile::span("into_text_edit"); + let _p = tracing::span!(tracing::Level::INFO, "into_text_edit").entered(); for (anchor, to) in &self.insertions { let offset = match anchor { @@ -149,7 +149,7 @@ impl TreeDiff { /// /// This function tries to find a fine-grained diff. 
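The new `stdx::format_to_acc!` macro above is the fold-friendly sibling of `format_to!`: it appends to the accumulator and hands it back, which is what lets the `map(format!(..)).collect::<String>()` chains elsewhere in this patch become single-allocation folds. A small illustration; the function and data here are made up:

```rust
use stdx::format_to_acc;

fn numbered_list(items: &[&str]) -> String {
    // Each closure call appends to `acc` and returns it, so `fold` threads one
    // String through the whole iterator instead of allocating per item.
    items
        .iter()
        .enumerate()
        .fold(String::new(), |mut acc, (i, item)| format_to_acc!(acc, "{}. {item}\n", i + 1))
}

// numbered_list(&["foo", "bar"]) == "1. foo\n2. bar\n"
```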
pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff { - let _p = profile::span("diff"); + let _p = tracing::span!(tracing::Level::INFO, "diff").entered(); let mut diff = TreeDiff { replacements: FxHashMap::default(), diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs index 247dfe0b459a5..c9944b75b0964 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs @@ -538,9 +538,13 @@ impl ast::UseTree { /// `foo::bar` -> `{foo::bar}` /// /// `{foo::bar}` -> `{foo::bar}` - pub fn wrap_in_tree_list(&self) { - if self.path().is_none() { - return; + pub fn wrap_in_tree_list(&self) -> Option<()> { + if self.use_tree_list().is_some() + && self.path().is_none() + && self.star_token().is_none() + && self.rename().is_none() + { + return None; } let subtree = self.clone_subtree().clone_for_update(); ted::remove_all_iter(self.syntax().children_with_tokens()); @@ -548,6 +552,7 @@ impl ast::UseTree { self.syntax(), make::use_tree_list(once(subtree)).clone_for_update().syntax(), ); + Some(()) } } @@ -960,10 +965,10 @@ impl ast::IdentPat { } pub trait HasVisibilityEdit: ast::HasVisibility { - fn set_visibility(&self, visbility: ast::Visibility) { + fn set_visibility(&self, visibility: ast::Visibility) { match self.visibility() { Some(current_visibility) => { - ted::replace(current_visibility.syntax(), visbility.syntax()) + ted::replace(current_visibility.syntax(), visibility.syntax()) } None => { let vis_before = self @@ -972,7 +977,7 @@ pub trait HasVisibilityEdit: ast::HasVisibility { .find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR)) .unwrap_or_else(|| self.syntax().first_child_or_token().unwrap()); - ted::insert(ted::Position::before(vis_before), visbility.syntax()); + ted::insert(ted::Position::before(vis_before), visibility.syntax()); } } } diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs index 62d64319e3858..d5eda8f15e4a0 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs @@ -9,10 +9,11 @@ //! API should require to assemble every node piecewise. The trick of //! `parse(format!())` we use internally is an implementation detail -- long //! term, it will be replaced with direct tree manipulation. 
+
 use itertools::Itertools;
 use parser::T;
 use rowan::NodeOrToken;
-use stdx::{format_to, never};
+use stdx::{format_to, format_to_acc, never};

 use crate::{ast, utils::is_raw_identifier, AstNode, SourceFile, SyntaxKind, SyntaxToken};
@@ -759,15 +760,12 @@ pub fn match_arm_with_guard(
 }

 pub fn match_arm_list(arms: impl IntoIterator<Item = ast::MatchArm>) -> ast::MatchArmList {
-    let arms_str = arms
-        .into_iter()
-        .map(|arm| {
-            let needs_comma = arm.expr().map_or(true, |it| !it.is_block_like());
-            let comma = if needs_comma { "," } else { "" };
-            let arm = arm.syntax();
-            format!(" {arm}{comma}\n")
-        })
-        .collect::<String>();
+    let arms_str = arms.into_iter().fold(String::new(), |mut acc, arm| {
+        let needs_comma = arm.expr().map_or(true, |it| !it.is_block_like());
+        let comma = if needs_comma { "," } else { "" };
+        let arm = arm.syntax();
+        format_to_acc!(acc, " {arm}{comma}\n")
+    });
     return from_text(&arms_str);

     fn from_text(text: &str) -> ast::MatchArmList {
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
index ce01ee1c3594b..6e5e4127f4d43 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
@@ -384,7 +384,7 @@ impl ast::UseTreeList {
         // the below remove the innermost {}, got `use crate::{{{A}}}`
         remove_brace_in_use_tree_list(&self);

-        // the below remove othe unnecessary {}, got `use crate::A`
+        // the below remove other unnecessary {}, got `use crate::A`
         while let Some(parent_use_tree_list) = self.parent_use_tree().parent_use_tree_list() {
             remove_brace_in_use_tree_list(&parent_use_tree_list);
             self = parent_use_tree_list;
diff --git a/src/tools/rust-analyzer/crates/syntax/src/tests.rs b/src/tools/rust-analyzer/crates/syntax/src/tests.rs
index 8ae1242cf7fd4..4c0a538f712f5 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/tests.rs
@@ -11,6 +11,7 @@ use std::{
 use ast::HasName;
 use expect_test::expect_file;
 use rayon::prelude::*;
+use stdx::format_to_acc;
 use test_utils::{bench, bench_fixture, project_root};

 use crate::{ast, fuzz, AstNode, SourceFile, SyntaxError};
@@ -104,10 +105,9 @@ fn self_hosting_parsing() {
         .collect::<Vec<_>>();

     if !errors.is_empty() {
-        let errors = errors
-            .into_iter()
-            .map(|(path, err)| format!("{}: {:?}\n", path.display(), err[0]))
-            .collect::<String>();
+        let errors = errors.into_iter().fold(String::new(), |mut acc, (path, err)| {
+            format_to_acc!(acc, "{}: {:?}\n", path.display(), err[0])
+        });
         panic!("Parsing errors:\n{errors}\n");
     }
 }
diff --git a/src/tools/rust-analyzer/crates/test-utils/Cargo.toml b/src/tools/rust-analyzer/crates/test-utils/Cargo.toml
index 56067d8341789..2ff1fad6c29d9 100644
--- a/src/tools/rust-analyzer/crates/test-utils/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/test-utils/Cargo.toml
@@ -15,10 +15,11 @@ doctest = false
 # Avoid adding deps here, this crate is widely used in tests it should compile fast!
 dissimilar = "1.0.7"
 text-size.workspace = true
+tracing.workspace = true
 rustc-hash.workspace = true
 stdx.workspace = true
 profile.workspace = true

 [lints]
-workspace = true
\ No newline at end of file
+workspace = true
diff --git a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
index b015dd69b528a..9c25d88cb8428 100644
--- a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
+++ b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
@@ -28,7 +28,7 @@
 //!
env: option //! eq: sized //! error: fmt -//! fmt: result, transmute, coerce_unsized +//! fmt: option, result, transmute, coerce_unsized //! fn: //! from: sized //! future: pin @@ -987,6 +987,10 @@ pub mod fmt { Arguments { pieces, fmt: None, args } } + pub const fn new_const(pieces: &'a [&'static str]) -> Arguments<'a> { + Arguments { pieces, fmt: None, args: &[] } + } + pub fn new_v1_formatted( pieces: &'a [&'static str], args: &'a [rt::Argument<'a>], @@ -1346,6 +1350,9 @@ pub mod iter { // region:panic mod panic { pub macro panic_2021 { + () => ( + $crate::panicking::panic("explicit panic") + ), ($($t:tt)+) => ( $crate::panicking::panic_fmt($crate::const_format_args!($($t)+)) ), @@ -1357,6 +1364,11 @@ mod panicking { pub const fn panic_fmt(_fmt: crate::fmt::Arguments<'_>) -> ! { loop {} } + + #[lang = "panic"] + pub const fn panic(expr: &'static str) -> ! { + panic_fmt(crate::fmt::Arguments::new_const(&[expr])) + } } // endregion:panic diff --git a/src/tools/rust-analyzer/docs/dev/lsp-extensions.md b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md index 3251dd752682e..bc558c202472e 100644 --- a/src/tools/rust-analyzer/docs/dev/lsp-extensions.md +++ b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md @@ -239,13 +239,13 @@ The primary goal of `onEnter` is to handle automatic indentation when opening a This is not yet implemented. The secondary goal is to handle fixing up syntax, like continuing doc strings and comments, and escaping `\n` in string literals. -As proper cursor positioning is raison-d'etat for `onEnter`, it uses `SnippetTextEdit`. +As proper cursor positioning is raison d'ĂȘtre for `onEnter`, it uses `SnippetTextEdit`. ### Unresolved Question * How to deal with synchronicity of the request? One option is to require the client to block until the server returns the response. - Another option is to do a OT-style merging of edits from client and server. + Another option is to do a operational transforms style merging of edits from client and server. A third option is to do a record-replay: client applies heuristic on enter immediately, then applies all user's keypresses. When the server is ready with the response, the client rollbacks all the changes and applies the recorded actions on top of the correct response. * How to deal with multiple carets? diff --git a/src/tools/rust-analyzer/docs/dev/style.md b/src/tools/rust-analyzer/docs/dev/style.md index 786127639ce75..4c5299bde3e91 100644 --- a/src/tools/rust-analyzer/docs/dev/style.md +++ b/src/tools/rust-analyzer/docs/dev/style.md @@ -99,14 +99,7 @@ Including a description and GIF suitable for the changelog means less work for t ## Clippy -We don't enforce Clippy. -A number of default lints have high false positive rate. -Selectively patching false-positives with `allow(clippy)` is probably worse than entirely disabling a problematic lint. -There's a `cargo lint` command which runs a subset of low-FPR lints. -Careful tweaking of `lint` is welcome. -Of course, applying Clippy suggestions is welcome as long as they indeed improve the code. - -**Rationale:** see [rust-lang/clippy#5537](https://github.com/rust-lang/rust-clippy/issues/5537). +We use Clippy to improve the code, but if some lints annoy you, allow them in the [Cargo.toml](../../Cargo.toml) [workspace.lints.clippy] section. 
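For reference, an allow entry of the kind the updated style guide points at looks roughly like this in the workspace Cargo.toml; the lint names below are only examples, not lints the project actually allows:

```toml
[workspace.lints.clippy]
# Silence lints the team has decided not to act on, instead of sprinkling
# `#[allow(clippy::...)]` attributes through the code.
too_many_arguments = "allow"
new_without_default = "allow"
```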
# Code diff --git a/src/tools/rust-analyzer/docs/dev/syntax.md b/src/tools/rust-analyzer/docs/dev/syntax.md index fd6f220f4fa90..6c4daecc58ff7 100644 --- a/src/tools/rust-analyzer/docs/dev/syntax.md +++ b/src/tools/rust-analyzer/docs/dev/syntax.md @@ -128,7 +128,7 @@ Interior nodes are shared as well (for example in `(1 + 1) * (1 + 1)`). Note that, the result of the interning is an `Arc`. That is, it's not an index into interning table, so you don't have to have the table around to do anything with the tree. Each tree is fully self-contained (although different trees might share parts). -Currently, the interner is created per-file, but it will be easy to use a per-thread or per-some-contex one. +Currently, the interner is created per-file, but it will be easy to use a per-thread or per-some-context one. We use a `TextSize`, a newtyped `u32`, to store the length of the text. diff --git a/src/tools/rust-analyzer/docs/user/generated_config.adoc b/src/tools/rust-analyzer/docs/user/generated_config.adoc index f887bb9df31c8..cfa7503d73900 100644 --- a/src/tools/rust-analyzer/docs/user/generated_config.adoc +++ b/src/tools/rust-analyzer/docs/user/generated_config.adoc @@ -777,6 +777,11 @@ Internal config, path to proc-macro server executable. -- Exclude imports from find-all-references. -- +[[rust-analyzer.references.excludeTests]]rust-analyzer.references.excludeTests (default: `false`):: ++ +-- +Exclude tests from find-all-references. +-- [[rust-analyzer.rename.allowExternalItems]]rust-analyzer.rename.allowExternalItems (default: `false`):: + -- diff --git a/src/tools/rust-analyzer/docs/user/manual.adoc b/src/tools/rust-analyzer/docs/user/manual.adoc index 069a62ddbfe96..9e9ea25779047 100644 --- a/src/tools/rust-analyzer/docs/user/manual.adoc +++ b/src/tools/rust-analyzer/docs/user/manual.adoc @@ -369,7 +369,7 @@ EOF See https://sharksforarms.dev/posts/neovim-rust/ for more tips on getting started. -Check out https://github.com/simrat39/rust-tools.nvim for a batteries included rust-analyzer setup for Neovim. +Check out https://github.com/mrcjkb/rustaceanvim for a batteries included rust-analyzer setup for Neovim. 
==== vim-lsp diff --git a/src/tools/rust-analyzer/editors/code/language-configuration.json b/src/tools/rust-analyzer/editors/code/language-configuration.json index a2af8b51a908b..1c348b63f1a23 100644 --- a/src/tools/rust-analyzer/editors/code/language-configuration.json +++ b/src/tools/rust-analyzer/editors/code/language-configuration.json @@ -6,9 +6,7 @@ "brackets": [ ["{", "}"], ["[", "]"], - ["(", ")"], - ["#[", "]"], - ["#![", "]"] + ["(", ")"] ], "colorizedBracketPairs": [ ["{", "}"], @@ -19,8 +17,6 @@ { "open": "{", "close": "}" }, { "open": "[", "close": "]" }, { "open": "(", "close": ")" }, - { "open": "#[", "close": "]" }, - { "open": "#![", "close": "]" }, { "open": "\"", "close": "\"", "notIn": ["string"] }, { "open": "/*", "close": " */" }, { "open": "`", "close": "`", "notIn": ["string"] } diff --git a/src/tools/rust-analyzer/editors/code/package.json b/src/tools/rust-analyzer/editors/code/package.json index 5ed5146ea1b11..841e364ed8457 100644 --- a/src/tools/rust-analyzer/editors/code/package.json +++ b/src/tools/rust-analyzer/editors/code/package.json @@ -1505,6 +1505,11 @@ "default": false, "type": "boolean" }, + "rust-analyzer.references.excludeTests": { + "markdownDescription": "Exclude tests from find-all-references.", + "default": false, + "type": "boolean" + }, "rust-analyzer.rename.allowExternalItems": { "markdownDescription": "Allow renaming of items not belonging to the loaded workspaces.", "default": false, diff --git a/src/tools/rust-analyzer/editors/code/src/main.ts b/src/tools/rust-analyzer/editors/code/src/main.ts index 599cfb4ff77a1..c386b9e5d8fb5 100644 --- a/src/tools/rust-analyzer/editors/code/src/main.ts +++ b/src/tools/rust-analyzer/editors/code/src/main.ts @@ -25,16 +25,7 @@ export async function deactivate() { export async function activate( context: vscode.ExtensionContext, ): Promise { - if (vscode.extensions.getExtension("rust-lang.rust")) { - vscode.window - .showWarningMessage( - `You have both the rust-analyzer (rust-lang.rust-analyzer) and Rust (rust-lang.rust) ` + - "plugins enabled. These are known to conflict and cause various functions of " + - "both plugins to not work correctly. You should disable one of them.", - "Got it", - ) - .then(() => {}, console.error); - } + checkConflictingExtensions(); const ctx = new Ctx(context, createCommands(), fetchWorkspace()); // VS Code doesn't show a notification when an extension fails to activate @@ -200,3 +191,26 @@ function createCommands(): Record { revealDependency: { enabled: commands.revealDependency }, }; } + +function checkConflictingExtensions() { + if (vscode.extensions.getExtension("rust-lang.rust")) { + vscode.window + .showWarningMessage( + `You have both the rust-analyzer (rust-lang.rust-analyzer) and Rust (rust-lang.rust) ` + + "plugins enabled. These are known to conflict and cause various functions of " + + "both plugins to not work correctly. 
You should disable one of them.", + "Got it", + ) + .then(() => {}, console.error); + } + + if (vscode.extensions.getExtension("panicbit.cargo")) { + vscode.window + .showWarningMessage( + `You have both the rust-analyzer (rust-lang.rust-analyzer) and Cargo (panicbit.cargo) plugins enabled` + + 'you can disable it or set {"cargo.automaticCheck": false} in settings.json to avoid invoking cargo twice', + "Got it", + ) + .then(() => {}, console.error); + } +} diff --git a/src/tools/rust-analyzer/lib/lsp-server/src/error.rs b/src/tools/rust-analyzer/lib/lsp-server/src/error.rs index ebdd153b5b317..da55393339028 100644 --- a/src/tools/rust-analyzer/lib/lsp-server/src/error.rs +++ b/src/tools/rust-analyzer/lib/lsp-server/src/error.rs @@ -14,7 +14,7 @@ impl ProtocolError { ProtocolError("disconnected channel".into(), true) } - /// Whether this error occured due to a disconnected channel. + /// Whether this error occurred due to a disconnected channel. pub fn channel_is_disconnected(&self) -> bool { self.1 }