diff --git a/.github/rust.json b/.github/rust.json new file mode 100644 index 0000000000000..ddaa1b0824b94 --- /dev/null +++ b/.github/rust.json @@ -0,0 +1,33 @@ +{ + "problemMatcher": [ + { + "owner": "rustfmt", + "severity": "warning", + "pattern": [ + { + "regexp": "^(Diff in (.+)) at line (\\d+):$", + "message": 1, + "file": 2, + "line": 3 + } + ] + }, + { + "owner": "clippy", + "pattern": [ + { + "regexp": "^(?:\\x1b\\[[\\d;]+m)*(warning|warn|error)(?:\\x1b\\[[\\d;]+m)*(\\[(.*)\\])?(?:\\x1b\\[[\\d;]+m)*:(?:\\x1b\\[[\\d;]+m)* ([^\\x1b]*)(?:\\x1b\\[[\\d;]+m)*$", + "severity": 1, + "message": 4, + "code": 3 + }, + { + "regexp": "^(?:\\x1b\\[[\\d;]+m)*\\s*(?:\\x1b\\[[\\d;]+m)*\\s*--> (?:\\x1b\\[[\\d;]+m)*(.*):(\\d*):(\\d*)(?:\\x1b\\[[\\d;]+m)*$", + "file": 1, + "line": 2, + "column": 3 + } + ] + } + ] +} diff --git a/.github/workflows/autopublish.yaml b/.github/workflows/autopublish.yaml index 9a5015005b3dc..4b97637088c31 100644 --- a/.github/workflows/autopublish.yaml +++ b/.github/workflows/autopublish.yaml @@ -15,7 +15,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 964be478fa3aa..62fbd57abc165 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -27,7 +27,7 @@ jobs: typescript: ${{ steps.filter.outputs.typescript }} proc_macros: ${{ steps.filter.outputs.proc_macros }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: dorny/paths-filter@1441771bbfdd59dcd748680ee64ebd8faab1a242 id: filter with: @@ -56,7 +56,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ github.event.pull_request.head.sha }} @@ -65,6 +65,10 @@ jobs: rustup update --no-self-update ${{ env.RUST_CHANNEL }} rustup component add --toolchain ${{ env.RUST_CHANNEL }} rustfmt rust-src rustup default ${{ env.RUST_CHANNEL }} + # https://github.com/actions-rust-lang/setup-rust-toolchain/blob/main/rust.json + - name: Install Rust Problem Matcher + if: matrix.os == 'ubuntu-latest' + run: echo "::add-matcher::.github/rust.json" - name: Cache Dependencies uses: Swatinem/rust-cache@988c164c3d0e93c4dbab36aaf5bbeb77425b2894 @@ -107,6 +111,10 @@ jobs: if: matrix.os == 'windows-latest' run: cargo clippy --all-targets -- -D clippy::disallowed_macros -D clippy::dbg_macro -D clippy::todo -D clippy::print_stdout -D clippy::print_stderr + - name: rustfmt + if: matrix.os == 'ubuntu-latest' + run: cargo fmt -- --check + # Weird targets to catch non-portable code rust-cross: if: github.repository == 'rust-lang/rust-analyzer' @@ -121,7 +129,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install Rust toolchain run: | @@ -153,13 +161,13 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 if: needs.changes.outputs.typescript == 'true' - name: Install Nodejs - uses: actions/setup-node@v3 + uses: actions/setup-node@v4 with: - node-version: 16 + node-version: 18 if: needs.changes.outputs.typescript == 'true' - name: Install xvfb diff --git a/.github/workflows/fuzz.yml b/.github/workflows/fuzz.yml index 5af8aa1f77aac..f88c7f95d5c96 100644 --- a/.github/workflows/fuzz.yml +++ b/.github/workflows/fuzz.yml @@ -27,7 +27,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ github.event.pull_request.head.sha 
}} fetch-depth: 1 diff --git a/.github/workflows/metrics.yaml b/.github/workflows/metrics.yaml index e6a9917a0bf3d..be9f504e59966 100644 --- a/.github/workflows/metrics.yaml +++ b/.github/workflows/metrics.yaml @@ -21,7 +21,7 @@ jobs: rustup component add rustfmt rust-src rustup default stable - name: Cache cargo - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ~/.cargo/bin/ @@ -36,10 +36,10 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Restore cargo cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ~/.cargo/bin/ @@ -52,7 +52,7 @@ jobs: run: cargo xtask metrics build - name: Cache target - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: target/ key: ${{ runner.os }}-target-${{ github.sha }} @@ -73,10 +73,10 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Restore cargo cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ~/.cargo/bin/ @@ -86,7 +86,7 @@ jobs: key: ${{ runner.os }}-cargo-${{ github.sha }} - name: Restore target cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: target/ key: ${{ runner.os }}-target-${{ github.sha }} @@ -106,7 +106,7 @@ jobs: needs: [build_metrics, other_metrics] steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Download build metrics uses: actions/download-artifact@v3 diff --git a/.github/workflows/publish-libs.yaml b/.github/workflows/publish-libs.yaml index 6d026c9ad910b..862373ec1cce0 100644 --- a/.github/workflows/publish-libs.yaml +++ b/.github/workflows/publish-libs.yaml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 9077a9ac21eb6..adb1c85051610 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -59,7 +59,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: ${{ env.FETCH_DEPTH }} @@ -78,9 +78,9 @@ jobs: rustup component add rust-src - name: Install Node.js - uses: actions/setup-node@v3 + uses: actions/setup-node@v4 with: - node-version: 16 + node-version: 18 - name: Update apt repositories if: matrix.target == 'aarch64-unknown-linux-gnu' || matrix.target == 'arm-unknown-linux-gnueabihf' @@ -154,7 +154,7 @@ jobs: run: apk add --no-cache git clang lld musl-dev nodejs npm - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: ${{ env.FETCH_DEPTH }} @@ -188,9 +188,9 @@ jobs: needs: ["dist", "dist-x86_64-unknown-linux-musl"] steps: - name: Install Nodejs - uses: actions/setup-node@v3 + uses: actions/setup-node@v4 with: - node-version: 16 + node-version: 18 - run: echo "TAG=$(date --iso -u)" >> $GITHUB_ENV if: github.ref == 'refs/heads/release' @@ -199,7 +199,7 @@ jobs: - run: 'echo "TAG: $TAG"' - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: ${{ env.FETCH_DEPTH }} diff --git a/.github/workflows/rustdoc.yaml b/.github/workflows/rustdoc.yaml index 05f3e254e5f5a..12a1a791fda2e 100644 --- a/.github/workflows/rustdoc.yaml +++ b/.github/workflows/rustdoc.yaml @@ -17,7 +17,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install Rust toolchain run: rustup update 
--no-self-update stable diff --git a/Cargo.lock b/Cargo.lock index dc2bf3a76943e..7b29d7bb798df 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1329,6 +1329,7 @@ dependencies = [ "paths", "proc-macro-api", "proc-macro-test", + "ra-ap-rustc_lexer", "span", "stdx", "tt", @@ -1470,12 +1471,12 @@ dependencies = [ [[package]] name = "ra-ap-rustc_index" -version = "0.36.0" +version = "0.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8a41dee58608b1fc93779ea365edaa70ac9927e3335ae914b675be0fa063cd7" +checksum = "df5a0ba0d08af366cf235dbe8eb7226cced7a4fe502c98aa434ccf416defd746" dependencies = [ "arrayvec", - "ra-ap-rustc_index_macros 0.36.0", + "ra-ap-rustc_index_macros 0.37.0", "smallvec", ] @@ -1493,9 +1494,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_index_macros" -version = "0.36.0" +version = "0.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbfe98def54c4337a2f7d8233850bd5d5349972b185fe8a0db2b979164b30ed8" +checksum = "1971ebf9a701e0e68387c264a32517dcb4861ad3a4862f2e2803c1121ade20d5" dependencies = [ "proc-macro2", "quote", @@ -1525,11 +1526,11 @@ dependencies = [ [[package]] name = "ra-ap-rustc_pattern_analysis" -version = "0.36.0" +version = "0.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5529bffec7530b4a3425640bfdfd9b95d87c4c620f740266c0de6572561aab4" +checksum = "2c3c0e7ca9c5bdc66e3b590688e237a22ac47a48e4eac7f46b05b2abbfaf0abd" dependencies = [ - "ra-ap-rustc_index 0.36.0", + "ra-ap-rustc_index 0.37.0", "rustc-hash", "rustc_apfloat", "smallvec", diff --git a/Cargo.toml b/Cargo.toml index 2b81f7b11b238..49c7d369190ed 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -84,7 +84,7 @@ ra-ap-rustc_lexer = { version = "0.35.0", default-features = false } ra-ap-rustc_parse_format = { version = "0.35.0", default-features = false } ra-ap-rustc_index = { version = "0.35.0", default-features = false } ra-ap-rustc_abi = { version = "0.35.0", default-features = false } -ra-ap-rustc_pattern_analysis = { version = "0.36.0", default-features = false } +ra-ap-rustc_pattern_analysis = { version = "0.37.0", default-features = false } # local crates that aren't published to crates.io. These should not have versions. sourcegen = { path = "./crates/sourcegen" } diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs index 9560826e373e0..a817cd0c3ac2f 100644 --- a/crates/base-db/src/input.rs +++ b/crates/base-db/src/input.rs @@ -11,7 +11,6 @@ use std::{fmt, mem, ops, str::FromStr}; use cfg::CfgOptions; use la_arena::{Arena, Idx, RawIdx}; use rustc_hash::{FxHashMap, FxHashSet}; -use semver::Version; use syntax::SmolStr; use triomphe::Arc; use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath}; @@ -243,6 +242,7 @@ impl CrateDisplayName { CrateDisplayName { crate_name, canonical_name } } } + pub type TargetLayoutLoadResult = Result<Arc<str>, Arc<str>>; #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] @@ -291,71 +291,6 @@ pub struct CrateData { pub dependencies: Vec<Dependency>, pub origin: CrateOrigin, pub is_proc_macro: bool, - // FIXME: These things should not be per crate! These are more per workspace crate graph level - // things. This info does need to be somewhat present though as to prevent deduplication from - // happening across different workspaces with different layouts. - pub target_layout: TargetLayoutLoadResult, - pub toolchain: Option<Version>, -} - -impl CrateData { - /// Check if [`other`] is almost equal to [`self`] ignoring `CrateOrigin` value.
- pub fn eq_ignoring_origin_and_deps(&self, other: &CrateData, ignore_dev_deps: bool) -> bool { - // This method has some obscure bits. These are mostly there to be compliant with - // some patches. References to the patches are given. - if self.root_file_id != other.root_file_id { - return false; - } - - if self.display_name != other.display_name { - return false; - } - - if self.is_proc_macro != other.is_proc_macro { - return false; - } - - if self.edition != other.edition { - return false; - } - - if self.version != other.version { - return false; - } - - let mut opts = self.cfg_options.difference(&other.cfg_options); - if let Some(it) = opts.next() { - // Don't care if rust_analyzer CfgAtom is the only cfg in the difference set of self's and other's cfgs. - // https://github.com/rust-lang/rust-analyzer/blob/0840038f02daec6ba3238f05d8caa037d28701a0/crates/project-model/src/workspace.rs#L894 - if it.to_string() != "rust_analyzer" { - return false; - } - - if opts.next().is_some() { - return false; - } - } - - if self.env != other.env { - return false; - } - - let slf_deps = self.dependencies.iter(); - let other_deps = other.dependencies.iter(); - - if ignore_dev_deps { - return slf_deps - .clone() - .filter(|it| it.kind != DependencyKind::Dev) - .eq(other_deps.clone().filter(|it| it.kind != DependencyKind::Dev)); - } - - slf_deps.eq(other_deps) - } - - pub fn channel(&self) -> Option<ReleaseChannel> { - self.toolchain.as_ref().and_then(|v| ReleaseChannel::from_str(&v.pre)) - } } #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] @@ -398,32 +333,22 @@ pub enum DependencyKind { pub struct Dependency { pub crate_id: CrateId, pub name: CrateName, - kind: DependencyKind, prelude: bool, } impl Dependency { - pub fn new(name: CrateName, crate_id: CrateId, kind: DependencyKind) -> Self { - Self { name, crate_id, prelude: true, kind } + pub fn new(name: CrateName, crate_id: CrateId) -> Self { + Self { name, crate_id, prelude: true } } - pub fn with_prelude( - name: CrateName, - crate_id: CrateId, - prelude: bool, - kind: DependencyKind, - ) -> Self { - Self { name, crate_id, prelude, kind } + pub fn with_prelude(name: CrateName, crate_id: CrateId, prelude: bool) -> Self { + Self { name, crate_id, prelude } } /// Whether this dependency is to be added to the depending crate's extern prelude. pub fn is_prelude(&self) -> bool { self.prelude } - - pub fn kind(&self) -> DependencyKind { - self.kind - } } impl CrateGraph { @@ -438,8 +363,6 @@ impl CrateGraph { env: Env, is_proc_macro: bool, origin: CrateOrigin, - target_layout: Result<Arc<str>, Arc<str>>, - toolchain: Option<Version>, ) -> CrateId { let data = CrateData { root_file_id, @@ -451,9 +374,7 @@ env, dependencies: Vec::new(), origin, - target_layout, is_proc_macro, - toolchain, }; self.arena.alloc(data) } @@ -523,6 +444,10 @@ self.arena.is_empty() } + pub fn len(&self) -> usize { + self.arena.len() + } + pub fn iter(&self) -> impl Iterator<Item = CrateId> + '_ { self.arena.iter().map(|(idx, _)| idx) } @@ -623,13 +548,17 @@ /// /// This will deduplicate the crates of the graph where possible. /// Note that for deduplication to fully work, `self`'s crate dependencies must be sorted by crate id. - /// If the crate dependencies were sorted, the resulting graph from this `extend` call will also have the crate dependencies sorted. + /// If the crate dependencies were sorted, the resulting graph from this `extend` call will also + /// have the crate dependencies sorted.
+ /// + /// Returns a mapping from `other`'s crate ids to the new crate ids in `self`. pub fn extend( &mut self, mut other: CrateGraph, proc_macros: &mut ProcMacroPaths, - on_finished: impl FnOnce(&FxHashMap<CrateId, CrateId>), - ) { + merge: impl Fn((CrateId, &mut CrateData), (CrateId, &CrateData)) -> bool, + ) -> FxHashMap<CrateId, CrateId> { + let m = self.len(); let topo = other.crates_in_topological_order(); let mut id_map: FxHashMap<CrateId, CrateId> = FxHashMap::default(); for topo in topo { crate_data.dependencies.iter_mut().for_each(|dep| dep.crate_id = id_map[&dep.crate_id]); crate_data.dependencies.sort_by_key(|dep| dep.crate_id); - let res = self.arena.iter().find_map(|(id, data)| { - match (&data.origin, &crate_data.origin) { - (a, b) if a == b => { - if data.eq_ignoring_origin_and_deps(crate_data, false) { - return Some((id, false)); - } - } - (a @ CrateOrigin::Local { .. }, CrateOrigin::Library { .. }) - | (a @ CrateOrigin::Library { .. }, CrateOrigin::Local { .. }) => { - // If the origins differ, check if the two crates are equal without - // considering the dev dependencies, if they are, they most likely are in - // different loaded workspaces which may cause issues. We keep the local - // version and discard the library one as the local version may have - // dev-dependencies that we want to keep resolving. See #15656 for more - // information. - if data.eq_ignoring_origin_and_deps(crate_data, true) { - return Some((id, !a.is_local())); - } - } - (_, _) => return None, - } - - None - }); - - if let Some((res, should_update_lib_to_local)) = res { - id_map.insert(topo, res); - if should_update_lib_to_local { - assert!(self.arena[res].origin.is_lib()); - assert!(crate_data.origin.is_local()); - self.arena[res].origin = crate_data.origin.clone(); - - // Move local's dev dependencies into the newly-local-formerly-lib crate.
- self.arena[res].dependencies = crate_data.dependencies.clone(); - } - } else { - let id = self.arena.alloc(crate_data.clone()); - id_map.insert(topo, id); - } + let res = self + .arena + .iter_mut() + .take(m) + .find_map(|(id, data)| merge((id, data), (topo, &crate_data)).then_some(id)); + + let new_id = + if let Some(res) = res { res } else { self.arena.alloc(crate_data.clone()) }; + id_map.insert(topo, new_id); } *proc_macros = mem::take(proc_macros).into_iter().map(|(id, macros)| (id_map[&id], macros)).collect(); - on_finished(&id_map); + id_map } fn find_path( @@ -719,11 +618,9 @@ impl CrateGraph { match (cfg_if, std) { (Some(cfg_if), Some(std)) => { self.arena[cfg_if].dependencies.clear(); - self.arena[std].dependencies.push(Dependency::new( - CrateName::new("cfg_if").unwrap(), - cfg_if, - DependencyKind::Normal, - )); + self.arena[std] + .dependencies + .push(Dependency::new(CrateName::new("cfg_if").unwrap(), cfg_if)); true } _ => false, @@ -871,7 +768,7 @@ impl fmt::Display for CyclicDependenciesError { #[cfg(test)] mod tests { - use crate::{CrateOrigin, DependencyKind}; + use crate::CrateOrigin; use super::{CrateGraph, CrateName, Dependency, Edition::Edition2018, Env, FileId}; @@ -888,8 +785,6 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); let crate2 = graph.add_crate_root( FileId::from_raw(2u32), @@ -901,8 +796,6 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); let crate3 = graph.add_crate_root( FileId::from_raw(3u32), @@ -914,26 +807,15 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); assert!(graph - .add_dep( - crate1, - Dependency::new(CrateName::new("crate2").unwrap(), crate2, DependencyKind::Normal) - ) + .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2,)) .is_ok()); assert!(graph - .add_dep( - crate2, - Dependency::new(CrateName::new("crate3").unwrap(), crate3, DependencyKind::Normal) - ) + .add_dep(crate2, Dependency::new(CrateName::new("crate3").unwrap(), crate3,)) .is_ok()); assert!(graph - .add_dep( - crate3, - Dependency::new(CrateName::new("crate1").unwrap(), crate1, DependencyKind::Normal) - ) + .add_dep(crate3, Dependency::new(CrateName::new("crate1").unwrap(), crate1,)) .is_err()); } @@ -950,8 +832,6 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); let crate2 = graph.add_crate_root( FileId::from_raw(2u32), @@ -963,20 +843,12 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); assert!(graph - .add_dep( - crate1, - Dependency::new(CrateName::new("crate2").unwrap(), crate2, DependencyKind::Normal) - ) + .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2,)) .is_ok()); assert!(graph - .add_dep( - crate2, - Dependency::new(CrateName::new("crate2").unwrap(), crate2, DependencyKind::Normal) - ) + .add_dep(crate2, Dependency::new(CrateName::new("crate2").unwrap(), crate2,)) .is_err()); } @@ -993,8 +865,6 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); let crate2 = graph.add_crate_root( FileId::from_raw(2u32), @@ -1006,8 +876,6 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); let crate3 = graph.add_crate_root( FileId::from_raw(3u32), @@ -1019,20 +887,12 @@ mod tests { 
Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); assert!(graph - .add_dep( - crate1, - Dependency::new(CrateName::new("crate2").unwrap(), crate2, DependencyKind::Normal) - ) + .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2,)) .is_ok()); assert!(graph - .add_dep( - crate2, - Dependency::new(CrateName::new("crate3").unwrap(), crate3, DependencyKind::Normal) - ) + .add_dep(crate2, Dependency::new(CrateName::new("crate3").unwrap(), crate3,)) .is_ok()); } @@ -1049,8 +909,6 @@ Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); let crate2 = graph.add_crate_root( FileId::from_raw(2u32), @@ -1062,26 +920,16 @@ Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); assert!(graph .add_dep( crate1, - Dependency::new( - CrateName::normalize_dashes("crate-name-with-dashes"), - crate2, - DependencyKind::Normal - ) + Dependency::new(CrateName::normalize_dashes("crate-name-with-dashes"), crate2,) ) .is_ok()); assert_eq!( graph[crate1].dependencies, - vec![Dependency::new( - CrateName::new("crate_name_with_dashes").unwrap(), - crate2, - DependencyKind::Normal - )] + vec![Dependency::new(CrateName::new("crate_name_with_dashes").unwrap(), crate2,)] ); } } diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs index d7fc9d4c95cd6..cb2e6cdaa28dc 100644 --- a/crates/base-db/src/lib.rs +++ b/crates/base-db/src/lib.rs @@ -62,6 +62,20 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug { /// The crate graph. #[salsa::input] fn crate_graph(&self) -> Arc<CrateGraph>; + + // FIXME: Consider removing this, making HirDatabase::target_data_layout an input query + #[salsa::input] + fn data_layout(&self, krate: CrateId) -> TargetLayoutLoadResult; + + #[salsa::input] + fn toolchain(&self, krate: CrateId) -> Option<Version>; + + #[salsa::transparent] + fn toolchain_channel(&self, krate: CrateId) -> Option<ReleaseChannel>; +} + +fn toolchain_channel(db: &dyn SourceDatabase, krate: CrateId) -> Option<ReleaseChannel> { + db.toolchain(krate).as_ref().and_then(|v| ReleaseChannel::from_str(&v.pre)) } fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> { diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs index c59aff2a8bbf8..ee39a2790bc37 100644 --- a/crates/flycheck/src/lib.rs +++ b/crates/flycheck/src/lib.rs @@ -14,7 +14,7 @@ use std::{ use command_group::{CommandGroup, GroupChild}; use crossbeam_channel::{never, select, unbounded, Receiver, Sender}; -use paths::AbsPathBuf; +use paths::{AbsPath, AbsPathBuf}; use rustc_hash::FxHashMap; use serde::Deserialize; use stdx::process::streaming_output; @@ -23,6 +23,7 @@ pub use cargo_metadata::diagnostic::{ Applicability, Diagnostic, DiagnosticCode, DiagnosticLevel, DiagnosticSpan, DiagnosticSpanMacroExpansion, }; +use toolchain::Tool; #[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] pub enum InvocationStrategy { @@ -89,9 +90,10 @@ impl FlycheckHandle { id: usize, sender: Box<dyn Fn(Message) + Send>, config: FlycheckConfig, + sysroot_root: Option<AbsPathBuf>, workspace_root: AbsPathBuf, ) -> FlycheckHandle { - let actor = FlycheckActor::new(id, sender, config, workspace_root); + let actor = FlycheckActor::new(id, sender, config, sysroot_root, workspace_root); let (sender, receiver) = unbounded::<StateChange>(); let thread = stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker) .name("Flycheck".to_owned()) @@ -101,13 +103,15 @@ impl FlycheckHandle { } /// Schedule a re-start of the cargo check worker to do a workspace wide check.
- pub fn restart_workspace(&self) { - self.sender.send(StateChange::Restart(None)).unwrap(); + pub fn restart_workspace(&self, saved_file: Option<AbsPathBuf>) { + self.sender.send(StateChange::Restart { package: None, saved_file }).unwrap(); } /// Schedule a re-start of the cargo check worker to do a package wide check. pub fn restart_for_package(&self, package: String) { - self.sender.send(StateChange::Restart(Some(package))).unwrap(); + self.sender + .send(StateChange::Restart { package: Some(package), saved_file: None }) + .unwrap(); } /// Stop this cargo check worker. @@ -158,7 +162,7 @@ pub enum Progress { } enum StateChange { - Restart(Option<String>), + Restart { package: Option<String>, saved_file: Option<AbsPathBuf> }, Cancel, } @@ -171,6 +175,7 @@ struct FlycheckActor { /// Either the workspace root of the workspace we are flychecking, /// or the project root of the project. root: AbsPathBuf, + sysroot_root: Option<AbsPathBuf>, /// CargoHandle exists to wrap around the communication needed to be able to /// run `cargo check` without blocking. Currently the Rust standard library /// doesn't provide a way to read sub-process output without blocking, so we @@ -184,15 +189,25 @@ enum Event { CheckEvent(Option<CargoCheckMessage>), } +const SAVED_FILE_PLACEHOLDER: &str = "$saved_file"; + impl FlycheckActor { fn new( id: usize, sender: Box<dyn Fn(Message) + Send>, config: FlycheckConfig, + sysroot_root: Option<AbsPathBuf>, workspace_root: AbsPathBuf, ) -> FlycheckActor { tracing::info!(%id, ?workspace_root, "Spawning flycheck"); - FlycheckActor { id, sender, config, root: workspace_root, command_handle: None } + FlycheckActor { + id, + sender, + config, + sysroot_root, + root: workspace_root, + command_handle: None, + } } fn report_progress(&self, progress: Progress) { @@ -218,7 +233,7 @@ impl FlycheckActor { tracing::debug!(flycheck_id = self.id, "flycheck cancelled"); self.cancel_check_process(); } - Event::RequestStateChange(StateChange::Restart(package)) => { + Event::RequestStateChange(StateChange::Restart { package, saved_file }) => { // Cancel the previously spawned process self.cancel_check_process(); while let Ok(restart) = inbox.recv_timeout(Duration::from_millis(50)) { @@ -228,7 +243,11 @@ } } - let command = self.check_command(package.as_deref()); + let command = + match self.check_command(package.as_deref(), saved_file.as_deref()) { + Some(c) => c, + None => continue, + }; let formatted_command = format!("{:?}", command); tracing::debug!(?command, "will restart flycheck"); @@ -302,7 +321,14 @@ } } - fn check_command(&self, package: Option<&str>) -> Command { + /// Construct a `Command` object for checking the user's code. If the user + /// has specified a custom command with placeholders that we cannot fill, + /// return None.
+ fn check_command( + &self, + package: Option<&str>, + saved_file: Option<&AbsPath>, + ) -> Option<Command> { let (mut cmd, args) = match &self.config { FlycheckConfig::CargoCommand { command, @@ -316,7 +342,10 @@ ansi_color_output, target_dir, } => { - let mut cmd = Command::new(toolchain::cargo()); + let mut cmd = Command::new(Tool::Cargo.path()); + if let Some(sysroot_root) = &self.sysroot_root { + cmd.env("RUSTUP_TOOLCHAIN", AsRef::<std::path::Path>::as_ref(sysroot_root)); + } cmd.arg(command); cmd.current_dir(&self.root); @@ -355,7 +384,7 @@ cmd.arg("--target-dir").arg(target_dir); } cmd.envs(extra_env); - (cmd, extra_args) + (cmd, extra_args.clone()) } FlycheckConfig::CustomCommand { command, @@ -384,12 +413,34 @@ } } - (cmd, args) + if args.contains(&SAVED_FILE_PLACEHOLDER.to_owned()) { + // If the custom command has a $saved_file placeholder, and + // we're saving a file, replace the placeholder in the arguments. + if let Some(saved_file) = saved_file { + let args = args + .iter() + .map(|arg| { + if arg == SAVED_FILE_PLACEHOLDER { + saved_file.to_string() + } else { + arg.clone() + } + }) + .collect(); + (cmd, args) + } else { + // The custom command has a $saved_file placeholder, + // but we had an IDE event that wasn't a file save. Do nothing. + return None; + } + } else { + (cmd, args.clone()) + } } }; cmd.args(args); - cmd + Some(cmd) } fn send(&self, check_task: Message) { diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs index c91a5497262b7..519706c65f29b 100644 --- a/crates/hir-def/src/attr.rs +++ b/crates/hir-def/src/attr.rs @@ -377,27 +377,39 @@ impl AttrsWithOwner { AttrDefId::GenericParamId(it) => match it { GenericParamId::ConstParamId(it) => { let src = it.parent().child_source(db); - RawAttrs::from_attrs_owner( - db.upcast(), - src.with_value(&src.value[it.local_id()]), - db.span_map(src.file_id).as_ref(), - ) + // FIXME: We should never be getting `None` here. + match src.value.get(it.local_id()) { + Some(val) => RawAttrs::from_attrs_owner( + db.upcast(), + src.with_value(val), + db.span_map(src.file_id).as_ref(), + ), + None => RawAttrs::EMPTY, + } } GenericParamId::TypeParamId(it) => { let src = it.parent().child_source(db); - RawAttrs::from_attrs_owner( - db.upcast(), - src.with_value(&src.value[it.local_id()]), - db.span_map(src.file_id).as_ref(), - ) + // FIXME: We should never be getting `None` here. + match src.value.get(it.local_id()) { + Some(val) => RawAttrs::from_attrs_owner( + db.upcast(), + src.with_value(val), + db.span_map(src.file_id).as_ref(), + ), + None => RawAttrs::EMPTY, + } } GenericParamId::LifetimeParamId(it) => { let src = it.parent.child_source(db); - RawAttrs::from_attrs_owner( - db.upcast(), - src.with_value(&src.value[it.local_id]), - db.span_map(src.file_id).as_ref(), - ) + // FIXME: We should never be getting `None` here.
+ match src.value.get(it.local_id) { + Some(val) => RawAttrs::from_attrs_owner( + db.upcast(), + src.with_value(val), + db.span_map(src.file_id).as_ref(), + ), + None => RawAttrs::EMPTY, + } } }, AttrDefId::ExternBlockId(it) => attrs_from_item_tree_loc(db, it), diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs index 29ac666277d0d..5dc5fedd23070 100644 --- a/crates/hir-def/src/body/lower.rs +++ b/crates/hir-def/src/body/lower.rs @@ -416,6 +416,11 @@ impl ExprCollector<'_> { let expr = e.expr().map(|e| self.collect_expr(e)); self.alloc_expr(Expr::Return { expr }, syntax_ptr) } + ast::Expr::BecomeExpr(e) => { + let expr = + e.expr().map(|e| self.collect_expr(e)).unwrap_or_else(|| self.missing_expr()); + self.alloc_expr(Expr::Become { expr }, syntax_ptr) + } ast::Expr::YieldExpr(e) => { self.is_lowering_coroutine = true; let expr = e.expr().map(|e| self.collect_expr(e)); @@ -1000,10 +1005,6 @@ impl ExprCollector<'_> { krate: *krate, }); } - Some(ExpandError::RecursionOverflowPoisoned) => { - // Recursion limit has been reached in the macro expansion tree, but not in - // this very macro call. Don't add diagnostics to avoid duplication. - } Some(err) => { self.source_map.diagnostics.push(BodyDiagnostic::MacroError { node: InFile::new(outer_file, syntax_ptr), @@ -1112,7 +1113,7 @@ impl ExprCollector<'_> { statements.push(Statement::Expr { expr, has_semi }); } } - ast::Stmt::Item(_item) => (), + ast::Stmt::Item(_item) => statements.push(Statement::Item), } } diff --git a/crates/hir-def/src/body/pretty.rs b/crates/hir-def/src/body/pretty.rs index 4afb408651703..7007dea638ef9 100644 --- a/crates/hir-def/src/body/pretty.rs +++ b/crates/hir-def/src/body/pretty.rs @@ -261,6 +261,11 @@ impl Printer<'_> { self.print_expr(*expr); } } + Expr::Become { expr } => { + w!(self, "become"); + self.whitespace(); + self.print_expr(*expr); + } Expr::Yield { expr } => { w!(self, "yield"); if let Some(expr) = expr { @@ -623,6 +628,7 @@ impl Printer<'_> { } wln!(self); } + Statement::Item => (), } } diff --git a/crates/hir-def/src/body/scope.rs b/crates/hir-def/src/body/scope.rs index ab623250d4072..69b82ae871a4e 100644 --- a/crates/hir-def/src/body/scope.rs +++ b/crates/hir-def/src/body/scope.rs @@ -197,6 +197,7 @@ fn compute_block_scopes( Statement::Expr { expr, .. } => { compute_expr_scopes(*expr, body, scopes, scope); } + Statement::Item => (), } } if let Some(expr) = tail { diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs index 7ce05b64d022e..f506864902c47 100644 --- a/crates/hir-def/src/data.rs +++ b/crates/hir-def/src/data.rs @@ -634,7 +634,6 @@ impl<'a> AssocItemCollector<'a> { attr, ) { Ok(ResolvedAttr::Macro(call_id)) => { - self.attr_calls.push((ast_id, call_id)); // If proc attribute macro expansion is disabled, skip expanding it here if !self.db.expand_proc_attr_macros() { continue 'attrs; @@ -647,10 +646,21 @@ impl<'a> AssocItemCollector<'a> { // disabled. This is analogous to the handling in // `DefCollector::collect_macros`. 
if exp.is_dummy() { + self.diagnostics.push(DefDiagnostic::unresolved_proc_macro( + self.module_id.local_id, + loc.kind, + loc.def.krate, + )); + + continue 'attrs; + } + if exp.is_disabled() { continue 'attrs; } } + self.attr_calls.push((ast_id, call_id)); + let res = self.expander.enter_expand_id::<ast::AssocItem>(self.db, call_id); self.collect_macro_items(res, &|| loc.kind.clone()); diff --git a/crates/hir-def/src/expander.rs b/crates/hir-def/src/expander.rs index b83feeedc34c1..b99df1ed59348 100644 --- a/crates/hir-def/src/expander.rs +++ b/crates/hir-def/src/expander.rs @@ -140,13 +140,11 @@ impl Expander { // The overflow error should have been reported when it occurred (see the next branch), // so don't return overflow error here to avoid diagnostics duplication. cov_mark::hit!(overflow_but_not_me); - return ExpandResult::only_err(ExpandError::RecursionOverflowPoisoned); + return ExpandResult::ok(None); } else if self.recursion_limit.check(self.recursion_depth as usize + 1).is_err() { self.recursion_depth = u32::MAX; cov_mark::hit!(your_stack_belongs_to_me); - return ExpandResult::only_err(ExpandError::other( - "reached recursion limit during macro expansion", - )); + return ExpandResult::only_err(ExpandError::RecursionOverflow); } let ExpandResult { value, err } = op(self); diff --git a/crates/hir-def/src/find_path.rs b/crates/hir-def/src/find_path.rs index 2e137f67b4c2a..26247ba5b507d 100644 --- a/crates/hir-def/src/find_path.rs +++ b/crates/hir-def/src/find_path.rs @@ -447,18 +447,25 @@ fn select_best_path( } const STD_CRATES: [Name; 3] = [known::std, known::core, known::alloc]; - let choose = |new_path: (ModPath, _), old_path: (ModPath, _)| { - let new_has_prelude = new_path.0.segments().iter().any(|seg| seg == &known::prelude); - let old_has_prelude = old_path.0.segments().iter().any(|seg| seg == &known::prelude); + let choose = |new: (ModPath, _), old: (ModPath, _)| { + let (new_path, _) = &new; + let (old_path, _) = &old; + let new_has_prelude = new_path.segments().iter().any(|seg| seg == &known::prelude); + let old_has_prelude = old_path.segments().iter().any(|seg| seg == &known::prelude); match (new_has_prelude, old_has_prelude, prefer_prelude) { - (true, false, true) | (false, true, false) => new_path, - (true, false, false) | (false, true, true) => old_path, - // no prelude difference in the paths, so pick the smaller one + (true, false, true) | (false, true, false) => new, + (true, false, false) | (false, true, true) => old, + // no prelude difference in the paths, so pick the shorter one (true, true, _) | (false, false, _) => { - if new_path.0.len() < old_path.0.len() { - new_path + let new_path_is_shorter = new_path + .len() + .cmp(&old_path.len()) + .then_with(|| new_path.textual_len().cmp(&old_path.textual_len())) + .is_lt(); + if new_path_is_shorter { + new } else { - old_path + old } } } @@ -469,8 +476,8 @@ let rank = match prefer_no_std { false => |name: &Name| match name { name if name == &known::core => 0, - name if name == &known::alloc => 0, - name if name == &known::std => 1, + name if name == &known::alloc => 1, + name if name == &known::std => 2, _ => unreachable!(), }, true => |name: &Name| match name { @@ -1539,4 +1546,38 @@ pub mod foo { "krate::prelude::Foo", ); } + + #[test] + fn respect_segment_length() { + check_found_path( + r#" +//- /main.rs crate:main deps:petgraph +$0 +//- /petgraph.rs crate:petgraph +pub mod graph { + pub use crate::graph_impl::{ + NodeIndex + }; +} + +mod graph_impl { + pub struct NodeIndex<Ix>(Ix); +} + +pub mod stable_graph
{ + #[doc(no_inline)] + pub use crate::graph::{NodeIndex}; +} + +pub mod prelude { + #[doc(no_inline)] + pub use crate::graph::{NodeIndex}; +} +"#, + "petgraph::graph::NodeIndex", + "petgraph::graph::NodeIndex", + "petgraph::graph::NodeIndex", + "petgraph::graph::NodeIndex", + ); + } } diff --git a/crates/hir-def/src/hir.rs b/crates/hir-def/src/hir.rs index ac44d379415c0..34b2910b4f5e5 100644 --- a/crates/hir-def/src/hir.rs +++ b/crates/hir-def/src/hir.rs @@ -182,6 +182,7 @@ pub enum Expr { tail: Option<ExprId>, }, Const(ConstBlockId), + // FIXME: Fold this into Block with an unsafe flag? Unsafe { id: Option<BlockId>, statements: Box<[Statement]>, @@ -216,6 +217,9 @@ Return { expr: Option<ExprId>, }, + Become { + expr: ExprId, + }, Yield { expr: Option<ExprId>, }, @@ -349,6 +353,9 @@ pub enum Statement { expr: ExprId, has_semi: bool, }, + // At the moment, we only use this to figure out if a return expression + // is really the last statement of a block. See #16566 + Item, } impl Expr { @@ -382,6 +389,7 @@ } } Statement::Expr { expr: expression, .. } => f(*expression), + Statement::Item => (), } } if let &Some(expr) = tail { @@ -410,6 +418,7 @@ f(expr); } } + Expr::Become { expr } => f(*expr), Expr::RecordLit { fields, spread, .. } => { for field in fields.iter() { f(field.expr); diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs b/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs index 0909d8c835443..63f211022c975 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs @@ -33,7 +33,7 @@ m!(&k"); "#, expect![[r#" macro_rules! m { ($i:literal) => {}; } -/* error: invalid token tree */"#]], +/* error: mismatched delimiters */"#]], ); } diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs b/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs index e875950e4e5f9..2d289b7683389 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs @@ -68,26 +68,26 @@ m2!(); "#, expect![[r#" macro_rules! i1 { invalid } -/* error: invalid macro definition: expected subtree */ +/* error: macro definition has parse errors */ macro_rules! e1 { $i:ident => () } -/* error: invalid macro definition: expected subtree */ +/* error: macro definition has parse errors */ macro_rules! e2 { ($i:ident) () } -/* error: invalid macro definition: expected `=` */ +/* error: macro definition has parse errors */ macro_rules! e3 { ($(i:ident)_) => () } -/* error: invalid macro definition: invalid repeat */ +/* error: macro definition has parse errors */ macro_rules! f1 { ($i) => ($i) } -/* error: invalid macro definition: missing fragment specifier */ +/* error: macro definition has parse errors */ macro_rules! f2 { ($i:) => ($i) } -/* error: invalid macro definition: missing fragment specifier */ +/* error: macro definition has parse errors */ macro_rules! f3 { ($i:_) => () } -/* error: invalid macro definition: missing fragment specifier */ +/* error: macro definition has parse errors */ macro_rules! m1 { ($$i) => () } -/* error: invalid macro definition: `$$` is not allowed on the pattern side */ +/* error: macro definition has parse errors */ macro_rules! m2 { () => ( ${invalid()} ) } -/* error: invalid macro definition: invalid metavariable expression */ +/* error: macro definition has parse errors */ "#]], ) } @@ -137,18 +137,18 @@ macro_rules! m9 { ($($($($i:ident)?)*)+) => {}; } macro_rules!
mA { ($($($($i:ident)+)?)*) => {}; } macro_rules! mB { ($($($($i:ident)+)*)?) => {}; } -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ "#]], ); } diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/metavar_expr.rs b/crates/hir-def/src/macro_expansion_tests/mbe/metavar_expr.rs index 6560d0ec4664b..bf70119838766 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/metavar_expr.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/metavar_expr.rs @@ -275,9 +275,9 @@ macro_rules! depth_too_large { } fn test() { - /* error: invalid macro definition: invalid metavariable expression */; - /* error: invalid macro definition: invalid metavariable expression */; - /* error: invalid macro definition: invalid metavariable expression */; + /* error: macro definition has parse errors */; + /* error: macro definition has parse errors */; + /* error: macro definition has parse errors */; } "#]], ); diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs b/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs index 6717ee1aa5fdf..4aad53c3bd71c 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs @@ -1090,3 +1090,57 @@ fn main() { "#]], ); } + +#[test] +fn regression_16529() { + check( + r#" +mod any { + #[macro_export] + macro_rules! nameable { + { + struct $name:ident[$a:lifetime] + } => { + $crate::any::nameable! { + struct $name[$a] + a + } + }; + { + struct $name:ident[$a:lifetime] + a + } => {}; + } + pub use nameable; + + nameable! { + Name['a] + } +} +"#, + expect![[r#" +mod any { + #[macro_export] + macro_rules! nameable { + { + struct $name:ident[$a:lifetime] + } => { + $crate::any::nameable! { + struct $name[$a] + a + } + }; + { + struct $name:ident[$a:lifetime] + a + } => {}; + } + pub use nameable; + + /* error: unexpected token in input */$crate::any::nameable! 
{ struct $name[$a]a } } "#]], ); } diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs b/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs index ae56934f632f1..362c189f6a734 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs @@ -97,8 +97,8 @@ m2!(x macro_rules! m1 { ($x:ident) => { ($x } } macro_rules! m2 { ($x:ident) => {} } -/* error: invalid macro definition: expected subtree */ -/* error: invalid token tree */ +/* error: macro definition has parse errors */ +/* error: mismatched delimiters */ "#]], ) } diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs index fc5a6e80a427d..23b10cfd8e6c7 100644 --- a/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -58,6 +58,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream name: "identity_when_valid".into(), kind: ProcMacroKind::Attr, expander: sync::Arc::new(IdentityWhenValidProcMacroExpander), + disabled: false, }, )]; let db = TestDB::with_files_extra_proc_macros(ra_fixture, extra_proc_macros); diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index 21cc28f1b3d0d..88838f58fe787 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -11,7 +11,7 @@ use either::Either; use hir_expand::{ ast_id_map::FileAstId, attrs::{Attr, AttrId}, - builtin_attr_macro::find_builtin_attr, + builtin_attr_macro::{find_builtin_attr, BuiltinAttrExpander}, builtin_derive_macro::find_builtin_derive, builtin_fn_macro::find_builtin_macro, name::{name, AsName, Name}, @@ -98,9 +98,13 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI }; ( name.as_name(), - CustomProcMacroExpander::new(hir_expand::proc_macro::ProcMacroId( - idx as u32, - )), + if it.disabled { + CustomProcMacroExpander::disabled() + } else { + CustomProcMacroExpander::new( + hir_expand::proc_macro::ProcMacroId::new(idx as u32), + ) + }, ) }) .collect()) @@ -604,9 +608,6 @@ impl DefCollector<'_> { id: ItemTreeId<item_tree::Function>, fn_id: FunctionId, ) { - if self.def_map.block.is_some() { - return; - } let kind = def.kind.to_basedb_kind(); let (expander, kind) = match self.proc_macros.as_ref().map(|it| it.iter().find(|(n, _)| n == &def.name)) { @@ -1120,9 +1121,16 @@ impl DefCollector<'_> { let mut push_resolved = |directive: &MacroDirective, call_id| { resolved.push((directive.module_id, directive.depth, directive.container, call_id)); }; + + #[derive(PartialEq, Eq)] + enum Resolved { + Yes, + No, + } + let mut res = ReachedFixedPoint::Yes; // Retain unresolved macros after this round of resolution. - macros.retain(|directive| { + let mut retain = |directive: &MacroDirective| { let subns = match &directive.kind { MacroDirectiveKind::FnLike { .. } => MacroSubNs::Bang, MacroDirectiveKind::Attr { .. } | MacroDirectiveKind::Derive { ..
} => { @@ -1156,10 +1164,11 @@ impl DefCollector<'_> { self.def_map.modules[directive.module_id] .scope .add_macro_invoc(ast_id.ast_id, call_id); + push_resolved(directive, call_id); res = ReachedFixedPoint::No; - return false; + return Resolved::Yes; } } MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, call_site } => { @@ -1198,7 +1207,7 @@ impl DefCollector<'_> { push_resolved(directive, call_id); res = ReachedFixedPoint::No; - return false; + return Resolved::Yes; } } MacroDirectiveKind::Attr { ast_id: file_ast_id, mod_item, attr, tree } => { @@ -1221,7 +1230,7 @@ impl DefCollector<'_> { } .collect(&[*mod_item], directive.container); res = ReachedFixedPoint::No; - false + Resolved::Yes }; if let Some(ident) = path.as_ident() { @@ -1237,13 +1246,18 @@ impl DefCollector<'_> { let def = match resolver_def_id(path.clone()) { Some(def) if def.is_attribute() => def, - _ => return true, + _ => return Resolved::No, }; - if matches!( - def, - MacroDefId { kind: MacroDefKind::BuiltInAttr(expander, _),.. } - if expander.is_derive() - ) { + + if let MacroDefId { + kind: + MacroDefKind::BuiltInAttr( + BuiltinAttrExpander::Derive | BuiltinAttrExpander::DeriveConst, + _, + ), + .. + } = def + { // Resolved to `#[derive]`, we don't actually expand this attribute like // normal (as that would just be an identity expansion with extra output) // Instead we treat derive attributes special and apply them separately. @@ -1316,16 +1330,6 @@ impl DefCollector<'_> { let call_id = attr_macro_as_call_id(self.db, file_ast_id, attr, self.def_map.krate, def); - // If proc attribute macro expansion is disabled, skip expanding it here - if !self.db.expand_proc_attr_macros() { - self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro( - directive.module_id, - self.db.lookup_intern_macro_call(call_id).kind, - def.krate, - )); - return recollect_without(self); - } - // Skip #[test]/#[bench] expansion, which would merely result in more memory usage // due to duplicating functions into macro expansions if matches!( @@ -1337,17 +1341,29 @@ impl DefCollector<'_> { } if let MacroDefKind::ProcMacro(exp, ..) = def.kind { - if exp.is_dummy() { - // If there's no expander for the proc macro (e.g. - // because proc macros are disabled, or building the - // proc macro crate failed), report this and skip - // expansion like we would if it was disabled + // If proc attribute macro expansion is disabled, skip expanding it here + if !self.db.expand_proc_attr_macros() { self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro( directive.module_id, self.db.lookup_intern_macro_call(call_id).kind, def.krate, )); + return recollect_without(self); + } + // If there's no expander for the proc macro (e.g. + // because proc macros are disabled, or building the + // proc macro crate failed), report this and skip + // expansion like we would if it was disabled + if exp.is_dummy() { + self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro( + directive.module_id, + self.db.lookup_intern_macro_call(call_id).kind, + def.krate, + )); + return recollect_without(self); + } + if exp.is_disabled() { return recollect_without(self); } } @@ -1358,12 +1374,13 @@ impl DefCollector<'_> { push_resolved(directive, call_id); res = ReachedFixedPoint::No; - return false; + return Resolved::Yes; } } - true - }); + Resolved::No + }; + macros.retain(|it| retain(it) == Resolved::No); // Attribute resolution can add unresolved macro invocations, so concatenate the lists. 
macros.extend(mem::take(&mut self.unresolved_macros)); self.unresolved_macros = macros; @@ -1673,7 +1690,11 @@ impl ModCollector<'_, '_> { FunctionLoc { container, id: ItemTreeId::new(self.tree_id, id) }.intern(db); let vis = resolve_vis(def_map, &self.item_tree[it.visibility]); - if self.def_collector.is_proc_macro && self.module_id == DefMap::ROOT { + + if self.def_collector.def_map.block.is_none() + && self.def_collector.is_proc_macro + && self.module_id == DefMap::ROOT + { if let Some(proc_macro) = attrs.parse_proc_macro_decl(&it.name) { self.def_collector.export_proc_macro( proc_macro, @@ -2333,7 +2354,7 @@ impl ModCollector<'_, '_> { resolved_res.resolved_def.take_macros().map(|it| db.macro_def(it)) }, ) { - // FIXME: if there were errors, this mightve been in the eager expansion from an + // FIXME: if there were errors, this might've been in the eager expansion from an // unresolved macro, so we need to push this into late macro resolution. see fixme above if res.err.is_none() { // Legacy macros need to be expanded immediately, so that any macros they produce diff --git a/crates/hir-def/src/nameres/diagnostics.rs b/crates/hir-def/src/nameres/diagnostics.rs index 0a3f7bf7ec3d6..161b2c0599099 100644 --- a/crates/hir-def/src/nameres/diagnostics.rs +++ b/crates/hir-def/src/nameres/diagnostics.rs @@ -103,6 +103,9 @@ impl DefDiagnostic { } // FIXME: What's the difference between this and unresolved_macro_call + // FIXME: This is used for a lot of things, unresolved proc macros, disabled proc macros, etc + // yet the diagnostic handler in ide-diagnostics has to figure out what happened because this + // struct loses all that information! pub(crate) fn unresolved_proc_macro( container: LocalModuleId, ast: MacroCallKind, diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs index 6d3de0e55d24d..90cd3af75783d 100644 --- a/crates/hir-expand/src/builtin_fn_macro.rs +++ b/crates/hir-expand/src/builtin_fn_macro.rs @@ -446,7 +446,7 @@ fn compile_error_expand( ) -> ExpandResult<tt::Subtree> { let err = match &*tt.token_trees { [tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => match unquote_str(it) { - Some(unquoted) => ExpandError::other(unquoted), + Some(unquoted) => ExpandError::other(unquoted.into_boxed_str()), None => ExpandError::other("`compile_error!` argument must be a string"), }, _ => ExpandError::other("`compile_error!` argument must be a string"), diff --git a/crates/hir-expand/src/change.rs b/crates/hir-expand/src/change.rs index 67b7df198e93e..c6611438e64d8 100644 --- a/crates/hir-expand/src/change.rs +++ b/crates/hir-expand/src/change.rs @@ -1,6 +1,10 @@ //! Defines a unit of change that can be applied to the database to get the next //! state. Changes are transactional.
-use base_db::{salsa::Durability, CrateGraph, FileChange, SourceDatabaseExt, SourceRoot}; +use base_db::{ + salsa::Durability, CrateGraph, CrateId, FileChange, SourceDatabaseExt, SourceRoot, + TargetLayoutLoadResult, Version, +}; +use la_arena::RawIdx; use span::FileId; use triomphe::Arc; @@ -10,6 +14,8 @@ use crate::{db::ExpandDatabase, proc_macro::ProcMacros}; pub struct Change { pub source_change: FileChange, pub proc_macros: Option<ProcMacros>, + pub toolchains: Option<Vec<Option<Version>>>, + pub target_data_layouts: Option<Vec<TargetLayoutLoadResult>>, } impl Change { @@ -22,6 +28,24 @@ if let Some(proc_macros) = self.proc_macros { db.set_proc_macros_with_durability(Arc::new(proc_macros), Durability::HIGH); } + if let Some(target_data_layouts) = self.target_data_layouts { + for (id, val) in target_data_layouts.into_iter().enumerate() { + db.set_data_layout_with_durability( + CrateId::from_raw(RawIdx::from(id as u32)), + val, + Durability::HIGH, + ); + } + } + if let Some(toolchains) = self.toolchains { + for (id, val) in toolchains.into_iter().enumerate() { + db.set_toolchain_with_durability( + CrateId::from_raw(RawIdx::from(id as u32)), + val, + Durability::HIGH, + ); + } + } } pub fn change_file(&mut self, file_id: FileId, new_text: Option<Arc<str>>) { @@ -36,6 +60,14 @@ self.proc_macros = Some(proc_macros); } + pub fn set_toolchains(&mut self, toolchains: Vec<Option<Version>>) { + self.toolchains = Some(toolchains); + } + + pub fn set_target_data_layouts(&mut self, target_data_layouts: Vec<TargetLayoutLoadResult>) { + self.target_data_layouts = Some(target_data_layouts); + } + pub fn set_roots(&mut self, roots: Vec<SourceRoot>) { self.source_change.set_roots(roots) } diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index 6a288cf91979a..7b62eaa0289dc 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -108,7 +108,7 @@ pub trait ExpandDatabase: SourceDatabase { fn macro_arg( &self, id: MacroCallId, - ) -> ValueResult<Option<(Arc<tt::Subtree>, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>>; + ) -> ValueResult<(Arc<tt::Subtree>, SyntaxFixupUndoInfo), Arc<Box<[SyntaxError]>>>; /// Fetches the expander for this macro. #[salsa::transparent] #[salsa::invoke(TokenExpander::macro_expander)] @@ -326,58 +326,77 @@ fn macro_arg( db: &dyn ExpandDatabase, id: MacroCallId, // FIXME: consider the following by putting fixup info into eager call info args - // ) -> ValueResult<Option<Arc<(tt::Subtree, SyntaxFixupUndoInfo)>>, Arc<Box<[SyntaxError]>>> { -) -> ValueResult<Option<(Arc<tt::Subtree>, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>> { - let mismatched_delimiters = |arg: &SyntaxNode| { - let first = arg.first_child_or_token().map_or(T![.], |it| it.kind()); - let last = arg.last_child_or_token().map_or(T![.], |it| it.kind()); - let well_formed_tt = - matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}'])); - if !well_formed_tt { - // Don't expand malformed (unbalanced) macro invocations. This is - // less than ideal, but trying to expand unbalanced macro calls - // sometimes produces pathological, deeply nested code which breaks - // all kinds of things. - // - // Some day, we'll have explicit recursion counters for all - // recursive things, at which point this code might be removed. - cov_mark::hit!(issue9358_bad_macro_stack_overflow); - Some(Arc::new(Box::new([SyntaxError::new( - "unbalanced token tree".to_owned(), - arg.text_range(), - )]) as Box<[_]>)) - } else { - None - } - }; + // ) -> ValueResult<Arc<(tt::Subtree, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>> { +) -> ValueResult<(Arc<tt::Subtree>, SyntaxFixupUndoInfo), Arc<Box<[SyntaxError]>>> { let loc = db.lookup_intern_macro_call(id); if let Some(EagerCallInfo { arg, ..
}) = matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) .then(|| loc.eager.as_deref()) .flatten() { - ValueResult::ok(Some((arg.clone(), SyntaxFixupUndoInfo::NONE))) + ValueResult::ok((arg.clone(), SyntaxFixupUndoInfo::NONE)) } else { let (parse, map) = parse_with_map(db, loc.kind.file_id()); let root = parse.syntax_node(); let syntax = match loc.kind { MacroCallKind::FnLike { ast_id, .. } => { + let dummy_tt = |kind| { + ( + Arc::new(tt::Subtree { + delimiter: tt::Delimiter { + open: loc.call_site, + close: loc.call_site, + kind, + }, + token_trees: Box::default(), + }), + SyntaxFixupUndoInfo::default(), + ) + }; + let node = &ast_id.to_ptr(db).to_node(&root); let offset = node.syntax().text_range().start(); - match node.token_tree() { - Some(tt) => { - let tt = tt.syntax(); - if let Some(e) = mismatched_delimiters(tt) { - return ValueResult::only_err(e); - } - tt.clone() - } - None => { - return ValueResult::only_err(Arc::new(Box::new([ - SyntaxError::new_at_offset("missing token tree".to_owned(), offset), - ]))); - } + let Some(tt) = node.token_tree() else { + return ValueResult::new( + dummy_tt(tt::DelimiterKind::Invisible), + Arc::new(Box::new([SyntaxError::new_at_offset( + "missing token tree".to_owned(), + offset, + )])), + ); + }; + let first = tt.left_delimiter_token().map(|it| it.kind()).unwrap_or(T!['(']); + let last = tt.right_delimiter_token().map(|it| it.kind()).unwrap_or(T![.]); + + let mismatched_delimiters = !matches!( + (first, last), + (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']) + ); + if mismatched_delimiters { + // Don't expand malformed (unbalanced) macro invocations. This is + // less than ideal, but trying to expand unbalanced macro calls + // sometimes produces pathological, deeply nested code which breaks + // all kinds of things. + // + // So instead, we'll return an empty subtree here + cov_mark::hit!(issue9358_bad_macro_stack_overflow); + + let kind = match first { + _ if loc.def.is_proc_macro() => tt::DelimiterKind::Invisible, + T!['('] => tt::DelimiterKind::Parenthesis, + T!['['] => tt::DelimiterKind::Bracket, + T!['{'] => tt::DelimiterKind::Brace, + _ => tt::DelimiterKind::Invisible, + }; + return ValueResult::new( + dummy_tt(kind), + Arc::new(Box::new([SyntaxError::new_at_offset( + "mismatched delimiters".to_owned(), + offset, + )])), + ); } + tt.syntax().clone() } MacroCallKind::Derive { ast_id, .. } => { ast_id.to_ptr(db).to_node(&root).syntax().clone() @@ -427,15 +446,15 @@ fn macro_arg( if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) { match parse.errors() { - [] => ValueResult::ok(Some((Arc::new(tt), undo_info))), + [] => ValueResult::ok((Arc::new(tt), undo_info)), errors => ValueResult::new( - Some((Arc::new(tt), undo_info)), + (Arc::new(tt), undo_info), // Box::<[_]>::from(res.errors()), not stable yet Arc::new(errors.to_vec().into_boxed_slice()), ), } } else { - ValueResult::ok(Some((Arc::new(tt), undo_info))) + ValueResult::ok((Arc::new(tt), undo_info)) } } } @@ -519,21 +538,20 @@ fn macro_expand( expander.expand(db, macro_call_id, &node, map.as_ref()) } _ => { - let ValueResult { value, err } = db.macro_arg(macro_call_id); - let Some((macro_arg, undo_info)) = value else { - return ExpandResult { - value: CowArc::Owned(tt::Subtree { - delimiter: tt::Delimiter::invisible_spanned(loc.call_site), - token_trees: Box::new([]), - }), - // FIXME: We should make sure to enforce an invariant that invalid macro - // calls do not reach this call path! 
- err: Some(ExpandError::other("invalid token tree")), - }; + let ValueResult { value: (macro_arg, undo_info), err } = db.macro_arg(macro_call_id); + let format_parse_err = |err: Arc>| { + let mut buf = String::new(); + for err in &**err { + use std::fmt::Write; + _ = write!(buf, "{}, ", err); + } + buf.pop(); + buf.pop(); + ExpandError::other(buf) }; let arg = &*macro_arg; - match loc.def.kind { + let res = match loc.def.kind { MacroDefKind::Declarative(id) => { db.decl_macro_expander(loc.def.krate, id).expand(db, arg.clone(), macro_call_id) } @@ -549,16 +567,7 @@ fn macro_expand( MacroDefKind::BuiltInEager(..) if loc.eager.is_none() => { return ExpandResult { value: CowArc::Arc(macro_arg.clone()), - err: err.map(|err| { - let mut buf = String::new(); - for err in &**err { - use std::fmt::Write; - _ = write!(buf, "{}, ", err); - } - buf.pop(); - buf.pop(); - ExpandError::other(buf) - }), + err: err.map(format_parse_err), }; } MacroDefKind::BuiltInEager(it, _) => { @@ -570,6 +579,11 @@ fn macro_expand( res } _ => unreachable!(), + }; + ExpandResult { + value: res.value, + // if the arg had parse errors, show them instead of the expansion errors + err: err.map(format_parse_err).or(res.err), } } }; @@ -597,17 +611,7 @@ fn macro_expand( fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult> { let loc = db.lookup_intern_macro_call(id); - let Some((macro_arg, undo_info)) = db.macro_arg(id).value else { - return ExpandResult { - value: Arc::new(tt::Subtree { - delimiter: tt::Delimiter::invisible_spanned(loc.call_site), - token_trees: Box::new([]), - }), - // FIXME: We should make sure to enforce an invariant that invalid macro - // calls do not reach this call path! - err: Some(ExpandError::other("invalid token tree")), - }; - }; + let (macro_arg, undo_info) = db.macro_arg(id).value; let expander = match loc.def.kind { MacroDefKind::ProcMacro(expander, ..) 
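The `format_parse_err` helper introduced above joins every parse error with a `", "` separator, then pops the two trailing characters. A self-contained sketch of the same joining trick (the function name is illustrative, not from the codebase):

    use std::fmt::Write;

    // Join error messages with ", ", mirroring the write-then-pop-twice trick above.
    fn join_errors<T: std::fmt::Display>(errors: &[T]) -> String {
        let mut buf = String::new();
        for err in errors {
            _ = write!(buf, "{}, ", err);
        }
        buf.pop(); // trailing ' '
        buf.pop(); // trailing ','
        buf
    }

    fn main() {
        let errs = ["missing token tree", "mismatched delimiters"];
        assert_eq!(join_errors(&errs), "missing token tree, mismatched delimiters");
    }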
=> expander, diff --git a/crates/hir-expand/src/declarative.rs b/crates/hir-expand/src/declarative.rs index 37084ee8b93c9..6874336cd2d05 100644 --- a/crates/hir-expand/src/declarative.rs +++ b/crates/hir-expand/src/declarative.rs @@ -31,7 +31,7 @@ impl DeclarativeMacroExpander { call_id: MacroCallId, ) -> ExpandResult { let loc = db.lookup_intern_macro_call(call_id); - let toolchain = &db.crate_graph()[loc.def.krate].toolchain; + let toolchain = db.toolchain(loc.def.krate); let new_meta_vars = toolchain.as_ref().map_or(false, |version| { REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches( &base_db::Version { @@ -44,9 +44,9 @@ impl DeclarativeMacroExpander { ) }); match self.mac.err() { - Some(e) => ExpandResult::new( + Some(_) => ExpandResult::new( tt::Subtree::empty(tt::DelimSpan { open: loc.call_site, close: loc.call_site }), - ExpandError::other(format!("invalid macro definition: {e}")), + ExpandError::MacroDefinition, ), None => self .mac @@ -67,7 +67,7 @@ impl DeclarativeMacroExpander { krate: CrateId, call_site: Span, ) -> ExpandResult { - let toolchain = &db.crate_graph()[krate].toolchain; + let toolchain = db.toolchain(krate); let new_meta_vars = toolchain.as_ref().map_or(false, |version| { REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches( &base_db::Version { @@ -80,9 +80,9 @@ impl DeclarativeMacroExpander { ) }); match self.mac.err() { - Some(e) => ExpandResult::new( + Some(_) => ExpandResult::new( tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), - ExpandError::other(format!("invalid macro definition: {e}")), + ExpandError::MacroDefinition, ), None => self.mac.expand(&tt, |_| (), new_meta_vars, call_site).map_err(Into::into), } @@ -119,7 +119,7 @@ impl DeclarativeMacroExpander { _ => None, } }; - let toolchain = crate_data.toolchain.as_ref(); + let toolchain = db.toolchain(def_crate); let new_meta_vars = toolchain.as_ref().map_or(false, |version| { REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches( &base_db::Version { diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index fd028182faf6f..020ca75d80cb2 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -44,7 +44,6 @@ use crate::{ builtin_derive_macro::BuiltinDeriveExpander, builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander}, db::{ExpandDatabase, TokenExpander}, - fixup::SyntaxFixupUndoInfo, hygiene::SyntaxContextData, mod_path::ModPath, proc_macro::{CustomProcMacroExpander, ProcMacroKind}, @@ -129,8 +128,11 @@ pub type ExpandResult = ValueResult; #[derive(Debug, PartialEq, Eq, Clone, Hash)] pub enum ExpandError { UnresolvedProcMacro(CrateId), + /// The macro expansion is disabled. 
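The declarative-macro hunks above now read the toolchain through `db.toolchain(krate)` and keep gating the new meta-variable behavior on rustc >= 1.76, caching the parsed requirement. A minimal standalone sketch of that version gate, written against the `semver` crate (assumed here as a stand-in for rust-analyzer's `Version`/`VersionReq`):

    use semver::{Version, VersionReq};
    use std::sync::OnceLock;

    static REQUIREMENT: OnceLock<VersionReq> = OnceLock::new();

    // True when the crate's toolchain supports the new meta-variable rules.
    fn supports_new_meta_vars(toolchain: Option<&Version>) -> bool {
        let req = REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap());
        toolchain.map_or(false, |v| req.matches(v))
    }

    fn main() {
        assert!(supports_new_meta_vars(Some(&Version::parse("1.76.0").unwrap())));
        assert!(!supports_new_meta_vars(Some(&Version::parse("1.75.1").unwrap())));
        assert!(!supports_new_meta_vars(None)); // unknown toolchain: use the old behavior
    }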
+ MacroDisabled, + MacroDefinition, Mbe(mbe::ExpandError), - RecursionOverflowPoisoned, + RecursionOverflow, Other(Box>), ProcMacroPanic(Box>), } @@ -152,14 +154,14 @@ impl fmt::Display for ExpandError { match self { ExpandError::UnresolvedProcMacro(_) => f.write_str("unresolved proc-macro"), ExpandError::Mbe(it) => it.fmt(f), - ExpandError::RecursionOverflowPoisoned => { - f.write_str("overflow expanding the original macro") - } + ExpandError::RecursionOverflow => f.write_str("overflow expanding the original macro"), ExpandError::ProcMacroPanic(it) => { f.write_str("proc-macro panicked: ")?; f.write_str(it) } ExpandError::Other(it) => f.write_str(it), + ExpandError::MacroDisabled => f.write_str("macro disabled"), + ExpandError::MacroDefinition => f.write_str("macro definition has parse errors"), } } } @@ -225,8 +227,8 @@ pub enum MacroCallKind { }, Attr { ast_id: AstId, - // FIXME: This is being interned, subtrees can vary quickly differ just slightly causing - // leakage problems here + // FIXME: This shouldn't be here, we can derive this from `invoc_attr_index` + // but we need to fix the `cfg_attr` handling first. attr_args: Option>, /// Syntactical index of the invoking `#[attribute]`. /// @@ -758,15 +760,7 @@ impl ExpansionInfo { let (parse, exp_map) = db.parse_macro_expansion(macro_file).value; let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() }; - let (macro_arg, _) = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| { - ( - Arc::new(tt::Subtree { - delimiter: tt::Delimiter::invisible_spanned(loc.call_site), - token_trees: Box::new([]), - }), - SyntaxFixupUndoInfo::NONE, - ) - }); + let (macro_arg, _) = db.macro_arg(macro_file.macro_call_id).value; let def = loc.def.ast_id().left().and_then(|id| { let def_tt = match id.to_node(db) { diff --git a/crates/hir-expand/src/mod_path.rs b/crates/hir-expand/src/mod_path.rs index b64c3549e421e..136b0935be277 100644 --- a/crates/hir-expand/src/mod_path.rs +++ b/crates/hir-expand/src/mod_path.rs @@ -94,6 +94,21 @@ impl ModPath { } } + pub fn textual_len(&self) -> usize { + let base = match self.kind { + PathKind::Plain => 0, + PathKind::Super(0) => "self".len(), + PathKind::Super(i) => "super".len() * i as usize, + PathKind::Crate => "crate".len(), + PathKind::Abs => 0, + PathKind::DollarCrate(_) => "$crate".len(), + }; + self.segments() + .iter() + .map(|segment| segment.as_str().map_or(0, str::len)) + .fold(base, core::ops::Add::add) + } + pub fn is_ident(&self) -> bool { self.as_ident().is_some() } diff --git a/crates/hir-expand/src/proc_macro.rs b/crates/hir-expand/src/proc_macro.rs index 70b47fc54b11c..ca6fc0afe2d7d 100644 --- a/crates/hir-expand/src/proc_macro.rs +++ b/crates/hir-expand/src/proc_macro.rs @@ -12,7 +12,13 @@ use syntax::SmolStr; use crate::{db::ExpandDatabase, tt, ExpandError, ExpandResult}; #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct ProcMacroId(pub u32); +pub struct ProcMacroId(u32); + +impl ProcMacroId { + pub fn new(u32: u32) -> Self { + ProcMacroId(u32) + } +} #[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)] pub enum ProcMacroKind { @@ -49,6 +55,7 @@ pub struct ProcMacro { pub name: SmolStr, pub kind: ProcMacroKind, pub expander: sync::Arc, + pub disabled: bool, } #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] @@ -56,20 +63,35 @@ pub struct CustomProcMacroExpander { proc_macro_id: ProcMacroId, } -const DUMMY_ID: u32 = !0; - impl CustomProcMacroExpander { + const DUMMY_ID: u32 = !0; + const DISABLED_ID: u32 = !1; + pub fn new(proc_macro_id: ProcMacroId) 
-> Self { - assert_ne!(proc_macro_id.0, DUMMY_ID); + assert_ne!(proc_macro_id.0, Self::DUMMY_ID); + assert_ne!(proc_macro_id.0, Self::DISABLED_ID); Self { proc_macro_id } } - pub fn dummy() -> Self { - Self { proc_macro_id: ProcMacroId(DUMMY_ID) } + /// A dummy expander that always errors. This is used for proc-macros that are missing, usually + /// due to them not being built yet. + pub const fn dummy() -> Self { + Self { proc_macro_id: ProcMacroId(Self::DUMMY_ID) } + } + + /// The macro was not yet resolved. + pub const fn is_dummy(&self) -> bool { + self.proc_macro_id.0 == Self::DUMMY_ID + } + + /// A dummy expander that always errors. This expander is used for macros that have been disabled. + pub const fn disabled() -> Self { + Self { proc_macro_id: ProcMacroId(Self::DISABLED_ID) } } - pub fn is_dummy(&self) -> bool { - self.proc_macro_id.0 == DUMMY_ID + /// The macro is explicitly disabled and cannot be expanded. + pub const fn is_disabled(&self) -> bool { + self.proc_macro_id.0 == Self::DISABLED_ID } pub fn expand( @@ -84,10 +106,14 @@ impl CustomProcMacroExpander { mixed_site: Span, ) -> ExpandResult { match self.proc_macro_id { - ProcMacroId(DUMMY_ID) => ExpandResult::new( + ProcMacroId(Self::DUMMY_ID) => ExpandResult::new( tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), ExpandError::UnresolvedProcMacro(def_crate), ), + ProcMacroId(Self::DISABLED_ID) => ExpandResult::new( + tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), + ExpandError::MacroDisabled, + ), ProcMacroId(id) => { let proc_macros = db.proc_macros(); let proc_macros = match proc_macros.get(&def_crate) { @@ -110,7 +136,7 @@ impl CustomProcMacroExpander { ); return ExpandResult::new( tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), - ExpandError::other("Internal error"), + ExpandError::other("Internal error: proc-macro index out of bounds"), ); } }; diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs index 7f8fb7f4b5214..c4329a7b82bf8 100644 --- a/crates/hir-ty/src/diagnostics/expr.rs +++ b/crates/hir-ty/src/diagnostics/expr.rs @@ -169,9 +169,9 @@ impl ExprValidator { return; } - let pattern_arena = Arena::new(); - let cx = MatchCheckCtx::new(self.owner.module(db.upcast()), self.owner, db, &pattern_arena); + let cx = MatchCheckCtx::new(self.owner.module(db.upcast()), self.owner, db); + let pattern_arena = Arena::new(); let mut m_arms = Vec::with_capacity(arms.len()); let mut has_lowering_errors = false; for arm in arms { @@ -196,8 +196,9 @@ impl ExprValidator { // If we had a NotUsefulMatchArm diagnostic, we could // check the usefulness of each pattern as we added it // to the matrix here. 
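Returning to the `CustomProcMacroExpander` changes above: with the `ProcMacroId` field private, the top two bit patterns can serve as sentinels, `!0` for a dummy (not-yet-built) macro and `!1` for a disabled one, and `new` asserts that real ids never collide with them. A self-contained sketch of the sentinel-newtype pattern (type and method names are made up):

    #[derive(Debug, Copy, Clone, PartialEq, Eq)]
    struct ExpanderId(u32);

    impl ExpanderId {
        const DUMMY: u32 = !0; // 0xFFFF_FFFF: macro not yet resolved/built
        const DISABLED: u32 = !1; // 0xFFFF_FFFE: macro explicitly disabled

        fn new(id: u32) -> Self {
            // Real ids must not collide with the sentinels.
            assert_ne!(id, Self::DUMMY);
            assert_ne!(id, Self::DISABLED);
            ExpanderId(id)
        }

        const fn dummy() -> Self { ExpanderId(Self::DUMMY) }
        const fn disabled() -> Self { ExpanderId(Self::DISABLED) }
        const fn is_dummy(self) -> bool { self.0 == Self::DUMMY }
        const fn is_disabled(self) -> bool { self.0 == Self::DISABLED }
    }

    fn main() {
        let real = ExpanderId::new(7);
        assert!(!real.is_dummy() && !real.is_disabled());
        assert!(ExpanderId::dummy().is_dummy());
        assert!(ExpanderId::disabled().is_disabled());
    }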
+ let pat = self.lower_pattern(&cx, arm.pat, db, &body, &mut has_lowering_errors); let m_arm = pat_analysis::MatchArm { - pat: self.lower_pattern(&cx, arm.pat, db, &body, &mut has_lowering_errors), + pat: pattern_arena.alloc(pat), has_guard: arm.guard.is_some(), arm_data: (), }; @@ -223,7 +224,7 @@ impl ExprValidator { ValidityConstraint::ValidOnly, ) { Ok(report) => report, - Err(void) => match void {}, + Err(()) => return, }; // FIXME Report unreachable arms @@ -245,10 +246,10 @@ impl ExprValidator { db: &dyn HirDatabase, body: &Body, have_errors: &mut bool, - ) -> &'p DeconstructedPat<'p> { + ) -> DeconstructedPat<'p> { let mut patcx = match_check::PatCtxt::new(db, &self.infer, body); let pattern = patcx.lower_pattern(pat); - let pattern = cx.pattern_arena.alloc(cx.lower_pat(&pattern)); + let pattern = cx.lower_pat(&pattern); if !patcx.errors.is_empty() { *have_errors = true; } diff --git a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs index 712842372b625..e98a946a8708c 100644 --- a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs +++ b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs @@ -1,6 +1,7 @@ //! Interface with `rustc_pattern_analysis`. use std::fmt; +use tracing::debug; use hir_def::{DefWithBodyId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId}; use rustc_hash::FxHashMap; @@ -11,7 +12,6 @@ use rustc_pattern_analysis::{ }; use smallvec::{smallvec, SmallVec}; use stdx::never; -use typed_arena::Arena; use crate::{ db::HirDatabase, @@ -26,7 +26,7 @@ use Constructor::*; // Re-export r-a-specific versions of all these types. pub(crate) type DeconstructedPat<'p> = - rustc_pattern_analysis::pat::DeconstructedPat<'p, MatchCheckCtx<'p>>; + rustc_pattern_analysis::pat::DeconstructedPat>; pub(crate) type MatchArm<'p> = rustc_pattern_analysis::MatchArm<'p, MatchCheckCtx<'p>>; pub(crate) type WitnessPat<'p> = rustc_pattern_analysis::pat::WitnessPat>; @@ -40,7 +40,6 @@ pub(crate) struct MatchCheckCtx<'p> { module: ModuleId, body: DefWithBodyId, pub(crate) db: &'p dyn HirDatabase, - pub(crate) pattern_arena: &'p Arena>, exhaustive_patterns: bool, min_exhaustive_patterns: bool, } @@ -52,17 +51,12 @@ pub(crate) struct PatData<'p> { } impl<'p> MatchCheckCtx<'p> { - pub(crate) fn new( - module: ModuleId, - body: DefWithBodyId, - db: &'p dyn HirDatabase, - pattern_arena: &'p Arena>, - ) -> Self { + pub(crate) fn new(module: ModuleId, body: DefWithBodyId, db: &'p dyn HirDatabase) -> Self { let def_map = db.crate_def_map(module.krate()); let exhaustive_patterns = def_map.is_unstable_feature_enabled("exhaustive_patterns"); let min_exhaustive_patterns = def_map.is_unstable_feature_enabled("min_exhaustive_patterns"); - Self { module, body, db, pattern_arena, exhaustive_patterns, min_exhaustive_patterns } + Self { module, body, db, exhaustive_patterns, min_exhaustive_patterns } } fn is_uninhabited(&self, ty: &Ty) -> bool { @@ -131,15 +125,15 @@ impl<'p> MatchCheckCtx<'p> { } pub(crate) fn lower_pat(&self, pat: &Pat) -> DeconstructedPat<'p> { - let singleton = |pat| std::slice::from_ref(self.pattern_arena.alloc(pat)); + let singleton = |pat| vec![pat]; let ctor; - let fields: &[_]; + let fields: Vec<_>; match pat.kind.as_ref() { PatKind::Binding { subpattern: Some(subpat), .. } => return self.lower_pat(subpat), PatKind::Binding { subpattern: None, .. 
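The match-check hunks above invert the allocation flow: `lower_pattern` now returns an owned `DeconstructedPat`, and the caller allocates it into the pattern arena only when building the arm. A toy sketch of that caller-allocates shape, assuming the `typed_arena` crate (the arena type used by the old code):

    use typed_arena::Arena;

    #[derive(Debug)]
    struct Pat(&'static str);

    // Lowering produces an owned value; no arena is involved here.
    fn lower(name: &'static str) -> Pat {
        Pat(name)
    }

    fn main() {
        let arena: Arena<Pat> = Arena::new();
        let mut arms: Vec<&Pat> = Vec::new();
        for name in ["Some(_)", "None"] {
            let pat = lower(name); // build owned
            arms.push(arena.alloc(pat)); // the caller decides where it lives
        }
        assert_eq!(arms.len(), 2);
    }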
} | PatKind::Wild => { ctor = Wildcard; - fields = &[]; + fields = Vec::new(); } PatKind::Deref { subpattern } => { ctor = match pat.ty.kind(Interner) { @@ -157,7 +151,7 @@ impl<'p> MatchCheckCtx<'p> { match pat.ty.kind(Interner) { TyKind::Tuple(_, substs) => { ctor = Struct; - let mut wilds: SmallVec<[_; 2]> = substs + let mut wilds: Vec<_> = substs .iter(Interner) .map(|arg| arg.assert_ty_ref(Interner).clone()) .map(DeconstructedPat::wildcard) @@ -166,7 +160,7 @@ impl<'p> MatchCheckCtx<'p> { let idx: u32 = pat.field.into_raw().into(); wilds[idx as usize] = self.lower_pat(&pat.pattern); } - fields = self.pattern_arena.alloc_extend(wilds) + fields = wilds } TyKind::Adt(adt, substs) if is_box(self.db, adt.0) => { // The only legal patterns of type `Box` (outside `std`) are `_` and box @@ -216,33 +210,29 @@ impl<'p> MatchCheckCtx<'p> { field_id_to_id[field_idx as usize] = Some(i); ty }); - let mut wilds: SmallVec<[_; 2]> = - tys.map(DeconstructedPat::wildcard).collect(); + let mut wilds: Vec<_> = tys.map(DeconstructedPat::wildcard).collect(); for pat in subpatterns { let field_idx: u32 = pat.field.into_raw().into(); if let Some(i) = field_id_to_id[field_idx as usize] { wilds[i] = self.lower_pat(&pat.pattern); } } - fields = self.pattern_arena.alloc_extend(wilds); + fields = wilds; } _ => { never!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, &pat.ty); ctor = Wildcard; - fields = &[]; + fields = Vec::new(); } } } &PatKind::LiteralBool { value } => { ctor = Bool(value); - fields = &[]; + fields = Vec::new(); } PatKind::Or { pats } => { ctor = Or; - // Collect here because `Arena::alloc_extend` panics on reentrancy. - let subpats: SmallVec<[_; 2]> = - pats.iter().map(|pat| self.lower_pat(pat)).collect(); - fields = self.pattern_arena.alloc_extend(subpats); + fields = pats.iter().map(|pat| self.lower_pat(pat)).collect(); } } let data = PatData { db: self.db }; @@ -307,7 +297,7 @@ impl<'p> MatchCheckCtx<'p> { } impl<'p> TypeCx for MatchCheckCtx<'p> { - type Error = Void; + type Error = (); type Ty = Ty; type VariantIdx = EnumVariantId; type StrLit = Void; @@ -463,7 +453,7 @@ impl<'p> TypeCx for MatchCheckCtx<'p> { fn write_variant_name( f: &mut fmt::Formatter<'_>, - pat: &rustc_pattern_analysis::pat::DeconstructedPat<'_, Self>, + pat: &rustc_pattern_analysis::pat::DeconstructedPat, ) -> fmt::Result { let variant = pat.ty().as_adt().and_then(|(adt, _)| Self::variant_id_for_adt(pat.ctor(), adt)); @@ -485,8 +475,8 @@ impl<'p> TypeCx for MatchCheckCtx<'p> { Ok(()) } - fn bug(&self, fmt: fmt::Arguments<'_>) -> ! 
{ - panic!("{}", fmt) + fn bug(&self, fmt: fmt::Arguments<'_>) { + debug!("{}", fmt) } } diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index 71c3f89716d82..1977f00517cd1 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -26,7 +26,7 @@ use std::{convert::identity, ops::Index}; use chalk_ir::{ cast::Cast, fold::TypeFoldable, interner::HasInterner, DebruijnIndex, Mutability, Safety, - Scalar, TyKind, TypeFlags, + Scalar, TyKind, TypeFlags, Variance, }; use either::Either; use hir_def::{ @@ -58,8 +58,9 @@ use crate::{ static_lifetime, to_assoc_type_id, traits::FnTrait, utils::{InTypeConstIdMetadata, UnevaluatedConstEvaluatorFolder}, - AliasEq, AliasTy, ClosureId, DomainGoal, GenericArg, Goal, ImplTraitId, InEnvironment, - Interner, ProjectionTy, RpitId, Substitution, TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, + AliasEq, AliasTy, Binders, ClosureId, Const, DomainGoal, GenericArg, Goal, ImplTraitId, + InEnvironment, Interner, Lifetime, ProjectionTy, RpitId, Substitution, TraitEnvironment, + TraitRef, Ty, TyBuilder, TyExt, }; // This lint has a false positive here. See the link below for details. @@ -68,7 +69,7 @@ use crate::{ #[allow(unreachable_pub)] pub use coerce::could_coerce; #[allow(unreachable_pub)] -pub use unify::could_unify; +pub use unify::{could_unify, could_unify_deeply}; use cast::CastCheck; pub(crate) use closure::{CaptureKind, CapturedItem, CapturedItemWithoutTy}; @@ -688,10 +689,17 @@ impl<'a> InferenceContext<'a> { for ty in type_of_for_iterator.values_mut() { *ty = table.resolve_completely(ty.clone()); } - for mismatch in type_mismatches.values_mut() { + type_mismatches.retain(|_, mismatch| { mismatch.expected = table.resolve_completely(mismatch.expected.clone()); mismatch.actual = table.resolve_completely(mismatch.actual.clone()); - } + chalk_ir::zip::Zip::zip_with( + &mut UnknownMismatch(self.db), + Variance::Invariant, + &mismatch.expected, + &mismatch.actual, + ) + .is_ok() + }); diagnostics.retain_mut(|diagnostic| { use InferenceDiagnostic::*; match diagnostic { @@ -1502,3 +1510,116 @@ impl std::ops::BitOrAssign for Diverges { *self = *self | other; } } +/// A zipper that checks for unequal `{unknown}` occurrences in the two types. Used to filter out +/// mismatch diagnostics that only differ in `{unknown}`. These mismatches are usually not helpful. +/// As the cause is usually an underlying name resolution problem. +struct UnknownMismatch<'db>(&'db dyn HirDatabase); +impl chalk_ir::zip::Zipper for UnknownMismatch<'_> { + fn zip_tys(&mut self, variance: Variance, a: &Ty, b: &Ty) -> chalk_ir::Fallible<()> { + let zip_substs = |this: &mut Self, + variances, + sub_a: &Substitution, + sub_b: &Substitution| { + this.zip_substs(variance, variances, sub_a.as_slice(Interner), sub_b.as_slice(Interner)) + }; + match (a.kind(Interner), b.kind(Interner)) { + (TyKind::Adt(id_a, sub_a), TyKind::Adt(id_b, sub_b)) if id_a == id_b => zip_substs( + self, + Some(self.unification_database().adt_variance(*id_a)), + sub_a, + sub_b, + )?, + ( + TyKind::AssociatedType(assoc_ty_a, sub_a), + TyKind::AssociatedType(assoc_ty_b, sub_b), + ) if assoc_ty_a == assoc_ty_b => zip_substs(self, None, sub_a, sub_b)?, + (TyKind::Tuple(arity_a, sub_a), TyKind::Tuple(arity_b, sub_b)) + if arity_a == arity_b => + { + zip_substs(self, None, sub_a, sub_b)? + } + (TyKind::OpaqueType(opaque_ty_a, sub_a), TyKind::OpaqueType(opaque_ty_b, sub_b)) + if opaque_ty_a == opaque_ty_b => + { + zip_substs(self, None, sub_a, sub_b)? 
+ } + (TyKind::Slice(ty_a), TyKind::Slice(ty_b)) => self.zip_tys(variance, ty_a, ty_b)?, + (TyKind::FnDef(fn_def_a, sub_a), TyKind::FnDef(fn_def_b, sub_b)) + if fn_def_a == fn_def_b => + { + zip_substs( + self, + Some(self.unification_database().fn_def_variance(*fn_def_a)), + sub_a, + sub_b, + )? + } + (TyKind::Ref(mutability_a, _, ty_a), TyKind::Ref(mutability_b, _, ty_b)) + if mutability_a == mutability_b => + { + self.zip_tys(variance, ty_a, ty_b)? + } + (TyKind::Raw(mutability_a, ty_a), TyKind::Raw(mutability_b, ty_b)) + if mutability_a == mutability_b => + { + self.zip_tys(variance, ty_a, ty_b)? + } + (TyKind::Array(ty_a, const_a), TyKind::Array(ty_b, const_b)) if const_a == const_b => { + self.zip_tys(variance, ty_a, ty_b)? + } + (TyKind::Closure(id_a, sub_a), TyKind::Closure(id_b, sub_b)) if id_a == id_b => { + zip_substs(self, None, sub_a, sub_b)? + } + (TyKind::Coroutine(coroutine_a, sub_a), TyKind::Coroutine(coroutine_b, sub_b)) + if coroutine_a == coroutine_b => + { + zip_substs(self, None, sub_a, sub_b)? + } + ( + TyKind::CoroutineWitness(coroutine_a, sub_a), + TyKind::CoroutineWitness(coroutine_b, sub_b), + ) if coroutine_a == coroutine_b => zip_substs(self, None, sub_a, sub_b)?, + (TyKind::Function(fn_ptr_a), TyKind::Function(fn_ptr_b)) + if fn_ptr_a.sig == fn_ptr_b.sig && fn_ptr_a.num_binders == fn_ptr_b.num_binders => + { + zip_substs(self, None, &fn_ptr_a.substitution.0, &fn_ptr_b.substitution.0)? + } + (TyKind::Error, TyKind::Error) => (), + (TyKind::Error, _) | (_, TyKind::Error) => return Err(chalk_ir::NoSolution), + _ => (), + } + + Ok(()) + } + + fn zip_lifetimes(&mut self, _: Variance, _: &Lifetime, _: &Lifetime) -> chalk_ir::Fallible<()> { + Ok(()) + } + + fn zip_consts(&mut self, _: Variance, _: &Const, _: &Const) -> chalk_ir::Fallible<()> { + Ok(()) + } + + fn zip_binders( + &mut self, + variance: Variance, + a: &Binders, + b: &Binders, + ) -> chalk_ir::Fallible<()> + where + T: Clone + + HasInterner + + chalk_ir::zip::Zip + + TypeFoldable, + { + chalk_ir::zip::Zip::zip_with(self, variance, a.skip_binders(), b.skip_binders()) + } + + fn interner(&self) -> Interner { + Interner + } + + fn unification_database(&self) -> &dyn chalk_ir::UnificationDatabase { + &self.0 + } +} diff --git a/crates/hir-ty/src/infer/closure.rs b/crates/hir-ty/src/infer/closure.rs index c3746f787067c..22a70f951ea7a 100644 --- a/crates/hir-ty/src/infer/closure.rs +++ b/crates/hir-ty/src/infer/closure.rs @@ -485,6 +485,7 @@ impl InferenceContext<'_> { Statement::Expr { expr, has_semi: _ } => { self.consume_expr(*expr); } + Statement::Item => (), } } if let Some(tail) = tail { @@ -531,6 +532,9 @@ impl InferenceContext<'_> { self.consume_expr(expr); } } + &Expr::Become { expr } => { + self.consume_expr(expr); + } Expr::RecordLit { fields, spread, .. 
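To recap the `retain` logic above: the `UnknownMismatch` zipper returns `Err(NoSolution)` exactly when one side of a mismatch is `{unknown}` (`TyKind::Error`) while the other is not, and such mismatches are dropped because the underlying cause is usually a name-resolution failure. A toy standalone sketch of the same rule on a miniature type tree:

    // Miniature stand-in for the real TyKind, just to illustrate the rule.
    #[derive(Debug)]
    enum Ty {
        Error, // rust-analyzer's `{unknown}`
        Int,
        Option(Box<Ty>),
    }

    // Keep a mismatch only if no position pairs `{unknown}` with a real type.
    fn keep_mismatch(expected: &Ty, actual: &Ty) -> bool {
        match (expected, actual) {
            (Ty::Error, Ty::Error) => true,
            (Ty::Error, _) | (_, Ty::Error) => false, // differs only in `{unknown}`: drop
            (Ty::Option(a), Ty::Option(b)) => keep_mismatch(a, b),
            _ => true, // concrete types on both sides: keep the diagnostic
        }
    }

    fn main() {
        // `Option<{unknown}>` vs `Option<i32>`: likely a resolution problem, filter it out.
        assert!(!keep_mismatch(&Ty::Option(Box::new(Ty::Error)), &Ty::Option(Box::new(Ty::Int))));
        // `Option<i32>` vs `i32`: a genuine mismatch, keep it.
        assert!(keep_mismatch(&Ty::Option(Box::new(Ty::Int)), &Ty::Int));
    }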
} => { if let &Some(expr) = spread { self.consume_expr(expr); diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index 8b8e97b0081c6..428ed6748c6c2 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -502,6 +502,7 @@ impl InferenceContext<'_> { self.result.standard_types.never.clone() } &Expr::Return { expr } => self.infer_expr_return(tgt_expr, expr), + &Expr::Become { expr } => self.infer_expr_become(expr), Expr::Yield { expr } => { if let Some((resume_ty, yield_ty)) = self.resume_yield_tys.clone() { if let Some(expr) = expr { @@ -1084,6 +1085,27 @@ impl InferenceContext<'_> { self.result.standard_types.never.clone() } + fn infer_expr_become(&mut self, expr: ExprId) -> Ty { + match &self.return_coercion { + Some(return_coercion) => { + let ret_ty = return_coercion.expected_ty(); + + let call_expr_ty = + self.infer_expr_inner(expr, &Expectation::HasType(ret_ty.clone())); + + // NB: this should *not* coerce. + // tail calls don't support any coercions except lifetimes ones (like `&'static u8 -> &'a u8`). + self.unify(&call_expr_ty, &ret_ty); + } + None => { + // FIXME: diagnose `become` outside of functions + self.infer_expr_no_expect(expr); + } + } + + self.result.standard_types.never.clone() + } + fn infer_expr_box(&mut self, inner_expr: ExprId, expected: &Expectation) -> Ty { if let Some(box_id) = self.resolve_boxed_box() { let table = &mut self.table; @@ -1367,6 +1389,7 @@ impl InferenceContext<'_> { ); } } + Statement::Item => (), } } diff --git a/crates/hir-ty/src/infer/mutability.rs b/crates/hir-ty/src/infer/mutability.rs index 663ea85323189..00e5eac229fb6 100644 --- a/crates/hir-ty/src/infer/mutability.rs +++ b/crates/hir-ty/src/infer/mutability.rs @@ -65,6 +65,7 @@ impl InferenceContext<'_> { Statement::Expr { expr, has_semi: _ } => { self.infer_mut_expr(*expr, Mutability::Not); } + Statement::Item => (), } } if let Some(tail) = tail { @@ -93,6 +94,9 @@ impl InferenceContext<'_> { self.infer_mut_expr(expr, Mutability::Not); } } + Expr::Become { expr } => { + self.infer_mut_expr(*expr, Mutability::Not); + } Expr::RecordLit { path: _, fields, spread, ellipsis: _, is_assignee_expr: _ } => { self.infer_mut_not_expr_iter(fields.iter().map(|it| it.expr).chain(*spread)) } diff --git a/crates/hir-ty/src/infer/unify.rs b/crates/hir-ty/src/infer/unify.rs index de23ca34990be..709760b64fd3f 100644 --- a/crates/hir-ty/src/infer/unify.rs +++ b/crates/hir-ty/src/infer/unify.rs @@ -74,6 +74,12 @@ impl> Canonicalized { } } +/// Check if types unify. +/// +/// Note that we consider placeholder types to unify with everything. +/// This means that there may be some unresolved goals that actually set bounds for the placeholder +/// type for the types to unify. For example `Option` and `Option` unify although there is +/// unresolved goal `T = U`. pub fn could_unify( db: &dyn HirDatabase, env: Arc, @@ -82,21 +88,35 @@ pub fn could_unify( unify(db, env, tys).is_some() } +/// Check if types unify eagerly making sure there are no unresolved goals. +/// +/// This means that placeholder types are not considered to unify if there are any bounds set on +/// them. 
For example `Option` and `Option` do not unify as we cannot show that `T = U` +pub fn could_unify_deeply( + db: &dyn HirDatabase, + env: Arc, + tys: &Canonical<(Ty, Ty)>, +) -> bool { + let mut table = InferenceTable::new(db, env); + let vars = make_substitutions(tys, &mut table); + let ty1_with_vars = vars.apply(tys.value.0.clone(), Interner); + let ty2_with_vars = vars.apply(tys.value.1.clone(), Interner); + let ty1_with_vars = table.normalize_associated_types_in(ty1_with_vars); + let ty2_with_vars = table.normalize_associated_types_in(ty2_with_vars); + table.resolve_obligations_as_possible(); + table.propagate_diverging_flag(); + let ty1_with_vars = table.resolve_completely(ty1_with_vars); + let ty2_with_vars = table.resolve_completely(ty2_with_vars); + table.unify_deeply(&ty1_with_vars, &ty2_with_vars) +} + pub(crate) fn unify( db: &dyn HirDatabase, env: Arc, tys: &Canonical<(Ty, Ty)>, ) -> Option { let mut table = InferenceTable::new(db, env); - let vars = Substitution::from_iter( - Interner, - tys.binders.iter(Interner).map(|it| match &it.kind { - chalk_ir::VariableKind::Ty(_) => table.new_type_var().cast(Interner), - // FIXME: maybe wrong? - chalk_ir::VariableKind::Lifetime => table.new_type_var().cast(Interner), - chalk_ir::VariableKind::Const(ty) => table.new_const_var(ty.clone()).cast(Interner), - }), - ); + let vars = make_substitutions(tys, &mut table); let ty1_with_vars = vars.apply(tys.value.0.clone(), Interner); let ty2_with_vars = vars.apply(tys.value.1.clone(), Interner); if !table.unify(&ty1_with_vars, &ty2_with_vars) { @@ -125,6 +145,21 @@ pub(crate) fn unify( )) } +fn make_substitutions( + tys: &chalk_ir::Canonical<(chalk_ir::Ty, chalk_ir::Ty)>, + table: &mut InferenceTable<'_>, +) -> chalk_ir::Substitution { + Substitution::from_iter( + Interner, + tys.binders.iter(Interner).map(|it| match &it.kind { + chalk_ir::VariableKind::Ty(_) => table.new_type_var().cast(Interner), + // FIXME: maybe wrong? + chalk_ir::VariableKind::Lifetime => table.new_type_var().cast(Interner), + chalk_ir::VariableKind::Const(ty) => table.new_const_var(ty.clone()).cast(Interner), + }), + ) +} + bitflags::bitflags! { #[derive(Default, Clone, Copy)] pub(crate) struct TypeVariableFlags: u8 { @@ -431,6 +466,18 @@ impl<'a> InferenceTable<'a> { true } + /// Unify two relatable values (e.g. `Ty`) and check whether trait goals which arise from that could be fulfilled + pub(crate) fn unify_deeply>(&mut self, ty1: &T, ty2: &T) -> bool { + let result = match self.try_unify(ty1, ty2) { + Ok(r) => r, + Err(_) => return false, + }; + result.goals.iter().all(|goal| { + let canonicalized = self.canonicalize(goal.clone()); + self.try_resolve_obligation(&canonicalized).is_some() + }) + } + /// Unify two relatable values (e.g. `Ty`) and return new trait goals arising from it, so the /// caller needs to deal with them. 
pub(crate) fn try_unify>( @@ -501,7 +548,8 @@ impl<'a> InferenceTable<'a> { fn register_obligation_in_env(&mut self, goal: InEnvironment) { let canonicalized = self.canonicalize(goal); - if !self.try_resolve_obligation(&canonicalized) { + let solution = self.try_resolve_obligation(&canonicalized); + if matches!(solution, Some(Solution::Ambig(_))) { self.pending_obligations.push(canonicalized); } } @@ -627,38 +675,35 @@ impl<'a> InferenceTable<'a> { fn try_resolve_obligation( &mut self, canonicalized: &Canonicalized>, - ) -> bool { + ) -> Option> { let solution = self.db.trait_solve( self.trait_env.krate, self.trait_env.block, canonicalized.value.clone(), ); - match solution { + match &solution { Some(Solution::Unique(canonical_subst)) => { canonicalized.apply_solution( self, Canonical { - binders: canonical_subst.binders, + binders: canonical_subst.binders.clone(), // FIXME: handle constraints - value: canonical_subst.value.subst, + value: canonical_subst.value.subst.clone(), }, ); - true } Some(Solution::Ambig(Guidance::Definite(substs))) => { - canonicalized.apply_solution(self, substs); - false + canonicalized.apply_solution(self, substs.clone()); } Some(_) => { // FIXME use this when trying to resolve everything at the end - false } None => { // FIXME obligation cannot be fulfilled => diagnostic - true } } + solution } pub(crate) fn callable_sig( diff --git a/crates/hir-ty/src/layout/target.rs b/crates/hir-ty/src/layout/target.rs index 5bfe7bf010f1c..9b1424548c2a9 100644 --- a/crates/hir-ty/src/layout/target.rs +++ b/crates/hir-ty/src/layout/target.rs @@ -11,10 +11,8 @@ pub fn target_data_layout_query( db: &dyn HirDatabase, krate: CrateId, ) -> Result, Arc> { - let crate_graph = db.crate_graph(); - let res = crate_graph[krate].target_layout.as_deref(); - match res { - Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(it) { + match db.data_layout(krate) { + Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(&it) { Ok(it) => Ok(Arc::new(it)), Err(e) => { Err(match e { @@ -44,6 +42,6 @@ pub fn target_data_layout_query( }.into()) } }, - Err(e) => Err(Arc::from(&**e)), + Err(e) => Err(e), } } diff --git a/crates/hir-ty/src/layout/tests.rs b/crates/hir-ty/src/layout/tests.rs index ba3dfe8100d17..6c1eccb75e631 100644 --- a/crates/hir-ty/src/layout/tests.rs +++ b/crates/hir-ty/src/layout/tests.rs @@ -1,6 +1,7 @@ use chalk_ir::{AdtId, TyKind}; use either::Either; use hir_def::db::DefDatabase; +use project_model::target_data_layout::RustcDataLayoutConfig; use rustc_hash::FxHashMap; use test_fixture::WithFixture; use triomphe::Arc; @@ -15,13 +16,18 @@ use crate::{ mod closure; fn current_machine_data_layout() -> String { - project_model::target_data_layout::get(None, None, &FxHashMap::default()).unwrap() + project_model::target_data_layout::get( + RustcDataLayoutConfig::Rustc(None), + None, + &FxHashMap::default(), + ) + .unwrap() } fn eval_goal(ra_fixture: &str, minicore: &str) -> Result, LayoutError> { let target_data_layout = current_machine_data_layout(); let ra_fixture = format!( - "{minicore}//- /main.rs crate:test target_data_layout:{target_data_layout}\n{ra_fixture}", + "//- target_data_layout: {target_data_layout}\n{minicore}//- /main.rs crate:test\n{ra_fixture}", ); let (db, file_ids) = TestDB::with_many_files(&ra_fixture); @@ -70,7 +76,7 @@ fn eval_goal(ra_fixture: &str, minicore: &str) -> Result, LayoutErro fn eval_expr(ra_fixture: &str, minicore: &str) -> Result, LayoutError> { let target_data_layout = current_machine_data_layout(); let ra_fixture = 
format!( - "{minicore}//- /main.rs crate:test target_data_layout:{target_data_layout}\nfn main(){{let goal = {{{ra_fixture}}};}}", + "//- target_data_layout: {target_data_layout}\n{minicore}//- /main.rs crate:test\nfn main(){{let goal = {{{ra_fixture}}};}}", ); let (db, file_id) = TestDB::with_single_file(&ra_fixture); diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs index 70138633341ce..ec97bdc2c4343 100644 --- a/crates/hir-ty/src/lib.rs +++ b/crates/hir-ty/src/lib.rs @@ -79,8 +79,8 @@ pub use builder::{ParamKind, TyBuilder}; pub use chalk_ext::*; pub use infer::{ closure::{CaptureKind, CapturedItem}, - could_coerce, could_unify, Adjust, Adjustment, AutoBorrow, BindingMode, InferenceDiagnostic, - InferenceResult, OverloadedDeref, PointerCast, + could_coerce, could_unify, could_unify_deeply, Adjust, Adjustment, AutoBorrow, BindingMode, + InferenceDiagnostic, InferenceResult, OverloadedDeref, PointerCast, }; pub use interner::Interner; pub use lower::{ diff --git a/crates/hir-ty/src/mir/borrowck.rs b/crates/hir-ty/src/mir/borrowck.rs index 9089c11c5d9bb..63fa87ad66288 100644 --- a/crates/hir-ty/src/mir/borrowck.rs +++ b/crates/hir-ty/src/mir/borrowck.rs @@ -7,6 +7,7 @@ use std::iter; use hir_def::{DefWithBodyId, HasModule}; use la_arena::ArenaMap; +use rustc_hash::FxHashMap; use stdx::never; use triomphe::Arc; @@ -14,7 +15,7 @@ use crate::{ db::{HirDatabase, InternedClosure}, mir::Operand, utils::ClosureSubst, - ClosureId, Interner, Ty, TyExt, TypeFlags, + ClosureId, Interner, Substitution, Ty, TyExt, TypeFlags, }; use super::{ @@ -36,11 +37,27 @@ pub struct MovedOutOfRef { pub span: MirSpan, } +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct PartiallyMoved { + pub ty: Ty, + pub span: MirSpan, + pub local: LocalId, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct BorrowRegion { + pub local: LocalId, + pub kind: BorrowKind, + pub places: Vec, +} + #[derive(Debug, Clone, PartialEq, Eq)] pub struct BorrowckResult { pub mir_body: Arc, pub mutability_of_locals: ArenaMap, pub moved_out_of_ref: Vec, + pub partially_moved: Vec, + pub borrow_regions: Vec, } fn all_mir_bodies( @@ -80,12 +97,26 @@ pub fn borrowck_query( res.push(BorrowckResult { mutability_of_locals: mutability_of_locals(db, &body), moved_out_of_ref: moved_out_of_ref(db, &body), + partially_moved: partially_moved(db, &body), + borrow_regions: borrow_regions(db, &body), mir_body: body, }); })?; Ok(res.into()) } +fn make_fetch_closure_field( + db: &dyn HirDatabase, +) -> impl FnOnce(ClosureId, &Substitution, usize) -> Ty + '_ { + |c: ClosureId, subst: &Substitution, f: usize| { + let InternedClosure(def, _) = db.lookup_intern_closure(c.into()); + let infer = db.infer(def); + let (captures, _) = infer.closure_info(&c); + let parent_subst = ClosureSubst(subst).parent_subst(); + captures.get(f).expect("broken closure field").ty.clone().substitute(Interner, parent_subst) + } +} + fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec { let mut result = vec![]; let mut for_operand = |op: &Operand, span: MirSpan| match op { @@ -99,18 +130,7 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec ty = proj.projected_ty( ty, db, - |c, subst, f| { - let InternedClosure(def, _) = db.lookup_intern_closure(c.into()); - let infer = db.infer(def); - let (captures, _) = infer.closure_info(&c); - let parent_subst = ClosureSubst(subst).parent_subst(); - captures - .get(f) - .expect("broken closure field") - .ty - .clone() - .substitute(Interner, parent_subst) - }, + make_fetch_closure_field(db), 
body.owner.module(db.upcast()).krate(), ); } @@ -188,6 +208,132 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec result } +fn partially_moved(db: &dyn HirDatabase, body: &MirBody) -> Vec { + let mut result = vec![]; + let mut for_operand = |op: &Operand, span: MirSpan| match op { + Operand::Copy(p) | Operand::Move(p) => { + let mut ty: Ty = body.locals[p.local].ty.clone(); + for proj in p.projection.lookup(&body.projection_store) { + ty = proj.projected_ty( + ty, + db, + make_fetch_closure_field(db), + body.owner.module(db.upcast()).krate(), + ); + } + if !ty.clone().is_copy(db, body.owner) + && !ty.data(Interner).flags.intersects(TypeFlags::HAS_ERROR) + { + result.push(PartiallyMoved { span, ty, local: p.local }); + } + } + Operand::Constant(_) | Operand::Static(_) => (), + }; + for (_, block) in body.basic_blocks.iter() { + db.unwind_if_cancelled(); + for statement in &block.statements { + match &statement.kind { + StatementKind::Assign(_, r) => match r { + Rvalue::ShallowInitBoxWithAlloc(_) => (), + Rvalue::ShallowInitBox(o, _) + | Rvalue::UnaryOp(_, o) + | Rvalue::Cast(_, o, _) + | Rvalue::Repeat(o, _) + | Rvalue::Use(o) => for_operand(o, statement.span), + Rvalue::CopyForDeref(_) + | Rvalue::Discriminant(_) + | Rvalue::Len(_) + | Rvalue::Ref(_, _) => (), + Rvalue::CheckedBinaryOp(_, o1, o2) => { + for_operand(o1, statement.span); + for_operand(o2, statement.span); + } + Rvalue::Aggregate(_, ops) => { + for op in ops.iter() { + for_operand(op, statement.span); + } + } + }, + StatementKind::FakeRead(_) + | StatementKind::Deinit(_) + | StatementKind::StorageLive(_) + | StatementKind::StorageDead(_) + | StatementKind::Nop => (), + } + } + match &block.terminator { + Some(terminator) => match &terminator.kind { + TerminatorKind::SwitchInt { discr, .. } => for_operand(discr, terminator.span), + TerminatorKind::FalseEdge { .. } + | TerminatorKind::FalseUnwind { .. } + | TerminatorKind::Goto { .. } + | TerminatorKind::UnwindResume + | TerminatorKind::CoroutineDrop + | TerminatorKind::Abort + | TerminatorKind::Return + | TerminatorKind::Unreachable + | TerminatorKind::Drop { .. } => (), + TerminatorKind::DropAndReplace { value, .. } => { + for_operand(value, terminator.span); + } + TerminatorKind::Call { func, args, .. } => { + for_operand(func, terminator.span); + args.iter().for_each(|it| for_operand(it, terminator.span)); + } + TerminatorKind::Assert { cond, .. } => { + for_operand(cond, terminator.span); + } + TerminatorKind::Yield { value, .. } => { + for_operand(value, terminator.span); + } + }, + None => (), + } + } + result.shrink_to_fit(); + result +} + +fn borrow_regions(db: &dyn HirDatabase, body: &MirBody) -> Vec { + let mut borrows = FxHashMap::default(); + for (_, block) in body.basic_blocks.iter() { + db.unwind_if_cancelled(); + for statement in &block.statements { + if let StatementKind::Assign(_, Rvalue::Ref(kind, p)) = &statement.kind { + borrows + .entry(p.local) + .and_modify(|it: &mut BorrowRegion| { + it.places.push(statement.span); + }) + .or_insert_with(|| BorrowRegion { + local: p.local, + kind: *kind, + places: vec![statement.span], + }); + } + } + match &block.terminator { + Some(terminator) => match &terminator.kind { + TerminatorKind::FalseEdge { .. } + | TerminatorKind::FalseUnwind { .. } + | TerminatorKind::Goto { .. } + | TerminatorKind::UnwindResume + | TerminatorKind::CoroutineDrop + | TerminatorKind::Abort + | TerminatorKind::Return + | TerminatorKind::Unreachable + | TerminatorKind::Drop { .. } => (), + TerminatorKind::DropAndReplace { .. 
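`borrow_regions` above folds every borrow of a local into one `BorrowRegion` with the `HashMap` entry API: the first borrow creates the region, later borrows only append their spans. The same aggregation pattern in miniature (locals and spans reduced to integers):

    use std::collections::HashMap;

    #[derive(Debug)]
    struct Region {
        local: u32,
        places: Vec<usize>, // statement indices standing in for MirSpans
    }

    fn main() {
        // (local, span) pairs in the order they would be met while walking the MIR.
        let borrows = [(0, 1), (1, 2), (0, 5)];
        let mut regions: HashMap<u32, Region> = HashMap::new();
        for (local, span) in borrows {
            regions
                .entry(local)
                .and_modify(|r| r.places.push(span))
                .or_insert_with(|| Region { local, places: vec![span] });
        }
        assert_eq!(regions[&0].places, vec![1, 5]);
        println!("{:?}", regions[&1]);
    }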
} => {} + TerminatorKind::Call { .. } => {} + _ => (), + }, + None => (), + } + } + + borrows.into_values().collect() +} + #[derive(Debug, Clone, Copy, PartialEq, Eq)] enum ProjectionCase { /// Projection is a local @@ -217,18 +363,7 @@ fn place_case(db: &dyn HirDatabase, body: &MirBody, lvalue: &Place) -> Projectio ty = proj.projected_ty( ty, db, - |c, subst, f| { - let InternedClosure(def, _) = db.lookup_intern_closure(c.into()); - let infer = db.infer(def); - let (captures, _) = infer.closure_info(&c); - let parent_subst = ClosureSubst(subst).parent_subst(); - captures - .get(f) - .expect("broken closure field") - .ty - .clone() - .substitute(Interner, parent_subst) - }, + make_fetch_closure_field(db), body.owner.module(db.upcast()).krate(), ); } diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs index 1572a6d497c57..b038900cdacba 100644 --- a/crates/hir-ty/src/mir/lower.rs +++ b/crates/hir-ty/src/mir/lower.rs @@ -775,6 +775,7 @@ impl<'ctx> MirLowerCtx<'ctx> { self.set_terminator(current, TerminatorKind::Return, expr_id.into()); Ok(None) } + Expr::Become { .. } => not_supported!("tail-calls"), Expr::Yield { .. } => not_supported!("yield"), Expr::RecordLit { fields, path, spread, ellipsis: _, is_assignee_expr: _ } => { let spread_place = match spread { @@ -1246,7 +1247,7 @@ impl<'ctx> MirLowerCtx<'ctx> { self.push_assignment(current, place, op.into(), expr_id.into()); Ok(Some(current)) } - Expr::Underscore => not_supported!("underscore"), + Expr::Underscore => Ok(Some(current)), } } @@ -1780,6 +1781,7 @@ impl<'ctx> MirLowerCtx<'ctx> { self.push_fake_read(c, p, expr.into()); current = scope2.pop_and_drop(self, c, expr.into()); } + hir_def::hir::Statement::Item => (), } } if let Some(tail) = tail { diff --git a/crates/hir-ty/src/tests/diagnostics.rs b/crates/hir-ty/src/tests/diagnostics.rs index 1876be303ad44..80f92eaf43553 100644 --- a/crates/hir-ty/src/tests/diagnostics.rs +++ b/crates/hir-ty/src/tests/diagnostics.rs @@ -1,3 +1,5 @@ +use crate::tests::check_no_mismatches; + use super::check; #[test] @@ -94,3 +96,43 @@ fn test(x: bool) { "#, ); } + +#[test] +fn no_mismatches_on_atpit() { + check_no_mismatches( + r#" +//- minicore: option, sized +#![feature(impl_trait_in_assoc_type)] + +trait WrappedAssoc { + type Assoc; + fn do_thing(&self) -> Option; +} + +struct Foo; +impl WrappedAssoc for Foo { + type Assoc = impl Sized; + + fn do_thing(&self) -> Option { + Some(()) + } +} +"#, + ); + check_no_mismatches( + r#" +//- minicore: option, sized +#![feature(impl_trait_in_assoc_type)] + +trait Trait { + type Assoc; + const DEFINE: Option; +} + +impl Trait for () { + type Assoc = impl Sized; + const DEFINE: Option = Option::Some(()); +} +"#, + ); +} diff --git a/crates/hir-ty/src/tests/simple.rs b/crates/hir-ty/src/tests/simple.rs index 8474782282606..6c7dbe1db6ff7 100644 --- a/crates/hir-ty/src/tests/simple.rs +++ b/crates/hir-ty/src/tests/simple.rs @@ -3376,11 +3376,8 @@ fn main() { [x,] = &[1,]; //^^^^expected &[i32; 1], got [{unknown}; _] - // FIXME we only want the outermost error, but this matches the current - // behavior of slice patterns let x; [(x,),] = &[(1,),]; - // ^^^^expected {unknown}, got ({unknown},) //^^^^^^^expected &[(i32,); 1], got [{unknown}; _] let x; diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 32abbc80c6af4..08f7bb14caa3a 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -31,6 +31,7 @@ mod has_source; pub mod db; pub mod diagnostics; pub mod symbols; +pub mod term_search; mod display; @@ -1084,6 +1085,27 @@ 
impl Field { Type::new(db, var_id, ty) } + // FIXME: Find better API to also handle const generics + pub fn ty_with_args(&self, db: &dyn HirDatabase, generics: impl Iterator) -> Type { + let var_id = self.parent.into(); + let def_id: AdtId = match self.parent { + VariantDef::Struct(it) => it.id.into(), + VariantDef::Union(it) => it.id.into(), + VariantDef::Variant(it) => it.parent_enum(db).id.into(), + }; + let mut generics = generics.map(|it| it.ty.clone()); + let substs = TyBuilder::subst_for_def(db, def_id, None) + .fill(|x| match x { + ParamKind::Type => { + generics.next().unwrap_or_else(|| TyKind::Error.intern(Interner)).cast(Interner) + } + ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()), + }) + .build(); + let ty = db.field_types(var_id)[self.id].clone().substitute(Interner, &substs); + Type::new(db, var_id, ty) + } + pub fn layout(&self, db: &dyn HirDatabase) -> Result { db.layout_of_ty( self.ty(db).ty, @@ -1152,6 +1174,10 @@ impl Struct { fn variant_data(self, db: &dyn HirDatabase) -> Arc { db.struct_data(self.id).variant_data.clone() } + + pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { + db.attrs(self.id.into()).is_unstable() + } } impl HasVisibility for Struct { @@ -1194,6 +1220,10 @@ impl Union { fn variant_data(self, db: &dyn HirDatabase) -> Arc { db.union_data(self.id).variant_data.clone() } + + pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { + db.attrs(self.id.into()).is_unstable() + } } impl HasVisibility for Union { @@ -1269,6 +1299,10 @@ impl Enum { pub fn layout(self, db: &dyn HirDatabase) -> Result { Adt::from(self).layout(db) } + + pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { + db.attrs(self.id.into()).is_unstable() + } } impl HasVisibility for Enum { @@ -1344,6 +1378,10 @@ impl Variant { _ => parent_layout, }) } + + pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { + db.attrs(self.id.into()).is_unstable() + } } /// Variants inherit visibility from the parent enum. @@ -1394,9 +1432,9 @@ impl Adt { /// Turns this ADT into a type with the given type parameters. This isn't /// the greatest API, FIXME find a better one. 
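The `ty_with_args`-style helpers added in this hunk share one recipe: drain a caller-supplied iterator of generic arguments and substitute the error type for any parameter left unfilled, so partially specified generics still yield a usable type. A toy sketch of that fill-with-fallback behavior (names are illustrative):

    #[derive(Debug, PartialEq)]
    enum Ty {
        Error, // stand-in for TyKind::Error / `{unknown}`
        Named(&'static str),
    }

    // Fill a fixed number of parameter slots from a possibly-too-short iterator.
    fn fill_params(param_count: usize, args: impl Iterator<Item = Ty>) -> Vec<Ty> {
        let mut args = args.fuse();
        (0..param_count).map(|_| args.next().unwrap_or(Ty::Error)).collect()
    }

    fn main() {
        // Two of three arguments supplied; the third parameter becomes the error type.
        let subst = fill_params(3, [Ty::Named("i32"), Ty::Named("u8")].into_iter());
        assert_eq!(subst, vec![Ty::Named("i32"), Ty::Named("u8"), Ty::Error]);
    }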
- pub fn ty_with_args(self, db: &dyn HirDatabase, args: &[Type]) -> Type { + pub fn ty_with_args(self, db: &dyn HirDatabase, args: impl Iterator) -> Type { let id = AdtId::from(self); - let mut it = args.iter().map(|t| t.ty.clone()); + let mut it = args.map(|t| t.ty.clone()); let ty = TyBuilder::def_ty(db, id.into(), None) .fill(|x| { let r = it.next().unwrap_or_else(|| TyKind::Error.intern(Interner)); @@ -1789,6 +1827,35 @@ impl Function { Type::new_with_resolver_inner(db, &resolver, ty) } + // FIXME: Find better API to also handle const generics + pub fn ret_type_with_args( + self, + db: &dyn HirDatabase, + generics: impl Iterator, + ) -> Type { + let resolver = self.id.resolver(db.upcast()); + let parent_id: Option = match self.id.lookup(db.upcast()).container { + ItemContainerId::ImplId(it) => Some(it.into()), + ItemContainerId::TraitId(it) => Some(it.into()), + ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None, + }; + let mut generics = generics.map(|it| it.ty.clone()); + let mut filler = |x: &_| match x { + ParamKind::Type => { + generics.next().unwrap_or_else(|| TyKind::Error.intern(Interner)).cast(Interner) + } + ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()), + }; + + let parent_substs = + parent_id.map(|id| TyBuilder::subst_for_def(db, id, None).fill(&mut filler).build()); + let substs = TyBuilder::subst_for_def(db, self.id, parent_substs).fill(&mut filler).build(); + + let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs); + let ty = callable_sig.ret().clone(); + Type::new_with_resolver_inner(db, &resolver, ty) + } + pub fn async_ret_type(self, db: &dyn HirDatabase) -> Option { if !self.is_async(db) { return None; @@ -1855,6 +1922,51 @@ impl Function { .collect() } + // FIXME: Find better API to also handle const generics + pub fn params_without_self_with_args( + self, + db: &dyn HirDatabase, + generics: impl Iterator, + ) -> Vec { + let environment = db.trait_environment(self.id.into()); + let parent_id: Option = match self.id.lookup(db.upcast()).container { + ItemContainerId::ImplId(it) => Some(it.into()), + ItemContainerId::TraitId(it) => Some(it.into()), + ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None, + }; + let mut generics = generics.map(|it| it.ty.clone()); + let parent_substs = parent_id.map(|id| { + TyBuilder::subst_for_def(db, id, None) + .fill(|x| match x { + ParamKind::Type => generics + .next() + .unwrap_or_else(|| TyKind::Error.intern(Interner)) + .cast(Interner), + ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()), + }) + .build() + }); + + let substs = TyBuilder::subst_for_def(db, self.id, parent_substs) + .fill(|_| { + let ty = generics.next().unwrap_or_else(|| TyKind::Error.intern(Interner)); + GenericArg::new(Interner, GenericArgData::Ty(ty)) + }) + .build(); + let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs); + let skip = if db.function_data(self.id).has_self_param() { 1 } else { 0 }; + callable_sig + .params() + .iter() + .enumerate() + .skip(skip) + .map(|(idx, ty)| { + let ty = Type { env: environment.clone(), ty: ty.clone() }; + Param { func: self, ty, idx } + }) + .collect() + } + pub fn is_const(self, db: &dyn HirDatabase) -> bool { db.function_data(self.id).has_const_kw() } @@ -1889,6 +2001,11 @@ impl Function { db.function_data(self.id).attrs.is_bench() } + /// Is this function marked as unstable with `#[feature]` attribute? 
+ pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { + db.function_data(self.id).attrs.is_unstable() + } + pub fn is_unsafe_to_call(self, db: &dyn HirDatabase) -> bool { hir_ty::is_fn_unsafe_to_call(db, self.id) } @@ -2052,6 +2169,34 @@ impl SelfParam { let ty = callable_sig.params()[0].clone(); Type { env: environment, ty } } + + // FIXME: Find better API to also handle const generics + pub fn ty_with_args(&self, db: &dyn HirDatabase, generics: impl Iterator) -> Type { + let parent_id: GenericDefId = match self.func.lookup(db.upcast()).container { + ItemContainerId::ImplId(it) => it.into(), + ItemContainerId::TraitId(it) => it.into(), + ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => { + panic!("Never get here") + } + }; + + let mut generics = generics.map(|it| it.ty.clone()); + let mut filler = |x: &_| match x { + ParamKind::Type => { + generics.next().unwrap_or_else(|| TyKind::Error.intern(Interner)).cast(Interner) + } + ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()), + }; + + let parent_substs = TyBuilder::subst_for_def(db, parent_id, None).fill(&mut filler).build(); + let substs = + TyBuilder::subst_for_def(db, self.func, Some(parent_substs)).fill(&mut filler).build(); + let callable_sig = + db.callable_item_signature(self.func.into()).substitute(Interner, &substs); + let environment = db.trait_environment(self.func.into()); + let ty = callable_sig.params()[0].clone(); + Type { env: environment, ty } + } } impl HasVisibility for Function { @@ -2754,7 +2899,7 @@ impl GenericDef { .collect() } - pub fn type_params(self, db: &dyn HirDatabase) -> Vec { + pub fn type_or_const_params(self, db: &dyn HirDatabase) -> Vec { let generics = db.generic_params(self.into()); generics .type_or_consts @@ -3126,12 +3271,16 @@ impl TypeParam { let ty = generic_arg_from_param(db, self.id.into())?; let resolver = self.id.parent().resolver(db.upcast()); match ty.data(Interner) { - GenericArgData::Ty(it) => { + GenericArgData::Ty(it) if *it.kind(Interner) != TyKind::Error => { Some(Type::new_with_resolver_inner(db, &resolver, it.clone())) } _ => None, } } + + pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { + db.attrs(GenericParamId::from(self.id).into()).is_unstable() + } } #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] @@ -3241,6 +3390,26 @@ impl TypeOrConstParam { Either::Right(it) => it.ty(db), } } + + pub fn as_type_param(self, db: &dyn HirDatabase) -> Option { + let params = db.generic_params(self.id.parent); + match ¶ms.type_or_consts[self.id.local_id] { + hir_def::generics::TypeOrConstParamData::TypeParamData(_) => { + Some(TypeParam { id: TypeParamId::from_unchecked(self.id) }) + } + hir_def::generics::TypeOrConstParamData::ConstParamData(_) => None, + } + } + + pub fn as_const_param(self, db: &dyn HirDatabase) -> Option { + let params = db.generic_params(self.id.parent); + match ¶ms.type_or_consts[self.id.local_id] { + hir_def::generics::TypeOrConstParamData::TypeParamData(_) => None, + hir_def::generics::TypeOrConstParamData::ConstParamData(_) => { + Some(ConstParam { id: ConstParamId::from_unchecked(self.id) }) + } + } + } } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -3285,12 +3454,11 @@ impl Impl { .filter(filter), ) }); + for id in def_crates .iter() .flat_map(|&id| Crate { id }.transitive_reverse_dependencies(db)) .map(|Crate { id }| id) - .chain(def_crates.iter().copied()) - .unique() { all.extend( db.trait_impls_in_crate(id) @@ -3520,7 +3688,7 @@ pub enum CaptureKind { Move, } -#[derive(Clone, PartialEq, Eq, Debug)] 
+#[derive(Clone, PartialEq, Eq, Debug, Hash)] pub struct Type { env: Arc, ty: Ty, @@ -3620,6 +3788,50 @@ impl Type { matches!(self.ty.kind(Interner), TyKind::Ref(..)) } + pub fn contains_reference(&self, db: &dyn HirDatabase) -> bool { + return go(db, self.env.krate, &self.ty); + + fn go(db: &dyn HirDatabase, krate: CrateId, ty: &Ty) -> bool { + match ty.kind(Interner) { + // Reference itself + TyKind::Ref(_, _, _) => true, + + // For non-phantom_data adts we check variants/fields as well as generic parameters + TyKind::Adt(adt_id, substitution) + if !db.struct_datum(krate, *adt_id).flags.phantom_data => + { + let adt_datum = &db.struct_datum(krate, *adt_id); + let adt_datum_bound = + adt_datum.binders.clone().substitute(Interner, substitution); + adt_datum_bound + .variants + .into_iter() + .flat_map(|variant| variant.fields.into_iter()) + .any(|ty| go(db, krate, &ty)) + || substitution + .iter(Interner) + .filter_map(|x| x.ty(Interner)) + .any(|ty| go(db, krate, ty)) + } + // And for `PhantomData`, we check `T`. + TyKind::Adt(_, substitution) + | TyKind::Tuple(_, substitution) + | TyKind::OpaqueType(_, substitution) + | TyKind::AssociatedType(_, substitution) + | TyKind::FnDef(_, substitution) => substitution + .iter(Interner) + .filter_map(|x| x.ty(Interner)) + .any(|ty| go(db, krate, ty)), + + // For `[T]` or `*T` we check `T` + TyKind::Array(ty, _) | TyKind::Slice(ty) | TyKind::Raw(_, ty) => go(db, krate, ty), + + // Consider everything else as not reference + _ => false, + } + } + } + pub fn as_reference(&self) -> Option<(Type, Mutability)> { let (ty, _lt, m) = self.ty.as_reference()?; let m = Mutability::from_mutable(matches!(m, hir_ty::Mutability::Mut)); @@ -3727,14 +3939,16 @@ impl Type { ) } + // FIXME: Find better API that also handles const generics pub fn impls_trait(&self, db: &dyn HirDatabase, trait_: Trait, args: &[Type]) -> bool { let mut it = args.iter().map(|t| t.ty.clone()); let trait_ref = TyBuilder::trait_ref(db, trait_.id) .push(self.ty.clone()) .fill(|x| { - let r = it.next().unwrap(); match x { - ParamKind::Type => r.cast(Interner), + ParamKind::Type => { + it.next().unwrap_or_else(|| TyKind::Error.intern(Interner)).cast(Interner) + } ParamKind::Const(ty) => { // FIXME: this code is not covered in tests. unknown_const_as_generic(ty.clone()) @@ -4368,12 +4582,24 @@ impl Type { walk_type(db, self, &mut cb); } - + /// Check if type unifies with another type. + /// + /// Note that we consider placeholder types to unify with everything. + /// For example `Option` and `Option` unify although there is unresolved goal `T = U`. pub fn could_unify_with(&self, db: &dyn HirDatabase, other: &Type) -> bool { let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), other.ty.clone())); hir_ty::could_unify(db, self.env.clone(), &tys) } + /// Check if type unifies with another type eagerly making sure there are no unresolved goals. + /// + /// This means that placeholder types are not considered to unify if there are any bounds set on + /// them. 
For example `Option<T>` and `Option<U>` do not unify, as we cannot show that `T = U`.
+    pub fn could_unify_with_deeply(&self, db: &dyn HirDatabase, other: &Type) -> bool {
+        let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), other.ty.clone()));
+        hir_ty::could_unify_deeply(db, self.env.clone(), &tys)
+    }
+
     pub fn could_coerce_to(&self, db: &dyn HirDatabase, to: &Type) -> bool {
         let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), to.ty.clone()));
         hir_ty::could_coerce(db, self.env.clone(), &tys)
diff --git a/crates/hir/src/term_search.rs b/crates/hir/src/term_search.rs
new file mode 100644
index 0000000000000..72762007dc98f
--- /dev/null
+++ b/crates/hir/src/term_search.rs
@@ -0,0 +1,298 @@
+//! Term search
+
+use hir_def::type_ref::Mutability;
+use hir_ty::db::HirDatabase;
+use itertools::Itertools;
+use rustc_hash::{FxHashMap, FxHashSet};
+
+use crate::{ModuleDef, ScopeDef, Semantics, SemanticsScope, Type};
+
+mod expr;
+pub use expr::Expr;
+
+mod tactics;
+
+/// Key for lookup table to query new types reached.
+#[derive(Debug, Hash, PartialEq, Eq)]
+enum NewTypesKey {
+    ImplMethod,
+    StructProjection,
+}
+
+/// Helper enum to squash a big number of alternative trees into the `Many` variant, as there are
+/// too many to take into account.
+#[derive(Debug)]
+enum AlternativeExprs {
+    /// There are few trees, so we keep track of them all
+    Few(FxHashSet<Expr>),
+    /// There are too many trees to keep track of
+    Many,
+}
+
+impl AlternativeExprs {
+    /// Construct alternative trees
+    ///
+    /// # Arguments
+    /// `threshold` - threshold value for many trees (more than that is many)
+    /// `exprs` - expressions iterator
+    fn new(threshold: usize, exprs: impl Iterator<Item = Expr>) -> AlternativeExprs {
+        let mut it = AlternativeExprs::Few(Default::default());
+        it.extend_with_threshold(threshold, exprs);
+        it
+    }
+
+    /// Get type trees stored in alternative trees (or `Expr::Many` in case of many)
+    ///
+    /// # Arguments
+    /// `ty` - Type of expressions queried (this is used to give a type to `Expr::Many`)
+    fn exprs(&self, ty: &Type) -> Vec<Expr> {
+        match self {
+            AlternativeExprs::Few(exprs) => exprs.iter().cloned().collect(),
+            AlternativeExprs::Many => vec![Expr::Many(ty.clone())],
+        }
+    }
+
+    /// Extend alternative expressions
+    ///
+    /// # Arguments
+    /// `threshold` - threshold value for many trees (more than that is many)
+    /// `exprs` - expressions iterator
+    fn extend_with_threshold(&mut self, threshold: usize, exprs: impl Iterator<Item = Expr>) {
+        match self {
+            AlternativeExprs::Few(tts) => {
+                for it in exprs {
+                    if tts.len() > threshold {
+                        *self = AlternativeExprs::Many;
+                        break;
+                    }
+
+                    tts.insert(it);
+                }
+            }
+            AlternativeExprs::Many => (),
+        }
+    }
+}
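A self-contained sketch of the `AlternativeExprs` collapse above: concrete candidates are tracked only until their number exceeds the threshold, after which the whole set degenerates into a single `Many` marker (`HashSet` and `&str` are stand-ins for the real types):

    use std::collections::HashSet;

    #[derive(Debug)]
    enum Alternatives {
        Few(HashSet<&'static str>),
        Many,
    }

    impl Alternatives {
        fn extend_with_threshold(
            &mut self,
            threshold: usize,
            exprs: impl Iterator<Item = &'static str>,
        ) {
            if let Alternatives::Few(set) = self {
                for e in exprs {
                    if set.len() > threshold {
                        *self = Alternatives::Many;
                        return;
                    }
                    set.insert(e);
                }
            }
        }
    }

    fn main() {
        let mut alts = Alternatives::Few(HashSet::new());
        alts.extend_with_threshold(1, ["a", "b", "c"].into_iter());
        assert!(matches!(alts, Alternatives::Many));
    }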
+#[derive(Default, Debug)]
+struct LookupTable {
+    /// All the `Expr`s in "value" produce the type of "key"
+    data: FxHashMap<Type, AlternativeExprs>,
+    /// New types reached since last query by the `NewTypesKey`
+    new_types: FxHashMap<NewTypesKey, Vec<Type>>,
+    /// ScopeDefs that are not interesting any more
+    exhausted_scopedefs: FxHashSet<ScopeDef>,
+    /// ScopeDefs that were used in the current round
+    round_scopedef_hits: FxHashSet<ScopeDef>,
+    /// Number of rounds since scopedef was first used.
+    rounds_since_scopedef_hit: FxHashMap<ScopeDef, u32>,
+    /// Types queried but not present
+    types_wishlist: FxHashSet<Type>,
+    /// Threshold to squash trees to `Many`
+    many_threshold: usize,
+}
+
+impl LookupTable {
+    /// Initialize lookup table
+    fn new(many_threshold: usize) -> Self {
+        let mut res = Self { many_threshold, ..Default::default() };
+        res.new_types.insert(NewTypesKey::ImplMethod, Vec::new());
+        res.new_types.insert(NewTypesKey::StructProjection, Vec::new());
+        res
+    }
+
+    /// Find all `Expr`s that unify with the `ty`
+    fn find(&self, db: &dyn HirDatabase, ty: &Type) -> Option<Vec<Expr>> {
+        self.data
+            .iter()
+            .find(|(t, _)| t.could_unify_with_deeply(db, ty))
+            .map(|(t, tts)| tts.exprs(t))
+    }
+
+    /// Same as `find` but also tries a shared reference to the types in the lookup
+    ///
+    /// For example, if we have the type `i32` in data and we query for `&i32`, it wraps all the type
+    /// trees we have for `i32` in `Expr::Reference` and returns them.
+    fn find_autoref(&self, db: &dyn HirDatabase, ty: &Type) -> Option<Vec<Expr>> {
+        self.data
+            .iter()
+            .find(|(t, _)| t.could_unify_with_deeply(db, ty))
+            .map(|(t, it)| it.exprs(t))
+            .or_else(|| {
+                self.data
+                    .iter()
+                    .find(|(t, _)| {
+                        Type::reference(t, Mutability::Shared).could_unify_with_deeply(db, ty)
+                    })
+                    .map(|(t, it)| {
+                        it.exprs(t)
+                            .into_iter()
+                            .map(|expr| Expr::Reference(Box::new(expr)))
+                            .collect()
+                    })
+            })
+    }
+
+    /// Insert new type trees for a type
+    ///
+    /// Note that the types have to be the same; unification is not enough, as unification is not
+    /// transitive. For example `Vec<i32>` and `FxHashSet<i32>` both unify with `Iterator<Item = i32>`,
+    /// but they clearly do not unify with each other.
+    fn insert(&mut self, ty: Type, exprs: impl Iterator<Item = Expr>) {
+        match self.data.get_mut(&ty) {
+            Some(it) => it.extend_with_threshold(self.many_threshold, exprs),
+            None => {
+                self.data.insert(ty.clone(), AlternativeExprs::new(self.many_threshold, exprs));
+                for it in self.new_types.values_mut() {
+                    it.push(ty.clone());
+                }
+            }
+        }
+    }
+
+    /// Iterate all the reachable types
+    fn iter_types(&self) -> impl Iterator<Item = Type> + '_ {
+        self.data.keys().cloned()
+    }
+
+    /// Query new types reached since the last query by key
+    ///
+    /// Create a new key if you wish to query it, to avoid conflicting with existing queries.
+    fn new_types(&mut self, key: NewTypesKey) -> Vec<Type> {
+        match self.new_types.get_mut(&key) {
+            Some(it) => std::mem::take(it),
+            None => Vec::new(),
+        }
+    }
+
+    /// Mark a `ScopeDef` as exhausted, meaning it is not interesting for us any more
+    fn mark_exhausted(&mut self, def: ScopeDef) {
+        self.exhausted_scopedefs.insert(def);
+    }
+
+    /// Mark a `ScopeDef` as used, meaning we managed to produce something useful from it
+    fn mark_fulfilled(&mut self, def: ScopeDef) {
+        self.round_scopedef_hits.insert(def);
+    }
+
+    /// Start a new round (meant to be called at the beginning of an iteration in `term_search`)
+    ///
+    /// This function marks some `ScopeDef`s as exhausted if there have been
+    /// `MAX_ROUNDS_AFTER_HIT` rounds after first using a `ScopeDef`.
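The round bookkeeping just described can be sketched in isolation; the actual `new_round` implementation follows right after. The constant mirrors the real one, but the `&'static str` keys are a hypothetical stand-in for `ScopeDef`:

```rust
// Minimal sketch of the exhaustion policy: once a definition has produced
// results, it gets MAX_ROUNDS_AFTER_HIT more rounds before being retired.
use std::collections::{HashMap, HashSet};

const MAX_ROUNDS_AFTER_HIT: u32 = 2;

#[derive(Default)]
struct Rounds {
    hits_this_round: HashSet<&'static str>,
    rounds_since_hit: HashMap<&'static str, u32>,
    exhausted: HashSet<&'static str>,
}

impl Rounds {
    fn mark_fulfilled(&mut self, def: &'static str) {
        self.hits_this_round.insert(def);
    }

    fn new_round(&mut self) {
        for def in self.hits_this_round.iter().copied() {
            // Bump the per-definition round counter, starting at 0 on first hit.
            let n = self.rounds_since_hit.entry(def).and_modify(|n| *n += 1).or_insert(0);
            if *n > MAX_ROUNDS_AFTER_HIT {
                self.exhausted.insert(def);
            }
        }
        self.hits_this_round.clear();
    }
}

fn main() {
    let mut r = Rounds::default();
    for round in 0.. {
        r.mark_fulfilled("foo");
        r.new_round();
        if r.exhausted.contains("foo") {
            println!("foo exhausted after round {round}");
            break;
        }
    }
}
```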
+    fn new_round(&mut self) {
+        for def in &self.round_scopedef_hits {
+            let hits =
+                self.rounds_since_scopedef_hit.entry(*def).and_modify(|n| *n += 1).or_insert(0);
+            const MAX_ROUNDS_AFTER_HIT: u32 = 2;
+            if *hits > MAX_ROUNDS_AFTER_HIT {
+                self.exhausted_scopedefs.insert(*def);
+            }
+        }
+        self.round_scopedef_hits.clear();
+    }
+
+    /// Get exhausted `ScopeDef`s
+    fn exhausted_scopedefs(&self) -> &FxHashSet<ScopeDef> {
+        &self.exhausted_scopedefs
+    }
+
+    /// Types queried but not found
+    fn take_types_wishlist(&mut self) -> FxHashSet<Type> {
+        std::mem::take(&mut self.types_wishlist)
+    }
+}
+
+/// Context for the `term_search` function
+#[derive(Debug)]
+pub struct TermSearchCtx<'a, DB: HirDatabase> {
+    /// Semantics for the program
+    pub sema: &'a Semantics<'a, DB>,
+    /// Semantic scope, captures context for the term search
+    pub scope: &'a SemanticsScope<'a>,
+    /// Target / expected output type
+    pub goal: Type,
+    /// Configuration for term search
+    pub config: TermSearchConfig,
+}
+
+/// Configuration options for the term search
+#[derive(Debug, Clone, Copy)]
+pub struct TermSearchConfig {
+    /// Enable borrow checking; this guarantees that the outputs of `term_search` borrow-check
+    pub enable_borrowcheck: bool,
+    /// Indicates when to squash multiple trees to `Many`, as there are too many to keep track of
+    pub many_alternatives_threshold: usize,
+    /// Depth of the search, i.e. the number of cycles to run
+    pub depth: usize,
+}
+
+impl Default for TermSearchConfig {
+    fn default() -> Self {
+        Self { enable_borrowcheck: true, many_alternatives_threshold: 1, depth: 6 }
+    }
+}
+
+/// # Term search
+///
+/// Search for terms (expressions) that unify with the `goal` type.
+///
+/// # Arguments
+/// * `ctx` - Context for term search
+///
+/// Internally this function uses breadth-first search to find a path to the `goal` type.
+/// The general idea is the following:
+/// 1. Populate the lookup (the frontier for BFS) from values (local variables, statics, constants, etc.)
+///    as well as from well-known values (such as `true`/`false` and `()`)
+/// 2. Iteratively expand the frontier (i.e. the contents of the lookup) by trying different type
+///    transformation tactics. For example, functions take us from a set of types (arguments) to some
+///    type (return type). Other transformations include methods on types, type constructors and
+///    projections to struct fields (field access).
+/// 3. Once we manage to find a path to the type we are interested in, we continue for a single round to see
+///    if we can find more paths that take us to the `goal` type.
+/// 4. Return all the paths (type trees) that take us to the `goal` type.
+///
+/// Note that there are usually more ways to get to the `goal` type, but some are discarded to
+/// reduce memory consumption. It is also unlikely that anyone is willing to browse through
+/// thousands of possible responses, so we currently take the first 10 from every tactic.
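The BFS just documented (and implemented next) can be shrunk to strings: types are names, "functions" map argument types to a return type, and each round grows the reachable set until the goal appears. This illustrates the search strategy only, not the real API:

```rust
// Miniature of the term-search BFS: expand the set of reachable "types" with
// hypothetical function signatures until the goal type is produced.
use std::collections::{HashMap, HashSet};

fn main() {
    // Hypothetical scope: two locals and two functions.
    let locals = ["i32", "bool"];
    let functions: &[(&str, &[&str])] = &[
        ("f32", &["i32", "bool"]), // fn f(i32, bool) -> f32
        ("String", &["f32"]),      // fn g(f32) -> String
    ];
    let goal = "String";

    let mut reachable: HashSet<&str> = locals.iter().copied().collect();
    let mut how: HashMap<&str, usize> = HashMap::new(); // type -> round reached

    for round in 0..6 {
        // A function fires once all of its argument types are reachable.
        let new: Vec<&str> = functions
            .iter()
            .filter(|&&(ret, args)| {
                !reachable.contains(ret) && args.iter().all(|a| reachable.contains(a))
            })
            .map(|(ret, _)| *ret)
            .collect();
        for ty in new {
            reachable.insert(ty);
            how.insert(ty, round);
        }
        if reachable.contains(goal) {
            println!("goal `{goal}` reached in round {}", how[goal]);
            break;
        }
    }
}
```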
+pub fn term_search(ctx: &TermSearchCtx<'_, DB>) -> Vec { + let module = ctx.scope.module(); + let mut defs = FxHashSet::default(); + defs.insert(ScopeDef::ModuleDef(ModuleDef::Module(module))); + + ctx.scope.process_all_names(&mut |_, def| { + defs.insert(def); + }); + + let mut lookup = LookupTable::new(ctx.config.many_alternatives_threshold); + + // Try trivial tactic first, also populates lookup table + let mut solutions: Vec = tactics::trivial(ctx, &defs, &mut lookup).collect(); + // Use well known types tactic before iterations as it does not depend on other tactics + solutions.extend(tactics::famous_types(ctx, &defs, &mut lookup)); + + for _ in 0..ctx.config.depth { + lookup.new_round(); + + solutions.extend(tactics::type_constructor(ctx, &defs, &mut lookup)); + solutions.extend(tactics::free_function(ctx, &defs, &mut lookup)); + solutions.extend(tactics::impl_method(ctx, &defs, &mut lookup)); + solutions.extend(tactics::struct_projection(ctx, &defs, &mut lookup)); + solutions.extend(tactics::impl_static_method(ctx, &defs, &mut lookup)); + + // Discard not interesting `ScopeDef`s for speedup + for def in lookup.exhausted_scopedefs() { + defs.remove(def); + } + } + + solutions.into_iter().filter(|it| !it.is_many()).unique().collect() +} diff --git a/crates/hir/src/term_search/expr.rs b/crates/hir/src/term_search/expr.rs new file mode 100644 index 0000000000000..254fbe7e2b53e --- /dev/null +++ b/crates/hir/src/term_search/expr.rs @@ -0,0 +1,468 @@ +//! Type tree for term search + +use hir_def::find_path::PrefixKind; +use hir_expand::mod_path::ModPath; +use hir_ty::{ + db::HirDatabase, + display::{DisplaySourceCodeError, HirDisplay}, +}; +use itertools::Itertools; + +use crate::{ + Adt, AsAssocItem, Const, ConstParam, Field, Function, GenericDef, Local, ModuleDef, + SemanticsScope, Static, Struct, StructKind, Trait, Type, Variant, +}; + +/// Helper function to get path to `ModuleDef` +fn mod_item_path( + sema_scope: &SemanticsScope<'_>, + def: &ModuleDef, + prefer_no_std: bool, + prefer_prelude: bool, +) -> Option { + let db = sema_scope.db; + // Account for locals shadowing items from module + let name_hit_count = def.name(db).map(|def_name| { + let mut name_hit_count = 0; + sema_scope.process_all_names(&mut |name, _| { + if name == def_name { + name_hit_count += 1; + } + }); + name_hit_count + }); + + let m = sema_scope.module(); + match name_hit_count { + Some(0..=1) | None => m.find_use_path(db.upcast(), *def, prefer_no_std, prefer_prelude), + Some(_) => m.find_use_path_prefixed( + db.upcast(), + *def, + PrefixKind::ByCrate, + prefer_no_std, + prefer_prelude, + ), + } +} + +/// Helper function to get path to `ModuleDef` as string +fn mod_item_path_str( + sema_scope: &SemanticsScope<'_>, + def: &ModuleDef, + prefer_no_std: bool, + prefer_prelude: bool, +) -> Result { + let path = mod_item_path(sema_scope, def, prefer_no_std, prefer_prelude); + path.map(|it| it.display(sema_scope.db.upcast()).to_string()) + .ok_or(DisplaySourceCodeError::PathNotFound) +} + +/// Helper function to get path to `Type` +fn type_path( + sema_scope: &SemanticsScope<'_>, + ty: &Type, + prefer_no_std: bool, + prefer_prelude: bool, +) -> Result { + let db = sema_scope.db; + let m = sema_scope.module(); + + match ty.as_adt() { + Some(adt) => { + let ty_name = ty.display_source_code(db, m.id, true)?; + + let mut path = + mod_item_path(sema_scope, &ModuleDef::Adt(adt), prefer_no_std, prefer_prelude) + .unwrap(); + path.pop_segment(); + let path = path.display(db.upcast()).to_string(); + let res = match 
path.is_empty() { + true => ty_name, + false => format!("{path}::{ty_name}"), + }; + Ok(res) + } + None => ty.display_source_code(db, m.id, true), + } +} + +/// Helper function to filter out generic parameters that are default +fn non_default_generics(db: &dyn HirDatabase, def: GenericDef, generics: &[Type]) -> Vec { + def.type_or_const_params(db) + .into_iter() + .filter_map(|it| it.as_type_param(db)) + .zip(generics) + .filter(|(tp, arg)| tp.default(db).as_ref() != Some(arg)) + .map(|(_, arg)| arg.clone()) + .collect() +} + +/// Type tree shows how can we get from set of types to some type. +/// +/// Consider the following code as an example +/// ``` +/// fn foo(x: i32, y: bool) -> Option { None } +/// fn bar() { +/// let a = 1; +/// let b = true; +/// let c: Option = _; +/// } +/// ``` +/// If we generate type tree in the place of `_` we get +/// ```txt +/// Option +/// | +/// foo(i32, bool) +/// / \ +/// a: i32 b: bool +/// ``` +/// So in short it pretty much gives us a way to get type `Option` using the items we have in +/// scope. +#[derive(Debug, Clone, Eq, Hash, PartialEq)] +pub enum Expr { + /// Constant + Const(Const), + /// Static variable + Static(Static), + /// Local variable + Local(Local), + /// Constant generic parameter + ConstParam(ConstParam), + /// Well known type (such as `true` for bool) + FamousType { ty: Type, value: &'static str }, + /// Function call (does not take self param) + Function { func: Function, generics: Vec, params: Vec }, + /// Method call (has self param) + Method { func: Function, generics: Vec, target: Box, params: Vec }, + /// Enum variant construction + Variant { variant: Variant, generics: Vec, params: Vec }, + /// Struct construction + Struct { strukt: Struct, generics: Vec, params: Vec }, + /// Struct field access + Field { expr: Box, field: Field }, + /// Passing type as reference (with `&`) + Reference(Box), + /// Indicates possibility of many different options that all evaluate to `ty` + Many(Type), +} + +impl Expr { + /// Generate source code for type tree. + /// + /// Note that trait imports are not added to generated code. + /// To make sure that the code is valid, callee has to also ensure that all the traits listed + /// by `traits_used` method are also imported. + pub fn gen_source_code( + &self, + sema_scope: &SemanticsScope<'_>, + many_formatter: &mut dyn FnMut(&Type) -> String, + prefer_no_std: bool, + prefer_prelude: bool, + ) -> Result { + let db = sema_scope.db; + let mod_item_path_str = |s, def| mod_item_path_str(s, def, prefer_no_std, prefer_prelude); + match self { + Expr::Const(it) => mod_item_path_str(sema_scope, &ModuleDef::Const(*it)), + Expr::Static(it) => mod_item_path_str(sema_scope, &ModuleDef::Static(*it)), + Expr::Local(it) => Ok(it.name(db).display(db.upcast()).to_string()), + Expr::ConstParam(it) => Ok(it.name(db).display(db.upcast()).to_string()), + Expr::FamousType { value, .. } => Ok(value.to_string()), + Expr::Function { func, params, .. } => { + let args = params + .iter() + .map(|f| { + f.gen_source_code(sema_scope, many_formatter, prefer_no_std, prefer_prelude) + }) + .collect::, DisplaySourceCodeError>>()? + .into_iter() + .join(", "); + + match func.as_assoc_item(db).map(|it| it.container(db)) { + Some(container) => { + let container_name = match container { + crate::AssocItemContainer::Trait(trait_) => { + mod_item_path_str(sema_scope, &ModuleDef::Trait(trait_))? 
+ } + crate::AssocItemContainer::Impl(imp) => { + let self_ty = imp.self_ty(db); + // Should it be guaranteed that `mod_item_path` always exists? + match self_ty.as_adt().and_then(|adt| { + mod_item_path( + sema_scope, + &adt.into(), + prefer_no_std, + prefer_prelude, + ) + }) { + Some(path) => path.display(sema_scope.db.upcast()).to_string(), + None => self_ty.display(db).to_string(), + } + } + }; + let fn_name = func.name(db).display(db.upcast()).to_string(); + Ok(format!("{container_name}::{fn_name}({args})")) + } + None => { + let fn_name = mod_item_path_str(sema_scope, &ModuleDef::Function(*func))?; + Ok(format!("{fn_name}({args})")) + } + } + } + Expr::Method { func, target, params, .. } => { + if target.contains_many_in_illegal_pos() { + return Ok(many_formatter(&target.ty(db))); + } + + let func_name = func.name(db).display(db.upcast()).to_string(); + let self_param = func.self_param(db).unwrap(); + let target = target.gen_source_code( + sema_scope, + many_formatter, + prefer_no_std, + prefer_prelude, + )?; + let args = params + .iter() + .map(|f| { + f.gen_source_code(sema_scope, many_formatter, prefer_no_std, prefer_prelude) + }) + .collect::, DisplaySourceCodeError>>()? + .into_iter() + .join(", "); + + match func.as_assoc_item(db).and_then(|it| it.container_or_implemented_trait(db)) { + Some(trait_) => { + let trait_name = mod_item_path_str(sema_scope, &ModuleDef::Trait(trait_))?; + let target = match self_param.access(db) { + crate::Access::Shared => format!("&{target}"), + crate::Access::Exclusive => format!("&mut {target}"), + crate::Access::Owned => target, + }; + let res = match args.is_empty() { + true => format!("{trait_name}::{func_name}({target})",), + false => format!("{trait_name}::{func_name}({target}, {args})",), + }; + Ok(res) + } + None => Ok(format!("{target}.{func_name}({args})")), + } + } + Expr::Variant { variant, generics, params } => { + let generics = non_default_generics(db, (*variant).into(), generics); + let generics_str = match generics.is_empty() { + true => String::new(), + false => { + let generics = generics + .iter() + .map(|it| type_path(sema_scope, it, prefer_no_std, prefer_prelude)) + .collect::, DisplaySourceCodeError>>()? + .into_iter() + .join(", "); + format!("::<{generics}>") + } + }; + let inner = match variant.kind(db) { + StructKind::Tuple => { + let args = params + .iter() + .map(|f| { + f.gen_source_code( + sema_scope, + many_formatter, + prefer_no_std, + prefer_prelude, + ) + }) + .collect::, DisplaySourceCodeError>>()? + .into_iter() + .join(", "); + format!("{generics_str}({args})") + } + StructKind::Record => { + let fields = variant.fields(db); + let args = params + .iter() + .zip(fields.iter()) + .map(|(a, f)| { + let tmp = format!( + "{}: {}", + f.name(db).display(db.upcast()), + a.gen_source_code( + sema_scope, + many_formatter, + prefer_no_std, + prefer_prelude + )? + ); + Ok(tmp) + }) + .collect::, DisplaySourceCodeError>>()? + .into_iter() + .join(", "); + format!("{generics_str}{{ {args} }}") + } + StructKind::Unit => generics_str, + }; + + let prefix = mod_item_path_str(sema_scope, &ModuleDef::Variant(*variant))?; + Ok(format!("{prefix}{inner}")) + } + Expr::Struct { strukt, generics, params } => { + let generics = non_default_generics(db, (*strukt).into(), generics); + let inner = match strukt.kind(db) { + StructKind::Tuple => { + let args = params + .iter() + .map(|a| { + a.gen_source_code( + sema_scope, + many_formatter, + prefer_no_std, + prefer_prelude, + ) + }) + .collect::, DisplaySourceCodeError>>()? 
+ .into_iter() + .join(", "); + format!("({args})") + } + StructKind::Record => { + let fields = strukt.fields(db); + let args = params + .iter() + .zip(fields.iter()) + .map(|(a, f)| { + let tmp = format!( + "{}: {}", + f.name(db).display(db.upcast()), + a.gen_source_code( + sema_scope, + many_formatter, + prefer_no_std, + prefer_prelude + )? + ); + Ok(tmp) + }) + .collect::, DisplaySourceCodeError>>()? + .into_iter() + .join(", "); + format!(" {{ {args} }}") + } + StructKind::Unit => match generics.is_empty() { + true => String::new(), + false => { + let generics = generics + .iter() + .map(|it| type_path(sema_scope, it, prefer_no_std, prefer_prelude)) + .collect::, DisplaySourceCodeError>>()? + .into_iter() + .join(", "); + format!("::<{generics}>") + } + }, + }; + + let prefix = mod_item_path_str(sema_scope, &ModuleDef::Adt(Adt::Struct(*strukt)))?; + Ok(format!("{prefix}{inner}")) + } + Expr::Field { expr, field } => { + if expr.contains_many_in_illegal_pos() { + return Ok(many_formatter(&expr.ty(db))); + } + + let strukt = expr.gen_source_code( + sema_scope, + many_formatter, + prefer_no_std, + prefer_prelude, + )?; + let field = field.name(db).display(db.upcast()).to_string(); + Ok(format!("{strukt}.{field}")) + } + Expr::Reference(expr) => { + if expr.contains_many_in_illegal_pos() { + return Ok(many_formatter(&expr.ty(db))); + } + + let inner = expr.gen_source_code( + sema_scope, + many_formatter, + prefer_no_std, + prefer_prelude, + )?; + Ok(format!("&{inner}")) + } + Expr::Many(ty) => Ok(many_formatter(ty)), + } + } + + /// Get type of the type tree. + /// + /// Same as getting the type of root node + pub fn ty(&self, db: &dyn HirDatabase) -> Type { + match self { + Expr::Const(it) => it.ty(db), + Expr::Static(it) => it.ty(db), + Expr::Local(it) => it.ty(db), + Expr::ConstParam(it) => it.ty(db), + Expr::FamousType { ty, .. } => ty.clone(), + Expr::Function { func, generics, .. } => { + func.ret_type_with_args(db, generics.iter().cloned()) + } + Expr::Method { func, generics, target, .. } => func.ret_type_with_args( + db, + target.ty(db).type_arguments().chain(generics.iter().cloned()), + ), + Expr::Variant { variant, generics, .. } => { + Adt::from(variant.parent_enum(db)).ty_with_args(db, generics.iter().cloned()) + } + Expr::Struct { strukt, generics, .. } => { + Adt::from(*strukt).ty_with_args(db, generics.iter().cloned()) + } + Expr::Field { expr, field } => field.ty_with_args(db, expr.ty(db).type_arguments()), + Expr::Reference(it) => it.ty(db), + Expr::Many(ty) => ty.clone(), + } + } + + /// List the traits used in type tree + pub fn traits_used(&self, db: &dyn HirDatabase) -> Vec { + let mut res = Vec::new(); + + if let Expr::Method { func, params, .. } = self { + res.extend(params.iter().flat_map(|it| it.traits_used(db))); + if let Some(it) = func.as_assoc_item(db) { + if let Some(it) = it.container_or_implemented_trait(db) { + res.push(it); + } + } + } + + res + } + + /// Check in the tree contains `Expr::Many` variant in illegal place to insert `todo`, + /// `unimplemented` or similar macro + /// + /// Some examples are following + /// ```no_compile + /// macro!().foo + /// macro!().bar() + /// ¯o!() + /// ``` + fn contains_many_in_illegal_pos(&self) -> bool { + match self { + Expr::Method { target, .. } => target.contains_many_in_illegal_pos(), + Expr::Field { expr, .. 
} => expr.contains_many_in_illegal_pos(), + Expr::Reference(target) => target.is_many(), + Expr::Many(_) => true, + _ => false, + } + } + + /// Helper function to check if outermost type tree is `Expr::Many` variant + pub fn is_many(&self) -> bool { + matches!(self, Expr::Many(_)) + } +} diff --git a/crates/hir/src/term_search/tactics.rs b/crates/hir/src/term_search/tactics.rs new file mode 100644 index 0000000000000..666d63ac1558b --- /dev/null +++ b/crates/hir/src/term_search/tactics.rs @@ -0,0 +1,859 @@ +//! Tactics for term search +//! +//! All the tactics take following arguments +//! * `ctx` - Context for the term search +//! * `defs` - Set of items in scope at term search target location +//! * `lookup` - Lookup table for types +//! And they return iterator that yields type trees that unify with the `goal` type. + +use std::iter; + +use hir_ty::db::HirDatabase; +use hir_ty::mir::BorrowKind; +use hir_ty::TyBuilder; +use itertools::Itertools; +use rustc_hash::FxHashSet; + +use crate::{ + Adt, AssocItem, Enum, GenericDef, GenericParam, HasVisibility, Impl, ModuleDef, ScopeDef, Type, + TypeParam, Variant, +}; + +use crate::term_search::{Expr, TermSearchConfig}; + +use super::{LookupTable, NewTypesKey, TermSearchCtx}; + +/// # Trivial tactic +/// +/// Attempts to fulfill the goal by trying items in scope +/// Also works as a starting point to move all items in scope to lookup table. +/// +/// # Arguments +/// * `ctx` - Context for the term search +/// * `defs` - Set of items in scope at term search target location +/// * `lookup` - Lookup table for types +/// +/// Returns iterator that yields elements that unify with `goal`. +/// +/// _Note that there is no use of calling this tactic in every iteration as the output does not +/// depend on the current state of `lookup`_ +pub(super) fn trivial<'a, DB: HirDatabase>( + ctx: &'a TermSearchCtx<'a, DB>, + defs: &'a FxHashSet, + lookup: &'a mut LookupTable, +) -> impl Iterator + 'a { + let db = ctx.sema.db; + defs.iter().filter_map(|def| { + let expr = match def { + ScopeDef::ModuleDef(ModuleDef::Const(it)) => Some(Expr::Const(*it)), + ScopeDef::ModuleDef(ModuleDef::Static(it)) => Some(Expr::Static(*it)), + ScopeDef::GenericParam(GenericParam::ConstParam(it)) => Some(Expr::ConstParam(*it)), + ScopeDef::Local(it) => { + if ctx.config.enable_borrowcheck { + let borrowck = db.borrowck(it.parent).ok()?; + + let invalid = borrowck.iter().any(|b| { + b.partially_moved.iter().any(|moved| { + Some(&moved.local) == b.mir_body.binding_locals.get(it.binding_id) + }) || b.borrow_regions.iter().any(|region| { + // Shared borrows are fine + Some(®ion.local) == b.mir_body.binding_locals.get(it.binding_id) + && region.kind != BorrowKind::Shared + }) + }); + + if invalid { + return None; + } + } + + Some(Expr::Local(*it)) + } + _ => None, + }?; + + lookup.mark_exhausted(*def); + + let ty = expr.ty(db); + lookup.insert(ty.clone(), std::iter::once(expr.clone())); + + // Don't suggest local references as they are not valid for return + if matches!(expr, Expr::Local(_)) && ty.contains_reference(db) { + return None; + } + + ty.could_unify_with_deeply(db, &ctx.goal).then_some(expr) + }) +} + +/// # Type constructor tactic +/// +/// Attempts different type constructors for enums and structs in scope +/// +/// Updates lookup by new types reached and returns iterator that yields +/// elements that unify with `goal`. 
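Before the constructor tactic's details, a sketch of the borrow-check filtering used by the trivial tactic above. The real code inspects `partially_moved` and `borrow_regions` from the MIR borrowck results; here that is reduced to two hypothetical booleans per local:

```rust
// Sketch of the local-variable filtering in the trivial tactic: a local is
// only suggested if it has not been partially moved and has no `&mut` borrow.
struct LocalInfo {
    name: &'static str,
    partially_moved: bool,
    mutably_borrowed: bool, // shared borrows are fine, `&mut` disqualifies
}

fn usable(local: &LocalInfo) -> bool {
    !local.partially_moved && !local.mutably_borrowed
}

fn main() {
    let locals = [
        LocalInfo { name: "a", partially_moved: false, mutably_borrowed: false },
        LocalInfo { name: "b", partially_moved: true, mutably_borrowed: false },
        LocalInfo { name: "c", partially_moved: false, mutably_borrowed: true },
    ];
    let suggested: Vec<_> = locals.iter().filter(|l| usable(l)).map(|l| l.name).collect();
    assert_eq!(suggested, ["a"]);
}
```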
+/// +/// # Arguments +/// * `ctx` - Context for the term search +/// * `defs` - Set of items in scope at term search target location +/// * `lookup` - Lookup table for types +pub(super) fn type_constructor<'a, DB: HirDatabase>( + ctx: &'a TermSearchCtx<'a, DB>, + defs: &'a FxHashSet, + lookup: &'a mut LookupTable, +) -> impl Iterator + 'a { + let db = ctx.sema.db; + let module = ctx.scope.module(); + fn variant_helper( + db: &dyn HirDatabase, + lookup: &mut LookupTable, + parent_enum: Enum, + variant: Variant, + goal: &Type, + config: &TermSearchConfig, + ) -> Vec<(Type, Vec)> { + // Ignore unstable + if variant.is_unstable(db) { + return Vec::new(); + } + + let generics = GenericDef::from(variant.parent_enum(db)); + let Some(type_params) = generics + .type_or_const_params(db) + .into_iter() + .map(|it| it.as_type_param(db)) + .collect::>>() + else { + // Ignore enums with const generics + return Vec::new(); + }; + + // We currently do not check lifetime bounds so ignore all types that have something to do + // with them + if !generics.lifetime_params(db).is_empty() { + return Vec::new(); + } + + // Only account for stable type parameters for now, unstable params can be default + // tho, for example in `Box` + if type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) { + return Vec::new(); + } + + let non_default_type_params_len = + type_params.iter().filter(|it| it.default(db).is_none()).count(); + + let generic_params = lookup + .iter_types() + .collect::>() // Force take ownership + .into_iter() + .permutations(non_default_type_params_len); + + generic_params + .filter_map(move |generics| { + // Insert default type params + let mut g = generics.into_iter(); + let generics: Vec<_> = type_params + .iter() + .map(|it| it.default(db).unwrap_or_else(|| g.next().expect("No generic"))) + .collect(); + + let enum_ty = Adt::from(parent_enum).ty_with_args(db, generics.iter().cloned()); + + // Allow types with generics only if they take us straight to goal for + // performance reasons + if !generics.is_empty() && !enum_ty.could_unify_with_deeply(db, goal) { + return None; + } + + // Ignore types that have something to do with lifetimes + if config.enable_borrowcheck && enum_ty.contains_reference(db) { + return None; + } + + // Early exit if some param cannot be filled from lookup + let param_exprs: Vec> = variant + .fields(db) + .into_iter() + .map(|field| lookup.find(db, &field.ty_with_args(db, generics.iter().cloned()))) + .collect::>()?; + + // Note that we need special case for 0 param constructors because of multi cartesian + // product + let variant_exprs: Vec = if param_exprs.is_empty() { + vec![Expr::Variant { variant, generics: generics.clone(), params: Vec::new() }] + } else { + param_exprs + .into_iter() + .multi_cartesian_product() + .map(|params| Expr::Variant { variant, generics: generics.clone(), params }) + .collect() + }; + lookup.insert(enum_ty.clone(), variant_exprs.iter().cloned()); + + Some((enum_ty, variant_exprs)) + }) + .collect() + } + defs.iter() + .filter_map(move |def| match def { + ScopeDef::ModuleDef(ModuleDef::Variant(it)) => { + let variant_exprs = + variant_helper(db, lookup, it.parent_enum(db), *it, &ctx.goal, &ctx.config); + if variant_exprs.is_empty() { + return None; + } + lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Variant(*it))); + Some(variant_exprs) + } + ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Enum(enum_))) => { + let exprs: Vec<(Type, Vec)> = enum_ + .variants(db) + .into_iter() + .flat_map(|it| variant_helper(db, lookup, 
*enum_, it, &ctx.goal, &ctx.config)) + .collect(); + + if !exprs.is_empty() { + lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Enum(*enum_)))); + } + + Some(exprs) + } + ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Struct(it))) => { + // Ignore unstable and not visible + if it.is_unstable(db) || !it.is_visible_from(db, module) { + return None; + } + + let generics = GenericDef::from(*it); + + // Ignore const params for now + let type_params = generics + .type_or_const_params(db) + .into_iter() + .map(|it| it.as_type_param(db)) + .collect::>>()?; + + // We currently do not check lifetime bounds so ignore all types that have something to do + // with them + if !generics.lifetime_params(db).is_empty() { + return None; + } + + // Only account for stable type parameters for now, unstable params can be default + // tho, for example in `Box` + if type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) { + return None; + } + + let non_default_type_params_len = + type_params.iter().filter(|it| it.default(db).is_none()).count(); + + let generic_params = lookup + .iter_types() + .collect::>() // Force take ownership + .into_iter() + .permutations(non_default_type_params_len); + + let exprs = generic_params + .filter_map(|generics| { + // Insert default type params + let mut g = generics.into_iter(); + let generics: Vec<_> = type_params + .iter() + .map(|it| { + it.default(db) + .unwrap_or_else(|| g.next().expect("Missing type param")) + }) + .collect(); + + let struct_ty = Adt::from(*it).ty_with_args(db, generics.iter().cloned()); + + // Allow types with generics only if they take us straight to goal for + // performance reasons + if non_default_type_params_len != 0 + && struct_ty.could_unify_with_deeply(db, &ctx.goal) + { + return None; + } + + // Ignore types that have something to do with lifetimes + if ctx.config.enable_borrowcheck && struct_ty.contains_reference(db) { + return None; + } + let fileds = it.fields(db); + // Check if all fields are visible, otherwise we cannot fill them + if fileds.iter().any(|it| !it.is_visible_from(db, module)) { + return None; + } + + // Early exit if some param cannot be filled from lookup + let param_exprs: Vec> = fileds + .into_iter() + .map(|field| lookup.find(db, &field.ty(db))) + .collect::>()?; + + // Note that we need special case for 0 param constructors because of multi cartesian + // product + let struct_exprs: Vec = if param_exprs.is_empty() { + vec![Expr::Struct { strukt: *it, generics, params: Vec::new() }] + } else { + param_exprs + .into_iter() + .multi_cartesian_product() + .map(|params| Expr::Struct { + strukt: *it, + generics: generics.clone(), + params, + }) + .collect() + }; + + lookup + .mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Struct(*it)))); + lookup.insert(struct_ty.clone(), struct_exprs.iter().cloned()); + + Some((struct_ty, struct_exprs)) + }) + .collect(); + Some(exprs) + } + _ => None, + }) + .flatten() + .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs)) + .flatten() +} + +/// # Free function tactic +/// +/// Attempts to call different functions in scope with parameters from lookup table. +/// Functions that include generics are not used for performance reasons. +/// +/// Updates lookup by new types reached and returns iterator that yields +/// elements that unify with `goal`. 
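The constructor tactics above combine per-field candidates with `multi_cartesian_product`: every field contributes a list of alternative expressions, and the product enumerates all complete argument lists. A runnable miniature, assuming the `itertools` crate and plain strings for expression candidates:

```rust
// How per-field alternatives turn into complete constructor calls.
use itertools::Itertools;

fn main() {
    // Two fields, each with the expressions the lookup table found for them.
    let param_exprs = vec![
        vec!["a", "b"],      // candidates for field 0: i32
        vec!["true", "x.0"], // candidates for field 1: bool
    ];

    let calls: Vec<String> = param_exprs
        .into_iter()
        .multi_cartesian_product()
        .map(|params| format!("Foo::new({})", params.join(", ")))
        .collect();

    assert_eq!(
        calls,
        [
            "Foo::new(a, true)",
            "Foo::new(a, x.0)",
            "Foo::new(b, true)",
            "Foo::new(b, x.0)",
        ]
    );
}
```

This is also why the tactics special-case zero-parameter constructors: the cartesian product of an empty list of factors yields nothing, so the single no-argument expression has to be emitted explicitly.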
+/// +/// # Arguments +/// * `ctx` - Context for the term search +/// * `defs` - Set of items in scope at term search target location +/// * `lookup` - Lookup table for types +pub(super) fn free_function<'a, DB: HirDatabase>( + ctx: &'a TermSearchCtx<'a, DB>, + defs: &'a FxHashSet, + lookup: &'a mut LookupTable, +) -> impl Iterator + 'a { + let db = ctx.sema.db; + let module = ctx.scope.module(); + defs.iter() + .filter_map(move |def| match def { + ScopeDef::ModuleDef(ModuleDef::Function(it)) => { + let generics = GenericDef::from(*it); + + // Ignore const params for now + let type_params = generics + .type_or_const_params(db) + .into_iter() + .map(|it| it.as_type_param(db)) + .collect::>>()?; + + // Ignore lifetimes as we do not check them + if !generics.lifetime_params(db).is_empty() { + return None; + } + + // Only account for stable type parameters for now, unstable params can be default + // tho, for example in `Box` + if type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) { + return None; + } + + let non_default_type_params_len = + type_params.iter().filter(|it| it.default(db).is_none()).count(); + + // Ignore bigger number of generics for now as they kill the performance + if non_default_type_params_len > 0 { + return None; + } + + let generic_params = lookup + .iter_types() + .collect::>() // Force take ownership + .into_iter() + .permutations(non_default_type_params_len); + + let exprs: Vec<_> = generic_params + .filter_map(|generics| { + // Insert default type params + let mut g = generics.into_iter(); + let generics: Vec<_> = type_params + .iter() + .map(|it| match it.default(db) { + Some(ty) => Some(ty), + None => { + let generic = g.next().expect("Missing type param"); + // Filter out generics that do not unify due to trait bounds + it.ty(db).could_unify_with(db, &generic).then_some(generic) + } + }) + .collect::>()?; + + let ret_ty = it.ret_type_with_args(db, generics.iter().cloned()); + // Filter out private and unsafe functions + if !it.is_visible_from(db, module) + || it.is_unsafe_to_call(db) + || it.is_unstable(db) + || ctx.config.enable_borrowcheck && ret_ty.contains_reference(db) + || ret_ty.is_raw_ptr() + { + return None; + } + + // Early exit if some param cannot be filled from lookup + let param_exprs: Vec> = it + .params_without_self_with_args(db, generics.iter().cloned()) + .into_iter() + .map(|field| { + let ty = field.ty(); + match ty.is_mutable_reference() { + true => None, + false => lookup.find_autoref(db, ty), + } + }) + .collect::>()?; + + // Note that we need special case for 0 param constructors because of multi cartesian + // product + let fn_exprs: Vec = if param_exprs.is_empty() { + vec![Expr::Function { func: *it, generics, params: Vec::new() }] + } else { + param_exprs + .into_iter() + .multi_cartesian_product() + .map(|params| Expr::Function { + func: *it, + generics: generics.clone(), + + params, + }) + .collect() + }; + + lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Function(*it))); + lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned()); + Some((ret_ty, fn_exprs)) + }) + .collect(); + Some(exprs) + } + _ => None, + }) + .flatten() + .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs)) + .flatten() +} + +/// # Impl method tactic +/// +/// Attempts to to call methods on types from lookup table. +/// This includes both functions from direct impl blocks as well as functions from traits. 
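Generic parameters are instantiated the same way across these tactics: `k`-permutations of the types currently in the lookup table are tried as candidate substitutions, and each surviving substitution yields a generics-free call. A miniature with strings for types, assuming the `itertools` crate (the real code additionally filters substitutions through `could_unify_with` against the parameter's bounds):

```rust
// Trying every k-permutation of known types as a generic substitution.
use itertools::Itertools;

fn main() {
    let known_types = ["i32", "bool", "String"];
    let non_default_params = 2; // e.g. fn foo<T, U>(..)

    for subst in known_types.iter().permutations(non_default_params) {
        println!("try T = {}, U = {}", subst[0], subst[1]);
    }
    // 3 * 2 = 6 candidate substitutions; each one that survives the bound
    // checks produces a concrete, generics-free call to attempt.
}
```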
+/// Methods defined in impl blocks that are generic and methods that are themselves have +/// generics are ignored for performance reasons. +/// +/// Updates lookup by new types reached and returns iterator that yields +/// elements that unify with `goal`. +/// +/// # Arguments +/// * `ctx` - Context for the term search +/// * `defs` - Set of items in scope at term search target location +/// * `lookup` - Lookup table for types +pub(super) fn impl_method<'a, DB: HirDatabase>( + ctx: &'a TermSearchCtx<'a, DB>, + _defs: &'a FxHashSet, + lookup: &'a mut LookupTable, +) -> impl Iterator + 'a { + let db = ctx.sema.db; + let module = ctx.scope.module(); + lookup + .new_types(NewTypesKey::ImplMethod) + .into_iter() + .flat_map(|ty| { + Impl::all_for_type(db, ty.clone()).into_iter().map(move |imp| (ty.clone(), imp)) + }) + .flat_map(|(ty, imp)| imp.items(db).into_iter().map(move |item| (imp, ty.clone(), item))) + .filter_map(|(imp, ty, it)| match it { + AssocItem::Function(f) => Some((imp, ty, f)), + _ => None, + }) + .filter_map(move |(imp, ty, it)| { + let fn_generics = GenericDef::from(it); + let imp_generics = GenericDef::from(imp); + + // Ignore const params for now + let imp_type_params = imp_generics + .type_or_const_params(db) + .into_iter() + .map(|it| it.as_type_param(db)) + .collect::>>()?; + + // Ignore const params for now + let fn_type_params = fn_generics + .type_or_const_params(db) + .into_iter() + .map(|it| it.as_type_param(db)) + .collect::>>()?; + + // Ignore all functions that have something to do with lifetimes as we don't check them + if !fn_generics.lifetime_params(db).is_empty() { + return None; + } + + // Ignore functions without self param + if !it.has_self_param(db) { + return None; + } + + // Filter out private and unsafe functions + if !it.is_visible_from(db, module) || it.is_unsafe_to_call(db) || it.is_unstable(db) { + return None; + } + + // Only account for stable type parameters for now, unstable params can be default + // tho, for example in `Box` + if imp_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) + || fn_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) + { + return None; + } + + let non_default_type_params_len = imp_type_params + .iter() + .chain(fn_type_params.iter()) + .filter(|it| it.default(db).is_none()) + .count(); + + // Ignore bigger number of generics for now as they kill the performance + if non_default_type_params_len > 0 { + return None; + } + + let generic_params = lookup + .iter_types() + .collect::>() // Force take ownership + .into_iter() + .permutations(non_default_type_params_len); + + let exprs: Vec<_> = generic_params + .filter_map(|generics| { + // Insert default type params + let mut g = generics.into_iter(); + let generics: Vec<_> = imp_type_params + .iter() + .chain(fn_type_params.iter()) + .map(|it| match it.default(db) { + Some(ty) => Some(ty), + None => { + let generic = g.next().expect("Missing type param"); + // Filter out generics that do not unify due to trait bounds + it.ty(db).could_unify_with(db, &generic).then_some(generic) + } + }) + .collect::>()?; + + let ret_ty = it.ret_type_with_args( + db, + ty.type_arguments().chain(generics.iter().cloned()), + ); + // Filter out functions that return references + if ctx.config.enable_borrowcheck && ret_ty.contains_reference(db) + || ret_ty.is_raw_ptr() + { + return None; + } + + // Ignore functions that do not change the type + if ty.could_unify_with_deeply(db, &ret_ty) { + return None; + } + + let self_ty = it + 
.self_param(db) + .expect("No self param") + .ty_with_args(db, ty.type_arguments().chain(generics.iter().cloned())); + + // Ignore functions that have different self type + if !self_ty.autoderef(db).any(|s_ty| ty == s_ty) { + return None; + } + + let target_type_exprs = lookup.find(db, &ty).expect("Type not in lookup"); + + // Early exit if some param cannot be filled from lookup + let param_exprs: Vec> = it + .params_without_self_with_args( + db, + ty.type_arguments().chain(generics.iter().cloned()), + ) + .into_iter() + .map(|field| lookup.find_autoref(db, field.ty())) + .collect::>()?; + + let fn_exprs: Vec = std::iter::once(target_type_exprs) + .chain(param_exprs) + .multi_cartesian_product() + .map(|params| { + let mut params = params.into_iter(); + let target = Box::new(params.next().unwrap()); + Expr::Method { + func: it, + generics: generics.clone(), + target, + params: params.collect(), + } + }) + .collect(); + + lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned()); + Some((ret_ty, fn_exprs)) + }) + .collect(); + Some(exprs) + }) + .flatten() + .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs)) + .flatten() +} + +/// # Struct projection tactic +/// +/// Attempts different struct fields (`foo.bar.baz`) +/// +/// Updates lookup by new types reached and returns iterator that yields +/// elements that unify with `goal`. +/// +/// # Arguments +/// * `ctx` - Context for the term search +/// * `defs` - Set of items in scope at term search target location +/// * `lookup` - Lookup table for types +pub(super) fn struct_projection<'a, DB: HirDatabase>( + ctx: &'a TermSearchCtx<'a, DB>, + _defs: &'a FxHashSet, + lookup: &'a mut LookupTable, +) -> impl Iterator + 'a { + let db = ctx.sema.db; + let module = ctx.scope.module(); + lookup + .new_types(NewTypesKey::StructProjection) + .into_iter() + .map(|ty| (ty.clone(), lookup.find(db, &ty).expect("Expr not in lookup"))) + .flat_map(move |(ty, targets)| { + ty.fields(db).into_iter().filter_map(move |(field, filed_ty)| { + if !field.is_visible_from(db, module) { + return None; + } + let exprs = targets + .clone() + .into_iter() + .map(move |target| Expr::Field { field, expr: Box::new(target) }); + Some((filed_ty, exprs)) + }) + }) + .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs)) + .flatten() +} + +/// # Famous types tactic +/// +/// Attempts different values of well known types such as `true` or `false`. +/// +/// Updates lookup by new types reached and returns iterator that yields +/// elements that unify with `goal`. 
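The struct projection step above expands every known expression whose type has visible fields into one `expr.field` candidate per field. A toy version with string names and a string-keyed field table (illustrative only; the real tactic also checks field visibility from the current module):

```rust
// Toy struct projection: every (expression, type) pair spawns one candidate
// per field of that type, including chained accesses discovered earlier.
use std::collections::HashMap;

fn main() {
    // type -> (field name, field type)
    let mut fields: HashMap<&str, Vec<(&str, &str)>> = HashMap::new();
    fields.insert("Foo", vec![("x", "i32"), ("bar", "Bar")]);
    fields.insert("Bar", vec![("y", "bool")]);

    // Expressions reached so far, with their types.
    let exprs = [("foo", "Foo"), ("foo.bar", "Bar")];

    let projected: Vec<(String, &str)> = exprs
        .iter()
        .flat_map(|(expr, ty)| {
            fields.get(ty).into_iter().flatten().map(move |(field, field_ty)| {
                (format!("{expr}.{field}"), *field_ty)
            })
        })
        .collect();

    assert_eq!(projected.len(), 3); // foo.x, foo.bar, foo.bar.y
}
```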
+/// +/// _Note that there is no point of calling it iteratively as the output is always the same_ +/// +/// # Arguments +/// * `ctx` - Context for the term search +/// * `defs` - Set of items in scope at term search target location +/// * `lookup` - Lookup table for types +pub(super) fn famous_types<'a, DB: HirDatabase>( + ctx: &'a TermSearchCtx<'a, DB>, + _defs: &'a FxHashSet, + lookup: &'a mut LookupTable, +) -> impl Iterator + 'a { + let db = ctx.sema.db; + let module = ctx.scope.module(); + [ + Expr::FamousType { ty: Type::new(db, module.id, TyBuilder::bool()), value: "true" }, + Expr::FamousType { ty: Type::new(db, module.id, TyBuilder::bool()), value: "false" }, + Expr::FamousType { ty: Type::new(db, module.id, TyBuilder::unit()), value: "()" }, + ] + .into_iter() + .map(|exprs| { + lookup.insert(exprs.ty(db), std::iter::once(exprs.clone())); + exprs + }) + .filter(|expr| expr.ty(db).could_unify_with_deeply(db, &ctx.goal)) +} + +/// # Impl static method (without self type) tactic +/// +/// Attempts different functions from impl blocks that take no self parameter. +/// +/// Updates lookup by new types reached and returns iterator that yields +/// elements that unify with `goal`. +/// +/// # Arguments +/// * `ctx` - Context for the term search +/// * `defs` - Set of items in scope at term search target location +/// * `lookup` - Lookup table for types +pub(super) fn impl_static_method<'a, DB: HirDatabase>( + ctx: &'a TermSearchCtx<'a, DB>, + _defs: &'a FxHashSet, + lookup: &'a mut LookupTable, +) -> impl Iterator + 'a { + let db = ctx.sema.db; + let module = ctx.scope.module(); + lookup + .take_types_wishlist() + .into_iter() + .chain(iter::once(ctx.goal.clone())) + .flat_map(|ty| { + Impl::all_for_type(db, ty.clone()).into_iter().map(move |imp| (ty.clone(), imp)) + }) + .filter(|(_, imp)| !imp.is_unsafe(db)) + .flat_map(|(ty, imp)| imp.items(db).into_iter().map(move |item| (imp, ty.clone(), item))) + .filter_map(|(imp, ty, it)| match it { + AssocItem::Function(f) => Some((imp, ty, f)), + _ => None, + }) + .filter_map(move |(imp, ty, it)| { + let fn_generics = GenericDef::from(it); + let imp_generics = GenericDef::from(imp); + + // Ignore const params for now + let imp_type_params = imp_generics + .type_or_const_params(db) + .into_iter() + .map(|it| it.as_type_param(db)) + .collect::>>()?; + + // Ignore const params for now + let fn_type_params = fn_generics + .type_or_const_params(db) + .into_iter() + .map(|it| it.as_type_param(db)) + .collect::>>()?; + + // Ignore all functions that have something to do with lifetimes as we don't check them + if !fn_generics.lifetime_params(db).is_empty() + || !imp_generics.lifetime_params(db).is_empty() + { + return None; + } + + // Ignore functions with self param + if it.has_self_param(db) { + return None; + } + + // Filter out private and unsafe functions + if !it.is_visible_from(db, module) || it.is_unsafe_to_call(db) || it.is_unstable(db) { + return None; + } + + // Only account for stable type parameters for now, unstable params can be default + // tho, for example in `Box` + if imp_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) + || fn_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) + { + return None; + } + + let non_default_type_params_len = imp_type_params + .iter() + .chain(fn_type_params.iter()) + .filter(|it| it.default(db).is_none()) + .count(); + + // Ignore bigger number of generics for now as they kill the performance + if non_default_type_params_len > 1 { + return None; + } + 
+ let generic_params = lookup + .iter_types() + .collect::>() // Force take ownership + .into_iter() + .permutations(non_default_type_params_len); + + let exprs: Vec<_> = generic_params + .filter_map(|generics| { + // Insert default type params + let mut g = generics.into_iter(); + let generics: Vec<_> = imp_type_params + .iter() + .chain(fn_type_params.iter()) + .map(|it| match it.default(db) { + Some(ty) => Some(ty), + None => { + let generic = g.next().expect("Missing type param"); + it.trait_bounds(db) + .into_iter() + .all(|bound| generic.impls_trait(db, bound, &[])); + // Filter out generics that do not unify due to trait bounds + it.ty(db).could_unify_with(db, &generic).then_some(generic) + } + }) + .collect::>()?; + + let ret_ty = it.ret_type_with_args( + db, + ty.type_arguments().chain(generics.iter().cloned()), + ); + // Filter out functions that return references + if ctx.config.enable_borrowcheck && ret_ty.contains_reference(db) + || ret_ty.is_raw_ptr() + { + return None; + } + + // Ignore functions that do not change the type + // if ty.could_unify_with_deeply(db, &ret_ty) { + // return None; + // } + + // Early exit if some param cannot be filled from lookup + let param_exprs: Vec> = it + .params_without_self_with_args( + db, + ty.type_arguments().chain(generics.iter().cloned()), + ) + .into_iter() + .map(|field| lookup.find_autoref(db, field.ty())) + .collect::>()?; + + // Note that we need special case for 0 param constructors because of multi cartesian + // product + let fn_exprs: Vec = if param_exprs.is_empty() { + vec![Expr::Function { func: it, generics, params: Vec::new() }] + } else { + param_exprs + .into_iter() + .multi_cartesian_product() + .map(|params| Expr::Function { + func: it, + generics: generics.clone(), + params, + }) + .collect() + }; + + lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned()); + Some((ret_ty, fn_exprs)) + }) + .collect(); + Some(exprs) + }) + .flatten() + .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs)) + .flatten() +} diff --git a/crates/ide-assists/src/handlers/fix_visibility.rs b/crates/ide-assists/src/handlers/fix_visibility.rs index 204e796fa2c0d..589591a6777ee 100644 --- a/crates/ide-assists/src/handlers/fix_visibility.rs +++ b/crates/ide-assists/src/handlers/fix_visibility.rs @@ -79,7 +79,7 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext<'_>) edit.edit_file(target_file); let vis_owner = edit.make_mut(vis_owner); - vis_owner.set_visibility(missing_visibility.clone_for_update()); + vis_owner.set_visibility(Some(missing_visibility.clone_for_update())); if let Some((cap, vis)) = ctx.config.snippet_cap.zip(vis_owner.visibility()) { edit.add_tabstop_before(cap, vis); @@ -131,7 +131,7 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_> edit.edit_file(target_file); let vis_owner = edit.make_mut(vis_owner); - vis_owner.set_visibility(missing_visibility.clone_for_update()); + vis_owner.set_visibility(Some(missing_visibility.clone_for_update())); if let Some((cap, vis)) = ctx.config.snippet_cap.zip(vis_owner.visibility()) { edit.add_tabstop_before(cap, vis); diff --git a/crates/ide-assists/src/handlers/generate_trait_from_impl.rs b/crates/ide-assists/src/handlers/generate_trait_from_impl.rs index 24094de22c8d1..5f7350bc2812b 100644 --- a/crates/ide-assists/src/handlers/generate_trait_from_impl.rs +++ b/crates/ide-assists/src/handlers/generate_trait_from_impl.rs @@ -1,8 +1,13 @@ use crate::assist_context::{AssistContext, Assists}; use 
ide_db::assists::AssistId; use syntax::{ - ast::{self, edit::IndentLevel, make, HasGenericParams, HasVisibility}, - ted, AstNode, SyntaxKind, + ast::{ + self, + edit_in_place::{HasVisibilityEdit, Indent}, + make, HasGenericParams, HasName, + }, + ted::{self, Position}, + AstNode, SyntaxKind, T, }; // NOTES : @@ -44,7 +49,7 @@ use syntax::{ // }; // } // -// trait ${0:TraitName} { +// trait ${0:NewTrait} { // // Used as an associated constant. // const CONST_ASSOC: usize = N * 4; // @@ -53,7 +58,7 @@ use syntax::{ // const_maker! {i32, 7} // } // -// impl ${0:TraitName} for Foo { +// impl ${0:NewTrait} for Foo { // // Used as an associated constant. // const CONST_ASSOC: usize = N * 4; // @@ -94,8 +99,10 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_ "Generate trait from impl", impl_ast.syntax().text_range(), |builder| { + let impl_ast = builder.make_mut(impl_ast); let trait_items = assoc_items.clone_for_update(); - let impl_items = assoc_items.clone_for_update(); + let impl_items = builder.make_mut(assoc_items); + let impl_name = builder.make_mut(impl_name); trait_items.assoc_items().for_each(|item| { strip_body(&item); @@ -112,46 +119,42 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_ impl_ast.generic_param_list(), impl_ast.where_clause(), trait_items, - ); + ) + .clone_for_update(); + + let trait_name = trait_ast.name().expect("new trait should have a name"); + let trait_name_ref = make::name_ref(&trait_name.to_string()).clone_for_update(); // Change `impl Foo` to `impl NewTrait for Foo` - let arg_list = if let Some(genpars) = impl_ast.generic_param_list() { - genpars.to_generic_args().to_string() - } else { - "".to_owned() - }; - - if let Some(snippet_cap) = ctx.config.snippet_cap { - builder.replace_snippet( - snippet_cap, - impl_name.syntax().text_range(), - format!("${{0:TraitName}}{} for {}", arg_list, impl_name), - ); + let mut elements = vec![ + trait_name_ref.syntax().clone().into(), + make::tokens::single_space().into(), + make::token(T![for]).into(), + ]; + + if let Some(params) = impl_ast.generic_param_list() { + let gen_args = ¶ms.to_generic_args().clone_for_update(); + elements.insert(1, gen_args.syntax().clone().into()); + } - // Insert trait before TraitImpl - builder.insert_snippet( - snippet_cap, - impl_ast.syntax().text_range().start(), - format!( - "{}\n\n{}", - trait_ast.to_string().replace("NewTrait", "${0:TraitName}"), - IndentLevel::from_node(impl_ast.syntax()) - ), - ); - } else { - builder.replace( - impl_name.syntax().text_range(), - format!("NewTrait{} for {}", arg_list, impl_name), - ); + ted::insert_all(Position::before(impl_name.syntax()), elements); + + // Insert trait before TraitImpl + ted::insert_all_raw( + Position::before(impl_ast.syntax()), + vec![ + trait_ast.syntax().clone().into(), + make::tokens::whitespace(&format!("\n\n{}", impl_ast.indent_level())).into(), + ], + ); - // Insert trait before TraitImpl - builder.insert( - impl_ast.syntax().text_range().start(), - format!("{}\n\n{}", trait_ast, IndentLevel::from_node(impl_ast.syntax())), + // Link the trait name & trait ref names together as a placeholder snippet group + if let Some(cap) = ctx.config.snippet_cap { + builder.add_placeholder_snippet_group( + cap, + vec![trait_name.syntax().clone(), trait_name_ref.syntax().clone()], ); } - - builder.replace(assoc_items.syntax().text_range(), impl_items.to_string()); }, ); @@ -160,23 +163,8 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_ /// 
`E0449` Trait items always share the visibility of their trait fn remove_items_visibility(item: &ast::AssocItem) { - match item { - ast::AssocItem::Const(c) => { - if let Some(vis) = c.visibility() { - ted::remove(vis.syntax()); - } - } - ast::AssocItem::Fn(f) => { - if let Some(vis) = f.visibility() { - ted::remove(vis.syntax()); - } - } - ast::AssocItem::TypeAlias(t) => { - if let Some(vis) = t.visibility() { - ted::remove(vis.syntax()); - } - } - _ => (), + if let Some(has_vis) = ast::AnyHasVisibility::cast(item.syntax().clone()) { + has_vis.set_visibility(None); } } @@ -404,12 +392,12 @@ impl F$0oo { r#" struct Foo([i32; N]); -trait ${0:TraitName} { +trait ${0:NewTrait} { // Used as an associated constant. const CONST: usize = N * 4; } -impl ${0:TraitName} for Foo { +impl ${0:NewTrait} for Foo { // Used as an associated constant. const CONST: usize = N * 4; } diff --git a/crates/ide-assists/src/handlers/term_search.rs b/crates/ide-assists/src/handlers/term_search.rs new file mode 100644 index 0000000000000..51a1a406f316d --- /dev/null +++ b/crates/ide-assists/src/handlers/term_search.rs @@ -0,0 +1,253 @@ +//! Term search assist +use hir::term_search::TermSearchCtx; +use ide_db::{ + assists::{AssistId, AssistKind, GroupLabel}, + famous_defs::FamousDefs, +}; + +use itertools::Itertools; +use syntax::{ast, AstNode}; + +use crate::assist_context::{AssistContext, Assists}; + +pub(crate) fn term_search(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let unexpanded = ctx.find_node_at_offset::()?; + let syntax = unexpanded.syntax(); + let goal_range = syntax.text_range(); + + let parent = syntax.parent()?; + let scope = ctx.sema.scope(&parent)?; + + let macro_call = ctx.sema.resolve_macro_call(&unexpanded)?; + + let famous_defs = FamousDefs(&ctx.sema, scope.krate()); + let std_todo = famous_defs.core_macros_todo()?; + let std_unimplemented = famous_defs.core_macros_unimplemented()?; + + if macro_call != std_todo && macro_call != std_unimplemented { + return None; + } + + let target_ty = ctx.sema.type_of_expr(&ast::Expr::cast(parent.clone())?)?.adjusted(); + + let term_search_ctx = TermSearchCtx { + sema: &ctx.sema, + scope: &scope, + goal: target_ty, + config: Default::default(), + }; + let paths = hir::term_search::term_search(&term_search_ctx); + + if paths.is_empty() { + return None; + } + + let mut formatter = |_: &hir::Type| String::from("todo!()"); + + let paths = paths + .into_iter() + .filter_map(|path| { + path.gen_source_code( + &scope, + &mut formatter, + ctx.config.prefer_no_std, + ctx.config.prefer_prelude, + ) + .ok() + }) + .unique(); + + for code in paths { + acc.add_group( + &GroupLabel(String::from("Term search")), + AssistId("term_search", AssistKind::Generate), + format!("Replace todo!() with {code}"), + goal_range, + |builder| { + builder.replace(goal_range, code); + }, + ); + } + + Some(()) +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn test_complete_local() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +fn f() { let a: u128 = 1; let b: u128 = todo$0!() }"#, + r#"fn f() { let a: u128 = 1; let b: u128 = a }"#, + ) + } + + #[test] + fn test_complete_todo_with_msg() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +fn f() { let a: u128 = 1; let b: u128 = todo$0!("asd") }"#, + r#"fn f() { let a: u128 = 1; let b: u128 = a }"#, + ) + } + + #[test] + fn test_complete_unimplemented_with_msg() { + check_assist( + term_search, + r#"//- 
minicore: todo, unimplemented +fn f() { let a: u128 = 1; let b: u128 = todo$0!("asd") }"#, + r#"fn f() { let a: u128 = 1; let b: u128 = a }"#, + ) + } + + #[test] + fn test_complete_unimplemented() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +fn f() { let a: u128 = 1; let b: u128 = todo$0!("asd") }"#, + r#"fn f() { let a: u128 = 1; let b: u128 = a }"#, + ) + } + + #[test] + fn test_complete_struct_field() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +struct A { pub x: i32, y: bool } +fn f() { let a = A { x: 1, y: true }; let b: i32 = todo$0!(); }"#, + r#"struct A { pub x: i32, y: bool } +fn f() { let a = A { x: 1, y: true }; let b: i32 = a.x; }"#, + ) + } + + #[test] + fn test_enum_with_generics() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented, option +fn f() { let a: i32 = 1; let b: Option = todo$0!(); }"#, + r#"fn f() { let a: i32 = 1; let b: Option = None; }"#, + ) + } + + #[test] + fn test_enum_with_generics2() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +enum Option { None, Some(T) } +fn f() { let a: i32 = 1; let b: Option = todo$0!(); }"#, + r#"enum Option { None, Some(T) } +fn f() { let a: i32 = 1; let b: Option = Option::Some(a); }"#, + ) + } + + #[test] + fn test_enum_with_generics3() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +enum Option { None, Some(T) } +fn f() { let a: Option = Option::None; let b: Option> = todo$0!(); }"#, + r#"enum Option { None, Some(T) } +fn f() { let a: Option = Option::None; let b: Option> = Option::Some(a); }"#, + ) + } + + #[test] + fn test_enum_with_generics4() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +enum Foo { Foo(T) } +fn f() { let a = 0; let b: Foo = todo$0!(); }"#, + r#"enum Foo { Foo(T) } +fn f() { let a = 0; let b: Foo = Foo::Foo(a); }"#, + ); + + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +enum Foo { Foo(T) } +fn f() { let a: Foo = Foo::Foo(0); let b: Foo = todo$0!(); }"#, + r#"enum Foo { Foo(T) } +fn f() { let a: Foo = Foo::Foo(0); let b: Foo = a; }"#, + ) + } + + #[test] + fn test_newtype() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +struct Foo(i32); +fn f() { let a: i32 = 1; let b: Foo = todo$0!(); }"#, + r#"struct Foo(i32); +fn f() { let a: i32 = 1; let b: Foo = Foo(a); }"#, + ) + } + + #[test] + fn test_shadowing() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +fn f() { let a: i32 = 1; let b: i32 = 2; let a: u32 = 0; let c: i32 = todo$0!(); }"#, + r#"fn f() { let a: i32 = 1; let b: i32 = 2; let a: u32 = 0; let c: i32 = b; }"#, + ) + } + + #[test] + fn test_famous_bool() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +fn f() { let a: bool = todo$0!(); }"#, + r#"fn f() { let a: bool = false; }"#, + ) + } + + #[test] + fn test_fn_with_reference_types() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +fn f(a: &i32) -> f32 { a as f32 } +fn g() { let a = 1; let b: f32 = todo$0!(); }"#, + r#"fn f(a: &i32) -> f32 { a as f32 } +fn g() { let a = 1; let b: f32 = f(&a); }"#, + ) + } + + #[test] + fn test_fn_with_reference_types2() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +fn f(a: &i32) -> f32 { a as f32 } +fn g() { let a = &1; let b: f32 = todo$0!(); }"#, + r#"fn f(a: &i32) -> f32 { a as f32 } +fn g() { let a = &1; let b: f32 = f(a); }"#, + ) + } + + #[test] + fn test_fn_with_reference_types3() { + check_assist_not_applicable( 
+            term_search,
+            r#"//- minicore: todo, unimplemented
+            fn f(a: &i32) -> f32 { a as f32 }
+            fn g() { let a = &mut 1; let b: f32 = todo$0!(); }"#,
+        )
+    }
+}
diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs
index 2fec104323dc7..dcc89014b956b 100644
--- a/crates/ide-assists/src/lib.rs
+++ b/crates/ide-assists/src/lib.rs
@@ -60,11 +60,6 @@
 #![warn(rust_2018_idioms, unused_lifetimes)]
 
-#[allow(unused)]
-macro_rules! eprintln {
-    ($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
-}
-
 mod assist_config;
 mod assist_context;
 #[cfg(test)]
@@ -210,6 +205,7 @@ mod handlers {
     mod replace_turbofish_with_explicit_type;
     mod sort_items;
     mod split_import;
+    mod term_search;
     mod toggle_ignore;
     mod unmerge_match_arm;
     mod unmerge_use;
@@ -332,6 +328,7 @@ mod handlers {
             replace_arith_op::replace_arith_with_saturating,
             sort_items::sort_items,
             split_import::split_import,
+            term_search::term_search,
             toggle_ignore::toggle_ignore,
             unmerge_match_arm::unmerge_match_arm,
             unmerge_use::unmerge_use,
diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs
index 8ad735d0ae801..268ba3225b668 100644
--- a/crates/ide-assists/src/tests/generated.rs
+++ b/crates/ide-assists/src/tests/generated.rs
@@ -1665,7 +1665,7 @@ macro_rules! const_maker {
     };
 }
 
-trait ${0:TraitName}<const N: usize> {
+trait ${0:NewTrait}<const N: usize> {
     // Used as an associated constant.
     const CONST_ASSOC: usize = N * 4;
 
@@ -1674,7 +1674,7 @@
     const_maker! {i32, 7}
 }
 
-impl ${0:TraitName}<const N: usize> for Foo<N> {
+impl ${0:NewTrait}<const N: usize> for Foo<N> {
     // Used as an associated constant.
     const CONST_ASSOC: usize = N * 4;
 
diff --git a/crates/ide-completion/src/completions.rs b/crates/ide-completion/src/completions.rs
index ba3c0cf3fd60e..1ea7220960d25 100644
--- a/crates/ide-completion/src/completions.rs
+++ b/crates/ide-completion/src/completions.rs
@@ -40,7 +40,8 @@ use crate::{
         literal::{render_struct_literal, render_variant_lit},
         macro_::render_macro,
         pattern::{render_struct_pat, render_variant_pat},
-        render_field, render_path_resolution, render_pattern_resolution, render_tuple_field,
+        render_expr, render_field, render_path_resolution, render_pattern_resolution,
+        render_tuple_field,
         type_alias::{render_type_alias, render_type_alias_with_eq},
         union_literal::render_union_literal,
         RenderContext,
@@ -157,6 +158,12 @@ impl Completions {
         item.add_to(self, ctx.db);
     }
 
+    pub(crate) fn add_expr(&mut self, ctx: &CompletionContext<'_>, expr: &hir::term_search::Expr) {
+        if let Some(item) = render_expr(ctx, expr) {
+            item.add_to(self, ctx.db)
+        }
+    }
+
     pub(crate) fn add_crate_roots(
         &mut self,
         ctx: &CompletionContext<'_>,
@@ -694,6 +701,7 @@ pub(super) fn complete_name_ref(
         match &path_ctx.kind {
             PathKind::Expr { expr_ctx } => {
                 expr::complete_expr_path(acc, ctx, path_ctx, expr_ctx);
+                expr::complete_expr(acc, ctx);
 
                 dot::complete_undotted_self(acc, ctx, path_ctx, expr_ctx);
                 item_list::complete_item_list_in_expr(acc, ctx, path_ctx, expr_ctx);
diff --git a/crates/ide-completion/src/completions/expr.rs b/crates/ide-completion/src/completions/expr.rs
index 77fd5dd98b8d3..802e9bc3a8077 100644
--- a/crates/ide-completion/src/completions/expr.rs
+++ b/crates/ide-completion/src/completions/expr.rs
@@ -328,3 +328,59 @@ pub(crate) fn complete_expr_path(
         }
     }
 }
+
+pub(crate) fn complete_expr(acc: &mut Completions, ctx: &CompletionContext<'_>) {
+    let _p = tracing::span!(tracing::Level::INFO, "complete_expr").entered();
+
+    if !ctx.config.enable_term_search {
+        return;
+    }
+
+    if !ctx.qualifier_ctx.none() {
+        return;
+    }
+
+    if let Some(ty) =
&ctx.expected_type { + // Ignore unit types as they are not very interesting + if ty.is_unit() || ty.is_unknown() { + return; + } + + let term_search_ctx = hir::term_search::TermSearchCtx { + sema: &ctx.sema, + scope: &ctx.scope, + goal: ty.clone(), + config: hir::term_search::TermSearchConfig { + enable_borrowcheck: false, + many_alternatives_threshold: 1, + depth: 6, + }, + }; + let exprs = hir::term_search::term_search(&term_search_ctx); + for expr in exprs { + // Expand method calls + match expr { + hir::term_search::Expr::Method { func, generics, target, params } + if target.is_many() => + { + let target_ty = target.ty(ctx.db); + let term_search_ctx = + hir::term_search::TermSearchCtx { goal: target_ty, ..term_search_ctx }; + let target_exprs = hir::term_search::term_search(&term_search_ctx); + + for expr in target_exprs { + let expanded_expr = hir::term_search::Expr::Method { + func, + generics: generics.clone(), + target: Box::new(expr), + params: params.clone(), + }; + + acc.add_expr(ctx, &expanded_expr) + } + } + _ => acc.add_expr(ctx, &expr), + } + } + } +} diff --git a/crates/ide-completion/src/completions/flyimport.rs b/crates/ide-completion/src/completions/flyimport.rs index b9f91d34b2c2b..3bc329ecd748f 100644 --- a/crates/ide-completion/src/completions/flyimport.rs +++ b/crates/ide-completion/src/completions/flyimport.rs @@ -238,6 +238,8 @@ fn import_on_the_fly( (PathKind::Type { location }, ItemInNs::Types(ty)) => { if matches!(location, TypeLocation::TypeBound) { matches!(ty, ModuleDef::Trait(_)) + } else if matches!(location, TypeLocation::ImplTrait) { + matches!(ty, ModuleDef::Trait(_) | ModuleDef::Module(_)) } else { true } diff --git a/crates/ide-completion/src/completions/type.rs b/crates/ide-completion/src/completions/type.rs index e6a4335c3fec8..e4678089462a3 100644 --- a/crates/ide-completion/src/completions/type.rs +++ b/crates/ide-completion/src/completions/type.rs @@ -31,6 +31,11 @@ pub(crate) fn complete_type_path( ScopeDef::ImplSelfType(_) => location.complete_self_type(), // Don't suggest attribute macros and derives. 
            ScopeDef::ModuleDef(Macro(mac)) => mac.is_fn_like(ctx.db),
+            ScopeDef::ModuleDef(Trait(_) | Module(_))
+                if matches!(location, TypeLocation::ImplTrait) =>
+            {
+                true
+            }
             // Type things are fine
             ScopeDef::ModuleDef(
                 BuiltinType(_) | Adt(_) | Module(_) | Trait(_) | TraitAlias(_) | TypeAlias(_),
@@ -184,6 +189,21 @@ pub(crate) fn complete_type_path(
                 }
             }
         }
+        TypeLocation::ImplTrait => {
+            acc.add_nameref_keywords_with_colon(ctx);
+            ctx.process_all_names(&mut |name, def, doc_aliases| {
+                let is_trait_or_module = matches!(
+                    def,
+                    ScopeDef::ModuleDef(
+                        hir::ModuleDef::Module(_) | hir::ModuleDef::Trait(_)
+                    )
+                );
+                if is_trait_or_module {
+                    acc.add_path_resolution(ctx, path_ctx, name, def, doc_aliases);
+                }
+            });
+            return;
+        }
         _ => {}
     };
diff --git a/crates/ide-completion/src/config.rs b/crates/ide-completion/src/config.rs
index ed5ddde8fbfe9..04563fb0f469b 100644
--- a/crates/ide-completion/src/config.rs
+++ b/crates/ide-completion/src/config.rs
@@ -14,6 +14,7 @@ pub struct CompletionConfig {
     pub enable_imports_on_the_fly: bool,
     pub enable_self_on_the_fly: bool,
     pub enable_private_editable: bool,
+    pub enable_term_search: bool,
     pub full_function_signatures: bool,
     pub callable: Option<CallableSnippets>,
     pub snippet_cap: Option<SnippetCap>,
diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs
index 2a0004f60b820..aa22155feffe2 100644
--- a/crates/ide-completion/src/context.rs
+++ b/crates/ide-completion/src/context.rs
@@ -202,6 +202,7 @@ impl TypeLocation {
             }
             TypeLocation::AssocConstEq => false,
             TypeLocation::AssocTypeEq => true,
+            TypeLocation::ImplTrait => false,
             _ => true,
         }
     }
@@ -716,7 +717,7 @@ impl<'a> CompletionContext<'a> {
         let krate = scope.krate();
         let module = scope.module();
 
-        let toolchain = db.crate_graph()[krate.into()].channel();
+        let toolchain = db.toolchain_channel(krate.into());
         // `toolchain == None` means we're in some detached files. Since we have no information on
         // the toolchain being used, let's just allow unstable items to be listed.
         let is_nightly = matches!(toolchain, Some(base_db::ReleaseChannel::Nightly) | None);
diff --git a/crates/ide-completion/src/item.rs b/crates/ide-completion/src/item.rs
index 8552a20392abf..c2c0641961a6a 100644
--- a/crates/ide-completion/src/item.rs
+++ b/crates/ide-completion/src/item.rs
@@ -166,6 +166,8 @@ pub struct CompletionRelevance {
     pub postfix_match: Option<CompletionRelevancePostfixMatch>,
     /// This is set for type inference results
     pub is_definite: bool,
+    /// This is set for items that are functions (associated or methods)
+    pub function: Option<CompletionRelevanceFn>,
 }
 
 #[derive(Debug, Clone, Copy, Eq, PartialEq)]
@@ -207,6 +209,24 @@ pub enum CompletionRelevancePostfixMatch {
     Exact,
 }
 
+#[derive(Debug, Clone, Copy, Eq, PartialEq)]
+pub struct CompletionRelevanceFn {
+    pub has_params: bool,
+    pub has_self_param: bool,
+    pub return_type: CompletionRelevanceReturnType,
+}
+
+#[derive(Debug, Clone, Copy, Eq, PartialEq)]
+pub enum CompletionRelevanceReturnType {
+    Other,
+    /// Returns the Self type of the impl/trait
+    DirectConstructor,
+    /// Returns something that indirectly constructs the `Self` type of the impl/trait, e.g. `Result<Self>`, `Option<Self>`
+    Constructor,
+    /// Returns a possible builder for the type
+    Builder,
+}
+
 impl CompletionRelevance {
     /// Provides a relevance score. Higher values are more relevant.
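    /// (Editor's note, illustrative rather than normative, based on the
    /// `function` bonus added below: a no-argument associated function
    /// returning `Self` scores 15 + 1 = 16 and a `*Builder`-returning one
    /// 10 + 1 = 11, while a `&self` method whose return type merely wraps
    /// `Self` is clamped to 1, so direct constructors sort first.)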
    ///
@@ -231,6 +251,7 @@ impl CompletionRelevance {
             postfix_match,
             is_definite,
             is_item_from_notable_trait,
+            function,
         } = self;
 
         // lower rank private things
@@ -275,6 +296,33 @@ impl CompletionRelevance {
         if is_definite {
             score += 10;
         }
+
+        score += function
+            .map(|asf| {
+                let mut fn_score = match asf.return_type {
+                    CompletionRelevanceReturnType::DirectConstructor => 15,
+                    CompletionRelevanceReturnType::Builder => 10,
+                    CompletionRelevanceReturnType::Constructor => 5,
+                    CompletionRelevanceReturnType::Other => 0,
+                };
+
+                // When a fn is bumped due to return type:
+                // Bump Constructor or Builder methods with no arguments,
+                // over those with self arguments
+                if fn_score > 0 {
+                    if !asf.has_params {
+                        // bump associated functions
+                        fn_score += 1;
+                    } else if asf.has_self_param {
+                        // downgrade methods (below Constructor)
+                        fn_score = 1;
+                    }
+                }
+
+                fn_score
+            })
+            .unwrap_or_default();
+
         score
     }
 
@@ -297,6 +345,7 @@ pub enum CompletionItemKind {
     Method,
     Snippet,
     UnresolvedReference,
+    Expression,
 }
 
 impl_from!(SymbolKind for CompletionItemKind);
@@ -341,6 +390,7 @@ impl CompletionItemKind {
             CompletionItemKind::Method => "me",
             CompletionItemKind::Snippet => "sn",
             CompletionItemKind::UnresolvedReference => "??",
+            CompletionItemKind::Expression => "ex",
         }
     }
 }
diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs
index 2ed080a834790..3f374b307fbe3 100644
--- a/crates/ide-completion/src/render.rs
+++ b/crates/ide-completion/src/render.rs
@@ -17,7 +17,7 @@ use ide_db::{
     imports::import_assets::LocatedImport,
     RootDatabase, SnippetCap, SymbolKind,
 };
-use syntax::{format_smolstr, AstNode, SmolStr, SyntaxKind, TextRange};
+use syntax::{ast, format_smolstr, AstNode, SmolStr, SyntaxKind, TextRange};
 use text_edit::TextEdit;
 
 use crate::{
@@ -272,6 +272,82 @@ pub(crate) fn render_resolution_with_import_pat(
     Some(render_resolution_pat(ctx, pattern_ctx, local_name, Some(import_edit), resolution))
 }
 
+pub(crate) fn render_expr(
+    ctx: &CompletionContext<'_>,
+    expr: &hir::term_search::Expr,
+) -> Option<Builder> {
+    let mut i = 1;
+    let mut snippet_formatter = |ty: &hir::Type| {
+        let arg_name = ty
+            .as_adt()
+            .and_then(|adt| adt.name(ctx.db).as_text())
+            .map(|s| stdx::to_lower_snake_case(s.as_str()))
+            .unwrap_or_else(|| String::from("_"));
+        let res = format!("${{{i}:{arg_name}}}");
+        i += 1;
+        res
+    };
+
+    let mut label_formatter = |ty: &hir::Type| {
+        ty.as_adt()
+            .and_then(|adt| adt.name(ctx.db).as_text())
+            .map(|s| stdx::to_lower_snake_case(s.as_str()))
+            .unwrap_or_else(|| String::from("..."))
+    };
+
+    let label = expr
+        .gen_source_code(
+            &ctx.scope,
+            &mut label_formatter,
+            ctx.config.prefer_no_std,
+            ctx.config.prefer_prelude,
+        )
+        .ok()?;
+
+    let source_range = match ctx.original_token.parent() {
+        Some(node) => match node.ancestors().find_map(ast::Path::cast) {
+            Some(path) => path.syntax().text_range(),
+            None => node.text_range(),
+        },
+        None => ctx.source_range(),
+    };
+
+    let mut item = CompletionItem::new(CompletionItemKind::Expression, source_range, label.clone());
+
+    let snippet = format!(
+        "{}$0",
+        expr.gen_source_code(
+            &ctx.scope,
+            &mut snippet_formatter,
+            ctx.config.prefer_no_std,
+            ctx.config.prefer_prelude
+        )
+        .ok()?
+ ); + let edit = TextEdit::replace(source_range, snippet); + item.snippet_edit(ctx.config.snippet_cap?, edit); + item.documentation(Documentation::new(String::from("Autogenerated expression by term search"))); + item.set_relevance(crate::CompletionRelevance { + type_match: compute_type_match(ctx, &expr.ty(ctx.db)), + ..Default::default() + }); + for trait_ in expr.traits_used(ctx.db) { + let trait_item = hir::ItemInNs::from(hir::ModuleDef::from(trait_)); + let Some(path) = ctx.module.find_use_path( + ctx.db, + trait_item, + ctx.config.prefer_no_std, + ctx.config.prefer_prelude, + ) else { + continue; + }; + + item.add_import(LocatedImport::new(path, trait_item, trait_item)); + } + + Some(item) +} + fn scope_def_to_name( resolution: ScopeDef, ctx: &RenderContext<'_>, @@ -599,6 +675,16 @@ mod tests { expect.assert_debug_eq(&actual); } + #[track_caller] + fn check_function_relevance(ra_fixture: &str, expect: Expect) { + let actual: Vec<_> = do_completion(ra_fixture, CompletionItemKind::Method) + .into_iter() + .map(|item| (item.detail.unwrap_or_default(), item.relevance.function)) + .collect(); + + expect.assert_debug_eq(&actual); + } + #[track_caller] fn check_relevance_for_kinds(ra_fixture: &str, kinds: &[CompletionItemKind], expect: Expect) { let mut actual = get_all_items(TEST_CONFIG, ra_fixture, None); @@ -961,6 +1047,7 @@ fn func(input: Struct) { } st Self [type] sp Self [type] st Struct [type] + ex Struct [type] lc self [local] fn func(…) [] me self.test() [] @@ -985,6 +1072,9 @@ fn main() { "#, expect![[r#" lc input [type+name+local] + ex input [type] + ex true [type] + ex false [type] lc inputbad [local] fn main() [] fn test(…) [] @@ -1174,6 +1264,7 @@ fn main() { let _: m::Spam = S$0 } is_private_editable: false, postfix_match: None, is_definite: false, + function: None, }, trigger_call_info: true, }, @@ -1201,6 +1292,7 @@ fn main() { let _: m::Spam = S$0 } is_private_editable: false, postfix_match: None, is_definite: false, + function: None, }, trigger_call_info: true, }, @@ -1280,6 +1372,7 @@ fn foo() { A { the$0 } } is_private_editable: false, postfix_match: None, is_definite: false, + function: None, }, }, ] @@ -1313,6 +1406,26 @@ impl S { documentation: Documentation( "Method docs", ), + relevance: CompletionRelevance { + exact_name_match: false, + type_match: None, + is_local: false, + is_item_from_trait: false, + is_item_from_notable_trait: false, + is_name_already_imported: false, + requires_import: false, + is_op_method: false, + is_private_editable: false, + postfix_match: None, + is_definite: false, + function: Some( + CompletionRelevanceFn { + has_params: true, + has_self_param: true, + return_type: Other, + }, + ), + }, }, CompletionItem { label: "foo", @@ -1418,6 +1531,26 @@ fn foo(s: S) { s.$0 } kind: Method, lookup: "the_method", detail: "fn(&self)", + relevance: CompletionRelevance { + exact_name_match: false, + type_match: None, + is_local: false, + is_item_from_trait: false, + is_item_from_notable_trait: false, + is_name_already_imported: false, + requires_import: false, + is_op_method: false, + is_private_editable: false, + postfix_match: None, + is_definite: false, + function: Some( + CompletionRelevanceFn { + has_params: true, + has_self_param: true, + return_type: Other, + }, + ), + }, }, ] "#]], @@ -1665,6 +1798,10 @@ fn f() { A { bar: b$0 }; } expect![[r#" fn bar() [type+name] fn baz() [type] + ex baz() [type] + ex bar() [type] + ex A { bar: baz() }.bar [type] + ex A { bar: bar() }.bar [type] st A [] fn f() [] "#]], @@ -1749,6 +1886,8 @@ fn main() { lc s 
[type+name+local]
                st S [type]
                st S [type]
+                ex s [type]
+                ex S [type]
                fn foo(…) []
                fn main() []
            "#]],
        );
    }
@@ -1766,6 +1905,8 @@ fn main() {
                lc ssss [type+local]
                st S [type]
                st S [type]
+                ex ssss [type]
+                ex S [type]
                fn foo(…) []
                fn main() []
            "#]],
@@ -1798,6 +1939,8 @@ fn main() {
 }
 "#,
            expect![[r#"
+                ex core::ops::Deref::deref(&T(S)) (use core::ops::Deref) [type_could_unify]
+                ex core::ops::Deref::deref(&t) (use core::ops::Deref) [type_could_unify]
                lc m [local]
                lc t [local]
                lc &t [type+local]
@@ -1846,6 +1989,8 @@ fn main() {
 }
 "#,
            expect![[r#"
+                ex core::ops::DerefMut::deref_mut(&mut T(S)) (use core::ops::DerefMut) [type_could_unify]
+                ex core::ops::DerefMut::deref_mut(&mut t) (use core::ops::DerefMut) [type_could_unify]
                lc m [local]
                lc t [local]
                lc &mut t [type+local]
@@ -1894,6 +2039,8 @@ fn bar(t: Foo) {}
                ev Foo::A [type]
                ev Foo::B [type]
                en Foo [type]
+                ex Foo::A [type]
+                ex Foo::B [type]
                fn bar(…) []
                fn foo() []
            "#]],
@@ -1947,6 +2094,8 @@ fn main() {
 }
 "#,
            expect![[r#"
+                ex core::ops::Deref::deref(&T(S)) (use core::ops::Deref) [type_could_unify]
+                ex core::ops::Deref::deref(&bar()) (use core::ops::Deref) [type_could_unify]
                st S []
                st &S [type]
                st S []
@@ -2002,6 +2151,254 @@ fn main() {
        );
    }
 
+    #[test]
+    fn constructor_order_simple() {
+        check_relevance(
+            r#"
+struct Foo;
+struct Other;
+struct Option<T>(T);
+
+impl Foo {
+    fn fn_ctr() -> Foo { unimplemented!() }
+    fn fn_another(n: u32) -> Other { unimplemented!() }
+    fn fn_ctr_self() -> Option<Self> { unimplemented!() }
+}
+
+fn test() {
+    let a = Foo::$0;
+}
+"#,
+            expect![[r#"
+                fn fn_ctr() [type_could_unify]
+                fn fn_ctr_self() [type_could_unify]
+                fn fn_another(…) [type_could_unify]
+            "#]],
+        );
+    }
+
+    #[test]
+    fn constructor_order_kind() {
+        check_function_relevance(
+            r#"
+struct Foo;
+struct Bar;
+struct Option<T>(T);
+enum Result<T, E> { Ok(T), Err(E) };
+
+impl Foo {
+    fn fn_ctr(&self) -> Foo { unimplemented!() }
+    fn fn_ctr_with_args(&self, n: u32) -> Foo { unimplemented!() }
+    fn fn_another(&self, n: u32) -> Bar { unimplemented!() }
+    fn fn_ctr_wrapped(&self, ) -> Option<Self> { unimplemented!() }
+    fn fn_ctr_wrapped_2(&self, ) -> Result<Self, Bar> { unimplemented!() }
+    fn fn_ctr_wrapped_3(&self, ) -> Result<Bar, Self> { unimplemented!() } // Self is not the first type
+    fn fn_ctr_wrapped_with_args(&self, m: u32) -> Option<Self> { unimplemented!() }
+    fn fn_another_unit(&self) { unimplemented!() }
+}
+
+fn test() {
+    let a = self::Foo::$0;
+}
+"#,
+            expect![[r#"
+                [
+                    (
+                        "fn(&self, u32) -> Bar",
+                        Some(
+                            CompletionRelevanceFn {
+                                has_params: true,
+                                has_self_param: true,
+                                return_type: Other,
+                            },
+                        ),
+                    ),
+                    (
+                        "fn(&self)",
+                        Some(
+                            CompletionRelevanceFn {
+                                has_params: true,
+                                has_self_param: true,
+                                return_type: Other,
+                            },
+                        ),
+                    ),
+                    (
+                        "fn(&self) -> Foo",
+                        Some(
+                            CompletionRelevanceFn {
+                                has_params: true,
+                                has_self_param: true,
+                                return_type: DirectConstructor,
+                            },
+                        ),
+                    ),
+                    (
+                        "fn(&self, u32) -> Foo",
+                        Some(
+                            CompletionRelevanceFn {
+                                has_params: true,
+                                has_self_param: true,
+                                return_type: DirectConstructor,
+                            },
+                        ),
+                    ),
+                    (
+                        "fn(&self) -> Option<Foo>",
+                        Some(
+                            CompletionRelevanceFn {
+                                has_params: true,
+                                has_self_param: true,
+                                return_type: Constructor,
+                            },
+                        ),
+                    ),
+                    (
+                        "fn(&self) -> Result<Foo, Bar>",
+                        Some(
+                            CompletionRelevanceFn {
+                                has_params: true,
+                                has_self_param: true,
+                                return_type: Constructor,
+                            },
+                        ),
+                    ),
+                    (
+                        "fn(&self) -> Result<Bar, Foo>",
+                        Some(
+                            CompletionRelevanceFn {
+                                has_params: true,
+                                has_self_param: true,
+                                return_type: Constructor,
+                            },
+                        ),
+                    ),
+                    (
+                        "fn(&self, u32) -> Option<Foo>",
+                        Some(
+                            CompletionRelevanceFn {
+                                has_params: true,
+                                has_self_param: true,
+                                return_type: Constructor,
+                            },
+                        ),
+                    ),
+                ]
+            "#]],
+        );
+    }
+
+    #[test]
+    fn constructor_order_relevance() {
+        check_relevance(
+            r#"
+struct Foo;
+struct FooBuilder;
+struct Result<T>(T);
+
+impl Foo {
+    fn fn_no_ret(&self) {}
+    fn fn_ctr_with_args(input: u32) -> Foo { unimplemented!() }
+    fn fn_direct_ctr() -> Self { unimplemented!() }
+    fn fn_ctr() -> Result<Self> { unimplemented!() }
+    fn fn_other() -> Result<u32> { unimplemented!() }
+    fn fn_builder() -> FooBuilder { unimplemented!() }
+}
+
+fn test() {
+    let a = self::Foo::$0;
+}
+"#,
+            // preference:
+            // Direct Constructor
+            // Direct Constructor with args
+            // Builder
+            // Constructor
+            // Others
+            expect![[r#"
+                fn fn_direct_ctr() [type_could_unify]
+                fn fn_ctr_with_args(…) [type_could_unify]
+                fn fn_builder() [type_could_unify]
+                fn fn_ctr() [type_could_unify]
+                me fn_no_ret(…) [type_could_unify]
+                fn fn_other() [type_could_unify]
+            "#]],
+        );
+    }
+
+    #[test]
+    fn function_relevance_generic_1() {
+        check_relevance(
+            r#"
+struct Foo<T>(T);
+struct FooBuilder;
+struct Option<T>(T);
+enum Result<T, E>{Ok(T), Err(E)};
+
+impl<T> Foo<T> {
+    fn fn_returns_unit(&self) {}
+    fn fn_ctr_with_args(input: T) -> Foo<T> { unimplemented!() }
+    fn fn_direct_ctr() -> Self { unimplemented!() }
+    fn fn_ctr_wrapped() -> Option<Self> { unimplemented!() }
+    fn fn_ctr_wrapped_2() -> Result<Self, u32> { unimplemented!() }
+    fn fn_other() -> Option<u32> { unimplemented!() }
+    fn fn_builder() -> FooBuilder { unimplemented!() }
+}
+
+fn test() {
+    let a = self::Foo::<u32>::$0;
+}
+        "#,
+            expect![[r#"
+                fn fn_direct_ctr() [type_could_unify]
+                fn fn_ctr_with_args(…) [type_could_unify]
+                fn fn_builder() [type_could_unify]
+                fn fn_ctr_wrapped() [type_could_unify]
+                fn fn_ctr_wrapped_2() [type_could_unify]
+                me fn_returns_unit(…) [type_could_unify]
+                fn fn_other() [type_could_unify]
+            "#]],
+        );
+    }
+
+    #[test]
+    fn function_relevance_generic_2() {
+        // Generic 2
+        check_relevance(
+            r#"
+struct Foo<T>(T);
+struct FooBuilder;
+struct Option<T>(T);
+enum Result<T, E>{Ok(T), Err(E)};
+
+impl<T> Foo<T> {
+    fn fn_no_ret(&self) {}
+    fn fn_ctr_with_args(input: T) -> Foo<T> { unimplemented!() }
+    fn fn_direct_ctr() -> Self { unimplemented!() }
+    fn fn_ctr() -> Option<Self> { unimplemented!() }
+    fn fn_ctr2() -> Result<Self, u32> { unimplemented!() }
+    fn fn_other() -> Option<u32> { unimplemented!() }
+    fn fn_builder() -> FooBuilder { unimplemented!() }
+}
+
+fn test() {
+    let a : Res<Foo<u32>> = Foo::$0;
+}
+        "#,
+            expect![[r#"
+                fn fn_direct_ctr() [type_could_unify]
+                fn fn_ctr_with_args(…) [type_could_unify]
+                fn fn_builder() [type_could_unify]
+                fn fn_ctr() [type_could_unify]
+                fn fn_ctr2() [type_could_unify]
+                me fn_no_ret(…) [type_could_unify]
+                fn fn_other() [type_could_unify]
+            "#]],
+        );
+    }
+
     #[test]
     fn struct_field_method_ref() {
         check_kinds(
@@ -2022,6 +2419,26 @@ fn foo(f: Foo) { let _: &u32 = f.b$0 }
                    kind: Method,
                    lookup: "baz",
                    detail: "fn(&self) -> u32",
+                    relevance: CompletionRelevance {
+                        exact_name_match: false,
+                        type_match: None,
+                        is_local: false,
+                        is_item_from_trait: false,
+                        is_item_from_notable_trait: false,
+                        is_name_already_imported: false,
+                        requires_import: false,
+                        is_op_method: false,
+                        is_private_editable: false,
+                        postfix_match: None,
+                        is_definite: false,
+                        function: Some(
+                            CompletionRelevanceFn {
+                                has_params: true,
+                                has_self_param: true,
+                                return_type: Other,
+                            },
+                        ),
+                    },
                    ref_match: "&@107",
                },
                CompletionItem {
@@ -2096,6 +2513,7 @@ fn foo() {
                        is_private_editable: false,
                        postfix_match: None,
                        is_definite: false,
+                        function: None,
                    },
                },
            ]
@@ -2133,6 +2551,26 @@ fn main() {
                    ),
                    lookup: "foo",
                    detail: "fn() ->
S", + relevance: CompletionRelevance { + exact_name_match: false, + type_match: None, + is_local: false, + is_item_from_trait: false, + is_item_from_notable_trait: false, + is_name_already_imported: false, + requires_import: false, + is_op_method: false, + is_private_editable: false, + postfix_match: None, + is_definite: false, + function: Some( + CompletionRelevanceFn { + has_params: false, + has_self_param: false, + return_type: Other, + }, + ), + }, ref_match: "&@92", }, ] @@ -2160,6 +2598,7 @@ fn foo() { "#, expect![[r#" lc foo [type+local] + ex foo [type] ev Foo::A(…) [type_could_unify] ev Foo::B [type_could_unify] en Foo [type_could_unify] @@ -2493,6 +2932,7 @@ fn main() { is_private_editable: false, postfix_match: None, is_definite: false, + function: None, }, }, CompletionItem { @@ -2515,6 +2955,7 @@ fn main() { is_private_editable: false, postfix_match: None, is_definite: false, + function: None, }, }, ] diff --git a/crates/ide-completion/src/render/function.rs b/crates/ide-completion/src/render/function.rs index 27186a2b7ffbe..cf9fe1ab30728 100644 --- a/crates/ide-completion/src/render/function.rs +++ b/crates/ide-completion/src/render/function.rs @@ -8,8 +8,13 @@ use syntax::{format_smolstr, AstNode, SmolStr}; use crate::{ context::{CompletionContext, DotAccess, DotAccessKind, PathCompletionCtx, PathKind}, - item::{Builder, CompletionItem, CompletionItemKind, CompletionRelevance}, - render::{compute_exact_name_match, compute_ref_match, compute_type_match, RenderContext}, + item::{ + Builder, CompletionItem, CompletionItemKind, CompletionRelevance, CompletionRelevanceFn, + CompletionRelevanceReturnType, + }, + render::{ + compute_exact_name_match, compute_ref_match, compute_type_match, match_types, RenderContext, + }, CallableSnippets, }; @@ -61,9 +66,9 @@ fn render( ), _ => (name.unescaped().to_smol_str(), name.to_smol_str()), }; - + let has_self_param = func.self_param(db).is_some(); let mut item = CompletionItem::new( - if func.self_param(db).is_some() { + if has_self_param { CompletionItemKind::Method } else { CompletionItemKind::SymbolKind(SymbolKind::Function) @@ -99,6 +104,15 @@ fn render( .filter(|_| !has_call_parens) .and_then(|cap| Some((cap, params(ctx.completion, func, &func_kind, has_dot_receiver)?))); + let function = assoc_item + .and_then(|assoc_item| assoc_item.implementing_ty(db)) + .map(|self_type| compute_return_type_match(db, &ctx, self_type, &ret_type)) + .map(|return_type| CompletionRelevanceFn { + has_params: has_self_param || func.num_params(db) > 0, + has_self_param, + return_type, + }); + item.set_relevance(CompletionRelevance { type_match: if has_call_parens || complete_call_parens.is_some() { compute_type_match(completion, &ret_type) @@ -106,6 +120,7 @@ fn render( compute_type_match(completion, &func.ty(db)) }, exact_name_match: compute_exact_name_match(completion, &call), + function, is_op_method, is_item_from_notable_trait, ..ctx.completion_relevance() @@ -156,6 +171,33 @@ fn render( item } +fn compute_return_type_match( + db: &dyn HirDatabase, + ctx: &RenderContext<'_>, + self_type: hir::Type, + ret_type: &hir::Type, +) -> CompletionRelevanceReturnType { + if match_types(ctx.completion, &self_type, ret_type).is_some() { + // fn([..]) -> Self + CompletionRelevanceReturnType::DirectConstructor + } else if ret_type + .type_arguments() + .any(|ret_type_arg| match_types(ctx.completion, &self_type, &ret_type_arg).is_some()) + { + // fn([..]) -> Result OR Wrapped + CompletionRelevanceReturnType::Constructor + } else if ret_type + .as_adt() + 
.and_then(|adt| adt.name(db).as_str().map(|name| name.ends_with("Builder"))) + .unwrap_or(false) + { + // fn([..]) -> [..]Builder + CompletionRelevanceReturnType::Builder + } else { + CompletionRelevanceReturnType::Other + } +} + pub(super) fn add_call_parens<'b>( builder: &'b mut Builder, ctx: &CompletionContext<'_>, diff --git a/crates/ide-completion/src/tests.rs b/crates/ide-completion/src/tests.rs index 154b69875aea8..1f032c7df480d 100644 --- a/crates/ide-completion/src/tests.rs +++ b/crates/ide-completion/src/tests.rs @@ -65,6 +65,7 @@ pub(crate) const TEST_CONFIG: CompletionConfig = CompletionConfig { enable_imports_on_the_fly: true, enable_self_on_the_fly: true, enable_private_editable: false, + enable_term_search: true, full_function_signatures: false, callable: Some(CallableSnippets::FillArguments), snippet_cap: SnippetCap::new(true), diff --git a/crates/ide-completion/src/tests/expression.rs b/crates/ide-completion/src/tests/expression.rs index 78907a2896c4a..7749fac40b9dc 100644 --- a/crates/ide-completion/src/tests/expression.rs +++ b/crates/ide-completion/src/tests/expression.rs @@ -97,6 +97,11 @@ fn func(param0 @ (param1, param2): (i32, i32)) { kw unsafe kw while kw while let + ex ifletlocal + ex letlocal + ex matcharm + ex param1 + ex param2 "#]], ); } @@ -241,6 +246,8 @@ fn complete_in_block() { sn macro_rules sn pd sn ppd + ex false + ex true "#]], ) } @@ -542,7 +549,26 @@ fn quux(x: i32) { m!(x$0 } "#, - expect![[r#""#]], + expect![[r#" + fn quux(…) fn(i32) + lc x i32 + lc y i32 + ma m!(…) macro_rules! m + bt u32 u32 + kw crate:: + kw false + kw for + kw if + kw if let + kw loop + kw match + kw return + kw self:: + kw true + kw unsafe + kw while + kw while let + "#]], ); } @@ -682,7 +708,9 @@ fn main() { } "#, expect![[r#" - fn test() fn() -> Zulu + fn test() fn() -> Zulu + ex Zulu + ex Zulu::test() "#]], ); } diff --git a/crates/ide-completion/src/tests/flyimport.rs b/crates/ide-completion/src/tests/flyimport.rs index eaa1bebc03c7e..fff193ba4c9bd 100644 --- a/crates/ide-completion/src/tests/flyimport.rs +++ b/crates/ide-completion/src/tests/flyimport.rs @@ -1397,3 +1397,22 @@ pub use bridge2::server2::Span2; "#]], ); } + +#[test] +fn flyimport_only_traits_in_impl_trait_block() { + check( + r#" +//- /main.rs crate:main deps:dep +pub struct Bar; + +impl Foo$0 for Bar { } +//- /lib.rs crate:dep +pub trait FooTrait; + +pub struct FooStruct; +"#, + expect![[r#" + tt FooTrait (use dep::FooTrait) + "#]], + ); +} diff --git a/crates/ide-completion/src/tests/record.rs b/crates/ide-completion/src/tests/record.rs index 18afde1b7cefd..e64ec74c6106e 100644 --- a/crates/ide-completion/src/tests/record.rs +++ b/crates/ide-completion/src/tests/record.rs @@ -192,6 +192,8 @@ fn main() { bt u32 u32 kw crate:: kw self:: + ex Foo::default() + ex foo "#]], ); check( diff --git a/crates/ide-completion/src/tests/special.rs b/crates/ide-completion/src/tests/special.rs index a87d16c789faf..ff32eccfbff4c 100644 --- a/crates/ide-completion/src/tests/special.rs +++ b/crates/ide-completion/src/tests/special.rs @@ -225,10 +225,10 @@ impl S { fn foo() { let _ = lib::S::$0 } "#, expect![[r#" - ct PUBLIC_CONST pub const PUBLIC_CONST: u32 - fn public_method() fn() - ta PublicType pub type PublicType = u32 - "#]], + ct PUBLIC_CONST pub const PUBLIC_CONST: u32 + fn public_method() fn() + ta PublicType pub type PublicType = u32 + "#]], ); } @@ -242,8 +242,8 @@ impl U { fn m() { } } fn foo() { let _ = U::$0 } "#, expect![[r#" - fn m() fn() - "#]], + fn m() fn() + "#]], ); } @@ -256,8 +256,8 @@ trait Trait { 
fn m();
 }
 fn foo() { let _ = Trait::$0 }
 "#,
         expect![[r#"
-        fn m() (as Trait) fn()
-    "#]],
+            fn m() (as Trait) fn()
+        "#]],
     );
 }
@@ -273,8 +273,8 @@ impl Trait for S {}
 fn foo() { let _ = S::$0 }
 "#,
         expect![[r#"
-        fn m() (as Trait) fn()
-    "#]],
+            fn m() (as Trait) fn()
+        "#]],
     );
 }
@@ -290,8 +290,8 @@ impl Trait for S {}
 fn foo() { let _ = <S as Trait>::$0 }
 "#,
         expect![[r#"
-        fn m() (as Trait) fn()
-    "#]],
+            fn m() (as Trait) fn()
+        "#]],
     );
 }
@@ -396,9 +396,9 @@ macro_rules! foo { () => {} }
 fn main() { let _ = crate::$0 }
 "#,
         expect![[r#"
-        fn main() fn()
-        ma foo!(…) macro_rules! foo
-    "#]],
+            fn main() fn()
+            ma foo!(…) macro_rules! foo
+        "#]],
     );
 }
@@ -694,8 +694,10 @@ fn bar() -> Bar {
 }
 "#,
         expect![[r#"
-        fn foo() (as Foo) fn() -> Self
-    "#]],
+            fn foo() (as Foo) fn() -> Self
+            ex Bar
+            ex bar()
+        "#]],
     );
 }
@@ -722,6 +724,8 @@ fn bar() -> Bar {
         expect![[r#"
             fn bar() fn()
             fn foo() (as Foo) fn() -> Self
+            ex Bar
+            ex bar()
         "#]],
     );
 }
@@ -748,6 +752,8 @@ fn bar() -> Bar {
 "#,
         expect![[r#"
             fn foo() (as Foo) fn() -> Self
+            ex Bar
+            ex bar()
         "#]],
     );
 }
diff --git a/crates/ide-completion/src/tests/type_pos.rs b/crates/ide-completion/src/tests/type_pos.rs
index c7161f82ce74f..db4ac9381cedb 100644
--- a/crates/ide-completion/src/tests/type_pos.rs
+++ b/crates/ide-completion/src/tests/type_pos.rs
@@ -989,3 +989,43 @@ fn foo<'a>() { S::<'static, F$0, _, _>; }
         "#]],
     );
 }
+
+#[test]
+fn complete_traits_on_impl_trait_block() {
+    check(
+        r#"
+trait Foo {}
+
+struct Bar;
+
+impl $0 for Bar { }
+"#,
+        expect![[r#"
+            md module
+            tt Foo
+            tt Trait
+            kw crate::
+            kw self::
+        "#]],
+    );
+}
+
+#[test]
+fn complete_traits_with_path_on_impl_trait_block() {
+    check(
+        r#"
+mod outer {
+    pub trait Foo {}
+    pub struct Bar;
+    pub mod inner {
+    }
+}
+
+impl outer::$0 for Bar { }
+"#,
+        expect![[r#"
+            md inner
+            tt Foo
+        "#]],
+    );
+}
diff --git a/crates/ide-db/src/famous_defs.rs b/crates/ide-db/src/famous_defs.rs
index 4edfa37b32905..3106772e63b12 100644
--- a/crates/ide-db/src/famous_defs.rs
+++ b/crates/ide-db/src/famous_defs.rs
@@ -114,6 +114,14 @@ impl FamousDefs<'_, '_> {
         self.find_function("core:mem:drop")
     }
 
+    pub fn core_macros_todo(&self) -> Option<Macro> {
+        self.find_macro("core:todo")
+    }
+
+    pub fn core_macros_unimplemented(&self) -> Option<Macro> {
+        self.find_macro("core:unimplemented")
+    }
+
     pub fn builtin_crates(&self) -> impl Iterator<Item = Crate> {
         IntoIterator::into_iter([
             self.std(),
diff --git a/crates/ide-db/src/path_transform.rs b/crates/ide-db/src/path_transform.rs
index 3862acc2af4da..7e1811b4cacb6 100644
--- a/crates/ide-db/src/path_transform.rs
+++ b/crates/ide-db/src/path_transform.rs
@@ -148,7 +148,7 @@ impl<'a> PathTransform<'a> {
         let mut defaulted_params: Vec<DefaultedParam> = Default::default();
         self.generic_def
             .into_iter()
-            .flat_map(|it| it.type_params(db))
+            .flat_map(|it| it.type_or_const_params(db))
             .skip(skip)
             // The actual list of trait type parameters may be longer than the one
             // used in the `impl` block due to trailing default type parameters.
diff --git a/crates/ide-db/src/rename.rs b/crates/ide-db/src/rename.rs
index 032b8338ab85d..6a7042988a9c4 100644
--- a/crates/ide-db/src/rename.rs
+++ b/crates/ide-db/src/rename.rs
@@ -71,7 +71,6 @@ impl Definition {
         &self,
         sema: &Semantics<'_, RootDatabase>,
         new_name: &str,
-        rename_external: bool,
     ) -> Result<SourceChange> {
         // self.krate() returns None if
         // self is a built-in attr, built-in type or tool module.
@@ -80,8 +79,8 @@ impl Definition {
         if let Some(krate) = self.krate(sema.db) {
             // Can we not rename non-local items?
            // Then bail if non-local
-            if !rename_external && !krate.origin(sema.db).is_local() {
-                bail!("Cannot rename a non-local definition as the config for it is disabled")
+            if !krate.origin(sema.db).is_local() {
+                bail!("Cannot rename a non-local definition")
             }
         }
diff --git a/crates/ide-db/src/source_change.rs b/crates/ide-db/src/source_change.rs
index 73be6a4071e47..f59d8d08c8924 100644
--- a/crates/ide-db/src/source_change.rs
+++ b/crates/ide-db/src/source_change.rs
@@ -138,7 +138,7 @@ impl SnippetEdit {
             .into_iter()
             .zip(1..)
             .with_position()
-            .map(|pos| {
+            .flat_map(|pos| {
                 let (snippet, index) = match pos {
                     (itertools::Position::First, it) | (itertools::Position::Middle, it) => it,
                     // last/only snippet gets index 0
                     (itertools::Position::Last, (snippet, _))
                     | (itertools::Position::Only, (snippet, _)) => (snippet, 0),
                 };
 
-                let range = match snippet {
-                    Snippet::Tabstop(pos) => TextRange::empty(pos),
-                    Snippet::Placeholder(range) => range,
-                };
-                (index, range)
+                match snippet {
+                    Snippet::Tabstop(pos) => vec![(index, TextRange::empty(pos))],
+                    Snippet::Placeholder(range) => vec![(index, range)],
+                    Snippet::PlaceholderGroup(ranges) => {
+                        ranges.into_iter().map(|range| (index, range)).collect()
+                    }
+                }
             })
             .collect_vec();
 
@@ -248,7 +250,7 @@ impl SourceChangeBuilder {
     fn commit(&mut self) {
         let snippet_edit = self.snippet_builder.take().map(|builder| {
             SnippetEdit::new(
-                builder.places.into_iter().map(PlaceSnippet::finalize_position).collect_vec(),
+                builder.places.into_iter().flat_map(PlaceSnippet::finalize_position).collect(),
             )
         });
 
@@ -287,30 +289,10 @@ impl SourceChangeBuilder {
     pub fn insert(&mut self, offset: TextSize, text: impl Into<String>) {
         self.edit.insert(offset, text.into())
     }
-    /// Append specified `snippet` at the given `offset`
-    pub fn insert_snippet(
-        &mut self,
-        _cap: SnippetCap,
-        offset: TextSize,
-        snippet: impl Into<String>,
-    ) {
-        self.source_change.is_snippet = true;
-        self.insert(offset, snippet);
-    }
     /// Replaces specified `range` of text with a given string.
     pub fn replace(&mut self, range: TextRange, replace_with: impl Into<String>) {
         self.edit.replace(range, replace_with.into())
     }
-    /// Replaces specified `range` of text with a given `snippet`.
-    pub fn replace_snippet(
-        &mut self,
-        _cap: SnippetCap,
-        range: TextRange,
-        snippet: impl Into<String>,
-    ) {
-        self.source_change.is_snippet = true;
-        self.replace(range, snippet);
-    }
     pub fn replace_ast<N: AstNode>(&mut self, old: N, new: N) {
         algo::diff(old.syntax(), new.syntax()).into_text_edit(&mut self.edit)
     }
@@ -356,6 +338,17 @@ impl SourceChangeBuilder {
         self.add_snippet(PlaceSnippet::Over(node.syntax().clone().into()))
     }
 
+    /// Adds a placeholder snippet over each of the given `nodes`, linked as a group
+    ///
+    /// This allows for renaming newly generated items without having to go
+    /// through a separate rename step.
+    pub fn add_placeholder_snippet_group(&mut self, _cap: SnippetCap, nodes: Vec<SyntaxNode>) {
+        assert!(nodes.iter().all(|node| node.parent().is_some()));
+        self.add_snippet(PlaceSnippet::OverGroup(
+            nodes.into_iter().map(|node| node.into()).collect(),
+        ))
+    }
+
     fn add_snippet(&mut self, snippet: PlaceSnippet) {
         let snippet_builder = self.snippet_builder.get_or_insert(SnippetBuilder { places: vec![] });
         snippet_builder.places.push(snippet);
@@ -400,6 +393,13 @@ pub enum Snippet {
     Tabstop(TextSize),
     /// A placeholder snippet (e.g. `${0:placeholder}`).
     Placeholder(TextRange),
+    /// A group of placeholder snippets, e.g.
+    ///
+    /// ```no_run
+    /// let ${0:new_var} = 4;
+    /// fun(1, 2, 3, ${0:new_var});
+    /// ```
+    PlaceholderGroup(Vec<TextRange>),
 }
 
 enum PlaceSnippet {
     After(SyntaxElement),
     /// Place a placeholder snippet in place of the element
     Over(SyntaxElement),
+    /// Place a group of placeholder snippets which are linked together
+    /// in place of the elements
+    OverGroup(Vec<SyntaxElement>),
 }
 
 impl PlaceSnippet {
-    fn finalize_position(self) -> Snippet {
+    fn finalize_position(self) -> Vec<Snippet> {
         match self {
-            PlaceSnippet::Before(it) => Snippet::Tabstop(it.text_range().start()),
-            PlaceSnippet::After(it) => Snippet::Tabstop(it.text_range().end()),
-            PlaceSnippet::Over(it) => Snippet::Placeholder(it.text_range()),
+            PlaceSnippet::Before(it) => vec![Snippet::Tabstop(it.text_range().start())],
+            PlaceSnippet::After(it) => vec![Snippet::Tabstop(it.text_range().end())],
+            PlaceSnippet::Over(it) => vec![Snippet::Placeholder(it.text_range())],
+            PlaceSnippet::OverGroup(it) => {
+                vec![Snippet::PlaceholderGroup(it.into_iter().map(|it| it.text_range()).collect())]
+            }
         }
     }
 }
diff --git a/crates/ide-db/src/syntax_helpers/node_ext.rs b/crates/ide-db/src/syntax_helpers/node_ext.rs
index e4e735cecd89d..4f706e26af2b3 100644
--- a/crates/ide-db/src/syntax_helpers/node_ext.rs
+++ b/crates/ide-db/src/syntax_helpers/node_ext.rs
@@ -329,6 +329,7 @@ pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) {
         | ast::Expr::RecordExpr(_)
         | ast::Expr::RefExpr(_)
         | ast::Expr::ReturnExpr(_)
+        | ast::Expr::BecomeExpr(_)
         | ast::Expr::TryExpr(_)
         | ast::Expr::TupleExpr(_)
         | ast::Expr::LetExpr(_)
diff --git a/crates/ide-diagnostics/src/handlers/incorrect_case.rs b/crates/ide-diagnostics/src/handlers/incorrect_case.rs
index dd64b93e4548b..5e2541795ca1c 100644
--- a/crates/ide-diagnostics/src/handlers/incorrect_case.rs
+++ b/crates/ide-diagnostics/src/handlers/incorrect_case.rs
@@ -43,7 +43,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::IncorrectCase) -> Option<Vec<Assist>> {}
+              // ^^^^ error: expected (bool, bool), found bool
+              // ^^^^^ error: expected (bool, bool), found bool
+        None => {}
+    }
+}
+    "#,
+        );
+    }
+
     #[test]
     fn mismatched_types_in_or_patterns() {
         cov_mark::check_count!(validate_match_bailed_out, 2);
diff --git a/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs b/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs
index a0d5d742d3622..b7667dc318f0c 100644
--- a/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs
+++ b/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs
@@ -182,6 +182,18 @@ fn foo() -> u8 {
         );
     }
 
+    #[test]
+    fn no_diagnostic_if_not_last_statement2() {
+        check_diagnostics(
+            r#"
+fn foo() -> u8 {
+    return 2;
+    fn bar() {}
+}
+"#,
+        );
+    }
+
     #[test]
     fn replace_with_expr() {
         check_fix(
diff --git a/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/crates/ide-diagnostics/src/handlers/type_mismatch.rs
index e93eea8ce29e5..8c97281b78328 100644
--- a/crates/ide-diagnostics/src/handlers/type_mismatch.rs
+++ b/crates/ide-diagnostics/src/handlers/type_mismatch.rs
@@ -112,7 +112,8 @@ fn add_missing_ok_or_some(
     let variant_name = if Some(expected_enum) == core_result { "Ok" } else { "Some" };
 
-    let wrapped_actual_ty = expected_adt.ty_with_args(ctx.sema.db, &[d.actual.clone()]);
+    let wrapped_actual_ty =
+        expected_adt.ty_with_args(ctx.sema.db, std::iter::once(d.actual.clone()));
 
     if !d.expected.could_unify_with(ctx.sema.db, &wrapped_actual_ty) {
         return None;
diff --git a/crates/ide-diagnostics/src/handlers/typed_hole.rs
b/crates/ide-diagnostics/src/handlers/typed_hole.rs
index 6441343ebacdc..56c8181e84ce6 100644
--- a/crates/ide-diagnostics/src/handlers/typed_hole.rs
+++ b/crates/ide-diagnostics/src/handlers/typed_hole.rs
@@ -1,14 +1,20 @@
-use hir::{db::ExpandDatabase, ClosureStyle, HirDisplay, StructKind};
+use hir::{
+    db::ExpandDatabase,
+    term_search::{term_search, TermSearchCtx},
+    ClosureStyle, HirDisplay,
+};
 use ide_db::{
     assists::{Assist, AssistId, AssistKind, GroupLabel},
     label::Label,
     source_change::SourceChange,
 };
-use syntax::AstNode;
+use itertools::Itertools;
 use text_edit::TextEdit;
 
 use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
 
+use syntax::AstNode;
+
 // Diagnostic: typed-hole
 //
 // This diagnostic is triggered when an underscore expression is used in an invalid position.
@@ -36,50 +42,54 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option<Vec<Assist>> {
     let (original_range, _) =
         d.expr.as_ref().map(|it| it.to_node(&root)).syntax().original_file_range_opt(db)?;
     let scope = ctx.sema.scope(d.expr.value.to_node(&root).syntax())?;
-    let mut assists = vec![];
-    scope.process_all_names(&mut |name, def| {
-        let ty = match def {
-            hir::ScopeDef::ModuleDef(it) => match it {
-                hir::ModuleDef::Function(it) => it.ty(db),
-                hir::ModuleDef::Adt(hir::Adt::Struct(it)) if it.kind(db) != StructKind::Record => {
-                    it.constructor_ty(db)
-                }
-                hir::ModuleDef::Variant(it) if it.kind(db) != StructKind::Record => {
-                    it.constructor_ty(db)
-                }
-                hir::ModuleDef::Const(it) => it.ty(db),
-                hir::ModuleDef::Static(it) => it.ty(db),
-                _ => return,
-            },
-            hir::ScopeDef::GenericParam(hir::GenericParam::ConstParam(it)) => it.ty(db),
-            hir::ScopeDef::Local(it) => it.ty(db),
-            _ => return,
-        };
-        // FIXME: should also check coercions if it is at a coercion site
-        if !ty.contains_unknown() && ty.could_unify_with(db, &d.expected) {
-            assists.push(Assist {
-                id: AssistId("typed-hole", AssistKind::QuickFix),
-                label: Label::new(format!("Replace `_` with `{}`", name.display(db))),
-                group: Some(GroupLabel("Replace `_` with a matching entity in scope".to_owned())),
-                target: original_range.range,
-                source_change: Some(SourceChange::from_text_edit(
-                    original_range.file_id,
-                    TextEdit::replace(original_range.range, name.display(db).to_string()),
-                )),
-                trigger_signature_help: false,
-            });
-        }
-    });
-    if assists.is_empty() {
-        None
-    } else {
+
+    let term_search_ctx = TermSearchCtx {
+        sema: &ctx.sema,
+        scope: &scope,
+        goal: d.expected.clone(),
+        config: Default::default(),
+    };
+    let paths = term_search(&term_search_ctx);
+
+    let mut formatter = |_: &hir::Type| String::from("_");
+
+    let assists: Vec<Assist> = paths
+        .into_iter()
+        .filter_map(|path| {
+            path.gen_source_code(
+                &scope,
+                &mut formatter,
+                ctx.config.prefer_no_std,
+                ctx.config.prefer_prelude,
+            )
+            .ok()
+        })
+        .unique()
+        .map(|code| Assist {
+            id: AssistId("typed-hole", AssistKind::QuickFix),
+            label: Label::new(format!("Replace `_` with `{}`", &code)),
+            group: Some(GroupLabel("Replace `_` with a term".to_owned())),
+            target: original_range.range,
+            source_change: Some(SourceChange::from_text_edit(
+                original_range.file_id,
+                TextEdit::replace(original_range.range, code),
+            )),
+            trigger_signature_help: false,
+        })
+        .collect();
+
+    if !assists.is_empty() {
         Some(assists)
+    } else {
+        None
     }
 }
 
 #[cfg(test)]
 mod tests {
-    use crate::tests::{check_diagnostics, check_fixes};
+    use crate::tests::{
+        check_diagnostics, check_fixes_unordered, check_has_fix, check_has_single_fix,
+    };
 
     #[test]
     fn unknown() {
@@ -99,7 +109,7 @@ fn main() {
         r#"
fn main() {
    if _ {}
-      //^ error: invalid `_` expression, expected type `bool`
+      //^ 💡 error: invalid `_` expression, expected type `bool`
     let _: fn() -> i32 = _;
                        //^ error: invalid `_` expression, expected type `fn() -> i32`
     let _: fn() -> () = _; // FIXME: This should trigger an assist because `main` matches via *coercion*
@@ -129,7 +139,7 @@ fn main() {
 fn main() {
     let mut x = t();
     x = _;
-      //^ 💡 error: invalid `_` expression, expected type `&str`
+      //^ error: invalid `_` expression, expected type `&str`
     x = "";
 }
 fn t<T>() -> T { loop {} }
@@ -143,7 +153,8 @@ fn t<T>() -> T { loop {} }
         r#"
 fn main() {
     let _x = [(); _];
-    let _y: [(); 10] = [(); _];
+    // FIXME: This should trigger error
+    // let _y: [(); 10] = [(); _];
     _ = 0;
     (_,) = (1,);
 }
@@ -153,7 +164,7 @@ fn main() {
 
     #[test]
     fn check_quick_fix() {
-        check_fixes(
+        check_fixes_unordered(
             r#"
 enum Foo {
     Bar
@@ -175,7 +186,7 @@ use Foo::Bar;
 const C: Foo = Foo::Bar;
 fn main(param: Foo) {
     let local = Foo::Bar;
-    let _: Foo = local;
+    let _: Foo = Bar;
                //^ error: invalid `_` expression, expected type `fn()`
 }
 "#,
@@ -187,7 +198,7 @@ use Foo::Bar;
 const C: Foo = Foo::Bar;
 fn main(param: Foo) {
     let local = Foo::Bar;
-    let _: Foo = param;
+    let _: Foo = local;
                //^ error: invalid `_` expression, expected type `fn()`
 }
 "#,
@@ -199,7 +210,7 @@ use Foo::Bar;
 const C: Foo = Foo::Bar;
 fn main(param: Foo) {
     let local = Foo::Bar;
-    let _: Foo = CP;
+    let _: Foo = param;
                //^ error: invalid `_` expression, expected type `fn()`
 }
 "#,
@@ -211,7 +222,7 @@ use Foo::Bar;
 const C: Foo = Foo::Bar;
 fn main(param: Foo) {
     let local = Foo::Bar;
-    let _: Foo = Bar;
+    let _: Foo = CP;
                //^ error: invalid `_` expression, expected type `fn()`
 }
 "#,
@@ -230,4 +241,153 @@ fn main(param: Foo) {
             ],
         );
     }
+
+    #[test]
+    fn local_item_use_trait() {
+        check_has_fix(
+            r#"
+struct Bar;
+struct Baz;
+trait Foo {
+    fn foo(self) -> Bar;
+}
+impl Foo for Baz {
+    fn foo(self) -> Bar {
+        unimplemented!()
+    }
+}
+fn asd() -> Bar {
+    let a = Baz;
+    _$0
+}
+"#,
+            r"
+struct Bar;
+struct Baz;
+trait Foo {
+    fn foo(self) -> Bar;
+}
+impl Foo for Baz {
+    fn foo(self) -> Bar {
+        unimplemented!()
+    }
+}
+fn asd() -> Bar {
+    let a = Baz;
+    Foo::foo(a)
+}
+",
+        );
+    }
+
+    #[test]
+    fn init_struct() {
+        check_has_fix(
+            r#"struct Abc {}
+struct Qwe { a: i32, b: Abc }
+fn main() {
+    let a: i32 = 1;
+    let c: Qwe = _$0;
+}"#,
+            r#"struct Abc {}
+struct Qwe { a: i32, b: Abc }
+fn main() {
+    let a: i32 = 1;
+    let c: Qwe = Qwe { a: a, b: Abc { } };
+}"#,
+        );
+    }
+
+    #[test]
+    fn ignore_impl_func_with_incorrect_return() {
+        check_has_single_fix(
+            r#"
+struct Bar {}
+trait Foo {
+    type Res;
+    fn foo(&self) -> Self::Res;
+}
+impl Foo for i32 {
+    type Res = Self;
+    fn foo(&self) -> Self::Res { 1 }
+}
+fn main() {
+    let a: i32 = 1;
+    let c: Bar = _$0;
+}"#,
+            r#"
+struct Bar {}
+trait Foo {
+    type Res;
+    fn foo(&self) -> Self::Res;
+}
+impl Foo for i32 {
+    type Res = Self;
+    fn foo(&self) -> Self::Res { 1 }
+}
+fn main() {
+    let a: i32 = 1;
+    let c: Bar = Bar { };
+}"#,
+        );
+    }
+
+    #[test]
+    fn use_impl_func_with_correct_return() {
+        check_has_fix(
+            r#"
+struct Bar {}
+struct A;
+trait Foo {
+    type Res;
+    fn foo(&self) -> Self::Res;
+}
+impl Foo for A {
+    type Res = Bar;
+    fn foo(&self) -> Self::Res { Bar { } }
+}
+fn main() {
+    let a = A;
+    let c: Bar = _$0;
+}"#,
+            r#"
+struct Bar {}
+struct A;
+trait Foo {
+    type Res;
+    fn foo(&self) -> Self::Res;
+}
+impl Foo for A {
+    type Res = Bar;
+    fn foo(&self) -> Self::Res { Bar { } }
+}
+fn main() {
+    let a = A;
+    let c: Bar = Foo::foo(&a);
+}"#,
+        );
+    }
+
+    #[test]
+    fn
local_shadow_fn() { + check_fixes_unordered( + r#" +fn f() { + let f: i32 = 0; + _$0 +}"#, + vec![ + r#" +fn f() { + let f: i32 = 0; + () +}"#, + r#" +fn f() { + let f: i32 = 0; + crate::f() +}"#, + ], + ); + } } diff --git a/crates/ide-diagnostics/src/tests.rs b/crates/ide-diagnostics/src/tests.rs index b62bb5affdd8e..4e4a851f67e0a 100644 --- a/crates/ide-diagnostics/src/tests.rs +++ b/crates/ide-diagnostics/src/tests.rs @@ -91,6 +91,91 @@ fn check_nth_fix_with_config( assert_eq_text!(&after, &actual); } +pub(crate) fn check_fixes_unordered(ra_fixture_before: &str, ra_fixtures_after: Vec<&str>) { + for ra_fixture_after in ra_fixtures_after.iter() { + check_has_fix(ra_fixture_before, ra_fixture_after) + } +} + +#[track_caller] +pub(crate) fn check_has_fix(ra_fixture_before: &str, ra_fixture_after: &str) { + let after = trim_indent(ra_fixture_after); + + let (db, file_position) = RootDatabase::with_position(ra_fixture_before); + let mut conf = DiagnosticsConfig::test_sample(); + conf.expr_fill_default = ExprFillDefaultMode::Default; + let fix = super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id) + .into_iter() + .find(|d| { + d.fixes + .as_ref() + .and_then(|fixes| { + fixes.iter().find(|fix| { + if !fix.target.contains_inclusive(file_position.offset) { + return false; + } + let actual = { + let source_change = fix.source_change.as_ref().unwrap(); + let file_id = *source_change.source_file_edits.keys().next().unwrap(); + let mut actual = db.file_text(file_id).to_string(); + + for (edit, snippet_edit) in source_change.source_file_edits.values() { + edit.apply(&mut actual); + if let Some(snippet_edit) = snippet_edit { + snippet_edit.apply(&mut actual); + } + } + actual + }; + after == actual + }) + }) + .is_some() + }); + assert!(fix.is_some(), "no diagnostic with desired fix"); +} + +#[track_caller] +pub(crate) fn check_has_single_fix(ra_fixture_before: &str, ra_fixture_after: &str) { + let after = trim_indent(ra_fixture_after); + + let (db, file_position) = RootDatabase::with_position(ra_fixture_before); + let mut conf = DiagnosticsConfig::test_sample(); + conf.expr_fill_default = ExprFillDefaultMode::Default; + let mut n_fixes = 0; + let fix = super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id) + .into_iter() + .find(|d| { + d.fixes + .as_ref() + .and_then(|fixes| { + n_fixes += fixes.len(); + fixes.iter().find(|fix| { + if !fix.target.contains_inclusive(file_position.offset) { + return false; + } + let actual = { + let source_change = fix.source_change.as_ref().unwrap(); + let file_id = *source_change.source_file_edits.keys().next().unwrap(); + let mut actual = db.file_text(file_id).to_string(); + + for (edit, snippet_edit) in source_change.source_file_edits.values() { + edit.apply(&mut actual); + if let Some(snippet_edit) = snippet_edit { + snippet_edit.apply(&mut actual); + } + } + actual + }; + after == actual + }) + }) + .is_some() + }); + assert!(fix.is_some(), "no diagnostic with desired fix"); + assert!(n_fixes == 1, "Too many fixes suggested"); +} + /// Checks that there's a diagnostic *without* fix at `$0`. 
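/// (Illustrative usage, editor's sketch: the fixture carries a `$0` cursor
/// marker, e.g. `check_no_fix(r#"fn main() { let _: i32 = _$0; }"#);` passes
/// only if the diagnostic reported at that position offers no quick fix.)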
pub(crate) fn check_no_fix(ra_fixture: &str) { let (db, file_position) = RootDatabase::with_position(ra_fixture); diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs index dbe6a5507cc3e..18821bd78bfac 100644 --- a/crates/ide/src/doc_links.rs +++ b/crates/ide/src/doc_links.rs @@ -501,7 +501,7 @@ fn get_doc_base_urls( let Some(krate) = def.krate(db) else { return Default::default() }; let Some(display_name) = krate.display_name(db) else { return Default::default() }; let crate_data = &db.crate_graph()[krate.into()]; - let channel = crate_data.channel().unwrap_or(ReleaseChannel::Nightly).as_str(); + let channel = db.toolchain_channel(krate.into()).unwrap_or(ReleaseChannel::Nightly).as_str(); let (web_base, local_base) = match &crate_data.origin { // std and co do not specify `html_root_url` any longer so we gotta handwrite this ourself. diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs index 30bfe6ee9dc34..69ddc1e45efbd 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs @@ -7263,8 +7263,8 @@ impl Iterator for S { file_id: FileId( 1, ), - full_range: 6157..6365, - focus_range: 6222..6228, + full_range: 6290..6498, + focus_range: 6355..6361, name: "Future", kind: Trait, container_name: "future", @@ -7277,8 +7277,8 @@ impl Iterator for S { file_id: FileId( 1, ), - full_range: 6995..7461, - focus_range: 7039..7047, + full_range: 7128..7594, + focus_range: 7172..7180, name: "Iterator", kind: Trait, container_name: "iterator", diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs index effdbf2c1f041..3238887257a47 100644 --- a/crates/ide/src/lib.rs +++ b/crates/ide/src/lib.rs @@ -12,11 +12,6 @@ #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] #![recursion_limit = "128"] -#[allow(unused)] -macro_rules! 
eprintln {
-    ($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
-}
-
 #[cfg(test)]
 mod fixture;
 
@@ -258,11 +253,11 @@ impl Analysis {
             Env::default(),
             false,
             CrateOrigin::Local { repo: None, name: None },
-            Err("Analysis::from_single_file has no target layout".into()),
-            None,
         );
         change.change_file(file_id, Some(Arc::from(text)));
         change.set_crate_graph(crate_graph);
+        change.set_target_data_layouts(vec![Err("fixture has no layout".into())]);
+        change.set_toolchains(vec![None]);
         host.apply_change(change);
         (host.analysis(), file_id)
     }
@@ -680,9 +675,8 @@ impl Analysis {
         &self,
         position: FilePosition,
         new_name: &str,
-        rename_external: bool,
     ) -> Cancellable<RenameResult<SourceChange>> {
-        self.with_db(|db| rename::rename(db, position, new_name, rename_external))
+        self.with_db(|db| rename::rename(db, position, new_name))
     }
 
     pub fn prepare_rename(
diff --git a/crates/ide/src/parent_module.rs b/crates/ide/src/parent_module.rs
index 413dbf9c5dfc6..f67aea2d5b9c1 100644
--- a/crates/ide/src/parent_module.rs
+++ b/crates/ide/src/parent_module.rs
@@ -54,7 +54,7 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<NavigationTarget> {
 pub(crate) fn crates_for(db: &RootDatabase, file_id: FileId) -> Vec<CrateId> {
     db.relevant_crates(file_id)
         .iter()
diff --git a/crates/ide/src/rename.rs b/crates/ide/src/rename.rs
index 9fce4bb0f8271..f2eedfa431693 100644
--- a/crates/ide/src/rename.rs
+++ b/crates/ide/src/rename.rs
@@ -84,7 +84,6 @@ pub(crate) fn rename(
     db: &RootDatabase,
     position: FilePosition,
     new_name: &str,
-    rename_external: bool,
 ) -> RenameResult<SourceChange> {
     let sema = Semantics::new(db);
     let source_file = sema.parse(position.file_id);
@@ -104,7 +103,7 @@ pub(crate) fn rename(
                     return rename_to_self(&sema, local);
                 }
             }
-            def.rename(&sema, new_name, rename_external)
+            def.rename(&sema, new_name)
         })
         .collect();
 
@@ -123,9 +122,9 @@ pub(crate) fn will_rename_file(
     let module = sema.to_module_def(file_id)?;
     let def = Definition::Module(module);
     let mut change = if is_raw_identifier(new_name_stem) {
-        def.rename(&sema, &SmolStr::from_iter(["r#", new_name_stem]), true).ok()?
+        def.rename(&sema, &SmolStr::from_iter(["r#", new_name_stem])).ok()?
     } else {
-        def.rename(&sema, new_name_stem, true).ok()?
+        def.rename(&sema, new_name_stem).ok()?
}; change.file_system_edits.clear(); Some(change) @@ -377,16 +376,11 @@ mod tests { use super::{RangeInfo, RenameError}; fn check(new_name: &str, ra_fixture_before: &str, ra_fixture_after: &str) { - check_with_rename_config(new_name, ra_fixture_before, ra_fixture_after, true); + check_with_rename_config(new_name, ra_fixture_before, ra_fixture_after); } #[track_caller] - fn check_with_rename_config( - new_name: &str, - ra_fixture_before: &str, - ra_fixture_after: &str, - rename_external: bool, - ) { + fn check_with_rename_config(new_name: &str, ra_fixture_before: &str, ra_fixture_after: &str) { let ra_fixture_after = &trim_indent(ra_fixture_after); let (analysis, position) = fixture::position(ra_fixture_before); if !ra_fixture_after.starts_with("error: ") { @@ -395,7 +389,7 @@ mod tests { } } let rename_result = analysis - .rename(position, new_name, rename_external) + .rename(position, new_name) .unwrap_or_else(|err| panic!("Rename to '{new_name}' was cancelled: {err}")); match rename_result { Ok(source_change) => { @@ -426,10 +420,8 @@ mod tests { fn check_expect(new_name: &str, ra_fixture: &str, expect: Expect) { let (analysis, position) = fixture::position(ra_fixture); - let source_change = analysis - .rename(position, new_name, true) - .unwrap() - .expect("Expect returned a RenameError"); + let source_change = + analysis.rename(position, new_name).unwrap().expect("Expect returned a RenameError"); expect.assert_eq(&filter_expect(source_change)) } @@ -2636,19 +2628,7 @@ pub struct S; //- /main.rs crate:main deps:lib new_source_root:local use lib::S$0; "#, - "error: Cannot rename a non-local definition as the config for it is disabled", - false, - ); - - check( - "Baz", - r#" -//- /lib.rs crate:lib new_source_root:library -pub struct S; -//- /main.rs crate:main deps:lib new_source_root:local -use lib::S$0; -"#, - "use lib::Baz;\n", + "error: Cannot rename a non-local definition", ); } @@ -2663,8 +2643,7 @@ use core::hash::Hash; #[derive(H$0ash)] struct A; "#, - "error: Cannot rename a non-local definition as the config for it is disabled", - false, + "error: Cannot rename a non-local definition", ); } diff --git a/crates/ide/src/shuffle_crate_graph.rs b/crates/ide/src/shuffle_crate_graph.rs index bf6ad47a49527..453d1836e16e4 100644 --- a/crates/ide/src/shuffle_crate_graph.rs +++ b/crates/ide/src/shuffle_crate_graph.rs @@ -39,8 +39,6 @@ pub(crate) fn shuffle_crate_graph(db: &mut RootDatabase) { data.env.clone(), data.is_proc_macro, data.origin.clone(), - data.target_layout.clone(), - data.toolchain.clone(), ); new_proc_macros.insert(new_id, proc_macros[&old_id].clone()); map.insert(old_id, new_id); diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs index dee5afbf8d9e8..5feaf21aa9795 100644 --- a/crates/ide/src/static_index.rs +++ b/crates/ide/src/static_index.rs @@ -1,14 +1,16 @@ //! This module provides `StaticIndex` which is used for powering //! 
read-only code browsers and emitting LSIF -use hir::{db::HirDatabase, Crate, HirFileIdExt, Module}; +use hir::{db::HirDatabase, Crate, HirFileIdExt, Module, Semantics}; use ide_db::{ base_db::{FileId, FileRange, SourceDatabaseExt}, defs::Definition, + documentation::Documentation, + famous_defs::FamousDefs, helpers::get_definition, FxHashMap, FxHashSet, RootDatabase, }; -use syntax::{AstNode, SyntaxKind::*, TextRange, T}; +use syntax::{AstNode, SyntaxKind::*, SyntaxNode, TextRange, T}; use crate::inlay_hints::InlayFieldsToResolve; use crate::navigation_target::UpmappingResult; @@ -22,7 +24,7 @@ use crate::{ /// A static representation of fully analyzed source code. /// -/// The intended use-case is powering read-only code browsers and emitting LSIF +/// The intended use-case is powering read-only code browsers and emitting LSIF/SCIP. #[derive(Debug)] pub struct StaticIndex<'a> { pub files: Vec, @@ -40,6 +42,7 @@ pub struct ReferenceData { #[derive(Debug)] pub struct TokenStaticData { + pub documentation: Option, pub hover: Option, pub definition: Option, pub references: Vec, @@ -103,6 +106,19 @@ fn all_modules(db: &dyn HirDatabase) -> Vec { modules } +fn documentation_for_definition( + sema: &Semantics<'_, RootDatabase>, + def: Definition, + scope_node: &SyntaxNode, +) -> Option { + let famous_defs = match &def { + Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(scope_node)?.krate())), + _ => None, + }; + + def.docs(sema.db, famous_defs.as_ref()) +} + impl StaticIndex<'_> { fn add_file(&mut self, file_id: FileId) { let current_crate = crates_for(self.db, file_id).pop().map(Into::into); @@ -169,6 +185,7 @@ impl StaticIndex<'_> { *it } else { let it = self.tokens.insert(TokenStaticData { + documentation: documentation_for_definition(&sema, def, &node), hover: hover_for_definition(&sema, file_id, def, &node, &hover_config), definition: def.try_to_nav(self.db).map(UpmappingResult::call_site).map(|it| { FileRange { file_id: it.file_id, range: it.focus_or_full_range() } diff --git a/crates/ide/src/status.rs b/crates/ide/src/status.rs index 3321a0513b6f3..c3d85e38936d9 100644 --- a/crates/ide/src/status.rs +++ b/crates/ide/src/status.rs @@ -72,8 +72,6 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option) -> String { dependencies, origin, is_proc_macro, - target_layout, - toolchain, } = &crate_graph[crate_id]; format_to!( buf, @@ -91,12 +89,6 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option) -> String { format_to!(buf, " Env: {:?}\n", env); format_to!(buf, " Origin: {:?}\n", origin); format_to!(buf, " Is a proc macro crate: {}\n", is_proc_macro); - format_to!(buf, " Workspace Target Layout: {:?}\n", target_layout); - format_to!( - buf, - " Workspace Toolchain: {}\n", - toolchain.as_ref().map_or_else(|| "n/a".into(), |v| v.to_string()) - ); let deps = dependencies .iter() .map(|dep| format!("{}={}", dep.name, dep.crate_id.into_raw())) diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs index c6dc071c394e0..8c5592da63ecd 100644 --- a/crates/load-cargo/src/lib.rs +++ b/crates/load-cargo/src/lib.rs @@ -2,7 +2,7 @@ //! for incorporating changes. // Note, don't remove any public api from this. This API is consumed by external tools // to run rust-analyzer as a library. 
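// Editor's sketch (illustration only, not part of this diff): later in this
// file, `load_crate_graph` fans the workspace-wide toolchain and target data
// layout out to one entry per crate. The per-crate fan-out is just
// `iter::repeat(..).take(n)`, shown here in isolation:
use std::iter;

fn per_crate<T: Clone>(value: T, num_crates: usize) -> Vec<T> {
    // One clone of `value` for every crate in the crate graph.
    iter::repeat(value).take(num_crates).collect()
}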
-use std::{collections::hash_map::Entry, mem, path::Path, sync}; +use std::{collections::hash_map::Entry, iter, mem, path::Path, sync}; use crossbeam_channel::{unbounded, Receiver}; use hir_expand::proc_macro::{ @@ -18,7 +18,6 @@ use itertools::Itertools; use proc_macro_api::{MacroDylib, ProcMacroServer}; use project_model::{CargoConfig, PackageRoot, ProjectManifest, ProjectWorkspace}; use span::Span; -use tt::DelimSpan; use vfs::{file_set::FileSetConfig, loader::Handle, AbsPath, AbsPathBuf, VfsPath}; pub struct LoadCargoConfig { @@ -68,9 +67,9 @@ pub fn load_workspace( let proc_macro_server = match &load_config.with_proc_macro_server { ProcMacroServerChoice::Sysroot => ws .find_sysroot_proc_macro_srv() - .and_then(|it| ProcMacroServer::spawn(it).map_err(Into::into)), + .and_then(|it| ProcMacroServer::spawn(it, extra_env).map_err(Into::into)), ProcMacroServerChoice::Explicit(path) => { - ProcMacroServer::spawn(path.clone()).map_err(Into::into) + ProcMacroServer::spawn(path.clone(), extra_env).map_err(Into::into) } ProcMacroServerChoice::None => Err(anyhow::format_err!("proc macro server disabled")), }; @@ -107,7 +106,7 @@ pub fn load_workspace( .collect() }; - let project_folders = ProjectFolders::new(&[ws], &[]); + let project_folders = ProjectFolders::new(std::slice::from_ref(&ws), &[]); loader.set_config(vfs::loader::Config { load: project_folders.load, watch: vec![], @@ -115,6 +114,7 @@ pub fn load_workspace( }); let host = load_crate_graph( + &ws, crate_graph, proc_macros, project_folders.source_root_config, @@ -273,7 +273,7 @@ impl SourceRootConfig { pub fn load_proc_macro( server: &ProcMacroServer, path: &AbsPath, - dummy_replace: &[Box], + ignored_macros: &[Box], ) -> ProcMacroLoadResult { let res: Result, String> = (|| { let dylib = MacroDylib::new(path.to_path_buf()); @@ -283,7 +283,7 @@ pub fn load_proc_macro( } Ok(vec .into_iter() - .map(|expander| expander_to_proc_macro(expander, dummy_replace)) + .map(|expander| expander_to_proc_macro(expander, ignored_macros)) .collect()) })(); match res { @@ -302,6 +302,7 @@ pub fn load_proc_macro( } fn load_crate_graph( + ws: &ProjectWorkspace, crate_graph: CrateGraph, proc_macros: ProcMacros, source_root_config: SourceRootConfig, @@ -340,8 +341,17 @@ fn load_crate_graph( let source_roots = source_root_config.partition(vfs); analysis_change.set_roots(source_roots); + let num_crates = crate_graph.len(); analysis_change.set_crate_graph(crate_graph); analysis_change.set_proc_macros(proc_macros); + if let ProjectWorkspace::Cargo { toolchain, target_layout, .. } + | ProjectWorkspace::Json { toolchain, target_layout, .. 
} = ws + { + analysis_change.set_target_data_layouts( + iter::repeat(target_layout.clone()).take(num_crates).collect(), + ); + analysis_change.set_toolchains(iter::repeat(toolchain.clone()).take(num_crates).collect()); + } host.apply_change(analysis_change); host @@ -349,7 +359,7 @@ fn load_crate_graph( fn expander_to_proc_macro( expander: proc_macro_api::ProcMacro, - dummy_replace: &[Box], + ignored_macros: &[Box], ) -> ProcMacro { let name = From::from(expander.name()); let kind = match expander.kind() { @@ -357,16 +367,8 @@ fn expander_to_proc_macro( proc_macro_api::ProcMacroKind::FuncLike => ProcMacroKind::FuncLike, proc_macro_api::ProcMacroKind::Attr => ProcMacroKind::Attr, }; - let expander: sync::Arc = - if dummy_replace.iter().any(|replace| **replace == name) { - match kind { - ProcMacroKind::Attr => sync::Arc::new(IdentityExpander), - _ => sync::Arc::new(EmptyExpander), - } - } else { - sync::Arc::new(Expander(expander)) - }; - ProcMacro { name, kind, expander } + let disabled = ignored_macros.iter().any(|replace| **replace == name); + ProcMacro { name, kind, expander: sync::Arc::new(Expander(expander)), disabled } } #[derive(Debug)] @@ -391,42 +393,6 @@ impl ProcMacroExpander for Expander { } } -/// Dummy identity expander, used for attribute proc-macros that are deliberately ignored by the user. -#[derive(Debug)] -struct IdentityExpander; - -impl ProcMacroExpander for IdentityExpander { - fn expand( - &self, - subtree: &tt::Subtree, - _: Option<&tt::Subtree>, - _: &Env, - _: Span, - _: Span, - _: Span, - ) -> Result, ProcMacroExpansionError> { - Ok(subtree.clone()) - } -} - -/// Empty expander, used for proc-macros that are deliberately ignored by the user. -#[derive(Debug)] -struct EmptyExpander; - -impl ProcMacroExpander for EmptyExpander { - fn expand( - &self, - _: &tt::Subtree, - _: Option<&tt::Subtree>, - _: &Env, - call_site: Span, - _: Span, - _: Span, - ) -> Result, ProcMacroExpansionError> { - Ok(tt::Subtree::empty(DelimSpan { open: call_site, close: call_site })) - } -} - #[cfg(test)] mod tests { use ide_db::base_db::SourceDatabase; diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs index 9291f799cca73..6d3055da28608 100644 --- a/crates/mbe/src/expander/transcriber.rs +++ b/crates/mbe/src/expander/transcriber.rs @@ -101,10 +101,20 @@ impl Bindings { }))) } MetaVarKind::Lifetime => { - Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: SmolStr::new_static("'missing"), - span, - }))) + Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree { + delimiter: tt::Delimiter::invisible_spanned(span), + token_trees: Box::new([ + tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { + char: '\'', + span, + spacing: tt::Spacing::Joint, + })), + tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { + text: SmolStr::new_static("missing"), + span, + })), + ]), + })) } MetaVarKind::Literal => { Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index bfc5d197f6834..3c270e30a9ba8 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -700,10 +700,12 @@ impl SynToken { } impl SrcToken, S> for SynToken { - fn kind(&self, ctx: &Converter) -> SyntaxKind { + fn kind(&self, _ctx: &Converter) -> SyntaxKind { match self { SynToken::Ordinary(token) => token.kind(), - SynToken::Punct { .. 
} => SyntaxKind::from_char(self.to_char(ctx).unwrap()).unwrap(), + SynToken::Punct { token, offset: i } => { + SyntaxKind::from_char(token.text().chars().nth(*i).unwrap()).unwrap() + } SynToken::Leaf(_) => { never!(); SyntaxKind::ERROR diff --git a/crates/parser/src/grammar/expressions.rs b/crates/parser/src/grammar/expressions.rs index f40c515fa079d..6b660180f8238 100644 --- a/crates/parser/src/grammar/expressions.rs +++ b/crates/parser/src/grammar/expressions.rs @@ -678,27 +678,38 @@ pub(crate) fn record_expr_field_list(p: &mut Parser<'_>) { attributes::outer_attrs(p); match p.current() { - IDENT | INT_NUMBER => { + IDENT | INT_NUMBER if p.nth_at(1, T![::]) => { // test_err record_literal_missing_ellipsis_recovery // fn main() { // S { S::default() } // } - if p.nth_at(1, T![::]) { - m.abandon(p); - p.expect(T![..]); - expr(p); - } else { + m.abandon(p); + p.expect(T![..]); + expr(p); + } + IDENT | INT_NUMBER => { + if p.nth_at(1, T![..]) { // test_err record_literal_before_ellipsis_recovery // fn main() { // S { field ..S::default() } // } - if p.nth_at(1, T![:]) || p.nth_at(1, T![..]) { + name_ref_or_index(p); + p.error("expected `:`"); + } else { + // test_err record_literal_field_eq_recovery + // fn main() { + // S { field = foo } + // } + if p.nth_at(1, T![:]) { + name_ref_or_index(p); + p.bump(T![:]); + } else if p.nth_at(1, T![=]) { name_ref_or_index(p); - p.expect(T![:]); + p.err_and_bump("expected `:`"); } expr(p); - m.complete(p, RECORD_EXPR_FIELD); } + m.complete(p, RECORD_EXPR_FIELD); } T![.] if p.at(T![..]) => { m.abandon(p); diff --git a/crates/parser/src/grammar/expressions/atom.rs b/crates/parser/src/grammar/expressions/atom.rs index 4197f248e0a97..48600641ad05b 100644 --- a/crates/parser/src/grammar/expressions/atom.rs +++ b/crates/parser/src/grammar/expressions/atom.rs @@ -58,6 +58,7 @@ pub(super) const ATOM_EXPR_FIRST: TokenSet = T![match], T![move], T![return], + T![become], T![static], T![try], T![unsafe], @@ -102,6 +103,7 @@ pub(super) fn atom_expr( T![try] => try_block_expr(p, None), T![match] => match_expr(p), T![return] => return_expr(p), + T![become] => become_expr(p), T![yield] => yield_expr(p), T![do] if p.nth_at_contextual_kw(1, T![yeet]) => yeet_expr(p), T![continue] => continue_expr(p), @@ -621,6 +623,18 @@ fn return_expr(p: &mut Parser<'_>) -> CompletedMarker { m.complete(p, RETURN_EXPR) } +// test become_expr +// fn foo() { +// become foo(); +// } +fn become_expr(p: &mut Parser<'_>) -> CompletedMarker { + assert!(p.at(T![become])); + let m = p.start(); + p.bump(T![become]); + expr(p); + m.complete(p, BECOME_EXPR) +} + // test yield_expr // fn foo() { // yield; diff --git a/crates/parser/src/grammar/generic_params.rs b/crates/parser/src/grammar/generic_params.rs index 3c577aa3cb499..4498daf21a3d8 100644 --- a/crates/parser/src/grammar/generic_params.rs +++ b/crates/parser/src/grammar/generic_params.rs @@ -157,6 +157,16 @@ fn type_bound(p: &mut Parser<'_>) -> bool { p.bump_any(); p.expect(T![const]); } + // test const_trait_bound + // const fn foo(_: impl const Trait) {} + T![const] => { + p.bump_any(); + } + // test async_trait_bound + // fn async_foo(_: impl async Fn(&i32)) {} + T![async] => { + p.bump_any(); + } _ => (), } if paths::is_use_path_start(p) { diff --git a/crates/parser/src/grammar/patterns.rs b/crates/parser/src/grammar/patterns.rs index 39ded41bb2413..5036742337921 100644 --- a/crates/parser/src/grammar/patterns.rs +++ b/crates/parser/src/grammar/patterns.rs @@ -323,6 +323,15 @@ fn record_pat_field(p: &mut Parser<'_>) { p.bump(T![:]); 
pattern(p); } + // test_err record_pat_field_eq_recovery + // fn main() { + // let S { field = foo }; + // } + IDENT | INT_NUMBER if p.nth(1) == T![=] => { + name_ref_or_index(p); + p.err_and_bump("expected `:`"); + pattern(p); + } T![box] => { // FIXME: not all box patterns should be allowed box_pat(p); diff --git a/crates/parser/src/syntax_kind/generated.rs b/crates/parser/src/syntax_kind/generated.rs index 4b589037672f4..6ecfdc9f4664c 100644 --- a/crates/parser/src/syntax_kind/generated.rs +++ b/crates/parser/src/syntax_kind/generated.rs @@ -90,6 +90,7 @@ pub enum SyntaxKind { PUB_KW, REF_KW, RETURN_KW, + BECOME_KW, SELF_KW, SELF_TYPE_KW, STATIC_KW, @@ -195,6 +196,7 @@ pub enum SyntaxKind { BLOCK_EXPR, STMT_LIST, RETURN_EXPR, + BECOME_EXPR, YIELD_EXPR, YEET_EXPR, LET_EXPR, @@ -307,6 +309,7 @@ impl SyntaxKind { | PUB_KW | REF_KW | RETURN_KW + | BECOME_KW | SELF_KW | SELF_TYPE_KW | STATIC_KW @@ -425,6 +428,7 @@ impl SyntaxKind { "pub" => PUB_KW, "ref" => REF_KW, "return" => RETURN_KW, + "become" => BECOME_KW, "self" => SELF_KW, "Self" => SELF_TYPE_KW, "static" => STATIC_KW, @@ -496,4 +500,4 @@ impl SyntaxKind { } } #[macro_export] -macro_rules ! T { [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] => { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] 
=> { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [do] => { $ crate :: SyntaxKind :: DO_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] => { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; [macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ crate :: SyntaxKind :: MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: SyntaxKind :: YIELD_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [builtin] => { $ crate :: SyntaxKind :: BUILTIN_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [existential] => { $ crate :: SyntaxKind :: EXISTENTIAL_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [macro_rules] => { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; [yeet] => { $ crate :: SyntaxKind :: YEET_KW } ; [offset_of] => { $ crate :: SyntaxKind :: OFFSET_OF_KW } ; [asm] => { $ crate :: SyntaxKind 
:: ASM_KW } ; [format_args] => { $ crate :: SyntaxKind :: FORMAT_ARGS_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; } +macro_rules ! T { [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] => { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] => { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [do] => { $ crate :: SyntaxKind :: DO_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] 
=> { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; [macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ crate :: SyntaxKind :: MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [become] => { $ crate :: SyntaxKind :: BECOME_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: SyntaxKind :: YIELD_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [builtin] => { $ crate :: SyntaxKind :: BUILTIN_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [existential] => { $ crate :: SyntaxKind :: EXISTENTIAL_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [macro_rules] => { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; [yeet] => { $ crate :: SyntaxKind :: YEET_KW } ; [offset_of] => { $ crate :: SyntaxKind :: OFFSET_OF_KW } ; [asm] => { $ crate :: SyntaxKind :: ASM_KW } ; [format_args] => { $ crate :: SyntaxKind :: FORMAT_ARGS_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; } diff --git a/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast b/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast index f511960040d5f..741b7845e7f14 100644 --- a/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast +++ b/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast @@ -24,26 +24,26 @@ SOURCE_FILE RECORD_EXPR_FIELD NAME_REF IDENT "field" - WHITESPACE " " - RANGE_EXPR - DOT2 ".." - CALL_EXPR - PATH_EXPR - PATH - PATH - PATH_SEGMENT - NAME_REF - IDENT "S" - COLON2 "::" - PATH_SEGMENT - NAME_REF - IDENT "default" - ARG_LIST - L_PAREN "(" - R_PAREN ")" + WHITESPACE " " + DOT2 ".." 
+ CALL_EXPR + PATH_EXPR + PATH + PATH + PATH_SEGMENT + NAME_REF + IDENT "S" + COLON2 "::" + PATH_SEGMENT + NAME_REF + IDENT "default" + ARG_LIST + L_PAREN "(" + R_PAREN ")" WHITESPACE " " R_CURLY "}" WHITESPACE "\n" R_CURLY "}" WHITESPACE "\n" -error 25: expected COLON +error 25: expected `:` +error 25: expected COMMA diff --git a/crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rast b/crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rast new file mode 100644 index 0000000000000..ad4deeb0b67c9 --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rast @@ -0,0 +1,41 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "main" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + WHITESPACE "\n " + RECORD_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "S" + WHITESPACE " " + RECORD_EXPR_FIELD_LIST + L_CURLY "{" + WHITESPACE " " + RECORD_EXPR_FIELD + NAME_REF + IDENT "field" + WHITESPACE " " + ERROR + EQ "=" + WHITESPACE " " + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "foo" + WHITESPACE " " + R_CURLY "}" + WHITESPACE "\n" + R_CURLY "}" + WHITESPACE "\n" +error 26: expected `:` diff --git a/crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rs b/crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rs new file mode 100644 index 0000000000000..1eb1aa9b92642 --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rs @@ -0,0 +1,3 @@ +fn main() { + S { field = foo } +} diff --git a/crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rast b/crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rast new file mode 100644 index 0000000000000..6940a84b68302 --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rast @@ -0,0 +1,43 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "main" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + WHITESPACE "\n " + LET_STMT + LET_KW "let" + WHITESPACE " " + RECORD_PAT + PATH + PATH_SEGMENT + NAME_REF + IDENT "S" + WHITESPACE " " + RECORD_PAT_FIELD_LIST + L_CURLY "{" + WHITESPACE " " + RECORD_PAT_FIELD + NAME_REF + IDENT "field" + WHITESPACE " " + ERROR + EQ "=" + WHITESPACE " " + IDENT_PAT + NAME + IDENT "foo" + WHITESPACE " " + R_CURLY "}" + SEMICOLON ";" + WHITESPACE "\n" + R_CURLY "}" + WHITESPACE "\n" +error 30: expected `:` diff --git a/crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rs b/crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rs new file mode 100644 index 0000000000000..c4949d6e12e7a --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rs @@ -0,0 +1,3 @@ +fn main() { + let S { field = foo }; +} diff --git a/crates/parser/test_data/parser/inline/ok/0209_become_expr.rast b/crates/parser/test_data/parser/inline/ok/0209_become_expr.rast new file mode 100644 index 0000000000000..c544cf4e5e3ef --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0209_become_expr.rast @@ -0,0 +1,31 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "foo" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + WHITESPACE "\n " + EXPR_STMT + BECOME_EXPR + BECOME_KW "become" + WHITESPACE " " + 
CALL_EXPR + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "foo" + ARG_LIST + L_PAREN "(" + R_PAREN ")" + SEMICOLON ";" + WHITESPACE "\n" + R_CURLY "}" + WHITESPACE "\n" diff --git a/crates/parser/test_data/parser/inline/ok/0209_become_expr.rs b/crates/parser/test_data/parser/inline/ok/0209_become_expr.rs new file mode 100644 index 0000000000000..918a83ca6e83e --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0209_become_expr.rs @@ -0,0 +1,3 @@ +fn foo() { + become foo(); +} diff --git a/crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rast b/crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rast new file mode 100644 index 0000000000000..ebf758286a7c2 --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rast @@ -0,0 +1,43 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "async_foo" + PARAM_LIST + L_PAREN "(" + PARAM + WILDCARD_PAT + UNDERSCORE "_" + COLON ":" + WHITESPACE " " + IMPL_TRAIT_TYPE + IMPL_KW "impl" + WHITESPACE " " + TYPE_BOUND_LIST + TYPE_BOUND + ASYNC_KW "async" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Fn" + PARAM_LIST + L_PAREN "(" + PARAM + REF_TYPE + AMP "&" + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "i32" + R_PAREN ")" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + R_CURLY "}" + WHITESPACE "\n" diff --git a/crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rs b/crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rs new file mode 100644 index 0000000000000..04d44175d778d --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rs @@ -0,0 +1 @@ +fn async_foo(_: impl async Fn(&i32)) {} diff --git a/crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rast b/crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rast new file mode 100644 index 0000000000000..646873881bcb0 --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rast @@ -0,0 +1,34 @@ +SOURCE_FILE + FN + CONST_KW "const" + WHITESPACE " " + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "foo" + PARAM_LIST + L_PAREN "(" + PARAM + WILDCARD_PAT + UNDERSCORE "_" + COLON ":" + WHITESPACE " " + IMPL_TRAIT_TYPE + IMPL_KW "impl" + WHITESPACE " " + TYPE_BOUND_LIST + TYPE_BOUND + CONST_KW "const" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Trait" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + R_CURLY "}" + WHITESPACE "\n" diff --git a/crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rs b/crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rs new file mode 100644 index 0000000000000..8eb8f84c91f45 --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rs @@ -0,0 +1 @@ +const fn foo(_: impl const Trait) {} diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs index 1dadfc40ac431..6b16711a8d87b 100644 --- a/crates/proc-macro-api/src/lib.rs +++ b/crates/proc-macro-api/src/lib.rs @@ -13,6 +13,7 @@ mod version; use indexmap::IndexSet; use paths::AbsPathBuf; +use rustc_hash::FxHashMap; use span::Span; use std::{ fmt, io, @@ -107,8 +108,11 @@ pub struct MacroPanic { impl ProcMacroServer { /// Spawns an external process as the proc macro server and returns a client connected to it. 
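// Editor's sketch (simplified; the function name and the slice-based env type
// are hypothetical -- the real change threads a `FxHashMap` through
// `ProcMacroProcessSrv::run`): the hunk below forwards the caller's extra
// environment variables to the spawned proc-macro server. Stripped to its
// core, the spawn is a `Command` with the extra environment applied:
use std::io;
use std::path::Path;
use std::process::{Child, Command, Stdio};

fn spawn_proc_macro_srv(path: &Path, extra_env: &[(String, String)]) -> io::Result<Child> {
    Command::new(path)
        .envs(extra_env.iter().map(|(k, v)| (k.as_str(), v.as_str())))
        // Requests and responses travel over the child's stdio.
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .spawn()
}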
- pub fn spawn(process_path: AbsPathBuf) -> io::Result { - let process = ProcMacroProcessSrv::run(process_path)?; + pub fn spawn( + process_path: AbsPathBuf, + env: &FxHashMap, + ) -> io::Result { + let process = ProcMacroProcessSrv::run(process_path, env)?; Ok(ProcMacroServer { process: Arc::new(Mutex::new(process)) }) } diff --git a/crates/proc-macro-api/src/process.rs b/crates/proc-macro-api/src/process.rs index 96f97bf5e205e..12eafcea442d3 100644 --- a/crates/proc-macro-api/src/process.rs +++ b/crates/proc-macro-api/src/process.rs @@ -7,6 +7,7 @@ use std::{ }; use paths::{AbsPath, AbsPathBuf}; +use rustc_hash::FxHashMap; use stdx::JodChild; use crate::{ @@ -26,9 +27,12 @@ pub(crate) struct ProcMacroProcessSrv { } impl ProcMacroProcessSrv { - pub(crate) fn run(process_path: AbsPathBuf) -> io::Result { + pub(crate) fn run( + process_path: AbsPathBuf, + env: &FxHashMap, + ) -> io::Result { let create_srv = |null_stderr| { - let mut process = Process::run(process_path.clone(), null_stderr)?; + let mut process = Process::run(process_path.clone(), env, null_stderr)?; let (stdin, stdout) = process.stdio().expect("couldn't access child stdio"); io::Result::Ok(ProcMacroProcessSrv { @@ -147,8 +151,12 @@ struct Process { } impl Process { - fn run(path: AbsPathBuf, null_stderr: bool) -> io::Result { - let child = JodChild(mk_child(&path, null_stderr)?); + fn run( + path: AbsPathBuf, + env: &FxHashMap, + null_stderr: bool, + ) -> io::Result { + let child = JodChild(mk_child(&path, env, null_stderr)?); Ok(Process { child }) } @@ -161,9 +169,14 @@ impl Process { } } -fn mk_child(path: &AbsPath, null_stderr: bool) -> io::Result { +fn mk_child( + path: &AbsPath, + env: &FxHashMap, + null_stderr: bool, +) -> io::Result { let mut cmd = Command::new(path.as_os_str()); - cmd.env("RUST_ANALYZER_INTERNALS_DO_NOT_USE", "this is unstable") + cmd.envs(env) + .env("RUST_ANALYZER_INTERNALS_DO_NOT_USE", "this is unstable") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(if null_stderr { Stdio::null() } else { Stdio::inherit() }); diff --git a/crates/proc-macro-srv/Cargo.toml b/crates/proc-macro-srv/Cargo.toml index ba17ea6f7b439..bd7a31654584f 100644 --- a/crates/proc-macro-srv/Cargo.toml +++ b/crates/proc-macro-srv/Cargo.toml @@ -29,6 +29,7 @@ paths.workspace = true base-db.workspace = true span.workspace = true proc-macro-api.workspace = true +ra-ap-rustc_lexer.workspace = true [dev-dependencies] expect-test = "1.4.0" diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs index 460a96c07f367..831632c64c0a2 100644 --- a/crates/proc-macro-srv/src/lib.rs +++ b/crates/proc-macro-srv/src/lib.rs @@ -20,6 +20,11 @@ extern crate proc_macro; #[cfg(feature = "in-rust-tree")] extern crate rustc_driver as _; +#[cfg(not(feature = "in-rust-tree"))] +extern crate ra_ap_rustc_lexer as rustc_lexer; +#[cfg(feature = "in-rust-tree")] +extern crate rustc_lexer; + mod dylib; mod proc_macros; mod server; diff --git a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs index 8a9d52a37a2f3..c6a0a6665553f 100644 --- a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs +++ b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs @@ -70,11 +70,58 @@ impl server::FreeFunctions for RaSpanServer { &mut self, s: &str, ) -> Result, ()> { - // FIXME: keep track of LitKind and Suffix + use proc_macro::bridge::LitKind; + use rustc_lexer::{LiteralKind, Token, TokenKind}; + + let mut tokens = rustc_lexer::tokenize(s); + let minus_or_lit = 
tokens.next().unwrap_or(Token { kind: TokenKind::Eof, len: 0 }); + + let lit = if minus_or_lit.kind == TokenKind::Minus { + let lit = tokens.next().ok_or(())?; + if !matches!( + lit.kind, + TokenKind::Literal { + kind: LiteralKind::Int { .. } | LiteralKind::Float { .. }, + .. + } + ) { + return Err(()); + } + lit + } else { + minus_or_lit + }; + + if tokens.next().is_some() { + return Err(()); + } + + let TokenKind::Literal { kind, suffix_start } = lit.kind else { return Err(()) }; + let kind = match kind { + LiteralKind::Int { .. } => LitKind::Integer, + LiteralKind::Float { .. } => LitKind::Float, + LiteralKind::Char { .. } => LitKind::Char, + LiteralKind::Byte { .. } => LitKind::Byte, + LiteralKind::Str { .. } => LitKind::Str, + LiteralKind::ByteStr { .. } => LitKind::ByteStr, + LiteralKind::CStr { .. } => LitKind::CStr, + LiteralKind::RawStr { n_hashes } => LitKind::StrRaw(n_hashes.unwrap_or_default()), + LiteralKind::RawByteStr { n_hashes } => { + LitKind::ByteStrRaw(n_hashes.unwrap_or_default()) + } + LiteralKind::RawCStr { n_hashes } => LitKind::CStrRaw(n_hashes.unwrap_or_default()), + }; + + let (lit, suffix) = s.split_at(suffix_start as usize); + let suffix = match suffix { + "" | "_" => None, + suffix => Some(Symbol::intern(self.interner, suffix)), + }; + Ok(bridge::Literal { - kind: bridge::LitKind::Integer, // dummy - symbol: Symbol::intern(self.interner, s), - suffix: None, + kind, + symbol: Symbol::intern(self.interner, lit), + suffix, span: self.call_site, }) } diff --git a/crates/proc-macro-srv/src/server/token_id.rs b/crates/proc-macro-srv/src/server/token_id.rs index 15a9e0deae44f..7e9d8057ac9a5 100644 --- a/crates/proc-macro-srv/src/server/token_id.rs +++ b/crates/proc-macro-srv/src/server/token_id.rs @@ -62,11 +62,58 @@ impl server::FreeFunctions for TokenIdServer { &mut self, s: &str, ) -> Result, ()> { - // FIXME: keep track of LitKind and Suffix + use proc_macro::bridge::LitKind; + use rustc_lexer::{LiteralKind, Token, TokenKind}; + + let mut tokens = rustc_lexer::tokenize(s); + let minus_or_lit = tokens.next().unwrap_or(Token { kind: TokenKind::Eof, len: 0 }); + + let lit = if minus_or_lit.kind == TokenKind::Minus { + let lit = tokens.next().ok_or(())?; + if !matches!( + lit.kind, + TokenKind::Literal { + kind: LiteralKind::Int { .. } | LiteralKind::Float { .. }, + .. + } + ) { + return Err(()); + } + lit + } else { + minus_or_lit + }; + + if tokens.next().is_some() { + return Err(()); + } + + let TokenKind::Literal { kind, suffix_start } = lit.kind else { return Err(()) }; + let kind = match kind { + LiteralKind::Int { .. } => LitKind::Integer, + LiteralKind::Float { .. } => LitKind::Float, + LiteralKind::Char { .. } => LitKind::Char, + LiteralKind::Byte { .. } => LitKind::Byte, + LiteralKind::Str { .. } => LitKind::Str, + LiteralKind::ByteStr { .. } => LitKind::ByteStr, + LiteralKind::CStr { .. 
} => LitKind::CStr, + LiteralKind::RawStr { n_hashes } => LitKind::StrRaw(n_hashes.unwrap_or_default()), + LiteralKind::RawByteStr { n_hashes } => { + LitKind::ByteStrRaw(n_hashes.unwrap_or_default()) + } + LiteralKind::RawCStr { n_hashes } => LitKind::CStrRaw(n_hashes.unwrap_or_default()), + }; + + let (lit, suffix) = s.split_at(suffix_start as usize); + let suffix = match suffix { + "" | "_" => None, + suffix => Some(Symbol::intern(self.interner, suffix)), + }; + Ok(bridge::Literal { - kind: bridge::LitKind::Integer, // dummy - symbol: Symbol::intern(self.interner, s), - suffix: None, + kind, + symbol: Symbol::intern(self.interner, lit), + suffix, span: self.call_site, }) } diff --git a/crates/proc-macro-srv/src/tests/mod.rs b/crates/proc-macro-srv/src/tests/mod.rs index 87d832cc76fa0..e5bfe5ee92cd8 100644 --- a/crates/proc-macro-srv/src/tests/mod.rs +++ b/crates/proc-macro-srv/src/tests/mod.rs @@ -169,8 +169,8 @@ fn test_fn_like_mk_idents() { fn test_fn_like_macro_clone_literals() { assert_expand( "fn_like_clone_tokens", - r#"1u16, 2_u32, -4i64, 3.14f32, "hello bridge""#, - expect![[r#" + r###"1u16, 2_u32, -4i64, 3.14f32, "hello bridge", "suffixed"suffix, r##"raw"##"###, + expect![[r###" SUBTREE $$ 1 1 LITERAL 1u16 1 PUNCH , [alone] 1 @@ -181,8 +181,12 @@ fn test_fn_like_macro_clone_literals() { PUNCH , [alone] 1 LITERAL 3.14f32 1 PUNCH , [alone] 1 - LITERAL "hello bridge" 1"#]], - expect![[r#" + LITERAL ""hello bridge"" 1 + PUNCH , [alone] 1 + LITERAL ""suffixed""suffix 1 + PUNCH , [alone] 1 + LITERAL r##"r##"raw"##"## 1"###]], + expect![[r###" SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } LITERAL 1u16 SpanData { range: 0..4, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } PUNCH , [alone] SpanData { range: 4..5, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } @@ -193,7 +197,11 @@ fn test_fn_like_macro_clone_literals() { PUNCH , [alone] SpanData { range: 18..19, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } LITERAL 3.14f32 SpanData { range: 20..27, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } PUNCH , [alone] SpanData { range: 27..28, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } - LITERAL "hello bridge" SpanData { range: 29..43, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]], + LITERAL ""hello bridge"" SpanData { range: 29..43, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 43..44, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL ""suffixed""suffix SpanData { range: 45..61, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 61..62, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL r##"r##"raw"##"## SpanData { range: 63..73, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"###]], ); } diff --git a/crates/project-model/src/build_scripts.rs b/crates/project-model/src/build_scripts.rs index a2c9856a3f733..ab72f1fba09dd 100644 --- a/crates/project-model/src/build_scripts.rs +++ b/crates/project-model/src/build_scripts.rs @@ -20,10 +20,11 @@ use paths::{AbsPath, AbsPathBuf}; use rustc_hash::{FxHashMap, FxHashSet}; use semver::Version; use serde::Deserialize; +use toolchain::Tool; use crate::{ cfg_flag::CfgFlag, utf8_stdout, CargoConfig, CargoFeatures, CargoWorkspace, InvocationLocation, - InvocationStrategy, Package, + 
InvocationStrategy, Package, Sysroot, TargetKind, }; #[derive(Debug, Default, Clone, PartialEq, Eq)] @@ -61,6 +62,7 @@ impl WorkspaceBuildScripts { config: &CargoConfig, allowed_features: &FxHashSet, workspace_root: &AbsPathBuf, + sysroot: Option<&Sysroot>, ) -> io::Result { let mut cmd = match config.run_build_script_command.as_deref() { Some([program, args @ ..]) => { @@ -69,7 +71,8 @@ impl WorkspaceBuildScripts { cmd } _ => { - let mut cmd = Command::new(toolchain::cargo()); + let mut cmd = Command::new(Tool::Cargo.path()); + Sysroot::set_rustup_toolchain_env(&mut cmd, sysroot); cmd.args(["check", "--quiet", "--workspace", "--message-format=json"]); cmd.args(&config.extra_args); @@ -133,6 +136,7 @@ impl WorkspaceBuildScripts { workspace: &CargoWorkspace, progress: &dyn Fn(String), toolchain: &Option, + sysroot: Option<&Sysroot>, ) -> io::Result { const RUST_1_62: Version = Version::new(1, 62, 0); @@ -151,6 +155,7 @@ impl WorkspaceBuildScripts { config, &allowed_features, &workspace.workspace_root().to_path_buf(), + sysroot, )?, workspace, current_dir, @@ -165,6 +170,7 @@ impl WorkspaceBuildScripts { config, &allowed_features, &workspace.workspace_root().to_path_buf(), + sysroot, )?; cmd.args(["-Z", "unstable-options", "--keep-going"]).env("RUSTC_BOOTSTRAP", "1"); let mut res = Self::run_per_ws(cmd, workspace, current_dir, progress)?; @@ -194,7 +200,7 @@ impl WorkspaceBuildScripts { )) } }; - let cmd = Self::build_command(config, &Default::default(), workspace_root)?; + let cmd = Self::build_command(config, &Default::default(), workspace_root, None)?; // NB: Cargo.toml could have been modified between `cargo metadata` and // `cargo check`. We shouldn't assume that package ids we see here are // exactly those from `config`. @@ -415,6 +421,7 @@ impl WorkspaceBuildScripts { rustc: &CargoWorkspace, current_dir: &AbsPath, extra_env: &FxHashMap, + sysroot: Option<&Sysroot>, ) -> Self { let mut bs = WorkspaceBuildScripts::default(); for p in rustc.packages() { @@ -422,7 +429,8 @@ impl WorkspaceBuildScripts { } let res = (|| { let target_libdir = (|| { - let mut cargo_config = Command::new(toolchain::cargo()); + let mut cargo_config = Command::new(Tool::Cargo.path()); + Sysroot::set_rustup_toolchain_env(&mut cargo_config, sysroot); cargo_config.envs(extra_env); cargo_config .current_dir(current_dir) @@ -431,7 +439,8 @@ impl WorkspaceBuildScripts { if let Ok(it) = utf8_stdout(cargo_config) { return Ok(it); } - let mut cmd = Command::new(toolchain::rustc()); + let mut cmd = Command::new(Tool::Rustc.path()); + Sysroot::set_rustup_toolchain_env(&mut cmd, sysroot); cmd.envs(extra_env); cmd.args(["--print", "target-libdir"]); utf8_stdout(cmd) @@ -458,7 +467,11 @@ impl WorkspaceBuildScripts { .collect(); for p in rustc.packages() { let package = &rustc[p]; - if package.targets.iter().any(|&it| rustc[it].is_proc_macro) { + if package + .targets + .iter() + .any(|&it| matches!(rustc[it].kind, TargetKind::Lib { is_proc_macro: true })) + { if let Some((_, path)) = proc_macro_dylibs .iter() .find(|(name, _)| *name.trim_start_matches("lib") == package.name) diff --git a/crates/project-model/src/cargo_workspace.rs b/crates/project-model/src/cargo_workspace.rs index a99ee6e664c5f..08d86fd7b0fee 100644 --- a/crates/project-model/src/cargo_workspace.rs +++ b/crates/project-model/src/cargo_workspace.rs @@ -12,8 +12,9 @@ use paths::{AbsPath, AbsPathBuf}; use rustc_hash::{FxHashMap, FxHashSet}; use serde::Deserialize; use serde_json::from_value; +use toolchain::Tool; -use crate::{utf8_stdout, InvocationLocation, 
ManifestPath}; +use crate::{utf8_stdout, InvocationLocation, ManifestPath, Sysroot}; use crate::{CfgOverrides, InvocationStrategy}; /// [`CargoWorkspace`] represents the logical structure of, well, a Cargo @@ -188,8 +189,6 @@ pub struct TargetData { pub root: AbsPathBuf, /// Kind of target pub kind: TargetKind, - /// Is this target a proc-macro - pub is_proc_macro: bool, /// Required features of the target without which it won't build pub required_features: Vec, } @@ -198,7 +197,10 @@ pub struct TargetData { pub enum TargetKind { Bin, /// Any kind of Cargo lib crate-type (dylib, rlib, proc-macro, ...). - Lib, + Lib { + /// Is this target a proc-macro + is_proc_macro: bool, + }, Example, Test, Bench, @@ -215,8 +217,8 @@ impl TargetKind { "bench" => TargetKind::Bench, "example" => TargetKind::Example, "custom-build" => TargetKind::BuildScript, - "proc-macro" => TargetKind::Lib, - _ if kind.contains("lib") => TargetKind::Lib, + "proc-macro" => TargetKind::Lib { is_proc_macro: true }, + _ if kind.contains("lib") => TargetKind::Lib { is_proc_macro: false }, _ => continue, }; } @@ -236,12 +238,13 @@ impl CargoWorkspace { cargo_toml: &ManifestPath, current_dir: &AbsPath, config: &CargoConfig, + sysroot: Option<&Sysroot>, progress: &dyn Fn(String), ) -> anyhow::Result { - let targets = find_list_of_build_targets(config, cargo_toml); + let targets = find_list_of_build_targets(config, cargo_toml, sysroot); let mut meta = MetadataCommand::new(); - meta.cargo_path(toolchain::cargo()); + meta.cargo_path(Tool::Cargo.path()); meta.manifest_path(cargo_toml.to_path_buf()); match &config.features { CargoFeatures::All => { @@ -289,6 +292,7 @@ impl CargoWorkspace { (|| -> Result { let mut command = meta.cargo_command(); + Sysroot::set_rustup_toolchain_env(&mut command, sysroot); command.envs(&config.extra_env); let output = command.output()?; if !output.status.success() { @@ -368,7 +372,6 @@ impl CargoWorkspace { name, root: AbsPathBuf::assert(src_path.into()), kind: TargetKind::new(&kind), - is_proc_macro: *kind == ["proc-macro"], required_features, }); pkg_data.targets.push(tgt); @@ -476,24 +479,30 @@ impl CargoWorkspace { } } -fn find_list_of_build_targets(config: &CargoConfig, cargo_toml: &ManifestPath) -> Vec { +fn find_list_of_build_targets( + config: &CargoConfig, + cargo_toml: &ManifestPath, + sysroot: Option<&Sysroot>, +) -> Vec { if let Some(target) = &config.target { return [target.into()].to_vec(); } - let build_targets = cargo_config_build_target(cargo_toml, &config.extra_env); + let build_targets = cargo_config_build_target(cargo_toml, &config.extra_env, sysroot); if !build_targets.is_empty() { return build_targets; } - rustc_discover_host_triple(cargo_toml, &config.extra_env).into_iter().collect() + rustc_discover_host_triple(cargo_toml, &config.extra_env, sysroot).into_iter().collect() } fn rustc_discover_host_triple( cargo_toml: &ManifestPath, extra_env: &FxHashMap, + sysroot: Option<&Sysroot>, ) -> Option { - let mut rustc = Command::new(toolchain::rustc()); + let mut rustc = Command::new(Tool::Rustc.path()); + Sysroot::set_rustup_toolchain_env(&mut rustc, sysroot); rustc.envs(extra_env); rustc.current_dir(cargo_toml.parent()).arg("-vV"); tracing::debug!("Discovering host platform by {:?}", rustc); @@ -519,8 +528,10 @@ fn rustc_discover_host_triple( fn cargo_config_build_target( cargo_toml: &ManifestPath, extra_env: &FxHashMap, + sysroot: Option<&Sysroot>, ) -> Vec { - let mut cargo_config = Command::new(toolchain::cargo()); + let mut cargo_config = Command::new(Tool::Cargo.path()); + 
Sysroot::set_rustup_toolchain_env(&mut cargo_config, sysroot);
     cargo_config.envs(extra_env);
     cargo_config
         .current_dir(cargo_toml.parent())
diff --git a/crates/project-model/src/project_json.rs b/crates/project-model/src/project_json.rs
index cf3231498f3e7..fba0aaa8ce9f4 100644
--- a/crates/project-model/src/project_json.rs
+++ b/crates/project-model/src/project_json.rs
@@ -49,7 +49,7 @@
 //! user explores them belongs to that extension (it's totally valid to change
 //! rust-project.json over time via configuration request!)
 
-use base_db::{CrateDisplayName, CrateId, CrateName, Dependency, DependencyKind, Edition};
+use base_db::{CrateDisplayName, CrateId, CrateName, Dependency, Edition};
 use la_arena::RawIdx;
 use paths::{AbsPath, AbsPathBuf};
 use rustc_hash::FxHashMap;
@@ -135,7 +135,6 @@ impl ProjectJson {
                         Dependency::new(
                             dep_data.name,
                             CrateId::from_raw(RawIdx::from(dep_data.krate as u32)),
-                            DependencyKind::Normal,
                         )
                     })
                     .collect::<Vec<_>>(),
diff --git a/crates/project-model/src/rustc_cfg.rs b/crates/project-model/src/rustc_cfg.rs
index 0aee002fbb3ff..1ad6e7255bf10 100644
--- a/crates/project-model/src/rustc_cfg.rs
+++ b/crates/project-model/src/rustc_cfg.rs
@@ -8,17 +8,13 @@ use rustc_hash::FxHashMap;
 use crate::{cfg_flag::CfgFlag, utf8_stdout, ManifestPath, Sysroot};
 
 /// Determines how `rustc --print cfg` is discovered and invoked.
-///
-/// There options are supported:
-/// - [`RustcCfgConfig::Cargo`], which relies on `cargo rustc --print cfg`
-///   and `RUSTC_BOOTSTRAP`.
-/// - [`RustcCfgConfig::Explicit`], which uses an explicit path to the `rustc`
-///   binary in the sysroot.
-/// - [`RustcCfgConfig::Discover`], which uses [`toolchain::rustc`].
 pub(crate) enum RustcCfgConfig<'a> {
-    Cargo(&'a ManifestPath),
-    Explicit(&'a Sysroot),
-    Discover,
+    /// Use `rustc --print cfg`, either with the binary from the sysroot or by discovering it via
+    /// [`toolchain::rustc`].
+    Rustc(Option<&'a Sysroot>),
+    /// Use `cargo rustc --print cfg`, either with the binary from the sysroot or by discovering it via
+    /// [`toolchain::cargo`].
+ Cargo(Option<&'a Sysroot>, &'a ManifestPath), } pub(crate) fn get( @@ -71,9 +67,10 @@ fn get_rust_cfgs( extra_env: &FxHashMap, config: RustcCfgConfig<'_>, ) -> anyhow::Result { - let mut cmd = match config { - RustcCfgConfig::Cargo(cargo_toml) => { - let mut cmd = Command::new(toolchain::cargo()); + let sysroot = match config { + RustcCfgConfig::Cargo(sysroot, cargo_toml) => { + let mut cmd = Command::new(toolchain::Tool::Cargo.path()); + Sysroot::set_rustup_toolchain_env(&mut cmd, sysroot); cmd.envs(extra_env); cmd.current_dir(cargo_toml.parent()) .args(["rustc", "-Z", "unstable-options", "--print", "cfg"]) @@ -82,25 +79,24 @@ fn get_rust_cfgs( cmd.args(["--target", target]); } - return utf8_stdout(cmd).context("Unable to run `cargo rustc`"); - } - RustcCfgConfig::Explicit(sysroot) => { - let rustc: std::path::PathBuf = sysroot.discover_rustc()?.into(); - tracing::debug!(?rustc, "using explicit rustc from sysroot"); - Command::new(rustc) - } - RustcCfgConfig::Discover => { - let rustc = toolchain::rustc(); - tracing::debug!(?rustc, "using rustc from env"); - Command::new(rustc) + match utf8_stdout(cmd) { + Ok(it) => return Ok(it), + Err(e) => { + tracing::warn!("failed to run `cargo rustc --print cfg`, falling back to invoking rustc directly: {e}"); + sysroot + } + } } + RustcCfgConfig::Rustc(sysroot) => sysroot, }; + let mut cmd = Command::new(toolchain::Tool::Rustc.path()); + Sysroot::set_rustup_toolchain_env(&mut cmd, sysroot); cmd.envs(extra_env); cmd.args(["--print", "cfg", "-O"]); if let Some(target) = target { cmd.args(["--target", target]); } - utf8_stdout(cmd).context("Unable to run `rustc`") + utf8_stdout(cmd).context("unable to fetch cfgs via `rustc --print cfg -O`") } diff --git a/crates/project-model/src/sysroot.rs b/crates/project-model/src/sysroot.rs index 9e19a5258388f..07cfaba2d2ca2 100644 --- a/crates/project-model/src/sysroot.rs +++ b/crates/project-model/src/sysroot.rs @@ -4,24 +4,38 @@ //! but we can't process `.rlib` and need source code instead. The source code //! is typically installed with `rustup component add rust-src` command. 
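// Editor's sketch (illustration; assumes a rustup-managed setup, where recent
// rustup versions also accept an absolute toolchain path in this variable):
// the new `Sysroot::set_rustup_toolchain_env` introduced below pins child
// `cargo`/`rustc` invocations to the sysroot's toolchain via the environment:
use std::path::Path;
use std::process::Command;

fn cargo_pinned_to_sysroot(sysroot_root: &Path) -> Command {
    let mut cmd = Command::new("cargo");
    // The rustup shim consults RUSTUP_TOOLCHAIN before directory overrides and
    // rust-toolchain files, so every spawned tool resolves to this toolchain.
    cmd.env("RUSTUP_TOOLCHAIN", sysroot_root);
    cmd
}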
-use std::{env, fs, iter, ops, path::PathBuf, process::Command}; +use std::{env, fs, iter, ops, path::PathBuf, process::Command, sync::Arc}; -use anyhow::{format_err, Context, Result}; +use anyhow::{format_err, Result}; use base_db::CrateName; use itertools::Itertools; use la_arena::{Arena, Idx}; use paths::{AbsPath, AbsPathBuf}; use rustc_hash::FxHashMap; +use toolchain::probe_for_binary; use crate::{utf8_stdout, CargoConfig, CargoWorkspace, ManifestPath}; -#[derive(Debug, Clone, Eq, PartialEq)] +#[derive(Debug, Clone)] pub struct Sysroot { root: AbsPathBuf, - src_root: AbsPathBuf, + src_root: Option>>, mode: SysrootMode, } +impl Eq for Sysroot {} +impl PartialEq for Sysroot { + fn eq(&self, other: &Self) -> bool { + self.root == other.root + && self.mode == other.mode + && match (&self.src_root, &other.src_root) { + (Some(Ok(this)), Some(Ok(other))) => this == other, + (None, None) | (Some(Err(_)), Some(Err(_))) => true, + _ => false, + } + } +} + #[derive(Debug, Clone, Eq, PartialEq)] pub(crate) enum SysrootMode { Workspace(CargoWorkspace), @@ -86,8 +100,8 @@ impl Sysroot { /// Returns the sysroot "source" directory, where stdlib sources are located, like: /// `$HOME/.rustup/toolchains/nightly-2022-07-23-x86_64-unknown-linux-gnu/lib/rustlib/src/rust/library` - pub fn src_root(&self) -> &AbsPath { - &self.src_root + pub fn src_root(&self) -> Option<&AbsPath> { + self.src_root.as_ref()?.as_deref().ok() } pub fn is_empty(&self) -> bool { @@ -98,6 +112,11 @@ impl Sysroot { } pub fn loading_warning(&self) -> Option { + let src_root = match &self.src_root { + None => return Some(format!("sysroot at `{}` has no library sources", self.root)), + Some(Ok(src_root)) => src_root, + Some(Err(e)) => return Some(e.to_string()), + }; let has_core = match &self.mode { SysrootMode::Workspace(ws) => ws.packages().any(|p| ws[p].name == "core"), SysrootMode::Stitched(stitched) => stitched.by_name("core").is_some(), @@ -108,10 +127,7 @@ impl Sysroot { } else { " try running `rustup component add rust-src` to possible fix this" }; - Some(format!( - "could not find libcore in loaded sysroot at `{}`{var_note}", - self.src_root.as_path(), - )) + Some(format!("could not find libcore in loaded sysroot at `{}`{var_note}", src_root,)) } else { None } @@ -140,8 +156,19 @@ impl Sysroot { tracing::debug!("discovering sysroot for {dir}"); let sysroot_dir = discover_sysroot_dir(dir, extra_env)?; let sysroot_src_dir = - discover_sysroot_src_dir_or_add_component(&sysroot_dir, dir, extra_env)?; - Ok(Sysroot::load(sysroot_dir, sysroot_src_dir, metadata)) + discover_sysroot_src_dir_or_add_component(&sysroot_dir, dir, extra_env); + Ok(Sysroot::load(sysroot_dir, Some(sysroot_src_dir), metadata)) + } + + pub fn discover_no_source( + dir: &AbsPath, + extra_env: &FxHashMap, + ) -> Result { + tracing::debug!("discovering sysroot for {dir}"); + let sysroot_dir = discover_sysroot_dir(dir, extra_env)?; + let sysroot_src_dir = + discover_sysroot_src_dir_or_add_component(&sysroot_dir, dir, extra_env); + Ok(Sysroot::load(sysroot_dir, Some(sysroot_src_dir), false)) } pub fn discover_with_src_override( @@ -152,33 +179,59 @@ impl Sysroot { ) -> Result { tracing::debug!("discovering sysroot for {current_dir}"); let sysroot_dir = discover_sysroot_dir(current_dir, extra_env)?; - Ok(Sysroot::load(sysroot_dir, src, metadata)) + Ok(Sysroot::load(sysroot_dir, Some(Ok(src)), metadata)) } pub fn discover_rustc_src(&self) -> Option { get_rustc_src(&self.root) } - pub fn discover_rustc(&self) -> anyhow::Result { - let rustc = 
self.root.join("bin/rustc");
-        tracing::debug!(?rustc, "checking for rustc binary at location");
-        match fs::metadata(&rustc) {
-            Ok(_) => Ok(rustc),
-            Err(e) => Err(e).context(format!(
-                "failed to discover rustc in sysroot: {:?}",
-                AsRef::<std::path::Path>::as_ref(&self.root)
-            )),
-        }
-    }
-
     pub fn with_sysroot_dir(sysroot_dir: AbsPathBuf, metadata: bool) -> Result<Sysroot> {
         let sysroot_src_dir = discover_sysroot_src_dir(&sysroot_dir).ok_or_else(|| {
             format_err!("can't load standard library from sysroot path {sysroot_dir}")
-        })?;
-        Ok(Sysroot::load(sysroot_dir, sysroot_src_dir, metadata))
+        });
+        Ok(Sysroot::load(sysroot_dir, Some(sysroot_src_dir), metadata))
+    }
+
+    pub fn set_rustup_toolchain_env(cmd: &mut Command, sysroot: Option<&Self>) {
+        if let Some(sysroot) = sysroot {
+            cmd.env("RUSTUP_TOOLCHAIN", AsRef::<std::path::Path>::as_ref(&sysroot.root));
+        }
+    }
+
+    pub fn discover_proc_macro_srv(&self) -> anyhow::Result<AbsPathBuf> {
+        ["libexec", "lib"]
+            .into_iter()
+            .map(|segment| self.root().join(segment).join("rust-analyzer-proc-macro-srv"))
+            .find_map(|server_path| probe_for_binary(server_path.into()))
+            .map(AbsPathBuf::assert)
+            .ok_or_else(|| {
+                anyhow::format_err!("cannot find proc-macro server in sysroot `{}`", self.root())
+            })
     }
 
-    pub fn load(sysroot_dir: AbsPathBuf, sysroot_src_dir: AbsPathBuf, metadata: bool) -> Sysroot {
+    pub fn load(
+        sysroot_dir: AbsPathBuf,
+        sysroot_src_dir: Option<Result<AbsPathBuf, anyhow::Error>>,
+        metadata: bool,
+    ) -> Sysroot {
+        let sysroot_src_dir = match sysroot_src_dir {
+            Some(Ok(sysroot_src_dir)) => sysroot_src_dir,
+            Some(Err(e)) => {
+                return Sysroot {
+                    root: sysroot_dir,
+                    src_root: Some(Err(Arc::new(e))),
+                    mode: SysrootMode::Stitched(Stitched { crates: Arena::default() }),
+                }
+            }
+            None => {
+                return Sysroot {
+                    root: sysroot_dir,
+                    src_root: None,
+                    mode: SysrootMode::Stitched(Stitched { crates: Arena::default() }),
+                }
+            }
+        };
         if metadata {
             let sysroot: Option<_> = (|| {
                 let sysroot_cargo_toml = ManifestPath::try_from(
@@ -187,10 +240,19 @@ impl Sysroot {
                 .ok()?;
                 let current_dir =
                     AbsPathBuf::try_from(&*format!("{sysroot_src_dir}/sysroot")).ok()?;
+
+                let mut cargo_config = CargoConfig::default();
+                // the sysroot uses `public-dependency`, so we make cargo think it's a nightly
+                cargo_config.extra_env.insert(
+                    "__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS".to_owned(),
+                    "nightly".to_owned(),
+                );
+
                 let res = CargoWorkspace::fetch_metadata(
                     &sysroot_cargo_toml,
                     &current_dir,
-                    &CargoConfig::default(),
+                    &cargo_config,
+                    None,
                     &|_| (),
                 )
                 .map_err(|e| {
@@ -274,7 +336,7 @@ impl Sysroot {
                 let cargo_workspace = CargoWorkspace::new(res);
                 Some(Sysroot {
                     root: sysroot_dir.clone(),
-                    src_root: sysroot_src_dir.clone(),
+                    src_root: Some(Ok(sysroot_src_dir.clone())),
                     mode: SysrootMode::Workspace(cargo_workspace),
                 })
             })();
@@ -326,7 +388,7 @@ impl Sysroot {
         }
         Sysroot {
             root: sysroot_dir,
-            src_root: sysroot_src_dir,
+            src_root: Some(Ok(sysroot_src_dir)),
             mode: SysrootMode::Stitched(stitched),
         }
     }
diff --git a/crates/project-model/src/target_data_layout.rs b/crates/project-model/src/target_data_layout.rs
index cb995857ec7dc..af635dda5782d 100644
--- a/crates/project-model/src/target_data_layout.rs
+++ b/crates/project-model/src/target_data_layout.rs
@@ -3,38 +3,58 @@
 use std::process::Command;
 
 use rustc_hash::FxHashMap;
 
-use crate::{utf8_stdout, ManifestPath};
+use crate::{utf8_stdout, ManifestPath, Sysroot};
+
+/// Determines how `rustc --print target-spec-json` is discovered and invoked.
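// Usage sketch (mirrors the call sites added in workspace.rs further below;
// the variable names here are illustrative only): a caller that has a
// Cargo.toml at hand picks the Cargo variant so the probe can go through
// `cargo rustc`, anything else uses the plain Rustc variant:
//
//     let cfg = RustcDataLayoutConfig::Cargo(sysroot.as_ref().ok(), &cargo_toml);
//     let layout = target_data_layout::get(cfg, None, &Default::default())?;
//
// Passing `None` for the sysroot skips the RUSTUP_TOOLCHAIN pinning and lets
// rustup resolve the toolchain as usual.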
+pub enum RustcDataLayoutConfig<'a> {
+    /// Use `rustc --print target-spec-json`, either with the binary from the sysroot or by
+    /// discovering it via [`toolchain::rustc`].
+    Rustc(Option<&'a Sysroot>),
+    /// Use `cargo rustc -- --print target-spec-json`, either with the binary from the sysroot
+    /// or by discovering it via [`toolchain::cargo`].
+    Cargo(Option<&'a Sysroot>, &'a ManifestPath),
+}
 
 pub fn get(
-    cargo_toml: Option<&ManifestPath>,
+    config: RustcDataLayoutConfig<'_>,
     target: Option<&str>,
     extra_env: &FxHashMap<String, String>,
 ) -> anyhow::Result<String> {
-    let output = (|| {
-        if let Some(cargo_toml) = cargo_toml {
-            let mut cmd = Command::new(toolchain::rustc());
+    let process = |output: String| {
+        (|| Some(output.split_once(r#""data-layout": ""#)?.1.split_once('"')?.0.to_owned()))()
+            .ok_or_else(|| {
+                anyhow::format_err!("could not fetch target-spec-json from command output")
+            })
+    };
+    let sysroot = match config {
+        RustcDataLayoutConfig::Cargo(sysroot, cargo_toml) => {
+            let mut cmd = Command::new(toolchain::Tool::Cargo.path());
+            Sysroot::set_rustup_toolchain_env(&mut cmd, sysroot);
             cmd.envs(extra_env);
             cmd.current_dir(cargo_toml.parent())
-                .args(["-Z", "unstable-options", "--print", "target-spec-json"])
+                .args(["rustc", "--", "-Z", "unstable-options", "--print", "target-spec-json"])
                 .env("RUSTC_BOOTSTRAP", "1");
             if let Some(target) = target {
                 cmd.args(["--target", target]);
             }
             match utf8_stdout(cmd) {
-                Ok(it) => return Ok(it),
-                Err(e) => tracing::debug!("{e:?}: falling back to querying rustc for cfgs"),
+                Ok(output) => return process(output),
+                Err(e) => {
+                    tracing::warn!("failed to run `cargo rustc --print target-spec-json`, falling back to invoking rustc directly: {e}");
+                    sysroot
+                }
             }
         }
-        // using unstable cargo features failed, fall back to using plain rustc
-        let mut cmd = Command::new(toolchain::rustc());
-        cmd.envs(extra_env)
-            .args(["-Z", "unstable-options", "--print", "target-spec-json"])
-            .env("RUSTC_BOOTSTRAP", "1");
-        if let Some(target) = target {
-            cmd.args(["--target", target]);
-        }
-        utf8_stdout(cmd)
-    })()?;
-    (|| Some(output.split_once(r#""data-layout": ""#)?.1.split_once('"')?.0.to_owned()))()
-        .ok_or_else(|| anyhow::format_err!("could not fetch target-spec-json from command output"))
+        RustcDataLayoutConfig::Rustc(sysroot) => sysroot,
+    };
+
+    let mut cmd = Command::new(toolchain::Tool::Rustc.path());
+    Sysroot::set_rustup_toolchain_env(&mut cmd, sysroot);
+    cmd.envs(extra_env)
+        .args(["-Z", "unstable-options", "--print", "target-spec-json"])
+        .env("RUSTC_BOOTSTRAP", "1");
+    if let Some(target) = target {
+        cmd.args(["--target", target]);
+    }
+    process(utf8_stdout(cmd)?)
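+    // Note: `process` deliberately avoids pulling in a JSON parser. It scans
+    // the emitted target-spec-json for the literal `"data-layout": "` key and
+    // takes everything up to the next quote; for x86_64-unknown-linux-gnu this
+    // yields a string like "e-m:e-p270:32:32-...-S128". Cheap and
+    // dependency-free, but it assumes the key occurs exactly once and that the
+    // value contains no escaped quotes.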
} diff --git a/crates/project-model/src/tests.rs b/crates/project-model/src/tests.rs index 74042e925ede4..b9b1b701f6d40 100644 --- a/crates/project-model/src/tests.rs +++ b/crates/project-model/src/tests.rs @@ -9,6 +9,7 @@ use expect_test::{expect_file, ExpectFile}; use paths::{AbsPath, AbsPathBuf}; use rustc_hash::FxHashMap; use serde::de::DeserializeOwned; +use triomphe::Arc; use crate::{ CargoWorkspace, CfgOverrides, ProjectJson, ProjectJsonData, ProjectWorkspace, Sysroot, @@ -34,6 +35,7 @@ fn load_cargo_with_overrides( cfg_overrides, toolchain: None, target_layout: Err("target_data_layout not loaded".into()), + cargo_config_extra_env: Default::default(), }; to_crate_graph(project_workspace) } @@ -53,6 +55,7 @@ fn load_cargo_with_fake_sysroot( cfg_overrides: Default::default(), toolchain: None, target_layout: Err("target_data_layout not loaded".into()), + cargo_config_extra_env: Default::default(), }; project_workspace.to_crate_graph( &mut { @@ -69,8 +72,13 @@ fn load_rust_project(file: &str) -> (CrateGraph, ProcMacroPaths) { let data = get_test_json_file(file); let project = rooted_project_json(data); let sysroot = Ok(get_fake_sysroot()); - let project_workspace = - ProjectWorkspace::Json { project, sysroot, rustc_cfg: Vec::new(), toolchain: None }; + let project_workspace = ProjectWorkspace::Json { + project, + sysroot, + rustc_cfg: Vec::new(), + toolchain: None, + target_layout: Err(Arc::from("test has no data layout")), + }; to_crate_graph(project_workspace) } @@ -125,7 +133,7 @@ fn get_fake_sysroot() -> Sysroot { // fake sysroot, so we give them both the same path: let sysroot_dir = AbsPathBuf::assert(sysroot_path); let sysroot_src_dir = sysroot_dir.clone(); - Sysroot::load(sysroot_dir, sysroot_src_dir, false) + Sysroot::load(sysroot_dir, Some(Ok(sysroot_src_dir)), false) } fn rooted_project_json(data: ProjectJsonData) -> ProjectJson { @@ -230,7 +238,7 @@ fn crate_graph_dedup_identical() { let (d_crate_graph, mut d_proc_macros) = (crate_graph.clone(), proc_macros.clone()); - crate_graph.extend(d_crate_graph.clone(), &mut d_proc_macros, |_| ()); + crate_graph.extend(d_crate_graph.clone(), &mut d_proc_macros, |(_, a), (_, b)| a == b); assert!(crate_graph.iter().eq(d_crate_graph.iter())); assert_eq!(proc_macros, d_proc_macros); } @@ -246,62 +254,10 @@ fn crate_graph_dedup() { load_cargo_with_fake_sysroot(path_map, "regex-metadata.json"); assert_eq!(regex_crate_graph.iter().count(), 60); - crate_graph.extend(regex_crate_graph, &mut regex_proc_macros, |_| ()); + crate_graph.extend(regex_crate_graph, &mut regex_proc_macros, |(_, a), (_, b)| a == b); assert_eq!(crate_graph.iter().count(), 118); } -#[test] -fn test_deduplicate_origin_dev() { - let path_map = &mut Default::default(); - let (mut crate_graph, _proc_macros) = - load_cargo_with_fake_sysroot(path_map, "deduplication_crate_graph_A.json"); - crate_graph.sort_deps(); - let (crate_graph_1, mut _proc_macros_2) = - load_cargo_with_fake_sysroot(path_map, "deduplication_crate_graph_B.json"); - - crate_graph.extend(crate_graph_1, &mut _proc_macros_2, |_| ()); - - let mut crates_named_p2 = vec![]; - for id in crate_graph.iter() { - let krate = &crate_graph[id]; - if let Some(name) = krate.display_name.as_ref() { - if name.to_string() == "p2" { - crates_named_p2.push(krate); - } - } - } - - assert!(crates_named_p2.len() == 1); - let p2 = crates_named_p2[0]; - assert!(p2.origin.is_local()); -} - -#[test] -fn test_deduplicate_origin_dev_rev() { - let path_map = &mut Default::default(); - let (mut crate_graph, _proc_macros) = - 
load_cargo_with_fake_sysroot(path_map, "deduplication_crate_graph_B.json");
-    crate_graph.sort_deps();
-    let (crate_graph_1, mut _proc_macros_2) =
-        load_cargo_with_fake_sysroot(path_map, "deduplication_crate_graph_A.json");
-
-    crate_graph.extend(crate_graph_1, &mut _proc_macros_2, |_| ());
-
-    let mut crates_named_p2 = vec![];
-    for id in crate_graph.iter() {
-        let krate = &crate_graph[id];
-        if let Some(name) = krate.display_name.as_ref() {
-            if name.to_string() == "p2" {
-                crates_named_p2.push(krate);
-            }
-        }
-    }
-
-    assert!(crates_named_p2.len() == 1);
-    let p2 = crates_named_p2[0];
-    assert!(p2.origin.is_local());
-}
-
 #[test]
 fn smoke_test_real_sysroot_cargo() {
     if std::env::var("SYSROOT_CARGO_METADATA").is_err() {
@@ -327,6 +283,7 @@ fn smoke_test_real_sysroot_cargo() {
         cfg_overrides: Default::default(),
         toolchain: None,
         target_layout: Err("target_data_layout not loaded".into()),
+        cargo_config_extra_env: Default::default(),
     };
     project_workspace.to_crate_graph(
         &mut {
diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs
index cda5ad2f1109f..b7ae76be8cec0 100644
--- a/crates/project-model/src/workspace.rs
+++ b/crates/project-model/src/workspace.rs
@@ -6,14 +6,15 @@ use std::{collections::VecDeque, fmt, fs, iter, process::Command, str::FromStr,
 
 use anyhow::{format_err, Context};
 use base_db::{
-    CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, DependencyKind,
-    Edition, Env, FileId, LangCrateOrigin, ProcMacroPaths, TargetLayoutLoadResult,
+    CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Edition, Env,
+    FileId, LangCrateOrigin, ProcMacroPaths, TargetLayoutLoadResult,
 };
 use cfg::{CfgAtom, CfgDiff, CfgOptions};
 use paths::{AbsPath, AbsPathBuf};
 use rustc_hash::{FxHashMap, FxHashSet};
 use semver::Version;
 use stdx::always;
+use toolchain::Tool;
 use triomphe::Arc;
 
 use crate::{
@@ -23,8 +24,9 @@ use crate::{
     project_json::Crate,
     rustc_cfg::{self, RustcCfgConfig},
     sysroot::{SysrootCrate, SysrootMode},
-    target_data_layout, utf8_stdout, CargoConfig, CargoWorkspace, InvocationStrategy, ManifestPath,
-    Package, ProjectJson, ProjectManifest, Sysroot, TargetData, TargetKind, WorkspaceBuildScripts,
+    target_data_layout::{self, RustcDataLayoutConfig},
+    utf8_stdout, CargoConfig, CargoWorkspace, InvocationStrategy, ManifestPath, Package,
+    ProjectJson, ProjectManifest, Sysroot, TargetData, TargetKind, WorkspaceBuildScripts,
 };
 
 /// A set of cfg-overrides per crate.
@@ -69,7 +71,8 @@ pub enum ProjectWorkspace {
         rustc_cfg: Vec<CfgFlag>,
         cfg_overrides: CfgOverrides,
         toolchain: Option<Version>,
-        target_layout: Result<String, String>,
+        target_layout: TargetLayoutLoadResult,
+        cargo_config_extra_env: FxHashMap<String, String>,
     },
     /// Project workspace was manually specified using a `rust-project.json` file.
     Json {
@@ -79,6 +82,7 @@ pub enum ProjectWorkspace {
         /// `rustc --print cfg`.
         rustc_cfg: Vec<CfgFlag>,
         toolchain: Option<Version>,
+        target_layout: TargetLayoutLoadResult,
     },
     // FIXME: The primary limitation of this approach is that the set of detached files needs to be fixed at the beginning.
     // That's not the end user experience we should strive for.
@@ -111,7 +115,8 @@ impl fmt::Debug for ProjectWorkspace {
                 rustc_cfg,
                 cfg_overrides,
                 toolchain,
-                target_layout: data_layout,
+                target_layout,
+                cargo_config_extra_env,
             } => f
                 .debug_struct("Cargo")
                 .field("root", &cargo.workspace_root().file_name())
@@ -124,16 +129,25 @@ impl fmt::Debug for ProjectWorkspace {
                 .field("n_rustc_cfg", &rustc_cfg.len())
                 .field("n_cfg_overrides", &cfg_overrides.len())
                 .field("toolchain", &toolchain)
-                .field("data_layout", &data_layout)
+                .field("data_layout", &target_layout)
+                .field("cargo_config_extra_env", &cargo_config_extra_env)
                 .finish(),
-            ProjectWorkspace::Json { project, sysroot, rustc_cfg, toolchain } => {
+            ProjectWorkspace::Json {
+                project,
+                sysroot,
+                rustc_cfg,
+                toolchain,
+                target_layout: data_layout,
+            } => {
                 let mut debug_struct = f.debug_struct("Json");
                 debug_struct.field("n_crates", &project.n_crates());
                 if let Ok(sysroot) = sysroot {
                     debug_struct.field("n_sysroot_crates", &sysroot.num_packages());
                 }
-                debug_struct.field("toolchain", &toolchain);
-                debug_struct.field("n_rustc_cfg", &rustc_cfg.len());
+                debug_struct
+                    .field("toolchain", &toolchain)
+                    .field("n_rustc_cfg", &rustc_cfg.len())
+                    .field("data_layout", &data_layout);
                 debug_struct.finish()
             }
             ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => f
@@ -146,6 +160,28 @@ impl fmt::Debug for ProjectWorkspace {
     }
 }
 
+fn get_toolchain_version(
+    current_dir: &AbsPath,
+    sysroot: Option<&Sysroot>,
+    tool: Tool,
+    extra_env: &FxHashMap<String, String>,
+    prefix: &str,
+) -> Result<Option<Version>, anyhow::Error> {
+    let cargo_version = utf8_stdout({
+        let mut cmd = Command::new(tool.path());
+        Sysroot::set_rustup_toolchain_env(&mut cmd, sysroot);
+        cmd.envs(extra_env);
+        cmd.arg("--version").current_dir(current_dir);
+        cmd
+    })
+    .with_context(|| format!("Failed to query rust toolchain version at {current_dir}, is your toolchain set up correctly?"))?;
+    anyhow::Ok(
+        cargo_version
+            .get(prefix.len()..)
+            .and_then(|it| Version::parse(it.split_whitespace().next()?).ok()),
+    )
+}
+
 impl ProjectWorkspace {
     pub fn load(
         manifest: ProjectManifest,
@@ -161,20 +197,6 @@ impl ProjectWorkspace {
         config: &CargoConfig,
         progress: &dyn Fn(String),
     ) -> anyhow::Result<ProjectWorkspace> {
-        let version = |current_dir, cmd_path, prefix: &str| {
-            let cargo_version = utf8_stdout({
-                let mut cmd = Command::new(cmd_path);
-                cmd.envs(&config.extra_env);
-                cmd.arg("--version").current_dir(current_dir);
-                cmd
-            })
-            .with_context(|| format!("Failed to query rust toolchain version at {current_dir}, is your toolchain set up correctly?"))?;
-            anyhow::Ok(
-                cargo_version
-                    .get(prefix.len()..)
- .and_then(|it| Version::parse(it.split_whitespace().next()?).ok()), - ) - }; let res = match manifest { ProjectManifest::ProjectJson(project_json) => { let file = fs::read_to_string(project_json) @@ -182,30 +204,14 @@ impl ProjectWorkspace { let data = serde_json::from_str(&file) .with_context(|| format!("Failed to deserialize json file {project_json}"))?; let project_location = project_json.parent().to_path_buf(); - let toolchain = version(&*project_location, toolchain::rustc(), "rustc ")?; - let project_json = ProjectJson::new(&project_location, data); + let project_json: ProjectJson = ProjectJson::new(&project_location, data); ProjectWorkspace::load_inline( project_json, config.target.as_deref(), &config.extra_env, - toolchain, ) } ProjectManifest::CargoToml(cargo_toml) => { - let toolchain = version(cargo_toml.parent(), toolchain::cargo(), "cargo ")?; - let meta = CargoWorkspace::fetch_metadata( - cargo_toml, - cargo_toml.parent(), - config, - progress, - ) - .with_context(|| { - format!( - "Failed to read Cargo metadata from Cargo.toml file {cargo_toml}, {toolchain:?}", - ) - })?; - let cargo = CargoWorkspace::new(meta); - let sysroot = match (&config.sysroot, &config.sysroot_src) { (Some(RustLibSource::Path(path)), None) => { Sysroot::with_sysroot_dir(path.clone(), config.sysroot_query_metadata).map_err(|e| { @@ -218,7 +224,7 @@ impl ProjectWorkspace { }) } (Some(RustLibSource::Path(sysroot)), Some(sysroot_src)) => { - Ok(Sysroot::load(sysroot.clone(), sysroot_src.clone(), config.sysroot_query_metadata)) + Ok(Sysroot::load(sysroot.clone(), Some(Ok(sysroot_src.clone())), config.sysroot_query_metadata)) } (Some(RustLibSource::Discover), Some(sysroot_src)) => { Sysroot::discover_with_src_override( @@ -231,18 +237,19 @@ impl ProjectWorkspace { } (None, _) => Err(None), }; + let sysroot_ref = sysroot.as_ref().ok(); if let Ok(sysroot) = &sysroot { - tracing::info!(workspace = %cargo_toml, src_root = %sysroot.src_root(), root = %sysroot.root(), "Using sysroot"); + tracing::info!(workspace = %cargo_toml, src_root = ?sysroot.src_root(), root = %sysroot.root(), "Using sysroot"); } let rustc_dir = match &config.rustc_source { Some(RustLibSource::Path(path)) => ManifestPath::try_from(path.clone()) .map_err(|p| Some(format!("rustc source path is not absolute: {p}"))), Some(RustLibSource::Discover) => { - sysroot.as_ref().ok().and_then(Sysroot::discover_rustc_src).ok_or_else( - || Some("Failed to discover rustc source for sysroot.".to_owned()), - ) + sysroot_ref.and_then(Sysroot::discover_rustc_src).ok_or_else(|| { + Some("Failed to discover rustc source for sysroot.".to_owned()) + }) } None => Err(None), }; @@ -256,6 +263,7 @@ impl ProjectWorkspace { features: crate::CargoFeatures::default(), ..config.clone() }, + sysroot_ref, progress, ) { Ok(meta) => { @@ -264,6 +272,7 @@ impl ProjectWorkspace { &workspace, cargo_toml.parent(), &config.extra_env, + sysroot_ref ); Ok(Box::new((workspace, buildscripts))) } @@ -279,21 +288,45 @@ impl ProjectWorkspace { } }); + let toolchain = get_toolchain_version( + cargo_toml.parent(), + sysroot_ref, + toolchain::Tool::Cargo, + &config.extra_env, + "cargo ", + )?; let rustc_cfg = rustc_cfg::get( config.target.as_deref(), &config.extra_env, - RustcCfgConfig::Cargo(cargo_toml), + RustcCfgConfig::Cargo(sysroot_ref, cargo_toml), ); let cfg_overrides = config.cfg_overrides.clone(); let data_layout = target_data_layout::get( - Some(cargo_toml), + RustcDataLayoutConfig::Cargo(sysroot_ref, cargo_toml), config.target.as_deref(), &config.extra_env, ); if let Err(e) = 
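// A failed data-layout probe is logged but deliberately non-fatal: the
// workspace still loads, and the `target_layout` field constructed below
// simply carries the error text instead of a layout string.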
&data_layout {
                    tracing::error!(%e, "failed fetching data layout for {cargo_toml:?} workspace");
                }
+
+                let meta = CargoWorkspace::fetch_metadata(
+                    cargo_toml,
+                    cargo_toml.parent(),
+                    config,
+                    sysroot_ref,
+                    progress,
+                )
+                .with_context(|| {
+                    format!(
+                        "Failed to read Cargo metadata from Cargo.toml file {cargo_toml}, {toolchain:?}",
+                    )
+                })?;
+                let cargo = CargoWorkspace::new(meta);
+
+                let cargo_config_extra_env =
+                    cargo_config_env(cargo_toml, &config.extra_env, sysroot_ref);
                 ProjectWorkspace::Cargo {
                     cargo,
                     build_scripts: WorkspaceBuildScripts::default(),
@@ -302,7 +335,10 @@ impl ProjectWorkspace {
                     rustc_cfg,
                     cfg_overrides,
                     toolchain,
-                    target_layout: data_layout.map_err(|it| it.to_string()),
+                    target_layout: data_layout
+                        .map(Arc::from)
+                        .map_err(|it| Arc::from(it.to_string())),
+                    cargo_config_extra_env,
                 }
             }
         };
@@ -314,15 +350,16 @@ impl ProjectWorkspace {
         project_json: ProjectJson,
         target: Option<&str>,
        extra_env: &FxHashMap<String, String>,
-        toolchain: Option<Version>,
     ) -> ProjectWorkspace {
         let sysroot = match (project_json.sysroot.clone(), project_json.sysroot_src.clone()) {
-            (Some(sysroot), Some(sysroot_src)) => Ok(Sysroot::load(sysroot, sysroot_src, false)),
+            (Some(sysroot), Some(sysroot_src)) => {
+                Ok(Sysroot::load(sysroot, Some(Ok(sysroot_src)), false))
+            }
             (Some(sysroot), None) => {
                 // assume sysroot is structured like rustup's and guess `sysroot_src`
                 let sysroot_src =
                     sysroot.join("lib").join("rustlib").join("src").join("rust").join("library");
-                Ok(Sysroot::load(sysroot, sysroot_src, false))
+                Ok(Sysroot::load(sysroot, Some(Ok(sysroot_src)), false))
             }
             (None, Some(sysroot_src)) => {
                 // assume sysroot is structured like rustup's and guess `sysroot`
@@ -330,23 +367,36 @@ impl ProjectWorkspace {
                 for _ in 0..5 {
                     sysroot.pop();
                 }
-                Ok(Sysroot::load(sysroot, sysroot_src, false))
+                Ok(Sysroot::load(sysroot, Some(Ok(sysroot_src)), false))
             }
             (None, None) => Err(None),
         };
-        let config = match &sysroot {
-            Ok(sysroot) => {
-                tracing::debug!(src_root = %sysroot.src_root(), root = %sysroot.root(), "Using sysroot");
-                RustcCfgConfig::Explicit(sysroot)
-            }
-            Err(_) => {
-                tracing::debug!("discovering sysroot");
-                RustcCfgConfig::Discover
+        let sysroot_ref = sysroot.as_ref().ok();
+        let cfg_config = RustcCfgConfig::Rustc(sysroot_ref);
+        let data_layout_config = RustcDataLayoutConfig::Rustc(sysroot_ref);
+        let toolchain = match get_toolchain_version(
+            project_json.path(),
+            sysroot_ref,
+            toolchain::Tool::Rustc,
+            extra_env,
+            "rustc ",
+        ) {
+            Ok(it) => it,
+            Err(e) => {
+                tracing::error!("{e}");
+                None
             }
         };
-        let rustc_cfg = rustc_cfg::get(target, extra_env, config);
-        ProjectWorkspace::Json { project: project_json, sysroot, rustc_cfg, toolchain }
+        let rustc_cfg = rustc_cfg::get(target, extra_env, cfg_config);
+        let data_layout = target_data_layout::get(data_layout_config, target, extra_env);
+        ProjectWorkspace::Json {
+            project: project_json,
+            sysroot,
+            rustc_cfg,
+            toolchain,
+            target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())),
+        }
     }
 
     pub fn load_detached_files(
@@ -373,18 +423,11 @@ impl ProjectWorkspace {
             }
             None => Err(None),
         };
-        let rustc_config = match &sysroot {
-            Ok(sysroot) => {
-                tracing::info!(src_root = %sysroot.src_root(), root = %sysroot.root(), "Using sysroot");
-                RustcCfgConfig::Explicit(sysroot)
-            }
-            Err(_) => {
-                tracing::info!("discovering sysroot");
-                RustcCfgConfig::Discover
-            }
-        };
-
-        let rustc_cfg = rustc_cfg::get(None, &FxHashMap::default(), rustc_config);
+        let rustc_cfg = rustc_cfg::get(
+            None,
+            &FxHashMap::default(),
RustcCfgConfig::Rustc(sysroot.as_ref().ok()),
+        );
         Ok(ProjectWorkspace::DetachedFiles { files: detached_files, sysroot, rustc_cfg })
     }
 
@@ -395,11 +438,17 @@ impl ProjectWorkspace {
         progress: &dyn Fn(String),
     ) -> anyhow::Result<WorkspaceBuildScripts> {
         match self {
-            ProjectWorkspace::Cargo { cargo, toolchain, .. } => {
-                WorkspaceBuildScripts::run_for_workspace(config, cargo, progress, toolchain)
-                    .with_context(|| {
-                        format!("Failed to run build scripts for {}", cargo.workspace_root())
-                    })
+            ProjectWorkspace::Cargo { cargo, toolchain, sysroot, .. } => {
+                WorkspaceBuildScripts::run_for_workspace(
+                    config,
+                    cargo,
+                    progress,
+                    toolchain,
+                    sysroot.as_ref().ok(),
+                )
+                .with_context(|| {
+                    format!("Failed to run build scripts for {}", cargo.workspace_root())
+                })
             }
             ProjectWorkspace::Json { .. } | ProjectWorkspace::DetachedFiles { .. } => {
                 Ok(WorkspaceBuildScripts::default())
@@ -472,18 +521,7 @@ impl ProjectWorkspace {
             ProjectWorkspace::Cargo { sysroot: Ok(sysroot), .. }
            | ProjectWorkspace::Json { sysroot: Ok(sysroot), .. }
            | ProjectWorkspace::DetachedFiles { sysroot: Ok(sysroot), .. } => {
-                let standalone_server_name =
-                    format!("rust-analyzer-proc-macro-srv{}", std::env::consts::EXE_SUFFIX);
-                ["libexec", "lib"]
-                    .into_iter()
-                    .map(|segment| sysroot.root().join(segment).join(&standalone_server_name))
-                    .find(|server_path| std::fs::metadata(server_path).is_ok())
-                    .ok_or_else(|| {
-                        anyhow::format_err!(
-                            "cannot find proc-macro server in sysroot `{}`",
-                            sysroot.root()
-                        )
-                    })
+                sysroot.discover_proc_macro_srv()
             }
             ProjectWorkspace::DetachedFiles { .. } => {
                 Err(anyhow::format_err!("cannot find proc-macro server, no sysroot was found"))
@@ -503,8 +541,7 @@ impl ProjectWorkspace {
     /// The return type contains the path and whether or not
     /// the root is a member of the current workspace
     pub fn to_roots(&self) -> Vec<PackageRoot> {
-        let mk_sysroot = |sysroot: Result<_, _>, project_root: Option<&AbsPath>| {
-            let project_root = project_root.map(ToOwned::to_owned);
+        let mk_sysroot = |sysroot: Result<_, _>| {
             sysroot.into_iter().flat_map(move |sysroot: &Sysroot| {
                 let mut r = match sysroot.mode() {
                     SysrootMode::Workspace(ws) => ws
@@ -532,18 +569,21 @@ impl ProjectWorkspace {
                 };
 
                 r.push(PackageRoot {
-                    // mark the sysroot as mutable if it is located inside of the project
-                    is_local: project_root
-                        .as_ref()
-                        .map_or(false, |project_root| sysroot.src_root().starts_with(project_root)),
-                    include: vec![sysroot.src_root().to_path_buf()],
+                    is_local: false,
+                    include: sysroot.src_root().map(|it| it.to_path_buf()).into_iter().collect(),
                     exclude: Vec::new(),
                 });
                 r
             })
         };
         match self {
-            ProjectWorkspace::Json { project, sysroot, rustc_cfg: _, toolchain: _ } => project
+            ProjectWorkspace::Json {
+                project,
+                sysroot,
+                rustc_cfg: _,
+                toolchain: _,
+                target_layout: _,
+            } => project
                 .crates()
                 .map(|(_, krate)| PackageRoot {
                     is_local: krate.is_workspace_member,
@@ -552,7 +592,7 @@ impl ProjectWorkspace {
                 })
                 .collect::<Vec<_>>()
                 .into_iter()
-                .chain(mk_sysroot(sysroot.as_ref(), Some(project.path())))
+                .chain(mk_sysroot(sysroot.as_ref()))
                 .collect::<Vec<_>>(),
             ProjectWorkspace::Cargo {
                 cargo,
@@ -563,6 +603,7 @@ impl ProjectWorkspace {
                 build_scripts,
                 toolchain: _,
                 target_layout: _,
+                cargo_config_extra_env: _,
             } => {
                 cargo
                    .packages()
@@ -586,7 +627,7 @@ impl ProjectWorkspace {
                         let extra_targets = cargo[pkg]
                             .targets
                             .iter()
-                            .filter(|&&tgt| cargo[tgt].kind == TargetKind::Lib)
+                            .filter(|&&tgt| matches!(cargo[tgt].kind, TargetKind::Lib { ..
}))
                             .filter_map(|&tgt| cargo[tgt].root.parent())
                             .map(|tgt| tgt.normalize().to_path_buf())
                             .filter(|path| !path.starts_with(&pkg_root));
@@ -602,7 +643,7 @@ impl ProjectWorkspace {
                         }
                         PackageRoot { is_local, include, exclude }
                     })
-                    .chain(mk_sysroot(sysroot.as_ref(), Some(cargo.workspace_root())))
+                    .chain(mk_sysroot(sysroot.as_ref()))
                    .chain(rustc.iter().map(|a| a.as_ref()).flat_map(|(rustc, _)| {
                         rustc.packages().map(move |krate| PackageRoot {
                             is_local: false,
@@ -619,7 +660,7 @@ impl ProjectWorkspace {
                     include: vec![detached_file.clone()],
                     exclude: Vec::new(),
                 })
-                .chain(mk_sysroot(sysroot.as_ref(), None))
+                .chain(mk_sysroot(sysroot.as_ref()))
                 .collect(),
         }
     }
@@ -651,17 +692,19 @@ impl ProjectWorkspace {
        let _p = tracing::span!(tracing::Level::INFO, "ProjectWorkspace::to_crate_graph").entered();
 
         let (mut crate_graph, proc_macros) = match self {
-            ProjectWorkspace::Json { project, sysroot, rustc_cfg, toolchain } => {
-                project_json_to_crate_graph(
-                    rustc_cfg.clone(),
-                    load,
-                    project,
-                    sysroot.as_ref().ok(),
-                    extra_env,
-                    Err("rust-project.json projects have no target layout set".into()),
-                    toolchain.clone(),
-                )
-            }
+            ProjectWorkspace::Json {
+                project,
+                sysroot,
+                rustc_cfg,
+                toolchain: _,
+                target_layout: _,
+            } => project_json_to_crate_graph(
+                rustc_cfg.clone(),
+                load,
+                project,
+                sysroot.as_ref().ok(),
+                extra_env,
+            ),
             ProjectWorkspace::Cargo {
                 cargo,
                 sysroot,
@@ -669,8 +712,9 @@ impl ProjectWorkspace {
                 rustc_cfg,
                 cfg_overrides,
                 build_scripts,
-                toolchain,
-                target_layout,
+                toolchain: _,
+                target_layout: _,
+                cargo_config_extra_env: _,
             } => cargo_to_crate_graph(
                 load,
                 rustc.as_ref().map(|a| a.as_ref()).ok(),
@@ -679,20 +723,9 @@ impl ProjectWorkspace {
                 rustc_cfg.clone(),
                 cfg_overrides,
                 build_scripts,
-                match target_layout.as_ref() {
-                    Ok(it) => Ok(Arc::from(it.as_str())),
-                    Err(it) => Err(Arc::from(it.as_str())),
-                },
-                toolchain.as_ref(),
             ),
             ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => {
-                detached_files_to_crate_graph(
-                    rustc_cfg.clone(),
-                    load,
-                    files,
-                    sysroot.as_ref().ok(),
-                    Err("detached file projects have no target layout set".into()),
-                )
+                detached_files_to_crate_graph(rustc_cfg.clone(), load, files, sysroot.as_ref().ok())
             }
         };
         if crate_graph.patch_cfg_if() {
@@ -713,6 +746,7 @@ impl ProjectWorkspace {
                     rustc_cfg,
                     cfg_overrides,
                     toolchain,
+                    cargo_config_extra_env,
                     build_scripts: _,
                     target_layout: _,
                 },
@@ -723,6 +757,7 @@ impl ProjectWorkspace {
                     rustc_cfg: o_rustc_cfg,
                     cfg_overrides: o_cfg_overrides,
                     toolchain: o_toolchain,
+                    cargo_config_extra_env: o_cargo_config_extra_env,
                     build_scripts: _,
                     target_layout: _,
                 },
@@ -733,14 +768,16 @@ impl ProjectWorkspace {
                     && cfg_overrides == o_cfg_overrides
                     && toolchain == o_toolchain
                     && sysroot == o_sysroot
+                    && cargo_config_extra_env == o_cargo_config_extra_env
             }
             (
-                Self::Json { project, sysroot, rustc_cfg, toolchain },
+                Self::Json { project, sysroot, rustc_cfg, toolchain, target_layout: _ },
                 Self::Json {
                     project: o_project,
                     sysroot: o_sysroot,
                     rustc_cfg: o_rustc_cfg,
                     toolchain: o_toolchain,
+                    target_layout: _,
                 },
             ) => {
                 project == o_project
@@ -771,21 +808,12 @@ fn project_json_to_crate_graph(
     project: &ProjectJson,
     sysroot: Option<&Sysroot>,
     extra_env: &FxHashMap<String, String>,
-    target_layout: TargetLayoutLoadResult,
-    toolchain: Option<Version>,
 ) -> (CrateGraph, ProcMacroPaths) {
     let mut res = (CrateGraph::default(), ProcMacroPaths::default());
     let (crate_graph, proc_macros) = &mut res;
-    let sysroot_deps = sysroot.as_ref().map(|sysroot| {
-        sysroot_to_crate_graph(
-            crate_graph,
-            sysroot,
-            rustc_cfg.clone(),
-
target_layout.clone(),
-            load,
-            toolchain.as_ref(),
-        )
-    });
+    let sysroot_deps = sysroot
+        .as_ref()
+        .map(|sysroot| sysroot_to_crate_graph(crate_graph, sysroot, rustc_cfg.clone(), load));
 
     let r_a_cfg_flag = CfgFlag::Atom("rust_analyzer".to_owned());
     let mut cfg_cache: FxHashMap<&str, Vec<CfgFlag>> = FxHashMap::default();
@@ -813,12 +841,7 @@ fn project_json_to_crate_graph(
         let target_cfgs = match target.as_deref() {
             Some(target) => cfg_cache.entry(target).or_insert_with(|| {
-                let rustc_cfg = match sysroot {
-                    Some(sysroot) => RustcCfgConfig::Explicit(sysroot),
-                    None => RustcCfgConfig::Discover,
-                };
-
-                rustc_cfg::get(Some(target), extra_env, rustc_cfg)
+                rustc_cfg::get(Some(target), extra_env, RustcCfgConfig::Rustc(sysroot))
             }),
             None => &rustc_cfg,
         };
@@ -845,8 +868,6 @@ fn project_json_to_crate_graph(
             } else {
                 CrateOrigin::Local { repo: None, name: None }
             },
-            target_layout.clone(),
-            toolchain.clone(),
         );
         if *is_proc_macro {
             if let Some(path) = proc_macro_dylib_path.clone() {
@@ -873,7 +894,7 @@ fn project_json_to_crate_graph(
         for dep in &krate.deps {
             if let Some(&to) = crates.get(&dep.crate_id) {
-                add_dep(crate_graph, from, dep.name.clone(), to, dep.kind().to_owned())
+                add_dep(crate_graph, from, dep.name.clone(), to)
             }
         }
     }
@@ -889,22 +910,13 @@ fn cargo_to_crate_graph(
     rustc_cfg: Vec<CfgFlag>,
     override_cfg: &CfgOverrides,
     build_scripts: &WorkspaceBuildScripts,
-    target_layout: TargetLayoutLoadResult,
-    toolchain: Option<&Version>,
 ) -> (CrateGraph, ProcMacroPaths) {
     let _p = tracing::span!(tracing::Level::INFO, "cargo_to_crate_graph").entered();
     let mut res = (CrateGraph::default(), ProcMacroPaths::default());
     let crate_graph = &mut res.0;
     let proc_macros = &mut res.1;
     let (public_deps, libproc_macro) = match sysroot {
-        Some(sysroot) => sysroot_to_crate_graph(
-            crate_graph,
-            sysroot,
-            rustc_cfg.clone(),
-            target_layout.clone(),
-            load,
-            toolchain,
-        ),
+        Some(sysroot) => sysroot_to_crate_graph(crate_graph, sysroot, rustc_cfg.clone(), load),
         None => (SysrootPublicDeps::default(), None),
     };
 
@@ -926,8 +938,6 @@ fn cargo_to_crate_graph(
         // Add test cfg for local crates
         if cargo[pkg].is_local {
             cfg_options.insert_atom("test".into());
-        }
-        if cargo[pkg].is_member {
             cfg_options.insert_atom("rust_analyzer".into());
         }
 
@@ -949,7 +959,7 @@ fn cargo_to_crate_graph(
         let mut lib_tgt = None;
         for &tgt in cargo[pkg].targets.iter() {
-            if cargo[tgt].kind != TargetKind::Lib && !cargo[pkg].is_member {
+            if !matches!(cargo[tgt].kind, TargetKind::Lib { .. }) && !cargo[pkg].is_member {
                 // For non-workspace-members, Cargo does not resolve dev-dependencies, so we don't
                 // add any targets except the library target, since those will not work correctly if
                 // they use dev-dependencies.
                 // https://github.com/rust-lang/rust-analyzer/issues/11300
                 continue;
             }
-            let &TargetData { ref name, kind, is_proc_macro, ref root, .. } = &cargo[tgt];
-
-            if kind == TargetKind::Lib
-                && sysroot.map_or(false, |sysroot| root.starts_with(sysroot.src_root()))
-            {
-                if let Some(&(_, crate_id, _)) =
-                    public_deps.deps.iter().find(|(dep_name, ..)| dep_name.as_smol_str() == name)
-                {
-                    pkg_crates.entry(pkg).or_insert_with(Vec::new).push((crate_id, kind));
-
-                    lib_tgt = Some((crate_id, name.clone()));
-                    pkg_to_lib_crate.insert(pkg, crate_id);
-                    // sysroot is inside the workspace, prevent the sysroot crates from being duplicated here
-                    continue;
-                }
-            }
+            let &TargetData { ref name, kind, ref root, ..
} = &cargo[tgt];
             let Some(file_id) = load(root) else { continue };
 
+            let build_data = build_scripts.get_output(pkg);
+            let pkg_data = &cargo[pkg];
             let crate_id = add_target_crate_root(
                 crate_graph,
                 proc_macros,
-                &cargo[pkg],
-                build_scripts.get_output(pkg),
+                pkg_data,
+                build_data,
                 cfg_options.clone(),
                 file_id,
                 name,
-                is_proc_macro,
-                target_layout.clone(),
-                false,
-                toolchain.cloned(),
+                kind,
+                if pkg_data.is_local {
+                    CrateOrigin::Local {
+                        repo: pkg_data.repository.clone(),
+                        name: Some(pkg_data.name.clone()),
+                    }
+                } else {
+                    CrateOrigin::Library {
+                        repo: pkg_data.repository.clone(),
+                        name: pkg_data.name.clone(),
+                    }
+                },
             );
-            if kind == TargetKind::Lib {
+            if let TargetKind::Lib { .. } = kind {
                 lib_tgt = Some((crate_id, name.clone()));
                 pkg_to_lib_crate.insert(pkg, crate_id);
             }
             // Even crates that don't set proc-macro = true are allowed to depend on proc_macro
             // (just none of the APIs work when called outside of a proc macro).
             if let Some(proc_macro) = libproc_macro {
-                add_proc_macro_dep(crate_graph, crate_id, proc_macro, is_proc_macro);
+                add_proc_macro_dep(
+                    crate_graph,
+                    crate_id,
+                    proc_macro,
+                    matches!(kind, TargetKind::Lib { is_proc_macro: true }),
+                );
             }
 
             pkg_crates.entry(pkg).or_insert_with(Vec::new).push((crate_id, kind));
@@ -1016,7 +1026,7 @@ fn cargo_to_crate_graph(
                     // cargo metadata does not do any normalization,
                     // so we do it ourselves currently
                     let name = CrateName::normalize_dashes(&name);
-                    add_dep(crate_graph, from, name, to, DependencyKind::Normal);
+                    add_dep(crate_graph, from, name, to);
                 }
             }
         }
@@ -1036,17 +1046,7 @@ fn cargo_to_crate_graph(
                     continue;
                 }
 
-                add_dep(
-                    crate_graph,
-                    from,
-                    name.clone(),
-                    to,
-                    match dep.kind {
-                        DepKind::Normal => DependencyKind::Normal,
-                        DepKind::Dev => DependencyKind::Dev,
-                        DepKind::Build => DependencyKind::Build,
-                    },
-                )
+                add_dep(crate_graph, from, name.clone(), to)
             }
         }
     }
@@ -1074,8 +1074,6 @@ fn cargo_to_crate_graph(
             } else {
                 rustc_build_scripts
             },
-            target_layout,
-            toolchain,
         );
     }
 }
@@ -1087,19 +1085,11 @@ fn detached_files_to_crate_graph(
     load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
     detached_files: &[AbsPathBuf],
     sysroot: Option<&Sysroot>,
-    target_layout: TargetLayoutLoadResult,
 ) -> (CrateGraph, ProcMacroPaths) {
     let _p = tracing::span!(tracing::Level::INFO, "detached_files_to_crate_graph").entered();
     let mut crate_graph = CrateGraph::default();
     let (public_deps, _libproc_macro) = match sysroot {
-        Some(sysroot) => sysroot_to_crate_graph(
-            &mut crate_graph,
-            sysroot,
-            rustc_cfg.clone(),
-            target_layout.clone(),
-            load,
-            None,
-        ),
+        Some(sysroot) => sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load),
         None => (SysrootPublicDeps::default(), None),
     };
 
@@ -1131,8 +1121,6 @@ fn detached_files_to_crate_graph(
                 repo: None,
                 name: display_name.map(|n| n.canonical_name().to_owned()),
             },
-            target_layout.clone(),
-            None,
         );
 
         public_deps.add_to_crate_graph(&mut crate_graph, detached_file_crate);
@@ -1153,8 +1141,6 @@ fn handle_rustc_crates(
     cfg_options: &CfgOptions,
     override_cfg: &CfgOverrides,
     build_scripts: &WorkspaceBuildScripts,
-    target_layout: TargetLayoutLoadResult,
-    toolchain: Option<&Version>,
 ) {
     let mut rustc_pkg_crates = FxHashMap::default();
     // The root package of the rustc-dev component is rustc_driver, so we match that
@@ -1194,9 +1180,9 @@ fn handle_rustc_crates(
             };
             for &tgt in rustc_workspace[pkg].targets.iter() {
-                if rustc_workspace[tgt].kind != TargetKind::Lib {
+                let kind @ TargetKind::Lib { is_proc_macro } = rustc_workspace[tgt].kind else {
                     continue;
-                }
+                };
                 if let
Some(file_id) = load(&rustc_workspace[tgt].root) {
                     let crate_id = add_target_crate_root(
                         crate_graph,
@@ -1206,21 +1192,14 @@ fn handle_rustc_crates(
                         cfg_options.clone(),
                         file_id,
                         &rustc_workspace[tgt].name,
-                        rustc_workspace[tgt].is_proc_macro,
-                        target_layout.clone(),
-                        true,
-                        toolchain.cloned(),
+                        kind,
+                        CrateOrigin::Rustc { name: rustc_workspace[pkg].name.clone() },
                     );
                     pkg_to_lib_crate.insert(pkg, crate_id);
                     // Add dependencies on core / std / alloc for this crate
                     public_deps.add_to_crate_graph(crate_graph, crate_id);
                     if let Some(proc_macro) = libproc_macro {
-                        add_proc_macro_dep(
-                            crate_graph,
-                            crate_id,
-                            proc_macro,
-                            rustc_workspace[tgt].is_proc_macro,
-                        );
+                        add_proc_macro_dep(crate_graph, crate_id, proc_macro, is_proc_macro);
                     }
                     rustc_pkg_crates.entry(pkg).or_insert_with(Vec::new).push(crate_id);
                 }
@@ -1234,17 +1213,7 @@ fn handle_rustc_crates(
             let name = CrateName::new(&dep.name).unwrap();
             if let Some(&to) = pkg_to_lib_crate.get(&dep.pkg) {
                 for &from in rustc_pkg_crates.get(&pkg).into_iter().flatten() {
-                    add_dep(
-                        crate_graph,
-                        from,
-                        name.clone(),
-                        to,
-                        match dep.kind {
-                            DepKind::Normal => DependencyKind::Normal,
-                            DepKind::Dev => DependencyKind::Dev,
-                            DepKind::Build => DependencyKind::Build,
-                        },
-                    );
+                    add_dep(crate_graph, from, name.clone(), to);
                 }
             }
         }
@@ -1266,7 +1235,7 @@ fn handle_rustc_crates(
                 // `rust_analyzer` thinks that it should use the one from the `rustc_source`
                 // instead of the one from `crates.io`
                 if !crate_graph[*from].dependencies.iter().any(|d| d.name == name) {
-                    add_dep(crate_graph, *from, name.clone(), to, DependencyKind::Normal);
+                    add_dep(crate_graph, *from, name.clone(), to);
                 }
             }
         }
@@ -1282,10 +1251,8 @@ fn add_target_crate_root(
     cfg_options: CfgOptions,
     file_id: FileId,
     cargo_name: &str,
-    is_proc_macro: bool,
-    target_layout: TargetLayoutLoadResult,
-    rustc_crate: bool,
-    toolchain: Option<Version>,
+    kind: TargetKind,
+    origin: CrateOrigin,
 ) -> CrateId {
     let edition = pkg.edition;
     let potential_cfg_options = if pkg.features.is_empty() {
@@ -1332,18 +1299,10 @@ fn add_target_crate_root(
         cfg_options,
         potential_cfg_options,
         env,
-        is_proc_macro,
-        if rustc_crate {
-            CrateOrigin::Rustc { name: pkg.name.clone() }
-        } else if pkg.is_member {
-            CrateOrigin::Local { repo: pkg.repository.clone(), name: Some(pkg.name.clone()) }
-        } else {
-            CrateOrigin::Library { repo: pkg.repository.clone(), name: pkg.name.clone() }
-        },
-        target_layout,
-        toolchain,
+        matches!(kind, TargetKind::Lib { is_proc_macro: true }),
+        origin,
     );
-    if is_proc_macro {
+    if let TargetKind::Lib { is_proc_macro: true } = kind {
         let proc_macro = match build_data.as_ref().map(|it| it.proc_macro_dylib_path.as_ref()) {
             Some(it) => it.cloned().map(|path| Ok((Some(cargo_name.to_owned()), path))),
             None => Some(Err("crate has not yet been built".to_owned())),
@@ -1365,14 +1324,7 @@ impl SysrootPublicDeps {
     /// Makes `from` depend on the public sysroot crates.
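    // Throughout this file the `DependencyKind` argument disappears from
    // `add_dep`/`add_dep_with_prelude`: `Dependency::new` no longer records
    // whether an edge came from a normal, dev, or build dependency, so the
    // crate graph keeps only the edge itself plus the prelude flag used below.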
fn add_to_crate_graph(&self, crate_graph: &mut CrateGraph, from: CrateId) {
         for (name, krate, prelude) in &self.deps {
-            add_dep_with_prelude(
-                crate_graph,
-                from,
-                name.clone(),
-                *krate,
-                *prelude,
-                DependencyKind::Normal,
-            );
+            add_dep_with_prelude(crate_graph, from, name.clone(), *krate, *prelude);
         }
     }
 }
@@ -1381,9 +1333,7 @@ fn sysroot_to_crate_graph(
     crate_graph: &mut CrateGraph,
     sysroot: &Sysroot,
     rustc_cfg: Vec<CfgFlag>,
-    target_layout: TargetLayoutLoadResult,
     load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
-    toolchain: Option<&Version>,
 ) -> (SysrootPublicDeps, Option<CrateId>) {
     let _p = tracing::span!(tracing::Level::INFO, "sysroot_to_crate_graph").entered();
     match sysroot.mode() {
@@ -1396,8 +1346,6 @@ fn sysroot_to_crate_graph(
                 rustc_cfg,
                 &CfgOverrides::default(),
                 &WorkspaceBuildScripts::default(),
-                target_layout,
-                toolchain,
             );
 
             let mut pub_deps = vec![];
@@ -1440,17 +1388,16 @@ fn sysroot_to_crate_graph(
             // Remove all crates except the ones we are interested in to keep the sysroot graph small.
             let removed_mapping = cg.remove_crates_except(&marker_set);
+            let mapping = crate_graph.extend(cg, &mut pm, |(_, a), (_, b)| a == b);
 
-            crate_graph.extend(cg, &mut pm, |mapping| {
-                // Map the id through the removal mapping first, then through the crate graph extension mapping.
-                pub_deps.iter_mut().for_each(|(_, cid, _)| {
-                    *cid = mapping[&removed_mapping[cid.into_raw().into_u32() as usize].unwrap()]
-                });
-                if let Some(libproc_macro) = &mut libproc_macro {
-                    *libproc_macro = mapping
-                        [&removed_mapping[libproc_macro.into_raw().into_u32() as usize].unwrap()];
-                }
+            // Map the id through the removal mapping first, then through the crate graph extension mapping.
+            pub_deps.iter_mut().for_each(|(_, cid, _)| {
+                *cid = mapping[&removed_mapping[cid.into_raw().into_u32() as usize].unwrap()]
             });
+            if let Some(libproc_macro) = &mut libproc_macro {
+                *libproc_macro = mapping
+                    [&removed_mapping[libproc_macro.into_raw().into_u32() as usize].unwrap()];
+            }
 
             (SysrootPublicDeps { deps: pub_deps }, libproc_macro)
         }
@@ -1474,8 +1421,6 @@ fn sysroot_to_crate_graph(
                         env,
                         false,
                         CrateOrigin::Lang(LangCrateOrigin::from(&*stitched[krate].name)),
-                        target_layout.clone(),
-                        toolchain.cloned(),
                     );
                     Some((krate, crate_id))
                 })
@@ -1487,7 +1432,7 @@ fn sysroot_to_crate_graph(
                     if let (Some(&from), Some(&to)) =
                         (sysroot_crates.get(&from), sysroot_crates.get(&to))
                     {
-                        add_dep(crate_graph, from, name, to, DependencyKind::Normal);
+                        add_dep(crate_graph, from, name, to);
                     }
                 }
             }
@@ -1508,14 +1453,8 @@ fn sysroot_to_crate_graph(
     }
 }
 
-fn add_dep(
-    graph: &mut CrateGraph,
-    from: CrateId,
-    name: CrateName,
-    to: CrateId,
-    kind: DependencyKind,
-) {
-    add_dep_inner(graph, from, Dependency::new(name, to, kind))
+fn add_dep(graph: &mut CrateGraph, from: CrateId, name: CrateName, to: CrateId) {
+    add_dep_inner(graph, from, Dependency::new(name, to))
 }
 
 fn add_dep_with_prelude(
@@ -1524,20 +1463,12 @@ fn add_dep_with_prelude(
     name: CrateName,
     to: CrateId,
     prelude: bool,
-    kind: DependencyKind,
 ) {
-    add_dep_inner(graph, from, Dependency::with_prelude(name, to, prelude, kind))
+    add_dep_inner(graph, from, Dependency::with_prelude(name, to, prelude))
 }
 
 fn add_proc_macro_dep(crate_graph: &mut CrateGraph, from: CrateId, to: CrateId, prelude: bool) {
-    add_dep_with_prelude(
-        crate_graph,
-        from,
-        CrateName::new("proc_macro").unwrap(),
-        to,
-        prelude,
-        DependencyKind::Normal,
-    );
+    add_dep_with_prelude(crate_graph, from, CrateName::new("proc_macro").unwrap(), to, prelude);
 }
 
 fn add_dep_inner(graph: &mut CrateGraph, from: CrateId, dep:
Dependency) {
@@ -1588,3 +1519,29 @@ fn create_cfg_options(rustc_cfg: Vec<CfgFlag>) -> CfgOptions {
     cfg_options.insert_atom("debug_assertions".into());
     cfg_options
 }
+
+fn cargo_config_env(
+    cargo_toml: &ManifestPath,
+    extra_env: &FxHashMap<String, String>,
+    sysroot: Option<&Sysroot>,
+) -> FxHashMap<String, String> {
+    let mut cargo_config = Command::new(Tool::Cargo.path());
+    Sysroot::set_rustup_toolchain_env(&mut cargo_config, sysroot);
+    cargo_config.envs(extra_env);
+    cargo_config
+        .current_dir(cargo_toml.parent())
+        .args(["-Z", "unstable-options", "config", "get", "env"])
+        .env("RUSTC_BOOTSTRAP", "1");
+    // if successful we receive one `env.key.value = "value"` line per entry
+    tracing::debug!("Discovering cargo config env by {:?}", cargo_config);
+    utf8_stdout(cargo_config).map(parse_output_cargo_config_env).unwrap_or_default()
+}
+
+fn parse_output_cargo_config_env(stdout: String) -> FxHashMap<String, String> {
+    stdout
+        .lines()
+        .filter_map(|l| l.strip_prefix("env."))
+        .filter_map(|l| l.split_once(".value = "))
+        .map(|(key, value)| (key.to_owned(), value.trim_matches('"').to_owned()))
+        .collect()
+}
diff --git a/crates/project-model/test_data/output/cargo_hello_world_project_model.txt b/crates/project-model/test_data/output/cargo_hello_world_project_model.txt
index d8d9e559e5c1d..0ad19ca9f759d 100644
--- a/crates/project-model/test_data/output/cargo_hello_world_project_model.txt
+++ b/crates/project-model/test_data/output/cargo_hello_world_project_model.txt
@@ -48,7 +48,6 @@
                         name: CrateName(
                             "libc",
                         ),
-                        kind: Normal,
                         prelude: true,
                     },
                 ],
@@ -59,10 +58,6 @@
                 ),
             },
             is_proc_macro: false,
-            target_layout: Err(
-                "target_data_layout not loaded",
-            ),
-            toolchain: None,
         },
         1: CrateData {
             root_file_id: FileId(
@@ -113,7 +108,6 @@
                         name: CrateName(
                             "hello_world",
                         ),
-                        kind: Normal,
                         prelude: true,
                     },
                     Dependency {
@@ -121,7 +115,6 @@
                         name: CrateName(
                             "libc",
                         ),
-                        kind: Normal,
                         prelude: true,
                     },
                 ],
@@ -132,10 +125,6 @@
                 ),
             },
             is_proc_macro: false,
-            target_layout: Err(
-                "target_data_layout not loaded",
-            ),
-            toolchain: None,
         },
         2: CrateData {
             root_file_id: FileId(
@@ -186,7 +175,6 @@
                         name: CrateName(
                             "hello_world",
                         ),
-                        kind: Normal,
                         prelude: true,
                     },
                     Dependency {
@@ -194,7 +182,6 @@
                         name: CrateName(
                             "libc",
                         ),
-                        kind: Normal,
                         prelude: true,
                     },
                 ],
@@ -205,10 +192,6 @@
                 ),
             },
             is_proc_macro: false,
-            target_layout: Err(
-                "target_data_layout not loaded",
-            ),
-            toolchain: None,
         },
         3: CrateData {
             root_file_id: FileId(
@@ -259,7 +242,6 @@
                         name: CrateName(
                             "hello_world",
                         ),
-                        kind: Normal,
                         prelude: true,
                     },
                     Dependency {
@@ -267,7 +249,6 @@
                        name: CrateName(
                             "libc",
                         ),
-                        kind: Normal,
                         prelude: true,
                     },
                 ],
@@ -278,10 +259,6 @@
                 ),
             },
             is_proc_macro: false,
-            target_layout: Err(
-                "target_data_layout not loaded",
-            ),
-            toolchain: None,
         },
         4: CrateData {
             root_file_id: FileId(
@@ -347,9 +324,5 @@
                 name: "libc",
             },
             is_proc_macro: false,
-            target_layout: Err(
-                "target_data_layout not loaded",
-            ),
-            toolchain: None,
         },
     }
\ No newline at end of file
diff --git a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt
index d8d9e559e5c1d..0ad19ca9f759d 100644
--- a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt
+++ b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt
@@ -48,7 +48,6 @@
                         name: CrateName(
                             "libc",
                         ),
-                        kind: Normal,
                         prelude: true,
                     },
                 ],
@@ -59,10 +58,6 @@
                 ),
             },
             is_proc_macro: false,
-            target_layout: Err(
-
"target_data_layout not loaded", - ), - toolchain: None, }, 1: CrateData { root_file_id: FileId( @@ -113,7 +108,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -121,7 +115,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -132,10 +125,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 2: CrateData { root_file_id: FileId( @@ -186,7 +175,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -194,7 +182,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -205,10 +192,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 3: CrateData { root_file_id: FileId( @@ -259,7 +242,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -267,7 +249,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -278,10 +259,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 4: CrateData { root_file_id: FileId( @@ -347,9 +324,5 @@ name: "libc", }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, } \ No newline at end of file diff --git a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt index e0ba5ed498fa8..e2334dca87579 100644 --- a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt +++ b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt @@ -47,7 +47,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -58,10 +57,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 1: CrateData { root_file_id: FileId( @@ -111,7 +106,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -119,7 +113,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -130,10 +123,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 2: CrateData { root_file_id: FileId( @@ -183,7 +172,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -191,7 +179,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -202,10 +189,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 3: CrateData { root_file_id: FileId( @@ -255,7 +238,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -263,7 +245,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -274,10 +255,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 4: CrateData { root_file_id: FileId( @@ -343,9 +320,5 @@ name: "libc", }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, } \ No newline at end of file diff --git a/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt b/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt index 0df99534c5bd9..ccaba963deda3 100644 --- 
a/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt +++ b/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt @@ -28,7 +28,6 @@ name: CrateName( "core", ), - kind: Normal, prelude: true, }, ], @@ -36,10 +35,6 @@ Alloc, ), is_proc_macro: false, - target_layout: Err( - "rust-project.json projects have no target layout set", - ), - toolchain: None, }, 1: CrateData { root_file_id: FileId( @@ -69,10 +64,6 @@ Core, ), is_proc_macro: false, - target_layout: Err( - "rust-project.json projects have no target layout set", - ), - toolchain: None, }, 2: CrateData { root_file_id: FileId( @@ -102,10 +93,6 @@ Other, ), is_proc_macro: false, - target_layout: Err( - "rust-project.json projects have no target layout set", - ), - toolchain: None, }, 3: CrateData { root_file_id: FileId( @@ -135,10 +122,6 @@ Other, ), is_proc_macro: false, - target_layout: Err( - "rust-project.json projects have no target layout set", - ), - toolchain: None, }, 4: CrateData { root_file_id: FileId( @@ -169,7 +152,6 @@ name: CrateName( "std", ), - kind: Normal, prelude: true, }, Dependency { @@ -177,7 +159,6 @@ name: CrateName( "core", ), - kind: Normal, prelude: true, }, ], @@ -185,10 +166,6 @@ ProcMacro, ), is_proc_macro: false, - target_layout: Err( - "rust-project.json projects have no target layout set", - ), - toolchain: None, }, 5: CrateData { root_file_id: FileId( @@ -218,10 +195,6 @@ Other, ), is_proc_macro: false, - target_layout: Err( - "rust-project.json projects have no target layout set", - ), - toolchain: None, }, 6: CrateData { root_file_id: FileId( @@ -252,7 +225,6 @@ name: CrateName( "alloc", ), - kind: Normal, prelude: true, }, Dependency { @@ -260,7 +232,6 @@ name: CrateName( "panic_unwind", ), - kind: Normal, prelude: true, }, Dependency { @@ -268,7 +239,6 @@ name: CrateName( "panic_abort", ), - kind: Normal, prelude: true, }, Dependency { @@ -276,7 +246,6 @@ name: CrateName( "core", ), - kind: Normal, prelude: true, }, Dependency { @@ -284,7 +253,6 @@ name: CrateName( "profiler_builtins", ), - kind: Normal, prelude: true, }, Dependency { @@ -292,7 +260,6 @@ name: CrateName( "unwind", ), - kind: Normal, prelude: true, }, Dependency { @@ -300,7 +267,6 @@ name: CrateName( "std_detect", ), - kind: Normal, prelude: true, }, Dependency { @@ -308,7 +274,6 @@ name: CrateName( "test", ), - kind: Normal, prelude: true, }, ], @@ -316,10 +281,6 @@ Std, ), is_proc_macro: false, - target_layout: Err( - "rust-project.json projects have no target layout set", - ), - toolchain: None, }, 7: CrateData { root_file_id: FileId( @@ -349,10 +310,6 @@ Other, ), is_proc_macro: false, - target_layout: Err( - "rust-project.json projects have no target layout set", - ), - toolchain: None, }, 8: CrateData { root_file_id: FileId( @@ -382,10 +339,6 @@ Test, ), is_proc_macro: false, - target_layout: Err( - "rust-project.json projects have no target layout set", - ), - toolchain: None, }, 9: CrateData { root_file_id: FileId( @@ -415,10 +368,6 @@ Other, ), is_proc_macro: false, - target_layout: Err( - "rust-project.json projects have no target layout set", - ), - toolchain: None, }, 10: CrateData { root_file_id: FileId( @@ -449,7 +398,6 @@ name: CrateName( "core", ), - kind: Normal, prelude: true, }, Dependency { @@ -457,7 +405,6 @@ name: CrateName( "alloc", ), - kind: Normal, prelude: true, }, Dependency { @@ -465,7 +412,6 @@ name: CrateName( "std", ), - kind: Normal, prelude: true, }, Dependency { @@ -473,7 +419,6 @@ name: CrateName( "test", ), - kind: Normal, prelude: false, }, 
Dependency {
@@ -481,7 +426,6 @@
                         name: CrateName(
                             "proc_macro",
                         ),
-                        kind: Normal,
                         prelude: false,
                     },
                 ],
@@ -492,9 +436,5 @@
                 ),
             },
             is_proc_macro: false,
-            target_layout: Err(
-                "rust-project.json projects have no target layout set",
-            ),
-            toolchain: None,
         },
     }
\ No newline at end of file
diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs
index 269dd3cfffe95..07e04a8366173 100644
--- a/crates/rust-analyzer/src/bin/main.rs
+++ b/crates/rust-analyzer/src/bin/main.rs
@@ -11,7 +11,7 @@ extern crate rustc_driver as _;
 
 mod rustc_wrapper;
 
-use std::{env, fs, path::PathBuf, process, sync::Arc};
+use std::{env, fs, path::PathBuf, process::ExitCode, sync::Arc};
 
 use anyhow::Context;
 use lsp_server::Connection;
@@ -27,21 +27,15 @@ static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;
 #[global_allocator]
 static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;
 
-fn main() -> anyhow::Result<()> {
+fn main() -> anyhow::Result<ExitCode> {
     if std::env::var("RA_RUSTC_WRAPPER").is_ok() {
-        let mut args = std::env::args_os();
-        let _me = args.next().unwrap();
-        let rustc = args.next().unwrap();
-        let code = match rustc_wrapper::run_rustc_skipping_cargo_checking(rustc, args.collect()) {
-            Ok(rustc_wrapper::ExitCode(code)) => code.unwrap_or(102),
-            Err(err) => {
-                eprintln!("{err}");
-                101
-            }
-        };
-        process::exit(code);
+        rustc_wrapper::main().map_err(Into::into)
+    } else {
+        actual_main()
     }
+}
 
+fn actual_main() -> anyhow::Result<ExitCode> {
     let flags = flags::RustAnalyzer::from_env_or_exit();
 
     #[cfg(debug_assertions)]
@@ -58,14 +52,14 @@ fn main() -> anyhow::Result<()> {
     let verbosity = flags.verbosity();
 
     match flags.subcommand {
-        flags::RustAnalyzerCmd::LspServer(cmd) => {
+        flags::RustAnalyzerCmd::LspServer(cmd) => 'lsp_server: {
             if cmd.print_config_schema {
                 println!("{:#}", Config::json_schema());
-                return Ok(());
+                break 'lsp_server;
             }
             if cmd.version {
                 println!("rust-analyzer {}", rust_analyzer::version());
-                return Ok(());
+                break 'lsp_server;
             }
 
             // rust-analyzer’s “main thread” is actually
@@ -90,7 +84,7 @@ fn main() -> anyhow::Result<()> {
         flags::RustAnalyzerCmd::RunTests(cmd) => cmd.run()?,
         flags::RustAnalyzerCmd::RustcTests(cmd) => cmd.run()?,
     }
-    Ok(())
+    Ok(ExitCode::SUCCESS)
 }
 
 fn setup_logging(log_file_flag: Option<PathBuf>) -> anyhow::Result<()> {
diff --git a/crates/rust-analyzer/src/bin/rustc_wrapper.rs b/crates/rust-analyzer/src/bin/rustc_wrapper.rs
index 38e9c7dd7e11c..684b3f52afc86 100644
--- a/crates/rust-analyzer/src/bin/rustc_wrapper.rs
+++ b/crates/rust-analyzer/src/bin/rustc_wrapper.rs
@@ -7,13 +7,17 @@
 use std::{
     ffi::OsString,
     io,
-    process::{Command, Stdio},
+    process::{Command, ExitCode, Stdio},
 };
 
-/// ExitCode/ExitStatus are impossible to create :(.
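// Context for the removal below: the hand-rolled wrapper became unnecessary
// once std's `process::ExitCode` stabilized (Rust 1.61). It implements
// `From<u8>` and `Termination`, so the wrapper can build codes with
// `ExitCode::from(0)` and `main` can return `Result<ExitCode, _>` directly,
// as the replacement code above and below does.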
-pub(crate) struct ExitCode(pub(crate) Option<i32>);
+pub(crate) fn main() -> io::Result<ExitCode> {
+    let mut args = std::env::args_os();
+    let _me = args.next().unwrap();
+    let rustc = args.next().unwrap();
+    run_rustc_skipping_cargo_checking(rustc, args.collect())
+}
 
-pub(crate) fn run_rustc_skipping_cargo_checking(
+fn run_rustc_skipping_cargo_checking(
     rustc_executable: OsString,
     args: Vec<OsString>,
 ) -> io::Result<ExitCode> {
@@ -35,9 +39,10 @@ pub(crate) fn run_rustc_skipping_cargo_checking(
         arg.starts_with("--emit=") && arg.contains("metadata") && !arg.contains("link")
     });
     if not_invoked_by_build_script && is_cargo_check {
-        return Ok(ExitCode(Some(0)));
+        Ok(ExitCode::from(0))
+    } else {
+        run_rustc(rustc_executable, args)
     }
-    run_rustc(rustc_executable, args)
 }
 
 fn run_rustc(rustc_executable: OsString, args: Vec<OsString>) -> io::Result<ExitCode> {
@@ -47,5 +52,5 @@ fn run_rustc(rustc_executable: OsString, args: Vec<OsString>) -> io::Result<ExitCode> {
+        TargetKind::Lib { is_proc_macro: _ } => {
             buf.push("--lib".to_owned());
         }
         TargetKind::Other | TargetKind::BuildScript => (),
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs
index 2741b45222569..ce7e3b3cd6a44 100644
--- a/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -32,7 +32,7 @@ use oorandom::Rand32;
 use profile::{Bytes, StopWatch};
 use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource};
 use rayon::prelude::*;
-use rustc_hash::FxHashSet;
+use rustc_hash::{FxHashMap, FxHashSet};
 use syntax::{AstNode, SyntaxNode};
 use vfs::{AbsPathBuf, FileId, Vfs, VfsPath};
 
@@ -91,7 +91,7 @@ impl flags::AnalysisStats {
         };
 
         let (host, vfs, _proc_macro) =
-            load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?;
+            load_workspace(workspace.clone(), &cargo_config.extra_env, &load_cargo_config)?;
         let db = host.raw_database();
         eprint!("{:<20} {}", "Database loaded:", db_load_sw.elapsed());
         eprint!(" (metadata {metadata_time}");
@@ -232,7 +232,11 @@ impl flags::AnalysisStats {
         }
 
         if self.run_all_ide_things {
-            self.run_ide_things(host.analysis(), file_ids);
+            self.run_ide_things(host.analysis(), file_ids.clone());
+        }
+
+        if self.run_term_search {
+            self.run_term_search(&workspace, db, &vfs, file_ids, verbosity);
         }
 
         let total_span = analysis_sw.elapsed();
@@ -321,6 +325,212 @@ impl flags::AnalysisStats {
         report_metric("const eval time", const_eval_time.time.as_millis() as u64, "ms");
     }
 
+    fn run_term_search(
+        &self,
+        ws: &ProjectWorkspace,
+        db: &RootDatabase,
+        vfs: &Vfs,
+        mut file_ids: Vec<FileId>,
+        verbosity: Verbosity,
+    ) {
+        let cargo_config = CargoConfig {
+            sysroot: match self.no_sysroot {
+                true => None,
+                false => Some(RustLibSource::Discover),
+            },
+            ..Default::default()
+        };
+
+        let mut bar = match verbosity {
+            Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
+            _ if self.parallel || self.output.is_some() => ProgressReport::hidden(),
+            _ => ProgressReport::new(file_ids.len() as u64),
+        };
+
+        file_ids.sort();
+        file_ids.dedup();
+
+        #[derive(Debug, Default)]
+        struct Acc {
+            tail_expr_syntax_hits: u64,
+            tail_expr_no_term: u64,
+            total_tail_exprs: u64,
+            error_codes: FxHashMap<String, u32>,
+            syntax_errors: u32,
+        }
+
+        let mut acc: Acc = Default::default();
+        bar.tick();
+        let mut sw = self.stop_watch();
+
+        for &file_id in &file_ids {
+            let sema = hir::Semantics::new(db);
+            let _ = db.parse(file_id);
+
+            let parse = sema.parse(file_id);
+            let file_txt = db.file_text(file_id);
+            let path = vfs.file_path(file_id).as_path().unwrap().to_owned();
+
+            for node
+            for node in parse.syntax().descendants() {
+                let expr = match syntax::ast::Expr::cast(node.clone()) {
+                    Some(it) => it,
+                    None => continue,
+                };
+                let block = match syntax::ast::BlockExpr::cast(expr.syntax().clone()) {
+                    Some(it) => it,
+                    None => continue,
+                };
+                let target_ty = match sema.type_of_expr(&expr) {
+                    Some(it) => it.adjusted(),
+                    None => continue, // Failed to infer type
+                };
+
+                let expected_tail = match block.tail_expr() {
+                    Some(it) => it,
+                    None => continue,
+                };
+
+                if expected_tail.is_block_like() {
+                    continue;
+                }
+
+                let range = sema.original_range(expected_tail.syntax()).range;
+                let original_text: String = db
+                    .file_text(file_id)
+                    .chars()
+                    .skip(usize::from(range.start()))
+                    .take(usize::from(range.end()) - usize::from(range.start()))
+                    .collect();
+
+                let scope = match sema.scope(expected_tail.syntax()) {
+                    Some(it) => it,
+                    None => continue,
+                };
+
+                let ctx = hir::term_search::TermSearchCtx {
+                    sema: &sema,
+                    scope: &scope,
+                    goal: target_ty,
+                    config: hir::term_search::TermSearchConfig {
+                        enable_borrowcheck: true,
+                        ..Default::default()
+                    },
+                };
+                let found_terms = hir::term_search::term_search(&ctx);
+
+                if found_terms.is_empty() {
+                    acc.tail_expr_no_term += 1;
+                    acc.total_tail_exprs += 1;
+                    // println!("\n{}\n", &original_text);
+                    continue;
+                };
+
+                fn trim(s: &str) -> String {
+                    s.chars().filter(|c| !c.is_whitespace()).collect()
+                }
+
+                let todo = syntax::ast::make::ext::expr_todo().to_string();
+                let mut formatter = |_: &hir::Type| todo.clone();
+                let mut syntax_hit_found = false;
+                for term in found_terms {
+                    let generated =
+                        term.gen_source_code(&scope, &mut formatter, false, true).unwrap();
+                    syntax_hit_found |= trim(&original_text) == trim(&generated);
+
+                    // Validate if type-checks
+                    let mut txt = file_txt.to_string();
+
+                    let edit = ide::TextEdit::replace(range, generated.clone());
+                    edit.apply(&mut txt);
+
+                    if self.validate_term_search {
+                        std::fs::write(&path, txt).unwrap();
+
+                        let res = ws.run_build_scripts(&cargo_config, &|_| ()).unwrap();
+                        if let Some(err) = res.error() {
+                            if err.contains("error: could not compile") {
+                                if let Some(mut err_idx) = err.find("error[E") {
+                                    err_idx += 7;
+                                    let err_code = &err[err_idx..err_idx + 4];
+                                    match err_code {
+                                        "0282" => continue, // Byproduct of testing method
+                                        "0277" if generated.contains(&todo) => continue, // See https://github.com/rust-lang/rust/issues/69882
+                                        _ => (),
+                                    }
+                                    bar.println(err);
+                                    bar.println(generated);
+                                    acc.error_codes
+                                        .entry(err_code.to_owned())
+                                        .and_modify(|n| *n += 1)
+                                        .or_insert(1);
+                                } else {
+                                    acc.syntax_errors += 1;
+                                    bar.println(format!("Syntax error: \n{}", err));
+                                }
+                            }
+                        }
+                    }
+                }
+
+                if syntax_hit_found {
+                    acc.tail_expr_syntax_hits += 1;
+                }
+                acc.total_tail_exprs += 1;
+
+                let msg = move || {
+                    format!(
+                        "processing: {:<50}",
+                        trim(&original_text).chars().take(50).collect::<String>()
+                    )
+                };
+                if verbosity.is_spammy() {
+                    bar.println(msg());
+                }
+                bar.set_message(msg);
+            }
+            // Revert file back to original state
+            if self.validate_term_search {
+                std::fs::write(&path, file_txt.to_string()).unwrap();
+            }
+
+            bar.inc(1);
+        }
+        let term_search_time = sw.elapsed();
+
+        bar.println(format!(
+            "Tail Expr syntactic hits: {}/{} ({}%)",
+            acc.tail_expr_syntax_hits,
+            acc.total_tail_exprs,
+            percentage(acc.tail_expr_syntax_hits, acc.total_tail_exprs)
+        ));
+        bar.println(format!(
+            "Tail Exprs found: {}/{} ({}%)",
+            acc.total_tail_exprs - acc.tail_expr_no_term,
+            acc.total_tail_exprs,
+            percentage(acc.total_tail_exprs - acc.tail_expr_no_term, acc.total_tail_exprs)
+        ));
+        if self.validate_term_search {
+            bar.println(format!(
+                "Tail Exprs total errors: {}, syntax errors: {}, error codes:",
+                acc.error_codes.values().sum::<u32>() + acc.syntax_errors,
+                acc.syntax_errors,
+            ));
+            for (err, count) in acc.error_codes {
+                bar.println(format!(
+                    "    E{err}: {count:>5} (https://doc.rust-lang.org/error_codes/E{err}.html)"
+                ));
+            }
+        }
+        bar.println(format!(
+            "Term search avg time: {}ms",
+            term_search_time.time.as_millis() as u64 / acc.total_tail_exprs
+        ));
+        bar.println(format!("{:<20} {}", "Term search:", term_search_time));
+        report_metric("term search time", term_search_time.time.as_millis() as u64, "ms");
+
+        bar.finish_and_clear();
+    }
+
     fn run_mir_lowering(&self, db: &RootDatabase, bodies: &[DefWithBody], verbosity: Verbosity) {
         let mut sw = self.stop_watch();
         let mut all = 0;
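The statistics printed above count, per tail expression, whether term search can re-derive an expression of the expected type, and whether one of the found terms matches the original source token-for-token (ignoring whitespace). A hand-written illustration of the kind of goal this measures (the example code is assumed, not taken from any test suite):

```rust
// Term search works against the *type* of a tail expression: given the goal
// type `Option<i32>` and `x: i32` in scope, a search result such as `Some(x)`
// counts as a syntactic hit because it matches the original tail expression.
fn wrap(x: i32) -> Option<i32> {
    Some(x) // the tail expression that the stats try to re-synthesize
}

fn main() {
    assert_eq!(wrap(1), Some(1));
}
```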
MyTypeAlias#", ); } + + #[test] + fn documentation_matches_doc_comment() { + let s = "/// foo\nfn bar() {}"; + + let mut host = AnalysisHost::default(); + let change_fixture = ChangeFixture::parse(s); + host.raw_database_mut().apply_change(change_fixture.change); + + let analysis = host.analysis(); + let si = StaticIndex::compute(&analysis); + + let file = si.files.first().unwrap(); + let (_, token_id) = file.tokens.first().unwrap(); + let token = si.tokens.get(*token_id).unwrap(); + + assert_eq!(token.documentation.as_ref().map(|d| d.as_str()), Some("foo")); + } } diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 7bdd9ec866a5a..16e1a2f544907 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -112,7 +112,7 @@ config_data! { cargo_buildScripts_overrideCommand: Option> = "null", /// Rerun proc-macros building/build-scripts running when proc-macro /// or build-script sources change and are saved. - cargo_buildScripts_rebuildOnSave: bool = "false", + cargo_buildScripts_rebuildOnSave: bool = "true", /// Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to /// avoid checking unnecessary things. cargo_buildScripts_useRustcWrapper: bool = "true", @@ -209,6 +209,11 @@ config_data! { /// by changing `#rust-analyzer.check.invocationStrategy#` and /// `#rust-analyzer.check.invocationLocation#`. /// + /// If `$saved_file` is part of the command, rust-analyzer will pass + /// the absolute path of the saved file to the provided command. This is + /// intended to be used with non-Cargo build systems. + /// Note that `$saved_file` is experimental and may be removed in the futureg. + /// /// An example command would be: /// /// ```bash @@ -286,6 +291,8 @@ config_data! { "scope": "expr" } }"#, + /// Whether to enable term search based snippets like `Some(foo.bar().baz())`. + completion_termSearch_enable: bool = "false", /// List of rust-analyzer diagnostics to disable. diagnostics_disabled: FxHashSet = "[]", @@ -504,9 +511,6 @@ config_data! { /// Exclude tests from find-all-references. references_excludeTests: bool = "false", - /// Allow renaming of items not belonging to the loaded workspaces. - rename_allowExternalItems: bool = "false", - /// Command to be executed instead of 'cargo' for runnables. runnables_command: Option = "null", @@ -1202,7 +1206,7 @@ impl Config { Some(AbsPathBuf::try_from(path).unwrap_or_else(|path| self.root_path.join(path))) } - pub fn dummy_replacements(&self) -> &FxHashMap, Box<[Box]>> { + pub fn ignored_proc_macros(&self) -> &FxHashMap, Box<[Box]>> { &self.data.procMacro_ignored } @@ -1535,6 +1539,7 @@ impl Config { && completion_item_edit_resolve(&self.caps), enable_self_on_the_fly: self.data.completion_autoself_enable, enable_private_editable: self.data.completion_privateEditable_enable, + enable_term_search: self.data.completion_termSearch_enable, full_function_signatures: self.data.completion_fullFunctionSignatures_enable, callable: match self.data.completion_callable_snippets { CallableCompletionDef::FillArguments => Some(CallableSnippets::FillArguments), @@ -1766,10 +1771,6 @@ impl Config { self.data.typing_autoClosingAngleBrackets_enable } - pub fn rename(&self) -> bool { - self.data.rename_allowExternalItems - } - // FIXME: VSCode seems to work wrong sometimes, see https://github.com/microsoft/vscode/issues/193124 // hence, distinguish it for now. 
diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs
index da4422a60a8a9..293807a383baa 100644
--- a/crates/rust-analyzer/src/global_state.rs
+++ b/crates/rust-analyzer/src/global_state.rs
@@ -9,7 +9,7 @@ use crossbeam_channel::{unbounded, Receiver, Sender};
 use flycheck::FlycheckHandle;
 use hir::Change;
 use ide::{Analysis, AnalysisHost, Cancellable, FileId};
-use ide_db::base_db::{CrateId, FileLoader, ProcMacroPaths, SourceDatabase};
+use ide_db::base_db::{CrateId, ProcMacroPaths};
 use load_cargo::SourceRootConfig;
 use lsp_types::{SemanticTokens, Url};
 use nohash_hasher::IntMap;
@@ -74,8 +74,8 @@ pub(crate) struct GlobalState {
     pub(crate) last_reported_status: Option<lsp_ext::ServerStatusParams>,
 
     // proc macros
-    pub(crate) proc_macro_changed: bool,
     pub(crate) proc_macro_clients: Arc<[anyhow::Result<ProcMacroServer>]>,
+    pub(crate) build_deps_changed: bool,
 
     // Flycheck
     pub(crate) flycheck: Arc<[FlycheckHandle]>,
@@ -203,9 +203,10 @@ impl GlobalState {
             source_root_config: SourceRootConfig::default(),
             config_errors: Default::default(),
 
-            proc_macro_changed: false,
             proc_macro_clients: Arc::from_iter([]),
 
+            build_deps_changed: false,
+
             flycheck: Arc::from_iter([]),
             flycheck_sender,
             flycheck_receiver,
@@ -300,12 +301,19 @@ impl GlobalState {
                     if let Some(path) = vfs_path.as_path() {
                         let path = path.to_path_buf();
                         if reload::should_refresh_for_change(&path, file.kind()) {
-                            workspace_structure_change = Some((path.clone(), false));
+                            workspace_structure_change = Some((
+                                path.clone(),
+                                false,
+                                AsRef::<std::path::Path>::as_ref(&path).ends_with("build.rs"),
+                            ));
                         }
                         if file.is_created_or_deleted() {
                             has_structure_changes = true;
-                            workspace_structure_change =
-                                Some((path, self.crate_graph_file_dependencies.contains(vfs_path)));
+                            workspace_structure_change = Some((
+                                path,
+                                self.crate_graph_file_dependencies.contains(vfs_path),
+                                false,
+                            ));
                         } else if path.extension() == Some("rs".as_ref()) {
                             modified_rust_files.push(file.file_id);
                         }
@@ -346,23 +354,28 @@ impl GlobalState {
             };
 
             self.analysis_host.apply_change(change);
+
             {
-                let raw_database = self.analysis_host.raw_database();
+                if !matches!(&workspace_structure_change, Some((.., true))) {
+                    _ = self
+                        .deferred_task_queue
+                        .sender
+                        .send(crate::main_loop::QueuedTask::CheckProcMacroSources(modified_rust_files));
+                }
                 // FIXME: ideally we should only trigger a workspace fetch for non-library changes
                 // but something's going wrong with the source root business when we add a new local
                 // crate see https://github.com/rust-lang/rust-analyzer/issues/13029
-                if let Some((path, force_crate_graph_reload)) = workspace_structure_change {
+                if let Some((path, force_crate_graph_reload, build_scripts_touched)) =
+                    workspace_structure_change
+                {
                     self.fetch_workspaces_queue.request_op(
                         format!("workspace vfs file change: {path}"),
                         force_crate_graph_reload,
                     );
+                    if build_scripts_touched {
+                        self.fetch_build_data_queue.request_op(format!("build.rs changed: {path}"), ());
+                    }
                 }
-                self.proc_macro_changed = modified_rust_files.into_iter().any(|file_id| {
-                    let crates = raw_database.relevant_crates(file_id);
-                    let crate_graph = raw_database.crate_graph();
-
-                    crates.iter().any(|&krate| crate_graph[krate].is_proc_macro)
-                });
             }
 
         true
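With `proc_macro_changed` generalized to `build_deps_changed`, a VFS change now marks build dependencies dirty when the changed file is a build script. A toy illustration of the path check used above (illustrative only):

```rust
use std::path::Path;

// `Path::ends_with` compares whole path components, so this matches any
// `build.rs` file while leaving e.g. `my-build.rs` alone.
fn is_build_script(path: &Path) -> bool {
    path.ends_with("build.rs")
}

fn main() {
    assert!(is_build_script(Path::new("/proj/crate-a/build.rs")));
    assert!(!is_build_script(Path::new("/proj/crate-a/src/lib.rs")));
    assert!(!is_build_script(Path::new("/proj/crate-a/my-build.rs")));
}
```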
diff --git a/crates/rust-analyzer/src/handlers/notification.rs b/crates/rust-analyzer/src/handlers/notification.rs
index d3c2073f09d25..b13c709dbfe60 100644
--- a/crates/rust-analyzer/src/handlers/notification.rs
+++ b/crates/rust-analyzer/src/handlers/notification.rs
@@ -145,11 +145,11 @@ pub(crate) fn handle_did_save_text_document(
     state: &mut GlobalState,
     params: DidSaveTextDocumentParams,
 ) -> anyhow::Result<()> {
-    if state.config.script_rebuild_on_save() && state.proc_macro_changed {
-        // reset the flag
-        state.proc_macro_changed = false;
-        // rebuild the proc macros
-        state.fetch_build_data_queue.request_op("ScriptRebuildOnSave".to_owned(), ());
+    if state.config.script_rebuild_on_save() && state.build_deps_changed {
+        state.build_deps_changed = false;
+        state
+            .fetch_build_data_queue
+            .request_op("build_deps_changed - save notification".to_owned(), ());
     }
 
     if let Ok(vfs_path) = from_proto::vfs_path(&params.text_document.uri) {
@@ -158,7 +158,7 @@ pub(crate) fn handle_did_save_text_document(
             if reload::should_refresh_for_change(abs_path, ChangeKind::Modify) {
                 state
                     .fetch_workspaces_queue
-                    .request_op(format!("DidSaveTextDocument {abs_path}"), false);
+                    .request_op(format!("workspace vfs file change saved {abs_path}"), false);
             }
         }
 
@@ -168,7 +168,7 @@ pub(crate) fn handle_did_save_text_document(
     } else if state.config.check_on_save() {
         // No specific flycheck was triggered, so let's trigger all of them.
         for flycheck in state.flycheck.iter() {
-            flycheck.restart_workspace();
+            flycheck.restart_workspace(None);
         }
     }
     Ok(())
@@ -314,6 +314,8 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool {
                 Some((idx, package))
             });
 
+        let saved_file = vfs_path.as_path().map(|p| p.to_owned());
+
         // Find and trigger corresponding flychecks
         for flycheck in world.flycheck.iter() {
             for (id, package) in workspace_ids.clone() {
@@ -321,7 +323,7 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool {
                     updated = true;
                     match package.filter(|_| !world.config.flycheck_workspace()) {
                         Some(package) => flycheck.restart_for_package(package),
-                        None => flycheck.restart_workspace(),
+                        None => flycheck.restart_workspace(saved_file.clone()),
                     }
                     continue;
                 }
@@ -330,7 +332,7 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool {
         // No specific flycheck was triggered, so let's trigger all of them.
         if !updated {
             for flycheck in world.flycheck.iter() {
-                flycheck.restart_workspace();
+                flycheck.restart_workspace(saved_file.clone());
             }
         }
         Ok(())
@@ -372,7 +374,7 @@ pub(crate) fn handle_run_flycheck(
     }
     // No specific flycheck was triggered, so let's trigger all of them.
     for flycheck in state.flycheck.iter() {
-        flycheck.restart_workspace();
+        flycheck.restart_workspace(None);
     }
     Ok(())
 }
diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs
index 2a3633a48e9fa..eb9d4bf0f02d7 100644
--- a/crates/rust-analyzer/src/handlers/request.rs
+++ b/crates/rust-analyzer/src/handlers/request.rs
@@ -52,7 +52,7 @@ use crate::{
 
 pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> anyhow::Result<()> {
     state.proc_macro_clients = Arc::from_iter([]);
-    state.proc_macro_changed = false;
+    state.build_deps_changed = false;
 
     state.fetch_workspaces_queue.request_op("reload workspace request".to_owned(), false);
     Ok(())
@@ -60,7 +60,7 @@ pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> anyhow::Result<()> {
 pub(crate) fn handle_proc_macros_rebuild(state: &mut GlobalState, _: ()) -> anyhow::Result<()> {
     state.proc_macro_clients = Arc::from_iter([]);
-    state.proc_macro_changed = false;
+    state.build_deps_changed = false;
 
     state.fetch_build_data_queue.request_op("rebuild proc macros request".to_owned(), ());
     Ok(())
@@ -1017,10 +1017,8 @@ pub(crate) fn handle_rename(
     let _p = tracing::span!(tracing::Level::INFO, "handle_rename").entered();
     let position = from_proto::file_position(&snap, params.text_document_position)?;
 
-    let mut change = snap
-        .analysis
-        .rename(position, &params.new_name, snap.config.rename())?
-        .map_err(to_proto::rename_error)?;
+    let mut change =
+        snap.analysis.rename(position, &params.new_name)?.map_err(to_proto::rename_error)?;
 
     // this is kind of a hack to prevent double edits from happening when moving files
     // When a module gets renamed by renaming the mod declaration this causes the file to move
@@ -1937,6 +1935,7 @@ fn run_rustfmt(
     let mut command = match snap.config.rustfmt() {
         RustfmtConfig::Rustfmt { extra_args, enable_range_formatting } => {
+            // FIXME: Set RUSTUP_TOOLCHAIN
             let mut cmd = process::Command::new(toolchain::rustfmt());
             cmd.envs(snap.config.extra_env());
             cmd.args(extra_args);
diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs
index acc02d6447c63..f0eee77aff592 100644
--- a/crates/rust-analyzer/src/integrated_benchmarks.rs
+++ b/crates/rust-analyzer/src/integrated_benchmarks.rs
@@ -132,6 +132,7 @@ fn integrated_completion_benchmark() {
         enable_imports_on_the_fly: true,
         enable_self_on_the_fly: true,
         enable_private_editable: true,
+        enable_term_search: true,
         full_function_signatures: false,
         callable: Some(CallableSnippets::FillArguments),
         snippet_cap: SnippetCap::new(true),
@@ -175,6 +176,7 @@ fn integrated_completion_benchmark() {
         enable_imports_on_the_fly: true,
         enable_self_on_the_fly: true,
         enable_private_editable: true,
+        enable_term_search: true,
         full_function_signatures: false,
         callable: Some(CallableSnippets::FillArguments),
         snippet_cap: SnippetCap::new(true),
@@ -216,6 +218,7 @@ fn integrated_completion_benchmark() {
         enable_imports_on_the_fly: true,
         enable_self_on_the_fly: true,
         enable_private_editable: true,
+        enable_term_search: true,
         full_function_signatures: false,
         callable: Some(CallableSnippets::FillArguments),
         snippet_cap: SnippetCap::new(true),
diff --git a/crates/rust-analyzer/src/lib.rs b/crates/rust-analyzer/src/lib.rs
index b1809f58ae700..473ca991ad9b0 100644
--- a/crates/rust-analyzer/src/lib.rs
+++ b/crates/rust-analyzer/src/lib.rs
@@ -47,7 +47,9 @@ mod integrated_benchmarks;
 
 use serde::de::DeserializeOwned;
 
-pub use crate::{caps::server_capabilities, main_loop::main_loop, version::version};
+pub use crate::{
+    caps::server_capabilities, main_loop::main_loop, reload::ws_to_crate_graph, version::version,
+};
 
 pub fn from_json<T: DeserializeOwned>(
     what: &'static str,
diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs
index 64f19f0b32d7e..727007bba083a 100644
--- a/crates/rust-analyzer/src/lsp/to_proto.rs
+++ b/crates/rust-analyzer/src/lsp/to_proto.rs
@@ -123,6 +123,7 @@ pub(crate) fn completion_item_kind(
         CompletionItemKind::Method => lsp_types::CompletionItemKind::METHOD,
         CompletionItemKind::Snippet => lsp_types::CompletionItemKind::SNIPPET,
         CompletionItemKind::UnresolvedReference => lsp_types::CompletionItemKind::REFERENCE,
+        CompletionItemKind::Expression => lsp_types::CompletionItemKind::SNIPPET,
         CompletionItemKind::SymbolKind(symbol) => match symbol {
             SymbolKind::Attribute => lsp_types::CompletionItemKind::FUNCTION,
             SymbolKind::Const => lsp_types::CompletionItemKind::CONSTANT,
@@ -929,6 +930,16 @@ fn merge_text_and_snippet_edits(
     let mut edits: Vec<SnippetTextEdit> = vec![];
     let mut snippets = snippet_edit.into_edit_ranges().into_iter().peekable();
     let text_edits = edit.into_iter();
+    // offset to go from the final source location to the original source location
+    let mut source_text_offset = 0i32;
+
+    let offset_range = |range: TextRange, offset: i32| -> TextRange {
+        // map the snippet range from the target location into the original source location
+        let start = u32::from(range.start()).checked_add_signed(offset).unwrap_or(0);
+        let end = u32::from(range.end()).checked_add_signed(offset).unwrap_or(0);
+
+        TextRange::new(start.into(), end.into())
+    };
 
     for current_indel in text_edits {
         let new_range = {
@@ -937,10 +948,17 @@ fn merge_text_and_snippet_edits(
             TextRange::at(current_indel.delete.start(), insert_len)
         };
 
+        // figure out how much this Indel will shift future ranges from the initial source
+        let offset_adjustment =
+            u32::from(current_indel.delete.len()) as i32 - u32::from(new_range.len()) as i32;
+
         // insert any snippets before the text edit
-        for (snippet_index, snippet_range) in
-            snippets.take_while_ref(|(_, range)| range.end() < new_range.start())
-        {
+        for (snippet_index, snippet_range) in snippets.peeking_take_while(|(_, range)| {
+            offset_range(*range, source_text_offset).end() < new_range.start()
+        }) {
+            // adjust the snippet range into the corresponding initial source location
+            let snippet_range = offset_range(snippet_range, source_text_offset);
+
             let snippet_range = if !stdx::always!(
                 snippet_range.is_empty(),
                 "placeholder range {:?} is before current text edit range {:?}",
@@ -953,22 +971,23 @@ fn merge_text_and_snippet_edits(
                 snippet_range
             };
 
-            let range = range(line_index, snippet_range);
-            let new_text = format!("${snippet_index}");
-
-            edits.push(SnippetTextEdit {
-                range,
-                new_text,
-                insert_text_format: Some(lsp_types::InsertTextFormat::SNIPPET),
-                annotation_id: None,
-            })
+            edits.push(snippet_text_edit(
+                line_index,
+                true,
+                Indel { insert: format!("${snippet_index}"), delete: snippet_range },
+            ))
         }
 
-        if snippets.peek().is_some_and(|(_, range)| new_range.intersect(*range).is_some()) {
+        if snippets.peek().is_some_and(|(_, range)| {
+            new_range.intersect(offset_range(*range, source_text_offset)).is_some()
+        }) {
             // at least one snippet edit intersects this text edit,
             // so gather all of the edits that intersect this text edit
             let mut all_snippets = snippets
-                .take_while_ref(|(_, range)| new_range.intersect(*range).is_some())
+                .peeking_take_while(|(_, range)| {
+                    new_range.intersect(offset_range(*range,
source_text_offset)).is_some() + }) + .map(|(tabstop, range)| (tabstop, offset_range(range, source_text_offset))) .collect_vec(); // ensure all of the ranges are wholly contained inside of the new range @@ -979,40 +998,59 @@ fn merge_text_and_snippet_edits( ) }); - let mut text_edit = text_edit(line_index, current_indel); + let mut new_text = current_indel.insert; - // escape out snippet text - stdx::replace(&mut text_edit.new_text, '\\', r"\\"); - stdx::replace(&mut text_edit.new_text, '$', r"\$"); + // find which snippet bits need to be escaped + let escape_places = new_text + .rmatch_indices(['\\', '$', '{', '}']) + .map(|(insert, _)| insert) + .collect_vec(); + let mut escape_places = escape_places.into_iter().peekable(); + let mut escape_prior_bits = |new_text: &mut String, up_to: usize| { + for before in escape_places.peeking_take_while(|insert| *insert >= up_to) { + new_text.insert(before, '\\'); + } + }; - // ...and apply! + // insert snippets, and escaping any needed bits along the way for (index, range) in all_snippets.iter().rev() { - let start = (range.start() - new_range.start()).into(); - let end = (range.end() - new_range.start()).into(); + let text_range = range - new_range.start(); + let (start, end) = (text_range.start().into(), text_range.end().into()); if range.is_empty() { - text_edit.new_text.insert_str(start, &format!("${index}")); + escape_prior_bits(&mut new_text, start); + new_text.insert_str(start, &format!("${index}")); } else { - text_edit.new_text.insert(end, '}'); - text_edit.new_text.insert_str(start, &format!("${{{index}:")); + escape_prior_bits(&mut new_text, end); + new_text.insert(end, '}'); + escape_prior_bits(&mut new_text, start); + new_text.insert_str(start, &format!("${{{index}:")); } } - edits.push(SnippetTextEdit { - range: text_edit.range, - new_text: text_edit.new_text, - insert_text_format: Some(lsp_types::InsertTextFormat::SNIPPET), - annotation_id: None, - }) + // escape any remaining bits + escape_prior_bits(&mut new_text, 0); + + edits.push(snippet_text_edit( + line_index, + true, + Indel { insert: new_text, delete: current_indel.delete }, + )) } else { // snippet edit was beyond the current one // since it wasn't consumed, it's available for the next pass edits.push(snippet_text_edit(line_index, false, current_indel)); } + + // update the final source -> initial source mapping offset + source_text_offset += offset_adjustment; } // insert any remaining tabstops edits.extend(snippets.map(|(snippet_index, snippet_range)| { + // adjust the snippet range into the corresponding initial source location + let snippet_range = offset_range(snippet_range, source_text_offset); + let snippet_range = if !stdx::always!( snippet_range.is_empty(), "found placeholder snippet {:?} without a text edit", @@ -1023,15 +1061,11 @@ fn merge_text_and_snippet_edits( snippet_range }; - let range = range(line_index, snippet_range); - let new_text = format!("${snippet_index}"); - - SnippetTextEdit { - range, - new_text, - insert_text_format: Some(lsp_types::InsertTextFormat::SNIPPET), - annotation_id: None, - } + snippet_text_edit( + line_index, + true, + Indel { insert: format!("${snippet_index}"), delete: snippet_range }, + ) })); edits @@ -1658,15 +1692,44 @@ fn bar(_: usize) {} assert!(!docs.contains("use crate::bar")); } + #[track_caller] fn check_rendered_snippets(edit: TextEdit, snippets: SnippetEdit, expect: Expect) { - let text = r#"/* place to put all ranges in */"#; + check_rendered_snippets_in_source( + r"/* place to put all ranges in */", + edit, + 
snippets, + expect, + ); + } + + #[track_caller] + fn check_rendered_snippets_in_source( + ra_fixture: &str, + edit: TextEdit, + snippets: SnippetEdit, + expect: Expect, + ) { + let source = stdx::trim_indent(ra_fixture); + let endings = if source.contains('\r') { LineEndings::Dos } else { LineEndings::Unix }; let line_index = LineIndex { - index: Arc::new(ide::LineIndex::new(text)), - endings: LineEndings::Unix, + index: Arc::new(ide::LineIndex::new(&source)), + endings, encoding: PositionEncoding::Utf8, }; let res = merge_text_and_snippet_edits(&line_index, edit, snippets); + + // Ensure that none of the ranges overlap + { + let mut sorted = res.clone(); + sorted.sort_by_key(|edit| (edit.range.start, edit.range.end)); + let disjoint_ranges = sorted + .iter() + .zip(sorted.iter().skip(1)) + .all(|(l, r)| l.range.end <= r.range.start || l == r); + assert!(disjoint_ranges, "ranges overlap for {res:#?}"); + } + expect.assert_debug_eq(&res); } @@ -1811,7 +1874,8 @@ fn bar(_: usize) {} let mut edit = TextEdit::builder(); edit.insert(0.into(), "abc".to_owned()); let edit = edit.finish(); - let snippets = SnippetEdit::new(vec![Snippet::Tabstop(7.into())]); + // Note: tabstops are positioned in the source where all text edits have been applied + let snippets = SnippetEdit::new(vec![Snippet::Tabstop(10.into())]); check_rendered_snippets( edit, @@ -1928,8 +1992,9 @@ fn bar(_: usize) {} edit.insert(0.into(), "abc".to_owned()); edit.insert(7.into(), "abc".to_owned()); let edit = edit.finish(); + // Note: tabstops are positioned in the source where all text edits have been applied let snippets = - SnippetEdit::new(vec![Snippet::Tabstop(4.into()), Snippet::Tabstop(4.into())]); + SnippetEdit::new(vec![Snippet::Tabstop(7.into()), Snippet::Tabstop(7.into())]); check_rendered_snippets( edit, @@ -2085,13 +2150,502 @@ fn bar(_: usize) {} fn snippet_rendering_escape_snippet_bits() { // only needed for snippet formats let mut edit = TextEdit::builder(); - edit.insert(0.into(), r"abc\def$".to_owned()); - edit.insert(8.into(), r"ghi\jkl$".to_owned()); + edit.insert(0.into(), r"$ab{}$c\def".to_owned()); + edit.insert(8.into(), r"ghi\jk<-check_insert_here$".to_owned()); + edit.insert(10.into(), r"a\\b\\c{}$".to_owned()); let edit = edit.finish(); - let snippets = - SnippetEdit::new(vec![Snippet::Placeholder(TextRange::new(0.into(), 3.into()))]); + let snippets = SnippetEdit::new(vec![ + Snippet::Placeholder(TextRange::new(1.into(), 9.into())), + Snippet::Tabstop(25.into()), + ]); check_rendered_snippets( + edit, + snippets, + expect![[r#" + [ + SnippetTextEdit { + range: Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 0, + }, + }, + new_text: "\\$${1:ab\\{\\}\\$c\\\\d}ef", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 0, + character: 8, + }, + end: Position { + line: 0, + character: 8, + }, + }, + new_text: "ghi\\\\jk$0<-check_insert_here\\$", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 0, + character: 10, + }, + end: Position { + line: 0, + character: 10, + }, + }, + new_text: "a\\\\b\\\\c{}$", + insert_text_format: None, + annotation_id: None, + }, + ] + "#]], + ); + } + + #[test] + fn snippet_rendering_tabstop_adjust_offset_deleted() { + // negative offset from inserting a smaller range + let mut edit = TextEdit::builder(); + edit.replace(TextRange::new(47.into(), 56.into()), 
"let".to_owned()); + edit.replace( + TextRange::new(57.into(), 89.into()), + "disabled = false;\n ProcMacro {\n disabled,\n }".to_owned(), + ); + let edit = edit.finish(); + let snippets = SnippetEdit::new(vec![Snippet::Tabstop(51.into())]); + + check_rendered_snippets_in_source( + r" +fn expander_to_proc_macro() -> ProcMacro { + ProcMacro { + disabled: false, + } +} + +struct ProcMacro { + disabled: bool, +}", + edit, + snippets, + expect![[r#" + [ + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 4, + }, + end: Position { + line: 1, + character: 13, + }, + }, + new_text: "let", + insert_text_format: None, + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 14, + }, + end: Position { + line: 3, + character: 5, + }, + }, + new_text: "$0disabled = false;\n ProcMacro \\{\n disabled,\n \\}", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, + }, + ] + "#]], + ); + } + + #[test] + fn snippet_rendering_tabstop_adjust_offset_added() { + // positive offset from inserting a larger range + let mut edit = TextEdit::builder(); + edit.replace(TextRange::new(39.into(), 40.into()), "let".to_owned()); + edit.replace( + TextRange::new(41.into(), 73.into()), + "disabled = false;\n ProcMacro {\n disabled,\n }".to_owned(), + ); + let edit = edit.finish(); + let snippets = SnippetEdit::new(vec![Snippet::Tabstop(43.into())]); + + check_rendered_snippets_in_source( + r" +fn expander_to_proc_macro() -> P { + P { + disabled: false, + } +} + +struct P { + disabled: bool, +}", + edit, + snippets, + expect![[r#" + [ + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 4, + }, + end: Position { + line: 1, + character: 5, + }, + }, + new_text: "let", + insert_text_format: None, + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 6, + }, + end: Position { + line: 3, + character: 5, + }, + }, + new_text: "$0disabled = false;\n ProcMacro \\{\n disabled,\n \\}", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, + }, + ] + "#]], + ); + } + + #[test] + fn snippet_rendering_placeholder_adjust_offset_deleted() { + // negative offset from inserting a smaller range + let mut edit = TextEdit::builder(); + edit.replace(TextRange::new(47.into(), 56.into()), "let".to_owned()); + edit.replace( + TextRange::new(57.into(), 89.into()), + "disabled = false;\n ProcMacro {\n disabled,\n }".to_owned(), + ); + let edit = edit.finish(); + let snippets = + SnippetEdit::new(vec![Snippet::Placeholder(TextRange::new(51.into(), 59.into()))]); + + check_rendered_snippets_in_source( + r" +fn expander_to_proc_macro() -> ProcMacro { + ProcMacro { + disabled: false, + } +} + +struct ProcMacro { + disabled: bool, +}", + edit, + snippets, + expect![[r#" + [ + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 4, + }, + end: Position { + line: 1, + character: 13, + }, + }, + new_text: "let", + insert_text_format: None, + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 14, + }, + end: Position { + line: 3, + character: 5, + }, + }, + new_text: "${0:disabled} = false;\n ProcMacro \\{\n disabled,\n \\}", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, + }, + ] + "#]], + ); + } + + #[test] + fn snippet_rendering_placeholder_adjust_offset_added() { + // positive offset from inserting a larger range + let mut edit = TextEdit::builder(); + 
edit.replace(TextRange::new(39.into(), 40.into()), "let".to_owned()); + edit.replace( + TextRange::new(41.into(), 73.into()), + "disabled = false;\n ProcMacro {\n disabled,\n }".to_owned(), + ); + let edit = edit.finish(); + let snippets = + SnippetEdit::new(vec![Snippet::Placeholder(TextRange::new(43.into(), 51.into()))]); + + check_rendered_snippets_in_source( + r" +fn expander_to_proc_macro() -> P { + P { + disabled: false, + } +} + +struct P { + disabled: bool, +}", + edit, + snippets, + expect![[r#" + [ + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 4, + }, + end: Position { + line: 1, + character: 5, + }, + }, + new_text: "let", + insert_text_format: None, + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 6, + }, + end: Position { + line: 3, + character: 5, + }, + }, + new_text: "${0:disabled} = false;\n ProcMacro \\{\n disabled,\n \\}", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, + }, + ] + "#]], + ); + } + + #[test] + fn snippet_rendering_tabstop_adjust_offset_between_text_edits() { + // inserting between edits, tabstop should be at (1, 14) + let mut edit = TextEdit::builder(); + edit.replace(TextRange::new(47.into(), 56.into()), "let".to_owned()); + edit.replace( + TextRange::new(58.into(), 90.into()), + "disabled = false;\n ProcMacro {\n disabled,\n }".to_owned(), + ); + let edit = edit.finish(); + let snippets = SnippetEdit::new(vec![Snippet::Tabstop(51.into())]); + + // add an extra space between `ProcMacro` and `{` to insert the tabstop at + check_rendered_snippets_in_source( + r" +fn expander_to_proc_macro() -> ProcMacro { + ProcMacro { + disabled: false, + } +} + +struct ProcMacro { + disabled: bool, +}", + edit, + snippets, + expect![[r#" + [ + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 4, + }, + end: Position { + line: 1, + character: 13, + }, + }, + new_text: "let", + insert_text_format: None, + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 14, + }, + end: Position { + line: 1, + character: 14, + }, + }, + new_text: "$0", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 15, + }, + end: Position { + line: 3, + character: 5, + }, + }, + new_text: "disabled = false;\n ProcMacro {\n disabled,\n }", + insert_text_format: None, + annotation_id: None, + }, + ] +"#]], + ); + } + + #[test] + fn snippet_rendering_tabstop_adjust_offset_after_text_edits() { + // inserting after edits, tabstop should be before the closing curly of the fn + let mut edit = TextEdit::builder(); + edit.replace(TextRange::new(47.into(), 56.into()), "let".to_owned()); + edit.replace( + TextRange::new(57.into(), 89.into()), + "disabled = false;\n ProcMacro {\n disabled,\n }".to_owned(), + ); + let edit = edit.finish(); + let snippets = SnippetEdit::new(vec![Snippet::Tabstop(109.into())]); + + check_rendered_snippets_in_source( + r" +fn expander_to_proc_macro() -> ProcMacro { + ProcMacro { + disabled: false, + } +} + +struct ProcMacro { + disabled: bool, +}", + edit, + snippets, + expect![[r#" + [ + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 4, + }, + end: Position { + line: 1, + character: 13, + }, + }, + new_text: "let", + insert_text_format: None, + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 14, + 
+                        },
+                        end: Position {
+                            line: 3,
+                            character: 5,
+                        },
+                    },
+                    new_text: "disabled = false;\n    ProcMacro {\n        disabled,\n    }",
+                    insert_text_format: None,
+                    annotation_id: None,
+                },
+                SnippetTextEdit {
+                    range: Range {
+                        start: Position {
+                            line: 4,
+                            character: 0,
+                        },
+                        end: Position {
+                            line: 4,
+                            character: 0,
+                        },
+                    },
+                    new_text: "$0",
+                    insert_text_format: Some(
+                        Snippet,
+                    ),
+                    annotation_id: None,
+                },
+            ]
+"#]],
+        );
+    }
+
+    #[test]
+    fn snippet_rendering_handle_dos_line_endings() {
+        // unix -> dos conversion should be handled after placing snippets
+        let mut edit = TextEdit::builder();
+        edit.insert(6.into(), "\n\n->".to_owned());
+
+        let edit = edit.finish();
+        let snippets = SnippetEdit::new(vec![Snippet::Tabstop(10.into())]);
+
+        check_rendered_snippets_in_source(
+            "yeah\r\n<-tabstop here",
             edit,
             snippets,
             expect![[r#"
@@ -2099,38 +2653,23 @@ fn bar(_: usize) {}
                 SnippetTextEdit {
                     range: Range {
                         start: Position {
-                            line: 0,
+                            line: 1,
                             character: 0,
                         },
                         end: Position {
-                            line: 0,
+                            line: 1,
                             character: 0,
                         },
                     },
-                    new_text: "${0:abc}\\\\def\\$",
+                    new_text: "\r\n\r\n->$0",
                     insert_text_format: Some(
                         Snippet,
                     ),
                     annotation_id: None,
                 },
-                SnippetTextEdit {
-                    range: Range {
-                        start: Position {
-                            line: 0,
-                            character: 8,
-                        },
-                        end: Position {
-                            line: 0,
-                            character: 8,
-                        },
-                    },
-                    new_text: "ghi\\jkl$",
-                    insert_text_format: None,
-                    annotation_id: None,
-                },
             ]
         "#]],
-        );
+        )
     }
 
     // `Url` is not able to parse windows paths on unix machines.
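A worked example of the offset bookkeeping exercised by the tests above, using the numbers from `snippet_rendering_tabstop_adjust_offset_deleted` (the values are taken from that test; the standalone form is an illustration):

```rust
// Snippet positions are produced against the *final* text, after all edits.
// When an edit replaces 9 original chars with 3 new ones, later snippet
// positions must be shifted by +6 to land back in the original source.
fn main() {
    let (deleted_len, inserted_len) = (9u32, 3u32);
    let offset_adjustment = deleted_len as i32 - inserted_len as i32; // +6
    let tabstop_in_final_text = 51u32;
    let tabstop_in_original =
        tabstop_in_final_text.checked_add_signed(offset_adjustment).unwrap_or(0);
    assert_eq!(tabstop_in_original, 57);
}
```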
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index 88660db7e93b6..72f6d0fde5fe7 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -8,12 +8,10 @@ use std::{
 
 use always_assert::always;
 use crossbeam_channel::{select, Receiver};
-use flycheck::FlycheckHandle;
-use ide_db::base_db::{SourceDatabaseExt, VfsPath};
+use ide_db::base_db::{SourceDatabase, SourceDatabaseExt, VfsPath};
 use lsp_server::{Connection, Notification, Request};
 use lsp_types::notification::Notification as _;
 use stdx::thread::ThreadIntent;
-use triomphe::Arc;
 use vfs::FileId;
 
 use crate::{
@@ -77,6 +75,7 @@ impl fmt::Display for Event {
 #[derive(Debug)]
 pub(crate) enum QueuedTask {
     CheckIfIndexed(lsp_types::Url),
+    CheckProcMacroSources(Vec<FileId>),
 }
 
 #[derive(Debug)]
@@ -89,6 +88,7 @@ pub(crate) enum Task {
     FetchWorkspace(ProjectWorkspaceProgress),
     FetchBuildData(BuildDataProgress),
     LoadProcMacros(ProcMacroProgress),
+    BuildDepsHaveChanged,
 }
 
 #[derive(Debug)]
@@ -337,7 +337,7 @@ impl GlobalState {
             if became_quiescent {
                 if self.config.check_on_save() {
                     // Project has loaded properly, kick off initial flycheck
-                    self.flycheck.iter().for_each(FlycheckHandle::restart_workspace);
+                    self.flycheck.iter().for_each(|flycheck| flycheck.restart_workspace(None));
                 }
                 if self.config.prefill_caches() {
                     self.prime_caches_queue.request_op("became quiescent".to_owned(), ());
@@ -358,9 +358,7 @@ impl GlobalState {
             }
 
             // Refresh inlay hints if the client supports it.
-            if (self.send_hint_refresh_query || self.proc_macro_changed)
-                && self.config.inlay_hints_refresh()
-            {
+            if self.send_hint_refresh_query && self.config.inlay_hints_refresh() {
                 self.send_request::<lsp_types::request::InlayHintRefreshRequest>((), |_, _| ());
                 self.send_hint_refresh_query = false;
             }
@@ -555,16 +553,7 @@ impl GlobalState {
                     if let Err(e) = self.fetch_workspace_error() {
                         tracing::error!("FetchWorkspaceError:\n{e}");
                     }
-
-                    let old = Arc::clone(&self.workspaces);
                     self.switch_workspaces("fetched workspace".to_owned());
-                    let workspaces_updated = !Arc::ptr_eq(&old, &self.workspaces);
-
-                    if self.config.run_build_scripts() && workspaces_updated {
-                        self.fetch_build_data_queue
-                            .request_op("workspace updated".to_owned(), ());
-                    }
-
                     (Progress::End, None)
                 }
             };
@@ -608,6 +597,7 @@ impl GlobalState {
                     self.report_progress("Loading", state, msg, None, None);
                 }
             }
+            Task::BuildDepsHaveChanged => self.build_deps_changed = true,
         }
     }
 
@@ -686,6 +676,25 @@ impl GlobalState {
                     }
                 });
             }
+            QueuedTask::CheckProcMacroSources(modified_rust_files) => {
+                let crate_graph = self.analysis_host.raw_database().crate_graph();
+                let snap = self.snapshot();
+                self.task_pool.handle.spawn_with_sender(stdx::thread::ThreadIntent::Worker, {
+                    move |sender| {
+                        if modified_rust_files.into_iter().any(|file_id| {
+                            // FIXME: Check whether these files could be build script related
+                            match snap.analysis.crates_for(file_id) {
+                                Ok(crates) => {
+                                    crates.iter().any(|&krate| crate_graph[krate].is_proc_macro)
+                                }
+                                _ => false,
+                            }
+                        }) {
+                            sender.send(Task::BuildDepsHaveChanged).unwrap();
+                        }
+                    }
+                });
+            }
         }
     }
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs
index 7bd2877b00cba..5895459d1fcf8 100644
--- a/crates/rust-analyzer/src/reload.rs
+++ b/crates/rust-analyzer/src/reload.rs
@@ -17,8 +17,9 @@ use std::{iter, mem};
 
 use flycheck::{FlycheckConfig, FlycheckHandle};
 use hir::{db::DefDatabase, Change, ProcMacros};
+use ide::CrateId;
 use ide_db::{
-    base_db::{salsa::Durability, CrateGraph, ProcMacroPaths},
+    base_db::{salsa::Durability, CrateGraph, ProcMacroPaths, Version},
     FxHashMap,
 };
 use itertools::Itertools;
@@ -28,7 +29,7 @@ use project_model::{ProjectWorkspace, WorkspaceBuildScripts};
 use rustc_hash::FxHashSet;
 use stdx::{format_to, thread::ThreadIntent};
 use triomphe::Arc;
-use vfs::{AbsPath, ChangeKind};
+use vfs::{AbsPath, AbsPathBuf, ChangeKind};
 
 use crate::{
     config::{Config, FilesWatcher, LinkedProject},
@@ -83,7 +84,7 @@ impl GlobalState {
         }
         if self.config.linked_or_discovered_projects() != old_config.linked_or_discovered_projects()
         {
-            self.fetch_workspaces_queue.request_op("linked projects changed".to_owned(), false)
+            self.fetch_workspaces_queue.request_op("discovered projects changed".to_owned(), false)
         } else if self.config.flycheck() != old_config.flycheck() {
             self.reload_flycheck();
         }
@@ -106,9 +107,11 @@ impl GlobalState {
         };
 
         let mut message = String::new();
-        if self.proc_macro_changed {
+        if self.build_deps_changed {
             status.health = lsp_ext::Health::Warning;
-            message.push_str("Proc-macros have changed and need to be rebuilt.\n\n");
+            message.push_str(
+                "Proc-macros and/or build scripts have changed and need to be rebuilt.\n\n",
+            );
         }
         if self.fetch_build_data_error().is_err() {
             status.health = lsp_ext::Health::Warning;
@@ -234,7 +237,6 @@ impl GlobalState {
                     it.clone(),
                     cargo_config.target.as_deref(),
                     &cargo_config.extra_env,
-                    None,
                 ))
             }
         })
@@ -300,13 +302,13 @@ impl GlobalState {
 
     pub(crate) fn fetch_proc_macros(&mut self, cause: Cause, paths: Vec<ProcMacroPaths>) {
         tracing::info!(%cause, "will load proc macros");
-        let
dummy_replacements = self.config.dummy_replacements().clone(); + let ignored_proc_macros = self.config.ignored_proc_macros().clone(); let proc_macro_clients = self.proc_macro_clients.clone(); self.task_pool.handle.spawn_with_sender(ThreadIntent::Worker, move |sender| { sender.send(Task::LoadProcMacros(ProcMacroProgress::Begin)).unwrap(); - let dummy_replacements = &dummy_replacements; + let ignored_proc_macros = &ignored_proc_macros; let progress = { let sender = sender.clone(); &move |msg| { @@ -334,7 +336,12 @@ impl GlobalState { crate_name .as_deref() .and_then(|crate_name| { - dummy_replacements.get(crate_name).map(|v| &**v) + ignored_proc_macros.iter().find_map( + |(name, macros)| { + eq_ignore_underscore(name, crate_name) + .then_some(&**macros) + }, + ) }) .unwrap_or_default(), ) @@ -404,6 +411,10 @@ impl GlobalState { if *force_reload_crate_graph { self.recreate_crate_graph(cause); } + if self.build_deps_changed && self.config.run_build_scripts() { + self.build_deps_changed = false; + self.fetch_build_data_queue.request_op("build_deps_changed".to_owned(), ()); + } // Current build scripts do not match the version of the active // workspace, so there's nothing for us to update. return; @@ -415,6 +426,11 @@ impl GlobalState { // we don't care about build-script results, they are stale. // FIXME: can we abort the build scripts here? self.workspaces = Arc::new(workspaces); + + if self.config.run_build_scripts() { + self.build_deps_changed = false; + self.fetch_build_data_queue.request_op("workspace updated".to_owned(), ()); + } } if let FilesWatcher::Client = self.config.files().watcher { @@ -464,8 +480,23 @@ impl GlobalState { None => ws.find_sysroot_proc_macro_srv()?, }; + let env = + match ws { + ProjectWorkspace::Cargo { cargo_config_extra_env, sysroot, .. 
} => { + cargo_config_extra_env + .iter() + .chain(self.config.extra_env()) + .map(|(a, b)| (a.clone(), b.clone())) + .chain(sysroot.as_ref().map(|it| { + ("RUSTUP_TOOLCHAIN".to_owned(), it.root().to_string()) + })) + .collect() + } + _ => Default::default(), + }; tracing::info!("Using proc-macro server at {path}"); - ProcMacroServer::spawn(path.clone()).map_err(|err| { + + ProcMacroServer::spawn(path.clone(), &env).map_err(|err| { tracing::error!( "Failed to run proc-macro server from path {path}, error: {err:?}", ); @@ -494,15 +525,15 @@ impl GlobalState { } fn recreate_crate_graph(&mut self, cause: String) { - // Create crate graph from all the workspaces - let (crate_graph, proc_macro_paths, crate_graph_file_dependencies) = { + { + // Create crate graph from all the workspaces let vfs = &mut self.vfs.write().0; let loader = &mut self.loader; // crate graph construction relies on these paths, record them so when one of them gets // deleted or created we trigger a reconstruction of the crate graph let mut crate_graph_file_dependencies = FxHashSet::default(); - let mut load = |path: &AbsPath| { + let load = |path: &AbsPath| { let _p = tracing::span!(tracing::Level::DEBUG, "switch_workspaces::load").entered(); let vfs_path = vfs::VfsPath::from(path.to_path_buf()); crate_graph_file_dependencies.insert(vfs_path.clone()); @@ -517,32 +548,26 @@ impl GlobalState { } }; - let mut crate_graph = CrateGraph::default(); - let mut proc_macros = Vec::default(); - for ws in &**self.workspaces { - let (other, mut crate_proc_macros) = - ws.to_crate_graph(&mut load, self.config.extra_env()); - crate_graph.extend(other, &mut crate_proc_macros, |_| {}); - proc_macros.push(crate_proc_macros); - } - (crate_graph, proc_macros, crate_graph_file_dependencies) - }; + let (crate_graph, proc_macro_paths, layouts, toolchains) = + ws_to_crate_graph(&self.workspaces, self.config.extra_env(), load); - let mut change = Change::new(); - if self.config.expand_proc_macros() { - change.set_proc_macros( - crate_graph - .iter() - .map(|id| (id, Err("Proc-macros have not been built yet".to_owned()))) - .collect(), - ); - self.fetch_proc_macros_queue.request_op(cause, proc_macro_paths); + let mut change = Change::new(); + if self.config.expand_proc_macros() { + change.set_proc_macros( + crate_graph + .iter() + .map(|id| (id, Err("Proc-macros have not been built yet".to_owned()))) + .collect(), + ); + self.fetch_proc_macros_queue.request_op(cause, proc_macro_paths); + } + change.set_crate_graph(crate_graph); + change.set_target_data_layouts(layouts); + change.set_toolchains(toolchains); + self.analysis_host.apply_change(change); + self.crate_graph_file_dependencies = crate_graph_file_dependencies; } - change.set_crate_graph(crate_graph); - self.analysis_host.apply_change(change); - self.crate_graph_file_dependencies = crate_graph_file_dependencies; self.process_changes(); - self.reload_flycheck(); } @@ -605,6 +630,7 @@ impl GlobalState { 0, Box::new(move |msg| sender.send(msg).unwrap()), config, + None, self.config.root_path().clone(), )], flycheck::InvocationStrategy::PerWorkspace => { @@ -612,23 +638,32 @@ impl GlobalState { .iter() .enumerate() .filter_map(|(id, w)| match w { - ProjectWorkspace::Cargo { cargo, .. } => Some((id, cargo.workspace_root())), - ProjectWorkspace::Json { project, .. } => { + ProjectWorkspace::Cargo { cargo, sysroot, .. } => Some(( + id, + cargo.workspace_root(), + sysroot.as_ref().ok().map(|sysroot| sysroot.root().to_owned()), + )), + ProjectWorkspace::Json { project, sysroot, .. 
} => {
                         // Enable flychecks for json projects if a custom flycheck command was supplied
                         // in the workspace configuration.
                         match config {
-                            FlycheckConfig::CustomCommand { .. } => Some((id, project.path())),
+                            FlycheckConfig::CustomCommand { .. } => Some((
+                                id,
+                                project.path(),
+                                sysroot.as_ref().ok().map(|sysroot| sysroot.root().to_owned()),
+                            )),
                             _ => None,
                         }
                     }
                     ProjectWorkspace::DetachedFiles { .. } => None,
                 })
-                .map(|(id, root)| {
+                .map(|(id, root, sysroot_root)| {
                     let sender = sender.clone();
                     FlycheckHandle::spawn(
                         id,
                         Box::new(move |msg| sender.send(msg).unwrap()),
                         config.clone(),
+                        sysroot_root,
                         root.to_path_buf(),
                     )
                 })
@@ -639,6 +674,69 @@ impl GlobalState {
     }
 }
 
+// FIXME: Move this into load-cargo?
+pub fn ws_to_crate_graph(
+    workspaces: &[ProjectWorkspace],
+    extra_env: &FxHashMap<String, String>,
+    mut load: impl FnMut(&AbsPath) -> Option<vfs::FileId>,
+) -> (
+    CrateGraph,
+    Vec<FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf), String>>>,
+    Vec<Result<Arc<str>, Arc<str>>>,
+    Vec<Option<Version>>,
+) {
+    let mut crate_graph = CrateGraph::default();
+    let mut proc_macro_paths = Vec::default();
+    let mut layouts = Vec::default();
+    let mut toolchains = Vec::default();
+    let e = Err(Arc::from("missing layout"));
+    for ws in workspaces {
+        let (other, mut crate_proc_macros) = ws.to_crate_graph(&mut load, extra_env);
+        let num_layouts = layouts.len();
+        let num_toolchains = toolchains.len();
+        let (toolchain, layout) = match ws {
+            ProjectWorkspace::Cargo { toolchain, target_layout, .. }
+            | ProjectWorkspace::Json { toolchain, target_layout, .. } => {
+                (toolchain.clone(), target_layout.clone())
+            }
+            ProjectWorkspace::DetachedFiles { .. } => {
+                (None, Err("detached files have no layout".into()))
+            }
+        };
+
+        let mapping = crate_graph.extend(
+            other,
+            &mut crate_proc_macros,
+            |(cg_id, cg_data), (_o_id, o_data)| {
+                // if the newly created crate graph's layout is equal to the crate of the merged graph, then
+                // we can merge the crates.
+                let id = cg_id.into_raw().into_u32() as usize;
+                layouts[id] == layout && toolchains[id] == toolchain && cg_data == o_data
+            },
+        );
+        // Populate the side tables for the newly merged crates
+        mapping.values().for_each(|val| {
+            let idx = val.into_raw().into_u32() as usize;
+            // we only need to consider crates that were not merged and remapped, as the
+            // ones that were remapped already have the correct layout and toolchain
+            if idx >= num_layouts {
+                if layouts.len() <= idx {
+                    layouts.resize(idx + 1, e.clone());
+                }
+                layouts[idx] = layout.clone();
+            }
+            if idx >= num_toolchains {
+                if toolchains.len() <= idx {
+                    toolchains.resize(idx + 1, None);
+                }
+                toolchains[idx] = toolchain.clone();
+            }
+        });
+        proc_macro_paths.push(crate_proc_macros);
+    }
+    (crate_graph, proc_macro_paths, layouts, toolchains)
+}
+
 pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) -> bool {
     const IMPLICIT_TARGET_FILES: &[&str] = &["build.rs", "src/main.rs", "src/lib.rs"];
     const IMPLICIT_TARGET_DIRS: &[&str] = &["src/bin", "examples", "tests", "benches"];
@@ -683,3 +781,18 @@ pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind)
     }
     false
 }
+
+/// Similar to [`str::eq_ignore_ascii_case`] but instead of ignoring
+/// case, we say that `-` and `_` are equal.
+fn eq_ignore_underscore(s1: &str, s2: &str) -> bool {
+    if s1.len() != s2.len() {
+        return false;
+    }
+
+    s1.as_bytes().iter().zip(s2.as_bytes()).all(|(c1, c2)| {
+        let c1_underscore = c1 == &b'_' || c1 == &b'-';
+        let c2_underscore = c2 == &b'_' || c2 == &b'-';
+
+        c1 == c2 || (c1_underscore && c2_underscore)
+    })
+}
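A quick usage sketch for the helper above (assumes `eq_ignore_underscore` from the diff is in scope): `-` and `_` are treated as interchangeable, so a crate name from the `procMacro_ignored` config matches however Cargo normalized it.

```rust
fn main() {
    // hyphen/underscore variants of the same name match
    assert!(eq_ignore_underscore("serde-derive", "serde_derive"));
    assert!(eq_ignore_underscore("async_trait", "async-trait"));
    // different lengths or different letters never match
    assert!(!eq_ignore_underscore("serde", "serde_json"));
}
```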
diff --git a/crates/rust-analyzer/tests/crate_graph.rs b/crates/rust-analyzer/tests/crate_graph.rs
new file mode 100644
index 0000000000000..efd42fadf7e96
--- /dev/null
+++ b/crates/rust-analyzer/tests/crate_graph.rs
@@ -0,0 +1,118 @@
+use std::path::PathBuf;
+
+use project_model::{CargoWorkspace, ProjectWorkspace, Sysroot, WorkspaceBuildScripts};
+use rust_analyzer::ws_to_crate_graph;
+use rustc_hash::FxHashMap;
+use serde::de::DeserializeOwned;
+use vfs::{AbsPathBuf, FileId};
+
+fn load_cargo_with_fake_sysroot(file: &str) -> ProjectWorkspace {
+    let meta = get_test_json_file(file);
+    let cargo_workspace = CargoWorkspace::new(meta);
+    ProjectWorkspace::Cargo {
+        cargo: cargo_workspace,
+        build_scripts: WorkspaceBuildScripts::default(),
+        sysroot: Ok(get_fake_sysroot()),
+        rustc: Err(None),
+        rustc_cfg: Vec::new(),
+        cfg_overrides: Default::default(),
+        toolchain: None,
+        target_layout: Err("target_data_layout not loaded".into()),
+        cargo_config_extra_env: Default::default(),
+    }
+}
+
+fn get_test_json_file<T: DeserializeOwned>(file: &str) -> T {
+    let base = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
+    let file = base.join("tests/test_data").join(file);
+    let data = std::fs::read_to_string(file).unwrap();
+    let mut json = data.parse::<serde_json::Value>().unwrap();
+    fixup_paths(&mut json);
+    return serde_json::from_value(json).unwrap();
+
+    fn fixup_paths(val: &mut serde_json::Value) {
+        match val {
+            serde_json::Value::String(s) => replace_root(s, true),
+            serde_json::Value::Array(vals) => vals.iter_mut().for_each(fixup_paths),
+            serde_json::Value::Object(kvals) => kvals.values_mut().for_each(fixup_paths),
+            serde_json::Value::Null | serde_json::Value::Bool(_) | serde_json::Value::Number(_) => {
+            }
+        }
+    }
+}
+
+fn replace_root(s: &mut String, direction: bool) {
+    if direction {
+        let root = if cfg!(windows) { r#"C:\\ROOT\"# } else { "/ROOT/" };
+        *s = s.replace("$ROOT$", root)
+    } else {
+        let root = if cfg!(windows) { r#"C:\\\\ROOT\\"# } else { "/ROOT/" };
+        *s = s.replace(root, "$ROOT$")
+    }
+}
+
+fn get_fake_sysroot_path() -> PathBuf {
+    let base = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
+    base.join("../project-model/test_data/fake-sysroot")
+}
+
+fn get_fake_sysroot() -> Sysroot {
+    let sysroot_path = get_fake_sysroot_path();
+    // there's no `libexec/` directory with a `proc-macro-srv` binary in that
+    // fake sysroot, so we give them both the same path:
+    let sysroot_dir = AbsPathBuf::assert(sysroot_path);
+    let sysroot_src_dir = sysroot_dir.clone();
+    Sysroot::load(sysroot_dir, Some(Ok(sysroot_src_dir)), false)
+}
+
+#[test]
+fn test_deduplicate_origin_dev() {
+    let path_map = &mut FxHashMap::default();
+    let ws = load_cargo_with_fake_sysroot("deduplication_crate_graph_A.json");
+    let ws2 = load_cargo_with_fake_sysroot("deduplication_crate_graph_B.json");
+
+    let (crate_graph, ..)
= ws_to_crate_graph(&[ws, ws2], &Default::default(), |path| { + let len = path_map.len(); + Some(*path_map.entry(path.to_path_buf()).or_insert(FileId::from_raw(len as u32))) + }); + + let mut crates_named_p2 = vec![]; + for id in crate_graph.iter() { + let krate = &crate_graph[id]; + if let Some(name) = krate.display_name.as_ref() { + if name.to_string() == "p2" { + crates_named_p2.push(krate); + } + } + } + + assert!(crates_named_p2.len() == 1); + let p2 = crates_named_p2[0]; + assert!(p2.origin.is_local()); +} + +#[test] +fn test_deduplicate_origin_dev_rev() { + let path_map = &mut FxHashMap::default(); + let ws = load_cargo_with_fake_sysroot("deduplication_crate_graph_B.json"); + let ws2 = load_cargo_with_fake_sysroot("deduplication_crate_graph_A.json"); + + let (crate_graph, ..) = ws_to_crate_graph(&[ws, ws2], &Default::default(), |path| { + let len = path_map.len(); + Some(*path_map.entry(path.to_path_buf()).or_insert(FileId::from_raw(len as u32))) + }); + + let mut crates_named_p2 = vec![]; + for id in crate_graph.iter() { + let krate = &crate_graph[id]; + if let Some(name) = krate.display_name.as_ref() { + if name.to_string() == "p2" { + crates_named_p2.push(krate); + } + } + } + + assert!(crates_named_p2.len() == 1); + let p2 = crates_named_p2[0]; + assert!(p2.origin.is_local()); +} diff --git a/crates/rust-analyzer/tests/slow-tests/main.rs b/crates/rust-analyzer/tests/slow-tests/main.rs index 79ae0c30cfc4c..960f5b531d44f 100644 --- a/crates/rust-analyzer/tests/slow-tests/main.rs +++ b/crates/rust-analyzer/tests/slow-tests/main.rs @@ -911,20 +911,18 @@ fn root_contains_symlink_out_dirs_check() { #[cfg(any(feature = "sysroot-abi", rust_analyzer))] fn resolve_proc_macro() { use expect_test::expect; + use vfs::AbsPathBuf; if skip_slow_tests() { return; } - // skip using the sysroot config as to prevent us from loading the sysroot sources - let mut rustc = std::process::Command::new(toolchain::rustc()); - rustc.args(["--print", "sysroot"]); - let output = rustc.output().unwrap(); - let sysroot = - vfs::AbsPathBuf::try_from(std::str::from_utf8(&output.stdout).unwrap().trim()).unwrap(); + let sysroot = project_model::Sysroot::discover_no_source( + &AbsPathBuf::assert(std::env::current_dir().unwrap()), + &Default::default(), + ) + .unwrap(); - let standalone_server_name = - format!("rust-analyzer-proc-macro-srv{}", std::env::consts::EXE_SUFFIX); - let proc_macro_server_path = sysroot.join("libexec").join(&standalone_server_name); + let proc_macro_server_path = sysroot.discover_proc_macro_srv().unwrap(); let server = Project::with_fixture( r###" diff --git a/crates/rust-analyzer/tests/slow-tests/support.rs b/crates/rust-analyzer/tests/slow-tests/support.rs index d02cb45b8e35f..392a71702070e 100644 --- a/crates/rust-analyzer/tests/slow-tests/support.rs +++ b/crates/rust-analyzer/tests/slow-tests/support.rs @@ -101,8 +101,13 @@ impl Project<'_> { }; }); - let FixtureWithProjectMeta { fixture, mini_core, proc_macro_names, toolchain } = - FixtureWithProjectMeta::parse(self.fixture); + let FixtureWithProjectMeta { + fixture, + mini_core, + proc_macro_names, + toolchain, + target_data_layout: _, + } = FixtureWithProjectMeta::parse(self.fixture); assert!(proc_macro_names.is_empty()); assert!(mini_core.is_none()); assert!(toolchain.is_none()); diff --git a/crates/rust-analyzer/tests/slow-tests/tidy.rs b/crates/rust-analyzer/tests/slow-tests/tidy.rs index 3e38fc3ebcd7a..78da4487d4c9e 100644 --- a/crates/rust-analyzer/tests/slow-tests/tidy.rs +++ b/crates/rust-analyzer/tests/slow-tests/tidy.rs @@ 
diff --git a/crates/rust-analyzer/tests/slow-tests/tidy.rs b/crates/rust-analyzer/tests/slow-tests/tidy.rs index 3e38fc3ebcd7a..78da4487d4c9e 100644 --- a/crates/rust-analyzer/tests/slow-tests/tidy.rs +++ b/crates/rust-analyzer/tests/slow-tests/tidy.rs @@ -9,27 +9,6 @@ use xshell::Shell; #[cfg(not(feature = "in-rust-tree"))] use xshell::cmd; -#[cfg(not(feature = "in-rust-tree"))] -#[test] -fn check_code_formatting() { - let sh = &Shell::new().unwrap(); - sh.change_dir(sourcegen::project_root()); - - let out = cmd!(sh, "rustup run stable rustfmt --version").read().unwrap(); - if !out.contains("stable") { - panic!( - "Failed to run rustfmt from toolchain 'stable'. \ - Please run `rustup component add rustfmt --toolchain stable` to install it.", - ) - } - - let res = cmd!(sh, "rustup run stable cargo fmt -- --check").run(); - if res.is_err() { - let _ = cmd!(sh, "rustup run stable cargo fmt").run(); - } - res.unwrap() -} - #[test] fn check_lsp_extensions_docs() { let sh = &Shell::new().unwrap(); diff --git a/crates/project-model/test_data/deduplication_crate_graph_A.json b/crates/rust-analyzer/tests/test_data/deduplication_crate_graph_A.json similarity index 100% rename from crates/project-model/test_data/deduplication_crate_graph_A.json rename to crates/rust-analyzer/tests/test_data/deduplication_crate_graph_A.json diff --git a/crates/project-model/test_data/deduplication_crate_graph_B.json b/crates/rust-analyzer/tests/test_data/deduplication_crate_graph_B.json similarity index 100% rename from crates/project-model/test_data/deduplication_crate_graph_B.json rename to crates/rust-analyzer/tests/test_data/deduplication_crate_graph_B.json diff --git a/crates/salsa/src/doctest.rs b/crates/salsa/src/doctest.rs deleted file mode 100644 index 29a80663567fe..0000000000000 --- a/crates/salsa/src/doctest.rs +++ /dev/null @@ -1,115 +0,0 @@ -//! -#![allow(dead_code)] - -/// Test that a database with a key/value that is not `Send` will, -/// indeed, not be `Send`. -/// -/// ```compile_fail,E0277 -/// use std::rc::Rc; -/// -/// #[salsa::query_group(NoSendSyncStorage)] -/// trait NoSendSyncDatabase: salsa::Database { -/// fn no_send_sync_value(&self, key: bool) -> Rc<bool>; -/// fn no_send_sync_key(&self, key: Rc<bool>) -> bool; -/// } -/// -/// fn no_send_sync_value(_db: &dyn NoSendSyncDatabase, key: bool) -> Rc<bool> { -/// Rc::new(key) -/// } -/// -/// fn no_send_sync_key(_db: &dyn NoSendSyncDatabase, key: Rc<bool>) -> bool { -/// *key -/// } -/// -/// #[salsa::database(NoSendSyncStorage)] -/// #[derive(Default)] -/// struct DatabaseImpl { -/// storage: salsa::Storage<Self>, -/// } -/// -/// impl salsa::Database for DatabaseImpl { -/// } -/// -/// fn is_send<T: Send>(_: T) { } -/// -/// fn assert_send() { -/// is_send(DatabaseImpl::default()); -/// } -/// ``` -fn test_key_not_send_db_not_send() {} - -/// Test that a database with a key/value that is not `Sync` will not -/// be `Send`.
-/// -/// ```compile_fail,E0277 -/// use std::rc::Rc; -/// use std::cell::Cell; -/// -/// #[salsa::query_group(NoSendSyncStorage)] -/// trait NoSendSyncDatabase: salsa::Database { -/// fn no_send_sync_value(&self, key: bool) -> Cell<bool>; -/// fn no_send_sync_key(&self, key: Cell<bool>) -> bool; -/// } -/// -/// fn no_send_sync_value(_db: &dyn NoSendSyncDatabase, key: bool) -> Cell<bool> { -/// Cell::new(key) -/// } -/// -/// fn no_send_sync_key(_db: &dyn NoSendSyncDatabase, key: Cell<bool>) -> bool { -/// *key -/// } -/// -/// #[salsa::database(NoSendSyncStorage)] -/// #[derive(Default)] -/// struct DatabaseImpl { -/// runtime: salsa::Storage<Self>, -/// } -/// -/// impl salsa::Database for DatabaseImpl { -/// } -/// -/// fn is_send<T: Send>(_: T) { } -/// -/// fn assert_send() { -/// is_send(DatabaseImpl::default()); -/// } -/// ``` -fn test_key_not_sync_db_not_send() {} - -/// Test that a database with a key/value that is not `Sync` will -/// not be `Sync`. -/// -/// ```compile_fail,E0277 -/// use std::cell::Cell; -/// use std::rc::Rc; -/// -/// #[salsa::query_group(NoSendSyncStorage)] -/// trait NoSendSyncDatabase: salsa::Database { -/// fn no_send_sync_value(&self, key: bool) -> Cell<bool>; -/// fn no_send_sync_key(&self, key: Cell<bool>) -> bool; -/// } -/// -/// fn no_send_sync_value(_db: &dyn NoSendSyncDatabase, key: bool) -> Cell<bool> { -/// Cell::new(key) -/// } -/// -/// fn no_send_sync_key(_db: &dyn NoSendSyncDatabase, key: Cell<bool>) -> bool { -/// *key -/// } -/// -/// #[salsa::database(NoSendSyncStorage)] -/// #[derive(Default)] -/// struct DatabaseImpl { -/// runtime: salsa::Storage<Self>, -/// } -/// -/// impl salsa::Database for DatabaseImpl { -/// } -/// -/// fn is_sync<T: Sync>(_: T) { } -/// -/// fn assert_send() { -/// is_sync(DatabaseImpl::default()); -/// } -/// ``` -fn test_key_not_sync_db_not_sync() {} diff --git a/crates/salsa/src/lib.rs b/crates/salsa/src/lib.rs index 2d58beafb2a0b..668dcfd925d8d 100644 --- a/crates/salsa/src/lib.rs +++ b/crates/salsa/src/lib.rs @@ -11,7 +11,6 @@ //! from previous invocations as appropriate. mod derived; -mod doctest; mod durability; mod hash; mod input; diff --git a/crates/syntax/rust.ungram b/crates/syntax/rust.ungram index c3010d090c6b9..c3d8e97c436cc 100644 --- a/crates/syntax/rust.ungram +++ b/crates/syntax/rust.ungram @@ -367,6 +367,7 @@ Expr = | RecordExpr | RefExpr | ReturnExpr +| BecomeExpr | TryExpr | TupleExpr | WhileExpr @@ -528,6 +529,9 @@ MatchGuard = ReturnExpr = Attr* 'return' Expr? +BecomeExpr = + Attr* 'become' Expr + YieldExpr = Attr* 'yield' Expr? @@ -610,7 +614,7 @@ TypeBoundList = TypeBound = Lifetime -| ('?' | '~' 'const')? Type +| ('~' 'const' | 'const')? 'async'? '?'?
Type //************************// // Patterns // diff --git a/crates/syntax/src/ast/edit_in_place.rs b/crates/syntax/src/ast/edit_in_place.rs index bc9c54d0b73ec..41d33c457ce70 100644 --- a/crates/syntax/src/ast/edit_in_place.rs +++ b/crates/syntax/src/ast/edit_in_place.rs @@ -1007,20 +1007,24 @@ impl ast::IdentPat { } pub trait HasVisibilityEdit: ast::HasVisibility { - fn set_visibility(&self, visibility: ast::Visibility) { - match self.visibility() { - Some(current_visibility) => { - ted::replace(current_visibility.syntax(), visibility.syntax()) - } - None => { - let vis_before = self - .syntax() - .children_with_tokens() - .find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR)) - .unwrap_or_else(|| self.syntax().first_child_or_token().unwrap()); - - ted::insert(ted::Position::before(vis_before), visibility.syntax()); + fn set_visibility(&self, visibility: Option<ast::Visibility>) { + if let Some(visibility) = visibility { + match self.visibility() { + Some(current_visibility) => { + ted::replace(current_visibility.syntax(), visibility.syntax()) + } + None => { + let vis_before = self + .syntax() + .children_with_tokens() + .find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR)) + .unwrap_or_else(|| self.syntax().first_child_or_token().unwrap()); + + ted::insert(ted::Position::before(vis_before), visibility.syntax()); + } } + } else if let Some(visibility) = self.visibility() { + ted::remove(visibility.syntax()); } } } diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs index 6c86e591044a8..75971861aa80e 100644 --- a/crates/syntax/src/ast/generated/nodes.rs +++ b/crates/syntax/src/ast/generated/nodes.rs @@ -1095,6 +1095,16 @@ impl ReturnExpr { pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) } } +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct BecomeExpr { + pub(crate) syntax: SyntaxNode, +} +impl ast::HasAttrs for BecomeExpr {} +impl BecomeExpr { + pub fn become_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![become]) } + pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) } +} + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TryExpr { pub(crate) syntax: SyntaxNode, @@ -1400,9 +1410,10 @@ pub struct TypeBound { } impl TypeBound { pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) } - pub fn question_mark_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![?]) } pub fn tilde_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![~]) } pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) } + pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) } + pub fn question_mark_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![?]) } pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) } } @@ -1633,6 +1644,7 @@ pub enum Expr { RecordExpr(RecordExpr), RefExpr(RefExpr), ReturnExpr(ReturnExpr), + BecomeExpr(BecomeExpr), TryExpr(TryExpr), TupleExpr(TupleExpr), WhileExpr(WhileExpr), @@ -2792,6 +2804,17 @@ impl AstNode for ReturnExpr { } fn syntax(&self) -> &SyntaxNode { &self.syntax } } +impl AstNode for BecomeExpr { + fn can_cast(kind: SyntaxKind) -> bool { kind == BECOME_EXPR } + fn cast(syntax: SyntaxNode) -> Option<Self> { + if Self::can_cast(syntax.kind()) { + Some(Self { syntax }) + } else { + None + } + } + fn syntax(&self) -> &SyntaxNode { &self.syntax } +} impl AstNode for TryExpr { fn can_cast(kind: SyntaxKind) -> bool { kind == TRY_EXPR } fn cast(syntax: SyntaxNode) -> Option<Self> { @@ -3540,6 +3563,9 @@ impl From<RefExpr> for Expr { impl From<ReturnExpr> for
Expr { fn from(node: ReturnExpr) -> Expr { Expr::ReturnExpr(node) } } +impl From<BecomeExpr> for Expr { + fn from(node: BecomeExpr) -> Expr { Expr::BecomeExpr(node) } +} impl From<TryExpr> for Expr { fn from(node: TryExpr) -> Expr { Expr::TryExpr(node) } } @@ -3593,6 +3619,7 @@ impl AstNode for Expr { | RECORD_EXPR | REF_EXPR | RETURN_EXPR + | BECOME_EXPR | TRY_EXPR | TUPLE_EXPR | WHILE_EXPR @@ -3632,6 +3659,7 @@ impl AstNode for Expr { RECORD_EXPR => Expr::RecordExpr(RecordExpr { syntax }), REF_EXPR => Expr::RefExpr(RefExpr { syntax }), RETURN_EXPR => Expr::ReturnExpr(ReturnExpr { syntax }), + BECOME_EXPR => Expr::BecomeExpr(BecomeExpr { syntax }), TRY_EXPR => Expr::TryExpr(TryExpr { syntax }), TUPLE_EXPR => Expr::TupleExpr(TupleExpr { syntax }), WHILE_EXPR => Expr::WhileExpr(WhileExpr { syntax }), @@ -3673,6 +3701,7 @@ impl AstNode for Expr { Expr::RecordExpr(it) => &it.syntax, Expr::RefExpr(it) => &it.syntax, Expr::ReturnExpr(it) => &it.syntax, + Expr::BecomeExpr(it) => &it.syntax, Expr::TryExpr(it) => &it.syntax, Expr::TupleExpr(it) => &it.syntax, Expr::WhileExpr(it) => &it.syntax, @@ -4150,6 +4179,7 @@ impl AstNode for AnyHasAttrs { | RANGE_EXPR | REF_EXPR | RETURN_EXPR + | BECOME_EXPR | TRY_EXPR | TUPLE_EXPR | WHILE_EXPR @@ -4851,6 +4881,11 @@ impl std::fmt::Display for ReturnExpr { std::fmt::Display::fmt(self.syntax(), f) } } +impl std::fmt::Display for BecomeExpr { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + std::fmt::Display::fmt(self.syntax(), f) + } +} impl std::fmt::Display for TryExpr { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs index 120d801c8d17e..02246fc3291d3 100644 --- a/crates/syntax/src/ast/make.rs +++ b/crates/syntax/src/ast/make.rs @@ -1147,7 +1147,7 @@ pub mod tokens { pub(super) static SOURCE_FILE: Lazy<Parse<SourceFile>> = Lazy::new(|| { SourceFile::parse( - "const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p, { let a @ [] })\n;\n\n", + "const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p, { let a @ [] })\n;\n\nimpl A for B where: {}", ) }); diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs index 6e5e4127f4d43..1bc1ef8434fc7 100644 --- a/crates/syntax/src/ast/node_ext.rs +++ b/crates/syntax/src/ast/node_ext.rs @@ -569,6 +569,26 @@ impl fmt::Display for NameOrNameRef { } } +impl ast::AstNode for NameOrNameRef { + fn can_cast(kind: SyntaxKind) -> bool { + matches!(kind, SyntaxKind::NAME | SyntaxKind::NAME_REF) + } + fn cast(syntax: SyntaxNode) -> Option<Self> { + let res = match syntax.kind() { + SyntaxKind::NAME => NameOrNameRef::Name(ast::Name { syntax }), + SyntaxKind::NAME_REF => NameOrNameRef::NameRef(ast::NameRef { syntax }), + _ => return None, + }; + Some(res) + } + fn syntax(&self) -> &SyntaxNode { + match self { + NameOrNameRef::NameRef(it) => it.syntax(), + NameOrNameRef::Name(it) => it.syntax(), + } + } +} + impl NameOrNameRef { pub fn text(&self) -> TokenText<'_> { match self { diff --git a/crates/syntax/src/ast/prec.rs b/crates/syntax/src/ast/prec.rs index 9ddf5a0a9804c..9131cd2f17993 100644 --- a/crates/syntax/src/ast/prec.rs +++ b/crates/syntax/src/ast/prec.rs @@ -130,8 +130,8 @@ impl Expr { // ContinueExpr(_) => (0, 0), - ClosureExpr(_) | ReturnExpr(_) | YieldExpr(_) | YeetExpr(_) | BreakExpr(_) - | OffsetOfExpr(_) | FormatArgsExpr(_) | AsmExpr(_) => (0,
1), + ClosureExpr(_) | ReturnExpr(_) | BecomeExpr(_) | YieldExpr(_) | YeetExpr(_) + | BreakExpr(_) | OffsetOfExpr(_) | FormatArgsExpr(_) | AsmExpr(_) => (0, 1), RangeExpr(_) => (5, 5), @@ -288,6 +288,7 @@ impl Expr { PrefixExpr(e) => e.op_token(), RefExpr(e) => e.amp_token(), ReturnExpr(e) => e.return_token(), + BecomeExpr(e) => e.become_token(), TryExpr(e) => e.question_mark_token(), YieldExpr(e) => e.yield_token(), YeetExpr(e) => e.do_token(), @@ -316,7 +317,8 @@ impl Expr { // For BinExpr and RangeExpr this is technically wrong -- the child can be on the left... BinExpr(_) | RangeExpr(_) | BreakExpr(_) | ContinueExpr(_) | PrefixExpr(_) - | RefExpr(_) | ReturnExpr(_) | YieldExpr(_) | YeetExpr(_) | LetExpr(_) => self + | RefExpr(_) | ReturnExpr(_) | BecomeExpr(_) | YieldExpr(_) | YeetExpr(_) + | LetExpr(_) => self .syntax() .parent() .and_then(Expr::cast) diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs index 960889b742112..b755de86d32c5 100644 --- a/crates/syntax/src/lib.rs +++ b/crates/syntax/src/lib.rs @@ -27,11 +27,6 @@ extern crate ra_ap_rustc_lexer as rustc_lexer; #[cfg(feature = "in-rust-tree")] extern crate rustc_lexer; -#[allow(unused)] -macro_rules! eprintln { - ($($tt:tt)*) => { stdx::eprintln!($($tt)*) }; -} - mod parsing; mod ptr; mod syntax_error; diff --git a/crates/syntax/src/tests/ast_src.rs b/crates/syntax/src/tests/ast_src.rs index 341bda892ba1c..8221c577892de 100644 --- a/crates/syntax/src/tests/ast_src.rs +++ b/crates/syntax/src/tests/ast_src.rs @@ -67,8 +67,9 @@ pub(crate) const KINDS_SRC: KindsSrc<'_> = KindsSrc { keywords: &[ "as", "async", "await", "box", "break", "const", "continue", "crate", "do", "dyn", "else", "enum", "extern", "false", "fn", "for", "if", "impl", "in", "let", "loop", "macro", - "match", "mod", "move", "mut", "pub", "ref", "return", "self", "Self", "static", "struct", - "super", "trait", "true", "try", "type", "unsafe", "use", "where", "while", "yield", + "match", "mod", "move", "mut", "pub", "ref", "return", "become", "self", "Self", "static", + "struct", "super", "trait", "true", "try", "type", "unsafe", "use", "where", "while", + "yield", ], contextual_keywords: &[ "auto", @@ -154,6 +155,7 @@ pub(crate) const KINDS_SRC: KindsSrc<'_> = KindsSrc { "BLOCK_EXPR", "STMT_LIST", "RETURN_EXPR", + "BECOME_EXPR", "YIELD_EXPR", "YEET_EXPR", "LET_EXPR",
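The `rust.ungram`, generated-AST, precedence, and keyword changes above wire up `become` expressions, the experimental explicit tail-call syntax. A sketch of the kind of code the new `BecomeExpr` node covers (nightly-only; the feature-gate name follows RFC 3407 and is an assumption here, not part of this patch):

```rust
// Nightly-only: requires the unstable explicit tail-call feature.
#![feature(explicit_tail_calls)]
#![allow(incomplete_features)]

fn gcd(a: u64, b: u64) -> u64 {
    if b == 0 {
        a
    } else {
        // Like `return`, but guarantees the recursive call reuses this frame.
        become gcd(b, a % b)
    }
}

fn main() {
    assert_eq!(gcd(48, 18), 6);
}
```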
diff --git a/crates/test-fixture/src/lib.rs b/crates/test-fixture/src/lib.rs index 28e757e81bb2c..e118262b4edd9 100644 --- a/crates/test-fixture/src/lib.rs +++ b/crates/test-fixture/src/lib.rs @@ -1,10 +1,9 @@ //! A set of high-level utility fixture methods to use in tests. -use std::{mem, ops::Not, str::FromStr, sync}; +use std::{iter, mem, ops::Not, str::FromStr, sync}; use base_db::{ - CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, DependencyKind, - Edition, Env, FileChange, FileSet, LangCrateOrigin, SourceDatabaseExt, SourceRoot, Version, - VfsPath, + CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Edition, Env, + FileChange, FileSet, LangCrateOrigin, SourceDatabaseExt, SourceRoot, Version, VfsPath, }; use cfg::CfgOptions; use hir_expand::{ @@ -118,8 +117,14 @@ impl ChangeFixture { ra_fixture: &str, mut proc_macro_defs: Vec<(String, ProcMacro)>, ) -> ChangeFixture { - let FixtureWithProjectMeta { fixture, mini_core, proc_macro_names, toolchain } = - FixtureWithProjectMeta::parse(ra_fixture); + let FixtureWithProjectMeta { + fixture, + mini_core, + proc_macro_names, + toolchain, + target_data_layout, + } = FixtureWithProjectMeta::parse(ra_fixture); + let target_data_layout = Ok(target_data_layout.into()); let toolchain = Some({ let channel = toolchain.as_deref().unwrap_or("stable"); Version::parse(&format!("1.76.0-{channel}")).unwrap() @@ -131,7 +136,6 @@ impl ChangeFixture { let mut crates = FxHashMap::default(); let mut crate_deps = Vec::new(); let mut default_crate_root: Option<FileId> = None; - let mut default_target_data_layout: Option<String> = None; let mut default_cfg = CfgOptions::default(); let mut default_env = Env::new_for_test_fixture(); @@ -187,11 +191,6 @@ impl ChangeFixture { meta.env, false, origin, - meta.target_data_layout - .as_deref() - .map(From::from) - .ok_or_else(|| "target_data_layout unset".into()), - toolchain.clone(), ); let prev = crates.insert(crate_name.clone(), crate_id); assert!(prev.is_none(), "multiple crates with same name: {}", crate_name); @@ -205,7 +204,6 @@ impl ChangeFixture { default_crate_root = Some(file_id); default_cfg.extend(meta.cfg.into_iter()); default_env.extend(meta.env.iter().map(|(x, y)| (x.to_owned(), y.to_owned()))); - default_target_data_layout = meta.target_data_layout; } source_change.change_file(file_id, Some(text.into())); @@ -228,10 +226,6 @@ impl ChangeFixture { default_env, false, CrateOrigin::Local { repo: None, name: None }, - default_target_data_layout - .map(|it| it.into()) - .ok_or_else(|| "target_data_layout unset".into()), - toolchain.clone(), ); } else { for (from, to, prelude) in crate_deps { @@ -240,20 +234,11 @@ crate_graph .add_dep( from_id, - Dependency::with_prelude( - CrateName::new(&to).unwrap(), - to_id, - prelude, - DependencyKind::Normal, - ), + Dependency::with_prelude(CrateName::new(&to).unwrap(), to_id, prelude), ) .unwrap(); } } - let target_layout = crate_graph.iter().next().map_or_else( - || Err("target_data_layout unset".into()), - |it| crate_graph[it].target_layout.clone(), - ); if let Some(mini_core) = mini_core { let core_file = file_id; @@ -277,20 +262,11 @@ Env::new_for_test_fixture(), false, CrateOrigin::Lang(LangCrateOrigin::Core), - target_layout.clone(), - toolchain.clone(), ); for krate in all_crates { crate_graph - .add_dep( - krate, - Dependency::new( - CrateName::new("core").unwrap(), - core_crate, - DependencyKind::Normal, - ), - ) + .add_dep(krate, Dependency::new(CrateName::new("core").unwrap(), core_crate)) .unwrap(); } } @@ -322,8 +298,6 @@ Env::new_for_test_fixture(), true, CrateOrigin::Local { repo: None, name: None }, - target_layout, - toolchain, ); proc_macros.insert(proc_macros_crate, Ok(proc_macro)); @@ -331,11 +305,7 @@
crate_graph .add_dep( krate, - Dependency::new( - CrateName::new("proc_macros").unwrap(), - proc_macros_crate, - DependencyKind::Normal, - ), + Dependency::new(CrateName::new("proc_macros").unwrap(), proc_macros_crate), ) .unwrap(); } @@ -346,17 +316,20 @@ SourceRootKind::Library => SourceRoot::new_library(mem::take(&mut file_set)), }; roots.push(root); - source_change.set_roots(roots); - source_change.set_crate_graph(crate_graph); - - ChangeFixture { - file_position, - files, - change: Change { - source_change, - proc_macros: proc_macros.is_empty().not().then_some(proc_macros), - }, - } + + let mut change = Change { + source_change, + proc_macros: proc_macros.is_empty().not().then_some(proc_macros), + toolchains: Some(iter::repeat(toolchain).take(crate_graph.len()).collect()), + target_data_layouts: Some( + iter::repeat(target_data_layout).take(crate_graph.len()).collect(), + ), + }; + + change.source_change.set_roots(roots); + change.source_change.set_crate_graph(crate_graph); + + ChangeFixture { file_position, files, change } } } @@ -374,6 +347,7 @@ pub fn identity(_attr: TokenStream, item: TokenStream) -> TokenStream { name: "identity".into(), kind: ProcMacroKind::Attr, expander: sync::Arc::new(IdentityProcMacroExpander), + disabled: false, }, ), ( @@ -388,6 +362,7 @@ pub fn derive_identity(item: TokenStream) -> TokenStream { name: "DeriveIdentity".into(), kind: ProcMacroKind::CustomDerive, expander: sync::Arc::new(IdentityProcMacroExpander), + disabled: false, }, ), ( @@ -402,6 +377,7 @@ pub fn input_replace(attr: TokenStream, _item: TokenStream) -> TokenStream { name: "input_replace".into(), kind: ProcMacroKind::Attr, expander: sync::Arc::new(AttributeInputReplaceProcMacroExpander), + disabled: false, }, ), ( @@ -416,6 +392,7 @@ pub fn mirror(input: TokenStream) -> TokenStream { name: "mirror".into(), kind: ProcMacroKind::FuncLike, expander: sync::Arc::new(MirrorProcMacroExpander), + disabled: false, }, ), ( @@ -430,6 +407,7 @@ pub fn shorten(input: TokenStream) -> TokenStream { name: "shorten".into(), kind: ProcMacroKind::FuncLike, expander: sync::Arc::new(ShortenProcMacroExpander), + disabled: false, }, ), ] @@ -470,7 +448,6 @@ struct FileMeta { edition: Edition, env: Env, introduce_new_source_root: Option<SourceRootKind>, - target_data_layout: Option<String>, } impl FileMeta { @@ -502,7 +479,6 @@ edition: f.edition.map_or(Edition::CURRENT, |v| Edition::from_str(&v).unwrap()), env: f.env.into_iter().collect(), introduce_new_source_root, - target_data_layout: f.target_data_layout, } } } diff --git a/crates/test-utils/src/fixture.rs b/crates/test-utils/src/fixture.rs index 595281336d582..7e34c36189950 100644 --- a/crates/test-utils/src/fixture.rs +++ b/crates/test-utils/src/fixture.rs @@ -126,11 +126,6 @@ pub struct Fixture { /// /// Syntax: `library` pub library: bool, - /// Specifies LLVM data layout to be used. - /// - /// You probably don't want to manually specify this. See LLVM manual for the - /// syntax, if you must: https://llvm.org/docs/LangRef.html#data-layout - pub target_data_layout: Option<String>, /// Actual file contents. All meta comments are stripped. pub text: String, } @@ -145,6 +140,11 @@ pub struct FixtureWithProjectMeta { pub mini_core: Option<MiniCore>, pub proc_macro_names: Vec<String>, pub toolchain: Option<String>, + /// Specifies LLVM data layout to be used. + /// + /// You probably don't want to manually specify this.
See the LLVM manual for the + /// syntax, if you must: https://llvm.org/docs/LangRef.html#data-layout + pub target_data_layout: String, } impl FixtureWithProjectMeta { @@ -172,6 +172,8 @@ let fixture = trim_indent(ra_fixture); let mut fixture = fixture.as_str(); let mut toolchain = None; + let mut target_data_layout = + "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128".to_owned(); let mut mini_core = None; let mut res: Vec<Fixture> = Vec::new(); let mut proc_macro_names = vec![]; @@ -182,6 +184,12 @@ fixture = remain; } + if let Some(meta) = fixture.strip_prefix("//- target_data_layout:") { + let (meta, remain) = meta.split_once('\n').unwrap(); + target_data_layout = meta.trim().to_owned(); + fixture = remain; + } + if let Some(meta) = fixture.strip_prefix("//- proc_macros:") { + let (meta, remain) = meta.split_once('\n').unwrap(); + proc_macro_names = meta.split(',').map(|it| it.trim().to_owned()).collect(); @@ -225,7 +233,7 @@ } } - Self { fixture: res, mini_core, proc_macro_names, toolchain } + Self { fixture: res, mini_core, proc_macro_names, toolchain, target_data_layout } } //- /lib.rs crate:foo deps:bar,baz cfg:foo=a,bar=b env:OUTDIR=path/to,OTHER=foo @@ -245,9 +253,6 @@ let mut env = FxHashMap::default(); let mut introduce_new_source_root = None; let mut library = false; - let mut target_data_layout = Some( - "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128".to_owned(), - ); for component in components { if component == "library" { library = true; @@ -284,7 +289,6 @@ } } "new_source_root" => introduce_new_source_root = Some(value.to_owned()), - "target_data_layout" => target_data_layout = Some(value.to_owned()), _ => panic!("bad component: {component:?}"), } } @@ -307,7 +311,6 @@ env, introduce_new_source_root, library, - target_data_layout, } } } @@ -476,16 +479,21 @@ fn parse_fixture_checks_further_indented_metadata() { #[test] fn parse_fixture_gets_full_meta() { - let FixtureWithProjectMeta { fixture: parsed, mini_core, proc_macro_names, toolchain } = - FixtureWithProjectMeta::parse( - r#" + let FixtureWithProjectMeta { + fixture: parsed, + mini_core, + proc_macro_names, + toolchain, + target_data_layout: _, + } = FixtureWithProjectMeta::parse( + r#" //- toolchain: nightly //- proc_macros: identity //- minicore: coerce_unsized //- /lib.rs crate:foo deps:bar,baz cfg:foo=a,bar=b,atom env:OUTDIR=path/to,OTHER=foo mod m; "#, - ); + ); assert_eq!(toolchain, Some("nightly".to_owned())); assert_eq!(proc_macro_names, vec!["identity".to_owned()]); assert_eq!(mini_core.unwrap().activated_flags, vec!["coerce_unsized".to_owned()]);
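With `target_data_layout` moved from a per-file component to a fixture-wide `//-` meta line (defaulting to the x86-64 layout string above), a fixture that overrides it now sits alongside the other project-level meta. A hypothetical fixture for illustration; note that the meta lines must appear in the order the parser strips them (`toolchain`, then `target_data_layout`, then `proc_macros`, then `minicore`):

```rust
//- toolchain: stable
//- target_data_layout: e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128
//- minicore: sized
//- /lib.rs crate:foo
fn f() {}
```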
diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs index 23a3a7e0afa49..f125792d12587 100644 --- a/crates/test-utils/src/minicore.rs +++ b/crates/test-utils/src/minicore.rs @@ -60,6 +60,8 @@ //! try: infallible //! unpin: sized //! unsize: sized +//! todo: panic +//! unimplemented: panic #![rustc_coherence_is_core] @@ -927,6 +929,10 @@ pub mod fmt { use crate::mem::transmute; unsafe { Argument { formatter: transmute(f), value: transmute(x) } } } + + pub fn new_display<'b, T: Display>(x: &'b T) -> Argument<'_> { + Self::new(x, Display::fmt) + } } #[lang = "format_alignment"] @@ -1438,6 +1444,33 @@ mod macros { // endregion:fmt + // region:todo + #[macro_export] + #[allow_internal_unstable(core_panic)] + macro_rules! todo { + () => { + $crate::panicking::panic("not yet implemented") + }; + ($($arg:tt)+) => { + $crate::panic!("not yet implemented: {}", $crate::format_args!($($arg)+)) + }; + } + // endregion:todo + + // region:unimplemented + #[macro_export] + #[allow_internal_unstable(core_panic)] + macro_rules! unimplemented { + () => { + $crate::panicking::panic("not implemented") + }; + ($($arg:tt)+) => { + $crate::panic!("not implemented: {}", $crate::format_args!($($arg)+)) + }; + } + // endregion:unimplemented + + // region:derive pub(crate) mod builtin { #[rustc_builtin_macro] diff --git a/crates/toolchain/src/lib.rs b/crates/toolchain/src/lib.rs index 997f339edc4d7..ae71b6700c0b2 100644 --- a/crates/toolchain/src/lib.rs +++ b/crates/toolchain/src/lib.rs @@ -2,7 +2,41 @@ #![warn(rust_2018_idioms, unused_lifetimes)] -use std::{env, iter, path::PathBuf}; +use std::{ + env, iter, + path::{Path, PathBuf}, +}; + +#[derive(Copy, Clone)] +pub enum Tool { + Cargo, + Rustc, + Rustup, + Rustfmt, +} + +impl Tool { + pub fn path(self) -> PathBuf { + get_path_for_executable(self.name()) + } + + pub fn path_in(self, path: &Path) -> Option<PathBuf> { + probe_for_binary(path.join(self.name())) + } + + pub fn path_in_or_discover(self, path: &Path) -> PathBuf { + probe_for_binary(path.join(self.name())).unwrap_or_else(|| self.path()) + } + + pub fn name(self) -> &'static str { + match self { + Tool::Cargo => "cargo", + Tool::Rustc => "rustc", + Tool::Rustup => "rustup", + Tool::Rustfmt => "rustfmt", + } + } +} pub fn cargo() -> PathBuf { get_path_for_executable("cargo") @@ -47,7 +81,7 @@ fn get_path_for_executable(executable_name: &'static str) -> PathBuf { if let Some(mut path) = get_cargo_home() { path.push("bin"); path.push(executable_name); - if let Some(path) = probe(path) { + if let Some(path) = probe_for_binary(path) { return path; } } @@ -57,7 +91,7 @@ fn lookup_in_path(exec: &str) -> bool { let paths = env::var_os("PATH").unwrap_or_default(); - env::split_paths(&paths).map(|path| path.join(exec)).find_map(probe).is_some() + env::split_paths(&paths).map(|path| path.join(exec)).find_map(probe_for_binary).is_some() } fn get_cargo_home() -> Option<PathBuf> { @@ -73,7 +107,7 @@ None } -fn probe(path: PathBuf) -> Option<PathBuf> { +pub fn probe_for_binary(path: PathBuf) -> Option<PathBuf> { let with_extension = match env::consts::EXE_EXTENSION { "" => None, it => Some(path.with_extension(it)), diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs index 9004bff53a808..eec88f80688c7 100644 --- a/crates/tt/src/lib.rs +++ b/crates/tt/src/lib.rs @@ -152,6 +152,7 @@ pub struct Punct { #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum Spacing { Alone, + /// Whether the following token is joint to the current one. Joint, } diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc index a86ef709411cc..da7654b0f6447 100644 --- a/docs/user/generated_config.adoc +++ b/docs/user/generated_config.adoc @@ -71,7 +71,7 @@ cargo check --quiet --workspace --message-format=json --all-targets ``` .
-- -[[rust-analyzer.cargo.buildScripts.rebuildOnSave]]rust-analyzer.cargo.buildScripts.rebuildOnSave (default: `false`):: +[[rust-analyzer.cargo.buildScripts.rebuildOnSave]]rust-analyzer.cargo.buildScripts.rebuildOnSave (default: `true`):: + -- Rerun proc-macros building/build-scripts running when proc-macro @@ -234,6 +234,11 @@ each of them, with the working directory being the workspace root by changing `#rust-analyzer.check.invocationStrategy#` and `#rust-analyzer.check.invocationLocation#`. +If `$saved_file` is part of the command, rust-analyzer will pass +the absolute path of the saved file to the provided command. This is +intended to be used with non-Cargo build systems. +Note that `$saved_file` is experimental and may be removed in the future. + An example command would be: ```bash @@ -343,6 +348,11 @@ Default: ---- Custom completion snippets. +-- +[[rust-analyzer.completion.termSearch.enable]]rust-analyzer.completion.termSearch.enable (default: `false`):: ++ +-- +Whether to enable term search based snippets like `Some(foo.bar().baz())`. -- [[rust-analyzer.diagnostics.disabled]]rust-analyzer.diagnostics.disabled (default: `[]`):: + @@ -793,11 +803,6 @@ Exclude imports from find-all-references. -- Exclude tests from find-all-references. -- -[[rust-analyzer.rename.allowExternalItems]]rust-analyzer.rename.allowExternalItems (default: `false`):: -+ --- -Allow renaming of items not belonging to the loaded workspaces. --- [[rust-analyzer.runnables.command]]rust-analyzer.runnables.command (default: `null`):: + -- diff --git a/editors/code/.vscodeignore b/editors/code/.vscodeignore index 09dc27056b37a..5c48205694fe9 100644 --- a/editors/code/.vscodeignore +++ b/editors/code/.vscodeignore @@ -12,3 +12,6 @@ !ra_syntax_tree.tmGrammar.json !server !README.md +!language-configuration-rustdoc.json +!rustdoc-inject.json +!rustdoc.json diff --git a/editors/code/language-configuration-rustdoc.json b/editors/code/language-configuration-rustdoc.json new file mode 100644 index 0000000000000..c905d3b60674e --- /dev/null +++ b/editors/code/language-configuration-rustdoc.json @@ -0,0 +1,37 @@ +{ + "comments": { + "blockComment": ["<!--", "-->"] + }, + "brackets": [ + ["{", "}"], + ["[", "]"], + ["(", ")"] + ], + "colorizedBracketPairs": [], + "autoClosingPairs": [ + { "open": "{", "close": "}" }, + { "open": "[", "close": "]" }, + { "open": "(", "close": ")" } + ], + "surroundingPairs": [ + ["(", ")"], + ["[", "]"], + ["`", "`"], + ["_", "_"], + ["*", "*"], + ["{", "}"], + ["'", "'"], + ["\"", "\""] + ], + "folding": { + "offSide": true, + "markers": { + "start": "^\\s*<!--\\s*#?region\\b.*-->", + "end": "^\\s*<!--\\s*#?endregion\\b.*-->" + } + }, + "wordPattern": { + "pattern": "(\\p{Alphabetic}|\\p{Number}|\\p{Nonspacing_Mark})(((\\p{Alphabetic}|\\p{Number}|\\p{Nonspacing_Mark})|[_])?(\\p{Alphabetic}|\\p{Number}|\\p{Nonspacing_Mark}))*", + "flags": "ug" + } +}
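The new language configuration above, together with the grammars registered below, gives the Markdown body of doc comments its own `rustdoc` language, with Rust highlighting inside fenced examples. A hypothetical doc comment of the kind the injection targets (crate name `my_crate` is assumed for the doctest path):

````rust
/// Doubles a number.
///
/// # Examples
///
/// ```
/// assert_eq!(my_crate::double(2), 4);
/// ```
pub fn double(x: i32) -> i32 {
    x * 2
}
````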
"rust-analyzer.cargo.buildScripts.useRustcWrapper": { @@ -775,7 +777,7 @@ ] }, "rust-analyzer.check.overrideCommand": { - "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefore include `--message-format=json` or a similar option\n(if your client supports the `colorDiagnosticOutput` experimental\ncapability, you can use `--message-format=json-diagnostic-rendered-ansi`).\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). This can be overwritten\nby changing `#rust-analyzer.check.invocationStrategy#` and\n`#rust-analyzer.check.invocationLocation#`.\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.", + "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefore include `--message-format=json` or a similar option\n(if your client supports the `colorDiagnosticOutput` experimental\ncapability, you can use `--message-format=json-diagnostic-rendered-ansi`).\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). This can be overwritten\nby changing `#rust-analyzer.check.invocationStrategy#` and\n`#rust-analyzer.check.invocationLocation#`.\n\nIf `$saved_file` is part of the command, rust-analyzer will pass\nthe absolute path of the saved file to the provided command. 
This is\nintended to be used with non-Cargo build systems.\nNote that `$saved_file` is experimental and may be removed in the future.\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.", "default": null, "type": [ "null", @@ -902,6 +904,11 @@ }, "type": "object" }, + "rust-analyzer.completion.termSearch.enable": { + "markdownDescription": "Whether to enable term search based snippets like `Some(foo.bar().baz())`.", + "default": false, + "type": "boolean" + }, "rust-analyzer.diagnostics.disabled": { "markdownDescription": "List of rust-analyzer diagnostics to disable.", "default": [], @@ -1520,11 +1527,6 @@ "default": false, "type": "boolean" }, - "rust-analyzer.rename.allowExternalItems": { - "markdownDescription": "Allow renaming of items not belonging to the loaded workspaces.", - "default": false, - "type": "boolean" - }, "rust-analyzer.runnables.command": { "markdownDescription": "Command to be executed instead of 'cargo' for runnables.", "default": null, @@ -1756,6 +1758,13 @@ "rs" ], "configuration": "language-configuration.json" + }, + { + "id": "rustdoc", + "extensions": [ + ".rustdoc" + ], + "configuration": "./language-configuration-rustdoc.json" } ], "grammars": [ @@ -1763,6 +1772,27 @@ "language": "ra_syntax_tree", "scopeName": "source.ra_syntax_tree", "path": "ra_syntax_tree.tmGrammar.json" + }, + { + "language": "rustdoc", + "scopeName": "text.html.markdown.rustdoc", + "path": "rustdoc.json", + "embeddedLanguages": { + "meta.embedded.block.html": "html", + "meta.embedded.block.markdown": "markdown", + "meta.embedded.block.rust": "rust" + } + }, + { + "injectTo": [ + "source.rust" + ], + "scopeName": "comment.markdown-cell-inject.rustdoc", + "path": "rustdoc-inject.json", + "embeddedLanguages": { + "meta.embedded.block.rustdoc": "rustdoc", + "meta.embedded.block.rust": "rust" + } } ], "problemMatchers": [ diff --git a/editors/code/rustdoc-inject.json b/editors/code/rustdoc-inject.json new file mode 100644 index 0000000000000..7a4498fea9d07 --- /dev/null +++ b/editors/code/rustdoc-inject.json @@ -0,0 +1,93 @@ +{ + "injectionSelector": "L:source.rust -string -comment -meta.embedded.block.rustdoc.md", + "patterns": [ + { + "include": "#triple-slash" + }, + { + "include": "#double-slash-exclamation" + }, + { + "include": "#slash-start-exclamation" + }, + { + "include": "#slash-double-start" + } + ], + "repository": { + "triple-slash": { + "begin": "(^|\\G)\\s*(///) ?", + "captures": { + "2": { + "name": "comment.line.double-slash.rust" + } + }, + "name": "comment.quote_code.triple-slash.rust", + "contentName": "meta.embedded.block.rustdoc", + "patterns": [ + { + "include": "text.html.markdown.rustdoc" + } + ], + "while": "(^|\\G)\\s*(///) ?" + }, + "double-slash-exclamation": { + "begin": "(^|\\G)\\s*(//!) ?", + "captures": { + "2": { + "name": "comment.line.double-slash.rust" + } + }, + "name": "comment.quote_code.double-slash-exclamation.rust", + "contentName": "meta.embedded.block.rustdoc", + "patterns": [ + { + "include": "text.html.markdown.rustdoc" + } + ], + "while": "(^|\\G)\\s*(//!) ?" + }, + "slash-start-exclamation": { + "begin": "(^)(/\\*!)
?$", + "captures": { + "2": { + "name": "comment.block.rust" + } + }, + "name": "comment.quote_code.slash-start-exclamation.rust", + "contentName": "meta.embedded.block.rustdoc", + "patterns": [ + { + "include": "text.html.markdown.rustdoc" + } + ], + "end": "( ?)(\\*/)" + }, + "slash-double-start": { + "name": "comment.quote_code.slash-double-start-quote-star.rust", + "begin": "(?:^)\\s*/\\*\\* ?$", + "end": "\\*/", + "patterns": [ + { + "include": "#quote-star" + } + ] + }, + "quote-star": { + "begin": "(^|\\G)\\s*(\\*(?!/)) ?", + "captures": { + "2": { + "name": "comment.punctuation.definition.quote_code.slash-star.MR" + } + }, + "contentName": "meta.embedded.block.rustdoc", + "patterns": [ + { + "include": "text.html.markdown.rustdoc" + } + ], + "while": "(^|\\G)\\s*(\\*(?!/)) ?" + } + }, + "scopeName": "comment.markdown-cell-inject.rustdoc" +} diff --git a/editors/code/rustdoc.json b/editors/code/rustdoc.json new file mode 100644 index 0000000000000..cecfae9d753e2 --- /dev/null +++ b/editors/code/rustdoc.json @@ -0,0 +1,82 @@ +{ + "name": "rustdoc", + "patterns": [ + { + "include": "#fenced_code_block" + }, + { + "include": "#markdown" + } + ], + "scopeName": "text.html.markdown.rustdoc", + "repository": { + "markdown": { + "patterns": [ + { + "include": "text.html.markdown" + } + ] + }, + "fenced_code_block": { + "patterns": [ + { + "include": "#fenced_code_block_rust" + }, + { + "include": "#fenced_code_block_unknown" + } + ] + }, + "fenced_code_block_rust": { + "begin": "(^|\\G)(\\s*)(`{3,}|~{3,})\\s*(?i:(rust|not run|not_run)?((\\s+|:|,|\\{|\\?)[^`~]*)?$)", + "name": "markup.fenced_code.block.markdown", + "end": "(^|\\G)(\\2|\\s{0,3})(\\3)\\s*$", + "beginCaptures": { + "3": { + "name": "punctuation.definition.markdown" + }, + "4": { + "name": "fenced_code.block.language.markdown" + }, + "5": { + "name": "fenced_code.block.language.attributes.markdown" + } + }, + "endCaptures": { + "3": { + "name": "punctuation.definition.markdown" + } + }, + "patterns": [ + { + "begin": "(^|\\G)(\\s*)(.*)", + "while": "(^|\\G)(?!\\s*([`~]{3,})\\s*$)", + "contentName": "meta.embedded.block.rust", + "patterns": [ + { + "include": "source.rust" + } + ] + } + ] + }, + "fenced_code_block_unknown": { + "begin": "(^|\\G)(\\s*)(`{3,}|~{3,})\\s*(?=([^`~]+)?$)", + "beginCaptures": { + "3": { + "name": "punctuation.definition.markdown" + }, + "4": { + "name": "fenced_code.block.language" + } + }, + "end": "(^|\\G)(\\2|\\s{0,3})(\\3)\\s*$", + "endCaptures": { + "3": { + "name": "punctuation.definition.markdown" + } + }, + "name": "markup.fenced_code.block.markdown" + } + } +} diff --git a/editors/code/src/rust_project.ts b/editors/code/src/rust_project.ts index bf65ad43ba596..c983874fc009f 100644 --- a/editors/code/src/rust_project.ts +++ b/editors/code/src/rust_project.ts @@ -1,7 +1,26 @@ export interface JsonProject { + /// Path to the sysroot directory. + /// + /// The sysroot is where rustc looks for the + /// crates that are built-in to rust, such as + /// std. + /// + /// https://doc.rust-lang.org/rustc/command-line-arguments.html#--sysroot-override-the-system-root + /// + /// To see the current value of sysroot, you + /// can query rustc: + /// + /// ``` + /// $ rustc --print sysroot + /// /Users/yourname/.rustup/toolchains/stable-x86_64-apple-darwin + /// ``` + sysroot?: string; /// Path to the directory with *source code* of /// sysroot crates. /// + /// By default, this is `lib/rustlib/src/rust/library` + /// relative to the sysroot. 
+ /// /// It should point to the directory where std, /// core, and friends can be found: /// diff --git a/lib/lsp-server/LICENSE-APACHE b/lib/lsp-server/LICENSE-APACHE new file mode 120000 index 0000000000000..1cd601d0a3aff --- /dev/null +++ b/lib/lsp-server/LICENSE-APACHE @@ -0,0 +1 @@ +../../LICENSE-APACHE \ No newline at end of file diff --git a/lib/lsp-server/LICENSE-MIT b/lib/lsp-server/LICENSE-MIT new file mode 120000 index 0000000000000..b2cfbdc7b0b46 --- /dev/null +++ b/lib/lsp-server/LICENSE-MIT @@ -0,0 +1 @@ +../../LICENSE-MIT \ No newline at end of file diff --git a/xtask/src/metrics.rs b/xtask/src/metrics.rs index 9bd3a661c24de..2efafa10a828e 100644 --- a/xtask/src/metrics.rs +++ b/xtask/src/metrics.rs @@ -117,8 +117,6 @@ impl Metrics { sh, "./target/release/rust-analyzer -q analysis-stats {path} --query-sysroot-metadata" ) - // the sysroot uses `public-dependency`, so we make cargo think it's a nightly - .env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly") .read()?; for (metric, value, unit) in parse_metrics(&output) { self.report(&format!("analysis-stats/{name}/{metric}"), value, unit.into());
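As a closing illustration, this is how a caller might use the `toolchain::Tool` API introduced in this patch; the sysroot path below is hypothetical, and the `toolchain` crate is internal to rust-analyzer, so this is a sketch rather than a public-API example:

```rust
use std::path::Path;
use toolchain::Tool;

fn main() {
    // Resolves the executable via the usual lookup (cargo home's bin/, then
    // $PATH), falling back to the plain tool name if nothing is found.
    let cargo = Tool::Cargo.path();
    println!("cargo: {}", cargo.display());

    // Prefer a binary shipped in a specific directory (e.g. a sysroot's bin/),
    // and fall back to the normal discovery when it is absent there.
    let sysroot_bin = Path::new("/path/to/sysroot/bin");
    let rustfmt = Tool::Rustfmt.path_in_or_discover(sysroot_bin);
    println!("rustfmt: {}", rustfmt.display());
}
```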