From 17073464ad9746995a733f87cf25b6d6eb3d9f35 Mon Sep 17 00:00:00 2001 From: beepster4096 <19316085+beepster4096@users.noreply.github.com> Date: Fri, 19 Apr 2024 16:50:46 -0700 Subject: [PATCH 01/56] remove optionality from MoveData::base_local --- compiler/rustc_borrowck/src/borrow_set.rs | 4 +--- compiler/rustc_mir_dataflow/src/move_paths/mod.rs | 15 +++++---------- 2 files changed, 6 insertions(+), 13 deletions(-) diff --git a/compiler/rustc_borrowck/src/borrow_set.rs b/compiler/rustc_borrowck/src/borrow_set.rs index a38dd286be51b..ae6ac23c0f535 100644 --- a/compiler/rustc_borrowck/src/borrow_set.rs +++ b/compiler/rustc_borrowck/src/borrow_set.rs @@ -108,9 +108,7 @@ impl LocalsStateAtExit { has_storage_dead.visit_body(body); let mut has_storage_dead_or_moved = has_storage_dead.0; for move_out in &move_data.moves { - if let Some(index) = move_data.base_local(move_out.path) { - has_storage_dead_or_moved.insert(index); - } + has_storage_dead_or_moved.insert(move_data.base_local(move_out.path)); } LocalsStateAtExit::SomeAreInvalidated { has_storage_dead_or_moved } } diff --git a/compiler/rustc_mir_dataflow/src/move_paths/mod.rs b/compiler/rustc_mir_dataflow/src/move_paths/mod.rs index 22cf3999239c1..830f44df5fb39 100644 --- a/compiler/rustc_mir_dataflow/src/move_paths/mod.rs +++ b/compiler/rustc_mir_dataflow/src/move_paths/mod.rs @@ -358,20 +358,15 @@ impl<'tcx> MoveData<'tcx> { builder::gather_moves(body, tcx, param_env, filter) } - /// For the move path `mpi`, returns the root local variable (if any) that starts the path. - /// (e.g., for a path like `a.b.c` returns `Some(a)`) - pub fn base_local(&self, mut mpi: MovePathIndex) -> Option { + /// For the move path `mpi`, returns the root local variable that starts the path. + /// (e.g., for a path like `a.b.c` returns `a`) + pub fn base_local(&self, mut mpi: MovePathIndex) -> Local { loop { let path = &self.move_paths[mpi]; if let Some(l) = path.place.as_local() { - return Some(l); - } - if let Some(parent) = path.parent { - mpi = parent; - continue; - } else { - return None; + return l; } + mpi = path.parent.expect("root move paths should be locals"); } } From 144bdf5bf1a722fc73f8bd24fa05c5cbb2845701 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Ber=C3=A1nek?= Date: Tue, 23 Apr 2024 15:16:21 +0200 Subject: [PATCH 02/56] Remove `master` CI job --- .github/workflows/ci.yml | 31 +++++-------------------------- src/ci/github-actions/ci.yml | 28 +++++++++------------------- 2 files changed, 14 insertions(+), 45 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3651ef2c614ef..765dffaaff5e0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -152,30 +152,6 @@ jobs: AWS_ACCESS_KEY_ID: "${{ env.ARTIFACTS_AWS_ACCESS_KEY_ID }}" AWS_SECRET_ACCESS_KEY: "${{ secrets[format('AWS_SECRET_ACCESS_KEY_{0}', env.ARTIFACTS_AWS_ACCESS_KEY_ID)] }}" if: "success() && (github.event_name == 'push' || env.DEPLOY == '1' || env.DEPLOY_ALT == '1')" - master: - name: master - runs-on: ubuntu-latest - env: - SCCACHE_BUCKET: rust-lang-ci-sccache2 - DEPLOY_BUCKET: rust-lang-ci2 - TOOLSTATE_REPO: "https://github.com/rust-lang-nursery/rust-toolstate" - TOOLSTATE_ISSUES_API_URL: "https://api.github.com/repos/rust-lang/rust/issues" - TOOLSTATE_PUBLISH: 1 - CACHES_AWS_ACCESS_KEY_ID: AKIA46X5W6CZI5DHEBFL - ARTIFACTS_AWS_ACCESS_KEY_ID: AKIA46X5W6CZN24CBO55 - AWS_REGION: us-west-1 - CACHE_DOMAIN: ci-caches.rust-lang.org - if: "github.event_name == 'push' && github.ref == 'refs/heads/master' && github.repository == 
'rust-lang-ci/rust'" - steps: - - name: checkout the source code - uses: actions/checkout@v4 - with: - fetch-depth: 2 - - name: publish toolstate - run: src/ci/publish_toolstate.sh - shell: bash - env: - TOOLSTATE_REPO_ACCESS_TOKEN: "${{ secrets.TOOLSTATE_REPO_ACCESS_TOKEN }}" try-success: needs: - job @@ -201,9 +177,12 @@ jobs: - job if: "success() && github.event_name == 'push' && github.ref == 'refs/heads/auto' && github.repository == 'rust-lang-ci/rust'" steps: - - name: mark the job as a success - run: exit 0 + - name: publish toolstate + run: src/ci/publish_toolstate.sh shell: bash + if: "success() && (github.event_name == 'push' || env.DEPLOY == '1' || env.DEPLOY_ALT == '1')" + env: + TOOLSTATE_REPO_ACCESS_TOKEN: "${{ secrets.TOOLSTATE_REPO_ACCESS_TOKEN }}" name: bors build finished runs-on: ubuntu-latest auto-failure: diff --git a/src/ci/github-actions/ci.yml b/src/ci/github-actions/ci.yml index bc4b1b815cf37..081e5ed21e091 100644 --- a/src/ci/github-actions/ci.yml +++ b/src/ci/github-actions/ci.yml @@ -351,24 +351,6 @@ jobs: # This hack is taken from https://github.com/ferrocene/ferrocene/blob/d43edc6b7697cf1719ec1c17c54904ab94825763/.github/workflows/release.yml#L75-L82 if: fromJSON(needs.calculate_matrix.outputs.jobs)[0] != null - master: - name: master - runs-on: ubuntu-latest - env: - <<: [*prod-variables] - if: github.event_name == 'push' && github.ref == 'refs/heads/master' && github.repository == 'rust-lang-ci/rust' - steps: - - name: checkout the source code - uses: actions/checkout@v4 - with: - fetch-depth: 2 - - - name: publish toolstate - run: src/ci/publish_toolstate.sh - shell: bash - env: - TOOLSTATE_REPO_ACCESS_TOKEN: ${{ secrets.TOOLSTATE_REPO_ACCESS_TOKEN }} - # These jobs don't actually test anything, but they're used to tell bors the # build completed, as there is no practical way to detect when a workflow is # successful listening to webhooks only. 
@@ -383,7 +365,15 @@ jobs: auto-success: needs: [ job ] if: "success() && github.event_name == 'push' && github.ref == 'refs/heads/auto' && github.repository == 'rust-lang-ci/rust'" - <<: *base-success-job + <<: *base-outcome-job + steps: + - name: publish toolstate + run: src/ci/publish_toolstate.sh + shell: bash + if: success() && (github.event_name == 'push' || env.DEPLOY == '1' || env.DEPLOY_ALT == '1') + env: + TOOLSTATE_REPO_ACCESS_TOKEN: ${{ secrets.TOOLSTATE_REPO_ACCESS_TOKEN }} + auto-failure: needs: [ job ] if: "!success() && github.event_name == 'push' && github.ref == 'refs/heads/auto' && github.repository == 'rust-lang-ci/rust'" From 744dc2aa1d59c09a5cf3de5c4985dd914756219a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Ber=C3=A1nek?= Date: Wed, 24 Apr 2024 15:00:42 +0200 Subject: [PATCH 03/56] Inline `base-ci-job` YAML anchor and remove unused anchors --- .github/workflows/ci.yml | 10 +- src/ci/github-actions/ci.yml | 276 ++++++++++++----------------------- 2 files changed, 102 insertions(+), 184 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 765dffaaff5e0..477f521d3b4df 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -51,6 +51,11 @@ jobs: name: "${{ matrix.name }}" needs: - calculate_matrix + runs-on: "${{ matrix.os }}" + defaults: + run: + shell: "${{ contains(matrix.os, 'windows') && 'msys2 {0}' || 'bash' }}" + timeout-minutes: 600 env: CI_JOB_NAME: "${{ matrix.image }}" CARGO_REGISTRIES_CRATES_IO_PROTOCOL: sparse @@ -64,11 +69,6 @@ jobs: matrix: include: "${{ fromJSON(needs.calculate_matrix.outputs.jobs) }}" if: "fromJSON(needs.calculate_matrix.outputs.jobs)[0] != null" - defaults: - run: - shell: "${{ contains(matrix.os, 'windows') && 'msys2 {0}' || 'bash' }}" - timeout-minutes: 600 - runs-on: "${{ matrix.os }}" steps: - if: "contains(matrix.os, 'windows')" uses: msys2/setup-msys2@v2.22.0 diff --git a/src/ci/github-actions/ci.yml b/src/ci/github-actions/ci.yml index 081e5ed21e091..e279699485149 100644 --- a/src/ci/github-actions/ci.yml +++ b/src/ci/github-actions/ci.yml @@ -29,91 +29,107 @@ # The expand-yaml-anchors tool will automatically remove this block from the # output YAML file. x--expand-yaml-anchors--remove: - - &shared-ci-variables - CI_JOB_NAME: ${{ matrix.name }} - CARGO_REGISTRIES_CRATES_IO_PROTOCOL: sparse - # commit of PR sha or commit sha. `GITHUB_SHA` is not accurate for PRs. - HEAD_SHA: ${{ github.event.pull_request.head.sha || github.sha }} - DOCKER_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - - &public-variables - SCCACHE_BUCKET: rust-lang-ci-sccache2 - TOOLSTATE_REPO: https://github.com/rust-lang-nursery/rust-toolstate - CACHE_DOMAIN: ci-caches.rust-lang.org - - - &prod-variables - SCCACHE_BUCKET: rust-lang-ci-sccache2 - DEPLOY_BUCKET: rust-lang-ci2 - TOOLSTATE_REPO: https://github.com/rust-lang-nursery/rust-toolstate - TOOLSTATE_ISSUES_API_URL: https://api.github.com/repos/rust-lang/rust/issues - TOOLSTATE_PUBLISH: 1 - # AWS_SECRET_ACCESS_KEYs are stored in GitHub's secrets storage, named - # AWS_SECRET_ACCESS_KEY_. Including the key id in the name allows to - # rotate them in a single branch while keeping the old key in another - # branch, which wouldn't be possible if the key was named with the kind - # (caches, artifacts...). 
- CACHES_AWS_ACCESS_KEY_ID: AKIA46X5W6CZI5DHEBFL - ARTIFACTS_AWS_ACCESS_KEY_ID: AKIA46X5W6CZN24CBO55 - AWS_REGION: us-west-1 - CACHE_DOMAIN: ci-caches.rust-lang.org - - - &dummy-variables - SCCACHE_BUCKET: rust-lang-gha-caches - DEPLOY_BUCKET: rust-lang-gha - TOOLSTATE_REPO: https://github.com/pietroalbini/rust-toolstate - TOOLSTATE_ISSUES_API_URL: https://api.github.com/repos/pietroalbini/rust-toolstate/issues - TOOLSTATE_PUBLISH: 1 - # AWS_SECRET_ACCESS_KEYs are stored in GitHub's secrets storage, named - # AWS_SECRET_ACCESS_KEY_. Including the key id in the name allows to - # rotate them in a single branch while keeping the old key in another - # branch, which wouldn't be possible if the key was named with the kind - # (caches, artifacts...). - CACHES_AWS_ACCESS_KEY_ID: AKIA46X5W6CZOMUQATD5 - ARTIFACTS_AWS_ACCESS_KEY_ID: AKIA46X5W6CZH5AYXDVF - AWS_REGION: us-west-1 - CACHE_DOMAIN: ci-caches-gha.rust-lang.org - - - &base-job - env: {} - - - &job-linux-4c - os: ubuntu-20.04-4core-16gb - <<: *base-job - - - &job-linux-8c - os: ubuntu-20.04-8core-32gb - <<: *base-job - - - &job-linux-16c - os: ubuntu-20.04-16core-64gb - <<: *base-job - - - &job-macos-xl - os: macos-13 # We use the standard runner for now - <<: *base-job - - - &job-macos-m1 - os: macos-14 - <<: *base-job - - - &job-windows-8c - os: windows-2019-8core-32gb - <<: *base-job - - - &job-windows-16c - os: windows-2019-16core-64gb - <<: *base-job - - - &job-aarch64-linux - os: [self-hosted, ARM64, linux] - - - &base-ci-job + # These snippets are used by the try-success, try-failure, auto-success and auto-failure jobs. + # Check out their documentation for more information on why they're needed. + + - &base-outcome-job + name: bors build finished + runs-on: ubuntu-latest + + - &base-success-job + steps: + - name: mark the job as a success + run: exit 0 + shell: bash + <<: *base-outcome-job + + - &base-failure-job + steps: + - name: mark the job as a failure + run: exit 1 + shell: bash + <<: *base-outcome-job + +########################### +# Builders definition # +########################### + +name: CI +on: + push: + branches: + - auto + - try + - try-perf + - automation/bors/try + - master + pull_request: + branches: + - "**" + +permissions: + contents: read + packages: write + +defaults: + run: + # On Linux, macOS, and Windows, use the system-provided bash as the default + # shell. (This should only make a difference on Windows, where the default + # shell is PowerShell.) + shell: bash + +concurrency: + # For a given workflow, if we push to the same branch, cancel all previous builds on that branch. + # We add an exception for try builds (try branch) and unrolled rollup builds (try-perf), which + # are all triggered on the same branch, but which should be able to run concurrently. + group: ${{ github.workflow }}-${{ ((github.ref == 'refs/heads/try' || github.ref == 'refs/heads/try-perf') && github.sha) || github.ref }} + cancel-in-progress: true + +jobs: + # The job matrix for `calculate_matrix` is defined in src/ci/github-actions/jobs.yml. + # It calculates which jobs should be executed, based on the data of the ${{ github }} context. + # If you want to modify CI jobs, take a look at src/ci/github-actions/jobs.yml. 
+ calculate_matrix: + name: Calculate job matrix + runs-on: ubuntu-latest + outputs: + jobs: ${{ steps.jobs.outputs.jobs }} + steps: + - name: Checkout the source code + uses: actions/checkout@v4 + - name: Calculate the CI job matrix + run: python3 src/ci/github-actions/calculate-job-matrix.py >> $GITHUB_OUTPUT + id: jobs + job: + name: ${{ matrix.name }} + needs: [ calculate_matrix ] + runs-on: "${{ matrix.os }}" defaults: run: shell: ${{ contains(matrix.os, 'windows') && 'msys2 {0}' || 'bash' }} timeout-minutes: 600 - runs-on: "${{ matrix.os }}" - env: *shared-ci-variables + env: + CI_JOB_NAME: ${{ matrix.image }} + CARGO_REGISTRIES_CRATES_IO_PROTOCOL: sparse + # commit of PR sha or commit sha. `GITHUB_SHA` is not accurate for PRs. + HEAD_SHA: ${{ github.event.pull_request.head.sha || github.sha }} + DOCKER_TOKEN: ${{ secrets.GITHUB_TOKEN }} + SCCACHE_BUCKET: rust-lang-ci-sccache2 + TOOLSTATE_REPO: https://github.com/rust-lang-nursery/rust-toolstate + CACHE_DOMAIN: ci-caches.rust-lang.org + continue-on-error: ${{ matrix.continue_on_error || false }} + strategy: + matrix: + # Check the `calculate_matrix` job to see how is the matrix defined. + include: ${{ fromJSON(needs.calculate_matrix.outputs.jobs) }} + # GitHub Actions fails the workflow if an empty list of jobs is provided to + # the workflow, so we need to skip this job if nothing was produced by + # the Python script. + # + # Unfortunately checking whether a list is empty is not possible in a nice + # way due to GitHub Actions expressions limits. + # This hack is taken from https://github.com/ferrocene/ferrocene/blob/d43edc6b7697cf1719ec1c17c54904ab94825763/.github/workflows/release.yml#L75-L82 + if: fromJSON(needs.calculate_matrix.outputs.jobs)[0] != null steps: - if: contains(matrix.os, 'windows') uses: msys2/setup-msys2@v2.22.0 @@ -253,104 +269,6 @@ x--expand-yaml-anchors--remove: # erroring about invalid credentials instead. if: success() && (github.event_name == 'push' || env.DEPLOY == '1' || env.DEPLOY_ALT == '1') - # These snippets are used by the try-success, try-failure, auto-success and auto-failure jobs. - # Check out their documentation for more information on why they're needed. - - - &base-outcome-job - name: bors build finished - runs-on: ubuntu-latest - - - &base-success-job - steps: - - name: mark the job as a success - run: exit 0 - shell: bash - <<: *base-outcome-job - - - &base-failure-job - steps: - - name: mark the job as a failure - run: exit 1 - shell: bash - <<: *base-outcome-job - -########################### -# Builders definition # -########################### - -name: CI -on: - push: - branches: - - auto - - try - - try-perf - - automation/bors/try - - master - pull_request: - branches: - - "**" - -permissions: - contents: read - packages: write - -defaults: - run: - # On Linux, macOS, and Windows, use the system-provided bash as the default - # shell. (This should only make a difference on Windows, where the default - # shell is PowerShell.) - shell: bash - -concurrency: - # For a given workflow, if we push to the same branch, cancel all previous builds on that branch. - # We add an exception for try builds (try branch) and unrolled rollup builds (try-perf), which - # are all triggered on the same branch, but which should be able to run concurrently. 
- group: ${{ github.workflow }}-${{ ((github.ref == 'refs/heads/try' || github.ref == 'refs/heads/try-perf') && github.sha) || github.ref }} - cancel-in-progress: true - -jobs: - # The job matrix for `calculate_matrix` is defined in src/ci/github-actions/jobs.yml. - # It calculates which jobs should be executed, based on the data of the ${{ github }} context. - # If you want to modify CI jobs, take a look at src/ci/github-actions/jobs.yml. - calculate_matrix: - name: Calculate job matrix - runs-on: ubuntu-latest - outputs: - jobs: ${{ steps.jobs.outputs.jobs }} - steps: - - name: Checkout the source code - uses: actions/checkout@v4 - - name: Calculate the CI job matrix - run: python3 src/ci/github-actions/calculate-job-matrix.py >> $GITHUB_OUTPUT - id: jobs - job: - <<: *base-ci-job - name: ${{ matrix.name }} - needs: [ calculate_matrix ] - env: - CI_JOB_NAME: ${{ matrix.image }} - CARGO_REGISTRIES_CRATES_IO_PROTOCOL: sparse - # commit of PR sha or commit sha. `GITHUB_SHA` is not accurate for PRs. - HEAD_SHA: ${{ github.event.pull_request.head.sha || github.sha }} - DOCKER_TOKEN: ${{ secrets.GITHUB_TOKEN }} - SCCACHE_BUCKET: rust-lang-ci-sccache2 - TOOLSTATE_REPO: https://github.com/rust-lang-nursery/rust-toolstate - CACHE_DOMAIN: ci-caches.rust-lang.org - continue-on-error: ${{ matrix.continue_on_error || false }} - strategy: - matrix: - # Check the `calculate_matrix` job to see how is the matrix defined. - include: ${{ fromJSON(needs.calculate_matrix.outputs.jobs) }} - # GitHub Actions fails the workflow if an empty list of jobs is provided to - # the workflow, so we need to skip this job if nothing was produced by - # the Python script. - # - # Unfortunately checking whether a list is empty is not possible in a nice - # way due to GitHub Actions expressions limits. - # This hack is taken from https://github.com/ferrocene/ferrocene/blob/d43edc6b7697cf1719ec1c17c54904ab94825763/.github/workflows/release.yml#L75-L82 - if: fromJSON(needs.calculate_matrix.outputs.jobs)[0] != null - # These jobs don't actually test anything, but they're used to tell bors the # build completed, as there is no practical way to detect when a workflow is # successful listening to webhooks only. From 86da0e758faaf46a8025660a815a673ebafa1933 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Ber=C3=A1nek?= Date: Thu, 25 Apr 2024 10:47:10 +0200 Subject: [PATCH 04/56] Remove useless condition Neither `env.DEPLOY` nor `env.DEPLOY_ALT` should be present in this job. 
--- .github/workflows/ci.yml | 1 - src/ci/github-actions/ci.yml | 1 - 2 files changed, 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 477f521d3b4df..32bfc7170b9a6 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -180,7 +180,6 @@ jobs: - name: publish toolstate run: src/ci/publish_toolstate.sh shell: bash - if: "success() && (github.event_name == 'push' || env.DEPLOY == '1' || env.DEPLOY_ALT == '1')" env: TOOLSTATE_REPO_ACCESS_TOKEN: "${{ secrets.TOOLSTATE_REPO_ACCESS_TOKEN }}" name: bors build finished diff --git a/src/ci/github-actions/ci.yml b/src/ci/github-actions/ci.yml index e279699485149..0439b053fda17 100644 --- a/src/ci/github-actions/ci.yml +++ b/src/ci/github-actions/ci.yml @@ -288,7 +288,6 @@ jobs: - name: publish toolstate run: src/ci/publish_toolstate.sh shell: bash - if: success() && (github.event_name == 'push' || env.DEPLOY == '1' || env.DEPLOY_ALT == '1') env: TOOLSTATE_REPO_ACCESS_TOKEN: ${{ secrets.TOOLSTATE_REPO_ACCESS_TOKEN }} From e82c28debc2762055bf2ddecba9627facdb21c74 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Ber=C3=A1nek?= Date: Thu, 25 Apr 2024 11:06:27 +0200 Subject: [PATCH 05/56] Remove `master` push hook --- .github/workflows/ci.yml | 1 - src/ci/github-actions/ci.yml | 1 - 2 files changed, 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 32bfc7170b9a6..231bb971713f2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -22,7 +22,6 @@ name: CI - try - try-perf - automation/bors/try - - master pull_request: branches: - "**" diff --git a/src/ci/github-actions/ci.yml b/src/ci/github-actions/ci.yml index 0439b053fda17..98b27950fbb0f 100644 --- a/src/ci/github-actions/ci.yml +++ b/src/ci/github-actions/ci.yml @@ -62,7 +62,6 @@ on: - try - try-perf - automation/bors/try - - master pull_request: branches: - "**" From 411607bec493ac02c6038714561b52008f097a02 Mon Sep 17 00:00:00 2001 From: klensy Date: Thu, 25 Apr 2024 13:17:29 +0300 Subject: [PATCH 06/56] tests: remove some trailing ws --- compiler/rustc_errors/src/emitter.rs | 16 ++++++++++++--- .../huge_multispan_highlight.svg | 20 +++++++++---------- tests/ui/error-emitter/highlighting.svg | 10 +++++----- .../ui/error-emitter/highlighting.windows.svg | 10 +++++----- .../multiline-multipart-suggestion.svg | 10 +++++----- ...multiline-multipart-suggestion.windows.svg | 10 +++++----- tests/ui/imports/issue-59764.stderr | 4 ++-- tests/ui/issues/issue-22644.stderr | 4 ++-- tests/ui/lint/use_suggestion_json.stderr | 2 +- tests/ui/macros/issue-118048.stderr | 2 +- tests/ui/methods/method-call-err-msg.stderr | 2 +- tests/ui/str/str-escape.stderr | 4 ++-- .../struct-rec/issue-17431-2.stderr | 2 +- .../struct-rec/mutual-struct-recursion.stderr | 4 ++-- tests/ui/suggestions/issue-99240-2.stderr | 2 +- tests/ui/typeck/issue-100285.stderr | 2 +- tests/ui/typeck/issue-31173.stderr | 2 +- ...oxed-closure-sugar-lifetime-elision.stderr | 2 +- .../variance-regions-unused-indirect.stderr | 2 +- 19 files changed, 60 insertions(+), 50 deletions(-) diff --git a/compiler/rustc_errors/src/emitter.rs b/compiler/rustc_errors/src/emitter.rs index 6ce3fa3535dc9..74852ba36985a 100644 --- a/compiler/rustc_errors/src/emitter.rs +++ b/compiler/rustc_errors/src/emitter.rs @@ -984,7 +984,7 @@ impl HumanEmitter { // 4 | } // | for pos in 0..=line_len { - draw_col_separator(buffer, line_offset + pos + 1, width_offset - 2); + draw_col_separator_no_space(buffer, line_offset + pos + 1, width_offset - 2); } // Write the horizontal 
lines for multiline annotations @@ -2260,13 +2260,23 @@ impl HumanEmitter { buffer.puts(*row_num, max_line_num_len + 1, "+ ", Style::Addition); } [] => { - draw_col_separator(buffer, *row_num, max_line_num_len + 1); + draw_col_separator_no_space(buffer, *row_num, max_line_num_len + 1); } _ => { buffer.puts(*row_num, max_line_num_len + 1, "~ ", Style::Addition); } } - buffer.append(*row_num, &normalize_whitespace(line_to_add), Style::NoStyle); + // LL | line_to_add + // ++^^^ + // | | + // | magic `3` + // `max_line_num_len` + buffer.puts( + *row_num, + max_line_num_len + 3, + &normalize_whitespace(line_to_add), + Style::NoStyle, + ); } else if let DisplaySuggestion::Add = show_code_change { buffer.puts(*row_num, 0, &self.maybe_anonymized(line_num), Style::LineNumber); buffer.puts(*row_num, max_line_num_len + 1, "+ ", Style::Addition); diff --git a/tests/ui/codemap_tests/huge_multispan_highlight.svg b/tests/ui/codemap_tests/huge_multispan_highlight.svg index f1e96583ff062..7b6dbb17c6f95 100644 --- a/tests/ui/codemap_tests/huge_multispan_highlight.svg +++ b/tests/ui/codemap_tests/huge_multispan_highlight.svg @@ -27,11 +27,11 @@ LL | let _ = match true { - | ---------- `match` arms have incompatible types + | ---------- `match` arms have incompatible types LL | true => ( - | _________________- + | _________________- LL | | // last line shown in multispan header @@ -41,11 +41,11 @@ LL | | ), - | |_________- this is found to be of type `()` + | |_________- this is found to be of type `()` LL | false => " - | __________________^ + | __________________^ ... | @@ -53,7 +53,7 @@ LL | | ", - | |_________^ expected `()`, found `&str` + | |_________^ expected `()`, found `&str` @@ -65,11 +65,11 @@ LL | let _ = match true { - | ---------- `match` arms have incompatible types + | ---------- `match` arms have incompatible types LL | true => ( - | _________________- + | _________________- LL | | @@ -81,11 +81,11 @@ LL | | ), - | |_________- this is found to be of type `{integer}` + | |_________- this is found to be of type `{integer}` LL | false => " - | __________________^ + | __________________^ LL | | @@ -99,7 +99,7 @@ LL | | ", - | |_________^ expected integer, found `&str` + | |_________^ expected integer, found `&str` diff --git a/tests/ui/error-emitter/highlighting.svg b/tests/ui/error-emitter/highlighting.svg index 1d82a97888ae0..be92c00c19bdc 100644 --- a/tests/ui/error-emitter/highlighting.svg +++ b/tests/ui/error-emitter/highlighting.svg @@ -29,11 +29,11 @@ LL | query(wrapped_fn); - | ----- ^^^^^^^^^^ one type is more general than the other + | ----- ^^^^^^^^^^ one type is more general than the other - | | + | | - | arguments to this function are incorrect + | arguments to this function are incorrect | @@ -49,13 +49,13 @@ LL | fn query(_: fn(Box<(dyn Any + Send + '_)>) -> Pin<Box<( - | ____^^^^^_- + | ____^^^^^_- LL | | dyn Future<Output = Result<Box<(dyn Any + 'static)>, String>> + Send + 'static LL | | )>>) {} - | |___- + | |___- diff --git a/tests/ui/error-emitter/highlighting.windows.svg b/tests/ui/error-emitter/highlighting.windows.svg index 88143f725a5bb..152245da9ddc1 100644 --- a/tests/ui/error-emitter/highlighting.windows.svg +++ b/tests/ui/error-emitter/highlighting.windows.svg @@ -30,11 +30,11 @@ LL | query(wrapped_fn); - | ----- ^^^^^^^^^^ one type is more general than the other + | ----- ^^^^^^^^^^ one type is more general than the other - | | + | | - | arguments to this function are incorrect + | arguments to this function are incorrect | @@ -50,13 +50,13 @@ LL | fn query(_: 
fn(Box<(dyn Any + Send + '_)>) -> Pin<Box<( - | ____^^^^^_- + | ____^^^^^_- LL | | dyn Future<Output = Result<Box<(dyn Any + 'static)>, String>> + Send + 'static LL | | )>>) {} - | |___- + | |___- diff --git a/tests/ui/error-emitter/multiline-multipart-suggestion.svg b/tests/ui/error-emitter/multiline-multipart-suggestion.svg index 26210fade74c8..c0fb98555ad86 100644 --- a/tests/ui/error-emitter/multiline-multipart-suggestion.svg +++ b/tests/ui/error-emitter/multiline-multipart-suggestion.svg @@ -29,7 +29,7 @@ LL | fn short(foo_bar: &Vec<&i32>) -> &i32 { - | ---------- ^ expected named lifetime parameter + | ---------- ^ expected named lifetime parameter | @@ -53,13 +53,13 @@ LL | foo_bar: &Vec<&i32>, - | ---------- + | ---------- LL | something_very_long_so_that_the_line_will_wrap_around__________: i32, LL | ) -> &i32 { - | ^ expected named lifetime parameter + | ^ expected named lifetime parameter | @@ -73,7 +73,7 @@ LL ~ foo_bar: &'a Vec<&'a i32>, - LL | something_very_long_so_that_the_line_will_wrap_around__________: i32, + LL | something_very_long_so_that_the_line_will_wrap_around__________: i32, LL ~ ) -> &'a i32 { @@ -89,7 +89,7 @@ LL | foo_bar: &Vec<&i32>) -> &i32 { - | ---------- ^ expected named lifetime parameter + | ---------- ^ expected named lifetime parameter | diff --git a/tests/ui/error-emitter/multiline-multipart-suggestion.windows.svg b/tests/ui/error-emitter/multiline-multipart-suggestion.windows.svg index 3fa9cc18f0de8..61b544001f08c 100644 --- a/tests/ui/error-emitter/multiline-multipart-suggestion.windows.svg +++ b/tests/ui/error-emitter/multiline-multipart-suggestion.windows.svg @@ -29,7 +29,7 @@ LL | fn short(foo_bar: &Vec<&i32>) -> &i32 { - | ---------- ^ expected named lifetime parameter + | ---------- ^ expected named lifetime parameter | @@ -53,13 +53,13 @@ LL | foo_bar: &Vec<&i32>, - | ---------- + | ---------- LL | something_very_long_so_that_the_line_will_wrap_around__________: i32, LL | ) -> &i32 { - | ^ expected named lifetime parameter + | ^ expected named lifetime parameter | @@ -73,7 +73,7 @@ LL ~ foo_bar: &'a Vec<&'a i32>, - LL | something_very_long_so_that_the_line_will_wrap_around__________: i32, + LL | something_very_long_so_that_the_line_will_wrap_around__________: i32, LL ~ ) -> &'a i32 { @@ -89,7 +89,7 @@ LL | foo_bar: &Vec<&i32>) -> &i32 { - | ---------- ^ expected named lifetime parameter + | ---------- ^ expected named lifetime parameter | diff --git a/tests/ui/imports/issue-59764.stderr b/tests/ui/imports/issue-59764.stderr index fe58eb97b8dbf..293c2a60d8033 100644 --- a/tests/ui/imports/issue-59764.stderr +++ b/tests/ui/imports/issue-59764.stderr @@ -208,9 +208,9 @@ LL | makro as foobar} help: a macro with this name exists at the root of the crate | LL ~ issue_59764::{makro as foobar, -LL | +LL | ... -LL | +LL | LL ~ foo::{baz} | diff --git a/tests/ui/issues/issue-22644.stderr b/tests/ui/issues/issue-22644.stderr index 0799e9ef11b14..7d8a0ff170a59 100644 --- a/tests/ui/issues/issue-22644.stderr +++ b/tests/ui/issues/issue-22644.stderr @@ -63,9 +63,9 @@ LL | 5); help: try comparing the cast value | LL ~ println!("{}", (a -LL | +LL | ... 
-LL | +LL | LL ~ usize) | diff --git a/tests/ui/lint/use_suggestion_json.stderr b/tests/ui/lint/use_suggestion_json.stderr index 16fb1682d4a69..acc36550642d6 100644 --- a/tests/ui/lint/use_suggestion_json.stderr +++ b/tests/ui/lint/use_suggestion_json.stderr @@ -384,7 +384,7 @@ mod foo { \u001b[0m \u001b[0m\u001b[0m\u001b[1m\u001b[38;5;12m--> \u001b[0m\u001b[0m$DIR/use_suggestion_json.rs:12:12\u001b[0m \u001b[0m \u001b[0m\u001b[0m\u001b[1m\u001b[38;5;12m|\u001b[0m \u001b[0m\u001b[1m\u001b[38;5;12mLL\u001b[0m\u001b[0m \u001b[0m\u001b[0m\u001b[1m\u001b[38;5;12m|\u001b[0m\u001b[0m \u001b[0m\u001b[0m let x: Iter;\u001b[0m -\u001b[0m \u001b[0m\u001b[0m\u001b[1m\u001b[38;5;12m| \u001b[0m\u001b[0m \u001b[0m\u001b[0m\u001b[1m\u001b[38;5;9m^^^^\u001b[0m\u001b[0m \u001b[0m\u001b[0m\u001b[1m\u001b[38;5;9mnot found in this scope\u001b[0m +\u001b[0m \u001b[0m\u001b[0m\u001b[1m\u001b[38;5;12m|\u001b[0m\u001b[0m \u001b[0m\u001b[0m\u001b[1m\u001b[38;5;9m^^^^\u001b[0m\u001b[0m \u001b[0m\u001b[0m\u001b[1m\u001b[38;5;9mnot found in this scope\u001b[0m \u001b[0m \u001b[0m\u001b[0m\u001b[1m\u001b[38;5;12m|\u001b[0m \u001b[0m\u001b[1m\u001b[38;5;14mhelp\u001b[0m\u001b[0m: consider importing one of these items\u001b[0m \u001b[0m \u001b[0m\u001b[0m\u001b[1m\u001b[38;5;12m|\u001b[0m diff --git a/tests/ui/macros/issue-118048.stderr b/tests/ui/macros/issue-118048.stderr index 6acf78f63b2bd..4dc5ef71fec69 100644 --- a/tests/ui/macros/issue-118048.stderr +++ b/tests/ui/macros/issue-118048.stderr @@ -12,7 +12,7 @@ help: use type parameters instead LL ~ fn foo(_: $ty, _: $ty) {} LL | } LL | } -LL | +LL | LL ~ foo!(T); | diff --git a/tests/ui/methods/method-call-err-msg.stderr b/tests/ui/methods/method-call-err-msg.stderr index 7d9b38fb29b5f..5a76449e9f95b 100644 --- a/tests/ui/methods/method-call-err-msg.stderr +++ b/tests/ui/methods/method-call-err-msg.stderr @@ -55,7 +55,7 @@ LL | / y.zero() LL | | .take() | | -^^^^ `Foo` is not an iterator | |______| - | + | | = note: the following trait bounds were not satisfied: `Foo: Iterator` diff --git a/tests/ui/str/str-escape.stderr b/tests/ui/str/str-escape.stderr index 00fe5444e1a4e..c4aee2a110a5b 100644 --- a/tests/ui/str/str-escape.stderr +++ b/tests/ui/str/str-escape.stderr @@ -15,7 +15,7 @@ LL | let s = c"foo\ LL | |   bar | | ^ whitespace symbol '\u{a0}' is not skipped | |___| - | + | warning: whitespace symbol '\u{c}' is not skipped --> $DIR/str-escape.rs:26:16 @@ -25,7 +25,7 @@ LL | let s = b"a\ LL | | b"; | | ^- whitespace symbol '\u{c}' is not skipped | |____| - | + | warning: 3 warnings emitted diff --git a/tests/ui/structs-enums/struct-rec/issue-17431-2.stderr b/tests/ui/structs-enums/struct-rec/issue-17431-2.stderr index cdf51632acd0e..e818409366d6a 100644 --- a/tests/ui/structs-enums/struct-rec/issue-17431-2.stderr +++ b/tests/ui/structs-enums/struct-rec/issue-17431-2.stderr @@ -11,7 +11,7 @@ help: insert some indirection (e.g., a `Box`, `Rc`, or `&`) to break the cycle | LL ~ struct Baz { q: Option> } LL | -LL | +LL | LL ~ struct Foo { q: Option> } | diff --git a/tests/ui/structs-enums/struct-rec/mutual-struct-recursion.stderr b/tests/ui/structs-enums/struct-rec/mutual-struct-recursion.stderr index 881bc2819369b..afe523939acd2 100644 --- a/tests/ui/structs-enums/struct-rec/mutual-struct-recursion.stderr +++ b/tests/ui/structs-enums/struct-rec/mutual-struct-recursion.stderr @@ -16,7 +16,7 @@ help: insert some indirection (e.g., a `Box`, `Rc`, or `&`) to break the cycle | LL ~ y: Box>, LL | } -LL | +LL | LL | struct B { LL ~ z: Box> | @@ -39,7 +39,7 @@ help: 
insert some indirection (e.g., a `Box`, `Rc`, or `&`) to break the cycle | LL ~ y: Option>>>, LL | } -LL | +LL | LL | struct D { LL ~ z: Option>>>, | diff --git a/tests/ui/suggestions/issue-99240-2.stderr b/tests/ui/suggestions/issue-99240-2.stderr index 00bffee652938..5b86bc9880f6a 100644 --- a/tests/ui/suggestions/issue-99240-2.stderr +++ b/tests/ui/suggestions/issue-99240-2.stderr @@ -8,7 +8,7 @@ LL | // Alias:: LL | || Unit(); | ||________^_- call expression requires function | |________| - | + | | help: `Alias::Unit` is a unit enum variant, and does not take parentheses to be constructed | diff --git a/tests/ui/typeck/issue-100285.stderr b/tests/ui/typeck/issue-100285.stderr index 9c8685a77127b..c0deb63af59ac 100644 --- a/tests/ui/typeck/issue-100285.stderr +++ b/tests/ui/typeck/issue-100285.stderr @@ -49,7 +49,7 @@ LL ~ return Some(4); LL | } else { LL ~ return Some(5); LL | } -LL | +LL | LL ~ } LL ~ None | diff --git a/tests/ui/typeck/issue-31173.stderr b/tests/ui/typeck/issue-31173.stderr index 0983147a5f0c5..9598bc61352fd 100644 --- a/tests/ui/typeck/issue-31173.stderr +++ b/tests/ui/typeck/issue-31173.stderr @@ -34,7 +34,7 @@ LL | | .cloned() LL | | .collect(); | | -^^^^^^^ method cannot be called due to unsatisfied trait bounds | |_________| - | + | | = note: the following trait bounds were not satisfied: `, {closure@$DIR/issue-31173.rs:7:21: 7:25}> as Iterator>::Item = &_` diff --git a/tests/ui/unboxed-closures/unboxed-closure-sugar-lifetime-elision.stderr b/tests/ui/unboxed-closures/unboxed-closure-sugar-lifetime-elision.stderr index b7e9e1baa7bb4..c0fce5c2aaadc 100644 --- a/tests/ui/unboxed-closures/unboxed-closure-sugar-lifetime-elision.stderr +++ b/tests/ui/unboxed-closures/unboxed-closure-sugar-lifetime-elision.stderr @@ -15,7 +15,7 @@ help: consider introducing a named lifetime parameter LL ~ fn main<'a>() { LL | eq::< dyn for<'a> Foo<(&'a isize,), Output=&'a isize>, ... 
-LL | +LL | LL ~ let _: dyn Foo(&'a isize, &'a usize) -> &'a usize; | diff --git a/tests/ui/variance/variance-regions-unused-indirect.stderr b/tests/ui/variance/variance-regions-unused-indirect.stderr index ec4d480baab78..8cdbb3c0f5ebd 100644 --- a/tests/ui/variance/variance-regions-unused-indirect.stderr +++ b/tests/ui/variance/variance-regions-unused-indirect.stderr @@ -16,7 +16,7 @@ help: insert some indirection (e.g., a `Box`, `Rc`, or `&`) to break the cycle | LL ~ Foo1(Box>) LL | } -LL | +LL | LL | enum Bar<'a> { LL ~ Bar1(Box>) | From ce48c60c99c33f6df9d2fe6c9e943da4be68f370 Mon Sep 17 00:00:00 2001 From: klensy Date: Fri, 26 Apr 2024 13:47:14 +0300 Subject: [PATCH 07/56] clippy: bless tests --- src/tools/clippy/tests/ui/from_over_into.stderr | 2 +- src/tools/clippy/tests/ui/let_and_return.stderr | 2 +- src/tools/clippy/tests/ui/manual_strip.stderr | 4 ++-- src/tools/clippy/tests/ui/result_map_unit_fn_unfixable.stderr | 2 +- src/tools/clippy/tests/ui/suspicious_doc_comments.stderr | 2 +- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/tools/clippy/tests/ui/from_over_into.stderr b/src/tools/clippy/tests/ui/from_over_into.stderr index 0649a6cb0f3ec..f913ae0bb5067 100644 --- a/src/tools/clippy/tests/ui/from_over_into.stderr +++ b/src/tools/clippy/tests/ui/from_over_into.stderr @@ -54,7 +54,7 @@ help: replace the `Into` implementation with `From` LL ~ impl core::convert::From for bool { LL ~ fn from(mut val: crate::ExplicitPaths) -> Self { LL ~ let in_closure = || val.0; -LL | +LL | LL ~ val.0 = false; LL ~ val.0 | diff --git a/src/tools/clippy/tests/ui/let_and_return.stderr b/src/tools/clippy/tests/ui/let_and_return.stderr index f614a5739a860..ff5962ec196e6 100644 --- a/src/tools/clippy/tests/ui/let_and_return.stderr +++ b/src/tools/clippy/tests/ui/let_and_return.stderr @@ -71,7 +71,7 @@ LL | result help: return the expression directly | LL ~ -LL | +LL | LL ~ (match self { LL + E::A(x) => x, LL + E::B(x) => x, diff --git a/src/tools/clippy/tests/ui/manual_strip.stderr b/src/tools/clippy/tests/ui/manual_strip.stderr index d2d4f765310b9..a70c988a0549e 100644 --- a/src/tools/clippy/tests/ui/manual_strip.stderr +++ b/src/tools/clippy/tests/ui/manual_strip.stderr @@ -17,7 +17,7 @@ LL ~ if let Some() = s.strip_prefix("ab") { LL ~ str::to_string(); LL | LL ~ .to_string(); -LL | +LL | LL ~ str::to_string(); LL ~ .to_string(); | @@ -39,7 +39,7 @@ LL ~ if let Some() = s.strip_suffix("bc") { LL ~ str::to_string(); LL | LL ~ .to_string(); -LL | +LL | LL ~ str::to_string(); LL ~ .to_string(); | diff --git a/src/tools/clippy/tests/ui/result_map_unit_fn_unfixable.stderr b/src/tools/clippy/tests/ui/result_map_unit_fn_unfixable.stderr index fa2ac7a1b37e8..d69c86c70e291 100644 --- a/src/tools/clippy/tests/ui/result_map_unit_fn_unfixable.stderr +++ b/src/tools/clippy/tests/ui/result_map_unit_fn_unfixable.stderr @@ -27,7 +27,7 @@ LL | || do_nothing(value) LL | || }); | ||______^- help: try: `if let Ok(value) = x.field { ... }` | |______| - | + | error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()` --> tests/ui/result_map_unit_fn_unfixable.rs:37:5 diff --git a/src/tools/clippy/tests/ui/suspicious_doc_comments.stderr b/src/tools/clippy/tests/ui/suspicious_doc_comments.stderr index b54309b44d5d8..f12053b1595a1 100644 --- a/src/tools/clippy/tests/ui/suspicious_doc_comments.stderr +++ b/src/tools/clippy/tests/ui/suspicious_doc_comments.stderr @@ -85,7 +85,7 @@ LL | | ///! 
b help: use an inner doc comment to document the parent module or crate | LL ~ //! a -LL | +LL | LL ~ //! b | From 163ea3328b771a7092c1345de6a1122158651655 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Ber=C3=A1nek?= Date: Sat, 27 Apr 2024 11:36:42 +0200 Subject: [PATCH 08/56] Move `TOOLSTATE_REPO` to top-level env --- .github/workflows/ci.yml | 3 ++- src/ci/github-actions/ci.yml | 4 +++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 231bb971713f2..3a607defdb2ca 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -34,6 +34,8 @@ defaults: concurrency: group: "${{ github.workflow }}-${{ ((github.ref == 'refs/heads/try' || github.ref == 'refs/heads/try-perf') && github.sha) || github.ref }}" cancel-in-progress: true +env: + TOOLSTATE_REPO: "https://github.com/rust-lang-nursery/rust-toolstate" jobs: calculate_matrix: name: Calculate job matrix @@ -61,7 +63,6 @@ jobs: HEAD_SHA: "${{ github.event.pull_request.head.sha || github.sha }}" DOCKER_TOKEN: "${{ secrets.GITHUB_TOKEN }}" SCCACHE_BUCKET: rust-lang-ci-sccache2 - TOOLSTATE_REPO: "https://github.com/rust-lang-nursery/rust-toolstate" CACHE_DOMAIN: ci-caches.rust-lang.org continue-on-error: "${{ matrix.continue_on_error || false }}" strategy: diff --git a/src/ci/github-actions/ci.yml b/src/ci/github-actions/ci.yml index 98b27950fbb0f..a96170089f0c6 100644 --- a/src/ci/github-actions/ci.yml +++ b/src/ci/github-actions/ci.yml @@ -84,6 +84,9 @@ concurrency: group: ${{ github.workflow }}-${{ ((github.ref == 'refs/heads/try' || github.ref == 'refs/heads/try-perf') && github.sha) || github.ref }} cancel-in-progress: true +env: + TOOLSTATE_REPO: https://github.com/rust-lang-nursery/rust-toolstate + jobs: # The job matrix for `calculate_matrix` is defined in src/ci/github-actions/jobs.yml. # It calculates which jobs should be executed, based on the data of the ${{ github }} context. 
@@ -114,7 +117,6 @@ jobs: HEAD_SHA: ${{ github.event.pull_request.head.sha || github.sha }} DOCKER_TOKEN: ${{ secrets.GITHUB_TOKEN }} SCCACHE_BUCKET: rust-lang-ci-sccache2 - TOOLSTATE_REPO: https://github.com/rust-lang-nursery/rust-toolstate CACHE_DOMAIN: ci-caches.rust-lang.org continue-on-error: ${{ matrix.continue_on_error || false }} strategy: From a700897d5a46c03f9cac060b6854de44ac33733b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Ber=C3=A1nek?= Date: Sat, 27 Apr 2024 15:48:35 +0200 Subject: [PATCH 09/56] Add missing checkout step --- .github/workflows/ci.yml | 4 ++++ src/ci/github-actions/ci.yml | 4 ++++ 2 files changed, 8 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3a607defdb2ca..d691b72488b2a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -177,6 +177,10 @@ jobs: - job if: "success() && github.event_name == 'push' && github.ref == 'refs/heads/auto' && github.repository == 'rust-lang-ci/rust'" steps: + - name: checkout the source code + uses: actions/checkout@v4 + with: + fetch-depth: 2 - name: publish toolstate run: src/ci/publish_toolstate.sh shell: bash diff --git a/src/ci/github-actions/ci.yml b/src/ci/github-actions/ci.yml index a96170089f0c6..95d2225a6b1c6 100644 --- a/src/ci/github-actions/ci.yml +++ b/src/ci/github-actions/ci.yml @@ -286,6 +286,10 @@ jobs: if: "success() && github.event_name == 'push' && github.ref == 'refs/heads/auto' && github.repository == 'rust-lang-ci/rust'" <<: *base-outcome-job steps: + - name: checkout the source code + uses: actions/checkout@v4 + with: + fetch-depth: 2 - name: publish toolstate run: src/ci/publish_toolstate.sh shell: bash From 6faedd381b045079245c66970e66a5aa79af3475 Mon Sep 17 00:00:00 2001 From: yukang Date: Sat, 27 Apr 2024 16:21:10 +0800 Subject: [PATCH 10/56] Fix the assertion crash from rustdoc document indent widths --- compiler/rustc_resolve/src/rustdoc.rs | 25 ++++++++++++++----------- tests/rustdoc/resolve-ice-124363.rs | 7 +++++++ 2 files changed, 21 insertions(+), 11 deletions(-) create mode 100644 tests/rustdoc/resolve-ice-124363.rs diff --git a/compiler/rustc_resolve/src/rustdoc.rs b/compiler/rustc_resolve/src/rustdoc.rs index 0bc7579918ccf..aace0f3fef9f1 100644 --- a/compiler/rustc_resolve/src/rustdoc.rs +++ b/compiler/rustc_resolve/src/rustdoc.rs @@ -6,8 +6,8 @@ use rustc_middle::ty::TyCtxt; use rustc_span::def_id::DefId; use rustc_span::symbol::{kw, sym, Symbol}; use rustc_span::{InnerSpan, Span, DUMMY_SP}; +use std::mem; use std::ops::Range; -use std::{cmp, mem}; #[derive(Clone, Copy, PartialEq, Eq, Debug)] pub enum DocFragmentKind { @@ -129,17 +129,20 @@ pub fn unindent_doc_fragments(docs: &mut [DocFragment]) { let Some(min_indent) = docs .iter() .map(|fragment| { - fragment.doc.as_str().lines().fold(usize::MAX, |min_indent, line| { - if line.chars().all(|c| c.is_whitespace()) { - min_indent - } else { + fragment + .doc + .as_str() + .lines() + .filter(|line| line.chars().any(|c| !c.is_whitespace())) + .map(|line| { // Compare against either space or tab, ignoring whether they are // mixed or not. 
let whitespace = line.chars().take_while(|c| *c == ' ' || *c == '\t').count(); - cmp::min(min_indent, whitespace) - + if fragment.kind == DocFragmentKind::SugaredDoc { 0 } else { add } - } - }) + whitespace + + (if fragment.kind == DocFragmentKind::SugaredDoc { 0 } else { add }) + }) + .min() + .unwrap_or(usize::MAX) }) .min() else { @@ -151,13 +154,13 @@ pub fn unindent_doc_fragments(docs: &mut [DocFragment]) { continue; } - let min_indent = if fragment.kind != DocFragmentKind::SugaredDoc && min_indent > 0 { + let indent = if fragment.kind != DocFragmentKind::SugaredDoc && min_indent > 0 { min_indent - add } else { min_indent }; - fragment.indent = min_indent; + fragment.indent = indent; } } diff --git a/tests/rustdoc/resolve-ice-124363.rs b/tests/rustdoc/resolve-ice-124363.rs new file mode 100644 index 0000000000000..111916cc59050 --- /dev/null +++ b/tests/rustdoc/resolve-ice-124363.rs @@ -0,0 +1,7 @@ +/** +*/ +pub mod A { + #![doc = "{ + Foo { }, + }"] +} From a0a84429a5f833381e376c216e54942838106f51 Mon Sep 17 00:00:00 2001 From: George Bateman Date: Sun, 28 Apr 2024 14:13:08 +0100 Subject: [PATCH 11/56] Remove direct dependencies on lazy_static, once_cell and byteorder The functionality of all three crates is now available in the standard library. --- Cargo.lock | 8 ------- src/librustdoc/Cargo.toml | 2 -- src/librustdoc/html/markdown.rs | 6 ++--- .../html/render/search_index/encode.rs | 23 +++++++++---------- src/tools/jsondocck/Cargo.toml | 1 - src/tools/jsondocck/src/main.rs | 9 ++++---- src/tools/linkchecker/Cargo.toml | 1 - src/tools/linkchecker/main.rs | 12 ++++++---- src/tools/miri/Cargo.toml | 1 - src/tools/miri/tests/ui.rs | 22 ++++++++++-------- src/tools/rustfmt/Cargo.toml | 1 - src/tools/rustfmt/src/comment.rs | 20 +++++----------- src/tools/rustfmt/src/lib.rs | 10 +++++--- .../rustfmt/src/test/configuration_snippet.rs | 22 ++++++------------ src/tools/suggest-tests/Cargo.toml | 1 - src/tools/suggest-tests/src/lib.rs | 4 ++-- .../suggest-tests/src/static_suggestions.rs | 8 +++++-- src/tools/tidy/Cargo.toml | 1 - src/tools/tidy/src/features.rs | 19 ++++----------- src/tools/tidy/src/fluent_alphabetical.rs | 8 +++---- src/tools/tidy/src/fluent_used.rs | 7 +----- src/tools/tidy/src/lib.rs | 7 ++++++ src/tools/tidy/src/style.rs | 18 +++++---------- src/tools/tidy/src/ui_tests.rs | 10 ++------ 24 files changed, 91 insertions(+), 130 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ac6e7974a127e..233f4af1e24bc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2113,7 +2113,6 @@ dependencies = [ "fs-err", "getopts", "jsonpath_lib", - "once_cell", "regex", "serde_json", "shlex", @@ -2232,7 +2231,6 @@ name = "linkchecker" version = "0.1.0" dependencies = [ "html5ever", - "once_cell", "regex", ] @@ -2491,7 +2489,6 @@ dependencies = [ "directories", "getrandom", "jemalloc-sys", - "lazy_static", "libc", "libffi", "libloading", @@ -4791,12 +4788,10 @@ dependencies = [ "arrayvec", "askama", "base64", - "byteorder", "expect-test", "indexmap", "itertools 0.12.1", "minifier", - "once_cell", "regex", "rustdoc-json-types", "serde", @@ -4889,7 +4884,6 @@ dependencies = [ "getopts", "ignore", "itertools 0.11.0", - "lazy_static", "regex", "rustfmt-config_proc_macro", "serde", @@ -5351,7 +5345,6 @@ version = "0.1.0" dependencies = [ "build_helper", "glob", - "once_cell", ] [[package]] @@ -5596,7 +5589,6 @@ version = "0.1.0" dependencies = [ "cargo_metadata 0.15.4", "ignore", - "lazy_static", "miropt-test-tools", "regex", "rustc-hash", diff --git a/src/librustdoc/Cargo.toml 
b/src/librustdoc/Cargo.toml index 9a23811ed3f97..31222f213d800 100644 --- a/src/librustdoc/Cargo.toml +++ b/src/librustdoc/Cargo.toml @@ -10,11 +10,9 @@ path = "lib.rs" arrayvec = { version = "0.7", default-features = false } askama = { version = "0.12", default-features = false, features = ["config"] } base64 = "0.21.7" -byteorder = "1.5" itertools = "0.12" indexmap = "2" minifier = "0.3.0" -once_cell = "1.10.0" regex = "1" rustdoc-json-types = { path = "../rustdoc-json-types" } serde_json = "1.0" diff --git a/src/librustdoc/html/markdown.rs b/src/librustdoc/html/markdown.rs index 64f0e096cd02e..5c5651f3ef0e6 100644 --- a/src/librustdoc/html/markdown.rs +++ b/src/librustdoc/html/markdown.rs @@ -35,13 +35,13 @@ use rustc_resolve::rustdoc::may_be_doc_link; use rustc_span::edition::Edition; use rustc_span::{Span, Symbol}; -use once_cell::sync::Lazy; use std::borrow::Cow; use std::collections::VecDeque; use std::fmt::Write; use std::iter::Peekable; use std::ops::{ControlFlow, Range}; use std::str::{self, CharIndices}; +use std::sync::OnceLock; use crate::clean::RenderedLink; use crate::doctest; @@ -1994,7 +1994,7 @@ pub struct IdMap { } // The map is pre-initialized and cloned each time to avoid reinitializing it repeatedly. -static DEFAULT_ID_MAP: Lazy, usize>> = Lazy::new(|| init_id_map()); +static DEFAULT_ID_MAP: OnceLock, usize>> = OnceLock::new(); fn init_id_map() -> FxHashMap, usize> { let mut map = FxHashMap::default(); @@ -2051,7 +2051,7 @@ fn init_id_map() -> FxHashMap, usize> { impl IdMap { pub fn new() -> Self { - IdMap { map: DEFAULT_ID_MAP.clone() } + IdMap { map: DEFAULT_ID_MAP.get_or_init(init_id_map).clone() } } pub(crate) fn derive + ToString>(&mut self, candidate: S) -> String { diff --git a/src/librustdoc/html/render/search_index/encode.rs b/src/librustdoc/html/render/search_index/encode.rs index 54407c614c4c7..8d715814faad7 100644 --- a/src/librustdoc/html/render/search_index/encode.rs +++ b/src/librustdoc/html/render/search_index/encode.rs @@ -166,13 +166,12 @@ pub(crate) fn write_bitmap_to_bytes( containers.push(container); } // https://github.com/RoaringBitmap/RoaringFormatSpec - use byteorder::{WriteBytesExt, LE}; const SERIAL_COOKIE_NO_RUNCONTAINER: u32 = 12346; const SERIAL_COOKIE: u32 = 12347; const NO_OFFSET_THRESHOLD: u32 = 4; let size: u32 = containers.len().try_into().unwrap(); let start_offset = if has_run { - out.write_u32::(SERIAL_COOKIE | ((size - 1) << 16))?; + out.write_all(&u32::to_le_bytes(SERIAL_COOKIE | ((size - 1) << 16)))?; for set in containers.chunks(8) { let mut b = 0; for (i, container) in set.iter().enumerate() { @@ -180,7 +179,7 @@ pub(crate) fn write_bitmap_to_bytes( b |= 1 << i; } } - out.write_u8(b)?; + out.write_all(&[b])?; } if size < NO_OFFSET_THRESHOLD { 4 + 4 * size + ((size + 7) / 8) @@ -188,21 +187,21 @@ pub(crate) fn write_bitmap_to_bytes( 4 + 8 * size + ((size + 7) / 8) } } else { - out.write_u32::(SERIAL_COOKIE_NO_RUNCONTAINER)?; - out.write_u32::(containers.len().try_into().unwrap())?; + out.write_all(&u32::to_le_bytes(SERIAL_COOKIE_NO_RUNCONTAINER))?; + out.write_all(&u32::to_le_bytes(containers.len().try_into().unwrap()))?; 4 + 4 + 4 * size + 4 * size }; for (&key, container) in keys.iter().zip(&containers) { // descriptive header let key: u32 = key.into(); let count: u32 = container.popcount() - 1; - out.write_u32::((count << 16) | key)?; + out.write_all(&u32::to_le_bytes((count << 16) | key))?; } if !has_run || size >= NO_OFFSET_THRESHOLD { // offset header let mut starting_offset = start_offset; for container in &containers { 
- out.write_u32::(starting_offset)?; + out.write_all(&u32::to_le_bytes(starting_offset))?; starting_offset += match container { Container::Bits(_) => 8192u32, Container::Array(array) => u32::try_from(array.len()).unwrap() * 2, @@ -214,19 +213,19 @@ pub(crate) fn write_bitmap_to_bytes( match container { Container::Bits(bits) => { for chunk in bits.iter() { - out.write_u64::(*chunk)?; + out.write_all(&u64::to_le_bytes(*chunk))?; } } Container::Array(array) => { for value in array.iter() { - out.write_u16::(*value)?; + out.write_all(&u16::to_le_bytes(*value))?; } } Container::Run(runs) => { - out.write_u16::((runs.len()).try_into().unwrap())?; + out.write_all(&u16::to_le_bytes(runs.len().try_into().unwrap()))?; for (start, lenm1) in runs.iter().copied() { - out.write_u16::(start)?; - out.write_u16::(lenm1)?; + out.write_all(&u16::to_le_bytes(start))?; + out.write_all(&u16::to_le_bytes(lenm1))?; } } } diff --git a/src/tools/jsondocck/Cargo.toml b/src/tools/jsondocck/Cargo.toml index 6326a9b1e79c3..e1eb6d0566513 100644 --- a/src/tools/jsondocck/Cargo.toml +++ b/src/tools/jsondocck/Cargo.toml @@ -10,4 +10,3 @@ regex = "1.4" shlex = "1.0" serde_json = "1.0" fs-err = "2.5.0" -once_cell = "1.0" diff --git a/src/tools/jsondocck/src/main.rs b/src/tools/jsondocck/src/main.rs index e3d05ec83159d..688b403bf0e0a 100644 --- a/src/tools/jsondocck/src/main.rs +++ b/src/tools/jsondocck/src/main.rs @@ -1,8 +1,8 @@ use jsonpath_lib::select; -use once_cell::sync::Lazy; use regex::{Regex, RegexBuilder}; use serde_json::Value; use std::borrow::Cow; +use std::sync::OnceLock; use std::{env, fmt, fs}; mod cache; @@ -95,7 +95,8 @@ impl fmt::Display for CommandKind { } } -static LINE_PATTERN: Lazy = Lazy::new(|| { +static LINE_PATTERN: OnceLock = OnceLock::new(); +fn line_pattern() -> Regex { RegexBuilder::new( r#" \s(?P!?)@(?P!?) @@ -107,7 +108,7 @@ static LINE_PATTERN: Lazy = Lazy::new(|| { .unicode(true) .build() .unwrap() -}); +} fn print_err(msg: &str, lineno: usize) { eprintln!("Invalid command: {} on line {}", msg, lineno) @@ -123,7 +124,7 @@ fn get_commands(template: &str) -> Result, ()> { for (lineno, line) in file.split('\n').enumerate() { let lineno = lineno + 1; - let cap = match LINE_PATTERN.captures(line) { + let cap = match LINE_PATTERN.get_or_init(line_pattern).captures(line) { Some(c) => c, None => continue, }; diff --git a/src/tools/linkchecker/Cargo.toml b/src/tools/linkchecker/Cargo.toml index 318a69ab8359e..05049aabc7d9c 100644 --- a/src/tools/linkchecker/Cargo.toml +++ b/src/tools/linkchecker/Cargo.toml @@ -9,5 +9,4 @@ path = "main.rs" [dependencies] regex = "1" -once_cell = "1" html5ever = "0.26.0" diff --git a/src/tools/linkchecker/main.rs b/src/tools/linkchecker/main.rs index f49c6e79f13c5..32f935de73025 100644 --- a/src/tools/linkchecker/main.rs +++ b/src/tools/linkchecker/main.rs @@ -18,8 +18,6 @@ use html5ever::tendril::ByteTendril; use html5ever::tokenizer::{ BufferQueue, TagToken, Token, TokenSink, TokenSinkResult, Tokenizer, TokenizerOpts, }; -use once_cell::sync::Lazy; -use regex::Regex; use std::cell::RefCell; use std::collections::{HashMap, HashSet}; use std::env; @@ -69,8 +67,12 @@ const INTRA_DOC_LINK_EXCEPTIONS: &[(&str, &[&str])] = &[ ]; -static BROKEN_INTRA_DOC_LINK: Lazy = - Lazy::new(|| Regex::new(r#"\[(.*)\]"#).unwrap()); +macro_rules! static_regex { + ($re:literal) => {{ + static RE: ::std::sync::OnceLock<::regex::Regex> = ::std::sync::OnceLock::new(); + RE.get_or_init(|| ::regex::Regex::new($re).unwrap()) + }}; +} macro_rules! 
t { ($e:expr) => { @@ -373,7 +375,7 @@ impl Checker { // Search for intra-doc links that rustdoc didn't warn about // NOTE: only looks at one line at a time; in practice this should find most links for (i, line) in source.lines().enumerate() { - for broken_link in BROKEN_INTRA_DOC_LINK.captures_iter(line) { + for broken_link in static_regex!(r#"\[(.*)\]"#).captures_iter(line) { if is_intra_doc_exception(file, &broken_link[1]) { report.intra_doc_exceptions += 1; } else { diff --git a/src/tools/miri/Cargo.toml b/src/tools/miri/Cargo.toml index b00dae784d22c..ac8b4b37e9200 100644 --- a/src/tools/miri/Cargo.toml +++ b/src/tools/miri/Cargo.toml @@ -46,7 +46,6 @@ colored = "2" ui_test = "0.21.1" rustc_version = "0.4" regex = "1.5.5" -lazy_static = "1.4.0" tempfile = "3" [package.metadata.rust-analyzer] diff --git a/src/tools/miri/tests/ui.rs b/src/tools/miri/tests/ui.rs index ace0da0125361..efeefbe29fbce 100644 --- a/src/tools/miri/tests/ui.rs +++ b/src/tools/miri/tests/ui.rs @@ -1,6 +1,7 @@ use std::ffi::OsString; use std::num::NonZeroUsize; use std::path::{Path, PathBuf}; +use std::sync::OnceLock; use std::{env, process::Command}; use colored::*; @@ -67,8 +68,8 @@ fn miri_config(target: &str, path: &str, mode: Mode, with_dependencies: bool) -> let mut config = Config { target: Some(target.to_owned()), - stderr_filters: STDERR.clone(), - stdout_filters: STDOUT.clone(), + stderr_filters: stderr_filters().into(), + stdout_filters: stdout_filters().into(), mode, program, out_dir: PathBuf::from(std::env::var_os("CARGO_TARGET_DIR").unwrap()).join("ui"), @@ -174,15 +175,18 @@ fn run_tests( } macro_rules! regexes { - ($name:ident: $($regex:expr => $replacement:expr,)*) => {lazy_static::lazy_static! { - static ref $name: Vec<(Match, &'static [u8])> = vec![ - $((Regex::new($regex).unwrap().into(), $replacement.as_bytes()),)* - ]; - }}; + ($name:ident: $($regex:expr => $replacement:expr,)*) => { + fn $name() -> &'static [(Match, &'static [u8])] { + static S: OnceLock> = OnceLock::new(); + S.get_or_init(|| vec![ + $((Regex::new($regex).unwrap().into(), $replacement.as_bytes()),)* + ]) + } + }; } regexes! { - STDOUT: + stdout_filters: // Windows file paths r"\\" => "/", // erase borrow tags @@ -191,7 +195,7 @@ regexes! { } regexes! { - STDERR: + stderr_filters: // erase line and column info r"\.rs:[0-9]+:[0-9]+(: [0-9]+:[0-9]+)?" => ".rs:LL:CC", // erase alloc ids diff --git a/src/tools/rustfmt/Cargo.toml b/src/tools/rustfmt/Cargo.toml index 032b9b548104b..b18c80654a077 100644 --- a/src/tools/rustfmt/Cargo.toml +++ b/src/tools/rustfmt/Cargo.toml @@ -44,7 +44,6 @@ dirs = "4.0" getopts = "0.2" ignore = "0.4" itertools = "0.11" -lazy_static = "1.4" regex = "1.7" serde = { version = "1.0.160", features = ["derive"] } serde_json = "1.0" diff --git a/src/tools/rustfmt/src/comment.rs b/src/tools/rustfmt/src/comment.rs index 7d1b0384431be..099e12f86dd26 100644 --- a/src/tools/rustfmt/src/comment.rs +++ b/src/tools/rustfmt/src/comment.rs @@ -3,8 +3,6 @@ use std::{borrow::Cow, iter}; use itertools::{multipeek, MultiPeek}; -use lazy_static::lazy_static; -use regex::Regex; use rustc_span::Span; use crate::config::Config; @@ -17,17 +15,6 @@ use crate::utils::{ }; use crate::{ErrorKind, FormattingError}; -lazy_static! { - /// A regex matching reference doc links. - /// - /// ```markdown - /// /// An [example]. 
- /// /// - /// /// [example]: this::is::a::link - /// ``` - static ref REFERENCE_LINK_URL: Regex = Regex::new(r"^\[.+\]\s?:").unwrap(); -} - fn is_custom_comment(comment: &str) -> bool { if !comment.starts_with("//") { false @@ -980,11 +967,16 @@ fn trim_custom_comment_prefix(s: &str) -> String { /// Returns `true` if the given string MAY include URLs or alike. fn has_url(s: &str) -> bool { // This function may return false positive, but should get its job done in most cases. + // The regex is indended to capture text such as the below. + // + // /// An [example]. + // /// + // /// [example]: this::is::a::link s.contains("https://") || s.contains("http://") || s.contains("ftp://") || s.contains("file://") - || REFERENCE_LINK_URL.is_match(s) + || static_regex!(r"^\[.+\]\s?:").is_match(s) } /// Returns true if the given string may be part of a Markdown table. diff --git a/src/tools/rustfmt/src/lib.rs b/src/tools/rustfmt/src/lib.rs index 877d057a34baa..e07720f30ca5c 100644 --- a/src/tools/rustfmt/src/lib.rs +++ b/src/tools/rustfmt/src/lib.rs @@ -5,9 +5,6 @@ #![allow(clippy::match_like_matches_macro)] #![allow(unreachable_pub)] -#[cfg(test)] -#[macro_use] -extern crate lazy_static; #[macro_use] extern crate tracing; @@ -62,6 +59,13 @@ pub use crate::rustfmt_diff::{ModifiedChunk, ModifiedLines}; #[macro_use] mod utils; +macro_rules! static_regex { + ($re:literal) => {{ + static RE: ::std::sync::OnceLock<::regex::Regex> = ::std::sync::OnceLock::new(); + RE.get_or_init(|| ::regex::Regex::new($re).unwrap()) + }}; +} + mod attr; mod chains; mod closures; diff --git a/src/tools/rustfmt/src/test/configuration_snippet.rs b/src/tools/rustfmt/src/test/configuration_snippet.rs index 80b61c88a001b..a195b306da386 100644 --- a/src/tools/rustfmt/src/test/configuration_snippet.rs +++ b/src/tools/rustfmt/src/test/configuration_snippet.rs @@ -24,19 +24,6 @@ impl ConfigurationSection { fn get_section>( file: &mut Enumerate, ) -> Option { - lazy_static! 
{ - static ref CONFIG_NAME_REGEX: regex::Regex = - regex::Regex::new(r"^## `([^`]+)`").expect("failed creating configuration pattern"); - // Configuration values, which will be passed to `from_str`: - // - // - must be prefixed with `####` - // - must be wrapped in backticks - // - may by wrapped in double quotes (which will be stripped) - static ref CONFIG_VALUE_REGEX: regex::Regex = - regex::Regex::new(r#"^#### `"?([^`]+?)"?`"#) - .expect("failed creating configuration value pattern"); - } - loop { match file.next() { Some((i, line)) => { @@ -53,9 +40,14 @@ impl ConfigurationSection { let start_line = (i + 2) as u32; return Some(ConfigurationSection::CodeBlock((block, start_line))); - } else if let Some(c) = CONFIG_NAME_REGEX.captures(&line) { + } else if let Some(c) = static_regex!(r"^## `([^`]+)`").captures(&line) { return Some(ConfigurationSection::ConfigName(String::from(&c[1]))); - } else if let Some(c) = CONFIG_VALUE_REGEX.captures(&line) { + } else if let Some(c) = static_regex!(r#"^#### `"?([^`]+?)"?`"#).captures(&line) { + // Configuration values, which will be passed to `from_str` + // + // - must be prefixed with `####` + // - must be wrapped in backticks + // - may by wrapped in double quotes (which will be stripped) return Some(ConfigurationSection::ConfigValue(String::from(&c[1]))); } } diff --git a/src/tools/suggest-tests/Cargo.toml b/src/tools/suggest-tests/Cargo.toml index f4f4d548bb79e..7c048d53a505f 100644 --- a/src/tools/suggest-tests/Cargo.toml +++ b/src/tools/suggest-tests/Cargo.toml @@ -6,4 +6,3 @@ edition = "2021" [dependencies] glob = "0.3.0" build_helper = { version = "0.1.0", path = "../build_helper" } -once_cell = "1.17.1" diff --git a/src/tools/suggest-tests/src/lib.rs b/src/tools/suggest-tests/src/lib.rs index 1c1d9d0333ddb..8932403ac9fd5 100644 --- a/src/tools/suggest-tests/src/lib.rs +++ b/src/tools/suggest-tests/src/lib.rs @@ -5,7 +5,7 @@ use std::{ use dynamic_suggestions::DYNAMIC_SUGGESTIONS; use glob::Pattern; -use static_suggestions::STATIC_SUGGESTIONS; +use static_suggestions::static_suggestions; mod dynamic_suggestions; mod static_suggestions; @@ -33,7 +33,7 @@ pub fn get_suggestions>(modified_files: &[T]) -> Vec { let mut suggestions = Vec::new(); // static suggestions - for (globs, sugs) in STATIC_SUGGESTIONS.iter() { + for (globs, sugs) in static_suggestions().iter() { let globs = globs .iter() .map(|glob| Pattern::new(glob).expect("Found invalid glob pattern!")) diff --git a/src/tools/suggest-tests/src/static_suggestions.rs b/src/tools/suggest-tests/src/static_suggestions.rs index fbd265ea42a2e..a34a4b16ec1b6 100644 --- a/src/tools/suggest-tests/src/static_suggestions.rs +++ b/src/tools/suggest-tests/src/static_suggestions.rs @@ -1,10 +1,14 @@ use crate::{sug, Suggestion}; +use std::sync::OnceLock; // FIXME: perhaps this could use `std::lazy` when it is stablizied macro_rules! static_suggestions { ($( [ $( $glob:expr ),* $(,)? ] => [ $( $suggestion:expr ),* $(,)? ] ),* $(,)? 
) => { - pub(crate) const STATIC_SUGGESTIONS: ::once_cell::unsync::Lazy, Vec)>> - = ::once_cell::unsync::Lazy::new(|| vec![ $( (vec![ $($glob),* ], vec![ $($suggestion),* ]) ),*]); + pub(crate) fn static_suggestions() -> &'static [(Vec<&'static str>, Vec)] + { + static S: OnceLock, Vec)>> = OnceLock::new(); + S.get_or_init(|| vec![ $( (vec![ $($glob),* ], vec![ $($suggestion),* ]) ),*]) + } } } diff --git a/src/tools/tidy/Cargo.toml b/src/tools/tidy/Cargo.toml index 96866e7424889..63963b0bd1ced 100644 --- a/src/tools/tidy/Cargo.toml +++ b/src/tools/tidy/Cargo.toml @@ -8,7 +8,6 @@ autobins = false cargo_metadata = "0.15" regex = "1" miropt-test-tools = { path = "../miropt-test-tools" } -lazy_static = "1" walkdir = "2" ignore = "0.4.18" semver = "1.0" diff --git a/src/tools/tidy/src/features.rs b/src/tools/tidy/src/features.rs index d673ce7a736d9..3e84bf3c34be5 100644 --- a/src/tools/tidy/src/features.rs +++ b/src/tools/tidy/src/features.rs @@ -17,8 +17,6 @@ use std::fs; use std::num::NonZeroU32; use std::path::{Path, PathBuf}; -use regex::Regex; - #[cfg(test)] mod tests; @@ -251,16 +249,10 @@ fn format_features<'a>( } fn find_attr_val<'a>(line: &'a str, attr: &str) -> Option<&'a str> { - lazy_static::lazy_static! { - static ref ISSUE: Regex = Regex::new(r#"issue\s*=\s*"([^"]*)""#).unwrap(); - static ref FEATURE: Regex = Regex::new(r#"feature\s*=\s*"([^"]*)""#).unwrap(); - static ref SINCE: Regex = Regex::new(r#"since\s*=\s*"([^"]*)""#).unwrap(); - } - let r = match attr { - "issue" => &*ISSUE, - "feature" => &*FEATURE, - "since" => &*SINCE, + "issue" => static_regex!(r#"issue\s*=\s*"([^"]*)""#), + "feature" => static_regex!(r#"feature\s*=\s*"([^"]*)""#), + "since" => static_regex!(r#"since\s*=\s*"([^"]*)""#), _ => unimplemented!("{attr} not handled"), }; @@ -528,11 +520,8 @@ fn map_lib_features( }}; } - lazy_static::lazy_static! { - static ref COMMENT_LINE: Regex = Regex::new(r"^\s*//").unwrap(); - } // exclude commented out lines - if COMMENT_LINE.is_match(line) { + if static_regex!(r"^\s*//").is_match(line) { continue; } diff --git a/src/tools/tidy/src/fluent_alphabetical.rs b/src/tools/tidy/src/fluent_alphabetical.rs index 9803b6eab2db5..a85367984deef 100644 --- a/src/tools/tidy/src/fluent_alphabetical.rs +++ b/src/tools/tidy/src/fluent_alphabetical.rs @@ -6,8 +6,8 @@ use std::{fs::OpenOptions, io::Write, path::Path}; use regex::Regex; -lazy_static::lazy_static! { - static ref MESSAGE: Regex = Regex::new(r#"(?m)^([a-zA-Z0-9_]+)\s*=\s*"#).unwrap(); +fn message() -> &'static Regex { + static_regex!(r#"(?m)^([a-zA-Z0-9_]+)\s*=\s*"#) } fn filter_fluent(path: &Path) -> bool { @@ -20,7 +20,7 @@ fn check_alphabetic( bad: &mut bool, all_defined_msgs: &mut HashMap, ) { - let mut matches = MESSAGE.captures_iter(fluent).peekable(); + let mut matches = message().captures_iter(fluent).peekable(); while let Some(m) = matches.next() { let name = m.get(1).unwrap(); if let Some(defined_filename) = all_defined_msgs.get(name.as_str()) { @@ -60,7 +60,7 @@ fn sort_messages( let mut chunks = vec![]; let mut cur = String::new(); for line in fluent.lines() { - if let Some(name) = MESSAGE.find(line) { + if let Some(name) = message().find(line) { if let Some(defined_filename) = all_defined_msgs.get(name.as_str()) { tidy_error!( bad, diff --git a/src/tools/tidy/src/fluent_used.rs b/src/tools/tidy/src/fluent_used.rs index b73e79cb38d94..8b6c6c18813d4 100644 --- a/src/tools/tidy/src/fluent_used.rs +++ b/src/tools/tidy/src/fluent_used.rs @@ -1,14 +1,9 @@ //! 
Checks that all Fluent messages appear at least twice use crate::walk::{filter_dirs, walk}; -use regex::Regex; use std::collections::HashMap; use std::path::Path; -lazy_static::lazy_static! { - static ref WORD: Regex = Regex::new(r"\w+").unwrap(); -} - fn filter_used_messages( contents: &str, msgs_not_appeared_yet: &mut HashMap, @@ -17,7 +12,7 @@ fn filter_used_messages( // we don't just check messages never appear in Rust files, // because messages can be used as parts of other fluent messages in Fluent files, // so we do checking messages appear only once in all Rust and Fluent files. - let mut matches = WORD.find_iter(contents); + let mut matches = static_regex!(r"\w+").find_iter(contents); while let Some(name) = matches.next() { if let Some((name, filename)) = msgs_not_appeared_yet.remove_entry(name.as_str()) { // if one msg appears for the first time, diff --git a/src/tools/tidy/src/lib.rs b/src/tools/tidy/src/lib.rs index 23f303276aa2d..c15081a56d1a8 100644 --- a/src/tools/tidy/src/lib.rs +++ b/src/tools/tidy/src/lib.rs @@ -5,6 +5,13 @@ use termcolor::WriteColor; +macro_rules! static_regex { + ($re:literal) => {{ + static RE: ::std::sync::OnceLock<::regex::Regex> = ::std::sync::OnceLock::new(); + RE.get_or_init(|| ::regex::Regex::new($re).unwrap()) + }}; +} + /// A helper macro to `unwrap` a result except also print out details like: /// /// * The expression that failed diff --git a/src/tools/tidy/src/style.rs b/src/tools/tidy/src/style.rs index a1445ce5896b0..6b664f02b53b9 100644 --- a/src/tools/tidy/src/style.rs +++ b/src/tools/tidy/src/style.rs @@ -18,7 +18,7 @@ // ignore-tidy-dbg use crate::walk::{filter_dirs, walk}; -use regex::{Regex, RegexSet}; +use regex::RegexSet; use rustc_hash::FxHashMap; use std::{ffi::OsStr, path::Path}; @@ -178,20 +178,14 @@ fn should_ignore(line: &str) -> bool { // Matches test annotations like `//~ ERROR text`. // This mirrors the regex in src/tools/compiletest/src/runtest.rs, please // update both if either are changed. - lazy_static::lazy_static! { - static ref ANNOTATION_RE: Regex = Regex::new("\\s*//(\\[.*\\])?~.*").unwrap(); - } + static_regex!("\\s*//(\\[.*\\])?~.*").is_match(line) + || ANNOTATIONS_TO_IGNORE.iter().any(|a| line.contains(a)) + // For `ui_test`-style UI test directives, also ignore // - `//@[rev] compile-flags` // - `//@[rev] normalize-stderr-test` - lazy_static::lazy_static! { - static ref UI_TEST_LONG_DIRECTIVES_RE: Regex = - Regex::new("\\s*//@(\\[.*\\]) (compile-flags|normalize-stderr-test|error-pattern).*") - .unwrap(); - } - ANNOTATION_RE.is_match(line) - || ANNOTATIONS_TO_IGNORE.iter().any(|a| line.contains(a)) - || UI_TEST_LONG_DIRECTIVES_RE.is_match(line) + || static_regex!("\\s*//@(\\[.*\\]) (compile-flags|normalize-stderr-test|error-pattern).*") + .is_match(line) } /// Returns `true` if `line` is allowed to be longer than the normal limit. diff --git a/src/tools/tidy/src/ui_tests.rs b/src/tools/tidy/src/ui_tests.rs index f9985a757030b..94a0eee154d93 100644 --- a/src/tools/tidy/src/ui_tests.rs +++ b/src/tools/tidy/src/ui_tests.rs @@ -2,8 +2,6 @@ //! - the number of entries in each directory must be less than `ENTRY_LIMIT` //! - there are no stray `.stderr` files use ignore::Walk; -use lazy_static::lazy_static; -use regex::Regex; use std::collections::{BTreeSet, HashMap}; use std::ffi::OsStr; use std::fs; @@ -182,12 +180,8 @@ pub fn check(root_path: &Path, bless: bool, bad: &mut bool) { } if ext == "rs" { - lazy_static! 
{ - static ref ISSUE_NAME_REGEX: Regex = - Regex::new(r"^issues?[-_]?(\d{3,})").unwrap(); - } - - if let Some(test_name) = ISSUE_NAME_REGEX.captures(testname) { + if let Some(test_name) = static_regex!(r"^issues?[-_]?(\d{3,})").captures(testname) + { // these paths are always relative to the passed `path` and always UTF8 let stripped_path = file_path .strip_prefix(path) From 607497d57caf28c3d06adfdd5bca826e9c81205b Mon Sep 17 00:00:00 2001 From: Urgau Date: Sun, 28 Apr 2024 14:44:30 +0200 Subject: [PATCH 12/56] Port print-cfg run-make to Rust-based rmake.rs --- .../tidy/src/allowed_run_make_makefiles.txt | 1 - tests/run-make/print-cfg/Makefile | 37 ------- tests/run-make/print-cfg/rmake.rs | 100 ++++++++++++++++++ tests/run-make/print-to-output/rmake.rs | 67 ++++++++++++ 4 files changed, 167 insertions(+), 38 deletions(-) delete mode 100644 tests/run-make/print-cfg/Makefile create mode 100644 tests/run-make/print-cfg/rmake.rs create mode 100644 tests/run-make/print-to-output/rmake.rs diff --git a/src/tools/tidy/src/allowed_run_make_makefiles.txt b/src/tools/tidy/src/allowed_run_make_makefiles.txt index 1b560ee352c6c..a2179b895b3d6 100644 --- a/src/tools/tidy/src/allowed_run_make_makefiles.txt +++ b/src/tools/tidy/src/allowed_run_make_makefiles.txt @@ -219,7 +219,6 @@ run-make/prefer-rlib/Makefile run-make/pretty-print-to-file/Makefile run-make/pretty-print-with-dep-file/Makefile run-make/print-calling-conventions/Makefile -run-make/print-cfg/Makefile run-make/print-target-list/Makefile run-make/profile/Makefile run-make/prune-link-args/Makefile diff --git a/tests/run-make/print-cfg/Makefile b/tests/run-make/print-cfg/Makefile deleted file mode 100644 index 6b153e5b54edd..0000000000000 --- a/tests/run-make/print-cfg/Makefile +++ /dev/null @@ -1,37 +0,0 @@ -# needs-llvm-components: x86 arm - -include ../tools.mk - -all: default output_to_file - $(RUSTC) --target x86_64-pc-windows-gnu --print cfg | $(CGREP) windows - $(RUSTC) --target x86_64-pc-windows-gnu --print cfg | $(CGREP) x86_64 - $(RUSTC) --target i686-pc-windows-msvc --print cfg | $(CGREP) msvc - $(RUSTC) --target i686-apple-darwin --print cfg | $(CGREP) macos - $(RUSTC) --target i686-unknown-linux-gnu --print cfg | $(CGREP) gnu - $(RUSTC) --target arm-unknown-linux-gnueabihf --print cfg | $(CGREP) target_abi= - $(RUSTC) --target arm-unknown-linux-gnueabihf --print cfg | $(CGREP) eabihf - -output_to_file: - # Backend-independent, printed by rustc_driver_impl/src/lib.rs - $(RUSTC) --target x86_64-pc-windows-gnu --print cfg=$(TMPDIR)/cfg.txt - $(CGREP) windows < $(TMPDIR)/cfg.txt - - # Printed from CodegenBackend trait impl in rustc_codegen_llvm/src/lib.rs - $(RUSTC) --print relocation-models=$(TMPDIR)/relocation-models.txt - $(CGREP) dynamic-no-pic < $(TMPDIR)/relocation-models.txt - - # Printed by compiler/rustc_codegen_llvm/src/llvm_util.rs - $(RUSTC) --target wasm32-unknown-unknown --print target-features=$(TMPDIR)/target-features.txt - $(CGREP) reference-types < $(TMPDIR)/target-features.txt - - # Printed by C++ code in rustc_llvm/llvm-wrapper/PassWrapper.cpp - $(RUSTC) --target wasm32-unknown-unknown --print target-cpus=$(TMPDIR)/target-cpus.txt - $(CGREP) generic < $(TMPDIR)/target-cpus.txt - -ifdef IS_WINDOWS -default: - $(RUSTC) --print cfg | $(CGREP) windows -else -default: - $(RUSTC) --print cfg | $(CGREP) unix -endif diff --git a/tests/run-make/print-cfg/rmake.rs b/tests/run-make/print-cfg/rmake.rs new file mode 100644 index 0000000000000..0f829c9a95fca --- /dev/null +++ b/tests/run-make/print-cfg/rmake.rs @@ -0,0 +1,100 @@ 
+//! This checks the output of `--print=cfg`
+//!
+//! Specifically, it checks that the output is correctly formatted
+//! (i.e. no duplicated cfgs, values are between "", names are not).
+//!
+//! It also checks that some targets have the correct set of cfgs.
+
+extern crate run_make_support;
+
+use std::collections::HashSet;
+use std::ffi::OsString;
+use std::io::BufRead;
+use std::iter::FromIterator;
+
+use run_make_support::{rustc, tmp_dir};
+
+fn main() {
+    check(
+        /*target*/ "x86_64-pc-windows-gnu",
+        /*includes*/ &["windows", "target_arch=\"x86_64\""],
+        /*disallow*/ &["unix"],
+    );
+    check(
+        /*target*/ "i686-pc-windows-msvc",
+        /*includes*/ &["windows", "target_env=\"msvc\""],
+        /*disallow*/ &["unix"],
+    );
+    check(
+        /*target*/ "i686-apple-darwin",
+        /*includes*/ &["unix", "target_os=\"macos\"", "target_vendor=\"apple\""],
+        /*disallow*/ &["windows"],
+    );
+    check(
+        /*target*/ "i686-unknown-linux-gnu",
+        /*includes*/ &["unix", "target_env=\"gnu\""],
+        /*disallow*/ &["windows"],
+    );
+    check(
+        /*target*/ "arm-unknown-linux-gnueabihf",
+        /*includes*/ &["unix", "target_abi=\"eabihf\""],
+        /*disallow*/ &["windows"],
+    );
+}
+
+fn check(target: &str, includes: &[&str], disallow: &[&str]) {
+    fn _inner(output: &str, includes: &[&str], disallow: &[&str]) {
+        let mut found = HashSet::<String>::new();
+        let mut recorded = HashSet::<String>::new();
+
+        for l in output.lines() {
+            assert!(l == l.trim());
+            if let Some((left, right)) = l.split_once('=') {
+                assert!(right.starts_with("\""));
+                assert!(right.ends_with("\""));
+                assert!(!left.contains("\""));
+            } else {
+                assert!(!l.contains("\""));
+            }
+
+            assert!(recorded.insert(l.to_string()), "duplicated: {}", &l);
+            assert!(!disallow.contains(&l), "found disallowed: {}", &l);
+            if includes.contains(&l) {
+                assert!(found.insert(l.to_string()), "duplicated (includes): {}", &l);
+            }
+        }
+
+        let should_found = HashSet::<String>::from_iter(includes.iter().map(|s| s.to_string()));
+        let diff: Vec<_> = should_found.difference(&found).collect();
+
+        assert!(
+            diff.is_empty(),
+            "expected: {:?}, found: {:?} (~ {:?})",
+            &should_found,
+            &found,
+            &diff
+        );
+    }
+
+    // --print=cfg
+    {
+        let output = rustc().target(target).print("cfg").run();
+
+        let stdout = String::from_utf8(output.stdout).unwrap();
+
+        _inner(&stdout, includes, disallow);
+    }
+
+    // --print=cfg=PATH
+    {
+        let tmp_path = tmp_dir().join(format!("{target}.cfg"));
+        let mut print_arg = OsString::from("--print=cfg=");
+        print_arg.push(tmp_path.as_os_str());
+
+        let output = rustc().target(target).arg(print_arg).run();
+
+        let output = std::fs::read_to_string(&tmp_path).unwrap();
+
+        _inner(&output, includes, disallow);
+    }
+}
diff --git a/tests/run-make/print-to-output/rmake.rs b/tests/run-make/print-to-output/rmake.rs
new file mode 100644
index 0000000000000..91724b492f8d2
--- /dev/null
+++ b/tests/run-make/print-to-output/rmake.rs
@@ -0,0 +1,67 @@
+//! This checks the output of some `--print` options when
+//! output to a file (instead of stdout)
+
+extern crate run_make_support;
+
+use std::ffi::OsString;
+
+use run_make_support::{rustc, target, tmp_dir};
+
+fn main() {
+    // Printed from CodegenBackend trait impl in rustc_codegen_llvm/src/lib.rs
+    check(
+        /*target*/ &target(),
+        /*option*/ "relocation-models",
+        /*includes*/ &["dynamic-no-pic"],
+    );
+
+    // Printed by compiler/rustc_codegen_llvm/src/llvm_util.rs
+    check(
+        /*target*/ "wasm32-unknown-unknown",
+        /*option*/ "target-features",
+        /*includes*/ &["reference-types"],
+    );
+
+    // Printed by C++ code in rustc_llvm/llvm-wrapper/PassWrapper.cpp
+    check(
+        /*target*/ "wasm32-unknown-unknown",
+        /*option*/ "target-cpus",
+        /*includes*/ &["generic"],
+    );
+}
+
+fn check(target: &str, option: &str, includes: &[&str]) {
+    fn _inner(output: &str, includes: &[&str]) {
+        for i in includes {
+            assert!(output.contains(i), "output doesn't contains: {}", i);
+        }
+    }
+
+    // --print={option}
+    let stdout = {
+        let output = rustc().target(target).print(option).run();
+
+        let stdout = String::from_utf8(output.stdout).unwrap();
+
+        _inner(&stdout, includes);
+
+        stdout
+    };
+
+    // --print={option}=PATH
+    let output = {
+        let tmp_path = tmp_dir().join(format!("{option}.txt"));
+        let mut print_arg = OsString::from(format!("--print={option}="));
+        print_arg.push(tmp_path.as_os_str());
+
+        let _output = rustc().target(target).arg(print_arg).run();
+
+        let output = std::fs::read_to_string(&tmp_path).unwrap();
+
+        _inner(&output, includes);
+
+        output
+    };
+
+    assert_eq!(&stdout, &output);
+}
From 006c94cfa1a4658318f237d87afb2a4434d42b39 Mon Sep 17 00:00:00 2001
From: Urgau
Date: Sun, 28 Apr 2024 17:17:18 +0200
Subject: [PATCH 13/56] Use named struct arguments instead of comment named args

---
 tests/run-make/print-cfg/rmake.rs       | 64 ++++++++++++++-----------
 tests/run-make/print-to-output/rmake.rs | 63 ++++++++++++------------
 2 files changed, 67 insertions(+), 60 deletions(-)

diff --git a/tests/run-make/print-cfg/rmake.rs b/tests/run-make/print-cfg/rmake.rs
index 0f829c9a95fca..71db7b8a6937e 100644
--- a/tests/run-make/print-cfg/rmake.rs
+++ b/tests/run-make/print-cfg/rmake.rs
@@ -14,36 +14,42 @@ use std::iter::FromIterator;
 
 use run_make_support::{rustc, tmp_dir};
 
+struct PrintCfg {
+    target: &'static str,
+    includes: &'static [&'static str],
+    disallow: &'static [&'static str],
+}
+
 fn main() {
-    check(
-        /*target*/ "x86_64-pc-windows-gnu",
-        /*includes*/ &["windows", "target_arch=\"x86_64\""],
-        /*disallow*/ &["unix"],
-    );
-    check(
-        /*target*/ "i686-pc-windows-msvc",
-        /*includes*/ &["windows", "target_env=\"msvc\""],
-        /*disallow*/ &["unix"],
-    );
-    check(
-        /*target*/ "i686-apple-darwin",
-        /*includes*/ &["unix", "target_os=\"macos\"", "target_vendor=\"apple\""],
-        /*disallow*/ &["windows"],
-    );
-    check(
-        /*target*/ "i686-unknown-linux-gnu",
-        /*includes*/ &["unix", "target_env=\"gnu\""],
-        /*disallow*/ &["windows"],
-    );
-    check(
-        /*target*/ "arm-unknown-linux-gnueabihf",
-        /*includes*/ &["unix", "target_abi=\"eabihf\""],
-        /*disallow*/ &["windows"],
-    );
+    check(PrintCfg {
+        target: "x86_64-pc-windows-gnu",
+        includes: &["windows", "target_arch=\"x86_64\""],
+        disallow: &["unix"],
+    });
+    check(PrintCfg {
+        target: "i686-pc-windows-msvc",
+        includes: &["windows", "target_env=\"msvc\""],
+        disallow: &["unix"],
+    });
+    check(PrintCfg {
+        target: "i686-apple-darwin",
+        includes: &["unix", "target_os=\"macos\"", "target_vendor=\"apple\""],
+        disallow: &["windows"],
+    });
+    check(PrintCfg {
+        target: "i686-unknown-linux-gnu",
+        includes: &["unix", 
"target_env=\"gnu\""], + disallow: &["windows"], + }); + check(PrintCfg { + target: "arm-unknown-linux-gnueabihf", + includes: &["unix", "target_abi=\"eabihf\""], + disallow: &["windows"], + }); } -fn check(target: &str, includes: &[&str], disallow: &[&str]) { - fn _inner(output: &str, includes: &[&str], disallow: &[&str]) { +fn check(PrintCfg { target, includes, disallow }: PrintCfg) { + fn check_(output: &str, includes: &[&str], disallow: &[&str]) { let mut found = HashSet::::new(); let mut recorded = HashSet::::new(); @@ -82,7 +88,7 @@ fn check(target: &str, includes: &[&str], disallow: &[&str]) { let stdout = String::from_utf8(output.stdout).unwrap(); - _inner(&stdout, includes, disallow); + check_(&stdout, includes, disallow); } // --print=cfg=PATH @@ -95,6 +101,6 @@ fn check(target: &str, includes: &[&str], disallow: &[&str]) { let output = std::fs::read_to_string(&tmp_path).unwrap(); - _inner(&output, includes, disallow); + check_(&output, includes, disallow); } } diff --git a/tests/run-make/print-to-output/rmake.rs b/tests/run-make/print-to-output/rmake.rs index 91724b492f8d2..8595a0c490b34 100644 --- a/tests/run-make/print-to-output/rmake.rs +++ b/tests/run-make/print-to-output/rmake.rs @@ -7,31 +7,37 @@ use std::ffi::OsString; use run_make_support::{rustc, target, tmp_dir}; +struct Option<'a> { + target: &'a str, + option: &'static str, + includes: &'static [&'static str], +} + fn main() { // Printed from CodegenBackend trait impl in rustc_codegen_llvm/src/lib.rs - check( - /*target*/ &target(), - /*option*/ "relocation-models", - /*includes*/ &["dynamic-no-pic"], - ); + check(Option { + target: &target(), + option: "relocation-models", + includes: &["dynamic-no-pic"], + }); // Printed by compiler/rustc_codegen_llvm/src/llvm_util.rs - check( - /*target*/ "wasm32-unknown-unknown", - /*option*/ "target-features", - /*includes*/ &["reference-types"], - ); + check(Option { + target: "wasm32-unknown-unknown", + option: "target-features", + includes: &["reference-types"], + }); // Printed by C++ code in rustc_llvm/llvm-wrapper/PassWrapper.cpp - check( - /*target*/ "wasm32-unknown-unknown", - /*option*/ "target-cpus", - /*includes*/ &["generic"], - ); + check(Option { + target: "wasm32-unknown-unknown", + option: "target-cpus", + includes: &["generic"], + }); } -fn check(target: &str, option: &str, includes: &[&str]) { - fn _inner(output: &str, includes: &[&str]) { +fn check(args: Option) { + fn check_(output: &str, includes: &[&str]) { for i in includes { assert!(output.contains(i), "output doesn't contains: {}", i); } @@ -39,29 +45,24 @@ fn check(target: &str, option: &str, includes: &[&str]) { // --print={option} let stdout = { - let output = rustc().target(target).print(option).run(); - - let stdout = String::from_utf8(output.stdout).unwrap(); - - _inner(&stdout, includes); + let output = rustc().target(args.target).print(args.option).run(); - stdout + String::from_utf8(output.stdout).unwrap() }; // --print={option}=PATH let output = { - let tmp_path = tmp_dir().join(format!("{option}.txt")); - let mut print_arg = OsString::from(format!("--print={option}=")); + let tmp_path = tmp_dir().join(format!("{}.txt", args.option)); + let mut print_arg = OsString::from(format!("--print={}=", args.option)); print_arg.push(tmp_path.as_os_str()); - let _output = rustc().target(target).arg(print_arg).run(); + let _output = rustc().target(args.target).arg(print_arg).run(); - let output = std::fs::read_to_string(&tmp_path).unwrap(); - - _inner(&output, includes); - - output + 
std::fs::read_to_string(&tmp_path).unwrap() }; + check_(&stdout, args.includes); + check_(&output, args.includes); + assert_eq!(&stdout, &output); } From f5b3b400f7c5f85d3e0a821d83f41b2b71985f9b Mon Sep 17 00:00:00 2001 From: Guillaume Gomez Date: Wed, 10 Apr 2024 20:14:18 +0200 Subject: [PATCH 14/56] Inline `clipboard.svg` into CSS --- src/librustdoc/html/static/css/rustdoc.css | 12 +++++++++++- src/librustdoc/html/static/images/clipboard.svg | 1 - src/librustdoc/html/static_files.rs | 1 - 3 files changed, 11 insertions(+), 3 deletions(-) delete mode 100644 src/librustdoc/html/static/images/clipboard.svg diff --git a/src/librustdoc/html/static/css/rustdoc.css b/src/librustdoc/html/static/css/rustdoc.css index 74c9130fd77bd..6b0a9c91e6cd8 100644 --- a/src/librustdoc/html/static/css/rustdoc.css +++ b/src/librustdoc/html/static/css/rustdoc.css @@ -1636,7 +1636,17 @@ a.tooltip:hover::after { } #copy-path::before { filter: var(--copy-path-img-filter); - content: url('clipboard-24048e6d87f63d07.svg'); + /* clipboard */ + content: url('data:image/svg+xml,\ +\ +\ +'); + width: 19px; + height: 18px; } #copy-path:hover::before { filter: var(--copy-path-img-hover-filter); diff --git a/src/librustdoc/html/static/images/clipboard.svg b/src/librustdoc/html/static/images/clipboard.svg deleted file mode 100644 index e437c83fb6b6d..0000000000000 --- a/src/librustdoc/html/static/images/clipboard.svg +++ /dev/null @@ -1 +0,0 @@ - diff --git a/src/librustdoc/html/static_files.rs b/src/librustdoc/html/static_files.rs index d8874c2fda0a0..d78cf0a028a41 100644 --- a/src/librustdoc/html/static_files.rs +++ b/src/librustdoc/html/static_files.rs @@ -100,7 +100,6 @@ static_files! { storage_js => "static/js/storage.js", scrape_examples_js => "static/js/scrape-examples.js", wheel_svg => "static/images/wheel.svg", - clipboard_svg => "static/images/clipboard.svg", copyright => "static/COPYRIGHT.txt", license_apache => "static/LICENSE-APACHE.txt", license_mit => "static/LICENSE-MIT.txt", From f780913c722a78ab87e3fea10af4b75c0e4c1b66 Mon Sep 17 00:00:00 2001 From: Guillaume Gomez Date: Wed, 10 Apr 2024 20:29:15 +0200 Subject: [PATCH 15/56] Inline `wheel.svg` into CSS --- src/librustdoc/html/static/css/rustdoc.css | 15 ++++++++++++++- src/librustdoc/html/static/images/wheel.svg | 1 - src/librustdoc/html/static_files.rs | 1 - 3 files changed, 14 insertions(+), 3 deletions(-) delete mode 100644 src/librustdoc/html/static/images/wheel.svg diff --git a/src/librustdoc/html/static/css/rustdoc.css b/src/librustdoc/html/static/css/rustdoc.css index 6b0a9c91e6cd8..887a5c39444fb 100644 --- a/src/librustdoc/html/static/css/rustdoc.css +++ b/src/librustdoc/html/static/css/rustdoc.css @@ -1608,7 +1608,20 @@ a.tooltip:hover::after { font-size: 0; } #settings-menu > a:before { - content: url('wheel-63255fc4502dca9a.svg'); + /* Wheel */ + content: url('data:image/svg+xml,\ + '); width: 22px; height: 22px; } diff --git a/src/librustdoc/html/static/images/wheel.svg b/src/librustdoc/html/static/images/wheel.svg deleted file mode 100644 index ba30f13dd58e9..0000000000000 --- a/src/librustdoc/html/static/images/wheel.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/src/librustdoc/html/static_files.rs b/src/librustdoc/html/static_files.rs index d78cf0a028a41..035376bace9c7 100644 --- a/src/librustdoc/html/static_files.rs +++ b/src/librustdoc/html/static_files.rs @@ -99,7 +99,6 @@ static_files! 
{ src_script_js => "static/js/src-script.js", storage_js => "static/js/storage.js", scrape_examples_js => "static/js/scrape-examples.js", - wheel_svg => "static/images/wheel.svg", copyright => "static/COPYRIGHT.txt", license_apache => "static/LICENSE-APACHE.txt", license_mit => "static/LICENSE-MIT.txt", From 5a3509c2f47758cc714381bc2f2fa68583bd5a5a Mon Sep 17 00:00:00 2001 From: Guillaume Gomez Date: Wed, 10 Apr 2024 20:36:02 +0200 Subject: [PATCH 16/56] Add some missing comments to describe what the inlined SVG is --- src/librustdoc/html/static/css/rustdoc.css | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/librustdoc/html/static/css/rustdoc.css b/src/librustdoc/html/static/css/rustdoc.css index 887a5c39444fb..80a4ea0424a26 100644 --- a/src/librustdoc/html/static/css/rustdoc.css +++ b/src/librustdoc/html/static/css/rustdoc.css @@ -1133,6 +1133,7 @@ so that we can apply CSS-filters to change the arrow color in themes */ .setting-check input:checked { background-color: var(--settings-input-color); border-width: 1px; + /* cross-mark image in the settings checkboxes */ content: url('data:image/svg+xml,\ \ '); @@ -1627,6 +1628,7 @@ a.tooltip:hover::after { } #sidebar-button > a:before { + /* sidebar resizer image */ content: url('data:image/svg+xml,\ \ @@ -1860,6 +1862,7 @@ However, it's not needed with smaller screen width because the doc/code block is /* sidebar button opens modal use hamburger button */ .src #sidebar-button > a:before, .sidebar-menu-toggle:before { + /* hamburger button image */ content: url('data:image/svg+xml,\ '); @@ -1873,6 +1876,7 @@ However, it's not needed with smaller screen width because the doc/code block is /* src sidebar button opens a folder view */ .src #sidebar-button > a:before { + /* folder image */ content: url('data:image/svg+xml,\ \ From ca79086c874278bf362391996be7b49ccf30160a Mon Sep 17 00:00:00 2001 From: George Bateman Date: Sun, 28 Apr 2024 18:36:08 +0100 Subject: [PATCH 17/56] Fix #124478 - offset_of! returns a temporary This was due to the must_use() call. Adding HIR's OffsetOf to the must_use checking within the compiler avoids this issue. --- compiler/rustc_lint/src/unused.rs | 1 + library/core/src/mem/mod.rs | 4 +-- tests/ui/offset-of/offset-of-dst-field.stderr | 28 +++++++++---------- tests/ui/offset-of/offset-of-must-use.rs | 2 +- tests/ui/offset-of/offset-of-must-use.stderr | 4 +-- tests/ui/offset-of/offset-of-self.rs | 5 ++-- tests/ui/offset-of/offset-of-self.stderr | 10 +++---- 7 files changed, 28 insertions(+), 26 deletions(-) diff --git a/compiler/rustc_lint/src/unused.rs b/compiler/rustc_lint/src/unused.rs index 0b0949e4dd861..98f2e95a3894a 100644 --- a/compiler/rustc_lint/src/unused.rs +++ b/compiler/rustc_lint/src/unused.rs @@ -176,6 +176,7 @@ impl<'tcx> LateLintPass<'tcx> for UnusedResults { | hir::BinOpKind::Shr => Some("bitwise operation"), }, hir::ExprKind::AddrOf(..) => Some("borrow"), + hir::ExprKind::OffsetOf(..) => Some("`offset_of` call"), hir::ExprKind::Unary(..) => Some("unary operation"), _ => None, }; diff --git a/library/core/src/mem/mod.rs b/library/core/src/mem/mod.rs index 75d42edbaa033..9054ade2d7968 100644 --- a/library/core/src/mem/mod.rs +++ b/library/core/src/mem/mod.rs @@ -1340,8 +1340,8 @@ impl SizedTypeProperties for T {} /// assert_eq!(mem::offset_of!(Option<&u8>, Some.0), 0); /// ``` #[stable(feature = "offset_of", since = "1.77.0")] -#[allow_internal_unstable(builtin_syntax, hint_must_use)] +#[allow_internal_unstable(builtin_syntax)] pub macro offset_of($Container:ty, $($fields:expr)+ $(,)?) 
{ // The `{}` is for better error messages - crate::hint::must_use({builtin # offset_of($Container, $($fields)+)}) + {builtin # offset_of($Container, $($fields)+)} } diff --git a/tests/ui/offset-of/offset-of-dst-field.stderr b/tests/ui/offset-of/offset-of-dst-field.stderr index 753ba809e7da1..adfd16c6f2b98 100644 --- a/tests/ui/offset-of/offset-of-dst-field.stderr +++ b/tests/ui/offset-of/offset-of-dst-field.stderr @@ -34,20 +34,6 @@ LL | offset_of!((u8, dyn Trait), 1); = help: the trait `Sized` is not implemented for `dyn Trait` = note: this error originates in the macro `offset_of` (in Nightly builds, run with -Z macro-backtrace for more info) -error[E0277]: the size for values of type `[u8]` cannot be known at compilation time - --> $DIR/offset-of-dst-field.rs:44:5 - | -LL | offset_of!(Delta, z); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time - | - = help: within `Alpha`, the trait `Sized` is not implemented for `[u8]`, which is required by `Alpha: Sized` -note: required because it appears within the type `Alpha` - --> $DIR/offset-of-dst-field.rs:5:8 - | -LL | struct Alpha { - | ^^^^^ - = note: this error originates in the macro `offset_of` (in Nightly builds, run with -Z macro-backtrace for more info) - error[E0277]: the size for values of type `Extern` cannot be known at compilation time --> $DIR/offset-of-dst-field.rs:45:5 | @@ -66,6 +52,20 @@ LL | offset_of!(Delta, z); = help: the trait `Sized` is not implemented for `dyn Trait` = note: this error originates in the macro `offset_of` (in Nightly builds, run with -Z macro-backtrace for more info) +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> $DIR/offset-of-dst-field.rs:44:5 + | +LL | offset_of!(Delta, z); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: within `Alpha`, the trait `Sized` is not implemented for `[u8]`, which is required by `Alpha: Sized` +note: required because it appears within the type `Alpha` + --> $DIR/offset-of-dst-field.rs:5:8 + | +LL | struct Alpha { + | ^^^^^ + = note: this error originates in the macro `offset_of` (in Nightly builds, run with -Z macro-backtrace for more info) + error[E0277]: the size for values of type `T` cannot be known at compilation time --> $DIR/offset-of-dst-field.rs:50:5 | diff --git a/tests/ui/offset-of/offset-of-must-use.rs b/tests/ui/offset-of/offset-of-must-use.rs index f0c242891d8f9..87918b8ff9539 100644 --- a/tests/ui/offset-of/offset-of-must-use.rs +++ b/tests/ui/offset-of/offset-of-must-use.rs @@ -4,5 +4,5 @@ fn main() { core::mem::offset_of!((String,), 0); - //~^ WARN unused return value of `must_use` that must be used + //~^ WARN unused `offset_of` call that must be used } diff --git a/tests/ui/offset-of/offset-of-must-use.stderr b/tests/ui/offset-of/offset-of-must-use.stderr index b6d88e098d00c..9f0e37a59f43b 100644 --- a/tests/ui/offset-of/offset-of-must-use.stderr +++ b/tests/ui/offset-of/offset-of-must-use.stderr @@ -1,8 +1,8 @@ -warning: unused return value of `must_use` that must be used +warning: unused `offset_of` call that must be used --> $DIR/offset-of-must-use.rs:6:5 | LL | core::mem::offset_of!((String,), 0); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the `offset_of` call produces a value | note: the lint level is defined here --> $DIR/offset-of-must-use.rs:3:9 diff --git a/tests/ui/offset-of/offset-of-self.rs b/tests/ui/offset-of/offset-of-self.rs index 1558e13b53095..51b1a0b242733 100644 --- 
a/tests/ui/offset-of/offset-of-self.rs +++ b/tests/ui/offset-of/offset-of-self.rs @@ -23,8 +23,9 @@ impl S { fn offs_in_c() -> usize { offset_of!(C, w) } - fn offs_in_c_colon() -> usize { - offset_of!(C::, w) + // Put offset_of in a slice - test #124478. + fn offs_in_c_colon() -> &'static [usize] { + &[offset_of!(C::, w)] } } diff --git a/tests/ui/offset-of/offset-of-self.stderr b/tests/ui/offset-of/offset-of-self.stderr index 7c7576e066b6f..a2104a438ad8d 100644 --- a/tests/ui/offset-of/offset-of-self.stderr +++ b/tests/ui/offset-of/offset-of-self.stderr @@ -5,7 +5,7 @@ LL | offset_of!(Self, Self::v); | ^^^^^^^ error[E0412]: cannot find type `S` in module `self` - --> $DIR/offset-of-self.rs:34:26 + --> $DIR/offset-of-self.rs:35:26 | LL | offset_of!(self::S, v); | ^ not found in `self` @@ -21,7 +21,7 @@ LL + offset_of!(S, v); | error[E0411]: cannot find type `Self` in this scope - --> $DIR/offset-of-self.rs:51:16 + --> $DIR/offset-of-self.rs:52:16 | LL | fn main() { | ---- `Self` not allowed in a function @@ -38,13 +38,13 @@ LL | offset_of!(S, Self); = note: available fields are: `v`, `w` error[E0616]: field `v` of struct `T` is private - --> $DIR/offset-of-self.rs:40:30 + --> $DIR/offset-of-self.rs:41:30 | LL | offset_of!(Self, v) | ^ private field error[E0609]: no field `self` on type `S` - --> $DIR/offset-of-self.rs:53:19 + --> $DIR/offset-of-self.rs:54:19 | LL | offset_of!(S, self); | ^^^^ @@ -52,7 +52,7 @@ LL | offset_of!(S, self); = note: available fields are: `v`, `w` error[E0609]: no field `self` on type `u8` - --> $DIR/offset-of-self.rs:54:21 + --> $DIR/offset-of-self.rs:55:21 | LL | offset_of!(S, v.self); | ^^^^ From 4284bca7209fc9eb03a8db5a388479ff723cc70d Mon Sep 17 00:00:00 2001 From: est31 Date: Sun, 28 Apr 2024 15:14:32 +0200 Subject: [PATCH 18/56] Add a note to the ArbitraryExpressionInPattern error --- compiler/rustc_ast_lowering/messages.ftl | 1 + compiler/rustc_ast_lowering/src/errors.rs | 2 ++ compiler/rustc_ast_lowering/src/pat.rs | 6 +++++- tests/ui/issues/issue-43250.stderr | 4 ++++ tests/ui/lowering/expr-in-pat-issue-99380.rs | 11 +++++++++++ tests/ui/lowering/expr-in-pat-issue-99380.stderr | 10 ++++++++++ tests/ui/macros/vec-macro-in-pattern.stderr | 1 + tests/ui/match/expr_before_ident_pat.stderr | 2 ++ tests/ui/pattern/issue-92074-macro-ice.stderr | 3 +++ 9 files changed, 39 insertions(+), 1 deletion(-) create mode 100644 tests/ui/lowering/expr-in-pat-issue-99380.rs create mode 100644 tests/ui/lowering/expr-in-pat-issue-99380.stderr diff --git a/compiler/rustc_ast_lowering/messages.ftl b/compiler/rustc_ast_lowering/messages.ftl index 73001c9990c8b..aa5a85b9c4ca2 100644 --- a/compiler/rustc_ast_lowering/messages.ftl +++ b/compiler/rustc_ast_lowering/messages.ftl @@ -5,6 +5,7 @@ ast_lowering_abi_specified_multiple_times = ast_lowering_arbitrary_expression_in_pattern = arbitrary expressions aren't allowed in patterns + .pattern_from_macro_note = the :expr fragment specifier forces the metavariable's content to be an expression ast_lowering_argument = argument diff --git a/compiler/rustc_ast_lowering/src/errors.rs b/compiler/rustc_ast_lowering/src/errors.rs index 6f70e135c72ac..02744d16b422f 100644 --- a/compiler/rustc_ast_lowering/src/errors.rs +++ b/compiler/rustc_ast_lowering/src/errors.rs @@ -368,6 +368,8 @@ pub struct NeverPatternWithGuard { pub struct ArbitraryExpressionInPattern { #[primary_span] pub span: Span, + #[note(ast_lowering_pattern_from_macro_note)] + pub pattern_from_macro_note: bool, } #[derive(Diagnostic)] diff --git 
a/compiler/rustc_ast_lowering/src/pat.rs b/compiler/rustc_ast_lowering/src/pat.rs index 118a7322fbdff..32de07a0755e2 100644 --- a/compiler/rustc_ast_lowering/src/pat.rs +++ b/compiler/rustc_ast_lowering/src/pat.rs @@ -339,7 +339,11 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { ExprKind::Path(..) if allow_paths => {} ExprKind::Unary(UnOp::Neg, inner) if matches!(inner.kind, ExprKind::Lit(_)) => {} _ => { - let guar = self.dcx().emit_err(ArbitraryExpressionInPattern { span: expr.span }); + let pattern_from_macro = expr.is_approximately_pattern(); + let guar = self.dcx().emit_err(ArbitraryExpressionInPattern { + span: expr.span, + pattern_from_macro_note: pattern_from_macro, + }); return self.arena.alloc(self.expr_err(expr.span, guar)); } } diff --git a/tests/ui/issues/issue-43250.stderr b/tests/ui/issues/issue-43250.stderr index f729c5cf10cea..01c22a9e79166 100644 --- a/tests/ui/issues/issue-43250.stderr +++ b/tests/ui/issues/issue-43250.stderr @@ -3,12 +3,16 @@ error: arbitrary expressions aren't allowed in patterns | LL | m!(y); | ^ + | + = note: the :expr fragment specifier forces the metavariable's content to be an expression error: arbitrary expressions aren't allowed in patterns --> $DIR/issue-43250.rs:11:8 | LL | m!(C); | ^ + | + = note: the :expr fragment specifier forces the metavariable's content to be an expression error: aborting due to 2 previous errors diff --git a/tests/ui/lowering/expr-in-pat-issue-99380.rs b/tests/ui/lowering/expr-in-pat-issue-99380.rs new file mode 100644 index 0000000000000..1d4a047f717f7 --- /dev/null +++ b/tests/ui/lowering/expr-in-pat-issue-99380.rs @@ -0,0 +1,11 @@ +macro_rules! foo { + ($p:expr) => { + if let $p = Some(42) { + return; + } + }; +} + +fn main() { + foo!(Some(3)); //~ ERROR arbitrary expressions aren't allowed in patterns +} diff --git a/tests/ui/lowering/expr-in-pat-issue-99380.stderr b/tests/ui/lowering/expr-in-pat-issue-99380.stderr new file mode 100644 index 0000000000000..7438bba270c72 --- /dev/null +++ b/tests/ui/lowering/expr-in-pat-issue-99380.stderr @@ -0,0 +1,10 @@ +error: arbitrary expressions aren't allowed in patterns + --> $DIR/expr-in-pat-issue-99380.rs:10:10 + | +LL | foo!(Some(3)); + | ^^^^^^^ + | + = note: the :expr fragment specifier forces the metavariable's content to be an expression + +error: aborting due to 1 previous error + diff --git a/tests/ui/macros/vec-macro-in-pattern.stderr b/tests/ui/macros/vec-macro-in-pattern.stderr index 447f5dcf8644f..7c87f3b7b8e6e 100644 --- a/tests/ui/macros/vec-macro-in-pattern.stderr +++ b/tests/ui/macros/vec-macro-in-pattern.stderr @@ -4,6 +4,7 @@ error: arbitrary expressions aren't allowed in patterns LL | Some(vec![43]) => {} | ^^^^^^^^ | + = note: the :expr fragment specifier forces the metavariable's content to be an expression = note: this error originates in the macro `vec` (in Nightly builds, run with -Z macro-backtrace for more info) error: aborting due to 1 previous error diff --git a/tests/ui/match/expr_before_ident_pat.stderr b/tests/ui/match/expr_before_ident_pat.stderr index 57a2d2b26cff0..2f59573b53f44 100644 --- a/tests/ui/match/expr_before_ident_pat.stderr +++ b/tests/ui/match/expr_before_ident_pat.stderr @@ -9,6 +9,8 @@ error: arbitrary expressions aren't allowed in patterns | LL | funny!(a, a); | ^ + | + = note: the :expr fragment specifier forces the metavariable's content to be an expression error: aborting due to 2 previous errors diff --git a/tests/ui/pattern/issue-92074-macro-ice.stderr b/tests/ui/pattern/issue-92074-macro-ice.stderr index 
b340afff010f7..bf53eb4bbcd47 100644 --- a/tests/ui/pattern/issue-92074-macro-ice.stderr +++ b/tests/ui/pattern/issue-92074-macro-ice.stderr @@ -7,6 +7,7 @@ LL | () => { force_expr!(Vec::new()) } LL | assert!(matches!(x, En::A(make_vec!()))); | ----------- in this macro invocation | + = note: the :expr fragment specifier forces the metavariable's content to be an expression = note: this error originates in the macro `make_vec` (in Nightly builds, run with -Z macro-backtrace for more info) error: arbitrary expressions aren't allowed in patterns @@ -18,6 +19,7 @@ LL | () => { force_pat!(get_usize(), get_usize()) } LL | assert!(matches!(5, make_pat!())); | ----------- in this macro invocation | + = note: the :expr fragment specifier forces the metavariable's content to be an expression = note: this error originates in the macro `make_pat` (in Nightly builds, run with -Z macro-backtrace for more info) error: arbitrary expressions aren't allowed in patterns @@ -29,6 +31,7 @@ LL | () => { force_pat!(get_usize(), get_usize()) } LL | assert!(matches!(5, make_pat!())); | ----------- in this macro invocation | + = note: the :expr fragment specifier forces the metavariable's content to be an expression = note: this error originates in the macro `make_pat` (in Nightly builds, run with -Z macro-backtrace for more info) error: aborting due to 3 previous errors From 2348eb271c95f7ba75333f41b1a303b3b7261b4f Mon Sep 17 00:00:00 2001 From: George Bateman Date: Sun, 28 Apr 2024 21:10:09 +0100 Subject: [PATCH 19/56] Update mir-opt tests, add proper regression test --- .../offset_of.concrete.GVN.panic-abort.diff | 112 +++++------------- .../offset_of.concrete.GVN.panic-unwind.diff | 112 +++++------------- .../offset_of.generic.GVN.panic-abort.diff | 99 ++++------------ .../offset_of.generic.GVN.panic-unwind.diff | 99 ++++------------ ...oncrete.DataflowConstProp.panic-abort.diff | 64 +++------- ...ncrete.DataflowConstProp.panic-unwind.diff | 64 +++------- ...generic.DataflowConstProp.panic-abort.diff | 58 +++------ ...eneric.DataflowConstProp.panic-unwind.diff | 58 +++------ .../mir-opt/dataflow-const-prop/offset_of.rs | 16 +-- tests/ui/offset-of/offset-of-self.rs | 5 +- tests/ui/offset-of/offset-of-self.stderr | 10 +- tests/ui/offset-of/offset-of-temporaries.rs | 23 ++++ 12 files changed, 202 insertions(+), 518 deletions(-) create mode 100644 tests/ui/offset-of/offset-of-temporaries.rs diff --git a/tests/mir-opt/const_prop/offset_of.concrete.GVN.panic-abort.diff b/tests/mir-opt/const_prop/offset_of.concrete.GVN.panic-abort.diff index 5d94797905db4..77a2c5bcccc72 100644 --- a/tests/mir-opt/const_prop/offset_of.concrete.GVN.panic-abort.diff +++ b/tests/mir-opt/const_prop/offset_of.concrete.GVN.panic-abort.diff @@ -4,33 +4,26 @@ fn concrete() -> () { let mut _0: (); let _1: usize; - let mut _2: usize; - let mut _4: usize; - let mut _6: usize; - let mut _8: usize; - let mut _10: usize; - let mut _12: usize; - let mut _14: usize; scope 1 { debug x => _1; - let _3: usize; + let _2: usize; scope 2 { - debug y => _3; - let _5: usize; + debug y => _2; + let _3: usize; scope 3 { - debug z0 => _5; - let _7: usize; + debug z0 => _3; + let _4: usize; scope 4 { - debug z1 => _7; - let _9: usize; + debug z1 => _4; + let _5: usize; scope 5 { - debug eA0 => _9; - let _11: usize; + debug eA0 => _5; + let _6: usize; scope 6 { - debug eA1 => _11; - let _13: usize; + debug eA1 => _6; + let _7: usize; scope 7 { - debug eC => _13; + debug eC => _7; } } } @@ -41,82 +34,33 @@ bb0: { StorageLive(_1); +- _1 = OffsetOf(Alpha, [(0, 0)]); 
++ _1 = const 4_usize; StorageLive(_2); -- _2 = OffsetOf(Alpha, [(0, 0)]); -- _1 = must_use::(move _2) -> [return: bb1, unwind unreachable]; -+ _2 = const 4_usize; -+ _1 = must_use::(const 4_usize) -> [return: bb1, unwind unreachable]; - } - - bb1: { - StorageDead(_2); +- _2 = OffsetOf(Alpha, [(0, 1)]); ++ _2 = const 0_usize; StorageLive(_3); +- _3 = OffsetOf(Alpha, [(0, 2), (0, 0)]); ++ _3 = const 2_usize; StorageLive(_4); -- _4 = OffsetOf(Alpha, [(0, 1)]); -- _3 = must_use::(move _4) -> [return: bb2, unwind unreachable]; -+ _4 = const 0_usize; -+ _3 = must_use::(const 0_usize) -> [return: bb2, unwind unreachable]; - } - - bb2: { - StorageDead(_4); +- _4 = OffsetOf(Alpha, [(0, 2), (0, 1)]); ++ _4 = const 3_usize; StorageLive(_5); +- _5 = OffsetOf(Epsilon, [(0, 0)]); ++ _5 = const 1_usize; StorageLive(_6); -- _6 = OffsetOf(Alpha, [(0, 2), (0, 0)]); -- _5 = must_use::(move _6) -> [return: bb3, unwind unreachable]; +- _6 = OffsetOf(Epsilon, [(0, 1)]); + _6 = const 2_usize; -+ _5 = must_use::(const 2_usize) -> [return: bb3, unwind unreachable]; - } - - bb3: { - StorageDead(_6); StorageLive(_7); - StorageLive(_8); -- _8 = OffsetOf(Alpha, [(0, 2), (0, 1)]); -- _7 = must_use::(move _8) -> [return: bb4, unwind unreachable]; -+ _8 = const 3_usize; -+ _7 = must_use::(const 3_usize) -> [return: bb4, unwind unreachable]; - } - - bb4: { - StorageDead(_8); - StorageLive(_9); - StorageLive(_10); -- _10 = OffsetOf(Epsilon, [(0, 0)]); -- _9 = must_use::(move _10) -> [return: bb5, unwind unreachable]; -+ _10 = const 1_usize; -+ _9 = must_use::(const 1_usize) -> [return: bb5, unwind unreachable]; - } - - bb5: { - StorageDead(_10); - StorageLive(_11); - StorageLive(_12); -- _12 = OffsetOf(Epsilon, [(0, 1)]); -- _11 = must_use::(move _12) -> [return: bb6, unwind unreachable]; -+ _12 = const 2_usize; -+ _11 = must_use::(const 2_usize) -> [return: bb6, unwind unreachable]; - } - - bb6: { - StorageDead(_12); - StorageLive(_13); - StorageLive(_14); -- _14 = OffsetOf(Epsilon, [(2, 0)]); -- _13 = must_use::(move _14) -> [return: bb7, unwind unreachable]; -+ _14 = const 4_usize; -+ _13 = must_use::(const 4_usize) -> [return: bb7, unwind unreachable]; - } - - bb7: { - StorageDead(_14); +- _7 = OffsetOf(Epsilon, [(2, 0)]); ++ _7 = const 4_usize; _0 = const (); - StorageDead(_13); - StorageDead(_11); - StorageDead(_9); StorageDead(_7); + StorageDead(_6); StorageDead(_5); + StorageDead(_4); StorageDead(_3); + StorageDead(_2); StorageDead(_1); return; } diff --git a/tests/mir-opt/const_prop/offset_of.concrete.GVN.panic-unwind.diff b/tests/mir-opt/const_prop/offset_of.concrete.GVN.panic-unwind.diff index 4d890742ee99c..77a2c5bcccc72 100644 --- a/tests/mir-opt/const_prop/offset_of.concrete.GVN.panic-unwind.diff +++ b/tests/mir-opt/const_prop/offset_of.concrete.GVN.panic-unwind.diff @@ -4,33 +4,26 @@ fn concrete() -> () { let mut _0: (); let _1: usize; - let mut _2: usize; - let mut _4: usize; - let mut _6: usize; - let mut _8: usize; - let mut _10: usize; - let mut _12: usize; - let mut _14: usize; scope 1 { debug x => _1; - let _3: usize; + let _2: usize; scope 2 { - debug y => _3; - let _5: usize; + debug y => _2; + let _3: usize; scope 3 { - debug z0 => _5; - let _7: usize; + debug z0 => _3; + let _4: usize; scope 4 { - debug z1 => _7; - let _9: usize; + debug z1 => _4; + let _5: usize; scope 5 { - debug eA0 => _9; - let _11: usize; + debug eA0 => _5; + let _6: usize; scope 6 { - debug eA1 => _11; - let _13: usize; + debug eA1 => _6; + let _7: usize; scope 7 { - debug eC => _13; + debug eC => _7; } } } @@ -41,82 +34,33 
@@ bb0: { StorageLive(_1); +- _1 = OffsetOf(Alpha, [(0, 0)]); ++ _1 = const 4_usize; StorageLive(_2); -- _2 = OffsetOf(Alpha, [(0, 0)]); -- _1 = must_use::(move _2) -> [return: bb1, unwind continue]; -+ _2 = const 4_usize; -+ _1 = must_use::(const 4_usize) -> [return: bb1, unwind continue]; - } - - bb1: { - StorageDead(_2); +- _2 = OffsetOf(Alpha, [(0, 1)]); ++ _2 = const 0_usize; StorageLive(_3); +- _3 = OffsetOf(Alpha, [(0, 2), (0, 0)]); ++ _3 = const 2_usize; StorageLive(_4); -- _4 = OffsetOf(Alpha, [(0, 1)]); -- _3 = must_use::(move _4) -> [return: bb2, unwind continue]; -+ _4 = const 0_usize; -+ _3 = must_use::(const 0_usize) -> [return: bb2, unwind continue]; - } - - bb2: { - StorageDead(_4); +- _4 = OffsetOf(Alpha, [(0, 2), (0, 1)]); ++ _4 = const 3_usize; StorageLive(_5); +- _5 = OffsetOf(Epsilon, [(0, 0)]); ++ _5 = const 1_usize; StorageLive(_6); -- _6 = OffsetOf(Alpha, [(0, 2), (0, 0)]); -- _5 = must_use::(move _6) -> [return: bb3, unwind continue]; +- _6 = OffsetOf(Epsilon, [(0, 1)]); + _6 = const 2_usize; -+ _5 = must_use::(const 2_usize) -> [return: bb3, unwind continue]; - } - - bb3: { - StorageDead(_6); StorageLive(_7); - StorageLive(_8); -- _8 = OffsetOf(Alpha, [(0, 2), (0, 1)]); -- _7 = must_use::(move _8) -> [return: bb4, unwind continue]; -+ _8 = const 3_usize; -+ _7 = must_use::(const 3_usize) -> [return: bb4, unwind continue]; - } - - bb4: { - StorageDead(_8); - StorageLive(_9); - StorageLive(_10); -- _10 = OffsetOf(Epsilon, [(0, 0)]); -- _9 = must_use::(move _10) -> [return: bb5, unwind continue]; -+ _10 = const 1_usize; -+ _9 = must_use::(const 1_usize) -> [return: bb5, unwind continue]; - } - - bb5: { - StorageDead(_10); - StorageLive(_11); - StorageLive(_12); -- _12 = OffsetOf(Epsilon, [(0, 1)]); -- _11 = must_use::(move _12) -> [return: bb6, unwind continue]; -+ _12 = const 2_usize; -+ _11 = must_use::(const 2_usize) -> [return: bb6, unwind continue]; - } - - bb6: { - StorageDead(_12); - StorageLive(_13); - StorageLive(_14); -- _14 = OffsetOf(Epsilon, [(2, 0)]); -- _13 = must_use::(move _14) -> [return: bb7, unwind continue]; -+ _14 = const 4_usize; -+ _13 = must_use::(const 4_usize) -> [return: bb7, unwind continue]; - } - - bb7: { - StorageDead(_14); +- _7 = OffsetOf(Epsilon, [(2, 0)]); ++ _7 = const 4_usize; _0 = const (); - StorageDead(_13); - StorageDead(_11); - StorageDead(_9); StorageDead(_7); + StorageDead(_6); StorageDead(_5); + StorageDead(_4); StorageDead(_3); + StorageDead(_2); StorageDead(_1); return; } diff --git a/tests/mir-opt/const_prop/offset_of.generic.GVN.panic-abort.diff b/tests/mir-opt/const_prop/offset_of.generic.GVN.panic-abort.diff index 025241dd1bf96..130c31eff8ccc 100644 --- a/tests/mir-opt/const_prop/offset_of.generic.GVN.panic-abort.diff +++ b/tests/mir-opt/const_prop/offset_of.generic.GVN.panic-abort.diff @@ -4,33 +4,26 @@ fn generic() -> () { let mut _0: (); let _1: usize; - let mut _2: usize; - let mut _4: usize; - let mut _6: usize; - let mut _8: usize; - let mut _10: usize; - let mut _12: usize; - let mut _14: usize; scope 1 { debug gx => _1; - let _3: usize; + let _2: usize; scope 2 { - debug gy => _3; - let _5: usize; + debug gy => _2; + let _3: usize; scope 3 { - debug dx => _5; - let _7: usize; + debug dx => _3; + let _4: usize; scope 4 { - debug dy => _7; - let _9: usize; + debug dy => _4; + let _5: usize; scope 5 { - debug zA0 => _9; - let _11: usize; + debug zA0 => _5; + let _6: usize; scope 6 { - debug zA1 => _11; - let _13: usize; + debug zA1 => _6; + let _7: usize; scope 7 { - debug zB => _13; + debug zB => _7; } } } @@ 
-41,72 +34,28 @@ bb0: { StorageLive(_1); + _1 = OffsetOf(Gamma, [(0, 0)]); StorageLive(_2); - _2 = OffsetOf(Gamma, [(0, 0)]); - _1 = must_use::(move _2) -> [return: bb1, unwind unreachable]; - } - - bb1: { - StorageDead(_2); + _2 = OffsetOf(Gamma, [(0, 1)]); StorageLive(_3); +- _3 = OffsetOf(Delta, [(0, 1)]); ++ _3 = const 0_usize; StorageLive(_4); - _4 = OffsetOf(Gamma, [(0, 1)]); - _3 = must_use::(move _4) -> [return: bb2, unwind unreachable]; - } - - bb2: { - StorageDead(_4); +- _4 = OffsetOf(Delta, [(0, 2)]); ++ _4 = const 2_usize; StorageLive(_5); + _5 = OffsetOf(Zeta, [(0, 0)]); StorageLive(_6); -- _6 = OffsetOf(Delta, [(0, 1)]); -- _5 = must_use::(move _6) -> [return: bb3, unwind unreachable]; -+ _6 = const 0_usize; -+ _5 = must_use::(const 0_usize) -> [return: bb3, unwind unreachable]; - } - - bb3: { - StorageDead(_6); + _6 = OffsetOf(Zeta, [(0, 1)]); StorageLive(_7); - StorageLive(_8); -- _8 = OffsetOf(Delta, [(0, 2)]); -- _7 = must_use::(move _8) -> [return: bb4, unwind unreachable]; -+ _8 = const 2_usize; -+ _7 = must_use::(const 2_usize) -> [return: bb4, unwind unreachable]; - } - - bb4: { - StorageDead(_8); - StorageLive(_9); - StorageLive(_10); - _10 = OffsetOf(Zeta, [(0, 0)]); - _9 = must_use::(move _10) -> [return: bb5, unwind unreachable]; - } - - bb5: { - StorageDead(_10); - StorageLive(_11); - StorageLive(_12); - _12 = OffsetOf(Zeta, [(0, 1)]); - _11 = must_use::(move _12) -> [return: bb6, unwind unreachable]; - } - - bb6: { - StorageDead(_12); - StorageLive(_13); - StorageLive(_14); - _14 = OffsetOf(Zeta, [(1, 0)]); - _13 = must_use::(move _14) -> [return: bb7, unwind unreachable]; - } - - bb7: { - StorageDead(_14); + _7 = OffsetOf(Zeta, [(1, 0)]); _0 = const (); - StorageDead(_13); - StorageDead(_11); - StorageDead(_9); StorageDead(_7); + StorageDead(_6); StorageDead(_5); + StorageDead(_4); StorageDead(_3); + StorageDead(_2); StorageDead(_1); return; } diff --git a/tests/mir-opt/const_prop/offset_of.generic.GVN.panic-unwind.diff b/tests/mir-opt/const_prop/offset_of.generic.GVN.panic-unwind.diff index 27f2b2f7355ed..130c31eff8ccc 100644 --- a/tests/mir-opt/const_prop/offset_of.generic.GVN.panic-unwind.diff +++ b/tests/mir-opt/const_prop/offset_of.generic.GVN.panic-unwind.diff @@ -4,33 +4,26 @@ fn generic() -> () { let mut _0: (); let _1: usize; - let mut _2: usize; - let mut _4: usize; - let mut _6: usize; - let mut _8: usize; - let mut _10: usize; - let mut _12: usize; - let mut _14: usize; scope 1 { debug gx => _1; - let _3: usize; + let _2: usize; scope 2 { - debug gy => _3; - let _5: usize; + debug gy => _2; + let _3: usize; scope 3 { - debug dx => _5; - let _7: usize; + debug dx => _3; + let _4: usize; scope 4 { - debug dy => _7; - let _9: usize; + debug dy => _4; + let _5: usize; scope 5 { - debug zA0 => _9; - let _11: usize; + debug zA0 => _5; + let _6: usize; scope 6 { - debug zA1 => _11; - let _13: usize; + debug zA1 => _6; + let _7: usize; scope 7 { - debug zB => _13; + debug zB => _7; } } } @@ -41,72 +34,28 @@ bb0: { StorageLive(_1); + _1 = OffsetOf(Gamma, [(0, 0)]); StorageLive(_2); - _2 = OffsetOf(Gamma, [(0, 0)]); - _1 = must_use::(move _2) -> [return: bb1, unwind continue]; - } - - bb1: { - StorageDead(_2); + _2 = OffsetOf(Gamma, [(0, 1)]); StorageLive(_3); +- _3 = OffsetOf(Delta, [(0, 1)]); ++ _3 = const 0_usize; StorageLive(_4); - _4 = OffsetOf(Gamma, [(0, 1)]); - _3 = must_use::(move _4) -> [return: bb2, unwind continue]; - } - - bb2: { - StorageDead(_4); +- _4 = OffsetOf(Delta, [(0, 2)]); ++ _4 = const 2_usize; StorageLive(_5); + _5 = OffsetOf(Zeta, 
[(0, 0)]); StorageLive(_6); -- _6 = OffsetOf(Delta, [(0, 1)]); -- _5 = must_use::(move _6) -> [return: bb3, unwind continue]; -+ _6 = const 0_usize; -+ _5 = must_use::(const 0_usize) -> [return: bb3, unwind continue]; - } - - bb3: { - StorageDead(_6); + _6 = OffsetOf(Zeta, [(0, 1)]); StorageLive(_7); - StorageLive(_8); -- _8 = OffsetOf(Delta, [(0, 2)]); -- _7 = must_use::(move _8) -> [return: bb4, unwind continue]; -+ _8 = const 2_usize; -+ _7 = must_use::(const 2_usize) -> [return: bb4, unwind continue]; - } - - bb4: { - StorageDead(_8); - StorageLive(_9); - StorageLive(_10); - _10 = OffsetOf(Zeta, [(0, 0)]); - _9 = must_use::(move _10) -> [return: bb5, unwind continue]; - } - - bb5: { - StorageDead(_10); - StorageLive(_11); - StorageLive(_12); - _12 = OffsetOf(Zeta, [(0, 1)]); - _11 = must_use::(move _12) -> [return: bb6, unwind continue]; - } - - bb6: { - StorageDead(_12); - StorageLive(_13); - StorageLive(_14); - _14 = OffsetOf(Zeta, [(1, 0)]); - _13 = must_use::(move _14) -> [return: bb7, unwind continue]; - } - - bb7: { - StorageDead(_14); + _7 = OffsetOf(Zeta, [(1, 0)]); _0 = const (); - StorageDead(_13); - StorageDead(_11); - StorageDead(_9); StorageDead(_7); + StorageDead(_6); StorageDead(_5); + StorageDead(_4); StorageDead(_3); + StorageDead(_2); StorageDead(_1); return; } diff --git a/tests/mir-opt/dataflow-const-prop/offset_of.concrete.DataflowConstProp.panic-abort.diff b/tests/mir-opt/dataflow-const-prop/offset_of.concrete.DataflowConstProp.panic-abort.diff index f8f8917503370..92691d0f80703 100644 --- a/tests/mir-opt/dataflow-const-prop/offset_of.concrete.DataflowConstProp.panic-abort.diff +++ b/tests/mir-opt/dataflow-const-prop/offset_of.concrete.DataflowConstProp.panic-abort.diff @@ -4,21 +4,17 @@ fn concrete() -> () { let mut _0: (); let _1: usize; - let mut _2: usize; - let mut _4: usize; - let mut _6: usize; - let mut _8: usize; scope 1 { debug x => _1; - let _3: usize; + let _2: usize; scope 2 { - debug y => _3; - let _5: usize; + debug y => _2; + let _3: usize; scope 3 { - debug z0 => _5; - let _7: usize; + debug z0 => _3; + let _4: usize; scope 4 { - debug z1 => _7; + debug z1 => _4; } } } @@ -26,49 +22,21 @@ bb0: { StorageLive(_1); +- _1 = OffsetOf(Alpha, [(0, 0)]); ++ _1 = const 4_usize; StorageLive(_2); -- _2 = OffsetOf(Alpha, [(0, 0)]); -- _1 = must_use::(move _2) -> [return: bb1, unwind unreachable]; -+ _2 = const 4_usize; -+ _1 = must_use::(const 4_usize) -> [return: bb1, unwind unreachable]; - } - - bb1: { - StorageDead(_2); +- _2 = OffsetOf(Alpha, [(0, 1)]); ++ _2 = const 0_usize; StorageLive(_3); +- _3 = OffsetOf(Alpha, [(0, 2), (0, 0)]); ++ _3 = const 2_usize; StorageLive(_4); -- _4 = OffsetOf(Alpha, [(0, 1)]); -- _3 = must_use::(move _4) -> [return: bb2, unwind unreachable]; -+ _4 = const 0_usize; -+ _3 = must_use::(const 0_usize) -> [return: bb2, unwind unreachable]; - } - - bb2: { - StorageDead(_4); - StorageLive(_5); - StorageLive(_6); -- _6 = OffsetOf(Alpha, [(0, 2), (0, 0)]); -- _5 = must_use::(move _6) -> [return: bb3, unwind unreachable]; -+ _6 = const 2_usize; -+ _5 = must_use::(const 2_usize) -> [return: bb3, unwind unreachable]; - } - - bb3: { - StorageDead(_6); - StorageLive(_7); - StorageLive(_8); -- _8 = OffsetOf(Alpha, [(0, 2), (0, 1)]); -- _7 = must_use::(move _8) -> [return: bb4, unwind unreachable]; -+ _8 = const 3_usize; -+ _7 = must_use::(const 3_usize) -> [return: bb4, unwind unreachable]; - } - - bb4: { - StorageDead(_8); +- _4 = OffsetOf(Alpha, [(0, 2), (0, 1)]); ++ _4 = const 3_usize; _0 = const (); - StorageDead(_7); - 
StorageDead(_5); + StorageDead(_4); StorageDead(_3); + StorageDead(_2); StorageDead(_1); return; } diff --git a/tests/mir-opt/dataflow-const-prop/offset_of.concrete.DataflowConstProp.panic-unwind.diff b/tests/mir-opt/dataflow-const-prop/offset_of.concrete.DataflowConstProp.panic-unwind.diff index d4f8cb66704ae..92691d0f80703 100644 --- a/tests/mir-opt/dataflow-const-prop/offset_of.concrete.DataflowConstProp.panic-unwind.diff +++ b/tests/mir-opt/dataflow-const-prop/offset_of.concrete.DataflowConstProp.panic-unwind.diff @@ -4,21 +4,17 @@ fn concrete() -> () { let mut _0: (); let _1: usize; - let mut _2: usize; - let mut _4: usize; - let mut _6: usize; - let mut _8: usize; scope 1 { debug x => _1; - let _3: usize; + let _2: usize; scope 2 { - debug y => _3; - let _5: usize; + debug y => _2; + let _3: usize; scope 3 { - debug z0 => _5; - let _7: usize; + debug z0 => _3; + let _4: usize; scope 4 { - debug z1 => _7; + debug z1 => _4; } } } @@ -26,49 +22,21 @@ bb0: { StorageLive(_1); +- _1 = OffsetOf(Alpha, [(0, 0)]); ++ _1 = const 4_usize; StorageLive(_2); -- _2 = OffsetOf(Alpha, [(0, 0)]); -- _1 = must_use::(move _2) -> [return: bb1, unwind continue]; -+ _2 = const 4_usize; -+ _1 = must_use::(const 4_usize) -> [return: bb1, unwind continue]; - } - - bb1: { - StorageDead(_2); +- _2 = OffsetOf(Alpha, [(0, 1)]); ++ _2 = const 0_usize; StorageLive(_3); +- _3 = OffsetOf(Alpha, [(0, 2), (0, 0)]); ++ _3 = const 2_usize; StorageLive(_4); -- _4 = OffsetOf(Alpha, [(0, 1)]); -- _3 = must_use::(move _4) -> [return: bb2, unwind continue]; -+ _4 = const 0_usize; -+ _3 = must_use::(const 0_usize) -> [return: bb2, unwind continue]; - } - - bb2: { - StorageDead(_4); - StorageLive(_5); - StorageLive(_6); -- _6 = OffsetOf(Alpha, [(0, 2), (0, 0)]); -- _5 = must_use::(move _6) -> [return: bb3, unwind continue]; -+ _6 = const 2_usize; -+ _5 = must_use::(const 2_usize) -> [return: bb3, unwind continue]; - } - - bb3: { - StorageDead(_6); - StorageLive(_7); - StorageLive(_8); -- _8 = OffsetOf(Alpha, [(0, 2), (0, 1)]); -- _7 = must_use::(move _8) -> [return: bb4, unwind continue]; -+ _8 = const 3_usize; -+ _7 = must_use::(const 3_usize) -> [return: bb4, unwind continue]; - } - - bb4: { - StorageDead(_8); +- _4 = OffsetOf(Alpha, [(0, 2), (0, 1)]); ++ _4 = const 3_usize; _0 = const (); - StorageDead(_7); - StorageDead(_5); + StorageDead(_4); StorageDead(_3); + StorageDead(_2); StorageDead(_1); return; } diff --git a/tests/mir-opt/dataflow-const-prop/offset_of.generic.DataflowConstProp.panic-abort.diff b/tests/mir-opt/dataflow-const-prop/offset_of.generic.DataflowConstProp.panic-abort.diff index 7f166e4fa356a..c6908166defb8 100644 --- a/tests/mir-opt/dataflow-const-prop/offset_of.generic.DataflowConstProp.panic-abort.diff +++ b/tests/mir-opt/dataflow-const-prop/offset_of.generic.DataflowConstProp.panic-abort.diff @@ -4,21 +4,17 @@ fn generic() -> () { let mut _0: (); let _1: usize; - let mut _2: usize; - let mut _4: usize; - let mut _6: usize; - let mut _8: usize; scope 1 { debug gx => _1; - let _3: usize; + let _2: usize; scope 2 { - debug gy => _3; - let _5: usize; + debug gy => _2; + let _3: usize; scope 3 { - debug dx => _5; - let _7: usize; + debug dx => _3; + let _4: usize; scope 4 { - debug dy => _7; + debug dy => _4; } } } @@ -26,45 +22,19 @@ bb0: { StorageLive(_1); + _1 = OffsetOf(Gamma, [(0, 0)]); StorageLive(_2); - _2 = OffsetOf(Gamma, [(0, 0)]); - _1 = must_use::(move _2) -> [return: bb1, unwind unreachable]; - } - - bb1: { - StorageDead(_2); + _2 = OffsetOf(Gamma, [(0, 1)]); StorageLive(_3); +- _3 = 
OffsetOf(Delta, [(0, 1)]); ++ _3 = const 0_usize; StorageLive(_4); - _4 = OffsetOf(Gamma, [(0, 1)]); - _3 = must_use::(move _4) -> [return: bb2, unwind unreachable]; - } - - bb2: { - StorageDead(_4); - StorageLive(_5); - StorageLive(_6); -- _6 = OffsetOf(Delta, [(0, 1)]); -- _5 = must_use::(move _6) -> [return: bb3, unwind unreachable]; -+ _6 = const 0_usize; -+ _5 = must_use::(const 0_usize) -> [return: bb3, unwind unreachable]; - } - - bb3: { - StorageDead(_6); - StorageLive(_7); - StorageLive(_8); -- _8 = OffsetOf(Delta, [(0, 2)]); -- _7 = must_use::(move _8) -> [return: bb4, unwind unreachable]; -+ _8 = const 2_usize; -+ _7 = must_use::(const 2_usize) -> [return: bb4, unwind unreachable]; - } - - bb4: { - StorageDead(_8); +- _4 = OffsetOf(Delta, [(0, 2)]); ++ _4 = const 2_usize; _0 = const (); - StorageDead(_7); - StorageDead(_5); + StorageDead(_4); StorageDead(_3); + StorageDead(_2); StorageDead(_1); return; } diff --git a/tests/mir-opt/dataflow-const-prop/offset_of.generic.DataflowConstProp.panic-unwind.diff b/tests/mir-opt/dataflow-const-prop/offset_of.generic.DataflowConstProp.panic-unwind.diff index 38ad6f7980160..c6908166defb8 100644 --- a/tests/mir-opt/dataflow-const-prop/offset_of.generic.DataflowConstProp.panic-unwind.diff +++ b/tests/mir-opt/dataflow-const-prop/offset_of.generic.DataflowConstProp.panic-unwind.diff @@ -4,21 +4,17 @@ fn generic() -> () { let mut _0: (); let _1: usize; - let mut _2: usize; - let mut _4: usize; - let mut _6: usize; - let mut _8: usize; scope 1 { debug gx => _1; - let _3: usize; + let _2: usize; scope 2 { - debug gy => _3; - let _5: usize; + debug gy => _2; + let _3: usize; scope 3 { - debug dx => _5; - let _7: usize; + debug dx => _3; + let _4: usize; scope 4 { - debug dy => _7; + debug dy => _4; } } } @@ -26,45 +22,19 @@ bb0: { StorageLive(_1); + _1 = OffsetOf(Gamma, [(0, 0)]); StorageLive(_2); - _2 = OffsetOf(Gamma, [(0, 0)]); - _1 = must_use::(move _2) -> [return: bb1, unwind continue]; - } - - bb1: { - StorageDead(_2); + _2 = OffsetOf(Gamma, [(0, 1)]); StorageLive(_3); +- _3 = OffsetOf(Delta, [(0, 1)]); ++ _3 = const 0_usize; StorageLive(_4); - _4 = OffsetOf(Gamma, [(0, 1)]); - _3 = must_use::(move _4) -> [return: bb2, unwind continue]; - } - - bb2: { - StorageDead(_4); - StorageLive(_5); - StorageLive(_6); -- _6 = OffsetOf(Delta, [(0, 1)]); -- _5 = must_use::(move _6) -> [return: bb3, unwind continue]; -+ _6 = const 0_usize; -+ _5 = must_use::(const 0_usize) -> [return: bb3, unwind continue]; - } - - bb3: { - StorageDead(_6); - StorageLive(_7); - StorageLive(_8); -- _8 = OffsetOf(Delta, [(0, 2)]); -- _7 = must_use::(move _8) -> [return: bb4, unwind continue]; -+ _8 = const 2_usize; -+ _7 = must_use::(const 2_usize) -> [return: bb4, unwind continue]; - } - - bb4: { - StorageDead(_8); +- _4 = OffsetOf(Delta, [(0, 2)]); ++ _4 = const 2_usize; _0 = const (); - StorageDead(_7); - StorageDead(_5); + StorageDead(_4); StorageDead(_3); + StorageDead(_2); StorageDead(_1); return; } diff --git a/tests/mir-opt/dataflow-const-prop/offset_of.rs b/tests/mir-opt/dataflow-const-prop/offset_of.rs index cd4e1f6990dbb..12396b31ed0e9 100644 --- a/tests/mir-opt/dataflow-const-prop/offset_of.rs +++ b/tests/mir-opt/dataflow-const-prop/offset_of.rs @@ -36,16 +36,16 @@ fn concrete() { // CHECK: debug z0 => [[z0:_.*]]; // CHECK: debug z1 => [[z1:_.*]]; - // CHECK: [[x]] = must_use::(const 4_usize) -> {{.*}} + // CHECK: [[x]] = const 4_usize let x = offset_of!(Alpha, x); - // CHECK: [[y]] = must_use::(const 0_usize) -> {{.*}} + // CHECK: [[y]] = const 0_usize let y = 
offset_of!(Alpha, y); - // CHECK: [[z0]] = must_use::(const 2_usize) -> {{.*}} + // CHECK: [[z0]] = const 2_usize let z0 = offset_of!(Alpha, z.0); - // CHECK: [[z1]] = must_use::(const 3_usize) -> {{.*}} + // CHECK: [[z1]] = const 3_usize let z1 = offset_of!(Alpha, z.1); } @@ -58,16 +58,16 @@ fn generic() { // CHECK: debug dx => [[dx:_.*]]; // CHECK: debug dy => [[dy:_.*]]; - // CHECK: [[gx]] = must_use::(move {{_.*}}) -> {{.*}} + // CHECK: [[gx]] = OffsetOf(Gamma, [(0, 0)]); let gx = offset_of!(Gamma, x); - // CHECK: [[gy]] = must_use::(move {{_.*}}) -> {{.*}} + // CHECK: [[gy]] = OffsetOf(Gamma, [(0, 1)]); let gy = offset_of!(Gamma, y); - // CHECK: [[dx]] = must_use::(const 0_usize) -> {{.*}} + // CHECK: [[dx]] = const 0_usize let dx = offset_of!(Delta, x); - // CHECK: [[dy]] = must_use::(const 2_usize) -> {{.*}} + // CHECK: [[dy]] = const 2_usize let dy = offset_of!(Delta, y); } diff --git a/tests/ui/offset-of/offset-of-self.rs b/tests/ui/offset-of/offset-of-self.rs index 51b1a0b242733..1558e13b53095 100644 --- a/tests/ui/offset-of/offset-of-self.rs +++ b/tests/ui/offset-of/offset-of-self.rs @@ -23,9 +23,8 @@ impl S { fn offs_in_c() -> usize { offset_of!(C, w) } - // Put offset_of in a slice - test #124478. - fn offs_in_c_colon() -> &'static [usize] { - &[offset_of!(C::, w)] + fn offs_in_c_colon() -> usize { + offset_of!(C::, w) } } diff --git a/tests/ui/offset-of/offset-of-self.stderr b/tests/ui/offset-of/offset-of-self.stderr index a2104a438ad8d..7c7576e066b6f 100644 --- a/tests/ui/offset-of/offset-of-self.stderr +++ b/tests/ui/offset-of/offset-of-self.stderr @@ -5,7 +5,7 @@ LL | offset_of!(Self, Self::v); | ^^^^^^^ error[E0412]: cannot find type `S` in module `self` - --> $DIR/offset-of-self.rs:35:26 + --> $DIR/offset-of-self.rs:34:26 | LL | offset_of!(self::S, v); | ^ not found in `self` @@ -21,7 +21,7 @@ LL + offset_of!(S, v); | error[E0411]: cannot find type `Self` in this scope - --> $DIR/offset-of-self.rs:52:16 + --> $DIR/offset-of-self.rs:51:16 | LL | fn main() { | ---- `Self` not allowed in a function @@ -38,13 +38,13 @@ LL | offset_of!(S, Self); = note: available fields are: `v`, `w` error[E0616]: field `v` of struct `T` is private - --> $DIR/offset-of-self.rs:41:30 + --> $DIR/offset-of-self.rs:40:30 | LL | offset_of!(Self, v) | ^ private field error[E0609]: no field `self` on type `S` - --> $DIR/offset-of-self.rs:54:19 + --> $DIR/offset-of-self.rs:53:19 | LL | offset_of!(S, self); | ^^^^ @@ -52,7 +52,7 @@ LL | offset_of!(S, self); = note: available fields are: `v`, `w` error[E0609]: no field `self` on type `u8` - --> $DIR/offset-of-self.rs:55:21 + --> $DIR/offset-of-self.rs:54:21 | LL | offset_of!(S, v.self); | ^^^^ diff --git a/tests/ui/offset-of/offset-of-temporaries.rs b/tests/ui/offset-of/offset-of-temporaries.rs new file mode 100644 index 0000000000000..951a8ee2b5039 --- /dev/null +++ b/tests/ui/offset-of/offset-of-temporaries.rs @@ -0,0 +1,23 @@ +//@ build-pass + +//! Regression test #124478. 
+ +use std::mem::offset_of; + +struct S { + v: u8, + w: u16, +} + +impl S { + fn return_static_slice() -> &'static [usize] { + &[offset_of!(Self, v), offset_of!(Self, w)] + } + fn use_reference() -> usize { + let r = &offset_of!(Self, v); + *r * 6 + } +} + +fn main() { +} From 8aa3c5975253c5795e2f20e200f6d45b0e18de7f Mon Sep 17 00:00:00 2001 From: George Bateman Date: Sun, 28 Apr 2024 21:38:55 +0100 Subject: [PATCH 20/56] Move rustfmt changes out Now in https://github.com/rust-lang/rustfmt/pull/6154 --- Cargo.lock | 1 + src/tools/rustfmt/Cargo.toml | 1 + src/tools/rustfmt/src/comment.rs | 20 ++++++++++++----- src/tools/rustfmt/src/lib.rs | 10 +++------ .../rustfmt/src/test/configuration_snippet.rs | 22 +++++++++++++------ 5 files changed, 34 insertions(+), 20 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 233f4af1e24bc..900c12f1641b6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4884,6 +4884,7 @@ dependencies = [ "getopts", "ignore", "itertools 0.11.0", + "lazy_static", "regex", "rustfmt-config_proc_macro", "serde", diff --git a/src/tools/rustfmt/Cargo.toml b/src/tools/rustfmt/Cargo.toml index b18c80654a077..032b9b548104b 100644 --- a/src/tools/rustfmt/Cargo.toml +++ b/src/tools/rustfmt/Cargo.toml @@ -44,6 +44,7 @@ dirs = "4.0" getopts = "0.2" ignore = "0.4" itertools = "0.11" +lazy_static = "1.4" regex = "1.7" serde = { version = "1.0.160", features = ["derive"] } serde_json = "1.0" diff --git a/src/tools/rustfmt/src/comment.rs b/src/tools/rustfmt/src/comment.rs index 099e12f86dd26..7d1b0384431be 100644 --- a/src/tools/rustfmt/src/comment.rs +++ b/src/tools/rustfmt/src/comment.rs @@ -3,6 +3,8 @@ use std::{borrow::Cow, iter}; use itertools::{multipeek, MultiPeek}; +use lazy_static::lazy_static; +use regex::Regex; use rustc_span::Span; use crate::config::Config; @@ -15,6 +17,17 @@ use crate::utils::{ }; use crate::{ErrorKind, FormattingError}; +lazy_static! { + /// A regex matching reference doc links. + /// + /// ```markdown + /// /// An [example]. + /// /// + /// /// [example]: this::is::a::link + /// ``` + static ref REFERENCE_LINK_URL: Regex = Regex::new(r"^\[.+\]\s?:").unwrap(); +} + fn is_custom_comment(comment: &str) -> bool { if !comment.starts_with("//") { false @@ -967,16 +980,11 @@ fn trim_custom_comment_prefix(s: &str) -> String { /// Returns `true` if the given string MAY include URLs or alike. fn has_url(s: &str) -> bool { // This function may return false positive, but should get its job done in most cases. - // The regex is indended to capture text such as the below. - // - // /// An [example]. - // /// - // /// [example]: this::is::a::link s.contains("https://") || s.contains("http://") || s.contains("ftp://") || s.contains("file://") - || static_regex!(r"^\[.+\]\s?:").is_match(s) + || REFERENCE_LINK_URL.is_match(s) } /// Returns true if the given string may be part of a Markdown table. diff --git a/src/tools/rustfmt/src/lib.rs b/src/tools/rustfmt/src/lib.rs index e07720f30ca5c..877d057a34baa 100644 --- a/src/tools/rustfmt/src/lib.rs +++ b/src/tools/rustfmt/src/lib.rs @@ -5,6 +5,9 @@ #![allow(clippy::match_like_matches_macro)] #![allow(unreachable_pub)] +#[cfg(test)] +#[macro_use] +extern crate lazy_static; #[macro_use] extern crate tracing; @@ -59,13 +62,6 @@ pub use crate::rustfmt_diff::{ModifiedChunk, ModifiedLines}; #[macro_use] mod utils; -macro_rules! 
static_regex { - ($re:literal) => {{ - static RE: ::std::sync::OnceLock<::regex::Regex> = ::std::sync::OnceLock::new(); - RE.get_or_init(|| ::regex::Regex::new($re).unwrap()) - }}; -} - mod attr; mod chains; mod closures; diff --git a/src/tools/rustfmt/src/test/configuration_snippet.rs b/src/tools/rustfmt/src/test/configuration_snippet.rs index a195b306da386..80b61c88a001b 100644 --- a/src/tools/rustfmt/src/test/configuration_snippet.rs +++ b/src/tools/rustfmt/src/test/configuration_snippet.rs @@ -24,6 +24,19 @@ impl ConfigurationSection { fn get_section>( file: &mut Enumerate, ) -> Option { + lazy_static! { + static ref CONFIG_NAME_REGEX: regex::Regex = + regex::Regex::new(r"^## `([^`]+)`").expect("failed creating configuration pattern"); + // Configuration values, which will be passed to `from_str`: + // + // - must be prefixed with `####` + // - must be wrapped in backticks + // - may by wrapped in double quotes (which will be stripped) + static ref CONFIG_VALUE_REGEX: regex::Regex = + regex::Regex::new(r#"^#### `"?([^`]+?)"?`"#) + .expect("failed creating configuration value pattern"); + } + loop { match file.next() { Some((i, line)) => { @@ -40,14 +53,9 @@ impl ConfigurationSection { let start_line = (i + 2) as u32; return Some(ConfigurationSection::CodeBlock((block, start_line))); - } else if let Some(c) = static_regex!(r"^## `([^`]+)`").captures(&line) { + } else if let Some(c) = CONFIG_NAME_REGEX.captures(&line) { return Some(ConfigurationSection::ConfigName(String::from(&c[1]))); - } else if let Some(c) = static_regex!(r#"^#### `"?([^`]+?)"?`"#).captures(&line) { - // Configuration values, which will be passed to `from_str` - // - // - must be prefixed with `####` - // - must be wrapped in backticks - // - may by wrapped in double quotes (which will be stripped) + } else if let Some(c) = CONFIG_VALUE_REGEX.captures(&line) { return Some(ConfigurationSection::ConfigValue(String::from(&c[1]))); } } From e0f8202ed5828670ac5125d3db2adbfb516cca3d Mon Sep 17 00:00:00 2001 From: Trevor Gross Date: Sun, 28 Apr 2024 15:27:36 -0500 Subject: [PATCH 21/56] Stabilize `non_null_convenience` Fully stabilize the following API, including const where applicable: impl NonNull { pub const unsafe fn offset(self, count: isize) -> Self; pub const unsafe fn add(self, count: usize) -> Self; pub const unsafe fn sub(self, count: usize) -> Self; pub const unsafe fn offset_from(self, origin: NonNull) -> isize; pub const unsafe fn read(self) -> T; pub unsafe fn read_volatile(self) -> T; pub const unsafe fn read_unaligned(self) -> T; pub unsafe fn write_volatile(self, val: T); pub unsafe fn replace(self, src: T) -> T; } impl NonNull { pub const unsafe fn byte_offset(self, count: isize) -> Self; pub const unsafe fn byte_add(self, count: usize) -> Self; pub const unsafe fn byte_sub(self, count: usize) -> Self; pub const unsafe fn byte_offset_from(self, origin: NonNull) -> isize; pub unsafe fn drop_in_place(self); } Stabilize the following without const: impl NonNull { // const under `const_intrinsic_copy` pub const unsafe fn copy_to(self, dest: NonNull, count: usize); pub const unsafe fn copy_to_nonoverlapping(self, dest: NonNull, count: usize); pub const unsafe fn copy_from(self, src: NonNull, count: usize); pub const unsafe fn copy_from_nonoverlapping(self, src: NonNull, count: usize); // const under `const_ptr_write` pub const unsafe fn write(self, val: T); pub const unsafe fn write_bytes(self, val: u8, count: usize); pub const unsafe fn write_unaligned(self, val: T); // const under `const_swap` pub const 
unsafe fn swap(self, with: NonNull); // const under `const_align_offset` pub const fn align_offset(self, align: usize) -> usize; // const under `const_pointer_is_aligned` pub const fn is_aligned(self) -> bool; } Left the following unstable: impl NonNull { // moved gate to `ptr_sub_ptr` pub const unsafe fn sub_ptr(self, subtracted: NonNull) -> usize; } impl NonNull { // moved gate to `pointer_is_aligned_to` pub const fn is_aligned_to(self, align: usize) -> bool; } Fixes: https://github.com/rust-lang/rust/issues/117691 --- library/alloc/src/lib.rs | 1 - library/core/src/lib.rs | 1 - library/core/src/ptr/non_null.rs | 140 ++++++++++++++----------------- 3 files changed, 63 insertions(+), 79 deletions(-) diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs index 2bd8fca7e01b7..4e8792f6fb990 100644 --- a/library/alloc/src/lib.rs +++ b/library/alloc/src/lib.rs @@ -137,7 +137,6 @@ #![feature(maybe_uninit_slice)] #![feature(maybe_uninit_uninit_array)] #![feature(maybe_uninit_uninit_array_transpose)] -#![feature(non_null_convenience)] #![feature(panic_internals)] #![feature(pattern)] #![feature(ptr_internals)] diff --git a/library/core/src/lib.rs b/library/core/src/lib.rs index 19e0ce7099d1e..cee3870c6292f 100644 --- a/library/core/src/lib.rs +++ b/library/core/src/lib.rs @@ -180,7 +180,6 @@ #![feature(isqrt)] #![feature(link_cfg)] #![feature(maybe_uninit_uninit_array)] -#![feature(non_null_convenience)] #![feature(offset_of_enum)] #![feature(offset_of_nested)] #![feature(panic_internals)] diff --git a/library/core/src/ptr/non_null.rs b/library/core/src/ptr/non_null.rs index 96ce3cd3a3fb5..8697b22278f83 100644 --- a/library/core/src/ptr/non_null.rs +++ b/library/core/src/ptr/non_null.rs @@ -512,7 +512,6 @@ impl NonNull { /// # Examples /// /// ``` - /// #![feature(non_null_convenience)] /// use std::ptr::NonNull; /// /// let mut s = [1, 2, 3]; @@ -523,12 +522,12 @@ impl NonNull { /// println!("{}", ptr.offset(2).read()); /// } /// ``` - #[unstable(feature = "non_null_convenience", issue = "117691")] - #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] - #[must_use = "returns a new pointer rather than modifying its argument"] #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces - pub const unsafe fn offset(self, count: isize) -> NonNull + #[must_use = "returns a new pointer rather than modifying its argument"] + #[stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] + pub const unsafe fn offset(self, count: isize) -> Self where T: Sized, { @@ -549,11 +548,11 @@ impl NonNull { /// /// For non-`Sized` pointees this operation changes only the data pointer, /// leaving the metadata untouched. - #[unstable(feature = "non_null_convenience", issue = "117691")] - #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] #[must_use] #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + #[stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] pub const unsafe fn byte_offset(self, count: isize) -> Self { // SAFETY: the caller must uphold the safety contract for `offset` and `byte_offset` has // the same safety contract. 
@@ -599,7 +598,6 @@ impl NonNull { /// # Examples /// /// ``` - /// #![feature(non_null_convenience)] /// use std::ptr::NonNull; /// /// let s: &str = "123"; @@ -610,11 +608,11 @@ impl NonNull { /// println!("{}", ptr.add(2).read() as char); /// } /// ``` - #[unstable(feature = "non_null_convenience", issue = "117691")] - #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] - #[must_use = "returns a new pointer rather than modifying its argument"] #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + #[must_use = "returns a new pointer rather than modifying its argument"] + #[stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] pub const unsafe fn add(self, count: usize) -> Self where T: Sized, @@ -636,12 +634,12 @@ impl NonNull { /// /// For non-`Sized` pointees this operation changes only the data pointer, /// leaving the metadata untouched. - #[unstable(feature = "non_null_convenience", issue = "117691")] - #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] #[must_use] #[inline(always)] - #[rustc_allow_const_fn_unstable(set_ptr_value)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + #[rustc_allow_const_fn_unstable(set_ptr_value)] + #[stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] pub const unsafe fn byte_add(self, count: usize) -> Self { // SAFETY: the caller must uphold the safety contract for `add` and `byte_add` has the same // safety contract. @@ -688,7 +686,6 @@ impl NonNull { /// # Examples /// /// ``` - /// #![feature(non_null_convenience)] /// use std::ptr::NonNull; /// /// let s: &str = "123"; @@ -699,11 +696,11 @@ impl NonNull { /// println!("{}", end.sub(2).read() as char); /// } /// ``` - #[unstable(feature = "non_null_convenience", issue = "117691")] - #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] - #[must_use = "returns a new pointer rather than modifying its argument"] #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + #[must_use = "returns a new pointer rather than modifying its argument"] + #[stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] pub const unsafe fn sub(self, count: usize) -> Self where T: Sized, @@ -730,12 +727,12 @@ impl NonNull { /// /// For non-`Sized` pointees this operation changes only the data pointer, /// leaving the metadata untouched. - #[unstable(feature = "non_null_convenience", issue = "117691")] - #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] #[must_use] #[inline(always)] - #[rustc_allow_const_fn_unstable(set_ptr_value)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + #[rustc_allow_const_fn_unstable(set_ptr_value)] + #[stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] pub const unsafe fn byte_sub(self, count: usize) -> Self { // SAFETY: the caller must uphold the safety contract for `sub` and `byte_sub` has the same // safety contract. 
@@ -816,7 +813,6 @@ impl NonNull { /// Basic usage: /// /// ``` - /// #![feature(non_null_convenience)] /// use std::ptr::NonNull; /// /// let a = [0; 5]; @@ -833,7 +829,7 @@ impl NonNull { /// *Incorrect* usage: /// /// ```rust,no_run - /// #![feature(non_null_convenience, strict_provenance)] + /// #![feature(strict_provenance)] /// use std::ptr::NonNull; /// /// let ptr1 = NonNull::new(Box::into_raw(Box::new(0u8))).unwrap(); @@ -845,14 +841,13 @@ impl NonNull { /// // Since ptr2_other and ptr2 are derived from pointers to different objects, /// // computing their offset is undefined behavior, even though /// // they point to the same address! - /// unsafe { - /// let zero = ptr2_other.offset_from(ptr2); // Undefined Behavior - /// } + /// + /// let zero = unsafe { ptr2_other.offset_from(ptr2) }; // Undefined Behavior /// ``` - #[unstable(feature = "non_null_convenience", issue = "117691")] - #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] #[inline] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + #[stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] pub const unsafe fn offset_from(self, origin: NonNull) -> isize where T: Sized, @@ -870,10 +865,10 @@ impl NonNull { /// /// For non-`Sized` pointees this operation considers only the data pointers, /// ignoring the metadata. - #[unstable(feature = "non_null_convenience", issue = "117691")] - #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + #[stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] pub const unsafe fn byte_offset_from(self, origin: NonNull) -> isize { // SAFETY: the caller must uphold the safety contract for `byte_offset_from`. unsafe { self.pointer.byte_offset_from(origin.pointer) } @@ -897,7 +892,7 @@ impl NonNull { /// to [`sub`](#method.sub)). The following are all equivalent, assuming /// that their safety preconditions are met: /// ```rust - /// # #![feature(non_null_convenience)] + /// # #![feature(ptr_sub_ptr)] /// # unsafe fn blah(ptr: std::ptr::NonNull, origin: std::ptr::NonNull, count: usize) -> bool { /// ptr.sub_ptr(origin) == count /// # && @@ -926,7 +921,7 @@ impl NonNull { /// # Examples /// /// ``` - /// #![feature(non_null_convenience)] + /// #![feature(ptr_sub_ptr)] /// use std::ptr::NonNull; /// /// let a = [0; 5]; @@ -942,12 +937,10 @@ impl NonNull { /// // This would be incorrect, as the pointers are not correctly ordered: /// // ptr1.sub_ptr(ptr2) /// ``` - #[unstable(feature = "non_null_convenience", issue = "117691")] - #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] - // #[unstable(feature = "ptr_sub_ptr", issue = "95892")] - // #[rustc_const_unstable(feature = "const_ptr_sub_ptr", issue = "95892")] #[inline] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + #[unstable(feature = "ptr_sub_ptr", issue = "95892")] + #[rustc_const_unstable(feature = "const_ptr_sub_ptr", issue = "95892")] pub const unsafe fn sub_ptr(self, subtracted: NonNull) -> usize where T: Sized, @@ -962,10 +955,10 @@ impl NonNull { /// See [`ptr::read`] for safety concerns and examples. 
/// /// [`ptr::read`]: crate::ptr::read() - #[unstable(feature = "non_null_convenience", issue = "117691")] - #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] #[inline] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + #[stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] pub const unsafe fn read(self) -> T where T: Sized, @@ -984,9 +977,9 @@ impl NonNull { /// See [`ptr::read_volatile`] for safety concerns and examples. /// /// [`ptr::read_volatile`]: crate::ptr::read_volatile() - #[unstable(feature = "non_null_convenience", issue = "117691")] #[inline] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + #[stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] pub unsafe fn read_volatile(self) -> T where T: Sized, @@ -1003,10 +996,10 @@ impl NonNull { /// See [`ptr::read_unaligned`] for safety concerns and examples. /// /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned() - #[unstable(feature = "non_null_convenience", issue = "117691")] - #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] #[inline] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + #[stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] pub const unsafe fn read_unaligned(self) -> T where T: Sized, @@ -1023,10 +1016,10 @@ impl NonNull { /// See [`ptr::copy`] for safety concerns and examples. /// /// [`ptr::copy`]: crate::ptr::copy() - #[unstable(feature = "non_null_convenience", issue = "117691")] - #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + #[stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_unstable(feature = "const_intrinsic_copy", issue = "80697")] pub const unsafe fn copy_to(self, dest: NonNull, count: usize) where T: Sized, @@ -1043,10 +1036,10 @@ impl NonNull { /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples. /// /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping() - #[unstable(feature = "non_null_convenience", issue = "117691")] - #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + #[stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_unstable(feature = "const_intrinsic_copy", issue = "80697")] pub const unsafe fn copy_to_nonoverlapping(self, dest: NonNull, count: usize) where T: Sized, @@ -1063,10 +1056,10 @@ impl NonNull { /// See [`ptr::copy`] for safety concerns and examples. 
/// /// [`ptr::copy`]: crate::ptr::copy() - #[unstable(feature = "non_null_convenience", issue = "117691")] - #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + #[stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_unstable(feature = "const_intrinsic_copy", issue = "80697")] pub const unsafe fn copy_from(self, src: NonNull, count: usize) where T: Sized, @@ -1083,10 +1076,10 @@ impl NonNull { /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples. /// /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping() - #[unstable(feature = "non_null_convenience", issue = "117691")] - #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + #[stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_unstable(feature = "const_intrinsic_copy", issue = "80697")] pub const unsafe fn copy_from_nonoverlapping(self, src: NonNull, count: usize) where T: Sized, @@ -1100,8 +1093,8 @@ impl NonNull { /// See [`ptr::drop_in_place`] for safety concerns and examples. /// /// [`ptr::drop_in_place`]: crate::ptr::drop_in_place() - #[unstable(feature = "non_null_convenience", issue = "117691")] #[inline(always)] + #[stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] pub unsafe fn drop_in_place(self) { // SAFETY: the caller must uphold the safety contract for `drop_in_place`. unsafe { ptr::drop_in_place(self.as_ptr()) } @@ -1113,11 +1106,10 @@ impl NonNull { /// See [`ptr::write`] for safety concerns and examples. /// /// [`ptr::write`]: crate::ptr::write() - #[unstable(feature = "non_null_convenience", issue = "117691")] - #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] - //#[rustc_const_unstable(feature = "const_ptr_write", issue = "86302")] #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + #[stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_unstable(feature = "const_ptr_write", issue = "86302")] pub const unsafe fn write(self, val: T) where T: Sized, @@ -1132,12 +1124,11 @@ impl NonNull { /// See [`ptr::write_bytes`] for safety concerns and examples. /// /// [`ptr::write_bytes`]: crate::ptr::write_bytes() - #[doc(alias = "memset")] - #[unstable(feature = "non_null_convenience", issue = "117691")] - #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] - //#[rustc_const_unstable(feature = "const_ptr_write", issue = "86302")] #[inline(always)] + #[doc(alias = "memset")] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + #[stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_unstable(feature = "const_ptr_write", issue = "86302")] pub const unsafe fn write_bytes(self, val: u8, count: usize) where T: Sized, @@ -1156,9 +1147,9 @@ impl NonNull { /// See [`ptr::write_volatile`] for safety concerns and examples. 
/// /// [`ptr::write_volatile`]: crate::ptr::write_volatile() - #[unstable(feature = "non_null_convenience", issue = "117691")] #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + #[stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] pub unsafe fn write_volatile(self, val: T) where T: Sized, @@ -1175,11 +1166,10 @@ impl NonNull { /// See [`ptr::write_unaligned`] for safety concerns and examples. /// /// [`ptr::write_unaligned`]: crate::ptr::write_unaligned() - #[unstable(feature = "non_null_convenience", issue = "117691")] - #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] - //#[rustc_const_unstable(feature = "const_ptr_write", issue = "86302")] #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + #[stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_unstable(feature = "const_ptr_write", issue = "86302")] pub const unsafe fn write_unaligned(self, val: T) where T: Sized, @@ -1194,8 +1184,8 @@ impl NonNull { /// See [`ptr::replace`] for safety concerns and examples. /// /// [`ptr::replace`]: crate::ptr::replace() - #[unstable(feature = "non_null_convenience", issue = "117691")] #[inline(always)] + #[stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] pub unsafe fn replace(self, src: T) -> T where T: Sized, @@ -1211,10 +1201,9 @@ impl NonNull { /// See [`ptr::swap`] for safety concerns and examples. /// /// [`ptr::swap`]: crate::ptr::swap() - #[unstable(feature = "non_null_convenience", issue = "117691")] - #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] - //#[rustc_const_unstable(feature = "const_swap", issue = "83163")] #[inline(always)] + #[stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_unstable(feature = "const_swap", issue = "83163")] pub const unsafe fn swap(self, with: NonNull) where T: Sized, @@ -1246,7 +1235,6 @@ impl NonNull { /// Accessing adjacent `u8` as `u16` /// /// ``` - /// #![feature(non_null_convenience)] /// use std::mem::align_of; /// use std::ptr::NonNull; /// @@ -1264,11 +1252,10 @@ impl NonNull { /// } /// # } /// ``` - #[unstable(feature = "non_null_convenience", issue = "117691")] - #[rustc_const_unstable(feature = "non_null_convenience", issue = "117691")] - //#[rustc_const_unstable(feature = "const_align_offset", issue = "90962")] - #[must_use] #[inline] + #[must_use] + #[stable(feature = "non_null_convenience", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_unstable(feature = "const_align_offset", issue = "90962")] pub const fn align_offset(self, align: usize) -> usize where T: Sized, @@ -1312,10 +1299,9 @@ impl NonNull { /// underlying allocation. /// /// ``` - /// #![feature(const_pointer_is_aligned)] - /// #![feature(non_null_convenience)] - /// #![feature(const_option)] /// #![feature(const_nonnull_new)] + /// #![feature(const_option)] + /// #![feature(const_pointer_is_aligned)] /// use std::ptr::NonNull; /// /// // On some platforms, the alignment of primitives is less than their size. 
@@ -1390,10 +1376,10 @@ impl NonNull { /// ``` /// /// [tracking issue]: https://github.com/rust-lang/rust/issues/104203 + #[inline] + #[must_use] #[stable(feature = "pointer_is_aligned", since = "CURRENT_RUSTC_VERSION")] #[rustc_const_unstable(feature = "const_pointer_is_aligned", issue = "104203")] - #[must_use] - #[inline] pub const fn is_aligned(self) -> bool where T: Sized, @@ -1505,10 +1491,10 @@ impl NonNull { /// ``` /// /// [tracking issue]: https://github.com/rust-lang/rust/issues/104203 + #[inline] + #[must_use] #[unstable(feature = "pointer_is_aligned_to", issue = "96284")] #[rustc_const_unstable(feature = "const_pointer_is_aligned", issue = "104203")] - #[must_use] - #[inline] pub const fn is_aligned_to(self, align: usize) -> bool { self.pointer.is_aligned_to(align) } From 5969ad4caef5b6f28312fc00aab16306ad4d88f7 Mon Sep 17 00:00:00 2001 From: Nicholas Thompson Date: Sun, 28 Apr 2024 21:00:53 -0400 Subject: [PATCH 22/56] Update is_val_statically_known docs * Add `Type Requirements` section, listing the allowed inputs, as requested by #121115 * Add `Pointers` subsection, explaining is_val_statically_known handles pointers. * Add `Stability concerns` section, referring to other documentation relating to consistency in `const` functions. --- library/core/src/intrinsics.rs | 43 ++++++++++++++++++++++++++++++---- 1 file changed, 39 insertions(+), 4 deletions(-) diff --git a/library/core/src/intrinsics.rs b/library/core/src/intrinsics.rs index 4b00bbe4dc2ed..14b4ce39ab453 100644 --- a/library/core/src/intrinsics.rs +++ b/library/core/src/intrinsics.rs @@ -2717,10 +2717,45 @@ where /// particular value, ever. However, the compiler will generally make it /// return `true` only if the value of the argument is actually known. /// -/// When calling this in a `const fn`, both paths must be semantically -/// equivalent, that is, the result of the `true` branch and the `false` -/// branch must return the same value and have the same side-effects *no -/// matter what*. +/// # Stability concerns +/// +/// While it is safe to call, this intrinsic may behave differently in +/// a `const` context than otherwise. See the [`const_eval_select`] +/// documentation for an explanation of the issues this can cause. Unlike +/// `const_eval_select`, this intrinsic isn't guaranteed to behave +/// deterministically even in a `const` context. +/// +/// # Type Requirements +/// +/// `T` must be either a `bool`, a `char`, a primitive numeric type (e.g. `f32`, +/// but not `NonZeroISize`), or any thin pointer (e.g. `*mut String`). +/// Any other argument types *may* cause a compiler error. +/// +/// ## Pointers +/// +/// When the input is a pointer, only the pointer itself is +/// ever considered. The pointee has no effect. 
Currently, these functions +/// behave identically: +/// +/// ``` +/// #![feature(is_val_statically_known)] +/// #![feature(core_intrinsics)] +/// # #![allow(internal_features)] +/// #![feature(strict_provenance)] +/// use std::intrinsics::is_val_statically_known; +/// +/// fn foo(x: &i32) -> bool { +/// is_val_statically_known(x) +/// } +/// +/// fn bar(x: &i32) -> bool { +/// is_val_statically_known( +/// (x as *const i32).addr() +/// ) +/// } +/// # _ = foo(&5_i32); +/// # _ = bar(&5_i32); +/// ``` #[rustc_const_unstable(feature = "is_val_statically_known", issue = "none")] #[rustc_nounwind] #[unstable(feature = "core_intrinsics", issue = "none")] From 254a9fbe86a4a6d3c0481f7924c4b0393e1a230e Mon Sep 17 00:00:00 2001 From: Gurinder Singh Date: Mon, 29 Apr 2024 08:16:26 +0530 Subject: [PATCH 23/56] Prohibit const prop of unions in `KnownPanicsLint` as they have a potential to ICE during layout calculation --- .../src/known_panics_lint.rs | 20 ++++++++----- tests/crashes/123710.rs | 17 ----------- ...st-prop-unions-known-panics-lint-123710.rs | 22 ++++++++++++++ ...rop-unions-known-panics-lint-123710.stderr | 29 +++++++++++++++++++ 4 files changed, 64 insertions(+), 24 deletions(-) delete mode 100644 tests/crashes/123710.rs create mode 100644 tests/ui/lint/ice-const-prop-unions-known-panics-lint-123710.rs create mode 100644 tests/ui/lint/ice-const-prop-unions-known-panics-lint-123710.stderr diff --git a/compiler/rustc_mir_transform/src/known_panics_lint.rs b/compiler/rustc_mir_transform/src/known_panics_lint.rs index b8dbf8a759fcc..b2030efca8f0d 100644 --- a/compiler/rustc_mir_transform/src/known_panics_lint.rs +++ b/compiler/rustc_mir_transform/src/known_panics_lint.rs @@ -896,13 +896,19 @@ impl CanConstProp { }; for (local, val) in cpv.can_const_prop.iter_enumerated_mut() { let ty = body.local_decls[local].ty; - match tcx.layout_of(param_env.and(ty)) { - Ok(layout) if layout.size < Size::from_bytes(MAX_ALLOC_LIMIT) => {} - // Either the layout fails to compute, then we can't use this local anyway - // or the local is too large, then we don't want to. - _ => { - *val = ConstPropMode::NoPropagation; - continue; + if ty.is_union() { + // Do not const prop unions as they can + // ICE during layout calc + *val = ConstPropMode::NoPropagation; + } else { + match tcx.layout_of(param_env.and(ty)) { + Ok(layout) if layout.size < Size::from_bytes(MAX_ALLOC_LIMIT) => {} + // Either the layout fails to compute, then we can't use this local anyway + // or the local is too large, then we don't want to. + _ => { + *val = ConstPropMode::NoPropagation; + continue; + } } } } diff --git a/tests/crashes/123710.rs b/tests/crashes/123710.rs deleted file mode 100644 index f171fa7cebb4a..0000000000000 --- a/tests/crashes/123710.rs +++ /dev/null @@ -1,17 +0,0 @@ -//@ known-bug: #123710 - -#[repr(packed)] -#[repr(u32)] -enum E { - A, - B, - C, -} - -fn main() { - union InvalidTag { - int: u32, - e: E, - } - let _invalid_tag = InvalidTag { int: 4 }; -} diff --git a/tests/ui/lint/ice-const-prop-unions-known-panics-lint-123710.rs b/tests/ui/lint/ice-const-prop-unions-known-panics-lint-123710.rs new file mode 100644 index 0000000000000..2b93d0f8a609d --- /dev/null +++ b/tests/ui/lint/ice-const-prop-unions-known-panics-lint-123710.rs @@ -0,0 +1,22 @@ +// Regression test for issue 123710. 
+// Tests that the we do not ICE in KnownPanicsLint +// when a union contains an enum with an repr(packed), +// which is a repr not supported for enums + +#[repr(packed)] +//~^ ERROR attribute should be applied to a struct or union +#[repr(u32)] +enum E { + A, + B, + C, +} + +fn main() { + union InvalidTag { + int: u32, + e: E, +//~^ ERROR field must implement `Copy` or be wrapped in `ManuallyDrop<...>` to be used in a union + } + let _invalid_tag = InvalidTag { int: 4 }; +} diff --git a/tests/ui/lint/ice-const-prop-unions-known-panics-lint-123710.stderr b/tests/ui/lint/ice-const-prop-unions-known-panics-lint-123710.stderr new file mode 100644 index 0000000000000..44dde6120e8d6 --- /dev/null +++ b/tests/ui/lint/ice-const-prop-unions-known-panics-lint-123710.stderr @@ -0,0 +1,29 @@ +error[E0517]: attribute should be applied to a struct or union + --> $DIR/ice-const-prop-unions-known-panics-lint-123710.rs:6:8 + | +LL | #[repr(packed)] + | ^^^^^^ +... +LL | / enum E { +LL | | A, +LL | | B, +LL | | C, +LL | | } + | |_- not a struct or union + +error[E0740]: field must implement `Copy` or be wrapped in `ManuallyDrop<...>` to be used in a union + --> $DIR/ice-const-prop-unions-known-panics-lint-123710.rs:18:9 + | +LL | e: E, + | ^^^^ + | + = note: union fields must not have drop side-effects, which is currently enforced via either `Copy` or `ManuallyDrop<...>` +help: wrap the field type in `ManuallyDrop<...>` + | +LL | e: std::mem::ManuallyDrop, + | +++++++++++++++++++++++ + + +error: aborting due to 2 previous errors + +Some errors have detailed explanations: E0517, E0740. +For more information about an error, try `rustc --explain E0517`. From 448d527fd8d705393274198d858a4e2d2a2d8d54 Mon Sep 17 00:00:00 2001 From: Martin Nordholts Date: Mon, 29 Apr 2024 06:49:39 +0200 Subject: [PATCH 24/56] Typo fix: exec:ing -> exec'ing --- tests/ui/attributes/unix_sigpipe/unix_sigpipe-inherit.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/ui/attributes/unix_sigpipe/unix_sigpipe-inherit.rs b/tests/ui/attributes/unix_sigpipe/unix_sigpipe-inherit.rs index 694fa460e9bb4..3e63349edb747 100644 --- a/tests/ui/attributes/unix_sigpipe/unix_sigpipe-inherit.rs +++ b/tests/ui/attributes/unix_sigpipe/unix_sigpipe-inherit.rs @@ -8,7 +8,7 @@ extern crate libc; -// By default the Rust runtime resets SIGPIPE to SIG_DFL before exec:ing child +// By default the Rust runtime resets SIGPIPE to SIG_DFL before exec'ing child // processes so opt-out of that with `#[unix_sigpipe = "sig_dfl"]`. 
See // https://github.com/rust-lang/rust/blob/bf4de3a874753bbee3323081c8b0c133444fed2d/library/std/src/sys/pal/unix/process/process_unix.rs#L359-L384 #[unix_sigpipe = "sig_dfl"] From de3eff79a6456aa3f31e37123e73ef713d00f781 Mon Sep 17 00:00:00 2001 From: Martin Nordholts Date: Mon, 29 Apr 2024 06:50:38 +0200 Subject: [PATCH 25/56] aux-bin: Avoid old .so files from old tests; clean auxiliary dir root --- src/tools/compiletest/src/runtest.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/tools/compiletest/src/runtest.rs b/src/tools/compiletest/src/runtest.rs index b0f84b1162f4b..da51b33a9d6d9 100644 --- a/src/tools/compiletest/src/runtest.rs +++ b/src/tools/compiletest/src/runtest.rs @@ -2140,6 +2140,7 @@ impl<'test> TestCx<'test> { if !self.props.aux_bins.is_empty() { let aux_bin_dir = self.aux_bin_output_dir_name(); + remove_and_create_dir_all(&aux_dir); remove_and_create_dir_all(&aux_bin_dir); } From fa6db4c4281ec814798ff217a485b504a49a6411 Mon Sep 17 00:00:00 2001 From: ivmarkov Date: Mon, 29 Apr 2024 06:17:02 +0000 Subject: [PATCH 26/56] Fix ESP IDF build broken by #124210 --- library/std/src/sys/pal/unix/fs.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/library/std/src/sys/pal/unix/fs.rs b/library/std/src/sys/pal/unix/fs.rs index abd32b819e031..5987996a3f167 100644 --- a/library/std/src/sys/pal/unix/fs.rs +++ b/library/std/src/sys/pal/unix/fs.rs @@ -893,6 +893,7 @@ impl Drop for Dir { target_os = "nto", target_os = "vita", target_os = "hurd", + target_os = "espidf", )))] { let fd = unsafe { libc::dirfd(self.0) }; From a25a11ad73dba9d75c576874d7bb1c03dd69fb26 Mon Sep 17 00:00:00 2001 From: Zalathar Date: Mon, 29 Apr 2024 16:36:55 +1000 Subject: [PATCH 27/56] coverage: Avoid hard-coded values when visiting logical ops Instead of separately hard-coding the operation being visited, we can get it from the match arm pattern by using an as-pattern. 
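For context, a minimal standalone sketch of the as-pattern binding relied on
here; the `LogicalOp` enum and `describe` function below are illustrative
stand-ins written only for this example, not compiler code:

    // Stand-in enum for illustration; the compiler has its own `LogicalOp`.
    #[derive(Clone, Copy, Debug)]
    enum LogicalOp {
        And,
        Or,
    }

    // `op @ LogicalOp::And` requires the value to match the variant and, at
    // the same time, binds the matched value to `op` for reuse in the arm body.
    fn describe(expr_op: LogicalOp) -> String {
        match expr_op {
            op @ LogicalOp::And => format!("visiting {op:?}"),
            op @ LogicalOp::Or => format!("visiting {op:?}"),
        }
    }

    fn main() {
        assert_eq!(describe(LogicalOp::And), "visiting And");
        assert_eq!(describe(LogicalOp::Or), "visiting Or");
    }

The same idea lets the match arms changed in the diff below pass the matched
operator straight to `visit_coverage_branch_operation` instead of restating
`LogicalOp::And` / `LogicalOp::Or` in each arm body.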
--- compiler/rustc_mir_build/src/build/matches/mod.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/compiler/rustc_mir_build/src/build/matches/mod.rs b/compiler/rustc_mir_build/src/build/matches/mod.rs index d36c51f44727f..133cd36e71a5f 100644 --- a/compiler/rustc_mir_build/src/build/matches/mod.rs +++ b/compiler/rustc_mir_build/src/build/matches/mod.rs @@ -73,14 +73,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let expr_span = expr.span; match expr.kind { - ExprKind::LogicalOp { op: LogicalOp::And, lhs, rhs } => { - this.visit_coverage_branch_operation(LogicalOp::And, expr_span); + ExprKind::LogicalOp { op: op @ LogicalOp::And, lhs, rhs } => { + this.visit_coverage_branch_operation(op, expr_span); let lhs_then_block = unpack!(this.then_else_break_inner(block, lhs, args)); let rhs_then_block = unpack!(this.then_else_break_inner(lhs_then_block, rhs, args)); rhs_then_block.unit() } - ExprKind::LogicalOp { op: LogicalOp::Or, lhs, rhs } => { - this.visit_coverage_branch_operation(LogicalOp::Or, expr_span); + ExprKind::LogicalOp { op: op @ LogicalOp::Or, lhs, rhs } => { + this.visit_coverage_branch_operation(op, expr_span); let local_scope = this.local_scope(); let (lhs_success_block, failure_block) = this.in_if_then_scope(local_scope, expr_span, |this| { From 3c2f48ede9ce927950d74832722e6f716a69d974 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dorian=20P=C3=A9ron?= Date: Mon, 8 Apr 2024 14:23:50 +0000 Subject: [PATCH 28/56] mcdc-coverage: Add possibility for codegen llvm to handle several condition bitmaps --- compiler/rustc_codegen_llvm/src/builder.rs | 31 ++++++++++++------- .../src/coverageinfo/mod.rs | 25 ++++++++++----- 2 files changed, 38 insertions(+), 18 deletions(-) diff --git a/compiler/rustc_codegen_llvm/src/builder.rs b/compiler/rustc_codegen_llvm/src/builder.rs index 4c2bdb2f5ec27..bd09797a677bc 100644 --- a/compiler/rustc_codegen_llvm/src/builder.rs +++ b/compiler/rustc_codegen_llvm/src/builder.rs @@ -27,6 +27,7 @@ use rustc_target::abi::{self, call::FnAbi, Align, Size, WrappingRange}; use rustc_target::spec::{HasTargetSpec, SanitizerSet, Target}; use smallvec::SmallVec; use std::borrow::Cow; +use std::ffi::CString; use std::iter; use std::ops::Deref; use std::ptr; @@ -1708,7 +1709,8 @@ impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> { fn_name: &'ll Value, hash: &'ll Value, bitmap_bytes: &'ll Value, - ) -> &'ll Value { + max_decision_depth: u32, + ) -> Vec<&'ll Value> { debug!("mcdc_parameters() with args ({:?}, {:?}, {:?})", fn_name, hash, bitmap_bytes); assert!(llvm_util::get_version() >= (18, 0, 0), "MCDC intrinsics require LLVM 18 or later"); @@ -1721,6 +1723,8 @@ impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> { let args = &[fn_name, hash, bitmap_bytes]; let args = self.check_call("call", llty, llfn, args); + let mut cond_bitmaps = vec![]; + unsafe { let _ = llvm::LLVMRustBuildCall( self.llbuilder, @@ -1732,17 +1736,22 @@ impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> { 0 as c_uint, ); // Create condition bitmap named `mcdc.addr`. 
- let mut bx = Builder::with_cx(self.cx); - bx.position_at_start(llvm::LLVMGetFirstBasicBlock(self.llfn())); - let cond_bitmap = { - let alloca = - llvm::LLVMBuildAlloca(bx.llbuilder, bx.cx.type_i32(), c"mcdc.addr".as_ptr()); - llvm::LLVMSetAlignment(alloca, 4); - alloca - }; - bx.store(self.const_i32(0), cond_bitmap, self.tcx().data_layout.i32_align.abi); - cond_bitmap + for i in 0..=max_decision_depth { + let mut bx = Builder::with_cx(self.cx); + bx.position_at_start(llvm::LLVMGetFirstBasicBlock(self.llfn())); + + let name = CString::new(format!("mcdc.addr.{i}")).unwrap(); + let cond_bitmap = { + let alloca = + llvm::LLVMBuildAlloca(bx.llbuilder, bx.cx.type_i32(), name.as_ptr()); + llvm::LLVMSetAlignment(alloca, 4); + alloca + }; + bx.store(self.const_i32(0), cond_bitmap, self.tcx().data_layout.i32_align.abi); + cond_bitmaps.push(cond_bitmap); + } } + cond_bitmaps } pub(crate) fn mcdc_tvbitmap_update( diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs index 085ce15d81fe8..03fc2ae425b04 100644 --- a/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs +++ b/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs @@ -30,7 +30,7 @@ pub struct CrateCoverageContext<'ll, 'tcx> { pub(crate) function_coverage_map: RefCell, FunctionCoverageCollector<'tcx>>>, pub(crate) pgo_func_name_var_map: RefCell, &'ll llvm::Value>>, - pub(crate) mcdc_condition_bitmap_map: RefCell, &'ll llvm::Value>>, + pub(crate) mcdc_condition_bitmap_map: RefCell, Vec<&'ll llvm::Value>>>, } impl<'ll, 'tcx> CrateCoverageContext<'ll, 'tcx> { @@ -49,9 +49,20 @@ impl<'ll, 'tcx> CrateCoverageContext<'ll, 'tcx> { } /// LLVM use a temp value to record evaluated mcdc test vector of each decision, which is called condition bitmap. - /// This value is named `mcdc.addr` (same as clang) and is a 32-bit integer. - fn try_get_mcdc_condition_bitmap(&self, instance: &Instance<'tcx>) -> Option<&'ll llvm::Value> { - self.mcdc_condition_bitmap_map.borrow().get(instance).copied() + /// In order to handle nested decisions, several condition bitmaps can be + /// allocated for a function body. + /// These values are named `mcdc.addr.{i}` and are a 32-bit integers. + /// They respectively hold the condition bitmaps for decisions with a depth of `i`. 
+ fn try_get_mcdc_condition_bitmap( + &self, + instance: &Instance<'tcx>, + decision_depth: u16, + ) -> Option<&'ll llvm::Value> { + self.mcdc_condition_bitmap_map + .borrow() + .get(instance) + .and_then(|bitmap_map| bitmap_map.get(decision_depth as usize)) + .copied() // Dereference Option<&&Value> to Option<&Value> } } @@ -151,7 +162,7 @@ impl<'tcx> CoverageInfoBuilderMethods<'tcx> for Builder<'_, '_, 'tcx> { "ConditionId of evaluated conditions should never be zero" ); let cond_bitmap = coverage_context - .try_get_mcdc_condition_bitmap(&instance) + .try_get_mcdc_condition_bitmap(&instance, 0) .expect("mcdc cond bitmap should have been allocated for updating"); let cond_loc = bx.const_i32(id.as_u32() as i32 - 1); let bool_value = bx.const_bool(value); @@ -162,7 +173,7 @@ impl<'tcx> CoverageInfoBuilderMethods<'tcx> for Builder<'_, '_, 'tcx> { CoverageKind::TestVectorBitmapUpdate { bitmap_idx } => { drop(coverage_map); let cond_bitmap = coverage_context - .try_get_mcdc_condition_bitmap(&instance) + .try_get_mcdc_condition_bitmap(&instance, 0) .expect("mcdc cond bitmap should have been allocated for merging into the global bitmap"); let bitmap_bytes = bx.tcx().coverage_ids_info(instance.def).mcdc_bitmap_bytes; assert!(bitmap_idx < bitmap_bytes, "bitmap index of the decision out of range"); @@ -195,7 +206,7 @@ fn ensure_mcdc_parameters<'ll, 'tcx>( let fn_name = bx.get_pgo_func_name_var(instance); let hash = bx.const_u64(function_coverage_info.function_source_hash); let bitmap_bytes = bx.const_u32(function_coverage_info.mcdc_bitmap_bytes); - let cond_bitmap = bx.mcdc_parameters(fn_name, hash, bitmap_bytes); + let cond_bitmap = bx.mcdc_parameters(fn_name, hash, bitmap_bytes, 1_u32); bx.coverage_context() .expect("already checked above") .mcdc_condition_bitmap_map From ae8c023983247044838a1a8b5f6fb5dd606fa68d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dorian=20P=C3=A9ron?= Date: Mon, 8 Apr 2024 14:44:25 +0000 Subject: [PATCH 29/56] mcdc-coverage: Add decision_depth field in structs Add decision_depth field to TVBitmapUpdate/CondBitmapUpdate statements Add decision_depth field to BcbMappingKinds MCDCBranch and MCDCDecision Add decision_depth field to MCDCBranchSpan and MCDCDecisionSpan --- .../src/coverageinfo/mod.rs | 8 ++--- compiler/rustc_middle/src/mir/coverage.rs | 20 +++++++---- compiler/rustc_middle/src/mir/pretty.rs | 17 ++++++--- .../rustc_mir_build/src/build/coverageinfo.rs | 2 ++ .../rustc_mir_transform/src/coverage/mod.rs | 35 +++++++++++-------- .../rustc_mir_transform/src/coverage/spans.rs | 8 ++++- .../src/coverage/spans/from_mir.rs | 27 +++++++++----- 7 files changed, 78 insertions(+), 39 deletions(-) diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs index 03fc2ae425b04..eb1071448ce23 100644 --- a/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs +++ b/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs @@ -154,7 +154,7 @@ impl<'tcx> CoverageInfoBuilderMethods<'tcx> for Builder<'_, '_, 'tcx> { CoverageKind::ExpressionUsed { id } => { func_coverage.mark_expression_id_seen(id); } - CoverageKind::CondBitmapUpdate { id, value, .. 
} => { + CoverageKind::CondBitmapUpdate { id, value, decision_depth } => { drop(coverage_map); assert_ne!( id.as_u32(), @@ -162,7 +162,7 @@ impl<'tcx> CoverageInfoBuilderMethods<'tcx> for Builder<'_, '_, 'tcx> { "ConditionId of evaluated conditions should never be zero" ); let cond_bitmap = coverage_context - .try_get_mcdc_condition_bitmap(&instance, 0) + .try_get_mcdc_condition_bitmap(&instance, decision_depth) .expect("mcdc cond bitmap should have been allocated for updating"); let cond_loc = bx.const_i32(id.as_u32() as i32 - 1); let bool_value = bx.const_bool(value); @@ -170,10 +170,10 @@ impl<'tcx> CoverageInfoBuilderMethods<'tcx> for Builder<'_, '_, 'tcx> { let hash = bx.const_u64(function_coverage_info.function_source_hash); bx.mcdc_condbitmap_update(fn_name, hash, cond_loc, cond_bitmap, bool_value); } - CoverageKind::TestVectorBitmapUpdate { bitmap_idx } => { + CoverageKind::TestVectorBitmapUpdate { bitmap_idx, decision_depth } => { drop(coverage_map); let cond_bitmap = coverage_context - .try_get_mcdc_condition_bitmap(&instance, 0) + .try_get_mcdc_condition_bitmap(&instance, decision_depth) .expect("mcdc cond bitmap should have been allocated for merging into the global bitmap"); let bitmap_bytes = bx.tcx().coverage_ids_info(instance.def).mcdc_bitmap_bytes; assert!(bitmap_idx < bitmap_bytes, "bitmap index of the decision out of range"); diff --git a/compiler/rustc_middle/src/mir/coverage.rs b/compiler/rustc_middle/src/mir/coverage.rs index 04011fd41948e..6383074742d7d 100644 --- a/compiler/rustc_middle/src/mir/coverage.rs +++ b/compiler/rustc_middle/src/mir/coverage.rs @@ -132,7 +132,7 @@ pub enum CoverageKind { /// /// If this statement does not survive MIR optimizations, the condition would never be /// taken as evaluated. - CondBitmapUpdate { id: ConditionId, value: bool }, + CondBitmapUpdate { id: ConditionId, value: bool, decision_depth: u16 }, /// Marks the point in MIR control flow represented by a evaluated decision. /// @@ -140,7 +140,7 @@ pub enum CoverageKind { /// /// If this statement does not survive MIR optimizations, the decision would never be /// taken as evaluated. 
- TestVectorBitmapUpdate { bitmap_idx: u32 }, + TestVectorBitmapUpdate { bitmap_idx: u32, decision_depth: u16 }, } impl Debug for CoverageKind { @@ -151,11 +151,17 @@ impl Debug for CoverageKind { BlockMarker { id } => write!(fmt, "BlockMarker({:?})", id.index()), CounterIncrement { id } => write!(fmt, "CounterIncrement({:?})", id.index()), ExpressionUsed { id } => write!(fmt, "ExpressionUsed({:?})", id.index()), - CondBitmapUpdate { id, value } => { - write!(fmt, "CondBitmapUpdate({:?}, {:?})", id.index(), value) + CondBitmapUpdate { id, value, decision_depth } => { + write!( + fmt, + "CondBitmapUpdate({:?}, {:?}, depth={:?})", + id.index(), + value, + decision_depth + ) } - TestVectorBitmapUpdate { bitmap_idx } => { - write!(fmt, "TestVectorUpdate({:?})", bitmap_idx) + TestVectorBitmapUpdate { bitmap_idx, decision_depth } => { + write!(fmt, "TestVectorUpdate({:?}, depth={:?})", bitmap_idx, decision_depth) } } } @@ -319,6 +325,7 @@ pub struct MCDCBranchSpan { pub condition_info: Option, pub true_marker: BlockMarkerId, pub false_marker: BlockMarkerId, + pub decision_depth: u16, } #[derive(Copy, Clone, Debug)] @@ -334,4 +341,5 @@ pub struct MCDCDecisionSpan { pub span: Span, pub conditions_num: usize, pub end_markers: Vec, + pub decision_depth: u16, } diff --git a/compiler/rustc_middle/src/mir/pretty.rs b/compiler/rustc_middle/src/mir/pretty.rs index 7a91d7383e51c..32cd318341049 100644 --- a/compiler/rustc_middle/src/mir/pretty.rs +++ b/compiler/rustc_middle/src/mir/pretty.rs @@ -496,20 +496,27 @@ fn write_coverage_branch_info( )?; } - for coverage::MCDCBranchSpan { span, condition_info, true_marker, false_marker } in - mcdc_branch_spans + for coverage::MCDCBranchSpan { + span, + condition_info, + true_marker, + false_marker, + decision_depth, + } in mcdc_branch_spans { writeln!( w, - "{INDENT}coverage mcdc branch {{ condition_id: {:?}, true: {true_marker:?}, false: {false_marker:?} }} => {span:?}", + "{INDENT}coverage mcdc branch {{ condition_id: {:?}, true: {true_marker:?}, false: {false_marker:?}, depth: {decision_depth:?} }} => {span:?}", condition_info.map(|info| info.condition_id) )?; } - for coverage::MCDCDecisionSpan { span, conditions_num, end_markers } in mcdc_decision_spans { + for coverage::MCDCDecisionSpan { span, conditions_num, end_markers, decision_depth } in + mcdc_decision_spans + { writeln!( w, - "{INDENT}coverage mcdc decision {{ conditions_num: {conditions_num:?}, end: {end_markers:?} }} => {span:?}" + "{INDENT}coverage mcdc decision {{ conditions_num: {conditions_num:?}, end: {end_markers:?}, depth: {decision_depth:?} }} => {span:?}" )?; } diff --git a/compiler/rustc_mir_build/src/build/coverageinfo.rs b/compiler/rustc_mir_build/src/build/coverageinfo.rs index 9e9ccd3dc2d01..1818be2c1bd6c 100644 --- a/compiler/rustc_mir_build/src/build/coverageinfo.rs +++ b/compiler/rustc_mir_build/src/build/coverageinfo.rs @@ -262,6 +262,7 @@ impl MCDCState { span, conditions_num: 0, end_markers: vec![], + decision_depth: 0, }), }; @@ -371,6 +372,7 @@ impl Builder<'_, '_> { condition_info, true_marker, false_marker, + decision_depth: 0, }); return; } diff --git a/compiler/rustc_mir_transform/src/coverage/mod.rs b/compiler/rustc_mir_transform/src/coverage/mod.rs index 0b15c52c28143..a4c9e5ac0bcc5 100644 --- a/compiler/rustc_mir_transform/src/coverage/mod.rs +++ b/compiler/rustc_mir_transform/src/coverage/mod.rs @@ -145,16 +145,17 @@ fn create_mappings<'tcx>( |BcbMapping { kind: bcb_mapping_kind, span }| { let kind = match *bcb_mapping_kind { BcbMappingKind::Code(bcb) => 
MappingKind::Code(term_for_bcb(bcb)), - BcbMappingKind::MCDCBranch { true_bcb, false_bcb, condition_info: None } => { - MappingKind::Branch { - true_term: term_for_bcb(true_bcb), - false_term: term_for_bcb(false_bcb), - } - } + BcbMappingKind::MCDCBranch { + true_bcb, false_bcb, condition_info: None, .. + } => MappingKind::Branch { + true_term: term_for_bcb(true_bcb), + false_term: term_for_bcb(false_bcb), + }, BcbMappingKind::MCDCBranch { true_bcb, false_bcb, condition_info: Some(mcdc_params), + .. } => MappingKind::MCDCBranch { true_term: term_for_bcb(true_bcb), false_term: term_for_bcb(false_bcb), @@ -246,24 +247,28 @@ fn inject_mcdc_statements<'tcx>( } // Inject test vector update first because `inject_statement` always insert new statement at head. - for (end_bcbs, bitmap_idx) in + for (end_bcbs, bitmap_idx, decision_depth) in coverage_spans.mappings.iter().filter_map(|mapping| match &mapping.kind { - BcbMappingKind::MCDCDecision { end_bcbs, bitmap_idx, .. } => { - Some((end_bcbs, *bitmap_idx)) + BcbMappingKind::MCDCDecision { end_bcbs, bitmap_idx, decision_depth, .. } => { + Some((end_bcbs, *bitmap_idx, *decision_depth)) } _ => None, }) { for end in end_bcbs { let end_bb = basic_coverage_blocks[*end].leader_bb(); - inject_statement(mir_body, CoverageKind::TestVectorBitmapUpdate { bitmap_idx }, end_bb); + inject_statement( + mir_body, + CoverageKind::TestVectorBitmapUpdate { bitmap_idx, decision_depth }, + end_bb, + ); } } - for (true_bcb, false_bcb, condition_id) in + for (true_bcb, false_bcb, condition_id, decision_depth) in coverage_spans.mappings.iter().filter_map(|mapping| match mapping.kind { - BcbMappingKind::MCDCBranch { true_bcb, false_bcb, condition_info } => { - Some((true_bcb, false_bcb, condition_info?.condition_id)) + BcbMappingKind::MCDCBranch { true_bcb, false_bcb, condition_info, decision_depth } => { + Some((true_bcb, false_bcb, condition_info?.condition_id, decision_depth)) } _ => None, }) @@ -271,13 +276,13 @@ fn inject_mcdc_statements<'tcx>( let true_bb = basic_coverage_blocks[true_bcb].leader_bb(); inject_statement( mir_body, - CoverageKind::CondBitmapUpdate { id: condition_id, value: true }, + CoverageKind::CondBitmapUpdate { id: condition_id, value: true, decision_depth }, true_bb, ); let false_bb = basic_coverage_blocks[false_bcb].leader_bb(); inject_statement( mir_body, - CoverageKind::CondBitmapUpdate { id: condition_id, value: false }, + CoverageKind::CondBitmapUpdate { id: condition_id, value: false, decision_depth }, false_bb, ); } diff --git a/compiler/rustc_mir_transform/src/coverage/spans.rs b/compiler/rustc_mir_transform/src/coverage/spans.rs index 88f18b7208574..b64b1212cecc6 100644 --- a/compiler/rustc_mir_transform/src/coverage/spans.rs +++ b/compiler/rustc_mir_transform/src/coverage/spans.rs @@ -26,9 +26,15 @@ pub(super) enum BcbMappingKind { /// If `None`, this actually represents a normal branch mapping inserted /// for code that was too complex for MC/DC. condition_info: Option, + decision_depth: u16, }, /// Associates a mcdc decision with its join BCB. 
- MCDCDecision { end_bcbs: BTreeSet, bitmap_idx: u32, conditions_num: u16 }, + MCDCDecision { + end_bcbs: BTreeSet, + bitmap_idx: u32, + conditions_num: u16, + decision_depth: u16, + }, } #[derive(Debug)] diff --git a/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs b/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs index 64f21d74b1cd5..142dfc17fb8f1 100644 --- a/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs +++ b/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs @@ -453,15 +453,25 @@ pub(super) fn extract_mcdc_mappings( Some((span, true_bcb, false_bcb)) }; - let mcdc_branch_filter_map = - |&MCDCBranchSpan { span: raw_span, true_marker, false_marker, condition_info }| { - check_branch_bcb(raw_span, true_marker, false_marker).map( - |(span, true_bcb, false_bcb)| BcbMapping { - kind: BcbMappingKind::MCDCBranch { true_bcb, false_bcb, condition_info }, - span, + let mcdc_branch_filter_map = |&MCDCBranchSpan { + span: raw_span, + true_marker, + false_marker, + condition_info, + decision_depth, + }| { + check_branch_bcb(raw_span, true_marker, false_marker).map(|(span, true_bcb, false_bcb)| { + BcbMapping { + kind: BcbMappingKind::MCDCBranch { + true_bcb, + false_bcb, + condition_info, + decision_depth, }, - ) - }; + span, + } + }) + }; let mut next_bitmap_idx = 0; @@ -482,6 +492,7 @@ pub(super) fn extract_mcdc_mappings( end_bcbs, bitmap_idx, conditions_num: decision.conditions_num as u16, + decision_depth: decision.decision_depth, }, span, }) From 60ca9b6e29975ab36e5460d761d99681bef0f130 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dorian=20P=C3=A9ron?= Date: Wed, 10 Apr 2024 10:13:04 +0000 Subject: [PATCH 30/56] mcdc-coverage: Get decision_depth from THIR lowering Use decision context stack to handle nested decisions: - Introduce MCDCDecisionCtx - Use a stack of MCDCDecisionCtx to handle nested decisions --- .../src/coverageinfo/mod.rs | 3 +- compiler/rustc_middle/src/mir/coverage.rs | 3 + .../rustc_mir_build/src/build/coverageinfo.rs | 77 +++++++++++++++---- .../rustc_mir_build/src/build/matches/mod.rs | 6 ++ .../rustc_mir_transform/src/coverage/mod.rs | 11 +++ 5 files changed, 82 insertions(+), 18 deletions(-) diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs index eb1071448ce23..679c6e1a2ff83 100644 --- a/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs +++ b/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs @@ -206,7 +206,8 @@ fn ensure_mcdc_parameters<'ll, 'tcx>( let fn_name = bx.get_pgo_func_name_var(instance); let hash = bx.const_u64(function_coverage_info.function_source_hash); let bitmap_bytes = bx.const_u32(function_coverage_info.mcdc_bitmap_bytes); - let cond_bitmap = bx.mcdc_parameters(fn_name, hash, bitmap_bytes, 1_u32); + let max_decision_depth = function_coverage_info.mcdc_max_decision_depth; + let cond_bitmap = bx.mcdc_parameters(fn_name, hash, bitmap_bytes, max_decision_depth as u32); bx.coverage_context() .expect("already checked above") .mcdc_condition_bitmap_map diff --git a/compiler/rustc_middle/src/mir/coverage.rs b/compiler/rustc_middle/src/mir/coverage.rs index 6383074742d7d..a3fa69160a707 100644 --- a/compiler/rustc_middle/src/mir/coverage.rs +++ b/compiler/rustc_middle/src/mir/coverage.rs @@ -275,6 +275,9 @@ pub struct FunctionCoverageInfo { pub mcdc_bitmap_bytes: u32, pub expressions: IndexVec, pub mappings: Vec, + /// The depth of the deepest decision is used to know how many + /// temp condbitmaps should be allocated for the function. 
+ pub mcdc_max_decision_depth: u16, } /// Branch information recorded during THIR-to-MIR lowering, and stored in MIR. diff --git a/compiler/rustc_mir_build/src/build/coverageinfo.rs b/compiler/rustc_mir_build/src/build/coverageinfo.rs index 1818be2c1bd6c..a1c20fcfd8365 100644 --- a/compiler/rustc_mir_build/src/build/coverageinfo.rs +++ b/compiler/rustc_mir_build/src/build/coverageinfo.rs @@ -101,10 +101,14 @@ impl BranchInfoBuilder { tcx: TyCtxt<'_>, true_marker: BlockMarkerId, false_marker: BlockMarkerId, - ) -> Option { + ) -> Option<(u16, ConditionInfo)> { let mcdc_state = self.mcdc_state.as_mut()?; + let decision_depth = mcdc_state.decision_depth(); let (mut condition_info, decision_result) = mcdc_state.take_condition(true_marker, false_marker); + // take_condition() returns Some for decision_result when the decision stack + // is empty, i.e. when all the conditions of the decision were instrumented, + // and the decision is "complete". if let Some(decision) = decision_result { match decision.conditions_num { 0 => { @@ -131,7 +135,7 @@ impl BranchInfoBuilder { } } } - condition_info + condition_info.map(|cond_info| (decision_depth, cond_info)) } fn add_two_way_branch<'tcx>( @@ -199,17 +203,32 @@ impl BranchInfoBuilder { /// This limit may be relaxed if the [upstream change](https://github.com/llvm/llvm-project/pull/82448) is merged. const MAX_CONDITIONS_NUM_IN_DECISION: usize = 6; -struct MCDCState { +#[derive(Default)] +struct MCDCDecisionCtx { /// To construct condition evaluation tree. decision_stack: VecDeque, processing_decision: Option, } +struct MCDCState { + decision_ctx_stack: Vec, +} + impl MCDCState { fn new_if_enabled(tcx: TyCtxt<'_>) -> Option { tcx.sess .instrument_coverage_mcdc() - .then(|| Self { decision_stack: VecDeque::new(), processing_decision: None }) + .then(|| Self { decision_ctx_stack: vec![MCDCDecisionCtx::default()] }) + } + + /// Decision depth is given as a u16 to reduce the size of the `CoverageKind`, + /// as it is very unlikely that the depth ever reaches 2^16. + #[inline] + fn decision_depth(&self) -> u16 { + u16::try_from( + self.decision_ctx_stack.len().checked_sub(1).expect("Unexpected empty decision stack"), + ) + .expect("decision depth did not fit in u16, this is likely to be an instrumentation error") } // At first we assign ConditionIds for each sub expression. @@ -253,20 +272,23 @@ impl MCDCState { // - If the op is AND, the "false_next" of LHS and RHS should be the parent's "false_next". While "true_next" of the LHS is the RHS, the "true next" of RHS is the parent's "true_next". // - If the op is OR, the "true_next" of LHS and RHS should be the parent's "true_next". While "false_next" of the LHS is the RHS, the "false next" of RHS is the parent's "false_next". 
fn record_conditions(&mut self, op: LogicalOp, span: Span) { - let decision = match self.processing_decision.as_mut() { + let decision_depth = self.decision_depth(); + let decision_ctx = + self.decision_ctx_stack.last_mut().expect("Unexpected empty decision_ctx_stack"); + let decision = match decision_ctx.processing_decision.as_mut() { Some(decision) => { decision.span = decision.span.to(span); decision } - None => self.processing_decision.insert(MCDCDecisionSpan { + None => decision_ctx.processing_decision.insert(MCDCDecisionSpan { span, conditions_num: 0, end_markers: vec![], - decision_depth: 0, + decision_depth, }), }; - let parent_condition = self.decision_stack.pop_back().unwrap_or_default(); + let parent_condition = decision_ctx.decision_stack.pop_back().unwrap_or_default(); let lhs_id = if parent_condition.condition_id == ConditionId::NONE { decision.conditions_num += 1; ConditionId::from(decision.conditions_num) @@ -306,8 +328,8 @@ impl MCDCState { } }; // We visit expressions tree in pre-order, so place the left-hand side on the top. - self.decision_stack.push_back(rhs); - self.decision_stack.push_back(lhs); + decision_ctx.decision_stack.push_back(rhs); + decision_ctx.decision_stack.push_back(lhs); } fn take_condition( @@ -315,10 +337,12 @@ impl MCDCState { true_marker: BlockMarkerId, false_marker: BlockMarkerId, ) -> (Option, Option) { - let Some(condition_info) = self.decision_stack.pop_back() else { + let decision_ctx = + self.decision_ctx_stack.last_mut().expect("Unexpected empty decision_ctx_stack"); + let Some(condition_info) = decision_ctx.decision_stack.pop_back() else { return (None, None); }; - let Some(decision) = self.processing_decision.as_mut() else { + let Some(decision) = decision_ctx.processing_decision.as_mut() else { bug!("Processing decision should have been created before any conditions are taken"); }; if condition_info.true_next_id == ConditionId::NONE { @@ -328,8 +352,8 @@ impl MCDCState { decision.end_markers.push(false_marker); } - if self.decision_stack.is_empty() { - (Some(condition_info), self.processing_decision.take()) + if decision_ctx.decision_stack.is_empty() { + (Some(condition_info), decision_ctx.processing_decision.take()) } else { (Some(condition_info), None) } @@ -365,14 +389,17 @@ impl Builder<'_, '_> { |block| branch_info.inject_block_marker(&mut self.cfg, source_info, block); let true_marker = inject_block_marker(then_block); let false_marker = inject_block_marker(else_block); - let condition_info = - branch_info.fetch_mcdc_condition_info(self.tcx, true_marker, false_marker); + let (decision_depth, condition_info) = branch_info + .fetch_mcdc_condition_info(self.tcx, true_marker, false_marker) + .map_or((0, None), |(decision_depth, condition_info)| { + (decision_depth, Some(condition_info)) + }); branch_info.mcdc_branch_spans.push(MCDCBranchSpan { span: source_info.span, condition_info, true_marker, false_marker, - decision_depth: 0, + decision_depth, }); return; } @@ -387,4 +414,20 @@ impl Builder<'_, '_> { mcdc_state.record_conditions(logical_op, span); } } + + pub(crate) fn mcdc_increment_depth_if_enabled(&mut self) { + if let Some(branch_info) = self.coverage_branch_info.as_mut() + && let Some(mcdc_state) = branch_info.mcdc_state.as_mut() + { + mcdc_state.decision_ctx_stack.push(MCDCDecisionCtx::default()); + }; + } + + pub(crate) fn mcdc_decrement_depth_if_enabled(&mut self) { + if let Some(branch_info) = self.coverage_branch_info.as_mut() + && let Some(mcdc_state) = branch_info.mcdc_state.as_mut() + { + 
mcdc_state.decision_ctx_stack.pop().expect("Unexpected empty decision stack"); + }; + } } diff --git a/compiler/rustc_mir_build/src/build/matches/mod.rs b/compiler/rustc_mir_build/src/build/matches/mod.rs index f46dceeeedf58..b36e97194c940 100644 --- a/compiler/rustc_mir_build/src/build/matches/mod.rs +++ b/compiler/rustc_mir_build/src/build/matches/mod.rs @@ -151,8 +151,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let mut block = block; let temp_scope = args.temp_scope_override.unwrap_or_else(|| this.local_scope()); let mutability = Mutability::Mut; + + // Increment the decision depth, in case we encounter boolean expressions + // further down. + this.mcdc_increment_depth_if_enabled(); let place = unpack!(block = this.as_temp(block, Some(temp_scope), expr_id, mutability)); + this.mcdc_decrement_depth_if_enabled(); + let operand = Operand::Move(Place::from(place)); let then_block = this.cfg.start_new_block(); diff --git a/compiler/rustc_mir_transform/src/coverage/mod.rs b/compiler/rustc_mir_transform/src/coverage/mod.rs index a4c9e5ac0bcc5..159c099fac507 100644 --- a/compiler/rustc_mir_transform/src/coverage/mod.rs +++ b/compiler/rustc_mir_transform/src/coverage/mod.rs @@ -102,12 +102,23 @@ fn instrument_function_for_coverage<'tcx>(tcx: TyCtxt<'tcx>, mir_body: &mut mir: inject_mcdc_statements(mir_body, &basic_coverage_blocks, &coverage_spans); + let mcdc_max_decision_depth = coverage_spans + .mappings + .iter() + .filter_map(|bcb_mapping| match bcb_mapping.kind { + BcbMappingKind::MCDCDecision { decision_depth, .. } => Some(decision_depth), + _ => None, + }) + .max() + .unwrap_or(0); + mir_body.function_coverage_info = Some(Box::new(FunctionCoverageInfo { function_source_hash: hir_info.function_source_hash, num_counters: coverage_counters.num_counters(), mcdc_bitmap_bytes: coverage_spans.test_vector_bitmap_bytes(), expressions: coverage_counters.into_expressions(), mappings, + mcdc_max_decision_depth, })); } From eb422d5c7e6adccf83533ddb5a9836f6575bcd35 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dorian=20P=C3=A9ron?= Date: Thu, 11 Apr 2024 10:08:11 +0000 Subject: [PATCH 31/56] tests(mcdc-coverage): Add tests for nested decision structures in mcdc_nested_if.rs --- tests/coverage/mcdc_nested_if.cov-map | 201 +++++++++++++++++++++ tests/coverage/mcdc_nested_if.coverage | 235 +++++++++++++++++++++++++ tests/coverage/mcdc_nested_if.rs | 70 ++++++++ 3 files changed, 506 insertions(+) create mode 100644 tests/coverage/mcdc_nested_if.cov-map create mode 100644 tests/coverage/mcdc_nested_if.coverage create mode 100644 tests/coverage/mcdc_nested_if.rs diff --git a/tests/coverage/mcdc_nested_if.cov-map b/tests/coverage/mcdc_nested_if.cov-map new file mode 100644 index 0000000000000..2f35ffad8a98e --- /dev/null +++ b/tests/coverage/mcdc_nested_if.cov-map @@ -0,0 +1,201 @@ +Function name: mcdc_nested_if::doubly_nested_if_in_condition +Raw bytes (168): 0x[01, 01, 0e, 01, 05, 05, 11, 05, 11, 26, 19, 05, 11, 19, 1d, 19, 1d, 1d, 22, 26, 19, 05, 11, 11, 15, 09, 02, 0d, 37, 09, 02, 14, 01, 0f, 01, 01, 09, 28, 02, 02, 01, 08, 00, 4e, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 30, 0d, 09, 02, 00, 00, 00, 0d, 00, 4e, 05, 00, 10, 00, 11, 28, 01, 02, 00, 10, 00, 36, 30, 11, 26, 01, 00, 02, 00, 10, 00, 11, 30, 15, 21, 02, 00, 00, 00, 15, 00, 36, 26, 00, 18, 00, 19, 28, 00, 02, 00, 18, 00, 1e, 30, 19, 22, 01, 02, 00, 00, 18, 00, 19, 19, 00, 1d, 00, 1e, 30, 1a, 1d, 02, 00, 00, 00, 1d, 00, 1e, 1a, 00, 21, 00, 25, 1f, 00, 2f, 00, 34, 2b, 00, 39, 00, 3e, 21, 00, 48, 00, 4c, 0d, 00, 4f, 02, 06, 37, 02, 0c, 02, 06, 33, 03, 01, 
00, 02] +Number of files: 1 +- file 0 => global file 1 +Number of expressions: 14 +- expression 0 operands: lhs = Counter(0), rhs = Counter(1) +- expression 1 operands: lhs = Counter(1), rhs = Counter(4) +- expression 2 operands: lhs = Counter(1), rhs = Counter(4) +- expression 3 operands: lhs = Expression(9, Sub), rhs = Counter(6) +- expression 4 operands: lhs = Counter(1), rhs = Counter(4) +- expression 5 operands: lhs = Counter(6), rhs = Counter(7) +- expression 6 operands: lhs = Counter(6), rhs = Counter(7) +- expression 7 operands: lhs = Counter(7), rhs = Expression(8, Sub) +- expression 8 operands: lhs = Expression(9, Sub), rhs = Counter(6) +- expression 9 operands: lhs = Counter(1), rhs = Counter(4) +- expression 10 operands: lhs = Counter(4), rhs = Counter(5) +- expression 11 operands: lhs = Counter(2), rhs = Expression(0, Sub) +- expression 12 operands: lhs = Counter(3), rhs = Expression(13, Add) +- expression 13 operands: lhs = Counter(2), rhs = Expression(0, Sub) +Number of file 0 mappings: 20 +- Code(Counter(0)) at (prev + 15, 1) to (start + 1, 9) +- MCDCDecision { bitmap_idx: 2, conditions_num: 2 } at (prev + 1, 8) to (start + 0, 78) +- MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 8) to (start + 0, 9) + true = c1 + false = (c0 - c1) +- MCDCBranch { true: Counter(3), false: Counter(2), condition_id: 2, true_next_id: 0, false_next_id: 0 } at (prev + 0, 13) to (start + 0, 78) + true = c3 + false = c2 +- Code(Counter(1)) at (prev + 0, 16) to (start + 0, 17) +- MCDCDecision { bitmap_idx: 1, conditions_num: 2 } at (prev + 0, 16) to (start + 0, 54) +- MCDCBranch { true: Counter(4), false: Expression(9, Sub), condition_id: 1, true_next_id: 0, false_next_id: 2 } at (prev + 0, 16) to (start + 0, 17) + true = c4 + false = (c1 - c4) +- MCDCBranch { true: Counter(5), false: Counter(8), condition_id: 2, true_next_id: 0, false_next_id: 0 } at (prev + 0, 21) to (start + 0, 54) + true = c5 + false = c8 +- Code(Expression(9, Sub)) at (prev + 0, 24) to (start + 0, 25) + = (c1 - c4) +- MCDCDecision { bitmap_idx: 0, conditions_num: 2 } at (prev + 0, 24) to (start + 0, 30) +- MCDCBranch { true: Counter(6), false: Expression(8, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 24) to (start + 0, 25) + true = c6 + false = ((c1 - c4) - c6) +- Code(Counter(6)) at (prev + 0, 29) to (start + 0, 30) +- MCDCBranch { true: Expression(6, Sub), false: Counter(7), condition_id: 2, true_next_id: 0, false_next_id: 0 } at (prev + 0, 29) to (start + 0, 30) + true = (c6 - c7) + false = c7 +- Code(Expression(6, Sub)) at (prev + 0, 33) to (start + 0, 37) + = (c6 - c7) +- Code(Expression(7, Add)) at (prev + 0, 47) to (start + 0, 52) + = (c7 + ((c1 - c4) - c6)) +- Code(Expression(10, Add)) at (prev + 0, 57) to (start + 0, 62) + = (c4 + c5) +- Code(Counter(8)) at (prev + 0, 72) to (start + 0, 76) +- Code(Counter(3)) at (prev + 0, 79) to (start + 2, 6) +- Code(Expression(13, Add)) at (prev + 2, 12) to (start + 2, 6) + = (c2 + (c0 - c1)) +- Code(Expression(12, Add)) at (prev + 3, 1) to (start + 0, 2) + = (c3 + (c2 + (c0 - c1))) + +Function name: mcdc_nested_if::nested_if_in_condition +Raw bytes (120): 0x[01, 01, 0b, 01, 05, 05, 11, 05, 11, 1e, 15, 05, 11, 11, 15, 1e, 15, 05, 11, 09, 02, 0d, 2b, 09, 02, 0e, 01, 07, 01, 01, 09, 28, 01, 02, 01, 08, 00, 2e, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 30, 0d, 09, 02, 00, 00, 00, 0d, 00, 2e, 05, 00, 10, 00, 11, 28, 00, 02, 00, 10, 00, 16, 30, 11, 1e, 01, 00, 02, 00, 10, 00, 11, 
1e, 00, 15, 00, 16, 30, 15, 1a, 02, 00, 00, 00, 15, 00, 16, 17, 00, 19, 00, 1d, 1a, 00, 27, 00, 2c, 0d, 00, 2f, 02, 06, 2b, 02, 0c, 02, 06, 27, 03, 01, 00, 02] +Number of files: 1 +- file 0 => global file 1 +Number of expressions: 11 +- expression 0 operands: lhs = Counter(0), rhs = Counter(1) +- expression 1 operands: lhs = Counter(1), rhs = Counter(4) +- expression 2 operands: lhs = Counter(1), rhs = Counter(4) +- expression 3 operands: lhs = Expression(7, Sub), rhs = Counter(5) +- expression 4 operands: lhs = Counter(1), rhs = Counter(4) +- expression 5 operands: lhs = Counter(4), rhs = Counter(5) +- expression 6 operands: lhs = Expression(7, Sub), rhs = Counter(5) +- expression 7 operands: lhs = Counter(1), rhs = Counter(4) +- expression 8 operands: lhs = Counter(2), rhs = Expression(0, Sub) +- expression 9 operands: lhs = Counter(3), rhs = Expression(10, Add) +- expression 10 operands: lhs = Counter(2), rhs = Expression(0, Sub) +Number of file 0 mappings: 14 +- Code(Counter(0)) at (prev + 7, 1) to (start + 1, 9) +- MCDCDecision { bitmap_idx: 1, conditions_num: 2 } at (prev + 1, 8) to (start + 0, 46) +- MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 8) to (start + 0, 9) + true = c1 + false = (c0 - c1) +- MCDCBranch { true: Counter(3), false: Counter(2), condition_id: 2, true_next_id: 0, false_next_id: 0 } at (prev + 0, 13) to (start + 0, 46) + true = c3 + false = c2 +- Code(Counter(1)) at (prev + 0, 16) to (start + 0, 17) +- MCDCDecision { bitmap_idx: 0, conditions_num: 2 } at (prev + 0, 16) to (start + 0, 22) +- MCDCBranch { true: Counter(4), false: Expression(7, Sub), condition_id: 1, true_next_id: 0, false_next_id: 2 } at (prev + 0, 16) to (start + 0, 17) + true = c4 + false = (c1 - c4) +- Code(Expression(7, Sub)) at (prev + 0, 21) to (start + 0, 22) + = (c1 - c4) +- MCDCBranch { true: Counter(5), false: Expression(6, Sub), condition_id: 2, true_next_id: 0, false_next_id: 0 } at (prev + 0, 21) to (start + 0, 22) + true = c5 + false = ((c1 - c4) - c5) +- Code(Expression(5, Add)) at (prev + 0, 25) to (start + 0, 29) + = (c4 + c5) +- Code(Expression(6, Sub)) at (prev + 0, 39) to (start + 0, 44) + = ((c1 - c4) - c5) +- Code(Counter(3)) at (prev + 0, 47) to (start + 2, 6) +- Code(Expression(10, Add)) at (prev + 2, 12) to (start + 2, 6) + = (c2 + (c0 - c1)) +- Code(Expression(9, Add)) at (prev + 3, 1) to (start + 0, 2) + = (c3 + (c2 + (c0 - c1))) + +Function name: mcdc_nested_if::nested_in_then_block_in_condition +Raw bytes (176): 0x[01, 01, 12, 01, 05, 05, 11, 05, 11, 3a, 15, 05, 11, 11, 15, 33, 19, 11, 15, 19, 1d, 19, 1d, 1d, 2e, 33, 19, 11, 15, 3a, 15, 05, 11, 09, 02, 0d, 47, 09, 02, 14, 01, 22, 01, 01, 09, 28, 02, 02, 01, 08, 00, 4b, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 30, 0d, 09, 02, 00, 00, 00, 0d, 00, 4b, 05, 00, 10, 00, 11, 28, 00, 02, 00, 10, 00, 16, 30, 11, 3a, 01, 00, 02, 00, 10, 00, 11, 3a, 00, 15, 00, 16, 30, 15, 36, 02, 00, 00, 00, 15, 00, 16, 33, 00, 1c, 00, 1d, 28, 01, 02, 00, 1c, 00, 22, 30, 19, 2e, 01, 02, 00, 00, 1c, 00, 1d, 19, 00, 21, 00, 22, 30, 26, 1d, 02, 00, 00, 00, 21, 00, 22, 26, 00, 25, 00, 29, 2b, 00, 33, 00, 38, 36, 00, 44, 00, 49, 0d, 00, 4c, 02, 06, 47, 02, 0c, 02, 06, 43, 03, 01, 00, 02] +Number of files: 1 +- file 0 => global file 1 +Number of expressions: 18 +- expression 0 operands: lhs = Counter(0), rhs = Counter(1) +- expression 1 operands: lhs = Counter(1), rhs = Counter(4) +- expression 2 operands: lhs = Counter(1), rhs = Counter(4) +- expression 3 operands: lhs = 
Expression(14, Sub), rhs = Counter(5) +- expression 4 operands: lhs = Counter(1), rhs = Counter(4) +- expression 5 operands: lhs = Counter(4), rhs = Counter(5) +- expression 6 operands: lhs = Expression(12, Add), rhs = Counter(6) +- expression 7 operands: lhs = Counter(4), rhs = Counter(5) +- expression 8 operands: lhs = Counter(6), rhs = Counter(7) +- expression 9 operands: lhs = Counter(6), rhs = Counter(7) +- expression 10 operands: lhs = Counter(7), rhs = Expression(11, Sub) +- expression 11 operands: lhs = Expression(12, Add), rhs = Counter(6) +- expression 12 operands: lhs = Counter(4), rhs = Counter(5) +- expression 13 operands: lhs = Expression(14, Sub), rhs = Counter(5) +- expression 14 operands: lhs = Counter(1), rhs = Counter(4) +- expression 15 operands: lhs = Counter(2), rhs = Expression(0, Sub) +- expression 16 operands: lhs = Counter(3), rhs = Expression(17, Add) +- expression 17 operands: lhs = Counter(2), rhs = Expression(0, Sub) +Number of file 0 mappings: 20 +- Code(Counter(0)) at (prev + 34, 1) to (start + 1, 9) +- MCDCDecision { bitmap_idx: 2, conditions_num: 2 } at (prev + 1, 8) to (start + 0, 75) +- MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 8) to (start + 0, 9) + true = c1 + false = (c0 - c1) +- MCDCBranch { true: Counter(3), false: Counter(2), condition_id: 2, true_next_id: 0, false_next_id: 0 } at (prev + 0, 13) to (start + 0, 75) + true = c3 + false = c2 +- Code(Counter(1)) at (prev + 0, 16) to (start + 0, 17) +- MCDCDecision { bitmap_idx: 0, conditions_num: 2 } at (prev + 0, 16) to (start + 0, 22) +- MCDCBranch { true: Counter(4), false: Expression(14, Sub), condition_id: 1, true_next_id: 0, false_next_id: 2 } at (prev + 0, 16) to (start + 0, 17) + true = c4 + false = (c1 - c4) +- Code(Expression(14, Sub)) at (prev + 0, 21) to (start + 0, 22) + = (c1 - c4) +- MCDCBranch { true: Counter(5), false: Expression(13, Sub), condition_id: 2, true_next_id: 0, false_next_id: 0 } at (prev + 0, 21) to (start + 0, 22) + true = c5 + false = ((c1 - c4) - c5) +- Code(Expression(12, Add)) at (prev + 0, 28) to (start + 0, 29) + = (c4 + c5) +- MCDCDecision { bitmap_idx: 1, conditions_num: 2 } at (prev + 0, 28) to (start + 0, 34) +- MCDCBranch { true: Counter(6), false: Expression(11, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 28) to (start + 0, 29) + true = c6 + false = ((c4 + c5) - c6) +- Code(Counter(6)) at (prev + 0, 33) to (start + 0, 34) +- MCDCBranch { true: Expression(9, Sub), false: Counter(7), condition_id: 2, true_next_id: 0, false_next_id: 0 } at (prev + 0, 33) to (start + 0, 34) + true = (c6 - c7) + false = c7 +- Code(Expression(9, Sub)) at (prev + 0, 37) to (start + 0, 41) + = (c6 - c7) +- Code(Expression(10, Add)) at (prev + 0, 51) to (start + 0, 56) + = (c7 + ((c4 + c5) - c6)) +- Code(Expression(13, Sub)) at (prev + 0, 68) to (start + 0, 73) + = ((c1 - c4) - c5) +- Code(Counter(3)) at (prev + 0, 76) to (start + 2, 6) +- Code(Expression(17, Add)) at (prev + 2, 12) to (start + 2, 6) + = (c2 + (c0 - c1)) +- Code(Expression(16, Add)) at (prev + 3, 1) to (start + 0, 2) + = (c3 + (c2 + (c0 - c1))) + +Function name: mcdc_nested_if::nested_single_condition_decision +Raw bytes (85): 0x[01, 01, 06, 01, 05, 05, 11, 05, 11, 09, 02, 0d, 17, 09, 02, 0b, 01, 17, 01, 04, 09, 28, 00, 02, 04, 08, 00, 29, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 30, 0d, 09, 02, 00, 00, 00, 0d, 00, 29, 05, 00, 10, 00, 11, 20, 11, 0a, 00, 10, 00, 11, 11, 00, 14, 00, 19, 0a, 00, 23, 00, 27, 0d, 
00, 2a, 02, 06, 17, 02, 0c, 02, 06, 13, 03, 01, 00, 02] +Number of files: 1 +- file 0 => global file 1 +Number of expressions: 6 +- expression 0 operands: lhs = Counter(0), rhs = Counter(1) +- expression 1 operands: lhs = Counter(1), rhs = Counter(4) +- expression 2 operands: lhs = Counter(1), rhs = Counter(4) +- expression 3 operands: lhs = Counter(2), rhs = Expression(0, Sub) +- expression 4 operands: lhs = Counter(3), rhs = Expression(5, Add) +- expression 5 operands: lhs = Counter(2), rhs = Expression(0, Sub) +Number of file 0 mappings: 11 +- Code(Counter(0)) at (prev + 23, 1) to (start + 4, 9) +- MCDCDecision { bitmap_idx: 0, conditions_num: 2 } at (prev + 4, 8) to (start + 0, 41) +- MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 8) to (start + 0, 9) + true = c1 + false = (c0 - c1) +- MCDCBranch { true: Counter(3), false: Counter(2), condition_id: 2, true_next_id: 0, false_next_id: 0 } at (prev + 0, 13) to (start + 0, 41) + true = c3 + false = c2 +- Code(Counter(1)) at (prev + 0, 16) to (start + 0, 17) +- Branch { true: Counter(4), false: Expression(2, Sub) } at (prev + 0, 16) to (start + 0, 17) + true = c4 + false = (c1 - c4) +- Code(Counter(4)) at (prev + 0, 20) to (start + 0, 25) +- Code(Expression(2, Sub)) at (prev + 0, 35) to (start + 0, 39) + = (c1 - c4) +- Code(Counter(3)) at (prev + 0, 42) to (start + 2, 6) +- Code(Expression(5, Add)) at (prev + 2, 12) to (start + 2, 6) + = (c2 + (c0 - c1)) +- Code(Expression(4, Add)) at (prev + 3, 1) to (start + 0, 2) + = (c3 + (c2 + (c0 - c1))) + diff --git a/tests/coverage/mcdc_nested_if.coverage b/tests/coverage/mcdc_nested_if.coverage new file mode 100644 index 0000000000000..19529cd6aa432 --- /dev/null +++ b/tests/coverage/mcdc_nested_if.coverage @@ -0,0 +1,235 @@ + LL| |#![feature(coverage_attribute)] + LL| |//@ edition: 2021 + LL| |//@ min-llvm-version: 18 + LL| |//@ compile-flags: -Zcoverage-options=mcdc + LL| |//@ llvm-cov-flags: --show-mcdc + LL| | + LL| 4|fn nested_if_in_condition(a: bool, b: bool, c: bool) { + LL| 4| if a && if b || c { true } else { false } { + ^3 ^2 ^2 ^1 + ------------------ + |---> MC/DC Decision Region (LL:8) to (LL:46) + | + | Number of Conditions: 2 + | Condition C1 --> (LL:8) + | Condition C2 --> (LL:13) + | + | Executed MC/DC Test Vectors: + | + | C1, C2 Result + | 1 { F, - = F } + | 2 { T, F = F } + | 3 { T, T = T } + | + | C1-Pair: covered: (1,3) + | C2-Pair: covered: (2,3) + | MC/DC Coverage for Decision: 100.00% + | + |---> MC/DC Decision Region (LL:16) to (LL:22) + | + | Number of Conditions: 2 + | Condition C1 --> (LL:16) + | Condition C2 --> (LL:21) + | + | Executed MC/DC Test Vectors: + | + | C1, C2 Result + | 1 { F, F = F } + | 2 { T, - = T } + | 3 { F, T = T } + | + | C1-Pair: covered: (1,2) + | C2-Pair: covered: (1,3) + | MC/DC Coverage for Decision: 100.00% + | + ------------------ + LL| 2| say("yes"); + LL| 2| } else { + LL| 2| say("no"); + LL| 2| } + LL| 4|} + LL| | + LL| 4|fn doubly_nested_if_in_condition(a: bool, b: bool, c: bool, d: bool) { + LL| 4| if a && if b || if c && d { true } else { false } { false } else { true } { + ^3 ^2 ^1 ^1 ^1 ^2 ^1 + ------------------ + |---> MC/DC Decision Region (LL:8) to (LL:78) + | + | Number of Conditions: 2 + | Condition C1 --> (LL:8) + | Condition C2 --> (LL:13) + | + | Executed MC/DC Test Vectors: + | + | C1, C2 Result + | 1 { F, - = F } + | 2 { T, F = F } + | 3 { T, T = T } + | + | C1-Pair: covered: (1,3) + | C2-Pair: covered: (2,3) + | MC/DC Coverage for Decision: 
100.00% + | + |---> MC/DC Decision Region (LL:16) to (LL:54) + | + | Number of Conditions: 2 + | Condition C1 --> (LL:16) + | Condition C2 --> (LL:21) + | + | Executed MC/DC Test Vectors: + | + | C1, C2 Result + | 1 { F, F = F } + | 2 { T, - = T } + | 3 { F, T = T } + | + | C1-Pair: covered: (1,2) + | C2-Pair: covered: (1,3) + | MC/DC Coverage for Decision: 100.00% + | + |---> MC/DC Decision Region (LL:24) to (LL:30) + | + | Number of Conditions: 2 + | Condition C1 --> (LL:24) + | Condition C2 --> (LL:29) + | + | Executed MC/DC Test Vectors: + | + | C1, C2 Result + | 1 { F, - = F } + | 2 { T, T = T } + | + | C1-Pair: covered: (1,2) + | C2-Pair: not covered + | MC/DC Coverage for Decision: 50.00% + | + ------------------ + LL| 1| say("yes"); + LL| 3| } else { + LL| 3| say("no"); + LL| 3| } + LL| 4|} + LL| | + LL| 3|fn nested_single_condition_decision(a: bool, b: bool) { + LL| 3| // Decision with only 1 decision should not be instrumented by MCDC because + LL| 3| // branch-coverage is equivalent to MCDC coverage in this case, and we don't + LL| 3| // want to waste bitmap space for this. + LL| 3| if a && if b { false } else { true } { + ^2 ^1 ^1 + ------------------ + |---> MC/DC Decision Region (LL:8) to (LL:41) + | + | Number of Conditions: 2 + | Condition C1 --> (LL:8) + | Condition C2 --> (LL:13) + | + | Executed MC/DC Test Vectors: + | + | C1, C2 Result + | 1 { F, - = F } + | 2 { T, F = F } + | 3 { T, T = T } + | + | C1-Pair: covered: (1,3) + | C2-Pair: covered: (2,3) + | MC/DC Coverage for Decision: 100.00% + | + ------------------ + LL| 1| say("yes"); + LL| 2| } else { + LL| 2| say("no"); + LL| 2| } + LL| 3|} + LL| | + LL| 7|fn nested_in_then_block_in_condition(a: bool, b: bool, c: bool, d: bool, e: bool) { + LL| 7| if a && if b || c { if d && e { true } else { false } } else { false } { + ^6 ^5 ^5 ^2 ^1 ^4 ^1 + ------------------ + |---> MC/DC Decision Region (LL:8) to (LL:75) + | + | Number of Conditions: 2 + | Condition C1 --> (LL:8) + | Condition C2 --> (LL:13) + | + | Executed MC/DC Test Vectors: + | + | C1, C2 Result + | 1 { F, - = F } + | 2 { T, F = F } + | 3 { T, T = T } + | + | C1-Pair: covered: (1,3) + | C2-Pair: covered: (2,3) + | MC/DC Coverage for Decision: 100.00% + | + |---> MC/DC Decision Region (LL:16) to (LL:22) + | + | Number of Conditions: 2 + | Condition C1 --> (LL:16) + | Condition C2 --> (LL:21) + | + | Executed MC/DC Test Vectors: + | + | C1, C2 Result + | 1 { F, F = F } + | 2 { T, - = T } + | 3 { F, T = T } + | + | C1-Pair: covered: (1,2) + | C2-Pair: covered: (1,3) + | MC/DC Coverage for Decision: 100.00% + | + |---> MC/DC Decision Region (LL:28) to (LL:34) + | + | Number of Conditions: 2 + | Condition C1 --> (LL:28) + | Condition C2 --> (LL:33) + | + | Executed MC/DC Test Vectors: + | + | C1, C2 Result + | 1 { F, - = F } + | 2 { T, F = F } + | 3 { T, T = T } + | + | C1-Pair: covered: (1,3) + | C2-Pair: covered: (2,3) + | MC/DC Coverage for Decision: 100.00% + | + ------------------ + LL| 1| say("yes"); + LL| 6| } else { + LL| 6| say("no"); + LL| 6| } + LL| 7|} + LL| | + LL| |#[coverage(off)] + LL| |fn main() { + LL| | nested_if_in_condition(true, false, false); + LL| | nested_if_in_condition(true, true, true); + LL| | nested_if_in_condition(true, false, true); + LL| | nested_if_in_condition(false, true, true); + LL| | + LL| | doubly_nested_if_in_condition(true, false, false, true); + LL| | doubly_nested_if_in_condition(true, true, true, true); + LL| | doubly_nested_if_in_condition(true, false, true, true); + LL| | doubly_nested_if_in_condition(false, true, 
true, true); + LL| | + LL| | nested_single_condition_decision(true, true); + LL| | nested_single_condition_decision(true, false); + LL| | nested_single_condition_decision(false, false); + LL| | + LL| | nested_in_then_block_in_condition(false, false, false, false, false); + LL| | nested_in_then_block_in_condition(true, false, false, false, false); + LL| | nested_in_then_block_in_condition(true, true, false, false, false); + LL| | nested_in_then_block_in_condition(true, false, true, false, false); + LL| | nested_in_then_block_in_condition(true, false, true, true, false); + LL| | nested_in_then_block_in_condition(true, false, true, false, true); + LL| | nested_in_then_block_in_condition(true, false, true, true, true); + LL| |} + LL| | + LL| |#[coverage(off)] + LL| |fn say(message: &str) { + LL| | core::hint::black_box(message); + LL| |} + diff --git a/tests/coverage/mcdc_nested_if.rs b/tests/coverage/mcdc_nested_if.rs new file mode 100644 index 0000000000000..3d869771f75b5 --- /dev/null +++ b/tests/coverage/mcdc_nested_if.rs @@ -0,0 +1,70 @@ +#![feature(coverage_attribute)] +//@ edition: 2021 +//@ min-llvm-version: 18 +//@ compile-flags: -Zcoverage-options=mcdc +//@ llvm-cov-flags: --show-mcdc + +fn nested_if_in_condition(a: bool, b: bool, c: bool) { + if a && if b || c { true } else { false } { + say("yes"); + } else { + say("no"); + } +} + +fn doubly_nested_if_in_condition(a: bool, b: bool, c: bool, d: bool) { + if a && if b || if c && d { true } else { false } { false } else { true } { + say("yes"); + } else { + say("no"); + } +} + +fn nested_single_condition_decision(a: bool, b: bool) { + // Decision with only 1 decision should not be instrumented by MCDC because + // branch-coverage is equivalent to MCDC coverage in this case, and we don't + // want to waste bitmap space for this. 
+ if a && if b { false } else { true } { + say("yes"); + } else { + say("no"); + } +} + +fn nested_in_then_block_in_condition(a: bool, b: bool, c: bool, d: bool, e: bool) { + if a && if b || c { if d && e { true } else { false } } else { false } { + say("yes"); + } else { + say("no"); + } +} + +#[coverage(off)] +fn main() { + nested_if_in_condition(true, false, false); + nested_if_in_condition(true, true, true); + nested_if_in_condition(true, false, true); + nested_if_in_condition(false, true, true); + + doubly_nested_if_in_condition(true, false, false, true); + doubly_nested_if_in_condition(true, true, true, true); + doubly_nested_if_in_condition(true, false, true, true); + doubly_nested_if_in_condition(false, true, true, true); + + nested_single_condition_decision(true, true); + nested_single_condition_decision(true, false); + nested_single_condition_decision(false, false); + + nested_in_then_block_in_condition(false, false, false, false, false); + nested_in_then_block_in_condition(true, false, false, false, false); + nested_in_then_block_in_condition(true, true, false, false, false); + nested_in_then_block_in_condition(true, false, true, false, false); + nested_in_then_block_in_condition(true, false, true, true, false); + nested_in_then_block_in_condition(true, false, true, false, true); + nested_in_then_block_in_condition(true, false, true, true, true); +} + +#[coverage(off)] +fn say(message: &str) { + core::hint::black_box(message); +} From c8ff8a4dc7c9867c54df8f6d31120f4cd3923508 Mon Sep 17 00:00:00 2001 From: Sasha Pourcelot Date: Mon, 22 Apr 2024 18:48:41 +0200 Subject: [PATCH 32/56] Pretty-print parenthesis around binary in postfix match Signed-off-by: Sasha Pourcelot --- .../rustc_ast_pretty/src/pprust/state/expr.rs | 2 +- tests/pretty/postfix-match/precedence.pp | 34 +++++++++++++++++++ tests/pretty/postfix-match/precedence.rs | 34 +++++++++++++++++++ .../simple-matches.rs} | 0 4 files changed, 69 insertions(+), 1 deletion(-) create mode 100644 tests/pretty/postfix-match/precedence.pp create mode 100644 tests/pretty/postfix-match/precedence.rs rename tests/pretty/{postfix-match.rs => postfix-match/simple-matches.rs} (100%) diff --git a/compiler/rustc_ast_pretty/src/pprust/state/expr.rs b/compiler/rustc_ast_pretty/src/pprust/state/expr.rs index 4cbdc9f256dfe..b5bb781acdfe0 100644 --- a/compiler/rustc_ast_pretty/src/pprust/state/expr.rs +++ b/compiler/rustc_ast_pretty/src/pprust/state/expr.rs @@ -488,7 +488,7 @@ impl<'a> State<'a> { self.space(); } MatchKind::Postfix => { - self.print_expr_as_cond(expr); + self.print_expr_maybe_paren(expr, parser::PREC_POSTFIX, fixup); self.word_nbsp(".match"); } } diff --git a/tests/pretty/postfix-match/precedence.pp b/tests/pretty/postfix-match/precedence.pp new file mode 100644 index 0000000000000..967aa7bc39ef3 --- /dev/null +++ b/tests/pretty/postfix-match/precedence.pp @@ -0,0 +1,34 @@ +#![feature(prelude_import)] +#![no_std] +#![feature(postfix_match)] +#[prelude_import] +use ::std::prelude::rust_2015::*; +#[macro_use] +extern crate std; + +use std::ops::Add; + +//@ pretty-mode:expanded +//@ pp-exact:precedence.pp + +macro_rules! 
repro { ($e:expr) => { $e.match { _ => {} } }; } + +struct Struct {} + +impl Add for usize { + type Output = (); + fn add(self, _: Struct) -> () { () } +} +pub fn main() { + let a; + ( + { 1 } + 1).match { + _ => {} + }; + (4 as usize).match { _ => {} }; + (return).match { _ => {} }; + (a = 42).match { _ => {} }; + (|| {}).match { _ => {} }; + (42..101).match { _ => {} }; + (1 + Struct {}).match { _ => {} }; +} diff --git a/tests/pretty/postfix-match/precedence.rs b/tests/pretty/postfix-match/precedence.rs new file mode 100644 index 0000000000000..ee947e161ddcf --- /dev/null +++ b/tests/pretty/postfix-match/precedence.rs @@ -0,0 +1,34 @@ +#![feature(postfix_match)] + +use std::ops::Add; + +//@ pretty-mode:expanded +//@ pp-exact:precedence.pp + +macro_rules! repro { + ($e:expr) => { + $e.match { + _ => {} + } + }; +} + +struct Struct {} + +impl Add for usize { + type Output = (); + fn add(self, _: Struct) -> () { + () + } +} +pub fn main() { + let a; + + repro!({ 1 } + 1); + repro!(4 as usize); + repro!(return); + repro!(a = 42); + repro!(|| {}); + repro!(42..101); + repro!(1 + Struct {}); +} diff --git a/tests/pretty/postfix-match.rs b/tests/pretty/postfix-match/simple-matches.rs similarity index 100% rename from tests/pretty/postfix-match.rs rename to tests/pretty/postfix-match/simple-matches.rs From f9263374fb8143c7fa1850a43759249a73415cdf Mon Sep 17 00:00:00 2001 From: Zalathar Date: Mon, 29 Apr 2024 16:19:55 +1000 Subject: [PATCH 33/56] coverage: Replace boolean options with a `CoverageLevel` enum --- compiler/rustc_interface/src/tests.rs | 14 +++++------ compiler/rustc_session/src/config.rs | 24 +++++++++++++++---- compiler/rustc_session/src/options.rs | 14 ++++------- compiler/rustc_session/src/session.rs | 10 ++++---- src/doc/rustc/src/instrument-coverage.md | 6 +---- .../src/compiler-flags/coverage-options.md | 11 ++++++++- .../coverage-options.bad.stderr | 2 +- .../instrument-coverage/coverage-options.rs | 11 ++++----- 8 files changed, 53 insertions(+), 39 deletions(-) diff --git a/compiler/rustc_interface/src/tests.rs b/compiler/rustc_interface/src/tests.rs index 68c7f187f52d2..db150cc1f875d 100644 --- a/compiler/rustc_interface/src/tests.rs +++ b/compiler/rustc_interface/src/tests.rs @@ -4,12 +4,12 @@ use rustc_data_structures::profiling::TimePassesFormat; use rustc_errors::{emitter::HumanReadableErrorType, registry, ColorConfig}; use rustc_session::config::{ build_configuration, build_session_options, rustc_optgroups, BranchProtection, CFGuard, Cfg, - CollapseMacroDebuginfo, CoverageOptions, DebugInfo, DumpMonoStatsFormat, ErrorOutputType, - ExternEntry, ExternLocation, Externs, FunctionReturn, InliningThreshold, Input, - InstrumentCoverage, InstrumentXRay, LinkSelfContained, LinkerPluginLto, LocationDetail, LtoCli, - NextSolverConfig, OomStrategy, Options, OutFileName, OutputType, OutputTypes, PAuthKey, PacRet, - Passes, Polonius, ProcMacroExecutionStrategy, Strip, SwitchWithOptPath, SymbolManglingVersion, - WasiExecModel, + CollapseMacroDebuginfo, CoverageLevel, CoverageOptions, DebugInfo, DumpMonoStatsFormat, + ErrorOutputType, ExternEntry, ExternLocation, Externs, FunctionReturn, InliningThreshold, + Input, InstrumentCoverage, InstrumentXRay, LinkSelfContained, LinkerPluginLto, LocationDetail, + LtoCli, NextSolverConfig, OomStrategy, Options, OutFileName, OutputType, OutputTypes, PAuthKey, + PacRet, Passes, Polonius, ProcMacroExecutionStrategy, Strip, SwitchWithOptPath, + SymbolManglingVersion, WasiExecModel, }; use rustc_session::lint::Level; use 
rustc_session::search_paths::SearchPath; @@ -761,7 +761,7 @@ fn test_unstable_options_tracking_hash() { }) ); tracked!(codegen_backend, Some("abc".to_string())); - tracked!(coverage_options, CoverageOptions { branch: true, mcdc: true }); + tracked!(coverage_options, CoverageOptions { level: CoverageLevel::Mcdc }); tracked!(crate_attr, vec!["abc".to_string()]); tracked!(cross_crate_inline_threshold, InliningThreshold::Always); tracked!(debug_info_for_profiling, true); diff --git a/compiler/rustc_session/src/config.rs b/compiler/rustc_session/src/config.rs index 4f259960ac3a9..a3978cc14a43f 100644 --- a/compiler/rustc_session/src/config.rs +++ b/compiler/rustc_session/src/config.rs @@ -146,10 +146,26 @@ pub enum InstrumentCoverage { /// Individual flag values controlled by `-Z coverage-options`. #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)] pub struct CoverageOptions { - /// Add branch coverage instrumentation. - pub branch: bool, - /// Add mcdc coverage instrumentation. - pub mcdc: bool, + pub level: CoverageLevel, + // Other boolean or enum-valued options might be added here. +} + +/// Controls whether branch coverage or MC/DC coverage is enabled. +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] +pub enum CoverageLevel { + /// Instrument for coverage at the MIR block level. + Block, + /// Also instrument branch points (includes block coverage). + Branch, + /// Instrument for MC/DC. Mostly a superset of branch coverage, but might + /// differ in some corner cases. + Mcdc, +} + +impl Default for CoverageLevel { + fn default() -> Self { + Self::Block + } } /// Settings for `-Z instrument-xray` flag. diff --git a/compiler/rustc_session/src/options.rs b/compiler/rustc_session/src/options.rs index e1474ee365b70..7355e5b695347 100644 --- a/compiler/rustc_session/src/options.rs +++ b/compiler/rustc_session/src/options.rs @@ -394,7 +394,7 @@ mod desc { pub const parse_optimization_fuel: &str = "crate=integer"; pub const parse_dump_mono_stats: &str = "`markdown` (default) or `json`"; pub const parse_instrument_coverage: &str = parse_bool; - pub const parse_coverage_options: &str = "either `no-branch`, `branch` or `mcdc`"; + pub const parse_coverage_options: &str = "`block` | `branch` | `mcdc`"; pub const parse_instrument_xray: &str = "either a boolean (`yes`, `no`, `on`, `off`, etc), or a comma separated list of settings: `always` or `never` (mutually exclusive), `ignore-loops`, `instruction-threshold=N`, `skip-entry`, `skip-exit`"; pub const parse_unpretty: &str = "`string` or `string=string`"; pub const parse_treat_err_as_bug: &str = "either no value or a non-negative number"; @@ -946,15 +946,9 @@ mod parse { for option in v.split(',') { match option { - "no-branch" => { - slot.branch = false; - slot.mcdc = false; - } - "branch" => slot.branch = true, - "mcdc" => { - slot.branch = true; - slot.mcdc = true; - } + "block" => slot.level = CoverageLevel::Block, + "branch" => slot.level = CoverageLevel::Branch, + "mcdc" => slot.level = CoverageLevel::Mcdc, _ => return false, } } diff --git a/compiler/rustc_session/src/session.rs b/compiler/rustc_session/src/session.rs index 2bc14b43234d0..db01bb90d6fac 100644 --- a/compiler/rustc_session/src/session.rs +++ b/compiler/rustc_session/src/session.rs @@ -1,8 +1,8 @@ use crate::code_stats::CodeStats; pub use crate::code_stats::{DataTypeKind, FieldInfo, FieldKind, SizeKind, VariantInfo}; use crate::config::{ - self, CrateType, FunctionReturn, InstrumentCoverage, OptLevel, OutFileName, OutputType, - RemapPathScopeComponents, 
SwitchWithOptPath, + self, CoverageLevel, CrateType, FunctionReturn, InstrumentCoverage, OptLevel, OutFileName, + OutputType, RemapPathScopeComponents, SwitchWithOptPath, }; use crate::config::{ErrorOutputType, Input}; use crate::errors; @@ -349,11 +349,13 @@ impl Session { } pub fn instrument_coverage_branch(&self) -> bool { - self.instrument_coverage() && self.opts.unstable_opts.coverage_options.branch + self.instrument_coverage() + && self.opts.unstable_opts.coverage_options.level >= CoverageLevel::Branch } pub fn instrument_coverage_mcdc(&self) -> bool { - self.instrument_coverage() && self.opts.unstable_opts.coverage_options.mcdc + self.instrument_coverage() + && self.opts.unstable_opts.coverage_options.level >= CoverageLevel::Mcdc } pub fn is_sanitizer_cfi_enabled(&self) -> bool { diff --git a/src/doc/rustc/src/instrument-coverage.md b/src/doc/rustc/src/instrument-coverage.md index bbd81e7437c67..32dc992c42fac 100644 --- a/src/doc/rustc/src/instrument-coverage.md +++ b/src/doc/rustc/src/instrument-coverage.md @@ -348,11 +348,7 @@ $ llvm-cov report \ ## `-Z coverage-options=` -This unstable option provides finer control over some aspects of coverage -instrumentation. Pass one or more of the following values, separated by commas. - -- Either `no-branch`, `branch` or `mcdc` - - `branch` enables branch coverage instrumentation and `mcdc` further enables modified condition/decision coverage instrumentation. `no-branch` disables branch coverage instrumentation, which is same as do not pass `branch` or `mcdc`. +[This unstable option is described in the Unstable Book.](../unstable-book/compiler-flags/coverage-options.html) ## Other references diff --git a/src/doc/unstable-book/src/compiler-flags/coverage-options.md b/src/doc/unstable-book/src/compiler-flags/coverage-options.md index 5e192d9aca13e..2127883355025 100644 --- a/src/doc/unstable-book/src/compiler-flags/coverage-options.md +++ b/src/doc/unstable-book/src/compiler-flags/coverage-options.md @@ -5,4 +5,13 @@ This option controls details of the coverage instrumentation performed by Multiple options can be passed, separated by commas. Valid options are: -- `no-branch`, `branch` or `mcdc`: `branch` enables branch coverage instrumentation and `mcdc` further enables modified condition/decision coverage instrumentation. `no-branch` disables branch coverage instrumentation as well as mcdc instrumentation, which is same as do not pass `branch` or `mcdc`. +- `block`, `branch`, `mcdc`: + Sets the level of coverage instrumentation. + Setting the level will override any previously-specified level. + - `block` (default): + Blocks in the control-flow graph will be instrumented for coverage. + - `branch`: + In addition to block coverage, also enables branch coverage instrumentation. + - `mcdc`: + In addition to block and branch coverage, also enables MC/DC instrumentation. + (Branch coverage instrumentation may differ in some cases.) 
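Editorial note (not part of the patch): the `Session::instrument_coverage_branch`/`_mcdc` helpers changed above can use `>=` because the derived `PartialOrd`/`Ord` on the new `CoverageLevel` enum orders variants by declaration order, so `Block < Branch < Mcdc` and a higher level implies all lower ones. A minimal standalone sketch of that ordering behaviour, assuming only a local copy of the enum (names mirror the patch):

```rust
// Sketch only: a trimmed-down CoverageLevel to show why ordered comparisons
// replace the old pair of booleans. Variant order defines the derived ordering.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
enum CoverageLevel {
    Block,
    Branch,
    Mcdc,
}

fn main() {
    let level = CoverageLevel::Mcdc;
    // Mirrors instrument_coverage_branch(): mcdc implies branch coverage.
    assert!(level >= CoverageLevel::Branch);
    // Block is the lowest level, enabled whenever instrumentation is on at all.
    assert!(CoverageLevel::Block < CoverageLevel::Branch);
}
```

This is why `-Zcoverage-options=mcdc` no longer needs to also set a separate `branch` flag: selecting a level subsumes the levels below it.
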
diff --git a/tests/ui/instrument-coverage/coverage-options.bad.stderr b/tests/ui/instrument-coverage/coverage-options.bad.stderr index f6e5421f87845..9e2c19e90d596 100644 --- a/tests/ui/instrument-coverage/coverage-options.bad.stderr +++ b/tests/ui/instrument-coverage/coverage-options.bad.stderr @@ -1,2 +1,2 @@ -error: incorrect value `bad` for unstable option `coverage-options` - either `no-branch`, `branch` or `mcdc` was expected +error: incorrect value `bad` for unstable option `coverage-options` - `block` | `branch` | `mcdc` was expected diff --git a/tests/ui/instrument-coverage/coverage-options.rs b/tests/ui/instrument-coverage/coverage-options.rs index 50c01ed29b5ec..332da32e435dc 100644 --- a/tests/ui/instrument-coverage/coverage-options.rs +++ b/tests/ui/instrument-coverage/coverage-options.rs @@ -1,20 +1,17 @@ //@ needs-profiler-support -//@ revisions: branch no-branch bad +//@ revisions: block branch bad //@ compile-flags -Cinstrument-coverage +//@ [block] check-pass +//@ [block] compile-flags: -Zcoverage-options=block + //@ [branch] check-pass //@ [branch] compile-flags: -Zcoverage-options=branch -//@ [no-branch] check-pass -//@ [no-branch] compile-flags: -Zcoverage-options=no-branch - //@ [mcdc] check-pass //@ [mcdc] compile-flags: -Zcoverage-options=mcdc //@ [bad] check-fail //@ [bad] compile-flags: -Zcoverage-options=bad -//@ [conflict] check-fail -//@ [conflict] compile-flags: -Zcoverage-options=no-branch,mcdc - fn main() {} From c6e946d0f0405fb7641b7dd2cad2d9432e2313f8 Mon Sep 17 00:00:00 2001 From: est31 Date: Mon, 29 Apr 2024 14:53:38 +0200 Subject: [PATCH 34/56] Change wording --- compiler/rustc_ast_lowering/messages.ftl | 2 +- tests/ui/issues/issue-43250.stderr | 4 ++-- tests/ui/lowering/expr-in-pat-issue-99380.stderr | 2 +- tests/ui/macros/vec-macro-in-pattern.stderr | 2 +- tests/ui/match/expr_before_ident_pat.stderr | 2 +- tests/ui/pattern/issue-92074-macro-ice.stderr | 6 +++--- 6 files changed, 9 insertions(+), 9 deletions(-) diff --git a/compiler/rustc_ast_lowering/messages.ftl b/compiler/rustc_ast_lowering/messages.ftl index aa5a85b9c4ca2..10efe6fba6550 100644 --- a/compiler/rustc_ast_lowering/messages.ftl +++ b/compiler/rustc_ast_lowering/messages.ftl @@ -5,7 +5,7 @@ ast_lowering_abi_specified_multiple_times = ast_lowering_arbitrary_expression_in_pattern = arbitrary expressions aren't allowed in patterns - .pattern_from_macro_note = the :expr fragment specifier forces the metavariable's content to be an expression + .pattern_from_macro_note = the `expr` fragment specifier forces the metavariable's content to be an expression ast_lowering_argument = argument diff --git a/tests/ui/issues/issue-43250.stderr b/tests/ui/issues/issue-43250.stderr index 01c22a9e79166..e74342b85adb3 100644 --- a/tests/ui/issues/issue-43250.stderr +++ b/tests/ui/issues/issue-43250.stderr @@ -4,7 +4,7 @@ error: arbitrary expressions aren't allowed in patterns LL | m!(y); | ^ | - = note: the :expr fragment specifier forces the metavariable's content to be an expression + = note: the `expr` fragment specifier forces the metavariable's content to be an expression error: arbitrary expressions aren't allowed in patterns --> $DIR/issue-43250.rs:11:8 @@ -12,7 +12,7 @@ error: arbitrary expressions aren't allowed in patterns LL | m!(C); | ^ | - = note: the :expr fragment specifier forces the metavariable's content to be an expression + = note: the `expr` fragment specifier forces the metavariable's content to be an expression error: aborting due to 2 previous errors diff --git 
a/tests/ui/lowering/expr-in-pat-issue-99380.stderr b/tests/ui/lowering/expr-in-pat-issue-99380.stderr index 7438bba270c72..29438c9b06360 100644 --- a/tests/ui/lowering/expr-in-pat-issue-99380.stderr +++ b/tests/ui/lowering/expr-in-pat-issue-99380.stderr @@ -4,7 +4,7 @@ error: arbitrary expressions aren't allowed in patterns LL | foo!(Some(3)); | ^^^^^^^ | - = note: the :expr fragment specifier forces the metavariable's content to be an expression + = note: the `expr` fragment specifier forces the metavariable's content to be an expression error: aborting due to 1 previous error diff --git a/tests/ui/macros/vec-macro-in-pattern.stderr b/tests/ui/macros/vec-macro-in-pattern.stderr index 7c87f3b7b8e6e..1a446b8c3edbc 100644 --- a/tests/ui/macros/vec-macro-in-pattern.stderr +++ b/tests/ui/macros/vec-macro-in-pattern.stderr @@ -4,7 +4,7 @@ error: arbitrary expressions aren't allowed in patterns LL | Some(vec![43]) => {} | ^^^^^^^^ | - = note: the :expr fragment specifier forces the metavariable's content to be an expression + = note: the `expr` fragment specifier forces the metavariable's content to be an expression = note: this error originates in the macro `vec` (in Nightly builds, run with -Z macro-backtrace for more info) error: aborting due to 1 previous error diff --git a/tests/ui/match/expr_before_ident_pat.stderr b/tests/ui/match/expr_before_ident_pat.stderr index 2f59573b53f44..1657c51545cb2 100644 --- a/tests/ui/match/expr_before_ident_pat.stderr +++ b/tests/ui/match/expr_before_ident_pat.stderr @@ -10,7 +10,7 @@ error: arbitrary expressions aren't allowed in patterns LL | funny!(a, a); | ^ | - = note: the :expr fragment specifier forces the metavariable's content to be an expression + = note: the `expr` fragment specifier forces the metavariable's content to be an expression error: aborting due to 2 previous errors diff --git a/tests/ui/pattern/issue-92074-macro-ice.stderr b/tests/ui/pattern/issue-92074-macro-ice.stderr index bf53eb4bbcd47..025592116e511 100644 --- a/tests/ui/pattern/issue-92074-macro-ice.stderr +++ b/tests/ui/pattern/issue-92074-macro-ice.stderr @@ -7,7 +7,7 @@ LL | () => { force_expr!(Vec::new()) } LL | assert!(matches!(x, En::A(make_vec!()))); | ----------- in this macro invocation | - = note: the :expr fragment specifier forces the metavariable's content to be an expression + = note: the `expr` fragment specifier forces the metavariable's content to be an expression = note: this error originates in the macro `make_vec` (in Nightly builds, run with -Z macro-backtrace for more info) error: arbitrary expressions aren't allowed in patterns @@ -19,7 +19,7 @@ LL | () => { force_pat!(get_usize(), get_usize()) } LL | assert!(matches!(5, make_pat!())); | ----------- in this macro invocation | - = note: the :expr fragment specifier forces the metavariable's content to be an expression + = note: the `expr` fragment specifier forces the metavariable's content to be an expression = note: this error originates in the macro `make_pat` (in Nightly builds, run with -Z macro-backtrace for more info) error: arbitrary expressions aren't allowed in patterns @@ -31,7 +31,7 @@ LL | () => { force_pat!(get_usize(), get_usize()) } LL | assert!(matches!(5, make_pat!())); | ----------- in this macro invocation | - = note: the :expr fragment specifier forces the metavariable's content to be an expression + = note: the `expr` fragment specifier forces the metavariable's content to be an expression = note: this error originates in the macro `make_pat` (in Nightly builds, run with -Z 
macro-backtrace for more info) error: aborting due to 3 previous errors From 52ea73a54064e4737a065c5060d15f74e18e04d3 Mon Sep 17 00:00:00 2001 From: Krasimir Georgiev Date: Mon, 29 Apr 2024 13:03:45 +0000 Subject: [PATCH 35/56] adapt a codegen test for llvm 19 No functional changes intended. Found by our experimental rust + LLVM @ HEAD bot: https://buildkite.com/llvm-project/rust-llvm-integrate-prototype/builds/27747#018f2570-018c-4b12-9c5a-38cf81453683/957-965 --- tests/codegen/ptr-read-metadata.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/codegen/ptr-read-metadata.rs b/tests/codegen/ptr-read-metadata.rs index 4c623dee5e1e9..e3565c962f738 100644 --- a/tests/codegen/ptr-read-metadata.rs +++ b/tests/codegen/ptr-read-metadata.rs @@ -47,7 +47,7 @@ pub unsafe fn read_byte_assume_init(p: &MaybeUninit) -> u8 { p.assume_init_read() } -// CHECK-LABEL: define {{(dso_local )?}}noundef i32 @copy_char( +// CHECK-LABEL: define {{(dso_local )?}}noundef {{(range\(.*\) )?}}i32 @copy_char( #[no_mangle] pub unsafe fn copy_char(p: *const char) -> char { // CHECK-NOT: load @@ -58,7 +58,7 @@ pub unsafe fn copy_char(p: *const char) -> char { *p } -// CHECK-LABEL: define {{(dso_local )?}}noundef i32 @read_char( +// CHECK-LABEL: define {{(dso_local )?}}noundef {{(range\(.*\) )?}}i32 @read_char( #[no_mangle] pub unsafe fn read_char(p: *const char) -> char { // CHECK-NOT: load @@ -80,7 +80,7 @@ pub unsafe fn read_char_maybe_uninit(p: *const MaybeUninit) -> MaybeUninit p.read() } -// CHECK-LABEL: define {{(dso_local )?}}noundef i32 @read_char_assume_init( +// CHECK-LABEL: define {{(dso_local )?}}noundef {{(range\(.*\) )?}}i32 @read_char_assume_init( #[no_mangle] pub unsafe fn read_char_assume_init(p: &MaybeUninit) -> char { // CHECK-NOT: load From d31b7db8e46857853cc722de0158e73e16b09ed9 Mon Sep 17 00:00:00 2001 From: blyxyas Date: Mon, 29 Apr 2024 15:44:51 +0200 Subject: [PATCH 36/56] [Refactor] Rename Lint and LintGroup\'s is_loaded to is_externally_loaded --- compiler/rustc_driver_impl/src/lib.rs | 2 +- compiler/rustc_lint/src/context.rs | 18 ++++++++++-------- compiler/rustc_lint_defs/src/lib.rs | 9 +++++---- tests/ui/statics/nested_struct.rs | 4 ++-- 4 files changed, 18 insertions(+), 15 deletions(-) diff --git a/compiler/rustc_driver_impl/src/lib.rs b/compiler/rustc_driver_impl/src/lib.rs index b3cba4dbfc20e..c1007ccd8e902 100644 --- a/compiler/rustc_driver_impl/src/lib.rs +++ b/compiler/rustc_driver_impl/src/lib.rs @@ -971,7 +971,7 @@ Available lint options: let lint_store = unerased_lint_store(sess); let (loaded, builtin): (Vec<_>, _) = - lint_store.get_lints().iter().cloned().partition(|&lint| lint.is_loaded); + lint_store.get_lints().iter().cloned().partition(|&lint| lint.is_externally_loaded); let loaded = sort_lints(sess, loaded); let builtin = sort_lints(sess, builtin); diff --git a/compiler/rustc_lint/src/context.rs b/compiler/rustc_lint/src/context.rs index d1dcfc524b55f..d56d616809a34 100644 --- a/compiler/rustc_lint/src/context.rs +++ b/compiler/rustc_lint/src/context.rs @@ -110,7 +110,7 @@ struct LintAlias { struct LintGroup { lint_ids: Vec, - is_loaded: bool, + is_externally_loaded: bool, depr: Option, } @@ -159,7 +159,9 @@ impl LintStore { // Don't display deprecated lint groups. depr.is_none() }) - .map(|(k, LintGroup { lint_ids, is_loaded, .. })| (*k, lint_ids.clone(), *is_loaded)) + .map(|(k, LintGroup { lint_ids, is_externally_loaded, .. 
})| { + (*k, lint_ids.clone(), *is_externally_loaded) + }) } pub fn register_early_pass( @@ -218,7 +220,7 @@ impl LintStore { .entry(edition.lint_name()) .or_insert(LintGroup { lint_ids: vec![], - is_loaded: lint.is_loaded, + is_externally_loaded: lint.is_externally_loaded, depr: None, }) .lint_ids @@ -231,7 +233,7 @@ impl LintStore { .entry("future_incompatible") .or_insert(LintGroup { lint_ids: vec![], - is_loaded: lint.is_loaded, + is_externally_loaded: lint.is_externally_loaded, depr: None, }) .lint_ids @@ -246,7 +248,7 @@ impl LintStore { alias, LintGroup { lint_ids: vec![], - is_loaded: false, + is_externally_loaded: false, depr: Some(LintAlias { name: lint_name, silent: true }), }, ); @@ -254,21 +256,21 @@ impl LintStore { pub fn register_group( &mut self, - is_loaded: bool, + is_externally_loaded: bool, name: &'static str, deprecated_name: Option<&'static str>, to: Vec, ) { let new = self .lint_groups - .insert(name, LintGroup { lint_ids: to, is_loaded, depr: None }) + .insert(name, LintGroup { lint_ids: to, is_externally_loaded, depr: None }) .is_none(); if let Some(deprecated) = deprecated_name { self.lint_groups.insert( deprecated, LintGroup { lint_ids: vec![], - is_loaded, + is_externally_loaded, depr: Some(LintAlias { name, silent: false }), }, ); diff --git a/compiler/rustc_lint_defs/src/lib.rs b/compiler/rustc_lint_defs/src/lib.rs index 7f200a7b623d6..0569e30fd113c 100644 --- a/compiler/rustc_lint_defs/src/lib.rs +++ b/compiler/rustc_lint_defs/src/lib.rs @@ -323,7 +323,8 @@ pub struct Lint { pub future_incompatible: Option, - pub is_loaded: bool, + /// `true` if this lint is being loaded by another tool (e.g. Clippy). + pub is_externally_loaded: bool, /// `Some` if this lint is feature gated, otherwise `None`. pub feature_gate: Option, @@ -468,7 +469,7 @@ impl Lint { default_level: Level::Forbid, desc: "", edition_lint_opts: None, - is_loaded: false, + is_externally_loaded: false, report_in_external_macro: false, future_incompatible: None, feature_gate: None, @@ -817,7 +818,7 @@ macro_rules! declare_lint { name: stringify!($NAME), default_level: $crate::$Level, desc: $desc, - is_loaded: false, + is_externally_loaded: false, $($v: true,)* $(feature_gate: Some($gate),)? $(future_incompatible: Some($crate::FutureIncompatibleInfo { @@ -859,7 +860,7 @@ macro_rules! declare_tool_lint { edition_lint_opts: None, report_in_external_macro: $external, future_incompatible: None, - is_loaded: true, + is_externally_loaded: true, $(feature_gate: Some($gate),)? 
crate_level_only: false, ..$crate::Lint::default_fields_for_macro() diff --git a/tests/ui/statics/nested_struct.rs b/tests/ui/statics/nested_struct.rs index f5819f50789c2..6745e10296241 100644 --- a/tests/ui/statics/nested_struct.rs +++ b/tests/ui/statics/nested_struct.rs @@ -9,7 +9,7 @@ pub struct Lint { pub name: &'static str, pub desc: &'static str, pub report_in_external_macro: bool, - pub is_loaded: bool, + pub is_externally_loaded: bool, pub crate_level_only: bool, } @@ -17,7 +17,7 @@ static FOO: &Lint = &Lint { name: &"foo", desc: "desc", report_in_external_macro: false, - is_loaded: true, + is_externally_loaded: true, crate_level_only: false, }; From 5776aec662f3dd367ee04b5c9e34cba0c761df10 Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Fri, 26 Apr 2024 12:36:33 -0400 Subject: [PATCH 37/56] Make names more accurate --- .../src/solve/assembly/mod.rs | 18 +++++++++++------- .../src/solve/normalizes_to/mod.rs | 15 ++++++++++----- .../src/solve/trait_goals.rs | 11 ++++++++--- 3 files changed, 29 insertions(+), 15 deletions(-) diff --git a/compiler/rustc_trait_selection/src/solve/assembly/mod.rs b/compiler/rustc_trait_selection/src/solve/assembly/mod.rs index 9522152909326..6cecdeec9de24 100644 --- a/compiler/rustc_trait_selection/src/solve/assembly/mod.rs +++ b/compiler/rustc_trait_selection/src/solve/assembly/mod.rs @@ -56,7 +56,7 @@ pub(super) trait GoalKind<'tcx>: /// Consider a clause, which consists of a "assumption" and some "requirements", /// to satisfy a goal. If the requirements hold, then attempt to satisfy our /// goal by equating it with the assumption. - fn consider_implied_clause( + fn probe_and_consider_implied_clause( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, assumption: ty::Clause<'tcx>, @@ -73,7 +73,7 @@ pub(super) trait GoalKind<'tcx>: /// Consider a clause specifically for a `dyn Trait` self type. This requires /// additionally checking all of the supertraits and object bounds to hold, /// since they're not implied by the well-formedness of the object type. - fn consider_object_bound_candidate( + fn probe_and_consider_object_bound_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, assumption: ty::Clause<'tcx>, @@ -81,7 +81,7 @@ pub(super) trait GoalKind<'tcx>: Self::probe_and_match_goal_against_assumption(ecx, goal, assumption, |ecx| { let tcx = ecx.tcx(); let ty::Dynamic(bounds, _, _) = *goal.predicate.self_ty().kind() else { - bug!("expected object type in `consider_object_bound_candidate`"); + bug!("expected object type in `probe_and_consider_object_bound_candidate`"); }; // FIXME(-Znext-solver=coinductive): Should this be `GoalSource::ImplWhereBound`? 
ecx.add_goals( @@ -557,7 +557,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { candidates: &mut Vec>, ) { for (i, assumption) in goal.param_env.caller_bounds().iter().enumerate() { - match G::consider_implied_clause(self, goal, assumption, []) { + match G::probe_and_consider_implied_clause(self, goal, assumption, []) { Ok(result) => { candidates.push(Candidate { source: CandidateSource::ParamEnv(i), result }) } @@ -648,7 +648,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { for assumption in self.tcx().item_bounds(alias_ty.def_id).instantiate(self.tcx(), alias_ty.args) { - match G::consider_implied_clause(self, goal, assumption, []) { + match G::probe_and_consider_implied_clause(self, goal, assumption, []) { Ok(result) => { candidates.push(Candidate { source: CandidateSource::AliasBound, result }); } @@ -728,7 +728,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { } ty::ExistentialPredicate::Projection(_) | ty::ExistentialPredicate::AutoTrait(_) => { - match G::consider_object_bound_candidate( + match G::probe_and_consider_object_bound_candidate( self, goal, bound.with_self_ty(tcx, self_ty), @@ -749,7 +749,11 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { if let Some(principal) = bounds.principal() { let principal_trait_ref = principal.with_self_ty(tcx, self_ty); self.walk_vtable(principal_trait_ref, |ecx, assumption, vtable_base, _| { - match G::consider_object_bound_candidate(ecx, goal, assumption.to_predicate(tcx)) { + match G::probe_and_consider_object_bound_candidate( + ecx, + goal, + assumption.to_predicate(tcx), + ) { Ok(result) => candidates.push(Candidate { source: CandidateSource::BuiltinImpl(BuiltinImplSource::Object { vtable_base, diff --git a/compiler/rustc_trait_selection/src/solve/normalizes_to/mod.rs b/compiler/rustc_trait_selection/src/solve/normalizes_to/mod.rs index c662ab23c53b6..008927a234026 100644 --- a/compiler/rustc_trait_selection/src/solve/normalizes_to/mod.rs +++ b/compiler/rustc_trait_selection/src/solve/normalizes_to/mod.rs @@ -385,7 +385,12 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { // A built-in `Fn` impl only holds if the output is sized. // (FIXME: technically we only need to check this if the type is a fn ptr...) - Self::consider_implied_clause(ecx, goal, pred, [goal.with(tcx, output_is_sized_pred)]) + Self::probe_and_consider_implied_clause( + ecx, + goal, + pred, + [goal.with(tcx, output_is_sized_pred)], + ) } fn consider_builtin_async_fn_trait_candidates( @@ -461,7 +466,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { // A built-in `AsyncFn` impl only holds if the output is sized. // (FIXME: technically we only need to check this if the type is a fn ptr...) 
- Self::consider_implied_clause( + Self::probe_and_consider_implied_clause( ecx, goal, pred, @@ -623,7 +628,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { let term = args.as_coroutine().return_ty().into(); - Self::consider_implied_clause( + Self::probe_and_consider_implied_clause( ecx, goal, ty::ProjectionPredicate { @@ -654,7 +659,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { let term = args.as_coroutine().yield_ty().into(); - Self::consider_implied_clause( + Self::probe_and_consider_implied_clause( ecx, goal, ty::ProjectionPredicate { @@ -737,7 +742,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { bug!("unexpected associated item `<{self_ty} as Coroutine>::{name}`") }; - Self::consider_implied_clause( + Self::probe_and_consider_implied_clause( ecx, goal, ty::ProjectionPredicate { diff --git a/compiler/rustc_trait_selection/src/solve/trait_goals.rs b/compiler/rustc_trait_selection/src/solve/trait_goals.rs index f6d12a9a0138e..07cdc4fb77a29 100644 --- a/compiler/rustc_trait_selection/src/solve/trait_goals.rs +++ b/compiler/rustc_trait_selection/src/solve/trait_goals.rs @@ -307,7 +307,12 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { .to_predicate(tcx); // A built-in `Fn` impl only holds if the output is sized. // (FIXME: technically we only need to check this if the type is a fn ptr...) - Self::consider_implied_clause(ecx, goal, pred, [goal.with(tcx, output_is_sized_pred)]) + Self::probe_and_consider_implied_clause( + ecx, + goal, + pred, + [goal.with(tcx, output_is_sized_pred)], + ) } fn consider_builtin_async_fn_trait_candidates( @@ -345,7 +350,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { .to_predicate(tcx); // A built-in `AsyncFn` impl only holds if the output is sized. // (FIXME: technically we only need to check this if the type is a fn ptr...) - Self::consider_implied_clause( + Self::probe_and_consider_implied_clause( ecx, goal, pred, @@ -521,7 +526,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { } let coroutine = args.as_coroutine(); - Self::consider_implied_clause( + Self::probe_and_consider_implied_clause( ecx, goal, ty::TraitRef::new(tcx, goal.predicate.def_id(), [self_ty, coroutine.resume_ty()]) From 7cf1c547c2ea1eb2ceae943b505faee6aca1e4d4 Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Fri, 26 Apr 2024 13:31:18 -0400 Subject: [PATCH 38/56] Actually use probes when needed and stop relying on existing outer probes --- compiler/rustc_middle/src/traits/solve.rs | 5 + .../rustc_middle/src/traits/solve/inspect.rs | 6 +- .../src/traits/solve/inspect/format.rs | 4 +- .../src/solve/assembly/mod.rs | 166 ++++---- .../src/solve/eval_ctxt/mod.rs | 28 +- .../src/solve/eval_ctxt/probe.rs | 23 +- .../src/solve/eval_ctxt/select.rs | 2 + .../src/solve/inspect/analyse.rs | 6 +- .../src/solve/normalizes_to/mod.rs | 78 ++-- .../src/solve/trait_goals.rs | 362 ++++++++++-------- .../src/traits/coherence.rs | 5 +- 11 files changed, 350 insertions(+), 335 deletions(-) diff --git a/compiler/rustc_middle/src/traits/solve.rs b/compiler/rustc_middle/src/traits/solve.rs index 13504c6ae930a..2c3f033925abc 100644 --- a/compiler/rustc_middle/src/traits/solve.rs +++ b/compiler/rustc_middle/src/traits/solve.rs @@ -332,4 +332,9 @@ pub enum CandidateSource { /// } /// ``` AliasBound, + /// A candidate that is registered only during coherence to represent some + /// yet-unknown impl that could be produced downstream without violating orphan + /// rules. 
+ // FIXME: Merge this with the forced ambiguity candidates, so those don't use `Misc`. + CoherenceUnknowable, } diff --git a/compiler/rustc_middle/src/traits/solve/inspect.rs b/compiler/rustc_middle/src/traits/solve/inspect.rs index 1b2e2781bfe72..66542431149fc 100644 --- a/compiler/rustc_middle/src/traits/solve/inspect.rs +++ b/compiler/rustc_middle/src/traits/solve/inspect.rs @@ -141,10 +141,6 @@ pub enum ProbeKind<'tcx> { TryNormalizeNonRigid { result: QueryResult<'tcx> }, /// Probe entered when normalizing the self ty during candidate assembly NormalizedSelfTyAssembly, - /// Some candidate to prove the current goal. - /// - /// FIXME: Remove this in favor of always using more strongly typed variants. - MiscCandidate { name: &'static str, result: QueryResult<'tcx> }, /// A candidate for proving a trait or alias-relate goal. TraitCandidate { source: CandidateSource, result: QueryResult<'tcx> }, /// Used in the probe that wraps normalizing the non-self type for the unsize @@ -154,4 +150,6 @@ pub enum ProbeKind<'tcx> { /// do a probe to find out what projection type(s) may be used to prove that /// the source type upholds all of the target type's object bounds. UpcastProjectionCompatibility, + /// Try to unify an opaque type with an existing + OpaqueTypeStorageLookup, } diff --git a/compiler/rustc_middle/src/traits/solve/inspect/format.rs b/compiler/rustc_middle/src/traits/solve/inspect/format.rs index 2e2d1df8d1935..91f0880fd2085 100644 --- a/compiler/rustc_middle/src/traits/solve/inspect/format.rs +++ b/compiler/rustc_middle/src/traits/solve/inspect/format.rs @@ -112,8 +112,8 @@ impl<'a, 'b> ProofTreeFormatter<'a, 'b> { ProbeKind::UpcastProjectionCompatibility => { write!(self.f, "PROBING FOR PROJECTION COMPATIBILITY FOR UPCASTING:") } - ProbeKind::MiscCandidate { name, result } => { - write!(self.f, "CANDIDATE {name}: {result:?}") + ProbeKind::OpaqueTypeStorageLookup => { + write!(self.f, "PROBING FOR AN EXISTING OPAQUE:") } ProbeKind::TraitCandidate { source, result } => { write!(self.f, "CANDIDATE {source:?}: {result:?}") diff --git a/compiler/rustc_trait_selection/src/solve/assembly/mod.rs b/compiler/rustc_trait_selection/src/solve/assembly/mod.rs index 6cecdeec9de24..b082b0bd4c8a0 100644 --- a/compiler/rustc_trait_selection/src/solve/assembly/mod.rs +++ b/compiler/rustc_trait_selection/src/solve/assembly/mod.rs @@ -48,21 +48,23 @@ pub(super) trait GoalKind<'tcx>: /// [`EvalCtxt::evaluate_added_goals_and_make_canonical_response`]). fn probe_and_match_goal_against_assumption( ecx: &mut EvalCtxt<'_, 'tcx>, + source: CandidateSource, goal: Goal<'tcx, Self>, assumption: ty::Clause<'tcx>, then: impl FnOnce(&mut EvalCtxt<'_, 'tcx>) -> QueryResult<'tcx>, - ) -> QueryResult<'tcx>; + ) -> Result, NoSolution>; /// Consider a clause, which consists of a "assumption" and some "requirements", /// to satisfy a goal. If the requirements hold, then attempt to satisfy our /// goal by equating it with the assumption. fn probe_and_consider_implied_clause( ecx: &mut EvalCtxt<'_, 'tcx>, + source: CandidateSource, goal: Goal<'tcx, Self>, assumption: ty::Clause<'tcx>, requirements: impl IntoIterator>>, - ) -> QueryResult<'tcx> { - Self::probe_and_match_goal_against_assumption(ecx, goal, assumption, |ecx| { + ) -> Result, NoSolution> { + Self::probe_and_match_goal_against_assumption(ecx, source, goal, assumption, |ecx| { // FIXME(-Znext-solver=coinductive): check whether this should be // `GoalSource::ImplWhereBound` for any caller. 
ecx.add_goals(GoalSource::Misc, requirements); @@ -75,10 +77,11 @@ pub(super) trait GoalKind<'tcx>: /// since they're not implied by the well-formedness of the object type. fn probe_and_consider_object_bound_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, + source: CandidateSource, goal: Goal<'tcx, Self>, assumption: ty::Clause<'tcx>, - ) -> QueryResult<'tcx> { - Self::probe_and_match_goal_against_assumption(ecx, goal, assumption, |ecx| { + ) -> Result, NoSolution> { + Self::probe_and_match_goal_against_assumption(ecx, source, goal, assumption, |ecx| { let tcx = ecx.tcx(); let ty::Dynamic(bounds, _, _) = *goal.predicate.self_ty().kind() else { bug!("expected object type in `probe_and_consider_object_bound_candidate`"); @@ -112,7 +115,7 @@ pub(super) trait GoalKind<'tcx>: fn consider_error_guaranteed_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, guar: ErrorGuaranteed, - ) -> QueryResult<'tcx>; + ) -> Result, NoSolution>; /// A type implements an `auto trait` if its components do as well. /// @@ -121,13 +124,13 @@ pub(super) trait GoalKind<'tcx>: fn consider_auto_trait_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx>; + ) -> Result, NoSolution>; /// A trait alias holds if the RHS traits and `where` clauses hold. fn consider_trait_alias_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx>; + ) -> Result, NoSolution>; /// A type is `Sized` if its tail component is `Sized`. /// @@ -136,7 +139,7 @@ pub(super) trait GoalKind<'tcx>: fn consider_builtin_sized_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx>; + ) -> Result, NoSolution>; /// A type is `Copy` or `Clone` if its components are `Copy` or `Clone`. /// @@ -145,20 +148,20 @@ pub(super) trait GoalKind<'tcx>: fn consider_builtin_copy_clone_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx>; + ) -> Result, NoSolution>; /// A type is `PointerLike` if we can compute its layout, and that layout /// matches the layout of `usize`. fn consider_builtin_pointer_like_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx>; + ) -> Result, NoSolution>; /// A type is a `FnPtr` if it is of `FnPtr` type. fn consider_builtin_fn_ptr_trait_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx>; + ) -> Result, NoSolution>; /// A callable type (a closure, fn def, or fn ptr) is known to implement the `Fn` /// family of traits where `A` is given by the signature of the type. @@ -166,7 +169,7 @@ pub(super) trait GoalKind<'tcx>: ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, kind: ty::ClosureKind, - ) -> QueryResult<'tcx>; + ) -> Result, NoSolution>; /// An async closure is known to implement the `AsyncFn` family of traits /// where `A` is given by the signature of the type. @@ -174,7 +177,7 @@ pub(super) trait GoalKind<'tcx>: ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, kind: ty::ClosureKind, - ) -> QueryResult<'tcx>; + ) -> Result, NoSolution>; /// Compute the built-in logic of the `AsyncFnKindHelper` helper trait, which /// is used internally to delay computation for async closures until after @@ -182,13 +185,13 @@ pub(super) trait GoalKind<'tcx>: fn consider_builtin_async_fn_kind_helper_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx>; + ) -> Result, NoSolution>; /// `Tuple` is implemented if the `Self` type is a tuple. 
fn consider_builtin_tuple_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx>; + ) -> Result, NoSolution>; /// `Pointee` is always implemented. /// @@ -198,7 +201,7 @@ pub(super) trait GoalKind<'tcx>: fn consider_builtin_pointee_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx>; + ) -> Result, NoSolution>; /// A coroutine (that comes from an `async` desugaring) is known to implement /// `Future`, where `O` is given by the coroutine's return type @@ -206,7 +209,7 @@ pub(super) trait GoalKind<'tcx>: fn consider_builtin_future_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx>; + ) -> Result, NoSolution>; /// A coroutine (that comes from a `gen` desugaring) is known to implement /// `Iterator`, where `O` is given by the generator's yield type @@ -214,19 +217,19 @@ pub(super) trait GoalKind<'tcx>: fn consider_builtin_iterator_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx>; + ) -> Result, NoSolution>; /// A coroutine (that comes from a `gen` desugaring) is known to implement /// `FusedIterator` fn consider_builtin_fused_iterator_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx>; + ) -> Result, NoSolution>; fn consider_builtin_async_iterator_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx>; + ) -> Result, NoSolution>; /// A coroutine (that doesn't come from an `async` or `gen` desugaring) is known to /// implement `Coroutine`, given the resume, yield, @@ -234,27 +237,27 @@ pub(super) trait GoalKind<'tcx>: fn consider_builtin_coroutine_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx>; + ) -> Result, NoSolution>; fn consider_builtin_discriminant_kind_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx>; + ) -> Result, NoSolution>; fn consider_builtin_async_destruct_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx>; + ) -> Result, NoSolution>; fn consider_builtin_destruct_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx>; + ) -> Result, NoSolution>; fn consider_builtin_transmute_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx>; + ) -> Result, NoSolution>; /// Consider (possibly several) candidates to upcast or unsize a type to another /// type, excluding the coercion of a sized type into a `dyn Trait`. @@ -266,7 +269,7 @@ pub(super) trait GoalKind<'tcx>: fn consider_structural_builtin_unsize_candidates( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> Vec<(CanonicalResponse<'tcx>, BuiltinImplSource)>; + ) -> Vec>; } impl<'tcx> EvalCtxt<'_, 'tcx> { @@ -282,7 +285,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { if normalized_self_ty.is_ty_var() { debug!("self type has been normalized to infer"); - return self.forced_ambiguity(MaybeCause::Ambiguity); + return self.forced_ambiguity(MaybeCause::Ambiguity).into_iter().collect(); } let goal = @@ -315,7 +318,10 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { candidates } - fn forced_ambiguity(&mut self, cause: MaybeCause) -> Vec> { + pub(super) fn forced_ambiguity( + &mut self, + cause: MaybeCause, + ) -> Result, NoSolution> { // This may fail if `try_evaluate_added_goals` overflows because it // fails to reach a fixpoint but ends up getting an error after // running for some additional step. 
@@ -323,10 +329,8 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { // cc trait-system-refactor-initiative#105 let source = CandidateSource::BuiltinImpl(BuiltinImplSource::Misc); let certainty = Certainty::Maybe(cause); - let result = self - .probe_trait_candidate(source) - .enter(|this| this.evaluate_added_goals_and_make_canonical_response(certainty)); - if let Ok(cand) = result { vec![cand] } else { vec![] } + self.probe_trait_candidate(source) + .enter(|this| this.evaluate_added_goals_and_make_canonical_response(certainty)) } #[instrument(level = "debug", skip_all)] @@ -533,20 +537,12 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { Err(NoSolution) }; - match result { - Ok(result) => candidates.push(Candidate { - source: CandidateSource::BuiltinImpl(BuiltinImplSource::Misc), - result, - }), - Err(NoSolution) => (), - } + candidates.extend(result); // There may be multiple unsize candidates for a trait with several supertraits: // `trait Foo: Bar + Bar` and `dyn Foo: Unsize>` if lang_items.unsize_trait() == Some(trait_def_id) { - for (result, source) in G::consider_structural_builtin_unsize_candidates(self, goal) { - candidates.push(Candidate { source: CandidateSource::BuiltinImpl(source), result }); - } + candidates.extend(G::consider_structural_builtin_unsize_candidates(self, goal)); } } @@ -557,12 +553,13 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { candidates: &mut Vec>, ) { for (i, assumption) in goal.param_env.caller_bounds().iter().enumerate() { - match G::probe_and_consider_implied_clause(self, goal, assumption, []) { - Ok(result) => { - candidates.push(Candidate { source: CandidateSource::ParamEnv(i), result }) - } - Err(NoSolution) => (), - } + candidates.extend(G::probe_and_consider_implied_clause( + self, + CandidateSource::ParamEnv(i), + goal, + assumption, + [], + )); } } @@ -648,12 +645,13 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { for assumption in self.tcx().item_bounds(alias_ty.def_id).instantiate(self.tcx(), alias_ty.args) { - match G::probe_and_consider_implied_clause(self, goal, assumption, []) { - Ok(result) => { - candidates.push(Candidate { source: CandidateSource::AliasBound, result }); - } - Err(NoSolution) => {} - } + candidates.extend(G::probe_and_consider_implied_clause( + self, + CandidateSource::AliasBound, + goal, + assumption, + [], + )); } if kind != ty::Projection { @@ -728,17 +726,12 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { } ty::ExistentialPredicate::Projection(_) | ty::ExistentialPredicate::AutoTrait(_) => { - match G::probe_and_consider_object_bound_candidate( + candidates.extend(G::probe_and_consider_object_bound_candidate( self, + CandidateSource::BuiltinImpl(BuiltinImplSource::Misc), goal, bound.with_self_ty(tcx, self_ty), - ) { - Ok(result) => candidates.push(Candidate { - source: CandidateSource::BuiltinImpl(BuiltinImplSource::Misc), - result, - }), - Err(NoSolution) => (), - } + )); } } } @@ -749,19 +742,12 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { if let Some(principal) = bounds.principal() { let principal_trait_ref = principal.with_self_ty(tcx, self_ty); self.walk_vtable(principal_trait_ref, |ecx, assumption, vtable_base, _| { - match G::probe_and_consider_object_bound_candidate( + candidates.extend(G::probe_and_consider_object_bound_candidate( ecx, + CandidateSource::BuiltinImpl(BuiltinImplSource::Object { vtable_base }), goal, assumption.to_predicate(tcx), - ) { - Ok(result) => candidates.push(Candidate { - source: CandidateSource::BuiltinImpl(BuiltinImplSource::Object { - vtable_base, - }), - result, - }), - Err(NoSolution) => (), - } + )); }); } } @@ -779,25 +765,20 @@ impl<'tcx> EvalCtxt<'_, 
'tcx> { candidates: &mut Vec>, ) { let tcx = self.tcx(); - let result = self.probe_misc_candidate("coherence unknowable").enter(|ecx| { - let trait_ref = goal.predicate.trait_ref(tcx); - let lazily_normalize_ty = |ty| ecx.structurally_normalize_ty(goal.param_env, ty); - - match coherence::trait_ref_is_knowable(tcx, trait_ref, lazily_normalize_ty)? { - Ok(()) => Err(NoSolution), - Err(_) => { - ecx.evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS) - } - } - }); - match result { - Ok(result) => candidates.push(Candidate { - source: CandidateSource::BuiltinImpl(BuiltinImplSource::Misc), - result, - }), - Err(NoSolution) => {} - } + candidates.extend(self.probe_trait_candidate(CandidateSource::CoherenceUnknowable).enter( + |ecx| { + let trait_ref = goal.predicate.trait_ref(tcx); + let lazily_normalize_ty = |ty| ecx.structurally_normalize_ty(goal.param_env, ty); + + match coherence::trait_ref_is_knowable(tcx, trait_ref, lazily_normalize_ty)? { + Ok(()) => Err(NoSolution), + Err(_) => { + ecx.evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS) + } + } + }, + )) } /// If there's a where-bound for the current goal, do not use any impl candidates @@ -842,6 +823,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { false } CandidateSource::ParamEnv(_) | CandidateSource::AliasBound => true, + CandidateSource::CoherenceUnknowable => bug!("uh oh"), }); } // If it is still ambiguous we instead just force the whole goal @@ -849,7 +831,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { // tests/ui/traits/next-solver/env-shadows-impls/ambig-env-no-shadow.rs Certainty::Maybe(cause) => { debug!(?cause, "force ambiguity"); - *candidates = self.forced_ambiguity(cause); + *candidates = self.forced_ambiguity(cause).into_iter().collect(); } } } diff --git a/compiler/rustc_trait_selection/src/solve/eval_ctxt/mod.rs b/compiler/rustc_trait_selection/src/solve/eval_ctxt/mod.rs index 21efce7487924..225479fcff347 100644 --- a/compiler/rustc_trait_selection/src/solve/eval_ctxt/mod.rs +++ b/compiler/rustc_trait_selection/src/solve/eval_ctxt/mod.rs @@ -998,19 +998,21 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { if candidate_key.def_id != key.def_id { continue; } - values.extend(self.probe_misc_candidate("opaque type storage").enter(|ecx| { - for (a, b) in std::iter::zip(candidate_key.args, key.args) { - ecx.eq(param_env, a, b)?; - } - ecx.eq(param_env, candidate_ty, ty)?; - ecx.add_item_bounds_for_hidden_type( - candidate_key.def_id.to_def_id(), - candidate_key.args, - param_env, - candidate_ty, - ); - ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) - })); + values.extend(self.probe(|_| inspect::ProbeKind::OpaqueTypeStorageLookup).enter( + |ecx| { + for (a, b) in std::iter::zip(candidate_key.args, key.args) { + ecx.eq(param_env, a, b)?; + } + ecx.eq(param_env, candidate_ty, ty)?; + ecx.add_item_bounds_for_hidden_type( + candidate_key.def_id.to_def_id(), + candidate_key.args, + param_env, + candidate_ty, + ); + ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + }, + )); } values } diff --git a/compiler/rustc_trait_selection/src/solve/eval_ctxt/probe.rs b/compiler/rustc_trait_selection/src/solve/eval_ctxt/probe.rs index 9d59723e44157..ee23f49939b7c 100644 --- a/compiler/rustc_trait_selection/src/solve/eval_ctxt/probe.rs +++ b/compiler/rustc_trait_selection/src/solve/eval_ctxt/probe.rs @@ -1,6 +1,7 @@ use crate::solve::assembly::Candidate; use super::EvalCtxt; +use rustc_infer::traits::BuiltinImplSource; use rustc_middle::traits::{ query::NoSolution, solve::{inspect, CandidateSource, 
QueryResult}, @@ -75,24 +76,12 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> { ProbeCtxt { ecx: self, probe_kind, _result: PhantomData } } - pub(in crate::solve) fn probe_misc_candidate( + pub(in crate::solve) fn probe_builtin_trait_candidate( &mut self, - name: &'static str, - ) -> ProbeCtxt< - '_, - 'a, - 'tcx, - impl FnOnce(&QueryResult<'tcx>) -> inspect::ProbeKind<'tcx>, - QueryResult<'tcx>, - > { - ProbeCtxt { - ecx: self, - probe_kind: move |result: &QueryResult<'tcx>| inspect::ProbeKind::MiscCandidate { - name, - result: *result, - }, - _result: PhantomData, - } + source: BuiltinImplSource, + ) -> TraitProbeCtxt<'_, 'a, 'tcx, impl FnOnce(&QueryResult<'tcx>) -> inspect::ProbeKind<'tcx>> + { + self.probe_trait_candidate(CandidateSource::BuiltinImpl(source)) } pub(in crate::solve) fn probe_trait_candidate( diff --git a/compiler/rustc_trait_selection/src/solve/eval_ctxt/select.rs b/compiler/rustc_trait_selection/src/solve/eval_ctxt/select.rs index 6644d3c77afd9..6cce37642c11b 100644 --- a/compiler/rustc_trait_selection/src/solve/eval_ctxt/select.rs +++ b/compiler/rustc_trait_selection/src/solve/eval_ctxt/select.rs @@ -107,6 +107,8 @@ impl<'tcx> InferCtxt<'tcx> { Ok(Some(ImplSource::Param(vec![]))) } + (_, CandidateSource::CoherenceUnknowable) => bug!(), + (Certainty::Maybe(_), _) => Ok(None), } }) diff --git a/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs b/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs index e918f20577cc8..7daf99cb7e563 100644 --- a/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs +++ b/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs @@ -198,7 +198,8 @@ impl<'a, 'tcx> InspectGoal<'a, 'tcx> { match probe.kind { inspect::ProbeKind::NormalizedSelfTyAssembly | inspect::ProbeKind::UnsizeAssembly - | inspect::ProbeKind::UpcastProjectionCompatibility => (), + | inspect::ProbeKind::UpcastProjectionCompatibility + | inspect::ProbeKind::OpaqueTypeStorageLookup => (), // We add a candidate for the root evaluation if there // is only one way to prove a given goal, e.g. for `WellFormed`. 
// @@ -218,8 +219,7 @@ impl<'a, 'tcx> InspectGoal<'a, 'tcx> { }) } } - inspect::ProbeKind::MiscCandidate { name: _, result } - | inspect::ProbeKind::TraitCandidate { source: _, result } => { + inspect::ProbeKind::TraitCandidate { source: _, result } => { candidates.push(InspectCandidate { goal: self, kind: probe.kind, diff --git a/compiler/rustc_trait_selection/src/solve/normalizes_to/mod.rs b/compiler/rustc_trait_selection/src/solve/normalizes_to/mod.rs index 008927a234026..dab87fffe461e 100644 --- a/compiler/rustc_trait_selection/src/solve/normalizes_to/mod.rs +++ b/compiler/rustc_trait_selection/src/solve/normalizes_to/mod.rs @@ -8,11 +8,10 @@ use rustc_hir::def_id::DefId; use rustc_hir::LangItem; use rustc_infer::traits::query::NoSolution; use rustc_infer::traits::solve::inspect::ProbeKind; +use rustc_infer::traits::solve::MaybeCause; use rustc_infer::traits::specialization_graph::LeafDef; use rustc_infer::traits::Reveal; -use rustc_middle::traits::solve::{ - CandidateSource, CanonicalResponse, Certainty, Goal, QueryResult, -}; +use rustc_middle::traits::solve::{CandidateSource, Certainty, Goal, QueryResult}; use rustc_middle::traits::BuiltinImplSource; use rustc_middle::ty::fast_reject::{DeepRejectCtxt, TreatParams}; use rustc_middle::ty::NormalizesTo; @@ -119,14 +118,15 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { fn probe_and_match_goal_against_assumption( ecx: &mut EvalCtxt<'_, 'tcx>, + source: CandidateSource, goal: Goal<'tcx, Self>, assumption: ty::Clause<'tcx>, then: impl FnOnce(&mut EvalCtxt<'_, 'tcx>) -> QueryResult<'tcx>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { if let Some(projection_pred) = assumption.as_projection_clause() { if projection_pred.projection_def_id() == goal.predicate.def_id() { let tcx = ecx.tcx(); - ecx.probe_misc_candidate("assumption").enter(|ecx| { + ecx.probe_trait_candidate(source).enter(|ecx| { let assumption_projection_pred = ecx.instantiate_binder_with_infer(projection_pred); ecx.eq( @@ -300,14 +300,14 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { fn consider_error_guaranteed_candidate( _ecx: &mut EvalCtxt<'_, 'tcx>, _guar: ErrorGuaranteed, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { Err(NoSolution) } fn consider_auto_trait_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { ecx.tcx().dcx().span_delayed_bug( ecx.tcx().def_span(goal.predicate.def_id()), "associated types not allowed on auto traits", @@ -318,35 +318,35 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { fn consider_trait_alias_candidate( _ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { bug!("trait aliases do not have associated types: {:?}", goal); } fn consider_builtin_sized_candidate( _ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { bug!("`Sized` does not have an associated type: {:?}", goal); } fn consider_builtin_copy_clone_candidate( _ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { bug!("`Copy`/`Clone` does not have an associated type: {:?}", goal); } fn consider_builtin_pointer_like_candidate( _ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { bug!("`PointerLike` does not have an associated type: {:?}", goal); } fn consider_builtin_fn_ptr_trait_candidate( _ecx: &mut EvalCtxt<'_, 'tcx>, goal: 
Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { bug!("`FnPtr` does not have an associated type: {:?}", goal); } @@ -354,7 +354,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, goal_kind: ty::ClosureKind, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { let tcx = ecx.tcx(); let tupled_inputs_and_output = match structural_traits::extract_tupled_inputs_and_output_from_callable( @@ -364,8 +364,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { )? { Some(tupled_inputs_and_output) => tupled_inputs_and_output, None => { - return ecx - .evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS); + return ecx.forced_ambiguity(MaybeCause::Ambiguity); } }; let output_is_sized_pred = tupled_inputs_and_output.map_bound(|(_, output)| { @@ -387,6 +386,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { // (FIXME: technically we only need to check this if the type is a fn ptr...) Self::probe_and_consider_implied_clause( ecx, + CandidateSource::BuiltinImpl(BuiltinImplSource::Misc), goal, pred, [goal.with(tcx, output_is_sized_pred)], @@ -397,7 +397,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, goal_kind: ty::ClosureKind, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { let tcx = ecx.tcx(); let env_region = match goal_kind { @@ -468,6 +468,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { // (FIXME: technically we only need to check this if the type is a fn ptr...) Self::probe_and_consider_implied_clause( ecx, + CandidateSource::BuiltinImpl(BuiltinImplSource::Misc), goal, pred, [goal.with(tcx, output_is_sized_pred)] @@ -479,7 +480,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { fn consider_builtin_async_fn_kind_helper_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { let [ closure_fn_kind_ty, goal_kind_ty, @@ -494,7 +495,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { // Bail if the upvars haven't been constrained. 
if tupled_upvars_ty.expect_ty().is_ty_var() { - return ecx.evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS); + return ecx.forced_ambiguity(MaybeCause::Ambiguity); } let Some(closure_kind) = closure_fn_kind_ty.expect_ty().to_opt_closure_kind() else { @@ -517,25 +518,27 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { borrow_region.expect_region(), ); - ecx.instantiate_normalizes_to_term(goal, upvars_ty.into()); - ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + ecx.probe_builtin_trait_candidate(BuiltinImplSource::Misc).enter(|ecx| { + ecx.instantiate_normalizes_to_term(goal, upvars_ty.into()); + ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + }) } fn consider_builtin_tuple_candidate( _ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { bug!("`Tuple` does not have an associated type: {:?}", goal); } fn consider_builtin_pointee_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { let tcx = ecx.tcx(); let metadata_def_id = tcx.require_lang_item(LangItem::Metadata, None); assert_eq!(metadata_def_id, goal.predicate.def_id()); - ecx.probe_misc_candidate("builtin pointee").enter(|ecx| { + ecx.probe_builtin_trait_candidate(BuiltinImplSource::Misc).enter(|ecx| { let metadata_ty = match goal.predicate.self_ty().kind() { ty::Bool | ty::Char @@ -614,7 +617,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { fn consider_builtin_future_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { let self_ty = goal.predicate.self_ty(); let ty::Coroutine(def_id, args) = *self_ty.kind() else { return Err(NoSolution); @@ -630,6 +633,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { Self::probe_and_consider_implied_clause( ecx, + CandidateSource::BuiltinImpl(BuiltinImplSource::Misc), goal, ty::ProjectionPredicate { projection_ty: ty::AliasTy::new(ecx.tcx(), goal.predicate.def_id(), [self_ty]), @@ -645,7 +649,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { fn consider_builtin_iterator_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { let self_ty = goal.predicate.self_ty(); let ty::Coroutine(def_id, args) = *self_ty.kind() else { return Err(NoSolution); @@ -661,6 +665,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { Self::probe_and_consider_implied_clause( ecx, + CandidateSource::BuiltinImpl(BuiltinImplSource::Misc), goal, ty::ProjectionPredicate { projection_ty: ty::AliasTy::new(ecx.tcx(), goal.predicate.def_id(), [self_ty]), @@ -676,14 +681,14 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { fn consider_builtin_fused_iterator_candidate( _ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { bug!("`FusedIterator` does not have an associated type: {:?}", goal); } fn consider_builtin_async_iterator_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { let self_ty = goal.predicate.self_ty(); let ty::Coroutine(def_id, args) = *self_ty.kind() else { return Err(NoSolution); @@ -695,7 +700,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { return Err(NoSolution); } - ecx.probe_misc_candidate("builtin AsyncIterator kind").enter(|ecx| { + 
ecx.probe_builtin_trait_candidate(BuiltinImplSource::Misc).enter(|ecx| { let expected_ty = ecx.next_ty_infer(); // Take `AsyncIterator` and turn it into the corresponding // coroutine yield ty `Poll>`. @@ -719,7 +724,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { fn consider_builtin_coroutine_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { let self_ty = goal.predicate.self_ty(); let ty::Coroutine(def_id, args) = *self_ty.kind() else { return Err(NoSolution); @@ -744,6 +749,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { Self::probe_and_consider_implied_clause( ecx, + CandidateSource::BuiltinImpl(BuiltinImplSource::Misc), goal, ty::ProjectionPredicate { projection_ty: ty::AliasTy::new( @@ -763,14 +769,14 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { fn consider_structural_builtin_unsize_candidates( _ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> Vec<(CanonicalResponse<'tcx>, BuiltinImplSource)> { + ) -> Vec> { bug!("`Unsize` does not have an associated type: {:?}", goal); } fn consider_builtin_discriminant_kind_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { let self_ty = goal.predicate.self_ty(); let discriminant_ty = match *self_ty.kind() { ty::Bool @@ -813,7 +819,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { ), }; - ecx.probe_misc_candidate("builtin discriminant kind").enter(|ecx| { + ecx.probe_builtin_trait_candidate(BuiltinImplSource::Misc).enter(|ecx| { ecx.instantiate_normalizes_to_term(goal, discriminant_ty.into()); ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) }) @@ -822,7 +828,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { fn consider_builtin_async_destruct_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { let self_ty = goal.predicate.self_ty(); let async_destructor_ty = match *self_ty.kind() { ty::Bool @@ -865,7 +871,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { ), }; - ecx.probe_misc_candidate("builtin async destruct").enter(|ecx| { + ecx.probe_builtin_trait_candidate(BuiltinImplSource::Misc).enter(|ecx| { ecx.eq(goal.param_env, goal.predicate.term, async_destructor_ty.into()) .expect("expected goal term to be fully unconstrained"); ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) @@ -875,14 +881,14 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { fn consider_builtin_destruct_candidate( _ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { bug!("`Destruct` does not have an associated type: {:?}", goal); } fn consider_builtin_transmute_candidate( _ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { bug!("`BikeshedIntrinsicFrom` does not have an associated type: {:?}", goal) } } diff --git a/compiler/rustc_trait_selection/src/solve/trait_goals.rs b/compiler/rustc_trait_selection/src/solve/trait_goals.rs index 07cdc4fb77a29..c8cb14abb554d 100644 --- a/compiler/rustc_trait_selection/src/solve/trait_goals.rs +++ b/compiler/rustc_trait_selection/src/solve/trait_goals.rs @@ -9,10 +9,9 @@ use rustc_data_structures::fx::FxIndexSet; use rustc_hir::def_id::DefId; use rustc_hir::{LangItem, Movability}; use rustc_infer::traits::query::NoSolution; +use 
rustc_infer::traits::solve::MaybeCause; use rustc_middle::traits::solve::inspect::ProbeKind; -use rustc_middle::traits::solve::{ - CandidateSource, CanonicalResponse, Certainty, Goal, QueryResult, -}; +use rustc_middle::traits::solve::{CandidateSource, Certainty, Goal, QueryResult}; use rustc_middle::traits::{BuiltinImplSource, Reveal}; use rustc_middle::ty::fast_reject::{DeepRejectCtxt, TreatParams}; use rustc_middle::ty::{self, ToPredicate, Ty, TyCtxt}; @@ -94,21 +93,24 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { fn consider_error_guaranteed_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, _guar: ErrorGuaranteed, - ) -> QueryResult<'tcx> { - ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + ) -> Result, NoSolution> { + // FIXME: don't need to enter a probe here. + ecx.probe_builtin_trait_candidate(BuiltinImplSource::Misc) + .enter(|ecx| ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)) } fn probe_and_match_goal_against_assumption( ecx: &mut EvalCtxt<'_, 'tcx>, + source: CandidateSource, goal: Goal<'tcx, Self>, assumption: ty::Clause<'tcx>, then: impl FnOnce(&mut EvalCtxt<'_, 'tcx>) -> QueryResult<'tcx>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { if let Some(trait_clause) = assumption.as_trait_clause() { if trait_clause.def_id() == goal.predicate.def_id() && trait_clause.polarity() == goal.predicate.polarity { - ecx.probe_misc_candidate("assumption").enter(|ecx| { + ecx.probe_trait_candidate(source).enter(|ecx| { let assumption_trait_pred = ecx.instantiate_binder_with_infer(trait_clause); ecx.eq( goal.param_env, @@ -128,7 +130,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { fn consider_auto_trait_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { if goal.predicate.polarity != ty::PredicatePolarity::Positive { return Err(NoSolution); } @@ -162,6 +164,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { } ecx.probe_and_evaluate_goal_for_constituent_tys( + CandidateSource::BuiltinImpl(BuiltinImplSource::Misc), goal, structural_traits::instantiate_constituent_tys_for_auto_trait, ) @@ -170,14 +173,14 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { fn consider_trait_alias_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { if goal.predicate.polarity != ty::PredicatePolarity::Positive { return Err(NoSolution); } let tcx = ecx.tcx(); - ecx.probe_misc_candidate("trait alias").enter(|ecx| { + ecx.probe_builtin_trait_candidate(BuiltinImplSource::Misc).enter(|ecx| { let nested_obligations = tcx .predicates_of(goal.predicate.def_id()) .instantiate(tcx, goal.predicate.trait_ref.args); @@ -193,12 +196,13 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { fn consider_builtin_sized_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { if goal.predicate.polarity != ty::PredicatePolarity::Positive { return Err(NoSolution); } ecx.probe_and_evaluate_goal_for_constituent_tys( + CandidateSource::BuiltinImpl(BuiltinImplSource::Misc), goal, structural_traits::instantiate_constituent_tys_for_sized_trait, ) @@ -207,12 +211,13 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { fn consider_builtin_copy_clone_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { if goal.predicate.polarity != 
ty::PredicatePolarity::Positive { return Err(NoSolution); } ecx.probe_and_evaluate_goal_for_constituent_tys( + CandidateSource::BuiltinImpl(BuiltinImplSource::Misc), goal, structural_traits::instantiate_constituent_tys_for_copy_clone_trait, ) @@ -221,7 +226,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { fn consider_builtin_pointer_like_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { if goal.predicate.polarity != ty::PredicatePolarity::Positive { return Err(NoSolution); } @@ -234,14 +239,15 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { let key = tcx.erase_regions(goal.param_env.and(goal.predicate.self_ty())); // But if there are inference variables, we have to wait until it's resolved. if key.has_non_region_infer() { - return ecx.evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS); + return ecx.forced_ambiguity(MaybeCause::Ambiguity); } if let Ok(layout) = tcx.layout_of(key) && layout.layout.is_pointer_like(&tcx.data_layout) { // FIXME: We could make this faster by making a no-constraints response - ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + ecx.probe_builtin_trait_candidate(BuiltinImplSource::Misc) + .enter(|ecx| ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)) } else { Err(NoSolution) } @@ -250,13 +256,15 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { fn consider_builtin_fn_ptr_trait_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { let self_ty = goal.predicate.self_ty(); match goal.predicate.polarity { // impl FnPtr for FnPtr {} ty::PredicatePolarity::Positive => { if self_ty.is_fn_ptr() { - ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + ecx.probe_builtin_trait_candidate(BuiltinImplSource::Misc).enter(|ecx| { + ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + }) } else { Err(NoSolution) } @@ -266,7 +274,9 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { // If a type is rigid and not a fn ptr, then we know for certain // that it does *not* implement `FnPtr`. if !self_ty.is_fn_ptr() && self_ty.is_known_rigid() { - ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + ecx.probe_builtin_trait_candidate(BuiltinImplSource::Misc).enter(|ecx| { + ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + }) } else { Err(NoSolution) } @@ -278,7 +288,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, goal_kind: ty::ClosureKind, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { if goal.predicate.polarity != ty::PredicatePolarity::Positive { return Err(NoSolution); } @@ -292,8 +302,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { )? { Some(a) => a, None => { - return ecx - .evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS); + return ecx.forced_ambiguity(MaybeCause::Ambiguity); } }; let output_is_sized_pred = tupled_inputs_and_output.map_bound(|(_, output)| { @@ -309,6 +318,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { // (FIXME: technically we only need to check this if the type is a fn ptr...) 
Self::probe_and_consider_implied_clause( ecx, + CandidateSource::BuiltinImpl(BuiltinImplSource::Misc), goal, pred, [goal.with(tcx, output_is_sized_pred)], @@ -319,7 +329,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, goal_kind: ty::ClosureKind, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { if goal.predicate.polarity != ty::PredicatePolarity::Positive { return Err(NoSolution); } @@ -352,6 +362,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { // (FIXME: technically we only need to check this if the type is a fn ptr...) Self::probe_and_consider_implied_clause( ecx, + CandidateSource::BuiltinImpl(BuiltinImplSource::Misc), goal, pred, [goal.with(tcx, output_is_sized_pred)] @@ -363,7 +374,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { fn consider_builtin_async_fn_kind_helper_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { let [closure_fn_kind_ty, goal_kind_ty] = **goal.predicate.trait_ref.args else { bug!(); }; @@ -374,7 +385,8 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { }; let goal_kind = goal_kind_ty.expect_ty().to_opt_closure_kind().unwrap(); if closure_kind.extends(goal_kind) { - ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + ecx.probe_builtin_trait_candidate(BuiltinImplSource::Misc) + .enter(|ecx| ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)) } else { Err(NoSolution) } @@ -389,13 +401,14 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { fn consider_builtin_tuple_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { if goal.predicate.polarity != ty::PredicatePolarity::Positive { return Err(NoSolution); } if let ty::Tuple(..) = goal.predicate.self_ty().kind() { - ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + ecx.probe_builtin_trait_candidate(BuiltinImplSource::Misc) + .enter(|ecx| ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)) } else { Err(NoSolution) } @@ -404,18 +417,19 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { fn consider_builtin_pointee_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { if goal.predicate.polarity != ty::PredicatePolarity::Positive { return Err(NoSolution); } - ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + ecx.probe_builtin_trait_candidate(BuiltinImplSource::Misc) + .enter(|ecx| ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)) } fn consider_builtin_future_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { if goal.predicate.polarity != ty::PredicatePolarity::Positive { return Err(NoSolution); } @@ -433,13 +447,15 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { // Async coroutine unconditionally implement `Future` // Technically, we need to check that the future output type is Sized, // but that's already proven by the coroutine being WF. 
- ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + // FIXME: use `consider_implied` + ecx.probe_builtin_trait_candidate(BuiltinImplSource::Misc) + .enter(|ecx| ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)) } fn consider_builtin_iterator_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { if goal.predicate.polarity != ty::PredicatePolarity::Positive { return Err(NoSolution); } @@ -457,13 +473,15 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { // Gen coroutines unconditionally implement `Iterator` // Technically, we need to check that the iterator output type is Sized, // but that's already proven by the coroutines being WF. - ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + // FIXME: use `consider_implied` + ecx.probe_builtin_trait_candidate(BuiltinImplSource::Misc) + .enter(|ecx| ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)) } fn consider_builtin_fused_iterator_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { if goal.predicate.polarity != ty::PredicatePolarity::Positive { return Err(NoSolution); } @@ -479,13 +497,15 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { } // Gen coroutines unconditionally implement `FusedIterator` - ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + // FIXME: use `consider_implied` + ecx.probe_builtin_trait_candidate(BuiltinImplSource::Misc) + .enter(|ecx| ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)) } fn consider_builtin_async_iterator_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { if goal.predicate.polarity != ty::PredicatePolarity::Positive { return Err(NoSolution); } @@ -503,13 +523,15 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { // Gen coroutines unconditionally implement `Iterator` // Technically, we need to check that the iterator output type is Sized, // but that's already proven by the coroutines being WF. - ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + // FIXME: use `consider_implied` + ecx.probe_builtin_trait_candidate(BuiltinImplSource::Misc) + .enter(|ecx| ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)) } fn consider_builtin_coroutine_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { if goal.predicate.polarity != ty::PredicatePolarity::Positive { return Err(NoSolution); } @@ -528,6 +550,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { let coroutine = args.as_coroutine(); Self::probe_and_consider_implied_clause( ecx, + CandidateSource::BuiltinImpl(BuiltinImplSource::Misc), goal, ty::TraitRef::new(tcx, goal.predicate.def_id(), [self_ty, coroutine.resume_ty()]) .to_predicate(tcx), @@ -540,31 +563,33 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { fn consider_builtin_discriminant_kind_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { if goal.predicate.polarity != ty::PredicatePolarity::Positive { return Err(NoSolution); } // `DiscriminantKind` is automatically implemented for every type. 
- ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + ecx.probe_builtin_trait_candidate(BuiltinImplSource::Misc) + .enter(|ecx| ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)) } fn consider_builtin_async_destruct_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { if goal.predicate.polarity != ty::PredicatePolarity::Positive { return Err(NoSolution); } // `AsyncDestruct` is automatically implemented for every type. - ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + ecx.probe_builtin_trait_candidate(BuiltinImplSource::Misc) + .enter(|ecx| ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)) } fn consider_builtin_destruct_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { if goal.predicate.polarity != ty::PredicatePolarity::Positive { return Err(NoSolution); } @@ -573,13 +598,14 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { // `Destruct` is automatically implemented for every type in // non-const environments. - ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + ecx.probe_builtin_trait_candidate(BuiltinImplSource::Misc) + .enter(|ecx| ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)) } fn consider_builtin_transmute_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { if goal.predicate.polarity != ty::PredicatePolarity::Positive { return Err(NoSolution); } @@ -599,11 +625,15 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { return Err(NoSolution); }; - let certainty = ecx.is_transmutable( - rustc_transmute::Types { dst: args.type_at(0), src: args.type_at(1) }, - assume, - )?; - ecx.evaluate_added_goals_and_make_canonical_response(certainty) + // FIXME: This actually should destructure the `Result` we get from transmutability and + // register candiates. We probably need to register >1 since we may have an OR of ANDs. + ecx.probe_builtin_trait_candidate(BuiltinImplSource::Misc).enter(|ecx| { + let certainty = ecx.is_transmutable( + rustc_transmute::Types { dst: args.type_at(0), src: args.type_at(1) }, + assume, + )?; + ecx.evaluate_added_goals_and_make_canonical_response(certainty) + }) } /// ```ignore (builtin impl example) @@ -616,20 +646,13 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { fn consider_structural_builtin_unsize_candidates( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, - ) -> Vec<(CanonicalResponse<'tcx>, BuiltinImplSource)> { + ) -> Vec> { if goal.predicate.polarity != ty::PredicatePolarity::Positive { return vec![]; } - let misc_candidate = |ecx: &mut EvalCtxt<'_, 'tcx>, certainty| { - ( - ecx.evaluate_added_goals_and_make_canonical_response(certainty).unwrap(), - BuiltinImplSource::Misc, - ) - }; - - let result_to_single = |result, source| match result { - Ok(resp) => vec![(resp, source)], + let result_to_single = |result| match result { + Ok(resp) => vec![resp], Err(NoSolution) => vec![], }; @@ -647,7 +670,10 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { let goal = goal.with(ecx.tcx(), (a_ty, b_ty)); match (a_ty.kind(), b_ty.kind()) { (ty::Infer(ty::TyVar(..)), ..) 
=> bug!("unexpected infer {a_ty:?} {b_ty:?}"), - (_, ty::Infer(ty::TyVar(..))) => vec![misc_candidate(ecx, Certainty::AMBIGUOUS)], + + (_, ty::Infer(ty::TyVar(..))) => { + result_to_single(ecx.forced_ambiguity(MaybeCause::Ambiguity)) + } // Trait upcasting, or `dyn Trait + Auto + 'a` -> `dyn Trait + 'b`. ( @@ -660,14 +686,12 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { // `T` -> `dyn Trait` unsizing. (_, &ty::Dynamic(b_region, b_data, ty::Dyn)) => result_to_single( ecx.consider_builtin_unsize_to_dyn_candidate(goal, b_region, b_data), - BuiltinImplSource::Misc, ), // `[T; N]` -> `[T]` unsizing - (&ty::Array(a_elem_ty, ..), &ty::Slice(b_elem_ty)) => result_to_single( - ecx.consider_builtin_array_unsize(goal, a_elem_ty, b_elem_ty), - BuiltinImplSource::Misc, - ), + (&ty::Array(a_elem_ty, ..), &ty::Slice(b_elem_ty)) => { + result_to_single(ecx.consider_builtin_array_unsize(goal, a_elem_ty, b_elem_ty)) + } // `Struct` -> `Struct` where `T: Unsize` (&ty::Adt(a_def, a_args), &ty::Adt(b_def, b_args)) @@ -675,7 +699,6 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { { result_to_single( ecx.consider_builtin_struct_unsize(goal, a_def, a_args, b_args), - BuiltinImplSource::Misc, ) } @@ -683,10 +706,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { (&ty::Tuple(a_tys), &ty::Tuple(b_tys)) if a_tys.len() == b_tys.len() && !a_tys.is_empty() => { - result_to_single( - ecx.consider_builtin_tuple_unsize(goal, a_tys, b_tys), - BuiltinImplSource::TupleUnsizing, - ) + result_to_single(ecx.consider_builtin_tuple_unsize(goal, a_tys, b_tys)) } _ => vec![], @@ -712,7 +732,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { a_region: ty::Region<'tcx>, b_data: &'tcx ty::List>, b_region: ty::Region<'tcx>, - ) -> Vec<(CanonicalResponse<'tcx>, BuiltinImplSource)> { + ) -> Vec> { let tcx = self.tcx(); let Goal { predicate: (a_ty, _b_ty), .. } = goal; @@ -720,35 +740,32 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { // If the principal def ids match (or are both none), then we're not doing // trait upcasting. We're just removing auto traits (or shortening the lifetime). 
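For the dyn-to-dyn case handled next, candidate assembly now collects candidates directly: one Misc candidate when the principal trait is unchanged (only auto traits are dropped or the lifetime is shortened), and one TraitUpcasting { vtable_vptr_slot } candidate per super-trait reached while walking the vtable. A rough sketch of that dispatch, with strings standing in for trait refs and the real equating step reduced to a plain comparison:

    #[derive(Debug, PartialEq)]
    enum Source {
        Misc,
        TraitUpcasting { vtable_vptr_slot: Option<usize> },
    }

    fn dyn_upcast_candidates(
        a_principal: Option<&str>,
        b_principal: Option<&str>,
        // (super-trait, vptr slot) pairs discovered while walking `a`'s vtable
        supertraits: &[(&str, Option<usize>)],
    ) -> Vec<Source> {
        let mut responses = Vec::new();
        if a_principal == b_principal {
            // Not real upcasting: just dropping auto traits / shortening the lifetime.
            responses.push(Source::Misc);
        } else if a_principal.is_some() {
            for &(super_trait, vtable_vptr_slot) in supertraits {
                // Stand-in for equating the walked super-trait with the target principal.
                if Some(super_trait) == b_principal {
                    responses.push(Source::TraitUpcasting { vtable_vptr_slot });
                }
            }
        }
        responses
    }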
if a_data.principal_def_id() == b_data.principal_def_id() { - if let Ok(resp) = self.consider_builtin_upcast_to_principal( + responses.extend(self.consider_builtin_upcast_to_principal( goal, + CandidateSource::BuiltinImpl(BuiltinImplSource::Misc), a_data, a_region, b_data, b_region, a_data.principal(), - ) { - responses.push((resp, BuiltinImplSource::Misc)); - } + )); } else if let Some(a_principal) = a_data.principal() { self.walk_vtable( a_principal.with_self_ty(tcx, a_ty), |ecx, new_a_principal, _, vtable_vptr_slot| { - if let Ok(resp) = ecx.probe_misc_candidate("dyn upcast").enter(|ecx| { - ecx.consider_builtin_upcast_to_principal( - goal, - a_data, - a_region, - b_data, - b_region, - Some(new_a_principal.map_bound(|trait_ref| { - ty::ExistentialTraitRef::erase_self_ty(tcx, trait_ref) - })), - ) - }) { - responses - .push((resp, BuiltinImplSource::TraitUpcasting { vtable_vptr_slot })); - } + responses.extend(ecx.consider_builtin_upcast_to_principal( + goal, + CandidateSource::BuiltinImpl(BuiltinImplSource::TraitUpcasting { + vtable_vptr_slot, + }), + a_data, + a_region, + b_data, + b_region, + Some(new_a_principal.map_bound(|trait_ref| { + ty::ExistentialTraitRef::erase_self_ty(tcx, trait_ref) + })), + )); }, ); } @@ -761,7 +778,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { goal: Goal<'tcx, (Ty<'tcx>, Ty<'tcx>)>, b_data: &'tcx ty::List>, b_region: ty::Region<'tcx>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { let tcx = self.tcx(); let Goal { predicate: (a_ty, _), .. } = goal; @@ -770,37 +787,40 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { return Err(NoSolution); } - // Check that the type implements all of the predicates of the trait object. - // (i.e. the principal, all of the associated types match, and any auto traits) - self.add_goals( - GoalSource::ImplWhereBound, - b_data.iter().map(|pred| goal.with(tcx, pred.with_self_ty(tcx, a_ty))), - ); - - // The type must be `Sized` to be unsized. - if let Some(sized_def_id) = tcx.lang_items().sized_trait() { - self.add_goal( + self.probe_builtin_trait_candidate(BuiltinImplSource::Misc).enter(|ecx| { + // Check that the type implements all of the predicates of the trait object. + // (i.e. the principal, all of the associated types match, and any auto traits) + ecx.add_goals( GoalSource::ImplWhereBound, - goal.with(tcx, ty::TraitRef::new(tcx, sized_def_id, [a_ty])), + b_data.iter().map(|pred| goal.with(tcx, pred.with_self_ty(tcx, a_ty))), ); - } else { - return Err(NoSolution); - } - // The type must outlive the lifetime of the `dyn` we're unsizing into. - self.add_goal(GoalSource::Misc, goal.with(tcx, ty::OutlivesPredicate(a_ty, b_region))); - self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + // The type must be `Sized` to be unsized. + if let Some(sized_def_id) = tcx.lang_items().sized_trait() { + ecx.add_goal( + GoalSource::ImplWhereBound, + goal.with(tcx, ty::TraitRef::new(tcx, sized_def_id, [a_ty])), + ); + } else { + return Err(NoSolution); + } + + // The type must outlive the lifetime of the `dyn` we're unsizing into. 
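Summarizing the T -> dyn Trait candidate being assembled here: inside a single Misc probe it registers every object bound with T as the self type, a T: Sized goal, and finally the outlives goal added just below. A compact sketch of that goal list, with goals reduced to strings rather than real solver goals:

    fn unsize_to_dyn_goals(self_ty: &str, object_bounds: &[&str], region: &str) -> Vec<String> {
        let mut goals = Vec::new();
        // The type must satisfy every predicate of the trait object
        // (principal, associated-type bounds, auto traits).
        goals.extend(object_bounds.iter().map(|b| format!("{self_ty}: {b}")));
        // The type must be `Sized` to be unsized into a `dyn` object.
        goals.push(format!("{self_ty}: Sized"));
        // The type must outlive the lifetime of the `dyn` we're unsizing into.
        goals.push(format!("{self_ty}: {region}"));
        goals
    }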
+ ecx.add_goal(GoalSource::Misc, goal.with(tcx, ty::OutlivesPredicate(a_ty, b_region))); + ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + }) } fn consider_builtin_upcast_to_principal( &mut self, goal: Goal<'tcx, (Ty<'tcx>, Ty<'tcx>)>, + source: CandidateSource, a_data: &'tcx ty::List>, a_region: ty::Region<'tcx>, b_data: &'tcx ty::List>, b_region: ty::Region<'tcx>, upcast_principal: Option>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { let param_env = goal.param_env; // We may upcast to auto traits that are either explicitly listed in @@ -819,7 +839,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { // having any inference side-effects. We process obligations because // unification may initially succeed due to deferred projection equality. let projection_may_match = - |ecx: &mut Self, + |ecx: &mut EvalCtxt<'_, 'tcx>, source_projection: ty::PolyExistentialProjection<'tcx>, target_projection: ty::PolyExistentialProjection<'tcx>| { source_projection.item_def_id() == target_projection.item_def_id() @@ -833,54 +853,60 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { .is_ok() }; - for bound in b_data { - match bound.skip_binder() { - // Check that a's supertrait (upcast_principal) is compatible - // with the target (b_ty). - ty::ExistentialPredicate::Trait(target_principal) => { - self.eq(param_env, upcast_principal.unwrap(), bound.rebind(target_principal))?; - } - // Check that b_ty's projection is satisfied by exactly one of - // a_ty's projections. First, we look through the list to see if - // any match. If not, error. Then, if *more* than one matches, we - // return ambiguity. Otherwise, if exactly one matches, equate - // it with b_ty's projection. - ty::ExistentialPredicate::Projection(target_projection) => { - let target_projection = bound.rebind(target_projection); - let mut matching_projections = - a_data.projection_bounds().filter(|source_projection| { - projection_may_match(self, *source_projection, target_projection) - }); - let Some(source_projection) = matching_projections.next() else { - return Err(NoSolution); - }; - if matching_projections.next().is_some() { - return self.evaluate_added_goals_and_make_canonical_response( - Certainty::AMBIGUOUS, - ); + self.probe_trait_candidate(source).enter(|ecx| { + for bound in b_data { + match bound.skip_binder() { + // Check that a's supertrait (upcast_principal) is compatible + // with the target (b_ty). + ty::ExistentialPredicate::Trait(target_principal) => { + ecx.eq( + param_env, + upcast_principal.unwrap(), + bound.rebind(target_principal), + )?; } - self.eq(param_env, source_projection, target_projection)?; - } - // Check that b_ty's auto traits are present in a_ty's bounds. - ty::ExistentialPredicate::AutoTrait(def_id) => { - if !a_auto_traits.contains(&def_id) { - return Err(NoSolution); + // Check that b_ty's projection is satisfied by exactly one of + // a_ty's projections. First, we look through the list to see if + // any match. If not, error. Then, if *more* than one matches, we + // return ambiguity. Otherwise, if exactly one matches, equate + // it with b_ty's projection. 
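The projection arm that follows implements the rule spelled out in the comment above: zero matching source projections is an error, more than one is ambiguity, and exactly one gets equated with the target. Stripped of the solver types, the selection logic is just the following (Outcome and the index-based representation are illustrative only):

    #[derive(Debug, PartialEq)]
    enum Outcome {
        NoSolution,
        Ambiguous,
        Equate(usize), // index of the single matching source projection
    }

    fn select_matching_projection(matching_indices: &[usize]) -> Outcome {
        let mut matches = matching_indices.iter().copied();
        let Some(first) = matches.next() else {
            return Outcome::NoSolution;
        };
        if matches.next().is_some() { Outcome::Ambiguous } else { Outcome::Equate(first) }
    }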
+ ty::ExistentialPredicate::Projection(target_projection) => { + let target_projection = bound.rebind(target_projection); + let mut matching_projections = + a_data.projection_bounds().filter(|source_projection| { + projection_may_match(ecx, *source_projection, target_projection) + }); + let Some(source_projection) = matching_projections.next() else { + return Err(NoSolution); + }; + if matching_projections.next().is_some() { + return ecx.evaluate_added_goals_and_make_canonical_response( + Certainty::AMBIGUOUS, + ); + } + ecx.eq(param_env, source_projection, target_projection)?; + } + // Check that b_ty's auto traits are present in a_ty's bounds. + ty::ExistentialPredicate::AutoTrait(def_id) => { + if !a_auto_traits.contains(&def_id) { + return Err(NoSolution); + } } } } - } - // Also require that a_ty's lifetime outlives b_ty's lifetime. - self.add_goal( - GoalSource::ImplWhereBound, - Goal::new( - self.tcx(), - param_env, - ty::Binder::dummy(ty::OutlivesPredicate(a_region, b_region)), - ), - ); + // Also require that a_ty's lifetime outlives b_ty's lifetime. + ecx.add_goal( + GoalSource::ImplWhereBound, + Goal::new( + ecx.tcx(), + param_env, + ty::Binder::dummy(ty::OutlivesPredicate(a_region, b_region)), + ), + ); - self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + }) } /// We have the following builtin impls for arrays: @@ -896,9 +922,10 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { goal: Goal<'tcx, (Ty<'tcx>, Ty<'tcx>)>, a_elem_ty: Ty<'tcx>, b_elem_ty: Ty<'tcx>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { self.eq(goal.param_env, a_elem_ty, b_elem_ty)?; - self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + self.probe_builtin_trait_candidate(BuiltinImplSource::Misc) + .enter(|ecx| ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)) } /// We generate a builtin `Unsize` impls for structs with generic parameters only @@ -920,7 +947,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { def: ty::AdtDef<'tcx>, a_args: ty::GenericArgsRef<'tcx>, b_args: ty::GenericArgsRef<'tcx>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { let tcx = self.tcx(); let Goal { predicate: (_a_ty, b_ty), .. } = goal; @@ -962,7 +989,8 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { ), ), ); - self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + self.probe_builtin_trait_candidate(BuiltinImplSource::Misc) + .enter(|ecx| ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)) } /// We generate the following builtin impl for tuples of all sizes. @@ -980,7 +1008,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { goal: Goal<'tcx, (Ty<'tcx>, Ty<'tcx>)>, a_tys: &'tcx ty::List>, b_tys: &'tcx ty::List>, - ) -> QueryResult<'tcx> { + ) -> Result, NoSolution> { let tcx = self.tcx(); let Goal { predicate: (_a_ty, b_ty), .. 
} = goal; @@ -1004,7 +1032,8 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { ), ), ); - self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + self.probe_builtin_trait_candidate(BuiltinImplSource::TupleUnsizing) + .enter(|ecx| ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)) } // Return `Some` if there is an impl (built-in or user provided) that may @@ -1014,7 +1043,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { fn disqualify_auto_trait_candidate_due_to_possible_impl( &mut self, goal: Goal<'tcx, TraitPredicate<'tcx>>, - ) -> Option> { + ) -> Option, NoSolution>> { let self_ty = goal.predicate.self_ty(); match *self_ty.kind() { // Stall int and float vars until they are resolved to a concrete @@ -1023,7 +1052,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { // we probably don't want to treat an `impl !AutoTrait for i32` as // disqualifying the built-in auto impl for `i64: AutoTrait` either. ty::Infer(ty::IntVar(_) | ty::FloatVar(_)) => { - Some(self.evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS)) + Some(self.forced_ambiguity(MaybeCause::Ambiguity)) } // These types cannot be structurally decomposed into constituent @@ -1044,9 +1073,11 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { { match self.tcx().coroutine_movability(def_id) { Movability::Static => Some(Err(NoSolution)), - Movability::Movable => { - Some(self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)) - } + Movability::Movable => Some( + self.probe_builtin_trait_candidate(BuiltinImplSource::Misc).enter(|ecx| { + ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + }), + ), } } @@ -1111,13 +1142,14 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { /// wrapped in one. fn probe_and_evaluate_goal_for_constituent_tys( &mut self, + source: CandidateSource, goal: Goal<'tcx, TraitPredicate<'tcx>>, constituent_tys: impl Fn( &EvalCtxt<'_, 'tcx>, Ty<'tcx>, ) -> Result>>, NoSolution>, - ) -> QueryResult<'tcx> { - self.probe_misc_candidate("constituent tys").enter(|ecx| { + ) -> Result, NoSolution> { + self.probe_trait_candidate(source).enter(|ecx| { ecx.add_goals( GoalSource::ImplWhereBound, constituent_tys(ecx, goal.predicate.self_ty())? diff --git a/compiler/rustc_trait_selection/src/traits/coherence.rs b/compiler/rustc_trait_selection/src/traits/coherence.rs index a4208cdc3c62c..c430da10b0cbf 100644 --- a/compiler/rustc_trait_selection/src/traits/coherence.rs +++ b/compiler/rustc_trait_selection/src/traits/coherence.rs @@ -1078,9 +1078,8 @@ impl<'a, 'tcx> ProofTreeVisitor<'tcx> for AmbiguityCausesVisitor<'a, 'tcx> { // Add ambiguity causes for unknowable goals. let mut ambiguity_cause = None; for cand in goal.candidates() { - // FIXME: boiiii, using string comparisions here sure is scuffed. 
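The coherence change right after this replaces a string-named probe ("coherence unknowable") with a structured TraitCandidate probe whose source is CandidateSource::CoherenceUnknowable, so the check becomes an ordinary pattern match instead of a string comparison. In isolation, with placeholder enums and a unit result type instead of the real query result, the new check looks like:

    #[derive(Debug)]
    enum CandidateSource { CoherenceUnknowable, Other }
    #[derive(Debug)]
    enum ProbeKind {
        TraitCandidate { source: CandidateSource, result: Result<(), ()> },
    }

    fn is_unknowable(kind: &ProbeKind) -> bool {
        matches!(
            kind,
            ProbeKind::TraitCandidate { source: CandidateSource::CoherenceUnknowable, result: Ok(()) }
        )
    }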
- if let inspect::ProbeKind::MiscCandidate { - name: "coherence unknowable", + if let inspect::ProbeKind::TraitCandidate { + source: CandidateSource::CoherenceUnknowable, result: Ok(_), } = cand.kind() { From 2eb7c8196bbbd0b990f053365f7c934f10963af0 Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Sun, 28 Apr 2024 15:35:35 -0400 Subject: [PATCH 39/56] Only register candidate if it is associated w a shallow certainty --- .../rustc_middle/src/traits/solve/inspect.rs | 4 +- .../src/traits/solve/inspect/format.rs | 4 +- .../src/solve/eval_ctxt/mod.rs | 11 ++-- .../src/solve/inspect/analyse.rs | 55 +++++++------------ 4 files changed, 32 insertions(+), 42 deletions(-) diff --git a/compiler/rustc_middle/src/traits/solve/inspect.rs b/compiler/rustc_middle/src/traits/solve/inspect.rs index 66542431149fc..0f3ace2d55d38 100644 --- a/compiler/rustc_middle/src/traits/solve/inspect.rs +++ b/compiler/rustc_middle/src/traits/solve/inspect.rs @@ -150,6 +150,6 @@ pub enum ProbeKind<'tcx> { /// do a probe to find out what projection type(s) may be used to prove that /// the source type upholds all of the target type's object bounds. UpcastProjectionCompatibility, - /// Try to unify an opaque type with an existing - OpaqueTypeStorageLookup, + /// Try to unify an opaque type with an existing key in the storage. + OpaqueTypeStorageLookup { result: QueryResult<'tcx> }, } diff --git a/compiler/rustc_middle/src/traits/solve/inspect/format.rs b/compiler/rustc_middle/src/traits/solve/inspect/format.rs index 91f0880fd2085..2d73be387fdcd 100644 --- a/compiler/rustc_middle/src/traits/solve/inspect/format.rs +++ b/compiler/rustc_middle/src/traits/solve/inspect/format.rs @@ -112,8 +112,8 @@ impl<'a, 'b> ProofTreeFormatter<'a, 'b> { ProbeKind::UpcastProjectionCompatibility => { write!(self.f, "PROBING FOR PROJECTION COMPATIBILITY FOR UPCASTING:") } - ProbeKind::OpaqueTypeStorageLookup => { - write!(self.f, "PROBING FOR AN EXISTING OPAQUE:") + ProbeKind::OpaqueTypeStorageLookup { result } => { + write!(self.f, "PROBING FOR AN EXISTING OPAQUE: {result:?}") } ProbeKind::TraitCandidate { source, result } => { write!(self.f, "CANDIDATE {source:?}: {result:?}") diff --git a/compiler/rustc_trait_selection/src/solve/eval_ctxt/mod.rs b/compiler/rustc_trait_selection/src/solve/eval_ctxt/mod.rs index 225479fcff347..d8fd5c0591141 100644 --- a/compiler/rustc_trait_selection/src/solve/eval_ctxt/mod.rs +++ b/compiler/rustc_trait_selection/src/solve/eval_ctxt/mod.rs @@ -998,8 +998,11 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { if candidate_key.def_id != key.def_id { continue; } - values.extend(self.probe(|_| inspect::ProbeKind::OpaqueTypeStorageLookup).enter( - |ecx| { + values.extend( + self.probe(|result| inspect::ProbeKind::OpaqueTypeStorageLookup { + result: *result, + }) + .enter(|ecx| { for (a, b) in std::iter::zip(candidate_key.args, key.args) { ecx.eq(param_env, a, b)?; } @@ -1011,8 +1014,8 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { candidate_ty, ); ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) - }, - )); + }), + ); } values } diff --git a/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs b/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs index 7daf99cb7e563..2d58111e80385 100644 --- a/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs +++ b/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs @@ -45,7 +45,7 @@ pub struct InspectCandidate<'a, 'tcx> { nested_goals: Vec>>>, final_state: inspect::CanonicalState<'tcx, ()>, result: QueryResult<'tcx>, - candidate_certainty: Option, + 
shallow_certainty: Certainty, } impl<'a, 'tcx> InspectCandidate<'a, 'tcx> { @@ -59,15 +59,14 @@ impl<'a, 'tcx> InspectCandidate<'a, 'tcx> { /// Certainty passed into `evaluate_added_goals_and_make_canonical_response`. /// - /// If this certainty is `Some(Yes)`, then we must be confident that the candidate + /// If this certainty is `Yes`, then we must be confident that the candidate /// must hold iff it's nested goals hold. This is not true if the certainty is - /// `Some(Maybe)`, which suggests we forced ambiguity instead, or if it is `None`, - /// which suggests we may have not assembled any candidates at all. + /// `Maybe(..)`, which suggests we forced ambiguity instead. /// - /// This is *not* the certainty of the candidate's nested evaluation, which can be - /// accessed with [`Self::result`] instead. - pub fn candidate_certainty(&self) -> Option { - self.candidate_certainty + /// This is *not* the certainty of the candidate's full nested evaluation, which + /// can be accessed with [`Self::result`] instead. + pub fn shallow_certainty(&self) -> Certainty { + self.shallow_certainty } /// Visit all nested goals of this candidate without rolling @@ -174,9 +173,7 @@ impl<'a, 'tcx> InspectGoal<'a, 'tcx> { nested_goals: &mut Vec>>>, probe: &inspect::Probe<'tcx>, ) { - let mut candidate_certainty = None; - let num_candidates = candidates.len(); - + let mut shallow_certainty = None; for step in &probe.steps { match step { &inspect::ProbeStep::AddGoal(_source, goal) => nested_goals.push(goal), @@ -188,8 +185,8 @@ impl<'a, 'tcx> InspectGoal<'a, 'tcx> { self.candidates_recur(candidates, nested_goals, probe); nested_goals.truncate(num_goals); } - inspect::ProbeStep::MakeCanonicalResponse { shallow_certainty } => { - assert_eq!(candidate_certainty.replace(*shallow_certainty), None); + inspect::ProbeStep::MakeCanonicalResponse { shallow_certainty: c } => { + assert_eq!(shallow_certainty.replace(*c), None); } inspect::ProbeStep::EvaluateGoals(_) => (), } @@ -198,37 +195,27 @@ impl<'a, 'tcx> InspectGoal<'a, 'tcx> { match probe.kind { inspect::ProbeKind::NormalizedSelfTyAssembly | inspect::ProbeKind::UnsizeAssembly - | inspect::ProbeKind::UpcastProjectionCompatibility - | inspect::ProbeKind::OpaqueTypeStorageLookup => (), - // We add a candidate for the root evaluation if there + | inspect::ProbeKind::UpcastProjectionCompatibility => (), + + // We add a candidate even for the root evaluation if there // is only one way to prove a given goal, e.g. for `WellFormed`. - // - // FIXME: This is currently wrong if we don't even try any - // candidates, e.g. for a trait goal, as in this case `candidates` is - // actually supposed to be empty. inspect::ProbeKind::Root { result } - | inspect::ProbeKind::TryNormalizeNonRigid { result } => { - if candidates.len() == num_candidates { + | inspect::ProbeKind::TryNormalizeNonRigid { result } + | inspect::ProbeKind::TraitCandidate { source: _, result } + | inspect::ProbeKind::OpaqueTypeStorageLookup { result } => { + // We only add a candidate if `shallow_certainty` was set, which means + // that we ended up calling `evaluate_added_goals_and_make_canonical_response`. 
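The hunk that follows is the heart of this patch: a probe only becomes an inspect candidate if a shallow certainty was recorded in it, i.e. if evaluate_added_goals_and_make_canonical_response actually ran inside that probe. A self-contained sketch of that bookkeeping, with toy step and candidate types in place of the real inspect data structures:

    #[derive(Clone, Copy, Debug, PartialEq)]
    enum Certainty { Yes, Maybe }

    #[derive(Debug)]
    enum ProbeStep {
        AddGoal(&'static str),
        MakeCanonicalResponse { shallow_certainty: Certainty },
    }

    #[derive(Debug)]
    struct Candidate { nested_goals: Vec<&'static str>, shallow_certainty: Certainty }

    fn candidate_from_probe(steps: &[ProbeStep]) -> Option<Candidate> {
        let mut shallow_certainty = None;
        let mut nested_goals = Vec::new();
        for step in steps {
            match step {
                ProbeStep::AddGoal(goal) => nested_goals.push(*goal),
                ProbeStep::MakeCanonicalResponse { shallow_certainty: c } => {
                    // A probe makes at most one canonical response.
                    assert_eq!(shallow_certainty.replace(*c), None);
                }
            }
        }
        // No canonical response was made, so this probe is not a candidate at all.
        shallow_certainty.map(|shallow_certainty| Candidate { nested_goals, shallow_certainty })
    }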
+ if let Some(shallow_certainty) = shallow_certainty { candidates.push(InspectCandidate { goal: self, kind: probe.kind, nested_goals: nested_goals.clone(), final_state: probe.final_state, result, - candidate_certainty, - }) + shallow_certainty, + }); } } - inspect::ProbeKind::TraitCandidate { source: _, result } => { - candidates.push(InspectCandidate { - goal: self, - kind: probe.kind, - nested_goals: nested_goals.clone(), - final_state: probe.final_state, - result, - candidate_certainty, - }); - } } } From f06e0f783793ab5592b6799876d9329b1490bc30 Mon Sep 17 00:00:00 2001 From: Santiago Pastorino Date: Mon, 29 Apr 2024 11:27:14 -0300 Subject: [PATCH 40/56] Add StaticForeignItem and use it on ForeignItemKind --- compiler/rustc_ast/src/ast.rs | 43 ++++++++++++++++--- compiler/rustc_ast/src/mut_visit.rs | 2 +- compiler/rustc_ast/src/visit.rs | 2 +- compiler/rustc_ast_lowering/src/item.rs | 8 ++-- .../rustc_ast_passes/src/ast_validation.rs | 4 +- .../rustc_ast_pretty/src/pprust/state/item.rs | 20 +++++---- compiler/rustc_parse/src/parser/item.rs | 6 ++- compiler/rustc_resolve/src/def_collector.rs | 2 +- .../clippy/clippy_utils/src/ast_utils.rs | 13 +++++- src/tools/rustfmt/src/items.rs | 6 +-- tests/ui/stats/hir-stats.stderr | 20 ++++----- 11 files changed, 86 insertions(+), 40 deletions(-) diff --git a/compiler/rustc_ast/src/ast.rs b/compiler/rustc_ast/src/ast.rs index af246e3137187..f24e2a22c0730 100644 --- a/compiler/rustc_ast/src/ast.rs +++ b/compiler/rustc_ast/src/ast.rs @@ -3126,6 +3126,35 @@ pub struct StaticItem { pub expr: Option>, } +/// A static item in `extern` block. +// This struct is identical to StaticItem for now but it's going to have a safety attribute. +#[derive(Clone, Encodable, Decodable, Debug)] +pub struct StaticForeignItem { + pub ty: P, + pub mutability: Mutability, + pub expr: Option>, +} + +impl From for StaticForeignItem { + fn from(static_item: StaticItem) -> StaticForeignItem { + StaticForeignItem { + ty: static_item.ty, + mutability: static_item.mutability, + expr: static_item.expr, + } + } +} + +impl From for StaticItem { + fn from(static_item: StaticForeignItem) -> StaticItem { + StaticItem { + ty: static_item.ty, + mutability: static_item.mutability, + expr: static_item.expr, + } + } +} + #[derive(Clone, Encodable, Decodable, Debug)] pub struct ConstItem { pub defaultness: Defaultness, @@ -3329,7 +3358,7 @@ impl TryFrom for AssocItemKind { #[derive(Clone, Encodable, Decodable, Debug)] pub enum ForeignItemKind { /// A foreign static item (`static FOO: u8`). - Static(P, Mutability, Option>), + Static(Box), /// An foreign function. Fn(Box), /// An foreign type. 
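Since StaticForeignItem currently mirrors StaticItem field for field, the two From impls above give a lossless round-trip, which is what the ForeignItemKind/ItemKind conversions in the next hunk rely on. A stand-alone illustration of that round-trip, using placeholder field types (String, bool) instead of the real AST nodes:

    #[derive(Clone, Debug, PartialEq)]
    struct StaticItem { ty: String, mutability: bool, expr: Option<String> }
    #[derive(Clone, Debug, PartialEq)]
    struct StaticForeignItem { ty: String, mutability: bool, expr: Option<String> }

    impl From<StaticItem> for StaticForeignItem {
        fn from(s: StaticItem) -> Self {
            StaticForeignItem { ty: s.ty, mutability: s.mutability, expr: s.expr }
        }
    }
    impl From<StaticForeignItem> for StaticItem {
        fn from(s: StaticForeignItem) -> Self {
            StaticItem { ty: s.ty, mutability: s.mutability, expr: s.expr }
        }
    }

    fn main() {
        let item = StaticItem { ty: "u8".into(), mutability: false, expr: None };
        let foreign = StaticForeignItem::from(item.clone());
        assert_eq!(StaticItem::from(foreign), item); // lossless while the structs stay identical
    }

Once the planned safety attribute lands on StaticForeignItem, the conversion back to StaticItem will necessarily drop information, so the round-trip above only holds for now.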
@@ -3341,8 +3370,8 @@ pub enum ForeignItemKind { impl From for ItemKind { fn from(foreign_item_kind: ForeignItemKind) -> ItemKind { match foreign_item_kind { - ForeignItemKind::Static(a, b, c) => { - ItemKind::Static(StaticItem { ty: a, mutability: b, expr: c }.into()) + ForeignItemKind::Static(box static_foreign_item) => { + ItemKind::Static(Box::new(static_foreign_item.into())) } ForeignItemKind::Fn(fn_kind) => ItemKind::Fn(fn_kind), ForeignItemKind::TyAlias(ty_alias_kind) => ItemKind::TyAlias(ty_alias_kind), @@ -3356,8 +3385,8 @@ impl TryFrom for ForeignItemKind { fn try_from(item_kind: ItemKind) -> Result { Ok(match item_kind { - ItemKind::Static(box StaticItem { ty: a, mutability: b, expr: c }) => { - ForeignItemKind::Static(a, b, c) + ItemKind::Static(box static_item) => { + ForeignItemKind::Static(Box::new(static_item.into())) } ItemKind::Fn(fn_kind) => ForeignItemKind::Fn(fn_kind), ItemKind::TyAlias(ty_alias_kind) => ForeignItemKind::TyAlias(ty_alias_kind), @@ -3382,8 +3411,8 @@ mod size_asserts { static_assert_size!(Expr, 72); static_assert_size!(ExprKind, 40); static_assert_size!(Fn, 160); - static_assert_size!(ForeignItem, 96); - static_assert_size!(ForeignItemKind, 24); + static_assert_size!(ForeignItem, 88); + static_assert_size!(ForeignItemKind, 16); static_assert_size!(GenericArg, 24); static_assert_size!(GenericBound, 88); static_assert_size!(Generics, 40); diff --git a/compiler/rustc_ast/src/mut_visit.rs b/compiler/rustc_ast/src/mut_visit.rs index fc445600e77b9..4d28ef56df173 100644 --- a/compiler/rustc_ast/src/mut_visit.rs +++ b/compiler/rustc_ast/src/mut_visit.rs @@ -1261,7 +1261,7 @@ pub fn noop_flat_map_item( impl NoopVisitItemKind for ForeignItemKind { fn noop_visit(&mut self, visitor: &mut impl MutVisitor) { match self { - ForeignItemKind::Static(ty, _, expr) => { + ForeignItemKind::Static(box StaticForeignItem { ty, mutability: _, expr }) => { visitor.visit_ty(ty); visit_opt(expr, |expr| visitor.visit_expr(expr)); } diff --git a/compiler/rustc_ast/src/visit.rs b/compiler/rustc_ast/src/visit.rs index a0ada9a7788d7..1d8fd63e4592e 100644 --- a/compiler/rustc_ast/src/visit.rs +++ b/compiler/rustc_ast/src/visit.rs @@ -642,7 +642,7 @@ impl WalkItemKind for ForeignItemKind { ) -> V::Result { let &Item { id, span, ident, ref vis, .. } = item; match self { - ForeignItemKind::Static(ty, _, expr) => { + ForeignItemKind::Static(box StaticForeignItem { ty, mutability: _, expr }) => { try_visit!(visitor.visit_ty(ty)); visit_opt!(visitor, visit_expr, expr); } diff --git a/compiler/rustc_ast_lowering/src/item.rs b/compiler/rustc_ast_lowering/src/item.rs index dcce54d66c2d9..ccbc44bff5780 100644 --- a/compiler/rustc_ast_lowering/src/item.rs +++ b/compiler/rustc_ast_lowering/src/item.rs @@ -662,10 +662,10 @@ impl<'hir> LoweringContext<'_, 'hir> { hir::ForeignItemKind::Fn(fn_dec, fn_args, generics) } - ForeignItemKind::Static(t, m, _) => { - let ty = - self.lower_ty(t, ImplTraitContext::Disallowed(ImplTraitPosition::StaticTy)); - hir::ForeignItemKind::Static(ty, *m) + ForeignItemKind::Static(box StaticForeignItem { ty, mutability, expr: _ }) => { + let ty = self + .lower_ty(ty, ImplTraitContext::Disallowed(ImplTraitPosition::StaticTy)); + hir::ForeignItemKind::Static(ty, *mutability) } ForeignItemKind::TyAlias(..) 
=> hir::ForeignItemKind::Type, ForeignItemKind::MacCall(_) => panic!("macro shouldn't exist here"), diff --git a/compiler/rustc_ast_passes/src/ast_validation.rs b/compiler/rustc_ast_passes/src/ast_validation.rs index 1fb410253d1e7..1eefad2af4dc3 100644 --- a/compiler/rustc_ast_passes/src/ast_validation.rs +++ b/compiler/rustc_ast_passes/src/ast_validation.rs @@ -1185,8 +1185,8 @@ impl<'a> Visitor<'a> for AstValidator<'a> { self.check_foreign_ty_genericless(generics, where_clauses); self.check_foreign_item_ascii_only(fi.ident); } - ForeignItemKind::Static(_, _, body) => { - self.check_foreign_kind_bodyless(fi.ident, "static", body.as_ref().map(|b| b.span)); + ForeignItemKind::Static(box StaticForeignItem { ty: _, mutability: _, expr }) => { + self.check_foreign_kind_bodyless(fi.ident, "static", expr.as_ref().map(|b| b.span)); self.check_foreign_item_ascii_only(fi.ident); } ForeignItemKind::MacCall(..) => {} diff --git a/compiler/rustc_ast_pretty/src/pprust/state/item.rs b/compiler/rustc_ast_pretty/src/pprust/state/item.rs index 10886aace53f2..b90182c2b3e6b 100644 --- a/compiler/rustc_ast_pretty/src/pprust/state/item.rs +++ b/compiler/rustc_ast_pretty/src/pprust/state/item.rs @@ -30,15 +30,17 @@ impl<'a> State<'a> { ast::ForeignItemKind::Fn(box ast::Fn { defaultness, sig, generics, body }) => { self.print_fn_full(sig, ident, generics, vis, *defaultness, body.as_deref(), attrs); } - ast::ForeignItemKind::Static(ty, mutbl, body) => self.print_item_const( - ident, - Some(*mutbl), - &ast::Generics::default(), - ty, - body.as_deref(), - vis, - ast::Defaultness::Final, - ), + ast::ForeignItemKind::Static(box ast::StaticForeignItem { ty, mutability, expr }) => { + self.print_item_const( + ident, + Some(*mutability), + &ast::Generics::default(), + ty, + expr.as_deref(), + vis, + ast::Defaultness::Final, + ) + } ast::ForeignItemKind::TyAlias(box ast::TyAlias { defaultness, generics, diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs index ed51710564a42..848277c4611e3 100644 --- a/compiler/rustc_parse/src/parser/item.rs +++ b/compiler/rustc_parse/src/parser/item.rs @@ -1191,7 +1191,11 @@ impl<'a> Parser<'a> { ident_span: ident.span, const_span, }); - ForeignItemKind::Static(ty, Mutability::Not, expr) + ForeignItemKind::Static(Box::new(StaticForeignItem { + ty, + mutability: Mutability::Not, + expr, + })) } _ => return self.error_bad_item_kind(span, &kind, "`extern` blocks"), }, diff --git a/compiler/rustc_resolve/src/def_collector.rs b/compiler/rustc_resolve/src/def_collector.rs index d4d999f6f9ce2..a27a6bceda335 100644 --- a/compiler/rustc_resolve/src/def_collector.rs +++ b/compiler/rustc_resolve/src/def_collector.rs @@ -209,7 +209,7 @@ impl<'a, 'b, 'tcx> visit::Visitor<'a> for DefCollector<'a, 'b, 'tcx> { fn visit_foreign_item(&mut self, fi: &'a ForeignItem) { let def_kind = match fi.kind { - ForeignItemKind::Static(_, mutability, _) => { + ForeignItemKind::Static(box StaticForeignItem { ty: _, mutability, expr: _ }) => { DefKind::Static { mutability, nested: false } } ForeignItemKind::Fn(_) => DefKind::Fn, diff --git a/src/tools/clippy/clippy_utils/src/ast_utils.rs b/src/tools/clippy/clippy_utils/src/ast_utils.rs index 0395eb1449b48..529d20126b22c 100644 --- a/src/tools/clippy/clippy_utils/src/ast_utils.rs +++ b/src/tools/clippy/clippy_utils/src/ast_utils.rs @@ -446,7 +446,18 @@ pub fn eq_item_kind(l: &ItemKind, r: &ItemKind) -> bool { pub fn eq_foreign_item_kind(l: &ForeignItemKind, r: &ForeignItemKind) -> bool { use ForeignItemKind::*; match (l, r) { - 
(Static(lt, lm, le), Static(rt, rm, re)) => lm == rm && eq_ty(lt, rt) && eq_expr_opt(le, re), + ( + Static(box StaticForeignItem { + ty: lt, + mutability: lm, + expr: le, + }), + Static(box StaticForeignItem { + ty: rt, + mutability: rm, + expr: re, + }), + ) => lm == rm && eq_ty(lt, rt) && eq_expr_opt(le, re), ( Fn(box ast::Fn { defaultness: ld, diff --git a/src/tools/rustfmt/src/items.rs b/src/tools/rustfmt/src/items.rs index f6f51fbd8eaf9..e196d1817f34a 100644 --- a/src/tools/rustfmt/src/items.rs +++ b/src/tools/rustfmt/src/items.rs @@ -3325,11 +3325,11 @@ impl Rewrite for ast::ForeignItem { .map(|(s, _, _)| format!("{};", s)) } } - ast::ForeignItemKind::Static(ref ty, mutability, _) => { + ast::ForeignItemKind::Static(ref static_foreign_item) => { // FIXME(#21): we're dropping potential comments in between the // function kw here. let vis = format_visibility(context, &self.vis); - let mut_str = format_mutability(mutability); + let mut_str = format_mutability(static_foreign_item.mutability); let prefix = format!( "{}static {}{}:", vis, @@ -3340,7 +3340,7 @@ impl Rewrite for ast::ForeignItem { rewrite_assign_rhs( context, prefix, - &**ty, + &static_foreign_item.ty, &RhsAssignKind::Ty, shape.sub_width(1)?, ) diff --git a/tests/ui/stats/hir-stats.stderr b/tests/ui/stats/hir-stats.stderr index dc24833c267c7..1a67c9438449b 100644 --- a/tests/ui/stats/hir-stats.stderr +++ b/tests/ui/stats/hir-stats.stderr @@ -11,15 +11,15 @@ ast-stats-1 Attribute 64 ( 1.0%) 2 32 ast-stats-1 - Normal 32 ( 0.5%) 1 ast-stats-1 - DocComment 32 ( 0.5%) 1 ast-stats-1 Local 80 ( 1.2%) 1 80 -ast-stats-1 Arm 96 ( 1.4%) 2 48 -ast-stats-1 ForeignItem 96 ( 1.4%) 1 96 -ast-stats-1 - Fn 96 ( 1.4%) 1 +ast-stats-1 ForeignItem 88 ( 1.3%) 1 88 +ast-stats-1 - Fn 88 ( 1.3%) 1 +ast-stats-1 Arm 96 ( 1.5%) 2 48 ast-stats-1 FnDecl 120 ( 1.8%) 5 24 ast-stats-1 FieldDef 160 ( 2.4%) 2 80 ast-stats-1 Stmt 160 ( 2.4%) 5 32 ast-stats-1 - Let 32 ( 0.5%) 1 ast-stats-1 - MacCall 32 ( 0.5%) 1 -ast-stats-1 - Expr 96 ( 1.4%) 3 +ast-stats-1 - Expr 96 ( 1.5%) 3 ast-stats-1 Param 160 ( 2.4%) 4 40 ast-stats-1 Block 192 ( 2.9%) 6 32 ast-stats-1 Variant 208 ( 3.1%) 2 104 @@ -28,7 +28,7 @@ ast-stats-1 - Trait 352 ( 5.3%) 4 ast-stats-1 AssocItem 352 ( 5.3%) 4 88 ast-stats-1 - Type 176 ( 2.7%) 2 ast-stats-1 - Fn 176 ( 2.7%) 2 -ast-stats-1 GenericParam 480 ( 7.2%) 5 96 +ast-stats-1 GenericParam 480 ( 7.3%) 5 96 ast-stats-1 Pat 504 ( 7.6%) 7 72 ast-stats-1 - Struct 72 ( 1.1%) 1 ast-stats-1 - Wild 72 ( 1.1%) 1 @@ -53,7 +53,7 @@ ast-stats-1 - Impl 136 ( 2.1%) 1 ast-stats-1 - Fn 272 ( 4.1%) 2 ast-stats-1 - Use 408 ( 6.2%) 3 ast-stats-1 ---------------------------------------------------------------- -ast-stats-1 Total 6_624 +ast-stats-1 Total 6_616 ast-stats-1 ast-stats-2 POST EXPANSION AST STATS ast-stats-2 Name Accumulated Size Count Item Size @@ -65,9 +65,9 @@ ast-stats-2 ExprField 48 ( 0.7%) 1 48 ast-stats-2 WherePredicate 56 ( 0.8%) 1 56 ast-stats-2 - BoundPredicate 56 ( 0.8%) 1 ast-stats-2 Local 80 ( 1.1%) 1 80 +ast-stats-2 ForeignItem 88 ( 1.2%) 1 88 +ast-stats-2 - Fn 88 ( 1.2%) 1 ast-stats-2 Arm 96 ( 1.3%) 2 48 -ast-stats-2 ForeignItem 96 ( 1.3%) 1 96 -ast-stats-2 - Fn 96 ( 1.3%) 1 ast-stats-2 InlineAsm 120 ( 1.7%) 1 120 ast-stats-2 FnDecl 120 ( 1.7%) 5 24 ast-stats-2 Attribute 128 ( 1.8%) 4 32 @@ -86,7 +86,7 @@ ast-stats-2 - Trait 352 ( 4.9%) 4 ast-stats-2 AssocItem 352 ( 4.9%) 4 88 ast-stats-2 - Type 176 ( 2.4%) 2 ast-stats-2 - Fn 176 ( 2.4%) 2 -ast-stats-2 GenericParam 480 ( 6.6%) 5 96 +ast-stats-2 GenericParam 480 ( 6.7%) 5 96 ast-stats-2 Pat 504 
( 7.0%) 7 72 ast-stats-2 - Struct 72 ( 1.0%) 1 ast-stats-2 - Wild 72 ( 1.0%) 1 @@ -113,7 +113,7 @@ ast-stats-2 - Impl 136 ( 1.9%) 1 ast-stats-2 - Fn 272 ( 3.8%) 2 ast-stats-2 - Use 544 ( 7.5%) 4 ast-stats-2 ---------------------------------------------------------------- -ast-stats-2 Total 7_224 +ast-stats-2 Total 7_216 ast-stats-2 hir-stats HIR STATS hir-stats Name Accumulated Size Count Item Size From 982a58e900783a68f6a9b6bfea33f46b7be68386 Mon Sep 17 00:00:00 2001 From: David Koloski Date: Mon, 29 Apr 2024 17:00:03 +0000 Subject: [PATCH 41/56] Fix Fuchsia build broken by #124210 Fuchsia doesn't support dirfd although we have a symbol stubbed for it. --- library/std/src/sys/pal/unix/fs.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/library/std/src/sys/pal/unix/fs.rs b/library/std/src/sys/pal/unix/fs.rs index 5987996a3f167..f8fdc6369cdfb 100644 --- a/library/std/src/sys/pal/unix/fs.rs +++ b/library/std/src/sys/pal/unix/fs.rs @@ -894,6 +894,7 @@ impl Drop for Dir { target_os = "vita", target_os = "hurd", target_os = "espidf", + target_os = "fuchsia", )))] { let fd = unsafe { libc::dirfd(self.0) }; From 23341500a619b61fab3bae82c01d18e6407def0a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Ber=C3=A1nek?= Date: Thu, 25 Apr 2024 11:04:49 +0200 Subject: [PATCH 42/56] Unify outcome jobs --- .github/workflows/ci.yml | 40 ++++++++---------------------------- src/ci/github-actions/ci.yml | 37 ++++++++++++++++----------------- 2 files changed, 26 insertions(+), 51 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d691b72488b2a..cfcbb8656b1af 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -152,49 +152,25 @@ jobs: AWS_ACCESS_KEY_ID: "${{ env.ARTIFACTS_AWS_ACCESS_KEY_ID }}" AWS_SECRET_ACCESS_KEY: "${{ secrets[format('AWS_SECRET_ACCESS_KEY_{0}', env.ARTIFACTS_AWS_ACCESS_KEY_ID)] }}" if: "success() && (github.event_name == 'push' || env.DEPLOY == '1' || env.DEPLOY_ALT == '1')" - try-success: - needs: - - job - if: "success() && github.event_name == 'push' && (github.ref == 'refs/heads/try' || github.ref == 'refs/heads/try-perf') && github.repository == 'rust-lang-ci/rust'" - steps: - - name: mark the job as a success - run: exit 0 - shell: bash - name: bors build finished - runs-on: ubuntu-latest - try-failure: - needs: - - job - if: "!success() && github.event_name == 'push' && (github.ref == 'refs/heads/try' || github.ref == 'refs/heads/try-perf') && github.repository == 'rust-lang-ci/rust'" - steps: - - name: mark the job as a failure - run: exit 1 - shell: bash + outcome: name: bors build finished runs-on: ubuntu-latest - auto-success: needs: - job - if: "success() && github.event_name == 'push' && github.ref == 'refs/heads/auto' && github.repository == 'rust-lang-ci/rust'" + if: "!cancelled() && github.event_name == 'push'" steps: - name: checkout the source code uses: actions/checkout@v4 with: fetch-depth: 2 + - name: calculate the correct exit status + id: status + run: "jq --exit-status 'all(.result == \"success\" or .result == \"skipped\")' <<< '${{ toJson(needs) }}'\necho \"status=$?\" >> $GITHUB_OUTPUT\n" - name: publish toolstate run: src/ci/publish_toolstate.sh shell: bash + if: "steps.outputs.status == 0 && github.event_name == 'push' && github.ref == 'refs/heads/auto' && github.repository == 'rust-lang-ci/rust'" env: TOOLSTATE_REPO_ACCESS_TOKEN: "${{ secrets.TOOLSTATE_REPO_ACCESS_TOKEN }}" - name: bors build finished - runs-on: ubuntu-latest - auto-failure: - needs: - - job - if: "!success() && github.event_name == 'push' 
&& github.ref == 'refs/heads/auto' && github.repository == 'rust-lang-ci/rust'" - steps: - - name: mark the job as a failure - run: exit 1 - shell: bash - name: bors build finished - runs-on: ubuntu-latest + - name: set the correct exit status + run: "exit ${{ steps.outputs.status == 0 }}" diff --git a/src/ci/github-actions/ci.yml b/src/ci/github-actions/ci.yml index 95d2225a6b1c6..4ca7b52bc02da 100644 --- a/src/ci/github-actions/ci.yml +++ b/src/ci/github-actions/ci.yml @@ -270,33 +270,32 @@ jobs: # erroring about invalid credentials instead. if: success() && (github.event_name == 'push' || env.DEPLOY == '1' || env.DEPLOY_ALT == '1') - # These jobs don't actually test anything, but they're used to tell bors the - # build completed, as there is no practical way to detect when a workflow is - # successful listening to webhooks only. - try-success: - needs: [ job ] - if: "success() && github.event_name == 'push' && (github.ref == 'refs/heads/try' || github.ref == 'refs/heads/try-perf') && github.repository == 'rust-lang-ci/rust'" - <<: *base-success-job - try-failure: - needs: [ job ] - if: "!success() && github.event_name == 'push' && (github.ref == 'refs/heads/try' || github.ref == 'refs/heads/try-perf') && github.repository == 'rust-lang-ci/rust'" - <<: *base-failure-job - auto-success: + # This job isused to tell bors the final status of the build, as there is no practical way to detect + # when a workflow is successful listening to webhooks only in our current bors implementation (homu). + outcome: + name: bors build finished + runs-on: ubuntu-latest needs: [ job ] - if: "success() && github.event_name == 'push' && github.ref == 'refs/heads/auto' && github.repository == 'rust-lang-ci/rust'" - <<: *base-outcome-job + # !cancelled() executes the job regardless of whether the previous jobs passed or failed + if: "!cancelled() && github.event_name == 'push'" steps: - name: checkout the source code uses: actions/checkout@v4 with: fetch-depth: 2 + # Calculate the exit status of the whole CI workflow (0 if all dependent jobs were either successful + # or skipped, otherwise 1). + - name: calculate the correct exit status + id: status + run: | + jq --exit-status 'all(.result == "success" or .result == "skipped")' <<< '${{ toJson(needs) }}' + echo "status=$?" 
>> $GITHUB_OUTPUT + # Publish the toolstate if an auto build succeeds (just before push to master) - name: publish toolstate run: src/ci/publish_toolstate.sh shell: bash + if: steps.outputs.status == 0 && github.event_name == 'push' && github.ref == 'refs/heads/auto' && github.repository == 'rust-lang-ci/rust' env: TOOLSTATE_REPO_ACCESS_TOKEN: ${{ secrets.TOOLSTATE_REPO_ACCESS_TOKEN }} - - auto-failure: - needs: [ job ] - if: "!success() && github.event_name == 'push' && github.ref == 'refs/heads/auto' && github.repository == 'rust-lang-ci/rust'" - <<: *base-failure-job + - name: set the correct exit status + run: exit ${{ steps.outputs.status }} From 1ca92c085788f68ff9b23cf597da5c62924e3f37 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Ber=C3=A1nek?= Date: Mon, 29 Apr 2024 21:32:35 +0200 Subject: [PATCH 43/56] Replace dynamically generated `ci.yml` file with the original template --- .github/workflows/ci.yml | 170 ++++++++++++++------ src/ci/github-actions/ci.yml | 301 ----------------------------------- 2 files changed, 121 insertions(+), 350 deletions(-) delete mode 100644 src/ci/github-actions/ci.yml diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index cfcbb8656b1af..6fdc8d4d72ddf 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,21 +1,14 @@ -############################################################# -# WARNING: automatically generated file, DO NOT CHANGE! # -############################################################# - -# This file was automatically generated by the expand-yaml-anchors tool. The -# source file that generated this one is: -# -# src/ci/github-actions/ci.yml -# -# Once you make changes to that file you need to run: +# This file defines our primary CI workflow that runs on pull requests +# and also on pushes to special branches (auto, try). # -# ./x.py run src/tools/expand-yaml-anchors/ -# -# The CI build will fail if the tool is not run after changes to this file. +# The actual definition of the executed jobs is calculated by a Python +# script located at src/ci/github-actions/calculate-job-matrix.py, which +# uses job definition data from src/ci/github-actions/jobs.yml. +# You should primarily modify the `jobs.yml` file if you want to modify +# what jobs are executed in CI. ---- name: CI -"on": +on: push: branches: - auto @@ -25,23 +18,35 @@ name: CI pull_request: branches: - "**" + permissions: contents: read packages: write + defaults: run: + # On Linux, macOS, and Windows, use the system-provided bash as the default + # shell. (This should only make a difference on Windows, where the default + # shell is PowerShell.) shell: bash + concurrency: - group: "${{ github.workflow }}-${{ ((github.ref == 'refs/heads/try' || github.ref == 'refs/heads/try-perf') && github.sha) || github.ref }}" + # For a given workflow, if we push to the same branch, cancel all previous builds on that branch. + # We add an exception for try builds (try branch) and unrolled rollup builds (try-perf), which + # are all triggered on the same branch, but which should be able to run concurrently. + group: ${{ github.workflow }}-${{ ((github.ref == 'refs/heads/try' || github.ref == 'refs/heads/try-perf') && github.sha) || github.ref }} cancel-in-progress: true env: TOOLSTATE_REPO: "https://github.com/rust-lang-nursery/rust-toolstate" jobs: + # The job matrix for `calculate_matrix` is defined in src/ci/github-actions/jobs.yml. + # It calculates which jobs should be executed, based on the data of the ${{ github }} context. 
+ # If you want to modify CI jobs, take a look at src/ci/github-actions/jobs.yml. calculate_matrix: name: Calculate job matrix runs-on: ubuntu-latest outputs: - jobs: "${{ steps.jobs.outputs.jobs }}" + jobs: ${{ steps.jobs.outputs.jobs }} steps: - name: Checkout the source code uses: actions/checkout@v4 @@ -49,128 +54,195 @@ jobs: run: python3 src/ci/github-actions/calculate-job-matrix.py >> $GITHUB_OUTPUT id: jobs job: - name: "${{ matrix.name }}" - needs: - - calculate_matrix + name: ${{ matrix.name }} + needs: [ calculate_matrix ] runs-on: "${{ matrix.os }}" defaults: run: - shell: "${{ contains(matrix.os, 'windows') && 'msys2 {0}' || 'bash' }}" + shell: ${{ contains(matrix.os, 'windows') && 'msys2 {0}' || 'bash' }} timeout-minutes: 600 env: - CI_JOB_NAME: "${{ matrix.image }}" + CI_JOB_NAME: ${{ matrix.image }} CARGO_REGISTRIES_CRATES_IO_PROTOCOL: sparse - HEAD_SHA: "${{ github.event.pull_request.head.sha || github.sha }}" - DOCKER_TOKEN: "${{ secrets.GITHUB_TOKEN }}" + # commit of PR sha or commit sha. `GITHUB_SHA` is not accurate for PRs. + HEAD_SHA: ${{ github.event.pull_request.head.sha || github.sha }} + DOCKER_TOKEN: ${{ secrets.GITHUB_TOKEN }} SCCACHE_BUCKET: rust-lang-ci-sccache2 CACHE_DOMAIN: ci-caches.rust-lang.org - continue-on-error: "${{ matrix.continue_on_error || false }}" + continue-on-error: ${{ matrix.continue_on_error || false }} strategy: matrix: - include: "${{ fromJSON(needs.calculate_matrix.outputs.jobs) }}" - if: "fromJSON(needs.calculate_matrix.outputs.jobs)[0] != null" + # Check the `calculate_matrix` job to see how is the matrix defined. + include: ${{ fromJSON(needs.calculate_matrix.outputs.jobs) }} + # GitHub Actions fails the workflow if an empty list of jobs is provided to + # the workflow, so we need to skip this job if nothing was produced by + # the Python script. + # + # Unfortunately checking whether a list is empty is not possible in a nice + # way due to GitHub Actions expressions limits. + # This hack is taken from https://github.com/ferrocene/ferrocene/blob/d43edc6b7697cf1719ec1c17c54904ab94825763/.github/workflows/release.yml#L75-L82 + if: fromJSON(needs.calculate_matrix.outputs.jobs)[0] != null steps: - - if: "contains(matrix.os, 'windows')" + - if: contains(matrix.os, 'windows') uses: msys2/setup-msys2@v2.22.0 with: - msystem: "${{ contains(matrix.name, 'i686') && 'mingw32' || 'mingw64' }}" + # i686 jobs use mingw32. x86_64 and cross-compile jobs use mingw64. + msystem: ${{ contains(matrix.name, 'i686') && 'mingw32' || 'mingw64' }} + # don't try to download updates for already installed packages update: false + # don't try to use the msys that comes built-in to the github runner, + # so we can control what is installed (i.e. not python) release: true + # Inherit the full path from the Windows environment, with MSYS2's */bin/ + # dirs placed in front. This lets us run Windows-native Python etc. path-type: inherit - install: "make dos2unix diffutils\n" + install: > + make + dos2unix + diffutils + - name: disable git crlf conversion run: git config --global core.autocrlf false + - name: checkout the source code uses: actions/checkout@v4 with: fetch-depth: 2 + + # Rust Log Analyzer can't currently detect the PR number of a GitHub + # Actions build on its own, so a hint in the log message is needed to + # point it in the right direction. 
- name: configure the PR in which the error message will be posted - run: "echo \"[CI_PR_NUMBER=$num]\"" + run: echo "[CI_PR_NUMBER=$num]" env: - num: "${{ github.event.number }}" - if: "success() && github.event_name == 'pull_request'" + num: ${{ github.event.number }} + if: success() && github.event_name == 'pull_request' + - name: add extra environment variables run: src/ci/scripts/setup-environment.sh env: - EXTRA_VARIABLES: "${{ toJson(matrix.env) }}" + # Since it's not possible to merge `${{ matrix.env }}` with the other + # variables in `job..env`, the variables defined in the matrix + # are passed to the `setup-environment.sh` script encoded in JSON, + # which then uses log commands to actually set them. + EXTRA_VARIABLES: ${{ toJson(matrix.env) }} + - name: ensure the channel matches the target branch run: src/ci/scripts/verify-channel.sh + - name: collect CPU statistics run: src/ci/scripts/collect-cpu-stats.sh + - name: show the current environment run: src/ci/scripts/dump-environment.sh + - name: install awscli run: src/ci/scripts/install-awscli.sh + - name: install sccache run: src/ci/scripts/install-sccache.sh + - name: select Xcode run: src/ci/scripts/select-xcode.sh + - name: install clang run: src/ci/scripts/install-clang.sh + - name: install tidy run: src/ci/scripts/install-tidy.sh + - name: install WIX run: src/ci/scripts/install-wix.sh + - name: disable git crlf conversion run: src/ci/scripts/disable-git-crlf-conversion.sh + - name: checkout submodules run: src/ci/scripts/checkout-submodules.sh + - name: install MSYS2 run: src/ci/scripts/install-msys2.sh + - name: install MinGW run: src/ci/scripts/install-mingw.sh + - name: install ninja run: src/ci/scripts/install-ninja.sh + - name: enable ipv6 on Docker run: src/ci/scripts/enable-docker-ipv6.sh + + # Disable automatic line ending conversion (again). On Windows, when we're + # installing dependencies, something switches the git configuration directory or + # re-enables autocrlf. We've not tracked down the exact cause -- and there may + # be multiple -- but this should ensure submodules are checked out with the + # appropriate line endings. - name: disable git crlf conversion run: src/ci/scripts/disable-git-crlf-conversion.sh + - name: ensure line endings are correct run: src/ci/scripts/verify-line-endings.sh + - name: ensure backported commits are in upstream branches run: src/ci/scripts/verify-backported-commits.sh + - name: ensure the stable version number is correct run: src/ci/scripts/verify-stable-version-number.sh + - name: run the build + # Redirect stderr to stdout to avoid reordering the two streams in the GHA logs. 
run: src/ci/scripts/run-build-from-ci.sh 2>&1 env: - AWS_ACCESS_KEY_ID: "${{ env.CACHES_AWS_ACCESS_KEY_ID }}" - AWS_SECRET_ACCESS_KEY: "${{ secrets[format('AWS_SECRET_ACCESS_KEY_{0}', env.CACHES_AWS_ACCESS_KEY_ID)] }}" - TOOLSTATE_REPO_ACCESS_TOKEN: "${{ secrets.TOOLSTATE_REPO_ACCESS_TOKEN }}" + AWS_ACCESS_KEY_ID: ${{ env.CACHES_AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets[format('AWS_SECRET_ACCESS_KEY_{0}', env.CACHES_AWS_ACCESS_KEY_ID)] }} + TOOLSTATE_REPO_ACCESS_TOKEN: ${{ secrets.TOOLSTATE_REPO_ACCESS_TOKEN }} + - name: create github artifacts run: src/ci/scripts/create-doc-artifacts.sh + - name: upload artifacts to github uses: actions/upload-artifact@v4 with: - name: "${{ env.DOC_ARTIFACT_NAME }}" + # name is set in previous step + name: ${{ env.DOC_ARTIFACT_NAME }} path: obj/artifacts/doc if-no-files-found: ignore retention-days: 5 + - name: upload artifacts to S3 run: src/ci/scripts/upload-artifacts.sh env: - AWS_ACCESS_KEY_ID: "${{ env.ARTIFACTS_AWS_ACCESS_KEY_ID }}" - AWS_SECRET_ACCESS_KEY: "${{ secrets[format('AWS_SECRET_ACCESS_KEY_{0}', env.ARTIFACTS_AWS_ACCESS_KEY_ID)] }}" - if: "success() && (github.event_name == 'push' || env.DEPLOY == '1' || env.DEPLOY_ALT == '1')" + AWS_ACCESS_KEY_ID: ${{ env.ARTIFACTS_AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets[format('AWS_SECRET_ACCESS_KEY_{0}', env.ARTIFACTS_AWS_ACCESS_KEY_ID)] }} + # Adding a condition on DEPLOY=1 or DEPLOY_ALT=1 is not needed as all deploy + # builders *should* have the AWS credentials available. Still, explicitly + # adding the condition is helpful as this way CI will not silently skip + # deploying artifacts from a dist builder if the variables are misconfigured, + # erroring about invalid credentials instead. + if: success() && (github.event_name == 'push' || env.DEPLOY == '1' || env.DEPLOY_ALT == '1') + + # This job isused to tell bors the final status of the build, as there is no practical way to detect + # when a workflow is successful listening to webhooks only in our current bors implementation (homu). outcome: name: bors build finished runs-on: ubuntu-latest - needs: - - job + needs: [ job ] + # !cancelled() executes the job regardless of whether the previous jobs passed or failed if: "!cancelled() && github.event_name == 'push'" steps: - name: checkout the source code uses: actions/checkout@v4 with: fetch-depth: 2 + # Calculate the exit status of the whole CI workflow. + # If all dependent jobs were successful, this exits with 0 (and the outcome job continues successfully). + # If a some dependent job has failed, this exits with 1. 
- name: calculate the correct exit status - id: status - run: "jq --exit-status 'all(.result == \"success\" or .result == \"skipped\")' <<< '${{ toJson(needs) }}'\necho \"status=$?\" >> $GITHUB_OUTPUT\n" + run: jq --exit-status 'all(.result == "success" or .result == "skipped")' <<< '${{ toJson(needs) }}' + # Publish the toolstate if an auto build succeeds (just before push to master) - name: publish toolstate run: src/ci/publish_toolstate.sh shell: bash - if: "steps.outputs.status == 0 && github.event_name == 'push' && github.ref == 'refs/heads/auto' && github.repository == 'rust-lang-ci/rust'" + if: success() && github.event_name == 'push' && github.ref == 'refs/heads/auto' && github.repository == 'rust-lang-ci/rust' env: - TOOLSTATE_REPO_ACCESS_TOKEN: "${{ secrets.TOOLSTATE_REPO_ACCESS_TOKEN }}" - - name: set the correct exit status - run: "exit ${{ steps.outputs.status == 0 }}" + TOOLSTATE_REPO_ACCESS_TOKEN: ${{ secrets.TOOLSTATE_REPO_ACCESS_TOKEN }} diff --git a/src/ci/github-actions/ci.yml b/src/ci/github-actions/ci.yml deleted file mode 100644 index 4ca7b52bc02da..0000000000000 --- a/src/ci/github-actions/ci.yml +++ /dev/null @@ -1,301 +0,0 @@ -###################################################### -# WARNING! Action needed when changing this file # -###################################################### - -# Due to GitHub Actions limitations, we can't use YAML Anchors directly in the -# CI configuration stored on the repository. To work around that this file is -# expanded by a tool in the repository, and the expansion is committed as well. -# -# After you make any change to the file you'll need to run this command: -# -# ./x.py run src/tools/expand-yaml-anchors -# -# ...and commit the file it updated in addition to this one. If you forget this -# step CI will fail. - ---- -############################### -# YAML Anchors Definition # -############################### - -# This key contains most of the YAML anchors that will be used later in the -# document. YAML anchors allows us to greatly reduce duplication inside the CI -# configuration by reusing parts of the configuration. -# -# YAML anchors work by defining an anchor with `&anchor-name` and reusing its -# content in another place with `*anchor-name`. The special `<<` map key merges -# the content of the map with the content of the anchor (or list of anchors). -# -# The expand-yaml-anchors tool will automatically remove this block from the -# output YAML file. -x--expand-yaml-anchors--remove: - # These snippets are used by the try-success, try-failure, auto-success and auto-failure jobs. - # Check out their documentation for more information on why they're needed. - - - &base-outcome-job - name: bors build finished - runs-on: ubuntu-latest - - - &base-success-job - steps: - - name: mark the job as a success - run: exit 0 - shell: bash - <<: *base-outcome-job - - - &base-failure-job - steps: - - name: mark the job as a failure - run: exit 1 - shell: bash - <<: *base-outcome-job - -########################### -# Builders definition # -########################### - -name: CI -on: - push: - branches: - - auto - - try - - try-perf - - automation/bors/try - pull_request: - branches: - - "**" - -permissions: - contents: read - packages: write - -defaults: - run: - # On Linux, macOS, and Windows, use the system-provided bash as the default - # shell. (This should only make a difference on Windows, where the default - # shell is PowerShell.) 
- shell: bash - -concurrency: - # For a given workflow, if we push to the same branch, cancel all previous builds on that branch. - # We add an exception for try builds (try branch) and unrolled rollup builds (try-perf), which - # are all triggered on the same branch, but which should be able to run concurrently. - group: ${{ github.workflow }}-${{ ((github.ref == 'refs/heads/try' || github.ref == 'refs/heads/try-perf') && github.sha) || github.ref }} - cancel-in-progress: true - -env: - TOOLSTATE_REPO: https://github.com/rust-lang-nursery/rust-toolstate - -jobs: - # The job matrix for `calculate_matrix` is defined in src/ci/github-actions/jobs.yml. - # It calculates which jobs should be executed, based on the data of the ${{ github }} context. - # If you want to modify CI jobs, take a look at src/ci/github-actions/jobs.yml. - calculate_matrix: - name: Calculate job matrix - runs-on: ubuntu-latest - outputs: - jobs: ${{ steps.jobs.outputs.jobs }} - steps: - - name: Checkout the source code - uses: actions/checkout@v4 - - name: Calculate the CI job matrix - run: python3 src/ci/github-actions/calculate-job-matrix.py >> $GITHUB_OUTPUT - id: jobs - job: - name: ${{ matrix.name }} - needs: [ calculate_matrix ] - runs-on: "${{ matrix.os }}" - defaults: - run: - shell: ${{ contains(matrix.os, 'windows') && 'msys2 {0}' || 'bash' }} - timeout-minutes: 600 - env: - CI_JOB_NAME: ${{ matrix.image }} - CARGO_REGISTRIES_CRATES_IO_PROTOCOL: sparse - # commit of PR sha or commit sha. `GITHUB_SHA` is not accurate for PRs. - HEAD_SHA: ${{ github.event.pull_request.head.sha || github.sha }} - DOCKER_TOKEN: ${{ secrets.GITHUB_TOKEN }} - SCCACHE_BUCKET: rust-lang-ci-sccache2 - CACHE_DOMAIN: ci-caches.rust-lang.org - continue-on-error: ${{ matrix.continue_on_error || false }} - strategy: - matrix: - # Check the `calculate_matrix` job to see how is the matrix defined. - include: ${{ fromJSON(needs.calculate_matrix.outputs.jobs) }} - # GitHub Actions fails the workflow if an empty list of jobs is provided to - # the workflow, so we need to skip this job if nothing was produced by - # the Python script. - # - # Unfortunately checking whether a list is empty is not possible in a nice - # way due to GitHub Actions expressions limits. - # This hack is taken from https://github.com/ferrocene/ferrocene/blob/d43edc6b7697cf1719ec1c17c54904ab94825763/.github/workflows/release.yml#L75-L82 - if: fromJSON(needs.calculate_matrix.outputs.jobs)[0] != null - steps: - - if: contains(matrix.os, 'windows') - uses: msys2/setup-msys2@v2.22.0 - with: - # i686 jobs use mingw32. x86_64 and cross-compile jobs use mingw64. - msystem: ${{ contains(matrix.name, 'i686') && 'mingw32' || 'mingw64' }} - # don't try to download updates for already installed packages - update: false - # don't try to use the msys that comes built-in to the github runner, - # so we can control what is installed (i.e. not python) - release: true - # Inherit the full path from the Windows environment, with MSYS2's */bin/ - # dirs placed in front. This lets us run Windows-native Python etc. - path-type: inherit - install: > - make - dos2unix - diffutils - - - name: disable git crlf conversion - run: git config --global core.autocrlf false - - - name: checkout the source code - uses: actions/checkout@v4 - with: - fetch-depth: 2 - - # Rust Log Analyzer can't currently detect the PR number of a GitHub - # Actions build on its own, so a hint in the log message is needed to - # point it in the right direction. 
- - name: configure the PR in which the error message will be posted - run: echo "[CI_PR_NUMBER=$num]" - env: - num: ${{ github.event.number }} - if: success() && github.event_name == 'pull_request' - - - name: add extra environment variables - run: src/ci/scripts/setup-environment.sh - env: - # Since it's not possible to merge `${{ matrix.env }}` with the other - # variables in `job..env`, the variables defined in the matrix - # are passed to the `setup-environment.sh` script encoded in JSON, - # which then uses log commands to actually set them. - EXTRA_VARIABLES: ${{ toJson(matrix.env) }} - - - name: ensure the channel matches the target branch - run: src/ci/scripts/verify-channel.sh - - - name: collect CPU statistics - run: src/ci/scripts/collect-cpu-stats.sh - - - name: show the current environment - run: src/ci/scripts/dump-environment.sh - - - name: install awscli - run: src/ci/scripts/install-awscli.sh - - - name: install sccache - run: src/ci/scripts/install-sccache.sh - - - name: select Xcode - run: src/ci/scripts/select-xcode.sh - - - name: install clang - run: src/ci/scripts/install-clang.sh - - - name: install tidy - run: src/ci/scripts/install-tidy.sh - - - name: install WIX - run: src/ci/scripts/install-wix.sh - - - name: disable git crlf conversion - run: src/ci/scripts/disable-git-crlf-conversion.sh - - - name: checkout submodules - run: src/ci/scripts/checkout-submodules.sh - - - name: install MSYS2 - run: src/ci/scripts/install-msys2.sh - - - name: install MinGW - run: src/ci/scripts/install-mingw.sh - - - name: install ninja - run: src/ci/scripts/install-ninja.sh - - - name: enable ipv6 on Docker - run: src/ci/scripts/enable-docker-ipv6.sh - - # Disable automatic line ending conversion (again). On Windows, when we're - # installing dependencies, something switches the git configuration directory or - # re-enables autocrlf. We've not tracked down the exact cause -- and there may - # be multiple -- but this should ensure submodules are checked out with the - # appropriate line endings. - - name: disable git crlf conversion - run: src/ci/scripts/disable-git-crlf-conversion.sh - - - name: ensure line endings are correct - run: src/ci/scripts/verify-line-endings.sh - - - name: ensure backported commits are in upstream branches - run: src/ci/scripts/verify-backported-commits.sh - - - name: ensure the stable version number is correct - run: src/ci/scripts/verify-stable-version-number.sh - - - name: run the build - # Redirect stderr to stdout to avoid reordering the two streams in the GHA logs. - run: src/ci/scripts/run-build-from-ci.sh 2>&1 - env: - AWS_ACCESS_KEY_ID: ${{ env.CACHES_AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets[format('AWS_SECRET_ACCESS_KEY_{0}', env.CACHES_AWS_ACCESS_KEY_ID)] }} - TOOLSTATE_REPO_ACCESS_TOKEN: ${{ secrets.TOOLSTATE_REPO_ACCESS_TOKEN }} - - - name: create github artifacts - run: src/ci/scripts/create-doc-artifacts.sh - - - name: upload artifacts to github - uses: actions/upload-artifact@v4 - with: - # name is set in previous step - name: ${{ env.DOC_ARTIFACT_NAME }} - path: obj/artifacts/doc - if-no-files-found: ignore - retention-days: 5 - - - name: upload artifacts to S3 - run: src/ci/scripts/upload-artifacts.sh - env: - AWS_ACCESS_KEY_ID: ${{ env.ARTIFACTS_AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets[format('AWS_SECRET_ACCESS_KEY_{0}', env.ARTIFACTS_AWS_ACCESS_KEY_ID)] }} - # Adding a condition on DEPLOY=1 or DEPLOY_ALT=1 is not needed as all deploy - # builders *should* have the AWS credentials available. 
Still, explicitly - # adding the condition is helpful as this way CI will not silently skip - # deploying artifacts from a dist builder if the variables are misconfigured, - # erroring about invalid credentials instead. - if: success() && (github.event_name == 'push' || env.DEPLOY == '1' || env.DEPLOY_ALT == '1') - - # This job isused to tell bors the final status of the build, as there is no practical way to detect - # when a workflow is successful listening to webhooks only in our current bors implementation (homu). - outcome: - name: bors build finished - runs-on: ubuntu-latest - needs: [ job ] - # !cancelled() executes the job regardless of whether the previous jobs passed or failed - if: "!cancelled() && github.event_name == 'push'" - steps: - - name: checkout the source code - uses: actions/checkout@v4 - with: - fetch-depth: 2 - # Calculate the exit status of the whole CI workflow (0 if all dependent jobs were either successful - # or skipped, otherwise 1). - - name: calculate the correct exit status - id: status - run: | - jq --exit-status 'all(.result == "success" or .result == "skipped")' <<< '${{ toJson(needs) }}' - echo "status=$?" >> $GITHUB_OUTPUT - # Publish the toolstate if an auto build succeeds (just before push to master) - - name: publish toolstate - run: src/ci/publish_toolstate.sh - shell: bash - if: steps.outputs.status == 0 && github.event_name == 'push' && github.ref == 'refs/heads/auto' && github.repository == 'rust-lang-ci/rust' - env: - TOOLSTATE_REPO_ACCESS_TOKEN: ${{ secrets.TOOLSTATE_REPO_ACCESS_TOKEN }} - - name: set the correct exit status - run: exit ${{ steps.outputs.status }} From 0b1ecf1d8d9318eedf68c855ac1112f0063dd495 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Ber=C3=A1nek?= Date: Thu, 25 Apr 2024 11:13:37 +0200 Subject: [PATCH 44/56] Remove the `expand-yaml-anchors` tool --- Cargo.lock | 34 --- Cargo.toml | 1 - src/bootstrap/src/core/build_steps/run.rs | 26 --- src/bootstrap/src/core/build_steps/test.rs | 35 ---- src/bootstrap/src/core/build_steps/tool.rs | 1 - src/bootstrap/src/core/builder.rs | 2 - src/bootstrap/src/core/config/flags.rs | 2 +- .../docker/host-x86_64/mingw-check/Dockerfile | 9 +- src/ci/github-actions/jobs.yml | 2 - src/tools/expand-yaml-anchors/Cargo.toml | 8 - src/tools/expand-yaml-anchors/src/main.rs | 198 ------------------ 11 files changed, 5 insertions(+), 313 deletions(-) delete mode 100644 src/tools/expand-yaml-anchors/Cargo.toml delete mode 100644 src/tools/expand-yaml-anchors/src/main.rs diff --git a/Cargo.lock b/Cargo.lock index ac6e7974a127e..05ec7bef318f6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1254,14 +1254,6 @@ dependencies = [ "mdbook", ] -[[package]] -name = "expand-yaml-anchors" -version = "0.1.0" -dependencies = [ - "yaml-merge-keys", - "yaml-rust", -] - [[package]] name = "expect-test" version = "1.5.0" @@ -2236,12 +2228,6 @@ dependencies = [ "regex", ] -[[package]] -name = "linked-hash-map" -version = "0.5.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" - [[package]] name = "lint-docs" version = "0.1.0" @@ -6547,26 +6533,6 @@ dependencies = [ "lzma-sys", ] -[[package]] -name = "yaml-merge-keys" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd236a7dc9bb598f349fe4a8754f49181fee50284daa15cd1ba652d722280004" -dependencies = [ - "lazy_static", - "thiserror", - "yaml-rust", -] - -[[package]] -name = "yaml-rust" -version = "0.4.5" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85" -dependencies = [ - "linked-hash-map", -] - [[package]] name = "yansi-term" version = "0.1.2" diff --git a/Cargo.toml b/Cargo.toml index 1379e4cd3bfdb..a601ebf4369e0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -31,7 +31,6 @@ members = [ "src/tools/miri/cargo-miri", "src/tools/rustdoc-themes", "src/tools/unicode-table-generator", - "src/tools/expand-yaml-anchors", "src/tools/jsondocck", "src/tools/jsondoclint", "src/tools/llvm-bitcode-linker", diff --git a/src/bootstrap/src/core/build_steps/run.rs b/src/bootstrap/src/core/build_steps/run.rs index 0a428ec5ca0a2..354eb2b600381 100644 --- a/src/bootstrap/src/core/build_steps/run.rs +++ b/src/bootstrap/src/core/build_steps/run.rs @@ -15,32 +15,6 @@ use crate::core::config::TargetSelection; use crate::utils::helpers::output; use crate::Mode; -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct ExpandYamlAnchors; - -impl Step for ExpandYamlAnchors { - type Output = (); - - /// Runs the `expand-yaml_anchors` tool. - /// - /// This tool in `src/tools` reads the CI configuration files written in YAML and expands the - /// anchors in them, since GitHub Actions doesn't support them. - fn run(self, builder: &Builder<'_>) { - builder.info("Expanding YAML anchors in the GitHub Actions configuration"); - builder.run_delaying_failure( - builder.tool_cmd(Tool::ExpandYamlAnchors).arg("generate").arg(&builder.src), - ); - } - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/tools/expand-yaml-anchors") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(ExpandYamlAnchors); - } -} - #[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct BuildManifest; diff --git a/src/bootstrap/src/core/build_steps/test.rs b/src/bootstrap/src/core/build_steps/test.rs index 411ed99532f7c..9a585a39e1eeb 100644 --- a/src/bootstrap/src/core/build_steps/test.rs +++ b/src/bootstrap/src/core/build_steps/test.rs @@ -1146,8 +1146,6 @@ HELP: to skip test's attempt to check tidiness, pass `--skip src/tools/tidy` to builder.info("tidy check"); builder.run_delaying_failure(&mut cmd); - builder.ensure(ExpandYamlAnchors); - builder.info("x.py completions check"); let [bash, zsh, fish, powershell] = ["x.py.sh", "x.py.zsh", "x.py.fish", "x.py.ps1"] .map(|filename| builder.src.join("src/etc/completions").join(filename)); @@ -1175,39 +1173,6 @@ HELP: to skip test's attempt to check tidiness, pass `--skip src/tools/tidy` to } } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct ExpandYamlAnchors; - -impl Step for ExpandYamlAnchors { - type Output = (); - const ONLY_HOSTS: bool = true; - - /// Ensure the `generate-ci-config` tool was run locally. - /// - /// The tool in `src/tools` reads the CI definition in `src/ci/builders.yml` and generates the - /// appropriate configuration for all our CI providers. This step ensures the tool was called - /// by the user before committing CI changes. 
- fn run(self, builder: &Builder<'_>) { - // NOTE: `.github/` is not included in dist-src tarballs - if !builder.src.join(".github/workflows/ci.yml").exists() { - builder.info("Skipping YAML anchors check: GitHub Actions config not found"); - return; - } - builder.info("Ensuring the YAML anchors in the GitHub Actions config were expanded"); - builder.run_delaying_failure( - builder.tool_cmd(Tool::ExpandYamlAnchors).arg("check").arg(&builder.src), - ); - } - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/tools/expand-yaml-anchors") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(ExpandYamlAnchors); - } -} - fn testdir(builder: &Builder<'_>, host: TargetSelection) -> PathBuf { builder.out.join(host.triple).join("test") } diff --git a/src/bootstrap/src/core/build_steps/tool.rs b/src/bootstrap/src/core/build_steps/tool.rs index 994f0bef0dc74..2e2c5e9e6f8b2 100644 --- a/src/bootstrap/src/core/build_steps/tool.rs +++ b/src/bootstrap/src/core/build_steps/tool.rs @@ -302,7 +302,6 @@ bootstrap_tool!( RemoteTestClient, "src/tools/remote-test-client", "remote-test-client"; RustInstaller, "src/tools/rust-installer", "rust-installer"; RustdocTheme, "src/tools/rustdoc-themes", "rustdoc-themes"; - ExpandYamlAnchors, "src/tools/expand-yaml-anchors", "expand-yaml-anchors"; LintDocs, "src/tools/lint-docs", "lint-docs"; JsonDocCk, "src/tools/jsondocck", "jsondocck"; JsonDocLint, "src/tools/jsondoclint", "jsondoclint"; diff --git a/src/bootstrap/src/core/builder.rs b/src/bootstrap/src/core/builder.rs index ae5440de57293..07488282983fa 100644 --- a/src/bootstrap/src/core/builder.rs +++ b/src/bootstrap/src/core/builder.rs @@ -791,7 +791,6 @@ impl<'a> Builder<'a> { ), Kind::Test => describe!( crate::core::build_steps::toolstate::ToolStateCheck, - test::ExpandYamlAnchors, test::Tidy, test::Ui, test::Crashes, @@ -933,7 +932,6 @@ impl<'a> Builder<'a> { install::Src, ), Kind::Run => describe!( - run::ExpandYamlAnchors, run::BuildManifest, run::BumpStage0, run::ReplaceVersionPlaceholder, diff --git a/src/bootstrap/src/core/config/flags.rs b/src/bootstrap/src/core/config/flags.rs index a5af87ad5c72b..f4ed7e76fba11 100644 --- a/src/bootstrap/src/core/config/flags.rs +++ b/src/bootstrap/src/core/config/flags.rs @@ -420,7 +420,7 @@ pub enum Subcommand { Arguments: This subcommand accepts a number of paths to tools to build and run. For example: - ./x.py run src/tools/expand-yaml-anchors + ./x.py run src/tools/bump-stage0 At least a tool needs to be called.")] /// Run tools contained in this repository Run { diff --git a/src/ci/docker/host-x86_64/mingw-check/Dockerfile b/src/ci/docker/host-x86_64/mingw-check/Dockerfile index 0eff7ca5962bb..d0da7d3066013 100644 --- a/src/ci/docker/host-x86_64/mingw-check/Dockerfile +++ b/src/ci/docker/host-x86_64/mingw-check/Dockerfile @@ -42,11 +42,10 @@ COPY host-x86_64/mingw-check/validate-error-codes.sh /scripts/ ENV RUN_CHECK_WITH_PARALLEL_QUERIES 1 -ENV SCRIPT python3 ../x.py --stage 2 test src/tools/expand-yaml-anchors && \ - # Check library crates on all tier 1 targets. - # We disable optimized compiler built-ins because that requires a C toolchain for the target. - # We also skip the x86_64-unknown-linux-gnu target as it is well-tested by other jobs. - python3 ../x.py check --stage 0 --set build.optimized-compiler-builtins=false core alloc std --target=aarch64-unknown-linux-gnu,i686-pc-windows-msvc,i686-unknown-linux-gnu,x86_64-apple-darwin,x86_64-pc-windows-gnu,x86_64-pc-windows-msvc && \ +# Check library crates on all tier 1 targets. 
+# We disable optimized compiler built-ins because that requires a C toolchain for the target. +# We also skip the x86_64-unknown-linux-gnu target as it is well-tested by other jobs. +ENV SCRIPT python3 ../x.py check --stage 0 --set build.optimized-compiler-builtins=false core alloc std --target=aarch64-unknown-linux-gnu,i686-pc-windows-msvc,i686-unknown-linux-gnu,x86_64-apple-darwin,x86_64-pc-windows-gnu,x86_64-pc-windows-msvc && \ python3 ../x.py check --target=i686-pc-windows-gnu --host=i686-pc-windows-gnu && \ python3 ../x.py clippy bootstrap -Dwarnings && \ python3 ../x.py clippy compiler library -Aclippy::all -Dclippy::correctness && \ diff --git a/src/ci/github-actions/jobs.yml b/src/ci/github-actions/jobs.yml index df6c5d6079d1a..1fabf889e38a1 100644 --- a/src/ci/github-actions/jobs.yml +++ b/src/ci/github-actions/jobs.yml @@ -1,7 +1,5 @@ # This file contains definitions of CI job parameters that are loaded # dynamically in CI from ci.yml. -# You *do not* need to re-run `src/tools/expand-yaml-anchors` when you -# modify this file. runners: - &base-job env: { } diff --git a/src/tools/expand-yaml-anchors/Cargo.toml b/src/tools/expand-yaml-anchors/Cargo.toml deleted file mode 100644 index 9a25b6c1f1c87..0000000000000 --- a/src/tools/expand-yaml-anchors/Cargo.toml +++ /dev/null @@ -1,8 +0,0 @@ -[package] -name = "expand-yaml-anchors" -version = "0.1.0" -edition = "2021" - -[dependencies] -yaml-rust = "0.4.3" -yaml-merge-keys = "0.4.0" diff --git a/src/tools/expand-yaml-anchors/src/main.rs b/src/tools/expand-yaml-anchors/src/main.rs deleted file mode 100644 index 60651734b9e30..0000000000000 --- a/src/tools/expand-yaml-anchors/src/main.rs +++ /dev/null @@ -1,198 +0,0 @@ -use std::error::Error; -use std::path::{Path, PathBuf}; -use yaml_rust::{Yaml, YamlEmitter, YamlLoader}; - -/// List of files to expand. The first tuple element is the source -/// file, while the second tuple element is the destination file. -#[rustfmt::skip] -static TO_EXPAND: &[(&str, &str)] = &[ - ("src/ci/github-actions/ci.yml", ".github/workflows/ci.yml"), -]; - -/// Name of a special key that will be removed from all the maps in expanded configuration files. -/// This key can then be used to contain shared anchors. -static REMOVE_MAP_KEY: &str = "x--expand-yaml-anchors--remove"; - -/// Message that will be included at the top of all the expanded files. {source} will be replaced -/// with the source filename relative to the base path. -static HEADER_MESSAGE: &str = "\ -############################################################# -# WARNING: automatically generated file, DO NOT CHANGE! # -############################################################# - -# This file was automatically generated by the expand-yaml-anchors tool. The -# source file that generated this one is: -# -# {source} -# -# Once you make changes to that file you need to run: -# -# ./x.py run src/tools/expand-yaml-anchors/ -# -# The CI build will fail if the tool is not run after changes to this file. 
- -"; - -enum Mode { - Check, - Generate, -} - -struct App { - mode: Mode, - base: PathBuf, -} - -impl App { - fn from_args() -> Result> { - // Parse CLI arguments - let args = std::env::args().skip(1).collect::>(); - let (mode, base) = match args.iter().map(|s| s.as_str()).collect::>().as_slice() { - ["generate", ref base] => (Mode::Generate, PathBuf::from(base)), - ["check", ref base] => (Mode::Check, PathBuf::from(base)), - _ => { - eprintln!("usage: expand-yaml-anchors "); - std::process::exit(1); - } - }; - - Ok(App { mode, base }) - } - - fn run(&self) -> Result<(), Box> { - for (source, dest) in TO_EXPAND { - let source = self.base.join(source); - let dest_path = self.base.join(dest); - - self.expand(&source, &dest_path).with_context(|| match self.mode { - Mode::Generate => format!( - "failed to expand {} into {}", - self.path(&source), - self.path(&dest_path) - ), - Mode::Check => format!( - "{} is not up to date; please run \ - `x.py run src/tools/expand-yaml-anchors`.", - self.path(&dest_path) - ), - })?; - } - Ok(()) - } - - fn expand(&self, source: &Path, dest: &Path) -> Result<(), Box> { - let content = std::fs::read_to_string(source) - .with_context(|| format!("failed to read {}", self.path(source)))?; - - let mut buf = - HEADER_MESSAGE.replace("{source}", &self.path(source).to_string().replace("\\", "/")); - - let documents = YamlLoader::load_from_str(&content) - .with_context(|| format!("failed to parse {}", self.path(source)))?; - for mut document in documents.into_iter() { - document = yaml_merge_keys::merge_keys(document) - .with_context(|| format!("failed to expand {}", self.path(source)))?; - document = filter_document(document); - - YamlEmitter::new(&mut buf).dump(&document).map_err(|err| WithContext { - context: "failed to serialize the expanded yaml".into(), - source: Box::new(err), - })?; - buf.push('\n'); - } - - match self.mode { - Mode::Check => { - let old = std::fs::read_to_string(dest) - .with_context(|| format!("failed to read {}", self.path(dest)))?; - if old != buf { - return Err(Box::new(StrError(format!( - "{} and {} are different", - self.path(source), - self.path(dest), - )))); - } - } - Mode::Generate => { - std::fs::write(dest, buf.as_bytes()) - .with_context(|| format!("failed to write to {}", self.path(dest)))?; - } - } - Ok(()) - } - - fn path<'a>(&self, path: &'a Path) -> impl std::fmt::Display + 'a { - path.strip_prefix(&self.base).unwrap_or(path).display() - } -} - -fn filter_document(document: Yaml) -> Yaml { - match document { - Yaml::Hash(map) => Yaml::Hash( - map.into_iter() - .filter(|(key, _)| { - if let Yaml::String(string) = &key { string != REMOVE_MAP_KEY } else { true } - }) - .map(|(key, value)| (filter_document(key), filter_document(value))) - .collect(), - ), - Yaml::Array(vec) => Yaml::Array(vec.into_iter().map(filter_document).collect()), - other => other, - } -} - -fn main() { - if let Err(err) = App::from_args().and_then(|app| app.run()) { - eprintln!("error: {}", err); - - let mut source = err.as_ref() as &dyn Error; - while let Some(err) = source.source() { - eprintln!("caused by: {}", err); - source = err; - } - - std::process::exit(1); - } -} - -#[derive(Debug)] -struct StrError(String); - -impl Error for StrError {} - -impl std::fmt::Display for StrError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - std::fmt::Display::fmt(&self.0, f) - } -} - -#[derive(Debug)] -struct WithContext { - context: String, - source: Box, -} - -impl std::fmt::Display for WithContext { - fn fmt(&self, f: &mut 
std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", self.context) - } -} - -impl Error for WithContext { - fn source(&self) -> Option<&(dyn Error + 'static)> { - Some(self.source.as_ref()) - } -} - -pub(crate) trait ResultExt { - fn with_context String>(self, f: F) -> Result>; -} - -impl>> ResultExt for Result { - fn with_context String>(self, f: F) -> Result> { - match self { - Ok(ok) => Ok(ok), - Err(err) => Err(WithContext { source: err.into(), context: f() }.into()), - } - } -} From 686baf55894eca650cdef1a476533d1f74662511 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Ber=C3=A1nek?= Date: Sat, 27 Apr 2024 11:51:31 +0200 Subject: [PATCH 45/56] Remove redundant `success()` condition --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6fdc8d4d72ddf..a62c18ddf6b88 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -243,6 +243,6 @@ jobs: - name: publish toolstate run: src/ci/publish_toolstate.sh shell: bash - if: success() && github.event_name == 'push' && github.ref == 'refs/heads/auto' && github.repository == 'rust-lang-ci/rust' + if: github.event_name == 'push' && github.ref == 'refs/heads/auto' && github.repository == 'rust-lang-ci/rust' env: TOOLSTATE_REPO_ACCESS_TOKEN: ${{ secrets.TOOLSTATE_REPO_ACCESS_TOKEN }} From 24bf3594a58d47b8d3bc65bb80ba7155fde48a40 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Ber=C3=A1nek?= Date: Sat, 27 Apr 2024 11:56:39 +0200 Subject: [PATCH 46/56] Rename `JobType` to `WorkflowRunType` --- src/ci/github-actions/calculate-job-matrix.py | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/src/ci/github-actions/calculate-job-matrix.py b/src/ci/github-actions/calculate-job-matrix.py index 124c22bd979f1..62cc867fac791 100755 --- a/src/ci/github-actions/calculate-job-matrix.py +++ b/src/ci/github-actions/calculate-job-matrix.py @@ -44,7 +44,7 @@ def add_base_env(jobs: List[Job], environment: Dict[str, str]) -> List[Job]: return jobs -class JobType(enum.Enum): +class WorkflowRunType(enum.Enum): PR = enum.auto() Try = enum.auto() Auto = enum.auto() @@ -57,9 +57,9 @@ class GitHubCtx: repository: str -def find_job_type(ctx: GitHubCtx) -> Optional[JobType]: +def find_run_type(ctx: GitHubCtx) -> Optional[WorkflowRunType]: if ctx.event_name == "pull_request": - return JobType.PR + return WorkflowRunType.PR elif ctx.event_name == "push": old_bors_try_build = ( ctx.ref in ("refs/heads/try", "refs/heads/try-perf") and @@ -72,20 +72,20 @@ def find_job_type(ctx: GitHubCtx) -> Optional[JobType]: try_build = old_bors_try_build or new_bors_try_build if try_build: - return JobType.Try + return WorkflowRunType.Try if ctx.ref == "refs/heads/auto" and ctx.repository == "rust-lang-ci/rust": - return JobType.Auto + return WorkflowRunType.Auto return None -def calculate_jobs(job_type: JobType, job_data: Dict[str, Any]) -> List[Job]: - if job_type == JobType.PR: +def calculate_jobs(run_type: WorkflowRunType, job_data: Dict[str, Any]) -> List[Job]: + if run_type == WorkflowRunType.PR: return add_base_env(name_jobs(job_data["pr"], "PR"), job_data["envs"]["pr"]) - elif job_type == JobType.Try: + elif run_type == WorkflowRunType.Try: return add_base_env(name_jobs(job_data["try"], "try"), job_data["envs"]["try"]) - elif job_type == JobType.Auto: + elif run_type == WorkflowRunType.Auto: return add_base_env(name_jobs(job_data["auto"], "auto"), job_data["envs"]["auto"]) return [] @@ -114,15 +114,15 @@ def 
get_github_ctx() -> GitHubCtx: github_ctx = get_github_ctx() - job_type = find_job_type(github_ctx) - logging.info(f"Job type: {job_type}") + run_type = find_run_type(github_ctx) + logging.info(f"Job type: {run_type}") with open(CI_DIR / "channel") as f: channel = f.read().strip() jobs = [] - if job_type is not None: - jobs = calculate_jobs(job_type, data) + if run_type is not None: + jobs = calculate_jobs(run_type, data) jobs = skip_jobs(jobs, channel) logging.info(f"Output:\n{yaml.dump(jobs, indent=4)}") print(f"jobs={json.dumps(jobs)}") From adbc84cfacc1d6b73864cdf62592f8db507b6ac3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Ber=C3=A1nek?= Date: Sat, 27 Apr 2024 12:02:05 +0200 Subject: [PATCH 47/56] Remove redundant `success` expressions from steps --- .github/workflows/ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a62c18ddf6b88..6f63bdce0aab0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -116,7 +116,7 @@ jobs: run: echo "[CI_PR_NUMBER=$num]" env: num: ${{ github.event.number }} - if: success() && github.event_name == 'pull_request' + if: github.event_name == 'pull_request' - name: add extra environment variables run: src/ci/scripts/setup-environment.sh env: @@ -219,7 +219,7 @@ jobs: # adding the condition is helpful as this way CI will not silently skip # deploying artifacts from a dist builder if the variables are misconfigured, # erroring about invalid credentials instead. - if: success() && (github.event_name == 'push' || env.DEPLOY == '1' || env.DEPLOY_ALT == '1') + if: github.event_name == 'push' || env.DEPLOY == '1' || env.DEPLOY_ALT == '1' # This job is used to tell bors the final status of the build, as there is no practical way to detect # when a workflow is successful listening to webhooks only in our current bors implementation (homu).
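For readers following the CI changes in the last few patches, the sketch below is an illustrative, self-contained restatement of the run-type classification that `src/ci/github-actions/calculate-job-matrix.py` converges on; it is not code from the patches themselves. The example `GitHubCtx` values are invented, and the real `find_run_type` contains additional checks on try builds that the hunks above only partially show, so they are elided here.

# Illustrative sketch only -- a simplified restatement of find_run_type from
# src/ci/github-actions/calculate-job-matrix.py; the example contexts are invented.
import enum
from dataclasses import dataclass
from typing import Optional


class WorkflowRunType(enum.Enum):
    PR = enum.auto()
    Try = enum.auto()
    Auto = enum.auto()


@dataclass
class GitHubCtx:
    event_name: str
    ref: str
    repository: str


def find_run_type(ctx: GitHubCtx) -> Optional[WorkflowRunType]:
    # Pull requests select the PR job set.
    if ctx.event_name == "pull_request":
        return WorkflowRunType.PR
    if ctx.event_name == "push":
        # Both the old bors try branches and the new automation branch count as try builds.
        if ctx.ref in ("refs/heads/try", "refs/heads/try-perf", "refs/heads/automation/bors/try"):
            return WorkflowRunType.Try
        if ctx.ref == "refs/heads/auto" and ctx.repository == "rust-lang-ci/rust":
            return WorkflowRunType.Auto
    # Anything else (for example a plain push to master) selects no jobs from the matrix.
    return None


if __name__ == "__main__":
    examples = [
        GitHubCtx("pull_request", "refs/pull/123456/merge", "rust-lang/rust"),
        GitHubCtx("push", "refs/heads/try", "rust-lang-ci/rust"),
        GitHubCtx("push", "refs/heads/auto", "rust-lang-ci/rust"),
        GitHubCtx("push", "refs/heads/master", "rust-lang-ci/rust"),
    ]
    for ctx in examples:
        print(f"{ctx.event_name:13} {ctx.ref:32} -> {find_run_type(ctx)}")

Run directly, this prints which of the PR, try, or auto job sets each example context would select; the next patch exposes the same decision to the workflow as a `run_type` output.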
From b194d5ce4460a843ba40cfc48d0073a455905711 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Ber=C3=A1nek?= Date: Sat, 27 Apr 2024 12:09:18 +0200 Subject: [PATCH 48/56] Output `run_type` from the matrix calculation job --- .github/workflows/ci.yml | 9 +++++---- src/ci/github-actions/calculate-job-matrix.py | 15 ++++++++++++++- 2 files changed, 19 insertions(+), 5 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6f63bdce0aab0..a2769a2c2749d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -47,6 +47,7 @@ jobs: runs-on: ubuntu-latest outputs: jobs: ${{ steps.jobs.outputs.jobs }} + run_type: ${{ steps.jobs.outputs.run_type }} steps: - name: Checkout the source code uses: actions/checkout@v4 @@ -116,7 +117,7 @@ jobs: run: echo "[CI_PR_NUMBER=$num]" env: num: ${{ github.event.number }} - if: github.event_name == 'pull_request' + if: needs.calculate_matrix.outputs.run_type == 'pr' - name: add extra environment variables run: src/ci/scripts/setup-environment.sh @@ -226,9 +227,9 @@ jobs: outcome: name: bors build finished runs-on: ubuntu-latest - needs: [ job ] + needs: [ calculate_matrix, job ] # !cancelled() executes the job regardless of whether the previous jobs passed or failed - if: "!cancelled() && github.event_name == 'push'" + if: ${{ !cancelled() && contains(fromJSON('["auto", "try"]'), needs.calculate_matrix.outputs.run_type) }} steps: - name: checkout the source code uses: actions/checkout@v4 @@ -243,6 +244,6 @@ jobs: - name: publish toolstate run: src/ci/publish_toolstate.sh shell: bash - if: github.event_name == 'push' && github.ref == 'refs/heads/auto' && github.repository == 'rust-lang-ci/rust' + if: needs.calculate_matrix.outputs.run_type == 'auto' env: TOOLSTATE_REPO_ACCESS_TOKEN: ${{ secrets.TOOLSTATE_REPO_ACCESS_TOKEN }} diff --git a/src/ci/github-actions/calculate-job-matrix.py b/src/ci/github-actions/calculate-job-matrix.py index 62cc867fac791..68565f489c939 100755 --- a/src/ci/github-actions/calculate-job-matrix.py +++ b/src/ci/github-actions/calculate-job-matrix.py @@ -106,6 +106,17 @@ def get_github_ctx() -> GitHubCtx: ) +def format_run_type(run_type: WorkflowRunType) -> str: + if run_type == WorkflowRunType.PR: + return "pr" + elif run_type == WorkflowRunType.Auto: + return "auto" + elif run_type == WorkflowRunType.Try: + return "try" + else: + raise AssertionError() + + if __name__ == "__main__": logging.basicConfig(level=logging.INFO) @@ -124,6 +135,8 @@ def get_github_ctx() -> GitHubCtx: if run_type is not None: jobs = calculate_jobs(run_type, data) jobs = skip_jobs(jobs, channel) + run_type = format_run_type(run_type) - logging.info(f"Output:\n{yaml.dump(jobs, indent=4)}") + logging.info(f"Output:\n{yaml.dump(dict(jobs=jobs, run_type=run_type), indent=4)}") print(f"jobs={json.dumps(jobs)}") + print(f"run_type={run_type}") From 13825dcc156ee848dcbd57f3c118e864fc158c56 Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Mon, 29 Apr 2024 11:16:21 -0400 Subject: [PATCH 49/56] Take proof trees by value in inspect goal --- .../rustc_middle/src/traits/solve/inspect.rs | 6 ++--- .../src/solve/inspect/analyse.rs | 27 +++++++++---------- 2 files changed, 15 insertions(+), 18 deletions(-) diff --git a/compiler/rustc_middle/src/traits/solve/inspect.rs b/compiler/rustc_middle/src/traits/solve/inspect.rs index 0f3ace2d55d38..70d5a08229150 100644 --- a/compiler/rustc_middle/src/traits/solve/inspect.rs +++ b/compiler/rustc_middle/src/traits/solve/inspect.rs @@ -60,14 +60,14 @@ pub struct GoalEvaluation<'tcx> { pub evaluation: 
CanonicalGoalEvaluation<'tcx>, } -#[derive(Eq, PartialEq)] +#[derive(Eq, PartialEq, Debug)] pub struct CanonicalGoalEvaluation<'tcx> { pub goal: CanonicalInput<'tcx>, pub kind: CanonicalGoalEvaluationKind<'tcx>, pub result: QueryResult<'tcx>, } -#[derive(Eq, PartialEq)] +#[derive(Eq, PartialEq, Debug)] pub enum CanonicalGoalEvaluationKind<'tcx> { Overflow, CycleInStack, @@ -86,7 +86,7 @@ pub struct AddedGoalsEvaluation<'tcx> { pub result: Result, } -#[derive(Eq, PartialEq)] +#[derive(Eq, PartialEq, Debug)] pub struct GoalEvaluationStep<'tcx> { pub instantiated_goal: QueryInput<'tcx, ty::Predicate<'tcx>>, diff --git a/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs b/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs index 2d58111e80385..45b0e8d2eb26e 100644 --- a/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs +++ b/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs @@ -34,9 +34,9 @@ pub struct InspectConfig { pub struct InspectGoal<'a, 'tcx> { infcx: &'a InferCtxt<'tcx>, depth: usize, - orig_values: &'a [ty::GenericArg<'tcx>], + orig_values: Vec>, goal: Goal<'tcx, ty::Predicate<'tcx>>, - evaluation: &'a inspect::GoalEvaluation<'tcx>, + evaluation: inspect::CanonicalGoalEvaluation<'tcx>, } pub struct InspectCandidate<'a, 'tcx> { @@ -139,7 +139,7 @@ impl<'a, 'tcx> InspectCandidate<'a, 'tcx> { try_visit!(visitor.visit_goal(&InspectGoal::new( infcx, self.goal.depth + 1, - &proof_tree.unwrap(), + proof_tree.unwrap(), ))); } } @@ -164,7 +164,7 @@ impl<'a, 'tcx> InspectGoal<'a, 'tcx> { } pub fn result(&self) -> Result { - self.evaluation.evaluation.result.map(|c| c.value.certainty) + self.evaluation.result.map(|c| c.value.certainty) } fn candidates_recur( @@ -221,7 +221,7 @@ impl<'a, 'tcx> InspectGoal<'a, 'tcx> { pub fn candidates(&'a self) -> Vec> { let mut candidates = vec![]; - let last_eval_step = match self.evaluation.evaluation.kind { + let last_eval_step = match self.evaluation.kind { inspect::CanonicalGoalEvaluationKind::Overflow | inspect::CanonicalGoalEvaluationKind::CycleInStack | inspect::CanonicalGoalEvaluationKind::ProvisionalCacheHit => { @@ -254,18 +254,15 @@ impl<'a, 'tcx> InspectGoal<'a, 'tcx> { candidates.pop().filter(|_| candidates.is_empty()) } - fn new( - infcx: &'a InferCtxt<'tcx>, - depth: usize, - root: &'a inspect::GoalEvaluation<'tcx>, - ) -> Self { - match root.kind { - inspect::GoalEvaluationKind::Root { ref orig_values } => InspectGoal { + fn new(infcx: &'a InferCtxt<'tcx>, depth: usize, root: inspect::GoalEvaluation<'tcx>) -> Self { + let inspect::GoalEvaluation { uncanonicalized_goal, kind, evaluation } = root; + match kind { + inspect::GoalEvaluationKind::Root { orig_values } => InspectGoal { infcx, depth, orig_values, - goal: root.uncanonicalized_goal.fold_with(&mut EagerResolver::new(infcx)), - evaluation: root, + goal: uncanonicalized_goal.fold_with(&mut EagerResolver::new(infcx)), + evaluation, }, inspect::GoalEvaluationKind::Nested { .. 
} => unreachable!(), } @@ -294,6 +291,6 @@ impl<'tcx> InferCtxt<'tcx> { ) -> V::Result { let (_, proof_tree) = self.evaluate_root_goal(goal, GenerateProofTree::Yes); let proof_tree = proof_tree.unwrap(); - visitor.visit_goal(&InspectGoal::new(self, 0, &proof_tree)) + visitor.visit_goal(&InspectGoal::new(self, 0, proof_tree)) } } From 7597d1504e144e4bbfec9032b7c81e161d5fb1c5 Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Mon, 29 Apr 2024 11:52:51 -0400 Subject: [PATCH 50/56] Split out instantiate_nested_goals --- .../src/solve/inspect/analyse.rs | 79 ++++++++++--------- 1 file changed, 43 insertions(+), 36 deletions(-) diff --git a/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs b/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs index 45b0e8d2eb26e..cba3262017580 100644 --- a/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs +++ b/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs @@ -57,6 +57,10 @@ impl<'a, 'tcx> InspectCandidate<'a, 'tcx> { self.result.map(|c| c.value.certainty) } + pub fn goal(&self) -> &'a InspectGoal<'a, 'tcx> { + self.goal + } + /// Certainty passed into `evaluate_added_goals_and_make_canonical_response`. /// /// If this certainty is `Yes`, then we must be confident that the candidate @@ -74,46 +78,55 @@ impl<'a, 'tcx> InspectCandidate<'a, 'tcx> { /// the state of the `infcx`. pub fn visit_nested_no_probe>(&self, visitor: &mut V) -> V::Result { if self.goal.depth < visitor.config().max_depth { - let infcx = self.goal.infcx; - let param_env = self.goal.goal.param_env; - let mut orig_values = self.goal.orig_values.to_vec(); - let mut instantiated_goals = vec![]; - for goal in &self.nested_goals { - let goal = canonical::instantiate_canonical_state( + for goal in self.instantiate_nested_goals(visitor.span()) { + try_visit!(visitor.visit_goal(&goal)); + } + } + + V::Result::output() + } + + /// Instantiate the nested goals for the candidate without rolling back their + /// inference constraints. This function modifies the state of the `infcx`. 
+ pub fn instantiate_nested_goals(&self, span: Span) -> Vec> { + let infcx = self.goal.infcx; + let param_env = self.goal.goal.param_env; + let mut orig_values = self.goal.orig_values.to_vec(); + let instantiated_goals: Vec<_> = self + .nested_goals + .iter() + .map(|goal| { + canonical::instantiate_canonical_state( infcx, - visitor.span(), + span, param_env, &mut orig_values, *goal, - ); - instantiated_goals.push(goal); - } + ) + }) + .collect(); - let () = canonical::instantiate_canonical_state( - infcx, - visitor.span(), - param_env, - &mut orig_values, - self.final_state, - ); + let () = canonical::instantiate_canonical_state( + infcx, + span, + param_env, + &mut orig_values, + self.final_state, + ); - for &goal in &instantiated_goals { + instantiated_goals + .into_iter() + .map(|goal| { let proof_tree = match goal.predicate.kind().no_bound_vars() { Some(ty::PredicateKind::NormalizesTo(ty::NormalizesTo { alias, term })) => { let unconstrained_term = match term.unpack() { ty::TermKind::Ty(_) => infcx - .next_ty_var(TypeVariableOrigin { - param_def_id: None, - span: visitor.span(), - }) + .next_ty_var(TypeVariableOrigin { param_def_id: None, span }) .into(), ty::TermKind::Const(ct) => infcx .next_const_var( ct.ty(), - ConstVariableOrigin { - param_def_id: None, - span: visitor.span(), - }, + ConstVariableOrigin { param_def_id: None, span }, ) .into(), }; @@ -129,22 +142,16 @@ impl<'a, 'tcx> InspectCandidate<'a, 'tcx> { }) .1; let InferOk { value: (), obligations: _ } = infcx - .at(&ObligationCause::dummy(), param_env) + .at(&ObligationCause::dummy_with_span(span), param_env) .eq(DefineOpaqueTypes::Yes, term, unconstrained_term) .unwrap(); proof_tree } _ => infcx.evaluate_root_goal(goal, GenerateProofTree::Yes).1, }; - try_visit!(visitor.visit_goal(&InspectGoal::new( - infcx, - self.goal.depth + 1, - proof_tree.unwrap(), - ))); - } - } - - V::Result::output() + InspectGoal::new(infcx, self.goal.depth + 1, proof_tree.unwrap()) + }) + .collect() } /// Visit all nested goals of this candidate, rolling back From 8101884b37473a4aee62dc7553c8f6373d31c5f7 Mon Sep 17 00:00:00 2001 From: Matthew Maurer Date: Mon, 29 Apr 2024 23:54:27 +0000 Subject: [PATCH 51/56] codegen tests: Tolerate `range()` qualifications in enum tests Current LLVM can infer range bounds on the i8s involved with these tests, and annotates it. Accept these bounds if present. 
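As an aside (not part of the original commit): the `{{( range\(i8 [0-9]+, [0-9]+\))?}}` fragment added to the CHECK lines below is an inline FileCheck regular expression, so the updated directives accept the `define` line both with and without an inferred range attribute. The short Python snippet makes that concrete; the two sample lines are invented stand-ins for real LLVM output.

# Not part of the commit: shows that the regex used in the updated CHECK lines
# matches the `define` line both with and without a range() attribute.
# The two sample lines are invented stand-ins for LLVM output.
import re

pattern = re.compile(r"define noundef( range\(i8 [0-9]+, [0-9]+\))? i8 @match0")

samples = [
    "define noundef i8 @match0(i8 noundef %0) {",                  # no range annotation
    "define noundef range(i8 0, 3) i8 @match0(i8 noundef %0) {",   # LLVM that infers a range
]
for line in samples:
    print(bool(pattern.match(line)), line)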
--- tests/codegen/enum/enum-match.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/codegen/enum/enum-match.rs b/tests/codegen/enum/enum-match.rs index f1c40f6695be1..ced26c0a43408 100644 --- a/tests/codegen/enum/enum-match.rs +++ b/tests/codegen/enum/enum-match.rs @@ -11,7 +11,7 @@ pub enum Enum0 { B, } -// CHECK: define noundef i8 @match0{{.*}} +// CHECK: define noundef{{( range\(i8 [0-9]+, [0-9]+\))?}} i8 @match0{{.*}} // CHECK-NEXT: start: // CHECK-NEXT: %1 = icmp eq i8 %0, 2 // CHECK-NEXT: %2 = and i8 %0, 1 @@ -32,7 +32,7 @@ pub enum Enum1 { C, } -// CHECK: define noundef i8 @match1{{.*}} +// CHECK: define noundef{{( range\(i8 [0-9]+, [0-9]+\))?}} i8 @match1{{.*}} // CHECK-NEXT: start: // CHECK-NEXT: %1 = add i8 %0, -2 // CHECK-NEXT: %2 = zext i8 %1 to i64 @@ -91,7 +91,7 @@ pub enum Enum2 { E, } -// CHECK: define noundef i8 @match2{{.*}} +// CHECK: define noundef{{( range\(i8 [0-9]+, [0-9]+\))?}} i8 @match2{{.*}} // CHECK-NEXT: start: // CHECK-NEXT: %1 = add i8 %0, 2 // CHECK-NEXT: %2 = zext i8 %1 to i64 From 7d1c6af3dc408c1f9811e6bd576804d544bfa90d Mon Sep 17 00:00:00 2001 From: Zalathar Date: Mon, 29 Apr 2024 22:25:09 +1000 Subject: [PATCH 52/56] coverage: Prepare to split `spans.rs` into two files --- .../rustc_mir_transform/src/coverage/mod.rs | 1 - .../rustc_mir_transform/src/coverage/spans.rs | 53 ++++++++++--------- .../src/coverage/spans/from_mir.rs | 2 +- 3 files changed, 30 insertions(+), 26 deletions(-) diff --git a/compiler/rustc_mir_transform/src/coverage/mod.rs b/compiler/rustc_mir_transform/src/coverage/mod.rs index 159c099fac507..0300232019bc1 100644 --- a/compiler/rustc_mir_transform/src/coverage/mod.rs +++ b/compiler/rustc_mir_transform/src/coverage/mod.rs @@ -3,7 +3,6 @@ pub mod query; mod counters; mod graph; mod spans; - #[cfg(test)] mod tests; diff --git a/compiler/rustc_mir_transform/src/coverage/spans.rs b/compiler/rustc_mir_transform/src/coverage/spans.rs index b64b1212cecc6..d27ce2fc317a6 100644 --- a/compiler/rustc_mir_transform/src/coverage/spans.rs +++ b/compiler/rustc_mir_transform/src/coverage/spans.rs @@ -1,12 +1,13 @@ +use std::collections::BTreeSet; + use rustc_data_structures::graph::DirectedGraph; use rustc_index::bit_set::BitSet; use rustc_middle::mir; use rustc_middle::mir::coverage::ConditionInfo; use rustc_span::{BytePos, Span}; -use std::collections::BTreeSet; use crate::coverage::graph::{BasicCoverageBlock, CoverageGraph, START_BCB}; -use crate::coverage::spans::from_mir::SpanFromMir; +use crate::coverage::spans::from_mir::{extract_branch_pairs, extract_mcdc_mappings, SpanFromMir}; use crate::coverage::ExtractedHirInfo; mod from_mir; @@ -91,28 +92,11 @@ pub(super) fn generate_coverage_spans( mappings.push(BcbMapping { kind: BcbMappingKind::Code(START_BCB), span }); } } else { - let sorted_spans = from_mir::mir_to_initial_sorted_coverage_spans( - mir_body, - hir_info, - basic_coverage_blocks, - ); - let coverage_spans = SpansRefiner::refine_sorted_spans(sorted_spans); - mappings.extend(coverage_spans.into_iter().map(|RefinedCovspan { bcb, span, .. }| { - // Each span produced by the generator represents an ordinary code region. 
- BcbMapping { kind: BcbMappingKind::Code(bcb), span } - })); - - branch_pairs.extend(from_mir::extract_branch_pairs( - mir_body, - hir_info, - basic_coverage_blocks, - )); - - mappings.extend(from_mir::extract_mcdc_mappings( - mir_body, - hir_info.body_span, - basic_coverage_blocks, - )); + extract_refined_covspans(mir_body, hir_info, basic_coverage_blocks, &mut mappings); + + branch_pairs.extend(extract_branch_pairs(mir_body, hir_info, basic_coverage_blocks)); + + mappings.extend(extract_mcdc_mappings(mir_body, hir_info.body_span, basic_coverage_blocks)); } if mappings.is_empty() && branch_pairs.is_empty() { @@ -149,6 +133,27 @@ pub(super) fn generate_coverage_spans( Some(CoverageSpans { bcb_has_mappings, mappings, branch_pairs, test_vector_bitmap_bytes }) } +#[allow(unused_imports)] // Remove this line during the actual split. +// FIXME(#124545) It's awkward that we have to re-export this, because it's an +// internal detail of `from_mir` that is also needed when handling branch and +// MC/DC spans. Ideally we would find a more natural home for it. +pub(super) use from_mir::unexpand_into_body_span_with_visible_macro; + +pub(super) fn extract_refined_covspans( + mir_body: &mir::Body<'_>, + hir_info: &ExtractedHirInfo, + basic_coverage_blocks: &CoverageGraph, + mappings: &mut impl Extend, +) { + let sorted_spans = + from_mir::mir_to_initial_sorted_coverage_spans(mir_body, hir_info, basic_coverage_blocks); + let coverage_spans = SpansRefiner::refine_sorted_spans(sorted_spans); + mappings.extend(coverage_spans.into_iter().map(|RefinedCovspan { bcb, span, .. }| { + // Each span produced by the generator represents an ordinary code region. + BcbMapping { kind: BcbMappingKind::Code(bcb), span } + })); +} + #[derive(Debug)] struct CurrCovspan { span: Span, diff --git a/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs b/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs index 142dfc17fb8f1..2b1e0b1f4bb56 100644 --- a/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs +++ b/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs @@ -287,7 +287,7 @@ fn filtered_terminator_span(terminator: &Terminator<'_>) -> Option { /// /// [^1]Expansions result from Rust syntax including macros, syntactic sugar, /// etc.). 
-fn unexpand_into_body_span_with_visible_macro( +pub(crate) fn unexpand_into_body_span_with_visible_macro( original_span: Span, body_span: Span, ) -> Option<(Span, Option)> { From ba87e5bb3e2d6f086d0f5a6155fffc127be0da4e Mon Sep 17 00:00:00 2001 From: Zalathar Date: Mon, 29 Apr 2024 22:25:09 +1000 Subject: [PATCH 53/56] coverage: Split off `mappings.rs` from `spans.rs` and `from_mir.rs` --- .../src/coverage/mappings.rs | 275 ++++++++++++++++++ .../rustc_mir_transform/src/coverage/mod.rs | 5 +- .../rustc_mir_transform/src/coverage/spans.rs | 132 +-------- .../src/coverage/spans/from_mir.rs | 146 +--------- 4 files changed, 283 insertions(+), 275 deletions(-) create mode 100644 compiler/rustc_mir_transform/src/coverage/mappings.rs diff --git a/compiler/rustc_mir_transform/src/coverage/mappings.rs b/compiler/rustc_mir_transform/src/coverage/mappings.rs new file mode 100644 index 0000000000000..d364658efb6d6 --- /dev/null +++ b/compiler/rustc_mir_transform/src/coverage/mappings.rs @@ -0,0 +1,275 @@ +use std::collections::BTreeSet; + +use rustc_data_structures::graph::DirectedGraph; +use rustc_index::bit_set::BitSet; +use rustc_middle::mir::coverage::{ + BlockMarkerId, BranchSpan, ConditionInfo, CoverageKind, MCDCBranchSpan, MCDCDecisionSpan, +}; +use rustc_middle::mir::{self, BasicBlock, StatementKind}; +use rustc_span::Span; + +use crate::coverage::graph::{BasicCoverageBlock, CoverageGraph, START_BCB}; +use crate::coverage::spans::{ + extract_refined_covspans, unexpand_into_body_span_with_visible_macro, +}; +use crate::coverage::ExtractedHirInfo; +use rustc_index::IndexVec; + +#[derive(Clone, Debug)] +pub(super) enum BcbMappingKind { + /// Associates an ordinary executable code span with its corresponding BCB. + Code(BasicCoverageBlock), + + // Ordinary branch mappings are stored separately, so they don't have a + // variant in this enum. + // + /// Associates a mcdc branch span with condition info besides fields for normal branch. + MCDCBranch { + true_bcb: BasicCoverageBlock, + false_bcb: BasicCoverageBlock, + /// If `None`, this actually represents a normal branch mapping inserted + /// for code that was too complex for MC/DC. + condition_info: Option, + decision_depth: u16, + }, + /// Associates a mcdc decision with its join BCB. + MCDCDecision { + end_bcbs: BTreeSet, + bitmap_idx: u32, + conditions_num: u16, + decision_depth: u16, + }, +} + +#[derive(Debug)] +pub(super) struct BcbMapping { + pub(super) kind: BcbMappingKind, + pub(super) span: Span, +} + +/// This is separate from [`BcbMappingKind`] to help prepare for larger changes +/// that will be needed for improved branch coverage in the future. +/// (See .) +#[derive(Debug)] +pub(super) struct BcbBranchPair { + pub(super) span: Span, + pub(super) true_bcb: BasicCoverageBlock, + pub(super) false_bcb: BasicCoverageBlock, +} + +pub(super) struct CoverageSpans { + bcb_has_mappings: BitSet, + pub(super) mappings: Vec, + pub(super) branch_pairs: Vec, + test_vector_bitmap_bytes: u32, +} + +impl CoverageSpans { + pub(super) fn bcb_has_coverage_spans(&self, bcb: BasicCoverageBlock) -> bool { + self.bcb_has_mappings.contains(bcb) + } + + pub(super) fn test_vector_bitmap_bytes(&self) -> u32 { + self.test_vector_bitmap_bytes + } +} + +/// Extracts coverage-relevant spans from MIR, and associates them with +/// their corresponding BCBs. +/// +/// Returns `None` if no coverage-relevant spans could be extracted. 
+pub(super) fn generate_coverage_spans( + mir_body: &mir::Body<'_>, + hir_info: &ExtractedHirInfo, + basic_coverage_blocks: &CoverageGraph, +) -> Option { + let mut mappings = vec![]; + let mut branch_pairs = vec![]; + + if hir_info.is_async_fn { + // An async function desugars into a function that returns a future, + // with the user code wrapped in a closure. Any spans in the desugared + // outer function will be unhelpful, so just keep the signature span + // and ignore all of the spans in the MIR body. + if let Some(span) = hir_info.fn_sig_span_extended { + mappings.push(BcbMapping { kind: BcbMappingKind::Code(START_BCB), span }); + } + } else { + extract_refined_covspans(mir_body, hir_info, basic_coverage_blocks, &mut mappings); + + branch_pairs.extend(extract_branch_pairs(mir_body, hir_info, basic_coverage_blocks)); + + mappings.extend(extract_mcdc_mappings(mir_body, hir_info.body_span, basic_coverage_blocks)); + } + + if mappings.is_empty() && branch_pairs.is_empty() { + return None; + } + + // Identify which BCBs have one or more mappings. + let mut bcb_has_mappings = BitSet::new_empty(basic_coverage_blocks.num_nodes()); + let mut insert = |bcb| { + bcb_has_mappings.insert(bcb); + }; + let mut test_vector_bitmap_bytes = 0; + for BcbMapping { kind, span: _ } in &mappings { + match *kind { + BcbMappingKind::Code(bcb) => insert(bcb), + BcbMappingKind::MCDCBranch { true_bcb, false_bcb, .. } => { + insert(true_bcb); + insert(false_bcb); + } + BcbMappingKind::MCDCDecision { bitmap_idx, conditions_num, .. } => { + // `bcb_has_mappings` is used for inject coverage counters + // but they are not needed for decision BCBs. + // While the length of test vector bitmap should be calculated here. + test_vector_bitmap_bytes = test_vector_bitmap_bytes + .max(bitmap_idx + (1_u32 << conditions_num as u32).div_ceil(8)); + } + } + } + for &BcbBranchPair { true_bcb, false_bcb, .. } in &branch_pairs { + insert(true_bcb); + insert(false_bcb); + } + + Some(CoverageSpans { bcb_has_mappings, mappings, branch_pairs, test_vector_bitmap_bytes }) +} + +fn resolve_block_markers( + branch_info: &mir::coverage::BranchInfo, + mir_body: &mir::Body<'_>, +) -> IndexVec> { + let mut block_markers = IndexVec::>::from_elem_n( + None, + branch_info.num_block_markers, + ); + + // Fill out the mapping from block marker IDs to their enclosing blocks. + for (bb, data) in mir_body.basic_blocks.iter_enumerated() { + for statement in &data.statements { + if let StatementKind::Coverage(CoverageKind::BlockMarker { id }) = statement.kind { + block_markers[id] = Some(bb); + } + } + } + + block_markers +} + +// FIXME: There is currently a lot of redundancy between +// `extract_branch_pairs` and `extract_mcdc_mappings`. This is needed so +// that they can each be modified without interfering with the other, but in +// the long term we should try to bring them together again when branch coverage +// and MC/DC coverage support are more mature. + +pub(super) fn extract_branch_pairs( + mir_body: &mir::Body<'_>, + hir_info: &ExtractedHirInfo, + basic_coverage_blocks: &CoverageGraph, +) -> Vec { + let Some(branch_info) = mir_body.coverage_branch_info.as_deref() else { return vec![] }; + + let block_markers = resolve_block_markers(branch_info, mir_body); + + branch_info + .branch_spans + .iter() + .filter_map(|&BranchSpan { span: raw_span, true_marker, false_marker }| { + // For now, ignore any branch span that was introduced by + // expansion. This makes things like assert macros less noisy. 
+ if !raw_span.ctxt().outer_expn_data().is_root() { + return None; + } + let (span, _) = + unexpand_into_body_span_with_visible_macro(raw_span, hir_info.body_span)?; + + let bcb_from_marker = + |marker: BlockMarkerId| basic_coverage_blocks.bcb_from_bb(block_markers[marker]?); + + let true_bcb = bcb_from_marker(true_marker)?; + let false_bcb = bcb_from_marker(false_marker)?; + + Some(BcbBranchPair { span, true_bcb, false_bcb }) + }) + .collect::>() +} + +pub(super) fn extract_mcdc_mappings( + mir_body: &mir::Body<'_>, + body_span: Span, + basic_coverage_blocks: &CoverageGraph, +) -> Vec { + let Some(branch_info) = mir_body.coverage_branch_info.as_deref() else { + return vec![]; + }; + + let block_markers = resolve_block_markers(branch_info, mir_body); + + let bcb_from_marker = + |marker: BlockMarkerId| basic_coverage_blocks.bcb_from_bb(block_markers[marker]?); + + let check_branch_bcb = + |raw_span: Span, true_marker: BlockMarkerId, false_marker: BlockMarkerId| { + // For now, ignore any branch span that was introduced by + // expansion. This makes things like assert macros less noisy. + if !raw_span.ctxt().outer_expn_data().is_root() { + return None; + } + let (span, _) = unexpand_into_body_span_with_visible_macro(raw_span, body_span)?; + + let true_bcb = bcb_from_marker(true_marker)?; + let false_bcb = bcb_from_marker(false_marker)?; + Some((span, true_bcb, false_bcb)) + }; + + let mcdc_branch_filter_map = |&MCDCBranchSpan { + span: raw_span, + true_marker, + false_marker, + condition_info, + decision_depth, + }| { + check_branch_bcb(raw_span, true_marker, false_marker).map(|(span, true_bcb, false_bcb)| { + BcbMapping { + kind: BcbMappingKind::MCDCBranch { + true_bcb, + false_bcb, + condition_info, + decision_depth, + }, + span, + } + }) + }; + + let mut next_bitmap_idx = 0; + + let decision_filter_map = |decision: &MCDCDecisionSpan| { + let (span, _) = unexpand_into_body_span_with_visible_macro(decision.span, body_span)?; + + let end_bcbs = decision + .end_markers + .iter() + .map(|&marker| bcb_from_marker(marker)) + .collect::>()?; + + let bitmap_idx = next_bitmap_idx; + next_bitmap_idx += (1_u32 << decision.conditions_num).div_ceil(8); + + Some(BcbMapping { + kind: BcbMappingKind::MCDCDecision { + end_bcbs, + bitmap_idx, + conditions_num: decision.conditions_num as u16, + decision_depth: decision.decision_depth, + }, + span, + }) + }; + + std::iter::empty() + .chain(branch_info.mcdc_branch_spans.iter().filter_map(mcdc_branch_filter_map)) + .chain(branch_info.mcdc_decision_spans.iter().filter_map(decision_filter_map)) + .collect::>() +} diff --git a/compiler/rustc_mir_transform/src/coverage/mod.rs b/compiler/rustc_mir_transform/src/coverage/mod.rs index 0300232019bc1..9edde6662469e 100644 --- a/compiler/rustc_mir_transform/src/coverage/mod.rs +++ b/compiler/rustc_mir_transform/src/coverage/mod.rs @@ -2,13 +2,14 @@ pub mod query; mod counters; mod graph; +mod mappings; mod spans; #[cfg(test)] mod tests; use self::counters::{CounterIncrementSite, CoverageCounters}; use self::graph::{BasicCoverageBlock, CoverageGraph}; -use self::spans::{BcbBranchPair, BcbMapping, BcbMappingKind, CoverageSpans}; +use self::mappings::{BcbBranchPair, BcbMapping, BcbMappingKind, CoverageSpans}; use crate::MirPass; @@ -70,7 +71,7 @@ fn instrument_function_for_coverage<'tcx>(tcx: TyCtxt<'tcx>, mir_body: &mut mir: //////////////////////////////////////////////////// // Compute coverage spans from the `CoverageGraph`. 
let Some(coverage_spans) = - spans::generate_coverage_spans(mir_body, &hir_info, &basic_coverage_blocks) + mappings::generate_coverage_spans(mir_body, &hir_info, &basic_coverage_blocks) else { // No relevant spans were found in MIR, so skip instrumenting this function. return; diff --git a/compiler/rustc_mir_transform/src/coverage/spans.rs b/compiler/rustc_mir_transform/src/coverage/spans.rs index d27ce2fc317a6..d6432e2e9d41c 100644 --- a/compiler/rustc_mir_transform/src/coverage/spans.rs +++ b/compiler/rustc_mir_transform/src/coverage/spans.rs @@ -1,139 +1,13 @@ -use std::collections::BTreeSet; - -use rustc_data_structures::graph::DirectedGraph; -use rustc_index::bit_set::BitSet; use rustc_middle::mir; -use rustc_middle::mir::coverage::ConditionInfo; use rustc_span::{BytePos, Span}; -use crate::coverage::graph::{BasicCoverageBlock, CoverageGraph, START_BCB}; -use crate::coverage::spans::from_mir::{extract_branch_pairs, extract_mcdc_mappings, SpanFromMir}; +use crate::coverage::graph::{BasicCoverageBlock, CoverageGraph}; +use crate::coverage::mappings::{BcbMapping, BcbMappingKind}; +use crate::coverage::spans::from_mir::SpanFromMir; use crate::coverage::ExtractedHirInfo; mod from_mir; -#[derive(Clone, Debug)] -pub(super) enum BcbMappingKind { - /// Associates an ordinary executable code span with its corresponding BCB. - Code(BasicCoverageBlock), - - // Ordinary branch mappings are stored separately, so they don't have a - // variant in this enum. - // - /// Associates a mcdc branch span with condition info besides fields for normal branch. - MCDCBranch { - true_bcb: BasicCoverageBlock, - false_bcb: BasicCoverageBlock, - /// If `None`, this actually represents a normal branch mapping inserted - /// for code that was too complex for MC/DC. - condition_info: Option, - decision_depth: u16, - }, - /// Associates a mcdc decision with its join BCB. - MCDCDecision { - end_bcbs: BTreeSet, - bitmap_idx: u32, - conditions_num: u16, - decision_depth: u16, - }, -} - -#[derive(Debug)] -pub(super) struct BcbMapping { - pub(super) kind: BcbMappingKind, - pub(super) span: Span, -} - -/// This is separate from [`BcbMappingKind`] to help prepare for larger changes -/// that will be needed for improved branch coverage in the future. -/// (See .) -#[derive(Debug)] -pub(super) struct BcbBranchPair { - pub(super) span: Span, - pub(super) true_bcb: BasicCoverageBlock, - pub(super) false_bcb: BasicCoverageBlock, -} - -pub(super) struct CoverageSpans { - bcb_has_mappings: BitSet, - pub(super) mappings: Vec, - pub(super) branch_pairs: Vec, - test_vector_bitmap_bytes: u32, -} - -impl CoverageSpans { - pub(super) fn bcb_has_coverage_spans(&self, bcb: BasicCoverageBlock) -> bool { - self.bcb_has_mappings.contains(bcb) - } - - pub(super) fn test_vector_bitmap_bytes(&self) -> u32 { - self.test_vector_bitmap_bytes - } -} - -/// Extracts coverage-relevant spans from MIR, and associates them with -/// their corresponding BCBs. -/// -/// Returns `None` if no coverage-relevant spans could be extracted. -pub(super) fn generate_coverage_spans( - mir_body: &mir::Body<'_>, - hir_info: &ExtractedHirInfo, - basic_coverage_blocks: &CoverageGraph, -) -> Option { - let mut mappings = vec![]; - let mut branch_pairs = vec![]; - - if hir_info.is_async_fn { - // An async function desugars into a function that returns a future, - // with the user code wrapped in a closure. Any spans in the desugared - // outer function will be unhelpful, so just keep the signature span - // and ignore all of the spans in the MIR body. 
- if let Some(span) = hir_info.fn_sig_span_extended { - mappings.push(BcbMapping { kind: BcbMappingKind::Code(START_BCB), span }); - } - } else { - extract_refined_covspans(mir_body, hir_info, basic_coverage_blocks, &mut mappings); - - branch_pairs.extend(extract_branch_pairs(mir_body, hir_info, basic_coverage_blocks)); - - mappings.extend(extract_mcdc_mappings(mir_body, hir_info.body_span, basic_coverage_blocks)); - } - - if mappings.is_empty() && branch_pairs.is_empty() { - return None; - } - - // Identify which BCBs have one or more mappings. - let mut bcb_has_mappings = BitSet::new_empty(basic_coverage_blocks.num_nodes()); - let mut insert = |bcb| { - bcb_has_mappings.insert(bcb); - }; - let mut test_vector_bitmap_bytes = 0; - for BcbMapping { kind, span: _ } in &mappings { - match *kind { - BcbMappingKind::Code(bcb) => insert(bcb), - BcbMappingKind::MCDCBranch { true_bcb, false_bcb, .. } => { - insert(true_bcb); - insert(false_bcb); - } - BcbMappingKind::MCDCDecision { bitmap_idx, conditions_num, .. } => { - // `bcb_has_mappings` is used for inject coverage counters - // but they are not needed for decision BCBs. - // While the length of test vector bitmap should be calculated here. - test_vector_bitmap_bytes = test_vector_bitmap_bytes - .max(bitmap_idx + (1_u32 << conditions_num as u32).div_ceil(8)); - } - } - } - for &BcbBranchPair { true_bcb, false_bcb, .. } in &branch_pairs { - insert(true_bcb); - insert(false_bcb); - } - - Some(CoverageSpans { bcb_has_mappings, mappings, branch_pairs, test_vector_bitmap_bytes }) -} - -#[allow(unused_imports)] // Remove this line during the actual split. // FIXME(#124545) It's awkward that we have to re-export this, because it's an // internal detail of `from_mir` that is also needed when handling branch and // MC/DC spans. Ideally we would find a more natural home for it. diff --git a/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs b/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs index 2b1e0b1f4bb56..4ce37b5defcee 100644 --- a/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs +++ b/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs @@ -1,11 +1,8 @@ use rustc_data_structures::captures::Captures; use rustc_data_structures::fx::FxHashSet; -use rustc_index::IndexVec; -use rustc_middle::mir::coverage::{ - BlockMarkerId, BranchSpan, CoverageKind, MCDCBranchSpan, MCDCDecisionSpan, -}; +use rustc_middle::mir::coverage::CoverageKind; use rustc_middle::mir::{ - self, AggregateKind, BasicBlock, FakeReadCause, Rvalue, Statement, StatementKind, Terminator, + self, AggregateKind, FakeReadCause, Rvalue, Statement, StatementKind, Terminator, TerminatorKind, }; use rustc_span::{ExpnKind, MacroKind, Span, Symbol}; @@ -13,7 +10,6 @@ use rustc_span::{ExpnKind, MacroKind, Span, Symbol}; use crate::coverage::graph::{ BasicCoverageBlock, BasicCoverageBlockData, CoverageGraph, START_BCB, }; -use crate::coverage::spans::{BcbBranchPair, BcbMapping, BcbMappingKind}; use crate::coverage::ExtractedHirInfo; /// Traverses the MIR body to produce an initial collection of coverage-relevant @@ -365,141 +361,3 @@ impl SpanFromMir { Self { span, visible_macro, bcb, is_hole } } } - -fn resolve_block_markers( - branch_info: &mir::coverage::BranchInfo, - mir_body: &mir::Body<'_>, -) -> IndexVec> { - let mut block_markers = IndexVec::>::from_elem_n( - None, - branch_info.num_block_markers, - ); - - // Fill out the mapping from block marker IDs to their enclosing blocks. 
- for (bb, data) in mir_body.basic_blocks.iter_enumerated() { - for statement in &data.statements { - if let StatementKind::Coverage(CoverageKind::BlockMarker { id }) = statement.kind { - block_markers[id] = Some(bb); - } - } - } - - block_markers -} - -// FIXME: There is currently a lot of redundancy between -// `extract_branch_pairs` and `extract_mcdc_mappings`. This is needed so -// that they can each be modified without interfering with the other, but in -// the long term we should try to bring them together again when branch coverage -// and MC/DC coverage support are more mature. - -pub(super) fn extract_branch_pairs( - mir_body: &mir::Body<'_>, - hir_info: &ExtractedHirInfo, - basic_coverage_blocks: &CoverageGraph, -) -> Vec { - let Some(branch_info) = mir_body.coverage_branch_info.as_deref() else { return vec![] }; - - let block_markers = resolve_block_markers(branch_info, mir_body); - - branch_info - .branch_spans - .iter() - .filter_map(|&BranchSpan { span: raw_span, true_marker, false_marker }| { - // For now, ignore any branch span that was introduced by - // expansion. This makes things like assert macros less noisy. - if !raw_span.ctxt().outer_expn_data().is_root() { - return None; - } - let (span, _) = - unexpand_into_body_span_with_visible_macro(raw_span, hir_info.body_span)?; - - let bcb_from_marker = - |marker: BlockMarkerId| basic_coverage_blocks.bcb_from_bb(block_markers[marker]?); - - let true_bcb = bcb_from_marker(true_marker)?; - let false_bcb = bcb_from_marker(false_marker)?; - - Some(BcbBranchPair { span, true_bcb, false_bcb }) - }) - .collect::>() -} - -pub(super) fn extract_mcdc_mappings( - mir_body: &mir::Body<'_>, - body_span: Span, - basic_coverage_blocks: &CoverageGraph, -) -> Vec { - let Some(branch_info) = mir_body.coverage_branch_info.as_deref() else { - return vec![]; - }; - - let block_markers = resolve_block_markers(branch_info, mir_body); - - let bcb_from_marker = - |marker: BlockMarkerId| basic_coverage_blocks.bcb_from_bb(block_markers[marker]?); - - let check_branch_bcb = - |raw_span: Span, true_marker: BlockMarkerId, false_marker: BlockMarkerId| { - // For now, ignore any branch span that was introduced by - // expansion. This makes things like assert macros less noisy. 
- if !raw_span.ctxt().outer_expn_data().is_root() { - return None; - } - let (span, _) = unexpand_into_body_span_with_visible_macro(raw_span, body_span)?; - - let true_bcb = bcb_from_marker(true_marker)?; - let false_bcb = bcb_from_marker(false_marker)?; - Some((span, true_bcb, false_bcb)) - }; - - let mcdc_branch_filter_map = |&MCDCBranchSpan { - span: raw_span, - true_marker, - false_marker, - condition_info, - decision_depth, - }| { - check_branch_bcb(raw_span, true_marker, false_marker).map(|(span, true_bcb, false_bcb)| { - BcbMapping { - kind: BcbMappingKind::MCDCBranch { - true_bcb, - false_bcb, - condition_info, - decision_depth, - }, - span, - } - }) - }; - - let mut next_bitmap_idx = 0; - - let decision_filter_map = |decision: &MCDCDecisionSpan| { - let (span, _) = unexpand_into_body_span_with_visible_macro(decision.span, body_span)?; - - let end_bcbs = decision - .end_markers - .iter() - .map(|&marker| bcb_from_marker(marker)) - .collect::>()?; - - let bitmap_idx = next_bitmap_idx; - next_bitmap_idx += (1_u32 << decision.conditions_num).div_ceil(8); - - Some(BcbMapping { - kind: BcbMappingKind::MCDCDecision { - end_bcbs, - bitmap_idx, - conditions_num: decision.conditions_num as u16, - decision_depth: decision.decision_depth, - }, - span, - }) - }; - - std::iter::empty() - .chain(branch_info.mcdc_branch_spans.iter().filter_map(mcdc_branch_filter_map)) - .chain(branch_info.mcdc_decision_spans.iter().filter_map(decision_filter_map)) - .collect::>() -} From a76e250abdc5a3e1929334a33655f0393f31183d Mon Sep 17 00:00:00 2001 From: zhuyunxing Date: Tue, 30 Apr 2024 10:05:50 +0800 Subject: [PATCH 54/56] coverage. Add BlockMarkerGen to avoid ownership gymnastics --- .../rustc_mir_build/src/build/coverageinfo.rs | 64 ++++++++++--------- 1 file changed, 35 insertions(+), 29 deletions(-) diff --git a/compiler/rustc_mir_build/src/build/coverageinfo.rs b/compiler/rustc_mir_build/src/build/coverageinfo.rs index a1c20fcfd8365..e3929f1053423 100644 --- a/compiler/rustc_mir_build/src/build/coverageinfo.rs +++ b/compiler/rustc_mir_build/src/build/coverageinfo.rs @@ -20,7 +20,7 @@ pub(crate) struct BranchInfoBuilder { /// Maps condition expressions to their enclosing `!`, for better instrumentation. nots: FxHashMap, - num_block_markers: usize, + markers: BlockMarkerGen, branch_spans: Vec, mcdc_branch_spans: Vec, @@ -38,6 +38,35 @@ struct NotInfo { is_flipped: bool, } +#[derive(Default)] +struct BlockMarkerGen { + num_block_markers: usize, +} + +impl BlockMarkerGen { + fn next_block_marker_id(&mut self) -> BlockMarkerId { + let id = BlockMarkerId::from_usize(self.num_block_markers); + self.num_block_markers += 1; + id + } + + fn inject_block_marker( + &mut self, + cfg: &mut CFG<'_>, + source_info: SourceInfo, + block: BasicBlock, + ) -> BlockMarkerId { + let id = self.next_block_marker_id(); + let marker_statement = mir::Statement { + source_info, + kind: mir::StatementKind::Coverage(CoverageKind::BlockMarker { id }), + }; + cfg.push(block, marker_statement); + + id + } +} + impl BranchInfoBuilder { /// Creates a new branch info builder, but only if branch coverage instrumentation /// is enabled and `def_id` represents a function that is eligible for coverage. 
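For context on the "ownership gymnastics" the commit title mentions, here is a minimal standalone sketch (hypothetical, simplified types; not taken from the patch) of the pattern that splitting marker generation into its own field enables: the marker generator and the MC/DC state can be borrowed mutably at the same time, because they are disjoint fields of the builder.

// Simplified stand-ins for `BlockMarkerGen` and `BranchInfoBuilder`.
struct MarkerGen {
    next: usize,
}

impl MarkerGen {
    fn inject(&mut self) -> usize {
        let id = self.next;
        self.next += 1;
        id
    }
}

struct Builder {
    markers: MarkerGen,
    mcdc_info: Option<Vec<usize>>,
}

fn visit_condition(builder: &mut Builder) {
    // `builder.mcdc_info` and `builder.markers` are disjoint fields, so both
    // can be mutably borrowed in the same scope; a single method on `Builder`
    // doing both jobs would need `&mut self` for each call, which is the kind
    // of conflict the commit title alludes to.
    if let Some(mcdc_info) = builder.mcdc_info.as_mut() {
        let id = builder.markers.inject();
        mcdc_info.push(id);
    }
}

fn main() {
    let mut builder = Builder { markers: MarkerGen { next: 0 }, mcdc_info: Some(vec![]) };
    visit_condition(&mut builder);
    assert_eq!(builder.mcdc_info, Some(vec![0]));
}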
@@ -45,7 +74,7 @@ impl BranchInfoBuilder { if tcx.sess.instrument_coverage_branch() && tcx.is_eligible_for_coverage(def_id) { Some(Self { nots: FxHashMap::default(), - num_block_markers: 0, + markers: BlockMarkerGen::default(), branch_spans: vec![], mcdc_branch_spans: vec![], mcdc_decision_spans: vec![], @@ -145,39 +174,16 @@ impl BranchInfoBuilder { true_block: BasicBlock, false_block: BasicBlock, ) { - let true_marker = self.inject_block_marker(cfg, source_info, true_block); - let false_marker = self.inject_block_marker(cfg, source_info, false_block); + let true_marker = self.markers.inject_block_marker(cfg, source_info, true_block); + let false_marker = self.markers.inject_block_marker(cfg, source_info, false_block); self.branch_spans.push(BranchSpan { span: source_info.span, true_marker, false_marker }); } - fn next_block_marker_id(&mut self) -> BlockMarkerId { - let id = BlockMarkerId::from_usize(self.num_block_markers); - self.num_block_markers += 1; - id - } - - fn inject_block_marker( - &mut self, - cfg: &mut CFG<'_>, - source_info: SourceInfo, - block: BasicBlock, - ) -> BlockMarkerId { - let id = self.next_block_marker_id(); - - let marker_statement = mir::Statement { - source_info, - kind: mir::StatementKind::Coverage(CoverageKind::BlockMarker { id }), - }; - cfg.push(block, marker_statement); - - id - } - pub(crate) fn into_done(self) -> Option> { let Self { nots: _, - num_block_markers, + markers: BlockMarkerGen { num_block_markers }, branch_spans, mcdc_branch_spans, mcdc_decision_spans, @@ -386,7 +392,7 @@ impl Builder<'_, '_> { // Separate path for handling branches when MC/DC is enabled. if branch_info.mcdc_state.is_some() { let mut inject_block_marker = - |block| branch_info.inject_block_marker(&mut self.cfg, source_info, block); + |block| branch_info.markers.inject_block_marker(&mut self.cfg, source_info, block); let true_marker = inject_block_marker(then_block); let false_marker = inject_block_marker(else_block); let (decision_depth, condition_info) = branch_info From e198c51f16e5ed8480bee455c3a85071cd64d947 Mon Sep 17 00:00:00 2001 From: zhuyunxing Date: Tue, 30 Apr 2024 10:21:46 +0800 Subject: [PATCH 55/56] coverage. 
Add MCDCInfoBuilder to isolate all mcdc stuff from BranchInfoBuilder --- .../rustc_mir_build/src/build/coverageinfo.rs | 196 ++++++++++-------- 1 file changed, 110 insertions(+), 86 deletions(-) diff --git a/compiler/rustc_mir_build/src/build/coverageinfo.rs b/compiler/rustc_mir_build/src/build/coverageinfo.rs index e3929f1053423..f0c0abfccec63 100644 --- a/compiler/rustc_mir_build/src/build/coverageinfo.rs +++ b/compiler/rustc_mir_build/src/build/coverageinfo.rs @@ -23,9 +23,7 @@ pub(crate) struct BranchInfoBuilder { markers: BlockMarkerGen, branch_spans: Vec, - mcdc_branch_spans: Vec, - mcdc_decision_spans: Vec, - mcdc_state: Option, + mcdc_info: Option, } #[derive(Clone, Copy)] @@ -76,9 +74,7 @@ impl BranchInfoBuilder { nots: FxHashMap::default(), markers: BlockMarkerGen::default(), branch_spans: vec![], - mcdc_branch_spans: vec![], - mcdc_decision_spans: vec![], - mcdc_state: MCDCState::new_if_enabled(tcx), + mcdc_info: tcx.sess.instrument_coverage_mcdc().then(MCDCInfoBuilder::new), }) } else { None @@ -125,48 +121,6 @@ impl BranchInfoBuilder { } } - fn fetch_mcdc_condition_info( - &mut self, - tcx: TyCtxt<'_>, - true_marker: BlockMarkerId, - false_marker: BlockMarkerId, - ) -> Option<(u16, ConditionInfo)> { - let mcdc_state = self.mcdc_state.as_mut()?; - let decision_depth = mcdc_state.decision_depth(); - let (mut condition_info, decision_result) = - mcdc_state.take_condition(true_marker, false_marker); - // take_condition() returns Some for decision_result when the decision stack - // is empty, i.e. when all the conditions of the decision were instrumented, - // and the decision is "complete". - if let Some(decision) = decision_result { - match decision.conditions_num { - 0 => { - unreachable!("Decision with no condition is not expected"); - } - 1..=MAX_CONDITIONS_NUM_IN_DECISION => { - self.mcdc_decision_spans.push(decision); - } - _ => { - // Do not generate mcdc mappings and statements for decisions with too many conditions. - let rebase_idx = self.mcdc_branch_spans.len() - decision.conditions_num + 1; - for branch in &mut self.mcdc_branch_spans[rebase_idx..] { - branch.condition_info = None; - } - - // ConditionInfo of this branch shall also be reset. - condition_info = None; - - tcx.dcx().emit_warn(MCDCExceedsConditionNumLimit { - span: decision.span, - conditions_num: decision.conditions_num, - max_conditions_num: MAX_CONDITIONS_NUM_IN_DECISION, - }); - } - } - } - condition_info.map(|cond_info| (decision_depth, cond_info)) - } - fn add_two_way_branch<'tcx>( &mut self, cfg: &mut CFG<'tcx>, @@ -185,9 +139,7 @@ impl BranchInfoBuilder { nots: _, markers: BlockMarkerGen { num_block_markers }, branch_spans, - mcdc_branch_spans, - mcdc_decision_spans, - mcdc_state: _, + mcdc_info, } = self; if num_block_markers == 0 { @@ -195,6 +147,9 @@ impl BranchInfoBuilder { return None; } + let (mcdc_decision_spans, mcdc_branch_spans) = + mcdc_info.map(MCDCInfoBuilder::into_done).unwrap_or_default(); + Some(Box::new(mir::coverage::BranchInfo { num_block_markers, branch_spans, @@ -221,20 +176,23 @@ struct MCDCState { } impl MCDCState { - fn new_if_enabled(tcx: TyCtxt<'_>) -> Option { - tcx.sess - .instrument_coverage_mcdc() - .then(|| Self { decision_ctx_stack: vec![MCDCDecisionCtx::default()] }) + fn new() -> Self { + Self { decision_ctx_stack: vec![MCDCDecisionCtx::default()] } } /// Decision depth is given as a u16 to reduce the size of the `CoverageKind`, /// as it is very unlikely that the depth ever reaches 2^16. 
#[inline] fn decision_depth(&self) -> u16 { - u16::try_from( - self.decision_ctx_stack.len().checked_sub(1).expect("Unexpected empty decision stack"), - ) - .expect("decision depth did not fit in u16, this is likely to be an instrumentation error") + match u16::try_from(self.decision_ctx_stack.len()) + .expect( + "decision depth did not fit in u16, this is likely to be an instrumentation error", + ) + .checked_sub(1) + { + Some(d) => d, + None => bug!("Unexpected empty decision stack"), + } } // At first we assign ConditionIds for each sub expression. @@ -279,8 +237,9 @@ impl MCDCState { // - If the op is OR, the "true_next" of LHS and RHS should be the parent's "true_next". While "false_next" of the LHS is the RHS, the "false next" of RHS is the parent's "false_next". fn record_conditions(&mut self, op: LogicalOp, span: Span) { let decision_depth = self.decision_depth(); - let decision_ctx = - self.decision_ctx_stack.last_mut().expect("Unexpected empty decision_ctx_stack"); + let Some(decision_ctx) = self.decision_ctx_stack.last_mut() else { + bug!("Unexpected empty decision_ctx_stack") + }; let decision = match decision_ctx.processing_decision.as_mut() { Some(decision) => { decision.span = decision.span.to(span); @@ -343,8 +302,9 @@ impl MCDCState { true_marker: BlockMarkerId, false_marker: BlockMarkerId, ) -> (Option, Option) { - let decision_ctx = - self.decision_ctx_stack.last_mut().expect("Unexpected empty decision_ctx_stack"); + let Some(decision_ctx) = self.decision_ctx_stack.last_mut() else { + bug!("Unexpected empty decision_ctx_stack") + }; let Some(condition_info) = decision_ctx.decision_stack.pop_back() else { return (None, None); }; @@ -366,6 +326,74 @@ impl MCDCState { } } +struct MCDCInfoBuilder { + branch_spans: Vec, + decision_spans: Vec, + state: MCDCState, +} + +impl MCDCInfoBuilder { + fn new() -> Self { + Self { branch_spans: vec![], decision_spans: vec![], state: MCDCState::new() } + } + + fn visit_evaluated_condition( + &mut self, + tcx: TyCtxt<'_>, + source_info: SourceInfo, + true_block: BasicBlock, + false_block: BasicBlock, + mut inject_block_marker: impl FnMut(SourceInfo, BasicBlock) -> BlockMarkerId, + ) { + let true_marker = inject_block_marker(source_info, true_block); + let false_marker = inject_block_marker(source_info, false_block); + + let decision_depth = self.state.decision_depth(); + let (mut condition_info, decision_result) = + self.state.take_condition(true_marker, false_marker); + // take_condition() returns Some for decision_result when the decision stack + // is empty, i.e. when all the conditions of the decision were instrumented, + // and the decision is "complete". + if let Some(decision) = decision_result { + match decision.conditions_num { + 0 => { + unreachable!("Decision with no condition is not expected"); + } + 1..=MAX_CONDITIONS_NUM_IN_DECISION => { + self.decision_spans.push(decision); + } + _ => { + // Do not generate mcdc mappings and statements for decisions with too many conditions. + let rebase_idx = self.branch_spans.len() - decision.conditions_num + 1; + for branch in &mut self.branch_spans[rebase_idx..] { + branch.condition_info = None; + } + + // ConditionInfo of this branch shall also be reset. 
+ condition_info = None; + + tcx.dcx().emit_warn(MCDCExceedsConditionNumLimit { + span: decision.span, + conditions_num: decision.conditions_num, + max_conditions_num: MAX_CONDITIONS_NUM_IN_DECISION, + }); + } + } + } + self.branch_spans.push(MCDCBranchSpan { + span: source_info.span, + condition_info, + true_marker, + false_marker, + decision_depth, + }); + } + + fn into_done(self) -> (Vec, Vec) { + (self.decision_spans, self.branch_spans) + } +} + impl Builder<'_, '_> { /// If branch coverage is enabled, inject marker statements into `then_block` /// and `else_block`, and record their IDs in the table of branch spans. @@ -390,23 +418,17 @@ impl Builder<'_, '_> { let source_info = SourceInfo { span: self.thir[expr_id].span, scope: self.source_scope }; // Separate path for handling branches when MC/DC is enabled. - if branch_info.mcdc_state.is_some() { - let mut inject_block_marker = - |block| branch_info.markers.inject_block_marker(&mut self.cfg, source_info, block); - let true_marker = inject_block_marker(then_block); - let false_marker = inject_block_marker(else_block); - let (decision_depth, condition_info) = branch_info - .fetch_mcdc_condition_info(self.tcx, true_marker, false_marker) - .map_or((0, None), |(decision_depth, condition_info)| { - (decision_depth, Some(condition_info)) - }); - branch_info.mcdc_branch_spans.push(MCDCBranchSpan { - span: source_info.span, - condition_info, - true_marker, - false_marker, - decision_depth, - }); + if let Some(mcdc_info) = branch_info.mcdc_info.as_mut() { + let inject_block_marker = |source_info, block| { + branch_info.markers.inject_block_marker(&mut self.cfg, source_info, block) + }; + mcdc_info.visit_evaluated_condition( + self.tcx, + source_info, + then_block, + else_block, + inject_block_marker, + ); return; } @@ -415,25 +437,27 @@ impl Builder<'_, '_> { pub(crate) fn visit_coverage_branch_operation(&mut self, logical_op: LogicalOp, span: Span) { if let Some(branch_info) = self.coverage_branch_info.as_mut() - && let Some(mcdc_state) = branch_info.mcdc_state.as_mut() + && let Some(mcdc_info) = branch_info.mcdc_info.as_mut() { - mcdc_state.record_conditions(logical_op, span); + mcdc_info.state.record_conditions(logical_op, span); } } pub(crate) fn mcdc_increment_depth_if_enabled(&mut self) { if let Some(branch_info) = self.coverage_branch_info.as_mut() - && let Some(mcdc_state) = branch_info.mcdc_state.as_mut() + && let Some(mcdc_info) = branch_info.mcdc_info.as_mut() { - mcdc_state.decision_ctx_stack.push(MCDCDecisionCtx::default()); + mcdc_info.state.decision_ctx_stack.push(MCDCDecisionCtx::default()); }; } pub(crate) fn mcdc_decrement_depth_if_enabled(&mut self) { if let Some(branch_info) = self.coverage_branch_info.as_mut() - && let Some(mcdc_state) = branch_info.mcdc_state.as_mut() + && let Some(mcdc_info) = branch_info.mcdc_info.as_mut() { - mcdc_state.decision_ctx_stack.pop().expect("Unexpected empty decision stack"); + if mcdc_info.state.decision_ctx_stack.pop().is_none() { + bug!("Unexpected empty decision stack"); + } }; } } From 6c8b492f02733120a05b257033db849a2a82c5f4 Mon Sep 17 00:00:00 2001 From: zhuyunxing Date: Tue, 30 Apr 2024 10:25:54 +0800 Subject: [PATCH 56/56] coverage. 
Split mcdc builder to a sub module of coverageinfo --- .../rustc_mir_build/src/build/coverageinfo.rs | 273 +---------------- .../src/build/coverageinfo/mcdc.rs | 275 ++++++++++++++++++ 2 files changed, 279 insertions(+), 269 deletions(-) create mode 100644 compiler/rustc_mir_build/src/build/coverageinfo/mcdc.rs diff --git a/compiler/rustc_mir_build/src/build/coverageinfo.rs b/compiler/rustc_mir_build/src/build/coverageinfo.rs index f0c0abfccec63..e2a5f97a84744 100644 --- a/compiler/rustc_mir_build/src/build/coverageinfo.rs +++ b/compiler/rustc_mir_build/src/build/coverageinfo.rs @@ -1,20 +1,16 @@ +mod mcdc; use std::assert_matches::assert_matches; use std::collections::hash_map::Entry; -use std::collections::VecDeque; use rustc_data_structures::fx::FxHashMap; -use rustc_middle::mir::coverage::{ - BlockMarkerId, BranchSpan, ConditionId, ConditionInfo, CoverageKind, MCDCBranchSpan, - MCDCDecisionSpan, -}; +use rustc_middle::mir::coverage::{BlockMarkerId, BranchSpan, CoverageKind}; use rustc_middle::mir::{self, BasicBlock, SourceInfo, UnOp}; -use rustc_middle::thir::{ExprId, ExprKind, LogicalOp, Thir}; +use rustc_middle::thir::{ExprId, ExprKind, Thir}; use rustc_middle::ty::TyCtxt; use rustc_span::def_id::LocalDefId; -use rustc_span::Span; +use crate::build::coverageinfo::mcdc::MCDCInfoBuilder; use crate::build::{Builder, CFG}; -use crate::errors::MCDCExceedsConditionNumLimit; pub(crate) struct BranchInfoBuilder { /// Maps condition expressions to their enclosing `!`, for better instrumentation. @@ -159,241 +155,6 @@ impl BranchInfoBuilder { } } -/// The MCDC bitmap scales exponentially (2^n) based on the number of conditions seen, -/// So llvm sets a maximum value prevents the bitmap footprint from growing too large without the user's knowledge. -/// This limit may be relaxed if the [upstream change](https://github.com/llvm/llvm-project/pull/82448) is merged. -const MAX_CONDITIONS_NUM_IN_DECISION: usize = 6; - -#[derive(Default)] -struct MCDCDecisionCtx { - /// To construct condition evaluation tree. - decision_stack: VecDeque, - processing_decision: Option, -} - -struct MCDCState { - decision_ctx_stack: Vec, -} - -impl MCDCState { - fn new() -> Self { - Self { decision_ctx_stack: vec![MCDCDecisionCtx::default()] } - } - - /// Decision depth is given as a u16 to reduce the size of the `CoverageKind`, - /// as it is very unlikely that the depth ever reaches 2^16. - #[inline] - fn decision_depth(&self) -> u16 { - match u16::try_from(self.decision_ctx_stack.len()) - .expect( - "decision depth did not fit in u16, this is likely to be an instrumentation error", - ) - .checked_sub(1) - { - Some(d) => d, - None => bug!("Unexpected empty decision stack"), - } - } - - // At first we assign ConditionIds for each sub expression. - // If the sub expression is composite, re-assign its ConditionId to its LHS and generate a new ConditionId for its RHS. - // - // Example: "x = (A && B) || (C && D) || (D && F)" - // - // Visit Depth1: - // (A && B) || (C && D) || (D && F) - // ^-------LHS--------^ ^-RHS--^ - // ID=1 ID=2 - // - // Visit LHS-Depth2: - // (A && B) || (C && D) - // ^-LHS--^ ^-RHS--^ - // ID=1 ID=3 - // - // Visit LHS-Depth3: - // (A && B) - // LHS RHS - // ID=1 ID=4 - // - // Visit RHS-Depth3: - // (C && D) - // LHS RHS - // ID=3 ID=5 - // - // Visit RHS-Depth2: (D && F) - // LHS RHS - // ID=2 ID=6 - // - // Visit Depth1: - // (A && B) || (C && D) || (D && F) - // ID=1 ID=4 ID=3 ID=5 ID=2 ID=6 - // - // A node ID of '0' always means MC/DC isn't being tracked. 
- // - // If a "next" node ID is '0', it means it's the end of the test vector. - // - // As the compiler tracks expression in pre-order, we can ensure that condition info of parents are always properly assigned when their children are visited. - // - If the op is AND, the "false_next" of LHS and RHS should be the parent's "false_next". While "true_next" of the LHS is the RHS, the "true next" of RHS is the parent's "true_next". - // - If the op is OR, the "true_next" of LHS and RHS should be the parent's "true_next". While "false_next" of the LHS is the RHS, the "false next" of RHS is the parent's "false_next". - fn record_conditions(&mut self, op: LogicalOp, span: Span) { - let decision_depth = self.decision_depth(); - let Some(decision_ctx) = self.decision_ctx_stack.last_mut() else { - bug!("Unexpected empty decision_ctx_stack") - }; - let decision = match decision_ctx.processing_decision.as_mut() { - Some(decision) => { - decision.span = decision.span.to(span); - decision - } - None => decision_ctx.processing_decision.insert(MCDCDecisionSpan { - span, - conditions_num: 0, - end_markers: vec![], - decision_depth, - }), - }; - - let parent_condition = decision_ctx.decision_stack.pop_back().unwrap_or_default(); - let lhs_id = if parent_condition.condition_id == ConditionId::NONE { - decision.conditions_num += 1; - ConditionId::from(decision.conditions_num) - } else { - parent_condition.condition_id - }; - - decision.conditions_num += 1; - let rhs_condition_id = ConditionId::from(decision.conditions_num); - - let (lhs, rhs) = match op { - LogicalOp::And => { - let lhs = ConditionInfo { - condition_id: lhs_id, - true_next_id: rhs_condition_id, - false_next_id: parent_condition.false_next_id, - }; - let rhs = ConditionInfo { - condition_id: rhs_condition_id, - true_next_id: parent_condition.true_next_id, - false_next_id: parent_condition.false_next_id, - }; - (lhs, rhs) - } - LogicalOp::Or => { - let lhs = ConditionInfo { - condition_id: lhs_id, - true_next_id: parent_condition.true_next_id, - false_next_id: rhs_condition_id, - }; - let rhs = ConditionInfo { - condition_id: rhs_condition_id, - true_next_id: parent_condition.true_next_id, - false_next_id: parent_condition.false_next_id, - }; - (lhs, rhs) - } - }; - // We visit expressions tree in pre-order, so place the left-hand side on the top. 
- decision_ctx.decision_stack.push_back(rhs); - decision_ctx.decision_stack.push_back(lhs); - } - - fn take_condition( - &mut self, - true_marker: BlockMarkerId, - false_marker: BlockMarkerId, - ) -> (Option, Option) { - let Some(decision_ctx) = self.decision_ctx_stack.last_mut() else { - bug!("Unexpected empty decision_ctx_stack") - }; - let Some(condition_info) = decision_ctx.decision_stack.pop_back() else { - return (None, None); - }; - let Some(decision) = decision_ctx.processing_decision.as_mut() else { - bug!("Processing decision should have been created before any conditions are taken"); - }; - if condition_info.true_next_id == ConditionId::NONE { - decision.end_markers.push(true_marker); - } - if condition_info.false_next_id == ConditionId::NONE { - decision.end_markers.push(false_marker); - } - - if decision_ctx.decision_stack.is_empty() { - (Some(condition_info), decision_ctx.processing_decision.take()) - } else { - (Some(condition_info), None) - } - } -} - -struct MCDCInfoBuilder { - branch_spans: Vec, - decision_spans: Vec, - state: MCDCState, -} - -impl MCDCInfoBuilder { - fn new() -> Self { - Self { branch_spans: vec![], decision_spans: vec![], state: MCDCState::new() } - } - - fn visit_evaluated_condition( - &mut self, - tcx: TyCtxt<'_>, - source_info: SourceInfo, - true_block: BasicBlock, - false_block: BasicBlock, - mut inject_block_marker: impl FnMut(SourceInfo, BasicBlock) -> BlockMarkerId, - ) { - let true_marker = inject_block_marker(source_info, true_block); - let false_marker = inject_block_marker(source_info, false_block); - - let decision_depth = self.state.decision_depth(); - let (mut condition_info, decision_result) = - self.state.take_condition(true_marker, false_marker); - // take_condition() returns Some for decision_result when the decision stack - // is empty, i.e. when all the conditions of the decision were instrumented, - // and the decision is "complete". - if let Some(decision) = decision_result { - match decision.conditions_num { - 0 => { - unreachable!("Decision with no condition is not expected"); - } - 1..=MAX_CONDITIONS_NUM_IN_DECISION => { - self.decision_spans.push(decision); - } - _ => { - // Do not generate mcdc mappings and statements for decisions with too many conditions. - let rebase_idx = self.branch_spans.len() - decision.conditions_num + 1; - for branch in &mut self.branch_spans[rebase_idx..] { - branch.condition_info = None; - } - - // ConditionInfo of this branch shall also be reset. - condition_info = None; - - tcx.dcx().emit_warn(MCDCExceedsConditionNumLimit { - span: decision.span, - conditions_num: decision.conditions_num, - max_conditions_num: MAX_CONDITIONS_NUM_IN_DECISION, - }); - } - } - } - self.branch_spans.push(MCDCBranchSpan { - span: source_info.span, - condition_info, - true_marker, - false_marker, - decision_depth, - }); - } - - fn into_done(self) -> (Vec, Vec) { - (self.decision_spans, self.branch_spans) - } -} - impl Builder<'_, '_> { /// If branch coverage is enabled, inject marker statements into `then_block` /// and `else_block`, and record their IDs in the table of branch spans. 
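As a small aside on the stack discipline used above (illustrative only, not part of the patch): `record_conditions` pushes the right-hand side before the left-hand side, so the left-hand side ends up on top, and `take_condition`, which pops from the back, therefore hands conditions out in the same pre-order in which the condition expressions are visited.

use std::collections::VecDeque;

fn main() {
    // Mirror of the push order in `record_conditions`: rhs first, then lhs.
    let mut decision_stack: VecDeque<&str> = VecDeque::new();
    decision_stack.push_back("rhs");
    decision_stack.push_back("lhs");

    // Popping from the back returns the left-hand side first, matching the
    // pre-order in which conditions are lowered and later taken.
    assert_eq!(decision_stack.pop_back(), Some("lhs"));
    assert_eq!(decision_stack.pop_back(), Some("rhs"));
}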
@@ -434,30 +195,4 @@ impl Builder<'_, '_> { branch_info.add_two_way_branch(&mut self.cfg, source_info, then_block, else_block); } - - pub(crate) fn visit_coverage_branch_operation(&mut self, logical_op: LogicalOp, span: Span) { - if let Some(branch_info) = self.coverage_branch_info.as_mut() - && let Some(mcdc_info) = branch_info.mcdc_info.as_mut() - { - mcdc_info.state.record_conditions(logical_op, span); - } - } - - pub(crate) fn mcdc_increment_depth_if_enabled(&mut self) { - if let Some(branch_info) = self.coverage_branch_info.as_mut() - && let Some(mcdc_info) = branch_info.mcdc_info.as_mut() - { - mcdc_info.state.decision_ctx_stack.push(MCDCDecisionCtx::default()); - }; - } - - pub(crate) fn mcdc_decrement_depth_if_enabled(&mut self) { - if let Some(branch_info) = self.coverage_branch_info.as_mut() - && let Some(mcdc_info) = branch_info.mcdc_info.as_mut() - { - if mcdc_info.state.decision_ctx_stack.pop().is_none() { - bug!("Unexpected empty decision stack"); - } - }; - } } diff --git a/compiler/rustc_mir_build/src/build/coverageinfo/mcdc.rs b/compiler/rustc_mir_build/src/build/coverageinfo/mcdc.rs new file mode 100644 index 0000000000000..566dba460d43b --- /dev/null +++ b/compiler/rustc_mir_build/src/build/coverageinfo/mcdc.rs @@ -0,0 +1,275 @@ +use std::collections::VecDeque; + +use rustc_middle::mir::coverage::{ + BlockMarkerId, ConditionId, ConditionInfo, MCDCBranchSpan, MCDCDecisionSpan, +}; +use rustc_middle::mir::{BasicBlock, SourceInfo}; +use rustc_middle::thir::LogicalOp; +use rustc_middle::ty::TyCtxt; +use rustc_span::Span; + +use crate::build::Builder; +use crate::errors::MCDCExceedsConditionNumLimit; + +/// The MCDC bitmap scales exponentially (2^n) based on the number of conditions seen, +/// So llvm sets a maximum value prevents the bitmap footprint from growing too large without the user's knowledge. +/// This limit may be relaxed if the [upstream change](https://github.com/llvm/llvm-project/pull/82448) is merged. +const MAX_CONDITIONS_NUM_IN_DECISION: usize = 6; + +#[derive(Default)] +struct MCDCDecisionCtx { + /// To construct condition evaluation tree. + decision_stack: VecDeque, + processing_decision: Option, +} + +struct MCDCState { + decision_ctx_stack: Vec, +} + +impl MCDCState { + fn new() -> Self { + Self { decision_ctx_stack: vec![MCDCDecisionCtx::default()] } + } + + /// Decision depth is given as a u16 to reduce the size of the `CoverageKind`, + /// as it is very unlikely that the depth ever reaches 2^16. + #[inline] + fn decision_depth(&self) -> u16 { + match u16::try_from(self.decision_ctx_stack.len()) + .expect( + "decision depth did not fit in u16, this is likely to be an instrumentation error", + ) + .checked_sub(1) + { + Some(d) => d, + None => bug!("Unexpected empty decision stack"), + } + } + + // At first we assign ConditionIds for each sub expression. + // If the sub expression is composite, re-assign its ConditionId to its LHS and generate a new ConditionId for its RHS. 
+ // + // Example: "x = (A && B) || (C && D) || (D && F)" + // + // Visit Depth1: + // (A && B) || (C && D) || (D && F) + // ^-------LHS--------^ ^-RHS--^ + // ID=1 ID=2 + // + // Visit LHS-Depth2: + // (A && B) || (C && D) + // ^-LHS--^ ^-RHS--^ + // ID=1 ID=3 + // + // Visit LHS-Depth3: + // (A && B) + // LHS RHS + // ID=1 ID=4 + // + // Visit RHS-Depth3: + // (C && D) + // LHS RHS + // ID=3 ID=5 + // + // Visit RHS-Depth2: (D && F) + // LHS RHS + // ID=2 ID=6 + // + // Visit Depth1: + // (A && B) || (C && D) || (D && F) + // ID=1 ID=4 ID=3 ID=5 ID=2 ID=6 + // + // A node ID of '0' always means MC/DC isn't being tracked. + // + // If a "next" node ID is '0', it means it's the end of the test vector. + // + // As the compiler tracks expression in pre-order, we can ensure that condition info of parents are always properly assigned when their children are visited. + // - If the op is AND, the "false_next" of LHS and RHS should be the parent's "false_next". While "true_next" of the LHS is the RHS, the "true next" of RHS is the parent's "true_next". + // - If the op is OR, the "true_next" of LHS and RHS should be the parent's "true_next". While "false_next" of the LHS is the RHS, the "false next" of RHS is the parent's "false_next". + fn record_conditions(&mut self, op: LogicalOp, span: Span) { + let decision_depth = self.decision_depth(); + let Some(decision_ctx) = self.decision_ctx_stack.last_mut() else { + bug!("Unexpected empty decision_ctx_stack") + }; + let decision = match decision_ctx.processing_decision.as_mut() { + Some(decision) => { + decision.span = decision.span.to(span); + decision + } + None => decision_ctx.processing_decision.insert(MCDCDecisionSpan { + span, + conditions_num: 0, + end_markers: vec![], + decision_depth, + }), + }; + + let parent_condition = decision_ctx.decision_stack.pop_back().unwrap_or_default(); + let lhs_id = if parent_condition.condition_id == ConditionId::NONE { + decision.conditions_num += 1; + ConditionId::from(decision.conditions_num) + } else { + parent_condition.condition_id + }; + + decision.conditions_num += 1; + let rhs_condition_id = ConditionId::from(decision.conditions_num); + + let (lhs, rhs) = match op { + LogicalOp::And => { + let lhs = ConditionInfo { + condition_id: lhs_id, + true_next_id: rhs_condition_id, + false_next_id: parent_condition.false_next_id, + }; + let rhs = ConditionInfo { + condition_id: rhs_condition_id, + true_next_id: parent_condition.true_next_id, + false_next_id: parent_condition.false_next_id, + }; + (lhs, rhs) + } + LogicalOp::Or => { + let lhs = ConditionInfo { + condition_id: lhs_id, + true_next_id: parent_condition.true_next_id, + false_next_id: rhs_condition_id, + }; + let rhs = ConditionInfo { + condition_id: rhs_condition_id, + true_next_id: parent_condition.true_next_id, + false_next_id: parent_condition.false_next_id, + }; + (lhs, rhs) + } + }; + // We visit expressions tree in pre-order, so place the left-hand side on the top. 
+ decision_ctx.decision_stack.push_back(rhs); + decision_ctx.decision_stack.push_back(lhs); + } + + fn take_condition( + &mut self, + true_marker: BlockMarkerId, + false_marker: BlockMarkerId, + ) -> (Option, Option) { + let Some(decision_ctx) = self.decision_ctx_stack.last_mut() else { + bug!("Unexpected empty decision_ctx_stack") + }; + let Some(condition_info) = decision_ctx.decision_stack.pop_back() else { + return (None, None); + }; + let Some(decision) = decision_ctx.processing_decision.as_mut() else { + bug!("Processing decision should have been created before any conditions are taken"); + }; + if condition_info.true_next_id == ConditionId::NONE { + decision.end_markers.push(true_marker); + } + if condition_info.false_next_id == ConditionId::NONE { + decision.end_markers.push(false_marker); + } + + if decision_ctx.decision_stack.is_empty() { + (Some(condition_info), decision_ctx.processing_decision.take()) + } else { + (Some(condition_info), None) + } + } +} + +pub struct MCDCInfoBuilder { + branch_spans: Vec, + decision_spans: Vec, + state: MCDCState, +} + +impl MCDCInfoBuilder { + pub fn new() -> Self { + Self { branch_spans: vec![], decision_spans: vec![], state: MCDCState::new() } + } + + pub fn visit_evaluated_condition( + &mut self, + tcx: TyCtxt<'_>, + source_info: SourceInfo, + true_block: BasicBlock, + false_block: BasicBlock, + mut inject_block_marker: impl FnMut(SourceInfo, BasicBlock) -> BlockMarkerId, + ) { + let true_marker = inject_block_marker(source_info, true_block); + let false_marker = inject_block_marker(source_info, false_block); + + let decision_depth = self.state.decision_depth(); + let (mut condition_info, decision_result) = + self.state.take_condition(true_marker, false_marker); + // take_condition() returns Some for decision_result when the decision stack + // is empty, i.e. when all the conditions of the decision were instrumented, + // and the decision is "complete". + if let Some(decision) = decision_result { + match decision.conditions_num { + 0 => { + unreachable!("Decision with no condition is not expected"); + } + 1..=MAX_CONDITIONS_NUM_IN_DECISION => { + self.decision_spans.push(decision); + } + _ => { + // Do not generate mcdc mappings and statements for decisions with too many conditions. + let rebase_idx = self.branch_spans.len() - decision.conditions_num + 1; + for branch in &mut self.branch_spans[rebase_idx..] { + branch.condition_info = None; + } + + // ConditionInfo of this branch shall also be reset. 
+ condition_info = None; + + tcx.dcx().emit_warn(MCDCExceedsConditionNumLimit { + span: decision.span, + conditions_num: decision.conditions_num, + max_conditions_num: MAX_CONDITIONS_NUM_IN_DECISION, + }); + } + } + } + self.branch_spans.push(MCDCBranchSpan { + span: source_info.span, + condition_info, + true_marker, + false_marker, + decision_depth, + }); + } + + pub fn into_done(self) -> (Vec, Vec) { + (self.decision_spans, self.branch_spans) + } +} + +impl Builder<'_, '_> { + pub(crate) fn visit_coverage_branch_operation(&mut self, logical_op: LogicalOp, span: Span) { + if let Some(branch_info) = self.coverage_branch_info.as_mut() + && let Some(mcdc_info) = branch_info.mcdc_info.as_mut() + { + mcdc_info.state.record_conditions(logical_op, span); + } + } + + pub(crate) fn mcdc_increment_depth_if_enabled(&mut self) { + if let Some(branch_info) = self.coverage_branch_info.as_mut() + && let Some(mcdc_info) = branch_info.mcdc_info.as_mut() + { + mcdc_info.state.decision_ctx_stack.push(MCDCDecisionCtx::default()); + }; + } + + pub(crate) fn mcdc_decrement_depth_if_enabled(&mut self) { + if let Some(branch_info) = self.coverage_branch_info.as_mut() + && let Some(mcdc_info) = branch_info.mcdc_info.as_mut() + { + if mcdc_info.state.decision_ctx_stack.pop().is_none() { + bug!("Unexpected empty decision stack"); + } + }; + } +}
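To make the bitmap arithmetic used by the MC/DC mappings concrete, here is a small back-of-the-envelope sketch (illustrative values, not code from the patches): a decision with `conditions_num` conditions has `2^conditions_num` possible test vectors at one bit each, so `extract_mcdc_mappings` reserves `(1 << conditions_num).div_ceil(8)` bytes for it starting at its `bitmap_idx`, and `generate_coverage_spans` takes the largest `bitmap_idx + size` as the per-function bitmap length.

fn main() {
    // Per-decision bitmap size: one bit per possible test vector, rounded up
    // to whole bytes, as in `extract_mcdc_mappings`.
    let bytes_for = |conditions_num: u32| (1_u32 << conditions_num).div_ceil(8);

    assert_eq!(bytes_for(1), 1); // 2 test vectors still occupy a full byte
    assert_eq!(bytes_for(3), 1); // 8 test vectors -> 1 byte
    assert_eq!(bytes_for(6), 8); // the 6-condition limit -> 64 bits -> 8 bytes

    // Decisions are laid out back to back via `bitmap_idx`, so the function
    // level size is the end offset of the furthest decision, as computed in
    // `generate_coverage_spans`. The (bitmap_idx, conditions_num) pairs below
    // are made-up example values.
    let decisions: [(u32, u32); 2] = [(0, 2), (1, 6)];
    let total = decisions
        .iter()
        .map(|&(bitmap_idx, conditions_num)| bitmap_idx + bytes_for(conditions_num))
        .max()
        .unwrap_or(0);
    assert_eq!(total, 9);
}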