diff --git a/.github/PULL_REQUEST_TEMPLATE/fix-issue.md b/.github/PULL_REQUEST_TEMPLATE/fix-issue.md index 506996510c7e..f7cf22eb59c7 100644 --- a/.github/PULL_REQUEST_TEMPLATE/fix-issue.md +++ b/.github/PULL_REQUEST_TEMPLATE/fix-issue.md @@ -6,14 +6,14 @@ assignees: '' --- - ## Fix #XYZ - + diff --git a/.github/PULL_REQUEST_TEMPLATE/other-pr.md b/.github/PULL_REQUEST_TEMPLATE/other-pr.md index 4b69a80460af..fad49836df92 100644 --- a/.github/PULL_REQUEST_TEMPLATE/other-pr.md +++ b/.github/PULL_REQUEST_TEMPLATE/other-pr.md @@ -6,14 +6,14 @@ assignees: '' --- - ## Description - + diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 72e7149fa762..370b66854051 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -14,7 +14,12 @@ on: ## - both (tags or tags-ignore) and (branches or branches-ignore), ## - or neither of them. ## But it's important to not have only one or the other. + tags: + - '*' + branches-ignore: + - 'gh-readonly-queue/**' pull_request: + merge_group: schedule: - cron: '0 3 * * *' # Every day at 3 AM workflow_dispatch: @@ -97,6 +102,7 @@ jobs: - ${{ github.workspace }}/../../cache/general:/root/.cache if: "github.event_name == 'schedule' && github.repository == 'lampepfl/dotty' || github.event_name == 'push' + || github.event_name == 'merge_group' || ( github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]') @@ -142,6 +148,7 @@ jobs: github.event_name == 'push' && github.ref != 'refs/heads/main' ) + || github.event_name == 'merge_group' || ( github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]') @@ -218,6 +225,7 @@ jobs: - ${{ github.workspace }}/../../cache/general:/root/.cache if: "github.event_name == 'schedule' && github.repository == 'lampepfl/dotty' || github.event_name == 'push' + || github.event_name == 'merge_group' || ( github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]') @@ -258,6 +266,7 @@ jobs: - ${{ github.workspace }}/../../cache/general:/root/.cache if: "github.event_name == 'schedule' && github.repository == 'lampepfl/dotty' || github.event_name == 'push' + || github.event_name == 'merge_group' || ( github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]') @@ -306,6 +315,7 @@ jobs: - ${{ github.workspace }}/../../cache/general:/root/.cache if: "github.event_name == 'schedule' && github.repository == 'lampepfl/dotty' || github.event_name == 'push' + || github.event_name == 'merge_group' || ( github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]') @@ -354,6 +364,7 @@ jobs: - ${{ github.workspace }}/../../cache/general:/root/.cache if: "github.event_name == 'schedule' && github.repository == 'lampepfl/dotty' || github.event_name == 'push' + || github.event_name == 'merge_group' || ( github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]') @@ -585,45 +596,6 @@ jobs: external_repository: lampepfl/dotty-website publish_branch: gh-pages - nightly_unmanaged_community_build: - # Self-hosted runner is used only for getting current build version - runs-on: [self-hosted, Linux] - container: - image: lampepfl/dotty:2021-03-22 - options: --cpu-shares 4096 - volumes: - - ${{ github.workspace }}/../../cache/sbt:/root/.sbt - - ${{ github.workspace }}/../../cache/ivy:/root/.ivy2/cache - - ${{ github.workspace }}/../../cache/general:/root/.cache - needs: [publish_nightly] - if: "(github.event_name == 'schedule' || 
github.event_name == 'workflow_dispatch') && github.repository == 'lampepfl/dotty'" - env: - NIGHTLYBUILD: yes - steps: - - name: Reset existing repo - run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true - - - name: Checkout cleanup script - uses: actions/checkout@v3 - - - name: Cleanup - run: .github/workflows/cleanup.sh - - - name: Git Checkout - uses: actions/checkout@v3 - - - name: Add SBT proxy repositories - run: cp -vf .github/workflows/repositories /root/.sbt/ ; true - - - name: Get version string for this build - run: | - ver=$(./project/scripts/sbt "print scala3-compiler-bootstrapped/version" | tail -n1) - echo "This build version: $ver" - echo "THISBUILD_VERSION=$ver" >> $GITHUB_ENV - # Steps above are copy-pasted from publish_nightly, needed only to resolve THISBUILD_VERSION - - name: Trigger unmanaged community build - run: .github/workflows/scripts/triggerUnmanagedCommunityBuild.sh "${{ secrets.BUILD_TOKEN }}" "$THISBUILD_VERSION" - publish_release: permissions: contents: write # for actions/create-release to create a release diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml index 3ac31b0994f7..bb1aec1290c0 100644 --- a/.github/workflows/cla.yml +++ b/.github/workflows/cla.yml @@ -6,6 +6,7 @@ on: push: branches: - 'language-reference-stable' + merge_group: permissions: contents: write pull-requests: write diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index 7415759078ac..ba4bae0456d0 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -13,9 +13,9 @@ jobs: options: --cpu-shares 4096 env: - SDKMAN_KEY: ${{ secrets.SDKMAN_KEY }} + SDKMAN_KEY: ${{ secrets.SDKMAN_KEY }} SDKMAN_TOKEN: ${{ secrets.SDKMAN_TOKEN }} - + steps: - name: Reset existing repo run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true diff --git a/.github/workflows/scaladoc.yaml b/.github/workflows/scaladoc.yaml index 9ccbe34788ce..3108f2b94562 100644 --- a/.github/workflows/scaladoc.yaml +++ b/.github/workflows/scaladoc.yaml @@ -4,9 +4,11 @@ on: push: branches-ignore: - 'language-reference-stable' + - 'gh-readonly-queue/**' pull_request: branches-ignore: - 'language-reference-stable' + merge_group: permissions: contents: read @@ -15,7 +17,8 @@ jobs: env: AZURE_STORAGE_SAS_TOKEN: ${{ secrets.AZURE_STORAGE_SAS_TOKEN }} runs-on: ubuntu-latest - if: "( github.event_name == 'pull_request' + if: "github.event_name == 'merge_group' + || ( github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]') && !contains(github.event.pull_request.body, '[skip docs]') ) diff --git a/.github/workflows/scripts/publish-sdkman.sh b/.github/workflows/scripts/publish-sdkman.sh index 07d35a72a65e..70987bff175b 100755 --- a/.github/workflows/scripts/publish-sdkman.sh +++ b/.github/workflows/scripts/publish-sdkman.sh @@ -9,11 +9,11 @@ set -u -# latest stable dotty version +# latest stable dotty version DOTTY_VERSION=$(curl -s https://api.github.com/repos/lampepfl/dotty/releases/latest | grep '"tag_name":' | sed -E 's/.*"([^"]+)".*/\1/') DOTTY_URL="https://github.com/lampepfl/dotty/releases/download/$DOTTY_VERSION/scala3-$DOTTY_VERSION.zip" -# checking if dotty version is available +# checking if dotty version is available if ! 
curl --output /dev/null --silent --head --fail "$DOTTY_URL"; then echo "URL doesn't exist: $DOTTY_URL" exit 1 diff --git a/.github/workflows/scripts/triggerUnmanagedCommunityBuild.sh b/.github/workflows/scripts/triggerUnmanagedCommunityBuild.sh deleted file mode 100755 index 694428e29bb5..000000000000 --- a/.github/workflows/scripts/triggerUnmanagedCommunityBuild.sh +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env bash - -# This is script for triggering unamanged community build upon releasing nightly version. -# Script sends request to CB Jenkins instance to start the build for given released Scala version -# Prints url of created job to stdout -# -# Requirement: -# - the latest (nightly) version of scala should be published - -set -u - -if [ $# -ne 2 ]; then - echo "Wrong number of script arguments, expected , got $#: $@" - exit 1 -fi - -CB_ENDPOINT=https://scala3.westeurope.cloudapp.azure.com -CB_BUILD_TOKEN="$1" -SCALA_VERSION="$2" - -startRunResponse=$(curl "${CB_ENDPOINT}/job/runBuild/buildWithParameters?token=${CB_BUILD_TOKEN}&publishedScalaVersion=${SCALA_VERSION}" -v 2>&1) -echo "${startRunResponse}" -queueItem=$(echo "${startRunResponse}" | grep -oP "< Location: \K[\w\d:/.//]+") -# Wait until Jenkins does acknowledge the build (max 1 min ) -for i in {1..12}; do - buildUrl=$(curl -s "${queueItem}/api/json?tree=executable[url]" | jq .executable.url) - if [[ "null" == "${buildUrl}" ]]; then - echo "Waiting for build start..." - sleep 5 - else - echo "Created build url: ${buildUrl}" - exit 0 - fi -done - -# Set error if failed to resolve build url -exit 1 diff --git a/.gitignore b/.gitignore index 4ac67ddfbb06..5240662741bb 100644 --- a/.gitignore +++ b/.gitignore @@ -93,3 +93,5 @@ compiler/test-coursier/run/*.jar # docs related contributors.js content-contributors.css +docs/_spec/_site/ +docs/_spec/.jekyll-metadata diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 3267d1f02700..90496bcd0c0a 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,60 +1,5 @@ -# Dotty Developer Guidelines +# Contributing to Dotty -These guidelines are meant to be a living document that should be changed and adapted as needed. We encourage changes that make it easier to achieve our goals in an efficient way. - -## General Workflow - -This is the process for committing code to the Scala project. There are of course exceptions to these rules, for example minor changes to comments and documentation, fixing a broken build etc. - -1. Make sure you have signed the [Scala CLA](https://www.lightbend.com/contribute/cla/scala), if not, sign it. -2. Before starting to work on a feature or a fix, it's good practice to ensure that: - 1. There is a ticket for your work in the project's [issue tracker](https://github.com/lampepfl/dotty/issues); - 2. The ticket has been discussed and prioritized by the team. -3. You should always perform your work in its own Git branch. The branch should be given a descriptive name that explains its intent. Some teams also like adding the ticket number and/or the [GitHub](http://github.com) user ID to the branch name, these details is up to each of the individual teams. (See below for more details on branch naming.) -4. When the feature or fix is completed you should open a [Pull Request](https://help.github.com/articles/using-pull-requests) on GitHub. -5. The Pull Request should be reviewed by other maintainers (as many as feasible/practical). 
Note that a reviewer can also be an outside contributor—members of Typesafe or VirtusLab and independent contributors are encouraged to participate in the review process. It is not a closed process. Please try to avoid conflict of interest—the spirit of the review process is to evenly distribute the understanding of our code base across its maintainers as well as to load balance quality assurance. Assigning a review to a "sure win" reviewer is not a good long-term solution. -6. After the review, you should resolve issues brought up by the reviewers as needed (pushing a new commit to address reviewers' comments), iterating until the reviewers give their thumbs up, the "LGTM" (acronym for "Looks Good To Me"). -7. Once the code has passed review the Pull Request can be merged into the distribution. - -## Pull Request Requirements - -In order for a Pull Request to be considered, it has to meet these requirements: - -1. Live up to the current code standard: - - Not violate [DRY](https://www.oreilly.com/library/view/97-things-every/9780596809515/ch30.html). - - [Boy Scout Rule](https://www.oreilly.com/library/view/97-things-every/9780596809515/ch08.html) should be applied. -2. Tests are of paramount importance. -3. The code must be well documented in the project's standard documentation format (see the ‘Documentation’ section below). - -If *all* of these requirements are not met then the code should **not** be merged into the distribution, and need not even be reviewed. - -## Documentation - -All contributed code should come accompanied by documentation. Pull requests containing undocumented code will not be accepted. Both user-facing Scaladoc comments, as well as committer-facing internal documentation (i.e. essential design decisions that other maintainers should know about should be placed inline with line comments `//`) should be accompanying all contributed code where possible. - - -## Work In Progress - -It is ok to work on a public feature branch in the GitHub repository. Something that can sometimes be useful for early feedback etc. If so, then it is preferable to name the branch accordingly. This can be done by either prefixing the name with ``wip-`` as in ‘Work In Progress’, or use hierarchical names like ``wip/..``, ``feature/..`` or ``topic/..``. Either way is fine as long as it is clear that it is work in progress and not ready for merge. This work can temporarily have a lower standard. However, to be merged into master it will have to go through the regular process outlined above, with Pull Request, review etc.. - -Also, to facilitate both well-formed commits and working together, the ``wip`` and ``feature``/``topic`` identifiers also have special meaning. Any branch labeled with ``wip`` is considered “git-unstable” and may be rebased and have its history rewritten. Any branch with ``feature``/``topic`` in the name is considered “stable” enough for others to depend on when a group is working on a feature. - -## Creating Commits And Writing Commit Messages - -Follow these guidelines when creating public commits and writing commit messages. - -1. If your work spans multiple local commits (for example; if you do safe point commits while working in a feature branch or work in a branch for long time doing merges/rebases etc.) then please do not commit it all but rewrite the history by squashing the commits into one large commit which is accompanied by a detailed commit message for (as discussed in the following sections). 
For more info, see the article: [Git Workflow](http://sandofsky.com/blog/git-workflow.html). Additionally, every commit should be able to be used in isolation—that is, each commit must build and pass all tests. -2. The first line should be a descriptive sentence about what the commit is doing. It should be possible to fully understand what the commit does by just reading this single line. It is **not ok** to only list the ticket number, type "minor fix" or similar. If the commit has a corresponding ticket, include a reference to the ticket number, prefixed with "Closes #", at the beginning of the first line followed by the title of the ticket, assuming that it aptly and concisely summarizes the commit in a single line. If the commit is a small fix, then you are done. If not, go to 3. -3. Following the single line description (ideally no more than 70 characters long) should be a blank line followed by an enumerated list with the details of the commit. -4. Add keywords for your commit (depending on the degree of automation we reach, the list may change over time): - * ``Review by @githubuser`` - will notify the reviewer via GitHub. Everyone is encouraged to give feedback, however. (Remember that @-mentions will result in notifications also when pushing to a WIP branch, so please only include this in your commit message when you're ready for your pull request to be reviewed. Alternatively, you may request a review in the pull request's description.) - * ``Fix/Fixing/Fixes/Close/Closing/Refs #ticket`` - if you want to mark the ticket as fixed in the issue tracker (Assembla understands this). - * ``backport to _branch name_`` - if the fix needs to be cherry-picked to another branch (like 2.9.x, 2.10.x, etc) - -Example: - - Closes #2 Fixes the build - - - Details 1 - - Details 2 - - Details 3 +Firstly, thanks for being willing to contribute to Dotty! Head on over to the +[Scala 3 Contributing +Guide](https://dotty.epfl.ch/docs/contributing/index.html), which should have all the info you're looking for. 
diff --git a/MAINTENANCE.md b/MAINTENANCE.md index d1309a6b404d..54e74f7cb7ca 100644 --- a/MAINTENANCE.md +++ b/MAINTENANCE.md @@ -61,28 +61,36 @@ At the end of their supervision period, the supervisor reports to the team durin The following is the list of all the principal areas of the compiler and the core team members who are responsible for their maintenance: +### Compiler - Parser: @odersky - Typer: @odersky, @smarter, (@dwijnand) - Erasure: @smarter, @odersky - Enums: @bishabosha +- Derivation & Mirrors: @bishabosha, (@dwijnand) - Export: @bishabosha, @odersky - Pattern Matching: @dwijnand, (@liufengyun), @sjrd - Inline: @nicolasstucki, @odersky -- Metaprogramming (Quotes, Reflect, Staging): @nicolasstucki, @aherlihy -- Match types: @OlivierBlanvillain, @dwijnand -- GADT: @abgruszecki, @dwijnand -- Scaladoc: @KacperFKorban, @BarkingBad, @pikinier20 -- Initialization checker: @olhotak, @liufengyun, @anatoliykmetyuk +- Metaprogramming (Quotes, Reflect, Staging): @nicolasstucki, @jchyb +- Match types: @sjrd, @dwijnand, @Decel +- GADT: @dwijnand, @Linyxus +- Initialization checker: @olhotak, @liufengyun - Safe nulls: @noti0na1, @olhotak +- Lazy vals: @szymon-rd, @sjrd - tailrec: @sjrd, @mbovel - JS backend: @sjrd -- forward compat (-scala-release): @prolativ, @Kordyjan, (@nicolasstucki) -- Benchmarks: @anatoliykmetyuk, @mbovel -- REPL: @dwijnand, @anatoliykmetyuk, @prolativ +- JVM backend: @sjrd +- Java-compat: @smarter + +### Tooling +- REPL: @dwijnand, @prolativ +- Scaladoc: @Florian3k +- SemanticDB: @tanishiking +- Coverage: @TheElectronWill +- Linting (especially unused warnings) / Reporting UX: @szymon-rd + +### Infrastructure - CI: @anatoliykmetyuk - Community Build: @anatoliykmetyuk +- Open Community Build: @WojciechMazur - Vulpix: @dwijnand, @prolativ -- JVM backend: @Kordyjan, (@sjrd) -- Derivation & Mirrors: @bishabosha, (@dwijnand) -- Linting (especially unused warnings) / Reporting UX: VirtusLab TBD? 
-- Java-compat: @Kordyjan +- Benchmarks: @mbovel diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccess.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccess.scala index a2aaf3e88570..d413458d0049 100644 --- a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccess.scala +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccess.scala @@ -18,7 +18,7 @@ class InitializedAccess { @Setup def prepare: Unit = { - holder = new LazyHolder + holder = new LazyHolder holder.value } diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessAny.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessAny.scala index 5a6b4ae1686d..8c75f6bb11a2 100644 --- a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessAny.scala +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessAny.scala @@ -18,7 +18,7 @@ class InitializedAccessAny { @Setup def prepare: Unit = { - holder = new LazyAnyHolder + holder = new LazyAnyHolder holder.value } diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessGeneric.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessGeneric.scala index a95cb1de2980..a9fecae6281e 100644 --- a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessGeneric.scala +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessGeneric.scala @@ -18,7 +18,7 @@ class InitializedAccessGeneric { @Setup def prepare: Unit = { - holder = new LazyGenericHolder[String]("foo") + holder = new LazyGenericHolder[String]("foo") holder.value } diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessString.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessString.scala index 25cc0f9b288d..e6c6cd5eb2e3 100644 --- a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessString.scala +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessString.scala @@ -18,7 +18,7 @@ class InitializedAccessString { @Setup def prepare: Unit = { - holder = new LazyStringHolder + holder = new LazyStringHolder holder.value } diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/LazyVals.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/LazyVals.scala index 68379f9e142c..26ebb7b9d356 100644 --- a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/LazyVals.scala +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/LazyVals.scala @@ -21,7 +21,7 @@ object LazyVals { } } } - + class LazyHolder { lazy val value: List[Int] = { diff --git a/changelogs/3.3.1-RC1.md b/changelogs/3.3.1-RC1.md new file mode 100644 index 000000000000..e7d9f8f87ea9 --- /dev/null +++ b/changelogs/3.3.1-RC1.md @@ -0,0 +1,299 @@ +# Highlights of the release + +- Support records in JavaParsers [#16762](https://github.com/lampepfl/dotty/pull/16762) +- Port JVM backend refactor from Scala 2 [#15322](https://github.com/lampepfl/dotty/pull/15322) + +# Other changes and fixes + +## Backend + +- Disallow mixins where super calls bind to vals [#16908](https://github.com/lampepfl/dotty/pull/16908) +- Fix #15107: Avoid re-emitting a LineNumber after only LabelNodes. 
[#16813](https://github.com/lampepfl/dotty/pull/16813) + +## Coverage + +- Fix #17042: Preserve the shape of secondary ctors in instrumentCoverage. [#17111](https://github.com/lampepfl/dotty/pull/17111) + +## Default parameters + +- Dupe fix when finding default arg getters [#17058](https://github.com/lampepfl/dotty/pull/17058) + +## Documentation + +- Fix: ensure syntax blocks for ebnf are marked as such [#16837](https://github.com/lampepfl/dotty/pull/16837) + +## Erasure + +- Handle `@companionClass` and `@companionMethod` meta-annotations [#17091](https://github.com/lampepfl/dotty/pull/17091) + +## Extension Methods + +- Support extension methods imported from different objects [#17050](https://github.com/lampepfl/dotty/pull/17050) + +## GADTs + +- Fix tuple member selection so it works with GADT healing [#16766](https://github.com/lampepfl/dotty/pull/16766) +- Fix upper bound constraints, that are higher-kinded [#16744](https://github.com/lampepfl/dotty/pull/16744) +- Split out immutable GadtConstraint [#16602](https://github.com/lampepfl/dotty/pull/16602) + +## Implicits + +- Improve subtyping check for not yet eta-expanded higher kinded types [#17139](https://github.com/lampepfl/dotty/pull/17139) +- Harden tpd.Apply/TypeApply in case of errors [#16887](https://github.com/lampepfl/dotty/pull/16887) +- Try to be more subtle when inferring type parameters of class parents [#16896](https://github.com/lampepfl/dotty/pull/16896) +- Include `P` in the implicit scope of `P.this.type` [#17088](https://github.com/lampepfl/dotty/pull/17088) +- Do not compute `protoFormal` if `param.tpt` is empty [#18288](http://github.com/lampepfl/dotty/pull/18288) + +## Incremental Compilation + +- Fix under-compilation when the method type in a SAM changes [#16996](https://github.com/lampepfl/dotty/pull/16996) + +## Infrastructure + +- Set reference version to 3.3.0-RC6 [#17504](https://github.com/lampepfl/dotty/pull/17504) +- Fix #17119: Download Coursier from GitHub directly [#17141](https://github.com/lampepfl/dotty/pull/17141) + +## Inline + +- Remove NamedArg from inlined arguments [#17228](https://github.com/lampepfl/dotty/pull/17228) +- Don't generate a Select for a TermRef with NoPrefix [#16754](https://github.com/lampepfl/dotty/pull/16754) +- Prepare bodies of inline forwarders eagerly [#16757](https://github.com/lampepfl/dotty/pull/16757) +- Do not remove inline method implementations until PruneErasedDefs [#17408](https://github.com/lampepfl/dotty/pull/17408) + +## Java Interop + +- ClassfileParser: allow missing param names (for JDK 21) [#17536](https://github.com/lampepfl/dotty/pull/17536) + +## Linting + +- Improve -Wunused: locals, privates with unset vars warning #16639 [#17160](https://github.com/lampepfl/dotty/pull/17160) +- Fix wunused false positive when deriving alias type [#17157](https://github.com/lampepfl/dotty/pull/17157) +- Port `-Wnonunit-statement` setting for dotty [#16936](https://github.com/lampepfl/dotty/pull/16936) + +## Match Types + +- Fix #13757: Explicitly disallow higher-kinded scrutinees of match types. 
[#17322](https://github.com/lampepfl/dotty/pull/17322) +- Fix match type reduction with wildcard type arguments [#17065](https://github.com/lampepfl/dotty/pull/17065) +- Fix check whether classtag can be generated for match types [#16708](https://github.com/lampepfl/dotty/pull/16708) + +## Parser + +- Allow lines starting with `.` to fall outside previous indentation widths [#17056](https://github.com/lampepfl/dotty/pull/17056) + +## Pattern Matching + +- Fix #11541: Specialize ClassTag[T] in exhaustivity check [#17385](https://github.com/lampepfl/dotty/pull/17385) +- Check outer class prefixes in type projections when pattern matching [#17136](https://github.com/lampepfl/dotty/pull/17136) +- Make unchecked cases non-`@unchecked` and non-unreachable [#16958](https://github.com/lampepfl/dotty/pull/16958) +- Fix #16899: Better handle X instanceOf P where X is T1 | T2 [#17382](https://github.com/lampepfl/dotty/pull/17382) +- Fix regression in exhaustivity of HK types [#18303](http://github.com/lampepfl/dotty/pull/18303) + +## Pickling + +- ClassfileParser: Avoid cycle when accessing companion in inner class lookup [#16882](https://github.com/lampepfl/dotty/pull/16882) + +## Polyfunctions + +- Fix type aliases in beta-reduction of polyfunctions [#17054](https://github.com/lampepfl/dotty/pull/17054) + +## Quotes + +- Register `paramProxy` and `thisProxy` in `Quote` type [#17541](https://github.com/lampepfl/dotty/pull/17541) +- Only check newVal/newMethod privateWithin on -Xcheck-macros [#17437](https://github.com/lampepfl/dotty/pull/17437) +- Unencode quote and splice trees [#17342](https://github.com/lampepfl/dotty/pull/17342) +- Correctly type Expr.ofTupleFromSeq for arity > 22 [#17261](https://github.com/lampepfl/dotty/pull/17261) +- Use TermRef to distinguish distinct Type[T] instances [#17205](https://github.com/lampepfl/dotty/pull/17205) +- Check level consistency of SingletonTypeTree as a type [#17209](https://github.com/lampepfl/dotty/pull/17209) +- Fix splice type variable pattern detection [#17048](https://github.com/lampepfl/dotty/pull/17048) +- Avoid creation of `@SplicedType` quote local references [#17051](https://github.com/lampepfl/dotty/pull/17051) +- Dealias type references when healing types in quotes [#17049](https://github.com/lampepfl/dotty/pull/17049) +- Replace quoted type variables in signature of HOAS pattern result [#16951](https://github.com/lampepfl/dotty/pull/16951) +- Beta-reduce directly applied PolymorphicFunction [#16623](https://github.com/lampepfl/dotty/pull/16623) +- Use `Object.toString` for `quoted.{Expr, Type}` [#16663](https://github.com/lampepfl/dotty/pull/16663) +- Fix Splicer.isEscapedVariable [#16838](https://github.com/lampepfl/dotty/pull/16838) +- Fix references to class members defined in quotes [#17107](https://github.com/lampepfl/dotty/pull/17107) +- Handle pickled forward references in pickled expressions [#16855](https://github.com/lampepfl/dotty/pull/16855) +- Fix #16615 - crashes of path dependent types in spliced Type.of [#16773](https://github.com/lampepfl/dotty/pull/16773) +- Disallow local term references in staged types [#16362](https://github.com/lampepfl/dotty/pull/16362) +- Refactor level checking / type healing logic [#17082](https://github.com/lampepfl/dotty/pull/17082) +- Dealias quoted types when staging [#17059](https://github.com/lampepfl/dotty/pull/17059) +- Fix quotes with references to path dependent types [#17081](https://github.com/lampepfl/dotty/pull/17081) +- Make arguments order in quote hole deterministic 
[#17405](https://github.com/lampepfl/dotty/pull/17405) +- Only transform the body of the quote with QuoteTransformer [#17451](https://github.com/lampepfl/dotty/pull/17451) +- Place staged type captures in Quote AST [#17424](https://github.com/lampepfl/dotty/pull/17424) +- Add SplicePattern AST to parse and type quote pattern splices [#17396](https://github.com/lampepfl/dotty/pull/17396) +- Dealias types in `New` before matching quotes [#17615](https://github.com/lampepfl/dotty/pull/17615) + +## Reflection + +- -Xcheck-macros: add hint when a symbol is created twice [#16733](https://github.com/lampepfl/dotty/pull/16733) +- Assert that symbols created using reflect API have correct privateWithin symbols [#17352](https://github.com/lampepfl/dotty/pull/17352) +- Fix reflect.LambdaType type test [#16972](https://github.com/lampepfl/dotty/pull/16972) +- Improve `New`/`Select` -Ycheck message [#16746](https://github.com/lampepfl/dotty/pull/16746) +- Improve error message for CyclicReference in macros [#16749](https://github.com/lampepfl/dotty/pull/16749) + +## REPL + +- Always load REPL classes in macros including the output directory [#16866](https://github.com/lampepfl/dotty/pull/16866) + +## Reporting + +- Improve missing argument list error [#17126](https://github.com/lampepfl/dotty/pull/17126) +- Improve implicit parameter error message with aliases [#17125](https://github.com/lampepfl/dotty/pull/17125) +- Improve "constructor proxy shadows outer" handling [#17154](https://github.com/lampepfl/dotty/pull/17154) +- Clarify ambiguous reference error message [#16137](https://github.com/lampepfl/dotty/pull/16137) +- Hint about forbidden combination of implicit values and conversions [#16735](https://github.com/lampepfl/dotty/pull/16735) +- Attach explanation message to diagnostic message [#16787](https://github.com/lampepfl/dotty/pull/16787) +- Propagate implicit search errors from implicit macros [#16840](https://github.com/lampepfl/dotty/pull/16840) +- Detail UnapplyInvalidReturnType error message [#17167](https://github.com/lampepfl/dotty/pull/17167) +- Add way to debug -Xcheck-macros tree checking [#16973](https://github.com/lampepfl/dotty/pull/16973) +- Enrich and finesse compiler crash reporting [#17031](https://github.com/lampepfl/dotty/pull/17031) +- Allow @implicitNotFound messages as explanations [#16893](https://github.com/lampepfl/dotty/pull/16893) +- Include top-level symbols from same file in outer ambiguity error [#17033](https://github.com/lampepfl/dotty/pull/17033) +- Do not issue deprecation warnings when declaring deprecated case classes [#17165](https://github.com/lampepfl/dotty/pull/17165) + +## Scala-JS + +- Fix #17344: Make implicit references to this above dynamic imports explicit. [#17357](https://github.com/lampepfl/dotty/pull/17357) +- Fix #12621: Better error message for JS trait ctor param. [#16811](https://github.com/lampepfl/dotty/pull/16811) +- Fix #16801: Handle Closure's of s.r.FunctionXXL. [#16809](https://github.com/lampepfl/dotty/pull/16809) +- Fix #17549: Unify how Memoize and Constructors decide what fields need storing. 
[#17560](https://github.com/lampepfl/dotty/pull/17560) + +## Scaladoc + +- Feat: Add a blog configuration with yaml [#17214](https://github.com/lampepfl/dotty/pull/17214) +- Don't render the "$" for module [#17302](https://github.com/lampepfl/dotty/pull/17302) +- Fix: Add scrollbar to the sidebar [#17203](https://github.com/lampepfl/dotty/pull/17203) +- Scaladoc: fix crash when processing extends call [#17260](https://github.com/lampepfl/dotty/pull/17260) +- Fix: Modify the CSS so that the logo of the generated documentation is adaptive [#17172](https://github.com/lampepfl/dotty/pull/17172) +- Fix: Remove the duplicate parameter when generating the scaladoc. [#17097](https://github.com/lampepfl/dotty/pull/17097) +- Fix: padding top in mobile version [#17019](https://github.com/lampepfl/dotty/pull/17019) +- Fix: tap target of the menu in Mobile version [#17018](https://github.com/lampepfl/dotty/pull/17018) +- Scaladoc: Fix expand icon not changing on anchor link [#17053](https://github.com/lampepfl/dotty/pull/17053) +- Scaladoc: fix inkuire generation for PolyTypes [#17129](https://github.com/lampepfl/dotty/pull/17129) +- Re port scroll bar [#17463](https://github.com/lampepfl/dotty/pull/17463) +- Handle empty files and truncated YAML front matter [#17527](https://github.com/lampepfl/dotty/pull/17527) + +## SemanticDB + +- Make sure symbol exists before calling owner [#16860](https://github.com/lampepfl/dotty/pull/16860) +- Support LambdaType (convert from HKTypeLambda) [#16056](https://github.com/lampepfl/dotty/pull/16056) + +## Specification + +- Apply `class-shadowing.md` to the Spec [#16839](https://github.com/lampepfl/dotty/pull/16839) +- Adding base for future Spec into the compiler repo [#16825](https://github.com/lampepfl/dotty/pull/16825) + +## Standard Library + +- Optimization: avoid NotGiven allocations [#17090](https://github.com/lampepfl/dotty/pull/17090) + +## Tooling + +- Disable `ExtractSemanticDB` phase when writing to output directory defined as JAR. 
[#16790](https://github.com/lampepfl/dotty/pull/16790) +- Print owner of bind symbol with -Yprint-debug-owners [#16854](https://github.com/lampepfl/dotty/pull/16854) +- Small fixes to allow using Metals with scaladoc with sbt [#16816](https://github.com/lampepfl/dotty/pull/16816) + +## Transform + +- Move CrossVersionChecks before FirstTransform [#17301](https://github.com/lampepfl/dotty/pull/17301) +- Fix needsOuterIfReferenced [#17159](https://github.com/lampepfl/dotty/pull/17159) +- Drop incorrect super accessor in trait subclass [#17062](https://github.com/lampepfl/dotty/pull/17062) +- Generate toString only for synthetic companions of case classes [#16890](https://github.com/lampepfl/dotty/pull/16890) +- Check trait constructor for accessibility even if not called at Typer [#17094](https://github.com/lampepfl/dotty/pull/17094) +- Fix #17435: A simpler fix [#17436](https://github.com/lampepfl/dotty/pull/17436) + +## Typer + +- Preserve type bounds for inlined definitions in posttyper [#17190](https://github.com/lampepfl/dotty/pull/17190) +- Change logic to find members of recursive types [#17386](https://github.com/lampepfl/dotty/pull/17386) +- Recognize named arguments in isFunctionWithUnknownParamType [#17161](https://github.com/lampepfl/dotty/pull/17161) +- Better comparisons for type projections [#17092](https://github.com/lampepfl/dotty/pull/17092) +- Allow selectDynamic and applyDynamic to be extension methods [#17106](https://github.com/lampepfl/dotty/pull/17106) +- Fix use of accessibleFrom when finding default arg getters [#16977](https://github.com/lampepfl/dotty/pull/16977) +- Map class literal constant types [#16988](https://github.com/lampepfl/dotty/pull/16988) +- Always use adapted type in withDenotation [#16901](https://github.com/lampepfl/dotty/pull/16901) +- Restrict captureWildcards to only be used if needed [#16799](https://github.com/lampepfl/dotty/pull/16799) +- Don't capture wildcards if in closure or by-name [#16732](https://github.com/lampepfl/dotty/pull/16732) +- Infer: Don't minimise to Nothing if there's an upper bound [#16786](https://github.com/lampepfl/dotty/pull/16786) +- Perform Matchable check only if type test is needed [#16824](https://github.com/lampepfl/dotty/pull/16824) +- Don't eta expand unary varargs methods [#16892](https://github.com/lampepfl/dotty/pull/16892) +- Fix beta-reduction with `Nothing` and `null` args [#16938](https://github.com/lampepfl/dotty/pull/16938) +- Generate kind-correct wildcards when selecting from a wildcard [#17025](https://github.com/lampepfl/dotty/pull/17025) +- Fix #16405 ctd - wildcards prematurely resolving to Nothing [#16764](https://github.com/lampepfl/dotty/pull/16764) +- Test: add regression test for #7790 [#17473](https://github.com/lampepfl/dotty/pull/17473) +- Properly handle `AnyVal`s as refinement members of `Selectable`s [#16286](https://github.com/lampepfl/dotty/pull/16286) +- Fix `accessibleType` for package object prefixes [#18057](https://github.com/lampepfl/dotty/pull/18057) +- Add clause for protected visibility from package objects [#18134](https://github.com/lampepfl/dotty/pull/18134) +- Revert "Include top-level symbols from same file in outer ambiguity error" [#17438](https://github.com/lampepfl/dotty/pull/17438) +- Heal stage inconsistent prefixes of type projections [#18239](https://github.com/lampepfl/dotty/pull/18239) +- Fix regression #17245: Overloaded methods with ClassTags [#18286](http://github.com/lampepfl/dotty/pull/18286) +- Disallow taking singleton types of packages again 
[#18232](http://github.com/lampepfl/dotty/pull/18232) +- A slightly more conservative version of #14218 [#18352](http://github.com/lampepfl/dotty/pull/18352) +- Record failures to adapt application arguments [#18269](http://github.com/lampepfl/dotty/pull/18269) +- Refine `infoDependsOnPrefix` [#18204](https://github.com/lampepfl/dotty/pull/18204) +- Tweak selection from self types [#18467](https://github.com/lampepfl/dotty/pull/18467) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.3.0..3.3.1-RC1` these are: + +``` + 148 Nicolas Stucki + 65 Martin Odersky + 51 Szymon Rodziewicz + 49 Dale Wijnand + 49 Quentin Bernet + 38 Chris Kipp + 19 David Hua + 18 Lucas + 18 ysthakur + 15 Fengyun Liu + 15 Paweł Marks + 14 Guillaume Martres + 14 Jamie Thompson + 11 Sébastien Doeraene + 9 Timothée Andres + 8 Kacper Korban + 7 Matt Bovel + 7 Som Snytt + 6 Julien Richard-Foy + 6 Lucas Leblanc + 5 Michał Pałka + 4 Anatolii Kmetiuk + 4 Guillaume Raffin + 4 Paul Coral + 4 Wojciech Mazur + 4 Yichen Xu + 3 Decel + 3 Jan Chyb + 2 Adrien Piquerez + 2 Arman Bilge + 2 Carl + 2 Florian3k + 2 Kenji Yoshida + 2 Michael Pilquist + 2 Natsu Kagami + 2 Seth Tisue + 2 Tomasz Godzik + 2 Vasil Vasilev + 2 Yadu Krishnan + 1 Bersier + 1 Flavio Brasil + 1 Jan-Pieter van den Heuvel + 1 Lukas Rytz + 1 Miles Yucht + 1 Mohammad Yousuf Minhaj Zia + 1 Ondra Pelech + 1 Philippus + 1 Rikito Taniguchi + 1 Simon R + 1 brandonspark + 1 github-actions[bot] + 1 liang3zy22 + 1 s.bazarsadaev + 1 Łukasz Wroński + +``` diff --git a/changelogs/3.3.1-RC2.md b/changelogs/3.3.1-RC2.md new file mode 100644 index 000000000000..f21bfa074b66 --- /dev/null +++ b/changelogs/3.3.1-RC2.md @@ -0,0 +1,16 @@ +# Backported fixes + +- Dealias types in `New` before matching quotes [#17615](https://github.com/lampepfl/dotty/pull/17615) +- Fix `accessibleType` for package object prefixes [#18057](https://github.com/lampepfl/dotty/pull/18057) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.3.1-RC1..3.3.1-RC2` these are: + +``` + 2 Martin Odersky + 2 Paweł Marks + 1 Nicolas Stucki +``` diff --git a/changelogs/3.3.1-RC3.md b/changelogs/3.3.1-RC3.md new file mode 100644 index 000000000000..eb19f40b10dc --- /dev/null +++ b/changelogs/3.3.1-RC3.md @@ -0,0 +1,15 @@ +# Backported fixes + +- Add clause for protected visibility from package objects [#18134](https://github.com/lampepfl/dotty/pull/18134) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.3.1-RC2..3.3.1-RC3` these are: + +``` + 2 Paweł Marks + 1 Nicolas Stucki + +``` diff --git a/changelogs/3.3.1-RC4.md b/changelogs/3.3.1-RC4.md new file mode 100644 index 000000000000..7d95e0258fad --- /dev/null +++ b/changelogs/3.3.1-RC4.md @@ -0,0 +1,15 @@ +# Backported fixes + +- Revert "Include top-level symbols from same file in outer ambiguity error" [#17438](https://github.com/lampepfl/dotty/pull/17438) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.3.1-RC3..3.3.1-RC4` these are: + +``` + 2 Paweł Marks + 1 Nicolas Stucki + +``` diff --git a/changelogs/3.3.1-RC5.md b/changelogs/3.3.1-RC5.md new file mode 100644 index 000000000000..e0bfc2a7fea8 --- /dev/null +++ b/changelogs/3.3.1-RC5.md @@ -0,0 +1,22 @@ +# Backported fixes + +- Heal stage inconsistent prefixes of type 
projections [#18239](https://github.com/lampepfl/dotty/pull/18239) +- Fix regression #17245: Overloaded methods with ClassTags [#18286](http://github.com/lampepfl/dotty/pull/18286) +- Disallow taking singleton types of packages again [#18232](http://github.com/lampepfl/dotty/pull/18232) +- A slightly more conservative version of #14218 [#18352](http://github.com/lampepfl/dotty/pull/18352) +- Record failures to adapt application arguments [#18269](http://github.com/lampepfl/dotty/pull/18269) +- Fix regression in exhaustivity of HK types [#18303](http://github.com/lampepfl/dotty/pull/18303) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.3.1-RC4..3.3.1-RC5` these are: + +``` + 5 Dale Wijnand + 2 Martin Odersky + 2 Paweł Marks + 1 Jan Chyb + 1 Nicolas Stucki +``` diff --git a/changelogs/3.3.1-RC6.md b/changelogs/3.3.1-RC6.md new file mode 100644 index 000000000000..96181855f1a0 --- /dev/null +++ b/changelogs/3.3.1-RC6.md @@ -0,0 +1,17 @@ +# Backported fixes + +- Refine `infoDependsOnPrefix` [#18204](https://github.com/lampepfl/dotty/pull/18204) +- Do not compute `protoFormal` if `param.tpt` is empty [#18288](http://github.com/lampepfl/dotty/pull/18288) +- Revert "Normalize match type usage during implicit lookup" [#18440](http://github.com/lampepfl/dotty/pull/18440) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.3.1-RC5..3.3.1-RC6` these are: + +``` + 3 Paweł Marks + 2 Martin Odersky + 1 Nicolas Stucki +``` diff --git a/changelogs/3.3.1-RC7.md b/changelogs/3.3.1-RC7.md new file mode 100644 index 000000000000..f8f093a18d11 --- /dev/null +++ b/changelogs/3.3.1-RC7.md @@ -0,0 +1,16 @@ +# Backported fixes + +- Tweak selection from self types [#18467](https://github.com/lampepfl/dotty/pull/18467) +- Revert "Add reflect `defn.FunctionClass` overloads" [#18473](http://github.com/lampepfl/dotty/pull/18473) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.3.1-RC6..3.3.1-RC7` these are: + +``` + 3 Paweł Marks + 1 Martin Odersky + +``` diff --git a/changelogs/3.3.1.md b/changelogs/3.3.1.md new file mode 100644 index 000000000000..5bbd6eb2861c --- /dev/null +++ b/changelogs/3.3.1.md @@ -0,0 +1,287 @@ +# Highlights of the release + +- Support records in JavaParsers [#16762](https://github.com/lampepfl/dotty/pull/16762) +- Port JVM backend refactor from Scala 2 [#15322](https://github.com/lampepfl/dotty/pull/15322) + +# Other changes and fixes + +## Backend + +- Disallow mixins where super calls bind to vals [#16908](https://github.com/lampepfl/dotty/pull/16908) +- Fix #15107: Avoid re-emitting a LineNumber after only LabelNodes. [#16813](https://github.com/lampepfl/dotty/pull/16813) + +## Coverage + +- Fix #17042: Preserve the shape of secondary ctors in instrumentCoverage. 
[#17111](https://github.com/lampepfl/dotty/pull/17111) + +## Default parameters + +- Dupe fix when finding default arg getters [#17058](https://github.com/lampepfl/dotty/pull/17058) + +## Documentation + +- Fix: ensure syntax blocks for ebnf are marked as such [#16837](https://github.com/lampepfl/dotty/pull/16837) + +## Erasure + +- Handle `@companionClass` and `@companionMethod` meta-annotations [#17091](https://github.com/lampepfl/dotty/pull/17091) + +## Extension Methods + +- Support extension methods imported from different objects [#17050](https://github.com/lampepfl/dotty/pull/17050) + +## GADTs + +- Fix tuple member selection so it works with GADT healing [#16766](https://github.com/lampepfl/dotty/pull/16766) +- Fix upper bound constraints, that are higher-kinded [#16744](https://github.com/lampepfl/dotty/pull/16744) +- Split out immutable GadtConstraint [#16602](https://github.com/lampepfl/dotty/pull/16602) + +## Implicits + +- Improve subtyping check for not yet eta-expanded higher kinded types [#17139](https://github.com/lampepfl/dotty/pull/17139) +- Harden tpd.Apply/TypeApply in case of errors [#16887](https://github.com/lampepfl/dotty/pull/16887) +- Try to be more subtle when inferring type parameters of class parents [#16896](https://github.com/lampepfl/dotty/pull/16896) +- Include `P` in the implicit scope of `P.this.type` [#17088](https://github.com/lampepfl/dotty/pull/17088) + +## Incremental Compilation + +- Fix under-compilation when the method type in a SAM changes [#16996](https://github.com/lampepfl/dotty/pull/16996) + +## Infrastructure + +- Set reference version to 3.3.0-RC6 [#17504](https://github.com/lampepfl/dotty/pull/17504) +- Fix #17119: Download Coursier from GitHub directly [#17141](https://github.com/lampepfl/dotty/pull/17141) + +## Inline + +- Remove NamedArg from inlined arguments [#17228](https://github.com/lampepfl/dotty/pull/17228) +- Don't generate a Select for a TermRef with NoPrefix [#16754](https://github.com/lampepfl/dotty/pull/16754) +- Prepare bodies of inline forwarders eagerly [#16757](https://github.com/lampepfl/dotty/pull/16757) +- Do not remove inline method implementations until PruneErasedDefs [#17408](https://github.com/lampepfl/dotty/pull/17408) + +## Java Interop + +- ClassfileParser: allow missing param names (for JDK 21) [#17536](https://github.com/lampepfl/dotty/pull/17536) + +## Linting + +- Improve -Wunused: locals, privates with unset vars warning #16639 [#17160](https://github.com/lampepfl/dotty/pull/17160) +- Fix wunused false positive when deriving alias type [#17157](https://github.com/lampepfl/dotty/pull/17157) +- Port `-Wnonunit-statement` setting for dotty [#16936](https://github.com/lampepfl/dotty/pull/16936) + +## Match Types + +- Normalize match type usage during implicit lookup [#17457](https://github.com/lampepfl/dotty/pull/17457) +- Fix #13757: Explicitly disallow higher-kinded scrutinees of match types. 
[#17322](https://github.com/lampepfl/dotty/pull/17322) +- Fix match type reduction with wildcard type arguments [#17065](https://github.com/lampepfl/dotty/pull/17065) +- Fix check whether classtag can be generated for match types [#16708](https://github.com/lampepfl/dotty/pull/16708) + +## Parser + +- Allow lines starting with `.` to fall outside previous indentation widths [#17056](https://github.com/lampepfl/dotty/pull/17056) + +## Pattern Matching + +- Fix #11541: Specialize ClassTag[T] in exhaustivity check [#17385](https://github.com/lampepfl/dotty/pull/17385) +- Check outer class prefixes in type projections when pattern matching [#17136](https://github.com/lampepfl/dotty/pull/17136) +- Make unchecked cases non-`@unchecked` and non-unreachable [#16958](https://github.com/lampepfl/dotty/pull/16958) +- Fix #16899: Better handle X instanceOf P where X is T1 | T2 [#17382](https://github.com/lampepfl/dotty/pull/17382) + +## Pickling + +- ClassfileParser: Avoid cycle when accessing companion in inner class lookup [#16882](https://github.com/lampepfl/dotty/pull/16882) + +## Polyfunctions + +- Fix type aliases in beta-reduction of polyfunctions [#17054](https://github.com/lampepfl/dotty/pull/17054) + +## Quotes + +- Register `paramProxy` and `thisProxy` in `Quote` type [#17541](https://github.com/lampepfl/dotty/pull/17541) +- Only check newVal/newMethod privateWithin on -Xcheck-macros [#17437](https://github.com/lampepfl/dotty/pull/17437) +- Unencode quote and splice trees [#17342](https://github.com/lampepfl/dotty/pull/17342) +- Correctly type Expr.ofTupleFromSeq for arity > 22 [#17261](https://github.com/lampepfl/dotty/pull/17261) +- Use TermRef to distinguish distinct Type[T] instances [#17205](https://github.com/lampepfl/dotty/pull/17205) +- Check level consistency of SingletonTypeTree as a type [#17209](https://github.com/lampepfl/dotty/pull/17209) +- Fix splice type variable pattern detection [#17048](https://github.com/lampepfl/dotty/pull/17048) +- Avoid creation of `@SplicedType` quote local references [#17051](https://github.com/lampepfl/dotty/pull/17051) +- Dealias type references when healing types in quotes [#17049](https://github.com/lampepfl/dotty/pull/17049) +- Replace quoted type variables in signature of HOAS pattern result [#16951](https://github.com/lampepfl/dotty/pull/16951) +- Beta-reduce directly applied PolymorphicFunction [#16623](https://github.com/lampepfl/dotty/pull/16623) +- Use `Object.toString` for `quoted.{Expr, Type}` [#16663](https://github.com/lampepfl/dotty/pull/16663) +- Fix Splicer.isEscapedVariable [#16838](https://github.com/lampepfl/dotty/pull/16838) +- Fix references to class members defined in quotes [#17107](https://github.com/lampepfl/dotty/pull/17107) +- Handle pickled forward references in pickled expressions [#16855](https://github.com/lampepfl/dotty/pull/16855) +- Fix #16615 - crashes of path dependent types in spliced Type.of [#16773](https://github.com/lampepfl/dotty/pull/16773) +- Disallow local term references in staged types [#16362](https://github.com/lampepfl/dotty/pull/16362) +- Refactor level checking / type healing logic [#17082](https://github.com/lampepfl/dotty/pull/17082) +- Dealias quoted types when staging [#17059](https://github.com/lampepfl/dotty/pull/17059) +- Fix quotes with references to path dependent types [#17081](https://github.com/lampepfl/dotty/pull/17081) +- Make arguments order in quote hole deterministic [#17405](https://github.com/lampepfl/dotty/pull/17405) +- Only transform the body of the quote with 
QuoteTransformer [#17451](https://github.com/lampepfl/dotty/pull/17451) +- Place staged type captures in Quote AST [#17424](https://github.com/lampepfl/dotty/pull/17424) +- Add SplicePattern AST to parse and type quote pattern splices [#17396](https://github.com/lampepfl/dotty/pull/17396) + +## Reflection + +- -Xcheck-macros: add hint when a symbol is created twice [#16733](https://github.com/lampepfl/dotty/pull/16733) +- Assert that symbols created using reflect API have correct privateWithin symbols [#17352](https://github.com/lampepfl/dotty/pull/17352) +- Fix reflect.LambdaType type test [#16972](https://github.com/lampepfl/dotty/pull/16972) +- Improve `New`/`Select` -Ycheck message [#16746](https://github.com/lampepfl/dotty/pull/16746) +- Improve error message for CyclicReference in macros [#16749](https://github.com/lampepfl/dotty/pull/16749) +- Add reflect `defn.FunctionClass` overloads [#16849](https://github.com/lampepfl/dotty/pull/16849) + +## REPL + +- Always load REPL classes in macros including the output directory [#16866](https://github.com/lampepfl/dotty/pull/16866) + +## Reporting + +- Improve missing argument list error [#17126](https://github.com/lampepfl/dotty/pull/17126) +- Improve implicit parameter error message with aliases [#17125](https://github.com/lampepfl/dotty/pull/17125) +- Improve "constructor proxy shadows outer" handling [#17154](https://github.com/lampepfl/dotty/pull/17154) +- Clarify ambiguous reference error message [#16137](https://github.com/lampepfl/dotty/pull/16137) +- Hint about forbidden combination of implicit values and conversions [#16735](https://github.com/lampepfl/dotty/pull/16735) +- Attach explanation message to diagnostic message [#16787](https://github.com/lampepfl/dotty/pull/16787) +- Propagate implicit search errors from implicit macros [#16840](https://github.com/lampepfl/dotty/pull/16840) +- Detail UnapplyInvalidReturnType error message [#17167](https://github.com/lampepfl/dotty/pull/17167) +- Add way to debug -Xcheck-macros tree checking [#16973](https://github.com/lampepfl/dotty/pull/16973) +- Enrich and finesse compiler crash reporting [#17031](https://github.com/lampepfl/dotty/pull/17031) +- Allow @implicitNotFound messages as explanations [#16893](https://github.com/lampepfl/dotty/pull/16893) +- Include top-level symbols from same file in outer ambiguity error [#17033](https://github.com/lampepfl/dotty/pull/17033) +- Do not issue deprecation warnings when declaring deprecated case classes [#17165](https://github.com/lampepfl/dotty/pull/17165) + +## Scala-JS + +- Fix #17344: Make implicit references to this above dynamic imports explicit. [#17357](https://github.com/lampepfl/dotty/pull/17357) +- Fix #12621: Better error message for JS trait ctor param. [#16811](https://github.com/lampepfl/dotty/pull/16811) +- Fix #16801: Handle Closure's of s.r.FunctionXXL. [#16809](https://github.com/lampepfl/dotty/pull/16809) +- Fix #17549: Unify how Memoize and Constructors decide what fields need storing. 
[#17560](https://github.com/lampepfl/dotty/pull/17560) + +## Scaladoc + +- Feat: Add a blog configuration with yaml [#17214](https://github.com/lampepfl/dotty/pull/17214) +- Don't render the "$" for module [#17302](https://github.com/lampepfl/dotty/pull/17302) +- Fix: Add scrollbar to the sidebar [#17203](https://github.com/lampepfl/dotty/pull/17203) +- Scaladoc: fix crash when processing extends call [#17260](https://github.com/lampepfl/dotty/pull/17260) +- Fix: Modify the CSS so that the logo of the generated documentation is adaptive [#17172](https://github.com/lampepfl/dotty/pull/17172) +- Fix: Remove the duplicate parameter when generating the scaladoc. [#17097](https://github.com/lampepfl/dotty/pull/17097) +- Fix: padding top in mobile version [#17019](https://github.com/lampepfl/dotty/pull/17019) +- Fix: tap target of the menu in Mobile version [#17018](https://github.com/lampepfl/dotty/pull/17018) +- Scaladoc: Fix expand icon not changing on anchor link [#17053](https://github.com/lampepfl/dotty/pull/17053) +- Scaladoc: fix inkuire generation for PolyTypes [#17129](https://github.com/lampepfl/dotty/pull/17129) +- Re port scroll bar [#17463](https://github.com/lampepfl/dotty/pull/17463) +- Handle empty files and truncated YAML front matter [#17527](https://github.com/lampepfl/dotty/pull/17527) + +## SemanticDB + +- Make sure symbol exists before calling owner [#16860](https://github.com/lampepfl/dotty/pull/16860) +- Support LambdaType (convert from HKTypeLambda) [#16056](https://github.com/lampepfl/dotty/pull/16056) + +## Specification + +- Apply `class-shadowing.md` to the Spec [#16839](https://github.com/lampepfl/dotty/pull/16839) +- Adding base for future Spec into the compiler repo [#16825](https://github.com/lampepfl/dotty/pull/16825) + +## Standard Library + +- Optimization: avoid NotGiven allocations [#17090](https://github.com/lampepfl/dotty/pull/17090) + +## Tooling + +- Disable `ExtractSemanticDB` phase when writing to output directory defined as JAR. 
[#16790](https://github.com/lampepfl/dotty/pull/16790) +- Print owner of bind symbol with -Yprint-debug-owners [#16854](https://github.com/lampepfl/dotty/pull/16854) +- Small fixes to allow using Metals with scaladoc with sbt [#16816](https://github.com/lampepfl/dotty/pull/16816) + +## Transform + +- Move CrossVersionChecks before FirstTransform [#17301](https://github.com/lampepfl/dotty/pull/17301) +- Fix needsOuterIfReferenced [#17159](https://github.com/lampepfl/dotty/pull/17159) +- Drop incorrect super accessor in trait subclass [#17062](https://github.com/lampepfl/dotty/pull/17062) +- Generate toString only for synthetic companions of case classes [#16890](https://github.com/lampepfl/dotty/pull/16890) +- Check trait constructor for accessibility even if not called at Typer [#17094](https://github.com/lampepfl/dotty/pull/17094) +- Fix #17435: A simpler fix [#17436](https://github.com/lampepfl/dotty/pull/17436) + +## Typer + +- Preserve type bounds for inlined definitions in posttyper [#17190](https://github.com/lampepfl/dotty/pull/17190) +- Change logic to find members of recursive types [#17386](https://github.com/lampepfl/dotty/pull/17386) +- Recognize named arguments in isFunctionWithUnknownParamType [#17161](https://github.com/lampepfl/dotty/pull/17161) +- Better comparisons for type projections [#17092](https://github.com/lampepfl/dotty/pull/17092) +- Allow selectDynamic and applyDynamic to be extension methods [#17106](https://github.com/lampepfl/dotty/pull/17106) +- Fix use of accessibleFrom when finding default arg getters [#16977](https://github.com/lampepfl/dotty/pull/16977) +- Map class literal constant types [#16988](https://github.com/lampepfl/dotty/pull/16988) +- Always use adapted type in withDenotation [#16901](https://github.com/lampepfl/dotty/pull/16901) +- Restrict captureWildcards to only be used if needed [#16799](https://github.com/lampepfl/dotty/pull/16799) +- Don't capture wildcards if in closure or by-name [#16732](https://github.com/lampepfl/dotty/pull/16732) +- Infer: Don't minimise to Nothing if there's an upper bound [#16786](https://github.com/lampepfl/dotty/pull/16786) +- Perform Matchable check only if type test is needed [#16824](https://github.com/lampepfl/dotty/pull/16824) +- Don't eta expand unary varargs methods [#16892](https://github.com/lampepfl/dotty/pull/16892) +- Fix beta-reduction with `Nothing` and `null` args [#16938](https://github.com/lampepfl/dotty/pull/16938) +- Generate kind-correct wildcards when selecting from a wildcard [#17025](https://github.com/lampepfl/dotty/pull/17025) +- Fix #16405 ctd - wildcards prematurely resolving to Nothing [#16764](https://github.com/lampepfl/dotty/pull/16764) +- Test: add regression test for #7790 [#17473](https://github.com/lampepfl/dotty/pull/17473) +- Properly handle `AnyVal`s as refinement members of `Selectable`s [#16286](https://github.com/lampepfl/dotty/pull/16286) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.3.0..3.3.1` these are: + +``` + 152 Nicolas Stucki + 73 Martin Odersky + 54 Dale Wijnand + 51 Szymon Rodziewicz + 49 Quentin Bernet + 38 Chris Kipp + 31 Paweł Marks + 19 David Hua + 18 Lucas + 18 ysthakur + 15 Fengyun Liu + 14 Guillaume Martres + 14 Jamie Thompson + 11 Sébastien Doeraene + 9 Timothée Andres + 8 Kacper Korban + 7 Matt Bovel + 7 Som Snytt + 6 Julien Richard-Foy + 6 Lucas Leblanc + 5 Michał Pałka + 4 Anatolii Kmetiuk + 4 Guillaume Raffin + 4 Jan Chyb + 4 Paul Coral + 4 Wojciech Mazur + 4 
Yichen Xu + 3 Decel + 2 Adrien Piquerez + 2 Arman Bilge + 2 Carl + 2 Florian3k + 2 Kenji Yoshida + 2 Michael Pilquist + 2 Natsu Kagami + 2 Seth Tisue + 2 Tomasz Godzik + 2 Vasil Vasilev + 2 Yadu Krishnan + 1 Bersier + 1 Flavio Brasil + 1 Jan-Pieter van den Heuvel + 1 Lukas Rytz + 1 Miles Yucht + 1 Mohammad Yousuf Minhaj Zia + 1 Ondra Pelech + 1 Philippus + 1 Rikito Taniguchi + 1 Simon R + 1 brandonspark + 1 github-actions[bot] + 1 liang3zy22 + 1 s.bazarsadaev + 1 Łukasz Wroński +``` diff --git a/community-build/src/scala/dotty/communitybuild/projects.scala b/community-build/src/scala/dotty/communitybuild/projects.scala index fe3f5cfed5a2..1349c3adc3b9 100644 --- a/community-build/src/scala/dotty/communitybuild/projects.scala +++ b/community-build/src/scala/dotty/communitybuild/projects.scala @@ -140,7 +140,7 @@ final case class SbtCommunityProject( case Some(ivyHome) => List(s"-Dsbt.ivy.home=$ivyHome") case _ => Nil extraSbtArgs ++ sbtProps ++ List( - "-sbt-version", "1.8.0", + "-sbt-version", "1.8.2", "-Dsbt.supershell=false", s"-Ddotty.communitybuild.dir=$communitybuildDir", s"--addPluginSbtFile=$sbtPluginFilePath" diff --git a/community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala b/community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala index 146ad6f4f951..bf6b6d431509 100644 --- a/community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala +++ b/community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala @@ -94,6 +94,7 @@ class CommunityBuildTestC: @Test def shapeless = projects.shapeless.run() @Test def sourcecode = projects.sourcecode.run() @Test def specs2 = projects.specs2.run() + @Test def stdLib213 = projects.stdLib213.run() @Test def ujson = projects.ujson.run() @Test def upickle = projects.upickle.run() diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala b/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala index 1d559c9950f1..e7b5a0dad1bf 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala @@ -1297,7 +1297,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { .toList // `StringConcatFactory` only got added in JDK 9, so use `StringBuilder` for lower - if (classfileVersion < asm.Opcodes.V9) { + if (backendUtils.classfileVersion < asm.Opcodes.V9) { // Estimate capacity needed for the string builder val approxBuilderSize = concatArguments.view.map { diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala b/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala index 3cf7d88b9282..c36c8c546635 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala @@ -42,18 +42,19 @@ import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions * @version 1.0 * */ -trait BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { +trait BCodeHelpers extends BCodeIdiomatic { // for some reason singleton types aren't allowed in constructor calls. will need several casts in code to enforce - //import global._ - //import bTypes._ - //import coreBTypes._ import bTypes._ import tpd._ import coreBTypes._ import int.{_, given} import DottyBackendInterface._ + // We need to access GenBCode phase to get access to post-processor components. + // At this point it should always be initialized already. 
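Both hunks in this file and in `BCodeBodyBuilder` above now reach classfile-version facts through the post-processor's `backendUtils` instead of computing them in each trait. As a hedged, self-contained sketch of the JDK-9 gate used in `BCodeBodyBuilder` (the helper name is invented for illustration; only the shaded `scala.tools.asm` constants are assumed):

```scala
// Sketch only: asm.Opcodes.V1_8 == 52 and asm.Opcodes.V9 == 53, so a
// classfileVersion below V9 selects the StringBuilder-based concatenation,
// since java.lang.invoke.StringConcatFactory only exists on JDK 9+.
import scala.tools.asm

def useStringConcatFactory(classfileVersion: Int): Boolean =
  classfileVersion >= asm.Opcodes.V9

@main def demo(): Unit =
  assert(!useStringConcatFactory(asm.Opcodes.V1_8)) // JDK 8 target: StringBuilder path
  assert(useStringConcatFactory(asm.Opcodes.V9))    // JDK 9+ target: indy concatenation
```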
+ protected lazy val backendUtils = genBCodePhase.asInstanceOf[GenBCode].postProcessor.backendUtils + def ScalaATTRName: String = "Scala" def ScalaSignatureATTRName: String = "ScalaSig" @@ -64,96 +65,12 @@ trait BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { val bCodeAsmCommon: BCodeAsmCommon[int.type] = new BCodeAsmCommon(int) - /* - * must-single-thread - */ - def getFileForClassfile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = { - getFile(base, clsName, suffix) - } - - /* - * must-single-thread - */ - def getOutFolder(csym: Symbol, cName: String): AbstractFile = { - try { - outputDirectory - } catch { - case ex: Throwable => - report.error(em"Couldn't create file for class $cName\n${ex.getMessage}", ctx.source.atSpan(csym.span)) - null - } - } - final def traitSuperAccessorName(sym: Symbol): String = { val nameString = sym.javaSimpleName.toString if (sym.name == nme.TRAIT_CONSTRUCTOR) nameString else nameString + "$" } - // ----------------------------------------------------------------------------------------- - // finding the least upper bound in agreement with the bytecode verifier (given two internal names handed by ASM) - // Background: - // http://gallium.inria.fr/~xleroy/publi/bytecode-verification-JAR.pdf - // http://comments.gmane.org/gmane.comp.java.vm.languages/2293 - // https://issues.scala-lang.org/browse/SI-3872 - // ----------------------------------------------------------------------------------------- - - /* An `asm.ClassWriter` that uses `jvmWiseLUB()` - * The internal name of the least common ancestor of the types given by inameA and inameB. - * It's what ASM needs to know in order to compute stack map frames, http://asm.ow2.org/doc/developer-guide.html#controlflow - */ - final class CClassWriter(flags: Int) extends asm.ClassWriter(flags) { - - /** - * This method is thread-safe: it depends only on the BTypes component, which does not depend - * on global. TODO @lry move to a different place where no global is in scope, on bTypes. - */ - override def getCommonSuperClass(inameA: String, inameB: String): String = { - val a = classBTypeFromInternalName(inameA) - val b = classBTypeFromInternalName(inameB) - val lub = a.jvmWiseLUB(b) - val lubName = lub.internalName - assert(lubName != "scala/Any") - lubName // ASM caches the answer during the lifetime of a ClassWriter. We outlive that. Not sure whether caching on our side would improve things. - } - } - - /* - * must-single-thread - */ - def initBytecodeWriter(): BytecodeWriter = { - (None: Option[AbstractFile] /*getSingleOutput*/) match { // todo: implement - case Some(f) if f.hasExtension("jar") => - new DirectToJarfileWriter(f.file) - case _ => - factoryNonJarBytecodeWriter() - } - } - - /* - * Populates the InnerClasses JVM attribute with `refedInnerClasses`. See also the doc on inner - * classes in BTypes.scala. - * - * `refedInnerClasses` may contain duplicates, need not contain the enclosing inner classes of - * each inner class it lists (those are looked up and included). - * - * This method serializes in the InnerClasses JVM attribute in an appropriate order, - * not necessarily that given by `refedInnerClasses`. 
- * - * can-multi-thread - */ - final def addInnerClasses(jclass: asm.ClassVisitor, declaredInnerClasses: List[ClassBType], refedInnerClasses: List[ClassBType]): Unit = { - // sorting ensures nested classes are listed after their enclosing class thus satisfying the Eclipse Java compiler - val allNestedClasses = new mutable.TreeSet[ClassBType]()(Ordering.by(_.internalName)) - allNestedClasses ++= declaredInnerClasses - refedInnerClasses.foreach(allNestedClasses ++= _.enclosingNestedClassesChain) - for nestedClass <- allNestedClasses - do { - // Extract the innerClassEntry - we know it exists, enclosingNestedClassesChain only returns nested classes. - val Some(e) = nestedClass.innerClassAttributeEntry: @unchecked - jclass.visitInnerClass(e.name, e.outerName, e.innerName, e.flags) - } - } /* * can-multi-thread @@ -680,7 +597,7 @@ trait BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { val mirrorClass = new asm.tree.ClassNode mirrorClass.visit( - classfileVersion, + backendUtils.classfileVersion, bType.info.flags, mirrorName, null /* no java-generic-signature */, diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala b/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala index b86efb7cacb1..42f8ef7f4ef6 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala @@ -19,52 +19,13 @@ import dotty.tools.dotc.report */ trait BCodeIdiomatic { val int: DottyBackendInterface - final lazy val bTypes = new BTypesFromSymbols[int.type](int) + val bTypes: BTypesFromSymbols[int.type] import int.{_, given} import bTypes._ import coreBTypes._ - - lazy val target = - val releaseValue = Option(ctx.settings.javaOutputVersion.value).filter(_.nonEmpty) - val targetValue = Option(ctx.settings.XuncheckedJavaOutputVersion.value).filter(_.nonEmpty) - val defaultTarget = "8" - (releaseValue, targetValue) match - case (Some(release), None) => release - case (None, Some(target)) => target - case (Some(release), Some(_)) => - report.warning(s"The value of ${ctx.settings.XuncheckedJavaOutputVersion.name} was overridden by ${ctx.settings.javaOutputVersion.name}") - release - case (None, None) => "8" // least supported version by default - - - // Keep synchronized with `minTargetVersion` and `maxTargetVersion` in ScalaSettings - lazy val classfileVersion: Int = target match { - case "8" => asm.Opcodes.V1_8 - case "9" => asm.Opcodes.V9 - case "10" => asm.Opcodes.V10 - case "11" => asm.Opcodes.V11 - case "12" => asm.Opcodes.V12 - case "13" => asm.Opcodes.V13 - case "14" => asm.Opcodes.V14 - case "15" => asm.Opcodes.V15 - case "16" => asm.Opcodes.V16 - case "17" => asm.Opcodes.V17 - case "18" => asm.Opcodes.V18 - case "19" => asm.Opcodes.V19 - case "20" => asm.Opcodes.V20 - } - - lazy val majorVersion: Int = (classfileVersion & 0xFF) - lazy val emitStackMapFrame = (majorVersion >= 50) - - val extraProc: Int = - import GenBCodeOps.addFlagIf - asm.ClassWriter.COMPUTE_MAXS - .addFlagIf(emitStackMapFrame, asm.ClassWriter.COMPUTE_FRAMES) - lazy val JavaStringBuilderClassName = jlStringBuilderRef.internalName val CLASS_CONSTRUCTOR_NAME = "" diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala b/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala index 9c1ff1f26763..0a11fb898b48 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala @@ -271,7 +271,7 @@ trait BCodeSkelBuilder extends BCodeHelpers { val flags = 
javaFlags(claszSymbol) val thisSignature = getGenericSignature(claszSymbol, claszSymbol.owner) - cnode.visit(classfileVersion, flags, + cnode.visit(backendUtils.classfileVersion, flags, thisName, thisSignature, superClass, interfaceNames.toArray) @@ -556,11 +556,17 @@ trait BCodeSkelBuilder extends BCodeHelpers { case _ => false } ) } def lineNumber(tree: Tree): Unit = { + @tailrec + def getNonLabelNode(a: asm.tree.AbstractInsnNode): asm.tree.AbstractInsnNode = a match { + case a: asm.tree.LabelNode => getNonLabelNode(a.getPrevious) + case _ => a + } + if (!emitLines || !tree.span.exists) return; val nr = ctx.source.offsetToLine(tree.span.point) + 1 if (nr != lastEmittedLineNr) { lastEmittedLineNr = nr - lastInsn match { + getNonLabelNode(lastInsn) match { case lnn: asm.tree.LineNumberNode => // overwrite previous landmark as no instructions have been emitted for it lnn.line = nr diff --git a/compiler/src/dotty/tools/backend/jvm/BTypes.scala b/compiler/src/dotty/tools/backend/jvm/BTypes.scala index 57bd343b6658..5539bf44aa17 100644 --- a/compiler/src/dotty/tools/backend/jvm/BTypes.scala +++ b/compiler/src/dotty/tools/backend/jvm/BTypes.scala @@ -14,7 +14,9 @@ import scala.tools.asm * This representation is immutable and independent of the compiler data structures, hence it can * be queried by concurrent threads. */ -abstract class BTypes { +abstract class BTypes { self => + val frontendAccess: PostProcessorFrontendAccess + import frontendAccess.{frontendSynch} val int: DottyBackendInterface import int.given @@ -37,10 +39,7 @@ abstract class BTypes { */ def classBTypeFromInternalName(internalName: String) = classBTypeFromInternalNameMap(internalName) - // Some core BTypes are required here, in class BType, where no Global instance is available. - // The Global is only available in the subclass BTypesFromSymbols. We cannot depend on the actual - // implementation (CoreBTypesProxy) here because it has members that refer to global.Symbol. - val coreBTypes: CoreBTypesProxyGlobalIndependent[this.type] + val coreBTypes: CoreBTypes { val bTypes: self.type} import coreBTypes._ /** @@ -862,3 +861,12 @@ abstract class BTypes { */ /*final*/ case class MethodNameAndType(name: String, methodType: MethodBType) } + +object BTypes { + /** + * A marker for strings that represent class internal names. + * Ideally the type would be incompatible with String, for example by making it a value class. + * But that would create overhead in a Collection[InternalName]. + */ + type InternalName = String +} diff --git a/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala b/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala index 54dafe6f0032..884dd19ee64f 100644 --- a/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala +++ b/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala @@ -14,20 +14,14 @@ import dotty.tools.dotc.core.Symbols._ import dotty.tools.dotc.core.Phases.Phase import dotty.tools.dotc.transform.SymUtils._ import dotty.tools.dotc.core.StdNames +import dotty.tools.dotc.core.Phases /** * This class mainly contains the method classBTypeFromSymbol, which extracts the necessary * information from a symbol and its type to create the corresponding ClassBType. It requires * access to the compiler (global parameter). - * - * The mixin CoreBTypes defines core BTypes that are used in the backend. Building these BTypes - * uses classBTypeFromSymbol, hence requires access to the compiler (global). 
- * - * BTypesFromSymbols extends BTypes because the implementation of BTypes requires access to some - * of the core btypes. They are declared in BTypes as abstract members. Note that BTypes does - * not have access to the compiler instance. */ -class BTypesFromSymbols[I <: DottyBackendInterface](val int: I) extends BTypes { +class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAccess: PostProcessorFrontendAccess) extends BTypes { import int.{_, given} import DottyBackendInterface.{symExtensions, _} @@ -37,39 +31,18 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I) extends BTypes { val bCodeAsmCommon: BCodeAsmCommon[int.type ] = new BCodeAsmCommon(int) import bCodeAsmCommon._ - // Why the proxy, see documentation of class [[CoreBTypes]]. - val coreBTypes: CoreBTypesProxy[this.type] = new CoreBTypesProxy[this.type](this) - import coreBTypes._ - - final def intializeCoreBTypes(): Unit = { - coreBTypes.setBTypes(new CoreBTypes[this.type](this)) - } - - private[this] val perRunCaches: Caches = new Caches { - def newAnyRefMap[K <: AnyRef, V](): mutable.AnyRefMap[K, V] = new mutable.AnyRefMap[K, V]() - def newWeakMap[K, V](): mutable.WeakHashMap[K, V] = new mutable.WeakHashMap[K, V]() - def recordCache[T <: Clearable](cache: T): T = cache - def newMap[K, V](): mutable.HashMap[K, V] = new mutable.HashMap[K, V]() - def newSet[K](): mutable.Set[K] = new mutable.HashSet[K] - } - - // TODO remove abstraction - private abstract class Caches { - def recordCache[T <: Clearable](cache: T): T - def newWeakMap[K, V](): collection.mutable.WeakHashMap[K, V] - def newMap[K, V](): collection.mutable.HashMap[K, V] - def newSet[K](): collection.mutable.Set[K] - def newAnyRefMap[K <: AnyRef, V](): collection.mutable.AnyRefMap[K, V] + val coreBTypes = new CoreBTypesFromSymbols[I]{ + val bTypes: BTypesFromSymbols.this.type = BTypesFromSymbols.this } + import coreBTypes._ - @threadUnsafe protected lazy val classBTypeFromInternalNameMap = { - perRunCaches.recordCache(collection.concurrent.TrieMap.empty[String, ClassBType]) - } + @threadUnsafe protected lazy val classBTypeFromInternalNameMap = + collection.concurrent.TrieMap.empty[String, ClassBType] /** * Cache for the method classBTypeFromSymbol. */ - @threadUnsafe private lazy val convertedClasses = perRunCaches.newMap[Symbol, ClassBType]() + @threadUnsafe private lazy val convertedClasses = collection.mutable.HashMap.empty[Symbol, ClassBType] /** * The ClassBType for a class symbol `sym`. diff --git a/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala b/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala new file mode 100644 index 000000000000..2eaaccdd441d --- /dev/null +++ b/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala @@ -0,0 +1,182 @@ +package dotty.tools.backend.jvm + +import scala.tools.asm +import scala.tools.asm.Handle +import scala.tools.asm.tree.InvokeDynamicInsnNode +import asm.tree.ClassNode +import scala.collection.mutable +import scala.jdk.CollectionConverters._ +import dotty.tools.dotc.report + +import scala.language.unsafeNulls + +/** + * This component hosts tools and utilities used in the backend that require access to a `BTypes` + * instance. 
+ */ +class BackendUtils(val postProcessor: PostProcessor) { + import postProcessor.{bTypes, frontendAccess} + import frontendAccess.{compilerSettings} + import bTypes.* + import coreBTypes.jliLambdaMetaFactoryAltMetafactoryHandle + + // Keep synchronized with `minTargetVersion` and `maxTargetVersion` in ScalaSettings + lazy val classfileVersion: Int = compilerSettings.target match { + case "8" => asm.Opcodes.V1_8 + case "9" => asm.Opcodes.V9 + case "10" => asm.Opcodes.V10 + case "11" => asm.Opcodes.V11 + case "12" => asm.Opcodes.V12 + case "13" => asm.Opcodes.V13 + case "14" => asm.Opcodes.V14 + case "15" => asm.Opcodes.V15 + case "16" => asm.Opcodes.V16 + case "17" => asm.Opcodes.V17 + case "18" => asm.Opcodes.V18 + case "19" => asm.Opcodes.V19 + case "20" => asm.Opcodes.V20 + case "21" => asm.Opcodes.V21 + } + + lazy val extraProc: Int = { + import GenBCodeOps.addFlagIf + val majorVersion: Int = (classfileVersion & 0xFF) + val emitStackMapFrame = (majorVersion >= 50) + asm.ClassWriter.COMPUTE_MAXS + .addFlagIf(emitStackMapFrame, asm.ClassWriter.COMPUTE_FRAMES) + } + + def collectSerializableLambdas(classNode: ClassNode): Array[Handle] = { + val indyLambdaBodyMethods = new mutable.ArrayBuffer[Handle] + for (m <- classNode.methods.asScala) { + val iter = m.instructions.iterator + while (iter.hasNext) { + val insn = iter.next() + insn match { + case indy: InvokeDynamicInsnNode + if indy.bsm == jliLambdaMetaFactoryAltMetafactoryHandle => + import java.lang.invoke.LambdaMetafactory.FLAG_SERIALIZABLE + val metafactoryFlags = indy.bsmArgs(3).asInstanceOf[Integer].toInt + val isSerializable = (metafactoryFlags & FLAG_SERIALIZABLE) != 0 + if isSerializable then + val implMethod = indy.bsmArgs(1).asInstanceOf[Handle] + indyLambdaBodyMethods += implMethod + case _ => + } + } + } + indyLambdaBodyMethods.toArray + } + + /* + * Add: + * + * private static Object $deserializeLambda$(SerializedLambda l) { + * try return indy[scala.runtime.LambdaDeserialize.bootstrap, targetMethodGroup$0](l) + * catch { + * case i: IllegalArgumentException => + * try return indy[scala.runtime.LambdaDeserialize.bootstrap, targetMethodGroup$1](l) + * catch { + * case i: IllegalArgumentException => + * ... + * return indy[scala.runtime.LambdaDeserialize.bootstrap, targetMethodGroup${NUM_GROUPS-1}](l) + * } + * + * We use invokedynamic here to enable caching within the deserializer without needing to + * host a static field in the enclosing class. This allows us to add this method to interfaces + * that define lambdas in default methods. + * + * SI-10232: we can't pass an arbitrary number of method handles to the final varargs parameter of the bootstrap + * method due to a limitation in the JVM. Instead, we emit a separate invokedynamic bytecode for each group of target + * methods. + */ + def addLambdaDeserialize(classNode: ClassNode, implMethodsArray: Array[Handle]): Unit = { + import asm.Opcodes._ + import bTypes._ + import coreBTypes._ + + val cw = classNode + + // Make sure that the ClassBTypes of all types that are used in the code generated + // here (e.g. java/util/Map) are initialized. Initializing a ClassBType adds it to + // `classBTypeFromInternalNameMap`. When writing the classfile, the asm ClassWriter computes + // stack map frames and invokes the `getCommonSuperClass` method. This method expects all + // ClassBTypes mentioned in the source code to exist in the map.
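As a hedged aside before the method body continues: the SI-10232 grouping described above is plain arithmetic plus `grouped`. The name `groupTargets` below is invented for the sketch; `targetMethodGroupLimit` mirrors the `255 - 1 - 3` computation that follows:

```scala
// Sketch only: chunk serializable-lambda implementation handles so that each
// chunk fits a single invokedynamic bootstrap call. The JVM caps method handle
// arity at 255 slots; one slot goes to the receiver and three to the fixed
// bootstrap arguments, leaving 251 varargs positions per group.
val targetMethodGroupLimit = 255 - 1 - 3

def groupTargets[H](implMethods: Seq[H]): Seq[Seq[H]] =
  implMethods.grouped(targetMethodGroupLimit).toSeq

// 600 handles yield groups of 251, 251 and 98; the generated $deserializeLambda$
// tries each group in turn, chained through IllegalArgumentException handlers.
@main def demoGroups(): Unit =
  assert(groupTargets(1 to 600).map(_.size) == Seq(251, 251, 98))
```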
+ + val serlamObjDesc = MethodBType(jliSerializedLambdaRef :: Nil, ObjectRef).descriptor + + val mv = cw.visitMethod(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$deserializeLambda$", serlamObjDesc, null, null) + def emitLambdaDeserializeIndy(targetMethods: Seq[Handle]): Unit = { + mv.visitVarInsn(ALOAD, 0) + mv.visitInvokeDynamicInsn("lambdaDeserialize", serlamObjDesc, jliLambdaDeserializeBootstrapHandle, targetMethods: _*) + } + + val targetMethodGroupLimit = 255 - 1 - 3 // JVM limit. See MAX_MH_ARITY in CallSite.java + val groups: Array[Array[Handle]] = implMethodsArray.grouped(targetMethodGroupLimit).toArray + val numGroups = groups.length + + import scala.tools.asm.Label + val initialLabels = Array.fill(numGroups - 1)(new Label()) + val terminalLabel = new Label + def nextLabel(i: Int) = if (i == numGroups - 2) terminalLabel else initialLabels(i + 1) + + for ((label, i) <- initialLabels.iterator.zipWithIndex) { + mv.visitTryCatchBlock(label, nextLabel(i), nextLabel(i), jlIllegalArgExceptionRef.internalName) + } + for ((label, i) <- initialLabels.iterator.zipWithIndex) { + mv.visitLabel(label) + emitLambdaDeserializeIndy(groups(i).toIndexedSeq) + mv.visitInsn(ARETURN) + } + mv.visitLabel(terminalLabel) + emitLambdaDeserializeIndy(groups(numGroups - 1).toIndexedSeq) + mv.visitInsn(ARETURN) + } + + /** + * Visit the class node and collect all referenced nested classes. + */ + def collectNestedClasses(classNode: ClassNode): (List[ClassBType], List[ClassBType]) = { + // type InternalName = String + val c = new NestedClassesCollector[ClassBType](nestedOnly = true) { + def declaredNestedClasses(internalName: InternalName): List[ClassBType] = + bTypes.classBTypeFromInternalName(internalName).info.memberClasses + + def getClassIfNested(internalName: InternalName): Option[ClassBType] = { + val c = bTypes.classBTypeFromInternalName(internalName) + Option.when(c.isNestedClass)(c) + } + + def raiseError(msg: String, sig: String, e: Option[Throwable]): Unit = { + // don't crash on invalid generic signatures + } + } + c.visit(classNode) + (c.declaredInnerClasses.toList, c.referredInnerClasses.toList) + } + + /* + * Populates the InnerClasses JVM attribute with `refedInnerClasses`. See also the doc on inner + * classes in BTypes.scala. + * + * `refedInnerClasses` may contain duplicates, need not contain the enclosing inner classes of + * each inner class it lists (those are looked up and included). + * + * This method serializes in the InnerClasses JVM attribute in an appropriate order, + * not necessarily that given by `refedInnerClasses`. + * + * can-multi-thread + */ + final def addInnerClasses(jclass: asm.ClassVisitor, declaredInnerClasses: List[ClassBType], refedInnerClasses: List[ClassBType]): Unit = { + // sorting ensures nested classes are listed after their enclosing class thus satisfying the Eclipse Java compiler + val allNestedClasses = new mutable.TreeSet[ClassBType]()(Ordering.by(_.internalName)) + allNestedClasses ++= declaredInnerClasses + refedInnerClasses.foreach(allNestedClasses ++= _.enclosingNestedClassesChain) + for nestedClass <- allNestedClasses + do { + // Extract the innerClassEntry - we know it exists, enclosingNestedClassesChain only returns nested classes.
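One more hedged note, on the sort in `addInnerClasses` above: ordering by `internalName` lists an enclosing class before its nested classes because a nested class's internal name extends the enclosing name as a prefix. A minimal standard-library illustration:

```scala
// Sketch only: lexicographic order on internal names puts enclosing classes
// first, which is the property the Eclipse Java compiler relies on.
import scala.collection.mutable

@main def demoOrder(): Unit =
  val names = mutable.TreeSet("p/A$B$C", "p/A", "p/A$B")
  assert(names.toList == List("p/A", "p/A$B", "p/A$B$C"))
```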
+ val Some(e) = nestedClass.innerClassAttributeEntry: @unchecked + jclass.visitInnerClass(e.name, e.outerName, e.innerName, e.flags) + } + } +} diff --git a/compiler/src/dotty/tools/backend/jvm/BytecodeWriters.scala b/compiler/src/dotty/tools/backend/jvm/BytecodeWriters.scala deleted file mode 100644 index 551d4f8d809e..000000000000 --- a/compiler/src/dotty/tools/backend/jvm/BytecodeWriters.scala +++ /dev/null @@ -1,147 +0,0 @@ -package dotty.tools -package backend -package jvm - -import scala.language.unsafeNulls - -import java.io.{ DataOutputStream, FileOutputStream, IOException, File as JFile } -import java.nio.channels.ClosedByInterruptException -import dotty.tools.io._ -import dotty.tools.dotc.report - - -/** Can't output a file due to the state of the file system. */ -class FileConflictException(msg: String, val file: AbstractFile) extends IOException(msg) - -/** For the last mile: turning generated bytecode in memory into - * something you can use. Has implementations for writing to class - * files, jars, and disassembled/javap output. - */ -trait BytecodeWriters { - val int: DottyBackendInterface - import int.{_, given} - - /** - * @param clsName cls.getName - */ - def getFile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = { - def ensureDirectory(dir: AbstractFile): AbstractFile = - if (dir.isDirectory) dir - else throw new FileConflictException(s"${base.path}/$clsName$suffix: ${dir.path} is not a directory", dir) - var dir = base - val pathParts = clsName.split("[./]").toList - for (part <- pathParts.init) dir = ensureDirectory(dir) subdirectoryNamed part - ensureDirectory(dir) fileNamed pathParts.last + suffix - } - def getFile(sym: Symbol, clsName: String, suffix: String): AbstractFile = - getFile(outputDirectory, clsName, suffix) - - def factoryNonJarBytecodeWriter(): BytecodeWriter = { - val emitAsmp = None - val doDump = dumpClasses - (emitAsmp.isDefined, doDump.isDefined) match { - case (false, false) => new ClassBytecodeWriter { } - case (false, true ) => new ClassBytecodeWriter with DumpBytecodeWriter { } - case (true, false) => new ClassBytecodeWriter with AsmpBytecodeWriter - case (true, true ) => new ClassBytecodeWriter with AsmpBytecodeWriter with DumpBytecodeWriter { } - } - } - - trait BytecodeWriter { - def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile): Unit - def close(): Unit = () - } - - class DirectToJarfileWriter(jfile: JFile) extends BytecodeWriter { - val writer = new Jar(jfile).jarWriter() - - def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile): Unit = { - assert(outfile == null, - "The outfile formal param is there just because ClassBytecodeWriter overrides this method and uses it.") - val path = jclassName + ".class" - val out = writer.newOutputStream(path) - - try out.write(jclassBytes, 0, jclassBytes.length) - finally out.flush() - - report.informProgress("added " + label + path + " to jar") - } - override def close() = writer.close() - } - - /* - * The ASM textual representation for bytecode overcomes disadvantages of javap output in three areas: - * (a) pickle dingbats undecipherable to the naked eye; - * (b) two constant pools, while having identical contents, are displayed differently due to physical layout. - * (c) stack maps (classfile version 50 and up) are displayed in encoded form by javap, - * their expansion by ASM is more readable. 
- * - * */ - trait AsmpBytecodeWriter extends BytecodeWriter { - import scala.tools.asm - - private val baseDir = new Directory(None.get).createDirectory() // FIXME missing directoy - // new needed here since resolution of user-defined `apply` methods is ambiguous, and we want the constructor. - - private def emitAsmp(jclassBytes: Array[Byte], asmpFile: dotty.tools.io.File): Unit = { - val pw = asmpFile.printWriter() - try { - val cnode = new ClassNode1() - val cr = new asm.ClassReader(jclassBytes) - cr.accept(cnode, 0) - val trace = new scala.tools.asm.util.TraceClassVisitor(new java.io.PrintWriter(new java.io.StringWriter())) - cnode.accept(trace) - trace.p.print(pw) - } - finally pw.close() - } - - abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile): Unit = { - super.writeClass(label, jclassName, jclassBytes, outfile) - - val segments = jclassName.split("[./]") - val asmpFile = segments.foldLeft(baseDir: Path)(_ / _).changeExtension("asmp").toFile - - asmpFile.parent.createDirectory() - emitAsmp(jclassBytes, asmpFile) - } - } - - trait ClassBytecodeWriter extends BytecodeWriter { - def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile): Unit = { - assert(outfile != null, - "Precisely this override requires its invoker to hand out a non-null AbstractFile.") - val outstream = new DataOutputStream(outfile.bufferedOutput) - - try outstream.write(jclassBytes, 0, jclassBytes.length) - catch case ex: ClosedByInterruptException => - try - outfile.delete() // don't leave an empty or half-written classfile around after an interrupt - catch - case _: Throwable => - throw ex - finally outstream.close() - report.informProgress("wrote '" + label + "' to " + outfile) - } - } - - trait DumpBytecodeWriter extends BytecodeWriter { - val baseDir = Directory(dumpClasses.get).createDirectory() - - abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile): Unit = { - super.writeClass(label, jclassName, jclassBytes, outfile) - - val pathName = jclassName - val dumpFile = pathName.split("[./]").foldLeft(baseDir: Path) (_ / _).changeExtension("class").toFile - dumpFile.parent.createDirectory() - val outstream = new DataOutputStream(new FileOutputStream(dumpFile.path)) - - try outstream.write(jclassBytes, 0, jclassBytes.length) - finally outstream.close() - } - } - - private def dumpClasses: Option[String] = - if (ctx.settings.Ydumpclasses.isDefault) None - else Some(ctx.settings.Ydumpclasses.value) -} diff --git a/compiler/src/dotty/tools/backend/jvm/ClassfileWriter.scala b/compiler/src/dotty/tools/backend/jvm/ClassfileWriter.scala new file mode 100644 index 000000000000..08e84de92dca --- /dev/null +++ b/compiler/src/dotty/tools/backend/jvm/ClassfileWriter.scala @@ -0,0 +1,142 @@ +package dotty.tools.backend.jvm + +import java.io.{DataOutputStream, IOException, PrintWriter, StringWriter} +import java.nio.file.Files +import java.util.jar.Attributes.Name + +import scala.tools.asm.ClassReader +import scala.tools.asm.tree.ClassNode +import dotty.tools.io.* +import dotty.tools.dotc.core.Decorators.* +import dotty.tools.dotc.util.NoSourcePosition +import java.nio.charset.StandardCharsets +import java.nio.channels.ClosedByInterruptException +import BTypes.InternalName +import scala.language.unsafeNulls + +class ClassfileWriter(frontendAccess: PostProcessorFrontendAccess) { + import frontendAccess.{backendReporting, compilerSettings} + + // if non-null, 
classfiles are additionally written to this directory + private val dumpOutputDir: AbstractFile = getDirectoryOrNull(compilerSettings.dumpClassesDirectory) + + // if non-null, classfiles are written to a jar instead of the output directory + private val jarWriter: JarWriter | Null = compilerSettings.outputDirectory match { + case jar: JarArchive => + val mainClass = compilerSettings.mainClass.orElse { + // If no main class was specified, see if there's only one + // entry point among the classes going into the jar. + frontendAccess.getEntryPoints match { + case name :: Nil => + backendReporting.log(i"Unique entry point: setting Main-Class to $name") + Some(name) + case names => + if names.isEmpty then backendReporting.warning(em"No Main-Class designated or discovered.") + else backendReporting.warning(em"No Main-Class due to multiple entry points:\n ${names.mkString("\n ")}") + None + } + } + jar.underlyingSource.map{ source => + if jar.isEmpty then + val jarMainAttrs = mainClass.map(Name.MAIN_CLASS -> _).toList + new Jar(source.file).jarWriter(jarMainAttrs: _*) + else + // Writing to a non-empty JAR might be undefined behaviour, e.g. if other files were + // created using `AbstractFile.bufferedOutputStream` instead of JarWriter + backendReporting.warning(em"Tried to write to non-empty JAR: $source") + null + }.orNull + + case _ => null + } + + private def getDirectoryOrNull(dir: Option[String]): AbstractFile = + dir.map(d => new PlainDirectory(Directory(d))).orNull + + private def getFile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = { + if (base.file != null) { + fastGetFile(base, clsName, suffix) + } else { + def ensureDirectory(dir: AbstractFile): AbstractFile = + if (dir.isDirectory) dir + else throw new FileConflictException(s"${base.path}/$clsName$suffix: ${dir.path} is not a directory", dir) + var dir = base + val pathParts = clsName.split("[./]").toList + for (part <- pathParts.init) dir = ensureDirectory(dir) subdirectoryNamed part + ensureDirectory(dir) fileNamed pathParts.last + suffix + } + } + + private def fastGetFile(base: AbstractFile, clsName: String, suffix: String) = { + val index = clsName.lastIndexOf('/') + val (packageName, simpleName) = if (index > 0) { + (clsName.substring(0, index), clsName.substring(index + 1)) + } else ("", clsName) + val directory = base.file.toPath.resolve(packageName) + new PlainFile(Path(directory.resolve(simpleName + suffix))) + } + + private def writeBytes(outFile: AbstractFile, bytes: Array[Byte]): Unit = { + if (outFile.file != null) { + val outPath = outFile.file.toPath + try Files.write(outPath, bytes) + catch { + case _: java.nio.file.NoSuchFileException => + Files.createDirectories(outPath.getParent) + Files.write(outPath, bytes) + } + } else { + val out = new DataOutputStream(outFile.bufferedOutput) + try out.write(bytes, 0, bytes.length) + finally out.close() + } + } + + def writeClass(className: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): AbstractFile | Null = try { + // val writeStart = Statistics.startTimer(BackendStats.bcodeWriteTimer) + val outFile = writeToJarOrFile(className, bytes, ".class") + // Statistics.stopTimer(BackendStats.bcodeWriteTimer, writeStart) + + if (dumpOutputDir != null) { + val dumpFile = getFile(dumpOutputDir, className, ".class") + writeBytes(dumpFile, bytes) + } + outFile + } catch { + case e: FileConflictException => + backendReporting.error(em"error writing $className: ${e.getMessage}") + null + case e: java.nio.file.FileSystemException => + if
compilerSettings.debug then e.printStackTrace() + backendReporting.error(em"error writing $className: ${e.getClass.getName} ${e.getMessage}") + null + } + + def writeTasty(className: InternalName, bytes: Array[Byte]): Unit = + writeToJarOrFile(className, bytes, ".tasty") + + private def writeToJarOrFile(className: InternalName, bytes: Array[Byte], suffix: String): AbstractFile | Null = { + if jarWriter == null then + val outFolder = compilerSettings.outputDirectory + val outFile = getFile(outFolder, className, suffix) + try writeBytes(outFile, bytes) + catch case ex: ClosedByInterruptException => + try outFile.delete() // don't leave empty or half-written files around after an interrupt + catch case _: Throwable => () + finally throw ex + outFile + else + val path = className + suffix + val out = jarWriter.newOutputStream(path) + try out.write(bytes, 0, bytes.length) + finally out.flush() + null + + def close(): Unit = { + if (jarWriter != null) jarWriter.close() + } +} + +/** Can't output a file due to the state of the file system. */ +class FileConflictException(msg: String, val file: AbstractFile) extends IOException(msg) diff --git a/compiler/src/dotty/tools/backend/jvm/CodeGen.scala b/compiler/src/dotty/tools/backend/jvm/CodeGen.scala new file mode 100644 index 000000000000..c9f9e4e23d90 --- /dev/null +++ b/compiler/src/dotty/tools/backend/jvm/CodeGen.scala @@ -0,0 +1,181 @@ +package dotty.tools.backend.jvm + +import scala.language.unsafeNulls + +import dotty.tools.dotc.CompilationUnit +import dotty.tools.dotc.ast.Trees.{PackageDef, ValDef} +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.core.Phases.Phase + +import scala.collection.mutable +import scala.jdk.CollectionConverters._ +import dotty.tools.dotc.transform.SymUtils._ +import dotty.tools.dotc.interfaces +import dotty.tools.dotc.report + +import java.util.Optional +import dotty.tools.dotc.sbt.ExtractDependencies +import dotty.tools.dotc.core._ +import Contexts._ +import Phases._ +import Symbols._ +import StdNames.nme + +import java.io.DataOutputStream +import java.nio.channels.ClosedByInterruptException + +import dotty.tools.tasty.{ TastyBuffer, TastyHeaderUnpickler } + +import scala.tools.asm +import scala.tools.asm.tree._ +import tpd._ +import dotty.tools.io.AbstractFile +import dotty.tools.dotc.util.NoSourcePosition + + +class CodeGen(val int: DottyBackendInterface, val primitives: DottyPrimitives)( val bTypes: BTypesFromSymbols[int.type]) { self => + import DottyBackendInterface.symExtensions + import bTypes._ + import int.given + + private lazy val mirrorCodeGen = Impl.JMirrorBuilder() + + def genUnit(unit: CompilationUnit): GeneratedDefs = { + val generatedClasses = mutable.ListBuffer.empty[GeneratedClass] + val generatedTasty = mutable.ListBuffer.empty[GeneratedTasty] + + def genClassDef(cd: TypeDef): Unit = + try + val sym = cd.symbol + val sourceFile = unit.source.file + + def registerGeneratedClass(classNode: ClassNode, isArtifact: Boolean): Unit = + generatedClasses += GeneratedClass(classNode, sourceFile, isArtifact, onFileCreated(classNode, sym, unit.source)) + + val plainC = genClass(cd, unit) + registerGeneratedClass(plainC, isArtifact = false) + + val attrNode = + if !sym.isTopLevelModuleClass then plainC + else if sym.companionClass == NoSymbol then + val mirrorC = genMirrorClass(sym, unit) + registerGeneratedClass(mirrorC, isArtifact = true) + mirrorC + else + report.log(s"No mirror class for module with linked class: ${sym.fullName}", NoSourcePosition) + plainC + + if sym.isClass then
genTastyAndSetAttributes(sym, attrNode) + catch + case ex: Throwable => + ex.printStackTrace() + report.error(s"Error while emitting ${unit.source}\n${ex.getMessage}", NoSourcePosition) + + + def genTastyAndSetAttributes(claszSymbol: Symbol, store: ClassNode): Unit = + import Impl.createJAttribute + for (binary <- unit.pickled.get(claszSymbol.asClass)) { + generatedTasty += GeneratedTasty(store, binary) + val tasty = + val uuid = new TastyHeaderUnpickler(binary()).readHeader() + val lo = uuid.getMostSignificantBits + val hi = uuid.getLeastSignificantBits + + // TASTY attribute is created but only the UUID bytes are stored in it. + // A TASTY attribute has length 16 if and only if the .tasty file exists. + val buffer = new TastyBuffer(16) + buffer.writeUncompressedLong(lo) + buffer.writeUncompressedLong(hi) + buffer.bytes + + val dataAttr = createJAttribute(nme.TASTYATTR.mangledString, tasty, 0, tasty.length) + store.visitAttribute(dataAttr) + } + + def genClassDefs(tree: Tree): Unit = + tree match { + case EmptyTree => () + case PackageDef(_, stats) => stats foreach genClassDefs + case ValDef(_, _, _) => () // module val not emitted + case td: TypeDef => genClassDef(td) + } + + genClassDefs(unit.tpdTree) + GeneratedDefs(generatedClasses.toList, generatedTasty.toList) + } + + // Creates a callback that will be evaluated in PostProcessor after creating a file + private def onFileCreated(cls: ClassNode, claszSymbol: Symbol, sourceFile: interfaces.SourceFile): AbstractFile => Unit = clsFile => { + val (fullClassName, isLocal) = atPhase(sbtExtractDependenciesPhase) { + (ExtractDependencies.classNameAsString(claszSymbol), claszSymbol.isLocal) + } + + val className = cls.name.replace('/', '.') + if (ctx.compilerCallback != null) + ctx.compilerCallback.onClassGenerated(sourceFile, convertAbstractFile(clsFile), className) + + if (ctx.sbtCallback != null) { + val jSourceFile = sourceFile.jfile.orElse(null) + val cb = ctx.sbtCallback + if (isLocal) cb.generatedLocalClass(jSourceFile, clsFile.file) + else cb.generatedNonLocalClass(jSourceFile, clsFile.file, className, fullClassName) + } + } + + /** Convert a `dotty.tools.io.AbstractFile` into a + * `dotty.tools.dotc.interfaces.AbstractFile`. 
+ */ + private def convertAbstractFile(absfile: dotty.tools.io.AbstractFile): interfaces.AbstractFile = + new interfaces.AbstractFile { + override def name = absfile.name + override def path = absfile.path + override def jfile = Optional.ofNullable(absfile.file) + } + + private def genClass(cd: TypeDef, unit: CompilationUnit): ClassNode = { + val b = new Impl.PlainClassBuilder(unit) + b.genPlainClass(cd) + val cls = b.cnode + checkForCaseConflict(cls.name, cd.symbol) + cls + } + + private def genMirrorClass(classSym: Symbol, unit: CompilationUnit): ClassNode = { + val cls = mirrorCodeGen.genMirrorClass(classSym, unit) + checkForCaseConflict(cls.name, classSym) + cls + } + + private val lowerCaseNames = mutable.HashMap.empty[String, Symbol] + private def checkForCaseConflict(javaClassName: String, classSymbol: Symbol) = { + val lowerCaseName = javaClassName.toLowerCase + lowerCaseNames.get(lowerCaseName) match { + case None => + lowerCaseNames.put(lowerCaseName, classSymbol) + case Some(dupClassSym) => + // Order is not deterministic so we enforce lexicographic order between the duplicates for error-reporting + val (cl1, cl2) = + if (classSymbol.effectiveName.toString < dupClassSym.effectiveName.toString) (classSymbol, dupClassSym) + else (dupClassSym, classSymbol) + val same = classSymbol.effectiveName.toString == dupClassSym.effectiveName.toString + atPhase(typerPhase) { + if same then + // FIXME: This should really be an error, but then FromTasty tests fail + report.warning(s"${cl1.show} and ${cl2.showLocated} produce classes that overwrite one another", cl1.sourcePos) + else + report.warning(s"${cl1.show} differs only in case from ${cl2.showLocated}. " + + "Such classes will overwrite one another on case-insensitive filesystems.", cl1.sourcePos) + } + } + } + + sealed transparent trait ImplEarlyInit{ + val int: self.int.type = self.int + val bTypes: self.bTypes.type = self.bTypes + protected val primitives: DottyPrimitives = self.primitives + } + object Impl extends ImplEarlyInit with BCodeSyncAndTry { + class PlainClassBuilder(unit: CompilationUnit) extends SyncAndTryBuilder(unit) + } +} diff --git a/compiler/src/dotty/tools/backend/jvm/CoreBTypes.scala b/compiler/src/dotty/tools/backend/jvm/CoreBTypes.scala index d5fce3f53627..30ad6b29b9f0 100644 --- a/compiler/src/dotty/tools/backend/jvm/CoreBTypes.scala +++ b/compiler/src/dotty/tools/backend/jvm/CoreBTypes.scala @@ -7,38 +7,58 @@ import dotty.tools.dotc.core.Symbols._ import dotty.tools.dotc.transform.Erasure import scala.tools.asm.{Handle, Opcodes} import dotty.tools.dotc.core.StdNames +import BTypes.InternalName + +abstract class CoreBTypes { + val bTypes: BTypes + import bTypes._ + + def primitiveTypeMap: Map[Symbol, PrimitiveBType] + + def boxedClasses: Set[ClassBType] + + def boxedClassOfPrimitive: Map[PrimitiveBType, ClassBType] + + def boxResultType: Map[Symbol, ClassBType] + + def unboxResultType: Map[Symbol, PrimitiveBType] + + def srNothingRef : ClassBType + def srNullRef : ClassBType + + def ObjectRef : ClassBType + def StringRef : ClassBType + def jlStringBuilderRef : ClassBType + def jlStringBufferRef : ClassBType + def jlCharSequenceRef : ClassBType + def jlClassRef : ClassBType + def jlThrowableRef : ClassBType + def jlCloneableRef : ClassBType + def jiSerializableRef : ClassBType + def jlClassCastExceptionRef : ClassBType + def jlIllegalArgExceptionRef : ClassBType + def jliSerializedLambdaRef : ClassBType + + def srBoxesRuntimeRef: ClassBType + + def jliLambdaMetaFactoryMetafactoryHandle : Handle + def 
jliLambdaMetaFactoryAltMetafactoryHandle : Handle + def jliLambdaDeserializeBootstrapHandle : Handle + def jliStringConcatFactoryMakeConcatWithConstantsHandle: Handle + + def asmBoxTo : Map[BType, MethodNameAndType] + def asmUnboxTo: Map[BType, MethodNameAndType] + + def typeOfArrayOp: Map[Int, BType] +} + +abstract class CoreBTypesFromSymbols[I <: DottyBackendInterface] extends CoreBTypes { + val bTypes: BTypesFromSymbols[I] -/** - * Core BTypes and some other definitions. The initialization of these definitions requies access - * to symbols / types (global). - * - * The symbols used to initialize the ClassBTypes may change from one compiler run to the next. To - * make sure the definitions are consistent with the symbols in the current run, the - * `intializeCoreBTypes` method in BTypesFromSymbols creates a new instance of CoreBTypes in each - * compiler run. - * - * The class BTypesFromSymbols does not directly reference CoreBTypes, but CoreBTypesProxy. The - * reason is that having a `var bTypes: CoreBTypes` would not allow `import bTypes._`. Instead, the - * proxy class holds a `CoreBTypes` in a variable field and forwards to this instance. - * - * The definitions in `CoreBTypes` need to be lazy vals to break an initialization cycle. When - * creating a new instance to assign to the proxy, the `classBTypeFromSymbol` invoked in the - * constructor will actucally go through the proxy. The lazy vals make sure the instance is assigned - * in the proxy before the fields are initialized. - * - * Note: if we did not re-create the core BTypes on each compiler run, BType.classBTypeFromInternalNameMap - * could not be a perRunCache anymore: the classes defeined here need to be in that map, they are - * added when the ClassBTypes are created. The per run cache removes them, so they would be missing - * in the second run. - */ -class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: DottyBackendInterface]](val bTypes: BTFS) { import bTypes._ import int.given import DottyBackendInterface._ - - //import global._ - //import rootMirror.{requiredClass, getClassIfDefined} - //import definitions._ + import dotty.tools.dotc.core.Contexts.Context /** * Maps primitive types to their corresponding PrimitiveBType. The map is defined lexically above @@ -56,31 +76,21 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: DottyBackendInterface]](val bTyp defn.DoubleClass -> DOUBLE ) - private lazy val BOXED_UNIT : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Void]) - private lazy val BOXED_BOOLEAN : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Boolean]) - private lazy val BOXED_BYTE : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Byte]) - private lazy val BOXED_SHORT : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Short]) - private lazy val BOXED_CHAR : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Character]) - private lazy val BOXED_INT : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Integer]) - private lazy val BOXED_LONG : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Long]) - private lazy val BOXED_FLOAT : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Float]) - private lazy val BOXED_DOUBLE : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Double]) - /** * Map from primitive types to their boxed class type. Useful when pushing class literals onto the * operand stack (ldc instruction taking a class literal), see genConstant. 
*/ lazy val boxedClassOfPrimitive: Map[PrimitiveBType, ClassBType] = Map( - UNIT -> BOXED_UNIT, - BOOL -> BOXED_BOOLEAN, - BYTE -> BOXED_BYTE, - SHORT -> BOXED_SHORT, - CHAR -> BOXED_CHAR, - INT -> BOXED_INT, - LONG -> BOXED_LONG, - FLOAT -> BOXED_FLOAT, - DOUBLE -> BOXED_DOUBLE - ) + UNIT -> classBTypeFromSymbol(requiredClass[java.lang.Void]), + BOOL -> classBTypeFromSymbol(requiredClass[java.lang.Boolean]), + BYTE -> classBTypeFromSymbol(requiredClass[java.lang.Byte]), + SHORT -> classBTypeFromSymbol(requiredClass[java.lang.Short]), + CHAR -> classBTypeFromSymbol(requiredClass[java.lang.Character]), + INT -> classBTypeFromSymbol(requiredClass[java.lang.Integer]), + LONG -> classBTypeFromSymbol(requiredClass[java.lang.Long]), + FLOAT -> classBTypeFromSymbol(requiredClass[java.lang.Float]), + DOUBLE -> classBTypeFromSymbol(requiredClass[java.lang.Double]) + ) lazy val boxedClasses: Set[ClassBType] = boxedClassOfPrimitive.values.toSet @@ -114,33 +124,35 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: DottyBackendInterface]](val bTyp * names of NothingClass and NullClass can't be emitted as-is. * TODO @lry Once there's a 2.11.3 starr, use the commented argument list. The current starr crashes on the type literal `scala.runtime.Nothing$` */ - lazy val srNothingRef : ClassBType = classBTypeFromSymbol(requiredClass("scala.runtime.Nothing$")) // (requiredClass[scala.runtime.Nothing$]) - lazy val srNullRef : ClassBType = classBTypeFromSymbol(requiredClass("scala.runtime.Null$")) // (requiredClass[scala.runtime.Null$]) - - lazy val ObjectRef : ClassBType = classBTypeFromSymbol(defn.ObjectClass) - lazy val StringRef : ClassBType = classBTypeFromSymbol(defn.StringClass) - lazy val jlStringBuilderRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.StringBuilder]) - lazy val jlStringBufferRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.StringBuffer]) - lazy val jlCharSequenceRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.CharSequence]) - lazy val jlClassRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Class[_]]) - lazy val jlThrowableRef : ClassBType = classBTypeFromSymbol(defn.ThrowableClass) - lazy val jlCloneableRef : ClassBType = classBTypeFromSymbol(defn.JavaCloneableClass) // java/lang/Cloneable - lazy val jioSerializableRef : ClassBType = classBTypeFromSymbol(requiredClass[java.io.Serializable]) // java/io/Serializable - lazy val jlClassCastExceptionRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.ClassCastException]) // java/lang/ClassCastException - lazy val jlIllegalArgExceptionRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.IllegalArgumentException]) - lazy val jliSerializedLambdaRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.SerializedLambda]) - - lazy val srBoxesRunTimeRef: ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.BoxesRunTime]) - - private lazy val jliCallSiteRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.CallSite]) - private lazy val jliLambdaMetafactoryRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.LambdaMetafactory]) - private lazy val jliMethodHandleRef : ClassBType = classBTypeFromSymbol(defn.MethodHandleClass) - private lazy val jliMethodHandlesLookupRef : ClassBType = classBTypeFromSymbol(defn.MethodHandlesLookupClass) - private lazy val jliMethodTypeRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.MethodType]) - private lazy val jliStringConcatFactoryRef : ClassBType = 
classBTypeFromSymbol(requiredClass("java.lang.invoke.StringConcatFactory")) // since JDK 9 - private lazy val srLambdaDeserialize : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.LambdaDeserialize]) - - lazy val jliLambdaMetaFactoryMetafactoryHandle: Handle = new Handle( + lazy val srNothingRef : ClassBType = classBTypeFromSymbol(requiredClass("scala.runtime.Nothing$")) + lazy val srNullRef : ClassBType = classBTypeFromSymbol(requiredClass("scala.runtime.Null$")) + + lazy val ObjectRef : ClassBType = classBTypeFromSymbol(defn.ObjectClass) + lazy val StringRef : ClassBType = classBTypeFromSymbol(defn.StringClass) + + lazy val jlStringBuilderRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.StringBuilder]) + lazy val jlStringBufferRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.StringBuffer]) + lazy val jlCharSequenceRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.CharSequence]) + lazy val jlClassRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Class[_]]) + lazy val jlThrowableRef : ClassBType = classBTypeFromSymbol(defn.ThrowableClass) + lazy val jlCloneableRef : ClassBType = classBTypeFromSymbol(defn.JavaCloneableClass) + lazy val jiSerializableRef : ClassBType = classBTypeFromSymbol(requiredClass[java.io.Serializable]) + lazy val jlClassCastExceptionRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.ClassCastException]) + lazy val jlIllegalArgExceptionRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.IllegalArgumentException]) + lazy val jliSerializedLambdaRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.SerializedLambda]) + + lazy val srBoxesRuntimeRef: ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.BoxesRunTime]) + + private lazy val jliCallSiteRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.CallSite]) + private lazy val jliLambdaMetafactoryRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.LambdaMetafactory]) + private lazy val jliMethodHandleRef : ClassBType = classBTypeFromSymbol(defn.MethodHandleClass) + private lazy val jliMethodHandlesLookupRef : ClassBType = classBTypeFromSymbol(defn.MethodHandlesLookupClass) + private lazy val jliMethodTypeRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.MethodType]) + private lazy val jliStringConcatFactoryRef : ClassBType = classBTypeFromSymbol(requiredClass("java.lang.invoke.StringConcatFactory")) // since JDK 9 + + lazy val srLambdaDeserialize : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.LambdaDeserialize]) + + lazy val jliLambdaMetaFactoryMetafactoryHandle = new Handle( Opcodes.H_INVOKESTATIC, jliLambdaMetafactoryRef.internalName, "metafactory", @@ -150,7 +162,7 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: DottyBackendInterface]](val bTyp ).descriptor, /* itf = */ false) - lazy val jliLambdaMetaFactoryAltMetafactoryHandle: Handle = new Handle( + lazy val jliLambdaMetaFactoryAltMetafactoryHandle = new Handle( Opcodes.H_INVOKESTATIC, jliLambdaMetafactoryRef.internalName, "altMetafactory", @@ -159,7 +171,7 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: DottyBackendInterface]](val bTyp jliCallSiteRef ).descriptor, /* itf = */ false) - + lazy val jliLambdaDeserializeBootstrapHandle: Handle = new Handle( Opcodes.H_INVOKESTATIC, srLambdaDeserialize.internalName, @@ -179,19 +191,19 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: DottyBackendInterface]](val bTyp jliCallSiteRef ).descriptor, /* itf = */ false) - + /** 
* Methods in scala.runtime.BoxesRuntime */ lazy val asmBoxTo : Map[BType, MethodNameAndType] = Map( - BOOL -> MethodNameAndType("boxToBoolean", MethodBType(List(BOOL), BOXED_BOOLEAN)), - BYTE -> MethodNameAndType("boxToByte", MethodBType(List(BYTE), BOXED_BYTE)), - CHAR -> MethodNameAndType("boxToCharacter", MethodBType(List(CHAR), BOXED_CHAR)), - SHORT -> MethodNameAndType("boxToShort", MethodBType(List(SHORT), BOXED_SHORT)), - INT -> MethodNameAndType("boxToInteger", MethodBType(List(INT), BOXED_INT)), - LONG -> MethodNameAndType("boxToLong", MethodBType(List(LONG), BOXED_LONG)), - FLOAT -> MethodNameAndType("boxToFloat", MethodBType(List(FLOAT), BOXED_FLOAT)), - DOUBLE -> MethodNameAndType("boxToDouble", MethodBType(List(DOUBLE), BOXED_DOUBLE)) + BOOL -> MethodNameAndType("boxToBoolean", MethodBType(List(BOOL), boxedClassOfPrimitive(BOOL))), + BYTE -> MethodNameAndType("boxToByte", MethodBType(List(BYTE), boxedClassOfPrimitive(BYTE))), + CHAR -> MethodNameAndType("boxToCharacter", MethodBType(List(CHAR), boxedClassOfPrimitive(CHAR))), + SHORT -> MethodNameAndType("boxToShort", MethodBType(List(SHORT), boxedClassOfPrimitive(SHORT))), + INT -> MethodNameAndType("boxToInteger", MethodBType(List(INT), boxedClassOfPrimitive(INT))), + LONG -> MethodNameAndType("boxToLong", MethodBType(List(LONG), boxedClassOfPrimitive(LONG))), + FLOAT -> MethodNameAndType("boxToFloat", MethodBType(List(FLOAT), boxedClassOfPrimitive(FLOAT))), + DOUBLE -> MethodNameAndType("boxToDouble", MethodBType(List(DOUBLE), boxedClassOfPrimitive(DOUBLE))) ) lazy val asmUnboxTo: Map[BType, MethodNameAndType] = Map( @@ -220,75 +232,3 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: DottyBackendInterface]](val bTyp ) } } - -/** - * This trait make some core BTypes availalbe that don't depend on a Global instance. Some core - * BTypes are required to be accessible in the BTypes trait, which does not have access to Global. - * - * BTypes cannot refer to CoreBTypesProxy because some of its members depend on global, for example - * the type Symbol in - * def primitiveTypeMap: Map[Symbol, PrimitiveBType] - */ -trait CoreBTypesProxyGlobalIndependent[BTS <: BTypes] { - val bTypes: BTS - import bTypes._ - - def boxedClasses: Set[ClassBType] - - def srNothingRef : ClassBType - def srNullRef : ClassBType - - def ObjectRef : ClassBType - def jlCloneableRef : ClassBType - def jiSerializableRef : ClassBType -} - -/** - * See comment in class [[CoreBTypes]]. 
- */ -final class CoreBTypesProxy[BTFS <: BTypesFromSymbols[_ <: DottyBackendInterface]](val bTypes: BTFS) extends CoreBTypesProxyGlobalIndependent[BTFS] { - import bTypes._ - - private var _coreBTypes: CoreBTypes[bTypes.type] = _ - def setBTypes(coreBTypes: CoreBTypes[BTFS]): Unit = { - _coreBTypes = coreBTypes.asInstanceOf[CoreBTypes[bTypes.type]] - } - - def primitiveTypeMap: Map[Symbol, PrimitiveBType] = _coreBTypes.primitiveTypeMap - - def boxedClasses: Set[ClassBType] = _coreBTypes.boxedClasses - - def boxedClassOfPrimitive: Map[PrimitiveBType, ClassBType] = _coreBTypes.boxedClassOfPrimitive - - def boxResultType: Map[Symbol, ClassBType] = _coreBTypes.boxResultType - - def unboxResultType: Map[Symbol, PrimitiveBType] = _coreBTypes.unboxResultType - - def srNothingRef : ClassBType = _coreBTypes.srNothingRef - def srNullRef : ClassBType = _coreBTypes.srNullRef - - def ObjectRef : ClassBType = _coreBTypes.ObjectRef - def StringRef : ClassBType = _coreBTypes.StringRef - def jlStringBuilderRef : ClassBType = _coreBTypes.jlStringBuilderRef - def jlStringBufferRef : ClassBType = _coreBTypes.jlStringBufferRef - def jlCharSequenceRef : ClassBType = _coreBTypes.jlCharSequenceRef - def jlClassRef : ClassBType = _coreBTypes.jlClassRef - def jlThrowableRef : ClassBType = _coreBTypes.jlThrowableRef - def jlCloneableRef : ClassBType = _coreBTypes.jlCloneableRef - def jiSerializableRef : ClassBType = _coreBTypes.jioSerializableRef - def jlClassCastExceptionRef : ClassBType = _coreBTypes.jlClassCastExceptionRef - def jlIllegalArgExceptionRef : ClassBType = _coreBTypes.jlIllegalArgExceptionRef - def jliSerializedLambdaRef : ClassBType = _coreBTypes.jliSerializedLambdaRef - - def srBoxesRuntimeRef: ClassBType = _coreBTypes.srBoxesRunTimeRef - - def jliLambdaMetaFactoryMetafactoryHandle : Handle = _coreBTypes.jliLambdaMetaFactoryMetafactoryHandle - def jliLambdaMetaFactoryAltMetafactoryHandle : Handle = _coreBTypes.jliLambdaMetaFactoryAltMetafactoryHandle - def jliLambdaDeserializeBootstrapHandle : Handle = _coreBTypes.jliLambdaDeserializeBootstrapHandle - def jliStringConcatFactoryMakeConcatWithConstantsHandle: Handle = _coreBTypes.jliStringConcatFactoryMakeConcatWithConstantsHandle - - def asmBoxTo : Map[BType, MethodNameAndType] = _coreBTypes.asmBoxTo - def asmUnboxTo: Map[BType, MethodNameAndType] = _coreBTypes.asmUnboxTo - - def typeOfArrayOp: Map[Int, BType] = _coreBTypes.typeOfArrayOp -} diff --git a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala index f8f683a429f6..b2278c3f0ce8 100644 --- a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala +++ b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala @@ -25,7 +25,7 @@ import StdNames.nme import NameKinds.{LazyBitMapName, LazyLocalName} import Names.Name -class DottyBackendInterface(val outputDirectory: AbstractFile, val superCallsMap: ReadOnlyMap[Symbol, Set[ClassSymbol]])(using val ctx: Context) { +class DottyBackendInterface(val superCallsMap: ReadOnlyMap[Symbol, Set[ClassSymbol]])(using val ctx: Context) { private val desugared = new java.util.IdentityHashMap[Type, tpd.Select] diff --git a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala index e788c2b2a4ec..469a6ea57679 100644 --- a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala +++ b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala @@ -1,43 +1,16 @@ package dotty.tools.backend.jvm -import scala.language.unsafeNulls - 
import dotty.tools.dotc.CompilationUnit -import dotty.tools.dotc.ast.Trees.{PackageDef, ValDef} -import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.core.Phases.Phase - -import scala.collection.mutable -import scala.jdk.CollectionConverters._ -import dotty.tools.dotc.transform.SymUtils._ -import dotty.tools.dotc.interfaces import dotty.tools.dotc.report - -import dotty.tools.dotc.util.SourceFile -import java.util.Optional - import dotty.tools.dotc.core._ -import dotty.tools.dotc.sbt.ExtractDependencies +import dotty.tools.dotc.interfaces.CompilerCallback import Contexts._ -import Phases._ import Symbols._ -import Decorators.em - -import java.io.DataOutputStream -import java.nio.channels.ClosedByInterruptException - -import dotty.tools.tasty.{ TastyBuffer, TastyHeaderUnpickler } - -import scala.tools.asm -import scala.tools.asm.Handle -import scala.tools.asm.tree._ -import tpd._ -import StdNames._ import dotty.tools.io._ -import scala.tools.asm.MethodTooLargeException -import scala.tools.asm.ClassTooLargeException +import scala.collection.mutable -class GenBCode extends Phase { +class GenBCode extends Phase { self => override def phaseName: String = GenBCode.name @@ -52,618 +25,85 @@ class GenBCode extends Phase { private val entryPoints = new mutable.HashSet[String]() def registerEntryPoint(s: String): Unit = entryPoints += s - private var myOutput: AbstractFile = _ - - private def outputDir(using Context): AbstractFile = { - if (myOutput eq null) - myOutput = ctx.settings.outputDir.value - myOutput + private var _backendInterface: DottyBackendInterface = _ + def backendInterface(using ctx: Context): DottyBackendInterface = { + if _backendInterface eq null then + // Enforce usage of FreshContext so we would be able to modify compilation unit between runs + val backendCtx = ctx match + case fc: FreshContext => fc + case ctx => ctx.fresh + _backendInterface = DottyBackendInterface(superCallsMap)(using backendCtx) + _backendInterface } - private var myPrimitives: DottyPrimitives = null - - override def run(using Context): Unit = - if myPrimitives == null then myPrimitives = new DottyPrimitives(ctx) - new GenBCodePipeline( - DottyBackendInterface(outputDir, superCallsMap), - myPrimitives - ).run(ctx.compilationUnit.tpdTree) - - - override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = { - outputDir match - case jar: JarArchive => - updateJarManifestWithMainClass(jar, entryPoints.toList) - case _ => - try super.runOn(units) - finally outputDir match { - case jar: JarArchive => - if (ctx.run.nn.suspendedUnits.nonEmpty) - // If we close the jar the next run will not be able to write on the jar. - // But if we do not close it we cannot use it as part of the macro classpath of the suspended files. - report.error("Can not suspend and output to a jar at the same time. 
See suspension with -Xprint-suspension.") - - jar.close() - case _ => - } + private var _codeGen: CodeGen = _ + def codeGen(using Context): CodeGen = { + if _codeGen eq null then + val int = backendInterface + val dottyPrimitives = new DottyPrimitives(ctx) + _codeGen = new CodeGen(int, dottyPrimitives)(bTypes.asInstanceOf[BTypesFromSymbols[int.type]]) + _codeGen } - private def updateJarManifestWithMainClass(jarArchive: JarArchive, entryPoints: List[String])(using Context): Unit = - val mainClass = Option.when(!ctx.settings.XmainClass.isDefault)(ctx.settings.XmainClass.value).orElse { - entryPoints match - case List(mainClass) => - Some(mainClass) - case Nil => - report.warning("No Main-Class designated or discovered.") - None - case mcs => - report.warning(s"No Main-Class due to multiple entry points:\n ${mcs.mkString("\n ")}") - None - } - mainClass.map { mc => - val manifest = Jar.WManifest() - manifest.mainClass = mc - val file = jarArchive.subdirectoryNamed("META-INF").fileNamed("MANIFEST.MF") - val os = file.output - manifest.underlying.write(os) - os.close() - } - end updateJarManifestWithMainClass -} - -object GenBCode { - val name: String = "genBCode" - val description: String = "generate JVM bytecode" -} - -class GenBCodePipeline(val int: DottyBackendInterface, val primitives: DottyPrimitives)(using Context) extends BCodeSyncAndTry { - import DottyBackendInterface.symExtensions - - private var tree: Tree = _ - - private val sourceFile: SourceFile = ctx.compilationUnit.source - - /** Convert a `dotty.tools.io.AbstractFile` into a - * `dotty.tools.dotc.interfaces.AbstractFile`. - */ - private def convertAbstractFile(absfile: dotty.tools.io.AbstractFile): interfaces.AbstractFile = - new interfaces.AbstractFile { - override def name = absfile.name - override def path = absfile.path - override def jfile = Optional.ofNullable(absfile.file) - } - - final class PlainClassBuilder(cunit: CompilationUnit) extends SyncAndTryBuilder(cunit) - -// class BCodePhase() { - - private var bytecodeWriter : BytecodeWriter = null - private var mirrorCodeGen : JMirrorBuilder = null - - /* ---------------- q1 ---------------- */ - - case class Item1(arrivalPos: Int, cd: TypeDef, cunit: CompilationUnit) { - def isPoison: Boolean = { arrivalPos == Int.MaxValue } + private var _bTypes: BTypesFromSymbols[DottyBackendInterface] = _ + def bTypes(using Context): BTypesFromSymbols[DottyBackendInterface] = { + if _bTypes eq null then + _bTypes = BTypesFromSymbols(backendInterface, frontendAccess) + _bTypes } - private val poison1 = Item1(Int.MaxValue, null, ctx.compilationUnit) - private val q1 = new java.util.LinkedList[Item1] - /* ---------------- q2 ---------------- */ - - case class SubItem2(classNode: asm.tree.ClassNode, - file: dotty.tools.io.AbstractFile) - - case class Item2(arrivalPos: Int, - mirror: SubItem2, - plain: SubItem2) { - def isPoison: Boolean = { arrivalPos == Int.MaxValue } + private var _frontendAccess: PostProcessorFrontendAccess | Null = _ + def frontendAccess(using Context): PostProcessorFrontendAccess = { + if _frontendAccess eq null then + _frontendAccess = PostProcessorFrontendAccess.Impl(backendInterface, entryPoints) + _frontendAccess.nn } - private val poison2 = Item2(Int.MaxValue, null, null) - private val q2 = new _root_.java.util.LinkedList[Item2] - - /* ---------------- q3 ---------------- */ - - /* - * An item of queue-3 (the last queue before serializing to disk) contains three of these - * (one for each of mirror and plain classes). 
- * - * @param jclassName internal name of the class - * @param jclassBytes bytecode emitted for the class SubItem3 represents - */ - case class SubItem3( - jclassName: String, - jclassBytes: Array[Byte], - jclassFile: dotty.tools.io.AbstractFile - ) - - case class Item3(arrivalPos: Int, - mirror: SubItem3, - plain: SubItem3) { - - def isPoison: Boolean = { arrivalPos == Int.MaxValue } - } - private val i3comparator = new java.util.Comparator[Item3] { - override def compare(a: Item3, b: Item3) = { - if (a.arrivalPos < b.arrivalPos) -1 - else if (a.arrivalPos == b.arrivalPos) 0 - else 1 - } + private var _postProcessor: PostProcessor | Null = _ + def postProcessor(using Context): PostProcessor = { + if _postProcessor eq null then + _postProcessor = new PostProcessor(frontendAccess, bTypes) + _postProcessor.nn } - private val poison3 = Item3(Int.MaxValue, null, null) - private val q3 = new java.util.PriorityQueue[Item3](1000, i3comparator) - - /* - * Pipeline that takes ClassDefs from queue-1, lowers them into an intermediate form, placing them on queue-2 - */ - class Worker1(needsOutFolder: Boolean) { - - private val lowerCaseNames = mutable.HashMap.empty[String, Symbol] - private def checkForCaseConflict(javaClassName: String, classSymbol: Symbol) = { - val lowerCaseName = javaClassName.toLowerCase - lowerCaseNames.get(lowerCaseName) match { - case None => - lowerCaseNames.put(lowerCaseName, classSymbol) - case Some(dupClassSym) => - // Order is not deterministic so we enforce lexicographic order between the duplicates for error-reporting - val (cl1, cl2) = - if (classSymbol.effectiveName.toString < dupClassSym.effectiveName.toString) (classSymbol, dupClassSym) - else (dupClassSym, classSymbol) - val same = classSymbol.effectiveName.toString == dupClassSym.effectiveName.toString - atPhase(typerPhase) { - if (same) - report.warning( // FIXME: This should really be an error, but then FromTasty tests fail - s"${cl1.show} and ${cl2.showLocated} produce classes that overwrite one another", cl1.sourcePos) - else - report.warning(s"${cl1.show} differs only in case from ${cl2.showLocated}. " + - "Such classes will overwrite one another on case-insensitive filesystems.", cl1.sourcePos) - } - } - } - - def run(): Unit = { - while (true) { - val item = q1.poll - if (item.isPoison) { - q2 add poison2 - return - } - else { - try { /*withCurrentUnit(item.cunit)*/(visit(item)) } - catch { - case ex: InterruptedException => - throw ex - case ex: Throwable => - println(s"Error while emitting ${item.cunit.source.file.name}") - throw ex - } - } - } - } - - /* - * Checks for duplicate internal names case-insensitively, - * builds ASM ClassNodes for mirror and plain classes; - * enqueues them in queue-2. - * - */ - def visit(item: Item1): Boolean = { - val Item1(arrivalPos, cd, cunit) = item - val claszSymbol = cd.symbol - - // -------------- mirror class, if needed -------------- - val mirrorC = - if (claszSymbol.isTopLevelModuleClass) { - if (claszSymbol.companionClass == NoSymbol) { - mirrorCodeGen.genMirrorClass(claszSymbol, cunit) - } else { - report.log(s"No mirror class for module with linked class: ${claszSymbol.showFullName}") - null - } - } else null - - // -------------- "plain" class -------------- - val pcb = new PlainClassBuilder(cunit) - pcb.genPlainClass(cd) - val outF = if (needsOutFolder) getOutFolder(claszSymbol, pcb.thisName) else null; - val plainC = pcb.cnode - - if (claszSymbol.isClass) // @DarkDimius is this test needed here? 
- for (binary <- ctx.compilationUnit.pickled.get(claszSymbol.asClass)) { - val store = if (mirrorC ne null) mirrorC else plainC - val tasty = - val outTastyFile = getFileForClassfile(outF, store.name, ".tasty") - val outstream = new DataOutputStream(outTastyFile.bufferedOutput) - try outstream.write(binary()) - catch case ex: ClosedByInterruptException => - try - outTastyFile.delete() // don't leave an empty or half-written tastyfile around after an interrupt - catch - case _: Throwable => - throw ex - finally outstream.close() - - val uuid = new TastyHeaderUnpickler(binary()).readHeader() - val lo = uuid.getMostSignificantBits - val hi = uuid.getLeastSignificantBits - - // TASTY attribute is created but only the UUID bytes are stored in it. - // A TASTY attribute has length 16 if and only if the .tasty file exists. - val buffer = new TastyBuffer(16) - buffer.writeUncompressedLong(lo) - buffer.writeUncompressedLong(hi) - buffer.bytes - - val dataAttr = createJAttribute(nme.TASTYATTR.mangledString, tasty, 0, tasty.length) - store.visitAttribute(dataAttr) - } - - - // ----------- create files - - val classNodes = List(mirrorC, plainC) - val classFiles = classNodes.map(cls => - if (outF != null && cls != null) { - try { - checkForCaseConflict(cls.name, claszSymbol) - getFileForClassfile(outF, cls.name, ".class") - } catch { - case e: FileConflictException => - report.error(em"error writing ${cls.name}: ${e.getMessage}") - null - } - } else null - ) - - // ----------- compiler and sbt's callbacks - - val (fullClassName, isLocal) = atPhase(sbtExtractDependenciesPhase) { - (ExtractDependencies.classNameAsString(claszSymbol), claszSymbol.isLocal) - } - - for ((cls, clsFile) <- classNodes.zip(classFiles)) { - if (cls != null) { - val className = cls.name.replace('/', '.') - if (ctx.compilerCallback != null) - ctx.compilerCallback.onClassGenerated(sourceFile, convertAbstractFile(clsFile), className) - if (ctx.sbtCallback != null) { - if (isLocal) - ctx.sbtCallback.generatedLocalClass(sourceFile.jfile.orElse(null), clsFile.file) - else { - ctx.sbtCallback.generatedNonLocalClass(sourceFile.jfile.orElse(null), clsFile.file, - className, fullClassName) - } - } - } - } - - // ----------- hand over to pipeline-2 - - val item2 = - Item2(arrivalPos, - SubItem2(mirrorC, classFiles(0)), - SubItem2(plainC, classFiles(1))) - - q2 add item2 // at the very end of this method so that no Worker2 thread starts mutating before we're done. - } // end of method visit(Item1) - - } // end of class BCodePhase.Worker1 - - /* - * Pipeline that takes ClassNodes from queue-2. 
The unit of work depends on the optimization level: - * - * (a) no optimization involves: - * - converting the plain ClassNode to byte array and placing it on queue-3 - */ - class Worker2 { - import bTypes.ClassBType - import bTypes.coreBTypes.jliLambdaMetaFactoryAltMetafactoryHandle - // lazy val localOpt = new LocalOpt(new Settings()) - - private def localOptimizations(classNode: ClassNode): Unit = { - // BackendStats.timed(BackendStats.methodOptTimer)(localOpt.methodOptimizations(classNode)) - } - - - /* Return an array of all serializable lambdas in this class */ - private def collectSerializableLambdas(classNode: ClassNode): Array[Handle] = { - val indyLambdaBodyMethods = new mutable.ArrayBuffer[Handle] - for (m <- classNode.methods.asScala) { - val iter = m.instructions.iterator - while (iter.hasNext) { - val insn = iter.next() - insn match { - case indy: InvokeDynamicInsnNode - if indy.bsm == jliLambdaMetaFactoryAltMetafactoryHandle => - import java.lang.invoke.LambdaMetafactory.FLAG_SERIALIZABLE - val metafactoryFlags = indy.bsmArgs(3).asInstanceOf[Integer].toInt - val isSerializable = (metafactoryFlags & FLAG_SERIALIZABLE) != 0 - if isSerializable then - val implMethod = indy.bsmArgs(1).asInstanceOf[Handle] - indyLambdaBodyMethods += implMethod - case _ => - } - } - } - indyLambdaBodyMethods.toArray - } - - /* - * Add: - * - * private static Object $deserializeLambda$(SerializedLambda l) { - * try return indy[scala.runtime.LambdaDeserialize.bootstrap, targetMethodGroup$0](l) - * catch { - * case i: IllegalArgumentException => - * try return indy[scala.runtime.LambdaDeserialize.bootstrap, targetMethodGroup$1](l) - * catch { - * case i: IllegalArgumentException => - * ... - * return indy[scala.runtime.LambdaDeserialize.bootstrap, targetMethodGroup${NUM_GROUPS-1}](l) - * } - * - * We use invokedynamic here to enable caching within the deserializer without needing to - * host a static field in the enclosing class. This allows us to add this method to interfaces - * that define lambdas in default methods. - * - * SI-10232 we can't pass arbitrary number of method handles to the final varargs parameter of the bootstrap - * method due to a limitation in the JVM. Instead, we emit a separate invokedynamic bytecode for each group of target - * methods. - */ - private def addLambdaDeserialize(classNode: ClassNode, implMethodsArray: Array[Handle]): Unit = { - import asm.Opcodes._ - import bTypes._ - import coreBTypes._ - - val cw = classNode - - // Make sure to reference the ClassBTypes of all types that are used in the code generated - // here (e.g. java/util/Map) are initialized. Initializing a ClassBType adds it to - // `classBTypeFromInternalNameMap`. When writing the classfile, the asm ClassWriter computes - // stack map frames and invokes the `getCommonSuperClass` method. This method expects all - // ClassBTypes mentioned in the source code to exist in the map. - - val serlamObjDesc = MethodBType(jliSerializedLambdaRef :: Nil, ObjectRef).descriptor - - val mv = cw.visitMethod(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$deserializeLambda$", serlamObjDesc, null, null) - def emitLambdaDeserializeIndy(targetMethods: Seq[Handle]): Unit = { - mv.visitVarInsn(ALOAD, 0) - mv.visitInvokeDynamicInsn("lambdaDeserialize", serlamObjDesc, jliLambdaDeserializeBootstrapHandle, targetMethods: _*) - } - - val targetMethodGroupLimit = 255 - 1 - 3 // JVM limit. 
See See MAX_MH_ARITY in CallSite.java - val groups: Array[Array[Handle]] = implMethodsArray.grouped(targetMethodGroupLimit).toArray - val numGroups = groups.length - - import scala.tools.asm.Label - val initialLabels = Array.fill(numGroups - 1)(new Label()) - val terminalLabel = new Label - def nextLabel(i: Int) = if (i == numGroups - 2) terminalLabel else initialLabels(i + 1) - - for ((label, i) <- initialLabels.iterator.zipWithIndex) { - mv.visitTryCatchBlock(label, nextLabel(i), nextLabel(i), jlIllegalArgExceptionRef.internalName) - } - for ((label, i) <- initialLabels.iterator.zipWithIndex) { - mv.visitLabel(label) - emitLambdaDeserializeIndy(groups(i).toIndexedSeq) - mv.visitInsn(ARETURN) - } - mv.visitLabel(terminalLabel) - emitLambdaDeserializeIndy(groups(numGroups - 1).toIndexedSeq) - mv.visitInsn(ARETURN) - } - - private def setInnerClasses(classNode: ClassNode): Unit = if (classNode != null) { - classNode.innerClasses.clear() - val (declared, referred) = collectNestedClasses(classNode) - addInnerClasses(classNode, declared, referred) - } - - /** - * Visit the class node and collect all referenced nested classes. - */ - private def collectNestedClasses(classNode: ClassNode): (List[ClassBType], List[ClassBType]) = { - // type InternalName = String - val c = new NestedClassesCollector[ClassBType](nestedOnly = true) { - def declaredNestedClasses(internalName: InternalName): List[ClassBType] = - bTypes.classBTypeFromInternalName(internalName).info.memberClasses - - def getClassIfNested(internalName: InternalName): Option[ClassBType] = { - val c = bTypes.classBTypeFromInternalName(internalName) - Option.when(c.isNestedClass)(c) - } - - def raiseError(msg: String, sig: String, e: Option[Throwable]): Unit = { - // don't crash on invalid generic signatures - } - } - c.visit(classNode) - (c.declaredInnerClasses.toList, c.referredInnerClasses.toList) - } - - def run(): Unit = { - while (true) { - val item = q2.poll - if (item.isPoison) { - q3 add poison3 - return - } - else { - try { - val plainNode = item.plain.classNode - localOptimizations(plainNode) - val serializableLambdas = collectSerializableLambdas(plainNode) - if (serializableLambdas.nonEmpty) - addLambdaDeserialize(plainNode, serializableLambdas) - setInnerClasses(plainNode) - setInnerClasses(item.mirror.classNode) - addToQ3(item) - } catch { - case ex: InterruptedException => - throw ex - case ex: Throwable => - println(s"Error while emitting ${item.plain.classNode.name}") - throw ex - } + override def run(using ctx: Context): Unit = + // CompilationUnit is the only component that will differ between each run invocation + // We need to update it to have correct source positions. + // FreshContext is always enforced when creating backend interface + backendInterface.ctx + .asInstanceOf[FreshContext] + .setCompilationUnit(ctx.compilationUnit) + val generated = codeGen.genUnit(ctx.compilationUnit) + // In Scala 2, the backend might use global optimizations which might delay post-processing to build the call graph. + // In Scala 3, we don't perform backend optimizations and always perform post-processing immediately. 
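+      // A hand-written sketch of the resulting per-unit flow (illustrative only, not part of the patch):
+      //   val generated = codeGen.genUnit(unit)             // build ASM ClassNodes and TASTy
+      //   postProcessor.postProcessAndSendToDisk(generated) // serialize and write classfiles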
+ // https://github.com/scala/scala/pull/6057 + postProcessor.postProcessAndSendToDisk(generated) + (ctx.compilerCallback: CompilerCallback | Null) match { + case cb: CompilerCallback => cb.onSourceCompiled(ctx.source) + case null => () + } + + override def runOn(units: List[CompilationUnit])(using ctx:Context): List[CompilationUnit] = { + try super.runOn(units) + finally + // frontendAccess and postProcessor are created lazilly, clean them up only if they were initialized + if _frontendAccess ne null then + frontendAccess.compilerSettings.outputDirectory match { + case jar: JarArchive => + if (ctx.run.nn.suspendedUnits.nonEmpty) + // If we close the jar the next run will not be able to write on the jar. + // But if we do not close it we cannot use it as part of the macro classpath of the suspended files. + report.error("Can not suspend and output to a jar at the same time. See suspension with -Xprint-suspension.") + + jar.close() + case _ => () } - } - } - - private def addToQ3(item: Item2) = { - - def getByteArray(cn: asm.tree.ClassNode): Array[Byte] = { - val cw = new CClassWriter(extraProc) - cn.accept(cw) - cw.toByteArray - } - - val Item2(arrivalPos, SubItem2(mirror, mirrorFile), SubItem2(plain, plainFile)) = item - - val mirrorC = if (mirror == null) null else SubItem3(mirror.name, getByteArray(mirror), mirrorFile) - val plainC = SubItem3(plain.name, getByteArray(plain), plainFile) - - if (AsmUtils.traceSerializedClassEnabled && plain.name.contains(AsmUtils.traceSerializedClassPattern)) { - if (mirrorC != null) AsmUtils.traceClass(mirrorC.jclassBytes) - AsmUtils.traceClass(plainC.jclassBytes) - } - - q3 add Item3(arrivalPos, mirrorC, plainC) - } - - } // end of class BCodePhase.Worker2 - - var arrivalPos: Int = 0 - - /* - * A run of the BCodePhase phase comprises: - * - * (a) set-up steps (most notably supporting maps in `BCodeTypes`, - * but also "the" writer where class files in byte-array form go) - * - * (b) building of ASM ClassNodes, their optimization and serialization. - * - * (c) tear down (closing the classfile-writer and clearing maps) - * - */ - def run(t: Tree)(using Context): Unit = { - this.tree = t - - // val bcodeStart = Statistics.startTimer(BackendStats.bcodeTimer) - - // val initStart = Statistics.startTimer(BackendStats.bcodeInitTimer) - arrivalPos = 0 // just in case - // scalaPrimitives.init() - bTypes.intializeCoreBTypes() - // Statistics.stopTimer(BackendStats.bcodeInitTimer, initStart) - - // initBytecodeWriter invokes fullName, thus we have to run it before the typer-dependent thread is activated. - bytecodeWriter = initBytecodeWriter() - mirrorCodeGen = new JMirrorBuilder - - val needsOutfileForSymbol = bytecodeWriter.isInstanceOf[ClassBytecodeWriter] - buildAndSendToDisk(needsOutfileForSymbol) - - // closing output files. - bytecodeWriter.close() - // Statistics.stopTimer(BackendStats.bcodeTimer, bcodeStart) - - if (ctx.compilerCallback != null) - ctx.compilerCallback.onSourceCompiled(sourceFile) - - /* TODO Bytecode can be verified (now that all classfiles have been written to disk) - * - * (1) asm.util.CheckAdapter.verify() - * public static void verify(ClassReader cr, ClassLoader loader, boolean dump, PrintWriter pw) - * passing a custom ClassLoader to verify inter-dependent classes. - * Alternatively, - * - an offline-bytecode verifier could be used (e.g. Maxine brings one as separate tool). 
- * - -Xverify:all - * - * (2) if requested, check-java-signatures, over and beyond the syntactic checks in `getGenericSignature()` - * - */ + if _postProcessor ne null then + postProcessor.classfileWriter.close() } +} - /* - * Sequentially: - * (a) place all ClassDefs in queue-1 - * (b) dequeue one at a time from queue-1, convert it to ASM ClassNode, place in queue-2 - * (c) dequeue one at a time from queue-2, convert it to byte-array, place in queue-3 - * (d) serialize to disk by draining queue-3. - */ - private def buildAndSendToDisk(needsOutFolder: Boolean)(using Context) = { - try - feedPipeline1() - // val genStart = Statistics.startTimer(BackendStats.bcodeGenStat) - (new Worker1(needsOutFolder)).run() - // Statistics.stopTimer(BackendStats.bcodeGenStat, genStart) - - (new Worker2).run() - - // val writeStart = Statistics.startTimer(BackendStats.bcodeWriteTimer) - drainQ3() - // Statistics.stopTimer(BackendStats.bcodeWriteTimer, writeStart) - catch - case e: MethodTooLargeException => - val method = - s"${e.getClassName.replaceAll("/", ".")}.${e.getMethodName}" - val msg = - em"Generated bytecode for method '$method' is too large. Size: ${e.getCodeSize} bytes. Limit is 64KB" - report.error(msg) - case e: ClassTooLargeException => - val msg = - em"Class '${e.getClassName.replaceAll("/", ".")}' is too large. Constant pool size: ${e.getConstantPoolCount}. Limit is 64K entries" - report.error(msg) - - } - - /* Feed pipeline-1: place all ClassDefs on q1, recording their arrival position. */ - private def feedPipeline1() = { - def gen(tree: Tree): Unit = { - tree match { - case EmptyTree => () - case PackageDef(_, stats) => stats foreach gen - case ValDef(name, tpt, rhs) => () // module val not emitted - case cd: TypeDef => - q1 add Item1(arrivalPos, cd, int.ctx.compilationUnit) - arrivalPos += 1 - } - } - gen(tree) - q1 add poison1 - } - - /* Pipeline that writes classfile representations to disk. 
*/ - private def drainQ3() = { - - def sendToDisk(cfr: SubItem3): Unit = { - if (cfr != null){ - val SubItem3(jclassName, jclassBytes, jclassFile) = cfr - bytecodeWriter.writeClass(jclassName, jclassName, jclassBytes, jclassFile) - } - } - - var moreComing = true - // `expected` denotes the arrivalPos whose Item3 should be serialized next - var expected = 0 - - while (moreComing) { - val incoming = q3.poll - moreComing = !incoming.isPoison - if (moreComing) { - val item = incoming - sendToDisk(item.mirror) - sendToDisk(item.plain) - expected += 1 - } - } - - // we're done - assert(q1.isEmpty, s"Some ClassDefs remained in the first queue: $q1") - assert(q2.isEmpty, s"Some classfiles remained in the second queue: $q2") - assert(q3.isEmpty, s"Some classfiles weren't written to disk: $q3") - - } - //} // end of class BCodePhase +object GenBCode { + val name: String = "genBCode" + val description: String = "generate JVM bytecode" } diff --git a/compiler/src/dotty/tools/backend/jvm/GenericSignatureVisitor.scala b/compiler/src/dotty/tools/backend/jvm/GenericSignatureVisitor.scala index e9e532933290..c16bc70fc3b0 100644 --- a/compiler/src/dotty/tools/backend/jvm/GenericSignatureVisitor.scala +++ b/compiler/src/dotty/tools/backend/jvm/GenericSignatureVisitor.scala @@ -185,13 +185,13 @@ abstract class GenericSignatureVisitor(nestedOnly: Boolean) { } // Backported from scala/scala, commit sha: 724be0e9425b9ad07c244d25efdad695d75abbcf -// https://github.com/scala/scala/blob/724be0e9425b9ad07c244d25efdad695d75abbcf/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala#L790 +// https://github.com/scala/scala/blob/724be0e9425b9ad07c244d25efdad695d75abbcf/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala#L790 abstract class NestedClassesCollector[T](nestedOnly: Boolean) extends GenericSignatureVisitor(nestedOnly) { - type InternalName = String + type InternalName = String def declaredNestedClasses(internalName: InternalName): List[T] def getClassIfNested(internalName: InternalName): Option[T] - + val declaredInnerClasses = mutable.Set.empty[T] val referredInnerClasses = mutable.Set.empty[T] diff --git a/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala b/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala new file mode 100644 index 000000000000..606b5645aa24 --- /dev/null +++ b/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala @@ -0,0 +1,117 @@ +package dotty.tools.backend.jvm + +import scala.collection.mutable.ListBuffer +import dotty.tools.dotc.util.{SourcePosition, NoSourcePosition} +import dotty.tools.io.AbstractFile +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.em +import scala.tools.asm.ClassWriter +import scala.tools.asm.tree.ClassNode + +/** + * Implements late stages of the backend that don't depend on a Global instance, i.e., + * optimizations, post-processing and classfile serialization and writing. 
+ */ +class PostProcessor(val frontendAccess: PostProcessorFrontendAccess, val bTypes: BTypes) { + self => + import bTypes.* + import frontendAccess.{backendReporting, compilerSettings} + import int.given + + val backendUtils = new BackendUtils(this) + val classfileWriter = ClassfileWriter(frontendAccess) + + def postProcessAndSendToDisk(generatedDefs: GeneratedDefs): Unit = { + val GeneratedDefs(classes, tasty) = generatedDefs + for (GeneratedClass(classNode, sourceFile, isArtifact, onFileCreated) <- classes) { + val bytes = + try + if !isArtifact then setSerializableLambdas(classNode) + setInnerClasses(classNode) + serializeClass(classNode) + catch + case e: java.lang.RuntimeException if e.getMessage != null && e.getMessage.nn.contains("too large!") => + backendReporting.error(em"Could not write class ${classNode.name} because it exceeds JVM code size limits. ${e.getMessage}") + null + case ex: Throwable => + ex.printStackTrace() + backendReporting.error(em"Error while emitting ${classNode.name}\n${ex.getMessage}") + null + + if (bytes != null) { + if (AsmUtils.traceSerializedClassEnabled && classNode.name.nn.contains(AsmUtils.traceSerializedClassPattern)) + AsmUtils.traceClass(bytes) + + val clsFile = classfileWriter.writeClass(classNode.name.nn, bytes, sourceFile) + if clsFile != null then onFileCreated(clsFile) + } + } + + for (GeneratedTasty(classNode, binaryGen) <- tasty){ + classfileWriter.writeTasty(classNode.name.nn, binaryGen()) + } + } + + private def setSerializableLambdas(classNode: ClassNode): Unit = { + import backendUtils.{collectSerializableLambdas, addLambdaDeserialize} + val serializableLambdas = collectSerializableLambdas(classNode) + if serializableLambdas.nonEmpty then + addLambdaDeserialize(classNode, serializableLambdas) + } + + private def setInnerClasses(classNode: ClassNode): Unit = { + import backendUtils.{collectNestedClasses, addInnerClasses} + classNode.innerClasses.nn.clear() + val (declared, referred) = collectNestedClasses(classNode) + addInnerClasses(classNode, declared, referred) + } + + def serializeClass(classNode: ClassNode): Array[Byte] = { + val cw = new ClassWriterWithBTypeLub(backendUtils.extraProc) + classNode.accept(cw) + cw.toByteArray.nn + } + + // ----------------------------------------------------------------------------------------- + // finding the least upper bound in agreement with the bytecode verifier (given two internal names handed by ASM) + // Background: + // http://gallium.inria.fr/~xleroy/publi/bytecode-verification-JAR.pdf + // http://comments.gmane.org/gmane.comp.java.vm.languages/2293 + // https://github.com/scala/bug/issues/3872 + // ----------------------------------------------------------------------------------------- + + /* An `asm.ClassWriter` that uses `jvmWiseLUB()` + * The internal name of the least common ancestor of the types given by inameA and inameB. + * It's what ASM needs to know in order to compute stack map frames, http://asm.ow2.org/doc/developer-guide.html#controlflow + */ + final class ClassWriterWithBTypeLub(flags: Int) extends ClassWriter(flags) { + + /** + * This method is used by asm when computing stack map frames. It is thread-safe: it depends + * only on the BTypes component, which does not depend on global. + * TODO @lry move to a different place where no global is in scope, on bTypes. + */ + override def getCommonSuperClass(inameA: String, inameB: String): String = { + // All types that appear in a class node need to have their ClassBType cached, see [[cachedClassBType]]. 
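+      // Illustrative example (assumed behaviour, not from the patch): for
+      // "java/lang/Integer" and "java/lang/Long" the verifier-compatible answer is
+      // their common superclass "java/lang/Number"; for classes that share only an
+      // interface, jvmWiseLUB conservatively falls back towards "java/lang/Object".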
+ val a = classBTypeFromInternalName(inameA) + val b = classBTypeFromInternalName(inameB) + val lub = a.jvmWiseLUB(b) + val lubName = lub.internalName + assert(lubName != "scala/Any") + lubName // ASM caches the answer during the lifetime of a ClassWriter. We outlive that. Not sure whether caching on our side would improve things. + } + } +} + +/** + * The result of code generation. [[isArtifact]] is `true` for mirror. + */ +case class GeneratedClass(classNode: ClassNode, sourceFile: AbstractFile, isArtifact: Boolean, onFileCreated: AbstractFile => Unit) +case class GeneratedTasty(classNode: ClassNode, tastyGen: () => Array[Byte]) +case class GeneratedDefs(classes: List[GeneratedClass], tasty: List[GeneratedTasty]) + +// Temporary class, will be refactored in a future commit +trait ClassWriterForPostProcessor { + type InternalName = String + def write(bytes: Array[Byte], className: InternalName, sourceFile: AbstractFile): Unit +} diff --git a/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala b/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala new file mode 100644 index 000000000000..80ee68bc94c3 --- /dev/null +++ b/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala @@ -0,0 +1,79 @@ +package dotty.tools.backend.jvm + +import scala.collection.mutable.{Clearable, HashSet} +import dotty.tools.dotc.util.* +import dotty.tools.dotc.reporting.Message +import dotty.tools.io.AbstractFile +import java.util.{Collection => JCollection, Map => JMap} +import dotty.tools.dotc.core.Contexts.Context +import dotty.tools.dotc.report +import dotty.tools.dotc.core.Phases + +/** + * Functionality needed in the post-processor whose implementation depends on the compiler + * frontend. All methods are synchronized. 
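+ * For instance (illustrative usage): a post-processing worker reporting an
+ * oversized method calls `backendReporting.error(...)`, which routes the call
+ * through `frontendSynch`, so concurrent workers never race on the reporter.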
+ */ +sealed abstract class PostProcessorFrontendAccess { + import PostProcessorFrontendAccess._ + + def compilerSettings: CompilerSettings + def backendReporting: BackendReporting + def getEntryPoints: List[String] + + private val frontendLock: AnyRef = new Object() + inline final def frontendSynch[T](inline x: => T): T = frontendLock.synchronized(x) +} + +object PostProcessorFrontendAccess { + sealed trait CompilerSettings { + def debug: Boolean + def target: String // javaOutputVersion + + def dumpClassesDirectory: Option[String] + def outputDirectory: AbstractFile + + def mainClass: Option[String] + } + + sealed trait BackendReporting { + def error(message: Context ?=> Message): Unit + def warning(message: Context ?=> Message): Unit + def log(message: Context ?=> String): Unit + } + + class Impl[I <: DottyBackendInterface](val int: I, entryPoints: HashSet[String]) extends PostProcessorFrontendAccess { + import int.given + lazy val compilerSettings: CompilerSettings = buildCompilerSettings() + + private def buildCompilerSettings(): CompilerSettings = new CompilerSettings { + extension [T](s: dotty.tools.dotc.config.Settings.Setting[T]) + def valueSetByUser: Option[T] = + Option(s.value).filter(_ != s.default) + def s = ctx.settings + + lazy val target = + val releaseValue = Option(s.javaOutputVersion.value).filter(_.nonEmpty) + val targetValue = Option(s.XuncheckedJavaOutputVersion.value).filter(_.nonEmpty) + (releaseValue, targetValue) match + case (Some(release), None) => release + case (None, Some(target)) => target + case (Some(release), Some(_)) => + report.warning(s"The value of ${s.XuncheckedJavaOutputVersion.name} was overridden by ${ctx.settings.javaOutputVersion.name}") + release + case (None, None) => "8" // least supported version by default + + lazy val debug: Boolean = ctx.debug + lazy val dumpClassesDirectory: Option[String] = s.Ydumpclasses.valueSetByUser + lazy val outputDirectory: AbstractFile = s.outputDir.value + lazy val mainClass: Option[String] = s.XmainClass.valueSetByUser + } + + object backendReporting extends BackendReporting { + def error(message: Context ?=> Message): Unit = frontendSynch(report.error(message)) + def warning(message: Context ?=> Message): Unit = frontendSynch(report.warning(message)) + def log(message: Context ?=> String): Unit = frontendSynch(report.log(message)) + } + + def getEntryPoints: List[String] = frontendSynch(entryPoints.toList) + } +} \ No newline at end of file diff --git a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala index 4caf1f6b5fa2..eee791852fde 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala @@ -3532,13 +3532,16 @@ class JSCodeGen()(using genCtx: Context) { val closure = js.Closure(arrow = true, formalCaptures, formalParams, restParam, genBody, actualCaptures) if (!funInterfaceSym.exists || defn.isFunctionClass(funInterfaceSym)) { - assert(!funInterfaceSym.exists || defn.isFunctionClass(funInterfaceSym), - s"Invalid functional interface $funInterfaceSym reached the back-end") val formalCount = formalParams.size val cls = ClassName("scala.scalajs.runtime.AnonFunction" + formalCount) val ctorName = MethodName.constructor( jstpe.ClassRef(ClassName("scala.scalajs.js.Function" + formalCount)) :: Nil) js.New(cls, js.MethodIdent(ctorName), List(closure)) + } else if (funInterfaceSym.name == tpnme.FunctionXXL && funInterfaceSym.owner == defn.ScalaRuntimePackageClass) { + val cls = 
ClassName("scala.scalajs.runtime.AnonFunctionXXL") + val ctorName = MethodName.constructor( + jstpe.ClassRef(ClassName("scala.scalajs.js.Function1")) :: Nil) + js.New(cls, js.MethodIdent(ctorName), List(closure)) } else { assert(funInterfaceSym.isJSType, s"Invalid functional interface $funInterfaceSym reached the back-end") diff --git a/compiler/src/dotty/tools/dotc/CompilationUnit.scala b/compiler/src/dotty/tools/dotc/CompilationUnit.scala index 046b649941b1..8415646eb16c 100644 --- a/compiler/src/dotty/tools/dotc/CompilationUnit.scala +++ b/compiler/src/dotty/tools/dotc/CompilationUnit.scala @@ -154,11 +154,13 @@ object CompilationUnit { var containsCaptureChecking = false var containsMacroAnnotation = false def traverse(tree: Tree)(using Context): Unit = { - if (tree.symbol.isQuote) - containsQuote = true if tree.symbol.is(Flags.Inline) then containsInline = true tree match + case _: tpd.Quote => + containsQuote = true + case tree: tpd.Apply if tree.symbol == defn.QuotedTypeModule_of => + containsQuote = true case Import(qual, selectors) => tpd.languageImport(qual) match case Some(prefix) => diff --git a/compiler/src/dotty/tools/dotc/Compiler.scala b/compiler/src/dotty/tools/dotc/Compiler.scala index 15d4a39c511f..a6118732d4ae 100644 --- a/compiler/src/dotty/tools/dotc/Compiler.scala +++ b/compiler/src/dotty/tools/dotc/Compiler.scala @@ -59,7 +59,8 @@ class Compiler { /** Phases dealing with the transformation from pickled trees to backend trees */ protected def transformPhases: List[List[Phase]] = List(new InstrumentCoverage) :: // Perform instrumentation for code coverage (if -coverage-out is set) - List(new FirstTransform, // Some transformations to put trees into a canonical form + List(new CrossVersionChecks, // Check issues related to deprecated and experimental + new FirstTransform, // Some transformations to put trees into a canonical form new CheckReentrant, // Internal use only: Check that compiled program has no data races involving global vars new ElimPackagePrefixes, // Eliminate references to package prefixes in Select nodes new CookComments, // Cook the comments: expand variables, doc, etc. @@ -71,8 +72,7 @@ class Compiler { new ElimRepeated, // Rewrite vararg parameters and arguments new RefChecks) :: // Various checks mostly related to abstract members and overriding List(new init.Checker) :: // Check initialization of objects - List(new CrossVersionChecks, // Check issues related to deprecated and experimental - new ProtectedAccessors, // Add accessors for protected members + List(new ProtectedAccessors, // Add accessors for protected members new ExtensionMethods, // Expand methods of value classes with extension methods new UncacheGivenAliases, // Avoid caching RHS of simple parameterless given aliases new ElimByName, // Map by-name parameters to functions @@ -90,7 +90,7 @@ class Compiler { new ExplicitOuter, // Add accessors to outer classes from nested ones. 
new ExplicitSelf, // Make references to non-trivial self types explicit as casts new StringInterpolatorOpt, // Optimizes raw and s and f string interpolators by rewriting them to string concatenations or formats - new DropBreaks) :: // Optimize local Break throws by rewriting them + new DropBreaks) :: // Optimize local Break throws by rewriting them List(new PruneErasedDefs, // Drop erased definitions from scopes and simplify erased expressions new UninitializedDefs, // Replaces `compiletime.uninitialized` by `_` new InlinePatterns, // Remove placeholders of inlined patterns diff --git a/compiler/src/dotty/tools/dotc/Driver.scala b/compiler/src/dotty/tools/dotc/Driver.scala index 5a2c8b7be56e..e548cae55ddd 100644 --- a/compiler/src/dotty/tools/dotc/Driver.scala +++ b/compiler/src/dotty/tools/dotc/Driver.scala @@ -30,18 +30,20 @@ class Driver { protected def doCompile(compiler: Compiler, files: List[AbstractFile])(using Context): Reporter = if files.nonEmpty then + var runOrNull = ctx.run try val run = compiler.newRun + runOrNull = run run.compile(files) finish(compiler, run) catch case ex: FatalError => report.error(ex.getMessage.nn) // signals that we should fail compilation. - case ex: TypeError => - println(s"${ex.toMessage} while compiling ${files.map(_.path).mkString(", ")}") + case ex: TypeError if !runOrNull.enrichedErrorMessage => + println(runOrNull.enrichErrorMessage(s"${ex.toMessage} while compiling ${files.map(_.path).mkString(", ")}")) throw ex - case ex: Throwable => - println(s"$ex while compiling ${files.map(_.path).mkString(", ")}") + case ex: Throwable if !runOrNull.enrichedErrorMessage => + println(runOrNull.enrichErrorMessage(s"Exception while compiling ${files.map(_.path).mkString(", ")}")) throw ex ctx.reporter diff --git a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala index 8cd1d160b42c..944ae794c94f 100644 --- a/compiler/src/dotty/tools/dotc/Run.scala +++ b/compiler/src/dotty/tools/dotc/Run.scala @@ -173,15 +173,14 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint */ var ccImportEncountered = false + private var myEnrichedErrorMessage = false + def compile(files: List[AbstractFile]): Unit = - try - val sources = files.map(runContext.getSource(_)) - compileSources(sources) - catch - case NonFatal(ex) => - if units.nonEmpty then report.echo(i"exception occurred while compiling $units%, %") - else report.echo(s"exception occurred while compiling ${files.map(_.name).mkString(", ")}") - throw ex + try compileSources(files.map(runContext.getSource(_))) + catch case NonFatal(ex) if !this.enrichedErrorMessage => + val files1 = if units.isEmpty then files else units.map(_.source.file) + report.echo(this.enrichErrorMessage(s"exception occurred while compiling ${files1.map(_.path)}")) + throw ex /** TODO: There's a fundamental design problem here: We assemble phases using `fusePhases` * when we first build the compiler. 
But we modify them with -Yskip, -Ystop @@ -398,3 +397,16 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint given runContext[Dummy_so_its_a_def]: Context = myCtx.nn assert(runContext.runId <= Periods.MaxPossibleRunId) } + +object Run { + extension (run: Run | Null) + def enrichedErrorMessage: Boolean = if run == null then false else run.myEnrichedErrorMessage + def enrichErrorMessage(errorMessage: String)(using Context): String = + if run == null then + report.enrichErrorMessage(errorMessage) + else if !run.enrichedErrorMessage then + run.myEnrichedErrorMessage = true + report.enrichErrorMessage(errorMessage) + else + errorMessage +} diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 5326361ada98..f0580c29e762 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -338,9 +338,9 @@ object desugar { def quotedPattern(tree: untpd.Tree, expectedTpt: untpd.Tree)(using Context): untpd.Tree = { def adaptToExpectedTpt(tree: untpd.Tree): untpd.Tree = tree match { // Add the expected type as an ascription - case _: untpd.Splice => + case _: untpd.SplicePattern => untpd.Typed(tree, expectedTpt).withSpan(tree.span) - case Typed(expr: untpd.Splice, tpt) => + case Typed(expr: untpd.SplicePattern, tpt) => cpy.Typed(tree)(expr, untpd.makeAndType(tpt, expectedTpt).withSpan(tpt.span)) // Propagate down the expected type to the leafs of the expression @@ -915,16 +915,16 @@ object desugar { name = normalizeName(mdef, mdef.tpt).asTermName, paramss = if mdef.name.isRightAssocOperatorName then - val (typaramss, paramss) = mdef.paramss.span(isTypeParamClause) // first extract type parameters + val (rightTyParams, paramss) = mdef.paramss.span(isTypeParamClause) // first extract type parameters paramss match - case params :: paramss1 => // `params` must have a single parameter and without `given` flag + case rightParam :: paramss1 => // `rightParam` must have a single parameter and without `given` flag def badRightAssoc(problem: String) = report.error(em"right-associative extension method $problem", mdef.srcPos) extParamss ++ mdef.paramss - params match + rightParam match case ValDefs(vparam :: Nil) => if !vparam.mods.is(Given) then // we merge the extension parameters with the method parameters, @@ -934,8 +934,10 @@ object desugar { // def %:[E](f: F)(g: G)(using H): Res = ??? // will be encoded as // def %:[A](using B)[E](f: F)(c: C)(using D)(g: G)(using H): Res = ??? 
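          // A smaller instance of the same rewrite (hypothetical example, not from the codebase):
          //   extension (xs: List[Int]) def +%:(x: Int): List[Int] = x :: xs
          // is encoded as
          //   def +%:(x: Int)(xs: List[Int]): List[Int] = x :: xs
          // so the operands of `x +%: xs` line up with the encoded clauses in source order.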
- val (leadingUsing, otherExtParamss) = extParamss.span(isUsingOrTypeParamClause) - leadingUsing ::: typaramss ::: params :: otherExtParamss ::: paramss1 + // + // If you change the names of the clauses below, also change them in right-associative-extension-methods.md + val (leftTyParamsAndLeadingUsing, leftParamAndTrailingUsing) = extParamss.span(isUsingOrTypeParamClause) + leftTyParamsAndLeadingUsing ::: rightTyParams ::: rightParam :: leftParamAndTrailingUsing ::: paramss1 else badRightAssoc("cannot start with using clause") case _ => @@ -1496,10 +1498,10 @@ object desugar { case vd: ValDef => vd } - def makeContextualFunction(formals: List[Tree], body: Tree, isErased: Boolean)(using Context): Function = { - val mods = if (isErased) Given | Erased else Given + def makeContextualFunction(formals: List[Tree], body: Tree, erasedParams: List[Boolean])(using Context): Function = { + val mods = Given val params = makeImplicitParameters(formals, mods) - FunctionWithMods(params, body, Modifiers(mods)) + FunctionWithMods(params, body, Modifiers(mods), erasedParams) } private def derivedValDef(original: Tree, named: NameTree, tpt: Tree, rhs: Tree, mods: Modifiers)(using Context) = { @@ -1730,7 +1732,7 @@ object desugar { val applyVParams = vargs.zipWithIndex.map { case (p: ValDef, _) => p.withAddedFlags(mods.flags) - case (p, n) => makeSyntheticParameter(n + 1, p).withAddedFlags(mods.flags) + case (p, n) => makeSyntheticParameter(n + 1, p).withAddedFlags(mods.flags.toTermFlags) } RefinedTypeTree(polyFunctionTpt, List( DefDef(nme.apply, applyTParams :: applyVParams :: Nil, res, EmptyTree).withFlags(Synthetic) @@ -1822,16 +1824,7 @@ object desugar { flatTree(pats1 map (makePatDef(tree, mods, _, rhs))) case ext: ExtMethods => Block(List(ext), Literal(Constant(())).withSpan(ext.span)) - case CapturingTypeTree(refs, parent) => - // convert `{refs} T` to `T @retains refs` - // `{refs}-> T` to `-> (T @retainsByName refs)` - def annotate(annotName: TypeName, tp: Tree) = - Annotated(tp, New(scalaAnnotationDot(annotName), List(refs))) - parent match - case ByNameTypeTree(restpt) => - cpy.ByNameTypeTree(parent)(annotate(tpnme.retainsByName, restpt)) - case _ => - annotate(tpnme.retains, parent) + case f: FunctionWithMods if f.hasErasedParams => makeFunctionWithValDefs(f, pt) } desugared.withSpan(tree.span) } @@ -1907,6 +1900,28 @@ object desugar { TypeDef(tpnme.REFINE_CLASS, impl).withFlags(Trait) } + /** Ensure the given function tree use only ValDefs for parameters. 
+ * For example, + * FunctionWithMods(List(TypeTree(A), TypeTree(B)), body, mods, erasedParams) + * gets converted to + * FunctionWithMods(List(ValDef(x$1, A), ValDef(x$2, B)), body, mods, erasedParams) + */ + def makeFunctionWithValDefs(tree: Function, pt: Type)(using Context): Function = { + val Function(args, result) = tree + args match { + case (_ : ValDef) :: _ => tree // ValDef case can be easily handled + case _ if !ctx.mode.is(Mode.Type) => tree + case _ => + val applyVParams = args.zipWithIndex.map { + case (p, n) => makeSyntheticParameter(n + 1, p) + } + tree match + case tree: FunctionWithMods => + untpd.FunctionWithMods(applyVParams, result, tree.mods, tree.erasedParams) + case _ => untpd.Function(applyVParams, result) + } + } + /** Returns list of all pattern variables, possibly with their types, * without duplicates */ @@ -1961,15 +1976,13 @@ object desugar { trees foreach collect case Block(Nil, expr) => collect(expr) - case Quote(expr) => + case Quote(body, _) => new UntypedTreeTraverser { def traverse(tree: untpd.Tree)(using Context): Unit = tree match { - case Splice(expr) => collect(expr) + case SplicePattern(body, _) => collect(body) case _ => traverseChildren(tree) } - }.traverse(expr) - case CapturingTypeTree(refs, parent) => - collect(parent) + }.traverse(body) case _ => } collect(tree) diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index 9b55db600d3d..2d335d1ed380 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -223,9 +223,6 @@ trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => /** Strip `=> T` to `T` and (under pureFunctions) `{refs}-> T` to `T` */ def stripByNameType(tree: Tree)(using Context): Tree = unsplice(tree) match case ByNameTypeTree(t1) => t1 - case untpd.CapturingTypeTree(_, parent) => - val parent1 = stripByNameType(parent) - if parent1 eq parent then tree else parent1 case _ => tree /** All type and value parameter symbols of this DefDef */ @@ -399,6 +396,8 @@ trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] Some(tree) case Block(Nil, expr) => functionWithUnknownParamType(expr) + case NamedArg(_, expr) => + functionWithUnknownParamType(expr) case _ => None } @@ -465,19 +464,21 @@ trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] } } - /** Under pureFunctions: A builder and extractor for `=> T`, which is an alias for `{*}-> T`. - * Only trees of the form `=> T` are matched; trees written directly as `{*}-> T` + /** Under pureFunctions: A builder and extractor for `=> T`, which is an alias for `->{cap} T`. + * Only trees of the form `=> T` are matched; trees written directly as `->{cap} T` * are ignored by the extractor. 
*/ object ImpureByNameTypeTree: - def apply(tp: ByNameTypeTree)(using Context): untpd.CapturingTypeTree = - untpd.CapturingTypeTree( - untpd.captureRoot.withSpan(tp.span.startPos) :: Nil, tp) + def apply(tp: Tree)(using Context): untpd.ByNameTypeTree = + untpd.ByNameTypeTree( + untpd.CapturesAndResult( + untpd.captureRoot.withSpan(tp.span.startPos) :: Nil, tp)) - def unapply(tp: Tree)(using Context): Option[ByNameTypeTree] = tp match - case untpd.CapturingTypeTree(id @ Select(_, nme.CAPTURE_ROOT) :: Nil, bntp: ByNameTypeTree) - if id.span == bntp.span.startPos => Some(bntp) + def unapply(tp: Tree)(using Context): Option[Tree] = tp match + case untpd.ByNameTypeTree( + untpd.CapturesAndResult(id @ Select(_, nme.CAPTURE_ROOT) :: Nil, result)) + if id.span == result.span.startPos => Some(result) case _ => None end ImpureByNameTypeTree } @@ -960,7 +961,7 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => && tree.isTerm && { val qualType = tree.qualifier.tpe - hasRefinement(qualType) && !qualType.derivesFrom(defn.PolyFunctionClass) + hasRefinement(qualType) && !defn.isRefinedFunctionType(qualType) } def loop(tree: Tree): Boolean = tree match case TypeApply(fun, _) => @@ -1024,33 +1025,19 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => case t => assert(t.span.exists, i"$t") } - /** Extractors for quotes */ - object Quoted { + object QuotedTypeOf { /** Extracts the content of a quoted tree. * The result can be the contents of a term or type quote, which * will return a term or type tree respectively. */ def unapply(tree: tpd.Apply)(using Context): Option[tpd.Tree] = - if tree.symbol == defn.QuotedRuntime_exprQuote then - // quoted.runtime.Expr.quote[T]() - Some(tree.args.head) - else if tree.symbol == defn.QuotedTypeModule_of then + if tree.symbol == defn.QuotedTypeModule_of then // quoted.Type.of[](quotes) val TypeApply(_, body :: _) = tree.fun: @unchecked Some(body) else None } - /** Extractors for splices */ - object Spliced { - /** Extracts the content of a spliced expression tree. - * The result can be the contents of a term splice, which - * will return a term tree. - */ - def unapply(tree: tpd.Apply)(using Context): Option[tpd.Tree] = - if tree.symbol.isExprSplice then Some(tree.args.head) else None - } - /** Extractors for type splices */ object SplicedType { /** Extracts the content of a spliced type tree. 
diff --git a/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala b/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala index e52bf1064e4c..ae674c25dc3d 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala @@ -30,16 +30,10 @@ class TreeMapWithImplicits extends tpd.TreeMapWithPreciseStatContexts { private def patternScopeCtx(pattern: Tree)(using Context): Context = { val nestedCtx = ctx.fresh.setNewScope - new TreeTraverser { - def traverse(tree: Tree)(using Context): Unit = { - tree match { - case d: DefTree if d.symbol.isOneOf(GivenOrImplicitVal) => - nestedCtx.enter(d.symbol) - case _ => - } - traverseChildren(tree) - } - }.traverse(pattern) + pattern.foreachSubTree { + case d: DefTree if d.symbol.isOneOf(GivenOrImplicitVal) => nestedCtx.enter(d.symbol) + case _ => + } nestedCtx } diff --git a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala index f5bf55802adf..955892b2ae22 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala @@ -43,7 +43,7 @@ class TreeTypeMap( def copy( typeMap: Type => Type, - treeMap: tpd.Tree => tpd.Tree, + treeMap: Tree => Tree, oldOwners: List[Symbol], newOwners: List[Symbol], substFrom: List[Symbol], @@ -85,13 +85,13 @@ class TreeTypeMap( updateDecls(prevStats.tail, newStats.tail) } - def transformInlined(tree: tpd.Inlined)(using Context): tpd.Tree = + def transformInlined(tree: Inlined)(using Context): Tree = val Inlined(call, bindings, expanded) = tree val (tmap1, bindings1) = transformDefs(bindings) val expanded1 = tmap1.transform(expanded) cpy.Inlined(tree)(call, bindings1, expanded1) - override def transform(tree: tpd.Tree)(using Context): tpd.Tree = treeMap(tree) match { + override def transform(tree: Tree)(using Context): Tree = treeMap(tree) match { case impl @ Template(constr, _, self, _) => val tmap = withMappedSyms(localSyms(impl :: self :: Nil)) cpy.Template(impl)( @@ -103,8 +103,24 @@ class TreeTypeMap( ).withType(tmap.mapType(impl.tpe)) case tree1 => tree1.withType(mapType(tree1.tpe)) match { - case id: Ident if tpd.needsSelect(id.tpe) => - ref(id.tpe.asInstanceOf[TermRef]).withSpan(id.span) + case id: Ident => + if needsSelect(id.tpe) then + ref(id.tpe.asInstanceOf[TermRef]).withSpan(id.span) + else + super.transform(id) + case sel: Select => + if needsIdent(sel.tpe) then + ref(sel.tpe.asInstanceOf[TermRef]).withSpan(sel.span) + else + super.transform(sel) + case app: Apply => + super.transform(app) + case blk @ Block(stats, expr) => + val (tmap1, stats1) = transformDefs(stats) + val expr1 = tmap1.transform(expr) + cpy.Block(blk)(stats1, expr1) + case lit @ Literal(Constant(tpe: Type)) => + cpy.Literal(lit)(Constant(mapType(tpe))) case ddef @ DefDef(name, paramss, tpt, _) => val (tmap1, paramss1) = transformAllParamss(paramss) val res = cpy.DefDef(ddef)(name, paramss1, tmap1.transform(tpt), tmap1.transform(ddef.rhs)) @@ -117,10 +133,6 @@ class TreeTypeMap( case tdef @ LambdaTypeTree(tparams, body) => val (tmap1, tparams1) = transformDefs(tparams) cpy.LambdaTypeTree(tdef)(tparams1, tmap1.transform(body)) - case blk @ Block(stats, expr) => - val (tmap1, stats1) = transformDefs(stats) - val expr1 = tmap1.transform(expr) - cpy.Block(blk)(stats1, expr1) case inlined: Inlined => transformInlined(inlined) case cdef @ CaseDef(pat, guard, rhs) => @@ -134,23 +146,16 @@ class TreeTypeMap( val bind1 = tmap.transformSub(bind) val 
expr1 = tmap.transform(expr)
      cpy.Labeled(labeled)(bind1, expr1)
-    case tree @ Hole(_, _, args, content, tpt) =>
-      val args1 = args.mapConserve(transform)
-      val content1 = transform(content)
-      val tpt1 = transform(tpt)
-      cpy.Hole(tree)(args = args1, content = content1, tpt = tpt1)
-    case lit @ Literal(Constant(tpe: Type)) =>
-      cpy.Literal(lit)(Constant(mapType(tpe)))
    case tree1 =>
      super.transform(tree1)
  }
}

-  override def transformStats(trees: List[tpd.Tree], exprOwner: Symbol)(using Context): List[Tree] =
+  override def transformStats(trees: List[Tree], exprOwner: Symbol)(using Context): List[Tree] =
    transformDefs(trees)._2

-  def transformDefs[TT <: tpd.Tree](trees: List[TT])(using Context): (TreeTypeMap, List[TT]) = {
-    val tmap = withMappedSyms(tpd.localSyms(trees))
+  def transformDefs[TT <: Tree](trees: List[TT])(using Context): (TreeTypeMap, List[TT]) = {
+    val tmap = withMappedSyms(localSyms(trees))
    (tmap, tmap.transformSub(trees))
  }

@@ -165,7 +170,7 @@ class TreeTypeMap(
    case nil =>
      (this, paramss)

-  def apply[ThisTree <: tpd.Tree](tree: ThisTree): ThisTree = transform(tree).asInstanceOf[ThisTree]
+  def apply[ThisTree <: Tree](tree: ThisTree): ThisTree = transform(tree).asInstanceOf[ThisTree]

  def apply(annot: Annotation): Annotation = annot.derivedAnnotation(apply(annot.tree))

diff --git a/compiler/src/dotty/tools/dotc/ast/Trees.scala b/compiler/src/dotty/tools/dotc/ast/Trees.scala
index c0b5987c3875..54c15b9909fa 100644
--- a/compiler/src/dotty/tools/dotc/ast/Trees.scala
+++ b/compiler/src/dotty/tools/dotc/ast/Trees.scala
@@ -17,6 +17,7 @@ import annotation.unchecked.uncheckedVariance
 import annotation.constructorOnly
 import compiletime.uninitialized
 import Decorators._
+import staging.StagingLevel.*

 object Trees {

@@ -677,6 +678,81 @@ object Trees {
     override def isType = expansion.isType
   }

+  /** A tree representing a quote `'{ body }` or `'[ body ]`.
+   *  `Quote`s are created by the `Parser`. In typer they can be typed as a
+   *  `Quote` with a known `tpt` or desugared and typed as a quote pattern.
+   *
+   *  `Quote`s are checked and transformed in the `staging`, `splicing` and `pickleQuotes`
+   *  phases. After the `pickleQuotes` phase, the only quotes that exist are in `inline`
+   *  methods. These are dropped when we remove the inline method implementations.
+   *
+   *  Type quotes `'[body]` from the parser are desugared into quote patterns (using a `Type.of[T]`)
+   *  when type checking. TASTy files will not contain type quotes. Type quotes are used again
+   *  in the `staging` phase to represent the reification of `Type.of[T]`.
+   *
+   *  Type tags `tags` are always empty before the `staging` phase. Tags for stage-inconsistent
+   *  types are added in the `staging` phase to level 0 quotes. Tags for types that refer to
+   *  definitions in an outer quote are added in the `splicing` phase.
+   *
+   *  @param body The tree that was quoted
+   *  @param tags Term references to instances of `Type[T]` for `T`s that are used in the quote
+   */
+  case class Quote[+T <: Untyped] private[ast] (body: Tree[T], tags: List[Tree[T]])(implicit @constructorOnly src: SourceFile)
+    extends TermTree[T] {
+    type ThisTree[+T <: Untyped] = Quote[T]
+
+    /** Is this a type quote `'[tpe]`? */
+    def isTypeQuote = body.isType
+
+    /** Type of the quoted expression as seen from outside the quote */
+    def bodyType(using Context): Type =
+      val quoteType = typeOpt // `Quotes ?=> Expr[T]` or `Quotes ?=> Type[T]`
+      val exprType = quoteType.argInfos.last // `Expr[T]` or `Type[T]`
+      exprType.argInfos.head // T
+
+    /** Set the type of the body of the quote */
+    def withBodyType(tpe: Type)(using Context): Quote[Type] =
+      val exprType = // `Expr[T]` or `Type[T]`
+        if body.isTerm then defn.QuotedExprClass.typeRef.appliedTo(tpe)
+        else defn.QuotedTypeClass.typeRef.appliedTo(tpe)
+      val quoteType = // `Quotes ?=> Expr[T]` or `Quotes ?=> Type[T]`
+        defn.FunctionType(1, isContextual = true)
+          .appliedTo(defn.QuotesClass.typeRef, exprType)
+      withType(quoteType)
+  }
+
+  /** A tree representing a splice `${ expr }`.
+   *
+   *  `Splice`s are created by the `Parser`. In typer they can be typed as a
+   *  `Splice` with a known `tpt` or desugared and typed as quote pattern holes.
+   *
+   *  `Splice`s are checked and transformed in the `staging` and `splicing` phases.
+   *  After the `splicing` phase, the only splices that exist are in `inline`
+   *  methods. These are dropped when we remove the inline method implementations.
+   *
+   *  @param expr The tree that was spliced
+   */
+  case class Splice[+T <: Untyped] private[ast] (expr: Tree[T])(implicit @constructorOnly src: SourceFile)
+    extends TermTree[T] {
+    type ThisTree[+T <: Untyped] = Splice[T]
+  }
+
+  /** A tree representing a pattern splice `${ pattern }`, `$ident` or `$ident(args*)` in a quote pattern.
+   *
+   *  The parser only creates `${ pattern }` and `$ident`, hence they will not have args.
+   *  While typing `$ident(args*)`, the args are identified and desugared into a `SplicePattern`
+   *  containing them.
+   *
+   *  `SplicePattern`s are removed after typing the pattern and are not present in TASTy.
+   *
+   *  @param body The tree that was spliced
+   *  @param args The arguments of the splice (the HOAS arguments)
+   */
+  case class SplicePattern[+T <: Untyped] private[ast] (body: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile)
+    extends TermTree[T] {
+    type ThisTree[+T <: Untyped] = SplicePattern[T]
+  }
+
   /** A type tree that represents an existing or inferred type */
   case class TypeTree[+T <: Untyped]()(implicit @constructorOnly src: SourceFile)
     extends DenotingTree[T] with TypTree[T] {
@@ -686,6 +762,19 @@ object Trees {
       s"TypeTree${if (hasType) s"[$typeOpt]" else ""}"
   }

+  /** Tree that replaces a level 1 splice in pickled (level 0) quotes.
+   *  It is only used when pickling quotes (it will never be in a TASTy file).
+   *
+   *  @param isTerm If this hole is a term, otherwise it is a type hole.
+   *  @param idx The index of the hole in its enclosing level 0 quote.
+   *  @param args The arguments of the splice to compute its content
+   *  @param content Lambda that computes the content of the hole. This tree is empty when in a quote pickle.
+ */ + case class Hole[+T <: Untyped](override val isTerm: Boolean, idx: Int, args: List[Tree[T]], content: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { + type ThisTree[+T <: Untyped] <: Hole[T] + override def isType: Boolean = !isTerm + } + /** A type tree whose type is inferred. These trees appear in two contexts * - as an argument of a TypeApply. In that case its type is always a TypeVar * - as a (result-)type of an inferred ValDef or DefDef. @@ -855,9 +944,9 @@ object Trees { } /** extends parents { self => body } - * @param parentsOrDerived A list of parents followed by a list of derived classes, - * if this is of class untpd.DerivingTemplate. - * Typed templates only have parents. + * @param preParentsOrDerived A list of parents followed by a list of derived classes, + * if this is of class untpd.DerivingTemplate. + * Typed templates only have parents. */ case class Template[+T <: Untyped] private[ast] (constr: DefDef[T], private var preParentsOrDerived: LazyTreeList[T], self: ValDef[T], private var preBody: LazyTreeList[T])(implicit @constructorOnly src: SourceFile) extends DefTree[T] with WithLazyFields { @@ -975,21 +1064,6 @@ object Trees { def genericEmptyValDef[T <: Untyped]: ValDef[T] = theEmptyValDef.asInstanceOf[ValDef[T]] def genericEmptyTree[T <: Untyped]: Thicket[T] = theEmptyTree.asInstanceOf[Thicket[T]] - /** Tree that replaces a level 1 splices in pickled (level 0) quotes. - * It is only used when picking quotes (will never be in a TASTy file). - * - * @param isTermHole If this hole is a term, otherwise it is a type hole. - * @param idx The index of the hole in it's enclosing level 0 quote. - * @param args The arguments of the splice to compute its content - * @param content Lambda that computes the content of the hole. This tree is empty when in a quote pickle. 
- * @param tpt Type of the hole - */ - case class Hole[+T <: Untyped](isTermHole: Boolean, idx: Int, args: List[Tree[T]], content: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[+T <: Untyped] <: Hole[T] - override def isTerm: Boolean = isTermHole - override def isType: Boolean = !isTermHole - } - def flatten[T <: Untyped](trees: List[Tree[T]]): List[Tree[T]] = { def recur(buf: ListBuffer[Tree[T]] | Null, remaining: List[Tree[T]]): ListBuffer[Tree[T]] | Null = remaining match { @@ -1087,6 +1161,9 @@ object Trees { type SeqLiteral = Trees.SeqLiteral[T] type JavaSeqLiteral = Trees.JavaSeqLiteral[T] type Inlined = Trees.Inlined[T] + type Quote = Trees.Quote[T] + type Splice = Trees.Splice[T] + type SplicePattern = Trees.SplicePattern[T] type TypeTree = Trees.TypeTree[T] type InferredTypeTree = Trees.InferredTypeTree[T] type SingletonTypeTree = Trees.SingletonTypeTree[T] @@ -1257,6 +1334,18 @@ object Trees { case tree: Inlined if (call eq tree.call) && (bindings eq tree.bindings) && (expansion eq tree.expansion) => tree case _ => finalize(tree, untpd.Inlined(call, bindings, expansion)(sourceFile(tree))) } + def Quote(tree: Tree)(body: Tree, tags: List[Tree])(using Context): Quote = tree match { + case tree: Quote if (body eq tree.body) && (tags eq tree.tags) => tree + case _ => finalize(tree, untpd.Quote(body, tags)(sourceFile(tree))) + } + def Splice(tree: Tree)(expr: Tree)(using Context): Splice = tree match { + case tree: Splice if (expr eq tree.expr) => tree + case _ => finalize(tree, untpd.Splice(expr)(sourceFile(tree))) + } + def SplicePattern(tree: Tree)(body: Tree, args: List[Tree])(using Context): SplicePattern = tree match { + case tree: SplicePattern if (body eq tree.body) && (args eq tree.args) => tree + case _ => finalize(tree, untpd.SplicePattern(body, args)(sourceFile(tree))) + } def SingletonTypeTree(tree: Tree)(ref: Tree)(using Context): SingletonTypeTree = tree match { case tree: SingletonTypeTree if (ref eq tree.ref) => tree case _ => finalize(tree, untpd.SingletonTypeTree(ref)(sourceFile(tree))) @@ -1337,9 +1426,9 @@ object Trees { case tree: Thicket if (trees eq tree.trees) => tree case _ => finalize(tree, untpd.Thicket(trees)(sourceFile(tree))) } - def Hole(tree: Tree)(isTerm: Boolean, idx: Int, args: List[Tree], content: Tree, tpt: Tree)(using Context): Hole = tree match { + def Hole(tree: Tree)(isTerm: Boolean, idx: Int, args: List[Tree], content: Tree)(using Context): Hole = tree match { case tree: Hole if isTerm == tree.isTerm && idx == tree.idx && args.eq(tree.args) && content.eq(tree.content) && content.eq(tree.content) => tree - case _ => finalize(tree, untpd.Hole(isTerm, idx, args, content, tpt)(sourceFile(tree))) + case _ => finalize(tree, untpd.Hole(isTerm, idx, args, content)(sourceFile(tree))) } // Copier methods with default arguments; these demand that the original tree @@ -1362,8 +1451,8 @@ object Trees { TypeDef(tree: Tree)(name, rhs) def Template(tree: Template)(using Context)(constr: DefDef = tree.constr, parents: List[Tree] = tree.parents, derived: List[untpd.Tree] = tree.derived, self: ValDef = tree.self, body: LazyTreeList = tree.unforcedBody): Template = Template(tree: Tree)(constr, parents, derived, self, body) - def Hole(tree: Hole)(isTerm: Boolean = tree.isTerm, idx: Int = tree.idx, args: List[Tree] = tree.args, content: Tree = tree.content, tpt: Tree = tree.tpt)(using Context): Hole = - Hole(tree: Tree)(isTerm, idx, args, content, tpt) + def Hole(tree: Hole)(isTerm: Boolean = tree.isTerm, idx: 
Int = tree.idx, args: List[Tree] = tree.args, content: Tree = tree.content)(using Context): Hole = + Hole(tree: Tree)(isTerm, idx, args, content) } @@ -1494,8 +1583,14 @@ object Trees { case Thicket(trees) => val trees1 = transform(trees) if (trees1 eq trees) tree else Thicket(trees1) - case tree @ Hole(_, _, args, content, tpt) => - cpy.Hole(tree)(args = transform(args), content = transform(content), tpt = transform(tpt)) + case Quote(body, tags) => + cpy.Quote(tree)(transform(body)(using quoteContext), transform(tags)) + case tree @ Splice(expr) => + cpy.Splice(tree)(transform(expr)(using spliceContext)) + case tree @ SplicePattern(body, args) => + cpy.SplicePattern(tree)(transform(body)(using spliceContext), transform(args)) + case tree @ Hole(isTerm, idx, args, content) => + cpy.Hole(tree)(isTerm, idx, transform(args), transform(content)) case _ => transformMoreCases(tree) } @@ -1635,8 +1730,14 @@ object Trees { this(this(x, arg), annot) case Thicket(ts) => this(x, ts) - case Hole(_, _, args, content, tpt) => - this(this(this(x, args), content), tpt) + case Quote(body, tags) => + this(this(x, body)(using quoteContext), tags) + case Splice(expr) => + this(x, expr)(using spliceContext) + case SplicePattern(body, args) => + this(this(x, body)(using spliceContext), args) + case Hole(_, _, args, content) => + this(this(x, args), content) case _ => foldMoreCases(x, tree) } diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala index 01d61986dee4..76e16cc00a90 100644 --- a/compiler/src/dotty/tools/dotc/ast/tpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala @@ -47,12 +47,18 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { Apply(expr, args) case _: RefTree | _: GenericApply | _: Inlined | _: Hole => ta.assignType(untpd.Apply(fn, args), fn, args) + case _ => + assert(ctx.reporter.errorsReported) + ta.assignType(untpd.Apply(fn, args), fn, args) def TypeApply(fn: Tree, args: List[Tree])(using Context): TypeApply = fn match case Block(Nil, expr) => TypeApply(expr, args) case _: RefTree | _: GenericApply => ta.assignType(untpd.TypeApply(fn, args), fn, args) + case _ => + assert(ctx.reporter.errorsReported) + ta.assignType(untpd.TypeApply(fn, args), fn, args) def Literal(const: Constant)(using Context): Literal = ta.assignType(untpd.Literal(const)) @@ -164,6 +170,15 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { def Inlined(call: Tree, bindings: List[MemberDef], expansion: Tree)(using Context): Inlined = ta.assignType(untpd.Inlined(call, bindings, expansion), bindings, expansion) + def Quote(body: Tree, tags: List[Tree])(using Context): Quote = + untpd.Quote(body, tags).withBodyType(body.tpe) + + def Splice(expr: Tree, tpe: Type)(using Context): Splice = + untpd.Splice(expr).withType(tpe) + + def Hole(isTerm: Boolean, idx: Int, args: List[Tree], content: Tree, tpe: Type)(using Context): Hole = + untpd.Hole(isTerm, idx, args, content).withType(tpe) + def TypeTree(tp: Type, inferred: Boolean = false)(using Context): TypeTree = (if inferred then untpd.InferredTypeTree() else untpd.TypeTree()).withType(tp) @@ -254,12 +269,12 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { // If `isParamDependent == false`, the value of `previousParamRefs` is not used. 
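For context on the `Quote`/`Splice` trees documented above and the `tpd.Quote`/`tpd.Splice`/`tpd.Hole` constructors added here, a minimal user-level macro shows the surface syntax these nodes model. This is a sketch only: `square` and `squareImpl` are invented names, and as usual the macro implementation must live in a separate compilation unit.

    import scala.quoted.*

    // `'{ ... }` parses to a Quote tree and `${ ... }` to a Splice tree.
    // A quote raises the staging level by one and a splice lowers it by one,
    // which is why the traversals above switch to `quoteContext` inside a
    // quote body and to `spliceContext` inside a splice.
    inline def square(inline n: Int): Int = ${ squareImpl('n) }

    def squareImpl(n: Expr[Int])(using Quotes): Expr[Int] =
      '{ $n * $n } // the quote's bodyType is Int; its own type is `Quotes ?=> Expr[Int]`
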
if isParamDependent then mutable.ListBuffer[TermRef]() else (null: ListBuffer[TermRef] | Null).uncheckedNN - def valueParam(name: TermName, origInfo: Type): TermSymbol = + def valueParam(name: TermName, origInfo: Type, isErased: Boolean): TermSymbol = val maybeImplicit = if tp.isContextualMethod then Given else if tp.isImplicitMethod then Implicit else EmptyFlags - val maybeErased = if tp.isErasedMethod then Erased else EmptyFlags + val maybeErased = if isErased then Erased else EmptyFlags def makeSym(info: Type) = newSymbol(sym, name, TermParam | maybeImplicit | maybeErased, info, coord = sym.coord) @@ -277,7 +292,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { assert(vparams.hasSameLengthAs(tp.paramNames) && vparams.head.isTerm) (vparams.asInstanceOf[List[TermSymbol]], remaining1) case nil => - (tp.paramNames.lazyZip(tp.paramInfos).map(valueParam), Nil) + (tp.paramNames.lazyZip(tp.paramInfos).lazyZip(tp.erasedParams).map(valueParam), Nil) val (rtp, paramss) = recur(tp.instantiate(vparams.map(_.termRef)), remaining1) (rtp, vparams :: paramss) case _ => @@ -385,9 +400,6 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { def Throw(expr: Tree)(using Context): Tree = ref(defn.throwMethod).appliedTo(expr) - def Hole(isTermHole: Boolean, idx: Int, args: List[Tree], content: Tree, tpt: Tree)(using Context): Hole = - ta.assignType(untpd.Hole(isTermHole, idx, args, content, tpt), tpt) - // ------ Making references ------------------------------------------------------ def prefixIsElidable(tp: NamedType)(using Context): Boolean = { @@ -414,6 +426,10 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { case _ => false } + def needsIdent(tp: Type)(using Context): Boolean = tp match + case tp: TermRef => tp.prefix eq NoPrefix + case _ => false + /** A tree representing the same reference as the given type */ def ref(tp: NamedType, needLoad: Boolean = true)(using Context): Tree = if (tp.isType) TypeTree(tp) @@ -1130,10 +1146,10 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { def etaExpandCFT(using Context): Tree = def expand(target: Tree, tp: Type)(using Context): Tree = tp match - case defn.ContextFunctionType(argTypes, resType, isErased) => + case defn.ContextFunctionType(argTypes, resType, _) => val anonFun = newAnonFun( ctx.owner, - MethodType.companion(isContextual = true, isErased = isErased)(argTypes, resType), + MethodType.companion(isContextual = true)(argTypes, resType), coord = ctx.owner.coord) def lambdaBody(refss: List[List[Tree]]) = expand(target.select(nme.apply).appliedToArgss(refss), resType)( @@ -1538,7 +1554,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { * * @param trees the elements the list represented by * the resulting tree should contain. - * @param tpe the type of the elements of the resulting list. + * @param tpt the type of the elements of the resulting list. 
* */ def mkList(trees: List[Tree], tpt: Tree)(using Context): Tree = diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index aeebb1f203e8..e3488034fef8 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -76,9 +76,13 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { override def isType: Boolean = body.isType } - /** A function type or closure with `implicit`, `erased`, or `given` modifiers */ - class FunctionWithMods(args: List[Tree], body: Tree, val mods: Modifiers)(implicit @constructorOnly src: SourceFile) - extends Function(args, body) + /** A function type or closure with `implicit` or `given` modifiers and information on which parameters are `erased` */ + class FunctionWithMods(args: List[Tree], body: Tree, val mods: Modifiers, val erasedParams: List[Boolean])(implicit @constructorOnly src: SourceFile) + extends Function(args, body) { + assert(args.length == erasedParams.length) + + def hasErasedParams = erasedParams.contains(true) + } /** A polymorphic function type */ case class PolyFunction(targs: List[Tree], body: Tree)(implicit @constructorOnly src: SourceFile) extends Tree { @@ -107,10 +111,6 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { override def isType: Boolean = !isTerm } case class Throw(expr: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree - case class Quote(quoted: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree - case class Splice(expr: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree { - def isInBraces: Boolean = span.end != expr.span.end - } case class ForYield(enums: List[Tree], expr: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree case class ForDo(enums: List[Tree], body: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree case class GenFrom(pat: Tree, expr: Tree, checkMode: GenCheckMode)(implicit @constructorOnly src: SourceFile) extends Tree @@ -148,7 +148,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { } /** {x1, ..., xN} T (only relevant under captureChecking) */ - case class CapturingTypeTree(refs: List[Tree], parent: Tree)(implicit @constructorOnly src: SourceFile) extends TypTree + case class CapturesAndResult(refs: List[Tree], parent: Tree)(implicit @constructorOnly src: SourceFile) extends TypTree /** Short-lived usage in typer, does not need copy/transform/fold infrastructure */ case class DependentTypeTree(tp: List[Symbol] => Type)(implicit @constructorOnly src: SourceFile) extends Tree @@ -397,6 +397,9 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def SeqLiteral(elems: List[Tree], elemtpt: Tree)(implicit src: SourceFile): SeqLiteral = new SeqLiteral(elems, elemtpt) def JavaSeqLiteral(elems: List[Tree], elemtpt: Tree)(implicit src: SourceFile): JavaSeqLiteral = new JavaSeqLiteral(elems, elemtpt) def Inlined(call: tpd.Tree, bindings: List[MemberDef], expansion: Tree)(implicit src: SourceFile): Inlined = new Inlined(call, bindings, expansion) + def Quote(body: Tree, tags: List[Tree])(implicit src: SourceFile): Quote = new Quote(body, tags) + def Splice(expr: Tree)(implicit src: SourceFile): Splice = new Splice(expr) + def SplicePattern(body: Tree, args: List[Tree])(implicit src: SourceFile): SplicePattern = new SplicePattern(body, args) def TypeTree()(implicit src: SourceFile): TypeTree = new TypeTree() def InferredTypeTree()(implicit src: SourceFile): TypeTree 
= new InferredTypeTree() def SingletonTypeTree(ref: Tree)(implicit src: SourceFile): SingletonTypeTree = new SingletonTypeTree(ref) @@ -422,7 +425,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def Export(expr: Tree, selectors: List[ImportSelector])(implicit src: SourceFile): Export = new Export(expr, selectors) def PackageDef(pid: RefTree, stats: List[Tree])(implicit src: SourceFile): PackageDef = new PackageDef(pid, stats) def Annotated(arg: Tree, annot: Tree)(implicit src: SourceFile): Annotated = new Annotated(arg, annot) - def Hole(isTermHole: Boolean, idx: Int, args: List[Tree], content: Tree, tpt: Tree)(implicit src: SourceFile): Hole = new Hole(isTermHole, idx, args, content, tpt) + def Hole(isTerm: Boolean, idx: Int, args: List[Tree], content: Tree)(implicit src: SourceFile): Hole = new Hole(isTerm, idx, args, content) // ------ Additional creation methods for untyped only ----------------- @@ -499,6 +502,9 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def captureRoot(using Context): Select = Select(scalaDot(nme.caps), nme.CAPTURE_ROOT) + def makeRetaining(parent: Tree, refs: List[Tree], annotName: TypeName)(using Context): Annotated = + Annotated(parent, New(scalaAnnotationDot(annotName), List(refs))) + def makeConstructor(tparams: List[TypeDef], vparamss: List[List[ValDef]], rhs: Tree = EmptyTree)(using Context): DefDef = DefDef(nme.CONSTRUCTOR, joinParams(tparams, vparamss), TypeTree(), rhs) @@ -618,14 +624,6 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case tree: Throw if expr eq tree.expr => tree case _ => finalize(tree, untpd.Throw(expr)(tree.source)) } - def Quote(tree: Tree)(quoted: Tree)(using Context): Tree = tree match { - case tree: Quote if quoted eq tree.quoted => tree - case _ => finalize(tree, untpd.Quote(quoted)(tree.source)) - } - def Splice(tree: Tree)(expr: Tree)(using Context): Tree = tree match { - case tree: Splice if expr eq tree.expr => tree - case _ => finalize(tree, untpd.Splice(expr)(tree.source)) - } def ForYield(tree: Tree)(enums: List[Tree], expr: Tree)(using Context): TermTree = tree match { case tree: ForYield if (enums eq tree.enums) && (expr eq tree.expr) => tree case _ => finalize(tree, untpd.ForYield(enums, expr)(tree.source)) @@ -664,9 +662,9 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case tree: Number if (digits == tree.digits) && (kind == tree.kind) => tree case _ => finalize(tree, untpd.Number(digits, kind)) } - def CapturingTypeTree(tree: Tree)(refs: List[Tree], parent: Tree)(using Context): Tree = tree match - case tree: CapturingTypeTree if (refs eq tree.refs) && (parent eq tree.parent) => tree - case _ => finalize(tree, untpd.CapturingTypeTree(refs, parent)) + def CapturesAndResult(tree: Tree)(refs: List[Tree], parent: Tree)(using Context): Tree = tree match + case tree: CapturesAndResult if (refs eq tree.refs) && (parent eq tree.parent) => tree + case _ => finalize(tree, untpd.CapturesAndResult(refs, parent)) def TypedSplice(tree: Tree)(splice: tpd.Tree)(using Context): ProxyTree = tree match { case tree: TypedSplice if splice `eq` tree.splice => tree @@ -707,10 +705,6 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { cpy.Tuple(tree)(transform(trees)) case Throw(expr) => cpy.Throw(tree)(transform(expr)) - case Quote(t) => - cpy.Quote(tree)(transform(t)) - case Splice(expr) => - cpy.Splice(tree)(transform(expr)) case ForYield(enums, expr) => cpy.ForYield(tree)(transform(enums), transform(expr)) case ForDo(enums, 
body) => @@ -733,8 +727,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { tree case MacroTree(expr) => cpy.MacroTree(tree)(transform(expr)) - case CapturingTypeTree(refs, parent) => - cpy.CapturingTypeTree(tree)(transform(refs), transform(parent)) + case CapturesAndResult(refs, parent) => + cpy.CapturesAndResult(tree)(transform(refs), transform(parent)) case _ => super.transformMoreCases(tree) } @@ -768,10 +762,6 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { this(x, trees) case Throw(expr) => this(x, expr) - case Quote(t) => - this(x, t) - case Splice(expr) => - this(x, expr) case ForYield(enums, expr) => this(this(x, enums), expr) case ForDo(enums, body) => @@ -796,7 +786,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { this(x, splice) case MacroTree(expr) => this(x, expr) - case CapturingTypeTree(refs, parent) => + case CapturesAndResult(refs, parent) => this(this(x, refs), parent) case _ => super.foldMoreCases(x, tree) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index e4533aa73ce0..3ba26c92cab5 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -6,6 +6,7 @@ import core.* import Types.*, Symbols.*, Contexts.*, Annotations.*, Flags.* import ast.{tpd, untpd} import Decorators.*, NameOps.* +import config.SourceVersion import config.Printers.capt import util.Property.Key import tpd.* @@ -19,6 +20,9 @@ private[cc] def retainedElems(tree: Tree)(using Context): List[Tree] = tree matc case Apply(_, Typed(SeqLiteral(elems, _), _) :: Nil) => elems case _ => Nil +def allowUniversalInBoxed(using Context) = + Feature.sourceVersion.isAtLeast(SourceVersion.`3.3`) + /** An exception thrown if a @retains argument is not syntactically a CaptureRef */ class IllegalCaptureRef(tpe: Type) extends Exception @@ -146,7 +150,6 @@ extension (tp: Type) defn.FunctionType( fname.functionArity, isContextual = fname.isContextFunction, - isErased = fname.isErasedFunction, isImpure = true).appliedTo(args) case _ => tp diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index 2b9fe9d3d923..fdc4f66beafa 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -70,7 +70,7 @@ sealed abstract class CaptureSet extends Showable: assert(!isConst) asInstanceOf[Var] - /** Does this capture set contain the root reference `*` as element? */ + /** Does this capture set contain the root reference `cap` as element? */ final def isUniversal(using Context) = elems.exists { case ref: TermRef => ref.symbol == defn.captureRoot @@ -133,7 +133,7 @@ sealed abstract class CaptureSet extends Showable: * for `x` in a state where we assume all supersets of `x` have just the elements * known at this point. On the other hand if x's capture set has no known elements, * a set `cs` might account for `x` only if it subsumes `x` or it contains the - * root capability `*`. + * root capability `cap`. 
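As background for the `*` → `cap` renames running through these capture-checking comments, here is a small sketch of the surface syntax under the experimental feature. The class names are invented, and the `{...}`-prefix notation is the 3.3-era syntax this changeset's comments use.

    import language.experimental.captureChecking

    class FileSystem

    // The universal capture set is now written `{cap}` instead of `{*}`:
    // a `{cap} FileSystem` may capture arbitrary capabilities, whereas a
    // `{fs} Logger` captures exactly the capability `fs`.
    class Logger(fs: {cap} FileSystem):
      def info(msg: String): Unit = ()

    def mkLogger(fs: {cap} FileSystem): {fs} Logger = Logger(fs)
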
   */
  def mightAccountFor(x: CaptureRef)(using Context): Boolean =
    reporting.trace(i"$this mightAccountFor $x, ${x.captureSetOfInfo}?", show = true) {
@@ -270,11 +270,16 @@ sealed abstract class CaptureSet extends Showable:
  def substParams(tl: BindingType, to: List[Type])(using Context) =
    map(Substituters.SubstParamsMap(tl, to))

-  /** Invoke handler if this set has (or later aquires) the root capability `*` */
+  /** Invoke handler if this set has (or later acquires) the root capability `cap` */
  def disallowRootCapability(handler: () => Context ?=> Unit)(using Context): this.type =
    if isUniversal then handler()
    this

+  /** Invoke handler on the elements to check wellformedness of the capture set */
+  def ensureWellformed(handler: List[CaptureRef] => Context ?=> Unit)(using Context): this.type =
+    handler(elems.toList)
+    this
+
  /** An upper approximation of this capture set, i.e. a constant set that is
   *  subcaptured by this set. If the current set is a variable
   *  it is the intersection of all upper approximations of known supersets
@@ -319,7 +324,7 @@ object CaptureSet:
  /** The empty capture set `{}` */
  val empty: CaptureSet.Const = Const(emptySet)

-  /** The universal capture set `{*}` */
+  /** The universal capture set `{cap}` */
  def universal(using Context): CaptureSet =
    defn.captureRoot.termRef.singletonCaptureSet
@@ -372,9 +377,12 @@ object CaptureSet:
    def isConst = isSolved
    def isAlwaysEmpty = false

-    /** A handler to be invoked if the root reference `*` is added to this set */
+    /** A handler to be invoked if the root reference `cap` is added to this set */
    var rootAddedHandler: () => Context ?=> Unit = () => ()

+    /** A handler to be invoked when new elems are added to this set */
+    var newElemAddedHandler: List[CaptureRef] => Context ?=> Unit = _ => ()
+
    var description: String = ""

    /** Record current elements in given VarState provided it does not yet
@@ -405,7 +413,8 @@ object CaptureSet:
      if !isConst && recordElemsState() then
        elems ++= newElems
        if isUniversal then rootAddedHandler()
-        // assert(id != 2 || elems.size != 2, this)
+        newElemAddedHandler(newElems.toList)
+        // assert(id != 5 || elems.size != 3, this)
        (CompareResult.OK /: deps) { (r, dep) =>
          r.andAlso(dep.tryInclude(newElems, this))
        }
@@ -425,11 +434,15 @@ object CaptureSet:
      rootAddedHandler = handler
      super.disallowRootCapability(handler)

+    override def ensureWellformed(handler: List[CaptureRef] => (Context) ?=> Unit)(using Context): this.type =
+      newElemAddedHandler = handler
+      super.ensureWellformed(handler)
+
    private var computingApprox = false

    /** Roughly: the intersection of all constant known supersets of this set.
     *  The aim is to find an as-good-as-possible constant set that is a superset
-     *  of this set. The universal set {*} is a sound fallback.
+     *  of this set. The universal set {cap} is a sound fallback.
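The `ensureWellformed`/`newElemAddedHandler` pair introduced above is, in essence, an observer hook on a growing set: the handler runs once on the current elements and again on every batch added later. A self-contained model of that pattern in plain Scala (invented names, not compiler code):

    final class ObservedSet[A]:
      private var elems = Set.empty[A]
      private var onNewElems: List[A] => Unit = _ => ()

      // Mirrors CaptureSet.ensureWellformed: check what is already present
      // and register the handler for later additions.
      def ensureWellformed(handler: List[A] => Unit): this.type =
        onNewElems = handler
        handler(elems.toList)
        this

      // Mirrors CaptureSet.Var.addNewElems notifying newElemAddedHandler.
      def addNewElems(xs: List[A]): Unit =
        val fresh = xs.filterNot(elems)
        elems ++= fresh
        if fresh.nonEmpty then onNewElems(fresh)

    @main def observedSetDemo =
      val s = ObservedSet[String]()
      s.ensureWellformed(es => es.foreach(e => println(s"checking $e")))
      s.addNewElems(List("a", "b")) // prints: checking a / checking b
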
*/ final def upperApprox(origin: CaptureSet)(using Context): CaptureSet = if computingApprox then universal diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 077d345d792d..380b6ce5fb81 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -52,12 +52,12 @@ object CheckCaptures: * @param outer0 the next enclosing environment */ case class Env( - owner: Symbol, - nestedInOwner: Boolean, - captured: CaptureSet, - isBoxed: Boolean, - outer0: Env | Null - ): + owner: Symbol, + nestedInOwner: Boolean, + captured: CaptureSet, + isBoxed: Boolean, + outer0: Env | Null): + def outer = outer0.nn def isOutermost = outer0 == null @@ -72,16 +72,23 @@ object CheckCaptures: */ final class SubstParamsMap(from: BindingType, to: List[Type])(using Context) extends ApproximatingTypeMap, IdempotentCaptRefMap: - def apply(tp: Type): Type = tp match - case tp: ParamRef => - if tp.binder == from then to(tp.paramNum) else tp - case tp: NamedType => - if tp.prefix `eq` NoPrefix then tp - else tp.derivedSelect(apply(tp.prefix)) - case _: ThisType => - tp - case _ => - mapOver(tp) + /** This SubstParamsMap is exact if `to` only contains `CaptureRef`s. */ + private val isExactSubstitution: Boolean = to.forall(_.isInstanceOf[CaptureRef]) + + /** As long as this substitution is exact, there is no need to create `Range`s when mapping invariant positions. */ + override protected def needsRangeIfInvariant(refs: CaptureSet): Boolean = !isExactSubstitution + + def apply(tp: Type): Type = + tp match + case tp: ParamRef => + if tp.binder == from then to(tp.paramNum) else tp + case tp: NamedType => + if tp.prefix `eq` NoPrefix then tp + else tp.derivedSelect(apply(tp.prefix)) + case _: ThisType => + tp + case _ => + mapOver(tp) /** Check that a @retains annotation only mentions references that can be tracked. * This check is performed at Typer. @@ -128,6 +135,20 @@ object CheckCaptures: if remaining.accountsFor(firstRef) then report.warning(em"redundant capture: $remaining already accounts for $firstRef", ann.srcPos) + def disallowRootCapabilitiesIn(tp: Type, what: String, have: String, addendum: String, pos: SrcPos)(using Context) = + val check = new TypeTraverser: + def traverse(t: Type) = + if variance >= 0 then + t.captureSet.disallowRootCapability: () => + def part = if t eq tp then "" else i"the part $t of " + report.error( + em"""$what cannot $have $tp since + |${part}that type captures the root capability `cap`. 
+ |$addendum""", + pos) + traverseChildren(t) + check.traverse(tp) + class CheckCaptures extends Recheck, SymTransformer: thisPhase => @@ -336,8 +357,8 @@ class CheckCaptures extends Recheck, SymTransformer: mapArgUsing(_.forceBoxStatus(false)) else if meth == defn.Caps_unsafeBoxFunArg then mapArgUsing { - case defn.FunctionOf(paramtpe :: Nil, restpe, isContectual, isErased) => - defn.FunctionOf(paramtpe.forceBoxStatus(true) :: Nil, restpe, isContectual, isErased) + case defn.FunctionOf(paramtpe :: Nil, restpe, isContectual) => + defn.FunctionOf(paramtpe.forceBoxStatus(true) :: Nil, restpe, isContectual) } else super.recheckApply(tree, pt) match @@ -430,10 +451,10 @@ class CheckCaptures extends Recheck, SymTransformer: block match case closureDef(mdef) => pt.dealias match - case defn.FunctionOf(ptformals, _, _, _) + case defn.FunctionOf(ptformals, _, _) if ptformals.nonEmpty && ptformals.forall(_.captureSet.isAlwaysEmpty) => // Redo setup of the anonymous function so that formal parameters don't - // get capture sets. This is important to avoid false widenings to `*` + // get capture sets. This is important to avoid false widenings to `cap` // when taking the base type of the actual closures's dependent function // type so that it conforms to the expected non-dependent function type. // See withLogFile.scala for a test case. @@ -525,6 +546,15 @@ class CheckCaptures extends Recheck, SymTransformer: case _ => super.recheckTyped(tree) + override def recheckTry(tree: Try, pt: Type)(using Context): Type = + val tp = super.recheckTry(tree, pt) + if allowUniversalInBoxed && Feature.enabled(Feature.saferExceptions) then + disallowRootCapabilitiesIn(tp, + "Result of `try`", "have type", + "This is often caused by a locally generated exception capability leaking as part of its result.", + tree.srcPos) + tp + /* Currently not needed, since capture checking takes place after ElimByName. * Keep around in case we need to get back to it def recheckByNameArg(tree: Tree, pt: Type)(using Context): Type = @@ -582,13 +612,13 @@ class CheckCaptures extends Recheck, SymTransformer: refs.disallowRootCapability { () => val kind = if tree.isInstanceOf[ValDef] then "mutable variable" else "expression" report.error( - em"""The $kind's type $wtp is not allowed to capture the root capability `*`. + em"""The $kind's type $wtp is not allowed to capture the root capability `cap`. |This usually means that a capability persists longer than its allowed lifetime.""", tree.srcPos) } checkNotUniversal(parent) case _ => - checkNotUniversal(typeToCheck) + if !allowUniversalInBoxed then checkNotUniversal(typeToCheck) super.recheckFinish(tpe, tree, pt) /** Massage `actual` and `expected` types using the methods below before checking conformance */ @@ -598,8 +628,8 @@ class CheckCaptures extends Recheck, SymTransformer: //println(i"check conforms $actual1 <<< $expected1") super.checkConformsExpr(actual1, expected1, tree) - private def toDepFun(args: List[Type], resultType: Type, isContextual: Boolean, isErased: Boolean)(using Context): Type = - MethodType.companion(isContextual = isContextual, isErased = isErased)(args, resultType) + private def toDepFun(args: List[Type], resultType: Type, isContextual: Boolean)(using Context): Type = + MethodType.companion(isContextual = isContextual)(args, resultType) .toFunctionType(isJava = false, alwaysDependent = true) /** Turn `expected` into a dependent function when `actual` is dependent. 
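The new `recheckTry` uses `disallowRootCapabilitiesIn` to reject a `try` whose result would smuggle out the `CanThrow` capability its handler provides. A user-level sketch of the feature being policed (the standard safer-exceptions shape; `LimitExceeded` is an invented name):

    import language.experimental.saferExceptions

    class LimitExceeded extends Exception

    def f(x: Double)(using CanThrow[LimitExceeded]): Double =
      if x < 4 then x * x else throw LimitExceeded()

    @main def saferDemo =
      try println(f(3.0)) // ok: a Double escapes, the capability does not
      catch case _: LimitExceeded => println("too big")
      // Returning e.g. `() => f(5.0)` from the try block would let the
      // CanThrow capability outlive its handler; under capture checking
      // that is exactly what the added check on the try's result rejects.
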
*/ @@ -607,9 +637,9 @@ class CheckCaptures extends Recheck, SymTransformer: def recur(expected: Type): Type = expected.dealias match case expected @ CapturingType(eparent, refs) => CapturingType(recur(eparent), refs, boxed = expected.isBoxed) - case expected @ defn.FunctionOf(args, resultType, isContextual, isErased) + case expected @ defn.FunctionOf(args, resultType, isContextual) if defn.isNonRefinedFunction(expected) && defn.isFunctionType(actual) && !defn.isNonRefinedFunction(actual) => - val expected1 = toDepFun(args, resultType, isContextual, isErased) + val expected1 = toDepFun(args, resultType, isContextual) expected1 case _ => expected @@ -656,7 +686,7 @@ class CheckCaptures extends Recheck, SymTransformer: expected /** Adapt `actual` type to `expected` type by inserting boxing and unboxing conversions - * + * * @param alwaysConst always make capture set variables constant after adaptation */ def adaptBoxed(actual: Type, expected: Type, pos: SrcPos, alwaysConst: Boolean = false)(using Context): Type = @@ -675,7 +705,7 @@ class CheckCaptures extends Recheck, SymTransformer: try val (eargs, eres) = expected.dealias.stripCapturing match - case defn.FunctionOf(eargs, eres, _, _) => (eargs, eres) + case defn.FunctionOf(eargs, eres, _) => (eargs, eres) case expected: MethodType => (expected.paramInfos, expected.resType) case expected @ RefinedType(_, _, rinfo: MethodType) if defn.isFunctionType(expected) => (rinfo.paramInfos, rinfo.resType) case _ => (aargs.map(_ => WildcardType), WildcardType) @@ -721,66 +751,57 @@ class CheckCaptures extends Recheck, SymTransformer: val arrow = if covariant then "~~>" else "<~~" i"adapting $actual $arrow $expected" - /** Destruct a capturing type `tp` to a tuple (cs, tp0, boxed), - * where `tp0` is not a capturing type. - * - * If `tp` is a nested capturing type, the return tuple always represents - * the innermost capturing type. The outer capture annotations can be - * reconstructed with the returned function. - */ - def destructCapturingType(tp: Type, reconstruct: Type => Type = x => x): ((Type, CaptureSet, Boolean), Type => Type) = - tp.dealias match - case tp @ CapturingType(parent, cs) => - if parent.dealias.isCapturingType then - destructCapturingType(parent, res => reconstruct(tp.derivedCapturingType(res, cs))) - else - ((parent, cs, tp.isBoxed), reconstruct) - case actual => - val res = if tp.isFromJavaObject then tp else actual - ((res, CaptureSet(), false), reconstruct) - def adapt(actual: Type, expected: Type, covariant: Boolean): Type = trace(adaptInfo(actual, expected, covariant), recheckr, show = true) { if expected.isInstanceOf[WildcardType] then actual else - val ((parent, cs, actualIsBoxed), recon) = destructCapturingType(actual) - - val needsAdaptation = actualIsBoxed != expected.isBoxedCapturing - val insertBox = needsAdaptation && covariant != actualIsBoxed - - val (parent1, cs1) = parent match { + // Decompose the actual type into the inner shape type, the capture set and the box status + val styp = if actual.isFromJavaObject then actual else actual.stripCapturing + val cs = actual.captureSet + val boxed = actual.isBoxedCapturing + + // A box/unbox should be inserted, if the actual box status mismatches with the expectation + val needsAdaptation = boxed != expected.isBoxedCapturing + // Whether to insert a box or an unbox? 
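Schematically, the rewritten `adapt` here decomposes the actual type into a shape, a capture set and a box status, flips the box status when it disagrees with the expectation, and adds back any capture set leaked while adapting function arguments and results. A toy model of that bookkeeping (illustrative only; the real code operates on `Type` and `CaptureSet`):

    final case class CapType(shape: String, captures: Set[String], boxed: Boolean)

    def adaptBoxStatus(actual: CapType, expectedBoxed: Boolean, leaked: Set[String]): CapType =
      val needsAdaptation = actual.boxed != expectedBoxed // insert a box or an unbox?
      val cs1 = actual.captures ++ leaked                 // capture set after adaptation
      if needsAdaptation then actual.copy(captures = cs1, boxed = !actual.boxed)
      else actual.copy(captures = cs1)
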
+ val insertBox = needsAdaptation && covariant != boxed + + // Adapt the inner shape type: get the adapted shape type, and the capture set leaked during adaptation + val (styp1, leaked) = styp match { case actual @ AppliedType(tycon, args) if defn.isNonRefinedFunction(actual) => - val (parent1, leaked) = adaptFun(parent, args.init, args.last, expected, covariant, insertBox, + adaptFun(actual, args.init, args.last, expected, covariant, insertBox, (aargs1, ares1) => actual.derivedAppliedType(tycon, aargs1 :+ ares1)) - (parent1, leaked ++ cs) - case actual @ RefinedType(_, _, rinfo: MethodType) if defn.isFunctionType(actual) => + case actual @ RefinedType(_, _, rinfo: MethodType) if defn.isFunctionOrPolyType(actual) => // TODO Find a way to combine handling of generic and dependent function types (here and elsewhere) - val (parent1, leaked) = adaptFun(parent, rinfo.paramInfos, rinfo.resType, expected, covariant, insertBox, + adaptFun(actual, rinfo.paramInfos, rinfo.resType, expected, covariant, insertBox, (aargs1, ares1) => rinfo.derivedLambdaType(paramInfos = aargs1, resType = ares1) .toFunctionType(isJava = false, alwaysDependent = true)) - (parent1, leaked ++ cs) case actual: MethodType => - val (parent1, leaked) = adaptFun(parent, actual.paramInfos, actual.resType, expected, covariant, insertBox, + adaptFun(actual, actual.paramInfos, actual.resType, expected, covariant, insertBox, (aargs1, ares1) => actual.derivedLambdaType(paramInfos = aargs1, resType = ares1)) - (parent1, leaked ++ cs) case actual @ RefinedType(p, nme, rinfo: PolyType) if defn.isFunctionOrPolyType(actual) => - val (parent1, leaked) = adaptTypeFun(parent, rinfo.resType, expected, covariant, insertBox, + adaptTypeFun(actual, rinfo.resType, expected, covariant, insertBox, ares1 => val rinfo1 = rinfo.derivedLambdaType(rinfo.paramNames, rinfo.paramInfos, ares1) val actual1 = actual.derivedRefinedType(p, nme, rinfo1) actual1 ) - (parent1, leaked ++ cs) case _ => - (parent, cs) + (styp, CaptureSet()) } + // Capture set of the term after adaptation + val cs1 = cs ++ leaked + + // Compute the adapted type + def adaptedType(resultBoxed: Boolean) = + styp1.capturing(if alwaysConst then CaptureSet(cs1.elems) else cs1).forceBoxStatus(resultBoxed) + if needsAdaptation then - val criticalSet = // the set which is not allowed to have `*` - if covariant then cs1 // can't box with `*` - else expected.captureSet // can't unbox with `*` - if criticalSet.isUniversal && expected.isValueType then + val criticalSet = // the set which is not allowed to have `cap` + if covariant then cs1 // can't box with `cap` + else expected.captureSet // can't unbox with `cap` + if criticalSet.isUniversal && expected.isValueType && !allowUniversalInBoxed then // We can't box/unbox the universal capability. Leave `actual` as it is // so we get an error in checkConforms. This tends to give better error // messages than disallowing the root capability in `criticalSet`. @@ -788,18 +809,19 @@ class CheckCaptures extends Recheck, SymTransformer: println(i"cannot box/unbox $actual vs $expected") actual else - // Disallow future addition of `*` to `criticalSet`. - criticalSet.disallowRootCapability { () => - report.error( - em"""$actual cannot be box-converted to $expected - |since one of their capture sets contains the root capability `*`""", - pos) - } + if !allowUniversalInBoxed then + // Disallow future addition of `cap` to `criticalSet`. 
+ criticalSet.disallowRootCapability { () => + report.error( + em"""$actual cannot be box-converted to $expected + |since one of their capture sets contains the root capability `cap`""", + pos) + } if !insertBox then // unboxing markFree(criticalSet, pos) - recon(CapturingType(parent1, if alwaysConst then CaptureSet(cs1.elems) else cs1, !actualIsBoxed)) + adaptedType(!boxed) else - recon(CapturingType(parent1, if alwaysConst then CaptureSet(cs1.elems) else cs1, actualIsBoxed)) + adaptedType(boxed) } var actualw = actual.widenDealias @@ -869,7 +891,7 @@ class CheckCaptures extends Recheck, SymTransformer: /** Check that self types of subclasses conform to self types of super classes. * (See comment below how this is achieved). The check assumes that classes - * without an explicit self type have the universal capture set `{*}` on the + * without an explicit self type have the universal capture set `{cap}` on the * self type. If a class without explicit self type is not `effectivelyFinal` * it is checked that the inferred self type is universal, in order to assure * that joint and separate compilation give the same result. @@ -926,13 +948,13 @@ class CheckCaptures extends Recheck, SymTransformer: * that this type parameter can't see. * For example, when capture checking the following expression: * - * def usingLogFile[T](op: (f: {*} File) => T): T = ... + * def usingLogFile[T](op: (f: {cap} File) => T): T = ... * - * usingLogFile[box ?1 () -> Unit] { (f: {*} File) => () => { f.write(0) } } + * usingLogFile[box ?1 () -> Unit] { (f: {cap} File) => () => { f.write(0) } } * * We may propagate `f` into ?1, making ?1 ill-formed. - * This also causes soundness issues, since `f` in ?1 should be widened to `*`, - * giving rise to an error that `*` cannot be included in a boxed capture set. + * This also causes soundness issues, since `f` in ?1 should be widened to `cap`, + * giving rise to an error that `cap` cannot be included in a boxed capture set. * * To solve this, we still allow ?1 to capture parameter refs like `f`, but * compensate this by pushing the widened capture set of `f` into ?1. @@ -961,8 +983,11 @@ class CheckCaptures extends Recheck, SymTransformer: recur(refs, Nil) private def healCaptureSet(cs: CaptureSet): Unit = - val toInclude = widenParamRefs(cs.elems.toList.filter(!isAllowed(_)).asInstanceOf) - toInclude.foreach(checkSubset(_, cs, tree.srcPos)) + def avoidance(elems: List[CaptureRef])(using Context): Unit = + val toInclude = widenParamRefs(elems.filter(!isAllowed(_)).asInstanceOf) + //println(i"HEAL $cs by widening to $toInclude") + toInclude.foreach(checkSubset(_, cs, tree.srcPos)) + cs.ensureWellformed(avoidance) private var allowed: SimpleIdentitySet[TermParamRef] = SimpleIdentitySet.empty @@ -971,7 +996,7 @@ class CheckCaptures extends Recheck, SymTransformer: case CapturingType(parent, refs) => healCaptureSet(refs) traverse(parent) - case tp @ RefinedType(parent, rname, rinfo: MethodType) if defn.isFunctionType(tp) => + case tp @ RefinedType(parent, rname, rinfo: MethodType) if defn.isFunctionOrPolyType(tp) => traverse(rinfo) case tp: TermLambda => val saved = allowed @@ -995,61 +1020,67 @@ class CheckCaptures extends Recheck, SymTransformer: * - Heal ill-formed capture sets of type parameters. See `healTypeParam`. 
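The `usingLogFile` example cited in the comment above, written out as compilable surface code (a sketch under the experimental flag; `File` is a stand-in class):

    import language.experimental.captureChecking

    class File:
      def write(n: Int): Unit = ()

    def usingLogFile[T](op: (f: {cap} File) => T): T =
      val f = File()
      op(f)

    @main def healDemo =
      val ok = usingLogFile { f => f.write(0); 42 } // fine: the Int result captures nothing
      // usingLogFile { f => () => f.write(0) }     // rejected: the closure would leak `f`
      println(ok)
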
   */
  def postCheck(unit: tpd.Tree)(using Context): Unit =
-    unit.foreachSubTree {
-      case _: InferredTypeTree =>
-      case tree: TypeTree if !tree.span.isZeroExtent =>
-        tree.knownType.foreachPart { tp =>
-          checkWellformedPost(tp, tree.srcPos)
-          tp match
-            case AnnotatedType(_, annot) if annot.symbol == defn.RetainsAnnot =>
-              warnIfRedundantCaptureSet(annot.tree)
-            case _ =>
-        }
-      case t: ValOrDefDef
-      if t.tpt.isInstanceOf[InferredTypeTree] && !Synthetics.isExcluded(t.symbol) =>
-        val sym = t.symbol
-        val isLocal =
-          sym.owner.ownersIterator.exists(_.isTerm)
-          || sym.accessBoundary(defn.RootClass).isContainedIn(sym.topLevelClass)
-        def canUseInferred = // If canUseInferred is false, all capturing types in the type of `sym` need to be given explicitly
-          sym.is(Private) // private symbols can always have inferred types
-          || sym.name.is(DefaultGetterName) // default getters are exempted since otherwise it would be
-                                            // too annoying. This is a hole since a defualt getter's result type
-                                            // might leak into a type variable.
-          || // non-local symbols cannot have inferred types since external capture types are not inferred
-            isLocal // local symbols still need explicit types if
-            && !sym.owner.is(Trait) // they are defined in a trait, since we do OverridingPairs checking before capture inference
-        def isNotPureThis(ref: CaptureRef) = ref match {
-          case ref: ThisType => !ref.cls.isPureClass
-          case _ => true
-        }
-        if !canUseInferred then
-          val inferred = t.tpt.knownType
-          def checkPure(tp: Type) = tp match
-            case CapturingType(_, refs)
-            if !refs.elems.filter(isNotPureThis).isEmpty =>
-              val resultStr = if t.isInstanceOf[DefDef] then " result" else ""
-              report.error(
-                em"""Non-local $sym cannot have an inferred$resultStr type
-                    |$inferred
-                    |with non-empty capture set $refs.
-                    |The type needs to be declared explicitly.""".withoutDisambiguation(),
-                t.srcPos)
+    val checker = new TreeTraverser:
+      def traverse(tree: Tree)(using Context): Unit =
+        traverseChildren(tree)
+        check(tree)
+      def check(tree: Tree) = tree match
+        case _: InferredTypeTree =>
+        case tree: TypeTree if !tree.span.isZeroExtent =>
+          tree.knownType.foreachPart { tp =>
+            checkWellformedPost(tp, tree.srcPos)
+            tp match
+              case AnnotatedType(_, annot) if annot.symbol == defn.RetainsAnnot =>
+                warnIfRedundantCaptureSet(annot.tree)
+              case _ =>
+          }
+        case t: ValOrDefDef
+        if t.tpt.isInstanceOf[InferredTypeTree] && !Synthetics.isExcluded(t.symbol) =>
+          val sym = t.symbol
+          val isLocal =
+            sym.owner.ownersIterator.exists(_.isTerm)
+            || sym.accessBoundary(defn.RootClass).isContainedIn(sym.topLevelClass)
+          def canUseInferred = // If canUseInferred is false, all capturing types in the type of `sym` need to be given explicitly
+            sym.is(Private) // private symbols can always have inferred types
+            || sym.name.is(DefaultGetterName) // default getters are exempted since otherwise it would be
+                                              // too annoying. This is a hole since a default getter's result type
+                                              // might leak into a type variable.
+ || // non-local symbols cannot have inferred types since external capture types are not inferred + isLocal // local symbols still need explicit types if + && !sym.owner.is(Trait) // they are defined in a trait, since we do OverridingPairs checking before capture inference + def isNotPureThis(ref: CaptureRef) = ref match { + case ref: ThisType => !ref.cls.isPureClass + case _ => true + } + if !canUseInferred then + val inferred = t.tpt.knownType + def checkPure(tp: Type) = tp match + case CapturingType(_, refs) + if !refs.elems.filter(isNotPureThis).isEmpty => + val resultStr = if t.isInstanceOf[DefDef] then " result" else "" + report.error( + em"""Non-local $sym cannot have an inferred$resultStr type + |$inferred + |with non-empty capture set $refs. + |The type needs to be declared explicitly.""".withoutDisambiguation(), + t.srcPos) + case _ => + inferred.foreachPart(checkPure, StopAt.Static) + case t @ TypeApply(fun, args) => + fun.knownType.widen match + case tl: PolyType => + val normArgs = args.lazyZip(tl.paramInfos).map { (arg, bounds) => + arg.withType(arg.knownType.forceBoxStatus( + bounds.hi.isBoxedCapturing | bounds.lo.isBoxedCapturing)) + } + checkBounds(normArgs, tl) case _ => - inferred.foreachPart(checkPure, StopAt.Static) - case t @ TypeApply(fun, args) => - fun.knownType.widen match - case tl: PolyType => - val normArgs = args.lazyZip(tl.paramInfos).map { (arg, bounds) => - arg.withType(arg.knownType.forceBoxStatus( - bounds.hi.isBoxedCapturing | bounds.lo.isBoxedCapturing)) - } - checkBounds(normArgs, tl) - case _ => - args.foreach(healTypeParam(_)) - case _ => - } + args.foreach(healTypeParam(_)) + case _ => + end check + end checker + checker.traverse(unit) if !ctx.reporter.errorsReported then // We dont report errors here if previous errors were reported, because other // errors often result in bad applied types, but flagging these bad types gives diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 461c18ea0980..bbe54f14b86c 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -12,6 +12,7 @@ import transform.Recheck.* import CaptureSet.IdentityCaptRefMap import Synthetics.isExcluded import util.Property +import dotty.tools.dotc.core.Annotations.Annotation /** A tree traverser that prepares a compilation unit to be capture checked. * It does the following: @@ -38,7 +39,6 @@ extends tpd.TreeTraverser: private def depFun(tycon: Type, argTypes: List[Type], resType: Type)(using Context): Type = MethodType.companion( isContextual = defn.isContextFunctionClass(tycon.classSymbol), - isErased = defn.isErasedFunctionClass(tycon.classSymbol) )(argTypes, resType) .toFunctionType(isJava = false, alwaysDependent = true) @@ -54,7 +54,7 @@ extends tpd.TreeTraverser: val boxedRes = recur(res) if boxedRes eq res then tp else tp1.derivedAppliedType(tycon, args.init :+ boxedRes) - case tp1 @ RefinedType(_, _, rinfo) if defn.isFunctionType(tp1) => + case tp1 @ RefinedType(_, _, rinfo: MethodType) if defn.isFunctionOrPolyType(tp1) => val boxedRinfo = recur(rinfo) if boxedRinfo eq rinfo then tp else boxedRinfo.toFunctionType(isJava = false, alwaysDependent = true) @@ -122,7 +122,7 @@ extends tpd.TreeTraverser: val sym = tp.typeSymbol if sym.isClass then sym == defn.AnyClass - // we assume Any is a shorthand of {*} Any, so if Any is an upper + // we assume Any is a shorthand of {cap} Any, so if Any is an upper // bound, the type is taken to be impure. 
else superTypeIsImpure(tp.superType) case tp: (RefinedOrRecType | MatchType) => @@ -155,7 +155,7 @@ extends tpd.TreeTraverser: case CapturingType(parent, refs) => needsVariable(parent) && refs.isConst // if refs is a variable, no need to add another - && !refs.isUniversal // if refs is {*}, an added variable would not change anything + && !refs.isUniversal // if refs is {cap}, an added variable would not change anything case _ => false }.showing(i"can have inferred capture $tp = $result", capt) @@ -231,7 +231,7 @@ extends tpd.TreeTraverser: tp.derivedAppliedType(tycon1, args1 :+ res1) else tp.derivedAppliedType(tycon1, args.mapConserve(arg => this(arg))) - case tp @ RefinedType(core, rname, rinfo) if defn.isFunctionType(tp) => + case tp @ RefinedType(core, rname, rinfo: MethodType) if defn.isFunctionOrPolyType(tp) => val rinfo1 = apply(rinfo) if rinfo1 ne rinfo then rinfo1.toFunctionType(isJava = false, alwaysDependent = true) else tp @@ -260,7 +260,13 @@ extends tpd.TreeTraverser: private def expandThrowsAlias(tp: Type)(using Context) = tp match case AppliedType(tycon, res :: exc :: Nil) if tycon.typeSymbol == defn.throwsAlias => // hard-coded expansion since $throws aliases in stdlib are defined with `?=>` rather than `?->` - defn.FunctionOf(defn.CanThrowClass.typeRef.appliedTo(exc) :: Nil, res, isContextual = true, isErased = true) + defn.FunctionOf( + AnnotatedType( + defn.CanThrowClass.typeRef.appliedTo(exc), + Annotation(defn.ErasedParamAnnot, defn.CanThrowClass.span)) :: Nil, + res, + isContextual = true + ) case _ => tp private def expandThrowsAliases(using Context) = new TypeMap: @@ -323,7 +329,7 @@ extends tpd.TreeTraverser: args.last, CaptureSet.empty, currentCs ++ outerCs) tp.derivedAppliedType(tycon1, args1 :+ resType1) tp1.capturing(outerCs) - case tp @ RefinedType(parent, nme.apply, rinfo: MethodType) if defn.isFunctionType(tp) => + case tp @ RefinedType(parent, nme.apply, rinfo: MethodType) if defn.isFunctionOrPolyType(tp) => propagateDepFunctionResult(mapOver(tp), currentCs ++ outerCs) .capturing(outerCs) case _ => @@ -405,11 +411,28 @@ extends tpd.TreeTraverser: boxed = tree.symbol.is(Mutable), // types of mutable variables are boxed exact = tree.symbol.allOverriddenSymbols.hasNext // types of symbols that override a parent don't get a capture set ) + if allowUniversalInBoxed && tree.symbol.is(Mutable) + && !tree.symbol.hasAnnotation(defn.UncheckedCapturesAnnot) + then + CheckCaptures.disallowRootCapabilitiesIn(tpt.knownType, + i"Mutable variable ${tree.symbol.name}", "have type", + "This restriction serves to prevent local capabilities from escaping the scope where they are defined.", + tree.srcPos) traverse(tree.rhs) case tree @ TypeApply(fn, args) => traverse(fn) for case arg: TypeTree <- args do transformTT(arg, boxed = true, exact = false) // type arguments in type applications are boxed + + if allowUniversalInBoxed then + val polyType = fn.tpe.widen.asInstanceOf[TypeLambda] + for case (arg: TypeTree, pinfo, pname) <- args.lazyZip(polyType.paramInfos).lazyZip((polyType.paramNames)) do + if pinfo.bounds.hi.hasAnnotation(defn.Caps_SealedAnnot) then + def where = if fn.symbol.exists then i" in the body of ${fn.symbol}" else "" + CheckCaptures.disallowRootCapabilitiesIn(arg.knownType, + i"Sealed type variable $pname", " be instantiated to", + i"This is often caused by a local capability$where\nleaking as part of its result.", + tree.srcPos) case _ => traverseChildren(tree) tree match @@ -488,11 +511,10 @@ extends tpd.TreeTraverser: def apply(tree: Tree)(using Context): 
Unit = traverse(tree)(using ctx.withProperty(Setup.IsDuringSetupKey, Some(()))) -end Setup object Setup: val IsDuringSetupKey = new Property.Key[Unit] def isDuringSetup(using Context): Boolean = ctx.property(IsDuringSetupKey).isDefined - +end Setup \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/cc/Synthetics.scala b/compiler/src/dotty/tools/dotc/cc/Synthetics.scala index dacbd27e0f35..5fe68dd6a7ac 100644 --- a/compiler/src/dotty/tools/dotc/cc/Synthetics.scala +++ b/compiler/src/dotty/tools/dotc/cc/Synthetics.scala @@ -54,9 +54,9 @@ object Synthetics: /** Add capture dependencies to the type of the `apply` or `copy` method of a case class. * An apply method in a case class like this: - * case class CC(a: {d} A, b: B, {*} c: C) + * case class CC(a: {d} A, b: B, {cap} c: C) * would get type - * def apply(a': {d} A, b: B, {*} c': C): {a', c'} CC { val a = {a'} A, val c = {c'} C } + * def apply(a': {d} A, b: B, {cap} c': C): {a', c'} CC { val a = {a'} A, val c = {c'} C } * where `'` is used to indicate the difference between parameter symbol and refinement name. * Analogous for the copy method. */ @@ -123,7 +123,7 @@ object Synthetics: case _ => info - /** Augment an unapply of type `(x: C): D` to `(x: {*} C): {x} D` */ + /** Augment an unapply of type `(x: C): D` to `(x: {cap} C): {x} D` */ private def addUnapplyCaptures(info: Type)(using Context): Type = info match case info: MethodType => val paramInfo :: Nil = info.paramInfos: @unchecked diff --git a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala index 7f20d7c7d9ea..1411493bcbfd 100644 --- a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala @@ -6,7 +6,7 @@ package dotty.tools.dotc.classpath import scala.language.unsafeNulls import java.io.{File => JFile} -import java.net.URL +import java.net.{URI, URL} import java.nio.file.{FileSystems, Files} import dotty.tools.dotc.classpath.PackageNameUtils.{packageContains, separatePkgAndClassNames} @@ -194,7 +194,7 @@ final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with No if (inPackage.isRoot) ClassPathEntries(packages(inPackage), Nil) else ClassPathEntries(packages(inPackage), classes(inPackage)) - def asURLs: Seq[URL] = Seq(new URL("jrt:/")) + def asURLs: Seq[URL] = Seq(new URI("jrt:/").toURL) // We don't yet have a scheme to represent the JDK modules in our `-classpath`. // java models them as entries in the new "module path", we'll probably need to follow this. 
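The classpath changes above replace direct `new URL(...)` construction, whose constructors are deprecated since JDK 20, with a URI round-trip. A minimal sketch:

    import java.net.{URI, URL}

    // Before: new URL("jrt:/")   (deprecated constructor)
    // After:  validate the string through URI first, then convert.
    val jrtRoot: URL = new URI("jrt:/").toURL
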
def asClassPathStrings: Seq[String] = Nil diff --git a/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala index 0cb0ba59c52e..e750d9ccacc0 100644 --- a/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala @@ -2,12 +2,10 @@ package dotty.tools.dotc.classpath import scala.language.unsafeNulls -import dotty.tools.io.ClassRepresentation +import dotty.tools.io.{ClassPath, ClassRepresentation} import dotty.tools.io.{AbstractFile, VirtualDirectory} import FileUtils._ -import java.net.URL - -import dotty.tools.io.ClassPath +import java.net.{URI, URL} case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath with DirectoryLookup[ClassFileEntryImpl] with NoSourcePaths { type F = AbstractFile @@ -37,7 +35,7 @@ case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath wi def isPackage(f: AbstractFile): Boolean = f.isPackage // mimic the behavior of the old nsc.util.DirectoryClassPath - def asURLs: Seq[URL] = Seq(new URL(dir.name)) + def asURLs: Seq[URL] = Seq(new URI(dir.name).toURL) def asClassPathStrings: Seq[String] = Seq(dir.path) override def findClass(className: String): Option[ClassRepresentation] = findClassFile(className) map ClassFileEntryImpl.apply diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index 188526bb094f..e5ab8f65f55b 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -28,6 +28,8 @@ object Feature: val symbolLiterals = deprecated("symbolLiterals") val fewerBraces = experimental("fewerBraces") val saferExceptions = experimental("saferExceptions") + val clauseInterleaving = experimental("clauseInterleaving") + val relaxedExtensionImports = experimental("relaxedExtensionImports") val pureFunctions = experimental("pureFunctions") val captureChecking = experimental("captureChecking") val into = experimental("into") @@ -76,6 +78,8 @@ object Feature: def namedTypeArgsEnabled(using Context) = enabled(namedTypeArguments) + def clauseInterleavingEnabled(using Context) = enabled(clauseInterleaving) + def genericNumberLiteralsEnabled(using Context) = enabled(genericNumberLiterals) def scala2ExperimentalMacroEnabled(using Context) = enabled(scala2macros) diff --git a/compiler/src/dotty/tools/dotc/config/PathResolver.scala b/compiler/src/dotty/tools/dotc/config/PathResolver.scala index afa30e38dc2a..8b4eedb0e9d2 100644 --- a/compiler/src/dotty/tools/dotc/config/PathResolver.scala +++ b/compiler/src/dotty/tools/dotc/config/PathResolver.scala @@ -211,7 +211,7 @@ class PathResolver(using c: Context) { import classPathFactory._ // Assemble the elements! 
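Of the feature flags added above, `clauseInterleaving` gates the SIP-47 syntax in which type and term parameter clauses may alternate. A small invented example of what it enables:

    import scala.language.experimental.clauseInterleaving

    // The second type clause [V] can be inferred from the later term clauses.
    def getOrDefault[K](key: K)[V](default: V)(map: Map[K, V]): V =
      map.getOrElse(key, default)

    @main def interleavingDemo =
      println(getOrDefault("a")(0)(Map("a" -> 1))) // prints 1
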
- def basis: List[Traversable[ClassPath]] = + def basis: List[Iterable[ClassPath]] = val release = Option(ctx.settings.javaOutputVersion.value).filter(_.nonEmpty) List( diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index 5ae99ec7e6fa..c06aa304ef72 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -15,9 +15,9 @@ import scala.util.chaining._ class ScalaSettings extends SettingGroup with AllScalaSettings object ScalaSettings: - // Keep synchronized with `classfileVersion` in `BCodeIdiomatic` + // Keep synchronized with `classfileVersion` in `BackendUtils` private val minTargetVersion = 8 - private val maxTargetVersion = 20 + private val maxTargetVersion = 21 def supportedTargetVersions: List[String] = (minTargetVersion to maxTargetVersion).toList.map(_.toString) @@ -160,6 +160,7 @@ private sealed trait WarningSettings: val Whelp: Setting[Boolean] = BooleanSetting("-W", "Print a synopsis of warning options.") val XfatalWarnings: Setting[Boolean] = BooleanSetting("-Werror", "Fail the compilation if there are any warnings.", aliases = List("-Xfatal-warnings")) val WvalueDiscard: Setting[Boolean] = BooleanSetting("-Wvalue-discard", "Warn when non-Unit expression results are unused.") + val WNonUnitStatement = BooleanSetting("-Wnonunit-statement", "Warn when block statements are non-Unit expressions.") val Wunused: Setting[List[ChoiceWithHelp[String]]] = MultiChoiceHelpSetting( name = "-Wunused", diff --git a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala index 4b9b1b247856..b8fa7994ce0c 100644 --- a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala +++ b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala @@ -18,6 +18,8 @@ enum SourceVersion: def isAtLeast(v: SourceVersion) = stable.ordinal >= v.ordinal + def isAtMost(v: SourceVersion) = stable.ordinal <= v.ordinal + object SourceVersion extends Property.Key[SourceVersion]: def defaultSourceVersion = `3.3` diff --git a/compiler/src/dotty/tools/dotc/core/Annotations.scala b/compiler/src/dotty/tools/dotc/core/Annotations.scala index 3b00f2915f1c..202f3eb26e41 100644 --- a/compiler/src/dotty/tools/dotc/core/Annotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Annotations.scala @@ -194,7 +194,7 @@ object Annotations { object Annotation { def apply(tree: Tree): ConcreteAnnotation = ConcreteAnnotation(tree) - + def apply(cls: ClassSymbol, span: Span)(using Context): Annotation = apply(cls, Nil, span) @@ -206,7 +206,7 @@ object Annotations { def apply(atp: Type, arg: Tree, span: Span)(using Context): Annotation = apply(atp, arg :: Nil, span) - + def apply(atp: Type, args: List[Tree], span: Span)(using Context): Annotation = apply(New(atp, args).withSpan(span)) diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index 2f28975dd066..e0e43169820a 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -444,6 +444,12 @@ object Contexts { def useColors: Boolean = base.settings.color.value == "always" + def withColors: FreshContext = + fresh.setSetting(ctx.settings.color, "always") + + def withoutColors: FreshContext = + fresh.setSetting(ctx.settings.color, "never") + /** Is the explicit nulls option set? 
*/ def explicitNulls: Boolean = base.settings.YexplicitNulls.value diff --git a/compiler/src/dotty/tools/dotc/core/Decorators.scala b/compiler/src/dotty/tools/dotc/core/Decorators.scala index 9f55e29c0f59..4ef0dbc9a43b 100644 --- a/compiler/src/dotty/tools/dotc/core/Decorators.scala +++ b/compiler/src/dotty/tools/dotc/core/Decorators.scala @@ -280,10 +280,7 @@ object Decorators { case ex: CyclicReference => "... (caught cyclic reference) ..." case NonFatal(ex) if !ctx.mode.is(Mode.PrintShowExceptions) && !ctx.settings.YshowPrintErrors.value => - val msg = ex match - case te: TypeError => te.toMessage.message - case _ => ex.getMessage - s"[cannot display due to $msg, raw string = $x]" + s"... (cannot display due to ${ex.className} ${ex.getMessage}) ..." case _ => String.valueOf(x).nn /** Returns the simple class name of `x`. */ diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 56409ad050f6..027aec16e9a3 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -86,7 +86,7 @@ class Definitions { newPermanentClassSymbol(ScalaPackageClass, name, Artifact, completer).entered } - /** The trait FunctionN, ContextFunctionN, ErasedFunctionN or ErasedContextFunction, for some N + /** The trait FunctionN and ContextFunctionN for some N * @param name The name of the trait to be created * * FunctionN traits follow this template: @@ -104,24 +104,9 @@ class Definitions { * trait ContextFunctionN[-T0,...,-T{N-1}, +R] extends Object { * def apply(using $x0: T0, ..., $x{N_1}: T{N-1}): R * } - * - * ErasedFunctionN traits follow this template: - * - * trait ErasedFunctionN[-T0,...,-T{N-1}, +R] extends Object { - * def apply(erased $x0: T0, ..., $x{N_1}: T{N-1}): R - * } - * - * ErasedContextFunctionN traits follow this template: - * - * trait ErasedContextFunctionN[-T0,...,-T{N-1}, +R] extends Object { - * def apply(using erased $x0: T0, ..., $x{N_1}: T{N-1}): R - * } - * - * ErasedFunctionN and ErasedContextFunctionN erase to Function0. 
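The `Definitions` hunk above deletes the dedicated `ErasedFunctionN`/`ErasedContextFunctionN` templates; later hunks in this patch replace them with a per-parameter `erasedParams: List[Boolean]` on method types plus a single refined marker class (`scala.runtime.ErasedFunction`). A self-contained toy model of that representation, with illustrative names only, not compiler code:

```scala
// Toy model: erasedness is carried per parameter, and erasure keeps
// only the non-erased parameters, mirroring the
// `collect { case (param, isErased) if !isErased => param }` logic
// added to TypeErasure further down in this patch.
final case class MethodSig(
    paramInfos: List[String],
    erasedParams: List[Boolean],
    resType: String):
  def hasErasedParams: Boolean = erasedParams.contains(true)
  def erase: MethodSig =
    val kept = paramInfos.zip(erasedParams).collect { case (p, false) => p }
    MethodSig(kept, List.fill(kept.size)(false), resType)

@main def erasedParamsDemo(): Unit =
  val sig = MethodSig(List("CanThrow[E]", "Int"), List(true, false), "Int")
  println(sig.erase) // MethodSig(List(Int), List(false), Int)
```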
- * * ImpureXYZFunctionN follow this template: * - * type ImpureXYZFunctionN[-T0,...,-T{N-1}, +R] = {*} XYZFunctionN[T0,...,T{N-1}, R] + * type ImpureXYZFunctionN[-T0,...,-T{N-1}, +R] = {cap} XYZFunctionN[T0,...,T{N-1}, R] */ private def newFunctionNType(name: TypeName): Symbol = { val impure = name.startsWith("Impure") @@ -149,8 +134,7 @@ class Definitions { val resParamRef = enterTypeParam(cls, paramNamePrefix ++ "R", Covariant, decls).typeRef val methodType = MethodType.companion( isContextual = name.isContextFunction, - isImplicit = false, - isErased = name.isErasedFunction) + isImplicit = false) decls.enter(newMethod(cls, nme.apply, methodType(argParamRefs, resParamRef), Deferred)) denot.info = ClassInfo(ScalaPackageClass.thisType, cls, ObjectType :: Nil, decls) @@ -530,9 +514,12 @@ class Definitions { }) @tu lazy val ListClass: Symbol = requiredClass("scala.collection.immutable.List") + def ListType: TypeRef = ListClass.typeRef @tu lazy val ListModule: Symbol = requiredModule("scala.collection.immutable.List") @tu lazy val NilModule: Symbol = requiredModule("scala.collection.immutable.Nil") + def NilType: TermRef = NilModule.termRef @tu lazy val ConsClass: Symbol = requiredClass("scala.collection.immutable.::") + def ConsType: TypeRef = ConsClass.typeRef @tu lazy val SeqFactoryClass: Symbol = requiredClass("scala.collection.SeqFactory") @tu lazy val SingletonClass: ClassSymbol = @@ -701,6 +688,7 @@ class Definitions { @tu lazy val JavaCalendarClass: ClassSymbol = requiredClass("java.util.Calendar") @tu lazy val JavaDateClass: ClassSymbol = requiredClass("java.util.Date") @tu lazy val JavaFormattableClass: ClassSymbol = requiredClass("java.util.Formattable") + @tu lazy val JavaRecordClass: Symbol = getClassIfDefined("java.lang.Record") @tu lazy val JavaEnumClass: ClassSymbol = { val cls = requiredClass("java.lang.Enum") @@ -811,9 +799,12 @@ class Definitions { @tu lazy val ReflectPackageClass: Symbol = requiredPackage("scala.reflect.package").moduleClass @tu lazy val ClassTagClass: ClassSymbol = requiredClass("scala.reflect.ClassTag") + @tu lazy val ClassTagClass_unapply: Symbol = ClassTagClass.requiredMethod("unapply") @tu lazy val ClassTagModule: Symbol = ClassTagClass.companionModule @tu lazy val ClassTagModule_apply: Symbol = ClassTagModule.requiredMethod(nme.apply) + @tu lazy val ReflectSelectableTypeRef: TypeRef = requiredClassRef("scala.reflect.Selectable") + @tu lazy val TypeTestClass: ClassSymbol = requiredClass("scala.reflect.TypeTest") @tu lazy val TypeTest_unapply: Symbol = TypeTestClass.requiredMethod(nme.unapply) @tu lazy val TypeTestModule_identity: Symbol = TypeTestClass.companionModule.requiredMethod(nme.identity) @@ -978,11 +969,12 @@ class Definitions { @tu lazy val BreakClass: Symbol = requiredClass("scala.util.boundary.Break") @tu lazy val CapsModule: Symbol = requiredModule("scala.caps") - @tu lazy val captureRoot: TermSymbol = CapsModule.requiredValue("*") + @tu lazy val captureRoot: TermSymbol = CapsModule.requiredValue("cap") @tu lazy val CapsUnsafeModule: Symbol = requiredModule("scala.caps.unsafe") @tu lazy val Caps_unsafeBox: Symbol = CapsUnsafeModule.requiredMethod("unsafeBox") @tu lazy val Caps_unsafeUnbox: Symbol = CapsUnsafeModule.requiredMethod("unsafeUnbox") @tu lazy val Caps_unsafeBoxFunArg: Symbol = CapsUnsafeModule.requiredMethod("unsafeBoxFunArg") + @tu lazy val Caps_SealedAnnot: ClassSymbol = requiredClass("scala.caps.Sealed") // Annotation base classes @tu lazy val AnnotationClass: ClassSymbol = requiredClass("scala.annotation.Annotation") @@ 
-1031,6 +1023,7 @@ class Definitions { @tu lazy val UncheckedAnnot: ClassSymbol = requiredClass("scala.unchecked") @tu lazy val UncheckedStableAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedStable") @tu lazy val UncheckedVarianceAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedVariance") + @tu lazy val UncheckedCapturesAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedCaptures") @tu lazy val VolatileAnnot: ClassSymbol = requiredClass("scala.volatile") @tu lazy val WithPureFunsAnnot: ClassSymbol = requiredClass("scala.annotation.internal.WithPureFuns") @tu lazy val BeanGetterMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.beanGetter") @@ -1039,6 +1032,8 @@ class Definitions { @tu lazy val GetterMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.getter") @tu lazy val ParamMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.param") @tu lazy val SetterMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.setter") + @tu lazy val CompanionClassMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.companionClass") + @tu lazy val CompanionMethodMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.companionMethod") @tu lazy val ShowAsInfixAnnot: ClassSymbol = requiredClass("scala.annotation.showAsInfix") @tu lazy val FunctionalInterfaceAnnot: ClassSymbol = requiredClass("java.lang.FunctionalInterface") @tu lazy val TargetNameAnnot: ClassSymbol = requiredClass("scala.annotation.targetName") @@ -1052,7 +1047,7 @@ class Definitions { // A list of meta-annotations that are relevant for fields and accessors @tu lazy val NonBeanMetaAnnots: Set[Symbol] = - Set(FieldMetaAnnot, GetterMetaAnnot, ParamMetaAnnot, SetterMetaAnnot) + Set(FieldMetaAnnot, GetterMetaAnnot, ParamMetaAnnot, SetterMetaAnnot, CompanionClassMetaAnnot, CompanionMethodMetaAnnot) @tu lazy val MetaAnnots: Set[Symbol] = NonBeanMetaAnnots + BeanGetterMetaAnnot + BeanSetterMetaAnnot @@ -1104,15 +1099,23 @@ class Definitions { sym.owner.linkedClass.typeRef object FunctionOf { - def apply(args: List[Type], resultType: Type, isContextual: Boolean = false, isErased: Boolean = false)(using Context): Type = - FunctionType(args.length, isContextual, isErased).appliedTo(args ::: resultType :: Nil) - def unapply(ft: Type)(using Context): Option[(List[Type], Type, Boolean, Boolean)] = { - val tsym = ft.typeSymbol - if isFunctionClass(tsym) && ft.isRef(tsym) then - val targs = ft.dealias.argInfos - if (targs.isEmpty) None - else Some(targs.init, targs.last, tsym.name.isContextFunction, tsym.name.isErasedFunction) - else None + def apply(args: List[Type], resultType: Type, isContextual: Boolean = false)(using Context): Type = + val mt = MethodType.companion(isContextual, false)(args, resultType) + if mt.hasErasedParams then + RefinedType(ErasedFunctionClass.typeRef, nme.apply, mt) + else + FunctionType(args.length, isContextual).appliedTo(args ::: resultType :: Nil) + def unapply(ft: Type)(using Context): Option[(List[Type], Type, Boolean)] = { + ft.dealias match + case RefinedType(parent, nme.apply, mt: MethodType) if isErasedFunctionType(parent) => + Some(mt.paramInfos, mt.resType, mt.isContextualMethod) + case _ => + val tsym = ft.dealias.typeSymbol + if isFunctionSymbol(tsym) && ft.isRef(tsym) then + val targs = ft.dealias.argInfos + if (targs.isEmpty) None + else Some(targs.init, targs.last, tsym.name.isContextFunction) + else None } } @@ -1431,24 +1434,22 @@ class Definitions { classRefs(n).nn end FunType - private def 
funTypeIdx(isContextual: Boolean, isErased: Boolean, isImpure: Boolean): Int = + private def funTypeIdx(isContextual: Boolean, isImpure: Boolean): Int = (if isContextual then 1 else 0) - + (if isErased then 2 else 0) - + (if isImpure then 4 else 0) + + (if isImpure then 2 else 0) private val funTypeArray: IArray[FunType] = val arr = Array.ofDim[FunType](8) val choices = List(false, true) - for contxt <- choices; erasd <- choices; impure <- choices do + for contxt <- choices; impure <- choices do var str = "Function" if contxt then str = "Context" + str - if erasd then str = "Erased" + str if impure then str = "Impure" + str - arr(funTypeIdx(contxt, erasd, impure)) = FunType(str) + arr(funTypeIdx(contxt, impure)) = FunType(str) IArray.unsafeFromArray(arr) - def FunctionSymbol(n: Int, isContextual: Boolean = false, isErased: Boolean = false, isImpure: Boolean = false)(using Context): Symbol = - funTypeArray(funTypeIdx(isContextual, isErased, isImpure))(n).symbol + def FunctionSymbol(n: Int, isContextual: Boolean = false, isImpure: Boolean = false)(using Context): Symbol = + funTypeArray(funTypeIdx(isContextual, isImpure))(n).symbol @tu lazy val Function0_apply: Symbol = Function0.requiredMethod(nme.apply) @tu lazy val ContextFunction0_apply: Symbol = ContextFunction0.requiredMethod(nme.apply) @@ -1458,12 +1459,14 @@ class Definitions { @tu lazy val Function2: Symbol = FunctionSymbol(2) @tu lazy val ContextFunction0: Symbol = FunctionSymbol(0, isContextual = true) - def FunctionType(n: Int, isContextual: Boolean = false, isErased: Boolean = false, isImpure: Boolean = false)(using Context): TypeRef = - FunctionSymbol(n, isContextual && !ctx.erasedTypes, isErased, isImpure).typeRef + def FunctionType(n: Int, isContextual: Boolean = false, isImpure: Boolean = false)(using Context): TypeRef = + FunctionSymbol(n, isContextual && !ctx.erasedTypes, isImpure).typeRef lazy val PolyFunctionClass = requiredClass("scala.PolyFunction") def PolyFunctionType = PolyFunctionClass.typeRef + lazy val ErasedFunctionClass = requiredClass("scala.runtime.ErasedFunction") + /** If `cls` is a class in the scala package, its name, otherwise EmptyTypeName */ def scalaClassName(cls: Symbol)(using Context): TypeName = cls.denot match case clsd: ClassDenotation if clsd.owner eq ScalaPackageClass => @@ -1496,8 +1499,6 @@ class Definitions { * - FunctionXXL * - FunctionN for N >= 0 * - ContextFunctionN for N >= 0 - * - ErasedFunctionN for N > 0 - * - ErasedContextFunctionN for N > 0 */ def isFunctionClass(cls: Symbol): Boolean = scalaClassName(cls).isFunction @@ -1512,16 +1513,9 @@ class Definitions { /** Is an context function class. * - ContextFunctionN for N >= 0 - * - ErasedContextFunctionN for N > 0 */ def isContextFunctionClass(cls: Symbol): Boolean = scalaClassName(cls).isContextFunction - /** Is an erased function class. 
- * - ErasedFunctionN for N > 0 - * - ErasedContextFunctionN for N > 0 - */ - def isErasedFunctionClass(cls: Symbol): Boolean = scalaClassName(cls).isErasedFunction - /** Is either FunctionXXL or a class that will be erased to FunctionXXL * - FunctionXXL * - FunctionN for N >= 22 @@ -1558,8 +1552,7 @@ class Definitions { */ def functionTypeErasure(cls: Symbol): Type = val arity = scalaClassName(cls).functionArity - if cls.name.isErasedFunction then FunctionType(0) - else if arity > 22 then FunctionXXLClass.typeRef + if arity > 22 then FunctionXXLClass.typeRef else if arity >= 0 then FunctionType(arity) else NoType @@ -1678,6 +1671,15 @@ class Definitions { rec(tp.stripTypeVar, Nil, bound) } + def isSmallGenericTuple(tp: Type)(using Context): Boolean = + if tp.derivesFrom(defn.PairClass) && !defn.isTupleNType(tp.widenDealias) then + // If this is a generic tuple we need to cast it to make the TupleN/ members accessible. + // This works only for generic tuples of known size up to 22. + defn.tupleTypes(tp.widenTermRefExpr) match + case Some(elems) if elems.length <= Definitions.MaxTupleArity => true + case _ => false + else false + def isProductSubType(tp: Type)(using Context): Boolean = tp.derivesFrom(ProductClass) /** Is `tp` (an alias) of either a scala.FunctionN or a scala.ContextFunctionN @@ -1690,16 +1692,29 @@ class Definitions { arity >= 0 && isFunctionClass(sym) && tp.isRef( - FunctionType(arity, sym.name.isContextFunction, sym.name.isErasedFunction).typeSymbol, + FunctionType(arity, sym.name.isContextFunction).typeSymbol, skipRefined = false) end isNonRefinedFunction - /** Is `tp` a representation of a (possibly dependent) function type or an alias of such? */ + /** Returns whether `tp` is an instance or a refined instance of: + * - scala.FunctionN + * - scala.ContextFunctionN + */ def isFunctionType(tp: Type)(using Context): Boolean = isNonRefinedFunction(tp.dropDependentRefinement) + /** Is `tp` a specialized, refined function type? Either an `ErasedFunction` or a `PolyFunction`. */ + def isRefinedFunctionType(tp: Type)(using Context): Boolean = + tp.derivesFrom(defn.PolyFunctionClass) || isErasedFunctionType(tp) + + /** Returns whether `tp` is an instance or a refined instance of: + * - scala.FunctionN + * - scala.ContextFunctionN + * - ErasedFunction + * - PolyFunction + */ def isFunctionOrPolyType(tp: Type)(using Context): Boolean = - isFunctionType(tp) || (tp.typeSymbol eq defn.PolyFunctionClass) + isFunctionType(tp) || isRefinedFunctionType(tp) private def withSpecMethods(cls: ClassSymbol, bases: List[Name], paramTypes: Set[TypeRef]) = for base <- bases; tp <- paramTypes do @@ -1788,7 +1803,7 @@ class Definitions { @tu lazy val FunctionSpecializedApplyNames: collection.Set[Name] = Function0SpecializedApplyNames ++ Function1SpecializedApplyNames ++ Function2SpecializedApplyNames - def functionArity(tp: Type)(using Context): Int = tp.dropDependentRefinement.dealias.argInfos.length - 1 + def functionArity(tp: Type)(using Context): Int = tp.functionArgInfos.length - 1 /** Return underlying context function type (i.e. instance of an ContextFunctionN class) * or NoType if none exists. 
The following types are considered as underlying types: @@ -1800,6 +1815,8 @@ class Definitions { tp.stripTypeVar.dealias match case tp1: TypeParamRef if ctx.typerState.constraint.contains(tp1) => asContextFunctionType(TypeComparer.bounds(tp1).hiBound) + case tp1 @ RefinedType(parent, nme.apply, mt: MethodType) if isErasedFunctionType(parent) && mt.isContextualMethod => + tp1 case tp1 => if tp1.typeSymbol.name.isContextFunction && isFunctionType(tp1) then tp1 else NoType @@ -1813,18 +1830,28 @@ class Definitions { * types `As`, the result type `B` and a whether the type is an erased context function. */ object ContextFunctionType: - def unapply(tp: Type)(using Context): Option[(List[Type], Type, Boolean)] = + def unapply(tp: Type)(using Context): Option[(List[Type], Type, List[Boolean])] = if ctx.erasedTypes then atPhase(erasurePhase)(unapply(tp)) else - val tp1 = asContextFunctionType(tp) - if tp1.exists then - val args = tp1.dropDependentRefinement.argInfos - Some((args.init, args.last, tp1.typeSymbol.name.isErasedFunction)) - else None + asContextFunctionType(tp) match + case RefinedType(parent, nme.apply, mt: MethodType) if isErasedFunctionType(parent) => + Some((mt.paramInfos, mt.resType, mt.erasedParams)) + case tp1 if tp1.exists => + val args = tp1.functionArgInfos + val erasedParams = erasedFunctionParameters(tp1) + Some((args.init, args.last, erasedParams)) + case _ => None + + /* Returns a list of erased booleans marking whether parameters are erased, for a function type. */ + def erasedFunctionParameters(tp: Type)(using Context): List[Boolean] = tp.dealias match { + case RefinedType(parent, nme.apply, mt: MethodType) => mt.erasedParams + case tp if isFunctionType(tp) => List.fill(functionArity(tp)) { false } + case _ => Nil + } def isErasedFunctionType(tp: Type)(using Context): Boolean = - tp.dealias.typeSymbol.name.isErasedFunction && isFunctionType(tp) + tp.derivesFrom(defn.ErasedFunctionClass) /** A whitelist of Scala-2 classes that are known to be pure */ def isAssuredNoInits(sym: Symbol): Boolean = diff --git a/compiler/src/dotty/tools/dotc/core/Denotations.scala b/compiler/src/dotty/tools/dotc/core/Denotations.scala index 723f9408d805..e56cc453d34d 100644 --- a/compiler/src/dotty/tools/dotc/core/Denotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Denotations.scala @@ -545,8 +545,7 @@ object Denotations { tp2 match case tp2: MethodType if TypeComparer.matchingMethodParams(tp1, tp2) - && tp1.isImplicitMethod == tp2.isImplicitMethod - && tp1.isErasedMethod == tp2.isErasedMethod => + && tp1.isImplicitMethod == tp2.isImplicitMethod => val resType = infoMeet(tp1.resType, tp2.resType.subst(tp2, tp1), safeIntersection) if resType.exists then tp1.derivedLambdaType(mergeParamNames(tp1, tp2), tp1.paramInfos, resType) @@ -1270,8 +1269,8 @@ object Denotations { def hasAltWith(p: SingleDenotation => Boolean): Boolean = denot1.hasAltWith(p) || denot2.hasAltWith(p) def accessibleFrom(pre: Type, superAccess: Boolean)(using Context): Denotation = { - val d1 = denot1 accessibleFrom (pre, superAccess) - val d2 = denot2 accessibleFrom (pre, superAccess) + val d1 = denot1.accessibleFrom(pre, superAccess) + val d2 = denot2.accessibleFrom(pre, superAccess) if (!d1.exists) d2 else if (!d2.exists) d1 else derivedUnionDenotation(d1, d2) diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala b/compiler/src/dotty/tools/dotc/core/Flags.scala index f23dce020f10..8100bea374eb 100644 --- a/compiler/src/dotty/tools/dotc/core/Flags.scala +++ b/compiler/src/dotty/tools/dotc/core/Flags.scala @@ 
-315,7 +315,7 @@ object Flags { val (SuperParamAliasOrScala2x @ _, SuperParamAlias @ _, Scala2x @ _) = newFlags(26, "", "") /** A parameter with a default value / an impure untpd.FunctionWithMods type */ - val (_, HasDefault @ _, Impure @ _) = newFlags(27, "", "<{*}>") + val (_, HasDefault @ _, Impure @ _) = newFlags(27, "", "") /** An extension method, or a collective extension instance */ val (Extension @ _, ExtensionMethod @ _, _) = newFlags(28, "") @@ -604,6 +604,7 @@ object Flags { val Scala2Trait: FlagSet = Scala2x | Trait val SyntheticArtifact: FlagSet = Synthetic | Artifact val SyntheticCase: FlagSet = Synthetic | Case + val SyntheticMethod: FlagSet = Synthetic | Method val SyntheticModule: FlagSet = Synthetic | Module val SyntheticOpaque: FlagSet = Synthetic | Opaque val SyntheticParam: FlagSet = Synthetic | Param diff --git a/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala b/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala index 062ddd5e846c..60ebc95e7bed 100644 --- a/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala +++ b/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala @@ -100,16 +100,16 @@ object MatchTypeTrace: case TryReduce(scrut: Type) => i" trying to reduce $scrut" case NoMatches(scrut, cases) => - i""" failed since selector $scrut + i""" failed since selector $scrut | matches none of the cases | | ${casesText(cases)}""" case EmptyScrutinee(scrut) => - i""" failed since selector $scrut + i""" failed since selector $scrut | is uninhabited (there are no values of that type).""" case Stuck(scrut, stuckCase, otherCases) => val msg = - i""" failed since selector $scrut + i""" failed since selector $scrut | does not match ${caseText(stuckCase)} | and cannot be shown to be disjoint from it either.""" if otherCases.length == 0 then msg @@ -121,14 +121,14 @@ object MatchTypeTrace: | ${casesText(otherCases)}""" case NoInstance(scrut, stuckCase, fails) => def params = if fails.length == 1 then "parameter" else "parameters" - i""" failed since selector $scrut + i""" failed since selector $scrut | does not uniquely determine $params ${fails.map(_._1)}%, % in | ${caseText(stuckCase)} | The computed bounds for the $params are: | ${fails.map((name, bounds) => i"$name$bounds")}%\n %""" def noMatchesText(scrut: Type, cases: List[Type])(using Context): String = - i"""failed since selector $scrut + i"""failed since selector $scrut |matches none of the cases | | ${casesText(cases)}""" diff --git a/compiler/src/dotty/tools/dotc/core/Mode.scala b/compiler/src/dotty/tools/dotc/core/Mode.scala index 40a45b9f4678..ea63eb6a419b 100644 --- a/compiler/src/dotty/tools/dotc/core/Mode.scala +++ b/compiler/src/dotty/tools/dotc/core/Mode.scala @@ -141,4 +141,7 @@ object Mode { * Type `Null` becomes a subtype of non-primitive value types in TypeComparer. 
*/ val RelaxedOverriding: Mode = newMode(30, "RelaxedOverriding") + + /** We are checking the original call of an Inlined node */ + val InlinedCall: Mode = newMode(31, "InlinedCall") } diff --git a/compiler/src/dotty/tools/dotc/core/NameOps.scala b/compiler/src/dotty/tools/dotc/core/NameOps.scala index 4e075953d7fa..04440c9e9b39 100644 --- a/compiler/src/dotty/tools/dotc/core/NameOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NameOps.scala @@ -214,7 +214,7 @@ object NameOps { if str == mustHave then found = true idx + str.length else idx - skip(skip(skip(0, "Impure"), "Erased"), "Context") == suffixStart + skip(skip(0, "Impure"), "Context") == suffixStart && found } @@ -225,10 +225,11 @@ object NameOps { private def checkedFunArity(suffixStart: Int)(using Context): Int = if isFunctionPrefix(suffixStart) then funArity(suffixStart) else -1 - /** Is a function name, i.e one of FunctionXXL, FunctionN, ContextFunctionN, ErasedFunctionN, ErasedContextFunctionN for N >= 0 + /** Is a function name, i.e one of FunctionXXL, FunctionN, ContextFunctionN, ImpureFunctionN, ImpureContextFunctionN for N >= 0 */ def isFunction(using Context): Boolean = - (name eq tpnme.FunctionXXL) || checkedFunArity(functionSuffixStart) >= 0 + (name eq tpnme.FunctionXXL) + || checkedFunArity(functionSuffixStart) >= 0 /** Is a function name * - FunctionN for N >= 0 @@ -241,14 +242,11 @@ object NameOps { isFunctionPrefix(suffixStart, mustHave) && funArity(suffixStart) >= 0 def isContextFunction(using Context): Boolean = isSpecificFunction("Context") - def isErasedFunction(using Context): Boolean = isSpecificFunction("Erased") def isImpureFunction(using Context): Boolean = isSpecificFunction("Impure") /** Is a synthetic function name, i.e. one of * - FunctionN for N > 22 * - ContextFunctionN for N >= 0 - * - ErasedFunctionN for N >= 0 - * - ErasedContextFunctionN for N >= 0 */ def isSyntheticFunction(using Context): Boolean = val suffixStart = functionSuffixStart diff --git a/compiler/src/dotty/tools/dotc/core/NamerOps.scala b/compiler/src/dotty/tools/dotc/core/NamerOps.scala index db6f72590818..dc09edd79781 100644 --- a/compiler/src/dotty/tools/dotc/core/NamerOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NamerOps.scala @@ -42,10 +42,10 @@ object NamerOps: case Nil => resultType case TermSymbols(params) :: paramss1 => - val (isContextual, isImplicit, isErased) = - if params.isEmpty then (false, false, false) - else (params.head.is(Given), params.head.is(Implicit), params.head.is(Erased)) - val make = MethodType.companion(isContextual = isContextual, isImplicit = isImplicit, isErased = isErased) + val (isContextual, isImplicit) = + if params.isEmpty then (false, false) + else (params.head.is(Given), params.head.is(Implicit)) + val make = MethodType.companion(isContextual = isContextual, isImplicit = isImplicit) if isJava then for param <- params do if param.info.isDirectRef(defn.ObjectClass) then param.info = defn.AnyType diff --git a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala index faea30390d2b..0328cea9b3ca 100644 --- a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala @@ -344,7 +344,8 @@ class OrderingConstraint(private val boundsMap: ParamBounds, if newSet.isEmpty then deps.remove(referenced) else deps.updated(referenced, newSet) - def traverse(t: Type) = t match + def traverse(t: Type) = try + t match case param: TypeParamRef => if hasBounds(param) then if 
variance >= 0 then coDeps = update(coDeps, param) @@ -356,6 +357,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, seen += tp traverse(tp.ref) case _ => traverseChildren(t) + catch case ex: Throwable => handleRecursive("adjust", t.show, ex) end Adjuster /** Adjust dependencies to account for the delta of previous entry `prevEntry` diff --git a/compiler/src/dotty/tools/dotc/core/Phases.scala b/compiler/src/dotty/tools/dotc/core/Phases.scala index 205554e418ed..3c4c45ab254a 100644 --- a/compiler/src/dotty/tools/dotc/core/Phases.scala +++ b/compiler/src/dotty/tools/dotc/core/Phases.scala @@ -211,6 +211,7 @@ object Phases { private var mySbtExtractDependenciesPhase: Phase = _ private var myPicklerPhase: Phase = _ private var myInliningPhase: Phase = _ + private var myStagingPhase: Phase = _ private var mySplicingPhase: Phase = _ private var myFirstTransformPhase: Phase = _ private var myCollectNullableFieldsPhase: Phase = _ @@ -235,6 +236,7 @@ object Phases { final def sbtExtractDependenciesPhase: Phase = mySbtExtractDependenciesPhase final def picklerPhase: Phase = myPicklerPhase final def inliningPhase: Phase = myInliningPhase + final def stagingPhase: Phase = myStagingPhase final def splicingPhase: Phase = mySplicingPhase final def firstTransformPhase: Phase = myFirstTransformPhase final def collectNullableFieldsPhase: Phase = myCollectNullableFieldsPhase @@ -262,6 +264,7 @@ object Phases { mySbtExtractDependenciesPhase = phaseOfClass(classOf[sbt.ExtractDependencies]) myPicklerPhase = phaseOfClass(classOf[Pickler]) myInliningPhase = phaseOfClass(classOf[Inlining]) + myStagingPhase = phaseOfClass(classOf[Staging]) mySplicingPhase = phaseOfClass(classOf[Splicing]) myFirstTransformPhase = phaseOfClass(classOf[FirstTransform]) myCollectNullableFieldsPhase = phaseOfClass(classOf[CollectNullableFields]) @@ -322,8 +325,8 @@ object Phases { units.map { unit => val unitCtx = ctx.fresh.setPhase(this.start).setCompilationUnit(unit).withRootImports try run(using unitCtx) - catch case ex: Throwable => - println(s"$ex while running $phaseName on $unit") + catch case ex: Throwable if !ctx.run.enrichedErrorMessage => + println(ctx.run.enrichErrorMessage(s"unhandled exception while running $phaseName on $unit")) throw ex unitCtx.compilationUnit } @@ -449,6 +452,7 @@ object Phases { def sbtExtractDependenciesPhase(using Context): Phase = ctx.base.sbtExtractDependenciesPhase def picklerPhase(using Context): Phase = ctx.base.picklerPhase def inliningPhase(using Context): Phase = ctx.base.inliningPhase + def stagingPhase(using Context): Phase = ctx.base.stagingPhase def splicingPhase(using Context): Phase = ctx.base.splicingPhase def firstTransformPhase(using Context): Phase = ctx.base.firstTransformPhase def refchecksPhase(using Context): Phase = ctx.base.refchecksPhase diff --git a/compiler/src/dotty/tools/dotc/core/StagingContext.scala b/compiler/src/dotty/tools/dotc/core/StagingContext.scala deleted file mode 100644 index 41e77655d5d6..000000000000 --- a/compiler/src/dotty/tools/dotc/core/StagingContext.scala +++ /dev/null @@ -1,58 +0,0 @@ -package dotty.tools.dotc.core - -import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.util.Property -import dotty.tools.dotc.transform.PCPCheckAndHeal - -object StagingContext { - - /** A key to be used in a context property that tracks the quotation level */ - private val QuotationLevel = new Property.Key[Int] - - /** A key to be used in a context property that tracks the 
quotation stack. - * Stack containing the Quotes references received by the surrounding quotes. - */ - private val QuotesStack = new Property.Key[List[tpd.Tree]] - - private val TaggedTypes = new Property.Key[PCPCheckAndHeal.QuoteTypeTags] - - /** All enclosing calls that are currently inlined, from innermost to outermost. */ - def level(using Context): Int = - ctx.property(QuotationLevel).getOrElse(0) - - /** Context with an incremented quotation level. */ - def quoteContext(using Context): Context = - ctx.fresh.setProperty(QuotationLevel, level + 1) - - /** Context with an incremented quotation level and pushes a reference to a Quotes on the quote context stack */ - def pushQuotes(qctxRef: tpd.Tree)(using Context): Context = - val old = ctx.property(QuotesStack).getOrElse(List.empty) - ctx.fresh.setProperty(QuotationLevel, level + 1) - .setProperty(QuotesStack, qctxRef :: old) - - /** Context with a decremented quotation level. */ - def spliceContext(using Context): Context = - ctx.fresh.setProperty(QuotationLevel, level - 1) - - def contextWithQuoteTypeTags(taggedTypes: PCPCheckAndHeal.QuoteTypeTags)(using Context) = - ctx.fresh.setProperty(TaggedTypes, taggedTypes) - - def getQuoteTypeTags(using Context): PCPCheckAndHeal.QuoteTypeTags = - ctx.property(TaggedTypes).get - - /** Context with a decremented quotation level and pops the Some of top of the quote context stack or None if the stack is empty. - * The quotation stack could be empty if we are in a top level splice or an erroneous splice directly within a top level splice. - */ - def popQuotes()(using Context): (Option[tpd.Tree], Context) = - val ctx1 = ctx.fresh.setProperty(QuotationLevel, level - 1) - val head = - ctx.property(QuotesStack) match - case Some(x :: xs) => - ctx1.setProperty(QuotesStack, xs) - Some(x) - case _ => - None // Splice at level 0 or lower - (head, ctx1) -} diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index 92f2e55a49bf..f2624e26cba5 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -204,6 +204,7 @@ object StdNames { final val Null: N = "Null" final val Object: N = "Object" final val FromJavaObject: N = "" + final val Record: N = "Record" final val Product: N = "Product" final val PartialFunction: N = "PartialFunction" final val PrefixType: N = "PrefixType" @@ -212,6 +213,7 @@ object StdNames { final val Throwable: N = "Throwable" final val IOOBException: N = "IndexOutOfBoundsException" final val FunctionXXL: N = "FunctionXXL" + final val ErasedFunction: N = "ErasedFunction" final val Abs: N = "Abs" final val And: N = "&&" @@ -285,7 +287,7 @@ object StdNames { // ----- Term names ----------------------------------------- // Compiler-internal - val CAPTURE_ROOT: N = "*" + val CAPTURE_ROOT: N = "cap" val CONSTRUCTOR: N = "" val STATIC_CONSTRUCTOR: N = "" val EVT2U: N = "evt2u$" @@ -300,6 +302,7 @@ object StdNames { val THROWS: N = "$throws" val U2EVT: N = "u2evt$" val ALLARGS: N = "$allArgs" + val UPARROW: N = "^" final val Nil: N = "Nil" final val Predef: N = "Predef" @@ -911,6 +914,10 @@ object StdNames { final val VOLATILEkw: N = kw("volatile") final val WHILEkw: N = kw("while") + final val RECORDid: N = "record" + final val VARid: N = "var" + final val YIELDid: N = "yield" + final val BoxedBoolean: N = "java.lang.Boolean" final val BoxedByte: N = "java.lang.Byte" final val BoxedCharacter: N = "java.lang.Character" @@ -943,6 +950,8 @@ object StdNames { final val 
JavaSerializable: N = "java.io.Serializable" } + + class JavaTermNames extends JavaNames[TermName] { protected def fromString(s: String): TermName = termName(s) } diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index 9d7a3945a1ca..b8c17ff61e9e 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -255,10 +255,13 @@ object SymDenotations { def annotationsCarrying(meta: Set[Symbol], orNoneOf: Set[Symbol] = Set.empty)(using Context): List[Annotation] = annotations.filterConserve(_.hasOneOfMetaAnnotation(meta, orNoneOf = orNoneOf)) - def copyAndKeepAnnotationsCarrying(phase: DenotTransformer, meta: Set[Symbol], orNoneOf: Set[Symbol] = Set.empty)(using Context): Unit = - if annotations.nonEmpty then + def keepAnnotationsCarrying(phase: DenotTransformer, meta: Set[Symbol], orNoneOf: Set[Symbol] = Set.empty)(using Context): Unit = + updateAnnotationsAfter(phase, annotationsCarrying(meta, orNoneOf = orNoneOf)) + + def updateAnnotationsAfter(phase: DenotTransformer, annots: List[Annotation])(using Context): Unit = + if annots ne annotations then val cpy = copySymDenotation() - cpy.annotations = annotationsCarrying(meta, orNoneOf = orNoneOf) + cpy.annotations = annots cpy.installAfter(phase) /** Optionally, the annotation matching the given class symbol */ @@ -296,7 +299,7 @@ object SymDenotations { } /** Add all given annotations to this symbol */ - final def addAnnotations(annots: TraversableOnce[Annotation])(using Context): Unit = + final def addAnnotations(annots: IterableOnce[Annotation])(using Context): Unit = annots.iterator.foreach(addAnnotation) @tailrec @@ -904,10 +907,13 @@ object SymDenotations { false val cls = owner.enclosingSubClass if !cls.exists then - val encl = if ctx.owner.isConstructor then ctx.owner.enclosingClass.owner.enclosingClass else ctx.owner.enclosingClass - fail(i""" - | Access to protected $this not permitted because enclosing ${encl.showLocated} - | is not a subclass of ${owner.showLocated} where target is defined""") + if pre.termSymbol.isPackageObject && accessWithin(pre.termSymbol.owner) then + true + else + val encl = if ctx.owner.isConstructor then ctx.owner.enclosingClass.owner.enclosingClass else ctx.owner.enclosingClass + fail(i""" + | Access to protected $this not permitted because enclosing ${encl.showLocated} + | is not a subclass of ${owner.showLocated} where target is defined""") else if isType || pre.derivesFrom(cls) || isConstructor || owner.is(ModuleClass) then // allow accesses to types from arbitrary subclasses fixes #4737 // don't perform this check for static members @@ -1190,6 +1196,7 @@ object SymDenotations { isOneOf(EffectivelyFinalFlags) || is(Inline, butNot = Deferred) || is(JavaDefinedVal, butNot = Method) + || isConstructor || !owner.isExtensibleClass /** A class is effectively sealed if has the `final` or `sealed` modifier, or it @@ -1404,9 +1411,9 @@ object SymDenotations { case Nil => Iterator.empty } - /** The symbol overriding this symbol in given subclass `ofclazz`. + /** The symbol overriding this symbol in given subclass `inClass`. 
* - * @param ofclazz is a subclass of this symbol's owner + * @pre `inClass` is a subclass of this symbol's owner */ final def overridingSymbol(inClass: ClassSymbol)(using Context): Symbol = if (canMatchInheritedSymbols) matchingDecl(inClass, inClass.thisType) @@ -2223,13 +2230,12 @@ object SymDenotations { def computeApplied = { btrCache(tp) = NoPrefix val baseTp = - if (tycon.typeSymbol eq symbol) tp - else (tycon.typeParams: @unchecked) match { + if (tycon.typeSymbol eq symbol) && !tycon.isLambdaSub then tp + else (tycon.typeParams: @unchecked) match case LambdaParam(_, _) :: _ => recur(tp.superType) case tparams: List[Symbol @unchecked] => recur(tycon).substApprox(tparams, args) - } record(tp, baseTp) baseTp } diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala index aa3ae0c3c513..07ac2be90819 100644 --- a/compiler/src/dotty/tools/dotc/core/Symbols.scala +++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala @@ -40,7 +40,7 @@ object Symbols { val Ids: Property.Key[Array[String]] = new Property.Key /** A Symbol represents a Scala definition/declaration or a package. - * @param coord The coordinates of the symbol (a position or an index) + * @param myCoord The coordinates of the symbol (a position or an index) * @param id A unique identifier of the symbol (unique per ContextBase) */ class Symbol private[Symbols] (private var myCoord: Coord, val id: Int, val nestingLevel: Int) @@ -170,7 +170,7 @@ object Symbols { asInstanceOf[TermSymbol] } final def asType(using Context): TypeSymbol = { - assert(isType, s"isType called on not-a-Type $this"); + assert(isType, s"asType called on not-a-Type $this"); asInstanceOf[TypeSymbol] } diff --git a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala index 7c25ecd21ebf..2e8aee4df96c 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala @@ -9,9 +9,11 @@ import SymDenotations.LazyType import Decorators._ import util.Stats._ import Names._ +import StdNames.nme import Flags.{Module, Provisional} import dotty.tools.dotc.config.Config import cc.boxedUnlessFun +import dotty.tools.dotc.transform.TypeUtils.isErasedValueType object TypeApplications { @@ -503,6 +505,14 @@ class TypeApplications(val self: Type) extends AnyVal { case AppliedType(tycon, args) => args.boxedUnlessFun(tycon) case _ => Nil + /** If this is an encoding of a function type, return its arguments, otherwise return Nil. + * Handles `ErasedFunction`s and poly functions gracefully. 
+ */ + final def functionArgInfos(using Context): List[Type] = self.dealias match + case RefinedType(parent, nme.apply, mt: MethodType) if defn.isErasedFunctionType(parent) => (mt.paramInfos :+ mt.resultType) + case RefinedType(parent, nme.apply, mt: MethodType) if parent.typeSymbol eq defn.PolyFunctionClass => (mt.paramInfos :+ mt.resultType) + case _ => self.dropDependentRefinement.dealias.argInfos + /** Argument types where existential types in arguments are disallowed */ def argTypes(using Context): List[Type] = argInfos mapConserve noBounds diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 6428c5315263..6857e3da38ed 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -283,17 +283,28 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling val ctx = comparerContext given Context = ctx // optimization for performance val info2 = tp2.info + + /** Does `tp2` have a stable prefix? + * If that's not the case, following an alias via asSeenFrom could be lossy + * so we should not conclude `false` if comparing aliases fails. + * See pos/i17064.scala for a test case + */ + def hasStablePrefix(tp: NamedType) = + tp.prefix.isStable + info2 match case info2: TypeAlias => if recur(tp1, info2.alias) then return true - if tp2.asInstanceOf[TypeRef].canDropAlias then return false + if tp2.asInstanceOf[TypeRef].canDropAlias && hasStablePrefix(tp2) then + return false case _ => tp1 match case tp1: NamedType => tp1.info match { case info1: TypeAlias => if recur(info1.alias, tp2) then return true - if tp1.asInstanceOf[TypeRef].canDropAlias then return false + if tp1.asInstanceOf[TypeRef].canDropAlias && hasStablePrefix(tp2) then + return false case _ => } val sym2 = tp2.symbol @@ -302,7 +313,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling // For convenience we want X$ <:< X.type // This is safe because X$ self-type is X.type sym1 = sym1.companionModule - if ((sym1 ne NoSymbol) && (sym1 eq sym2)) + if (sym1 ne NoSymbol) && (sym1 eq sym2) then ctx.erasedTypes || sym1.isStaticOwner || isSubPrefix(tp1.prefix, tp2.prefix) || @@ -580,7 +591,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling val base = nonExprBaseType(tp1, cls2) if (base.typeSymbol == cls2) return true } - else if tp1.isLambdaSub && !tp1.isAnyKind then + else if tp1.typeParams.nonEmpty && !tp1.isAnyKind then return recur(tp1, EtaExpansion(tp2)) fourthTry } @@ -1339,8 +1350,11 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling } } || tryLiftedToThis2 - case _: TypeVar => - recur(tp1, tp2.superType) + case tv: TypeVar => + if tv.isInstantiated then + recur(tp1, tp2.superType) + else + compareAppliedType2(tp2, tv.origin, args2) case tycon2: AnnotatedType if !tycon2.isRefining => recur(tp1, tp2.superType) case tycon2: AppliedType => @@ -2116,7 +2130,10 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case nil => formals2.isEmpty } - loop(tp1.paramInfos, tp2.paramInfos) + // If methods have erased parameters, then the erased parameters must match + val erasedValid = (!tp1.hasErasedParams && !tp2.hasErasedParams) || (tp1.erasedParams == tp2.erasedParams) + + erasedValid && loop(tp1.paramInfos, tp2.paramInfos) } /** Do the parameter types of `tp1` and `tp2` match in a way that allows `tp1` @@ -2719,7 +2736,7 @@ class 
TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling x && { t.dealias match { case tp: TypeRef if !tp.symbol.isClass => false - case _: SkolemType | _: TypeVar | _: TypeParamRef => false + case _: SkolemType | _: TypeVar | _: TypeParamRef | _: TypeBounds => false case _ => foldOver(x, t) } } diff --git a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala index 0e67fd40991b..9bcb3eca36bb 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala @@ -520,8 +520,9 @@ object TypeErasure { case _: ClassInfo => true case _ => false } - case tp: TypeParamRef => false - case tp: TypeBounds => false + case _: TypeParamRef => false + case _: TypeBounds => false + case _: MatchType => false case tp: TypeProxy => hasStableErasure(tp.translucentSuperType) case tp: AndType => hasStableErasure(tp.tp1) && hasStableErasure(tp.tp2) case tp: OrType => hasStableErasure(tp.tp1) && hasStableErasure(tp.tp2) @@ -535,7 +536,14 @@ object TypeErasure { val paramss = res.paramNamess assert(paramss.length == 1) erasure(defn.FunctionType(paramss.head.length, - isContextual = res.isImplicitMethod, isErased = res.isErasedMethod)) + isContextual = res.isImplicitMethod)) + + def eraseErasedFunctionApply(erasedFn: MethodType)(using Context): Type = + val fnType = defn.FunctionType( + n = erasedFn.erasedParams.count(_ == false), + isContextual = erasedFn.isContextualMethod, + ) + erasure(fnType) } import TypeErasure._ @@ -612,6 +620,8 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst defn.FunctionType(0) case RefinedType(parent, nme.apply, refinedInfo) if parent.typeSymbol eq defn.PolyFunctionClass => erasePolyFunctionApply(refinedInfo) + case RefinedType(parent, nme.apply, refinedInfo: MethodType) if defn.isErasedFunctionType(parent) => + eraseErasedFunctionApply(refinedInfo) case tp: TypeProxy => this(tp.underlying) case tp @ AndType(tp1, tp2) => @@ -638,7 +648,13 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst case tp: MethodType => def paramErasure(tpToErase: Type) = erasureFn(sourceLanguage, semiEraseVCs, isConstructor, isSymbol, wildcardOK)(tpToErase) - val (names, formals0) = if (tp.isErasedMethod) (Nil, Nil) else (tp.paramNames, tp.paramInfos) + val (names, formals0) = if tp.hasErasedParams then + tp.paramNames + .zip(tp.paramInfos) + .zip(tp.erasedParams) + .collect{ case (param, isErased) if !isErased => param } + .unzip + else (tp.paramNames, tp.paramInfos) val formals = formals0.mapConserve(paramErasure) eraseResult(tp.resultType) match { case rt: MethodType => @@ -870,6 +886,8 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst // because RefinedTypes <: TypeProxy and it would be caught by // the case immediately below sigName(this(tp)) + case tp @ RefinedType(parent, nme.apply, refinedInfo) if defn.isErasedFunctionType(parent) => + sigName(this(tp)) case tp: TypeProxy => sigName(tp.underlying) case tp: WildcardType => diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index c91412988e82..6809e4b9083c 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -765,7 +765,7 @@ object TypeOps: * * Otherwise, return NoType. 
*/ - private def instantiateToSubType(tp1: NamedType, tp2: Type, mixins: List[Type])(using Context): Type = { + private def instantiateToSubType(tp1: NamedType, tp2: Type, mixins: List[Type])(using Context): Type = trace(i"instantiateToSubType($tp1, $tp2, $mixins)", typr) { // In order for a child type S to qualify as a valid subtype of the parent // T, we need to test whether it is possible S <: T. // @@ -854,6 +854,12 @@ object TypeOps: case tp: TypeRef if tp.symbol.isAbstractOrParamType => gadtSyms += tp.symbol traverseChildren(tp) + val owners = Iterator.iterate(tp.symbol)(_.maybeOwner).takeWhile(_.exists) + for sym <- owners do + // add ThisType's for the classes symbols in the ownership of `tp` + // for example, i16451.CanForward.scala, add `Namer.this`, as one of the owners of the type parameter `A1` + if sym.isClass && !sym.isAnonymousClass && !sym.isStaticOwner then + traverse(sym.thisType) case _ => traverseChildren(tp) } diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 03fc7274beaa..fb66d133c0ba 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -43,6 +43,7 @@ import scala.annotation.internal.sharable import scala.annotation.threadUnsafe import dotty.tools.dotc.transform.SymUtils._ +import dotty.tools.dotc.transform.TypeUtils.isErasedClass object Types { @@ -245,6 +246,11 @@ object Types { case _ => false } + /** Is this type exactly `Any`, or a type lambda ending in `Any`? */ + def isTopOfSomeKind(using Context): Boolean = dealias match + case tp: TypeLambda => tp.resType.isTopOfSomeKind + case _ => isExactlyAny + def isBottomType(using Context): Boolean = if ctx.mode.is(Mode.SafeNulls) && !ctx.phase.erasedTypes then hasClassSymbol(defn.NothingClass) else isBottomTypeAfterErasure @@ -425,7 +431,7 @@ object Types { def isContextualMethod: Boolean = false /** Is this a MethodType for which the parameters will not be used? */ - def isErasedMethod: Boolean = false + def hasErasedParams(using Context): Boolean = false /** Is this a match type or a higher-kinded abstraction of one? */ @@ -734,33 +740,24 @@ object Types { // TODO: change tp.parent to nullable or other values if ((tp.parent: Type | Null) == null) NoDenotation else if (tp eq pre) go(tp.parent) - else { + else //println(s"find member $pre . $name in $tp") // We have to be careful because we might open the same (wrt eq) recursive type - // twice during findMember which risks picking the wrong prefix in the `substRecThis(rt, pre)` - // call below. To avoid this problem we do a defensive copy of the recursive - // type first. But if we do this always we risk being inefficient and we ran into - // stackoverflows when compiling pos/hk.scala under the refinement encoding - // of hk-types. So we only do a copy if the type - // is visited again in a recursive call to `findMember`, as tracked by `tp.opened`. - // Furthermore, if this happens we mark the original recursive type with `openedTwice` - // which means that we always defensively copy the type in the future. This second - // measure is necessary because findMember calls might be cached, so do not - // necessarily appear in nested order. - // Without the `openedTwice` trick, Typer.scala fails to Ycheck - // at phase resolveSuper. + // twice during findMember with two different prefixes, which risks picking the wrong prefix + // in the `substRecThis(rt, pre)` call below. 
To avoid this problem we do a defensive copy + // of the recursive type if the new prefix `pre` is neq the prefix with which the + // type was previously opened. + + val openedPre = tp.openedWithPrefix val rt = - if (tp.opened) { // defensive copy - tp.openedTwice = true + if openedPre.exists && (openedPre ne pre) then // defensive copy RecType(rt => tp.parent.substRecThis(tp, rt.recThis)) - } else tp - rt.opened = true + rt.openedWithPrefix = pre try go(rt.parent).mapInfo(_.substRecThis(rt, pre)) - finally - if (!rt.openedTwice) rt.opened = false - } + finally rt.openedWithPrefix = NoType + end goRec def goRefined(tp: RefinedType) = { val pdenot = go(tp.parent) @@ -811,9 +808,14 @@ object Types { // is made to save execution time in the common case. See i9844.scala for test cases. def qualifies(sd: SingleDenotation) = !sd.symbol.is(Private) || sd.symbol.owner == tp.cls - d match + d.match case d: SingleDenotation => if qualifies(d) then d else NoDenotation case d => d.filterWithPredicate(qualifies) + .orElse: + // Only inaccessible private symbols were found. But there could still be + // shadowed non-private symbols, so as a fallback search for those. + // Test case is i18361.scala. + findMember(name, pre, required, excluded | Private) else d else // There is a special case to handle: @@ -1180,7 +1182,8 @@ object Types { /** Remove all AnnotatedTypes wrapping this type. */ - def stripAnnots(using Context): Type = this + def stripAnnots(keep: Annotation => Context ?=> Boolean)(using Context): Type = this + final def stripAnnots(using Context): Type = stripAnnots(_ => false) /** Strip TypeVars and Annotation and CapturingType wrappers */ def stripped(using Context): Type = this @@ -1470,7 +1473,7 @@ object Types { /** Dealias, and if result is a dependent function type, drop the `apply` refinement. */ final def dropDependentRefinement(using Context): Type = dealias match { - case RefinedType(parent, nme.apply, _) => parent + case RefinedType(parent, nme.apply, mt) if defn.isNonRefinedFunction(parent) => parent case tp => tp } @@ -1573,8 +1576,6 @@ object Types { else NoType case SkolemType(tp) => loop(tp) - case pre: WildcardType => - WildcardType case pre: TypeRef => pre.info match { case TypeAlias(alias) => loop(alias) @@ -1712,6 +1713,8 @@ object Types { else NoType case t if defn.isNonRefinedFunction(t) => t + case t if defn.isErasedFunctionType(t) => + t case t @ SAMType(_) => t case _ => @@ -1839,15 +1842,15 @@ object Types { case mt: MethodType if !mt.isParamDependent => val formals1 = if (dropLast == 0) mt.paramInfos else mt.paramInfos dropRight dropLast val isContextual = mt.isContextualMethod && !ctx.erasedTypes - val isErased = mt.isErasedMethod && !ctx.erasedTypes val result1 = mt.nonDependentResultApprox match { case res: MethodType => res.toFunctionType(isJava) case res => res } val funType = defn.FunctionOf( formals1 mapConserve (_.translateFromRepeated(toArray = isJava)), - result1, isContextual, isErased) - if alwaysDependent || mt.isResultDependent then RefinedType(funType, nme.apply, mt) + result1, isContextual) + if alwaysDependent || mt.isResultDependent then + RefinedType(funType, nme.apply, mt) else funType } @@ -2100,7 +2103,7 @@ object Types { */ final def isTracked(using Context): Boolean = canBeTracked && !captureSetOfInfo.isAlwaysEmpty - /** Is this reference the root capability `*` ? */ + /** Is this reference the root capability `cap` ? 
*/ def isRootCapability(using Context): Boolean = false /** Normalize reference so that it can be compared with `eq` for equality */ @@ -2181,7 +2184,7 @@ object Types { // --- NamedTypes ------------------------------------------------------------------ - abstract class NamedType extends CachedProxyType, ValueType { self => + abstract class NamedType extends CachedProxyType, ValueType, Product { self => type ThisType >: this.type <: NamedType type ThisName <: Name @@ -2189,6 +2192,8 @@ object Types { val prefix: Type def designator: Designator protected def designator_=(d: Designator): Unit + def _1: Type + def _2: Designator assert(NamedType.validPrefix(prefix), s"invalid prefix $prefix") @@ -2500,10 +2505,49 @@ object Types { /** A reference with the initial symbol in `symd` has an info that * might depend on the given prefix. + * Note: If M is an abstract type or non-final term member in trait or class C, + * its info depends even on C.this if class C has a self type that refines + * the info of M. */ private def infoDependsOnPrefix(symd: SymDenotation, prefix: Type)(using Context): Boolean = + + def refines(tp: Type, name: Name): Boolean = tp match + case tp: TypeRef => + tp.symbol match + case cls: ClassSymbol => + val otherd = cls.nonPrivateMembersNamed(name) + otherd.exists && !otherd.containsSym(symd.symbol) + case tsym => + refines(tsym.info.hiBound, name) + // avoid going through tp.denot, since that might call infoDependsOnPrefix again + case RefinedType(parent, rname, _) => + rname == name || refines(parent, name) + case tp: TypeProxy => + refines(tp.underlying, name) + case AndType(tp1, tp2) => + refines(tp1, name) || refines(tp2, name) + case _ => + false + + def givenSelfTypeOrCompleter(cls: Symbol) = cls.infoOrCompleter match + case cinfo: ClassInfo => + cinfo.selfInfo match + case sym: Symbol => sym.infoOrCompleter + case tpe: Type => tpe + case _ => NoType + symd.maybeOwner.membersNeedAsSeenFrom(prefix) && !symd.is(NonMember) - || prefix.isInstanceOf[Types.ThisType] && symd.is(Opaque) // see pos/i11277.scala for a test where this matters + || prefix.match + case prefix: Types.ThisType => + (symd.isAbstractType + || symd.isTerm + && !symd.flagsUNSAFE.isOneOf(Module | Final | Param) + && !symd.isConstructor + && !symd.maybeOwner.isEffectivelyFinal) + && prefix.sameThis(symd.maybeOwner.thisType) + && refines(givenSelfTypeOrCompleter(prefix.cls), symd.name) + case _ => false + end infoDependsOnPrefix /** Is this a reference to a class or object member with an info that might depend * on the prefix? @@ -2513,10 +2557,7 @@ object Types { case _ => true } - /** (1) Reduce a type-ref `W # X` or `W { ... } # U`, where `W` is a wildcard type - * to an (unbounded) wildcard type. - * - * (2) Reduce a type-ref `T { X = U; ... } # X` to `U` + /** Reduce a type-ref `T { X = U; ... } # X` to `U` * provided `U` does not refer with a RecThis to the * refinement type `T { X = U; ... 
}` */ @@ -2638,45 +2679,33 @@ object Types { case _ => } } - if (prefix.isInstanceOf[WildcardType]) WildcardType + if (prefix.isInstanceOf[WildcardType]) WildcardType.sameKindAs(this) else withPrefix(prefix) } /** A reference like this one, but with the given symbol, if it exists */ - final def withSym(sym: Symbol)(using Context): ThisType = - if ((designator ne sym) && sym.exists) NamedType(prefix, sym).asInstanceOf[ThisType] + private def withSym(sym: Symbol)(using Context): ThisType = + if designator ne sym then NamedType(prefix, sym).asInstanceOf[ThisType] + else this + + private def withName(name: Name)(using Context): ThisType = + if designator ne name then NamedType(prefix, name).asInstanceOf[ThisType] else this /** A reference like this one, but with the given denotation, if it exists. - * Returns a new named type with the denotation's symbol if that symbol exists, and - * one of the following alternatives applies: - * 1. The current designator is a symbol and the symbols differ, or - * 2. The current designator is a name and the new symbolic named type - * does not have a currently known denotation. - * 3. The current designator is a name and the new symbolic named type - * has the same info as the current info - * Otherwise the current denotation is overwritten with the given one. - * - * Note: (2) and (3) are a "lock in mechanism" where a reference with a name as - * designator can turn into a symbolic reference. - * - * Note: This is a subtle dance to keep the balance between going to symbolic - * references as much as we can (since otherwise we'd risk getting cycles) - * and to still not lose any type info in the denotation (since symbolic - * references often recompute their info directly from the symbol's info). - * A test case is neg/opaque-self-encoding.scala. + * Returns a new named type with the denotation's symbol as designator + * if that symbol exists and it is different from the current designator. + * Returns a new named type with the denotation's name as designator + * if the denotation is overloaded and its name is different from the + * current designator. */ final def withDenot(denot: Denotation)(using Context): ThisType = if denot.exists then - val adapted = withSym(denot.symbol) - val result = - if (adapted.eq(this) - || designator.isInstanceOf[Symbol] - || !adapted.denotationIsCurrent - || adapted.info.eq(denot.info)) - adapted + val adapted = + if denot.symbol.exists then withSym(denot.symbol) + else if denot.isOverloaded then withName(denot.name) else this - val lastDenot = result.lastDenotation + val lastDenot = adapted.lastDenotation denot match case denot: SymDenotation if denot.validFor.firstPhaseId < ctx.phase.id @@ -2686,15 +2715,15 @@ object Types { // In this case the new SymDenotation might be valid for all phases, which means // we would not recompute the denotation when travelling to an earlier phase, maybe // in the next run. We fix that problem by creating a UniqueRefDenotation instead.
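// Illustrative sketch (not part of the patch): the self-type case that
// `infoDependsOnPrefix` now detects. A non-final member selected on `C.this`
// can have its info refined by C's declared self type, so asSeenFrom must not
// shortcut the prefix substitution. All names below are invented:
trait Base { def f: Any }
trait Refining extends Base { def f: Int }
trait C extends Base { self: Refining =>
  val n: Int = this.f // seen from C.this, `f` has type Int via the self type
}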
- core.println(i"overwrite ${result.toString} / ${result.lastDenotation}, ${result.lastDenotation.getClass} with $denot at ${ctx.phaseId}") - result.setDenot( + core.println(i"overwrite ${adapted.toString} / ${adapted.lastDenotation}, ${adapted.lastDenotation.getClass} with $denot at ${ctx.phaseId}") + adapted.setDenot( UniqueRefDenotation( denot.symbol, denot.info, Period(ctx.runId, ctx.phaseId, denot.validFor.lastPhaseId), this.prefix)) case _ => - result.setDenot(denot) - result.asInstanceOf[ThisType] + adapted.setDenot(denot) + adapted.asInstanceOf[ThisType] else // don't assign NoDenotation, we might need to recover later. Test case is pos/avoid.scala. this @@ -2904,6 +2933,7 @@ object Types { def apply(prefix: Type, designator: Name, denot: Denotation)(using Context): NamedType = if (designator.isTermName) TermRef.apply(prefix, designator.asTermName, denot) else TypeRef.apply(prefix, designator.asTypeName, denot) + def unapply(tp: NamedType): NamedType = tp def validPrefix(prefix: Type): Boolean = prefix.isValueType || (prefix eq NoPrefix) } @@ -3163,9 +3193,8 @@ object Types { */ class RecType(parentExp: RecType => Type) extends RefinedOrRecType with BindingType { - // See discussion in findMember#goRec why these vars are needed - private[Types] var opened: Boolean = false - private[Types] var openedTwice: Boolean = false + // See discussion in findMember#goRec why this field is needed + private[Types] var openedWithPrefix: Type = NoType val parent: Type = parentExp(this: @unchecked) @@ -3619,6 +3648,8 @@ object Types { def companion: LambdaTypeCompanion[ThisName, PInfo, This] + def erasedParams(using Context) = List.fill(paramInfos.size)(false) + /** The type `[tparams := paramRefs] tp`, where `tparams` can be * either a list of type parameter symbols or a list of lambda parameters * @@ -3696,7 +3727,11 @@ object Types { else Signature(tp, sourceLanguage) this match case tp: MethodType => - val params = if (isErasedMethod) Nil else tp.paramInfos + val params = if (hasErasedParams) + tp.paramInfos + .zip(tp.erasedParams) + .collect { case (param, isErased) if !isErased => param } + else tp.paramInfos resultSignature.prependTermParams(params, sourceLanguage) case tp: PolyType => resultSignature.prependTypeParams(tp.paramNames.length) @@ -3856,7 +3891,8 @@ object Types { /** Does one of the parameter types contain references to earlier parameters * of this method type which cannot be eliminated by de-aliasing? */ - def isParamDependent(using Context): Boolean = paramDependencyStatus == TrueDeps + def isParamDependent(using Context): Boolean = + paramDependencyStatus == TrueDeps || paramDependencyStatus == CaptureDeps /** Is there a dependency involving a reference in a capture set, but * otherwise no true result dependency? 
@@ -3903,16 +3939,14 @@ def companion: MethodTypeCompanion final override def isImplicitMethod: Boolean = - companion.eq(ImplicitMethodType) || - companion.eq(ErasedImplicitMethodType) || - isContextualMethod - final override def isErasedMethod: Boolean = - companion.eq(ErasedMethodType) || - companion.eq(ErasedImplicitMethodType) || - companion.eq(ErasedContextualMethodType) + companion.eq(ImplicitMethodType) || isContextualMethod + final override def hasErasedParams(using Context): Boolean = + erasedParams.contains(true) final override def isContextualMethod: Boolean = - companion.eq(ContextualMethodType) || - companion.eq(ErasedContextualMethodType) + companion.eq(ContextualMethodType) + + override def erasedParams(using Context): List[Boolean] = + paramInfos.map(p => p.hasAnnotation(defn.ErasedParamAnnot)) protected def prefixString: String = companion.prefixString } @@ -3938,10 +3972,15 @@ object Types { protected def toPInfo(tp: Type)(using Context): PInfo + /** If `tparam` is a sealed type parameter symbol of a polymorphic method, add + * a @caps.Sealed annotation to the upper bound in `tp`. + */ + protected def addSealed(tparam: ParamInfo, tp: Type)(using Context): Type = tp + def fromParams[PI <: ParamInfo.Of[N]](params: List[PI], resultType: Type)(using Context): Type = if (params.isEmpty) resultType else apply(params.map(_.paramName))( - tl => params.map(param => toPInfo(tl.integrate(params, param.paramInfo))), + tl => params.map(param => toPInfo(addSealed(param, tl.integrate(params, param.paramInfo)))), tl => tl.integrate(params, resultType)) } @@ -4009,7 +4048,7 @@ object Types { tl => tl.integrate(params, resultType)) end fromSymbols - final def apply(paramNames: List[TermName])(paramInfosExp: MethodType => List[Type], resultTypeExp: MethodType => Type)(using Context): MethodType = + def apply(paramNames: List[TermName])(paramInfosExp: MethodType => List[Type], resultTypeExp: MethodType => Type)(using Context): MethodType = checkValid(unique(new CachedMethodType(paramNames)(paramInfosExp, resultTypeExp, self))) def checkValid(mt: MethodType)(using Context): mt.type = { @@ -4024,19 +4063,14 @@ object Types { } object MethodType extends MethodTypeCompanion("MethodType") { - def companion(isContextual: Boolean = false, isImplicit: Boolean = false, isErased: Boolean = false): MethodTypeCompanion = - if (isContextual) - if (isErased) ErasedContextualMethodType else ContextualMethodType - else if (isImplicit) - if (isErased) ErasedImplicitMethodType else ImplicitMethodType - else - if (isErased) ErasedMethodType else MethodType + def companion(isContextual: Boolean = false, isImplicit: Boolean = false): MethodTypeCompanion = + if (isContextual) ContextualMethodType + else if (isImplicit) ImplicitMethodType + else MethodType } - object ErasedMethodType extends MethodTypeCompanion("ErasedMethodType") + object ContextualMethodType extends MethodTypeCompanion("ContextualMethodType") - object ErasedContextualMethodType extends MethodTypeCompanion("ErasedContextualMethodType") object ImplicitMethodType extends MethodTypeCompanion("ImplicitMethodType") - object ErasedImplicitMethodType extends MethodTypeCompanion("ErasedImplicitMethodType") /** A ternary extractor for MethodType */ object MethodTpe { @@ -4268,6 +4302,16 @@ object Types { resultTypeExp: PolyType => Type)(using Context): PolyType = unique(new PolyType(paramNames)(paramInfosExp, resultTypeExp)) + override protected def addSealed(tparam: ParamInfo, tp: Type)(using Context): Type = + tparam match + case tparam:
Symbol if tparam.is(Sealed) => + tp match + case tp @ TypeBounds(lo, hi) => + tp.derivedTypeBounds(lo, + AnnotatedType(hi, Annotation(defn.Caps_SealedAnnot, tparam.span))) + case _ => tp + case _ => tp + def unapply(tl: PolyType): Some[(List[LambdaParam], Type)] = Some((tl.typeParams, tl.resType)) } @@ -4780,7 +4824,7 @@ object Types { def hasLowerBound(using Context): Boolean = !currentEntry.loBound.isExactlyNothing /** For uninstantiated type variables: Is the upper bound different from Any? */ - def hasUpperBound(using Context): Boolean = !currentEntry.hiBound.isRef(defn.AnyClass) + def hasUpperBound(using Context): Boolean = !currentEntry.hiBound.isTopOfSomeKind /** Unwrap to instance (if instantiated) or origin (if not), until result * is no longer a TypeVar @@ -4972,9 +5016,9 @@ object Types { if (!givenSelf.isValueType) appliedRef else if (clsd.is(Module)) givenSelf else if (ctx.erasedTypes) appliedRef - else givenSelf match - case givenSelf @ EventuallyCapturingType(tp, _) => - givenSelf.derivedAnnotatedType(tp & appliedRef, givenSelf.annot) + else givenSelf.dealiasKeepAnnots match + case givenSelf1 @ EventuallyCapturingType(tp, _) => + givenSelf1.derivedAnnotatedType(tp & appliedRef, givenSelf1.annot) case _ => AndType(givenSelf, appliedRef) } @@ -5223,6 +5267,10 @@ object Types { else result def emptyPolyKind(using Context): TypeBounds = apply(defn.NothingType, defn.AnyKindType) + /** An interval covering all types of the same kind as `tp`. */ + def emptySameKindAs(tp: Type)(using Context): TypeBounds = + val top = tp.topType + if top.isExactlyAny then empty else apply(defn.NothingType, top) def upper(hi: Type)(using Context): TypeBounds = apply(defn.NothingType, hi) def lower(lo: Type)(using Context): TypeBounds = apply(lo, defn.AnyType) } @@ -5251,7 +5299,10 @@ object Types { override def stripTypeVar(using Context): Type = derivedAnnotatedType(parent.stripTypeVar, annot) - override def stripAnnots(using Context): Type = parent.stripAnnots + override def stripAnnots(keep: Annotation => (Context) ?=> Boolean)(using Context): Type = + val p = parent.stripAnnots(keep) + if keep(annot) then derivedAnnotatedType(p, annot) + else p override def stripped(using Context): Type = parent.stripped @@ -5398,6 +5449,9 @@ object Types { else result else unique(CachedWildcardType(bounds)) + /** A wildcard matching any type of the same kind as `tp`. */ + def sameKindAs(tp: Type)(using Context): WildcardType = + apply(TypeBounds.emptySameKindAs(tp)) } /** An extractor for single abstract method types. 
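// Illustrative sketch (not part of the patch): how the kind-aware
// approximations above fit together; `unconstrained` is an invented helper.
// For a type constructor, an "anything goes" bound must use the top type of
// its own kind rather than Any:
import dotty.tools.dotc.core.Types.*, dotty.tools.dotc.core.Contexts.Context
def unconstrained(tp: Type)(using Context): (TypeBounds, WildcardType) =
  (TypeBounds.emptySameKindAs(tp), WildcardType.sameKindAs(tp))
// e.g. for `tp` of kind [X] =>> Any this yields the interval
// >: Nothing <: ([X] =>> Any) and a wildcard bounded by that same top type.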
@@ -5706,6 +5760,12 @@ object Types { case tp @ SuperType(thistp, supertp) => derivedSuperType(tp, this(thistp), this(supertp)) + case tp @ ConstantType(const @ Constant(_: Type)) => + val classType = const.tpe + val classType1 = this(classType) + if classType eq classType1 then tp + else classType1 + case tp: LazyRef => LazyRef { refCtx => given Context = refCtx diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala index 7702e6a93446..0c701eb03d38 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala @@ -758,10 +758,12 @@ class ClassfileParser( case tpnme.MethodParametersATTR => val paramCount = in.nextByte for i <- 0 until paramCount do - val name = pool.getName(in.nextChar) + val index = in.nextChar val flags = in.nextChar - if (flags & JAVA_ACC_SYNTHETIC) == 0 then - res.namedParams += (i -> name.name) + if index != 0 then + val name = pool.getName(index) + if (flags & JAVA_ACC_SYNTHETIC) == 0 then + res.namedParams += (i -> name.name) case tpnme.AnnotationDefaultATTR => sym.addAnnotation(Annotation(defn.AnnotationDefaultAnnot, Nil, sym.span)) @@ -1086,10 +1088,10 @@ class ClassfileParser( if (sym == classRoot.symbol) staticScope.lookup(name) else { - var module = sym.companionModule - if (!module.exists && sym.isAbsent()) - module = sym.scalacLinkedClass - module.info.member(name).symbol + var moduleClass = sym.registeredCompanion + if (!moduleClass.exists && sym.isAbsent()) + moduleClass = sym.scalacLinkedClass + moduleClass.info.member(name).symbol } else if (sym == classRoot.symbol) instanceScope.lookup(name) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index bef28545592a..645c6f81e539 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -287,7 +287,6 @@ class TreePickler(pickler: TastyPickler) { var mods = EmptyFlags if tpe.isContextualMethod then mods |= Given else if tpe.isImplicitMethod then mods |= Implicit - if tpe.isErasedMethod then mods |= Erased pickleMethodic(METHODtype, tpe, mods) case tpe: ParamRef => assert(pickleParamRef(tpe), s"orphan parameter reference: $tpe") @@ -666,11 +665,31 @@ class TreePickler(pickler: TastyPickler) { pickleTree(hi) pickleTree(alias) } - case Hole(_, idx, args, _, tpt) => + case tree @ Quote(body, Nil) => + // TODO: Add QUOTE tag to TASTy + assert(body.isTerm, + """Quote with type should not be pickled. 
+ |Quote with type should only exist after staging phase at staging level 0.""".stripMargin) + pickleTree( + // scala.quoted.runtime.Expr.quoted[]() + ref(defn.QuotedRuntime_exprQuote) + .appliedToType(tree.bodyType) + .appliedTo(body) + .withSpan(tree.span) + ) + case Splice(expr) => + pickleTree( // TODO: Add SPLICE tag to TASTy + // scala.quoted.runtime.Expr.splice[]() + ref(defn.QuotedRuntime_exprSplice) + .appliedToType(tree.tpe) + .appliedTo(expr) + .withSpan(tree.span) + ) + case Hole(_, idx, args, _) => writeByte(HOLE) withLength { writeNat(idx) - pickleType(tpt.tpe, richTypes = true) + pickleType(tree.tpe, richTypes = true) args.foreach(pickleTree) } } diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 91290b4ddd41..98bd7152ff37 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -74,6 +74,9 @@ class TreeUnpickler(reader: TastyReader, */ private val typeAtAddr = new mutable.HashMap[Addr, Type] + /** If this is a pickled quote, the owner of the quote, otherwise NoSymbol. */ + private var rootOwner: Symbol = NoSymbol + /** The root symbol denotation which are defined by the Tasty file associated with this * TreeUnpickler. Set by `enterTopLevel`. */ @@ -106,11 +109,12 @@ class TreeUnpickler(reader: TastyReader, /** The unpickled trees */ def unpickle(mode: UnpickleMode)(using Context): List[Tree] = { + if mode != UnpickleMode.TopLevel then rootOwner = ctx.owner assert(roots != null, "unpickle without previous enterTopLevel") val rdr = new TreeReader(reader) mode match { case UnpickleMode.TopLevel => rdr.readTopLevel() - case UnpickleMode.Term => rdr.readTerm() :: Nil + case UnpickleMode.Term => rdr.readTree() :: Nil case UnpickleMode.TypeTree => rdr.readTpt() :: Nil } } @@ -245,7 +249,6 @@ class TreeUnpickler(reader: TastyReader, while currentAddr != end do // avoid boxing the mods readByte() match case IMPLICIT => mods |= Implicit - case ERASED => mods |= Erased case GIVEN => mods |= Given (names, mods) @@ -386,7 +389,7 @@ class TreeUnpickler(reader: TastyReader, val hi = readVariances(readType()) createNullableTypeBounds(lo, hi) case ANNOTATEDtype => - AnnotatedType(readType(), Annotation(readTerm())) + AnnotatedType(readType(), Annotation(readTree())) case ANDtype => AndType(readType(), readType()) case ORtype => @@ -402,9 +405,7 @@ class TreeUnpickler(reader: TastyReader, case METHODtype => def methodTypeCompanion(mods: FlagSet): MethodTypeCompanion = if mods.is(Implicit) then ImplicitMethodType - else if mods.isAllOf(Erased | Given) then ErasedContextualMethodType else if mods.is(Given) then ContextualMethodType - else if mods.is(Erased) then ErasedMethodType else MethodType readMethodic(methodTypeCompanion, _.toTermName) case TYPELAMBDAtype => @@ -487,7 +488,7 @@ class TreeUnpickler(reader: TastyReader, def readTypeRef(): Type = typeAtAddr(readAddr()) - def readTermRef()(using Context): TermRef = + def readTreeRef()(using Context): TermRef = readType().asInstanceOf[TermRef] /** Under pureFunctions, map all function types to impure function types, @@ -655,7 +656,7 @@ class TreeUnpickler(reader: TastyReader, val ctx1 = localContext(sym)(using ctx0).addMode(Mode.ReadPositions) inContext(sourceChangeContext(Addr(0))(using ctx1)) { // avoids space leaks by not capturing the current context - forkAt(rhsStart).readTerm() + forkAt(rhsStart).readTree() } }) goto(start) @@ -738,7 +739,7 @@ class
TreeUnpickler(reader: TastyReader, readByte() val end = readEnd() val tp = readType() - val lazyAnnotTree = readLaterWithOwner(end, _.readTerm()) + val lazyAnnotTree = readLaterWithOwner(end, _.readTree()) owner => new DeferredSymAndTree(tp.typeSymbol, lazyAnnotTree(owner).complete): // Only force computation of symbol if it has the right name. This added @@ -789,7 +790,7 @@ class TreeUnpickler(reader: TastyReader, if (sctx `ne` ctx) return processPackage(op)(using sctx) readByte() val end = readEnd() - val pid = ref(readTermRef()).asInstanceOf[RefTree] + val pid = ref(readTreeRef()).asInstanceOf[RefTree] op(pid, end)(using localContext(pid.symbol.moduleClass)) } @@ -857,7 +858,7 @@ class TreeUnpickler(reader: TastyReader, def complete(using Context) = inlines.Inlines.bodyToInline(sym) } else - readLater(end, _.readTerm()) + readLater(end, _.readTree()) def ValDef(tpt: Tree) = ta.assignType(untpd.ValDef(sym.name.asTermName, tpt, readRhs(using localCtx)), sym) @@ -983,7 +984,7 @@ class TreeUnpickler(reader: TastyReader, case SELECTin => val end = readEnd() readName() - readTerm() match + readTree() match case nu: New => try nu.tpe finally goto(end) @@ -999,7 +1000,7 @@ class TreeUnpickler(reader: TastyReader, collectWhile(nextByte != SELFDEF && nextByte != DEFDEF) { nextUnsharedTag match case APPLY | TYPEAPPLY | BLOCK => - if withArgs then readTerm() + if withArgs then readTree() else InferredTypeTree().withType(readParentType()) case _ => readTpt() } @@ -1094,7 +1095,7 @@ class TreeUnpickler(reader: TastyReader, setSpan(start, PackageDef(pid, readIndexedStats(exprOwner, end))) } case _ => - readTerm()(using ctx.withOwner(exprOwner)) + readTree()(using ctx.withOwner(exprOwner)) } inline def readImportOrExport(inline mkTree: @@ -1103,7 +1104,7 @@ class TreeUnpickler(reader: TastyReader, assert(sourcePathAt(start).isEmpty) readByte() readEnd() - val expr = readTerm() + val expr = readTree() setSpan(start, mkTree(expr, readSelectors())) } @@ -1161,14 +1162,14 @@ class TreeUnpickler(reader: TastyReader, // ------ Reading trees ----------------------------------------------------- - def readTerm()(using Context): Tree = { // TODO: rename to readTree + def readTree()(using Context): Tree = { val sctx = sourceChangeContext() - if (sctx `ne` ctx) return readTerm()(using sctx) + if (sctx `ne` ctx) return readTree()(using sctx) val start = currentAddr val tag = readByte() pickling.println(s"reading term ${astTagToString(tag)} at $start, ${ctx.source}") - def readPathTerm(): Tree = { + def readPathTree(): Tree = { goto(start) readType() match { case path: TypeRef => TypeTree(path) @@ -1188,12 +1189,12 @@ class TreeUnpickler(reader: TastyReader, ConstFold.Select(untpd.Select(qual, name).withType(tpe)) def completeSelect(name: Name, sig: Signature, target: Name): Select = - val qual = readTerm() + val qual = readTree() val denot = accessibleDenot(qual.tpe.widenIfUnstable, name, sig, target) makeSelect(qual, name, denot) def readQualId(): (untpd.Ident, TypeRef) = - val qual = readTerm().asInstanceOf[untpd.Ident] + val qual = readTree().asInstanceOf[untpd.Ident] (untpd.Ident(qual.name).withSpan(qual.span), qual.tpe.asInstanceOf[TypeRef]) def accessibleDenot(qualType: Type, name: Name, sig: Signature, target: Name) = { @@ -1203,9 +1204,9 @@ class TreeUnpickler(reader: TastyReader, else qualType.findMember(name, pre, excluded = Private).atSignature(sig, target) } - def readSimpleTerm(): Tree = tag match { + def readSimpleTree(): Tree = tag match { case SHAREDterm => - forkAt(readAddr()).readTerm() + 
forkAt(readAddr()).readTree() case IDENT => untpd.Ident(readName()).withType(readType()) case IDENTtpt => @@ -1224,16 +1225,16 @@ class TreeUnpickler(reader: TastyReader, case NEW => New(readTpt()) case THROW => - Throw(readTerm()) + Throw(readTree()) case SINGLETONtpt => - SingletonTypeTree(readTerm()) + SingletonTypeTree(readTree()) case BYNAMEtpt => val arg = readTpt() ByNameTypeTree(if knowsPureFuns then arg else arg.adaptByNameArgUnderPureFuns) case NAMEDARG => - NamedArg(readName(), readTerm()) + NamedArg(readName(), readTree()) case _ => - readPathTerm() + readPathTree() } /** Adapt constructor calls where class has only using clauses from old to new scheme. @@ -1266,62 +1267,80 @@ class TreeUnpickler(reader: TastyReader, res.withAttachment(SuppressedApplyToNone, ()) else res + def quotedExpr(fn: Tree, args: List[Tree]): Tree = + val TypeApply(_, targs) = fn: @unchecked + untpd.Quote(args.head, Nil).withBodyType(targs.head.tpe) + + def splicedExpr(fn: Tree, args: List[Tree]): Tree = + val TypeApply(_, targs) = fn: @unchecked + Splice(args.head, targs.head.tpe) + + def nestedSpliceExpr(fn: Tree, args: List[Tree]): Tree = + fn match + case Apply(TypeApply(_, targs), _ :: Nil) => // nestedSplice[T](quotes)(expr) + Splice(args.head, targs.head.tpe) + case _ => // nestedSplice[T](quotes) + tpd.Apply(fn, args) + def simplifyLub(tree: Tree): Tree = tree.overwriteType(tree.tpe.simplified) tree - def readLengthTerm(): Tree = { + def readLengthTree(): Tree = { val end = readEnd() val result = (tag: @switch) match { case SUPER => - val qual = readTerm() + val qual = readTree() val (mixId, mixTpe) = ifBefore(end)(readQualId(), (untpd.EmptyTypeIdent, NoType)) tpd.Super(qual, mixId, mixTpe.typeSymbol) case APPLY => - val fn = readTerm() - val args = until(end)(readTerm()) + val fn = readTree() + val args = until(end)(readTree()) if fn.symbol.isConstructor then constructorApply(fn, args) + else if fn.symbol == defn.QuotedRuntime_exprQuote then quotedExpr(fn, args) + else if fn.symbol == defn.QuotedRuntime_exprSplice then splicedExpr(fn, args) + else if fn.symbol == defn.QuotedRuntime_exprNestedSplice then nestedSpliceExpr(fn, args) else tpd.Apply(fn, args) case TYPEAPPLY => - tpd.TypeApply(readTerm(), until(end)(readTpt())) + tpd.TypeApply(readTree(), until(end)(readTpt())) case APPLYsigpoly => - val fn = readTerm() + val fn = readTree() val methType = readType() - val args = until(end)(readTerm()) + val args = until(end)(readTree()) val fun2 = typer.Applications.retypeSignaturePolymorphicFn(fn, methType) tpd.Apply(fun2, args) case TYPED => - val expr = readTerm() + val expr = readTree() val tpt = readTpt() Typed(expr, tpt) case ASSIGN => - Assign(readTerm(), readTerm()) + Assign(readTree(), readTree()) case BLOCK => val exprReader = fork skipTree() readStats(ctx.owner, end, - (stats, ctx) => Block(stats, exprReader.readTerm()(using ctx))) + (stats, ctx) => Block(stats, exprReader.readTree()(using ctx))) case INLINED => val exprReader = fork skipTree() def maybeCall = nextUnsharedTag match { case VALDEF | DEFDEF => EmptyTree - case _ => readTerm() + case _ => readTree() } val call = ifBefore(end)(maybeCall, EmptyTree) val bindings = readStats(ctx.owner, end).asInstanceOf[List[ValOrDefDef]] - val expansion = exprReader.readTerm() // need bindings in scope, so needs to be read before + val expansion = exprReader.readTree() // need bindings in scope, so needs to be read before Inlined(call, bindings, expansion) case IF => if (nextByte == INLINE) { readByte() - InlineIf(readTerm(), readTerm(), 
readTerm()) + InlineIf(readTree(), readTree(), readTree()) } else - If(readTerm(), readTerm(), readTerm()) + If(readTree(), readTree(), readTree()) case LAMBDA => - val meth = readTerm() + val meth = readTree() val tpt = ifBefore(end)(readTpt(), EmptyTree) Closure(Nil, meth, tpt) case MATCH => @@ -1332,24 +1351,24 @@ class TreeUnpickler(reader: TastyReader, } else if (nextByte == INLINE) { readByte() - InlineMatch(readTerm(), readCases(end)) + InlineMatch(readTree(), readCases(end)) } - else Match(readTerm(), readCases(end))) + else Match(readTree(), readCases(end))) case RETURN => val from = readSymRef() - val expr = ifBefore(end)(readTerm(), EmptyTree) + val expr = ifBefore(end)(readTree(), EmptyTree) Return(expr, Ident(from.termRef)) case WHILE => - WhileDo(readTerm(), readTerm()) + WhileDo(readTree(), readTree()) case TRY => simplifyLub( - Try(readTerm(), readCases(end), ifBefore(end)(readTerm(), EmptyTree))) + Try(readTree(), readCases(end), ifBefore(end)(readTree(), EmptyTree))) case SELECTouter => val levels = readNat() - readTerm().outerSelect(levels, SkolemType(readType())) + readTree().outerSelect(levels, SkolemType(readType())) case SELECTin => var sname = readName() - val qual = readTerm() + val qual = readTree() val ownerTpe = readType() val owner = ownerTpe.typeSymbol val SignedName(name, sig, target) = sname: @unchecked // only methods with params use SELECTin @@ -1380,26 +1399,26 @@ class TreeUnpickler(reader: TastyReader, makeSelect(qual, name, denot) case REPEATED => val elemtpt = readTpt() - SeqLiteral(until(end)(readTerm()), elemtpt) + SeqLiteral(until(end)(readTree()), elemtpt) case BIND => val sym = symAtAddr.getOrElse(start, forkAt(start).createSymbol()) readName() readType() - val body = readTerm() + val body = readTree() val (givenFlags, _, _) = readModifiers(end) sym.setFlag(givenFlags) Bind(sym, body) case ALTERNATIVE => - Alternative(until(end)(readTerm())) + Alternative(until(end)(readTree())) case UNAPPLY => - val fn = readTerm() + val fn = readTree() val implicitArgs = collectWhile(nextByte == IMPLICITarg) { readByte() - readTerm() + readTree() } val patType = readType() - val argPats = until(end)(readTerm()) + val argPats = until(end)(readTree()) UnApply(fn, implicitArgs, argPats, patType) case REFINEDtpt => val refineCls = symAtAddr.getOrElse(start, @@ -1419,7 +1438,7 @@ class TreeUnpickler(reader: TastyReader, val ownType = ctx.typeAssigner.processAppliedType(tree, tycon.tpe.safeAppliedTo(args.tpes)) tree.withType(postProcessFunction(ownType)) case ANNOTATEDtpt => - Annotated(readTpt(), readTerm()) + Annotated(readTpt(), readTree()) case LAMBDAtpt => val tparams = readParams[TypeDef](TYPEPARAM) val body = readTpt() @@ -1437,16 +1456,16 @@ class TreeUnpickler(reader: TastyReader, case HOLE => val idx = readNat() val tpe = readType() - val args = until(end)(readTerm()) - Hole(true, idx, args, EmptyTree, TypeTree(tpe)).withType(tpe) + val args = until(end)(readTree()) + Hole(true, idx, args, EmptyTree, tpe) case _ => - readPathTerm() + readPathTree() } assert(currentAddr == end, s"$start $currentAddr $end ${astTagToString(tag)}") result } - val tree = if (tag < firstLengthTreeTag) readSimpleTerm() else readLengthTerm() + val tree = if (tag < firstLengthTreeTag) readSimpleTree() else readLengthTree() setSpan(start, tree) } @@ -1471,10 +1490,10 @@ class TreeUnpickler(reader: TastyReader, val end = readEnd() val idx = readNat() val tpe = readType() - val args = until(end)(readTerm()) - Hole(false, idx, args, EmptyTree, TypeTree(tpe)).withType(tpe) + val args = 
until(end)(readTree()) + Hole(false, idx, args, EmptyTree, tpe) case _ => - if (isTypeTreeTag(nextByte)) readTerm() + if (isTypeTreeTag(nextByte)) readTree() else { val start = currentAddr val tp = readType() @@ -1499,9 +1518,9 @@ class TreeUnpickler(reader: TastyReader, val start = currentAddr assert(readByte() == CASEDEF) val end = readEnd() - val pat = readTerm() - val rhs = readTerm() - val guard = ifBefore(end)(readTerm(), EmptyTree) + val pat = readTree() + val rhs = readTree() + val guard = ifBefore(end)(readTree(), EmptyTree) setSpan(start, CaseDef(pat, guard, rhs)) } @@ -1635,7 +1654,7 @@ class TreeUnpickler(reader: TastyReader, pickling.println(i"no owner for $addr among $cs%, %") throw ex } - try search(children, NoSymbol) + try search(children, rootOwner) catch { case ex: TreeWithoutOwner => pickling.println(s"ownerTree = $ownerTree") diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index 50b0b875c1fc..deb022d3c261 100644 --- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -133,9 +133,8 @@ object Scala2Unpickler { /** Unpickle symbol table information descending from a class and/or module root * from an array of bytes. * @param bytes bytearray from which we unpickle - * @param classroot the top-level class which is unpickled, or NoSymbol if inapplicable - * @param moduleroot the top-level module class which is unpickled, or NoSymbol if inapplicable - * @param filename filename associated with bytearray, only used for error messages + * @param classRoot the top-level class which is unpickled, or NoSymbol if inapplicable + * @param moduleClassRoot the top-level module class which is unpickled, or NoSymbol if inapplicable */ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClassRoot: ClassDenotation)(ictx: Context) extends PickleBuffer(bytes, 0, -1) with ClassfileParser.Embedded { diff --git a/compiler/src/dotty/tools/dotc/coverage/Location.scala b/compiler/src/dotty/tools/dotc/coverage/Location.scala index c565c2bb1116..aa7a586d4b57 100644 --- a/compiler/src/dotty/tools/dotc/coverage/Location.scala +++ b/compiler/src/dotty/tools/dotc/coverage/Location.scala @@ -13,7 +13,7 @@ import dotty.tools.dotc.util.SourceFile * @param className name of the closest enclosing class * @param fullClassName fully qualified name of the closest enclosing class * @param classType "type" of the closest enclosing class: Class, Trait or Object - * @param method name of the closest enclosing method + * @param methodName name of the closest enclosing method * @param sourcePath absolute path of the source file */ final case class Location( diff --git a/compiler/src/dotty/tools/dotc/inlines/InlineReducer.scala b/compiler/src/dotty/tools/dotc/inlines/InlineReducer.scala index e1b2aaa02866..ebb76e9e9bf9 100644 --- a/compiler/src/dotty/tools/dotc/inlines/InlineReducer.scala +++ b/compiler/src/dotty/tools/dotc/inlines/InlineReducer.scala @@ -12,8 +12,6 @@ import NameKinds.{InlineAccessorName, InlineBinderName, InlineScrutineeName} import config.Printers.inlining import util.SimpleIdentityMap -import dotty.tools.dotc.transform.BetaReduce - import collection.mutable /** A utility class offering methods for rewriting inlined code */ @@ -150,44 +148,6 @@ class InlineReducer(inliner: Inliner)(using Context): binding1.withSpan(call.span) } - /** Rewrite an application - * - * 
((x1, ..., xn) => b)(e1, ..., en) - * - * to - * - * val/def x1 = e1; ...; val/def xn = en; b - * - * where `def` is used for call-by-name parameters. However, we shortcut any NoPrefix - * refs among the ei's directly without creating an intermediate binding. - * - * This variant of beta-reduction preserves the integrity of `Inlined` tree nodes. - */ - def betaReduce(tree: Tree)(using Context): Tree = tree match { - case Apply(Select(cl, nme.apply), args) if defn.isFunctionType(cl.tpe) => - val bindingsBuf = new mutable.ListBuffer[ValDef] - def recur(cl: Tree): Option[Tree] = cl match - case Block((ddef : DefDef) :: Nil, closure: Closure) if ddef.symbol == closure.meth.symbol => - ddef.tpe.widen match - case mt: MethodType if ddef.paramss.head.length == args.length => - Some(BetaReduce.reduceApplication(ddef, args, bindingsBuf)) - case _ => None - case Block(stats, expr) if stats.forall(isPureBinding) => - recur(expr).map(cpy.Block(cl)(stats, _)) - case Inlined(call, bindings, expr) if bindings.forall(isPureBinding) => - recur(expr).map(cpy.Inlined(cl)(call, bindings, _)) - case Typed(expr, tpt) => - recur(expr) - case _ => None - recur(cl) match - case Some(reduced) => - seq(bindingsBuf.result(), reduced).withSpan(tree.span) - case None => - tree - case _ => - tree - } - /** The result type of reducing a match. It consists optionally of a list of bindings * for the pattern-bound variables and the RHS of the selected case. * Returns `None` if no case was selected. diff --git a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala index 76494c1bf405..73fa2a2871a2 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala @@ -21,7 +21,9 @@ import collection.mutable import reporting.trace import util.Spans.Span import dotty.tools.dotc.transform.Splicer +import dotty.tools.dotc.transform.BetaReduce import quoted.QuoteUtils +import staging.StagingLevel.{level, spliceContext} import scala.annotation.constructorOnly /** General support for inlining */ @@ -199,17 +201,24 @@ class Inliner(val call: tpd.Tree)(using Context): * to `buf`. 
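// Illustrative sketch (not part of the patch): the rewrite formerly done by
// `betaReduce` here and now delegated to transform.BetaReduce,
//   ((x1, ..., xn) => b)(e1, ..., en)  ~~>  val/def x1 = e1; ...; b
// (`def` for by-name parameters). A source-level instance, names invented:
val add = (x: Int, y: Int) => x + y
val three = add(1, 2) // beta-reduces to { val x = 1; val y = 2; x + y } when inlined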
* @param name the name of the parameter * @param formal the type of the parameter - * @param arg the argument corresponding to the parameter + * @param arg0 the argument corresponding to the parameter * @param buf the buffer to which the definition should be appended */ private[inlines] def paramBindingDef(name: Name, formal: Type, arg0: Tree, buf: DefBuffer)(using Context): ValOrDefDef = { val isByName = formal.dealias.isInstanceOf[ExprType] - val arg = arg0 match { - case Typed(arg1, tpt) if tpt.tpe.isRepeatedParam && arg1.tpe.derivesFrom(defn.ArrayClass) => - wrapArray(arg1, arg0.tpe.elemType) - case _ => arg0 - } + val arg = + def dropNameArg(arg: Tree): Tree = arg match + case NamedArg(_, arg1) => arg1 + case SeqLiteral(elems, tpt) => + cpy.SeqLiteral(arg)(elems.mapConserve(dropNameArg), tpt) + case _ => arg + arg0 match + case Typed(seq, tpt) if tpt.tpe.isRepeatedParam => + if seq.tpe.derivesFrom(defn.ArrayClass) then wrapArray(dropNameArg(seq), arg0.tpe.elemType) + else cpy.Typed(arg0)(dropNameArg(seq), tpt) + case arg0 => + dropNameArg(arg0) val argtpe = arg.tpe.dealiasKeepAnnots.translateFromRepeated(toArray = false) val argIsBottom = argtpe.isBottomTypeAfterErasure val bindingType = @@ -477,6 +486,7 @@ class Inliner(val call: tpd.Tree)(using Context): /** Register type of leaf node */ private def registerLeaf(tree: Tree): Unit = tree match case _: This | _: Ident | _: TypeTree => registerTypes.traverse(tree.typeOpt) + case tree: Quote => registerTypes.traverse(tree.bodyType) case _ => /** Make `tree` part of inlined expansion. This means its owner has to be changed @@ -806,30 +816,31 @@ class Inliner(val call: tpd.Tree)(using Context): super.typedValDef(vdef1, sym) override def typedApply(tree: untpd.Apply, pt: Type)(using Context): Tree = - def cancelQuotes(tree: Tree): Tree = - tree match - case Quoted(Spliced(inner)) => inner - case _ => tree val locked = ctx.typerState.ownedVars - val res = cancelQuotes(constToLiteral(betaReduce(super.typedApply(tree, pt)))) match { - case res: Apply if res.symbol == defn.QuotedRuntime_exprSplice - && StagingContext.level == 0 - && !hasInliningErrors => - val expanded = expandMacro(res.args.head, tree.srcPos) - transform.TreeChecker.checkMacroGeneratedTree(res, expanded) - typedExpr(expanded) // Inline calls and constant fold code generated by the macro - case res => - specializeEq(inlineIfNeeded(res, pt, locked)) - } - res + specializeEq(inlineIfNeeded(constToLiteral(BetaReduce(super.typedApply(tree, pt))), pt, locked)) override def typedTypeApply(tree: untpd.TypeApply, pt: Type)(using Context): Tree = val locked = ctx.typerState.ownedVars - val tree1 = inlineIfNeeded(constToLiteral(betaReduce(super.typedTypeApply(tree, pt))), pt, locked) - if tree1.symbol.isQuote then + val tree1 = inlineIfNeeded(constToLiteral(BetaReduce(super.typedTypeApply(tree, pt))), pt, locked) + if tree1.symbol == defn.QuotedTypeModule_of then ctx.compilationUnit.needsStaging = true tree1 + override def typedQuote(tree: untpd.Quote, pt: Type)(using Context): Tree = + super.typedQuote(tree, pt) match + case Quote(Splice(inner), _) => inner + case tree1 => + ctx.compilationUnit.needsStaging = true + tree1 + + override def typedSplice(tree: untpd.Splice, pt: Type)(using Context): Tree = + super.typedSplice(tree, pt) match + case tree1 @ Splice(expr) if level == 0 && !hasInliningErrors => + val expanded = expandMacro(expr, tree1.srcPos) + transform.TreeChecker.checkMacroGeneratedTree(tree1, expanded) + typedExpr(expanded) // Inline calls and constant fold code generated 
by the macro + case tree1 => tree1 + override def typedMatch(tree: untpd.Match, pt: Type)(using Context): Tree = val tree1 = if tree.isInline then @@ -961,29 +972,24 @@ class Inliner(val call: tpd.Tree)(using Context): bindingOfSym(binding.symbol) = binding } - val countRefs = new TreeTraverser { - override def traverse(t: Tree)(using Context) = { - def updateRefCount(sym: Symbol, inc: Int) = - for (x <- refCount.get(sym)) refCount(sym) = x + inc - def updateTermRefCounts(t: Tree) = - t.typeOpt.foreachPart { - case ref: TermRef => updateRefCount(ref.symbol, 2) // can't be inlined, so make sure refCount is at least 2 - case _ => - } - - t match { - case t: RefTree => - updateRefCount(t.symbol, 1) - updateTermRefCounts(t) - case _: New | _: TypeTree => - updateTermRefCounts(t) - case _ => - } - traverseChildren(t) + def updateRefCount(sym: Symbol, inc: Int) = + for (x <- refCount.get(sym)) refCount(sym) = x + inc + def updateTermRefCounts(tree: Tree) = + tree.typeOpt.foreachPart { + case ref: TermRef => updateRefCount(ref.symbol, 2) // can't be inlined, so make sure refCount is at least 2 + case _ => } - } - countRefs.traverse(tree) - for (binding <- bindings) countRefs.traverse(binding) + def countRefs(tree: Tree) = + tree.foreachSubTree { + case t: RefTree => + updateRefCount(t.symbol, 1) + updateTermRefCounts(t) + case t @ (_: New | _: TypeTree) => + updateTermRefCounts(t) + case _ => + } + countRefs(tree) + for (binding <- bindings) countRefs(binding) def retain(boundSym: Symbol) = { refCount.get(boundSym) match { @@ -1006,7 +1012,7 @@ class Inliner(val call: tpd.Tree)(using Context): super.transform(t1) case t: Apply => val t1 = super.transform(t) - if (t1 `eq` t) t else reducer.betaReduce(t1) + if (t1 `eq` t) t else BetaReduce(t1) case Block(Nil, expr) => super.transform(expr) case _ => @@ -1025,9 +1031,9 @@ class Inliner(val call: tpd.Tree)(using Context): } private def expandMacro(body: Tree, splicePos: SrcPos)(using Context) = { - assert(StagingContext.level == 0) + assert(level == 0) val inlinedFrom = enclosingInlineds.last - val dependencies = macroDependencies(body) + val dependencies = macroDependencies(body)(using spliceContext) val suspendable = ctx.compilationUnit.isSuspendable if dependencies.nonEmpty && !ctx.reporter.errorsReported then for sym <- dependencies do @@ -1057,28 +1063,12 @@ class Inliner(val call: tpd.Tree)(using Context): */ private def macroDependencies(tree: Tree)(using Context) = new TreeAccumulator[List[Symbol]] { - private var level = -1 override def apply(syms: List[Symbol], tree: tpd.Tree)(using Context): List[Symbol] = - if level != -1 then foldOver(syms, tree) - else tree match { - case tree: RefTree if tree.isTerm && tree.symbol.isDefinedInCurrentRun && !tree.symbol.isLocal => + tree match { + case tree: RefTree if tree.isTerm && level == -1 && tree.symbol.isDefinedInCurrentRun && !tree.symbol.isLocal => foldOver(tree.symbol :: syms, tree) - case Quoted(body) => - level += 1 - try apply(syms, body) - finally level -= 1 - case Spliced(body) => - level -= 1 - try apply(syms, body) - finally level += 1 - case SplicedType(body) => - level -= 1 - try apply(syms, body) - finally level += 1 - case _: TypTree => - syms - case _ => - foldOver(syms, tree) + case _: TypTree => syms + case _ => foldOver(syms, tree) } }.apply(Nil, tree) end Inliner diff --git a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala index 8110fd2de195..36dc8a642afc 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala 
+++ b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala @@ -14,6 +14,7 @@ import ErrorReporting.errorTree import dotty.tools.dotc.util.{SourceFile, SourcePosition, SrcPos} import parsing.Parsers.Parser import transform.{PostTyper, Inlining, CrossVersionChecks} +import staging.StagingLevel import collection.mutable import reporting.trace @@ -56,7 +57,7 @@ object Inlines: case _ => isInlineable(tree.symbol) && !tree.tpe.widenTermRefExpr.isInstanceOf[MethodOrPoly] - && StagingContext.level == 0 + && StagingLevel.level == 0 && ( ctx.phase == Phases.inliningPhase || (ctx.phase == Phases.typerPhase && needsTransparentInlining(tree)) @@ -380,8 +381,7 @@ object Inlines: /** Expand call to scala.compiletime.codeOf */ def codeOf(arg: Tree, pos: SrcPos)(using Context): Tree = - val ctx1 = ctx.fresh.setSetting(ctx.settings.color, "never") - Literal(Constant(arg.show(using ctx1))).withSpan(pos.span) + Literal(Constant(arg.show(using ctx.withoutColors))).withSpan(pos.span) end Intrinsics /** Produces an inlined version of `call` via its `inlined` method. diff --git a/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala b/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala index 85293d4a82d7..060c8d21f390 100644 --- a/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala +++ b/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala @@ -17,11 +17,12 @@ import NameKinds.{InlineAccessorName, UniqueInlineName} import inlines.Inlines import NameOps._ import Annotations._ -import transform.{AccessProxies, PCPCheckAndHeal, Splicer} +import transform.{AccessProxies, Splicer} +import staging.CrossStageSafety import transform.SymUtils.* import config.Printers.inlining import util.Property -import dotty.tools.dotc.transform.TreeMapWithStages._ +import staging.StagingLevel object PrepareInlineable { import tpd._ @@ -73,7 +74,7 @@ object PrepareInlineable { !sym.isContainedIn(inlineSym) && !(sym.isStableMember && sym.info.widenTermRefExpr.isInstanceOf[ConstantType]) && !sym.isInlineMethod && - (Inlines.inInlineMethod || StagingContext.level > 0) + (Inlines.inInlineMethod || StagingLevel.level > 0) def preTransform(tree: Tree)(using Context): Tree @@ -85,14 +86,7 @@ object PrepareInlineable { } override def transform(tree: Tree)(using Context): Tree = - inContext(stagingContext(tree)) { - postTransform(super.transform(preTransform(tree))) - } - - private def stagingContext(tree: Tree)(using Context): Context = tree match - case tree: Apply if tree.symbol.isQuote => StagingContext.quoteContext - case tree: Apply if tree.symbol.isExprSplice => StagingContext.spliceContext - case _ => ctx + postTransform(super.transform(preTransform(tree))) } /** Direct approach: place the accessor with the accessed symbol. This has the @@ -153,7 +147,7 @@ object PrepareInlineable { val qual = qualifier(refPart) inlining.println(i"adding receiver passing inline accessor for $tree/$refPart -> (${qual.tpe}, $refPart: ${refPart.getClass}, $argss%, %") - // Need to dealias in order to cagtch all possible references to abstracted over types in + // Need to dealias in order to catch all possible references to abstracted over types in // substitutions val dealiasMap = new TypeMap { def apply(t: Type) = mapOver(t.dealias) @@ -255,7 +249,7 @@ object PrepareInlineable { /** Register inline info for given inlineable method `sym`. 
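// Illustrative sketch (not part of the patch): the level-0 splice shape that
// the surrounding code checks and expands as a macro. Standard scala.quoted
// API; `twice` and `twiceImpl` are invented names:
import scala.quoted.*
inline def twice(inline x: Int): Int = ${ twiceImpl('x) } // level-0 splice => macro call
def twiceImpl(x: Expr[Int])(using Quotes): Expr[Int] = '{ $x + $x }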
* - * @param sym The symbol denotation of the inlineable method for which info is registered + * @param inlined The symbol denotation of the inlineable method for which info is registered * @param treeExpr A function that computes the tree to be inlined, given a context * This tree may still refer to non-public members. * @param ctx The context to use for evaluating `treeExpr`. It needs @@ -289,11 +283,11 @@ object PrepareInlineable { if (inlined.is(Macro) && !ctx.isAfterTyper) { def checkMacro(tree: Tree): Unit = tree match { - case Spliced(code) => + case Splice(code) => if (code.symbol.flags.is(Inline)) report.error("Macro cannot be implemented with an `inline` method", code.srcPos) Splicer.checkValidMacroBody(code) - new PCPCheckAndHeal(freshStagingContext).transform(body) // Ignore output, only check PCP + (new CrossStageSafety).transform(body) // Ignore output, only check cross-stage safety case Block(List(stat), Literal(Constants.Constant(()))) => checkMacro(stat) case Block(Nil, expr) => checkMacro(expr) case Typed(expr, _) => checkMacro(expr) diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala index daeebcbcc17c..6ec896dcb200 100644 --- a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala @@ -20,7 +20,8 @@ import StdNames._ import reporting._ import dotty.tools.dotc.util.SourceFile import util.Spans._ -import scala.collection.mutable.ListBuffer + +import scala.collection.mutable.{ListBuffer, LinkedHashMap} object JavaParsers { @@ -96,8 +97,12 @@ object JavaParsers { def javaLangDot(name: Name): Tree = Select(javaDot(nme.lang), name) + /** Tree representing `java.lang.Object` */ def javaLangObject(): Tree = javaLangDot(tpnme.Object) + /** Tree representing `java.lang.Record` */ + def javaLangRecord(): Tree = javaLangDot(tpnme.Record) + def arrayOf(tpt: Tree): AppliedTypeTree = AppliedTypeTree(scalaDot(tpnme.Array), List(tpt)) @@ -555,6 +560,14 @@ object JavaParsers { def definesInterface(token: Int): Boolean = token == INTERFACE || token == AT + /** If the next token is the identifier "record", convert it into the RECORD token. + * This makes it easier to handle records in various parts of the code, + * in particular when a `parentToken` is passed to some functions. + */ + def adaptRecordIdentifier(): Unit = + if in.token == IDENTIFIER && in.name == jnme.RECORDid then + in.token = RECORD + def termDecl(start: Offset, mods: Modifiers, parentToken: Int, parentTParams: List[TypeDef]): List[Tree] = { val inInterface = definesInterface(parentToken) val tparams = if (in.token == LT) typeParams(Flags.JavaDefined | Flags.Param) else List() @@ -581,6 +594,16 @@ object JavaParsers { TypeTree(), methodBody()).withMods(mods) } } + } else if (in.token == LBRACE && rtptName != nme.EMPTY && parentToken == RECORD) { + /* + record RecordName(T param1, ...) 
{ + RecordName { // <- here + // methodBody + } + } + */ + methodBody() + Nil } else { var mods1 = mods @@ -717,12 +740,11 @@ object JavaParsers { ValDef(name, tpt2, if (mods.is(Flags.Param)) EmptyTree else unimplementedExpr).withMods(mods1) } - def memberDecl(start: Offset, mods: Modifiers, parentToken: Int, parentTParams: List[TypeDef]): List[Tree] = in.token match { - case CLASS | ENUM | INTERFACE | AT => - typeDecl(start, if (definesInterface(parentToken)) mods | Flags.JavaStatic else mods) + def memberDecl(start: Offset, mods: Modifiers, parentToken: Int, parentTParams: List[TypeDef]): List[Tree] = in.token match + case CLASS | ENUM | RECORD | INTERFACE | AT => + typeDecl(start, if definesInterface(parentToken) then mods | Flags.JavaStatic else mods) case _ => termDecl(start, mods, parentToken, parentTParams) - } def makeCompanionObject(cdef: TypeDef, statics: List[Tree]): Tree = atSpan(cdef.span) { @@ -804,6 +826,51 @@ object JavaParsers { addCompanionObject(statics, cls) } + def recordDecl(start: Offset, mods: Modifiers): List[Tree] = + accept(RECORD) + val nameOffset = in.offset + val name = identForType() + val tparams = typeParams() + val header = formalParams() + val superclass = javaLangRecord() // records always extend java.lang.Record + val interfaces = interfacesOpt() // records may implement interfaces + val (statics, body) = typeBody(RECORD, name, tparams) + + // We need to generate accessors for every param, if no method with the same name is already defined + + var fieldsByName = header.map(v => (v.name, (v.tpt, v.mods.annotations))).to(LinkedHashMap) + + for case DefDef(name, paramss, _, _) <- body + if paramss.isEmpty && fieldsByName.contains(name) + do + fieldsByName -= name + end for + + val accessors = + (for (name, (tpt, annots)) <- fieldsByName yield + DefDef(name, Nil, tpt, unimplementedExpr) + .withMods(Modifiers(Flags.JavaDefined | Flags.Method | Flags.Synthetic)) + ).toList + + // generate the canonical constructor + val canonicalConstructor = + DefDef(nme.CONSTRUCTOR, joinParams(tparams, List(header)), TypeTree(), EmptyTree) + .withMods(Modifiers(Flags.JavaDefined | Flags.Synthetic, mods.privateWithin)) + + // return the trees + val recordTypeDef = atSpan(start, nameOffset) { + TypeDef(name, + makeTemplate( + parents = superclass :: interfaces, + stats = canonicalConstructor :: accessors ::: body, + tparams = tparams, + true + ) + ).withMods(mods) + } + addCompanionObject(statics, recordTypeDef) + end recordDecl + def interfaceDecl(start: Offset, mods: Modifiers): List[Tree] = { accept(INTERFACE) val nameOffset = in.offset @@ -846,7 +913,8 @@ object JavaParsers { else if (in.token == SEMI) in.nextToken() else { - if (in.token == ENUM || definesInterface(in.token)) mods |= Flags.JavaStatic + adaptRecordIdentifier() + if (in.token == ENUM || in.token == RECORD || definesInterface(in.token)) mods |= Flags.JavaStatic val decls = memberDecl(start, mods, parentToken, parentTParams) (if (mods.is(Flags.JavaStatic) || inInterface && !(decls exists (_.isInstanceOf[DefDef]))) statics @@ -947,13 +1015,13 @@ object JavaParsers { } } - def typeDecl(start: Offset, mods: Modifiers): List[Tree] = in.token match { + def typeDecl(start: Offset, mods: Modifiers): List[Tree] = in.token match case ENUM => enumDecl(start, mods) case INTERFACE => interfaceDecl(start, mods) case AT => annotationDecl(start, mods) case CLASS => classDecl(start, mods) + case RECORD => recordDecl(start, mods) case _ => in.nextToken(); syntaxError(em"illegal start of type declaration", skipIt = true); 
List(errorTypeTree) - } def tryConstant: Option[Constant] = { val negate = in.token match { @@ -1004,6 +1072,7 @@ object JavaParsers { if (in.token != EOF) { val start = in.offset val mods = modifiers(inInterface = false) + adaptRecordIdentifier() // needed for typeDecl buf ++= typeDecl(start, mods) } } diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaTokens.scala b/compiler/src/dotty/tools/dotc/parsing/JavaTokens.scala index 3e73b6d95adb..2b7882173e00 100644 --- a/compiler/src/dotty/tools/dotc/parsing/JavaTokens.scala +++ b/compiler/src/dotty/tools/dotc/parsing/JavaTokens.scala @@ -41,6 +41,9 @@ object JavaTokens extends TokensCommon { inline val SWITCH = 133; enter(SWITCH, "switch") inline val ASSERT = 134; enter(ASSERT, "assert") + /** contextual keywords (turned into keywords in certain conditions, see JLS 3.9 of Java 9+) */ + inline val RECORD = 135; enter(RECORD, "record") + /** special symbols */ inline val EQEQ = 140 inline val BANGEQ = 141 diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 6c494db78c7f..7a29ac3f7a38 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -190,6 +190,8 @@ object Parsers { def isPureArrow(name: Name): Boolean = isIdent(name) && Feature.pureFunsEnabled def isPureArrow: Boolean = isPureArrow(nme.PUREARROW) || isPureArrow(nme.PURECTXARROW) def isErased = isIdent(nme.erased) && in.erasedEnabled + // Are we seeing an `erased` soft keyword that will not be an identifier? + def isErasedKw = isErased && in.isSoftModifierInParamModifierPosition def isSimpleLiteral = simpleLiteralTokens.contains(in.token) || isIdent(nme.raw.MINUS) && numericLitTokens.contains(in.lookahead.token) @@ -463,6 +465,15 @@ object Parsers { case _ => fail() + /** Checks that tuples don't contain a parameter. */ + def checkNonParamTuple(t: Tree) = t match + case Tuple(ts) => ts.collectFirst { + case param: ValDef => + syntaxError(em"invalid parameter definition syntax in tuple value", param.span) + } + case _ => + + /** Convert (qual)ident to type identifier */ def convertToTypeId(tree: Tree): Tree = tree match { @@ -959,29 +970,6 @@ object Parsers { isArrowIndent() else false - /** Under captureChecking language import: is the following token sequence a - * capture set `{ref1, ..., refN}` followed by a token that can start a type? 
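// Illustrative sketch (not part of the patch): what `recordDecl` above
// synthesizes for a Java source such as
//   record Point(int x, int y) { }
// expressed as a rough Scala equivalent (the java.lang.Record parent and the
// synthetic canonical constructor are elided; names invented):
class Point(x0: Int, y0: Int):
  def x(): Int = x0 // synthetic accessor, skipped if the record body defines one
  def y(): Int = y0
// A companion object is also emitted to hold any static members.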
- */ - def followingIsCaptureSet(): Boolean = - Feature.ccEnabled && { - val lookahead = in.LookaheadScanner() - def followingIsTypeStart() = - lookahead.nextToken() - canStartInfixTypeTokens.contains(lookahead.token) - || lookahead.token == LBRACKET - def recur(): Boolean = - (lookahead.isIdent || lookahead.token == THIS) && { - lookahead.nextToken() - if lookahead.token == COMMA then - lookahead.nextToken() - recur() - else - lookahead.token == RBRACE && followingIsTypeStart() - } - lookahead.nextToken() - if lookahead.token == RBRACE then followingIsTypeStart() else recur() - } - /* --------- OPERAND/OPERATOR STACK --------------------------------------- */ var opStack: List[OpInfo] = Nil @@ -1255,7 +1243,7 @@ object Parsers { } } in.nextToken() - Quote(t) + Quote(t, Nil) } else if !in.featureEnabled(Feature.symbolLiterals) then @@ -1426,13 +1414,30 @@ object Parsers { */ def toplevelTyp(): Tree = rejectWildcardType(typ()) - private def isFunction(tree: Tree): Boolean = tree match { - case Parens(tree1) => isFunction(tree1) - case Block(Nil, tree1) => isFunction(tree1) - case _: Function => true - case _ => false + private def getFunction(tree: Tree): Option[Function] = tree match { + case Parens(tree1) => getFunction(tree1) + case Block(Nil, tree1) => getFunction(tree1) + case t: Function => Some(t) + case _ => None } + private def checkFunctionNotErased(f: Function, context: String) = + def fail(span: Span) = + syntaxError(em"Implementation restriction: erased parameters are not supported in $context", span) + // erased parameter in type + val hasErasedParam = f match + case f: FunctionWithMods => f.hasErasedParams + case _ => false + if hasErasedParam then + fail(f.span) + // erased parameter in term + val hasErasedMods = f.args.collectFirst { + case v: ValDef if v.mods.is(Flags.Erased) => v + } + hasErasedMods match + case Some(param) => fail(param.span) + case _ => + /** CaptureRef ::= ident | `this` */ def captureRef(): Tree = @@ -1447,17 +1452,21 @@ object Parsers { if in.token == RBRACE then Nil else commaSeparated(captureRef) } + def capturesAndResult(core: () => Tree): Tree = + if Feature.ccEnabled && in.token == LBRACE && in.offset == in.lastOffset + then CapturesAndResult(captureSet(), core()) + else core() + /** Type ::= FunType * | HkTypeParamClause ‘=>>’ Type * | FunParamClause ‘=>>’ Type * | MatchType * | InfixType - * | CaptureSet Type -- under captureChecking * FunType ::= (MonoFunType | PolyFunType) * MonoFunType ::= FunTypeArgs (‘=>’ | ‘?=>’) Type - * | (‘->’ | ‘?->’ ) Type -- under pureFunctions + * | (‘->’ | ‘?->’ ) [CaptureSet] Type -- under pureFunctions * PolyFunType ::= HKTypeParamClause '=>' Type - * | HKTypeParamClause ‘->’ Type -- under pureFunctions + * | HKTypeParamClause ‘->’ [CaptureSet] Type -- under pureFunctions * FunTypeArgs ::= InfixType * | `(' [ [ ‘[using]’ ‘['erased'] FunArgType {`,' FunArgType } ] `)' * | '(' [ ‘[using]’ ‘['erased'] TypedFunParam {',' TypedFunParam } ')' @@ -1465,13 +1474,17 @@ object Parsers { def typ(): Tree = val start = in.offset var imods = Modifiers() + var erasedArgs: ListBuffer[Boolean] = ListBuffer() def functionRest(params: List[Tree]): Tree = val paramSpan = Span(start, in.lastOffset) atSpan(start, in.offset) { var token = in.token + var isPure = false if isPureArrow(nme.PUREARROW) then + isPure = true token = ARROW else if isPureArrow(nme.PURECTXARROW) then + isPure = true token = CTXARROW else if token == TLARROW then if !imods.flags.isEmpty || params.isEmpty then @@ -1490,16 +1503,16 @@ object Parsers { else 
accept(ARROW) - val resultType = typ() + val resultType = if isPure then capturesAndResult(typ) else typ() if token == TLARROW then for case ValDef(_, tpt, _) <- params do if isByNameType(tpt) then syntaxError(em"parameter of type lambda may not be call-by-name", tpt.span) TermLambdaTypeTree(params.asInstanceOf[List[ValDef]], resultType) - else if imods.isOneOf(Given | Erased | Impure) then + else if imods.isOneOf(Given | Impure) || erasedArgs.contains(true) then if imods.is(Given) && params.isEmpty then syntaxError(em"context function types require at least one parameter", paramSpan) - FunctionWithMods(params, resultType, imods) + FunctionWithMods(params, resultType, imods, erasedArgs.toList) else if !ctx.settings.YkindProjector.isDefault then val (newParams :+ newResultType, tparams) = replaceKindProjectorPlaceholders(params :+ resultType): @unchecked lambdaAbstract(tparams, Function(newParams, newResultType)) @@ -1517,17 +1530,30 @@ object Parsers { functionRest(Nil) } else { - if isErased then imods = addModifier(imods) val paramStart = in.offset + def addErased() = + erasedArgs.addOne(isErasedKw) + if isErasedKw then { in.skipToken(); } + addErased() val ts = in.currentRegion.withCommasExpected { funArgType() match case Ident(name) if name != tpnme.WILDCARD && in.isColon => isValParamList = true + def funParam(start: Offset, mods: Modifiers) = { + atSpan(start) { + addErased() + typedFunParam(in.offset, ident(), imods) + } + } commaSeparatedRest( typedFunParam(paramStart, name.toTermName, imods), - () => typedFunParam(in.offset, ident(), imods)) + () => funParam(in.offset, imods)) case t => - commaSeparatedRest(t, funArgType) + def funParam() = { + addErased() + funArgType() + } + commaSeparatedRest(t, funParam) } accept(RPAREN) if isValParamList || in.isArrow || isPureArrow then @@ -1558,30 +1584,33 @@ object Parsers { val arrowOffset = in.skipToken() val body = toplevelTyp() atSpan(start, arrowOffset) { - if (isFunction(body)) - PolyFunction(tparams, body) - else { - syntaxError(em"Implementation restriction: polymorphic function types must have a value parameter", arrowOffset) - Ident(nme.ERROR.toTypeName) + getFunction(body) match { + case Some(f) => + checkFunctionNotErased(f, "poly function") + PolyFunction(tparams, body) + case None => + syntaxError(em"Implementation restriction: polymorphic function types must have a value parameter", arrowOffset) + Ident(nme.ERROR.toTypeName) } } } else { accept(TLARROW); typ() } } - else if in.token == LBRACE && followingIsCaptureSet() then - CapturingTypeTree(captureSet(), typ()) else if (in.token == INDENT) enclosed(INDENT, typ()) else infixType() in.token match - case ARROW | CTXARROW => functionRest(t :: Nil) + case ARROW | CTXARROW => + erasedArgs.addOne(false) + functionRest(t :: Nil) case MATCH => matchType(t) case FORSOME => syntaxError(ExistentialTypesNoLongerSupported()); t case _ => if isPureArrow then + erasedArgs.addOne(false) functionRest(t :: Nil) else - if (imods.is(Erased) && !t.isInstanceOf[FunctionWithMods]) + if (erasedArgs.contains(true) && !t.isInstanceOf[FunctionWithMods]) syntaxError(ErasedTypesCanOnlyBeFunctionTypes(), implicitKwPos(start)) t end typ @@ -1636,6 +1665,7 @@ object Parsers { if in.token == LPAREN then funParamClause() :: funParamClauses() else Nil /** InfixType ::= RefinedType {id [nl] RefinedType} + * | RefinedType `^` */ def infixType(): Tree = infixTypeRest(refinedType()) @@ -1643,19 +1673,41 @@ object Parsers { infixOps(t, canStartInfixTypeTokens, refinedTypeFn, Location.ElseWhere, ParseKind.Type, 
isOperator = !followingIsVararg() && !isPureArrow) - /** RefinedType ::= WithType {[nl] Refinement} + /** RefinedType ::= WithType {[nl] Refinement} [`^` CaptureSet] */ val refinedTypeFn: Location => Tree = _ => refinedType() def refinedType() = refinedTypeRest(withType()) + /** Disambiguation: a `^` is treated as a postfix operator meaning `^{cap}` + * if followed by `{`, `->`, or `?->`, + * or followed by a new line (significant or not), + * or followed by a token that cannot start an infix type. + * Otherwise it is treated as an infix operator. + */ + private def isCaptureUpArrow = + val ahead = in.lookahead + ahead.token == LBRACE + || ahead.isIdent(nme.PUREARROW) + || ahead.isIdent(nme.PURECTXARROW) + || !canStartInfixTypeTokens.contains(ahead.token) + || ahead.lineOffset > 0 + def refinedTypeRest(t: Tree): Tree = { argumentStart() - if (in.isNestedStart) + if in.isNestedStart then refinedTypeRest(atSpan(startOffset(t)) { RefinedTypeTree(rejectWildcardType(t), refinement(indentOK = true)) }) - else t + else if Feature.ccEnabled && in.isIdent(nme.UPARROW) && isCaptureUpArrow then + val upArrowStart = in.offset + in.nextToken() + def cs = + if in.token == LBRACE then captureSet() + else atSpan(upArrowStart)(captureRoot) :: Nil + makeRetaining(t, cs, tpnme.retains) + else + t } /** WithType ::= AnnotType {`with' AnnotType} (deprecated) @@ -1698,10 +1750,10 @@ object Parsers { def splice(isType: Boolean): Tree = val start = in.offset atSpan(in.offset) { + val inPattern = (staged & StageKind.QuotedPattern) != 0 val expr = if (in.name.length == 1) { in.nextToken() - val inPattern = (staged & StageKind.QuotedPattern) != 0 withinStaged(StageKind.Spliced)(if (inPattern) inBraces(pattern()) else stagedBlock()) } else atSpan(in.offset + 1) { @@ -1717,6 +1769,8 @@ object Parsers { else "To use a given Type[T] in a quote just write T directly" syntaxError(em"$msg\n\nHint: $hint", Span(start, in.lastOffset)) Ident(nme.ERROR.toTypeName) + else if inPattern then + SplicePattern(expr, Nil) else Splice(expr) } @@ -1882,31 +1936,10 @@ object Parsers { def paramTypeOf(core: () => Tree): Tree = if in.token == ARROW || isPureArrow(nme.PUREARROW) then val isImpure = in.token == ARROW - val tp = atSpan(in.skipToken()) { ByNameTypeTree(core()) } - if isImpure && Feature.pureFunsEnabled then ImpureByNameTypeTree(tp) else tp - else if in.token == LBRACE && followingIsCaptureSet() then - val start = in.offset - val cs = captureSet() - val endCsOffset = in.lastOffset - val startTpOffset = in.offset - val tp = paramTypeOf(core) - val tp1 = tp match - case ImpureByNameTypeTree(tp1) => - syntaxError(em"explicit captureSet is superfluous for impure call-by-name type", start) - tp1 - case CapturingTypeTree(_, tp1: ByNameTypeTree) => - syntaxError(em"only one captureSet is allowed here", start) - tp1 - case _: ByNameTypeTree if startTpOffset > endCsOffset => - report.warning( - i"""Style: by-name `->` should immediately follow closing `}` of capture set - |to avoid confusion with function type. 
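Following the disambiguation rule documented above, a postfix `^` followed by `{`, a pure arrow, or a token that cannot start an infix type is parsed via `makeRetaining`, and a bare `^` stands for `^{cap}` (the `captureRoot` default). An illustrative sketch under `captureChecking`, with made-up `FileSystem` and `Logger` classes:

    import language.experimental.captureChecking

    class FileSystem
    class Logger(fs: FileSystem^)                  // bare `^` is shorthand for ^{cap}
    def mkLogger(fs: FileSystem^): Logger^{fs} =   // `^{fs}`: the result captures exactly {fs}
      Logger(fs)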
- |That is, `{c}-> T` instead of `{c} -> T`.""", - source.atSpan(Span(startTpOffset, startTpOffset))) - tp - case _ => - tp - CapturingTypeTree(cs, tp1) + atSpan(in.skipToken()): + val tp = if isImpure then core() else capturesAndResult(core) + if isImpure && Feature.pureFunsEnabled then ImpureByNameTypeTree(tp) + else ByNameTypeTree(tp) else core() @@ -1919,13 +1952,13 @@ object Parsers { /** FunArgType ::= Type * | `=>' Type - * | [CaptureSet] `->' Type + * | `->' [CaptureSet] Type */ val funArgType: () => Tree = () => paramTypeOf(typ) /** ParamType ::= ParamValueType * | `=>' ParamValueType - * | [CaptureSet] `->' ParamValueType + * | `->' [CaptureSet] ParamValueType */ def paramType(): Tree = paramTypeOf(paramValueType) @@ -1993,8 +2026,6 @@ object Parsers { def typeDependingOn(location: Location): Tree = if location.inParens then typ() else if location.inPattern then rejectWildcardType(refinedType()) - else if in.token == LBRACE && followingIsCaptureSet() then - CapturingTypeTree(captureSet(), infixType()) else infixType() /* ----------- EXPRESSIONS ------------------------------------------------ */ @@ -2066,7 +2097,7 @@ object Parsers { * | ‘inline’ InfixExpr MatchClause * Bindings ::= `(' [Binding {`,' Binding}] `)' * Binding ::= (id | `_') [`:' Type] - * Ascription ::= `:' [CaptureSet] InfixType + * Ascription ::= `:' InfixType * | `:' Annotation {Annotation} * | `:' `_' `*' * Catches ::= ‘catch’ (Expr | ExprCaseClause) @@ -2079,24 +2110,22 @@ object Parsers { def expr(location: Location): Tree = { val start = in.offset - def isSpecialClosureStart = in.lookahead.isIdent(nme.erased) && in.erasedEnabled in.token match case IMPLICIT => closure(start, location, modifiers(BitSet(IMPLICIT))) - case LPAREN if isSpecialClosureStart => - closure(start, location, Modifiers()) case LBRACKET => val start = in.offset val tparams = typeParamClause(ParamOwner.TypeParam) val arrowOffset = accept(ARROW) val body = expr(location) atSpan(start, arrowOffset) { - if (isFunction(body)) - PolyFunction(tparams, body) - else { - syntaxError(em"Implementation restriction: polymorphic function literals must have a value parameter", arrowOffset) - errorTermTree(arrowOffset) - } + getFunction(body) match + case Some(f) => + checkFunctionNotErased(f, "poly function") + PolyFunction(tparams, f) + case None => + syntaxError(em"Implementation restriction: polymorphic function literals must have a value parameter", arrowOffset) + errorTermTree(arrowOffset) } case _ => val saved = placeholderParams @@ -2114,7 +2143,9 @@ object Parsers { else if isWildcard(t) then placeholderParams = placeholderParams ::: saved t - else wrapPlaceholders(t) + else + checkNonParamTuple(t) + wrapPlaceholders(t) } def expr1(location: Location = Location.ElseWhere): Tree = in.token match @@ -2306,10 +2337,8 @@ object Parsers { if in.token == RPAREN then Nil else - var mods1 = mods - if isErased then mods1 = addModifier(mods1) try - commaSeparated(() => binding(mods1)) + commaSeparated(() => binding(mods)) finally accept(RPAREN) else { @@ -2333,10 +2362,13 @@ object Parsers { (atSpan(start) { makeParameter(name, t, mods) }) :: Nil } - /** Binding ::= (id | `_') [`:' Type] + /** Binding ::= [`erased`] (id | `_') [`:' Type] */ def binding(mods: Modifiers): Tree = - atSpan(in.offset) { makeParameter(bindingName(), typedOpt(), mods) } + atSpan(in.offset) { + val mods1 = if isErasedKw then addModifier(mods) else mods + makeParameter(bindingName(), typedOpt(), mods1) + } def bindingName(): TermName = if (in.token == USCORE) { @@ -2447,10 
+2479,10 @@ object Parsers { case QUOTE => atSpan(in.skipToken()) { withinStaged(StageKind.Quoted | (if (location.inPattern) StageKind.QuotedPattern else 0)) { - Quote { + val body = if (in.token == LBRACKET) inBrackets(typ()) else stagedBlock() - } + Quote(body, Nil) } } case NEW => @@ -2533,6 +2565,7 @@ object Parsers { else in.currentRegion.withCommasExpected { var isFormalParams = false def exprOrBinding() = + if isErasedKw then isFormalParams = true if isFormalParams then binding(Modifiers()) else val t = exprInParens() @@ -3091,12 +3124,50 @@ object Parsers { /* -------- PARAMETERS ------------------------------------------- */ + /** DefParamClauses ::= DefParamClause { DefParamClause } -- and two DefTypeParamClause cannot be adjacent + * DefParamClause ::= DefTypeParamClause + * | DefTermParamClause + * | UsingParamClause + */ + def typeOrTermParamClauses( + ownerKind: ParamOwner, + numLeadParams: Int = 0 + ): List[List[TypeDef] | List[ValDef]] = + + def recur(firstClause: Boolean, numLeadParams: Int, prevIsTypeClause: Boolean): List[List[TypeDef] | List[ValDef]] = + newLineOptWhenFollowedBy(LPAREN) + newLineOptWhenFollowedBy(LBRACKET) + if in.token == LPAREN then + val paramsStart = in.offset + val params = termParamClause( + numLeadParams, + firstClause = firstClause) + val lastClause = params.nonEmpty && params.head.mods.flags.is(Implicit) + params :: ( + if lastClause then Nil + else recur(firstClause = false, numLeadParams + params.length, prevIsTypeClause = false)) + else if in.token == LBRACKET then + if prevIsTypeClause then + syntaxError( + em"Type parameter lists must be separated by a term or using parameter list", + in.offset + ) + typeParamClause(ownerKind) :: recur(firstClause, numLeadParams, prevIsTypeClause = true) + else Nil + end recur + + recur(firstClause = true, numLeadParams = numLeadParams, prevIsTypeClause = false) + end typeOrTermParamClauses + + /** ClsTypeParamClause::= ‘[’ ClsTypeParam {‘,’ ClsTypeParam} ‘]’ * ClsTypeParam ::= {Annotation} [‘+’ | ‘-’] * id [HkTypeParamClause] TypeParamBounds * * DefTypeParamClause::= ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ - * DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeParamBounds + * DefTypeParam ::= {Annotation} + * [`sealed`] -- under captureChecking + * id [HkTypeParamClause] TypeParamBounds * * TypTypeParamClause::= ‘[’ TypTypeParam {‘,’ TypTypeParam} ‘]’ * TypTypeParam ::= {Annotation} id [HkTypePamClause] TypeBounds @@ -3106,24 +3177,25 @@ object Parsers { */ def typeParamClause(ownerKind: ParamOwner): List[TypeDef] = inBrackets { - def variance(vflag: FlagSet): FlagSet = - if ownerKind == ParamOwner.Def || ownerKind == ParamOwner.TypeParam then - syntaxError(em"no `+/-` variance annotation allowed here") - in.nextToken() - EmptyFlags - else - in.nextToken() - vflag + def checkVarianceOK(): Boolean = + val ok = ownerKind != ParamOwner.Def && ownerKind != ParamOwner.TypeParam + if !ok then syntaxError(em"no `+/-` variance annotation allowed here") + in.nextToken() + ok def typeParam(): TypeDef = { val isAbstractOwner = ownerKind == ParamOwner.Type || ownerKind == ParamOwner.TypeParam val start = in.offset - val mods = - annotsAsMods() - | (if (ownerKind == ParamOwner.Class) Param | PrivateLocal else Param) - | (if isIdent(nme.raw.PLUS) then variance(Covariant) - else if isIdent(nme.raw.MINUS) then variance(Contravariant) - else EmptyFlags) + var mods = annotsAsMods() | Param + if ownerKind == ParamOwner.Class then mods |= PrivateLocal + if Feature.ccEnabled && in.token == SEALED then + if ownerKind == 
ParamOwner.Def then mods |= Sealed + else syntaxError(em"`sealed` modifier only allowed for method type parameters") + in.nextToken() + if isIdent(nme.raw.PLUS) && checkVarianceOK() then + mods |= Covariant + else if isIdent(nme.raw.MINUS) && checkVarianceOK() then + mods |= Contravariant atSpan(start, nameStart) { val name = if (isAbstractOwner && in.token == USCORE) { @@ -3144,34 +3216,39 @@ object Parsers { /** ContextTypes ::= FunArgType {‘,’ FunArgType} */ - def contextTypes(ofClass: Boolean, nparams: Int, impliedMods: Modifiers): List[ValDef] = + def contextTypes(ofClass: Boolean, numLeadParams: Int, impliedMods: Modifiers): List[ValDef] = val tps = commaSeparated(funArgType) - var counter = nparams + var counter = numLeadParams def nextIdx = { counter += 1; counter } val paramFlags = if ofClass then LocalParamAccessor else Param tps.map(makeSyntheticParameter(nextIdx, _, paramFlags | Synthetic | impliedMods.flags)) - /** ClsParamClause ::= ‘(’ [‘erased’] ClsParams ‘)’ | UsingClsParamClause - * UsingClsParamClause::= ‘(’ ‘using’ [‘erased’] (ClsParams | ContextTypes) ‘)’ + /** ClsTermParamClause ::= ‘(’ ClsParams ‘)’ | UsingClsTermParamClause + * UsingClsTermParamClause::= ‘(’ ‘using’ [‘erased’] (ClsParams | ContextTypes) ‘)’ * ClsParams ::= ClsParam {‘,’ ClsParam} * ClsParam ::= {Annotation} * - * DefParamClause ::= ‘(’ [‘erased’] DefParams ‘)’ | UsingParamClause - * UsingParamClause ::= ‘(’ ‘using’ [‘erased’] (DefParams | ContextTypes) ‘)’ - * DefParams ::= DefParam {‘,’ DefParam} - * DefParam ::= {Annotation} [‘inline’] Param + * TypelessClause ::= DefTermParamClause + * | UsingParamClause + * + * DefTermParamClause::= [nl] ‘(’ [DefTermParams] ‘)’ + * UsingParamClause ::= ‘(’ ‘using’ (DefTermParams | ContextTypes) ‘)’ + * DefImplicitClause ::= [nl] ‘(’ ‘implicit’ DefTermParams ‘)’ + * DefTermParams ::= DefTermParam {‘,’ DefTermParam} + * DefTermParam ::= {Annotation} [‘erased’] [‘inline’] Param * * Param ::= id `:' ParamType [`=' Expr] * * @return the list of parameter definitions */ - def paramClause(nparams: Int, // number of parameters preceding this clause - ofClass: Boolean = false, // owner is a class - ofCaseClass: Boolean = false, // owner is a case class - prefix: Boolean = false, // clause precedes name of an extension method - givenOnly: Boolean = false, // only given parameters allowed - firstClause: Boolean = false // clause is the first in regular list of clauses - ): List[ValDef] = { + def termParamClause( + numLeadParams: Int, // number of parameters preceding this clause + ofClass: Boolean = false, // owner is a class + ofCaseClass: Boolean = false, // owner is a case class + prefix: Boolean = false, // clause precedes name of an extension method + givenOnly: Boolean = false, // only given parameters allowed + firstClause: Boolean = false // clause is the first in regular list of clauses + ): List[ValDef] = { var impliedMods: Modifiers = EmptyModifiers def addParamMod(mod: () => Mod) = impliedMods = addMod(impliedMods, atSpan(in.skipToken()) { mod() }) @@ -3182,12 +3259,12 @@ object Parsers { else if isIdent(nme.using) then addParamMod(() => Mod.Given()) - if isErased then - addParamMod(() => Mod.Erased()) def param(): ValDef = { val start = in.offset var mods = impliedMods.withAnnotations(annotations()) + if isErasedKw then + mods = addModifier(mods) if (ofClass) { mods = addFlag(modifiers(start = mods), ParamAccessor) mods = @@ -3198,7 +3275,7 @@ object Parsers { val mod = atSpan(in.skipToken()) { Mod.Var() } addMod(mods, mod) else - if (!(mods.flags &~ 
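Per the revised `DefTermParam` production, `erased` is now written on individual term parameters, including inside `using` clauses, rather than once per clause. A sketch, again assuming `erasedDefinitions` (`Ev` is a placeholder type):

    import language.experimental.erasedDefinitions

    class Ev
    def check(x: Int, erased ev: Ev): Int = x    // `ev` is type-checked but dropped at erasure
    def derive(using erased ev: Ev): Unit = ()   // erased evidence in a using clause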
(ParamAccessor | Inline | impliedMods.flags)).isEmpty) + if (!(mods.flags &~ (ParamAccessor | Inline | Erased | impliedMods.flags)).isEmpty) syntaxError(em"`val` or `var` expected") if (firstClause && ofCaseClass) mods else mods | PrivateLocal @@ -3236,7 +3313,7 @@ object Parsers { checkVarArgsRules(rest) } - // begin paramClause + // begin termParamClause inParens { if in.token == RPAREN && !prefix && !impliedMods.is(Given) then Nil else @@ -3246,33 +3323,45 @@ object Parsers { paramMods() if givenOnly && !impliedMods.is(Given) then syntaxError(em"`using` expected") - val isParams = - !impliedMods.is(Given) - || startParamTokens.contains(in.token) - || isIdent && (in.name == nme.inline || in.lookahead.isColon) - if isParams then commaSeparated(() => param()) - else contextTypes(ofClass, nparams, impliedMods) + val (firstParamMod, isParams) = + var mods = EmptyModifiers + if in.lookahead.isColon then + (mods, true) + else + if isErased then mods = addModifier(mods) + val isParams = + !impliedMods.is(Given) + || startParamTokens.contains(in.token) + || isIdent && (in.name == nme.inline || in.lookahead.isColon) + (mods, isParams) + (if isParams then commaSeparated(() => param()) + else contextTypes(ofClass, numLeadParams, impliedMods)) match { + case Nil => Nil + case (h :: t) => h.withAddedFlags(firstParamMod.flags) :: t + } checkVarArgsRules(clause) clause } } - /** ClsParamClauses ::= {ClsParamClause} [[nl] ‘(’ [‘implicit’] ClsParams ‘)’] - * DefParamClauses ::= {DefParamClause} [[nl] ‘(’ [‘implicit’] DefParams ‘)’] + /** ClsTermParamClauses ::= {ClsTermParamClause} [[nl] ‘(’ [‘implicit’] ClsParams ‘)’] + * TypelessClauses ::= TypelessClause {TypelessClause} * * @return The parameter definitions */ - def paramClauses(ofClass: Boolean = false, - ofCaseClass: Boolean = false, - givenOnly: Boolean = false, - numLeadParams: Int = 0): List[List[ValDef]] = + def termParamClauses( + ofClass: Boolean = false, + ofCaseClass: Boolean = false, + givenOnly: Boolean = false, + numLeadParams: Int = 0 + ): List[List[ValDef]] = - def recur(firstClause: Boolean, nparams: Int): List[List[ValDef]] = + def recur(firstClause: Boolean, numLeadParams: Int): List[List[ValDef]] = newLineOptWhenFollowedBy(LPAREN) if in.token == LPAREN then val paramsStart = in.offset - val params = paramClause( - nparams, + val params = termParamClause( + numLeadParams, ofClass = ofClass, ofCaseClass = ofCaseClass, givenOnly = givenOnly, @@ -3280,12 +3369,12 @@ object Parsers { val lastClause = params.nonEmpty && params.head.mods.flags.is(Implicit) params :: ( if lastClause then Nil - else recur(firstClause = false, nparams + params.length)) + else recur(firstClause = false, numLeadParams + params.length)) else Nil end recur recur(firstClause = true, numLeadParams) - end paramClauses + end termParamClauses /* -------- DEFS ------------------------------------------- */ @@ -3527,10 +3616,12 @@ object Parsers { } /** DefDef ::= DefSig [‘:’ Type] ‘=’ Expr - * | this ParamClause ParamClauses `=' ConstrExpr + * | this TypelessClauses [DefImplicitClause] `=' ConstrExpr * DefDcl ::= DefSig `:' Type - * DefSig ::= id [DefTypeParamClause] DefParamClauses - * | ExtParamClause [nl] [‘.’] id DefParamClauses + * DefSig ::= id [DefTypeParamClause] DefTermParamClauses + * + * if clauseInterleaving is enabled: + * DefSig ::= id [DefParamClauses] [DefImplicitClause] */ def defDefOrDcl(start: Offset, mods: Modifiers, numLeadParams: Int = 0): DefDef = atSpan(start, nameStart) { @@ -3549,7 +3640,7 @@ object Parsers { if (in.token == THIS) { 
in.nextToken() - val vparamss = paramClauses(numLeadParams = numLeadParams) + val vparamss = termParamClauses(numLeadParams = numLeadParams) if (vparamss.isEmpty || vparamss.head.take(1).exists(_.mods.isOneOf(GivenOrImplicit))) in.token match { case LBRACKET => syntaxError(em"no type parameters allowed here") @@ -3567,9 +3658,18 @@ object Parsers { val mods1 = addFlag(mods, Method) val ident = termIdent() var name = ident.name.asTermName - val tparams = typeParamClauseOpt(ParamOwner.Def) - val vparamss = paramClauses(numLeadParams = numLeadParams) + val paramss = + if in.featureEnabled(Feature.clauseInterleaving) then + // If you are making interleaving stable manually, please refer to the PR introducing it instead, section "How to make non-experimental" + typeOrTermParamClauses(ParamOwner.Def, numLeadParams = numLeadParams) + else + val tparams = typeParamClauseOpt(ParamOwner.Def) + val vparamss = termParamClauses(numLeadParams = numLeadParams) + + joinParams(tparams, vparamss) + var tpt = fromWithinReturnType { typedOpt() } + if (migrateTo3) newLineOptWhenFollowedBy(LBRACE) val rhs = if in.token == EQUALS then @@ -3586,7 +3686,7 @@ object Parsers { accept(EQUALS) expr() - val ddef = DefDef(name, joinParams(tparams, vparamss), tpt, rhs) + val ddef = DefDef(name, paramss, tpt, rhs) if (isBackquoted(ident)) ddef.pushAttachment(Backquoted, ()) finalizeDef(ddef, mods1, start) } @@ -3707,12 +3807,12 @@ object Parsers { val templ = templateOpt(constr) finalizeDef(TypeDef(name, templ), mods, start) - /** ClassConstr ::= [ClsTypeParamClause] [ConstrMods] ClsParamClauses + /** ClassConstr ::= [ClsTypeParamClause] [ConstrMods] ClsTermParamClauses */ def classConstr(isCaseClass: Boolean = false): DefDef = atSpan(in.lastOffset) { val tparams = typeParamClauseOpt(ParamOwner.Class) val cmods = fromWithinClassConstr(constrModsOpt()) - val vparamss = paramClauses(ofClass = true, ofCaseClass = isCaseClass) + val vparamss = termParamClauses(ofClass = true, ofCaseClass = isCaseClass) makeConstructor(tparams, vparamss).withMods(cmods) } @@ -3814,7 +3914,7 @@ object Parsers { newLineOpt() val vparamss = if in.token == LPAREN && in.lookahead.isIdent(nme.using) - then paramClauses(givenOnly = true) + then termParamClauses(givenOnly = true) else Nil newLinesOpt() val noParams = tparams.isEmpty && vparamss.isEmpty @@ -3849,20 +3949,20 @@ object Parsers { finalizeDef(gdef, mods1, start) } - /** Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} ‘(’ DefParam ‘)’ + /** Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} ‘(’ DefTermParam ‘)’ * {UsingParamClause} ExtMethods */ def extension(): ExtMethods = val start = in.skipToken() val tparams = typeParamClauseOpt(ParamOwner.Def) val leadParamss = ListBuffer[List[ValDef]]() - def nparams = leadParamss.map(_.length).sum + def numLeadParams = leadParamss.map(_.length).sum while - val extParams = paramClause(nparams, prefix = true) + val extParams = termParamClause(numLeadParams, prefix = true) leadParamss += extParams isUsingClause(extParams) do () - leadParamss ++= paramClauses(givenOnly = true, numLeadParams = nparams) + leadParamss ++= termParamClauses(givenOnly = true, numLeadParams = numLeadParams) if in.isColon then syntaxError(em"no `:` expected here") in.nextToken() @@ -3870,11 +3970,11 @@ object Parsers { if in.token == EXPORT then exportClause() else if isDefIntro(modifierTokens) then - extMethod(nparams) :: Nil + extMethod(numLeadParams) :: Nil else in.observeIndented() newLineOptWhenFollowedBy(LBRACE) - if in.isNestedStart then 
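When `experimental.clauseInterleaving` is enabled, the `typeOrTermParamClauses` path above lets type and term clauses alternate, rejecting only two adjacent type clauses. A sketch:

    import scala.language.experimental.clauseInterleaving

    def pair[A](a: A)[B](b: B): (A, B) = (a, b)   // ok: [A](a: A)[B](b: B) interleaves clauses
    // def bad[A][B](a: A, b: B) = ...            // error: type parameter lists must be separated
    //                                            //        by a term or using parameter list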
inDefScopeBraces(extMethods(nparams)) + if in.isNestedStart then inDefScopeBraces(extMethods(numLeadParams)) else { syntaxErrorOrIncomplete(em"Extension without extension methods") ; Nil } val result = atSpan(start)(ExtMethods(joinParams(tparams, leadParamss.toList), methods)) val comment = in.getDocComment(start) @@ -4068,8 +4168,8 @@ object Parsers { stats.toList } - /** SelfType ::= id [‘:’ [CaptureSet] InfixType] ‘=>’ - * | ‘this’ ‘:’ [CaptureSet] InfixType ‘=>’ + /** SelfType ::= id [‘:’ InfixType] ‘=>’ + * | ‘this’ ‘:’ InfixType ‘=>’ */ def selfType(): ValDef = if (in.isIdent || in.token == THIS) @@ -4085,10 +4185,7 @@ object Parsers { val selfTpt = if in.isColon then in.nextToken() - if in.token == LBRACE && followingIsCaptureSet() then - CapturingTypeTree(captureSet(), infixType()) - else - infixType() + infixType() else if selfName == nme.WILDCARD then accept(COLONfollow) TypeTree() diff --git a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala index b3d824a2efd2..fac73bfb4992 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala @@ -523,7 +523,8 @@ object Scanners { * * The following tokens can start an indentation region: * - * : = => <- if then else while do try catch finally for yield match + * : = => <- if then else while do try catch + * finally for yield match throw return with * * Inserting an INDENT starts a new indentation region with the indentation of the current * token as indentation width. @@ -610,11 +611,17 @@ object Scanners { case r: Indented => insert(OUTDENT, offset) handleNewIndentWidth(r.enclosing, ir => - val lw = lastWidth - errorButContinue( - em"""The start of this line does not match any of the previous indentation widths. - |Indentation width of current line : $nextWidth - |This falls between previous widths: ${ir.width} and $lw""")) + if next.token == DOT + && !nextWidth.isClose(r.indentWidth) + && !nextWidth.isClose(ir.indentWidth) + then + ir.otherIndentWidths += nextWidth + else + val lw = lastWidth + errorButContinue( + em"""The start of this line does not match any of the previous indentation widths. + |Indentation width of current line : $nextWidth + |This falls between previous widths: ${ir.width} and $lw""")) case r => if skipping then if r.enclosing.isClosedByUndentAt(nextWidth) then @@ -1665,6 +1672,17 @@ object Scanners { def < (that: IndentWidth): Boolean = this <= that && !(that <= this) + /** Does `this` differ from `that` by not more than a single space? 
*/ + def isClose(that: IndentWidth): Boolean = this match + case Run(ch1, n1) => + that match + case Run(ch2, n2) => ch1 == ch2 && ch1 != '\t' && (n1 - n2).abs <= 1 + case Conc(l, r) => false + case Conc(l1, r1) => + that match + case Conc(l2, r2) => l1 == l2 && r1.isClose(r2) + case _ => false + def toPrefix: String = this match { case Run(ch, n) => ch.toString * n case Conc(l, r) => l.toPrefix ++ r.toPrefix diff --git a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParserCommon.scala b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParserCommon.scala index 2c6c5361e51c..0f7d426fbd28 100644 --- a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParserCommon.scala +++ b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParserCommon.scala @@ -11,8 +11,7 @@ package xml import Utility._ import util.Chars.SU - - +import scala.collection.BufferedIterator /** This is not a public trait - it contains common code shared * between the library level XML parser and the compiler's. diff --git a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala index 77c5a1bf376b..b3f41fab9eaa 100644 --- a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala @@ -6,6 +6,7 @@ package xml import scala.language.unsafeNulls import scala.collection.mutable +import scala.collection.BufferedIterator import core.Contexts.Context import mutable.{ Buffer, ArrayBuffer, ListBuffer } import scala.util.control.ControlThrowable diff --git a/compiler/src/dotty/tools/dotc/plugins/Plugin.scala b/compiler/src/dotty/tools/dotc/plugins/Plugin.scala index 21bb0fa2be54..1baf3a06ad9e 100644 --- a/compiler/src/dotty/tools/dotc/plugins/Plugin.scala +++ b/compiler/src/dotty/tools/dotc/plugins/Plugin.scala @@ -44,7 +44,7 @@ sealed trait Plugin { trait StandardPlugin extends Plugin { /** Non-research plugins should override this method to return the phases * - * @param options: commandline options to the plugin, `-P:plugname:opt1,opt2` becomes List(opt1, opt2) + * @param options commandline options to the plugin. * @return a list of phases to be added to the phase plan */ def init(options: List[String]): List[PluginPhase] @@ -57,8 +57,8 @@ trait StandardPlugin extends Plugin { trait ResearchPlugin extends Plugin { /** Research plugins should override this method to return the new phase plan * - * @param options: commandline options to the plugin, `-P:plugname:opt1,opt2` becomes List(opt1, opt2) - * @param plan: the given phase plan + * @param options commandline options to the plugin, `-P:plugname:opt1,opt2` becomes List(opt1, opt2) + * @param plan the given phase plan * @return the new phase plan */ def init(options: List[String], plan: List[List[Phase]])(using Context): List[List[Phase]] diff --git a/compiler/src/dotty/tools/dotc/plugins/Plugins.scala b/compiler/src/dotty/tools/dotc/plugins/Plugins.scala index 976b783c40f0..c44fe4cf59b4 100644 --- a/compiler/src/dotty/tools/dotc/plugins/Plugins.scala +++ b/compiler/src/dotty/tools/dotc/plugins/Plugins.scala @@ -116,8 +116,6 @@ trait Plugins { /** Add plugin phases to phase plan */ def addPluginPhases(plan: List[List[Phase]])(using Context): List[List[Phase]] = { - // plugin-specific options. - // The user writes `-P:plugname:opt1,opt2`, but the plugin sees `List(opt1, opt2)`. 
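The `isClose` tolerance defined above lets the scanner accept a continuation line that starts with `.` at an indentation width strictly between the enclosing widths, instead of emitting the "does not match any of the previous indentation widths" error; a width within one space of an existing one is still rejected as ambiguous. Roughly the shape this is meant to accept (illustrative):

    def squares(xs: List[Int]) =
      xs.map: x =>
          x * x
        .filter: y =>   // leading `.` at an in-between width: now tolerated
          y > 10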
def options(plugin: Plugin): List[String] = { def namec = plugin.name + ":" ctx.settings.pluginOptions.value filter (_ startsWith namec) map (_ stripPrefix namec) diff --git a/compiler/src/dotty/tools/dotc/printing/Formatting.scala b/compiler/src/dotty/tools/dotc/printing/Formatting.scala index f4bbd74842c8..3f32b29654c9 100644 --- a/compiler/src/dotty/tools/dotc/printing/Formatting.scala +++ b/compiler/src/dotty/tools/dotc/printing/Formatting.scala @@ -71,6 +71,16 @@ object Formatting { given Show[TypeComparer.ApproxState] with def show(x: TypeComparer.ApproxState) = TypeComparer.ApproxState.Repr.show(x) + given Show[ast.TreeInfo.PurityLevel] with + def show(x: ast.TreeInfo.PurityLevel) = x match + case ast.TreeInfo.Path => "PurityLevel.Path" + case ast.TreeInfo.Pure => "PurityLevel.Pure" + case ast.TreeInfo.Idempotent => "PurityLevel.Idempotent" + case ast.TreeInfo.Impure => "PurityLevel.Impure" + case ast.TreeInfo.PurePath => "PurityLevel.PurePath" + case ast.TreeInfo.IdempotentPath => "PurityLevel.IdempotentPath" + case _ => s"PurityLevel(${x.x})" + given Show[Showable] = ShowAny given Show[Shown] = ShowAny given Show[Int] = ShowAny @@ -90,6 +100,7 @@ object Formatting { given Show[util.SourceFile] = ShowAny given Show[util.Spans.Span] = ShowAny given Show[tasty.TreeUnpickler#OwnerTree] = ShowAny + given Show[typer.ForceDegree.Value] = ShowAny private def show1[A: Show](x: A)(using Context) = show2(Show[A].show(x).ctxShow) private def show2(x: Shown)(using Context): String = x match diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index 0da1993310c6..700b3fbf525f 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -120,10 +120,10 @@ class PlainPrinter(_ctx: Context) extends Printer { } (keyword ~ refinementNameString(rt) ~ toTextRHS(rt.refinedInfo)).close - protected def argText(arg: Type): Text = homogenizeArg(arg) match { + protected def argText(arg: Type, isErased: Boolean = false): Text = keywordText("erased ").provided(isErased) ~ (homogenizeArg(arg) match { case arg: TypeBounds => "?" ~ toText(arg) case arg => toText(arg) - } + }) /** Pretty-print comma-separated type arguments for a constructor to be inserted among parentheses or brackets * (hence with `GlobalPrec` precedence). @@ -149,8 +149,21 @@ class PlainPrinter(_ctx: Context) extends Printer { + defn.ObjectClass + defn.FromJavaObjectSymbol - def toText(cs: CaptureSet): Text = - "{" ~ Text(cs.elems.toList.map(toTextCaptureRef), ", ") ~ "}" + def toTextCaptureSet(cs: CaptureSet): Text = + if printDebug && !cs.isConst then cs.toString + else if ctx.settings.YccDebug.value then cs.show + else if !cs.isConst && cs.elems.isEmpty then "?" + else "{" ~ Text(cs.elems.toList.map(toTextCaptureRef), ", ") ~ "}" + + /** Print capturing type, overridden in RefinedPrinter to account for + * capturing function types. 
+   */
+  protected def toTextCapturing(parent: Type, refsText: Text, boxText: Text): Text =
+    changePrec(InfixPrec):
+      boxText ~ toTextLocal(parent) ~ "^"
+      ~ (refsText provided refsText != rootSetText)
+
+  final protected def rootSetText = Str("{cap}")

   def toText(tp: Type): Text = controlled {
     homogenize(tp) match {
@@ -207,20 +220,9 @@
           (" <: " ~ toText(bound) provided !bound.isAny)
         }.close
       case tp @ EventuallyCapturingType(parent, refs) =>
-        def box =
-          Str("box ") provided tp.isBoxed //&& ctx.settings.YccDebug.value
-        def printRegular(refsText: Text) =
-          changePrec(GlobalPrec)(box ~ refsText ~ " " ~ toText(parent))
-        if printDebug && !refs.isConst then
-          printRegular(refs.toString)
-        else if ctx.settings.YccDebug.value then
-          printRegular(refs.show)
-        else if !refs.isConst && refs.elems.isEmpty then
-          printRegular("?")
-        else if Config.printCaptureSetsAsPrefix then
-          printRegular(toText(refs))
-        else
-          changePrec(InfixPrec)(box ~ toText(parent) ~ " @retains(" ~ toText(refs.elems.toList, ",") ~ ")")
+        val boxText: Text = Str("box ") provided tp.isBoxed //&& ctx.settings.YccDebug.value
+        val refsText = if refs.isUniversal then rootSetText else toTextCaptureSet(refs)
+        toTextCapturing(parent, refsText, boxText)
       case tp: PreviousErrorType if ctx.settings.XprintTypes.value =>
         "" // do not print previously reported error messages because they may try to print this error type again recursively
       case tp: ErrorType =>
@@ -235,21 +237,19 @@
         changePrec(GlobalPrec) {
           "("
           ~ keywordText("using ").provided(tp.isContextualMethod)
-          ~ keywordText("erased ").provided(tp.isErasedMethod)
           ~ keywordText("implicit ").provided(tp.isImplicitMethod && !tp.isContextualMethod)
           ~ paramsText(tp)
           ~ ")"
           ~ (Str(": ") provided !tp.resultType.isInstanceOf[MethodOrPoly])
           ~ toText(tp.resultType)
         }
-      case ExprType(ct @ EventuallyCapturingType(parent, refs))
-      if ct.annot.symbol == defn.RetainsByNameAnnot =>
-        if refs.isUniversal then changePrec(GlobalPrec) { "=> " ~ toText(parent) }
-        else toText(CapturingType(ExprType(parent), refs))
       case ExprType(restp) =>
-        changePrec(GlobalPrec) {
-          (if Feature.pureFunsEnabled then "-> " else "=> ") ~ toText(restp)
-        }
+        def arrowText: Text = restp match
+          case ct @ EventuallyCapturingType(parent, refs) if ct.annot.symbol == defn.RetainsByNameAnnot =>
+            if refs.isUniversal then Str("=>") else Str("->") ~ toTextCaptureSet(refs)
+          case _ =>
+            if Feature.pureFunsEnabled then "->" else "=>"
+        changePrec(GlobalPrec)(arrowText ~ " " ~ toText(restp))
       case tp: HKTypeLambda =>
         changePrec(GlobalPrec) {
           "[" ~ paramsText(tp) ~ "]" ~ lambdaHash(tp) ~ Str(" =>> ") ~ toTextGlobal(tp.resultType)
@@ -296,9 +296,10 @@
     "(" ~ toTextRef(tp) ~ " : " ~ toTextGlobal(tp.underlying) ~ ")"

   protected def paramsText(lam: LambdaType): Text = {
-    def paramText(name: Name, tp: Type) =
-      toText(name) ~ lambdaHash(lam) ~ toTextRHS(tp, isParameter = true)
-    Text(lam.paramNames.lazyZip(lam.paramInfos).map(paramText), ", ")
+    val erasedParams = lam.erasedParams
+    def paramText(name: Name, tp: Type, erased: Boolean) =
+      keywordText("erased ").provided(erased) ~ toText(name) ~ lambdaHash(lam) ~ toTextRHS(tp, isParameter = true)
+    Text(lam.paramNames.lazyZip(lam.paramInfos).lazyZip(erasedParams).map(paramText), ", ")
   }

   protected def ParamRefNameString(name: Name): String = nameString(name)
@@ -385,7 +386,7 @@ class PlainPrinter(_ctx: Context) extends Printer {
   def
toTextCaptureRef(tp: Type): Text = homogenize(tp) match - case tp: TermRef if tp.symbol == defn.captureRoot => Str("*") + case tp: TermRef if tp.symbol == defn.captureRoot => Str("cap") case tp: SingletonType => toTextRef(tp) case _ => toText(tp) @@ -639,6 +640,13 @@ class PlainPrinter(_ctx: Context) extends Printer { else if (pos.source.exists) s"${pos.source.file.name}:${pos.line + 1}" else s"(no source file, offset = ${pos.span.point})" + def toText(cand: Candidate): Text = + "Cand(" + ~ toTextRef(cand.ref) + ~ (if cand.isConversion then " conv" else "") + ~ (if cand.isExtension then " ext" else "") + ~ Str(" L" + cand.level) ~ ")" + def toText(result: SearchResult): Text = result match { case result: SearchSuccess => "SearchSuccess: " ~ toText(result.ref) ~ " via " ~ toText(result.tree) diff --git a/compiler/src/dotty/tools/dotc/printing/Printer.scala b/compiler/src/dotty/tools/dotc/printing/Printer.scala index 326630844dde..ab0c867ec31f 100644 --- a/compiler/src/dotty/tools/dotc/printing/Printer.scala +++ b/compiler/src/dotty/tools/dotc/printing/Printer.scala @@ -7,7 +7,7 @@ import Texts._, ast.Trees._ import Types.{Type, SingletonType, LambdaParam}, Symbols.Symbol, Scopes.Scope, Constants.Constant, Names.Name, Denotations._, Annotations.Annotation, Contexts.Context -import typer.Implicits.SearchResult +import typer.Implicits.* import util.SourcePosition import typer.ImportInfo @@ -153,6 +153,9 @@ abstract class Printer { /** Textual representation of source position */ def toText(pos: SourcePosition): Text + /** Textual representation of implicit candidates. */ + def toText(cand: Candidate): Text + /** Textual representation of implicit search result */ def toText(result: SearchResult): Text @@ -174,15 +177,15 @@ abstract class Printer { atPrec(GlobalPrec) { elem.toText(this) } /** Render elements alternating with `sep` string */ - def toText(elems: Traversable[Showable], sep: String): Text = + def toText(elems: Iterable[Showable], sep: String): Text = Text(elems map (_ toText this), sep) /** Render elements within highest precedence */ - def toTextLocal(elems: Traversable[Showable], sep: String): Text = + def toTextLocal(elems: Iterable[Showable], sep: String): Text = atPrec(DotPrec) { toText(elems, sep) } /** Render elements within lowest precedence */ - def toTextGlobal(elems: Traversable[Showable], sep: String): Text = + def toTextGlobal(elems: Iterable[Showable], sep: String): Text = atPrec(GlobalPrec) { toText(elems, sep) } /** A plain printer without any embellishments */ diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 9a4b53d4112c..51aaa0932e5e 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -3,6 +3,7 @@ package dotc package printing import core._ +import Constants.* import Texts._ import Types._ import Flags._ @@ -143,46 +144,55 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { private def arrow(isGiven: Boolean, isPure: Boolean): String = (if isGiven then "?" 
else "") + (if isPure then "->" else "=>") - override def toText(tp: Type): Text = controlled { - def toTextTuple(args: List[Type]): Text = - "(" ~ argsText(args) ~ ")" + private def toTextFunction(tp: AppliedType, refs: Text = Str("")): Text = + val AppliedType(tycon, args) = (tp: @unchecked) + val tsym = tycon.typeSymbol + val isGiven = tsym.name.isContextFunction + val capturesRoot = refs == rootSetText + val isPure = + Feature.pureFunsEnabled && !tsym.name.isImpureFunction && !capturesRoot + changePrec(GlobalPrec) { + val argStr: Text = + if args.length == 2 + && !defn.isTupleNType(args.head) + && !isGiven + then + atPrec(InfixPrec) { argText(args.head) } + else + "(" + ~ argsText(args.init) + ~ ")" + argStr + ~ " " ~ arrow(isGiven, isPure) + ~ (refs provided !capturesRoot) + ~ " " ~ argText(args.last) + } - def toTextFunction(args: List[Type], isGiven: Boolean, isErased: Boolean, isPure: Boolean): Text = + private def toTextMethodAsFunction(info: Type, isPure: Boolean, refs: Text = Str("")): Text = info match + case info: MethodType => + val capturesRoot = refs == rootSetText changePrec(GlobalPrec) { - val argStr: Text = - if args.length == 2 - && !defn.isTupleNType(args.head) - && !isGiven && !isErased - then - atPrec(InfixPrec) { argText(args.head) } - else - "(" - ~ keywordText("erased ").provided(isErased) - ~ argsText(args.init) - ~ ")" - argStr ~ " " ~ arrow(isGiven, isPure) ~ " " ~ argText(args.last) + "(" + ~ paramsText(info) + ~ ") " + ~ arrow(info.isImplicitMethod, isPure && !capturesRoot) + ~ (refs provided !capturesRoot) + ~ " " + ~ toTextMethodAsFunction(info.resultType, isPure) + } + case info: PolyType => + changePrec(GlobalPrec) { + "[" + ~ paramsText(info) + ~ "] => " + ~ toTextMethodAsFunction(info.resultType, isPure) } + case _ => + toText(info) - def toTextMethodAsFunction(info: Type, isPure: Boolean): Text = info match - case info: MethodType => - changePrec(GlobalPrec) { - "(" - ~ keywordText("erased ").provided(info.isErasedMethod) - ~ paramsText(info) - ~ ") " - ~ arrow(info.isImplicitMethod, isPure) - ~ " " - ~ toTextMethodAsFunction(info.resultType, isPure) - } - case info: PolyType => - changePrec(GlobalPrec) { - "[" - ~ paramsText(info) - ~ "] => " - ~ toTextMethodAsFunction(info.resultType, isPure) - } - case _ => - toText(info) + override def toText(tp: Type): Text = controlled { + def toTextTuple(args: List[Type]): Text = + "(" ~ argsText(args) ~ ")" def isInfixType(tp: Type): Boolean = tp match case AppliedType(tycon, args) => @@ -224,9 +234,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { val tsym = tycon.typeSymbol if tycon.isRepeatedParam then toTextLocal(args.head) ~ "*" else if tp.isConvertibleParam then "into " ~ toText(args.head) - else if defn.isFunctionSymbol(tsym) then - toTextFunction(args, tsym.name.isContextFunction, tsym.name.isErasedFunction, - isPure = Feature.pureFunsEnabled && !tsym.name.isImpureFunction) + else if defn.isFunctionSymbol(tsym) then toTextFunction(tp) else if isInfixType(tp) then val l :: r :: Nil = args: @unchecked val opName = tyconName(tycon) @@ -286,14 +294,9 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case tp: ViewProto => toText(tp.argType) ~ " ?=>? 
" ~ toText(tp.resultType) case tp @ FunProto(args, resultType) => - val argsText = args match { - case dummyTreeOfType(tp) :: Nil if !(tp isRef defn.NullClass) => "null: " ~ toText(tp) - case _ => toTextGlobal(args, ", ") - } "[applied to (" ~ keywordText("using ").provided(tp.isContextualMethod) - ~ keywordText("erased ").provided(tp.isErasedMethod) - ~ argsText + ~ argsTreeText(args) ~ ") returning " ~ toText(resultType) ~ "]" @@ -309,6 +312,10 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { protected def exprToText(tp: ExprType): Text = "=> " ~ toText(tp.resType) + protected def argsTreeText(args: List[untpd.Tree]): Text = args match + case dummyTreeOfType(tp) :: Nil if !tp.isRef(defn.NullClass) && !homogenizedView => toText(Constant(null)) ~ ": " ~ toText(tp) + case _ => toTextGlobal(args, ", ") + protected def blockToText[T <: Untyped](block: Block[T]): Text = blockText(block.stats :+ block.expr) @@ -434,15 +441,11 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { changePrec (GlobalPrec) { keywordStr("throw ") ~ toText(args.head) } - else if (!printDebug && fun.hasType && fun.symbol == defn.QuotedRuntime_exprQuote) - keywordStr("'{") ~ toTextGlobal(args, ", ") ~ keywordStr("}") - else if (!printDebug && fun.hasType && fun.symbol.isExprSplice) - keywordStr("${") ~ toTextGlobal(args, ", ") ~ keywordStr("}") else toTextLocal(fun) ~ "(" ~ Str("using ").provided(app.applyKind == ApplyKind.Using && !homogenizedView) - ~ toTextGlobal(args, ", ") + ~ argsTreeText(args) ~ ")" case tree: TypeApply => typeApplyText(tree) @@ -563,14 +566,15 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { toText(sel) ~ keywordStr(" match ") ~ blockText(cases) ~ (" <: " ~ toText(bound) provided !bound.isEmpty) } + case ImpureByNameTypeTree(tpt) => + "=> " ~ toTextLocal(tpt) case ByNameTypeTree(tpt) => - (if Feature.pureFunsEnabled then "-> " else "=> ") - ~ toTextLocal(tpt) + (if Feature.pureFunsEnabled then "-> " else "=> ") ~ toTextLocal(tpt) case TypeBoundsTree(lo, hi, alias) => if (lo eq hi) && alias.isEmpty then optText(lo)(" = " ~ _) else optText(lo)(" >: " ~ _) ~ optText(hi)(" <: " ~ _) ~ optText(alias)(" = " ~ _) case bind @ Bind(name, body) => - keywordText("given ").provided(tree.symbol.isOneOf(GivenOrImplicit) && !homogenizedView) ~ // Used for scala.quoted.Type in quote patterns (not pickled) + toTextOwner(bind) ~ keywordText("given ").provided(tree.symbol.isOneOf(GivenOrImplicit) && !homogenizedView) ~ // Used for scala.quoted.Type in quote patterns (not pickled) changePrec(InfixPrec) { nameIdText(bind) ~ " @ " ~ toText(body) } case Alternative(trees) => changePrec(OrPrec) { toText(trees, " | ") } @@ -624,7 +628,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { def toTextAnnot = toTextLocal(arg) ~~ annotText(annot.symbol.enclosingClass, annot) def toTextRetainsAnnot = - try changePrec(GlobalPrec)(toText(captureSet) ~ " " ~ toText(arg)) + try changePrec(GlobalPrec)(toText(arg) ~ "^" ~ toTextCaptureSet(captureSet)) catch case ex: IllegalCaptureRef => toTextAnnot if annot.symbol.maybeOwner == defn.RetainsAnnot && Feature.ccEnabled && Config.printCaptureSetsAsPrefix && !printDebug @@ -649,27 +653,29 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case str: Literal => strText(str) } toText(id) ~ "\"" ~ Text(segments map segmentText, "") ~ "\"" - case Function(args, body) => + case fn @ Function(args, body) => var implicitSeen: Boolean = false var isGiven: Boolean = false - var isErased: Boolean = false - def 
argToText(arg: Tree) = arg match { + val erasedParams = fn match { + case fn: FunctionWithMods => fn.erasedParams + case _ => fn.args.map(_ => false) + } + def argToText(arg: Tree, isErased: Boolean) = arg match { case arg @ ValDef(name, tpt, _) => val implicitText = if ((arg.mods.is(Given))) { isGiven = true; "" } - else if ((arg.mods.is(Erased))) { isErased = true; "" } else if ((arg.mods.is(Implicit)) && !implicitSeen) { implicitSeen = true; keywordStr("implicit ") } else "" - implicitText ~ toText(name) ~ optAscription(tpt) + val erasedText = if isErased then keywordStr("erased ") else "" + implicitText ~ erasedText ~ toText(name) ~ optAscription(tpt) case _ => toText(arg) } val argsText = args match { - case (arg @ ValDef(_, tpt, _)) :: Nil if tpt.isEmpty => argToText(arg) + case (arg @ ValDef(_, tpt, _)) :: Nil if tpt.isEmpty => argToText(arg, erasedParams(0)) case _ => "(" - ~ keywordText("erased ").provided(isErased) - ~ Text(args.map(argToText), ", ") + ~ Text(args.zip(erasedParams).map(argToText), ", ") ~ ")" } val isPure = @@ -716,31 +722,46 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { } case Number(digits, kind) => digits - case Quote(tree) if tree.isTerm => - keywordStr("'{") ~ toTextGlobal(dropBlock(tree)) ~ keywordStr("}") - case Splice(tree) => - keywordStr("${") ~ toTextGlobal(dropBlock(tree)) ~ keywordStr("}") case Thicket(trees) => "Thicket {" ~~ toTextGlobal(trees, "\n") ~~ "}" case MacroTree(call) => keywordStr("macro ") ~ toTextGlobal(call) - case Hole(isTermHole, idx, args, content, tpt) => - val (prefix, postfix) = if isTermHole then ("{{{", "}}}") else ("[[[", "]]]") + case tree @ Quote(body, tags) => + val tagsText = (keywordStr("<") ~ toTextGlobal(tags, ", ") ~ keywordStr(">")).provided(tree.tags.nonEmpty) + val exprTypeText = (keywordStr("[") ~ toTextGlobal(tree.bodyType) ~ keywordStr("]")).provided(printDebug && tree.typeOpt.exists) + val open = if (body.isTerm) keywordStr("{") else keywordStr("[") + val close = if (body.isTerm) keywordStr("}") else keywordStr("]") + keywordStr("'") ~ tagsText ~ exprTypeText ~ open ~ toTextGlobal(body) ~ close + case Splice(expr) => + val spliceTypeText = (keywordStr("[") ~ toTextGlobal(tree.typeOpt) ~ keywordStr("]")).provided(printDebug && tree.typeOpt.exists) + keywordStr("$") ~ spliceTypeText ~ keywordStr("{") ~ toTextGlobal(expr) ~ keywordStr("}") + case SplicePattern(pattern, args) => + val spliceTypeText = (keywordStr("[") ~ toTextGlobal(tree.typeOpt) ~ keywordStr("]")).provided(printDebug && tree.typeOpt.exists) + keywordStr("$") ~ spliceTypeText ~ { + if args.isEmpty then keywordStr("{") ~ inPattern(toText(pattern)) ~ keywordStr("}") + else toText(pattern.symbol.name) ~ "(" ~ toTextGlobal(args, ", ") ~ ")" + } + case Hole(isTerm, idx, args, content) => + val (prefix, postfix) = if isTerm then ("{{{", "}}}") else ("[[[", "]]]") val argsText = toTextGlobal(args, ", ") val contentText = toTextGlobal(content) - val tptText = toTextGlobal(tpt) - prefix ~~ idx.toString ~~ "|" ~~ tptText ~~ "|" ~~ argsText ~~ "|" ~~ contentText ~~ postfix - case CapturingTypeTree(refs, parent) => - parent match - case ImpureByNameTypeTree(bntpt) => - "=> " ~ toTextLocal(bntpt) - case _ => - changePrec(GlobalPrec)("{" ~ Text(refs.map(toText), ", ") ~ "} " ~ toText(parent)) + val tpeText = toTextGlobal(tree.typeOpt) + prefix ~~ idx.toString ~~ "|" ~~ tpeText ~~ "|" ~~ argsText ~~ "|" ~~ contentText ~~ postfix + case CapturesAndResult(refs, parent) => + changePrec(GlobalPrec)("^{" ~ Text(refs.map(toText), ", ") ~ "}" ~ 
toText(parent)) case _ => tree.fallbackToText(this) } } + override protected def toTextCapturing(tp: Type, refsText: Text, boxText: Text): Text = tp match + case tp: AppliedType if defn.isFunctionSymbol(tp.typeSymbol) && !printDebug => + boxText ~ toTextFunction(tp, refsText) + case tp: RefinedType if defn.isFunctionOrPolyType(tp) && !printDebug => + boxText ~ toTextMethodAsFunction(tp.refinedInfo, isPure = !tp.typeSymbol.name.isImpureFunction, refsText) + case _ => + super.toTextCapturing(tp, refsText, boxText) + override def toText[T <: Untyped](tree: Tree[T]): Text = controlled { import untpd._ @@ -869,7 +890,6 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { "()" case untpd.ValDefs(vparams @ (vparam :: _)) => "(" ~ keywordText("using ").provided(vparam.mods.is(Given)) - ~ keywordText("erased ").provided(vparam.mods.is(Erased)) ~ toText(vparams, ", ") ~ ")" case untpd.TypeDefs(tparams) => "[" ~ toText(tparams, ", ") ~ "]" @@ -895,30 +915,31 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { if isExtension then val paramss = if tree.name.isRightAssocOperatorName then + // If you change the names of the clauses below, also change them in right-associative-extension-methods.md // we have the following encoding of tree.paramss: - // (leadingTyParamss ++ leadingUsing - // ++ rightTyParamss ++ rightParamss - // ++ leftParamss ++ trailingUsing ++ rest) + // (leftTyParams ++ leadingUsing + // ++ rightTyParams ++ rightParam + // ++ leftParam ++ trailingUsing ++ rest) // e.g. // extension [A](using B)(c: C)(using D) // def %:[E](f: F)(g: G)(using H): Res = ??? // will have the following values: - // - leadingTyParamss = List(`[A]`) + // - leftTyParams = List(`[A]`) // - leadingUsing = List(`(using B)`) - // - rightTyParamss = List(`[E]`) - // - rightParamss = List(`(f: F)`) - // - leftParamss = List(`(c: C)`) + // - rightTyParams = List(`[E]`) + // - rightParam = List(`(f: F)`) + // - leftParam = List(`(c: C)`) // - trailingUsing = List(`(using D)`) // - rest = List(`(g: G)`, `(using H)`) - // we need to swap (rightTyParams ++ rightParamss) with (leftParamss ++ trailingUsing) - val (leadingTyParamss, rest1) = tree.paramss.span(isTypeParamClause) + // we need to swap (rightTyParams ++ rightParam) with (leftParam ++ trailingUsing) + val (leftTyParams, rest1) = tree.paramss.span(isTypeParamClause) val (leadingUsing, rest2) = rest1.span(isUsingClause) - val (rightTyParamss, rest3) = rest2.span(isTypeParamClause) - val (rightParamss, rest4) = rest3.splitAt(1) - val (leftParamss, rest5) = rest4.splitAt(1) + val (rightTyParams, rest3) = rest2.span(isTypeParamClause) + val (rightParam, rest4) = rest3.splitAt(1) + val (leftParam, rest5) = rest4.splitAt(1) val (trailingUsing, rest6) = rest5.span(isUsingClause) - if leftParamss.nonEmpty then - leadingTyParamss ::: leadingUsing ::: leftParamss ::: trailingUsing ::: rightTyParamss ::: rightParamss ::: rest6 + if leftParam.nonEmpty then + leftTyParams ::: leadingUsing ::: leftParam ::: trailingUsing ::: rightTyParams ::: rightParam ::: rest6 else tree.paramss // it wasn't a binary operator, after all. 
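The `SplicePattern` case above prints applied pattern splices as `f(args)` rather than `${...}`. For reference, a small quoted-pattern sketch producing both shapes (`f` and `x` are illustrative):

    import scala.quoted.*

    def analyze(e: Expr[Int => Int])(using Quotes): Expr[Int => Int] = e match
      case '{ (x: Int) => $f(x) } => f   // higher-order pattern: a SplicePattern with args, shown as `f(x)`
      case _                      => e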
else @@ -1030,7 +1051,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { else PrintableFlags(isType) if (homogenizedView && mods.flags.isTypeFlags) flagMask &~= GivenOrImplicit // drop implicit/given from classes val rawFlags = if (sym.exists) sym.flagsUNSAFE else mods.flags - if (rawFlags.is(Param)) flagMask = flagMask &~ Given &~ Erased + if (rawFlags.is(Param)) flagMask = flagMask &~ Given val flags = rawFlags & flagMask var flagsText = toTextFlags(sym, flags) val annotTexts = diff --git a/compiler/src/dotty/tools/dotc/printing/Texts.scala b/compiler/src/dotty/tools/dotc/printing/Texts.scala index 7c040a78de5e..475e2c6900d5 100644 --- a/compiler/src/dotty/tools/dotc/printing/Texts.scala +++ b/compiler/src/dotty/tools/dotc/printing/Texts.scala @@ -173,7 +173,7 @@ object Texts { /** A concatenation of elements in `xs` and interspersed with * separator strings `sep`. */ - def apply(xs: Traversable[Text], sep: String = " "): Text = + def apply(xs: Iterable[Text], sep: String = " "): Text = if (sep == "\n") lines(xs) else { val ys = xs.filterNot(_.isEmpty) @@ -182,7 +182,7 @@ object Texts { } /** The given texts `xs`, each on a separate line */ - def lines(xs: Traversable[Text]): Vertical = Vertical(xs.toList.reverse) + def lines(xs: Iterable[Text]): Vertical = Vertical(xs.toList.reverse) extension (text: => Text) def provided(cond: Boolean): Text = if (cond) text else Str("") diff --git a/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala b/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala index 38cecb7953b8..c9a77dbfa151 100644 --- a/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala +++ b/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala @@ -19,23 +19,27 @@ import dotty.tools.dotc.core.Denotations.staticRef import dotty.tools.dotc.core.Flags._ import dotty.tools.dotc.core.NameKinds.FlatName import dotty.tools.dotc.core.Names._ -import dotty.tools.dotc.core.StagingContext._ import dotty.tools.dotc.core.StdNames._ import dotty.tools.dotc.core.Symbols._ import dotty.tools.dotc.core.TypeErasure import dotty.tools.dotc.core.Types._ import dotty.tools.dotc.quoted._ -import dotty.tools.dotc.transform.TreeMapWithStages._ import dotty.tools.dotc.typer.ImportInfo.withRootImports import dotty.tools.dotc.util.SrcPos import dotty.tools.dotc.reporting.Message import dotty.tools.repl.AbstractFileClassLoader +import dotty.tools.dotc.core.CyclicReference /** Tree interpreter for metaprogramming constructs */ -class Interpreter(pos: SrcPos, classLoader: ClassLoader)(using Context): +class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context): import Interpreter._ import tpd._ + val classLoader = + if ctx.owner.topLevelClass.name.startsWith(str.REPL_SESSION_LINE) then + new AbstractFileClassLoader(ctx.settings.outputDir.value, classLoader0) + else classLoader0 + /** Local variable environment */ type Env = Map[Symbol, Object] def emptyEnv: Env = Map.empty @@ -122,7 +126,7 @@ class Interpreter(pos: SrcPos, classLoader: ClassLoader)(using Context): view.toList fnType.dealias match - case fnType: MethodType if fnType.isErasedMethod => interpretArgs(argss, fnType.resType) + case fnType: MethodType if fnType.hasErasedParams => interpretArgs(argss, fnType.resType) case fnType: MethodType => val argTypes = fnType.paramInfos assert(argss.head.size == argTypes.size) @@ -157,18 +161,12 @@ class Interpreter(pos: SrcPos, classLoader: ClassLoader)(using Context): args.toSeq private def interpretedStaticMethodCall(moduleClass: Symbol, fn: Symbol, args: List[Object]): 
Object = { - val (inst, clazz) = - try - if (moduleClass.name.startsWith(str.REPL_SESSION_LINE)) - (null, loadReplLineClass(moduleClass)) - else { - val inst = loadModule(moduleClass) - (inst, inst.getClass) - } + val inst = + try loadModule(moduleClass) catch case MissingClassDefinedInCurrentRun(sym) => suspendOnMissing(sym, pos) - + val clazz = inst.getClass val name = fn.name.asTermName val method = getMethod(clazz, name, paramsSig(fn)) stopIfRuntimeException(method.invoke(inst, args: _*), method) @@ -253,8 +251,14 @@ class Interpreter(pos: SrcPos, classLoader: ClassLoader)(using Context): } val shortStackTrace = targetException.getStackTrace.take(end + 1) targetException.setStackTrace(shortStackTrace) + targetException.printStackTrace(new PrintWriter(sw)) + + targetException match + case _: CyclicReference => sw.write("\nSee full stack trace using -Ydebug") + case _ => + } else { + targetException.printStackTrace(new PrintWriter(sw)) } - targetException.printStackTrace(new PrintWriter(sw)) sw.write("\n") throw new StopInterpretation(sw.toString.toMessage, pos) } @@ -336,7 +340,7 @@ object Interpreter: case fn: Ident => Some((tpd.desugarIdent(fn).withSpan(fn.span), Nil)) case fn: Select => Some((fn, Nil)) case Apply(f @ Call0(fn, args1), args2) => - if (f.tpe.widenDealias.isErasedMethod) Some((fn, args1)) + if (f.tpe.widenDealias.hasErasedParams) Some((fn, args1)) else Some((fn, args2 :: args1)) case TypeApply(Call0(fn, args), _) => Some((fn, args)) case _ => None diff --git a/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala b/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala index 20bcba417a5e..7596549fe401 100644 --- a/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala +++ b/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala @@ -100,9 +100,9 @@ object PickledQuotes { private def spliceTerms(tree: Tree, typeHole: TypeHole, termHole: ExprHole)(using Context): Tree = { def evaluateHoles = new TreeMap { override def transform(tree: tpd.Tree)(using Context): tpd.Tree = tree match { - case Hole(isTermHole, idx, args, _, _) => + case Hole(isTerm, idx, args, _) => inContext(SpliceScope.contextWithNewSpliceScope(tree.sourcePos)) { - if isTermHole then + if isTerm then val quotedExpr = termHole match case ExprHole.V1(evalHole) => evalHole.nn.apply(idx, reifyExprHoleV1Args(args), QuotesImpl()) @@ -165,7 +165,7 @@ object PickledQuotes { val tree = typeHole match case TypeHole.V1(evalHole) => tdef.rhs match - case TypeBoundsTree(_, Hole(_, idx, args, _, _), _) => + case TypeBoundsTree(_, Hole(_, idx, args, _), _) => // To keep for backwards compatibility. In some older versions holes were created in the bounds. val quotedType = evalHole.nn.apply(idx, reifyTypeHoleArgs(args)) PickledQuotes.quotedTypeToTree(quotedType) @@ -173,7 +173,7 @@ object PickledQuotes { // To keep for backwards compatibility. In some older versions we missed the creation of some holes.
tpt case TypeHole.V2(types) => - val Hole(_, idx, _, _, _) = tdef.rhs: @unchecked + val Hole(_, idx, _, _) = tdef.rhs: @unchecked PickledQuotes.quotedTypeToTree(types.nn.apply(idx)) (tdef.symbol, tree.tpe) }.toMap @@ -275,9 +275,7 @@ object PickledQuotes { QuotesCache(pickled) = tree // Make sure trees and positions are fully loaded - new TreeTraverser { - def traverse(tree: Tree)(using Context): Unit = traverseChildren(tree) - }.traverse(tree) + tree.foreachSubTree(identity) quotePickling.println(i"**** unpickled quote\n$tree") diff --git a/compiler/src/dotty/tools/dotc/report.scala b/compiler/src/dotty/tools/dotc/report.scala index c92fbe5daa56..38f2ab347c4c 100644 --- a/compiler/src/dotty/tools/dotc/report.scala +++ b/compiler/src/dotty/tools/dotc/report.scala @@ -4,13 +4,12 @@ import reporting._ import Diagnostic._ import util.{SourcePosition, NoSourcePosition, SrcPos} import core._ -import Contexts._, Symbols._, Decorators._ +import Contexts._, Flags.*, Symbols._, Decorators._ import config.SourceVersion import ast._ import config.Feature.sourceVersion import java.lang.System.currentTimeMillis - object report: /** For sending messages that are printed only if -verbose is set */ @@ -129,4 +128,64 @@ object report: case Nil => pos recur(pos.sourcePos, tpd.enclosingInlineds) + private object messageRendering extends MessageRendering + + // Should only be called from Run#enrichErrorMessage. + def enrichErrorMessage(errorMessage: String)(using Context): String = try { + def formatExplain(pairs: List[(String, Any)]) = pairs.map((k, v) => f"$k%20s: $v").mkString("\n") + + val settings = ctx.settings.userSetSettings(ctx.settingsState).sortBy(_.name) + val tree = ctx.tree + val sym = tree.symbol + val pos = tree.sourcePos + val path = pos.source.path + val site = ctx.outersIterator.map(_.owner).filter(sym => !sym.exists || sym.isClass || sym.is(Method)).next() + + import untpd.* + extension (tree: Tree) def summaryString: String = tree match + case Literal(const) => s"Literal($const)" + case Ident(name) => s"Ident(${name.decode})" + case Select(qual, name) => s"Select(${qual.summaryString}, ${name.decode})" + case tree: NameTree => (if tree.isType then "type " else "") + tree.name.decode + case tree => s"${tree.className}${if tree.symbol.exists then s"(${tree.symbol})" else ""}" + + val info1 = formatExplain(List( + "while compiling" -> ctx.compilationUnit, + "during phase" -> ctx.phase.prevMega, + "mode" -> ctx.mode, + "library version" -> scala.util.Properties.versionString, + "compiler version" -> dotty.tools.dotc.config.Properties.versionString, + "settings" -> settings.map(s => if s.value == "" then s"${s.name} \"\"" else s"${s.name} ${s.value}").mkString(" "), + )) + val symbolInfos = if sym eq NoSymbol then List("symbol" -> sym) else List( + "symbol" -> sym.showLocated, + "symbol definition" -> s"${sym.showDcl} (a ${sym.className})", + "symbol package" -> sym.enclosingPackageClass.fullName, + "symbol owners" -> sym.showExtendedLocation, + ) + val info2 = formatExplain(List( + "tree" -> tree.summaryString, + "tree position" -> (if pos.exists then s"$path:${pos.line + 1}:${pos.column}" else s"$path:"), + "tree type" -> tree.typeOpt.show, + ) ::: symbolInfos ::: List( + "call site" -> s"${site.showLocated} in ${site.enclosingPackageClass}" + )) + val context_s = try + s""" == Source file context for tree position == + | + |${messageRendering.messageAndPos(Diagnostic.Error("", pos))}""".stripMargin + catch case _: Exception => "" + s""" + | $errorMessage + | + | An unhandled exception was 
thrown in the compiler. + | Please file a crash report here: + | https://github.com/lampepfl/dotty/issues/new/choose + | + |$info1 + | + |$info2 + | + |$context_s""".stripMargin + } catch case _: Throwable => errorMessage // don't introduce new errors trying to report errors, so swallow exceptions end report diff --git a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala index 9f0d71645833..fc679210db17 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala @@ -189,6 +189,12 @@ enum ErrorMessageID(val isActive: Boolean = true) extends java.lang.Enum[ErrorMe case CannotBeAccessedID // errorNumber 173 case InlineGivenShouldNotBeFunctionID // errorNumber 174 case ValueDiscardingID // errorNumber 175 + case UnusedNonUnitValueID // errorNumber 176 + case ConstrProxyShadowsID // errorNumber 177 + case MissingArgumentListID // errorNumber 178 + case MatchTypeScrutineeCannotBeHigherKindedID // errorNumber 179 + case AmbiguousExtensionMethodID // errorNumber 180 + case UnqualifiedCallToAnyRefMethodID // errorNumber 181 def errorNumber = ordinal - 1 diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index e8029d790d0a..d205b816214c 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -245,7 +245,7 @@ extends NotFoundMsg(MissingIdentID) { } } -class TypeMismatch(found: Type, expected: Type, inTree: Option[untpd.Tree], addenda: => String*)(using Context) +class TypeMismatch(found: Type, expected: Type, inTree: Option[untpd.Tree], addenda: => String*)(using Context) extends TypeMismatchMsg(found, expected)(TypeMismatchID): def msg(using Context) = @@ -1305,6 +1305,37 @@ extends SyntaxMsg(VarArgsParamMustComeLastID) { import typer.Typer.BindingPrec +class ConstrProxyShadows(proxy: TermRef, shadowed: Type, shadowedIsApply: Boolean)(using Context) + extends ReferenceMsg(ConstrProxyShadowsID), NoDisambiguation: + + def clsString(using Context) = proxy.symbol.companionClass.showLocated + def shadowedString(using Context) = shadowed.termSymbol.showLocated + def appClause = if shadowedIsApply then " the apply method of" else "" + def appSuffix = if shadowedIsApply then ".apply" else "" + + def msg(using Context) = + i"""Reference to constructor proxy for $clsString + |shadows outer reference to $shadowedString + | + |The instance needs to be created with an explicit `new`.""" + + def explain(using Context) = + i"""There is an ambiguity in the meaning of the call + | + | ${proxy.symbol.name}(...) + | + |It could mean creating an instance of $clsString with + | + | new ${proxy.symbol.companionClass.name}(...) + | + |Or it could mean calling$appClause $shadowedString as in + | + | ${shadowed.termSymbol.name}$appSuffix(...)
+ | + |To disambiguate, use an explicit `new` if you mean the former, + |or use a full prefix for ${shadowed.termSymbol.name} if you mean the latter.""" +end ConstrProxyShadows + class AmbiguousReference(name: Name, newPrec: BindingPrec, prevPrec: BindingPrec, prevCtx: Context)(using Context) extends ReferenceMsg(AmbiguousReferenceID), NoDisambiguation { @@ -1328,21 +1359,32 @@ class AmbiguousReference(name: Name, newPrec: BindingPrec, prevPrec: BindingPrec } def msg(using Context) = - i"""|Reference to $name is ambiguous, - |it is both ${bindingString(newPrec, ctx)} + i"""|Reference to $name is ambiguous. + |It is both ${bindingString(newPrec, ctx)} |and ${bindingString(prevPrec, prevCtx, " subsequently")}""" def explain(using Context) = - i"""|The compiler can't decide which of the possible choices you - |are referencing with $name: A definition of lower precedence - |in an inner scope, or a definition with higher precedence in - |an outer scope. + val precedent = + if newPrec == prevPrec then """two name bindings of equal precedence + |were introduced in the same scope.""".stripMargin + else """a name binding of lower precedence + |in an inner scope cannot shadow a binding with higher precedence in + |an outer scope.""".stripMargin + + i"""|The identifier $name is ambiguous because $precedent + | + |The precedence of the different kinds of name bindings, from highest to lowest, is: + | - Definitions in an enclosing scope + | - Inherited definitions and top-level definitions in packages + | - Names introduced by import of a specific name + | - Names introduced by wildcard import + | - Definitions from packages in other files |Note: - | - Definitions in an enclosing scope take precedence over inherited definitions - | - Definitions take precedence over imports - | - Named imports take precedence over wildcard imports - | - You may replace a name when imported using - | ${hl("import")} scala.{ $name => ${name.show + "Tick"} } + | - As a rule, definitions take precedence over imports. + | - Definitions in an enclosing scope take precedence over inherited definitions, + | which can result in ambiguities in nested classes. 
+ | - When importing, you can avoid naming conflicts by renaming: + | ${hl("import")} scala.{$name => ${name.show}Tick} |""" } @@ -1392,6 +1434,15 @@ extends ReferenceMsg(AmbiguousOverloadID), NoDisambiguation { |""" } +class AmbiguousExtensionMethod(tree: untpd.Tree, expansion1: tpd.Tree, expansion2: tpd.Tree)(using Context) + extends ReferenceMsg(AmbiguousExtensionMethodID), NoDisambiguation: + def msg(using Context) = + i"""Ambiguous extension methods: + |both $expansion1 + |and $expansion2 + |are possible expansions of $tree""" + def explain(using Context) = "" + class ReassignmentToVal(name: Name)(using Context) extends TypeMsg(ReassignmentToValID) { def msg(using Context) = i"""Reassignment to val $name""" @@ -1459,6 +1510,16 @@ class MissingArgument(pname: Name, methString: String)(using Context) else s"missing argument for parameter $pname of $methString" def explain(using Context) = "" +class MissingArgumentList(method: String, sym: Symbol)(using Context) + extends TypeMsg(MissingArgumentListID) { + def msg(using Context) = + val symDcl = if sym.exists then "\n\n " + hl(sym.showDcl(using ctx.withoutColors)) else "" + i"missing argument list for $method$symDcl" + def explain(using Context) = { + i"""Unapplied methods are only converted to functions when a function type is expected.""" + } +} + class DoesNotConformToBound(tpe: Type, which: String, bound: Type)(using Context) extends TypeMismatchMsg( if which == "lower" then bound else tpe, @@ -1920,7 +1981,11 @@ class UnapplyInvalidReturnType(unapplyResult: Type, unapplyName: Name)(using Con |To be used as an extractor, an unapply method has to return a type that either: | - has members ${Magenta("isEmpty: Boolean")} and ${Magenta("get: S")} (usually an ${Green("Option[S]")}) | - is a ${Green("Boolean")} - | - is a ${Green("Product")} (like a ${Magenta("Tuple2[T1, T2]")}) + | - is a ${Green("Product")} (like a ${Magenta("Tuple2[T1, T2]")}) of arity i with i >= 1, and has members _1 to _i + | + |See: https://docs.scala-lang.org/scala3/reference/changed-features/pattern-matching.html#fixed-arity-extractors + | + |Examples: | |class A(val i: Int) | @@ -2223,6 +2288,16 @@ class PureExpressionInStatementPosition(stat: untpd.Tree, val exprOwner: Symbol) |It can be removed without changing the semantics of the program. This may indicate an error.""" } +class UnqualifiedCallToAnyRefMethod(stat: untpd.Tree, method: Symbol)(using Context) + extends Message(UnqualifiedCallToAnyRefMethodID) { + def kind = MessageKind.PotentialIssue + def msg(using Context) = i"Suspicious top-level unqualified call to ${hl(method.name.toString)}" + def explain(using Context) = + i"""Top-level unqualified calls to ${hl("AnyRef")} or ${hl("Any")} methods such as ${hl(method.name.toString)} are + |resolved to calls on ${hl("Predef")} or on imported methods. 
This might not be what + |you intended.""" +} + class TraitCompanionWithMutableStatic()(using Context) extends SyntaxMsg(TraitCompanionWithMutableStaticID) { def msg(using Context) = i"Companion of traits cannot define mutable @static fields" @@ -2554,13 +2629,115 @@ class MissingImplicitArgument( pt: Type, where: String, paramSymWithMethodCallTree: Option[(Symbol, tpd.Tree)] = None, - ignoredInstanceNormalImport: => Option[SearchSuccess] + ignoredInstanceNormalImport: => Option[SearchSuccess], + ignoredConvertibleImplicits: => Iterable[TermRef] )(using Context) extends TypeMsg(MissingImplicitArgumentID), ShowMatchTrace(pt): arg.tpe match case ambi: AmbiguousImplicits => withoutDisambiguation() case _ => + /** Format `raw` implicitNotFound or implicitAmbiguous argument, replacing + * all occurrences of `${X}` where `X` is in `paramNames` with the + * corresponding shown type in `args`. + */ + def userDefinedErrorString(raw: String, paramNames: List[String], args: List[Type])(using Context): String = + def translate(name: String): Option[String] = + val idx = paramNames.indexOf(name) + if (idx >= 0) Some(i"${args(idx)}") else None + """\$\{\s*([^}\s]+)\s*\}""".r.replaceAllIn(raw, (_: Regex.Match) match + case Regex.Groups(v) => quoteReplacement(translate(v).getOrElse("")).nn + ) + + /** @param rawMsg Message template with variables, e.g. "Variable A is ${A}" + * @param sym Symbol of the annotated type or of the method whose parameter was annotated + * @param substituteType Function substituting specific types for abstract types associated with variables, e.g A -> Int + */ + def formatAnnotationMessage(rawMsg: String, sym: Symbol, substituteType: Type => Type)(using Context): String = + val substitutableTypesSymbols = substitutableTypeSymbolsInScope(sym) + userDefinedErrorString( + rawMsg, + paramNames = substitutableTypesSymbols.map(_.name.unexpandedName.toString), + args = substitutableTypesSymbols.map(_.typeRef).map(substituteType) + ) + + /** Extract a user defined error message from a symbol `sym` + * with an annotation matching the given class symbol `cls`. + */ + def userDefinedMsg(sym: Symbol, cls: Symbol)(using Context) = + for + ann <- sym.getAnnotation(cls) + msg <- ann.argumentConstantString(0) + yield msg + + def userDefinedImplicitNotFoundTypeMessageFor(sym: Symbol)(using Context): Option[String] = + for + rawMsg <- userDefinedMsg(sym, defn.ImplicitNotFoundAnnot) + if Feature.migrateTo3 || sym != defn.Function1 + // Don't inherit "No implicit view available..." message if subtypes of Function1 are not treated as implicit conversions anymore + yield + val substituteType = (_: Type).asSeenFrom(pt, sym) + formatAnnotationMessage(rawMsg, sym, substituteType) + + /** Extracting the message from a method parameter, e.g. in + * + * trait Foo + * + * def foo(implicit @annotation.implicitNotFound("Foo is missing") foo: Foo): Any = ??? 
+ */ + def userDefinedImplicitNotFoundParamMessage(using Context): Option[String] = + paramSymWithMethodCallTree.flatMap: (sym, applTree) => + userDefinedMsg(sym, defn.ImplicitNotFoundAnnot).map: rawMsg => + val fn = tpd.funPart(applTree) + val targs = tpd.typeArgss(applTree).flatten + val methodOwner = fn.symbol.owner + val methodOwnerType = tpd.qualifier(fn).tpe + val methodTypeParams = fn.symbol.paramSymss.flatten.filter(_.isType) + val methodTypeArgs = targs.map(_.tpe) + val substituteType = (_: Type).asSeenFrom(methodOwnerType, methodOwner).subst(methodTypeParams, methodTypeArgs) + formatAnnotationMessage(rawMsg, sym.owner, substituteType) + + def userDefinedImplicitNotFoundTypeMessage(using Context): Option[String] = + def recur(tp: Type): Option[String] = tp match + case tp: TypeRef => + val sym = tp.symbol + userDefinedImplicitNotFoundTypeMessageFor(sym).orElse(recur(tp.info)) + case tp: ClassInfo => + tp.baseClasses.iterator + .map(userDefinedImplicitNotFoundTypeMessageFor) + .find(_.isDefined).flatten + case tp: TypeProxy => + recur(tp.superType) + case tp: AndType => + recur(tp.tp1).orElse(recur(tp.tp2)) + case _ => + None + recur(pt) + + /** The implicitNotFound annotation on the parameter, or else on the type. + * implicitNotFound message strings starting with `explain=` are intended for + * additional explanations, not the message proper. The leading `explain=` is + * dropped in this case. + * @param explain The message is used for an additional explanation, not + * the message proper. + */ + def userDefinedImplicitNotFoundMessage(explain: Boolean)(using Context): Option[String] = + val explainTag = "explain=" + def filter(msg: Option[String]) = msg match + case Some(str) => + if str.startsWith(explainTag) then + if explain then Some(str.drop(explainTag.length)) else None + else if explain then None + else msg + case None => None + filter(userDefinedImplicitNotFoundParamMessage) + .orElse(filter(userDefinedImplicitNotFoundTypeMessage)) + + object AmbiguousImplicitMsg { + def unapply(search: SearchSuccess): Option[String] = + userDefinedMsg(search.ref.symbol, defn.ImplicitAmbiguousAnnot) + } + def msg(using Context): String = def formatMsg(shortForm: String)(headline: String = shortForm) = arg match @@ -2584,29 +2761,6 @@ class MissingImplicitArgument( |But ${tpe.explanation}.""" case _ => headline - /** Format `raw` implicitNotFound or implicitAmbiguous argument, replacing - * all occurrences of `${X}` where `X` is in `paramNames` with the - * corresponding shown type in `args`. - */ - def userDefinedErrorString(raw: String, paramNames: List[String], args: List[Type]): String = { - def translate(name: String): Option[String] = { - val idx = paramNames.indexOf(name) - if (idx >= 0) Some(i"${args(idx)}") else None - } - - """\$\{\s*([^}\s]+)\s*\}""".r.replaceAllIn(raw, (_: Regex.Match) match { - case Regex.Groups(v) => quoteReplacement(translate(v).getOrElse("")).nn - }) - } - - /** Extract a user defined error message from a symbol `sym` - * with an annotation matching the given class symbol `cls`. - */ - def userDefinedMsg(sym: Symbol, cls: Symbol) = for { - ann <- sym.getAnnotation(cls) - msg <- ann.argumentConstantString(0) - } yield msg - def location(preposition: String) = if (where.isEmpty) "" else s" $preposition $where" def defaultAmbiguousImplicitMsg(ambi: AmbiguousImplicits) = @@ -2643,39 +2797,6 @@ class MissingImplicitArgument( userDefinedErrorString(raw, params, args) } - /** @param rawMsg Message template with variables, e.g. 
"Variable A is ${A}" - * @param sym Symbol of the annotated type or of the method whose parameter was annotated - * @param substituteType Function substituting specific types for abstract types associated with variables, e.g A -> Int - */ - def formatAnnotationMessage(rawMsg: String, sym: Symbol, substituteType: Type => Type): String = { - val substitutableTypesSymbols = substitutableTypeSymbolsInScope(sym) - - userDefinedErrorString( - rawMsg, - paramNames = substitutableTypesSymbols.map(_.name.unexpandedName.toString), - args = substitutableTypesSymbols.map(_.typeRef).map(substituteType) - ) - } - - /** Extracting the message from a method parameter, e.g. in - * - * trait Foo - * - * def foo(implicit @annotation.implicitNotFound("Foo is missing") foo: Foo): Any = ??? - */ - def userDefinedImplicitNotFoundParamMessage: Option[String] = paramSymWithMethodCallTree.flatMap { (sym, applTree) => - userDefinedMsg(sym, defn.ImplicitNotFoundAnnot).map { rawMsg => - val fn = tpd.funPart(applTree) - val targs = tpd.typeArgss(applTree).flatten - val methodOwner = fn.symbol.owner - val methodOwnerType = tpd.qualifier(fn).tpe - val methodTypeParams = fn.symbol.paramSymss.flatten.filter(_.isType) - val methodTypeArgs = targs.map(_.tpe) - val substituteType = (_: Type).asSeenFrom(methodOwnerType, methodOwner).subst(methodTypeParams, methodTypeArgs) - formatAnnotationMessage(rawMsg, sym.owner, substituteType) - } - } - /** Extracting the message from a type, e.g. in * * @annotation.implicitNotFound("Foo is missing") @@ -2683,37 +2804,6 @@ class MissingImplicitArgument( * * def foo(implicit foo: Foo): Any = ??? */ - def userDefinedImplicitNotFoundTypeMessage: Option[String] = - def recur(tp: Type): Option[String] = tp match - case tp: TypeRef => - val sym = tp.symbol - userDefinedImplicitNotFoundTypeMessageFor(sym).orElse(recur(tp.info)) - case tp: ClassInfo => - tp.baseClasses.iterator - .map(userDefinedImplicitNotFoundTypeMessageFor) - .find(_.isDefined).flatten - case tp: TypeProxy => - recur(tp.superType) - case tp: AndType => - recur(tp.tp1).orElse(recur(tp.tp2)) - case _ => - None - recur(pt) - - def userDefinedImplicitNotFoundTypeMessageFor(sym: Symbol): Option[String] = - for - rawMsg <- userDefinedMsg(sym, defn.ImplicitNotFoundAnnot) - if Feature.migrateTo3 || sym != defn.Function1 - // Don't inherit "No implicit view available..." message if subtypes of Function1 are not treated as implicit conversions anymore - yield - val substituteType = (_: Type).asSeenFrom(pt, sym) - formatAnnotationMessage(rawMsg, sym, substituteType) - - object AmbiguousImplicitMsg { - def unapply(search: SearchSuccess): Option[String] = - userDefinedMsg(search.ref.symbol, defn.ImplicitAmbiguousAnnot) - } - arg.tpe match case ambi: AmbiguousImplicits => (ambi.alt1, ambi.alt2) match @@ -2727,8 +2817,7 @@ class MissingImplicitArgument( i"""No implicit search was attempted${location("for")} |since the expected type $target is not specific enough""" case _ => - val shortMessage = userDefinedImplicitNotFoundParamMessage - .orElse(userDefinedImplicitNotFoundTypeMessage) + val shortMessage = userDefinedImplicitNotFoundMessage(explain = false) .getOrElse(defaultImplicitNotFoundMessage) formatMsg(shortMessage)() end msg @@ -2743,11 +2832,22 @@ class MissingImplicitArgument( // show all available additional info def hiddenImplicitNote(s: SearchSuccess) = i"\n\nNote: ${s.ref.symbol.showLocated} was not considered because it was not imported with `import given`." 
+ def showImplicitAndConversions(imp: TermRef, convs: Iterable[TermRef]) = + i"\n- ${imp.symbol.showDcl}${convs.map(c => "\n - " + c.symbol.showDcl).mkString}" + def noChainConversionsNote(ignoredConvertibleImplicits: Iterable[TermRef]): Option[String] = + Option.when(ignoredConvertibleImplicits.nonEmpty)( + i"\n\nNote: implicit conversions are not automatically applied to arguments of using clauses. " + + i"You will have to pass the argument explicitly.\n" + + i"The following implicits in scope can be implicitly converted to ${pt.show}:" + + ignoredConvertibleImplicits.map { imp => s"\n- ${imp.symbol.showDcl}"}.mkString + ) super.msgPostscript ++ ignoredInstanceNormalImport.map(hiddenImplicitNote) + .orElse(noChainConversionsNote(ignoredConvertibleImplicits)) .getOrElse(ctx.typer.importSuggestionAddendum(pt)) - def explain(using Context) = "" + def explain(using Context) = userDefinedImplicitNotFoundMessage(explain = true) + .getOrElse("") end MissingImplicitArgument class CannotBeAccessed(tpe: NamedType, superAccess: Boolean)(using Context) @@ -2795,3 +2895,14 @@ class ValueDiscarding(tp: Type)(using Context) def kind = MessageKind.PotentialIssue def msg(using Context) = i"discarded non-Unit value of type $tp" def explain(using Context) = "" + +class UnusedNonUnitValue(tp: Type)(using Context) + extends Message(UnusedNonUnitValueID): + def kind = MessageKind.PotentialIssue + def msg(using Context) = i"unused value of type $tp" + def explain(using Context) = "" + +class MatchTypeScrutineeCannotBeHigherKinded(tp: Type)(using Context) + extends TypeMsg(MatchTypeScrutineeCannotBeHigherKindedID) : + def msg(using Context) = i"the scrutinee of a match type cannot be higher-kinded" + def explain(using Context) = "" diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala index 3fb7a66dc89e..fe5c8d061c78 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala @@ -306,6 +306,13 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT } } + private def addInheritanceDependencies(tree: Closure)(using Context): Unit = + // If the tpt is empty, this is a non-SAM lambda, so no need to register + // an inheritance relationship. + if !tree.tpt.isEmpty then + val from = resolveDependencySource + _dependencies += ClassDependency(from, tree.tpt.tpe.classSymbol, LocalDependencyByInheritance) + private def addInheritanceDependencies(tree: Template)(using Context): Unit = if (tree.parents.nonEmpty) { val depContext = depContextOf(tree.symbol.owner) @@ -369,6 +376,8 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT case ref: RefTree => addMemberRefDependency(ref.symbol) addTypeDependency(ref.tpe) + case t: Closure => + addInheritanceDependencies(t) case t: Template => addInheritanceDependencies(t) case _ => diff --git a/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala b/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala index 071efb1fb91c..91614aaccad2 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala @@ -24,6 +24,7 @@ import scala.annotation.{ threadUnsafe => tu, tailrec } import scala.PartialFunction.condOpt import dotty.tools.dotc.{semanticdb => s} +import dotty.tools.io.{AbstractFile, JarArchive} /** Extract symbol references and uses to semanticdb files. 
* See https://scalameta.org/docs/semanticdb/specification.html#symbol-1 @@ -38,7 +39,9 @@ class ExtractSemanticDB extends Phase: override val description: String = ExtractSemanticDB.description override def isRunnable(using Context) = - super.isRunnable && ctx.settings.Xsemanticdb.value + import ExtractSemanticDB.{semanticdbTarget, outputDirectory} + def writesToOutputJar = semanticdbTarget.isEmpty && outputDirectory.isInstanceOf[JarArchive] + super.isRunnable && ctx.settings.Xsemanticdb.value && !writesToOutputJar // Check not needed since it does not transform trees override def isCheckable: Boolean = false @@ -187,7 +190,7 @@ class ExtractSemanticDB extends Phase: registerUseGuarded(None, privateWithin, spanOfSymbol(privateWithin, tree.span, tree.source), tree.source) else if !excludeSymbol(tree.symbol) then registerSymbol(tree.symbol, symbolKinds(tree)) - case tree: Template if tree.symbol.owner.is(Invisible) => + case tree: Template if tree.symbol != NoSymbol && tree.symbol.owner.is(Invisible) => // do nothing // exclude the symbols and synthetics generated by @main annotation // (main class generated by @main has `Invisible` flag, see `MainProxies.scala`). @@ -198,7 +201,7 @@ class ExtractSemanticDB extends Phase: val selfSpan = tree.self.span if selfSpan.exists && selfSpan.hasLength then traverse(tree.self) - if tree.symbol.owner.isEnumClass then + if tree.symbol != NoSymbol && tree.symbol.owner.isEnumClass then tree.body.foreachUntilImport(traverse).foreach(traverse) // the first import statement else tree.body.foreach(traverse) @@ -475,6 +478,13 @@ object ExtractSemanticDB: val name: String = "extractSemanticDB" val description: String = "extract info into .semanticdb files" + private def semanticdbTarget(using Context): Option[Path] = + Option(ctx.settings.semanticdbTarget.value) + .filterNot(_.isEmpty) + .map(Paths.get(_)) + + private def outputDirectory(using Context): AbstractFile = ctx.settings.outputDir.value + def write( source: SourceFile, occurrences: List[SymbolOccurrence], @@ -482,14 +492,8 @@ object ExtractSemanticDB: synthetics: List[Synthetic], )(using Context): Unit = def absolutePath(path: Path): Path = path.toAbsolutePath.normalize - val semanticdbTarget = - val semanticdbTargetSetting = ctx.settings.semanticdbTarget.value - absolutePath( - if semanticdbTargetSetting.isEmpty then ctx.settings.outputDir.value.jpath - else Paths.get(semanticdbTargetSetting) - ) val relPath = SourceFile.relativePath(source, ctx.settings.sourceroot.value) - val outpath = semanticdbTarget + val outpath = absolutePath(semanticdbTarget.getOrElse(outputDirectory.jpath)) .resolve("META-INF") .resolve("semanticdb") .resolve(relPath) diff --git a/compiler/src/dotty/tools/dotc/semanticdb/PPrint.scala b/compiler/src/dotty/tools/dotc/semanticdb/PPrint.scala index 6814d923a062..b53ee787f501 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/PPrint.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/PPrint.scala @@ -196,6 +196,10 @@ class SymbolInformationPrinter (symtab: PrinterSymtab): s"${pprint(caseType.key)} => ${pprint(caseType.body)}" }.mkString(", ") s"${pprint(scrutinee)} match { ${casesStr} }" + case LambdaType(tparams, res) => + val params = tparams.infos.map(_.displayName).mkString("[", ", ", "]") + val resType = normal(res) + s"$params =>> $resType" case x => "" diff --git a/compiler/src/dotty/tools/dotc/semanticdb/TypeOps.scala b/compiler/src/dotty/tools/dotc/semanticdb/TypeOps.scala index 0ccaab48889a..b0d032c7d83b 100644 --- 
a/compiler/src/dotty/tools/dotc/semanticdb/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/TypeOps.scala @@ -483,9 +483,23 @@ class TypeOps: case NoPrefix => s.Type.Empty - // Not yet supported - case _: HKTypeLambda => - s.Type.Empty + case lambda: HKTypeLambda => + val paramSyms: List[SemanticSymbol] = lambda.paramNames.zip(lambda.paramInfos).map { (paramName, bounds) => + // def x[T[_]] = ??? + if paramName.isWildcard then + WildcardTypeSymbol(sym, bounds).tap(registerFakeSymbol) + else + paramRefSymtab.lookup(lambda, paramName).getOrElse { + TypeParamRefSymbol(sym, paramName, bounds).tap(registerFakeSymbol) + } + } + val parameters = + paramSyms.sscopeOpt(using LinkMode.HardlinkChildren) + val resType = loop(lambda.resType) + s.LambdaType( + parameters, + resType + ) case tvar: TypeVar => loop(tvar.stripped) diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Type.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Type.scala index da24b4847e19..be9cc6034f2c 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Type.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Type.scala @@ -39,6 +39,7 @@ object Type { case __v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.ByNameType => __v.value case __v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.RepeatedType => __v.value case __v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.MatchType => __v.value + case __v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.LambdaType => __v.value case dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.Empty => Empty } override def toBase(__custom: dotty.tools.dotc.semanticdb.Type): dotty.tools.dotc.semanticdb.TypeMessage = dotty.tools.dotc.semanticdb.TypeMessage(__custom match { @@ -57,6 +58,7 @@ object Type { case __v: dotty.tools.dotc.semanticdb.ByNameType => dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.ByNameType(__v) case __v: dotty.tools.dotc.semanticdb.RepeatedType => dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.RepeatedType(__v) case __v: dotty.tools.dotc.semanticdb.MatchType => dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.MatchType(__v) + case __v: dotty.tools.dotc.semanticdb.LambdaType => dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.LambdaType(__v) case Empty => dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.Empty }) } @@ -129,6 +131,10 @@ final case class TypeMessage( val __value = sealedValue.matchType.get __size += 2 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize }; + if (sealedValue.lambdaType.isDefined) { + val __value = sealedValue.lambdaType.get + __size += 2 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize + }; __size } override def serializedSize: _root_.scala.Int = { @@ -231,6 +237,12 @@ final case class TypeMessage( _output__.writeUInt32NoTag(__m.serializedSize) __m.writeTo(_output__) }; + sealedValue.lambdaType.foreach { __v => + val __m = __v + _output__.writeTag(26, 2) + _output__.writeUInt32NoTag(__m.serializedSize) + __m.writeTo(_output__) + }; } def getTypeRef: dotty.tools.dotc.semanticdb.TypeRef = sealedValue.typeRef.getOrElse(dotty.tools.dotc.semanticdb.TypeRef.defaultInstance) def withTypeRef(__v: dotty.tools.dotc.semanticdb.TypeRef): TypeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.TypeRef(__v)) @@ -262,6 +274,8 @@ final case class TypeMessage( def withRepeatedType(__v: dotty.tools.dotc.semanticdb.RepeatedType): TypeMessage = 
copy(sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.RepeatedType(__v)) def getMatchType: dotty.tools.dotc.semanticdb.MatchType = sealedValue.matchType.getOrElse(dotty.tools.dotc.semanticdb.MatchType.defaultInstance) def withMatchType(__v: dotty.tools.dotc.semanticdb.MatchType): TypeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.MatchType(__v)) + def getLambdaType: dotty.tools.dotc.semanticdb.LambdaType = sealedValue.lambdaType.getOrElse(dotty.tools.dotc.semanticdb.LambdaType.defaultInstance) + def withLambdaType(__v: dotty.tools.dotc.semanticdb.LambdaType): TypeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.LambdaType(__v)) def clearSealedValue: TypeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.Empty) def withSealedValue(__v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue): TypeMessage = copy(sealedValue = __v) @@ -311,6 +325,8 @@ object TypeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc __sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.RepeatedType(__sealedValue.repeatedType.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.RepeatedType](_input__))(LiteParser.readMessage(_input__, _))) case 202 => __sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.MatchType(__sealedValue.matchType.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.MatchType](_input__))(LiteParser.readMessage(_input__, _))) + case 210 => + __sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.LambdaType(__sealedValue.lambdaType.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.LambdaType](_input__))(LiteParser.readMessage(_input__, _))) case tag => _input__.skipField(tag) } } @@ -345,6 +361,7 @@ object TypeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc def isByNameType: _root_.scala.Boolean = false def isRepeatedType: _root_.scala.Boolean = false def isMatchType: _root_.scala.Boolean = false + def isLambdaType: _root_.scala.Boolean = false def typeRef: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeRef] = _root_.scala.None def singleType: _root_.scala.Option[dotty.tools.dotc.semanticdb.SingleType] = _root_.scala.None def thisType: _root_.scala.Option[dotty.tools.dotc.semanticdb.ThisType] = _root_.scala.None @@ -360,6 +377,7 @@ object TypeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc def byNameType: _root_.scala.Option[dotty.tools.dotc.semanticdb.ByNameType] = _root_.scala.None def repeatedType: _root_.scala.Option[dotty.tools.dotc.semanticdb.RepeatedType] = _root_.scala.None def matchType: _root_.scala.Option[dotty.tools.dotc.semanticdb.MatchType] = _root_.scala.None + def lambdaType: _root_.scala.Option[dotty.tools.dotc.semanticdb.LambdaType] = _root_.scala.None } object SealedValue { @SerialVersionUID(0L) @@ -476,6 +494,13 @@ object TypeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc override def matchType: _root_.scala.Option[dotty.tools.dotc.semanticdb.MatchType] = Some(value) override def number: _root_.scala.Int = 25 } + @SerialVersionUID(0L) + final case class LambdaType(value: dotty.tools.dotc.semanticdb.LambdaType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual { + type ValueType = dotty.tools.dotc.semanticdb.LambdaType + override def isLambdaType: _root_.scala.Boolean = true + override def lambdaType: _root_.scala.Option[dotty.tools.dotc.semanticdb.LambdaType] = Some(value) + override def number: 
_root_.scala.Int = 26 + } } final val TYPE_REF_FIELD_NUMBER = 2 final val SINGLE_TYPE_FIELD_NUMBER = 20 @@ -492,6 +517,7 @@ object TypeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc final val BY_NAME_TYPE_FIELD_NUMBER = 13 final val REPEATED_TYPE_FIELD_NUMBER = 14 final val MATCH_TYPE_FIELD_NUMBER = 25 + final val LAMBDA_TYPE_FIELD_NUMBER = 26 def of( sealedValue: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue ): _root_.dotty.tools.dotc.semanticdb.TypeMessage = _root_.dotty.tools.dotc.semanticdb.TypeMessage( @@ -2034,3 +2060,107 @@ object MatchType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MatchType]) } + +@SerialVersionUID(0L) +final case class LambdaType( + parameters: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None, + returnType: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.LambdaType._typemapper_returnType.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) + ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + @transient @sharable + private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 + private[this] def __computeSerializedSize(): _root_.scala.Int = { + var __size = 0 + if (parameters.isDefined) { + val __value = parameters.get + __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize + }; + + { + val __value = dotty.tools.dotc.semanticdb.LambdaType._typemapper_returnType.toBase(returnType) + if (__value.serializedSize != 0) { + __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize + } + }; + __size + } + override def serializedSize: _root_.scala.Int = { + var __size = __serializedSizeMemoized + if (__size == 0) { + __size = __computeSerializedSize() + 1 + __serializedSizeMemoized = __size + } + __size - 1 + + } + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + parameters.foreach { __v => + val __m = __v + _output__.writeTag(1, 2) + _output__.writeUInt32NoTag(__m.serializedSize) + __m.writeTo(_output__) + }; + { + val __v = dotty.tools.dotc.semanticdb.LambdaType._typemapper_returnType.toBase(returnType) + if (__v.serializedSize != 0) { + _output__.writeTag(2, 2) + _output__.writeUInt32NoTag(__v.serializedSize) + __v.writeTo(_output__) + } + }; + } + def getParameters: dotty.tools.dotc.semanticdb.Scope = parameters.getOrElse(dotty.tools.dotc.semanticdb.Scope.defaultInstance) + def clearParameters: LambdaType = copy(parameters = _root_.scala.None) + def withParameters(__v: dotty.tools.dotc.semanticdb.Scope): LambdaType = copy(parameters = Option(__v)) + def withReturnType(__v: dotty.tools.dotc.semanticdb.Type): LambdaType = copy(returnType = __v) + + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.LambdaType]) +} + +object LambdaType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.LambdaType] { + implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.LambdaType] = this + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.LambdaType = { + var __parameters: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None + var __returnType: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None + var _done__ = false + while (!_done__) { + val _tag__ = 
_input__.readTag() + _tag__ match { + case 0 => _done__ = true + case 10 => + __parameters = Option(__parameters.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.Scope](_input__))(LiteParser.readMessage(_input__, _))) + case 18 => + __returnType = _root_.scala.Some(__returnType.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _))) + case tag => _input__.skipField(tag) + } + } + dotty.tools.dotc.semanticdb.LambdaType( + parameters = __parameters, + returnType = dotty.tools.dotc.semanticdb.LambdaType._typemapper_returnType.toCustom(__returnType.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)) + ) + } + + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.LambdaType( + parameters = _root_.scala.None, + returnType = dotty.tools.dotc.semanticdb.LambdaType._typemapper_returnType.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) + ) + final val PARAMETERS_FIELD_NUMBER = 1 + final val RETURN_TYPE_FIELD_NUMBER = 2 + @transient @sharable + private[semanticdb] val _typemapper_returnType: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type]] + def of( + parameters: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope], + returnType: dotty.tools.dotc.semanticdb.Type + ): _root_.dotty.tools.dotc.semanticdb.LambdaType = _root_.dotty.tools.dotc.semanticdb.LambdaType( + parameters, + returnType + ) + // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.LambdaType]) +} diff --git a/compiler/src/dotty/tools/dotc/staging/CrossStageSafety.scala b/compiler/src/dotty/tools/dotc/staging/CrossStageSafety.scala new file mode 100644 index 000000000000..8360d8e08211 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/staging/CrossStageSafety.scala @@ -0,0 +1,240 @@ +package dotty.tools.dotc +package staging + +import dotty.tools.dotc.ast.{tpd, untpd} +import dotty.tools.dotc.core.Annotations._ +import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.core.NameKinds._ +import dotty.tools.dotc.core.StdNames._ +import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.staging.StagingLevel.* +import dotty.tools.dotc.staging.QuoteTypeTags.* +import dotty.tools.dotc.util.Property +import dotty.tools.dotc.util.Spans._ +import dotty.tools.dotc.util.SrcPos + +/** Checks that staging level consistency holds and heals staged types. + * + * Local term references are level consistent if and only if they are used at the same level as their definition. + * + * Local type references can be used at the level of their definition or lower. If used at a higher level, + * it will be healed if possible; otherwise it is inconsistent. + * + * Healing a type consists in replacing locally defined types defined at staging level 0 and used in higher levels. + * For each local type `T` that is defined at level 0 and used in a quote, we summon a tag `t: Type[T]`. This `t` + * tag must be defined at level 0. The tags will be listed in the `tags` of the level 0 quote (`'{ ... }`) and + * each reference to `T` will be replaced by `t.Underlying` in the body of the quote. + * + * We delay the healing of types in quotes at level 1 or higher until those quotes reach level 0.
At this point + * more types will be statically known and fewer types will need to be healed. This also keeps the nested quotes + * in their original form; we do not want macro users to see any artifacts of this phase in quoted expressions + * they might inspect. + * + * Type heal example: + * + * '{ + * val x: List[T] = List[T]() + * '{ .. T .. } + * () + * } + * + * is transformed to + * + * '{ // where `t` is a given term of type `Type[T]` + * val x: List[t.Underlying] = List[t.Underlying](); + * '{ .. t.Underlying .. } + * () + * } + * + */ +class CrossStageSafety extends TreeMapWithStages { + import tpd._ + + private val InAnnotation = Property.Key[Unit]() + + override def transform(tree: Tree)(using Context): Tree = + if (tree.source != ctx.source && tree.source.exists) + transform(tree)(using ctx.withSource(tree.source)) + else tree match + case CancelledQuote(tree) => + transform(tree) // Optimization: `'{ $x }` --> `x` + case tree: Quote => + if (ctx.property(InAnnotation).isDefined) + report.error("Cannot have a quote in an annotation", tree.srcPos) + + val tree1 = + val stripAnnotationsDeep: TypeMap = new TypeMap: + def apply(tp: Type): Type = mapOver(tp.stripAnnots) + val bodyType1 = healType(tree.srcPos)(stripAnnotationsDeep(tree.bodyType)) + tree.withBodyType(bodyType1) + + if level == 0 then + val (tags, body1) = inContextWithQuoteTypeTags { transform(tree1.body)(using quoteContext) } + cpy.Quote(tree1)(body1, tags) + else + super.transform(tree1) + + case CancelledSplice(tree) => + transform(tree) // Optimization: `${ 'x }` --> `x` + case tree: Splice => + val body1 = transform(tree.expr)(using spliceContext) + val tpe1 = + if level == 0 then tree.tpe + else healType(tree.srcPos)(tree.tpe.widenTermRefExpr) + untpd.cpy.Splice(tree)(body1).withType(tpe1) + + case tree @ QuotedTypeOf(body) => + if (ctx.property(InAnnotation).isDefined) + report.error("Cannot have a quote in an annotation", tree.srcPos) + + if level == 0 then + val (tags, body1) = inContextWithQuoteTypeTags { transform(body)(using quoteContext) } + val quotes = transform(tree.args.head) + tags match + case tag :: Nil if body1.isType && body1.tpe =:= tag.tpe.select(tpnme.Underlying) => + tag // Optimization: `quoted.Type.of[x.Underlying](quotes)` --> `x` + case _ => + // `quoted.Type.of[<body>](<quotes>)` --> `'[<body>].apply(<quotes>)` + tpd.Quote(body1, tags).select(nme.apply).appliedTo(quotes).withSpan(tree.span) + else + super.transform(tree) + case _: DefDef if tree.symbol.isInlineMethod => + tree + + case _ if !inQuoteOrSpliceScope => + checkAnnotations(tree) // Check quotes in annotations + super.transform(tree) + + case _: TypeTree => + val tp1 = transformTypeAnnotationSplices(tree.tpe) + val healedType = healType(tree.srcPos)(tp1) + if healedType == tree.tpe then tree + else TypeTree(healedType).withSpan(tree.span) + case _: RefTree | _: SingletonTypeTree if tree.isType => + val healedType = healType(tree.srcPos)(tree.tpe) + if healedType == tree.tpe then tree + else TypeTree(healedType).withSpan(tree.span) + case tree: Ident if isWildcardArg(tree) => + tree.withType(healType(tree.srcPos)(tree.tpe)) + case tree: Ident => // this is a term Ident + checkLevelConsistency(tree) + tree + case tree: This => + checkLevelConsistency(tree) + tree + case _: AppliedTypeTree => + super.transform(tree) match + case tree1: AppliedTypeTree if tree1 ne tree => + // propagate healed types + tree1.withType(tree1.tpt.tpe.appliedTo(tree1.args.map(_.tpe))) + case tree1 => tree1 + case tree: ValOrDefDef => + checkAnnotations(tree) + healInfo(tree,
tree.tpt.srcPos) + super.transform(tree) + case tree: Bind => + checkAnnotations(tree) + healInfo(tree, tree.srcPos) + super.transform(tree) + case tree: UnApply => + super.transform(tree).withType(healType(tree.srcPos)(tree.tpe)) + case tree: TypeDef if tree.symbol.is(Case) && level > 0 => + report.error(reporting.CaseClassInInlinedCode(tree), tree) + super.transform(tree) + case _ => + super.transform(tree) + end transform + + def transformTypeAnnotationSplices(tp: Type)(using Context) = new TypeMap { + def apply(tp: Type): Type = tp match + case tp: AnnotatedType => + val newAnnotTree = transform(tp.annot.tree) + derivedAnnotatedType(tp, apply(tp.parent), tp.annot.derivedAnnotation(newAnnotTree)) + case _ => + mapOver(tp) + }.apply(tp) + + /** Check that annotations do not contain quotes and that splices are valid */ + private def checkAnnotations(tree: Tree)(using Context): Unit = + tree match + case tree: DefTree => + lazy val annotCtx = ctx.fresh.setProperty(InAnnotation, true).withOwner(tree.symbol) + for (annot <- tree.symbol.annotations) annot match + case annot: BodyAnnotation => annot // already checked in PrepareInlineable before the creation of the BodyAnnotation + case annot => transform(annot.tree)(using annotCtx) + case _ => + + /** Heal types in the info of the given tree */ + private def healInfo(tree: Tree, pos: SrcPos)(using Context): Unit = + tree.symbol.info = healType(pos)(tree.symbol.info) + + /** If the type refers to a locally defined symbol (either directly, or in a pickled type), + * check that its staging level matches the current level. + * - Static types and terms are allowed at any level. + * - If a type reference is used at a higher level, then it is inconsistent. + * Will attempt to heal before failing. + * - If a term reference is used at a higher level, then it is inconsistent. + * It cannot be healed because the term will not exist in any future stage. + * + * If `T` is a reference to a type at the wrong level, try to heal it by replacing it with + * a type tag of type `quoted.Type[T]`. + * The tag is generated by an instance of `QuoteTypeTags` directly if the splice is explicit + * or indirectly by `tryHeal`. + */ + protected def healType(pos: SrcPos)(tpe: Type)(using Context) = + new HealType(pos).apply(tpe) + + /** Check level consistency of term references */ + private def checkLevelConsistency(tree: Ident | This)(using Context): Unit = + new TypeTraverser { + def traverse(tp: Type): Unit = + tp match + case tp @ TermRef(NoPrefix, _) if !tp.symbol.isStatic && level != levelOf(tp.symbol) => + levelError(tp.symbol, tp, tree.srcPos) + case tp: ThisType if level != -1 && level != levelOf(tp.cls) => + levelError(tp.cls, tp, tree.srcPos) + case tp: AnnotatedType => + traverse(tp.parent) + case _ if tp.typeSymbol.is(Package) => + // OK + case _ => + traverseChildren(tp) + }.traverse(tree.tpe) + + private def levelError(sym: Symbol, tp: Type, pos: SrcPos)(using Context): tp.type = { + def symStr = + if (!tp.isInstanceOf[ThisType]) sym.show + else if (sym.is(ModuleClass)) sym.sourceModule.show + else i"${sym.name}.this" + val hint = + if sym.is(Inline) && levelOf(sym) < level then + "\n\n" + + "Hint: Staged references to inline definitions in quotes are only inlined after the quote is spliced into level 0 code by a macro. " + + "Try moving this inline definition to a statically accessible location such as an object (this definition can be private)."
+ else "" + report.error( + em"""access to $symStr from wrong staging level: + | - the definition is at level ${levelOf(sym)}, + | - but the access is at level $level.$hint""", pos) + tp + } + + private object CancelledQuote: + def unapply(tree: Quote): Option[Tree] = + def rec(tree: Tree): Option[Tree] = tree match + case Block(Nil, expr) => rec(expr) + case Splice(inner) => Some(inner) + case _ => None + rec(tree.body) + + private object CancelledSplice: + def unapply(tree: Splice): Option[Tree] = + def rec(tree: Tree): Option[Tree] = tree match + case Block(Nil, expr) => rec(expr) + case Quote(inner, _) => Some(inner) + case _ => None + rec(tree.expr) +} diff --git a/compiler/src/dotty/tools/dotc/staging/HealType.scala b/compiler/src/dotty/tools/dotc/staging/HealType.scala new file mode 100644 index 000000000000..7d3ca0ad2f63 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/staging/HealType.scala @@ -0,0 +1,111 @@ +package dotty.tools.dotc +package staging + +import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.core.StdNames._ +import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.staging.StagingLevel.* +import dotty.tools.dotc.staging.QuoteTypeTags.* +import dotty.tools.dotc.transform.SymUtils._ +import dotty.tools.dotc.typer.Implicits.SearchFailureType +import dotty.tools.dotc.util.SrcPos + +class HealType(pos: SrcPos)(using Context) extends TypeMap { + + /** If the type refers to a locally defined symbol (either directly, or in a pickled type), + * check that its staging level matches the current level. + * - Static types and term are allowed at any level. + * - If a type reference is used a higher level, then it is inconsistent. + * Will attempt to heal before failing. + * - If a term reference is used a higher level, then it is inconsistent. + * It cannot be healed because the term will not exist in any future stage. + * + * If `T` is a reference to a type at the wrong level, try to heal it by replacing it with + * a type tag of type `quoted.Type[T]`. + * The tag is recorded by an instance of `QuoteTypeTags` directly if the splice is explicit + * or indirectly by `tryHeal`. + */ + def apply(tp: Type): Type = + tp match + case NonSpliceAlias(aliased) => this.apply(aliased) + case tp: TypeRef => healTypeRef(tp) + case tp: TermRef => + val inconsistentRoot = levelInconsistentRootOfPath(tp) + if inconsistentRoot.exists then levelError(inconsistentRoot, tp, pos) + else tp + case tp: AnnotatedType => + derivedAnnotatedType(tp, apply(tp.parent), tp.annot) + case _ => + mapOver(tp) + + private def healTypeRef(tp: TypeRef): Type = + tp.prefix match + case prefix: TermRef if tp.symbol.isTypeSplice => + checkNotWildcardSplice(tp) + if level == 0 then tp else getTagRef(prefix) + case _: TermRef | _: ThisType | NoPrefix => + if levelInconsistentRootOfPath(tp).exists then + tryHeal(tp) + else + tp + case _ => + mapOver(tp) + + private object NonSpliceAlias: + def unapply(tp: TypeRef)(using Context): Option[Type] = tp.underlying match + case TypeAlias(alias) if !tp.symbol.isTypeSplice => Some(alias) + case _ => None + + private def checkNotWildcardSplice(splice: TypeRef): Unit = + splice.prefix.termSymbol.info.argInfos match + case (tb: TypeBounds) :: _ => report.error(em"Cannot stage $splice because it is an alias to a wildcard type", pos) + case _ => + + /** Return the root of this path if it is a variable defined in a previous level. 
+ * If the path is consistent, return NoSymbol. + */ + private def levelInconsistentRootOfPath(tp: Type)(using Context): Symbol = + tp match + case tp @ NamedType(NoPrefix, _) if level > levelOf(tp.symbol) => tp.symbol + case tp: NamedType if !tp.symbol.isStatic => levelInconsistentRootOfPath(tp.prefix) + case tp: ThisType if level > levelOf(tp.cls) => tp.cls + case _ => NoSymbol + + /** Try to heal a reference to type `T` used at a higher level than its definition. + * Returns a reference to a type tag generated by `QuoteTypeTags` that contains a + * reference to a type alias containing the equivalent of `${summon[quoted.Type[T]]}.Underlying`. + * Emits an error if `T` cannot be healed and returns `T`. + */ + protected def tryHeal(tp: TypeRef): Type = { + val reqType = defn.QuotedTypeClass.typeRef.appliedTo(tp) + val tag = ctx.typer.inferImplicitArg(reqType, pos.span) + tag.tpe match + case tp: TermRef => + ctx.typer.checkStable(tp, pos, "type witness") + if levelOf(tp.symbol) > 0 then tp.select(tpnme.Underlying) + else getTagRef(tp) + case _: SearchFailureType => + report.error( + ctx.typer.missingArgMsg(tag, reqType, "") + .prepend(i"Reference to $tp within quotes requires a given $reqType in scope.\n") + .append("\n"), + pos) + tp + case _ => + report.error(em"""Reference to $tp within quotes requires a given $reqType in scope. + | + |""", pos) + tp + } + + private def levelError(sym: Symbol, tp: Type, pos: SrcPos): tp.type = { + report.error( + em"""access to $sym from wrong staging level: + | - the definition is at level ${levelOf(sym)}, + | - but the access is at level $level""", pos) + tp + } +} diff --git a/compiler/src/dotty/tools/dotc/staging/QuoteTypeTags.scala b/compiler/src/dotty/tools/dotc/staging/QuoteTypeTags.scala new file mode 100644 index 000000000000..0b5032ea5a6d --- /dev/null +++ b/compiler/src/dotty/tools/dotc/staging/QuoteTypeTags.scala @@ -0,0 +1,24 @@ +package dotty.tools.dotc.staging + +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.StdNames._ +import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.staging.StagingLevel.* +import dotty.tools.dotc.util.Property + +import scala.collection.mutable.LinkedHashSet + +object QuoteTypeTags: + + private val TaggedTypes = new Property.Key[LinkedHashSet[TermRef]] + + def inContextWithQuoteTypeTags(body: Context ?=> tpd.Tree)(using Context): (List[tpd.Tree], tpd.Tree) = + val tags = LinkedHashSet.empty[TermRef] + val transformed = body(using ctx.fresh.setProperty(TaggedTypes, tags)) + (tags.toList.map(tpd.ref(_)), transformed) + + def getTagRef(spliced: TermRef)(using Context): Type = + ctx.property(TaggedTypes).get += spliced + spliced.select(tpnme.Underlying) diff --git a/compiler/src/dotty/tools/dotc/staging/StagingLevel.scala b/compiler/src/dotty/tools/dotc/staging/StagingLevel.scala new file mode 100644 index 000000000000..05b3efab408c --- /dev/null +++ b/compiler/src/dotty/tools/dotc/staging/StagingLevel.scala @@ -0,0 +1,52 @@ +package dotty.tools.dotc +package staging + +import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.util.Property +import dotty.tools.dotc.util.SrcPos + +import scala.collection.mutable + +object StagingLevel { + + /** A key to be used in a context property that tracks the staging level */ + private val LevelKey = new
Property.Key[Int] + + /** A key to be used in a context property that caches the `levelOf` mapping */ + private val LevelOfKey = new Property.Key[Map[Symbol, Int]] + + /** The staging level in the current context, 0 at the top level. */ + def level(using Context): Int = + ctx.property(LevelKey).getOrElse(0) + + /** Context with an incremented staging level. */ + def quoteContext(using Context): FreshContext = + ctx.fresh.setProperty(LevelKey, level + 1) + + /** Context with a decremented staging level. */ + def spliceContext(using Context): FreshContext = + ctx.fresh.setProperty(LevelKey, level - 1) + + /** Whether we are inside a quote or a splice */ + def inQuoteOrSpliceScope(using Context): Boolean = + ctx.property(LevelKey).isDefined + + /** The quotation level of the definition of the locally defined symbol */ + def levelOf(sym: Symbol)(using Context): Int = + ctx.property(LevelOfKey) match + case Some(map) => map.getOrElse(sym, 0) + case None => 0 + + /** Context with the current staging level set for the symbols */ + def symbolsInCurrentLevel(syms: List[Symbol])(using Context): Context = + if level == 0 then ctx + else + val levelOfMap = ctx.property(LevelOfKey).getOrElse(Map.empty) + val newMap = syms.foldLeft(levelOfMap)((acc, sym) => acc.updated(sym, level)) + ctx.fresh.setProperty(LevelOfKey, newMap) +} diff --git a/compiler/src/dotty/tools/dotc/staging/TreeMapWithStages.scala b/compiler/src/dotty/tools/dotc/staging/TreeMapWithStages.scala new file mode 100644 index 000000000000..674dfff2f642 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/staging/TreeMapWithStages.scala @@ -0,0 +1,49 @@ +package dotty.tools.dotc +package staging + +import dotty.tools.dotc.ast.{TreeMapWithImplicits, tpd} +import dotty.tools.dotc.config.Printers.staging +import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.staging.StagingLevel.* + +import scala.collection.mutable + +/** TreeMap that keeps track of staging levels using StagingLevel.
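+ * + * Definitions are registered at the level at which they are created: e.g. the pattern variables of a `CaseDef`, + * the parameters of a `DefDef` and the declarations of a `Template` are mapped to the current `level` + * before their enclosing tree is transformed (via `symbolsInCurrentLevel`).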
*/ +abstract class TreeMapWithStages extends TreeMapWithImplicits { + import tpd._ + + override def transform(tree: Tree)(using Context): Tree = + if (tree.source != ctx.source && tree.source.exists) + transform(tree)(using ctx.withSource(tree.source)) + else reporting.trace(i"TreeMapWithStages.transform $tree at $level", staging, show = true) { + tree match { + case Block(stats, _) => + val defSyms = stats.collect { case defTree: DefTree => defTree.symbol } + super.transform(tree)(using symbolsInCurrentLevel(defSyms)) + + case CaseDef(pat, guard, body) => + super.transform(tree)(using symbolsInCurrentLevel(tpd.patVars(pat))) + + case (_:Import | _:Export) => + tree + + case _: Template => + val decls = tree.symbol.owner.info.decls.toList + super.transform(tree)(using symbolsInCurrentLevel(decls)) + + case LambdaTypeTree(tparams, body) => + super.transform(tree)(using symbolsInCurrentLevel(tparams.map(_.symbol))) + + case tree: DefTree => + val paramSyms = tree match + case tree: DefDef => tree.paramss.flatten.map(_.symbol) + case _ => Nil + super.transform(tree)(using symbolsInCurrentLevel(tree.symbol :: paramSyms)) + + case _ => + super.transform(tree) + } + } +} diff --git a/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala b/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala index 14362260d032..3175ffceae49 100644 --- a/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala +++ b/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala @@ -71,7 +71,7 @@ abstract class AccessProxies { def needsAccessor(sym: Symbol)(using Context): Boolean def ifNoHost(reference: RefTree)(using Context): Tree = { - assert(false, "no host found for $reference with ${reference.symbol.showLocated} from ${ctx.owner}") + assert(false, i"no host found for $reference with ${reference.symbol.showLocated} from ${ctx.owner}") reference } diff --git a/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala b/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala index 7ac3dc972ad1..b8cbb4367db4 100644 --- a/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala +++ b/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala @@ -13,13 +13,14 @@ import scala.collection.mutable.ListBuffer /** Rewrite an application * - * (((x1, ..., xn) => b): T)(y1, ..., yn) + * (([X1, ..., Xm] => (x1, ..., xn) => b): T)[T1, ..., Tm](y1, ..., yn) * * where * * - all yi are pure references without a prefix * - the closure can also be contextual or erased, but cannot be a SAM type - * _ the type ascription ...: T is optional + * - the type parameters Xi and type arguments Ti are optional + * - the type ascription ...: T is optional * * to * @@ -38,14 +39,10 @@ class BetaReduce extends MiniPhase: override def description: String = BetaReduce.description - override def transformApply(app: Apply)(using Context): Tree = app.fun match - case Select(fn, nme.apply) if defn.isFunctionType(fn.tpe) => - val app1 = BetaReduce(app, fn, app.args) - if app1 ne app then report.log(i"beta reduce $app -> $app1") - app1 - case _ => - app - + override def transformApply(app: Apply)(using Context): Tree = + val app1 = BetaReduce(app) + if app1 ne app then report.log(i"beta reduce $app -> $app1") + app1 object BetaReduce: import ast.tpd._ @@ -53,36 +50,77 @@ object BetaReduce: val name: String = "betaReduce" val description: String = "reduce closure applications" - /** Beta-reduces a call to `fn` with arguments `argSyms` or returns `tree` */ - def apply(original: Tree, fn: Tree, args: List[Tree])(using Context): Tree = - fn 
match - case Typed(expr, _) => - BetaReduce(original, expr, args) - case Block((anonFun: DefDef) :: Nil, closure: Closure) => - BetaReduce(anonFun, args) - case Block(stats, expr) => - val tree = BetaReduce(original, expr, args) - if tree eq original then original - else cpy.Block(fn)(stats, tree) - case Inlined(call, bindings, expr) => - val tree = BetaReduce(original, expr, args) - if tree eq original then original - else cpy.Inlined(fn)(call, bindings, tree) + /** Rewrite an application + * + * ((x1, ..., xn) => b)(e1, ..., en) + * + * to + * + * val/def x1 = e1; ...; val/def xn = en; b + * + * where `def` is used for call-by-name parameters. However, we shortcut any NoPrefix + * refs among the ei's directly without creating an intermediate binding. + * + * Similarly, rewrites type applications + * + * ([X1, ..., Xm] => (x1, ..., xn) => b).apply[T1, ..., Tm](e1, ..., en) + * + * to + * + * type X1 = T1; ...; type Xm = Tm; val/def x1 = e1; ...; val/def xn = en; b + * + * This beta-reduction preserves the integrity of `Inlined` tree nodes. + */ + def apply(tree: Tree)(using Context): Tree = + val bindingsBuf = new ListBuffer[DefTree] + def recur(fn: Tree, argss: List[List[Tree]]): Option[Tree] = fn match + case Block((ddef : DefDef) :: Nil, closure: Closure) if ddef.symbol == closure.meth.symbol => + Some(reduceApplication(ddef, argss, bindingsBuf)) + case Block((TypeDef(_, template: Template)) :: Nil, Typed(Apply(Select(New(_), _), _), _)) if template.constr.rhs.isEmpty => + template.body match + case (ddef: DefDef) :: Nil => Some(reduceApplication(ddef, argss, bindingsBuf)) + case _ => None + case Block(stats, expr) if stats.forall(isPureBinding) => + recur(expr, argss).map(cpy.Block(fn)(stats, _)) + case Inlined(call, bindings, expr) if bindings.forall(isPureBinding) => + recur(expr, argss).map(cpy.Inlined(fn)(call, bindings, _)) + case Typed(expr, tpt) => + recur(expr, argss) + case TypeApply(Select(expr, nme.asInstanceOfPM), List(tpt)) => + recur(expr, argss) + case _ => None + tree match + case Apply(Select(fn, nme.apply), args) if defn.isFunctionType(fn.tpe) => + recur(fn, List(args)) match + case Some(reduced) => + seq(bindingsBuf.result(), reduced).withSpan(tree.span) + case None => + tree + case Apply(TypeApply(Select(fn, nme.apply), targs), args) if fn.tpe.typeSymbol eq dotc.core.Symbols.defn.PolyFunctionClass => + recur(fn, List(targs, args)) match + case Some(reduced) => + seq(bindingsBuf.result(), reduced).withSpan(tree.span) + case None => + tree case _ => - original - end apply - - /** Beta-reduces a call to `ddef` with arguments `args` */ - def apply(ddef: DefDef, args: List[Tree])(using Context) = - val bindings = new ListBuffer[ValDef]() - val expansion1 = reduceApplication(ddef, args, bindings) - val bindings1 = bindings.result() - seq(bindings1, expansion1) + tree /** Beta-reduces a call to `ddef` with arguments `args` and registers new bindings */ - def reduceApplication(ddef: DefDef, args: List[Tree], bindings: ListBuffer[ValDef])(using Context): Tree = - val vparams = ddef.termParamss.iterator.flatten.toList - assert(args.hasSameLengthAs(vparams)) + def reduceApplication(ddef: DefDef, argss: List[List[Tree]], bindings: ListBuffer[DefTree])(using Context): Tree = + val (targs, args) = argss.flatten.partition(_.isType) + val tparams = ddef.leadingTypeParams + val vparams = ddef.termParamss.flatten + + val targSyms = + for (targ, tparam) <- targs.zip(tparams) yield + targ.tpe.dealias match + case ref @ TypeRef(NoPrefix, _) => + ref.symbol + case _ => + val binding =
TypeDef(newSymbol(ctx.owner, tparam.name, EmptyFlags, TypeAlias(targ.tpe), coord = targ.span)).withSpan(targ.span) + bindings += binding + binding.symbol + val argSyms = for (arg, param) <- args.zip(vparams) yield arg.tpe.dealias match @@ -90,7 +128,10 @@ object BetaReduce: ref.symbol case _ => val flags = Synthetic | (param.symbol.flags & Erased) - val tpe = if arg.tpe.dealias.isInstanceOf[ConstantType] then arg.tpe.dealias else arg.tpe.widen + val tpe = + if arg.tpe.isBottomType then param.tpe.widenTermRefExpr + else if arg.tpe.dealias.isInstanceOf[ConstantType] then arg.tpe.dealias + else arg.tpe.widen val binding = ValDef(newSymbol(ctx.owner, param.name, flags, tpe, coord = arg.span), arg).withSpan(arg.span) if !(tpe.isInstanceOf[ConstantType] && isPureExpr(arg)) then bindings += binding @@ -99,8 +140,8 @@ object BetaReduce: val expansion = TreeTypeMap( oldOwners = ddef.symbol :: Nil, newOwners = ctx.owner :: Nil, - substFrom = vparams.map(_.symbol), - substTo = argSyms + substFrom = (tparams ::: vparams).map(_.symbol), + substTo = targSyms ::: argSyms ).transform(ddef.rhs) val expansion1 = new TreeMap { diff --git a/compiler/src/dotty/tools/dotc/transform/Bridges.scala b/compiler/src/dotty/tools/dotc/transform/Bridges.scala index e302170991f9..569b16681cde 100644 --- a/compiler/src/dotty/tools/dotc/transform/Bridges.scala +++ b/compiler/src/dotty/tools/dotc/transform/Bridges.scala @@ -129,9 +129,12 @@ class Bridges(root: ClassSymbol, thisPhase: DenotTransformer)(using Context) { assert(ctx.typer.isInstanceOf[Erasure.Typer]) ctx.typer.typed(untpd.cpy.Apply(ref)(ref, args), member.info.finalResultType) else - val defn.ContextFunctionType(argTypes, resType, isErased) = tp: @unchecked + val defn.ContextFunctionType(argTypes, resType, erasedParams) = tp: @unchecked val anonFun = newAnonFun(ctx.owner, - MethodType(if isErased then Nil else argTypes, resType), + MethodType( + argTypes.zip(erasedParams.padTo(argTypes.length, false)) + .flatMap((t, e) => if e then None else Some(t)), + resType), coord = ctx.owner.coord) anonFun.info = transformInfo(anonFun, anonFun.info) diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala index 69ec9f0d7b2b..bd521c8679d0 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala @@ -27,6 +27,7 @@ import dotty.tools.dotc.core.Symbols.Symbol import dotty.tools.dotc.core.StdNames.nme import scala.math.Ordering + /** * A compiler phase that checks for unused imports or definitions * @@ -146,6 +147,13 @@ class CheckUnused private (phaseMode: CheckUnused.PhaseMode, suffix: String, _ke if !tree.isInstanceOf[tpd.InferredTypeTree] then typeTraverser(unusedDataApply).traverse(tree.tpe) ctx + override def prepareForAssign(tree: tpd.Assign)(using Context): Context = + unusedDataApply{ ud => + val sym = tree.lhs.symbol + if sym.exists then + ud.registerSetVar(sym) + } + // ========== MiniPhase Transform ========== override def transformBlock(tree: tpd.Block)(using Context): tpd.Tree = @@ -172,6 +180,7 @@ class CheckUnused private (phaseMode: CheckUnused.PhaseMode, suffix: String, _ke unusedDataApply(_.removeIgnoredUsage(tree.symbol)) tree + // ---------- MiniPhase HELPERS ----------- private def pushInBlockTemplatePackageDef(tree: tpd.Block | tpd.Template | tpd.PackageDef)(using Context): Context = @@ -215,11 +224,11 @@ class CheckUnused private (phaseMode: CheckUnused.PhaseMode, suffix: String, _ke case sel: Select => 
prepareForSelect(sel) traverseChildren(tree)(using newCtx) - case _: (tpd.Block | tpd.Template | tpd.PackageDef) => + case tree: (tpd.Block | tpd.Template | tpd.PackageDef) => //! DIFFERS FROM MINIPHASE - unusedDataApply { ud => - ud.inNewScope(ScopeType.fromTree(tree))(traverseChildren(tree)(using newCtx)) - } + pushInBlockTemplatePackageDef(tree) + traverseChildren(tree)(using newCtx) + popOutBlockTemplatePackageDef() case t:tpd.ValDef => prepareForValDef(t) traverseChildren(tree)(using newCtx) @@ -235,6 +244,9 @@ case t: tpd.Bind => prepareForBind(t) traverseChildren(tree)(using newCtx) + case t:tpd.Assign => + prepareForAssign(t) + traverseChildren(tree) case _: tpd.InferredTypeTree => case t@tpd.TypeTree() => //! DIFFERS FROM MINIPHASE @@ -278,6 +290,10 @@ report.warning(s"unused private member", t) case UnusedSymbol(t, _, WarnTypes.PatVars) => report.warning(s"unused pattern variable", t) + case UnusedSymbol(t, _, WarnTypes.UnsetLocals) => + report.warning(s"unset local variable", t) + case UnusedSymbol(t, _, WarnTypes.UnsetPrivates) => + report.warning(s"unset private variable", t) } end CheckUnused @@ -297,6 +313,8 @@ case ImplicitParams case PrivateMembers case PatVars + case UnsetLocals + case UnsetPrivates /** * The key used to retrieve the "unused entity" analysis metadata, @@ -343,12 +361,8 @@ private val implicitParamInScope = MutSet[tpd.MemberDef]() private val patVarsInScope = MutSet[tpd.Bind]() - /* Unused collection collected at the end */ - private val unusedLocalDef = MutSet[tpd.MemberDef]() - private val unusedPrivateDef = MutSet[tpd.MemberDef]() - private val unusedExplicitParams = MutSet[tpd.MemberDef]() - private val unusedImplicitParams = MutSet[tpd.MemberDef]() - private val unusedPatVars = MutSet[tpd.Bind]() + /** All variables that are set (assigned) somewhere */ + private val setVars = MutSet[Symbol]() /** All used symbols */ private val usedDef = MutSet[Symbol]() @@ -360,15 +374,6 @@ private val paramsToSkip = MutSet[Symbol]() - /** - * Push a new Scope of the given type, executes the given Unit and - * pop it back to the original type.
- */ - def inNewScope(newScope: ScopeType)(execInNewScope: => Unit)(using Context): Unit = - val prev = currScopeType - pushScope(newScope) - execInNewScope - popScope() def finishAggregation(using Context)(): Unit = val unusedInThisStage = this.getUnused @@ -443,6 +448,9 @@ object CheckUnused: impInScope.push(MutSet()) usedInScope.push(MutSet()) + def registerSetVar(sym: Symbol): Unit = + setVars += sym + /** * leave the current scope and do : * @@ -501,15 +509,19 @@ object CheckUnused: unusedImport.map(d => UnusedSymbol(d.srcPos, d.name, WarnTypes.Imports)).toList else Nil - val sortedLocalDefs = + // Partition to extract unset local variables from usedLocalDefs + val (usedLocalDefs, unusedLocalDefs) = if ctx.settings.WunusedHas.locals then - localDefInScope - .filterNot(d => d.symbol.usedDefContains) - .filterNot(d => usedInPosition.exists { case (pos, name) => d.span.contains(pos.span) && name == d.symbol.name}) - .filterNot(d => containsSyntheticSuffix(d.symbol)) - .map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.LocalDefs)).toList + localDefInScope.partition(d => d.symbol.usedDefContains) else - Nil + (Nil, Nil) + val sortedLocalDefs = + unusedLocalDefs + .filterNot(d => usedInPosition.exists { case (pos, name) => d.span.contains(pos.span) && name == d.symbol.name}) + .filterNot(d => containsSyntheticSuffix(d.symbol)) + .map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.LocalDefs)).toList + val unsetLocalDefs = usedLocalDefs.filter(isUnsetVarDef).map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.UnsetLocals)).toList + val sortedExplicitParams = if ctx.settings.WunusedHas.explicits then explicitParamInScope @@ -527,14 +539,14 @@ object CheckUnused: .map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.ImplicitParams)).toList else Nil - val sortedPrivateDefs = + // Partition to extract unset private variables from usedPrivates + val (usedPrivates, unusedPrivates) = if ctx.settings.WunusedHas.privates then - privateDefInScope - .filterNot(d => d.symbol.usedDefContains) - .filterNot(d => containsSyntheticSuffix(d.symbol)) - .map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.PrivateMembers)).toList + privateDefInScope.partition(d => d.symbol.usedDefContains) else - Nil + (Nil, Nil) + val sortedPrivateDefs = unusedPrivates.filterNot(d => containsSyntheticSuffix(d.symbol)).map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.PrivateMembers)).toList + val unsetPrivateDefs = usedPrivates.filter(isUnsetVarDef).map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.UnsetPrivates)).toList val sortedPatVars = if ctx.settings.WunusedHas.patvars then patVarsInScope @@ -544,7 +556,9 @@ object CheckUnused: .map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.PatVars)).toList else Nil - val warnings = List(sortedImp, sortedLocalDefs, sortedExplicitParams, sortedImplicitParams, sortedPrivateDefs, sortedPatVars).flatten.sortBy { s => + val warnings = + List(sortedImp, sortedLocalDefs, sortedExplicitParams, sortedImplicitParams, + sortedPrivateDefs, sortedPatVars, unsetLocalDefs, unsetPrivateDefs).flatten.sortBy { s => val pos = s.pos.sourcePos (pos.line, pos.column) } @@ -731,10 +745,13 @@ object CheckUnused: !isSyntheticMainParam(sym) && !sym.shouldNotReportParamOwner - private def shouldReportPrivateDef(using Context): Boolean = currScopeType.top == ScopeType.Template && !memDef.symbol.isConstructor && memDef.symbol.is(Private, butNot = SelfName | Synthetic | CaseAccessor) + private def isUnsetVarDef(using Context): Boolean = + val sym = memDef.symbol + sym.is(Mutable) && !setVars(sym) + extension (imp: 
tpd.Import) /** Enum generate an import for its cases (but outside them), which should be ignored */ def isGeneratedByEnum(using Context): Boolean = diff --git a/compiler/src/dotty/tools/dotc/transform/Constructors.scala b/compiler/src/dotty/tools/dotc/transform/Constructors.scala index 59b90ff7f084..4dd7205e4ee0 100644 --- a/compiler/src/dotty/tools/dotc/transform/Constructors.scala +++ b/compiler/src/dotty/tools/dotc/transform/Constructors.scala @@ -226,31 +226,39 @@ class Constructors extends MiniPhase with IdentityDenotTransformer { thisPhase = constrStats += intoConstr(stat, sym) } else dropped += sym - case stat @ DefDef(name, _, tpt, _) - if stat.symbol.isGetter && stat.symbol.owner.is(Trait) && !stat.symbol.is(Lazy) && !stat.symbol.isConstExprFinalVal => + case stat @ DefDef(name, _, tpt, _) if stat.symbol.isGetter && !stat.symbol.is(Lazy) => val sym = stat.symbol assert(isRetained(sym), sym) - if !stat.rhs.isEmpty && !isWildcardArg(stat.rhs) then - /* !!! Work around #9390 - * This should really just be `sym.setter`. However, if we do that, we'll miss - * setters for mixed in `private var`s. Even though the scope clearly contains the - * setter symbol with the correct Name structure (since the `find` finds it), - * `.decl(setterName)` used by `.setter` through `.accessorNamed` will *not* find it. - * Could it be that the hash table of the `Scope` is corrupted? - * We still try `sym.setter` first as an optimization, since it will work for all - * public vars in traits and all (public or private) vars in classes. - */ - val symSetter = - if sym.setter.exists then - sym.setter - else - val setterName = sym.asTerm.name.setterName - sym.owner.info.decls.find(d => d.is(Accessor) && d.name == setterName) - val setter = - if (symSetter.exists) symSetter - else sym.accessorNamed(Mixin.traitSetterName(sym.asTerm)) - constrStats += Apply(ref(setter), intoConstr(stat.rhs, sym).withSpan(stat.span) :: Nil) - clsStats += cpy.DefDef(stat)(rhs = EmptyTree) + if sym.isConstExprFinalVal then + if stat.rhs.isInstanceOf[Literal] then + clsStats += stat + else + constrStats += intoConstr(stat.rhs, sym) + clsStats += cpy.DefDef(stat)(rhs = Literal(sym.constExprFinalValConstantType.value).withSpan(stat.span)) + else if !sym.owner.is(Trait) then + clsStats += stat + else + if !stat.rhs.isEmpty && !isWildcardArg(stat.rhs) then + /* !!! Work around #9390 + * This should really just be `sym.setter`. However, if we do that, we'll miss + * setters for mixed in `private var`s. Even though the scope clearly contains the + * setter symbol with the correct Name structure (since the `find` finds it), + * `.decl(setterName)` used by `.setter` through `.accessorNamed` will *not* find it. + * Could it be that the hash table of the `Scope` is corrupted? + * We still try `sym.setter` first as an optimization, since it will work for all + * public vars in traits and all (public or private) vars in classes. 
+ */ + val symSetter = + if sym.setter.exists then + sym.setter + else + val setterName = sym.asTerm.name.setterName + sym.owner.info.decls.find(d => d.is(Accessor) && d.name == setterName) + val setter = + if (symSetter.exists) symSetter + else sym.accessorNamed(Mixin.traitSetterName(sym.asTerm)) + constrStats += Apply(ref(setter), intoConstr(stat.rhs, sym).withSpan(stat.span) :: Nil) + clsStats += cpy.DefDef(stat)(rhs = EmptyTree) case DefDef(nme.CONSTRUCTOR, ((outerParam @ ValDef(nme.OUTER, _, _)) :: _) :: Nil, _, _) => clsStats += mapOuter(outerParam.symbol).transform(stat) case _: DefTree => diff --git a/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala b/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala index 2ab910f6d06e..b4eb71c541d3 100644 --- a/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala +++ b/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala @@ -58,7 +58,7 @@ object ContextFunctionResults: */ def contextResultsAreErased(sym: Symbol)(using Context): Boolean = def allErased(tp: Type): Boolean = tp.dealias match - case defn.ContextFunctionType(_, resTpe, isErased) => isErased && allErased(resTpe) + case defn.ContextFunctionType(_, resTpe, erasedParams) => !erasedParams.contains(false) && allErased(resTpe) case _ => true contextResultCount(sym) > 0 && allErased(sym.info.finalResultType) @@ -72,10 +72,8 @@ object ContextFunctionResults: integrateContextResults(rt, crCount) case tp: MethodOrPoly => tp.derivedLambdaType(resType = integrateContextResults(tp.resType, crCount)) - case defn.ContextFunctionType(argTypes, resType, isErased) => - val methodType: MethodTypeCompanion = - if isErased then ErasedMethodType else MethodType - methodType(argTypes, integrateContextResults(resType, crCount - 1)) + case defn.ContextFunctionType(argTypes, resType, erasedParams) => + MethodType(argTypes, integrateContextResults(resType, crCount - 1)) /** The total number of parameters of method `sym`, not counting * erased parameters, but including context result parameters. 
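 * For instance (illustrative): `def f(x: Int): A ?=> B` with one context result counts two parameters, `x` plus the context parameter of the result type.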
@@ -85,14 +83,16 @@ object ContextFunctionResults: def contextParamCount(tp: Type, crCount: Int): Int = if crCount == 0 then 0 else - val defn.ContextFunctionType(params, resTpe, isErased) = tp: @unchecked + val defn.ContextFunctionType(params, resTpe, erasedParams) = tp: @unchecked val rest = contextParamCount(resTpe, crCount - 1) - if isErased then rest else params.length + rest + if erasedParams.contains(true) then erasedParams.count(_ == false) + rest else params.length + rest def normalParamCount(tp: Type): Int = tp.widenExpr.stripPoly match case mt @ MethodType(pnames) => val rest = normalParamCount(mt.resType) - if mt.isErasedMethod then rest else pnames.length + rest + if mt.hasErasedParams then + mt.erasedParams.count(_ == false) + rest + else pnames.length + rest case _ => contextParamCount(tp, contextResultCount(sym)) normalParamCount(sym.info) @@ -116,8 +116,14 @@ object ContextFunctionResults: atPhase(erasurePhase)(integrateSelect(tree, n)) else tree match case Select(qual, name) => - if name == nme.apply && defn.isContextFunctionClass(tree.symbol.maybeOwner) then - integrateSelect(qual, n + 1) + if name == nme.apply then + qual.tpe match + case defn.ContextFunctionType(_, _, _) => + integrateSelect(qual, n + 1) + case _ if defn.isContextFunctionClass(tree.symbol.maybeOwner) => // for TermRefs + integrateSelect(qual, n + 1) + case _ => + n > 0 && contextResultCount(tree.symbol) >= n else n > 0 && contextResultCount(tree.symbol) >= n case Ident(name) => @@ -133,4 +139,4 @@ object ContextFunctionResults: case _ => false -end ContextFunctionResults \ No newline at end of file +end ContextFunctionResults diff --git a/compiler/src/dotty/tools/dotc/transform/ElimByName.scala b/compiler/src/dotty/tools/dotc/transform/ElimByName.scala index 479a455b4aea..151e841f0e48 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimByName.scala +++ b/compiler/src/dotty/tools/dotc/transform/ElimByName.scala @@ -15,6 +15,7 @@ import MegaPhase.* import Decorators.* import typer.RefChecks import reporting.trace +import dotty.tools.dotc.core.Names.Name /** This phase implements the following transformations: * @@ -79,11 +80,14 @@ class ElimByName extends MiniPhase, InfoTransformer: case ExprType(rt) if exprBecomesFunction(sym) => defn.ByNameFunction(rt) case tp: MethodType => - def exprToFun(tp: Type) = tp match - case ExprType(rt) => defn.ByNameFunction(rt) + def exprToFun(tp: Type, name: Name) = tp match + case ExprType(rt) => + if rt.hasAnnotation(defn.ErasedParamAnnot) then + report.error(em"By-name parameter cannot be erased: $name", sym.srcPos) + defn.ByNameFunction(rt) case tp => tp tp.derivedLambdaType( - paramInfos = tp.paramInfos.mapConserve(exprToFun), + paramInfos = tp.paramInfos.zipWithConserve(tp.paramNames)(exprToFun), resType = transformInfo(tp.resType, sym)) case tp: PolyType => tp.derivedLambdaType(resType = transformInfo(tp.resType, sym)) diff --git a/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala b/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala index 78baec70bee6..359b882ef26b 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala +++ b/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala @@ -87,7 +87,8 @@ class ElimRepeated extends MiniPhase with InfoTransformer { thisPhase => * signatures of a Java varargs method and a Scala varargs override are not the same. 
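 * (e.g. a Java `m(String...)` erases to `m(String[])` while the Scala override takes a `Seq[String]`, so the two erased signatures differ).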
*/ private def overridesJava(sym: Symbol)(using Context) = - sym.owner.info.baseClasses.drop(1).exists { bc => + sym.memberCanMatchInheritedSymbols + && sym.owner.info.baseClasses.drop(1).exists { bc => bc.is(JavaDefined) && { val other = bc.info.nonPrivateDecl(sym.name) other.hasAltWith { alt => diff --git a/compiler/src/dotty/tools/dotc/transform/Erasure.scala b/compiler/src/dotty/tools/dotc/transform/Erasure.scala index 129964557995..981dd5f60aea 100644 --- a/compiler/src/dotty/tools/dotc/transform/Erasure.scala +++ b/compiler/src/dotty/tools/dotc/transform/Erasure.scala @@ -500,7 +500,7 @@ object Erasure { if isFunction && !ctx.settings.scalajs.value then val arity = implParamTypes.length val specializedFunctionalInterface = - if defn.isSpecializableFunctionSAM(implParamTypes, implResultType) then + if !implType.hasErasedParams && defn.isSpecializableFunctionSAM(implParamTypes, implResultType) then // Using these subclasses is critical to avoid boxing since their // SAM is a specialized method `apply$mc*$sp` whose default // implementation in FunctionN boxes. @@ -679,6 +679,8 @@ object Erasure { val qualTp = tree.qualifier.typeOpt.widen if qualTp.derivesFrom(defn.PolyFunctionClass) then erasePolyFunctionApply(qualTp.select(nme.apply).widen).classSymbol + else if defn.isErasedFunctionType(qualTp) then + eraseErasedFunctionApply(qualTp.select(nme.apply).widen.asInstanceOf[MethodType]).classSymbol else NoSymbol } @@ -774,7 +776,7 @@ object Erasure { select(qual1, sym) else val castTarget = // Avoid inaccessible cast targets, see i8661 - if isJvmAccessible(sym.owner) + if isJvmAccessible(sym.owner) && sym.owner.isType then sym.owner.typeRef else @@ -827,7 +829,10 @@ object Erasure { val Apply(fun, args) = tree val origFun = fun.asInstanceOf[tpd.Tree] val origFunType = origFun.tpe.widen(using preErasureCtx) - val ownArgs = if origFunType.isErasedMethod then Nil else args + val ownArgs = origFunType match + case mt: MethodType if mt.hasErasedParams => + args.zip(mt.erasedParams).collect { case (arg, false) => arg } + case _ => args val fun1 = typedExpr(fun, AnyFunctionProto) fun1.tpe.widen match case mt: MethodType => diff --git a/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala b/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala index 0552fe31f8a2..0bfc444e0997 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala @@ -145,15 +145,13 @@ class ExpandSAMs extends MiniPhase: def translateMatch(tree: Match, pfParam: Symbol, cases: List[CaseDef], defaultValue: Tree)(using Context) = { val selector = tree.selector - val selectorTpe = selector.tpe.widen - val defaultSym = newSymbol(pfParam.owner, nme.WILDCARD, SyntheticCase, selectorTpe) - val defaultCase = - CaseDef( - Bind(defaultSym, Underscore(selectorTpe)), - EmptyTree, - defaultValue) - val unchecked = selector.annotated(New(ref(defn.UncheckedAnnot.typeRef))) - cpy.Match(tree)(unchecked, cases :+ defaultCase) + val cases1 = if cases.exists(isDefaultCase) then cases + else + val selectorTpe = selector.tpe.widen + val defaultSym = newSymbol(pfParam.owner, nme.WILDCARD, SyntheticCase, selectorTpe) + val defaultCase = CaseDef(Bind(defaultSym, Underscore(selectorTpe)), EmptyTree, defaultValue) + cases :+ defaultCase + cpy.Match(tree)(selector, cases1) .subst(param.symbol :: Nil, pfParam :: Nil) // Needed because a partial function can be written as: // param => param match { case "foo" if foo(param) => param } diff --git 
a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala index cddfe51275c8..deb1f665c022 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala @@ -13,6 +13,7 @@ import core.Decorators._ import core.StdNames.nme import core.Names._ import core.NameOps._ +import core.NameKinds.SuperArgName import SymUtils._ import dotty.tools.dotc.ast.tpd @@ -197,11 +198,17 @@ object ExplicitOuter { private def outerAccName(cls: ClassSymbol)(using Context): TermName = nme.OUTER.expandedName(cls) + private def outerOwner(sym: Symbol)(using Context): Symbol = + val owner = sym.effectiveOwner + if owner.name.is(SuperArgName) || owner.isLocalDummy + then owner.enclosingClass + else owner + /** Class needs an outer pointer, provided there is a reference to an outer this in it. */ def needsOuterIfReferenced(cls: ClassSymbol)(using Context): Boolean = - !(cls.isStatic || - cls.owner.enclosingClass.isStaticOwner || - cls.is(PureInterface) + !(cls.isStatic + || outerOwner(cls).isStaticOwner + || cls.is(PureInterface) ) /** Class unconditionally needs an outer pointer. This is the case if @@ -226,7 +233,9 @@ /** The outer parameter accessor of class `cls` */ private def outerParamAccessor(cls: ClassSymbol)(using Context): TermSymbol = - cls.info.decl(nme.OUTER).symbol.asTerm + val outer = cls.info.decl(nme.OUTER).symbol + assert(outer.isTerm, i"missing outer accessor in $cls") + outer.asTerm /** The outer accessor of class `cls`. To find it is a bit tricky. The * class might have been moved with new owners between ExplicitOuter and Erasure, diff --git a/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala b/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala index a7e0795ce195..03639c8af689 100644 --- a/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala +++ b/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala @@ -18,6 +18,7 @@ import NameKinds.OuterSelectName import StdNames._ import TypeUtils.isErasedValueType import config.Feature +import inlines.Inlines.inInlineMethod object FirstTransform { val name: String = "firstTransform" diff --git a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala index 050abf7f3cb7..a1baeac272b9 100644 --- a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala +++ b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala @@ -311,7 +311,9 @@ object GenericSignatures { case mtpe: MethodType => // erased method parameters do not make it to the bytecode.
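 // e.g. (illustrative) a method of type `(x: Int, erased ev: Ev): R` contributes only `Int` to the signature computed below.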
def effectiveParamInfoss(t: Type)(using Context): List[List[Type]] = t match { - case t: MethodType if t.isErasedMethod => effectiveParamInfoss(t.resType) + case t: MethodType if t.hasErasedParams => + t.paramInfos.zip(t.erasedParams).collect{ case (i, false) => i } + :: effectiveParamInfoss(t.resType) case t: MethodType => t.paramInfos :: effectiveParamInfoss(t.resType) case _ => Nil } diff --git a/compiler/src/dotty/tools/dotc/transform/InlinePatterns.scala b/compiler/src/dotty/tools/dotc/transform/InlinePatterns.scala index 6edb60a77245..798f34757b35 100644 --- a/compiler/src/dotty/tools/dotc/transform/InlinePatterns.scala +++ b/compiler/src/dotty/tools/dotc/transform/InlinePatterns.scala @@ -8,6 +8,8 @@ import Symbols._, Contexts._, Types._, Decorators._ import NameOps._ import Names._ +import scala.collection.mutable.ListBuffer + /** Rewrite an application * * {new { def unapply(x0: X0)(x1: X1,..., xn: Xn) = b }}.unapply(y0)(y1, ..., yn) @@ -38,7 +40,7 @@ class InlinePatterns extends MiniPhase: if app.symbol.name.isUnapplyName && !app.tpe.isInstanceOf[MethodicType] then app match case App(Select(fn, name), argss) => - val app1 = betaReduce(app, fn, name, argss.flatten) + val app1 = betaReduce(app, fn, name, argss) if app1 ne app then report.log(i"beta reduce $app -> $app1") app1 case _ => @@ -51,11 +53,16 @@ class InlinePatterns extends MiniPhase: case Apply(App(fn, argss), args) => (fn, argss :+ args) case _ => (app, Nil) - private def betaReduce(tree: Apply, fn: Tree, name: Name, args: List[Tree])(using Context): Tree = + // TODO merge with BetaReduce.scala + private def betaReduce(tree: Apply, fn: Tree, name: Name, argss: List[List[Tree]])(using Context): Tree = fn match case Block(TypeDef(_, template: Template) :: Nil, Apply(Select(New(_),_), Nil)) if template.constr.rhs.isEmpty => template.body match - case List(ddef @ DefDef(`name`, _, _, _)) => BetaReduce(ddef, args) + case List(ddef @ DefDef(`name`, _, _, _)) => + val bindings = new ListBuffer[DefTree]() + val expansion1 = BetaReduce.reduceApplication(ddef, argss, bindings) + val bindings1 = bindings.result() + seq(bindings1, expansion1) case _ => tree case _ => tree diff --git a/compiler/src/dotty/tools/dotc/transform/Inlining.scala b/compiler/src/dotty/tools/dotc/transform/Inlining.scala index f0ed7026ee91..10f73fa94e08 100644 --- a/compiler/src/dotty/tools/dotc/transform/Inlining.scala +++ b/compiler/src/dotty/tools/dotc/transform/Inlining.scala @@ -9,10 +9,10 @@ import SymUtils._ import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.ast.Trees._ import dotty.tools.dotc.quoted._ -import dotty.tools.dotc.core.StagingContext._ import dotty.tools.dotc.inlines.Inlines import dotty.tools.dotc.ast.TreeMapWithImplicits import dotty.tools.dotc.core.DenotTransformers.IdentityDenotTransformer +import dotty.tools.dotc.staging.StagingLevel import scala.collection.mutable.ListBuffer @@ -45,11 +45,7 @@ class Inlining extends MacroTransform { new TreeTraverser { def traverse(tree: Tree)(using Context): Unit = tree match - case _: GenericApply if tree.symbol.isQuote => - traverseChildren(tree)(using StagingContext.quoteContext) - case _: GenericApply if tree.symbol.isExprSplice => - traverseChildren(tree)(using StagingContext.spliceContext) - case tree: RefTree if !Inlines.inInlineMethod && StagingContext.level == 0 => + case tree: RefTree if !Inlines.inInlineMethod && StagingLevel.level == 0 => assert(!tree.symbol.isInlineMethod, tree.show) case _ => traverseChildren(tree) @@ -76,7 +72,7 @@ class Inlining extends MacroTransform { else if 
tree.symbol.is(Param) then super.transform(tree) else if !tree.symbol.isPrimaryConstructor - && StagingContext.level == 0 + && StagingLevel.level == 0 && MacroAnnotations.hasMacroAnnotation(tree.symbol) then val trees = (new MacroAnnotations).expandAnnotations(tree) @@ -97,10 +93,6 @@ class Inlining extends MacroTransform { val tree1 = super.transform(tree) if tree1.tpe.isError then tree1 else Inlines.inlineCall(tree1) - case _: GenericApply if tree.symbol.isQuote => - super.transform(tree)(using StagingContext.quoteContext) - case _: GenericApply if tree.symbol.isExprSplice => - super.transform(tree)(using StagingContext.spliceContext) case _: PackageDef => super.transform(tree) match case tree1: PackageDef => @@ -112,7 +104,8 @@ class Inlining extends MacroTransform { case _ => tree1 case tree1 => tree1 case _ => - super.transform(tree) + if tree.isType then tree + else super.transform(tree) } } } diff --git a/compiler/src/dotty/tools/dotc/transform/InstrumentCoverage.scala b/compiler/src/dotty/tools/dotc/transform/InstrumentCoverage.scala index c69b342b9a01..29572a4ae30d 100644 --- a/compiler/src/dotty/tools/dotc/transform/InstrumentCoverage.scala +++ b/compiler/src/dotty/tools/dotc/transform/InstrumentCoverage.scala @@ -11,6 +11,7 @@ import core.DenotTransformers.IdentityDenotTransformer import core.Symbols.{defn, Symbol} import core.Constants.Constant import core.NameOps.isContextFunction +import core.StdNames.nme import core.Types.* import coverage.* import typer.LiftCoverage @@ -325,7 +326,11 @@ class InstrumentCoverage extends MacroTransform with IdentityDenotTransformer: // Only transform the params (for the default values) and the rhs, not the name and tpt. val transformedParamss = transformParamss(tree.paramss) val transformedRhs = - if !sym.isOneOf(Accessor | Artifact | Synthetic) && !tree.rhs.isEmpty then + if tree.rhs.isEmpty then + tree.rhs + else if sym.isClassConstructor then + instrumentSecondaryCtor(tree) + else if !sym.isOneOf(Accessor | Artifact | Synthetic) then // If the body can be instrumented, do it (i.e. insert a "coverage call" at the beginning) // This is useful because methods can be stored and called later, or called by reflection, // and if the rhs is too simple to be instrumented (like `def f = this`), @@ -410,6 +415,24 @@ class InstrumentCoverage extends MacroTransform with IdentityDenotTransformer: val coverageCall = createInvokeCall(parent, pos) InstrumentedParts.singleExprTree(coverageCall, body) + /** Instruments the body of a secondary constructor DefDef. + * + * We must preserve the delegate constructor call as the first statement of + * the rhs Block, otherwise `HoistSuperArgs` will not be happy (see #17042). + */ + private def instrumentSecondaryCtor(ctorDef: DefDef)(using Context): Tree = + // compute position like in instrumentBody + val namePos = ctorDef.namePos + val pos = namePos.withSpan(namePos.span.withStart(ctorDef.span.start)) + val coverageCall = createInvokeCall(ctorDef, pos) + + ctorDef.rhs match + case b @ Block(delegateCtorCall :: stats, expr: Literal) => + cpy.Block(b)(transform(delegateCtorCall) :: coverageCall :: stats.mapConserve(transform), expr) + case rhs => + cpy.Block(rhs)(transform(rhs) :: coverageCall :: Nil, unitLiteral) + end instrumentSecondaryCtor + /** * Checks if the apply needs a lift in the coverage phase. * In case of a nested application, we have to lift all arguments @@ -447,9 +470,14 @@ class InstrumentCoverage extends MacroTransform with IdentityDenotTransformer: /** Check if an Apply can be instrumented. 
Prevents this phase from generating incorrect code. */ private def canInstrumentApply(tree: Apply)(using Context): Boolean = + def isSecondaryCtorDelegateCall: Boolean = tree.fun match + case Select(This(_), nme.CONSTRUCTOR) => true + case _ => false + val sym = tree.symbol !sym.isOneOf(ExcludeMethodFlags) && !isCompilerIntrinsicMethod(sym) + && !(sym.isClassConstructor && isSecondaryCtorDelegateCall) && (tree.typeOpt match case AppliedType(tycon: NamedType, _) => /* If the last expression in a block is a context function, we'll try to diff --git a/compiler/src/dotty/tools/dotc/transform/LazyVals.scala b/compiler/src/dotty/tools/dotc/transform/LazyVals.scala index e4cb21a279d6..b433e37e39c0 100644 --- a/compiler/src/dotty/tools/dotc/transform/LazyVals.scala +++ b/compiler/src/dotty/tools/dotc/transform/LazyVals.scala @@ -283,7 +283,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { * * ``` * private @volatile var _x: AnyRef = null - * + * * def x: A = * val result = _x * if result.isInstanceOf[A] then @@ -292,7 +292,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { * null // possible unboxing applied here * else * x_compute() // possible unboxing applied here - * + * * private def x_compute(): AnyRef = * while do * val current: AnyRef = _x diff --git a/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala b/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala index d4dd911241d3..b4e8c3acbc5c 100644 --- a/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala +++ b/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala @@ -5,6 +5,7 @@ package transform import core._ import Contexts._, Phases._, Symbols._, Decorators._ import Flags.PackageVal +import staging.StagingLevel.* /** A MegaPhase combines a number of mini-phases which are all executed in * a single tree traversal. 
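 * Each mini-phase hooks into the traversal through `prepareForX`/`transformX` methods, one pair per kind of tree node (e.g. `transformApply` or `transformQuote`).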
@@ -66,6 +67,8 @@ def prepareForTry(tree: Try)(using Context): Context = ctx def prepareForSeqLiteral(tree: SeqLiteral)(using Context): Context = ctx def prepareForInlined(tree: Inlined)(using Context): Context = ctx + def prepareForQuote(tree: Quote)(using Context): Context = ctx + def prepareForSplice(tree: Splice)(using Context): Context = ctx def prepareForTypeTree(tree: TypeTree)(using Context): Context = ctx def prepareForBind(tree: Bind)(using Context): Context = ctx def prepareForAlternative(tree: Alternative)(using Context): Context = ctx @@ -100,6 +103,8 @@ def transformTry(tree: Try)(using Context): Tree = tree def transformSeqLiteral(tree: SeqLiteral)(using Context): Tree = tree def transformInlined(tree: Inlined)(using Context): Tree = tree + def transformQuote(tree: Quote)(using Context): Tree = tree + def transformSplice(tree: Splice)(using Context): Tree = tree def transformTypeTree(tree: TypeTree)(using Context): Tree = tree def transformBind(tree: Bind)(using Context): Tree = tree def transformAlternative(tree: Alternative)(using Context): Tree = tree @@ -394,6 +399,16 @@ val expansion = transformTree(tree.expansion, start)(using inlineContext(tree.call)) goInlined(cpy.Inlined(tree)(tree.call, bindings, expansion), start) } + case tree: Quote => + inContext(prepQuote(tree, start)(using outerCtx)) { + val body = transformTree(tree.body, start)(using quoteContext) + goQuote(cpy.Quote(tree)(body, Nil), start) + } + case tree: Splice => + inContext(prepSplice(tree, start)(using outerCtx)) { + val expr = transformTree(tree.expr, start)(using spliceContext) + goSplice(cpy.Splice(tree)(expr), start) + } case tree: Return => inContext(prepReturn(tree, start)(using outerCtx)) { val expr = transformTree(tree.expr, start) @@ -546,6 +561,10 @@ private val nxSeqLiteralTransPhase = init("transformSeqLiteral") private val nxInlinedPrepPhase = init("prepareForInlined") private val nxInlinedTransPhase = init("transformInlined") + private val nxQuotePrepPhase = init("prepareForQuote") + private val nxQuoteTransPhase = init("transformQuote") + private val nxSplicePrepPhase = init("prepareForSplice") + private val nxSpliceTransPhase = init("transformSplice") private val nxTypeTreePrepPhase = init("prepareForTypeTree") private val nxTypeTreeTransPhase = init("transformTypeTree") private val nxBindPrepPhase = init("prepareForBind") @@ -893,6 +912,36 @@ } } + def prepQuote(tree: Quote, start: Int)(using Context): Context = { + val phase = nxQuotePrepPhase(start) + if (phase == null) ctx + else prepQuote(tree, phase.idxInGroup + 1)(using phase.prepareForQuote(tree)) + } + + def goQuote(tree: Quote, start: Int)(using Context): Tree = { + val phase = nxQuoteTransPhase(start) + if (phase == null) tree + else phase.transformQuote(tree) match { + case tree1: Quote => goQuote(tree1, phase.idxInGroup + 1) + case tree1 => transformNode(tree1, phase.idxInGroup + 1) + } + } + + def prepSplice(tree: Splice, start: Int)(using Context): Context = { + val phase = nxSplicePrepPhase(start) + if (phase == null) ctx + else prepSplice(tree, phase.idxInGroup + 1)(using phase.prepareForSplice(tree)) + } + + def goSplice(tree: Splice, start: Int)(using Context): Tree = { + val phase = nxSpliceTransPhase(start) + if (phase == null) tree + else phase.transformSplice(tree) match { + case
tree1: Splice => goSplice(tree1, phase.idxInGroup + 1) + case tree1 => transformNode(tree1, phase.idxInGroup + 1) + } + } + def prepTypeTree(tree: TypeTree, start: Int)(using Context): Context = { val phase = nxTypeTreePrepPhase(start) if (phase == null) ctx diff --git a/compiler/src/dotty/tools/dotc/transform/Memoize.scala b/compiler/src/dotty/tools/dotc/transform/Memoize.scala index 5a2eda4101a4..03ac15b39ffe 100644 --- a/compiler/src/dotty/tools/dotc/transform/Memoize.scala +++ b/compiler/src/dotty/tools/dotc/transform/Memoize.scala @@ -20,8 +20,6 @@ import sjs.JSSymUtils._ import util.Store -import dotty.tools.backend.sjs.JSDefinitions.jsdefn - object Memoize { val name: String = "memoize" val description: String = "add private fields to getters and setters" @@ -130,32 +128,17 @@ class Memoize extends MiniPhase with IdentityDenotTransformer { thisPhase => } if sym.is(Accessor, butNot = NoFieldNeeded) then - /* Tests whether the semantics of Scala.js require a field for this symbol, irrespective of any - * optimization we think we can do. This is the case if one of the following is true: - * - it is a member of a JS type, since it needs to be visible as a JavaScript field - * - is is exported as static member of the companion class, since it needs to be visible as a JavaScript static field - * - it is exported to the top-level, since that can only be done as a true top-level variable, i.e., a field - */ - def sjsNeedsField: Boolean = - ctx.settings.scalajs.value && ( - sym.owner.isJSType - || sym.hasAnnotation(jsdefn.JSExportTopLevelAnnot) - || sym.hasAnnotation(jsdefn.JSExportStaticAnnot) - ) - def adaptToField(field: Symbol, tree: Tree): Tree = if (tree.isEmpty) tree else tree.ensureConforms(field.info.widen) def isErasableBottomField(field: Symbol, cls: Symbol): Boolean = !field.isVolatile && ((cls eq defn.NothingClass) || (cls eq defn.NullClass) || (cls eq defn.BoxedUnitClass)) - && !sjsNeedsField + && !sym.sjsNeedsField if sym.isGetter then - val constantFinalVal = - sym.isAllOf(Accessor | Final, butNot = Mutable) && tree.rhs.isInstanceOf[Literal] && !sjsNeedsField - if constantFinalVal then - // constant final vals do not need to be transformed at all, and do not need a field + if sym.isConstExprFinalVal then + // const-expr final vals do not need to be transformed at all, and do not need a field tree else val field = newField.asTerm @@ -167,7 +150,7 @@ class Memoize extends MiniPhase with IdentityDenotTransformer { thisPhase => if isErasableBottomField(field, rhsClass) then erasedBottomTree(rhsClass) else transformFollowingDeep(ref(field))(using ctx.withOwner(sym)) val getterDef = cpy.DefDef(tree)(rhs = getterRhs) - sym.copyAndKeepAnnotationsCarrying(thisPhase, Set(defn.GetterMetaAnnot)) + sym.keepAnnotationsCarrying(thisPhase, Set(defn.GetterMetaAnnot)) Thicket(fieldDef, getterDef) else if sym.isSetter then if (!sym.is(ParamAccessor)) { val Literal(Constant(())) = tree.rhs: @unchecked } // This is intended as an assertion @@ -193,7 +176,7 @@ class Memoize extends MiniPhase with IdentityDenotTransformer { thisPhase => then Literal(Constant(())) else Assign(ref(field), adaptToField(field, ref(tree.termParamss.head.head.symbol))) val setterDef = cpy.DefDef(tree)(rhs = transformFollowingDeep(initializer)(using ctx.withOwner(sym))) - sym.copyAndKeepAnnotationsCarrying(thisPhase, Set(defn.SetterMetaAnnot)) + sym.keepAnnotationsCarrying(thisPhase, Set(defn.SetterMetaAnnot)) setterDef else // Curiously, some accessors from Scala2 have ' ' suffixes. 
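For orientation, a minimal sketch of the level discipline these hooks and the new `StagingLevel` object encode; the `demo` method and its assertions are illustrative only, not part of this changeset:

    import dotty.tools.dotc.core.Contexts.*
    import dotty.tools.dotc.staging.StagingLevel.*

    def demo(using Context): Unit =
      assert(level == 0)             // outside any quote
      inContext(quoteContext) {      // a quote body is transformed at level + 1
        assert(level == 1)
        inContext(spliceContext) {   // a splice body is transformed at level - 1
          assert(level == 0)
        }
      }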
diff --git a/compiler/src/dotty/tools/dotc/transform/PCPCheckAndHeal.scala b/compiler/src/dotty/tools/dotc/transform/PCPCheckAndHeal.scala deleted file mode 100644 index 1d0ed035df09..000000000000 --- a/compiler/src/dotty/tools/dotc/transform/PCPCheckAndHeal.scala +++ /dev/null @@ -1,312 +0,0 @@ -package dotty.tools.dotc -package transform - -import dotty.tools.dotc.ast.{tpd, untpd} -import dotty.tools.dotc.core.Annotations.BodyAnnotation -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Decorators._ -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.NameKinds._ -import dotty.tools.dotc.core.StagingContext._ -import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core.Types._ -import dotty.tools.dotc.util.SrcPos -import dotty.tools.dotc.util.Spans._ -import dotty.tools.dotc.transform.SymUtils._ -import dotty.tools.dotc.typer.Checking -import dotty.tools.dotc.typer.Implicits.SearchFailureType -import dotty.tools.dotc.core.Annotations._ - -import dotty.tools.dotc.util.Property - -import scala.annotation.constructorOnly - -/** Checks that the Phase Consistency Principle (PCP) holds and heals types. - * - * Local term references are phase consistent if and only if they are used at the same level as their definition. - * - * Local type references can be used at the level of their definition or lower. If used used at a higher level, - * it will be healed if possible, otherwise it is inconsistent. - * - * Type healing consists in transforming a phase inconsistent type `T` into `summon[Type[T]].Underlying`. - * - * As references to types do not necessarily have an associated tree it is not always possible to replace the types directly. - * Instead we always generate a type alias for it and place it at the start of the surrounding quote. This also avoids duplication. - * For example: - * '{ - * val x: List[T] = List[T]() - * () - * } - * - * is transformed to - * - * '{ - * type t$1 = summon[Type[T]].Underlying - * val x: List[t$1] = List[t$1](); - * () - * } - * - */ -class PCPCheckAndHeal(@constructorOnly ictx: Context) extends TreeMapWithStages(ictx) with Checking { - import tpd._ - - private val InAnnotation = Property.Key[Unit]() - - override def transform(tree: Tree)(using Context): Tree = - if (tree.source != ctx.source && tree.source.exists) - transform(tree)(using ctx.withSource(tree.source)) - else if !isInQuoteOrSplice then - checkAnnotations(tree) - super.transform(tree) - else tree match { - - case _: TypeTree | _: RefTree if tree.isType => - val healedType = healType(tree.srcPos)(tree.tpe) - if healedType == tree.tpe then tree - else TypeTree(healedType).withSpan(tree.span) - case _: AppliedTypeTree => - super.transform(tree) match - case tree1: AppliedTypeTree if tree1 ne tree => - // propagate healed types - tree1.withType(tree1.tpt.tpe.appliedTo(tree1.args.map(_.tpe))) - case tree1 => tree1 - - case _: Ident | _: This => - tree.withType(healTypeOfTerm(tree.srcPos)(tree.tpe)) - - // Remove inline defs in quoted code. Already fully inlined. 
- case tree: DefDef if tree.symbol.is(Inline) && level > 0 => - EmptyTree - - case tree: ValOrDefDef => - checkAnnotations(tree) - healInfo(tree, tree.tpt.srcPos) - super.transform(tree) - case tree: Bind => - checkAnnotations(tree) - healInfo(tree, tree.srcPos) - super.transform(tree) - case tree: UnApply => - super.transform(tree).withType(healTypeOfTerm(tree.srcPos)(tree.tpe)) - case tree: TypeDef if tree.symbol.is(Case) && level > 0 => - report.error(reporting.CaseClassInInlinedCode(tree), tree) - super.transform(tree) - case _ => - super.transform(tree) - } - - /** Transform quoted trees while maintaining phase correctness */ - override protected def transformQuotation(body: Tree, quote: Apply)(using Context): Tree = { - val taggedTypes = new PCPCheckAndHeal.QuoteTypeTags(quote.span) - - if (ctx.property(InAnnotation).isDefined) - report.error("Cannot have a quote in an annotation", quote.srcPos) - - val contextWithQuote = - if level == 0 then contextWithQuoteTypeTags(taggedTypes)(using quoteContext) - else quoteContext - val body1 = transform(body)(using contextWithQuote) - val body2 = - taggedTypes.getTypeTags match - case Nil => body1 - case tags => tpd.Block(tags, body1).withSpan(body.span) - - if body.isTerm then - // `quoted.runtime.Expr.quote[T]()` --> `quoted.runtime.Expr.quote[T2]()` - val TypeApply(fun, targs) = quote.fun: @unchecked - val targs2 = targs.map(targ => TypeTree(healTypeOfTerm(quote.fun.srcPos)(targ.tpe))) - cpy.Apply(quote)(cpy.TypeApply(quote.fun)(fun, targs2), body2 :: Nil) - else - val quotes = quote.args.mapConserve(transform) - body.tpe match - case tp @ TypeRef(x: TermRef, _) if tp.symbol == defn.QuotedType_splice => - // Optimization: `quoted.Type.of[x.Underlying](quotes)` --> `x` - ref(x) - case _ => - // `quoted.Type.of[](quotes)` --> `quoted.Type.of[](quotes)` - val TypeApply(fun, _) = quote.fun: @unchecked - cpy.Apply(quote)(cpy.TypeApply(quote.fun)(fun, body2 :: Nil), quotes) - } - - /** Transform splice - * - If inside a quote, transform the contents of the splice. - * - If inside inlined code, expand the macro code. - * - If inside of a macro definition, check the validity of the macro. - */ - protected def transformSplice(body: Tree, splice: Apply)(using Context): Tree = { - val body1 = transform(body)(using spliceContext) - splice.fun match { - case fun @ TypeApply(_, _ :: Nil) => - // Type of the splice itself must also be healed - // `quoted.runtime.Expr.quote[F[T]](... T ...)` --> `internal.Quoted.expr[F[$t]](... T ...)` - val tp = healType(splice.srcPos)(splice.tpe.widenTermRefExpr) - cpy.Apply(splice)(cpy.TypeApply(fun)(fun.fun, tpd.TypeTree(tp) :: Nil), body1 :: Nil) - case f @ Apply(fun @ TypeApply(_, _), qctx :: Nil) => - // Type of the splice itself must also be healed - // `quoted.runtime.Expr.quote[F[T]](... T ...)` --> `internal.Quoted.expr[F[$t]](... 
T ...)` - val tp = healType(splice.srcPos)(splice.tpe.widenTermRefExpr) - cpy.Apply(splice)(cpy.Apply(f)(cpy.TypeApply(fun)(fun.fun, tpd.TypeTree(tp) :: Nil), qctx :: Nil), body1 :: Nil) - } - } - - protected def transformSpliceType(body: Tree, splice: Select)(using Context): Tree = { - val body1 = transform(body)(using spliceContext) - if ctx.reporter.hasErrors then - splice - else - val tagRef = getQuoteTypeTags.getTagRef(splice.qualifier.tpe.asInstanceOf[TermRef]) - ref(tagRef).withSpan(splice.span) - } - - /** Check that annotations do not contain quotes and and that splices are valid */ - private def checkAnnotations(tree: Tree)(using Context): Unit = - tree match - case tree: DefTree => - lazy val annotCtx = ctx.fresh.setProperty(InAnnotation, true).withOwner(tree.symbol) - for (annot <- tree.symbol.annotations) annot match - case annot: BodyAnnotation => annot // already checked in PrepareInlineable before the creation of the BodyAnnotation - case annot => transform(annot.tree)(using annotCtx) - case _ => - - /** Heal types in the info of the given tree */ - private def healInfo(tree: Tree, pos: SrcPos)(using Context): Unit = - tree.symbol.info = healType(pos)(tree.symbol.info) - - /** If the type refers to a locally defined symbol (either directly, or in a pickled type), - * check that its staging level matches the current level. - * - Static types and term are allowed at any level. - * - If a type reference is used a higher level, then it is inconsistent. Will attempt to heal before failing. - * - If a term reference is used a different level, then it is inconsistent. - * - * If `T` is a reference to a type at the wrong level, try to heal it by replacing it with - * a type tag of type `quoted.Type[T]`. - * The tag is generated by an instance of `QuoteTypeTags` directly if the splice is explicit - * or indirectly by `tryHeal`. - */ - private def healType(pos: SrcPos)(using Context) = new TypeMap { - def apply(tp: Type): Type = - tp match - case tp: TypeRef => - tp.prefix match - case NoPrefix if level > levelOf(tp.symbol) && !tp.typeSymbol.hasAnnotation(defn.QuotedRuntime_SplicedTypeAnnot) => - val tp1 = tp.dealias - if tp1 != tp then apply(tp1) - else tryHeal(tp.symbol, tp, pos) - case prefix: ThisType if !tp.symbol.isStatic && level > levelOf(prefix.cls) => - tryHeal(tp.symbol, tp, pos) - case prefix: TermRef if tp.symbol.isTypeSplice => - prefix.symbol.info.argInfos match - case (tb: TypeBounds) :: _ => - report.error(em"Cannot splice $tp because it is a wildcard type", pos) - case _ => - // Heal explicit type splice in the code - if level > 0 then getQuoteTypeTags.getTagRef(prefix) else tp - case prefix: TermRef if !prefix.symbol.isStatic && level > levelOf(prefix.symbol) => - tryHeal(prefix.symbol, tp, pos) - case _ => - mapOver(tp) - case tp: ThisType if level != -1 && level != levelOf(tp.cls) => - levelError(tp.cls, tp, pos) - case tp: AnnotatedType => - val newAnnotTree = transform(tp.annot.tree) - derivedAnnotatedType(tp, apply(tp.parent), tp.annot.derivedAnnotation(newAnnotTree)) - case _ => - mapOver(tp) - } - - /** Check phase consistency of terms and heal inconsistent type references. 
*/ - private def healTypeOfTerm(pos: SrcPos)(using Context) = new TypeMap { - def apply(tp: Type): Type = - tp match - case tp @ TypeRef(NoPrefix, _) if level > levelOf(tp.symbol) => - tryHeal(tp.symbol, tp, pos) - case tp @ TermRef(NoPrefix, _) if !tp.symbol.isStatic && level != levelOf(tp.symbol) => - levelError(tp.symbol, tp, pos) - case tp: ThisType if level != -1 && level != levelOf(tp.cls) => - levelError(tp.cls, tp, pos) - case tp: AnnotatedType => - derivedAnnotatedType(tp, apply(tp.parent), tp.annot) - case _ => - if tp.typeSymbol.is(Package) then tp - else mapOver(tp) - } - - /** Try to heal reference to type `T` used in a higher level than its definition. - * Returns a reference to a type tag generated by `QuoteTypeTags` that contains a - * reference to a type alias containing the equivalent of `${summon[quoted.Type[T]]}`. - * Emits and error if `T` cannot be healed and returns `T`. - */ - protected def tryHeal(sym: Symbol, tp: TypeRef, pos: SrcPos)(using Context): TypeRef = { - val reqType = defn.QuotedTypeClass.typeRef.appliedTo(tp) - val tag = ctx.typer.inferImplicitArg(reqType, pos.span) - tag.tpe match - - case tp: TermRef => - checkStable(tp, pos, "type witness") - getQuoteTypeTags.getTagRef(tp) - case _: SearchFailureType => - report.error( - ctx.typer.missingArgMsg(tag, reqType, "") - .prepend(i"Reference to $tp within quotes requires a given $reqType in scope.\n") - .append("\n"), - pos) - tp - case _ => - report.error(em"""Reference to $tp within quotes requires a given $reqType in scope. - | - |""", pos) - tp - } - - private def levelError(sym: Symbol, tp: Type, pos: SrcPos)(using Context): tp.type = { - def symStr = - if (!tp.isInstanceOf[ThisType]) sym.show - else if (sym.is(ModuleClass)) sym.sourceModule.show - else i"${sym.name}.this" - val hint = - if sym.is(Inline) && levelOf(sym) < level then - "\n\n" + - "Hint: Staged references to inline definition in quotes are only inlined after the quote is spliced into level 0 code by a macro. " + - "Try moving this inline definition in a statically accessible location such as an object (this definition can be private)." 
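A minimal sketch of the user-level pattern the healing in `tryHeal` above supports (the name `makeIdentity` and its signature are illustrative, not part of this patch): the type `T` is defined at level 0 but referenced inside a quote at level 1, and the given `Type[T]` is what the healing summons to repair the reference.

```scala
import scala.quoted.*

// `T` is defined at level 0 but used at level 1 inside the quote.
// The level-inconsistent reference is healed using the given `Type[T]`.
def makeIdentity[T](using Type[T], Quotes): Expr[T => T] =
  '{ (x: T) => x }
```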
- else "" - report.error( - em"""access to $symStr from wrong staging level: - | - the definition is at level ${levelOf(sym)}, - | - but the access is at level $level.$hint""", pos) - tp - } - -} - -object PCPCheckAndHeal { - import tpd._ - - class QuoteTypeTags(span: Span)(using Context) { - - private val tags = collection.mutable.LinkedHashMap.empty[Symbol, TypeDef] - - def getTagRef(spliced: TermRef): TypeRef = { - val typeDef = tags.getOrElseUpdate(spliced.symbol, mkTagSymbolAndAssignType(spliced)) - typeDef.symbol.typeRef - } - - def getTypeTags: List[TypeDef] = tags.valuesIterator.toList - - private def mkTagSymbolAndAssignType(spliced: TermRef): TypeDef = { - val splicedTree = tpd.ref(spliced).withSpan(span) - val rhs = splicedTree.select(tpnme.Underlying).withSpan(span) - val alias = ctx.typeAssigner.assignType(untpd.TypeBoundsTree(rhs, rhs), rhs, rhs, EmptyTree) - val local = newSymbol( - owner = ctx.owner, - name = UniqueName.fresh((splicedTree.symbol.name.toString + "$_").toTermName).toTypeName, - flags = Synthetic, - info = TypeAlias(splicedTree.tpe.select(tpnme.Underlying)), - coord = span).asType - local.addAnnotation(Annotation(defn.QuotedRuntime_SplicedTypeAnnot, span)) - ctx.typeAssigner.assignType(untpd.TypeDef(local.name, alias), local) - } - - } - -} diff --git a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala index 63ffdffbddef..ac1e1868f26e 100644 --- a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala +++ b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala @@ -2,23 +2,26 @@ package dotty.tools package dotc package transform -import scala.annotation.tailrec import core._ import MegaPhase._ -import collection.mutable import Symbols._, Contexts._, Types._, StdNames._, NameOps._ +import patmat.SpaceEngine import util.Spans._ import typer.Applications.* import SymUtils._ import TypeUtils.* +import Annotations.* import Flags._, Constants._ import Decorators._ import NameKinds.{PatMatStdBinderName, PatMatAltsName, PatMatResultName} import config.Printers.patmatch import reporting._ -import dotty.tools.dotc.ast._ +import ast._ import util.Property._ +import scala.annotation.tailrec +import scala.collection.mutable + /** The pattern matching transform. 
 *  After this phase, the only Match nodes remaining in the code are simple switches
 *  where every pattern is an integer or string constant
@@ -45,9 +48,8 @@ class PatternMatcher extends MiniPhase {
       val translated = new Translator(matchType, this).translateMatch(tree)
       // check exhaustivity and unreachability
-      val engine = new patmat.SpaceEngine
-      engine.checkExhaustivity(tree)
-      engine.checkRedundancy(tree)
+      SpaceEngine.checkExhaustivity(tree)
+      SpaceEngine.checkRedundancy(tree)
       translated.ensureConforms(matchType)
     }
@@ -707,9 +709,9 @@ object PatternMatcher {
   // ----- Generating trees from plans ---------------

   /** The condition a test plan rewrites to */
-  private def emitCondition(plan: TestPlan): Tree = {
+  private def emitCondition(plan: TestPlan): Tree =
     val scrutinee = plan.scrutinee
-    (plan.test: @unchecked) match {
+    (plan.test: @unchecked) match
       case NonEmptyTest =>
         constToLiteral(
           scrutinee
@@ -737,41 +739,49 @@
       case TypeTest(tpt, trusted) =>
         val expectedTp = tpt.tpe
-        // An outer test is needed in a situation like `case x: y.Inner => ...`
-        def outerTestNeeded: Boolean = {
-          def go(expected: Type): Boolean = expected match {
-            case tref @ TypeRef(pre: SingletonType, _) =>
-              tref.symbol.isClass &&
-              ExplicitOuter.needsOuterIfReferenced(tref.symbol.asClass)
-            case AppliedType(tpe, _) => go(tpe)
-            case _ =>
-              false
-          }
-          // See the test for SI-7214 for motivation for dealias. Later `treeCondStrategy#outerTest`
-          // generates an outer test based on `patType.prefix` with automatically dealises.
-          go(expectedTp.dealias)
-        }
+        def typeTest(scrut: Tree, expected: Type): Tree =
+          val ttest = scrut.select(defn.Any_typeTest).appliedToType(expected)
+          if trusted then ttest.pushAttachment(TrustedTypeTestKey, ())
+          ttest
-        def outerTest: Tree = thisPhase.transformFollowingDeep {
-          val expectedOuter = singleton(expectedTp.normalizedPrefix)
-          val expectedClass = expectedTp.dealias.classSymbol.asClass
-          ExplicitOuter.ensureOuterAccessors(expectedClass)
-          scrutinee.ensureConforms(expectedTp)
-            .outerSelect(1, expectedClass.owner.typeRef)
-            .select(defn.Object_eq)
-            .appliedTo(expectedOuter)
-        }
+        /** An outer test is needed in a situation like `case x: y.Inner => ...`
+         *  or like `case x: O#Inner` if the owner of `Inner` is not a subclass of `O`.
+         *  Outer tests are added here instead of in TypeTestsCasts since they
+         *  might cause outer accessors to be added to inner classes (via ensureOuterAccessors)
+         *  and therefore have to run before ExplicitOuter.
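A sketch of a match that triggers such an outer test (illustration only, not part of the patch): matching against a path-dependent class type must compare the scrutinee's outer pointer with the prefix of the path in addition to the plain type test.

```scala
class Outer { class Inner }
val y = Outer()

def f(a: Any): String = a match
  case x: y.Inner => "an Inner whose outer instance is `y`" // type test plus outer test
  case _          => "anything else"
```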
+ */ + def addOuterTest(tree: Tree, expected: Type): Tree = expected.dealias match + case tref @ TypeRef(pre, _) => + tref.symbol match + case expectedCls: ClassSymbol if ExplicitOuter.needsOuterIfReferenced(expectedCls) => + def selectOuter = + ExplicitOuter.ensureOuterAccessors(expectedCls) + scrutinee.ensureConforms(expected).outerSelect(1, expectedCls.owner.typeRef) + if pre.isSingleton then + val expectedOuter = singleton(pre) + tree.and(selectOuter.select(defn.Object_eq).appliedTo(expectedOuter)) + else if !expectedCls.isStatic + && expectedCls.owner.isType + && !expectedCls.owner.derivesFrom(pre.classSymbol) + then + val testPre = + if expected.hasAnnotation(defn.UncheckedAnnot) then + AnnotatedType(pre, Annotation(defn.UncheckedAnnot, tree.span)) + else pre + tree.and(typeTest(selectOuter, testPre)) + else tree + case _ => tree + case AppliedType(tycon, _) => + addOuterTest(tree, tycon) + case _ => + tree - expectedTp.dealias match { + expectedTp.dealias match case expectedTp: SingletonType => scrutinee.isInstance(expectedTp) // will be translated to an equality test case _ => - val typeTest = scrutinee.select(defn.Any_typeTest).appliedToType(expectedTp) - if (trusted) typeTest.pushAttachment(TrustedTypeTestKey, ()) - if (outerTestNeeded) typeTest.and(outerTest) else typeTest - } - } - } + addOuterTest(typeTest(scrutinee, expectedTp), expectedTp) + end emitCondition @tailrec private def canFallThrough(plan: Plan): Boolean = plan match { diff --git a/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala b/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala index 21fc27cec0dd..15a1a823589c 100644 --- a/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala +++ b/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala @@ -9,17 +9,18 @@ import Contexts._ import Symbols._ import Constants._ import ast.Trees._ +import ast.untpd import ast.TreeTypeMap import SymUtils._ import NameKinds._ import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.ast.untpd import dotty.tools.dotc.config.ScalaRelease.* import scala.collection.mutable import dotty.tools.dotc.core.Annotations._ import dotty.tools.dotc.core.StdNames._ import dotty.tools.dotc.quoted._ -import dotty.tools.dotc.transform.TreeMapWithStages._ import dotty.tools.dotc.inlines.Inlines import scala.annotation.constructorOnly @@ -28,17 +29,15 @@ import scala.annotation.constructorOnly * * Transforms top level quote * ``` - * '{ ... - * @TypeSplice type X0 = {{ 0 | .. | contentsTpe0 | .. }} - * @TypeSplice type X2 = {{ 1 | .. | contentsTpe1 | .. }} + * '{ ... * val x1: U1 = ??? * val x2: U2 = ??? * ... - * {{{ 3 | x1 | contents0 | T0 }}} // hole + * {{{ 3 | x1 | holeContents0 | T0 }}} // hole * ... - * {{{ 4 | x2 | contents1 | T1 }}} // hole + * {{{ 4 | x2 | holeContents1 | T1 }}} // hole * ... - * {{{ 5 | x1, x2 | contents2 | T2 }}} // hole + * {{{ 5 | x1, x2 | holeContents2 | T2 }}} // hole * ... * } * ``` @@ -46,26 +45,23 @@ import scala.annotation.constructorOnly * ``` * unpickleExprV2( * pickled = [[ // PICKLED TASTY - * @TypeSplice type X0 // with bounds that do not contain captured types - * @TypeSplice type X1 // with bounds that do not contain captured types + * @TypeSplice type A // with bounds that do not contain captured types + * @TypeSplice type B // with bounds that do not contain captured types * val x1 = ??? * val x2 = ??? * ... - * {{{ 0 | x1 | | T0 }}} // hole - * ... - * {{{ 1 | x2 | | T1 }}} // hole - * ... - * {{{ 2 | x1, x2 | | T2 }}} // hole + * {{{ 0 | x1 | | T0 }}} // hole + * ... 
+ * {{{ 1 | x2 | | T1 }}} // hole + * ... + * {{{ 2 | x1, x2 | | T2 }}} // hole * ... * ]], - * typeHole = (idx: Int, args: List[Any]) => idx match { - * case 0 => contentsTpe0.apply(args(0).asInstanceOf[Type[?]]) // beta reduced - * case 1 => contentsTpe1.apply(args(0).asInstanceOf[Type[?]]) // beta reduced - * }, + * typeHole = Seq(a, b), * termHole = (idx: Int, args: List[Any], quotes: Quotes) => idx match { - * case 3 => content0.apply(args(0).asInstanceOf[Expr[U1]]).apply(quotes) // beta reduced - * case 4 => content1.apply(args(0).asInstanceOf[Expr[U2]]).apply(quotes) // beta reduced - * case 5 => content2.apply(args(0).asInstanceOf[Expr[U1]], args(1).asInstanceOf[Expr[U2]]).apply(quotes) // beta reduced + * case 3 => holeContents0.apply(args(0).asInstanceOf[Expr[U1]]).apply(quotes) // beta reduced + * case 4 => holeContents1.apply(args(0).asInstanceOf[Expr[U2]]).apply(quotes) // beta reduced + * case 5 => holeContents2.apply(args(0).asInstanceOf[Expr[U1]], args(1).asInstanceOf[Expr[U2]]).apply(quotes) // beta reduced * }, * ) * ``` @@ -84,56 +80,41 @@ class PickleQuotes extends MacroTransform { override def checkPostCondition(tree: Tree)(using Context): Unit = tree match - case tree: RefTree if !Inlines.inInlineMethod => - assert(!tree.symbol.isQuote) - assert(!tree.symbol.isExprSplice) - case _ : TypeDef if !Inlines.inInlineMethod => - assert(!tree.symbol.hasAnnotation(defn.QuotedRuntime_SplicedTypeAnnot), - s"${tree.symbol} should have been removed by PickledQuotes because it has a @quoteTypeTag") + case tree: Quote => + assert(Inlines.inInlineMethod) + case tree: Splice => + assert(Inlines.inInlineMethod) case _ => override def run(using Context): Unit = - if (ctx.compilationUnit.needsStaging) super.run(using freshStagingContext) + if (ctx.compilationUnit.needsStaging) super.run protected def newTransformer(using Context): Transformer = new Transformer { override def transform(tree: tpd.Tree)(using Context): tpd.Tree = tree match - case Apply(Select(Apply(TypeApply(fn, List(tpt)), List(code)),nme.apply), List(quotes)) - if fn.symbol == defn.QuotedRuntime_exprQuote => - val (contents, codeWithHoles) = makeHoles(code) - val sourceRef = Inlines.inlineCallTrace(ctx.owner, tree.sourcePos) - val codeWithHoles2 = Inlined(sourceRef, Nil, codeWithHoles) - val pickled = PickleQuotes(quotes, codeWithHoles2, contents, tpt.tpe, false) - transform(pickled) // pickle quotes that are in the contents - case Apply(TypeApply(_, List(tpt)), List(quotes)) if tree.symbol == defn.QuotedTypeModule_of => - tpt match - case Select(t, _) if tpt.symbol == defn.QuotedType_splice => - // `Type.of[t.Underlying](quotes)` --> `t` - ref(t.symbol)(using ctx.withSource(tpt.source)).withSpan(tpt.span) - case _ => - val (contents, tptWithHoles) = makeHoles(tpt) - PickleQuotes(quotes, tptWithHoles, contents, tpt.tpe, true) + case Apply(Select(quote: Quote, nme.apply), List(quotes)) => + val (holeContents, quote1) = extractHolesContents(quote) + val quote2 = encodeTypeArgs(quote1) + val holeContents1 = holeContents.map(transform(_)) + PickleQuotes.pickle(quote2, quotes, holeContents1) case tree: DefDef if !tree.rhs.isEmpty && tree.symbol.isInlineMethod => - // Shrink size of the tree. The methods have already been inlined. 
- // TODO move to FirstTransform to trigger even without quotes - cpy.DefDef(tree)(rhs = defaultValue(tree.rhs.tpe)) + tree case _ => super.transform(tree) } - private def makeHoles(tree: tpd.Tree)(using Context): (List[Tree], tpd.Tree) = - + private def extractHolesContents(quote: tpd.Quote)(using Context): (List[Tree], tpd.Quote) = class HoleContentExtractor extends Transformer: - private val contents = List.newBuilder[Tree] + private val holeContents = List.newBuilder[Tree] override def transform(tree: tpd.Tree)(using Context): tpd.Tree = tree match - case tree @ Hole(isTerm, _, _, content, _) => - if !content.isEmpty then - contents += content - val holeType = - if isTerm then getTermHoleType(tree.tpe) else getTypeHoleType(tree.tpe) - val hole = cpy.Hole(tree)(content = EmptyTree, TypeTree(holeType)) - if isTerm then Inlined(EmptyTree, Nil, hole).withSpan(tree.span) else hole + case tree @ Hole(isTerm, _, _, content) => + assert(isTerm) + assert(!content.isEmpty) + holeContents += content + val holeType = getTermHoleType(tree.tpe) + val hole = untpd.cpy.Hole(tree)(content = EmptyTree).withType(holeType) + cpy.Inlined(tree)(EmptyTree, Nil, hole) case tree: DefTree => val newAnnotations = tree.symbol.annotations.mapconserve { annot => annot.derivedAnnotation(transform(annot.tree)(using ctx.withOwner(tree.symbol))) @@ -153,20 +134,6 @@ class PickleQuotes extends MacroTransform { } } - /** Remove references to local types that will not be defined in this quote */ - private def getTypeHoleType(using Context) = new TypeMap() { - override def apply(tp: Type): Type = tp match - case tp: TypeRef if tp.typeSymbol.isTypeSplice => - apply(tp.dealias) - case tp @ TypeRef(pre, _) if pre == NoPrefix || pre.termSymbol.isLocal => - val hiBound = tp.typeSymbol.info match - case info: ClassInfo => info.parents.reduce(_ & _) - case info => info.hiBound - apply(hiBound) - case tp => - mapOver(tp) - } - /** Remove references to local types that will not be defined in this quote */ private def getTermHoleType(using Context) = new TypeMap() { override def apply(tp: Type): Type = tp match @@ -180,19 +147,86 @@ class PickleQuotes extends MacroTransform { mapOver(tp) } - /** Get the contents of the transformed tree */ + /** Get the holeContents of the transformed tree */ def getContents() = - val res = contents.result - contents.clear() + val res = holeContents.result + holeContents.clear() res end HoleContentExtractor val holeMaker = new HoleContentExtractor - val newTree = holeMaker.transform(tree) - (holeMaker.getContents(), newTree) - + val body1 = holeMaker.transform(quote.body) + val quote1 = cpy.Quote(quote)(body1, quote.tags) + + (holeMaker.getContents(), quote1) + end extractHolesContents + + /** Encode quote tags as holes in the quote body. + * + * ```scala + * '{ ... t.Underlying ... u.Underlying ... } + * ``` + * becomes + * ```scala + * '{ + * type T = {{ 0 | .. | .. | .. }} + * type U = {{ 1 | .. | .. | .. }} + * ... T ... U ... 
+ * } + * ``` + */ + private def encodeTypeArgs(quote: tpd.Quote)(using Context): tpd.Quote = + if quote.tags.isEmpty then quote + else + val tdefs = quote.tags.zipWithIndex.map(mkTagSymbolAndAssignType) + val typeMapping = quote.tags.map(_.tpe).zip(tdefs.map(_.symbol.typeRef)).toMap + val typeMap = new TypeMap { + override def apply(tp: Type): Type = tp match + case TypeRef(tag: TermRef, _) if tp.typeSymbol == defn.QuotedType_splice => + typeMapping.getOrElse(tag, tp) + case _ => mapOver(tp) + } + def treeMap(tree: Tree): Tree = tree match + case Select(qual, _) if tree.symbol == defn.QuotedType_splice => + typeMapping.get(qual.tpe) match + case Some(tag) => TypeTree(tag).withSpan(tree.span) + case None => tree + case _ => tree + val body1 = new TreeTypeMap(typeMap, treeMap).transform(quote.body) + cpy.Quote(quote)(Block(tdefs, body1), quote.tags) + + private def mkTagSymbolAndAssignType(typeArg: Tree, idx: Int)(using Context): TypeDef = { + val holeType = getTypeHoleType(typeArg.tpe.select(tpnme.Underlying)) + val hole = untpd.cpy.Hole(typeArg)(isTerm = false, idx, Nil, EmptyTree).withType(holeType) + val local = newSymbol( + owner = ctx.owner, + name = UniqueName.fresh(hole.tpe.dealias.typeSymbol.name.toTypeName), + flags = Synthetic, + info = TypeAlias(typeArg.tpe.select(tpnme.Underlying)), + coord = typeArg.span + ).asType + local.addAnnotation(Annotation(defn.QuotedRuntime_SplicedTypeAnnot, typeArg.span)) + ctx.typeAssigner.assignType(untpd.TypeDef(local.name, hole), local).withSpan(typeArg.span) + } - end makeHoles + /** Remove references to local types that will not be defined in this quote */ + private def getTypeHoleType(using Context) = new TypeMap() { + override def apply(tp: Type): Type = tp match + case tp: TypeRef if tp.typeSymbol.isTypeSplice => + apply(tp.dealias) + case tp @ TypeRef(pre, _) if isLocalPath(pre) => + val hiBound = tp.typeSymbol.info match + case info: ClassInfo => info.parents.reduce(_ & _) + case info => info.hiBound + apply(hiBound) + case tp => + mapOver(tp) + + private def isLocalPath(tp: Type): Boolean = tp match + case NoPrefix => true + case tp: TermRef if !tp.symbol.is(Package) => isLocalPath(tp.prefix) + case tp => false + } } @@ -202,7 +236,10 @@ object PickleQuotes { val name: String = "pickleQuotes" val description: String = "turn quoted trees into explicit run-time data structures" - def apply(quotes: Tree, body: Tree, contents: List[Tree], originalTp: Type, isType: Boolean)(using Context) = { + def pickle(quote: Quote, quotes: Tree, holeContents: List[Tree])(using Context) = { + val body = quote.body + val bodyType = quote.bodyType + /** Helper methods to construct trees calling methods in `Quotes.reflect` based on the current `quotes` tree */ object reflect extends ReifiedReflect { val quotesTree = quotes @@ -256,7 +293,7 @@ object PickleQuotes { */ def liftedValue(lit: Literal, lifter: Symbol) = val exprType = defn.QuotedExprClass.typeRef.appliedTo(body.tpe) - ref(lifter).appliedToType(originalTp).select(nme.apply).appliedTo(lit).appliedTo(quotes) + ref(lifter).appliedToType(bodyType).select(nme.apply).appliedTo(lit).appliedTo(quotes) def pickleAsValue(lit: Literal) = { // TODO should all constants be pickled as Literals? @@ -289,24 +326,22 @@ object PickleQuotes { * this closure is always applied directly to the actual context and the BetaReduce phase removes it. 
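A sketch of user code whose quoted type references become such tags and, once pickled, type holes (the name `listOf` is illustrative): the type `e` bound by the quoted type pattern carries a `Type[e]` tag into the quote, which `encodeTypeArgs` above encodes as an indexed `type T = {{ i | ... }}` definition.

```scala
import scala.quoted.*

// The bound type `e` is available as a `Type[e]` tag inside the quote body;
// when the quote is pickled, the tag becomes an indexed type hole.
def listOf(t: Type[?])(using Quotes): Type[?] =
  t match
    case '[e] => Type.of[List[e]]
```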
*/ def pickleAsTasty() = { - val pickleQuote = PickledQuotes.pickleQuote(body) + val body1 = + if body.isType then body + else Inlined(Inlines.inlineCallTrace(ctx.owner, quote.sourcePos), Nil, body) + val pickleQuote = PickledQuotes.pickleQuote(body1) val pickledQuoteStrings = pickleQuote match case x :: Nil => Literal(Constant(x)) case xs => tpd.mkList(xs.map(x => Literal(Constant(x))), TypeTree(defn.StringType)) - // TODO split holes earlier into types and terms. This all holes in each category can have consecutive indices - val (typeSplices, termSplices) = contents.zipWithIndex.partition { - _._1.tpe.derivesFrom(defn.QuotedTypeClass) - } - // This and all closures in typeSplices are removed by the BetaReduce phase val types = - if typeSplices.isEmpty then Literal(Constant(null)) // keep pickled quote without contents as small as possible - else SeqLiteral(typeSplices.map(_._1), TypeTree(defn.QuotedTypeClass.typeRef.appliedTo(WildcardType))) + if quote.tags.isEmpty then Literal(Constant(null)) // keep pickled quote without holeContents as small as possible + else SeqLiteral(quote.tags, TypeTree(defn.QuotedTypeClass.typeRef.appliedTo(TypeBounds.emptyPolyKind))) // This and all closures in termSplices are removed by the BetaReduce phase val termHoles = - if termSplices.isEmpty then Literal(Constant(null)) // keep pickled quote without contents as small as possible + if holeContents.isEmpty then Literal(Constant(null)) // keep pickled quote without holeContents as small as possible else Lambda( MethodType( @@ -314,15 +349,18 @@ object PickleQuotes { List(defn.IntType, defn.SeqType.appliedTo(defn.AnyType), defn.QuotesClass.typeRef), defn.QuotedExprClass.typeRef.appliedTo(defn.AnyType)), args => - val cases = termSplices.map { case (splice, idx) => - val defn.FunctionOf(argTypes, defn.FunctionOf(quotesType :: _, _, _, _), _, _) = splice.tpe: @unchecked + val cases = holeContents.zipWithIndex.map { case (splice, idx) => + val defn.FunctionOf(argTypes, defn.FunctionOf(quotesType :: _, _, _), _) = splice.tpe: @unchecked val rhs = { val spliceArgs = argTypes.zipWithIndex.map { (argType, i) => args(1).select(nme.apply).appliedTo(Literal(Constant(i))).asInstance(argType) } val Block(List(ddef: DefDef), _) = splice: @unchecked // TODO: beta reduce inner closure? Or wait until BetaReduce phase? 
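Schematically, the beta reduction referred to here rewrites a closure applied directly to its arguments into a binding plus the closure body; a source-level analogue (illustration only):

```scala
// Before BetaReduce: a closure applied immediately to an argument.
val before: Int = ((x: Int) => x + 1)(42)

// After BetaReduce: the application is inlined as a binding plus the body.
val after: Int = { val x = 42; x + 1 }
```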
- BetaReduce(ddef, spliceArgs).select(nme.apply).appliedTo(args(2).asInstance(quotesType)) + BetaReduce( + splice + .select(nme.apply).appliedToArgs(spliceArgs)) + .select(nme.apply).appliedTo(args(2).asInstance(quotesType)) } CaseDef(Literal(Constant(idx)), EmptyTree, rhs) } @@ -331,18 +369,18 @@ object PickleQuotes { case _ => Match(args(0).annotated(New(ref(defn.UncheckedAnnot.typeRef))), cases) ) - val quoteClass = if isType then defn.QuotedTypeClass else defn.QuotedExprClass - val quotedType = quoteClass.typeRef.appliedTo(originalTp) + val quoteClass = if quote.isTypeQuote then defn.QuotedTypeClass else defn.QuotedExprClass + val quotedType = quoteClass.typeRef.appliedTo(bodyType) val lambdaTpe = MethodType(defn.QuotesClass.typeRef :: Nil, quotedType) val unpickleMeth = - if isType then defn.QuoteUnpickler_unpickleTypeV2 + if quote.isTypeQuote then defn.QuoteUnpickler_unpickleTypeV2 else defn.QuoteUnpickler_unpickleExprV2 val unpickleArgs = - if isType then List(pickledQuoteStrings, types) + if quote.isTypeQuote then List(pickledQuoteStrings, types) else List(pickledQuoteStrings, types, termHoles) quotes .asInstance(defn.QuoteUnpicklerClass.typeRef) - .select(unpickleMeth).appliedToType(originalTp) + .select(unpickleMeth).appliedToType(bodyType) .appliedToArgs(unpickleArgs).withSpan(body.span) } @@ -369,8 +407,8 @@ object PickleQuotes { case Inlined(_, Nil, e) => getLiteral(e) case _ => None - if (isType) then - if contents.isEmpty && body.symbol.isPrimitiveValueClass then taggedType() + if body.isType then + if holeContents.isEmpty && body.symbol.isPrimitiveValueClass then taggedType() else pickleAsTasty() else getLiteral(body) match diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index 2039a8f19558..ac3dc15092a0 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -157,14 +157,20 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase checkInferredWellFormed(tree.tpt) if sym.is(Method) then if sym.isSetter then - sym.copyAndKeepAnnotationsCarrying(thisPhase, Set(defn.SetterMetaAnnot)) + sym.keepAnnotationsCarrying(thisPhase, Set(defn.SetterMetaAnnot)) + if sym.isOneOf(GivenOrImplicit) then + val cls = sym.info.finalResultType.classSymbol + if cls.isOneOf(GivenOrImplicit) then + sym.updateAnnotationsAfter(thisPhase, + atPhase(thisPhase)(cls.annotationsCarrying(Set(defn.CompanionMethodMetaAnnot))) + ++ sym.annotations) else if sym.is(Param) then - sym.copyAndKeepAnnotationsCarrying(thisPhase, Set(defn.ParamMetaAnnot), orNoneOf = defn.NonBeanMetaAnnots) + sym.keepAnnotationsCarrying(thisPhase, Set(defn.ParamMetaAnnot), orNoneOf = defn.NonBeanMetaAnnots) else if sym.is(ParamAccessor) then - sym.copyAndKeepAnnotationsCarrying(thisPhase, Set(defn.GetterMetaAnnot, defn.FieldMetaAnnot)) + sym.keepAnnotationsCarrying(thisPhase, Set(defn.GetterMetaAnnot, defn.FieldMetaAnnot)) else - sym.copyAndKeepAnnotationsCarrying(thisPhase, Set(defn.GetterMetaAnnot, defn.FieldMetaAnnot), orNoneOf = defn.NonBeanMetaAnnots) + sym.keepAnnotationsCarrying(thisPhase, Set(defn.GetterMetaAnnot, defn.FieldMetaAnnot), orNoneOf = defn.NonBeanMetaAnnots) if sym.isScala2Macro && !ctx.settings.XignoreScala2Macros.value then if !sym.owner.unforcedDecls.exists(p => !p.isScala2Macro && p.name == sym.name && p.signature == sym.signature) // Allow scala.reflect.materializeClassTag to be able to compile scala/reflect/package.scala @@ 
-277,16 +283,14 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase if tree.isType then checkNotPackage(tree) else - if tree.symbol.is(Inline) && !Inlines.inInlineMethod then - ctx.compilationUnit.needsInlining = true checkNoConstructorProxy(tree) + registerNeedsInlining(tree) tree.tpe match { case tpe: ThisType => This(tpe.cls).withSpan(tree.span) case _ => tree } case tree @ Select(qual, name) => - if tree.symbol.is(Inline) then - ctx.compilationUnit.needsInlining = true + registerNeedsInlining(tree) if name.isTypeName then Checking.checkRealizable(qual.tpe, qual.srcPos) withMode(Mode.Type)(super.transform(checkNotPackage(tree))) @@ -296,19 +300,21 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase case tree: Apply => val methType = tree.fun.tpe.widen.asInstanceOf[MethodType] val app = - if (methType.isErasedMethod) + if (methType.hasErasedParams) tpd.cpy.Apply(tree)( tree.fun, - tree.args.mapConserve(arg => - if methType.isResultDependent then - Checking.checkRealizable(arg.tpe, arg.srcPos, "erased argument") - if (methType.isImplicitMethod && arg.span.isSynthetic) - arg match - case _: RefTree | _: Apply | _: TypeApply if arg.symbol.is(Erased) => - dropInlines.transform(arg) - case _ => - PruneErasedDefs.trivialErasedTree(arg) - else dropInlines.transform(arg))) + tree.args.zip(methType.erasedParams).map((arg, isErased) => + if !isErased then arg + else + if methType.isResultDependent then + Checking.checkRealizable(arg.tpe, arg.srcPos, "erased argument") + if (methType.isImplicitMethod && arg.span.isSynthetic) + arg match + case _: RefTree | _: Apply | _: TypeApply if arg.symbol.is(Erased) => + dropInlines.transform(arg) + case _ => + PruneErasedDefs.trivialErasedTree(arg) + else dropInlines.transform(arg))) else tree def app1 = @@ -334,10 +340,9 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase val patterns1 = transform(patterns) cpy.UnApply(tree)(transform(fun), transform(implicits), patterns1) case tree: TypeApply => - if tree.symbol.isQuote then + if tree.symbol == defn.QuotedTypeModule_of then ctx.compilationUnit.needsStaging = true - if tree.symbol.is(Inline) then - ctx.compilationUnit.needsInlining = true + registerNeedsInlining(tree) val tree1 @ TypeApply(fn, args) = normalizeTypeArgs(tree) for arg <- args do checkInferredWellFormed(arg) @@ -355,6 +360,7 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase case Inlined(call, bindings, expansion) if !call.isEmpty => val pos = call.sourcePos CrossVersionChecks.checkExperimentalRef(call.symbol, pos) + withMode(Mode.InlinedCall)(transform(call)) val callTrace = Inlines.inlineCallTrace(call.symbol, pos)(using ctx.withSource(pos.source)) cpy.Inlined(tree)(callTrace, transformSub(bindings), transform(expansion)(using inlineContext(call))) case templ: Template => @@ -386,6 +392,8 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase VarianceChecker.check(tree) annotateExperimental(sym) checkMacroAnnotation(sym) + if sym.isOneOf(GivenOrImplicit) then + sym.keepAnnotationsCarrying(thisPhase, Set(defn.CompanionClassMetaAnnot), orNoneOf = defn.MetaAnnots) tree.rhs match case impl: Template => for parent <- impl.parents do @@ -475,6 +483,9 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase ) case Block(_, Closure(_, _, tpt)) if ExpandSAMs.needsWrapperClass(tpt.tpe) => superAcc.withInvalidCurrentClass(super.transform(tree)) + case _: Quote => + 
ctx.compilationUnit.needsStaging = true + super.transform(tree) case tree => super.transform(tree) } @@ -494,6 +505,10 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase private def normalizeErasedRhs(rhs: Tree, sym: Symbol)(using Context) = if (sym.isEffectivelyErased) dropInlines.transform(rhs) else rhs + private def registerNeedsInlining(tree: Tree)(using Context): Unit = + if tree.symbol.is(Inline) && !Inlines.inInlineMethod && !ctx.mode.is(Mode.InlinedCall) then + ctx.compilationUnit.needsInlining = true + /** Check if the definition has macro annotation and sets `compilationUnit.hasMacroAnnotations` if needed. */ private def registerIfHasMacroAnnotations(tree: DefTree)(using Context) = if !Inlines.inInlineMethod && MacroAnnotations.hasMacroAnnotation(tree.symbol) then diff --git a/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala b/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala index 568512207fde..17f2d11ccfec 100644 --- a/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala +++ b/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala @@ -13,6 +13,7 @@ import ast.tpd import SymUtils._ import config.Feature import Decorators.* +import dotty.tools.dotc.core.Types.MethodType /** This phase makes all erased term members of classes private so that they cannot * conflict with non-erased members. This is needed so that subsequent phases like @@ -38,8 +39,11 @@ class PruneErasedDefs extends MiniPhase with SymTransformer { thisTransform => else sym.copySymDenotation(initFlags = sym.flags | Private) override def transformApply(tree: Apply)(using Context): Tree = - if !tree.fun.tpe.widen.isErasedMethod then tree - else cpy.Apply(tree)(tree.fun, tree.args.map(trivialErasedTree)) + tree.fun.tpe.widen match + case mt: MethodType if mt.hasErasedParams => + cpy.Apply(tree)(tree.fun, tree.args.zip(mt.erasedParams).map((a, e) => if e then trivialErasedTree(a) else a)) + case _ => + tree override def transformValDef(tree: ValDef)(using Context): Tree = checkErasedInExperimental(tree.symbol) diff --git a/compiler/src/dotty/tools/dotc/transform/Recheck.scala b/compiler/src/dotty/tools/dotc/transform/Recheck.scala index c524bbb7702f..527c73d02250 100644 --- a/compiler/src/dotty/tools/dotc/transform/Recheck.scala +++ b/compiler/src/dotty/tools/dotc/transform/Recheck.scala @@ -4,7 +4,7 @@ package transform import core.* import Symbols.*, Contexts.*, Types.*, ContextOps.*, Decorators.*, SymDenotations.* -import Flags.*, SymUtils.*, NameKinds.*, Denotations.Denotation +import Flags.*, SymUtils.*, NameKinds.*, Denotations.{Denotation, SingleDenotation} import ast.* import Names.Name import Phases.Phase @@ -22,7 +22,7 @@ import StdNames.nme import reporting.trace import annotation.constructorOnly import cc.CaptureSet.IdempotentCaptRefMap -import dotty.tools.dotc.core.Denotations.SingleDenotation +import annotation.tailrec object Recheck: import tpd.* @@ -406,7 +406,14 @@ abstract class Recheck extends Phase, SymTransformer: NoType def recheckStats(stats: List[Tree])(using Context): Unit = - stats.foreach(recheck(_)) + @tailrec def traverse(stats: List[Tree])(using Context): Unit = stats match + case (imp: Import) :: rest => + traverse(rest)(using ctx.importContext(imp, imp.symbol)) + case stat :: rest => + recheck(stat) + traverse(rest) + case _ => + traverse(stats) def recheckDef(tree: ValOrDefDef, sym: Symbol)(using Context): Unit = inContext(ctx.localContext(tree, sym)) { diff --git 
a/compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala b/compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala index e462f82b1dad..6e73d683fa2c 100644 --- a/compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala +++ b/compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala @@ -17,7 +17,6 @@ import dotty.tools.dotc.core.Annotations._ import dotty.tools.dotc.core.Names._ import dotty.tools.dotc.core.StdNames._ import dotty.tools.dotc.quoted._ -import dotty.tools.dotc.transform.TreeMapWithStages._ import scala.annotation.constructorOnly @@ -76,8 +75,8 @@ trait ReifiedReflect: .select(defn.Quotes_reflect_TypeRepr_of) .appliedToType(tpe) .appliedTo( - ref(defn.QuotedTypeModule_of) - .appliedToType(tpe) + tpd.Quote(TypeTree(tpe), Nil) + .select(nme.apply) .appliedTo(quotesTree) ) diff --git a/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala b/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala index dd109ce153eb..99b6be1eea8a 100644 --- a/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala +++ b/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala @@ -119,6 +119,9 @@ object ResolveSuper { report.error(IllegalSuperAccessor(base, memberName, targetName, acc, accTp, other.symbol, otherTp), base.srcPos) bcs = bcs.tail } + if sym.is(Accessor) then + report.error( + em"parent ${acc.owner} has a super call which binds to the value ${sym.showFullName}. Super calls can only target methods.", base) sym.orElse { val originalName = acc.name.asTermName.originalOfSuperAccessorName report.error(em"Member method ${originalName.debugString} of mixin ${acc.owner} is missing a concrete super implementation in $base.", base.srcPos) diff --git a/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala b/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala index c1f891d6293a..2248fbc8d570 100644 --- a/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala +++ b/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala @@ -70,7 +70,7 @@ class SpecializeFunctions extends MiniPhase { /** Dispatch to specialized `apply`s in user code when available */ override def transformApply(tree: Apply)(using Context) = tree match { - case Apply(fun: NameTree, args) if fun.name == nme.apply && args.size <= 3 && fun.symbol.owner.isType => + case Apply(fun: NameTree, args) if fun.name == nme.apply && args.size <= 3 && fun.symbol.maybeOwner.isType => val argTypes = fun.tpe.widen.firstParamTypes.map(_.widenSingleton.dealias) val retType = tree.tpe.widenSingleton.dealias val isSpecializable = diff --git a/compiler/src/dotty/tools/dotc/transform/Splicer.scala b/compiler/src/dotty/tools/dotc/transform/Splicer.scala index b936afb73dc8..741c770e2c77 100644 --- a/compiler/src/dotty/tools/dotc/transform/Splicer.scala +++ b/compiler/src/dotty/tools/dotc/transform/Splicer.scala @@ -44,7 +44,7 @@ object Splicer { * See: `Staging` */ def splice(tree: Tree, splicePos: SrcPos, spliceExpansionPos: SrcPos, classLoader: ClassLoader)(using Context): Tree = tree match { - case Quoted(quotedTree) => quotedTree + case Quote(quotedTree, Nil) => quotedTree case _ => val macroOwner = newSymbol(ctx.owner, nme.MACROkw, Macro | Synthetic, defn.AnyType, coord = tree.span) try @@ -86,7 +86,7 @@ object Splicer { } } - /** Checks that no symbol that whas generated within the macro expansion has an out of scope reference */ + /** Checks that no symbol that was generated within the macro expansion has an out of scope reference */ def checkEscapedVariables(tree: Tree, 
expansionOwner: Symbol)(using Context): tree.type = new TreeTraverser { private[this] var locals = Set.empty[Symbol] @@ -119,7 +119,10 @@ object Splicer { sym.exists && !sym.is(Package) && sym.owner.ownersIterator.exists(x => x == expansionOwner || // symbol was generated within this macro expansion - x.is(Macro, butNot = Method) && x.name == nme.MACROkw // symbol was generated within another macro expansion + { // symbol was generated within another macro expansion + isMacroOwner(x) && + !ctx.owner.ownersIterator.contains(x) + } ) && !locals.contains(sym) // symbol is not in current scope }.traverse(tree) @@ -133,7 +136,7 @@ object Splicer { * See: `Staging` */ def checkValidMacroBody(tree: Tree)(using Context): Unit = tree match { - case Quoted(_) => // ok + case Quote(_, Nil) => // ok case _ => type Env = Set[Symbol] @@ -152,15 +155,15 @@ object Splicer { case Block(Nil, expr) => checkIfValidArgument(expr) case Typed(expr, _) => checkIfValidArgument(expr) - case Apply(Select(Apply(fn, quoted :: Nil), nme.apply), _) if fn.symbol == defn.QuotedRuntime_exprQuote => + case Apply(Select(Quote(body, _), nme.apply), _) => val noSpliceChecker = new TreeTraverser { def traverse(tree: Tree)(using Context): Unit = tree match - case Spliced(_) => + case Splice(_) => report.error("Quoted argument of macros may not have splices", tree.srcPos) case _ => traverseChildren(tree) } - noSpliceChecker.traverse(quoted) + noSpliceChecker.traverse(body) case Apply(TypeApply(fn, List(quoted)), _)if fn.symbol == defn.QuotedTypeModule_of => // OK @@ -200,7 +203,7 @@ object Splicer { case Typed(expr, _) => checkIfValidStaticCall(expr) - case Apply(Select(Apply(fn, quoted :: Nil), nme.apply), _) if fn.symbol == defn.QuotedRuntime_exprQuote => + case Apply(Select(Quote(quoted, Nil), nme.apply), _) => // OK, canceled and warning emitted case Call(fn, args) @@ -222,6 +225,14 @@ object Splicer { checkIfValidStaticCall(tree)(using Set.empty) } + /** Is this the dummy owner of a macro expansion */ + def isMacroOwner(sym: Symbol)(using Context): Boolean = + sym.is(Macro, butNot = Method) && sym.name == nme.MACROkw + + /** Is this the dummy owner of a macro expansion */ + def inMacroExpansion(using Context) = + ctx.owner.ownersIterator.exists(isMacroOwner) + /** Tree interpreter that evaluates the tree. * Interpreter is assumed to start at quotation level -1. 
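For reference, the macro shape these validity checks accept looks like the following sketch (`showType` and `showTypeImpl` are illustrative names): the right-hand side of the inline method is a single splice of a call to a static method, which the interpreter below then evaluates at compile time.

```scala
import scala.quoted.*

inline def showType[T]: String = ${ showTypeImpl[T] } // body is exactly one splice

def showTypeImpl[T: Type](using Quotes): Expr[String] =
  Expr(Type.show[T]) // executed by the Splicer's interpreter during expansion
```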
*/ @@ -229,15 +240,15 @@ object Splicer { override protected def interpretTree(tree: Tree)(implicit env: Env): Object = tree match { // Interpret level -1 quoted code `'{...}` (assumed without level 0 splices) - case Apply(Select(Apply(TypeApply(fn, _), quoted :: Nil), nme.apply), _) if fn.symbol == defn.QuotedRuntime_exprQuote => - val quoted1 = quoted match { - case quoted: Ident if quoted.symbol.isAllOf(InlineByNameProxy) => + case Apply(Select(Quote(body, _), nme.apply), _) => + val body1 = body match { + case expr: Ident if expr.symbol.isAllOf(InlineByNameProxy) => // inline proxy for by-name parameter - quoted.symbol.defTree.asInstanceOf[DefDef].rhs - case Inlined(EmptyTree, _, quoted) => quoted - case _ => quoted + expr.symbol.defTree.asInstanceOf[DefDef].rhs + case Inlined(EmptyTree, _, body1) => body1 + case _ => body } - new ExprImpl(Inlined(EmptyTree, Nil, QuoteUtils.changeOwnerOfTree(quoted1, ctx.owner)).withSpan(quoted1.span), SpliceScope.getCurrent) + new ExprImpl(Inlined(EmptyTree, Nil, QuoteUtils.changeOwnerOfTree(body1, ctx.owner)).withSpan(body1.span), SpliceScope.getCurrent) // Interpret level -1 `Type.of[T]` case Apply(TypeApply(fn, quoted :: Nil), _) if fn.symbol == defn.QuotedTypeModule_of => diff --git a/compiler/src/dotty/tools/dotc/transform/Splicing.scala b/compiler/src/dotty/tools/dotc/transform/Splicing.scala index ad3f0322130d..ff5dc5042eaf 100644 --- a/compiler/src/dotty/tools/dotc/transform/Splicing.scala +++ b/compiler/src/dotty/tools/dotc/transform/Splicing.scala @@ -14,15 +14,16 @@ import util.Spans._ import SymUtils._ import NameKinds._ import dotty.tools.dotc.ast.tpd -import StagingContext._ import scala.collection.mutable import dotty.tools.dotc.core.Annotations._ import dotty.tools.dotc.core.Names._ import dotty.tools.dotc.core.StdNames._ import dotty.tools.dotc.quoted._ -import dotty.tools.dotc.transform.TreeMapWithStages._ import dotty.tools.dotc.config.ScalaRelease.* +import dotty.tools.dotc.staging.StagingLevel.* +import dotty.tools.dotc.staging.QuoteTypeTags +import dotty.tools.dotc.staging.QuoteTypeTags.* import scala.annotation.constructorOnly @@ -77,7 +78,7 @@ class Splicing extends MacroTransform: override def run(using Context): Unit = if ctx.compilationUnit.needsStaging then - super.run(using freshStagingContext) + super.run protected def newTransformer(using Context): Transformer = Level0QuoteTransformer @@ -86,11 +87,9 @@ class Splicing extends MacroTransform: override def transform(tree: tpd.Tree)(using Context): tpd.Tree = assert(level == 0) tree match - case Apply(Select(Apply(TypeApply(fn,_), List(code)),nme.apply),List(quotes)) - if fn.symbol == defn.QuotedRuntime_exprQuote => - QuoteTransformer().transform(tree) - case TypeApply(_, _) if tree.symbol == defn.QuotedTypeModule_of => - QuoteTransformer().transform(tree) + case tree: Quote => + val body1 = QuoteTransformer().transform(tree.body)(using quoteContext) + cpy.Quote(tree)(body1, tree.tags) case tree: DefDef if tree.symbol.is(Inline) => // Quotes in inlined methods are only pickled after they are inlined. tree @@ -98,7 +97,6 @@ class Splicing extends MacroTransform: super.transform(tree) end Level0QuoteTransformer - /** Transforms all direct splices in the current quote and replace them with holes. */ private class QuoteTransformer() extends Transformer: /** Set of definitions in the current quote */ @@ -107,37 +105,19 @@ class Splicing extends MacroTransform: /** Number of holes created in this quote. Used for indexing holes. 
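For orientation, the kind of tree this transformer processes (a sketch; `plus` is an illustrative name): a splice sitting directly inside a level-1 quote body, which is replaced by a hole indexed by this counter.

```scala
import scala.quoted.*

// The `${ x }` below is a direct splice in the quote; Splicing replaces it
// with an indexed hole that captures the reference to `x`.
def plus(x: Expr[Int])(using Quotes): Expr[Int] =
  '{ 1 + ${ x } }
```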
   */
  private var numHoles = 0

-  /** Mapping from the term symbol of a `Type[T]` to it's hole. Used to deduplicate type holes. */
-  private val typeHoles = mutable.Map.empty[Symbol, Hole]
+  /** Mapping from the term of a `Type[T]` to its hole. Used to deduplicate type holes. */
+  private val typeHoles = mutable.Map.empty[TermRef, Hole]

   override def transform(tree: tpd.Tree)(using Context): tpd.Tree =
+    assert(level > 0)
     tree match
-      case Apply(fn, List(splicedCode)) if fn.symbol == defn.QuotedRuntime_exprNestedSplice =>
-        if level > 1 then
-          val splicedCode1 = super.transform(splicedCode)(using spliceContext)
-          cpy.Apply(tree)(fn, List(splicedCode1))
-        else
-          val holeIdx = numHoles
-          numHoles += 1
-          val splicer = SpliceTransformer(ctx.owner, quotedDefs.contains)
-          val newSplicedCode1 = splicer.transformSplice(splicedCode, tree.tpe, holeIdx)(using spliceContext)
-          val newSplicedCode2 = Level0QuoteTransformer.transform(newSplicedCode1)(using spliceContext)
-          newSplicedCode2
-      case tree: TypeDef if tree.symbol.hasAnnotation(defn.QuotedRuntime_SplicedTypeAnnot) =>
-        val tp @ TypeRef(qual: TermRef, _) = tree.rhs.tpe.hiBound: @unchecked
-        quotedDefs += tree.symbol
-        val hole = typeHoles.get(qual.symbol) match
-          case Some (hole) => cpy.Hole(hole)(content = EmptyTree)
-          case None =>
-            val holeIdx = numHoles
-            numHoles += 1
-            val hole = tpd.Hole(false, holeIdx, Nil, ref(qual), TypeTree(tp))
-            typeHoles.put(qual.symbol, hole)
-            hole
-        cpy.TypeDef(tree)(rhs = hole)
-      case Apply(Select(Apply(TypeApply(fn,_), List(code)),nme.apply),List(quotes))
-          if fn.symbol == defn.QuotedRuntime_exprQuote =>
-        super.transform(tree)(using quoteContext)
+      case tree: Splice if level == 1 =>
+        val holeIdx = numHoles
+        numHoles += 1
+        val splicer = SpliceTransformer(ctx.owner, quotedDefs.contains)
+        val newSplicedCode1 = splicer.transformSplice(tree.expr, tree.tpe, holeIdx)(using spliceContext)
+        val newSplicedCode2 = Level0QuoteTransformer.transform(newSplicedCode1)(using spliceContext)
+        newSplicedCode2
       case _: Template =>
         for sym <- tree.symbol.owner.info.decls do
           quotedDefs += sym
@@ -183,14 +163,13 @@ class Splicing extends MacroTransform:
   *  ```
   *  is transformed into
   *  ```scala
-   *  {{{ | T2 | x, X | (x$1: Expr[T1], X$1: Type[X]) => (using Quotes) ?=> {... ${x$1} ... X$1.Underlying ...} }}}
+   *  {{{ | T2 | x, X | (x$1: Expr[T1], X$1: Type[X]) => (using Quotes) ?=> '{... ${x$1} ... 
X$1.Underlying ...} }}} * ``` */ private class SpliceTransformer(spliceOwner: Symbol, isCaptured: Symbol => Boolean) extends Transformer: - private var refBindingMap = mutable.Map.empty[Symbol, (Tree, Symbol)] + private var refBindingMap = mutable.LinkedHashMap.empty[Symbol, (Tree, Symbol)] /** Reference to the `Quotes` instance of the current level 1 splice */ private var quotes: Tree | Null = null // TODO: add to the context - private var healedTypes: PCPCheckAndHeal.QuoteTypeTags | Null = null // TODO: add to the context def transformSplice(tree: tpd.Tree, tpe: Type, holeIdx: Int)(using Context): tpd.Tree = assert(level == 0) @@ -202,10 +181,18 @@ class Splicing extends MacroTransform: val ddef = DefDef(meth, List(bindings), newTree.tpe, newTree.changeOwner(ctx.owner, meth)) val fnType = defn.FunctionType(bindings.size, isContextual = false).appliedTo(bindingsTypes :+ newTree.tpe) val closure = Block(ddef :: Nil, Closure(Nil, ref(meth), TypeTree(fnType))) - tpd.Hole(true, holeIdx, refs, closure, TypeTree(tpe)) + tpd.Hole(true, holeIdx, refs, closure, tpe) override def transform(tree: tpd.Tree)(using Context): tpd.Tree = tree match + case tree: Select if tree.isTerm && isCaptured(tree.symbol) => + tree.symbol.allOverriddenSymbols.find(sym => !isCaptured(sym.owner)) match + case Some(sym) => + // virtualize call on overridden symbol that is not defined in a non static class + transform(tree.qualifier.select(sym)) + case _ => + report.error(em"Can not use reference to staged local ${tree.symbol} defined in an outer quote.\n\nThis can work if ${tree.symbol.owner} would extend a top level interface that defines ${tree.symbol}.", tree) + tree case tree: RefTree => if tree.isTerm then if isCaptured(tree.symbol) then @@ -228,42 +215,25 @@ class Splicing extends MacroTransform: case tree @ Assign(lhs: RefTree, rhs) => if isCaptured(lhs.symbol) then transformSplicedAssign(tree) else super.transform(tree) - case Apply(fn, args) if fn.symbol == defn.QuotedRuntime_exprNestedSplice => - val newArgs = args.mapConserve(arg => transform(arg)(using spliceContext)) - cpy.Apply(tree)(fn, newArgs) - case Apply(sel @ Select(app @ Apply(fn, args),nme.apply), quotesArgs) - if fn.symbol == defn.QuotedRuntime_exprQuote => - args match - case List(tree: RefTree) if isCaptured(tree.symbol) => - capturedTerm(tree) - case _ => - val newArgs = withCurrentQuote(quotesArgs.head) { - if level > 1 then args.mapConserve(arg => transform(arg)(using quoteContext)) - else args.mapConserve(arg => transformLevel0QuoteContent(arg)(using quoteContext)) - } - cpy.Apply(tree)(cpy.Select(sel)(cpy.Apply(app)(fn, newArgs), nme.apply), quotesArgs) - case Apply(TypeApply(_, List(tpt)), List(quotes)) - if tree.symbol == defn.QuotedTypeModule_of && containsCapturedType(tpt.tpe) => - ref(capturedType(tpt))(using ctx.withSource(tree.source)).withSpan(tree.span) case CapturedApplication(fn, argss) => transformCapturedApplication(tree, fn, argss) + case Apply(Select(Quote(body, _), nme.apply), quotes :: Nil) if level == 0 && body.isTerm => + body match + case _: RefTree if isCaptured(body.symbol) => capturedTerm(body) + case _ => withCurrentQuote(quotes) { super.transform(tree) } + case tree: Quote if level == 0 => + if tree.body.isTerm then transformLevel0Quote(tree) + else if containsCapturedType(tree.body.tpe) then capturedPartTypes(tree) + else tree case _ => super.transform(tree) - private def transformLevel0QuoteContent(tree: Tree)(using Context): Tree = + private def transformLevel0Quote(quote: Quote)(using Context): Tree = // 
transform and collect new healed types - val old = healedTypes - healedTypes = new PCPCheckAndHeal.QuoteTypeTags(tree.span) - val tree1 = transform(tree) - val newHealedTypes = healedTypes.nn.getTypeTags - healedTypes = old - // add new healed types to the current, merge with existing healed types if necessary - if newHealedTypes.isEmpty then tree1 - else tree1 match - case Block(stats @ (x :: _), expr) if x.symbol.hasAnnotation(defn.QuotedRuntime_SplicedTypeAnnot) => - Block(newHealedTypes ::: stats, expr) - case _ => - Block(newHealedTypes, tree1) + val (tags, body1) = inContextWithQuoteTypeTags { + transform(quote.body)(using quoteContext) + } + cpy.Quote(quote)(body1, quote.tags ::: tags) class ArgsClause(val args: List[Tree]): def isTerm: Boolean = args.isEmpty || args.head.isTerm @@ -335,20 +305,40 @@ class Splicing extends MacroTransform: val bindingSym = refBindingMap.getOrElseUpdate(tree.symbol, (tree, newBinding))._2 ref(bindingSym) - private def capturedType(tree: Tree)(using Context): Symbol = - val tpe = tree.tpe.widenTermRefExpr - def newBinding = newSymbol( + private def newQuotedTypeClassBinding(tpe: Type)(using Context) = + newSymbol( spliceOwner, UniqueName.fresh(nme.Type).toTermName, Param, defn.QuotedTypeClass.typeRef.appliedTo(tpe), ) - val bindingSym = refBindingMap.getOrElseUpdate(tree.symbol, (TypeTree(tree.tpe), newBinding))._2 + + private def capturedType(tree: Tree)(using Context): Symbol = + val tpe = tree.tpe.widenTermRefExpr + val bindingSym = refBindingMap + .getOrElseUpdate(tree.symbol, (TypeTree(tree.tpe), newQuotedTypeClassBinding(tpe)))._2 bindingSym + private def capturedPartTypes(quote: Quote)(using Context): Tree = + val (tags, body1) = inContextWithQuoteTypeTags { + val capturePartTypes = new TypeMap { + def apply(tp: Type) = tp match { + case typeRef: TypeRef if containsCapturedType(typeRef) => + val termRef = refBindingMap + .getOrElseUpdate(typeRef.symbol, (TypeTree(typeRef), newQuotedTypeClassBinding(typeRef)))._2.termRef + val tagRef = getTagRef(termRef) + tagRef + case _ => + mapOver(tp) + } + } + TypeTree(capturePartTypes(quote.body.tpe.widenTermRefExpr)) + } + cpy.Quote(quote)(body1, quote.tags ::: tags) + private def getTagRefFor(tree: Tree)(using Context): Tree = val capturedTypeSym = capturedType(tree) - TypeTree(healedTypes.nn.getTagRef(capturedTypeSym.termRef)) + TypeTree(getTagRef(capturedTypeSym.termRef)) private def withCurrentQuote[T](newQuotes: Tree)(body: => T)(using Context): T = if level == 0 then @@ -368,18 +358,10 @@ class Splicing extends MacroTransform: body(using ctx.withOwner(meth)).changeOwner(ctx.owner, meth) } }) - ref(defn.QuotedRuntime_exprNestedSplice) - .appliedToType(tpe) - .appliedTo(Literal(Constant(null))) // Dropped when creating the Hole that contains it - .appliedTo(closure) + Splice(closure, tpe) private def quoted(expr: Tree)(using Context): Tree = - val tpe = expr.tpe.widenTermRefExpr - ref(defn.QuotedRuntime_exprQuote) - .appliedToType(tpe) - .appliedTo(expr) - .select(nme.apply) - .appliedTo(quotes.nn) + tpd.Quote(expr, Nil).select(nme.apply).appliedTo(quotes.nn) /** Helper methods to construct trees calling methods in `Quotes.reflect` based on the current `quotes` tree */ private object reflect extends ReifiedReflect { diff --git a/compiler/src/dotty/tools/dotc/transform/Staging.scala b/compiler/src/dotty/tools/dotc/transform/Staging.scala index 1de050a9a6c1..43cbe80ce8c4 100644 --- a/compiler/src/dotty/tools/dotc/transform/Staging.scala +++ b/compiler/src/dotty/tools/dotc/transform/Staging.scala @@ -6,18 
+6,18 @@ import dotty.tools.dotc.core.Contexts._
 import dotty.tools.dotc.core.Phases._
 import dotty.tools.dotc.core.Decorators._
 import dotty.tools.dotc.core.Flags._
-import dotty.tools.dotc.core.StagingContext._
 import dotty.tools.dotc.core.Symbols._
 import dotty.tools.dotc.core.Types._
+import dotty.tools.dotc.inlines.Inlines
 import dotty.tools.dotc.util.SrcPos
 import dotty.tools.dotc.transform.SymUtils._
-import dotty.tools.dotc.transform.TreeMapWithStages._
+import dotty.tools.dotc.staging.StagingLevel.*
+import dotty.tools.dotc.staging.CrossStageSafety
+import dotty.tools.dotc.staging.HealType
-
-
-/** Checks that the Phase Consistency Principle (PCP) holds and heals types.
+/** Checks that staging level consistency holds and heals types used in higher levels.
  *
- * Type healing consists in transforming a phase inconsistent type `T` into `${ implicitly[Type[T]] }`.
+ * See `CrossStageSafety`
  */
 class Staging extends MacroTransform {
   import tpd._
@@ -31,29 +31,40 @@ class Staging extends MacroTransform {
   override def allowsImplicitSearch: Boolean = true

   override def checkPostCondition(tree: Tree)(using Context): Unit =
-    if (ctx.phase <= splicingPhase) {
-      // Recheck that PCP holds but do not heal any inconsistent types as they should already have been heald
+    if (ctx.phase <= stagingPhase) {
+      // Recheck that staging level consistency holds but do not heal any inconsistent types as they should already have been healed
       tree match {
         case PackageDef(pid, _) if tree.symbol.owner == defn.RootClass =>
-          val checker = new PCPCheckAndHeal(freshStagingContext) {
-            override protected def tryHeal(sym: Symbol, tp: TypeRef, pos: SrcPos)(using Context): TypeRef = {
-              def symStr =
-                if (sym.is(ModuleClass)) sym.sourceModule.show
-                else i"${sym.name}.this"
-              val errMsg = s"\nin ${ctx.owner.fullName}"
-              assert(
-                ctx.owner.hasAnnotation(defn.QuotedRuntime_SplicedTypeAnnot) ||
-                (sym.isType && levelOf(sym) > 0),
-                em"""access to $symStr from wrong staging level:
-                    | - the definition is at level ${levelOf(sym)},
-                    | - but the access is at level $level.$errMsg""")
+          val checker = new CrossStageSafety {
+            override protected def healType(pos: SrcPos)(tpe: Type)(using Context) = new HealType(pos) {
+              override protected def tryHeal(tp: TypeRef): TypeRef = {
+                val sym = tp.symbol
+                def symStr =
+                  if (sym.is(ModuleClass)) sym.sourceModule.show
+                  else i"${sym.name}.this"
+                val errMsg = s"\nin ${ctx.owner.fullName}"
+                assert(
+                  ctx.owner.hasAnnotation(defn.QuotedRuntime_SplicedTypeAnnot) ||
+                  (sym.isType && levelOf(sym) > 0),
+                  em"""access to $symStr from wrong staging level:
+                      | - the definition is at level ${levelOf(sym)},
+                      | - but the access is at level $level.$errMsg""")
-              tp
-            }
+                tp
+              }
+            }.apply(tpe)
           }
           checker.transform(tree)
         case _ =>
       }
+    }
+    if !Inlines.inInlineMethod then
+      tree match {
+        case tree: RefTree =>
+          assert(level != 0 || tree.symbol != defn.QuotedTypeModule_of,
+            "scala.quoted.Type.of at level 0 should have been replaced with Quote AST in staging phase")
+        case _ =>
+      }

     tree.tpe match {
       case tpe @ TypeRef(prefix, _) if tpe.typeSymbol.isTypeSplice =>
@@ -63,14 +74,14 @@ class Staging extends MacroTransform {
       case _ =>
         // OK
     }
-  }
+  end checkPostCondition

   override def run(using Context): Unit =
-    if (ctx.compilationUnit.needsStaging) super.run(using freshStagingContext)
+    if (ctx.compilationUnit.needsStaging) super.run

   protected def newTransformer(using Context): Transformer = new Transformer {
     override def transform(tree: tpd.Tree)(using Context): tpd.Tree =
-      new 
PCPCheckAndHeal(ctx).transform(tree) + (new CrossStageSafety).transform(tree) } } diff --git a/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala b/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala index 2307f759b571..b78c75d58340 100644 --- a/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala +++ b/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala @@ -174,27 +174,30 @@ class SuperAccessors(thisPhase: DenotTransformer) { val sel @ Select(qual, name) = tree: @unchecked val sym = sel.symbol - /** If an accesses to protected member of a class comes from a trait, - * or would need a protected accessor placed in a trait, we cannot - * perform the access to the protected member directly since jvm access - * restrictions require the call site to be in an actual subclass and - * traits don't count as subclasses in this respect. In this case - * we generate a super accessor instead. See SI-2296. - */ def needsSuperAccessor = ProtectedAccessors.needsAccessorIfNotInSubclass(sym) && AccessProxies.hostForAccessorOf(sym).is(Trait) qual match { case _: This if needsSuperAccessor => - /* - * A trait which extends a class and accesses a protected member - * of that class cannot implement the necessary accessor method - * because jvm access restrictions require the call site to be in - * an actual subclass and traits don't count as subclasses in this - * respect. We generate a super accessor itself, which will be fixed - * by the implementing class. See SI-2296. - */ - superAccessorCall(sel) + /* Given a protected member m defined in class C, + * and a trait T that calls m. + * + * If T extends C, then we can access it by casting + * the qualifier of the select to C. + * + * That's because the protected method is actually public, + * so we can call it. For truly protected methods, like from + * Java, we error instead of emitting the wrong code (i17021.ext-java). + * + * Otherwise, we need to go through an accessor, + * which the implementing class will provide an implementation for. + */ + if ctx.owner.enclosingClass.derivesFrom(sym.owner) then + if sym.is(JavaDefined) then + report.error(em"${ctx.owner} accesses protected $sym inside a concrete trait method: use super.${sel.name} instead", sel.srcPos) + sel + else + superAccessorCall(sel) case Super(_, mix) => transformSuperSelect(sel) case _ => diff --git a/compiler/src/dotty/tools/dotc/transform/SymUtils.scala b/compiler/src/dotty/tools/dotc/transform/SymUtils.scala index b945f5820523..c02a7d90cb8c 100644 --- a/compiler/src/dotty/tools/dotc/transform/SymUtils.scala +++ b/compiler/src/dotty/tools/dotc/transform/SymUtils.scala @@ -18,6 +18,8 @@ import Annotations.Annotation import Phases._ import ast.tpd.Literal +import dotty.tools.dotc.transform.sjs.JSSymUtils.sjsNeedsField + import scala.annotation.tailrec object SymUtils: @@ -259,9 +261,29 @@ object SymUtils: self.owner.info.decl(fieldName).suchThat(!_.is(Method)).symbol } + /** Is this symbol a constant expression final val? + * + * This is the case if all of the following are true: + * + * - it is a `final val`, + * - its result type is a `ConstantType`, and + * - it does not need an explicit field because of Scala.js semantics (see `JSSymUtils.sjsNeedsField`). + * + * Constant expression final vals do not need an explicit field to store + * their value. See the Memoize-Mixin-Constructors phase trio. 
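A minimal example of such definitions (illustration only): the first two need no underlying field, while the `var` is excluded by the `butNot = Mutable` condition in the check below.

```scala
class Config {
  final val MaxRetries = 3  // result type is ConstantType(Constant(3)): no field needed
  final val greeting = "hi" // also a constant-expression final val
  final var counter = 0     // Mutable, so not a constant-expression final val
}
```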
+   */
   def isConstExprFinalVal(using Context): Boolean = atPhaseNoLater(erasurePhase) {
-    self.is(Final) && self.info.resultType.isInstanceOf[ConstantType]
+    self.is(Final, butNot = Mutable) && self.info.resultType.isInstanceOf[ConstantType]
+  } && !self.sjsNeedsField
+
+  /** The `ConstantType` of a val known to be `isConstExprFinalVal`.
+   *
+   * @pre `self.isConstExprFinalVal` is true.
+   */
+  def constExprFinalValConstantType(using Context): ConstantType =
+    atPhaseNoLater(erasurePhase) {
+      self.info.resultType.asInstanceOf[ConstantType]
   }
 
   def isField(using Context): Boolean =
@@ -315,14 +337,6 @@ object SymUtils:
   def reachableRawTypeRef(using Context) =
     self.reachableTypeRef.appliedTo(self.typeParams.map(_ => TypeBounds.emptyPolyKind))
 
-  /** Is symbol a quote operation? */
-  def isQuote(using Context): Boolean =
-    self == defn.QuotedRuntime_exprQuote || self == defn.QuotedTypeModule_of
-
-  /** Is symbol a term splice operation? */
-  def isExprSplice(using Context): Boolean =
-    self == defn.QuotedRuntime_exprSplice || self == defn.QuotedRuntime_exprNestedSplice
-
   /** Is symbol a type splice operation? */
   def isTypeSplice(using Context): Boolean =
     self == defn.QuotedType_splice
diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala
index 4573c40df78b..34b3183a6b15 100644
--- a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala
+++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala
@@ -20,6 +20,8 @@ import ast.{tpd, untpd}
 import util.Chars._
 import collection.mutable
 import ProtoTypes._
+import staging.StagingLevel
+import inlines.Inlines.inInlineMethod
 
 import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions
 
@@ -141,7 +143,7 @@ class TreeChecker extends Phase with SymTransformer {
     override def apply(parent: Tree, tree: Tree)(using Context): Tree = {
       tree match {
         case tree: New if !parent.isInstanceOf[tpd.Select] =>
-          assert(assertion = false, i"`New` node must be wrapped in a `Select`:\n parent = ${parent.show}\n child = ${tree.show}")
+          assert(assertion = false, i"`New` node must be wrapped in a `Select` of the constructor:\n parent = ${parent.show}\n child = ${tree.show}")
         case _: Annotated =>
           // Don't check inside annotations, since they're allowed to contain
           // somewhat invalid trees.
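To make the constant-expression rule concrete: a member qualifies only when its result type is a `ConstantType`, which in practice means an unascribed `final val` initialized with a literal. A hedged sketch of the distinction (the object and member names are illustrative, not from this PR):

```scala
object Config:
  final val Debug = false         // inferred type is the constant type `false`: no backing field is needed
  final val Name: String = "app"  // ascribed type is `String`, not a ConstantType: keeps its field
  final var count = 0             // mutable, so the new `butNot = Mutable` check excludes it
```

Only for members like `Debug` is the cast in `constExprFinalValConstantType` then safe, which is why its precondition requires `isConstExprFinalVal` to hold.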
@@ -445,10 +447,12 @@ object TreeChecker { // Polymorphic apply methods stay structural until Erasure val isPolyFunctionApply = (tree.name eq nme.apply) && tree.qualifier.typeOpt.derivesFrom(defn.PolyFunctionClass) + // Erased functions stay structural until Erasure + val isErasedFunctionApply = (tree.name eq nme.apply) && tree.qualifier.typeOpt.derivesFrom(defn.ErasedFunctionClass) // Outer selects are pickled specially so don't require a symbol val isOuterSelect = tree.name.is(OuterSelectName) val isPrimitiveArrayOp = ctx.erasedTypes && nme.isPrimitiveName(tree.name) - if !(tree.isType || isPolyFunctionApply || isOuterSelect || isPrimitiveArrayOp) then + if !(tree.isType || isPolyFunctionApply || isErasedFunctionApply || isOuterSelect || isPrimitiveArrayOp) then val denot = tree.denot assert(denot.exists, i"Selection $tree with type $tpe does not have a denotation") assert(denot.symbol.exists, i"Denotation $denot of selection $tree with type $tpe does not have a symbol, qualifier type = ${tree.qualifier.typeOpt}") @@ -511,7 +515,7 @@ object TreeChecker { val inliningPhase = ctx.base.inliningPhase inliningPhase.exists && ctx.phase.id > inliningPhase.id if isAfterInlining then - // The staging phase destroys in PCPCheckAndHeal the property that + // The staging phase destroys in CrossStageSafety the property that // tree.expr.tpe <:< pt1. A test case where this arises is run-macros/enum-nat-macro. // We should follow up why this happens. If the problem is fixed, we can // drop the isAfterInlining special case. To reproduce the problem, just @@ -531,11 +535,16 @@ object TreeChecker { i"owner chain = ${tree.symbol.ownersIterator.toList}%, %, ctxOwners = ${ctx.outersIterator.map(_.owner).toList}%, %") } + override def typedTypeDef(tdef: untpd.TypeDef, sym: Symbol)(using Context): Tree = { + assert(sym.info.isInstanceOf[ClassInfo | TypeBounds], i"wrong type, expect a template or type bounds for ${sym.fullName}, but found: ${sym.info}") + super.typedTypeDef(tdef, sym) + } + override def typedClassDef(cdef: untpd.TypeDef, cls: ClassSymbol)(using Context): Tree = { val TypeDef(_, impl @ Template(constr, _, _, _)) = cdef: @unchecked assert(cdef.symbol == cls) assert(impl.symbol.owner == cls) - assert(constr.symbol.owner == cls) + assert(constr.symbol.owner == cls, i"constr ${constr.symbol} in $cdef has wrong owner; should be $cls but is ${constr.symbol.owner}") assert(cls.primaryConstructor == constr.symbol, i"mismatch, primary constructor ${cls.primaryConstructor}, in tree = ${constr.symbol}") checkOwner(impl) checkOwner(impl.constr) @@ -650,12 +659,48 @@ object TreeChecker { else super.typedPackageDef(tree) + override def typedQuote(tree: untpd.Quote, pt: Type)(using Context): Tree = + if ctx.phase <= stagingPhase.prev then + assert(tree.tags.isEmpty, i"unexpected tags in Quote before staging phase: ${tree.tags}") + else + assert(!tree.body.isInstanceOf[untpd.Splice] || inInlineMethod, i"missed quote cancellation in $tree") + assert(!tree.body.isInstanceOf[untpd.Hole] || inInlineMethod, i"missed quote cancellation in $tree") + if StagingLevel.level != 0 then + assert(tree.tags.isEmpty, i"unexpected tags in Quote at staging level ${StagingLevel.level}: ${tree.tags}") + + for tag <- tree.tags do + assert(tag.isInstanceOf[RefTree], i"expected RefTree in Quote but was: $tag") + + val tree1 = super.typedQuote(tree, pt) + for tag <- tree.tags do + assert(tag.typeOpt.derivesFrom(defn.QuotedTypeClass), i"expected Quote tag to be of type `Type` but was: ${tag.tpe}") + + tree1 match + case Quote(body, 
targ :: Nil) if body.isType => + assert(!(body.tpe =:= targ.tpe.select(tpnme.Underlying)), i"missed quote cancellation in $tree1") + case _ => + + tree1 + + override def typedSplice(tree: untpd.Splice, pt: Type)(using Context): Tree = + if stagingPhase <= ctx.phase then + assert(!tree.expr.isInstanceOf[untpd.Quote] || inInlineMethod, i"missed quote cancellation in $tree") + super.typedSplice(tree, pt) + override def typedHole(tree: untpd.Hole, pt: Type)(using Context): Tree = { - val tree1 @ Hole(isTermHole, _, args, content, tpt) = super.typedHole(tree, pt): @unchecked + val tree1 @ Hole(isTerm, idx, args, content) = super.typedHole(tree, pt): @unchecked + + assert(idx >= 0, i"hole should not have negative index: $tree") + assert(isTerm || tree.args.isEmpty, i"type hole should not have arguments: $tree") + + // Check that we only add the captured type `T` instead of a more complex type like `List[T]`. + // If we have `F[T]` with captured `F` and `T`, we should list `F` and `T` separately in the args. + for arg <- args do + assert(arg.isTerm || arg.tpe.isInstanceOf[TypeRef], "Expected TypeRef in Hole type args but got: " + arg.tpe) // Check result type of the hole - if isTermHole then assert(tpt.typeOpt <:< pt) - else assert(tpt.typeOpt =:= pt) + if isTerm then assert(tree1.typeOpt <:< pt) + else assert(tree1.typeOpt =:= pt) // Check that the types of the args conform to the types of the contents of the hole val argQuotedTypes = args.map { arg => @@ -667,16 +712,16 @@ object TreeChecker { defn.AnyType case tpe => tpe defn.QuotedExprClass.typeRef.appliedTo(tpe) - else defn.QuotedTypeClass.typeRef.appliedTo(arg.typeOpt) + else defn.QuotedTypeClass.typeRef.appliedTo(arg.typeOpt.widenTermRefExpr) } val expectedResultType = - if isTermHole then defn.QuotedExprClass.typeRef.appliedTo(tpt.typeOpt) - else defn.QuotedTypeClass.typeRef.appliedTo(tpt.typeOpt) + if isTerm then defn.QuotedExprClass.typeRef.appliedTo(tree1.typeOpt) + else defn.QuotedTypeClass.typeRef.appliedTo(tree1.typeOpt) val contextualResult = defn.FunctionOf(List(defn.QuotesClass.typeRef), expectedResultType, isContextual = true) val expectedContentType = defn.FunctionOf(argQuotedTypes, contextualResult) - assert(content.typeOpt =:= expectedContentType) + assert(content.typeOpt =:= expectedContentType, i"unexpected content of hole\nexpected: ${expectedContentType}\nwas: ${content.typeOpt}") tree1 } @@ -729,6 +774,11 @@ object TreeChecker { try treeChecker.typed(expansion)(using checkingCtx) catch case err: java.lang.AssertionError => + val stack = + if !ctx.settings.Ydebug.value then "\nstacktrace available when compiling with `-Ydebug`" + else if err.getStackTrace == null then " no stacktrace" + else err.getStackTrace.nn.mkString(" ", " \n", "") + report.error( s"""Malformed tree was found while expanding macro with -Xcheck-macros. |The tree does not conform to the compiler's tree invariants. 
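The "missed quote cancellation" assertions in `typedQuote` and `typedSplice` above refer to directly nested quote/splice pairs, which the staging phase is expected to simplify away. A hedged sketch of the two cancelling shapes, written against the public `scala.quoted` API:

```scala
import scala.quoted.*

// A quote around a splice cancels: '{ ${ e } } is equivalent to `e`,
// so after the staging phase the checker expects the pair to be gone.
def cancelQuote(e: Expr[Int])(using Quotes): Expr[Int] =
  '{ ${ e } }

// A splice around a quote cancels inside an enclosing quote:
// '{ ${ '{ 42 } } + 1 } reduces to '{ 42 + 1 }.
def cancelSplice(using Quotes): Expr[Int] =
  '{ ${ '{ 42 } } + 1 }
```

The `Quote(body, targ :: Nil)` case is the type-level analogue: a type quote whose body is exactly `targ.Underlying` should have been cancelled to the tag `targ` itself, which is what the `=:=` assertion rules out.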
@@ -741,7 +791,7 @@ object TreeChecker { | |Error: |${err.getMessage} - | + |$stack |""", original ) diff --git a/compiler/src/dotty/tools/dotc/transform/TreeMapWithStages.scala b/compiler/src/dotty/tools/dotc/transform/TreeMapWithStages.scala deleted file mode 100644 index b514b8a7bf11..000000000000 --- a/compiler/src/dotty/tools/dotc/transform/TreeMapWithStages.scala +++ /dev/null @@ -1,164 +0,0 @@ -package dotty.tools.dotc -package transform - -import dotty.tools.dotc.ast.{TreeMapWithImplicits, tpd} -import dotty.tools.dotc.config.Printers.staging -import dotty.tools.dotc.core.Decorators._ -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.StagingContext._ -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.util.Property - -import scala.collection.mutable -import scala.annotation.constructorOnly - -/** The main transformer class - * @param level the current level, where quotes add one and splices subtract one level. - * The initial level is 0, a level `l` where `l > 0` implies code has been quoted `l` times - * and `l == -1` is code inside a top level splice (in an inline method). - * @param levels a stacked map from symbols to the levels in which they were defined - */ -abstract class TreeMapWithStages(@constructorOnly ictx: Context) extends TreeMapWithImplicits { - - import tpd._ - import TreeMapWithStages._ - - /** A map from locally defined symbols to their definition quotation level */ - private[this] val levelOfMap: mutable.HashMap[Symbol, Int] = ictx.property(LevelOfKey).get - - /** A stack of entered symbols, to be unwound after scope exit */ - private[this] var enteredSyms: List[Symbol] = Nil - - /** If we are inside a quote or a splice */ - private[this] var inQuoteOrSplice = false - - /** The quotation level of the definition of the locally defined symbol */ - protected def levelOf(sym: Symbol): Int = levelOfMap.getOrElse(sym, 0) - - /** Locally defined symbols seen so far by `StagingTransformer.transform` */ - protected def localSymbols: List[Symbol] = enteredSyms - - /** If we are inside a quote or a splice */ - protected def isInQuoteOrSplice: Boolean = inQuoteOrSplice - - /** Enter staging level of symbol defined by `tree` */ - private def markSymbol(sym: Symbol)(using Context): Unit = - if level != 0 && !levelOfMap.contains(sym) then - levelOfMap(sym) = level - enteredSyms = sym :: enteredSyms - - /** Enter staging level of symbol defined by `tree`, if applicable. */ - private def markDef(tree: Tree)(using Context): Unit = tree match { - case tree: DefTree => markSymbol(tree.symbol) - case _ => - } - - /** Transform the quote `quote` which contains the quoted `body`. - * - * - `quoted.runtime.Expr.quote[T]()` --> `quoted.runtime.Expr.quote[T]()` - * - `quoted.Type.of[](quotes)` --> `quoted.Type.of[](quotes)` - */ - protected def transformQuotation(body: Tree, quote: Apply)(using Context): Tree = - if body.isTerm then - cpy.Apply(quote)(quote.fun, body :: Nil) - else - val TypeApply(fun, _) = quote.fun: @unchecked - cpy.Apply(quote)(cpy.TypeApply(quote.fun)(fun, body :: Nil), quote.args) - - /** Transform the expression splice `splice` which contains the spliced `body`. */ - protected def transformSplice(body: Tree, splice: Apply)(using Context): Tree - - /** Transform the type splice `splice` which contains the spliced `body`. 
*/ - protected def transformSpliceType(body: Tree, splice: Select)(using Context): Tree - - override def transform(tree: Tree)(using Context): Tree = - if (tree.source != ctx.source && tree.source.exists) - transform(tree)(using ctx.withSource(tree.source)) - else reporting.trace(i"StagingTransformer.transform $tree at $level", staging, show = true) { - def mapOverTree(lastEntered: List[Symbol]) = - try super.transform(tree) - finally - while (enteredSyms ne lastEntered) { - levelOfMap -= enteredSyms.head - enteredSyms = enteredSyms.tail - } - - def dropEmptyBlocks(tree: Tree): Tree = tree match { - case Block(Nil, expr) => dropEmptyBlocks(expr) - case _ => tree - } - - tree match { - case Apply(Select(Quoted(quotedTree), _), _) if quotedTree.isType => - dropEmptyBlocks(quotedTree) match - case SplicedType(t) => - // Optimization: `quoted.Type.of[x.Underlying]` --> `x` - transform(t) - case _ => - super.transform(tree) - - case tree @ Quoted(quotedTree) => - val old = inQuoteOrSplice - inQuoteOrSplice = true - try dropEmptyBlocks(quotedTree) match { - case Spliced(t) => - // Optimization: `'{ $x }` --> `x` - // and adapt the refinement of `Quotes { type reflect: ... } ?=> Expr[T]` - transform(t).asInstance(tree.tpe) - case _ => transformQuotation(quotedTree, tree) - } - finally inQuoteOrSplice = old - - case tree @ Spliced(splicedTree) => - val old = inQuoteOrSplice - inQuoteOrSplice = true - try dropEmptyBlocks(splicedTree) match { - case Quoted(t) => - // Optimization: `${ 'x }` --> `x` - transform(t) - case _ => transformSplice(splicedTree, tree) - } - finally inQuoteOrSplice = old - - case tree @ SplicedType(splicedTree) => - val old = inQuoteOrSplice - inQuoteOrSplice = true - try transformSpliceType(splicedTree, tree) - finally inQuoteOrSplice = old - - case Block(stats, _) => - val last = enteredSyms - stats.foreach(markDef) - mapOverTree(last) - - case CaseDef(pat, guard, body) => - val last = enteredSyms - tpd.patVars(pat).foreach(markSymbol) - mapOverTree(last) - - case (_:Import | _:Export) => - tree - - case _: Template => - val last = enteredSyms - tree.symbol.owner.info.decls.foreach(markSymbol) - mapOverTree(last) - - case _ => - markDef(tree) - mapOverTree(enteredSyms) - } - } -} - - -object TreeMapWithStages { - - /** A key to be used in a context property that caches the `levelOf` mapping */ - private val LevelOfKey = new Property.Key[mutable.HashMap[Symbol, Int]] - - /** Initial context for a StagingTransformer transformation. */ - def freshStagingContext(using Context): Context = - ctx.fresh.setProperty(LevelOfKey, new mutable.HashMap[Symbol, Int]) - -} diff --git a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala index 3763af243881..f5cb8eab73a4 100644 --- a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala +++ b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala @@ -16,6 +16,8 @@ import util.Spans._ import reporting._ import config.Printers.{ transforms => debug } +import patmat.Typ + /** This transform normalizes type tests and type casts, * also replacing type tests with singleton argument type with reference equality check * Any remaining type tests @@ -51,7 +53,8 @@ object TypeTestsCasts { * 6. if `P = T1 | T2` or `P = T1 & T2`, checkable(X, T1) && checkable(X, T2). * 7. if `P` is a refinement type, "it's a refinement type" * 8. if `P` is a local class which is not statically reachable from the scope where `X` is defined, "it's a local class" - * 9. otherwise, "" + * 9. 
if `X` is `T1 | T2`, checkable(T1, P) && checkable(T2, P). + * 10. otherwise, "" */ def whyUncheckable(X: Type, P: Type, span: Span)(using Context): String = atPhase(Phases.refchecksPhase.next) { extension (inline s1: String) inline def &&(inline s2: String): String = if s1 == "" then s2 else s1 @@ -129,7 +132,8 @@ object TypeTestsCasts { } - def recur(X: Type, P: Type): String = (X <:< P) ||| (P.dealias match { + def recur(X: Type, P: Type): String = trace(s"recur(${X.show}, ${P.show})") { + (X <:< P) ||| P.dealias.match case _: SingletonType => "" case _: TypeProxy if isAbstract(P) => i"it refers to an abstract type member or type parameter" @@ -138,7 +142,7 @@ object TypeTestsCasts { case defn.ArrayOf(tpE) => recur(tpE, tpT) case _ => recur(defn.AnyType, tpT) } - case tpe: AppliedType => + case tpe @ AppliedType(tycon, targs) => X.widenDealias match { case OrType(tp1, tp2) => // This case is required to retrofit type inference, @@ -147,10 +151,10 @@ object TypeTestsCasts { // - T1 & T2 <:< T3 // See TypeComparer#either recur(tp1, P) && recur(tp2, P) - case _ => + + case x => // always false test warnings are emitted elsewhere - X.classSymbol.exists && P.classSymbol.exists && - !X.classSymbol.asClass.mayHaveCommonChild(P.classSymbol.asClass) + TypeComparer.provablyDisjoint(x, tpe.derivedAppliedType(tycon, targs.map(_ => WildcardType))) || typeArgsTrivial(X, tpe) ||| i"its type arguments can't be determined from $X" } @@ -164,7 +168,7 @@ object TypeTestsCasts { if P.classSymbol.isLocal && foundClasses(X).exists(P.classSymbol.isInaccessibleChildOf) => // 8 i"it's a local class" case _ => "" - }) + } val res = recur(X.widen, replaceP(P)) @@ -302,8 +306,8 @@ object TypeTestsCasts { /** Transform isInstanceOf * - * expr.isInstanceOf[A | B] ~~> expr.isInstanceOf[A] | expr.isInstanceOf[B] - * expr.isInstanceOf[A & B] ~~> expr.isInstanceOf[A] & expr.isInstanceOf[B] + * expr.isInstanceOf[A | B] ~~> expr.isInstanceOf[A] | expr.isInstanceOf[B] + * expr.isInstanceOf[A & B] ~~> expr.isInstanceOf[A] & expr.isInstanceOf[B] * expr.isInstanceOf[Tuple] ~~> scala.runtime.Tuples.isInstanceOfTuple(expr) * expr.isInstanceOf[EmptyTuple] ~~> scala.runtime.Tuples.isInstanceOfEmptyTuple(expr) * expr.isInstanceOf[NonEmptyTuple] ~~> scala.runtime.Tuples.isInstanceOfNonEmptyTuple(expr) diff --git a/compiler/src/dotty/tools/dotc/transform/init/Cache.scala b/compiler/src/dotty/tools/dotc/transform/init/Cache.scala new file mode 100644 index 000000000000..c0391a05262d --- /dev/null +++ b/compiler/src/dotty/tools/dotc/transform/init/Cache.scala @@ -0,0 +1,201 @@ +package dotty.tools.dotc +package transform +package init + +import core.* +import Contexts.* + +import ast.tpd +import tpd.Tree + +/** The co-inductive cache used for analysis + * + * The cache contains two maps from `(Config, Tree)` to `Res`: + * + * - input cache (`this.last`) + * - output cache (`this.current`) + * + * The two caches are required because we want to make sure in a new iteration, + * an expression is evaluated exactly once. The monotonicity of the analysis + * ensures that the cache state goes up the lattice of the abstract domain, + * consequently the algorithm terminates. 
+ *
+ * The general skeleton for usage of the cache is as follows:
+ *
+ * def analysis(entryExp: Expr) = {
+ *   def iterate(entryExp: Expr)(using Cache) =
+ *     eval(entryExp, initConfig)
+ *     if cache.hasChanged && noErrors then
+ *       cache.last = cache.current
+ *       cache.current = Empty
+ *       cache.changed = false
+ *       iterate(entryExp)
+ *     else
+ *       reportErrors
+ *
+ *
+ *   def eval(expr: Expr, config: Config)(using Cache) =
+ *     cache.cachedEval(config, expr) {
+ *       // Actual recursive evaluation of expression.
+ *       //
+ *       // Only executed if the entry `(config, expr)` is not in the output cache.
+ *     }
+ *
+ *   iterate(entryExp)(using new Cache)
+ * }
+ *
+ * See the documentation for the method `Cache.cachedEval` for more information.
+ *
+ * What goes to the configuration (`Config`) and what goes to the result (`Res`)
+ * need to be decided by the specific analysis and justified by reasoning about
+ * soundness.
+ *
+ * @tparam Config The analysis state that matters for evaluating an expression.
+ * @tparam Res    The result from evaluating the given expression.
+ */
+class Cache[Config, Res]:
+  import Cache.*
+
+  /** The cache for expression values from last iteration */
+  protected var last: ExprValueCache[Config, Res] = Map.empty
+
+  /** The output cache for expression values
+   *
+   * The output cache is computed based on the cache values `last` from the
+   * last iteration.
+   *
+   * Both `last` and `current` are required to make sure an encountered
+   * expression is evaluated once in each iteration.
+   */
+  protected var current: ExprValueCache[Config, Res] = Map.empty
+
+  /** Whether the current cache is different from the last cache.
+   *
+   * `changed == false` implies that the fixed point has been reached.
+   */
+  protected var changed: Boolean = false
+
+  /** Whether any value in the output cache (this.current) was accessed
+   * after being added. If no cached values are used after they are added
+   * for the first time, then another iteration of analysis is not needed.
+   */
+  protected var cacheUsed: Boolean = false
+
+  /** Used to avoid allocation; its state does not matter */
+  protected given MutableTreeWrapper = new MutableTreeWrapper
+
+  def get(config: Config, expr: Tree): Option[Res] =
+    val res = current.get(config, expr)
+    cacheUsed = cacheUsed || res.nonEmpty
+    res
+
+  /** Evaluate an expression with the cache
+   *
+   * The algorithmic skeleton is as follows:
+   *
+   * if don't cache result then
+   *   return eval(expr)
+   * if this.current.contains(config, expr) then
+   *   return cached value
+   * else
+   *   val assumed = this.last(config, expr) or bottom value if absent
+   *   this.current(config, expr) = assumed
+   *   val actual = eval(expr)
+   *
+   *   if assumed != actual then
+   *     this.changed = true
+   *     this.current(config, expr) = actual
+   *
+   */
+  def cachedEval(config: Config, expr: Tree, cacheResult: Boolean, default: Res)(eval: Tree => Res): Res =
+    if !cacheResult then
+      eval(expr)
+    else
+      this.get(config, expr) match
+      case Some(value) => value
+      case None =>
+        val assumeValue: Res =
+          this.last.get(config, expr) match
+          case Some(value) => value
+          case None =>
+            this.last = this.last.updatedNested(config, expr, default)
+            default
+
+        this.current = this.current.updatedNested(config, expr, assumeValue)
+
+        val actual = eval(expr)
+        if actual != assumeValue then
+          // println("Changed!
from = " + assumeValue + ", to = " + actual) + this.changed = true + this.current = this.current.updatedNested(config, expr, actual) + // this.current = this.current.removed(config, expr) + end if + + actual + end if + end cachedEval + + def hasChanged = changed + + def isUsed = cacheUsed + + /** Prepare cache for the next iteration + * + * 1. Reset changed flag. + * + * 2. Use current cache as last cache and set current cache to be empty. + */ + def prepareForNextIteration()(using Context) = + this.changed = false + this.cacheUsed = false + this.last = this.current + this.current = Map.empty +end Cache + +object Cache: + type ExprValueCache[Config, Res] = Map[Config, Map[TreeWrapper, Res]] + + /** A wrapper for trees for storage in maps based on referential equality of trees. */ + abstract class TreeWrapper: + def tree: Tree + + override final def equals(other: Any): Boolean = + other match + case that: TreeWrapper => this.tree eq that.tree + case _ => false + + override final def hashCode = tree.hashCode + + /** The immutable wrapper is intended to be stored as key in the heap. */ + class ImmutableTreeWrapper(val tree: Tree) extends TreeWrapper + + /** For queries on the heap, reuse the same wrapper to avoid unnecessary allocation. + * + * A `MutableTreeWrapper` is only ever used temporarily for querying a map, + * and is never inserted to the map. + */ + class MutableTreeWrapper extends TreeWrapper: + var queryTree: Tree | Null = null + def tree: Tree = queryTree match + case tree: Tree => tree + case null => ??? + + extension [Config, Res](cache: ExprValueCache[Config, Res]) + def get(config: Config, expr: Tree)(using queryWrapper: MutableTreeWrapper): Option[Res] = + queryWrapper.queryTree = expr + cache.get(config).flatMap(_.get(queryWrapper)) + + def removed(config: Config, expr: Tree)(using queryWrapper: MutableTreeWrapper) = + queryWrapper.queryTree = expr + val innerMap2 = cache(config).removed(queryWrapper) + cache.updated(config, innerMap2) + + def updatedNested(config: Config, expr: Tree, result: Res): ExprValueCache[Config, Res] = + val wrapper = new ImmutableTreeWrapper(expr) + updatedNestedWrapper(config, wrapper, result) + + def updatedNestedWrapper(config: Config, wrapper: ImmutableTreeWrapper, result: Res): ExprValueCache[Config, Res] = + val innerMap = cache.getOrElse(config, Map.empty[TreeWrapper, Res]) + val innerMap2 = innerMap.updated(wrapper, result) + cache.updated(config, innerMap2) + end extension diff --git a/compiler/src/dotty/tools/dotc/transform/init/Errors.scala b/compiler/src/dotty/tools/dotc/transform/init/Errors.scala index 7d92d2b2a921..366fd6be96a2 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Errors.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Errors.scala @@ -5,109 +5,64 @@ package init import ast.tpd._ import core._ -import util.SourcePosition import util.Property -import Decorators._, printing.SyntaxHighlighting +import util.SourcePosition import Types._, Symbols._, Contexts._ -import scala.collection.mutable +import Trace.Trace object Errors: private val IsFromPromotion = new Property.Key[Boolean] sealed trait Error: - def trace: Seq[Tree] + def trace: Trace def show(using Context): String - def pos(using Context): SourcePosition = trace.last.sourcePos + def pos(using Context): SourcePosition = Trace.position(using trace).sourcePos def stacktrace(using Context): String = val preamble: String = if ctx.property(IsFromPromotion).nonEmpty then " Promotion trace:\n" else " Calling trace:\n" - buildStacktrace(trace, preamble) + 
Trace.buildStacktrace(trace, preamble) def issue(using Context): Unit = report.warning(show, this.pos) end Error - def buildStacktrace(trace: Seq[Tree], preamble: String)(using Context): String = if trace.isEmpty then "" else preamble + { - var lastLineNum = -1 - var lines: mutable.ArrayBuffer[String] = new mutable.ArrayBuffer - trace.foreach { tree => - val pos = tree.sourcePos - val prefix = "-> " - val line = - if pos.source.exists then - val loc = "[ " + pos.source.file.name + ":" + (pos.line + 1) + " ]" - val code = SyntaxHighlighting.highlight(pos.lineContent.trim.nn) - i"$code\t$loc" - else - tree.show - val positionMarkerLine = - if pos.exists && pos.source.exists then - positionMarker(pos) - else "" - - // always use the more precise trace location - if lastLineNum == pos.line then - lines.dropRightInPlace(1) - - lines += (prefix + line + "\n" + positionMarkerLine) - - lastLineNum = pos.line - } - val sb = new StringBuilder - for line <- lines do sb.append(line) - sb.toString - } - - /** Used to underline source positions in the stack trace - * pos.source must exist - */ - private def positionMarker(pos: SourcePosition): String = - val trimmed = pos.lineContent.takeWhile(c => c.isWhitespace).length - val padding = pos.startColumnPadding.substring(trimmed).nn + " " - val carets = - if (pos.startLine == pos.endLine) - "^" * math.max(1, pos.endColumn - pos.startColumn) - else "^" - - s"$padding$carets\n" - override def toString() = this.getClass.getName.nn /** Access non-initialized field */ - case class AccessNonInit(field: Symbol)(val trace: Seq[Tree]) extends Error: - def source: Tree = trace.last + case class AccessNonInit(field: Symbol)(val trace: Trace) extends Error: + def source: Tree = Trace.position(using trace) def show(using Context): String = "Access non-initialized " + field.show + "." + stacktrace override def pos(using Context): SourcePosition = field.sourcePos /** Promote a value under initialization to fully-initialized */ - case class PromoteError(msg: String)(val trace: Seq[Tree]) extends Error: + case class PromoteError(msg: String)(val trace: Trace) extends Error: def show(using Context): String = msg + stacktrace - case class AccessCold(field: Symbol)(val trace: Seq[Tree]) extends Error: + case class AccessCold(field: Symbol)(val trace: Trace) extends Error: def show(using Context): String = - "Access field " + field.show + " on a cold object." + stacktrace + "Access field " + field.show + " on an uninitialized (Cold) object." + stacktrace - case class CallCold(meth: Symbol)(val trace: Seq[Tree]) extends Error: + case class CallCold(meth: Symbol)(val trace: Trace) extends Error: def show(using Context): String = - "Call method " + meth.show + " on a cold object." + stacktrace + "Call method " + meth.show + " on an uninitialized (Cold) object." + stacktrace - case class CallUnknown(meth: Symbol)(val trace: Seq[Tree]) extends Error: + case class CallUnknown(meth: Symbol)(val trace: Trace) extends Error: def show(using Context): String = val prefix = if meth.is(Flags.Method) then "Calling the external method " else "Accessing the external field" prefix + meth.show + " may cause initialization errors." 
+ stacktrace /** Promote a value under initialization to fully-initialized */ - case class UnsafePromotion(msg: String, error: Error)(val trace: Seq[Tree]) extends Error: + case class UnsafePromotion(msg: String, error: Error)(val trace: Trace) extends Error: def show(using Context): String = msg + stacktrace + "\n" + - "Promoting the value to hot (transitively initialized) failed due to the following problem:\n" + { + "Promoting the value to transitively initialized (Hot) failed due to the following problem:\n" + { val ctx2 = ctx.withProperty(IsFromPromotion, Some(true)) error.show(using ctx2) } @@ -116,7 +71,7 @@ object Errors: * * Invariant: argsIndices.nonEmpty */ - case class UnsafeLeaking(error: Error, nonHotOuterClass: Symbol, argsIndices: List[Int])(val trace: Seq[Tree]) extends Error: + case class UnsafeLeaking(error: Error, nonHotOuterClass: Symbol, argsIndices: List[Int])(val trace: Trace) extends Error: def show(using Context): String = "Problematic object instantiation: " + argumentInfo() + stacktrace + "\n" + "It leads to the following error during object initialization:\n" + @@ -141,5 +96,5 @@ object Errors: acc + text2 } val verb = if multiple then " are " else " is " - val adjective = "not hot (transitively initialized)." + val adjective = "not transitively initialized (Hot)." subject + verb + adjective diff --git a/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala b/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala index eb1692e00a12..4548dccb598f 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala @@ -15,10 +15,19 @@ import config.Printers.init as printer import reporting.trace as log import Errors.* +import Trace.* +import Util.* +import Cache.* import scala.collection.mutable import scala.annotation.tailrec +/** + * Checks safe initialization of objects + * + * This algorithm cannot handle safe access of global object names. That part + * is handled by the check in `Objects` (@see Objects). 
+ */
 object Semantic:
 
 // ----- Domain definitions --------------------------------
 
@@ -55,16 +64,18 @@ object Semantic:
   sealed abstract class Value:
     def show(using Context): String = this match
       case ThisRef(klass) =>
-        "ThisRef[" + klass.show + "]"
+        "the original object of type (" + klass.show + ") where initialization checking started"
       case Warm(klass, outer, ctor, args) =>
         val argsText = if args.nonEmpty then ", args = " + args.map(_.show).mkString("(", ", ", ")") else ""
-        "Warm[" + klass.show + "] { outer = " + outer.show + argsText + " }"
+        "a non-transitively initialized (Warm) object of type (" + klass.show + ") { outer = " + outer.show + argsText + " }"
      case Fun(expr, thisV, klass) =>
-        "Fun { this = " + thisV.show + ", owner = " + klass.show + " }"
+        "a function where \"this\" is (" + thisV.show + ")"
      case RefSet(values) =>
        values.map(_.show).mkString("Set { ", ", ", " }")
-      case _ =>
-        this.toString()
+      case Hot =>
+        "a transitively initialized (Hot) object"
+      case Cold =>
+        "an uninitialized (Cold) object"
 
     def isHot = this == Hot
     def isCold = this == Cold
@@ -117,7 +128,7 @@
       assert(!populatingParams, "the object is already populating parameters")
       populatingParams = true
       val tpl = klass.defTree.asInstanceOf[TypeDef].rhs.asInstanceOf[Template]
-      extendTrace(klass.defTree) { this.callConstructor(ctor, args.map(arg => ArgInfo(arg, trace))) }
+      extendTrace(klass.defTree) { this.callConstructor(ctor, args.map(arg => new ArgInfo(arg, trace))) }
      populatingParams = false
      this
    }
@@ -207,7 +218,7 @@ object Cache:
   /** Cache for expressions
    *
-   *   Ref -> Tree -> Value
+   *   Value -> Tree -> Value
    *
    * The first key is the value of `this` for the expression.
    *
@@ -233,66 +244,27 @@
    * that could be reused to check other classes. We employ this trick to
    * improve performance of the analysis.
    */
-  private type ExprValueCache = Map[Value, Map[TreeWrapper, Value]]
 
   /** The heap for abstract objects
    *
-   * The heap objects are immutable.
-   */
-  private type Heap = Map[Ref, Objekt]
-
-  /** A wrapper for trees for storage in maps based on referential equality of trees. */
-  private abstract class TreeWrapper:
-    def tree: Tree
-
-    override final def equals(other: Any): Boolean =
-      other match
-      case that: TreeWrapper => this.tree eq that.tree
-      case _ => false
-
-    override final def hashCode = tree.hashCode
-
-  /** The immutable wrapper is intended to be stored as key in the heap. */
-  private class ImmutableTreeWrapper(val tree: Tree) extends TreeWrapper
-
-  /** For queries on the heap, reuse the same wrapper to avoid unnecessary allocation.
+   * The heap objects are immutable and their values are essentially derived
+   * from the cache, thus they are not part of the configuration.
    *
-   * A `MutableTreeWrapper` is only ever used temporarily for querying a map,
-   * and is never inserted to the map.
+   * The only exception is the object corresponding to `ThisRef`, where the
+   * object remembers the set of initialized fields. That information is reset
+   * in each iteration and is thus harmless.
    */
-  private class MutableTreeWrapper extends TreeWrapper:
-    var queryTree: Tree | Null = null
-    def tree: Tree = queryTree match
-      case tree: Tree => tree
-      case null => ???
-
-  class Cache:
-    /** The cache for expression values from last iteration */
-    private var last: ExprValueCache = Map.empty
+  private type Heap = Map[Ref, Objekt]
 
-    /** The output cache for expression values
-     *
-     * The output cache is computed based on the cache values `last` from the
-     * last iteration.
- * - * Both `last` and `current` are required to make sure an encountered - * expression is evaluated once in each iteration. - */ - private var current: ExprValueCache = Map.empty + class Data extends Cache[Value, Value]: /** Global cached values for expressions * * The values are only added when a fixed point is reached. * * It is intended to improve performance for computation related to warm values. */ - private var stable: ExprValueCache = Map.empty - - /** Whether the current heap is different from the last heap? - * - * `changed == false` implies that the fixed point has been reached. - */ - private var changed: Boolean = false + private var stable: ExprValueCache[Value, Value] = Map.empty /** Abstract heap stores abstract objects * @@ -320,77 +292,38 @@ object Semantic: /** Used to revert heap to last stable heap. */ private var heapStable: Heap = Map.empty - /** Used to avoid allocation, its state does not matter */ - private given MutableTreeWrapper = new MutableTreeWrapper - - def hasChanged = changed - - def get(value: Value, expr: Tree): Option[Value] = - current.get(value, expr) match - case None => stable.get(value, expr) + override def get(value: Value, expr: Tree): Option[Value] = + stable.get(value, expr) match + case None => super.get(value, expr) case res => res /** Backup the state of the cache * * All the shared data structures must be immutable. */ - def backup(): Cache = - val cache = new Cache - cache.last = this.last - cache.current = this.current + def backup(): Data = + val cache = new Data cache.stable = this.stable cache.heap = this.heap cache.heapStable = this.heapStable cache.changed = this.changed + cache.last = this.last + cache.current = this.current cache /** Restore state from a backup */ - def restore(cache: Cache) = + def restore(cache: Data) = + this.changed = cache.changed this.last = cache.last this.current = cache.current this.stable = cache.stable this.heap = cache.heap this.heapStable = cache.heapStable - this.changed = cache.changed - - /** Copy the value of `(value, expr)` from the last cache to the current cache - * - * It assumes the value is `Hot` if it doesn't exist in the last cache. - * - * It updates the current caches if the values change. - * - * The two caches are required because we want to make sure in a new iteration, an expression is evaluated once. - */ - def assume(value: Value, expr: Tree, cacheResult: Boolean)(fun: => Value): Contextual[Value] = - val assumeValue: Value = - last.get(value, expr) match - case Some(value) => value - case None => - this.last = last.updatedNested(value, expr, Hot) - Hot - - this.current = current.updatedNested(value, expr, assumeValue) - - val actual = fun - if actual != assumeValue then - this.changed = true - this.current = this.current.updatedNested(value, expr, actual) - else - // It's tempting to cache the value in stable, but it's unsound. - // The reason is that the current value may depend on other values - // which might change. - // - // stable.put(value, expr, actual) - () - end if - - actual - end assume /** Commit current cache to stable cache. */ private def commitToStableCache() = for - (v, m) <- current + (v, m) <- this.current if v.isWarm // It's useless to cache value for ThisRef. (wrapper, res) <- m do @@ -404,10 +337,8 @@ object Semantic: * * 3. Revert heap to stable. 
*/ - def prepareForNextIteration()(using Context) = - this.changed = false - this.last = this.current - this.current = Map.empty + override def prepareForNextIteration()(using Context) = + super.prepareForNextIteration() this.heap = this.heapStable /** Prepare for checking next class @@ -421,15 +352,15 @@ object Semantic: * 4. Reset last cache. */ def prepareForNextClass()(using Context) = - if this.changed then - this.changed = false + if this.hasChanged then this.heap = this.heapStable else this.commitToStableCache() this.heapStable = this.heap - this.last = Map.empty - this.current = Map.empty + // reset changed and cache + super.prepareForNextIteration() + def updateObject(ref: Ref, obj: Objekt) = assert(!this.heapStable.contains(ref)) @@ -438,59 +369,19 @@ object Semantic: def containsObject(ref: Ref) = heap.contains(ref) def getObject(ref: Ref) = heap(ref) - end Cache - - extension (cache: ExprValueCache) - private def get(value: Value, expr: Tree)(using queryWrapper: MutableTreeWrapper): Option[Value] = - queryWrapper.queryTree = expr - cache.get(value).flatMap(_.get(queryWrapper)) - - private def removed(value: Value, expr: Tree)(using queryWrapper: MutableTreeWrapper) = - queryWrapper.queryTree = expr - val innerMap2 = cache(value).removed(queryWrapper) - cache.updated(value, innerMap2) - - private def updatedNested(value: Value, expr: Tree, result: Value): ExprValueCache = - val wrapper = new ImmutableTreeWrapper(expr) - updatedNestedWrapper(value, wrapper, result) - - private def updatedNestedWrapper(value: Value, wrapper: ImmutableTreeWrapper, result: Value): ExprValueCache = - val innerMap = cache.getOrElse(value, Map.empty[TreeWrapper, Value]) - val innerMap2 = innerMap.updated(wrapper, result) - cache.updated(value, innerMap2) - end extension - end Cache + end Data - import Cache.* + end Cache - inline def cache(using c: Cache): Cache = c + inline def cache(using c: Cache.Data): Cache.Data = c // ----- Checker State ----------------------------------- /** The state that threads through the interpreter */ - type Contextual[T] = (Context, Trace, Promoted, Cache, Reporter) ?=> T + type Contextual[T] = (Context, Trace, Promoted, Cache.Data, Reporter) ?=> T // ----- Error Handling ----------------------------------- - object Trace: - opaque type Trace = Vector[Tree] - - val empty: Trace = Vector.empty - - extension (trace: Trace) - def add(node: Tree): Trace = trace :+ node - def toVector: Vector[Tree] = trace - - def show(using trace: Trace, ctx: Context): String = buildStacktrace(trace, "\n") - - def position(using trace: Trace): Tree = trace.last - type Trace = Trace.Trace - - import Trace.* - def trace(using t: Trace): Trace = t - inline def withTrace[T](t: Trace)(op: Trace ?=> T): T = op(using t) - inline def extendTrace[T](node: Tree)(using t: Trace)(op: Trace ?=> T): T = op(using t.add(node)) - /** Error reporting */ trait Reporter: def report(err: Error): Unit @@ -508,7 +399,7 @@ object Semantic: /** * Revert the cache to previous state. 
*/ - def abort()(using Cache): Unit + def abort()(using Cache.Data): Unit def errors: List[Error] object Reporter: @@ -517,8 +408,8 @@ object Semantic: def errors = buf.toList def report(err: Error) = buf += err - class TryBufferedReporter(backup: Cache) extends BufferedReporter with TryReporter: - def abort()(using Cache): Unit = cache.restore(backup) + class TryBufferedReporter(backup: Cache.Data) extends BufferedReporter with TryReporter: + def abort()(using Cache.Data): Unit = cache.restore(backup) class ErrorFound(val error: Error) extends Exception class StopEarlyReporter extends Reporter: @@ -529,7 +420,7 @@ object Semantic: * The TryReporter cannot be thrown away: either `abort` must be called or * the errors must be reported. */ - def errorsIn(fn: Reporter ?=> Unit)(using Cache): TryReporter = + def errorsIn(fn: Reporter ?=> Unit)(using Cache.Data): TryReporter = val reporter = new TryBufferedReporter(cache.backup()) fn(using reporter) reporter @@ -544,7 +435,7 @@ object Semantic: catch case ex: ErrorFound => ex.error :: Nil - def hasErrors(fn: Reporter ?=> Unit)(using Cache): Boolean = + def hasErrors(fn: Reporter ?=> Unit)(using Cache.Data): Boolean = val backup = cache.backup() val errors = stopEarly(fn) cache.restore(backup) @@ -581,7 +472,7 @@ object Semantic: def widenArg: Contextual[Value] = a match case _: Ref | _: Fun => - val hasError = Reporter.hasErrors { a.promote("Argument cannot be promoted to hot") } + val hasError = Reporter.hasErrors { a.promote("Argument is not provably transitively initialized (Hot)") } if hasError then Cold else Hot case RefSet(refs) => @@ -606,14 +497,14 @@ object Semantic: case _ => cache.getObject(ref) - def ensureObjectExists()(using Cache): ref.type = + def ensureObjectExists()(using Cache.Data): ref.type = if cache.containsObject(ref) then printer.println("object " + ref + " already exists") ref else ensureFresh() - def ensureFresh()(using Cache): ref.type = + def ensureFresh()(using Cache.Data): ref.type = val obj = Objekt(ref.klass, fields = Map.empty, outers = Map(ref.klass -> ref.outer)) printer.println("reset object " + ref) cache.updateObject(ref, obj) @@ -664,7 +555,7 @@ object Semantic: Hot case Cold => - val error = AccessCold(field)(trace.toVector) + val error = AccessCold(field)(trace) reporter.report(error) Hot @@ -689,11 +580,11 @@ object Semantic: val rhs = target.defTree.asInstanceOf[ValOrDefDef].rhs eval(rhs, ref, target.owner.asClass, cacheResult = true) else - val error = CallUnknown(field)(trace.toVector) + val error = CallUnknown(field)(trace) reporter.report(error) Hot else - val error = AccessNonInit(target)(trace.toVector) + val error = AccessNonInit(target)(trace) reporter.report(error) Hot else @@ -779,7 +670,7 @@ object Semantic: case Cold => promoteArgs() - val error = CallCold(meth)(trace.toVector) + val error = CallCold(meth)(trace) reporter.report(error) Hot @@ -818,9 +709,11 @@ object Semantic: // no source code available promoteArgs() // try promoting the receiver as last resort - val hasErrors = Reporter.hasErrors { ref.promote("try promote value to hot") } + val hasErrors = Reporter.hasErrors { + ref.promote(ref.show + " has no source code and is not provably transitively initialized (Hot).") + } if hasErrors then - val error = CallUnknown(target)(trace.toVector) + val error = CallUnknown(target)(trace) reporter.report(error) Hot else if target.exists then @@ -899,7 +792,7 @@ object Semantic: Hot else // no source code available - val error = CallUnknown(ctor)(trace.toVector) + val error = 
CallUnknown(ctor)(trace) reporter.report(error) Hot } @@ -922,7 +815,7 @@ object Semantic: yield i + 1 - val error = UnsafeLeaking(errors.head, nonHotOuterClass, indices)(trace.toVector) + val error = UnsafeLeaking(errors.head, nonHotOuterClass, indices)(trace) reporter.report(error) Hot else @@ -947,7 +840,7 @@ object Semantic: tryLeak(warm, NoSymbol, args2) case Cold => - val error = CallCold(ctor)(trace.toVector) + val error = CallCold(ctor)(trace) reporter.report(error) Hot @@ -1004,7 +897,7 @@ object Semantic: case Cold => Cold - case ref: Ref => eval(vdef.rhs, ref, enclosingClass) + case ref: Ref => eval(vdef.rhs, ref, enclosingClass, cacheResult = sym.is(Flags.Lazy)) case _ => report.error("[Internal error] unexpected this value when accessing local variable, sym = " + sym.show + ", thisValue = " + thisValue2.show + Trace.show, Trace.position) @@ -1078,7 +971,7 @@ object Semantic: case Hot => case Cold => - reporter.report(PromoteError(msg)(trace.toVector)) + reporter.report(PromoteError(msg)(trace)) case thisRef: ThisRef => val emptyFields = thisRef.nonInitFields() @@ -1086,7 +979,7 @@ object Semantic: promoted.promoteCurrent(thisRef) else val fields = "Non initialized field(s): " + emptyFields.map(_.show).mkString(", ") + "." - reporter.report(PromoteError(msg + "\n" + fields)(trace.toVector)) + reporter.report(PromoteError(msg + "\n" + fields)(trace)) case warm: Warm => if !promoted.contains(warm) then @@ -1100,13 +993,13 @@ object Semantic: val errors = Reporter.stopEarly { val res = { given Trace = Trace.empty - eval(body, thisV, klass) + eval(body, thisV, klass, cacheResult = true) } given Trace = Trace.empty.add(body) - res.promote("The function return value is not hot. Found = " + res.show + ".") + res.promote("Only transitively initialized (Hot) values can be returned by functions. The function " + fun.show + " returns " + res.show + ".") } if errors.nonEmpty then - reporter.report(UnsafePromotion(msg, errors.head)(trace.toVector)) + reporter.report(UnsafePromotion(msg, errors.head)(trace)) else promoted.add(fun) @@ -1147,7 +1040,7 @@ object Semantic: // // This invariant holds because of the Scala/Java/JVM restriction that we cannot use `this` in super constructor calls. 
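The reworded diagnostics in this file ("uninitialized (Cold)", "transitively initialized (Hot)") describe values that escape a constructor before all fields are assigned. A minimal sketch of the pattern being guarded against; the two classes are invented for illustration and are not taken from the test suite:

```scala
class Parent:
  val child = new Child(this) // `this` escapes while `name` is still unassigned
  val name: String = "p"

class Child(p: Parent):
  val len = p.name.length     // would throw a NullPointerException at runtime
```

Under `-Ysafe-init`, `this` is at best a Warm value when passed to `Child`, so the read of `p.name` inside `Child`'s constructor is reported rather than assumed safe.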
if subClassSegmentHot && !isHotSegment then - report.error("[Internal error] Expect current segment to hot in promotion, current klass = " + klass.show + + report.error("[Internal error] Expect current segment to be transitively initialized (Hot) in promotion, current klass = " + klass.show + ", subclass = " + subClass.show + Trace.show, Trace.position) // If the outer and parameters of a class are all hot, then accessing fields and methods of the current @@ -1156,20 +1049,20 @@ object Semantic: if !isHotSegment then for member <- klass.info.decls do if member.isClass then - val error = PromoteError("Promotion cancelled as the value contains inner " + member.show + ".")(Vector.empty) + val error = PromoteError("Promotion cancelled as the value contains inner " + member.show + ".")(Trace.empty) reporter.report(error) else if !member.isType && !member.isConstructor && !member.is(Flags.Deferred) then given Trace = Trace.empty if member.is(Flags.Method, butNot = Flags.Accessor) then - val args = member.info.paramInfoss.flatten.map(_ => ArgInfo(Hot, Trace.empty)) + val args = member.info.paramInfoss.flatten.map(_ => new ArgInfo(Hot: Value, Trace.empty)) val res = warm.call(member, args, receiver = warm.klass.typeRef, superType = NoType) withTrace(trace.add(member.defTree)) { - res.promote("Cannot prove that the return value of " + member.show + " is hot. Found = " + res.show + ".") + res.promote("Could not verify that the return value of " + member.show + " is transitively initialized (Hot). It was found to be " + res.show + ".") } else val res = warm.select(member, receiver = warm.klass.typeRef) withTrace(trace.add(member.defTree)) { - res.promote("Cannot prove that the field " + member.show + " is hot. Found = " + res.show + ".") + res.promote("Could not verify that the field " + member.show + " is transitively initialized (Hot). It was found to be " + res.show + ".") } end for @@ -1189,7 +1082,7 @@ object Semantic: } if errors.isEmpty then Nil - else UnsafePromotion(msg, errors.head)(trace.toVector) :: Nil + else UnsafePromotion(msg, errors.head)(trace) :: Nil } end extension @@ -1212,7 +1105,7 @@ object Semantic: * * The class to be checked must be an instantiable concrete class. */ - private def checkClass(classSym: ClassSymbol)(using Cache, Context): Unit = + private def checkClass(classSym: ClassSymbol)(using Cache.Data, Context): Unit = val thisRef = ThisRef(classSym) val tpl = classSym.defTree.asInstanceOf[TypeDef].rhs.asInstanceOf[Template] @@ -1231,7 +1124,7 @@ object Semantic: log("checking " + classSym) { eval(tpl, thisRef, classSym) } reporter.errors.foreach(_.issue) - if cache.hasChanged && reporter.errors.isEmpty then + if cache.hasChanged && reporter.errors.isEmpty && cache.isUsed then // code to prepare cache and heap for next iteration cache.prepareForNextIteration() iterate() @@ -1246,16 +1139,16 @@ object Semantic: * Check the specified concrete classes */ def checkClasses(classes: List[ClassSymbol])(using Context): Unit = - given Cache() + given Cache.Data() for classSym <- classes if isConcreteClass(classSym) do checkClass(classSym) // ----- Semantic definition -------------------------------- + type ArgInfo = TraceValue[Value] - /** Utility definition used for better error-reporting of argument errors */ - case class ArgInfo(value: Value, trace: Trace): - def promote: Contextual[Unit] = withTrace(trace) { - value.promote("Cannot prove the method argument is hot. 
Only hot values are safe to leak.\nFound = " + value.show + ".") + extension (arg: ArgInfo) + def promote: Contextual[Unit] = withTrace(arg.trace) { + arg.value.promote("Could not verify that the method argument is transitively initialized (Hot). It was found to be " + arg.value.show + ". Only transitively initialized arguments may be passed to methods (except constructors).") } /** Evaluate an expression with the given value for `this` in a given class `klass` @@ -1279,10 +1172,7 @@ object Semantic: * @param cacheResult It is used to reduce the size of the cache. */ def eval(expr: Tree, thisV: Ref, klass: ClassSymbol, cacheResult: Boolean = false): Contextual[Value] = log("evaluating " + expr.show + ", this = " + thisV.show + " in " + klass.show, printer, (_: Value).show) { - cache.get(thisV, expr) match - case Some(value) => value - case None => - cache.assume(thisV, expr, cacheResult) { cases(expr, thisV, klass) } + cache.cachedEval(thisV, expr, cacheResult, default = Hot) { expr => cases(expr, thisV, klass) } } /** Evaluate a list of expressions */ @@ -1299,7 +1189,7 @@ object Semantic: else eval(arg.tree, thisV, klass) - argInfos += ArgInfo(res, trace.add(arg.tree)) + argInfos += new ArgInfo(res, trace.add(arg.tree)) } argInfos.toList @@ -1399,12 +1289,12 @@ object Semantic: eval(qual, thisV, klass) val res = eval(rhs, thisV, klass) extendTrace(expr) { - res.ensureHot("The RHS of reassignment must be hot. Found = " + res.show + ". ") + res.ensureHot("The RHS of reassignment must be transitively initialized (Hot). It was found to be " + res.show + ". ") } case id: Ident => val res = eval(rhs, thisV, klass) extendTrace(expr) { - res.ensureHot("The RHS of reassignment must be hot. Found = " + res.show + ". ") + res.ensureHot("The RHS of reassignment must be transitively initialized (Hot). It was found to be " + res.show + ". ") } case closureDef(ddef) => @@ -1427,14 +1317,14 @@ object Semantic: case Match(selector, cases) => val res = eval(selector, thisV, klass) extendTrace(selector) { - res.ensureHot("The value to be matched needs to be hot. Found = " + res.show + ". ") + res.ensureHot("The value to be matched needs to be transitively initialized (Hot). It was found to be " + res.show + ". ") } eval(cases.map(_.body), thisV, klass).join case Return(expr, from) => val res = eval(expr, thisV, klass) extendTrace(expr) { - res.ensureHot("return expression must be hot. Found = " + res.show + ". ") + res.ensureHot("return expression must be transitively initialized (Hot). It was found to be " + res.show + ". ") } case WhileDo(cond, body) => @@ -1667,7 +1557,7 @@ object Semantic: // The parameter check of traits comes late in the mixin phase. // To avoid crash we supply hot values for erroneous parent calls. // See tests/neg/i16438.scala. 
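The argument-promotion message rewritten just above fires when a value that is not yet transitively initialized is passed to a method whose body the checker cannot analyze. A common trigger, sketched here purely for illustration, is handing `this` to an external method during construction:

```scala
class A:
  println(this) // `println` has no source to analyze, so its argument must be Hot;
                // flagged because `x` below is still uninitialized at this point
  val x: Int = 1
```

The "except constructors" caveat in the message reflects that constructor calls are tracked precisely as Warm objects instead of going through promotion.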
- val args: List[ArgInfo] = ctor.info.paramInfoss.flatten.map(_ => ArgInfo(Hot, Trace.empty)) + val args: List[ArgInfo] = ctor.info.paramInfoss.flatten.map(_ => new ArgInfo(Hot, Trace.empty)) extendTrace(superParent) { superCall(tref, ctor, args, tasks) } @@ -1726,85 +1616,3 @@ object Semantic: traverseChildren(tp) traverser.traverse(tpt.tpe) - -// ----- Utility methods and extractors -------------------------------- - - def typeRefOf(tp: Type)(using Context): TypeRef = tp.dealias.typeConstructor match - case tref: TypeRef => tref - case hklambda: HKTypeLambda => typeRefOf(hklambda.resType) - - - opaque type Arg = Tree | ByNameArg - case class ByNameArg(tree: Tree) - - extension (arg: Arg) - def isByName = arg.isInstanceOf[ByNameArg] - def tree: Tree = arg match - case t: Tree => t - case ByNameArg(t) => t - - object Call: - - def unapply(tree: Tree)(using Context): Option[(Tree, List[List[Arg]])] = - tree match - case Apply(fn, args) => - val argTps = fn.tpe.widen match - case mt: MethodType => mt.paramInfos - val normArgs: List[Arg] = args.zip(argTps).map { - case (arg, _: ExprType) => ByNameArg(arg) - case (arg, _) => arg - } - unapply(fn) match - case Some((ref, args0)) => Some((ref, args0 :+ normArgs)) - case None => None - - case TypeApply(fn, targs) => - unapply(fn) - - case ref: RefTree if ref.tpe.widenSingleton.isInstanceOf[MethodicType] => - Some((ref, Nil)) - - case _ => None - - object NewExpr: - def unapply(tree: Tree)(using Context): Option[(TypeRef, New, Symbol, List[List[Arg]])] = - tree match - case Call(fn @ Select(newTree: New, init), argss) if init == nme.CONSTRUCTOR => - val tref = typeRefOf(newTree.tpe) - Some((tref, newTree, fn.symbol, argss)) - case _ => None - - object PolyFun: - def unapply(tree: Tree)(using Context): Option[Tree] = - tree match - case Block((cdef: TypeDef) :: Nil, Typed(NewExpr(tref, _, _, _), _)) - if tref.symbol.isAnonymousClass && tref <:< defn.PolyFunctionType - => - val body = cdef.rhs.asInstanceOf[Template].body - val apply = body.head.asInstanceOf[DefDef] - Some(apply.rhs) - case _ => - None - - extension (symbol: Symbol) def hasSource(using Context): Boolean = - !symbol.defTree.isEmpty - - def resolve(cls: ClassSymbol, sym: Symbol)(using Context): Symbol = log("resove " + cls + ", " + sym, printer, (_: Symbol).show) { - if (sym.isEffectivelyFinal || sym.isConstructor) sym - else sym.matchingMember(cls.appliedRef) - } - - private def isConcreteClass(cls: ClassSymbol)(using Context) = { - val instantiable: Boolean = - cls.is(Flags.Module) || - !cls.isOneOf(Flags.AbstractOrTrait) && { - // see `Checking.checkInstantiable` in typer - val tp = cls.appliedRef - val stp = SkolemType(tp) - val selfType = cls.givenSelfType.asSeenFrom(stp, cls) - !selfType.exists || stp <:< selfType - } - - // A concrete class may not be instantiated if the self type is not satisfied - instantiable && cls.enclosingPackageClass != defn.StdLibPatchesPackage.moduleClass - } diff --git a/compiler/src/dotty/tools/dotc/transform/init/Trace.scala b/compiler/src/dotty/tools/dotc/transform/init/Trace.scala new file mode 100644 index 000000000000..7dfbc0b6cfa5 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/transform/init/Trace.scala @@ -0,0 +1,82 @@ +package dotty.tools.dotc +package transform +package init + +import core.* +import Contexts.* +import ast.tpd.* +import util.SourcePosition + +import Decorators._, printing.SyntaxHighlighting + +import scala.collection.mutable + +/** Logic related to evaluation trace for showing friendly error messages + * + * A trace is a 
sequence of program positions which tells the evaluation order + * that leads to an error. It is usually more informative than the stack trace + * by tracking the exact sub-expression in the trace instead of only methods. + */ +object Trace: + opaque type Trace = Vector[Tree] + + val empty: Trace = Vector.empty + + extension (trace: Trace) + def add(node: Tree): Trace = trace :+ node + def toVector: Vector[Tree] = trace + def ++(trace2: Trace): Trace = trace ++ trace2 + + def show(using trace: Trace, ctx: Context): String = buildStacktrace(trace, "\n") + + def position(using trace: Trace): Tree = trace.last + + def trace(using t: Trace): Trace = t + + inline def withTrace[T](t: Trace)(op: Trace ?=> T): T = op(using t) + + inline def extendTrace[T](node: Tree)(using t: Trace)(op: Trace ?=> T): T = op(using t.add(node)) + + def buildStacktrace(trace: Trace, preamble: String)(using Context): String = if trace.isEmpty then "" else preamble + { + var lastLineNum = -1 + var lines: mutable.ArrayBuffer[String] = new mutable.ArrayBuffer + trace.foreach { tree => + val pos = tree.sourcePos + val prefix = "-> " + val line = + if pos.source.exists then + val loc = "[ " + pos.source.file.name + ":" + (pos.line + 1) + " ]" + val code = SyntaxHighlighting.highlight(pos.lineContent.trim.nn) + i"$code\t$loc" + else + tree.show + val positionMarkerLine = + if pos.exists && pos.source.exists then + positionMarker(pos) + else "" + + // always use the more precise trace location + if lastLineNum == pos.line then + lines.dropRightInPlace(1) + + lines += (prefix + line + "\n" + positionMarkerLine) + + lastLineNum = pos.line + } + val sb = new StringBuilder + for line <- lines do sb.append(line) + sb.toString + } + + /** Used to underline source positions in the stack trace + * pos.source must exist + */ + private def positionMarker(pos: SourcePosition): String = + val trimmed = pos.lineContent.takeWhile(c => c.isWhitespace).length + val padding = pos.startColumnPadding.substring(trimmed).nn + " " + val carets = + if (pos.startLine == pos.endLine) + "^" * math.max(1, pos.endColumn - pos.startColumn) + else "^" + + s"$padding$carets\n" diff --git a/compiler/src/dotty/tools/dotc/transform/init/Util.scala b/compiler/src/dotty/tools/dotc/transform/init/Util.scala new file mode 100644 index 000000000000..ba2216504aef --- /dev/null +++ b/compiler/src/dotty/tools/dotc/transform/init/Util.scala @@ -0,0 +1,100 @@ +package dotty.tools.dotc +package transform +package init + +import core.* +import Contexts.* +import Types.* +import Symbols.* +import StdNames.* +import ast.tpd.* + +import reporting.trace as log +import config.Printers.init as printer + +import Trace.* + +object Util: + /** Utility definition used for better error-reporting of argument errors */ + case class TraceValue[T](value: T, trace: Trace) + + def typeRefOf(tp: Type)(using Context): TypeRef = tp.dealias.typeConstructor match + case tref: TypeRef => tref + case hklambda: HKTypeLambda => typeRefOf(hklambda.resType) + + + opaque type Arg = Tree | ByNameArg + case class ByNameArg(tree: Tree) + + extension (arg: Arg) + def isByName = arg.isInstanceOf[ByNameArg] + def tree: Tree = arg match + case t: Tree => t + case ByNameArg(t) => t + + object Call: + + def unapply(tree: Tree)(using Context): Option[(Tree, List[List[Arg]])] = + tree match + case Apply(fn, args) => + val argTps = fn.tpe.widen match + case mt: MethodType => mt.paramInfos + val normArgs: List[Arg] = args.zip(argTps).map { + case (arg, _: ExprType) => ByNameArg(arg) + case (arg, _) => arg + } + 
+        unapply(fn) match
+        case Some((ref, args0)) => Some((ref, args0 :+ normArgs))
+        case None => None
+
+      case TypeApply(fn, targs) =>
+        unapply(fn)
+
+      case ref: RefTree if ref.tpe.widenSingleton.isInstanceOf[MethodicType] =>
+        Some((ref, Nil))
+
+      case _ => None
+
+  object NewExpr:
+    def unapply(tree: Tree)(using Context): Option[(TypeRef, New, Symbol, List[List[Arg]])] =
+      tree match
+      case Call(fn @ Select(newTree: New, init), argss) if init == nme.CONSTRUCTOR =>
+        val tref = typeRefOf(newTree.tpe)
+        Some((tref, newTree, fn.symbol, argss))
+      case _ => None
+
+  object PolyFun:
+    def unapply(tree: Tree)(using Context): Option[Tree] =
+      tree match
+      case Block((cdef: TypeDef) :: Nil, Typed(NewExpr(tref, _, _, _), _))
+      if tref.symbol.isAnonymousClass && tref <:< defn.PolyFunctionType
+      =>
+        val body = cdef.rhs.asInstanceOf[Template].body
+        val apply = body.head.asInstanceOf[DefDef]
+        Some(apply.rhs)
+      case _ =>
+        None
+
+  def resolve(cls: ClassSymbol, sym: Symbol)(using Context): Symbol = log("resolve " + cls + ", " + sym, printer, (_: Symbol).show):
+    if sym.isEffectivelyFinal then sym
+    else sym.matchingMember(cls.appliedRef)
+
+  extension (sym: Symbol)
+    def hasSource(using Context): Boolean = !sym.defTree.isEmpty
+
+    def isStaticObject(using Context) =
+      sym.is(Flags.Module, butNot = Flags.Package) && sym.isStatic
+
+  def isConcreteClass(cls: ClassSymbol)(using Context) =
+    val instantiable: Boolean =
+      cls.is(Flags.Module) ||
+      !cls.isOneOf(Flags.AbstractOrTrait) && {
+        // see `Checking.checkInstantiable` in typer
+        val tp = cls.appliedRef
+        val stp = SkolemType(tp)
+        val selfType = cls.givenSelfType.asSeenFrom(stp, cls)
+        !selfType.exists || stp <:< selfType
+      }
+
+    // A concrete class may not be instantiated if the self type is not satisfied
+    instantiable && cls.enclosingPackageClass != defn.StdLibPatchesPackage.moduleClass
diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala
index 90310a385d0c..eab65890c227 100644
--- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala
+++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala
@@ -9,7 +9,7 @@ import TypeUtils._
 import Contexts._
 import Flags._
 import ast._
-import Decorators._
+import Decorators.{ show => _, * }
 import Symbols._
 import StdNames._
 import NameOps._
@@ -22,9 +22,13 @@ import transform.SymUtils._
 import reporting._
 import config.Printers.{exhaustivity => debug}
 import util.{SrcPos, NoSourcePosition}
-import collection.mutable

-/** Space logic for checking exhaustivity and unreachability of pattern matching
+import scala.annotation.internal.sharable
+import scala.collection.mutable
+
+import SpaceEngine.*
+
+/* Space logic for checking exhaustivity and unreachability of pattern matching
  *
  *  Space can be thought of as a set of possible values. A type or a pattern
  *  both refer to spaces.
The space of a type is the values that inhabit the @@ -53,9 +57,32 @@ import collection.mutable * */ - /** space definition */ -sealed trait Space +sealed trait Space: + + @sharable private val isSubspaceCache = mutable.HashMap.empty[Space, Boolean] + + def isSubspace(b: Space)(using Context): Boolean = + val a = this + val a2 = a.simplify + val b2 = b.simplify + if (a ne a2) || (b ne b2) then a2.isSubspace(b2) + else if a == Empty then true + else if b == Empty then false + else trace(s"isSubspace(${show(this)}, ${show(b)})", debug) { + isSubspaceCache.getOrElseUpdate(b, computeIsSubspace(a, b)) + } + + @sharable private var mySimplified: Space | Null = null + + def simplify(using Context): Space = + val simplified = mySimplified + if simplified == null then + val simplified = SpaceEngine.computeSimplify(this) + mySimplified = simplified + simplified + else simplified +end Space /** Empty space */ case object Empty extends Space @@ -66,7 +93,21 @@ case object Empty extends Space * @param decomposed: does the space result from decomposition? Used for pretty print * */ -case class Typ(tp: Type, decomposed: Boolean = true) extends Space +case class Typ(tp: Type, decomposed: Boolean = true) extends Space: + private var myDecompose: List[Typ] | Null = null + + def canDecompose(using Context): Boolean = decompose != ListOfTypNoType + + def decompose(using Context): List[Typ] = + val decompose = myDecompose + if decompose == null then + val decompose = tp match + case Parts(parts) => parts.map(Typ(_, decomposed = true)) + case _ => ListOfTypNoType + myDecompose = decompose + decompose + else decompose +end Typ /** Space representing an extractor pattern */ case class Prod(tp: Type, unappTp: TermRef, params: List[Space]) extends Space @@ -74,59 +115,28 @@ case class Prod(tp: Type, unappTp: TermRef, params: List[Space]) extends Space /** Union of spaces */ case class Or(spaces: Seq[Space]) extends Space -/** abstract space logic */ -trait SpaceLogic { - /** Is `tp1` a subtype of `tp2`? */ - def isSubType(tp1: Type, tp2: Type): Boolean - - /** True if we can assume that the two unapply methods are the same. - * That is, given the same parameter, they return the same result. - * - * We assume that unapply methods are pure, but the same method may - * be called with different prefixes, thus behaving differently. - */ - def isSameUnapply(tp1: TermRef, tp2: TermRef): Boolean - - /** Return a space containing the values of both types. - * - * The types should be atomic (non-decomposable) and unrelated (neither - * should be a subtype of the other). - */ - def intersectUnrelatedAtomicTypes(tp1: Type, tp2: Type): Space - - /** Is the type `tp` decomposable? i.e. all values of the type can be covered - * by its decomposed types. - * - * Abstract sealed class, OrType, Boolean and Java enums can be decomposed. 
- */ - def canDecompose(tp: Type): Boolean - - /** Return term parameter types of the extractor `unapp` */ - def signature(unapp: TermRef, scrutineeTp: Type, argLen: Int): List[Type] - - /** Get components of decomposable types */ - def decompose(tp: Type): List[Typ] - - /** Whether the extractor covers the given type */ - def covers(unapp: TermRef, scrutineeTp: Type, argLen: Int): Boolean +object SpaceEngine { + import tpd._ - /** Display space in string format */ - def show(sp: Space): String + def simplify(space: Space)(using Context): Space = space.simplify + def isSubspace(a: Space, b: Space)(using Context): Boolean = a.isSubspace(b) + def canDecompose(typ: Typ)(using Context): Boolean = typ.canDecompose + def decompose(typ: Typ)(using Context): List[Typ] = typ.decompose /** Simplify space such that a space equal to `Empty` becomes `Empty` */ - def simplify(space: Space)(using Context): Space = trace(s"simplify ${show(space)} --> ", debug, show)(space match { + def computeSimplify(space: Space)(using Context): Space = trace(s"simplify ${show(space)} --> ", debug, show)(space match { case Prod(tp, fun, spaces) => val sps = spaces.mapconserve(simplify) - if (sps.contains(Empty)) Empty - else if (canDecompose(tp) && decompose(tp).isEmpty) Empty + if sps.contains(Empty) then Empty + else if decompose(tp).isEmpty then Empty else if sps eq spaces then space else Prod(tp, fun, sps) case Or(spaces) => val spaces2 = spaces.map(simplify).filter(_ != Empty) if spaces2.isEmpty then Empty else if spaces2.lengthIs == 1 then spaces2.head else if spaces2.corresponds(spaces)(_ eq _) then space else Or(spaces2) - case Typ(tp, _) => - if (canDecompose(tp) && decompose(tp).isEmpty) Empty + case typ: Typ => + if decompose(typ).isEmpty then Empty else space case _ => space }) @@ -165,26 +175,22 @@ trait SpaceLogic { } /** Is `a` a subspace of `b`? 
Equivalent to `simplify(simplify(a) - simplify(b)) == Empty`, but faster */ - def isSubspace(a: Space, b: Space)(using Context): Boolean = trace(s"isSubspace(${show(a)}, ${show(b)})", debug) { - def tryDecompose1(tp: Type) = canDecompose(tp) && isSubspace(Or(decompose(tp)), b) - def tryDecompose2(tp: Type) = canDecompose(tp) && isSubspace(a, Or(decompose(tp))) - + def computeIsSubspace(a: Space, b: Space)(using Context): Boolean = { val a2 = simplify(a) val b2 = simplify(b) if (a ne a2) || (b ne b2) then isSubspace(a2, b2) else (a, b) match { case (Empty, _) => true case (_, Empty) => false - case (Or(ss), _) => - ss.forall(isSubspace(_, b)) - case (Typ(tp1, _), Typ(tp2, _)) => + case (Or(ss), _) => ss.forall(isSubspace(_, b)) + case (a @ Typ(tp1, _), Or(ss)) => // optimization: don't go to subtraction too early + ss.exists(isSubspace(a, _)) + || canDecompose(a) && isSubspace(Or(decompose(a)), b) + case (_, Or(_)) => simplify(minus(a, b)) == Empty + case (a @ Typ(tp1, _), b @ Typ(tp2, _)) => isSubType(tp1, tp2) - || canDecompose(tp1) && tryDecompose1(tp1) - || canDecompose(tp2) && tryDecompose2(tp2) - case (Typ(tp1, _), Or(ss)) => // optimization: don't go to subtraction too early - ss.exists(isSubspace(a, _)) || tryDecompose1(tp1) - case (_, Or(_)) => - simplify(minus(a, b)) == Empty + || canDecompose(a) && isSubspace(Or(decompose(a)), b) + || canDecompose(b) && isSubspace(a, Or(decompose(b))) case (Prod(tp1, _, _), Typ(tp2, _)) => isSubType(tp1, tp2) case (Typ(tp1, _), Prod(tp2, fun, ss)) => @@ -192,96 +198,74 @@ trait SpaceLogic { && covers(fun, tp1, ss.length) && isSubspace(Prod(tp2, fun, signature(fun, tp1, ss.length).map(Typ(_, false))), b) case (Prod(_, fun1, ss1), Prod(_, fun2, ss2)) => - isSameUnapply(fun1, fun2) && ss1.zip(ss2).forall((isSubspace _).tupled) + isSameUnapply(fun1, fun2) && ss1.lazyZip(ss2).forall(isSubspace) } } /** Intersection of two spaces */ def intersect(a: Space, b: Space)(using Context): Space = trace(s"${show(a)} & ${show(b)}", debug, show) { - def tryDecompose1(tp: Type) = intersect(Or(decompose(tp)), b) - def tryDecompose2(tp: Type) = intersect(a, Or(decompose(tp))) - (a, b) match { case (Empty, _) | (_, Empty) => Empty case (_, Or(ss)) => Or(ss.map(intersect(a, _)).filter(_ ne Empty)) case (Or(ss), _) => Or(ss.map(intersect(_, b)).filter(_ ne Empty)) - case (Typ(tp1, _), Typ(tp2, _)) => - if (isSubType(tp1, tp2)) a - else if (isSubType(tp2, tp1)) b - else if (canDecompose(tp1)) tryDecompose1(tp1) - else if (canDecompose(tp2)) tryDecompose2(tp2) - else intersectUnrelatedAtomicTypes(tp1, tp2) - case (Typ(tp1, _), Prod(tp2, fun, ss)) => - if (isSubType(tp2, tp1)) b - else if (canDecompose(tp1)) tryDecompose1(tp1) - else if (isSubType(tp1, tp2)) a // problematic corner case: inheriting a case class - else intersectUnrelatedAtomicTypes(tp1, tp2) match - case Typ(tp, _) => Prod(tp, fun, ss) - case sp => sp - case (Prod(tp1, fun, ss), Typ(tp2, _)) => - if (isSubType(tp1, tp2)) a - else if (canDecompose(tp2)) tryDecompose2(tp2) - else if (isSubType(tp2, tp1)) a // problematic corner case: inheriting a case class - else intersectUnrelatedAtomicTypes(tp1, tp2) match - case Typ(tp, _) => Prod(tp, fun, ss) - case sp => sp - case (Prod(tp1, fun1, ss1), Prod(tp2, fun2, ss2)) => - if (!isSameUnapply(fun1, fun2)) intersectUnrelatedAtomicTypes(tp1, tp2) match - case Typ(tp, _) => Prod(tp, fun1, ss1) - case sp => sp - else if (ss1.zip(ss2).exists(p => simplify(intersect(p._1, p._2)) == Empty)) Empty - else Prod(tp1, fun1, ss1.zip(ss2).map((intersect _).tupled)) + case (a 
@ Typ(tp1, _), b @ Typ(tp2, _)) => + if isSubType(tp1, tp2) then a + else if isSubType(tp2, tp1) then b + else if canDecompose(a) then intersect(Or(decompose(a)), b) + else if canDecompose(b) then intersect(a, Or(decompose(b))) + else intersectUnrelatedAtomicTypes(tp1, tp2)(a) + case (a @ Typ(tp1, _), Prod(tp2, fun, ss)) => + if isSubType(tp2, tp1) then b + else if canDecompose(a) then intersect(Or(decompose(a)), b) + else if isSubType(tp1, tp2) then a // problematic corner case: inheriting a case class + else intersectUnrelatedAtomicTypes(tp1, tp2)(b) + case (Prod(tp1, fun, ss), b @ Typ(tp2, _)) => + if isSubType(tp1, tp2) then a + else if canDecompose(b) then intersect(a, Or(decompose(b))) + else if isSubType(tp2, tp1) then a // problematic corner case: inheriting a case class + else intersectUnrelatedAtomicTypes(tp1, tp2)(a) + case (a @ Prod(tp1, fun1, ss1), Prod(tp2, fun2, ss2)) => + if !isSameUnapply(fun1, fun2) then intersectUnrelatedAtomicTypes(tp1, tp2)(a) + else if ss1.lazyZip(ss2).exists((a, b) => simplify(intersect(a, b)) == Empty) then Empty + else Prod(tp1, fun1, ss1.lazyZip(ss2).map(intersect)) } } /** The space of a not covered by b */ def minus(a: Space, b: Space)(using Context): Space = trace(s"${show(a)} - ${show(b)}", debug, show) { - def tryDecompose1(tp: Type) = minus(Or(decompose(tp)), b) - def tryDecompose2(tp: Type) = minus(a, Or(decompose(tp))) - (a, b) match { case (Empty, _) => Empty case (_, Empty) => a - case (Typ(tp1, _), Typ(tp2, _)) => - if (isSubType(tp1, tp2)) Empty - else if (canDecompose(tp1)) tryDecompose1(tp1) - else if (canDecompose(tp2)) tryDecompose2(tp2) + case (Or(ss), _) => Or(ss.map(minus(_, b))) + case (_, Or(ss)) => ss.foldLeft(a)(minus) + case (a @ Typ(tp1, _), b @ Typ(tp2, _)) => + if isSubType(tp1, tp2) then Empty + else if canDecompose(a) then minus(Or(decompose(a)), b) + else if canDecompose(b) then minus(a, Or(decompose(b))) else a - case (Typ(tp1, _), Prod(tp2, fun, ss)) => + case (a @ Typ(tp1, _), Prod(tp2, fun, ss)) => // rationale: every instance of `tp1` is covered by `tp2(_)` if isSubType(tp1, tp2) && covers(fun, tp1, ss.length) then minus(Prod(tp1, fun, signature(fun, tp1, ss.length).map(Typ(_, false))), b) - else if canDecompose(tp1) then - tryDecompose1(tp1) - else - a - case (Or(ss), _) => - Or(ss.map(minus(_, b))) - case (_, Or(ss)) => - ss.foldLeft(a)(minus) - case (Prod(tp1, fun, ss), Typ(tp2, _)) => + else if canDecompose(a) then minus(Or(decompose(a)), b) + else a + case (Prod(tp1, fun, ss), b @ Typ(tp2, _)) => // uncovered corner case: tp2 :< tp1, may happen when inheriting case class - if (isSubType(tp1, tp2)) - Empty - else if (simplify(a) == Empty) - Empty - else if (canDecompose(tp2)) - tryDecompose2(tp2) - else - a + if isSubType(tp1, tp2) then Empty + else if simplify(a) == Empty then Empty + else if canDecompose(b) then minus(a, Or(decompose(b))) + else a case (Prod(tp1, fun1, ss1), Prod(tp2, fun2, ss2)) - if (!isSameUnapply(fun1, fun2)) => a + if !isSameUnapply(fun1, fun2) => a case (Prod(tp1, fun1, ss1), Prod(tp2, fun2, ss2)) - if (fun1.symbol.name == nme.unapply && ss1.length != ss2.length) => a - case (Prod(tp1, fun1, ss1), Prod(tp2, fun2, ss2)) => - - val range = (0 until ss1.size).toList + if fun1.symbol.name == nme.unapply && ss1.length != ss2.length => a + case (a @ Prod(tp1, fun1, ss1), Prod(tp2, fun2, ss2)) => + val range = ss1.indices.toList val cache = Array.fill[Space | Null](ss2.length)(null) def sub(i: Int) = if cache(i) == null then cache(i) = minus(ss1(i), ss2(i)) cache(i).nn - end sub if 
range.exists(i => isSubspace(ss1(i), sub(i))) then a else if cache.forall(sub => isSubspace(sub.nn, Empty)) then Empty @@ -293,9 +277,6 @@ trait SpaceLogic { Or(spaces) } } -} - -object SpaceEngine { /** Is the unapply or unapplySeq irrefutable? * @param unapp The unapply function reference @@ -331,7 +312,7 @@ object SpaceEngine { def isIrrefutableQuotedPattern(unapp: tpd.Tree, implicits: List[tpd.Tree], pt: Type)(using Context): Boolean = { implicits.headOption match // pattern '{ $x: T } - case Some(tpd.Apply(tpd.Select(tpd.Quoted(tpd.TypeApply(fn, List(tpt))), nme.apply), _)) + case Some(tpd.Apply(tpd.Select(tpd.Quote(tpd.TypeApply(fn, List(tpt)), _), nme.apply), _)) if unapp.symbol.owner.eq(defn.QuoteMatching_ExprMatchModule) && fn.symbol.eq(defn.QuotedRuntimePatterns_patternHole) => pt <:< defn.QuotedExprClass.typeRef.appliedTo(tpt.tpe) @@ -343,26 +324,21 @@ object SpaceEngine { case _ => false } -} -/** Scala implementation of space logic */ -class SpaceEngine(using Context) extends SpaceLogic { - import tpd._ - - private val scalaSeqFactoryClass = defn.SeqFactoryClass - private val scalaListType = defn.ListClass.typeRef - private val scalaNilType = defn.NilModule.termRef - private val scalaConsType = defn.ConsClass.typeRef - - private val constantNullType = ConstantType(Constant(null)) - - override def intersectUnrelatedAtomicTypes(tp1: Type, tp2: Type): Space = trace(s"atomic intersection: ${AndType(tp1, tp2).show}", debug) { + /** Return a space containing the values of both types. + * + * The types should be atomic (non-decomposable) and unrelated (neither + * should be a subtype of the other). + */ + def intersectUnrelatedAtomicTypes(tp1: Type, tp2: Type)(sp: Space)(using Context): Space = trace(i"atomic intersection: ${AndType(tp1, tp2)}", debug, show) { // Precondition: !isSubType(tp1, tp2) && !isSubType(tp2, tp1). if !ctx.mode.is(Mode.SafeNulls) && (tp1.isNullType || tp2.isNullType) then // Since projections of types don't include null, intersection with null is empty. Empty else - val intersection = Typ(AndType(tp1, tp2), decomposed = false) + val intersection = sp match + case sp: Prod => sp.copy(AndType(tp1, tp2)) + case _ => Typ(AndType(tp1, tp2), decomposed = false) // unrelated numeric value classes can equal each other, so let's not consider type space intersection empty if tp1.classSymbol.isNumericValueClass && tp2.classSymbol.isNumericValueClass then intersection else if isPrimToBox(tp1, tp2) || isPrimToBox(tp2, tp1) then intersection @@ -371,7 +347,7 @@ class SpaceEngine(using Context) extends SpaceLogic { } /** Return the space that represents the pattern `pat` */ - def project(pat: Tree): Space = pat match { + def project(pat: Tree)(using Context): Space = trace(i"project($pat ${pat.className} ${pat.tpe})", debug, show)(pat match { case Literal(c) => if (c.value.isInstanceOf[Symbol]) Typ(c.value.asInstanceOf[Symbol].termRef, decomposed = false) @@ -398,7 +374,7 @@ class SpaceEngine(using Context) extends SpaceLogic { val funRef = fun1.tpe.asInstanceOf[TermRef] if (fun.symbol.name == nme.unapplySeq) val (arity, elemTp, resultTp) = unapplySeqInfo(fun.tpe.widen.finalResultType, fun.srcPos) - if (fun.symbol.owner == scalaSeqFactoryClass && scalaListType.appliedTo(elemTp) <:< pat.tpe) + if (fun.symbol.owner == defn.SeqFactoryClass && defn.ListType.appliedTo(elemTp) <:< pat.tpe) // The exhaustivity and reachability logic already handles decomposing sum types (into its subclasses) // and product types (into its components). 
To get better counter-examples for patterns that are of type // List (or a super-type of list, like LinearSeq) we project them into spaces that use `::` and Nil. @@ -432,14 +408,14 @@ class SpaceEngine(using Context) extends SpaceLogic { case _ => // Pattern is an arbitrary expression; assume a skolem (i.e. an unknown value) of the pattern type Typ(pat.tpe.narrow, decomposed = false) - } + }) - private def project(tp: Type): Space = tp match { + private def project(tp: Type)(using Context): Space = tp match { case OrType(tp1, tp2) => Or(project(tp1) :: project(tp2) :: Nil) case tp => Typ(tp, decomposed = true) } - private def unapplySeqInfo(resTp: Type, pos: SrcPos): (Int, Type, Type) = { + private def unapplySeqInfo(resTp: Type, pos: SrcPos)(using Context): (Int, Type, Type) = { var resultTp = resTp var elemTp = unapplySeqTypeElemTp(resultTp) var arity = productArity(resultTp, pos) @@ -486,15 +462,14 @@ class SpaceEngine(using Context) extends SpaceLogic { * If `isValue` is true, then pattern-bound symbols are erased to its upper bound. * This is needed to avoid spurious unreachable warnings. See tests/patmat/i6197.scala. */ - private def erase(tp: Type, inArray: Boolean = false, isValue: Boolean = false): Type = trace(i"$tp erased to", debug) { + private def erase(tp: Type, inArray: Boolean = false, isValue: Boolean = false)(using Context): Type = + trace(i"erase($tp${if inArray then " inArray" else ""}${if isValue then " isValue" else ""})", debug)(tp match { + case tp @ AppliedType(tycon, args) if tycon.typeSymbol.isPatternBound => + WildcardType - tp match { case tp @ AppliedType(tycon, args) => - if tycon.typeSymbol.isPatternBound then return WildcardType - - val args2 = - if (tycon.isRef(defn.ArrayClass)) args.map(arg => erase(arg, inArray = true, isValue = false)) - else args.map(arg => erase(arg, inArray = false, isValue = false)) + val inArray = tycon.isRef(defn.ArrayClass) + val args2 = args.map(arg => erase(arg, inArray = inArray, isValue = false)) tp.derivedAppliedType(erase(tycon, inArray, isValue = false), args2) case tp @ OrType(tp1, tp2) => @@ -512,48 +487,49 @@ class SpaceEngine(using Context) extends SpaceLogic { else WildcardType case _ => tp - } - } + }) /** Space of the pattern: unapplySeq(a, b, c: _*) */ - def projectSeq(pats: List[Tree]): Space = { - if (pats.isEmpty) return Typ(scalaNilType, false) + def projectSeq(pats: List[Tree])(using Context): Space = { + if (pats.isEmpty) return Typ(defn.NilType, false) val (items, zero) = if (isWildcardStarArg(pats.last)) - (pats.init, Typ(scalaListType.appliedTo(pats.last.tpe.elemType), false)) + (pats.init, Typ(defn.ListType.appliedTo(pats.last.tpe.elemType), false)) else - (pats, Typ(scalaNilType, false)) + (pats, Typ(defn.NilType, false)) - val unapplyTp = scalaConsType.classSymbol.companionModule.termRef.select(nme.unapply) + val unapplyTp = defn.ConsType.classSymbol.companionModule.termRef.select(nme.unapply) items.foldRight[Space](zero) { (pat, acc) => - val consTp = scalaConsType.appliedTo(pats.head.tpe.widen) + val consTp = defn.ConsType.appliedTo(pats.head.tpe.widen) Prod(consTp, unapplyTp, project(pat) :: acc :: Nil) } } - def isPrimToBox(tp: Type, pt: Type): Boolean = + def isPrimToBox(tp: Type, pt: Type)(using Context): Boolean = tp.isPrimitiveValueType && (defn.boxedType(tp).classSymbol eq pt.classSymbol) - private val isSubspaceCache = mutable.HashMap.empty[(Space, Space, Context), Boolean] - - override def isSubspace(a: Space, b: Space)(using Context): Boolean = - isSubspaceCache.getOrElseUpdate((a, b, ctx), 
super.isSubspace(a, b)) - /** Is `tp1` a subtype of `tp2`? */ - def isSubType(tp1: Type, tp2: Type): Boolean = trace(i"$tp1 <:< $tp2", debug, show = true) { - if tp1 == constantNullType && !ctx.mode.is(Mode.SafeNulls) - then tp2 == constantNullType + def isSubType(tp1: Type, tp2: Type)(using Context): Boolean = trace(i"$tp1 <:< $tp2", debug, show = true) { + if tp1 == ConstantType(Constant(null)) && !ctx.mode.is(Mode.SafeNulls) + then tp2 == ConstantType(Constant(null)) else tp1 <:< tp2 } - def isSameUnapply(tp1: TermRef, tp2: TermRef): Boolean = + /** True if we can assume that the two unapply methods are the same. + * That is, given the same parameter, they return the same result. + * + * We assume that unapply methods are pure, but the same method may + * be called with different prefixes, thus behaving differently. + */ + def isSameUnapply(tp1: TermRef, tp2: TermRef)(using Context): Boolean = // always assume two TypeTest[S, T].unapply are the same if they are equal in types (tp1.prefix.isStable && tp2.prefix.isStable || tp1.symbol == defn.TypeTest_unapply) && tp1 =:= tp2 - /** Parameter types of the case class type `tp`. Adapted from `unapplyPlan` in patternMatcher */ - def signature(unapp: TermRef, scrutineeTp: Type, argLen: Int): List[Type] = { + /** Return term parameter types of the extractor `unapp`. + * Parameter types of the case class type `tp`. Adapted from `unapplyPlan` in patternMatcher */ + def signature(unapp: TermRef, scrutineeTp: Type, argLen: Int)(using Context): List[Type] = { val unappSym = unapp.symbol // println("scrutineeTp = " + scrutineeTp.show) @@ -592,10 +568,10 @@ class SpaceEngine(using Context) extends SpaceLogic { if (isUnapplySeq) { val (arity, elemTp, resultTp) = unapplySeqInfo(resTp, unappSym.srcPos) - if (elemTp.exists) scalaListType.appliedTo(elemTp) :: Nil + if (elemTp.exists) defn.ListType.appliedTo(elemTp) :: Nil else { val sels = productSeqSelectors(resultTp, arity, unappSym.srcPos) - sels.init :+ scalaListType.appliedTo(sels.last) + sels.init :+ defn.ListType.appliedTo(sels.last) } } else { @@ -616,56 +592,48 @@ class SpaceEngine(using Context) extends SpaceLogic { } /** Whether the extractor covers the given type */ - def covers(unapp: TermRef, scrutineeTp: Type, argLen: Int): Boolean = - SpaceEngine.isIrrefutable(unapp, argLen) || unapp.symbol == defn.TypeTest_unapply && { + def covers(unapp: TermRef, scrutineeTp: Type, argLen: Int)(using Context): Boolean = + SpaceEngine.isIrrefutable(unapp, argLen) + || unapp.symbol == defn.TypeTest_unapply && { val AppliedType(_, _ :: tp :: Nil) = unapp.prefix.widen.dealias: @unchecked scrutineeTp <:< tp } + || unapp.symbol == defn.ClassTagClass_unapply && { + val AppliedType(_, tp :: Nil) = unapp.prefix.widen.dealias: @unchecked + scrutineeTp <:< tp + } /** Decompose a type into subspaces -- assume the type can be decomposed */ - def decompose(tp: Type): List[Typ] = trace(i"decompose($tp)", debug, show(_: Seq[Space])) { - def rec(tp: Type, mixins: List[Type]): List[Typ] = tp.dealias match { + def decompose(tp: Type)(using Context): List[Type] = trace(i"decompose($tp)", debug) { + def rec(tp: Type, mixins: List[Type]): List[Type] = tp.dealias match case AndType(tp1, tp2) => - def decomposeComponent(tpA: Type, tpB: Type): List[Typ] = - rec(tpA, tpB :: mixins).flatMap { - case Typ(tp, _) => - if tp <:< tpB then - Typ(tp, decomposed = true) :: Nil - else if tpB <:< tp then - Typ(tpB, decomposed = true) :: Nil - else if TypeComparer.provablyDisjoint(tp, tpB) then - Nil - else - Typ(AndType(tp, tpB), decomposed = 
true) :: Nil
-        }
-
-      if canDecompose(tp1) then
-        decomposeComponent(tp1, tp2)
-      else
-        decomposeComponent(tp2, tp1)
-
-    case OrType(tp1, tp2) => List(Typ(tp1, true), Typ(tp2, true))
-    case tp if tp.isRef(defn.BooleanClass) =>
-      List(
-        Typ(ConstantType(Constant(true)), true),
-        Typ(ConstantType(Constant(false)), true)
-      )
-    case tp if tp.isRef(defn.UnitClass) =>
-      Typ(ConstantType(Constant(())), true) :: Nil
-    case tp if tp.classSymbol.isAllOf(JavaEnumTrait) =>
-      tp.classSymbol.children.map(sym => Typ(sym.termRef, true))
-
-    case tp @ AppliedType(tycon, targs) if tp.classSymbol.children.isEmpty && canDecompose(tycon) =>
+        var tpB = tp2
+        var parts = rec(tp1, tp2 :: mixins)
+        if parts == ListOfNoType then
+          tpB = tp1
+          parts = rec(tp2, tp1 :: mixins)
+        if parts == ListOfNoType then ListOfNoType
+        else parts.collect:
+          case tp if tp <:< tpB => tp
+          case tp if tpB <:< tp => tpB
+          case tp if !TypeComparer.provablyDisjoint(tp, tpB) => AndType(tp, tpB)
+
+      case OrType(tp1, tp2) => List(tp1, tp2)
+      case tp if tp.isRef(defn.BooleanClass) => List(ConstantType(Constant(true)), ConstantType(Constant(false)))
+      case tp if tp.isRef(defn.UnitClass) => ConstantType(Constant(())) :: Nil
+      case tp @ NamedType(Parts(parts), _) => parts.map(tp.derivedSelect)
+      case _: SingletonType => ListOfNoType
+      case tp if tp.classSymbol.isAllOf(JavaEnumTrait) => tp.classSymbol.children.map(_.termRef)
+      // the class of a java enum value is the enum class, so this must follow SingletonType to not loop infinitely
+
+      case tp @ AppliedType(Parts(parts), targs) if tp.classSymbol.children.isEmpty =>
        // It might not be obvious that it's OK to apply the type arguments of a parent type to child types.
        // But this is guarded by `tp.classSymbol.children.isEmpty`,
        // meaning we'll decompose to the same class, just not the same type.
        // For instance, from i15029, `decompose((X | Y).Field[T]) = [X.Field[T], Y.Field[T]]`.
-        rec(tycon, Nil).map(typ => Typ(tp.derivedAppliedType(typ.tp, targs)))
+        parts.map(tp.derivedAppliedType(_, targs))

-      case tp: NamedType if canDecompose(tp.prefix) =>
-        rec(tp.prefix, Nil).map(typ => Typ(tp.derivedSelect(typ.tp)))
-
-      case tp =>
+      case tp if tp.isDecomposableToChildren =>
        def getChildren(sym: Symbol): List[Symbol] =
          sym.children.flatMap { child =>
            if child eq sym then List(sym) // i3145: sealed trait Baz, val x = new Baz {}, Baz.children returns Baz...
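The sentinel `ListOfNoType` above encodes "this type does not decompose" without a separate result wrapper. A toy model of the same decomposition idea, detached from compiler `Type`s (all names here are invented for illustration; `Option` plays the sentinel's role):

// Toy decomposition: a type either splits into known parts or is atomic.
sealed trait Tpe
case object BooleanTpe extends Tpe
final case class ConstTpe(value: Any) extends Tpe
final case class OrTpe(a: Tpe, b: Tpe) extends Tpe
final case class AtomTpe(name: String) extends Tpe

// None plays the role of ListOfNoType: "this type does not decompose".
def decomposeToy(tp: Tpe): Option[List[Tpe]] = tp match
  case OrTpe(a, b) => Some(List(a, b))
  case BooleanTpe  => Some(List(ConstTpe(true), ConstTpe(false)))
  case _           => None

@main def decomposeToyDemo(): Unit =
  println(decomposeToy(OrTpe(BooleanTpe, AtomTpe("Unit")))) // Some(List(BooleanTpe, AtomTpe(Unit)))
  println(decomposeToy(AtomTpe("Int")))                     // None: atomic, not decomposable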
@@ -675,53 +643,53 @@ class SpaceEngine(using Context) extends SpaceLogic { else List(child) } val children = getChildren(tp.classSymbol) - debug.println(s"candidates for ${tp.show} : [${children.map(_.show).mkString(", ")}]") + debug.println(i"candidates for $tp : $children") val parts = children.map { sym => val sym1 = if (sym.is(ModuleClass)) sym.sourceModule else sym val refined = TypeOps.refineUsingParent(tp, sym1, mixins) + debug.println(i"$sym1 refined to $refined") - debug.println(sym1.show + " refined to " + refined.show) - - def inhabited(tp: Type): Boolean = - tp.dealias match { - case AndType(tp1, tp2) => !TypeComparer.provablyDisjoint(tp1, tp2) - case OrType(tp1, tp2) => inhabited(tp1) || inhabited(tp2) - case tp: RefinedType => inhabited(tp.parent) - case tp: TypeRef => inhabited(tp.prefix) - case _ => true - } + def inhabited(tp: Type): Boolean = tp.dealias match + case AndType(tp1, tp2) => !TypeComparer.provablyDisjoint(tp1, tp2) + case OrType(tp1, tp2) => inhabited(tp1) || inhabited(tp2) + case tp: RefinedType => inhabited(tp.parent) + case tp: TypeRef => inhabited(tp.prefix) + case _ => true - if (inhabited(refined)) refined + if inhabited(refined) then refined else NoType - } filter(_.exists) + }.filter(_.exists) + debug.println(i"$tp decomposes to $parts") + parts - debug.println(s"${tp.show} decomposes to [${parts.map(_.show).mkString(", ")}]") + case _ => ListOfNoType + end rec - parts.map(Typ(_, true)) - } rec(tp, Nil) } - /** Abstract sealed types, or-types, Boolean and Java enums can be decomposed */ - def canDecompose(tp: Type): Boolean = - val res = tp.dealias match - case AppliedType(tycon, _) if canDecompose(tycon) => true - case tp: NamedType if canDecompose(tp.prefix) => true - case _: SingletonType => false - case _: OrType => true - case AndType(tp1, tp2) => canDecompose(tp1) || canDecompose(tp2) - case _ => - val cls = tp.classSymbol - cls.is(Sealed) - && cls.isOneOf(AbstractOrTrait) - && !cls.hasAnonymousChild - && cls.children.nonEmpty - || cls.isAllOf(JavaEnumTrait) - || tp.isRef(defn.BooleanClass) - || tp.isRef(defn.UnitClass) - //debug.println(s"decomposable: ${tp.show} = $res") - res + extension (tp: Type) + /** A type is decomposable to children if it has a simple kind, it's sealed, + * abstract (or a trait) - so its not a sealed concrete class that can be instantiated on its own, + * has no anonymous children, which we wouldn't be able to name as counter-examples, + * but does have children. + * + * A sealed trait with no subclasses is considered not decomposable and thus is treated as an opaque type. + * A sealed trait with subclasses that then get removed after `refineUsingParent`, decomposes to the empty list. + * So that's why we consider whether a type has children. */ + def isDecomposableToChildren(using Context): Boolean = + val cls = tp.classSymbol + tp.hasSimpleKind && cls.is(Sealed) && cls.isOneOf(AbstractOrTrait) && !cls.hasAnonymousChild && cls.children.nonEmpty + + val ListOfNoType = List(NoType) + val ListOfTypNoType = ListOfNoType.map(Typ(_, decomposed = true)) + + object Parts: + def unapply(tp: Type)(using Context): PartsExtractor = PartsExtractor(decompose(tp)) + + final class PartsExtractor(val get: List[Type]) extends AnyVal: + def isEmpty: Boolean = get == ListOfNoType /** Show friendly type name with current scope in mind * @@ -731,7 +699,7 @@ class SpaceEngine(using Context) extends SpaceLogic { * C --> C if current owner is C !!! 
* */ - def showType(tp: Type, showTypeArgs: Boolean = false): String = { + def showType(tp: Type, showTypeArgs: Boolean = false)(using Context): String = { val enclosingCls = ctx.owner.enclosingClass def isOmittable(sym: Symbol) = @@ -772,7 +740,7 @@ class SpaceEngine(using Context) extends SpaceLogic { } /** Whether the counterexample is satisfiable. The space is flattened and non-empty. */ - def satisfiable(sp: Space): Boolean = { + def satisfiable(sp: Space)(using Context): Boolean = { def impossible: Nothing = throw new AssertionError("`satisfiable` only accepts flattened space.") def genConstraint(space: Space): List[(Type, Type)] = space match { @@ -803,10 +771,10 @@ class SpaceEngine(using Context) extends SpaceLogic { checkConstraint(genConstraint(sp))(using ctx.fresh.setNewTyperState()) } - def show(ss: Seq[Space]): String = ss.map(show).mkString(", ") + def showSpaces(ss: Seq[Space])(using Context): String = ss.map(show).mkString(", ") /** Display spaces */ - def show(s: Space): String = { + def show(s: Space)(using Context): String = { def params(tp: Type): List[Type] = tp.classSymbol.primaryConstructor.info.firstParamTypes /** does the companion object of the given symbol have custom unapply */ @@ -820,7 +788,7 @@ class SpaceEngine(using Context) extends SpaceLogic { case Empty => "empty" case Typ(c: ConstantType, _) => "" + c.value.value case Typ(tp: TermRef, _) => - if (flattenList && tp <:< scalaNilType) "" + if (flattenList && tp <:< defn.NilType) "" else tp.symbol.showName case Typ(tp, decomposed) => @@ -828,9 +796,9 @@ class SpaceEngine(using Context) extends SpaceLogic { if (ctx.definitions.isTupleNType(tp)) params(tp).map(_ => "_").mkString("(", ", ", ")") - else if (scalaListType.isRef(sym)) + else if (defn.ListType.isRef(sym)) if (flattenList) "_*" else "_: List" - else if (scalaConsType.isRef(sym)) + else if (defn.ConsType.isRef(sym)) if (flattenList) "_, _*" else "List(_, _*)" else if (tp.classSymbol.is(Sealed) && tp.classSymbol.hasAnonymousChild) "_: " + showType(tp) + " (anonymous)" @@ -842,7 +810,7 @@ class SpaceEngine(using Context) extends SpaceLogic { case Prod(tp, fun, params) => if (ctx.definitions.isTupleNType(tp)) "(" + params.map(doShow(_)).mkString(", ") + ")" - else if (tp.isRef(scalaConsType.symbol)) + else if (tp.isRef(defn.ConsType.symbol)) if (flattenList) params.map(doShow(_, flattenList)).filter(_.nonEmpty).mkString(", ") else params.map(doShow(_, flattenList = true)).filter(!_.isEmpty).mkString("List(", ", ", ")") else { @@ -858,7 +826,7 @@ class SpaceEngine(using Context) extends SpaceLogic { doShow(s, flattenList = false) } - private def exhaustivityCheckable(sel: Tree): Boolean = { + private def exhaustivityCheckable(sel: Tree)(using Context): Boolean = { val seen = collection.mutable.Set.empty[Type] // Possible to check everything, but be compatible with scalac by default @@ -887,8 +855,8 @@ class SpaceEngine(using Context) extends SpaceLogic { res } - /** Whehter counter-examples should be further checked? True for GADTs. */ - private def shouldCheckExamples(tp: Type): Boolean = + /** Whether counter-examples should be further checked? True for GADTs. 
*/ + private def shouldCheckExamples(tp: Type)(using Context): Boolean = new TypeAccumulator[Boolean] { override def apply(b: Boolean, tp: Type): Boolean = tp match { case tref: TypeRef if tref.symbol.is(TypeParam) && variance != 1 => true @@ -899,7 +867,7 @@ class SpaceEngine(using Context) extends SpaceLogic { /** Return the underlying type of non-module, non-constant, non-enum case singleton types. * Also widen ExprType to its result type, and rewrap any annotation wrappers. * For example, with `val opt = None`, widen `opt.type` to `None.type`. */ - def toUnderlying(tp: Type): Type = trace(i"toUnderlying($tp)", show = true)(tp match { + def toUnderlying(tp: Type)(using Context): Type = trace(i"toUnderlying($tp)", show = true)(tp match { case _: ConstantType => tp case tp: TermRef if tp.symbol.is(Module) => tp case tp: TermRef if tp.symbol.isAllOf(EnumCase) => tp @@ -909,16 +877,11 @@ class SpaceEngine(using Context) extends SpaceLogic { case _ => tp }) - def checkExhaustivity(_match: Match): Unit = { - val Match(sel, cases) = _match - debug.println(i"checking exhaustivity of ${_match}") - - if (!exhaustivityCheckable(sel)) return - - val selTyp = toUnderlying(sel.tpe).dealias + def checkExhaustivity(m: Match)(using Context): Unit = if exhaustivityCheckable(m.selector) then trace(i"checkExhaustivity($m)", debug) { + val selTyp = toUnderlying(m.selector.tpe).dealias debug.println(i"selTyp = $selTyp") - val patternSpace = Or(cases.foldLeft(List.empty[Space]) { (acc, x) => + val patternSpace = Or(m.cases.foldLeft(List.empty[Space]) { (acc, x) => val space = if (x.guard.isEmpty) project(x.pat) else Empty debug.println(s"${x.pat.show} ====> ${show(space)}") space :: acc @@ -935,10 +898,10 @@ class SpaceEngine(using Context) extends SpaceLogic { if uncovered.nonEmpty then val hasMore = uncovered.lengthCompare(6) > 0 val deduped = dedup(uncovered.take(6)) - report.warning(PatternMatchExhaustivity(show(deduped), hasMore), sel.srcPos) + report.warning(PatternMatchExhaustivity(showSpaces(deduped), hasMore), m.selector) } - private def redundancyCheckable(sel: Tree): Boolean = + private def redundancyCheckable(sel: Tree)(using Context): Boolean = // Ignore Expr[T] and Type[T] for unreachability as a special case. // Quote patterns produce repeated calls to the same unapply method, but with different implicit parameters. 
// Since we assume that repeated calls to the same unapply method overlap @@ -948,19 +911,15 @@ class SpaceEngine(using Context) extends SpaceLogic { && !sel.tpe.widen.isRef(defn.QuotedExprClass) && !sel.tpe.widen.isRef(defn.QuotedTypeClass) - def checkRedundancy(_match: Match): Unit = { - val Match(sel, _) = _match - val cases = _match.cases.toIndexedSeq - debug.println(i"checking redundancy in $_match") - - if (!redundancyCheckable(sel)) return + def checkRedundancy(m: Match)(using Context): Unit = if redundancyCheckable(m.selector) then trace(i"checkRedundancy($m)", debug) { + val cases = m.cases.toIndexedSeq - val selTyp = toUnderlying(sel.tpe).dealias + val selTyp = toUnderlying(m.selector.tpe).dealias debug.println(i"selTyp = $selTyp") val isNullable = selTyp.classSymbol.isNullableClass val targetSpace = if isNullable - then project(OrType(selTyp, constantNullType, soft = false)) + then project(OrType(selTyp, ConstantType(Constant(null)), soft = false)) else project(selTyp) debug.println(s"targetSpace: ${show(targetSpace)}") @@ -989,6 +948,7 @@ class SpaceEngine(using Context) extends SpaceLogic { for (pat <- deferred.reverseIterator) report.warning(MatchCaseUnreachable(), pat.srcPos) if pat != EmptyTree // rethrow case of catch uses EmptyTree + && !pat.symbol.isAllOf(SyntheticCase, butNot=Method) // ExpandSAMs default cases use SyntheticCase && isSubspace(covered, prev) then { val nullOnly = isNullable && i == len - 1 && isWildcardArg(pat) diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala b/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala index 30eed76b18ec..115d41dd3d46 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala @@ -211,6 +211,23 @@ object JSSymUtils { } } } + + /** Tests whether the semantics of Scala.js require a field for this symbol, + * irrespective of any optimization we think we can do. 
+   *
+   *  This is the case if one of the following is true:
+   *
+   *  - it is a member of a JS type, since it needs to be visible as a JavaScript field
+   *  - it is exported as a static member of the companion class, since it needs to be visible as a JavaScript static field
+   *  - it is exported to the top-level, since that can only be done as a true top-level variable, i.e., a field
+   */
+  def sjsNeedsField(using Context): Boolean =
+    ctx.settings.scalajs.value && (
+      sym.owner.isJSType
+        || sym.hasAnnotation(jsdefn.JSExportTopLevelAnnot)
+        || sym.hasAnnotation(jsdefn.JSExportStaticAnnot)
+    )
+  end sjsNeedsField
 }

 private object JSUnaryOpMethodName {
diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala
index d934dc179989..a2f9a0fb45a3 100644
--- a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala
+++ b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala
@@ -93,6 +93,24 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP
     }
   }

+  private var dynamicImportEnclosingClasses: Set[Symbol] = Set.empty
+
+  private def enterDynamicImportEnclosingClass[A](cls: Symbol)(body: => A): A = {
+    val saved = dynamicImportEnclosingClasses
+    dynamicImportEnclosingClasses = saved + cls
+    try
+      body
+    finally
+      dynamicImportEnclosingClasses = saved
+  }
+
+  private def hasImplicitThisPrefixToDynamicImportEnclosingClass(tpe: Type)(using Context): Boolean =
+    tpe match
+      case tpe: ThisType => dynamicImportEnclosingClasses.contains(tpe.cls)
+      case TermRef(prefix, _) => hasImplicitThisPrefixToDynamicImportEnclosingClass(prefix)
+      case _ => false
+  end hasImplicitThisPrefixToDynamicImportEnclosingClass
+
   /** DefDefs in class templates that export methods to JavaScript */
   private val exporters = mutable.Map.empty[Symbol, mutable.ListBuffer[Tree]]

@@ -297,10 +315,15 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP

           assert(currentOwner.isTerm, s"unexpected owner: $currentOwner at ${tree.sourcePos}")

+          val enclosingClass = currentOwner.enclosingClass
+
           // new DynamicImportThunk { def apply(): Any = body }
           val dynamicImportThunkAnonClass = AnonClass(currentOwner, List(jsdefn.DynamicImportThunkType), span) { cls =>
             val applySym = newSymbol(cls, nme.apply, Method, MethodType(Nil, Nil, defn.AnyType), coord = span).entered
-            val newBody = transform(body).changeOwnerAfter(currentOwner, applySym, thisPhase)
+            val transformedBody = enterDynamicImportEnclosingClass(enclosingClass) {
+              transform(body)
+            }
+            val newBody = transformedBody.changeOwnerAfter(currentOwner, applySym, thisPhase)
             val applyDefDef = DefDef(applySym, newBody)
             List(applyDefDef)
           }
@@ -310,6 +333,14 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP
             .appliedToTypeTree(tpeArg)
             .appliedTo(dynamicImportThunkAnonClass)

+        // #17344 Make `ThisType`-based references to enclosing classes of `js.dynamicImport` explicit
+        case tree: Ident if hasImplicitThisPrefixToDynamicImportEnclosingClass(tree.tpe) =>
+          def rec(tpe: Type): Tree = (tpe: @unchecked) match // exhaustive because of the `if ...
=>` + case tpe: ThisType => This(tpe.cls) + case tpe @ TermRef(prefix, _) => rec(prefix).select(tpe.symbol) + + rec(tree.tpe).withSpan(tree.span) + // Compile-time errors and warnings for js.Dynamic.literal case Apply(Apply(fun, nameArgs), args) if fun.symbol == jsdefn.JSDynamicLiteral_applyDynamic || @@ -888,6 +919,9 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP report.error("A non-native JS trait cannot contain private members", tree) } else if (sym.is(Lazy)) { report.error("A non-native JS trait cannot contain lazy vals", tree) + } else if (sym.is(ParamAccessor)) { + // #12621 + report.error("A non-native JS trait cannot have constructor parameters", tree) } else if (!sym.is(Deferred)) { /* Tell the back-end not to emit this thing. In fact, this only * matters for mixed-in members created from this member. diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index cd33fe9cef24..9c23b7e2024f 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -272,7 +272,7 @@ object Applications { else def selectGetter(qual: Tree): Tree = val getterDenot = qual.tpe.member(getterName) - .accessibleFrom(qual.tpe.widenIfUnstable) // to reset Local + .accessibleFrom(qual.tpe.widenIfUnstable, superAccess = true) // to reset Local if (getterDenot.exists) qual.select(TermRef(qual.tpe, getterName, getterDenot)) else EmptyTree if !meth.isClassConstructor then @@ -444,10 +444,17 @@ trait Applications extends Compatibility { /** The function's type after widening and instantiating polytypes * with TypeParamRefs in constraint set */ - @threadUnsafe lazy val methType: Type = liftedFunType.widen match { - case funType: MethodType => funType - case funType: PolyType => instantiateWithTypeVars(funType) - case tp => tp //was: funType + @threadUnsafe lazy val methType: Type = { + def rec(t: Type): Type = { + t.widen match{ + case funType: MethodType => funType + case funType: PolyType => + rec(instantiateWithTypeVars(funType)) + case tp => tp + } + } + + rec(liftedFunType) } @threadUnsafe lazy val liftedFunType: Type = @@ -714,8 +721,8 @@ trait Applications extends Compatibility { || argMatch == ArgMatch.CompatibleCAP && { val argtpe1 = argtpe.widen - val captured = captureWildcards(argtpe1) - (captured ne argtpe1) && isCompatible(captured, formal.widenExpr) + val captured = captureWildcardsCompat(argtpe1, formal.widenExpr) + captured ne argtpe1 } /** The type of the given argument */ @@ -837,7 +844,7 @@ trait Applications extends Compatibility { var typedArgs = typedArgBuf.toList def app0 = cpy.Apply(app)(normalizedFun, typedArgs) // needs to be a `def` because typedArgs can change later val app1 = - if (!success) app0.withType(UnspecifiedErrorType) + if (!success || typedArgs.exists(_.tpe.isError)) app0.withType(UnspecifiedErrorType) else { if !sameSeq(args, orderedArgs) && !isJavaAnnotConstr(methRef.symbol) @@ -1090,7 +1097,7 @@ trait Applications extends Compatibility { } else { val app = tree.fun match - case _: untpd.Splice if ctx.mode.is(Mode.QuotedPattern) => typedAppliedSplice(tree, pt) + case _: untpd.SplicePattern => typedAppliedSplice(tree, pt) case _ => realApply app match { case Apply(fn @ Select(left, _), right :: Nil) if fn.hasType => @@ -1144,8 +1151,12 @@ trait Applications extends Compatibility { val typedArgs = if (isNamed) typedNamedArgs(tree.args) else tree.args.mapconserve(typedType(_)) record("typedTypeApply") 
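To illustrate the non-native JS trait restriction added above in PrepJSInterop (#12621), here is a hypothetical snippet of the kind the new ParamAccessor check rejects (illustrative only; it assumes the Scala.js plugin is enabled and is expected to fail compilation):

import scala.scalajs.js

// A non-native JS trait: extends js.Object without @js.native.
// Scala 3 trait parameters become param accessors, which the new check flags.
trait Titled(val title: String) extends js.Object
// error: A non-native JS trait cannot have constructor parameters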
typedExpr(tree.fun, PolyProto(typedArgs, pt)) match { - case _: TypeApply if !ctx.isAfterTyper => - errorTree(tree, em"illegal repeated type application") + case fun: TypeApply if !ctx.isAfterTyper => + val function = fun.fun + val args = (fun.args ++ tree.args).map(_.show).mkString(", ") + errorTree(tree, em"""illegal repeated type application + |You might have meant something like: + |${function}[${args}]""") case typedFn => typedFn.tpe.widen match { case pt: PolyType => @@ -1258,8 +1269,6 @@ trait Applications extends Compatibility { def typedUnApply(tree: untpd.Apply, selType: Type)(using Context): Tree = { record("typedUnApply") val Apply(qual, args) = tree - if !ctx.mode.is(Mode.InTypeTest) then - checkMatchable(selType, tree.srcPos, pattern = true) def notAnExtractor(tree: Tree): Tree = // prefer inner errors @@ -1398,12 +1407,13 @@ trait Applications extends Compatibility { val unapplyArgType = mt.paramInfos.head unapp.println(i"unapp arg tpe = $unapplyArgType, pt = $selType") val ownType = - if (selType <:< unapplyArgType) { + if selType <:< unapplyArgType then unapp.println(i"case 1 $unapplyArgType ${ctx.typerState.constraint}") fullyDefinedType(unapplyArgType, "pattern selector", tree.srcPos) selType.dropAnnot(defn.UncheckedAnnot) // need to drop @unchecked. Just because the selector is @unchecked, the pattern isn't. - } - else { + else + if !ctx.mode.is(Mode.InTypeTest) then + checkMatchable(selType, tree.srcPos, pattern = true) // We ignore whether constraining the pattern succeeded. // Constraining only fails if the pattern cannot possibly match, // but useless pattern checks detect more such cases, so we simply rely on them instead. @@ -1412,7 +1422,7 @@ trait Applications extends Compatibility { if (patternBound.nonEmpty) unapplyFn = addBinders(unapplyFn, patternBound) unapp.println(i"case 2 $unapplyArgType ${ctx.typerState.constraint}") unapplyArgType - } + val dummyArg = dummyTreeOfType(ownType) val unapplyApp = typedExpr(untpd.TypedSplice(Apply(unapplyFn, dummyArg :: Nil))) def unapplyImplicits(unapp: Tree): List[Tree] = { @@ -1968,7 +1978,7 @@ trait Applications extends Compatibility { val formals = ref.widen.firstParamTypes if formals.length > idx then formals(idx) match - case defn.FunctionOf(args, _, _, _) => args.length + case defn.FunctionOf(args, _, _) => args.length case _ => -1 else -1 @@ -2052,31 +2062,35 @@ trait Applications extends Compatibility { if isDetermined(alts2) then alts2 else resolveMapped(alts1, _.widen.appliedTo(targs1.tpes), pt1) - case defn.FunctionOf(args, resultType, _, _) => - narrowByTypes(alts, args, resultType) - case pt => - val compat = alts.filterConserve(normalizedCompatible(_, pt, keepConstraint = false)) - if (compat.isEmpty) - /* - * the case should not be moved to the enclosing match - * since SAM type must be considered only if there are no candidates - * For example, the second f should be chosen for the following code: - * def f(x: String): Unit = ??? - * def f: java.io.OutputStream = ??? - * new java.io.ObjectOutputStream(f) - */ - pt match { - case SAMType(mtp) => - narrowByTypes(alts, mtp.paramInfos, mtp.resultType) - case _ => - // pick any alternatives that are not methods since these might be convertible - // to the expected type, or be used as extension method arguments. 
- val convertible = alts.filterNot(alt => - normalize(alt, IgnoredProto(pt)).widenSingleton.isInstanceOf[MethodType]) - if convertible.length == 1 then convertible else compat - } - else compat + val compat0 = pt match + case defn.FunctionOf(args, resType, _) => + narrowByTypes(alts, args, resType) + case _ => + Nil + if (compat0.isEmpty) then + val compat = alts.filterConserve(normalizedCompatible(_, pt, keepConstraint = false)) + if (compat.isEmpty) + /* + * the case should not be moved to the enclosing match + * since SAM type must be considered only if there are no candidates + * For example, the second f should be chosen for the following code: + * def f(x: String): Unit = ??? + * def f: java.io.OutputStream = ??? + * new java.io.ObjectOutputStream(f) + */ + pt match { + case SAMType(mtp) => + narrowByTypes(alts, mtp.paramInfos, mtp.resultType) + case _ => + // pick any alternatives that are not methods since these might be convertible + // to the expected type, or be used as extension method arguments. + val convertible = alts.filterNot(alt => + normalize(alt, IgnoredProto(pt)).widenSingleton.isInstanceOf[MethodType]) + if convertible.length == 1 then convertible else compat + } + else compat + else compat0 } /** The type of alternative `alt` after instantiating its first parameter @@ -2215,7 +2229,7 @@ trait Applications extends Compatibility { val formalsForArg: List[Type] = altFormals.map(_.head) def argTypesOfFormal(formal: Type): List[Type] = formal.dealias match { - case defn.FunctionOf(args, result, isImplicit, isErased) => args + case defn.FunctionOf(args, result, isImplicit) => args case defn.PartialFunctionOf(arg, result) => arg :: Nil case _ => Nil } @@ -2412,4 +2426,9 @@ trait Applications extends Compatibility { def isApplicableExtensionMethod(methodRef: TermRef, receiverType: Type)(using Context): Boolean = methodRef.symbol.is(ExtensionMethod) && !receiverType.isBottomType && tryApplyingExtensionMethod(methodRef, nullLiteral.asInstance(receiverType)).nonEmpty + + def captureWildcardsCompat(tp: Type, pt: Type)(using Context): Type = + val captured = captureWildcards(tp) + if (captured ne tp) && isCompatible(captured, pt) then captured + else tp } diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 817fe6f21d24..df5639b50302 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -506,7 +506,12 @@ object Checking { // note: this is not covered by the next test since terms can be abstract (which is a dual-mode flag) // but they can never be one of ClassOnlyFlags if !sym.isClass && sym.isOneOf(ClassOnlyFlags) then - fail(em"only classes can be ${(sym.flags & ClassOnlyFlags).flagsString}") + val illegal = sym.flags & ClassOnlyFlags + if sym.is(TypeParam) && illegal == Sealed && Feature.ccEnabled && cc.allowUniversalInBoxed then + if !sym.owner.is(Method) then + fail(em"only method type parameters can be sealed") + else + fail(em"only classes can be ${illegal.flagsString}") if (sym.is(AbsOverride) && !sym.owner.is(Trait)) fail(AbstractOverrideOnlyInTraits(sym)) if sym.is(Trait) then @@ -743,13 +748,16 @@ object Checking { if sym.isNoValue && !ctx.isJava then report.error(JavaSymbolIsNotAValue(sym), tree.srcPos) + /** Check that `tree` refers to a value, unless `tree` is selected or applied + * (singleton types x.type don't count as selections). 
+ */ def checkValue(tree: Tree, proto: Type)(using Context): tree.type = tree match - case tree: RefTree - if tree.name.isTermName - && !proto.isInstanceOf[SelectionProto] - && !proto.isInstanceOf[FunOrPolyProto] => - checkValue(tree) + case tree: RefTree if tree.name.isTermName => + proto match + case _: SelectionProto if proto ne SingletonTypeProto => // no value check + case _: FunOrPolyProto => // no value check + case _ => checkValue(tree) case _ => tree @@ -1193,15 +1201,11 @@ trait Checking { */ def checkNoForwardDependencies(vparams: List[ValDef])(using Context): Unit = vparams match { case vparam :: vparams1 => - val check = new TreeTraverser { - def traverse(tree: Tree)(using Context) = tree match { - case id: Ident if vparams.exists(_.symbol == id.symbol) => - report.error(em"illegal forward reference to method parameter", id.srcPos) - case _ => - traverseChildren(tree) - } + vparam.tpt.foreachSubTree { + case id: Ident if vparams.exists(_.symbol == id.symbol) => + report.error(em"illegal forward reference to method parameter", id.srcPos) + case _ => } - check.traverse(vparam.tpt) checkNoForwardDependencies(vparams1) case Nil => } @@ -1461,7 +1465,6 @@ trait Checking { def checkMatchable(tp: Type, pos: SrcPos, pattern: Boolean)(using Context): Unit = if !tp.derivesFrom(defn.MatchableClass) && sourceVersion.isAtLeast(`future-migration`) then - val kind = if pattern then "pattern selector" else "value" report.warning(MatchableWarning(tp, pattern), pos) /** Check that there is an implicit capability to throw a checked exception diff --git a/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala b/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala index ef9599be551c..4087c5faf404 100644 --- a/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala @@ -18,9 +18,6 @@ class CrossVersionChecks extends MiniPhase: override def description: String = CrossVersionChecks.description - override def runsAfterGroupsOf: Set[String] = Set(FirstTransform.name) - // We assume all type trees except TypeTree have been eliminated - // Note: if a symbol has both @deprecated and @migration annotations and both // warnings are enabled, only the first one checked here will be emitted. // I assume that's a consequence of some code trying to avoid noise by suppressing @@ -52,7 +49,8 @@ class CrossVersionChecks extends MiniPhase: owner.isDeprecated || isEnumOwner(owner) - /**Scan the chain of outer declaring scopes from the current context + /**Skip warnings for synthetic members of case classes during declaration and + * scan the chain of outer declaring scopes from the current context * a deprecation warning will be skipped if one the following holds * for a given declaring scope: * - the symbol associated with the scope is also deprecated. @@ -60,27 +58,20 @@ class CrossVersionChecks extends MiniPhase: * a module that declares `sym`, or the companion class of the * module that declares `sym`. 
*/ - def skipWarning(using Context) = - ctx.owner.ownersIterator.exists(if sym.isEnumCase then isDeprecatedOrEnum else _.isDeprecated) + def skipWarning(using Context): Boolean = + (ctx.owner.is(Synthetic) && sym.is(CaseClass)) + || ctx.owner.ownersIterator.exists(if sym.isEnumCase then isDeprecatedOrEnum else _.isDeprecated) - for annot <- sym.getAnnotation(defn.DeprecatedAnnot) do + // Also check for deprecation of the companion class for synthetic methods + val toCheck = sym :: (if sym.isAllOf(SyntheticMethod) then sym.owner.companionClass :: Nil else Nil) + for sym <- toCheck; annot <- sym.getAnnotation(defn.DeprecatedAnnot) do if !skipWarning then val msg = annot.argumentConstant(0).map(": " + _.stringValue).getOrElse("") val since = annot.argumentConstant(1).map(" since " + _.stringValue).getOrElse("") report.deprecationWarning(em"${sym.showLocated} is deprecated${since}${msg}", pos) - private def checkExperimentalSignature(sym: Symbol, pos: SrcPos)(using Context): Unit = - class Checker extends TypeTraverser: - def traverse(tp: Type): Unit = - if tp.typeSymbol.isExperimental then - Feature.checkExperimentalDef(tp.typeSymbol, pos) - else - traverseChildren(tp) - if !sym.isInExperimentalScope then - new Checker().traverse(sym.info) - private def checkExperimentalAnnots(sym: Symbol)(using Context): Unit = - if !sym.isInExperimentalScope then + if sym.exists && !sym.isInExperimentalScope then for annot <- sym.annotations if annot.symbol.isExperimental do Feature.checkExperimentalDef(annot.symbol, annot.tree) @@ -119,13 +110,16 @@ class CrossVersionChecks extends MiniPhase: override def transformValDef(tree: ValDef)(using Context): ValDef = checkDeprecatedOvers(tree) checkExperimentalAnnots(tree.symbol) - checkExperimentalSignature(tree.symbol, tree) tree override def transformDefDef(tree: DefDef)(using Context): DefDef = checkDeprecatedOvers(tree) checkExperimentalAnnots(tree.symbol) - checkExperimentalSignature(tree.symbol, tree) + tree + + override def transformTypeDef(tree: TypeDef)(using Context): TypeDef = + // TODO do we need to check checkDeprecatedOvers(tree)? 
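The companion-class lookup added above changes what user code warns on. A small hypothetical example, assuming compilation with -deprecation:

@deprecated("use Bar instead", "3.3")
case class Foo(x: Int)

def use = Foo(1)
// `Foo(1)` expands to the synthetic `Foo.apply`, which carries no @deprecated
// annotation of its own; with the change above, the deprecation of the
// companion class Foo is checked as well, so this call now warns.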
+ checkExperimentalAnnots(tree.symbol) tree override def transformIdent(tree: Ident)(using Context): Ident = { @@ -157,19 +151,14 @@ class CrossVersionChecks extends MiniPhase: tree } - override def transformTypeDef(tree: TypeDef)(using Context): TypeDef = { - checkExperimentalAnnots(tree.symbol) + override def transformOther(tree: Tree)(using Context): Tree = + tree.foreachSubTree { // Find references in type trees and imports + case tree: Ident => transformIdent(tree) + case tree: Select => transformSelect(tree) + case tree: TypeTree => transformTypeTree(tree) + case _ => + } tree - } - - override def transformOther(tree: Tree)(using Context): Tree = tree match - case tree: Import => - tree.foreachSubTree { - case t: RefTree => checkUndesiredProperties(t.symbol, t.srcPos) - case _ => - } - tree - case _ => tree end CrossVersionChecks diff --git a/compiler/src/dotty/tools/dotc/typer/Dynamic.scala b/compiler/src/dotty/tools/dotc/typer/Dynamic.scala index b69d83b2dcd5..717966923708 100644 --- a/compiler/src/dotty/tools/dotc/typer/Dynamic.scala +++ b/compiler/src/dotty/tools/dotc/typer/Dynamic.scala @@ -2,20 +2,22 @@ package dotty.tools package dotc package typer -import dotty.tools.dotc.ast.Trees._ +import dotty.tools.dotc.ast.Trees.* import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.ast.untpd import dotty.tools.dotc.core.Constants.Constant -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Names.{Name, TermName} -import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.core.Types._ -import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.StdNames.* +import dotty.tools.dotc.core.Types.* +import dotty.tools.dotc.core.Decorators.* import dotty.tools.dotc.core.TypeErasure -import util.Spans._ -import core.Symbols._ -import ErrorReporting._ -import reporting._ +import util.Spans.* +import core.Symbols.* +import ErrorReporting.* +import dotty.tools.dotc.transform.ValueClasses +import dotty.tools.dotc.transform.TypeUtils.isPrimitiveValueType +import reporting.* object Dynamic { private def isDynamicMethod(name: Name): Boolean = @@ -179,12 +181,12 @@ trait Dynamic { val vargss = termArgss(tree) def structuralCall(selectorName: TermName, classOfs: => List[Tree]) = { - val selectable = adapt(qual, defn.SelectableClass.typeRef) + val selectable = adapt(qual, defn.SelectableClass.typeRef | defn.DynamicClass.typeRef) // ($qual: Selectable).$selectorName("$name") val base = untpd.Apply( - untpd.TypedSplice(selectable.select(selectorName)).withSpan(fun.span), + untpd.Select(untpd.TypedSplice(selectable), selectorName).withSpan(fun.span), (Literal(Constant(name.encode.toString)) :: Nil).map(untpd.TypedSplice(_))) val scall = @@ -214,9 +216,33 @@ trait Dynamic { def fail(reason: String): Tree = errorTree(tree, em"Structural access not allowed on method $name because it $reason") + extension (tree: Tree) + /** The implementations of `selectDynamic` and `applyDynamic` in `scala.reflect.SelectDynamic` have no information about the expected return type of a value/method which was declared in the refinement, + * only the JVM type after erasure can be obtained through reflection, e.g. 
+ * + * class Foo(val i: Int) extends AnyVal + * class Reflective extends reflect.Selectable + * val reflective = new Reflective { + * def foo = Foo(1) // Foo at compile time, java.lang.Integer in reflection + * } + * + * Because of that reflective access cannot be implemented properly in `scala.reflect.SelectDynamic` itself + * because it's not known there if the value should be wrapped in a value class constructor call or not. + * Hence the logic of wrapping is performed here, relying on the fact that the implementations of `selectDynamic` and `applyDynamic` in `scala.reflect.SelectDynamic` are final. + */ + def maybeBoxingCast(tpe: Type) = + val maybeBoxed = + if ValueClasses.isDerivedValueClass(tpe.classSymbol) && qual.tpe <:< defn.ReflectSelectableTypeRef then + val genericUnderlying = ValueClasses.valueClassUnbox(tpe.classSymbol.asClass) + val underlying = tpe.select(genericUnderlying).widen.resultType + New(tpe, tree.cast(underlying) :: Nil) + else + tree + maybeBoxed.cast(tpe) + fun.tpe.widen match { case tpe: ValueType => - structuralCall(nme.selectDynamic, Nil).cast(tpe) + structuralCall(nme.selectDynamic, Nil).maybeBoxingCast(tpe) case tpe: MethodType => def isDependentMethod(tpe: Type): Boolean = tpe match { @@ -236,7 +262,7 @@ trait Dynamic { fail(i"has a parameter type with an unstable erasure") :: Nil else TypeErasure.erasure(tpe).asInstanceOf[MethodType].paramInfos.map(clsOf(_)) - structuralCall(nme.applyDynamic, classOfs).cast(tpe.finalResultType) + structuralCall(nme.applyDynamic, classOfs).maybeBoxingCast(tpe.finalResultType) } // (@allanrenucci) I think everything below is dead code diff --git a/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala b/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala index 32b5fde689ec..126d109889e1 100644 --- a/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala +++ b/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala @@ -41,12 +41,24 @@ object ErrorReporting { errorType(WrongNumberOfTypeArgs(fntpe, expectedArgs, actual), pos) def missingArgs(tree: Tree, mt: Type)(using Context): Unit = + def isCallableWithoutArgumentsLists(mt: Type): Boolean = mt match + case pt: PolyType => isCallableWithoutArgumentsLists(pt.resType) + case mt: MethodType if mt.isImplicitMethod => isCallableWithoutArgumentsLists(mt.resType) + case mt: MethodType => false + case _ => true + def isCallableWithSingleEmptyArgumentList(mt: Type): Boolean = + mt match + case mt: MethodType if mt.paramNames.isEmpty => isCallableWithoutArgumentsLists(mt.resType) + case mt: MethodType if mt.isImplicitMethod => isCallableWithSingleEmptyArgumentList(mt.resType) + case pt: PolyType => isCallableWithSingleEmptyArgumentList(pt.resType) + case _ => false val meth = err.exprStr(methPart(tree)) - mt match - case mt: MethodType if mt.paramNames.isEmpty => - report.error(MissingEmptyArgumentList(meth), tree.srcPos) - case _ => - report.error(em"missing arguments for $meth", tree.srcPos) + val info = if tree.symbol.exists then tree.symbol.info else mt + if isCallableWithSingleEmptyArgumentList(info) then + report.error(MissingEmptyArgumentList(meth), tree.srcPos) + else + report.error(MissingArgumentList(meth, tree.symbol), tree.srcPos) + def matchReductionAddendum(tps: Type*)(using Context): String = val collectMatchTrace = new TypeAccumulator[String]: diff --git a/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala b/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala index 46725f0fa6b2..b1513df777ec 100644 --- 
a/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala +++ b/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala @@ -285,8 +285,9 @@ object EtaExpansion extends LiftImpure { val body = Apply(lifted, ids) if (mt.isContextualMethod) body.setApplyKind(ApplyKind.Using) val fn = - if (mt.isContextualMethod) new untpd.FunctionWithMods(params, body, Modifiers(Given)) - else if (mt.isImplicitMethod) new untpd.FunctionWithMods(params, body, Modifiers(Implicit)) + if (mt.isContextualMethod) new untpd.FunctionWithMods(params, body, Modifiers(Given), mt.erasedParams) + else if (mt.isImplicitMethod) new untpd.FunctionWithMods(params, body, Modifiers(Implicit), mt.erasedParams) + else if (mt.hasErasedParams) new untpd.FunctionWithMods(params, body, Modifiers(), mt.erasedParams) else untpd.Function(params, body) if (defs.nonEmpty) untpd.Block(defs.toList map (untpd.TypedSplice(_)), fn) else fn } diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 03d3011b4bcd..4bbd6ee080b6 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -31,6 +31,7 @@ import Feature.migrateTo3 import config.Printers.{implicits, implicitsDetailed} import collection.mutable import reporting._ +import transform.Splicer import annotation.tailrec import scala.annotation.internal.sharable @@ -48,17 +49,19 @@ object Implicits: } /** Both search candidates and successes are references with a specific nesting level. */ - sealed trait RefAndLevel { + sealed trait RefAndLevel extends Showable { def ref: TermRef def level: Int } /** An eligible implicit candidate, consisting of an implicit reference and a nesting level */ - case class Candidate(implicitRef: ImplicitRef, kind: Candidate.Kind, level: Int) extends RefAndLevel { + case class Candidate(implicitRef: ImplicitRef, kind: Candidate.Kind, level: Int) extends RefAndLevel with Showable { def ref: TermRef = implicitRef.underlyingRef def isExtension = (kind & Candidate.Extension) != 0 def isConversion = (kind & Candidate.Conversion) != 0 + + def toText(printer: Printer): Text = printer.toText(this) } object Candidate { type Kind = Int @@ -567,6 +570,12 @@ object Implicits: def msg(using Context) = em"Failed to synthesize an instance of type ${clarify(expectedType)}:${formatReasons}" + class MacroErrorsFailure(errors: List[Diagnostic.Error], + val expectedType: Type, + val argument: Tree) extends SearchFailureType { + def msg(using Context): Message = + em"${errors.map(_.msg).mkString("\n")}" + } end Implicits import Implicits._ @@ -615,6 +624,8 @@ trait ImplicitRunInfo: traverse(t.prefix) case t: ThisType if t.cls.is(Module) && t.cls.isStaticOwner => traverse(t.cls.sourceModule.termRef) + case t: ThisType => + traverse(t.tref) case t: ConstantType => traverse(t.underlying) case t: TypeParamRef => @@ -736,6 +747,7 @@ trait ImplicitRunInfo: * - If `T` is a singleton reference, the anchors of its underlying type, plus, * if `T` is of the form `(P#x).type`, the anchors of `P`. * - If `T` is the this-type of a static object, the anchors of a term reference to that object. + * - If `T` is some other this-type `P.this.type`, the anchors of `P`. * - If `T` is some other type, the union of the anchors of each constituent type of `T`. * * The _implicit scope_ of a type `tp` is the smallest set S of term references (i.e. TermRefs) @@ -923,7 +935,34 @@ trait Implicits: // example where searching for a nested type causes an infinite loop. 
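A small sketch (hypothetical code) of why the new `ThisType` case above matters: a given in the companion of an inner class should be anchored in the implicit scope even when the type is spelled with a plain `P.this` prefix.

```scala
class Outer:
  case class Item(n: Int)
  object Item:
    given ord: Ordering[Item] = Ordering.by(_.n)
  def sortAll(xs: List[Item]): List[Item] = xs.sorted // needs Ordering[Outer.this.Item]
```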
None - MissingImplicitArgument(arg, pt, where, paramSymWithMethodCallTree, ignoredInstanceNormalImport) + def allImplicits(currImplicits: ContextualImplicits): List[ImplicitRef] = + if currImplicits.outerImplicits == null then currImplicits.refs + else currImplicits.refs ::: allImplicits(currImplicits.outerImplicits) + + /** Whether the given type is for an implicit def that's a Scala 2 implicit conversion */ + def isImplicitDefConversion(typ: Type): Boolean = typ match { + case PolyType(_, resType) => isImplicitDefConversion(resType) + case mt: MethodType => !mt.isImplicitMethod && !mt.isContextualMethod + case _ => false + } + + def ignoredConvertibleImplicits = arg.tpe match + case fail: SearchFailureType => + if (fail.expectedType eq pt) || isFullyDefined(fail.expectedType, ForceDegree.none) then + // Get every implicit in scope and try to convert each + allImplicits(ctx.implicits) + .view + .map(_.underlyingRef) + .distinctBy(_.denot) + .filter { imp => + !isImplicitDefConversion(imp.underlying) + && imp.symbol != defn.Predef_conforms + && viewExists(imp, fail.expectedType) + } + else + Nil + + MissingImplicitArgument(arg, pt, where, paramSymWithMethodCallTree, ignoredInstanceNormalImport, ignoredConvertibleImplicits) } /** A string indicating the formal parameter corresponding to a missing argument */ @@ -932,7 +971,7 @@ trait Implicits: case Select(qual, nme.apply) if defn.isFunctionType(qual.tpe.widen) => val qt = qual.tpe.widen val qt1 = qt.dealiasKeepAnnots - def addendum = if (qt1 eq qt) "" else (i"\nThe required type is an alias of: $qt1") + def addendum = if (qt1 eq qt) "" else (i"\nWhere $qt is an alias of: $qt1") i"parameter of ${qual.tpe.widen}$addendum" case _ => i"${ if paramName.is(EvidenceParamName) then "an implicit parameter" @@ -1002,11 +1041,10 @@ trait Implicits: if (argument.isEmpty) i"missing implicit parameter of type $pt after typer at phase ${ctx.phase.phaseName}" else i"type error: ${argument.tpe} does not conform to $pt${err.whyNoMatchStr(argument.tpe, pt)}") - if pt.unusableForInference - || !argument.isEmpty && argument.tpe.unusableForInference - then return NoMatchingImplicitsFailure + val usableForInference = !pt.unusableForInference + && (argument.isEmpty || !argument.tpe.unusableForInference) - val result0 = + val result0 = if usableForInference then // If we are searching implicits when resolving an import symbol, start the search // in the first enclosing context that does not have the same scope and owner as the current // context. Without that precaution, an eligible implicit in the current scope @@ -1023,7 +1061,7 @@ trait Implicits: catch case ce: CyclicReference => ce.inImplicitSearch = true throw ce - end result0 + else NoMatchingImplicitsFailure val result = result0 match { @@ -1052,7 +1090,7 @@ trait Implicits: result } else result - case NoMatchingImplicitsFailure => + case NoMatchingImplicitsFailure if usableForInference => SearchFailure(new NoMatchingImplicits(pt, argument, ctx.typerState.constraint), span) case _ => result0 @@ -1131,19 +1169,22 @@ trait Implicits: if ctx.reporter.hasErrors || !cand.ref.symbol.isAccessibleFrom(cand.ref.prefix) then - ctx.reporter.removeBufferedMessages - adapted.tpe match { + val res = adapted.tpe match { case _: SearchFailureType => SearchFailure(adapted) case error: PreviousErrorType if !adapted.symbol.isAccessibleFrom(cand.ref.prefix) => SearchFailure(adapted.withType(new NestedFailure(error.msg, pt))) - case _ => + case tpe => // Special case for `$conforms` and `<:<.refl`. 
Showing them to the users brings // no value, so we instead report a `NoMatchingImplicitsFailure` if (adapted.symbol == defn.Predef_conforms || adapted.symbol == defn.SubType_refl) NoMatchingImplicitsFailure + else if Splicer.inMacroExpansion && tpe <:< pt then + SearchFailure(adapted.withType(new MacroErrorsFailure(ctx.reporter.allErrors.reverse, pt, argument))) else SearchFailure(adapted.withType(new MismatchedImplicit(ref, pt, argument))) } + ctx.reporter.removeBufferedMessages + res else SearchSuccess(adapted, ref, cand.level, cand.isExtension)(ctx.typerState, ctx.gadt) } @@ -1552,7 +1593,6 @@ trait Implicits: * implicit search. * * @param cand The candidate implicit to be explored. - * @param pt The target type for the above candidate. * @result True if this candidate/pt are divergent, false otherwise. */ def checkDivergence(cand: Candidate): Boolean = diff --git a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala index 3442207653d4..4d027b8750e0 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala @@ -166,14 +166,18 @@ object Inferencing { private var toMaximize: List[TypeVar] = Nil - def apply(x: Boolean, tp: Type): Boolean = - try tp.dealias match + def apply(x: Boolean, tp: Type): Boolean = trace(i"isFullyDefined($tp, $force)", typr) { + try { + val tpd = tp.dealias + if tpd ne tp then apply(x, tpd) + else tp match case _: WildcardType | _: ProtoType => false case tvar: TypeVar if !tvar.isInstantiated => force.appliesTo(tvar) && ctx.typerState.constraint.contains(tvar) && { + var fail = false val direction = instDirection(tvar.origin) if minimizeSelected then if direction <= 0 && tvar.hasLowerBound then @@ -183,20 +187,23 @@ object Inferencing { // else hold off instantiating unbounded unconstrained variable else if direction != 0 then instantiate(tvar, fromBelow = direction < 0) - else if variance >= 0 && (force.ifBottom == IfBottom.ok || tvar.hasLowerBound) then + else if variance >= 0 && tvar.hasLowerBound then + instantiate(tvar, fromBelow = true) + else if (variance > 0 || variance == 0 && !tvar.hasUpperBound) + && force.ifBottom == IfBottom.ok + then // if variance == 0, prefer upper bound if one is given instantiate(tvar, fromBelow = true) else if variance >= 0 && force.ifBottom == IfBottom.fail then - return false + fail = true else toMaximize = tvar :: toMaximize - foldOver(x, tvar) - } - case tp => - reporting.trace(s"IFT $tp") { - foldOver(x, tp) + !fail && foldOver(x, tvar) } + case tp => foldOver(x, tp) + } catch case ex: Throwable => handleRecursive("check fully defined", tp.show, ex) + } def process(tp: Type): Boolean = // Maximize type vars in the order they were visited before */ @@ -307,16 +314,17 @@ object Inferencing { } /** If `tree` has a type lambda type, infer its type parameters by comparing with expected type `pt` */ - def inferTypeParams(tree: Tree, pt: Type)(using Context): Tree = tree.tpe match { + def inferTypeParams(tree: Tree, pt: Type)(using Context): Tree = tree.tpe match case tl: TypeLambda => val (tl1, tvars) = constrained(tl, tree) var tree1 = AppliedTypeTree(tree.withType(tl1), tvars) tree1.tpe <:< pt - fullyDefinedType(tree1.tpe, "template parent", tree.srcPos) - tree1 + if isFullyDefined(tree1.tpe, force = ForceDegree.failBottom) then + tree1 + else + EmptyTree case _ => tree - } def isSkolemFree(tp: Type)(using Context): Boolean = !tp.existsPart(_.isInstanceOf[SkolemType]) @@ -542,6 +550,10 @@ object 
Inferencing {
    case tp: AnnotatedType => tp.derivedAnnotatedType(captureWildcards(tp.parent), tp.annot)
    case _ => tp
  }
+
+  def hasCaptureConversionArg(tp: Type)(using Context): Boolean = tp match
+    case tp: AppliedType => tp.args.exists(_.typeSymbol == defn.TypeBox_CAP)
+    case _ => false
}

trait Inferencing { this: Typer =>
@@ -763,13 +775,14 @@ trait Inferencing { this: Typer =>
  end constrainIfDependentParamRef
}

-/** An enumeration controlling the degree of forcing in "is-dully-defined" checks. */
+/** An enumeration controlling the degree of forcing in "is-fully-defined" checks. */
@sharable object ForceDegree {
-  class Value(val appliesTo: TypeVar => Boolean, val ifBottom: IfBottom)
-  val none: Value = new Value(_ => false, IfBottom.ok)
-  val all: Value = new Value(_ => true, IfBottom.ok)
-  val failBottom: Value = new Value(_ => true, IfBottom.fail)
-  val flipBottom: Value = new Value(_ => true, IfBottom.flip)
+  class Value(val appliesTo: TypeVar => Boolean, val ifBottom: IfBottom):
+    override def toString = s"ForceDegree.Value(.., $ifBottom)"
+  val none: Value = new Value(_ => false, IfBottom.ok) { override def toString = "ForceDegree.none" }
+  val all: Value = new Value(_ => true, IfBottom.ok) { override def toString = "ForceDegree.all" }
+  val failBottom: Value = new Value(_ => true, IfBottom.fail) { override def toString = "ForceDegree.failBottom" }
+  val flipBottom: Value = new Value(_ => true, IfBottom.flip) { override def toString = "ForceDegree.flipBottom" }
}

enum IfBottom:
diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala
index 6f85efb0fc8a..cc4433f75a68 100644
--- a/compiler/src/dotty/tools/dotc/typer/Namer.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala
@@ -541,7 +541,11 @@ class Namer { typer: Typer =>
        res = cpy.TypeDef(modCls)(
          rhs = cpy.Template(modTempl)(
            derived = if (fromTempl.derived.nonEmpty) fromTempl.derived else modTempl.derived,
-            body = fromTempl.body ++ modTempl.body))
+            body = fromTempl.body.filter {
+              case stat: DefDef => stat.name != nme.toString_
+                // toString should only be generated if explicit companion is missing
+              case _ => true
+            } ++ modTempl.body))
        if (fromTempl.derived.nonEmpty) {
          if (modTempl.derived.nonEmpty)
            report.error(em"a class and its companion cannot both have `derives` clauses", mdef.srcPos)
@@ -858,7 +862,6 @@ class Namer { typer: Typer =>
   * with a user-defined method in the same scope with a matching type.
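For reference, a hypothetical example of the clash this invalidation handles: an explicit definition retracts the matching synthetic case-class member.

```scala
case class Pt(x: Int, y: Int):
  def copy(x: Int = x, y: Int = y): Pt = Pt(x, y) // user-defined `copy` wins; the synthetic one is dropped
```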
*/ private def invalidateIfClashingSynthetic(denot: SymDenotation): Unit = - def isCaseClassOrCompanion(owner: Symbol) = owner.isClass && { if (owner.is(Module)) owner.linkedClass.is(CaseClass) @@ -875,10 +878,19 @@ class Namer { typer: Typer => !sd.symbol.is(Deferred) && sd.matches(denot))) val isClashingSynthetic = - denot.is(Synthetic, butNot = ConstructorProxy) - && desugar.isRetractableCaseClassMethodName(denot.name) - && isCaseClassOrCompanion(denot.owner) - && (definesMember || inheritsConcreteMember) + denot.is(Synthetic, butNot = ConstructorProxy) && + ( + (desugar.isRetractableCaseClassMethodName(denot.name) + && isCaseClassOrCompanion(denot.owner) + && (definesMember || inheritsConcreteMember) + ) + || + // remove synthetic constructor of a java Record if it clashes with a non-synthetic constructor + (denot.isConstructor + && denot.owner.is(JavaDefined) && denot.owner.derivesFrom(defn.JavaRecordClass) + && denot.owner.unforcedDecls.lookupAll(denot.name).exists(c => c != denot.symbol && c.info.matches(denot.info)) + ) + ) if isClashingSynthetic then typr.println(i"invalidating clashing $denot in ${denot.owner}") @@ -1227,13 +1239,21 @@ class Namer { typer: Typer => case pt: MethodOrPoly => 1 + extensionParamsCount(pt.resType) case _ => 0 val ddef = tpd.DefDef(forwarder.asTerm, prefss => { + val forwarderCtx = ctx.withOwner(forwarder) val (pathRefss, methRefss) = prefss.splitAt(extensionParamsCount(path.tpe.widen)) val ref = path.appliedToArgss(pathRefss).select(sym.asTerm) - ref.appliedToArgss(adaptForwarderParams(Nil, sym.info, methRefss)) - .etaExpandCFT(using ctx.withOwner(forwarder)) + val rhs = ref.appliedToArgss(adaptForwarderParams(Nil, sym.info, methRefss)) + .etaExpandCFT(using forwarderCtx) + if forwarder.isInlineMethod then + // Eagerly make the body inlineable. `registerInlineInfo` does this lazily + // but it does not get evaluated during typer as the forwarder we are creating + // is already typed. + val inlinableRhs = PrepareInlineable.makeInlineable(rhs)(using forwarderCtx) + PrepareInlineable.registerInlineInfo(forwarder, inlinableRhs)(using forwarderCtx) + inlinableRhs + else + rhs }) - if forwarder.isInlineMethod then - PrepareInlineable.registerInlineInfo(forwarder, ddef.rhs) buf += ddef.withSpan(span) if hasDefaults then foreachDefaultGetterOf(sym.asTerm, @@ -1335,7 +1355,7 @@ class Namer { typer: Typer => * * The idea is that this simulates the hypothetical case where export forwarders * are not generated and we treat an export instead more like an import where we - * expand the use site reference. Test cases in {neg,pos}/i14699.scala. + * expand the use site reference. Test cases in {neg,pos}/i14966.scala. * * @pre Forwarders with the same name are consecutive in `forwarders`. */ @@ -1453,27 +1473,41 @@ class Namer { typer: Typer => * only if parent type contains uninstantiated type parameters. 
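A minimal sketch (assumed setup) of the export case addressed by the eager `makeInlineable` call above: the forwarder generated for an exported inline method is itself inline and already typed, so its body must be made inlineable right away.

```scala
object Impl:
  inline def twice(x: Int): Int = x + x

object Api:
  export Impl.twice // generates roughly: inline def twice(x: Int): Int = Impl.twice(x)

def use: Int = Api.twice(21) // inlines through the forwarder
```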
*/ def parentType(parent: untpd.Tree)(using Context): Type = - if (parent.isType) - typedAheadType(parent, AnyTypeConstructorProto).tpe - else { - val (core, targs) = stripApply(parent) match { + + def typedParentApplication(parent: untpd.Tree): Type = + val (core, targs) = stripApply(parent) match case TypeApply(core, targs) => (core, targs) case core => (core, Nil) - } - core match { + core match case Select(New(tpt), nme.CONSTRUCTOR) => val targs1 = targs map (typedAheadType(_)) val ptype = typedAheadType(tpt).tpe appliedTo targs1.tpes if (ptype.typeParams.isEmpty) ptype - else { + else if (denot.is(ModuleClass) && denot.sourceModule.isOneOf(GivenOrImplicit)) missingType(denot.symbol, "parent ")(using creationContext) fullyDefinedType(typedAheadExpr(parent).tpe, "class parent", parent.srcPos) - } case _ => UnspecifiedErrorType.assertingErrorsReported - } - } + + def typedParentType(tree: untpd.Tree): tpd.Tree = + val parentTpt = typer.typedType(parent, AnyTypeConstructorProto) + val ptpe = parentTpt.tpe + if ptpe.typeParams.nonEmpty + && ptpe.underlyingClassRef(refinementOK = false).exists + then + // Try to infer type parameters from a synthetic application. + // This might yield new info if implicit parameters are resolved. + // A test case is i16778.scala. + val app = untpd.Apply(untpd.Select(untpd.New(parentTpt), nme.CONSTRUCTOR), Nil) + typedParentApplication(app) + app.getAttachment(TypedAhead).getOrElse(parentTpt) + else + parentTpt + + if parent.isType then typedAhead(parent, typedParentType).tpe + else typedParentApplication(parent) + end parentType /** Check parent type tree `parent` for the following well-formedness conditions: * (1) It must be a class type with a stable prefix (@see checkClassTypeWithStablePrefix) @@ -1607,7 +1641,7 @@ class Namer { typer: Typer => case Some(ttree) => ttree case none => val ttree = typed(tree) - xtree.putAttachment(TypedAhead, ttree) + if !ttree.isEmpty then xtree.putAttachment(TypedAhead, ttree) ttree } } diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index 8ba842ad695f..bde279c582e6 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -13,6 +13,8 @@ import Decorators._ import Uniques._ import inlines.Inlines import config.Printers.typr +import Inferencing.* +import ErrorReporting.* import util.SourceFile import TypeComparer.necessarySubType @@ -370,7 +372,7 @@ object ProtoTypes { private def isUndefined(tp: Type): Boolean = tp match { case _: WildcardType => true - case defn.FunctionOf(args, result, _, _) => args.exists(isUndefined) || isUndefined(result) + case defn.FunctionOf(args, result, _) => args.exists(isUndefined) || isUndefined(result) case _ => false } @@ -492,7 +494,21 @@ object ProtoTypes { val targ = cacheTypedArg(arg, typer.typedUnadapted(_, wideFormal, locked)(using argCtx), force = true) - typer.adapt(targ, wideFormal, locked) + val targ1 = typer.adapt(targ, wideFormal, locked) + if wideFormal eq formal then targ1 + else checkNoWildcardCaptureForCBN(targ1) + } + + def checkNoWildcardCaptureForCBN(targ1: Tree)(using Context): Tree = { + if hasCaptureConversionArg(targ1.tpe) then + val tp = stripCast(targ1).tpe + errorTree(targ1, + em"""argument for by-name parameter is not a value + |and contains wildcard arguments: $tp + | + |Assign it to a val and pass that instead. 
+ |""") + else targ1 } /** The type of the argument `arg`, or `NoType` if `arg` has not been typed before @@ -671,10 +687,12 @@ object ProtoTypes { * * [] _ */ - @sharable object AnyFunctionProto extends UncachedGroundType with MatchAlways + @sharable object AnyFunctionProto extends UncachedGroundType with MatchAlways: + override def toString = "AnyFunctionProto" /** A prototype for type constructors that are followed by a type application */ - @sharable object AnyTypeConstructorProto extends UncachedGroundType with MatchAlways + @sharable object AnyTypeConstructorProto extends UncachedGroundType with MatchAlways: + override def toString = "AnyTypeConstructorProto" extension (pt: Type) def isExtensionApplyProto: Boolean = pt match @@ -824,7 +842,7 @@ object ProtoTypes { normalize(et.resultType, pt) case wtp => val iftp = defn.asContextFunctionType(wtp) - if iftp.exists && followIFT then normalize(iftp.dropDependentRefinement.argInfos.last, pt) + if iftp.exists && followIFT then normalize(iftp.functionArgInfos.last, pt) else tp } } @@ -946,8 +964,8 @@ object ProtoTypes { object dummyTreeOfType { def apply(tp: Type)(implicit src: SourceFile): Tree = untpd.Literal(Constant(null)) withTypeUnchecked tp - def unapply(tree: untpd.Tree): Option[Type] = tree match { - case Literal(Constant(null)) => Some(tree.typeOpt) + def unapply(tree: untpd.Tree): Option[Type] = untpd.unsplice(tree) match { + case tree @ Literal(Constant(null)) => Some(tree.typeOpt) case _ => None } } diff --git a/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala b/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala index 65d8abfdf6a7..070449e3ee96 100644 --- a/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala +++ b/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala @@ -11,12 +11,13 @@ import dotty.tools.dotc.core.Decorators._ import dotty.tools.dotc.core.Flags._ import dotty.tools.dotc.core.NameKinds.PatMatGivenVarName import dotty.tools.dotc.core.Names._ -import dotty.tools.dotc.core.StagingContext._ import dotty.tools.dotc.core.StdNames._ import dotty.tools.dotc.core.Symbols._ import dotty.tools.dotc.core.Types._ import dotty.tools.dotc.inlines.PrepareInlineable +import dotty.tools.dotc.staging.StagingLevel.* import dotty.tools.dotc.transform.SymUtils._ +import dotty.tools.dotc.typer.ErrorReporting.errorTree import dotty.tools.dotc.typer.Implicits._ import dotty.tools.dotc.typer.Inferencing._ import dotty.tools.dotc.util.Spans._ @@ -36,21 +37,21 @@ trait QuotesAndSplices { */ def typedQuote(tree: untpd.Quote, pt: Type)(using Context): Tree = { record("typedQuote") - tree.quoted match { - case untpd.Splice(innerExpr) if tree.isTerm && !ctx.mode.is(Mode.Pattern) => + tree.body match { + case _: untpd.Splice if tree.isTerm && !ctx.mode.is(Mode.Pattern) => report.warning("Canceled splice directly inside a quote. 
'{ ${ XYZ } } is equivalent to XYZ.", tree.srcPos) case _ => } - val qctx = inferImplicitArg(defn.QuotesClass.typeRef, tree.span) + val quotes = inferImplicitArg(defn.QuotesClass.typeRef, tree.span) - if qctx.tpe.isInstanceOf[SearchFailureType] then - report.error(missingArgMsg(qctx, defn.QuotesClass.typeRef, ""), ctx.source.atSpan(tree.span)) - else if !qctx.tpe.isStable then - report.error(em"Quotes require stable Quotes, but found non stable $qctx", qctx.srcPos) + if quotes.tpe.isInstanceOf[SearchFailureType] then + report.error(missingArgMsg(quotes, defn.QuotesClass.typeRef, ""), ctx.source.atSpan(tree.span)) + else if !quotes.tpe.isStable then + report.error(em"Quotes require stable Quotes, but found non stable $quotes", quotes.srcPos) if ctx.mode.is(Mode.Pattern) then - typedQuotePattern(tree, pt, qctx).withSpan(tree.span) - else if tree.quoted.isType then + typedQuotePattern(tree, pt, quotes).withSpan(tree.span) + else if tree.isTypeQuote then val msg = em"""Quoted types `'[..]` can only be used in patterns. | |Hint: To get a scala.quoted.Type[T] use scala.quoted.Type.of[T] instead. @@ -58,8 +59,11 @@ trait QuotesAndSplices { report.error(msg, tree.srcPos) EmptyTree else - val exprQuoteTree = untpd.Apply(untpd.ref(defn.QuotedRuntime_exprQuote.termRef), tree.quoted) - makeInlineable(typedApply(exprQuoteTree, pt)(using pushQuotes(qctx)).select(nme.apply).appliedTo(qctx).withSpan(tree.span)) + // TODO typecheck directly (without `exprQuote`) + val exprQuoteTree = untpd.Apply(untpd.ref(defn.QuotedRuntime_exprQuote.termRef), tree.body) + val quotedExpr = typedApply(exprQuoteTree, pt)(using quoteContext) match + case Apply(TypeApply(fn, tpt :: Nil), quotedExpr :: Nil) => untpd.Quote(quotedExpr, Nil).withBodyType(tpt.tpe) + makeInlineable(quotedExpr.select(nme.apply).appliedTo(quotes).withSpan(tree.span)) } private def makeInlineable(tree: Tree)(using Context): Tree = @@ -71,47 +75,60 @@ trait QuotesAndSplices { def typedSplice(tree: untpd.Splice, pt: Type)(using Context): Tree = { record("typedSplice") checkSpliceOutsideQuote(tree) + assert(!ctx.mode.is(Mode.QuotedPattern)) tree.expr match { - case untpd.Quote(innerExpr) if innerExpr.isTerm => + case untpd.Quote(innerExpr, Nil) if innerExpr.isTerm => report.warning("Canceled quote directly inside a splice. 
${ '{ XYZ } } is equivalent to XYZ.", tree.srcPos) + return typed(innerExpr, pt) case _ => } - if (ctx.mode.is(Mode.QuotedPattern)) - if (isFullyDefined(pt, ForceDegree.flipBottom)) { - def spliceOwner(ctx: Context): Symbol = - if (ctx.mode.is(Mode.QuotedPattern)) spliceOwner(ctx.outer) else ctx.owner - val pat = typedPattern(tree.expr, defn.QuotedExprClass.typeRef.appliedTo(pt))( - using spliceContext.retractMode(Mode.QuotedPattern).addMode(Mode.Pattern).withOwner(spliceOwner(ctx))) - val baseType = pat.tpe.baseType(defn.QuotedExprClass) - val argType = if baseType != NoType then baseType.argTypesHi.head else defn.NothingType - ref(defn.QuotedRuntime_exprSplice).appliedToType(argType).appliedTo(pat) - } - else { - report.error(em"Type must be fully defined.\nConsider annotating the splice using a type ascription:\n ($tree: XYZ).", tree.expr.srcPos) - tree.withType(UnspecifiedErrorType) - } - else { - if (StagingContext.level == 0) { - // Mark the first inline method from the context as a macro - def markAsMacro(c: Context): Unit = - if (c.owner eq c.outer.owner) markAsMacro(c.outer) - else if (c.owner.isInlineMethod) c.owner.setFlag(Macro) - else if (!c.outer.owner.is(Package)) markAsMacro(c.outer) - else assert(ctx.reporter.hasErrors) // Did not find inline def to mark as macro - markAsMacro(ctx) - } - - val (outerQctx, ctx1) = popQuotes() + if (level == 0) { + // Mark the first inline method from the context as a macro + def markAsMacro(c: Context): Unit = + if (c.owner eq c.outer.owner) markAsMacro(c.outer) + else if (c.owner.isInlineMethod) c.owner.setFlag(Macro) + else if (!c.outer.owner.is(Package)) markAsMacro(c.outer) + else assert(ctx.reporter.hasErrors) // Did not find inline def to mark as macro + markAsMacro(ctx) + } - val internalSplice = - outerQctx match - case Some(qctxRef) => untpd.Apply(untpd.Apply(untpd.ref(defn.QuotedRuntime_exprNestedSplice.termRef), qctxRef), tree.expr) - case _ => untpd.Apply(untpd.ref(defn.QuotedRuntime_exprSplice.termRef), tree.expr) + // TODO typecheck directly (without `exprSplice`) + val internalSplice = + untpd.Apply(untpd.ref(defn.QuotedRuntime_exprSplice.termRef), tree.expr) + typedApply(internalSplice, pt)(using spliceContext).withSpan(tree.span) match + case tree @ Apply(TypeApply(_, tpt :: Nil), spliced :: Nil) if tree.symbol == defn.QuotedRuntime_exprSplice => + cpy.Splice(tree)(spliced) + case tree => tree + } - typedApply(internalSplice, pt)(using ctx1).withSpan(tree.span) - } + def typedSplicePattern(tree: untpd.SplicePattern, pt: Type)(using Context): Tree = { + record("typedSplicePattern") + if isFullyDefined(pt, ForceDegree.flipBottom) then + def patternOuterContext(ctx: Context): Context = + if (ctx.mode.is(Mode.QuotedPattern)) patternOuterContext(ctx.outer) else ctx + val typedArgs = tree.args.map { + case arg: untpd.Ident => + typedExpr(arg) + case arg => + report.error("Open pattern expected an identifier", arg.srcPos) + EmptyTree + } + for arg <- typedArgs if arg.symbol.is(Mutable) do // TODO support these patterns. 
Possibly using scala.quoted.util.Var + report.error("References to `var`s cannot be used in higher-order pattern", arg.srcPos) + val argTypes = typedArgs.map(_.tpe.widenTermRefExpr) + val patType = if tree.args.isEmpty then pt else defn.FunctionOf(argTypes, pt) + val pat = typedPattern(tree.body, defn.QuotedExprClass.typeRef.appliedTo(patType))( + using spliceContext.retractMode(Mode.QuotedPattern).addMode(Mode.Pattern).withOwner(patternOuterContext(ctx).owner)) + val baseType = pat.tpe.baseType(defn.QuotedExprClass) + val argType = if baseType.exists then baseType.argTypesHi.head else defn.NothingType + untpd.cpy.SplicePattern(tree)(pat, typedArgs).withType(pt) + else + errorTree(tree, em"Type must be fully defined.\nConsider annotating the splice using a type ascription:\n ($tree: XYZ).", tree.body.srcPos) } + def typedHole(tree: untpd.Hole, pt: Type)(using Context): Tree = + throw new UnsupportedOperationException("cannot type check a Hole node") + /** Types a splice applied to some arguments `$f(arg1, ..., argn)` in a quote pattern. * * The tree is desugared into `$f.apply(arg1, ..., argn)` where the expression `$f` @@ -121,28 +138,17 @@ trait QuotesAndSplices { */ def typedAppliedSplice(tree: untpd.Apply, pt: Type)(using Context): Tree = { assert(ctx.mode.is(Mode.QuotedPattern)) - val untpd.Apply(splice: untpd.Splice, args) = tree: @unchecked - if !isFullyDefined(pt, ForceDegree.flipBottom) then - report.error(em"Type must be fully defined.", splice.srcPos) - tree.withType(UnspecifiedErrorType) - else if splice.isInBraces then // ${x}(...) match an application + val untpd.Apply(splice: untpd.SplicePattern, args) = tree: @unchecked + def isInBraces: Boolean = splice.span.end != splice.body.span.end + if isInBraces then // ${x}(...) match an application val typedArgs = args.map(arg => typedExpr(arg)) val argTypes = typedArgs.map(_.tpe.widenTermRefExpr) - val splice1 = typedSplice(splice, defn.FunctionOf(argTypes, pt)) - Apply(splice1.select(nme.apply), typedArgs).withType(pt).withSpan(tree.span) + val splice1 = typedSplicePattern(splice, defn.FunctionOf(argTypes, pt)) + untpd.cpy.Apply(tree)(splice1.select(nme.apply), typedArgs).withType(pt) else // $x(...) higher-order quasipattern - val typedArgs = args.map { - case arg: untpd.Ident => - typedExpr(arg) - case arg => - report.error("Open pattern expected an identifier", arg.srcPos) - EmptyTree - } if args.isEmpty then - report.error("Missing arguments for open pattern", tree.srcPos) - val argTypes = typedArgs.map(_.tpe.widenTermRefExpr) - val typedPat = typedSplice(splice, defn.FunctionOf(argTypes, pt)) - ref(defn.QuotedRuntimePatterns_patternHigherOrderHole).appliedToType(pt).appliedTo(typedPat, SeqLiteral(typedArgs, TypeTree(defn.AnyType))) + report.error("Missing arguments for open pattern", tree.srcPos) + typedSplicePattern(untpd.cpy.SplicePattern(tree)(splice.body, args), pt) } /** Type a pattern variable name `t` in quote pattern as `${given t$giveni: Type[t @ _]}`. 
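To illustrate `typedSplicePattern`, a hypothetical higher-order quote pattern (a sketch assuming a standard macro setup with a `Quotes` in scope); as checked above, the splice arguments must be plain identifiers and must not be `var`s.

```scala
import scala.quoted.*

def simplify(e: Expr[Int => Int])(using Quotes): Expr[Int] =
  e match
    case '{ (y: Int) => $f(y): Int } => '{ $f(0) } // `$f(y)` is typed as a SplicePattern with argument `y`
    case _ => '{ 0 }
```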
@@ -163,10 +169,6 @@ trait QuotesAndSplices { using spliceContext.retractMode(Mode.QuotedPattern).withOwner(spliceOwner(ctx))) pat.select(tpnme.Underlying) - def typedHole(tree: untpd.Hole, pt: Type)(using Context): Tree = - val tpt = typedType(tree.tpt) - assignType(tree, tpt) - private def checkSpliceOutsideQuote(tree: untpd.Tree)(using Context): Unit = if (level == 0 && !ctx.owner.ownersIterator.exists(_.isInlineMethod)) report.error("Splice ${...} outside quotes '{...} or inline method", tree.srcPos) @@ -224,32 +226,17 @@ trait QuotesAndSplices { val freshTypeBindingsBuff = new mutable.ListBuffer[Tree] val typePatBuf = new mutable.ListBuffer[Tree] override def transform(tree: Tree)(using Context) = tree match { - case Typed(Apply(fn, pat :: Nil), tpt) if fn.symbol.isExprSplice && !tpt.tpe.derivesFrom(defn.RepeatedParamClass) => - val tpt1 = transform(tpt) // Transform type bindings - val exprTpt = AppliedTypeTree(TypeTree(defn.QuotedExprClass.typeRef), tpt1 :: Nil) - val newSplice = ref(defn.QuotedRuntime_exprSplice).appliedToType(tpt1.tpe).appliedTo(Typed(pat, exprTpt)) - transform(newSplice) - case Apply(TypeApply(fn, targs), Apply(sp, pat :: Nil) :: args :: Nil) if fn.symbol == defn.QuotedRuntimePatterns_patternHigherOrderHole => - args match // TODO support these patterns. Possibly using scala.quoted.util.Var - case SeqLiteral(args, _) => - for arg <- args; if arg.symbol.is(Mutable) do - report.error("References to `var`s cannot be used in higher-order pattern", arg.srcPos) - try ref(defn.QuotedRuntimePatterns_higherOrderHole.termRef).appliedToTypeTrees(targs).appliedTo(args).withSpan(tree.span) - finally { - val patType = pat.tpe.widen - val patType1 = patType.translateFromRepeated(toArray = false) - val pat1 = if (patType eq patType1) pat else pat.withType(patType1) - patBuf += pat1 - } - case Apply(fn, pat :: Nil) if fn.symbol.isExprSplice => - try ref(defn.QuotedRuntimePatterns_patternHole.termRef).appliedToType(tree.tpe).withSpan(tree.span) - finally { - val patType = pat.tpe.widen - val patType1 = patType.translateFromRepeated(toArray = false) - val pat1 = if (patType eq patType1) pat else pat.withType(patType1) - patBuf += pat1 - } - case Select(pat, _) if tree.symbol.isTypeSplice => + case Typed(splice @ SplicePattern(pat, Nil), tpt) if !tpt.tpe.derivesFrom(defn.RepeatedParamClass) => + transform(tpt) // Collect type bindings + transform(splice) + case SplicePattern(pat, args) => + val patType = pat.tpe.widen + val patType1 = patType.translateFromRepeated(toArray = false) + val pat1 = if (patType eq patType1) pat else pat.withType(patType1) + patBuf += pat1 + if args.isEmpty then ref(defn.QuotedRuntimePatterns_patternHole.termRef).appliedToType(tree.tpe).withSpan(tree.span) + else ref(defn.QuotedRuntimePatterns_higherOrderHole.termRef).appliedToType(tree.tpe).appliedTo(SeqLiteral(args, TypeTree(defn.AnyType))).withSpan(tree.span) + case Select(pat: Bind, _) if tree.symbol.isTypeSplice => val sym = tree.tpe.dealias.typeSymbol if sym.exists then val tdef = TypeDef(sym.asType).withSpan(sym.span) @@ -380,13 +367,13 @@ trait QuotesAndSplices { * ) => ... 
* ``` */ - private def typedQuotePattern(tree: untpd.Quote, pt: Type, qctx: Tree)(using Context): Tree = { - if tree.quoted.isTerm && !pt.derivesFrom(defn.QuotedExprClass) then + private def typedQuotePattern(tree: untpd.Quote, pt: Type, quotes: Tree)(using Context): Tree = { + val quoted = tree.body + if quoted.isTerm && !pt.derivesFrom(defn.QuotedExprClass) then report.error("Quote pattern can only match scrutinees of type scala.quoted.Expr", tree.srcPos) - else if tree.quoted.isType && !pt.derivesFrom(defn.QuotedTypeClass) then + else if quoted.isType && !pt.derivesFrom(defn.QuotedTypeClass) then report.error("Quote pattern can only match scrutinees of type scala.quoted.Type", tree.srcPos) - val quoted = tree.quoted val exprPt = pt.baseType(if quoted.isType then defn.QuotedTypeClass else defn.QuotedExprClass) val quotedPt = exprPt.argInfos.headOption match { case Some(argPt: ValueType) => argPt // excludes TypeBounds @@ -438,13 +425,13 @@ trait QuotesAndSplices { if splices.isEmpty then ref(defn.EmptyTupleModule.termRef) else typed(untpd.Tuple(splices.map(x => untpd.TypedSplice(replaceBindingsInTree.transform(x)))).withSpan(quoted.span), patType) - val quoteClass = if (tree.quoted.isTerm) defn.QuotedExprClass else defn.QuotedTypeClass + val quoteClass = if (quoted.isTerm) defn.QuotedExprClass else defn.QuotedTypeClass val quotedPattern = - if (tree.quoted.isTerm) ref(defn.QuotedRuntime_exprQuote.termRef).appliedToType(defn.AnyType).appliedTo(shape).select(nme.apply).appliedTo(qctx) - else ref(defn.QuotedTypeModule_of.termRef).appliedToTypeTree(shape).appliedTo(qctx) + if (quoted.isTerm) tpd.Quote(shape, Nil).select(nme.apply).appliedTo(quotes) + else ref(defn.QuotedTypeModule_of.termRef).appliedToTypeTree(shape).appliedTo(quotes) - val matchModule = if tree.quoted.isTerm then defn.QuoteMatching_ExprMatch else defn.QuoteMatching_TypeMatch - val unapplyFun = qctx.asInstance(defn.QuoteMatchingClass.typeRef).select(matchModule).select(nme.unapply) + val matchModule = if quoted.isTerm then defn.QuoteMatching_ExprMatch else defn.QuoteMatching_TypeMatch + val unapplyFun = quotes.asInstance(defn.QuoteMatchingClass.typeRef).select(matchModule).select(nme.unapply) UnApply( fun = unapplyFun.appliedToTypeTrees(typeBindingsTuple :: TypeTree(patType) :: Nil), diff --git a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala index b53b2f9ec57a..1fa6e967fbe1 100644 --- a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala +++ b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala @@ -12,6 +12,7 @@ import ast.{tpd, untpd} import scala.util.control.NonFatal import util.Spans.Span import Nullables._ +import staging.StagingLevel.* /** A version of Typer that keeps all symbols defined and referenced in a * previously typed tree. 
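For orientation, a hypothetical quote pattern of the shape `typedQuotePattern` compiles: the pattern below becomes a call to `QuoteMatching.ExprMatch.unapply` on the `Quotes` instance.

```scala
import scala.quoted.*

def unwrapSome[T: Type](e: Expr[Option[T]])(using Quotes): Expr[T] =
  e match
    case '{ Some($x) } => x
    case _ => quotes.reflect.report.errorAndAbort("expected Some(...)")
```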
@@ -94,6 +95,25 @@ class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking override def typedUnApply(tree: untpd.Apply, selType: Type)(using Context): Tree = typedApply(tree, selType) + override def typedQuote(tree: untpd.Quote, pt: Type)(using Context): Tree = + assertTyped(tree) + val body1 = typed(tree.body, tree.bodyType)(using quoteContext) + for tag <- tree.tags do assertTyped(tag) + untpd.cpy.Quote(tree)(body1, tree.tags).withType(tree.typeOpt) + + override def typedSplice(tree: untpd.Splice, pt: Type)(using Context): Tree = + assertTyped(tree) + val exprType = // Expr[T] + defn.QuotedExprClass.typeRef.appliedTo(tree.typeOpt) + val quoteType = // Quotes ?=> Expr[T] + defn.FunctionType(1, isContextual = true) + .appliedTo(defn.QuotesClass.typeRef, exprType) + val expr1 = typed(tree.expr, quoteType)(using spliceContext) + untpd.cpy.Splice(tree)(expr1).withType(tree.typeOpt) + + override def typedHole(tree: untpd.Hole, pt: Type)(using Context): Tree = + promote(tree) + override def localDummy(cls: ClassSymbol, impl: untpd.Template)(using Context): Symbol = impl.symbol override def retrieveSym(tree: untpd.Tree)(using Context): Symbol = tree.symbol @@ -124,12 +144,10 @@ class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking override def typedUnadapted(tree: untpd.Tree, pt: Type, locked: TypeVars)(using Context): Tree = try super.typedUnadapted(tree, pt, locked) - catch { - case NonFatal(ex) => - if ctx.phase != Phases.typerPhase && ctx.phase != Phases.inliningPhase then - println(i"exception while typing $tree of class ${tree.getClass} # ${tree.uniqueId}") - throw ex - } + catch case NonFatal(ex) if ctx.phase != Phases.typerPhase && ctx.phase != Phases.inliningPhase && !ctx.run.enrichedErrorMessage => + val treeStr = tree.show(using ctx.withPhase(ctx.phase.prevMega)) + println(ctx.run.enrichErrorMessage(s"exception while retyping $treeStr of class ${tree.className} # ${tree.uniqueId}")) + throw ex override def inlineExpansion(mdef: DefDef)(using Context): List[Tree] = mdef :: Nil diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index 3d53371e603e..025eae3606af 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -295,7 +295,7 @@ object RefChecks { * TODO This still needs to be cleaned up; the current version is a straight port of what was there * before, but it looks too complicated and method bodies are far too large. 
 *
- * @param makeOverridePairsChecker A function for creating a OverridePairsChecker instance
+ * @param makeOverridingPairsChecker A function for creating an OverridingPairsChecker instance
 *        from the class symbol and the self type
 */
  def checkAllOverrides(clazz: ClassSymbol, makeOverridingPairsChecker: ((ClassSymbol, Type) => Context ?=> OverridingPairsChecker) | Null = null)(using Context): Unit = {
@@ -1090,6 +1090,12 @@ object RefChecks {

  end checkImplicitNotFoundAnnotation

+  def checkAnyRefMethodCall(tree: Tree)(using Context) =
+    if tree.symbol.exists
+      && defn.topClasses.contains(tree.symbol.owner)
+      && (!ctx.owner.enclosingClass.exists || ctx.owner.enclosingClass.isPackageObject) then
+      report.warning(UnqualifiedCallToAnyRefMethod(tree, tree.symbol), tree)
+
}
import RefChecks._
@@ -1163,12 +1169,16 @@ class RefChecks extends MiniPhase { thisPhase =>
      checkAllOverrides(cls)
      checkImplicitNotFoundAnnotation.template(cls.classDenot)
      tree
-    }
-    catch {
+    } catch {
      case ex: TypeError =>
        report.error(ex, tree.srcPos)
        tree
    }
+
+  override def transformIdent(tree: Ident)(using Context): Tree =
+    checkAnyRefMethodCall(tree)
+    tree
+
}

/* todo: rewrite and re-enable
@@ -1679,7 +1689,7 @@ class RefChecks extends MiniPhase { thisPhase =>
//        if (settings.warnNullaryUnit)
//          checkNullaryMethodReturnType(sym)
//        if (settings.warnInaccessible) {
-//          if (!sym.isConstructor && !sym.isEffectivelyFinal && !sym.isSynthetic)
+//          if (!sym.isEffectivelyFinal && !sym.isSynthetic)
//            checkAccessibilityOfReferencedTypes(tree)
//        }
//        tree match {
diff --git a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala
index 71efc27bf673..103961b68c29 100644
--- a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala
@@ -52,14 +52,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context):
      if defn.SpecialClassTagClasses.contains(sym) then
        classTagModul.select(sym.name.toTermName).withSpan(span)
      else
-        def clsOfType(tp: Type): Type = tp.dealias.underlyingMatchType match
-          case matchTp: MatchType =>
-            matchTp.alternatives.map(clsOfType) match
-              case ct1 :: cts if cts.forall(ct1 == _) => ct1
-              case _ => NoType
-          case _ =>
-            escapeJavaArray(erasure(tp))
-        val ctype = clsOfType(tp)
+        val ctype = escapeJavaArray(erasure(tp))
        if ctype.exists then
          classTagModul.select(nme.apply)
            .appliedToType(tp)
@@ -110,12 +103,12 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context):
    def functionTypeEqual(baseFun: Type, actualArgs: List[Type], actualRet: Type, expected: Type) =
      expected =:= defn.FunctionOf(actualArgs, actualRet,
-        defn.isContextFunctionType(baseFun), defn.isErasedFunctionType(baseFun))
+        defn.isContextFunctionType(baseFun))
    val arity: Int =
-      if defn.isErasedFunctionType(fun) || defn.isErasedFunctionType(fun) then -1 // TODO support?
+      if defn.isErasedFunctionType(fun) then -1 // TODO support?
      else if defn.isFunctionType(fun) then
        // TupledFunction[(...) => R, ?]
-        fun.dropDependentRefinement.dealias.argInfos match
+        fun.functionArgInfos match
          case funArgs :+ funRet
          if functionTypeEqual(fun, defn.tupleType(funArgs) :: Nil, funRet, tupled) =>
            // TupledFunction[(...funArgs...) => funRet, ?]
@@ -123,7 +116,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context):
          case _ => -1
      else if defn.isFunctionType(tupled) then
        // TupledFunction[?, (...)
=> R] - tupled.dropDependentRefinement.dealias.argInfos match + tupled.functionArgInfos match case tupledArgs :: funRet :: Nil => defn.tupleTypes(tupledArgs.dealias) match case Some(funArgs) if functionTypeEqual(tupled, funArgs, funRet, fun) => diff --git a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala index 98e9cb638c17..be6121e13209 100644 --- a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -18,7 +18,7 @@ trait TypeAssigner { import TypeAssigner.* /** The qualifying class of a this or super with prefix `qual` (which might be empty). - * @param packageOk The qualifier may refer to a package. + * @param packageOK The qualifier may refer to a package. */ def qualifyingClass(tree: untpd.Tree, qual: Name, packageOK: Boolean)(using Context): Symbol = { def qualifies(sym: Symbol) = @@ -77,21 +77,25 @@ trait TypeAssigner { * (2) in Java compilation units, `Object` is replaced by `defn.FromJavaObjectType` */ def accessibleType(tpe: Type, superAccess: Boolean)(using Context): Type = - tpe match + if ctx.isJava && tpe.isAnyRef then + defn.FromJavaObjectType + else tpe match case tpe: NamedType => - val pre = tpe.prefix - val name = tpe.name - def postProcess(d: Denotation) = - if ctx.isJava && tpe.isAnyRef then defn.FromJavaObjectType - else TypeOps.makePackageObjPrefixExplicit(tpe withDenot d) - val d = tpe.denot.accessibleFrom(pre, superAccess) - if d.exists then postProcess(d) + val tpe1 = TypeOps.makePackageObjPrefixExplicit(tpe) + if tpe1 ne tpe then + accessibleType(tpe1, superAccess) else - // it could be that we found an inaccessible private member, but there is - // an inherited non-private member with the same name and signature. - val d2 = pre.nonPrivateMember(name).accessibleFrom(pre, superAccess) - if reallyExists(d2) then postProcess(d2) - else NoType + val pre = tpe.prefix + val name = tpe.name + val d = tpe.denot.accessibleFrom(pre, superAccess) + if d eq tpe.denot then tpe + else if d.exists then tpe.withDenot(d) + else + // it could be that we found an inaccessible private member, but there is + // an inherited non-private member with the same name and signature. + val d2 = pre.nonPrivateMember(name).accessibleFrom(pre, superAccess) + if reallyExists(d2) then tpe.withDenot(d2) + else NoType case tpe => tpe /** Try to make `tpe` accessible, emit error if not possible */ diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index eb09d30e60f3..74be1dee9a9b 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -44,6 +44,7 @@ import config.Feature import config.Feature.{sourceVersion, migrateTo3} import config.SourceVersion._ import rewrites.Rewrites.patch +import staging.StagingLevel import transform.SymUtils._ import transform.TypeUtils._ import reporting._ @@ -158,8 +159,13 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * @param required flags the result's symbol must have * @param excluded flags the result's symbol must not have * @param pos indicates position to use for error reporting + * @param altImports a ListBuffer in which alternative imported references are + * collected in case `findRef` is called from an expansion of + * an extension method, i.e. when `e.m` is expanded to `m(e)` and + * a reference for `m` is searched. `null` in all other situations. 
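A hypothetical example of the alternative-import search described above (assuming the experimental `relaxedExtensionImports` language feature is enabled):

```scala
object StrOps { extension (s: String) def wow: Unit = println(s) }
object IntOps { extension (i: Int) def wow: Unit = println(i) }

object Use:
  import scala.language.experimental.relaxedExtensionImports
  import StrOps.*
  import IntOps.*
  def run(): Unit = "hi".wow // both imports are collected as alternatives; only StrOps applies
```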
*/
-  def findRef(name: Name, pt: Type, required: FlagSet, excluded: FlagSet, pos: SrcPos)(using Context): Type = {
+  def findRef(name: Name, pt: Type, required: FlagSet, excluded: FlagSet, pos: SrcPos,
+      altImports: mutable.ListBuffer[TermRef] | Null = null)(using Context): Type = {
    val refctx = ctx
    val noImports = ctx.mode.is(Mode.InPackageClauseName)
    def suppressErrors = excluded.is(ConstructorProxy)
@@ -230,15 +236,52 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer
          fail(AmbiguousReference(name, newPrec, prevPrec, prevCtx))
        previous

-      /** Recurse in outer context. If final result is same as `previous`, check that it
-       * is new or shadowed. This order of checking is necessary since an
-       * outer package-level definition might trump two conflicting inner
-       * imports, so no error should be issued in that case. See i7876.scala.
+      /** Assemble and check alternatives to an imported reference. This implies:
+       * - If we expand an extension method (i.e. altImports != null),
+       *   search imports on the same level for other possible resolutions of `name`.
+       *   The result and altImports together then contain all possible imported
+       *   references of the highest possible precedence, where `NamedImport` beats
+       *   `WildImport`.
+       * - Find a possibly shadowing reference in an outer context.
+       *   If the result is the same as `previous`, check that it is new or
+       *   shadowed. This order of checking is necessary since an outer package-level
+       *   definition might trump two conflicting inner imports, so no error should be
+       *   issued in that case. See i7876.scala.
+       * @param previous the previously found reference (which is an import)
+       * @param prevPrec the precedence of the reference (either NamedImport or WildImport)
+       * @param prevCtx the context in which the reference was found
+       * @param using_Context the outer context of `prevCtx`
       */
-      def recurAndCheckNewOrShadowed(previous: Type, prevPrec: BindingPrec, prevCtx: Context)(using Context): Type =
-        val found = findRefRecur(previous, prevPrec, prevCtx)
-        if found eq previous then checkNewOrShadowed(found, prevPrec)(using prevCtx)
-        else found
+      def checkImportAlternatives(previous: Type, prevPrec: BindingPrec, prevCtx: Context)(using Context): Type =
+
+        def addAltImport(altImp: TermRef) =
+          if !TypeComparer.isSameRef(previous, altImp)
+            && !altImports.uncheckedNN.exists(TypeComparer.isSameRef(_, altImp))
+          then
+            altImports.uncheckedNN += altImp
+
+        if Feature.enabled(Feature.relaxedExtensionImports) && altImports != null && ctx.isImportContext then
+          val curImport = ctx.importInfo.uncheckedNN
+          namedImportRef(curImport) match
+            case altImp: TermRef =>
+              if prevPrec == WildImport then
+                // Discard all previously found references and continue with `altImp`
+                altImports.clear()
+                checkImportAlternatives(altImp, NamedImport, ctx)(using ctx.outer)
+              else
+                addAltImport(altImp)
+                checkImportAlternatives(previous, prevPrec, prevCtx)(using ctx.outer)
+            case _ =>
+              if prevPrec == WildImport then
+                wildImportRef(curImport) match
+                  case altImp: TermRef => addAltImport(altImp)
+                  case _ =>
+              checkImportAlternatives(previous, prevPrec, prevCtx)(using ctx.outer)
+        else
+          val found = findRefRecur(previous, prevPrec, prevCtx)
+          if found eq previous then checkNewOrShadowed(found, prevPrec)(using prevCtx)
+          else found
+      end checkImportAlternatives

      def selection(imp: ImportInfo, name: Name, checkBounds: Boolean): Type =
        imp.importSym.info match
@@ -328,7 +371,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer
      if (ctx.scope eq EmptyScope)
previous else { var result: Type = NoType - val curOwner = ctx.owner /** Is curOwner a package object that should be skipped? @@ -449,11 +491,11 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else if (isPossibleImport(NamedImport) && (curImport nen outer.importInfo)) { val namedImp = namedImportRef(curImport.uncheckedNN) if (namedImp.exists) - recurAndCheckNewOrShadowed(namedImp, NamedImport, ctx)(using outer) + checkImportAlternatives(namedImp, NamedImport, ctx)(using outer) else if (isPossibleImport(WildImport) && !curImport.nn.importSym.isCompleting) { val wildImp = wildImportRef(curImport.uncheckedNN) if (wildImp.exists) - recurAndCheckNewOrShadowed(wildImp, WildImport, ctx)(using outer) + checkImportAlternatives(wildImp, WildImport, ctx)(using outer) else { updateUnimported() loop(ctx)(using outer) @@ -543,22 +585,40 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer unimported = saved1 foundUnderScala2 = saved2 - def checkNotShadowed(ownType: Type) = ownType match - case ownType: TermRef if ownType.symbol.is(ConstructorProxy) => - val shadowed = findRef(name, pt, EmptyFlags, ConstructorProxy, tree.srcPos) - if shadowed.exists then - report.error( - em"""Reference to constructor proxy for ${ownType.symbol.companionClass.showLocated} - |shadows outer reference to ${shadowed.termSymbol.showLocated}""", tree.srcPos) - case _ => + /** Normally, returns `ownType` except if `ownType` is a constructor proxy, + * and there is another shadowed type accessible with the same name that is not: + * - if the prototype is an application: + * - if the shadowed type has a method alternative or an apply method, + * issue an ambiguity error + * - otherwise again return `ownType` + * - if the prototype is not an application, return the shadowed type + */ + def checkNotShadowed(ownType: Type): Type = + ownType match + case ownType: TermRef if ownType.symbol.is(ConstructorProxy) => + findRef(name, pt, EmptyFlags, ConstructorProxy, tree.srcPos) match + case shadowed: TermRef if !shadowed.symbol.maybeOwner.isEmptyPackage => + pt match + case pt: FunOrPolyProto => + def err(shadowedIsApply: Boolean) = + report.error(ConstrProxyShadows(ownType, shadowed, shadowedIsApply), tree.srcPos) + if shadowed.denot.hasAltWith(sd => sd.symbol.is(Method, butNot = Accessor)) then + err(shadowedIsApply = false) + else if shadowed.member(nme.apply).hasAltWith(_.symbol.is(Method, butNot = Accessor)) then + err(shadowedIsApply = true) + case _ => + return shadowed + case shadowed => + case _ => + ownType def setType(ownType: Type): Tree = - checkNotShadowed(ownType) - val tree1 = ownType match - case ownType: NamedType if !prefixIsElidable(ownType) => - ref(ownType).withSpan(tree.span) + val checkedType = checkNotShadowed(ownType) + val tree1 = checkedType match + case checkedType: NamedType if !prefixIsElidable(checkedType) => + ref(checkedType).withSpan(tree.span) case _ => - tree.withType(ownType) + tree.withType(checkedType) val tree2 = toNotNullTermRef(tree1, pt) checkLegalValue(tree2, pt) tree2 @@ -635,6 +695,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // There's a second trial where we try to instantiate all type variables in `qual.tpe.widen`, // but that is done only after we search for extension methods or conversions. 
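A hypothetical sketch of the situation `checkNotShadowed` now reports: a constructor proxy hides an imported method of the same name.

```scala
object Lib:
  def Box(n: Int): String = s"boxed: $n"

import Lib.*

object Client:
  class Box(n: Int) // its constructor proxy shadows the imported `Lib.Box`
  // val b = Box(1) // with an application prototype this is now a ConstrProxyShadows error
```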
typedSelect(tree, pt, qual) + else if defn.isSmallGenericTuple(qual.tpe) then + val elems = defn.tupleTypes(qual.tpe.widenTermRefExpr).getOrElse(Nil) + typedSelect(tree, pt, qual.cast(defn.tupleType(elems))) else val tree1 = tryExtensionOrConversion( tree, pt, IgnoredProto(pt), qual, ctx.typerState.ownedVars, this, inSelect = true) @@ -654,6 +717,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if checkedType1.exists then gadts.println(i"Member selection healed by GADT approximation") finish(tree1, qual1, checkedType1) + else if defn.isSmallGenericTuple(qual1.tpe) then + gadts.println(i"Tuple member selection healed by GADT approximation") + typedSelect(tree, pt, qual1) else tryExtensionOrConversion(tree1, pt, IgnoredProto(pt), qual1, ctx.typerState.ownedVars, this, inSelect = true) else EmptyTree @@ -837,14 +903,11 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer isSkolemFree(pt) && isEligible(pt.underlyingClassRef(refinementOK = false))) templ1 = cpy.Template(templ)(parents = untpd.TypeTree(pt) :: Nil) - templ1.parents foreach { - case parent: RefTree => - typedAhead(parent, tree => inferTypeParams(typedType(tree), pt)) - case _ => - } - val x = tpnme.ANON_CLASS - val clsDef = TypeDef(x, templ1).withFlags(Final | Synthetic) - typed(cpy.Block(tree)(clsDef :: Nil, New(Ident(x), Nil)), pt) + for case parent: RefTree <- templ1.parents do + typedAhead(parent, tree => inferTypeParams(typedType(tree), pt)) + val anon = tpnme.ANON_CLASS + val clsDef = TypeDef(anon, templ1).withFlags(Final | Synthetic) + typed(cpy.Block(tree)(clsDef :: Nil, New(Ident(anon), Nil)), pt) case _ => var tpt1 = typedType(tree.tpt) val tsym = tpt1.tpe.underlyingClassRef(refinementOK = false).typeSymbol @@ -1099,6 +1162,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val (stats1, exprCtx) = withoutMode(Mode.Pattern) { typedBlockStats(tree.stats) } + var expr1 = typedExpr(tree.expr, pt.dropIfProto)(using exprCtx) // If unsafe nulls is enabled inside a block but not enabled outside @@ -1217,8 +1281,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer ) end typedIf - /** Decompose function prototype into a list of parameter prototypes and a result prototype - * tree, using WildcardTypes where a type is not known. + /** Decompose function prototype into a list of parameter prototypes and a result + * prototype tree, using WildcardTypes where a type is not known. + * Note: parameter prototypes may be TypeBounds. * For the result type we do this even if the expected type is not fully * defined, which is a bit of a hack. But it's needed to make the following work * (see typers.scala and printers/PlainPrinter.scala for examples). @@ -1254,9 +1319,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // if expected parameter type(s) are wildcards, approximate from below. // if expected result type is a wildcard, approximate from above. // this can type the greatest set of admissible closures. 
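// Illustrative note (hypothetical prototype, not from this patch): decomposing
//   pt = (? <: Int) => ?
// keeps the parameter prototype as its TypeBounds (see the note added above) and
// approximates the unknown result from above via its hi bound.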
- (pt1.argTypesLo.init, typeTree(interpolateWildcards(pt1.argTypesHi.last))) + + (pt1.argInfos.init, typeTree(interpolateWildcards(pt1.argInfos.last.hiBound))) case RefinedType(parent, nme.apply, mt @ MethodTpe(_, formals, restpe)) - if defn.isNonRefinedFunction(parent) && formals.length == defaultArity => + if (defn.isNonRefinedFunction(parent) || defn.isErasedFunctionType(parent)) && formals.length == defaultArity => (formals, untpd.DependentTypeTree(syms => restpe.substParams(mt, syms.map(_.termRef)))) case SAMType(mt @ MethodTpe(_, formals, restpe)) => (formals, @@ -1287,20 +1353,22 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * If both attempts fail, return `NoType`. */ def inferredFromTarget( - param: untpd.ValDef, formal: Type, calleeType: Type, paramIndex: Name => Int)(using Context): Type = + param: untpd.ValDef, formal: Type, calleeType: Type, isErased: Boolean, paramIndex: Name => Int)(using Context): Type = val target = calleeType.widen match case mtpe: MethodType => val pos = paramIndex(param.name) if pos < mtpe.paramInfos.length then - mtpe.paramInfos(pos) + val tp = mtpe.paramInfos(pos) // This works only if vararg annotations match up. // See neg/i14367.scala for an example where the inferred type is mispredicted. // Nevertheless, the alternative would be to give up completely, so this is // defensible. + // Strip inferred erased annotation, to avoid accidentally inferring erasedness + if !isErased then tp.stripAnnots(_.symbol != defn.ErasedParamAnnot) else tp else NoType case _ => NoType if target.exists then formal <:< target - if isFullyDefined(formal, ForceDegree.flipBottom) then formal + if !formal.isExactlyNothing && isFullyDefined(formal, ForceDegree.flipBottom) then formal else if target.exists && isFullyDefined(target, ForceDegree.flipBottom) then target else NoType @@ -1310,32 +1378,19 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def typedFunctionType(tree: untpd.Function, pt: Type)(using Context): Tree = { val untpd.Function(args, body) = tree - var funFlags = tree match { - case tree: untpd.FunctionWithMods => tree.mods.flags - case _ => EmptyFlags + body match + case untpd.CapturesAndResult(refs, result) => + return typedUnadapted(untpd.makeRetaining( + cpy.Function(tree)(args, result), refs, tpnme.retains), pt) + case _ => + var (funFlags, erasedParams) = tree match { + case tree: untpd.FunctionWithMods => (tree.mods.flags, tree.erasedParams) + case _ => (EmptyFlags, args.map(_ => false)) } - assert(!funFlags.is(Erased) || !args.isEmpty, "An empty function cannot not be erased") - val numArgs = args.length val isContextual = funFlags.is(Given) - val isErased = funFlags.is(Erased) val isImpure = funFlags.is(Impure) - val funSym = defn.FunctionSymbol(numArgs, isContextual, isErased, isImpure) - - /** If `app` is a function type with arguments that are all erased classes, - * turn it into an erased function type. 
- */ - def propagateErased(app: Tree): Tree = app match - case AppliedTypeTree(tycon: TypeTree, args) - if !isErased - && numArgs > 0 - && args.indexWhere(!_.tpe.isErasedClass) == numArgs => - val tycon1 = TypeTree(defn.FunctionSymbol(numArgs, isContextual, true, isImpure).typeRef) - .withSpan(tycon.span) - assignType(cpy.AppliedTypeTree(app)(tycon1, args), tycon1, args) - case _ => - app /** Typechecks dependent function type with given parameters `params` */ def typedDependent(params: List[untpd.ValDef])(using Context): Tree = @@ -1350,16 +1405,29 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if funFlags.is(Given) then params.map(_.withAddedFlags(Given)) else params val params2 = params1.map(fixThis.transformSub) - val appDef0 = untpd.DefDef(nme.apply, List(params2), body, EmptyTree).withSpan(tree.span) + val params3 = params2.zipWithConserve(erasedParams) { (arg, isErased) => + if isErased then arg.withAddedFlags(Erased) else arg + } + val appDef0 = untpd.DefDef(nme.apply, List(params3), body, EmptyTree).withSpan(tree.span) index(appDef0 :: Nil) val appDef = typed(appDef0).asInstanceOf[DefDef] val mt = appDef.symbol.info.asInstanceOf[MethodType] if (mt.isParamDependent) report.error(em"$mt is an illegal function type because it has inter-parameter dependencies", tree.srcPos) + // Restart typechecking if there are erased classes that we want to mark erased + if mt.erasedParams.zip(mt.paramInfos.map(_.isErasedClass)).exists((paramErased, classErased) => classErased && !paramErased) then + val newParams = params3.zipWithConserve(mt.paramInfos.map(_.isErasedClass)) { (arg, isErasedClass) => + if isErasedClass then arg.withAddedFlags(Erased) else arg + } + return typedDependent(newParams) val resTpt = TypeTree(mt.nonDependentResultApprox).withSpan(body.span) val typeArgs = appDef.termParamss.head.map(_.tpt) :+ resTpt - val tycon = TypeTree(funSym.typeRef) - val core = propagateErased(AppliedTypeTree(tycon, typeArgs)) + val core = + if mt.hasErasedParams then TypeTree(defn.ErasedFunctionClass.typeRef) + else + val funSym = defn.FunctionSymbol(numArgs, isContextual, isImpure) + val tycon = TypeTree(funSym.typeRef) + AppliedTypeTree(tycon, typeArgs) RefinedTypeTree(core, List(appDef), ctx.owner.asClass) end typedDependent @@ -1368,17 +1436,25 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer typedDependent(args.asInstanceOf[List[untpd.ValDef]])( using ctx.fresh.setOwner(newRefinedClassSymbol(tree.span)).setNewScope) case _ => - propagateErased( - typed(cpy.AppliedTypeTree(tree)(untpd.TypeTree(funSym.typeRef), args :+ body), pt)) + if erasedParams.contains(true) then + typedFunctionType(desugar.makeFunctionWithValDefs(tree, pt), pt) + else + val funSym = defn.FunctionSymbol(numArgs, isContextual, isImpure) + val result = typed(cpy.AppliedTypeTree(tree)(untpd.TypeTree(funSym.typeRef), args :+ body), pt) + // if there are any erased classes, we need to re-do the typecheck. 
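+          // Sketch of this retry, assuming an experimental erased class (names
+          // hypothetical, not from this patch):
+          //   erased class Ev
+          //   type F = Ev => Int
+          // `F` is first typed with a plain function constructor; the re-typecheck
+          // below is what marks the `Ev` parameter as erased.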
+ result match + case r: AppliedTypeTree if r.args.exists(_.tpe.isErasedClass) => + typedFunctionType(desugar.makeFunctionWithValDefs(tree, pt), pt) + case _ => result } } def typedFunctionValue(tree: untpd.Function, pt: Type)(using Context): Tree = { val untpd.Function(params: List[untpd.ValDef] @unchecked, _) = tree: @unchecked - val isContextual = tree match { - case tree: untpd.FunctionWithMods => tree.mods.is(Given) - case _ => false + val (isContextual, isDefinedErased) = tree match { + case tree: untpd.FunctionWithMods => (tree.mods.is(Given), tree.erasedParams) + case _ => (false, tree.args.map(_ => false)) } /** The function body to be returned in the closure. Can become a TypedSplice @@ -1479,9 +1555,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val (protoFormals, resultTpt) = decomposeProtoFunction(pt, params.length, tree.srcPos) - def protoFormal(i: Int): Type = - if (protoFormals.length == params.length) protoFormals(i) - else errorType(WrongNumberOfParameters(protoFormals.length), tree.srcPos) + /** Returns the type and whether the parameter is erased */ + def protoFormal(i: Int): (Type, Boolean) = + if (protoFormals.length == params.length) (protoFormals(i), isDefinedErased(i)) + else (errorType(WrongNumberOfParameters(protoFormals.length), tree.srcPos), false) /** Is `formal` a product type which is elementwise compatible with `params`? */ def ptIsCorrectProduct(formal: Type) = @@ -1493,11 +1570,13 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } var desugared: untpd.Tree = EmptyTree - if protoFormals.length == 1 && params.length != 1 && ptIsCorrectProduct(protoFormals.head) then - val isGenericTuple = - protoFormals.head.derivesFrom(defn.TupleClass) - && !defn.isTupleClass(protoFormals.head.typeSymbol) - desugared = desugar.makeTupledFunction(params, fnBody, isGenericTuple) + if protoFormals.length == 1 && params.length != 1 then + val firstFormal = protoFormals.head.loBound + if ptIsCorrectProduct(firstFormal) then + val isGenericTuple = + firstFormal.derivesFrom(defn.TupleClass) + && !defn.isTupleClass(firstFormal.typeSymbol) + desugared = desugar.makeTupledFunction(params, fnBody, isGenericTuple) else if protoFormals.length > 1 && params.length == 1 then def isParamRef(scrut: untpd.Tree): Boolean = scrut match case untpd.Annotated(scrut1, _) => isParamRef(scrut1) @@ -1519,18 +1598,29 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer for ((param, i) <- params.zipWithIndex) yield if (!param.tpt.isEmpty) param else - val formal = protoFormal(i) + val (formalBounds, isErased) = protoFormal(i) + val formal = formalBounds.loBound + val isBottomFromWildcard = (formalBounds ne formal) && formal.isExactlyNothing val knownFormal = isFullyDefined(formal, ForceDegree.failBottom) + // If the expected formal is a TypeBounds wildcard argument with Nothing as lower bound, + // try to prioritize inferring from target. 
See issue 16405 (tests/run/16405.scala) val paramType = - if knownFormal then formal - else inferredFromTarget(param, formal, calleeType, paramIndex) - .orElse(errorType(AnonymousFunctionMissingParamType(param, tree, formal), param.srcPos)) + // Strip inferred erased annotation, to avoid accidentally inferring erasedness + val formal0 = if !isErased then formal.stripAnnots(_.symbol != defn.ErasedParamAnnot) else formal + if knownFormal && !isBottomFromWildcard then + formal0 + else + inferredFromTarget(param, formal, calleeType, isErased, paramIndex).orElse( + if knownFormal then formal0 + else errorType(AnonymousFunctionMissingParamType(param, tree, formal), param.srcPos) + ) val paramTpt = untpd.TypedSplice( (if knownFormal then InferredTypeTree() else untpd.TypeTree()) .withType(paramType.translateFromRepeated(toArray = false)) .withSpan(param.span.endPos) ) - cpy.ValDef(param)(tpt = paramTpt) + val param0 = cpy.ValDef(param)(tpt = paramTpt) + if isErased then param0.withAddedFlags(Flags.Erased) else param0 desugared = desugar.makeClosure(inferredParams, fnBody, resultTpt, isContextual, tree.span) typed(desugared, pt) @@ -1569,7 +1659,11 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer |because it has internal parameter dependencies""") else if ((tree.tpt `eq` untpd.ContextualEmptyTree) && mt.paramNames.isEmpty) // Note implicitness of function in target type since there are no method parameters that indicate it. - TypeTree(defn.FunctionOf(Nil, mt.resType, isContextual = true, isErased = false)) + TypeTree(defn.FunctionOf(Nil, mt.resType, isContextual = true)) + else if hasCaptureConversionArg(mt.resType) then + errorTree(tree, + em"""cannot turn method type $mt into closure + |because it has capture conversion skolem types""") else EmptyTree } @@ -1598,9 +1692,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } else { val (protoFormals, _) = decomposeProtoFunction(pt, 1, tree.srcPos) - val checkMode = - if (pt.isRef(defn.PartialFunctionClass)) desugar.MatchCheck.None - else desugar.MatchCheck.Exhaustive + val checkMode = desugar.MatchCheck.Exhaustive typed(desugar.makeCaseLambda(tree.cases, checkMode, protoFormals.length).withSpan(tree.span), pt) } case _ => @@ -2167,15 +2259,21 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if (tree.bound.isEmpty && isFullyDefined(pt, ForceDegree.none)) TypeTree(pt) else typed(tree.bound) val sel1 = typed(tree.selector) + val sel1Tpe = sel1.tpe + if sel1Tpe.isLambdaSub then + report.error(MatchTypeScrutineeCannotBeHigherKinded(sel1Tpe), sel1.srcPos) val pt1 = if (bound1.isEmpty) pt else bound1.tpe - val cases1 = tree.cases.mapconserve(typedTypeCase(_, sel1.tpe, pt1)) + val cases1 = tree.cases.mapconserve(typedTypeCase(_, sel1Tpe, pt1)) assignType(cpy.MatchTypeTree(tree)(bound1, sel1, cases1), bound1, sel1, cases1) } - def typedByNameTypeTree(tree: untpd.ByNameTypeTree)(using Context): ByNameTypeTree = { - val result1 = typed(tree.result) - assignType(cpy.ByNameTypeTree(tree)(result1), result1) - } + def typedByNameTypeTree(tree: untpd.ByNameTypeTree)(using Context): ByNameTypeTree = tree.result match + case untpd.CapturesAndResult(refs, tpe) => + typedByNameTypeTree( + cpy.ByNameTypeTree(tree)(untpd.makeRetaining(tpe, refs, tpnme.retainsByName))) + case _ => + val result1 = typed(tree.result) + assignType(cpy.ByNameTypeTree(tree)(result1), result1) def typedTypeBoundsTree(tree: untpd.TypeBoundsTree, pt: Type)(using Context): Tree = val TypeBoundsTree(lo, hi, alias) = tree @@ -2331,11 
+2429,17 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } def typedDefDef(ddef: untpd.DefDef, sym: Symbol)(using Context): Tree = { - if (!sym.info.exists) { // it's a discarded synthetic case class method, drop it - assert(sym.is(Synthetic) && desugar.isRetractableCaseClassMethodName(sym.name)) + def canBeInvalidated(sym: Symbol): Boolean = + sym.is(Synthetic) + && (desugar.isRetractableCaseClassMethodName(sym.name) || + (sym.isConstructor && sym.owner.derivesFrom(defn.JavaRecordClass))) + + if !sym.info.exists then + // it's a discarded method (synthetic case class method or synthetic java record constructor), drop it + assert(canBeInvalidated(sym)) sym.owner.info.decls.openForMutations.unlink(sym) return EmptyTree - } + // TODO: - Remove this when `scala.language.experimental.erasedDefinitions` is no longer experimental. // - Modify signature to `erased def erasedValue[T]: T` if sym.eq(defn.Compiletime_erasedValue) then @@ -2382,7 +2486,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else typedExpr(ddef.rhs, tpt1.tpe.widenExpr)(using rhsCtx)) if sym.isInlineMethod then - if StagingContext.level > 0 then + if StagingLevel.level > 0 then report.error("inline def cannot be within quotes", sym.sourcePos) if sym.is(Given) && untpd.stripBlock(untpd.unsplice(ddef.rhs)).isInstanceOf[untpd.Function] @@ -2483,6 +2587,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer checkSimpleKinded(parent) // allow missing type parameters if there are implicit arguments to pass // since we can infer type arguments from them + val constr = psym.primaryConstructor + if psym.is(Trait) && constr.exists && !cls.isRefinementClass then + ensureAccessible(constr.termRef, superAccess = true, tree.srcPos) else checkParentCall(result, cls) if cls is Case then @@ -2585,6 +2692,19 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // check value class constraints checkDerivedValueClass(cls, body1) + // check PolyFunction constraints (no erased functions!) + if parents1.exists(_.tpe.classSymbol eq defn.PolyFunctionClass) then + body1.foreach { + case ddef: DefDef => + ddef.paramss.foreach { params => + val erasedParam = params.collectFirst { case vdef: ValDef if vdef.symbol.is(Erased) => vdef } + erasedParam.foreach { p => + report.error(em"Implementation restriction: erased classes are not allowed in a poly function definition", p.srcPos) + } + } + case _ => + } + val effectiveOwner = cls.owner.skipWeakOwner if !cls.isRefinementClass && !cls.isAllOf(PrivateLocal) @@ -2970,6 +3090,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case untpd.EmptyTree => tpd.EmptyTree case tree: untpd.Quote => typedQuote(tree, pt) case tree: untpd.Splice => typedSplice(tree, pt) + case tree: untpd.SplicePattern => typedSplicePattern(tree, pt) case tree: untpd.MacroTree => report.error("Unexpected macro", tree.srcPos); tpd.nullLiteral // ill-formed code may reach here case tree: untpd.Hole => typedHole(tree, pt) case _ => typedUnadapted(desugar(tree, pt), pt, locked) @@ -3011,7 +3132,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer tree protected def makeContextualFunction(tree: untpd.Tree, pt: Type)(using Context): Tree = { - val defn.FunctionOf(formals, _, true, _) = pt.dropDependentRefinement: @unchecked + val defn.FunctionOf(formals, _, true) = pt.dropDependentRefinement: @unchecked // The getter of default parameters may reach here. 
// Given the code below @@ -3039,7 +3160,12 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else formals.map(untpd.TypeTree) } - val ifun = desugar.makeContextualFunction(paramTypes, tree, defn.isErasedFunctionType(pt)) + val erasedParams = pt.dealias match { + case RefinedType(parent, nme.apply, mt: MethodType) => mt.erasedParams + case _ => paramTypes.map(_ => false) + } + + val ifun = desugar.makeContextualFunction(paramTypes, tree, erasedParams) typr.println(i"make contextual function $tree / $pt ---> $ifun") typedFunctionValue(ifun, pt) } @@ -3109,7 +3235,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer traverse(xtree :: rest) case stat :: rest => val stat1 = typed(stat)(using ctx.exprContext(stat, exprOwner)) - checkStatementPurity(stat1)(stat, exprOwner) + if !checkInterestingResultInStatement(stat1) then checkStatementPurity(stat1)(stat, exprOwner) buf += stat1 traverse(rest)(using stat1.nullableContext) case nil => @@ -3341,11 +3467,37 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def selectionProto = SelectionProto(tree.name, mbrProto, compat, privateOK = inSelect) def tryExtension(using Context): Tree = - findRef(tree.name, WildcardType, ExtensionMethod, EmptyFlags, qual.srcPos) match + val altImports = new mutable.ListBuffer[TermRef]() + findRef(tree.name, WildcardType, ExtensionMethod, EmptyFlags, qual.srcPos, altImports) match case ref: TermRef => - extMethodApply(untpd.TypedSplice(tpd.ref(ref).withSpan(tree.nameSpan)), qual, pt) + def tryExtMethod(ref: TermRef)(using Context) = + extMethodApply(untpd.TypedSplice(tpd.ref(ref).withSpan(tree.nameSpan)), qual, pt) + if altImports.isEmpty then + tryExtMethod(ref) + else + // Try all possible imports and collect successes and failures + val successes, failures = new mutable.ListBuffer[(Tree, TyperState)] + for alt <- ref :: altImports.toList do + val nestedCtx = ctx.fresh.setNewTyperState() + val app = tryExtMethod(alt)(using nestedCtx) + (if nestedCtx.reporter.hasErrors then failures else successes) + += ((app, nestedCtx.typerState)) + typr.println(i"multiple extension methods, success: ${successes.toList}, failure: ${failures.toList}") + + def pick(alt: (Tree, TyperState)): Tree = + val (app, ts) = alt + ts.commit() + app + + successes.toList match + case Nil => pick(failures.head) + case success :: Nil => pick(success) + case (expansion1, _) :: (expansion2, _) :: _ => + report.error(AmbiguousExtensionMethod(tree, expansion1, expansion2), tree.srcPos) + expansion1 case _ => EmptyTree + end tryExtension def nestedFailure(ex: TypeError) = rememberSearchFailure(qual, @@ -3440,7 +3592,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer adapt(tree, pt, ctx.typerState.ownedVars) private def adapt1(tree: Tree, pt: Type, locked: TypeVars)(using Context): Tree = { - assert(pt.exists && !pt.isInstanceOf[ExprType] || ctx.reporter.errorsReported) + assert(pt.exists && !pt.isInstanceOf[ExprType] || ctx.reporter.errorsReported, i"tree: $tree, pt: $pt") def methodStr = err.refStr(methPart(tree).tpe) def readapt(tree: Tree)(using Context) = adapt(tree, pt, locked) @@ -3900,7 +4052,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else defn.functionArity(ptNorm) else val nparams = wtp.paramInfos.length - if nparams > 0 || pt.eq(AnyFunctionProto) then nparams + if nparams > 1 + || nparams == 1 && !wtp.isVarArgsMethod + || pt.eq(AnyFunctionProto) + then nparams else -1 // no eta expansion in this case adaptNoArgsUnappliedMethod(wtp,
funExpected, arity) case _ => @@ -3940,7 +4095,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer return adaptConstant(tree, ConstantType(converted)) case _ => - val captured = captureWildcards(wtp) + val captured = captureWildcardsCompat(wtp, pt) if (captured `ne` wtp) return readapt(tree.cast(captured)) @@ -3994,15 +4149,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else err.typeMismatch(tree, pt, failure) pt match - case pt: SelectionProto => - if tree.tpe.derivesFrom(defn.PairClass) && !defn.isTupleNType(tree.tpe.widenDealias) then - // If this is a generic tuple we need to cast it to make the TupleN/ members accessible. - // This works only for generic tuples of known size up to 22. - defn.tupleTypes(tree.tpe.widenTermRefExpr) match - case Some(elems) if elems.length <= Definitions.MaxTupleArity => - tree.cast(defn.tupleType(elems)) - case _ => tree - else tree // other adaptations for selections are handled in typedSelect + case _: SelectionProto => + tree // adaptations for selections are handled in typedSelect case _ if ctx.mode.is(Mode.ImplicitsEnabled) && tree.tpe.isValueType => if pt.isRef(defn.AnyValClass, skipRefined = false) || pt.isRef(defn.ObjectClass, skipRefined = false) @@ -4200,6 +4348,59 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer typedExpr(cmp, defn.BooleanType) case _ => + private def checkInterestingResultInStatement(t: Tree)(using Context): Boolean = { + def isUninterestingSymbol(sym: Symbol): Boolean = + sym == NoSymbol || + sym.isConstructor || + sym.is(Package) || + sym.isPackageObject || + sym == defn.BoxedUnitClass || + sym == defn.AnyClass || + sym == defn.AnyRefAlias || + sym == defn.AnyValClass + def isUninterestingType(tpe: Type): Boolean = + tpe == NoType || + tpe.typeSymbol == defn.UnitClass || + defn.isBottomClass(tpe.typeSymbol) || + tpe =:= defn.UnitType || + tpe.typeSymbol == defn.BoxedUnitClass || + tpe =:= defn.AnyValType || + tpe =:= defn.AnyType || + tpe =:= defn.AnyRefType + def isJavaApplication(t: Tree): Boolean = t match { + case Apply(f, _) => f.symbol.is(JavaDefined) && !defn.ObjectClass.isSubClass(f.symbol.owner) + case _ => false + } + def checkInterestingShapes(t: Tree): Boolean = t match { + case If(_, thenpart, elsepart) => checkInterestingShapes(thenpart) || checkInterestingShapes(elsepart) + case Block(_, res) => checkInterestingShapes(res) + case Match(_, cases) => cases.exists(k => checkInterestingShapes(k.body)) + case _ => checksForInterestingResult(t) + } + def checksForInterestingResult(t: Tree): Boolean = ( + !t.isDef // ignore defs + && !isUninterestingSymbol(t.symbol) // ctors, package, Unit, Any + && !isUninterestingType(t.tpe) // bottom types, Unit, Any + && !isThisTypeResult(t) // buf += x + && !isSuperConstrCall(t) // just a thing + && !isJavaApplication(t) // Java methods are inherently side-effecting + // && !treeInfo.hasExplicitUnit(t) // suppressed by explicit expr: Unit // TODO Should explicit `: Unit` be added as warning suppression? 
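+        // e.g. (illustrative, not from this patch) a discarded `xs.map(f)` result
+        // is "interesting" and gets reported, while `buf += x` (this-typed) or a
+        // call to a Java method is not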
+ ) if ctx.settings.WNonUnitStatement.value && !ctx.isAfterTyper && checkInterestingShapes(t) then + val where = t match { + case Block(_, res) => res + case If(_, thenpart, Literal(Constant(()))) => + thenpart match { + case Block(_, res) => res + case _ => thenpart + } + case _ => t + } + report.warning(UnusedNonUnitValue(where.tpe), t.srcPos) + true + else false + } + private def checkStatementPurity(tree: tpd.Tree)(original: untpd.Tree, exprOwner: Symbol)(using Context): Unit = if !tree.tpe.isErroneous && !ctx.isAfterTyper diff --git a/compiler/src/dotty/tools/dotc/util/Signatures.scala b/compiler/src/dotty/tools/dotc/util/Signatures.scala index ddf89e7dd04d..5513a1f803c6 100644 --- a/compiler/src/dotty/tools/dotc/util/Signatures.scala +++ b/compiler/src/dotty/tools/dotc/util/Signatures.scala @@ -54,7 +54,7 @@ object Signatures { * Extract (current parameter index, function index, functions) method call for given position. * * @param path The path to the function application - * @param span The position of the cursor + * @param pos The position of the cursor * * @return A triple containing the index of the parameter being edited, the index of the function * being called, and the list of overloads of this function. diff --git a/compiler/src/dotty/tools/dotc/util/StackTraceOps.scala b/compiler/src/dotty/tools/dotc/util/StackTraceOps.scala index 7fa54606c572..f991005f0c43 100644 --- a/compiler/src/dotty/tools/dotc/util/StackTraceOps.scala +++ b/compiler/src/dotty/tools/dotc/util/StackTraceOps.scala @@ -29,7 +29,6 @@ object StackTraceOps: * If a stack trace is truncated, it will be followed by a line of the form * `... 3 elided`, by analogy to the lines `... 3 more` which indicate * shared stack trace segments. - * @param e the exception * @param p the predicate to select the prefix */ def formatStackTracePrefix(p: StackTraceElement => Boolean): String = diff --git a/compiler/src/dotty/tools/dotc/util/lrutest.sc b/compiler/src/dotty/tools/dotc/util/lrutest.sc index 6e6328b248e3..9c811a65a70a 100644 --- a/compiler/src/dotty/tools/dotc/util/lrutest.sc +++ b/compiler/src/dotty/tools/dotc/util/lrutest.sc @@ -15,12 +15,12 @@ object lrutest { cache.last //> res4: Int = 6 cache lookup "hi" //> res5: String = x cache.indices.take(10).toList //> res6: List[Int] = List(7, 0, 1, 2, 3, 4, 5, 6, 7, 0) - + for (i <- 1 to 10) { if (cache.lookup(i.toString) == null) cache.enter(i.toString, i.toString) } - + cache.indices.take(10).toList //> res7: List[Int] = List(5, 6, 7, 0, 1, 2, 3, 4, 5, 6) cache //> res8: dotty.tools.dotc.util.LRUCache[String,String] = LRUCache(10 -> 10, 9 - //| > 9, 8 -> 8, 7 -> 7, 6 -> 6, 5 -> 5, 4 -> 4, 3 -> 3) @@ -35,6 +35,6 @@ object lrutest { //| > 10, 7 -> 7, 9 -> 9, 8 -> 8, 6 -> 6, 4 -> 4, 3 -> 3) cache.lookup("11") //> res16: String = null cache.enter("11", "!!") - cache //> res17: dotty.tools.dotc.util.LRUCache[String,String] = LRUCache(11 -> !!, 5 + cache //> res17: dotty.tools.dotc.util.LRUCache[String,String] = LRUCache(11 -> !!, 5 //| -> 5, 10 -> 10, 7 -> 7, 9 -> 9, 8 -> 8, 6 -> 6, 4 -> 4) } \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/util/optional.scala b/compiler/src/dotty/tools/dotc/util/optional.scala new file mode 100644 index 000000000000..cb62315d3c98 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/util/optional.scala @@ -0,0 +1,19 @@ +package dotty.tools.dotc.util + +import scala.util.boundary + +/** Return type that indicates that the method returns a T or aborts to the enclosing boundary with a `None` */ +type optional[T] =
boundary.Label[None.type] ?=> T + +/** A prompt for `Option`, which establishes a boundary to which `_.?` on `Option` can return */ +object optional: + inline def apply[T](inline body: optional[T]): Option[T] = + boundary(Some(body)) + + extension [T](r: Option[T]) + inline def ? (using label: boundary.Label[None.type]): T = r match + case Some(x) => x + case None => boundary.break(None) + + inline def break()(using label: boundary.Label[None.type]): Nothing = + boundary.break(None) diff --git a/compiler/src/dotty/tools/io/ClassPath.scala b/compiler/src/dotty/tools/io/ClassPath.scala index 754c2bae3597..b45de57f9850 100644 --- a/compiler/src/dotty/tools/io/ClassPath.scala +++ b/compiler/src/dotty/tools/io/ClassPath.scala @@ -9,8 +9,7 @@ package io import scala.language.unsafeNulls -import java.net.MalformedURLException -import java.net.URL +import java.net.{MalformedURLException, URI, URISyntaxException, URL} import java.util.regex.PatternSyntaxException import File.pathSeparator @@ -182,8 +181,8 @@ object ClassPath { } def specToURL(spec: String): Option[URL] = - try Some(new URL(spec)) - catch { case _: MalformedURLException => None } + try Some(new URI(spec).toURL) + catch case _: MalformedURLException | _: URISyntaxException => None def manifests: List[java.net.URL] = { import scala.jdk.CollectionConverters.EnumerationHasAsScala diff --git a/compiler/src/dotty/tools/io/JDK9Reflectors.java b/compiler/src/dotty/tools/io/JDK9Reflectors.java index 1b0ce5deabab..9816cc03f92a 100644 --- a/compiler/src/dotty/tools/io/JDK9Reflectors.java +++ b/compiler/src/dotty/tools/io/JDK9Reflectors.java @@ -32,7 +32,7 @@ public final class JDK9Reflectors { } // Classes from java.lang.Runtime are not available in JDK 8 so using them explicitly would prevent this file from compiling with JDK 8 - // but these methods are not called in runtime when using this version of JDK + // but these methods are not called at runtime when using this version of JDK public static /*java.lang.Runtime.Version*/ Object runtimeVersionParse(String string) { try { diff --git a/compiler/src/dotty/tools/repl/AbstractFileClassLoader.scala b/compiler/src/dotty/tools/repl/AbstractFileClassLoader.scala index 89fd290f7286..7a457a1d7546 100644 --- a/compiler/src/dotty/tools/repl/AbstractFileClassLoader.scala +++ b/compiler/src/dotty/tools/repl/AbstractFileClassLoader.scala @@ -23,6 +23,9 @@ import java.util.Collections class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader) extends ClassLoader(parent): private def findAbstractFile(name: String) = root.lookupPath(name.split('/').toIndexedSeq, directory = false) + // on JDK 20 the URL constructor we're using is deprecated, + // but the recommended replacement, URL.of, doesn't exist on JDK 8 + @annotation.nowarn("cat=deprecation") override protected def findResource(name: String) = findAbstractFile(name) match case null => null diff --git a/compiler/src/dotty/tools/runner/ScalaClassLoader.scala b/compiler/src/dotty/tools/runner/ScalaClassLoader.scala index 3c8c51d8d6b2..9ec0199abcbb 100644 --- a/compiler/src/dotty/tools/runner/ScalaClassLoader.scala +++ b/compiler/src/dotty/tools/runner/ScalaClassLoader.scala @@ -67,7 +67,7 @@ object ScalaClassLoader { @sharable private[this] val bootClassLoader: ClassLoader = if scala.util.Properties.isJavaAtLeast("9") then try - ClassLoader.getSystemClassLoader.getParent + ClassLoader.getSystemClassLoader.getParent catch case _: Throwable => null else null diff --git a/compiler/src/scala/quoted/runtime/impl/ExprImpl.scala
b/compiler/src/scala/quoted/runtime/impl/ExprImpl.scala index b33ba14b9e70..5fac91124187 100644 --- a/compiler/src/scala/quoted/runtime/impl/ExprImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/ExprImpl.scala @@ -20,6 +20,4 @@ final class ExprImpl(val tree: tpd.Tree, val scope: Scope) extends Expr[Any] { } override def hashCode(): Int = tree.hashCode() - - override def toString: String = "'{ ... }" } diff --git a/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala b/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala index 7c952dbbe142..bfa4c1c6d1f2 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala @@ -1,7 +1,6 @@ package scala.quoted package runtime.impl - import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Flags.* @@ -9,6 +8,7 @@ import dotty.tools.dotc.core.Names.* import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.core.StdNames.nme import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.util.optional /** Matches a quoted tree against a quoted pattern tree. * A quoted pattern tree may have type and term holes in addition to normal terms. @@ -103,12 +103,13 @@ import dotty.tools.dotc.core.Symbols.* object QuoteMatcher { import tpd.* - // TODO improve performance - // TODO use flag from Context. Maybe -debug or add -debug-macros private inline val debug = false - import Matching._ + /** Sequence of matched expressions. + * These expressions are part of the scrutinee and will be bound to the quote pattern term splices. + */ + type MatchingExprs = Seq[MatchResult] /** A map relating equivalent symbols from the scrutinee and the pattern * For example in @@ -121,32 +122,34 @@ object QuoteMatcher { private def withEnv[T](env: Env)(body: Env ?=> T): T = body(using env) - def treeMatch(scrutineeTree: Tree, patternTree: Tree)(using Context): Option[Tuple] = + def treeMatch(scrutineeTree: Tree, patternTree: Tree)(using Context): Option[MatchingExprs] = given Env = Map.empty - scrutineeTree =?= patternTree + optional: + scrutineeTree =?= patternTree /** Check that all trees match with `mtch` and concatenate the results with &&& */ - private def matchLists[T](l1: List[T], l2: List[T])(mtch: (T, T) => Matching): Matching = (l1, l2) match { + private def matchLists[T](l1: List[T], l2: List[T])(mtch: (T, T) => MatchingExprs): optional[MatchingExprs] = (l1, l2) match { case (x :: xs, y :: ys) => mtch(x, y) &&& matchLists(xs, ys)(mtch) case (Nil, Nil) => matched case _ => notMatched } extension (scrutinees: List[Tree]) - private def =?= (patterns: List[Tree])(using Env, Context): Matching = + private def =?= (patterns: List[Tree])(using Env, Context): optional[MatchingExprs] = matchLists(scrutinees, patterns)(_ =?= _) extension (scrutinee0: Tree) /** Check that the trees match and return the contents from the pattern holes. - * Return None if the trees do not match otherwise return Some of a tuple containing all the contents in the holes. + * Return a sequence containing all the contents in the holes. + * If it does not match, continues to the `optional` with `None`. * * @param scrutinee The tree being matched * @param pattern The pattern tree that the scrutinee should match. Contains `patternHole` holes. * @param `summon[Env]` Set of tuples containing pairs of symbols (s, p) where s defines a symbol in `scrutinee` which corresponds to symbol p in `pattern`. 
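* Illustrative example (not part of this change): matching the scrutinee
* '{ f(3) } against the pattern '{ f($x) } succeeds with a single
* MatchResult binding the hole `$x` to the expression `3`.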
- * @return `None` if it did not match or `Some(tup: Tuple)` if it matched where `tup` contains the contents of the holes. + * @return The sequence with the contents of the holes of the matched expression. */ - private def =?= (pattern0: Tree)(using Env, Context): Matching = + private def =?= (pattern0: Tree)(using Env, Context): optional[MatchingExprs] = /* Match block flattening */ // TODO move to cases /** Normalize the tree */ @@ -203,31 +206,12 @@ object QuoteMatcher { // Matches an open term and wraps it into a lambda that provides the free variables case Apply(TypeApply(Ident(_), List(TypeTree())), SeqLiteral(args, _) :: Nil) if pattern.symbol.eq(defn.QuotedRuntimePatterns_higherOrderHole) => - def hoasClosure = { - val names: List[TermName] = args.map { - case Block(List(DefDef(nme.ANON_FUN, _, _, Apply(Ident(name), _))), _) => name.asTermName - case arg => arg.symbol.name.asTermName - } - val argTypes = args.map(x => x.tpe.widenTermRefExpr) - val methTpe = MethodType(names)(_ => argTypes, _ => pattern.tpe) - val meth = newAnonFun(ctx.owner, methTpe) - def bodyFn(lambdaArgss: List[List[Tree]]): Tree = { - val argsMap = args.map(_.symbol).zip(lambdaArgss.head).toMap - val body = new TreeMap { - override def transform(tree: Tree)(using Context): Tree = - tree match - case tree: Ident => summon[Env].get(tree.symbol).flatMap(argsMap.get).getOrElse(tree) - case tree => super.transform(tree) - }.transform(scrutinee) - TreeOps(body).changeNonLocalOwners(meth) - } - Closure(meth, bodyFn) - } + val env = summon[Env] val capturedArgs = args.map(_.symbol) - val captureEnv = summon[Env].filter((k, v) => !capturedArgs.contains(v)) + val captureEnv = env.filter((k, v) => !capturedArgs.contains(v)) withEnv(captureEnv) { scrutinee match - case ClosedPatternTerm(scrutinee) => matched(hoasClosure) + case ClosedPatternTerm(scrutinee) => matchedOpen(scrutinee, pattern.tpe, args, env) case _ => notMatched } @@ -317,7 +301,7 @@ object QuoteMatcher { /* Match new */ case New(tpt1) => pattern match - case New(tpt2) if tpt1.tpe.typeSymbol == tpt2.tpe.typeSymbol => matched + case New(tpt2) if tpt1.tpe.dealias.typeSymbol == tpt2.tpe.dealias.typeSymbol => matched case _ => notMatched /* Match this */ @@ -431,7 +415,6 @@ object QuoteMatcher { case _ => scrutinee val pattern = patternTree.symbol - devirtualizedScrutinee == pattern || summon[Env].get(devirtualizedScrutinee).contains(pattern) || devirtualizedScrutinee.allOverriddenSymbols.contains(pattern) @@ -452,32 +435,67 @@ object QuoteMatcher { accumulator.apply(Set.empty, term) } - /** Result of matching a part of an expression */ - private type Matching = Option[Tuple] - - private object Matching { - - def notMatched: Matching = None - - val matched: Matching = Some(Tuple()) - - def matched(tree: Tree)(using Context): Matching = - Some(Tuple1(new ExprImpl(tree, SpliceScope.getCurrent))) - - extension (self: Matching) - def asOptionOfTuple: Option[Tuple] = self - - /** Concatenates the contents of two successful matchings or return a `notMatched` */ - def &&& (that: => Matching): Matching = self match { - case Some(x) => - that match { - case Some(y) => Some(x ++ y) - case _ => None - } - case _ => None - } - end extension - - } + enum MatchResult: + /** Closed pattern extracted value + * @param tree Scrutinee sub-tree that matched + */ + case ClosedTree(tree: Tree) + /** HOAS pattern extracted value + * + * @param tree Scrutinee sub-tree that matched + * @param patternTpe Type of the pattern hole (from the pattern) + * @param args HOAS arguments (from the 
pattern) + * @param env Mapping between scrutinee and pattern variables + */ + case OpenTree(tree: Tree, patternTpe: Type, args: List[Tree], env: Env) + + /** Return the expression that was extracted from a hole. + * + * If it was a closed expression it returns that expression. Otherwise, + * if it is a HOAS pattern, the surrounding lambda is generated using + * `mapTypeHoles` to create the signature of the lambda. + * + * This expression is assumed to be a valid expression in the given splice scope. + */ + def toExpr(mapTypeHoles: TypeMap, spliceScope: Scope)(using Context): Expr[Any] = this match + case MatchResult.ClosedTree(tree) => + new ExprImpl(tree, spliceScope) + case MatchResult.OpenTree(tree, patternTpe, args, env) => + val names: List[TermName] = args.map { + case Block(List(DefDef(nme.ANON_FUN, _, _, Apply(Ident(name), _))), _) => name.asTermName + case arg => arg.symbol.name.asTermName + } + val paramTypes = args.map(x => mapTypeHoles(x.tpe.widenTermRefExpr)) + val methTpe = MethodType(names)(_ => paramTypes, _ => mapTypeHoles(patternTpe)) + val meth = newAnonFun(ctx.owner, methTpe) + def bodyFn(lambdaArgss: List[List[Tree]]): Tree = { + val argsMap = args.view.map(_.symbol).zip(lambdaArgss.head).toMap + val body = new TreeMap { + override def transform(tree: Tree)(using Context): Tree = + tree match + case tree: Ident => env.get(tree.symbol).flatMap(argsMap.get).getOrElse(tree) + case tree => super.transform(tree) + }.transform(tree) + TreeOps(body).changeNonLocalOwners(meth) + } + val hoasClosure = Closure(meth, bodyFn) + new ExprImpl(hoasClosure, spliceScope) + + private inline def notMatched: optional[MatchingExprs] = + optional.break() + + private inline def matched: MatchingExprs = + Seq.empty + + private inline def matched(tree: Tree)(using Context): MatchingExprs = + Seq(MatchResult.ClosedTree(tree)) + + private def matchedOpen(tree: Tree, patternTpe: Type, args: List[Tree], env: Env)(using Context): MatchingExprs = + Seq(MatchResult.OpenTree(tree, patternTpe, args, env)) + + extension (self: MatchingExprs) + /** Concatenates the contents of two successful matchings */ + def &&& (that: MatchingExprs): MatchingExprs = self ++ that + end extension } diff --git a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala index d1806947fa5d..db4e3e6c6a05 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala @@ -9,7 +9,6 @@ import dotty.tools.dotc.ast.untpd import dotty.tools.dotc.core.Annotations import dotty.tools.dotc.core.Contexts._ import dotty.tools.dotc.core.Decorators._ -import dotty.tools.dotc.core.Flags._ import dotty.tools.dotc.core.NameKinds import dotty.tools.dotc.core.NameOps._ import dotty.tools.dotc.core.StdNames._ @@ -276,12 +275,13 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler object DefDef extends DefDefModule: def apply(symbol: Symbol, rhsFn: List[List[Tree]] => Option[Term]): DefDef = - assert(symbol.isTerm, s"expected a term symbol but received $symbol") + xCheckMacroAssert(symbol.isTerm, s"expected a term symbol but received $symbol") + xCheckMacroAssert(symbol.flags.is(Flags.Method), "expected a symbol with `Method` flag set") withDefaultPos(tpd.DefDef(symbol.asTerm, prefss => - xCheckMacroedOwners(xCheckMacroValidExpr(rhsFn(prefss)), symbol).getOrElse(tpd.EmptyTree) + xCheckedMacroOwners(xCheckMacroValidExpr(rhsFn(prefss)), symbol).getOrElse(tpd.EmptyTree) )) def copy(original: 
Tree)(name: String, paramss: List[ParamClause], tpt: TypeTree, rhs: Option[Term]): DefDef = - tpd.cpy.DefDef(original)(name.toTermName, paramss, tpt, xCheckMacroedOwners(rhs, original.symbol).getOrElse(tpd.EmptyTree)) + tpd.cpy.DefDef(original)(name.toTermName, paramss, tpt, xCheckedMacroOwners(rhs, original.symbol).getOrElse(tpd.EmptyTree)) def unapply(ddef: DefDef): (String, List[ParamClause], TypeTree, Option[Term]) = (ddef.name.toString, ddef.paramss, ddef.tpt, optional(ddef.rhs)) end DefDef @@ -307,9 +307,10 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler object ValDef extends ValDefModule: def apply(symbol: Symbol, rhs: Option[Term]): ValDef = - withDefaultPos(tpd.ValDef(symbol.asTerm, xCheckMacroedOwners(xCheckMacroValidExpr(rhs), symbol).getOrElse(tpd.EmptyTree))) + xCheckMacroAssert(!symbol.flags.is(Flags.Method), "expected a symbol without `Method` flag set") + withDefaultPos(tpd.ValDef(symbol.asTerm, xCheckedMacroOwners(xCheckMacroValidExpr(rhs), symbol).getOrElse(tpd.EmptyTree))) def copy(original: Tree)(name: String, tpt: TypeTree, rhs: Option[Term]): ValDef = - tpd.cpy.ValDef(original)(name.toTermName, tpt, xCheckMacroedOwners(xCheckMacroValidExpr(rhs), original.symbol).getOrElse(tpd.EmptyTree)) + tpd.cpy.ValDef(original)(name.toTermName, tpt, xCheckedMacroOwners(xCheckMacroValidExpr(rhs), original.symbol).getOrElse(tpd.EmptyTree)) def unapply(vdef: ValDef): (String, TypeTree, Option[Term]) = (vdef.name.toString, vdef.tpt, optional(vdef.rhs)) @@ -371,16 +372,15 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler object Term extends TermModule: def betaReduce(tree: Term): Option[Term] = tree match - case app @ tpd.Apply(tpd.Select(fn, nme.apply), args) if dotc.core.Symbols.defn.isFunctionType(fn.tpe) => - val app1 = dotc.transform.BetaReduce(app, fn, args) - if app1 eq app then None - else Some(app1.withSpan(tree.span)) case tpd.Block(Nil, expr) => for e <- betaReduce(expr) yield tpd.cpy.Block(tree)(Nil, e) case tpd.Inlined(_, Nil, expr) => betaReduce(expr) case _ => - None + val tree1 = dotc.transform.BetaReduce(tree) + if tree1 eq tree then None + else Some(tree1.withSpan(tree.span)) + end Term given TermMethods: TermMethods with @@ -399,7 +399,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def etaExpand(owner: Symbol): Term = self.tpe.widen match { case mtpe: Types.MethodType if !mtpe.isParamDependent => val closureResType = mtpe.resType match { - case t: Types.MethodType => t.toFunctionType(isJava = self.symbol.is(JavaDefined)) + case t: Types.MethodType => t.toFunctionType(isJava = self.symbol.is(dotc.core.Flags.JavaDefined)) case t => t } val closureTpe = Types.MethodType(mtpe.paramNames, mtpe.paramInfos, closureResType) @@ -610,11 +610,13 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler end extension end NamedArgMethods - type Apply = tpd.Apply + type Apply = tpd.Apply | tpd.Quote | tpd.Splice object ApplyTypeTest extends TypeTest[Tree, Apply]: def unapply(x: Tree): Option[Apply & x.type] = x match case x: (tpd.Apply & x.type) => Some(x) + case x: (tpd.Quote & x.type) => Some(x) // TODO expose Quote AST in Quotes + case x: (tpd.Splice & x.type) => Some(x) // TODO expose Splice AST in Quotes case _ => None end ApplyTypeTest @@ -631,8 +633,23 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler given ApplyMethods: ApplyMethods with extension (self: Apply) - def fun: Term = self.fun - def args: 
List[Term] = self.args + def fun: Term = self match + case self: tpd.Apply => self.fun + case self: tpd.Quote => // TODO expose Quote AST in Quotes + import dotty.tools.dotc.ast.tpd.TreeOps + tpd.ref(dotc.core.Symbols.defn.QuotedRuntime_exprQuote) + .appliedToType(self.bodyType) + .withSpan(self.span) + case self: tpd.Splice => // TODO expose Splice AST in Quotes + import dotty.tools.dotc.ast.tpd.TreeOps + tpd.ref(dotc.core.Symbols.defn.QuotedRuntime_exprSplice) + .appliedToType(self.tpe) + .withSpan(self.span) + + def args: List[Term] = self match + case self: tpd.Apply => self.args + case self: tpd.Quote => List(self.body) // TODO expose Quote AST in Quotes + case self: tpd.Splice => List(self.expr) // TODO expose Splice AST in Quotes end extension end ApplyMethods @@ -812,7 +829,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler object Lambda extends LambdaModule: def apply(owner: Symbol, tpe: MethodType, rhsFn: (Symbol, List[Tree]) => Tree): Block = val meth = dotc.core.Symbols.newAnonFun(owner, tpe) - withDefaultPos(tpd.Closure(meth, tss => xCheckMacroedOwners(xCheckMacroValidExpr(rhsFn(meth, tss.head.map(withDefaultPos))), meth))) + withDefaultPos(tpd.Closure(meth, tss => xCheckedMacroOwners(xCheckMacroValidExpr(rhsFn(meth, tss.head.map(withDefaultPos))), meth))) def unapply(tree: Block): Option[(List[ValDef], Term)] = tree match { case Block((ddef @ DefDef(_, tpd.ValDefs(params) :: Nil, _, Some(body))) :: Nil, Closure(meth, _)) @@ -1483,6 +1500,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler object Bind extends BindModule: def apply(sym: Symbol, pattern: Tree): Bind = + xCheckMacroAssert(sym.flags.is(Flags.Case), "expected a symbol with `Case` flag set") withDefaultPos(tpd.Bind(sym, pattern)) def copy(original: Tree)(name: String, pattern: Tree): Bind = withDefaultPos(tpd.cpy.Bind(original)(name.toTermName, pattern)) @@ -1582,8 +1600,12 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler self.nonEmpty && self.head.symbol.is(dotc.core.Flags.Implicit) def isGiven: Boolean = self.nonEmpty && self.head.symbol.is(dotc.core.Flags.Given) - def isErased: Boolean = - self.nonEmpty && self.head.symbol.is(dotc.core.Flags.Erased) + def isErased: Boolean = false + + def erasedArgs: List[Boolean] = + self.map(_.symbol.is(dotc.core.Flags.Erased)) + def hasErasedArgs: Boolean = + self.exists(_.symbol.is(dotc.core.Flags.Erased)) end TermParamClauseMethods type TypeParamClause = List[tpd.TypeDef] @@ -2140,9 +2162,12 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler given MethodTypeMethods: MethodTypeMethods with extension (self: MethodType) - def isErased: Boolean = self.isErasedMethod + def isErased: Boolean = false def isImplicit: Boolean = self.isImplicitMethod def param(idx: Int): TypeRepr = self.newParamRef(idx) + + def erasedParams: List[Boolean] = self.erasedParams + def hasErasedParams: Boolean = self.hasErasedParams end extension end MethodTypeMethods @@ -2168,11 +2193,11 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler end extension end PolyTypeMethods - type TypeLambda = dotc.core.Types.TypeLambda + type TypeLambda = dotc.core.Types.HKTypeLambda object TypeLambdaTypeTest extends TypeTest[TypeRepr, TypeLambda]: def unapply(x: TypeRepr): Option[TypeLambda & x.type] = x match - case tpe: (Types.TypeLambda & x.type) => Some(tpe) + case tpe: (Types.HKTypeLambda & x.type) => Some(tpe) case _ => None end TypeLambdaTypeTest @@ 
-2498,6 +2523,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def newModule(owner: Symbol, name: String, modFlags: Flags, clsFlags: Flags, parents: List[TypeRepr], decls: Symbol => List[Symbol], privateWithin: Symbol): Symbol = assert(parents.nonEmpty && !parents.head.typeSymbol.is(dotc.core.Flags.Trait), "First parent must be a class") + assert(!privateWithin.exists || privateWithin.isType, "privateWithin must be a type symbol or `Symbol.noSymbol`") val mod = dotc.core.Symbols.newNormalizedModuleSymbol( owner, name.toTermName, @@ -2514,13 +2540,26 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def newMethod(owner: Symbol, name: String, tpe: TypeRepr): Symbol = newMethod(owner, name, tpe, Flags.EmptyFlags, noSymbol) def newMethod(owner: Symbol, name: String, tpe: TypeRepr, flags: Flags, privateWithin: Symbol): Symbol = - dotc.core.Symbols.newSymbol(owner, name.toTermName, flags | dotc.core.Flags.Method, tpe, privateWithin) + xCheckMacroAssert(!privateWithin.exists || privateWithin.isType, "privateWithin must be a type symbol or `Symbol.noSymbol`") + val privateWithin1 = if privateWithin.isTerm then Symbol.noSymbol else privateWithin + checkValidFlags(flags.toTermFlags, Flags.validMethodFlags) + dotc.core.Symbols.newSymbol(owner, name.toTermName, flags | dotc.core.Flags.Method, tpe, privateWithin1) def newVal(owner: Symbol, name: String, tpe: TypeRepr, flags: Flags, privateWithin: Symbol): Symbol = - dotc.core.Symbols.newSymbol(owner, name.toTermName, flags, tpe, privateWithin) + xCheckMacroAssert(!privateWithin.exists || privateWithin.isType, "privateWithin must be a type symbol or `Symbol.noSymbol`") + val privateWithin1 = if privateWithin.isTerm then Symbol.noSymbol else privateWithin + checkValidFlags(flags.toTermFlags, Flags.validValFlags) + dotc.core.Symbols.newSymbol(owner, name.toTermName, flags, tpe, privateWithin1) def newBind(owner: Symbol, name: String, flags: Flags, tpe: TypeRepr): Symbol = - dotc.core.Symbols.newSymbol(owner, name.toTermName, flags | Case, tpe) + checkValidFlags(flags.toTermFlags, Flags.validBindFlags) + dotc.core.Symbols.newSymbol(owner, name.toTermName, flags | dotc.core.Flags.Case, tpe) def noSymbol: Symbol = dotc.core.Symbols.NoSymbol + private inline def checkValidFlags(inline flags: Flags, inline valid: Flags): Unit = + xCheckMacroAssert( + flags <= valid, + s"Received invalid flags. Expected flags ${flags.show} to only contain a subset of ${valid.show}." 
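+      // e.g. (illustrative, hypothetical call) `Symbol.newBind(owner, "x", Flags.Mutable, tpe)`
+      // trips this assert under -Xcheck-macros, since `validBindFlags` below admits only `Case`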
+ ) + def freshName(prefix: String): String = NameKinds.MacroNames.fresh(prefix.toTermName).toString end Symbol @@ -2593,7 +2632,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler self.isTerm && !self.is(dotc.core.Flags.Method) && !self.is(dotc.core.Flags.Case/*, FIXME add this check and fix sourcecode butNot = Enum | Module*/) def isDefDef: Boolean = self.is(dotc.core.Flags.Method) def isBind: Boolean = - self.is(dotc.core.Flags.Case, butNot = Enum | Module) && !self.isClass + self.is(dotc.core.Flags.Case, butNot = dotc.core.Flags.Enum | dotc.core.Flags.Module) && !self.isClass def isNoSymbol: Boolean = self == Symbol.noSymbol def exists: Boolean = self != Symbol.noSymbol @@ -2768,7 +2807,11 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def SomeModule: Symbol = dotc.core.Symbols.defn.SomeClass.companionModule def ProductClass: Symbol = dotc.core.Symbols.defn.ProductClass def FunctionClass(arity: Int, isImplicit: Boolean = false, isErased: Boolean = false): Symbol = - dotc.core.Symbols.defn.FunctionSymbol(arity, isImplicit, isErased) + if arity < 0 then throw IllegalArgumentException(s"arity: $arity") + if isErased then + throw new Exception("Erased function classes are not supported. Use a refined `scala.runtime.ErasedFunction`") + else dotc.core.Symbols.defn.FunctionSymbol(arity, isImplicit) + def ErasedFunctionClass = dotc.core.Symbols.defn.ErasedFunctionClass def TupleClass(arity: Int): Symbol = dotc.core.Symbols.defn.TupleType(arity).nn.classSymbol.asClass def isTupleClass(sym: Symbol): Boolean = @@ -2783,6 +2826,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler object Flags extends FlagsModule: def Abstract: Flags = dotc.core.Flags.Abstract + def AbsOverride: Flags = dotc.core.Flags.AbsOverride def Artifact: Flags = dotc.core.Flags.Artifact def Case: Flags = dotc.core.Flags.Case def CaseAccessor: Flags = dotc.core.Flags.CaseAccessor @@ -2828,6 +2872,13 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def Synthetic: Flags = dotc.core.Flags.Synthetic def Trait: Flags = dotc.core.Flags.Trait def Transparent: Flags = dotc.core.Flags.Transparent + + // Keep: aligned with Quotes's `newMethod` doc + private[QuotesImpl] def validMethodFlags: Flags = Private | Protected | Override | Deferred | Final | Method | Implicit | Given | Local | AbsOverride | JavaStatic // Flags that could be allowed: Synthetic | ExtensionMethod | Exported | Erased | Infix | Invisible + // Keep: aligned with Quotes's `newVal` doc + private[QuotesImpl] def validValFlags: Flags = Private | Protected | Override | Deferred | Final | Param | Implicit | Lazy | Mutable | Local | ParamAccessor | Module | Package | Case | CaseAccessor | Given | Enum | AbsOverride | JavaStatic // Flags that could be added: Synthetic | Erased | Invisible + // Keep: aligned with Quotes's `newBind` doc + private[QuotesImpl] def validBindFlags: Flags = Case // Flags that could be allowed: Implicit | Given | Erased end Flags given FlagsMethods: FlagsMethods with @@ -2948,7 +2999,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler /** Checks that all definitions in this tree have the expected owner. * Nested definitions are ignored and assumed to be correct by construction. 
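* For instance (hypothetical scenario): a definition whose symbol was created
* with owner `a` but which is returned from a quote owned by `b` fails this
* check unless it is first re-owned via `Tree.changeOwner`.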
*/ - private def xCheckMacroedOwners(tree: Option[Tree], owner: Symbol): tree.type = + private def xCheckedMacroOwners(tree: Option[Tree], owner: Symbol): tree.type = if xCheckMacro then tree match case Some(tree) => @@ -2959,7 +3010,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler /** Checks that all definitions in this tree have the expected owner. * Nested definitions are ignored and assumed to be correct by construction. */ - private def xCheckMacroedOwners(tree: Tree, owner: Symbol): tree.type = + private def xCheckedMacroOwners(tree: Tree, owner: Symbol): tree.type = if xCheckMacro then xCheckMacroOwners(tree, owner) tree @@ -2973,12 +3024,16 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler t match case t: tpd.DefTree => val defOwner = t.symbol.owner - assert(defOwner == owner, + assert(defOwner == owner, { + val ownerName = owner.fullName + val defOwnerName = defOwner.fullName + val duplicateSymbolHint = + if ownerName == defOwnerName then "These are two different symbol instances with the same name. The symbol should have been instantiated only once.\n" + else "" s"""Tree had an unexpected owner for ${t.symbol} |Expected: $owner (${owner.fullName}) |But was: $defOwner (${defOwner.fullName}) - | - | + |$duplicateSymbolHint |The code of the definition of ${t.symbol} is |${Printer.TreeCode.show(t)} | @@ -2992,7 +3047,8 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler | |Tip: The owner of a tree can be changed using method `Tree.changeOwner`. |Tip: The default owner of definitions created in quotes can be changed using method `Symbol.asQuotes`. - |""".stripMargin) + |""".stripMargin + }) case _ => traverseChildren(t) }.traverse(tree) @@ -3030,6 +3086,9 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler "Reference to a method must be eta-expanded before it is used as an expression: " + term.show) term + private inline def xCheckMacroAssert(inline cond: Boolean, inline msg: String): Unit = + assert(!xCheckMacro || cond, msg) + object Printer extends PrinterModule: lazy val TreeCode: Printer[Tree] = new Printer[Tree]: @@ -3066,7 +3125,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler lazy val ConstantCode: Printer[Constant] = new Printer[Constant]: def show(const: Constant): String = - const.show(using ctx.fresh.setSetting(ctx.settings.color, "never")) + const.show(using ctx.withoutColors) lazy val ConstantStructure: Printer[Constant] = new Printer[Constant]: def show(const: Constant): String = @@ -3133,20 +3192,27 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler ctx1.gadtState.addToConstraint(typeHoles) ctx1 - val matchings = QuoteMatcher.treeMatch(scrutinee, pat1)(using ctx1) - - if typeHoles.isEmpty then matchings - else { - // After matching and doing all subtype checks, we have to approximate all the type bindings - // that we have found, seal them in a quoted.Type and add them to the result - def typeHoleApproximation(sym: Symbol) = - val fromAboveAnnot = sym.hasAnnotation(dotc.core.Symbols.defn.QuotedRuntimePatterns_fromAboveAnnot) - val fullBounds = ctx1.gadt.fullBounds(sym) - val tp = if fromAboveAnnot then fullBounds.hi else fullBounds.lo - reflect.TypeReprMethods.asType(tp) - matchings.map { tup => - Tuple.fromIArray(typeHoles.map(typeHoleApproximation).toArray.asInstanceOf[IArray[Object]]) ++ tup + // After matching and doing all subtype checks, we have to approximate
all the type bindings + // that we have found, seal them in a quoted.Type and add them to the result + def typeHoleApproximation(sym: Symbol) = + val fromAboveAnnot = sym.hasAnnotation(dotc.core.Symbols.defn.QuotedRuntimePatterns_fromAboveAnnot) + val fullBounds = ctx1.gadt.fullBounds(sym) + if fromAboveAnnot then fullBounds.hi else fullBounds.lo + + QuoteMatcher.treeMatch(scrutinee, pat1)(using ctx1).map { matchings => + import QuoteMatcher.MatchResult.* + lazy val spliceScope = SpliceScope.getCurrent + val typeHoleApproximations = typeHoles.map(typeHoleApproximation) + val typeHoleMapping = Map(typeHoles.zip(typeHoleApproximations)*) + val typeHoleMap = new Types.TypeMap { + def apply(tp: Types.Type): Types.Type = tp match + case Types.TypeRef(Types.NoPrefix, _) => typeHoleMapping.getOrElse(tp.typeSymbol, tp) + case _ => mapOver(tp) } + val matchedExprs = matchings.map(_.toExpr(typeHoleMap, spliceScope)) + val matchedTypes = typeHoleApproximations.map(reflect.TypeReprMethods.asType) + val results = matchedTypes ++ matchedExprs + Tuple.fromIArray(IArray.unsafeFromArray(results.toArray)) } } diff --git a/compiler/src/scala/quoted/runtime/impl/TypeImpl.scala b/compiler/src/scala/quoted/runtime/impl/TypeImpl.scala index 36da30e112c8..d4cea83efde8 100644 --- a/compiler/src/scala/quoted/runtime/impl/TypeImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/TypeImpl.scala @@ -14,6 +14,4 @@ final class TypeImpl(val typeTree: tpd.Tree, val scope: Scope) extends Type[?] { } override def hashCode(): Int = typeTree.hashCode() - - override def toString: String = "Type.of[...]" } diff --git a/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala b/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala index e934c1930163..a6a773adc9ba 100644 --- a/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala +++ b/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala @@ -1345,18 +1345,22 @@ object SourceCode { } private def printBoundsTree(bounds: TypeBoundsTree)(using elideThis: Option[Symbol]): this.type = { - bounds.low match { - case Inferred() => - case low => - this += " >: " - printTypeTree(low) - } - bounds.hi match { - case Inferred() => this - case hi => - this += " <: " - printTypeTree(hi) - } + if bounds.low.tpe == bounds.hi.tpe then + this += " = " + printTypeTree(bounds.low) + else + bounds.low match { + case Inferred() => + case low => + this += " >: " + printTypeTree(low) + } + bounds.hi match { + case Inferred() => this + case hi => + this += " <: " + printTypeTree(hi) + } } private def printBounds(bounds: TypeBounds)(using elideThis: Option[Symbol]): this.type = { diff --git a/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala b/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala index 944bf1957d43..8f9a9bd69a50 100644 --- a/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala +++ b/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala @@ -170,12 +170,16 @@ object CoursierScalaTests: /** Get coursier script */ @BeforeClass def setup(): Unit = - val ver = execCmd("uname")._2.head.replace('L', 'l').replace('D', 'd') + val launcherLocation = "https://github.com/coursier/launchers/raw/master" + val launcherName = execCmd("uname")._2.head.toLowerCase match + case "linux" => "cs-x86_64-pc-linux" + case "darwin" => "cs-x86_64-apple-darwin" + case other => fail(s"Unsupported OS for coursier launcher: $other") def runAndCheckCmd(cmd: String, options: String*): Unit = val (code, out) = 
execCmd(cmd, options*) if code != 0 then fail(s"Failed to run $cmd ${options.mkString(" ")}, exit code: $code, output: ${out.mkString("\n")}") - runAndCheckCmd("curl", s"-fLo cs https://git.io/coursier-cli-$ver") + runAndCheckCmd("curl", s"-fLo cs $launcherLocation/$launcherName") runAndCheckCmd("chmod", "+x cs") diff --git a/compiler/test-resources/repl-macros/i15104a b/compiler/test-resources/repl-macros/i15104a new file mode 100644 index 000000000000..92e82928b509 --- /dev/null +++ b/compiler/test-resources/repl-macros/i15104a @@ -0,0 +1,7 @@ +scala> import scala.quoted._ +scala> object Foo { def macroImpl(using Quotes) = Expr(1) } +// defined object Foo +scala> inline def foo = ${ Foo.macroImpl } +def foo: Int +scala> foo +val res0: Int = 1 diff --git a/compiler/test-resources/repl-macros/i15104b b/compiler/test-resources/repl-macros/i15104b new file mode 100644 index 000000000000..ebbdb2402076 --- /dev/null +++ b/compiler/test-resources/repl-macros/i15104b @@ -0,0 +1,5 @@ +scala> import scala.quoted._ +scala> object Foo { def macroImpl(using Quotes) = Expr(1); inline def foo = ${ Foo.macroImpl } } +// defined object Foo +scala> Foo.foo +val res0: Int = 1 diff --git a/compiler/test-resources/repl-macros/i15104c b/compiler/test-resources/repl-macros/i15104c new file mode 100644 index 000000000000..482b9487c9d9 --- /dev/null +++ b/compiler/test-resources/repl-macros/i15104c @@ -0,0 +1,7 @@ +scala> import scala.quoted._ +scala> def macroImpl(using Quotes) = Expr(1) +def macroImpl(using x$1: quoted.Quotes): quoted.Expr[Int] +scala> inline def foo = ${ macroImpl } +def foo: Int +scala> foo +val res0: Int = 1 diff --git a/compiler/test-resources/repl-macros/i5551 b/compiler/test-resources/repl-macros/i5551 index fb039ed19dd6..984551438b51 100644 --- a/compiler/test-resources/repl-macros/i5551 +++ b/compiler/test-resources/repl-macros/i5551 @@ -1,8 +1,7 @@ scala> import scala.quoted._ scala> def assertImpl(expr: Expr[Boolean])(using q: Quotes) = '{ if !($expr) then throw new AssertionError("failed assertion")} def assertImpl - (expr: quoted.Expr[Boolean]) - (using q: quoted.Quotes): quoted.Expr[Unit] + (expr: quoted.Expr[Boolean])(using q: quoted.Quotes): scala.quoted.Expr[Unit] scala> inline def assert(expr: => Boolean): Unit = ${ assertImpl('{expr}) } def assert(expr: => Boolean): Unit diff --git a/compiler/test-resources/repl/i10355 b/compiler/test-resources/repl/i10355 index bfe3af835c87..294b9d7f1101 100644 --- a/compiler/test-resources/repl/i10355 +++ b/compiler/test-resources/repl/i10355 @@ -1,5 +1,7 @@ scala> import scala.quoted._ scala> def foo(expr: Expr[Any])(using Quotes) = expr match { case '{ $x: t } => '{ $x: Any } } -def foo(expr: quoted.Expr[Any])(using x$2: quoted.Quotes): quoted.Expr[Any] +def foo + (expr: quoted.Expr[Any])(using x$2: quoted.Quotes): scala.quoted.Expr[Any] scala> def bar(expr: Expr[Any])(using Quotes) = expr match { case '{ $x: t } => '{ val a: t = ??? 
; ???} } -def bar(expr: quoted.Expr[Any])(using x$2: quoted.Quotes): quoted.Expr[Nothing] +def bar + (expr: quoted.Expr[Any])(using x$2: quoted.Quotes): scala.quoted.Expr[Nothing] diff --git a/compiler/test-resources/repl/i17231 b/compiler/test-resources/repl/i17231 new file mode 100644 index 000000000000..07a509fea917 --- /dev/null +++ b/compiler/test-resources/repl/i17231 @@ -0,0 +1,2 @@ +scala> summon[ValueOf["a"]] +val res0: ValueOf["a"] = scala.ValueOf@61 diff --git a/compiler/test-resources/repl/i7644 b/compiler/test-resources/repl/i7644 index 8ceaf8b00804..786823073470 100644 --- a/compiler/test-resources/repl/i7644 +++ b/compiler/test-resources/repl/i7644 @@ -5,11 +5,7 @@ scala> class T extends CanEqual | Cannot extend sealed trait CanEqual in a different source file | | longer explanation available when compiling with `-explain` --- [E056] Syntax Error: -------------------------------------------------------- -1 | class T extends CanEqual - | ^^^^^^^^ - | Missing type parameter for CanEqual -2 errors found +1 error found scala> class T extends CanEqual -- [E112] Syntax Error: -------------------------------------------------------- 1 | class T extends CanEqual @@ -17,8 +13,5 @@ scala> class T extends CanEqual | Cannot extend sealed trait CanEqual in a different source file | | longer explanation available when compiling with `-explain` --- [E056] Syntax Error: -------------------------------------------------------- -1 | class T extends CanEqual - | ^^^^^^^^ - | Missing type parameter for CanEqual -2 errors found +1 error found + diff --git a/compiler/test/dotc/pos-test-pickling.blacklist b/compiler/test/dotc/pos-test-pickling.blacklist index cdbad2160f2a..9888916a86c9 100644 --- a/compiler/test/dotc/pos-test-pickling.blacklist +++ b/compiler/test/dotc/pos-test-pickling.blacklist @@ -19,6 +19,9 @@ i12299a.scala i13871.scala i15181.scala i15922.scala +t5031_2.scala +i16997.scala +i7414.scala # Tree is huge and blows stack for printing Text i7034.scala @@ -92,3 +95,6 @@ i4176-gadt.scala i13974a.scala java-inherited-type1 + +# recursion limit exceeded +i7445b.scala diff --git a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala index 71bf530fcda5..ac4ba3ee0e75 100644 --- a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala +++ b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala @@ -1622,7 +1622,6 @@ class DottyBytecodeTests extends DottyBytecodeTest { val instructions = instructionsFromMethod(method).filter(_.isInstanceOf[LineNumber]) val expected = List( - LineNumber(2, Label(0)), LineNumber(3, Label(0)), LineNumber(4, Label(5)), // case y => LineNumber(5, Label(9)), @@ -1664,7 +1663,6 @@ class DottyBytecodeTests extends DottyBytecodeTest { val instructions = instructionsFromMethod(method).filter(_.isInstanceOf[LineNumber]) val expected = List( - LineNumber(2, Label(0)), LineNumber(3, Label(0)), LineNumber(4, Label(5)), // case a if a == 3 => LineNumber(5, Label(15)), diff --git a/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala b/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala index 33e898718b33..6173842e9ad1 100644 --- a/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala +++ b/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala @@ -163,28 +163,27 @@ class InlineBytecodeTests extends DottyBytecodeTest { val expected = List( Label(0), - LineNumber(6, Label(0)), LineNumber(3, Label(0)), VarOp(ALOAD, 0), Ldc(LDC, "tracking"), 
Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), - Label(6), - LineNumber(8, Label(6)), + Label(5), + LineNumber(8, Label(5)), VarOp(ALOAD, 0), Ldc(LDC, "abc"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), - Label(11), - LineNumber(3, Label(11)), + Label(10), + LineNumber(3, Label(10)), VarOp(ALOAD, 0), Ldc(LDC, "tracking"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), - Label(16), - LineNumber(10, Label(16)), + Label(15), + LineNumber(10, Label(15)), VarOp(ALOAD, 0), Ldc(LDC, "inner"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), Op(RETURN), - Label(22) + Label(21) ) assert(instructions == expected, "`track` was not properly inlined in `main`\n" + diffInstructions(instructions, expected)) @@ -228,23 +227,22 @@ class InlineBytecodeTests extends DottyBytecodeTest { val expected = List( Label(0), - LineNumber(12, Label(0)), LineNumber(7, Label(0)), VarOp(ALOAD, 0), Ldc(LDC, "tracking"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), - Label(6), - LineNumber(3, Label(6)), + Label(5), + LineNumber(3, Label(5)), VarOp(ALOAD, 0), Ldc(LDC, "tracking2"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), - Label(11), - LineNumber(14, Label(11)), + Label(10), + LineNumber(14, Label(10)), VarOp(ALOAD, 0), Ldc(LDC, "abc"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), Op(RETURN), - Label(17) + Label(16) ) assert(instructions == expected, "`track` was not properly inlined in `main`\n" + diffInstructions(instructions, expected)) @@ -288,23 +286,22 @@ class InlineBytecodeTests extends DottyBytecodeTest { val expected = List( Label(0), - LineNumber(12, Label(0)), LineNumber(3, Label(0)), VarOp(ALOAD, 0), Ldc(LDC, "tracking2"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), - Label(6), - LineNumber(8, Label(6)), + Label(5), + LineNumber(8, Label(5)), VarOp(ALOAD, 0), Ldc(LDC, "fgh"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), - Label(11), - LineNumber(14, Label(11)), + Label(10), + LineNumber(14, Label(10)), VarOp(ALOAD, 0), Ldc(LDC, "abc"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), Op(RETURN), - Label(17) + Label(16) ) assert(instructions == expected, "`track` was not properly inlined in `main`\n" + diffInstructions(instructions, expected)) @@ -349,23 +346,22 @@ class InlineBytecodeTests extends DottyBytecodeTest { val expected = List( Label(0), - LineNumber(13, Label(0)), LineNumber(3, Label(0)), VarOp(ALOAD, 0), Ldc(LDC, "tracking2"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), - Label(6), - LineNumber(3, Label(6)), + Label(5), + LineNumber(3, Label(5)), VarOp(ALOAD, 0), Ldc(LDC, "tracking2"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), - Label(11), - LineNumber(15, Label(11)), + Label(10), + LineNumber(15, Label(10)), VarOp(ALOAD, 0), Ldc(LDC, "abc"), Invoke(INVOKEVIRTUAL, "Foo", "foo", "(Ljava/lang/String;)V", false), Op(RETURN), - Label(17) + Label(16) ) assert(instructions == expected, "`track` was not properly inlined in `main`\n" + diffInstructions(instructions, expected)) @@ -582,6 +578,63 @@ class InlineBytecodeTests extends DottyBytecodeTest { } } + @Test def beta_reduce_polymorphic_function = { + val source = """class Test: + | def test = + | ([Z] => (arg: Z) => { val a: Z = arg; a }).apply[Int](2) + """.stripMargin + + checkBCode(source) { dir => + val clsIn = dir.lookupName("Test.class", directory = false).input + 
val clsNode = loadClassNode(clsIn) + + val fun = getMethod(clsNode, "test") + val instructions = instructionsFromMethod(fun) + val expected = + List( + Op(ICONST_2), + VarOp(ISTORE, 1), + VarOp(ILOAD, 1), + Op(IRETURN) + ) + + assert(instructions == expected, + "`i was not properly beta-reduced in `test`\n" + diffInstructions(instructions, expected)) + + } + } + + @Test def beta_reduce_function_of_opaque_types = { + val source = """object foo: + | opaque type T = Int + | inline def apply(inline op: T => T): T = op(2) + | + |class Test: + | def test = foo { n => n } + """.stripMargin + + checkBCode(source) { dir => + val clsIn = dir.lookupName("Test.class", directory = false).input + val clsNode = loadClassNode(clsIn) + + val fun = getMethod(clsNode, "test") + val instructions = instructionsFromMethod(fun) + val expected = + List( + Field(GETSTATIC, "foo$", "MODULE$", "Lfoo$;"), + VarOp(ASTORE, 1), + VarOp(ALOAD, 1), + VarOp(ASTORE, 2), + Op(ICONST_2), + Op(IRETURN), + ) + + assert(instructions == expected, + "`i was not properly beta-reduced in `test`\n" + diffInstructions(instructions, expected)) + + } + } + @Test def i9456 = { val source = """class Foo { | def test: Int = inline2(inline1(2.+)) diff --git a/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala b/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala index e9d0e26f33b0..2a665c478932 100644 --- a/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala @@ -110,6 +110,8 @@ class BootstrappedOnlyCompilationTests { aggregateTests( compileFilesInDir("tests/neg-macros", defaultOptions.and("-Xcheck-macros")), compileFile("tests/pos-macros/i9570.scala", defaultOptions.and("-Xfatal-warnings")), + compileFile("tests/pos-macros/macro-deprecation.scala", defaultOptions.and("-Xfatal-warnings", "-deprecation")), + compileFile("tests/pos-macros/macro-experimental.scala", defaultOptions.and("-Yno-experimental")), ).checkExpectedErrors() } @@ -130,6 +132,8 @@ class BootstrappedOnlyCompilationTests { compileFilesInDir("tests/run-custom-args/Yretain-trees", defaultOptions and "-Yretain-trees"), compileFilesInDir("tests/run-custom-args/Yread-comments", defaultOptions and "-Yread-docs"), compileFilesInDir("tests/run-custom-args/run-macros-erased", defaultOptions.and("-language:experimental.erasedDefinitions").and("-Xcheck-macros")), + compileDir("tests/run-custom-args/Xmacro-settings/simple", defaultOptions.and("-Xmacro-settings:one,two,three")), + compileDir("tests/run-custom-args/Xmacro-settings/compileTimeEnv", defaultOptions.and("-Xmacro-settings:a,b=1,c.b.a=x.y.z=1,myLogger.level=INFO")), ) }.checkRuns() diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala index 8d9e28d415c1..4e86a3b83383 100644 --- a/compiler/test/dotty/tools/dotc/CompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala @@ -29,7 +29,7 @@ class CompilationTests { @Test def pos: Unit = { implicit val testGroup: TestGroup = TestGroup("compilePos") - aggregateTests( + var tests = List( compileFile("tests/pos/nullarify.scala", defaultOptions.and("-Ycheck:nullarify")), compileFile("tests/pos-special/utf8encoded.scala", explicitUTF8), compileFile("tests/pos-special/utf16encoded.scala", explicitUTF16), @@ -48,6 +48,7 @@ class CompilationTests { compileFilesInDir("tests/pos", defaultOptions.and("-Ysafe-init", "-Ylegacy-lazy-vals", "-Ycheck-constraint-deps"), 
FileFilter.include(TestSources.posLazyValsAllowlist)), compileFilesInDir("tests/pos-deep-subtype", allowDeepSubtypes), compileFilesInDir("tests/pos-custom-args/no-experimental", defaultOptions.and("-Yno-experimental")), + compileFilesInDir("tests/pos-custom-args/strict", defaultOptions.and("-source", "future", "-deprecation", "-Xfatal-warnings")), compileDir("tests/pos-special/java-param-names", defaultOptions.withJavacOnlyOptions("-parameters")), compileFile( // succeeds despite -Xfatal-warnings because of -nowarn @@ -55,8 +56,6 @@ class CompilationTests { defaultOptions.and("-nowarn", "-Xfatal-warnings") ), compileFile("tests/pos-special/typeclass-scaling.scala", defaultOptions.and("-Xmax-inlines", "40")), - compileFile("tests/pos-special/i7296.scala", defaultOptions.and("-source", "future", "-deprecation", "-Xfatal-warnings")), - compileDir("tests/pos-special/adhoc-extension", defaultOptions.and("-source", "future", "-feature", "-Xfatal-warnings")), compileFile("tests/pos-special/i7575.scala", defaultOptions.andLanguageFeature("dynamics")), compileFile("tests/pos-special/kind-projector.scala", defaultOptions.and("-Ykind-projector")), compileFile("tests/pos-special/kind-projector-underscores.scala", defaultOptions.and("-Ykind-projector:underscores")), @@ -65,10 +64,14 @@ class CompilationTests { compileFile("tests/pos-custom-args/i9267.scala", defaultOptions.and("-Ystop-after:erasure")), compileFile("tests/pos-special/extend-java-enum.scala", defaultOptions.and("-source", "3.0-migration")), compileFile("tests/pos-custom-args/help.scala", defaultOptions.and("-help", "-V", "-W", "-X", "-Y")), - compileFile("tests/pos-custom-args/i10383.scala", defaultOptions.and("-source", "future", "-deprecation", "-Xfatal-warnings")), compileFile("tests/pos-custom-args/i13044.scala", defaultOptions.and("-Xmax-inlines:33")), - compileFile("tests/pos-custom-args/jdk-8-app.scala", defaultOptions.and("-release:8")), - ).checkCompile() + compileFile("tests/pos-custom-args/jdk-8-app.scala", defaultOptions.and("-release:8")) + ) + + if scala.util.Properties.isJavaAtLeast("16") then + tests ::= compileFilesInDir("tests/pos-java16+", defaultOptions.and("-Ysafe-init")) + + aggregateTests(tests*).checkCompile() } @Test def rewrites: Unit = { @@ -84,6 +87,7 @@ class CompilationTests { compileFile("tests/rewrites/i11895.scala", defaultOptions.and("-indent", "-rewrite")), compileFile("tests/rewrites/i12340.scala", unindentOptions.and("-rewrite")), compileFile("tests/rewrites/i17187.scala", unindentOptions.and("-rewrite")), + compileFile("tests/rewrites/i17399.scala", unindentOptions.and("-rewrite")), ).checkRewrites() } @@ -146,6 +150,7 @@ class CompilationTests { compileFilesInDir("tests/neg-custom-args/feature", defaultOptions.and("-Xfatal-warnings", "-feature")), compileFilesInDir("tests/neg-custom-args/no-experimental", defaultOptions.and("-Yno-experimental")), compileFilesInDir("tests/neg-custom-args/captures", defaultOptions.and("-language:experimental.captureChecking")), + compileFilesInDir("tests/neg-custom-args/explain", defaultOptions.and("-explain")), compileFile("tests/neg-custom-args/avoid-warn-deprecation.scala", defaultOptions.and("-Xfatal-warnings", "-feature")), compileFile("tests/neg-custom-args/i3246.scala", scala2CompatMode), compileFile("tests/neg-custom-args/overrideClass.scala", scala2CompatMode), @@ -158,9 +163,6 @@ class CompilationTests { compileFile("tests/neg-custom-args/i1754.scala", allowDeepSubtypes), compileFile("tests/neg-custom-args/i12650.scala", allowDeepSubtypes), 
compileFile("tests/neg-custom-args/i9517.scala", defaultOptions.and("-Xprint-types")), - compileFile("tests/neg-custom-args/i11637.scala", defaultOptions.and("-explain")), - compileFile("tests/neg-custom-args/i15575.scala", defaultOptions.and("-explain")), - compileFile("tests/neg-custom-args/i16601a.scala", defaultOptions.and("-explain")), compileFile("tests/neg-custom-args/interop-polytypes.scala", allowDeepSubtypes.and("-Yexplicit-nulls")), compileFile("tests/neg-custom-args/conditionalWarnings.scala", allowDeepSubtypes.and("-deprecation").and("-Xfatal-warnings")), compileFilesInDir("tests/neg-custom-args/isInstanceOf", allowDeepSubtypes and "-Xfatal-warnings"), @@ -185,7 +187,6 @@ class CompilationTests { compileFile("tests/neg-custom-args/matchable.scala", defaultOptions.and("-Xfatal-warnings", "-source", "future")), compileFile("tests/neg-custom-args/i7314.scala", defaultOptions.and("-Xfatal-warnings", "-source", "future")), compileFile("tests/neg-custom-args/capt-wf.scala", defaultOptions.and("-language:experimental.captureChecking", "-Xfatal-warnings")), - compileDir("tests/neg-custom-args/hidden-type-errors", defaultOptions.and("-explain")), compileFile("tests/neg-custom-args/i13026.scala", defaultOptions.and("-print-lines")), compileFile("tests/neg-custom-args/i13838.scala", defaultOptions.and("-Ximplicit-search-limit", "1000")), compileFile("tests/neg-custom-args/jdk-9-app.scala", defaultOptions.and("-release:8")), @@ -210,8 +211,6 @@ class CompilationTests { compileFile("tests/run-custom-args/defaults-serizaliable-no-forwarders.scala", defaultOptions and "-Xmixin-force-forwarders:false"), compileFilesInDir("tests/run-custom-args/erased", defaultOptions.and("-language:experimental.erasedDefinitions")), compileFilesInDir("tests/run-custom-args/fatal-warnings", defaultOptions.and("-Xfatal-warnings")), - compileDir("tests/run-custom-args/Xmacro-settings/simple", defaultOptions.and("-Xmacro-settings:one,two,three")), - compileDir("tests/run-custom-args/Xmacro-settings/compileTimeEnv", defaultOptions.and("-Xmacro-settings:a,b=1,c.b.a=x.y.z=1,myLogger.level=INFO")), compileFilesInDir("tests/run-custom-args/captures", allowDeepSubtypes.and("-language:experimental.captureChecking")), compileFilesInDir("tests/run-deep-subtype", allowDeepSubtypes), compileFilesInDir("tests/run", defaultOptions.and("-Ysafe-init")), diff --git a/compiler/test/dotty/tools/dotc/core/ShowDecoratorTest.scala b/compiler/test/dotty/tools/dotc/core/ShowDecoratorTest.scala new file mode 100644 index 000000000000..acc9d1914bf6 --- /dev/null +++ b/compiler/test/dotty/tools/dotc/core/ShowDecoratorTest.scala @@ -0,0 +1,21 @@ +package dotty.tools +package dotc +package core + +import Contexts.*, Decorators.*, Denotations.*, SymDenotations.*, Symbols.*, Types.* +import printing.Formatting.Show + +import org.junit.Test +import org.junit.Assert.* + +class ShowDecoratorTest extends DottyTest: + import ShowDecoratorTest.* + + @Test def t1 = assertEquals("... 
(cannot display due to FooException boom) ...", Foo().tryToShow) +end ShowDecoratorTest + +object ShowDecoratorTest: + import printing.*, Texts.* + class FooException extends Exception("boom") + case class Foo() extends Showable: + def toText(printer: Printer): Text = throw new FooException diff --git a/compiler/test/dotty/tools/dotc/printing/SyntaxHighlightingTests.scala b/compiler/test/dotty/tools/dotc/printing/SyntaxHighlightingTests.scala index 2f35ccb35434..2e4b7bf1bb3f 100644 --- a/compiler/test/dotty/tools/dotc/printing/SyntaxHighlightingTests.scala +++ b/compiler/test/dotty/tools/dotc/printing/SyntaxHighlightingTests.scala @@ -12,8 +12,7 @@ class SyntaxHighlightingTests extends DottyTest { import SyntaxHighlighting._ private def test(source: String, expected: String): Unit = { - val testCtx = ctx.fresh.setSetting(ctx.settings.color, "always") - val highlighted = SyntaxHighlighting.highlight(source)(using testCtx) + val highlighted = SyntaxHighlighting.highlight(source)(using ctx.withColors) .replace(NoColor, ">") .replace(CommentColor, " simplify(simplify(a) - simplify(a)) == Empty // Previously there were no simplify calls, // and this is a counter-example, // for which you need either to simplify(b) or simplify the minus result. - val engine = patmat.SpaceEngine() - import engine.* - val tp = defn.ConsClass.typeRef.appliedTo(defn.AnyType) + val tp = defn.ConsType.appliedTo(defn.AnyType) val unappTp = requiredMethod("scala.collection.immutable.::.unapply").termRef val params = List(Empty, Typ(tp)) diff --git a/compiler/test/dotty/tools/dotc/typer/InstantiateModel.scala b/compiler/test/dotty/tools/dotc/typer/InstantiateModel.scala new file mode 100644 index 000000000000..b08062913dac --- /dev/null +++ b/compiler/test/dotty/tools/dotc/typer/InstantiateModel.scala @@ -0,0 +1,57 @@ +package dotty.tools +package dotc +package typer + +// Modelling the decision in IsFullyDefined +object InstantiateModel: + enum LB { case NN; case LL; case L1 }; import LB.* + enum UB { case AA; case UU; case U1 }; import UB.* + enum Var { case V; case NotV }; import Var.* + enum MSe { case M; case NotM }; import MSe.* + enum Bot { case Fail; case Ok; case Flip }; import Bot.* + enum Act { case Min; case Max; case ToMax; case Skip; case False }; import Act.* + + // NN/AA = Nothing/Any + // LL/UU = the original bounds, on the type parameter + // L1/U1 = the constrained bounds, on the type variable + // V = variance >= 0 ("non-contravariant") + // MSe = minimisedSelected + // Bot = IfBottom + // ToMax = delayed maximisation, via addition to toMaximize + // Skip = minimisedSelected "hold off instantiating" + // False = return false + + // there are 9 combinations: + // # | LB | UB | d | // d = direction + // --+----+----+---+ + // 1 | L1 | AA | - | L1 <: T + // 2 | L1 | UU | - | L1 <: T <: UU + // 3 | LL | U1 | + | LL <: T <: U1 + // 4 | NN | U1 | + | T <: U1 + // 5 | L1 | U1 | 0 | L1 <: T <: U1 + // 6 | LL | UU | 0 | LL <: T <: UU + // 7 | LL | AA | 0 | LL <: T + // 8 | NN | UU | 0 | T <: UU + // 9 | NN | AA | 0 | T + + def decide(lb: LB, ub: UB, v: Var, bot: Bot, m: MSe): Act = (lb, ub) match + case (L1, AA) => Min + case (L1, UU) => Min + case (LL, U1) => Max + case (NN, U1) => Max + + case (L1, U1) => if m==M || v==V then Min else ToMax + case (LL, UU) => if m==M || v==V then Min else ToMax + case (LL, AA) => if m==M || v==V then Min else ToMax + + case (NN, UU) => bot match + case _ if m==M => Max + //case Ok if v==V => Min // removed, i14218 fix + case Fail if v==V => False + case _ => ToMax + + case 
(NN, AA) => bot match + case _ if m==M => Skip + case Ok if v==V => Min + case Fail if v==V => False + case _ => ToMax diff --git a/compiler/test/dotty/tools/repl/ReplCompilerTests.scala b/compiler/test/dotty/tools/repl/ReplCompilerTests.scala index bcb08cd232d7..ecdfeb512e1b 100644 --- a/compiler/test/dotty/tools/repl/ReplCompilerTests.scala +++ b/compiler/test/dotty/tools/repl/ReplCompilerTests.scala @@ -347,6 +347,12 @@ class ReplCompilerTests extends ReplTest: assertEquals("java.lang.AssertionError: assertion failed", all.head) } + @Test def `i13097 expect lambda after colon` = contextually: + assert(ParseResult.isIncomplete("val x = List(42).foreach:")) + + @Test def `i13097 expect template after colon` = contextually: + assert(ParseResult.isIncomplete("class C:")) + object ReplCompilerTests: private val pattern = Pattern.compile("\\r[\\n]?|\\n"); diff --git a/compiler/test/dotty/tools/repl/ScriptedTests.scala b/compiler/test/dotty/tools/repl/ScriptedTests.scala index 5c3a32cd40f8..dc809228e86b 100644 --- a/compiler/test/dotty/tools/repl/ScriptedTests.scala +++ b/compiler/test/dotty/tools/repl/ScriptedTests.scala @@ -3,12 +3,16 @@ package tools package repl import org.junit.Test +import org.junit.experimental.categories.Category /** Runs all tests contained in `compiler/test-resources/repl/` */ class ScriptedTests extends ReplTest { @Test def replTests = scripts("/repl").foreach(testFile) + @Category(Array(classOf[BootstrappedOnlyTests])) + @Test def replMacrosTests = scripts("/repl-macros").foreach(testFile) + @Test def typePrinterTests = scripts("/type-printer").foreach(testFile) } diff --git a/compiler/test/dotty/tools/utils.scala b/compiler/test/dotty/tools/utils.scala index bfedc338f25a..75918674146c 100644 --- a/compiler/test/dotty/tools/utils.scala +++ b/compiler/test/dotty/tools/utils.scala @@ -17,8 +17,10 @@ import scala.util.control.{ControlThrowable, NonFatal} import dotc.config.CommandLineParser +object Dummy + def scripts(path: String): Array[File] = { - val dir = new File(this.getClass.getResource(path).getPath) + val dir = new File(Dummy.getClass.getResource(path).getPath) assert(dir.exists && dir.isDirectory, "Couldn't load scripts dir") dir.listFiles.filter { f => val path = if f.isDirectory then f.getPath + "/" else f.getPath diff --git a/compiler/test/dotty/tools/vulpix/ParallelTesting.scala b/compiler/test/dotty/tools/vulpix/ParallelTesting.scala index 62b1b88984bc..bccbcbee29e1 100644 --- a/compiler/test/dotty/tools/vulpix/ParallelTesting.scala +++ b/compiler/test/dotty/tools/vulpix/ParallelTesting.scala @@ -12,7 +12,7 @@ import java.nio.file.{Files, NoSuchFileException, Path, Paths} import java.nio.charset.{Charset, StandardCharsets} import java.text.SimpleDateFormat import java.util.{HashMap, Timer, TimerTask} -import java.util.concurrent.{TimeUnit, TimeoutException, Executors => JExecutors} +import java.util.concurrent.{ExecutionException, TimeUnit, TimeoutException, Executors => JExecutors} import scala.collection.mutable import scala.io.{Codec, Source} @@ -494,6 +494,12 @@ trait ParallelTesting extends RunnerOrchestration { self => .and("-d", targetDir.getPath) .withClasspath(targetDir.getPath) + def waitForJudiciously(process: Process): Int = + try process.waitFor() + catch case _: InterruptedException => + try if process.waitFor(5L, TimeUnit.MINUTES) then process.exitValue() else -2 + finally Thread.currentThread.interrupt() + def compileWithJavac(fs: Array[String]) = if (fs.nonEmpty) { val fullArgs = Array( "javac", @@ -503,7 +509,7 @@ trait 
ParallelTesting extends RunnerOrchestration { self => val process = Runtime.getRuntime.exec(fullArgs) val output = Source.fromInputStream(process.getErrorStream).mkString - if (process.waitFor() != 0) Some(output) + if waitForJudiciously(process) != 0 then Some(output) else None } else None @@ -676,7 +682,11 @@ trait ParallelTesting extends RunnerOrchestration { self => for fut <- eventualResults do try fut.get() - catch case ex: Exception => + catch + case ee: ExecutionException if ee.getCause.isInstanceOf[InterruptedException] => + System.err.println("Interrupted (probably running after shutdown)") + ee.printStackTrace() + case ex: Exception => System.err.println(ex.getMessage) ex.printStackTrace() @@ -751,8 +761,11 @@ trait ParallelTesting extends RunnerOrchestration { self => case _ => } case Failure(output) => - echo(s"Test '${testSource.title}' failed with output:") - echo(output) + if output == "" then + echo(s"Test '${testSource.title}' failed with no output") + else + echo(s"Test '${testSource.title}' failed with output:") + echo(output) failTestSource(testSource) case Timeout => echo("failed because test " + testSource.title + " timed out") diff --git a/compiler/test/worksheets/baseTypetest.sc b/compiler/test/worksheets/baseTypetest.sc index 001f1e3b3eaa..4dbd68a6fdc7 100644 --- a/compiler/test/worksheets/baseTypetest.sc +++ b/compiler/test/worksheets/baseTypetest.sc @@ -22,5 +22,5 @@ object baseTypetest extends DottyTest { defn.StringClass isSubClass defn.NullClass //> res4: Boolean = false defn.StringClass.typeRef.baseType(defn.NullClass) //> res5: dotty.tools.dotc.core.Types.Type = NoType - + } \ No newline at end of file diff --git a/compiler/test/worksheets/denotTest.sc b/compiler/test/worksheets/denotTest.sc index 222a347b6947..aa3fb383bd6f 100644 --- a/compiler/test/worksheets/denotTest.sc +++ b/compiler/test/worksheets/denotTest.sc @@ -7,7 +7,7 @@ import Types._, Symbols._ object denotTest extends DottyTest { println("Welcome to the Scala worksheet") //> Welcome to the Scala worksheet - + val str = defn.StringClass.typeRef //> str : dotty.tools.dotc.core.Types.TypeRef = TypeRef(ThisType(module class l //| ang#57),String) val d= str.member("getBytes".toTermName) //> d : dotty.tools.dotc.core.Denotations.Denotation = val getBytes val g @@ -47,7 +47,7 @@ object denotTest extends DottyTest { //| a#35),Array), scala$Array$$T, TypeAlias(TypeRef(ThisType(module class scala# //| 35),Char))), TypeRef(ThisType(module class scala#35),Int), TypeRef(ThisType( //| module class scala#35),Int)), TypeRef(ThisType(module class lang#57),String) - //| ), JavaMethodType(List(x$0), List(RefinedType(TypeRef(ThisType(module class + //| ), JavaMethodType(List(x$0), List(RefinedType(TypeRef(ThisType(module class //| scala#35),Array), scala$Array$$T, TypeAlias(TypeRef(ThisType(module class sc //| ala#35),Char)))), TypeRef(ThisType(module class lang#57),String)), JavaMetho //| dType(List(x$0), List(TypeRef(ThisType(module class scala#35),Any)), TypeRef diff --git a/compiler/test/worksheets/nesting.sc b/compiler/test/worksheets/nesting.sc index a6fc924320a0..bb3e9a71146e 100644 --- a/compiler/test/worksheets/nesting.sc +++ b/compiler/test/worksheets/nesting.sc @@ -2,7 +2,7 @@ package dotty.tools.dotc.core object nesting { class C { - + class D { private def x = "D" def show = x @@ -10,7 +10,7 @@ object nesting { println(x) } } - + val foo: D = { class D extends C.this.D { private def x = "foo.D" @@ -21,11 +21,11 @@ object nesting { new D } } - + val c = new C //> c : dotty.tools.dotc.core.nesting.C = 
dotty.tools.dotc.core.nesting$C@1a84d //| a23 val d = c.foo //> d : dotty.tools.dotc.core.nesting.c.D = dotty.tools.dotc.core.nesting$C$D$1 //| @2705d88a d.show //> res0: String = foo.D - + } \ No newline at end of file diff --git a/compiler/test/worksheets/periodtest.sc b/compiler/test/worksheets/periodtest.sc index 09c02da19a10..68a7cc43b20e 100644 --- a/compiler/test/worksheets/periodtest.sc +++ b/compiler/test/worksheets/periodtest.sc @@ -2,9 +2,9 @@ package dotty.tools.dotc.core object periodtest { println("Welcome to the Scala worksheet") //> Welcome to the Scala worksheet - + import Periods._ - + val p1 = Period(1, 2, 7) //> p1 : dotty.tools.dotc.core.Periods.Period = Period(2..7, run = 1) val p2 = Period(1, 3, 7) //> p2 : dotty.tools.dotc.core.Periods.Period = Period(3..7, run = 1) p1 contains p2 //> res0: Boolean = true diff --git a/compiler/test/worksheets/positiontest.sc b/compiler/test/worksheets/positiontest.sc index 11cc54dbeab9..b152368145f1 100644 --- a/compiler/test/worksheets/positiontest.sc +++ b/compiler/test/worksheets/positiontest.sc @@ -5,7 +5,7 @@ import Positions._ object positiontest { println("Welcome to the Scala worksheet") //> Welcome to the Scala worksheet - + val p = Position(0, 1, 0) //> p : dotty.tools.dotc.util.Positions.Position = [0..1] val p2 = Position(0, 2) //> p2 : dotty.tools.dotc.util.Positions.Position = [0..2] val p3 = Position(1, 0) //> p3 : dotty.tools.dotc.util.Positions.Position = [no position] diff --git a/compiler/test/worksheets/testnames.sc b/compiler/test/worksheets/testnames.sc index 282b07d4edb7..8f042b7036fd 100644 --- a/compiler/test/worksheets/testnames.sc +++ b/compiler/test/worksheets/testnames.sc @@ -2,7 +2,7 @@ package dotty.tools.dotc.core object testnames { println("Welcome to the Scala worksheet") //> Welcome to the Scala worksheet - + import Names._ val n = termName("hello") //> n : dotty.tools.dotc.core.Names.TermName = hello val tn = n.toTypeName //> tn : dotty.tools.dotc.core.Names.TypeName = hello @@ -10,7 +10,7 @@ object testnames { assert(tn.toTermName eq n) assert(tn.toLocalName eq ln) assert(n.toLocalName eq ln) - + n == tn //> res0: Boolean = false n == ln //> res1: Boolean = false n eq tn //> res2: Boolean = false @@ -19,7 +19,7 @@ object testnames { val foo = encodedTermName("++") //> foo : dotty.tools.dotc.core.Names.TermName = $plus$plus foo.hashCode //> res5: Int = 5 foo.toTypeName.hashCode //> res6: Int = -5 - + val nfoo = n ++ foo //> nfoo : dotty.tools.dotc.core.testnames.n.ThisName = hello$plus$plus nfoo contains '$' //> res7: Boolean = true nfoo.replace('$', '.') //> res8: dotty.tools.dotc.core.testnames.nfoo.ThisName = hello.plus.plus @@ -36,7 +36,7 @@ object testnames { termName("abc") //> res18: dotty.tools.dotc.core.Names.TermName = abc nfoo.filter(_ >= 'l') //> res19: dotty.tools.dotc.core.Names.Name = lloplusplus nfoo map (_.toUpper) //> res20: dotty.tools.dotc.core.Names.Name = HELLO$PLUS$PLUS - + import Decorators._ val local = "local".toTermName.toLocalName //> local : dotty.tools.dotc.core.Names.LocalName = local diff --git a/dist/bin/scaladoc b/dist/bin/scaladoc index 67210f0d6b4f..8b9ec41a7f8c 100755 --- a/dist/bin/scaladoc +++ b/dist/bin/scaladoc @@ -61,15 +61,31 @@ classpathArgs () { CLASS_PATH+="$(find_lib "*tasty-core*")$PSEP" CLASS_PATH+="$(find_lib "*scala3-tasty-inspector*")$PSEP" CLASS_PATH+="$(find_lib "*flexmark-0*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-html-parser*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark*")$PSEP" CLASS_PATH+="$(find_lib "*flexmark-ext-anchorlink*")$PSEP" 
CLASS_PATH+="$(find_lib "*flexmark-ext-autolink*")$PSEP" CLASS_PATH+="$(find_lib "*flexmark-ext-emoji*")$PSEP" CLASS_PATH+="$(find_lib "*flexmark-ext-gfm-strikethrough*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-gfm-tables*")$PSEP" CLASS_PATH+="$(find_lib "*flexmark-ext-gfm-tasklist*")$PSEP" CLASS_PATH+="$(find_lib "*flexmark-ext-wikilink*")$PSEP" CLASS_PATH+="$(find_lib "*flexmark-ext-yaml-front-matter*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-ext-tables*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-ext-ins*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-ext-superscript*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-util*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-util-ast*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-util-data*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-util-dependency*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-util-misc*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-util-format*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-util-sequence*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-util-builder*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-util-collection*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-util-visitor*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-util-options*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-util-html*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-formatter*")$PSEP" + CLASS_PATH+="$(find_lib "*flexmark-ast*")$PSEP" CLASS_PATH+="$(find_lib "*liqp*")$PSEP" CLASS_PATH+="$(find_lib "*jsoup*")$PSEP" CLASS_PATH+="$(find_lib "*jackson-dataformat-yaml*")$PSEP" @@ -80,7 +96,6 @@ classpathArgs () { CLASS_PATH+="$(find_lib "*jline-reader*")$PSEP" CLASS_PATH+="$(find_lib "*jline-terminal-3*")$PSEP" CLASS_PATH+="$(find_lib "*jline-terminal-jna*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util*")$PSEP" CLASS_PATH+="$(find_lib "*flexmark-formatter*")$PSEP" CLASS_PATH+="$(find_lib "*autolink-0.6*")$PSEP" CLASS_PATH+="$(find_lib "*flexmark-jira-converter*")$PSEP" @@ -93,9 +108,6 @@ classpathArgs () { CLASS_PATH+="$(find_lib "*protobuf-java*")$PSEP" CLASS_PATH+="$(find_lib "*util-interface*")$PSEP" CLASS_PATH+="$(find_lib "*jna-5*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-tables*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-ins*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-superscript*")$PSEP" CLASS_PATH+="$(find_lib "*antlr4-runtime*")$PSEP" jvm_cp_args="-classpath \"$CLASS_PATH\"" diff --git a/docs/_assets/css/dottydoc.css b/docs/_assets/css/dottydoc.css index ca7613835ff0..6408fac3fab4 100644 --- a/docs/_assets/css/dottydoc.css +++ b/docs/_assets/css/dottydoc.css @@ -1,243 +1,23 @@ -html, body { - font-weight: 300; - height: 100%; -} - -main.container { - min-height: 100vh; - padding: 15px 15px; - padding-bottom: 45px; /* prevents the content to be hidden by the gitter sidecar */ -} - -.container img { - width: 100%; - height: auto; -} - -/* headers */ -main header { - border-bottom: 1px solid rgba(0,0,0,.1); - margin-bottom: 16px; - padding-bottom: 16px; -} - -main > h1 { - margin-bottom: 20px; -} - .byline { font-size: 14px; + display: flex; + margin-top: 10px; } -.byline, .byline a { - color: grey; -} -.byline .author { - display: block; -} - -/* indexes */ -ul.post-list { - list-style: none; - padding-left: 0; -} -.post-list h2 { - margin-bottom: 0; -} - -/* headings anchors */ -a.anchor { - color: transparent; - margin-left: -23px; - padding-right: 3px; - transition: color .4s ease-out; -} - -a.anchor::before { - content: "\f0c1"; - font-family: "Font Awesome 5 Free"; - font-weight: 900; - font-size: 20px; -} - -h1:hover 
a.anchor, -h2:hover a.anchor, -h3:hover a.anchor, -h4:hover a.anchor, -h5:hover a.anchor { - color: lightgrey; - text-decoration: none; -} - -h1:hover a.anchor:hover, -h2:hover a.anchor:hover, -h3:hover a.anchor:hover, -h4:hover a.anchor:hover, -h5:hover a.anchor:hover { - color: var(--secondary); -} - -/* blog footer */ -.blog-author { - color: gray; -} - -.blog-author img#author-img { +.byline img#author-img { width: auto; height: auto; - max-width:100px; - max-height:100px; - border-radius: 50%; + max-width: 50px; + border-radius: 10px; } -/* api docs */ -.api span.letter-anchor { - float: left; - width: 50px; - height: 50px; - border-radius: 50px; - color: white; - margin-top: 6px; - margin-right: 8px; - line-height: 50px; - text-align: center; - text-decoration: none; - font-size: 43px; - font-family: var(--font-family-sans-serif); -} -.api span.letter-anchor.object { - line-height: 48px; -} -.api span.letter-anchor.class { - line-height: 48px; - padding-right: 3px; -} -.letter-anchor.object { - background: #2c6c8d; -} -.letter-anchor.class { - background: #44ad7d; -} -.letter-anchor.trait { - background: #19aacf; -} -.letter-anchor.enum { - background: #7803fc; -} -.letter-anchor.package { - background: #2c6c8d; -} - -.api header { - font-family: var(--font-family-sans-serif); +.byline, .byline a { + color: grey; } -.api header .name-prefix { +.byline .author { display: block; } -.api header .name-suffix { - display: inline-block; -} - -.api header h1 { - margin: -13px 8px 0 0; - display: inline-block; -} -.api h2 { - margin-top: 1rem; -} -.api h3 { - display: inline; - margin: 0; - font: inherit; - font-weight: bold; -} - -/* improved display and wrapping of parameters */ -.api .params, .api .type-params { - display: inline-flex; - flex-flow: wrap; -} - -/* api layout */ -.wide-table { - display: table; - width: 100%; -} -.api .member:hover { - background: var(--doc-bg); - cursor: pointer; -} -.api .left-column { - white-space: nowrap; - padding-left: 1em; - border-left: 3px solid transparent;/* table rows cannot have borders*/ - font-family: var(--font-family-monospace); - text-align: right; - width: 1px; -} -.api .member:hover .left-column { - border-left: 3px solid var(--secondary); -} -.api .right-column { - display: inline; - text-align: right; - font-family: var(--font-family-monospace); -} - -/* admonitions */ -blockquote { - padding: 0 1em; - color: #777; - border-left: 0.25em solid #ddd; -} - -aside { - padding: 15px; - margin: 10px 0; -} - -aside.warning { - border-left: 3px solid var(--red500); - background-color: var(--aside-warning-bg); -} - -aside.notice { - border-left: 3px solid #4c97e4; - background-color: #e4ebff; -} - -aside.success { - border-left: 3px solid #36bf1d; - background-color: #ebfddd; -} - -/* media queries for bigger screens (dottydoc is mobile-first) */ -@media (min-width: 576px) { - .byline .author { - display: inline; - margin-left: 1em; - } - main.container { - padding: 15px 30px; - } -} -@media (min-width: 768px) { - .api .member { - display: table-row; - } - .api .left-column { - display: table-cell; - } - .api .right-column { - display: flex; - flex-flow: wrap; - } - main.container { - padding: 15px 45px; - } -} -header { - position: static !important; - width: 100% !important; +.byline .secondary-infos{ + margin-left: 10px; } diff --git a/docs/_assets/images/contribution/breakpoint.jpg b/docs/_assets/images/contribution/breakpoint.jpg new file mode 100644 index 000000000000..748088c269c9 Binary files /dev/null and 
b/docs/_assets/images/contribution/breakpoint.jpg differ diff --git a/docs/_assets/images/contribution/call-stack.jpg b/docs/_assets/images/contribution/call-stack.jpg new file mode 100644 index 000000000000..8fac2371a6c1 Binary files /dev/null and b/docs/_assets/images/contribution/call-stack.jpg differ diff --git a/docs/_assets/images/contribution/conditional-breakpoint.jpg b/docs/_assets/images/contribution/conditional-breakpoint.jpg new file mode 100644 index 000000000000..11bab89d3f47 Binary files /dev/null and b/docs/_assets/images/contribution/conditional-breakpoint.jpg differ diff --git a/docs/_assets/images/contribution/create-config.jpg b/docs/_assets/images/contribution/create-config.jpg new file mode 100644 index 000000000000..60479233ee70 Binary files /dev/null and b/docs/_assets/images/contribution/create-config.jpg differ diff --git a/docs/_assets/images/contribution/debug-console.jpg b/docs/_assets/images/contribution/debug-console.jpg new file mode 100644 index 000000000000..c9a669019d65 Binary files /dev/null and b/docs/_assets/images/contribution/debug-console.jpg differ diff --git a/docs/_assets/images/contribution/import-build.jpg b/docs/_assets/images/contribution/import-build.jpg new file mode 100644 index 000000000000..79be8450cd4a Binary files /dev/null and b/docs/_assets/images/contribution/import-build.jpg differ diff --git a/docs/_assets/images/contribution/launch-config-file.jpg b/docs/_assets/images/contribution/launch-config-file.jpg new file mode 100644 index 000000000000..4270f6b2326a Binary files /dev/null and b/docs/_assets/images/contribution/launch-config-file.jpg differ diff --git a/docs/_assets/images/contribution/start-debugger.jpg b/docs/_assets/images/contribution/start-debugger.jpg new file mode 100644 index 000000000000..edf17d700afc Binary files /dev/null and b/docs/_assets/images/contribution/start-debugger.jpg differ diff --git a/docs/_assets/images/contribution/toolbar.jpg b/docs/_assets/images/contribution/toolbar.jpg new file mode 100644 index 000000000000..22ae60ba27e1 Binary files /dev/null and b/docs/_assets/images/contribution/toolbar.jpg differ diff --git a/docs/_blog/index.html b/docs/_blog/index.html index 055b069b303d..a59b8702d326 100644 --- a/docs/_blog/index.html +++ b/docs/_blog/index.html @@ -1,6 +1,6 @@ --- layout: static-site-main -title: Blog +title: Blog (archive) ---

{{ page.title }}

diff --git a/docs/_docs/contributing/architecture/context.md b/docs/_docs/contributing/architecture/context.md
new file mode 100644
index 000000000000..cd38ee437867
--- /dev/null
+++ b/docs/_docs/contributing/architecture/context.md
@@ -0,0 +1,53 @@
+---
+layout: doc-page
+title: Contexts
+---
+
+`dotc` has almost no global state (with the exception of the name table,
+which hashes strings into unique names). Instead, all
+essential bits of information that can vary over a compiler [run](./lifecycle.md) are collected
+in a `Context` (defined in [Contexts]).
+
+Most methods in the compiler depend on an implicit anonymous `Context` parameter,
+and a typical definition looks like the following:
+```scala
+import dotty.tools.dotc.Contexts.{Context, ctx}
+
+def doFoo(using Context): Unit =
+  val current = ctx.run // access the Context parameter with `ctx`
+```
+
+## Memory Leaks
+> **Careful:** Contexts can be heavy, so beware of memory leaks.
+
+It is good practice to ensure that implicit contexts are not
+captured in closures or other long-lived objects, in order to avoid space leaks
+in the case where a closure can survive several compiler runs (e.g. a
+lazy completer for a library class that is never required). In that case, the
+convention is to make the `Context` an explicit parameter, to track its usage.
+
+## Context Properties
+
+| Context property  | description                            |
+|-------------------|----------------------------------------|
+| `compilationUnit` | current compilation unit               |
+| `phase`           | current phase                          |
+| `run`             | current run                            |
+| `period`          | current period                         |
+| `settings`        | the config passed to the compiler      |
+| `reporter`        | operations for logging errors/warnings |
+| `definitions`     | the standard built in definitions      |
+| `platform`        | operations for the underlying platform |
+| `tree`            | current tree                           |
+| `scope`           | current scope                          |
+| `typer`           | current typer                          |
+| `owner`           | current owner symbol                   |
+| `outer`           | outer Context                          |
+| `mode`            | type checking mode                     |
+| `typerState`      |                                        |
+| `searchHistory`   |                                        |
+| `implicits`       |                                        |
+| ...               | and so on                              |
+
+
+[Contexts]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/core/Contexts.scala
diff --git a/docs/_docs/contributing/architecture/index.md b/docs/_docs/contributing/architecture/index.md
new file mode 100644
index 000000000000..9b976cc643cd
--- /dev/null
+++ b/docs/_docs/contributing/architecture/index.md
@@ -0,0 +1,14 @@
+---
+layout: index
+title: High Level Architecture
+---
+
+This chapter of the guide describes the architecture and concepts of `dotc`,
+the Scala 3 compiler, including answers to questions such as:
+- "What are the transformations that happen to my code?"
+- "How do I run a compiler programmatically?"
+- "What are symbols, denotations, names and types?"
+- "What is a compiler phase?"
+- "What is the compiler Context?"
+
+and many more.
diff --git a/docs/_docs/contributing/architecture/lifecycle.md b/docs/_docs/contributing/architecture/lifecycle.md
new file mode 100644
index 000000000000..2cf58f477da3
--- /dev/null
+++ b/docs/_docs/contributing/architecture/lifecycle.md
@@ -0,0 +1,90 @@
+---
+layout: doc-page
+title: Compiler Overview
+---
+
+At a high level, `dotc` is an interactive compiler (see [what is a compiler?](../index.md#what-is-a-compiler)),
+and can be invoked frequently, for example to answer questions for an IDE, provide REPL completions,
+or to manage incremental builds and more. Each of these use cases requires a customised
+workflow, but all share a common core.
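+
+For a first taste of these use cases, the compiler can also be invoked
+programmatically through the [Main] entry point (which extends [Driver], both
+described in the *Drivers* section below). The following is a minimal sketch
+rather than a canonical recipe: the output directory and source file name are
+placeholders, and it assumes the `Array[String]` overload of `process`, which
+returns a `Reporter`:
+```scala
+import dotty.tools.dotc.Main
+
+// Compile one source file, much as the `scalac` script would.
+// The returned Reporter records any errors and warnings.
+@main def compileHello(): Unit =
+  val reporter = Main.process(Array("-d", "out", "Hello.scala"))
+  println(if reporter.hasErrors then "compilation failed" else "compilation succeeded")
+```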
+
+## Introducing the Compiler's Lifecycle
+
+#### Core
+Customisation is provided by extending the [Compiler] class, which maintains an ordered
+list of [phases][Phases] and knows how to [run][Run] them. Each interaction with a compiler
+creates a new run, which is a complete iteration of the compiler's phases over a list
+of input sources. Each run has the capability to create new definitions or
+invalidate older ones, and `dotc` can [track these changes over time](../architecture/time.md).
+
+#### Runs
+During a run, the input sources are converted to [compilation units][CompilationUnit] (i.e. the abstraction of
+compiler state associated with each input source); then, iteratively, a single phase is applied to
+every compilation unit before progressing to the next phase.
+
+#### Phases
+A phase is an abstract transformation over a compilation unit; it is usually responsible
+for transforming the trees and types representing the code of a source file. Some phases of
+the compiler are:
+- `parser`, which converts text that matches Scala's
+  [syntax] into abstract syntax trees (ASTs)
+- `typer`, which checks that trees conform to expected types
+- `erasure`, which re-types a simplified program into one that uses only types supported directly by the JVM
+- `genBCode`, the JVM backend, which converts erased compiler trees into Java bytecode format
+
+[You can read more about phases here](../architecture/phases.md#phase-categories).
+
+#### Drivers
+
+The core compiler also requires a lot of state to be initialised before use, such as [settings][ScalaSettings]
+and the [Context](../architecture/context.md). For convenience, the [Driver] class contains high-level functions for
+configuring the compiler and invoking it programmatically. The object [Main] inherits from `Driver`
+and is invoked by the `scalac` script.
+
+## Code Structure
+
+The code of the compiler is found in the package [dotty.tools],
+containing the following sub-packages:
+```scala
+tools // contains helpers and the `scala` generic runner
+├── backend // Compiler backends (currently JVM and JS)
+├── dotc // The main compiler, with subpackages:
+│   ├── ast // Abstract syntax trees
+│   ├── classpath
+│   ├── config // Compiler configuration, settings, platform specific definitions.
+│   ├── core // Core data structures and operations, with specific subpackages for:
+│   │   ├── classfile // Reading of Java classfiles into core data structures
+│   │   ├── tasty // Reading and writing of TASTY files to/from core data structures
+│   │   └── unpickleScala2 // Reading of Scala2 symbol information into core data structures
+│   ├── decompiler // pretty printing TASTY as code
+│   ├── fromtasty // driver for recompilation from TASTY
+│   ├── interactive // presentation compiler and code completions
+│   ├── parsing // Scanner and parser
+│   ├── plugins // compiler plugin definitions
+│   ├── printing // Pretty-printing trees, types and other data
+│   ├── profile // internals for profiling the compiler
+│   ├── quoted // internals for quoted reflection
+│   ├── reporting // Reporting of error messages, warnings and other info.
+│   ├── rewrites // Helpers for rewriting Scala 2's constructs into Scala 3's.
+│   ├── sbt // Helpers for communicating with the Zinc compiler.
+│   ├── semanticdb // Helpers for exporting semanticdb from trees.
+│   ├── transform // Miniphases and helpers for tree transformations.
+│   ├── typer // Type-checking
+│   └── util // General purpose utility classes and modules.
+├── io // Helper modules for file access and classpath handling.
+├── repl // REPL driver and interaction with the terminal
+├── runner // helpers for the `scala` generic runner script
+└── scripting // scala runner for the -script argument
+```
+
+
+[Phases]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/core/Phases.scala
+[CompilationUnit]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/CompilationUnit.scala
+
+[dotty.tools]: https://github.com/lampepfl/dotty/tree/master/compiler/src/dotty/tools
+[ScalaSettings]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala
+[syntax]: https://docs.scala-lang.org/scala3/reference/syntax.html
+[Main]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/Main.scala
+[Driver]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/Driver.scala
+[Compiler]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/Compiler.scala
+[Run]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/Run.scala
\ No newline at end of file
diff --git a/docs/_docs/contributing/architecture/phases.md b/docs/_docs/contributing/architecture/phases.md
new file mode 100644
index 000000000000..844ae144dddb
--- /dev/null
+++ b/docs/_docs/contributing/architecture/phases.md
@@ -0,0 +1,108 @@
+---
+layout: doc-page
+title: Compiler Phases
+---
+
+As described in the [compiler overview](lifecycle.md#phases), `dotc` is divided into a list of [phases][Phase],
+specified in the [Compiler] class.
+
+#### Printing the phases of the Compiler
+
+A flattened list of all the phases can be displayed by invoking
+the compiler with the `-Xshow-phases` flag:
+```
+$ scalac -Xshow-phases
+```
+
+## Phase Groups
+
+In class [Compiler] you can access the list of phases with the method `phases`:
+
+```scala
+def phases: List[List[Phase]] =
+  frontendPhases ::: picklerPhases ::: transformPhases ::: backendPhases
+```
+
+You can see that phases are actually grouped into sublists, given by the signature
+`List[List[Phase]]`; that is, each sublist forms a phase group that is then *fused* into a
+single tree traversal when a [Run] is executed.
+
+Phase fusion allows each phase of a group to be small and modular
+(each performing a single function), while reducing the number of tree traversals
+and increasing performance.
+
+Phases can be grouped together if they inherit from [MiniPhase]. A short sketch of
+inspecting these groups programmatically is shown below, after the `frontendPhases`
+description.
+
+## Phase Categories
+
+Phases fall into four categories, allowing customisation by sub-classes of [Compiler]:
+
+### `frontendPhases`
+In the main compiler these include [parser], [typer], [posttyper],
+[prepjsinterop] and phases for producing SemanticDB and communicating with the
+incremental compiler Zinc.
+The [parser] reads source programs and generates untyped abstract syntax trees, which
+in [typer] are then typechecked and transformed into typed abstract syntax trees.
+Following is [posttyper], which performs checks and cleanups that require a fully typed program.
+In particular, it:
+- creates super accessors representing `super` calls in traits
+- creates implementations of compiler-implemented methods,
+  such as `equals` and `hashCode` for case classes
+- marks [compilation units][CompilationUnit] that require inline expansion or quote pickling
+- simplifies trees of erased definitions
+- checks variance of type parameters
+- marks parameters passed unchanged from subclass to superclass for later pruning
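+
+As mentioned under *Phase Groups*, the groups can also be inspected programmatically.
+This is a minimal sketch, not part of the compiler itself: it assumes that a [Compiler]
+and its phases can be constructed without a `Context`, and it relies only on the `phases`
+method and `Phase.phaseName` shown above. It prints one line per fused group, similar in
+spirit to `-Xshow-phases`:
+```scala
+import dotty.tools.dotc.Compiler
+
+@main def showPhaseGroups(): Unit =
+  val groups = (new Compiler).phases   // List[List[Phase]]
+  for (group, i) <- groups.zipWithIndex do
+    // each inner list of phases is fused into a single tree traversal per run
+    println(s"group $i: ${group.map(_.phaseName).mkString(", ")}")
+```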
+
+### `picklerPhases`
+These phases start with [pickler], which serializes typed trees
+produced by the `frontendPhases` into TASTy format. Next is [inlining],
+which expands calls to inline methods, and [postInlining], which provides implementations
+of the [Mirror] framework for inlined calls.
+Finally come [staging], which ensures that quotes conform to the
+[Phase Consistency Principle (PCP)][PCP], and [pickleQuotes], which converts quoted
+trees to embedded TASTy strings.
+
+### `transformPhases`
+These phases are concerned with transformation into lower-level forms
+suitable for the runtime system, with two sub-groupings:
+- High-level transformations: all phases from [firstTransform] to [erasure].
+  Most of these phases transform syntax trees, expanding high-level constructs
+  to more primitive ones.
+  - An important transform phase is [patternMatcher], which converts match
+    trees and patterns into lower-level forms, as well as checking the
+    exhaustivity of sealed types and the unreachability of pattern cases.
+  - Some phases perform further checks on more primitive trees,
+    e.g. [refchecks] verifies that no abstract methods exist in concrete classes,
+    and [initChecker] checks that fields are not used before initialisation.
+  - The last phase in the group, [erasure], translates all
+    types into types supported directly by the JVM. To do this, it performs
+    another type checking pass, but using the rules of the JVM's type system
+    instead of Scala's.
+- Low-level transformations: all phases from `ElimErasedValueType` to
+  `CollectSuperCalls`. These further transform trees until they are essentially a
+  structured version of Java bytecode.
+
+### `backendPhases`
+These map the transformed trees to Java classfiles or SJSIR files.
+
+[CompilationUnit]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/CompilationUnit.scala
+[Compiler]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/Compiler.scala
+[Phase]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/core/Phases.scala
+[MiniPhase]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala
+[Run]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/Run.scala
+[parser]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/parsing/ParserPhase.scala
+[typer]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala
+[posttyper]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/PostTyper.scala
+[prepjsinterop]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala
+[pickler]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/Pickler.scala
+[inlining]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/Inlining.scala
+[postInlining]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/PostInlining.scala
+[staging]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/Staging.scala
+[pickleQuotes]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala
+[refchecks]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/typer/RefChecks.scala
+[initChecker]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/init/Checker.scala
+[firstTransform]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala
+[patternMatcher]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala
+[erasure]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/Erasure.scala
+[Mirror]: https://github.com/lampepfl/dotty/blob/master/library/src/scala/deriving/Mirror.scala
+[PCP]: ../../reference/metaprogramming/macros.md#the-phase-consistency-principle
diff --git a/docs/_docs/contributing/architecture/symbols.md b/docs/_docs/contributing/architecture/symbols.md
new file mode 100644
index 000000000000..c19588a4ff12
--- /dev/null
+++ b/docs/_docs/contributing/architecture/symbols.md
@@ -0,0 +1,70 @@
+---
+layout: doc-page
+title: Symbols
+---
+
+As discussed previously, `dotc` [maintains time-indexed views](time.md) of various
+compiler artifacts. The following sections discuss how they are managed in the compiler.
+
+## Symbols
+
+Defined in [Symbols], a `Symbol` is a unique identifier for a definition (e.g. a method,
+type, or field). A `ClassSymbol` extends `Symbol` and represents a `class`, a `trait`,
+or an `object`. A `Symbol` can even refer to non-Scala entities,
+such as those from the Java standard library.
+
+## Definitions are Dynamic
+
+Traditionally, compilers store context-dependent data in a _symbol table_, where a symbol
+is then the central reference used to address such context-dependent data.
+`dotc` instead uses a phase-indexed function (known as
+a [Denotation][Denotations]) to compute views of definitions across phases,
+as many of the attributes associated with definitions are phase-dependent. For example:
+- types are gradually simplified by several phases,
+- owners change in [lambdaLift] (local methods are lifted to an enclosing class)
+  and [flatten] (when inner classes are moved to the top level),
+- names are changed when private members need to be accessed from outside
+  their class (for instance from a nested class or a class implementing
+  a trait).
+
+Additionally, symbols are not suitable as references to
+a definition in another [compilation unit][CompilationUnit].
+In the context of incremental compilation, a symbol from
+an external compilation unit may be deleted or changed, making the reference
+stale. To counter this, `dotc` types trees of cross-module references with either
+a `TermRef` or a `TypeRef`. A reference type contains a prefix type and a name.
+The denotation that the type refers to is established dynamically based on
+these fields.
+
+## Denotations
+
+On its own a `Symbol` has no structure. Its semantic meaning is given by being associated
+with a [Denotation][Denotations].
+
+A denotation is the result of resolving a name during a given period. It contains the information
+describing some entity (either a term or a type), indexed by phase. Denotations usually have a
+reference to a selected symbol, but not always; for example, there is none if the denotation is
+overloaded, i.e. a `MultiDenotation`.
+
+### SymDenotations
+All definition symbols will contain a `SymDenotation`. The denotation, in turn, contains:
+- a reverse link to the source symbol
+- a reference to the enclosing symbol that defined the source symbol:
+  - for a local variable, the enclosing method
+  - for a field or class, the enclosing class
+- a set of [flags], describing the definition (e.g. whether it's a trait or mutable)
+- the type of the definition (through the `info` method)
+- a [signature][Signature1], which uniquely identifies overloaded methods (or else `NotAMethod`)
+- and more.
+
+A class symbol will instead be associated with a `ClassDenotation`, which extends `SymDenotation`
+with some additional fields specific to classes.
+
+[Signature1]: https://github.com/lampepfl/dotty/blob/a527f3b1e49c0d48148ccfb2eb52e3302fc4a349/compiler/src/dotty/tools/dotc/core/Signature.scala#L9-L33
+[Symbols]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/core/Symbols.scala
+[flatten]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/Flatten.scala
+[lambdaLift]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala
+[CompilationUnit]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/CompilationUnit.scala
+[Denotations]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/core/Denotations.scala
+[SymDenotations]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/core/SymDenotations.scala
+[flags]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/core/Flags.scala
diff --git a/docs/_docs/contributing/architecture/time.md b/docs/_docs/contributing/architecture/time.md
new file mode 100644
index 000000000000..588b1ce40bb2
--- /dev/null
+++ b/docs/_docs/contributing/architecture/time.md
@@ -0,0 +1,68 @@
+---
+layout: doc-page
+title: Time in the Compiler
+---
+
+In the [compiler overview](lifecycle.md) section, we saw that `dotc` is an interactive compiler,
+and so can answer questions about entities as they come into existence and change throughout time,
+for example:
+- which new definitions were added in a REPL session?
+- which definitions were replaced in an incremental build?
+- how are definitions simplified as they are adapted to the runtime system?
+
+## Hours, Minutes, and Periods
+
+For the compiler to be able to resolve the above temporal questions, and more, it maintains
+a concept of time. Additionally, because interactions are frequent, it is important to
+persist knowledge of entities between interactions, allowing the compiler to remain performant.
+Knowing about time allows the compiler to efficiently mark entities as outdated.
+
+Conceptually, `dotc` works like a clock, where its minutes are represented by [phases](phases.md),
+and its hours by [runs]. Like a clock, a run passes once each of its phases has completed
+sequentially, and then a new run can begin. Phases are further grouped into [periods], during
+which certain entities of the compiler remain stable.
+
+## Time Travel
+
+During a run, each phase can rewrite the world as the compiler sees it, for example:
+- to transform trees,
+- to gradually simplify types from Scala types to JVM types,
+- to move definitions out of inner scopes to outer ones, fitting the JVM's model,
+- and so on.
+
+Because definitions can [change over time](symbols.md#definitions-are-dynamic), various artifacts associated with them
+are stored non-destructively, and views of a definition created earlier or later
+in the compiler can be accessed by using the `atPhase` method, defined in [Contexts].
+
+As an example, assume the following definitions are available in a [Context](context.md):
+```scala
+class Box { type X }
+
+def foo(b: Box)(x: b.X): List[b.X] = List(x)
+```
+
+You can compare the type of the definition `foo` after the [typer] phase and after the [erasure] phase
+by using `atPhase`:
+```scala
+import dotty.tools.dotc.core.Contexts.{Context, atPhase}
+import dotty.tools.dotc.core.Phases.{typerPhase, erasurePhase}
+import dotty.tools.dotc.core.Symbols.Symbol
+import dotty.tools.dotc.core.Decorators.i
+
+given Context = …
+
+val fooDef: Symbol = … // `def foo(b: Box)(x: b.X): List[b.X]`
+
+println(i"$fooDef after typer   => ${atPhase(typerPhase.next)(fooDef.info)}")
+println(i"$fooDef after erasure => ${atPhase(erasurePhase.next)(fooDef.info)}")
+```
+and see the following output:
+```
+method foo after typer   => (b: Box)(x: b.X): scala.collection.immutable.List[b.X]
+method foo after erasure => (b: Box, x: Object): scala.collection.immutable.List
+```
+
+[runs]: https://github.com/lampepfl/dotty/blob/a527f3b1e49c0d48148ccfb2eb52e3302fc4a349/compiler/src/dotty/tools/dotc/Run.scala
+[periods]: https://github.com/lampepfl/dotty/blob/a527f3b1e49c0d48148ccfb2eb52e3302fc4a349/compiler/src/dotty/tools/dotc/core/Periods.scala
+[Contexts]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/core/Contexts.scala
+[typer]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala
+[erasure]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/Erasure.scala
diff --git a/docs/_docs/contributing/architecture/types.md b/docs/_docs/contributing/architecture/types.md
new file mode 100644
index 000000000000..64543e555e69
--- /dev/null
+++ b/docs/_docs/contributing/architecture/types.md
@@ -0,0 +1,147 @@
+---
+layout: doc-page
+title: Compiler Types
+---
+
+## Common Types and their Representation
+
+Type representations in `dotc` derive from the class `dotty.tools.dotc.core.Types.Type`,
+defined in [Types.scala]. The `toString` method on `Type` will display types in a
+format corresponding to the backing data structure, e.g. `ExprType(...)`
+corresponds to `class ExprType`, defined in [Types.scala].
+
+> You can inspect the representation of any type using the [dotty.tools.printTypes][DottyTypeStealer]
+> script; its usage and integration into your debugging workflow are [described here](../issues/inspection.md).
+
+### Types of Definitions
+
+The following table describes definitions in Scala 3, followed by the `dotc` representation
+of two types: a reference to the definition, and its underlying type.
+
+**Note**: in the following types, `p` refers to the self-type of the enclosing scope of
+the definition, or `NoPrefix` for local definitions and parameters.
+
+Definition               | Reference       | Underlying Type
+-------------------------|-----------------|-------------------------
+`type Z >: A <: B`       | `TypeRef(p, Z)` | `RealTypeBounds(A, B)`
+`type Z = A`             | `TypeRef(p, Z)` | `TypeAlias(A)`
+`type F[T] = T match …`  | `TypeRef(p, F)` | `MatchAlias([T] =>> T match …)`
+`class C`                | `TypeRef(p, C)` | `ClassInfo(p, C, …)`
+`trait T`                | `TypeRef(p, T)` | `ClassInfo(p, T, …)`
+`object o`               | `TermRef(p, o)` | `TypeRef(p, o$)` where `o$` is a class
+`def f(x: A): x.type`    | `TermRef(p, f)` | `MethodType(x, A, TermParamRef(x))`
+`def f[T <: A]: T`       | `TermRef(p, f)` | `PolyType(T, <: A, TypeParamRef(T))`
+`def f: A`               | `TermRef(p, f)` | `ExprType(A)`
+`(x: => A)`              | `TermRef(p, x)` | `ExprType(A)` where `x` is a parameter
+`val x: A`               | `TermRef(p, x)` | `A`
+
+### Types of Values
+
+The following types may appear as part of the type of an expression:
+
+Type                      | Representation
+--------------------------|------------------------------
+`x.y.type`                | `TermRef(x, y)`
+`X#T`                     | `TypeRef(X, T)`
+`x.y.T` and `x.y.type#T`  | `TypeRef(TermRef(x, y), T)`
+`this.type`               | `ThisType(C)` where `C` is the enclosing class
+`"hello"`                 | `ConstantType(Constant("hello"))`
+`A & B`                   | `AndType(A, B)`
+`A \| B`                  | `OrType(A, B)`
+`A @foo`                  | `AnnotatedType(A, @foo)`
+`[T <: A] =>> T`          | `HKTypeLambda(T, <: A, TypeParamRef(T))`
+`x.C[A, B]`               | `AppliedType(x.C, List(A, B))`
+`C { type A = T }`        | `RefinedType(C, A, T)` when `T` is not a member of `C`
+`C { type X = Y }`        | `RecType(RefinedType(C, X, z.Y))` when `X` and `Y` are members of `C` and `z` is a `RecThis` over the enclosing `RecType`
+`super.x.type`            | `TermRef(SuperType(…), x)`
+
+## Constructing Types
+
+### Method Definition Types
+
+You can see above that method definitions can have an underlying type of
+either `PolyType`, `MethodType`, or `ExprType`. `PolyType` and `MethodType`
+may, however, be nested recursively, and either can appear as the result type of the other.
+
+Take this example as given:
+
+```scala
+def f[A, B <: Seq[A]](x: A, y: B): Unit
+```
+It can be constructed by the following code:
+
+```scala
+import dotty.tools.dotc.core.Types.*
+import dotty.tools.dotc.core.Symbols.*
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Decorators.*
+
+given Context = … // contains the definitions of the compiler
+
+val f: Symbol = … // def f[A, B <: Seq[A]](x: A, y: B): Unit
+
+f.info = PolyType(
+  List("A".toTypeName, "B".toTypeName))(
+    pt => List(
+      TypeBounds(defn.NothingType, defn.AnyType),
+      TypeBounds(defn.NothingType, AppliedType(defn.SeqType, List(pt.newParamRef(0))))
+    ),
+    pt => MethodType(
+      List("x".toTermName, "y".toTermName))(
+        mt => List(pt.newParamRef(0), pt.newParamRef(1)),
+        mt => defn.UnitType
+    )
+)
+```
+
+Note that `pt.newParamRef(0)` and `pt.newParamRef(1)` refer to the
+type parameters `A` and `B` respectively.
+
+## Proxy Types and Ground Types
+Types in `dotc` are divided into two semantic kinds:
+- Ground Types (inheriting from either `CachedGroundType` or `UncachedGroundType`)
+- Proxy Types (inheriting from `TypeProxy` via either `CachedProxyType` or `UncachedProxyType`)
+
+A Proxy Type is anything that can be considered to be an abstraction of another type,
+which can be accessed by the `underlying` method of the `TypeProxy` class. Its dual, the
+Ground Type, has no meaningful underlying type; typically it is the type of method and class
+definitions, but also of union types and intersection types, along with utility types of the
+compiler.
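+
+For instance, a `TermRef` is a proxy type, and asking for its `underlying` type yields the
+type of the definition it refers to. As a minimal sketch (the helper `groundType` below is
+hypothetical, not part of the compiler API), one could walk a chain of proxies down to the
+first ground type like so:
+
+```scala
+import dotty.tools.dotc.core.Contexts.Context
+import dotty.tools.dotc.core.Types.*
+
+// Follow `underlying` until a ground type is reached; a Context is
+// required because `underlying` is context-dependent.
+def groundType(tp: Type)(using Context): Type = tp match
+  case proxy: TypeProxy => groundType(proxy.underlying)
+  case ground           => ground
+```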
+
+Here's a diagram, serving as the mental model of the most important and distinct types available after the `typer` phase, derived from [Types.scala]:
+
+```
+Type -+- proxy_type --+- NamedType --------+- TypeRef
+      |               |                     \
+      |               +- SingletonType ----+- TermRef
+      |               |                    +- ThisType
+      |               |                    +- SuperType
+      |               |                    +- ConstantType
+      |               |                    +- TermParamRef
+      |               |                    +- RecThis
+      |               |                    +- SkolemType
+      |               +- TypeParamRef
+      |               +- RefinedOrRecType -+-- RefinedType
+      |               |                    +-- RecType
+      |               +- AppliedType
+      |               +- TypeBounds
+      |               +- ExprType
+      |               +- AnnotatedType
+      |               +- TypeVar
+      |               +- HKTypeLambda
+      |               +- MatchType
+      |
+      +- ground_type -+- AndType
+                      +- OrType
+                      +- MethodOrPoly -----+-- PolyType
+                      |                    +-- MethodType
+                      +- ClassInfo
+                      +- NoType
+                      +- NoPrefix
+                      +- ErrorType
+                      +- WildcardType
+
+```
+
+[Types.scala]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/core/Types.scala
+[DottyTypeStealer]: https://github.com/lampepfl/dotty/blob/master/compiler/test/dotty/tools/DottyTypeStealer.scala
diff --git a/docs/_docs/contributing/contribute-knowledge.md b/docs/_docs/contributing/contribute-knowledge.md
deleted file mode 100644
index 7164774ac1df..000000000000
--- a/docs/_docs/contributing/contribute-knowledge.md
+++ /dev/null
@@ -1,12 +0,0 @@
----
-layout: doc-page
-title: Contributing Knowledge
----
-
-# Contribute Internals-related Knowledge
-If you know anything useful at all about Dotty, feel free to log this knowledge:
-
-- [📜Log the Knowledge](https://github.com/lampepfl/dotty-knowledge/issues/new)
-- [🎓More about Logging the Knowledge](https://github.com/lampepfl/dotty-knowledge/blob/master/README.md)
-
-In short, no need to make it pretty, particularly human-readable or give it a particular structure. Just dump the knowledge you have and we'll take it from there.
\ No newline at end of file
diff --git a/docs/_docs/contributing/getting-started.md b/docs/_docs/contributing/getting-started.md
index c842fd0a49d0..af9f2f0783b8 100644
--- a/docs/_docs/contributing/getting-started.md
+++ b/docs/_docs/contributing/getting-started.md
@@ -3,15 +3,41 @@ layout: doc-page
 title: Getting Started
 ---
 
+## Scala CLA
+Before submitting your pull request, make sure that you have
+signed the [Scala CLA][scala-cla]. You can read more about why we require a CLA,
+and what exactly is included in it, [here][scala-cla].
 
-Requirements
-------------
-Make sure that you are using macOS or Linux (or WSL on Windows) with Java 8 or newer. You can determine which version of the JDK is the
-default by typing `java -version` in a Terminal window.
+## Making sure the team is aware
+
+Before digging into an issue or starting on a new feature, it's a good idea to
+make sure an [issue][dotty-issue] or a [discussion][dotty-discussion] has been
+created outlining what you plan to work on. This is both for your and the team's
+benefit. It ensures you get the help you need, and also gives the compiler team
+a heads-up that someone is working on an issue.
+
+For some small changes, like documentation, this isn't always necessary, but it's
+never a bad idea to check.
+
+## Requirements
+
+- [git] is essential for managing the Scala 3 code and contributing to GitHub,
+  where the code is hosted.
+- A Java Virtual Machine (JDK 8 or higher), required for running the build tool.
+  - Download Java from [Oracle Java 8][java8], [Oracle Java 11][java11],
+    or [AdoptOpenJDK 8/11][adopt]. Refer to [JDK Compatibility][compat] for Scala/Java compatibility details.
+  - Verify that the JVM is installed by running the following command in a terminal: `java -version`.
+- [sbt][sbt-download], the build tool required to build the Scala 3 compiler and libraries.
+
+## Nice To Have
+
+An IDE, such as [Metals], will help you develop in Scala 3 with features such as autocompletion and go-to-definition,
+and with the [VS Code][vs-code] text editor you can even use the Scala debugger, or create interactive worksheets for an
+iterative workflow.
+
+## Compiling and Running
 
-Compiling and Running
----------------------
 Start by cloning the repository:
 
 ```bash
@@ -48,8 +74,8 @@ $ scala HelloWorld
 ```
 
-Starting a REPL
----------------
+## Starting a REPL
+
 ```bash
 $ sbt
 > repl
@@ -64,8 +90,9 @@ or via bash:
 ```bash
 $ scala
 ```
-Publish to local repository
----------------------------------
+
+## Publish to local repository
+
 To test our cloned compiler on local projects:
 
 ```bash
@@ -79,8 +106,8 @@ ThisBuild / scalaVersion := "<dotty-version>-bin-SNAPSHOT"
 ```
 where `dotty-version` can be found in the file `project/Build.scala`, like `3.0.0-M2`
 
-Generating Documentation
--------------------------
+## Generating Documentation
+
 To generate this page and other static page docs, run
 ```bash
 $ sbt
@@ -92,9 +119,22 @@ Before contributing to Dotty, we invite you to consult the
 [Dotty Developer Guidelines](https://github.com/lampepfl/dotty/blob/main/CONTRIBUTING.md).
 
-Community
--------------
+## Community
+
 The main development discussion channels are:
 - [github.com/lampepfl/dotty/discussions](https://github.com/lampepfl/dotty/discussions)
 - [contributors.scala-lang.org](https://contributors.scala-lang.org)
 - [gitter.im/scala/contributors](https://gitter.im/scala/contributors)
+
+[git]: https://git-scm.com
+[Metals]: https://scalameta.org/metals/
+[vs-code]: https://code.visualstudio.com
+[lampepfl/dotty]: https://github.com/lampepfl/dotty
+[sbt-download]: https://www.scala-sbt.org/download.html
+[java8]: https://www.oracle.com/java/technologies/javase-jdk8-downloads.html
+[java11]: https://www.oracle.com/java/technologies/javase-jdk11-downloads.html
+[adopt]: https://adoptopenjdk.net/
+[compat]: https://docs.scala-lang.org/overviews/jdk-compatibility/overview.html
+[scala-cla]: https://www.lightbend.com/contribute/cla/scala
+[dotty-issue]: https://github.com/lampepfl/dotty/issues
+[dotty-discussion]: https://github.com/lampepfl/dotty/discussions
diff --git a/docs/_docs/contributing/index.md b/docs/_docs/contributing/index.md
index 6cf0def2d5e2..0cc87e4b3500 100644
--- a/docs/_docs/contributing/index.md
+++ b/docs/_docs/contributing/index.md
@@ -2,3 +2,48 @@ layout: index
 title: Contributing
 ---
+
+This guide is intended to give new contributors the knowledge they need to
+become productive and fix issues or implement new features in Scala 3. It
+also documents the inner workings of the Scala 3 compiler, `dotc`.
+
+### This is a living document
+
+Keep in mind that the code for `dotc` is continually changing, so the ideas
+discussed in this guide may fall out of date. This is a living document, so
+please consider contributing to it on
+[GitHub](https://github.com/scala/docs.scala-lang/tree/main/_overviews/scala3-contribution)
+if you notice anything out of date, or report any issues
+[here](https://github.com/scala/docs.scala-lang/issues).
+
+### Get the Most from This Guide
+
+`dotc` is built with Scala 3, fully utilising its [new
+features](https://docs.scala-lang.org/scala3/new-in-scala3.html). It is recommended that you first have
+some familiarity with Scala 3 to get the most out of this guide. You can learn
+more in the [language reference](../reference/overview.md).
+
+Many code snippets in this guide make use of shell commands (a line beginning
+with `$`), and in this case a `bash`-compatible shell is assumed. You may have
+to look up how to translate commands to your shell.
+
+### What is a Compiler?
+
+Let's start at the beginning and first look at the question of "what is a
+compiler?". A compiler is a program that takes as input text representing a
+program in one language, and produces as output the same program, written in
+another programming language.
+
+#### The Scala Compiler
+
+As an example, `dotc` takes text input, verifies that it is a valid Scala program,
+and then produces as output the same program, but written in Java bytecode, and optionally
+in SJSIR when producing Scala.js output.
+
+### Contribute Internals-related Knowledge
+If you know anything useful at all about Dotty, feel free to log this knowledge:
+
+- [📜Log the Knowledge](https://github.com/lampepfl/dotty-knowledge/issues/new)
+- [🎓More about Logging the Knowledge](https://github.com/lampepfl/dotty-knowledge/blob/master/README.md)
+
+In short, there is no need to make it pretty, particularly human-readable, or give it a particular structure. Just dump the knowledge you have and we'll take it from there.
\ No newline at end of file
diff --git a/docs/_docs/contributing/issues/areas.md b/docs/_docs/contributing/issues/areas.md
new file mode 100644
index 000000000000..4f9adf79ba77
--- /dev/null
+++ b/docs/_docs/contributing/issues/areas.md
@@ -0,0 +1,70 @@
+---
+layout: doc-page
+title: Common Issue Locations
+---
+
+Many issues are localised to small domains of the compiler and are self-contained;
+here is a non-exhaustive list of such domains, and the files associated with them:
+
+### Pretty Printing of Types and Trees
+
+Objects in the compiler that inherit from [Showable] can be pretty-printed.
+The pretty-printing of objects is used in many places, from debug output
+to user-facing error messages and printing of trees after each phase.
+
+Look in [RefinedPrinter] (or its parent class [PlainPrinter]) for the implementation of pretty printing.
+
+### Content of Error Messages
+
+You can find the definitions of most error messages in [messages] (with IDs
+defined in [ErrorMessageID]). If the message is not defined there, try the
+`-Ydebug-error` compiler flag, which will print a stack trace leading to the
+production of the error, and the contents of the message.
+
+### Compiler Generated Given Instances
+
+If the issue lies in given instances provided by the compiler, such as `scala.reflect.ClassTag`,
+`scala.deriving.Mirror`, `scala.reflect.TypeTest`, `scala.CanEqual`, `scala.ValueOf`,
+`scala.reflect.Manifest`, etc., look in [Synthesizer], which provides factories for
+given instances.
+
+### Compiler Generated Methods
+
+Members can be generated for many classes, such as `equals` and `hashCode`
+for case classes and value classes, and `ordinal` and `fromProduct` for Mirrors.
+To change the implementation, see [SyntheticMembers].
+
+### Code Completions
+For suggestions to auto-complete method selections, see [Completion].
+
+### Enum Desugaring
+See [Desugar] and [DesugarEnums].
+
+### Pattern Match Exhaustivity
+See [Space].
+
+### Metaprogramming
+
+#### Quotes Reflection
+See the [quoted runtime package][quotes-impl].
+
+#### Inline match
+See [Inliner].
+ +#### Compiletime Ops Types +See `tryCompiletimeConstantFold` in [Types]. + +[Showable]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/printing/Showable.scala +[PlainPrinter]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +[RefinedPrinter]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +[ErrorMessageID]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala +[messages]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/reporting/messages.scala +[Synthesizer]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala +[SyntheticMembers]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala +[quotes-impl]: https://github.com/lampepfl/dotty/tree/master/compiler/src/scala/quoted/runtime/impl +[Inliner]: https://github.com/lampepfl/dotty/blob/main/compiler/src/dotty/tools/dotc/inlines/Inliner.scala +[Types]: https://github.com/lampepfl/dotty/tree/master/compiler/src/dotty/tools/dotc/core/Types.scala +[Completion]: https://github.com/lampepfl/dotty/tree/master/compiler/src/dotty/tools/dotc/interactive/Completion.scala +[DesugarEnums]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala +[Desugar]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/ast/Desugar.scala +[Space]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala diff --git a/docs/_docs/contributing/issues/cause.md b/docs/_docs/contributing/issues/cause.md new file mode 100644 index 000000000000..5bb04e894f70 --- /dev/null +++ b/docs/_docs/contributing/issues/cause.md @@ -0,0 +1,115 @@ +--- +layout: doc-page +title: Finding the Cause of an Issue +--- + +In this section, you will be able to answer questions such as: +- where does an error happen in a codebase? +- when during compilation was a particular tree introduced? +- where is a particular object created? +- where is a particular value assigned to a variable? + +> You may be able to quickly find the source responsible for an issue by consulting [common issue locations](../issues/areas.md) + +## What phase generated a particular tree? + +As described in the [compiler lifecycle](../architecture/lifecycle.md#phases), each phase transforms the trees +and types that represent your code in a certain way. + +To print the code as it is transformed through the compiler, use the compiler flag `-Xprint:all`. +After each phase group is completed, you will see the resulting trees representing the code. + +> It is recommended to test `-Xprint:all` on a single, small file, otherwise a lot of unnecessary +> output will be generated. + +### Trace a Tree Creation Site + +When you see a problematic tree appear after a certain phase group, you know to isolate the rest of +your search to the code of that phase. For example if you found a problematic tree after phase +`posttyper`, the problem most likely appears in the code of [PostTyper]. We can trace the exact point +the tree was generated by looking for its unique ID, and then generating a stack trace at its creation: + +1. Run the compiler with `-Xprint:posttyper` and `-Yshow-tree-ids` flags. + This will only print the trees of the `posttyper` phase. 
This time you should see the tree in question printed
+   alongside its ID. You'll see something like `println#223("Hello World"#37)`.
+2. Copy the ID of the desired tree.
+3. Run the compiler with the `-Ydebug-tree-with-id <tree-id>` flag. The compiler will print a stack trace
+   pointing to the creation site of the tree with the provided ID.
+
+### Enhanced Tree Printing
+
+As seen above, `-Xprint:<phase>` can be enhanced with further configuration flags, found in
+[ScalaSettings]. For example, you can additionally print the type of a tree with `-Xprint-types`.
+
+## Increasing Logging Output
+Once you have identified the phase that generated a certain tree, you can then increase
+logging in that phase to try to detect erroneous states:
+
+- general logging within a phase can be enabled with the `-Ylog` compiler flag, such as
+  - `-Ylog:<phase1>,<phase2>,...` for individual phases
+  - `-Ylog:all` for all phases.
+- Additionally, various parts of the compiler have specialised logging objects, defined in [Printers].
+  Change any of the printers of interest from `noPrinter` to `default` to increase output specialised
+  to that domain.
+
+## Navigating to Where an Error is Generated
+
+The compiler issues user-facing errors for code that is not valid, such as the type mismatch
+of assigning an `Int` to a `Boolean` value. Sometimes these errors do not match what is expected, which could be a bug.
+
+To discover why such a *spurious* error is generated, you can trace the code that generated the error by
+adding the `-Ydebug-error` compiler flag, e.g. `scala3/scalac -Ydebug-error Test.scala`.
+This flag forces a stack trace to be printed each time an error happens, from the site where it occurred.
+
+Analysing the trace will give you a clue about the objects involved in producing the error.
+For example, you can add some debug statements before the error is issued to discover
+the state of the compiler. [See some useful ways to debug values.](./inspection.md)
+
+### Where was a particular object created?
+
+If you navigate to the site of the error and discover a problematic object, you will want to know
+why it exists in such a state, as it could be the cause of the error. You can discover the
+creation site of that object to understand the logic that created it.
+
+You can do this by injecting a *tracer* into the class of the instance in question.
+A tracer is the following variable:
+```scala
+val tracer = Thread.currentThread.getStackTrace.mkString("\n")
+```
+When placed as a member definition in a class, it will contain a stack trace pointing at exactly where
+its particular instance was created.
+
+Once you've injected a tracer into a class, you can `println` that tracer from the error site, or
+from any other site where you've found the object in question.
+
+#### Procedure
+
+1. Determine the type of the object in question. You can use one of the following techniques to do so:
+   - Use an IDE to get the type of an expression, or save the expression to a `val`
+     and see its inferred type.
+   - Use `println` to print the object or use `getClass` on that object.
+2. Locate the type definition for the type of that object.
+3. Add a field `val tracer = Thread.currentThread.getStackTrace.mkString("\n")` to that type definition.
+4. `println(x.tracer)` (where `x` is the name of the object in question) from the original site where you
+   encountered the object. This will give you the stack trace pointing to the place where the
+   constructor of that object was invoked.
+
+### Where was a particular value assigned to a variable?
+
+Say you have a certain [type](../architecture/types.md) assigned to a [Denotation] and you would like to know why it has that
+specific type. The type of a denotation is defined by `var myInfo: Type`, and can be assigned multiple times.
+In this case, knowing the creation site of that `Type`, as described above, is not useful; instead, you need to
+know the *assignment* (not *creation*) site.
+
+This is done similarly to how you trace the creation site. Conceptually, you need to create a proxy for that
+variable that will log every write operation to it. Practically, if you are trying to trace the assignments to a
+variable `myInfo` of type `Type`, first rename it to `myInfo_debug`. Then, insert the following at the same
+level as that variable:
+
+```scala
+var tracer = ""
+def myInfo: Type = myInfo_debug
+def myInfo_=(x: Type) = { tracer = Thread.currentThread.getStackTrace.mkString("\n"); myInfo_debug = x }
+```
+
+[Printers]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/config/Printers.scala
+[Denotation]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/core/Denotations.scala
+[PostTyper]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/transform/PostTyper.scala
+[ScalaSettings]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala
diff --git a/docs/_docs/contributing/issues/checklist.md b/docs/_docs/contributing/issues/checklist.md
new file mode 100644
index 000000000000..e2fcf32531de
--- /dev/null
+++ b/docs/_docs/contributing/issues/checklist.md
@@ -0,0 +1,135 @@
+---
+layout: doc-page
+title: Pull Request Checklist
+---
+
+Once you've solved the issue you were working on, you'll likely want to see your
+changes added to the [Scala 3 repo][lampepfl/dotty]. To do that, you need to
+prepare a [pull request][pull-request] with your changes. Assuming that the team
+is aware of what you've been working on, here are some final steps that you'll want
+to keep in mind as you create your PR.
+
+### 1: Sign the CLA
+
+Make sure you have signed the [Scala CLA][cla]. If you have any questions about
+what this is and why it's required, you can read further about it [here][cla].
+
+### 2: Make sure your work is on its own branch
+
+When submitting your pull request, it's always best to ensure the branch name is
+unique to the changes you're working on. It's important not to submit your PR from
+your `main` branch, as this blocks maintainers from making any changes to your PR
+if necessary.
+
+### 3: Add Tests
+
+Add at least one test that replicates the problem in the issue, and that shows it is now resolved.
+
+You may of course add variations of the test code to try and eliminate edge cases.
+[Become familiar with testing in Scala 3](./testing.md).
+
+### 4: Add Documentation
+
+Please ensure that all code is documented to explain its use, even if only internal
+changes are made. This refers to scaladocs and also any changes that might be
+necessary in the reference docs.
+
+### 5: Double check everything
+
+Here are a couple of tips to keep in mind:
+
+- [DRY (Don't Repeat Yourself)][dry]
+- [Scout's Rule][scouts]
+- When adding new code, try to use [optional braces][optional-braces]. If you're rewriting old code,
+  you should also use optional braces unless it introduces more code changes
+  than necessary.
+
+### 6: Commit Messages
+
+Here are some guidelines for writing commits for Dotty.
+
+1. If your work spans multiple local commits (for example, if you make safe-point
+   commits while working in a feature branch, or you work in a branch for a long
+   time doing merges/rebases, etc.), then please do not submit them all, but
+   rewrite the history by squashing the commits into one large commit which is
+   accompanied by a detailed commit message (as discussed in the following
+   sections). For more info, see the article: [Git Workflow][git-workflow].
+   Additionally, every commit should be able to be used in isolation; that is,
+   each commit must build and pass all tests.
+
+2. The first line should be a descriptive sentence about what the commit is
+   doing. It should be possible to fully understand what the commit does by just
+   reading this single line. It is **not ok** to only list the ticket number,
+   type "minor fix" or similar. If the commit has a corresponding ticket,
+   include a reference to the ticket number, prefixed with "Closes #", at the
+   beginning of the first line followed by the title of the ticket, assuming
+   that it aptly and concisely summarizes the commit in a single line. If the
+   commit is a small fix, then you are done. If not, go to 3.
+
+3. Following the single line description (ideally no more than 70 characters
+   long) should be a blank line followed by an enumerated list with the details
+   of the commit.
+
+4. Add keywords for your commit (depending on the degree of automation we reach,
+   the list may change over time):
+   * ``Review by @githubuser`` - will notify the reviewer via GitHub. Everyone
+     is encouraged to give feedback, however. (Remember that @-mentions will
+     result in notifications also when pushing to a WIP branch, so please only
+     include this in your commit message when you're ready for your pull
+     request to be reviewed. Alternatively, you may request a review in the
+     pull request's description.)
+   * ``Fix/Fixing/Fixes/Close/Closing/Refs #ticket`` - if you want to mark the
+     ticket as fixed in the issue tracker (GitHub understands this).
+   * ``backport to _branch name_`` - if the fix needs to be cherry-picked to
+     another branch (like 2.9.x, 2.10.x, etc)
+
+Example:
+
+```
+fix: here is your PR title briefly mentioning the topic
+
+Here is the body of your PR with some more information
+  - Details 1
+  - Details 2
+  - Details 3
+
+Closes #2
+```
+
+### 7: Create your PR!
+
+When the feature or fix is completed you should open a [Pull
+Request](https://help.github.com/articles/using-pull-requests) on GitHub.
+
+If you're not actually finished yet and are just looking for some initial input
+on your approach, feel free to open a [Draft PR][draft]. This lets reviewers
+know that you're not finished yet. It's also a good idea to put a [WIP] in front
+of your PR title to make this extra clear.
+
+Shortly after creating your pull request a maintainer should assign someone to
+review it. If this doesn't happen after a few days, feel free to ping someone on
+the [Scala Contributors Discord][discord] or tag someone on the PR. Depending on
+the type of pull request there might be multiple people that take a look at your
+changes. There might also be community input as we try to keep the review
+process as open as possible.
+
+### 8: Addressing feedback
+
+More than likely you'll get feedback from the reviewers, so you'll want to make
+sure to address everything. When in doubt, don't hesitate to ask for
+clarification or more information.
+
+Once you finally see the "LGTM" (Looks Good To Me or Let's Get This Merged),
+your PR will be merged in!
+
+[pull-request]: https://docs.github.com/en?query=pull+requests
+[lampepfl/dotty]: https://github.com/lampepfl/dotty
+[cla]: http://typesafe.com/contribute/cla/scala
+[issues]: https://github.com/lampepfl/dotty/issues
+[full-list]: https://github.com/lampepfl/dotty/blob/master/CONTRIBUTING.md
+[discord]: https://discord.gg/TSmY9zkHar
+[dry]: https://www.oreilly.com/library/view/97-things-every/9780596809515/ch30.html
+[scouts]: https://www.oreilly.com/library/view/97-things-every/9780596809515/ch08.html
+[optional-braces]: https://docs.scala-lang.org/scala3/reference/other-new-features/indentation.html
+[draft]: https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/about-pull-requests#draft-pull-requests
+[git-workflow]: http://sandofsky.com/blog/git-workflow.html
diff --git a/docs/_docs/contributing/issues/debugging.md b/docs/_docs/contributing/issues/debugging.md
new file mode 100644
index 000000000000..2d8a9e5941e4
--- /dev/null
+++ b/docs/_docs/contributing/issues/debugging.md
@@ -0,0 +1,189 @@
+---
+layout: doc-page
+title: Debugging the Compiler
+---
+
+The debugger is a powerful tool to navigate the internals of the compiler and track down bugs.
+
+You can start the Scala debugger in VSCode using [Metals](https://scalameta.org/metals/).
+On this page you will learn how to configure it, and how to use it.
+
+## Importing the project in VSCode using Metals
+
+The first step is to import the build in Metals, if it has not yet been imported.
+
+To do so you can open the [lampepfl/dotty][lampepfl/dotty] repository in VSCode and click `Import build` in the Metals view.
+It may take a few minutes to import, compile and index the full project.
+
+![Import build](/images/contribution/import-build.jpg)
+
+If you have any trouble with importing, you can try to switch the build server from Bloop to sbt,
+by running the `Metals: Switch build server` command from the VSCode command palette.
+
+## Configuring the debugger
+
+To configure the debugger in VSCode, you can go to the `Run and Debug` view and click `create a launch.json file`.
+It creates the `launch.json` file in the `.vscode` folder, in which we will define the debug configurations.
+
+![Create launch.json file](/images/contribution/launch-config-file.jpg)
+
+To create a debug configuration:
+- Open the `.vscode/launch.json` file
+- Click the `Add Configuration` button
+- Go down the list of templates and select `Scala: Run main class`
+
+![Create configuration](/images/contribution/create-config.jpg)
+
+The added configuration should look like this:
+```json
+{
+  "type": "scala",
+  "request": "launch",
+  "name": "Untitled",
+  "mainClass": "???",
+  "args": [],
+  "jvmOptions": [],
+  "env": {}
+}
+```
+
+This is a template that you need to fill out.
+First, you can give a `name` to your configuration, for instance `Debug Scala 3 Compiler`.
+
+The two most important parameters for debugging the compiler are `mainClass` and `args`.
+The `mainClass` of the compiler is `dotty.tools.dotc.Main`.
+In the `args` you need to specify the compiler arguments, which must contain at least a Scala file to compile and a `-classpath` option.
+
+To start with, we can compile the `../tests/pos/HelloWorld.scala` file.
+In the classpath, we always need at least the `scala-library_2.13` and the bootstrapped `scala3-library_3`.
+To locate them on your filesystem you can run the `export scala3-library-bootstrapped/fullClasspath` command in sbt.
+ +``` +$ sbt +> export scala3-library-bootstrapped/fullClasspath +/home/user/lampepfl/dotty/out/bootstrap/scala3-library-bootstrapped/scala-3.3.1-RC1-bin-SNAPSHOT-nonbootstrapped/classes:/home/user/.cache/coursier/v1/https/repo1.maven.org/maven2/org/scala-lang/scala-library/2.13.10/scala-library-2.13.10.jar +[success] Total time: 1 s, completed Mar 10, 2023, 4:37:43 PM +``` + +Note that it is important to use the bootstrapped version of the `scala3-library` to get the correct TASTy version. + +Additionally you can add the `-color` and `never` arguments to prevent the compiler from printing ANSI codes as strings in the debug console. + +Here is the final configuration: +```json +{ + "type": "scala", + "request": "launch", + "name": "Debug Scala 3 Compiler", + "mainClass": "dotty.tools.dotc.Main", + "args": [ + "../tests/pos/HelloWorld.scala", + "-classpath", + // To replace with your own paths + "/home/user/lampepfl/dotty/out/bootstrap/scala3-library-bootstrapped/scala-3.3.1-RC1-bin-SNAPSHOT-nonbootstrapped/classes:/home/user/.cache/coursier/v1/https/repo1.maven.org/maven2/org/scala-lang/scala-library/2.13.10/scala-library-2.13.10.jar", + "-color", + "never" + ], + "jvmOptions": [], + "env": {} +} +``` + +## Customizing the debug configurations + +### Compiling several files at once + +You can compile more than one Scala file, by adding them in the `args`: +```json +"args": [ + "file1.scala", + "file2.scala", + "-classpath", + "/home/user/lampepfl/dotty/out/bootstrap/scala3-library-bootstrapped/scala-3.3.1-RC1-bin-SNAPSHOT-nonbootstrapped/classes:/home/user/.cache/coursier/v1/https/repo1.maven.org/maven2/org/scala-lang/scala-library/2.13.10/scala-library-2.13.10.jar" +] +``` + +### Depending on a library + +To add a dependency to an external library you need to download it and all its transitive dependencies, and to add them in the classpath. +The Coursier CLI can help you to do that. +For instance to add a dependency to cats you can run: +``` +$ cs fetch org.typelevel::cats-core:2.+ --classpath --scala-version 3 --exclude org.scala-lang:scala-library --exclude org.scala-lang:scala3-library +/home/user/.cache/coursier/v1/https/repo1.maven.org/maven2/org/typelevel/cats-core_3/2.9.0/cats-core_3-2.9.0.jar:/home/user/.cache/coursier/v1/https/repo1.maven.org/maven2/org/typelevel/cats-kernel_3/2.9.0/cats-kernel_3-2.9.0.jar +``` + +And concatenate the output into the classpath argument, which should already contain the scala-library_2.13 and the bootstrapped scala3-library: + +```json +"args": [ + "using-cats.scala", + "-classpath", + "/home/user/lampepfl/dotty/out/bootstrap/scala3-library-bootstrapped/scala-3.3.1-RC1-bin-SNAPSHOT-nonbootstrapped/classes:/home/user/.cache/coursier/v1/https/repo1.maven.org/maven2/org/scala-lang/scala-library/2.13.10/scala-library-2.13.10.jar:/home/user/.cache/coursier/v1/https/repo1.maven.org/maven2/org/typelevel/cats-core_3/2.9.0/cats-core_3-2.9.0.jar:/home/user/.cache/coursier/v1/https/repo1.maven.org/maven2/org/typelevel/cats-kernel_3/2.9.0/cats-kernel_3-2.9.0.jar" +] +``` + +### Add more compiler options + +In the `args` you can add any additional compiler option you want. + +For instance you can add `-Xprint:all` to print all the generated trees after each mega phase. + +Run `scalac -help` to get an overview of the available compiler options. + +### Defining more than one launch configuration + +You can create as many debug configurations as you need: to compile different files, with different compiler options or different classpaths. 
+
+## Starting the debugger
+
+Before starting the debugger you need to put a breakpoint in the part of the code that you want to debug.
+If you don't know where to start, you can put a breakpoint in the `main` method of the `dotty.tools.dotc.Driver` class.
+
+![First breakpoint](/images/contribution/breakpoint.jpg)
+
+Now to start the debugger, open the debug view, find the drop-down list of all the debug configurations and click on yours.
+The debugger should start and pause on your breakpoint.
+
+![Start debugger](/images/contribution/start-debugger.jpg)
+
+## Using the debugger
+
+### Navigating the call stack
+
+When the debugger has paused, you can see the current call stack in the `Debug and Run` view.
+Each frame of the call stack contains different variables, whose values you can see in the `Variables` section of the `Debug and Run` view.
+
+![Call stack](/images/contribution/call-stack.jpg)
+
+Analysing the call stack and the variables can help you understand the path taken by the compiler to reach that state.
+
+### The debugging steps
+
+The debug toolbar contains the `Continue / Pause`, `Step Over`, `Step Into`, `Step Out`, `Restart` and `Stop` buttons.
+
+![Debugging steps](/images/contribution/toolbar.jpg)
+
+You can use the step buttons to execute the code step by step and get a precise understanding of the program.
+
+### The debug console
+
+When the debugger has paused, you can evaluate any Scala 3 expression in the debug console.
+This is useful to inspect some values or to execute some parts of the code.
+
+For instance, you can evaluate `tree.show` to pretty-print a tree.
+
+![Debug console](/images/contribution/debug-console.jpg)
+
+### Conditional breakpoints
+
+In a breakpoint you can define a condition, in the form of a Boolean expression written in Scala.
+The program will stop at the breakpoint as soon as the condition is met.
+
+To add a condition, right-click on a breakpoint and pick `Edit breakpoint...`.
+
+For instance, if you know that a bug happens on typing a method `foo`, you can use the condition `tree.symbol.name.show == "foo"` in a breakpoint in the `Typer`.
+
+![Conditional breakpoint](/images/contribution/conditional-breakpoint.jpg)
+
+[lampepfl/dotty]: https://github.com/lampepfl/dotty
diff --git a/docs/_docs/contributing/issues/efficiency.md b/docs/_docs/contributing/issues/efficiency.md
new file mode 100644
index 000000000000..07307646a4bb
--- /dev/null
+++ b/docs/_docs/contributing/issues/efficiency.md
@@ -0,0 +1,24 @@
+---
+layout: doc-page
+title: Improving Your Workflow
+---
+
+In the previous sections of this chapter, you saw some techniques for
+working with the compiler. Some of these techniques can be used
+repetitively, e.g.:
+
+- Navigating stack frames
+- Printing variables in certain ways
+- Instrumenting variable definitions with tracers
+
+The above procedures often take a lot of time when done manually, reducing productivity:
+as the cost (in terms of time and effort) is high, you may avoid attempting to do so,
+and possibly miss valuable information.
+
+If you're doing those things really frequently, it is recommended to script your editor
+to reduce the number of steps, e.g. navigating to the definition of a stack frame
+part when you click it, or instrumenting variables for printing.
+
+An example of how it is done for Sublime Text 3 is [here](https://github.com/anatoliykmetyuk/scala-debug-sublime).
+
+True, it takes some time to script your editor, but if you spend a lot of time with issues, it pays off.
diff --git a/docs/_docs/contributing/issues/index.md b/docs/_docs/contributing/issues/index.md
new file mode 100644
index 000000000000..db348d7edd9d
--- /dev/null
+++ b/docs/_docs/contributing/issues/index.md
@@ -0,0 +1,17 @@
+---
+layout: index
+title: Finding the Cause of an Issue
+---
+
+An issue found in the [GitHub repo](https://github.com/lampepfl/dotty) usually describes some code that
+manifests undesired behaviour.
+
+This chapter of the guide describes the different steps to contribute to Dotty:
+- [Reproducing an Issue](./reproduce.md)
+- [Finding the Cause of an Issue](./cause.md)
+- [Debugging the Compiler](./debugging.md)
+- [Other debugging techniques](./other-debugging.md)
+- [Inspect the values](./inspection.md)
+- [Improving your workflow](./efficiency.md)
+- [Testing a Fix](./testing.md)
+- [Checklist](./checklist.md)
diff --git a/docs/_docs/contributing/issues/inspection.md b/docs/_docs/contributing/issues/inspection.md
new file mode 100644
index 000000000000..abedc09ecd3b
--- /dev/null
+++ b/docs/_docs/contributing/issues/inspection.md
@@ -0,0 +1,181 @@
+---
+layout: doc-page
+title: How to Inspect Values
+---
+
+In this section, you will find out how to inspect the contents of certain objects
+while the compiler is running, and how to inspect the artifacts produced by the compiler.
+
+## Inspecting variables in-place
+
+Frequently you will need to inspect the content of a particular variable.
+You can use either `println`s or the debugger; see [Debugging the Compiler](./debugging.md)
+for how to set up the latter.
+
+In the remainder of this article we'll use `println()` inserted in the code, but the same
+effect can be accomplished by stopping at a breakpoint and typing an expression in the
+[debug console](./debugging.md#the-debug-console) of the debugger.
+
+When printing a variable, it's always a good idea to call `show` on that variable: `println(x.show)`.
+Many objects of the compiler define `show`, returning a human-readable string.
+For example, if called on a tree, the output will be the tree's representation as source code, rather than
+the underlying raw data.
+
+Sometimes you need to print flags. Flags are metadata attached to [symbols] containing information such as whether a
+class is abstract, comes from Java, what modifiers a variable has (private, protected, etc.) and so on.
+Flags are stored in a single `Long` value, each bit of which represents whether a particular flag is set.
+
+To print flags, you can use the `flagsString` method, e.g. `println(x.flagsString)`.
+
+## Pretty Printing with a String Interpolator
+
+You can also pretty-print objects with string interpolators;
+these default to calling `.show` when possible, avoiding boilerplate
+and also helping to format error messages.
+
+Import them with the following:
+
+```scala
+import dotty.tools.dotc.core.Decorators.*
+```
+
+Here is a table of explanations for their use:
+
+| Usage  | Description                       |
+|--------|-----------------------------------|
+|`i""`   | General purpose string formatting. It calls `.show` on objects mixing in Showable, `String.valueOf` otherwise. |
+|`em""`  | Formatting for error messages: like `i`, but suppresses follow-on error messages after the first one if some of their arguments are "non-sensical". |
+|`ex""`  | Formatting with added explanations: like `em`, but adds explanations to give more info about type variables and to disambiguate where needed. |
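+
+As a quick usage sketch (assuming a `Context` is in scope and `tree` is a typed tree;
+the method name `debugTree` is hypothetical):
+
+```scala
+import dotty.tools.dotc.ast.tpd
+import dotty.tools.dotc.core.Contexts.Context
+import dotty.tools.dotc.core.Decorators.*
+
+def debugTree(tree: tpd.Tree)(using Context): Unit =
+  // `i` calls `.show` on `tree` and its type, so the output reads as
+  // source code rather than raw constructor data.
+  println(i"inspecting tree: $tree of type ${tree.tpe}")
+```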
+
+
+## Obtaining debug output from the compiler
+
+As explained in [navigation](../issues/cause.md), we can debug the code being generated as it is transformed
+through the compiler. As well as plain tree output, there are many compiler options that
+add extra debug information to trees when compiling a file; you can find the full list
+in [ScalaSettings].
+
+## Stopping the compiler early
+Sometimes you may want to stop the compiler after a certain phase, for example to prevent
+knock-on errors from occurring from a bug in an earlier phase. Use the flag
+`-Ystop-after:<phase>` to prevent any phases executing afterwards.
+
+> e.g. `-Xprint:<phase>`, where `phase` is a miniphase, will print after
+> the whole phase group is complete, which may be several miniphases after `phase`.
+> Instead you can use `-Ystop-after:<phase> -Xprint:<phase>` to stop
+> immediately after the miniphase and see the trees that you intended.
+
+## Printing TASTy of a Class
+
+If you are working on an issue related to TASTy, it is good to know how to inspect
+the contents of a TASTy file, produced from compilation of Scala files.
+
+The next example uses an [issue directory](../issues/reproduce.md#dotty-issue-workspace) to compile a class and print its TASTy.
+In the directory, you should create a file `tasty/Foo.scala` (with contents of `class Foo`),
+and create a file `tasty/launch.iss` with the following contents:
+
+```
+$ (rm -rv out || true) && mkdir out # clean up compiler output, create `out` dir.
+
+scala3/scalac -d $here/out $here/Foo.scala
+
+scala3/scalac -print-tasty $here/out/Foo.tasty
+```
+
+With the sbt command `issue tasty` you will see output such as the following:
+
+```
+--------------------------------------------------------------------------------
+local/foo/out/Foo.tasty
+--------------------------------------------------------------------------------
+Names:
+   0: ASTs
+   1: <empty>
+   2: Foo
+   3: <init>
+...
+```
+and so on.
+
+## Inspecting The Representation of Types
+
+> [learn more about types](../architecture/types.md) in `dotc`.
+
+If you are curious about the representation of a type, say `[T] =>> List[T]`,
+you can use the helper program [dotty.tools.printTypes][DottyTypeStealer],
+which prints the internal representation of types, along with their class. It can be
+invoked from the sbt shell with three arguments as follows:
+```bash
+sbt:scala3> scala3-compiler/Test/runMain
+  dotty.tools.printTypes
+  <source>
+  <kind>
+  <typeStrings*>
+```
+
+- The first argument, `source`, is an arbitrary string that introduces some Scala definitions.
+It may be the empty string `""`.
+- The second argument, `kind`, determines the format of the following arguments,
+accepting one of the following options:
+  - `rhs` - accept return types of definitions
+  - `class` - accept signatures for classes
+  - `method` - accept signatures for methods
+  - `type` - accept signatures for type definitions
+  - The empty string `""`, in which case `rhs` will be assumed.
+- The remaining arguments are type signature strings, accepted in the format determined by
+`kind`, and collected into a sequence `typeStrings`. Signatures are the part of a definition
+that comes after its name (or a simple type in the case of `rhs`) and may reference
+definitions introduced by the `source` argument.
+
+Each of the `typeStrings` is then printed, displaying its internal structure alongside its class.
+
+## Printing TASTy of a Class
+
+If you are working on an issue related to TASTy, it is good to know how to inspect
+the contents of a TASTy file produced from the compilation of Scala files.
+
+The next example uses an [issue directory](../issues/reproduce.md#dotty-issue-workspace) to compile a class and print its TASTy.
+In the directory, you should create a file `tasty/Foo.scala` (with contents `class Foo`),
+and create a file `tasty/launch.iss` with the following contents:
+
+```
+$ (rm -rv out || true) && mkdir out # clean up compiler output, create `out` dir.
+
+scala3/scalac -d $here/out $here/Foo.scala
+
+scala3/scalac -print-tasty $here/out/Foo.tasty
+```
+
+With the sbt command `issue tasty`, you will see output such as the following:
+
+```
+--------------------------------------------------------------------------------
+local/foo/out/Foo.tasty
+--------------------------------------------------------------------------------
+Names:
+   0: ASTs
+   1: <empty>
+   2: Foo
+   3: <init>
+...
+```
+and so on.
+
+## Inspecting The Representation of Types
+
+> [learn more about types](../architecture/types.md) in `dotc`.
+
+If you are curious about the representation of a type, say `[T] =>> List[T]`,
+you can use the helper program [dotty.tools.printTypes][DottyTypeStealer],
+which prints the internal representation of types, along with their class. It can be
+invoked from the sbt shell with three arguments as follows:
+```bash
+sbt:scala3> scala3-compiler/Test/runMain
+  dotty.tools.printTypes
+  <source>
+  <kind>
+  <typeStrings*>
+```
+
+- The first argument, `source`, is an arbitrary string that introduces some Scala definitions.
+It may be the empty string `""`.
+- The second argument, `kind`, determines the format of the following arguments,
+accepting one of the following options:
+  - `rhs` - accept return types of definitions
+  - `class` - accept signatures for classes
+  - `method` - accept signatures for methods
+  - `type` - accept signatures for type definitions
+  - The empty string `""`, in which case `rhs` will be assumed.
+- The remaining arguments are type signature strings, accepted in the format determined by
+`kind`, and collected into a sequence `typeStrings`. Signatures are the part of a definition
+that comes after its name (or a simple type in the case of `rhs`), and may reference
+definitions introduced by the `source` argument.
+
+Each of the `typeStrings` is then printed, displaying its internal structure alongside its class.
+
+### Examples
+
+Here, given a previously defined `class Box { type X }`, you can inspect the return type `Box#X`:
+```bash
+sbt:scala3> scala3-compiler/Test/runMain
+> dotty.tools.printTypes
+> "class Box { type X }"
+> "rhs"
+> "Box#X"
+[info] running (fork) dotty.tools.printTypes "class Box { type X }" rhs Box#X
+TypeRef(TypeRef(ThisType(TypeRef(NoPrefix,module class <empty>)),class Box),type X) [class dotty.tools.dotc.core.Types$CachedTypeRef]
+```
+
+Here are some other examples you can try:
+- `...printTypes "" "class" "[T] extends Seq[T] {}"`
+- `...printTypes "" "method" "(x: Int): x.type"`
+- `...printTypes "" "type" "<: Int" "= [T] =>> List[T]"`
+
+### Don't just print: extracting further information
+
+`dotty.tools.printTypes` is useful to see the representation
+of a type at a glance, but sometimes you want to extract more. Instead, you can use the
+method `dotty.tools.DottyTypeStealer.stealType`. With the same inputs as `printTypes`,
+it returns both a `Context` containing the passed definitions and the list of types.
+
+As a worked example, let's create a test case to verify the structure of `Box#X` that you saw earlier:
+```scala
+import dotty.tools.dotc.core.Contexts.Context
+import dotty.tools.dotc.core.Types.*
+
+import org.junit.Test
+
+import dotty.tools.DottyTypeStealer, DottyTypeStealer.Kind
+
+class StealBox:
+
+  @Test
+  def stealBox: Unit =
+    val (ictx, List(rhs)) =
+      DottyTypeStealer.stealType("class Box { type X }", Kind.rhs, "Box#X")
+
+    given Context = ictx
+
+    rhs match
+      case X @ TypeRef(Box @ TypeRef(ThisType(empty), _), _) =>
+        assert(Box.name.toString == "Box")
+        assert(X.name.toString == "X")
+        assert(empty.name.toString == "<empty>")
+```
+
+[DottyTypeStealer]: https://github.com/lampepfl/dotty/blob/master/compiler/test/dotty/tools/DottyTypeStealer.scala
+[ScalaSettings]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala
+[symbols]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/core/SymDenotations.scala
diff --git a/docs/_docs/contributing/debugging.md b/docs/_docs/contributing/issues/other-debugging.md
similarity index 94%
rename from docs/_docs/contributing/debugging.md
rename to docs/_docs/contributing/issues/other-debugging.md
index 959ad6706290..1aa0fb85e5f8
--- a/docs/_docs/contributing/debugging.md
+++ b/docs/_docs/contributing/issues/other-debugging.md
@@ -1,26 +1,8 @@
 ---
 layout: doc-page
-title: Debugging Techniques
+title: Other Debugging Techniques
 ---
 
-# Debugging Techniques
-- [Setting up the playground](#setting-up-the-playground)
-- [Show for human readable output](#show-for-human-readable-output)
-- [How to disable color](#how-to-disable-color)
-- [Reporting as a non-intrusive println](#reporting-as-a-non-intrusive-println)
-- [Printing out trees after phases](#printing-out-trees-after-phases)
-- [Printing out stack traces of compile time errors](#printing-out-stack-traces-of-compile-time-errors)
-- [Configuring the printer output](#configuring-the-printer-output)
-- [Figuring out an object creation site](#figuring-out-an-object-creation-site)
-  * [Via ID](#via-id)
-  * [Via tracer](#via-tracer)
-- [Built-in Logging Architecture](#built-in-logging-architecture)
-  * [Printers](#printers)
-  * [Tracing](#tracing)
-  * [Reporter](#reporter)
-
-Table of contents generated with markdown-toc
-
 ## Setting up the playground
 Consider the `../issues/Playground.scala` (relative to the Dotty directory) file is:
diff --git a/docs/_docs/contributing/issues/reproduce.md
b/docs/_docs/contributing/issues/reproduce.md
new file mode 100644
index 000000000000..41d96327ef24
--- /dev/null
+++ b/docs/_docs/contributing/issues/reproduce.md
@@ -0,0 +1,127 @@
+---
+layout: doc-page
+title: Reproducing an Issue
+---
+
+To try fixing an issue, you will first need to reproduce it, so that
+- you can understand its cause
+- you can verify that any changes made to the codebase have a positive impact on the issue.
+
+Say you want to reproduce issue [#7710] locally. You would first copy the code from the *"Minimised Code"*
+section of the issue to a file named e.g. `local/i7710.scala`,
+and then try to compile it from the sbt console opened in the dotty root directory:
+```bash
+$ sbt
+sbt:scala3> scala3/scalac -d local/out local/i7710.scala
+```
+> Here, the `-d` flag specifies a directory `local/out` where generated code will be output.
+
+You can then verify that the local reproduction has the same behaviour as originally reported in the issue.
+If so, then you can start to try and fix it. Otherwise, perhaps the issue is out of date, or
+is missing information about how to reproduce it accurately.
+
+## Dotty Issue Workspace
+
+Sometimes you will need more complex commands to reproduce an issue, and it is useful to script these, which
+can be done with [dotty-issue-workspace]. It lets you bundle sbt commands for issue reproduction in one
+file and then run them from the Dotty project's sbt console.
+
+### Try an Example Issue
+
+Let's use [dotty-issue-workspace] to reproduce issue [#7710]:
+1. Follow [the steps in the README][workspace-readme] to install the plugin.
+2. In your Issue Workspace directory (as defined in the plugin's README file,
+   "Getting Started" section, step 2), create a subdirectory for the
+   issue: `mkdir i7710`.
+3. Create a file for the reproduction: `cd i7710; touch Test.scala`. In that file,
+   insert the code from the issue.
+4. In the same directory, create a file `launch.iss` with the following content:
+   ```bash
+   $ (rm -rv out || true) && mkdir out # clean up compiler output, create `out` dir.
+
+   scala3/scalac -d $here/out $here/Test.scala
+   ```
+
+   - The first line, `$ (rm -rv out || true) && mkdir out`, specifies a shell command
+     (it starts with `$`), in this case to ensure that there is a fresh `out`
+     directory to hold compiler output.
+   - The next line, `scala3/scalac -d $here/out $here/Test.scala`, specifies an sbt
+     command, which will compile `Test.scala` and place any output into `out`.
+     `$here` is a special variable that will be replaced by the path of the parent
+     directory of `launch.iss` when executing the commands.
+5. Now, from a terminal, you can run the issue from sbt in the dotty directory
+   ([see here](../getting-started.md#compiling-and-running) for a reminder if you have not cloned the repo):
+   ```bash
+   $ sbt
+   sbt:scala3> issue i7710
+   ```
+   This will execute all the commands in the `i7710/launch.iss` file one by one.
+   If you've set up `dotty-issue-workspace` as described in its README,
+   the `issue` task will know where to find the folder by its name.
+
+### Using Script Arguments
+
+You can use script arguments inside `launch.iss` to reduce the number of steps when
+working with issues.
+
+Say you have an issue `foo` with two alternative files that are very similar:
+`original.scala`, which reproduces the issue, and `alt.scala`, which does not.
+How do you compile them selectively?
+
+You can achieve this via the following `launch.iss`:
+
+```bash
+$ (rm -rv out || true) && mkdir out # clean up compiler output, create `out` dir.
+
+scala3/scalac -d $here/out $here/$1.scala # compile the first argument following `issue foo`
+```
+
+It is similar to the previous example, except now you will compile a file `$1.scala`, referring
+to the first argument passed after the issue name. The command invoked would look like
+`issue foo original` to compile `original.scala`, and `issue foo alt` for `alt.scala`.
+
+In general, you can refer to arguments passed to the `issue <issue_name>` command using
+the dollar notation: `$1` for the first argument, `$2` for the second and so on.
+
+### Multiline Commands
+
+Inside a `launch.iss` file, one command can be spread across multiple lines. For example,
+if your command has multiple arguments, you can put each argument on a new line.
+
+Multiline commands can even have comments in between lines. This is useful
+if you want to try variants of a command with optional arguments (such as configuration).
+You can put the optional arguments on separate lines, and then decide when they are passed to
+the command by placing `#` in front to convert the line to a comment (i.e. the argument will
+not be passed). This saves typing the same arguments each time you want to use them.
+
+The following `launch.iss` file is an example of how you can use multiline commands as a
+template for solving issues that [run compiled code](../issues/testing.md#checking-program-output). It demonstrates configuring the
+`scala3/scalac` command using compiler flags, which are commented out.
+Put your favourite flags there for quick usage.
+
+```bash
+$ (rm -rv out || true) && mkdir out # clean up compiler output, create `out` dir.
+
+scala3/scalac # Invoke the compiler task defined by the Dotty sbt project
+  -d $here/out # All the artefacts go to the `out` folder created earlier
+  # -Xprint:typer # Useful debug flags, commented out and ready for quick usage. Should you need one, you can quickly access it by uncommenting it.
+  # -Ydebug-error
+  # -Yprint-debug
+  # -Yprint-debug-owners
+  # -Yshow-tree-ids
+  # -Ydebug-tree-with-id 340
+  # -Ycheck:all
+  $here/$1.scala # Invoke the compiler on the file passed as the first argument following the issue name. E.g. `issue foo Hello` will compile `Hello.scala`, assuming the issue folder name is `foo`.
+
+scala3/scala -classpath $here/out Test # Run the main method of `Test` generated by the compiler run.
+```
+
+## Conclusion
+
+In this section, you have seen how to reproduce an issue locally, and next you will see
+how to try and detect its root cause.
+
+[lampepfl/dotty]: https://github.com/lampepfl/dotty/issues
+[#7710]: https://github.com/lampepfl/dotty/issues/7710
+[dotty-issue-workspace]: https://github.com/anatoliykmetyuk/dotty-issue-workspace
+[workspace-readme]: https://github.com/anatoliykmetyuk/dotty-issue-workspace#getting-started
\ No newline at end of file
diff --git a/docs/_docs/contributing/issues/testing.md b/docs/_docs/contributing/issues/testing.md
new file mode 100644
index 000000000000..1f7c35c6d58a
--- /dev/null
+++ b/docs/_docs/contributing/issues/testing.md
@@ -0,0 +1,212 @@
+---
+layout: doc-page
+title: Testing Your Changes
+---
+
+It is important to add tests before opening a pull request, to verify that everything works as expected,
+and to act as proof of what is valid/invalid Scala code (in case it is broken in the future).
+In this section you will see the testing procedures in Scala 3.
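+
+As a first taste, here is a minimal sketch of the simplest kind of test you will meet below: a single
+Scala file that the framework must successfully compile (the issue number `i101` and the file contents
+are hypothetical), run with the `testCompilation i101` command described later:
+
+```scala
+// tests/pos/i101.scala: a `pos` test passes if this file simply compiles
+class Foo:
+  def twice(x: Int): Int = x * 2
+```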
+
+## Running all Tests
+
+Running all tests in Dotty is as simple as:
+
+```bash
+$ sbt test
+```
+Specifically, `sbt test` runs all tests that do _not_ require a bootstrapped
+compiler. In practice, this means that it runs all compilation tests meeting
+this criterion, as well as all non-compiler tests.
+
+To run all tests of Scala 3, including those for the compiler, REPL, libraries and more, run the following in sbt:
+
+```bash
+$ sbt
+sbt:scala3> scala3-bootstrapped/test
+```
+
+Often, however, it is not necessary to test everything if your changes are localised to one area.
+The following sections describe the different kinds of tests and how to run them individually.
+
+## Compilation Tests
+
+Compilation tests run the compiler over input files, using various settings. Input files
+are found within the `tests/` directory at the root of the compiler repo.
+
+Test input files are categorised further by placing them in the subdirectories
+of the `tests/` directory. A small selection of test categories includes:
+
+- `tests/pos` – tests that should compile: pass if compilation succeeds.
+- `tests/neg` – tests that should not compile: pass if compilation fails. Useful, e.g., to test an expected compiler error.
+- `tests/run` – these tests not only compile but are also run.
+
+### Naming and Running a Test Case
+
+Tests are, by convention, named after the number of the issue they are fixing.
+E.g. if you are fixing issue 101, then the test should be named `i101.scala`, for a single-file test,
+or be within a directory called `i101/` for a multi-file test.
+
+To run the test, invoke the sbt command `testCompilation i101` (this will match all tests with `"i101"` in
+the name, so it is useful to use a unique name).
+
+The test groups – `pos`, `neg`, etc. – are defined in [CompilationTests]. If you want to run a group
+of tests, e.g. `pos`, you can do so via the `testOnly *CompilationTests -- *pos` command.
+
+### Testing a Single Input File
+
+If your issue is reproducible with only one file, put that file under an appropriate category.
+For example, if your issue is about getting rid of a spurious compiler error (that is, code that currently fails to compile should, in fact, compile), you can create a file `tests/pos/i101.scala`.
+
+### Testing Multiple Input Files
+
+If you need more than one file to reproduce an issue, create a directory instead of a file,
+e.g. `tests/pos/i101/`, and put all the Scala files that are needed to reproduce the issue there.
+There are two ways to organise the input files within it:
+
+**1: Requiring classpath dependency:** Sometimes issues require one file to be compiled after the other
+(e.g. if the issue only happens with a library dependency, like with Java interop). In this case,
+the outputs of the first file compiled will be available to the next file compiled, via the classpath.
+This is called *separate compilation*.
+
+To achieve this, within `tests/pos/i101/`, add a suffix `_n` to each file name, where `n` is an integer defining the
+order in which the file will compile. E.g. if you have two files, `Lib.scala` and `Main.scala`, and you need them
+compiled separately – `Lib` first, `Main` second – then name them `Lib_1.scala` and `Main_2.scala`
+(see the sketch after this section).
+
+**2: Without classpath dependency:** If your issue does not require a classpath dependency, your files can be compiled
+in a single run; this is called *joint compilation*. In this case, use file names without the `_n` suffix.
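+
+For instance, a minimal sketch of the separate-compilation layout described above (all names are
+illustrative) could look like this:
+
+```scala
+// tests/pos/i101/Lib_1.scala: compiled first
+class Lib:
+  def greeting: String = "hello"
+
+// tests/pos/i101/Main_2.scala: compiled second, with `Lib` available on the classpath
+object Main:
+  def run(): String = Lib().greeting
+```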
+
+### Checking Program Output
+
+`tests/run` tests verify the run-time behaviour of a test case. The output is checked by invoking a main method
+on a class `Test`; this can be done with either
+```scala
+@main def Test: Unit = assert(1 > 0)
+```
+or
+```scala
+object Test extends scala.App:
+  assert(1 > 0)
+```
+
+If your program also prints output, this can be compared against `*.check` files.
+These contain the expected output of a program. Checkfiles are named after the issue they are checking,
+e.g. `tests/run/i101.check` will check either `tests/run/i101.scala` or `tests/run/i101/`.
+
+### Checking Compilation Errors
+
+`tests/neg` tests verify that a file does not compile, and that user-facing errors are produced. There are other neg
+categories, such as `neg-custom-args`, i.e. with `neg` prefixing the directory name. Test files in the `neg*`
+categories require annotations for the lines where errors are expected. To do this, add one `// error` token to the
+end of a line for each expected error. For example, if there are three expected errors, the end of the line should contain
+`// error // error // error`.
+
+You can verify the content of the error messages with a `*.check` file. These contain the expected output of the
+compiler. Checkfiles are named after the issue they are checking,
+e.g. `i101.check` will check either `tests/neg/i101.scala` or `tests/neg/i101/`.
+*Note:* checkfiles are not required for the test to pass; however, they do add stronger constraints that the errors
+are as expected.
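+
+For instance, a minimal hypothetical `tests/neg/i101.scala` expecting exactly one error could be:
+
+```scala
+// tests/neg/i101.scala: passes only if the marked line fails to compile
+def f: Int = "hello" // error
+```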
+
+### If Checkfiles do not Match Output
+
+If the actual output does not match the expected output, the test framework will dump the actual output in the file
+`*.check.out` and fail the test suite. It will also output instructions to quickly replace the expected output
+with the actual output, in the following format:
+
+```
+Test output dumped in: tests/neg/Sample.check.out
+  See diff of the checkfile
+    > diff tests/neg/Sample.check tests/neg/Sample.check.out
+  Replace checkfile with current output
+    > mv tests/neg/Sample.check.out tests/neg/Sample.check
+```
+
+### Tips for creating Checkfiles
+
+To create a checkfile for a test, you can do one of the following:
+
+1. Create an empty checkfile
+   - then add arbitrary content
+   - run the test
+   - when it fails, use the `mv` command reported by the test to replace the initial checkfile with the actual output.
+2. Manually compile the file you are testing with `scala3/scalac`
+   - copy-paste whatever console output the compiler produces to the checkfile.
+
+### Automatically Updating Checkfiles
+
+When many or complex checkfiles must be updated, `testCompilation` can run in a mode where it overrides the
+checkfiles with the test outputs.
+```bash
+$ sbt
+> testCompilation --update-checkfiles
+```
+
+Use `--help` to see all the options:
+```bash
+$ sbt
+> testCompilation --help
+```
+
+### Bootstrapped-only tests
+
+To run `testCompilation` on a bootstrapped Dotty compiler, use
+`scala3-compiler-bootstrapped/testCompilation` (with the same syntax as above).
+Some tests can only be run in bootstrapped compilers; that includes all tests
+with `with-compiler` in their name.
+
+### From TASTy tests
+
+`testCompilation` has an additional mode to run tests that compile code from a `.tasty` file.
+Modify the lists in [compiler/test/dotc] to enable or disable tests from `.tasty` files.
+
+```bash
+$ sbt
+> testCompilation --from-tasty
+```
+
+## Unit Tests
+
+Unit tests cover the other areas of the compiler, such as interactions with the REPL, scripting tools and more.
+They are defined in [compiler/test], so if your use case isn't covered by this guide,
+you may need to consult the codebase. Some common areas are highlighted below:
+
+### SemanticDB tests
+
+To test the SemanticDB output from the `extractSemanticDB` phase (enabled with the `-Xsemanticdb` flag), run the following sbt command:
+```bash
+$ sbt
+sbt:scala3> scala3-compiler-bootstrapped/testOnly
+  dotty.tools.dotc.semanticdb.SemanticdbTests
+```
+
+[SemanticdbTests] uses source files in `tests/semanticdb/expect` to generate "expect files":
+these verify both
+- SemanticDB symbol occurrences inline in source code (`*.expect.scala`)
+- the complete output of all SemanticDB information (`metac.expect`).
+
+Expect files are used as regression tests to detect changes in the compiler.
+Their correctness is determined by human inspection.
+
+If expect files change, then [SemanticdbTests] will fail and generate new expect files, providing instructions for
+comparing the differences and replacing the outdated expect files.
+
+If you are planning to update the SemanticDB output, you can do it in bulk by running the command
+```bash
+$ sbt
+sbt:scala3> scala3-compiler/Test/runMain
+  dotty.tools.dotc.semanticdb.updateExpect
+```
+
+then compare the changes via version control.
+
+## Troubleshooting
+
+Some of the tests depend on temporary state stored in the `out` directory. In rare cases, that directory
+can enter an inconsistent state and cause spurious test failures. If you suspect a spurious test failure,
+you can run `rm -rf out/*` from the root of the repository and run your tests again. If that fails, you
+can try `git clean -xfd`.
+
+[CompilationTests]: https://github.com/lampepfl/dotty/blob/master/compiler/test/dotty/tools/dotc/CompilationTests.scala
+[compiler/test]: https://github.com/lampepfl/dotty/blob/master/compiler/test/
+[compiler/test/dotc]: https://github.com/lampepfl/dotty/tree/master/compiler/test/dotc
+[SemanticdbTests]: https://github.com/lampepfl/dotty/blob/master/compiler/test/dotty/tools/dotc/semanticdb/SemanticdbTests.scala
diff --git a/docs/_docs/contributing/procedures/index.md b/docs/_docs/contributing/procedures/index.md
index 01c76f72c00c..db2b09dbe80f 100644
--- a/docs/_docs/contributing/procedures/index.md
+++ b/docs/_docs/contributing/procedures/index.md
@@ -2,3 +2,7 @@
 layout: index
 title: Procedures
 ---
+
+This chapter of the guide describes:
+- [The release procedure](./release.md)
+- [How to test the Vulpix framework](./vulpix.md)
\ No newline at end of file
diff --git a/docs/_docs/contributing/procedures/vulpix.md b/docs/_docs/contributing/procedures/vulpix.md
index 5e8a2eab425b..1eea2fa24778 100644
--- a/docs/_docs/contributing/procedures/vulpix.md
+++ b/docs/_docs/contributing/procedures/vulpix.md
@@ -3,7 +3,6 @@ layout: doc-page
 title: Test Vulpix Framework
 ---
-# Test Vulpix Framework
 
 If you are modifying the Vulpix framework and need a playground with dummy tests to try out your modifications, do the following.
 Create the directory structure for the playground:
diff --git a/docs/_docs/contributing/testing.md b/docs/_docs/contributing/testing.md
deleted file mode 100644
index a01cdb08f8ab..000000000000
--- a/docs/_docs/contributing/testing.md
+++ /dev/null
@@ -1,207 +0,0 @@
----
-layout: doc-page
-title: Testing in Dotty
----
-
-Running all tests in Dotty is as simple as:
-
-```bash
-$ sbt test
-```
-
-Specifically, `sbt test` runs all tests that do _not_ require a bootstrapped
-compiler.
In practice, this means that it runs all compilation tests meeting -this criterion, as well as all non-compiler tests. - -The entire suite of tests can be run using the bootstrapped compiler as follows: - -```bash -$ sbt -> scala3-bootstrapped/test -``` - -There are currently several forms of tests in Dotty. These can be split into -two categories: - -## Unit tests -These tests can be found in `/test` and are used to check -functionality of specific parts of the codebase in isolation e.g: parsing, -scanning and message errors. - -To run all tests in e.g., for the compiler test-suite you can write: - -```bash -$ sbt -> scala3-compiler/test -``` - -To run a single test class you use `testOnly` and the fully qualified class name. -For example: - -```bash -> testOnly dotty.tools.dotc.transform.TreeTransformerTest -``` - -The test command follows a regular expression-based syntax `testOnly * -- *`. -The right-hand side picks a range of names for methods and the left-hand side picks a range of class names and their -fully-qualified paths. - -Consequently, you can restrict the aforementioned executed test to a subset of methods by appending ``-- *method_name``. -The example below picks up all methods with the name `canOverwrite`: - -```bash -> testOnly dotty.tools.dotc.transform.TreeTransformerTest -- *canOverwrite -``` - -Additionally, you can run all tests named `method_name`, in any class, without providing a class name: - -```bash -> testOnly -- *canOverwrite -``` - -You can also run all paths of classes of a certain name: - -```bash -> testOnly *.TreeTransformerTest -``` - -### Testing with checkfiles -Some tests support checking the output of the run or the compilation against a checkfile. A checkfile is a file in which the expected output of the compilation or run is defined. A test against a checkfile fails if the actual output mismatches the expected output. - -Currently, the `run` and `neg` (compilation must fail for the test to succeed) tests support the checkfiles. `run`'s checkfiles contain an expected run output of the successfully compiled program. `neg`'s checkfiles contain an expected error output during compilation. - -Absence of a checkfile is **not** a condition for the test failure. E.g. if a `neg` test fails with the expected number of errors and there is no checkfile for it, the test still passes. - -Checkfiles are located in the same directories as the tests they check, have the same name as these tests with the extension `*.check`. E.g. if you have a test named `tests/neg/foo.scala`, you can create a checkfile for it named `tests/neg/foo.check`. And if you have a test composed of several files in a single directory, e.g. `tests/neg/manyScalaFiles`, the checkfile will be `tests/neg/manyScalaFiles.check`. - -If the actual output mismatches the expected output, the test framework will dump the actual output in the file `*.check.out` and fail the test suite. 
It will also output the instructions to quickly replace the expected output with the actual output, in the following format: - -``` -Test output dumped in: tests/playground/neg/Sample.check.out - See diff of the checkfile - > diff tests/playground/neg/Sample.check tests/playground/neg/Sample.check.out - Replace checkfile with current output - > mv tests/playground/neg/Sample.check.out tests/playground/neg/Sample.check -``` - -To create a checkfile for a test, you can do one of the following: - -- Create a dummy checkfile with a random content, run the test, and, when it fails, use the `mv` command reported by the test to replace the dummy checkfile with the actual output. -- Manually compile the file you are testing with `scalac` and copy-paste whatever console output the compiler produces to the checkfile. - -## Integration tests -These tests are Scala source files expected to compile with Dotty (pos tests), -along with their expected output (run tests) or errors (neg tests). - -All of these tests are contained in the `./tests/*` directories and can be run with the `testCompilation` command. Tests in folders named `with-compiler` are an exception, see next section. - -Currently to run these tests you need to invoke from sbt: - -```bash -$ sbt -> testCompilation -``` - -(which is effectively the same with `testOnly dotty.tools.dotc.CompilationTests`) - -It is also possible to run tests filtered, again from sbt: - -```bash -$ sbt -> testCompilation companions -``` - -This will run both the test `./tests/pos/companions.scala` and -`./tests/neg/companions.scala` since both of these match the given string. -This also means that you could run `testCompilation` with no arguments to run all integration tests. - -When complex checkfiles must be updated, `testCompilation` can run in a mode where it overrides the checkfiles with the test outputs. -```bash -$ sbt -> testCompilation --update-checkfiles -``` - -Use `--help` to see all the options -```bash -$ sbt -> testCompilation --help -``` - -### Joint and separate sources compilation - -When the sources of a test consist of multiple source files places in a single directory they are passed to the compiler in a single run and the compiler decides in which order to compile them. In some cases, however, to reproduce a specific test scenario it might be necessary to compile the source files in several steps in a specified order. To achieve that one can add a `_${step_index}` suffix to a file name (before the `.scala` or `.java` extension) indicating the order of compilation. E.g. if the test directory contains files named `Foo_1.scala`, `Bar_2.scala` and `Baz_2.scala` then `Foo_1.scala` will be compiled first and after that `Bar_2.scala` together with `Baz_2.scala`. - -The other kind of suffix that can modify how particular files are compiled is `_c${compilerVersion}`. When specified, the file will be compiled with a specific version of the compiler instead of the one developed on the current branch. - -Different suffixes can be mixed together (their order is not important although consistency is advised), e.g. `Foo_1_c3.0.2`, `Bar_2_c3.1.0`. - -### Bootstrapped-only tests - -To run `testCompilation` on a bootstrapped Dotty compiler, use -`scala3-compiler-bootstrapped/testCompilation` (with the same syntax as above). -Some tests can only be run in bootstrapped compilers; that includes all tests -with `with-compiler` in their name. - -### From TASTy tests - -`testCompilation` has an additional mode to run tests that compile code from a `.tasty` file. 
- Modify blacklist and whitelists in `compiler/test/dotc` to enable or disable tests from `.tasty` files. - - ```bash - $ sbt - > testCompilation --from-tasty - ``` - - This mode can be run under `scala3-compiler-bootstrapped/testCompilation` to test on a bootstrapped Dotty compiler. - -### SemanticDB tests - -```bash -$ sbt -> scala3-compiler-bootstrapped/testOnly dotty.tools.dotc.semanticdb.SemanticdbTests -``` - -The output of the `extractSemanticDB` phase, enabled with `-Xsemanticdb` is tested with the bootstrapped JUnit test -`dotty.tools.dotc.semanticdb.SemanticdbTests`. It uses source files in `tests/semanticdb/expect` to generate -two kinds of output file that are compared with "expect files": placement of semanticdb symbol occurrences inline in -sourcecode (`*.expect.scala`), for human verification by inspection; and secondly metap formatted output which outputs -all information stored in semanticdb (`metac.expect`). -Expect files are used as regression tests to detect changes in the compiler. - -The test suite will create a new file if it detects any difference, which can be compared with the -original expect file, or if the user wants to globally replace all expect files for semanticdb they can use -`scala3-compiler-bootstrapped/test:runMain dotty.tools.dotc.semanticdb.updateExpect`, and compare the changes via version -control. - -### Test regimes - -Continuous integration, managed by GitHub Actions, does not run all jobs when a pull request is created. -In particular, test jobs for testing under JDK 8 and Windows are not run. Those jobs are run only for the nightly build. - -If a PR may fail differentially under either JDK 8 or Windows, the test jobs may be triggered by adding -a special command to the PR comment text: - -``` -[test_java8] -[test_windows_full] -``` -Furthermore, CI tests are bootstrapped. A job to also run tests non-bootstrapped may be triggered manually: -``` -[test_non_bootstrapped] -``` -A trivial PR, such as a fix for a typo in a comment or when contributing other documentation, may benefit by skipping CI tests altogether: -``` -[skip ci] -``` -Other jobs which are normally run can also be selectively skipped: -``` -[skip community_build] -[skip test_windows_fast] -``` - -## Troubleshooting - -Some of the tests depend on temporary state stored in the `out` directory. In rare cases, that directory -can enter an inconsistent state and cause spurious test failures. If you suspect a spurious test failure, -you can run `rm -rf out/*` from the root of the repository and run your tests again. If that fails, you -can try `git clean -xfd`. 
diff --git a/docs/_docs/contributing/tools/index.md b/docs/_docs/contributing/tools/index.md
index 92503ee82013..e784e3e15d61
--- a/docs/_docs/contributing/tools/index.md
+++ b/docs/_docs/contributing/tools/index.md
@@ -2,3 +2,8 @@
 layout: index
 title: IDEs and Tools
 ---
+
+This chapter of the guide describes how to use Dotty with IDEs and other tools:
+- [IDEs](./ide.md)
+- [Use Mill](./mill.md)
+- [Use Scalafix](./scalafix.md)
diff --git a/docs/_docs/contributing/tools/scalafix.md b/docs/_docs/contributing/tools/scalafix.md
index 58c7d0eb7b3a..30c7050f8b3e 100644
--- a/docs/_docs/contributing/tools/scalafix.md
+++ b/docs/_docs/contributing/tools/scalafix.md
@@ -3,8 +3,6 @@ layout: doc-page
 title: Working with Scalafix
 ---
 
-# Working with Scalafix
-
 First, create a new rule as follows (command from https://scalacenter.github.io/scalafix/docs/developers/setup.html):
 
 ```bash
diff --git a/docs/_docs/contributing/workflow.md b/docs/_docs/contributing/workflow.md
index 956ce2998c75..1d11dc61a6bf 100644
--- a/docs/_docs/contributing/workflow.md
+++ b/docs/_docs/contributing/workflow.md
@@ -103,8 +103,29 @@ The basics of working with Dotty codebase are documented [here](https://dotty.ep
 | Command | Description |
 |------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------|
+| `scala3/scalac` | Run the compiler directly, with any current changes. |
+| `scala3/scala` | Run the main method of a given class. |
 | `scalac ../issues/Playground.scala` | Compile the given file – path relative to the Dotty directory. Output the compiled class files to the Dotty directory itself. |
 | `scala Playground` | Run the compiled class `Playground`. Dotty directory is on classpath by default. |
 | `repl` | Start REPL |
+| `scala3/scalac -print-tasty Foo.tasty` | Print the TASTy of the top-level class `Foo` |
+| `scala3-bootstrapped/test` | Run all tests for Scala 3. (Slow, recommended for CI only) |
+| `scala3-bootstrapped/publishLocal` | Build Scala 3 locally. (Use to debug a specific project) |
 | `testOnly dotty.tools.dotc.CompilationTests -- *pos` | Run test (method) `pos` from `CompilationTests` suite. |
 | `testCompilation sample` | In all test suites, run test files containing the word `sample` in their title. |
+| `scala3-compiler/Test/runMain dotty.tools.printTypes` | Print the underlying representation of types |
+| `scaladoc/generateScalaDocumentation` | Build the documentation website (published to https://dotty.epfl.ch) |
+| `scaladoc/generateReferenceDocumentation` | Build the reference documentation website (published to https://docs.scala-lang.org/scala3/reference) |
+
+
+## Shell Commands
+
+| Command | Description |
+|--------------------------------------|------------------------------------------------------------------|
+| `rm -rv *.tasty *.class out || true` | Clean all compiled artifacts, from the root Dotty directory |
+
+
+
+
diff --git a/docs/_docs/index.md b/docs/_docs/index.md
index 97dc7fd5886b..e61313d81a4a 100644
--- a/docs/_docs/index.md
+++ b/docs/_docs/index.md
@@ -1,19 +1,6 @@
 ---
 layout: index
 redirectFrom: /docs/index.html
-nightlyOf: https://docs.scala-lang.org/scala3/reference/
 ---
 
-Dotty is the project name for technologies that are considered for inclusion in Scala 3. Scala has
-pioneered the fusion of object-oriented and functional programming in a typed setting. Scala 3 will
-be a big step towards realizing the full potential of these ideas. Its main objectives are to
-
-- become more opinionated by promoting programming idioms we found to work well,
-- simplify where possible,
-- eliminate inconsistencies and surprising behaviors,
-- build on strong foundations to ensure the design hangs well together,
-- consolidate language constructs to improve the language’s consistency, safety, ergonomics, and performance.
-
-In this documentation you will find information on how to use the Dotty compiler on your machine,
-navigate through the code, setup Dotty with your favorite IDE and more!
-
+This website contains the developer documentation of the Scala 3 compiler. It targets developers interested in contributing to the compiler or learning its internals. If you want to learn how to use Scala, go [here](https://docs.scala-lang.org/).
diff --git a/docs/_docs/internals/backend.md b/docs/_docs/internals/backend.md
index e3215c3993ae..660f6e1f41e5 100644
--- a/docs/_docs/internals/backend.md
+++ b/docs/_docs/internals/backend.md
@@ -6,8 +6,13 @@ title: "Backend Internals"
 The code for the JVM backend is split up by functionality and assembled in
 `GenBCode.scala`. This file defines class `GenBCode`, the compiler phase.
+The workflow is split into `CodeGen.scala`, which is aware of the Scala compilation context and responsible for emitting bytecode,
+and `PostProcessor.scala`, which can be used for parallelized, context-agnostic processing. In Scala 2 `PostProcessor`
+was responsible for performing bytecode optimization, e.g. inlining method calls. In Scala 3 it is only used for writing
+class files and Tasty to disk.
+
 ```
-class GenBCodePipeline -[defines]--> PlainClassBuilder
+class CodeGen.Impl   -[defines]--> PlainClassBuilder
         |                                |
     [extends]                        [extends]
         |                                |
@@ -18,14 +23,14 @@ BCodeBodyBuilder ----------------> PlainBodyBuilder
 BCodeSkelBuilder ----------------> PlainSkelBuilder
     |   /    |   \
 BCodeHelpers ----------------> BCClassGen BCAnnotGen ... (more components)
-  |    |    \
-  |    |     \-------------> helper methods
-  |    |      \------------> JMirrorBuilder, JBeanInfoBuilder (uses some components, e.g. BCInnerClassGen)
-  |    |
-  |   BytecodeWriters ---------> methods and classes to write byte code files
+  |    \
+  |     \-------------> helper methods
+  |      \------------> JMirrorBuilder, JAndroidBuilder (uses some components, e.g. BCInnerClassGen)
+  |       \-----------> `backendUtils`: utility for bytecode related ops, contains mapping for supported classfile version
  |
 BCodeIdiomatic ----------------> utilities for code generation, e.g. genPrimitiveArithmetic
   \--------------> `bTypes`: maps and fields for common BTypes
+  \-------------> `int`: synchronized interface between PostProcessor and compilation ctx
 ```
 
 The `BTypes.scala` class contains the `BType` class and predefined BTypes
@@ -34,28 +39,33 @@
 Compiler creates a `GenBCode` `Phase`, calls `runOn(compilationUnits)`, which
 calls `run(context)`.
This:
 
-* initializes `myPrimitives` defined in `DottyPrimitives` (maps primitive
-  members, like `int.+`, to bytecode instructions)
-* creates a `GenBCodePipeline` and calls `run(tree)`
-
-`GenBCodePipeline` now:
-
-* initializes the `bTypes` field of `GenBCodePipeline` defined in `BCodeIdiomatic`
-  (BType maps, common BTypes like `StringRef`)
-* creates `BytecodeWriter` and `JMirrorBuilder` instances (on each compiler run)
-* `buildAndSendToDisk(units)`: uses work queues, see below.
-  - `GenBCodePipeline.feedPipeline1` adds ClassDefs to `q1`
-  - `Worker1.run` creates ASM `ClassNodes`, adds to `q2`. It creates one
-    `PlainClassBuilder` for each compilation unit.
-  - `Worker2.run` adds byte arrays (one for each class) to `q3`
-  - `GenBCodePipeline.drainQ3` writes byte arrays to disk
+* lazily initializes components reused by all `compilationUnits` using the same instance of `Context`:
+  - `bTypes`, used by `CodeGen` and `PostProcessor`, defined in `BCodeIdiomatic` (BType maps, common BTypes like `StringRef`)
+  - `backendInterface` - a proxy to `Context`-specific operations
+  - `codeGen: CodeGen` - uses `backendInterface` and `bTypes`, initializes an instance of `DottyPrimitives` (maps primitive members, like `int.+`, to bytecode instructions), defines a `JMirrorBuilder` instance and implements the bytecode generation flow
+  - `frontendAccess` - a synchronized `PostProcessor` interface to compiler settings, reporting and the GenBCode context (e.g. the list of entrypoints)
+  - `postProcessor` - a compilation-context-agnostic module dedicated to parallel processing of the produced bytecode. Currently used only for writing Tasty and class files. Defines `backendUtils` and `classfileWriter`
+* sets the context of the current compilation unit to the shared context instance
+* calls `codeGen.genUnit(ctx.compilationUnit)`, which returns a structure with the generated definitions (both class files and Tasty)
+* calls post-processing of the generated definitions in `postProcessor`
+* calls registered callbacks, if needed, for every generated class
+
+Upon calling `codeGen.genUnit` it:
+* creates a `PlainClassBuilder` instance for each generated `TypeDef` and creates an ASM `ClassNode`
+* creates an optional mirror class if needed
+* generates the Tasty file content and stores its attributes in either the mirror or plain class node
+
+The `PostProcessor` later:
+* enriches the `ClassNode` with collected serializable lambdas
+* sets its inner classes
+* serializes the class and writes it to a file; optionally it can execute registered callbacks for each generated file
+* writes the generated Tasty to a file
 
 ## Architecture ##
 The architecture of `GenBCode` is the same as in Scalac. It can be partitioned
 into weakly coupled components (called "subsystems" below):
-
 ### (a) The queue subsystem ###
 Queues mediate between processors, queues don't know what each processor does.
@@ -126,4 +136,4 @@ emitting:
 ### (f) Building an ASM ClassNode given an AST TypeDef ###
 
-It's done by `PlainClassBuilder`(see `GenBCode.scala`).
+It's done by `PlainClassBuilder` (see `CodeGen.scala`).
diff --git a/docs/_docs/internals/core-data-structures.md b/docs/_docs/internals/core-data-structures.md deleted file mode 100644 index d42a24f0e426..000000000000 --- a/docs/_docs/internals/core-data-structures.md +++ /dev/null @@ -1,117 +0,0 @@ ---- -layout: doc-page -title: Core Data Structures ---- - -(The following is work in progress) - -## Symbols and SymDenotations - - - why symbols are not enough: their contents change all the time - - they change themselvesSo a `Symbol` - - reference: string + sig - - -Dotc is different from most other compilers in that it is centered around the idea of -maintaining views of various artifacts associated with code. These views are indexed -by tne - -A symbol refers to a definition in a source program. Traditionally, - compilers store context-dependent data in a _symbol table_. The - symbol then is the central reference to address context-dependent - data. But for `scalac`'s requirements it turns out that symbols are - both too little and too much for this task. - -Too little: The attributes of a symbol depend on the phase. Examples: -Types are gradually simplified by several phases. Owners are changed -in phases `LambdaLift` (when methods are lifted out to an enclosing -class) and Flatten (when all classes are moved to top level). Names -are changed when private members need to be accessed from outside -their class (for instance from a nested class or a class implementing -a trait). So a functional compiler, a `Symbol` by itself met mean -much. Instead we are more interested in the attributes of a symbol at -a given phase. - -`scalac` has a concept for "attributes of a symbol at - -Too much: If a symbol is used to refer to a definition in another -compilation unit, we get problems for incremental recompilation. The -unit containing the symbol might be changed and recompiled, which -might mean that the definition referred to by the symbol is deleted or -changed. This leads to the problem of stale symbols that refer to -definitions that no longer exist in this form. Scala 2 compiler tried to -address this problem by _rebinding_ symbols appearing in certain cross -module references, but it turned out to be too difficult to do this -reliably for all kinds of references. Scala 3 compiler attacks the problem at -the root instead. The fundamental problem is that symbols are too -specific to serve as a cross-module reference in a system with -incremental compilation. They refer to a particular definition, but -that definition may not persist unchanged after an edit. - -`scalac` uses instead a different approach: A cross module reference is -always type, either a `TermRef` or ` TypeRef`. A reference type contains -a prefix type and a name. The definition the type refers to is established -dynamically based on these fields. - - -a system where sources can be recompiled at any instance, - - the concept of a `Denotation`. - - Since definitions are transformed by phases, - - -The [Dotty project](https://github.com/lampepfl/dotty) -is a platform to develop new technology for Scala -tooling and to try out concepts of future Scala language versions. -Its compiler is a new design intended to reflect the -lessons we learned from work with the Scala compiler. A clean redesign -today will let us iterate faster with new ideas in the future. - -Today we reached an important milestone: The Dotty compiler can -compile itself, and the compiled compiler can act as a drop-in for the -original one. This is what one calls a *bootstrap*. - -## Why is this important? 
-
-The main reason is that this gives us a some validation of the
-*trustworthiness* of the compiler itself. Compilers are complex beasts,
-and many things can go wrong. By far the worst things that can go
-wrong are bugs where incorrect code is produced. It's not fun debugging code that looks perfectly
-fine, yet gets translated to something subtly wrong by the compiler.
-
-Having the compiler compile itself is a good test to demonstrate that
-the generated code has reached a certain level of quality. Not only is
-a compiler a large program (44k lines in the case of dotty), it is
-also one that exercises a large part of the language in quite
-intricate ways. Moreover, bugs in the code of a compiler don't tend to
-go unnoticed, precisely because every part of a compiler feeds into
-other parts and all together are necessary to produce a correct
-translation.
-
-## Are We Done Yet?
-
-Far from it! The compiler is still very rough. A lot more work is
-needed to
-
- - make it more robust, in particular when analyzing incorrect programs,
- - improve error messages and warnings,
- - improve the efficiency of some of the generated code,
- - embed it in external tools such as sbt, REPL, IDEs,
- - remove restrictions on what Scala code can be compiled,
- - help in migrating Scala code that will have to be changed.
-
-## What Are the Next Steps?
-
-Over the coming weeks and months, we plan to work on the following topics:
-
- - Make snapshot releases.
- - Get the Scala standard library to compile.
- - Work on SBT integration of the compiler.
- - Work on IDE support.
- - Investigate the best way to obtaining a REPL.
- - Work on the build infrastructure.
-
-If you want to get your hands dirty with any of this, now is a good moment to get involved!
-To get started: .
-
diff --git a/docs/_docs/internals/dotc-scalac.md b/docs/_docs/internals/dotc-scalac.md
index 3f88502934b7..03baad375eb1
--- a/docs/_docs/internals/dotc-scalac.md
+++ b/docs/_docs/internals/dotc-scalac.md
@@ -6,7 +6,50 @@ title: "Differences between Scalac and Dotty"
 Overview explanation how symbols, named types and denotations hang together:
 [Denotations1]
 
-## Denotation ##
+## Some background
+
+Dotc is different from most other compilers in that it is centered around the
+idea of maintaining views of various artifacts associated with code. These views
+are indexed by time.
+
+A symbol refers to a definition in a source program. Traditionally, compilers
+store context-dependent data in a _symbol table_. The symbol then is the central
+reference to address context-dependent data. But for `scalac`'s requirements it
+turns out that symbols are both too little and too much for this task.
+
+### Too little
+
+The attributes of a symbol depend on the phase. Examples: Types are
+gradually simplified by several phases. Owners are changed in phases
+`LambdaLift` (when methods are lifted out to an enclosing class) and Flatten
+(when all classes are moved to top level). Names are changed when private
+members need to be accessed from outside their class (for instance from a nested
+class or a class implementing a trait). So in a functional compiler, a `Symbol` by
+itself doesn't mean much. Instead we are more interested in the attributes of a
+symbol at a given phase.
+
+### Too much
+
+If a symbol is used to refer to a definition in another compilation unit, we get
+problems for incremental recompilation. The unit containing the symbol might be
+changed and recompiled, which might mean that the definition referred to by the
+symbol is deleted or changed. This leads to the problem of stale symbols that
+refer to definitions that no longer exist in this form. The Scala 2 compiler tried
+to address this problem by _rebinding_ symbols appearing in certain cross module
+references, but it turned out to be too difficult to do this reliably for all
+kinds of references. The Scala 3 compiler attacks the problem at the root instead.
+The fundamental problem is that symbols are too specific to serve as a
+cross-module reference in a system with incremental compilation. They refer to a
+particular definition, but that definition may not persist unchanged after an
+edit.
+
+`scalac` instead uses a different approach: a cross-module reference is always a
+type, either a `TermRef` or a `TypeRef`. A reference type contains a prefix type
+and a name. The definition the type refers to is established dynamically based
+on these fields.
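+
+As a minimal illustrative sketch (simplified, not dotc's actual definitions, which
+live in `dotty.tools.dotc.core.Types`), the shape of such reference types can be
+pictured as:
+
+```scala
+// Simplified model: a cross-module reference stores a prefix type and a name;
+// the definition it denotes is re-resolved from these fields when needed,
+// so edits and recompilation cannot leave the reference stale.
+sealed trait Type
+case class TermRef(prefix: Type, name: String) extends Type
+case class TypeRef(prefix: Type, name: String) extends Type
+```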
+
+## Denotation
+
 Comment with a few details: [Denotations2]
 
 A `Denotation` is the result of a name lookup during a given period
@@ -21,7 +64,7 @@ A `Denotation` is the result of a name lookup during a given period
 Denotations of methods have a signature ([Signature1]), which uniquely
 identifies overloaded methods.
 
-### Denotation vs. SymDenotation ###
+### Denotation vs. SymDenotation
 A `SymDenotation` is an extended denotation that has symbol-specific properties
 (that may change over phases)
 * `flags`
@@ -31,7 +74,7 @@ A `SymDenotation` is an extended denotation that has symbol-specific properties
 `SymDenotation` implements lazy types (similar to scalac). The type completer
 assigns the denotation's `info`.
 
-### Implicit Conversion ###
+### Implicit Conversion
 There is an implicit conversion:
 ```scala
 core.Symbols.toDenot(sym: Symbol)(implicit ctx: Context): SymDenotation
```
@@ -42,7 +85,7 @@ implicit conversion does **not** need to be imported, it is part of the
 implicit scope of the type `Symbol` (check the Scala spec). However, it can
 only be applied if an implicit `Context` is in scope.
 
-## Symbol ##
+## Symbol
 * `Symbol` instances have a `SymDenotation`
 * Most symbol properties in the Scala 2 compiler are now in the denotation (in
   the Scala 3 compiler).
@@ -57,7 +100,7 @@ if (sym is Flags.PackageClass) // Scala 3 (*)
 `(*)` Symbols are implicitly converted to their denotation, see above. Each
 `SymDenotation` has flags that can be queried using the `is` method.
 
-## Flags ##
+## Flags
 * Flags are instances of the value class `FlagSet`, which encapsulates a `Long`
 * Each flag is either valid for types, terms, or both
@@ -74,7 +117,7 @@ if (sym is Flags.PackageClass) // Scala 3 (*)
   `ModuleVal` / `ModuleClass` for either of the two.
 * `flags.is(Method | Param)`: true if `flags` has either of the two
 
-## Tree ##
+## Tree
 * Trees don't have symbols
   - `tree.symbol` is `tree.denot.symbol`
   - `tree.denot` is `tree.tpe.denot` where the `tpe` is a `NamdedType` (see
@@ -86,13 +129,10 @@ if (sym is Flags.PackageClass) // Scala 3 (*)
   obtained from the symbol that the type refers to. This symbol is searched
   using `prefix.member(name)`.
 
-
-## Type ##
+## Type
 * `MethodType(paramSyms, resultType)` from scalac =>
   `mt @ MethodType(paramNames, paramTypes)`.
Result type is `mt.resultType` -`@todo` - [Denotations1]: https://github.com/lampepfl/dotty/blob/a527f3b1e49c0d48148ccfb2eb52e3302fc4a349/compiler/src/dotty/tools/dotc/core/Denotations.scala#L27-L72 [Denotations2]: https://github.com/lampepfl/dotty/blob/a527f3b1e49c0d48148ccfb2eb52e3302fc4a349/compiler/src/dotty/tools/dotc/core/Denotations.scala#L77-L103 [Signature1]: https://github.com/lampepfl/dotty/blob/a527f3b1e49c0d48148ccfb2eb52e3302fc4a349/compiler/src/dotty/tools/dotc/core/Signature.scala#L9-L33 diff --git a/docs/_docs/internals/overall-structure.md b/docs/_docs/internals/overall-structure.md index f50ab6bf03a7..5bb43eb946a8 100644 --- a/docs/_docs/internals/overall-structure.md +++ b/docs/_docs/internals/overall-structure.md @@ -104,7 +104,6 @@ phases. The current list of phases is specified in class [Compiler] as follows: List(new semanticdb.ExtractSemanticDB) :: // Extract info into .semanticdb files List(new PostTyper) :: // Additional checks and cleanups after type checking List(new sjs.PrepJSInterop) :: // Additional checks and transformations for Scala.js (Scala.js only) - List(new Staging) :: // Check PCP, heal quoted types and expand macros List(new sbt.ExtractAPI) :: // Sends a representation of the API of classes to sbt via callbacks List(new SetRootTree) :: // Set the `rootTreeOrProvider` on class symbols Nil @@ -112,6 +111,10 @@ phases. The current list of phases is specified in class [Compiler] as follows: /** Phases dealing with TASTY tree pickling and unpickling */ protected def picklerPhases: List[List[Phase]] = List(new Pickler) :: // Generate TASTY info + List(new Inlining) :: // Inline and execute macros + List(new PostInlining) :: // Add mirror support for inlined code + List(new Staging) :: // Check staging levels and heal staged types + List(new Splicing) :: // Replace level 1 splices with holes List(new PickleQuotes) :: // Turn quoted trees into explicit run-time data structures Nil diff --git a/docs/_docs/internals/syntax-3.1.md b/docs/_docs/internals/syntax-3.1.md index 4d4d3b6d858d..0104222f50f5 100644 --- a/docs/_docs/internals/syntax-3.1.md +++ b/docs/_docs/internals/syntax-3.1.md @@ -11,7 +11,7 @@ hexadecimal code: ```ebnf UnicodeEscape ::= ‘\’ ‘u’ {‘u’} hexDigit hexDigit hexDigit hexDigit -hexDigit ::= ‘0’ | … | ‘9’ | ‘A’ | … | ‘F’ | ‘a’ | … | ‘f’ +hexDigit ::= ‘0’ | ... | ‘9’ | ‘A’ | ... | ‘F’ | ‘a’ | ... | ‘f’ ``` Informal descriptions are typeset as `“some comment”`. @@ -22,15 +22,15 @@ form. ```ebnf whiteSpace ::= ‘\u0020’ | ‘\u0009’ | ‘\u000D’ | ‘\u000A’ -upper ::= ‘A’ | … | ‘Z’ | ‘\$’ | ‘_’ “… and Unicode category Lu” -lower ::= ‘a’ | … | ‘z’ “… and Unicode category Ll” -letter ::= upper | lower “… and Unicode categories Lo, Lt, Lm, Nl” -digit ::= ‘0’ | … | ‘9’ +upper ::= ‘A’ | ... | ‘Z’ | ‘\$’ | ‘_’ “... and Unicode category Lu” +lower ::= ‘a’ | ... | ‘z’ “... and Unicode category Ll” +letter ::= upper | lower “... and Unicode categories Lo, Lt, Lm, Nl” +digit ::= ‘0’ | ... | ‘9’ paren ::= ‘(’ | ‘)’ | ‘[’ | ‘]’ | ‘{’ | ‘}’ | ‘'(’ | ‘'[’ | ‘'{’ delim ::= ‘`’ | ‘'’ | ‘"’ | ‘.’ | ‘;’ | ‘,’ opchar ::= ‘!’ | ‘#’ | ‘%’ | ‘&’ | ‘*’ | ‘+’ | ‘-’ | ‘/’ | ‘:’ | ‘<’ | ‘=’ | ‘>’ | ‘?’ | ‘@’ | ‘\’ | ‘^’ | ‘|’ | ‘~’ - “… and Unicode categories Sm, So” + “... 
and Unicode categories Sm, So” printableChar ::= “all characters in [\u0020, \u007E] inclusive” charEscapeSeq ::= ‘\’ (‘b’ | ‘t’ | ‘n’ | ‘f’ | ‘r’ | ‘"’ | ‘'’ | ‘\’) @@ -49,7 +49,7 @@ integerLiteral ::= (decimalNumeral | hexNumeral) [‘L’ | ‘l’] decimalNumeral ::= ‘0’ | nonZeroDigit {digit} hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit {hexDigit} digit ::= ‘0’ | nonZeroDigit -nonZeroDigit ::= ‘1’ | … | ‘9’ +nonZeroDigit ::= ‘1’ | ... | ‘9’ floatingPointLiteral ::= digit {digit} ‘.’ {digit} [exponentPart] [floatType] diff --git a/docs/_docs/internals/syntax.md b/docs/_docs/internals/syntax.md index 8e7de0efe19e..2817a7477b10 100644 --- a/docs/_docs/internals/syntax.md +++ b/docs/_docs/internals/syntax.md @@ -20,51 +20,46 @@ productions map to AST nodes. The following description of Scala tokens uses literal characters `‘c’` when referring to the ASCII fragment `\u0000` – `\u007F`. -_Unicode escapes_ are used to represent the [Unicode character](https://www.w3.org/International/articles/definitions-characters/) with the given -hexadecimal code: - -```ebnf -UnicodeEscape ::= ‘\’ ‘u’ {‘u’} hexDigit hexDigit hexDigit hexDigit -hexDigit ::= ‘0’ | … | ‘9’ | ‘A’ | … | ‘F’ | ‘a’ | … | ‘f’ -``` - -Informal descriptions are typeset as `“some comment”`. - ## Lexical Syntax -The lexical syntax of Scala is given by the following grammar in EBNF -form. +The lexical syntax of Scala is given by the following grammar in EBNF form: ```ebnf whiteSpace ::= ‘\u0020’ | ‘\u0009’ | ‘\u000D’ | ‘\u000A’ -upper ::= ‘A’ | … | ‘Z’ | ‘\$’ | ‘_’ “… and Unicode category Lu” -lower ::= ‘a’ | … | ‘z’ “… and Unicode category Ll” -letter ::= upper | lower “… and Unicode categories Lo, Lt, Lm, Nl” -digit ::= ‘0’ | … | ‘9’ +upper ::= ‘A’ | ... | ‘Z’ | ‘$’ and any character in Unicode categories Lu, Lt or Nl, + and any character in Unicode categories Lo and Lm that doesn't have + contributory property Other_Lowercase +lower ::= ‘a’ | ... | ‘z’ | ‘_’ and any character in Unicode category Ll, + and any character in Unicode categories Lo or Lm that has contributory + property Other_Lowercase +letter ::= upper | lower +digit ::= ‘0’ | ... | ‘9’ paren ::= ‘(’ | ‘)’ | ‘[’ | ‘]’ | ‘{’ | ‘}’ delim ::= ‘`’ | ‘'’ | ‘"’ | ‘.’ | ‘;’ | ‘,’ opchar ::= ‘!’ | ‘#’ | ‘%’ | ‘&’ | ‘*’ | ‘+’ | ‘-’ | ‘/’ | ‘:’ | ‘<’ | ‘=’ | ‘>’ | ‘?’ | ‘@’ | ‘\’ | ‘^’ | ‘|’ | ‘~’ - “… and Unicode categories Sm, So” -printableChar ::= “all characters in [\u0020, \u007E] inclusive” + and any character in Unicode categories Sm or So +printableChar ::= all characters in [\u0020, \u007E] inclusive +UnicodeEscape ::= ‘\’ ‘u’ {‘u’} hexDigit hexDigit hexDigit hexDigit +hexDigit ::= ‘0’ | ... | ‘9’ | ‘A’ | ... | ‘F’ | ‘a’ | ... 
| ‘f’ charEscapeSeq ::= ‘\’ (‘b’ | ‘t’ | ‘n’ | ‘f’ | ‘r’ | ‘"’ | ‘'’ | ‘\’) +escapeSeq ::= UnicodeEscape | charEscapeSeq op ::= opchar {opchar} varid ::= lower idrest -alphaid ::= upper idrest - | varid +boundvarid ::= varid + | ‘`’ varid ‘`’ plainid ::= alphaid | op id ::= plainid - | ‘`’ { charNoBackQuoteOrNewline | UnicodeEscape | charEscapeSeq } ‘`’ + | ‘`’ { charNoBackQuoteOrNewline | escapeSeq } ‘`’ idrest ::= {letter | digit} [‘_’ op] quoteId ::= ‘'’ alphaid spliceId ::= ‘$’ alphaid ; integerLiteral ::= (decimalNumeral | hexNumeral) [‘L’ | ‘l’] -decimalNumeral ::= ‘0’ | nonZeroDigit [{digit | ‘_’} digit] +decimalNumeral ::= ‘0’ | digit [{digit | ‘_’} digit] hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit [{hexDigit | ‘_’} hexDigit] -nonZeroDigit ::= ‘1’ | … | ‘9’ floatingPointLiteral ::= [decimalNumeral] ‘.’ digit [{digit | ‘_’} digit] [exponentPart] [floatType] @@ -75,25 +70,25 @@ floatType ::= ‘F’ | ‘f’ | ‘D’ | ‘d’ booleanLiteral ::= ‘true’ | ‘false’ -characterLiteral ::= ‘'’ (printableChar | charEscapeSeq) ‘'’ +characterLiteral ::= ‘'’ (charNoQuoteOrNewline | escapeSeq) ‘'’ stringLiteral ::= ‘"’ {stringElement} ‘"’ | ‘"""’ multiLineChars ‘"""’ -stringElement ::= printableChar \ (‘"’ | ‘\’) - | UnicodeEscape - | charEscapeSeq -multiLineChars ::= {[‘"’] [‘"’] char \ ‘"’} {‘"’} -processedStringLiteral - ::= alphaid ‘"’ {[‘\’] processedStringPart | ‘\\’ | ‘\"’} ‘"’ - | alphaid ‘"""’ {[‘"’] [‘"’] char \ (‘"’ | ‘$’) | escape} {‘"’} ‘"""’ -processedStringPart +stringElement ::= charNoDoubleQuoteOrNewline + | escapeSeq +multiLineChars ::= {[‘"’] [‘"’] charNoDoubleQuote} {‘"’} + +interpolatedString + ::= alphaid ‘"’ {[‘\’] interpolatedStringPart | ‘\\’ | ‘\"’} ‘"’ + | alphaid ‘"""’ {[‘"’] [‘"’] char \ (‘"’ | ‘\$’) | escape} {‘"’} ‘"""’ +interpolatedStringPart ::= printableChar \ (‘"’ | ‘$’ | ‘\’) | escape -escape ::= ‘$$’ - | ‘$’ letter { letter | digit } - | ‘{’ Block [‘;’ whiteSpace stringFormat whiteSpace] ‘}’ -stringFormat ::= {printableChar \ (‘"’ | ‘}’ | ‘ ’ | ‘\t’ | ‘\n’)} - -symbolLiteral ::= ‘'’ plainid // until 2.13 +escape ::= ‘\$\$’ + | ‘\$"’ + | ‘\$’ alphaid + | ‘\$’ BlockExpr +alphaid ::= upper idrest + | varid comment ::= ‘/*’ “any sequence of characters; nested comments are allowed” ‘*/’ | ‘//’ “any sequence of characters up to end of line” @@ -140,7 +135,7 @@ type val var while with yield ### Soft keywords ``` -as derives end extension infix inline opaque open throws transparent using | * + - +as derives end erased extension infix inline opaque open throws transparent using | * + - ``` See the [separate section on soft keywords](../reference/soft-modifier.md) for additional @@ -159,7 +154,7 @@ SimpleLiteral ::= [‘-’] integerLiteral | characterLiteral | stringLiteral Literal ::= SimpleLiteral - | processedStringLiteral + | interpolatedStringLiteral | symbolLiteral | ‘null’ @@ -180,13 +175,13 @@ Type ::= FunType | FunParamClause ‘=>>’ Type TermLambdaTypeTree(ps, t) | MatchType | InfixType -FunType ::= FunTypeArgs (‘=>’ | ‘?=>’) Type Function(ts, t) +FunType ::= FunTypeArgs (‘=>’ | ‘?=>’) Type Function(ts, t) | FunctionWithMods(ts, t, mods, erasedParams) | HKTypeParamClause '=>' Type PolyFunction(ps, t) FunTypeArgs ::= InfixType | ‘(’ [ FunArgTypes ] ‘)’ | FunParamClause FunParamClause ::= ‘(’ TypedFunParam {‘,’ TypedFunParam } ‘)’ -TypedFunParam ::= id ‘:’ Type +TypedFunParam ::= [`erased`] id ‘:’ Type MatchType ::= InfixType `match` <<< TypeCaseClauses >>> InfixType ::= RefinedType {id [nl] RefinedType} InfixOp(t1, op, t2) RefinedType ::= AnnotType {[nl] Refinement} 
RefinedTypeTree(t, ds) @@ -207,8 +202,8 @@ Singleton ::= SimpleRef | SimpleLiteral | Singleton ‘.’ id Singletons ::= Singleton { ‘,’ Singleton } -FunArgType ::= Type - | ‘=>’ Type PrefixOp(=>, t) +FunArgType ::= [`erased`] Type + | [`erased`] ‘=>’ Type PrefixOp(=>, t) FunArgTypes ::= FunArgType { ‘,’ FunArgType } ParamType ::= [‘=>’] ParamValueType ParamValueType ::= [‘into’] ExactParamType Into(t) @@ -229,7 +224,7 @@ BlockResult ::= FunParams (‘=>’ | ‘?=>’) Block | HkTypeParamClause ‘=>’ Block | Expr1 FunParams ::= Bindings - | id + | [`erased`] id | ‘_’ Expr1 ::= [‘inline’] ‘if’ ‘(’ Expr ‘)’ {nl} Expr [[semi] ‘else’ Expr] If(Parens(cond), thenp, elsep?) | [‘inline’] ‘if’ Expr ‘then’ Expr [[semi] ‘else’ Expr] If(cond, thenp, elsep?) @@ -347,9 +342,6 @@ ClsTypeParamClause::= ‘[’ ClsTypeParam {‘,’ ClsTypeParam} ‘]’ ClsTypeParam ::= {Annotation} [‘+’ | ‘-’] TypeDef(Modifiers, name, tparams, bounds) id [HkTypeParamClause] TypeParamBounds Bound(below, above, context) -DefTypeParamClause::= ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ -DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeParamBounds - TypTypeParamClause::= ‘[’ TypTypeParam {‘,’ TypTypeParam} ‘]’ TypTypeParam ::= {Annotation} id [HkTypeParamClause] TypeBounds @@ -363,18 +355,29 @@ ClsParamClause ::= [nl] ‘(’ ClsParams ‘)’ ClsParams ::= ClsParam {‘,’ ClsParam} ClsParam ::= {Annotation} ValDef(mods, id, tpe, expr) -- point of mods on val/var [{Modifier} (‘val’ | ‘var’) | ‘inline’] Param -Param ::= id ‘:’ ParamType [‘=’ Expr] -DefParamClauses ::= {DefParamClause} [[nl] ‘(’ [‘implicit’] DefParams ‘)’] -DefParamClause ::= [nl] ‘(’ DefParams ‘)’ | UsingParamClause -UsingParamClause ::= [nl] ‘(’ ‘using’ (DefParams | FunArgTypes) ‘)’ -DefParams ::= DefParam {‘,’ DefParam} -DefParam ::= {Annotation} [‘inline’] Param ValDef(mods, id, tpe, expr) -- point of mods at id. +DefParamClauses ::= DefParamClause { DefParamClause } -- and two DefTypeParamClause cannot be adjacent +DefParamClause ::= DefTypeParamClause + | DefTermParamClause + | UsingParamClause +TypelessClauses ::= TypelessClause {TypelessClause} +TypelessClause ::= DefTermParamClause + | UsingParamClause + +DefTypeParamClause::= [nl] ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ +DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeParamBounds +DefTermParamClause::= [nl] ‘(’ [DefTermParams] ‘)’ +UsingParamClause ::= [nl] ‘(’ ‘using’ (DefTermParams | FunArgTypes) ‘)’ +DefImplicitClause ::= [nl] ‘(’ ‘implicit’ DefTermParams ‘)’ + +DefTermParams ::= DefTermParam {‘,’ DefTermParam} +DefTermParam ::= {Annotation} [`erased`] [‘inline’] Param ValDef(mods, id, tpe, expr) -- point of mods at id. 
+Param ::= id ‘:’ ParamType [‘=’ Expr]
```

### Bindings and Imports

```ebnf
-Bindings ::= ‘(’ [Binding {‘,’ Binding}] ‘)’
+Bindings ::= ‘(’ [`erased`] [Binding {‘,’ [`erased`] Binding}] ‘)’
Binding ::= (id | ‘_’) [‘:’ Type] ValDef(_, id, tpe, EmptyTree)

Modifier ::= LocalModifier
@@ -419,8 +422,8 @@ Dcl ::= RefineDcl
 | ‘var’ VarDcl
ValDcl ::= ids ‘:’ Type PatDef(_, ids, tpe, EmptyTree)
VarDcl ::= ids ‘:’ Type PatDef(_, ids, tpe, EmptyTree)
-DefDcl ::= DefSig ‘:’ Type DefDef(_, name, tparams, vparamss, tpe, EmptyTree)
-DefSig ::= id [DefTypeParamClause] DefParamClauses
+DefDcl ::= DefSig ‘:’ Type DefDef(_, name, paramss, tpe, EmptyTree)
+DefSig ::= id [DefParamClauses] [DefImplicitClause]
TypeDcl ::= id [TypeParamClause] {FunParamClause} TypeBounds TypeDefTree(_, name, tparams, bound
 [‘=’ Type]
@@ -431,8 +434,8 @@ Def ::= ‘val’ PatDef
 | TmplDef
PatDef ::= ids [‘:’ Type] ‘=’ Expr
 | Pattern2 [‘:’ Type] ‘=’ Expr PatDef(_, pats, tpe?, expr)
-DefDef ::= DefSig [‘:’ Type] ‘=’ Expr DefDef(_, name, tparams, vparamss, tpe, expr)
- | ‘this’ DefParamClause DefParamClauses ‘=’ ConstrExpr DefDef(_, <init>, Nil, vparamss, EmptyTree, expr | Block)
+DefDef ::= DefSig [‘:’ Type] ‘=’ Expr DefDef(_, name, paramss, tpe, expr)
+ | ‘this’ TypelessClauses [DefImplicitClause] ‘=’ ConstrExpr DefDef(_, <init>, vparamss, EmptyTree, expr | Block)

TmplDef ::= ([‘case’] ‘class’ | ‘trait’) ClassDef
 | [‘case’] ‘object’ ObjectDef
@@ -444,10 +447,10 @@ ConstrMods ::= {Annotation} [AccessModifier]
ObjectDef ::= id [Template] ModuleDef(mods, name, template) // no constructor
EnumDef ::= id ClassConstr InheritClauses EnumBody
GivenDef ::= [GivenSig] (AnnotType [‘=’ Expr] | StructuralInstance)
-GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefParamClause`, `UsingParamClause` must be present
+GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefTypeParamClause`, `UsingParamClause` must be present
StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody]
Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause}
- ‘(’ DefParam ‘)’ {UsingParamClause} ExtMethods
+ ‘(’ DefTermParam ‘)’ {UsingParamClause} ExtMethods
ExtMethods ::= ExtMethod | [nl] <<< ExtMethod {semi ExtMethod} >>>
ExtMethod ::= {Annotation [nl]} {Modifier} ‘def’ DefDef
 | Export
diff --git a/docs/_docs/reference/changed-features/implicit-resolution.md b/docs/_docs/reference/changed-features/implicit-resolution.md
index bf15baa3299c..6a898690b565 100644
--- a/docs/_docs/reference/changed-features/implicit-resolution.md
+++ b/docs/_docs/reference/changed-features/implicit-resolution.md
@@ -67,7 +67,8 @@ Opaque type aliases count as anchors only outside the scope where their alias is
 1. If _T_ is a reference to a type parameter, the union of the anchors of both of its bounds.
 1. If _T_ is a singleton reference, the anchors of its underlying type, plus, if _T_ is of the form _(P#x).type_, the anchors of _P_.
- 1. If _T_ is the this-type _o.this_ of a static object _o_, the anchors of a term reference _o.type_ to that object.
+ 1. If _T_ is the this-type _o.this_ of a static object _o_, the anchors of a term reference _o.type_ to that object,
+ 1. If _T_ is some other this-type _P.this.type_, the anchors of _P_.
 1. If _T_ is some other type, the union of the anchors of each constituent type of _T_.
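As a rough illustration of the first of the two this-type rules, consider the following hedged sketch (`Show` and `Config` are invented names, not part of the rule text): inside a static object, a given whose type mentions `this.type` behaves, for implicit scope purposes, as if it mentioned the term reference to the object.

```scala
trait Show[T]:
  def show(t: T): String

object Config:
  // Within the object, `this.type` denotes the this-type `Config.this`.
  given Show[this.type] = _ => "Config"

// Seen from outside, `Config.this` is just `Config.type`; by the rule
// above its anchors include the object `Config` itself, so the given
// is found in the implicit scope without an import.
val rendered: String = summon[Show[Config.type]].show(Config)
```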
**Definition:** The _implicit scope_ of a type _T_ is the smallest set _S_ of term references such that diff --git a/docs/_docs/reference/changed-features/imports.md b/docs/_docs/reference/changed-features/imports.md index 2058ef08b7db..b322a6a58393 100644 --- a/docs/_docs/reference/changed-features/imports.md +++ b/docs/_docs/reference/changed-features/imports.md @@ -46,7 +46,7 @@ are offered under settings `-source 3.1-migration -rewrite`. ## Syntax -``` +```ebnf Import ::= ‘import’ ImportExpr {‘,’ ImportExpr} ImportExpr ::= SimpleRef {‘.’ id} ‘.’ ImportSpec | SimpleRef `as` id diff --git a/docs/_docs/reference/changed-features/interpolation-escapes.md b/docs/_docs/reference/changed-features/interpolation-escapes.md index 594e7671c5ab..4abeabdce3ac 100644 --- a/docs/_docs/reference/changed-features/interpolation-escapes.md +++ b/docs/_docs/reference/changed-features/interpolation-escapes.md @@ -4,7 +4,7 @@ title: "Escapes in interpolations" nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/interpolation-escapes.html --- -In Scala 2 there is no straightforward way to represent a single quote character `"` in a single quoted interpolation. A `\` character can't be used for that because interpolators themselves decide how to handle escaping, so the parser doesn't know whether the `"` character should be escaped or used as a terminator. +In Scala 2 there is no straightforward way to represent a double-quote character `"` in a quoted interpolation (except in triple-quote interpolation). A `\` character can't be used for that because interpolators themselves decide how to handle escaping, so the parser doesn't know whether the `"` character should be escaped or used as a terminator. In Scala 3, we can use the `$` meta character of interpolations to escape a `"` character. Example: diff --git a/docs/_docs/reference/changed-features/match-syntax.md b/docs/_docs/reference/changed-features/match-syntax.md index dba50e9beb6a..3f4d608e261f 100644 --- a/docs/_docs/reference/changed-features/match-syntax.md +++ b/docs/_docs/reference/changed-features/match-syntax.md @@ -47,7 +47,7 @@ The syntactical precedence of match expressions has been changed. The new syntax of match expressions is as follows. -``` +```ebnf InfixExpr ::= ... | InfixExpr MatchClause SimpleExpr ::= ... diff --git a/docs/_docs/reference/changed-features/overload-resolution.md b/docs/_docs/reference/changed-features/overload-resolution.md index bd7782ded520..621515c2a7f8 100644 --- a/docs/_docs/reference/changed-features/overload-resolution.md +++ b/docs/_docs/reference/changed-features/overload-resolution.md @@ -66,11 +66,11 @@ as follows: Replace the sentence -> Otherwise, let `S1,…,Sm` be the vector of types obtained by typing each argument with an undefined expected type. +> Otherwise, let `S1,...,Sm` be the vector of types obtained by typing each argument with an undefined expected type. 
with the following paragraph:

-> Otherwise, let `S1,…,Sm` be the vector of known types of all argument types, where the _known type_ of an argument `E`
+> Otherwise, let `S1,...,Sm` be the vector of known types of all argument types, where the _known type_ of an argument `E`
is determined as follows:

- If `E` is a function value `(p_1, ..., p_n) => B` that misses some parameter types, the known type
diff --git a/docs/_docs/reference/changed-features/pattern-bindings.md b/docs/_docs/reference/changed-features/pattern-bindings.md
index 2de338fc1dde..a75d64e7cd2d 100644
--- a/docs/_docs/reference/changed-features/pattern-bindings.md
+++ b/docs/_docs/reference/changed-features/pattern-bindings.md
@@ -50,7 +50,7 @@ for case (x, y) <- elems yield (y, x) // returns List((2, 1), (4, 3))
## Syntax Changes

Generators in for expressions may be prefixed with `case`.
-```
+```ebnf
Generator ::= [‘case’] Pattern1 ‘<-’ Expr
```
diff --git a/docs/_docs/reference/changed-features/structural-types-spec.md b/docs/_docs/reference/changed-features/structural-types-spec.md
index d456932649fb..18d0f31ee6fe 100644
--- a/docs/_docs/reference/changed-features/structural-types-spec.md
+++ b/docs/_docs/reference/changed-features/structural-types-spec.md
@@ -6,7 +6,7 @@ nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/structu
## Syntax

-```
+```ebnf
SimpleType ::= ... | Refinement
Refinement ::= ‘{’ RefineStatSeq ‘}’
RefineStatSeq ::= RefineStat {semi RefineStat}
diff --git a/docs/_docs/reference/changed-features/vararg-splices.md b/docs/_docs/reference/changed-features/vararg-splices.md
index 43c4acc5f880..8f23af771216 100644
--- a/docs/_docs/reference/changed-features/vararg-splices.md
+++ b/docs/_docs/reference/changed-features/vararg-splices.md
@@ -24,7 +24,7 @@ The old syntax for splice arguments will be phased out.

## Syntax

-```
+```ebnf
ArgumentPatterns ::= ‘(’ [Patterns] ‘)’
 | ‘(’ [Patterns ‘,’] Pattern2 ‘*’ ‘)’
diff --git a/docs/_docs/reference/contextual/context-bounds.md b/docs/_docs/reference/contextual/context-bounds.md
index 42479d6802b3..11d57c8cbd52 100644
--- a/docs/_docs/reference/contextual/context-bounds.md
+++ b/docs/_docs/reference/contextual/context-bounds.md
@@ -47,7 +47,7 @@ done automatically under `-rewrite`.

## Syntax

-```
+```ebnf
TypeParamBounds ::= [SubtypeBounds] {ContextBound}
ContextBound ::= ‘:’ Type
```
diff --git a/docs/_docs/reference/contextual/context-functions-spec.md b/docs/_docs/reference/contextual/context-functions-spec.md
index 109513e9da86..385ee3901fd8 100644
--- a/docs/_docs/reference/contextual/context-functions-spec.md
+++ b/docs/_docs/reference/contextual/context-functions-spec.md
@@ -6,7 +6,7 @@ nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/context-funct
## Syntax

-```
+```ebnf
Type ::= ...
 | FunArgTypes ‘?=>’ Type
Expr ::= ...
diff --git a/docs/_docs/reference/contextual/derivation.md b/docs/_docs/reference/contextual/derivation.md
index f073c339ec6f..66d0cf3fdf38 100644
--- a/docs/_docs/reference/contextual/derivation.md
+++ b/docs/_docs/reference/contextual/derivation.md
@@ -512,7 +512,7 @@ method please read more at [How to write a type class `derived` method using mac

## Syntax

-```
+```ebnf
Template ::= InheritClauses [TemplateBody]
EnumDef ::= id ClassConstr InheritClauses EnumBody
InheritClauses ::= [‘extends’ ConstrApps] [‘derives’ QualId {‘,’ QualId}]
diff --git a/docs/_docs/reference/contextual/extension-methods.md b/docs/_docs/reference/contextual/extension-methods.md
index d23cadf513d7..d98d80caafc5 100644
--- a/docs/_docs/reference/contextual/extension-methods.md
+++ b/docs/_docs/reference/contextual/extension-methods.md
@@ -244,7 +244,18 @@ The precise rules for resolving a selection to an extension method are as follow

Assume a selection `e.m[Ts]` where `m` is not a member of `e`, where the type arguments `[Ts]` are optional, and where `T` is the expected type.
The following two rewritings are tried in order:

- 1. The selection is rewritten to `m[Ts](e)`.
+ 1. The selection is rewritten to `m[Ts](e)` and typechecked, using the following
+    slight modification of the name resolution rules:
+
+    - If `m` is imported by several imports which are all on the same nesting level,
+      try each import as an extension method instead of failing with an ambiguity.
+      If only one import leads to an expansion that typechecks without errors, pick
+      that expansion. If there are several such imports, but only one import which is
+      not a wildcard import, pick the expansion from that import. Otherwise, report
+      an ambiguous reference error.
+
+      **Note**: This relaxation is currently enabled only under the `experimental.relaxedExtensionImports` language import.
+
 2. If the first rewriting does not typecheck with expected type `T`,
    and there is an extension method `m` in some eligible object `o`, the selection is rewritten to `o.m[Ts](e)`. An object `o` is _eligible_ if
@@ -285,7 +296,7 @@ def position(s: String)(ch: Char, n: Int): Int =

Here are the syntax changes for extension methods and collective extensions relative
to the [current syntax](../syntax.md).

-```
+```ebnf
BlockStat ::= ... | Extension
TemplateStat ::= ... | Extension
TopStat ::= ... | Extension
diff --git a/docs/_docs/reference/contextual/given-imports.md b/docs/_docs/reference/contextual/given-imports.md
index 6a55368979b1..28442581e408 100644
--- a/docs/_docs/reference/contextual/given-imports.md
+++ b/docs/_docs/reference/contextual/given-imports.md
@@ -103,7 +103,7 @@ given instances once their user base has migrated.

## Syntax

-```
+```ebnf
Import ::= ‘import’ ImportExpr {‘,’ ImportExpr}
Export ::= ‘export’ ImportExpr {‘,’ ImportExpr}
ImportExpr ::= SimpleRef {‘.’ id} ‘.’ ImportSpec
diff --git a/docs/_docs/reference/contextual/givens.md b/docs/_docs/reference/contextual/givens.md
index 1bfffbc5bf6f..f1333bf8811f 100644
--- a/docs/_docs/reference/contextual/givens.md
+++ b/docs/_docs/reference/contextual/givens.md
@@ -10,7 +10,7 @@ that serve for synthesizing arguments to [context parameters](./using-clauses.md

```scala
trait Ord[T]:
  def compare(x: T, y: T): Int
-  extension (x: T)
+  extension (x: T)
    def < (y: T) = compare(x, y) < 0
    def > (y: T) = compare(x, y) > 0
@@ -174,7 +174,7 @@ is created for each reference.

Here is the syntax for given instances:

-```
+```ebnf
TmplDef ::= ...
| ‘given’ GivenDef
GivenDef ::= [GivenSig] StructuralInstance
diff --git a/docs/_docs/reference/contextual/right-associative-extension-methods.md b/docs/_docs/reference/contextual/right-associative-extension-methods.md
index 068123df8cd2..61f0beece6ed 100644
--- a/docs/_docs/reference/contextual/right-associative-extension-methods.md
+++ b/docs/_docs/reference/contextual/right-associative-extension-methods.md
@@ -4,45 +4,57 @@ title: "Right-Associative Extension Methods: Details"
 nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/right-associative-extension-methods.html
---

-The most general form of leading parameters of an extension method is as follows:
+
+The most general signature an extension method can have is as follows:
+
  - An optional type clause `leftTyParams`
  - A possibly empty list of using clauses `leadingUsing`
-  - A single parameter `extensionParam`
+  - A single parameter `leftParam` (in an explicit term clause)
  - A possibly empty list of using clauses `trailingUsing`
+  - A name (preceded by the `def` keyword)
+  - An optional type clause `rightTyParams`
+  - An optional single parameter `rightParam` (in an explicit term clause)
+  - Any number of any clauses `rest`

-This is then followed by `def`, the method name, and possibly further parameters
-`otherParams`. An example is:
+For example:

```scala
-  extension (using a: A, b: B)(using c: C)    // <-- leadingUsing
-    (x: X)                                    // <-- extensionParam
+  extension [T]                               // <-- leftTyParams
+            (using a: A, b: B)(using c: C)    // <-- leadingUsing
+            (x: X)                            // <-- leftParam
             (using d: D)                      // <-- trailingUsing
-    def +:: (y: Y)(using e: E)(z: Z)          // <-- otherParams
+    def +:: [U]                               // <-- rightTyParams
+            (y: Y)                            // <-- rightParam
+            (using e: E)(z: Z)                // <-- rest
```

+
An extension method is treated as a right-associative operator
(as in [SLS §6.12.3](https://www.scala-lang.org/files/archive/spec/2.13/06-expressions.html#infix-operations))
-if it has a name ending in `:` and is immediately followed by a
-single parameter. In the example above, that parameter is `(y: Y)`.
+if it has a name ending in `:`, and is immediately followed by a
+single explicit term parameter (in other words, `rightParam` is present). In the example above, that parameter is `(y: Y)`.

The Scala compiler pre-processes a right-associative infix operation such as `x +: xs`
to `xs.+:(x)` if `x` is a pure expression or a call-by-name parameter and to `val y = x; xs.+:(y)` otherwise. This is necessary since a regular right-associative infix method
is defined in the class of its right operand. To make up for this swap,
-the expansion of right-associative extension methods performs an analogous parameter swap. More precisely, if `otherParams` consists of a single parameter
-`rightParam` followed by `remaining`, the total parameter sequence
+the expansion of right-associative extension methods performs the inverse parameter swap. More precisely, if `rightParam` is present, the total parameter sequence
of the extension method's expansion is:

```
-    leadingUsing rightParam trailingUsing extensionParam remaining
+    leftTyParams leadingUsing rightTyParams rightParam leftParam trailingUsing rest
```

+In other words, we swap `leftParam trailingUsing` with `rightTyParams rightParam`.
+ For instance, the `+::` method above would become ```scala - def +:: (using a: A, b: B)(using c: C) + def +:: [T] + (using a: A, b: B)(using c: C) + [U] (y: Y) - (using d: D) (x: X) + (using d: D) (using e: E)(z: Z) ``` diff --git a/docs/_docs/reference/contextual/using-clauses.md b/docs/_docs/reference/contextual/using-clauses.md index f590cc2e7492..9177a2f47dc9 100644 --- a/docs/_docs/reference/contextual/using-clauses.md +++ b/docs/_docs/reference/contextual/using-clauses.md @@ -150,10 +150,10 @@ def summon[T](using x: T): x.type = x Here is the new syntax of parameters and arguments seen as a delta from the [standard context free syntax of Scala 3](../syntax.md). `using` is a soft keyword, recognized only at the start of a parameter or argument list. It can be used as a normal identifier everywhere else. -``` +```ebnf ClsParamClause ::= ... | UsingClsParamClause -DefParamClauses ::= ... | UsingParamClause +DefParamClause ::= ... | UsingParamClause UsingClsParamClause ::= ‘(’ ‘using’ (ClsParams | Types) ‘)’ -UsingParamClause ::= ‘(’ ‘using’ (DefParams | Types) ‘)’ +UsingParamClause ::= ‘(’ ‘using’ (DefTermParams | Types) ‘)’ ParArgumentExprs ::= ... | ‘(’ ‘using’ ExprsInParens ‘)’ ``` diff --git a/docs/_docs/reference/enums/adts.md b/docs/_docs/reference/enums/adts.md index 3ab8c9f3b45b..5219e062a633 100644 --- a/docs/_docs/reference/enums/adts.md +++ b/docs/_docs/reference/enums/adts.md @@ -154,7 +154,7 @@ The changes are specified below as deltas with respect to the Scala syntax given 1. Enum definitions are defined as follows: - ``` + ```ebnf TmplDef ::= `enum' EnumDef EnumDef ::= id ClassConstr [`extends' [ConstrApps]] EnumBody EnumBody ::= [nl] ‘{’ [SelfType] EnumStat {semi EnumStat} ‘}’ @@ -164,7 +164,7 @@ The changes are specified below as deltas with respect to the Scala syntax given 2. Cases of enums are defined as follows: - ``` + ```ebnf EnumCase ::= `case' (id ClassConstr [`extends' ConstrApps]] | ids) ``` diff --git a/docs/_docs/reference/experimental/erased-defs-spec.md b/docs/_docs/reference/experimental/erased-defs-spec.md index 24ae89c7e28b..59dfed92da2a 100644 --- a/docs/_docs/reference/experimental/erased-defs-spec.md +++ b/docs/_docs/reference/experimental/erased-defs-spec.md @@ -19,8 +19,8 @@ TODO: complete def g(erased x: Int) = ... - (erased x: Int) => ... - def h(x: (erased Int) => Int) = ... + (erased x: Int, y: Int) => ... + def h(x: (Int, erased Int) => Int) = ... class K(erased x: Int) { ... } erased class E {} @@ -34,12 +34,12 @@ TODO: complete 3. Functions * `(erased x1: T1, x2: T2, ..., xN: TN) => y : (erased T1, T2, ..., TN) => R` - * `(given erased x1: T1, x2: T2, ..., xN: TN) => y: (given erased T1, T2, ..., TN) => R` + * `(given x1: T1, erased x2: T2, ..., xN: TN) => y: (given T1, erased T2, ..., TN) => R` * `(given erased T1) => R <:< erased T1 => R` - * `(given erased T1, T2) => R <:< (erased T1, T2) => R` + * `(given T1, erased T2) => R <:< (T1, erased T2) => R` * ... - Note that there is no subtype relation between `(erased T) => R` and `T => R` (or `(given erased T) => R` and `(given T) => R`) + Note that there is no subtype relation between `(erased T) => R` and `T => R` (or `(given erased T) => R` and `(given T) => R`). The `erased` parameters must match exactly in their respective positions. 4. 
Eta expansion
@@ -51,7 +51,8 @@ TODO: complete
 * All `erased` parameters are removed from the function
 * All arguments to `erased` parameters are not passed to the function
 * All `erased` definitions are removed
-  * All `(erased T1, T2, ..., TN) => R` and `(given erased T1, T2, ..., TN) => R` become `() => R`
+  * `(erased ET1, erased ET2, T1, ..., erased ETN, TM) => R` are erased to `(T1, ..., TM) => R`.
+  * `(given erased ET1, erased ET2, T1, ..., erased ETN, TM) => R` are erased to `(given T1, ..., TM) => R`.

6. Overloading

@@ -60,11 +61,10 @@ TODO: complete

7. Overriding

-   * Member definitions overriding each other must both be `erased` or not be `erased`
-   * `def foo(x: T): U` cannot be overridden by `def foo(erased x: T): U` and vice-versa
-   *
-
+   * Member definitions overriding each other must both be `erased` or not be `erased`.
+   * `def foo(x: T): U` cannot be overridden by `def foo(erased x: T): U` and vice-versa.

8. Type Restrictions

 * For dependent functions, `erased` parameters are limited to realizable types, that is, types that are inhabited by non-null values.
   This restriction stops us from using a bad bound introduced by an erased value, which leads to unsoundness (see #4060).
+ * Polymorphic functions with erased parameters are currently not supported, and will be rejected by the compiler. This is purely an implementation restriction, and might be lifted in the future.
diff --git a/docs/_docs/reference/experimental/erased-defs.md b/docs/_docs/reference/experimental/erased-defs.md
index 548b9c11bc0b..d266cd6c9d19 100644
--- a/docs/_docs/reference/experimental/erased-defs.md
+++ b/docs/_docs/reference/experimental/erased-defs.md
@@ -54,13 +54,13 @@ semantics and they are completely erased.

## How to define erased terms?

Parameters of methods and functions can be declared as erased, placing `erased`
-in front of a parameter list (like `given`).
+in front of each erased parameter (like `inline`).

```scala
-def methodWithErasedEv(erased ev: Ev): Int = 42
+def methodWithErasedEv(erased ev: Ev, x: Int): Int = x + 2

-val lambdaWithErasedEv: erased Ev => Int =
-  (erased ev: Ev) => 42
+val lambdaWithErasedEv: (erased Ev, Int) => Int =
+  (erased ev, x) => x + 2
```

`erased` parameters will not be usable for computations, though they can be used
@@ -80,7 +80,7 @@ parameters.

```scala
erased val erasedEvidence: Ev = ...
-methodWithErasedEv(erasedEvidence)
+methodWithErasedEv(erasedEvidence, 40) // 42
```

## What happens with erased values at runtime?

@@ -89,15 +89,15 @@ As `erased` are guaranteed not to be used in computations, they can and will be
erased.

```scala
-// becomes def methodWithErasedEv(): Int at runtime
-def methodWithErasedEv(erased ev: Ev): Int = ...
+// becomes def methodWithErasedEv(x: Int): Int at runtime
+def methodWithErasedEv(x: Int, erased ev: Ev): Int = ...

def evidence1: Ev = ...
erased def erasedEvidence2: Ev = ... // does not exist at runtime
erased val erasedEvidence3: Ev = ...
// does not exist at runtime

-// evidence1 is not evaluated and no value is passed to methodWithErasedEv
-methodWithErasedEv(evidence1)
+// evidence1 is not evaluated and only `x` is passed to methodWithErasedEv
+methodWithErasedEv(x, evidence1)
```

## State machine with erased evidence example
diff --git a/docs/_docs/reference/experimental/generalized-method-syntax.md b/docs/_docs/reference/experimental/generalized-method-syntax.md
new file mode 100644
index 000000000000..072052c1ae10
--- /dev/null
+++ b/docs/_docs/reference/experimental/generalized-method-syntax.md
@@ -0,0 +1,102 @@
+---
+layout: doc-page
+title: "Generalized Method Syntax"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/generalized-method-syntax.html
+---
+
+This feature is not yet part of the Scala 3 language definition. It can be made available by a language import:
+
+```scala
+import scala.language.experimental.clauseInterleaving
+```
+
+The inclusion of using clauses is not the only way in which methods have been updated: type parameter clauses are now allowed in any number and at any position.
+
+## Syntax Changes
+
+### In Scala 2
+
+The old syntax only allowed zero or one type parameter clause, followed by any number of term clauses, optionally followed by an implicit clause:
+
+```scala
+def foo[T, U](x: T)(y: U)(z: Int, s: String)(a: Array[T])(implicit ordInt: Ord[Int], l: List[U])
+```
+
+### In Scala 3
+
+The new syntax allows any number of type clauses, as long as they are not adjacent
+(note, however, that [implicit clauses are discouraged in favor of using clauses](https://docs.scala-lang.org/scala3/reference/contextual/relationship-implicits.html)):
+
+```scala
+def foo[T, U](x: T)(y: U)[V](z: V, s: String)(using Ord[Int])[A](a: Array[A])(implicit List[U])
+```
+
+### Unchanged
+
+Class definitions and type declarations are unaffected: there can only be up to one type clause, in leading position.
+
+## Motivation
+
+The new syntax is a powerful but natural extension of the old one; it allows new design patterns while staying intuitive and legible.
+
+### Dependent Type Clauses
+
+As type clauses can come after term clauses, it is now possible to have type parameters that depend on term parameters:
+
+```scala
+trait Key { type Value }
+trait DB {
+  def get(k: Key): Option[k.Value] // dependent result type
+  def getOrElse(k: Key)[V >: k.Value](default: V): V // dependent type parameter
+}
+```
+
+Note that simply replacing `V` by `k.Value` would not be equivalent. For example, if `k.Value` is `Some[Int]`, only the above allows:
+`getOrElse(k)[Option[Int]](None)`, which returns an `Option[Int]`.
+
+## Details
+
+### Application
+
+Method application is unchanged.
+When multiple type clauses are expected but not all are passed, the rightmost ones are inferred.
+
+In particular, the following does not type check, even though the argument `Char` is only valid for `C`:
+```scala
+def triple[I <: Int](using Ordering[I])[C <: Char](a: I, b: C) = ???
+triple[Char](0, 'c') // error: Char does not conform to upper bound Int
+```
+
+### Extension Methods
+
+Extension methods follow the same syntax; for example, the following is valid:
+```scala
+extension [T](l1: List[T])
+  def zipWith[U](l2: List[U])[V](l3: List[V]): List[(T,U,V)]
+```
+
+### When to use
+
+We recommend always putting a unique type clause at the beginning, unless it is not possible to do so.
+For example, the extension method `zipWith` above should be written `zipWith[U, V](l2: List[U], l3: List[V]): List[(T,U,V)]` instead.
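For illustration, here is that recommended form spelled out as a complete definition; the body is our own sketch (using `lazyZip`), not part of the proposal:

```scala
// The zipWith extension from above, rewritten with a single leading
// type clause as recommended. No experimental import is needed here.
extension [T](l1: List[T])
  def zipWith[U, V](l2: List[U], l3: List[V]): List[(T, U, V)] =
    l1.lazyZip(l2).lazyZip(l3).toList
```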
+On the other hand, the `getOrElse` method is recommended as-is, as it cannot be written with a leading type clause. + +### Formal syntax + +``` +DefDcl ::= DefSig ‘:’ Type +DefDef ::= DefSig [‘:’ Type] ‘=’ Expr +DefSig ::= id [DefParamClauses] [DefImplicitClause] +DefParamClauses ::= DefParamClause { DefParamClause } -- and two DefTypeParamClause cannot be adjacent +DefParamClause ::= DefTypeParamClause + | DefTermParamClause + | UsingParamClause +DefTypeParamClause::= [nl] ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ +DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeParamBounds +DefTermParamClause::= [nl] ‘(’ [DefTermParams] ‘)’ +UsingParamClause ::= [nl] ‘(’ ‘using’ (DefTermParams | FunArgTypes) ‘)’ +DefImplicitClause ::= [nl] ‘(’ ‘implicit’ DefTermParams ‘)’ +DefTermParams ::= DefTermParam {‘,’ DefTermParam} +DefTermParam ::= {Annotation} [‘inline’] Param +Param ::= id ‘:’ ParamType [‘=’ Expr] +``` diff --git a/docs/_docs/reference/experimental/named-typeargs-spec.md b/docs/_docs/reference/experimental/named-typeargs-spec.md index 9e1113bbac86..741836a481f2 100644 --- a/docs/_docs/reference/experimental/named-typeargs-spec.md +++ b/docs/_docs/reference/experimental/named-typeargs-spec.md @@ -10,7 +10,7 @@ In this section we give more details about the [named type arguments](named-type The addition to the grammar is: -``` +```ebnf SimpleExpr1 ::= ... | SimpleExpr (TypeArgs | NamedTypeArgs) NamedTypeArgs ::= ‘[’ NamedTypeArg {‘,’ NamedTypeArg} ‘]’ @@ -19,7 +19,7 @@ NamedTypeArg ::= id ‘=’ Type Note in particular that named arguments cannot be passed to type constructors: -``` scala +```scala class C[T] val x: C[T = Int] = // error diff --git a/docs/_docs/reference/metaprogramming/macros-spec.md b/docs/_docs/reference/metaprogramming/macros-spec.md index aa8f94a9a1f7..27a0a2c1bdcb 100644 --- a/docs/_docs/reference/metaprogramming/macros-spec.md +++ b/docs/_docs/reference/metaprogramming/macros-spec.md @@ -4,251 +4,711 @@ title: "Macros Spec" nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/macros-spec.html --- +## Formalization + +* Multi-stage programming with generative and analytical macros[^2] +* Multi-Stage Macro Calculus, Chapter 4 of Scalable Metaprogramming in Scala 3[^1]. + Contains and extends the calculus of _Multi-stage programming with generative and analytical macros_ with type polymorphism. + +## Syntax + +The quotation syntax using `'` and `$` was chosen to mimic the string interpolation syntax of Scala. +Like a string double-quotation, a single-quote block can contain splices. +However, unlike strings, splices can contain quotes using the same rules. + +```scala +s" Hello $name" s" Hello ${name}" +'{ hello($name) } '{ hello(${name}) } +${ hello('name) } ${ hello('{name}) } +``` + +### Quotes +Quotes come in four flavors: quoted identifiers, quoted blocks, quoted block patterns and quoted type patterns. +Scala 2 used quoted identifiers to represent `Symbol` literals. They were deprecated in Scala 3, allowing the syntax to be used for quotation. +```scala +SimpleExpr ::= ... + | `'` alphaid // quoted identifier + | `'` `{` Block `}` // quoted block +Pattern ::= ... + | `'` `{` Block `}` // quoted block pattern + | `'` `[` Type `]` // quoted type pattern +``` + +Quoted blocks and quoted block patterns contain an expression equivalent to a normal block of code. +When entering either of those we track the fact that we are in a quoted block (`inQuoteBlock`) which is used for spliced identifiers. 
+When entering a quoted block pattern we additionally track the fact that we are in a quoted pattern (`inQuotePattern`) which is used to distinguish spliced blocks and splice patterns.
+Lastly, the quoted type pattern simply contains a type.
+
+### Splices
+Splices come in three flavors: spliced identifiers, spliced blocks and splice patterns.
+Scala specifies identifiers containing `$` as valid identifiers but reserves them for compiler and standard library use only.
+Unfortunately, many libraries have used such identifiers in Scala 2. Therefore to mitigate the cost of migration, we still support them.
+We work around this by only allowing spliced identifiers[^3] within quoted blocks or quoted block patterns (`inQuoteBlock`).
+Splice blocks and splice patterns can contain an arbitrary block or pattern respectively.
+They are distinguished based on their surrounding quote (`inQuotePattern`): a quote block will contain spliced blocks, and a quote block pattern will contain splice patterns.
+
+```scala
+SimpleExpr ::= ...
+             | `$` alphaid         if inQuoteBlock    // spliced identifier
+             | `$` `{` Block `}`   if !inQuotePattern // spliced block
+             | `$` `{` Pattern `}` if inQuotePattern  // splice pattern
+```
+
+### Quoted Pattern Type Variables
+Quoted pattern type variables in quoted patterns and quoted type patterns do not require additional syntax.
+Any type definition or reference with a lowercase name is assumed to be a pattern type variable definition while typing.
+A backticked lowercase type name is interpreted as a reference to the type with that name.
+
+
## Implementation

-### Syntax
-
-Compared to the [Scala 3 reference grammar](../syntax.md)
-there are the following syntax changes:
-```
-SimpleExpr ::= ...
-                | ‘'’ ‘{’ Block ‘}’
-                | ‘'’ ‘[’ Type ‘]’
-                | ‘$’ ‘{’ Block ‘}’
-SimpleType ::= ...
-                | ‘$’ ‘{’ Block ‘}’
-```
-In addition, an identifier `$x` starting with a `$` that appears inside
-a quoted expression or type is treated as a splice `${x}` and a quoted identifier
-`'x` that appears inside a splice is treated as a quote `'{x}`
-
-### Implementation in `scalac`
-
-Quotes and splices are primitive forms in the generated abstract syntax trees.
-Top-level splices are eliminated during macro expansion while typing. On the
-other hand, top-level quotes are eliminated in an expansion phase `PickleQuotes`
-phase (after typing and pickling). PCP checking occurs while preparing the RHS
-of an inline method for top-level splices and in the `Staging` phase (after
-typing and before pickling).
-
-Macro-expansion works outside-in. If the outermost scope is a splice,
-the spliced AST will be evaluated in an interpreter. A call to a
-previously compiled method can be implemented as a reflective call to
-that method. With the restrictions on splices that are currently in
-place that’s all that’s needed. We might allow more interpretation in
-splices in the future, which would allow us to loosen the
-restriction. Quotes in spliced, interpreted code are kept as they
-are, after splices nested in the quotes are expanded.
-
-If the outermost scope is a quote, we need to generate code that
-constructs the quoted tree at run-time. We implement this by
-serializing the tree as a TASTy structure, which is stored
-in a string literal. At runtime, an unpickler method is called to
-deserialize the string into a tree.
-
-Splices inside quoted code insert the spliced tree as is, after
-expanding any quotes in the spliced code recursively.
+### Run-Time Representation -## Formalization +The standard library defines the `Quotes` interface which contains all the logic and the abstract classes `Expr` and `Type`. +The compiler implements the `Quotes` interface and provides the implementation of `Expr` and `Type`. -The phase consistency principle can be formalized in a calculus that -extends simply-typed lambda calculus with quotes and splices. +##### `class Expr` +Expressions of type `Expr[T]` are represented by the following abstract class: +```scala +abstract class Expr[+T] private[scala] +``` +The only implementation of `Expr` is in the compiler along with the implementation of `Quotes`. +It is a class that wraps a typed AST and a `Scope` object with no methods of its own. +The `Scope` object is used to track the current splice scope and detect scope extrusions. -### Syntax +##### `object Expr` +The companion object of `Expr` contains a few useful static methods; +the `apply`/`unapply` methods to use `ToExpr`/`FromExpr` with ease; +the `betaReduce` and `summon` methods. +It also contains methods to create expressions out of lists or sequences of expressions: `block`, `ofSeq`, `ofList`, `ofTupleFromSeq` and `ofTuple`. -The syntax of terms, values, and types is given as follows: +```scala +object Expr: + def apply[T](x: T)(using ToExpr[T])(using Quotes): Expr[T] = ... + def unapply[T](x: Expr[T])(using FromExpr[T])(using Quotes): Option[T] = ... + def betaReduce[T](e: Expr[T])(using Quotes): Expr[T] = ... + def summon[T: Type](using Quotes): Option[Expr[T]] = ... + def block[T](stats: List[Expr[Any]], e: Expr[T])(using Quotes): Expr[T] = ... + def ofSeq[T: Type](xs: Seq[Expr[T]])(using Quotes): Expr[Seq[T]] = ... + def ofList[T: Type](xs: Seq[Expr[T]])(using Quotes): Expr[List[T]] = ... + def ofTupleFromSeq(xs: Seq[Expr[Any]])(using Quotes): Expr[Tuple] = ... + def ofTuple[T <: Tuple: Tuple.IsMappedBy[Expr]: Type](tup: T)(using Quotes): + Expr[Tuple.InverseMap[T, Expr]] = ... ``` -Terms t ::= x variable - (x: T) => t lambda - t t application - 't quote - $t splice -Values v ::= (x: T) => t lambda - 'u quote +##### `class Type` +Types of type `Type[T]` are represented by the following abstract class: +```scala +abstract class Type[T <: AnyKind] private[scala]: + type Underlying = T +``` + +The only implementation of `Type` is in the compiler along with the implementation of `Quotes`. +It is a class that wraps the AST of a type and a `Scope` object with no methods of its own. +The upper bound of `T` is `AnyKind` which implies that `T` may be a higher-kinded type. +The `Underlying` alias is used to select the type from an instance of `Type`. +Users never need to use this alias as they can always use `T` directly. +`Underlying` is used for internal encoding while compiling the code (see _Type Healing_). -Simple terms u ::= x | (x: T) => u | u u | 't +##### `object Type` +The companion object of `Type` contains a few useful static methods. +The first and most important one is the `Type.of` given definition. +This instance of `Type[T]` is summoned by default when no other instance is available. +The `of` operation is an intrinsic operation that the compiler will transform into code that will generate the `Type[T]` at run-time. +Secondly, the `Type.show[T]` operation will show a string representation of the type, which is often useful when debugging. +Finally, the object defines `valueOfConstant` (and `valueOfTuple`) which can transform singleton types (or tuples of singleton types) into their value. 
-Types T ::= A base type - T -> T function type - expr T quoted + +```scala +object Type: + given of[T <: AnyKind](using Quotes): Type[T] = ... + def show[T <: AnyKind](using Type[T])(using Quotes): String = ... + def valueOfConstant[T](using Type[T])(using Quotes): Option[T] = ... + def valueOfTuple[T <: Tuple](using Type[T])(using Quotes): Option[T] = ... ``` -Typing rules are formulated using a stack of environments -`Es`. Individual environments `E` consist as usual of variable -bindings `x: T`. Environments can be combined using the two -combinators `'` and `$`. + +##### `Quotes` +The `Quotes` interface is where most of the primitive operations of the quotation system are defined. + +Quotes define all the `Expr[T]` methods as extension methods. +`Type[T]` does not have methods and therefore does not appear here. +These methods are available as long as `Quotes` is implicitly given in the current scope. + +The `Quotes` instance is also the entry point to the [reflection API](./reflection.md) through the `reflect` object. + +Finally, `Quotes` provides the internal logic used in quote un-pickling (`QuoteUnpickler`) in quote pattern matching (`QuoteMatching`). +These interfaces are added to the self-type of the trait to make sure they are implemented on this object but not visible to users of `Quotes`. + +Internally, the implementation of `Quotes` will also track its current splicing scope `Scope`. +This scope will be attached to any expression that is created using this `Quotes` instance. + +```scala +trait Quotes: + this: runtime.QuoteUnpickler & runtime.QuoteMatching => + + extension [T](self: Expr[T]) + def show: String + def matches(that: Expr[Any]): Boolean + def value(using FromExpr[T]): Option[T] + def valueOrAbort(using FromExpr[T]): T + end extension + + extension (self: Expr[Any]) + def isExprOf[X](using Type[X]): Boolean + def asExprOf[X](using Type[X]): Expr[X] + end extension + + // abstract object reflect ... ``` -Environment E ::= () empty - E, x: T -Env. stack Es ::= () empty - E simple - Es * Es combined -Separator * ::= ' - $ +##### `Scope` +The splice context is represented as a stack (immutable list) of `Scope` objects. +Each `Scope` contains the position of the splice (used for error reporting) and a reference to the enclosing splice scope `Scope`. +A scope is a sub-scope of another if the other is contained in its parents. +This check is performed when an expression is spliced into another using the `Scope` provided in the current scope in `Quotes` and the one in the `Expr` or `Type`. + +### Entry Points +The two entry points for multi-stage programming are macros and the `run` operation. + +#### Macros +Inline macro definitions will inline a top-level splice (a splice not nested in a quote). +This splice needs to be evaluated at compile-time. +In _Avoiding a complete interpreter_[^1], we stated the following restrictions: + + * The top-level splice must contain a single call to a compiled static method. + * Arguments to the function are either literal constants, quoted expressions (parameters), `Type.of` for type parameters and a reference to `Quotes`. + +These restrictions make the implementation of the interpreter quite simple. +Java Reflection is used to call the single function call in the top-level splice. +The execution of that function is entirely done on compiled bytecode. +These are Scala static methods and may not always become Java static methods, they might be inside module objects. 
+As modules are encoded as class instances, we need to interpret the prefix of the method to instantiate it before we can invoke the method. + +The code of the arguments has not been compiled and therefore needs to be interpreted by the compiler. +Interpreting literal constants is as simple as extracting the constant from the AST that represents literals. +When interpreting a quoted expression, the contents of the quote is kept as an AST which is wrapped inside the implementation of `Expr`. +Calls to `Type.of[T]` also wrap the AST of the type inside the implementation of `Type`. +Finally, the reference to `Quotes` is supposed to be the reference to the quotes provided by the splice. +This reference is interpreted as a new instance of `Quotes` that contains a fresh initial `Scope` with no parents. + +The result of calling the method via Java Reflection will return an `Expr` containing a new AST that was generated by the implementation of that macro. +The scope of this `Expr` is checked to make sure it did not extrude from some splice or `run` operation. +Then the AST is extracted from the `Expr` and it is inserted as replacement for the AST that contained the top-level splice. + + +#### Run-time Multi-Stage Programming + +To be able to compile the code, the `scala.quoted.staging` library defines the `Compiler` trait. +An instance of `staging.Compiler` is a wrapper over the normal Scala~3 compiler. +To be instantiated it requires an instance of the JVM _classloader_ of the application. + +```scala +import scala.quoted.staging.* +given Compiler = Compiler.make(getClass.getClassLoader) ``` -The two environment combinators are both associative with left and -right identity `()`. -### Operational semantics +The classloader is needed for the compiler to know which dependencies have been loaded and to load the generated code using the same classloader. Below is an example method `mkPower2` that is passed to `staging.run`: + +```scala +def mkPower2()(using Quotes): Expr[Double => Double] = ... -We define a small step reduction relation `-->` with the following rules: +run(mkPower2()) ``` - ((x: T) => t) v --> [x := v]t +To run the previous example, the compiler will create code equivalent to the following class and compile it using a new `Scope` without parents. + +```scala +class RunInstance: + def exec(): Double => Double = ${ mkPower2() } +``` +Finally, `run` will interpret `(new RunInstance).exec()` to evaluate the contents of the quote. +To do this, the resulting `RunInstance` class is loaded in the JVM using Java Reflection, instantiated and then the `exec` method is invoked. + + +### Compilation + +Quotes and splices are primitive forms in the generated typed abstract syntax trees. +These need to be type-checked with some extra rules, e.g., staging levels need to be checked and the references to generic types need to be adapted. +Finally, quoted expressions that will be generated at run-time need to be encoded (serialized/pickled) and decoded (deserialized/unpickled). + +#### Typing Quoted Expressions - ${'u} --> u +The typing process for quoted expressions and splices with `Expr` is relatively straightforward. +At its core, quotes are desugared into calls to `quote`, splices are desugared into calls to `splice`. +We track the quotation level when desugaring into these methods. - t1 --> t2 - ----------------- - e[t1] --> e[t2] + +```scala +def quote[T](x: T): Quotes ?=> Expr[T] + +def splice[T](x: Quotes ?=> Expr[T]): T ``` -The first rule is standard call-by-value beta-reduction. 
The second -rule says that splice and quotes cancel each other out. The third rule -is a context rule; it says that reduction is allowed in the hole `[ ]` -position of an evaluation context. Evaluation contexts `e` and -splice evaluation context `e_s` are defined syntactically as follows: + +It would be impossible to track the quotation levels if users wrote calls to these methods directly. +To know if it is a call to one of those methods we would need to type it first, but to type it we would need to know if it is one of these methods to update the quotation level. +Therefore these methods can only be used by the compiler. + +At run-time, the splice needs to have a reference to the `Quotes` that created its surrounding quote. +To simplify this for later phases, we track the current `Quotes` and encode a reference directly in the splice using `nestedSplice` instead of `splice`. + +```scala +def nestedSplice[T](q: Quotes)(x: q.Nested ?=> Expr[T]): T ``` -Eval context e ::= [ ] | e t | v e | 'e_s[${e}] -Splice context e_s ::= [ ] | (x: T) => e_s | e_s t | u e_s +With this addition, the original `splice` is only used for top-level splices. + +The levels are mostly used to identify top-level splices that need to be evaluated while typing. +We do not use the quotation level to influence the typing process. +Level checking is performed at a later phase. +This ensures that a source expression in a quote will have the same elaboration as a source expression outside the quote. + + + +#### Quote Pattern Matching + +Pattern matching is defined in the trait `QuoteMatching`, which is part of the self type of `Quotes`. +It is implemented by `Quotes` but not available to users of `Quotes`. +To access it, the compiler generates a cast from `Quotes` to `QuoteMatching` and then selects one of its two members: `ExprMatch` or `TypeMatch`. +`ExprMatch` defines an `unapply` extractor method that is used to encode quote patterns and `TypeMatch` defines an `unapply` method for quoted type patterns. + +```scala +trait Quotes: + self: runtime.QuoteMatching & ... => + ... + +trait QuoteMatching: + object ExprMatch: + def unapply[TypeBindings <: Tuple, Tup <: Tuple] + (scrutinee: Expr[Any]) + (using pattern: Expr[Any]): Option[Tup] = ... + object TypeMatch: + ... ``` -### Typing rules +These extractor methods are only meant to be used in code generated by the compiler. +The call to the extractor that is generated has an already elaborated form that cannot be written in source, namely explicit type parameters and explicit contextual parameters. + +This extractor returns a tuple type `Tup` which cannot be inferred from the types in the method signature. +This type will be computed when typing the quote pattern and will be explicitly added to the extractor call. +To refer to type variables in arbitrary places of `Tup`, we need to define them all before their use, hence we have `TypeBindings`, which will contain all pattern type variable definitions. +The extractor also receives a given parameter of type `Expr[Any]` that will contain an expression that represents the pattern. +The compiler will explicitly add this pattern expression. +We use a given parameter because these are the only parameters we are allowed to add to the extractor call in a pattern position. + +This extractor is a bit convoluted, but it encodes away all the quotation-specific features. +It compiles the pattern down into a representation that the pattern matcher compiler phase understands. -Typing judgments are of the form `Es |- t: T`. 
There are two -substructural rules which express the fact that quotes and splices -cancel each other out: +The quote patterns are encoded into two parts: a tuple pattern that is tasked with extracting the result of the match and a quoted expression representing the pattern. +For example, if the pattern has no `$` we will have an `EmptyTuple` as the pattern and `'{1}` to represent the pattern. + +```scala + case '{ 1 } => +// is elaborated to + case ExprMatch(EmptyTuple)(using '{1}) => +// ^^^^^^^^^^ ^^^^^^^^^^ +// pattern expression +``` +When extracting expressions, each pattern that is contained in a splice `${..}` will be placed in order in the tuple pattern. +In the following case, the `f` and `x` are placed in a tuple pattern `(f, x)`. +The type of the tuple is encoded in the `Tup` and not only in the tuple itself. +Otherwise, the extractor would return a tuple `Tuple` for which the types need to be tested which is in turn not possible due to type erasure. + +```scala + case '{ ((y: Int) => $f(y)).apply($x) } => +// is elaborated to + case ExprMatch[.., (Expr[Int => Int], Expr[Int])]((f, x))(using pattern) => +// pattern = '{ ((y: Int) => pat[Int](y)).apply(pat[Int]()) } ``` - Es1 * Es2 |- t: T - --------------------------- - Es1 $ E1 ' E2 * Es2 |- t: T +The contents of the quote are transformed into a valid quote expression by replacing the splice with a marker expression `pat[T](..)`. +The type `T` is taken from the type of the splice and the arguments are the HOAS arguments. +This implies that a `pat[T]()` is a closed pattern and `pat[T](y)` is an HOAS pattern that can refer to `y`. - Es1 * Es2 |- t: T - --------------------------- - Es1 ' E1 $ E2 * Es2 |- t: T +Type variables in quoted patterns are first normalized to have all definitions at the start of the pattern. +For each definition of a type variable `t` in the pattern we will add a type variable definition in `TypeBindings`. +Each one will have a corresponding `Type[t]` that will get extracted if the pattern matches. +These `Type[t]` are also listed in the `Tup` and added in the tuple pattern. +It is additionally marked as `using` in the pattern to make it implicitly available in this case branch. + + +```scala + case '{ type t; ($xs: List[t]).map[t](identity[t]) } => +// is elaborated to + case ExprMatch[(t), (Type[t], Expr[List[t]])]((using t, xs))(using p) => +// ^^^ ^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^ ^^^^^^^ +// type bindings result type pattern expression +// p = '{ @patternType type u; pat[List[u]]().map[u](identity[u]) } ``` -The lambda calculus fragment of the rules is standard, except that we -use a stack of environments. The rules only interact with the topmost -environment of the stack. + +The contents of the quote are transformed into a valid quote expression by replacing type variables with fresh ones that do not escape the quote scope. +These are also annotated to be easily identifiable as pattern variables. + +#### Level Consistency Checking +Level consistency checking is performed after typing the program as a static check. +To check level consistency we traverse the tree top-down remembering the context staging level. +Each local definition in scope is recorded with its level and each term reference to a definition is checked against the current staging level. +```scala +// level 0 +'{ // level 1 + val x = ... // level 1 with (x -> 1) + ${ // level 0 (x -> 1) + val y = ... 
// level 0 with (x -> 1, y -> 0) + x // error: defined at level 1 but used in level 0 + } + // level 1 (x -> 1) + x // x is ok +} ``` - x: T in E - -------------- - Es * E |- x: T +#### Type Healing - Es * E, x: T1 |- t: T2 - ------------------------------- - Es * E |- (x: T1) => t: T -> T2 +When using a generic type `T` in a future stage, it is necessary to have a given `Type[T]` in scope. +The compiler needs to identify those references and link them with the instance of `Type[T]`. +For instance consider the following example: +```scala +def emptyList[T](using t: Type[T])(using Quotes): Expr[List[T]] = + '{ List.empty[T] } +``` - Es |- t1: T2 -> T Es |- t2: T2 - --------------------------------- - Es |- t1 t2: T +For each reference to a generic type `T` that is defined at level 0 and used at level 1 or greater, the compiler will summon a `Type[T]`. +This is usually the given type that is provided as parameter, `t` in this case. +We can use the type `t.Underlying` to replace `T` as it is an alias of that type. +But `t.Underlying` contains the extra information that it is `t` that will be used in the evaluation of the quote. +In a sense, `Underlying` acts like a splice for types. + +```scala +def emptyList[T](using t: Type[T])(using Quotes): Expr[List[T]] = + '{ List.empty[t.Underlying] } ``` -The rules for quotes and splices map between `expr T` and `T` by trading `'` and `$` between -environments and terms. + +Due to some technical limitations, it is not always possible to replace the type reference with the AST containing `t.Underlying`. +To overcome this limitation, we can simply define a list of type aliases at the start of the quote and insert the `t.Underlying` there. +This has the added advantage that we do not have to repeatedly insert the `t.Underlying` in the quote. + +```scala +def emptyList[T](using t: Type[T])(using Quotes): Expr[List[T]] = + '{ type U = t.Underlying; List.empty[U] } +``` +These aliases can be used at any level within the quote and this transformation is only performed on quotes that are at level 0. + +```scala + '{ List.empty[T] ... '{ List.empty[T] } ... } +// becomes + '{ type U = t.Underlying; List.empty[U] ... '{ List.empty[U] } ... } +``` +If we define a generic type at level 1 or greater, it will not be subject to this transformation. +In some future compilation stage, when the definition of the generic type is at level 0, it will be subject to this transformation. +This simplifies the transformation logic and avoids leaking the encoding into code that a macro could inspect. + +```scala +'{ + def emptyList[T: Type](using Quotes): Expr[List[T]] = '{ List.empty[T] } + ... +} +``` +A similar transformation is performed on `Type.of[T]`. +Any generic type in `T` needs to have an implicitly given `Type[T]` in scope, which will also be used as a path. +The example: + +```scala +def empty[T](using t: Type[T])(using Quotes): Expr[T] = + Type.of[T] match ... +// becomes +def empty[T](using t: Type[T])(using Quotes): Expr[T] = + Type.of[t.Underlying] match ... +// then becomes +def empty[T](using t: Type[T])(using Quotes): Expr[T] = + t match ... +``` + +The operation `Type.of[t.Underlying]` can be optimized to just `t`. +But this is not always the case. +If the generic reference is nested in the type, we will need to keep the `Type.of`. + +```scala +def matchOnList[T](using t: Type[T])(using Quotes): Expr[List[T]] = + Type.of[List[T]] match ... 
+// becomes
+def matchOnList[T](using t: Type[T])(using Quotes): Expr[List[T]] =
+  Type.of[List[t.Underlying]] match ...
+```
+
+By doing this transformation, we ensure that each abstract type `U` used in `Type.of` has an implicit `Type[U]` in scope.
+This representation makes it simpler to identify parts of the type that are statically known from those that are known dynamically.
+Type aliases are also added within the type of the `Type.of` though these are not valid source code.
+These would look like `Type.of[{type U = t.Underlying; Map[U, U]}]` if written in source code.
+
+
+#### Splice Normalization
+
+The contents of a splice may refer to variables defined in the enclosing quote.
+This complicates the process of serialization of the contents of the quotes.
+To make serialization simple, we first transform the contents of each level 1 splice.
+Consider the following example:
+
+```scala
+def power5to(n: Expr[Int]): Expr[Double] = '{
+  val x: Int = 5
+  ${ powerCode('{x}, n) }
+}
+```
+
+The variable `x` is defined in the quote and used in the splice.
+The normal form will extract all references to `x` and replace them with a staged version of `x`.
+We will replace the reference to `x` of type `T` with a `$y` where `y` is of type `Expr[T]`.
+Then we wrap the new contents of the splice in a lambda that defines `y` and apply it to the quoted version of `x`.
+After this transformation we have two parts: a lambda without references to the quote, which knows how to compute the contents of the splice, and a sequence of quoted arguments that refer to variables defined in the lambda.
+
+```scala
+def power5to(n: Expr[Int]): Expr[Double] = '{
+  val x: Int = 5
+  ${ ((y: Expr[Int]) => powerCode('{$y}, n)).apply('x) }
+}
+```
+
+In general, the splice normal form has the shape `${ <lambda>.apply(<args>*) }` and the following constraints:
+ * `<lambda>` a lambda expression that does not refer to variables defined in the outer quote
+ * `<args>` sequence of quoted expressions or `Type.of` containing references to variables defined in the enclosing quote and no references to local variables defined outside the enclosing quote
+
+
+##### Function references normalization
+A reference to a function `f` that receives parameters is not a valid value in Scala.
+Such a function reference `f` can be eta-expanded as `x => f(x)` to be used as a lambda value.
+Therefore function references cannot be transformed by the normalization as directly as other expressions as we cannot represent `'{f}` with a method reference type.
+We can use the eta-expanded form of `f` in the normalized form.
+For example, consider the reference to `f` below.
+
+```scala
+'{
+  def f(a: Int)(b: Int, c: Int): Int = 2 + a + b + c
+  ${ '{ f(3)(4, 5) } }
+}
+```
+
+To normalize this code, we can eta-expand the reference to `f` and place it in a quote containing a proper expression.
+Therefore the normalized form of the argument `'{f}` becomes the quoted lambda `'{ (a: Int) => (b: Int, c: Int) => f(a)(b, c) }` and is an expression of type `Expr[Int => (Int, Int) => Int]`.
+The eta-expansion produces one curried lambda per parameter list.
+The application `f(3)(4, 5)` does not become `$g(3)(4, 5)` but `$g.apply(3).apply(4, 5)`.
+We add the `apply` because `g` is not a quoted reference to a function but a curried lambda.
+
+```scala
+'{
+  def f(a: Int)(b: Int, c: Int): Int = 2 + a + b + c
+  ${
+    (
+      (g: Expr[Int => (Int, Int) => Int]) => '{$g.apply(3).apply(4, 5)}
+    ).apply('{ (a: Int) => (b: Int, c: Int) => f(a)(b, c) })
+  }
+}
+```
+
+Then we can apply it and beta-reduce the application when generating the code.
+
+```scala
+  (g: Expr[Int => (Int, Int) => Int]) => betaReduce('{$g.apply(3).apply(4, 5)})
+```
+
+
+##### Variable assignment normalization
+A reference to a mutable variable in the left-hand side of an assignment cannot be transformed directly as it is not in an expression position.
+```scala
+'{
+  var x: Int = 5
+  ${ g('{x = 2}) }
+}
+```
+
+We can use the same strategy used for function references by eta-expanding the assignment operation `x = _` into `y => x = y`.
+
+```scala
+'{
+  var x: Int = 5
+  ${
+    g(
+      (
+        (f: Expr[Int => Unit]) => betaReduce('{$f(2)})
+      ).apply('{ (y: Int) => x = $y })
+    )
+  }
+}
+```
+
+
+##### Type normalization
+Types defined in the quote are subject to a similar transformation.
+In this example, `T` is defined within the quote at level 1 and used in the splice again at level 1.
+
+```scala
+'{ def f[T] = ${ '{g[T]} } }
+```
+
+The normalization will add a `Type[T]` to the lambda, and we will insert this reference.
+The difference is that it will add an alias similar to the one used in type healing.
+In this example, we create a `type U` that aliases the staged type.
+
+```scala
+'{
+  def f[T] = ${
+    (
+      (t: Type[T]) => '{type U = t.Underlying; g[U]}
+    ).apply(Type.of[T])
+  }
+}
+```
+
+#### Serialization
+
+Quoted code needs to be pickled to make it available at run-time in the next compilation phase.
+We implement this by pickling the AST as a TASTy binary.
+
+##### TASTy
+The TASTy format is the typed abstract syntax tree serialization format of Scala 3.
+It usually pickles the fully elaborated code after type-checking and is kept alongside the generated Java classfiles.
+
+
+##### Pickling
+We use TASTy as a serialization format for the contents of the quotes.
+To show how serialization is performed, we will use the following example.
+```scala
+'{
+  val (x, n): (Double, Int) = (5, 2)
+  ${ powerCode('{x}, '{n}) } * ${ powerCode('{2}, '{n}) }
+}
```
-    Es $ () |- t: expr T
-    --------------------
-    Es |- $t: T
-
-    Es ' () |- t: T
-    ----------------
-    Es |- 't: expr T
```
-The meta theory of a slightly simplified 2-stage variant of this calculus
-is studied [separately](./simple-smp.md).
+This quote is transformed into the following code when normalizing the splices.

-## Going Further
+```scala
+'{
+  val (x, n): (Double, Int) = (5, 2)
+  ${
+    ((y: Expr[Double], m: Expr[Int]) => powerCode(y, m)).apply('x, 'n)
+  } * ${
+    ((m: Expr[Int]) => powerCode('{2}, m)).apply('n)
+  }
+}
```
+
+Splice normalization is a key part of the serialization process as it only allows references to variables defined in the quote in the arguments of the lambda in the splice.
+This makes it possible to create a closed representation of the quote without much effort.
+The first step is to remove all the splices and replace them with holes.
+A hole is like a splice but it lacks the knowledge of how to compute the contents of the splice.
+Instead, it knows the index of the hole and the contents of the arguments of the splice.
+We can see this transformation in the following example where a hole is represented by `<< idx; holeType; args* >>`.
+ +```scala + ${ ((y: Expr[Double], m: Expr[Int]) => powerCode(y, m)).apply('x, 'n) } +// becomes + << 0; Double; x, n >> +``` -The metaprogramming framework as presented and currently implemented is quite restrictive -in that it does not allow for the inspection of quoted expressions and -types. It’s possible to work around this by providing all necessary -information as normal, unquoted inline parameters. But we would gain -more flexibility by allowing for the inspection of quoted code with -pattern matching. This opens new possibilities. +As this was the first hole it has index 0. +The hole type is `Double`, which needs to be remembered now that we cannot infer it from the contents of the splice. +The arguments of the splice are `x` and `n`; note that they do not require quoting because they were moved out of the splice. -For instance, here is a version of `power` that generates the multiplications -directly if the exponent is statically known and falls back to the dynamic -implementation of `power` otherwise. +References to healed types are handled in a similar way. +Consider the `emptyList` example, which shows the type aliases that are inserted into the quote. ```scala -import scala.quoted.* +'{ List.empty[T] } +// type healed to +'{ type U = t.Underlying; List.empty[U] } +``` +Instead of replacing a splice, we replace the `t.Underlying` type with a type hole. +The type hole is represented by `<< idx; bounds >>`. +```scala +'{ type U = << 0; Nothing..Any >>; List.empty[U] } +``` +Here, the bounds of `Nothing..Any` are the bounds of the original `T` type. +The types of a `Type.of` are transformed in the same way. -inline def power(x: Double, n: Int): Double = - ${ powerExpr('x, 'n) } -private def powerExpr(x: Expr[Double], n: Expr[Int]) - (using Quotes): Expr[Double] = - n.value match - case Some(m) => powerExpr(x, m) - case _ => '{ dynamicPower($x, $n) } +With these transformations, the contents of the quote or `Type.of` are guaranteed to be closed and therefore can be pickled. +The AST is pickled into TASTy, which is a sequence of bytes. +This sequence of bytes needs to be instantiated in the bytecode, but unfortunately it cannot be dumped into the classfile as bytes. +To reify it we encode the bytes into a Java `String`. +In the following examples we display this encoding in human readable form with the fictitious `|tasty"..."|` string literal. -private def powerExpr(x: Expr[Double], n: Int) - (using Quotes): Expr[Double] = - if n == 0 then '{ 1.0 } - else if n == 1 then x - else if n % 2 == 0 then '{ val y = $x * $x; ${ powerExpr('y, n / 2) } } - else '{ $x * ${ powerExpr(x, n - 1) } } +```scala +// pickled AST bytes encoded in a base64 string +tasty""" + val (x, n): (Double, Int) = (5, 2) + << 0; Double; x, n >> * << 1; Double; n >> +""" +// or +tasty""" + type U = << 0; Nothing..Any; >> + List.empty[U] +""" +``` +The contents of a quote or `Type.of` are not always pickled. +In some cases it is better to generate equivalent (smaller and/or faster) code that will compute the expression. +Literal values are compiled into a call to `Expr()` using the implementation of `ToExpr` to create the quoted expression. +This is currently performed only on literal values, but can be extended to any value for which we have a `ToExpr` defined in the standard library. +Similarly, for non-generic types we can use their respective `java.lang.Class` and convert them into a `Type` using a primitive operation `typeConstructorOf` defined in the reflection API. 
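+For illustration only, the following sketch shows what such non-pickled alternatives could look like (these helper names, `fortyTwo` and `stringType`, are made up for this example and are not what the compiler actually emits; `TypeRepr.typeConstructorOf` is the reflection primitive mentioned above):
+
+```scala
+import scala.quoted.*
+
+// Hypothetical example: instead of unpickling a TASTy blob for '{ 42 },
+// rebuild the expression from the value itself via the ToExpr[Int] instance.
+def fortyTwo(using Quotes): Expr[Int] = Expr(42)
+
+// Hypothetical example: recover a Type for a non-generic class from its
+// java.lang.Class using the reflection primitive TypeRepr.typeConstructorOf.
+def stringType(using Quotes): Type[String] =
+  import quotes.reflect.*
+  TypeRepr.typeConstructorOf(classOf[String]).asType.asInstanceOf[Type[String]]
+```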
-private def dynamicPower(x: Double, n: Int): Double =
-  if n == 0 then 1.0
-  else if n % 2 == 0 then dynamicPower(x * x, n / 2)
-  else x * dynamicPower(x, n - 1)
+
+##### Unpickling
+
+Now that we have seen how a quote is pickled, we can look at how to unpickle it.
+We will continue with the previous example.
+
+Holes were used to replace the splices in the quote.
+When we perform this transformation, we also need to remember the lambdas from the splices and their hole index.
+When unpickling a hole, the corresponding splice lambda will be used to compute the contents of the hole.
+The lambda will receive as parameters quoted versions of the arguments of the hole.
+For example, to compute the contents of `<< 0; Double; x, n >>`, we will evaluate the following code:
+
+```scala
+  ((y: Expr[Double], m: Expr[Int]) => powerCode(y, m)).apply('x, 'n)
```
-In the above, the method `.value` maps a constant expression of the type
-`Expr[T]` to its value of the type `T`.
+The evaluation is not as trivial as it looks, because the lambda comes from compiled code and the rest is code that must be interpreted.
+We put the AST of `x` and `n` into `Expr` objects to simulate the quotes and then we use Java Reflection to call the `apply` method.
+
+We may have many holes in a quote and therefore as many lambdas.
+To avoid the instantiation of many lambdas, we can join them together into a single lambda.
+Apart from the list of arguments, this lambda will also take the index of the hole that is being evaluated.
+It will perform a switch match on the index and call the corresponding lambda in each branch.
+Each branch will also extract the arguments depending on the definition of the lambda.
+The applications of the original lambdas are beta-reduced to avoid extra overhead.

-With the right extractors, the "AsFunction" conversion
-that maps expressions over functions to functions over expressions can
-be implemented in user code:

```scala
-given AsFunction1[T, U]: Conversion[Expr[T => U], Expr[T] => Expr[U]] with
-  def apply(f: Expr[T => U]): Expr[T] => Expr[U] =
-    (x: Expr[T]) => f match
-      case Lambda(g) => g(x)
-      case _ => '{ ($f)($x) }
+(idx: Int, args: Seq[Any]) =>
+  idx match
+    case 0 => // for << 0; Double; x, n >>
+      val x = args(0).asInstanceOf[Expr[Double]]
+      val n = args(1).asInstanceOf[Expr[Int]]
+      powerCode(x, n)
+    case 1 => // for << 1; Double; n >>
+      val n = args(0).asInstanceOf[Expr[Int]]
+      powerCode('{2}, n)
```
-This assumes an extractor
+
+Type holes are handled similarly: when we replace the type aliases with holes, we keep track of the index of each hole.
+Instead of lambdas, we will have a list of references to instances of `Type`.
+From the following example, we would extract `t`, `u`, ...
+

```scala
-object Lambda:
-  def unapply[T, U](x: Expr[T => U]): Option[Expr[T] => Expr[U]]
+  '{ type T1 = t1.Underlying; type Tn = tn.Underlying; ... }
+// with holes
+  '{ type T1 = << 0; ... >>; type Tn = << n-1; ... >>; ... }
```
-Once we allow inspection of code via extractors, it's tempting to also
-add constructors that create typed trees directly without going
-through quotes. Most likely, those constructors would work over `Expr`
-types which lack a known type argument. For instance, an `Apply`
-constructor could be typed as follows:
+
+As the type holes are at the start of the quote, they will have the first `N` indices.
+This implies that we can place the references in a sequence `Seq(t, u, ...)` where the index in the sequence is the same as the hole index.
+
+Lastly, the quote itself is replaced by a call to `QuoteUnpickler.unpickleExpr` which will unpickle the AST, evaluate the holes, i.e., splices, and wrap the resulting AST in an `Expr[Int]`.
+This method takes the pickled `|tasty"..."|`, the types and the hole lambda.
+Similarly, `Type.of` is replaced with a call to `QuoteUnpickler.unpickleType` but only receives the pickled `|tasty"..."|` and the types.
+Because `QuoteUnpickler` is part of the self-type of the `Quotes` class, we have to cast the instance but know that this cast will always succeed.
+
```scala
-def Apply(fn: Expr[Any], args: List[Expr[Any]]): Expr[Any]
+quotes.asInstanceOf[runtime.QuoteUnpickler].unpickleExpr[T](
+  pickled = tasty"...",
+  types = Seq(...),
+  holes = (idx: Int, args: Seq[Any]) => idx match ...
+)
```
-This would allow constructing applications from lists of arguments
-without having to match the arguments one-by-one with the
-corresponding formal parameter types of the function. We then need "at
-the end" a method to convert an `Expr[Any]` to an `Expr[T]` where `T` is
-given from the outside. For instance, if `code` yields a `Expr[Any]`, then
-`code.atType[T]` yields an `Expr[T]`. The `atType` method has to be
-implemented as a primitive; it would check that the computed type
-structure of `Expr` is a subtype of the type structure representing
-`T`.
-Before going down that route, we should evaluate in detail the tradeoffs it
-presents. Constructing trees that are only verified _a posteriori_
-to be type correct loses a lot of guidance for constructing the right
-trees. So we should wait with this addition until we have more
-use-cases that help us decide whether the loss in type-safety is worth
-the gain in flexibility. In this context, it seems that deconstructing types is
-less error-prone than deconstructing terms, so one might also
-envisage a solution that allows the former but not the latter.
-
-## Conclusion
-
-Metaprogramming has a reputation of being difficult and confusing.
-But with explicit `Expr/Type` types and quotes and splices it can become
-downright pleasant. A simple strategy first defines the underlying quoted or unquoted
-values using `Expr` and `Type` and then inserts quotes and splices to make the types
-line up. Phase consistency is at the same time a great guideline
-where to insert a splice or a quote and a vital sanity check that
-the result makes sense.
+
+[^1]: [Scalable Metaprogramming in Scala 3](https://infoscience.epfl.ch/record/299370)
+[^2]: [Multi-stage programming with generative and analytical macros](https://dl.acm.org/doi/10.1145/3486609.3487203).
+[^3]: In quotes, identifiers starting with `$` must be surrounded by backticks (`` `$` ``). For example `$conforms` from `scala.Predef`.
diff --git a/docs/_docs/reference/metaprogramming/macros.md b/docs/_docs/reference/metaprogramming/macros.md
index 0be48ef2baf8..a91e69d985f0 100644
--- a/docs/_docs/reference/metaprogramming/macros.md
+++ b/docs/_docs/reference/metaprogramming/macros.md
@@ -6,843 +6,617 @@ nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/macros.h

 > When developing macros enable `-Xcheck-macros` scalac option flag to have extra runtime checks.

-## Macros: Quotes and Splices
+## Multi-Staging

-Macros are built on two well-known fundamental operations: quotation and splicing.
-Quotation is expressed as `'{...}` for expressions and splicing is expressed as `${ ... }`.
-Additionally, within a quote or a splice we can quote or splice identifiers directly (i.e. `'e` and `$e`).
-Readers may notice the resemblance of the two aforementioned syntactic -schemes with the familiar string interpolation syntax. +#### Quoted expressions +Multi-stage programming in Scala 3 uses quotes `'{..}` to delay, i.e., stage, execution of code and splices `${..}` to evaluate and insert code into quotes. +Quoted expressions are typed as `Expr[T]` with a covariant type parameter `T`. +It is easy to write statically safe code generators with these two concepts. +The following example shows a naive implementation of the $x^n$ mathematical operation. ```scala -println(s"Hello, $name, here is the result of 1 + 1 = ${1 + 1}") +import scala.quoted.* +def unrolledPowerCode(x: Expr[Double], n: Int)(using Quotes): Expr[Double] = + if n == 0 then '{ 1.0 } + else if n == 1 then x + else '{ $x * ${ unrolledPowerCode(x, n-1) } } ``` -In string interpolation we _quoted_ a string and then we _spliced_ into it, two others. The first, `name`, is a reference to a value of type [`String`](https://scala-lang.org/api/3.x/scala/Predef$.html#String-0), and the second is an arithmetic expression that will be _evaluated_ followed by the splicing of its string representation. - -Quotes and splices in this section allow us to treat code in a similar way, -effectively supporting macros. The entry point for macros is an inline method -with a top-level splice. We call it a top-level because it is the only occasion -where we encounter a splice outside a quote (consider as a quote the -compilation-unit at the call-site). For example, the code below presents an -`inline` method `assert` which calls at compile-time a method `assertImpl` with -a boolean expression tree as argument. `assertImpl` evaluates the expression and -prints it again in an error message if it evaluates to `false`. - ```scala -import scala.quoted.* - -inline def assert(inline expr: Boolean): Unit = - ${ assertImpl('expr) } - -def assertImpl(expr: Expr[Boolean])(using Quotes) = '{ - if !$expr then - throw AssertionError(s"failed assertion: ${${ showExpr(expr) }}") +'{ + val x = ... + ${ unrolledPowerCode('{x}, 3) } // evaluates to: x * x * x } - -def showExpr(expr: Expr[Boolean])(using Quotes): Expr[String] = - '{ [actual implementation later in this document] } ``` -If `e` is an expression, then `'{e}` represents the typed -abstract syntax tree representing `e`. If `T` is a type, then `Type.of[T]` -represents the type structure representing `T`. The precise -definitions of "typed abstract syntax tree" or "type-structure" do not -matter for now, the terms are used only to give some -intuition. Conversely, `${e}` evaluates the expression `e`, which must -yield a typed abstract syntax tree or type structure, and embeds the -result as an expression (respectively, type) in the enclosing program. +Quotes and splices are duals of each other. +For an arbitrary expression `x` of type `T` we have `${'{x}} = x` and for an arbitrary expression `e` of type `Expr[T]` we have `'{${e}} = e`. -Quotations can have spliced parts in them; in this case the embedded -splices are evaluated and embedded as part of the formation of the -quotation. +#### Abstract types +Quotes can handle generic and abstract types using the type class `Type[T]`. +A quote that refers to a generic or abstract type `T` requires a given `Type[T]` to be provided in the implicit scope. +The following examples show how `T` is annotated with a context bound (`: Type`) to provide an implicit `Type[T]`, or the equivalent `using Type[T]` parameter. 
-Quotes and splices can also be applied directly to identifiers. An identifier
-`$x` starting with a `$` that appears inside a quoted expression or type is treated as a
-splice `${x}`. Analogously, an quoted identifier `'x` that appears inside a splice
-is treated as a quote `'{x}`. See the Syntax section below for details.
+```scala
+import scala.quoted.*
+def singletonListExpr[T: Type](x: Expr[T])(using Quotes): Expr[List[T]] =
+  '{ List[T]($x) } // generic T used within a quote

-Quotes and splices are duals of each other.
-For arbitrary expressions `e` we have:
+def emptyListExpr[T](using Type[T], Quotes): Expr[List[T]] =
+  '{ List.empty[T] } // generic T used within a quote
+```
+If no other instance is found, the default `Type.of[T]` is used.
+The following example implicitly uses `Type.of[String]` and `Type.of[Option[U]]`.

```scala
-${'{e}} = e
-'{${e}} = e
+val list1: Expr[List[String]] =
+  singletonListExpr('{"hello"}) // requires a given `Type[String]`
+val list0: Expr[List[Option[U]]] =
+  emptyListExpr[Option[U]] // requires a given `Type[Option[U]]`
```
-## Types for Quotations
-
-The type signatures of quotes and splices can be described using
-two fundamental types:
-- `Expr[T]`: abstract syntax trees representing expressions of type `T`
-- `Type[T]`: non erased representation of type `T`.
-
-Quoting takes expressions of type `T` to expressions of type `Expr[T]`
-and it takes types `T` to expressions of type `Type[T]`. Splicing
-takes expressions of type `Expr[T]` to expressions of type `T` and it
-takes expressions of type `Type[T]` to types `T`.
-
-The two types can be defined in package [`scala.quoted`](https://scala-lang.org/api/3.x/scala/quoted.html) as follows:
+The `Type.of[T]` method is a primitive operation that the compiler will handle specially.
+It will provide the implicit if the type `T` is statically known, or if `T` contains some other types `Ui` for which we have an implicit `Type[Ui]`.
+In the example, `Type.of[String]` has a statically known type and `Type.of[Option[U]]` requires an implicit `Type[U]` in scope.
+#### Quote context
+We also track the current quotation context using a given `Quotes` instance.
+To create a quote `'{..}` we require a given `Quotes` context, which should be passed as a contextual parameter `(using Quotes)` to the function.
+Each splice will provide a new `Quotes` context within the scope of the splice.
+Therefore, quotes and splices can be seen as methods with the following signatures, but with special semantics.
```scala
-package scala.quoted
+def '[T](x: T): Quotes ?=> Expr[T] // def '[T](x: T)(using Quotes): Expr[T]

-sealed trait Expr[+T]
-sealed trait Type[T]
+def $[T](x: Quotes ?=> Expr[T]): T
```
-Both `Expr` and `Type` are abstract and sealed, so all constructors for
-these types are provided by the system. One way to construct values of
-these types is by quoting, the other is by type-specific lifting
-operations that will be discussed later on.
+The lambda with a question mark `?=>` is a contextual function; it is a lambda that takes its argument implicitly and provides it implicitly in the implementation of the lambda.
+`Quotes` are used for a variety of purposes that will be mentioned when covering those topics.

-## The Phase Consistency Principle
+## Quoted Values

-A fundamental *phase consistency principle* (PCP) regulates accesses
-to free variables in quoted and spliced code:
+#### Lifting

+While it is not possible to use cross-stage persistence of local variables, it is possible to lift them to the next stage.
+To this end, we provide the `Expr.apply` method, which can take a value and lift it into a quoted representation of the value.

```scala
+val expr1plus1: Expr[Int] = '{ 1 + 1 }

-- _For any free variable reference `x`, the number of quoted scopes and the number of spliced scopes between the reference to `x` and the definition of `x` must be equal_.
-
-Here, `this`-references count as free variables. On the other
-hand, we assume that all imports are fully expanded and that `_root_` is
-not a free variable. So references to global definitions are
-allowed everywhere.
+val expr2: Expr[Int] = Expr(1 + 1) // lift 2 into '{ 2 }
+```

-The phase consistency principle can be motivated as follows: First,
-suppose the result of a program `P` is some quoted text `'{ ... x
-... }` that refers to a free variable `x` in `P`. This can be
-represented only by referring to the original variable `x`. Hence, the
-result of the program will need to persist the program state itself as
-one of its parts. We don't want to do this, hence this situation
-should be made illegal. Dually, suppose a top-level part of a program
-is a spliced text `${ ... x ... }` that refers to a free variable `x`
-in `P`. This would mean that we refer during _construction_ of `P` to
-a value that is available only during _execution_ of `P`. This is of
-course impossible and therefore needs to be ruled out. Now, the
-small-step evaluation of a program will reduce quotes and splices in
-equal measure using the cancellation rules above. But it will neither
-create nor remove quotes or splices individually. So the PCP ensures
-that program elaboration will lead to neither of the two unwanted
-situations described above.
+While it looks type-wise similar to `'{ 1 + 1 }`, the semantics of `Expr(1 + 1)` are quite different.
+`Expr(1 + 1)` will not stage or delay any computation; the argument is evaluated to a value and then lifted into a quote.
+The quote will contain code that will create a copy of this value in the next stage.
+`Expr` is polymorphic and user-extensible via the `ToExpr` type class.

-In what concerns the range of features it covers, this form of macros introduces
-a principled metaprogramming framework that is quite close to the MetaML family of
-languages. One difference is that MetaML does not have an equivalent of the PCP -
-quoted code in MetaML _can_ access variables in its immediately enclosing
-environment, with some restrictions and caveats since such accesses involve
-serialization. However, this does not constitute a fundamental gain in
-expressiveness.
+```scala
+trait ToExpr[T]:
+  def apply(x: T)(using Quotes): Expr[T]
+```

-## From `Expr`s to Functions and Back
+We can implement a `ToExpr` using a `given` definition that will add the definition to the implicits in scope.
+In the following example, we show how to implement a `ToExpr[Option[T]]` for any liftable type `T`.
```scala -def to[T: Type, R: Type](f: Expr[T] => Expr[R])(using Quotes): Expr[T => R] = - '{ (x: T) => ${ f('x) } } - -def from[T: Type, R: Type](f: Expr[T => R])(using Quotes): Expr[T] => Expr[R] = - (x: Expr[T]) => '{ $f($x) } +given OptionToExpr[T: Type: ToExpr]: ToExpr[Option[T]] with + def apply(opt: Option[T])(using Quotes): Expr[Option[T]] = + opt match + case Some(x) => '{ Some[T]( ${Expr(x)} ) } + case None => '{ None } ``` -Note how the fundamental phase consistency principle works in two -different directions here for `f` and `x`. In the method `to`, the reference to `f` is -legal because it is quoted, then spliced, whereas the reference to `x` -is legal because it is spliced, then quoted. +The `ToExpr` for primitive types must be implemented as primitive operations in the system. +In our case, we use the reflection API to implement them. -They can be used as follows: +#### Extracting values from quotes +To be able to generate optimized code using the method `unrolledPowerCode`, the macro implementation `powerCode` needs to first +determine whether the argument passed as parameter `n` is a known constant value. +This can be achieved via _unlifting_ using the `Expr.unapply` extractor from our library implementation, which will only match if `n` is a quoted constant and extracts its value. ```scala -val f1: Expr[Int => String] = - to((x: Expr[Int]) => '{ $x.toString }) // '{ (x: Int) => x.toString } - -val f2: Expr[Int] => Expr[String] = - from('{ (x: Int) => x.toString }) // (x: Expr[Int]) => '{ ((x: Int) => x.toString)($x) } -f2('{2}) // '{ ((x: Int) => x.toString)(2) } +def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = + n match + case Expr(m) => // it is a constant: unlift code n='{m} into number m + unrolledPowerCode(x, m) + case _ => // not known: call power at run-time + '{ power($x, $n) } ``` -One limitation of `from` is that it does not β-reduce when a lambda is called immediately, as evidenced in the code `{ ((x: Int) => x.toString)(2) }`. -In some cases we want to remove the lambda from the code, for this we provide the method `Expr.betaReduce` that turns a tree -describing a function into a function mapping trees to trees. - +Alternatively, the `n.value` method can be used to get an `Option[Int]` with the value or `n.valueOrAbort` to get the value directly. ```scala -object Expr: - ... - def betaReduce[T](expr: Expr[T])(using Quotes): Expr[T] +def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = + // emits an error message if `n` is not a constant + unrolledPowerCode(x, n.valueOrAbort) ``` -`Expr.betaReduce` returns an expression that is functionally equivalent to e, however if e is of the form `((y1, ..., yn) => e2)(e1, ..., en)` then it optimizes the top most call by returning the result of beta-reducing the application. Otherwise returns expr. - -## Lifting Types - -Types are not directly affected by the phase consistency principle. -It is possible to use types defined at any level in any other level. -But, if a type is used in a subsequent stage it will need to be lifted to a `Type`. -Indeed, the definition of `to` above uses `T` in the next stage, there is a -quote but no splice between the parameter binding of `T` and its -usage. But the code can be rewritten by adding an explicit binding of a `Type[T]`: +`Expr.unapply` and all variants of `value` are polymorphic and user-extensible via a given `FromExpr` type class. 
```scala -def to[T, R](f: Expr[T] => Expr[R])(using t: Type[T], r: Type[R])(using Quotes): Expr[T => R] = - '{ (x: t.Underlying) => ${ f('x) } } +trait FromExpr[T]: + def unapply(x: Expr[T])(using Quotes): Option[T] ``` -In this version of `to`, the type of `x` is now the result of -inserting the type `Type[T]` and selecting its `Underlying`. +We can use `given` definitions to implement the `FromExpr` as we did for `ToExpr`. +The `FromExpr` for primitive types must be implemented as primitive operations in the system. +In our case, we use the reflection API to implement them. +To implement `FromExpr` for non-primitive types we use quote pattern matching (for example `OptionFromExpr`). + + +## Macros and Multi-Stage Programming -To avoid clutter, the compiler converts any type reference to -a type `T` in subsequent phases to `summon[Type[T]].Underlying`. +The system supports multi-stage macros and run-time multi-stage programming using the same quotation abstractions. -And to avoid duplication it does it once per type, and creates -an alias for that type at the start of the quote. +### Multi-Stage Macros -For instance, the user-level definition of `to`: +#### Macros +We can generalize the splicing abstraction to express macros. +A macro consists of a top-level splice that is not nested in any quote. +Conceptually, the contents of the splice are evaluated one stage earlier than the program. +In other words, the contents are evaluated while compiling the program. The generated code resulting from the macro replaces the splice in the program. ```scala -def to[T, R](f: Expr[T] => Expr[R])(using t: Type[T], r: Type[R])(using Quotes): Expr[T => R] = - '{ (x: T) => ${ f('x) } } +def power2(x: Double): Double = + ${ unrolledPowerCode('x, 2) } // x * x ``` -would be rewritten to +#### Inline macros +Since using the splices in the middle of a program is not as ergonomic as calling a function; we hide the staging mechanism from end-users of macros. We have a uniform way of calling macros and normal functions. +For this, _we restrict the use of top-level splices to only appear in inline methods_[^1][^2]. ```scala -def to[T, R](f: Expr[T] => Expr[R])(using t: Type[T], r: Type[R])(using Quotes): Expr[T => R] = - '{ - type T = summon[Type[T]].Underlying - (x: T) => ${ f('x) } - } +// inline macro definition +inline def powerMacro(x: Double, inline n: Int): Double = + ${ powerCode('x, 'n) } + +// user code +def power2(x: Double): Double = + powerMacro(x, 2) // x * x ``` -The `summon` query succeeds because there is a using parameter of -type `Type[T]`, and the reference to that value is -phase-correct. If that was not the case, the phase inconsistency for -`T` would be reported as an error. +The evaluation of the macro will only happen when the code is inlined into `power2`. +When inlined, the code is equivalent to the previous definition of `power2`. +A consequence of using inline methods is that none of the arguments nor the return type of the macro will have to mention the `Expr` types; this hides all aspects of metaprogramming from the end-users. -## Lifting Expressions +#### Avoiding a complete interpreter +When evaluating a top-level splice, the compiler needs to interpret the code that is within the splice. +Providing an interpreter for the entire language is quite tricky, and it is even more challenging to make that interpreter run efficiently. +To avoid needing a complete interpreter, we can impose the following restrictions on splices to simplify the evaluation of the code in top-level splices. 
+ * The top-level splice must contain a single call to a compiled static method. + * Arguments to the function are literal constants, quoted expressions (parameters), calls to `Type.of` for type parameters and a reference to `Quotes`. -Consider the following implementation of a staged interpreter that implements -a compiler through staging. +In particular, these restrictions disallow the use of splices in top-level splices. +Such a splice would require several stages of interpretation which would be unnecessarily inefficient. +#### Compilation stages +The macro implementation (i.e., the method called in the top-level splice) can come from any pre-compiled library. +This provides a clear difference between the stages of the compilation process. +Consider the following 3 source files defined in distinct libraries. ```scala -import scala.quoted.* - -enum Exp: - case Num(n: Int) - case Plus(e1: Exp, e2: Exp) - case Var(x: String) - case Let(x: String, e: Exp, in: Exp) - -import Exp.* +// Macro.scala +def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = ... +inline def powerMacro(x: Double, inline n: Int): Double = + ${ powerCode('x, 'n) } ``` -The interpreted language consists of numbers `Num`, addition `Plus`, and variables -`Var` which are bound by `Let`. Here are two sample expressions in the language: - ```scala -val exp = Plus(Plus(Num(2), Var("x")), Num(4)) -val letExp = Let("x", Num(3), exp) +// Lib.scala (depends on Macro.scala) +def power2(x: Double) = + ${ powerCode('x, '{2}) } // inlined from a call to: powerMacro(x, 2) ``` -Here’s a compiler that maps an expression given in the interpreted -language to quoted Scala code of type `Expr[Int]`. -The compiler takes an environment that maps variable names to Scala `Expr`s. - ```scala -import scala.quoted.* - -def compile(e: Exp, env: Map[String, Expr[Int]])(using Quotes): Expr[Int] = - e match - case Num(n) => - Expr(n) - case Plus(e1, e2) => - '{ ${ compile(e1, env) } + ${ compile(e2, env) } } - case Var(x) => - env(x) - case Let(x, e, body) => - '{ val y = ${ compile(e, env) }; ${ compile(body, env + (x -> 'y)) } } +// App.scala (depends on Lib.scala) +@main def app() = power2(3.14) ``` - -Running `compile(letExp, Map())` would yield the following Scala code: +One way to syntactically visualize this is to put the application in a quote that delays the compilation of the application. +Then the application dependencies can be placed in an outer quote that contains the quoted application, and we repeat this recursively for dependencies of dependencies. ```scala -'{ val y = 3; (2 + y) + 4 } +'{ // macro library (compilation stage 1) + def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = + ... + inline def powerMacro(x: Double, inline n: Int): Double = + ${ powerCode('x, 'n) } + '{ // library using macros (compilation stage 2) + def power2(x: Double) = + ${ powerCode('x, '{2}) } // inlined from a call to: powerMacro(x, 2) + '{ power2(3.14) /* app (compilation stage 3) */ } + } +} ``` -The body of the first clause, `case Num(n) => Expr(n)`, looks suspicious. `n` -is declared as an `Int`, yet it is converted to an `Expr[Int]` with `Expr()`. -Shouldn’t `n` be quoted? In fact this would not -work since replacing `n` by `'n` in the clause would not be phase -correct. +To make the system more versatile, we allow calling macros in the project where it is defined, with some restrictions. +For example, to compile `Macro.scala` and `Lib.scala` together in the same library. 
+To this end, we do not follow the simpler syntactic model and rely on semantic information from the source files.
+When compiling a source, if we detect a call to a macro that is not compiled yet, we delay the compilation of this source to the following compilation stage.
+In the example, we would delay the compilation of `Lib.scala` because it contains a compile-time call to `powerCode`.
+Compilation stages are repeated until all sources are compiled, or no progress can be made.
+If no progress is made, there was a cyclic dependency between the definition and the use of the macro.
+We also need to detect if at runtime the macro depends on sources that have not been compiled yet.
+These are detected by executing the macro and checking for JVM linking errors to classes that have not been compiled yet.

### Run-Time Multi-Stage Programming

See [Run-Time Multi-Stage Programming](./staging.md)

## Safety

Multi-stage programming is by design statically safe and cross-stage safe.

### Static Safety

#### Hygiene
+All identifier names are interpreted as symbolic references to the corresponding variable in the context of the quote.
+Therefore, while evaluating the quote, it is not possible to accidentally rebind a reference to a new variable with the same textual name.

#### Well-typed
+If a quote is well typed, then the generated code is well typed.
+This is a simple consequence of tracking the type of each expression.
+An `Expr[T]` can only be created from a quote that contains an expression of type `T`.
+Conversely, an `Expr[T]` can only be spliced in a location that expects a type `T`.
+As mentioned before, `Expr` is covariant in its type parameter.
+This means that an `Expr[T]` can contain an expression of a subtype of `T`.
+When spliced in a location that expects a type `T`, these expressions also have a valid type.

### Cross-Stage Safety

#### Level consistency
+We define the _staging level_ of some code as the number of quotes minus the number of splices surrounding said code.
+Local variables must be defined and used in the same staging level.
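+For instance, in the following sketch (hypothetical code written for this explanation, with a made-up name `example`), the levels can be read off by counting the quotes and splices that surround each point:
+
+```scala
+def example(using Quotes): Expr[Int] = // level 0
+  '{                                   // level 1
+    val x: Int = 5                     // `x` is defined at level 1
+    ${                                 // back to level 0 within the splice
+      val y: Expr[Int] = '{ x + 1 }    // `x` is used at level 1: consistent
+      y                                // `y` is defined and used at level 0: consistent
+    }
+  }
+```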
-Since `ToExpr` is a type class, its instances can be conditional. For example,
-a `List` is liftable if its element type is:
+It is never possible to access a local variable from a lower staging level as it does not yet exist.

```scala
-given [T: ToExpr : Type]: ToExpr[List[T]] with
-  def toExpr(xs: List[T]) = xs match
-    case head :: tail => '{ ${ Expr(head) } :: ${ toExpr(tail) } }
-    case Nil => '{ Nil: List[T] }
+def badPower(x: Double, n: Int): Double =
+  ${ unrolledPowerCode('x, n) } // error: value of `n` not known yet
```

-In the end, `ToExpr` resembles very much a serialization
-framework. Like the latter it can be derived systematically for all
-collections, case classes and enums. Note also that the synthesis
-of _type-tag_ values of type `Type[T]` is essentially the type-level
-analogue of lifting.
-Using lifting, we can now give the missing definition of `showExpr` in the introductory example:
+In the context of macros and _cross-platform portability_, that is,
+macros compiled on one machine but potentially executed on another,
+we cannot support cross-stage persistence of local variables.
+Therefore, local variables can only be accessed at precisely the same staging level in our system.

```scala
-def showExpr[T](expr: Expr[T])(using Quotes): Expr[String] =
-  val code: String = expr.show
-  Expr(code)
+def badPowerCode(x: Expr[Double], n: Int)(using Quotes): Expr[Double] =
+  // error: `n` potentially not available in the next execution environment
+  '{ power($x, n) }
```

-That is, the `showExpr` method converts its `Expr` argument to a string (`code`), and lifts
-the result back to an `Expr[String]` using `Expr.apply`.
-## Lifting Types
+The rules are slightly different for global definitions, such as `unrolledPowerCode`.
+It is possible to generate code that contains a reference to a _global_ definition such as in `'{ power(2, 4) }`.
+This is a limited form of cross-stage persistence that does not impede cross-platform portability, where we refer to the already compiled code for `power`.
+Each compilation step will lower the staging level by one while keeping global definitions.
+In consequence, we can refer to compiled definitions in macros such as `unrolledPowerCode` in `${ unrolledPowerCode('x, 2) }`.

-The previous section has shown that the metaprogramming framework has
-to be able to take a type `T` and convert it to a type tree of type
-`Type[T]` that can be reified. This means that all free variables of
-the type tree refer to types and values defined in the current stage.
+We can summarize level consistency in two rules:
+ * Local variables can be used only at the same staging level as their definition
+ * Global variables can be used at any staging level

-For a reference to a global class, this is easy: Just issue the fully
-qualified name of the class. Members of reifiable types are handled by
-just reifying the containing type together with the member name. But
-what to do for references to type parameters or local type definitions
-that are not defined in the current stage? Here, we cannot construct
-the `Type[T]` tree directly, so we need to get it from a recursive
-implicit search. For instance, to implement
+#### Type consistency
+As Scala uses type erasure, generic types will be erased at run-time and hence in any following stage.
+To ensure any quoted expression that refers to a generic type `T` does not lose the information it needs, we require a given `Type[T]` in scope.
+The `Type[T]` will carry over the non-erased representation of the type into the next phase.
+Therefore any generic type used at a higher staging level than its definition will require its `Type`.
+
+#### Scope extrusion
+Within the contents of a splice, it is possible to have a quote that refers to a local variable defined in the outer quote.
+If this quote is used within the splice, the variable will be in scope.
+However, if the quote is somehow _extruded_ outside the splice, then variables might not be in scope anymore.
+Quoted expressions can be extruded using side effects such as mutable state and exceptions.
+The following example shows how a quote can be extruded using mutable state.
```scala
-summon[Type[List[T]]]
+var x: Expr[T] = null
+'{ (y: T) => ${ x = 'y; 1 } }
+x // has value '{y} but y is not in scope
```
-where `T` is not defined in the current stage, we construct the type constructor
-of `List` applied to the splice of the result of searching for a given instance for `Type[T]`:
+A second way a variable can be extruded is through the `run` method.
+If `run` consumes a quoted variable reference, it will not be in scope anymore.
+The result will reference a variable that is defined in the next stage.
```scala
-Type.of[ List[ summon[Type[T]].Underlying ] ]
+'{ (x: Int) => ${ run('x); ... } }
+// evaluates to: '{ (x: Int) => ${ x; ... } }
```
-This is exactly the algorithm that Scala 2 uses to search for type tags.
-In fact Scala 2's type tag feature can be understood as a more ad-hoc version of
-`quoted.Type`. As was the case for type tags, the implicit search for a `quoted.Type`
-is handled by the compiler, using the algorithm sketched above.
+To catch both scope extrusion scenarios, our system restricts the use of quotes by only allowing a quote to be spliced if it was not extruded from a splice scope.
+Unlike level consistency, this is checked at run-time[^4] rather than compile-time to avoid making the static type system too complicated.

-## Relationship with `inline`
+Each `Quotes` instance contains a unique scope identifier and refers to its parent scope, forming a stack of identifiers.
+The parent of the scope of a `Quotes` is the scope of the `Quotes` used to create the enclosing quote.
+Top-level splices and `run` create new scope stacks.
+Every `Expr` knows in which scope it was created.
+When it is spliced, we check that the quote scope is either the same as the splice scope, or a parent scope thereof.

-Seen by itself, principled metaprogramming looks more like a framework for
-runtime metaprogramming than one for compile-time metaprogramming with macros.
-But combined with Scala 3's `inline` feature it can be turned into a compile-time
-system. The idea is that macro elaboration can be understood as a combination of
-a macro library and a quoted program. For instance, here's the `assert` macro
-again together with a program that calls `assert`.
+## Staged Lambdas

```scala
-object Macros:
+When staging programs in a functional language there are two fundamental abstractions: a staged lambda `Expr[T => U]` and a staging lambda `Expr[T] => Expr[U]`.

-  inline def assert(inline expr: Boolean): Unit =
-    ${ assertImpl('expr) }
+The first is a function that will exist in the next stage, whereas the second is a function that exists in the current stage.
+It is often convenient to have a mechanism to go from `Expr[T => U]` to `Expr[T] => Expr[U]` and vice versa.
- def assertImpl(expr: Expr[Boolean])(using Quotes) = - val failMsg: Expr[String] = Expr("failed assertion: " + expr.show) - '{ if !($expr) then throw new AssertionError($failMsg) } +```scala +def later[T: Type, U: Type](f: Expr[T] => Expr[U]): Expr[T => U] = + '{ (x: T) => ${ f('x) } } -@main def program = - val x = 1 - Macros.assert(x != 0) +def now[T: Type, U: Type](f: Expr[T => U]): Expr[T] => Expr[U] = + (x: Expr[T]) => '{ $f($x) } ``` -Inlining the `assert` function would give the following program: +Both conversions can be performed out of the box with quotes and splices. +But if `f` is a known lambda function, `'{ $f($x) }` will not beta-reduce the lambda in place. +This optimization is performed in a later phase of the compiler. +Not reducing the application immediately can simplify analysis of generated code. +Nevertheless, it is possible to beta-reduce the lambda in place using the `Expr.betaReduce` method. ```scala -@main def program = - val x = 1 - ${ Macros.assertImpl('{ x != 0}) } +def now[T: Type, U: Type](f: Expr[T => U]): Expr[T] => Expr[U] = + (x: Expr[T]) => Expr.betaReduce('{ $f($x) }) ``` -The example is only phase correct because `Macros` is a global value and -as such not subject to phase consistency checking. Conceptually that’s -a bit unsatisfactory. If the PCP is so fundamental, it should be -applicable without the global value exception. But in the example as -given this does not hold since both `assert` and `program` call -`assertImpl` with a splice but no quote. +The `betaReduce` method will beta-reduce the outermost application of the expression if possible (regardless of arity). +If it is not possible to beta-reduce the expression, then it will return the original expression. -However, one could argue that the example is really missing -an important aspect: The macro library has to be compiled in a phase -prior to the program using it, but in the code above, macro -and program are defined together. A more accurate view of -macros would be to have the user program be in a phase after the macro -definitions, reflecting the fact that macros have to be defined and -compiled before they are used. Hence, conceptually the program part -should be treated by the compiler as if it was quoted: +## Staged Constructors +To create new class instances in a later stage, we can create them using factory methods (usually `apply` methods of an `object`), or we can instantiate them with a `new`. +For example, we can write `Some(1)` or `new Some(1)`, creating the same value. +In Scala 3, using the factory method call notation will fall back to a `new` if no `apply` method is found. +We follow the usual staging rules when calling a factory method. +Similarly, when we use a `new C`, the constructor of `C` is implicitly called, which also follows the usual staging rules. +Therefore for an arbitrary known class `C`, we can use both `'{ C(...) }` or `'{ new C(...) }` as constructors. +## Staged Classes +Quoted code can contain any valid expression including local class definitions. +This allows the creation of new classes with specialized implementations. +For example, we can implement a new version of `Runnable` that will perform some optimized operation. ```scala -@main def program = '{ - val x = 1 - ${ Macros.assertImpl('{ x != 0 }) } +def mkRunnable(x: Int)(using Quotes): Expr[Runnable] = '{ + class MyRunnable extends Runnable: + def run(): Unit = ... 
// generate some custom code that uses `x`
+    new MyRunnable
+}
```
+
+The quoted class is a local class and its type cannot escape the enclosing quote.
+The class must be used inside the quote or an instance of it can be returned using a known interface (`Runnable` in this case).
+
+## Quote Pattern Matching
+
+It is sometimes necessary to analyze the structure of the code or decompose the code into its sub-expressions.
+A classic example is an embedded DSL, where a macro knows a set of definitions that it can reinterpret while compiling the code (for instance, to perform optimizations).
+In the following example, we extend our previous implementation of `powerCode` to look into `x` to perform further optimizations.

```scala
+def fusedPowCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] =
+  x match
+    case '{ power($y, $m) } => // we have (y^m)^n
+      fusedPowCode(y, '{ $n * $m }) // generate code for y^(n*m)
+    case _ =>
+      '{ power($x, $n) }
```

-## Scope Extrusion
+#### Sub-patterns

-Quotes and splices are duals as far as the PCP is concerned. But there is an
-additional restriction that needs to be imposed on splices to guarantee
-soundness: code in splices must be free of side effects. The restriction
-prevents code like this:

```scala
-var x: Expr[T] = ...
-'{ (y: T) => ${ x = 'y; 1 } }
-```
-
-This code, if it was accepted, would _extrude_ a reference to a quoted variable
-`y` from its scope. This would subsequently allow access to a variable outside the
-scope where it is defined, which is likely problematic.
The code is clearly -phase consistent, so we cannot use PCP to rule it out. Instead, we postulate a -future effect system that can guarantee that splices are pure. In the absence of -such a system we simply demand that spliced expressions are pure by convention, -and allow for undefined compiler behavior if they are not. This is analogous to -the status of pattern guards in Scala, which are also required, but not -verified, to be pure. - -[Multi-Stage Programming](./staging.md) introduces one additional method where -you can expand code at runtime with a method `run`. There is also a problem with -that invocation of `run` in splices. Consider the following expression: +In quoted patterns, the `$` binds the sub-expression to an expression `Expr` that can be used in that `case` branch. +The contents of `${..}` in a quote pattern are regular Scala patterns. +For example, we can use the `Expr(_)` pattern within the `${..}` to only match if it is a known value and extract it. ```scala -'{ (x: Int) => ${ run('x); 1 } } +def fusedUnrolledPowCode(x: Expr[Double], n: Int)(using Quotes): Expr[Double] = + x match + case '{ power($y, ${Expr(m)}) } => // we have (y^m)^n + fusedUnrolledPowCode(y, n * m) // generate code for y * ... * y + case _ => // ( n*m times ) + unrolledPowerCode(x, n) ``` -This is again phase correct, but will lead us into trouble. Indeed, evaluating -the splice will reduce the expression `run('x)` to `x`. But then the result +These value extraction sub-patterns can be polymorphic using an instance of `FromExpr`. +In the following example, we show the implementation of `OptionFromExpr` which internally uses the `FromExpr[T]` to extract the value using the `Expr(x)` pattern. ```scala -'{ (x: Int) => ${ x; 1 } } +given OptionFromExpr[T](using Type[T], FromExpr[T]): FromExpr[Option[T]] with + def unapply(x: Expr[Option[T]])(using Quotes): Option[Option[T]] = + x match + case '{ Some( ${Expr(x)} ) } => Some(Some(x)) + case '{ None } => Some(None) + case _ => None ``` -is no longer phase correct. To prevent this soundness hole it seems easiest to -classify `run` as a side-effecting operation. It would thus be prevented from -appearing in splices. In a base language with side effects we would have to do this -anyway: Since `run` runs arbitrary code it can always produce a side effect if -the code it runs produces one. -## Example Expansion -Assume we have two methods, `foreach` that takes an `Expr[Array[T]]` and a -consumer `f`, and `sum` that performs a sum by delegating to `foreach`. +#### Closed patterns +Patterns may contain two kinds of references: global references such as the call to the `power` method in `'{ power(...) }`, or references to bindings defined in the pattern such as `x` in `case '{ (x: Int) => x }`. +When extracting an expression from a quote, we need to ensure that we do not extrude any variable from the scope where it is defined. 
```scala -object Macros: - - def foreach[T](arr: Expr[Array[T]], f: Expr[T] => Expr[Unit]) - (using Type[T], Quotes): Expr[Unit] = '{ - var i: Int = 0 - while i < ($arr).length do - val element: T = ($arr)(i) - ${f('element)} - i += 1 - } - - def sum(arr: Expr[Array[Int]])(using Quotes): Expr[Int] = '{ - var sum = 0 - ${ foreach(arr, x => '{sum += $x}) } - sum - } - - inline def sum_m(arr: Array[Int]): Int = ${sum('arr)} - -end Macros +'{ (x: Int) => x + 1 } match + case '{ (y: Int) => $z } => + // should not match, otherwise: z = '{ x + 1 } ``` -A call to `sum_m(Array(1, 2, 3))` will first inline `sum_m`: - -```scala -val arr: Array[Int] = Array.apply(1, 2, 3) -${ _root_.Macros.sum('arr) } -``` +In this example, we see that the pattern should not match. +Otherwise, any use of the expression `z` would contain an unbound reference to `x`. +To avoid any such extrusion, we only match on a `${..}` if its expression is closed under the definitions within the pattern. +Therefore, the pattern will not match if the expression is not closed. -then it will call `sum`: +#### HOAS patterns +To allow extracting expressions that may contain extruded references we offer a _higher-order abstract syntax_ (HOAS) pattern `$f(y)` (or `$f(y1,...,yn)`). +This pattern will eta-expand the sub-expression with respect to `y` and bind it to `f`. +The lambda arguments will replace the variables that might have been extruded. ```scala -val arr: Array[Int] = Array.apply(1, 2, 3) -${ '{ - var sum = 0 - ${ foreach('arr, x => '{sum += $x}) } - sum -} } +'{ ((x: Int) => x + 1).apply(2) } match + case '{ ((y: Int) => $f(y)).apply($z: Int) } => + // f may contain references to `x` (replaced by `$y`) + // f = (y: Expr[Int]) => '{ $y + 1 } + f(z) // generates '{ 2 + 1 } ``` -and cancel the `${'{...}}`: - -```scala -val arr: Array[Int] = Array.apply(1, 2, 3) - -var sum = 0 -${ foreach('arr, x => '{sum += $x}) } -sum -``` -then it will extract `x => '{sum += $x}` into `f`, to have a value: +A HOAS pattern `$x(y1,...,yn)` will only match the expression if it does not contain references to variables defined in the pattern that are not in the set `y1,...,yn`. +In other words, the pattern will match if the expression only contains references to variables defined in the pattern that are in `y1,...,yn`. +Note that the HOAS patterns `$x()` are semantically equivalent to closed patterns `$x`. -```scala -val arr: Array[Int] = Array.apply(1, 2, 3) -var sum = 0 -val f = x => '{sum += $x} -${ _root_.Macros.foreach('arr, 'f)(Type.of[Int]) } -sum -``` +#### Type variables -and then call `foreach`: +Expressions may contain types that are not statically known. +For example, an `Expr[List[Int]]` may contain `list.map(_.toInt)` where `list` is a `List` of some type. +To cover all the possible cases we would need to explicitly match `list` on all possible types (`List[Int]`, `List[Int => Int]`, ...). +This is an infinite set of types and therefore pattern cases. +Even if we would know all possible types that a specific program could use, we may still end up with an unmanageable number of cases. +To overcome this, we introduce type variables in quoted patterns, which will match any type. +In the following example, we show how type variables `t` and `u` match all possible pairs of consecutive calls to `map` on lists. +In the quoted patterns, types named with lower cases are identified as type variables. +This follows the same notation as type variables used in normal patterns. 
```scala
-val arr: Array[Int] = Array.apply(1, 2, 3)
+def fuseMapCode(x: Expr[List[Int]])(using Quotes): Expr[List[Int]] =
+  x match
+    case '{ ($ls: List[t]).map[u]($f).map[Int]($g) } =>
+      '{ $ls.map($g.compose($f)) }
+    ...

-var sum = 0
-val f = x => '{sum += $x}
-${ '{
-  var i: Int = 0
-  while i < arr.length do
-    val element: Int = (arr)(i)
-    sum += element
-    i += 1
-  sum
-} }
+fuseMapCode('{ List(1.2).map(f).map(g) }) // '{ List(1.2).map(g.compose(f)) }
+fuseMapCode('{ List('a').map(h).map(i) }) // '{ List('a').map(i.compose(h)) }
```
+Variables `f` and `g` are inferred to be of type `Expr[t => u]` and `Expr[u => Int]` respectively.
+Subsequently, we can infer `$g.compose($f)` to be of type `Expr[t => Int]`, which is the type of the argument of `$ls.map(..)`.

-and cancel the `${'{...}}` again:
+Type variables are abstract types that will be erased; this implies that to reference them in the second quote we need a given `Type[t]` and `Type[u]`.
+The quoted pattern will implicitly provide those given types.
+At run-time, when the pattern matches, the types `t` and `u` will be known, and the `Type[t]` and `Type[u]` will contain the precise types in the expression.
+As `Expr` is covariant, the statically known type of the expression might not be the actual type.
+Type variables can also be used to recover the precise type of an expression.

```scala
-val arr: Array[Int] = Array.apply(1, 2, 3)
+def let(x: Expr[Any])(using Quotes): Expr[Any] =
+  x match
+    case '{ $x: t } =>
+      '{ val y: t = $x; y }

-var sum = 0
-val f = x => '{sum += $x}
-var i: Int = 0
-while i < arr.length do
-  val element: Int = (arr)(i)
-  sum += element
-  i += 1
-sum
+let('{1}) // will return an `Expr[Any]` that contains an `Expr[Int]`
```

-Finally cleanups and dead code elimination:
-
+While we can define type variables in the middle of the pattern, their normal form is to define them as a `type` with a lower-case name at the start of the pattern.
+We use the Scala backquote `` `t` `` naming convention, which interprets the string within the backquotes as a literal name identifier.
+This is typically used when we have names that contain special characters that are not allowed for normal Scala identifiers.
+But here we use it to explicitly state that this is a reference to that name and not the introduction of a new variable.
```scala
-val arr: Array[Int] = Array.apply(1, 2, 3)
-var sum = 0
-var i: Int = 0
-while i < arr.length do
-  val element: Int = arr(i)
-  sum += element
-  i += 1
-sum
+  case '{ type t; $x: `t` } =>
```
-
-## Find implicits within a macro
-
-Similarly to the `summonFrom` construct, it is possible to make implicit search available
-in a quote context. For this we simply provide `scala.quoted.Expr.summon`:
+This is a bit more verbose but has some expressivity advantages, such as allowing us to define bounds on the variables and to refer to them several times in any scope of the pattern.

```scala
-import scala.collection.immutable.{ TreeSet, HashSet }
-inline def setFor[T]: Set[T] = ${ setForExpr[T] }
-
-def setForExpr[T: Type](using Quotes): Expr[Set[T]] =
-  Expr.summon[Ordering[T]] match
-    case Some(ord) => '{ new TreeSet[T]()($ord) }
-    case _ => '{ new HashSet[T] }
+  case '{ type t >: List[Int] <: Seq[Int]; $x: `t` } =>
+  case '{ type t; $x: (`t`, `t`) } =>
```
-
-## Relationship with Transparent Inline
-[Inline](./inline.md) documents inlining. The code below introduces a transparent
-inline method that can calculate either a value of type `Int` or a value of type
-`String`.
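+To illustrate why this explicit form can be useful, here is a hedged sketch (the `swapPairCode` helper is ours, not part of these docs): referring to the same type variable twice makes the pattern match only pairs whose two components share one type.
+
+```scala
+import scala.quoted.*
+
+// Sketch only: swap the components of a pair, but only when both
+// components have the same type `t`, which is enforced by using
+// the type variable `t` twice in the pattern.
+def swapPairCode(x: Expr[Any])(using Quotes): Expr[Any] =
+  x match
+    case '{ type t; $p: (`t`, `t`) } =>
+      '{ ($p._2, $p._1) } // still an Expr[(t, t)]
+    case _ => x
+```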
+#### Type patterns
+It is possible to have only a type, with no expression of that type.
+To be able to inspect a type, we introduce the quoted type pattern `case '[..] =>`.
+It works the same way as a quoted pattern but is restricted to contain a type.
+Type variables can be used in quoted type patterns to extract a type.

```scala
-transparent inline def defaultOf(inline str: String) =
-  ${ defaultOfImpl('str) }
-
-def defaultOfImpl(strExpr: Expr[String])(using Quotes): Expr[Any] =
-  strExpr.valueOrError match
-    case "int" => '{1}
-    case "string" => '{"a"}
-
-// in a separate file
-val a: Int = defaultOf("int")
-val b: String = defaultOf("string")
-
+def empty[T: Type]: Expr[T] =
+  Type.of[T] match
+    case '[String] => '{ "" }
+    case '[List[t]] => '{ List.empty[t] }
+    ...
```

-## Defining a macro and using it in a single project
+`Type.of[T]` is used to summon the given instance of `Type[T]` in scope; it is equivalent to `summon[Type[T]]`.

-It is possible to define macros and use them in the same project as long as the implementation
-of the macros does not have run-time dependencies on code in the file where it is used.
-It might still have compile-time dependencies on types and quoted code that refers to the use-site file.
+#### Type testing and casting
+It is important to note that instance checks and casts on `Expr`, such as `isInstanceOf[Expr[T]]` and `asInstanceOf[Expr[T]]`, will only check if the instance is of the class `Expr` but will not be able to check the `T` argument.
+These cases will issue a warning at compile-time, but if they are ignored, they can result in unexpected behavior.

-To provide this functionality Scala 3 provides a transparent compilation mode where files that
-try to expand a macro but fail because the macro has not been compiled yet are suspended.
-If there are any suspended files when the compilation ends, the compiler will automatically restart
-compilation of the suspended files using the output of the previous (partial) compilation as macro classpath.
-In case all files are suspended due to cyclic dependencies the compilation will fail with an error.
+These operations can, however, be supported correctly by the system.
+For a simple type test, it is possible to use the `isExprOf[T]` method of `Expr` to check whether the expression is an instance of that type.
+Similarly, it is possible to use `asExprOf[T]` to cast an expression to a given type.
+These operations use a given `Type[T]` to work around type erasure.

-## Pattern matching on quoted expressions
-It is possible to deconstruct or extract values out of `Expr` using pattern matching.

+## Sub-Expression Transformation

-`scala.quoted` contains objects that can help extracting values from `Expr`.
-
-- `scala.quoted.Expr`/`scala.quoted.Exprs`: matches an expression of a value (resp. list of values) and returns the value (resp. list of values).
-- `scala.quoted.Const`/`scala.quoted.Consts`: Same as `Expr`/`Exprs` but only works on primitive values.
-- `scala.quoted.Varargs`: matches an explicit sequence of expressions and returns them. These sequences are useful to get individual `Expr[T]` out of a varargs expression of type `Expr[Seq[T]]`.
-
-These could be used in the following way to optimize any call to `sum` that has statically known values.
+The system provides a mechanism to transform all sub-expressions of an expression.
+This is useful when the sub-expressions we want to transform are deep in the expression.
+It is also necessary if the expression contains sub-expressions that cannot be matched using quoted patterns (such as local class definitions).

```scala
-inline def sum(inline args: Int*): Int = ${ sumExpr('args) }
-private def sumExpr(argsExpr: Expr[Seq[Int]])(using Quotes): Expr[Int] =
-  argsExpr match
-    case Varargs(args @ Exprs(argValues)) =>
-      // args is of type Seq[Expr[Int]]
-      // argValues is of type Seq[Int]
-      Expr(argValues.sum) // precompute result of sum
-    case Varargs(argExprs) => // argExprs is of type Seq[Expr[Int]]
-      val staticSum: Int = argExprs.map(_.value.getOrElse(0)).sum
-      val dynamicSum: Seq[Expr[Int]] = argExprs.filter(_.value.isEmpty)
-      dynamicSum.foldLeft(Expr(staticSum))((acc, arg) => '{ $acc + $arg })
-    case _ =>
-      '{ $argsExpr.sum }
-
-sum(1, 2, 3) // gets matched by Varargs
-
-val xs = List(1, 2, 3)
-sum(xs*) // doesn't get matched by Varargs
+trait ExprMap:
+  def transform[T](e: Expr[T])(using Type[T])(using Quotes): Expr[T]
+  def transformChildren[T](e: Expr[T])(using Type[T])(using Quotes): Expr[T] =
+    ...
```

-### Quoted patterns
-
-Quoted pattens allow deconstructing complex code that contains a precise structure, types or methods.
-Patterns `'{ ... }` can be placed in any location where Scala expects a pattern.
-
-For example
-
-```scala
-optimize {
-  sum(sum(1, a, 2), 3, b)
-} // should be optimized to 6 + a + b
-```
+Users can extend the `ExprMap` trait and implement the `transform` method.
+This interface is flexible and can be used to implement top-down, bottom-up, or other transformations.

```scala
-def sum(args: Int*): Int = args.sum
-inline def optimize(inline arg: Int): Int = ${ optimizeExpr('arg) }
-private def optimizeExpr(body: Expr[Int])(using Quotes): Expr[Int] =
-  body match
-    // Match a call to sum without any arguments
-    case '{ sum() } => Expr(0)
-    // Match a call to sum with an argument $n of type Int.
-    // n will be the Expr[Int] representing the argument.
-    case '{ sum($n) } => n
-    // Match a call to sum and extracts all its args in an `Expr[Seq[Int]]`
-    case '{ sum(${Varargs(args)}: _*) } => sumExpr(args)
-    case body => body
-
-private def sumExpr(args1: Seq[Expr[Int]])(using Quotes): Expr[Int] =
-  def flatSumArgs(arg: Expr[Int]): Seq[Expr[Int]] = arg match
-    case '{ sum(${Varargs(subArgs)}: _*) } => subArgs.flatMap(flatSumArgs)
-    case arg => Seq(arg)
-  val args2 = args1.flatMap(flatSumArgs)
-  val staticSum: Int = args2.map(_.value.getOrElse(0)).sum
-  val dynamicSum: Seq[Expr[Int]] = args2.filter(_.value.isEmpty)
-  dynamicSum.foldLeft(Expr(staticSum))((acc, arg) => '{ $acc + $arg })
+object OptimizeIdentity extends ExprMap:
+  def transform[T](e: Expr[T])(using Type[T])(using Quotes): Expr[T] =
+    transformChildren(e) match // bottom-up transformation
+      case '{ identity($x) } => x
+      case _ => e
```

-### Recovering precise types using patterns
+The `transformChildren` method is implemented as a primitive that knows how to reach all the direct sub-expressions and calls `transform` on each one.
+The type passed to `transform` is the expected type of the sub-expression within its enclosing expression.
+For example, while transforming `Some(1)` in `'{ val x: Option[Int] = Some(1); ... }`, the type will be `Option[Int]` and not `Some[Int]`.
+This implies that we can safely transform `Some(1)` into `None`.

-Sometimes it is necessary to get a more precise type for an expression. This can be achieved using the following pattern match.
+## Staged Implicit Summoning
+When summoning implicit arguments using `summon`, we will find the given instances in the current scope.
+It is possible to use `summon` to get staged implicit arguments by explicitly staging them first. +In the following example, we can pass an implicit `Ordering[T]` in a macro as an `Expr[Ordering[T]]` to its implementation. +Then we can splice it and give it implicitly in the next stage. ```scala -def f(expr: Expr[Any])(using Quotes) = expr match - case '{ $x: t } => - // If the pattern match succeeds, then there is - // some type `t` such that - // - `x` is bound to a variable of type `Expr[t]` - // - `t` is bound to a new type `t` and a given - // instance `Type[t]` is provided for it - // That is, we have `x: Expr[t]` and `given Type[t]`, - // for some (unknown) type `t`. -``` - -This might be used to then perform an implicit search as in: - -```scala -extension (inline sc: StringContext) - inline def showMe(inline args: Any*): String = ${ showMeExpr('sc, 'args) } - -private def showMeExpr(sc: Expr[StringContext], argsExpr: Expr[Seq[Any]])(using Quotes): Expr[String] = - import quotes.reflect.report - argsExpr match - case Varargs(argExprs) => - val argShowedExprs = argExprs.map { - case '{ $arg: tp } => - Expr.summon[Show[tp]] match - case Some(showExpr) => - '{ $showExpr.show($arg) } - case None => - report.error(s"could not find implicit for ${Type.show[Show[tp]]}", arg); '{???} - } - val newArgsExpr = Varargs(argShowedExprs) - '{ $sc.s($newArgsExpr: _*) } - case _ => - // `new StringContext(...).showMeExpr(args: _*)` not an explicit `showMeExpr"..."` - report.error(s"Args must be explicit", argsExpr) - '{???} - -trait Show[-T]: - def show(x: T): String - -// in a different file -given Show[Boolean] with - def show(b: Boolean) = "boolean!" +inline def treeSetFor[T](using ord: Ordering[T]): Set[T] = + ${ setExpr[T](using 'ord) } -println(showMe"${true}") +def setExpr[T:Type](using ord: Expr[Ordering[T]])(using Quotes): Expr[Set[T]] = + '{ given Ordering[T] = $ord; new TreeSet[T]() } ``` -### Open code patterns +We pass it as an implicit `Expr[Ordering[T]]` because there might be intermediate methods that can pass it along implicitly. -Quoted pattern matching also provides higher-order patterns to match open terms. If a quoted term contains a definition, -then the rest of the quote can refer to this definition. +An alternative is to summon implicit values in the scope where the macro is invoked. +Using the `Expr.summon` method we get an optional expression containing the implicit instance. +This provides the ability to search for implicit instances conditionally. ```scala -'{ - val x: Int = 4 - x * x -} -``` - -To match such a term we need to match the definition and the rest of the code, but we need to explicitly state that the rest of the code may refer to this definition. - -```scala -case '{ val y: Int = $x; $body(y): Int } => +def summon[T: Type](using Quotes): Option[Expr[T]] ``` -Here `$x` will match any closed expression while `$body(y)` will match an expression that is closed under `y`. Then -the subexpression of type `Expr[Int]` is bound to `body` as an `Expr[Int => Int]`. The extra argument represents the references to `y`. Usually this expression is used in combination with `Expr.betaReduce` to replace the extra argument. 
-
```scala
-inline def eval(inline e: Int): Int = ${ evalExpr('e) }
+inline def setFor[T]: Set[T] =
+  ${ setForExpr[T] }

-private def evalExpr(e: Expr[Int])(using Quotes): Expr[Int] = e match
-  case '{ val y: Int = $x; $body(y): Int } =>
-    // body: Expr[Int => Int] where the argument represents
-    // references to y
-    evalExpr(Expr.betaReduce('{$body(${evalExpr(x)})}))
-  case '{ ($x: Int) * ($y: Int) } =>
-    (x.value, y.value) match
-      case (Some(a), Some(b)) => Expr(a * b)
-      case _ => e
-  case _ => e
+def setForExpr[T: Type](using Quotes): Expr[Set[T]] =
+  Expr.summon[Ordering[T]] match
+    case Some(ord) =>
+      '{ new TreeSet[T]()($ord) }
+    case _ =>
+      '{ new HashSet[T] }
```

-```scala
-eval { // expands to the code: (16: Int)
-  val x: Int = 4
-  x * x
-}
-```
+## More details

-We can also close over several bindings using `$b(a1, a2, ..., an)`.
-To match an actual application we can use braces on the function part `${b}(a1, a2, ..., an)`.
+* [Specification](./macros-spec.md)
+* Scalable Metaprogramming in Scala 3[^1]

-## More details
-[More details](./macros-spec.md)
+[^1]: [Scalable Metaprogramming in Scala 3](https://infoscience.epfl.ch/record/299370)
+[^2]: [Semantics-preserving inlining for metaprogramming](https://dl.acm.org/doi/10.1145/3426426.3428486)
+[^3]: Implemented in the Scala 3 Dotty project (https://github.com/lampepfl/dotty); sbt library dependency: `"org.scala-lang" %% "scala3-staging" % scalaVersion.value`
+[^4]: Using the `-Xcheck-macros` compiler flag
diff --git a/docs/_docs/reference/metaprogramming/simple-smp.md b/docs/_docs/reference/metaprogramming/simple-smp.md
index 2ba0155ad329..61b062f55b87 100644
--- a/docs/_docs/reference/metaprogramming/simple-smp.md
+++ b/docs/_docs/reference/metaprogramming/simple-smp.md
@@ -23,7 +23,7 @@ replace evaluation contexts with contextual typing rules. While this is more
verbose, it makes it easier to set up the meta theory.

## Syntax
-```
+```ebnf
Terms         t  ::=  x                 variable
                      (x: T) => t       lambda
                      t t               application
diff --git a/docs/_docs/reference/metaprogramming/staging.md b/docs/_docs/reference/metaprogramming/staging.md
index e74d491402b5..1c154e09f50e 100644
--- a/docs/_docs/reference/metaprogramming/staging.md
+++ b/docs/_docs/reference/metaprogramming/staging.md
@@ -1,6 +1,6 @@
---
layout: doc-page
-title: "Runtime Multi-Stage Programming"
+title: "Run-Time Multi-Stage Programming"
nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/staging.html
---

@@ -60,7 +60,7 @@ impose the following restrictions on the use of splices.
The framework as discussed so far allows code to be staged, i.e. be prepared
to be executed at a later stage. To run that code, there is another method
in class `Expr` called `run`. Note that `$` and `run` both map from `Expr[T]`
-to `T` but only `$` is subject to the [PCP](./macros.md#the-phase-consistency-principle), whereas `run` is just a normal method.
+to `T` but only `$` is subject to [Cross-Stage Safety](./macros.md#cross-stage-safety), whereas `run` is just a normal method.
`scala.quoted.staging.run` provides a `Quotes` that can be used to show the expression in its scope.
On the other hand `scala.quoted.staging.withQuotes` provides a `Quotes` without evaluating the expression.
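+As a hedged illustration of this difference (our example, not from the original docs; it assumes the `scala3-staging` library is on the classpath), `run` turns an `Expr[T]` into a `T` at run time:
+
+```scala
+import scala.quoted.*
+import scala.quoted.staging.*
+
+// A Compiler instance is needed to compile staged code at run time.
+given Compiler = Compiler.make(getClass.getClassLoader)
+
+// Sketch only: generate code for x * x * x and run it.
+def cubeCode(x: Expr[Double])(using Quotes): Expr[Double] =
+  '{ $x * $x * $x }
+
+val cube: Double => Double = run { '{ (x: Double) => ${ cubeCode('x) } } }
+cube(2.0) // 8.0
+```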
diff --git a/docs/_docs/reference/new-types/dependent-function-types-spec.md b/docs/_docs/reference/new-types/dependent-function-types-spec.md
index f3237ddf7b9a..f603200b1ae0 100644
--- a/docs/_docs/reference/new-types/dependent-function-types-spec.md
+++ b/docs/_docs/reference/new-types/dependent-function-types-spec.md
@@ -8,7 +8,7 @@ Initial implementation in [PR #3464](https://github.com/lampepfl/dotty/pull/3464

## Syntax

-```
+```ebnf
FunArgTypes       ::=  InfixType
                    |  ‘(’ [ FunArgType {',' FunArgType } ] ‘)’
                    |  ‘(’ TypedFunParam {',' TypedFunParam } ‘)’
diff --git a/docs/_docs/reference/new-types/intersection-types-spec.md b/docs/_docs/reference/new-types/intersection-types-spec.md
index 346c57c004f0..8d332fc6ed29 100644
--- a/docs/_docs/reference/new-types/intersection-types-spec.md
+++ b/docs/_docs/reference/new-types/intersection-types-spec.md
@@ -12,7 +12,7 @@ with the usual precedence and subject to usual resolving rules.
Unless shadowed by another definition, it resolves to the type `scala.&`,
which acts as a type alias to an internal representation of intersection types.

-```
+```ebnf
Type ::= ...| InfixType
InfixType ::= RefinedType {id [nl] RefinedType}
```
diff --git a/docs/_docs/reference/new-types/type-lambdas-spec.md b/docs/_docs/reference/new-types/type-lambdas-spec.md
index 76937e5160f7..7f7053a13ddd 100644
--- a/docs/_docs/reference/new-types/type-lambdas-spec.md
+++ b/docs/_docs/reference/new-types/type-lambdas-spec.md
@@ -6,7 +6,7 @@ nightlyOf: https://docs.scala-lang.org/scala3/reference/new-types/type-lambdas-s

## Syntax

-```
+```ebnf
Type              ::=  ... |  TypeParamClause ‘=>>’ Type
TypeParamClause   ::=  ‘[’ TypeParam {‘,’ TypeParam} ‘]’
TypeParam         ::=  {Annotation} (id [HkTypeParamClause] | ‘_’) TypeBounds
diff --git a/docs/_docs/reference/other-new-features/creator-applications.md b/docs/_docs/reference/other-new-features/creator-applications.md
index 81f09d897955..8b1de02b2f25 100644
--- a/docs/_docs/reference/other-new-features/creator-applications.md
+++ b/docs/_docs/reference/other-new-features/creator-applications.md
@@ -47,8 +47,12 @@ be selected with `apply` (or be applied to arguments, in which case the `apply`
inserted).

Constructor proxies are also not allowed to shadow normal definitions. That is,
-if an identifier resolves to a constructor proxy, and the same identifier is also
-defined or imported in some other scope, an ambiguity is reported.
+an ambiguity is reported if
+
+ - an identifier resolves to a constructor proxy,
+ - the same identifier is also defined or imported in some other scope, and
+ - the other reference can be applied to a (possibly empty) parameter list. That
+   is, it refers either to a method or to a value containing an `apply` method as a member.

## Motivation
diff --git a/docs/_docs/reference/other-new-features/export.md b/docs/_docs/reference/other-new-features/export.md
index 41104a54e4a6..e8482cb343d9 100644
--- a/docs/_docs/reference/other-new-features/export.md
+++ b/docs/_docs/reference/other-new-features/export.md
@@ -176,7 +176,7 @@ extension (x: String)

## Syntax changes:

-```
+```ebnf
TemplateStat ::= ...
             | Export
TopStat ::= ...
diff --git a/docs/_docs/reference/other-new-features/indentation.md b/docs/_docs/reference/other-new-features/indentation.md
index 75306ec6f90d..9963d1ee7577 100644
--- a/docs/_docs/reference/other-new-features/indentation.md
+++ b/docs/_docs/reference/other-new-features/indentation.md
@@ -100,7 +100,7 @@ There are two rules:

- An `<outdent>` is finally inserted in front of a comma that follows a statement sequence starting with an `<indent>` if the indented region is itself enclosed in parentheses.

-It is an error if the indentation width of the token following an `<outdent>` does not match the indentation of some previous line in the enclosing indentation region. For instance, the following would be rejected.
+It is generally an error if the indentation width of the token following an `<outdent>` does not match the indentation of some previous line in the enclosing indentation region. For instance, the following would be rejected.

```scala
if x < 0 then
@@ -109,6 +109,19 @@ if x < 0 then
     x
```

+However, there is one exception to this rule: if the next line starts with a '`.`' _and_ the indentation
+width is different from the indentation widths of the two neighboring regions by more than a single space, the line is accepted. For instance, the following is OK:
+
+```scala
+xs.map: x =>
+    x + 1
+  .filter: x =>
+    x > 0
+```
+Here, the line starting with `.filter` does not have an indentation level matching a previous line,
+but it is still accepted since it starts with a '`.`' and differs by at least two spaces from the
+indentation levels of both the region that is closed and the next outer region.
+
Indentation tokens are only inserted in regions where newline statement separators are also inferred:
at the top-level, inside braces `{...}`, but not inside parentheses `(...)`, patterns or types.
@@ -174,12 +187,12 @@ The syntax changes allowing this are as follows:

Define for an arbitrary sequence of tokens or non-terminals `TS`:

-```
+```ebnf
:<<< TS >>>  ::=  ‘{’ TS ‘}’
               |  <indent> TS <outdent>
```
Then the grammar changes as follows:

-```
+```ebnf
TemplateBody ::= :<<< [SelfType] TemplateStat {semi TemplateStat} >>>
EnumBody     ::= :<<< [SelfType] EnumStat {semi EnumStat} >>>
Refinement   ::= :<<< [RefineDcl] {semi [RefineDcl]} >>>
@@ -229,7 +242,7 @@ xs.foldLeft(0): (x, y) =>

The grammar changes for optional braces around arguments are as follows.

-```
+```ebnf
SimpleExpr ::= ...
             | SimpleExpr ColonArgument
InfixExpr ::= ...
@@ -431,7 +444,7 @@ If none of these criteria apply, it's often better to not use an end marker sinc

### Syntax

-```
+```ebnf
EndMarker     ::=  ‘end’ EndMarkerTag    -- when followed by EOL
EndMarkerTag  ::=  id | ‘if’ | ‘while’ | ‘for’ | ‘match’ | ‘try’
                |  ‘new’ | ‘this’ | ‘given’ | ‘extension’ | ‘val’
diff --git a/docs/_docs/reference/other-new-features/opaques-details.md b/docs/_docs/reference/other-new-features/opaques-details.md
index 87e56e240481..d285ec8e8325 100644
--- a/docs/_docs/reference/other-new-features/opaques-details.md
+++ b/docs/_docs/reference/other-new-features/opaques-details.md
@@ -6,7 +6,7 @@ nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/opaqu

## Syntax

-```
+```ebnf
Modifier ::= ...
           | ‘opaque’
```
diff --git a/docs/_docs/reference/syntax.md b/docs/_docs/reference/syntax.md
index bc709fb1f870..a705c5a3fd79 100644
--- a/docs/_docs/reference/syntax.md
+++ b/docs/_docs/reference/syntax.md
@@ -21,51 +21,48 @@ productions map to AST nodes.

The following description of Scala tokens uses literal characters `‘c’` when
referring to the ASCII fragment `\u0000` – `\u007F`.
-_Unicode escapes_ are used to represent the [Unicode character](https://www.w3.org/International/articles/definitions-characters/) with the given -hexadecimal code: - -``` -UnicodeEscape ::= ‘\’ ‘u’ {‘u’} hexDigit hexDigit hexDigit hexDigit -hexDigit ::= ‘0’ | … | ‘9’ | ‘A’ | … | ‘F’ | ‘a’ | … | ‘f’ -``` - Informal descriptions are typeset as `“some comment”`. ## Lexical Syntax -The lexical syntax of Scala is given by the following grammar in EBNF -form. +The lexical syntax of Scala is given by the following grammar in EBNF form: -``` +```ebnf whiteSpace ::= ‘\u0020’ | ‘\u0009’ | ‘\u000D’ | ‘\u000A’ -upper ::= ‘A’ | … | ‘Z’ | ‘\$’ | ‘_’ “… and Unicode category Lu” -lower ::= ‘a’ | … | ‘z’ “… and Unicode category Ll” -letter ::= upper | lower “… and Unicode categories Lo, Lt, Nl” -digit ::= ‘0’ | … | ‘9’ +upper ::= ‘A’ | ... | ‘Z’ | ‘$’ and any character in Unicode categories Lu, Lt or Nl, + and any character in Unicode categories Lo and Lm that doesn't have + contributory property Other_Lowercase +lower ::= ‘a’ | ... | ‘z’ | ‘_’ and any character in Unicode category Ll, + and any character in Unicode categories Lo or Lm that has contributory + property Other_Lowercase +letter ::= upper | lower +digit ::= ‘0’ | ... | ‘9’ paren ::= ‘(’ | ‘)’ | ‘[’ | ‘]’ | ‘{’ | ‘}’ delim ::= ‘`’ | ‘'’ | ‘"’ | ‘.’ | ‘;’ | ‘,’ opchar ::= ‘!’ | ‘#’ | ‘%’ | ‘&’ | ‘*’ | ‘+’ | ‘-’ | ‘/’ | ‘:’ | ‘<’ | ‘=’ | ‘>’ | ‘?’ | ‘@’ | ‘\’ | ‘^’ | ‘|’ | ‘~’ - “… and Unicode categories Sm, So” -printableChar ::= “all characters in [\u0020, \u007E] inclusive” + and any character in Unicode categories Sm or So +printableChar ::= all characters in [\u0020, \u007E] inclusive +UnicodeEscape ::= ‘\’ ‘u’ {‘u’} hexDigit hexDigit hexDigit hexDigit +hexDigit ::= ‘0’ | ... | ‘9’ | ‘A’ | ... | ‘F’ | ‘a’ | ... 
| ‘f’ charEscapeSeq ::= ‘\’ (‘b’ | ‘t’ | ‘n’ | ‘f’ | ‘r’ | ‘"’ | ‘'’ | ‘\’) +escapeSeq ::= UnicodeEscape | charEscapeSeq op ::= opchar {opchar} varid ::= lower idrest -alphaid ::= upper idrest - | varid +boundvarid ::= varid + | ‘`’ varid ‘`’ plainid ::= alphaid | op id ::= plainid - | ‘`’ { charNoBackQuoteOrNewline | UnicodeEscape | charEscapeSeq } ‘`’ + | ‘`’ { charNoBackQuoteOrNewline | escapeSeq } ‘`’ idrest ::= {letter | digit} [‘_’ op] quoteId ::= ‘'’ alphaid spliceId ::= ‘$’ alphaid ; integerLiteral ::= (decimalNumeral | hexNumeral) [‘L’ | ‘l’] -decimalNumeral ::= ‘0’ | nonZeroDigit [{digit | ‘_’} digit] +decimalNumeral ::= ‘0’ | digit [{digit | ‘_’} digit] hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit [{hexDigit | ‘_’} hexDigit] -nonZeroDigit ::= ‘1’ | … | ‘9’ floatingPointLiteral ::= [decimalNumeral] ‘.’ digit [{digit | ‘_’} digit] [exponentPart] [floatType] @@ -76,25 +73,25 @@ floatType ::= ‘F’ | ‘f’ | ‘D’ | ‘d’ booleanLiteral ::= ‘true’ | ‘false’ -characterLiteral ::= ‘'’ (printableChar | charEscapeSeq) ‘'’ +characterLiteral ::= ‘'’ (charNoQuoteOrNewline | escapeSeq) ‘'’ stringLiteral ::= ‘"’ {stringElement} ‘"’ | ‘"""’ multiLineChars ‘"""’ -stringElement ::= printableChar \ (‘"’ | ‘\’) - | UnicodeEscape - | charEscapeSeq -multiLineChars ::= {[‘"’] [‘"’] char \ ‘"’} {‘"’} -processedStringLiteral - ::= alphaid ‘"’ {[‘\’] processedStringPart | ‘\\’ | ‘\"’} ‘"’ - | alphaid ‘"""’ {[‘"’] [‘"’] char \ (‘"’ | ‘$’) | escape} {‘"’} ‘"""’ -processedStringPart +stringElement ::= charNoDoubleQuoteOrNewline + | escapeSeq +multiLineChars ::= {[‘"’] [‘"’] charNoDoubleQuote} {‘"’} + +interpolatedString + ::= alphaid ‘"’ {[‘\’] interpolatedStringPart | ‘\\’ | ‘\"’} ‘"’ + | alphaid ‘"""’ {[‘"’] [‘"’] char \ (‘"’ | ‘\$’) | escape} {‘"’} ‘"""’ +interpolatedStringPart ::= printableChar \ (‘"’ | ‘$’ | ‘\’) | escape -escape ::= ‘$$’ - | ‘$’ letter { letter | digit } - | ‘{’ Block [‘;’ whiteSpace stringFormat whiteSpace] ‘}’ -stringFormat ::= {printableChar \ (‘"’ | ‘}’ | ‘ ’ | ‘\t’ | ‘\n’)} - -symbolLiteral ::= ‘'’ plainid // until 2.13 +escape ::= ‘\$\$’ + | ‘\$"’ + | ‘\$’ alphaid + | ‘\$’ BlockExpr +alphaid ::= upper idrest + | varid comment ::= ‘/*’ “any sequence of characters; nested comments are allowed” ‘*/’ | ‘//’ “any sequence of characters up to end of line” @@ -105,7 +102,10 @@ semi ::= ‘;’ | nl {nl} ## Optional Braces -The lexical analyzer also inserts `indent` and `outdent` tokens that represent regions of indented code [at certain points](./other-new-features/indentation.md). +The principle of optional braces is that any keyword that can be followed by `{` can also be followed by an indented block, without needing an intervening `:`. +(Allowing an optional `:` would be counterproductive since it would introduce several ways to do the same thing.) + +The lexical analyzer inserts `indent` and `outdent` tokens that represent regions of indented code [at certain points](./other-new-features/indentation.md). In the context-free productions below we use the notation `<<< ts >>>` to indicate a token sequence `ts` that is either enclosed in a pair of braces `{ ts }` or that constitutes an indented region `indent ts outdent`. 
Analogously, the @@ -160,7 +160,7 @@ SimpleLiteral ::= [‘-’] integerLiteral | characterLiteral | stringLiteral Literal ::= SimpleLiteral - | processedStringLiteral + | interpolatedStringLiteral | symbolLiteral | ‘null’ @@ -338,9 +338,6 @@ ArgumentPatterns ::= ‘(’ [Patterns] ‘)’ ClsTypeParamClause::= ‘[’ ClsTypeParam {‘,’ ClsTypeParam} ‘]’ ClsTypeParam ::= {Annotation} [‘+’ | ‘-’] id [HkTypeParamClause] TypeParamBounds -DefTypeParamClause::= ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ -DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeParamBounds - TypTypeParamClause::= ‘[’ TypTypeParam {‘,’ TypTypeParam} ‘]’ TypTypeParam ::= {Annotation} id [HkTypeParamClause] TypeBounds @@ -352,13 +349,20 @@ ClsParamClause ::= [nl] ‘(’ ClsParams ‘)’ | [nl] ‘(’ ‘using’ (ClsParams | FunArgTypes) ‘)’ ClsParams ::= ClsParam {‘,’ ClsParam} ClsParam ::= {Annotation} [{Modifier} (‘val’ | ‘var’) | ‘inline’] Param -Param ::= id ‘:’ ParamType [‘=’ Expr] -DefParamClauses ::= {DefParamClause} [[nl] ‘(’ [‘implicit’] DefParams ‘)’] -DefParamClause ::= [nl] ‘(’ DefParams ‘)’ | UsingParamClause -UsingParamClause ::= [nl] ‘(’ ‘using’ (DefParams | FunArgTypes) ‘)’ -DefParams ::= DefParam {‘,’ DefParam} -DefParam ::= {Annotation} [‘inline’] Param +TypelessClauses ::= TypelessClause {TypelessClause} +TypelessClause ::= DefTermParamClause + | UsingParamClause + +DefTypeParamClause::= [nl] ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ +DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeParamBounds +DefTermParamClause::= [nl] ‘(’ [DefTermParams] ‘)’ +UsingParamClause ::= [nl] ‘(’ ‘using’ (DefTermParams | FunArgTypes) ‘)’ +DefImplicitClause ::= [nl] ‘(’ ‘implicit’ DefTermParams ‘)’ + +DefTermParams ::= DefTermParam {‘,’ DefTermParam} +DefTermParam ::= {Annotation} [‘inline’] Param +Param ::= id ‘:’ ParamType [‘=’ Expr] ``` ### Bindings and Imports @@ -409,8 +413,8 @@ Dcl ::= RefineDcl ValDcl ::= ids ‘:’ Type VarDcl ::= ids ‘:’ Type DefDcl ::= DefSig ‘:’ Type -DefSig ::= id [DefTypeParamClause] DefParamClauses -TypeDcl ::= id [TypeParamClause] {FunParamClause} TypeBounds [‘=’ Type] +DefSig ::= id [DefTypeParamClause] [TypelessClauses] [DefImplicitClause] +TypeDcl ::= id [TypeParamClause] {FunParamClause} TypeBounds Def ::= ‘val’ PatDef | ‘var’ PatDef @@ -420,7 +424,7 @@ Def ::= ‘val’ PatDef PatDef ::= ids [‘:’ Type] ‘=’ Expr | Pattern2 [‘:’ Type] ‘=’ Expr DefDef ::= DefSig [‘:’ Type] ‘=’ Expr - | ‘this’ DefParamClause DefParamClauses ‘=’ ConstrExpr + | ‘this’ TypelessClauses [DefImplicitClause] ‘=’ ConstrExpr TmplDef ::= ([‘case’] ‘class’ | ‘trait’) ClassDef | [‘case’] ‘object’ ObjectDef @@ -432,10 +436,10 @@ ConstrMods ::= {Annotation} [AccessModifier] ObjectDef ::= id [Template] EnumDef ::= id ClassConstr InheritClauses EnumBody GivenDef ::= [GivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) -GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefParamClause`, `UsingParamClause` must be present +GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefTypeParamClause`, `UsingParamClause` must be present StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} - ‘(’ DefParam ‘)’ {UsingParamClause} ExtMethods + ‘(’ DefTermParam ‘)’ {UsingParamClause} ExtMethods ExtMethods ::= ExtMethod | [nl] <<< ExtMethod {semi ExtMethod} >>> ExtMethod ::= {Annotation [nl]} {Modifier} ‘def’ DefDef | Export diff --git a/docs/_layouts/base.html b/docs/_layouts/base.html index feb79d1590a0..bc8b3ab26bb0 
100644 --- a/docs/_layouts/base.html +++ b/docs/_layouts/base.html @@ -1,5 +1,6 @@ + {{ content }} diff --git a/docs/_layouts/blog-page.html b/docs/_layouts/blog-page.html index c5d0fe8875e7..7d1a7439f68a 100644 --- a/docs/_layouts/blog-page.html +++ b/docs/_layouts/blog-page.html @@ -5,14 +5,25 @@

{{ page.title }}

{% if page.subTitle %}
@@ -22,14 +33,4 @@

{{ page.title }}

{{ content }} - - {% if page.author and page.authorImg %} -
-
- - - {{ page.author }} - -
- {% endif %}
diff --git a/docs/_spec/01-lexical-syntax.md b/docs/_spec/01-lexical-syntax.md new file mode 100644 index 000000000000..de11de10402f --- /dev/null +++ b/docs/_spec/01-lexical-syntax.md @@ -0,0 +1,567 @@ +--- +title: Lexical Syntax +layout: default +chapter: 1 +--- + +# Lexical Syntax + +Scala source code consists of Unicode text. + +The program text is tokenized as described in this chapter. +See the last section for special support for XML literals, which are parsed in _XML mode_. + +To construct tokens, characters are distinguished according to the following classes (Unicode general category given in parentheses): + +1. Whitespace characters. `\u0020 | \u0009 | \u000D | \u000A`. +1. Letters, which include lower case letters (`Ll`), upper case letters (`Lu`), title case letters (`Lt`), other letters (`Lo`), modifier letters (`Lm`), letter numerals (`Nl`) and the two characters `\u0024 ‘$’` and `\u005F ‘_’`. +1. Digits `‘0’ | ... | ‘9’`. +1. Parentheses `‘(’ | ‘)’ | ‘[’ | ‘]’ | ‘{’ | ‘}’ `. +1. Delimiter characters ``‘`’ | ‘'’ | ‘"’ | ‘.’ | ‘;’ | ‘,’ ``. +1. Operator characters. These consist of all printable ASCII characters (`\u0020` - `\u007E`) that are in none of the sets above, mathematical symbols (`Sm`) and other symbols (`So`). + +## Optional Braces + +The principle of optional braces is that any keyword that can be followed by `{` can also be followed by an indented block, without needing an intervening `:`. +(Allowing an optional `:` would be counterproductive since it would introduce several ways to do the same thing.) + +The lexical analyzer inserts `indent` and `outdent` tokens that represent regions of indented code [at certain points](./other-new-features/indentation.md). + +´\color{red}{\text{TODO SCALA3: Port soft-modifier.md and link it here.}}´ + +In the context-free productions below we use the notation `<<< ts >>>` to indicate a token sequence `ts` that is either enclosed in a pair of braces `{ ts }` or that constitutes an indented region `indent ts outdent`. +Analogously, the notation `:<<< ts >>>` indicates a token sequence `ts` that is either enclosed in a pair of braces `{ ts }` or that constitutes an indented region `indent ts outdent` that follows a `colon` token. + +A `colon` token reads as the standard colon "`:`" but is generated instead of it where `colon` is legal according to the context free syntax, but only if the previous token is an alphanumeric identifier, a backticked identifier, or one of the tokens `this`, `super`, `new`, "`)`", and "`]`". + +``` +colon ::= ':' -- with side conditions explained above + <<< ts >>> ::= ‘{’ ts ‘}’ + | indent ts outdent +:<<< ts >>> ::= [nl] ‘{’ ts ‘}’ + | colon indent ts outdent +``` + +## Identifiers + +```ebnf +op ::= opchar {opchar} +varid ::= lower idrest +boundvarid ::= varid + | ‘`’ varid ‘`’ +alphaid ::= upper idrest + | varid +plainid ::= alphaid + | op +id ::= plainid + | ‘`’ { charNoBackQuoteOrNewline | escapeSeq } ‘`’ +idrest ::= {letter | digit} [‘_’ op] +escapeSeq ::= UnicodeEscape | charEscapeSeq +UnicodeEscape ::= ‘\’ ‘u’ {‘u’} hexDigit hexDigit hexDigit hexDigit +hexDigit ::= ‘0’ | ... | ‘9’ | ‘A’ | ... | ‘F’ | ‘a’ | ... | ‘f’ +``` + +There are three ways to form an identifier. +First, an identifier can start with a letter, followed by an arbitrary sequence of letters and digits. +This may be followed by underscore `‘_‘` characters and another string composed of either letters and digits or of operator characters. 
+Second, an identifier can start with an operator character followed by an arbitrary sequence of operator characters. +The preceding two forms are called _plain_ identifiers. +Finally, an identifier may also be formed by an arbitrary string between backquotes (host systems may impose some restrictions on which strings are legal for identifiers). +The identifier then is composed of all characters excluding the backquotes themselves. + +As usual, the longest match rule applies. +For instance, the string + +```scala +big_bob++=`def` +``` + +decomposes into the three identifiers `big_bob`, `++=`, and +`def`. + +The rules for pattern matching further distinguish between _variable identifiers_, which start with a lower case letter or `_`, and _constant identifiers_, which do not. + +For this purpose, lower case letters include not only a-z, but also all characters in Unicode category Ll (lowercase letter), as well as all letters that have contributory property Other_Lowercase, except characters in category Nl (letter numerals), which are never taken as lower case. + +The following are examples of variable identifiers: + +> ```scala +> x maxIndex p2p empty_? +> `yield` αρετη _y dot_product_* +> __system _MAX_LEN_ +> ªpple ʰelper +> ``` + +Some examples of constant identifiers are + +> ```scala +> + Object $reserved Džul ǂnûm +> ⅰ_ⅲ Ⅰ_Ⅲ ↁelerious ǃqhàà ʹthatsaletter +> ``` + +The ‘$’ character is reserved for compiler-synthesized identifiers. +User programs should not define identifiers that contain ‘$’ characters. + +### Regular keywords + +The following names are reserved words instead of being members of the syntactic class `id` of lexical identifiers. + +```scala +abstract case catch class def do else +enum export extends false final finally for +given if implicit import lazy match new +null object override package private protected return +sealed super then throw trait true try +type val var while with yield +: = <- => <: >: # +@ =>> ?=> +``` + +### Soft keywords + +Additionally, the following soft keywords are reserved only in some situations. + +´\color{red}{\text{TODO SCALA3: Port soft-modifier.md and link it here.}}´ + +``` +as derives end extension infix inline opaque open transparent using | * + - +``` + + + +> When one needs to access Java identifiers that are reserved words in Scala, use backquote-enclosed strings. +> For instance, the statement `Thread.yield()` is illegal, since `yield` is a reserved word in Scala. +> However, here's a work-around: `` Thread.`yield`() `` + +## Newline Characters + +```ebnf +semi ::= ‘;’ | nl {nl} +``` + +Scala is a line-oriented language where statements may be terminated by semi-colons or newlines. +A newline in a Scala source text is treated as the special token “nl” if the three following criteria are satisfied: + +1. The token immediately preceding the newline can terminate a statement. +1. The token immediately following the newline can begin a statement. +1. The token appears in a region where newlines are enabled. + +The tokens that can terminate a statement are: literals, identifiers and the following delimiters and reserved words: + +```scala +this null true false return type +_ ) ] } +``` + +The tokens that can begin a statement are all Scala tokens _except_ the following delimiters and reserved words: + +```scala +catch else extends finally forSome match +with yield , . ; : = => <- <: <% +>: # [ ) ] } +``` + +A `case` token can begin a statement only if followed by a +`class` or `object` token. + +Newlines are enabled in: + +1. 
all of a Scala source file, except for nested regions where newlines are disabled, and +1. the interval between matching `{` and `}` brace tokens, except for nested regions where newlines are disabled. + +Newlines are disabled in: + +1. the interval between matching `(` and `)` parenthesis tokens, except for nested regions where newlines are enabled, and +1. the interval between matching `[` and `]` bracket tokens, except for nested regions where newlines are enabled. +1. The interval between a `case` token and its matching `=>` token, except for nested regions where newlines are enabled. +1. Any regions analyzed in [XML mode](#xml-mode). + +Note that the brace characters of `{...}` escapes in XML and string literals are not tokens, and therefore do not enclose a region where newlines are enabled. + +Normally, only a single `nl` token is inserted between two consecutive non-newline tokens which are on different lines, even if there are multiple lines between the two tokens. +However, if two tokens are separated by at least one completely blank line (i.e a line which contains no printable characters), then two `nl` tokens are inserted. + +The Scala grammar (given in full [here](13-syntax-summary.html)) contains productions where optional `nl` tokens, but not semicolons, are accepted. +This has the effect that a new line in one of these positions does not terminate an expression or statement. +These positions can be summarized as follows: + +Multiple newline tokens are accepted in the following places (note that a semicolon in place of the newline would be illegal in every one of these cases): + +- between the condition of a [conditional expression](06-expressions.html#conditional-expressions) or [while loop](06-expressions.html#while-loop-expressions) and the next following expression, +- between the enumerators of a [for-comprehension](06-expressions.html#for-comprehensions-and-for-loops) and the next following expression, and +- after the initial `type` keyword in a [type definition or declaration](04-basic-declarations-and-definitions.html#type-declarations-and-type-aliases). + +A single new line token is accepted + +- in front of an opening brace ‘{’, if that brace is a legal continuation of the current statement or expression, +- after an [infix operator](06-expressions.html#prefix,-infix,-and-postfix-operations), if the first token on the next line can start an expression, +- in front of a [parameter clause](04-basic-declarations-and-definitions.html#function-declarations-and-definitions), and +- after an [annotation](11-annotations.html#user-defined-annotations). + +> The newline tokens between the two lines are not treated as statement separators. 
+> +> ```scala +> if (x > 0) +> x = x - 1 +> +> while (x > 0) +> x = x / 2 +> +> for (x <- 1 to 10) +> println(x) +> +> type +> IntList = List[Int] +> ``` + + + +> ```scala +> new Iterator[Int] +> { +> private var x = 0 +> def hasNext = true +> def next = { x += 1; x } +> } +> ``` +> +> With an additional newline character, the same code is interpreted as an object creation followed by a local block: +> +> ```scala +> new Iterator[Int] +> +> { +> private var x = 0 +> def hasNext = true +> def next = { x += 1; x } +> } +> ``` + + + +> ```scala +> x < 0 || +> x > 10 +> ``` +> +> With an additional newline character, the same code is interpreted as two expressions: +> +> ```scala +> x < 0 || +> +> x > 10 +> ``` + + + +> ```scala +> def func(x: Int) +> (y: Int) = x + y +> ``` +> +> With an additional newline character, the same code is interpreted as an abstract method definition and a syntactically illegal statement: +> +> ```scala +> def func(x: Int) +> +> (y: Int) = x + y +> ``` + + + +> ```scala +> @serializable +> protected class Data { ... } +> ``` +> +> With an additional newline character, the same code is interpreted as an attribute and a separate statement (which is syntactically illegal). +> +> ```scala +> @serializable +> +> protected class Data { ... } +> ``` + +## Literals + +There are literals for integer numbers, floating point numbers, characters, booleans, strings. +The syntax of these literals is in each case as in Java. + + + +```ebnf +Literal ::= [‘-’] integerLiteral + | [‘-’] floatingPointLiteral + | booleanLiteral + | characterLiteral + | stringLiteral + | interpolatedString + | ‘null’ +``` + +### Integer Literals + +```ebnf +integerLiteral ::= (decimalNumeral | hexNumeral) + [‘L’ | ‘l’] +decimalNumeral ::= ‘0’ | digit [{digit | ‘_’} digit] +hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit [{hexDigit | ‘_’} hexDigit] +``` + +Values of type `Int` are all integer numbers between $-2\^{31}$ and $2\^{31}-1$, inclusive. +Values of type `Long` are all integer numbers between $-2\^{63}$ and +$2\^{63}-1$, inclusive. +A compile-time error occurs if an integer literal denotes a number outside these ranges. + +Integer literals are usually of type `Int`, or of type `Long` when followed by a `L` or `l` suffix. +(Lowercase `l` is deprecated for reasons of legibility.) + +However, if the expected type [_pt_](06-expressions.html#expression-typing) of a literal in an expression is either `Byte`, `Short`, or `Char` and the integer number fits in the numeric range defined by the type, then the number is converted to type _pt_ and the literal's type is _pt_. +The numeric ranges given by these types are: + +| | | +|----------------|--------------------------| +|`Byte` | ´-2\^7´ to ´2\^7-1´ | +|`Short` | ´-2\^{15}´ to ´2\^{15}-1´| +|`Char` | ´0´ to ´2\^{16}-1´ | + +The digits of a numeric literal may be separated by arbitrarily many underscores for purposes of legibility. + +> ```scala +> 0 21_000 0x7F -42L 0xFFFF_FFFF +> ``` + +### Floating Point Literals + +```ebnf +floatingPointLiteral + ::= [decimalNumeral] ‘.’ digit [{digit | ‘_’} digit] [exponentPart] [floatType] + | decimalNumeral exponentPart [floatType] + | decimalNumeral floatType +exponentPart ::= (‘E’ | ‘e’) [‘+’ | ‘-’] digit [{digit | ‘_’} digit] +``` + +Floating point literals are of type `Float` when followed by a floating point type suffix `F` or `f`, and are of type `Double` otherwise. 
+The type `Float` consists of all IEEE 754 32-bit single-precision binary floating point values, whereas the type `Double` consists of all IEEE 754 64-bit double-precision binary floating point values. + +If a floating point literal in a program is followed by a token starting with a letter, there must be at least one intervening whitespace character between the two tokens. + +> ```scala +> 0.0 1e30f 3.14159f 1.0e-100 .1 +> ``` + + + +> The phrase `1.toString` parses as three different tokens: the integer literal `1`, a `.`, and the identifier `toString`. + + + +> `1.` is not a valid floating point literal because the mandatory digit after the `.` is missing. + +### Boolean Literals + +```ebnf +booleanLiteral ::= ‘true’ | ‘false’ +``` + +The boolean literals `true` and `false` are members of type `Boolean`. + +### Character Literals + +```ebnf +characterLiteral ::= ‘'’ (charNoQuoteOrNewline | escapeSeq) ‘'’ +``` + +A character literal is a single character enclosed in quotes. +The character can be any Unicode character except the single quote delimiter or `\u000A` (LF) or `\u000D` (CR); or any Unicode character represented by an +[escape sequence](#escape-sequences). + +> ```scala +> 'a' '\u0041' '\n' '\t' +> ``` + +### String Literals + +```ebnf +stringLiteral ::= ‘"’ {stringElement} ‘"’ +stringElement ::= charNoDoubleQuoteOrNewline | escapeSeq +``` + +A string literal is a sequence of characters in double quotes. +The characters can be any Unicode character except the double quote delimiter or `\u000A` (LF) or `\u000D` (CR); or any Unicode character represented by an [escape sequence](#escape-sequences). + +If the string literal contains a double quote character, it must be escaped using +`"\""`. + +The value of a string literal is an instance of class `String`. + +> ```scala +> "Hello, world!\n" +> "\"Hello,\" replied the world." +> ``` + +#### Multi-Line String Literals + +```ebnf +stringLiteral ::= ‘"""’ multiLineChars ‘"""’ +multiLineChars ::= {[‘"’] [‘"’] charNoDoubleQuote} {‘"’} +``` + +A multi-line string literal is a sequence of characters enclosed in triple quotes `""" ... """`. +The sequence of characters is arbitrary, except that it may contain three or more consecutive quote characters only at the very end. +Characters must not necessarily be printable; newlines or other control characters are also permitted. +[Escape sequences](#escape-sequences) are not processed, except for Unicode escapes (this is deprecated since 2.13.2). + +> ```scala +> """the present string +> spans three +> lines.""" +> ``` +> +> This would produce the string: +> +> ```scala +> the present string +> spans three +> lines. +> ``` +> +> The Scala library contains a utility method `stripMargin` which can be used to strip leading whitespace from multi-line strings. +> The expression +> +> ```scala +> """the present string +> |spans three +> |lines.""".stripMargin +> ``` +> +> evaluates to +> +> ```scala +> the present string +> spans three +> lines. +> ``` +> +> Method `stripMargin` is defined in class +> [scala.collection.StringOps](https://www.scala-lang.org/api/current/scala/collection/StringOps.html#stripMargin:String). 
+ +#### Interpolated string + +```ebnf +interpolatedString ::= alphaid ‘"’ {[‘\’] interpolatedStringPart | ‘\\’ | ‘\"’} ‘"’ + | alphaid ‘"""’ {[‘"’] [‘"’] char \ (‘"’ | ‘$’) | escape} {‘"’} ‘"""’ +interpolatedStringPart ::= printableChar \ (‘"’ | ‘$’ | ‘\’) | escape +escape ::= ‘$$’ + | ‘$"’ + | ‘$’ alphaid + | ‘$’ BlockExpr +alphaid ::= upper idrest + | varid + +``` + +An interpolated string consists of an identifier starting with a letter immediately followed by a string literal. +There may be no whitespace characters or comments between the leading identifier and the opening quote `"` of the string. +The string literal in an interpolated string can be standard (single quote) or multi-line (triple quote). + +Inside an interpolated string none of the usual escape characters are interpreted no matter whether the string literal is normal (enclosed in single quotes) or multi-line (enclosed in triple quotes). +Note that the sequence `\"` does not close a normal string literal (enclosed in single quotes). + +There are three forms of dollar sign escape. +The most general form encloses an expression in `${` and `}`, i.e. `${expr}`. +The expression enclosed in the braces that follow the leading `$` character is of syntactical category BlockExpr. +Hence, it can contain multiple statements, and newlines are significant. +Single ‘$’-signs are not permitted in isolation in an interpolated string. +A single ‘$’-sign can still be obtained by doubling the ‘$’ character: ‘$$’. +A single ‘"’-sign can be obtained by the sequence ‘\$"’. + +The simpler form consists of a ‘$’-sign followed by an identifier starting with a letter and followed only by letters, digits, and underscore characters, e.g., `$id`. +The simpler form is expanded by putting braces around the identifier, e.g., `$id` is equivalent to `${id}`. +In the following, unless we explicitly state otherwise, we assume that this expansion has already been performed. + +The expanded expression is type checked normally. +Usually, `StringContext` will resolve to the default implementation in the scala package, but it could also be user-defined. +Note that new interpolators can also be added through implicit conversion of the built-in `scala.StringContext`. + +One could write an extension +```scala +implicit class StringInterpolation(s: StringContext) { + def id(args: Any*) = ??? +} +``` + +### Escape Sequences + +The following character escape sequences are recognized in character and string literals. + +| charEscapeSeq | unicode | name | char | +|---------------|----------|-----------------|--------| +| `‘\‘ ‘b‘` | `\u0008` | backspace | `BS` | +| `‘\‘ ‘t‘` | `\u0009` | horizontal tab | `HT` | +| `‘\‘ ‘n‘` | `\u000a` | linefeed | `LF` | +| `‘\‘ ‘f‘` | `\u000c` | form feed | `FF` | +| `‘\‘ ‘r‘` | `\u000d` | carriage return | `CR` | +| `‘\‘ ‘"‘` | `\u0022` | double quote | `"` | +| `‘\‘ ‘'‘` | `\u0027` | single quote | `'` | +| `‘\‘ ‘\‘` | `\u005c` | backslash | `\` | + +In addition, Unicode escape sequences of the form `\uxxxx`, where each `x` is a hex digit are recognized in character and string literals. + +It is a compile time error if a backslash character in a character or string literal does not start a valid escape sequence. + +## Whitespace and Comments + +Tokens may be separated by whitespace characters and/or comments. +Comments come in two forms: + +A single-line comment is a sequence of characters which starts with `//` and extends to the end of the line. + +A multi-line comment is a sequence of characters between `/*` and `*/`. 
+Multi-line comments may be nested, but are required to be properly nested. +Therefore, a comment like `/* /* */` will be rejected as having an unterminated comment. + +## Trailing Commas in Multi-line Expressions + +If a comma (`,`) is followed immediately, ignoring whitespace, by a newline and a closing parenthesis (`)`), bracket (`]`), or brace (`}`), then the comma is treated as a "trailing comma" and is ignored. +For example: + +```scala +foo( + 23, + "bar", + true, +) +``` + +## XML mode + +In order to allow literal inclusion of XML fragments, lexical analysis switches from Scala mode to XML mode when encountering an opening angle bracket ‘<’ in the following circumstance: +The ‘<’ must be preceded either by whitespace, an opening parenthesis or an opening brace and immediately followed by a character starting an XML name. + +```ebnf + ( whitespace | ‘(’ | ‘{’ ) ‘<’ (XNameStart | ‘!’ | ‘?’) + + XNameStart ::= ‘_’ | BaseChar | Ideographic // as in W3C XML, but without ‘:’ +``` + +The scanner switches from XML mode to Scala mode if either + +- the XML expression or the XML pattern started by the initial ‘<’ has been successfully parsed, or if +- the parser encounters an embedded Scala expression or pattern and forces the Scanner back to normal mode, until the Scala expression or pattern is successfully parsed. +In this case, since code and XML fragments can be nested, the parser has to maintain a stack that reflects the nesting of XML and Scala expressions adequately. + +Note that no Scala tokens are constructed in XML mode, and that comments are interpreted as text. + +> The following value definition uses an XML literal with two embedded Scala expressions: +> +> ```scala +> val b = +> The Scala Language Specification +> {scalaBook.version} +> {scalaBook.authors.mkList("", ", ", "")} +> +> ``` diff --git a/docs/_spec/02-identifiers-names-and-scopes.md b/docs/_spec/02-identifiers-names-and-scopes.md new file mode 100644 index 000000000000..2b34ae8844cf --- /dev/null +++ b/docs/_spec/02-identifiers-names-and-scopes.md @@ -0,0 +1,158 @@ +--- +title: Identifiers, Names & Scopes +layout: default +chapter: 2 +--- + +# Identifiers, Names and Scopes + +Names in Scala identify types, values, methods, and classes which are collectively called _entities_. +Names are introduced by local +[definitions and declarations](04-basic-declarations-and-definitions.html#basic-declarations-and-definitions), +[inheritance](05-classes-and-objects.html#class-members), +[import clauses](04-basic-declarations-and-definitions.html#import-clauses), or +[package clauses](09-top-level-definitions.html#packagings) +which are collectively called _bindings_. + +Bindings of different kinds have precedence defined on them: + +1. Definitions and declarations that are local, inherited, or made available by a package clause and also defined in the same compilation unit as the reference to them, have the highest precedence. +1. Explicit imports have the next highest precedence. +1. Wildcard imports have the next highest precedence. +1. Definitions made available by a package clause, but not also defined in the same compilation unit as the reference to them, as well as imports which are supplied by the compiler but not explicitly written in source code, have the lowest precedence. + +There are two different name spaces, one for [types](03-types.html#types) and one for [terms](06-expressions.html#expressions). +The same name may designate a type and a term, depending on the context where the name is used. 
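+As a hedged sketch (ours, not part of the specification text), the name `Box` below designates a type in the type namespace and a term in the term namespace:
+
+```scala
+class Box(val value: Int)              // `Box` as a type
+object Box { def empty = new Box(0) }  // `Box` as a term (the companion object)
+
+val b: Box = Box.empty                 // a type use and a term use of the same name
+```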
+ +A binding has a _scope_ in which the entity defined by a single +name can be accessed using a simple name. +Scopes are nested. +A binding in some inner scope _shadows_ bindings of lower precedence in the same scope as well as bindings of the same or lower precedence in outer scopes. + +Note that shadowing is only a partial order. +In the following example, neither binding of `x` shadows the other. +Consequently, the reference to `x` in the last line of the block is ambiguous. + +```scala +val x = 1 +locally { + import p.X.x + x +} +``` + +A reference to an unqualified (type- or term-) identifier ´x´ is bound by the unique binding, which + +- defines an entity with name ´x´ in the same namespace as the identifier, and +- shadows all other bindings that define entities with name ´x´ in that namespace. + +It is an error if no such binding exists. +If ´x´ is bound by an import clause, then the simple name ´x´ is taken to be equivalent to the qualified name to which ´x´ is mapped by the import clause. +If ´x´ is bound by a definition or declaration, then ´x´ refers to the entity introduced by that binding. +In that case, the type of ´x´ is the type of the referenced entity. + +A reference to a qualified (type- or term-) identifier ´e.x´ refers to the member of the type ´T´ of ´e´ which has the name ´x´ in the same namespace as the identifier. +It is an error if ´T´ is not a [value type](03-types.html#value-types). +The type of ´e.x´ is the member type of the referenced entity in ´T´. + +Binding precedence implies that the way source is bundled in files affects name resolution. +In particular, imported names have higher precedence than names, defined in other files, that might otherwise be visible because they are defined in either the current package or an enclosing package. + +Note that a package definition is taken as lowest precedence, since packages are open and can be defined across arbitrary compilation units. + +```scala +package util { + import scala.util + class Random + object Test extends App { + println(new util.Random) // scala.util.Random + } +} +``` + +The compiler supplies imports in a preamble to every source file. +This preamble conceptually has the following form, where braces indicate nested scopes: + +```scala +import java.lang._ +{ + import scala._ + { + import Predef._ + { /* source */ } + } +} +``` + +These imports are taken as lowest precedence, so that they are always shadowed by user code, which may contain competing imports and definitions. +They also increase the nesting depth as shown, so that later imports shadow earlier ones. + +As a convenience, multiple bindings of a type identifier to the same underlying type is permitted. +This is possible when import clauses introduce a binding of a member type alias with the same binding precedence, typically through wildcard imports. +This allows redundant type aliases to be imported without introducing an ambiguity. 
+ +```scala +object X { type T = annotation.tailrec } +object Y { type T = annotation.tailrec } +object Z { + import X._, Y._, annotation.{tailrec => T} // OK, all T mean tailrec + @T def f: Int = { f ; 42 } // error, f is not tail recursive +} +``` + +Similarly, imported aliases of names introduced by package statements are allowed, even though the names are strictly ambiguous: + +```scala +// c.scala +package p { class C } + +// xy.scala +import p._ +package p { class X extends C } +package q { class Y extends C } +``` + +The reference to `C` in the definition of `X` is strictly ambiguous because `C` is available by virtue of the package clause in a different file, and can't shadow the imported name. +But because the references are the same, the definition is taken as though it did shadow the import. + +###### Example + +Assume the following two definitions of objects named `X` in packages `p` and `q` in separate compilation units. + +```scala +package p { + object X { val x = 1; val y = 2 } +} + +package q { + object X { val x = true; val y = false } +} +``` + +The following program illustrates different kinds of bindings and precedences between them. + +```scala +package p { // `X' bound by package clause +import Console._ // `println' bound by wildcard import +object Y { + println(s"L4: $X") // `X' refers to `p.X' here + locally { + import q._ // `X' bound by wildcard import + println(s"L7: $X") // `X' refers to `q.X' here + import X._ // `x' and `y' bound by wildcard import + println(s"L9: $x") // `x' refers to `q.X.x' here + locally { + val x = 3 // `x' bound by local definition + println(s"L12: $x") // `x' refers to constant `3' here + locally { + import q.X._ // `x' and `y' bound by wildcard import +// println(s"L15: $x") // reference to `x' is ambiguous here + import X.y // `y' bound by explicit import + println(s"L17: $y") // `y' refers to `q.X.y' here + locally { + val x = "abc" // `x' bound by local definition + import p.X._ // `x' and `y' bound by wildcard import +// println(s"L21: $y") // reference to `y' is ambiguous here + println(s"L22: $x") // `x' refers to string "abc" here +}}}}}} +``` diff --git a/docs/_spec/03-types.md b/docs/_spec/03-types.md new file mode 100644 index 000000000000..bbaac5de03a0 --- /dev/null +++ b/docs/_spec/03-types.md @@ -0,0 +1,853 @@ +--- +title: Types +layout: default +chapter: 3 +--- + +# Types + +```ebnf + Type ::= FunctionArgTypes ‘=>’ Type + | TypeLambdaParams ‘=>>’ Type + | InfixType + FunctionArgTypes ::= InfixType + | ‘(’ [ ParamType {‘,’ ParamType } ] ‘)’ + TypeLambdaParams ::= ‘[’ TypeLambdaParam {‘,’ TypeLambdaParam} ‘]’ + TypeLambdaParam ::= {Annotation} (id | ‘_’) [TypeParamClause] [‘>:’ Type] [‘<:’ Type] + InfixType ::= CompoundType {id [nl] CompoundType} + CompoundType ::= AnnotType {‘with’ AnnotType} [Refinement] + | Refinement + AnnotType ::= SimpleType {Annotation} + SimpleType ::= SimpleType TypeArgs + | SimpleType ‘#’ id + | StableId + | Path ‘.’ ‘type’ + | Literal + | ‘(’ Types ‘)’ + TypeArgs ::= ‘[’ Types ‘]’ + Types ::= Type {‘,’ Type} +``` + +We distinguish between proper types and type constructors, which take type parameters and yield types. +All types have a _kind_, either the kind of proper types or a _higher kind_. +A subset of proper types called _value types_ represents sets of (first-class) values. +Types are either _concrete_ or _abstract_. + +Every concrete value type can be represented as a _class type_, i.e. 
a [type designator](#type-designators) that refers to a [class or a trait](05-classes-and-objects.html#class-definitions) [^1], or as a [compound type](#compound-types) representing an intersection of types, possibly with a [refinement](#compound-types) that further constrains the types of its members.
+
+Abstract types are introduced by [type parameters](04-basic-declarations-and-definitions.html#type-parameters) and [abstract type bindings](04-basic-declarations-and-definitions.html#type-declarations-and-type-aliases).
+Parentheses in types can be used for grouping.
+
+[^1]: We assume that objects and packages also implicitly
+      define a class (of the same name as the object or package, but
+      inaccessible to user programs).
+
+Non-value types capture properties of identifiers that [are not values](#non-value-types).
+For example, a [type constructor](#type-constructors) does not directly specify a type of values.
+However, when a type constructor is applied to the correct type arguments, it yields a proper type, which may be a value type.
+
+Non-value types are expressed indirectly in Scala.
+E.g., a method type is described by writing down a method signature, which in itself is not a real type, although it gives rise to a corresponding [method type](#method-types).
+Type constructors are another example, as one can write `type Swap[m[_, _], a, b] = m[b, a]`, but there is no syntax to write the corresponding anonymous type function directly.
+
+`AnyKind` is the supertype of all types in the Scala type system.
+It has all possible kinds to encode [kind polymorphism](#kind-polymorphism).
+As such, it is neither a value type nor a type constructor.
+
+## Paths
+
+```ebnf
+Path ::= StableId
+       | [id ‘.’] this
+StableId ::= id
+           | Path ‘.’ id
+           | [id ‘.’] ‘super’ [ClassQualifier] ‘.’ id
+ClassQualifier ::= ‘[’ id ‘]’
+```
+
+Paths are not types themselves, but they can be a part of named types and in that function play a central role in Scala's type system.
+
+A path is one of the following.
+
+- The empty path ε (which cannot be written explicitly in user programs).
+- ´C.´`this`, where ´C´ references a class.
+  The path `this` is taken as a shorthand for ´C.´`this` where ´C´ is the name of the class directly enclosing the reference.
+- ´p.x´ where ´p´ is a path and ´x´ is a stable member of ´p´.
+  _Stable members_ are packages or members introduced by object definitions or by value definitions of [non-volatile types](#volatile-types).
+- ´C.´`super`´.x´ or ´C.´`super`´[M].x´
+  where ´C´ references a class and ´x´ references a stable member of the super class or designated parent class ´M´ of ´C´.
+  The prefix `super` is taken as a shorthand for ´C.´`super` where ´C´ is the name of the class directly enclosing the reference.
+
+A _stable identifier_ is a path which ends in an identifier.
+
+## Value Types
+
+Every value in Scala has a type which is of one of the following forms.
+
+### Singleton Types
+
+```ebnf
+SimpleType ::= Path ‘.’ ‘type’
+```
+
+A _singleton type_ is of the form ´p.´`type`.
+Where ´p´ is a path pointing to a value which [conforms](06-expressions.html#expression-typing) to `scala.AnyRef`, the type denotes the set of values consisting of `null` and the value denoted by ´p´ (i.e., the value ´v´ for which `v eq p`).
+Where the path does not conform to `scala.AnyRef` the type denotes the set consisting of only the value denoted by ´p´.
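+
+For example (an illustrative sketch; the names `A`, `a`, `b`, and `i` are ours):
+
+```scala
+class A
+val a: A = new A
+val b: a.type = a // a.type denotes null and the value of a,
+                  // since A conforms to scala.AnyRef
+
+val i: Int = 42   // Int does not conform to scala.AnyRef,
+                  // so i.type denotes only the value of i
+```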
+ + + +### Literal Types + +```ebnf +SimpleType ::= Literal +``` + +A literal type `lit` is a special kind of singleton type which denotes the single literal value `lit`. +Thus, the type ascription `1: 1` gives the most precise type to the literal value `1`: the literal type `1`. + +At run time, an expression `e` is considered to have literal type `lit` if `e == lit`. +Concretely, the result of `e.isInstanceOf[lit]` and `e match { case _ : lit => }` is determined by evaluating `e == lit`. + +Literal types are available for all types for which there is dedicated syntax except `Unit`. +This includes the numeric types (other than `Byte` and `Short` which don't currently have syntax), `Boolean`, `Char` and `String`. + +### Stable Types +A _stable type_ is a singleton type, a literal type, or a type that is declared to be a subtype of trait `scala.Singleton`. + +### Type Projection + +```ebnf +SimpleType ::= SimpleType ‘#’ id +``` + +A _type projection_ ´T´#´x´ references the type member named ´x´ of type ´T´. + + + +### Type Designators + +```ebnf +SimpleType ::= StableId +``` + +A _type designator_ refers to a named value type. +It can be simple or qualified. +All such type designators are shorthands for type projections. + +Specifically, the unqualified type name ´t´ where ´t´ is bound in some class, object, or package ´C´ is taken as a shorthand for +´C.´`this.type#`´t´. +If ´t´ is not bound in a class, object, or package, then ´t´ is taken as a shorthand for ε`.type#`´t´. + +A qualified type designator has the form `p.t` where `p` is a [path](#paths) and _t_ is a type name. +Such a type designator is equivalent to the type projection `p.type#t`. + +###### Example + +Some type designators and their expansions are listed below. +We assume a local type parameter ´t´, a value `maintable` with a type member `Node` and the standard class `scala.Int`, + +| Designator | Expansion | +|-------------------- | --------------------------| +|t | ε.type#t | +|Int | scala.type#Int | +|scala.Int | scala.type#Int | +|data.maintable.Node | data.maintable.type#Node | + +### Parameterized Types + +```ebnf +SimpleType ::= SimpleType TypeArgs +TypeArgs ::= ‘[’ Types ‘]’ +``` + +A _parameterized type_ ´T[ T_1, ..., T_n ]´ consists of a type designator ´T´ and type arguments ´T_1, ..., T_n´ where ´n \geq 1´. +´T´ must refer to a type constructor which takes ´n´ type parameters ´a_1, ..., a_n´. + + +Say the type parameters have lower bounds ´L_1, ..., L_n´ and upper bounds ´U_1, ..., U_n´. +The parameterized type is well-formed if each type argument _conforms to its bounds_, i.e. ´\sigma L_i <: T_i <: \sigma U_i´ where ´\sigma´ is the substitution ´[ a_1 := T_1, ..., a_n := T_n ]´. + +#### Example Parameterized Types + +Given the partial type definitions: + +```scala +class TreeMap[A <: Comparable[A], B] { ... } +class List[A] { ... } +class I extends Comparable[I] { ... } + +class F[M[A], X] { ... } +class S[K <: String] { ... } +class G[M[Z <: I], I] { ... 
} +``` + +the following parameterized types are well-formed: + +```scala +TreeMap[I, String] +List[I] +List[List[Boolean]] + +F[List, Int] +G[S, String] +``` + +and the following types are ill-formed: + +```scala +TreeMap[I] // illegal: wrong number of parameters +TreeMap[List[I], Int] // illegal: type parameter not within bound + +F[Int, Boolean] // illegal: Int is not a type constructor +F[TreeMap, Int] // illegal: TreeMap takes two parameters, + // F expects a constructor taking one +G[S, Int] // illegal: S constrains its parameter to + // conform to String, + // G expects type constructor with a parameter + // that conforms to Int +``` + +#### Wildcard Type Argument + + +```ebnf +WildcardType ::= ‘_’ TypeBounds +``` + +A _wildcard type argument_ is of the form `_´\;´>:´\,L\,´<:´\,U´`. +A wildcard type must appear as a type argument of a parameterized type. +The parameterized type to which the wildcard type is applied cannot be an abstract type constructor. + +Both bound clauses may be omitted. +If both bounds are omitted, the real bounds are inferred from the bounds of the corresponding type parameter in the target type constructor. +Otherwise, if a lower bound clause `>:´\,L´` is missing, `>:´\,´scala.Nothing` is assumed. +Otherwise, if an upper bound clause `<:´\,U´` is missing, `<:´\,´scala.Any` is assumed. + +Given the [above type definitions](#example-parameterized-types), the following types are well-formed: + +```scala +List[_] // inferred as List[_ >: Nothing <: Any] +List[_ <: java.lang.Number] +S[_ <: String] +F[_, Boolean] +``` + +and the following code contains an ill-formed type: + +```scala +trait H[F[A]]: + def f: F[_] // illegal : an abstract type constructor + // cannot be applied to wildcard arguments. +``` + +Wildcard types may also appear as parts of [infix types](#infix-types), [function types](#function-types), or [tuple types](#tuple-types). +Their expansion is then the expansion in the equivalent parameterized type. + +##### Simplification Rules + +Let ´T[T_1, ..., T_n]´ be a parameterized type. +Then, applying a wildcard type argument ´t´ of the form ´\\_ >: L <: U´ at the ´i´'th position obeys the following equivalences: + +- If the type parameter ´T_i´ is declared covariant, then ´t \equiv U´ +- If the type parameter ´T_i´ is declared contravariant, then ´t \equiv L´ + +### Tuple Types + +```ebnf +SimpleType ::= ‘(’ Types ‘)’ +``` + +A _tuple type_ ´(T_1, ..., T_n)´ where ´n \geq 2´ is an alias for the type `´T_1´ *: ... *: ´T_n´ *: scala.EmptyTuple`. + +Notes: +- `(´T´)` is just the type ´T´, and not `´T´ *: scala.EmptyTuple`. +- `()` is not a valid type, and not `scala.EmptyTuple`. + +If ´n \leq 22´, the type `´T_1´ *: ... *: ´T_n´ *: scala.EmptyTuple` is both a subtype and a supertype of tuple class `scala.Tuple´_n´[´T_1´, ..., ´T_n´]`. + +Tuple classes are case classes whose fields can be accessed using selectors `_1`, ..., `_n`. +Their functionality is abstracted in the corresponding `scala.Product_´n´` trait. +The _n_-ary tuple class and product trait are defined at least as follows in the standard Scala library (they might also add other methods and implement other traits). + +```scala +case class Tuple´_n´[+´T_1´, ..., +´T_n´](_1: ´T_1´, ..., _n: ´T_n´) +extends Product´_n´[´T_1´, ..., ´T_n´] + +trait Product´_n´[+´T_1´, ..., +´T_n´] extends Product: + override def productArity = ´n´ + def _1: ´T_1´ + ... 
+ def _n: ´T_n´ +``` + +### Annotated Types + +```ebnf +AnnotType ::= SimpleType {Annotation} +``` + +An _annotated type_ ´T´ ´a_1, ..., a_n´ attaches [annotations](11-annotations.html#user-defined-annotations) ´a_1, ..., a_n´ to the type ´T´. + +###### Example + +The following type adds the `@suspendable` annotation to the type `String`: + +```scala +String @suspendable +``` + +### Compound Types + +```ebnf +CompoundType ::= AnnotType {‘with’ AnnotType} [Refinement] + | Refinement +Refinement ::= [nl] ‘{’ RefineStat {semi RefineStat} ‘}’ +RefineStat ::= Dcl + | ‘type’ TypeDef + | +``` + +A _compound type_ ´T_1´ `with` ... `with` ´T_n \\{ R \\}´ represents objects with members as given in the component types ´T_1, ..., T_n´ and the refinement ´\\{ R \\}´. +A refinement ´\\{ R \\}´ contains declarations and type definitions. +If a declaration or definition overrides a declaration or definition in one of the component types ´T_1, ..., T_n´, the usual rules for [overriding](05-classes-and-objects.html#overriding) apply; otherwise the declaration or definition is said to be “structural” [^2]. + +[^2]: A reference to a structurally defined member (method call or access to a value or variable) may generate binary code that is significantly slower than an equivalent code to a non-structural member. + +Within a method declaration in a structural refinement, the type of any value parameter may only refer to type parameters or abstract types that are contained inside the refinement. +That is, it must refer either to a type parameter of the method itself, or to a type definition within the refinement. +This restriction does not apply to the method's result type. + +If no refinement is given, the empty refinement is implicitly added, i.e. ´T_1´ `with` ... `with` ´T_n´ is a shorthand for ´T_1´ `with` ... `with` ´T_n \\{\\}´. + +A compound type may also consist of just a refinement ´\\{ R \\}´ with no preceding component types. +Such a type is equivalent to `AnyRef` ´\\{ R \\}´. + +###### Example + +The following example shows how to declare and use a method which has a parameter type that contains a refinement with structural declarations. + +```scala +case class Bird (val name: String) extends Object { + def fly(height: Int) = ... +... +} +case class Plane (val callsign: String) extends Object { + def fly(height: Int) = ... +... +} +def takeoff( + runway: Int, + r: { val callsign: String; def fly(height: Int) }) = { + tower.print(r.callsign + " requests take-off on runway " + runway) + tower.read(r.callsign + " is clear for take-off") + r.fly(1000) +} +val bird = new Bird("Polly the parrot"){ val callsign = name } +val a380 = new Plane("TZ-987") +takeoff(42, bird) +takeoff(89, a380) +``` + +Although `Bird` and `Plane` do not share any parent class other than `Object`, the parameter _r_ of method `takeoff` is defined using a refinement with structural declarations to accept any object that declares a value `callsign` and a `fly` method. + +### Infix Types + +```ebnf +InfixType ::= CompoundType {id [nl] CompoundType} +``` + +An _infix type_ ´T_1´ `op` ´T_2´ consists of an infix operator `op` which gets applied to two type operands ´T_1´ and ´T_2´. +The type is equivalent to the type application `op`´[T_1, T_2]´. +The infix operator `op` may be an arbitrary identifier. + +Type operators follow the same [precedence and associativity as term operators](06-expressions.html#prefix-infix-and-postfix-operations). +For example, `A + B * C` parses as `A + (B * C)` and `A | B & C` parses as `A | (B & C)`. 
+Type operators ending in a colon ‘:’ are right-associative; all other operators are left-associative.
+
+In a sequence of consecutive type infix operations ´t_0 \, \mathit{op_1} \, t_1 \, \mathit{op_2} \, ... \, \mathit{op_n} \, t_n´, all operators ´\mathit{op}\_1, ..., \mathit{op}\_n´ must have the same associativity.
+If they are all left-associative, the sequence is interpreted as ´(... (t_0 \mathit{op_1} t_1) \mathit{op_2} ...) \mathit{op_n} t_n´, otherwise it is interpreted as ´t_0 \mathit{op_1} (t_1 \mathit{op_2} ( ... \mathit{op_n} t_n) ...)´.
+
+The type operators `|` and `&` are not really special.
+Nevertheless, unless shadowed, they resolve to `scala.|` and `scala.&`, which represent [union and intersection types](#union-and-intersection-types), respectively.
+
+### Union and Intersection Types
+
+Syntactically, the types `S | T` and `S & T` are infix types, where the infix operators are `|` and `&`, respectively (see above).
+
+However, in this specification, ´S | T´ and ´S & T´ refer to the underlying core concepts of *union and intersection types*, respectively.
+
+- The type ´S | T´ represents the set of values that are represented by *either* ´S´ or ´T´.
+- The type ´S & T´ represents the set of values that are represented by *both* ´S´ and ´T´.
+
+From the [conformance](#conformance) rules on union and intersection types, we can show that ´&´ and ´|´ are *commutative* and *associative*.
+Moreover, `&` is distributive over `|`.
+For any types ´A´, ´B´ and ´C´, all of the following relationships hold:
+
+- ´A & B \equiv B & A´,
+- ´A | B \equiv B | A´,
+- ´(A & B) & C \equiv A & (B & C)´,
+- ´(A | B) | C \equiv A | (B | C)´, and
+- ´A & (B | C) \equiv (A & B) | (A & C)´.
+
+If ´C´ is a type constructor, then ´C[A] & C[B]´ can be simplified using the following three rules:
+
+- If ´C´ is covariant, ´C[A] & C[B] \equiv C[A & B]´
+- If ´C´ is contravariant, ´C[A] & C[B] \equiv C[A | B]´
+- If ´C´ is invariant, emit a compile error
+
+From the above rules, we can derive the following conformance relationships:
+
+- When ´C´ is covariant, ´C[A & B] <: C[A] & C[B]´.
+- When ´C´ is contravariant, ´C[A | B] <: C[A] & C[B]´.
+
+#### Join of a union type
+
+In some situations, a union type might need to be widened to a non-union type.
+For this purpose, we define the _join_ of a union type ´T_1 | ... | T_n´ as the smallest intersection type of base class instances of ´T_1, ..., T_n´.
+Note that union types might still appear as type arguments in the resulting type; this guarantees that the join is always finite.
+
+For example, given
+
+```scala
+trait C[+T]
+trait D
+trait E
+class A extends C[A] with D
+class B extends C[B] with D with E
+```
+
+the join of ´A | B´ is ´C[A | B] & D´.
+
+### Function Types
+
+```ebnf
+Type ::= FunctionArgs ‘=>’ Type
+FunctionArgs ::= InfixType
+               | ‘(’ [ ParamType {‘,’ ParamType } ] ‘)’
+```
+
+The type ´(T_1, ..., T_n) \Rightarrow R´ represents the set of function values that take arguments of types ´T_1, ..., T_n´ and yield results of type ´R´.
+The case of exactly one argument type ´T \Rightarrow R´ is a shorthand for ´(T) \Rightarrow R´.
+An argument type of the form ´\Rightarrow T´ represents a [call-by-name parameter](04-basic-declarations-and-definitions.md#by-name-parameters) of type ´T´.
+
+Function types associate to the right, e.g. ´S \Rightarrow T \Rightarrow R´ is the same as ´S \Rightarrow (T \Rightarrow R)´.
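+
+For example (an illustrative sketch; the names are ours):
+
+```scala
+val add: (Int, Int) => Int = (x, y) => x + y        // a binary function value
+val addCurried: Int => Int => Int = x => y => x + y // reads as Int => (Int => Int)
+
+val six: Int = addCurried(2)(4)                     // applying one argument at a time
+```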
+
+Function types are [covariant](04-basic-declarations-and-definitions.md#variance-annotations) in their result type and [contravariant](04-basic-declarations-and-definitions.md#variance-annotations) in their argument types.
+
+Function types are shorthands for class types that define an `apply` method.
+Specifically, the ´n´-ary function type ´(T_1, ..., T_n) \Rightarrow R´ is a shorthand for the class type `Function´_n´[´T_1´, ..., ´T_n´, ´R´]`.
+In particular, ´() \Rightarrow R´ is a shorthand for class type `Function´_0´[´R´]`.
+
+Such class types behave as if they were instances of the following trait:
+
+```scala
+trait Function´_n´[-´T_1´, ..., -´T_n´, +´R´]:
+  def apply(´x_1´: ´T_1´, ..., ´x_n´: ´T_n´): ´R´
+```
+
+Their exact supertypes and implementations are given in the [function classes section](./12-the-scala-standard-library.md#the-function-classes) of the standard library page in this document.
+
+## Non-Value Types
+
+The types explained in the following do not denote sets of values.
+
+### Method Types
+
+A _method type_ is denoted internally as ´(\mathit{Ps})U´, where ´(\mathit{Ps})´ is a sequence of parameter names and types ´(p_1:T_1, ..., p_n:T_n)´ for some ´n \geq 0´ and ´U´ is a (value or method) type.
+This type represents named methods that take arguments named ´p_1, ..., p_n´ of types ´T_1, ..., T_n´ and that return a result of type ´U´.
+
+Method types associate to the right: ´(\mathit{Ps}\_1)(\mathit{Ps}\_2)U´ is treated as ´(\mathit{Ps}\_1)((\mathit{Ps}\_2)U)´.
+
+A special case is the type of a method without any parameters.
+It is written here `=> T`. Parameterless methods name expressions that are re-evaluated each time the parameterless method name is referenced.
+
+Method types do not exist as types of values.
+If a method name is used as a value, its type is [implicitly converted](06-expressions.html#implicit-conversions) to a corresponding function type.
+
+###### Example
+
+The declarations
+
+```scala
+def a: Int
+def b (x: Int): Boolean
+def c (x: Int) (y: String, z: String): String
+```
+
+produce the typings
+
+```scala
+a: => Int
+b: (Int) Boolean
+c: (Int) (String, String) String
+```
+
+### Polymorphic Method Types
+
+A polymorphic method type is denoted internally as `[´\mathit{tps}\,´]´T´` where `[´\mathit{tps}\,´]` is a type parameter section `[´a_1´ >: ´L_1´ <: ´U_1, ..., a_n´ >: ´L_n´ <: ´U_n´]` for some ´n \geq 0´ and ´T´ is a (value or method) type.
+This type represents named methods that take type arguments `´S_1, ..., S_n´` which [conform](#parameterized-types) to the lower bounds `´L_1, ..., L_n´` and the upper bounds `´U_1, ..., U_n´` and that yield results of type ´T´.
+
+###### Example
+
+The declarations
+
+```scala
+def empty[A]: List[A]
+def union[A <: Comparable[A]] (x: Set[A], xs: Set[A]): Set[A]
+```
+
+produce the typings
+
+```scala
+empty : [A >: Nothing <: Any] List[A]
+union : [A >: Nothing <: Comparable[A]] (x: Set[A], xs: Set[A]) Set[A]
+```
+
+### Type Constructors
+
+```ebnf
+Type ::= ... | TypeLambdaParams ‘=>>’ Type
+TypeParamClause ::= ‘[’ TypeParam {‘,’ TypeParam} ‘]’
+TypeLambdaParams ::= ‘[’ TypeLambdaParam {‘,’ TypeLambdaParam} ‘]’
+TypeLambdaParam ::= {Annotation} (id | ‘_’) [TypeParamClause] [‘>:’ Type] [‘<:’ Type]
+```
+
+A _type constructor_ is either:
+- a _type lambda_, of the form `[´\mathit{tps}\,´] =>> ´T´` where `[´\mathit{tps}\,´]` is a type parameter clause `[´a_1´ >: ´L_1´ <: ´U_1, ..., a_n´ >: ´L_n´ <: ´U_n´]` for some ´n \gt 0´ and ´T´ is either a value type or another type lambda.
+- a reference to a [desugared type declaration](04-basic-declarations-and-definitions.html#type-declarations-and-type-aliases) upper-bounded by a type lambda.
+- a reference to a [polymorphic class](05-classes-and-objects.html#class-definitions).
+
+Each type parameter ´a_i´ of a type lambda has a variance ´v_i´ which cannot be written down by the user but is inferred from the body of the type lambda to maximize the number of types that conform to the type lambda.
+
+#### Inferred type parameter clause
+
+To each type constructor corresponds an _inferred type parameter clause_ which is computed as follows:
+- For a type lambda, its type parameter clause (including variance annotations).
+- For a type declaration upper-bounded by a type lambda ´T´, the inferred clause of ´T´.
+- For a polymorphic class, its type parameter clause.
+
+## Kind Polymorphism
+
+Type parameters are normally partitioned into _kinds_, indicated by the top type of which they are subtypes.
+Proper types are the types of values and are subtypes of `Any`.
+Higher-kinded types are type constructors such as `List` or `Map`.
+Covariant single argument type constructors such as `List` are subtypes of `[+X] =>> Any`.
+The `Map` type constructor is a subtype of `[X, +Y] =>> Any`.
+
+A type can be used only as prescribed by its kind.
+Subtypes of `Any` cannot be applied to type arguments whereas subtypes of `[X] =>> Any` _must_ be applied to a type argument, unless they are passed to type parameters of the same kind.
+
+A type parameter whose upper bound is [`scala.AnyKind`](https://scala-lang.org/api/3.x/scala/AnyKind.html) can have any kind and is called an _any-kinded type_.
+
+```scala
+def f[T <: AnyKind] = ...
+```
+
+The actual type arguments of `f` can then be types of arbitrary kinds.
+So the following are all legal:
+
+```scala
+f[Int]
+f[List]
+f[Map]
+f[[X] =>> String]
+```
+
+Since the actual kind of an any-kinded type is unknown, its usage is heavily restricted.
+An any-kinded type can neither be the type of a value, nor be instantiated with type parameters.
+The only thing one can do with an any-kinded type is to pass it to another any-kinded type argument.
+
+`AnyKind` plays a special role in Scala's subtype system.
+It is a supertype of all other types, no matter what their kind is.
+It is also assumed to be kind-compatible with all other types.
+Furthermore, `AnyKind` is itself an any-kinded type, so it cannot be the type of values and it cannot be instantiated.
+
+## Base Types and Member Definitions
+
+Types of class members depend on the way the members are referenced.
+Central here are three notions, namely:
+1. the notion of the set of base types of a type ´T´,
+1. the notion of a type ´T´ in some class ´C´ seen from some
+   prefix type ´S´,
+1. the notion of the set of member bindings of some type ´T´.
+
+These notions are defined mutually recursively as follows.
+
+1. The set of _base types_ of a type is a set of class types,
+   given as follows.
+   - The base types of a class type ´C´ with parents ´T_1, ..., T_n´ are ´C´ itself, as well as the base types of the compound type `´T_1´ with ... with ´T_n´ { ´R´ }`.
+   - The base types of an aliased type are the base types of its alias.
+   - The base types of an abstract type are the base types of its upper bound.
+   - The base types of a parameterized type `´C´[´T_1, ..., T_n´]` are the base types of type ´C´, where every occurrence of a type parameter ´a_i´ of ´C´ has been replaced by the corresponding parameter type ´T_i´.
+   - The base types of a singleton type `´p´.type` are the base types of the type of ´p´.
+   - The base types of a compound type `´T_1´ with ... with ´T_n´ { ´R´ }` are the _reduced union_ of the base classes of all ´T_i´'s.
+     This means: Let the multi-set ´\mathscr{S}´ be the multi-set-union of the base types of all ´T_i´'s.
+     If ´\mathscr{S}´ contains several type instances of the same class, say `´S^i´#´C´[´T^i_1, ..., T^i_n´]` ´(i \in I)´, then all those instances are replaced by one of them which conforms to all others.
+     It is an error if no such instance exists.
+     It follows that the reduced union, if it exists, produces a set of class types, where different types are instances of different classes.
+   - The base types of a type selection `´S´#´T´` are determined as follows.
+     If ´T´ is an alias or abstract type, the previous clauses apply.
+     Otherwise, ´T´ must be a (possibly parameterized) class type, which is defined in some class ´B´.
+     Then the base types of `´S´#´T´` are the base types of ´T´ in ´B´ seen from the prefix type ´S´.
+
+1. The notion of a type ´T´ _in class ´C´ seen from some prefix type ´S´_ makes sense only if the prefix type ´S´ has a type instance of class ´C´ as a base type, say `´S'´#´C´[´T_1, ..., T_n´]`.
+   Then we define as follows.
+   - If `´S´ = ´\epsilon´.type`, then ´T´ in ´C´ seen from ´S´ is ´T´ itself.
+   - Otherwise, if ´T´ is the ´i´'th type parameter of some class ´D´, then
+     - If ´S´ has a base type `´D´[´U_1, ..., U_n´]`, for some type parameters `[´U_1, ..., U_n´]`, then ´T´ in ´C´ seen from ´S´ is ´U_i´.
+     - Otherwise, if ´C´ is defined in a class ´C'´, then ´T´ in ´C´ seen from ´S´ is the same as ´T´ in ´C'´ seen from ´S'´.
+     - Otherwise, if ´C´ is not defined in another class, then ´T´ in ´C´ seen from ´S´ is ´T´ itself.
+   - Otherwise, if ´T´ is the singleton type `´D´.this.type` for some class ´D´ then
+     - If ´D´ is a subclass of ´C´ and ´S´ has a type instance of class ´D´ among its base types, then ´T´ in ´C´ seen from ´S´ is ´S´.
+     - Otherwise, if ´C´ is defined in a class ´C'´, then ´T´ in ´C´ seen from ´S´ is the same as ´T´ in ´C'´ seen from ´S'´.
+     - Otherwise, if ´C´ is not defined in another class, then ´T´ in ´C´ seen from ´S´ is ´T´ itself.
+   - If ´T´ is some other type, then the described mapping is performed to all its type components.
+
+If ´T´ is a possibly parameterized class type, where ´T´'s class is defined in some other class ´D´, and ´S´ is some prefix type, then we use "´T´ seen from ´S´" as a shorthand for "´T´ in ´D´ seen from ´S´".
+
+1. The _member bindings_ of a type ´T´ are
+   1. all bindings ´d´ such that there exists a type instance of some class ´C´ among the base types of ´T´ and there exists a definition or declaration ´d'´ in ´C´ such that ´d´ results from ´d'´ by replacing every type ´T'´ in ´d'´ by ´T'´ in ´C´ seen from ´T´, and
+   2. all bindings of the type's [refinement](#compound-types), if it has one.
+2. The member bindings of ´S & T´ are all the bindings of ´S´ *and* all the bindings of ´T´.
+3. The member bindings of ´S | T´ are the member bindings of its [join](#join-of-a-union-type).
+
+The _definition_ of a type projection `S#T` is the member binding ´d_T´ of the type `T` in `S`.
+In that case, we also say that `S#T` _is defined by_ ´d_T´.
+
+## Relations between types
+
+We define the following relations between types.
+
+| Name             | Symbolically   | Interpretation                                      |
+|------------------|----------------|-----------------------------------------------------|
+| Equivalence      | ´T \equiv U´   | ´T´ and ´U´ are interchangeable in all contexts.    |
+| Conformance      | ´T <: U´       | Type ´T´ conforms to ("is a subtype of") type ´U´.  |
+| Weak Conformance | ´T <:_w U´     | Augments conformance for primitive numeric types.   |
+| Compatibility    |                | Type ´T´ conforms to type ´U´ after conversions.    |
+
+### Equivalence
+
+´\color{red}{\text{TODO SCALA3: Redefine equivalence as mutual conformance?}}´
+
+Equivalence ´(\equiv)´ between types is the smallest congruence [^congruence] such that the following holds:
+
+- If ´t´ is defined by a type alias `type ´t´ = ´T´`, then ´t´ is equivalent to ´T´.
+- If a path ´p´ has a singleton type `´q´.type`, then `´p´.type ´\equiv q´.type`.
+- If ´O´ is defined by an object definition, and ´p´ is a path consisting only of package or object selectors and ending in ´O´, then `´O´.this.type ´\equiv p´.type`.
+- Two [compound types](#compound-types) are equivalent if the sequences of their components are pairwise equivalent, occur in the same order, and their refinements are equivalent. Two refinements are equivalent if they bind the same names and the modifiers, types and bounds of every declared entity are equivalent in both refinements.
+- Two [method types](#method-types) are equivalent if:
+  - neither are implicit, or they both are [^implicit];
+  - they have equivalent result types;
+  - they have the same number of parameters; and
+  - corresponding parameters have equivalent types.
+  Note that the names of parameters do not matter for method type equivalence.
+- Two [polymorphic method types](#polymorphic-method-types) are equivalent if they have the same number of type parameters, and, after renaming one set of type parameters by another, the result types as well as lower and upper bounds of corresponding type parameters are equivalent.
+- Two [type constructors](#type-constructors) are equivalent if they have the same number of type parameters, and, after renaming one list of type parameters by another, the result types as well as variances, lower and upper bounds of corresponding type parameters are equivalent.
+
+[^congruence]: A congruence is an equivalence relation which is closed under formation of contexts.
+[^implicit]: A method type is implicit if the parameter section that defines it starts with the `implicit` keyword.
+
+### Conformance
+
+The conformance relation ´(<:)´ is the smallest transitive relation that satisfies the following conditions.
+
+- Conformance includes equivalence. If ´T \equiv U´ then ´T <: U´.
+- For every type `´T´` (of any kind), `scala.Nothing <: ´T´ <: scala.AnyKind`.
+- For every value type `´T´`, `´T´ <: scala.Any`.
+- For every type constructor `´T´` with type parameters `[´U_1´, ..., ´U_n´]`, `[´U_1´, ..., ´U_n´] =>> scala.Nothing <: ´T´ <: [´U_1´, ..., ´U_n´] =>> scala.Any`.
+- For every value type ´T´, `scala.Null <: ´T´` unless `´T´ <: scala.AnyVal`.
+- A type variable or abstract type ´t´ conforms to its upper bound and its lower bound conforms to ´t´.
+- A class type or parameterized type conforms to any of its base-types.
+- A singleton type `´p´.type` conforms to the type of the path ´p´.
+- A singleton type `´p´.type` conforms to the type `scala.Singleton`.
+- A type projection `´T´#´t´` conforms to `´U´#´t´` if ´T´ conforms to ´U´.
+- A parameterized type `´T´[´T_1´, ..., ´T_n´]` conforms to `´T´[´U_1´, ..., ´U_n´]` if the following conditions hold for ´i \in \{ 1, ..., n \}´:
+  1. If the ´i´'th type parameter of ´T´ is declared covariant, then ´T_i <: U_i´. [^argisnotwildcard]
+  1. If the ´i´'th type parameter of ´T´ is declared contravariant, then ´U_i <: T_i´. [^argisnotwildcard]
+  1. If the ´i´'th type parameter of ´T´ is declared neither covariant nor contravariant:
+     1. If neither ´T_i´ nor ´U_i´ are wildcard type arguments, then ´U_i \equiv T_i´.
+     1. If ´T_i´ is a wildcard type argument of the form ´\\_ >: L_1 <: U_1´ and ´U_i´ is a wildcard argument of the form ´\\_ >: L_2 <: U_2´, then ´L_2 <: L_1´ and ´U_1 <: U_2´ (i.e., the ´T_i´ "interval" is contained in the ´U_i´ "interval").
+     1. If ´U_i´ is a wildcard type argument of the form ´\\_ >: L_2 <: U_2´, then ´L_2 <: T_i´ and ´T_i <: U_2´.
+- A compound type `´T_1´ with ... with ´T_n´ {´R\,´}` conforms to each of its component types ´T_i´.
+- If ´T <: U_i´ for ´i \in \{ 1, ..., n \}´ and for every binding ´d´ of a type or value ´x´ in ´R´ there exists a member binding of ´x´ in ´T´ which subsumes ´d´, then ´T´ conforms to the compound type `´U_1´ with ... with ´U_n´ {´R\,´}`.
+- If ´T <: U´, then ´T <: U | W´ and ´T <: W | U´.
+- If ´T <: W´ and ´U <: W´, then ´T | U <: W´.
+- If ´T <: U´ and ´T <: W´, then ´T <: U & W´.
+- If ´T <: W´, then ´T & U <: W´ and ´U & T <: W´.
+- If ´T_i \equiv T_i'´ for ´i \in \{ 1, ..., n\}´ and ´U´ conforms to ´U'´ then the method type ´(p_1:T_1, ..., p_n:T_n) U´ conforms to ´(p_1':T_1', ..., p_n':T_n') U'´.
+- The polymorphic type ´[a_1 >: L_1 <: U_1, ..., a_n >: L_n <: U_n] T´ conforms to the polymorphic type ´[a_1 >: L_1' <: U_1', ..., a_n >: L_n' <: U_n'] T'´ if, assuming ´L_1' <: a_1 <: U_1', ..., L_n' <: a_n <: U_n'´, one has ´T <: T'´ and ´L_i <: L_i'´ and ´U_i' <: U_i´ for ´i \in \{ 1, ..., n \}´.
+- Type constructors ´T´ and ´T'´ follow a similar discipline.
+We characterize ´T´ and ´T'´ by their [inferred type parameter clauses](#inferred-type-parameter-clause) ´[a_1, ..., a_n]´ and ´[a_1', ..., a_n']´.
+Then, ´T´ conforms to ´T'´ if any list ´[t_1, ..., t_n]´ -- with declared variances, bounds and higher-order type parameter clauses -- of valid type arguments for ´T'´ is also a valid list of type arguments for ´T´ and ´T[t_1, ..., t_n] <: T'[t_1, ..., t_n]´.
+Note that this entails that:
+  - The bounds on ´a_i´ must be weaker than the corresponding bounds declared for ´a'_i´.
+  - The variance of ´a_i´ must match the variance of ´a'_i´, where covariance matches covariance, contravariance matches contravariance and any variance matches invariance.
+  - Recursively, these restrictions apply to the corresponding higher-order type parameter clauses of ´a_i´ and ´a'_i´.
+
+ [^argisnotwildcard]: In these cases, if `T_i` and/or `U_i` are wildcard type arguments, the [simplification rules](#simplification-rules) for parameterized types allow them to be reduced to real types.
+
+A declaration or definition in some compound type or class type ´C´ _subsumes_ another declaration of the same name in some compound type or class type ´C'´, if one of the following holds.
+
+- A value declaration or definition that defines a name ´x´ with type ´T´ subsumes a value or method declaration that defines ´x´ with type ´T'´, provided ´T <: T'´.
+- A method declaration or definition that defines a name ´x´ with type ´T´ subsumes a method declaration that defines ´x´ with type ´T'´, provided ´T <: T'´.
+- A type alias `type ´t´[´T_1´, ..., ´T_n´] = ´T´` subsumes a type alias `type ´t´[´T_1´, ..., ´T_n´] = ´T'´` if ´T \equiv T'´.
+- A type declaration `type ´t´[´T_1´, ..., ´T_n´] >: ´L´ <: ´U´` subsumes a type declaration `type ´t´[´T_1´, ..., ´T_n´] >: ´L'´ <: ´U'´` if ´L' <: L´ and ´U <: U'´.
+- A type or class definition that binds a type name ´t´ subsumes an abstract type declaration `type t[´T_1´, ..., ´T_n´] >: L <: U` if ´L <: t <: U´.
+
+#### Least upper bounds and greatest lower bounds
+
+The ´(<:)´ relation forms a pre-order between types, i.e. it is transitive and reflexive.
+This allows us to define _least upper bounds_ and _greatest lower bounds_ of a set of types in terms of that order.
+
+- the _least upper bound_ of `A` and `B` is the smallest type `L` such that `A` <: `L` and `B` <: `L`.
+- the _greatest lower bound_ of `A` and `B` is the largest type `G` such that `G` <: `A` and `G` <: `B`.
+
+By construction, for all types `A` and `B`, the least upper bound of `A` and `B` is `A | B`, and their greatest lower bound is `A & B`.
+
+### Weak Conformance
+
+In some situations Scala uses a more general conformance relation.
+A type ´S´ _weakly conforms_ to a type ´T´, written ´S <:_w T´, if ´S <: T´ or both ´S´ and ´T´ are primitive number types and ´S´ precedes ´T´ in the following ordering.
+
+```scala
+Byte  ´<:_w´ Short
+Short ´<:_w´ Int
+Char  ´<:_w´ Int
+Int   ´<:_w´ Long
+Long  ´<:_w´ Float
+Float ´<:_w´ Double
+```
+
+A _weak least upper bound_ is a least upper bound with respect to weak conformance.
+
+### Compatibility
+A type ´T´ is _compatible_ to a type ´U´ if ´T´ (or its corresponding function type) [weakly conforms](#weak-conformance) to ´U´ after applying [eta-expansion](06-expressions.html#eta-expansion).
+If ´T´ is a method type, it is converted to the corresponding function type.
+If the types do not weakly conform, the following alternatives are checked in order:
+- dropping by-name modifiers: if ´U´ is of the shape `´=> U'´` (and ´T´ is not), `´T <:_w U'´`;
+- SAM conversion: if ´T´ corresponds to a function type, and ´U´ declares a single abstract method whose type [corresponds](06-expressions.html#sam-conversion) to the function type ´U'´, `´T <:_w U'´`;
+- [implicit conversion](07-implicits.html#views): there is an implicit conversion from ´T´ to ´U´ in scope.
+
+#### Examples
+
+##### Function compatibility via SAM conversion
+
+Given the definitions
+
+```scala
+def foo(x: Int => String): Unit
+def foo(x: ToString): Unit
+
+trait ToString { def convert(x: Int): String }
+```
+
+The application `foo((x: Int) => x.toString)` [resolves](06-expressions.html#overloading-resolution) to the first overload, as it's more specific:
+- `Int => String` is compatible to `ToString` -- when expecting a value of type `ToString`, you may pass a function literal from `Int` to `String`, as it will be SAM-converted to said function;
+- `ToString` is not compatible to `Int => String` -- when expecting a function from `Int` to `String`, you may not pass a `ToString`.
+
+## Volatile Types
+
+Type volatility approximates the possibility that a type parameter or abstract type instance of a type does not have any non-null values.
+A value member of a volatile type cannot appear in a [path](#paths).
+
+A type is _volatile_ if it falls into one of three categories:
+
+A compound type `´T_1´ with ... with ´T_n´ {´R\,´}` is volatile if one of the following three conditions holds.
+
+1. One of ´T_2, ..., T_n´ is a type parameter or abstract type, or
+1. ´T_1´ is an abstract type and either the refinement ´R´ or a type ´T_j´ for ´j > 1´ contributes an abstract member to the compound type, or
+1. one of ´T_1, ..., T_n´ is a singleton type.
+
+Here, a type ´S´ _contributes an abstract member_ to a type ´T´ if ´S´ contains an abstract member that is also a member of ´T´.
+A refinement ´R´ contributes an abstract member to a type ´T´ if ´R´ contains an abstract declaration which is also a member of ´T´.
+
+A type designator is volatile if it is an alias of a volatile type, or if it designates a type parameter or abstract type that has a volatile type as its upper bound.
+
+A singleton type `´p´.type` is volatile if the underlying type of path ´p´ is volatile.
+
+## Type Erasure
+
+A type is called _generic_ if it contains type arguments or type variables.
+_Type erasure_ is a mapping from (possibly generic) types to non-generic types.
+We write ´|T|´ for the erasure of type ´T´.
+The erasure mapping is defined as follows.
+
+- The erasure of `scala.AnyKind` is `Object`.
+- The erasure of an alias type is the erasure of its right-hand side.
+- The erasure of an abstract type is the erasure of its upper bound.
+- The erasure of the parameterized type `scala.Array´[T_1]´` is `scala.Array´[|T_1|]´`.
+- The erasure of every other parameterized type ´T[T_1, ..., T_n]´ is ´|T|´.
+- The erasure of a singleton type `´p´.type` is the erasure of the type of ´p´.
+- The erasure of a type projection `´T´#´x´` is `|´T´|#´x´`.
+- The erasure of a compound type `´T_1´ with ... with ´T_n´ {´R\,´}` is the erasure of the intersection dominator of ´T_1, ..., T_n´.
+- The erasure of a union type ´S | T´ is the _erased least upper bound_ (_elub_) of the erasures of ´S´ and ´T´.
+- The erasure of an intersection type ´S & T´ is the _eglb_ (erased greatest lower bound) of the erasures of ´S´ and ´T´.
+
+The erased LUB is computed as follows:
+
+- if both arguments are arrays of objects, an array of the erased LUB of the element types
+- if both arguments are arrays of the same primitive type, an array of that primitive type
+- if one argument is an array of primitives and the other is an array of objects, [`Object`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/Object.html)
+- if one argument is an array, [`Object`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/Object.html)
+- otherwise a common superclass or trait S of the argument classes, with the following two properties:
+  - S is minimal: no other common superclass or trait derives from S, and
+  - S is last: in the linearization of the first argument type ´|A|´ there are no minimal common superclasses or traits that come after S.
+  The reason to pick last is that we prefer classes over traits that way, which leads to more predictable bytecode and (?) faster dynamic dispatch.
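+
+As a rough illustration of the "minimal" and "last" conditions, consider the following hypothetical classes (names of our choosing):
+
+```scala
+trait T
+class B
+class C extends B with T // linearization: C, T, B, ...
+class D extends B with T
+```
+
+Both `B` and `T` are minimal common supertypes of `C` and `D`, and neither derives from the other.
+In the linearization of `C`, the class `B` comes after the trait `T`, so under the rules above the erased LUB of `C` and `D` would be `B`: the class is preferred over the trait.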
+ +The rules for ´eglb(A, B)´ are given below in pseudocode: + +``` +eglb(scala.Array[A], JArray[B]) = scala.Array[eglb(A, B)] +eglb(scala.Array[T], _) = scala.Array[T] +eglb(_, scala.Array[T]) = scala.Array[T] +eglb(A, B) = A if A extends B +eglb(A, B) = B if B extends A +eglb(A, _) = A if A is not a trait +eglb(_, B) = B if B is not a trait +eglb(A, _) = A // use first +``` diff --git a/docs/_spec/04-basic-declarations-and-definitions.md b/docs/_spec/04-basic-declarations-and-definitions.md new file mode 100644 index 000000000000..5c45cc5c7819 --- /dev/null +++ b/docs/_spec/04-basic-declarations-and-definitions.md @@ -0,0 +1,758 @@ +--- +title: Basic Declarations & Definitions +layout: default +chapter: 4 +--- + +# Basic Declarations and Definitions + +```ebnf +Dcl ::= ‘val’ ValDcl + | ‘var’ VarDcl + | ‘def’ FunDcl + | ‘type’ {nl} TypeDcl +PatVarDef ::= ‘val’ PatDef + | ‘var’ VarDef +Def ::= PatVarDef + | ‘def’ FunDef + | ‘type’ {nl} TypeDef + | TmplDef +``` + +A _declaration_ introduces names and assigns them types. +It can form part of a [class definition](05-classes-and-objects.html#templates) or of a refinement in a [compound type](03-types.html#compound-types). + +A _definition_ introduces names that denote terms or types. +It can form part of an object or class definition or it can be local to a block. +Both declarations and definitions produce _bindings_ that associate type names with type definitions or bounds, and that associate term names with types. + +The scope of a name introduced by a declaration or definition is the whole statement sequence containing the binding. +However, there is a restriction on forward references in blocks: +In a statement sequence ´s_1 ... s_n´ making up a block, if a simple name in ´s_i´ refers to an entity defined by ´s_j´ where ´j \geq i´, then for all ´s_k´ between and including ´s_i´ and ´s_j´, + +- ´s_k´ cannot be a variable definition. +- If ´s_k´ is a value definition, it must be lazy. + + + +## Value Declarations and Definitions + +```ebnf +Dcl ::= ‘val’ ValDcl +ValDcl ::= ids ‘:’ Type +PatVarDef ::= ‘val’ PatDef +PatDef ::= Pattern2 {‘,’ Pattern2} [‘:’ Type] ‘=’ Expr +ids ::= id {‘,’ id} +``` + +A value declaration `val ´x´: ´T´` introduces ´x´ as a name of a value of type ´T´. + +A value definition `val ´x´: ´T´ = ´e´` defines ´x´ as a name of the value that results from the evaluation of ´e´. +If the value definition is not recursive, the type ´T´ may be omitted, in which case the [packed type](06-expressions.html#expression-typing) of expression ´e´ is assumed. +If a type ´T´ is given, then ´e´ is expected to conform to it. + +Evaluation of the value definition implies evaluation of its right-hand side ´e´, unless it has the modifier `lazy`. +The effect of the value definition is to bind ´x´ to the value of ´e´ +converted to type ´T´. +A `lazy` value definition evaluates its right hand side ´e´ the first time the value is accessed. + +A _constant value definition_ is of the form + +```scala +final val x = e +``` + +where `e` is a [constant expression](06-expressions.html#constant-expressions). +The `final` modifier must be present and no type annotation may be given. +References to the constant value `x` are themselves treated as constant expressions; in the generated code they are replaced by the definition's right-hand side `e`. + +Value definitions can alternatively have a [pattern](08-pattern-matching.html#patterns) as left-hand side. 
+If ´p´ is some pattern other than a simple name or a name followed by a colon and a type, then the value definition `val ´p´ = ´e´` is expanded as follows: + +1. If the pattern ´p´ has bound variables ´x_1, ..., x_n´, where ´n > 1´: + +```scala +val ´\$x´ = ´e´ match {case ´p´ => (´x_1, ..., x_n´)} +val ´x_1´ = ´\$x´._1 +... +val ´x_n´ = ´\$x´._n +``` + +Here, ´\$x´ is a fresh name. + +2. If ´p´ has a unique bound variable ´x´: + +```scala +val ´x´ = ´e´ match { case ´p´ => ´x´ } +``` + +3. If ´p´ has no bound variables: + +```scala +´e´ match { case ´p´ => ()} +``` + +###### Example + +The following are examples of value definitions + +```scala +val pi = 3.1415 +val pi: Double = 3.1415 // equivalent to first definition +val Some(x) = f() // a pattern definition +val x :: xs = mylist // an infix pattern definition +``` + +The last two definitions have the following expansions. + +```scala +val x = f() match { case Some(x) => x } + +val x´\$´ = mylist match { case x :: xs => (x, xs) } +val x = x´\$´._1 +val xs = x´\$´._2 +``` + +The name of any declared or defined value may not end in `_=`. + +A value declaration `val ´x_1, ..., x_n´: ´T´` is a shorthand for the sequence of value declarations `val ´x_1´: ´T´; ...; val ´x_n´: ´T´`. +A value definition `val ´p_1, ..., p_n´ = ´e´` is a shorthand for the sequence of value definitions `val ´p_1´ = ´e´; ...; val ´p_n´ = ´e´`. +A value definition `val ´p_1, ..., p_n: T´ = ´e´` is a shorthand for the sequence of value definitions `val ´p_1: T´ = ´e´; ...; val ´p_n: T´ = ´e´`. + +## Variable Declarations and Definitions + +```ebnf +Dcl ::= ‘var’ VarDcl +PatVarDef ::= ‘var’ VarDef +VarDcl ::= ids ‘:’ Type +VarDef ::= PatDef + | ids ‘:’ Type ‘=’ ‘_’ +``` + +A variable declaration `var ´x´: ´T´` is equivalent to the declarations of both a _getter method_ ´x´ *and* a _setter method_ `´x´_=`: + +```scala +def ´x´: ´T´ +def ´x´_= (´y´: ´T´): Unit +``` + +An implementation of a class may _define_ a declared variable using a variable definition, or by defining the corresponding setter and getter methods. + +A variable definition `var ´x´: ´T´ = ´e´` introduces a mutable variable with type ´T´ and initial value as given by the expression ´e´. +The type ´T´ can be omitted, in which case the type of ´e´ is assumed. +If ´T´ is given, then ´e´ is expected to [conform to it](06-expressions.html#expression-typing). + +Variable definitions can alternatively have a [pattern](08-pattern-matching.html#patterns) as left-hand side. +A variable definition `var ´p´ = ´e´` where ´p´ is a pattern other than a simple name or a name followed by a colon and a type is expanded in the same way as a [value definition](#value-declarations-and-definitions) `val ´p´ = ´e´`, except that the free names in ´p´ are introduced as mutable variables, not values. + +The name of any declared or defined variable may not end in `_=`. + +A variable definition `var ´x´: ´T´ = _` can appear only as a member of a template. +It introduces a mutable field with type ´T´ and a default initial value. 
+The default value depends on the type ´T´ as follows:
+
+| default  | type ´T´                           |
+|----------|------------------------------------|
+|`0`       | `Int` or one of its subrange types |
+|`0L`      | `Long`                             |
+|`0.0f`    | `Float`                            |
+|`0.0d`    | `Double`                           |
+|`false`   | `Boolean`                          |
+|`()`      | `Unit`                             |
+|`null`    | all other types                    |
+
+When they occur as members of a template, both forms of variable definition also introduce a getter method ´x´ which returns the value currently assigned to the variable, as well as a setter method `´x´_=` which changes the value currently assigned to the variable.
+The methods have the same signatures as for a variable declaration.
+The template then has these getter and setter methods as members, whereas the original variable cannot be accessed directly as a template member.
+
+###### Example
+
+The following example shows how _properties_ can be simulated in Scala.
+It defines a class `TimeOfDayVar` of time values with updatable integer fields representing hours, minutes, and seconds.
+Its implementation contains tests that allow only legal values to be assigned to these fields.
+The user code, on the other hand, accesses these fields just like normal variables.
+
+```scala
+class TimeOfDayVar {
+  private var h: Int = 0
+  private var m: Int = 0
+  private var s: Int = 0
+
+  def hours = h
+  def hours_= (h: Int) = if (0 <= h && h < 24) this.h = h
+                         else throw new DateError()
+
+  def minutes = m
+  def minutes_= (m: Int) = if (0 <= m && m < 60) this.m = m
+                           else throw new DateError()
+
+  def seconds = s
+  def seconds_= (s: Int) = if (0 <= s && s < 60) this.s = s
+                           else throw new DateError()
+}
+val d = new TimeOfDayVar
+d.hours = 8; d.minutes = 30; d.seconds = 0
+d.hours = 25 // throws a DateError exception
+```
+
+A variable declaration `var ´x_1, ..., x_n´: ´T´` is a shorthand for the sequence of variable declarations `var ´x_1´: ´T´; ...; var ´x_n´: ´T´`.
+A variable definition `var ´x_1, ..., x_n´ = ´e´` is a shorthand for the sequence of variable definitions `var ´x_1´ = ´e´; ...; var ´x_n´ = ´e´`.
+A variable definition `var ´x_1, ..., x_n: T´ = ´e´` is a shorthand for the sequence of variable definitions `var ´x_1: T´ = ´e´; ...; var ´x_n: T´ = ´e´`.
+
+## Type Declarations and Type Aliases
+
+```ebnf
+Dcl ::= ‘type’ {nl} TypeDcl
+TypeDcl ::= id [TypeParamClause] [‘>:’ Type] [‘<:’ Type]
+Def ::= ‘type’ {nl} TypeDef
+TypeDef ::= id [TypeParamClause] ‘=’ Type
+```
+
+### Desugaring of parameterized type declarations
+A parameterized type declaration is desugared into an unparameterized type declaration
+whose bounds are type lambdas with explicit variance annotations.
+
+#### Abstract Type
+An abstract type
+```scala
+type ´t´[´\mathit{tps}\,´] >: ´L´ <: ´U´
+```
+is desugared into an unparameterized abstract type as follows:
+- If `L` conforms to `Nothing`, then,
+
+  ```scala
+type ´t´ >: Nothing
+       <: [´\mathit{tps'}\,´] =>> ´U´
+  ```
+- otherwise,
+
+  ```scala
+type ´t´ >: [´\mathit{tps'}\,´] =>> ´L´
+       <: [´\mathit{tps'}\,´] =>> ´U´
+  ```
+
+If at least one of the ´\mathit{tps}´ contains an explicit variance annotation, then ´\mathit{tps'} = \mathit{tps}´, otherwise we infer the variance of each type parameter as with the user-written type lambda `[´\mathit{tps}\,´] =>> ´U´`.
+
+The same desugaring applies to type parameters. For instance,
+```scala
+[F[X] <: Coll[X]]
+```
+is treated as a shorthand for
+```scala
+[F >: Nothing <: [X] =>> Coll[X]]
+```
+
+#### Type Alias
+A parameterized type alias
+```scala
+type ´t´[´\mathit{tps}\,´] = ´T´
+```
+is desugared into an unparameterized type alias
+```scala
+type ´t´ = [´\mathit{tps'}\,´] =>> ´T´
+```
+where ´\mathit{tps'}´ is computed as in the previous case.
+
+´\color{red}{\text{TODO SCALA3: Everything else in this section (and the next one
+on type parameters) needs to be rewritten to take into account the desugaring described above.}}´
+
+A _type declaration_ `type ´t´[´\mathit{tps}\,´] >: ´L´ <: ´U´` declares ´t´ to be an abstract type with lower bound type ´L´ and upper bound type ´U´.
+If the type parameter clause `[´\mathit{tps}\,´]` is omitted, ´t´ abstracts over a proper type, otherwise ´t´ stands for a type constructor that accepts type arguments as described by the type parameter clause.
+
+If a type declaration appears as a member declaration of a type, implementations of the type may implement ´t´ with any type ´T´ for which ´L <: T <: U´.
+It is a compile-time error if ´L´ does not conform to ´U´.
+Either or both bounds may be omitted.
+If the lower bound ´L´ is absent, the bottom type `scala.Nothing` is assumed.
+If the upper bound ´U´ is absent, the top type `scala.Any` is assumed.
+
+A type constructor declaration imposes additional restrictions on the concrete types for which ´t´ may stand.
+Besides the bounds ´L´ and ´U´, the type parameter clause may impose higher-order bounds and variances, as governed by the [conformance of type constructors](03-types.html#conformance).
+
+The scope of a type parameter extends over the bounds `>: ´L´ <: ´U´` and the type parameter clause ´\mathit{tps}´ itself.
+A higher-order type parameter clause (of an abstract type constructor ´tc´) has the same kind of scope, restricted to the declaration of the type parameter ´tc´.
+
+To illustrate nested scoping, these declarations are all equivalent: `type t[m[x] <: Bound[x], Bound[x]]`, `type t[m[x] <: Bound[x], Bound[y]]` and `type t[m[x] <: Bound[x], Bound[_]]`, as the scope of, e.g., the type parameter of ´m´ is limited to the declaration of ´m´.
+In all of them, ´t´ is an abstract type member that abstracts over two type constructors: ´m´ stands for a type constructor that takes one type parameter and that must be a subtype of ´Bound´, ´t´'s second type constructor parameter.
+`t[MutableList, Iterable]` is a valid use of ´t´.
+
+A _type alias_ `type ´t´ = ´T´` defines ´t´ to be an alias name for the type ´T´.
+The left hand side of a type alias may have a type parameter clause, e.g. `type ´t´[´\mathit{tps}\,´] = ´T´`.
+The scope of a type parameter extends over the right hand side ´T´ and the type parameter clause ´\mathit{tps}´ itself.
+
+The scope rules for [definitions](#basic-declarations-and-definitions) and [type parameters](#method-declarations-and-definitions) make it possible that a type name appears in its own bound or in its right-hand side.
+However, it is a static error if a type alias refers recursively to the defined type constructor itself.
+That is, the type ´T´ in a type alias `type ´t´[´\mathit{tps}\,´] = ´T´` may not refer directly or indirectly to the name ´t´.
+It is also an error if an abstract type is directly or indirectly its own upper or lower bound.
+
+###### Example
+
+The following are legal type declarations and definitions:
+
+```scala
+type IntList = List[Integer]
+type T <: Comparable[T]
+type Two[A] = Tuple2[A, A]
+type MyCollection[+X] <: Iterable[X]
+```
+
+The following are illegal:
+
+```scala
+type Abs = Comparable[Abs]      // recursive type alias
+
+type S <: T                     // S, T are bounded by themselves.
+type T <: S
+
+type T >: Comparable[T.That]    // Cannot select from T.
+                                // T is a type, not a value
+type MyCollection <: Iterable   // Type constructor members must explicitly
+                                // state their type parameters.
+```
+
+If a type alias `type ´t´[´\mathit{tps}\,´] = ´S´` refers to a class type ´S´, the name ´t´ can also be used as a constructor for objects of type ´S´.
+
+###### Example
+
+Suppose we make `Pair` an alias of the parameterized class `Tuple2`, as follows:
+
+```scala
+type Pair[+A, +B] = Tuple2[A, B]
+object Pair {
+  def apply[A, B](x: A, y: B) = Tuple2(x, y)
+  def unapply[A, B](x: Tuple2[A, B]): Option[Tuple2[A, B]] = Some(x)
+}
+```
+
+As a consequence, for any two types ´S´ and ´T´, the type `Pair[´S´, ´T\,´]` is equivalent to the type `Tuple2[´S´, ´T\,´]`.
+`Pair` can also be used as a constructor instead of `Tuple2`, as in:
+
+```scala
+val x: Pair[Int, String] = new Pair(1, "abc")
+```
+
+## Type Parameters
+
+```ebnf
+TypeParamClause  ::=  ‘[’ VariantTypeParam {‘,’ VariantTypeParam} ‘]’
+VariantTypeParam ::=  {Annotation} [‘+’ | ‘-’] TypeParam
+TypeParam        ::=  (id | ‘_’) [TypeParamClause] [‘>:’ Type] [‘<:’ Type] [‘:’ Type]
+```
+
+Type parameters appear in type definitions, class definitions, and method definitions.
+In this section we consider only type parameter definitions with lower bounds `>: ´L´` and upper bounds `<: ´U´`, whereas a discussion of context bounds `: ´U´` and view bounds `<% ´U´` is deferred to [here](07-implicits.html#context-bounds-and-view-bounds).
+
+The most general form of a proper type parameter is
+`´@a_1 ... @a_n´ ´\pm´ ´t´ >: ´L´ <: ´U´`.
+Here, ´L´ and ´U´ are lower and upper bounds that constrain possible type arguments for the parameter.
+It is a compile-time error if ´L´ does not conform to ´U´.
+´\pm´ is a _variance_, i.e. an optional prefix of either `+` or `-`. One or more annotations may precede the type parameter.
+
+The names of all type parameters must be pairwise different in their enclosing type parameter clause.
+The scope of a type parameter includes in each case the whole type parameter clause.
+Therefore it is possible that a type parameter appears as part of its own bounds or the bounds of other type parameters in the same clause.
+However, a type parameter may not be bounded directly or indirectly by itself.
+
+A type constructor parameter adds a nested type parameter clause to the type parameter.
+The most general form of a type constructor parameter is `´@a_1 ... @a_n \pm t[\mathit{tps}\,]´ >: ´L´ <: ´U´`.
+
+The above scoping restrictions are generalized to the case of nested type parameter clauses, which declare higher-order type parameters.
+Higher-order type parameters (the type parameters of a type parameter ´t´) are only visible in their immediately surrounding parameter clause (possibly including clauses at a deeper nesting level) and in the bounds of ´t´.
+Therefore, their names must only be pairwise different from the names of other visible parameters.
+Since the names of higher-order type parameters are thus often irrelevant, they may be denoted with a `‘_’`, which is nowhere visible.
+
+###### Example
+Here are some well-formed type parameter clauses:
+
+```scala
+[S, T]
+[@specialized T, U]
+[Ex <: Throwable]
+[A <: Comparable[B], B <: A]
+[A, B >: A, C >: A <: B]
+[M[X], N[X]]
+[M[_], N[_]]    // equivalent to previous clause
+[M[X <: Bound[X]], Bound[_]]
+[M[+X] <: Iterable[X]]
+```
+
+The following type parameter clauses are illegal:
+
+```scala
+[A >: A]                  // illegal, `A' has itself as bound
+[A <: B, B <: C, C <: A]  // illegal, `A' has itself as bound
+[A, B, C >: A <: B]       // illegal, lower bound `A' of `C' does
+                          // not conform to upper bound `B'.
+```
+
+## Variance Annotations
+
+Variance annotations indicate how instances of parameterized types vary with respect to [subtyping](03-types.html#conformance).
+A ‘+’ variance indicates a covariant dependency, a ‘-’ variance indicates a contravariant dependency, and a missing variance indication indicates an invariant dependency.
+
+A variance annotation constrains the way the annotated type variable may appear in the type or class which binds the type parameter.
+In a type definition `type ´T´[´\mathit{tps}\,´] = ´S´`, or a type declaration `type ´T´[´\mathit{tps}\,´] >: ´L´ <: ´U´`, type parameters labeled ‘+’ must only appear in covariant position whereas type parameters labeled ‘-’ must only appear in contravariant position.
+Analogously, for a class definition `class ´C´[´\mathit{tps}\,´](´\mathit{ps}\,´) extends ´T´ { ´x´: ´S´ => ...}`, type parameters labeled ‘+’ must only appear in covariant position in the self type ´S´ and the template ´T´, whereas type parameters labeled ‘-’ must only appear in contravariant position.
+
+The variance position of a type parameter in a type or template is defined as follows.
+Let the opposite of covariance be contravariance, and the opposite of invariance be itself.
+The top-level of the type or template is always in covariant position.
+The variance position changes at the following constructs.
+
+- The variance position of a method parameter is the opposite of the variance position of the enclosing parameter clause.
+- The variance position of a type parameter is the opposite of the variance position of the enclosing type parameter clause.
+- The variance position of the lower bound of a type declaration or type parameter is the opposite of the variance position of the type declaration or parameter.
+- The type of a mutable variable is always in invariant position.
+- The right-hand side of a type alias is always in invariant position.
+- The prefix ´S´ of a type selection `´S´#´T´` is always in invariant position.
+- For a type argument ´T´ of a type `´S´[´... T ...´ ]`:
+If the corresponding type parameter is invariant, then ´T´ is in invariant position.
+If the corresponding type parameter is contravariant, the variance position of ´T´ is the opposite of the variance position of the enclosing type `´S´[´... T ...´ ]`.
+
+References to the type parameters in [object-private or object-protected values, types, variables, or methods](05-classes-and-objects.html#modifiers) of the class are not checked for their variance position.
+In these members the type parameter may appear anywhere without restricting its legal variance annotations.
+
+###### Example
+The following variance annotation is legal.
+
+```scala
+abstract class P[+A, +B] {
+  def fst: A; def snd: B
+}
+```
+
+With this variance annotation, type instances of ´P´ subtype covariantly with respect to their arguments.
+For instance, + +```scala +P[IOException, String] <: P[Throwable, AnyRef] +``` + +If the members of ´P´ are mutable variables, the same variance annotation becomes illegal. + +```scala +abstract class Q[+A, +B](x: A, y: B) { + var fst: A = x // **** error: illegal variance: + var snd: B = y // `A', `B' occur in invariant position. +} +``` + +If the mutable variables are object-private, the class definition becomes legal again: + +```scala +abstract class R[+A, +B](x: A, y: B) { + private[this] var fst: A = x // OK + private[this] var snd: B = y // OK +} +``` + +###### Example + +The following variance annotation is illegal, since ´a´ appears in contravariant position in the parameter of `append`: + +```scala +abstract class Sequence[+A] { + def append(x: Sequence[A]): Sequence[A] + // **** error: illegal variance: + // `A' occurs in contravariant position. +} +``` + +The problem can be avoided by generalizing the type of `append` by means of a lower bound: + +```scala +abstract class Sequence[+A] { + def append[B >: A](x: Sequence[B]): Sequence[B] +} +``` + +###### Example + +```scala +abstract class OutputChannel[-A] { + def write(x: A): Unit +} +``` + +With that annotation, we have that `OutputChannel[AnyRef]` conforms to `OutputChannel[String]`. +That is, a channel on which one can write any object can substitute for a channel on which one can write only strings. + +## Method Declarations and Definitions + +```ebnf +Dcl ::= ‘def’ FunDcl +FunDcl ::= FunSig ‘:’ Type +Def ::= ‘def’ FunDef +FunDef ::= FunSig [‘:’ Type] ‘=’ Expr +FunSig ::= id [FunTypeParamClause] ParamClauses +FunTypeParamClause ::= ‘[’ TypeParam {‘,’ TypeParam} ‘]’ +ParamClauses ::= {ParamClause} [[nl] ‘(’ ‘implicit’ Params ‘)’] +ParamClause ::= [nl] ‘(’ [Params] ‘)’ +Params ::= Param {‘,’ Param} +Param ::= {Annotation} id [‘:’ ParamType] [‘=’ Expr] +ParamType ::= Type + | ‘=>’ Type + | Type ‘*’ +``` + +A _method declaration_ has the form `def ´f\,\mathit{psig}´: ´T´`, where ´f´ is the method's name, ´\mathit{psig}´ is its parameter signature and ´T´ is its result type. +A _method definition_ `def ´f\,\mathit{psig}´: ´T´ = ´e´` also includes a _method body_ ´e´, i.e. an expression which defines the method's result. +A parameter signature consists of an optional type parameter clause `[´\mathit{tps}\,´]`, followed by zero or more value parameter clauses `(´\mathit{ps}_1´)...(´\mathit{ps}_n´)`. +Such a declaration or definition introduces a value with a (possibly polymorphic) method type whose parameter types and result type are as given. + +The type of the method body is expected to [conform](06-expressions.html#expression-typing) to the method's declared result type, if one is given. +If the method definition is not recursive, the result type may be omitted, in which case it is determined from the packed type of the method body. + +A _type parameter clause_ ´\mathit{tps}´ consists of one or more [type declarations](#type-declarations-and-type-aliases), which introduce type parameters, possibly with bounds. +The scope of a type parameter includes the whole signature, including any of the type parameter bounds as well as the method body, if it is present. + +A _value parameter clause_ ´\mathit{ps}´ consists of zero or more formal parameter bindings such as `´x´: ´T´` or `´x: T = e´`, which bind value parameters and associate them with their types. + +### Default Arguments + +Each value parameter declaration may optionally define a default argument. 
+The default argument expression ´e´ is type-checked with an expected type ´T'´ obtained by replacing all occurrences of the method's type parameters in ´T´ by the undefined type. + +For every parameter ´p_{i,j}´ with a default argument a method named `´f\$´default´\$´n` is generated which computes the default argument expression. +Here, ´n´ denotes the parameter's position in the method declaration. +These methods are parametrized by the type parameter clause `[´\mathit{tps}\,´]` and all value parameter clauses `(´\mathit{ps}_1´)...(´\mathit{ps}_{i-1}´)` preceding ´p_{i,j}´. +The `´f\$´default´\$´n` methods are inaccessible for user programs. + +###### Example +In the method + +```scala +def compare[T](a: T = 0)(b: T = a) = (a == b) +``` + +the default expression `0` is type-checked with an undefined expected +type. +When applying `compare()`, the default value `0` is inserted and `T` is instantiated to `Int`. +The methods computing the default arguments have the form: + +```scala +def compare´\$´default´\$´1[T]: Int = 0 +def compare´\$´default´\$´2[T](a: T): T = a +``` + +The scope of a formal value parameter name ´x´ comprises all subsequent parameter clauses, as well as the method return type and the method body, if they are given. +Both type parameter names and value parameter names must be pairwise distinct. + +A default value which depends on earlier parameters uses the actual arguments if they are provided, not the default arguments. + +```scala +def f(a: Int = 0)(b: Int = a + 1) = b // OK +// def f(a: Int = 0, b: Int = a + 1) // "error: not found: value a" +f(10)() // returns 11 (not 1) +``` + +If an [implicit argument](07-implicits.html#implicit-parameters) is not found by implicit search, it may be supplied using a default argument. + +```scala +implicit val i: Int = 2 +def f(implicit x: Int, s: String = "hi") = s * x +f // "hihi" +``` + +### By-Name Parameters + +```ebnf +ParamType ::= ‘=>’ Type +``` + +The type of a value parameter may be prefixed by `=>`, e.g. `´x´: => ´T´`. +The type of such a parameter is then the parameterless method type `=> ´T´`. +This indicates that the corresponding argument is not evaluated at the point of method application, but instead is evaluated at each use within the method. +That is, the argument is evaluated using _call-by-name_. + +The by-name modifier is disallowed for parameters of classes that carry a `val` or `var` prefix, including parameters of case classes for which a `val` prefix is implicitly generated. + +###### Example +The declaration + +```scala +def whileLoop (cond: => Boolean) (stat: => Unit): Unit +``` + +indicates that both parameters of `whileLoop` are evaluated using call-by-name. + +### Repeated Parameters + +```ebnf +ParamType ::= Type ‘*’ +``` + +The last value parameter of a parameter section may be suffixed by `'*'`, e.g. `(..., ´x´:´T´*)`. +The type of such a _repeated_ parameter inside the method is then the sequence type `scala.Seq[´T´]`. +Methods with repeated parameters `´T´*` take a variable number of arguments of type ´T´. +That is, if a method ´m´ with type `(´p_1:T_1, ..., p_n:T_n, p_s:S´*)´U´` is applied to arguments ´(e_1, ..., e_k)´ where ´k \geq n´, then ´m´ is taken in that application to have type ´(p_1:T_1, ..., p_n:T_n, p_s:S, ..., p_{s'}:S)U´, with ´k - n´ occurrences of type ´S´ where any parameter names beyond ´p_s´ are fresh. The only exception to this rule is if the last argument is marked to be a _sequence argument_ via a `_*` type annotation. 
+If ´m´ above is applied to arguments `(´e_1, ..., e_n, e'´: _*)`, then the type of ´m´ in that application is taken to be `(´p_1:T_1, ... , p_n:T_n,p_{s}:´scala.Seq[´S´])`.
+
+It is not allowed to define any default arguments in a parameter section with a repeated parameter.
+
+###### Example
+The following method definition computes the sum of the squares of a variable number of integer arguments.
+
+```scala
+def sum(args: Int*) = {
+  var result = 0
+  for (arg <- args) result += arg
+  result
+}
+```
+
+The following applications of this method yield `0`, `1`, `6`, in that order.
+
+```scala
+sum()
+sum(1)
+sum(1, 2, 3)
+```
+
+Furthermore, assume the definition:
+
+```scala
+val xs = List(1, 2, 3)
+```
+
+The following application of method `sum` is ill-formed:
+
+```scala
+sum(xs)       // ***** error: expected: Int, found: List[Int]
+```
+
+By contrast, the following application is well formed and yields again the result `6`:
+
+```scala
+sum(xs: _*)
+```
+
+### Method Return Type Inference
+
+A class member definition ´m´ that overrides some other method ´m'´ in a base class of ´C´ may leave out the return type, even if it is recursive.
+In this case, whether or not ´m´ is recursive, its return type will be the return type of ´m'´.
+
+###### Example
+Assume the following definitions:
+
+```scala
+trait I {
+  def factorial(x: Int): Int
+}
+class C extends I {
+  def factorial(x: Int) = if (x == 0) 1 else x * factorial(x - 1)
+}
+```
+
+Here, it is OK to leave out the result type of `factorial` in `C`, even though the method is recursive.
+
+## Import Clauses
+
+```ebnf
+Import          ::= ‘import’ ImportExpr {‘,’ ImportExpr}
+ImportExpr      ::= StableId ‘.’ (id | ‘_’ | ImportSelectors)
+ImportSelectors ::= ‘{’ {ImportSelector ‘,’}
+                    (ImportSelector | ‘_’) ‘}’
+ImportSelector  ::= id [‘=>’ id | ‘=>’ ‘_’]
+```
+
+An import clause has the form `import ´p´.´I´` where ´p´ is a [stable identifier](03-types.html#paths) and ´I´ is an import expression.
+The import expression determines a set of names of importable members of ´p´ which are made available without qualification.
+A member ´m´ of ´p´ is _importable_ if it is [accessible](05-classes-and-objects.html#modifiers).
+The most general form of an import expression is a list of _import selectors_
+
+```scala
+{ ´x_1´ => ´y_1, ..., x_n´ => ´y_n´, _ }
+```
+
+for ´n \geq 0´, where the final wildcard `‘_’` may be absent.
+It makes available each importable member `´p´.´x_i´` under the unqualified name ´y_i´. I.e. every import selector `´x_i´ => ´y_i´` renames `´p´.´x_i´` to ´y_i´.
+If a final wildcard is present, all importable members ´z´ of ´p´ other than `´x_1, ..., x_n, y_1, ..., y_n´` are also made available under their own unqualified names.
+
+Import selectors work in the same way for type and term members.
+For instance, an import clause `import ´p´.{´x´ => ´y\,´}` renames the term name `´p´.´x´` to the term name ´y´ and the type name `´p´.´x´` to the type name ´y´.
+At least one of these two names must reference an importable member of ´p´.
+
+If the target in an import selector is a wildcard, the import selector hides access to the source member.
+For instance, the import selector `´x´ => _` “renames” ´x´ to the wildcard symbol (which is inaccessible as a name in user programs), and thereby effectively prevents unqualified access to ´x´.
+This is useful if there is a final wildcard in the same import selector list, which imports all members not mentioned in previous import selectors.
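+
+For instance, the following clause (a sketch of ours combining renaming, hiding, and a final wildcard) makes `Map` available as `MutableMap`, hides `Set`, and imports all remaining members of `scala.collection.mutable` under their own names:
+
+```scala
+import scala.collection.mutable.{Map => MutableMap, Set => _, _}
+
+val m = MutableMap("one" -> 1)   // refers to scala.collection.mutable.Map
+val s = Set(1, 2, 3)             // refers to the default scala.Set, since
+                                 // mutable.Set is hidden by `Set => _`
+```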
+
+The scope of a binding introduced by an import clause starts immediately after the import clause and extends to the end of the enclosing block, template, package clause, or compilation unit, whichever comes first.
+
+Several shorthands exist. An import selector may be just a simple name ´x´.
+In this case, ´x´ is imported without renaming, so the import selector is equivalent to `´x´ => ´x´`.
+Furthermore, it is possible to replace the whole import selector list by a single identifier or wildcard.
+The import clause `import ´p´.´x´` is equivalent to `import ´p´.{´x\,´}`, i.e. it makes available without qualification the member ´x´ of ´p´. The import clause `import ´p´._` is equivalent to `import ´p´.{_}`, i.e. it makes available without qualification all members of ´p´ (this is analogous to `import ´p´.*` in Java).
+
+An import clause with multiple import expressions `import ´p_1´.´I_1, ..., p_n´.´I_n´` is interpreted as a sequence of import clauses `import ´p_1´.´I_1´; ...; import ´p_n´.´I_n´`.
+
+###### Example
+Consider the object definition:
+
+```scala
+object M {
+  def z = 0
+  def one = 1
+  def add(x: Int, y: Int): Int = x + y
+}
+```
+
+Then the block
+
+```scala
+{ import M.{one, z => zero, _}; add(zero, one) }
+```
+
+is equivalent to the block
+
+```scala
+{ M.add(M.z, M.one) }
+```
diff --git a/docs/_spec/05-classes-and-objects.md b/docs/_spec/05-classes-and-objects.md
new file mode 100644
index 000000000000..6feda780417a
--- /dev/null
+++ b/docs/_spec/05-classes-and-objects.md
@@ -0,0 +1,1214 @@
+---
+title: Classes & Objects
+layout: default
+chapter: 5
+---
+
+# Classes and Objects
+
+```ebnf
+TmplDef ::= [‘case’] ‘class’ ClassDef
+          | [‘case’] ‘object’ ObjectDef
+          | ‘trait’ TraitDef
+```
+
+[Classes](#class-definitions) and [objects](#object-definitions) are both defined in terms of _templates_.
+
+## Templates
+
+```ebnf
+ClassTemplate ::= [EarlyDefs] ClassParents [TemplateBody]
+TraitTemplate ::= [EarlyDefs] TraitParents [TemplateBody]
+ClassParents  ::= Constr {‘with’ AnnotType}
+TraitParents  ::= AnnotType {‘with’ AnnotType}
+TemplateBody  ::= [nl] ‘{’ [SelfType] TemplateStat {semi TemplateStat} ‘}’
+SelfType      ::= id [‘:’ Type] ‘=>’
+                | this ‘:’ Type ‘=>’
+```
+
+A _template_ defines the type signature, behavior and initial state of a trait or class of objects or of a single object.
+Templates form part of instance creation expressions, class definitions, and object definitions.
+A template `´sc´ with ´mt_1´ with ... with ´mt_n´ { ´\mathit{stats}´ }` consists of a constructor invocation ´sc´ which defines the template's _superclass_, trait references `´mt_1, ..., mt_n´` ´(n \geq 0)´, which define the template's _traits_, and a statement sequence ´\mathit{stats}´ which contains initialization code and additional member definitions for the template.
+
+Each trait reference ´mt_i´ must denote a [trait](#traits).
+By contrast, the superclass constructor ´sc´ normally refers to a class which is not a trait.
+It is possible to write a list of parents that starts with a trait reference, e.g. `´mt_1´ with ... with ´mt_n´`.
+In that case the list of parents is implicitly extended to include the supertype of ´mt_1´ as the first parent type.
+The new supertype must have at least one constructor that does not take parameters.
+In the following, we will always assume that this implicit extension has been performed, so that the first parent class of a template is a regular superclass constructor, not a trait reference.
+
+The list of parents of a template must be well-formed.
+This means that the class denoted by the superclass constructor ´sc´ must be a subclass of the superclasses of all the traits ´mt_1, ..., mt_n´.
+In other words, the non-trait classes inherited by a template form a chain in the inheritance hierarchy which starts with the template's superclass.
+
+It is forbidden for a template's superclass constructor ´sc´ to be an [enum class](#enum-definitions), unless the template is the implementation of an [enum case](#enum-definitions) of ´sc´.
+
+The _least proper supertype_ of a template is the class type or [compound type](03-types.html#compound-types) consisting of all its parent class types.
+
+The statement sequence ´\mathit{stats}´ contains member definitions that define new members or overwrite members in the parent classes.
+If the template forms part of an abstract class or trait definition, the statement part ´\mathit{stats}´ may also contain declarations of abstract members.
+If the template forms part of a concrete class definition, ´\mathit{stats}´ may still contain declarations of abstract type members, but not of abstract term members.
+Furthermore, ´\mathit{stats}´ may in any case also contain expressions; these are executed in the order they are given as part of the initialization of a template.
+
+The sequence of template statements may be prefixed with a formal parameter definition and an arrow, e.g. `´x´ =>`, or `´x´:´T´ =>`.
+If a formal parameter is given, it can be used as an alias for the reference `this` throughout the body of the template.
+If the formal parameter comes with a type ´T´, this definition affects the _self type_ ´S´ of the underlying class or object as follows:
+Let ´C´ be the type of the class or trait or object defining the template.
+If a type ´T´ is given for the formal self parameter, ´S´ is the greatest lower bound of ´T´ and ´C´.
+If no type ´T´ is given, ´S´ is just ´C´.
+Inside the template, the type of `this` is assumed to be ´S´.
+
+The self type of a class or object must conform to the self types of all classes which are inherited by the template ´t´.
+
+A second form of self type annotation reads just `this: ´S´ =>`.
+It prescribes the type ´S´ for `this` without introducing an alias name for it.
+
+###### Example
+Consider the following class definitions:
+
+```scala
+class Base extends Object {}
+trait Mixin extends Base {}
+object O extends Mixin {}
+```
+
+In this case, the definition of `O` is expanded to:
+
+```scala
+object O extends Base with Mixin {}
+```
+
+**Inheriting from Java Types**
+
+A template may have a Java class as its superclass and Java interfaces as its mixins.
+
+**Template Evaluation**
+
+Consider a template `´sc´ with ´mt_1´ with ... with ´mt_n´ { ´\mathit{stats}´ }`.
+
+If this is the template of a [trait](#traits) then its _mixin-evaluation_ consists of an evaluation of the statement sequence ´\mathit{stats}´.
+
+If this is not a template of a trait, then its _evaluation_ consists of the following steps.
+
+- First, the superclass constructor ´sc´ is [evaluated](#constructor-invocations).
+- Then, all base classes in the template's [linearization](#class-linearization) up to the template's superclass denoted by ´sc´ are evaluated.
+Evaluation happens in reverse order of occurrence in the linearization. Each evaluation occurs as follows:
+  - First, arguments to ´mt_i´ are evaluated from left to right, and set as parameters of ´mt_i´.
+  - ´mt_i´ is then mixin-evaluated.
+- Finally, the statement sequence ´\mathit{stats}\,´ is evaluated.
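+
+As a small illustration (a sketch of ours, not one of the spec's own examples, with hypothetical traits `A`, `B` and class `C`), the superclass is evaluated first, then the traits in reverse linearization order, then the template's own statements:
+
+```scala
+trait A { println("A") }
+trait B extends A { println("B") }
+class C extends B { println("C") }
+
+new C   // prints "A", then "B", then "C"
+```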
+
+### Constructor Invocations
+
+```ebnf
+Constr ::= AnnotType {‘(’ [Exprs] ‘)’}
+```
+
+Constructor invocations define the type, members, and initial state of objects created by an instance creation expression, or of parts of an object's definition which are inherited by a class or object definition.
+A constructor invocation is a method application `´x´.´c´[´\mathit{targs}´](´\mathit{args}_1´)...(´\mathit{args}_n´)`, where ´x´ is a [stable identifier](03-types.html#paths), ´c´ is a type name which either designates a class or defines an alias type for one, ´\mathit{targs}´ is a type argument list, ´\mathit{args}_1, ..., \mathit{args}_n´ are argument lists, and there is a constructor of that class which is [applicable](06-expressions.html#method-applications) to the given arguments.
+If the constructor invocation uses named or default arguments, it is transformed into a block expression using the same transformation as described [here](06-expressions.html#named-and-default-arguments).
+
+The prefix `´x´.` can be omitted.
+A type argument list can be given only if the class ´c´ takes type parameters.
+Even then it can be omitted, in which case a type argument list is synthesized using [local type inference](06-expressions.html#local-type-inference).
+If no explicit arguments are given, an empty list `()` is implicitly supplied.
+
+An evaluation of a constructor invocation `´x´.´c´[´\mathit{targs}´](´\mathit{args}_1´)...(´\mathit{args}_n´)` consists of the following steps:
+
+- First, the prefix ´x´ is evaluated.
+- Then, the arguments ´\mathit{args}_1, ..., \mathit{args}_n´ are evaluated from left to right.
+- Finally, the class being constructed is initialized by evaluating the template of the class referred to by ´c´.
+
+### Class Linearization
+
+The classes reachable through transitive closure of the direct inheritance relation from a class ´C´ are called the _base classes_ of ´C´.
+Because of mixins, the inheritance relationship on base classes forms in general a directed acyclic graph.
+A linearization of this graph is defined as follows.
+
+###### Definition: linearization
+Let ´C´ be a class with template `´C_1´ with ... with ´C_n´ { ´\mathit{stats}´ }`.
+The _linearization_ of ´C´, ´\mathcal{L}(C)´, is defined as follows:
+$$
+\mathcal{L}(C) = C, \mathcal{L}(C_n) \; \vec{+} \; ... \; \vec{+} \; \mathcal{L}(C_1)
+$$
+
+Here ´\vec{+}´ denotes concatenation where elements of the right operand replace identical elements of the left operand:
+
+$$
+\begin{array}{lcll}
+\{a, A\} \;\vec{+}\; B &=& a, (A \;\vec{+}\; B)  &{\bf if} \; a \not\in B \\\\
+                       &=& A \;\vec{+}\; B       &{\bf if} \; a \in B
+\end{array}
+$$
+
+###### Example
+Consider the following class definitions.
+
+```scala
+abstract class AbsIterator extends AnyRef { ... }
+trait RichIterator extends AbsIterator { ... }
+class StringIterator extends AbsIterator { ... }
+class Iter extends StringIterator with RichIterator { ... }
+```
+
+Then the linearization of class `Iter` is
+
+```scala
+{ Iter, RichIterator, StringIterator, AbsIterator, AnyRef, Any }
+```
+
+Note that the linearization of a class refines the inheritance relation: if ´C´ is a subclass of ´D´, then ´C´ precedes ´D´ in any linearization where both ´C´ and ´D´ occur.
+[Linearization](#definition:-linearization) also satisfies the property that a linearization of a class always contains the linearization of its direct superclass as a suffix.
+
+For instance, the linearization of `StringIterator` is
+
+```scala
+{ StringIterator, AbsIterator, AnyRef, Any }
+```
+
+which is a suffix of the linearization of its subclass `Iter`.
+The same is not true for the linearization of mixins.
+For instance, the linearization of `RichIterator` is
+
+```scala
+{ RichIterator, AbsIterator, AnyRef, Any }
+```
+
+which is not a suffix of the linearization of `Iter`.
+
+### Class Members
+
+A class ´C´ defined by a template `´C_1´ with ... with ´C_n´ { ´\mathit{stats}´ }` can define members in its statement sequence ´\mathit{stats}´ and can inherit members from all parent classes.
+Scala adopts Java and C\#'s conventions for static overloading of methods.
+It is thus possible that a class defines and/or inherits several methods with the same name.
+To decide whether a defined member of a class ´C´ overrides a member of a parent class, or whether the two co-exist as overloaded variants in ´C´, Scala uses the following definition of _matching_ on members:
+
+###### Definition: matching
+A member definition ´M´ _matches_ a member definition ´M'´, if ´M´ and ´M'´ bind the same name, and one of the following holds.
+
+1. Neither ´M´ nor ´M'´ is a method definition.
+2. ´M´ and ´M'´ both define monomorphic methods with equivalent argument types.
+3. ´M´ is defined in Java and defines a method with an empty parameter list `()` and ´M'´ defines a parameterless method.
+4. ´M´ and ´M'´ both define polymorphic methods with equal number of argument types ´\overline T´, ´\overline T'´ and equal numbers of type parameters ´\overline t´, ´\overline t'´, say, and ´\overline T' = [\overline t'/\overline t]\overline T´.
+
+Member definitions fall into two categories: concrete and abstract.
+Members of class ´C´ are either _directly defined_ (i.e. they appear in ´C´'s statement sequence ´\mathit{stats}´) or they are _inherited_.
+There are two rules that determine the set of members of a class, one for each category:
+
+A _concrete member_ of a class ´C´ is any concrete definition ´M´ in some class ´C_i \in \mathcal{L}(C)´, except if there is a preceding class ´C_j \in \mathcal{L}(C)´ where ´j < i´ which directly defines a concrete member ´M'´ matching ´M´.
+
+An _abstract member_ of a class ´C´ is any abstract definition ´M´ in some class ´C_i \in \mathcal{L}(C)´, except if ´C´ already contains a concrete member ´M'´ matching ´M´, or if there is a preceding class ´C_j \in \mathcal{L}(C)´ where ´j < i´ which directly defines an abstract member ´M'´ matching ´M´.
+
+This definition also determines the [overriding](#overriding) relationships between matching members of a class ´C´ and its parents.
+First, a concrete definition always overrides an abstract definition.
+Second, for definitions ´M´ and ´M´' which are both concrete or both abstract, ´M´ overrides ´M'´ if ´M´ appears in a class that precedes (in the linearization of ´C´) the class in which ´M'´ is defined.
+
+It is an error if a template directly defines two matching members.
+It is also an error if a template contains two members (directly defined or inherited) with the same name and the same [erased type](03-types.html#type-erasure).
+Finally, a template is not allowed to contain two methods (directly defined or inherited) with the same name which both define default arguments.
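+
+For instance, the last restriction rules out the following sketch (our illustration, with hypothetical classes `A` and `B`), in which both overloaded alternatives of `f` visible in `B` define default arguments:
+
+```scala
+class A {
+  def f(x: Int = 1) = x
+}
+class B extends A {
+  def f(s: String = "") = s   // **** error: multiple overloaded
+                              // alternatives of f define default arguments
+}
+```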
+
+###### Example
+Consider the trait definitions:
+
+```scala
+trait A { def f: Int }
+trait B extends A { def f: Int = 1 ; def g: Int = 2 ; def h: Int = 3 }
+trait C extends A { override def f: Int = 4 ; def g: Int }
+trait D extends B with C { def h: Int }
+```
+
+Then trait `D` has a directly defined abstract member `h`.
+It inherits member `f` from trait `C` and member `g` from trait `B`.
+
+### Overriding
+
+A member ´M´ of class ´C´ that [matches](#class-members) a non-private member ´M'´ of a base class of ´C´ is said to _override_ that member.
+In this case the binding of the overriding member ´M´ must [subsume](03-types.html#conformance) the binding of the overridden member ´M'´.
+Furthermore, the following restrictions on modifiers apply to ´M´ and ´M'´:
+- ´M'´ must not be a class.
+- ´M'´ must not be labeled `final`.
+- ´M´ must not be [`private`](#modifiers).
+- If ´M´ is labeled `private[´C´]` for some enclosing class or package ´C´, then ´M'´ must be labeled `private[´C'´]` for some class or package ´C'´ where ´C'´ equals ´C´ or ´C'´ is contained in ´C´.
+- If ´M´ is labeled `protected`, then ´M'´ must also be labeled `protected`.
+- If ´M'´ is not an abstract member, then ´M´ must be labeled `override`.
+Furthermore, one of two possibilities must hold:
+  - either ´M´ is defined in a subclass of the class where ´M'´ is defined,
+  - or both ´M´ and ´M'´ override a third member ´M''´ which is defined in a base class of both the classes containing ´M´ and ´M'´.
+- If ´M'´ is [incomplete](#modifiers) in ´C´ then ´M´ must be labeled `abstract override`.
+- If ´M´ and ´M'´ are both concrete value definitions, then either none of them is marked `lazy` or both must be marked `lazy`.
+- A stable member can only be overridden by a stable member.
+For example, this is not allowed:
+
+```scala
+class X { val stable = 1 }
+class Y extends X { override var stable = 1 } // error
+```
+
+Another restriction applies to abstract type members:
+An abstract type member with a [volatile type](03-types.html#volatile-types) as its upper bound may not override an abstract type member which does not have a volatile upper bound.
+
+A special rule concerns parameterless methods.
+If a parameterless method defined as `def ´f´: ´T´ = ...` or `def ´f´ = ...` overrides a method defined in Java of type ´()T'´ which has an empty parameter list, then ´f´ is also assumed to have an empty parameter list.
+
+An overriding method inherits all default arguments from the definition in the superclass.
+By specifying default arguments in the overriding method it is possible to add new defaults (if the corresponding parameter in the superclass does not have a default) or to override the defaults of the superclass (otherwise).
+
+###### Example
+
+Consider the definitions:
+
+```scala
+trait Root { type T <: Root }
+trait A extends Root { type T <: A }
+trait B extends Root { type T <: B }
+trait C extends A with B
+```
+
+Then the class definition `C` is not well-formed because the binding of `T` in `C` is `type T <: B`, which fails to subsume the binding `type T <: A` of `T` in type `A`.
+The problem can be solved by adding an overriding definition of type `T` in class `C`:
+
+```scala
+class C extends A with B { type T <: C }
+```
+
+### Inheritance Closure
+
+Let ´C´ be a class type.
+The _inheritance closure_ of ´C´ is the smallest set ´\mathscr{S}´ of types such that
+
+- ´C´ is in ´\mathscr{S}´.
+- If ´T´ is in ´\mathscr{S}´, then every type ´T'´ which forms syntactically a part of ´T´ is also in ´\mathscr{S}´.
+- If ´T´ is a class type in ´\mathscr{S}´, then all [parents](#templates) of ´T´ are also in ´\mathscr{S}´.
+
+It is a static error if the inheritance closure of a class type consists of an infinite number of types.
+(This restriction is necessary to make subtyping decidable[^kennedy]).
+
+[^kennedy]: Kennedy, Pierce. [On Decidability of Nominal Subtyping with Variance.](https://research.microsoft.com/pubs/64041/fool2007.pdf) in FOOL 2007
+
+## Modifiers
+
+```ebnf
+Modifier        ::= LocalModifier
+                  | AccessModifier
+                  | ‘override’
+LocalModifier   ::= ‘abstract’
+                  | ‘final’
+                  | ‘sealed’
+                  | ‘implicit’
+                  | ‘lazy’
+AccessModifier  ::= (‘private’ | ‘protected’) [AccessQualifier]
+AccessQualifier ::= ‘[’ (id | ‘this’) ‘]’
+```
+
+Member definitions may be preceded by modifiers which affect the accessibility and usage of the identifiers bound by them.
+If several modifiers are given, their order does not matter, but the same modifier may not occur more than once.
+Modifiers preceding a repeated definition apply to all constituent definitions.
+The rules governing the validity and meaning of a modifier are as follows.
+
+### `private`
+The `private` modifier can be used with any definition or declaration in a template.
+Private members of a template can be accessed only from within the directly enclosing template and its companion module or [companion class](#object-definitions).
+
+The `private` modifier is also valid for [top-level](09-top-level-definitions.html#packagings) templates.
+
+A `private` modifier can be _qualified_ with an identifier ´C´ (e.g. `private[´C´]`) that must denote a class or package enclosing the definition.
+Members labeled with such a modifier are accessible respectively only from code inside the package ´C´ or only from code inside the class ´C´ and its [companion module](#object-definitions).
+
+A different form of qualification is `private[this]`.
+A member ´M´ marked with this modifier is called _object-private_; it can be accessed only from within the object in which it is defined.
+That is, a selection ´p.M´ is only legal if the prefix is `this` or `´O´.this`, for some class ´O´ enclosing the reference.
+In addition, the restrictions for unqualified `private` apply.
+
+Members marked private without a qualifier are called _class-private_, whereas members labeled with `private[this]` are called _object-private_.
+A member _is private_ if it is either class-private or object-private, but not if it is marked `private[´C´]` where ´C´ is an identifier; in the latter case the member is called _qualified private_.
+
+Class-private or object-private members may not be abstract, and may not have `protected` or `override` modifiers.
+They are not inherited by subclasses and they may not override definitions in parent classes.
+
+### `protected`
+The `protected` modifier applies to class member definitions.
+Protected members of a class can be accessed from within
+  - the template of the defining class,
+  - all templates that have the defining class as a base class,
+  - the companion module of any of those classes.
+
+A `protected` modifier can be qualified with an identifier ´C´ (e.g. `protected[´C´]`) that must denote a class or package enclosing the definition.
+Members labeled with such a modifier are also accessible respectively from all code inside the package ´C´ or from all code inside the class ´C´ and its [companion module](#object-definitions).
+
+A protected identifier ´x´ may be used as a member name in a selection `´r´.´x´` only if one of the following applies:
+  - The access is within the template defining the member, or, if a qualification ´C´ is given, inside the package ´C´, or the class ´C´, or its companion module, or
+  - ´r´ is one of the reserved words `this` and `super`, or
+  - ´r´'s type conforms to a type-instance of the class which contains the access.
+
+A different form of qualification is `protected[this]`.
+A member ´M´ marked with this modifier is called _object-protected_; it can be accessed only from within the object in which it is defined.
+That is, a selection ´p.M´ is only legal if the prefix is `this` or `´O´.this`, for some class ´O´ enclosing the reference.
+In addition, the restrictions for unqualified `protected` apply.
+
+### `override`
+The `override` modifier applies to class member definitions or declarations.
+It is mandatory for member definitions or declarations that override some other concrete member definition in a parent class.
+If an `override` modifier is given, there must be at least one overridden member definition or declaration (either concrete or abstract).
+
+### `abstract override`
+The `override` modifier has an additional significance when combined with the `abstract` modifier.
+That modifier combination is only allowed for value members of traits.
+
+We call a member ´M´ of a template _incomplete_ if it is either abstract (i.e. defined by a declaration), or it is labeled `abstract` and `override` and every member overridden by ´M´ is again incomplete.
+
+Note that the `abstract override` modifier combination does not influence whether a member counts as concrete or abstract.
+A member is _abstract_ if only a declaration is given for it; it is _concrete_ if a full definition is given.
+
+### `abstract`
+The `abstract` modifier is used in class definitions.
+It is redundant for traits, and mandatory for all other classes which have incomplete members.
+Abstract classes cannot be [instantiated](06-expressions.html#instance-creation-expressions) with a constructor invocation unless followed by mixins and/or a refinement which override all incomplete members of the class.
+Only abstract classes and traits can have abstract term members.
+
+The `abstract` modifier can also be used in conjunction with `override` for class member definitions.
+In that case the previous discussion applies.
+
+### `final`
+The `final` modifier applies to class member definitions and to class definitions.
+A `final` class member definition may not be overridden in subclasses.
+A `final` class may not be inherited by a template.
+`final` is redundant for object definitions.
+Members of final classes or objects are implicitly also final, so the `final` modifier is generally redundant for them, too.
+Note, however, that [constant value definitions](04-basic-declarations-and-definitions.html#value-declarations-and-definitions) do require an explicit `final` modifier, even if they are defined in a final class or object.
+`final` is permitted for abstract classes but it may not be applied to traits or incomplete members, and it may not be combined in one modifier list with `sealed`.
+
+### `sealed`
+The `sealed` modifier applies to class definitions.
+A `sealed` class may not be directly inherited, except if the inheriting template is defined in the same source file as the inherited class.
+However, subclasses of a sealed class can be inherited anywhere.
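+
+For instance (a sketch of ours with two hypothetical source files and hypothetical classes `Tree`, `Leaf`, `BigLeaf`):
+
+```scala
+// File Tree.scala
+sealed abstract class Tree
+class Leaf(val v: Int) extends Tree     // OK: same source file as Tree
+
+// File Other.scala
+// class Bad extends Tree               // **** error: illegal inheritance from sealed class
+class BigLeaf(v: Int) extends Leaf(v)   // OK: Leaf itself is not sealed
+```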
+
+### `lazy`
+The `lazy` modifier applies to value definitions.
+A `lazy` value is initialized the first time it is accessed (which might never happen at all).
+Attempting to access a lazy value during its initialization might lead to looping behavior.
+If an exception is thrown during initialization, the value is considered uninitialized, and a later access will retry evaluating its right hand side.
+
+###### Example
+The following code illustrates the use of qualified private:
+
+```scala
+package outerpkg.innerpkg
+class Outer {
+  class Inner {
+    private[Outer] def f()
+    private[innerpkg] def g()
+    private[outerpkg] def h()
+  }
+}
+```
+
+Here, accesses to the method `f` can appear anywhere within `Outer`, but not outside it.
+Accesses to method `g` can appear anywhere within the package `outerpkg.innerpkg`, as would be the case for package-private methods in Java.
+Finally, accesses to method `h` can appear anywhere within package `outerpkg`, including packages contained in it.
+
+###### Example
+A useful idiom to prevent clients of a class from constructing new instances of that class is to declare the class `abstract` and `sealed`:
+
+```scala
+object m {
+  abstract sealed class C (x: Int) {
+    def nextC = new C(x + 1) {}
+  }
+  val empty = new C(0) {}
+}
+```
+
+For instance, in the code above clients can create instances of class `m.C` only by calling the `nextC` method of an existing `m.C` object; it is not possible for clients to create objects of class `m.C` directly.
+Indeed the following two lines are both in error:
+
+```scala
+new m.C(0)    // **** error: C is abstract, so it cannot be instantiated.
+new m.C(0) {} // **** error: illegal inheritance from sealed class.
+```
+
+A similar access restriction can be achieved by marking the primary constructor `private` ([example](#example-private-constructor)).
+
+## Class Definitions
+
+```ebnf
+TmplDef           ::= ‘class’ ClassDef
+ClassDef          ::= id [TypeParamClause] {Annotation}
+                      [AccessModifier] ClassParamClauses ClassTemplateOpt
+ClassParamClauses ::= {ClassParamClause}
+                      [[nl] ‘(’ implicit ClassParams ‘)’]
+ClassParamClause  ::= [nl] ‘(’ [ClassParams] ‘)’
+ClassParams       ::= ClassParam {‘,’ ClassParam}
+ClassParam        ::= {Annotation} {Modifier} [(‘val’ | ‘var’)]
+                      id [‘:’ ParamType] [‘=’ Expr]
+ClassTemplateOpt  ::= ‘extends’ ClassTemplate | [[‘extends’] TemplateBody]
+```
+
+The most general form of class definition is
+
+```scala
+class ´c´[´\mathit{tps}\,´] ´as´ ´m´(´\mathit{ps}_1´)...(´\mathit{ps}_n´) extends ´t´    ´\quad(n \geq 0)´.
+```
+
+Here,
+
+  - ´c´ is the name of the class to be defined.
+  - ´\mathit{tps}´ is a non-empty list of type parameters of the class being defined.
+    The scope of a type parameter is the whole class definition including the type parameter section itself.
+    It is illegal to define two type parameters with the same name.
+    The type parameter section `[´\mathit{tps}\,´]` may be omitted.
+    A class with a type parameter section is called _polymorphic_, otherwise it is called _monomorphic_.
+  - ´as´ is a possibly empty sequence of [annotations](11-annotations.html#user-defined-annotations).
+    If any annotations are given, they apply to the primary constructor of the class.
+  - ´m´ is an [access modifier](#modifiers) such as `private` or `protected`, possibly with a qualification.
+    If such an access modifier is given it applies to the primary constructor of the class.
+  - ´(\mathit{ps}\_1)...(\mathit{ps}\_n)´ are formal value parameter clauses for the _primary constructor_ of the class.
+    The scope of a formal value parameter includes all subsequent parameter sections and the template ´t´.
+    However, a formal value parameter may not form part of the types of any of the parent classes or members of the class template ´t´.
+    It is illegal to define two formal value parameters with the same name.
+
+    If a class has no formal parameter section that is not implicit, an empty parameter section `()` is assumed.
+
+    If a formal parameter declaration ´x: T´ is preceded by a `val` or `var` keyword, an accessor (getter) [definition](04-basic-declarations-and-definitions.html#variable-declarations-and-definitions) for this parameter is implicitly added to the class.
+
+    The getter introduces a value member ´x´ of class ´c´ that is defined as an alias of the parameter.
+    If the introducing keyword is `var`, a setter accessor [`´x´_=`](04-basic-declarations-and-definitions.html#variable-declarations-and-definitions) is also implicitly added to the class.
+    An invocation of that setter `´x´_=(´e´)` changes the value of the parameter to the result of evaluating ´e´.
+
+    The formal parameter declaration may contain modifiers, which then carry over to the accessor definition(s).
+    When access modifiers are given for a parameter, but no `val` or `var` keyword, `val` is assumed.
+    A formal parameter prefixed by `val` or `var` may not at the same time be a [call-by-name parameter](04-basic-declarations-and-definitions.html#by-name-parameters).
+
+  - ´t´ is a [template](#templates) of the form
+
+    ```scala
+    ´sc´ with ´mt_1´ with ... with ´mt_m´ { ´\mathit{stats}´ } // ´m \geq 0´
+    ```
+
+    which defines the base classes, behavior and initial state of objects of the class.
+    The extends clause `extends ´sc´ with ´mt_1´ with ... with ´mt_m´` can be omitted, in which case `extends scala.AnyRef` is assumed.
+    The class body `{ ´\mathit{stats}´ }` may also be omitted, in which case the empty body `{}` is assumed.
+
+This class definition defines a type `´c´[´\mathit{tps}\,´]` and a constructor which when applied to parameters conforming to types ´\mathit{ps}´ initializes instances of type `´c´[´\mathit{tps}\,´]` by evaluating the template ´t´.
+
+###### Example – `val` and `var` parameters
+The following example illustrates `val` and `var` parameters of a class `C`:
+
+```scala
+class C(x: Int, val y: String, var z: List[String])
+val c = new C(1, "abc", List())
+c.z = c.y :: c.z
+```
+
+###### Example – Private Constructor
+The following class can be created only from its companion module.
+
+```scala
+object Sensitive {
+  def makeSensitive(credentials: Certificate): Sensitive =
+    if (credentials == Admin) new Sensitive()
+    else throw new SecurityViolationException
+}
+class Sensitive private () {
+  ...
+}
+```
+
+### Constructor Definitions
+
+```ebnf
+FunDef         ::= ‘this’ ParamClause ParamClauses
+                   (‘=’ ConstrExpr | [nl] ConstrBlock)
+ConstrExpr     ::= SelfInvocation
+                 | ConstrBlock
+ConstrBlock    ::= ‘{’ SelfInvocation {semi BlockStat} ‘}’
+SelfInvocation ::= ‘this’ ArgumentExprs {ArgumentExprs}
+```
+
+A class may have additional constructors besides the primary constructor.
+These are defined by constructor definitions of the form `def this(´\mathit{ps}_1´)...(´\mathit{ps}_n´) = ´e´`.
+Such a definition introduces an additional constructor for the enclosing class, with parameters as given in the formal parameter lists ´\mathit{ps}_1, ..., \mathit{ps}_n´, and whose evaluation is defined by the constructor expression ´e´.
+The scope of each formal parameter is the subsequent parameter sections and the constructor expression ´e´.
+A constructor expression is either a self constructor invocation `this(´\mathit{args}_1´)...(´\mathit{args}_n´)` or a block which begins with a self constructor invocation.
+The self constructor invocation must construct a generic instance of the class.
+I.e. if the class in question has name ´C´ and type parameters `[´\mathit{tps}\,´]`, then a self constructor invocation must generate an instance of `´C´[´\mathit{tps}\,´]`; it is not permitted to instantiate formal type parameters.
+
+The signature and the self constructor invocation of a constructor definition are type-checked and evaluated in the scope which is in effect at the point of the enclosing class definition, augmented by any type parameters of the enclosing class.
+The rest of the constructor expression is type-checked and evaluated as a method body in the current class.
+
+If there are auxiliary constructors of a class ´C´, they form together with ´C´'s primary [constructor](#class-definitions) an overloaded constructor definition.
+The usual rules for [overloading resolution](06-expressions.html#overloading-resolution) apply for constructor invocations of ´C´, including for the self constructor invocations in the constructor expressions themselves.
+However, unlike other methods, constructors are never inherited.
+To prevent infinite cycles of constructor invocations, there is the restriction that every self constructor invocation must refer to a constructor definition which precedes it (i.e. it must refer to either a preceding auxiliary constructor or the primary constructor of the class).
+
+###### Example
+Consider the class definition
+
+```scala
+class LinkedList[A]() {
+  var head: A = _
+  var tail: LinkedList[A] = null
+  def this(head: A) = { this(); this.head = head }
+  def this(head: A, tail: LinkedList[A]) = { this(head); this.tail = tail }
+}
+```
+
+This defines a class `LinkedList` with three constructors.
+The second constructor constructs a singleton list, while the third one constructs a list with a given head and tail.
+
+### Case Classes
+
+```ebnf
+TmplDef ::= ‘case’ ‘class’ ClassDef
+```
+
+If a class definition is prefixed with `case`, the class is said to be a _case class_.
+
+A case class is required to have a parameter section that is not implicit.
+The formal parameters in the first parameter section are called _elements_ and are treated specially.
+First, the value of such a parameter can be extracted as a field of a constructor pattern.
+Second, a `val` prefix is implicitly added to such a parameter, unless the parameter already carries a `val` or `var` modifier.
+Hence, an accessor definition for the parameter is [generated](#class-definitions).
+
+A case class definition of `´c´[´\mathit{tps}\,´](´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)` with type parameters ´\mathit{tps}´ and value parameters ´\mathit{ps}´ implies the definition of a companion object, which serves as an [extractor object](08-pattern-matching.html#extractor-patterns).
+It has the following shape:
+
+```scala
+object ´c´ {
+  def apply[´\mathit{tps}\,´](´\mathit{ps}_1\,´)...(´\mathit{ps}_n´): ´c´[´\mathit{tps}\,´] = new ´c´[´\mathit{Ts}\,´](´\mathit{xs}_1\,´)...(´\mathit{xs}_n´)
+  def unapply[´\mathit{tps}\,´](´x´: ´c´[´\mathit{tps}\,´]) =
+    if (x eq null) scala.None
+    else scala.Some(´x.\mathit{xs}_{11}, ... , x.\mathit{xs}_{1k}´)
+}
+```
+
+Here, ´\mathit{Ts}´ stands for the vector of types defined in the type parameter section ´\mathit{tps}´, each ´\mathit{xs}\_i´ denotes the parameter names of the parameter section ´\mathit{ps}\_i´, and ´\mathit{xs}\_{11}, ... , \mathit{xs}\_{1k}´ denote the names of all parameters in the first parameter section ´\mathit{xs}\_1´.
+If a type parameter section is missing in the class, it is also missing in the `apply` and `unapply` methods.
+
+If the companion object ´c´ is already defined, the `apply` and `unapply` methods are added to the existing object.
+If the object ´c´ already has a [matching](#definition-matching) `apply` (or `unapply`) member, no new definition is added.
+The definition of `apply` is omitted if class ´c´ is `abstract`.
+
+If the case class definition contains an empty value parameter list, the `unapply` method returns a `Boolean` instead of an `Option` type and is defined as follows:
+
+```scala
+def unapply[´\mathit{tps}\,´](´x´: ´c´[´\mathit{tps}\,´]) = x ne null
+```
+
+The name of the `unapply` method is changed to `unapplySeq` if the first parameter section ´\mathit{ps}_1´ of ´c´ ends in a [repeated parameter](04-basic-declarations-and-definitions.html#repeated-parameters).
+
+A method named `copy` is implicitly added to every case class unless the class already has a member (directly defined or inherited) with that name, or the class has a repeated parameter.
+The method is defined as follows:
+
+```scala
+def copy[´\mathit{tps}\,´](´\mathit{ps}'_1\,´)...(´\mathit{ps}'_n´): ´c´[´\mathit{tps}\,´] = new ´c´[´\mathit{Ts}\,´](´\mathit{xs}_1\,´)...(´\mathit{xs}_n´)
+```
+
+Again, `´\mathit{Ts}´` stands for the vector of types defined in the type parameter section `´\mathit{tps}´` and each `´xs_i´` denotes the parameter names of the parameter section `´ps'_i´`.
+The value parameters `´ps'_{1,j}´` of the first parameter list have the form `´x_{1,j}´:´T_{1,j}´=this.´x_{1,j}´`, the other parameters `´ps'_{i,j}´` of the `copy` method are defined as `´x_{i,j}´:´T_{i,j}´`.
+In all cases `´x_{i,j}´` and `´T_{i,j}´` refer to the name and type of the corresponding class parameter `´\mathit{ps}_{i,j}´`.
+
+Every case class implicitly overrides some method definitions of class [`scala.AnyRef`](12-the-scala-standard-library.html#root-classes) unless a definition of the same method is already given in the case class itself or a concrete definition of the same method is given in some base class of the case class different from `AnyRef`.
+In particular:
+
+- Method `equals: (Any)Boolean` is structural equality, where two instances are equal if they both belong to the case class in question and they have equal (with respect to `equals`) constructor arguments (restricted to the class's _elements_, i.e., the first parameter section).
+- Method `hashCode: Int` computes a hash-code. If the hashCode methods of the data structure members map equal (with respect to equals) values to equal hash-codes, then the case class hashCode method does too.
+- Method `toString: String` returns a string representation which contains the name of the class and its elements.
+
+###### Example
+Here is the definition of abstract syntax for lambda calculus:
+
+```scala
+class Expr
+case class Var   (x: String)          extends Expr
+case class Apply (f: Expr, e: Expr)   extends Expr
+case class Lambda(x: String, e: Expr) extends Expr
+```
+
+This defines a class `Expr` with case classes `Var`, `Apply` and `Lambda`. A call-by-value evaluator for lambda expressions could then be written as follows.
+
+```scala
+type Env = String => Value
+case class Value(e: Expr, env: Env)
+
+def eval(e: Expr, env: Env): Value = e match {
+  case Var (x) =>
+    env(x)
+  case Apply(f, g) =>
+    val Value(Lambda (x, e1), env1) = eval(f, env)
+    val v = eval(g, env)
+    eval (e1, (y => if (y == x) v else env1(y)))
+  case Lambda(_, _) =>
+    Value(e, env)
+}
+```
+
+It is possible to define further case classes that extend type `Expr` in other parts of the program, for instance
+
+```scala
+case class Number(x: Int) extends Expr
+```
+
+This form of extensibility can be excluded by declaring the base class `Expr` `sealed`; in this case, all classes that directly extend `Expr` must be in the same source file as `Expr`.
+
+## Traits
+
+```ebnf
+TmplDef ::= ‘trait’ ClassDef
+```
+
+A _trait_ is a class that is meant to be added to some other class as a mixin.
+No constructor arguments are passed to the superclass of the trait.
+This is not necessary as traits are initialized after the superclass is initialized.
+
+Assume a trait ´D´ defines some aspect of an instance ´x´ of type ´C´ (i.e. ´D´ is a base class of ´C´).
+Then the _actual supertype_ of ´D´ in ´x´ is the compound type consisting of all the base classes in ´\mathcal{L}(C)´ that succeed ´D´.
+The actual supertype gives the context for resolving a [`super` reference](06-expressions.html#this-and-super) in a trait.
+Note that the actual supertype depends on the type to which the trait is added in a mixin composition; it is not statically known at the time the trait is defined.
+
+If ´D´ is not a trait, then its actual supertype is simply its least proper supertype (which is statically known).
+
+###### Example
+The following trait defines the property of being comparable to objects of some type.
+It contains an abstract method `<` and default implementations of the other comparison operators `<=`, `>`, and `>=`.
+
+```scala
+trait Comparable[T <: Comparable[T]] { self: T =>
+  def < (that: T): Boolean
+  def <=(that: T): Boolean = this < that || this == that
+  def > (that: T): Boolean = that < this
+  def >=(that: T): Boolean = that <= this
+}
+```
+
+###### Example
+Consider an abstract class `Table` that implements maps from a type of keys `A` to a type of values `B`.
+The class has a method `set` to enter a new key / value pair into the table, and a method `get` that returns an optional value matching a given key.
+Finally, there is a method `apply` which is like `get`, except that it returns a given default value if the table is undefined for the given key.
+This class is implemented as follows.
+
+```scala
+abstract class Table[A, B](defaultValue: B) {
+  def get(key: A): Option[B]
+  def set(key: A, value: B): Unit
+  def apply(key: A) = get(key) match {
+    case Some(value) => value
+    case None => defaultValue
+  }
+}
+```
+
+Here is a concrete implementation of the `Table` class.
+
+```scala
+class ListTable[A, B](defaultValue: B) extends Table[A, B](defaultValue) {
+  private var elems: List[(A, B)] = Nil
+  def get(key: A) = elems.find(_._1 == key).map(_._2)
+  def set(key: A, value: B) = { elems = (key, value) :: elems }
+}
+```
+
+Here is a trait that prevents concurrent access to the `get` and `set` operations of its parent class:
+
+```scala
+trait SynchronizedTable[A, B] extends Table[A, B] {
+  abstract override def get(key: A): Option[B] =
+    synchronized { super.get(key) }
+  abstract override def set(key: A, value: B) =
+    synchronized { super.set(key, value) }
+}
+```
+
+Note that `SynchronizedTable` does not pass an argument to its superclass, `Table`, even though `Table` is defined with a formal parameter.
+Note also that the `super` calls in `SynchronizedTable`'s `get` and `set` methods statically refer to abstract methods in class `Table`.
+This is legal, as long as the calling method is labeled [`abstract override`](#modifiers).
+
+Finally, the following mixin composition creates a synchronized list table with strings as keys and integers as values and with a default value `0`:
+
+```scala
+object MyTable extends ListTable[String, Int](0) with SynchronizedTable[String, Int]
+```
+
+The object `MyTable` inherits its `get` and `set` method from `SynchronizedTable`.
+The `super` calls in these methods are re-bound to refer to the corresponding implementations in `ListTable`, which is the actual supertype of `SynchronizedTable` in `MyTable`.
+
+### Extending parameterized traits
+
+Extra rules apply for extending a trait with parameters:
+
+1. If a class `´C´` extends a parameterized trait `´T´`, and its superclass does not, `´C´` _must_ pass arguments to `´T´`.
+
+2. If a class `´C´` extends a parameterized trait `´T´`, and its superclass does as well, `´C´` _must not_ pass arguments to `´T´`.
+
+3. Traits must never pass arguments to parent traits.
+
+4. If a class `´C´` extends an unparameterized trait `´T_i´` and the base types of `´T_i´` include parameterized trait `´T_j´`, and the superclass of `´C´` does not extend `´T_j´`, then `´C´` _must_ also explicitly extend `´T_j´` and pass arguments.
+This rule is relaxed if the missing trait contains only context parameters. In that case the trait reference is implicitly inserted as an additional parent with inferred arguments.
+
+###### Example - Preventing ambiguities
+
+The following listing tries to extend `Greeting` twice, with different parameters.
+
+```scala
+trait Greeting(val name: String):
+  def msg = s"How are you, $name"
+
+class C extends Greeting("Bob")
+
+class D extends C, Greeting("Bill") // error
+
+@main def greet = println(D().msg)
+```
+
+Should this program print "Bob" or "Bill"? In fact this program is illegal, because it violates rule 2 above.
+Instead, `D` can extend `Greeting` without passing arguments.
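+
+For instance, the following variant is legal under rule 2; since `C` already passes `"Bob"` to `Greeting`, `D` may list the trait only without arguments (a sketch, not part of the original listing):
+
+```scala
+class D extends C, Greeting // ok: the superclass C already initializes Greeting
+
+@main def greet = println(D().msg) // prints "How are you, Bob"
+```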
+
+###### Example - Overriding
+
+Here's a variant of `Greeting` that overrides `msg`:
+```scala
+trait FormalGreeting extends Greeting:
+  override def msg = s"How do you do, $name"
+```
+
+Due to rule 4, the following class extending `FormalGreeting` is required to also extend `Greeting` with arguments:
+```scala
+class GreetBobFormally extends FormalGreeting, Greeting("Bob")
+```
+
+###### Example - Inferred context parameters
+
+Here's a variant of `Greeting` where the addressee is a context parameter of type `ImpliedName`:
+
+```scala
+trait ImpliedGreeting(using val iname: ImpliedName):
+  def msg = s"How are you, $iname"
+
+case class ImpliedName(name: String):
+  override def toString = name
+
+trait ImpliedFormalGreeting extends ImpliedGreeting:
+  override def msg = s"How do you do, $iname"
+
+class F(using iname: ImpliedName) extends ImpliedFormalGreeting
+```
+
+The definition of `F` in the last line is implicitly expanded to
+```scala
+class F(using iname: ImpliedName) extends
+  Object, // implicitly inserted
+  ImpliedGreeting(using iname), // implicitly inserted
+  ImpliedFormalGreeting
+```
+Due to rule 4, `F` is required to also extend `ImpliedGreeting` and pass arguments to it; however, note that because `ImpliedGreeting` has only context parameters, the extension was added implicitly.
+
+## Object Definitions
+
+```ebnf
+TmplDef ::= ‘object’ ObjectDef
+ObjectDef ::= id ClassTemplate
+```
+
+An _object definition_ defines a single object of a new class.
+Its most general form is `object ´m´ extends ´t´`.
+Here, ´m´ is the name of the object to be defined, and ´t´ is a [template](#templates) of the form
+
+```scala
+´sc´ with ´mt_1´ with ... with ´mt_n´ { ´\mathit{stats}´ }
+```
+
+which defines the base classes, behavior and initial state of ´m´.
+The extends clause `extends ´sc´ with ´mt_1´ with ... with ´mt_n´` can be omitted, in which case `extends scala.AnyRef` is assumed.
+The class body `{ ´\mathit{stats}´ }` may also be omitted, in which case the empty body `{}` is assumed.
+
+The object definition defines a single object (or: _module_) conforming to the template ´t´.
+It is roughly equivalent to the following definition of a lazy value:
+
+```scala
+lazy val ´m´ = new ´sc´ with ´mt_1´ with ... with ´mt_n´ { this: ´m.type´ => ´\mathit{stats}´ }
+```
+
+Note that the value defined by an object definition is instantiated lazily.
+The `new ´m´$cls` constructor is evaluated not at the point of the object definition, but is instead evaluated the first time ´m´ is dereferenced during execution of the program (which might be never at all).
+An attempt to dereference ´m´ again during evaluation of the constructor will lead to an infinite loop or run-time error.
+Other threads trying to dereference ´m´ while the constructor is being evaluated block until evaluation is complete.
+
+The expansion given above is not accurate for top-level objects.
+It cannot be, because variable and method definitions cannot appear at the top level outside of a [package object](09-top-level-definitions.html#package-objects).
+Instead, top-level objects are translated to static fields.
+
+###### Example
+Classes in Scala do not have static members; however, an equivalent effect can be achieved by an accompanying object definition, e.g.
+
+```scala
+abstract class Point {
+  val x: Double
+  val y: Double
+  def isOrigin = (x == 0.0 && y == 0.0)
+}
+object Point {
+  val origin = new Point() { val x = 0.0; val y = 0.0 }
+}
+```
+
+This defines a class `Point` and an object `Point` which contains `origin` as a member.
+Note that the double use of the name `Point` is legal, since the class definition defines the name `Point` in the type namespace, whereas the object definition defines a name in the term namespace.
+
+This technique is applied by the Scala compiler when interpreting a Java class with static members.
+Such a class ´C´ is conceptually seen as a pair of a Scala class that contains all instance members of ´C´ and a Scala object that contains all static members of ´C´.
+
+Generally, a _companion module_ of a class is an object which has the same name as the class and is defined in the same scope and compilation unit.
+Conversely, the class is called the _companion class_ of the module.
+
+Very much like a concrete class definition, an object definition may still contain declarations of abstract type members, but not of abstract term members.
+
+## Enum Definitions
+
+
+```ebnf
+TmplDef ::= ‘enum’ EnumDef
+EnumDef ::= id ClassConstr [‘extends’ [ConstrApps]] EnumBody
+EnumBody ::= [nl] ‘{’ [SelfType] EnumStat {semi EnumStat} ‘}’
+EnumStat ::= TemplateStat
+           | {Annotation [nl]} {Modifier} EnumCase
+EnumCase ::= ‘case’ (id ClassConstr [‘extends’ ConstrApps] | ids)
+```
+
+An _enum definition_ implies the definition of an _enum class_, a companion object, and one or more _enum cases_.
+
+Enum definitions are useful to encode both Generalised Algebraic Data Types and Enumerated Types.
+
+The compiler expands enum definitions to code that only uses Scala's other language features.
+As such, enum definitions in Scala are convenient _syntactic sugar_, but they are not essential to understand Scala's core.
+
+We now explain the expansion of enum definitions in detail.
+First, some terminology and notational conventions:
+
+- We use ´E´ as a name of an enum definition, and ´C´ as a name of an enum case that appears in ´E´.
+- We use `<...>` for syntactic constructs that in some circumstances might be empty.
+For instance, `<params>` represents one or more parameter lists `(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)` or nothing at all.
+- Enum classes fall into two categories:
+  - _parameterized_ enum classes have at least one of the following:
+    - a type parameter section, denoted as `[´\mathit{tps}\,´]`;
+    - one or more (possibly empty) parameter sections, denoted as `(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)`.
+  - _unparameterized_ enum classes have no type parameter sections and no parameter sections.
+- Enum cases fall into three categories:
+
+  - _Class cases_ are those cases that are parameterized, either with a type parameter section `[´\mathit{tps}\,´]` or with one or more (possibly empty) parameter sections `(´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)`.
+  - _Simple cases_ are cases of an unparameterized enum that have neither parameters nor an extends clause or body.
+  That is, they consist of a name only.
+  - _Value cases_ are all cases that do not have a parameter section but that do have a (possibly generated) `extends` clause and/or a body.
+
+- Simple cases and value cases are collectively called _singleton cases_.
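+
+As an illustrative sketch (the enums and names here are ours, not part of the specification), the following definitions exhibit all three case categories:
+
+```scala
+enum Tree[+A]:                  // parameterized enum class
+  case Empty                    // singleton case: a value case, since the enum has a type parameter
+  case Node(elem: A, left: Tree[A], right: Tree[A]) // class case
+
+enum Direction:                 // unparameterized enum class
+  case North, South, East, West // simple cases: names only
+```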
+
+###### Example
+
+An example enum for a `Planet` enumeration can be given as
+```scala
+enum Planet(mass: Double, radius: Double):
+  case Mercury extends Planet(3.303e+23, 2.4397e6)
+  case Venus extends Planet(4.869e+24, 6.0518e6)
+  case Earth extends Planet(5.976e+24, 6.37814e6)
+  case Mars extends Planet(6.421e+23, 3.3972e6)
+  case Jupiter extends Planet(1.9e+27, 7.1492e7)
+  case Saturn extends Planet(5.688e+26, 6.0268e7)
+  case Uranus extends Planet(8.686e+25, 2.5559e7)
+  case Neptune extends Planet(1.024e+26, 2.4746e7)
+
+  private inline val G = 6.67300E-11
+  def surfaceGravity = G * mass / (radius * radius)
+  def surfaceWeight(otherMass: Double) = otherMass * surfaceGravity
+end Planet
+```
+
+###### Example
+
+An example enum for the Option ADT can be given as
+```scala
+enum Option[+T]:
+  case Some(x: T)
+  case None
+```
+
+### Lowering of Enum Definitions
+
+###### Summary
+An enum class is represented as a `sealed` class that extends the `scala.reflect.Enum` trait.
+
+Enum cases are represented as follows:
+- a class case is mapped to a `case class`,
+- a singleton case is mapped to a `val` definition, where
+  - Simple cases all share a single implementation class.
+  - Value cases will each be implemented by a unique class.
+
+###### Precise rules
+The `scala.reflect.Enum` trait defines a single public method, `ordinal`:
+```scala
+package scala.reflect
+
+transparent trait Enum extends Any, Product, Serializable:
+
+  def ordinal: Int
+```
+There are nine desugaring rules.
+Rule (1) desugars enum definitions.
+Rules (2) and (3) desugar simple cases.
+Rules (4) to (6) define `extends` clauses for cases that are missing them.
+Rules (7) to (9) define how such cases with `extends` clauses map into `case class`es or `val`s.
+
+1. An `enum` definition
+   ```scala
+   enum ´E´ ... { <defs> <cases> }
+   ```
+   expands to a `sealed abstract` class that extends the `scala.reflect.Enum` trait and an associated companion object that contains the defined cases, expanded according to rules (2 - 8).
+   The enum class starts with a compiler-generated import that imports the names `<caseIds>` of all cases so that they can be used without prefix in the class.
+   ```scala
+   sealed abstract class ´E´ ... extends <parents> with scala.reflect.Enum {
+     import ´E´.{ <caseIds> }
+     <defs>
+   }
+   object ´E´ { <cases> }
+   ```
+
+2. A singleton case consisting of a comma-separated list of enum names
+   ```scala
+   case ´C_1´, ..., ´C_n´
+   ```
+   expands to
+   ```scala
+   case ´C_1´; ...; case ´C_n´
+   ```
+   Any modifiers or annotations on the original case extend to all expanded cases.
+   This result is then further rewritten by either (3 or 4).
+
+3. A singleton case without an extends clause
+   ```scala
+   case ´C´
+   ```
+   of an unparameterized enum `´E´` expands to the following simple enum case in `´E´`'s companion object:
+   ```scala
+   val ´C´ = $new(n, "C")
+   ```
+   Here, `$new` is a private method that creates an instance of ´E´ (see below).
+
+4. A singleton case without an extends clause
+   ```scala
+   case ´C´
+   ```
+   of an enum `´E´` with type parameters
+   ```scala
+   ´\mathit{v}_1´ ´T_1´ >: ´L_1´ <: ´U_1´ , ... , ´\mathit{v}_n´ ´T_n´ >: ´L_n´ <: ´U_n´      (n > 0)
+   ```
+   where each of the variances `´\mathit{v}_i´` is either `'+'` or `'-'`, expands to the following value enum case:
+   ```scala
+   case ´C´ extends ´E´[´B_1´, ..., ´B_n´]
+   ```
+   where `´B_i´` is `´L_i´` if `´\mathit{v}_i´ = '+'` and `´U_i´` if `´\mathit{v}_i´ = '-'`.
+   This result is then further rewritten with rule (8).
+   **NOTE:** It is not permitted for enums with non-variant type parameters to have singleton cases without an extends clause.
+
+5. A class case without an extends clause
+   ```scala
+   case ´C´ <params>
+   ```
+   of an enum `´E´` that does not take type parameters expands to
+   ```scala
+   case ´C´ <params> extends ´E´
+   ```
+   This result is then further rewritten with rule (9).
+
+6. If `´E´` is an enum with type parameters `´\mathit{tps}´`, a class case with neither type parameters nor an extends clause
+   ```scala
+   case ´C´ <params>
+   ```
+   expands to
+   ```scala
+   case ´C´[´\mathit{tps}´] <params> extends ´E´[´\mathit{tps}´]
+   ```
+   This result is then further rewritten with rule (9).
+   For class cases that have type parameters themselves, an extends clause needs to be given explicitly.
+
+
+7. If `´E´` is an enum with type parameters `´\mathit{tps}´`, a class case without type parameters but with an extends clause
+   ```scala
+   case ´C´ <params> extends <parents>
+   ```
+   expands to
+   ```scala
+   case ´C´[´\mathit{tps}´] <params> extends <parents>
+   ```
+   provided at least one of the parameters `´\mathit{tps}´` is mentioned in a parameter type in `<params>` or in a type argument in `<parents>`.
+
+8. A value case
+   ```scala
+   case ´C´ extends <parents>
+   ```
+   expands to the following `val` definition in `´E´`'s companion object:
+   ```scala
+   val ´C´ = new <parents> { <body>; def ordinal = ´\mathit{n}´ }
+   ```
+   where `´\mathit{n}´` is the ordinal number of the case in the companion object, starting from 0.
+   The anonymous class also implements the abstract `Product` methods that it inherits from `Enum`.
+   **NOTE:** It is an error if a value case refers to a type parameter of `´E´` in a type argument within `<parents>`.
+
+9. A class case
+   ```scala
+   case ´C´ <tparams> <params> extends <parents>
+   ```
+   expands analogously to a final case class in `´E´`'s companion object:
+   ```scala
+   final case class ´C´ <tparams> <params> extends <parents> {
+     def ordinal = ´\mathit{n}´
+   }
+   ```
+   where `´\mathit{n}´` is the ordinal number of the case in the companion object, starting from 0.
+   **NOTE:** It is an error if a class case refers to a type parameter of `´E´` in a parameter type in `<tparams>` or `<params>` or in a type argument of `<parents>`, unless that parameter is already a type parameter of the case, i.e. the parameter name is defined in `<tparams>`.
+
+###### Superclass of an enum case
+
+An enum case (singleton or class) with an explicit extends clause
+```scala
+case ´C´ <tparams> <params> extends <parents>
+```
+
+must extend the parent enum `´E´` as the first parent of `<parents>`.
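+
+For example (a sketch combining rule (4) and the requirement above): in the `Option` enum, the singleton case `None` gets the implied clause `extends Option[Nothing]` by rule (4), and any explicit extends clause must mention the parent enum `Option` first:
+
+```scala
+enum Option[+T]:
+  case Some(x: T) extends Option[T]       // explicit clause: the parent enum comes first
+  case None       extends Option[Nothing] // as implied by rule (4) for a covariant parameter
+```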
+
+###### Example
+Consider the enumeration `RGB`, consisting of simple enum cases:
+```scala
+enum RGB:
+  case Red, Green, Blue
+```
+
+The three simple cases will expand as follows in the companion of `RGB`:
+
+```scala
+val Red = $new(0, "Red")
+val Green = $new(1, "Green")
+val Blue = $new(2, "Blue")
+
+private def $new(_$ordinal: Int, $name: String) =
+  new RGB with scala.runtime.EnumValue:
+    def ordinal = _$ordinal
+    override def productPrefix = $name
+    override def toString = $name
+```
+
+
+###### Example
+
+Consider the more complex enumeration `Color`, consisting of value enum cases:
+```scala
+enum Color(val rgb: Int):
+  case Red extends Color(0xFF0000)
+  case Green extends Color(0x00FF00)
+  case Blue extends Color(0x0000FF)
+```
+
+The three value cases will expand as follows in the companion of `Color`:
+
+```scala
+val Red = new Color(0xFF0000):
+  def ordinal: Int = 0
+  override def productPrefix: String = "Red"
+  override def toString: String = "Red"
+val Green = new Color(0x00FF00):
+  def ordinal: Int = 1
+  override def productPrefix: String = "Green"
+  override def toString: String = "Green"
+val Blue = new Color(0x0000FF):
+  def ordinal: Int = 2
+  override def productPrefix: String = "Blue"
+  override def toString: String = "Blue"
+```
+
+### Widening of enum cases post-construction
+The compiler-generated `apply` and `copy` methods of a class enum case
+```scala
+case ´C´[´\mathit{tps}\,´](´\mathit{ps}_1\,´)...(´\mathit{ps}_n´) extends ´P_1´, ..., ´P_n´
+```
+are treated specially.
+A call `´C´[´\mathit{tps}\,´](´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)` of the `apply` method is ascribed the underlying type `´P_1´ & ... & ´P_n´` (dropping any [transparent traits](../other-new-features/transparent-traits.md)) as long as that type is still compatible with the expected type at the point of application.
+A call `t.copy[´\mathit{tps}\,´](´\mathit{ps}_1\,´)...(´\mathit{ps}_n´)` of `´C´`'s `copy` method is treated in the same way.
+
+### Translation of enums with only singleton cases
+
+An enum `´E´` (possibly generic) that defines one or more singleton cases, and no class cases, will define the following additional synthetic members in its companion object (where `´E'´` denotes `´E´` with any type parameters replaced by wildcards):
+
+  - A method `valueOf(name: String): ´E'´`.
+  It returns the singleton case value whose identifier is `name`.
+  - A method `values` which returns an `Array[´E'´]` of all singleton case values defined by `´E´`, in the order of their definitions.
+
+### Factory method for simple enum cases
+
+If an enum `´E´` contains at least one simple case, its companion object will define in addition:
+
+  - A private method `$new` which defines a new simple case value with given ordinal number and name.
+  This method can be thought of as being defined as follows.
+
+  ```scala
+  private def $new(_$ordinal: Int, $name: String): ´E´ with runtime.EnumValue
+  ```
+  - `$new` returns a new instance of an anonymous class which implements the abstract `Product` methods that it inherits from `Enum`.
+  - If `´E´` inherits from `java.lang.Enum` the anonymous class does not override the `ordinal` or `toString` methods, as these are final in `java.lang.Enum`.
+  Additionally `productPrefix` will delegate to `this.name`.
+
+### Translation of Java-compatible enums
+
+A Java-compatible enum is an enum that extends `java.lang.Enum`.
+The translation rules are the same as above, with the reservations defined in this section.
+
+- It is a compile-time error for a Java-compatible enum to have class cases.
+
+- Cases such as `case C` expand to a `@static val` as opposed to a `val`.
+This allows them to be generated as static fields of the enum type, thus ensuring they are represented the same way as Java enums.
+
+### Scopes for Enum Cases
+
+A case in an `enum` is treated similarly to a secondary constructor.
+It can access neither the enclosing `enum` using `this`, nor its value parameters or instance members using simple identifiers.
+
+Even though translated enum cases are located in the enum's companion object, referencing this object or its members via `this` or a simple identifier is also illegal.
+The compiler typechecks enum cases in the scope of the enclosing companion object but flags any such illegal accesses as errors.
+
+### Variance for Type Parameters
+
+A parameterized enum case ´C´ of enum ´E´ with _inferred_ type parameters copies the variance annotations of ´E´'s type parameters: e.g., type parameter ´T_{i}´ from ´E´ will have the same variance as type parameter `´T'_{i}´` in ´C´.
+
+###### Example
+
+The following enum `View` has a contravariant type parameter ´T´ and a single case `Refl`, representing a function mapping a type `T` to itself:
+
+```scala
+enum View[-´T´]:
+  case Refl(f: ´T´ => ´T´)
+```
+
+`Refl` expands to the following enum:
+
+```scala
+enum View[-´T´]:
+  case Refl[-´T'´](f: ´T'´ => ´T'´) extends View[´T'´]
+```
+
+The definition of `Refl` is incorrectly typed, as it uses contravariant type `´T'´` in the covariant result position of a function type.
+
+A correctly typed version would use an _explicit_, _invariant_ type parameter `´R´` on case `Refl`:
+
+```scala
+enum View[-´T´]:
+  case Refl[´R´](f: ´R´ => ´R´) extends View[´R´]
+```
\ No newline at end of file
diff --git a/docs/_spec/06-expressions.md b/docs/_spec/06-expressions.md
new file mode 100644
index 000000000000..fa21b4330728
--- /dev/null
+++ b/docs/_spec/06-expressions.md
@@ -0,0 +1,1412 @@
+---
+title: Expressions
+layout: default
+chapter: 6
+---
+
+# Expressions
+
+```ebnf
+Expr ::= (Bindings | id | ‘_’) ‘=>’ Expr
+       | Expr1
+Expr1 ::= ‘if’ ‘(’ Expr ‘)’ {nl} Expr [[semi] ‘else’ Expr]
+        | ‘while’ ‘(’ Expr ‘)’ {nl} Expr
+        | ‘try’ Expr [‘catch’ Expr] [‘finally’ Expr]
+        | ‘for’ (‘(’ Enumerators ‘)’ | ‘{’ Enumerators ‘}’) {nl} [‘yield’] Expr
+        | ‘throw’ Expr
+        | ‘return’ [Expr]
+        | [SimpleExpr ‘.’] id ‘=’ Expr
+        | SimpleExpr1 ArgumentExprs ‘=’ Expr
+        | PostfixExpr
+        | PostfixExpr Ascription
+        | PostfixExpr ‘match’ ‘{’ CaseClauses ‘}’
+PostfixExpr ::= InfixExpr [id [nl]]
+InfixExpr ::= PrefixExpr
+            | InfixExpr id [nl] InfixExpr
+PrefixExpr ::= [‘-’ | ‘+’ | ‘~’ | ‘!’] SimpleExpr
+SimpleExpr ::= ‘new’ (ClassTemplate | TemplateBody)
+             | BlockExpr
+             | SimpleExpr1 [‘_’]
+SimpleExpr1 ::= Literal
+              | Path
+              | ‘_’
+              | ‘(’ [Exprs] ‘)’
+              | SimpleExpr ‘.’ id
+              | SimpleExpr TypeArgs
+              | SimpleExpr1 ArgumentExprs
+              | XmlExpr
+Exprs ::= Expr {‘,’ Expr}
+BlockExpr ::= ‘{’ CaseClauses ‘}’
+            | ‘{’ Block ‘}’
+Block ::= BlockStat {semi BlockStat} [ResultExpr]
+ResultExpr ::= Expr1
+             | (Bindings | ([‘implicit’] id | ‘_’) ‘:’ CompoundType) ‘=>’ Block
+Ascription ::= ‘:’ InfixType
+             | ‘:’ Annotation {Annotation}
+             | ‘:’ ‘_’ ‘*’
+```
+
+Expressions are composed of operators and operands.
+Expression forms are discussed subsequently in decreasing order of precedence.
+
+## Expression Typing
+
+The typing of expressions is often relative to some _expected type_ (which might be undefined).
+When we write "expression ´e´ is expected to conform to type ´T´", we mean:
+  1. the expected type of ´e´ is ´T´, and
+  2. the type of expression ´e´ must conform to ´T´.
+
+The following skolemization rule is applied universally for every expression:
+If the type of an expression would be an existential type ´T´, then the type of the expression is assumed instead to be a [skolemization](03-types.html#existential-types) of ´T´.
+
+
+Skolemization is reversed by type packing.
+Assume an expression ´e´ of type ´T´ and let ´t_1[\mathit{tps}\_1] >: L_1 <: U_1, ..., t_n[\mathit{tps}\_n] >: L_n <: U_n´ be all the type variables created by skolemization of some part of ´e´ which are free in ´T´.
+Then the _packed type_ of ´e´ is
+
+```scala
+´T´ forSome { type ´t_1[\mathit{tps}\_1] >: L_1 <: U_1´; ...; type ´t_n[\mathit{tps}\_n] >: L_n <: U_n´ }.
+```
+
+## Literals
+
+```ebnf
+SimpleExpr ::= Literal
+```
+
+Typing of literals is described along with their [lexical syntax](01-lexical-syntax.html#literals); their evaluation is immediate.
+
+## The _Null_ Value
+
+The `null` value is of type `scala.Null`, and thus conforms to every reference type.
+It denotes a reference value which refers to a special `null` object.
+This object implements methods in class `scala.AnyRef` as follows:
+
+- `eq(´x\,´)` and `==(´x\,´)` return `true` iff the argument ´x´ is also the "null" object.
+- `ne(´x\,´)` and `!=(´x\,´)` return `true` iff the argument ´x´ is not also the "null" object.
+- `isInstanceOf[´T\,´]` always returns `false`.
+- `asInstanceOf[´T\,´]` returns the [default value](04-basic-declarations-and-definitions.html#value-declarations-and-definitions) of type ´T´.
+- `##` returns ``0``.
+
+A reference to any other member of the "null" object causes a `NullPointerException` to be thrown.
+
+## Designators
+
+```ebnf
+SimpleExpr ::= Path
+             | SimpleExpr ‘.’ id
+```
+
+A designator refers to a named term. It can be a _simple name_ or a _selection_.
+
+A simple name ´x´ refers to a value as specified [here](02-identifiers-names-and-scopes.html#identifiers,-names-and-scopes).
+If ´x´ is bound by a definition or declaration in an enclosing class or object ´C´, it is taken to be equivalent to the selection `´C´.this.´x´` where ´C´ is taken to refer to the class containing ´x´ even if the type name ´C´ is [shadowed](02-identifiers-names-and-scopes.html#identifiers,-names-and-scopes) at the occurrence of ´x´.
+
+If ´r´ is a [stable identifier](03-types.html#paths) of type ´T´, the selection ´r.x´ refers statically to a term member ´m´ of ´r´ that is identified in ´T´ by the name ´x´.
+
+
+
+For other expressions ´e´, ´e.x´ is typed as if it was `{ val ´y´ = ´e´; ´y´.´x´ }`, for some fresh name ´y´.
+
+The expected type of a designator's prefix is always undefined.
+The type of a designator is the type ´T´ of the entity it refers to, with the following exception: The type of a [path](03-types.html#paths) ´p´ which occurs in a context where a [stable type](03-types.html#singleton-types) is required is the singleton type `´p´.type`.
+
+The contexts where a stable type is required are those that satisfy one of the following conditions:
+
+1. The path ´p´ occurs as the prefix of a selection and it does not designate a constant, or
+1. The expected type ´\mathit{pt}´ is a stable type, or
+1. The expected type ´\mathit{pt}´ is an abstract type with a stable type as lower bound, and the type ´T´ of the entity referred to by ´p´ does not conform to ´\mathit{pt}´, or
+1. The path ´p´ designates a module.
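+
+As a small sketch of these conditions, a path that designates a module, or whose expected type is stable, receives a singleton type:
+
+```scala
+object A
+val x: A.type = A // conditions 2 and 4: A designates a module and the expected type A.type is stable
+```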
+ +The selection ´e.x´ is evaluated by first evaluating the qualifier expression ´e´, which yields an object ´r´, say. +The selection's result is then the member of ´r´ that is either defined by ´m´ or defined by a definition overriding ´m´. + +## This and Super + +```ebnf +SimpleExpr ::= [id ‘.’] ‘this’ + | [id ‘.’] ‘super’ [ClassQualifier] ‘.’ id +``` + +The expression `this` can appear in the statement part of a template or compound type. +It stands for the object being defined by the innermost template or compound type enclosing the reference. +If this is a compound type, the type of `this` is that compound type. +If it is a template of a class or object definition with simple name ´C´, the type of this is the same as the type of `´C´.this`. + +The expression `´C´.this` is legal in the statement part of an enclosing class or object definition with simple name ´C´. +It stands for the object being defined by the innermost such definition. +If the expression's expected type is a stable type, or `´C´.this` occurs as the prefix of a selection, its type is `´C´.this.type`, otherwise it is the self type of class ´C´. + +A reference `super.´m´` refers statically to a method or type ´m´ in the least proper supertype of the innermost template containing the reference. +It evaluates to the member ´m'´ in the actual supertype of that template which is equal to ´m´ or which overrides ´m´. +The statically referenced member ´m´ must be a type or a method. + + + +If it is a method, it must be concrete, or the template containing the reference must have a member ´m'´ which overrides ´m´ and which is labeled `abstract override`. + +A reference `´C´.super.´m´` refers statically to a method or type ´m´ in the least proper supertype of the innermost enclosing class or object definition named ´C´ which encloses the reference. +It evaluates to the member ´m'´ in the actual supertype of that class or object +which is equal to ´m´ or which overrides ´m´. +The statically referenced member ´m´ must be a type or a method. +If the statically referenced member ´m´ is a method, it must be concrete, or the innermost enclosing class or object definition named ´C´ must have a member ´m'´ which overrides ´m´ and which is labeled `abstract override`. + +The `super` prefix may be followed by a trait qualifier `[´T\,´]`, as in `´C´.super[´T\,´].´x´`. +This is called a _static super reference_. +In this case, the reference is to the type or method of ´x´ in the parent trait of ´C´ whose simple name is ´T´. +That member must be uniquely defined. +If it is a method, it must be concrete. + +###### Example +Consider the following class definitions + +```scala +class Root { def x = "Root" } +class A extends Root { override def x = "A" ; def superA = super.x } +trait B extends Root { override def x = "B" ; def superB = super.x } +class C extends Root with B { + override def x = "C" ; def superC = super.x +} +class D extends A with B { + override def x = "D" ; def superD = super.x +} +``` + +The linearization of class `C` is `{C, B, Root}` and the linearization of class `D` is `{D, B, A, Root}`. +Then we have: + +```scala +(new A).superA == "Root" + +(new C).superB == "Root" +(new C).superC == "B" + +(new D).superA == "Root" +(new D).superB == "A" +(new D).superD == "B" +``` + +Note that the `superB` method returns different results depending on whether `B` is mixed in with class `Root` or `A`. 
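+
+As a further sketch of a static super reference (reusing the classes above, with a hypothetical class `E` that is not part of the original example), the trait qualifier selects the parent to which the reference is statically bound:
+
+```scala
+class E extends A with B {
+  def superBx = super[B].x // statically bound to B.x, so (new E).superBx == "B"
+}
+```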
+
+## Method Applications
+
+```ebnf
+SimpleExpr ::= SimpleExpr1 ArgumentExprs
+ArgumentExprs ::= ‘(’ [Exprs] ‘)’
+                | ‘(’ ‘using’ Exprs ‘)’
+                | ‘(’ [Exprs ‘,’] PostfixExpr ‘:’ ‘_’ ‘*’ ‘)’
+                | [nl] BlockExpr
+Exprs ::= Expr {‘,’ Expr}
+```
+
+An application `´f(e_1, ..., e_m)´` applies the method `´f´` to the argument expressions `´e_1, ..., e_m´`.
+For this expression to be well-typed, the method must be *applicable* to its arguments:
+
+If ´f´ has a method type `(´p_1´:´T_1, ..., p_n´:´T_n´)´U´`, each argument expression ´e_i´ is typed with the corresponding parameter type ´T_i´ as expected type.
+Let ´S_i´ be the type of argument ´e_i´ ´(i = 1, ..., m)´.
+The method ´f´ must be _applicable_ to its arguments ´e_1, ..., e_m´ of types ´S_1, ..., S_m´.
+We say that an argument expression ´e_i´ is a _named_ argument if it has the form `´x_i=e'_i´` and `´x_i´` is one of the parameter names `´p_1, ..., p_n´`.
+
+Once the types ´S_i´ have been determined, the method ´f´ of the above method type is said to be applicable if all of the following conditions hold:
+  - for every named argument ´p_j=e_i'´ the type ´S_i´ is [compatible](03-types.html#compatibility) with the parameter type ´T_j´;
+  - for every positional argument ´e_i´ the type ´S_i´ is [compatible](03-types.html#compatibility) with ´T_i´;
+  - if the expected type is defined, the result type ´U´ is [compatible](03-types.html#compatibility) to it.
+
+If ´f´ instead has some value type, the application is taken to be equivalent to `´f´.apply(´e_1, ..., e_m´)`, i.e. the application of an `apply` method defined by ´f´.
+Value `´f´` is applicable to the given arguments if `´f´.apply` is applicable.
+
+Notes:
+- In the case where ´f´ or `´f´.apply` is a polymorphic method, this is taken as an [omitted type application](#type-applications).
+- `´f´` is applicable to the given arguments if the result of this type application is applicable.
+
+The application `´f´(´e_1, ..., e_n´)` evaluates ´f´ and then each argument ´e_1, ..., e_n´ from left to right, except for arguments that correspond to a by-name parameter (see below).
+Each argument expression is converted to the type of its corresponding formal parameter.
+After that, the application is rewritten to the method's right hand side, with actual arguments substituted for formal parameters.
+The result of evaluating the rewritten right-hand side is finally converted to the method's declared result type, if one is given.
+
+The case of a formal parameter with a parameterless method type `=> ´T´` is treated specially.
+In this case, the corresponding actual argument expression ´e´ is not evaluated before the application.
+Instead, every use of the formal parameter on the right-hand side of the rewrite rule entails a re-evaluation of ´e´.
+In other words, the evaluation order for `=>`-parameters is _call-by-name_ whereas the evaluation order for normal parameters is _call-by-value_.
+Furthermore, it is required that ´e´'s [packed type](#expression-typing) conforms to the parameter type ´T´.
+The behavior of by-name parameters is preserved if the application is transformed into a block due to named or default arguments.
+In this case, the local value for that parameter has the form `val ´y_i´ = () => ´e´` and the argument passed to the method is `´y_i´()`.
+
+The last argument in an application may be marked as a sequence argument, e.g. `´e´: _*`.
+Such an argument must correspond to a [repeated parameter](04-basic-declarations-and-definitions.html#repeated-parameters) of type `´S´*` and it must be the only argument matching this parameter (i.e. the number of formal parameters and actual arguments must be the same). +Furthermore, the type of ´e´ must conform to `scala.Seq[´T´]`, for some type ´T´ which conforms to ´S´. +In this case, the argument list is transformed by replacing the sequence ´e´ with its elements. +When the application uses named arguments, the vararg parameter has to be specified exactly once. + +If only a single argument is supplied, it may be supplied as a block expression and parentheses can be omitted, in the form `´f´ { block }`. +This is valid when `f` has a single formal parameter or when all other formal parameters have default values. + +A method application usually allocates a new frame on the program's run-time stack. +However, if a local method or a final method calls itself as its last action, the call is executed using the stack-frame of the caller. + +###### Example +Assume the following method which computes the sum of a variable number of arguments: + +```scala +def sum(xs: Int*) = xs.foldLeft(0)((x, y) => x + y) +``` + +Then + +```scala +sum(1, 2, 3, 4) +sum(List(1, 2, 3, 4): _*) +``` + +both yield `10` as result. +On the other hand, + +```scala +sum(List(1, 2, 3, 4)) +``` + +would not typecheck. + +An argument list may begin with the soft keyword `using` to facilitate cross-compilation with Scala 3. +The keyword is ignored. + +### Named and Default Arguments + +If an application is to use named arguments ´p = e´ or default arguments, the following conditions must hold. + +- For every named argument ´p_i = e_i´ which appears left of a positional argument in the argument list ´e_1 ... e_m´, the argument position ´i´ coincides with the position of parameter ´p_i´ in the parameter list of the applied method. +- The names ´x_i´ of all named arguments are pairwise distinct and no named argument defines a parameter which is already specified by a positional argument. +- Every formal parameter ´p_j:T_j´ which is not specified by either a positional or named argument has a default argument. + +If the application uses named or default arguments the following transformation is applied to convert it into an application without named or default arguments. + +If the method ´f´ has the form `´p.m´[´\mathit{targs}´]` it is transformed into the block + +```scala +{ val q = ´p´ + q.´m´[´\mathit{targs}´] +} +``` + +If the method ´f´ is itself an application expression the transformation is applied recursively on ´f´. +The result of transforming ´f´ is a block of the form + +```scala +{ val q = ´p´ + val ´x_1´ = expr´_1´ + ... + val ´x_k´ = expr´_k´ + q.´m´[´\mathit{targs}´](´\mathit{args}_1´), ...,(´\mathit{args}_l´) +} +``` + +where every argument in ´(\mathit{args}\_1), ..., (\mathit{args}\_l)´ is a reference to one of the values ´x_1, ..., x_k´. +To integrate the current application into the block, first a value definition using a fresh name ´y_i´ is created for every argument in ´e_1, ..., e_m´, which is initialised to ´e_i´ for positional arguments and to ´e'_i´ for named arguments of the form `´x_i=e'_i´`. +Then, for every parameter which is not specified by the argument list, a value definition using a fresh name ´z_i´ is created, which is initialized using the method computing the [default argument](04-basic-declarations-and-definitions.html#method-declarations-and-definitions) of this parameter. 
+
+Let ´\mathit{args}´ be a permutation of the generated names ´y_i´ and ´z_i´ such that the position of each name matches the position of its corresponding parameter in the method type `(´p_1:T_1, ..., p_n:T_n´)´U´`.
+The final result of the transformation is a block of the form
+
+```scala
+{ val q = ´p´
+  val ´x_1´ = expr´_1´
+  ...
+  val ´x_k´ = expr´_k´
+  val ´y_1´ = ´e_1´
+  ...
+  val ´y_m´ = ´e_m´
+  val ´z_1´ = ´q.m\$default\$i[\mathit{targs}](\mathit{args}_1), ..., (\mathit{args}_l)´
+  ...
+  val ´z_d´ = ´q.m\$default\$j[\mathit{targs}](\mathit{args}_1), ..., (\mathit{args}_l)´
+  q.´m´[´\mathit{targs}´](´\mathit{args}_1´), ..., (´\mathit{args}_l´)(´\mathit{args}´)
+}
+```
+
+### Signature Polymorphic Methods
+
+For invocations of signature polymorphic methods of the target platform `´f´(´e_1, ..., e_m´)`, the invoked method has a different method type `(´p_1´:´T_1, ..., p_n´:´T_n´)´U´` at each call site.
+The parameter types `´T_1, ..., T_n´` are the types of the argument expressions `´e_1, ..., e_m´`.
+If the declared return type `´R´` of the signature polymorphic method is any type other than `scala.AnyRef`, then the return type `´U´` is `´R´`.
+Otherwise, `´U´` is the expected type at the call site. If the expected type is undefined then `´U´` is `scala.AnyRef`.
+The parameter names `´p_1, ..., p_n´` are fresh.
+
+###### Note
+
+On the Java platform version 11 and later, signature polymorphic methods are native, members of `java.lang.invoke.MethodHandle` or `java.lang.invoke.VarHandle`, and have a single repeated parameter of type `java.lang.Object*`.
+
+## Method Values
+
+```ebnf
+SimpleExpr ::= SimpleExpr1 ‘_’
+```
+
+The expression `´e´ _` is well-formed if ´e´ is of method type or if ´e´ is a call-by-name parameter.
+If ´e´ is a method with parameters, `´e´ _` represents ´e´ converted to a function type by [eta expansion](#eta-expansion-section).
+If ´e´ is a parameterless method or call-by-name parameter of type `=> ´T´`, `´e´ _` represents the function of type `() => ´T´`, which evaluates ´e´ when it is applied to the empty parameter list `()`.
+
+###### Example
+The method values in the left column are each equivalent to the [eta-expanded expressions](#eta-expansion-section) on the right.
+
+| placeholder syntax            | eta-expansion                                                               |
+|------------------------------ | ----------------------------------------------------------------------------|
+|`math.sin _`                   | `x => math.sin(x)`                                                          |
+|`math.pow _`                   | `(x1, x2) => math.pow(x1, x2)`                                              |
+|`val vs = 1 to 9; vs.fold _`   | `(z) => (op) => vs.fold(z)(op)`                                             |
+|`(1 to 9).fold(z)_`            | `{ val eta1 = 1 to 9; val eta2 = z; op => eta1.fold(eta2)(op) }`            |
+|`Some(1).fold(??? : Int)_`     | `{ val eta1 = Some(1); val eta2 = () => ???; op => eta1.fold(eta2())(op) }` |
+
+Note that a space is necessary between a method name and the trailing underscore because otherwise the underscore would be considered part of the name.
+
+## Type Applications
+
+```ebnf
+SimpleExpr ::= SimpleExpr TypeArgs
+```
+
+A _type application_ `´e´[´T_1, ..., T_n´]` instantiates a polymorphic method ´e´ of type `[´a_1´ >: ´L_1´ <: ´U_1, ..., a_n´ >: ´L_n´ <: ´U_n´]´S´` with argument types `´T_1, ..., T_n´`.
+Every argument type ´T_i´ must obey the corresponding bounds ´L_i´ and ´U_i´.
+That is, for each ´i = 1, ..., n´, we must have ´\sigma L_i <: T_i <: \sigma U_i´, where ´\sigma´ is the substitution ´[a_1 := T_1, ..., a_n := T_n]´.
+The type of the application is ´\sigma S´.
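+
+As a simple sketch of the bounds check (the method `pick` here is hypothetical), an explicit type application must satisfy the declared bounds:
+
+```scala
+def pick[A <: AnyRef](x: A, y: A): A = x
+
+pick[String]("a", "b") // ok: A := String and String <: AnyRef
+// pick[Int](1, 2)     // ill-typed: Int does not conform to the upper bound AnyRef
+```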
+
+If ´e´ is not a method, and is instead of some value type, the type application is taken to be equivalent to `´e´.apply[´T_1, ..., T_n´]`, i.e. the application of an `apply` method defined by ´e´.
+
+Type applications can be omitted if [local type inference](#local-type-inference) can infer best type parameters for a polymorphic method from the types of the actual method arguments and the expected result type.
+
+## Tuples
+
+```ebnf
+SimpleExpr ::= ‘(’ [Exprs] ‘)’
+```
+A _tuple expression_ `(´e_1´, ..., ´e_n´)` where ´n \geq 2´ is equivalent to the expression `´e_1´ *: ... *: ´e_n´ *: scala.EmptyTuple`.
+
+Note: as calls to `*:` are slow, a more efficient translation is free to be implemented. For example, `(´e_1´, ´e_2´)` could be translated to `scala.Tuple2(´e_1´, ´e_2´)`, which is indeed equivalent to `´e_1´ *: ´e_2´ *: scala.EmptyTuple`.
+
+Notes:
+- The expression `(´e_1´)` is not equivalent to `´e_1´ *: scala.EmptyTuple`, but is instead a regular parenthesized expression.
+- The expression `()` is not an alias for `scala.EmptyTuple`, but is instead the unique value of type `scala.Unit`.
+
+## Instance Creation Expressions
+
+```ebnf
+SimpleExpr ::= ‘new’ (ClassTemplate | TemplateBody)
+```
+
+A _simple instance creation expression_ is of the form `new ´c´` where ´c´ is a [constructor invocation](05-classes-and-objects.html#constructor-invocations).
+Let ´T´ be the type of ´c´.
+Then ´T´ must denote (a type instance of) a non-abstract subclass of `scala.AnyRef`.
+Furthermore, the _concrete self type_ of the expression must conform to the [self type](05-classes-and-objects.html#templates) of the class denoted by ´T´.
+The concrete self type is normally ´T´, except if the expression `new ´c´` appears as the right hand side of a value definition
+
+```scala
+val ´x´: ´S´ = new ´c´
+```
+
+(where the type annotation `: ´S´` may be missing).
+In the latter case, the concrete self type of the expression is the compound type `´T´ with ´x´.type`.
+
+The expression is evaluated by creating a fresh object of type ´T´ which is initialized by evaluating ´c´.
+The type of the expression is ´T´.
+
+A _general instance creation expression_ is of the form `new ´t´` for some [class template](05-classes-and-objects.html#templates) ´t´.
+Such an expression is equivalent to the block
+
+```scala
+{ class ´a´ extends ´t´; new ´a´ }
+```
+
+where ´a´ is a fresh name of an _anonymous class_ which is inaccessible to user programs.
+
+There is also a shorthand form for creating values of structural types:
+If `{´D´}` is a class body, then `new {´D´}` is equivalent to the general instance creation expression `new AnyRef{´D´}`.
+
+###### Example
+Consider the following structural instance creation expression:
+
+```scala
+new { def getName() = "aaron" }
+```
+
+This is a shorthand for the general instance creation expression
+
+```scala
+new AnyRef{ def getName() = "aaron" }
+```
+
+The latter is in turn a shorthand for the block
+
+```scala
+{ class anon$X extends AnyRef{ def getName() = "aaron" }; new anon$X }
+```
+
+where `anon$X` is some freshly created name.
+
+## Blocks
+
+```ebnf
+BlockExpr ::= ‘{’ CaseClauses ‘}’
+            | ‘{’ Block ‘}’
+Block ::= BlockStat {semi BlockStat} [ResultExpr]
+```
+
+A _block expression_ `{´s_1´; ...; ´s_n´; ´e\,´}` is constructed from a sequence of block statements ´s_1, ..., s_n´ and a final expression ´e´.
+The statement sequence may not contain two definitions or declarations that bind the same name in the same namespace.
+The final expression can be omitted, in which case the unit value `()` is assumed. + +The expected type of the final expression ´e´ is the expected type of the block. +The expected type of all preceding statements is undefined. + + +The type of a block `´s_1´; ...; ´s_n´; ´e´` is some type ´T´ such that: + +- ´U <: T´ where ´U´ is the type of ´e´. +- No value or type name is free in ´T´, i.e., ´T´ does not refer to any value or type locally defined in one of the statements ´s_1, ..., s_n´. +- ´T´ is "as small as possible" (this is a soft requirement). + +The precise way in which we compute ´T´, called _type avoidance_, is currently not defined in this specification. + +Evaluation of the block entails evaluation of its statement sequence, followed by an evaluation of the final expression ´e´, which defines the result of the block. + +A block expression `{´c_1´; ...; ´c_n´}` where ´c_1, ..., c_n´ are case clauses forms a [pattern matching anonymous function](08-pattern-matching.html#pattern-matching-anonymous-functions). + +## Prefix, Infix, and Postfix Operations + +```ebnf +PostfixExpr ::= InfixExpr [id [nl]] +InfixExpr ::= PrefixExpr + | InfixExpr id [nl] InfixExpr +PrefixExpr ::= [‘-’ | ‘+’ | ‘!’ | ‘~’] SimpleExpr +``` + +Expressions can be constructed from operands and operators. + +### Prefix Operations + +A prefix operation ´\mathit{op};e´ consists of a prefix operator ´\mathit{op}´, which must be one of the identifiers ‘`+`’, ‘`-`’, ‘`!`’ or ‘`~`’, which must not be enclosed in backquotes. +The expression ´\mathit{op};e´ is equivalent to the postfix method application `e.unary_´\mathit{op}´`. + + + +Prefix operators are different from normal method applications in that their operand expression need not be atomic. +For instance, the input sequence `-sin(x)` is read as `-(sin(x))`, whereas the method application `negate sin(x)` would be parsed as the application of the infix operator `sin` to the operands `negate` and `(x)`. + +### Postfix Operations + +A postfix operator can be an arbitrary identifier. +The postfix operation ´e;\mathit{op}´ is interpreted as ´e.\mathit{op}´. + +### Infix Operations + +An infix operator can be an arbitrary identifier. +Infix operators have precedence and associativity defined as follows: + +The _precedence_ of an infix operator is determined by the operator's first character. +Characters are listed below in increasing order of precedence, with characters on the same line having the same precedence. + +```scala +(all letters, as defined in [chapter 1](01-lexical-syntax.html), including `_` and `$`) +| +^ +& += ! +< > +: ++ - +* / % +(other operator characters, as defined in [chapter 1](01-lexical-syntax.html), including Unicode categories `Sm` and `So`) +``` + +That is, operators starting with a letter have lowest precedence, followed by operators starting with ‘`|`’, etc. + +There's one exception to this rule, which concerns [_assignment operators_](#assignment-operators). +The precedence of an assignment operator is the same as the one of simple assignment `(=)`. +That is, it is lower than the precedence of any other operator. + +The _associativity_ of an operator is determined by the operator's +last character. +Operators ending in a colon ‘`:`’ are right-associative. +All other operators are left-associative. + +Precedence and associativity of operators determine the grouping of parts of an expression as follows. 
+ +- If there are several infix operations in an expression, then operators with higher precedence bind more closely than operators with lower precedence. +- If there are consecutive infix operations ´e_0; \mathit{op}\_1; e_1; \mathit{op}\_2 ... \mathit{op}\_n; e_n´ with operators ´\mathit{op}\_1, ..., \mathit{op}\_n´ of the same precedence, then all these operators must have the same associativity. +If all operators are left-associative, the sequence is interpreted as ´(...(e_0;\mathit{op}\_1;e_1);\mathit{op}\_2...);\mathit{op}\_n;e_n´. +Otherwise, if all operators are right-associative, the sequence is interpreted as ´e_0;\mathit{op}\_1;(e_1;\mathit{op}\_2;(... \mathit{op}\_n;e_n)...)´. +- Postfix operators always have lower precedence than infix operators. E.g. ´e_1;\mathit{op}\_1;e_2;\mathit{op}\_2´ is always equivalent to ´(e_1;\mathit{op}\_1;e_2);\mathit{op}\_2´. + +The right-hand operand of a left-associative operator may consist of several arguments enclosed in parentheses, e.g. ´e;\mathit{op};(e_1,...,e_n)´. +This expression is then interpreted as ´e.\mathit{op}(e_1,...,e_n)´. + +A left-associative binary operation ´e_1;\mathit{op};e_2´ is interpreted as ´e_1.\mathit{op}(e_2)´. If ´\mathit{op}´ is right-associative and its parameter is passed by name, the same operation is interpreted as ´e_2.\mathit{op}(e_1)´. +If ´\mathit{op}´ is right-associative and its parameter is passed by value, it is interpreted as `{ val ´x´=´e_1´; ´e_2´.´\mathit{op}´(´x\,´) }`, where ´x´ is a fresh name. + +### Assignment Operators + +An _assignment operator_ is an operator symbol (syntax category `op` in [Identifiers](01-lexical-syntax.html#identifiers)) that ends in an equals character “`=`”, with the following exceptions: + +1. the operator also starts with an equals character, or +1. the operator is one of `(<=)`, `(>=)`, `(!=)`. + +Assignment operators are treated specially in that they can be expanded to assignments if no other interpretation is valid. + +Let's consider an assignment operator such as `+=` in an infix operation `´l´ += ´r´`, where ´l´, ´r´ are expressions. +This operation can be re-interpreted as an operation which corresponds to the assignment + +```scala +´l´ = ´l´ + ´r´ +``` + +except that the operation's left-hand-side ´l´ is evaluated only once. + +The re-interpretation occurs if the following two conditions are fulfilled. + +1. The left-hand-side ´l´ does not have a member named `+=`, and also cannot be converted by an [implicit conversion](#implicit-conversions) to a value with a member named `+=`. +1. The assignment `´l´ = ´l´ + ´r´` is type-correct. +In particular this implies that ´l´ refers to a variable or object that can be assigned to, and that is convertible to a value with a member named `+`. + +## Typed Expressions + +```ebnf +Expr1 ::= PostfixExpr ‘:’ CompoundType +``` + +The _typed expression_ ´e: T´ has type ´T´. +The type of expression ´e´ is expected to conform to ´T´. +The result of the expression is the value of ´e´ converted to type ´T´. + +###### Example +Here are examples of well-typed and ill-typed expressions. + +```scala +1: Int // legal, of type Int +1: Long // legal, of type Long +// 1: string // ***** illegal +``` + +## Annotated Expressions + +```ebnf +Expr1 ::= PostfixExpr ‘:’ Annotation {Annotation} +``` + +An _annotated expression_ `´e´: @´a_1´ ... @´a_n´` attaches [annotations](11-annotations.html#user-defined-annotations) ´a_1, ..., a_n´ to the expression ´e´. 
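+
+For instance, the standard `scala.unchecked` annotation can be attached to a match scrutinee this way (a small usage sketch):
+
+```scala
+val xs: List[Int] = List(1, 2, 3)
+(xs: @unchecked) match {
+  case y :: _ => println(y)
+}
+// without the @unchecked annotation, the compiler warns that the match is not exhaustive
+```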
+ +## Assignments + +```ebnf +Expr1 ::= [SimpleExpr ‘.’] id ‘=’ Expr + | PrefixOperator SimpleExpr ‘=’ Expr + | SimpleExpr1 ArgumentExprs ‘=’ Expr +``` + +The interpretation of an assignment to a simple variable `´x´ = ´e´` depends on the definition of ´x´. +If ´x´ denotes a mutable variable, then the assignment changes the current value of ´x´ to be the result of evaluating the expression ´e´. +The type of ´e´ is expected to conform to the type of ´x´. +If ´x´ is a parameterless method defined in some template, and the same template contains a setter method `´x´_=` as member, then the assignment `´x´ = ´e´` is interpreted as the invocation `´x´_=(´e\,´)` of that setter method. +Analogously, an assignment `´f.x´ = ´e´` to a parameterless method ´x´ is interpreted as the invocation `´f.x´_=(´e\,´)`. +If ´x´ is an application of a unary operator, then the expression is interpreted as though it were written as the explicit application `´x´.unary_´\mathit{op}´`, namely, as `´x´.unary_´\mathit{op}´_=(´e\,´)`. + +An assignment `´f´(´\mathit{args}\,´) = ´e´` with a method application to the left of the ‘`=`’ operator is interpreted as `´f.´update(´\mathit{args}´, ´e\,´)`, i.e. the invocation of an `update` method defined by ´f´. + +###### Example +Here are some assignment expressions and their equivalent expansions. + +| assignment | expansion | +|--------------------------|----------------------| +|`x.f = e` | `x.f_=(e)` | +|`x.f() = e` | `x.f.update(e)` | +|`x.f(i) = e` | `x.f.update(i, e)` | +|`x.f(i, j) = e` | `x.f.update(i, j, e)`| + +###### Example Imperative Matrix Multiplication + +Here is the usual imperative code for matrix multiplication. + +```scala +def matmul(xss: Array[Array[Double]], yss: Array[Array[Double]]) = { + val zss: Array[Array[Double]] = new Array(xss.length, yss(0).length) + var i = 0 + while (i < xss.length) { + var j = 0 + while (j < yss(0).length) { + var acc = 0.0 + var k = 0 + while (k < yss.length) { + acc = acc + xss(i)(k) * yss(k)(j) + k += 1 + } + zss(i)(j) = acc + j += 1 + } + i += 1 + } + zss +} +``` + +Desugaring the array accesses and assignments yields the following expanded version: + +```scala +def matmul(xss: Array[Array[Double]], yss: Array[Array[Double]]) = { + val zss: Array[Array[Double]] = new Array(xss.length, yss.apply(0).length) + var i = 0 + while (i < xss.length) { + var j = 0 + while (j < yss.apply(0).length) { + var acc = 0.0 + var k = 0 + while (k < yss.length) { + acc = acc + xss.apply(i).apply(k) * yss.apply(k).apply(j) + k += 1 + } + zss.apply(i).update(j, acc) + j += 1 + } + i += 1 + } + zss +} +``` + +## Conditional Expressions + +```ebnf +Expr1 ::= ‘if’ ‘(’ Expr ‘)’ {nl} Expr [[semi] ‘else’ Expr] +``` + +The _conditional expression_ `if (´e_1´) ´e_2´ else ´e_3´` chooses one of the values of ´e_2´ and ´e_3´, depending on the value of ´e_1´. +The condition ´e_1´ is expected to conform to type `Boolean`. +The then-part ´e_2´ and the else-part ´e_3´ are both expected to conform to the expected type of the conditional expression. +The type of the conditional expression is the [weak least upper bound](03-types.html#weak-conformance) of the types of ´e_2´ and ´e_3´. +A semicolon preceding the `else` symbol of a conditional expression is ignored. + +The conditional expression is evaluated by evaluating first ´e_1´. +If this evaluates to `true`, the result of evaluating ´e_2´ is returned, otherwise the result of evaluating ´e_3´ is returned. + +A short form of the conditional expression eliminates the else-part. 
+The conditional expression `if (´e_1´) ´e_2´` is evaluated as if it was `if (´e_1´) ´e_2´ else ()`.
+
+## While Loop Expressions
+
+```ebnf
+Expr1 ::= ‘while’ ‘(’ Expr ‘)’ {nl} Expr
+```
+
+The _while loop expression_ `while (´e_1´) ´e_2´` is typed and evaluated as if it was an application of `whileLoop (´e_1´) (´e_2´)` where the hypothetical method `whileLoop` is defined as follows.
+
+```scala
+def whileLoop(cond: => Boolean)(body: => Unit): Unit =
+  if (cond) { body ; whileLoop(cond)(body) } else {}
+```
+
+## For Comprehensions and For Loops
+
+```ebnf
+Expr1 ::= ‘for’ (‘(’ Enumerators ‘)’ | ‘{’ Enumerators ‘}’)
+          {nl} [‘yield’] Expr
+Enumerators ::= Generator {semi Generator}
+Generator ::= [‘case’] Pattern1 ‘<-’ Expr {[semi] Guard | semi Pattern1 ‘=’ Expr}
+Guard ::= ‘if’ PostfixExpr
+```
+
+A _for loop_ `for (´\mathit{enums}\,´) ´e´` executes expression ´e´ for each binding generated by the enumerators ´\mathit{enums}´.
+A _for comprehension_ `for (´\mathit{enums}\,´) yield ´e´` evaluates expression ´e´ for each binding generated by the enumerators ´\mathit{enums}´ and collects the results.
+An enumerator sequence always starts with a generator; this can be followed by further generators, value definitions, or guards.
+
+A _generator_ `´p´ <- ´e´` produces bindings from an expression ´e´ which is matched in some way against pattern ´p´.
+Optionally, `case` can appear in front of a generator pattern; this has no meaning in Scala 2, but will be [required in Scala 3 if `p` is not irrefutable](https://docs.scala-lang.org/scala3/reference/changed-features/pattern-bindings.html).
+
+A _value definition_ `´p´ = ´e´` binds the value name ´p´ (or several names in a pattern ´p´) to the result of evaluating the expression ´e´.
+A _guard_ `if ´e´` contains a boolean expression which restricts enumerated bindings.
+The precise meaning of generators and guards is defined by translation to invocations of four methods: `map`, `withFilter`, `flatMap`, and `foreach`.
+These methods can be implemented in different ways for different carrier types.
+
+The translation scheme is as follows.
+In a first step, every generator `´p´ <- ´e´`, where ´p´ is not [irrefutable](08-pattern-matching.html#patterns) for the type of ´e´, is replaced by
+
+```scala
+´p´ <- ´e´.withFilter { case ´p´ => true; case _ => false }
+```
+
+Then, the following rules are applied repeatedly until all comprehensions have been eliminated.
+
+  - A for comprehension `for (´p´ <- ´e\,´) yield ´e'´` is translated to `´e´.map { case ´p´ => ´e'´ }`.
+  - A for loop `for (´p´ <- ´e\,´) ´e'´` is translated to `´e´.foreach { case ´p´ => ´e'´ }`.
+  - A for comprehension
+
+    ```scala
+    for (´p´ <- ´e´; ´p'´ <- ´e'; ...´) yield ´e''´
+    ```
+
+    where `...` is a (possibly empty) sequence of generators, definitions, or guards, is translated to
+
+    ```scala
+    ´e´.flatMap { case ´p´ => for (´p'´ <- ´e'; ...´) yield ´e''´ }
+    ```
+
+  - A for loop
+
+    ```scala
+    for (´p´ <- ´e´; ´p'´ <- ´e'; ...´) ´e''´
+    ```
+
+    where `...` is a (possibly empty) sequence of generators, definitions, or guards, is translated to
+
+    ```scala
+    ´e´.foreach { case ´p´ => for (´p'´ <- ´e'; ...´) ´e''´ }
+    ```
+
+  - A generator `´p´ <- ´e´` followed by a guard `if ´g´` is translated to a single generator `´p´ <- ´e´.withFilter((´x_1, ..., x_n´) => ´g\,´)` where ´x_1, ..., x_n´ are the free variables of ´p´.
+
+ - A generator `´p´ <- ´e´` followed by a value definition `´p'´ = ´e'´` is translated to the following generator of pairs of values, where ´x´ and ´x'´ are fresh names:
+
+   ```scala
+   (´p´, ´p'´) <- for (´x @ p´ <- ´e´) yield { val ´x' @ p'´ = ´e'´; (´x´, ´x'´) }
+   ```
+
+###### Example
+The following code produces all pairs of numbers between ´1´ and ´n-1´ whose sums are prime.
+
+```scala
+for { i <- 1 until n
+      j <- 1 until i
+      if isPrime(i+j)
+} yield (i, j)
+```
+
+The for comprehension is translated to:
+
+```scala
+(1 until n)
+  .flatMap {
+    case i => (1 until i)
+      .withFilter { j => isPrime(i+j) }
+      .map { case j => (i, j) }
+  }
+```
+
+###### Example
+For comprehensions can be used to express vector and matrix algorithms concisely.
+For instance, here is a method to compute the transpose of a given matrix:
+
+```scala
+def transpose[A](xss: Array[Array[A]]) = {
+  for (i <- Array.range(0, xss(0).length)) yield
+    for (xs <- xss) yield xs(i)
+}
+```
+
+Here is a method to compute the scalar product of two vectors:
+
+```scala
+def scalprod(xs: Array[Double], ys: Array[Double]) = {
+  var acc = 0.0
+  for ((x, y) <- xs zip ys) acc = acc + x * y
+  acc
+}
+```
+
+Finally, here is a method to compute the product of two matrices.
+Compare with the [imperative version](#example-imperative-matrix-multiplication).
+
+```scala
+def matmul(xss: Array[Array[Double]], yss: Array[Array[Double]]) = {
+  val ysst = transpose(yss)
+  for (xs <- xss) yield
+    for (yst <- ysst) yield
+      scalprod(xs, yst)
+}
+```
+
+The code above makes use of the fact that `map`, `flatMap`, `withFilter`, and `foreach` are defined for instances of class `scala.Array`.
+
+## Return Expressions
+
+```ebnf
+Expr1 ::= ‘return’ [Expr]
+```
+
+A _return expression_ `return ´e´` must occur inside the body of some enclosing user-defined method.
+The innermost enclosing method in a source program, ´m´, must have an explicitly declared result type, and the type of ´e´ must conform to it.
+
+The return expression evaluates the expression ´e´ and returns its value as the result of ´m´.
+The evaluation of any statements or expressions following the return expression is omitted.
+The type of a return expression is `scala.Nothing`.
+
+The expression ´e´ may be omitted.
+The return expression `return` is type-checked and evaluated as if it were `return ()`.
+
+Returning from a method from within a nested function may be implemented by throwing and catching a `scala.runtime.NonLocalReturnControl`.
+Any exception handlers between the point of return and the enclosing method might see and catch that exception.
+A key comparison makes sure that this exception is only caught by the method instance which is terminated by the return.
+
+If the return expression is itself part of an anonymous function, it is possible that the enclosing method ´m´ has already returned before the return expression is executed.
+In that case, the thrown `scala.runtime.NonLocalReturnControl` will not be caught, and will propagate up the call stack.
+
+## Throw Expressions
+
+```ebnf
+Expr1 ::= ‘throw’ Expr
+```
+
+A _throw expression_ `throw ´e´` evaluates the expression ´e´.
+The type of this expression must conform to `Throwable`.
+If ´e´ evaluates to an exception reference, evaluation is aborted with the thrown exception.
+If ´e´ evaluates to `null`, evaluation is instead aborted with a `NullPointerException`.
+If there is an active [`try` expression](#try-expressions) which handles the thrown exception, evaluation resumes with the handler; otherwise the thread executing the `throw` is aborted.
+The type of a throw expression is `scala.Nothing`.
+
+## Try Expressions
+
+```ebnf
+Expr1 ::= ‘try’ Expr [‘catch’ Expr] [‘finally’ Expr]
+```
+
+A _try expression_ is of the form `try { ´b´ } catch ´h´` where the handler ´h´ is usually a [pattern matching anonymous function](08-pattern-matching.html#pattern-matching-anonymous-functions)
+
+```scala
+{ case ´p_1´ => ´b_1´ ... case ´p_n´ => ´b_n´ }
+```
+
+This expression is evaluated by evaluating the block ´b´.
+If evaluation of ´b´ does not cause an exception to be thrown, the result of ´b´ is returned.
+Otherwise the handler ´h´ is applied to the thrown exception.
+If the handler contains a case matching the thrown exception, the first such case is invoked.
+If the handler contains no case matching the thrown exception, the exception is re-thrown.
+More generally, if the handler is a `PartialFunction`, it is applied only if it is defined at the given exception.
+
+Let ´\mathit{pt}´ be the expected type of the try expression.
+The block ´b´ is expected to conform to ´\mathit{pt}´.
+The handler ´h´ is expected to conform to type `scala.Function1[scala.Throwable, ´\mathit{pt}\,´]`.
+The type of the try expression is the [weak least upper bound](03-types.html#weak-conformance) of the type of ´b´ and the result type of ´h´.
+
+A try expression `try { ´b´ } finally ´e´` evaluates the block ´b´.
+If evaluation of ´b´ does not cause an exception to be thrown, the expression ´e´ is evaluated.
+If an exception is thrown during evaluation of ´e´, the evaluation of the try expression is aborted with the thrown exception.
+If no exception is thrown during evaluation of ´e´, the result of ´b´ is returned as the result of the try expression.
+
+If an exception is thrown during evaluation of ´b´, the finally block ´e´ is also evaluated.
+If another exception is thrown during evaluation of ´e´, evaluation of the try expression is aborted with that exception.
+If no exception is thrown during evaluation of ´e´, the original exception thrown in ´b´ is re-thrown once evaluation of ´e´ has completed.
+The block ´b´ is expected to conform to the expected type of the try expression.
+The finally expression ´e´ is expected to conform to type `Unit`.
+
+A try expression `try { ´b´ } catch ´e_1´ finally ´e_2´` is a shorthand for `try { try { ´b´ } catch ´e_1´ } finally ´e_2´`.
+
+## Anonymous Functions
+
+```ebnf
+Expr ::= (Bindings | [‘implicit’] id | ‘_’) ‘=>’ Expr
+ResultExpr ::= (Bindings | ([‘implicit’] id | ‘_’) ‘:’ CompoundType) ‘=>’ Block
+Bindings ::= ‘(’ Binding {‘,’ Binding} ‘)’
+Binding ::= (id | ‘_’) [‘:’ Type]
+```
+
+The anonymous function of arity ´n´, `(´x_1´: ´T_1, ..., x_n´: ´T_n´) => e`, maps parameters ´x_i´ of types ´T_i´ to a result given by expression ´e´.
+The scope of each formal parameter ´x_i´ is ´e´.
+Formal parameters must have pairwise distinct names.
+Type bindings can be omitted, in which case the compiler will attempt to infer valid bindings.
+
+Note: `() => ´e´` defines a nullary function (´n´ = 0), and not for example `(_: Unit) => ´e´`.
+
+In the case of a single untyped formal parameter, `(´x\,´) => ´e´` can be abbreviated to `´x´ => ´e´`.
+If an anonymous function `(´x´: ´T\,´) => ´e´` with a single typed parameter appears as the result expression of a block, it can be abbreviated to `´x´: ´T´ => e`.
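+For instance, these abbreviations can be seen in the following sketch (the names are invented for illustration):
+
+```scala
+val inc: Int => Int = x => x + 1              // single untyped parameter: no parentheses needed
+val twice = (x: Int) => x * 2                 // a typed parameter requires parentheses...
+val thrice: Int => Int = { x: Int => x * 3 }  // ...except as the result expression of a block
+```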
+
+A formal parameter may also be a wildcard represented by an underscore `_`.
+In that case, a fresh name for the parameter is chosen arbitrarily.
+
+A named parameter of an anonymous function may be optionally preceded by an `implicit` modifier.
+In that case the parameter is labeled [`implicit`](07-implicits.html#implicit-parameters-and-views); however the parameter section itself does not count as an [implicit parameter section](07-implicits.html#implicit-parameters).
+Hence, arguments to anonymous functions always have to be given explicitly.
+
+### Translation
+If the expected type of the anonymous function is of the shape `scala.Function´n´[´S_1´, ..., ´S_n´, ´R\,´]`, or can be [SAM-converted](#sam-conversion) to such a function type, the type `´T_i´` of a parameter `´x_i´` can be omitted, as long as `´S_i´` is defined in the expected type, and `´T_i´ = ´S_i´` is assumed.
+Furthermore, the expected type when type checking ´e´ is ´R´.
+
+If there is no expected type for the function literal, all formal parameter types `´T_i´` must be specified explicitly, and the expected type of ´e´ is undefined.
+The type of the anonymous function is `scala.Function´n´[´T_1´, ..., ´T_n´, ´R\,´]`, where ´R´ is the [packed type](#expression-typing) of ´e´.
+´R´ must be equivalent to a type which does not refer to any of the formal parameters ´x_i´.
+
+The eventual run-time value of an anonymous function is determined by the expected type:
+ - a subclass of one of the builtin function types, `scala.Function´n´[´S_1, ..., S_n´, ´R\,´]` (with ´S_i´ and ´R´ fully defined);
+ - a [single-abstract-method (SAM) type](#sam-conversion);
+ - `PartialFunction[´T´, ´U´]`;
+ - some other type.
+
+The standard anonymous function evaluates in the same way as the following instance creation expression:
+
+```scala
+new scala.Function´n´[´T_1, ..., T_n´, ´T´] {
+  def apply(´x_1´: ´T_1, ..., x_n´: ´T_n´): ´T´ = ´e´
+}
+```
+
+The same evaluation holds for a SAM type, except that the instantiated type is given by the SAM type, and the implemented method is the single abstract method member of this type.
+
+The underlying platform may provide more efficient ways of constructing these instances, such as Java 8's `invokedynamic` bytecode and `LambdaMetaFactory` class.
+
+When a `PartialFunction` is required, an additional member `isDefinedAt` is synthesized, which simply returns `true`.
+However, if the function literal has the shape `x => x match { ... }`, then `isDefinedAt` is derived from the pattern match in the following way: each case from the match expression evaluates to `true`, and if there is no default case, a default case is added that evaluates to `false`.
+For more details on how that is implemented see ["Pattern Matching Anonymous Functions"](08-pattern-matching.html#pattern-matching-anonymous-functions).
+
+###### Example
+Examples of anonymous functions:
+
+```scala
+x => x                       // The identity function
+
+f => g => x => f(g(x))       // Curried function composition
+
+(x: Int, y: Int) => x + y    // A summation function
+
+() => { count += 1; count }  // The function which takes an
+                             // empty parameter list ´()´,
+                             // increments a non-local variable
+                             // `count` and returns the new value.
+
+_ => 5                       // The function that ignores its argument
+                             // and always returns 5.
+```
+
+### Placeholder Syntax for Anonymous Functions
+
+```ebnf
+SimpleExpr1 ::= ‘_’
+```
+
+An expression (of syntactic category `Expr`) may contain embedded underscore symbols `_` at places where identifiers are legal.
+Such an expression represents an anonymous function where subsequent occurrences of underscores denote successive parameters.
+
+Define an _underscore section_ to be an expression of the form `_:´T´` where ´T´ is a type, or else of the form `_`, provided the underscore does not appear as the expression part of a type ascription `_:´T´`.
+
+An expression ´e´ of syntactic category `Expr` _binds_ an underscore section ´u´, if the following two conditions hold: (1) ´e´ properly contains ´u´, and (2) there is no other expression of syntactic category `Expr` which is properly contained in ´e´ and which itself properly contains ´u´.
+
+If an expression ´e´ binds underscore sections ´u_1, ..., u_n´, in this order, it is equivalent to the anonymous function `(´u'_1´, ... ´u'_n´) => ´e'´` where each ´u_i'´ results from ´u_i´ by replacing the underscore with a fresh identifier and ´e'´ results from ´e´ by replacing each underscore section ´u_i´ by ´u_i'´.
+
+###### Example
+The anonymous functions in the left column use placeholder syntax.
+Each of these is equivalent to the anonymous function on its right.
+
+| | |
+|---------------------------|----------------------------|
+|`_ + 1` | `x => x + 1` |
+|`_ * _` | `(x1, x2) => x1 * x2` |
+|`(_: Int) * 2` | `(x: Int) => (x: Int) * 2` |
+|`if (_) x else y` | `z => if (z) x else y` |
+|`_.map(f)` | `x => x.map(f)` |
+|`_.map(_ + 1)` | `x => x.map(y => y + 1)` |
+
+## Constant Expressions
+
+Constant expressions are expressions that the Scala compiler can evaluate to a constant.
+The definition of "constant expression" depends on the platform, but constant expressions always include at least the expressions of the following forms:
+
+- A literal of a value class, such as an integer
+- A string literal
+- A class constructed with [`Predef.classOf`](12-the-scala-standard-library.html#the-predef-object)
+- An element of an enumeration from the underlying platform
+- A literal array, of the form `Array´(c_1, ..., c_n)´`, where all of the ´c_i´'s are themselves constant expressions
+- An identifier defined by a [constant value definition](04-basic-declarations-and-definitions.html#value-declarations-and-definitions).
+
+## Statements
+
+```ebnf
+BlockStat ::= Import
+ | {Annotation} [‘implicit’] [‘lazy’] Def
+ | {Annotation} {LocalModifier} TmplDef
+ | Expr1
+ |
+TemplateStat ::= Import
+ | {Annotation} {Modifier} Def
+ | {Annotation} {Modifier} Dcl
+ | Expr
+ |
+```
+
+Statements occur as parts of blocks and templates.
+A _statement_ can be an import, a definition or an expression, or it can be empty.
+Statements used in the template of a class definition can also be declarations.
+An expression that is used as a statement can have an arbitrary value type.
+An expression statement ´e´ is evaluated by evaluating ´e´ and discarding the result of the evaluation.
+
+Block statements may be definitions which bind local names in the block.
+The only modifier allowed in all block-local definitions is `implicit`.
+When prefixing a class or object definition, modifiers `abstract`, `final`, and `sealed` are also permitted.
+
+Evaluation of a statement sequence entails evaluation of the statements in the order they are written.
+
+## Implicit Conversions
+
+Implicit conversions can be applied to expressions whose type does not match their expected type, to qualifiers in selections, and to unapplied methods.
+The available implicit conversions are given in the next two sub-sections.
+
+### Value Conversions
+
+The following eight implicit conversions can be applied to an expression ´e´ which has some value type ´T´ and which is type-checked with some expected type ´\mathit{pt}´.
+
+###### Static Overloading Resolution
+If an expression denotes several possible members of a class, [overloading resolution](#overloading-resolution) is applied to pick a unique member.
+
+###### Type Instantiation
+An expression ´e´ of polymorphic type
+
+```scala
+[´a_1´ >: ´L_1´ <: ´U_1, ..., a_n´ >: ´L_n´ <: ´U_n´]´T´
+```
+
+which does not appear as the function part of a type application is converted to a type instance of ´T´ by determining with [local type inference](#local-type-inference) instance types `´T_1, ..., T_n´` for the type variables `´a_1, ..., a_n´` and implicitly embedding ´e´ in the [type application](#type-applications) `´e´[´T_1, ..., T_n´]`.
+
+###### Numeric Widening
+If ´e´ has a primitive number type which [weakly conforms](03-types.html#weak-conformance) to the expected type, it is widened to the expected type using one of the numeric conversion methods `toShort`, `toChar`, `toInt`, `toLong`, `toFloat`, `toDouble` defined [in the standard library](12-the-scala-standard-library.html#numeric-value-types).
+
+Since conversions from `Int` to `Float` and from `Long` to `Float` or `Double` may incur a loss of precision, those implicit conversions are deprecated.
+The conversion is permitted for literals if the original value can be recovered, that is, if conversion back to the original type produces the original value.
+
+###### Numeric Literal Narrowing
+If the expected type is `Byte`, `Short` or `Char`, and the expression ´e´ is an integer literal fitting in the range of that type, it is converted to the same literal in that type.
+
+###### Value Discarding
+If ´e´ has some value type and the expected type is `Unit`, ´e´ is converted to the expected type by embedding it in the term `{ ´e´; () }`.
+
+###### SAM conversion
+An expression `(p1, ..., pN) => body` of function type `(T1, ..., TN) => T` is sam-convertible to the expected type `S` if the following holds:
+ - the class `C` of `S` declares an abstract method `m` with signature `(p1: A1, ..., pN: AN): R`;
+ - besides `m`, `C` must not declare or inherit any other deferred value members;
+ - the method `m` must have a single argument list;
+ - there must be a type `U` that is a subtype of `S`, so that the expression `new U { final def m(p1: A1, ..., pN: AN): R = body }` is well-typed (conforming to the expected type `S`);
+ - for the purpose of scoping, `m` should be considered a static member (`U`'s members are not in scope in `body`);
+ - `(A1, ..., AN) => R` is a subtype of `(T1, ..., TN) => T` (satisfying this condition drives type inference of unknown type parameters in `S`).
+
+Note that a function literal that targets a SAM is not necessarily compiled to the above instance creation expression.
+This is platform-dependent.
+
+It follows that:
+ - if class `C` defines a constructor, it must be accessible and must define exactly one, empty, argument list;
+ - class `C` cannot be `final` or `sealed` (for simplicity we ignore the possibility of SAM conversion in the same compilation unit as the sealed class);
+ - `m` cannot be polymorphic;
+ - it must be possible to derive a fully-defined type `U` from `S` by inferring any unknown type parameters of `C`.
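+
+As an illustration, here is a minimal sketch of a SAM conversion; the trait `Callback` and the method `register` are invented for this example:
+
+```scala
+trait Callback {               // the class C: exactly one abstract member
+  def run(msg: String): Unit   // the method m: one argument list, not polymorphic
+}
+
+def register(cb: Callback): Unit = cb.run("registered")
+
+// The function literal below has function type String => Unit;
+// since Callback is a SAM type, the literal is converted to it:
+register(msg => println(msg))
+```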
+
+Finally, we impose some implementation restrictions (these may be lifted in future releases):
+ - `C` must not be nested or local (it must not capture its environment, as that results in a nonzero-argument constructor);
+ - `C`'s constructor must not have an implicit argument list (this simplifies type inference);
+ - `C` must not declare a self type (this simplifies type inference);
+ - `C` must not be `@specialized`.
+
+###### View Application
+If none of the previous conversions applies, and ´e´'s type does not conform to the expected type ´\mathit{pt}´, an attempt is made to convert ´e´ to the expected type with a [view](07-implicits.html#views).
+
+###### Selection on `Dynamic`
+If none of the previous conversions applies, and ´e´ is a prefix of a selection ´e.x´, and ´e´'s type conforms to class `scala.Dynamic`, then the selection is rewritten according to the rules for [dynamic member selection](#dynamic-member-selection).
+
+### Method Conversions
+
+The following four implicit conversions can be applied to methods which are not applied to some argument list.
+
+###### Evaluation
+A parameterless method ´m´ of type `=> ´T´` is always converted to type ´T´ by evaluating the expression to which ´m´ is bound.
+
+###### Implicit Application
+If the method takes only implicit parameters, implicit arguments are passed following the rules [here](07-implicits.html#implicit-parameters).
+
+###### Eta Expansion
+Otherwise, if the method is not a constructor, and the expected type ´\mathit{pt}´ is a function type, or, for methods of non-zero arity, a type [sam-convertible](#sam-conversion) to a function type, ´(\mathit{Ts}') \Rightarrow T'´, [eta-expansion](#eta-expansion-section) is performed on the expression ´e´.
+
+(The exception for zero-arity methods is to avoid surprises due to unexpected sam conversion.)
+
+###### Empty Application
+Otherwise, if ´e´ has method type ´()T´, it is implicitly applied to the empty argument list, yielding ´e()´.
+
+### Overloading Resolution
+
+If an identifier or selection ´e´ references several members of a class, the context of the reference is used to identify a unique member.
+The way this is done depends on whether or not ´e´ is used as a method.
+Let ´\mathscr{A}´ be the set of members referenced by ´e´.
+
+Assume first that ´e´ appears as a function in an application, as in `´e´(´e_1´, ..., ´e_m´)`.
+
+One first determines the set of methods that are potentially [applicable](#method-applications) based on the _shape_ of the arguments.
+
+The *shape* of an argument expression ´e´, written ´\mathit{shape}(e)´, is a type that is defined as follows:
+ - For a function expression `(´p_1´: ´T_1, ..., p_n´: ´T_n´) => ´b´`: `(Any, ..., Any) => ´\mathit{shape}(b)´`, where `Any` occurs ´n´ times in the argument type.
+ - For a pattern-matching anonymous function definition `{ case ... }`: `PartialFunction[Any, Nothing]`.
+ - For a named argument `´n´ = ´e´`: ´\mathit{shape}(e)´.
+ - For all other expressions: `Nothing`.
+
+Let ´\mathscr{B}´ be the set of alternatives in ´\mathscr{A}´ that are [_applicable_](#method-applications) to expressions ´(e_1, ..., e_m)´ of types ´(\mathit{shape}(e_1), ..., \mathit{shape}(e_m))´.
+If there is precisely one alternative in ´\mathscr{B}´, that alternative is chosen.
+
+Otherwise, let ´S_1, ..., S_m´ be the list of types obtained by typing each argument as follows.
+ +Normally, an argument is typed without an expected type, except when all alternatives explicitly specify the same parameter type for this argument (a missing parameter type, due to e.g. arity differences, is taken as `NoType`, thus resorting to no expected type), or when trying to propagate more type information to aid inference of higher-order function parameter types, as explained next. + +The intuition for higher-order function parameter type inference is that all arguments must be of a function-like type (`PartialFunction`, `FunctionN` or some equivalent [SAM type](#sam-conversion)), which in turn must define the same set of higher-order argument types, so that they can safely be used as the expected type of a given argument of the overloaded method, without unduly ruling out any alternatives. +The intent is not to steer overloading resolution, but to preserve enough type information to steer type inference of the arguments (a function literal or eta-expanded method) to this overloaded method. + +Note that the expected type drives eta-expansion (not performed unless a function-like type is expected), as well as inference of omitted parameter types of function literals. + +More precisely, an argument `´e_i´` is typed with an expected type that is derived from the `´i´`th argument type found in each alternative (call these `´T_{ij}´` for alternative `´j´` and argument position `´i´`) when all `´T_{ij}´` are function types `´(A_{1j},..., A_{nj}) => ?´` (or the equivalent `PartialFunction`, or SAM) of some arity `´n´`, and their argument types `´A_{kj}´` are identical across all overloads `´j´` for a given `´k´`. +Then, the expected type for `´e_i´` is derived as follows: + - we use `´PartialFunction[A_{1j},..., A_{nj}, ?]´` if for some overload `´j´`, `´T_{ij}´`'s type symbol is `PartialFunction`; + - else, if for some `´j´`, `´T_{ij}´` is `FunctionN`, the expected type is `´FunctionN[A_{1j},..., A_{nj}, ?]´`; + - else, if for all `´j´`, `´T_{ij}´` is a SAM type of the same class, defining argument types `´A_{1j},..., A_{nj}´` (and a potentially varying result type), the expected type encodes these argument types and the SAM class. + +For every member ´m´ in ´\mathscr{B}´ one determines whether it is applicable to expressions (´e_1, ..., e_m´) of types ´S_1, ..., S_m´. + +It is an error if none of the members in ´\mathscr{B}´ is applicable. +If there is one single applicable alternative, that alternative is chosen. +Otherwise, let ´\mathscr{CC}´ be the set of applicable alternatives which don't employ any default argument in the application to ´e_1, ..., e_m´. + +It is again an error if ´\mathscr{CC}´ is empty. +Otherwise, one chooses the _most specific_ alternative among the alternatives in ´\mathscr{CC}´, according to the following definition of being "as specific as", and "more specific than": + + + +- A parameterized method ´m´ of type `(´p_1:T_1, ..., p_n:T_n´)´U´` is _as specific as_ some other member ´m'´ of type ´S´ if ´m'´ is [applicable](#method-applications) to arguments `(´p_1, ..., p_n´)` of types ´T_1, ..., T_n´. + If the last parameter `´p_n´` has a vararg type `´T*´`, then `m` must be applicable to arbitrary numbers of `´T´` parameters (which implies that it must be a varargs method as well). 
+- A polymorphic method of type `[´a_1´ >: ´L_1´ <: ´U_1, ..., a_n´ >: ´L_n´ <: ´U_n´]´T´` is as specific as some other member ´m'´ of type ´S´ if ´T´ is as specific as ´S´ under the assumption that for ´i = 1, ..., n´ each ´a_i´ is an abstract type name bounded from below by ´L_i´ and from above by ´U_i´. +- A member of any other type ´T´ is: + - always as specific as a parameterized method or a polymorphic method. + - as specific as a member ´m'´ of any other type ´S´ if ´T´ is [compatible](03-types.html#compatibility) with ´S´. + +The _relative weight_ of an alternative ´A´ over an alternative ´B´ is a +number from 0 to 2, defined as the sum of + +- 1 if ´A´ is as specific as ´B´, 0 otherwise, and +- 1 if ´A´ is defined in a class or object which is derived from the class or object defining ´B´, 0 otherwise. + +A class or object ´C´ is _derived_ from a class or object ´D´ if one of the following holds: + +- ´C´ is a subclass of ´D´, or +- ´C´ is a companion object of a class derived from ´D´, or +- ´D´ is a companion object of a class from which ´C´ is derived. + +An alternative ´A´ is _more specific_ than an alternative ´B´ if the relative weight of ´A´ over ´B´ is greater than the relative weight of ´B´ over ´A´. + +It is an error if there is no alternative in ´\mathscr{CC}´ which is more specific than all other alternatives in ´\mathscr{CC}´. + +Assume next that ´e´ appears as a method in a type application, as in `´e´[´\mathit{targs}\,´]`. +Then all alternatives in ´\mathscr{A}´ which take the same number of type parameters as there are type arguments in ´\mathit{targs}´ are chosen. +It is an error if no such alternative exists. +If there are several such alternatives, overloading resolution is applied again to the whole expression `´e´[´\mathit{targs}\,´]`. + +Assume finally that ´e´ does not appear as a method in either an application or a type application. +If an expected type is given, let ´\mathscr{B}´ be the set of those alternatives in ´\mathscr{A}´ which are [compatible](03-types.html#compatibility) to it. +Otherwise, let ´\mathscr{B}´ be the same as ´\mathscr{A}´. +In this last case we choose the most specific alternative among all alternatives in ´\mathscr{B}´. +It is an error if there is no alternative in ´\mathscr{B}´ which is more specific than all other alternatives in ´\mathscr{B}´. + +###### Example +Consider the following definitions: + +```scala +class A extends B {} +def f(x: B, y: B) = ... +def f(x: A, y: B) = ... +val a: A +val b: B +``` + +Then the application `f(b, b)` refers to the first definition of ´f´ whereas the application `f(a, a)` refers to the second. +Assume now we add a third overloaded definition + +```scala +def f(x: B, y: A) = ... +``` + +Then the application `f(a, a)` is rejected for being ambiguous, since no most specific applicable signature exists. + +### Local Type Inference + +Local type inference infers type arguments to be passed to expressions of polymorphic type. +Say ´e´ is of type [´a_1´ >: ´L_1´ <: ´U_1, ..., a_n´ >: ´L_n´ <: ´U_n´]´T´ and no explicit type parameters are given. + +Local type inference converts this expression to a type application `´e´[´T_1, ..., T_n´]`. +The choice of the type arguments ´T_1, ..., T_n´ depends on the context in which the expression appears and on the expected type ´\mathit{pt}´. +There are three cases. + +###### Case 1: Selections +If the expression appears as the prefix of a selection with a name ´x´, then type inference is _deferred_ to the whole expression ´e.x´. 
+That is, if ´e.x´ has type ´S´, it is now treated as having type [´a_1´ >: ´L_1´ <: ´U_1, ..., a_n´ >: ´L_n´ <: ´U_n´]´S´, and local type inference is applied in turn to infer type arguments for ´a_1, ..., a_n´, using the context in which ´e.x´ appears.
+
+###### Case 2: Values
+If the expression ´e´ appears as a value without being applied to value arguments, the type arguments are inferred by solving a constraint system which relates the expression's type ´T´ with the expected type ´\mathit{pt}´.
+Without loss of generality we can assume that ´T´ is a value type; if it is a method type we apply [eta-expansion](#eta-expansion-section) to convert it to a function type.
+Solving means finding a substitution ´\sigma´ of types ´T_i´ for the type parameters ´a_i´ such that
+
+- None of the inferred types ´T_i´ is a [singleton type](03-types.html#singleton-types) unless it is a singleton type corresponding to an object or a constant value definition or the corresponding bound ´U_i´ is a subtype of `scala.Singleton`.
+- All type parameter bounds are respected, i.e. ´\sigma L_i <: \sigma a_i´ and ´\sigma a_i <: \sigma U_i´ for ´i = 1, ..., n´.
+- The expression's type conforms to the expected type, i.e. ´\sigma T <: \sigma \mathit{pt}´.
+
+It is a compile time error if no such substitution exists.
+If several substitutions exist, local type inference will choose for each type variable ´a_i´ a minimal or maximal type ´T_i´ of the solution space.
+A _maximal_ type ´T_i´ will be chosen if the type parameter ´a_i´ appears [contravariantly](04-basic-declarations-and-definitions.html#variance-annotations) in the type ´T´ of the expression.
+A _minimal_ type ´T_i´ will be chosen in all other situations, i.e. if the variable appears covariantly, non-variantly or not at all in the type ´T´.
+We call such a substitution an _optimal solution_ of the given constraint system for the type ´T´.
+
+###### Case 3: Methods
+The last case applies if the expression ´e´ appears in an application ´e(d_1, ..., d_m)´.
+In that case ´T´ is a method type ´(p_1:R_1, ..., p_m:R_m)T'´.
+Without loss of generality we can assume that the result type ´T'´ is a value type; if it is a method type we apply [eta-expansion](#eta-expansion-section) to convert it to a function type.
+One computes first the types ´S_j´ of the argument expressions ´d_j´, using two alternative schemes.
+Each argument expression ´d_j´ is typed first with the expected type ´R_j´, in which the type parameters ´a_1, ..., a_n´ are taken as type constants.
+If this fails, the argument ´d_j´ is typed instead with an expected type ´R_j'´ which results from ´R_j´ by replacing every type parameter in ´a_1, ..., a_n´ with _undefined_.
+
+In a second step, type arguments are inferred by solving a constraint system which relates the method's type with the expected type ´\mathit{pt}´ and the argument types ´S_1, ..., S_m´.
+Solving the constraint system means finding a substitution ´\sigma´ of types ´T_i´ for the type parameters ´a_i´ such that
+
+- None of the inferred types ´T_i´ is a [singleton type](03-types.html#singleton-types) unless it is a singleton type corresponding to an object or a constant value definition or the corresponding bound ´U_i´ is a subtype of `scala.Singleton`.
+- All type parameter bounds are respected, i.e. ´\sigma L_i <: \sigma a_i´ and ´\sigma a_i <: \sigma U_i´ for ´i = 1, ..., n´.
+- The method's result type ´T'´ conforms to the expected type, i.e. ´\sigma T' <: \sigma \mathit{pt}´.
+- Each argument type [weakly conforms](03-types.html#weak-conformance) to the corresponding formal parameter type, i.e. ´\sigma S_j <:_w \sigma R_j´ for ´j = 1, ..., m´.
+
+It is a compile time error if no such substitution exists.
+If several solutions exist, an optimal one for the type ´T'´ is chosen.
+
+All or parts of an expected type ´\mathit{pt}´ may be undefined.
+The rules for [conformance](03-types.html#conformance) are extended to this case by adding the rule that for any type ´T´ the following two statements are always true: ´\mathit{undefined} <: T´ and ´T <: \mathit{undefined}´.
+
+It is possible that no minimal or maximal solution for a type variable exists, in which case a compile-time error results.
+Because ´<:´ is a pre-order, it is also possible that a solution set has several optimal solutions for a type.
+In that case, a Scala compiler is free to pick any one of them.
+
+###### Example
+Consider the two methods:
+
+```scala
+def cons[A](x: A, xs: List[A]): List[A] = x :: xs
+def nil[B]: List[B] = Nil
+```
+
+and the definition
+
+```scala
+val xs = cons(1, nil)
+```
+
+The application of `cons` is typed with an undefined expected type.
+This application is completed by local type inference to `cons[Int](1, nil)`.
+Here, one uses the following reasoning to infer the type argument `Int` for the type parameter `A`:
+
+First, the argument expressions are typed. The first argument `1` has type `Int`, whereas the second argument `nil` is itself polymorphic.
+One tries to type-check `nil` with an expected type `List[A]`.
+This leads to the constraint system
+
+```scala
+List[B?] <: List[A]
+```
+
+where we have labeled `B?` with a question mark to indicate that it is a variable in the constraint system.
+Because class `List` is covariant, the optimal solution of this constraint is
+
+```scala
+B = scala.Nothing
+```
+
+In a second step, one solves the following constraint system for the type parameter `A` of `cons`:
+
+```scala
+Int <: A?
+List[scala.Nothing] <: List[A?]
+List[A?] <: ´\mathit{undefined}´
+```
+
+The optimal solution of this constraint system is
+
+```scala
+A = Int
+```
+
+so `Int` is the type inferred for `A`.
+
+###### Example
+
+Consider now the definition
+
+```scala
+val ys = cons("abc", xs)
+```
+
+where `xs` is defined of type `List[Int]` as before.
+In this case local type inference proceeds as follows.
+
+First, the argument expressions are typed.
+The first argument `"abc"` has type `String`.
+One first tries to type the second argument `xs` with expected type `List[A]`.
+This fails, as `List[Int]` is not a subtype of `List[A]`.
+Therefore, the second strategy is tried; `xs` is now typed with expected type `List[´\mathit{undefined}´]`.
+This succeeds and yields the argument type `List[Int]`.
+
+In a second step, one solves the following constraint system for the type parameter `A` of `cons`:
+
+```scala
+String <: A?
+List[Int] <: List[A?]
+List[A?] <: ´\mathit{undefined}´
+```
+
+The optimal solution of this constraint system is
+
+```scala
+A = scala.Any
+```
+
+so `scala.Any` is the type inferred for `A`.
+
+### Eta Expansion
+
+_Eta-expansion_ converts an expression of method type to an equivalent expression of function type.
+It proceeds in two steps.
+
+First, one identifies the maximal sub-expressions of ´e´; let's say these are ´e_1, ..., e_m´.
+For each of these, one creates a fresh name ´x_i´.
+Let ´e'´ be the expression resulting from replacing every maximal subexpression ´e_i´ in ´e´ by the corresponding fresh name ´x_i´.
+Second, one creates a fresh name ´y_i´ for every argument type ´T_i´ of the method (´i = 1, ..., n´).
+The result of eta-expansion is then:
+
+```scala
+{ val ´x_1´ = ´e_1´;
+  ...
+  val ´x_m´ = ´e_m´;
+  (´y_1: T_1, ..., y_n: T_n´) => ´e'´(´y_1, ..., y_n´)
+}
+```
+
+The behavior of [call-by-name parameters](#function-applications) is preserved under eta-expansion: the corresponding actual argument expression, a sub-expression of parameterless method type, is not evaluated in the expanded block.
+
+### Dynamic Member Selection
+
+The standard Scala library defines a marker trait `scala.Dynamic`.
+Subclasses of this trait are able to intercept selections and applications on their instances by defining methods of the names `applyDynamic`, `applyDynamicNamed`, `selectDynamic`, and `updateDynamic`.
+
+The following rewrites are performed, assuming ´e´'s type conforms to `scala.Dynamic`, and the original expression does not type check under the normal rules, as specified fully in the relevant subsection of [implicit conversion](#dynamic-member-selection):
+
+ * `e.m[Ti](xi)` becomes `e.applyDynamic[Ti]("m")(xi)`
+ * `e.m[Ti]` becomes `e.selectDynamic[Ti]("m")`
+ * `e.m = x` becomes `e.updateDynamic("m")(x)`
+
+If any arguments are named in the application (one of the `xi` is of the shape `arg = x`), their name is preserved as the first component of the pair passed to `applyDynamicNamed` (for missing names, `""` is used):
+
+ * `e.m[Ti](argi = xi)` becomes `e.applyDynamicNamed[Ti]("m")(("argi", xi))`
+
+Finally:
+
+ * `e.m(x) = y` becomes `e.selectDynamic("m").update(x, y)`
+
+None of these methods are actually defined in the `scala.Dynamic` trait itself, so that users are free to define them with or without type parameters, or implicit arguments.
diff --git a/docs/_spec/07-implicits.md b/docs/_spec/07-implicits.md
new file mode 100644
index 000000000000..2cd80f227cd4
--- /dev/null
+++ b/docs/_spec/07-implicits.md
@@ -0,0 +1,407 @@
+---
+title: Implicits
+layout: default
+chapter: 7
+---
+
+# Implicits
+
+## The Implicit Modifier
+
+```ebnf
+LocalModifier ::= ‘implicit’
+ParamClauses ::= {ParamClause} [nl] ‘(’ ‘implicit’ Params ‘)’
+```
+
+Template members and parameters labeled with an `implicit` modifier can be passed to [implicit parameters](#implicit-parameters) and can be used as implicit conversions called [views](#views).
+The `implicit` modifier is illegal for all type members, as well as for [top-level objects](09-top-level-definitions.html#packagings).
+
+###### Example Monoid
+
+The following code defines an abstract class of monoids and two concrete implementations, `stringMonoid` and `intMonoid`.
+The two implementations are marked implicit.
+
+```scala
+abstract class Monoid[A] extends SemiGroup[A] {
+  def unit: A
+  def add(x: A, y: A): A
+}
+object Monoids {
+  implicit object stringMonoid extends Monoid[String] {
+    def add(x: String, y: String): String = x.concat(y)
+    def unit: String = ""
+  }
+  implicit object intMonoid extends Monoid[Int] {
+    def add(x: Int, y: Int): Int = x + y
+    def unit: Int = 0
+  }
+}
+```
+
+## Implicit Parameters
+
+An _implicit parameter list_ `(implicit ´p_1´,...,´p_n´)` of a method marks the parameters ´p_1, ..., p_n´ as implicit.
+A method or constructor can have only one implicit parameter list, and it must be the last parameter list given.
+
+A method with implicit parameters can be applied to arguments just like a normal method.
+In this case the `implicit` label has no effect.
+However, if such a method misses arguments for its implicit parameters, such arguments will be automatically provided.
+
+The actual arguments that are eligible to be passed to an implicit parameter of type ´T´ fall into two categories.
+First, eligible are all identifiers ´x´ that can be accessed at the point of the method call without a prefix and that denote an [implicit definition](#the-implicit-modifier) or an implicit parameter.
+To be accessible without a prefix, an identifier must be a local name, a member of an enclosing template or a name introduced by an [import clause](04-basic-declarations-and-definitions.html#import-clauses).
+If there are no eligible identifiers under this rule, then, second, eligible are also all `implicit` members of some object that belongs to the implicit scope of the implicit parameter's type, ´T´.
+
+The _implicit scope_ of a type ´T´ consists of all [companion modules](05-classes-and-objects.html#object-definitions) of classes that are associated with the implicit parameter's type.
+Here, we say a class ´C´ is _associated_ with a type ´T´ if it is a [base class](05-classes-and-objects.html#class-linearization) of some part of ´T´.
+
+The _parts_ of a type ´T´ are:
+
+- if ´T´ is a compound type `´T_1´ with ... with ´T_n´`, the union of the parts of ´T_1, ..., T_n´, as well as ´T´ itself;
+- if ´T´ is a parameterized type `´S´[´T_1, ..., T_n´]`, the union of the parts of ´S´ and ´T_1, ..., T_n´;
+- if ´T´ is a singleton type `´p´.type`, the parts of the type of ´p´;
+- if ´T´ is a type projection `´S´#´U´`, the parts of ´S´ as well as ´T´ itself;
+- if ´T´ is a type alias, the parts of its expansion;
+- if ´T´ is an abstract type, the parts of its upper bound;
+- if ´T´ denotes an implicit conversion to a type with a method with argument types ´T_1, ..., T_n´ and result type ´U´, the union of the parts of ´T_1, ..., T_n´ and ´U´;
+- in all other cases, just ´T´ itself.
+
+Note that packages are internally represented as classes with companion modules to hold the package members.
+Thus, implicits defined in a package object are part of the implicit scope of a type prefixed by that package.
+
+If there are several eligible arguments which match the implicit parameter's type, a most specific one will be chosen using the rules of static [overloading resolution](06-expressions.html#overloading-resolution).
+If the parameter has a default argument and no implicit argument can be found, the default argument is used.
+
+###### Example
+Assuming the classes from the [`Monoid` example](#example-monoid), here is a method which computes the sum of a list of elements using the monoid's `add` and `unit` operations.
+
+```scala
+def sum[A](xs: List[A])(implicit m: Monoid[A]): A =
+  if (xs.isEmpty) m.unit
+  else m.add(xs.head, sum(xs.tail))
+```
+
+The monoid in question is marked as an implicit parameter, and can therefore be inferred based on the type of the list.
+Consider for instance the call `sum(List(1, 2, 3))` in a context where `stringMonoid` and `intMonoid` are visible.
+We know that the formal type parameter `A` of `sum` needs to be instantiated to `Int`.
+The only eligible object which matches the implicit formal parameter type `Monoid[Int]` is `intMonoid`, so this object will be passed as the implicit parameter.
+
+This discussion also shows that implicit parameters are inferred after any type arguments are [inferred](06-expressions.html#local-type-inference).
+
+Implicit methods can themselves have implicit parameters.
+An example is the following method from module `scala.List`, which injects lists into the `scala.Ordered` class, provided the element type of the list is also convertible to this type.
+
+```scala
+implicit def list2ordered[A](x: List[A])
+    (implicit elem2ordered: A => Ordered[A]): Ordered[List[A]] =
+  ...
+```
+
+Assume in addition a method
+
+```scala
+implicit def int2ordered(x: Int): Ordered[Int]
+```
+
+that injects integers into the `Ordered` class.
+We can now define a `sort` method over ordered lists:
+
+```scala
+def sort[A](xs: List[A])(implicit a2ordered: A => Ordered[A]) = ...
+```
+
+We can apply `sort` to a list of lists of integers `yss: List[List[Int]]` as follows:
+
+```scala
+sort(yss)
+```
+
+The call above will be completed by passing two nested implicit arguments:
+
+```scala
+sort(yss)((xs: List[Int]) => list2ordered[Int](xs)(int2ordered))
+```
+
+The possibility of passing implicit arguments to implicit arguments raises the possibility of an infinite recursion.
+For instance, one might try to define the following method, which injects _every_ type into the `Ordered` class:
+
+```scala
+implicit def magic[A](x: A)(implicit a2ordered: A => Ordered[A]): Ordered[A] =
+  a2ordered(x)
+```
+
+Now, if one tried to apply `sort` to an argument `arg` of a type that did not have another injection into the `Ordered` class, one would obtain an infinite expansion:
+
+```scala
+sort(arg)(x => magic(x)(x => magic(x)(x => ... )))
+```
+
+Such infinite expansions should be detected and reported as errors; however, to support the deliberate implicit construction of recursive values, we allow implicit arguments to be marked as by-name.
+At call sites, recursive uses of implicit values are permitted if they occur in an implicit by-name argument.
+
+Consider the following example:
+
+```scala
+trait Foo {
+  def next: Foo
+}
+
+object Foo {
+  implicit def foo(implicit rec: Foo): Foo =
+    new Foo { def next = rec }
+}
+
+val foo = implicitly[Foo]
+assert(foo eq foo.next)
+```
+
+As with the `magic` case above, this diverges due to the recursive implicit argument `rec` of method `foo`.
+If we mark the implicit argument as by-name,
+
+```scala
+trait Foo {
+  def next: Foo
+}
+
+object Foo {
+  implicit def foo(implicit rec: => Foo): Foo =
+    new Foo { def next = rec }
+}
+
+val foo = implicitly[Foo]
+assert(foo eq foo.next)
+```
+
+the example compiles and the assertion succeeds.
+
+When compiled, recursive by-name implicit arguments of this sort are extracted out as val members of a local synthetic object at call sites, as follows:
+
+```scala
+val foo: Foo = scala.Predef.implicitly[Foo](
+  {
+    object LazyDefns$1 {
+      val rec$1: Foo = Foo.foo(rec$1)
+      //                       ^^^^^
+      //                 recursive knot tied here
+    }
+    LazyDefns$1.rec$1
+  }
+)
+assert(foo eq foo.next)
+```
+
+Note that the recursive use of `rec$1` occurs within the by-name argument of `foo` and is consequently deferred.
+The desugaring matches what a programmer would do to construct such a recursive value explicitly.
+
+To prevent infinite expansions, such as the `magic` example above, the compiler keeps track of a stack of “open implicit types” for which implicit arguments are currently being searched.
+Whenever an implicit argument for type ´T´ is searched, ´T´ is added to the stack paired with the implicit definition which produces it, and whether it was required to satisfy a by-name implicit argument or not.
+The type is removed from the stack once the search for the implicit argument either definitely fails or succeeds.
+Every time a type is about to be added to the stack, it is checked against existing entries which were produced by the same implicit definition, and then:
+
++ if it is equivalent to some type which is already on the stack, and there is a by-name argument between that entry and the top of the stack, then the search for that type succeeds immediately and the implicit argument is compiled as a recursive reference to the found argument.
+That argument is added as an entry in the synthesized implicit dictionary if it has not already been added.
++ otherwise, if the _core_ of the type _dominates_ the core of a type already on the stack, then the implicit expansion is said to _diverge_ and the search for that type fails immediately.
++ otherwise, it is added to the stack paired with the implicit definition which produces it.
+Implicit resolution continues with the implicit arguments of that definition (if any).
+
+Here, the _core type_ of ´T´ is ´T´ with aliases expanded, top-level type [annotations](11-annotations.html#user-defined-annotations) and [refinements](03-types.html#compound-types) removed, and occurrences of top-level existentially bound variables replaced by their upper bounds.
+
+A core type ´T´ _dominates_ a type ´U´ if ´T´ is [equivalent](03-types.html#equivalence) to ´U´, or if the top-level type constructors of ´T´ and ´U´ have a common element and ´T´ is more complex than ´U´ and the _covering sets_ of ´T´ and ´U´ are equal.
+
+The set of _top-level type constructors_ ´\mathit{ttcs}(T)´ of a type ´T´ depends on the form of the type:
+
+- For a type designator, ´\mathit{ttcs}(p.c) ~=~ \{c\}´;
+- For a parameterized type, ´\mathit{ttcs}(p.c[\mathit{targs}]) ~=~ \{c\}´;
+- For a singleton type, ´\mathit{ttcs}(p.type) ~=~ \mathit{ttcs}(T)´, provided ´p´ has type ´T´;
+- For a compound type, `´\mathit{ttcs}(T_1´ with ... with ´T_n)´` ´~=~ \mathit{ttcs}(T_1) \cup ... \cup \mathit{ttcs}(T_n)´.
+
+The _complexity_ ´\operatorname{complexity}(T)´ of a core type is an integer which also depends on the form of the type:
+
+- For a type designator, ´\operatorname{complexity}(p.c) ~=~ 1 + \operatorname{complexity}(p)´;
+- For a parameterized type, ´\operatorname{complexity}(p.c[\mathit{targs}]) ~=~ 1 + \Sigma \operatorname{complexity}(\mathit{targs})´;
+- For a singleton type denoting a package ´p´, ´\operatorname{complexity}(p.type) ~=~ 0´;
+- For any other singleton type, ´\operatorname{complexity}(p.type) ~=~ 1 + \operatorname{complexity}(T)´, provided ´p´ has type ´T´;
+- For a compound type, `´\operatorname{complexity}(T_1´ with ... with ´T_n)´` ´= \Sigma\operatorname{complexity}(T_i)´.
+
+The _covering set_ ´\mathit{cs}(T)´ of a type ´T´ is the set of type designators mentioned in the type.
+For example, given the following,
+
+```scala
+type A = List[(Int, Int)]
+type B = List[(Int, (Int, Int))]
+type C = List[(Int, String)]
+```
+
+the corresponding covering sets are:
+
+- ´\mathit{cs}(A)´: List, Tuple2, Int
+- ´\mathit{cs}(B)´: List, Tuple2, Int
+- ´\mathit{cs}(C)´: List, Tuple2, Int, String
+
+###### Example
+When typing `sort(xs)` for some list `xs` of type `List[List[List[Int]]]`, the sequence of types for which implicit arguments are searched is
+
+```scala
+List[List[Int]] => Ordered[List[List[Int]]],
+List[Int] => Ordered[List[Int]],
+Int => Ordered[Int]
+```
+
+All types share the common type constructor `scala.Function1`, but the complexity of each new type is lower than the complexity of the previous types.
+Hence, the code typechecks.
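+
+To make the dominance test concrete, the following toy model sketches it in code; the datatypes and functions are invented for illustration, and they ignore prefixes, aliases, refinements, and existentials:
+
+```scala
+sealed trait CoreType
+case class Designator(name: String) extends CoreType
+case class Parameterized(name: String, args: List[CoreType]) extends CoreType
+
+// Top-level type constructors (prefixes are ignored in this toy model).
+def ttcs(t: CoreType): Set[String] = t match {
+  case Designator(n)       => Set(n)
+  case Parameterized(n, _) => Set(n)
+}
+
+def complexity(t: CoreType): Int = t match {
+  case Designator(_)          => 1
+  case Parameterized(_, args) => 1 + args.map(complexity).sum
+}
+
+def coveringSet(t: CoreType): Set[String] = t match {
+  case Designator(n)          => Set(n)
+  case Parameterized(n, args) => args.flatMap(coveringSet).toSet + n
+}
+
+// T dominates U if T and U are the same type, or if they share a top-level
+// type constructor, T is more complex than U, and their covering sets are equal.
+def dominates(t: CoreType, u: CoreType): Boolean =
+  t == u ||
+    (ttcs(t).intersect(ttcs(u)).nonEmpty &&
+     complexity(t) > complexity(u) &&
+     coveringSet(t) == coveringSet(u))
+```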
+ +###### Example +Let `ys` be a list of some type which cannot be converted to `Ordered`. +For instance: + +```scala +val ys = List(new IllegalArgumentException, new ClassCastException, new Error) +``` + +Assume that the definition of `magic` above is in scope. +Then the sequence of types for which implicit arguments are searched is + +```scala +Throwable => Ordered[Throwable], +Throwable => Ordered[Throwable], +... +``` + +Since the second type in the sequence is equal to the first, the compiler will issue an error signalling a divergent implicit expansion. + + +## Views + +Implicit parameters and methods can also define implicit conversions called views. +A _view_ from type ´S´ to type ´T´ is defined by an implicit value which has function type `´S´ => ´T´` or `(=> ´S´) => ´T´` or by a method convertible to a value of that type. + +Views are applied in three situations: + +1. If an expression ´e´ is of type ´T´, and ´T´ does not conform to the expression's expected type ´\mathit{pt}´. +In this case an implicit ´v´ is searched which is applicable to ´e´ and whose result type conforms to ´\mathit{pt}´. +The search proceeds as in the case of implicit parameters, where the implicit scope is the one of `´T´ => ´\mathit{pt}´`. +If such a view is found, the expression ´e´ is converted to `´v´(´e´)`. +1. In a selection ´e.m´ with ´e´ of type ´T´, if the selector ´m´ does not denote an accessible member of ´T´. +In this case, a view ´v´ is searched which is applicable to ´e´ and whose result contains a member named ´m´. +The search proceeds as in the case of implicit parameters, where the implicit scope is the one of ´T´. +If such a view is found, the selection ´e.m´ is converted to `´v´(´e´).´m´`. +1. In a selection ´e.m(\mathit{args})´ with ´e´ of type ´T´, if the selector ´m´ denotes some member(s) of ´T´, but none of these members is applicable to the arguments ´\mathit{args}´. +In this case a view ´v´ is searched which is applicable to ´e´ and whose result contains a method ´m´ which is applicable to ´\mathit{args}´. +The search proceeds as in the case of implicit parameters, where the implicit scope is the one of ´T´. If such a view is found, the selection ´e.m´ is converted to `´v´(´e´).´m(\mathit{args})´`. + +The implicit view, if it is found, can accept its argument ´e´ as a call-by-value or as a call-by-name parameter. +However, call-by-value implicits take precedence over call-by-name implicits. + +As for implicit parameters, overloading resolution is applied if there are several possible candidates (of either the call-by-value or the call-by-name category). + +###### Example Ordered + +Class `scala.Ordered[A]` contains a method + +```scala + def <= [B >: A](that: B)(implicit b2ordered: B => Ordered[B]): Boolean +``` + +Assume two lists `xs` and `ys` of type `List[Int]` and assume that the `list2ordered` and `int2ordered` methods defined [here](#implicit-parameters) are in scope. +Then the operation + +```scala + xs <= ys +``` + +is legal, and is expanded to: + +```scala + list2ordered(xs)(int2ordered).<= + (ys) + (xs => list2ordered(xs)(int2ordered)) +``` + +The first application of `list2ordered` converts the list `xs` to an instance of class `Ordered`, whereas the second occurrence is part of an implicit parameter passed to the `<=` method. 
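+
+Views triggered by a member selection (the second situation listed above) work analogously; here is a small sketch, where the class `RichInt2` and the conversion `mkRichInt2` are invented for illustration:
+
+```scala
+import scala.language.implicitConversions
+
+class RichInt2(val self: Int) {
+  def squared: Int = self * self
+}
+
+// A view from Int to RichInt2:
+implicit def mkRichInt2(i: Int): RichInt2 = new RichInt2(i)
+
+// `squared` is not a member of Int, so the selection 3.squared
+// is converted to mkRichInt2(3).squared:
+val nine = 3.squared
+```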
+ +## Context Bounds and View Bounds + +```ebnf + TypeParam ::= (id | ‘_’) [TypeParamClause] [‘>:’ Type] [‘<:’ Type] + {‘<%’ Type} {‘:’ Type} +``` + +A type parameter ´A´ of a method or non-trait class may have one or more view bounds `´A´ <% ´T´`. +In this case the type parameter may be instantiated to any type ´S´ which is convertible by application of a view to the bound ´T´. + +A type parameter ´A´ of a method or non-trait class may also have one or more context bounds `´A´ : ´T´`. +In this case the type parameter may be instantiated to any type ´S´ for which _evidence_ exists at the instantiation point that ´S´ satisfies the bound ´T´. +Such evidence consists of an implicit value with type ´T[S]´. + +A method or class containing type parameters with view or context bounds is treated as being equivalent to a method with implicit parameters. +Consider first the case of a single parameter with view and/or context bounds such as: + +```scala +def ´f´[´A´ <% ´T_1´ ... <% ´T_m´ : ´U_1´ : ´U_n´](´\mathit{ps}´): ´R´ = ... +``` + +Then the method definition above is expanded to + +```scala +def ´f´[´A´](´\mathit{ps}´)(implicit ´v_1´: ´A´ => ´T_1´, ..., ´v_m´: ´A´ => ´T_m´, + ´w_1´: ´U_1´[´A´], ..., ´w_n´: ´U_n´[´A´]): ´R´ = ... +``` + +where the ´v_i´ and ´w_j´ are fresh names for the newly introduced implicit parameters. +These parameters are called _evidence parameters_. + +If a class or method has several view- or context-bounded type parameters, each such type parameter is expanded into evidence parameters in the order they appear and all the resulting evidence parameters are concatenated in one implicit parameter section. +Since traits do not take constructor parameters, this translation does not work for them. +Consequently, type-parameters in traits may not be view- or context-bounded. + +Evidence parameters are prepended to the existing implicit parameter section, if one exists. + +For example: + +```scala +def foo[A: M](implicit b: B): C +// expands to: +// def foo[A](implicit evidence´1: M[A], b: B): C +``` + +###### Example +The `<=` method from the [`Ordered` example](#example-ordered) can be declared more concisely as follows: + +```scala +def <= [B >: A <% Ordered[B]](that: B): Boolean +``` + +## Manifests + +Manifests are type descriptors that can be automatically generated by the Scala compiler as arguments to implicit parameters. +The Scala standard library contains a hierarchy of four manifest classes, with `OptManifest` at the top. +Their signatures follow the outline below. + +```scala +trait OptManifest[+T] +object NoManifest extends OptManifest[Nothing] +trait ClassManifest[T] extends OptManifest[T] +trait Manifest[T] extends ClassManifest[T] +``` + +If an implicit parameter of a method or constructor is of a subtype ´M[T]´ of class `OptManifest[T]`, _a manifest is determined for ´M[S]´_, according to the following rules. + +First if there is already an implicit argument that matches ´M[T]´, this argument is selected. + +Otherwise, let ´\mathit{Mobj}´ be the companion object `scala.reflect.Manifest` if ´M´ is trait `Manifest`, or be the companion object `scala.reflect.ClassManifest` otherwise. +Let ´M'´ be the trait `Manifest` if ´M´ is trait `Manifest`, or be the trait `OptManifest` otherwise. +Then the following rules apply. + +1. If ´T´ is a value class or one of the classes `Any`, `AnyVal`, `Object`, `Null`, or `Nothing`, a manifest for it is generated by selecting the corresponding manifest value `Manifest.´T´`, which exists in the `Manifest` module. +1. 
If ´T´ is an instance of `Array[´S´]`, a manifest is generated with the invocation `´\mathit{Mobj}´.arrayType[S](m)`, where ´m´ is the manifest determined for ´M[S]´. +1. If ´T´ is some other class type ´S´#´C[U_1, ..., U_n]´ where the prefix type ´S´ cannot be statically determined from the class ´C´, a manifest is generated with the invocation `´\mathit{Mobj}´.classType[T](´m_0´, classOf[T], ´ms´)` where ´m_0´ is the manifest determined for ´M'[S]´ and ´ms´ are the manifests determined for ´M'[U_1], ..., M'[U_n]´. +1. If ´T´ is some other class type with type arguments ´U_1, ..., U_n´, a manifest is generated with the invocation `´\mathit{Mobj}´.classType[T](classOf[T], ´ms´)` where ´ms´ are the manifests determined for ´M'[U_1], ..., M'[U_n]´. +1. If ´T´ is a singleton type `´p´.type`, a manifest is generated with the invocation `´\mathit{Mobj}´.singleType[T](´p´)` +1. If ´T´ is a refined type ´T' \{ R \}´, a manifest is generated for ´T'´. +(That is, refinements are never reflected in manifests). +1. If ´T´ is an intersection type `´T_1´ with ... with ´T_n´` where ´n > 1´, the result depends on whether a full manifest is to be determined or not. +If ´M´ is trait `Manifest`, then a manifest is generated with the invocation `Manifest.intersectionType[T](´ms´)` where ´ms´ are the manifests determined for ´M[T_1], ..., M[T_n]´. +Otherwise, if ´M´ is trait `ClassManifest`, then a manifest is generated for the [intersection dominator](03-types.html#type-erasure) of the types ´T_1, ..., T_n´. +1. If ´T´ is some other type, then if ´M´ is trait `OptManifest`, a manifest is generated from the designator `scala.reflect.NoManifest`. +If ´M´ is a type different from `OptManifest`, a static error results. diff --git a/docs/_spec/08-pattern-matching.md b/docs/_spec/08-pattern-matching.md new file mode 100644 index 000000000000..1d50b814ee24 --- /dev/null +++ b/docs/_spec/08-pattern-matching.md @@ -0,0 +1,641 @@ +--- +title: Pattern Matching +layout: default +chapter: 8 +--- + +# Pattern Matching + +## Patterns + +```ebnf + Pattern ::= Pattern1 { ‘|’ Pattern1 } + Pattern1 ::= boundvarid ‘:’ TypePat + | ‘_’ ‘:’ TypePat + | Pattern2 + Pattern2 ::= id [‘@’ Pattern3] + | Pattern3 + Pattern3 ::= SimplePattern + | SimplePattern {id [nl] SimplePattern} + SimplePattern ::= ‘_’ + | varid + | Literal + | StableId + | StableId ‘(’ [Patterns] ‘)’ + | StableId ‘(’ [Patterns ‘,’] [id ‘@’] ‘_’ ‘*’ ‘)’ + | ‘(’ [Patterns] ‘)’ + | XmlPattern + Patterns ::= Pattern {‘,’ Patterns} +``` + +A pattern is built from constants, constructors, variables and type tests. +Pattern matching tests whether a given value (or sequence of values) has the shape defined by a pattern, and, if it does, binds the variables in the pattern to the corresponding components of the value (or sequence of values). +The same variable name may not be bound more than once in a pattern. + +###### Example +Some examples of patterns are: + 1. The pattern `ex: IOException` matches all instances of class `IOException`, binding variable `ex` to the instance. + 1. The pattern `Some(x)` matches values of the form `Some(´v´)`, binding `x` to the argument value ´v´ of the `Some` constructor. + 1. The pattern `(x, _)` matches pairs of values, binding `x` to the first component of the pair. The second component is matched with a wildcard pattern. + 1. The pattern `x :: y :: xs` matches lists of length ´\geq 2´, binding `x` to the list's first element, `y` to the list's second element, and `xs` to the remainder. + 1. 
The pattern `1 | 2 | 3` matches the integers between 1 and 3.
+
+Pattern matching is always done in a context which supplies an expected type of the pattern.
+We distinguish the following kinds of patterns.
+
+### Variable Patterns
+
+```ebnf
+  SimplePattern ::= ‘_’
+                  | varid
+```
+
+A _variable pattern_ ´x´ is a simple identifier which starts with a lower case letter.
+It matches any value, and binds the variable name to that value.
+The type of ´x´ is the expected type of the pattern as given from outside.
+A special case is the wild-card pattern `_`, which is treated as if it were a fresh variable on each occurrence.
+
+### Typed Patterns
+
+```ebnf
+  Pattern1 ::= varid ‘:’ TypePat
+             | ‘_’ ‘:’ TypePat
+```
+
+A _typed pattern_ ´x: T´ consists of a pattern variable ´x´ and a type pattern ´T´.
+The type of ´x´ is the type pattern ´T´, where each type variable and wildcard is replaced by a fresh, unknown type.
+This pattern matches any value matched by the [type pattern](#type-patterns) ´T´; it binds the variable name to that value.
+
+### Pattern Binders
+
+```ebnf
+  Pattern2 ::= varid ‘@’ Pattern3
+```
+
+A _pattern binder_ `´x´@´p´` consists of a pattern variable ´x´ and a pattern ´p´.
+The type of the variable ´x´ is the static type ´T´ implied by the pattern ´p´.
+This pattern matches any value ´v´ matched by the pattern ´p´, and it binds the variable name to that value.
+
+A pattern ´p´ _implies_ a type ´T´ if the pattern matches only values of the type ´T´.
+
+### Literal Patterns
+
+```ebnf
+  SimplePattern ::= Literal
+```
+
+A _literal pattern_ ´L´ matches any value that is equal (in terms of `==`) to the literal ´L´.
+The type of ´L´ must conform to the expected type of the pattern.
+
+### Interpolated string patterns
+
+```ebnf
+  Literal ::= interpolatedString
+```
+
+The expansion of interpolated string literals in patterns is the same as in expressions.
+If it occurs in a pattern, an interpolated string literal of either of the forms
+```
+id"text0{ pat1 }text1 ... { patn }textn"
+id"""text0{ pat1 }text1 ... { patn }textn"""
+```
+is equivalent to:
+```
+StringContext("""text0""", ..., """textn""").id(pat1, ..., patn)
+```
+You could define your own `StringContext` to shadow the default one that is in the `scala` package.
+
+This expansion is well-typed if the member `id` evaluates to an extractor object.
+If the extractor object has `apply` as well as `unapply` or `unapplySeq` methods, processed strings can be used as either expressions or patterns.
+
+Taking XML as an example:
+```scala
+import scala.xml.Node
+
+implicit class XMLinterpolation(s: StringContext) {
+  object xml {
+    def apply(exprs: Any*): Node =
+      ... // parse `s` and build an XML tree with `exprs` in the holes
+    def unapplySeq(xml: Node): Option[Seq[Node]] =
+      ... // match `s` against the `xml` tree and produce the subtrees in the holes
+  }
+}
+```
+Then, XML pattern matching could be expressed like this:
+```scala
+case xml"""
+      <a href = "some link">
+        $linktext
+      </a>
+   """ => ...
+```
+where `linktext` is a variable bound by the pattern.
+
+### Stable Identifier Patterns
+
+```ebnf
+  SimplePattern ::= StableId
+```
+
+A _stable identifier pattern_ is a [stable identifier](03-types.html#paths) ´r´.
+The type of ´r´ must conform to the expected type of the pattern.
+The pattern matches any value ´v´ such that `´r´ == ´v´` (see [here](12-the-scala-standard-library.html#root-classes)).
+
+To resolve the syntactic overlap with a variable pattern, a stable identifier pattern may not be a simple name starting with a lower-case letter.
+However, it is possible to enclose such a variable name in backquotes; then it is treated as a stable identifier pattern.
+
+###### Example
+Consider the following class definition:
+
+```scala
+class C { c =>
+  val x = 42
+  val y = 27
+  val Z = 8
+  def f(x: Int) = x match {
+    case c.x => 1  // matches 42
+    case `y` => 2  // matches 27
+    case Z   => 3  // matches 8
+    case x   => 4  // matches any value
+  }
+}
+```
+
+Here, the first three patterns are stable identifier patterns, while the last one is a variable pattern.
+
+### Constructor Patterns
+
+```ebnf
+SimplePattern ::= StableId ‘(’ [Patterns] ‘)’
+```
+
+A _constructor pattern_ is of the form ´c(p_1, ..., p_n)´ where ´n \geq 0´.
+It consists of a stable identifier ´c´, followed by element patterns ´p_1, ..., p_n´.
+The constructor ´c´ is a simple or qualified name which denotes a [case class](05-classes-and-objects.html#case-classes).
+If the case class is monomorphic, then it must conform to the expected type of the pattern, and the formal parameter types of ´c´'s [primary constructor](05-classes-and-objects.html#class-definitions) are taken as the expected types of the element patterns ´p_1, ..., p_n´.
+If the case class is polymorphic, then its type parameters are instantiated so that the instantiation of ´c´ conforms to the expected type of the pattern.
+The instantiated formal parameter types of ´c´'s primary constructor are then taken as the expected types of the component patterns ´p_1, ..., p_n´.
+The pattern matches all objects created from constructor invocations ´c(v_1, ..., v_n)´ where each element pattern ´p_i´ matches the corresponding value ´v_i´.
+
+A special case arises when ´c´'s formal parameter types end in a repeated parameter.
+This is further discussed [here](#pattern-sequences).
+
+### Tuple Patterns
+
+```ebnf
+  SimplePattern ::= ‘(’ [Patterns] ‘)’
+```
+
+A _tuple pattern_ `(´p_1´, ..., ´p_n´)` where ´n \geq 2´ is equivalent to `´p_1´ *: ... *: ´p_n´ *: scala.EmptyTuple`.
+
+Notes:
+- `()` is equivalent to `_: scala.Unit`, and not `scala.EmptyTuple`.
+- `(´pat´)` is a pattern matching ´pat´, and not `´pat´ *: scala.EmptyTuple`.
+- Because such `*:` patterns are slow to match, an implementation is free to use a more efficient translation. For example, `(´p_1´, ´p_2´)` could be translated to `scala.Tuple2(´p_1´, ´p_2´)`, which is indeed equivalent to `´p_1´ *: ´p_2´ *: scala.EmptyTuple`.
+
+### Extractor Patterns
+
+```ebnf
+  SimplePattern ::= StableId ‘(’ [Patterns] ‘)’
+```
+
+An _extractor pattern_ ´x(p_1, ..., p_n)´ where ´n \geq 0´ is of the same syntactic form as a constructor pattern.
+However, instead of a case class, the stable identifier ´x´ denotes an object which has a member method named `unapply` or `unapplySeq` that matches the pattern.
+
+An extractor pattern cannot match the value `null`.
+The implementation ensures that the `unapply`/`unapplySeq` method is not applied to `null`.
+
+A type is said to be an _extractor type_ for some type `T` if it has a method `get` with return type `T`, and a method `isEmpty` with a return type that conforms to `Boolean`.
+`Option[T]` is an extractor type for type `T`.
+
+An `unapply` method in an object ´x´ _matches_ the pattern ´x(p_1, ..., p_n)´ if it has a single parameter (and, optionally, an implicit parameter list) and one of the following applies:
+
+* ´n=0´ and `unapply`'s result type conforms to `Boolean`.
+In this case the extractor pattern matches all values ´v´ for which `´x´.unapply(´v´)` yields `true`.
+* ´n=1´ and `unapply`'s result type is an extractor type for some type ´T´.
+In this case, the (only) argument pattern ´p_1´ is typed in turn with expected type ´T´.
+The extractor pattern matches then all values ´v´ for which `´x´.unapply(´v´)` yields a value ´u´ for which `´u´.isEmpty` yields `false`, `´u´.get` yields a value ´v_1´, and ´p_1´ matches ´v_1´.
+* ´n>1´ and `unapply`'s result type is an extractor type for some type ´T´ with members ´\_1, ..., \_n´ returning types ´T_1, ..., T_n´.
+In this case, the argument patterns ´p_1, ..., p_n´ are typed in turn with expected types ´T_1, ..., T_n´.
+The extractor pattern matches then all values ´v´ for which `´x´.unapply(´v´)` yields a value ´u´ for which `´u´.isEmpty` yields `false`, `´u´.get` yields some value ´t´, and each pattern ´p_i´ matches the corresponding value ´t._i´ from ´t._1, ..., t._n´.
+
+An `unapplySeq` method in an object ´x´ matches the pattern ´x(q_1, ..., q_m, p_1, ..., p_n)´ if it takes exactly one argument and its result type is of the form `Option[(´T_1, ..., T_m´, Seq[S])]` (if `m = 0`, the type `Option[Seq[S]]` is also accepted).
+This case is further discussed [below](#pattern-sequences).
+
+###### Example 1
+
+If we define an extractor object `Pair`:
+
+```scala
+object Pair {
+  def apply[A, B](x: A, y: B) = Tuple2(x, y)
+  def unapply[A, B](x: Tuple2[A, B]): Option[Tuple2[A, B]] = Some(x)
+}
+```
+
+This means that the name `Pair` can be used in place of `Tuple2` for tuple formation as well as for deconstruction of tuples in patterns.
+Hence, the following is possible:
+
+```scala
+val x = (1, 2)
+val y = x match {
+  case Pair(i, s) => Pair(s + i, i * i)
+}
+```
+
+###### Example 2
+
+If we define a class `NameBased`
+
+```scala
+class NameBased[A, B](a: A, b: B) {
+  def isEmpty = false
+  def get = this
+  def _1 = a
+  def _2 = b
+}
+```
+
+Then `NameBased` is an extractor type for `NameBased` itself, since it has a member `isEmpty` returning a value of type `Boolean`, and it has a member `get` returning a value of type `NameBased`.
+
+Since it also has members `_1` and `_2`, it can be used in an extractor pattern with ´n = 2´ as follows:
+
+```scala
+object Extractor {
+  def unapply(x: Any) = new NameBased(1, "two")
+}
+
+"anything" match {
+  case Extractor(a, b) => println(s"$a, $b") // prints "1, two"
+}
+```
+
+
+### Pattern Sequences
+
+```ebnf
+SimplePattern ::= StableId ‘(’ [Patterns ‘,’] [varid ‘@’] ‘_’ ‘*’ ‘)’
+```
+
+A _pattern sequence_ ´p_1, ..., p_n´ appears in two contexts.
+First, in a constructor pattern ´c(q_1, ..., q_m, p_1, ..., p_n)´, where ´c´ is a case class which has ´m+1´ primary constructor parameters, ending in a [repeated parameter](04-basic-declarations-and-definitions.html#repeated-parameters) of type `S*`.
+Second, in an extractor pattern ´x(q_1, ..., q_m, p_1, ..., p_n)´ if the extractor object ´x´ does not have an `unapply` method, but it does define an `unapplySeq` method with a result type that is an extractor type for type `(´T_1, ..., T_m´, Seq[S])` (if `m = 0`, an extractor type for the type `Seq[S]` is also accepted).
+The expected type for the patterns ´p_i´ is ´S´.
+
+The last pattern in a pattern sequence may be a _sequence wildcard_ `_*`.
+Each element pattern ´p_i´ is type-checked with ´S´ as expected type, unless it is a sequence wildcard.
+If a final sequence wildcard is present, the pattern matches all values ´v´ that are sequences which start with elements matching patterns ´p_1, ..., p_{n-1}´.
+If no final sequence wildcard is given, the pattern matches all values ´v´ that are sequences of length ´n´ which consist of elements matching patterns ´p_1, ..., p_n´.
+
+### Infix Operation Patterns
+
+```ebnf
+  Pattern3 ::= SimplePattern {id [nl] SimplePattern}
+```
+
+An _infix operation pattern_ ´p;\mathit{op};q´ is a shorthand for the
+constructor or extractor pattern ´\mathit{op}(p, q)´.
+The precedence and associativity of operators in patterns is the same as in [expressions](06-expressions.html#prefix,-infix,-and-postfix-operations).
+
+An infix operation pattern ´p;\mathit{op};(q_1, ..., q_n)´ is a shorthand for the constructor or extractor pattern ´\mathit{op}(p, q_1, ..., q_n)´.
+
+### Pattern Alternatives
+
+```ebnf
+  Pattern ::= Pattern1 { ‘|’ Pattern1 }
+```
+
+A _pattern alternative_ `´p_1´ | ... | ´p_n´` consists of a number of alternative patterns ´p_i´.
+All alternative patterns are type checked with the expected type of the pattern.
+They may not bind variables other than wildcards.
+The alternative pattern matches a value ´v´ if at least one of its alternatives matches ´v´.
+
+### XML Patterns
+
+XML patterns are treated [here](10-xml-expressions-and-patterns.html#xml-patterns).
+
+### Regular Expression Patterns
+
+Regular expression patterns have been discontinued in Scala from version 2.0.
+
+Later versions of Scala provide a much simplified version of regular expression patterns that cover most scenarios of non-text sequence processing.
+A _sequence pattern_ is a pattern that stands in a position where either (1) a pattern of a type `T` which conforms to `Seq[A]` for some `A` is expected, or (2) a case class constructor that has an iterated formal parameter `A*`.
+A wildcard star pattern `_*` in the rightmost position stands for arbitrarily long sequences.
+It can be bound to variables using `@`, as usual, in which case the variable will have the type `Seq[A]`.
+
+### Irrefutable Patterns
+
+A pattern ´p´ is _irrefutable_ for a type ´T´, if one of the following applies:
+
+1. ´p´ is a variable pattern,
+1. ´p´ is a typed pattern ´x: T'´, and ´T <: T'´,
+1. ´p´ is a constructor pattern ´c(p_1, ..., p_n)´, the type ´T´ is an instance of class ´c´, the [primary constructor](05-classes-and-objects.html#class-definitions) of type ´T´ has argument types ´T_1, ..., T_n´, and each ´p_i´ is irrefutable for ´T_i´.
+1. ´p´ is an extractor pattern for which the extractor type is `Some[´T´]` for some type ´T´
+1. ´p´ is an extractor pattern for which the extractor type's `isEmpty` method is the singleton type `false`
+1. ´p´ is an extractor pattern for which the return type of the `unapply` method is the singleton type `true`
+
+## Type Patterns
+
+```ebnf
+  TypePat ::= Type
+```
+
+Type patterns consist of types, type variables, and wildcards.
+A type pattern ´T´ is of one of the following forms:
+
+* A reference to a class ´C´, ´p.C´, or `´T´#´C´`.
+This type pattern matches any non-null instance of the given class.
+Note that the prefix of the class, if it exists, is relevant for determining class instances.
+For instance, the pattern ´p.C´ matches only instances of classes ´C´ which were created with the path ´p´ as prefix.
+This also applies to prefixes which are not given syntactically.
+For example, if ´C´ refers to a class defined in the nearest enclosing class and is thus equivalent to ´this.C´, it is considered to have a prefix.
+
+The bottom types `scala.Nothing` and `scala.Null` cannot be used as type patterns, because they would match nothing in any case.
+
+* A singleton type `´p´.type`.
This type pattern matches only the value denoted by the path ´p´ (the `eq` method is used to compare the matched value to ´p´).
+
+* A literal type `´lit´`.
+This type pattern matches only the value denoted by the literal ´lit´ (the `==` method is used to compare the matched value to ´lit´).
+
+* A compound type pattern `´T_1´ with ... with ´T_n´` where each ´T_i´ is a type pattern.
+This type pattern matches all values that are matched by each of the type patterns ´T_i´.
+
+* A parameterized type pattern ´T[a_1, ..., a_n]´, where the ´a_i´ are type variable patterns or wildcards `_`.
+This type pattern matches all values which match ´T´ for some arbitrary instantiation of the type variables and wildcards.
+The bounds or alias types of these type variables are determined as described [here](#type-parameter-inference-in-patterns).
+
+* A parameterized type pattern `scala.Array´[T_1]´`, where ´T_1´ is a type pattern.
+This type pattern matches any non-null instance of type `scala.Array´[U_1]´`, where ´U_1´ is a type matched by ´T_1´.
+
+Types which are not of one of the forms described above are also accepted as type patterns.
+However, such type patterns will be translated to their [erasure](03-types.html#type-erasure).
+The Scala compiler will issue an "unchecked" warning for these patterns to flag the possible loss of type-safety.
+
+A _type variable pattern_ is a simple identifier which starts with a lower case letter.
+
+## Type Parameter Inference in Patterns
+
+Type parameter inference is the process of finding bounds for the bound type variables in a typed pattern or constructor pattern.
+Inference takes into account the expected type of the pattern.
+
+### Type parameter inference for typed patterns
+
+Assume a typed pattern ´p: T'´.
+Let ´T´ result from ´T'´ where all wildcards in ´T'´ are renamed to fresh variable names.
+Let ´a_1, ..., a_n´ be the type variables in ´T´.
+These type variables are considered bound in the pattern.
+Let the expected type of the pattern be ´\mathit{pt}´.
+
+Type parameter inference constructs first a set of subtype constraints over the type variables ´a_i´.
+The initial constraints set ´\mathcal{C}\_0´ reflects just the bounds of these type variables.
+That is, assuming ´T´ has bound type variables ´a_1, ..., a_n´ which correspond to class type parameters ´a_1', ..., a_n'´ with lower bounds ´L_1, ..., L_n´ and upper bounds ´U_1, ..., U_n´, ´\mathcal{C}_0´ contains the constraints
+
+$$
+\begin{cases}
+a_i &<: \sigma U_i & \quad (i = 1, ..., n) \\\\
+\sigma L_i &<: a_i & \quad (i = 1, ..., n)
+\end{cases}
+$$
+
+where ´\sigma´ is the substitution ´[a_1' := a_1, ..., a_n' := a_n]´.
+
+The set ´\mathcal{C}_0´ is then augmented by further subtype constraints.
+There are two cases.
+
+###### Case 1
+If there exists a substitution ´\sigma´ over the type variables ´a_1, ..., a_n´ such that ´\sigma T´ conforms to ´\mathit{pt}´, one determines the weakest subtype constraints ´\mathcal{C}\_1´ over the type variables ´a_1, ..., a_n´ such that ´\mathcal{C}\_0 \wedge \mathcal{C}_1´ implies that ´T´ conforms to ´\mathit{pt}´.
+
+###### Case 2
+Otherwise, if ´T´ cannot be made to conform to ´\mathit{pt}´ by instantiating its type variables, one determines all type variables in ´\mathit{pt}´ which are defined as type parameters of a method enclosing the pattern.
+Let the set of such type parameters be ´b_1, ..., b_m´.
+Let ´\mathcal{C}\_0'´ be the subtype constraints reflecting the bounds of the type variables ´b_i´.
+If ´T´ denotes an instance type of a final class, let ´\mathcal{C}\_2´ be the weakest set of subtype constraints over the type variables ´a_1, ..., a_n´ and ´b_1, ..., b_m´ such that ´\mathcal{C}\_0 \wedge \mathcal{C}\_0' \wedge \mathcal{C}\_2´ implies that ´T´ conforms to ´\mathit{pt}´.
+If ´T´ does not denote an instance type of a final class, let ´\mathcal{C}\_2´ be the weakest set of subtype constraints over the type variables ´a_1, ..., a_n´ and ´b_1, ..., b_m´ such that ´\mathcal{C}\_0 \wedge \mathcal{C}\_0' \wedge \mathcal{C}\_2´ implies that it is possible to construct a type ´T'´ which conforms to both ´T´ and ´\mathit{pt}´.
+It is a static error if there is no satisfiable set of constraints ´\mathcal{C}\_2´ with this property.
+
+The final step consists in choosing type bounds for the type variables which imply the established constraint system.
+The process is different for the two cases above.
+
+###### Case 1
+We take ´a_i >: L_i <: U_i´ where each ´L_i´ is minimal and each ´U_i´ is maximal wrt ´<:´ such that ´a_i >: L_i <: U_i´ for ´i = 1, ..., n´ implies ´\mathcal{C}\_0 \wedge \mathcal{C}\_1´.
+
+###### Case 2
+We take ´a_i >: L_i <: U_i´ and ´b_j >: L_j' <: U_j'´ where each ´L_i´ and ´L_j'´ is minimal and each ´U_i´ and ´U_j'´ is maximal such that ´a_i >: L_i <: U_i´ for ´i = 1, ..., n´ and ´b_j >: L_j' <: U_j'´ for ´j = 1, ..., m´ implies ´\mathcal{C}\_0 \wedge \mathcal{C}\_0' \wedge \mathcal{C}_2´.
+
+In both cases, local type inference is permitted to limit the complexity of inferred bounds.
+Minimality and maximality of types have to be understood relative to the set of types of acceptable complexity.
+
+### Type parameter inference for constructor patterns
+Assume a constructor pattern ´C(p_1, ..., p_n)´ where class ´C´ has type parameters ´a_1, ..., a_n´.
+These type parameters are inferred in the same way as for the typed pattern `(_: ´C[a_1, ..., a_n]´)`.
+
+###### Example
+Consider the program fragment:
+
+```scala
+val x: Any
+x match {
+  case y: List[a] => ...
+}
+```
+
+Here, the type pattern `List[a]` is matched against the expected type `Any`.
+The pattern binds the type variable `a`.
+Since `List[a]` conforms to `Any` for every type argument, there are no constraints on `a`.
+Hence, `a` is introduced as an abstract type with no bounds.
+The scope of `a` is the right-hand side of its case clause.
+
+On the other hand, if `x` is declared as
+
+```scala
+val x: List[List[String]]
+```
+
+this generates the constraint `List[a] <: List[List[String]]`, which simplifies to `a <: List[String]`, because `List` is covariant.
+Hence, `a` is introduced with upper bound `List[String]`.
+
+###### Example
+Consider the program fragment:
+
+```scala
+val x: Any
+x match {
+  case y: List[String] => ...
+}
+```
+
+Scala does not maintain information about type arguments at run-time, so there is no way to check that `x` is a list of strings.
+Instead, the Scala compiler will [erase](03-types.html#type-erasure) the pattern to `List[_]`; that is, it will only test whether the top-level runtime-class of the value `x` conforms to `List`, and the pattern match will succeed if it does.
+This might lead to a class cast exception later on, in the case where the list `x` contains elements other than strings.
+The Scala compiler will flag this potential loss of type-safety with an "unchecked" warning message.
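+
+For instance, the following illustrative fragment compiles with an "unchecked" warning, and the loss of type-safety only surfaces at run time:
+
+```scala
+val xs: Any = List(1, 2, 3)
+xs match {
+  case ys: List[String] => // "unchecked": only the erased type List[_] is tested
+    ys.head.length         // throws a ClassCastException at run time
+  case _ =>
+}
+```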
+
+###### Example
+Consider the program fragment
+
+```scala
+class Term[A]
+class Number(val n: Int) extends Term[Int]
+def f[B](t: Term[B]): B = t match {
+  case y: Number => y.n
+}
+```
+
+The expected type of the pattern `y: Number` is `Term[B]`.
+The type `Number` does not conform to `Term[B]`; hence Case 2 of the rules above applies.
+This means that `B` is treated as another type variable for which subtype constraints are inferred.
+In our case the applicable constraint is `Number <: Term[B]`, which entails `B = Int`.
+Hence, `B` is treated in the case clause as an abstract type with lower and upper bound `Int`.
+Therefore, the right-hand side of the case clause, `y.n`, of type `Int`, is found to conform to the method's declared result type, `B`.
+
+## Pattern Matching Expressions
+
+```ebnf
+  Expr        ::= PostfixExpr ‘match’ ‘{’ CaseClauses ‘}’
+  CaseClauses ::= CaseClause {CaseClause}
+  CaseClause  ::= ‘case’ Pattern [Guard] ‘=>’ Block
+```
+
+A _pattern matching expression_
+
+```scala
+e match { case ´p_1´ => ´b_1´ ... case ´p_n´ => ´b_n´ }
+```
+
+consists of a selector expression ´e´ and a number ´n > 0´ of cases.
+Each case consists of a (possibly guarded) pattern ´p_i´ and a block ´b_i´.
+Each ´p_i´ might be complemented by a guard `if ´e´` where ´e´ is a boolean expression.
+The scope of the pattern variables in ´p_i´ comprises the pattern's guard and the corresponding block ´b_i´.
+
+Let ´T´ be the type of the selector expression ´e´ and let ´a_1, ..., a_m´ be the type parameters of all methods enclosing the pattern matching expression.
+For every ´a_i´, let ´L_i´ be its lower bound and ´U_i´ be its upper bound.
+Every pattern ´p \in \{p_1, ..., p_n\}´ can be typed in two ways.
+First, it is attempted to type ´p´ with ´T´ as its expected type.
+If this fails, ´p´ is instead typed with a modified expected type ´T'´ which results from ´T´ by replacing every occurrence of a type parameter ´a_i´ by
+*undefined*.
+If the second step also fails, a compile-time error results.
+If the second step succeeds, let ´T_p´ be the type of pattern ´p´ seen as an expression.
+One then determines minimal bounds ´L_1', ..., L_m'´ and maximal bounds ´U_1', ..., U_m'´ such that for all ´i´, ´L_i <: L_i'´ and ´U_i' <: U_i´ and the following constraint system is satisfied:
+
+$$
+L_1' <: a_1 <: U_1'\;\wedge\;...\;\wedge\;L_m' <: a_m <: U_m' \ \Rightarrow\ T_p <: T
+$$
+
+If no such bounds can be found, a compile-time error results.
+If such bounds are found, the pattern matching clause starting with ´p´ is then typed under the assumption that each ´a_i´ has lower bound ´L_i'´ instead of ´L_i´ and has upper bound ´U_i'´ instead of ´U_i´.
+
+The expected type of every block ´b_i´ is the expected type of the whole pattern matching expression.
+The type of the pattern matching expression is then the [weak least upper bound](03-types.html#weak-conformance) of the types of all blocks ´b_i´.
+
+When applying a pattern matching expression to a selector value, patterns are tried in sequence until one is found which matches the [selector value](#patterns).
+Say this case is `case ´p_i \Rightarrow b_i´`.
+The result of the whole expression is the result of evaluating ´b_i´, where all pattern variables of ´p_i´ are bound to the corresponding parts of the selector value.
+If no matching pattern is found, a `scala.MatchError` exception is thrown.
+
+The pattern in a case may also be followed by a guard suffix `if ´e´` with a boolean expression ´e´.
+The guard expression is evaluated if the preceding pattern in the case matches.
+If the guard expression evaluates to `true`, the pattern match succeeds as normal.
+If the guard expression evaluates to `false`, the pattern in the case is considered not to match and the search for a matching pattern continues.
+
+In the interest of efficiency the evaluation of a pattern matching expression may try patterns in some other order than textual sequence.
+This might affect evaluation through side effects in guards.
+However, it is guaranteed that a guard expression is evaluated only if the pattern it guards matches.
+
+If the selector of a pattern match is an instance of a [`sealed` class](05-classes-and-objects.html#modifiers), a [union type](03-types.html#union-and-intersection-types), or a combination thereof, the compilation of pattern matching can emit warnings which diagnose that a given set of patterns is not exhaustive, i.e. that there is a possibility of a `MatchError` being raised at run-time.
+
+###### Example
+
+Consider the following definitions of arithmetic terms:
+
+```scala
+abstract class Term[T]
+case class Lit(x: Int) extends Term[Int]
+case class Succ(t: Term[Int]) extends Term[Int]
+case class IsZero(t: Term[Int]) extends Term[Boolean]
+case class If[T](c: Term[Boolean],
+                 t1: Term[T],
+                 t2: Term[T]) extends Term[T]
+```
+
+There are terms to represent numeric literals, incrementation, a zero test, and a conditional.
+Every term carries as a type parameter the type of the expression it represents (either `Int` or `Boolean`).
+
+A type-safe evaluator for such terms can be written as follows.
+
+```scala
+def eval[T](t: Term[T]): T = t match {
+  case Lit(n)        => n
+  case Succ(u)       => eval(u) + 1
+  case IsZero(u)     => eval(u) == 0
+  case If(c, u1, u2) => eval(if (eval(c)) u1 else u2)
+}
+```
+
+Note that the evaluator makes crucial use of the fact that type parameters of enclosing methods can acquire new bounds through pattern matching.
+
+For instance, the type of the pattern in the second case, `Succ(u)`, is `Term[Int]`.
+It conforms to the selector type `Term[T]` only if we assume an upper and lower bound of `Int` for `T`.
+Under the assumption `Int <: T <: Int` we can also verify that the type of the right-hand side of the second case, `Int`, conforms to its expected type, `T`.
+
+## Pattern Matching Anonymous Functions
+
+```ebnf
+  BlockExpr ::= ‘{’ CaseClauses ‘}’
+```
+
+An anonymous function can be defined by a sequence of cases
+
+```scala
+{ case ´p_1´ => ´b_1´ ... case ´p_n´ => ´b_n´ }
+```
+
+which appear as an expression without a prior `match`.
+The expected type of such an expression must in part be defined.
+It must be either `scala.Function´k´[´S_1, ..., S_k´, ´R´]` for some ´k > 0´, or `scala.PartialFunction[´S_1´, ´R´]`, where the argument type(s) ´S_1, ..., S_k´ must be fully determined, but the result type ´R´ may be undetermined.
+
+If the expected type is [SAM-convertible](06-expressions.html#sam-conversion) to `scala.Function´k´[´S_1, ..., S_k´, ´R´]`, the expression is taken to be equivalent to the anonymous function:
+
+```scala
+(´x_1: S_1, ..., x_k: S_k´) => (´x_1, ..., x_k´) match {
+  case ´p_1´ => ´b_1´ ... case ´p_n´ => ´b_n´
+}
+```
+
+Here, each ´x_i´ is a fresh name.
+As was shown [here](06-expressions.html#anonymous-functions), this anonymous function is in turn equivalent to the following instance creation expression, where ´T´ is the weak least upper bound of the types of all ´b_i´.
+
+```scala
+new scala.Function´k´[´S_1, ..., S_k´, ´T´] {
+  def apply(´x_1: S_1, ..., x_k: S_k´): ´T´ = (´x_1, ..., x_k´) match {
+    case ´p_1´ => ´b_1´ ... case ´p_n´ => ´b_n´
+  }
+}
+```
+
+If the expected type is `scala.PartialFunction[´S´, ´R´]`, the expression is taken to be equivalent to the following instance creation expression:
+
+```scala
+new scala.PartialFunction[´S´, ´T´] {
+  def apply(´x´: ´S´): ´T´ = x match {
+    case ´p_1´ => ´b_1´ ... case ´p_n´ => ´b_n´
+  }
+  def isDefinedAt(´x´: ´S´): Boolean = x match {
+    case ´p_1´ => true ... case ´p_n´ => true
+    case _ => false
+  }
+}
+```
+
+Here, ´x´ is a fresh name and ´T´ is the weak least upper bound of the types of all ´b_i´.
+The final default case in the `isDefinedAt` method is omitted if one of the patterns ´p_1, ..., p_n´ is already a variable or wildcard pattern.
+
+###### Example
+Here's an example which uses `foldLeft` to compute the scalar product of two vectors:
+
+```scala
+def scalarProduct(xs: Array[Double], ys: Array[Double]) =
+  (xs zip ys).foldLeft(0.0) {
+    case (a, (b, c)) => a + b * c
+  }
+```
+
+The case clauses in this code are equivalent to the following anonymous function:
+
+```scala
+(x, y) => (x, y) match {
+  case (a, (b, c)) => a + b * c
+}
+```
diff --git a/docs/_spec/09-top-level-definitions.md b/docs/_spec/09-top-level-definitions.md
new file mode 100644
index 000000000000..8406c0180533
--- /dev/null
+++ b/docs/_spec/09-top-level-definitions.md
@@ -0,0 +1,178 @@
+---
+title: Top-Level Definitions
+layout: default
+chapter: 9
+---
+
+# Top-Level Definitions
+
+## Compilation Units
+
+```ebnf
+CompilationUnit ::= {‘package’ QualId semi} TopStatSeq
+TopStatSeq      ::= TopStat {semi TopStat}
+TopStat         ::= {Annotation} {Modifier} TmplDef
+                  | Import
+                  | Packaging
+                  | PackageObject
+                  |
+QualId          ::= id {‘.’ id}
+```
+
+A compilation unit consists of a sequence of packagings, import clauses, and class and object definitions, which may be preceded by a package clause.
+
+A _compilation unit_
+
+```scala
+package ´p_1´;
+...
+package ´p_n´;
+´\mathit{stats}´
+```
+
+starting with one or more package clauses is equivalent to a compilation unit consisting of the packaging
+
+```scala
+package ´p_1´ { ...
+  package ´p_n´ {
+    ´\mathit{stats}´
+  } ...
+}
+```
+
+Every compilation unit implicitly imports the following packages, in the given order:
+ 1. the package `java.lang`,
+ 2. the package `scala`, and
+ 3. the object [`scala.Predef`](12-the-scala-standard-library.html#the-predef-object), unless there is an explicit top-level import that references `scala.Predef`.
+
+Members of a later import in that order hide members of an earlier import.
+
+The exception to the implicit import of `scala.Predef` can be useful to hide, e.g., predefined implicit conversions.
+
+## Packagings
+
+```ebnf
+Packaging ::= ‘package’ QualId [nl] ‘{’ TopStatSeq ‘}’
+```
+
+A _package_ is a special object which defines a set of member classes, objects and packages.
+Unlike other objects, packages are not introduced by a definition.
+Instead, the set of members of a package is determined by packagings.
+
+A packaging `package ´p´ { ´\mathit{ds}´ }` injects all definitions in ´\mathit{ds}´ as members into the package whose qualified name is ´p´.
+Members of a package are called _top-level_ definitions.
+If a definition in ´\mathit{ds}´ is labeled `private`, it is visible only for other members in the package.
+
+Inside the packaging, all members of package ´p´ are visible under their simple names.
+However, this rule does not extend to members of enclosing packages of ´p´ that are designated by a prefix of the path ´p´.
+For example, given the packaging
+
+```scala
+package org.net.prj {
+  ...
+}
+```
+
+all members of package `org.net.prj` are visible under their simple names, but members of packages `org` or `org.net` require explicit qualification or imports.
+
+Selections ´p´.´m´ from ´p´ as well as imports from ´p´ work as for objects.
+However, unlike other objects, packages may not be used as values.
+It is illegal to have a package with the same fully qualified name as a module or a class.
+
+Top-level definitions outside a packaging are assumed to be injected into a special empty package.
+That package cannot be named and therefore cannot be imported.
+However, members of the empty package are visible to each other without qualification.
+
+## Package Objects
+
+```ebnf
+PackageObject ::= ‘package’ ‘object’ ObjectDef
+```
+
+A _package object_ `package object ´p´ extends ´t´` adds the members of template ´t´ to the package ´p´.
+There can be only one package object per package.
+The standard naming convention is to place the definition above in a file named `package.scala` that is located in the directory corresponding to package ´p´.
+
+The package object should not define a member with the same name as one of the top-level objects or classes defined in package ´p´.
+If there is a name conflict, the behavior of the program is currently undefined.
+It is expected that this restriction will be lifted in a future version of Scala.
+
+## Package References
+
+```ebnf
+QualId ::= id {‘.’ id}
+```
+
+A reference to a package takes the form of a qualified identifier.
+Like all other references, package references are relative.
+That is, a package reference starting in a name ´p´ will be looked up in the closest enclosing scope that defines a member named ´p´.
+
+If a package name is shadowed, it is possible to refer to its fully-qualified name by prefixing it with the special predefined name `_root_`, which refers to the outermost root package that contains all top-level packages.
+
+The name `_root_` has this special denotation only when used as the first element of a qualifier; it is an ordinary identifier otherwise.
+
+###### Example
+Consider the following program:
+
+```scala
+package b {
+  class B
+}
+
+package a {
+  package b {
+    class A {
+      val x = new _root_.b.B
+    }
+    class C {
+      import _root_.b._
+      def y = new B
+    }
+  }
+}
+```
+
+Here, the reference `_root_.b.B` refers to class `B` in the top-level package `b`.
+If the `_root_` prefix had been omitted, the name `b` would instead resolve to the package `a.b`, and, provided that package does not also contain a class `B`, a compile-time error would result.
+
+## Programs
+
+A _program_ is a top-level object that has a member method _main_ of type `(Array[String])Unit`.
+Programs can be executed from a command shell.
+The program's command arguments are passed to the `main` method as a parameter of type `Array[String]`.
+
+The `main` method of a program can be directly defined in the object, or it can be inherited.
+The Scala library defines a special class `scala.App` whose body acts as a `main` method.
+An object ´m´ inheriting from this class is thus a program, which executes the initialization code of the object ´m´.
+
+###### Example
+The following example will create a hello world program by defining a method `main` in module `test.HelloWorld`.
+
+```scala
+package test
+object HelloWorld {
+  def main(args: Array[String]): Unit = { println("Hello World") }
+}
+```
+
+This program can be started by the command
+
+```scala
+scala test.HelloWorld
+```
+
+In a Java environment, the command
+
+```scala
+java test.HelloWorld
+```
+
+would work as well.
+
+`HelloWorld` can also be defined without a `main` method by inheriting from `App` instead:
+
+```scala
+package test
+object HelloWorld extends App {
+  println("Hello World")
+}
+```
diff --git a/docs/_spec/10-xml-expressions-and-patterns.md b/docs/_spec/10-xml-expressions-and-patterns.md
new file mode 100644
index 000000000000..c929e24fe93d
--- /dev/null
+++ b/docs/_spec/10-xml-expressions-and-patterns.md
@@ -0,0 +1,124 @@
+---
+title: XML
+layout: default
+chapter: 10
+---
+
+# XML Expressions and Patterns
+
+__By Burak Emir__
+
+This chapter describes the syntactic structure of XML expressions and patterns.
+It follows as closely as possible the XML 1.0 specification, changes being mandated by the possibility of embedding Scala code fragments.
+
+## XML expressions
+
+XML expressions are expressions generated by the following production, where the opening bracket `<` of the first element must be in a position to start the lexical [XML mode](01-lexical-syntax.html#xml-mode).
+
+```ebnf
+XmlExpr ::= XmlContent {Element}
+```
+
+Well-formedness constraints of the XML specification apply, which means for instance that start tags and end tags must match, and attributes may only be defined once, except for constraints related to entity resolution.
+
+The following productions describe Scala's extensible markup language, designed as close as possible to the W3C extensible markup language standard.
+Only the productions for attribute values and character data are changed.
+Scala does not support declarations.
+Entity references are not resolved at runtime.
+
+```ebnf
+Element      ::= EmptyElemTag
+               | STag Content ETag
+
+EmptyElemTag ::= ‘<’ Name {S Attribute} [S] ‘/>’
+
+STag         ::= ‘<’ Name {S Attribute} [S] ‘>’
+ETag         ::= ‘</’ Name [S] ‘>’
+Content      ::= [CharData] {Content1 [CharData]}
+Content1     ::= XmlContent
+               | Reference
+               | ScalaExpr
+XmlContent   ::= Element
+               | CDSect
+               | PI
+               | Comment
+```
+
+If an XML expression is a single element, its value is a runtime representation of an XML node (an instance of a subclass of `scala.xml.Node`).
+If the XML expression consists of more than one element, then its value is a runtime representation of a sequence of XML nodes (an instance of a subclass of `scala.Seq[scala.xml.Node]`).
+
+If an XML expression is an entity reference, CDATA section, processing instruction, or a comment, it is represented by an instance of the corresponding Scala runtime class.
+
+By default, leading and trailing whitespace in element content is removed, and consecutive occurrences of whitespace are replaced by a single space character `\u0020`.
+This behavior can be changed to preserve all whitespace with a compiler option.
+
+```ebnf
+Attribute ::= Name Eq AttValue
+
+AttValue  ::= ‘"’ {CharQ | CharRef} ‘"’
+            | ‘'’ {CharA | CharRef} ‘'’
+            | ScalaExpr
+
+ScalaExpr ::= Block
+
+CharData  ::= { CharNoRef } ´\textit{ without}´ {CharNoRef}‘{’CharB {CharNoRef}
+              ´\textit{ and without}´ {CharNoRef}‘]]>’{CharNoRef}
+```
+
+
+XML expressions may contain Scala expressions as attribute values or within nodes.
+In the latter case, these are embedded using a single opening brace `{` and ended by a closing brace `}`.
+To express a single opening brace within XML text as generated by CharData, it must be doubled.
+Thus, `{{` represents the XML text `{` and does not introduce an embedded Scala expression.
+
+
+```ebnf
+BaseChar, CDSect, Char, Comment, CombiningChar, Ideographic, NameChar, PI, S, Reference
+           ::= ´\textit{“as in W3C XML”}´
+
+Char1      ::= Char ´\textit{ without}´ ‘<’ | ‘&’
+CharQ      ::= Char1 ´\textit{ without}´ ‘"’
+CharA      ::= Char1 ´\textit{ without}´ ‘'’
+CharB      ::= Char1 ´\textit{ without}´ ‘{’
+
+Name       ::= XNameStart {NameChar}
+
+XNameStart ::= ‘_’ | BaseChar | Ideographic
+               ´\textit{ (as in W3C XML, but without }´ ‘:’´)´
+```
+
+## XML patterns
+
+XML patterns are patterns generated by the following production, where the opening bracket `<` of the element patterns must be in a position to start the lexical [XML mode](01-lexical-syntax.html#xml-mode).
+
+```ebnf
+XmlPattern ::= ElemPattern
+```
+
+Well-formedness constraints of the XML specification apply.
+
+An XML pattern has to be a single element pattern.
+It matches exactly those runtime representations of an XML tree that have the same structure as described by the pattern.
+XML patterns may contain [Scala patterns](08-pattern-matching.html#pattern-matching-expressions).
+
+Whitespace is treated the same way as in XML expressions.
+
+By default, leading and trailing whitespace in element content is removed, and consecutive occurrences of whitespace are replaced by a single space character `\u0020`.
+This behavior can be changed to preserve all whitespace with a compiler option.
+
+```ebnf
+ElemPattern   ::= EmptyElemTagP
+                | STagP ContentP ETagP
+
+EmptyElemTagP ::= ‘<’ Name [S] ‘/>’
+STagP         ::= ‘<’ Name [S] ‘>’
+ETagP         ::= ‘</’ Name [S] ‘>’
+ContentP      ::= [CharData] {(ElemPattern|ScalaPatterns) [CharData]}
+ContentP1     ::= ElemPattern
+                | Reference
+                | CDSect
+                | PI
+                | Comment
+                | ScalaPatterns
+ScalaPatterns ::= ‘{’ Patterns ‘}’
+```
diff --git a/docs/_spec/11-annotations.md b/docs/_spec/11-annotations.md
new file mode 100644
index 000000000000..3388d55318ea
--- /dev/null
+++ b/docs/_spec/11-annotations.md
@@ -0,0 +1,126 @@
+---
+title: Annotations
+layout: default
+chapter: 11
+---
+
+# Annotations
+
+```ebnf
+  Annotation       ::= ‘@’ SimpleType {ArgumentExprs}
+  ConstrAnnotation ::= ‘@’ SimpleType ArgumentExprs
+```
+
+## Definition
+
+Annotations associate meta-information with definitions.
+A simple annotation has the form `@´c´` or `@´c(a_1, ..., a_n)´`.
+Here, ´c´ is a constructor of a class ´C´, which must conform to the class `scala.Annotation`.
+
+Annotations may apply to definitions or declarations, types, or expressions.
+An annotation of a definition or declaration appears in front of that definition.
+An annotation of a type appears after that type.
+An annotation of an expression ´e´ appears after the expression ´e´, separated by a colon.
+More than one annotation clause may apply to an entity.
+The order in which these annotations are given does not matter.
+
+Examples:
+
+```scala
+@deprecated("Use D", "1.0") class C { ... } // Class annotation
+@transient @volatile var m: Int             // Variable annotation
+String @local                               // Type annotation
+(e: @unchecked) match { ... }               // Expression annotation
+```
+
+## Predefined Annotations
+
+### Java Platform Annotations
+
+The meaning of annotation clauses is implementation-dependent.
+On the Java platform, the following annotations have a standard meaning.
+
+* `@transient` Marks a field to be non-persistent; this is equivalent to the `transient` modifier in Java.
+
+* `@volatile` Marks a field which can change its value outside the control of the program; this is equivalent to the `volatile` modifier in Java.
+
+* `@SerialVersionUID(<longlit>)` Attaches a serial version identifier (a `long` constant) to a class.
+This is equivalent to the following field definition in Java:
+
+```java
+private final static long SerialVersionUID = <longlit>
+```
+
+* `@throws(<classlit>)` A Java compiler checks that a program contains handlers for checked exceptions by analyzing which checked exceptions can result from the execution of a method or constructor.
+For each checked exception which is a possible result, the `throws` clause for the method or constructor must mention the class of that exception or one of the superclasses of the class of that exception.
+
+### Java Beans Annotations
+
+* `@scala.beans.BeanProperty` When prefixed to a definition of some variable `X`, this annotation causes getter and setter methods `getX`, `setX` in the Java bean style to be added in the class containing the variable.
+The first letter of the variable appears capitalized after the `get` or `set`.
+When the annotation is added to the definition of an immutable value definition `X`, only a getter is generated.
+The construction of these methods is part of code-generation; therefore, these methods become visible only once a classfile for the containing class is generated.
+
+* `@scala.beans.BooleanBeanProperty` This annotation is equivalent to `scala.beans.BeanProperty`, but the generated getter method is named `isX` instead of `getX`.
+
+### Deprecation Annotations
+
+* `@deprecated(message: <stringlit>, since: <stringlit>)`
+Marks a definition as deprecated.
+Accesses to the defined entity will then cause a deprecation warning mentioning the _message_ `<stringlit>` to be issued from the compiler.
+The argument _since_ documents since when the definition should be considered deprecated.
+Deprecation warnings are suppressed in code that itself belongs to a definition that is labeled deprecated.
+
+* `@deprecatedName(name: <stringlit>, since: <stringlit>)`
+Marks a formal parameter name as deprecated.
+Invocations of this entity using named parameter syntax referring to the deprecated parameter name cause a deprecation warning.
+
+### Scala Compiler Annotations
+
+* `@unchecked` When applied to the selector of a `match` expression, this attribute suppresses any warnings about non-exhaustive pattern matches that would otherwise be emitted.
+For instance, no warnings would be produced for the method definition below.
+```scala
+def f(x: Option[Int]) = (x: @unchecked) match {
+  case Some(y) => y
+}
+```
+Without the `@unchecked` annotation, a Scala compiler could infer that the pattern match is non-exhaustive, and could produce a warning because `Option` is a `sealed` class.
+
+* `@uncheckedStable` When applied to a value declaration or definition, it allows the defined value to appear in a path, even if its type is [volatile](03-types.html#volatile-types).
+For instance, the following member definitions are legal:
+```scala
+type A <: { type T }
+type B
+@uncheckedStable val x: A with B // volatile type
+val y: x.T                       // OK since `x` is still a path
+```
+Without the `@uncheckedStable` annotation, the designator `x` would not be a path since its type `A with B` is volatile.
+Hence, the reference `x.T` would be malformed.
+
+When applied to value declarations or definitions that have non-volatile types, the annotation has no effect.
+
+* `@specialized` When applied to the definition of a type parameter, this annotation causes the compiler to generate specialized definitions for primitive types.
+An optional list of primitive types may be given, in which case specialization takes into account only those types.
+For instance, the following code would generate specialized traits for `Unit`, `Int` and `Double`:
+```scala
+trait Function0[@specialized(Unit, Int, Double) T] {
+  def apply: T
+}
+```
+Whenever the static type of an expression matches a specialized variant of a definition, the compiler will instead use the specialized version.
+See the [specialization SIP](https://docs.scala-lang.org/sips/scala-specialization.html) for more details of the implementation.
+
+
+## User-defined Annotations
+
+Other annotations may be interpreted by platform- or application-dependent tools.
+The class `scala.annotation.Annotation` is the base class for user-defined annotations.
+It has two sub-traits:
+- `scala.annotation.StaticAnnotation`: Instances of a subclass of this trait will be stored in the generated class files, and therefore accessible to runtime reflection and later compilation runs.
+- `scala.annotation.ConstantAnnotation`: Instances of a subclass of this trait may only have arguments which are [constant expressions](06-expressions.html#constant-expressions), and are also stored in the generated class files.
+
+If an annotation class inherits from neither `scala.annotation.ConstantAnnotation` nor `scala.annotation.StaticAnnotation`, its instances are visible only locally during the compilation run that analyzes them.
+
+## Host-platform Annotations
+
+The host platform may define its own annotation format.
+These annotations do not extend any of the classes in the `scala.annotation` package, but can generally be used in the same way as Scala annotations.
+The host platform may impose additional restrictions on the expressions which are valid as annotation arguments.
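+
+###### Example
+On the JVM, for instance, a Java annotation such as `java.lang.Deprecated` can be applied to a Scala definition just like a regular Scala annotation (an illustrative sketch):
+
+```scala
+// java.lang.Deprecated is a Java annotation; it does not extend
+// scala.annotation.Annotation, yet it can be used like one.
+@Deprecated def legacy(): Int = 0
+```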
diff --git a/docs/_spec/12-the-scala-standard-library.md b/docs/_spec/12-the-scala-standard-library.md
new file mode 100644
index 000000000000..441955df9b4f
--- /dev/null
+++ b/docs/_spec/12-the-scala-standard-library.md
@@ -0,0 +1,726 @@
+---
+title: Standard Library
+layout: default
+chapter: 12
+---
+
+# The Scala Standard Library
+
+The Scala standard library consists of the package `scala` with a number of classes and modules.
+Some of these classes are described in the following.
+
+![Class hierarchy of Scala](public/images/classhierarchy.png)
+
+
+## Root Classes
+
+The root of this hierarchy is formed by class `Any`.
+Every class in a Scala execution environment inherits directly or indirectly from this class.
+Class `Any` has two direct subclasses: `AnyRef` and `AnyVal`.
+
+The subclass `AnyRef` represents all values which are represented as objects in the underlying host system.
+Classes written in other languages inherit from `scala.AnyRef`.
+
+The predefined subclasses of class `AnyVal` describe values which are not implemented as objects in the underlying host system.
+
+User-defined Scala classes which do not explicitly inherit from `AnyVal` inherit directly or indirectly from `AnyRef`.
+They cannot inherit from both `AnyRef` and `AnyVal`.
+
+Classes `AnyRef` and `AnyVal` are required to provide only the members declared in class `Any`, but implementations may add host-specific methods to these classes (for instance, an implementation may identify class `AnyRef` with its own root class for objects).
+
+The signatures of these root classes are described by the following definitions.
+
+```scala
+package scala
+/** The universal root class */
+abstract class Any {
+
+  /** Defined equality; abstract here */
+  def equals(that: Any): Boolean
+
+  /** Semantic equality between values */
+  final def == (that: Any): Boolean =
+    if (null eq this) null eq that else this equals that
+
+  /** Semantic inequality between values */
+  final def != (that: Any): Boolean = !(this == that)
+
+  /** Hash code; abstract here */
+  def hashCode: Int = ...
+
+  /** Textual representation; abstract here */
+  def toString: String = ...
+
+  /** Type test; needs to be inlined to work as given */
+  def isInstanceOf[A]: Boolean
+
+  /** Type cast; needs to be inlined to work as given */
+  def asInstanceOf[A]: A = this match {
+    case x: A => x
+    case _ => if (this eq null) this
+              else throw new ClassCastException()
+  }
+}
+
+/** The root class of all value types */
+final class AnyVal extends Any
+
+/** The root class of all reference types */
+class AnyRef extends Any {
+  def equals(that: Any): Boolean = this eq that
+  final def eq(that: AnyRef): Boolean = ... // reference equality
+  final def ne(that: AnyRef): Boolean = !(this eq that)
+
+  def hashCode: Int = ... // hashCode computed from allocation address
+  def toString: String = ... // toString computed from hashCode and class name
+
+  def synchronized[T](body: => T): T // execute `body` while locking `this`
+}
+```
+
+The type test `´x´.isInstanceOf[´T´]` is equivalent to a typed pattern match
+
+```scala
+´x´ match {
+  case _: ´T'´ => true
+  case _ => false
+}
+```
+
+where the type ´T'´ is the same as ´T´ except if ´T´ is of the form ´D´ or ´D[\mathit{tps}]´ where ´D´ is a type member of some outer class ´C´.
+In this case ´T'´ is `´C´#´D´` (or `´C´#´D[tps]´`, respectively), whereas ´T´ itself would expand to `´C´.this.´D[tps]´`.
+In other words, an `isInstanceOf` test does not check that types have the same enclosing instance.
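+
+For example, under this expansion a type test against an inner class checks only the class itself, not the outer instance (the names `C`, `D`, `c1`, and `c2` below are illustrative):
+
+```scala
+class C { class D }
+val c1 = new C
+val c2 = new C
+val d: c1.D = new c1.D
+d.isInstanceOf[c2.D] // true: only C#D is tested, the prefix c2 is ignored
+```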
+
+The test `´x´.asInstanceOf[´T´]` is treated specially if ´T´ is a [numeric value type](#value-classes).
+In this case the cast will be translated to an application of a [conversion method](#numeric-value-types) `x.to´T´`.
+For non-numeric values ´x´ the operation will raise a `ClassCastException`.
+
+## Value Classes
+
+Value classes are classes whose instances are not represented as objects by the underlying host system.
+All value classes inherit from class `AnyVal`.
+Scala implementations need to provide the value classes `Unit`, `Boolean`, `Double`, `Float`, `Long`, `Int`, `Char`, `Short`, and `Byte` (but are free to provide others as well).
+The signatures of these classes are defined in the following.
+
+### Numeric Value Types
+
+Classes `Double`, `Float`, `Long`, `Int`, `Char`, `Short`, and `Byte` are together called _numeric value types_.
+Classes `Byte`, `Short`, and `Char` are called _subrange types_.
+Subrange types, as well as `Int` and `Long`, are called _integer types_, whereas `Float` and `Double` are called _floating point types_.
+
+Numeric value types are ranked in the following partial order:
+
+```scala
+Byte - Short
+             \
+               Int - Long - Float - Double
+             /
+        Char
+```
+
+`Byte` and `Short` are the lowest-ranked types in this order, whereas `Double` is the highest-ranked.
+Ranking does _not_ imply a [conformance relationship](03-types.html#conformance); for instance `Int` is not a subtype of `Long`.
+However, object [`Predef`](#the-predef-object) defines [views](07-implicits.html#views) from every numeric value type to all higher-ranked numeric value types.
+Therefore, lower-ranked types are implicitly converted to higher-ranked types when required by the [context](06-expressions.html#implicit-conversions).
+
+Given two numeric value types ´S´ and ´T´, the _operation type_ of ´S´ and ´T´ is defined as follows: If both ´S´ and ´T´ are subrange types then the operation type of ´S´ and ´T´ is `Int`.
+Otherwise, the operation type of ´S´ and ´T´ is the larger of the two types wrt ranking.
+Given two numeric values ´v´ and ´w´ the operation type of ´v´ and ´w´ is the operation type of their run-time types.
+
+Any numeric value type ´T´ supports the following methods.
+
+* Comparison methods for equals (`==`), not-equals (`!=`), less-than (`<`), greater-than (`>`), less-than-or-equals (`<=`), greater-than-or-equals (`>=`), which each exist in 7 overloaded alternatives.
+Each alternative takes a parameter of some numeric value type.
+Its result type is type `Boolean`.
+The operation is evaluated by converting the receiver and its argument to their operation type and performing the given comparison operation of that type.
+* Arithmetic methods addition (`+`), subtraction (`-`), multiplication (`*`), division (`/`), and remainder (`%`), which each exist in 7 overloaded alternatives.
+Each alternative takes a parameter of some numeric value type ´U´.
+Its result type is the operation type of ´T´ and ´U´.
+The operation is evaluated by converting the receiver and its argument to their operation type and performing the given arithmetic operation of that type.
+* Parameterless arithmetic methods identity (`+`) and negation (`-`), with result type ´T´.
+The first of these returns the receiver unchanged, whereas the second returns its negation.
+* Conversion methods `toByte`, `toShort`, `toChar`, `toInt`, `toLong`, `toFloat`, `toDouble` which convert the receiver object to the target type, using the rules of Java's numeric type cast operation.
+The conversion might truncate the numeric value (as when going from `Long` to `Int` or from `Int` to `Byte`) or it might lose precision (as when going from `Double` to `Float` or when converting between `Long` and `Float`).
+
+Integer numeric value types support in addition the following operations:
+
+* Bit manipulation methods bitwise-and (`&`), bitwise-or (`|`), and bitwise-exclusive-or (`^`), which each exist in 5 overloaded alternatives.
+Each alternative takes a parameter of some integer numeric value type ´U´.
+Its result type is the operation type of ´T´ and ´U´.
+The operation is evaluated by converting the receiver and its argument to their operation type and performing the given bitwise operation of that type.
+
+* A parameterless bit-negation method (`~`).
+Its result type is the receiver type ´T´ or `Int`, whichever is larger.
+The operation is evaluated by converting the receiver to the result type and negating every bit in its value.
+* Bit-shift methods left-shift (`<<`), arithmetic right-shift (`>>`), and unsigned right-shift (`>>>`).
+Each of these methods has two overloaded alternatives, which take a parameter ´n´ of type `Int`, respectively `Long`.
+The result type of the operation is the receiver type ´T´, or `Int`, whichever is larger.
+The operation is evaluated by converting the receiver to the result type and performing the specified shift by ´n´ bits.
+
+Numeric value types also implement operations `equals`, `hashCode`, and `toString` from class `Any`.
+
+The `equals` method tests whether the argument is a numeric value type.
+If this is true, it will perform the `==` operation which is appropriate for that type.
+That is, the `equals` method of a numeric value type can be thought of being defined as follows:
+
+```scala
+def equals(other: Any): Boolean = other match {
+  case that: Byte   => this == that
+  case that: Short  => this == that
+  case that: Char   => this == that
+  case that: Int    => this == that
+  case that: Long   => this == that
+  case that: Float  => this == that
+  case that: Double => this == that
+  case _ => false
+}
+```
+
+The `hashCode` method returns an integer hashcode that maps equal numeric values to equal results.
+It is guaranteed to be the identity for type `Int` and for all subrange types.
+
+The `toString` method displays its receiver as an integer or floating point number.
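+
+For instance, under these rules the following illustrative expressions have the indicated types and values:
+
+```scala
+val b: Byte  = 1
+val s: Short = 2
+val i = b + s         // operation type of two subrange types is Int, so i: Int
+val f = 3L + 2.0f     // Float ranks above Long, so f: Float
+(1: Any) == (1L: Any) // true: the values are compared at their operation type
+```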
+
+###### Example
+
+This is the signature of the numeric value type `Int`:
+
+```scala
+package scala
+abstract sealed class Int extends AnyVal {
+  def == (that: Double): Boolean // double equality
+  def == (that: Float): Boolean  // float equality
+  def == (that: Long): Boolean   // long equality
+  def == (that: Int): Boolean    // int equality
+  def == (that: Short): Boolean  // int equality
+  def == (that: Byte): Boolean   // int equality
+  def == (that: Char): Boolean   // int equality
+  /* analogous for !=, <, >, <=, >= */
+
+  def + (that: Double): Double   // double addition
+  def + (that: Float): Float     // float addition
+  def + (that: Long): Long       // long addition
+  def + (that: Int): Int         // int addition
+  def + (that: Short): Int       // int addition
+  def + (that: Byte): Int        // int addition
+  def + (that: Char): Int        // int addition
+  /* analogous for -, *, /, % */
+
+  def & (that: Long): Long       // long bitwise and
+  def & (that: Int): Int         // int bitwise and
+  def & (that: Short): Int       // int bitwise and
+  def & (that: Byte): Int        // int bitwise and
+  def & (that: Char): Int        // int bitwise and
+  /* analogous for |, ^ */
+
+  def << (cnt: Int): Int         // int left shift
+  def << (cnt: Long): Int        // long left shift
+  /* analogous for >>, >>> */
+
+  def unary_+ : Int              // int identity
+  def unary_- : Int              // int negation
+  def unary_~ : Int              // int bitwise negation
+
+  def toByte: Byte               // convert to Byte
+  def toShort: Short             // convert to Short
+  def toChar: Char               // convert to Char
+  def toInt: Int                 // convert to Int
+  def toLong: Long               // convert to Long
+  def toFloat: Float             // convert to Float
+  def toDouble: Double           // convert to Double
+}
+```
+
+### Class `Boolean`
+
+Class `Boolean` has only two values: `true` and `false`.
+It implements operations as given in the following class definition.
+
+```scala
+package scala
+abstract sealed class Boolean extends AnyVal {
+  def && (p: => Boolean): Boolean = // boolean and
+    if (this) p else false
+  def || (p: => Boolean): Boolean = // boolean or
+    if (this) true else p
+  def & (x: Boolean): Boolean =     // boolean strict and
+    if (this) x else false
+  def | (x: Boolean): Boolean =     // boolean strict or
+    if (this) true else x
+  def == (x: Boolean): Boolean =    // boolean equality
+    if (this) x else x.unary_!
+  def != (x: Boolean): Boolean =    // boolean inequality
+    if (this) x.unary_! else x
+  def unary_! : Boolean =           // boolean negation
+    if (this) false else true
+}
+```
+
+The class also implements operations `equals`, `hashCode`, and `toString` from class `Any`.
+
+The `equals` method returns `true` if the argument is the same boolean value as the receiver, `false` otherwise.
+The `hashCode` method returns a fixed, implementation-specific hash-code when invoked on `true`, and a different, fixed, implementation-specific hash-code when invoked on `false`.
+The `toString` method returns the receiver converted to a string, i.e. either `"true"` or `"false"`.
+
+### Class `Unit`
+
+Class `Unit` has only one value: `()`.
+It implements only the three methods `equals`, `hashCode`, and `toString` from class `Any`.
+
+The `equals` method returns `true` if the argument is the unit value `()`, `false` otherwise.
+The `hashCode` method returns a fixed, implementation-specific hash-code.
+The `toString` method returns `"()"`.
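+
+For instance, because `&&` and `||` in class `Boolean` above take their second operand by name, they short-circuit, whereas the strict variants `&` and `|` always evaluate both operands (the helper `crash` is illustrative):
+
+```scala
+def crash(): Boolean = throw new RuntimeException("evaluated")
+false && crash()   // false: the by-name argument is never evaluated
+true || crash()    // true: likewise short-circuited
+// false & crash() // would throw, since `&` evaluates both operands
+```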
+
+## Standard Reference Classes
+
+This section presents some standard Scala reference classes which are treated in a special way by the Scala compiler – either Scala provides syntactic sugar for them, or the Scala compiler generates special code for their operations.
+Other classes in the standard Scala library are documented in the Scala library documentation by HTML pages.
+
+### Class `String`
+
+Scala's `String` class is usually derived from the standard String class of the underlying host system (and may be identified with it).
+For Scala clients the class is taken to support in each case a method
+
+```scala
+def + (that: Any): String
+```
+
+which concatenates its left operand with the textual representation of its right operand.
+
+
+### The `Function` Classes
+
+For each class type `Function´n´` where ´n = 0, ..., 22´, Scala defines the following function class:
+
+```scala
+package scala
+trait Function´_n´[-´T_1´, ..., -´T_n´, +´R´]:
+  def apply(´x_1´: ´T_1´, ..., ´x_n´: ´T_n´): ´R´
+  override def toString = "<function>"
+  def curried: ´T_1´ => ... => ´T_n´ => ´R´ = ...
+  def tupled: ((´T_1´, ..., ´T_n´)) => ´R´ = ...
+```
+
+For function types `Function´n´` where ´n > 22´, Scala defines a unique function class:
+
+```scala
+package scala
+trait FunctionXXL:
+  def apply(xs: IArray[Object]): Object
+  override def toString = "<function>"
+```
+
+There is no loss of type safety, as the internal representation is still `Function´n´` for all ´n´.
+However, this means that the methods `curried` and `tupled` are not available on functions with more than 22 parameters.
+
+The implicitly imported [`Predef`](#the-predef-object) object defines the name
+`Function` as an alias of `Function1`.
+
+
+The `PartialFunction` subclass of `Function1` represents functions that (indirectly) specify their domain.
+Use the `isDefinedAt` method to query whether the partial function is defined for a given input (i.e., whether the input is part of the function's domain).
+
+```scala
+class PartialFunction[-A, +B] extends Function1[A, B] {
+  def isDefinedAt(x: A): Boolean
+}
+```
+
+### Trait `Product`
+
+
+All case classes automatically extend the `Product` trait (and generate synthetic methods to conform to it), but not the `Product´n´` traits; they also define a `_´n´` method for each of their arguments.
+
+### Trait `Enum`
+
+All enum definitions automatically extend the `reflect.Enum` trait (and generate synthetic methods to conform to it).
+
+### Class `Array`
+
+All operations on arrays desugar to the corresponding operations of the underlying platform.
+Therefore, the following class definition is given for informational purposes only:
+
+```scala
+final class Array[T](_length: Int)
+extends java.io.Serializable with java.lang.Cloneable {
+  def length: Int = ...
+  def apply(i: Int): T = ...
+  def update(i: Int, x: T): Unit = ...
+  override def clone(): Array[T] = ...
+}
+```
+
+If ´T´ is not a type parameter or abstract type, the type `Array[T]` is represented as the array type `|T|[]` in the underlying host system, where `|T|` is the erasure of `T`.
+If ´T´ is a type parameter or abstract type, a different representation might be used (it is `Object` on the Java platform).
+
+#### Operations
+
+`length` returns the length of the array, `apply` means subscripting, and `update` means element update.
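+
+For instance (an illustrative, non-normative sketch):
+
+```scala
+val xs = new Array[Int](3)
+xs(1) = 42         // desugars to xs.update(1, 42)
+val x = xs(1)      // desugars to xs.apply(1)
+val n = xs.length  // 3
+```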
+
+Because of the syntactic sugar for `apply` and `update` operations, we have the following correspondences between Scala and Java code for operations on an array `xs`:
+
+|_Scala_           |_Java_      |
+|------------------|------------|
+|`xs.length`       |`xs.length` |
+|`xs(i)`           |`xs[i]`     |
+|`xs(i) = e`       |`xs[i] = e` |
+
+Two implicit conversions exist in `Predef` that are frequently applied to arrays: a conversion to `scala.collection.mutable.ArrayOps` and a conversion to `scala.collection.mutable.ArraySeq` (a subtype of `scala.collection.Seq`).
+
+Both types make many of the standard operations found in the Scala collections API available.
+The conversion to `ArrayOps` is temporary, as all operations defined on `ArrayOps` return a value of type `Array`, while the conversion to `ArraySeq` is permanent as all operations return a value of type `ArraySeq`.
+The conversion to `ArrayOps` takes priority over the conversion to `ArraySeq`.
+
+Because of the tension between parametrized types in Scala and the ad-hoc implementation of arrays in the host languages, some subtle points need to be taken into account when dealing with arrays.
+These are explained in the following.
+
+#### Variance
+
+Unlike arrays in Java, arrays in Scala are _not_ covariant; that is, ´S <: T´ does not imply `Array[´S´] ´<:´ Array[´T´]` in Scala.
+However, it is possible to cast an array of ´S´ to an array of ´T´ if such a cast is permitted in the host environment.
+
+For instance `Array[String]` does not conform to `Array[Object]`, even though `String` conforms to `Object`.
+However, it is possible to cast an expression of type `Array[String]` to `Array[Object]`, and this cast will succeed without raising a `ClassCastException`. Example:
+
+```scala
+val xs = new Array[String](2)
+// val ys: Array[Object] = xs   // **** error: incompatible types
+val ys: Array[Object] = xs.asInstanceOf[Array[Object]]  // OK
+```
+
+The instantiation of an array with a polymorphic element type ´T´ requires information about type ´T´ at runtime.
+This information is synthesized by adding a [context bound](07-implicits.html#context-bounds-and-view-bounds) of `scala.reflect.ClassTag` to type ´T´.
+An example is the following implementation of method `mkArray`, which creates an array of an arbitrary type ´T´, given a sequence of ´T´s which defines its elements:
+
+```scala
+import reflect.ClassTag
+def mkArray[T : ClassTag](elems: Seq[T]): Array[T] = {
+  val result = new Array[T](elems.length)
+  var i = 0
+  for (elem <- elems) {
+    result(i) = elem
+    i += 1
+  }
+  result
+}
+```
+
+If type ´T´ is a type for which the host platform offers a specialized array representation, this representation is used.
+
+###### Example
+On the Java Virtual Machine, an invocation of `mkArray(List(1,2,3))` will return a primitive array of `int`s, written as `int[]` in Java.
+
+#### Companion object
+
+`Array`'s companion object provides various factory methods for the instantiation of single- and multi-dimensional arrays, an extractor method [`unapplySeq`](08-pattern-matching.html#extractor-patterns) which enables pattern matching over arrays, and additional utility methods:
+
+```scala
+package scala
+object Array {
+  /** copies array elements from `src` to `dest`. */
+  def copy(src: AnyRef, srcPos: Int,
+           dest: AnyRef, destPos: Int, length: Int): Unit = ...
+
+  /** Returns an array of length 0 */
+  def empty[T: ClassTag]: Array[T] = ...
+
+  /** Create an array with given elements. */
+  def apply[T: ClassTag](xs: T*): Array[T] = ...
+ + /** Creates array with given dimensions */ + def ofDim[T: ClassTag](n1: Int): Array[T] = ... + /** Creates a 2-dimensional array */ + def ofDim[T: ClassTag](n1: Int, n2: Int): Array[Array[T]] = ... + ... + + /** Concatenate all argument arrays into a single array. */ + def concat[T: ClassTag](xss: Array[T]*): Array[T] = ... + + /** Returns an array that contains the results of some element computation a number + * of times. */ + def fill[T: ClassTag](n: Int)(elem: => T): Array[T] = ... + /** Returns a two-dimensional array that contains the results of some element + * computation a number of times. */ + def fill[T: ClassTag](n1: Int, n2: Int)(elem: => T): Array[Array[T]] = ... + ... + + /** Returns an array containing values of a given function over a range of integer + * values starting from 0. */ + def tabulate[T: ClassTag](n: Int)(f: Int => T): Array[T] = ... + /** Returns a two-dimensional array containing values of a given function + * over ranges of integer values starting from `0`. */ + def tabulate[T: ClassTag](n1: Int, n2: Int)(f: (Int, Int) => T): Array[Array[T]] = ... + ... + + /** Returns an array containing a sequence of increasing integers in a range. */ + def range(start: Int, end: Int): Array[Int] = ... + /** Returns an array containing equally spaced values in some integer interval. */ + def range(start: Int, end: Int, step: Int): Array[Int] = ... + + /** Returns an array containing repeated applications of a function to a start value. */ + def iterate[T: ClassTag](start: T, len: Int)(f: T => T): Array[T] = ... + + /** Enables pattern matching over arrays */ + def unapplySeq[A](x: Array[A]): Option[IndexedSeq[A]] = Some(x) +} +``` + +## Class Node + +```scala +package scala.xml + +trait Node { + + /** the label of this node */ + def label: String + + /** attribute axis */ + def attribute: Map[String, String] + + /** child axis (all children of this node) */ + def child: Seq[Node] + + /** descendant axis (all descendants of this node) */ + def descendant: Seq[Node] = child.toList.flatMap { + x => x::x.descendant.asInstanceOf[List[Node]] + } + + /** descendant axis (all descendants of this node) */ + def descendant_or_self: Seq[Node] = this::child.toList.flatMap { + x => x::x.descendant.asInstanceOf[List[Node]] + } + + override def equals(x: Any): Boolean = x match { + case that:Node => + that.label == this.label && + that.attribute.sameElements(this.attribute) && + that.child.sameElements(this.child) + case _ => false + } + + /** XPath style projection function. Returns all children of this node + * that are labeled with 'that'. The document order is preserved. + */ + def \(that: Symbol): NodeSeq = { + new NodeSeq({ + that.name match { + case "_" => child.toList + case _ => + var res:List[Node] = Nil + for (x <- child.elements if x.label == that.name) { + res = x::res + } + res.reverse + } + }) + } + + /** XPath style projection function. Returns all nodes labeled with the + * name 'that' from the 'descendant_or_self' axis. Document order is preserved. + */ + def \\(that: Symbol): NodeSeq = { + new NodeSeq( + that.name match { + case "_" => this.descendant_or_self + case _ => this.descendant_or_self.asInstanceOf[List[Node]]. 
+ filter(x => x.label == that.name) + }) + } + + /** hashcode for this XML node */ + override def hashCode = + Utility.hashCode(label, attribute.toList.hashCode, child) + + /** string representation of this node */ + override def toString = Utility.toXML(this) + +} +``` + +## The `Predef` Object + +The `Predef` object defines standard methods and type aliases for Scala programs. +It is implicitly imported, as described in [the chapter on name binding](02-identifiers-names-and-scopes.html), so that all its defined members are available without qualification. +Its definition for the JVM environment conforms to the following signature: + +```scala +package scala +object Predef { + + // classOf --------------------------------------------------------- + + /** Returns the runtime representation of a class type. */ + def classOf[T]: Class[T] = null + // this is a dummy, classOf is handled by compiler. + + // valueOf ----------------------------------------------------------- + + /** Retrieve the single value of a type with a unique inhabitant. */ + @inline def valueOf[T](implicit vt: ValueOf[T]): T {} = vt.value + // instances of the ValueOf type class are provided by the compiler. + + // Standard type aliases --------------------------------------------- + + type String = java.lang.String + type Class[T] = java.lang.Class[T] + + // Miscellaneous ----------------------------------------------------- + + type Function[-A, +B] = Function1[A, B] + + type Map[A, +B] = collection.immutable.Map[A, B] + type Set[A] = collection.immutable.Set[A] + + val Map = collection.immutable.Map + val Set = collection.immutable.Set + + // Manifest types, companions, and incantations for summoning --------- + + type ClassManifest[T] = scala.reflect.ClassManifest[T] + type Manifest[T] = scala.reflect.Manifest[T] + type OptManifest[T] = scala.reflect.OptManifest[T] + val ClassManifest = scala.reflect.ClassManifest + val Manifest = scala.reflect.Manifest + val NoManifest = scala.reflect.NoManifest + + def manifest[T](implicit m: Manifest[T]) = m + def classManifest[T](implicit m: ClassManifest[T]) = m + def optManifest[T](implicit m: OptManifest[T]) = m + + // Minor variations on identity functions ----------------------------- + def identity[A](x: A): A = x + def implicitly[T](implicit e: T) = e // for summoning implicit values from the nether world + @inline def locally[T](x: T): T = x // to communicate intent and avoid unmoored statements + + // Asserts, Preconditions, Postconditions ----------------------------- + + def assert(assertion: Boolean) { + if (!assertion) + throw new java.lang.AssertionError("assertion failed") + } + + def assert(assertion: Boolean, message: => Any) { + if (!assertion) + throw new java.lang.AssertionError("assertion failed: " + message) + } + + def assume(assumption: Boolean) { + if (!assumption) + throw new IllegalArgumentException("assumption failed") + } + + def assume(assumption: Boolean, message: => Any) { + if (!assumption) + throw new IllegalArgumentException("assumption failed: " + message.toString) + } + + def require(requirement: Boolean) { + if (!requirement) + throw new IllegalArgumentException("requirement failed") + } + + def require(requirement: Boolean, message: => Any) { + if (!requirement) + throw new IllegalArgumentException("requirement failed: "+ message) + } +``` + +```scala + // Printing and reading ----------------------------------------------- + + def print(x: Any) = Console.print(x) + def println() = Console.println() + def println(x: Any) = Console.println(x) 
+  def printf(text: String, xs: Any*) = Console.printf(text.format(xs: _*))
+
+  // Implicit conversions ------------------------------------------------
+
+  ...
+}
+```
+
+### Predefined Implicit Definitions
+
+The `Predef` object also contains a number of implicit definitions, which are available by default (because `Predef` is implicitly imported).
+Implicit definitions come in two priorities.
+High-priority implicits are defined in the `Predef` object itself whereas low-priority implicits are defined in a class inherited by `Predef`.
+The rules of static [overloading resolution](06-expressions.html#overloading-resolution) stipulate that, all other things being equal, implicit resolution prefers high-priority implicits over low-priority ones.
+
+The available low-priority implicits include definitions falling into the following categories.
+
+1. For every primitive type, a wrapper that takes values of that type to instances of a `runtime.Rich*` class.
+For instance, values of type `Int` can be implicitly converted to instances of class `runtime.RichInt`.
+
+1. For every array type with elements of primitive type, a wrapper that takes arrays of that type to instances of an `ArraySeq` class.
+For instance, values of type `Array[Float]` can be implicitly converted to instances of class `ArraySeq[Float]`.
+There are also generic array wrappers that take arrays of type `Array[T]` for arbitrary `T` to `ArraySeq`s.
+
+1. An implicit conversion from `String` to `WrappedString`.
+
+The available high-priority implicits include definitions falling into the following categories.
+
+* An implicit wrapper that adds `ensuring` methods with the following overloaded variants to type `Any`.
+```scala
+def ensuring(cond: Boolean): A = { assert(cond); x }
+def ensuring(cond: Boolean, msg: Any): A = { assert(cond, msg); x }
+def ensuring(cond: A => Boolean): A = { assert(cond(x)); x }
+def ensuring(cond: A => Boolean, msg: Any): A = { assert(cond(x), msg); x }
+```
+
+* An implicit wrapper that adds a `->` method with the following implementation to type `Any`.
+```scala
+def -> [B](y: B): (A, B) = (x, y)
+```
+
+* For every array type with elements of primitive type, a wrapper that takes arrays of that type to instances of a `runtime.ArrayOps` class.
+For instance, values of type `Array[Float]` can be implicitly converted to instances of class `runtime.ArrayOps[Float]`.
+There are also generic array wrappers that take arrays of type `Array[T]` for arbitrary `T` to `ArrayOps`s.
+
+* An implicit wrapper that adds `+` and `formatted` methods with the following implementations to type `Any`.
+```scala
+def +(other: String) = String.valueOf(self) + other
+def formatted(fmtstr: String): String = fmtstr format self
+```
+
+* Numeric primitive conversions that implement the transitive closure of the following mappings:
+```
+Byte  -> Short
+Short -> Int
+Char  -> Int
+Int   -> Long
+Long  -> Float
+Float -> Double
+```
+
+* Boxing and unboxing conversions between primitive types and their boxed versions:
+```
+Byte    <-> java.lang.Byte
+Short   <-> java.lang.Short
+Char    <-> java.lang.Character
+Int     <-> java.lang.Integer
+Long    <-> java.lang.Long
+Float   <-> java.lang.Float
+Double  <-> java.lang.Double
+Boolean <-> java.lang.Boolean
+```
+
+* An implicit definition that generates instances of type `T <:< T`, for any type `T`. Here, `<:<` is a class defined as follows.
+```scala
+sealed abstract class <:<[-From, +To] extends (From => To)
+```
+Implicit parameters of `<:<` types are typically used to implement type constraints.
diff --git a/docs/_spec/13-syntax-summary.md b/docs/_spec/13-syntax-summary.md
new file mode 100644
index 000000000000..2dc971fc9840
--- /dev/null
+++ b/docs/_spec/13-syntax-summary.md
@@ -0,0 +1,185 @@
+---
+title: Syntax Summary
+layout: default
+chapter: 13
+---
+
+# Syntax Summary
+
+The following descriptions of Scala tokens use literal characters `‘c’` when referring to the ASCII fragment `\u0000` – `\u007F`.
+
+Informal descriptions are typeset as `“some comment”`.
+
+## Lexical Syntax
+
+The lexical syntax of Scala is given by the following grammar in EBNF form:
+
+```ebnf
+whiteSpace       ::= ‘\u0020’ | ‘\u0009’ | ‘\u000D’ | ‘\u000A’
+upper            ::= ‘A’ | ... | ‘Z’ | ‘$’ and any character in Unicode categories Lu, Lt or Nl,
+                     and any character in Unicode categories Lo and Lm that doesn't have
+                     contributory property Other_Lowercase
+lower            ::= ‘a’ | ... | ‘z’ | ‘_’ and any character in Unicode category Ll,
+                     and any character in Unicode categories Lo or Lm that has contributory
+                     property Other_Lowercase
+letter           ::= upper | lower
+digit            ::= ‘0’ | ... | ‘9’
+paren            ::= ‘(’ | ‘)’ | ‘[’ | ‘]’ | ‘{’ | ‘}’
+delim            ::= ‘`’ | ‘'’ | ‘"’ | ‘.’ | ‘;’ | ‘,’
+opchar           ::= ‘!’ | ‘#’ | ‘%’ | ‘&’ | ‘*’ | ‘+’ | ‘-’ | ‘/’ | ‘:’ |
+                     ‘<’ | ‘=’ | ‘>’ | ‘?’ | ‘@’ | ‘\’ | ‘^’ | ‘|’ | ‘~’
+                     and any character in Unicode categories Sm or So
+printableChar    ::= all characters in [\u0020, \u007E] inclusive
+UnicodeEscape    ::= ‘\’ ‘u’ {‘u’} hexDigit hexDigit hexDigit hexDigit
+hexDigit         ::= ‘0’ | ... | ‘9’ | ‘A’ | ... | ‘F’ | ‘a’ | ... | ‘f’
+charEscapeSeq    ::= ‘\’ (‘b’ | ‘t’ | ‘n’ | ‘f’ | ‘r’ | ‘"’ | ‘'’ | ‘\’)
+escapeSeq        ::= UnicodeEscape | charEscapeSeq
+
+op               ::= opchar {opchar}
+varid            ::= lower idrest
+boundvarid       ::= varid
+                   | ‘`’ varid ‘`’
+alphaid          ::= upper idrest
+                   | varid
+plainid          ::= alphaid
+                   | op
+id               ::= plainid
+                   | ‘`’ { charNoBackQuoteOrNewline | escapeSeq } ‘`’
+idrest           ::= {letter | digit} [‘_’ op]
+quoteId          ::= ‘'’ alphaid
+spliceId         ::= ‘$’ alphaid ;
+
+integerLiteral   ::= (decimalNumeral | hexNumeral) [‘L’ | ‘l’]
+decimalNumeral   ::= ‘0’ | digit [{digit | ‘_’} digit]
+hexNumeral       ::= ‘0’ (‘x’ | ‘X’) hexDigit [{hexDigit | ‘_’} hexDigit]
+
+floatingPointLiteral
+                 ::= [decimalNumeral] ‘.’ digit [{digit | ‘_’} digit] [exponentPart] [floatType]
+                   | decimalNumeral exponentPart [floatType]
+                   | decimalNumeral floatType
+exponentPart     ::= (‘E’ | ‘e’) [‘+’ | ‘-’] digit [{digit | ‘_’} digit]
+floatType        ::= ‘F’ | ‘f’ | ‘D’ | ‘d’
+
+booleanLiteral   ::= ‘true’ | ‘false’
+
+characterLiteral ::= ‘'’ (charNoQuoteOrNewline | escapeSeq) ‘'’
+
+stringLiteral    ::= ‘"’ {stringElement} ‘"’
+                   | ‘"""’ multiLineChars ‘"""’
+stringElement    ::= charNoDoubleQuoteOrNewline
+                   | escapeSeq
+multiLineChars   ::= {[‘"’] [‘"’] charNoDoubleQuote} {‘"’}
+
+interpolatedString
+                 ::= alphaid ‘"’ {[‘\’] interpolatedStringPart | ‘\\’ | ‘\"’} ‘"’
+                   | alphaid ‘"""’ {[‘"’] [‘"’] char \ (‘"’ | ‘\$’) | escape} {‘"’} ‘"""’
+interpolatedStringPart
+                 ::= printableChar \ (‘"’ | ‘$’ | ‘\’) | escape
+escape           ::= ‘\$\$’
+                   | ‘\$"’
+                   | ‘\$’ alphaid
+                   | ‘\$’ BlockExpr
+
+comment          ::= ‘/*’ “any sequence of characters; nested comments are allowed” ‘*/’
+                   | ‘//’ “any sequence of characters up to end of line”
+
+nl               ::= ´\mathit{“new line character”}´
+semi             ::= ‘;’ | nl {nl}
+```
+
+## Optional Braces
+
+```
+colon        ::= ':'    -- with side conditions explained in 01-lexical-syntax.md
+ <<< ts >>>  ::= ‘{’ ts ‘}’
+               | indent ts outdent
+:<<< ts >>>  ::= [nl] ‘{’ ts ‘}’
+               | colon indent ts outdent
+```
+
+## Context-free Syntax
+
+´\color{red}{\text{TODO SCALA3: Once we're done porting the spec, make sure that
+the references to grammar productions in the rest of the spec match this.}}´
+
+The context-free syntax of Scala is given by the following EBNF grammar:
+
+```ebnf
+RefineDcl          ::= ‘val’ ValDcl
+                     | ‘def’ DefDcl
+                     | ‘type’ {nl} TypeDcl
+Dcl                ::= RefineDcl
+                     | ‘var’ VarDcl
+ValDcl             ::= ids ‘:’ Type
+VarDcl             ::= ids ‘:’ Type
+DefDcl             ::= DefSig ‘:’ Type
+DefSig             ::= id [DefTypeParamClause] [TypelessClauses] [DefImplicitClause]
+TypeDcl            ::= id [TypeParamClause] {FunParamClause} TypeBounds
+
+Def                ::= ‘val’ PatDef
+                     | ‘var’ PatDef
+                     | ‘def’ DefDef
+                     | ‘type’ {nl} TypeDcl
+                     | TmplDef
+PatDef             ::= ids [‘:’ Type] ‘=’ Expr
+                     | Pattern2 [‘:’ Type] ‘=’ Expr
+DefDef             ::= DefSig [‘:’ Type] ‘=’ Expr
+                     | ‘this’ TypelessClauses [DefImplicitClause] ‘=’ ConstrExpr
+
+TmplDef            ::= ([‘case’] ‘class’ | ‘trait’) ClassDef
+                     | [‘case’] ‘object’ ObjectDef
+                     | ‘enum’ EnumDef
+                     | ‘given’ GivenDef
+ClassDef           ::= id ClassConstr [Template]
+ClassConstr        ::= [ClsTypeParamClause] [ConstrMods] ClsParamClauses
+ConstrMods         ::= {Annotation} [AccessModifier]
+ObjectDef          ::= id [Template]
+EnumDef            ::= id ClassConstr InheritClauses EnumBody
+GivenDef           ::= [GivenSig] (AnnotType [‘=’ Expr] | StructuralInstance)
+GivenSig           ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’  -- one of `id`, `DefTypeParamClause`, `UsingParamClause` must be present
+StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody]
+Extension          ::= ‘extension’ [DefTypeParamClause] {UsingParamClause}
+                       ‘(’ DefTermParam ‘)’ {UsingParamClause} ExtMethods
+ExtMethods         ::= ExtMethod | [nl] <<< ExtMethod {semi ExtMethod} >>>
+ExtMethod          ::= {Annotation [nl]} {Modifier} ‘def’ DefDef
+                     | Export
+Template           ::= InheritClauses [TemplateBody]
+InheritClauses     ::= [‘extends’ ConstrApps] [‘derives’ QualId {‘,’ QualId}]
+ConstrApps         ::= ConstrApp ({‘,’ ConstrApp} | {‘with’ ConstrApp})
+ConstrApp          ::= SimpleType1 {Annotation} {ParArgumentExprs}
+ConstrExpr         ::= SelfInvocation
+                     | <<< SelfInvocation {semi BlockStat} >>>
+SelfInvocation     ::= ‘this’ ArgumentExprs {ArgumentExprs}
+
+WithTemplateBody   ::= <<< [SelfType] TemplateStat {semi TemplateStat} >>>
+TemplateBody       ::= :<<< [SelfType] TemplateStat {semi TemplateStat} >>>
+TemplateStat       ::= Import
+                     | Export
+                     | {Annotation [nl]} {Modifier} Def
+                     | {Annotation [nl]} {Modifier} Dcl
+                     | Extension
+                     | Expr1
+                     | EndMarker
+                     |
+SelfType           ::= id [‘:’ InfixType] ‘=>’
+                     | ‘this’ ‘:’ InfixType ‘=>’
+
+EnumBody           ::= :<<< [SelfType] EnumStat {semi EnumStat} >>>
+EnumStat           ::= TemplateStat
+                     | {Annotation [nl]} {Modifier} EnumCase
+EnumCase           ::= ‘case’ (id ClassConstr [‘extends’ ConstrApps] | ids)
+
+TopStats           ::= TopStat {semi TopStat}
+TopStat            ::= Import
+                     | Export
+                     | {Annotation [nl]} {Modifier} Def
+                     | Extension
+                     | Packaging
+                     | PackageObject
+                     | EndMarker
+                     |
+Packaging          ::= ‘package’ QualId :<<< TopStats >>>
+PackageObject      ::= ‘package’ ‘object’ ObjectDef
+
+CompilationUnit    ::= {‘package’ QualId semi} TopStats
+```
diff --git a/docs/_spec/A1-deprecated.md b/docs/_spec/A1-deprecated.md
new file mode 100644
index 000000000000..649c2d7d92e6
--- /dev/null
+++ b/docs/_spec/A1-deprecated.md
@@ -0,0 +1,21 @@
+
+### Symbol Literals
+
+Symbol literals are no longer supported.
+
+The [`scala.Symbol`](https://scala-lang.org/api/3.x/scala/Symbol.html) class still exists, so a literal translation of the symbol literal `'xyz` is `Symbol("xyz")`.
+However, it is recommended to use a plain string literal `"xyz"` instead. (The `Symbol` class will be deprecated and removed in the future.) Example:
+
+```
+scalac Test.scala
+-- Error: Test.scala:1:25 ------------------------------------------------------------------------------------------------
+
+1 |@main def test = println('abc)
+  |                         ^
+  |                         symbol literal 'abc is no longer supported,
+  |                         use a string literal "abc" or an application Symbol("abc") instead,
+  |                         or enclose in braces '{abc} if you want a quoted expression.
+  |                         For now, you can also `import language.deprecated.symbolLiterals` to accept
+  |                         the idiom, but this possibility might no longer be available in the future.
+1 error found
+```
diff --git a/docs/_spec/A2-scala-2-compatibility.md b/docs/_spec/A2-scala-2-compatibility.md
new file mode 100644
index 000000000000..30ac1ac32fc2
--- /dev/null
+++ b/docs/_spec/A2-scala-2-compatibility.md
@@ -0,0 +1,40 @@
+
+### Existential Types
+
+Existential types using `forSome` (as in [SLS §3.2.12](https://www.scala-lang.org/files/archive/spec/2.13/03-types.html#existential-types)) are not available in Scala 3.
+Therefore, when reading an existential type from Scala 2, the following happens:
+
+Existential types that can be expressed using only wildcards (but not
+`forSome`) are treated as refined types.
+For instance, the type
+```scala
+Map[_ <: AnyRef, Int]
+```
+is treated as the type `Map`, where the first type parameter
+is upper-bounded by `AnyRef` and the second type parameter is an alias
+of `Int`.
+
+When reading class files compiled with Scala 2, Scala 3 will make a best
+effort to approximate existential types with its own types. It will
+issue a warning that a precise emulation is not possible.
+
+### Procedure Syntax
+
+Procedure syntax
+```scala
+def f() { ... }
+```
+has been dropped. You need to write one of the following instead:
+```scala
+def f() = { ... }
+def f(): Unit = { ... }
+```
+Scala 3 accepts the old syntax under the `-source:3.0-migration` option.
+If the `-migration` option is set, it can even rewrite old syntax to new.
+The [Scalafix](https://scalacenter.github.io/scalafix/) tool also
+can rewrite procedure syntax to make it Scala 3 compatible.
+
+### Compound Types (`with`)
+
+Intersection types `A & B` replace compound types `A with B` in Scala 2.
+For the moment, the syntax `A with B` is still allowed and interpreted as `A & B`, but its usage as a type (as opposed to in a `new` or `extends` clause) will be deprecated and removed in the future.
diff --git a/docs/_spec/A3-to-be-deprecated.md b/docs/_spec/A3-to-be-deprecated.md
new file mode 100644
index 000000000000..98f758dee2d4
--- /dev/null
+++ b/docs/_spec/A3-to-be-deprecated.md
@@ -0,0 +1,4 @@
+This is a simple list of features that are not deprecated yet, but will be in the future.
+They should emit warnings or errors only when using the `-source:future` compiler flag.
+
+- [private[this] and protected[this]](../_docs/reference/dropped-features/this-qualifier.md)
diff --git a/docs/_spec/APPLIEDreference/dropped-features/auto-apply.md b/docs/_spec/APPLIEDreference/dropped-features/auto-apply.md
new file mode 100644
index 000000000000..b9aedb9f046b
--- /dev/null
+++ b/docs/_spec/APPLIEDreference/dropped-features/auto-apply.md
@@ -0,0 +1,96 @@
+---
+layout: doc-page
+title: "Dropped: Auto-Application"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/auto-apply.html
+---
+
+Previously an empty argument list `()` was implicitly inserted when
+calling a nullary method without arguments. Example:
+
+```scala
+def next(): T = ...
+next     // is expanded to next()
+```
+
+In Scala 3, this idiom is an error.
+
+```scala
+next
+^^^^
+method next must be called with () argument
+```
+
+In Scala 3, the application syntax has to follow exactly the parameter
+syntax. Excluded from this rule are methods that are defined in Java
+or that override methods defined in Java. The reason for being more
+lenient with such methods is that otherwise everyone would have to
+write
+
+```scala
+xs.toString().length()
+```
+
+instead of
+
+```scala
+xs.toString.length
+```
+
+The latter is idiomatic Scala because it conforms to the _uniform
+access principle_. This principle states that one should be able to
+change an object member from a field to a non-side-effecting method
+and back without affecting clients that access the
+member. Consequently, Scala encourages defining such "property"
+methods without a `()` parameter list whereas side-effecting methods
+should be defined with it. Methods defined in Java cannot make this
+distinction; for them a `()` is always mandatory. So Scala fixes the
+problem on the client side, by allowing the parameterless references.
+But where Scala 2 allowed that freedom for all method references, Scala 3
+restricts it to references of external methods that are not defined
+themselves in Scala 3.
+
+For reasons of backwards compatibility, Scala 3 for the moment also
+auto-inserts `()` for nullary methods that are defined in Scala 2, or
+that override a method defined in Scala 2. It turns out that, because
+the correspondence between definition and call was not enforced in
+Scala so far, there are quite a few method definitions in Scala 2
+libraries that use `()` in an inconsistent way. For instance, we
+find in `scala.math.Numeric`
+
+```scala
+def toInt(): Int
+```
+
+whereas `toInt` is written without parameters everywhere
+else. Enforcing strict parameter correspondence for references to
+such methods would project the inconsistencies to client code, which
+is undesirable. So Scala 3 opts for more leniency when type-checking
+references to such methods until most core libraries in Scala 2 have
+been cleaned up.
+
+Stricter conformance rules also apply to overriding of nullary
+methods. It is no longer allowed to override a parameterless method
+by a nullary method or _vice versa_. Instead, both methods must agree
+exactly in their parameter lists.
+
+```scala
+class A:
+  def next(): Int
+
+class B extends A:
+  def next: Int // overriding error: incompatible type
+```
+
+Methods overriding Java or Scala 2 methods are again exempted from this
+requirement.
+
+## Migrating code
+
+Existing Scala code with inconsistent parameters can still be compiled
+in Scala 3 under `-source 3.0-migration`. When paired with the `-rewrite`
+option, the code will be automatically rewritten to conform to Scala 3's
+stricter checking.
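+
+For instance (a hypothetical sketch of what such a rewrite does; the class `Counter` is invented for illustration):
+
+```scala
+class Counter:
+  private var i = 0
+  def next(): Int = { i += 1; i }
+
+val c = Counter()
+// val n = c.next   // error in Scala 3: method next must be called with () argument
+val n = c.next()    // the form produced by `-rewrite`
+```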
+
+## Reference
+
+For more information, see [Issue #2570](https://github.com/lampepfl/dotty/issues/2570) and [PR #2716](https://github.com/lampepfl/dotty/pull/2716).
diff --git a/docs/_spec/APPLIEDreference/dropped-features/class-shadowing.md b/docs/_spec/APPLIEDreference/dropped-features/class-shadowing.md
new file mode 100644
index 000000000000..a27b53db7cce
--- /dev/null
+++ b/docs/_spec/APPLIEDreference/dropped-features/class-shadowing.md
@@ -0,0 +1,31 @@
+---
+layout: doc-page
+title: "Dropped: Class Shadowing"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/class-shadowing.html
+---
+
+Scala 2 so far allowed patterns like this:
+
+```scala
+class Base {
+  class Ops { ... }
+}
+
+class Sub extends Base {
+  class Ops { ... }
+}
+```
+
+Scala 3 rejects this with the error message:
+
+```scala
+6 |    class Ops {  }
+  |          ^
+  |class Ops cannot have the same name as class Ops in class Base
+  | -- class definitions cannot be overridden
+```
+
+The issue is that the two `Ops` classes _look_ like one overrides the
+other, but classes in Scala 2 cannot be overridden. To keep things clean
+(and its internal operations consistent) the Scala 3 compiler forces you
+to rename the inner classes so that their names are different.
diff --git a/docs/_spec/APPLIEDreference/dropped-features/delayed-init.md b/docs/_spec/APPLIEDreference/dropped-features/delayed-init.md
new file mode 100644
index 000000000000..5d4f614ce951
--- /dev/null
+++ b/docs/_spec/APPLIEDreference/dropped-features/delayed-init.md
@@ -0,0 +1,32 @@
+---
+layout: doc-page
+title: "Dropped: DelayedInit"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/delayed-init.html
+---
+
+The special handling of the [`DelayedInit`](https://scala-lang.org/api/3.x/scala/DelayedInit.html)
+trait is no longer supported.
+
+One consequence is that the [`App`](https://scala-lang.org/api/3.x/scala/App.html) class,
+which used [`DelayedInit`](https://scala-lang.org/api/3.x/scala/DelayedInit.html), is
+now partially broken. You can still use `App` as a simple way to set up a main program. Example:
+
+```scala
+object HelloWorld extends App {
+  println("Hello, world!")
+}
+```
+
+However, the code is now run in the initializer of the object, which on
+some JVMs means that it will only be interpreted. So, better not use it
+for benchmarking! Also, if you want to access the command line arguments,
+you need to use an explicit `main` method for that.
+
+```scala
+object Hello:
+  def main(args: Array[String]) =
+    println(s"Hello, ${args(0)}")
+```
+
+On the other hand, Scala 3 offers a convenient alternative to such "program" objects
+with [`@main` methods](../changed-features/main-functions.md).
diff --git a/docs/_spec/APPLIEDreference/dropped-features/do-while.md b/docs/_spec/APPLIEDreference/dropped-features/do-while.md
new file mode 100644
index 000000000000..08a730b8b5a7
--- /dev/null
+++ b/docs/_spec/APPLIEDreference/dropped-features/do-while.md
@@ -0,0 +1,41 @@
+---
+layout: doc-page
+title: "Dropped: Do-While"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/do-while.html
+---
+
+The syntax construct
+```scala
+do <body> while (<cond>)
+```
+is no longer supported.
+Instead, it is recommended to use the equivalent `while` loop below:
+```scala
+while ({ <body> ; <cond> }) ()
+```
+For instance, instead of
+```scala
+do
+  i += 1
+while (f(i) == 0)
+```
+one writes
+```scala
+while
+  i += 1
+  f(i) == 0
+do ()
+```
+The idea to use a block as the condition of a while also gives a solution
+to the "loop-and-a-half" problem. Here is another example:
+```scala
+while
+  val x: Int = iterator.next
+  x >= 0
+do print(".")
+```
+
+## Why Drop The Construct?
+
+ - `do-while` is used relatively rarely and it can be expressed faithfully using just `while`. So there seems to be little point in having it as a separate syntax construct.
+ - Under the [new syntax rules](../other-new-features/control-syntax.md) `do` is used as a statement continuation, which would clash with its meaning as a statement introduction.
diff --git a/docs/_spec/APPLIEDreference/dropped-features/dropped-features.md b/docs/_spec/APPLIEDreference/dropped-features/dropped-features.md
new file mode 100644
index 000000000000..f6a13d9fa5da
--- /dev/null
+++ b/docs/_spec/APPLIEDreference/dropped-features/dropped-features.md
@@ -0,0 +1,7 @@
+---
+layout: index
+title: "Dropped Features"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features.html
+---
+
+The following pages document the features of Scala 2 that have been dropped in Scala 3.
diff --git a/docs/_spec/APPLIEDreference/dropped-features/early-initializers.md b/docs/_spec/APPLIEDreference/dropped-features/early-initializers.md
new file mode 100644
index 000000000000..6f7c59c4f031
--- /dev/null
+++ b/docs/_spec/APPLIEDreference/dropped-features/early-initializers.md
@@ -0,0 +1,16 @@
+---
+layout: doc-page
+title: "Dropped: Early Initializers"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/early-initializers.html
+---
+
+Early initializers of the form
+
+```scala
+class C extends { ... } with SuperClass ...
+```
+
+have been dropped. They were rarely used, and mostly to compensate for the lack of
+[trait parameters](../other-new-features/trait-parameters.md), which are now directly supported in Scala 3.
+
+For more information, see [SLS §5.1.6](https://www.scala-lang.org/files/archive/spec/2.13/05-classes-and-objects.html#early-definitions).
diff --git a/docs/_spec/APPLIEDreference/dropped-features/existential-types.md b/docs/_spec/APPLIEDreference/dropped-features/existential-types.md
new file mode 100644
index 000000000000..6ef815152cd0
--- /dev/null
+++ b/docs/_spec/APPLIEDreference/dropped-features/existential-types.md
@@ -0,0 +1,35 @@
+---
+layout: doc-page
+title: "Dropped: Existential Types"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/existential-types.html
+---
+
+Existential types using `forSome` (as in
+[SLS §3.2.12](https://www.scala-lang.org/files/archive/spec/2.13/03-types.html#existential-types))
+have been dropped. The reasons for dropping them are:
+
+ - Existential types violate a type soundness principle on which DOT
+   and Scala 3 are constructed. That principle says that every
+   prefix (`p`, respectively `S`) of a type selection `p.T` or `S#T`
+   must either come from a value constructed at runtime or refer to a
+   type that is known to have only good bounds.
+
+ - Existential types create many difficult feature interactions
+   with other Scala constructs.
+
+ - Existential types largely overlap with path-dependent types,
+   so the gain of having them is relatively minor.
+
+Existential types that can be expressed using only wildcards (but not
+`forSome`) are still supported, but are treated as refined types.
+For instance, the type
+```scala
+Map[_ <: AnyRef, Int]
+```
+is treated as the type `Map`, where the first type parameter
+is upper-bounded by `AnyRef` and the second type parameter is an alias
+of `Int`.
+
+When reading class files compiled with Scala 2, Scala 3 will make a best
+effort to approximate existential types with its own types. It will
+issue a warning that a precise emulation is not possible.
diff --git a/docs/_spec/APPLIEDreference/dropped-features/limit22.md b/docs/_spec/APPLIEDreference/dropped-features/limit22.md
new file mode 100644
index 000000000000..e72aeadbe2ca
--- /dev/null
+++ b/docs/_spec/APPLIEDreference/dropped-features/limit22.md
@@ -0,0 +1,17 @@
+---
+layout: doc-page
+title: "Dropped: Limit 22"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/limit22.html
+---
+
+The limits of 22 for the maximal number of parameters of function types and the
+maximal number of fields in tuple types have been dropped.
+
+* Functions can now have an arbitrary number of parameters. Functions beyond
+  [`scala.Function22`](https://www.scala-lang.org/api/current/scala/Function22.html) are erased to a new trait [`scala.runtime.FunctionXXL`](https://scala-lang.org/api/3.x/scala/runtime/FunctionXXL.html).
+
+* Tuples can also have an arbitrary number of fields. Tuples beyond [`scala.Tuple22`](https://www.scala-lang.org/api/current/scala/Tuple22.html)
+  are erased to a new class [`scala.runtime.TupleXXL`](https://scala-lang.org/api/3.x/scala/runtime/TupleXXL.html) (which extends the trait [`scala.Product`](https://scala-lang.org/api/3.x/scala/Product.html)). Furthermore, they support generic
+  operations such as concatenation and indexing.
+
+Both of these are implemented using arrays.
diff --git a/docs/_spec/APPLIEDreference/dropped-features/macros.md b/docs/_spec/APPLIEDreference/dropped-features/macros.md
new file mode 100644
index 000000000000..7ffe9043d0cd
--- /dev/null
+++ b/docs/_spec/APPLIEDreference/dropped-features/macros.md
@@ -0,0 +1,16 @@
+---
+layout: doc-page
+title: "Dropped: Scala 2 Macros"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/macros.html
+---
+
+The previous, experimental macro system has been dropped.
+
+Instead, there is a cleaner, more restricted system based on two complementary concepts: `inline` and `'{ ... }`/`${ ... }` code generation.
+`'{ ... }` delays the compilation of the code and produces an object containing the code; dually, `${ ... }` evaluates an expression which produces code and inserts it in the surrounding `'{ ... }`.
+In this setting, a definition marked as `inline` that contains a `${ ... }` is a macro: the code inside the `${ ... }` is executed at compile time and produces code in the form of `'{ ... }`.
+Additionally, the contents of code can be inspected and created with a more complex reflection API as an extension of the `'{ ... }`/`${ ... }` framework.
+
+* `inline` has been [implemented](../metaprogramming/inline.md) in Scala 3.
+* Quotes `'{ ... }` and splices `${ ... }` have been [implemented](../metaprogramming/macros.md) in Scala 3.
+* [TASTy reflect](../metaprogramming/reflection.md) provides more complex tree based APIs to inspect or create quoted code.
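+
+A minimal sketch of the new approach (a hypothetical example, assuming only the standard `scala.quoted` API; to actually invoke the macro, `twice` must be compiled before its call sites):
+
+```scala
+import scala.quoted.*
+
+// `twice` is a macro: its right-hand side is a splice, run at compile time.
+inline def twice(inline x: Int): Int = ${ twiceImpl('x) }
+
+// The implementation receives the argument as code and returns new code.
+def twiceImpl(x: Expr[Int])(using Quotes): Expr[Int] =
+  '{ $x + $x }  // produce the expression `x + x` as a quoted value
+```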
diff --git a/docs/_spec/APPLIEDreference/dropped-features/procedure-syntax.md b/docs/_spec/APPLIEDreference/dropped-features/procedure-syntax.md new file mode 100644 index 000000000000..de76fbb32af2 --- /dev/null +++ b/docs/_spec/APPLIEDreference/dropped-features/procedure-syntax.md @@ -0,0 +1,19 @@ +--- +layout: doc-page +title: "Dropped: Procedure Syntax" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/procedure-syntax.html +--- + +Procedure syntax +```scala +def f() { ... } +``` +has been dropped. You need to write one of the following instead: +```scala +def f() = { ... } +def f(): Unit = { ... } +``` +Scala 3 accepts the old syntax under the `-source:3.0-migration` option. +If the `-migration` option is set, it can even rewrite old syntax to new. +The [Scalafix](https://scalacenter.github.io/scalafix/) tool also +can rewrite procedure syntax to make it Scala 3 compatible. diff --git a/docs/_spec/APPLIEDreference/dropped-features/symlits.md b/docs/_spec/APPLIEDreference/dropped-features/symlits.md new file mode 100644 index 000000000000..d3c0180b16e6 --- /dev/null +++ b/docs/_spec/APPLIEDreference/dropped-features/symlits.md @@ -0,0 +1,24 @@ +--- +layout: doc-page +title: "Dropped: Symbol Literals" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/symlits.html +--- + +Symbol literals are no longer supported. + +The [`scala.Symbol`](https://scala-lang.org/api/3.x/scala/Symbol.html) class still exists, so a literal translation of the symbol literal `'xyz` is `Symbol("xyz")`. However, it is recommended to use a plain string literal `"xyz"` instead. (The `Symbol` class will be deprecated and removed in the future). Example: + + +``` +scalac Test.scala +-- Error: Test.scala:1:25 ------------------------------------------------------------------------------------------------ + +1 |@main def test = println('abc) + | ^ + | symbol literal 'abc is no longer supported, + | use a string literal "abc" or an application Symbol("abc") instead, + | or enclose in braces '{abc} if you want a quoted expression. + | For now, you can also `import language.deprecated.symbolLiterals` to accept + | the idiom, but this possibility might no longer be available in the future. +1 error found +``` diff --git a/docs/_spec/APPLIEDreference/dropped-features/this-qualifier.md b/docs/_spec/APPLIEDreference/dropped-features/this-qualifier.md new file mode 100644 index 000000000000..3fcaefb7e0d8 --- /dev/null +++ b/docs/_spec/APPLIEDreference/dropped-features/this-qualifier.md @@ -0,0 +1,33 @@ +--- +layout: doc-page +title: "Dropped: private[this] and protected[this]" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html +--- + +The `private[this]` and `protected[this]` access modifiers will be deprecated and phased out. + +Previously, these modifiers were needed for + + - avoiding the generation of getters and setters + - excluding code under a `private[this]` from variance checks. (Scala 2 also excludes `protected[this]` but this was found to be unsound and was therefore removed). + - avoiding the generation of fields, if a `private[this] val` is not accessed + by a class method. + +The compiler now infers for `private` members the fact that they are only accessed via `this`. Such members are treated as if they had been declared `private[this]`. `protected[this]` is dropped without a replacement. 
+
+This change can in some cases change the semantics of a Scala program, since a
+`private` val is no longer guaranteed to generate a field. The field
+is omitted if
+
+ - the `val` is only accessed via `this`, and
+ - the `val` is not accessed from a method in the current class.
+
+This can cause problems if a program tries to access the missing private field via reflection. The recommended fix is to declare the field instead to be qualified private with the enclosing class as qualifier. Example:
+```scala
+  class C(x: Int):
+    private[C] val field = x + 1
+    // [C] needed if `field` is to be accessed through reflection
+    val retained = field * field
+```
+
diff --git a/docs/_spec/APPLIEDreference/enums/adts.md b/docs/_spec/APPLIEDreference/enums/adts.md
new file mode 100644
index 000000000000..23599e49dc5b
--- /dev/null
+++ b/docs/_spec/APPLIEDreference/enums/adts.md
@@ -0,0 +1,90 @@
+---
+layout: doc-page
+title: "Algebraic Data Types"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/enums/adts.html
+---
+
+The [`enum` concept](./enums.md) is general enough to also support algebraic data types (ADTs) and their generalized version (GADTs).
+Here is an example of how an `Option` type can be represented as an ADT:
+
+```scala
+enum Option[+T]:
+  case Some(x: T)
+  case None
+```
+
+This example introduces an `Option` enum with a covariant type parameter `T` consisting of two cases, `Some` and `None`.
+`Some` is parameterized with a value parameter `x`.
+It is a shorthand for writing a case class that extends `Option`.
+Since `None` is not parameterized, it is treated as a normal enum value.
+
+The `extends` clauses that were omitted in the example above can also be given explicitly:
+
+```scala
+enum Option[+T]:
+  case Some(x: T) extends Option[T]
+  case None       extends Option[Nothing]
+```
+
+Note that the parent type of the `None` value is inferred as `Option[Nothing]`.
+Generally, all covariant type parameters of the enum class are minimized in a compiler-generated `extends` clause whereas all contravariant type parameters are maximized.
+If `Option` were non-variant, you would need to give the extends clause of `None` explicitly.
+
+As for normal enum values, the cases of an `enum` are all defined in the `enum`'s companion object.
+So it's `Option.Some` and `Option.None` unless the definitions are "pulled out" with an import.
+
+
+## Widening of Constructor Application
+
+Observe here the inferred result types of the following expressions:
+```scala
+scala> Option.Some("hello")
+val res1: t2.Option[String] = Some(hello)
+
+scala> Option.None
+val res2: t2.Option[Nothing] = None
+```
+
+Note that the type of the expressions above is always `Option`.
+Generally, the type of an enum case constructor application will be widened to the underlying enum type, unless a more specific type is expected.
+This is a subtle difference with respect to normal case classes.
+The classes making up the cases do exist, and can be unveiled, either by constructing them directly with a `new`, or by explicitly providing an expected type.
+
+```scala
+scala> new Option.Some(2)
+val res3: Option.Some[Int] = Some(2)
+scala> val x: Option.Some[Int] = Option.Some(3)
+val res4: Option.Some[Int] = Some(3)
+```
+
+As with all other enums, ADTs can define methods.
+For instance, here is `Option` again, with an `isDefined` method and an `Option(...)` constructor in its companion object.
+
+```scala
+enum Option[+T]:
+  case Some(x: T)
+  case None
+
+  def isDefined: Boolean = this match
+    case None => false
+    case _    => true
+
+object Option:
+
+  def apply[T >: Null](x: T): Option[T] =
+    if x == null then None else Some(x)
+
+end Option
+```
+
+Enumerations and ADTs have been presented as two different concepts.
+But since they share the same syntactic construct, they can be seen simply as two ends of a spectrum and it is perfectly possible to construct hybrids.
+For instance, the code below gives an implementation of `Color` either with three enum values or with a parameterized case that takes an RGB value.
+
+```scala
+enum Color(val rgb: Int):
+  case Red   extends Color(0xFF0000)
+  case Green extends Color(0x00FF00)
+  case Blue  extends Color(0x0000FF)
+  case Mix(mix: Int) extends Color(mix)
+```
diff --git a/docs/_spec/APPLIEDreference/enums/enums-index.md b/docs/_spec/APPLIEDreference/enums/enums-index.md
new file mode 100644
index 000000000000..80d703c3e897
--- /dev/null
+++ b/docs/_spec/APPLIEDreference/enums/enums-index.md
@@ -0,0 +1,7 @@
+---
+layout: index
+title: "Enums"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/enums/index.html
+---
+
+This chapter documents enums in Scala 3.
diff --git a/docs/_spec/APPLIEDreference/enums/enums.md b/docs/_spec/APPLIEDreference/enums/enums.md
new file mode 100644
index 000000000000..bcab50d3a36d
--- /dev/null
+++ b/docs/_spec/APPLIEDreference/enums/enums.md
@@ -0,0 +1,182 @@
+---
+layout: doc-page
+title: "Enumerations"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/enums/enums.html
+---
+
+An enumeration is used to define a type consisting of a set of named values.
+
+```scala
+enum Color:
+  case Red, Green, Blue
+```
+
+This defines a new `sealed` class, `Color`, with three values, `Color.Red`, `Color.Green`, `Color.Blue`.
+The color values are members of `Color`'s companion object.
+
+## Parameterized enums
+
+Enums can be parameterized.
+
+```scala
+enum Color(val rgb: Int):
+  case Red   extends Color(0xFF0000)
+  case Green extends Color(0x00FF00)
+  case Blue  extends Color(0x0000FF)
+```
+
+As the example shows, you can define the parameter value by using an explicit extends clause.
+
+## Methods defined for enums
+
+The values of an enum correspond to unique integers.
+The integer associated with an enum value is returned by its `ordinal` method:
+
+```scala
+scala> val red = Color.Red
+val red: Color = Red
+scala> red.ordinal
+val res0: Int = 0
+```
+
+The companion object of an enum also defines three utility methods.
+The `valueOf` method obtains an enum value by its name.
+The `values` method returns all enum values defined in an enumeration in an `Array`.
+The `fromOrdinal` method obtains an enum value from its ordinal (`Int`) value.
+
+```scala
+scala> Color.valueOf("Blue")
+val res0: Color = Blue
+scala> Color.values
+val res1: Array[Color] = Array(Red, Green, Blue)
+scala> Color.fromOrdinal(0)
+val res2: Color = Red
+```
+
+## User-defined members of enums
+
+It is possible to add your own definitions to an enum.
+For example:
+
+```scala
+enum Planet(mass: Double, radius: Double):
+  private final val G = 6.67300E-11
+  def surfaceGravity = G * mass / (radius * radius)
+  def surfaceWeight(otherMass: Double) = otherMass * surfaceGravity
+
+  case Mercury extends Planet(3.303e+23, 2.4397e6)
+  case Venus   extends Planet(4.869e+24, 6.0518e6)
+  case Earth   extends Planet(5.976e+24, 6.37814e6)
+  case Mars    extends Planet(6.421e+23, 3.3972e6)
+  case Jupiter extends Planet(1.9e+27, 7.1492e7)
+  case Saturn  extends Planet(5.688e+26, 6.0268e7)
+  case Uranus  extends Planet(8.686e+25, 2.5559e7)
+  case Neptune extends Planet(1.024e+26, 2.4746e7)
+end Planet
+```
+
+## User-defined companion object of enums
+It is also possible to define an explicit companion object for an enum:
+
+```scala
+object Planet:
+  def main(args: Array[String]) =
+    val earthWeight = args(0).toDouble
+    val mass = earthWeight / Earth.surfaceGravity
+    for p <- values do
+      println(s"Your weight on $p is ${p.surfaceWeight(mass)}")
+end Planet
+```
+
+## Restrictions on Enum Cases
+
+Enum case declarations are similar to secondary constructors:
+they are scoped outside of the enum template, despite being declared within it.
+This means that enum case declarations cannot access inner members of the enum class.
+
+Similarly, enum case declarations may not directly reference members of the enum's companion object, even if they are imported (directly, or by renaming).
+For example:
+
+```scala
+import Planet.*
+enum Planet(mass: Double, radius: Double):
+  private final val (mercuryMass, mercuryRadius) = (3.303e+23, 2.4397e6)
+
+  case Mercury extends Planet(mercuryMass, mercuryRadius)           // Not found
+  case Venus   extends Planet(venusMass, venusRadius)               // illegal reference
+  case Earth   extends Planet(Planet.earthMass, Planet.earthRadius) // ok
+object Planet:
+  private final val (venusMass, venusRadius) = (4.869e+24, 6.0518e6)
+  private final val (earthMass, earthRadius) = (5.976e+24, 6.37814e6)
+end Planet
+```
+The fields referenced by `Mercury` are not visible, and the fields referenced by `Venus` may not be referenced directly (using `import Planet.*`).
+You must use an indirect reference, as demonstrated with `Earth`.
+
+## Deprecation of Enum Cases
+
+As a library author, you may want to signal that an enum case is no longer intended for use.
+However, you may still want to handle the removal of a case from your public API gracefully, for example by special-casing deprecated cases.
+
+To illustrate, say that the `Planet` enum originally had an additional case:
+
+```diff
+ enum Planet(mass: Double, radius: Double):
+   ...
+   case Neptune extends Planet(1.024e+26, 2.4746e7)
++  case Pluto extends Planet(1.309e+22, 1.1883e3)
+ end Planet
+```
+
+We now want to deprecate the `Pluto` case.
+First we add the `scala.deprecated` annotation to `Pluto`:
+
+```diff
+ enum Planet(mass: Double, radius: Double):
+   ...
+   case Neptune extends Planet(1.024e+26, 2.4746e7)
+-  case Pluto extends Planet(1.309e+22, 1.1883e3)
++
++  @deprecated("refer to IAU definition of planet")
++  case Pluto extends Planet(1.309e+22, 1.1883e3)
+ end Planet
+```
+
+Outside the lexical scopes of `enum Planet` or `object Planet`, references to `Planet.Pluto` will produce a deprecation warning.
+Within those scopes however, we can still reference it to implement introspection over the deprecated cases: + +```scala +trait Deprecations[T <: reflect.Enum] { + extension (t: T) def isDeprecatedCase: Boolean +} + +object Planet { + given Deprecations[Planet] with { + extension (p: Planet) + def isDeprecatedCase = p == Pluto + } +} +``` + +We could imagine that a library may use [type class derivation](../contextual/derivation.md) to automatically provide an instance for `Deprecations`. + +## Compatibility with Java Enums + +If you want to use the Scala-defined enums as [Java enums](https://docs.oracle.com/javase/tutorial/java/javaOO/enum.html), you can do so by extending the class `java.lang.Enum`, which is imported by default, as follows: + +```scala +enum Color extends Enum[Color] { case Red, Green, Blue } +``` + +The type parameter comes from the Java enum [definition](https://docs.oracle.com/javase/8/docs/api/index.html?java/lang/Enum.html) and should be the same as the type of the enum. +There is no need to provide constructor arguments (as defined in the Java API docs) to `java.lang.Enum` when extending it – the compiler will generate them automatically. + +After defining `Color` like that, you can use it like you would a Java enum: + +```scala +scala> Color.Red.compareTo(Color.Green) +val res15: Int = -1 +``` + +For a more in-depth example of using Scala 3 enums from Java, see [this test](https://github.com/lampepfl/dotty/tree/main/tests/run/enum-java). +In the test, the enums are defined in the `MainScala.scala` file and used from a Java source, `Test.java`. diff --git a/docs/_spec/APPLIEDreference/new-types/intersection-types.md b/docs/_spec/APPLIEDreference/new-types/intersection-types.md new file mode 100644 index 000000000000..4720649e16a9 --- /dev/null +++ b/docs/_spec/APPLIEDreference/new-types/intersection-types.md @@ -0,0 +1,65 @@ +--- +layout: doc-page +title: "Intersection Types" +nightlyOf: https://docs.scala-lang.org/scala3/reference/new-types/intersection-types.html +--- + +Used on types, the `&` operator creates an intersection type. + +## Type Checking + +The type `S & T` represents values that are of the type `S` and `T` at the same time. + +```scala +trait Resettable: + def reset(): Unit + +trait Growable[T]: + def add(t: T): Unit + +def f(x: Resettable & Growable[String]) = + x.reset() + x.add("first") +``` + +The parameter `x` is required to be _both_ a `Resettable` and a +`Growable[String]`. + +The members of an intersection type `A & B` are all the members of `A` and all +the members of `B`. For instance `Resettable & Growable[String]` +has member methods `reset` and `add`. + +`&` is _commutative_: `A & B` is the same type as `B & A`. + +If a member appears in both `A` and `B`, its type in `A & B` is the intersection +of its type in `A` and its type in `B`. For instance, assume the definitions: + +```scala +trait A: + def children: List[A] + +trait B: + def children: List[B] + +val x: A & B = new C +val ys: List[A & B] = x.children +``` + +The type of `children` in `A & B` is the intersection of `children`'s +type in `A` and its type in `B`, which is `List[A] & List[B]`. This +can be further simplified to `List[A & B]` because `List` is +covariant. + +One might wonder how the compiler could come up with a definition for +`children` of type `List[A & B]` since what is given are `children` +definitions of type `List[A]` and `List[B]`. The answer is the compiler does not +need to. 
`A & B` is just a type that represents a set of requirements for +values of the type. At the point where a value is _constructed_, one +must make sure that all inherited members are correctly defined. +So if one defines a class `C` that inherits `A` and `B`, one needs +to give at that point a definition of a `children` method with the required type. + +```scala +class C extends A, B: + def children: List[A & B] = ??? +``` diff --git a/docs/_spec/APPLIEDreference/new-types/type-lambdas.md b/docs/_spec/APPLIEDreference/new-types/type-lambdas.md new file mode 100644 index 000000000000..4de3b260c0a2 --- /dev/null +++ b/docs/_spec/APPLIEDreference/new-types/type-lambdas.md @@ -0,0 +1,15 @@ +--- +layout: doc-page +title: "Type Lambdas" +nightlyOf: https://docs.scala-lang.org/scala3/reference/new-types/type-lambdas.html +--- + +A _type lambda_ lets one express a higher-kinded type directly, without +a type definition. + +```scala +[X, Y] =>> Map[Y, X] +``` + +For instance, the type above defines a binary type constructor, which maps arguments `X` and `Y` to `Map[Y, X]`. +Type parameters of type lambdas can have bounds, but they cannot carry `+` or `-` variance annotations. diff --git a/docs/_spec/APPLIEDreference/new-types/union-types.md b/docs/_spec/APPLIEDreference/new-types/union-types.md new file mode 100644 index 000000000000..152505d7fc8d --- /dev/null +++ b/docs/_spec/APPLIEDreference/new-types/union-types.md @@ -0,0 +1,77 @@ +--- +layout: doc-page +title: "Union Types" +nightlyOf: https://docs.scala-lang.org/scala3/reference/new-types/union-types.html +--- + +A union type `A | B` has as values all values of type `A` and also all values of type `B`. + + +```scala +case class UserName(name: String) +case class Password(hash: Hash) + +def help(id: UserName | Password) = + val user = id match + case UserName(name) => lookupName(name) + case Password(hash) => lookupPassword(hash) + ... +``` + +Union types are duals of intersection types. `|` is _commutative_: +`A | B` is the same type as `B | A`. + +The compiler will assign a union type to an expression only if such a +type is explicitly given. This can be seen in the following [REPL](https://docs.scala-lang.org/overviews/repl/overview.html) transcript: + +```scala +scala> val password = Password(123) +val password: Password = Password(123) + +scala> val name = UserName("Eve") +val name: UserName = UserName(Eve) + +scala> if true then name else password +val res2: Object = UserName(Eve) + +scala> val either: Password | UserName = if true then name else password +val either: Password | UserName = UserName(Eve) +``` + +The type of `res2` is `Object & Product`, which is a supertype of +`UserName` and `Password`, but not the least supertype `Password | +UserName`. If we want the least supertype, we have to give it +explicitly, as is done for the type of `either`. + +## Type inference + +When inferring the result type of a definition (`val`, `var`, or `def`) and the +type we are about to infer is a union type, then we replace it by its join. +Similarly, when instantiating a type argument, if the corresponding type +parameter is not upper-bounded by a union type and the type we are about to +instantiate is a union type, we replace it by its join. This mirrors the +treatment of singleton types which are also widened to their underlying type +unless explicitly specified. The motivation is the same: inferring types +which are "too precise" can lead to unintuitive typechecking issues later on. 
+
+**Note:** Since this behavior limits the usability of union types, it might
+be changed in the future, for example by not widening unions that have been
+explicitly written down by the user (rather than inferred), or by not widening a type
+argument when the corresponding type parameter is covariant.
+
+See [PR #2330](https://github.com/lampepfl/dotty/pull/2330) and
+[Issue #4867](https://github.com/lampepfl/dotty/issues/4867) for further discussions.
+
+### Example
+
+```scala
+import scala.collection.mutable.ListBuffer
+val x = ListBuffer(Right("foo"), Left(0))
+val y: ListBuffer[Either[Int, String]] = x
+```
+
+This code typechecks because the inferred type argument to `ListBuffer` in the
+right-hand side of `x` was `Left[Int, Nothing] | Right[Nothing, String]`, which
+was widened to `Either[Int, String]`. If the compiler hadn't done this widening,
+the last line wouldn't typecheck because `ListBuffer` is invariant in its
+argument.
diff --git a/docs/_spec/APPLIEDreference/other-new-features/kind-polymorphism.md b/docs/_spec/APPLIEDreference/other-new-features/kind-polymorphism.md
new file mode 100644
index 000000000000..685630b86f73
--- /dev/null
+++ b/docs/_spec/APPLIEDreference/other-new-features/kind-polymorphism.md
@@ -0,0 +1,41 @@
+---
+layout: doc-page
+title: "Kind Polymorphism"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/kind-polymorphism.html
+---
+
+Normally type parameters in Scala are partitioned into _kinds_. First-level types are types of values. Higher-kinded types are type constructors
+such as `List` or `Map`. The kind of a type is indicated by the top type of which it is a subtype. Normal types are subtypes of `Any`,
+covariant single argument type constructors such as `List` are subtypes of `[+X] =>> Any`, and the `Map` type constructor is
+a subtype of `[X, +Y] =>> Any`.
+
+A type can be used only as prescribed by its kind. Subtypes of `Any` cannot be applied to type arguments, whereas subtypes of `[X] =>> Any`
+_must_ be applied to a type argument, unless they are passed to type parameters of the same kind.
+
+Sometimes we would like to have type parameters that can have more than one kind, for instance to define an implicit
+value that works for parameters of any kind. This is now possible through a form of (_subtype_) kind polymorphism.
+Kind polymorphism relies on the special type [`scala.AnyKind`](https://scala-lang.org/api/3.x/scala/AnyKind.html) that can be used as an upper bound of a type.
+
+```scala
+def f[T <: AnyKind] = ...
+```
+
+The actual type arguments of `f` can then be types of arbitrary kinds. So the following would all be legal:
+
+```scala
+f[Int]
+f[List]
+f[Map]
+f[[X] =>> String]
+```
+
+We call type parameters and abstract types with an `AnyKind` upper bound _any-kinded types_.
+Since the actual kind of an any-kinded type is unknown, its usage must be heavily restricted: an any-kinded type
+can be neither the type of a value, nor can it be instantiated with type parameters. So about the only
+thing one can do with an any-kinded type is to pass it to another any-kinded type argument.
+Nevertheless, this is enough to achieve some interesting generalizations that work across kinds, typically
+through advanced uses of implicits.
+
+(todo: insert good concise example)
+
+`AnyKind` plays a special role in Scala's subtype system: It is a supertype of all other types no matter what their kind is. It is also assumed to be kind-compatible with all other types.
+Furthermore, `AnyKind` is treated as a higher-kinded type (so it cannot be used as a type of values), but at the same time it has no type parameters (so it cannot be instantiated).
diff --git a/docs/_spec/APPLIEDreference/other-new-features/trait-parameters.md b/docs/_spec/APPLIEDreference/other-new-features/trait-parameters.md
new file mode 100644
index 000000000000..7924224ddc74
--- /dev/null
+++ b/docs/_spec/APPLIEDreference/other-new-features/trait-parameters.md
@@ -0,0 +1,34 @@
+---
+layout: doc-page
+title: "Trait Parameters"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/trait-parameters.html
+---
+
+Scala 3 enables traits to have parameters, just like classes.
+
+For example, here is a trait `Greeting`:
+```scala
+trait Greeting(val name: String):
+  def msg = s"How are you, $name"
+```
+
+A class, enum, or object can extend `Greeting` as follows:
+
+```scala
+class Greet extends Greeting("Bob"):
+  println(msg)
+```
+
+However, if another trait extends `Greeting`, it must not pass arguments:
+
+```scala
+trait FormalGreeting extends Greeting:
+  override def msg = s"How do you do, $name"
+```
+
+If you want a class to greet Bob formally, then you should extend both `FormalGreeting` and `Greeting`:
+
+```scala
+class GreetFormally extends FormalGreeting, Greeting("Bob"):
+  println(msg)
+```
diff --git a/docs/_spec/Dockerfile b/docs/_spec/Dockerfile
new file mode 100644
index 000000000000..1fc28081c59f
--- /dev/null
+++ b/docs/_spec/Dockerfile
@@ -0,0 +1,26 @@
+FROM ruby:2.7
+
+RUN apt-get install -y curl \
+  && curl -sL https://deb.nodesource.com/setup_18.x | bash - \
+  && apt-get install -y nodejs \
+  && curl -L https://www.npmjs.com/install.sh | sh
+
+RUN gem update --system
+RUN gem install sass-embedded -v 1.58.0
+RUN gem install bundler:1.17.2 jekyll
+
+WORKDIR /srv/jekyll
+
+COPY Gemfile .
+COPY Gemfile.lock .
+ + +RUN echo -n "bundle version: " && bundle --version +RUN bundle install +RUN mkdir /opt/npm-global +RUN npm config set prefix '/opt/npm-global' +RUN npm config set global true +RUN npm install bower +RUN echo -n "npm version: " && npm --version +RUN chmod u+s /bin/chown +RUN date diff --git a/docs/_spec/Gemfile b/docs/_spec/Gemfile new file mode 100644 index 000000000000..bc45dc84db8c --- /dev/null +++ b/docs/_spec/Gemfile @@ -0,0 +1,8 @@ +# To build the spec on Travis CI +source "https://rubygems.org" + +gem "jekyll", "3.6.3" +gem "webrick" +gem "rouge" +# gem 's3_website' +gem "redcarpet", "3.5.1" diff --git a/docs/_spec/Gemfile.lock b/docs/_spec/Gemfile.lock new file mode 100644 index 000000000000..48efd373725e --- /dev/null +++ b/docs/_spec/Gemfile.lock @@ -0,0 +1,57 @@ +GEM + remote: https://rubygems.org/ + specs: + addressable (2.8.1) + public_suffix (>= 2.0.2, < 6.0) + colorator (1.1.0) + ffi (1.15.5) + forwardable-extended (2.6.0) + jekyll (3.6.3) + addressable (~> 2.4) + colorator (~> 1.0) + jekyll-sass-converter (~> 1.0) + jekyll-watch (~> 1.1) + kramdown (~> 1.14) + liquid (~> 4.0) + mercenary (~> 0.3.3) + pathutil (~> 0.9) + rouge (>= 1.7, < 3) + safe_yaml (~> 1.0) + jekyll-sass-converter (1.5.2) + sass (~> 3.4) + jekyll-watch (1.5.1) + listen (~> 3.0) + kramdown (1.17.0) + liquid (4.0.3) + listen (3.7.1) + rb-fsevent (~> 0.10, >= 0.10.3) + rb-inotify (~> 0.9, >= 0.9.10) + mercenary (0.3.6) + pathutil (0.16.2) + forwardable-extended (~> 2.6) + public_suffix (5.0.0) + rb-fsevent (0.11.2) + rb-inotify (0.10.1) + ffi (~> 1.0) + redcarpet (3.5.1) + rouge (2.2.1) + safe_yaml (1.0.5) + sass (3.7.4) + sass-listen (~> 4.0.0) + sass-listen (4.0.0) + rb-fsevent (~> 0.9, >= 0.9.4) + rb-inotify (~> 0.9, >= 0.9.7) + webrick (1.7.0) + +PLATFORMS + ruby + x86_64-linux + +DEPENDENCIES + jekyll (= 3.6.3) + redcarpet (= 3.5.1) + rouge + webrick + +BUNDLED WITH + 2.3.5 diff --git a/docs/_spec/README.md b/docs/_spec/README.md new file mode 100644 index 000000000000..b9eba413f8a2 --- /dev/null +++ b/docs/_spec/README.md @@ -0,0 +1,67 @@ +# WIP Scala 3 Language Specification + +**This is still a work in progress, and should *not* be regarded as a source of truth.** + +First of all, the language specification is meant to be correct, precise and clear. + +Second, editing, previewing and generating output for the markdown should be simple and easy. + +Third, we'd like to support different output formats. An html page per chapter with MathJax seems like a good start, as it satisfies the second requirement, and enables the first one. + +## Editing + +We are using Jekyll and [Redcarpet](https://github.com/vmg/redcarpet) to generate the html. + +Check `Gemfile` for the current versions. + +We aim to track the configuration GitHub Pages uses but differences may arise as GitHub Pages evolves. + +## Building + + +To preview locally, run the following commands in the docs/_spec subfolder: + +``` +env UID="$(id -u)" GID="$(id -g)" docker-compose up +``` + +and open http://0.0.0.0:4000/files/archive/spec/2.13/ to view the spec. Jekyll will rebuild as you edit the markdown, but make sure to restart it when you change `_config.yml`. + + +## General Advice for editors + +- All files must be saved as UTF-8: ensure your editors are configured appropriately. +- Use of the appropriate unicode characters instead of the latex modifiers for accents, etc. is necessary. For example, é instead of `\'e`. 
+- MathJAX errors will appear within the rendered DOM as span elements with class `mtext` and style attribute `color: red` applied. It is possible to search for this combination in the development tools of the browser of your choice. In Chrome, CTRL+F / CMD+F within the inspect element panel allows you to do this.
+
+- This document follows the "one sentence <=> one line" convention, with the following exceptions:
+  - A multiline code block is part of the sentence
+  - An enumeration of links is long enough
+
+- Whenever doing an enumeration of the kind "a, ..., z", observe the following conventions:
+  - It should always be "separator whitespace period period period separator whitespace", for example `, ..., ` or `,\n...,\n` for multiline.
+  - If in a code block, only the elements (a and z above) should be in math mode (between forward ticks)
+  - If in a math expression, the whole thing should be in a single math mode
+  - Look at the [Tuple Types section](docs/_spec/03-types.html#tuple-types) for an example of the different cases above.
+
+- Try to use "Note" blocks to point out logical conclusions that are not obvious; for examples, look at the [Tuple Types section](docs/_spec/03-types.html#tuple-types).
+
+### Macro replacements:
+
+- While MathJAX supports LaTeX-style command definitions, it is recommended not to use them, as they will likely cause issues with preparing the document for PDF or ebook distribution.
+- `\SS` (which I could not find defined within the latex source) seems to be closest to `\mathscr{S}`
+- `\TYPE` is equivalent to `\boldsymbol{type}`
+- MathJAX has no support for slanted font (LaTeX command `\sl`), so in all instances this should be replaced with `\mathit{}`
+- The macro `\U{ABCD}` used for unicode character references can be replaced with `\\uABCD`.
+- The macro `\URange{ABCD}{DCBA}` used for unicode character ranges can be replaced with `\\uABCD-\\uDCBA`.
+- The macro `\commadots` can be replaced with ` , … , ` (but should not be; see the enumeration conventions above).
+- There is no adequate replacement for `\textsc{...}` (small caps) in pandoc markdown. While unicode contains a number of small capital letters, it is notably missing Q and X, as these glyphs are intended for phonetic spelling; therefore these cannot be reliably used. For now, the best option is to use underscore emphasis and capitalise the text manually, `_LIKE THIS_`.
+
+### Unicode Character replacements
+
+- The unicode left and right single quotation marks (‘ and ’ (U+2018 and U+2019, respectively)) have been used in place of ` and ', where the quotation marks are intended to be paired. These can be typed on a mac using Option+] for a left quote and Option+Shift+] for the right quote.
+- Similarly for left and right double quotation marks (“ and ” (U+201C and U+201D, respectively)) in place of ". These can be typed on a mac using Option+[ and Option+Shift+[.
diff --git a/docs/_spec/TODOreference/changed-features/changed-features.md b/docs/_spec/TODOreference/changed-features/changed-features.md
new file mode 100644
index 000000000000..cacdc2598a02
--- /dev/null
+++ b/docs/_spec/TODOreference/changed-features/changed-features.md
@@ -0,0 +1,7 @@
+---
+layout: index
+title: "Other Changed Features"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features.html
+---
+
+The following pages document the features that have changed in Scala 3, compared to Scala 2.
diff --git a/docs/_spec/TODOreference/changed-features/compiler-plugins.md b/docs/_spec/TODOreference/changed-features/compiler-plugins.md
new file mode 100644
index 000000000000..20bdb7f49836
--- /dev/null
+++ b/docs/_spec/TODOreference/changed-features/compiler-plugins.md
@@ -0,0 +1,128 @@
+---
+layout: doc-page
+title: "Changes in Compiler Plugins"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/compiler-plugins.html
+---
+
+Compiler plugins have been supported by Dotty (and Scala 3) since Dotty 0.9. There are two notable changes
+compared to `scalac`:
+
+- No support for analyzer plugins
+- Added support for research plugins
+
+[Analyzer plugins][1] in `scalac` run during type checking and may influence
+normal type checking. This is a very powerful feature, but for production use,
+a predictable and consistent type checker is more important.
+
+For experimentation and research, Scala 3 introduces _research plugins_. Research plugins
+are more powerful than `scalac` analyzer plugins as they let plugin authors customize
+the whole compiler pipeline. One can easily replace the standard typer by a custom one or
+create a parser for a domain-specific language. However, research plugins are only
+enabled for nightly or snapshot releases of Scala 3.
+
+Common plugins that add new phases to the compiler pipeline are called
+_standard plugins_ in Scala 3. In terms of features, they are similar to
+`scalac` plugins, despite minor changes in the API.
+
+## Using Compiler Plugins
+
+Both standard and research plugins can be used with `scalac` by adding the `-Xplugin:` option:
+
+```shell
+scalac -Xplugin:pluginA.jar -Xplugin:pluginB.jar Test.scala
+```
+
+The compiler will examine the jar provided, and look for a property file named
+`plugin.properties` in the root directory of the jar. The property file specifies
+the fully qualified plugin class name. The format of a property file is as follows:
+
+```properties
+pluginClass=dividezero.DivideZero
+```
+
+This is different from `scalac` plugins, which required a `scalac-plugin.xml` file.
+
+Starting from 1.1.5, `sbt` also supports Scala 3 compiler plugins. Please refer to the
+[`sbt` documentation][2] for more information.
+
+## Writing a Standard Compiler Plugin
+
+Here is the source code for a simple compiler plugin that reports integer divisions by
+zero as errors.
+
+```scala
+package dividezero
+
+import dotty.tools.dotc.ast.Trees.*
+import dotty.tools.dotc.ast.tpd
+import dotty.tools.dotc.core.Constants.Constant
+import dotty.tools.dotc.core.Contexts.Context
+import dotty.tools.dotc.core.Decorators.*
+import dotty.tools.dotc.core.StdNames.*
+import dotty.tools.dotc.core.Symbols.*
+import dotty.tools.dotc.plugins.{PluginPhase, StandardPlugin}
+import dotty.tools.dotc.transform.{Pickler, Staging}
+
+class DivideZero extends StandardPlugin:
+  val name: String = "divideZero"
+  override val description: String = "divide zero check"
+
+  def init(options: List[String]): List[PluginPhase] =
+    (new DivideZeroPhase) :: Nil
+
+class DivideZeroPhase extends PluginPhase:
+  import tpd.*
+
+  val phaseName = "divideZero"
+
+  override val runsAfter = Set(Pickler.name)
+  override val runsBefore = Set(Staging.name)
+
+  override def transformApply(tree: Apply)(implicit ctx: Context): Tree =
+    tree match
+      case Apply(Select(rcvr, nme.DIV), List(Literal(Constant(0))))
+      if rcvr.tpe <:< defn.IntType =>
+        report.error("dividing by zero", tree.pos)
+      case _ =>
+        ()
+    tree
+end DivideZeroPhase
+```
+
+The plugin main class (`DivideZero`) must extend the trait `StandardPlugin`
+and implement the method `init` that takes the plugin's options as argument
+and returns a list of `PluginPhase`s to be inserted into the compilation pipeline.
+
+Our plugin adds one compiler phase to the pipeline. A compiler phase must extend
+the `PluginPhase` trait. In order to specify when the phase is executed, we also
+need to specify `runsBefore` and `runsAfter` constraints, which are lists of phase
+names.
+
+We can now transform trees by overriding methods like `transformXXX`.
+
+## Writing a Research Compiler Plugin
+
+Here is a template for research plugins.
+
+```scala
+import dotty.tools.dotc.core.Contexts.Context
+import dotty.tools.dotc.core.Phases.Phase
+import dotty.tools.dotc.plugins.ResearchPlugin
+
+class DummyResearchPlugin extends ResearchPlugin:
+  val name: String = "dummy"
+  override val description: String = "dummy research plugin"
+
+  def init(options: List[String], phases: List[List[Phase]])(implicit ctx: Context): List[List[Phase]] =
+    phases
+end DummyResearchPlugin
+```
+
+A research plugin must extend the trait `ResearchPlugin` and implement the
+method `init` that takes the plugin's options as argument as well as the compiler
+pipeline in the form of a list of compiler phases. The method can replace, remove
+or add any phases to the pipeline and return the updated pipeline.
+
+
+[1]: https://github.com/scala/scala/blob/2.13.x/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
+[2]: https://www.scala-sbt.org/1.x/docs/Compiler-Plugins.html
diff --git a/docs/_spec/TODOreference/changed-features/eta-expansion-spec.md b/docs/_spec/TODOreference/changed-features/eta-expansion-spec.md
new file mode 100644
index 000000000000..a62d45df9e11
--- /dev/null
+++ b/docs/_spec/TODOreference/changed-features/eta-expansion-spec.md
@@ -0,0 +1,77 @@
+---
+layout: doc-page
+title: "Automatic Eta Expansion - More Details"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/eta-expansion-spec.html
+---
+
+## Motivation
+
+Scala maintains a convenient distinction between _methods_ and _functions_.
+Methods are part of the definition of a class and can be invoked on objects, while functions are complete objects themselves, making them first-class entities. For example, they can be assigned to variables.
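+
+To make the distinction concrete, here is a minimal sketch (the method and value names are made up for illustration):
+
+```scala
+def inc(x: Int): Int = x + 1           // a method: part of a template, not itself a value
+val incFn: Int => Int = x => inc(x)    // a function: a first-class object assigned to a `val`
+```
+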
+These two mechanisms are bridged in Scala by a mechanism called
+[_eta-expansion_](https://www.scala-lang.org/files/archive/spec/2.13/06-expressions.html#eta-expansion-section)
+(also called eta-abstraction), which converts a reference to a method into a function. Intuitively, a method `m` can be passed around by turning it into an object: the function `x => m(x)`.
+
+In this snippet which assigns a method to a `val`, the compiler will perform _automatic eta-expansion_, as shown in the comment:
+
+```scala
+def m(x: Int, y: String) = ???
+val f = m // becomes: val f = (x: Int, y: String) => m(x, y)
+```
+
+In Scala 2, a method reference `m` is converted to a function value only if the expected type is a function type, which means the conversion in the example above would not have been triggered, because `val f` does not have a type ascription. To still get eta-expansion, a shortcut `m _` would force the conversion.
+
+For methods with one or more parameters like in the example above, this restriction has now been dropped. The syntax `m _` is no longer needed and will be deprecated in the future.
+
+## Automatic eta-expansion and partial application
+In the following example `m` can be partially applied to the first two parameters.
+Assigning `m` to `f1` will automatically eta-expand.
+
+```scala
+def m(x: Boolean, y: String)(z: Int): List[Int]
+val f1 = m
+val f2 = m(true, "abc")
+```
+
+This creates two function values:
+
+```scala
+f1: (Boolean, String) => Int => List[Int]
+f2: Int => List[Int]
+```
+
+## Automatic eta-expansion and implicit parameter lists
+
+Methods with implicit parameter lists will always get applied to implicit arguments.
+
+```scala
+def foo(x: Int)(implicit p: Double): Float = ???
+implicit val bla: Double = 1.0
+
+val bar = foo // val bar: Int => Float = ...
+```
+
+## Automatic eta-expansion and context types
+
+A method with context parameters can be expanded to a value of a context type by writing the expected context type explicitly.
+
+```scala
+def foo(x: Int)(using p: Double): Float = ???
+val bar: Double ?=> Float = foo(3)
+```
+
+## Rules
+
+- If `m` has an argument list with one or more parameters, we always eta-expand.
+- If `m` has an empty argument list (i.e. has type `()R`):
+  1. If the expected type is of the form `() => T`, we eta-expand.
+  2. If `m` is defined by Java, or overrides a Java-defined method, we insert `()`.
+  3. Otherwise we issue an error.
+
+Thus, an unapplied method with an empty argument list is only converted to a function when a function type is expected. It is considered best practice to either explicitly apply the method to `()`, or convert it to a function with `() => m()`.
+
+The method value syntax `m _` is deprecated.
+
+## Reference
+
+For more information, see [PR #2701](https://github.com/lampepfl/dotty/pull/2701).
diff --git a/docs/_spec/TODOreference/changed-features/eta-expansion.md b/docs/_spec/TODOreference/changed-features/eta-expansion.md
new file mode 100644
index 000000000000..c05378135e54
--- /dev/null
+++ b/docs/_spec/TODOreference/changed-features/eta-expansion.md
@@ -0,0 +1,42 @@
+---
+layout: doc-page
+title: "Automatic Eta Expansion"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/eta-expansion.html
+---
+
+The conversion of _methods_ into _functions_ has been improved and happens automatically for methods with one or more parameters.
+
+```scala
+def m(x: Boolean, y: String)(z: Int): List[Int]
+val f1 = m
+val f2 = m(true, "abc")
+```
+
+This creates two function values:
+```scala
+f1: (Boolean, String) => Int => List[Int]
+f2: Int => List[Int]
+```
+
+The syntax `m _` is no longer needed and will be deprecated in the future.
+
+## Automatic eta-expansion and nullary methods
+
+Automatic eta expansion does not apply to "nullary" methods that take an empty parameter list.
+
+```scala
+def next(): T
+```
+
+A simple reference to `next` does not auto-convert to a function.
+One has to write `() => next()` explicitly to achieve that.
+Once again, since the `_` syntax is going to be deprecated, it's better to write it this way
+rather than `next _`.
+
+The reason for excluding nullary methods from automatic eta expansion
+is that Scala implicitly inserts the `()` argument, which would
+conflict with eta expansion. Automatic `()` insertion is
+[limited](../dropped-features/auto-apply.md) in Scala 3, but the fundamental ambiguity
+remains.
+
+[More details](eta-expansion-spec.md)
diff --git a/docs/_spec/TODOreference/changed-features/implicit-conversions-spec.md b/docs/_spec/TODOreference/changed-features/implicit-conversions-spec.md
new file mode 100644
index 000000000000..dc19e10c8b8f
--- /dev/null
+++ b/docs/_spec/TODOreference/changed-features/implicit-conversions-spec.md
@@ -0,0 +1,117 @@
+---
+layout: doc-page
+title: "Implicit Conversions - More Details"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/implicit-conversions-spec.html
+---
+
+## Implementation
+
+An implicit conversion, or _view_, from type `S` to type `T` is
+defined by either:
+
+- An `implicit def` which has type `S => T` or `(=> S) => T`
+- An implicit value which has type `Conversion[S, T]`
+
+The standard library defines an abstract class [`Conversion`](https://scala-lang.org/api/3.x/scala/Conversion.html):
+
+```scala
+package scala
+@java.lang.FunctionalInterface
+abstract class Conversion[-T, +U] extends Function1[T, U]:
+  def apply(x: T): U
+```
+
+Function literals are automatically converted to `Conversion` values.
+
+Views are applied in three situations:
+
+1. If an expression `e` is of type `T`, and `T` does not conform to
+   the expression's expected type `pt`. In this case, an implicit `v`
+   which is applicable to `e` and whose result type conforms to `pt`
+   is searched. The search proceeds as in the case of implicit
+   parameters, where the implicit scope is the one of `T => pt`. If
+   such a view is found, the expression `e` is converted to `v(e)`.
+1. In a selection `e.m` with `e` of type `T`, if the selector `m` does
+   not denote an accessible member of `T`. In this case, a view `v`
+   which is applicable to `e` and whose result contains an accessible
+   member named `m` is searched. The search proceeds as in the case of
+   implicit parameters, where the implicit scope is the one of `T`. If
+   such a view is found, the selection `e.m` is converted to `v(e).m`.
+1. In an application `e.m(args)` with `e` of type `T`, if the selector
+   `m` denotes some accessible member(s) of `T`, but none of these
+   members is applicable to the arguments `args`. In this case, a view
+   `v` which is applicable to `e` and whose result contains a method
+   `m` which is applicable to `args` is searched. The search proceeds
+   as in the case of implicit parameters, where the implicit scope is
+   the one of `T`. If such a view is found, the application
+   `e.m(args)` is converted to `v(e).m(args)`.
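+
+As a hedged sketch of the first two situations (the `Text` class and its conversion are made-up names, not part of the library):
+
+```scala
+import scala.language.implicitConversions
+
+class Text(val s: String):
+  def words: Array[String] = s.split(" ")
+
+// A view from String to Text; the function literal is converted to a `Conversion` value.
+given Conversion[String, Text] = Text(_)
+
+val t: Text = "hello world"   // situation 1: the expected type `Text` triggers the view
+val ws = "hello world".words  // situation 2: `words` is not a member of String
+```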
+
+## Differences with Scala 2 implicit conversions
+
+In Scala 2, views whose parameters are passed by-value take precedence
+over views whose parameters are passed by-name. This is no longer the
+case in Scala 3. A type error reporting the ambiguous conversions will
+be emitted in cases where this rule would be applied in Scala 2:
+
+```scala
+implicit def conv1(x: Int): String = x.toString
+implicit def conv2(x: => Int): String = x.toString
+
+val x: String = 0 // Compiles in Scala 2 (uses `conv1`),
+                  // type error in Scala 3 because of ambiguity.
+```
+
+In Scala 2, implicit values of a function type would be considered as
+potential views. In Scala 3, these implicit values need to have type
+`Conversion`:
+
+```scala
+// Scala 2:
+def foo(x: Int)(implicit conv: Int => String): String = x
+
+// Becomes with Scala 3:
+def foo(x: Int)(implicit conv: Conversion[Int, String]): String = x
+
+// Call site is unchanged:
+foo(4)(_.toString)
+
+// Scala 2:
+implicit val myConverter: Int => String = _.toString
+
+// Becomes with Scala 3:
+implicit val myConverter: Conversion[Int, String] = _.toString
+```
+
+Note that implicit conversions are also affected by the [changes to implicit resolution](implicit-resolution.md) between Scala 2 and Scala 3.
+
+## Motivation for the changes
+
+The introduction of [`scala.Conversion`](https://scala-lang.org/api/3.x/scala/Conversion.html)
+in Scala 3 and the decision that only implicit values of this type are
+considered as potential views come from the desire to remove surprising
+behavior from the language:
+
+```scala
+implicit val m: Map[Int, String] = Map(1 -> "abc")
+
+val x: String = 1 // Scala 2: assigns "abc" to x
+                  // Scala 3: type error
+```
+
+This snippet contains a type error. The right-hand side of `val x`
+does not conform to type `String`. In Scala 2, the compiler will use
+`m` as an implicit conversion from `Int` to `String`, whereas Scala 3
+will report a type error, because `Map` isn't an instance of
+[`Conversion`](https://scala-lang.org/api/3.x/scala/Conversion.html).
+
+## Migration path
+
+Implicit values that are used as views should see their type changed to `Conversion`.
+
+For the migration of implicit conversions that are affected by the
+changes to implicit resolution, see the [Changes in Implicit Resolution](implicit-resolution.md) page.
+
+## Reference
+
+For more information about implicit resolution, see [Changes in Implicit Resolution](implicit-resolution.md).
+Other details are available in [PR #2065](https://github.com/lampepfl/dotty/pull/2065).
diff --git a/docs/_spec/TODOreference/changed-features/implicit-conversions.md b/docs/_spec/TODOreference/changed-features/implicit-conversions.md
new file mode 100644
index 000000000000..eef236f39a07
--- /dev/null
+++ b/docs/_spec/TODOreference/changed-features/implicit-conversions.md
@@ -0,0 +1,65 @@
+---
+layout: doc-page
+title: "Implicit Conversions"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/implicit-conversions.html
+---
+
+An _implicit conversion_, also called _view_, is a conversion that
+is applied by the compiler in several situations:
+
+1. When an expression `e` of type `T` is encountered, but the compiler
+   needs an expression of type `S`.
+1. When an expression `e.m` where `e` has type `T` but `T` defines no
+   member `m` is encountered.
+ +In those cases, the compiler looks in the implicit scope for a +conversion that can convert an expression of type `T` to an expression +of type `S` (or to a type that defines a member `m` in the second +case). + +This conversion can be either: + +1. An `implicit def` of type `T => S` or `(=> T) => S` +1. An implicit value of type `scala.Conversion[T, S]` + +Defining an implicit conversion will emit a warning unless the import +`scala.language.implicitConversions` is in scope, or the flag +`-language:implicitConversions` is given to the compiler. + +## Examples + +The first example is taken from [`scala.Predef`](https://scala-lang.org/api/3.x/scala/Predef$.html). +Thanks to this implicit conversion, it is possible to pass a +[`scala.Int`](https://scala-lang.org/api/3.x/scala/Int.html) +to a Java method that expects a `java.lang.Integer` + +```scala +import scala.language.implicitConversions +implicit def int2Integer(x: Int): java.lang.Integer = + x.asInstanceOf[java.lang.Integer] +``` + +The second example shows how to use `Conversion` to define an +`Ordering` for an arbitrary type, given existing `Ordering`s for other +types: + +```scala +import scala.language.implicitConversions +implicit def ordT[T, S]( + implicit conv: Conversion[T, S], + ordS: Ordering[S] + ): Ordering[T] = + // `ordS` compares values of type `S`, but we can convert from `T` to `S` + (x: T, y: T) => ordS.compare(x, y) + +class A(val x: Int) // The type for which we want an `Ordering` + +// Convert `A` to a type for which an `Ordering` is available: +implicit val AToInt: Conversion[A, Int] = _.x + +implicitly[Ordering[Int]] // Ok, exists in the standard library +implicitly[Ordering[A]] // Ok, will use the implicit conversion from + // `A` to `Int` and the `Ordering` for `Int`. +``` + +[More details](implicit-conversions-spec.md) diff --git a/docs/_spec/TODOreference/changed-features/implicit-resolution.md b/docs/_spec/TODOreference/changed-features/implicit-resolution.md new file mode 100644 index 000000000000..bf15baa3299c --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/implicit-resolution.md @@ -0,0 +1,169 @@ +--- +layout: doc-page +title: "Changes in Implicit Resolution" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/implicit-resolution.html +--- + +This section describes changes to the implicit resolution that apply both to the new `given`s and to the old-style `implicit`s in Scala 3. +Implicit resolution uses a new algorithm which caches implicit results +more aggressively for performance. There are also some changes that +affect implicits on the language level. + +**1.** Types of implicit values and result types of implicit methods +must be explicitly declared. Excepted are only values in local blocks +where the type may still be inferred: +```scala + class C { + + val ctx: Context = ... // ok + + /*!*/ implicit val x = ... // error: type must be given explicitly + + /*!*/ implicit def y = ... // error: type must be given explicitly + } + val y = { + implicit val ctx = this.ctx // ok + ... + } +``` +**2.** Nesting is now taken into account for selecting an implicit. Consider for instance the following scenario: +```scala + def f(implicit i: C) = { + def g(implicit j: C) = { + implicitly[C] + } + } +``` +This will now resolve the `implicitly` call to `j`, because `j` is nested +more deeply than `i`. Previously, this would have resulted in an +ambiguity error. 
The previous possibility of an implicit search failure
+due to _shadowing_ (where an implicit is hidden by a nested definition)
+no longer applies.
+
+**3.** Package prefixes no longer contribute to the implicit search scope of a type. Example:
+```scala
+  package p
+
+  given a: A = A()
+
+  object o:
+    given b: B = B()
+    type C
+```
+Both `a` and `b` are visible as implicits at the point of the definition
+of `type C`. However, a reference to `p.o.C` outside of package `p` will
+have only `b` in its implicit search scope but not `a`.
+
+In more detail, here are the rules for what constitutes the implicit scope of
+a type:
+
+**Definition:** A reference is an _anchor_ if it refers to an object, a class, a trait, an abstract type, an opaque type alias, or a match type alias. References to packages and package objects are anchors only under `-source:3.0-migration`.
+Opaque type aliases count as anchors only outside the scope where their alias is visible.
+
+**Definition:** The _anchors_ of a type _T_ form a set of references defined as follows:
+
+ 1. If _T_ is a reference to an anchor, _T_ itself plus, if _T_ is of the form _P#A_, the anchors of _P_.
+ 1. If _T_ is an alias of _U_, the anchors of _U_.
+ 1. If _T_ is a reference to a type parameter, the union of the anchors of both of its bounds.
+ 1. If _T_ is a singleton reference, the anchors of its underlying type, plus,
+ if _T_ is of the form _(P#x).type_, the anchors of _P_.
+ 1. If _T_ is the this-type _o.this_ of a static object _o_, the anchors of a term reference _o.type_ to that object.
+ 1. If _T_ is some other type, the union of the anchors of each constituent type of _T_.
+
+**Definition:** The _implicit scope_ of a type _T_ is the smallest set _S_ of term references such that
+
+ 1. If _T_ is a reference to a class, _S_ includes a reference to the companion object
+ of the class, if it exists, as well as the implicit scopes of all of _T_'s parent classes.
+ 1. If _T_ is a reference to an object, _S_ includes _T_ itself as well as
+ the implicit scopes of all of _T_'s parent classes.
+ 1. If _T_ is a reference to an opaque type alias named _A_, _S_ includes
+ a reference to an object _A_ defined in the same scope as the type, if it exists,
+ as well as the implicit scope of _T_'s underlying type or bounds.
+ 1. If _T_ is a reference to an abstract type or match type alias
+ named _A_, _S_ includes a reference to an object _A_ defined in the same scope as the type, if it exists, as well as the implicit scopes of _T_'s given bounds.
+ 1. If _T_ is a reference to an anchor of the form _p.A_ then _S_ also includes
+ all term references on the path _p_.
+ 1. If _T_ is some other type, _S_ includes the implicit scopes of all anchors of _T_.
+
+
+**4.** The treatment of ambiguity errors has changed. If an ambiguity is encountered in some recursive step of an implicit search, the ambiguity is propagated to the caller.
+
+Example: Say you have the following definitions:
+```scala
+  class A
+  class B extends C
+  class C
+  implicit def a1: A
+  implicit def a2: A
+  implicit def b(implicit a: A): B
+  implicit def c: C
+```
+and the query `implicitly[C]`.
+
+This query would now be classified as ambiguous. This makes sense: after all,
+there are two possible solutions, `b(a1)` and `b(a2)`, neither of which is better
+than the other and both of which are better than the third solution, `c`.
+By contrast, Scala 2 would have rejected the search for `A` as +ambiguous, and subsequently have classified the query `b(implicitly[A])` as a normal fail, +which means that the alternative `c` would be chosen as solution! + +Scala 2's somewhat puzzling behavior with respect to ambiguity has been exploited to implement +the analogue of a "negated" search in implicit resolution, where a query `Q1` fails if some +other query `Q2` succeeds and `Q1` succeeds if `Q2` fails. With the new cleaned up behavior +these techniques no longer work. But there is now a new special type [`scala.util.NotGiven`](https://scala-lang.org/api/3.x/scala/util/NotGiven.html) +which implements negation directly. For any query type `Q`, `NotGiven[Q]` succeeds if and only if +the implicit search for `Q` fails. + +**5.** The treatment of divergence errors has also changed. A divergent implicit is treated as a normal failure, after which alternatives are still tried. This also makes sense: Encountering a divergent implicit means that we assume that no finite solution can be found on the corresponding path, but another path can still be tried. By contrast, +most (but not all) divergence errors in Scala 2 would terminate the implicit search as a whole. + +**6.** Scala 2 gives a lower level of priority to implicit conversions with call-by-name parameters relative to implicit conversions with call-by-value parameters. Scala 3 drops this distinction. So the following code snippet would be ambiguous in Scala 3: + +```scala + implicit def conv1(x: Int): A = new A(x) + implicit def conv2(x: => Int): A = new A(x) + def buzz(y: A) = ??? + buzz(1) // error: ambiguous +``` +**7.** The rule for picking a _most specific_ alternative among a set of overloaded or implicit alternatives is refined to take context parameters into account. All else being equal, an alternative that takes some context parameters is taken to be less specific than an alternative that takes none. If both alternatives take context parameters, we try to choose between them as if they were methods with regular parameters. The following paragraph in the [SLS §6.26.3](https://scala-lang.org/files/archive/spec/2.13/06-expressions.html#overloading-resolution) is affected by this change: + +_Original version:_ + +> An alternative A is _more specific_ than an alternative B if the relative weight of A over B is greater than the relative weight of B over A. + +_Modified version:_ + +An alternative A is _more specific_ than an alternative B if + + - the relative weight of A over B is greater than the relative weight of B over A, or + - the relative weights are the same, and A takes no implicit parameters but B does, or + - the relative weights are the same, both A and B take implicit parameters, and A is more specific than B if all implicit parameters in either alternative are replaced by regular parameters. + +**8.** The previous disambiguation of implicits based on inheritance depth is refined to make it transitive. Transitivity is important to guarantee that search outcomes are compilation-order independent. Here's a scenario where the previous rules violated transitivity: +```scala + class A extends B + object A { given a ... } + class B + object B extends C { given b ... } + class C { given c } +``` + Here `a` is more specific than `b` since the companion class `A` is a subclass of the companion class `B`. Also, `b` is more specific than `c` + since `object B` extends class `C`. But `a` is not more specific than `c`. 
This means that if `a`, `b`, `c` are all applicable implicits, it makes
+ a difference in what order they are compared. If we compare `b` and `c`
+ first, we keep `b` and drop `c`. Then, comparing `a` with `b` we keep `a`. But if we compare `a` with `c` first, we fail with an ambiguity error.
+
+The new rules are as follows: An implicit `a` defined in `A` is more specific than an implicit `b` defined in `B` if
+
+ - `A` extends `B`, or
+ - `A` is an object and the companion class of `A` extends `B`, or
+ - `A` and `B` are objects,
+   `B` does not inherit any implicit members from base classes (*),
+   and the companion class of `A` extends the companion class of `B`.
+
+Condition (*) is new. It is necessary to ensure that the defined relation is transitive.
+
+
+
+
+
+[//]: # todo: expand with precise rules
diff --git a/docs/_spec/TODOreference/changed-features/imports.md b/docs/_spec/TODOreference/changed-features/imports.md
new file mode 100644
index 000000000000..2058ef08b7db
--- /dev/null
+++ b/docs/_spec/TODOreference/changed-features/imports.md
@@ -0,0 +1,60 @@
+---
+layout: doc-page
+title: "Imports"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/imports.html
+---
+
+The syntax of wildcard and renaming imports (and exports) has changed.
+
+## Wildcard Imports
+
+Wildcard imports are now expressed with `*` instead of underscore. Example:
+```scala
+import scala.annotation.* // imports everything in the annotation package
+```
+
+If you want to import a member named `*` specifically, you can use backticks around it.
+
+```scala
+object A:
+  def * = ...
+  def min = ...
+
+object B:
+  import A.`*` // imports just `*`
+
+object C:
+  import A.* // imports everything in A
+```
+
+## Renaming Imports
+
+To rename or exclude an import, we now use `as` instead of `=>`. A single renaming import no longer needs to be enclosed in braces. Examples:
+
+```scala
+import A.{min as minimum, `*` as multiply}
+import Predef.{augmentString as _, *} // imports everything except augmentString
+import scala.annotation as ann
+import java as j
+```
+
+## Migration
+
+To support cross-building, Scala 3.0 supports the old import syntax with `_` for wildcards and `=>` for renamings in addition to the new one. The old syntax
+will be dropped in a future version. Automatic rewritings from old to new syntax
+are offered under settings `-source 3.1-migration -rewrite`.
+
+## Syntax
+
+```
+Import ::= ‘import’ ImportExpr {‘,’ ImportExpr}
+ImportExpr ::= SimpleRef {‘.’ id} ‘.’ ImportSpec
+ | SimpleRef `as` id
+ImportSpec ::= NamedSelector
+ | WildCardSelector
+ | ‘{’ ImportSelectors ‘}’
+NamedSelector ::= id [‘as’ (id | ‘_’)]
+WildCardSelector ::= ‘*’ | ‘given’ [InfixType]
+ImportSelectors ::= NamedSelector [‘,’ ImportSelectors]
+ | WildCardSelector {‘,’ WildCardSelector}
+```
diff --git a/docs/_spec/TODOreference/changed-features/interpolation-escapes.md b/docs/_spec/TODOreference/changed-features/interpolation-escapes.md
new file mode 100644
index 000000000000..594e7671c5ab
--- /dev/null
+++ b/docs/_spec/TODOreference/changed-features/interpolation-escapes.md
@@ -0,0 +1,14 @@
+---
+layout: doc-page
+title: "Escapes in interpolations"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/interpolation-escapes.html
+---
+
+In Scala 2 there is no straightforward way to represent a quote character `"` in a single-quoted interpolation.
A `\` character can't be used for that because interpolators themselves decide how to handle escaping, so the parser doesn't know whether the `"` character should be escaped or used as a terminator.
+
+In Scala 3, we can use the `$` meta character of interpolations to escape a `"` character. Example:
+
+```scala
+ val inventor = "Thomas Edison"
+ val interpolation = s"as $inventor said: $"The three great essentials to achieve anything worth while are: Hard work, Stick-to-itiveness, and Common sense.$""
+```
diff --git a/docs/_spec/TODOreference/changed-features/lazy-vals-init.md b/docs/_spec/TODOreference/changed-features/lazy-vals-init.md
new file mode 100644
index 000000000000..131ac6ad7bb2
--- /dev/null
+++ b/docs/_spec/TODOreference/changed-features/lazy-vals-init.md
@@ -0,0 +1,80 @@
+---
+layout: doc-page
+title: Lazy Vals Initialization
+nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/lazy-vals-init.html
+---
+
+Scala 3 implements [Version 6](https://docs.scala-lang.org/sips/improved-lazy-val-initialization.html#version-6---no-synchronization-on-this-and-concurrent-initialization-of-fields)
+of the [SIP-20] improved lazy vals initialization proposal.
+
+## Motivation
+
+The newly proposed lazy val initialization mechanism aims to eliminate the acquisition of resources
+during the execution of the lazy val initializer block, thus reducing the possibility of a deadlock.
+The concrete deadlock scenarios that the new lazy val initialization scheme eliminates are
+summarized in the [SIP-20] document.
+
+## Implementation
+
+Given a lazy field of the form:
+
+```scala
+class Foo {
+  lazy val bar = <RHS>
+}
+```
+
+The Scala 3 compiler will generate code equivalent to:
+
+```scala
+class Foo {
+  import scala.runtime.LazyVals
+  var value_0: Int = _
+  var bitmap: Long = 0L
+  val bitmap_offset: Long = LazyVals.getOffset(classOf[LazyCell], "bitmap")
+
+  def bar(): Int = {
+    while (true) {
+      val flag = LazyVals.get(this, bitmap_offset)
+      val state = LazyVals.STATE(flag, <bar id>)
+
+      if (state == <state-3>) {
+        return value_0
+      } else if (state == <state-0>) {
+        if (LazyVals.CAS(this, bitmap_offset, flag, <state-1>, <bar id>)) {
+          try {
+            val result = <RHS>
+            value_0 = result
+            LazyVals.setFlag(this, bitmap_offset, <state-3>, <bar id>)
+            return result
+          }
+          catch {
+            case ex =>
+              LazyVals.setFlag(this, bitmap_offset, <state-0>, <bar id>)
+              throw ex
+          }
+        }
+      } else /* if (state == <state-1> || state == <state-2>) */ {
+        LazyVals.wait4Notification(this, bitmap_offset, flag, <bar id>)
+      }
+    }
+  }
+}
+```
+
+The state of the lazy val `<bar id>` is represented with 4 values: 0, 1, 2 and 3. The state 0
+represents a non-initialized lazy val. The state 1 represents a lazy val that is currently being
+initialized by some thread. The state 2 denotes that there are concurrent readers of the lazy val.
+The state 3 represents a lazy val that has been initialized. `<bar id>` is the id of the lazy
+val. This id grows with the number of volatile lazy vals defined in the class.
+
+## Note on recursive lazy vals
+
+Ideally recursive lazy vals should be flagged as an error. The current behavior for
+recursive lazy vals is undefined (initialization may result in a deadlock).
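+
+As a hedged illustration of the shape of code this note warns about (the object and field names are made up):
+
+```scala
+// Two lazy vals whose initializers depend on each other.
+// Forcing either one has undefined behavior under the current scheme:
+// it may loop, overflow the stack, or deadlock. Do not rely on any outcome.
+object Cycle:
+  lazy val a: Int = b + 1
+  lazy val b: Int = a + 1
+```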
+
+## Reference
+
+* [SIP-20]
+
+[SIP-20]: https://docs.scala-lang.org/sips/improved-lazy-val-initialization.html
diff --git a/docs/_spec/TODOreference/changed-features/main-functions.md b/docs/_spec/TODOreference/changed-features/main-functions.md
new file mode 100644
index 000000000000..4460300d003e
--- /dev/null
+++ b/docs/_spec/TODOreference/changed-features/main-functions.md
@@ -0,0 +1,87 @@
+---
+layout: doc-page
+title: "Main Methods"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/main-functions.html
+---
+
+Scala 3 offers a new way to define programs that can be invoked from the command line:
+A [`@main`](https://scala-lang.org/api/3.x/scala/main.html) annotation on a method turns this method into an executable program.
+Example:
+
+```scala
+@main def happyBirthday(age: Int, name: String, others: String*) =
+  val suffix =
+    age % 100 match
+      case 11 | 12 | 13 => "th"
+      case _ =>
+        age % 10 match
+          case 1 => "st"
+          case 2 => "nd"
+          case 3 => "rd"
+          case _ => "th"
+  val bldr = new StringBuilder(s"Happy $age$suffix birthday, $name")
+  for other <- others do bldr.append(" and ").append(other)
+  println(bldr)
+```
+
+This would generate a main program `happyBirthday` that could be called like this
+
+```
+> scala happyBirthday 23 Lisa Peter
+Happy 23rd birthday, Lisa and Peter
+```
+
+A [`@main`](https://scala-lang.org/api/3.x/scala/main.html) annotated method can be written either at the top-level or in a statically accessible object. The name of the program is in each case the name of the method, without any object prefixes. The [`@main`](https://scala-lang.org/api/3.x/scala/main.html) method can have an arbitrary number of parameters.
+For each parameter type there must be an instance of the [`scala.util.CommandLineParser.FromString[T]`](https://scala-lang.org/api/3.x/scala/util/CommandLineParser$$FromString.html) type class that is used to convert an argument string to the required parameter type `T`.
+The parameter list of a main method can end in a repeated parameter that then takes all remaining arguments given on the command line.
+
+The program implemented from a [`@main`](https://scala-lang.org/api/3.x/scala/main.html) method checks that there are enough arguments on
+the command line to fill in all parameters, and that argument strings are convertible to
+the required types. If a check fails, the program is terminated with an error message.
+
+Examples:
+
+```
+> scala happyBirthday 22
+Illegal command line after first argument: more arguments expected
+
+> scala happyBirthday sixty Fred
+Illegal command line: java.lang.NumberFormatException: For input string: "sixty"
+```
+
+The Scala compiler generates a program from a [`@main`](https://scala-lang.org/api/3.x/scala/main.html) method `f` as follows:
+
+ - It creates a class named `f` in the package where the [`@main`](https://scala-lang.org/api/3.x/scala/main.html) method was found.
+ - The class has a static method `main` with the usual signature. It takes an `Array[String]`
+   as argument and returns [`Unit`](https://scala-lang.org/api/3.x/scala/Unit.html).
+ - The generated `main` method calls method `f` with arguments converted using
+   methods in the [`scala.util.CommandLineParser`](https://scala-lang.org/api/3.x/scala/util/CommandLineParser$.html) object.
+For instance, the `happyBirthday` method above would generate additional code equivalent to the following class:
+
+```scala
+final class happyBirthday:
+  import scala.util.CommandLineParser as CLP
+  <static> def main(args: Array[String]): Unit =
+    try
+      happyBirthday(
+        CLP.parseArgument[Int](args, 0),
+        CLP.parseArgument[String](args, 1),
+        CLP.parseRemainingArguments[String](args, 2))
+    catch
+      case error: CLP.ParseError => CLP.showError(error)
+```
+
+**Note**: The `<static>` modifier above expresses that the `main` method is generated
+as a static method of class `happyBirthday`. It is not available for user programs in Scala. Regular "static" members are generated in Scala using objects instead.
+
+[`@main`](https://scala-lang.org/api/3.x/scala/main.html) methods are the recommended scheme to generate programs that can be invoked from the command line in Scala 3. They replace the previous scheme of writing programs as objects with a special `App` parent class. In Scala 2, `happyBirthday` could be written also like this:
+
+```scala
+object happyBirthday extends App:
+  // needs by-hand parsing of arguments vector
+  ...
+```
+
+The previous functionality of [`App`](https://www.scala-lang.org/api/3.x/scala/App.html), which relied on the "magic" [`DelayedInit`](../dropped-features/delayed-init.md) trait, is no longer available. [`App`](https://scala-lang.org/api/3.x/scala/App.html) still exists in limited form for now, but it does not support command line arguments and will be deprecated in the future. If programs need to cross-build
+between Scala 2 and Scala 3, it is recommended to use an explicit `main` method with an `Array[String]` argument instead.
diff --git a/docs/_spec/TODOreference/changed-features/match-syntax.md b/docs/_spec/TODOreference/changed-features/match-syntax.md
new file mode 100644
index 000000000000..dba50e9beb6a
--- /dev/null
+++ b/docs/_spec/TODOreference/changed-features/match-syntax.md
@@ -0,0 +1,56 @@
+---
+layout: doc-page
+title: "Match Expressions"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/match-syntax.html
+---
+
+The syntactical precedence of match expressions has been changed.
+`match` is still a keyword, but it is used like an alphabetical operator. This has several consequences:
+
+ 1. `match` expressions can be chained:
+
+    ```scala
+    xs match {
+      case Nil => "empty"
+      case _ => "nonempty"
+    } match {
+      case "empty" => 0
+      case "nonempty" => 1
+    }
+    ```
+
+    (or, dropping the optional braces)
+
+    ```scala
+    xs match
+      case Nil => "empty"
+      case _ => "nonempty"
+    match
+      case "empty" => 0
+      case "nonempty" => 1
+    ```
+
+ 2. `match` may follow a period:
+
+    ```scala
+    if xs.match
+      case Nil => false
+      case _ => true
+    then "nonempty"
+    else "empty"
+    ```
+
+ 3. The scrutinee of a match expression must be an `InfixExpr`. Previously the scrutinee could be followed by a type ascription `: T`, but this is no longer supported. So `x : T match { ... }` now has to be
+    written `(x: T) match { ... }`.
+
+## Syntax
+
+The new syntax of match expressions is as follows.
+
+```
+InfixExpr ::= ...
+ | InfixExpr MatchClause
+SimpleExpr ::= ...
+ | SimpleExpr ‘.’ MatchClause +MatchClause ::= ‘match’ ‘{’ CaseClauses ‘}’ +``` diff --git a/docs/_spec/TODOreference/changed-features/numeric-literals.md b/docs/_spec/TODOreference/changed-features/numeric-literals.md new file mode 100644 index 000000000000..bba837dbf67d --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/numeric-literals.md @@ -0,0 +1,7 @@ +--- +layout: doc-page +title: "Numeric Literals" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/numeric-literals.html +--- + +[Document was moved](../experimental/numeric-literals.md) diff --git a/docs/_spec/TODOreference/changed-features/operators.md b/docs/_spec/TODOreference/changed-features/operators.md new file mode 100644 index 000000000000..0cf25d77bc11 --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/operators.md @@ -0,0 +1,173 @@ +--- +layout: doc-page +title: "Rules for Operators" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/operators.html +--- + +The rules for infix operators have changed in some parts: + +First, an alphanumeric method can be used as an infix operator only if its definition carries an `infix` modifier. + +Second, it is recommended (but not enforced) to augment definitions of symbolic operators +with [`@targetName` annotations](../other-new-features/targetName.md). + +Finally, a syntax change allows infix operators to be written on the left in a multi-line expression. + +## The `infix` Modifier + +An `infix` modifier on a method definition allows using the method as an infix operation. Example: + +```scala +import scala.annotation.targetName + +trait MultiSet[T]: + + infix def union(other: MultiSet[T]): MultiSet[T] + + def difference(other: MultiSet[T]): MultiSet[T] + + @targetName("intersection") + def *(other: MultiSet[T]): MultiSet[T] + +end MultiSet + +val s1, s2: MultiSet[Int] + +s1 union s2 // OK +s1 `union` s2 // also OK but unusual +s1.union(s2) // also OK + +s1.difference(s2) // OK +s1 `difference` s2 // OK +s1 difference s2 // gives a deprecation warning + +s1 * s2 // OK +s1 `*` s2 // also OK, but unusual +s1.*(s2) // also OK, but unusual +``` + +Infix operations involving alphanumeric operators are deprecated, unless +one of the following conditions holds: + + - the operator definition carries an `infix` modifier, or + - the operator was compiled with Scala 2, or + - the operator is followed by an opening brace. + +An alphanumeric operator is an operator consisting entirely of letters, digits, the `$` and `_` characters, or +any Unicode character `c` for which `java.lang.Character.isIdentifierPart(c)` returns `true`. + +Infix operations involving symbolic operators are always allowed, so `infix` is redundant for methods with symbolic names. + +The `infix` modifier can also be given to a type: + +```scala +infix type or[X, Y] +val x: String or Int = ... +``` + +### Motivation + +The purpose of the `infix` modifier is to achieve consistency across a code base in how a method or type is applied. The idea is that the author of a method decides whether that method should be applied as an infix operator or in a regular application. Use sites then implement that decision consistently. + +### Details + + 1. `infix` is a soft modifier. It is treated as a normal identifier except when in modifier position. + + 2. If a method overrides another, their infix annotations must agree. Either both are annotated with `infix`, or none of them are. + + 3. `infix` modifiers can be given to method definitions. 
The first non-receiver parameter list of an `infix` method must define exactly one parameter. Examples: + + ```scala + infix def op1(x: S): R // ok + infix def op2[T](x: T)(y: S): R // ok + infix def op3[T](x: T, y: S): R // error: two parameters + + extension (x: A) + infix def op4(y: B): R // ok + infix def op5(y1: B, y2: B): R // error: two parameters + ``` + + 4. `infix` modifiers can also be given to type, trait or class definitions that have exactly two type parameters. An infix type like + + ```scala + infix type op[X, Y] + ``` + + can be applied using infix syntax, i.e. `A op B`. + + 5. To smooth migration to Scala 3.0, alphanumeric operators will only be deprecated from Scala 3.1 onwards, +or if the `-source future` option is given in Dotty/Scala 3. + +## The `@targetName` Annotation + +It is recommended that definitions of symbolic operators carry a [`@targetName` annotation](../other-new-features/targetName.md) that provides an encoding of the operator with an alphanumeric name. This has several benefits: + + - It helps interoperability between Scala and other languages. One can call + a Scala-defined symbolic operator from another language using its target name, + which avoids having to remember the low-level encoding of the symbolic name. + - It helps legibility of stacktraces and other runtime diagnostics, where the + user-defined alphanumeric name will be shown instead of the low-level encoding. + - It serves as a documentation tool by providing an alternative regular name + as an alias of a symbolic operator. This makes the definition also easier + to find in a search. + +## Syntax Change + +Infix operators can now appear at the start of lines in a multi-line expression. Examples: + +```scala +val str = "hello" + ++ " world" + ++ "!" + +def condition = + x > 0 + || + xs.exists(_ > 0) + || xs.isEmpty +``` + +Previously, those expressions would have been rejected, since the compiler's semicolon inference +would have treated the continuations `++ " world"` or `|| xs.isEmpty` as separate statements. + +To make this syntax work, the rules are modified to not infer semicolons in front of leading infix operators. +A _leading infix operator_ is + - a symbolic identifier such as `+`, or `approx_==`, or an identifier in backticks that + - starts a new line, and + - is not following a blank line, and + - is followed by at least one whitespace character and a token that can start an expression. + - Furthermore, if the operator appears on its own line, the next line must have at least + the same indentation width as the operator. + +Example: + +```scala + freezing + | boiling +``` + +This is recognized as a single infix operation. Compare with: + +```scala + freezing + !boiling +``` + +This is seen as two statements, `freezing` and `!boiling`. The difference is that only the operator in the first example +is followed by a space. + +Another example: + +```scala + println("hello") + ??? + ??? match { case 0 => 1 } +``` + +This code is recognized as three different statements. `???` is syntactically a symbolic identifier, but +neither of its occurrences is followed by a space and a token that can start an expression. + +## Unary operators + +A unary operator must not have explicit parameter lists even if they are empty. +A unary operator is a method named "unary_`op`" where `op` is one of `+`, `-`, `!`, or `~`. 
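+
+For illustration, here is a minimal sketch of this rule (the `Celsius` class is made up for the example):
+
+```scala
+case class Celsius(degrees: Double):
+  // ok: a unary operator is declared without any parameter list
+  def unary_- : Celsius = Celsius(-degrees)
+  // error: would not compile with an explicit (empty) parameter list
+  // def unary_-(): Celsius = Celsius(-degrees)
+
+val t = -Celsius(4.2)  // calls Celsius(4.2).unary_-
+```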
diff --git a/docs/_spec/TODOreference/changed-features/overload-resolution.md b/docs/_spec/TODOreference/changed-features/overload-resolution.md
new file mode 100644
index 000000000000..621515c2a7f8
--- /dev/null
+++ b/docs/_spec/TODOreference/changed-features/overload-resolution.md
@@ -0,0 +1,102 @@
+---
+layout: doc-page
+title: "Changes in Overload Resolution"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/overload-resolution.html
+---
+
+Overload resolution in Scala 3 improves on Scala 2 in three ways.
+First, it takes all argument lists into account instead of
+just the first argument list.
+Second, it can infer parameter types of function values even if they
+are in the first argument list.
+Third, default arguments are no longer relevant for prioritization.
+
+## Looking Beyond the First Argument List
+
+Overloading resolution can now take argument lists beyond the first one into account when
+choosing among a set of overloaded alternatives.
+For example, the following code compiles in Scala 3, while it results in an
+ambiguous overload error in Scala 2:
+
+```scala
+def f(x: Int)(y: String): Int = 0
+def f(x: Int)(y: Int): Int = 0
+
+f(3)("")     // ok
+```
+
+The following code compiles as well:
+
+```scala
+def g(x: Int)(y: Int)(z: Int): Int = 0
+def g(x: Int)(y: Int)(z: String): Int = 0
+
+g(2)(3)(4)   // ok
+g(2)(3)("")  // ok
+```
+
+To make this work, the rules for overloading resolution in [SLS §6.26.3](https://www.scala-lang.org/files/archive/spec/2.13/06-expressions.html#overloading-resolution) are augmented
+as follows:
+
+> In a situation where a function is applied to more than one argument list, if overloading
+resolution yields several competing alternatives when `n >= 1` parameter lists are taken
+into account, then resolution is re-tried using `n + 1` argument lists.
+
+This change is motivated by the new language feature
+[extension methods](../contextual/extension-methods.md), where the need emerges to do
+overload resolution based on additional argument blocks.
+
+## Parameter Types of Function Values
+
+The handling of function values with missing parameter types has been improved. We can now
+pass such values in the first argument list of an overloaded application, provided
+that the remaining parameters suffice for picking a variant of the overloaded function.
+For example, the following code compiles in Scala 3, while it results in a
+missing parameter type error in Scala 2:
+
+```scala
+def f(x: Int, f2: Int => Int) = f2(x)
+def f(x: String, f2: String => String) = f2(x)
+f("a", _.toUpperCase)
+f(2, _ * 2)
+```
+
+To make this work, the rules for overloading resolution in [SLS §6.26.3](https://www.scala-lang.org/files/archive/spec/2.13/06-expressions.html#overloading-resolution) are modified
+as follows:
+
+Replace the sentence
+
+> Otherwise, let `S1,...,Sm` be the vector of types obtained by typing each argument with an undefined expected type.
+
+with the following paragraph:
+
+> Otherwise, let `S1,...,Sm` be the vector of known types of all arguments, where the _known type_ of an argument `E`
+is determined as follows:
+
+ - If `E` is a function value `(p_1, ..., p_n) => B` that misses some parameter types, the known type
+   of `E` is `(S_1, ..., S_n) => ?`, where each `S_i` is the type of parameter `p_i` if it is given, or `?`
+   otherwise. Here `?` stands for a _wildcard type_ that is compatible with every other type.
+ - Otherwise the known type of `E` is the result of typing `E` with an undefined expected type.
+
+A pattern matching closure
+
+```scala
+{ case P1 => B1 ... case P_n => B_n }
+```
+
+is treated as if it were expanded to the function value
+
+```scala
+x => x match { case P1 => B1 ... case P_n => B_n }
+```
+
+and is therefore also approximated with a `? => ?` type.
+
+## Default Arguments Are No Longer Relevant for Prioritization
+
+In Scala 2, if among several applicable alternatives one alternative had default arguments, that alternative was dropped from consideration. This had the unfortunate
+side effect that adding a default to a parameter of a method could render the method
+invisible in overloaded calls.
+
+Scala 3 drops this distinction. Methods with default parameters are no longer given
+lower priority than other methods.
diff --git a/docs/_spec/TODOreference/changed-features/pattern-bindings.md b/docs/_spec/TODOreference/changed-features/pattern-bindings.md
new file mode 100644
index 000000000000..2de338fc1dde
--- /dev/null
+++ b/docs/_spec/TODOreference/changed-features/pattern-bindings.md
@@ -0,0 +1,59 @@
+---
+layout: doc-page
+title: "Pattern Bindings"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/pattern-bindings.html
+---
+
+In Scala 2, pattern bindings in `val` definitions and `for` expressions are
+loosely typed. Potentially failing matches are still accepted at compile-time,
+but may influence the program's runtime behavior.
+From Scala 3.2 on, type checking rules will be tightened so that warnings are reported at compile-time instead.
+
+## Bindings in Pattern Definitions
+
+```scala
+val xs: List[Any] = List(1, 2, 3)
+val (x: String) :: _ = xs   // error: pattern's type String is more specialized
+                            // than the right-hand side expression's type Any
+```
+This code gives a compile-time warning in Scala 3.2 (and also in earlier Scala 3.x under the `-source future` setting), whereas it will fail at runtime with a `ClassCastException` in Scala 2. In Scala 3.2, a pattern binding is only allowed if the pattern is _irrefutable_, that is, if the right-hand side's type conforms to the pattern's type. For instance, the following is OK:
+```scala
+val pair = (1, true)
+val (x, y) = pair
+```
+Sometimes one wants to decompose data anyway, even though the pattern is refutable. For instance, if at some point one knows that a list `elems` is non-empty one might want to decompose it like this:
+```scala
+val first :: rest = elems   // error
+```
+This works in Scala 2. In fact it is a typical use case for Scala 2's rules. But in Scala 3.2 it will give a warning. One can avoid the warning by marking the right-hand side with an [`@unchecked`](https://scala-lang.org/api/3.x/scala/unchecked.html) annotation:
+```scala
+val first :: rest = elems: @unchecked   // OK
+```
+This will make the compiler accept the pattern binding. It might give an error at runtime instead, if the underlying assumption that `elems` can never be empty is wrong.
+
+## Pattern Bindings in `for` Expressions
+
+Analogous changes apply to patterns in `for` expressions. For instance:
+
+```scala
+val elems: List[Any] = List((1, 2), "hello", (3, 4))
+for (x, y) <- elems yield (y, x)   // error: pattern's type (Any, Any) is more specialized
+                                   // than the right-hand side expression's type Any
+```
+This code gives a compile-time warning in Scala 3.2, whereas in Scala 2 the list `elems`
+is filtered to retain only the elements of tuple type that match the pattern `(x, y)`.
+The filtering functionality can be obtained in Scala 3 by prefixing the pattern with `case`:
+```scala
+for case (x, y) <- elems yield (y, x)  // returns List((2, 1), (4, 3))
+```
+
+## Syntax Changes
+
+Generators in `for` expressions may be prefixed with `case`.
+```
+Generator ::= [‘case’] Pattern1 ‘<-’ Expr
+```
+
+## Migration
+
+The new syntax is supported in Scala 3.0. However, to enable smooth cross compilation between Scala 2 and Scala 3, the changed behavior and additional type checks are only enabled under the `-source future` setting. They will be enabled by default in version 3.2 of the language.
diff --git a/docs/_spec/TODOreference/changed-features/pattern-matching.md b/docs/_spec/TODOreference/changed-features/pattern-matching.md
new file mode 100644
index 000000000000..30ae5d9dc104
--- /dev/null
+++ b/docs/_spec/TODOreference/changed-features/pattern-matching.md
@@ -0,0 +1,243 @@
+---
+layout: doc-page
+title: "Option-less pattern matching"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/pattern-matching.html
+---
+
+The implementation of pattern matching in Scala 3 was greatly simplified compared to Scala 2. From a user perspective, this means that Scala 3 generated patterns are a _lot_ easier to debug, as all variables show up in debug modes and positions are correctly preserved.
+
+Scala 3 supports a superset of Scala 2 [extractors](https://www.scala-lang.org/files/archive/spec/2.13/08-pattern-matching.html#extractor-patterns).
+
+## Extractors
+
+Extractors are objects that expose a method `unapply` or `unapplySeq`:
+
+```scala
+def unapply[A](x: T)(implicit ev: B): U
+def unapplySeq[A](x: T)(implicit ev: B): U
+```
+
+Extractors that expose the method `unapply` are called fixed-arity extractors, which
+work with patterns of fixed arity. Extractors that expose the method `unapplySeq` are
+called variadic extractors, which enable variadic patterns.
+
+### Fixed-Arity Extractors
+
+Fixed-arity extractors expose the following signature:
+
+```scala
+def unapply[A](x: T)(implicit ev: B): U
+```
+
+The type `U` conforms to one of the following matches:
+
+- Boolean match
+- Product match
+
+Or `U` conforms to the type `R`:
+
+```scala
+type R = {
+  def isEmpty: Boolean
+  def get: S
+}
+```
+
+and `S` conforms to one of the following matches:
+
+- single match
+- name-based match
+
+The former form of `unapply` has higher precedence, and _single match_ takes
+precedence over _name-based match_.
+
+A usage of a fixed-arity extractor is irrefutable if one of the following conditions holds:
+
+- `U = true`
+- the extractor is used as a product match
+- `U = Some[T]` (for Scala 2 compatibility)
+- `U <: R` and `U <: { def isEmpty: false }`
+
+### Variadic Extractors
+
+Variadic extractors expose the following signature:
+
+```scala
+def unapplySeq[A](x: T)(implicit ev: B): U
+```
+
+The type `U` conforms to one of the following matches:
+
+- sequence match
+- product-sequence match
+
+Or `U` conforms to the type `R`:
+
+```scala
+type R = {
+  def isEmpty: Boolean
+  def get: S
+}
+```
+
+and `S` conforms to one of the two matches above.
+
+The former form of `unapplySeq` has higher precedence, and _sequence match_ takes
+precedence over _product-sequence match_.
+
+A usage of a variadic extractor is irrefutable if one of the following conditions holds:
+
+- the extractor is used directly as a sequence match or product-sequence match
+- `U = Some[T]` (for Scala 2 compatibility)
+- `U <: R` and `U <: { def isEmpty: false }`
+
+## Boolean Match
+
+- `U =:= Boolean`
+- Pattern-matching on exactly `0` patterns
+
+For example:
+
+```scala
+object Even:
+  def unapply(s: String): Boolean = s.size % 2 == 0
+
+"even" match
+  case s @ Even() => println(s"$s has an even number of characters")
+  case s          => println(s"$s has an odd number of characters")
+
+// even has an even number of characters
+```
+
+## Product Match
+
+- `U <: Product`
+- `N > 0` is the maximum number of consecutive (parameterless `def` or `val`) `_1: P1` ... `_N: PN` members in `U`
+- Pattern-matching on exactly `N` patterns with types `P1, P2, ..., PN`
+
+For example:
+
+```scala
+class FirstChars(s: String) extends Product:
+  def _1 = s.charAt(0)
+  def _2 = s.charAt(1)
+
+  // Not used by pattern matching: Product is only used as a marker trait.
+  def canEqual(that: Any): Boolean = ???
+  def productArity: Int = ???
+  def productElement(n: Int): Any = ???
+
+object FirstChars:
+  def unapply(s: String): FirstChars = new FirstChars(s)
+
+"Hi!" match
+  case FirstChars(char1, char2) =>
+    println(s"First: $char1; Second: $char2")
+
+// First: H; Second: i
+```
+
+## Single Match
+
+- If there is exactly `1` pattern, pattern-matching on `1` pattern with type `U`
+
+```scala
+class Nat(val x: Int):
+  def get: Int = x
+  def isEmpty = x < 0
+
+object Nat:
+  def unapply(x: Int): Nat = new Nat(x)
+
+5 match
+  case Nat(n) => println(s"$n is a natural number")
+  case _      => ()
+
+// 5 is a natural number
+```
+
+## Name-based Match
+
+- `N > 1` is the maximum number of consecutive (parameterless `def` or `val`) `_1: P1 ... _N: PN` members in `U`
+- Pattern-matching on exactly `N` patterns with types `P1, P2, ..., PN`
+
+```scala
+object ProdEmpty:
+  def _1: Int = ???
+  def _2: String = ???
+  def isEmpty = true
+  def unapply(s: String): this.type = this
+  def get = this
+
+"" match
+  case ProdEmpty(_, _) => ???
+  case _ => ()
+```
+
+## Sequence Match
+
+- `U <: X`, `T2` and `T3` conform to `T1`
+
+```scala
+type X = {
+  def lengthCompare(len: Int): Int // or, `def length: Int`
+  def apply(i: Int): T1
+  def drop(n: Int): scala.Seq[T2]
+  def toSeq: scala.Seq[T3]
+}
+```
+
+- Pattern-matching on _exactly_ `N` simple patterns with types `T1, T1, ..., T1`, where `N` is the runtime size of the sequence, or
+- Pattern-matching on `>= N` simple patterns and _a vararg pattern_ (e.g., `xs: _*`) with types `T1, T1, ..., T1, Seq[T1]`, where `N` is the minimum size of the sequence.
+
+```scala
+object CharList:
+  def unapplySeq(s: String): Option[Seq[Char]] = Some(s.toList)
+
+"example" match
+  case CharList(c1, c2, c3, c4, _, _, _) =>
+    println(s"$c1,$c2,$c3,$c4")
+  case _ =>
+    println("Expected *exactly* 7 characters!")
+
+// e,x,a,m
+```
+
+## Product-Sequence Match
+
+- `U <: Product`
+- `N > 0` is the maximum number of consecutive (parameterless `def` or `val`) `_1: P1` ... `_N: PN` members in `U`
+- `PN` conforms to the signature `X` defined in Seq Pattern
+- Pattern-matching on `>= N` patterns, where the first `N - 1` patterns have types `P1, P2, ... P(N-1)`,
+  and the types of the remaining patterns are determined as in Seq Pattern.
+
+```scala
+class Foo(val name: String, val children: Int*)
+object Foo:
+  def unapplySeq(f: Foo): Option[(String, Seq[Int])] =
+    Some((f.name, f.children))
+
+def foo(f: Foo) = f match
+  case Foo(name, x, y, ns*) => ">= two children."
+  case Foo(name, ns*)       => "< two children."
+```
+
+There are plans for further simplification, in particular to factor out _product match_
+and _name-based match_ into a single type of extractor.
+
+## Type Testing
+
+Abstract type testing with `ClassTag` is replaced with `TypeTest` or the alias `Typeable`.
+
+- pattern `_: X` for an abstract type requires a `TypeTest` in scope
+- pattern `x @ X()` for an unapply that takes an abstract type requires a `TypeTest` in scope
+
+[More details on `TypeTest`](../other-new-features/type-test.md)
diff --git a/docs/_spec/TODOreference/changed-features/structural-types-spec.md b/docs/_spec/TODOreference/changed-features/structural-types-spec.md
new file mode 100644
index 000000000000..d456932649fb
--- /dev/null
+++ b/docs/_spec/TODOreference/changed-features/structural-types-spec.md
@@ -0,0 +1,153 @@
+---
+layout: doc-page
+title: "Programmatic Structural Types - More Details"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/structural-types-spec.html
+---
+
+## Syntax
+
+```
+SimpleType    ::= ... | Refinement
+Refinement    ::= ‘{’ RefineStatSeq ‘}’
+RefineStatSeq ::= RefineStat {semi RefineStat}
+RefineStat    ::= ‘val’ VarDcl | ‘def’ DefDcl | ‘type’ {nl} TypeDcl
+```
+
+## Implementation of Structural Types
+
+The standard library defines a universal marker trait
+[`scala.Selectable`](https://github.com/lampepfl/dotty/blob/main/library/src/scala/Selectable.scala):
+
+```scala
+trait Selectable extends Any
+```
+
+An implementation of `Selectable` that relies on [Java reflection](https://www.oracle.com/technical-resources/articles/java/javareflection.html) is
+available in the standard library: `scala.reflect.Selectable`. Other
+implementations can be envisioned for platforms where Java reflection
+is not available.
+
+Implementations of `Selectable` have to make available one or both of
+the methods `selectDynamic` and `applyDynamic`. The methods could be members of the `Selectable` implementation or they could be extension methods.
+
+The `selectDynamic` method takes a field name and returns the value associated with that name in the `Selectable`.
+It should have a signature of the form:
+
+```scala
+def selectDynamic(name: String): T
+```
+
+Often, the return type `T` is `Any`.
+
+Unlike `scala.Dynamic`, there is no special meaning for an `updateDynamic` method.
+However, we reserve the right to give it meaning in the future.
+Consequently, it is recommended not to define any member called `updateDynamic` in `Selectable`s.
+
+The `applyDynamic` method is used for selections that are applied to arguments. It takes a method name and possibly `Class`es representing its parameter types, as well as the arguments to pass to the function.
+Its signature should be of one of the two following forms:
+
+```scala
+def applyDynamic(name: String)(args: Any*): T
+def applyDynamic(name: String, ctags: Class[?]*)(args: Any*): T
+```
+
+Both versions are passed the actual arguments in the `args` parameter. The second version takes in addition a vararg argument of `java.lang.Class`es that identify the method's parameter classes. Such an argument is needed
+if `applyDynamic` is implemented using Java reflection, but it could be
+useful in other cases as well.
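+
+For illustration, here is a minimal sketch of a `Selectable` that implements both methods without any reflection, backed by ordinary maps (the `DictRecord` class and its members are made up for this example):
+
+```scala
+class DictRecord(
+    fields: Map[String, Any],
+    methods: Map[String, Seq[Any] => Any]
+) extends Selectable:
+  // field selection: look the name up in the field map
+  def selectDynamic(name: String): Any = fields(name)
+  // method application: look the name up and apply the stored function
+  def applyDynamic(name: String)(args: Any*): Any = methods(name)(args)
+
+// a structural type over DictRecord; greet is dispatched via applyDynamic
+type Greeter = DictRecord { val id: Int; def greet(name: String): String }
+
+val g = DictRecord(
+  Map("id" -> 1),
+  Map("greet" -> (args => s"Hello, ${args.head}!"))
+).asInstanceOf[Greeter]
+
+val msg = g.greet("Emma") // ~ g.applyDynamic("greet")("Emma").asInstanceOf[String]
+```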
`selectDynamic` and `applyDynamic` can also take additional context parameters in using clauses. These are resolved in the normal way at the callsite. + +Given a value `v` of type `C { Rs }`, where `C` is a class reference +and `Rs` are structural refinement declarations, and given `v.a` of type `U`, we consider three distinct cases: + +- If `U` is a value type, we map `v.a` to: + ```scala + v.selectDynamic("a").asInstanceOf[U] + ``` + +- If `U` is a method type `(T11, ..., T1n)...(TN1, ..., TNn): R` and it is not a dependent method type, we map `v.a(a11, ..., a1n)...(aN1, ..., aNn)` to: + ```scala + v.applyDynamic("a")(a11, ..., a1n, ..., aN1, ..., aNn) + .asInstanceOf[R] + ``` + If this call resolves to an `applyDynamic` method of the second form that takes a `Class[?]*` argument, we further rewrite this call to + ```scala + v.applyDynamic("a", c11, ..., c1n, ..., cN1, ... cNn)( + a11, ..., a1n, ..., aN1, ..., aNn) + .asInstanceOf[R] + ``` + where each `c_ij` is the literal `java.lang.Class[?]` of the type of the formal parameter `Tij`, i.e., `classOf[Tij]`. + +- If `U` is neither a value nor a method type, or a dependent method + type, an error is emitted. + +Note that `v`'s static type does not necessarily have to conform to `Selectable`, nor does it need to have `selectDynamic` and `applyDynamic` as members. It suffices that there is an implicit +conversion that can turn `v` into a `Selectable`, and the selection methods could also be available as +[extension methods](../contextual/extension-methods.md). + +## Limitations of Structural Types + +- Dependent methods cannot be called via structural call. + +- Refinements may not introduce overloads: If a refinement specifies the signature + of a method `m`, and `m` is also defined in the parent type of the refinement, then + the new signature must properly override the existing one. + +- Subtyping of structural refinements must preserve erased parameter types: Assume + we want to prove `S <: T { def m(x: A): B }`. Then, as usual, `S` must have a member method `m` that can take an argument of type `A`. Furthermore, if `m` is not a member of `T` (i.e. the refinement is structural), an additional condition applies. In this case, the member _definition_ `m` of `S` will have a parameter + with type `A'` say. The additional condition is that the erasure of `A'` and `A` is the same. Here is an example: + + ```scala + class Sink[A] { def put(x: A): Unit = {} } + val a = Sink[String]() + val b: { def put(x: String): Unit } = a // error + b.put("abc") // looks for a method with a `String` parameter + ``` + The second to last line is not well-typed, + since the erasure of the parameter type of `put` in class `Sink` is `Object`, + but the erasure of `put`'s parameter in the type of `b` is `String`. + This additional condition is necessary, since we will have to resort + to some (as yet unknown) form of reflection to call a structural member + like `put` in the type of `b` above. The condition ensures that the statically + known parameter types of the refinement correspond up to erasure to the + parameter types of the selected call target at runtime. + + Most reflection dispatch algorithms need to know exact erased parameter types. For instance, if the example above would typecheck, the call + `b.put("abc")` on the last line would look for a method `put` in the runtime type of `b` that takes a `String` parameter. But the `put` method is the one from class `Sink`, which takes an `Object` parameter. 
Hence the call would fail at runtime with a `NoSuchMethodException`.
+
+  One might hope for a "more intelligent" reflective dispatch algorithm that does not require exact parameter type matching. Unfortunately, this can always run into ambiguities, as long as overloading is a possibility. For instance, continuing the example above, we might introduce a new subclass `Sink1` of `Sink` and change the definition of `a` as follows:
+
+  ```scala
+  class Sink1[A] extends Sink[A] { def put(x: "123") = ??? }
+  val a: Sink[String] = Sink1[String]()
+  ```
+
+  Now there are two `put` methods in the runtime type of `b` with erased parameter
+  types `Object` and `String`, respectively. Yet dynamic dispatch still needs to go
+  to the first `put` method, even though the second looks like a better match.
+
+  For the cases where we can in fact implement reflection without knowing precise parameter types (for instance if static overloading is replaced by dynamically dispatched multi-methods), there is an escape hatch. For types that extend `scala.Selectable.WithoutPreciseParameterTypes` the signature check is omitted. Example:
+
+  ```scala
+  trait MultiMethodSelectable extends Selectable.WithoutPreciseParameterTypes:
+    // Assume this version of `applyDynamic` can be implemented without knowing
+    // precise parameter types `paramTypes`:
+    def applyDynamic(name: String, paramTypes: Class[_]*)(args: Any*): Any = ???
+
+  class Sink[A] extends MultiMethodSelectable:
+    def put(x: A): Unit = {}
+
+  val a = new Sink[String]
+  val b: MultiMethodSelectable { def put(x: String): Unit } = a // OK
+  ```
+## Differences with Scala 2 Structural Types
+
+- Scala 2 supports structural types by means of Java reflection. Unlike
+  Scala 3, structural calls do not rely on a mechanism such as
+  `Selectable`, and reflection cannot be avoided.
+- In Scala 2, refinements can introduce overloads.
+- In Scala 2, mutable `var`s are allowed in refinements. In Scala 3,
+  they are no longer allowed.
+- Scala 2 does not impose the "same-erasure" restriction on subtyping of structural types. It allows some calls to fail at runtime instead.
+
+## Context
+
+For more information, see [Rethink Structural Types](https://github.com/lampepfl/dotty/issues/1886).
diff --git a/docs/_spec/TODOreference/changed-features/structural-types.md b/docs/_spec/TODOreference/changed-features/structural-types.md
new file mode 100644
index 000000000000..37e583332cf1
--- /dev/null
+++ b/docs/_spec/TODOreference/changed-features/structural-types.md
@@ -0,0 +1,191 @@
+---
+layout: doc-page
+title: "Programmatic Structural Types"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/structural-types.html
+---
+
+## Motivation
+
+Some use cases, such as modelling database access, are more awkward in
+statically typed languages than in dynamically typed languages: with
+dynamically typed languages, it's quite natural to model a row as a
+record or object, and to select entries with simple dot notation (e.g.
+`row.columnName`).
+
+Achieving the same experience in a statically typed
+language requires defining a class for every possible row arising from
+database manipulation (including rows arising from joins and
+projections) and setting up a scheme to map between a row and the
+class representing it.
+
+This requires a large amount of boilerplate, which leads developers to
+trade the advantages of static typing for simpler schemes where column
+names are represented as strings and passed to other operators (e.g.
+`row.select("columnName")`).
This approach forgoes the advantages of
+static typing, and is still not as natural as the dynamically typed
+version.
+
+Structural types help in situations where we would like to support
+simple dot notation in dynamic contexts without losing the advantages
+of static typing. They allow developers to use dot notation and
+configure how fields and methods should be resolved.
+
+## Example
+
+Here's an example of a structural type `Person`:
+
+```scala
+  class Record(elems: (String, Any)*) extends Selectable:
+    private val fields = elems.toMap
+    def selectDynamic(name: String): Any = fields(name)
+
+  type Person = Record { val name: String; val age: Int }
+```
+
+The type `Person` adds a _refinement_ to its parent type `Record` that defines the two fields `name` and `age`. We say the refinement is _structural_ since `name` and `age` are not defined in the parent type. But they exist nevertheless as members of class `Person`. For instance, the following
+program would print "Emma is 42 years old.":
+
+```scala
+  val person = Record("name" -> "Emma", "age" -> 42).asInstanceOf[Person]
+  println(s"${person.name} is ${person.age} years old.")
+```
+
+The parent type `Record` in this example is a generic class that can represent arbitrary records in its `elems` argument. This argument is a
+sequence of pairs of labels of type `String` and values of type `Any`.
+When we create a `Person` as a `Record`, we have to assert with a typecast
+that the record defines the right fields of the right types. `Record`
+itself is too weakly typed, so the compiler cannot know this without
+help from the user. In practice, the connection between a structural type
+and its underlying generic representation would most likely be done by
+a database layer, and therefore would not be a concern of the end user.
+
+`Record` extends the marker trait [`scala.Selectable`](https://scala-lang.org/api/3.x/scala/Selectable.html) and defines
+a method `selectDynamic`, which maps a field name to its value.
+Selecting a structural type member is done by calling this method.
+The `person.name` and `person.age` selections are translated by
+the Scala compiler to:
+
+```scala
+  person.selectDynamic("name").asInstanceOf[String]
+  person.selectDynamic("age").asInstanceOf[Int]
+```
+
+Besides `selectDynamic`, a `Selectable` class sometimes also defines a method `applyDynamic`. This can then be used to translate function calls of structural members. So, if `a` is an instance of `Selectable`, a structural call like `a.f(b, c)` would translate to
+
+```scala
+  a.applyDynamic("f")(b, c)
+```
+
+## Using Java Reflection
+
+Structural types can also be accessed using [Java reflection](https://www.oracle.com/technical-resources/articles/java/javareflection.html). Example:
+
+```scala
+  type Closeable = { def close(): Unit }
+
+  class FileInputStream:
+    def close(): Unit
+
+  class Channel:
+    def close(): Unit
+```
+
+Here, we define a structural type `Closeable` that defines a `close` method. There are various classes that have `close` methods; we just list [`FileInputStream`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/io/FileInputStream.html#%3Cinit%3E(java.io.File)) and [`Channel`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/nio/channels/Channel.html) as two examples. It would be easiest if the two classes shared a common interface that factors out the `close` method. But such factorings are often not possible if different libraries are combined in one application.
Yet, we can still have methods that work on
+all classes with a `close` method by using the `Closeable` type. For instance,
+
+```scala
+  import scala.reflect.Selectable.reflectiveSelectable
+
+  def autoClose(f: Closeable)(op: Closeable => Unit): Unit =
+    try op(f) finally f.close()
+```
+
+The call `f.close()` has to use Java reflection to identify and call the `close` method in the receiver `f`. This needs to be enabled by the import
+of `reflectiveSelectable` shown above. What happens "under the hood" is then the following:
+
+ - The import makes available an implicit conversion that turns any type into a
+   `Selectable`. `f` is wrapped in this conversion.
+
+ - The compiler then transforms the `close` call on the wrapped `f`
+   to an `applyDynamic` call. The end result is:
+
+   ```scala
+   reflectiveSelectable(f).applyDynamic("close")()
+   ```
+ - The implementation of `applyDynamic` in `reflectiveSelectable`'s result
+uses Java reflection to find and call a method `close` with zero parameters in the value referenced by `f` at runtime.
+
+Structural calls like this tend to be much slower than normal method calls. The mandatory import of `reflectiveSelectable` serves as a signpost that something inefficient is going on.
+
+**Note:** In Scala 2, Java reflection is the only mechanism available for structural types and it is automatically enabled without needing the
+`reflectiveSelectable` conversion. However, to warn against inefficient
+dispatch, Scala 2 requires a language import `import scala.language.reflectiveCalls`.
+
+Before resorting to structural calls with Java reflection, one should consider alternatives. For instance, sometimes a more modular _and_ efficient architecture can be obtained using type classes.
+
+## Extensibility
+
+New instances of `Selectable` can be defined to support means of
+access other than Java reflection, which would enable usages such as
+the database access example given at the beginning of this document.
+
+## Local Selectable Instances
+
+Local and anonymous classes that extend `Selectable` get more refined types
+than other classes. Here is an example:
+
+```scala
+trait Vehicle extends reflect.Selectable:
+  val wheels: Int
+
+val i3 = new Vehicle:  // i3: Vehicle { val range: Int }
+  val wheels = 4
+  val range = 240
+
+i3.range
+```
+
+The type of `i3` in this example is `Vehicle { val range: Int }`. Hence,
+`i3.range` is well-formed. Since the base class `Vehicle` does not define a `range` field or method, we need structural dispatch to access the `range` field of the anonymous class that initializes `i3`. Structural dispatch
+is implemented by the base trait [`reflect.Selectable`](https://scala-lang.org/api/3.x/scala/reflect/Selectable.html) of `Vehicle`, which defines the necessary `selectDynamic` member.
+
+`Vehicle` could also extend some other subclass of [`scala.Selectable`](https://scala-lang.org/api/3.x/scala/Selectable.html) that implements `selectDynamic` and `applyDynamic` differently. But if it does not extend a `Selectable` at all, the code would no longer typecheck:
+
+```scala
+trait Vehicle:
+  val wheels: Int
+
+val i3 = new Vehicle:  // i3: Vehicle
+  val wheels = 4
+  val range = 240
+
+i3.range  // error: range is not a member of `Vehicle`
+```
+
+The difference is that the type of an anonymous class that does not extend `Selectable` is just formed from the parent type(s) of the class, without
+adding any refinements. Hence, `i3` now has just type `Vehicle` and the selection `i3.range` gives a "member not found" error.
+
+Note that in Scala 2 all local and anonymous classes could produce values with refined types. But
+members defined by such refinements could be selected only with the language import
+[`reflectiveCalls`](https://scala-lang.org/api/3.x/scala/languageFeature$$reflectiveCalls$.html).
+
+## Relation with `scala.Dynamic`
+
+There are clearly some connections with [`scala.Dynamic`](https://scala-lang.org/api/3.x/scala/Dynamic.html) here, since
+both select members programmatically. But there are also some
+differences.
+
+- Fully dynamic selection is not typesafe, but structural selection
+  is, as long as the correspondence of the structural type with the
+  underlying value is as stated.
+
+- [`Dynamic`](https://scala-lang.org/api/3.x/scala/Dynamic.html) is just a marker trait, which gives more leeway where and
+  how to define reflective access operations. By contrast,
+  `Selectable` is a trait which declares the access operations.
+
+- Two access operations, `selectDynamic` and `applyDynamic`, are shared
+  between both approaches. In `Selectable`, `applyDynamic` may also take
+  [`java.lang.Class`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/Class.html) arguments indicating the method's formal parameter types.
+  [`Dynamic`](https://scala-lang.org/api/3.x/scala/Dynamic.html) comes with `updateDynamic`.
+
+[More details](structural-types-spec.md)
diff --git a/docs/_spec/TODOreference/changed-features/type-checking.md b/docs/_spec/TODOreference/changed-features/type-checking.md
new file mode 100644
index 000000000000..6f59b1a1c1c6
--- /dev/null
+++ b/docs/_spec/TODOreference/changed-features/type-checking.md
@@ -0,0 +1,7 @@
+---
+layout: doc-page
+title: "Changes in Type Checking"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/type-checking.html
+---
+
+*** **TO BE FILLED IN** ***
diff --git a/docs/_spec/TODOreference/changed-features/type-inference.md b/docs/_spec/TODOreference/changed-features/type-inference.md
new file mode 100644
index 000000000000..00d0e959f5ed
--- /dev/null
+++ b/docs/_spec/TODOreference/changed-features/type-inference.md
@@ -0,0 +1,10 @@
+---
+layout: doc-page
+title: "Changes in Type Inference"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/type-inference.html
+---
+
+For more information, see the two presentations
+
+* [Scala 3, Type inference and You!](https://www.youtube.com/watch?v=lMvOykNQ4zs) by Guillaume Martres (September 2019)
+* [GADTs in Dotty](https://www.youtube.com/watch?v=VV9lPg3fNl8) by Aleksander Boruch-Gruszecki (July 2019).
diff --git a/docs/_spec/TODOreference/changed-features/vararg-splices.md b/docs/_spec/TODOreference/changed-features/vararg-splices.md
new file mode 100644
index 000000000000..43c4acc5f880
--- /dev/null
+++ b/docs/_spec/TODOreference/changed-features/vararg-splices.md
@@ -0,0 +1,40 @@
+---
+layout: doc-page
+title: "Vararg Splices"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/vararg-splices.html
+---
+
+The syntax of vararg splices in patterns and function arguments has changed. The new syntax uses a postfix `*`, analogously to how a vararg parameter is declared.
+
+```scala
+val arr = Array(0, 1, 2, 3)
+val lst = List(arr*)                   // vararg splice argument
+lst match
+  case List(0, 1, xs*) => println(xs)  // binds xs to Seq(2, 3)
+  case List(1, _*) =>                  // wildcard pattern
+```
+
+The old syntax for splice arguments will be phased out.
+ +```scala +/*!*/ val lst = List(arr: _*) // syntax error + lst match + case List(0, 1, xs @ _*) // ok, equivalent to `xs*` +``` + +## Syntax + +``` +ArgumentPatterns ::= ‘(’ [Patterns] ‘)’ + | ‘(’ [Patterns ‘,’] Pattern2 ‘*’ ‘)’ + +ParArgumentExprs ::= ‘(’ [‘using’] ExprsInParens ‘)’ + | ‘(’ [ExprsInParens ‘,’] PostfixExpr ‘*’ ‘)’ +``` + +## Compatibility considerations + +To enable cross compilation between Scala 2 and Scala 3, the compiler will +accept both the old and the new syntax. Under the `-source future` setting, an error +will be emitted when the old syntax is encountered. An automatic rewrite from old +to new syntax is offered under `-source future-migration`. diff --git a/docs/_spec/TODOreference/changed-features/wildcards.md b/docs/_spec/TODOreference/changed-features/wildcards.md new file mode 100644 index 000000000000..0d3e13c3d7e0 --- /dev/null +++ b/docs/_spec/TODOreference/changed-features/wildcards.md @@ -0,0 +1,50 @@ +--- +layout: doc-page +title: Wildcard Arguments in Types +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/wildcards.html +--- + +The syntax of wildcard arguments in types has changed from `_` to `?`. Example: +```scala +List[?] +Map[? <: AnyRef, ? >: Null] +``` + +## Motivation + +We would like to use the underscore syntax `_` to stand for an anonymous type parameter, aligning it with its meaning in +value parameter lists. So, just as `f(_)` is a shorthand for the lambda `x => f(x)`, in the future `C[_]` will be a shorthand +for the type lambda `[X] =>> C[X]`. This makes higher-kinded types easier to use. It also removes the wart that, used as a type +parameter, `F[_]` means `F` is a type constructor whereas used as a type, `F[_]` means it is a wildcard (i.e. existential) type. +In the future, `F[_]` will mean the same thing, no matter where it is used. + +We pick `?` as a replacement syntax for wildcard types, since it aligns with +[Java's syntax](https://docs.oracle.com/javase/tutorial/java/generics/wildcardGuidelines.html). + +## Migration Strategy + +The migration to the new scheme is complicated, in particular since the [kind projector](https://github.com/typelevel/kind-projector) +compiler plugin still uses the reverse convention, with `?` meaning parameter placeholder instead of wildcard. Fortunately, kind projector has added `*` as an alternative syntax for `?`. + +A step-by-step migration is made possible with the following measures: + + 1. In Scala 3.0, both `_` and `?` are legal names for wildcards. + 2. In Scala 3.1, `_` is deprecated in favor of `?` as a name for a wildcard. A `-rewrite` option is + available to rewrite one to the other. + 3. In Scala 3.2, the meaning of `_` changes from wildcard to placeholder for type parameter. + 4. The Scala 3.1 behavior is already available today under the `-source future` setting. + +To smooth the transition for codebases that use kind-projector, we adopt the following measures under the command line +option `-Ykind-projector`: + + 1. In Scala 3.0, `*` is available as a type parameter placeholder. + 2. In Scala 3.2, `*` is deprecated in favor of `_`. A `-rewrite` option is + available to rewrite one to the other. + 3. In Scala 3.3, `*` is removed again, and all type parameter placeholders will be expressed with `_`. + +These rules make it possible to cross build between Scala 2 using the kind projector plugin and Scala 3.0 - 3.2 using the compiler option `-Ykind-projector`. 
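+
+As an illustration, here is a sketch of what this enables on the Scala 3 side (the `Functor` type class is made up for the example; under `-Ykind-projector`, `*` is the type-parameter placeholder while `?` keeps its wildcard meaning):
+
+```scala
+trait Functor[F[_]]:
+  def map[A, B](fa: F[A])(f: A => B): F[B]
+
+// `*` is the type-parameter placeholder under -Ykind-projector:
+// Either[String, *] is the type lambda [X] =>> Either[String, X]
+given Functor[Either[String, *]] with
+  def map[A, B](fa: Either[String, A])(f: A => B): Either[String, B] =
+    fa.map(f)
+
+// `?` keeps its wildcard meaning at the same time:
+def size(xs: List[?]): Int = xs.length
+```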
+
+There is also a migration path for users that want a one-time transition to syntax with `_` as a type parameter placeholder.
+With option `-Ykind-projector:underscores` Scala 3 will regard `_` as a type parameter placeholder, leaving `?` as the only syntax for wildcards.
+
+To cross-compile with old Scala 2 sources, while using `_` as a placeholder, you must use the options `-Xsource:3 -P:kind-projector:underscore-placeholders` together with a recent version of kind-projector (`0.13` and higher) and recent versions of Scala 2 (`2.13.5` and higher, or `2.12.14` and higher).
diff --git a/docs/_spec/TODOreference/contextual/by-name-context-parameters.md b/docs/_spec/TODOreference/contextual/by-name-context-parameters.md
new file mode 100644
index 000000000000..3004bfb2c4c2
--- /dev/null
+++ b/docs/_spec/TODOreference/contextual/by-name-context-parameters.md
@@ -0,0 +1,65 @@
+---
+layout: doc-page
+title: "By-Name Context Parameters"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/by-name-context-parameters.html
+---
+
+Context parameters can be declared by-name to avoid a divergent inferred expansion. Example:
+
+```scala
+trait Codec[T]:
+  def write(x: T): Unit
+
+given intCodec: Codec[Int] = ???
+
+given optionCodec[T](using ev: => Codec[T]): Codec[Option[T]] with
+  def write(xo: Option[T]) = xo match
+    case Some(x) => ev.write(x)
+    case None =>
+
+val s = summon[Codec[Option[Int]]]
+
+s.write(Some(33))
+s.write(None)
+```
+As is the case for a normal by-name parameter, the argument for the context parameter `ev`
+is evaluated on demand. In the example above, if the option value `xo` is `None`, it is
+not evaluated at all.
+
+The synthesized argument for a context parameter is backed by a local val
+if this is necessary to prevent an otherwise diverging expansion.
+
+The precise steps for synthesizing an argument for a by-name context parameter of type `=> T` are as follows.
+
+ 1. Create a new given of type `T`:
+
+    ```scala
+    given lv: T = ???
+    ```
+
+    where `lv` is an arbitrary fresh name.
+
+ 1. This given is not immediately available as a candidate for argument inference (making it immediately available could result in a loop in the synthesized computation). But it becomes available in all nested contexts that look again for an argument to a by-name context parameter.
+
+ 1. If this search succeeds with expression `E`, and `E` contains references to `lv`, replace `E` by
+
+    ```scala
+    { given lv: T = E; lv }
+    ```
+
+    Otherwise, return `E` unchanged.
+
+In the example above, the definition of `s` would be expanded as follows.
+
+```scala
+val s = summon[Test.Codec[Option[Int]]](
+  optionCodec[Int](using intCodec)
+)
+```
+
+No local given instance was generated because the synthesized argument is not recursive.
+
+## Reference
+
+For more information, see [Issue #1998](https://github.com/lampepfl/dotty/issues/1998)
+and the associated [Scala SIP](https://docs.scala-lang.org/sips/byname-implicits.html).
diff --git a/docs/_spec/TODOreference/contextual/context-bounds.md b/docs/_spec/TODOreference/contextual/context-bounds.md
new file mode 100644
index 000000000000..42479d6802b3
--- /dev/null
+++ b/docs/_spec/TODOreference/contextual/context-bounds.md
@@ -0,0 +1,53 @@
+---
+layout: doc-page
+title: "Context Bounds"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/context-bounds.html
+---
+
+A context bound is a shorthand for expressing the common pattern of a context parameter that depends on a type parameter.
Using a context bound, the `maximum` function of the last section can be written like this:
+
+```scala
+def maximum[T: Ord](xs: List[T]): T = xs.reduceLeft(max)
+```
+
+A bound like `: Ord` on a type parameter `T` of a method or class indicates a context parameter `using Ord[T]`. The context parameter(s) generated from context bounds
+are added as follows:
+
+ - If the method parameters end in an implicit parameter list or using clause,
+   context parameters are added in front of that list.
+ - Otherwise they are added as a separate parameter clause at the end.
+
+Example:
+
+```scala
+def f[T: C1 : C2, U: C3](x: T)(using y: U, z: V): R
+```
+
+would expand to
+
+```scala
+def f[T, U](x: T)(using _: C1[T], _: C2[T], _: C3[U], y: U, z: V): R
+```
+
+Context bounds can be combined with subtype bounds. If both are present, subtype bounds come first, e.g.
+
+```scala
+def g[T <: B : C](x: T): R = ...
+```
+
+## Migration
+
+To ease migration, context bounds in Dotty map in Scala 3.0 to old-style implicit parameters
+for which arguments can be passed either with a `(using ...)` clause or with a normal application. From Scala 3.1 on, they will map to context parameters instead, as described above.
+
+If the source version is `future-migration`, any pairing of an evidence
+context parameter stemming from a context bound with a normal argument will give a migration
+warning. The warning indicates that a `(using ...)` clause is needed instead. The rewrite can be
+done automatically under `-rewrite`.
+
+## Syntax
+
+```
+TypeParamBounds ::= [SubtypeBounds] {ContextBound}
+ContextBound    ::= ‘:’ Type
+```
diff --git a/docs/_spec/TODOreference/contextual/context-functions-spec.md b/docs/_spec/TODOreference/contextual/context-functions-spec.md
new file mode 100644
index 000000000000..109513e9da86
--- /dev/null
+++ b/docs/_spec/TODOreference/contextual/context-functions-spec.md
@@ -0,0 +1,79 @@
+---
+layout: doc-page
+title: "Context Functions - More Details"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/context-functions-spec.html
+---
+
+## Syntax
+
+```
+Type ::= ...
+       | FunArgTypes ‘?=>’ Type
+Expr ::= ...
+       | FunParams ‘?=>’ Expr
+```
+
+Context function types associate to the right, e.g.
+`S ?=> T ?=> U` is the same as `S ?=> (T ?=> U)`.
+
+## Implementation
+
+Context function types are shorthands for class types that define `apply`
+methods with context parameters. Specifically, the `N`-ary function type
+`T1, ..., TN ?=> R` is a shorthand for the class type
+`ContextFunctionN[T1, ..., TN, R]`. Such class types are assumed to have the following definitions, for any value of `N >= 1`:
+
+```scala
+package scala
+trait ContextFunctionN[-T1, ..., -TN, +R]:
+  def apply(using x1: T1, ..., xN: TN): R
+```
+
+Context function types erase to normal function types, so these classes are
+generated on the fly for typechecking, but not realized in actual code.
+
+Context function literals `(x1: T1, ..., xn: Tn) ?=> e` map
+context parameters `xi` of types `Ti` to the result of evaluating the expression `e`.
+The scope of each context parameter `xi` is `e`. The parameters must have pairwise distinct names.
+
+If the expected type of the context function literal is of the form
+`scala.ContextFunctionN[S1, ..., Sn, R]`, the expected type of `e` is `R` and
+the type `Ti` of any of the parameters `xi` can be omitted, in which case `Ti = Si`
+is assumed.
If the expected type of the context function literal is
+some other type, all context parameter types must be explicitly given, and the expected type of `e` is undefined.
+The type of the context function literal is `scala.ContextFunctionN[S1, ..., Sn, T]`, where `T` is the widened
+type of `e`. `T` must be equivalent to a type which does not refer to any of
+the context parameters `xi`.
+
+The context function literal is evaluated as the instance creation expression
+
+```scala
+new scala.ContextFunctionN[T1, ..., Tn, T]:
+  def apply(using x1: T1, ..., xn: Tn): T = e
+```
+
+A context parameter may also be a wildcard represented by an underscore `_`. In that case, a fresh name for the parameter is chosen arbitrarily.
+
+**Note:** The closing paragraph of the
+[Anonymous Functions section](https://www.scala-lang.org/files/archive/spec/2.13/06-expressions.html#anonymous-functions)
+of Scala 2.13 is subsumed by context function types and should be removed.
+
+Context function literals `(x1: T1, ..., xn: Tn) ?=> e` are
+automatically created for any expression `e` whose expected type is
+`scala.ContextFunctionN[T1, ..., Tn, R]`, unless `e` is
+itself a context function literal. This is analogous to the automatic
+insertion of [`scala.Function0`](https://scala-lang.org/api/3.x/scala/Function0.html) around expressions in by-name argument position.
+
+Context function types generalize to `N > 22` in the same way that function types do, see [the corresponding
+documentation](../dropped-features/limit22.md).
+
+## Examples
+
+See the section on Expressiveness from [Simplicitly: foundations and
+applications of implicit function
+types](https://dl.acm.org/citation.cfm?id=3158130).
+
+## Type Checking
+
+After desugaring no additional typing rules are required for context function types.
diff --git a/docs/_spec/TODOreference/contextual/context-functions.md b/docs/_spec/TODOreference/contextual/context-functions.md
new file mode 100644
index 000000000000..0ad3c8757782
--- /dev/null
+++ b/docs/_spec/TODOreference/contextual/context-functions.md
@@ -0,0 +1,154 @@
+---
+layout: doc-page
+title: "Context Functions"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/context-functions.html
+---
+
+_Context functions_ are functions with (only) context parameters.
+Their types are _context function types_. Here is an example of a context function type:
+
+```scala
+type Executable[T] = ExecutionContext ?=> T
+```
+Context functions are written using `?=>` as the "arrow" sign.
+They are applied to synthesized arguments, in
+the same way methods with context parameters are applied. For instance:
+```scala
+  given ec: ExecutionContext = ...
+
+  def f(x: Int): ExecutionContext ?=> Int = ...
+
+  // could be written as follows with the type alias from above
+  // def f(x: Int): Executable[Int] = ...
+
+  f(2)(using ec)   // explicit argument
+  f(2)             // argument is inferred
+```
+Conversely, if the expected type of an expression `E` is a context function type
+`(T_1, ..., T_n) ?=> U` and `E` is not already a
+context function literal, `E` is converted to a context function literal by rewriting it to
+```scala
+  (x_1: T1, ..., x_n: Tn) ?=> E
+```
+where the names `x_1`, ..., `x_n` are arbitrary. This expansion is performed
+before the expression `E` is typechecked, which means that `x_1`, ..., `x_n`
+are available as givens in `E`.
+
+Like their types, context function literals are written using `?=>` as the arrow between parameters and results.
They differ from normal function literals in that their types are context function types. + +For example, continuing with the previous definitions, +```scala + def g(arg: Executable[Int]) = ... + + g(22) // is expanded to g((ev: ExecutionContext) ?=> 22) + + g(f(2)) // is expanded to g((ev: ExecutionContext) ?=> f(2)(using ev)) + + g((ctx: ExecutionContext) ?=> f(3)) // is expanded to g((ctx: ExecutionContext) ?=> f(3)(using ctx)) + g((ctx: ExecutionContext) ?=> f(3)(using ctx)) // is left as it is +``` + +## Example: Builder Pattern + +Context function types have considerable expressive power. For +instance, here is how they can support the "builder pattern", where +the aim is to construct tables like this: +```scala + table { + row { + cell("top left") + cell("top right") + } + row { + cell("bottom left") + cell("bottom right") + } + } +``` +The idea is to define classes for `Table` and `Row` that allow the +addition of elements via `add`: +```scala + class Table: + val rows = new ArrayBuffer[Row] + def add(r: Row): Unit = rows += r + override def toString = rows.mkString("Table(", ", ", ")") + + class Row: + val cells = new ArrayBuffer[Cell] + def add(c: Cell): Unit = cells += c + override def toString = cells.mkString("Row(", ", ", ")") + + case class Cell(elem: String) +``` +Then, the `table`, `row` and `cell` constructor methods can be defined +with context function types as parameters to avoid the plumbing boilerplate +that would otherwise be necessary. +```scala + def table(init: Table ?=> Unit) = + given t: Table = Table() + init + t + + def row(init: Row ?=> Unit)(using t: Table) = + given r: Row = Row() + init + t.add(r) + + def cell(str: String)(using r: Row) = + r.add(new Cell(str)) +``` +With that setup, the table construction code above compiles and expands to: +```scala + table { ($t: Table) ?=> + + row { ($r: Row) ?=> + cell("top left")(using $r) + cell("top right")(using $r) + }(using $t) + + row { ($r: Row) ?=> + cell("bottom left")(using $r) + cell("bottom right")(using $r) + }(using $t) + } +``` +## Example: Postconditions + +As a larger example, here is a way to define constructs for checking arbitrary postconditions using an extension method `ensuring` so that the checked result can be referred to simply by `result`. The example combines opaque type aliases, context function types, and extension methods to provide a zero-overhead abstraction. + +```scala +object PostConditions: + opaque type WrappedResult[T] = T + + def result[T](using r: WrappedResult[T]): T = r + + extension [T](x: T) + def ensuring(condition: WrappedResult[T] ?=> Boolean): T = + assert(condition(using x)) + x +end PostConditions +import PostConditions.{ensuring, result} + +val s = List(1, 2, 3).sum.ensuring(result == 6) +``` +**Explanations**: We use a context function type `WrappedResult[T] ?=> Boolean` +as the type of the condition of `ensuring`. An argument to `ensuring` such as +`(result == 6)` will therefore have a given of type `WrappedResult[T]` in +scope to pass along to the `result` method. `WrappedResult` is a fresh type, to make sure +that we do not get unwanted givens in scope (this is good practice in all cases +where context parameters are involved). Since `WrappedResult` is an opaque type alias, its +values need not be boxed, and since `ensuring` is added as an extension method, its argument +does not need boxing either. 
Hence, the implementation of `ensuring` is close in efficiency to the best possible code one could write by hand:
+
+```scala
+val s =
+  val result = List(1, 2, 3).sum
+  assert(result == 6)
+  result
+```
+## Reference
+
+For more information, see the [blog article](https://www.scala-lang.org/blog/2016/12/07/implicit-function-types.html)
+(which uses a different syntax that has since been superseded).
+
+[More details](./context-functions-spec.md)
diff --git a/docs/_spec/TODOreference/contextual/contextual.md b/docs/_spec/TODOreference/contextual/contextual.md
new file mode 100644
index 000000000000..fda63397f8f9
--- /dev/null
+++ b/docs/_spec/TODOreference/contextual/contextual.md
@@ -0,0 +1,83 @@
+---
+layout: index
+title: "Contextual Abstractions"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual.html
+---
+
+## Critique of the Status Quo
+
+Scala's implicits are its most distinguished feature. They are _the_ fundamental way to abstract over context. They represent a unified paradigm with a great variety of use cases, among them: implementing type classes, establishing context, dependency injection, expressing capabilities, computing new types and proving relationships between them.
+
+Following Haskell, Scala was the second popular language to have some form of implicits. Other languages have followed suit, e.g. [Rust's traits](https://doc.rust-lang.org/rust-by-example/trait.html) or [Swift's protocol extensions](https://docs.swift.org/swift-book/LanguageGuide/Protocols.html#ID521). Design proposals are also on the table for Kotlin as [compile time dependency resolution](https://github.com/Kotlin/KEEP/blob/e863b25f8b3f2e9b9aaac361c6ee52be31453ee0/proposals/compile-time-dependency-resolution.md), for C# as [Shapes and Extensions](https://github.com/dotnet/csharplang/issues/164)
+or for F# as [Traits](https://github.com/MattWindsor91/visualfsharp/blob/hackathon-vs/examples/fsconcepts.md). Implicits are also a common feature of theorem provers such as [Coq](https://coq.inria.fr/refman/language/extensions/implicit-arguments.html) or [Agda](https://agda.readthedocs.io/en/latest/language/implicit-arguments.html).
+
+Even though these designs use widely different terminology, they are all variants of the core idea of _term inference_. Given a type, the compiler synthesizes a "canonical" term that has that type. Scala embodies the idea in a purer form than most other languages: An implicit parameter directly leads to an inferred argument term that could also be written down explicitly. By contrast, type class based designs are less direct since they hide term inference behind some form of type classification and do not offer the option of writing the inferred quantities (typically, dictionaries) explicitly.
+
+Given that term inference is where the industry is heading, and given that Scala has it in a very pure form, how come implicits are not more popular? In fact, it's fair to say that implicits are at the same time Scala's most distinguished and most controversial feature. I believe this is due to a number of aspects that together make implicits harder to learn than necessary and also make it harder to prevent abuses.
+
+Particular criticisms are:
+
+1. Being very powerful, implicits are easily over-used and mis-used. This observation holds in almost all cases when we talk about _implicit conversions_, which, even though conceptually different, share the same syntax with other implicit definitions.
For instance, regarding the two definitions + + ```scala + implicit def i1(implicit x: T): C[T] = ... + implicit def i2(x: T): C[T] = ... + ``` + + the first of these is a conditional implicit _value_, the second an implicit _conversion_. Conditional implicit values are a cornerstone for expressing type classes, whereas most applications of implicit conversions have turned out to be of dubious value. The problem is that many newcomers to the language start with defining implicit conversions since they are easy to understand and seem powerful and convenient. Scala 3 will put under a language flag both definitions and applications of "undisciplined" implicit conversions between types defined elsewhere. This is a useful step to push back against overuse of implicit conversions. But the problem remains that syntactically, conversions and values just look too similar for comfort. + +2. Another widespread abuse is over-reliance on implicit imports. This often leads to inscrutable type errors that go away with the right import incantation, leaving a feeling of frustration. Conversely, it is hard to see what implicits a program uses since implicits can hide anywhere in a long list of imports. + +3. The syntax of implicit definitions is too minimal. It consists of a single modifier, `implicit`, that can be attached to a large number of language constructs. A problem with this for newcomers is that it conveys mechanism instead of intent. For instance, a type class instance is an implicit object or val if unconditional and an implicit def with implicit parameters referring to some class if conditional. This describes precisely what the implicit definitions translate to -- just drop the `implicit` modifier, and that's it! But the cues that define intent are rather indirect and can be easily misread, as demonstrated by the definitions of `i1` and `i2` above. + +4. The syntax of implicit parameters also has shortcomings. While implicit _parameters_ are designated specifically, arguments are not. Passing an argument to an implicit parameter looks like a regular application `f(arg)`. This is problematic because it means there can be confusion regarding what parameter gets instantiated in a call. For instance, in + + ```scala + def currentMap(implicit ctx: Context): Map[String, Int] + ``` + + one cannot write `currentMap("abc")` since the string `"abc"` is taken as explicit argument to the implicit `ctx` parameter. One has to write `currentMap.apply("abc")` instead, which is awkward and irregular. For the same reason, a method definition can only have one implicit parameter section and it must always come last. This restriction not only reduces orthogonality, but also prevents some useful program constructs, such as a method with a regular parameter whose type depends on an implicit value. Finally, it's also a bit annoying that implicit parameters must have a name, even though in many cases that name is never referenced. + +5. Implicits pose challenges for tooling. The set of available implicits depends on context, so command completion has to take context into account. This is feasible in an IDE but tools like [Scaladoc](https://docs.scala-lang.org/overviews/scaladoc/overview.html) that are based on static web pages can only provide an approximation. Another problem is that failed implicit searches often give very unspecific error messages, in particular if some deeply recursive implicit search has failed. Note that the Scala 3 compiler has already made a lot of progress in the error diagnostics area. 
If a recursive search fails some levels down, it shows what was constructed and what is missing. Also, it suggests imports that can bring missing implicits in scope. + +None of these shortcomings is fatal; after all, implicits are very widely used, and many libraries and applications rely on them. But together, they make code using implicits a lot more cumbersome and less clear than it could be. + +Historically, many of these shortcomings come from the way implicits were gradually "discovered" in Scala. Scala originally had only implicit conversions with the intended use case of "extending" a class or trait after it was defined, i.e. what is expressed by implicit classes in later versions of Scala. Implicit parameters and instance definitions came later in 2006 and we picked similar syntax since it seemed convenient. For the same reason, no effort was made to distinguish implicit imports or arguments from normal ones. + +Existing Scala programmers by and large have gotten used to the status quo and see little need for change. But for newcomers this status quo presents a big hurdle. I believe if we want to overcome that hurdle, we should take a step back and allow ourselves to consider a radically new design. + +## The New Design + +The following pages introduce a redesign of contextual abstractions in Scala. They introduce four fundamental changes (a short sketch after the lists below shows the new constructs side by side): + +1. [Given Instances](./givens.md) are a new way to define basic terms that can be synthesized. They replace implicit definitions. The core principle of the proposal is that, rather than mixing the `implicit` modifier with a large number of features, we have a single way to define terms that can be synthesized for types. + +2. [Using Clauses](./using-clauses.md) are a new syntax for implicit _parameters_ and their _arguments_. It unambiguously aligns parameters and arguments, solving a number of language warts. It also allows us to have several `using` clauses in a definition. + +3. ["Given" Imports](./given-imports.md) are a new class of import selectors that specifically import + givens and nothing else. + +4. [Implicit Conversions](./conversions.md) are now expressed as given instances of a standard `Conversion` class. All other forms of implicit conversions will be phased out. + +This section also contains pages describing other language features that are related to context abstraction. These are: + +- [Context Bounds](./context-bounds.md), which carry over unchanged. +- [Extension Methods](./extension-methods.md) replace implicit classes in a way that integrates better with type classes. +- [Implementing Type Classes](./type-classes.md) demonstrates how some common type classes can be implemented using the new constructs. +- [Type Class Derivation](./derivation.md) introduces constructs to automatically derive type class instances for ADTs. +- [Multiversal Equality](./multiversal-equality.md) introduces a special type class to support type safe equality. +- [Context Functions](./context-functions.md) provide a way to abstract over context parameters. +- [By-Name Context Parameters](./by-name-context-parameters.md) are an essential tool to define recursive synthesized values without looping. +- [Relationship with Scala 2 Implicits](./relationship-implicits.md) discusses the relationship between old-style implicits and new-style givens and how to migrate from one to the other.
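+ +To give a concrete flavour of the changes, here is a small sketch that shows the first two constructs side by side with their Scala 2 counterparts; the `Show` type class used here is hypothetical and serves only as an illustration: + +```scala +trait Show[T]: + def show(x: T): String + +// Scala 2: implicit val / implicit object (unconditional instance) +given Show[Int] with + def show(x: Int) = x.toString + +// Scala 2: implicit def with an implicit parameter (conditional instance) +given [T](using s: Show[T]): Show[List[T]] with + def show(xs: List[T]) = xs.map(s.show).mkString("[", ", ", "]") + +// Scala 2: a trailing implicit parameter list +def describe[T](x: T)(using s: Show[T]): String = s.show(x) + +@main def demo() = println(describe(List(1, 2, 3))) // prints [1, 2, 3] +```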
+ +Overall, the new design achieves a better separation of term inference from the rest of the language: There is a single way to define givens instead of a multitude of forms all taking an `implicit` modifier. There is a single way to introduce implicit parameters and arguments instead of conflating implicit with normal arguments. There is a separate way to import givens that does not allow them to hide in a sea of normal imports. And there is a single way to define an implicit conversion which is clearly marked as such and does not require special syntax. + +This design thus avoids feature interactions and makes the language more consistent and orthogonal. It will make implicits easier to learn and harder to abuse. It will greatly improve the clarity of the 95% of Scala programs that use implicits. It thus has the potential to fulfil the promise of term inference in a principled way that is also accessible and friendly. + +Could we achieve the same goals by tweaking existing implicits? After having tried for a long time, I believe now that this is impossible. + +- First, some of the problems are clearly syntactic and require different syntax to solve them. +- Second, there is the problem of how to migrate. We cannot change the rules in mid-flight. At some stage of language evolution we need to accommodate both the new and the old rules. With a syntax change, this is easy: Introduce the new syntax with new rules, support the old syntax for a while to facilitate cross compilation, deprecate and phase out the old syntax at some later time. Keeping the same syntax does not offer this path, and in fact does not seem to offer any viable path for evolution. +- Third, even if we somehow succeeded with migration, we would still have the problem of how to teach this. We cannot make existing tutorials go away. Almost all existing tutorials start with implicit conversions, which will go away; they use normal imports, which will go away, and they explain calls to methods with implicit parameters by expanding them to plain applications, which will also go away. This means that we'd have +to add modifications and qualifications to all existing literature and courseware, likely causing more confusion with beginners instead of less. By contrast, with a new syntax there is a clear criterion: Any book or courseware that mentions `implicit` is outdated and should be updated. diff --git a/docs/_spec/TODOreference/contextual/conversions.md b/docs/_spec/TODOreference/contextual/conversions.md new file mode 100644 index 000000000000..1ce8d42074e7 --- /dev/null +++ b/docs/_spec/TODOreference/contextual/conversions.md @@ -0,0 +1,76 @@ +--- +layout: doc-page +title: "Implicit Conversions" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/conversions.html +--- + +Implicit conversions are defined by given instances of the `scala.Conversion` class. +This class is defined in package `scala` as follows: +```scala +abstract class Conversion[-T, +U] extends (T => U): + def apply(x: T): U +``` +For example, here is an implicit conversion from `String` to `Token`: +```scala +given Conversion[String, Token] with + def apply(str: String): Token = new KeyWord(str) +``` +Using an alias this can be expressed more concisely as: +```scala +given Conversion[String, Token] = new KeyWord(_) +```
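+ +With such a given in scope, a `String` can then be used wherever a `Token` is expected. The following is a minimal, self-contained sketch of the mechanism; the `Token` and `KeyWord` classes are assumed here only for illustration, and the language import silences the implicit-conversion feature warning: + +```scala +import scala.language.implicitConversions + +class Token(val str: String) +class KeyWord(str: String) extends Token(str) + +given Conversion[String, Token] = new KeyWord(_) + +def tokenLength(t: Token): Int = t.str.length + +val t: Token = "if" // the compiler inserts the conversion +val n = tokenLength("while") // likewise for the argument +```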
+ +An implicit conversion is applied automatically by the compiler in three situations: + +1. If an expression `e` has type `T`, and `T` does not conform to the expression's expected type `S`. +2. In a selection `e.m` with `e` of type `T`, but `T` defines no member `m`. +3. In an application `e.m(args)` with `e` of type `T`, if `T` does define + some member(s) named `m`, but none of these members can be applied to the arguments `args`. + +In the first case, the compiler looks for a given `scala.Conversion` instance that maps +an argument of type `T` to type `S`. In the second and third +cases, it looks for a given `scala.Conversion` instance that maps an argument of type `T` +to a type that defines a member `m` which can be applied to `args` if present. +If such an instance `C` is found, the expression `e` is replaced by `C.apply(e)`. + +## Examples + +1. The `Predef` package contains "auto-boxing" conversions that map +primitive number types to subclasses of `java.lang.Number`. For instance, the +conversion from `Int` to `java.lang.Integer` can be defined as follows: + ```scala + given int2Integer: Conversion[Int, java.lang.Integer] = + java.lang.Integer.valueOf(_) + ``` + +2. The "magnet" pattern is sometimes used to express many variants of a method. Instead of defining overloaded versions of the method, one can also let the method take one or more arguments of specially defined "magnet" types, into which various argument types can be converted. Example: + ```scala + object Completions: + + // The argument "magnet" type + enum CompletionArg: + case Error(s: String) + case Response(f: Future[HttpResponse]) + case Status(code: Future[StatusCode]) + + object CompletionArg: + + // conversions defining the possible arguments to pass to `complete` + // these always come with CompletionArg + // They can be invoked explicitly, e.g. + // + // CompletionArg.fromStatusCode(statusCode) + + given fromString : Conversion[String, CompletionArg] = Error(_) + given fromFuture : Conversion[Future[HttpResponse], CompletionArg] = Response(_) + given fromStatusCode: Conversion[Future[StatusCode], CompletionArg] = Status(_) + end CompletionArg + import CompletionArg.* + + def complete[T](arg: CompletionArg) = arg match + case Error(s) => ... + case Response(f) => ... + case Status(code) => ... + + end Completions + ``` +This setup is more complicated than simple overloading of `complete`, but it can still be useful if normal overloading is not available (as in the case above, since we cannot have two overloaded methods that take `Future[...]` arguments), or if normal overloading would lead to a combinatorial explosion of variants. diff --git a/docs/_spec/TODOreference/contextual/derivation-macro.md b/docs/_spec/TODOreference/contextual/derivation-macro.md new file mode 100644 index 000000000000..be7565616913 --- /dev/null +++ b/docs/_spec/TODOreference/contextual/derivation-macro.md @@ -0,0 +1,205 @@ +--- +layout: doc-page +title: "How to write a type class `derived` method using macros" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/derivation-macro.html +--- + +In the main [derivation](./derivation.md) documentation page, we explained the +details behind `Mirror`s and type class derivation. Here we demonstrate how to +implement a type class `derived` method using macros only. We follow the same +example of deriving `Eq` instances and for simplicity we support a `Product` +type, e.g., a case class `Person`. The low-level method we will use to implement +the `derived` method exploits quotes, splices of both expressions and types, and +the `scala.quoted.Expr.summon` method, which is the equivalent of +`summonFrom`. `Expr.summon` is suitable for use in a quote context, i.e. within +macros.
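+ +Note that `Expr.summon` returns an `Option`: the implementation below simply calls `.get` on the result for brevity. In production code one would typically report a proper error when the search fails, along the lines of this sketch (the `summonOrFail` helper is hypothetical): + +```scala +import scala.quoted.* + +def summonOrFail[T: Type](using Quotes): Expr[T] = + Expr.summon[T].getOrElse( + quotes.reflect.report.errorAndAbort("no given instance found for " + Type.show[T])) +```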
+ +As in the original code, the type class definition is the same: + +```scala +trait Eq[T]: + def eqv(x: T, y: T): Boolean +``` + +We need to implement a method `Eq.derived` on the companion object of `Eq` that +produces a quoted instance for `Eq[T]`. Here is a possible signature, + +```scala +given derived[T: Type](using Quotes): Expr[Eq[T]] +``` + +and for comparison reasons we give the same signature we had with `inline`: + +```scala +inline given derived[T](using Mirror.Of[T]): Eq[T] = ??? +``` + +Note that, since a type is used in a subsequent stage, it will need to be lifted +to a `Type` by using the corresponding context bound. Also note that we can +summon the quoted `Mirror` inside the body of `derived`, and thus we can omit it +from the signature. The body of the `derived` method is shown below: + + +```scala +given derived[T: Type](using Quotes): Expr[Eq[T]] = + import quotes.reflect.* + + val ev: Expr[Mirror.Of[T]] = Expr.summon[Mirror.Of[T]].get + + ev match + case '{ $m: Mirror.ProductOf[T] { type MirroredElemTypes = elementTypes }} => + val elemInstances = summonAll[elementTypes] + def eqProductBody(x: Expr[Product], y: Expr[Product])(using Quotes): Expr[Boolean] = { + elemInstances.zipWithIndex.foldLeft(Expr(true)) { + case (acc, ('{ $elem: Eq[t] }, index)) => + val indexExpr = Expr(index) + val e1 = '{ $x.productElement($indexExpr).asInstanceOf[t] } + val e2 = '{ $y.productElement($indexExpr).asInstanceOf[t] } + '{ $acc && $elem.eqv($e1, $e2) } + } + } + '{ eqProduct((x: T, y: T) => ${eqProductBody('x.asExprOf[Product], 'y.asExprOf[Product])}) } + + // case for Mirror.SumOf[T] + // ... +``` + +Note that in the `inline` case we can merely write +`summonAll[m.MirroredElemTypes]` inside the inline method, but here, since +`Expr.summon` is required, we extract the element types in a macro fashion. +Being inside a macro, our first reaction would be to write the code below; however, since +the path inside the type argument is not stable, this cannot be used: + +```scala +'{ + summonAll[$m.MirroredElemTypes] +} +``` + +Instead we extract the tuple-type for element types using pattern matching over +quotes, and more specifically over the refined type: + +```scala + case '{ $m: Mirror.ProductOf[T] { type MirroredElemTypes = elementTypes }} => ... +``` + +Shown below is the implementation of `summonAll` as a macro. We assume that +given instances for our primitive types exist. + +```scala +def summonAll[T: Type](using Quotes): List[Expr[Eq[_]]] = + Type.of[T] match + case '[String *: tpes] => '{ summon[Eq[String]] } :: summonAll[tpes] + case '[Int *: tpes] => '{ summon[Eq[Int]] } :: summonAll[tpes] + case '[tpe *: tpes] => derived[tpe] :: summonAll[tpes] + case '[EmptyTuple] => Nil +``` + +One additional difference with the body of `derived` here, as opposed to the one +with `inline`, is that with macros we need to synthesize the body of the code at +macro-expansion time. That is the rationale behind the `eqProductBody` function. +Assuming that we calculate the equality of two `Person`s defined with a case +class that holds a name of type [`String`](https://scala-lang.org/api/3.x/scala/Predef$.html#String-0) +and an age of type `Int`, the equality check we want to generate is the following: + +```scala + true + && Eq[String].eqv(x.productElement(0), y.productElement(0)) + && Eq[Int].eqv(x.productElement(1), y.productElement(1)) +``` + +## Calling the derived method inside the macro + +Following the rules in [Macros](../metaprogramming/metaprogramming.md) we create two methods:
+one that hosts the top-level splice `eqv` and one that is the implementation. +Alternatively, as shown below, we can call the `eqv` method +directly; the `eqGen` given can trigger the derivation. + +```scala +extension [T](inline x: T) + inline def === (inline y: T)(using eq: Eq[T]): Boolean = eq.eqv(x, y) + +inline given eqGen[T]: Eq[T] = ${ Eq.derived[T] } +``` + +Note that we use inline method syntax, and that we can compare instances such as +`Sm(Person("Test", 23)) === Sm(Person("Test", 24))` for, e.g., the following two +types: + +```scala +case class Person(name: String, age: Int) + +enum Opt[+T]: + case Sm(t: T) + case Nn +``` + +The full code is shown below: + +```scala +import scala.deriving.* +import scala.quoted.* + + +trait Eq[T]: + def eqv(x: T, y: T): Boolean + +object Eq: + given Eq[String] with + def eqv(x: String, y: String) = x == y + + given Eq[Int] with + def eqv(x: Int, y: Int) = x == y + + def eqProduct[T](body: (T, T) => Boolean): Eq[T] = + new Eq[T]: + def eqv(x: T, y: T): Boolean = body(x, y) + + def eqSum[T](body: (T, T) => Boolean): Eq[T] = + new Eq[T]: + def eqv(x: T, y: T): Boolean = body(x, y) + + def summonAll[T: Type](using Quotes): List[Expr[Eq[_]]] = + Type.of[T] match + case '[String *: tpes] => '{ summon[Eq[String]] } :: summonAll[tpes] + case '[Int *: tpes] => '{ summon[Eq[Int]] } :: summonAll[tpes] + case '[tpe *: tpes] => derived[tpe] :: summonAll[tpes] + case '[EmptyTuple] => Nil + + given derived[T: Type](using q: Quotes): Expr[Eq[T]] = + import quotes.reflect.* + + val ev: Expr[Mirror.Of[T]] = Expr.summon[Mirror.Of[T]].get + + ev match + case '{ $m: Mirror.ProductOf[T] { type MirroredElemTypes = elementTypes }} => + val elemInstances = summonAll[elementTypes] + val eqProductBody: (Expr[T], Expr[T]) => Expr[Boolean] = (x, y) => + elemInstances.zipWithIndex.foldLeft(Expr(true: Boolean)) { + case (acc, (elem, index)) => + val e1 = '{$x.asInstanceOf[Product].productElement(${Expr(index)})} + val e2 = '{$y.asInstanceOf[Product].productElement(${Expr(index)})} + + '{ $acc && $elem.asInstanceOf[Eq[Any]].eqv($e1, $e2) } + } + '{ eqProduct((x: T, y: T) => ${eqProductBody('x, 'y)}) } + + case '{ $m: Mirror.SumOf[T] { type MirroredElemTypes = elementTypes }} => + val elemInstances = summonAll[elementTypes] + val eqSumBody: (Expr[T], Expr[T]) => Expr[Boolean] = (x, y) => + val ordx = '{ $m.ordinal($x) } + val ordy = '{ $m.ordinal($y) } + + val elements = Expr.ofList(elemInstances) + '{ $ordx == $ordy && $elements($ordx).asInstanceOf[Eq[Any]].eqv($x, $y) } + + '{ eqSum((x: T, y: T) => ${eqSumBody('x, 'y)}) } + end derived +end Eq + +object Macro3: + extension [T](inline x: T) + inline def === (inline y: T)(using eq: Eq[T]): Boolean = eq.eqv(x, y) + + inline given eqGen[T]: Eq[T] = ${ Eq.derived[T] } +``` diff --git a/docs/_spec/TODOreference/contextual/derivation.md b/docs/_spec/TODOreference/contextual/derivation.md new file mode 100644 index 000000000000..bad47dcb0096 --- /dev/null +++ b/docs/_spec/TODOreference/contextual/derivation.md @@ -0,0 +1,425 @@ +--- +layout: doc-page +title: "Type Class Derivation" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/derivation.html +--- + +Type class derivation is a way to automatically generate given instances for type classes which satisfy some simple +conditions. A type class in this sense is any trait or class with a type parameter determining the type being operated +on. Common examples are `Eq`, `Ordering`, or `Show`.
For example, given the following `Tree` algebraic data type +(ADT), + +```scala +enum Tree[T] derives Eq, Ordering, Show: + case Branch(left: Tree[T], right: Tree[T]) + case Leaf(elem: T) +``` + +The `derives` clause generates the following given instances for the `Eq`, `Ordering` and `Show` type classes in the +companion object of `Tree`, + +```scala +given [T: Eq] : Eq[Tree[T]] = Eq.derived +given [T: Ordering] : Ordering[Tree[T]] = Ordering.derived +given [T: Show] : Show[Tree[T]] = Show.derived +``` + +We say that `Tree` is the _deriving type_ and that the `Eq`, `Ordering` and `Show` instances are _derived instances_. + +## Types supporting `derives` clauses + +All data types can have a `derives` clause. This document focuses primarily on data types which also have a given instance +of the `Mirror` type class available. + +`Mirror` type class instances provide information at the type level about the components and labelling of the type. +They also provide minimal term level infrastructure to allow higher level libraries to provide comprehensive +derivation support. + +Instances of the `Mirror` type class are generated automatically by the compiler +unconditionally for: +- enums and enum cases, +- case objects. + +Instances for `Mirror` are also generated conditionally for: +- case classes where the constructor is visible at the callsite (always true if the companion is not a case object) +- sealed classes and sealed traits where: + - there exists at least one child case, + - each child case is reachable from the parent's definition, + - if the sealed trait/class has no companion, then each child case is reachable from the callsite through the prefix of the type being mirrored, + - and where the compiler can generate a `Mirror` type class instance for each child case. + + +The `Mirror` type class definition is as follows: + +```scala +sealed trait Mirror: + + /** the type being mirrored */ + type MirroredType + + /** the type of the elements of the mirrored type */ + type MirroredElemTypes + + /** The mirrored *-type */ + type MirroredMonoType + + /** The name of the type */ + type MirroredLabel <: String + + /** The names of the elements of the type */ + type MirroredElemLabels <: Tuple + +object Mirror: + + /** The Mirror for a product type */ + trait Product extends Mirror: + + /** Create a new instance of type `T` with elements + * taken from product `p`. + */ + def fromProduct(p: scala.Product): MirroredMonoType + + trait Sum extends Mirror: + + /** The ordinal number of the case class of `x`. + * For enums, `ordinal(x) == x.ordinal` + */ + def ordinal(x: MirroredMonoType): Int + +end Mirror +``` + +Product types (i.e. case classes and objects, and enum cases) have mirrors which are subtypes of `Mirror.Product`. Sum +types (i.e. sealed class or traits with product children, and enums) have mirrors which are subtypes of `Mirror.Sum`. 
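+ +Before looking at the generated instances in detail, here is a small sketch of the term-level side of this interface, using a hypothetical `Point` case class: the compiler-provided product mirror can rebuild a value from an ordinary tuple, since tuples are instances of `Product`: + +```scala +import scala.deriving.Mirror + +case class Point(x: Int, y: Int) + +val m = summon[Mirror.ProductOf[Point]] +val p: Point = m.fromProduct((1, 2)) // Point(1,2), rebuilt from a tuple +```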
+ +For the `Tree` ADT from above the following `Mirror` instances will be automatically provided by the compiler, + +```scala +// Mirror for Tree +new Mirror.Sum: + type MirroredType = Tree + type MirroredElemTypes[T] = (Branch[T], Leaf[T]) + type MirroredMonoType = Tree[_] + type MirroredLabel = "Tree" + type MirroredElemLabels = ("Branch", "Leaf") + + def ordinal(x: MirroredMonoType): Int = x match + case _: Branch[_] => 0 + case _: Leaf[_] => 1 + +// Mirror for Branch +new Mirror.Product: + type MirroredType = Branch + type MirroredElemTypes[T] = (Tree[T], Tree[T]) + type MirroredMonoType = Branch[_] + type MirroredLabel = "Branch" + type MirroredElemLabels = ("left", "right") + + def fromProduct(p: Product): MirroredMonoType = + new Branch(...) + +// Mirror for Leaf +new Mirror.Product: + type MirroredType = Leaf + type MirroredElemTypes[T] = Tuple1[T] + type MirroredMonoType = Leaf[_] + type MirroredLabel = "Leaf" + type MirroredElemLabels = Tuple1["elem"] + + def fromProduct(p: Product): MirroredMonoType = + new Leaf(...) +``` + +If a Mirror cannot be generated automatically for a given type, an error will appear explaining why it is neither a supported +sum type nor a product type. For example, if `A` is a trait that is not sealed, + +``` +No given instance of type deriving.Mirror.Of[A] was found for parameter x of method summon in object Predef. Failed to synthesize an instance of type deriving.Mirror.Of[A]: + * trait A is not a generic product because it is not a case class + * trait A is not a generic sum because it is not a sealed trait +``` + + +Note the following properties of `Mirror` types, + ++ Properties are encoded using types rather than terms. This means that they have no runtime footprint unless used and + also that they are a compile time feature for use with Scala 3's metaprogramming facilities (see the sketch after this list). ++ There is no restriction against the mirrored type being a local or inner class. ++ The kinds of `MirroredType` and `MirroredElemTypes` match the kind of the data type the mirror is an instance for. + This allows `Mirror`s to support ADTs of all kinds. ++ There is no distinct representation type for sums or products (i.e. there is no `HList` or `Coproduct` type as in + Scala 2 versions of Shapeless). Instead the collection of child types of a data type is represented by an ordinary, + possibly parameterized, tuple type. Scala 3's metaprogramming facilities can be used to work with these tuple types + as-is, and higher level libraries can be built on top of them. ++ For both product and sum types, the elements of `MirroredElemTypes` are arranged in definition order (i.e. `Branch[T]` + precedes `Leaf[T]` in `MirroredElemTypes` for `Tree` because `Branch` is defined before `Leaf` in the source file). + This means that `Mirror.Sum` differs in this respect from Shapeless's generic representation for ADTs in Scala 2, + where the constructors are ordered alphabetically by name. ++ The methods `ordinal` and `fromProduct` are defined in terms of `MirroredMonoType` which is the type of kind-`*` + which is obtained from `MirroredType` by wildcarding its type parameters.
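+ +As a small illustration of this type-level encoding, the label of a mirrored type can be read off as a compile-time constant (a sketch, reusing the hypothetical `Point` from above): + +```scala +import scala.deriving.Mirror +import scala.compiletime.constValue + +inline def typeName[T](using m: Mirror.Of[T]): String = + constValue[m.MirroredLabel] + +val n = typeName[Point] // "Point", computed at compile time +```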
+ +## Type classes supporting automatic deriving + +A trait or class can appear in a `derives` clause if its companion object defines a method named `derived`. The +signature and implementation of a `derived` method for a type class `TC[_]` are arbitrary, but it is typically of the +following form, + +```scala +import scala.deriving.Mirror + +inline def derived[T](using Mirror.Of[T]): TC[T] = ... +``` + +That is, the `derived` method takes a context parameter of (some subtype of) type `Mirror` which defines the shape of +the deriving type `T`, and computes the type class implementation according to that shape. This is all that the +provider of an ADT with a `derives` clause has to know about the derivation of a type class instance. + +Note that `derived` methods may have context `Mirror` parameters indirectly (e.g. by having a context argument which in turn +has a context `Mirror` parameter), or not at all (e.g. they might use some completely different user-provided mechanism, for +instance using Scala 3 macros or runtime reflection). We expect that (direct or indirect) `Mirror` based implementations +will be the most common and that is what this document emphasises. + +Type class authors will most likely use higher level derivation or generic programming libraries to implement +`derived` methods. An example of how a `derived` method might be implemented using _only_ the low level facilities +described above and Scala 3's general metaprogramming features is provided below. It is not anticipated that type class +authors would normally implement a `derived` method in this way; however, this walkthrough can be taken as a guide for +authors of the higher level derivation libraries that we expect typical type class authors will use (for a fully +worked out example of such a library, see [Shapeless 3](https://github.com/milessabin/shapeless/tree/shapeless-3)). + +## How to write a type class `derived` method using low level mechanisms + +The low-level method we will use to implement a type class `derived` method in this example exploits three new +type-level constructs in Scala 3: inline methods, inline matches, and implicit searches via `summonInline` or `summonFrom`. Given this definition of the +`Eq` type class, + +```scala +trait Eq[T]: + def eqv(x: T, y: T): Boolean +``` + +we need to implement a method `Eq.derived` on the companion object of `Eq` that produces a given instance for `Eq[T]` given +a `Mirror[T]`. Here is a possible implementation, + +```scala +import scala.deriving.Mirror + +inline given derived[T](using m: Mirror.Of[T]): Eq[T] = + val elemInstances = summonAll[m.MirroredElemTypes] // (1) + inline m match // (2) + case s: Mirror.SumOf[T] => eqSum(s, elemInstances) + case p: Mirror.ProductOf[T] => eqProduct(p, elemInstances) +``` + +Note that `derived` is defined as an `inline` given. This means that the method will be expanded at +call sites (for instance the compiler generated instance definitions in the companion objects of ADTs which have a +`derives Eq` clause), and also that it can be used recursively if necessary, to compute instances for children. + +The body of this method (1) first materializes the `Eq` instances for all the child types of the type the instance is +being derived for. This is either all the branches of a sum type or all the fields of a product type. The +implementation of `summonAll` is `inline` and uses Scala 3's `summonInline` construct to collect the instances as a +`List`, + +```scala +inline def summonAll[T <: Tuple]: List[Eq[_]] = + inline erasedValue[T] match + case _: EmptyTuple => Nil + case _: (t *: ts) => summonInline[Eq[t]] :: summonAll[ts] +``` + +With the instances for children in hand, the `derived` method uses an `inline match` to dispatch to methods which can +construct instances for either sums or products (2).
Note that because `derived` is `inline` the match will be +resolved at compile-time and only the right-hand side of the matching case will be inlined into the generated code with +types refined as revealed by the match. + +In the sum case, `eqSum`, we use the runtime `ordinal` values of the arguments to `eqv` to first check if the two +values are of the same subtype of the ADT (3) and then, if they are, to further test for equality based on the `Eq` +instance for the appropriate ADT subtype using the auxiliary method `check` (4). + +```scala +import scala.deriving.Mirror + +def eqSum[T](s: Mirror.SumOf[T], elems: List[Eq[_]]): Eq[T] = + new Eq[T]: + def eqv(x: T, y: T): Boolean = + val ordx = s.ordinal(x) // (3) + (s.ordinal(y) == ordx) && check(elems(ordx))(x, y) // (4) +``` + +In the product case, `eqProduct`, we test the runtime values of the arguments to `eqv` for equality as products based +on the `Eq` instances for the fields of the data type (5), + +```scala +import scala.deriving.Mirror + +def eqProduct[T](p: Mirror.ProductOf[T], elems: List[Eq[_]]): Eq[T] = + new Eq[T]: + def eqv(x: T, y: T): Boolean = + iterator(x).zip(iterator(y)).zip(elems.iterator).forall { // (5) + case ((x, y), elem) => check(elem)(x, y) + } +``` + +Pulling this all together we have the following complete implementation, + +```scala +import scala.deriving.* +import scala.compiletime.{erasedValue, summonInline} + +inline def summonAll[T <: Tuple]: List[Eq[_]] = + inline erasedValue[T] match + case _: EmptyTuple => Nil + case _: (t *: ts) => summonInline[Eq[t]] :: summonAll[ts] + +trait Eq[T]: + def eqv(x: T, y: T): Boolean + +object Eq: + given Eq[Int] with + def eqv(x: Int, y: Int) = x == y + + def check(elem: Eq[_])(x: Any, y: Any): Boolean = + elem.asInstanceOf[Eq[Any]].eqv(x, y) + + def iterator[T](p: T) = p.asInstanceOf[Product].productIterator + + def eqSum[T](s: Mirror.SumOf[T], elems: => List[Eq[_]]): Eq[T] = + new Eq[T]: + def eqv(x: T, y: T): Boolean = + val ordx = s.ordinal(x) + (s.ordinal(y) == ordx) && check(elems(ordx))(x, y) + + def eqProduct[T](p: Mirror.ProductOf[T], elems: => List[Eq[_]]): Eq[T] = + new Eq[T]: + def eqv(x: T, y: T): Boolean = + iterator(x).zip(iterator(y)).zip(elems.iterator).forall { + case ((x, y), elem) => check(elem)(x, y) + } + + inline given derived[T](using m: Mirror.Of[T]): Eq[T] = + lazy val elemInstances = summonAll[m.MirroredElemTypes] + inline m match + case s: Mirror.SumOf[T] => eqSum(s, elemInstances) + case p: Mirror.ProductOf[T] => eqProduct(p, elemInstances) +end Eq +``` + +We can test this relative to a simple ADT like so, + +```scala +enum Opt[+T] derives Eq: + case Sm(t: T) + case Nn + +@main def test(): Unit = + import Opt.* + val eqoi = summon[Eq[Opt[Int]]] + assert(eqoi.eqv(Sm(23), Sm(23))) + assert(!eqoi.eqv(Sm(23), Sm(13))) + assert(!eqoi.eqv(Sm(23), Nn)) +``` + +In this case the code that is generated by the inline expansion for the derived `Eq` instance for `Opt` looks like the +following, after a little polishing, + +```scala +given derived$Eq[T](using eqT: Eq[T]): Eq[Opt[T]] = + eqSum( + summon[Mirror[Opt[T]]], + List( + eqProduct(summon[Mirror[Sm[T]]], List(summon[Eq[T]])), + eqProduct(summon[Mirror[Nn.type]], Nil) + ) + ) +``` + +Alternative approaches can be taken to the way that `derived` methods can be defined.
For example, more aggressively +inlined variants using Scala 3 macros, whilst being more involved for type class authors to write than the example +above, can produce code for type classes like `Eq` which eliminates all the abstraction artefacts (e.g. the `List`s of +child instances in the above) and generates code which is indistinguishable from what a programmer might write by hand. +As a third example, using a higher level library such as Shapeless the type class author could define an equivalent +`derived` method as, + +```scala +given eqSum[A](using inst: => K0.CoproductInstances[Eq, A]): Eq[A] with + def eqv(x: A, y: A): Boolean = inst.fold2(x, y)(false)( + [t] => (eqt: Eq[t], t0: t, t1: t) => eqt.eqv(t0, t1) + ) + +given eqProduct[A](using inst: K0.ProductInstances[Eq, A]): Eq[A] with + def eqv(x: A, y: A): Boolean = inst.foldLeft2(x, y)(true: Boolean)( + [t] => (acc: Boolean, eqt: Eq[t], t0: t, t1: t) => + Complete(!eqt.eqv(t0, t1))(false)(true) + ) + +inline def derived[A](using gen: K0.Generic[A]): Eq[A] = + gen.derive(eqProduct, eqSum) +``` + +The framework described here enables all three of these approaches without mandating any of them. + +For a brief discussion on how to use macros to write a type class `derived` +method please read more at [How to write a type class `derived` method using macros](./derivation-macro.md). + +## Deriving instances elsewhere + +Sometimes one would like to derive a type class instance for an ADT after the ADT is defined, without being able to +change the code of the ADT itself. To do this, simply define an instance using the `derived` method of the type class +as right-hand side. E.g., to implement `Ordering` for `Option`, define + +```scala +given [T: Ordering]: Ordering[Option[T]] = Ordering.derived +``` + +Assuming the `Ordering.derived` method has a context parameter of type `Mirror[T]`, it will be satisfied by the +compiler generated `Mirror` instance for `Option` and the derivation of the instance will be expanded on the right +hand side of this definition in the same way as an instance defined in ADT companion objects.
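+ +The same works with the `Eq` type class from the walkthrough above: since the compiler provides `Mirror` instances for pre-existing sealed hierarchies, an `Eq` instance for, say, `Either` can be added retroactively (a sketch, assuming the complete `Eq` implementation from the previous section is in scope): + +```scala +// Retroactively derive Eq for the standard library's Either, +// without touching its definition: +given [A, B](using Eq[A], Eq[B]): Eq[Either[A, B]] = Eq.derived + +// e.g. summon[Eq[Either[Int, Int]]].eqv(Right(1), Right(1)) evaluates to true +```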
+ +## Syntax + +``` +Template ::= InheritClauses [TemplateBody] +EnumDef ::= id ClassConstr InheritClauses EnumBody +InheritClauses ::= [‘extends’ ConstrApps] [‘derives’ QualId {‘,’ QualId}] +ConstrApps ::= ConstrApp {‘with’ ConstrApp} + | ConstrApp {‘,’ ConstrApp} +``` + +**Note:** To align `extends` clauses and `derives` clauses, Scala 3 also allows multiple +extended types to be separated by commas. So the following is now legal: + +```scala +class A extends B, C { ... } +``` + +It is equivalent to the old form + +```scala +class A extends B with C { ... } +``` + +## Discussion + +This type class derivation framework is intentionally very small and low-level. There are essentially two pieces of +infrastructure in compiler-generated `Mirror` instances, + ++ type members encoding properties of the mirrored types. ++ a minimal value level mechanism for working generically with terms of the mirrored types. + +The `Mirror` infrastructure can be seen as an extension of the existing `Product` infrastructure for case classes: +typically `Mirror` types will be implemented by the ADT's companion object, hence the type members and the `ordinal` or +`fromProduct` methods will be members of that object. The primary motivation for this design decision, and the +decision to encode properties via types rather than terms, was to keep the bytecode and runtime footprint of the +feature small enough to make it possible to provide `Mirror` instances _unconditionally_. + +Whilst `Mirrors` encode properties precisely via type members, the value level `ordinal` and `fromProduct` are +somewhat weakly typed (because they are defined in terms of `MirroredMonoType`) just like the members of `Product`. +This means that code for generic type classes has to ensure that type exploration and value selection proceed in +lockstep and it has to assert this conformance in some places using casts. If generic type classes are correctly +written these casts will never fail. + +As mentioned, however, the compiler-provided mechanism is intentionally very low level and it is anticipated that +higher level type class derivation and generic programming libraries will build on this and Scala 3's other +metaprogramming facilities to hide these low-level details from type class authors and general users. Type class +derivation in the style of both Shapeless and Magnolia is possible (a prototype of Shapeless 3, which combines +aspects of both Shapeless 2 and Magnolia, has been developed alongside this language feature) as is a more aggressively +inlined style, supported by Scala 3's new quote/splice macro and inlining facilities. diff --git a/docs/_spec/TODOreference/contextual/extension-methods.md b/docs/_spec/TODOreference/contextual/extension-methods.md new file mode 100644 index 000000000000..d23cadf513d7 --- /dev/null +++ b/docs/_spec/TODOreference/contextual/extension-methods.md @@ -0,0 +1,306 @@ +--- +layout: doc-page +title: "Extension Methods" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/extension-methods.html +--- + +Extension methods allow one to add methods to a type after the type is defined. Example: + +```scala +case class Circle(x: Double, y: Double, radius: Double) + +extension (c: Circle) + def circumference: Double = c.radius * math.Pi * 2 +``` + +Like regular methods, extension methods can be invoked with infix `.`: + +```scala +val circle = Circle(0, 0, 1) +circle.circumference +``` + +## Translation of Extension Methods + +An extension method translates to a specially labelled method that takes the leading parameter section as its first argument list. The label, expressed +as `<extension>` here, is compiler-internal. So, the definition of `circumference` above translates to the following method, and can also be invoked as such: + +``` +<extension> def circumference(c: Circle): Double = c.radius * math.Pi * 2 + +assert(circle.circumference == circumference(circle)) +``` + +## Operators + +The extension method syntax can also be used to define operators. Examples: + +```scala +extension (x: String) + def < (y: String): Boolean = ... +extension (x: Elem) + def +: (xs: Seq[Elem]): Seq[Elem] = ... +extension (x: Number) + infix def min (y: Number): Number = ... + +"ab" < "c" +1 +: List(2, 3) +x min 3 +``` + +The three definitions above translate to + +``` +<extension> def < (x: String)(y: String): Boolean = ... +<extension> def +: (xs: Seq[Elem])(x: Elem): Seq[Elem] = ... +<extension> infix def min(x: Number)(y: Number): Number = ... +``` + +Note the swap of the two parameters `x` and `xs` when translating +the right-associative operator `+:` to an extension method. This is analogous +to the implementation of right binding operators as normal methods.
The Scala +compiler preprocesses an infix operation `x +: xs` to `xs.+:(x)`, so the extension +method ends up being applied to the sequence as first argument (in other words, the +two swaps cancel each other out). See [here for details](./right-associative-extension-methods.md). + +## Generic Extensions + +It is also possible to extend generic types by adding type parameters to an extension. For instance: + +```scala +extension [T](xs: List[T]) + def second = xs.tail.head + +extension [T: Numeric](x: T) + def + (y: T): T = summon[Numeric[T]].plus(x, y) +``` + +Type parameters on extensions can also be combined with type parameters on the methods +themselves: + +```scala +extension [T](xs: List[T]) + def sumBy[U: Numeric](f: T => U): U = ... +``` + +Type arguments matching method type parameters are passed as usual: + +```scala +List("a", "bb", "ccc").sumBy[Int](_.length) +``` + +By contrast, type arguments matching type parameters following `extension` can be passed +only if the method is referenced as a non-extension method: + +```scala +sumBy[String](List("a", "bb", "ccc"))(_.length) +``` + +Or, when passing both type arguments: + +```scala +sumBy[String](List("a", "bb", "ccc"))[Int](_.length) +``` + +Extensions can also take using clauses. For instance, the `+` extension above could equivalently be written with a using clause: + +```scala +extension [T](x: T)(using n: Numeric[T]) + def + (y: T): T = n.plus(x, y) +``` + +## Collective Extensions + +Sometimes, one wants to define several extension methods that share the same +left-hand parameter type. In this case one can "pull out" the common parameters into +a single extension and enclose all methods in braces or an indented region. +Example: + +```scala +extension (ss: Seq[String]) + + def longestStrings: Seq[String] = + val maxLength = ss.map(_.length).max + ss.filter(_.length == maxLength) + + def longestString: String = longestStrings.head +``` + +The same can be written with braces as follows (note that indented regions can still be used inside braces): + +```scala +extension (ss: Seq[String]) { + + def longestStrings: Seq[String] = { + val maxLength = ss.map(_.length).max + ss.filter(_.length == maxLength) + } + + def longestString: String = longestStrings.head +} +``` + +Note the right-hand side of `longestString`: it calls `longestStrings` directly, implicitly +assuming the common extended value `ss` as receiver. + +Collective extensions like these are a shorthand for individual extensions +where each method is defined separately. For instance, the first extension above expands to: + +```scala +extension (ss: Seq[String]) + def longestStrings: Seq[String] = + val maxLength = ss.map(_.length).max + ss.filter(_.length == maxLength) + +extension (ss: Seq[String]) + def longestString: String = ss.longestStrings.head +``` + +Collective extensions also can take type parameters and have using clauses. Example: + +```scala +extension [T](xs: List[T])(using Ordering[T]) + def smallest(n: Int): List[T] = xs.sorted.take(n) + def smallestIndices(n: Int): List[Int] = + val limit = smallest(n).max + xs.zipWithIndex.collect { case (x, i) if x <= limit => i } +``` + +## Translation of Calls to Extension Methods + +To convert a reference to an extension method, the compiler has to know about the extension +method. We say in this case that the extension method is _applicable_ at the point of reference. +There are four possible ways for an extension method to be applicable: + + 1. 
The extension method is visible under a simple name, by being defined or inherited or imported in a scope enclosing the reference. + 2. The extension method is a member of some given + instance that is visible at the point of the reference. + 3. The reference is of the form `r.m` and the extension method + is defined in the implicit scope of the type of `r`. + 4. The reference is of the form `r.m` and the extension method + is defined in some given instance in the implicit scope of the type of `r`. + +Here is an example for the first rule: + +```scala +trait IntOps: + extension (i: Int) def isZero: Boolean = i == 0 + + extension (i: Int) def safeMod(x: Int): Option[Int] = + // extension method defined in same scope IntOps + if x.isZero then None + else Some(i % x) + +object IntOpsEx extends IntOps: + extension (i: Int) def safeDiv(x: Int): Option[Int] = + // extension method brought into scope via inheritance from IntOps + if x.isZero then None + else Some(i / x) + +trait SafeDiv: + import IntOpsEx.* // brings safeDiv and safeMod into scope + + extension (i: Int) def divide(d: Int): Option[(Int, Int)] = + // extension methods imported and thus in scope + (i.safeDiv(d), i.safeMod(d)) match + case (Some(d), Some(r)) => Some((d, r)) + case _ => None +``` + +By the second rule, an extension method can be made available by defining a given instance containing it, like this: + +```scala +given ops1: IntOps() // brings safeMod into scope + +1.safeMod(2) +``` + +By the third and fourth rule, an extension method is available if it is in the implicit scope of the receiver type or in a given instance in that scope. Example: + +```scala +class List[T]: + ... +object List: + ... + extension [T](xs: List[List[T]]) + def flatten: List[T] = xs.foldLeft(List.empty[T])(_ ++ _) + + given [T: Ordering]: Ordering[List[T]] with + extension (xs: List[T]) + def < (ys: List[T]): Boolean = ... +end List + +// extension method available since it is in the implicit scope +// of List[List[Int]] +List(List(1, 2), List(3, 4)).flatten + +// extension method available since it is in the given Ordering[List[T]], +// which is itself in the implicit scope of List[Int] +List(1, 2) < List(3) +``` + +The precise rules for resolving a selection to an extension method are as follows. + +Assume a selection `e.m[Ts]` where `m` is not a member of `e`, where the type arguments `[Ts]` are optional, and where `T` is the expected type. +The following two rewritings are tried in order: + + 1. The selection is rewritten to `m[Ts](e)`. + 2. If the first rewriting does not typecheck with expected type `T`, + and there is an extension method `m` in some eligible object `o`, the selection is rewritten to `o.m[Ts](e)`. An object `o` is _eligible_ if + + - `o` forms part of the implicit scope of `T`, or + - `o` is a given instance that is visible at the point of the application, or + - `o` is a given instance in the implicit scope of `T`. + + This second rewriting is attempted at the time where the compiler also tries an implicit conversion + from `T` to a type containing `m`. If there is more than one way of rewriting, an ambiguity error results. + +An extension method can also be referenced using a simple identifier without a preceding expression. If an identifier `g` appears in the body of an extension method `f` and refers to an extension method `g` that is defined in the same collective extension + +```scala +extension (x: T) + def f ... = ... g ... + def g ... +``` + +the identifier is rewritten to `x.g`. 
This is also the case if `f` and `g` are the same method. Example: + +```scala +extension (s: String) + def position(ch: Char, n: Int): Int = + if n < s.length && s(n) != ch then position(ch, n + 1) + else n +``` + +The recursive call `position(ch, n + 1)` expands to `s.position(ch, n + 1)` in this case. The whole extension method rewrites to + +```scala +def position(s: String)(ch: Char, n: Int): Int = + if n < s.length && s(n) != ch then position(s)(ch, n + 1) + else n +``` + +## Syntax + +Here are the syntax changes for extension methods and collective extensions relative +to the [current syntax](../syntax.md). + +``` +BlockStat ::= ... | Extension +TemplateStat ::= ... | Extension +TopStat ::= ... | Extension +Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} + ‘(’ DefParam ‘)’ {UsingParamClause} ExtMethods +ExtMethods ::= ExtMethod | [nl] <<< ExtMethod {semi ExtMethod} >>> +ExtMethod ::= {Annotation [nl]} {Modifier} ‘def’ DefDef +``` + +In the above the notation `<<< ts >>>` in the production rule `ExtMethods` is defined as follows: + +``` +<<< ts >>> ::= ‘{’ ts ‘}’ | indent ts outdent +``` + +`extension` is a soft keyword. It is recognized as a keyword only if it appears +at the start of a statement and is followed by `[` or `(`. In all other cases +it is treated as an identifier. diff --git a/docs/_spec/TODOreference/contextual/given-imports.md b/docs/_spec/TODOreference/contextual/given-imports.md new file mode 100644 index 000000000000..6a55368979b1 --- /dev/null +++ b/docs/_spec/TODOreference/contextual/given-imports.md @@ -0,0 +1,117 @@ +--- +layout: doc-page +title: "Importing Givens" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/given-imports.html +--- + +A special form of import wildcard selector is used to import given instances. Example: + +```scala +object A: + class TC + given tc: TC = ??? + def f(using TC) = ??? + +object B: + import A.* + import A.given + ... +``` + +In the code above, the `import A.*` clause in object `B` imports all members +of `A` _except_ the given instance `tc`. Conversely, the second import `import A.given` will import _only_ that given instance. +The two import clauses can also be merged into one: + +```scala +object B: + import A.{given, *} + ... +``` + +Generally, a normal wildcard selector `*` brings all definitions other than givens or extensions into scope +whereas a `given` selector brings all givens (including those resulting from extensions) into scope. + +There are two main benefits arising from these rules: + +- It is made clearer where givens in scope are coming from. + In particular, it is not possible to hide imported givens in a long list of regular wildcard imports. +- It enables importing all givens + without importing anything else. This is particularly important since givens + can be anonymous, so the usual recourse of using named imports is not + practical. + +## Importing By Type + +Since givens can be anonymous it is not always practical to import them by their name, and wildcard imports are typically used instead. By-type imports provide a more specific alternative to wildcard imports, which makes it clearer what is imported. Example: + +```scala +import A.given TC +``` + +This imports any given in `A` that has a type which conforms to `TC`. Importing givens of several types `T1,...,Tn` +is expressed by multiple `given` selectors. + +```scala +import A.{given T1, ..., given Tn} +``` + +Importing all given instances of a parameterized type is expressed by wildcard arguments.
+For instance, assuming the object + +```scala +object Instances: + given intOrd: Ordering[Int] = ... + given listOrd[T: Ordering]: Ordering[List[T]] = ... + given ec: ExecutionContext = ... + given im: Monoid[Int] = ... +``` + +the import clause + +```scala +import Instances.{given Ordering[?], given ExecutionContext} +``` + +would import the `intOrd`, `listOrd`, and `ec` instances but leave out the `im` instance, since it fits none of the specified bounds. + +By-type imports can be mixed with by-name imports. If both are present in an import clause, by-type imports come last. For instance, the import clause + +```scala +import Instances.{im, given Ordering[?]} +``` + +would import `im`, `intOrd`, and `listOrd` but leave out `ec`. + +## Migration + +The rules for imports stated above have the consequence that a library +would have to migrate in lockstep with all its users from old style implicits and +normal imports to givens and given imports. + +The following modifications avoid this hurdle to migration. + + 1. A `given` import selector also brings old style implicits into scope. So, in Scala 3.0 + an old-style implicit definition can be brought into scope either by a `*` or a `given` wildcard selector. + + 2. In Scala 3.1, old-style implicits accessed through a `*` wildcard import will give a deprecation warning. + + 3. In some version after 3.1, old-style implicits accessed through a `*` wildcard import will give a compiler error. + +These rules mean that library users can use `given` selectors to access old-style implicits in Scala 3.0, +and will be gently nudged and then forced to do so in later versions. Libraries can then switch to +given instances once their user base has migrated. + +## Syntax + +``` +Import ::= ‘import’ ImportExpr {‘,’ ImportExpr} +Export ::= ‘export’ ImportExpr {‘,’ ImportExpr} +ImportExpr ::= SimpleRef {‘.’ id} ‘.’ ImportSpec +ImportSpec ::= NamedSelector + | WildCardSelector + | ‘{’ ImportSelectors ‘}’ +NamedSelector ::= id [‘as’ (id | ‘_’)] +WildCardSelector ::= ‘*’ | ‘given’ [InfixType] +ImportSelectors ::= NamedSelector [‘,’ ImportSelectors] + | WildCardSelector {‘,’ WildCardSelector} +``` diff --git a/docs/_spec/TODOreference/contextual/givens.md b/docs/_spec/TODOreference/contextual/givens.md new file mode 100644 index 000000000000..411d50ba63ea --- /dev/null +++ b/docs/_spec/TODOreference/contextual/givens.md @@ -0,0 +1,193 @@ +--- +layout: doc-page +title: "Given Instances" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/givens.html +--- + +Given instances (or, simply, "givens") define "canonical" values of certain types +that serve for synthesizing arguments to [context parameters](./using-clauses.md). Example: + +```scala +trait Ord[T]: + def compare(x: T, y: T): Int + extension (x: T) def < (y: T) = compare(x, y) < 0 + extension (x: T) def > (y: T) = compare(x, y) > 0 + +given intOrd: Ord[Int] with + def compare(x: Int, y: Int) = + if x < y then -1 else if x > y then +1 else 0 + +given listOrd[T](using ord: Ord[T]): Ord[List[T]] with + + def compare(xs: List[T], ys: List[T]): Int = (xs, ys) match + case (Nil, Nil) => 0 + case (Nil, _) => -1 + case (_, Nil) => +1 + case (x :: xs1, y :: ys1) => + val fst = ord.compare(x, y) + if fst != 0 then fst else compare(xs1, ys1) + +``` + +This code defines a trait `Ord` with two given instances. `intOrd` defines +a given for the type `Ord[Int]` whereas `listOrd[T]` defines givens +for `Ord[List[T]]` for all types `T` that come with a given instance for `Ord[T]` +themselves.
The `using` clause in `listOrd` defines a condition: There must be a +given of type `Ord[T]` for a given of type `Ord[List[T]]` to exist. +Such conditions are expanded by the compiler to [context parameters](./using-clauses.md). + +## Anonymous Givens + +The name of a given can be left out. So the definitions +of the last section can also be expressed like this: + +```scala +given Ord[Int] with + ... +given [T](using Ord[T]): Ord[List[T]] with + ... +``` + +If the name of a given is missing, the compiler will synthesize a name from +the implemented type(s). + +**Note** The name synthesized by the compiler is chosen to be readable and reasonably concise. For instance, the two instances above would get the names: + +```scala +given_Ord_Int +given_Ord_List +``` + +The precise rules for synthesizing names are found [here](./relationship-implicits.html#anonymous-given-instances). These rules do not guarantee absence of name conflicts between +given instances of types that are "too similar". To avoid conflicts one can +use named instances. + +**Note** To ensure robust binary compatibility, publicly available libraries should prefer named instances. + +## Alias Givens + +An alias can be used to define a given instance that is equal to some expression. Example: + +```scala +given global: ExecutionContext = ForkJoinPool() +``` + +This creates a given `global` of type `ExecutionContext` that resolves to the right +hand side `ForkJoinPool()`. +The first time `global` is accessed, a new `ForkJoinPool` is created, which is then +returned for this and all subsequent accesses to `global`. This operation is thread-safe. + +Alias givens can be anonymous as well, e.g. + +```scala +given Position = enclosingTree.position +given (using config: Config): Factory = MemoizingFactory(config) +``` + +An alias given can have type parameters and context parameters just like any other given, +but it can only implement a single type. + +## Given Macros + +Given aliases can have the `inline` and `transparent` modifiers. +Example: + +```scala +transparent inline given mkAnnotations[A, T]: Annotations[A, T] = ${ + // code producing a value of a subtype of Annotations +} +``` + +Since `mkAnnotations` is `transparent`, the type of an application is the type of its right-hand side, which can be a proper subtype of the declared result type `Annotations[A, T]`. + +Given instances can have the `inline` but not `transparent` modifiers as their type is already known from the signature. +Example: + +```scala +trait Show[T] { + inline def show(x: T): String +} + +inline given Show[Foo] with { + /*transparent*/ inline def show(x: Foo): String = ${ ... } +} + +def app = + // inlines `show` method call and removes the call to `given Show[Foo]` + summon[Show[Foo]].show(foo) +``` +Note that the inline methods within the given instances may be `transparent`. + +The inlining of given instances will not inline/duplicate the implementation of the given, it will just inline the instantiation of that instance. +This is used to help dead code elimination of the given instances that are not used after inlining. + + +## Pattern-Bound Given Instances + +Given instances can also appear in patterns. Example: + +```scala +for given Context <- applicationContexts do + +pair match + case (ctx @ given Context, y) => ... +``` + +In the first fragment above, anonymous given instances for class `Context` are established by enumerating over `applicationContexts`. 
+## Syntax
+
+Here is the syntax for given instances:
+
+```
+TmplDef             ::=  ...
+                      |  ‘given’ GivenDef
+GivenDef            ::=  [GivenSig] StructuralInstance
+                      |  [GivenSig] AnnotType ‘=’ Expr
+                      |  [GivenSig] AnnotType
+GivenSig            ::=  [id] [DefTypeParamClause] {UsingParamClause} ‘:’
+StructuralInstance  ::=  ConstrApp {‘with’ ConstrApp} ‘with’ TemplateBody
+```
+
+A given instance starts with the reserved word `given` and an optional _signature_. The signature
+defines a name and/or parameters for the instance. It is followed by `:`. There are three kinds
+of given instances:
+
+- A _structural instance_ contains one or more types or constructor applications,
+  followed by `with` and a template body that contains member definitions of the instance.
+- An _alias instance_ contains a type, followed by `=` and a right-hand side expression.
+- An _abstract instance_ contains just the type, which is not followed by anything.
diff --git a/docs/_spec/TODOreference/contextual/multiversal-equality.md b/docs/_spec/TODOreference/contextual/multiversal-equality.md
new file mode 100644
index 000000000000..e9a81b95f472
--- /dev/null
+++ b/docs/_spec/TODOreference/contextual/multiversal-equality.md
@@ -0,0 +1,227 @@
+---
+layout: doc-page
+title: "Multiversal Equality"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/multiversal-equality.html
+---
+
+Previously, Scala had universal equality: Two values of any types
+could be compared with each other with `==` and `!=`. This came from
+the fact that `==` and `!=` are implemented in terms of Java's
+`equals` method, which can also compare values of any two reference
+types.
+
+Universal equality is convenient. But it is also dangerous since it
+undermines type safety. For instance, let's assume one is left after some refactoring
+with an erroneous program where a value `y` has type `S` instead of the correct type `T`.
+
+```scala
+val x = ... // of type T
+val y = ... // of type S, but should be T
+x == y      // typechecks, will always yield false
+```
+
+If `y` gets compared to other values of type `T`,
+the program will still typecheck, since values of all types can be compared with each other.
+But it will probably give unexpected results and fail at runtime.
+
+Multiversal equality is an opt-in way to make universal equality safer.
+It uses a binary type class [`scala.CanEqual`](https://github.com/lampepfl/dotty/blob/main/library/src/scala/CanEqual.scala)
+to indicate that values of two given types can be compared with each other.
+The example above would not typecheck if `S` or `T` were a class
+that derives `CanEqual`, e.g.
+
+```scala
+class T derives CanEqual
+```
+
+Alternatively, one can also provide a `CanEqual` given instance directly, like this:
+
+```scala
+given CanEqual[T, T] = CanEqual.derived
+```
+
+This definition effectively says that values of type `T` can (only) be
+compared to other values of type `T` when using `==` or `!=`. The definition
+affects type checking but it has no significance for runtime
+behavior, since `==` always maps to `equals` and `!=` always maps to
+the negation of `equals`. The right-hand side `CanEqual.derived` of the definition
+is a value that has any `CanEqual` instance as its type. Here is the definition of class
+`CanEqual` and its companion object:
+
+```scala
+package scala
+import annotation.implicitNotFound
+
+@implicitNotFound("Values of types ${L} and ${R} cannot be compared with == or !=")
+sealed trait CanEqual[-L, -R]
+
+object CanEqual:
+  object derived extends CanEqual[Any, Any]
+```
+
+One can have several `CanEqual` given instances for a type. For example, the four
+definitions below make values of type `A` and type `B` comparable with
+each other, but not comparable to anything else:
+
+```scala
+given CanEqual[A, A] = CanEqual.derived
+given CanEqual[B, B] = CanEqual.derived
+given CanEqual[A, B] = CanEqual.derived
+given CanEqual[B, A] = CanEqual.derived
+```
+
+The [`scala.CanEqual`](https://github.com/lampepfl/dotty/blob/main/library/src/scala/CanEqual.scala)
+object defines a number of `CanEqual` given instances that together
+define a rule book for what standard types can be compared (more details below).
+
+There is also a "fallback" instance named `canEqualAny` that allows comparisons
+over all types that do not themselves have a `CanEqual` given. `canEqualAny` is defined as follows:
+
+```scala
+def canEqualAny[L, R]: CanEqual[L, R] = CanEqual.derived
+```
+
+Even though `canEqualAny` is not declared as `given`, the compiler will still
+construct a `canEqualAny` instance as the answer to an implicit search for the
+type `CanEqual[L, R]`, unless `L` or `R` have `CanEqual` instances
+defined on them, or the language feature `strictEquality` is enabled.
+
+The primary motivation for having `canEqualAny` is backwards compatibility.
+If this is of no concern, one can disable `canEqualAny` by enabling the language
+feature `strictEquality`. As with all language features, this can be done either
+with an import
+
+```scala
+import scala.language.strictEquality
+```
+or with the command line option `-language:strictEquality`.
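+
+As a small sketch of the opt-in behavior (`Meter` is a hypothetical class, using the direct `given` style shown above):
+
+```scala
+import scala.language.strictEquality
+
+class Meter(val value: Double)
+given CanEqual[Meter, Meter] = CanEqual.derived
+
+def same(a: Meter, b: Meter) = a == b     // ok: CanEqual[Meter, Meter] is in scope
+// def mixed(a: Meter, s: String) = a == s // error: cannot be compared with == or !=
+```
+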
+## Deriving CanEqual Instances
+
+Instead of defining `CanEqual` instances directly, it is often more convenient to derive them.
Example: + +```scala +class Box[T](x: T) derives CanEqual +``` + +By the usual rules of [type class derivation](./derivation.md), +this generates the following `CanEqual` instance in the companion object of `Box`: + +```scala +given [T, U](using CanEqual[T, U]): CanEqual[Box[T], Box[U]] = + CanEqual.derived +``` + +That is, two boxes are comparable with `==` or `!=` if their elements are. Examples: + +```scala +new Box(1) == new Box(1L) // ok since there is an instance for `CanEqual[Int, Long]` +new Box(1) == new Box("a") // error: can't compare +new Box(1) == 1 // error: can't compare +``` + +## Precise Rules for Equality Checking + +The precise rules for equality checking are as follows. + +If the `strictEquality` feature is enabled then +a comparison using `x == y` or `x != y` between values `x: T` and `y: U` +is legal if there is a `given` of type `CanEqual[T, U]`. + +In the default case where the `strictEquality` feature is not enabled the comparison is +also legal if + + 1. `T` and `U` are the same, or + 2. one of `T`, `U` is a subtype of the _lifted_ version of the other type, or + 3. neither `T` nor `U` have a _reflexive_ `CanEqual` instance. + +Explanations: + + - _lifting_ a type `S` means replacing all references to abstract types + in covariant positions of `S` by their upper bound, and replacing + all refinement types in covariant positions of `S` by their parent. + - a type `T` has a _reflexive_ `CanEqual` instance if the implicit search for `CanEqual[T, T]` + succeeds. + +## Predefined CanEqual Instances + +The `CanEqual` object defines instances for comparing + - the primitive types `Byte`, `Short`, `Char`, `Int`, `Long`, `Float`, `Double`, `Boolean`, and `Unit`, + - `java.lang.Number`, `java.lang.Boolean`, and `java.lang.Character`, + - `scala.collection.Seq`, and `scala.collection.Set`. + +Instances are defined so that every one of these types has a _reflexive_ `CanEqual` instance, and the following holds: + + - Primitive numeric types can be compared with each other. + - Primitive numeric types can be compared with subtypes of `java.lang.Number` (and _vice versa_). + - `Boolean` can be compared with `java.lang.Boolean` (and _vice versa_). + - `Char` can be compared with `java.lang.Character` (and _vice versa_). + - Two sequences (of arbitrary subtypes of `scala.collection.Seq`) can be compared + with each other if their element types can be compared. The two sequence types + need not be the same. + - Two sets (of arbitrary subtypes of `scala.collection.Set`) can be compared + with each other if their element types can be compared. The two set types + need not be the same. + - Any subtype of `AnyRef` can be compared with `Null` (and _vice versa_). + +## Why Two Type Parameters? + +One particular feature of the `CanEqual` type is that it takes _two_ type parameters, representing the types of the two items to be compared. By contrast, conventional +implementations of an equality type class take only a single type parameter which represents the common type of _both_ operands. +One type parameter is simpler than two, so why go through the additional complication? The reason has to do with the fact that, rather than coming up with a type class where no operation existed before, +we are dealing with a refinement of pre-existing, universal equality. It is best illustrated through an example. + +Say you want to come up with a safe version of the `contains` method on `List[T]`. The original definition of `contains` in the standard library was: +```scala +class List[+T]: + ... 
+ def contains(x: Any): Boolean +``` +That uses universal equality in an unsafe way since it permits arguments of any type to be compared with the list's elements. The "obvious" alternative definition +```scala + def contains(x: T): Boolean +``` +does not work, since it refers to the covariant parameter `T` in a nonvariant context. The only variance-correct way to use the type parameter `T` in `contains` is as a lower bound: +```scala + def contains[U >: T](x: U): Boolean +``` +This generic version of `contains` is the one used in the current (Scala 2.13) version of `List`. +It looks different but it admits exactly the same applications as the `contains(x: Any)` definition we started with. +However, we can make it more useful (i.e. restrictive) by adding a `CanEqual` parameter: +```scala + def contains[U >: T](x: U)(using CanEqual[T, U]): Boolean // (1) +``` +This version of `contains` is equality-safe! More precisely, given +`x: T`, `xs: List[T]` and `y: U`, then `xs.contains(y)` is type-correct if and only if +`x == y` is type-correct. + +Unfortunately, the crucial ability to "lift" equality type checking from simple equality and pattern matching to arbitrary user-defined operations gets lost if we restrict ourselves to an equality class with a single type parameter. Consider the following signature of `contains` with a hypothetical `CanEqual1[T]` type class: +```scala + def contains[U >: T](x: U)(using CanEqual1[U]): Boolean // (2) +``` +This version could be applied just as widely as the original `contains(x: Any)` method, +since the `CanEqual1[Any]` fallback is always available! So we have gained nothing. What got lost in the transition to a single parameter type class was the original rule that `CanEqual[A, B]` is available only if neither `A` nor `B` have a reflexive `CanEqual` instance. That rule simply cannot be expressed if there is a single type parameter for `CanEqual`. + +The situation is different under `-language:strictEquality`. In that case, +the `CanEqual[Any, Any]` or `CanEqual1[Any]` instances would never be available, and the +single and two-parameter versions would indeed coincide for most practical purposes. + +But assuming `-language:strictEquality` immediately and everywhere poses migration problems which might well be unsurmountable. Consider again `contains`, which is in the standard library. Parameterizing it with the `CanEqual` type class as in (1) is an immediate win since it rules out non-sensical applications while still allowing all sensible ones. +So it can be done almost at any time, modulo binary compatibility concerns. +On the other hand, parameterizing `contains` with `CanEqual1` as in (2) would make `contains` +unusable for all types that have not yet declared a `CanEqual1` instance, including all +types coming from Java. This is clearly unacceptable. It would lead to a situation where, +rather than migrating existing libraries to use safe equality, the only upgrade path is to have parallel libraries, with the new version only catering to types deriving `CanEqual1` and the old version dealing with everything else. Such a split of the ecosystem would be very problematic, which means the cure is likely to be worse than the disease. + +For these reasons, it looks like a two-parameter type class is the only way forward because it can take the existing ecosystem where it is and migrate it towards a future where more and more code uses safe equality. 
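+
+To make this concrete, here is a sketch in the style of (1), using a hypothetical extension method `containsSafe` (this is not the standard library's definition):
+
+```scala
+extension [T](xs: List[T])
+  def containsSafe[U >: T](x: U)(using CanEqual[T, U]): Boolean =
+    xs.exists(_ == x)  // the == check is justified by the CanEqual[T, U] evidence
+
+List(1, 2, 3).containsSafe(2L)     // ok: a CanEqual[Int, Long] instance exists
+// List(1, 2, 3).containsSafe("a") // error under strictEquality: no CanEqual[Int, String]
+```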
+ +In applications where `-language:strictEquality` is the default one could also introduce a one-parameter type alias such as +```scala +type Eq[-T] = CanEqual[T, T] +``` +Operations needing safe equality could then use this alias instead of the two-parameter `CanEqual` class. But it would only +work under `-language:strictEquality`, since otherwise the universal `Eq[Any]` instance would be available everywhere. + + +More on multiversal equality is found in a [blog post](http://www.scala-lang.org/blog/2016/05/06/multiversal-equality.html) +and a [GitHub issue](https://github.com/lampepfl/dotty/issues/1247). diff --git a/docs/_spec/TODOreference/contextual/relationship-implicits.md b/docs/_spec/TODOreference/contextual/relationship-implicits.md new file mode 100644 index 000000000000..fce07f51151a --- /dev/null +++ b/docs/_spec/TODOreference/contextual/relationship-implicits.md @@ -0,0 +1,206 @@ +--- +layout: doc-page +title: "Relationship with Scala 2 Implicits" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/relationship-implicits.html +--- + +Many, but not all, of the new contextual abstraction features in Scala 3 can be mapped to Scala 2's implicits. This page gives a rundown on the relationships between new and old features. + +## Simulating Scala 3 Contextual Abstraction Concepts with Scala 2 Implicits + +### Given Instances + +Given instances can be mapped to combinations of implicit objects, classes and implicit methods. + + 1. Given instances without parameters are mapped to implicit objects. For instance, + + ```scala + given intOrd: Ord[Int] with { ... } + ``` + + maps to + + ```scala + implicit object intOrd extends Ord[Int] { ... } + ``` + + 2. Parameterized givens are mapped to combinations of classes and implicit methods. For instance, + + ```scala + given listOrd[T](using ord: Ord[T]): Ord[List[T]] with { ... } + ``` + + maps to + + ```scala + class listOrd[T](implicit ord: Ord[T]) extends Ord[List[T]] { ... } + final implicit def listOrd[T](implicit ord: Ord[T]): listOrd[T] = + new listOrd[T] + ``` + + 3. Alias givens map to implicit methods or implicit lazy vals. If an alias has neither type nor context parameters, + it is treated as a lazy val, unless the right-hand side is a simple reference, in which case we can use a forwarder to + that reference without caching it. + +Examples: + +```scala +given global: ExecutionContext = new ForkJoinContext() + +val ctx: Context +given Context = ctx +``` + +would map to + +```scala +final implicit lazy val global: ExecutionContext = new ForkJoinContext() +final implicit def given_Context = ctx +``` + +### Anonymous Given Instances + +Anonymous given instances get compiler synthesized names, which are generated in a reproducible way from the implemented type(s). For example, if the names of the `IntOrd` and `ListOrd` givens above were left out, the following names would be synthesized instead: + +```scala +given given_Ord_Int: Ord[Int] with { ... } +given given_Ord_List[T](using ord: Ord[T]): Ord[List[T]] with { ... } +``` + +The synthesized type names are formed from + +1. the prefix `given_`, +2. the simple name(s) of the implemented type(s), leaving out any prefixes, +3. the simple name(s) of the top-level argument type constructors to these types. + +Tuples are treated as transparent, i.e. a type `F[(X, Y)]` would get the synthesized name +`F_X_Y`. Directly implemented function types `A => B` are represented as `A_to_B`. 
Function types used as arguments to other type constructors are represented as `Function`. + +### Using Clauses + +Using clauses correspond largely to Scala 2's implicit parameter clauses. E.g. + +```scala +def max[T](x: T, y: T)(using ord: Ord[T]): T +``` + +would be written + +```scala +def max[T](x: T, y: T)(implicit ord: Ord[T]): T +``` + +in Scala 2. The main difference concerns applications of such parameters. +Explicit arguments to parameters of using clauses _must_ be written using `(using ...)`, +mirroring the definition syntax. E.g, `max(2, 3)(using IntOrd)`. +Scala 2 uses normal applications `max(2, 3)(IntOrd)` instead. The Scala 2 syntax has some inherent ambiguities and restrictions which are overcome by the new syntax. For instance, multiple implicit parameter lists are not available in the old syntax, even though they can be simulated using auxiliary objects in the "Aux" pattern. + +The `summon` method corresponds to `implicitly` in Scala 2. +It is precisely the same as the `the` method in [Shapeless](https://github.com/milessabin/shapeless). +The difference between `summon` (or `the`) and `implicitly` is +that `summon` can return a more precise type than the type that was +asked for. + +### Context Bounds + +Context bounds are the same in both language versions. They expand to the respective forms of implicit parameters. + +**Note:** To ease migration, context bounds in Scala 3 map for a limited time to old-style implicit parameters for which arguments can be passed either in a using clause or +in a normal argument list. Once old-style implicits are deprecated, context bounds +will map to using clauses instead. + +### Extension Methods + +Extension methods have no direct counterpart in Scala 2, but they can be simulated with implicit classes. For instance, the extension method + +```scala +extension (c: Circle) + def circumference: Double = c.radius * math.Pi * 2 +``` + +could be simulated to some degree by + +```scala +implicit class CircleDecorator(c: Circle) extends AnyVal { + def circumference: Double = c.radius * math.Pi * 2 +} +``` + +Abstract extension methods in traits that are implemented in given instances have no direct counterpart in Scala 2. The only way to simulate these is to make implicit classes available through imports. The Simulacrum macro library can automate this process in some cases. + +### Type Class Derivation + +Type class derivation has no direct counterpart in the Scala 2 language. Comparable functionality can be achieved by macro-based libraries such as [Shapeless](https://github.com/milessabin/shapeless), [Magnolia](https://propensive.com/opensource/magnolia), or [scalaz-deriving](https://github.com/scalaz/scalaz-deriving). + +### Context Function Types + +Context function types have no analogue in Scala 2. + +### Implicit By-Name Parameters + +Implicit by-name parameters are not supported in Scala 2, but can be emulated to some degree by the `Lazy` type in Shapeless. + +## Simulating Scala 2 Implicits in Scala 3 + +### Implicit Conversions + +Implicit conversion methods in Scala 2 can be expressed as given instances of the `scala.Conversion` class in Scala 3. 
+For instance, instead of
+
+```scala
+implicit def stringToToken(str: String): Token = new KeyWord(str)
+```
+
+one can write
+
+```scala
+given stringToToken: Conversion[String, Token] with
+  def apply(str: String): Token = KeyWord(str)
+```
+
+or
+
+```scala
+given stringToToken: Conversion[String, Token] = KeyWord(_)
+```
+
+### Implicit Classes
+
+Implicit classes in Scala 2 are often used to define extension methods, which are directly supported in Scala 3. Other uses of implicit classes can be simulated by a pair of a regular class and a given `Conversion` instance.
+
+### Implicit Values
+
+Implicit `val` definitions in Scala 2 can be expressed in Scala 3 using a regular `val` definition and an alias given.
+For instance, Scala 2's
+
+```scala
+lazy implicit val pos: Position = tree.sourcePos
+```
+
+can be expressed in Scala 3 as
+
+```scala
+lazy val pos: Position = tree.sourcePos
+given Position = pos
+```
+
+### Abstract Implicits
+
+An abstract implicit `val` or `def` in Scala 2 can be expressed in Scala 3 using a regular abstract definition and an alias given. For instance, Scala 2's
+
+```scala
+implicit def symDecorator: SymDecorator
+```
+
+can be expressed in Scala 3 as
+
+```scala
+def symDecorator: SymDecorator
+given SymDecorator = symDecorator
+```
+
+## Implementation Status and Timeline
+
+The Scala 3 implementation implements both Scala 2's implicits and the new abstractions. In fact, support for Scala 2's implicits is an essential part of the common language subset between 2.13 and Scala 3.
+Migration to the new abstractions will be supported by making automatic rewritings available.
+
+Depending on adoption patterns, old style implicits might start to be deprecated in a version following Scala 3.0.
diff --git a/docs/_spec/TODOreference/contextual/right-associative-extension-methods.md b/docs/_spec/TODOreference/contextual/right-associative-extension-methods.md
new file mode 100644
index 000000000000..068123df8cd2
--- /dev/null
+++ b/docs/_spec/TODOreference/contextual/right-associative-extension-methods.md
@@ -0,0 +1,52 @@
+---
+layout: doc-page
+title: "Right-Associative Extension Methods: Details"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/right-associative-extension-methods.html
+---
+
+The most general form of leading parameters of an extension method is as follows:
+
+  - A possibly empty list of using clauses `leadingUsing`
+  - A single parameter `extensionParam`
+  - A possibly empty list of using clauses `trailingUsing`
+
+This is then followed by `def`, the method name, and possibly further parameters
+`otherParams`. An example is:
+
+```scala
+  extension (using a: A, b: B)(using c: C)  // <-- leadingUsing
+            (x: X)                          // <-- extensionParam
+            (using d: D)                    // <-- trailingUsing
+    def +:: (y: Y)(using e: E)(z: Z)        // <-- otherParams
+```
+
+An extension method is treated as a right-associative operator
+(as in [SLS §6.12.3](https://www.scala-lang.org/files/archive/spec/2.13/06-expressions.html#infix-operations))
+if it has a name ending in `:` and is immediately followed by a
+single parameter. In the example above, that parameter is `(y: Y)`.
+
+The Scala compiler pre-processes a right-associative infix operation such as `x +: xs`
+to `xs.+:(x)` if `x` is a pure expression or a call-by-name parameter and to `val y = x; xs.+:(y)` otherwise. This is necessary since a regular right-associative infix method
+is defined in the class of its right operand. To make up for this swap,
+the expansion of right-associative extension methods performs an analogous parameter swap. More precisely, if `otherParams` consists of a single parameter
+`rightParam` followed by `remaining`, the total parameter sequence
+of the extension method's expansion is:
+
+```
+  leadingUsing rightParam trailingUsing extensionParam remaining
+```
+
+For instance, the `+::` method above would become
+
+```scala
+  def +:: (using a: A, b: B)(using c: C)
+          (y: Y)
+          (using d: D)
+          (x: X)
+          (using e: E)(z: Z)
+```
+
+This expansion has to be kept in mind when writing right-associative extension
+methods with inter-parameter dependencies.
+
+An overall simpler design could be obtained if right-associative operators could _only_ be defined as extension methods, and would be disallowed as normal methods. In that case neither arguments nor parameters would have to be swapped. Future versions of Scala should strive to achieve this simplification.
diff --git a/docs/_spec/TODOreference/contextual/type-classes.md b/docs/_spec/TODOreference/contextual/type-classes.md
new file mode 100644
index 000000000000..9fc0d2eec864
--- /dev/null
+++ b/docs/_spec/TODOreference/contextual/type-classes.md
@@ -0,0 +1,282 @@
+---
+layout: doc-page
+title: "Implementing Type classes"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/type-classes.html
+---
+
+A _type class_ is an abstract, parameterized type that lets you add new behavior to any closed data type without using sub-typing. This can be useful in multiple use-cases, for example:
+
+* expressing how a type you don't own (from the standard or 3rd-party library) conforms to such behavior
+* expressing such a behavior for multiple types without involving sub-typing relationships (one `extends` another) between those types (see: [ad hoc polymorphism](https://en.wikipedia.org/wiki/Ad_hoc_polymorphism) for instance)
+
+Therefore in Scala 3, _type classes_ are just _traits_ with one or more parameters whose implementations are not defined through the `extends` keyword, but by **given instances**.
+Here are some examples of common type classes:
+
+## Semigroups and monoids
+
+Here's the `Monoid` type class definition:
+
+```scala
+trait SemiGroup[T]:
+  extension (x: T) def combine (y: T): T
+
+trait Monoid[T] extends SemiGroup[T]:
+  def unit: T
+```
+
+An implementation of this `Monoid` type class for the type `String` can be the following:
+
+```scala
+given Monoid[String] with
+  extension (x: String) def combine (y: String): String = x.concat(y)
+  def unit: String = ""
+```
+
+Whereas for the type `Int` one could write the following:
+
+```scala
+given Monoid[Int] with
+  extension (x: Int) def combine (y: Int): Int = x + y
+  def unit: Int = 0
+```
+
+This monoid can now be used as a _context bound_ in the following `combineAll` method:
+
+```scala
+def combineAll[T: Monoid](xs: List[T]): T =
+  xs.foldLeft(summon[Monoid[T]].unit)(_.combine(_))
+```
+
+To get rid of the `summon[...]` we can define a `Monoid` object as follows:
+
+```scala
+object Monoid:
+  def apply[T](using m: Monoid[T]) = m
+```
+
+This allows us to rewrite the `combineAll` method this way:
+
+```scala
+def combineAll[T: Monoid](xs: List[T]): T =
+  xs.foldLeft(Monoid[T].unit)(_.combine(_))
+```
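+
+As a quick sanity check, the rewritten method behaves as expected (a sketch assuming the `Monoid` instances and `combineAll` above are in scope):
+
+```scala
+@main def monoidDemo(): Unit =
+  assert(combineAll(List(1, 2, 3)) == 6)            // uses the Monoid[Int] instance
+  assert(combineAll(List("a", "b", "c")) == "abc")  // uses the Monoid[String] instance
+  assert(combineAll(List.empty[Int]) == 0)          // an empty list folds to unit
+```
+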
+## Functors
+
+A `Functor` for a type provides the ability for its values to be "mapped over", i.e. to apply a function that transforms the contents of a value while preserving its shape. For example, to modify every element of a collection without dropping or adding elements.
+We can represent all types that can be "mapped over" with `F`. It's a type constructor: the type of its values becomes concrete when provided a type argument. +Therefore we write it `F[_]`, hinting that the type `F` takes another type as argument. +The definition of a generic `Functor` would thus be written as: + +```scala +trait Functor[F[_]]: + def map[A, B](x: F[A], f: A => B): F[B] +``` + +Which could read as follows: "A `Functor` for the type constructor `F[_]` represents the ability to transform `F[A]` to `F[B]` through the application of function `f` with type `A => B`". We call the `Functor` definition here a _type class_. +This way, we could define an instance of `Functor` for the `List` type: + +```scala +given Functor[List] with + def map[A, B](x: List[A], f: A => B): List[B] = + x.map(f) // List already has a `map` method +``` + +With this `given` instance in scope, everywhere a `Functor` is expected, the compiler will accept a `List` to be used. + +For instance, we may write such a testing method: + +```scala +def assertTransformation[F[_]: Functor, A, B](expected: F[B], original: F[A], mapping: A => B): Unit = + assert(expected == summon[Functor[F]].map(original, mapping)) +``` + +And use it this way, for example: + +```scala +assertTransformation(List("a1", "b1"), List("a", "b"), elt => s"${elt}1") +``` + +That's a first step, but in practice we probably would like the `map` function to be a method directly accessible on the type `F`. So that we can call `map` directly on instances of `F`, and get rid of the `summon[Functor[F]]` part. +As in the previous example of Monoids, [`extension` methods](extension-methods.md) help achieving that. Let's re-define the `Functor` type class with extension methods. + +```scala +trait Functor[F[_]]: + extension [A](x: F[A]) + def map[B](f: A => B): F[B] +``` + +The instance of `Functor` for `List` now becomes: + +```scala +given Functor[List] with + extension [A](xs: List[A]) + def map[B](f: A => B): List[B] = + xs.map(f) // List already has a `map` method + +``` + +It simplifies the `assertTransformation` method: + +```scala +def assertTransformation[F[_]: Functor, A, B](expected: F[B], original: F[A], mapping: A => B): Unit = + assert(expected == original.map(mapping)) +``` + +The `map` method is now directly used on `original`. It is available as an extension method +since `original`'s type is `F[A]` and a given instance for `Functor[F[A]]` which defines `map` +is in scope. + +## Monads + +Applying `map` in `Functor[List]` to a mapping function of type `A => B` results in a `List[B]`. So applying it to a mapping function of type `A => List[B]` results in a `List[List[B]]`. To avoid managing lists of lists, we may want to "flatten" the values in a single list. + +That's where `Monad` comes in. A `Monad` for type `F[_]` is a `Functor[F]` with two more operations: + +* `flatMap`, which turns an `F[A]` into an `F[B]` when given a function of type `A => F[B]`, +* `pure`, which creates an `F[A]` from a single value `A`. 
+
+Here is the translation of this definition in Scala 3:
+
+```scala
+trait Monad[F[_]] extends Functor[F]:
+
+  /** The unit value for a monad */
+  def pure[A](x: A): F[A]
+
+  extension [A](x: F[A])
+    /** The fundamental composition operation */
+    def flatMap[B](f: A => F[B]): F[B]
+
+    /** The `map` operation can now be defined in terms of `flatMap` */
+    def map[B](f: A => B) = x.flatMap(f.andThen(pure))
+
+end Monad
+```
+
+### List
+
+A `List` can be turned into a monad via this `given` instance:
+
+```scala
+given listMonad: Monad[List] with
+  def pure[A](x: A): List[A] =
+    List(x)
+  extension [A](xs: List[A])
+    def flatMap[B](f: A => List[B]): List[B] =
+      xs.flatMap(f) // rely on the existing `flatMap` method of `List`
+```
+
+Since `Monad` is a subtype of `Functor`, `List` is also a functor. The Functor's `map`
+operation is already provided by the `Monad` trait, so the instance does not need to define
+it explicitly.
+
+### Option
+
+`Option` is another type having the same kind of behaviour:
+
+```scala
+given optionMonad: Monad[Option] with
+  def pure[A](x: A): Option[A] =
+    Option(x)
+  extension [A](xo: Option[A])
+    def flatMap[B](f: A => Option[B]): Option[B] = xo match
+      case Some(x) => f(x)
+      case None => None
+```
+
+### Reader
+
+Another example of a `Monad` is the _Reader_ Monad, which acts on functions instead of
+data types like `List` or `Option`. It can be used to combine multiple functions
+that all need the same parameter. For instance, multiple functions needing access to some configuration, context, environment variables, etc.
+
+Let's define a `Config` type, and two functions using it:
+
+```scala
+trait Config
+// ...
+def compute(i: Int)(config: Config): String = ???
+def show(str: String)(config: Config): Unit = ???
+```
+
+We may want to combine `compute` and `show` into a single function, accepting a `Config` as parameter, and showing the result of the computation, and we'd like to use
+a monad to avoid passing the parameter explicitly multiple times. So postulating
+the right `flatMap` operation, we could write:
+
+```scala
+def computeAndShow(i: Int): Config => Unit = compute(i).flatMap(show)
+```
+
+instead of
+
+```scala
+show(compute(i)(config))(config)
+```
+
+Let's define this monad then. First, we are going to define a type named `ConfigDependent` representing a function that, when passed a `Config`, produces a `Result`.
+
+```scala
+type ConfigDependent[Result] = Config => Result
+```
+
+The monad instance will look like this:
+
+```scala
+given configDependentMonad: Monad[ConfigDependent] with
+
+  def pure[A](x: A): ConfigDependent[A] =
+    config => x
+
+  extension [A](x: ConfigDependent[A])
+    def flatMap[B](f: A => ConfigDependent[B]): ConfigDependent[B] =
+      config => f(x(config))(config)
+
+end configDependentMonad
+```
+
+The type `ConfigDependent` can be written using [type lambdas](../new-types/type-lambdas.md):
+
+```scala
+type ConfigDependent = [Result] =>> Config => Result
+```
+
+Using this syntax would turn the previous `configDependentMonad` into:
+
+```scala
+given configDependentMonad: Monad[[Result] =>> Config => Result] with
+
+  def pure[A](x: A): Config => A =
+    config => x
+
+  extension [A](x: Config => A)
+    def flatMap[B](f: A => Config => B): Config => B =
+      config => f(x(config))(config)
+
+end configDependentMonad
+```
+
+It is likely that we would like to use this pattern with other kinds of environments than our `Config` trait. The Reader monad allows us to abstract away `Config` as a type _parameter_, named `Ctx` in the following definition:
+
+```scala
+given readerMonad[Ctx]: Monad[[X] =>> Ctx => X] with
+
+  def pure[A](x: A): Ctx => A =
+    ctx => x
+
+  extension [A](x: Ctx => A)
+    def flatMap[B](f: A => Ctx => B): Ctx => B =
+      ctx => f(x(ctx))(ctx)
+
+end readerMonad
+```
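+
+Nothing forces the environment to be a `Config`. Here is a hedged sketch that reuses `readerMonad` with a plain `Map[String, String]` as the environment (the names `lookup` and `greet` are made up for this illustration):
+
+```scala
+def lookup(key: String): Map[String, String] => String =
+  env => env.getOrElse(key, "")
+
+def greet: Map[String, String] => String =
+  lookup("name").flatMap(n => _ => s"Hello, $n!")  // flatMap comes from readerMonad
+
+@main def readerDemo(): Unit =
+  println(greet(Map("name" -> "Scala")))  // prints: Hello, Scala!
+```
+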
+## Summary
+
+The definition of a _type class_ is expressed with a parameterised type with abstract members, such as a `trait`.
+The main difference between subtype polymorphism and ad-hoc polymorphism with _type classes_ is how the definition of the _type class_ is implemented, in relation to the type it acts upon.
+In the case of a _type class_, its implementation for a concrete type is expressed through a `given` instance definition, which is supplied as an implicit argument alongside the value it acts upon. With subtype polymorphism, the implementation is mixed into the parents of a class, and only a single term is required to perform a polymorphic operation. The type class solution
+takes more effort to set up, but is more extensible: Adding a new interface to a
+class requires changing the source code of that class. By contrast, instances for type classes can be defined anywhere.
+
+To conclude, we have seen that traits and given instances, combined with other constructs like extension methods, context bounds and type lambdas, allow a concise and natural expression of _type classes_.
diff --git a/docs/_spec/TODOreference/contextual/using-clauses.md b/docs/_spec/TODOreference/contextual/using-clauses.md
new file mode 100644
index 000000000000..9187e1916e7d
--- /dev/null
+++ b/docs/_spec/TODOreference/contextual/using-clauses.md
@@ -0,0 +1,153 @@
+---
+layout: doc-page
+title: "Using Clauses"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/using-clauses.html
+---
+
+Functional programming tends to express most dependencies as simple function parameterization.
+This is clean and powerful, but it sometimes leads to functions that take many parameters where the same value is passed over and over again in long call chains to many
+functions. Context parameters can help here since they enable the compiler to synthesize
+repetitive arguments instead of the programmer having to write them explicitly.
+
+For example, with the [given instances](./givens.md) defined previously,
+a `max` function that works for any arguments for which an ordering exists can be defined as follows:
+
+```scala
+def max[T](x: T, y: T)(using ord: Ord[T]): T =
+  if ord.compare(x, y) < 0 then y else x
+```
+
+Here, `ord` is a _context parameter_ introduced with a `using` clause.
+The `max` function can be applied as follows:
+
+```scala
+max(2, 3)(using intOrd)
+```
+
+The `(using intOrd)` part passes `intOrd` as an argument for the `ord` parameter. But the point of context parameters is that this argument can also be left out (and it usually is). So the following applications are equally valid:
+
+```scala
+max(2, 3)
+max(List(1, 2, 3), Nil)
+```
+
+## Anonymous Context Parameters
+
+In many situations, the name of a context parameter need not be
+mentioned explicitly at all, since it is used only in synthesized arguments for
+other context parameters. In that case one can avoid defining a parameter name
+and just provide its type. Example:
+
+```scala
+def maximum[T](xs: List[T])(using Ord[T]): T =
+  xs.reduceLeft(max)
+```
+
+`maximum` takes a context parameter of type `Ord[T]` only to pass it on as an
+inferred argument to `max`. The name of the parameter is left out.
+
+Generally, context parameters may be defined either as a full parameter list `(p_1: T_1, ..., p_n: T_n)` or just as a sequence of types `T_1, ..., T_n`. Vararg parameters are not supported in `using` clauses.
+
+## Class Context Parameters
+
+If a class context parameter is made a member by adding a `val` or `var` modifier,
+then that member is available as a given instance.
+
+Compare the following examples, where the attempt to supply an explicit `given` member induces an ambiguity:
+
+```scala
+class GivenIntBox(using val givenInt: Int):
+  def n = summon[Int]
+
+class GivenIntBox2(using givenInt: Int):
+  given Int = givenInt
+  //def n = summon[Int] // ambiguous
+```
+
+The `given` member is importable as explained in the section on [importing `given`s](./given-imports.md):
+
+```scala
+val b = GivenIntBox(using 23)
+import b.given
+summon[Int]  // 23
+
+import b.*
+//givenInt   // Not found
+```
+
+## Inferring Complex Arguments
+
+Here are two other methods that have a context parameter of type `Ord[T]`:
+
+```scala
+def descending[T](using asc: Ord[T]): Ord[T] = new Ord[T]:
+  def compare(x: T, y: T) = asc.compare(y, x)
+
+def minimum[T](xs: List[T])(using Ord[T]) =
+  maximum(xs)(using descending)
+```
+
+The `minimum` method's right-hand side passes `descending` as an explicit argument to `maximum(xs)`.
+With this setup, the following calls are all well-formed, and they all normalize to the last one:
+
+```scala
+minimum(xs)
+maximum(xs)(using descending)
+maximum(xs)(using descending(using listOrd))
+maximum(xs)(using descending(using listOrd(using intOrd)))
+```
+
+## Multiple `using` Clauses
+
+There can be several `using` clauses in a definition and `using` clauses can be freely mixed with normal parameter clauses. Example:
+
+```scala
+def f(u: Universe)(using ctx: u.Context)(using s: ctx.Symbol, k: ctx.Kind) = ...
+```
+
+Multiple `using` clauses are matched left-to-right in applications. Example:
+
+```scala
+object global extends Universe { type Context = ... }
+given ctx : global.Context with { type Symbol = ...; type Kind = ... }
+given sym : ctx.Symbol
+given kind: ctx.Kind
+
+```
+Then the following calls are all valid (and normalize to the last one):
+
+```scala
+f(global)
+f(global)(using ctx)
+f(global)(using ctx)(using sym, kind)
+```
+
+But `f(global)(using sym, kind)` would give a type error.
+
+
+## Summoning Instances
+
+The method `summon` in `Predef` returns the given of a specific type. For example,
+the given instance for `Ord[List[Int]]` is produced by
+
+```scala
+summon[Ord[List[Int]]]  // reduces to listOrd(using intOrd)
+```
+
+The `summon` method is simply defined as the (non-widening) identity function over a context parameter.
+
+```scala
+def summon[T](using x: T): x.type = x
+```
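+
+The points above can be seen together in a small sketch (assuming `Ord`, `intOrd` and `max` as defined in this and the previous sections):
+
+```scala
+@main def usingDemo(): Unit =
+  assert(max(2, 3) == 3)                   // the ord argument is synthesized
+  assert(max(2, 3)(using intOrd) == 3)     // the same call, argument passed explicitly
+  val ord: intOrd.type = summon[Ord[Int]]  // summon does not widen: the result has type intOrd.type
+```
+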
+## Syntax
+
+Here is the new syntax of parameters and arguments seen as a delta from the [standard context free syntax of Scala 3](../syntax.md). `using` is a soft keyword, recognized only at the start of a parameter or argument list. It can be used as a normal identifier everywhere else.
+
+```
+ClsParamClause      ::=  ... | UsingClsParamClause
+DefParamClauses     ::=  ... | UsingParamClause
+UsingClsParamClause ::=  ‘(’ ‘using’ (ClsParams | Types) ‘)’
+UsingParamClause    ::=  ‘(’ ‘using’ (DefParams | Types) ‘)’
+ParArgumentExprs    ::=  ...
+                      |  ‘(’ ‘using’ ExprsInParens ‘)’
+```
diff --git a/docs/_spec/TODOreference/dropped-features/nonlocal-returns.md b/docs/_spec/TODOreference/dropped-features/nonlocal-returns.md
new file mode 100644
index 000000000000..17b86f77ee56
--- /dev/null
+++ b/docs/_spec/TODOreference/dropped-features/nonlocal-returns.md
@@ -0,0 +1,29 @@
+---
+layout: doc-page
+title: "Deprecated: Nonlocal Returns"
+
+nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/nonlocal-returns.html
+---
+
+Returning from nested anonymous functions has been deprecated, and will produce a warning from version `3.2`.
+
+Nonlocal returns are implemented by throwing and catching `scala.runtime.NonLocalReturnException`s. This is rarely what is intended by the programmer. It can be problematic because of the hidden performance cost of throwing and catching exceptions. Furthermore, it is a leaky implementation: a catch-all exception handler can intercept a `NonLocalReturnException`.
+
+A drop-in library replacement is provided in [`scala.util.control.NonLocalReturns`](https://scala-lang.org/api/3.x/scala/util/control/NonLocalReturns$.html). Example:
+
+```scala
+import scala.util.control.NonLocalReturns.*
+
+extension [T](xs: List[T])
+  def has(elem: T): Boolean = returning {
+    for x <- xs do
+      if x == elem then throwReturn(true)
+    false
+  }
+
+@main def test(): Unit =
+  val xs = List(1, 2, 3, 4, 5)
+  assert(xs.has(2) == xs.contains(2))
+```
+
+Note: the compiler produces a deprecation error on nonlocal returns only under the `-source:future` option.
diff --git a/docs/_spec/TODOreference/dropped-features/package-objects.md b/docs/_spec/TODOreference/dropped-features/package-objects.md
new file mode 100644
index 000000000000..d8149e460bf5
--- /dev/null
+++ b/docs/_spec/TODOreference/dropped-features/package-objects.md
@@ -0,0 +1,48 @@
+---
+layout: doc-page
+title: "Dropped: Package Objects"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/package-objects.html
+---
+
+Package objects
+```scala
+package object p {
+  val a = ...
+  def b = ...
+}
+```
+will be dropped. They are still available in Scala 3.0 and 3.1, but will be deprecated and removed afterwards.
+
+Package objects are no longer needed since all kinds of definitions can now be written at the top-level. Example:
+```scala
+package p
+type Labelled[T] = (String, T)
+val a: Labelled[Int] = ("count", 1)
+def b = a._2
+
+case class C()
+
+extension (x: C) def pair(y: C) = (x, y)
+```
+There may be several source files in a package containing such top-level definitions, and source files can freely mix top-level value, method, and type definitions with classes and objects.
+
+The compiler generates synthetic objects that wrap top-level definitions falling into one of the following categories:
+
+ - all pattern, value, method, and type definitions,
+ - implicit classes and objects,
+ - companion objects of opaque type aliases.
+
+If a source file `src.scala` contains such top-level definitions, they will be put in a synthetic object named `src$package`. The wrapping is transparent, however. The definitions in `src` can still be accessed as members of the enclosing package. The synthetic object will be placed last in the file,
+after any other package clauses, imports, or object and class definitions.
+
+**Note:** This means that
+1. The name of a source file containing wrapped top-level definitions is relevant for binary compatibility. If the name changes, so does the name of the generated object and its class.
+
+2.
A top-level main method `def main(args: Array[String]): Unit = ...` is wrapped as any other method. If it appears +in a source file `src.scala`, it could be invoked from the command line using a command like `scala src$package`. Since the +"program name" is mangled it is recommended to always put `main` methods in explicitly named objects. + +3. The notion of `private` is independent of whether a definition is wrapped or not. A `private` top-level definition is always visible from everywhere in the enclosing package. + +4. If several top-level definitions are overloaded variants with the same name, +they must all come from the same source file. diff --git a/docs/_spec/TODOreference/dropped-features/type-projection.md b/docs/_spec/TODOreference/dropped-features/type-projection.md new file mode 100644 index 000000000000..08b5ffb34eca --- /dev/null +++ b/docs/_spec/TODOreference/dropped-features/type-projection.md @@ -0,0 +1,18 @@ +--- +layout: doc-page +title: "Dropped: General Type Projection" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/type-projection.html +--- + +Scala so far allowed general type projection `T#A` where `T` is an arbitrary type +and `A` names a type member of `T`. + +Scala 3 disallows this if `T` is an abstract type (class types and type aliases +are fine). This change was made because unrestricted type projection +is [unsound](https://github.com/lampepfl/dotty/issues/1050). + +This restriction rules out the [type-level encoding of a combinator +calculus](https://michid.wordpress.com/2010/01/29/scala-type-level-encoding-of-the-ski-calculus/). + +To rewrite code using type projections on abstract types, consider using +path-dependent types or implicit parameters. diff --git a/docs/_spec/TODOreference/dropped-features/weak-conformance-spec.md b/docs/_spec/TODOreference/dropped-features/weak-conformance-spec.md new file mode 100644 index 000000000000..07625dcfe885 --- /dev/null +++ b/docs/_spec/TODOreference/dropped-features/weak-conformance-spec.md @@ -0,0 +1,54 @@ +--- +layout: doc-page +title: "Dropped: Weak Conformance - More Details" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/weak-conformance-spec.html +--- + +To simplify the underlying type theory, Scala 3 drops the notion of +[*weak conformance*](https://www.scala-lang.org/files/archive/spec/2.13/03-types.html#weak-conformance) +altogether. Instead, it provides more flexibility when +assigning a type to a constant expression. The new rule is: + + - *If* a list of expressions `Es` appears as one of + + - the elements of a vararg parameter, or + - the alternatives of an if-then-else or match expression, or + - the body and catch results of a try expression, + +- *and* all expressions have primitive numeric types, but they do not + all have the same type, + +- *then* the following is attempted: + + - the expressions `Es` are partitioned into `Int` constants on the + one hand, and all other expressions on the other hand, + - if all the other expressions have the same numeric type `T` + (which can be one of `Byte`, `Short`, `Char`, `Int`, `Long`, `Float`, + `Double`), possibly after widening, and if none of the `Int` + literals would incur a loss of precision when converted to `T`, + then they are thus converted (the other expressions are left + unchanged regardless), + - otherwise, the expressions `Es` are used unchanged. 
+
+  A loss of precision occurs for
+  - an `Int -> Float` conversion of a constant
+    `c` if `c.toFloat.toInt != c`,
+  - an `Int -> Byte` conversion of a constant
+    `c` if `c.toByte.toInt != c`,
+  - an `Int -> Short` conversion of a constant
+    `c` if `c.toShort.toInt != c`.
+
+## Examples
+
+```scala
+inline val b = 33
+def f(): Int = b + 1
+Array(b, 33, 5.5)      : Array[Double] // b is an inline val
+Array(f(), 33, 5.5)    : Array[AnyVal] // f() is not a constant
+Array(5, 11L)          : Array[Long]
+Array(5, 11L, 5.5)     : Array[AnyVal] // Long and Double found
+Array(1.0f, 2)         : Array[Float]
+Array(1.0f, 1234567890): Array[AnyVal] // loss of precision
+Array(b, 33, 'a')      : Array[Char]
+Array(5.toByte, 11)    : Array[Byte]
+```
diff --git a/docs/_spec/TODOreference/dropped-features/weak-conformance.md b/docs/_spec/TODOreference/dropped-features/weak-conformance.md
new file mode 100644
index 000000000000..b1478326b2c9
--- /dev/null
+++ b/docs/_spec/TODOreference/dropped-features/weak-conformance.md
@@ -0,0 +1,47 @@
+---
+layout: doc-page
+title: "Dropped: Weak Conformance"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/weak-conformance.html
+---
+
+In some situations, Scala used a _weak conformance_ relation when
+testing type compatibility or computing the least upper bound of a set
+of types. The principal motivation behind weak conformance was to
+make an expression like this have type `List[Double]`:
+
+```scala
+List(1.0, math.sqrt(3.0), 0, -3.3) // : List[Double]
+```
+
+It's "obvious" that this should be a `List[Double]`. However, without
+some special provision, the least upper bound of the list's element
+types `(Double, Double, Int, Double)` would be `AnyVal`, hence the list
+expression would be given type `List[AnyVal]`.
+
+A less obvious example is the following one, which was also typed as a
+`List[Double]`, using the weak conformance relation.
+
+```scala
+val n: Int = 3
+val c: Char = 'X'
+val d: Double = math.sqrt(3.0)
+List(n, c, d) // used to be: List[Double], now: List[AnyVal]
+```
+
+Here, it is less clear why the type should be widened to
+`List[Double]`; a `List[AnyVal]` seems to be an equally valid -- and
+more principled -- choice.
+
+Weak conformance applies to all "numeric" types (including `Char`), and
+independently of whether the expressions are literals or not. However,
+in hindsight, the only intended use case is for *integer literals* to
+be adapted to the type of the other expressions. Other types of numerics
+have an explicit type annotation embedded in their syntax (`f`, `d`,
+`.`, `L` or `'` for `Char`s), which ensures that their author really
+meant them to have that specific type.
+
+Therefore, Scala 3 drops the general notion of weak conformance, and
+instead keeps one rule: `Int` literals are adapted to other numeric
+types if necessary.
+
+[More details](weak-conformance-spec.md)
diff --git a/docs/_spec/TODOreference/dropped-features/wildcard-init.md b/docs/_spec/TODOreference/dropped-features/wildcard-init.md
new file mode 100644
index 000000000000..e42854079cf9
--- /dev/null
+++ b/docs/_spec/TODOreference/dropped-features/wildcard-init.md
@@ -0,0 +1,23 @@
+---
+layout: doc-page
+title: "Dropped: Wildcard Initializer"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/wildcard-init.html
+---
+
+The syntax
+
+```scala
+  var x: A = _
+```
+
+that was used to indicate an uninitialized field has been dropped.
+In its place there is a special value `uninitialized` in the `scala.compiletime` package.
+To get an uninitialized field, you now write + +```scala +import scala.compiletime.uninitialized + +var x: A = uninitialized +``` + +To enable cross-compilation, `_` is still supported, but it will be dropped in a future 3.x version. diff --git a/docs/_spec/TODOreference/dropped-features/xml.md b/docs/_spec/TODOreference/dropped-features/xml.md new file mode 100644 index 000000000000..458a347a66c4 --- /dev/null +++ b/docs/_spec/TODOreference/dropped-features/xml.md @@ -0,0 +1,39 @@ +--- +layout: doc-page +title: "Dropped: XML Literals" +nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/xml.html +--- + +XML Literals are still supported, but will be dropped in the near future, to +be replaced with [XML string interpolation](https://github.com/lampepfl/xml-interpolator): + +```scala +import dotty.xml.interpolator.* + +case class Person(name: String) { override def toString = name } + +@main def test: Unit = + val bill = Person("Bill") + val john = Person("John") + val mike = Person("Mike") + val todoList = List( + (bill, john, "Meeting", "Room 203, 11:00am"), + (john, mike, "Holiday", "March 22-24") + ) + // XML literals (to be dropped) + val mails1 = for (from, to, heading, body) <- todoList yield + + {from}{to} + {heading}{body} + + println(mails1) + // XML string interpolation + val mails2 = for (from, to, heading, body) <- todoList yield xml""" + + ${from}${to} + ${heading}${body} + """ + println(mails2) +``` + +For more information, see the semester project [XML String Interpolator for Dotty](https://infoscience.epfl.ch/record/267527) by Yassin Kammoun (2019). diff --git a/docs/_spec/TODOreference/experimental/canthrow.md b/docs/_spec/TODOreference/experimental/canthrow.md new file mode 100644 index 000000000000..025a0ed1c686 --- /dev/null +++ b/docs/_spec/TODOreference/experimental/canthrow.md @@ -0,0 +1,281 @@ +--- +layout: doc-page +title: "CanThrow Capabilities" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/canthrow.html +--- + +This page describes experimental support for exception checking in Scala 3. It is enabled by the language import +```scala +import language.experimental.saferExceptions +``` +The reason for publishing this extension now is to get feedback on its usability. We are working on more advanced type systems that build on the general ideas put forward in the extension. Those type systems have application areas beyond checked exceptions. Exception checking is a useful starting point since exceptions are familiar to all Scala programmers and their current treatment leaves room for improvement. + +## Why Exceptions? + +Exceptions are an ideal mechanism for error handling in many situations. They serve the intended purpose of propagating error conditions with a minimum of boilerplate. They cause zero overhead for the "happy path", which means they are very efficient as long as errors arise infrequently. Exceptions are also debug friendly, since they produce stack traces that can be inspected at the handler site. So one never has to guess where an erroneous condition originated. + +## Why Not Exceptions? + +However, exceptions in current Scala and many other languages are not reflected in the type system. This means that an essential part of the contract of a function - i.e. what exceptions can it produce? - is not statically checked. Most people acknowledge that this is a problem, but that so far the alternative of checked exceptions was just too painful to be considered. 
+A good example is Java's checked exceptions, which do the right thing in principle but are widely regarded as a mistake since they are so difficult to deal with. So far, none of the successor languages that are modeled after Java or that build on the JVM has copied this feature. See for example Anders Hejlsberg's [statement on why C# does not have checked exceptions](https://www.artima.com/articles/the-trouble-with-checked-exceptions).
+
+## The Problem With Java's Checked Exceptions
+
+The main problem with [Java's checked exception model](https://docs.oracle.com/javase/specs/jls/se8/html/jls-11.html#jls-11.2) is its inflexibility, which is due to lack of polymorphism. Consider for instance the `map` function which is declared on `List[A]` like this:
+```scala
+  def map[B](f: A => B): List[B]
+```
+In the Java model, function `f` is not allowed to throw a checked exception. So the following call would be invalid:
+```scala
+  xs.map(x => if x < limit then x * x else throw LimitExceeded())
+```
+The only way around this would be to wrap the checked exception `LimitExceeded` in an unchecked [`java.lang.RuntimeException`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/RuntimeException.html) that is caught at the callsite and unwrapped again. Something like this:
+```scala
+  try
+    xs.map(x => if x < limit then x * x else throw Wrapper(LimitExceeded()))
+  catch case Wrapper(ex) => throw ex
+```
+Ugh! No wonder checked exceptions in Java are not very popular.
+
+## Monadic Effects
+
+So the dilemma is that exceptions are easy to use only as long as we forget static type checking. This has caused many people working with Scala to abandon exceptions altogether and to use an error monad like [`Either`](https://scala-lang.org/api/3.x/scala/util/Either.html) instead. This can work in many situations but is not without its downsides either. It makes code a lot more complicated and harder to refactor. It means one is quickly confronted with the problem of how to work with several monads. In general, dealing with one monad at a time in Scala is straightforward but dealing with several monads together is much less pleasant since monads don't compose. A great number of techniques have been proposed, implemented, and promoted to deal with this, from monad transformers, to free monads, to tagless final. But none of these techniques is universally liked; each introduces a complicated DSL that's hard to understand for non-experts, introduces runtime overheads, and makes debugging difficult. In the end, quite a few developers prefer to work instead with a single "super-monad" like [`ZIO`](https://zio.dev/version-1.x/datatypes/core/zio) that has error propagation built in alongside other aspects. This one-size-fits-all approach can work very nicely, even though (or is it because?) it represents an all-encompassing framework.
+
+However, a programming language is not a framework; it has to cater also for those applications that do not fit the framework's use cases. So there's still a strong motivation for getting exception checking right.
+
+## From Effects To Capabilities
+
+Why does `map` work so poorly with Java's checked exception model? It's because
+`map`'s signature limits function arguments to not throw checked exceptions. We could try to come up with a more polymorphic formulation of `map`.
+For instance, it could look like this:
+```scala
+  def map[B, E](f: A => B throws E): List[B] throws E
+```
+This assumes a type `A throws E` to indicate computations of type `A` that can throw an exception of type `E`. But in practice the overhead of the additional type parameters makes this approach unappealing as well. Note in particular that we'd have to parameterize _every method_ that takes a function argument that way, so the added overhead of declaring all these exception types looks just like a sort of ceremony we would like to avoid.
+
+But there is a way to avoid the ceremony. Instead of concentrating on possible _effects_ such as "this code might throw an exception", concentrate on _capabilities_ such as "this code needs the capability to throw an exception". From a standpoint of expressiveness this is quite similar. But capabilities can be expressed as parameters whereas traditionally effects are expressed as some addition to result values. It turns out that this can make a big difference!
+
+## The `CanThrow` Capability
+
+In the _effects as capabilities_ model, an effect is expressed as an (implicit) parameter of a certain type. For exceptions we would expect parameters of type
+[`CanThrow[E]`](https://scala-lang.org/api/3.x/scala/CanThrow.html) where `E` stands for the exception that can be thrown. Here is the definition of `CanThrow`:
+```scala
+erased class CanThrow[-E <: Exception]
+```
+This shows another experimental Scala feature: [erased definitions](./erased-defs.md). Roughly speaking, values of an erased class do not generate runtime code; they are erased before code generation. This means that all `CanThrow` capabilities are compile-time only artifacts; they do not have a runtime footprint.
+
+Now, if the compiler sees a `throw Exc()` construct where `Exc` is a checked exception, it will check that there is a capability of type `CanThrow[Exc]` that can be summoned as a given. It's a compile-time error if that's not the case.
+
+How can the capability be produced? There are several possibilities:
+
+Most often, the capability is produced by having a using clause `(using CanThrow[Exc])` in some enclosing scope. This roughly corresponds to a [`throws`](https://docs.oracle.com/javase/specs/jls/se7/html/jls-8.html#jls-8.4.6) clause in Java. The analogy is even stronger since alongside [`CanThrow`](https://scala-lang.org/api/3.x/scala/CanThrow.html) there is also the following type alias defined in the [`scala`](https://scala-lang.org/api/3.x/scala.html) package:
+```scala
+infix type $throws[R, +E <: Exception] = CanThrow[E] ?=> R
+```
+That is, [`R $throws E`](https://scala-lang.org/api/3.x/scala/runtime.html#$throws-0) is a context function type that takes an implicit `CanThrow[E]` parameter and that returns a value of type `R`. What's more, the compiler will translate infix types with `throws` as the operator into `$throws` applications according to the rules
+```
+  A throws E              -->  A $throws E
+  A throws E₁ | ... | Eᵢ  -->  A $throws E₁ ... $throws Eᵢ
+```
+Therefore, a method written like this:
+```scala
+def m(x: T)(using CanThrow[E]): U
+```
+can alternatively be expressed like this:
+```scala
+def m(x: T): U throws E
+```
+Also the capability to throw multiple types of exceptions can be expressed in a few ways as shown in the examples below:
+```scala
+def m(x: T): U throws E1 | E2
+def m(x: T): U throws E1 throws E2
+def m(x: T)(using CanThrow[E1], CanThrow[E2]): U
+def m(x: T)(using CanThrow[E1])(using CanThrow[E2]): U
+def m(x: T)(using CanThrow[E1]): U throws E2
+```
+
+**Note 1:** A signature like
+```scala
+def m(x: T)(using CanThrow[E1 | E2]): U
+```
+would also allow throwing `E1` or `E2` inside the method's body but might cause problems when someone tried to call this method
+from another method declaring its `CanThrow` capabilities like in the earlier examples.
+This is because `CanThrow` has a contravariant type parameter so `CanThrow[E1 | E2]` is a subtype of both `CanThrow[E1]` and `CanThrow[E2]`.
+Hence the presence of a given instance of `CanThrow[E1 | E2]` in scope satisfies the requirement for `CanThrow[E1]` and `CanThrow[E2]`
+but given instances of `CanThrow[E1]` and `CanThrow[E2]` cannot be combined to provide an instance of `CanThrow[E1 | E2]`.
+
+**Note 2:** One should keep in mind that `|` binds its left and right arguments more tightly than `throws`, so `A | B throws E1 | E2` means `(A | B) throws (E1 | E2)`, not `A | (B throws E1) | E2`.
+
+The `CanThrow`/`throws` combo essentially propagates the `CanThrow` requirement outwards. But where are these capabilities created in the first place? That's in the `try` expression. Given a `try` like this:
+
+```scala
+try
+  body
+catch
+  case ex1: Ex1 => handler1
+  ...
+  case exN: ExN => handlerN
+```
+the compiler generates an accumulated capability of type `CanThrow[Ex1 | ... | ExN]` that is available as a given in the scope of `body`. It does this by augmenting the `try` roughly as follows:
+```scala
+try
+  erased given CanThrow[Ex1 | ... | ExN] = compiletime.erasedValue
+  body
+catch ...
+```
+Note that the right-hand side of the synthesized given is `compiletime.erasedValue`, a value without a runtime representation. This is OK since
+this given is erased; it will not be executed at runtime.
+
+**Note 1:** The [`saferExceptions`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$experimental$$saferExceptions$.html) feature is designed to work only with checked exceptions. An exception type is _checked_ if it is a subtype of
+`Exception` but not of `RuntimeException`. The signature of `CanThrow` still admits `RuntimeException`s since `RuntimeException` is a proper subtype of its bound, `Exception`. But no capabilities will be generated for `RuntimeException`s. Furthermore, `throws` clauses
+also may not refer to `RuntimeException`s.
+
+**Note 2:** To keep things simple, the compiler will currently only generate capabilities
+for catch clauses of the form
+```scala
+  case ex: Ex =>
+```
+where `ex` is an arbitrary variable name (`_` is also allowed), and `Ex` is an arbitrary
+checked exception type. Constructor patterns such as `Ex(...)` or patterns with guards
+are not allowed. The compiler will issue an error if one of these is used to catch
+a checked exception and `saferExceptions` is enabled.
+
+## Example
+
+That's it. Let's see it in action in an example. First, add an import
+```scala
+import language.experimental.saferExceptions
+```
+to enable exception checking.
+Now, define an exception `LimitExceeded` and
+a function `f` like this:
+```scala
+val limit = 10e9
+class LimitExceeded extends Exception
+def f(x: Double): Double =
+  if x < limit then x * x else throw LimitExceeded()
+```
+You'll get this error message:
+```
+   if x < limit then x * x else throw LimitExceeded()
+                                ^^^^^^^^^^^^^^^^^^^^^
+The capability to throw exception LimitExceeded is missing.
+The capability can be provided by one of the following:
+ - Adding a using clause `(using CanThrow[LimitExceeded])` to the definition of the enclosing method
+ - Adding `throws LimitExceeded` clause after the result type of the enclosing method
+ - Wrapping this piece of code with a `try` block that catches LimitExceeded
+
+The following import might fix the problem:
+
+  import unsafeExceptions.canThrowAny
+```
+As the error message implies, you have to declare that `f` needs the capability to throw a `LimitExceeded` exception. The most concise way to do so is to add a `throws` clause:
+```scala
+def f(x: Double): Double throws LimitExceeded =
+  if x < limit then x * x else throw LimitExceeded()
+```
+Now put a call to `f` in a `try` that catches `LimitExceeded`:
+```scala
+@main def test(xs: Double*) =
+  try println(xs.map(f).sum)
+  catch case ex: LimitExceeded => println("too large")
+```
+Run the program with some inputs:
+```
+> scala test 1 2 3
+14.0
+> scala test
+0.0
+> scala test 1 2 3 100000000000
+too large
+```
+Everything typechecks and works as expected. But wait - we have called `map` without any ceremony! How did that work? Here's how the compiler expands the `test` function:
+```scala
+// compiler-generated code
+@main def test(xs: Double*) =
+  try
+    erased given ctl: CanThrow[LimitExceeded] = compiletime.erasedValue
+    println(xs.map(x => f(x)(using ctl)).sum)
+  catch case ex: LimitExceeded => println("too large")
+```
+The `CanThrow[LimitExceeded]` capability is passed in a synthesized `using` clause to `f`, since `f` requires it. Then the resulting closure is passed to `map`. The signature of `map` does not have to account for effects. It takes a closure as always, but that
+closure may refer to capabilities in its free variables. This means that `map` is
+already effect polymorphic even though we did not change its signature at all.
+So the takeaway is that the effects as capabilities model naturally provides for effect polymorphism whereas this is something that other approaches struggle with.
+
+## Gradual Typing Via Imports
+
+Another advantage is that the model allows a gradual migration from current unchecked exceptions to safer exceptions. Imagine for a moment that [`experimental.saferExceptions`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$experimental$$saferExceptions$.html) is turned on everywhere. There would be lots of code that breaks since functions have not yet been properly annotated with `throws`. But it's easy to create an escape hatch that lets us ignore the breakages for a while: simply add the import
+```scala
+import scala.unsafeExceptions.canThrowAny
+```
+This will provide the [`CanThrow`](https://scala-lang.org/api/3.x/scala/CanThrow.html) capability for any exception, and thereby allow
+all throws and all other calls, no matter what the current state of `throws` declarations is. Here's the
+definition of [`canThrowAny`](https://scala-lang.org/api/3.x/scala/unsafeExceptions$.html#canThrowAny-0):
+```scala
+package scala
+object unsafeExceptions:
+  given canThrowAny: CanThrow[Exception] = ???
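+  // Since CanThrow is an erased class, this given is itself erased;
+  // its `???` right-hand side is never evaluated at runtime.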
+```
+Of course, defining a global capability like this amounts to cheating. But the cheating is useful for gradual typing. The import could be used to migrate existing code, or to
+enable more fluid explorations of code without regard for complete exception safety. At the end of these migrations or explorations the import should be removed.
+
+## Scope Of the Extension
+
+To summarize, the extension for safer exception checking consists of the following elements:
+
+ - It adds to the standard library the class `scala.CanThrow`, the type `scala.$throws`, and the [`scala.unsafeExceptions`](https://scala-lang.org/api/3.x/scala/unsafeExceptions$.html) object, as they were described above.
+ - It adds some desugaring rules to rewrite `throws` types to cascaded `$throws` types.
+ - It augments the type checking of `throw` by _demanding_ a `CanThrow` capability for the thrown exception.
+ - It augments the type checking of `try` by _providing_ `CanThrow` capabilities for every caught exception.
+
+That's all. It's quite remarkable that one can do exception checking in this way without any special additions to the type system. We just need regular givens and context functions. Any runtime overhead is eliminated using `erased`.
+
+## Caveats
+
+Our capability model allows us to declare and check the thrown exceptions of first-order code. But as it stands, it does not give us enough mechanism to enforce the _absence_ of
+capabilities for arguments to higher-order functions. Consider a variant `pureMap`
+of `map` that should enforce that its argument does not throw exceptions or have any other effects (maybe because it wants to reorder computations transparently). Right now
+we cannot enforce that since the function argument to `pureMap` can capture arbitrary
+capabilities in its free variables without them showing up in its type. One possible way to
+address this would be to introduce a pure function type (maybe written `A -> B`). Pure functions are not allowed to close over capabilities. Then `pureMap` could be written
+like this:
+```scala
+  def pureMap(f: A -> B): List[B]
+```
+Another area where the lack of purity requirements shows up is when capabilities escape from bounded scopes. Consider the following function
+```scala
+def escaped(xs: Double*): () => Int =
+  try () => xs.map(f).sum
+  catch case ex: LimitExceeded => -1
+```
+With the system presented here, this function typechecks, with expansion
+```scala
+// compiler-generated code
+def escaped(xs: Double*): () => Int =
+  try
+    given ctl: CanThrow[LimitExceeded] = ???
+    () => xs.map(x => f(x)(using ctl)).sum
+  catch case ex: LimitExceeded => -1
+```
+But if you try to call `escaped` like this
+```scala
+val g = escaped(1, 2, 100000000000)
+g()
+```
+the result will be a `LimitExceeded` exception thrown at the second line where `g` is called. What's missing is that `try` should enforce that the capabilities it generates do not escape as free variables in the result of its body. It makes sense to describe such scoped effects as _ephemeral capabilities_ - they have lifetimes that cannot be extended to delayed code in a lambda.
+
+
+## Outlook
+
+We are working on a new class of type system that supports ephemeral capabilities by tracking the free variables of values. Once that research matures, it will hopefully be possible to augment the Scala language so that we can enforce the missing properties.
+
+And it would have many other applications besides: Exceptions are a special case of _algebraic effects_, which have been a very active research area over the last 20 years and are finding their way into programming languages (e.g. [Koka](https://koka-lang.github.io/koka/doc/book.html#why-handlers), [Eff](https://www.eff-lang.org/learn/), [Multicore OCaml](https://discuss.ocaml.org/t/multicore-ocaml-september-2021-effect-handlers-will-be-in-ocaml-5-0/8554), [Unison](https://www.unisonweb.org/docs/language-reference/#abilities-and-ability-handlers)). In fact, algebraic effects have been characterized as being equivalent to exceptions with an additional _resume_ operation. The techniques developed here for exceptions can probably be generalized to other classes of algebraic effects.
+
+But even without these additional mechanisms, exception checking is already useful as it is. It gives a clear path forward to make code that uses exceptions safer, better documented, and easier to refactor. The only loophole arises for scoped capabilities - here we have to verify manually that these capabilities do not escape. Specifically, a `try` always has to be placed in the same computation stage as the throws that it enables.
+
+Put another way: If the status quo is 0% static checking since 100% is too painful, then an alternative that gives you 95% static checking with great ergonomics looks like a win. And we might still get to 100% in the future.
+
+For more info, see also our [paper at the ACM Scala Symposium 2021](https://infoscience.epfl.ch/record/290885).
diff --git a/docs/_spec/TODOreference/experimental/cc.md b/docs/_spec/TODOreference/experimental/cc.md
new file mode 100644
index 000000000000..878bc0a64ed6
--- /dev/null
+++ b/docs/_spec/TODOreference/experimental/cc.md
@@ -0,0 +1,738 @@
+---
+layout: doc-page
+title: "Capture Checking"
+---
+
+Capture checking is a research project that modifies the Scala type system to track references to capabilities in values. It can be enabled with the `-Ycc` compiler option.
+At present, capture checking is still highly experimental and unstable.
+
+To get an idea of what capture checking can do, let's start with a small example:
+```scala
+def usingLogFile[T](op: FileOutputStream => T): T =
+  val logFile = FileOutputStream("log")
+  val result = op(logFile)
+  logFile.close()
+  result
+```
+The `usingLogFile` method invokes a given operation with a fresh log file as parameter. Once the operation has ended, the log file is closed and the
+operation's result is returned. This is a typical _try-with-resources_ pattern, similar to many other such patterns which are often supported by special language constructs in other languages.
+
+The problem is that `usingLogFile`'s implementation is not entirely safe. One can
+undermine it by passing an operation that performs the logging at some later point,
+after the log file has already been closed. For instance:
+```scala
+val later = usingLogFile { file => () => file.write(0) }
+later() // crash
+```
+When `later` is executed it tries to write to a file that is already closed, which
+results in an uncaught `IOException`.
+
+Capture checking gives us the mechanism to prevent such errors _statically_. To
+prevent unsafe usages of `usingLogFile`, we can declare it like this:
+```scala
+def usingLogFile[T](op: ({*} FileOutputStream) => T): T =
+  // same body as before
+```
+The only thing that's changed is that the `FileOutputStream` parameter of `op` is now
+tagged with `{*}`.
+We'll see that this turns the parameter into a _capability_ whose lifetime is tracked.
+
+If we now try to define the problematic value `later`, we get a static error:
+```
+  | val later = usingLogFile { f => () => f.write(0) }
+  |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+  |The expression's type {*} () -> Unit is not allowed to capture the root capability `*`.
+  |This usually means that a capability persists longer than its allowed lifetime.
+```
+In this case, it was easy to see that the `logFile` capability escapes in the closure passed to `usingLogFile`. But capture checking also works for more complex cases.
+For instance, capture checking is able to distinguish between the following safe code:
+```scala
+val xs = usingLogFile { f =>
+  List(1, 2, 3).map { x => f.write(x); x * x }
+}
+```
+and the following unsafe one:
+```scala
+val xs = usingLogFile { f =>
+  LazyList(1, 2, 3).map { x => f.write(x); x * x }
+}
+```
+An error would be issued in the second case, but not the first one (this assumes a capture-aware
+formulation of `LazyList` which we will present later in this page).
+
+It turns out that capture checking has very broad applications. Besides the various
+try-with-resources patterns, it can also be a key part of the solution to many other long-standing problems in programming languages. Among them:
+
+ - How to have a simple and flexible system for checked exceptions. We show later
+   how capture checking enables a clean and fully safe system for checked exceptions in Scala.
+ - How to address the problem of effect polymorphism in general.
+ - How to solve the "what color is your function?" problem of mixing synchronous
+   and asynchronous computations.
+ - How to do region-based allocation safely.
+ - How to reason about capabilities associated with memory locations.
+
+The following sections explain in detail how capture checking works in Scala 3.
+
+
+## Overview
+
+The capture checker extension introduces a new kind of type and enforces some rules for working with these types.
+
+Capture checking is enabled by the compiler option `-Ycc`. If the option is not given, the new
+type forms can still be written but they are not checked for consistency, because they are
+treated simply as certain uninterpreted annotated types.
+
+## Capabilities and Capturing Types
+
+Capture checking is done in terms of _capturing types_ of the form
+`{c₁, ..., cᵢ} T`. Here `T` is a type, and `{c₁, ..., cᵢ}` is a _capture set_ consisting of references to capabilities `c₁, ..., cᵢ`.
+
+A _capability_ is syntactically a method- or class-parameter, a local variable, or the `this` of an enclosing class. The type of a capability
+must be a capturing type with a non-empty capture set. We also say that
+variables that are capabilities are _tracked_.
+
+In a sense, every
+capability gets its authority from some other, more sweeping capability which it captures. The most sweeping capability, from which ultimately all others are derived, is written `*`. We call it the _universal capability_.
+
+Here is an example:
+```scala
+class FileSystem
+
+class Logger(fs: {*} FileSystem):
+  def log(s: String): Unit = ... // Write to a log file, using `fs`
+
+def test(fs: {*} FileSystem) =
+  val l: {fs} Logger = Logger(fs)
+  l.log("hello world!")
+  val xs: {l} LazyList[Int] =
+    LazyList.from(1)
+      .map { i =>
+        l.log(s"computing elem # $i")
+        i * i
+      }
+  xs
+```
+Here, the `test` method takes a `FileSystem` as a parameter. `fs` is a capability since its type has a non-empty capture set.
The capability is passed to the `Logger` constructor +and retained as a field in class `Logger`. Hence, the local variable `l` has type +`{fs} Logger`: it is a `Logger` which retains the `fs` capability. + +The second variable defined in `test` is `xs`, a lazy list that is obtained from +`LazyList.from(1)` by logging and mapping consecutive numbers. Since the list is lazy, +it needs to retain the reference to the logger `l` for its computations. Hence, the +type of the list is `{l} LazyList[Int]`. On the other hand, since `xs` only logs but does +not do other file operations, it retains the `fs` capability only indirectly. That's why +`fs` does not show up in the capture set of `xs`. + +Capturing types come with a subtype relation where types with "smaller" capture sets are subtypes of types with larger sets (the _subcapturing_ relation is defined in more detail below). If a type `T` does not have a capture set, it is called _pure_, and is a subtype of +any capturing type that adds a capture set to `T`. + +## Function Types + +The usual function type `A => B` now stands for a function that can capture arbitrary capabilities. We call such functions +_impure_. By contrast, the new single arrow function type `A -> B` stands for a function that cannot capture any capabilities, or otherwise said, is _pure_. One can add a capture set in front of an otherwise pure function. +For instance, `{c, d} A -> B` would be a function that can capture capabilities `c` and `d`, but no others. + +The impure function type `A => B` is treated as an alias for `{*} A -> B`. That is, impure functions are functions that can capture anything. + +Function types and captures both associate to the right, so +```scala +{c} A -> {d} B -> C +``` +is the same as +```scala +{c} (A -> {d} (B -> C)) +``` +Contrast with +```scala +({c} A) -> ({d} B) -> C +``` +which is a curried pure function over argument types that can capture `c` and `d`, respectively. + +Analogous conventions apply to context function types. `A ?=> B` is an impure context function, with `A ?-> B` as its pure complement. + +**Note 1:** The identifiers `->` and `?->` are now treated as soft keywords when used as infix type operators. They are +still available as regular identifiers for terms. For instance, the mapping syntax `Map("x" -> 1, "y" -> 2)` is still supported since it only applies to terms. + +**Note 2:** The distinctions between pure vs impure function types do not apply to methods. In fact, since methods are not values they never capture anything directly. References to +capabilities in a method are instead counted in the capture set of the enclosing object. + +## By-Name Parameter Types + +A convention analogous to function types also extends to by-name parameters. In +```scala +def f(x: => Int): Int +``` +the actual argument can refer to arbitrary capabilities. So the following would be OK: +```scala +f(if p(y) then throw Ex() else 1) +``` +On the other hand, if `f` was defined like this +```scala +def f(x: -> Int): Int +``` +the actual argument to `f` could not refer to any capabilities, so the call above would be rejected. +One can also allow specific capabilities like this: +```scala +def f(x: {c}-> Int): Int +``` +Here, the actual argument to `f` is allowed to use the `c` capability but no others. + +**Note**: It is strongly recommended to write the capability set and the arrow `->` without intervening spaces, +as otherwise the notation would look confusingly like a function type. 
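+
+As an illustrative sketch (not taken from the reference itself; it assumes a checked exception class `Ex`, a capability `c: CanThrow[Ex]` in scope, and hypothetical methods `f1`, `f2`, `f3`), the three by-name parameter forms compare as follows:
+```scala
+def f1(x: => Int): Int = x      // argument may capture arbitrary capabilities
+def f2(x: -> Int): Int = x      // argument must be pure
+def f3(x: {c}-> Int): Int = x   // argument may capture at most `c`
+
+f1(throw Ex())  // ok: the thunk captures `c`, which `=>` permits
+f2(throw Ex())  // error: the thunk captures `c`, but `->` permits no captures
+f3(throw Ex())  // ok: the declared set {c} accounts for the captured `c`
+```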
+ +## Subtyping and Subcapturing + +Capturing influences subtyping. As usual we write `T₁ <: T₂` to express that the type +`T₁` is a subtype of the type `T₂`, or equivalently, that `T₁` conforms to `T₂`. An +analogous _subcapturing_ relation applies to capture sets. If `C₁` and `C₂` are capture sets, we write `C₁ <: C₂` to express that `C₁` _is covered by_ `C₂`, or, swapping the operands, that `C₂` _covers_ `C₁`. + +Subtyping extends as follows to capturing types: + + - Pure types are subtypes of capturing types. That is, `T <: C T`, for any type `T`, capturing set `C`. + - For capturing types, smaller capturing sets produce subtypes: `C₁ T₁ <: C₂ T₂` if + `C₁ <: C₂` and `T₁ <: T₂`. + +A subcapturing relation `C₁ <: C₂` holds if `C₂` _accounts for_ every element `c` in `C₁`. This means one of the following three conditions must be true: + + - `c ∈ C₂`, + - `c` refers to a parameter of some class `Cls` and `C₂` contains `Cls.this`, + - `c`'s type has capturing set `C` and `C₂` accounts for every element of `C` (that is, `C <: C₂`). + + +**Example 1.** Given +```scala +fs: {*} FileSystem +ct: {*} CanThrow[Exception] +l : {fs} Logger +``` +we have +``` +{l} <: {fs} <: {*} +{fs} <: {fs, ct} <: {*} +{ct} <: {fs, ct} <: {*} +``` +The set consisting of the root capability `{*}` covers every other capture set. This is +a consequence of the fact that, ultimately, every capability is created from `*`. + +**Example 2.** Consider again the FileSystem/Logger example from before. `LazyList[Int]` is a proper subtype of `{l} LazyList[Int]`. So if the `test` method in that example +was declared with a result type `LazyList[Int]`, we'd get a type error. Here is the error message: +``` +11 |def test(using fs: {*} FileSystem): LazyList[Int] = { + | ^ + | Found: {fs} LazyList[Int] + | Required: LazyList[Int] +``` +Why does it say `{fs} LazyList[Int]` and not `{l} LazyList[Int]`, which is, after all, the type of the returned value `xs`? The reason is that `l` is a local variable in the body of `test`, so it cannot be referred to in a type outside that body. What happens instead is that the type is _widened_ to the smallest supertype that does not mention `l`. Since `l` has capture set `fs`, we have that `{fs}` covers `{l}`, and `{fs}` is acceptable in a result type of `test`, so `{fs}` is the result of that widening. +This widening is called _avoidance_; it is not specific to capture checking but applies to all variable references in Scala types. + +## Capability Classes + +Classes like `CanThrow` or `FileSystem` have the property that their values are always intended to be capabilities. We can make this intention explicit and save boilerplate by declaring these classes with a `@capability` annotation. + +The capture set of a capability class type is always `{*}`. This means we could equivalently express the `FileSystem` and `Logger` classes as follows: +```scala +import annotation.capability + +@capability class FileSystem + +class Logger(using FileSystem): + def log(s: String): Unit = ??? + +def test(using fs: FileSystem) = + val l: {fs} Logger = Logger() + ... +``` +In this version, `FileSystem` is a capability class, which means that the `{*}` capture set is implied on the parameters of `Logger` and `test`. 
+Writing the capture set explicitly produces a warning:
+```scala
+class Logger(using {*} FileSystem):
+                   ^^^^^^^^^^^^^^
+                   redundant capture: FileSystem already accounts for *
+```
+Another, unrelated change in this version of the example is that the `FileSystem` capability is now passed as an implicit parameter. It is quite natural to model capabilities with implicit parameters since it greatly reduces the wiring overhead once multiple capabilities are in play.
+
+## Capture Checking of Closures
+
+If a closure refers to capabilities in its body, it captures these capabilities in its type. For instance, consider:
+```scala
+def test(fs: FileSystem): {fs} String -> Unit =
+  (x: String) => Logger(fs).log(x)
+```
+Here, the body of `test` is a lambda that refers to the capability `fs`, which means that `fs` is retained in the lambda.
+Consequently, the type of the lambda is `{fs} String -> Unit`.
+
+**Note:** Function values are always written with `=>` (or `?=>` for context functions). There is no syntactic
+distinction for pure _vs_ impure function values. The distinction is only made in their types.
+
+A closure also captures all capabilities that are captured by the functions
+it calls. For instance, in
+```scala
+def test(fs: FileSystem) =
+  def f() = g()
+  def g() = (x: String) => Logger(fs).log(x)
+  f
+```
+the result of `test` has type `{fs} String -> Unit` even though function `f` itself does not refer to `fs`.
+
+## Capture Checking of Classes
+
+The principles for capture checking closures also apply to classes. For instance, consider:
+```scala
+class Logger(using fs: FileSystem):
+  def log(s: String): Unit = ... summon[FileSystem] ...
+
+def test(xfs: FileSystem): {xfs} Logger =
+  Logger(xfs)
+```
+Here, class `Logger` retains the capability `fs` as a (private) field. Hence, the result
+of `test` is of type `{xfs} Logger`.
+
+Sometimes, a tracked capability is meant to be used only in the constructor of a class, but
+is not intended to be retained as a field. This fact can be communicated to the capture
+checker by declaring the parameter as `@constructorOnly`. Example:
+```scala
+import annotation.constructorOnly
+
+class NullLogger(using @constructorOnly fs: FileSystem):
+  ...
+def test2(using fs: FileSystem): NullLogger = NullLogger() // OK
+```
+
+The captured references of a class include _local capabilities_ and _argument capabilities_. Local capabilities are capabilities defined outside the class and referenced from its body. Argument capabilities are passed as parameters to the primary constructor of the class. Local capabilities are inherited:
+the local capabilities of a superclass are also local capabilities of its subclasses. Example:
+
+```scala
+@capability class Cap
+
+def test(a: Cap, b: Cap, c: Cap) =
+  class Super(y: Cap):
+    def f = a
+  class Sub(x: Cap) extends Super(x):
+    def g = b
+  Sub(c)
+```
+Here class `Super` has local capability `a`, which gets inherited by class
+`Sub` and is combined with `Sub`'s own local capability `b`. Class `Sub` also has an argument capability corresponding to its parameter `x`. This capability gets instantiated to `c` in the final constructor call `Sub(c)`. Hence,
+the capture set of that call is `{a, b, c}`.
+
+The capture set of the type of `this` of a class is inferred by the capture checker, unless the type is explicitly declared with a self type annotation like this one:
+```scala
+class C:
+  self: {a, b} D => ...
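+  // the self type above declares that `this` of C captures at most {a, b}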
+``` +The inference observes the following constraints: + + - The type of `this` of a class `C` includes all captured references of `C`. + - The type of `this` of a class `C` is a subtype of the type of `this` + of each parent class of `C`. + - The type of `this` must observe all constraints where `this` is used. + +For instance, in +```scala +@capability class Cap +def test(c: Cap) = + class A: + val x: A = this + def f = println(c) // error +``` +we know that the type of `this` must be pure, since `this` is the right hand side of a `val` with type `A`. However, in the last line we find that the capture set of the class, and with it the capture set of `this`, would include `c`. This leads to a contradiction, and hence to a checking error: +``` +16 | def f = println(c) // error + | ^ + |(c : Cap) cannot be referenced here; it is not included in the allowed capture set {} +``` + +## Capture Tunnelling + +Consider the following simple definition of a `Pair` class: +```scala +class Pair[+A, +B](x: A, y: B): + def fst: A = x + def snd: B = y +``` +What happens if `Pair` is instantiated like this (assuming `ct` and `fs` are two capabilities in scope)? +```scala +def x: {ct} Int -> String +def y: {fs} Logger +def p = Pair(x, y) +``` +The last line will be typed as follows: +```scala +def p: Pair[{ct} Int -> String, {fs} Logger] = Pair(x, y) +``` +This might seem surprising. The `Pair(x, y)` value does capture capabilities `ct` and `fs`. Why don't they show up in its type at the outside? + +The answer is capture tunnelling. Once a type variable is instantiated to a capturing type, the +capture is not propagated beyond this point. On the other hand, if the type variable is instantiated +again on access, the capture information "pops out" again. For instance, even though `p` is technically pure because its capture set is empty, writing `p.fst` would record a reference to the captured capability `ct`. So if this access was put in a closure, the capability would again form part of the outer capture set. E.g. +```scala +() => p.fst : {ct} () -> {ct} Int -> String +``` +In other words, references to capabilities "tunnel through" in generic instantiations from creation to access; they do not affect the capture set of the enclosing generic data constructor applications. +This principle plays an important part in making capture checking concise and practical. + +## Escape Checking + +The universal capability `*` should be conceptually available only as a parameter to the main program. Indeed, if it was available everywhere, capability checking would be undermined since one could mint new capabilities +at will. In line with this reasoning, some capture sets are restricted so that +they are not allowed to contain the universal capability. + +Specifically, if a capturing type is an instance of a type variable, that capturing type +is not allowed to carry the universal capability `{*}`. There's a connection to tunnelling here. +The capture set of a type has to be present in the environment when a type is instantiated from +a type variable. But `*` is not itself available as a global entity in the environment. Hence, +an error should result. + +We can now reconstruct how this principle produced the error in the introductory example, where +`usingLogFile` was declared like this: +```scala +def usingLogFile[T](op: ({*} FileOutputStream) => T): T = ... 
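+  // the {*} on op's parameter turns the log file into a tracked capability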
+```
+The error message was:
+```
+  | val later = usingLogFile { f => () => f.write(0) }
+  |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+  |The expression's type {*} () -> Unit is not allowed to capture the root capability `*`.
+  |This usually means that a capability persists longer than its allowed lifetime.
+```
+This error message was produced by the following logic:
+
+ - The `f` parameter has type `{*} FileOutputStream`, which makes it a capability.
+ - Therefore, the type of the expression `() => f.write(0)` is `{f} () -> Unit`.
+ - This makes the type of the whole closure passed to `usingLogFile` the dependent function type
+   `(f: {*} FileOutputStream) -> {f} () -> Unit`.
+ - The expected type of the closure is a simple, parametric, impure function type `({*} FileOutputStream) => T`,
+   for some instantiation of the type variable `T`.
+ - The smallest supertype of the closure's dependent function type that is a parametric function type is
+   `({*} FileOutputStream) => {*} () -> Unit`
+ - Hence, the type variable `T` is instantiated to `{*} () -> Unit`, which causes the error.
+
+An analogous restriction applies to the type of a mutable variable.
+Another way one could try to undermine capture checking would be to
+assign a closure with a local capability to a global variable. Maybe
+like this:
+```scala
+var loophole: {*} () -> Unit = () => ()
+usingLogFile { f =>
+  loophole = () => f.write(0)
+}
+loophole()
+```
+But this will not compile either, since mutable variables cannot have universal capture sets.
+
+One also needs to prevent a closure with a local capability from being returned or assigned within an argument of a parametric type. For instance, here is a
+slightly more refined attack:
+```scala
+class Cell[+A](x: A)
+val sneaky = usingLogFile { f => Cell(() => f.write(0)) }
+sneaky.x()
+```
+At the point where the `Cell` is created, the capture set of the argument is `{f}`, which
+is OK. But at the point of use, it is `{*}` (because `f` is no longer in scope), which again causes an error:
+```
+  | sneaky.x()
+  | ^^^^^^^^
+  |The expression's type {*} () -> Unit is not allowed to capture the root capability `*`.
+  |This usually means that a capability persists longer than its allowed lifetime.
+```
+
+Looking at object graphs, we observe a monotonicity property: The capture set of an object `x` covers the capture sets of all objects reachable through `x`. This property is reflected in the type system by the following _monotonicity rule_:
+
+ - In a class `C` with a field `f`, the capture set `{this}` covers the capture set `{this.f}` as well as the capture set of any application of `this.f` to pure arguments.
+
+## Checked Exceptions
+
+Scala enables checked exceptions through a language import. Here is an example,
+taken from the [safer exceptions page](./canthrow.md), and also described in a
+[paper](https://infoscience.epfl.ch/record/290885) presented at the
+2021 Scala Symposium.
+```scala
+import language.experimental.saferExceptions
+
+class LimitExceeded extends Exception
+
+val limit = 10e+10
+def f(x: Double): Double throws LimitExceeded =
+  if x < limit then x * x else throw LimitExceeded()
+```
+The new `throws` clause expands into an implicit parameter that provides
+a `CanThrow` capability. Hence, function `f` could equivalently be written
+like this:
+```scala
+def f(x: Double)(using CanThrow[LimitExceeded]): Double = ...
+```
+If the implicit parameter is missing, an error is reported.
For instance, the function definition +```scala +def g(x: Double): Double = + if x < limit then x * x else throw LimitExceeded() +``` +is rejected with this error message: +``` + | if x < limit then x * x else throw LimitExceeded() + | ^^^^^^^^^^^^^^^^^^^^^ + |The capability to throw exception LimitExceeded is missing. + |The capability can be provided by one of the following: + | - Adding a using clause `(using CanThrow[LimitExceeded])` to the definition of the enclosing method + | - Adding `throws LimitExceeded` clause after the result type of the enclosing method + | - Wrapping this piece of code with a `try` block that catches LimitExceeded +``` +`CanThrow` capabilities are required by `throw` expressions and are created +by `try` expressions. For instance, the expression +```scala +try xs.map(f).sum +catch case ex: LimitExceeded => -1 +``` +would be expanded by the compiler to something like the following: +```scala +try + erased given ctl: CanThrow[LimitExceeded] = compiletime.erasedValue + xs.map(f).sum +catch case ex: LimitExceeded => -1 +``` +(The `ctl` capability is only used for type checking but need not show up in the generated code, so it can be declared as +erased.) + +As with other capability based schemes, one needs to guard against capabilities +that are captured in results. For instance, here is a problematic use case: +```scala +def escaped(xs: Double*): (() => Double) throws LimitExceeded = + try () => xs.map(f).sum + catch case ex: LimitExceeded => () => -1 +val crasher = escaped(1, 2, 10e+11) +crasher() +``` +This code needs to be rejected since otherwise the call to `crasher()` would cause +an unhandled `LimitExceeded` exception to be thrown. + +Under `-Ycc`, the code is indeed rejected +``` +14 | try () => xs.map(f).sum + | ^ + |The expression's type {*} () -> Double is not allowed to capture the root capability `*`. + |This usually means that a capability persists longer than its allowed lifetime. +15 | catch case ex: LimitExceeded => () => -1 +``` +To integrate exception and capture checking, only two changes are needed: + + - `CanThrow` is declared as a `@capability` class, so all references to `CanThrow` instances are tracked. + - Escape checking is extended to `try` expressions. The result type of a `try` is not allowed to + capture the universal capability. + +## A Larger Example + +As a larger example, we present an implementation of lazy lists and some use cases. For simplicity, +our lists are lazy only in their tail part. This corresponds to what the Scala-2 type `Stream` did, whereas Scala 3's `LazyList` type computes strictly less since it is also lazy in the first argument. + +Here is the base trait `LzyList` for our version of lazy lists: +```scala +trait LzyList[+A]: + def isEmpty: Boolean + def head: A + def tail: {this} LzyList[A] +``` +Note that `tail` carries a capture annotation. It says that the tail of a lazy list can +potentially capture the same references as the lazy list as a whole. + +The empty case of a `LzyList` is written as usual: +```scala +object LzyNil extends LzyList[Nothing]: + def isEmpty = true + def head = ??? + def tail = ??? 
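+  // calling `head` or `tail` on the empty list is an error,
+  // so both are left undefined with `???`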
+```
+Here is a formulation of the class for lazy cons nodes:
+```scala
+import scala.compiletime.uninitialized
+
+final class LzyCons[+A](hd: A, tl: () => {*} LzyList[A]) extends LzyList[A]:
+  private var forced = false
+  private var cache: {this} LzyList[A] = uninitialized
+  private def force =
+    if !forced then { cache = tl(); forced = true }
+    cache
+
+  def isEmpty = false
+  def head = hd
+  def tail: {this} LzyList[A] = force
+end LzyCons
+```
+The `LzyCons` class takes two parameters: a head `hd` and a tail `tl`, which is a function
+returning a `LzyList`. Both the function and its result can capture arbitrary capabilities.
+The result of applying the function is memoized after the first dereference of `tail` in
+the private mutable field `cache`. Note that the typing of the assignment `cache = tl()` relies on the monotonicity rule for `{this}` capture sets.
+
+Here is an extension method to define an infix cons operator `#:` for lazy lists. It is analogous
+to `::` but instead of a strict list it produces a lazy list without evaluating its right operand.
+```scala
+extension [A](x: A)
+  def #:(xs1: => {*} LzyList[A]): {xs1} LzyList[A] =
+    LzyCons(x, () => xs1)
+```
+Note that `#:` takes an impure call-by-name parameter `xs1` as its right argument. The result
+of `#:` is a lazy list that captures that argument.
+
+As an example usage of `#:`, here is a method `tabulate` that creates a lazy list
+of given length with a generator function `gen`. The generator function is allowed
+to have side effects.
+```scala
+def tabulate[A](n: Int)(gen: Int => A) =
+  def recur(i: Int): {gen} LzyList[A] =
+    if i == n then LzyNil
+    else gen(i) #: recur(i + 1)
+  recur(0)
+```
+Here is a use of `tabulate`:
+```scala
+class LimitExceeded extends Exception
+def squares(n: Int)(using ct: CanThrow[LimitExceeded]) =
+  tabulate(n) { i =>
+    if i > 9 then throw LimitExceeded()
+    i * i
+  }
+```
+The inferred result type of `squares` is `{ct} LzyList[Int]`, i.e. it is a lazy list of
+`Int`s that can throw the `LimitExceeded` exception when it is elaborated by calling `tail`
+one or more times.
+
+Here are some further extension methods for mapping, filtering, and concatenating lazy lists:
+```scala
+extension [A](xs: {*} LzyList[A])
+  def map[B](f: A => B): {xs, f} LzyList[B] =
+    if xs.isEmpty then LzyNil
+    else f(xs.head) #: xs.tail.map(f)
+
+  def filter(p: A => Boolean): {xs, p} LzyList[A] =
+    if xs.isEmpty then LzyNil
+    else if p(xs.head) then xs.head #: xs.tail.filter(p)
+    else xs.tail.filter(p)
+
+  def concat(ys: {*} LzyList[A]): {xs, ys} LzyList[A] =
+    if xs.isEmpty then ys
+    else xs.head #: xs.tail.concat(ys)
+
+  def drop(n: Int): {xs} LzyList[A] =
+    if n == 0 then xs else xs.tail.drop(n - 1)
+```
+Their capture annotations are all as one would expect:
+
+ - Mapping a lazy list produces a lazy list that captures the original list as well
+   as the (possibly impure) mapping function.
+ - Filtering a lazy list produces a lazy list that captures the original list as well
+   as the (possibly impure) filtering predicate.
+ - Concatenating two lazy lists produces a lazy list that captures both arguments.
+ - Dropping elements from a lazy list gives a safe approximation where the original list is captured in the result. In fact, it's only some suffix of the list that is retained at run time, but our modelling identifies lazy lists and their suffixes, so this additional knowledge would not be useful.
+
+Of course the function passed to `map` or `filter` could also be pure.
After all, `A -> B` is a subtype of `{*} A -> B` which is the same as `A => B`. In that case, the pure function +argument will _not_ show up in the result type of `map` or `filter`. For instance: +```scala +val xs = squares(10) +val ys: {xs} LzyList[Int] = xs.map(_ + 1) +``` +The type of the mapped list `ys` has only `xs` in its capture set. The actual function +argument does not show up since it is pure. Likewise, if the lazy list +`xs` was pure, it would not show up in any of the method results. +This demonstrates that capability-based +effect systems with capture checking are naturally _effect polymorphic_. + +This concludes our example. It's worth mentioning that an equivalent program defining and using standard, strict lists would require no capture annotations whatsoever. It would compile exactly as written now in standard Scala 3, yet one gets the capture checking for free. Essentially, `=>` already means "can capture anything" and since in a strict list side effecting operations are not retained in the result, there are no additional captures to record. A strict list could of course capture side-effecting closures in its elements but then tunnelling applies, since +these elements are represented by a type variable. This means we don't need to annotate anything there either. + +Another possibility would be a variant of lazy lists that requires all functions passed to `map`, `filter` and other operations like it to be pure. E.g. `map` on such a list would be defined like this: +```scala +extension [A](xs: LzyList[A]) + def map[B](f: A -> B): LzyList[B] = ... +``` +That variant would not require any capture annotations either. + +To summarize, there are two "sweet spots" of data structure design: strict lists in +side-effecting or resource-aware code and lazy lists in purely functional code. +Both are already correctly capture-typed without requiring any explicit annotations. Capture annotations only come into play where the semantics gets more complicated because we deal with delayed effects such as in impure lazy lists or side-effecting iterators over strict lists. This property is probably one of the greatest plus points of our approach to capture checking compared to previous techniques which tend to be more noisy. + +## Function Type Shorthands + +TBD + +## Compilation Options + +The following options are relevant for capture checking. + + - **-Ycc** Enables capture checking. + - **-Xprint:cc** Prints the program with capturing types as inferred by capture checking. + - **-Ycc-debug** Gives more detailed, implementation-oriented information about capture checking, as described in the next section. + + The implementation supporting capture checking with these options is currently in branch `cc-experiment` on dotty.epfl.ch. + +## Capture Checking Internals + +The capture checker is architected as a propagation constraint solver, which runs as a separate phase after type-checking and some initial transformations. + +Constraint variables stand for unknown capture sets. A constraint variable is introduced + + - for every part of a previously inferred type, + - for the accessed references of every method, class, anonymous function, or by-name argument, + - for the parameters passed in a class constructor call. + +Capture sets in explicitly written types are treated as constants (before capture checking, such sets are simply ignored). + +The capture checker essentially rechecks the program with the usual typing rules. 
+Every time a subtype requirement between capturing types is checked, this translates to a subcapturing test on capture sets. If the two sets are constant, this is simply a yes/no question, where a no will produce an error message.
+
+If the lower set `C₁` of a comparison `C₁ <: C₂` is a variable, the set `C₂` is recorded
+as a _superset_ of `C₁`. If the upper set `C₂` is a variable, the elements of `C₁` are _propagated_ to `C₂`. Propagation of an element `x` to a set `C` means that `x` is included as an element in `C`, and it is also propagated
+to all known supersets of `C`. If such a superset is a constant, it is checked that `x` is included in it. If that's not the case, the original comparison `C₁ <: C₂` has no solution and an error is reported.
+
+The type checker also performs various maps on types, for instance when substituting actual argument types for formal parameter types in dependent functions, or mapping
+member types with "as-seen-from" in a selection. Maps keep track of the variance
+of positions in a type. The variance is initially covariant; it flips to
+contravariant in function parameter positions, and can be either covariant,
+contravariant, or nonvariant in type arguments, depending on the variance of
+the type parameter.
+
+When capture checking, the same maps are also performed on capture sets. If a capture set is a constant, its elements (which are capabilities) are mapped as regular types. If the result of such a map is not a capability, the result is approximated according to the variance of the type. A covariant approximation replaces a type by its capture set.
+A contravariant approximation replaces it with the empty capture set. A nonvariant
+approximation replaces the enclosing capturing type with a range of possible types
+that gets propagated and resolved further out.
+
+When a mapping `m` is performed on a capture set variable `C`, a new variable `Cm` is created that contains the mapped elements and that is linked with `C`. If `C` subsequently acquires further elements through propagation, these are also propagated to `Cm` after being transformed by the `m` mapping. `Cm` also gets the same supersets as `C`, mapped again using `m`.
+
+One interesting aspect of the capture checker concerns the implementation of capture tunnelling. The [foundational theory](https://infoscience.epfl.ch/record/290885) on which capture checking is based makes tunnelling explicit through so-called _box_ and
+_unbox_ operations. Boxing hides a capture set and unboxing recovers it. The capture checker inserts virtual box and unbox operations based on actual and expected types similar to the way the type checker inserts implicit conversions. When capture set variables are first introduced, any capture set in a capturing type that is an instance of a type parameter is marked as "boxed". A boxing operation is
+inserted if the expected type of an expression is a capturing type with
+a boxed capture set variable. The effect of the insertion is that any references
+to capabilities in the boxed expression are forgotten, which means that capture
+propagation is stopped. Dually, if the actual type of an expression has
+a boxed variable as capture set, an unbox operation is inserted, which adds all
+elements of the capture set to the environment.
+
+Boxing and unboxing have no runtime effect, so the insertion of these operations is only simulated; the only visible effect is the retraction and insertion
+of variables in the capture sets representing the environment of the currently checked expression.
+
+The `-Ycc-debug` option provides some insight into the workings of the capture checker.
+When it is turned on, boxed sets are marked explicitly and capture set variables are printed with an ID and some information about their provenance. For instance, the string `{f, xs}33M5V` indicates a capture set
+variable that is known to hold elements `f` and `xs`. The variable's ID is `33`. The `M`
+indicates that the variable was created through a mapping from a variable with ID `5`. The latter is a regular variable, as indicated
+by `V`.
+
+Generally, the string following the capture set consists of alternating numbers and letters where each number gives a variable ID and each letter gives the provenance of the variable. Possible letters are
+
+ - `V` : a regular variable,
+ - `M` : a variable resulting from a _mapping_ of the variable indicated by the string to the right,
+ - `B` : similar to `M` but where the mapping is a _bijection_,
+ - `F` : a variable resulting from _filtering_ the elements of the variable indicated by the string to the right,
+ - `I` : a variable resulting from an _intersection_ of two capture sets,
+ - `D` : a variable resulting from the set _difference_ of two capture sets.
+
+At the end of a compilation run, `-Ycc-debug` will print all variable dependencies of variables referred to in previous output. Here is an example:
+```
+Capture set dependencies:
+   {}2V  ::
+   {}3V  ::
+   {}4V  ::
+   {f, xs}5V  :: {f, xs}31M5V, {f, xs}32M5V
+   {f, xs}31M5V  :: {xs, f}
+   {f, xs}32M5V  ::
+```
+This section lists all variables that appeared in previous diagnostics and their dependencies, recursively. For instance, we learn that
+
+ - variables 2, 3, 4 are empty and have no dependencies,
+ - variable `5` has two dependencies: variables `31` and `32` which both result from mapping variable `5`,
+ - variable `31` has a constant fixed superset `{xs, f}`,
+ - variable `32` has no dependencies.
+
diff --git a/docs/_spec/TODOreference/experimental/erased-defs-spec.md b/docs/_spec/TODOreference/experimental/erased-defs-spec.md
new file mode 100644
index 000000000000..5395a8468399
--- /dev/null
+++ b/docs/_spec/TODOreference/experimental/erased-defs-spec.md
@@ -0,0 +1,64 @@
+---
+layout: doc-page
+title: "Erased Definitions - More Details"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/erased-defs-spec.html
+---
+
+TODO: complete
+## Rules
+
+1. `erased` is a soft modifier. It can appear:
+   * At the start of a parameter block of a method, function or class
+   * In a method definition
+   * In a `val` definition (but not `lazy val` or `var`)
+   * In a `class` or `trait` definition
+
+   ```scala
+   erased val x = ...
+   erased def f = ...
+
+   def g(erased x: Int) = ...
+
+   (erased x: Int) => ...
+   def h(x: (erased Int) => Int) = ...
+
+   class K(erased x: Int) { ... }
+   erased class E {}
+   ```
+
+
+2. A reference to an `erased` val or def can only be used
+   * Inside the expression of an argument to an `erased` parameter
+   * Inside the body of an `erased` `val` or `def`
+
+
+3. Functions
+   * `(erased x1: T1, x2: T2, ..., xN: TN) => y : (erased T1, T2, ..., TN) => R`
+   * `(given erased x1: T1, x2: T2, ..., xN: TN) => y : (given erased T1, T2, ..., TN) => R`
+   * `(given erased T1) => R  <:<  erased T1 => R`
+   * `(given erased T1, T2) => R  <:<  (erased T1, T2) => R`
+   * ...
+
+   Note that there is no subtype relation between `(erased T) => R` and `T => R` (or `(given erased T) => R` and `(given T) => R`)
+
+
+4. Eta expansion
+
+   if `def f(erased x: T): U` then `f: (erased T) => U`.
+
+
+5. Erasure semantics
+   * All `erased` parameters are removed from the function
+   * Arguments to `erased` parameters are not passed to the function
+   * All `erased` definitions are removed
+   * All `(erased T1, T2, ..., TN) => R` and `(given erased T1, T2, ..., TN) => R` types become `() => R`
+
+
+6. Overloading
+
+   Methods with `erased` parameters follow the normal overloading constraints after erasure.
+
+
+7. Overriding
+   * Member definitions overriding each other must both be `erased` or not be `erased`
+   * `def foo(x: T): U` cannot be overridden by `def foo(erased x: T): U` and vice-versa
diff --git a/docs/_spec/TODOreference/experimental/erased-defs.md b/docs/_spec/TODOreference/experimental/erased-defs.md
new file mode 100644
index 000000000000..28455f26cdc0
--- /dev/null
+++ b/docs/_spec/TODOreference/experimental/erased-defs.md
@@ -0,0 +1,231 @@
+---
+layout: doc-page
+title: "Erased Definitions"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/erased-defs.html
+---
+
+`erased` is a modifier that expresses that some definition or expression is erased by the compiler instead of being represented in the compiled output. It is not yet part of the Scala language standard. To enable `erased`, turn on the language feature
+[`experimental.erasedDefinitions`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$experimental$$erasedDefinitions$.html). This can be done with a language import
+```scala
+import scala.language.experimental.erasedDefinitions
+```
+or by setting the command line option `-language:experimental.erasedDefinitions`.
+Erased definitions must be in an experimental scope (see [Experimental definitions](../other-new-features/experimental-defs.md)).
+
+## Why erased terms?
+
+Let's describe the motivation behind erased terms with an example. In the
+following we show a simple state machine which can be in a state `On` or `Off`.
+The machine can change state from `Off` to `On` with `turnedOn` only if it is
+currently `Off`. This last constraint is captured with the `IsOff[S]` contextual
+evidence which only exists for `IsOff[Off]`. For example, calling `turnedOn`
+in an `On` state is not allowed, as that would require evidence of type
+`IsOff[On]`, which will not be found.
+
+```scala
+sealed trait State
+final class On extends State
+final class Off extends State
+
+@implicitNotFound("State must be Off")
+class IsOff[S <: State]
+object IsOff:
+  given isOff: IsOff[Off] = new IsOff[Off]
+
+class Machine[S <: State]:
+  def turnedOn(using IsOff[S]): Machine[On] = new Machine[On]
+
+val m = new Machine[Off]
+m.turnedOn
+m.turnedOn.turnedOn // ERROR
+//                     ^
+//                     State must be Off
+```
+
+Note that in the code above the actual context arguments for `IsOff` are never
+used at runtime; they serve only to establish the right constraints at compile
+time.
As these terms are never used at runtime there is no real need to have
them around, but they still need to be present in some form in the generated
code to be able to do separate compilation and retain binary compatibility. We
introduce _erased terms_ to overcome this limitation: we are able to enforce the
right constraints on terms at compile time. These terms have no runtime
semantics and they are completely erased.

## How to define erased terms?

Parameters of methods and functions can be declared as erased, placing `erased`
in front of a parameter list (like `given`).

```scala
def methodWithErasedEv(erased ev: Ev): Int = 42

val lambdaWithErasedEv: erased Ev => Int =
  (erased ev: Ev) => 42
```

`erased` parameters will not be usable for computations, though they can be used
as arguments to other `erased` parameters.

```scala
def methodWithErasedInt1(erased i: Int): Int =
  i + 42 // ERROR: can not use i

def methodWithErasedInt2(erased i: Int): Int =
  methodWithErasedInt1(i) // OK
```

Not only parameters can be marked as erased: `val` and `def` definitions can also
be marked with `erased`. These will also only be usable as arguments to `erased`
parameters.

```scala
erased val erasedEvidence: Ev = ...
methodWithErasedEv(erasedEvidence)
```

## What happens with erased values at runtime?

As `erased` parameters and definitions are guaranteed not to be used in computations, they can and will be
erased.

```scala
// becomes def methodWithErasedEv(): Int at runtime
def methodWithErasedEv(erased ev: Ev): Int = ...

def evidence1: Ev = ...
erased def erasedEvidence2: Ev = ... // does not exist at runtime
erased val erasedEvidence3: Ev = ... // does not exist at runtime

// evidence1 is not evaluated and no value is passed to methodWithErasedEv
methodWithErasedEv(evidence1)
```

## State machine with erased evidence example

The following example is an extended implementation of a simple state machine
which can be in a state `On` or `Off`. The machine can change state from `Off`
to `On` with `turnedOn` only if it is currently `Off`, and conversely from `On` to
`Off` with `turnedOff` only if it is currently `On`. These last constraints are
captured with the `IsOff[S]` and `IsOn[S]` given evidences, which only exist for
`IsOff[Off]` and `IsOn[On]`. For example, calling `turnedOff` in an `Off` state
is not allowed, as it would require evidence of type `IsOn[Off]`, which cannot
be found.

As the given evidences of `turnedOn` and `turnedOff` are not used in the
bodies of those functions we can mark them as `erased`. This will remove the
evidence parameters at runtime, but we would still evaluate the `isOn` and
`isOff` givens that were found as arguments. As `isOn` and `isOff` are not
used except as `erased` arguments, we can mark them as `erased`, hence removing
the evaluation of the `isOn` and `isOff` evidences.
+ +```scala +import scala.annotation.implicitNotFound + +sealed trait State +final class On extends State +final class Off extends State + +@implicitNotFound("State must be Off") +class IsOff[S <: State] +object IsOff: + // will not be called at runtime for turnedOn, the + // compiler will only require that this evidence exists + given IsOff[Off] = new IsOff[Off] + +@implicitNotFound("State must be On") +class IsOn[S <: State] +object IsOn: + // will not exist at runtime, the compiler will only + // require that this evidence exists at compile time + erased given IsOn[On] = new IsOn[On] + +class Machine[S <: State] private (): + // ev will disappear from both functions + def turnedOn(using erased ev: IsOff[S]): Machine[On] = new Machine[On] + def turnedOff(using erased ev: IsOn[S]): Machine[Off] = new Machine[Off] + +object Machine: + def newMachine(): Machine[Off] = new Machine[Off] + +@main def test = + val m = Machine.newMachine() + m.turnedOn + m.turnedOn.turnedOff + + // m.turnedOff + // ^ + // State must be On + + // m.turnedOn.turnedOn + // ^ + // State must be Off +``` + +Note that in [Inline](../metaprogramming/inline.md) we discussed `erasedValue` and inline +matches. `erasedValue` is implemented with `erased`, so the state machine above +can be encoded as follows: + +```scala +import scala.compiletime.* + +sealed trait State +final class On extends State +final class Off extends State + +class Machine[S <: State]: + transparent inline def turnOn(): Machine[On] = + inline erasedValue[S] match + case _: Off => new Machine[On] + case _: On => error("Turning on an already turned on machine") + + transparent inline def turnOff(): Machine[Off] = + inline erasedValue[S] match + case _: On => new Machine[Off] + case _: Off => error("Turning off an already turned off machine") + +object Machine: + def newMachine(): Machine[Off] = + println("newMachine") + new Machine[Off] +end Machine + +@main def test = + val m = Machine.newMachine() + m.turnOn() + m.turnOn().turnOff() + m.turnOn().turnOn() // error: Turning on an already turned on machine +``` + +## Erased Classes + +`erased` can also be used as a modifier for a class. An erased class is intended to be used only in erased definitions. If the type of a val definition or parameter is +a (possibly aliased, refined, or instantiated) erased class, the definition is assumed to be `erased` itself. Likewise, a method with an erased class return type is assumed to be `erased` itself. Since given instances expand to vals and defs, they are also assumed to be erased if the type they produce is an erased class. Finally +function types with erased classes as arguments turn into erased function types. + +Example: +```scala +erased class CanRead + +val x: CanRead = ... // `x` is turned into an erased val +val y: CanRead => Int = ... // the function is turned into an erased function +def f(x: CanRead) = ... // `f` takes an erased parameter +def g(): CanRead = ... // `g` is turned into an erased def +given CanRead = ... // the anonymous given is assumed to be erased +``` +The code above expands to +```scala +erased class CanRead + +erased val x: CanRead = ... +val y: (erased CanRead) => Int = ... +def f(erased x: CanRead) = ... +erased def g(): CanRead = ... +erased given CanRead = ... +``` +After erasure, it is checked that no references to values of erased classes remain and that no instances of erased classes are created. 
So the following would be an error:
```scala
val err: Any = CanRead() // error: illegal reference to erased class CanRead
```
Here, the type of `err` is `Any`, so `err` is not considered erased. Yet its initializing value is a reference to the erased class `CanRead`.

[More Details](./erased-defs-spec.md)

diff --git a/docs/_spec/TODOreference/experimental/explicit-nulls.md b/docs/_spec/TODOreference/experimental/explicit-nulls.md
new file mode 100644
index 000000000000..b3fa53429cfe
--- /dev/null
+++ b/docs/_spec/TODOreference/experimental/explicit-nulls.md
@@ -0,0 +1,543 @@
---
layout: doc-page
title: "Explicit Nulls"
nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/explicit-nulls.html
---

Explicit nulls is an opt-in feature that modifies the Scala type system, making reference types
(anything that extends [`AnyRef`](https://scala-lang.org/api/3.x/scala/AnyRef.html)) _non-nullable_.

This means the following code will no longer typecheck:

```scala
val x: String = null // error: found `Null`, but required `String`
```

Instead, to mark a type as nullable we use a [union type](../new-types/union-types.md):

```scala
val x: String | Null = null // ok
```

A nullable type can hold the value `null` at runtime; hence, it is not safe to select a member without checking for nullity.

```scala
x.trim // error: trim is not a member of String | Null
```

Explicit nulls are enabled via the `-Yexplicit-nulls` compiler flag.

Read on for details.

## New Type Hierarchy

Originally, `Null` is a subtype of all reference types.

!["Original Type Hierarchy"](images/explicit-nulls/scalaHierarchyWithMatchable.png)

When explicit nulls is enabled, the type hierarchy changes so that `Null` is only
a subtype of `Any` and `Matchable`, as opposed to every reference type,
which means `null` is no longer a value of `AnyRef` and its subtypes.

This is the new type hierarchy:

!["Type Hierarchy for Explicit Nulls"](images/explicit-nulls/scalaHierarchyWithMatchableAndSafeNull.png)

After erasure, `Null` remains a subtype of all reference types (as forced by the JVM).

## Working with `Null`

To make working with nullable values easier, we propose adding a few utilities to the standard library.
So far, we have found the following useful:

- An extension method `.nn` to "cast away" nullability

  ```scala
  extension [T](x: T | Null)
    inline def nn: T =
      assert(x != null)
      x.asInstanceOf[T]
  ```

  This means that given `x: String | Null`, `x.nn` has type `String`, so we can call all the
  usual methods on it. Of course, `x.nn` will throw an NPE if `x` is `null`.

  Don't use `.nn` on mutable variables directly, because it may introduce an unknown type into the type of the variable.

- An [`unsafeNulls`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$unsafeNulls$.html) language feature.

  When imported, `T | Null` can be used as `T`, similar to regular Scala (without explicit nulls).

  See the [UnsafeNulls](#unsafenulls) section for more details.

## Unsoundness

The new type system is unsound with respect to `null`. This means there are still instances where an expression has a non-nullable type like `String`, but its value is actually `null`.

The unsoundness happens because uninitialized fields in a class start out as `null`:

```scala
class C:
  val f: String = foo(f)
  def foo(f2: String): String = f2

val c = new C()
// c.f == null
```

The unsoundness above can be caught by the compiler with the option `-Ysafe-init`.
More details can be found in [safe initialization](../other-new-features/safe-initialization.md).

## Equality

We don't allow the double-equal (`==` and `!=`) and reference (`eq` and `ne`) comparison between
`AnyRef` and `Null` anymore, since a variable with a non-nullable type cannot have `null` as value.
`null` can only be compared with `Null`, nullable union (`T | Null`), or `Any` type.

If we really want to compare `null` with non-null values, we have to provide a type hint (e.g. `: Any`).

```scala
val x: String = ???
val y: String | Null = ???

x == null // error: Values of types String and Null cannot be compared with == or !=
x eq null // error
"hello" == null // error

y == null // ok
y == x // ok

(x: String | Null) == null // ok
(x: Any) == null // ok
```

## Java Interoperability

The Scala compiler can load Java classes in two ways: from source or from bytecode. In either case,
when a Java class is loaded, we "patch" the type of its members to reflect that Java types
remain implicitly nullable.

Specifically, we patch

- the type of fields

- the argument type and return type of methods

We illustrate the rules with the following examples:

- The first two rules are easy: we nullify reference types but not value types.

  ```java
  class C {
    String s;
    int x;
  }
  ```

  ==>

  ```scala
  class C:
    val s: String | Null
    val x: Int
  ```

- We nullify type parameters because in Java a type parameter is always nullable, so the following code compiles.

  ```java
  class C<T> { T foo() { return null; } }
  ```

  ==>

  ```scala
  class C[T] { def foo(): T | Null }
  ```

  Notice this rule is sometimes too conservative, as witnessed by

  ```scala
  class InScala:
    val c: C[Boolean] = ???  // C as above
    val b: Boolean = c.foo() // no longer typechecks, since foo now returns Boolean | Null
  ```

- We can reduce the number of redundant nullable types we need to add. Consider

  ```java
  class Box<T> { T get(); }
  class BoxFactory<T> { Box<T> makeBox(); }
  ```

  ==>

  ```scala
  class Box[T] { def get(): T | Null }
  class BoxFactory[T] { def makeBox(): Box[T] | Null }
  ```

  Suppose we have a `BoxFactory[String]`. Notice that calling `makeBox()` on it returns a
  `Box[String] | Null`, not a `Box[String | Null] | Null`. This seems at first
  glance unsound ("What if the box itself has `null` inside?"), but is sound because calling
  `get()` on a `Box[String]` returns a `String | Null`.

  Notice that we need to patch _all_ Java-defined classes that transitively appear in the
  argument or return type of a field or method accessible from the Scala code being compiled.
  Absent crazy reflection magic, we think that all such Java classes _must_ be visible to
  the Typer in the first place, so they will be patched.

- We will append `Null` to the type arguments if the generic class is defined in Scala.

  ```java
  class BoxFactory<T> {
    Box<T> makeBox(); // Box is Scala-defined
    List<Box<List<T>>> makeCrazyBoxes(); // List is Java-defined
  }
  ```

  ==>

  ```scala
  class BoxFactory[T]:
    def makeBox(): Box[T | Null] | Null
    def makeCrazyBoxes(): java.util.List[Box[java.util.List[T] | Null]] | Null
  ```

  In this case, since `Box` is Scala-defined, we will get `Box[T | Null] | Null`.
  This is needed because our nullability function is only applied (modularly) to the Java
  classes, but not to the Scala ones, so we need a way to tell `Box` that it contains a
  nullable value.

  The `List` is Java-defined, so we don't append `Null` to its type argument. But we
  still need to nullify the types inside it.

- We don't nullify _simple_ literal constant (`final`) fields, since they are known to be non-null

  ```java
  class Constants {
    final String NAME = "name";
    final int AGE = 0;
    final char CHAR = 'a';

    final String NAME_GENERATED = getNewName();
  }
  ```

  ==>

  ```scala
  class Constants:
    val NAME: String("name") = "name"
    val AGE: Int(0) = 0
    val CHAR: Char('a') = 'a'

    val NAME_GENERATED: String | Null = getNewName()
  ```

- We don't append `Null` to a field or to the return type of a method that is annotated with a
  `NotNull` annotation.

  ```java
  class C {
    @NotNull String name;
    @NotNull List<String> getNames(String prefix); // List is Java-defined
    @NotNull Box<String> getBoxedName(); // Box is Scala-defined
  }
  ```

  ==>

  ```scala
  class C:
    val name: String
    def getNames(prefix: String | Null): java.util.List[String] // we still need to nullify the parameter types
    def getBoxedName(): Box[String | Null] // we don't append `Null` at the outermost level, but we still need to nullify inside
  ```

  The annotation must be from the list below to be recognized as `NotNull` by the compiler.
  Check `Definitions.scala` for an updated list.

  ```scala
  // A list of annotations that are commonly used to indicate
  // that a field/method argument or return type is not null.
  // These annotations are used by the nullification logic in
  // JavaNullInterop to improve the precision of type nullification.
  // We don't require that any of these annotations be present
  // in the class path, but we want to create Symbols for the
  // ones that are present, so they can be checked during nullification.
  @tu lazy val NotNullAnnots: List[ClassSymbol] = ctx.getClassesIfDefined(
    "javax.annotation.Nonnull" ::
    "edu.umd.cs.findbugs.annotations.NonNull" ::
    "androidx.annotation.NonNull" ::
    "android.support.annotation.NonNull" ::
    "android.annotation.NonNull" ::
    "com.android.annotations.NonNull" ::
    "org.eclipse.jdt.annotation.NonNull" ::
    "org.checkerframework.checker.nullness.qual.NonNull" ::
    "org.checkerframework.checker.nullness.compatqual.NonNullDecl" ::
    "org.jetbrains.annotations.NotNull" ::
    "lombok.NonNull" ::
    "io.reactivex.annotations.NonNull" :: Nil map PreNamedString)
  ```

### Override check

When we check overriding between Scala classes and Java classes, the rules are relaxed for the [`Null`](https://scala-lang.org/api/3.x/scala/Null.html) type with this feature, in order to help users work with Java libraries.

Suppose we have a Java method `String f(String x)`. We can override this method in Scala in any of the following forms:

```scala
def f(x: String | Null): String | Null

def f(x: String): String | Null

def f(x: String | Null): String

def f(x: String): String
```

Note that some of the definitions could cause unsoundness.
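For instance, an override can declare a non-nullable parameter even though `null` can still arrive through the patched Java signature. A minimal sketch (the Java class `J` below is assumed for illustration, not from the docs):

```scala
// Java side (assumed): class J { public String f(String x) { return x; } }
class S extends J:
  override def f(x: String): String = x.trim // x is typed String here...

val j: J = new S
j.f(null) // ...but null is accepted via J's patched signature: NPE at run time
```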
Similarly, the return type may be declared non-nullable even though a `null` value is actually returned at run time.

## Flow Typing

We added a simple form of flow-sensitive type inference. The idea is that if `p` is a
stable path or a trackable variable, then we can know that `p` is non-null if it's compared
with `null`. This information can then be propagated to the `then` and `else` branches
of an if-statement (among other places).

Example:

```scala
val s: String | Null = ???
if s != null then
  // s: String

// s: String | Null

assert(s != null)
// s: String
```

A similar inference can be made for the `else` case if the test is `p == null`:

```scala
if s == null then
  // s: String | Null
else
  // s: String
```

Both `==` and `!=` are considered comparisons for the purposes of the flow inference.

### Logical Operators

We also support logical operators (`&&`, `||`, and `!`):

```scala
val s: String | Null = ???
val s2: String | Null = ???
if s != null && s2 != null then
  // s: String
  // s2: String

if s == null || s2 == null then
  // s: String | Null
  // s2: String | Null
else
  // s: String
  // s2: String
```

### Inside Conditions

We also support type specialization _within_ the condition, taking into account that `&&` and `||` are short-circuiting:

```scala
val s: String | Null = ???

if s != null && s.length > 0 then // s: String in `s.length > 0`
  // s: String

if s == null || s.length > 0 then // s: String in `s.length > 0`
  // s: String | Null
else
  // s: String
```

### Match Case

The non-null cases can be detected in match statements.

```scala
val s: String | Null = ???

s match
  case _: String => // s: String
  case _ =>
```

### Mutable Variable

We are able to detect the nullability of some local mutable variables. A simple example is:

```scala
class C(val x: Int, val next: C | Null)

var xs: C | Null = C(1, C(2, null))
// xs is trackable, since all assignments are in the same method
while xs != null do
  // xs: C
  val xsx: Int = xs.x
  val xscpy: C = xs
  xs = xscpy // since xscpy is non-null, xs still has type C after this line
  // xs: C
  xs = xs.next // after this assignment, xs can be null again
  // xs: C | Null
```

When dealing with local mutable variables, there are two questions:

1. Whether to track a local mutable variable during flow typing.
   We track a local mutable variable if the variable is not assigned in a closure.
   For example, in the following code `x` is assigned to by the closure `y`, so we do not
   do flow typing on `x`.

   ```scala
   var x: String | Null = ???
   def y =
     x = null

   if x != null then
     // y can be called here, which would break the fact
     val a: String = x // error: x is captured and mutated by the closure, not trackable
   ```

2. Whether to generate and use flow typing on a specific _use_ of a local mutable variable.
   We only want to do flow typing on a use that belongs to the same method as the definition
   of the local variable.
   For example, in the following code, even though `x` is not assigned to by a closure, we can only
   use flow typing on one of the occurrences (because the other occurrence happens within a
   nested closure).

   ```scala
   var x: String | Null = ???
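   // x is never assigned inside a closure, so it stays trackable; still, flow
   // facts about x are only usable in this method, not inside the closure y below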
   def y =
     if x != null then
       // not safe to use the fact (x != null) here
       // since y can be executed at the same time as the outer block
       val _: String = x
   if x != null then
     val a: String = x // ok to use the fact here
   x = null
   ```

See [more examples](https://github.com/lampepfl/dotty/blob/main/tests/explicit-nulls/neg/flow-varref-in-closure.scala).

Currently, we are unable to track paths with a mutable variable prefix.
For example, `x.a` if `x` is mutable.

### Unsupported Idioms

We don't support:

- flow facts not related to nullability (`if x == 0 then { // x: 0.type not inferred }`)
- tracking aliasing between non-nullable paths

  ```scala
  val s: String | Null = ???
  val s2: String | Null = ???
  if s != null && s == s2 then
    // s: String inferred
    // s2: String not inferred
  ```

### UnsafeNulls

Because it is difficult to work with many nullable values, we introduce a language feature [`unsafeNulls`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$unsafeNulls$.html).
Inside this "unsafe" scope, all `T | Null` values can be used as `T`.

Users can import [`scala.language.unsafeNulls`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$unsafeNulls$.html) to create such scopes, or use `-language:unsafeNulls` to enable this feature globally (for migration purposes only).

Assuming `T` is a reference type (a subtype of `AnyRef`), the following unsafe operation rules apply
in an unsafe-nulls scope:

1. the members of `T` can be found on `T | Null`

2. a value with type `T` can be compared with `T | Null` and `Null`

3. if `T1` is not a subtype of `T2` using explicit-nulls subtyping (where `Null` is a direct
subtype of `Any`), extension methods and implicit conversions designed for `T2` can be used for
`T1` if `T1` is a subtype of `T2` using regular subtyping rules (where `Null` is a subtype of every
reference type)

4. if `T1` is not a subtype of `T2` using explicit-nulls subtyping, a value with type `T1`
can be used as `T2` if `T1` is a subtype of `T2` using regular subtyping rules

Additionally, `null` can be used as `AnyRef` (`Object`), which means you can select `.eq` or `.toString` on it.

A program using [`unsafeNulls`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$unsafeNulls$.html) has **similar** semantics to regular Scala, but not **equivalent** ones.

For example, the following code cannot be compiled even using unsafe nulls: because of the
Java interoperation, the return type of the `get` method becomes `T | Null`.

```scala
def head[T](xs: java.util.List[T]): T = xs.get(0) // error
```

Since the compiler doesn't know whether `T` is a reference type, it is unable to cast `T | Null`
to `T`. A `.nn` needs to be inserted after `xs.get(0)` manually by the user to fix the error; it
strips the `Null` from the type.

The intention of [`unsafeNulls`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$unsafeNulls$.html) is to give users a better migration path for explicit nulls.
Projects for Scala 2 or regular Scala 3 can try this by adding `-Yexplicit-nulls -language:unsafeNulls`
to the compile options. A small number of manual modifications are expected. To migrate to the full
explicit nulls feature in the future, `-language:unsafeNulls` can be dropped, adding
`import scala.language.unsafeNulls` only where needed.

```scala
def f(x: String): String = ???
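// f expects a non-nullable String; under unsafeNulls (imported below) it can
// nevertheless be applied to String | Null values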
def nullOf[T >: Null]: T = null

import scala.language.unsafeNulls

val s: String | Null = ???
val a: String = s // unsafely convert String | Null to String

val b1 = s.trim // call .trim on String | Null unsafely
val b2 = b1.length

f(s).trim // pass String | Null as an argument of type String unsafely

val c: String = null // Null to String

val d1: Array[String] = ???
val d2: Array[String | Null] = d1 // unsafely convert Array[String] to Array[String | Null]
val d3: Array[String] = Array(null) // unsafe

class C[T >: Null <: String] // define conflicting type bounds unsafely

val n = nullOf[String] // apply a type bound unsafely
```

Without [`unsafeNulls`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$unsafeNulls$.html), none of these unsafe operations will type-check.

[`unsafeNulls`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$unsafeNulls$.html) also works for extension methods and implicit search.

```scala
import scala.language.unsafeNulls

val x = "hello, world!".split(" ").map(_.length)

given Conversion[String, Array[String]] = _ => ???

val y: String | Null = ???
val z: Array[String | Null] = y
```

## Binary Compatibility

Our strategy for binary compatibility with Scala binaries that predate explicit nulls
and new libraries compiled without `-Yexplicit-nulls` is to leave the types unchanged
and be compatible but unsound.

[More details](https://dotty.epfl.ch/docs/internals/explicit-nulls.html)

diff --git a/docs/_spec/TODOreference/experimental/fewer-braces.md b/docs/_spec/TODOreference/experimental/fewer-braces.md
new file mode 100644
index 000000000000..eb454886ad03
--- /dev/null
+++ b/docs/_spec/TODOreference/experimental/fewer-braces.md
@@ -0,0 +1,7 @@
---
layout: doc-page
title: "Fewer Braces"
nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/fewer-braces.html
---

The documentation contained in this file is now part of [Optional Braces](./indentation.html).
\ No newline at end of file

diff --git a/docs/_spec/TODOreference/experimental/main-annotation.md b/docs/_spec/TODOreference/experimental/main-annotation.md
new file mode 100644
index 000000000000..0c60e1050b87
--- /dev/null
+++ b/docs/_spec/TODOreference/experimental/main-annotation.md
@@ -0,0 +1,97 @@
---
layout: doc-page
title: "MainAnnotation"
---

`MainAnnotation` provides a generic way to define main annotations such as `@main`.

When a user annotates a method with an annotation that extends `MainAnnotation`, a class with a `main` method will be generated. The main method will contain the code needed to parse the command line arguments and run the application.

```scala
/** Sum all the numbers
 *
 *  @param first First number to sum
 *  @param rest The rest of the numbers to sum
 */
@myMain def sum(first: Int, second: Int = 0, rest: Int*): Int = first + second + rest.sum
```

```scala
object foo {
  def main(args: Array[String]): Unit = {
    val mainAnnot = new myMain()
    val info = new Info(
      name = "foo.main",
      documentation = "Sum all the numbers",
      parameters = Seq(
        new Parameter("first", "scala.Int", hasDefault=false, isVarargs=false, "First number to sum", Seq()),
        new Parameter("second", "scala.Int", hasDefault=true, isVarargs=false, "", Seq()),
        new Parameter("rest", "scala.Int" , hasDefault=false, isVarargs=true, "The rest of the numbers to sum", Seq())
      )
    )
    val mainArgsOpt = mainAnnot.command(info, args)
    if mainArgsOpt.isDefined then
      val mainArgs = mainArgsOpt.get
      val args0 = mainAnnot.argGetter[Int](info.parameters(0), mainArgs(0), None) // using a parser of Int
      val args1 = mainAnnot.argGetter[Int](info.parameters(1), mainArgs(1), Some(() => sum$default$2())) // using a parser of Int
      val args2 = mainAnnot.varargGetter[Int](info.parameters(2), mainArgs.drop(2)) // using a parser of Int
      mainAnnot.run(() => sum(args0(), args1(), args2()*))
  }
}
```

The implementation of the `main` method first instantiates the annotation and then calls `command`.
When `command` is called, the arguments can be checked and preprocessed.
Then it defines a series of argument getters, calling `argGetter` for each parameter and `varargGetter` for the last one if it is a vararg. `argGetter` gets an optional lambda that computes the default argument.
Finally, the `run` method is called to run the application. It receives a by-name argument that contains the call to the annotated method with the instantiated arguments (using the lambdas from `argGetter`/`varargGetter`).


Here is an example implementation of `myMain` that takes all arguments positionally. It uses `util.CommandLineParser.FromString` and expects no default arguments. For simplicity, any error in preprocessing or parsing results in a crash.

```scala
// Parser used to parse command line arguments
import scala.util.CommandLineParser.FromString

// Result type of the annotated method is Int and arguments are parsed using FromString
@experimental class myMain extends MainAnnotation[FromString, Int]:
  import MainAnnotation.{ Info, Parameter }

  def command(info: Info, args: Seq[String]): Option[Seq[String]] =
    if args.contains("--help") then
      println(info.documentation)
      None // do not parse or run the program
    else if info.parameters.exists(_.hasDefault) then
      println("Default arguments are not supported")
      None
    else if info.hasVarargs then
      val numPlainArgs = info.parameters.length - 1
      if numPlainArgs > args.length then
        println("Not enough arguments")
        None
      else
        Some(args)
    else
      if info.parameters.length > args.length then
        println("Not enough arguments")
        None
      else if info.parameters.length < args.length then
        println("Too many arguments")
        None
      else
        Some(args)

  def argGetter[T](param: Parameter, arg: String, defaultArgument: Option[() => T])(using parser: FromString[T]): () => T =
    () => parser.fromString(arg)

  def varargGetter[T](param: Parameter, args: Seq[String])(using parser: FromString[T]): () => Seq[T] =
    () => args.map(arg => parser.fromString(arg))

  def run(program: () => Int): Unit =
    println("executing program")

    val result = program()
    println("result: " + result)
    println("executed program")

end myMain
```

diff --git a/docs/_spec/TODOreference/experimental/named-typeargs-spec.md b/docs/_spec/TODOreference/experimental/named-typeargs-spec.md
new file mode 100644
index 000000000000..9e1113bbac86
--- /dev/null
+++ b/docs/_spec/TODOreference/experimental/named-typeargs-spec.md
@@ -0,0 +1,41 @@
---
layout: doc-page
title: "Named Type Arguments - More Details"
nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/named-typeargs-spec.html
---

In this section we give more details about the [named type arguments](named-typeargs.md) (*experimental*).

## Syntax

The addition to the grammar is:

```
SimpleExpr1   ::=  ...
                |  SimpleExpr (TypeArgs | NamedTypeArgs)
NamedTypeArgs ::=  ‘[’ NamedTypeArg {‘,’ NamedTypeArg} ‘]’
NamedTypeArg  ::=  id ‘=’ Type
```

Note in particular that named arguments cannot be passed to type constructors:

``` scala
class C[T]

val x: C[T = Int] = // error
  new C[T = Int] // error

class E extends C[T = Int] // error
```

## Compatibility considerations

Named type arguments do not have an impact on binary compatibility, but they
have an impact on source compatibility: if the name of a method type parameter
is changed, any existing named reference to this parameter will break. This
means that the names of method type parameters are now part of the public API
of a library.
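For instance, using the `construct` method from [named type arguments](named-typeargs.md) (a sketch; the renaming scenario is hypothetical):

``` scala
// Library v1
def construct[Elem, Coll[_]](xs: Elem*): Coll[Elem] = ???

// Client code, referring to the type parameter by name
val xs = construct[Coll = List, Elem = Int](1, 2, 3)

// If v2 of the library renames Elem to E, the call above no longer
// compiles, while purely positional calls are unaffected.
```

(Unimplemented proposal: to mitigate this,
[`scala.deprecatedName`](https://www.scala-lang.org/api/current/scala/deprecatedName.html)
could be extended to also be applicable on method type parameters.)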
diff --git a/docs/_spec/TODOreference/experimental/named-typeargs.md b/docs/_spec/TODOreference/experimental/named-typeargs.md
new file mode 100644
index 000000000000..4928a40f8a6a
--- /dev/null
+++ b/docs/_spec/TODOreference/experimental/named-typeargs.md
@@ -0,0 +1,34 @@
---
layout: doc-page
title: "Named Type Arguments"
redirectFrom: /docs/reference/other-new-features/named-typeargs.html
nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/named-typeargs.html
---

**Note:** This feature is implemented in Scala 3, but is not expected to be part of Scala 3.0.

Type arguments of methods can now be specified by name as well as by position. Example:

``` scala
def construct[Elem, Coll[_]](xs: Elem*): Coll[Elem] = ???

val xs1 = construct[Coll = List, Elem = Int](1, 2, 3)
val xs2 = construct[Coll = List](1, 2, 3)
```

Similar to a named value argument `(x = e)`, a named type argument
`[X = T]` instantiates the type parameter `X` to the type `T`.
Named type arguments do not have to be in order (see `xs1` above) and
unspecified arguments are inferred by the compiler (see `xs2` above).
Type arguments must be either all named or all unnamed; mixtures of named and
positional type arguments are not supported.

## Motivation

The main benefit of named type arguments is that unlike positional arguments,
you are allowed to omit passing arguments for some parameters, like in the
definition of `xs2` above. A missing type argument is inferred as usual by
local type inference. This is particularly useful in situations where some type
arguments can be easily inferred from others.

[More details](./named-typeargs-spec.md)

diff --git a/docs/_spec/TODOreference/experimental/numeric-literals.md b/docs/_spec/TODOreference/experimental/numeric-literals.md
new file mode 100644
index 000000000000..f493ef459265
--- /dev/null
+++ b/docs/_spec/TODOreference/experimental/numeric-literals.md
@@ -0,0 +1,257 @@
---
layout: doc-page
title: "Numeric Literals"
nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/numeric-literals.html
---

**Note**: This feature is not yet part of the Scala 3 language definition. It can be made available by a language import:

```scala
import scala.language.experimental.genericNumberLiterals
```

In Scala 2, numeric literals were confined to the primitive numeric types `Int`, `Long`, `Float`, and `Double`. Scala 3 also allows numeric literals for user-defined types. Example:

```scala
val x: Long = -10_000_000_000
val y: BigInt = 0x123_abc_789_def_345_678_901
val z: BigDecimal = 110_222_799_799.99

(y: BigInt) match
  case 123_456_789_012_345_678_901 =>
```

The syntax of numeric literals is the same as before, except that there are no pre-set limits
on how large they can be.

## Meaning of Numeric Literals

The meaning of a numeric literal is determined as follows:

- If the literal ends with `l` or `L`, it is a `Long` integer (and must fit in its legal range).
- If the literal ends with `f` or `F`, it is a single precision floating point number of type `Float`.
- If the literal ends with `d` or `D`, it is a double precision floating point number of type `Double`.

In each of these cases the conversion to a number is exactly as in Scala 2 or in Java. If a numeric literal does _not_ end in one of these suffixes, its meaning is determined by the expected type:

1. If the expected type is `Int`, `Long`, `Float`, or `Double`, the literal is
   treated as a standard literal of that type.
2. 
If the expected type is a fully defined type `T` that has a given instance of type
   [`scala.util.FromDigits[T]`](https://scala-lang.org/api/3.x/scala/util/FromDigits.html), the literal is converted to a value of type `T` by passing it as an argument to
   the `fromDigits` method of that instance (more details below).
3. Otherwise, the literal is treated as a `Double` literal (if it has a decimal point or an
   exponent), or as an `Int` literal (if not). (This last possibility is again as in Scala 2 or Java.)

With these rules, the definition

```scala
val x: Long = -10_000_000_000
```

is legal by rule (1), since the expected type is `Long`. The definitions

```scala
val y: BigInt = 0x123_abc_789_def_345_678_901
val z: BigDecimal = 111222333444.55
```

are legal by rule (2), since both `BigInt` and `BigDecimal` have [`FromDigits`](https://scala-lang.org/api/3.x/scala/util/FromDigits.html) instances (which implement the `FromDigits` subclasses [`FromDigits.WithRadix`](https://scala-lang.org/api/3.x/scala/util/FromDigits$$WithRadix.html) and [`FromDigits.Decimal`](https://scala-lang.org/api/3.x/scala/util/FromDigits$$Decimal.html), respectively). On the other hand,

```scala
val x = -10_000_000_000
```

gives a type error, since without an expected type `-10_000_000_000` is treated by rule (3) as an `Int` literal, but it is too large for that type.

## The `FromDigits` Trait

To allow numeric literals, a type simply has to define a `given` instance of the
[`scala.util.FromDigits`](https://scala-lang.org/api/3.x/scala/util/FromDigits.html) type class, or one of its subclasses. `FromDigits` is defined as follows:

```scala
trait FromDigits[T]:
  def fromDigits(digits: String): T
```

Implementations of `fromDigits` convert strings of digits to the values of the
implementation type `T`.
The `digits` string consists of digits between `0` and `9`, possibly preceded by a
sign ("+" or "-"). Number separator characters `_` are filtered out before
the string is passed to `fromDigits`.

The companion object [`FromDigits`](https://scala-lang.org/api/3.x/scala/util/FromDigits$.html) also defines subclasses of `FromDigits` for whole numbers with a given radix, for numbers with a decimal point, and for numbers that can have both a decimal point and an exponent:

```scala
object FromDigits:

  /** A subclass of `FromDigits` that also allows to convert whole
   *  number literals with a radix other than 10
   */
  trait WithRadix[T] extends FromDigits[T]:
    def fromDigits(digits: String): T = fromDigits(digits, 10)
    def fromDigits(digits: String, radix: Int): T

  /** A subclass of `FromDigits` that also allows to convert number
   *  literals containing a decimal point ".".
   */
  trait Decimal[T] extends FromDigits[T]

  /** A subclass of `FromDigits` that also allows to convert number
   *  literals containing a decimal point "." or an
   *  exponent `('e' | 'E')['+' | '-']digit digit*`.
   */
  trait Floating[T] extends Decimal[T]
```

A user-defined number type can implement one of those, which signals to the compiler
that hexadecimal numbers, decimal points, or exponents are also accepted in literals
for this type.

## Error Handling

`FromDigits` implementations can signal errors by throwing exceptions of some subtype
of [`FromDigitsException`](https://scala-lang.org/api/3.x/scala/util/FromDigits$$FromDigitsException.html).
`FromDigitsException` is defined with three subclasses in the +`FromDigits` object as follows: + +```scala +abstract class FromDigitsException(msg: String) extends NumberFormatException(msg) + +class NumberTooLarge (msg: String = "number too large") extends FromDigitsException(msg) +class NumberTooSmall (msg: String = "number too small") extends FromDigitsException(msg) +class MalformedNumber(msg: String = "malformed number literal") extends FromDigitsException(msg) +``` + +## Example + +As a fully worked out example, here is an implementation of a new numeric class, `BigFloat`, that accepts numeric literals. `BigFloat` is defined in terms of a `BigInt` mantissa and an `Int` exponent: + +```scala +case class BigFloat(mantissa: BigInt, exponent: Int): + override def toString = s"${mantissa}e${exponent}" +``` + +`BigFloat` literals can have a decimal point as well as an exponent. E.g. the following expression +should produce the `BigFloat` number `BigFloat(-123, 997)`: + +```scala +-0.123E+1000: BigFloat +``` + +The companion object of `BigFloat` defines an `apply` constructor method to construct a `BigFloat` +from a `digits` string. Here is a possible implementation: + +```scala +object BigFloat: + import scala.util.FromDigits + + def apply(digits: String): BigFloat = + val (mantissaDigits, givenExponent) = + digits.toUpperCase.split('E') match + case Array(mantissaDigits, edigits) => + val expo = + try FromDigits.intFromDigits(edigits) + catch case ex: FromDigits.NumberTooLarge => + throw FromDigits.NumberTooLarge(s"exponent too large: $edigits") + (mantissaDigits, expo) + case Array(mantissaDigits) => + (mantissaDigits, 0) + val (intPart, exponent) = + mantissaDigits.split('.') match + case Array(intPart, decimalPart) => + (intPart ++ decimalPart, givenExponent - decimalPart.length) + case Array(intPart) => + (intPart, givenExponent) + BigFloat(BigInt(intPart), exponent) +``` + +To accept `BigFloat` literals, all that's needed in addition is a `given` instance of type +`FromDigits.Floating[BigFloat]`: + +```scala + given FromDigits: FromDigits.Floating[BigFloat] with + def fromDigits(digits: String) = apply(digits) +end BigFloat +``` + +Note that the `apply` method does not check the format of the `digits` argument. It is +assumed that only valid arguments are passed. For calls coming from the compiler +that assumption is valid, since the compiler will first check whether a numeric +literal has the correct format before it gets passed on to a conversion method. + +## Compile-Time Errors + +With the setup of the previous section, a literal like + +```scala +1e10_0000_000_000: BigFloat +``` + +would be expanded by the compiler to + +```scala +BigFloat.FromDigits.fromDigits("1e100000000000") +``` + +Evaluating this expression throws a [`NumberTooLarge`](https://scala-lang.org/api/3.x/scala/util/FromDigits$$NumberTooLarge.html) exception at run time. We would like it to +produce a compile-time error instead. We can achieve this by tweaking the `BigFloat` class +with a small dose of metaprogramming. The idea is to turn the `fromDigits` method +into a macro, i.e. make it an inline method with a splice as right-hand side. +To do this, replace the `FromDigits` instance in the `BigFloat` object by the following two definitions: + +```scala +object BigFloat: + ... 

  class FromDigits extends FromDigits.Floating[BigFloat]:
    def fromDigits(digits: String) = apply(digits)

  given FromDigits with
    override inline def fromDigits(digits: String) = ${
      fromDigitsImpl('digits)
    }
```

Note that an inline method cannot directly fill in for an abstract method, since it produces
no code that can be executed at runtime. That is why we define an intermediary class
`FromDigits` that contains a fallback implementation which is then overridden by the inline
method in the `FromDigits` given instance. That method is defined in terms of a macro
implementation method `fromDigitsImpl`. Here is its definition:

```scala
  private def fromDigitsImpl(digits: Expr[String])(using ctx: Quotes): Expr[BigFloat] =
    digits.value match
      case Some(ds) =>
        try
          val BigFloat(m, e) = apply(ds)
          '{BigFloat(${Expr(m)}, ${Expr(e)})}
        catch case ex: FromDigits.FromDigitsException =>
          ctx.error(ex.getMessage)
          '{BigFloat(0, 0)}
      case None =>
        '{apply($digits)}
end BigFloat
```

The macro implementation takes an argument of type `Expr[String]` and yields
a result of type `Expr[BigFloat]`. It tests whether its argument is a constant
string. If that is the case, it converts the string using the `apply` method
and lifts the resulting `BigFloat` back to `Expr` level. For non-constant
strings, `fromDigitsImpl(digits)` is simply `apply(digits)`, i.e. everything is
evaluated at runtime in this case.

The interesting part is the `catch` part of the case where `digits` is constant.
If the `apply` method throws a `FromDigitsException`, the exception's message is issued as a compile-time error in the `ctx.error(ex.getMessage)` call.

With this new implementation, a definition like

```scala
val x: BigFloat = 1234.45e3333333333
```

would give a compile-time error message:

```scala
3 |  val x: BigFloat = 1234.45e3333333333
  |                    ^^^^^^^^^^^^^^^^^^
  |                    exponent too large: 3333333333
```

diff --git a/docs/_spec/TODOreference/experimental/overview.md b/docs/_spec/TODOreference/experimental/overview.md
new file mode 100644
index 000000000000..254f103896e4
--- /dev/null
+++ b/docs/_spec/TODOreference/experimental/overview.md
@@ -0,0 +1,29 @@
---
layout: doc-page
title: "Experimental"
nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/overview.html
redirectFrom: overview.html
---

## Experimental language features

All experimental language features can be found under the `scala.language.experimental` package.
They are enabled by importing the feature or using the `-language` compiler flag.

* [`erasedDefinitions`](./erased-defs.md): Enable support for the `erased` modifier.
* `fewerBraces`: Enable support for using indentation for arguments.
* [`genericNumberLiterals`](./numeric-literals.md): Enable support for generic number literals.
* [`namedTypeArguments`](./named-typeargs.md): Enable support for named type arguments.
* [`saferExceptions`](./canthrow.md): Enable support for checked exceptions.

## Experimental language imports

In general, experimental language features can be imported in an experimental scope (see [experimental definitions](../other-new-features/experimental-defs.md)).
They can be imported at the top-level if all top-level definitions are `@experimental`.

## Experimental language features supported by special compiler options

Some experimental language features that are still in research and development can be enabled with special compiler options.
These include

* [`-Yexplicit-nulls`](./explicit-nulls.md). Enable support for tracking null references in the type system.
* [`-Ycc`](./cc.md). Enable support for capture checking.

diff --git a/docs/_spec/TODOreference/experimental/tupled-function.md b/docs/_spec/TODOreference/experimental/tupled-function.md
new file mode 100644
index 000000000000..da108fc832ad
--- /dev/null
+++ b/docs/_spec/TODOreference/experimental/tupled-function.md
@@ -0,0 +1,82 @@
---
layout: doc-page
title: "Tupled Function"
---

Tupled Function
----------------------

With functions bounded at arities up to 22, it was possible to generalize some operations over all function types using overloading.
Now that we have functions and tuples generalized to [arities above 22](../dropped-features/limit22.md), overloading is not an option anymore.
The type class `TupledFunction` provides a way to abstract directly over a function of any arity, converting it to an equivalent function that receives all arguments in a single tuple.

```scala
/** Type class relating a `FunctionN[..., R]` with an equivalent tupled function `Function1[TupleN[...], R]`
 *
 *  @tparam F a function type
 *  @tparam G a tupled function type (function of arity 1 receiving a tuple as argument)
 */
@implicitNotFound("${F} cannot be tupled as ${G}")
sealed trait TupledFunction[F, G] {
  def tupled(f: F): G
  def untupled(g: G): F
}
```

The compiler will synthesize an instance of `TupledFunction[F, G]` if:

* `F` is a function type of arity `N`
* `G` is a function with a single tuple argument of size `N` and its types are equal to the arguments of `F`
* The return type of `F` is equal to the return type of `G`
* `F` and `G` are the same sort of function (both are `(...) => R` or both are `(...) ?=> R`)
* If only one of `F` or `G` is instantiated, the second one is inferred.

Examples
--------
`TupledFunction` can be used to generalize the `Function1.tupled`, ... `Function22.tupled` methods to functions of any arity.
The following defines `tupled` as an [extension method](../contextual/extension-methods.html) ([full example](https://github.com/lampepfl/dotty/blob/main/tests/run/tupled-function-tupled.scala)).

```scala
/** Creates a tupled version of this function: instead of N arguments,
 *  it accepts a single [[scala.Tuple]] with N elements as argument.
 *
 *  @tparam F the function type
 *  @tparam Args the tuple type with the same types as the function arguments of F
 *  @tparam R the return type of F
 */
extension [F, Args <: Tuple, R](f: F)
  def tupled(using tf: TupledFunction[F, Args => R]): Args => R = tf.tupled(f)
```

`TupledFunction` can be used to generalize `Function.untupled` to functions of any arity ([full example](https://github.com/lampepfl/dotty/blob/main/tests/run/tupled-function-untupled.scala))

```scala
/** Creates an untupled version of this function: instead of a single argument of type [[scala.Tuple]] with N elements,
 *  it accepts N arguments.
 *
 *  This is a generalization of [[scala.Function.untupled]] that works on functions of any arity
 *
 *  @tparam F the function type
 *  @tparam Args the tuple type with the same types as the function arguments of F
 *  @tparam R the return type of F
 */
extension [F, Args <: Tuple, R](f: Args => R)
  def untupled(using tf: TupledFunction[F, Args => R]): F = tf.untupled(f)
```

`TupledFunction` can also be used to generalize the [`Tuple1.compose`](https://github.com/lampepfl/dotty/blob/main/tests/run/tupled-function-compose.scala) and [`Tuple1.andThen`](https://github.com/lampepfl/dotty/blob/main/tests/run/tupled-function-andThen.scala) methods to compose functions of larger arities and with functions that return tuples.

```scala
/** Composes two instances of TupledFunction into a new TupledFunction, with this function applied last.
 *
 *  @tparam F a function type
 *  @tparam G a function type
 *  @tparam FArgs the tuple type with the same types as the function arguments of F and return type of G
 *  @tparam GArgs the tuple type with the same types as the function arguments of G
 *  @tparam R the return type of F
 */
extension [F, G, FArgs <: Tuple, GArgs <: Tuple, R](f: F)
  def compose(g: G)(using tg: TupledFunction[G, GArgs => FArgs], tf: TupledFunction[F, FArgs => R]): GArgs => R = {
    (x: GArgs) => tf.tupled(f)(tg.tupled(g)(x))
}
```

diff --git a/docs/_spec/TODOreference/features-classification.md b/docs/_spec/TODOreference/features-classification.md
new file mode 100644
index 000000000000..36cea3b9e72d
--- /dev/null
+++ b/docs/_spec/TODOreference/features-classification.md
@@ -0,0 +1,199 @@
---
layout: doc-page
title: "A Classification of Proposed Language Features"
nightlyOf: https://docs.scala-lang.org/scala3/reference/features-classification.html
---

This document provides an overview of the constructs proposed for Scala 3 with the aim of facilitating the discussion of what to include and when to include it. It classifies features into eight groups: (1) essential foundations, (2) simplifications, (3) restrictions, (4) dropped features, (5) changed features, (6) new features, (7) features oriented towards metaprogramming with the aim of replacing existing macros, and (8) changes to type checking and inference.

Each group contains sections classifying the status (i.e. relative importance to be a part of Scala 3, and relative urgency when to decide this) and the migration cost
of the constructs in it.

The current document reflects the state of things as of April, 2019. It will be updated to reflect any future changes in that status.

## Essential Foundations

These new constructs directly model core features of [DOT](https://www.scala-lang.org/blog/2016/02/03/essence-of-scala.html), higher-kinded types, and the [SI calculus for implicit resolution](https://infoscience.epfl.ch/record/229878/files/simplicitly_1.pdf).

 - [Intersection types](new-types/intersection-types.md), replacing compound types,
 - [Union types](new-types/union-types.md),
 - [Type lambdas](new-types/type-lambdas.md),
 replacing encodings using structural types and type projection.
 - [Context functions](contextual/context-functions.md) offering abstraction over given parameters.

**Status: essential**

These are essential core features of Scala 3. Without them, Scala 3 would be a completely different language, with different foundations.

**Migration cost: none to low**

Since these are additions, there's generally no migration cost for old code.
An exception is intersection types, which replace compound types with slightly cleaned-up semantics. But few programs would be affected by this change.

## Simplifications

These constructs replace existing constructs with the aim of making the language safer and simpler to use, and to promote uniformity in code style.

 - [Trait parameters](other-new-features/trait-parameters.md) replace [early initializers](dropped-features/early-initializers.md) with a more generally useful construct.
 - [Given instances](contextual/givens.md)
   replace implicit objects and defs, focussing on intent over mechanism.
 - [Using clauses](contextual/using-clauses.md) replace implicit parameters, avoiding their ambiguities.
 - [Extension methods](contextual/extension-methods.md) replace implicit classes with a clearer and simpler mechanism.
 - [Opaque type aliases](other-new-features/opaques.md) replace most uses
   of value classes while guaranteeing absence of boxing.
 - [Top-level definitions](dropped-features/package-objects.md) replace package objects, dropping syntactic boilerplate.
 - [Export clauses](other-new-features/export.md)
   provide a simple and general way to express aggregation, which can replace the
   previous facade pattern of package objects inheriting from classes.
 - [Vararg splices](changed-features/vararg-splices.md) now use the form `*` instead of `@ _*`, mirroring vararg expressions.
 - [Creator applications](other-new-features/creator-applications.md) allow using simple function call syntax
   instead of `new` expressions. `new` expressions stay around as a fallback for
   the cases where creator applications cannot be used.

With the exception of early initializers and old-style vararg splices, all superseded constructs continue to be available in Scala 3.0. The plan is to deprecate and phase them out later.

Value classes (superseded by opaque type aliases) are a special case. There are currently no deprecation plans for value classes, since we might bring them back in a more general form if they are supported natively by the JVM as is planned by project Valhalla.

**Status: bimodal: now or never / can delay**

These are essential simplifications. If we decide to adopt them, we should do it for 3.0. Otherwise we are faced with the awkward situation that the Scala 3 documentation has to describe an old feature that will be replaced or superseded by a simpler one in the future.

On the other hand, we need to decide now only about the new features in this list. The decision to drop the superseded features can be delayed. Of course, adopting a new feature without deciding to drop the superseded feature will make the language larger.

**Migration cost: moderate**

For the next several versions, old features will remain available and deprecation and rewrite techniques can make any migration effort low and gradual.


## Restrictions

These constructs are restricted to make the language safer.

 - [Implicit Conversions](contextual/conversions.md): there is only one way to define implicit conversions instead of many, and potentially surprising implicit conversions require a language import.
 - [Given Imports](contextual/given-imports.md): implicits now require a special form of import, to make the import clearly visible.
 - [Type Projection](dropped-features/type-projection.md): only classes can be used as the prefix `C` of a type projection `C#A`. Type projection on abstract types is no longer supported since it is unsound.
 - [Multiversal equality](contextual/multiversal-equality.md) implements an "opt-in" scheme to rule out nonsensical comparisons with `==` and `!=`.
 - [infix](https://github.com/lampepfl/dotty/pull/5975)
   makes method application syntax uniform across code bases.

Unrestricted implicit conversions continue to be available in Scala 3.0, but will be deprecated and removed later. Unrestricted versions of the other constructs in the list above are available only under `-source 3.0-migration`.

**Status: now or never**

These are essential restrictions. If we decide to adopt them, we should do it for 3.0. Otherwise we are faced with the awkward situation that the Scala 3 documentation has to describe a feature that will be restricted in the future.

**Migration cost: low to high**

 - _low_: multiversal equality rules out code that is nonsensical, so any rewrites required by its adoption should be classified as bug fixes.
 - _moderate_: Restrictions to implicits can be accommodated by straightforward rewriting.
 - _high_: Unrestricted type projection cannot always be rewritten directly since it is unsound in general.

## Dropped Constructs

These constructs are proposed to be dropped without a new construct replacing them. The motivation for dropping these constructs is to simplify the language and its implementation.

 - [DelayedInit](dropped-features/delayed-init.md),
 - [Existential types](dropped-features/existential-types.md),
 - [Procedure syntax](dropped-features/procedure-syntax.md),
 - [Class shadowing](dropped-features/class-shadowing.md),
 - [XML literals](dropped-features/xml.md),
 - [Symbol literals](dropped-features/symlits.md),
 - [Auto application](dropped-features/auto-apply.md),
 - [Weak conformance](dropped-features/weak-conformance.md),
 - [Compound types](new-types/intersection-types.md),
 - [Auto tupling](https://github.com/lampepfl/dotty/pull/4311) (implemented, but not merged).

The date when these constructs are dropped varies. The current status is:

 - Not implemented at all:
   - DelayedInit, existential types, weak conformance.
 - Supported under `-source 3.0-migration`:
   - procedure syntax, class shadowing, symbol literals, auto application, auto tupling in a restricted form.
 - Supported in 3.0, to be deprecated and phased out later:
   - XML literals, compound types.

**Status: mixed**

Currently unimplemented features would require considerable implementation effort which would in most cases make the compiler more buggy and fragile and harder to understand. If we do not decide to drop them, they will probably show up as "not yet implemented" in the Scala 3.0 release.

Currently implemented features could stay around indefinitely. Updated docs may simply ignore them, in the expectation that they might go away eventually. So the decision about their removal can be delayed.

**Migration cost: moderate to high**

Dropped features require rewrites to avoid their use in programs. These rewrites can sometimes be automatic (e.g. for procedure syntax, symbol literals, auto application)
and sometimes need to be manual (e.g. class shadowing, auto tupling). Sometimes the rewrites would have to be non-local, affecting use sites as well as definition sites (e.g., in the case of `DelayedInit`, unless we find a solution).

## Changes

These constructs have undergone changes to make them more regular and useful.
+
+ - [Structural Types](changed-features/structural-types.md): They now allow pluggable implementations, which greatly increases their usefulness. Some usage patterns are restricted compared to the status quo.
+ - [Name-based pattern matching](changed-features/pattern-matching.md): The existing undocumented Scala 2 implementation has been codified in a slightly simplified form.
+ - [Eta expansion](changed-features/eta-expansion.md) is now performed universally, also in the absence of an expected type. The postfix `_` operator is thus made redundant. It will be deprecated and dropped after Scala 3.0.
+ - [Implicit Resolution](changed-features/implicit-resolution.md): The implicit resolution rules have been cleaned up to make them more useful and less surprising. Implicit scope is restricted to no longer include package prefixes.
+
+Most aspects of old-style implicit resolution are still available under `-source 3.0-migration`. The other changes in this list are applied unconditionally.
+
+**Status: strongly advisable**
+
+The features have been implemented in their new form in Scala 3.0's compiler. They provide clear improvements in simplicity and functionality compared to the status quo. Going back would require significant implementation effort for a net loss of functionality.
+
+**Migration cost: low to high**
+
+Only a few programs should require changes, but some necessary changes might be non-local (as in the case of restrictions to implicit scope).
+
+## New Constructs
+
+These are additions to the language that make it more powerful or pleasant to use.
+
+ - [Enums](enums/enums.md) provide concise syntax for enumerations and [algebraic data types](enums/adts.md).
+ - [Parameter untupling](other-new-features/parameter-untupling.md) avoids having to use `case` for tupled parameter destructuring.
+ - [Dependent function types](new-types/dependent-function-types.md) generalize dependent methods to dependent function values and types.
+ - [Polymorphic function types](https://github.com/lampepfl/dotty/pull/4672) generalize polymorphic methods to polymorphic function values and types. _Current status_: There is a proposal, and a prototype implementation, but the implementation has not been finalized or merged yet.
+ - [Kind polymorphism](other-new-features/kind-polymorphism.md) allows the definition of operators working equally on types and type constructors.
+
+**Status: mixed**
+
+Enums offer an essential simplification of fundamental use patterns, so they should be adopted for Scala 3.0. Parameter untupling is a very small change that removes some awkwardness, so it might as well be adopted now. The other features constitute more specialized functionality which could be introduced in later versions. On the other hand, except for polymorphic function types they are all fully implemented, so if the Scala 3.0 spec does not include them, they might still be made available under a language flag.
+
+**Migration cost: none**
+
+Being new features, existing code migrates without changes. To be sure, sometimes it would be attractive to rewrite code to make use of the new features in order to increase clarity and conciseness.
+
+## Metaprogramming
+
+The following constructs together aim to put metaprogramming in Scala on a new basis. So far, metaprogramming was achieved by a combination of macros and libraries such as [Shapeless](https://github.com/milessabin/shapeless) that were in turn based on some key macros.
Current Scala 2 macro mechanisms are a thin veneer on top of the current Scala 2 compiler, which makes them fragile and in many cases impossible to port to Scala 3.
+
+It's worth noting that macros were never included in the [Scala 2 language specification](https://scala-lang.org/files/archive/spec/2.13/) and were so far made available only under an `-experimental` flag. This has not prevented their widespread usage.
+
+To enable porting most uses of macros, we are experimenting with the advanced language constructs listed below. These designs are more provisional than the rest of the proposed language constructs for Scala 3.0. There might still be some changes until the final release. Stabilizing the feature set needed for metaprogramming is our first priority.
+
+- [Match types](new-types/match-types.md) allow computation on types.
+- [Inline](metaprogramming/inline.md) provides by itself a straightforward implementation of some simple macros and is at the same time an essential building block for the implementation of complex macros.
+- [Quotes and splices](metaprogramming/macros.md) provide a principled way to express macros and staging with a unified set of abstractions.
+- [Type class derivation](contextual/derivation.md) provides an in-language implementation of the `Gen` macro in Shapeless and other foundational libraries. The new implementation is more robust, efficient and easier to use than the macro.
+- [Implicit by-name parameters](contextual/by-name-context-parameters.md) provide a more robust in-language implementation of the `Lazy` macro in Shapeless.
+
+**Status: not yet settled**
+
+We know we need a practical replacement for current macros. The features listed above are very promising in that respect, but we need more complete implementations and more use cases to reach a final verdict.
+
+**Migration cost: very high**
+
+Existing macro libraries will have to be rewritten from the ground up. In many cases the rewritten libraries will turn out to be simpler and more robust than the old ones, but that does not relieve one of the cost of the rewrites. It's currently unclear to what degree users of macro libraries will be affected. We aim to provide sufficient functionality so that core macros can be re-implemented fully, but given the vast feature set of the various macro extensions to Scala 2 it is difficult to arrive at a workable limitation of scope.
+
+## Changes to Type Checking and Inference
+
+The Scala 3 compiler uses a new algorithm for type inference, which relies on a general subtype constraint solver. The new algorithm often [works better than the old](https://contributors.scala-lang.org/t/better-type-inference-for-scala-send-us-your-problematic-cases/2410), but there are inevitably situations where the results of both algorithms differ, leading to errors diagnosed by Scala 3 for programs that the Scala 2 compiler accepts.
+
+**Status: essential**
+
+The new type-checking and inference algorithms are the essential core of the new compiler. They cannot be reverted without dropping the whole implementation of Scala 3.
+
+**Migration cost: high**
+
+Some existing programs will break and, given the complex nature of type inference, it will not always be clear what change caused the breakage and how to fix it.
+
+In our experience, macros and changes in type and implicit argument inference together cause the large majority of problems encountered when porting existing code to Scala 3.
The latter source of problems could be addressed systematically by a tool that added all inferred types and implicit arguments to a Scala 2 source code file. Most likely such a tool would be implemented as a [Scala 2 compiler plugin](https://docs.scala-lang.org/overviews/plugins/index.html). The resulting code would be much more likely to compile under Scala 3, but would often be bulky to the point of being unreadable. A second part of the rewriting tool should then selectively and iteratively remove the type and implicit annotations that were synthesized by the first part, as long as the code still compiles under Scala 3. This second part could be implemented as a program that invokes the Scala 3 compiler `scalac` programmatically.
+
+Several people have proposed such a tool for some time now. I believe it is time we find the will and the resources to actually implement it.
diff --git a/docs/_spec/TODOreference/language-versions/binary-compatibility.md b/docs/_spec/TODOreference/language-versions/binary-compatibility.md
new file mode 100644
index 000000000000..df1c19f97868
--- /dev/null
+++ b/docs/_spec/TODOreference/language-versions/binary-compatibility.md
@@ -0,0 +1,13 @@
+---
+layout: doc-page
+title: "Binary Compatibility"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html
+---
+
+In Scala 2, different minor versions of the compiler were free to change how they encode different language features in JVM bytecode, so each bump of the compiler's minor version broke binary compatibility; if a project had any Scala dependencies, they all needed to be (cross-)compiled to the same minor Scala version that was used in the project itself. By contrast, Scala 3 has a stable encoding into JVM bytecode.
+
+In addition to classfiles, the compilation process in Scala 3 also produces files with the `.tasty` extension. The [TASTy](https://docs.scala-lang.org/scala3/guides/tasty-overview.html) format is an intermediate representation of Scala code containing full information about sources together with information provided by the typer. Some of this information is lost during the generation of bytecode, so Scala 3 compilers read TASTy files during compilation in addition to classfiles to know the exact types of values, methods, etc. in already compiled classes (although compilation from TASTy files only is also possible). TASTy files are also typically distributed together with classfiles in published artifacts.
+
+The TASTy format is extensible while preserving backward compatibility, and its evolution happens between minor releases of the language. This means a Scala compiler in version `3.x1.y1` is able to read TASTy files produced by another compiler in version `3.x2.y2` if `x1 >= x2` (assuming two stable versions of the compiler are considered; `SNAPSHOT` or `NIGHTLY` compiler versions can read TASTy in an older stable format, but their TASTy versions are not compatible between each other even if the compilers have the same minor version; also, compilers in stable versions cannot read TASTy generated by an unstable version).
+
+The TASTy version number has the format `<major_version>.<minor_version>-<experimental_version>`, and the numbering changes in parallel to language releases, in such a way that a bump in the language's minor version corresponds to a bump in the TASTy minor version (e.g. for Scala `3.0.0` the TASTy version is `28.0-0`). An experimental version of 0 signifies a stable version, while all others are considered unstable/experimental.
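+To make the compatibility rule above concrete, here is a minimal sketch of it in Scala. This is an illustration only, under the assumptions stated in the previous paragraphs; `TastyVersion` and `canRead` are hypothetical names, not part of the compiler API:
+
+```scala
+// Hypothetical model of a TASTy version number <major>.<minor>-<experimental>.
+final case class TastyVersion(major: Int, minor: Int, experimental: Int)
+
+// A reader can consume a stable TASTy file (experimental == 0) if the majors
+// agree and the file's minor version is not newer than the reader's;
+// an experimental TASTy file only matches the exact same version.
+def canRead(reader: TastyVersion, file: TastyVersion): Boolean =
+  reader.major == file.major && {
+    if file.experimental == 0 then reader.minor >= file.minor
+    else reader == file
+  }
+
+// Example: a Scala 3.1 compiler (TASTy 28.1-0) reads TASTy produced by
+// Scala 3.0 (28.0-0), but not the other way around.
+@main def demo(): Unit =
+  println(canRead(TastyVersion(28, 1, 0), TastyVersion(28, 0, 0))) // true
+  println(canRead(TastyVersion(28, 0, 0), TastyVersion(28, 1, 0))) // false
+```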
The TASTy version is not strictly bound to the data format itself: any changes to the API of the standard library also require a change in the TASTy minor version.
diff --git a/docs/_spec/TODOreference/language-versions/language-versions.md b/docs/_spec/TODOreference/language-versions/language-versions.md
new file mode 100644
index 000000000000..2dfd04857cab
--- /dev/null
+++ b/docs/_spec/TODOreference/language-versions/language-versions.md
@@ -0,0 +1,7 @@
+---
+layout: index
+title: "Language Versions"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/language-versions/index.html
+---
+
+Additional information on interoperability and migration between Scala 2 and 3 can be found [here](https://docs.scala-lang.org/scala3/guides/migration/compatibility-intro.html).
diff --git a/docs/_spec/TODOreference/language-versions/source-compatibility.md b/docs/_spec/TODOreference/language-versions/source-compatibility.md
new file mode 100644
index 000000000000..4d5b468ac8f2
--- /dev/null
+++ b/docs/_spec/TODOreference/language-versions/source-compatibility.md
@@ -0,0 +1,43 @@
+---
+layout: doc-page
+title: "Source Compatibility"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/language-versions/source-compatibility.html
+---
+
+Scala 3 does NOT guarantee source compatibility between different minor language versions (e.g. some syntax valid in 3.x might be deprecated and then phased out in 3.y for y > x). There are also some syntactic structures that were valid in Scala 2 but are no longer valid in Scala 3. However, the compiler makes it possible to specify the desired version of syntax used in a particular file, or globally for a run of the compiler, to make migration between versions easier.
+
+The default Scala language syntax version currently supported by the Dotty compiler is [`3.2`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$3/2$.html). There are also other language versions that can be specified instead:
+
+- [`3.0-migration`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$3/0-migration$.html): Same as `3.0` and `3.1`, but with a Scala 2 compatibility mode that helps moving Scala 2.13 sources over to Scala 3. In particular, it
+
+  - flags some Scala 2 constructs that are disallowed in Scala 3 as migration warnings instead of hard errors,
+  - changes some rules to be more lenient and backwards compatible with Scala 2.13,
+  - gives some additional warnings where the semantics have changed between Scala 2.13 and 3.0,
+  - in conjunction with `-rewrite`, offers code rewrites from Scala 2.13 to 3.0.
+
+- [`3.0`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$3/0$.html), [`3.1`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$3/1$.html): the default set of features included in Scala versions `3.0.0` to `3.1.3`.
+- [`3.2`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$3/2$.html): the same as `3.0` and `3.1`, but in addition:
+  - [stricter pattern bindings](https://docs.scala-lang.org/scala3/reference/changed-features/pattern-bindings.html) are now enabled (part of `future` in earlier `3.x` releases), producing warnings for refutable patterns. These warnings can be silenced to achieve the same runtime behavior, but in `future` they become errors and refutable patterns will not compile.
+  - [Nonlocal returns](https://docs.scala-lang.org/scala3/reference/dropped-features/nonlocal-returns.html) now produce a warning upon usage (they are still an error under `future`).
+- [`3.2-migration`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$3/2-migration$.html): the same as `3.2`, but, in conjunction with `-rewrite`, offers code rewrites from Scala `3.0/3.1` to `3.2`.
+- [`future`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$future$.html): A preview of changes that will be introduced in `3.x` versions after `3.2`. Some Scala 2 specific idioms are dropped in this version. The feature set supported by this version may grow over time as features become stabilised for preview.
+- [`future-migration`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$future-migration$.html): Same as `future`, but with additional helpers to migrate from `3.2`. Similarly to the helpers available under `3.0-migration`, these include migration warnings and optional rewrites.
+
+There are two ways to specify a language version:
+
+- with a `-source` command line setting, e.g. `-source 3.0-migration`.
+- with a `scala.language` import at the top of a source file, e.g.:
+
+```scala
+package p
+import scala.language.`future-migration`
+
+class C { ... }
+```
+
+Language imports supersede command-line settings in the source files where they are specified. Only one language import specifying a source version is allowed in a source file, and it must come before any definitions in that file.
+
+**Note**: The [Scala 3 Migration Guide](https://docs.scala-lang.org/scala3/guides/migration/compatibility-intro.html) gives further information to help the Scala programmer moving from Scala 2.13 to Scala 3.
diff --git a/docs/_spec/TODOreference/metaprogramming/compiletime-ops.md b/docs/_spec/TODOreference/metaprogramming/compiletime-ops.md
new file mode 100644
index 000000000000..a43c941ae943
--- /dev/null
+++ b/docs/_spec/TODOreference/metaprogramming/compiletime-ops.md
@@ -0,0 +1,294 @@
+---
+layout: doc-page
+title: "Compile-time operations"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/compiletime-ops.html
+---
+
+## The `scala.compiletime` Package
+
+The [`scala.compiletime`](https://scala-lang.org/api/3.x/scala/compiletime.html) package contains helper definitions that provide support for compile-time operations over values. They are described in the following.
+
+### `constValue` and `constValueOpt`
+
+`constValue` is a function that produces the constant value represented by a type.
+
+```scala
+import scala.compiletime.constValue
+import scala.compiletime.ops.int.S
+
+transparent inline def toIntC[N]: Int =
+  inline constValue[N] match
+    case 0        => 0
+    case _: S[n1] => 1 + toIntC[n1]
+
+inline val ctwo = toIntC[2]
+```
+
+`constValueOpt` is the same as `constValue`, but it returns an `Option[T]`, enabling us to handle situations where a value is not present. Note that `S` is the type of the successor of some singleton type. For example, the type `S[1]` is the singleton type `2`.
+
+### `erasedValue`
+
+So far we have seen inline methods that take terms (tuples and integers) as parameters. What if we want to base case distinctions on types instead? For instance, one would like to be able to write a function `defaultValue` that, given a type `T`, optionally returns the default value of `T`, if it exists. We can already express this using rewrite match expressions and a simple helper function, `scala.compiletime.erasedValue`, which is defined as follows:
+
+```scala
+def erasedValue[T]: T
+```
+
+The `erasedValue` function _pretends_ to return a value of its type argument `T`.
+Calling this function will always result in a compile-time error unless the call is removed from the code while inlining.
+
+Using `erasedValue`, we can then define `defaultValue` as follows:
+
+```scala
+import scala.compiletime.erasedValue
+
+transparent inline def defaultValue[T] =
+  inline erasedValue[T] match
+    case _: Byte    => Some(0: Byte)
+    case _: Char    => Some(0: Char)
+    case _: Short   => Some(0: Short)
+    case _: Int     => Some(0)
+    case _: Long    => Some(0L)
+    case _: Float   => Some(0.0f)
+    case _: Double  => Some(0.0d)
+    case _: Boolean => Some(false)
+    case _: Unit    => Some(())
+    case _          => None
+```
+
+Then:
+
+```scala
+val dInt: Some[Int] = defaultValue[Int]
+val dDouble: Some[Double] = defaultValue[Double]
+val dBoolean: Some[Boolean] = defaultValue[Boolean]
+val dAny: None.type = defaultValue[Any]
+```
+
+As another example, consider the type-level version of `toInt` below: given a _type_ representing a Peano number, return the integer _value_ corresponding to it. Consider the definitions of numbers as in the _Inline Match_ section above. Here is how `toIntT` can be defined:
+
+```scala
+transparent inline def toIntT[N <: Nat]: Int =
+  inline scala.compiletime.erasedValue[N] match
+    case _: Zero.type => 0
+    case _: Succ[n]   => toIntT[n] + 1
+
+inline val two = toIntT[Succ[Succ[Zero.type]]]
+```
+
+`erasedValue` is an `erased` method, so it cannot be called at runtime and has no runtime behavior. Since `toIntT` performs static checks over the static type of `N`, we can safely use it to scrutinize its return type (`Succ[Succ[Zero.type]]` in this case).
+
+### `error`
+
+The `error` method is used to produce user-defined compile errors during inline expansion. It has the following signature:
+
+```scala
+inline def error(inline msg: String): Nothing
+```
+
+If an inline expansion results in a call `error(msgStr)`, the compiler produces an error message containing the given `msgStr`.
+
+```scala
+import scala.compiletime.{error, codeOf}
+
+inline def fail() =
+  error("failed for a reason")
+
+fail() // error: failed for a reason
+```
+
+or
+
+```scala
+inline def fail(inline p1: Any) =
+  error("failed on: " + codeOf(p1))
+
+fail(identity("foo")) // error: failed on: identity[String]("foo")
+```
+
+### The `scala.compiletime.ops` package
+
+The [`scala.compiletime.ops`](https://scala-lang.org/api/3.x/scala/compiletime/ops.html) package contains types that provide support for primitive operations on singleton types. For example, `scala.compiletime.ops.int.*` provides support for multiplying two singleton `Int` types, and `scala.compiletime.ops.boolean.&&` for the conjunction of two `Boolean` types. When all arguments to a type in `scala.compiletime.ops` are singleton types, the compiler can evaluate the result of the operation.
+
+```scala
+import scala.compiletime.ops.int.*
+import scala.compiletime.ops.boolean.*
+
+val conjunction: true && true = true
+val multiplication: 3 * 5 = 15
+```
+
+Many of these singleton operation types are meant to be used infix (as in [SLS §3.2.10](https://www.scala-lang.org/files/archive/spec/2.13/03-types.html#infix-types)).
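+As a small illustration (not part of the original page): since these operations are ordinary type constructors, the infix form and the standard type-application form are interchangeable:
+
+```scala
+import scala.compiletime.ops.int.*
+
+val infixForm: 2 + 3 = 5    // the intended, infix style
+val prefixForm: +[2, 3] = 5 // equivalent prefix type application
+```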
+
+Since type aliases have the same precedence rules as their term-level equivalents, the operations compose with the expected precedence rules:
+
+```scala
+import scala.compiletime.ops.int.*
+val x: 1 + 2 * 3 = 7
+```
+
+The operation types are located in packages named after the type of the left-hand side parameter: for instance, `scala.compiletime.ops.int.+` represents addition of two numbers, while `scala.compiletime.ops.string.+` represents string concatenation. To use both and distinguish the two types from each other, a match type can dispatch to the correct implementation:
+
+```scala
+import scala.compiletime.ops.*
+
+type +[X <: Int | String, Y <: Int | String] = (X, Y) match
+  case (Int, Int)       => int.+[X, Y]
+  case (String, String) => string.+[X, Y]
+
+val concat: "a" + "b" = "ab"
+val addition: 1 + 1 = 2
+```
+
+## Summoning Implicits Selectively
+
+It is foreseen that many areas of typelevel programming can be done with rewrite methods instead of implicits. But sometimes implicits are unavoidable. The problem so far was that the Prolog-like programming style of implicit search becomes viral: once some construct depends on implicit search, it has to be written as a logic program itself. Consider for instance the problem of creating a `TreeSet[T]` or a `HashSet[T]` depending on whether `T` has an `Ordering` or not. We can create a set of implicit definitions like this:
+
+```scala
+trait SetFor[T, S <: Set[T]]
+
+class LowPriority:
+  implicit def hashSetFor[T]: SetFor[T, HashSet[T]] = ...
+
+object SetsFor extends LowPriority:
+  implicit def treeSetFor[T: Ordering]: SetFor[T, TreeSet[T]] = ...
+```
+
+Clearly, this is not pretty. Besides all the usual indirection of implicit search, we face the problem of rule prioritization, where we have to ensure that `treeSetFor` takes priority over `hashSetFor` if the element type has an ordering. This is solved (clumsily) by putting `hashSetFor` in a superclass `LowPriority` of the object `SetsFor` where `treeSetFor` is defined. Maybe the boilerplate would still be acceptable if the crufty code could be contained. However, this is not the case. Every user of the abstraction has to be parameterized itself with a `SetFor` implicit. Considering the simple task _"I want a `TreeSet[T]` if `T` has an ordering and a `HashSet[T]` otherwise"_, this seems like a lot of ceremony.
+
+There are some proposals to improve the situation in specific areas, for instance by allowing more elaborate schemes to specify priorities. But they all keep the viral nature of implicit search programs based on logic programming.
+
+By contrast, the new `summonFrom` construct makes implicit search available in a functional context. To solve the problem of creating the right set, one would use it as follows:
+
+```scala
+import scala.compiletime.summonFrom
+
+inline def setFor[T]: Set[T] = summonFrom {
+  case ord: Ordering[T] => new TreeSet[T]()(using ord)
+  case _                => new HashSet[T]
+}
+```
+
+A `summonFrom` call takes a pattern matching closure as argument. All patterns in the closure are type ascriptions of the form `identifier : Type`.
+
+Patterns are tried in sequence. The first case with a pattern `x: T` such that an implicit value of type `T` can be summoned is chosen.
+
+Alternatively, one can also use a pattern-bound given instance, which avoids the explicit using clause.
For instance, `setFor` could also be formulated as follows:
+
+```scala
+import scala.compiletime.summonFrom
+
+inline def setFor[T]: Set[T] = summonFrom {
+  case given Ordering[T] => new TreeSet[T]
+  case _                 => new HashSet[T]
+}
+```
+
+`summonFrom` applications must be reduced at compile time.
+
+Consequently, if we summon an `Ordering[String]` the code above will return a new instance of `TreeSet[String]`.
+
+```scala
+summon[Ordering[String]]
+
+println(setFor[String].getClass) // prints class scala.collection.immutable.TreeSet
+```
+
+**Note** `summonFrom` applications can raise ambiguity errors. Consider the following code with two givens in scope of type `A`. The pattern match in `f` will raise an ambiguity error if `f` is applied.
+
+```scala
+class A
+given a1: A = new A
+given a2: A = new A
+
+inline def f: Any = summonFrom {
+  case given _: A => ??? // error: ambiguous givens
+}
+```
+
+## `summonInline`
+
+The shorthand `summonInline` provides a simple way to write a `summon` that is delayed until the call is inlined. Unlike `summonFrom`, `summonInline` also yields the implicit-not-found error if a given instance of the summoned type is not found.
+
+```scala
+import scala.compiletime.summonInline
+import scala.annotation.implicitNotFound
+
+@implicitNotFound("Missing One")
+trait Missing1
+
+@implicitNotFound("Missing Two")
+trait Missing2
+
+trait NotMissing
+given NotMissing = ???
+
+transparent inline def summonInlineCheck[T <: Int](inline t: T): Any =
+  inline t match
+    case 1 => summonInline[Missing1]
+    case 2 => summonInline[Missing2]
+    case _ => summonInline[NotMissing]
+
+val missing1 = summonInlineCheck(1) // error: Missing One
+val missing2 = summonInlineCheck(2) // error: Missing Two
+val notMissing: NotMissing = summonInlineCheck(3)
+```
+
+## Reference
+
+For more information about compile-time operations, see [PR #4768](https://github.com/lampepfl/dotty/pull/4768), which explains how `summonFrom`'s predecessor (implicit matches) can be used for typelevel programming and code specialization, and [PR #7201](https://github.com/lampepfl/dotty/pull/7201), which explains the new `summonFrom` syntax.
diff --git a/docs/_spec/TODOreference/metaprogramming/inline.md b/docs/_spec/TODOreference/metaprogramming/inline.md
new file mode 100644
index 000000000000..0c4800069bad
--- /dev/null
+++ b/docs/_spec/TODOreference/metaprogramming/inline.md
@@ -0,0 +1,390 @@
+---
+layout: doc-page
+title: Inline
+nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/inline.html
+---
+
+## Inline Definitions
+
+`inline` is a new [soft modifier](../soft-modifier.md) that guarantees that a definition will be inlined at the point of use. Example:
+
+```scala
+object Config:
+  inline val logging = false
+
+object Logger:
+
+  private var indent = 0
+
+  inline def log[T](msg: String, indentMargin: => Int)(op: => T): T =
+    if Config.logging then
+      println(s"${" " * indent}start $msg")
+      indent += indentMargin
+      val result = op
+      indent -= indentMargin
+      println(s"${" " * indent}$msg = $result")
+      result
+    else op
+end Logger
+```
+
+The `Config` object contains a definition of the **inline value** `logging`. This means that `logging` is treated as a _constant value_, equivalent to its right-hand side `false`. The right-hand side of such an `inline val` must itself be a [constant expression](https://scala-lang.org/files/archive/spec/2.13/06-expressions.html#constant-expressions). Used in this way, `inline` is equivalent to Java and Scala 2's `final`.
Note that `final`, meaning _inlined constant_, is still supported in Scala 3, but will be phased out.
+
+The `Logger` object contains a definition of the **inline method** `log`. This method will always be inlined at the point of call.
+
+In the inlined code, an `if-then-else` with a constant condition will be rewritten to its `then`- or `else`-part. Consequently, in the `log` method above the `if Config.logging` with `Config.logging == true` will get rewritten into its `then`-part.
+
+Here's an example:
+
+```scala
+var indentSetting = 2
+
+def factorial(n: BigInt): BigInt =
+  log(s"factorial($n)", indentSetting) {
+    if n == 0 then 1
+    else n * factorial(n - 1)
+  }
+```
+
+If `Config.logging == false`, this will be rewritten (simplified) to:
+
+```scala
+def factorial(n: BigInt): BigInt =
+  if n == 0 then 1
+  else n * factorial(n - 1)
+```
+
+As you can see, since neither `msg` nor `indentMargin` were used, they do not appear in the generated code for `factorial`. Also note the body of our `log` method: the `else`-part reduces to just an `op`. In the generated code we do not generate any closures, because we only refer to a by-name parameter *once*. Consequently, the code was inlined directly and the call was beta-reduced.
+
+In the `true` case the code will be rewritten to:
+
+```scala
+def factorial(n: BigInt): BigInt =
+  val msg = s"factorial($n)"
+  println(s"${" " * indent}start $msg")
+  Logger.inline$indent_=(indent.+(indentSetting))
+  val result =
+    if n == 0 then 1
+    else n * factorial(n - 1)
+  Logger.inline$indent_=(indent.-(indentSetting))
+  println(s"${" " * indent}$msg = $result")
+  result
+```
+
+Note that the by-value parameter `msg` is evaluated only once, per the usual Scala semantics, by binding the value and reusing `msg` through the body of `factorial`. Also note the special handling of the assignment to the private var `indent`: it is achieved by generating a setter method `def inline$indent_=` and calling it instead.
+
+Inline methods always have to be fully applied. For instance, a call to
+
+```scala
+Logger.log[String]("some op", indentSetting)
+```
+
+would be ill-formed and the compiler would complain that arguments are missing. However, it is possible to pass wildcard arguments instead. For instance,
+
+```scala
+Logger.log[String]("some op", indentSetting)(_)
+```
+
+would typecheck.
+
+### Recursive Inline Methods
+
+Inline methods can be recursive. For instance, when called with a constant exponent `n`, the following method for `power` will be implemented by straight inline code without any loop or recursion.
+
+```scala
+inline def power(x: Double, n: Int): Double =
+  if n == 0 then 1.0
+  else if n == 1 then x
+  else
+    val y = power(x, n / 2)
+    if n % 2 == 0 then y * y else y * y * x
+
+power(expr, 10)
+// translates to
+//
+//   val x = expr
+//   val y1 = x * x   // ^2
+//   val y2 = y1 * y1 // ^4
+//   val y3 = y2 * x  // ^5
+//   y3 * y3          // ^10
+```
+
+Parameters of inline methods can have an `inline` modifier as well. This means that actual arguments to these parameters will be inlined in the body of the `inline def`. `inline` parameters have call semantics equivalent to by-name parameters, but allow for duplication of the code in the argument. This is usually useful when constant values need to be propagated to allow further optimizations/reductions.
+
+The following example shows the difference in translation between by-value, by-name and `inline` parameters:
+
+```scala
+inline def funkyAssertEquals(actual: Double, expected: => Double, inline delta: Double): Unit =
+  if (actual - expected).abs > delta then
+    throw new AssertionError(s"difference between ${expected} and ${actual} was larger than ${delta}")
+
+funkyAssertEquals(computeActual(), computeExpected(), computeDelta())
+// translates to
+//
+//   val actual = computeActual()
+//   def expected = computeExpected()
+//   if (actual - expected).abs > computeDelta() then
+//     throw new AssertionError(s"difference between ${expected} and ${actual} was larger than ${computeDelta()}")
+```
+
+### Rules for Overriding
+
+Inline methods can override other non-inline methods. The rules are as follows:
+
+1. If an inline method `f` implements or overrides another, non-inline method, the inline method can also be invoked at runtime. For instance, consider the scenario:
+
+    ```scala
+    abstract class A:
+      def f: Int
+      def g: Int = f
+
+    class B extends A:
+      inline def f = 22
+      override inline def g = f + 11
+
+    val b = new B
+    val a: A = b
+    // inlined invocations
+    assert(b.f == 22)
+    assert(b.g == 33)
+    // dynamic invocations
+    assert(a.f == 22)
+    assert(a.g == 33)
+    ```
+
+    The inlined invocations and the dynamically dispatched invocations give the same results.
+
+2. Inline methods are effectively final.
+
+3. Inline methods can also be abstract. An abstract inline method can be implemented only by other inline methods. It cannot be invoked directly:
+
+    ```scala
+    abstract class A:
+      inline def f: Int
+
+    object B extends A:
+      inline def f: Int = 22
+
+    B.f         // OK
+    val a: A = B
+    a.f         // error: cannot inline f in A.
+    ```
+
+### Relationship to `@inline`
+
+Scala 2 also defines a `@inline` annotation which is used as a hint for the backend to inline code. The `inline` modifier is a more powerful option:
+
+- expansion is guaranteed instead of best effort,
+- expansion happens in the frontend instead of in the backend and
+- expansion also applies to recursive methods.
+
+
+
+### The definition of constant expression
+
+Right-hand sides of inline values and of arguments for inline parameters must be constant expressions in the sense defined by the [SLS §6.24](https://www.scala-lang.org/files/archive/spec/2.13/06-expressions.html#constant-expressions), including _platform-specific_ extensions such as constant folding of pure numeric computations.
+
+An inline value must have a literal type such as `1` or `true`.
+
+```scala
+inline val four = 4
+// equivalent to
+inline val four: 4 = 4
+```
+
+It is also possible to have inline vals of types that do not have a syntax, such as `Short(4)`.
+
+```scala
+trait InlineConstants:
+  inline val myShort: Short
+
+object Constants extends InlineConstants:
+  inline val myShort/*: Short(4)*/ = 4
+```
+
+## Transparent Inline Methods
+
+Inline methods can additionally be declared `transparent`. This means that the return type of the inline method can be specialized to a more precise type upon expansion. Example:
+
+```scala
+class A
+class B extends A:
+  def m = true
+
+transparent inline def choose(b: Boolean): A =
+  if b then new A else new B
+
+val obj1 = choose(true)  // static type is A
+val obj2 = choose(false) // static type is B
+
+// obj1.m // compile-time error: `m` is not defined on `A`
+obj2.m    // OK
+```
+
+Here, the inline method `choose` returns an instance of either of the two types `A` or `B`.
+
+If `choose` had not been declared to be `transparent`, the result of its expansion would always be of type `A`, even though the computed value might be of the subtype `B`. The inline method is a "blackbox" in the sense that details of its implementation do not leak out. But if a `transparent` modifier is given, the type of the expansion is the type of the expanded body. If the argument `b` is `true`, that type is `A`, otherwise it is `B`. Consequently, calling `m` on `obj2` type-checks, since `obj2` has the same type as the expansion of `choose(false)`, which is `B`. Transparent inline methods are "whitebox" in the sense that the type of an application of such a method can be more specialized than its declared return type, depending on how the method expands.
+
+In the following example, we see how the return type of `zero` is specialized to the singleton type `0`, permitting the addition to be ascribed with the correct type `1`.
+
+```scala
+transparent inline def zero: Int = 0
+
+val one: 1 = zero + 1
+```
+
+### Transparent vs. non-transparent inline
+
+As we already discussed, transparent inline methods may influence type checking at the call site. Technically this implies that transparent inline methods must be expanded during type checking of the program. Other inline methods are inlined later, after the program is fully typed.
+
+For example, the following two functions will be typed the same way but will be inlined at different times.
+
+```scala
+inline def f1: T = ...
+transparent inline def f2: T = (...): T
+```
+
+A noteworthy difference is the behavior of `transparent inline given`. If there is an error reported when inlining that definition, it will be considered an implicit search mismatch and the search will continue. A `transparent inline given` can add a type ascription in its RHS (as in `f2` from the previous example) to avoid the precise type but keep the search behavior. On the other hand, an `inline given` is taken as an implicit and then inlined after typing. Any error will be emitted as usual.
+
+## Inline Conditionals
+
+An if-then-else expression whose condition is a constant expression can be simplified to the selected branch. Prefixing an if-then-else expression with `inline` enforces that the condition has to be a constant expression, and thus guarantees that the conditional will always simplify.
+
+Example:
+
+```scala
+inline def update(delta: Int) =
+  inline if delta >= 0 then increaseBy(delta)
+  else decreaseBy(-delta)
+```
+
+A call `update(22)` would rewrite to `increaseBy(22)`. But if `update` was called with a value that was not a compile-time constant, we would get a compile-time error like the one below:
+
+```scala
+   |  inline if delta >= 0 then ???
+   |  ^
+   |  cannot reduce inline if
+   |   its condition
+   |     delta >= 0
+   |   is not a constant value
+   | This location is in code that was inlined at ...
+```
+
+In a transparent inline, an `inline if` will force the inlining of any inline definition in its condition during type checking.
+
+## Inline Matches
+
+A `match` expression in the body of an `inline` method definition may be prefixed by the `inline` modifier. If there is enough type information at compile time to select a branch, the expression is reduced to that branch and the type of the expression is the type of the right-hand side of that result. If not, a compile-time error is raised that reports that the match cannot be reduced.
+
+The example below defines an inline method with a single inline match expression that picks a case based on its static type:
+
+```scala
+transparent inline def g(x: Any): Any =
+  inline x match
+    case x: String => (x, x) // Tuple2[String, String](x, x)
+    case x: Double => x
+
+g(1.0d)   // Has type 1.0d which is a subtype of Double
+g("test") // Has type (String, String)
+```
+
+The scrutinee `x` is examined statically and the inline match is reduced accordingly, returning the corresponding value (with the type specialized because `g` is declared `transparent`). This example performs a simple type test over the scrutinee. The type can have a richer structure like the simple ADT below. `toInt` matches the structure of a number in [Church-encoding](https://en.wikipedia.org/wiki/Church_encoding) and _computes_ the corresponding integer.
+
+```scala
+trait Nat
+case object Zero extends Nat
+case class Succ[N <: Nat](n: N) extends Nat
+
+transparent inline def toInt(n: Nat): Int =
+  inline n match
+    case Zero     => 0
+    case Succ(n1) => toInt(n1) + 1
+
+inline val natTwo = toInt(Succ(Succ(Zero)))
+val intTwo: 2 = natTwo
+```
+
+`natTwo` is inferred to have the singleton type `2`.
+
+## Reference
+
+For more information about the semantics of `inline`, see the [Scala 2020: Semantics-preserving inlining for metaprogramming](https://dl.acm.org/doi/10.1145/3426426.3428486) paper.
diff --git a/docs/_spec/TODOreference/metaprogramming/macros-spec.md b/docs/_spec/TODOreference/metaprogramming/macros-spec.md
new file mode 100644
index 000000000000..6045354fdbbc
--- /dev/null
+++ b/docs/_spec/TODOreference/metaprogramming/macros-spec.md
@@ -0,0 +1,714 @@
+---
+layout: doc-page
+title: "Macros Spec"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/macros-spec.html
+---
+
+## Formalization
+
+* Multi-stage programming with generative and analytical macros[^2]
+* Multi-Stage Macro Calculus, Chapter 4 of Scalable Metaprogramming in Scala 3[^1]. Contains and extends the calculus of _Multi-stage programming with generative and analytical macros_ with type polymorphism.
+
+## Syntax
+
+The quotation syntax using `'` and `$` was chosen to mimic the string interpolation syntax of Scala. Like a string double-quotation, a single-quote block can contain splices. However, unlike strings, splices can contain quotes using the same rules.
+
+```scala
+s" Hello $name"     s" Hello ${name}"
+'{ hello($name) }   '{ hello(${name}) }
+${ hello('name) }   ${ hello('{name}) }
+```
+
+### Quotes
+Quotes come in four flavors: quoted identifiers, quoted blocks, quoted block patterns and quoted type patterns. Scala 2 used quoted identifiers to represent `Symbol` literals. They were deprecated in Scala 3, which allows the syntax to be used for quotation.
+```scala
+SimpleExpr ::= ...
+             | `'` alphaid         // quoted identifier
+             | `'` `{` Block `}`   // quoted block
+Pattern    ::= ...
+             | `'` `{` Block `}`   // quoted block pattern
+             | `'` `[` Type `]`    // quoted type pattern
+```
+
+Quoted blocks and quoted block patterns contain an expression equivalent to a normal block of code. When entering either of those we track the fact that we are in a quoted block (`inQuoteBlock`), which is used for spliced identifiers. When entering a quoted block pattern we additionally track the fact that we are in a quoted pattern (`inQuotePattern`), which is used to distinguish spliced blocks and splice patterns. Lastly, the quoted type pattern simply contains a type.
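+As a small illustration of the expression-level forms (a made-up helper, not from the original spec text): a quoted block pattern deconstructs an expression via a splice `$x` (splices are detailed in the next section), and quoted blocks construct one:
+
+```scala
+import scala.quoted.*
+
+// Hypothetical helper: rewrite `x + 0` to `x`, otherwise keep the expression.
+def simplify(e: Expr[Int])(using Quotes): Expr[Int] =
+  e match
+    case '{ $x + 0 } => x // quoted block pattern binding x: Expr[Int]
+    case _           => e // no match: return the original quoted expression
+```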
+
+### Splices
+Splices come in three flavors: spliced identifiers, spliced blocks and splice patterns. Scala specifies identifiers containing `$` as valid identifiers but reserves them for compiler and standard library use only. Unfortunately, many libraries have used such identifiers in Scala 2. Therefore, to mitigate the cost of migration, we still support them. We work around this by only allowing spliced identifiers[^3] within quoted blocks or quoted block patterns (`inQuoteBlock`). Splice blocks and splice patterns can contain an arbitrary block or pattern respectively. They are distinguished based on their surrounding quote (`inQuotePattern`): a quote block will contain spliced blocks, and a quote block pattern will contain splice patterns.
+
+```scala
+SimpleExpr ::= ...
+             | `$` alphaid          if inQuoteBlock     // spliced identifier
+             | `$` `{` Block `}`    if !inQuotePattern  // spliced block
+             | `$` `{` Pattern `}`  if inQuotePattern   // splice pattern
+```
+
+### Quoted Pattern Type Variables
+Quoted pattern type variables in quoted patterns and quoted type patterns do not require additional syntax. Any type definition or reference with a name composed of lowercase letters is assumed to be a pattern type variable definition while typing. A backticked type name with lowercase letters is interpreted as a reference to the type with that name.
+
+
+## Implementation
+
+### Run-Time Representation
+
+The standard library defines the `Quotes` interface, which contains all the logic, and the abstract classes `Expr` and `Type`. The compiler implements the `Quotes` interface and provides the implementation of `Expr` and `Type`.
+
+##### `class Expr`
+Expressions of type `Expr[T]` are represented by the following abstract class:
+```scala
+abstract class Expr[+T] private[scala]
+```
+The only implementation of `Expr` is in the compiler along with the implementation of `Quotes`. It is a class that wraps a typed AST and a `Scope` object with no methods of its own. The `Scope` object is used to track the current splice scope and detect scope extrusions.
+
+##### `object Expr`
+The companion object of `Expr` contains a few useful static methods: the `apply`/`unapply` methods to use `ToExpr`/`FromExpr` with ease, and the `betaReduce` and `summon` methods. It also contains methods to create expressions out of lists or sequences of expressions: `block`, `ofSeq`, `ofList`, `ofTupleFromSeq` and `ofTuple`.
+
+```scala
+object Expr:
+  def apply[T](x: T)(using ToExpr[T])(using Quotes): Expr[T] = ...
+  def unapply[T](x: Expr[T])(using FromExpr[T])(using Quotes): Option[T] = ...
+  def betaReduce[T](e: Expr[T])(using Quotes): Expr[T] = ...
+  def summon[T: Type](using Quotes): Option[Expr[T]] = ...
+  def block[T](stats: List[Expr[Any]], e: Expr[T])(using Quotes): Expr[T] = ...
+  def ofSeq[T: Type](xs: Seq[Expr[T]])(using Quotes): Expr[Seq[T]] = ...
+  def ofList[T: Type](xs: Seq[Expr[T]])(using Quotes): Expr[List[T]] = ...
+  def ofTupleFromSeq(xs: Seq[Expr[Any]])(using Quotes): Expr[Tuple] = ...
+  def ofTuple[T <: Tuple: Tuple.IsMappedBy[Expr]: Type](tup: T)(using Quotes):
+      Expr[Tuple.InverseMap[T, Expr]] = ...
+```
+
+##### `class Type`
+Types of type `Type[T]` are represented by the following abstract class:
+```scala
+abstract class Type[T <: AnyKind] private[scala]:
+  type Underlying = T
+```
+
+The only implementation of `Type` is in the compiler along with the implementation of `Quotes`. It is a class that wraps the AST of a type and a `Scope` object with no methods of its own.
+The upper bound of `T` is `AnyKind`, which implies that `T` may be a higher-kinded type. The `Underlying` alias is used to select the type from an instance of `Type`. Users never need to use this alias as they can always use `T` directly. `Underlying` is used for internal encoding while compiling the code (see _Type Healing_).
+
+##### `object Type`
+The companion object of `Type` contains a few useful static methods. The first and most important one is the `Type.of` given definition. This instance of `Type[T]` is summoned by default when no other instance is available. The `of` operation is an intrinsic operation that the compiler will transform into code that will generate the `Type[T]` at run-time. Secondly, the `Type.show[T]` operation will show a string representation of the type, which is often useful when debugging. Finally, the object defines `valueOfConstant` (and `valueOfTuple`), which can transform singleton types (or tuples of singleton types) into their value.
+
+
+```scala
+object Type:
+  given of[T <: AnyKind](using Quotes): Type[T] = ...
+  def show[T <: AnyKind](using Type[T])(using Quotes): String = ...
+  def valueOfConstant[T](using Type[T])(using Quotes): Option[T] = ...
+  def valueOfTuple[T <: Tuple](using Type[T])(using Quotes): Option[T] = ...
+```
+
+##### `Quotes`
+The `Quotes` interface is where most of the primitive operations of the quotation system are defined.
+
+`Quotes` defines all the `Expr[T]` methods as extension methods. `Type[T]` does not have methods and therefore does not appear here. These methods are available as long as `Quotes` is implicitly given in the current scope.
+
+The `Quotes` instance is also the entry point to the [reflection API](./reflection.md) through the `reflect` object.
+
+Finally, `Quotes` provides the internal logic used in quote un-pickling (`QuoteUnpickler`) and in quote pattern matching (`QuoteMatching`). These interfaces are added to the self-type of the trait to make sure they are implemented on this object but not visible to users of `Quotes`.
+
+Internally, the implementation of `Quotes` will also track its current splicing scope `Scope`. This scope will be attached to any expression that is created using this `Quotes` instance.
+
+```scala
+trait Quotes:
+  this: runtime.QuoteUnpickler & runtime.QuoteMatching =>
+
+  extension [T](self: Expr[T])
+    def show: String
+    def matches(that: Expr[Any]): Boolean
+    def value(using FromExpr[T]): Option[T]
+    def valueOrAbort(using FromExpr[T]): T
+  end extension
+
+  extension (self: Expr[Any])
+    def isExprOf[X](using Type[X]): Boolean
+    def asExprOf[X](using Type[X]): Expr[X]
+  end extension
+
+  // abstract object reflect ...
+```
+
+
+##### `Scope`
+The splice context is represented as a stack (immutable list) of `Scope` objects. Each `Scope` contains the position of the splice (used for error reporting) and a reference to the enclosing splice scope `Scope`. A scope is a sub-scope of another if the other is contained in its parents. This check is performed when an expression is spliced into another, using the `Scope` provided in the current scope in `Quotes` and the one in the `Expr` or `Type`.
+
+### Entry Points
+The two entry points for multi-stage programming are macros and the `run` operation.
+
+#### Macros
+Inline macro definitions will inline a top-level splice (a splice not nested in a quote). This splice needs to be evaluated at compile-time.
+In _Avoiding a complete interpreter_[^1], we stated the following restrictions:
+
+ * The top-level splice must contain a single call to a compiled static method.
+ * Arguments to the function are either literal constants, quoted expressions (parameters), `Type.of` for type parameters, or a reference to `Quotes`.
+
+These restrictions make the implementation of the interpreter quite simple. Java Reflection is used to call the single function call in the top-level splice. The execution of that function is entirely done on compiled bytecode. These are Scala static methods and may not always become Java static methods; they might be inside module objects. As modules are encoded as class instances, we need to interpret the prefix of the method to instantiate it before we can invoke the method.
+
+The code of the arguments has not been compiled and therefore needs to be interpreted by the compiler. Interpreting literal constants is as simple as extracting the constant from the AST that represents literals. When interpreting a quoted expression, the contents of the quote are kept as an AST which is wrapped inside the implementation of `Expr`. Calls to `Type.of[T]` also wrap the AST of the type inside the implementation of `Type`. Finally, the reference to `Quotes` is supposed to be the reference to the quotes provided by the splice. This reference is interpreted as a new instance of `Quotes` that contains a fresh initial `Scope` with no parents.
+
+The result of calling the method via Java Reflection will return an `Expr` containing a new AST that was generated by the implementation of that macro. The scope of this `Expr` is checked to make sure it did not extrude from some splice or `run` operation. Then the AST is extracted from the `Expr` and inserted as a replacement for the AST that contained the top-level splice.
+
+
+#### Run-time Multi-Stage Programming
+
+To be able to compile the code, the `scala.quoted.staging` library defines the `Compiler` trait. An instance of `staging.Compiler` is a wrapper over the normal Scala 3 compiler. To be instantiated, it requires an instance of the JVM _classloader_ of the application.
+
+```scala
+import scala.quoted.staging.*
+given Compiler = Compiler.make(getClass.getClassLoader)
+```
+
+The classloader is needed for the compiler to know which dependencies have been loaded and to load the generated code using the same classloader.
+
+```scala
+def mkPower2()(using Quotes): Expr[Double => Double] = ...
+
+run(mkPower2())
+```
+To run the previous example, the compiler will create code equivalent to the following class and compile it using a new `Scope` without parents.
+
+```scala
+class RunInstance:
+  def exec(): Double => Double = ${ mkPower2() }
+```
+Finally, `run` will interpret `(new RunInstance).exec()` to evaluate the contents of the quote. To do this, the resulting `RunInstance` class is loaded in the JVM using Java Reflection, instantiated and then the `exec` method is invoked.
+
+
+### Compilation
+
+Quotes and splices are primitive forms in the generated typed abstract syntax trees. These need to be type-checked with some extra rules, e.g., staging levels need to be checked and the references to generic types need to be adapted. Finally, quoted expressions that will be generated at run-time need to be encoded (serialized) and decoded (deserialized).
+
+#### Typing Quoted Expressions
+
+The typing process for quoted expressions and splices with `Expr` is relatively straightforward.
+At its core, quotes are desugared into calls to `quote`, and splices into calls to `splice`. We track the quotation level when desugaring into these methods.
+
+
+```scala
+def quote[T](x: T): Quotes ?=> Expr[T]
+
+def splice[T](x: Quotes ?=> Expr[T]): T
+```
+
+It would be impossible to track the quotation levels if users wrote calls to these methods directly. To know if it is a call to one of those methods we would need to type it first, but to type it we would need to know if it is one of these methods to update the quotation level. Therefore these methods can only be used by the compiler.
+
+At run-time, the splice needs to have a reference to the `Quotes` that created its surrounding quote. To simplify this for later phases, we track the current `Quotes` and encode a reference directly in the splice using `nestedSplice` instead of `splice`.
+
+```scala
+def nestedSplice[T](q: Quotes)(x: q.Nested ?=> Expr[T]): T
+```
+With this addition, the original `splice` is only used for top-level splices.
+
+The levels are mostly used to identify top-level splices that need to be evaluated while typing. We do not use the quotation level to influence the typing process. Level checking is performed at a later phase. This ensures that a source expression in a quote will have the same elaboration as a source expression outside the quote.
+
+
+
+#### Quote Pattern Matching
+
+Pattern matching is defined in the trait `QuoteMatching`, which is part of the self type of `Quotes`. It is implemented by `Quotes` but not available to users of `Quotes`. To access it, the compiler generates a cast from `Quotes` to `QuoteMatching` and then selects one of its two members: `ExprMatch` or `TypeMatch`. `ExprMatch` defines an `unapply` extractor method that is used to encode quote patterns, and `TypeMatch` defines an `unapply` method for quoted type patterns.
+
+```scala
+trait Quotes:
+  self: runtime.QuoteMatching & ... =>
+  ...
+
+trait QuoteMatching:
+  object ExprMatch:
+    def unapply[TypeBindings <: Tuple, Tup <: Tuple]
+               (scrutinee: Expr[Any])
+               (using pattern: Expr[Any]): Option[Tup] = ...
+  object TypeMatch:
+    ...
+```
+
+These extractor methods are only meant to be used in code generated by the compiler. The call to the extractor that is generated has an already elaborated form that cannot be written in source, namely explicit type parameters and explicit contextual parameters.
+
+This extractor returns a tuple type `Tup` which cannot be inferred from the types in the method signature. This type will be computed when typing the quote pattern and will be explicitly added to the extractor call. To refer to type variables in arbitrary places of `Tup`, we need to define them all before their use, hence we have `TypeBindings`, which will contain all pattern type variable definitions. The extractor also receives a given parameter of type `Expr[Any]` that will contain an expression that represents the pattern. The compiler will explicitly add this pattern expression. We use a given parameter because these are the only parameters we are allowed to add to the extractor call in a pattern position.
+
+This extractor is a bit convoluted, but it encodes away all the quotation-specific features. It compiles the pattern down into a representation that the pattern matcher compiler phase understands.
+
+The quote patterns are encoded into two parts: a tuple pattern that is tasked with extracting the result of the match and a quoted expression representing the pattern.
+For example, if the pattern has no `$` we will have an `EmptyTuple` as the pattern and `'{1}` to represent the pattern.
+
+```scala
+  case '{ 1 } =>
+// is elaborated to
+  case ExprMatch(EmptyTuple)(using '{1}) =>
+//               ^^^^^^^^^^        ^^^^^
+//                pattern        expression
+```
+When extracting expressions, each pattern that is contained in a splice `${..}` will be placed in order in the tuple pattern. In the following case, `f` and `x` are placed in a tuple pattern `(f, x)`. The type of the tuple is encoded in `Tup` and not only in the tuple itself. Otherwise, the extractor would return a `Tuple` whose element types would need to be tested, which is in turn not possible due to type erasure.
+
+```scala
+  case '{ ((y: Int) => $f(y)).apply($x) } =>
+// is elaborated to
+  case ExprMatch[.., (Expr[Int => Int], Expr[Int])]((f, x))(using pattern) =>
+// pattern = '{ ((y: Int) => pat[Int](y)).apply(pat[Int]()) }
+```
+The contents of the quote are transformed into a valid quote expression by replacing the splice with a marker expression `pat[T](..)`. The type `T` is taken from the type of the splice and the arguments are the HOAS arguments. This implies that a `pat[T]()` is a closed pattern and `pat[T](y)` is an HOAS pattern that can refer to `y`.
+
+
+Type variables in quoted patterns are first normalized to have all definitions at the start of the pattern. For each definition of a type variable `t` in the pattern we will add a type variable definition in `TypeBindings`. Each one will have a corresponding `Type[t]` that will get extracted if the pattern matches. These `Type[t]` are also listed in the `Tup` and added in the tuple pattern. It is additionally marked as `using` in the pattern to make it implicitly available in this case branch.
+
+
+```scala
+  case '{ type t; ($xs: List[t]).map[t](identity[t]) } =>
+// is elaborated to
+  case ExprMatch[(t), (Type[t], Expr[List[t]])]((using t, xs))(using p) =>
+//               ^^^  ^^^^^^^^^^^^^^^^^^^^^^^^  ^^^^^^^^^^^^^        ^
+//          type bindings     result type       pattern        expression
+// p = '{ @patternType type u; pat[List[u]]().map[u](identity[u]) }
+```
+
+The contents of the quote are transformed into a valid quote expression by replacing type variables with fresh ones that do not escape the quote scope. These are also annotated to be easily identifiable as pattern variables.
+
+#### Level Consistency Checking
+Level consistency checking is performed after typing the program as a static check. To check level consistency we traverse the tree top-down, remembering the context staging level. Each local definition in scope is recorded with its level and each term reference to a definition is checked against the current staging level.
+```scala
+// level 0
+'{ // level 1
+  val x = ... // level 1 with (x -> 1)
+  ${ // level 0 (x -> 1)
+    val y = ... // level 0 with (x -> 1, y -> 0)
+    x // error: defined at level 1 but used in level 0
+  }
+  // level 1 (x -> 1)
+  x // x is ok
+}
+```
+
+#### Type Healing
+
+When using a generic type `T` in a future stage, it is necessary to have a given `Type[T]` in scope. The compiler needs to identify those references and link them with the instance of `Type[T]`. For instance, consider the following example:
+
+```scala
+def emptyList[T](using t: Type[T])(using Quotes): Expr[List[T]] =
+  '{ List.empty[T] }
+```
+
+For each reference to a generic type `T` that is defined at level 0 and used at level 1 or greater, the compiler will summon a `Type[T]`. This is usually the given type that is provided as parameter, `t` in this case.
+We can use the type `t.Underlying` to replace `T`, as it is an alias of that type.
+But `t.Underlying` contains the extra information that it is `t` that will be used in the evaluation of the quote.
+In a sense, `Underlying` acts like a splice for types.
+
+```scala
+def emptyList[T](using t: Type[T])(using Quotes): Expr[List[T]] =
+  '{ List.empty[t.Underlying] }
+```
+
+Due to some technical limitations, it is not always possible to replace the type reference with the AST containing `t.Underlying`.
+To overcome this limitation, we can simply define a list of type aliases at the start of the quote and insert the `t.Underlying` there.
+This has the added advantage that we do not have to repeatedly insert the `t.Underlying` in the quote.
+
+```scala
+def emptyList[T](using t: Type[T])(using Quotes): Expr[List[T]] =
+  '{ type U = t.Underlying; List.empty[U] }
+```
+These aliases can be used at any level within the quote, and this transformation is only performed on quotes that are at level 0.
+
+```scala
+  '{ List.empty[T] ... '{ List.empty[T] } ... }
+// becomes
+  '{ type U = t.Underlying; List.empty[U] ... '{ List.empty[U] } ... }
+```
+If we define a generic type at level 1 or greater, it will not be subject to this transformation.
+In some future compilation stage, when the definition of the generic type is at level 0, it will be subject to this transformation.
+This simplifies the transformation logic and avoids leaking the encoding into code that a macro could inspect.
+
+```scala
+'{
+  def emptyList[T: Type](using Quotes): Expr[List[T]] = '{ List.empty[T] }
+  ...
+}
+```
+A similar transformation is performed on `Type.of[T]`.
+Any generic type in `T` needs to have an implicitly given `Type[T]` in scope, which will also be used as a path.
+For example:
+
+```scala
+def empty[T](using t: Type[T])(using Quotes): Expr[T] =
+  Type.of[T] match ...
+// becomes
+def empty[T](using t: Type[T])(using Quotes): Expr[T] =
+  Type.of[t.Underlying] match ...
+// then becomes
+def empty[T](using t: Type[T])(using Quotes): Expr[T] =
+  t match ...
+```
+
+The operation `Type.of[t.Underlying]` can be optimized to just `t`.
+But this is not always the case.
+If the generic reference is nested in the type, we will need to keep the `Type.of`.
+
+```scala
+def matchOnList[T](using t: Type[T])(using Quotes): Expr[List[T]] =
+  Type.of[List[T]] match ...
+// becomes
+def matchOnList[T](using t: Type[T])(using Quotes): Expr[List[T]] =
+  Type.of[List[t.Underlying]] match ...
+```
+
+By doing this transformation, we ensure that each abstract type `U` used in `Type.of` has an implicit `Type[U]` in scope.
+This representation makes it simpler to identify parts of the type that are statically known from those that are known dynamically.
+Type aliases are also added within the type of the `Type.of`, though these are not valid source code.
+These would look like `Type.of[{type U = t.Underlying; Map[U, U]}]` if written in source code.
+
+
+#### Splice Normalization
+
+The contents of a splice may refer to variables defined in the enclosing quote.
+This complicates the serialization of the contents of the quote.
+To make serialization simple, we first transform the contents of each level 1 splice.
+Consider the following example:
+
+```scala
+def power5to(n: Expr[Int])(using Quotes): Expr[Double] = '{
+  val x: Int = 5
+  ${ powerCode('{x}, n) }
+}
+```
+
+The variable `x` is defined in the quote and used in the splice.
+The normal form will extract all references to `x` and replace them with a staged version of `x`.
+We will replace the reference to `x` of type `T` with a `$y` where `y` is of type `Expr[T]`.
+Then we wrap the new contents of the splice in a lambda that defines `y` and apply it to the quoted version of `x`.
+After this transformation, we have two parts: a lambda without references to the quote, which knows how to compute the contents of the splice, and a sequence of quoted arguments that refer to variables defined in the lambda.
+
+```scala
+def power5to(n: Expr[Int])(using Quotes): Expr[Double] = '{
+  val x: Int = 5
+  ${ ((y: Expr[Int]) => powerCode('{$y}, n)).apply('x) }
+}
+```
+
+In general, the splice normal form has the shape `${ <lambda>.apply(<args>*) }` and the following constraints:
+ * `<lambda>`: a lambda expression that does not refer to variables defined in the outer quote
+ * `<args>`: a sequence of quoted expressions or `Type.of` calls containing references to variables defined in the enclosing quote and no references to local variables defined outside the enclosing quote
+
+
+##### Function references normalization
+A reference to a function `f` that receives parameters is not a valid value in Scala.
+Such a function reference `f` can be eta-expanded as `x => f(x)` to be used as a lambda value.
+Therefore, function references cannot be normalized as directly as other expressions, because we cannot represent `'{f}` with a method reference type.
+We can use the eta-expanded form of `f` in the normalized form.
+For example, consider the reference to `f` below.
+
+```scala
+'{
+  def f(a: Int)(b: Int, c: Int): Int = 2 + a + b + c
+  ${ '{ f(3)(4, 5) } }
+}
+```
+
+To normalize this code, we can eta-expand the reference to `f` and place it in a quote containing a proper expression.
+Therefore the normalized form of the argument `'{f}` becomes the quoted lambda `'{ (a: Int) => (b: Int, c: Int) => f(a)(b, c) }` and is an expression of type `Expr[Int => (Int, Int) => Int]`.
+The eta-expansion produces one curried lambda per parameter list.
+The application `f(3)(4, 5)` does not become `$g(3)(4, 5)` but `$g.apply(3).apply(4, 5)`.
+We add the `apply` because `g` is not a quoted reference to a function but a curried lambda.
+
+```scala
+'{
+  def f(a: Int)(b: Int, c: Int): Int = 2 + a + b + c
+  ${
+    (
+      (g: Expr[Int => (Int, Int) => Int]) => '{$g.apply(3).apply(4, 5)}
+    ).apply('{ (a: Int) => (b: Int, c: Int) => f(a)(b, c) })
+  }
+}
+```
+
+Then we can apply it and beta-reduce the application when generating the code.
+
+```scala
+  (g: Expr[Int => (Int, Int) => Int]) => betaReduce('{$g.apply(3).apply(4, 5)})
+```
+
+
+##### Variable assignment normalization
+A reference to a mutable variable in the left-hand side of an assignment cannot be transformed directly as it is not in an expression position.
+```scala
+'{
+  var x: Int = 5
+  ${ g('{x = 2}) }
+}
+```
+
+We can use the same strategy as for function references by eta-expanding the assignment operation `x = _` into `y => x = y`.
+
+```scala
+'{
+  var x: Int = 5
+  ${
+    g(
+      (
+        (f: Expr[Int => Unit]) => betaReduce('{$f(2)})
+      ).apply('{ (y: Int) => x = $y })
+    )
+  }
+}
+```
+
+
+##### Type normalization
+Types defined in the quote are subject to a similar transformation.
+In this example, `T` is defined within the quote at level 1 and used in the splice again at level 1.
+
+```scala
+'{ def f[T] = ${ '{g[T]} } }
+```
+
+The normalization will add a `Type[T]` parameter to the lambda and insert references to it.
+The difference is that it will add an alias similar to the one used in type healing.
+In this example, we create a `type U` that aliases the staged type.
+
+```scala
+'{
+  def f[T] = ${
+    (
+      (t: Type[T]) => '{type U = t.Underlying; g[U]}
+    ).apply(Type.of[T])
+  }
+}
+```
+
+#### Serialization
+
+Quoted code needs to be pickled to make it available at run-time in the next compilation phase.
+We implement this by pickling the AST as a TASTy binary.
+
+##### TASTy
+The TASTy format is the typed abstract syntax tree serialization format of Scala 3.
+It usually pickles the fully elaborated code after type-checking and is kept alongside the generated Java classfiles.
+
+
+##### Pickling
+We use TASTy as a serialization format for the contents of the quotes.
+To show how serialization is performed, we will use the following example.
+```scala
+'{
+  val (x, n): (Double, Int) = (5, 2)
+  ${ powerCode('{x}, '{n}) } * ${ powerCode('{2}, '{n}) }
+}
+```
+
+This quote is transformed into the following code when normalizing the splices.
+
+```scala
+'{
+  val (x, n): (Double, Int) = (5, 2)
+  ${
+    ((y: Expr[Double], m: Expr[Int]) => powerCode(y, m)).apply('x, 'n)
+  } * ${
+    ((m: Expr[Int]) => powerCode('{2}, m)).apply('n)
+  }
+}
+```
+
+Splice normalization is a key part of the serialization process, as it ensures that the only references to variables defined in the quote are in the arguments of the lambda in the splice.
+This makes it possible to create a closed representation of the quote without much effort.
+The first step is to remove all the splices and replace them with holes.
+A hole is like a splice, but it lacks the knowledge of how to compute the contents of the splice.
+Instead, it knows the index of the hole and the contents of the arguments of the splice.
+We can see this transformation in the following example, where a hole is represented by `<< idx; holeType; args* >>`.
+
+```scala
+  ${ ((y: Expr[Double], m: Expr[Int]) => powerCode(y, m)).apply('x, 'n) }
+// becomes
+  << 0; Double; x, n >>
+```
+
+As this was the first hole, it has index 0.
+The hole type is `Double`, which needs to be remembered now that we cannot infer it from the contents of the splice.
+The arguments of the splice are `x` and `n`; note that they do not require quoting because they were moved out of the splice.
+
+References to healed types are handled in a similar way.
+Consider the `emptyList` example, which shows the type aliases that are inserted into the quote.
+```scala
+'{ List.empty[T] }
+// type healed to
+'{ type U = t.Underlying; List.empty[U] }
+```
+Instead of replacing a splice, we replace the `t.Underlying` type with a type hole.
+The type hole is represented by `<< idx; bounds >>`.
+```scala
+'{ type U = << 0; Nothing..Any >>; List.empty[U] }
+```
+Here, the bounds of `Nothing..Any` are the bounds of the original `T` type.
+The types of a `Type.of` are transformed in the same way.
+
+
+With these transformations, the contents of the quote or `Type.of` are guaranteed to be closed and therefore can be pickled.
+The AST is pickled into TASTy, which is a sequence of bytes.
+This sequence of bytes needs to be instantiated in the bytecode, but unfortunately it cannot be dumped into the classfile as bytes.
+To reify it, we encode the bytes into a Java `String`.
+In the following examples we display this encoding in human-readable form with the fictitious `tasty"..."` string literal.
+
+```scala
+// pickled AST bytes encoded in a base64 string
+tasty"""
+  val (x, n): (Double, Int) = (5, 2)
+  << 0; Double; x, n >> * << 1; Double; n >>
+"""
+// or
+tasty"""
+  type U = << 0; Nothing..Any >>
+  List.empty[U]
+"""
+```
+The contents of a quote or `Type.of` are not always pickled.
+In some cases it is better to generate equivalent (smaller and/or faster) code that will compute the expression.
+Literal values are compiled into a call to `Expr()` using the implementation of `ToExpr` to create the quoted expression.
+This is currently performed only on literal values, but can be extended to any value for which we have a `ToExpr` defined in the standard library.
+Similarly, for non-generic types we can use their respective `java.lang.Class` and convert them into a `Type` using a primitive operation `typeConstructorOf` defined in the reflection API.
+
+##### Unpickling
+
+Now that we have seen how a quote is pickled, we can look at how to unpickle it.
+We will continue with the previous example.
+
+Holes were used to replace the splices in the quote.
+When we perform this transformation, we also need to remember the lambdas from the splices and their hole index.
+When unpickling a hole, the corresponding splice lambda will be used to compute the contents of the hole.
+The lambda will receive as parameters quoted versions of the arguments of the hole.
+For example, to compute the contents of `<< 0; Double; x, n >>` we will evaluate the following code:
+
+```scala
+  ((y: Expr[Double], m: Expr[Int]) => powerCode(y, m)).apply('x, 'n)
+```
+
+The evaluation is not as trivial as it looks, because the lambda comes from compiled code and the rest is code that must be interpreted.
+We put the AST of `x` and `n` into `Expr` objects to simulate the quotes and then we use Java Reflection to call the `apply` method.
+
+We may have many holes in a quote and therefore as many lambdas.
+To avoid the instantiation of many lambdas, we can join them together into a single lambda.
+Apart from the list of arguments, this lambda will also take the index of the hole that is being evaluated.
+It will perform a switch match on the index and call the corresponding lambda in each branch.
+Each branch will also extract the arguments depending on the definition of the lambda.
+The applications of the original lambdas are beta-reduced to avoid extra overhead.
+
+```scala
+(idx: Int, args: Seq[Any]) =>
+  idx match
+    case 0 => // for << 0; Double; x, n >>
+      val x = args(0).asInstanceOf[Expr[Double]]
+      val n = args(1).asInstanceOf[Expr[Int]]
+      powerCode(x, n)
+    case 1 => // for << 1; Double; n >>
+      val n = args(0).asInstanceOf[Expr[Int]]
+      powerCode('{2}, n)
+```
+
+Types are handled similarly to splices: when we replace the type aliases with holes, we keep track of the index of each hole.
+Instead of lambdas, we will have a list of references to instances of `Type`.
+From the following example we would extract `t`, `u`, ...
+
+```scala
+  '{ type T1 = t1.Underlying; type Tn = tn.Underlying; ... }
+// with holes
+  '{ type T1 = << 0; ... >>; type Tn = << n-1; ... >>; ... }
+```
+
+As the type holes are at the start of the quote, they will have the first `N` indices.
+This implies that we can place the references in a sequence `Seq(t, u, ...)` where the index in the sequence is the same as the hole index.
+
+Lastly, the quote itself is replaced by a call to `QuoteUnpickler.unpickleExpr`, which will unpickle the AST, evaluate the holes, i.e., splices, and wrap the resulting AST in an `Expr[T]`.
+This method takes the pickled `tasty"..."`, the types and the hole lambda.
+Similarly, `Type.of` is replaced with a call to `QuoteUnpickler.unpickleType` but only receives the pickled `tasty"..."` and the types.
+Because `QuoteUnpickler` is part of the self-type of the `Quotes` class, we have to cast the instance, but we know that this cast will always succeed.
+
+```scala
+quotes.asInstanceOf[runtime.QuoteUnpickler].unpickleExpr[T](
+  pickled = tasty"...",
+  types = Seq(...),
+  holes = (idx: Int, args: Seq[Any]) => idx match ...
+)
+```
+
+[^1]: [Scalable Metaprogramming in Scala 3](https://infoscience.epfl.ch/record/299370)
+[^2]: [Multi-stage programming with generative and analytical macros](https://dl.acm.org/doi/10.1145/3486609.3487203)
+[^3]: In quotes, identifiers starting with `$` must be surrounded by backticks (`` `$` ``). For example `$conforms` from `scala.Predef`.
diff --git a/docs/_spec/TODOreference/metaprogramming/macros.md b/docs/_spec/TODOreference/metaprogramming/macros.md
new file mode 100644
index 000000000000..e39f6f1022b8
--- /dev/null
+++ b/docs/_spec/TODOreference/metaprogramming/macros.md
@@ -0,0 +1,621 @@
+---
+layout: doc-page
+title: "Macros"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/macros.html
+---
+
+> When developing macros, enable the `-Xcheck-macros` scalac option to get extra runtime checks.
+
+## Multi-Staging
+
+#### Quoted expressions
+Multi-stage programming in Scala 3 uses quotes `'{..}` to delay, i.e., stage, execution of code and splices `${..}` to evaluate and insert code into quotes.
+Quoted expressions are typed as `Expr[T]` with a covariant type parameter `T`.
+It is easy to write statically safe code generators with these two concepts.
+The following example shows a naive implementation of the $x^n$ mathematical operation.
+
+```scala
+import scala.quoted.*
+def unrolledPowerCode(x: Expr[Double], n: Int)(using Quotes): Expr[Double] =
+  if n == 0 then '{ 1.0 }
+  else if n == 1 then x
+  else '{ $x * ${ unrolledPowerCode(x, n-1) } }
+```
+
+```scala
+'{
+  val x = ...
+  ${ unrolledPowerCode('{x}, 3) } // evaluates to: x * x * x
+}
+```
+
+Quotes and splices are duals of each other.
+For an arbitrary expression `x` of type `T` we have `${'{x}} = x` and for an arbitrary expression `e` of type `Expr[T]` we have `'{${e}} = e`.
+
+#### Abstract types
+Quotes can handle generic and abstract types using the type class `Type[T]`.
+A quote that refers to a generic or abstract type `T` requires a given `Type[T]` to be provided in the implicit scope.
+The following examples show how `T` is annotated with a context bound (`: Type`) to provide an implicit `Type[T]`, or the equivalent `using Type[T]` parameter.
+
+```scala
+import scala.quoted.*
+def singletonListExpr[T: Type](x: Expr[T])(using Quotes): Expr[List[T]] =
+  '{ List[T]($x) } // generic T used within a quote
+
+def emptyListExpr[T](using Type[T], Quotes): Expr[List[T]] =
+  '{ List.empty[T] } // generic T used within a quote
+```
+
+If no other instance is found, the default `Type.of[T]` is used.
+The following example implicitly uses `Type.of[String]` and `Type.of[Option[U]]`.
+```scala
+val list1: Expr[List[String]] =
+  singletonListExpr('{"hello"}) // requires a given `Type[String]`
+val list0: Expr[List[Option[U]]] =
+  emptyListExpr[Option[U]] // requires a given `Type[Option[U]]`
+```
+
+
+The `Type.of[T]` method is a primitive operation that the compiler will handle specially.
+It will provide the implicit if the type `T` is statically known, or if `T` contains some other types `Ui` for which we have an implicit `Type[Ui]`.
+In the example, `Type.of[String]` has a statically known type and `Type.of[Option[U]]` requires an implicit `Type[U]` in scope.
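+
+As an illustration of this composition (a sketch with invented names, relying only on the standard `scala.quoted` API shown above):
+
+```scala
+import scala.quoted.*
+
+// Type[(String, U)] is synthesized by the compiler from the statically
+// known String component and the Type[U] provided by the context bound.
+def pairListExpr[U: Type](using Quotes): Expr[List[(String, U)]] =
+  '{ List.empty[(String, U)] }
+```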
+
+#### Quote context
+We also track the current quotation context using a given `Quotes` instance.
+To create a quote `'{..}` we require a given `Quotes` context, which should be passed as a contextual parameter `(using Quotes)` to the function.
+Each splice will provide a new `Quotes` context within the scope of the splice.
+Therefore, quotes and splices can be seen as methods with the following signatures, but with special semantics.
+```scala
+def '[T](x: T): Quotes ?=> Expr[T] // def '[T](x: T)(using Quotes): Expr[T]
+
+def $[T](x: Quotes ?=> Expr[T]): T
+```
+
+The lambda with a question mark `?=>` is a contextual function; it is a lambda that takes its argument implicitly and provides it implicitly in the implementation of the lambda.
+`Quotes` is used for a variety of purposes that will be mentioned when covering those topics.
+
+## Quoted Values
+
+#### Lifting
+While it is not possible to use cross-stage persistence of local variables, it is possible to lift them to the next stage.
+To this end, we provide the `Expr.apply` method, which can take a value and lift it into a quoted representation of the value.
+
+```scala
+val expr1plus1: Expr[Int] = '{ 1 + 1 }
+
+val expr2: Expr[Int] = Expr(1 + 1) // lift 2 into '{ 2 }
+```
+
+While it looks type-wise similar to `'{ 1 + 1 }`, the semantics of `Expr(1 + 1)` are quite different.
+`Expr(1 + 1)` will not stage or delay any computation; the argument is evaluated to a value and then lifted into a quote.
+The quote will contain code that will create a copy of this value in the next stage.
+`Expr` is polymorphic and user-extensible via the `ToExpr` type class.
+
+```scala
+trait ToExpr[T]:
+  def apply(x: T)(using Quotes): Expr[T]
+```
+
+We can implement a `ToExpr` using a `given` definition that will add the definition to the implicits in scope.
+In the following example we show how to implement a `ToExpr[Option[T]]` for any liftable type `T`.
+
+```scala
+given OptionToExpr[T: Type: ToExpr]: ToExpr[Option[T]] with
+  def apply(opt: Option[T])(using Quotes): Expr[Option[T]] =
+    opt match
+      case Some(x) => '{ Some[T]( ${Expr(x)} ) }
+      case None => '{ None }
+```
+
+The `ToExpr` for primitive types must be implemented as primitive operations in the system.
+In our case, we use the reflection API to implement them.
+
+#### Extracting values from quotes
+To be able to generate optimized code using the method `unrolledPowerCode`, the macro implementation `powerCode` needs to first
+determine whether the argument passed as parameter `n` is a known constant value.
+This can be achieved via _unlifting_ using the `Expr.unapply` extractor from our library implementation, which will only match if `n` is a quoted constant and extracts its value.
+
+```scala
+def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] =
+  n match
+    case Expr(m) => // it is a constant: unlift code n='{m} into number m
+      unrolledPowerCode(x, m)
+    case _ => // not known: call power at run-time
+      '{ power($x, $n) }
+```
+
+Alternatively, the `n.value` method can be used to get an `Option[Int]` with the value, or `n.valueOrAbort` to get the value directly.
+```scala
+def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] =
+  // emits an error message if `n` is not a constant
+  unrolledPowerCode(x, n.valueOrAbort)
+```
+
+`Expr.unapply` and all variants of `value` are polymorphic and user-extensible via a given `FromExpr` type class.
+
+```scala
+trait FromExpr[T]:
+  def unapply(x: Expr[T])(using Quotes): Option[T]
+```
+
+We can use `given` definitions to implement the `FromExpr` as we did for `ToExpr`.
+The `FromExpr` for primitive types must be implemented as primitive operations in the system.
+In our case, we use the reflection API to implement them.
+To implement `FromExpr` for non-primitive types we use quote pattern matching (for example `OptionFromExpr`).
+
+
+## Macros and Multi-Stage Programming
+
+The system supports multi-stage macros and run-time multi-stage programming using the same quotation abstractions.
+
+### Multi-Stage Macros
+
+#### Macros
+We can generalize the splicing abstraction to express macros.
+A macro consists of a top-level splice that is not nested in any quote.
+Conceptually, the contents of the splice are evaluated one stage earlier than the program.
+In other words, the contents are evaluated while compiling the program. The generated code resulting from the macro replaces the splice in the program.
+
+```scala
+def power2(x: Double): Double =
+  ${ unrolledPowerCode('x, 2) } // x * x
+```
+
+#### Inline macros
+Since using splices in the middle of a program is not as ergonomic as calling a function, we hide the staging mechanism from end-users of macros and provide a uniform way of calling macros and normal functions.
+For this, _we restrict the use of top-level splices to only appear in inline methods_[^1][^2].
+
+```scala
+// inline macro definition
+inline def powerMacro(x: Double, inline n: Int): Double =
+  ${ powerCode('x, 'n) }
+
+// user code
+def power2(x: Double): Double =
+  powerMacro(x, 2) // x * x
+```
+
+The evaluation of the macro will only happen when the code is inlined into `power2`.
+When inlined, the code is equivalent to the previous definition of `power2`.
+A consequence of using inline methods is that none of the arguments nor the return type of the macro will have to mention the `Expr` types; this hides all aspects of metaprogramming from the end-users.
+
+#### Avoiding a complete interpreter
+When evaluating a top-level splice, the compiler needs to interpret the code that is within the splice.
+Providing an interpreter for the entire language is quite tricky, and it is even more challenging to make that interpreter run efficiently.
+To avoid needing a complete interpreter, we can impose the following restrictions on splices to simplify the evaluation of the code in top-level splices.
+ * The top-level splice must contain a single call to a compiled static method.
+ * Arguments to the function are literal constants, quoted expressions (parameters), calls to `Type.of` for type parameters and a reference to `Quotes`.
+
+In particular, these restrictions disallow the use of splices in top-level splices.
+Such a splice would require several stages of interpretation, which would be unnecessarily inefficient.
+
+#### Compilation stages
+The macro implementation (i.e., the method called in the top-level splice) can come from any pre-compiled library.
+This provides a clear separation between the stages of the compilation process.
+Consider the following three source files defined in distinct libraries.
+```scala
+// Macro.scala
+def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = ...
+inline def powerMacro(x: Double, inline n: Int): Double =
+  ${ powerCode('x, 'n) }
+```
+
+```scala
+// Lib.scala (depends on Macro.scala)
+def power2(x: Double) =
+  ${ powerCode('x, '{2}) } // inlined from a call to: powerMacro(x, 2)
+```
+
+```scala
+// App.scala (depends on Lib.scala)
+@main def app() = power2(3.14)
+```
+One way to syntactically visualize this is to put the application in a quote that delays the compilation of the application.
+Then the application dependencies can be placed in an outer quote that contains the quoted application, and we repeat this recursively for dependencies of dependencies.
+
+```scala
+'{ // macro library (compilation stage 1)
+  def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] =
+    ...
+  inline def powerMacro(x: Double, inline n: Int): Double =
+    ${ powerCode('x, 'n) }
+  '{ // library using macros (compilation stage 2)
+    def power2(x: Double) =
+      ${ powerCode('x, '{2}) } // inlined from a call to: powerMacro(x, 2)
+    '{ power2(3.14) /* app (compilation stage 3) */ }
+  }
+}
+```
+
+To make the system more versatile, we allow calling macros in the project where they are defined, with some restrictions.
+For example, this allows compiling `Macro.scala` and `Lib.scala` together in the same library.
+To this end, we do not follow the simpler syntactic model and rely on semantic information from the source files.
+When compiling a source, if we detect a call to a macro that is not compiled yet, we delay the compilation of this source to the following compilation stage.
+In the example, we would delay the compilation of `Lib.scala` because it contains a compile-time call to `powerCode`.
+Compilation stages are repeated until all sources are compiled, or no progress can be made.
+If no progress is made, there was a cyclic dependency between the definition and the use of the macro.
+We also need to detect if at run-time the macro depends on sources that have not been compiled yet.
+Such cases are detected by executing the macro and checking for JVM linking errors to classes that have not been compiled yet.
+
+### Run-Time Multi-Stage Programming
+
+See [Run-Time Multi-Stage Programming](./staging.md)
+
+## Safety
+
+Multi-stage programming is by design statically safe and cross-stage safe.
+
+### Static Safety
+
+#### Hygiene
+All identifier names are interpreted as symbolic references to the corresponding variable in the context of the quote.
+Therefore, while evaluating the quote, it is not possible to accidentally rebind a reference to a new variable with the same textual name.
+
+#### Well-typed
+If a quote is well typed, then the generated code is well typed.
+This is a simple consequence of tracking the type of each expression.
+An `Expr[T]` can only be created from a quote that contains an expression of type `T`.
+Conversely, an `Expr[T]` can only be spliced in a location that expects a type `T`.
+As mentioned before, `Expr` is covariant in its type parameter.
+This means that an `Expr[T]` can contain an expression of a subtype of `T`.
+When spliced in a location that expects a type `T`, these expressions also have a valid type.
+
+### Cross-Stage Safety
+
+#### Level consistency
+We define the _staging level_ of some code as the number of quotes minus the number of splices surrounding said code.
+Local variables must be defined and used at the same staging level.
+
+It is never possible to access a local variable from a lower staging level as it does not yet exist.
+
+```scala
+def badPower(x: Double, n: Int): Double =
+  ${ unrolledPowerCode('x, n) } // error: value of `n` not known yet
+```
+
+
+In the context of macros and _cross-platform portability_, that is,
+macros compiled on one machine but potentially executed on another,
+we cannot support cross-stage persistence of local variables.
+Therefore, local variables can only be accessed at precisely the same staging level in our system.
+
+```scala
+def badPowerCode(x: Expr[Double], n: Int)(using Quotes): Expr[Double] =
+  // error: `n` potentially not available in the next execution environment
+  '{ power($x, n) }
+```
+
+
+The rules are slightly different for global definitions, such as `unrolledPowerCode`.
+It is possible to generate code that contains a reference to a _global_ definition such as in `'{ power(2, 4) }`.
+This is a limited form of cross-stage persistence that does not impede cross-platform portability, where we refer to the already compiled code for `power`.
+Each compilation step will lower the staging level by one while keeping global definitions.
+In consequence, we can refer to compiled definitions in macros such as `unrolledPowerCode` in `${ unrolledPowerCode('x, 2) }`.
+
+We can summarize level consistency in two rules:
+ * Local variables can be used only at the same staging level as their definition
+ * Global variables can be used at any staging level
+
+
+#### Type consistency
+As Scala uses type erasure, generic types will be erased at run-time and hence in any following stage.
+To ensure that any quoted expression that refers to a generic type `T` does not lose the information it needs, we require a given `Type[T]` in scope.
+The `Type[T]` will carry over the non-erased representation of the type into the next phase.
+Therefore, any generic type used at a higher staging level than its definition will require its `Type`.
+
+#### Scope extrusion
+Within the contents of a splice, it is possible to have a quote that refers to a local variable defined in the outer quote.
+If this quote is used within the splice, the variable will be in scope.
+However, if the quote is somehow _extruded_ outside the splice, then variables might not be in scope anymore.
+Quoted expressions can be extruded using side effects such as mutable state and exceptions.
+The following example shows how a quote can be extruded using mutable state.
+```scala
+var x: Expr[T] = null
+'{ (y: T) => ${ x = 'y; 1 } }
+x // has value '{y} but y is not in scope
+```
+
+A second way a variable can be extruded is through the `run` method.
+If `run` consumes a quoted variable reference, it will not be in scope anymore.
+The result will reference a variable that is defined in the next stage.
+
+```scala
+'{ (x: Int) => ${ run('x); ... } }
+// evaluates to: '{ (x: Int) => ${ x; ... } }
+```
+
+To catch both scope extrusion scenarios, our system restricts the use of quotes by only allowing a quote to be spliced if it was not extruded from a splice scope.
+Unlike level consistency, this is checked at run-time[^4] rather than compile-time to avoid making the static type system too complicated.
+
+Each `Quotes` instance contains a unique scope identifier and refers to its parent scope, forming a stack of identifiers.
+The parent of the scope of a `Quotes` is the scope of the `Quotes` used to create the enclosing quote.
+Top-level splices and `run` create new scope stacks.
+Every `Expr` knows in which scope it was created.
+When it is spliced, we check that the quote scope is either the same as the splice scope, or a parent scope thereof.
+
+
+## Staged Lambdas
+
+When staging programs in a functional language, there are two fundamental abstractions: a staged lambda `Expr[T => U]` and a staging lambda `Expr[T] => Expr[U]`.
+The first is a function that will exist in the next stage, whereas the second is a function that exists in the current stage.
+It is often convenient to have a mechanism to go from `Expr[T => U]` to `Expr[T] => Expr[U]` and vice versa.
+
+```scala
+def later[T: Type, U: Type](f: Expr[T] => Expr[U]): Expr[T => U] =
+  '{ (x: T) => ${ f('x) } }
+
+def now[T: Type, U: Type](f: Expr[T => U]): Expr[T] => Expr[U] =
+  (x: Expr[T]) => '{ $f($x) }
+```
+
+Both conversions can be performed out of the box with quotes and splices.
+But if `f` is a known lambda function, `'{ $f($x) }` will not beta-reduce the lambda in place.
+This optimization is performed in a later phase of the compiler.
+Not reducing the application immediately can simplify analysis of generated code.
+Nevertheless, it is possible to beta-reduce the lambda in place using the `Expr.betaReduce` method.
+
+```scala
+def now[T: Type, U: Type](f: Expr[T => U]): Expr[T] => Expr[U] =
+  (x: Expr[T]) => Expr.betaReduce('{ $f($x) })
+```
+
+The `betaReduce` method will beta-reduce the outermost application of the expression if possible (regardless of arity).
+If it is not possible to beta-reduce the expression, then it will return the original expression.
+
+## Staged Constructors
+To create new class instances in a later stage, we can create them using factory methods (usually `apply` methods of an `object`), or we can instantiate them with a `new`.
+For example, we can write `Some(1)` or `new Some(1)`, creating the same value.
+In Scala 3, using the factory method call notation will fall back to a `new` if no `apply` method is found.
+We follow the usual staging rules when calling a factory method.
+Similarly, when we use a `new C`, the constructor of `C` is implicitly called, which also follows the usual staging rules.
+Therefore, for an arbitrary known class `C`, we can use either `'{ C(...) }` or `'{ new C(...) }` as constructors.
+
+## Staged Classes
+Quoted code can contain any valid expression, including local class definitions.
+This allows the creation of new classes with specialized implementations.
+For example, we can implement a new version of `Runnable` that will perform some optimized operation.
+```scala
+def mkRunnable(x: Int)(using Quotes): Expr[Runnable] = '{
+  class MyRunnable extends Runnable:
+    def run(): Unit = ... // generate some custom code that uses `x`
+  new MyRunnable
+}
+```
+
+The quoted class is a local class, and its type cannot escape the enclosing quote.
+The class must be used inside the quote, or an instance of it can be returned through a known interface (`Runnable` in this case).
+
+## Quote Pattern Matching
+
+It is sometimes necessary to analyze the structure of the code or decompose the code into its sub-expressions.
+A classic example is an embedded DSL, where a macro knows a set of definitions that it can reinterpret while compiling the code (for instance, to perform optimizations).
+In the following example, we extend our previous implementation of `powerCode` to look into `x` to perform further optimizations.
+
+```scala
+def fusedPowCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] =
+  x match
+    case '{ power($y, $m) } => // we have (y^m)^n
+      fusedPowCode(y, '{ $n * $m }) // generate code for y^(n*m)
+    case _ =>
+      '{ power($x, $n) }
+```
+
+
+#### Sub-patterns
+
+In quoted patterns, the `$` binds the sub-expression to an `Expr` that can be used in that `case` branch.
+The contents of `${..}` in a quote pattern are regular Scala patterns.
+For example, we can use the `Expr(_)` pattern within the `${..}` to only match if it is a known value and extract it.
+
+```scala
+def fusedUnrolledPowCode(x: Expr[Double], n: Int)(using Quotes): Expr[Double] =
+  x match
+    case '{ power($y, ${Expr(m)}) } => // we have (y^m)^n
+      fusedUnrolledPowCode(y, n * m) // generate code for y * ... * y
+    case _ =>                          //  ( n*m times )
+      unrolledPowerCode(x, n)
+```
+
+These value extraction sub-patterns can be polymorphic using an instance of `FromExpr`.
+In the following example, we show the implementation of `OptionFromExpr`, which internally uses the `FromExpr[T]` to extract the value using the `Expr(x)` pattern.
+
+```scala
+given OptionFromExpr[T](using Type[T], FromExpr[T]): FromExpr[Option[T]] with
+  def unapply(x: Expr[Option[T]])(using Quotes): Option[Option[T]] =
+    x match
+      case '{ Some( ${Expr(x)} ) } => Some(Some(x))
+      case '{ None } => Some(None)
+      case _ => None
+```
+
+
+
+#### Closed patterns
+Patterns may contain two kinds of references: global references such as the call to the `power` method in `'{ power(...) }`, or references to bindings defined in the pattern such as `x` in `case '{ (x: Int) => x }`.
+When extracting an expression from a quote, we need to ensure that we do not extrude any variable from the scope where it is defined.
+
+```scala
+'{ (x: Int) => x + 1 } match
+  case '{ (y: Int) => $z } =>
+    // should not match, otherwise: z = '{ x + 1 }
+```
+
+In this example, we see that the pattern should not match.
+Otherwise, any use of the expression `z` would contain an unbound reference to `x`.
+To avoid any such extrusion, we only match on a `${..}` if its expression is closed under the definitions within the pattern.
+Therefore, the pattern will not match if the expression is not closed.
+
+#### HOAS patterns
+To allow extracting expressions that may contain extruded references, we offer a _higher-order abstract syntax_ (HOAS) pattern `$f(y)` (or `$f(y1,...,yn)`).
+This pattern will eta-expand the sub-expression with respect to `y` and bind it to `f`.
+The lambda arguments will replace the variables that might have been extruded.
+
+```scala
+'{ ((x: Int) => x + 1).apply(2) } match
+  case '{ ((y: Int) => $f(y)).apply($z: Int) } =>
+    // f may contain references to `x` (replaced by `$y`)
+    // f = (y: Expr[Int]) => '{ $y + 1 }
+    f(z) // generates '{ 2 + 1 }
+```
+
+
+A HOAS pattern `$x(y1,...,yn)` will only match the expression if it does not contain references to variables defined in the pattern that are not in the set `y1,...,yn`.
+In other words, the pattern will match if the expression only contains references to variables defined in the pattern that are in `y1,...,yn`.
+Note that the HOAS patterns `$x()` are semantically equivalent to closed patterns `$x`.
+
+
+#### Type variables
+
+Expressions may contain types that are not statically known.
+For example, an `Expr[List[Int]]` may contain `list.map(_.toInt)` where `list` is a `List` of some type.
+To cover all the possible cases, we would need to explicitly match `list` on all possible types (`List[Int]`, `List[Int => Int]`, ...).
+This is an infinite set of types, and therefore of pattern cases.
+Even if we knew all the possible types that a specific program could use, we may still end up with an unmanageable number of cases.
+To overcome this, we introduce type variables in quoted patterns, which will match any type.
+
+In the following example, we show how type variables `t` and `u` match all possible pairs of consecutive calls to `map` on lists.
+In quoted patterns, types with lowercase names are identified as type variables.
+This follows the same notation as type variables used in normal patterns.
+```scala
+def fuseMapCode(x: Expr[List[Int]]): Expr[List[Int]] =
+  x match
+    case '{ ($ls: List[t]).map[u]($f).map[Int]($g) } =>
+      '{ $ls.map($g.compose($f)) }
+    ...
+
+fuseMapCode('{ List(1.2).map(f).map(g) }) // '{ List(1.2).map(g.compose(f)) }
+fuseMapCode('{ List('a').map(h).map(i) }) // '{ List('a').map(i.compose(h)) }
+```
+Variables `f` and `g` are inferred to be of type `Expr[t => u]` and `Expr[u => Int]` respectively.
+Subsequently, we can infer `$g.compose($f)` to be of type `Expr[t => Int]`, which is the type of the argument of `$ls.map(..)`.
+
+Type variables are abstract types that will be erased; this implies that to reference them in the second quote we need a given `Type[t]` and `Type[u]`.
+The quoted pattern will implicitly provide those given types.
+At run-time, when the pattern matches, the type of `t` and `u` will be known, and the `Type[t]` and `Type[u]` will contain the precise types in the expression.
+
+As `Expr` is covariant, the statically known type of the expression might not be the actual type.
+Type variables can also be used to recover the precise type of the expression.
+```scala
+def let(x: Expr[Any])(using Quotes): Expr[Any] =
+  x match
+    case '{ $x: t } =>
+      '{ val y: t = $x; y }
+
+let('{1}) // will return an `Expr[Any]` that contains an `Expr[Int]`
+```
+
+While type variables can be defined in the middle of the pattern, their normal form is to be defined as a `type` with a lowercase name at the start of the pattern.
+We use the Scala backquote `` `t` `` naming convention, which interprets the string within the backquotes as a literal name identifier.
+This is typically used when we have names that contain special characters that are not allowed for normal Scala identifiers.
+But we use it to explicitly state that this is a reference to that name and not the introduction of a new variable.
+```scala
+  case '{ type t; $x: `t` } =>
+```
+This is a bit more verbose but has some expressivity advantages, such as allowing us to define bounds on the variables and to refer to them several times in any scope of the pattern.
+
+```scala
+  case '{ type t >: List[Int] <: Seq[Int]; $x: `t` } =>
+  case '{ type t; $x: (`t`, `t`) } =>
+```
+
+
+#### Type patterns
+It is possible to only have a type and no expression of that type.
+To be able to inspect a type, we introduce the quoted type pattern `case '[..] =>`.
+It works the same way as a quoted pattern but is restricted to contain a type.
+Type variables can be used in quoted type patterns to extract a type.
+
+```scala
+def empty[T: Type](using Quotes): Expr[T] =
+  Type.of[T] match
+    case '[String] => '{ "" }
+    case '[List[t]] => '{ List.empty[t] }
+    ...
+```
+
+`Type.of[T]` is used to summon the given instance of `Type[T]` in scope; it is equivalent to `summon[Type[T]]`.
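+
+For example (a usage sketch of the `empty` method above; `emptyPair` is an invented name):
+
+```scala
+def emptyPair(using Quotes): (Expr[String], Expr[List[Int]]) =
+  (empty[String], empty[List[Int]]) // ('{ "" }, '{ List.empty[Int] })
+```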
+
+#### Type testing and casting
+It is important to note that instance checks and casts on `Expr`, such as `isInstanceOf[Expr[T]]` and `asInstanceOf[Expr[T]]`, will only check if the instance is of the class `Expr` but will not be able to check the `T` argument.
+These cases will issue a warning at compile-time, but if they are ignored, they can result in unexpected behavior.
+
+These operations can be supported correctly in the system.
+For a simple type test, it is possible to use the `isExprOf[T]` method of `Expr` to check if it is an instance of that type.
+Similarly, it is possible to use `asExprOf[T]` to cast an expression to a given type.
+These operations use a given `Type[T]` to work around type erasure.
+
+
+## Sub-Expression Transformation
+
+The system provides a mechanism to transform all sub-expressions of an expression.
+This is useful when the sub-expressions we want to transform are deep in the expression.
+It is also necessary if the expression contains sub-expressions that cannot be matched using quoted patterns (such as local class definitions).
+
+```scala
+trait ExprMap:
+  def transform[T](e: Expr[T])(using Type[T])(using Quotes): Expr[T]
+  def transformChildren[T](e: Expr[T])(using Type[T])(using Quotes): Expr[T] =
+    ...
+```
+
+Users can extend the `ExprMap` trait and implement the `transform` method.
+This interface is flexible and can implement top-down, bottom-up, or other transformations.
+
+```scala
+object OptimizeIdentity extends ExprMap:
+  def transform[T](e: Expr[T])(using Type[T])(using Quotes): Expr[T] =
+    transformChildren(e) match // bottom-up transformation
+      case '{ identity($x) } => x
+      case _ => e
+```
+
+The `transformChildren` method is implemented as a primitive that knows how to reach all the direct sub-expressions and calls `transform` on each one.
+The type passed to `transform` is the expected type of this sub-expression in its expression.
+For example, while transforming `Some(1)` in `'{ val x: Option[Int] = Some(1); ... }`, the type will be `Option[Int]` and not `Some[Int]`.
+This implies that we can safely transform `Some(1)` into `None`.
+
+## Staged Implicit Summoning
+When summoning implicit arguments using `summon`, we will find the given instances in the current scope.
+It is possible to use `summon` to get staged implicit arguments by explicitly staging them first.
+In the following example, we can pass an implicit `Ordering[T]` in a macro as an `Expr[Ordering[T]]` to its implementation.
+Then we can splice it and give it implicitly in the next stage.
+
+```scala
+inline def treeSetFor[T](using ord: Ordering[T]): Set[T] =
+  ${ setExpr[T](using 'ord) }
+
+def setExpr[T: Type](using ord: Expr[Ordering[T]])(using Quotes): Expr[Set[T]] =
+  '{ given Ordering[T] = $ord; new TreeSet[T]() }
+```
+
+We pass it as an implicit `Expr[Ordering[T]]` because there might be intermediate methods that can pass it along implicitly.
+
+An alternative is to summon implicit values in the scope where the macro is invoked.
+Using the `Expr.summon` method we get an optional expression containing the implicit instance.
+This provides the ability to search for implicit instances conditionally.
+
+```scala
+def summon[T: Type](using Quotes): Option[Expr[T]]
+```
+
+```scala
+inline def setFor[T]: Set[T] =
+  ${ setForExpr[T] }
+
+def setForExpr[T: Type](using Quotes): Expr[Set[T]] =
+  Expr.summon[Ordering[T]] match
+    case Some(ord) =>
+      '{ new TreeSet[T]()(using $ord) }
+    case _ =>
+      '{ new HashSet[T] }
+```
+
+## More details
+
+[More details](./macros-spec.md)
+
+
+[^1]: [Scalable Metaprogramming in Scala 3](https://infoscience.epfl.ch/record/299370)
+[^2]: [Semantics-preserving inlining for metaprogramming](https://dl.acm.org/doi/10.1145/3426426.3428486)
+[^3]: Implemented in the Scala 3 Dotty project https://github.com/lampepfl/dotty. sbt library dependency `"org.scala-lang" %% "scala3-staging" % scalaVersion.value`
+[^4]: Using the `-Xcheck-macros` compiler flag
diff --git a/docs/_spec/TODOreference/metaprogramming/metaprogramming.md b/docs/_spec/TODOreference/metaprogramming/metaprogramming.md
new file mode 100644
index 000000000000..3bce2d7c922e
--- /dev/null
+++ b/docs/_spec/TODOreference/metaprogramming/metaprogramming.md
@@ -0,0 +1,47 @@
+---
+layout: index
+title: "Metaprogramming"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming.html
+---
+
+The following pages introduce the redesign of metaprogramming in Scala. They
+introduce the following fundamental facilities:
+
+1. [`inline`](./inline.md) is a new modifier that guarantees that
+   a definition will be inlined at the point of use. The primary motivation
+   behind inline is to reduce the overhead behind function calls and access to
+   values. The expansion will be performed by the Scala compiler during the
+   `Typer` compiler phase. As opposed to inlining in some other ecosystems,
+   inlining in Scala is not merely a request to the compiler but is a
+   _command_. The reason is that inlining in Scala can drive other compile-time
+   operations, like inline pattern matching (enabling type-level
+   programming), macros (enabling compile-time, generative, metaprogramming) and
+   runtime code generation (multi-stage programming).
+
+2. [Compile-time ops](./compiletime-ops.md) are helper definitions in the
+   standard library that provide support for compile-time operations over values and types.
+
+3. [Macros](./macros.md) are built on two well-known fundamental
+   operations: quotation and splicing. Quotation converts program code to
+   data, specifically, a (tree-like) representation of this code. It is
+   expressed as `'{...}` for expressions and as `'[...]` for types. Splicing,
+   expressed as `${ ... }`, goes the other way: it converts a program's representation
+   to program code. Together with `inline`, these two abstractions allow
+   us to construct program code programmatically.
+
+4. [Runtime Staging](./staging.md) Where macros construct code at _compile-time_,
+   staging lets programs construct new code at _runtime_. That way,
+   code generation can depend not only on static data but also on data available at runtime. This splits the evaluation of the program in two or more phases or ...
+   stages. Consequently, this method of generative programming is called "Multi-Stage Programming". Staging is built on the same foundations as macros. It uses
+   quotes and splices, but leaves out `inline`.
+
+5. [Reflection](./reflection.md) Quotations are a "black-box"
+   representation of code. They can be parameterized and composed using
+   splices, but their structure cannot be analyzed from the outside. TASTy
+   reflection gives a way to analyze code structure by partly revealing the representation type of a piece of code in a standard API. The representation
+   type is a form of typed abstract syntax tree, which gives rise to the `TASTy`
+   moniker.
+
+6. [TASTy Inspection](./tasty-inspect.md) Typed abstract syntax trees are serialized
+   in a custom compressed binary format stored in `.tasty` files. TASTy inspection allows
+   us to load these files and analyze their content's tree structure.
diff --git a/docs/_spec/TODOreference/metaprogramming/reflection.md b/docs/_spec/TODOreference/metaprogramming/reflection.md
new file mode 100644
index 000000000000..b2d492657a4e
--- /dev/null
+++ b/docs/_spec/TODOreference/metaprogramming/reflection.md
@@ -0,0 +1,131 @@
+---
+layout: doc-page
+title: "Reflection"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/reflection.html
+---
+
+Reflection enables inspection and construction of Typed Abstract Syntax Trees
+(Typed-AST). It may be used on quoted expressions (`quoted.Expr`) and quoted
+types (`quoted.Type`) from [Macros](./macros.md) or on full TASTy files.
+
+If you are writing macros, please first read [Macros](./macros.md).
+You may find all you need without using quote reflection.
+
+## API: From quotes and splices to TASTy reflect trees and back
+
+With `quoted.Expr` and `quoted.Type` we can compute code but also analyze code
+by inspecting the ASTs. [Macros](./macros.md) provide the guarantee that the
+generation of code will be type-correct. Using quote reflection will break these
+guarantees and may fail at macro expansion time, hence additional explicit
+checks must be done.
+
+To provide reflection capabilities in macros we need to add an implicit parameter
+of type `scala.quoted.Quotes` and import `quotes.reflect.*` from it in the scope
+where it is used.
+
+```scala
+import scala.quoted.*
+
+inline def natConst(inline x: Int): Int = ${natConstImpl('{x})}
+
+def natConstImpl(x: Expr[Int])(using Quotes): Expr[Int] =
+  import quotes.reflect.*
+  ...
+```
+
+### Extractors
+
+`import quotes.reflect.*` will provide all extractors and methods on `quotes.reflect.Tree`s.
+For example, the `Literal(_)` extractor is used below.
+
+```scala
+def natConstImpl(x: Expr[Int])(using Quotes): Expr[Int] =
+  import quotes.reflect.*
+  val tree: Term = x.asTerm
+  tree match
+    case Inlined(_, _, Literal(IntConstant(n))) =>
+      if n <= 0 then
+        report.error("Parameter must be a natural number")
+        '{0}
+      else
+        tree.asExprOf[Int]
+    case _ =>
+      report.error("Parameter must be a known constant")
+      '{0}
+```
+
+We can easily know which extractors are needed using `Printer.TreeStructure.show`,
+which returns the string representation of the structure of the tree. Other printers
+can also be found in the `Printer` module.
+
+```scala
+tree.show(using Printer.TreeStructure)
+// or
+Printer.TreeStructure.show(tree)
+```
+
+The methods `quotes.reflect.Term.{asExpr, asExprOf}` provide a way to go back to
+a `quoted.Expr`. Note that `asExpr` returns an `Expr[Any]`. On the other hand,
+`asExprOf[T]` returns an `Expr[T]`; if the type does not conform to it, an exception
+will be thrown at run-time.
+
+### Positions
+
+The `Position` in the context provides an `ofMacroExpansion` value. It corresponds
+to the expansion site for macros. The macro authors can obtain various information
+about that expansion site. The example below shows how we can obtain position
+information such as the start line, the end line or even the source code at the
+expansion point.
+
+```scala
+def macroImpl()(using quotes: Quotes): Expr[Unit] =
+  import quotes.reflect.*
+  val pos = Position.ofMacroExpansion
+
+  val path = pos.sourceFile.jpath.toString
+  val start = pos.start
+  val end = pos.end
+  val startLine = pos.startLine
+  val endLine = pos.endLine
+  val startColumn = pos.startColumn
+  val endColumn = pos.endColumn
+  val sourceCode = pos.sourceCode
+  ...
+```
+
+### Tree Utilities
+
+`quotes.reflect` contains three facilities for tree traversal and
+transformation.
+
+`TreeAccumulator` ties the knot of a traversal. By calling `foldOverTree(x, tree)(owner)`
+we can dive into the `tree` node and start accumulating values of type `X` (e.g.,
+of type `List[Symbol]` if we want to collect symbols). The code below, for
+example, collects the `val` definitions in the tree.
+
+```scala
+// assumes `import quotes.reflect.*` is in scope
+def collectPatternVariables(tree: Tree): List[Symbol] =
+  val acc = new TreeAccumulator[List[Symbol]]:
+    def foldTree(syms: List[Symbol], tree: Tree)(owner: Symbol): List[Symbol] = tree match
+      case vdef @ ValDef(_, _, rhs) =>
+        val newSyms = vdef.symbol :: syms
+        // the right-hand side is optional; fold over it when present
+        rhs.fold(newSyms)(t => foldTree(newSyms, t)(vdef.symbol))
+      case _ =>
+        foldOverTree(syms, tree)(owner)
+  acc.foldTree(Nil, tree)(Symbol.spliceOwner)
+```
+
+A `TreeTraverser` extends a `TreeAccumulator` and performs the same traversal
+but without returning any value. Finally, a `TreeMap` performs a transformation.
+
+#### ValDef.let
+
+`quotes.reflect.ValDef` also offers a method `let` that allows us to bind the `rhs` (right-hand side) to a `val` and use it in `body`.
+Additionally, `lets` binds the given `terms` to names and allows us to use them in the `body`.
+Their type definitions are shown below:
+
+```scala
+def let(rhs: Term)(body: Ident => Term): Term = ...
+
+def lets(terms: List[Term])(body: List[Term] => Term): Term = ...
+```
diff --git a/docs/_spec/TODOreference/metaprogramming/simple-smp.md b/docs/_spec/TODOreference/metaprogramming/simple-smp.md
new file mode 100644
index 000000000000..2ba0155ad329
--- /dev/null
+++ b/docs/_spec/TODOreference/metaprogramming/simple-smp.md
@@ -0,0 +1,232 @@
+---
+layout: doc-page
+title: "The Meta-theory of Symmetric Metaprogramming"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/simple-smp.html
+---
+
+This note presents a simplified variant of
+[principled metaprogramming](./macros.md)
+and sketches its soundness proof. The variant treats only dialogues
+between two stages. A program can have quotes which can contain
+splices (which can contain quotes, which can contain splices, and so
+on). Or the program could start with a splice with embedded
+quotes. The essential restriction is that (1) a term can contain top-level
+quotes or top-level splices, but not both, and (2) quotes cannot appear
+directly inside quotes and splices cannot appear directly inside
+splices. In other words, the universe is restricted to two phases
+only.
+
+Under this restriction we can simplify the typing rules so that there are
+always exactly two environments instead of having a stack of environments.
+The variant presented here differs from the full calculus also in that we
+replace evaluation contexts with contextual typing rules. While this
+is more verbose, it makes it easier to set up the meta theory.
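+
+For example (our illustration, using the quote `’t` and splice `~t` syntax introduced below), the two-phase restriction admits and rejects nestings as follows:
+
+```
+’(~’u)    allowed:    a splice inside a quote, with a quote inside the splice
+’(’u)     disallowed: a quote directly inside a quote
+~(~u)     disallowed: a splice directly inside a splice
+```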
+
+## Syntax
+```
+Terms         t  ::=  x                 variable
+                      (x: T) => t       lambda
+                      t t               application
+                      ’t                quote
+                      ~t                splice
+
+Simple terms  u  ::=  x  |  (x: T) => u  |  u u
+
+Values        v  ::=  (x: T) => t       lambda
+                      ’u                quoted value
+
+Types         T  ::=  A                 base type
+                      T -> T            function type
+                      ’T                quoted type
+```
+## Operational semantics
+
+### Evaluation
+```
+   ((x: T) => t) v  -->  [x := v]t
+
+   t1 --> t2
+   ---------------
+   t1 t --> t2 t
+
+   t1 --> t2
+   ---------------
+   v t1 --> v t2
+
+   t1 ==> t2
+   -------------
+   ’t1 --> ’t2
+```
+
+### Splicing
+```
+   ~’u ==> u
+
+   t1 ==> t2
+   -------------------------------
+   (x: T) => t1 ==> (x: T) => t2
+
+   t1 ==> t2
+   ---------------
+   t1 t ==> t2 t
+
+   t1 ==> t2
+   ---------------
+   u t1 ==> u t2
+
+   t1 --> t2
+   -------------
+   ~t1 ==> ~t2
+
+```
+## Typing Rules
+
+Typing judgments are of the form `E1 * E2 |- t: T` where `E1, E2` are environments and
+`*` is one of `~` and `’`.
+```
+   x: T in E2
+   ---------------
+   E1 * E2 |- x: T
+
+
+   E1 * E2, x: T1 |- t: T2
+   ---------------------------------
+   E1 * E2 |- (x: T1) => t: T1 -> T2
+
+
+   E1 * E2 |- t1: T2 -> T    E1 * E2 |- t2: T2
+   -------------------------------------------
+   E1 * E2 |- t1 t2: T
+
+
+   E2 ’ E1 |- t: T
+   -----------------
+   E1 ~ E2 |- ’t: ’T
+
+
+   E2 ~ E1 |- t: ’T
+   ----------------
+   E1 ’ E2 |- ~t: T
+```
+
+(Curiously, this looks a bit like a Christmas tree).
+
+## Soundness
+
+The meta-theory typically requires mutual inductions over two judgments.
+
+### Progress Theorem
+
+ 1. If `E1 ~ |- t: T` then either `t = v` for some value `v` or `t --> t2` for some term `t2`.
+ 2. If ` ’ E2 |- t: T` then either `t = u` for some simple term `u` or `t ==> t2` for some term `t2`.
+
+Proof by structural induction over terms.
+
+To prove (1):
+
+ - the cases for variables, lambdas and applications are as in [STLC](https://en.wikipedia.org/wiki/Simply_typed_lambda_calculus).
+ - If `t = ’t2`, then by inversion we have ` ’ E1 |- t2: T2` for some type `T2`.
+   By the second [induction hypothesis](https://en.wikipedia.org/wiki/Mathematical_induction) (I.H.), we have one of:
+   - `t2 = u`, hence `’t2` is a value,
+   - `t2 ==> t3`, hence `’t2 --> ’t3`.
+ - The case `t = ~t2` is not typable.
+
+To prove (2):
+
+ - If `t = x` then `t` is a simple term.
+ - If `t = (x: T) => t2`, then either `t2` is a simple term, in which case `t` is as well.
+   Or by the second I.H. `t2 ==> t3`, in which case `t ==> (x: T) => t3`.
+ - If `t = t1 t2` then one of three cases applies:
+
+   - `t1` and `t2` are simple terms, then `t` is a simple term as well.
+   - `t1` is not a simple term. Then by the second I.H., `t1 ==> t12`, hence `t ==> t12 t2`.
+   - `t1` is a simple term but `t2` is not. Then by the second I.H. `t2 ==> t22`, hence `t ==> t1 t22`.
+
+ - The case `t = ’t2` is not typable.
+ - If `t = ~t2` then by inversion we have `E2 ~ |- t2: ’T2`, for some type `T2`.
+   By the first I.H., we have one of
+
+   - `t2 = v`. Since `t2: ’T2`, we must have `v = ’u`, for some simple term `u`, hence `t = ~’u`.
+     By quote-splice reduction, `t ==> u`.
+   - `t2 --> t3`. Then by the context rule for `~t`, `t ==> ~t3`.
+
+
+### Substitution Lemma
+
+ 1. If `E1 ~ E2 |- s: S` and `E1 ~ E2, x: S |- t: T` then `E1 ~ E2 |- [x := s]t: T`.
+ 2. If `E1 ~ E2 |- s: S` and `E2, x: S ’ E1 |- t: T` then `E2 ’ E1 |- [x := s]t: T`.
+
+The proofs are by induction on typing derivations for `t`, analogous
+to the proof for STLC (with (2) a bit simpler than (1) since we do not
+need to swap lambda bindings with the bound variable `x`). The
+arguments that link the two hypotheses are as follows.
+
+To prove (1), let `t = ’t1`. Then `T = ’T1` for some type `T1` and the last typing rule is
+```
+    E2, x: S ’ E1 |- t1: T1
+   -------------------------
+   E1 ~ E2, x: S |- ’t1: ’T1
+```
+By the second I.H. `E2 ’ E1 |- [x := s]t1: T1`. By typing, `E1 ~ E2 |- ’[x := s]t1: ’T1`.
+Since `[x := s]t = [x := s](’t1) = ’[x := s]t1` we get `[x := s]t: ’T1`.
+
+To prove (2), let `t = ~t1`. Then the last typing rule is
+```
+   E1 ~ E2, x: S |- t1: ’T
+   -----------------------
+   E2, x: S ’ E1 |- ~t1: T
+```
+By the first I.H., `E1 ~ E2 |- [x := s]t1: ’T`. By typing, `E2 ’ E1 |- ~[x := s]t1: T`.
+Since `[x := s]t = [x := s](~t1) = ~[x := s]t1` we get `[x := s]t: T`.
+
+
+### Preservation Theorem
+
+ 1. If `E1 ~ E2 |- t1: T` and `t1 --> t2` then `E1 ~ E2 |- t2: T`.
+ 2. If `E1 ’ E2 |- t1: T` and `t1 ==> t2` then `E1 ’ E2 |- t2: T`.
+
+The proof is by structural induction on evaluation derivations. The proof of (1) is analogous
+to the proof for STLC, using the substitution lemma for the beta reduction case, with the addition of reduction of quoted terms, which goes as follows:
+
+ - Assume the last rule was
+   ```
+      t1  ==>  t2
+     -------------
+     ’t1  -->  ’t2
+   ```
+   By inversion of typing rules, we must have `T = ’T1` for some type `T1` such that `t1: T1`.
+   By the second I.H., `t2: T1`, hence `’t2: ’T1`.
+
+
+To prove (2):
+
+ - Assume the last rule was `~’u ==> u`. The typing proof of `~’u` must have the form
+
+   ```
+    E1 ’ E2 |- u: T
+   -----------------
+   E1 ~ E2 |- ’u: ’T
+   -----------------
+   E1 ’ E2 |- ~’u: T
+   ```
+   Hence, `E1 ’ E2 |- u: T`.
+
+ - Assume the last rule was
+   ```
+             t1  ==>  t2
+   -------------------------------
+   (x: S) => t1  ==>  (x: S) => t2
+   ```
+   By typing inversion, `E1 ’ E2, x: S |- t1: T1` for some type `T1` such that `T = S -> T1`.
+   By the I.H., `t2: T1`. By the typing rule for lambdas the result follows.
+
+ - The context rules for applications are equally straightforward.
+
+ - Assume the last rule was
+   ```
+      t1  -->  t2
+     -------------
+     ~t1  ==>  ~t2
+   ```
+   By inversion of typing rules, we must have `t1: ’T`.
+   By the first I.H., `t2: ’T`, hence `~t2: T`.
diff --git a/docs/_spec/TODOreference/metaprogramming/staging.md b/docs/_spec/TODOreference/metaprogramming/staging.md
new file mode 100644
index 000000000000..6d9166e8249e
--- /dev/null
+++ b/docs/_spec/TODOreference/metaprogramming/staging.md
@@ -0,0 +1,121 @@
+---
+layout: doc-page
+title: "Runtime Multi-Stage Programming"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/staging.html
+---
+
+The framework expresses compile-time metaprogramming and multi-stage
+programming at the same time. We can think of compile-time metaprogramming as a
+two-stage compilation process: in the first stage we write code in top-level
+splices, which is used for code generation (macros) and performs all necessary
+evaluations at compile time; the second stage is the object program that we run
+as usual. What if we could synthesize code at run-time and offer one extra stage
+to the programmer? Then we can have a value of type `Expr[T]` at run-time that we
+can essentially treat as a typed syntax tree that we can either _show_ as a
+string (pretty-print) or compile and run. If the number of quotes exceeds the
+number of splices by more than one (effectively handling at run-time values of type
+`Expr[Expr[T]]`, `Expr[Expr[Expr[T]]]`, ...) then we talk about Multi-Stage
+Programming.
+
+The motivation behind this _paradigm_ is to let runtime information affect or
+guide code generation.
+
+Intuition: The phase in which code is run is determined by the difference
+between the number of splice scopes and quote scopes in which it is embedded.
+
+ - If there are more splices than quotes, the code is run at compile time, i.e.
+   as a macro. In the general case, this means running an interpreter that
+   evaluates the code, which is represented as a typed abstract syntax tree. The
+   interpreter can fall back to reflective calls when evaluating an application
+   of a previously compiled method. If the splice excess is more than one, it
+   would mean that a macro’s implementation code (as opposed to the code it
+   expands to) invokes other macros. If macros are realized by interpretation,
+   this would lead to towers of interpreters, where the first interpreter would
+   itself interpret the code of an interpreter that possibly interprets another
+   interpreter, and so on.
+
+ - If the number of splices equals the number of quotes, the code is compiled
+   and run as usual.
+
+ - If the number of quotes exceeds the number of splices, the code is staged.
+   That is, it produces a typed abstract syntax tree or type structure at
+   run-time. A quote excess of more than one corresponds to multi-staged
+   programming.
+
+Providing an interpreter for the full language is quite difficult, and it is
+even more difficult to make that interpreter run efficiently. So we currently
+impose the following restrictions on the use of splices.
+
+ 1. A top-level splice must appear in an inline method (turning that method
+    into a macro).
+
+ 2. The splice must call a previously compiled
+    method passing quoted arguments, constant arguments or inline arguments.
+
+ 3. Splices inside splices (but no intervening quotes) are not allowed.
+
+
+## API
+
+The framework as discussed so far allows code to be staged, i.e. be prepared
+to be executed at a later stage. To run that code, there is another method,
+`run`, provided by `scala.quoted.staging`. Note that `$` and `run` both map from `Expr[T]`
+to `T`, but only `$` is subject to [Cross-Stage Safety](./macros.md#cross-stage-safety), whereas `run` is just a normal method.
+`scala.quoted.staging.run` provides a `Quotes` that can be used to show the expression in its scope.
+On the other hand, `scala.quoted.staging.withQuotes` provides a `Quotes` without evaluating the expression.
+
+```scala
+package scala.quoted.staging
+
+def run[T](expr: Quotes ?=> Expr[T])(using Compiler): T = ...
+
+def withQuotes[T](thunk: Quotes ?=> T)(using Compiler): T = ...
+```
+
+## Create a new Scala 3 project with staging enabled
+
+```shell
+sbt new scala/scala3-staging.g8
+```
+
+From [`scala/scala3-staging.g8`](https://github.com/scala/scala3-staging.g8).
+
+It will create a project with the necessary dependencies and some examples.
+
+In case you prefer to create the project on your own, make sure to define the following dependency in your [`build.sbt` build definition](https://www.scala-sbt.org/1.x/docs/Basic-Def.html)
+
+```scala
+libraryDependencies += "org.scala-lang" %% "scala3-staging" % scalaVersion.value
+```
+
+and in case you use `scalac`/`scala` directly, then use the `-with-compiler` flag for both:
+
+```shell
+scalac -with-compiler -d out Test.scala
+scala -with-compiler -classpath out Test
+```
+
+## Example
+
+Now take exactly the same example as in [Macros](./macros.md). Assume that we
+do not want to pass an array statically but generate code at run-time and pass
+the value, also at run-time. Note how we make a future-stage function of type
+`Expr[Array[Int] => Int]` (`stagedSum` in the code below).
+Using `staging.run { ... }` we can evaluate an
+expression at runtime. Within the scope of `staging.run` we can also invoke `show` on an expression
+to get a source-like representation of the expression.
+
+```scala
+import scala.quoted.*
+
+// make available the necessary compiler for runtime code generation
+given staging.Compiler = staging.Compiler.make(getClass.getClassLoader)
+
+val f: Array[Int] => Int = staging.run {
+  val stagedSum: Expr[Array[Int] => Int] =
+    '{ (arr: Array[Int]) => ${ sum('arr) } }
+  println(stagedSum.show) // Prints "(arr: Array[Int]) => { var sum = 0; ... }"
+  stagedSum
+}
+
+f.apply(Array(1, 2, 3)) // Returns 6
+```
diff --git a/docs/_spec/TODOreference/metaprogramming/tasty-inspect.md b/docs/_spec/TODOreference/metaprogramming/tasty-inspect.md
new file mode 100644
index 000000000000..e643775243e0
--- /dev/null
+++ b/docs/_spec/TODOreference/metaprogramming/tasty-inspect.md
@@ -0,0 +1,57 @@
+---
+layout: doc-page
+title: "TASTy Inspection"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/metaprogramming/tasty-inspect.html
+---
+
+```scala
+libraryDependencies += "org.scala-lang" %% "scala3-tasty-inspector" % scalaVersion.value
+```
+
+TASTy files contain the full typed tree of a class including source positions
+and documentation. This is ideal for tools that analyze or extract semantic
+information from the code. To avoid the hassle of working directly with the TASTy
+file, we provide the `Inspector`, which loads the contents and exposes it
+through the TASTy reflect API.
+
+## Inspecting TASTy files
+
+To inspect the trees of a TASTy file, a consumer can be defined in the following way.
+
+```scala
+import scala.quoted.*
+import scala.tasty.inspector.*
+
+class MyInspector extends Inspector:
+  def inspect(using Quotes)(tastys: List[Tasty[quotes.type]]): Unit =
+    import quotes.reflect.*
+    for tasty <- tastys do
+      val tree = tasty.ast
+      // Do something with the tree
+```
+
+Then the consumer can be instantiated with the following code to get the tree of the `foo/Bar.tasty` file.
+
+```scala
+object Test:
+  def main(args: Array[String]): Unit =
+    val tastyFiles = List("foo/Bar.tasty")
+    TastyInspector.inspectTastyFiles(tastyFiles)(new MyInspector)
+```
+
+Note that if we need to run the `main` method (defined above in the object `Test`) after compilation, we need to make the compiler available to the runtime:
+
+```shell
+scalac -d out Test.scala
+scala -with-compiler -classpath out Test
+```
+
+## Template project
+
+Using sbt version `1.1.5+`, do:
+
+```shell
+sbt new scala/scala3-tasty-inspector.g8
+```
+
+in the folder where you want to clone the template.
diff --git a/docs/_spec/TODOreference/new-types/dependent-function-types-spec.md b/docs/_spec/TODOreference/new-types/dependent-function-types-spec.md
new file mode 100644
index 000000000000..f3237ddf7b9a
--- /dev/null
+++ b/docs/_spec/TODOreference/new-types/dependent-function-types-spec.md
@@ -0,0 +1,125 @@
+---
+layout: doc-page
+title: "Dependent Function Types - More Details"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/new-types/dependent-function-types-spec.html
+---
+
+Initial implementation in [PR #3464](https://github.com/lampepfl/dotty/pull/3464).
+
+## Syntax
+
+```
+FunArgTypes    ::=  InfixType
+                 |  ‘(’ [ FunArgType {',' FunArgType } ] ‘)’
+                 |  ‘(’ TypedFunParam {',' TypedFunParam } ‘)’
+TypedFunParam  ::=  id ‘:’ Type
+```
+
+Dependent function types associate to the right, e.g.
+`(s: S) => (t: T) => U` is the same as `(s: S) => ((t: T) => U)`.
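+
+For instance, the following sketch applies such a nested dependent function one
+argument at a time (the trait `C` with type member `M` is made up for
+illustration):
+
+```scala
+trait C { type M; val m: M }
+
+// Parses as (s: C) => ((t: C) => t.M):
+val nested: (s: C) => (t: C) => t.M = s => t => t.m
+```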
+
+## Implementation
+
+Dependent function types are shorthands for class types that define `apply`
+methods with a dependent result type. Dependent function types desugar to
+refinement types of `scala.FunctionN`. A dependent function type
+`(x1: K1, ..., xN: KN) => R` of arity `N` translates to:
+
+```scala
+FunctionN[K1, ..., KN, R']:
+  def apply(x1: K1, ..., xN: KN): R
+```
+
+where the result type parameter `R'` is the least upper approximation of the
+precise result type `R` without any reference to value parameters `x1, ..., xN`.
+
+The syntax and semantics of anonymous dependent functions are identical to
+those of regular functions. Eta expansion is naturally generalized to produce
+dependent function types for methods with dependent result types.
+
+Dependent functions can be implicit, and generalize to arity `N > 22` in the
+same way that other functions do, see
+[the corresponding documentation](../dropped-features/limit22.md).
+
+## Examples
+
+The example below defines a trait `C` and the two dependent function types
+`DF` and `IDF` and prints the results of the respective function applications:
+
+[depfuntype.scala]: https://github.com/lampepfl/dotty/blob/main/tests/pos/depfuntype.scala
+
+```scala
+trait C { type M; val m: M }
+
+type DF = (x: C) => x.M
+
+type IDF = (x: C) ?=> x.M
+
+@main def test =
+  val c = new C { type M = Int; val m = 3 }
+
+  val depfun: DF = (x: C) => x.m
+  val t = depfun(c)
+  println(s"t=$t") // prints "t=3"
+
+  val idepfun: IDF = summon[C].m
+  val u = idepfun(using c)
+  println(s"u=$u") // prints "u=3"
+
+```
+
+In the following example the dependent type `f.Eff` refers to the effect type `CanThrow`:
+
+[eff-dependent.scala]: https://github.com/lampepfl/dotty/blob/main/tests/run/eff-dependent.scala
+
+```scala
+trait Effect
+
+// Type X => Y
+abstract class Fun[-X, +Y]:
+  type Eff <: Effect
+  def apply(x: X): Eff ?=> Y
+
+class CanThrow extends Effect
+class CanIO extends Effect
+
+given ct: CanThrow = new CanThrow
+given ci: CanIO = new CanIO
+
+class I2S extends Fun[Int, String]:
+  type Eff = CanThrow
+  def apply(x: Int) = x.toString
+
+class S2I extends Fun[String, Int]:
+  type Eff = CanIO
+  def apply(x: String) = x.length
+
+// def map(f: A => B)(xs: List[A]): List[B]
+def map[A, B](f: Fun[A, B])(xs: List[A]): f.Eff ?=> List[B] =
+  xs.map(f.apply)
+
+// def mapFn[A, B]: (A => B) -> List[A] -> List[B]
+def mapFn[A, B]: (f: Fun[A, B]) => List[A] => f.Eff ?=> List[B] =
+  f => xs => map(f)(xs)
+
+// def compose(f: A => B)(g: B => C)(x: A): C
+def compose[A, B, C](f: Fun[A, B])(g: Fun[B, C])(x: A):
+  f.Eff ?=> g.Eff ?=> C =
+  g(f(x))
+
+// def composeFn: (A => B) -> (B => C) -> A -> C
+def composeFn[A, B, C]:
+  (f: Fun[A, B]) => (g: Fun[B, C]) => A => f.Eff ?=> g.Eff ?=> C =
+  f => g => x => compose(f)(g)(x)
+
+@main def test =
+  val i2s = new I2S
+  val s2i = new S2I
+
+  assert(mapFn(i2s)(List(1, 2, 3)).mkString == "123")
+  assert(composeFn(i2s)(s2i)(22) == 2)
+```
+
+## Type Checking
+
+After desugaring, no additional typing rules are required for dependent function types.
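+
+As a small illustration (a sketch, reusing the trait `C` from the examples
+above), applying a dependent function is typed exactly like the corresponding
+dependent `apply` call:
+
+```scala
+@main def checkDep =
+  val c = new C { type M = Int; val m = 3 }
+  val depfun: (x: C) => x.M = (x: C) => x.m
+  val n: c.M = depfun(c) // typed as depfun.apply(c), which has type c.M
+  println(n)             // prints "3"
+```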
diff --git a/docs/_spec/TODOreference/new-types/dependent-function-types.md b/docs/_spec/TODOreference/new-types/dependent-function-types.md
new file mode 100644
index 000000000000..adbee1d8b3c8
--- /dev/null
+++ b/docs/_spec/TODOreference/new-types/dependent-function-types.md
@@ -0,0 +1,49 @@
+---
+layout: doc-page
+title: "Dependent Function Types"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/new-types/dependent-function-types.html
+---
+
+A dependent function type is a function type whose result depends
+on the function's parameters. For example:
+
+```scala
+trait Entry { type Key; val key: Key }
+
+def extractKey(e: Entry): e.Key = e.key // a dependent method
+
+val extractor: (e: Entry) => e.Key = extractKey // a dependent function value
+//             ^^^^^^^^^^^^^^^^^^^
+//             a dependent function type
+```
+
+Scala already has _dependent methods_, i.e. methods where the result
+type refers to some of the parameters of the method. Method
+`extractKey` is an example. Its result type, `e.Key`, refers to its
+parameter `e` (we also say that `e.Key` _depends_ on `e`). But so far it
+was not possible to turn such methods into function values, so that
+they can be passed as parameters to other functions, or returned as
+results. Dependent methods could not be turned into functions simply
+because there was no type that could describe them.
+
+In Scala 3 this is now possible. The type of the `extractor` value above is
+
+```scala
+(e: Entry) => e.Key
+```
+
+This type describes function values that take any argument `e` of type
+`Entry` and return a result of type `e.Key`.
+
+Recall that a normal function type `A => B` is represented as an
+instance of the [`Function1` trait](https://scala-lang.org/api/3.x/scala/Function1.html)
+(i.e. `Function1[A, B]`) and analogously for functions with more parameters. Dependent functions
+are also represented as instances of these traits, but they get an additional
+refinement. In fact, the dependent function type above is just syntactic sugar for
+
+```scala
+Function1[Entry, Entry#Key]:
+  def apply(e: Entry): e.Key
+```
+
+[More details](./dependent-function-types-spec.md)
diff --git a/docs/_spec/TODOreference/new-types/match-types.md b/docs/_spec/TODOreference/new-types/match-types.md
new file mode 100644
index 000000000000..d646dd11880b
--- /dev/null
+++ b/docs/_spec/TODOreference/new-types/match-types.md
@@ -0,0 +1,247 @@
+---
+layout: doc-page
+title: "Match Types"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/new-types/match-types.html
+---
+
+A match type reduces to one of its right-hand sides, depending on the type of
+its scrutinee. For example:
+
+```scala
+type Elem[X] = X match
+  case String => Char
+  case Array[t] => t
+  case Iterable[t] => t
+```
+
+This defines a type that reduces as follows:
+
+```scala
+Elem[String]      =:=  Char
+Elem[Array[Int]]  =:=  Int
+Elem[List[Float]] =:=  Float
+Elem[Nil.type]    =:=  Nothing
+```
+
+Here `=:=` is understood to mean that the left- and right-hand sides are
+mutual subtypes of each other.
+
+In general, a match type is of the form
+
+```scala
+S match { P1 => T1 ... Pn => Tn }
+```
+
+where `S`, `T1`, ..., `Tn` are types and `P1`, ..., `Pn` are type patterns. Type
+variables in patterns start with a lower case letter, as usual.
+
+Match types can form part of recursive type definitions.
+Example:
+
+```scala
+type LeafElem[X] = X match
+  case String => Char
+  case Array[t] => LeafElem[t]
+  case Iterable[t] => LeafElem[t]
+  case AnyVal => X
+```
+
+Recursive match type definitions can also be given an upper bound, like this:
+
+```scala
+type Concat[Xs <: Tuple, +Ys <: Tuple] <: Tuple = Xs match
+  case EmptyTuple => Ys
+  case x *: xs => x *: Concat[xs, Ys]
+```
+
+In this definition, every instance of `Concat[A, B]`, whether reducible or not,
+is known to be a subtype of `Tuple`. This is necessary to make the recursive
+invocation `x *: Concat[xs, Ys]` type check, since `*:` demands a `Tuple` as its
+right operand.
+
+## Dependent Typing
+
+Match types can be used to define dependently typed methods. For instance, here
+is the value level counterpart to the `LeafElem` type defined above (note the
+use of the match type as the return type):
+
+```scala
+def leafElem[X](x: X): LeafElem[X] = x match
+  case x: String      => x.charAt(0)
+  case x: Array[t]    => leafElem(x(0))
+  case x: Iterable[t] => leafElem(x.head)
+  case x: AnyVal      => x
+```
+
+This special mode of typing for match expressions is only used when the
+following conditions are met:
+
+1. The match expression patterns do not have guards
+2. The match expression scrutinee's type is a subtype of the match type
+   scrutinee's type
+3. The match expression and the match type have the same number of cases
+4. The match expression patterns are all [Typed Patterns](https://scala-lang.org/files/archive/spec/2.13/08-pattern-matching.html#typed-patterns),
+   and these types are `=:=` to their corresponding type patterns in the match
+   type
+
+Note that, while each case body is expected to have the type on the right-hand
+side of the corresponding match type case, this does not constrain the match
+type argument. In the example above, the last case body must conform to `X`,
+but that does not constrain `X` to be `AnyVal`; therefore, a `LeafElem[X]`
+inside the body would not reduce. It would remain stuck, and as such be just an
+abstract type.
+
+## Representation of Match Types
+
+The internal representation of a match type
+```
+S match { P1 => T1 ... Pn => Tn }
+```
+is `Match(S, C1, ..., Cn) <: B` where each case `Ci` is of the form
+```
+[Xs] =>> P => T
+```
+
+Here, `[Xs]` is a type parameter clause of the variables bound in pattern `Pi`.
+If there are no bound type variables in a case, the type parameter clause is
+omitted and only the function type `P => T` is kept. So each case is either a
+unary function type or a type lambda over a unary function type.
+
+`B` is the declared upper bound of the match type, or `Any` if no such bound is
+given. We will leave it out in places where it does not matter for the
+discussion. The scrutinee, bound, and pattern types must all be first-order
+types.
+
+## Match Type Reduction
+
+Match type reduction follows the semantics of match expressions, that is, a
+match type of the form `S match { P1 => T1 ... Pn => Tn }` reduces to `Ti` if
+and only if `s: S match { _: P1 => T1 ... _: Pn => Tn }` evaluates to a value of
+type `Ti` for all `s: S`.
+
+The compiler implements the following reduction algorithm:
+
+- If the scrutinee type `S` is an empty set of values (such as `Nothing` or
+  `String & Int`), do not reduce.
+- Sequentially consider each pattern `Pi`:
+  - If `S <: Pi` reduce to `Ti`.
+  - Otherwise, try constructing a proof that `S` and `Pi` are disjoint, or, in
+    other words, that no value `s` of type `S` is also of type `Pi`.
+ - If such proof is found, proceed to the next case (`Pi+1`), otherwise, do + not reduce. + +Disjointness proofs rely on the following properties of Scala types: + +1. Single inheritance of classes +2. Final classes cannot be extended +3. Constant types with distinct values are nonintersecting +4. Singleton paths to distinct values are nonintersecting, such as `object` definitions or singleton enum cases. + +Type parameters in patterns are minimally instantiated when computing `S <: Pi`. +An instantiation `Is` is _minimal_ for `Xs` if all type variables in `Xs` that +appear covariantly and nonvariantly in `Is` are as small as possible and all +type variables in `Xs` that appear contravariantly in `Is` are as large as +possible. Here, "small" and "large" are understood with respect to `<:`. + +For simplicity, we have omitted constraint handling so far. The full formulation +of subtyping tests describes them as a function from a constraint and a pair of +types to either _success_ and a new constraint or _failure_. In the context of +reduction, the subtyping test `S <: [Xs := Is] P` is understood to leave the +bounds of all variables in the input constraint unchanged, i.e. existing +variables in the constraint cannot be instantiated by matching the scrutinee +against the patterns. + +## Subtyping Rules for Match Types + +The following rules apply to match types. For simplicity, we omit environments +and constraints. + +1. The first rule is a structural comparison between two match types: + + ``` + S match { P1 => T1 ... Pm => Tm } <: T match { Q1 => U1 ... Qn => Un } + ``` + + if + + ``` + S =:= T, m >= n, Pi =:= Qi and Ti <: Ui for i in 1..n + ``` + + I.e. scrutinees and patterns must be equal and the corresponding bodies must + be subtypes. No case re-ordering is allowed, but the subtype can have more + cases than the supertype. + +2. The second rule states that a match type and its redux are mutual subtypes. + + ``` + S match { P1 => T1 ... Pn => Tn } <: U + U <: S match { P1 => T1 ... Pn => Tn } + ``` + + if + + `S match { P1 => T1 ... Pn => Tn }` reduces to `U` + +3. The third rule states that a match type conforms to its upper bound: + + ``` + (S match { P1 => T1 ... Pn => Tn } <: B) <: B + ``` + +## Termination + +Match type definitions can be recursive, which means that it's possible to run +into an infinite loop while reducing match types. + +Since reduction is linked to subtyping, we already have a cycle detection +mechanism in place. As a result, the following will already give a reasonable +error message: + +```scala +type L[X] = X match + case Int => L[X] + +def g[X]: L[X] = ??? +``` + +```scala + | val x: Int = g[Int] + | ^ + |Recursion limit exceeded. + |Maybe there is an illegal cyclic reference? + |If that's not the case, you could also try to + |increase the stacksize using the -Xss JVM option. + |A recurring operation is (inner to outer): + | + | subtype LazyRef(Test.L[Int]) <:< Int +``` + +Internally, the Scala compiler detects these cycles by turning selected stack overflows into +type errors. If there is a stack overflow during subtyping, the exception will +be caught and turned into a compile-time error that indicates a trace of the +subtype tests that caused the overflow without showing a full stack trace. + + +## Match Types Variance + +All type positions in a match type (scrutinee, patterns, bodies) are considered invariant. + +## Related Work + +Match types have similarities with +[closed type families](https://wiki.haskell.org/GHC/Type_families) in Haskell. 
+Some differences are:
+
+- Subtyping instead of type equalities.
+- Match type reduction does not tighten the underlying constraint, whereas type
+  family reduction does unify. This difference in approach mirrors the
+  difference between local type inference in Scala and global type inference in
+  Haskell.
+
+Match types are also similar to TypeScript's
+[conditional types](https://github.com/Microsoft/TypeScript/pull/21316). The
+main differences here are:
+
+ - Conditional types only reduce if both the scrutinee and pattern are ground,
+   whereas match types also work for type parameters and abstract types.
+ - Match types support direct recursion.
+ - Conditional types distribute through union types.
diff --git a/docs/_spec/TODOreference/new-types/new-types.md b/docs/_spec/TODOreference/new-types/new-types.md
new file mode 100644
index 000000000000..84c157495d6f
--- /dev/null
+++ b/docs/_spec/TODOreference/new-types/new-types.md
@@ -0,0 +1,7 @@
+---
+layout: index
+title: "New Types"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/new-types/index.html
+---
+
+This chapter documents the new types introduced in Scala 3.
diff --git a/docs/_spec/TODOreference/new-types/polymorphic-function-types.md b/docs/_spec/TODOreference/new-types/polymorphic-function-types.md
new file mode 100644
index 000000000000..1754bf844831
--- /dev/null
+++ b/docs/_spec/TODOreference/new-types/polymorphic-function-types.md
@@ -0,0 +1,94 @@
+---
+layout: doc-page
+title: "Polymorphic Function Types"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/new-types/polymorphic-function-types.html
+---
+
+A polymorphic function type is a function type which accepts type parameters.
+For example:
+
+```scala
+// A polymorphic method:
+def foo[A](xs: List[A]): List[A] = xs.reverse
+
+// A polymorphic function value:
+val bar: [A] => List[A] => List[A]
+//       ^^^^^^^^^^^^^^^^^^^^^^^^^
+//       a polymorphic function type
+       = [A] => (xs: List[A]) => foo[A](xs)
+```
+
+Scala already has _polymorphic methods_, i.e. methods which accept type parameters.
+Method `foo` above is an example, accepting a type parameter `A`.
+So far, it
+was not possible to turn such methods into polymorphic function values like `bar` above,
+which can be passed as parameters to other functions, or returned as results.
+
+In Scala 3 this is now possible. The type of the `bar` value above is
+
+```scala
+[A] => List[A] => List[A]
+```
+
+This type describes function values which take a type `A` as a parameter,
+then take a list of type `List[A]`, and return a list of the same type `List[A]`.
+
+[More details](https://github.com/lampepfl/dotty/pull/4672)
+
+
+## Example Usage
+
+Polymorphic function types are particularly useful
+when callers of a method are required to provide a
+function which has to be polymorphic,
+meaning that it should accept arbitrary types as part of its inputs.
+
+For instance, consider the situation where we have
+a data type to represent the expressions of a simple language
+(consisting only of variables and function applications)
+in a strongly-typed way:
+
+```scala
+enum Expr[A]:
+  case Var(name: String)
+  case Apply[A, B](fun: Expr[B => A], arg: Expr[B]) extends Expr[A]
+```
+
+We would like to provide a way for users to map a function
+over all immediate subexpressions of a given `Expr`.
+This requires the given function to be polymorphic,
+since each subexpression may have a different type.
+
+Here is how to implement this using polymorphic function types:
+
+```scala
+def mapSubexpressions[A](e: Expr[A])(f: [B] => Expr[B] => Expr[B]): Expr[A] =
+  e match
+    case Apply(fun, arg) => Apply(f(fun), f(arg))
+    case Var(n) => Var(n)
+```
+
+And here is how to use this function to _wrap_ each subexpression
+in a given expression with a call to some `wrap` function,
+defined as a variable:
+
+```scala
+val e0 = Apply(Var("f"), Var("a"))
+val e1 = mapSubexpressions(e0)(
+  [B] => (se: Expr[B]) => Apply(Var[B => B]("wrap"), se))
+println(e1) // Apply(Apply(Var(wrap),Var(f)),Apply(Var(wrap),Var(a)))
+```
+
+## Relationship With Type Lambdas
+
+Polymorphic function types are not to be confused with
+[_type lambdas_](type-lambdas.md).
+While the former describes the _type_ of a polymorphic _value_,
+the latter is an actual function _at the type level_.
+
+A good way of understanding the difference is to notice that
+**_type lambdas are applied in types,
+whereas polymorphic functions are applied in terms_**:
+One would call the function `bar` above
+by passing it a type argument `bar[Int]` _within a method body_.
+On the other hand, given a type lambda such as `type F = [A] =>> List[A]`,
+one would call `F` _within a type expression_, as in `type Bar = F[Int]`.
diff --git a/docs/_spec/TODOreference/other-new-features/control-syntax.md b/docs/_spec/TODOreference/other-new-features/control-syntax.md
new file mode 100644
index 000000000000..92204690f0b7
--- /dev/null
+++ b/docs/_spec/TODOreference/other-new-features/control-syntax.md
@@ -0,0 +1,47 @@
+---
+layout: doc-page
+title: New Control Syntax
+nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/control-syntax.html
+---
+
+Scala 3 has a new "quiet" syntax for control expressions that does not rely on
+enclosing the condition in parentheses, and also allows dropping parentheses or braces
+around the generators of a `for`-expression. Examples:
+```scala
+if x < 0 then
+  "negative"
+else if x == 0 then
+  "zero"
+else
+  "positive"
+
+if x < 0 then -x else x
+
+while x >= 0 do x = f(x)
+
+for x <- xs if x > 0
+yield x * x
+
+for
+  x <- xs
+  y <- ys
+do
+  println(x + y)
+
+try body
+catch case ex: IOException => handle
+```
+
+The rules in detail are:
+
+ - The condition of an `if`-expression can be written without enclosing parentheses if it is followed by a `then`.
+ - The condition of a `while`-loop can be written without enclosing parentheses if it is followed by a `do`.
+ - The enumerators of a `for`-expression can be written without enclosing parentheses or braces if they are followed by a `yield` or `do`.
+ - A `do` in a `for`-expression expresses a `for`-loop.
+ - A `catch` can be followed by a single case on the same line.
+   If there are multiple cases, these have to appear within braces (just like in Scala 2)
+   or an indented block.
+
+## Rewrites
+
+The Scala 3 compiler can rewrite source code from old syntax to new syntax and back.
+When invoked with options `-rewrite -new-syntax` it will rewrite from old to new syntax, dropping parentheses and braces in conditions and enumerators. When invoked with options `-rewrite -old-syntax` it will rewrite in the reverse direction, inserting parentheses and braces as needed.
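+
+For example, given a source file containing (a hypothetical `Example.scala`):
+
+```scala
+if (x < 0) {
+  println(1)
+} else {
+  println(2)
+}
+```
+
+compiling with `-rewrite -new-syntax` would rewrite it to (approximately):
+
+```scala
+if x < 0 then
+  println(1)
+else
+  println(2)
+```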
diff --git a/docs/_spec/TODOreference/other-new-features/creator-applications.md b/docs/_spec/TODOreference/other-new-features/creator-applications.md
new file mode 100644
index 000000000000..81f09d897955
--- /dev/null
+++ b/docs/_spec/TODOreference/other-new-features/creator-applications.md
@@ -0,0 +1,57 @@
+---
+layout: doc-page
+title: "Universal Apply Methods"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/creator-applications.html
+---
+
+Scala case classes generate apply methods, so that values of case classes can be created using simple
+function application, without needing to write `new`.
+
+Scala 3 generalizes this scheme to all concrete classes. Example:
+
+```scala
+class StringBuilder(s: String):
+  def this() = this("")
+
+StringBuilder("abc") // old: new StringBuilder("abc")
+StringBuilder()      // old: new StringBuilder()
+```
+
+This works since a companion object with two `apply` methods
+is generated together with the class. The object looks like this:
+
+```scala
+object StringBuilder:
+  inline def apply(s: String): StringBuilder = new StringBuilder(s)
+  inline def apply(): StringBuilder = new StringBuilder()
+```
+
+The synthetic object `StringBuilder` and its `apply` methods are called _constructor proxies_.
+Constructor proxies are generated even for Java classes and classes coming from Scala 2.
+The precise rules are as follows:
+
+ 1. A constructor proxy companion object `object C` is created for a concrete class `C`,
+    provided the class does not already have a companion, and there is also no other value
+    or method named `C` defined or inherited in the scope where `C` is defined.
+
+ 2. Constructor proxy `apply` methods are generated for a concrete class provided
+
+    - the class has a companion object (which might have been generated in step 1), and
+    - that companion object does not already define a member named `apply`.
+
+    Each generated `apply` method forwards to one constructor of the class. It has the
+    same type and value parameters as the constructor.
+
+Constructor proxy companions cannot be used as values by themselves. A proxy companion object must
+be selected with `apply` (or be applied to arguments, in which case the `apply` is implicitly
+inserted).
+
+Constructor proxies are also not allowed to shadow normal definitions. That is,
+if an identifier resolves to a constructor proxy, and the same identifier is also
+defined or imported in some other scope, an ambiguity is reported.
+
+## Motivation
+
+Leaving out `new` hides an implementation detail and makes code more pleasant to read. Even though
+it requires a new rule, it will likely increase the perceived regularity of the language, since case
+classes already provide function call creation syntax (and are often defined for this reason alone).
diff --git a/docs/_spec/TODOreference/other-new-features/experimental-defs.md b/docs/_spec/TODOreference/other-new-features/experimental-defs.md
new file mode 100644
index 000000000000..225b61161652
--- /dev/null
+++ b/docs/_spec/TODOreference/other-new-features/experimental-defs.md
@@ -0,0 +1,318 @@
+---
+layout: doc-page
+title: "Experimental Definitions"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/experimental-defs.html
+---
+
+The [`@experimental`](https://scala-lang.org/api/3.x/scala/annotation/experimental.html) annotation allows the definition of an API for which backward binary or source compatibility is not guaranteed.
+This annotation can be placed on term or type definitions.
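+
+For instance (a minimal sketch; the names `newApi` and `NewType` are made up):
+
+```scala
+import scala.annotation.experimental
+
+@experimental
+def newApi(): Unit = () // an experimental term definition
+
+@experimental
+type NewType = Int // an experimental type definition
+```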
+ +## References to experimental definitions + +Experimental definitions can only be referenced in an experimental scope. Experimental scopes are defined as follows: + +1. The RHS of an experimental `def`, `val`, `var`, `given` or `type` is an experimental scope. Examples: + +
+ Example 1 + + ```scala + import scala.annotation.experimental + + @experimental + def x = () + + def d1 = x // error: value x is marked @experimental and therefore ... + @experimental def d2 = x + + val v1 = x // error: value x is marked @experimental and therefore ... + @experimental val v2 = x + + var vr1 = x // error: value x is marked @experimental and therefore ... + @experimental var vr2 = x + + lazy val lv1 = x // error: value x is marked @experimental and therefore ... + @experimental lazy val lv2 = x + ``` +
+ +
+ Example 2 + + ```scala + import scala.annotation.experimental + + @experimental + val x = () + + @experimental + def f() = () + + @experimental + object X: + def fx() = 1 + + def test1: Unit = + f() // error: def f is marked @experimental and therefore ... + x // error: value x is marked @experimental and therefore ... + X.fx() // error: object X is marked @experimental and therefore ... + import X.fx + fx() // error: object X is marked @experimental and therefore ... + + @experimental + def test2: Unit = + // references to f, x and X are ok because `test2` is experimental + f() + x + X.fx() + import X.fx + fx() + ``` +
+ +
+  Example 3
+
+  ```scala
+  import scala.annotation.experimental
+
+  @experimental type E
+
+  type A = E // error: type E is marked @experimental and therefore ...
+  @experimental type B = E
+  ```
+ +
+  Example 4
+
+  ```scala
+  import scala.annotation.experimental
+
+  @experimental class A
+  @experimental type X
+  @experimental type Y = Int
+  @experimental opaque type Z = Int
+
+  def test: Unit =
+    new A // error: class A is marked @experimental and therefore ...
+    val i0: A = ??? // error: class A is marked @experimental and therefore ...
+    val i1: X = ??? // error: type X is marked @experimental and therefore ...
+    val i2: Y = ??? // error: type Y is marked @experimental and therefore ...
+    val i3: Z = ??? // error: type Z is marked @experimental and therefore ...
+    ()
+  ```
+ +
+  Example 5
+
+  ```scala
+  @experimental
+  trait ExpSAM {
+    def foo(x: Int): Int
+  }
+  def bar(f: ExpSAM): Unit = {} // error: from rule 2
+
+  def test: Unit =
+    bar(x => x) // error: reference to experimental SAM
+    ()
+  ```
+ +2. The signatures of an experimental `def`, `val`, `var`, `given` and `type`, or constructors of `class` and `trait` are experimental scopes. Examples: + +
+  Example 1
+
+  ```scala
+  import scala.annotation.experimental
+
+  @experimental def x = 2
+  @experimental class A
+  @experimental type X
+  @experimental type Y = Int
+  @experimental opaque type Z = Int
+
+  def test1(
+    p1: A, // error: class A is marked @experimental and therefore ...
+    p2: List[A], // error: class A is marked @experimental and therefore ...
+    p3: X, // error: type X is marked @experimental and therefore ...
+    p4: Y, // error: type Y is marked @experimental and therefore ...
+    p5: Z, // error: type Z is marked @experimental and therefore ...
+    p6: Any = x // error: def x is marked @experimental and therefore ...
+  ): A = ??? // error: class A is marked @experimental and therefore ...
+
+  @experimental def test2(
+    p1: A,
+    p2: List[A],
+    p3: X,
+    p4: Y,
+    p5: Z,
+    p6: Any = x
+  ): A = ???
+
+  class Test1(
+    p1: A, // error
+    p2: List[A], // error
+    p3: X, // error
+    p4: Y, // error
+    p5: Z, // error
+    p6: Any = x // error
+  ) {}
+
+  @experimental class Test2(
+    p1: A,
+    p2: List[A],
+    p3: X,
+    p4: Y,
+    p5: Z,
+    p6: Any = x
+  ) {}
+
+  trait Test3(
+    p1: A, // error
+    p2: List[A], // error
+    p3: X, // error
+    p4: Y, // error
+    p5: Z, // error
+    p6: Any = x // error
+  ) {}
+
+  @experimental trait Test4(
+    p1: A,
+    p2: List[A],
+    p3: X,
+    p4: Y,
+    p5: Z,
+    p6: Any = x
+  ) {}
+  ```
+ +3. The `extends` clause of an experimental `class`, `trait` or `object` is an experimental scope. Examples: + +
+  Example 1
+
+  ```scala
+  import scala.annotation.experimental
+
+  @experimental def x = 2
+
+  @experimental class A1(x: Any)
+  class A2(x: Any)
+
+
+  @experimental class B1 extends A1(1)
+  class B2 extends A1(1) // error: class A1 is marked @experimental and therefore ...
+
+  @experimental class C1 extends A2(x)
+  class C2 extends A2(x) // error: def x is marked @experimental and therefore ...
+  ```
+ +4. The body of an experimental `class`, `trait` or `object` is an experimental scope. Examples: + +
+  Example 1
+
+  ```scala
+  import scala.annotation.experimental
+
+  @experimental def x = 2
+
+  @experimental class A {
+    def f = x // ok because A is experimental
+  }
+
+  @experimental class B {
+    def f = x // ok because B is experimental
+  }
+
+  @experimental object C {
+    def f = x // ok because C is experimental
+  }
+
+  @experimental class D {
+    def f = {
+      object B {
+        x // ok because D is experimental
+      }
+    }
+  }
+  ```
+
+ +5. Annotations of an experimental definition are in experimental scopes. Examples: + +
+ Example 1 + + ```scala + import scala.annotation.experimental + + @experimental class myExperimentalAnnot extends scala.annotation.Annotation + + @myExperimentalAnnot // error + def test: Unit = () + + @experimental + @myExperimentalAnnot + def test: Unit = () + ``` + +
+
+6. Any code compiled using a [_Nightly_](https://search.maven.org/artifact/org.scala-lang/scala3-compiler_3) or _Snapshot_ version of the compiler is considered to be in an experimental scope.
+The `-Yno-experimental` compiler flag can be used to disable this behavior and compile as in a proper release.
+
+In any other situation, a reference to an experimental definition will cause a compilation error.
+
+## Experimental inheritance
+
+All subclasses of an experimental `class` or `trait` must be marked as [`@experimental`](https://scala-lang.org/api/3.x/scala/annotation/experimental.html) even if they are in an experimental scope.
+Anonymous classes and SAMs of experimental classes are considered experimental.
+
+We require explicit annotations to make sure we do not have completion or cycle issues with nested classes. This restriction could be relaxed in the future.
+
+## Experimental overriding
+
+For an overriding member `M` and overridden member `O`, if `O` is non-experimental then `M` must be non-experimental.
+
+This makes sure that we cannot have accidental binary incompatibilities such as the following change.
+```diff
+class A:
+  def f: Any = 1
+class B extends A:
+-  @experimental def f: Int = 2
+```
+
+## Test frameworks
+
+Tests can be defined as experimental. Test frameworks can execute tests using reflection even if they are in an experimental class, object or method. Examples:
+
+
+Example 1
+
+Tests that touch experimental APIs can be written as follows:
+
+```scala
+import scala.annotation.experimental
+
+@experimental def x = 2
+
+class MyTests {
+  /*@Test*/ def test1 = x // error
+  @experimental /*@Test*/ def test2 = x
+}
+
+@experimental
+class MyExperimentalTests {
+  /*@Test*/ def test1 = x
+  /*@Test*/ def test2 = x
+}
+```
+
diff --git a/docs/_spec/TODOreference/other-new-features/export.md b/docs/_spec/TODOreference/other-new-features/export.md
new file mode 100644
index 000000000000..40e2ad9df248
--- /dev/null
+++ b/docs/_spec/TODOreference/other-new-features/export.md
@@ -0,0 +1,234 @@
+---
+layout: doc-page
+title: "Export Clauses"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/export.html
+---
+
+An export clause defines aliases for selected members of an object. Example:
+
+```scala
+class BitMap
+class InkJet
+
+class Printer:
+  type PrinterType
+  def print(bits: BitMap): Unit = ???
+  def status: List[String] = ???
+
+class Scanner:
+  def scan(): BitMap = ???
+  def status: List[String] = ???
+
+class Copier:
+  private val printUnit = new Printer { type PrinterType = InkJet }
+  private val scanUnit = new Scanner
+
+  export scanUnit.scan
+  export printUnit.{status as _, *}
+
+  def status: List[String] = printUnit.status ++ scanUnit.status
+```
+
+The two `export` clauses define the following _export aliases_ in class `Copier`:
+
+```scala
+final def scan(): BitMap = scanUnit.scan()
+final def print(bits: BitMap): Unit = printUnit.print(bits)
+final type PrinterType = printUnit.PrinterType
+```
+
+They can be accessed inside `Copier` as well as from outside:
+
+```scala
+val copier = new Copier
+copier.print(copier.scan())
+```
+
+An `export` clause has the same format as an import clause. Its general form is:
+
+```scala
+export path . { sel_1, ..., sel_n }
+```
+
+It consists of a qualifier expression `path`, which must be a stable identifier, followed by
+one or more selectors `sel_i` that identify what gets an alias. Selectors can be
+of one of the following forms:
+
+ - A _simple selector_ `x` creates aliases for all eligible members of `path` that are named `x`.
+ - A _renaming selector_ `x as y` creates aliases for all eligible members of `path` that are named `x`, but the alias is named `y` instead of `x`.
+ - An _omitting selector_ `x as _` prevents `x` from being aliased by a subsequent
+   wildcard selector.
+ - A _given selector_ `given x` has an optional type bound `x`. It creates aliases for all eligible given instances that conform to either `x`, or `Any` if `x` is omitted, except for members that are named by a previous simple, renaming, or omitting selector.
+ - A _wildcard selector_ `*` creates aliases for all eligible members of `path` except for given instances,
+   synthetic members generated by the compiler and those members that are named by a previous simple, renaming, or omitting selector.
+   \
+   Notes:
+   - eligible constructor proxies are also included, even though they are synthetic members.
+   - members created by an export are also included. They are created by the compiler, but are not considered synthetic.
+
+A member is _eligible_ if all of the following hold:
+
+ - its owner is not a base class of the class[(\*)](#note_class) containing the export clause,
+ - the member does not override a concrete definition that has as owner
+   a base class of the class containing the export clause,
+ - it is accessible at the export clause,
+ - it is not a constructor, nor the (synthetic) class part of an object,
+ - it is a given instance (declared with `given`) if and only if the export is from a _given selector_.
+
+It is a compile-time error if a simple or renaming selector does not identify
+any eligible members.
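+
+As a small illustration of the selector forms above (a sketch; all names are
+made up):
+
+```scala
+object Ops:
+  def plus(x: Int, y: Int): Int = x + y
+  def times(x: Int, y: Int): Int = x * y
+  given intOrd: Ordering[Int] = Ordering.Int
+
+object Facade:
+  export Ops.{plus as add, times as _, *} // rename `plus`, omit `times`, alias the rest
+  export Ops.{given Ordering[Int]}        // alias the given instance
+
+@main def facadeDemo = println(Facade.add(1, 2)) // prints "3"
+```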
+
+Type members are aliased by type definitions, and term members are aliased by method definitions. For instance:
+```scala
+object O:
+  class C(val x: Int)
+  def m(c: C): Int = c.x + 1
+export O.*
+  // generates
+  //   type C = O.C
+  //   def m(c: O.C): Int = O.m(c)
+```
+
+Export aliases copy the type and value parameters of the members they refer to.
+Export aliases are always `final`. Aliases of given instances are again defined as givens (and aliases of old-style implicits are `implicit`). Aliases of extensions are again defined as extensions. Aliases of inline methods or values are again defined `inline`. There are no other modifiers that can be given to an alias. This has the following consequences for overriding:
+
+ - Export aliases cannot be overridden, since they are final.
+ - Export aliases cannot override concrete members in base classes, since they are
+   not marked `override`.
+ - However, export aliases can implement deferred members of base classes.
+
+Export aliases for public value definitions that are accessed without
+referring to private values in the qualifier path
+are marked by the compiler as "stable" and their result types are the singleton types of the aliased definitions. This means that they can be used as parts of stable identifier paths, even though they are technically methods. For instance, the following is OK:
+```scala
+class C { type T }
+object O { val c: C = ... }
+export O.c
+def f: c.T = ...
+```
+
+
+**Restrictions:**
+
+ 1. Export clauses can appear in classes or they can appear at the top-level. An export clause cannot appear as a statement in a block.
+ 1. If an export clause contains a wildcard or given selector, it is forbidden for its qualifier path to refer to a package. This is because it is not yet known how to safely track wildcard dependencies to a package for the purposes of incremental compilation.
+ 1. An export renaming hides un-renamed exports matching the target name. For instance, the following
+    clause would be invalid since `B` is hidden by the renaming `A as B`.
+    ```scala
+    export {A as B, B} // error: B is hidden
+    ```
+
+ 1. Renamings in an export clause must have pairwise different target names. For instance, the following clause would be invalid:
+    ```scala
+    export {A as C, B as C} // error: duplicate renaming
+    ```
+
+ 1. Simple renaming exports like
+    ```scala
+    export status as stat
+    ```
+    are not supported yet. They would run afoul of the restriction that the
+    exported name cannot already be a member of the object containing the export.
+    This restriction might be lifted in the future.
+
+
+(\*) **Note:** Unless otherwise stated, the term "class" in this discussion also includes object and trait definitions.
+
+## Motivation
+
+It is a standard recommendation to prefer composition over inheritance. This is really an application of the principle of least power: Composition treats components as blackboxes whereas inheritance can affect the internal workings of components through overriding. Sometimes the close coupling implied by inheritance is the best solution for a problem, but where this is not necessary the looser coupling of composition is better.
+
+So far, object-oriented languages including Scala made it much easier to use inheritance than composition. Inheritance only requires an `extends` clause whereas composition requires a verbose elaboration of a sequence of forwarders. So in that sense, object-oriented languages are pushing
+programmers to a solution that is often too powerful.
+Export clauses redress the balance. They make composition relationships as concise and easy to express as inheritance relationships. Export clauses also offer more flexibility than extends clauses since members can be renamed or omitted.
+
+Export clauses also fill a gap opened by the shift from package objects to top-level definitions. One occasionally useful idiom that gets lost in this shift is a package object inheriting from some class. The idiom is often used in a facade-like pattern, to make members
+of internal compositions available to users of a package. Top-level definitions are not wrapped in a user-defined object, so they can't inherit anything. However, export clauses can themselves be top-level definitions, which supports the facade design pattern in a safer and
+more flexible way.
+
+## Export Clauses in Extensions
+
+An export clause may also appear in an extension.
+
+Example:
+```scala
+class StringOps(x: String):
+  def *(n: Int): String = ...
+  def capitalize: String = ...
+
+extension (x: String)
+  def take(n: Int): String = x.substring(0, n)
+  def drop(n: Int): String = x.substring(n)
+  private def moreOps = new StringOps(x)
+  export moreOps.*
+```
+In this case the qualifier expression must be an identifier that refers to a unique parameterless extension method in the same extension clause. The export will create
+extension methods for all accessible term members
+in the result of the qualifier path. For instance, the extension above would be expanded to
+```scala
+extension (x: String)
+  def take(n: Int): String = x.substring(0, n)
+  def drop(n: Int): String = x.substring(n)
+  private def moreOps = StringOps(x)
+  def *(n: Int): String = moreOps.*(n)
+  def capitalize: String = moreOps.capitalize
+```
+
+## Syntax changes
+
+```
+TemplateStat ::= ...
+               | Export
+TopStat ::= ...
+          | Export
+ExtMethod ::= ...
+            | Export
+Export ::= ‘export’ ImportExpr {‘,’ ImportExpr}
+ImportExpr ::= SimpleRef {‘.’ id} ‘.’ ImportSpec
+ImportSpec ::= NamedSelector
+             | WildcardSelector
+             | ‘{’ ImportSelectors ‘}’
+NamedSelector ::= id [‘as’ (id | ‘_’)]
+WildCardSelector ::= ‘*’ | ‘given’ [InfixType]
+ImportSelectors ::= NamedSelector [‘,’ ImportSelectors]
+                  | WildCardSelector {‘,’ WildCardSelector}
+```
+
+## Elaboration of Export Clauses
+
+Export clauses raise questions about the order of elaboration during type checking.
+Consider the following example:
+
+```scala
+class B { val c: Int }
+object a { val b = new B }
+export a.*
+export b.*
+```
+
+Is the `export b.*` clause legal? If yes, what does it export? Is it equivalent to `export a.b.*`? What about if we swap the last two clauses?
+
+```
+export b.*
+export a.*
+```
+
+To avoid tricky questions like these, we fix the elaboration order of exports as follows.
+
+Export clauses are processed when the type information of the enclosing object or class is completed. Completion so far consisted of the following steps:
+
+ 1. Elaborate any annotations of the class.
+ 2. Elaborate the parameters of the class.
+ 3. Elaborate the self type of the class, if one is given.
+ 4. Enter all definitions of the class as class members, with types to be completed
+    on demand.
+ 5. Determine the types of all parents of the class.
+
+ With export clauses, the following steps are added:
+
+ 6. Compute the types of all paths in export clauses.
+ 7. Enter export aliases for the eligible members of all paths in export clauses.
+
+It is important that steps 6 and 7 are done in sequence: We first compute the types of _all_
+paths in export clauses and only after this is done do we enter any export aliases as class members. This means that a path of an export clause cannot refer to an alias made available
+by another export clause of the same class.
diff --git a/docs/_spec/TODOreference/other-new-features/indentation.md b/docs/_spec/TODOreference/other-new-features/indentation.md
new file mode 100644
index 000000000000..e931030ab696
--- /dev/null
+++ b/docs/_spec/TODOreference/other-new-features/indentation.md
@@ -0,0 +1,509 @@
+---
+layout: doc-page
+title: "Optional Braces"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/indentation.html
+---
+
+Scala 3 enforces some rules on indentation and allows some occurrences of braces `{...}` to be optional:
+
+- First, some badly indented programs are flagged with warnings.
+- Second, some occurrences of braces `{...}` are made optional. Generally, the rule
+  is that adding a pair of optional braces will not change the meaning of a well-indented program.
+
+These changes can be turned off with the compiler flag `-no-indent`.
+
+## Indentation Rules
+
+The compiler enforces two rules for well-indented programs, flagging violations as warnings.
+
+ 1. In a brace-delimited region, no statement is allowed to start to the left
+    of the first statement after the opening brace that starts a new line.
+
+    This rule is helpful for finding missing closing braces. It prevents errors like:
+
+    ```scala
+    if (x < 0) {
+      println(1)
+      println(2)
+
+     println("done")  // error: indented too far to the left
+    ```
+
+ 2. If significant indentation is turned off (i.e. under Scala 2 mode or under `-no-indent`) and we are at the start of an indented sub-part of an expression, and the indented part ends in a newline, the next statement must start at an indentation width less than the sub-part. This prevents errors where an opening brace was forgotten, as in
+
+    ```scala
+    if (x < 0)
+      println(1)
+      println(2)   // error: missing `{`
+    ```
+
+These rules still leave a lot of leeway as to how programs should be indented. For instance, they do not impose
+any restrictions on indentation within expressions, nor do they require that all statements of an indentation block line up exactly.
+
+The rules are generally helpful in pinpointing the root cause of errors related to missing opening or closing braces. These errors are often quite hard to diagnose, in particular in large programs.
+
+## Optional Braces
+
+The compiler will insert `<indent>` or `<outdent>`
+tokens at certain line breaks. Grammatically, pairs of `<indent>` and `<outdent>` tokens have the same effect as pairs of braces `{` and `}`.
+
+The algorithm makes use of a stack `IW` of previously encountered indentation widths. The stack initially holds a single element with a zero indentation width. The _current indentation width_ is the indentation width of the top of the stack.
+
+There are two rules:
+
+ 1. An `<indent>` is inserted at a line break, if
+
+    - An indentation region can start at the current position in the source, and
+    - the first token on the next line has an indentation width strictly greater
+      than the current indentation width.
+
+    An indentation region can start
+
+    - after the leading parameters of an `extension`, or
+    - after a `with` in a given instance, or
+    - after a `:` at the start of a template body (see discussion of `<colon>` below), or
+    - after one of the following tokens:
+
+      ```
+      =  =>  ?=>  <-  catch  do  else  finally  for
+      if  match  return  then  throw  try  while  yield
+      ```
+
+    - after the closing `)` of a condition in an old-style `if` or `while`.
+    - after the closing `)` or `}` of the enumerations of an old-style `for` loop without a `do`.
+
+    If an `<indent>` is inserted, the indentation width of the token on the next line
+    is pushed onto `IW`, which makes it the new current indentation width.
+
+ 2. An `<outdent>` is inserted at a line break, if
+
+    - the first token on the next line has an indentation width strictly less
+      than the current indentation width, and
+    - the last token on the previous line is not one of the following tokens
+      which indicate that the previous statement continues:
+      ```
+      then  else  do  catch  finally  yield  match
+      ```
+    - if the first token on the next line is a
+      [leading infix operator](../changed-features/operators.md),
+      then its indentation width is less than the current indentation width,
+      and it either matches a previous indentation width or is also less
+      than the enclosing indentation width.
+
+    If an `<outdent>` is inserted, the top element is popped from `IW`.
+    If the indentation width of the token on the next line is still less than the new current indentation width, step (2) repeats. Therefore, several `<outdent>` tokens
+    may be inserted in a row.
+
+    The following two additional rules support parsing of legacy code with ad-hoc layout. They might be withdrawn in future language versions:
+
+    - An `<outdent>` is also inserted if the next token following a statement sequence starting with an `<indent>` closes an indentation region, i.e. is one of `then`, `else`, `do`, `catch`, `finally`, `yield`, `}`, `)`, `]` or `case`.
+
+    - An `<outdent>` is finally inserted in front of a comma that follows a statement sequence starting with an `<indent>` if the indented region is itself enclosed in parentheses.
+
+It is an error if the indentation width of the token following an `<outdent>` does not match the indentation of some previous line in the enclosing indentation region. For instance, the following would be rejected.
+
+```scala
+if x < 0 then
+    -x
+  else // error: `else` does not align correctly
+    x
+```
+
+Indentation tokens are only inserted in regions where newline statement separators are also inferred:
+at the top-level, inside braces `{...}`, but not inside parentheses `(...)`, patterns or types.
+
+**Note:** The rules for leading infix operators above are there to make sure that
+```scala
+  one
+  + two.match
+      case 1 => b
+      case 2 => c
+  + three
+```
+is parsed as `one + (two.match ...) + three`. Also, that
+```scala
+if x then
+    a
+  + b
+  + c
+else d
+```
+is parsed as `if x then a + b + c else d`.
+
+## Optional Braces Around Template Bodies
+
+The Scala grammar uses the term _template body_ for the definitions of a class, trait, or object that are normally enclosed in braces. The braces around a template body can also be omitted by means of the following rule.
+
+A template body can alternatively consist of a colon followed by one or more indented statements.
+
+## Optional Braces Around Template Bodies
+
+The Scala grammar uses the term _template body_ for the definitions of a class, trait, or object that are normally enclosed in braces. The braces around a template body can also be omitted by means of the following rule.
+
+A template body can alternatively consist of a colon followed by one or more indented statements. To this purpose we introduce a new `<colon>` token that reads as
+the standard colon "`:`" but is generated instead of it where `<colon>`
+is legal according to the context free syntax, but only if the previous token
+is an alphanumeric identifier, a backticked identifier, or one of the tokens `this`, `super`, "`)`", and "`]`".
+
+An indentation region can start after a `<colon>`. A template body may be either enclosed in braces, or it may start with
+`<colon> <indent>` and end with `<outdent>`.
+Analogous rules apply for enum bodies, type refinements, and local packages containing nested definitions.
+
+With these new rules, the following constructs are all valid:
+
+```scala
+trait A:
+  def f: Int
+
+class C(x: Int) extends A:
+  def f = x
+
+object O:
+  def f = 3
+
+enum Color:
+  case Red, Green, Blue
+
+new A:
+  def f = 3
+
+package p:
+  def a = 1
+
+package q:
+  def b = 2
+```
+
+In each case, the `:` at the end of a line can be replaced without change of meaning by a pair of braces that enclose the following indented definition(s).
+
+The syntax changes allowing this are as follows:
+
+Define for an arbitrary sequence of tokens or non-terminals `TS`:
+
+```
+:<<< TS >>>  ::=  ‘{’ TS ‘}’
+               |  <colon> <indent> TS <outdent>
+```
+Then the grammar changes as follows:
+```
+TemplateBody ::=  :<<< [SelfType] TemplateStat {semi TemplateStat} >>>
+EnumBody     ::=  :<<< [SelfType] EnumStat {semi EnumStat} >>>
+Refinement   ::=  :<<< [RefineDcl] {semi [RefineDcl]} >>>
+Packaging    ::=  ‘package’ QualId :<<< TopStats >>>
+```
+
+## Spaces vs Tabs
+
+Indentation prefixes can consist of spaces and/or tabs. Indentation widths are the indentation prefixes themselves, ordered by the string prefix relation. So, for instance "2 tabs, followed by 4 spaces" is strictly less than "2 tabs, followed by 5 spaces", but "2 tabs, followed by 4 spaces" is incomparable to "6 tabs" or to "4 spaces, followed by 2 tabs". It is an error if the indentation width of some line is incomparable with the indentation width of the region that's current at that point. To avoid such errors, it is a good idea not to mix spaces and tabs in the same source file.
+
+## Indentation and Braces
+
+Indentation can be mixed freely with braces `{...}`, as well as brackets `[...]` and parentheses `(...)`. For interpreting indentation inside such regions, the following rules apply.
+
+ 1. The assumed indentation width of a multiline region enclosed in braces is the
+    indentation width of the first token that starts a new line after the opening brace.
+
+ 2. The assumed indentation width of a multiline region inside brackets or parentheses is:
+
+    - if the opening bracket or parenthesis is at the end of a line, the indentation width of the token following it,
+    - otherwise, the indentation width of the enclosing region.
+
+ 3. On encountering a closing brace `}`, bracket `]` or parenthesis `)`, as many `<outdent>` tokens as necessary are inserted to close all open nested indentation regions.
+
+For instance, consider:
+```scala
+{
+   val x = f(x: Int, y =>
+      x * (
+         y + 1
+      ) +
+      (x +
+      x)
+   )
+}
+```
+ - Here, the indentation width of the region enclosed by the braces is 3 (i.e. the indentation width of the
+statement starting with `val`).
+ - The indentation width of the region in parentheses that follows `f` is also 3, since the opening
+   parenthesis is not at the end of a line.
+ - The indentation width of the region in parentheses around `y + 1` is 9
+   (i.e. the indentation width of `y + 1`).
+ - Finally, the indentation width of the last region in parentheses starting with `(x` is 6
+   (i.e. the indentation width of the indented region following the `=>`).
+
+## Special Treatment of Case Clauses
+
+The indentation rules for `match` expressions and `catch` clauses are refined as follows:
+
+- An indentation region is opened after a `match` or `catch` also if the following `case`
+  appears at the indentation width that's current for the `match` itself.
+- In that case, the indentation region closes at the first token at that
+  same indentation width that is not a `case`, or at any token with a smaller
+  indentation width, whichever comes first.
+
+These rules make it possible to write `match` expressions where cases are not indented themselves, as in the example below:
+
+```scala
+x match
+case 1 => print("I")
+case 2 => print("II")
+case 3 => print("III")
+case 4 => print("IV")
+case 5 => print("V")
+
+println(".")
+```
+
+## Using Indentation to Signal Statement Continuation
+
+Indentation is used in some situations to decide whether to insert a virtual semicolon between
+two consecutive lines or to treat them as one statement. Virtual semicolon insertion is
+suppressed if the second line is indented more relative to the first one, and either the second line
+starts with "`(`", "`[`", or "`{`" or the first line ends with `return`. Examples:
+
+```scala
+f(x + 1)
+  (2, 3)        // equivalent to  `f(x + 1)(2, 3)`
+
+g(x + 1)
+(2, 3)          // equivalent to  `g(x + 1); (2, 3)`
+
+h(x + 1)
+  {}            // equivalent to  `h(x + 1){}`
+
+i(x + 1)
+{}              // equivalent to  `i(x + 1); {}`
+
+if x < 0 then return
+    a + b       // equivalent to  `if x < 0 then return a + b`
+
+if x < 0 then return
+println(a + b)  // equivalent to  `if x < 0 then return; println(a + b)`
+```
+In Scala 2, a line starting with "`{`" always continues the function call on the preceding line,
+irrespective of indentation, whereas a virtual semicolon is inserted in all other cases.
+The Scala 2 behavior is retained under `-no-indent` or `-source 3.0-migration`.
+
+## The End Marker
+
+Indentation-based syntax has many advantages over other conventions. But one possible problem is that it makes it hard to discern when a large indentation region ends, since there is no specific token that delineates the end. Braces are not much better since a brace by itself also contains no information about what region is closed.
+
+To solve this problem, Scala 3 offers an optional `end` marker. Example:
+
+```scala
+def largeMethod(...) =
+  ...
+  if ... then ...
+  else
+    ... // a large block
+  end if
+  ... // more code
+end largeMethod
+```
+
+An `end` marker consists of the identifier `end` and a follow-on specifier token that together constitute all the tokens of a line. Possible specifier tokens are
+identifiers or one of the following keywords:
+
+```scala
+if   while   for   match   try   new   this   val   given
+```
+
+End markers are allowed in statement sequences. The specifier token `s` of an end marker must correspond to the statement that precedes it. This means:
+
+- If the statement defines a member `x`, then `s` must be the same identifier `x`.
+- If the statement defines a constructor, then `s` must be `this`.
+- If the statement defines an anonymous given, then `s` must be `given`.
+- If the statement defines an anonymous extension, then `s` must be `extension`.
+- If the statement defines an anonymous class, then `s` must be `new`.
+- If the statement is a `val` definition binding a pattern, then `s` must be `val`.
+- If the statement is a package clause that refers to package `p`, then `s` must be the same identifier `p`.
+- If the statement is an `if`, `while`, `for`, `try`, or `match` statement, then `s` must be that same token.
+
+For instance, the following end markers are all legal:
+
+```scala
+package p1.p2:
+
+  abstract class C():
+
+    def this(x: Int) =
+      this()
+      if x > 0 then
+        val a :: b =
+          x :: Nil
+        end val
+        var y =
+          x
+        end y
+        while y > 0 do
+          println(y)
+          y -= 1
+        end while
+        try
+          x match
+            case 0 => println("0")
+            case _ =>
+          end match
+        finally
+          println("done")
+        end try
+      end if
+    end this
+
+    def f: String
+  end C
+
+  object C:
+    given C =
+      new C:
+        def f = "!"
+        end f
+      end new
+    end given
+  end C
+
+  extension (x: C)
+    def ff: String = x.f ++ x.f
+  end extension
+
+end p2
+```
+
+### When to Use End Markers
+
+It is recommended that `end` markers are used for code where the extent of an indentation region is not immediately apparent "at a glance". People will have different preferences as to what this means, but one can nevertheless give some guidelines that stem from experience. An end marker makes sense if
+
+- the construct contains blank lines, or
+- the construct is long, say 15-20 lines or more, or
+- the construct ends heavily indented, say 4 indentation levels or more.
+
+If none of these criteria apply, it's often better not to use an end marker since the code will be just as clear and more concise. If there are several ending regions that satisfy one of the criteria above, we usually need an end marker only for the outermost closed region. So cascades of end markers as in the example above are usually better avoided.
+
+### Syntax
+
+```
+EndMarker         ::=  ‘end’ EndMarkerTag    -- when followed by EOL
+EndMarkerTag      ::=  id | ‘if’ | ‘while’ | ‘for’ | ‘match’ | ‘try’
+                    |  ‘new’ | ‘this’ | ‘given’ | ‘extension’ | ‘val’
+BlockStat         ::=  ... | EndMarker
+TemplateStat      ::=  ... | EndMarker
+TopStat           ::=  ... | EndMarker
+```
+
+## Example
+
+Here is a (somewhat meta-circular) example of code using indentation. It provides a concrete representation of indentation widths as defined above together with efficient operations for constructing and comparing indentation widths.
+
+```scala
+enum IndentWidth:
+  case Run(ch: Char, n: Int)
+  case Conc(l: IndentWidth, r: Run)
+
+  def <= (that: IndentWidth): Boolean = this match
+    case Run(ch1, n1) =>
+      that match
+        case Run(ch2, n2) => n1 <= n2 && (ch1 == ch2 || n1 == 0)
+        case Conc(l, r)   => this <= l
+    case Conc(l1, r1) =>
+      that match
+        case Conc(l2, r2) => l1 == l2 && r1 <= r2
+        case _            => false
+
+  def < (that: IndentWidth): Boolean =
+    this <= that && !(that <= this)
+
+  override def toString: String =
+    this match
+      case Run(ch, n) =>
+        val kind = ch match
+          case ' '  => "space"
+          case '\t' => "tab"
+          case _    => s"'$ch'-character"
+        val suffix = if n == 1 then "" else "s"
+        s"$n $kind$suffix"
+      case Conc(l, r) =>
+        s"$l, $r"
+
+object IndentWidth:
+  private inline val MaxCached = 40
+
+  private val spaces = IArray.tabulate(MaxCached + 1)(new Run(' ', _))
+  private val tabs   = IArray.tabulate(MaxCached + 1)(new Run('\t', _))
+
+  def Run(ch: Char, n: Int): Run =
+    if n <= MaxCached && ch == ' ' then
+      spaces(n)
+    else if n <= MaxCached && ch == '\t' then
+      tabs(n)
+    else
+      new Run(ch, n)
+  end Run
+
+  val Zero = Run(' ', 0)
+end IndentWidth
+```
+
+## Settings and Rewrites
+
+Significant indentation is enabled by default. It can be turned off by giving any of the options `-no-indent`, `-old-syntax`, or `-source 3.0-migration`.
+If indentation is turned off, it is nevertheless checked that indentation conforms to the logical program structure as defined by braces. If that is not the case, the compiler issues a warning.
+
+The Scala 3 compiler can rewrite source code to indented code and back.
+When invoked with options `-rewrite -indent` it will rewrite braces to
+indented regions where possible. When invoked with options `-rewrite -no-indent` it will rewrite in the reverse direction, inserting braces for indentation regions.
+The `-indent` option only works on [new-style syntax](./control-syntax.md). So to go from old-style syntax to new-style indented code one has to invoke the compiler twice, first with options `-rewrite -new-syntax`, then again with options
+`-rewrite -indent`. To go in the opposite direction, from indented code to old-style syntax, it's `-rewrite -no-indent`, followed by `-rewrite -old-syntax`.
+
+## Variant: Indentation Marker `:` for Arguments
+
+Generally, the possible indentation regions coincide with those regions where braces `{...}` are also legal, no matter whether the braces enclose an expression or a set of definitions. There is one exception, though: Arguments to functions can be enclosed in braces but they cannot be simply indented instead. Making indentation always significant for function arguments would be too restrictive and fragile.
+
+To allow such arguments to be written without braces, a variant of the indentation scheme is implemented under the language import
+```scala
+import language.experimental.fewerBraces
+```
+In this variant, a `<colon>` token is also recognized where a function argument would be expected. Examples:
+
+```scala
+times(10):
+  println("ah")
+  println("ha")
+```
+
+or
+
+```scala
+credentials `++`:
+  val file = Path.userHome / ".credentials"
+  if file.exists
+  then Seq(Credentials(file))
+  else Seq()
+```
+
+or
+
+```scala
+xs.map:
+  x =>
+    val y = x - 1
+    y * y
+```
+What's more, a `:` in these settings can also be followed on the same line by the parameter part and arrow of a lambda. So the last example could be compressed to this:
+
+```scala
+xs.map: x =>
+  val y = x - 1
+  y * y
+```
+and the following would also be legal:
+```scala
+xs.foldLeft(0): (x, y) =>
+  x + y
+```
+
+The grammar changes for this variant are as follows:
+
+```
+SimpleExpr       ::=  ...
+                   |  SimpleExpr ColonArgument
+InfixExpr        ::=  ...
+                   |  InfixExpr id ColonArgument
+ColonArgument    ::=  colon [LambdaStart]
+                      indent (CaseClauses | Block) outdent
+LambdaStart      ::=  FunParams (‘=>’ | ‘?=>’)
+                   |  HkTypeParamClause ‘=>’
+```
\ No newline at end of file
diff --git a/docs/_spec/TODOreference/other-new-features/matchable.md b/docs/_spec/TODOreference/other-new-features/matchable.md
new file mode 100644
index 000000000000..234fdf03220c
--- /dev/null
+++ b/docs/_spec/TODOreference/other-new-features/matchable.md
@@ -0,0 +1,141 @@
+---
+layout: doc-page
+title: "The Matchable Trait"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/matchable.html
+---
+
+A new trait [`Matchable`](https://scala-lang.org/api/3.x/scala/Matchable.html) controls the ability to pattern match.
+
+## The Problem
+
+The Scala 3 standard library has a type [`IArray`](https://scala-lang.org/api/3.x/scala.html#IArray-0) for immutable
+arrays that is defined like this:
+
+```scala
+  opaque type IArray[+T] = Array[_ <: T]
+```
+
+The `IArray` type offers extension methods for `length` and `apply`, but not for `update`; hence it seems values of type `IArray` cannot be updated.
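+
+For illustration, here is a small usage sketch (the value `arr` is hypothetical, not from the original text) showing reads succeeding while writes do not compile:
+
+```scala
+val arr: IArray[Int] = IArray(1, 2, 3)
+val n = arr.length   // OK: `length` extension method
+val x = arr(0)       // OK: `apply` extension method
+// arr(0) = 10       // does not compile: IArray has no `update` extension method
+```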
+
+However, there is a potential hole due to pattern matching. Consider:
+
+```scala
+val imm: IArray[Int] = ...
+imm match
+  case a: Array[Int] => a(0) = 1
+```
+
+The test will succeed at runtime since [`IArray`](https://scala-lang.org/api/3.x/scala.html#IArray-0)s _are_ represented as
+`Array`s at runtime. But if we allowed it, it would break the fundamental abstraction of immutable arrays.
+
+__Aside:__ One could also achieve the same by casting:
+
+```scala
+imm.asInstanceOf[Array[Int]](0) = 1
+```
+
+But that is not as much of a problem since in Scala `asInstanceOf` is understood to be low-level and unsafe. By contrast, a pattern match that compiles without warning or error should not break abstractions.
+
+Note also that the problem is not tied to [opaque types](opaques.md) as match selectors. The following slight variant with a value of parametric
+type `T` as match selector leads to the same problem:
+
+```scala
+def f[T](x: T) = x match
+  case a: Array[Int] => a(0) = 0
+f(imm)
+```
+
+Finally, note that the problem is not limited to opaque types: no unbounded type parameter or abstract type should be decomposable with a pattern match.
+
+## The Solution
+
+There is a new type [`scala.Matchable`](https://scala-lang.org/api/3.x/scala/Matchable.html) that controls pattern matching. When typing a pattern match of a constructor pattern `C(...)` or
+a type pattern `_: C`, it is required that the selector type conforms
+to `Matchable`. If that's not the case, a warning is issued. For instance, when compiling the example at the start of this section we get:
+
+```
+> sc ../new/test.scala -source future
+-- Warning: ../new/test.scala:4:12 ---------------------------------------------
+4 |    case a: Array[Int] => a(0) = 0
+  |            ^^^^^^^^^^
+  |            pattern selector should be an instance of Matchable,
+  |            but it has unmatchable type IArray[Int] instead
+```
+
+To allow migration from Scala 2 and cross-compilation
+between Scala 2 and 3, the warning is turned on only for `-source future-migration` or higher.
+
+[`Matchable`](https://scala-lang.org/api/3.x/scala/Matchable.html) is a universal trait with `Any` as its parent class. It is extended by both [`AnyVal`](https://scala-lang.org/api/3.x/scala/AnyVal.html) and [`AnyRef`](https://scala-lang.org/api/3.x/scala/AnyRef.html). Since `Matchable` is a supertype of every concrete value or reference class, instances of such classes can be matched as before. However, match selectors of the following types will produce a warning:
+
+- Type `Any`: if pattern matching is required one should use `Matchable` instead.
+- Unbounded type parameters and abstract types: If pattern matching is required they should have an upper bound `Matchable`.
+- Type parameters and abstract types that are only bounded by some
+  universal trait: Again, `Matchable` should be added as a bound.
+
+Here is the hierarchy of top-level classes and traits with their defined methods:
+
+```scala
+abstract class Any:
+  def getClass
+  def isInstanceOf
+  def asInstanceOf
+  def ==
+  def !=
+  def ##
+  def equals
+  def hashCode
+  def toString
+
+trait Matchable extends Any
+
+class AnyVal extends Any, Matchable
+class Object extends Any, Matchable
+```
+
+[`Matchable`](https://scala-lang.org/api/3.x/scala/Matchable.html) is currently a marker trait without any methods. Over time
+we might migrate methods `getClass` and `isInstanceOf` to it, since these are closely related to pattern-matching.
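+
+As a sketch of the recommended fix for unbounded type parameters, bounding `T` by `Matchable` makes the parametric variant from above compile without the warning, while rejecting unmatchable selectors altogether:
+
+```scala
+def f[T <: Matchable](x: T) = x match
+  case a: Array[Int] => a(0) = 0
+  case _             => ()
+
+f(Array(1, 2, 3))  // fine: Array[Int] is a Matchable selector
+// f(imm)          // now a type error: IArray is not known to be Matchable
+```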
+
+## `Matchable` and Universal Equality
+
+Methods that pattern-match on selectors of type `Any` will need a cast once the
+`Matchable` warning is turned on. The most common such method is the universal
+`equals` method. It will have to be written as in the following example:
+
+```scala
+class C(val x: String):
+
+  override def equals(that: Any): Boolean =
+    that.asInstanceOf[Matchable] match
+      case that: C => this.x == that.x
+      case _ => false
+```
+
+The cast of `that` to [`Matchable`](https://scala-lang.org/api/3.x/scala/Matchable.html) serves as an indication that universal equality
+is unsafe in the presence of abstract types and opaque types since it cannot properly distinguish the meaning of a type from its representation. The cast
+is guaranteed to succeed at run-time since `Any` and [`Matchable`](https://scala-lang.org/api/3.x/scala/Matchable.html) both erase to
+`Object`.
+
+For instance, consider the definitions
+
+```scala
+opaque type Meter = Double
+def Meter(x: Double): Meter = x
+
+opaque type Second = Double
+def Second(x: Double): Second = x
+```
+
+Here, universal `equals` will return `true` for
+
+```scala
+  Meter(10).equals(Second(10))
+```
+
+even though this is clearly false mathematically. With [multiversal equality](../contextual/multiversal-equality.md) one can mitigate that problem somewhat by turning
+
+```scala
+  import scala.language.strictEquality
+  Meter(10) == Second(10)
+```
+
+into a type error.
diff --git a/docs/_spec/TODOreference/other-new-features/opaques-details.md b/docs/_spec/TODOreference/other-new-features/opaques-details.md
new file mode 100644
index 000000000000..d7305a249089
--- /dev/null
+++ b/docs/_spec/TODOreference/other-new-features/opaques-details.md
@@ -0,0 +1,126 @@
+---
+layout: doc-page
+title: "Opaque Type Aliases: More Details"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/opaques-details.html
+---
+
+## Syntax
+
+```
+Modifier          ::=  ...
+                    |  ‘opaque’
+```
+
+`opaque` is a [soft modifier](../soft-modifier.md). It can still be used as a normal identifier when it is not in front of a definition keyword.
+
+Opaque type aliases must be members of classes, traits, or objects, or be defined
+at the top level. They cannot be defined in local blocks.
+
+## Type Checking
+
+The general form of a (monomorphic) opaque type alias is
+
+```scala
+opaque type T >: L <: U = R
+```
+
+where the lower bound `L` and the upper bound `U` may be missing, in which case they are assumed to be [`scala.Nothing`](https://scala-lang.org/api/3.x/scala/Nothing.html) and [`scala.Any`](https://scala-lang.org/api/3.x/scala/Any.html), respectively. If bounds are given, it is checked that the right-hand side `R` conforms to them, i.e. `L <: R` and `R <: U`. F-bounds are not supported for opaque type aliases: `T` is not allowed to appear in `L` or `U`.
+
+Inside the scope of the alias definition, the alias is transparent: `T` is treated
+as a normal alias of `R`. Outside its scope, the alias is treated as the abstract type
+```scala
+type T >: L <: U
+```
+A special case arises if the opaque type alias is defined in an object. Example:
+
+```scala
+object o:
+  opaque type T = R
+```
+
+In this case we have inside the object (also for non-opaque types) that `o.T` is equal to
+`T` or its expanded form `o.this.T`. Equality is understood here as mutual subtyping, i.e.
+`o.T <: o.this.T` and `o.this.T <: o.T`. Furthermore, we have by the rules of opaque type aliases
+that `o.this.T` equals `R`. The two equalities compose.
+That is, inside `o`, it is
+also known that `o.T` is equal to `R`. This means the following code type-checks:
+
+```scala
+object o:
+  opaque type T = Int
+  val x: Int = id(2)
+def id(x: o.T): o.T = x
+```
+
+Opaque type aliases cannot be `private` and cannot be overridden in subclasses.
+Opaque type aliases cannot have a context function type as right-hand side.
+
+## Type Parameters of Opaque Types
+
+Opaque type aliases can have a single type parameter list. The following aliases
+are well-formed:
+```scala
+opaque type F[T] = (T, T)
+opaque type G = [T] =>> List[T]
+```
+but the following are not:
+```scala
+opaque type BadF[T] = [U] =>> (T, U)
+opaque type BadG = [T] =>> [U] => (T, U)
+```
+
+## Translation of Equality
+
+Comparing two values of opaque type with `==` or `!=` normally uses universal equality,
+unless another overloaded `==` or `!=` operator is defined for the type. To avoid
+boxing, the operation is mapped after type checking to the (in-)equality operator
+defined on the underlying type. For instance,
+```scala
+  opaque type T = Int
+
+  ...
+  val x: T
+  val y: T
+  x == y    // uses Int equality for the comparison.
+```
+
+## Top-level Opaque Types
+
+An opaque type alias at the top level is transparent in all other top-level definitions in the source file where it appears, but is opaque in nested
+objects and classes and in all other source files. Example:
+```scala
+// in test1.scala
+opaque type A = String
+val x: A = "abc"
+
+object obj:
+  val y: A = "abc"  // error: found: "abc", required: A
+
+// in test2.scala
+def z: String = x   // error: found: A, required: String
+```
+This behavior becomes clear if one recalls that top-level definitions are placed in their own synthetic object. For instance, the code in `test1.scala` would expand to
+```scala
+object test1$package:
+  opaque type A = String
+  val x: A = "abc"
+
+object obj:
+  val y: A = "abc"  // error: cannot assign "abc" to opaque type alias A
+```
+The opaque type alias `A` is transparent in its scope, which includes the definition of `x`, but not the definitions of `obj` and `y`.
+
+## Relationship to SIP 35
+
+Opaque types in Scala 3 are an evolution from what is described in
+[Scala SIP 35](https://docs.scala-lang.org/sips/opaque-types.html).
+
+The differences compared to the state described in this SIP are:
+
+ 1. Opaque type aliases can no longer be defined in local statement sequences.
+ 2. The scope where an opaque type alias is visible is now the whole scope where
+    it is defined, instead of just a companion object.
+ 3. The notion of a companion object for opaque type aliases has been dropped.
+ 4. Opaque type aliases can have bounds.
+ 5. The notion of type equality involving opaque type aliases has been clarified. It was
+    strengthened with respect to the previous implementation of SIP 35.
diff --git a/docs/_spec/TODOreference/other-new-features/opaques.md b/docs/_spec/TODOreference/other-new-features/opaques.md
new file mode 100644
index 000000000000..d8c4d37bcb3b
--- /dev/null
+++ b/docs/_spec/TODOreference/other-new-features/opaques.md
@@ -0,0 +1,179 @@
+---
+layout: doc-page
+title: "Opaque Type Aliases"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/opaques.html
+---
+
+Opaque type aliases provide type abstraction without any overhead.
+Example:
+
+```scala
+object MyMath:
+
+  opaque type Logarithm = Double
+
+  object Logarithm:
+
+    // These are the two ways to lift to the Logarithm type
+
+    def apply(d: Double): Logarithm = math.log(d)
+
+    def safe(d: Double): Option[Logarithm] =
+      if d > 0.0 then Some(math.log(d)) else None
+
+  end Logarithm
+
+  // Extension methods define opaque types' public APIs
+  extension (x: Logarithm)
+    def toDouble: Double = math.exp(x)
+    def + (y: Logarithm): Logarithm = Logarithm(math.exp(x) + math.exp(y))
+    def * (y: Logarithm): Logarithm = x + y
+
+end MyMath
+```
+
+This introduces `Logarithm` as a new abstract type, which is implemented as `Double`.
+The fact that `Logarithm` is the same as `Double` is only known in the scope where
+`Logarithm` is defined, which in the above example corresponds to the object `MyMath`.
+In other words, within that scope `Logarithm` is treated as a type alias, but this is opaque to the outside world,
+where, in consequence, `Logarithm` is seen as an abstract type that has nothing to do with `Double`.
+
+The public API of `Logarithm` consists of the `apply` and `safe` methods defined in the companion object.
+They convert from `Double`s to `Logarithm` values. Moreover, an operation `toDouble` that converts the other way, and operations `+` and `*` are defined as extension methods on `Logarithm` values.
+The following operations would be valid because they use functionality implemented in the `MyMath` object:
+
+```scala
+import MyMath.Logarithm
+
+val l = Logarithm(1.0)
+val l2 = Logarithm(2.0)
+val l3 = l * l2
+val l4 = l + l2
+```
+
+But the following operations would lead to type errors:
+
+```scala
+val d: Double = l       // error: found: Logarithm, required: Double
+val l2: Logarithm = 1.0 // error: found: Double, required: Logarithm
+l * 2                   // error: found: Int(2), required: Logarithm
+l / l2                  // error: `/` is not a member of Logarithm
+```
+
+## Bounds For Opaque Type Aliases
+
+Opaque type aliases can also come with bounds. Example:
+
+```scala
+object Access:
+
+  opaque type Permissions = Int
+  opaque type PermissionChoice = Int
+  opaque type Permission <: Permissions & PermissionChoice = Int
+
+  extension (x: PermissionChoice)
+    def | (y: PermissionChoice): PermissionChoice = x | y
+  extension (x: Permissions)
+    def & (y: Permissions): Permissions = x | y
+  extension (granted: Permissions)
+    def is(required: Permissions) = (granted & required) == required
+    def isOneOf(required: PermissionChoice) = (granted & required) != 0
+
+  val NoPermission: Permission = 0
+  val Read: Permission = 1
+  val Write: Permission = 2
+  val ReadWrite: Permissions = Read | Write
+  val ReadOrWrite: PermissionChoice = Read | Write
+
+end Access
+```
+
+The `Access` object defines three opaque type aliases:
+
+- `Permission`, representing a single permission,
+- `Permissions`, representing a set of permissions with the meaning "all of these permissions granted",
+- `PermissionChoice`, representing a set of permissions with the meaning "at least one of these permissions granted".
+
+Outside the `Access` object, values of type `Permissions` may be combined using the `&` operator,
+where `x & y` means "all permissions in `x` *and* in `y` granted".
+Values of type `PermissionChoice` may be combined using the `|` operator,
+where `x | y` means "a permission in `x` *or* in `y` granted".
+
+Note that inside the `Access` object, the `&` and `|` operators always resolve to the corresponding methods of `Int`,
+because members always take precedence over extension methods.
+For that reason, the `|` extension method in `Access` does not cause infinite recursion.
+
+In particular, the definition of `ReadWrite` must use `|`, the bitwise operator for `Int`,
+even though client code outside `Access` would use `&`, the extension method on `Permissions`.
+The internal representations of `ReadWrite` and `ReadOrWrite` are identical, but this is not visible to the client,
+which is interested only in the semantics of `Permissions`, as in the example below.
+
+All three opaque type aliases have the same underlying representation type `Int`. The
+`Permission` type has an upper bound `Permissions & PermissionChoice`. This makes
+it known outside the `Access` object that `Permission` is a subtype of the other
+two types. Hence, the following usage scenario type-checks:
+
+```scala
+object User:
+  import Access.*
+
+  case class Item(rights: Permissions)
+  extension (item: Item)
+    def +(other: Item): Item = Item(item.rights & other.rights)
+
+  val roItem = Item(Read)  // OK, since Permission <: Permissions
+  val woItem = Item(Write)
+  val rwItem = Item(ReadWrite)
+  val noItem = Item(NoPermission)
+
+  assert(!roItem.rights.is(ReadWrite))
+  assert(roItem.rights.isOneOf(ReadOrWrite))
+
+  assert(rwItem.rights.is(ReadWrite))
+  assert(rwItem.rights.isOneOf(ReadOrWrite))
+
+  assert(!noItem.rights.is(ReadWrite))
+  assert(!noItem.rights.isOneOf(ReadOrWrite))
+
+  assert((roItem + woItem).rights.is(ReadWrite))
+end User
+```
+On the other hand, the call `roItem.rights.isOneOf(ReadWrite)` would give a type error:
+```scala
+  assert(roItem.rights.isOneOf(ReadWrite))
+                               ^^^^^^^^^
+                               Found:    (Access.ReadWrite : Access.Permissions)
+                               Required: Access.PermissionChoice
+```
+`Permissions` and `PermissionChoice` are different, unrelated types outside `Access`.
+
+## Opaque Type Members on Classes
+
+While opaque types are typically used together with objects to hide implementation details of a module, they can also be used with classes.
+
+For example, we can redefine the above example of logarithms as a class:
+```scala
+class Logarithms:
+
+  opaque type Logarithm = Double
+
+  def apply(d: Double): Logarithm = math.log(d)
+
+  def safe(d: Double): Option[Logarithm] =
+    if d > 0.0 then Some(math.log(d)) else None
+
+  def mul(x: Logarithm, y: Logarithm) = x + y
+```
+
+Opaque type members of different instances are treated as different:
+```scala
+val l1 = new Logarithms
+val l2 = new Logarithms
+val x = l1(1.5)
+val y = l1(2.6)
+val z = l2(3.1)
+l1.mul(x, y)  // type checks
+l1.mul(x, z)  // error: found: l2.Logarithm, required: l1.Logarithm
+```
+In general, one can think of an opaque type as being only transparent in the scope of `private[this]`.
+
+[More details](opaques-details.md)
diff --git a/docs/_spec/TODOreference/other-new-features/open-classes.md b/docs/_spec/TODOreference/other-new-features/open-classes.md
new file mode 100644
index 000000000000..764c234df599
--- /dev/null
+++ b/docs/_spec/TODOreference/other-new-features/open-classes.md
@@ -0,0 +1,80 @@
+---
+layout: doc-page
+title: "Open Classes"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/open-classes.html
+---
+
+An `open` modifier on a class signals that the class is planned for extensions.
Example: +```scala +// File Writer.scala +package p + +open class Writer[T]: + + /** Sends to stdout, can be overridden */ + def send(x: T) = println(x) + + /** Sends all arguments using `send` */ + def sendAll(xs: T*) = xs.foreach(send) +end Writer + +// File EncryptedWriter.scala +package p + +class EncryptedWriter[T: Encryptable] extends Writer[T]: + override def send(x: T) = super.send(encrypt(x)) +``` +An open class typically comes with some documentation that describes +the internal calling patterns between methods of the class as well as hooks that can be overridden. We call this the _extension contract_ of the class. It is different from the _external contract_ between a class and its users. + +Classes that are not open can still be extended, but only if at least one of two alternative conditions is met: + + - The extending class is in the same source file as the extended class. In this case, the extension is usually an internal implementation matter. + + - The language feature [`adhocExtensions`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$adhocExtensions$.html) is enabled for the extending class. This is typically enabled by an import clause in the source file of the extension: + ```scala + import scala.language.adhocExtensions + ``` + Alternatively, the feature can be enabled by the compiler option `-language:adhocExtensions`. + If the feature is not enabled, the compiler will issue a "feature" warning. For instance, if the `open` modifier on class `Writer` is dropped, compiling `EncryptedWriter` would produce a warning: + ``` + -- Feature Warning: EncryptedWriter.scala:6:14 ---- + |class EncryptedWriter[T: Encryptable] extends Writer[T] + | ^ + |Unless class Writer is declared 'open', its extension + | in a separate file should be enabled + |by adding the import clause 'import scala.language.adhocExtensions' + |or by setting the compiler option -language:adhocExtensions. + ``` + +## Motivation + +When writing a class, there are three possible expectations of extensibility: + +1. The class is intended to allow extensions. This means one should expect +a carefully worked out and documented extension contract for the class. + +2. Extensions of the class are forbidden, for instance to make correctness or security guarantees. + +3. There is no firm decision either way. The class is not _a priori_ intended for extensions, but if others find it useful to extend on an _ad-hoc_ basis, let them go ahead. However, they are on their own in this case. There is no documented extension contract, and future versions of the class might break the extensions (by rearranging internal call patterns, for instance). + +The three cases are clearly distinguished by using `open` for (1), `final` for (2) and no modifier for (3). + +It is good practice to avoid _ad-hoc_ extensions in a code base, since they tend to lead to fragile systems that are hard to evolve. But there +are still some situations where these extensions are useful: for instance, +to mock classes in tests, or to apply temporary patches that add features or fix bugs in library classes. That's why _ad-hoc_ extensions are permitted, but only if there is an explicit opt-in via a language feature import. + +## Details + + - `open` is a soft modifier. It is treated as a normal identifier + unless it is in modifier position. + - An `open` class cannot be `final` or `sealed`. + - Traits or `abstract` classes are always `open`, so `open` is redundant for them. 
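+
+To illustrate the opt-in described above, here is a sketch of client code that extends `Writer` from a separate file, assuming the `open` modifier on `Writer` has been dropped as in the warning scenario; `MockWriter` is a hypothetical test double, not part of the original text:
+
+```scala
+// File MockWriter.scala
+import scala.language.adhocExtensions
+import p.Writer
+
+// An ad-hoc extension: allowed without a feature warning
+// because of the language import above.
+class MockWriter[T] extends Writer[T]:
+  override def send(x: T) = ()  // swallow all output in tests
+```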
+
+## Relationship with `sealed`
+
+A class that is neither `abstract` nor `open` is similar to a `sealed` class: it can still be extended, but only in the same source file. The difference is what happens if an extension of the class is attempted in another source file. For a `sealed` class, this is an error, whereas for a simple non-open class, this is still permitted provided the [`adhocExtensions`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$adhocExtensions$.html) feature is enabled, and it gives a warning otherwise.
+
+## Migration
+
+`open` is a new modifier in Scala 3. To allow cross-compilation between Scala 2.13 and Scala 3.0 without warnings, the feature warning for ad-hoc extensions is produced only under `-source future`. It will be produced by default from Scala 3.1 on.
diff --git a/docs/_spec/TODOreference/other-new-features/other-new-features.md b/docs/_spec/TODOreference/other-new-features/other-new-features.md
new file mode 100644
index 000000000000..974a8548cb68
--- /dev/null
+++ b/docs/_spec/TODOreference/other-new-features/other-new-features.md
@@ -0,0 +1,7 @@
+---
+layout: index
+title: "Other New Features"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features.html
+---
+
+The following pages document new features of Scala 3.
diff --git a/docs/_spec/TODOreference/other-new-features/parameter-untupling-spec.md b/docs/_spec/TODOreference/other-new-features/parameter-untupling-spec.md
new file mode 100644
index 000000000000..e5165550fc0d
--- /dev/null
+++ b/docs/_spec/TODOreference/other-new-features/parameter-untupling-spec.md
@@ -0,0 +1,89 @@
+---
+layout: doc-page
+title: "Parameter Untupling - More Details"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/parameter-untupling-spec.html
+---
+
+## Motivation
+
+Say you have a list of pairs
+
+```scala
+val xs: List[(Int, Int)]
+```
+
+and you want to map `xs` to a list of `Int`s so that each pair of numbers is mapped to their sum.
+Previously, the best way to do this was with a pattern-matching decomposition:
+
+```scala
+xs.map {
+  case (x, y) => x + y
+}
+```
+While correct, this is inconvenient. Instead, we propose to write it the following way:
+
+```scala
+xs.map {
+  (x, y) => x + y
+}
+```
+
+or, equivalently:
+
+```scala
+xs.map(_ + _)
+```
+
+Generally, a function value with `n > 1` parameters can be converted to a function with tupled arguments if the expected type is a unary function type of the form `((T_1, ..., T_n)) => U`.
+
+## Type Checking
+
+The type checking happens in two steps:
+
+1. Check whether parameter untupling is feasible
+2. Adapt the function and type check it
+
+### Feasibility Check
+
+Suppose `f` is a function of the form `(p1, ..., pn) => e` (where `n > 1`), with `p1, ..., pn` as parameters and `e` as the function body.
+
+If the expected type for checking `f` is a fully defined function type of the form `TupleN[T1, ..., Tn] => R` (or an equivalent SAM type), where each type `Ti` fits the corresponding parameter `pi`, then `f` is feasible for parameter untupling with the expected type `TupleN[T1, ..., Tn] => R`.
+
+A type `Ti` fits a parameter `pi` if one of the following two cases holds:
+
+* `pi` comes without a type, i.e. it is a simple identifier or `_`.
+* `pi` is of the form `x: Ui` or `_: Ui` and `Ti <: Ui`.
+
+Parameter untupling composes with eta-expansion. That is, an n-ary function generated by eta-expansion can in turn be adapted to the expected type with parameter untupling.
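+
+For instance, a method reference is first eta-expanded to a function value, which is then untupled to fit the expected tuple-taking type (a short sketch, mirroring the example on the companion page):
+
+```scala
+val xs: List[(Int, Int)] = List((1, 2), (3, 4))
+
+def combine(i: Int, j: Int): Int = i + j
+
+// `combine` eta-expands to `(i, j) => combine(i, j)`, which is then
+// adapted to the expected type `((Int, Int)) => Int` by parameter untupling.
+val sums = xs.map(combine)  // List(3, 7)
+```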
+
+### Term Adaptation
+
+If the function
+
+```scala
+(p1, ..., pn) => e
+```
+
+is feasible for parameter untupling with the expected type `TupleN[T1, ..., Tn] => Te`, then continue to type check the following adapted function
+
+```scala
+(x: TupleN[T1, ..., Tn]) =>
+  def p1: T1 = x._1
+  ...
+  def pn: Tn = x._n
+  e
+```
+
+with the same expected type.
+
+## Migration
+
+Code like this could not be written before, hence the new notation is not ambiguous after adoption.
+
+It is possible that someone has written an implicit conversion from `(T1, ..., Tn) => R` to `TupleN[T1, ..., Tn] => R` for some `n`.
+Such a conversion is now only useful for general conversions of function values, when parameter untupling is not applicable.
+Some care is required to implement the conversion efficiently.
+Obsolete conversions could be detected and fixed by [`Scalafix`](https://scalacenter.github.io/scalafix/).
+
+## Reference
+
+For more information, see [Issue #897](https://github.com/lampepfl/dotty/issues/897).
diff --git a/docs/_spec/TODOreference/other-new-features/parameter-untupling.md b/docs/_spec/TODOreference/other-new-features/parameter-untupling.md
new file mode 100644
index 000000000000..fcc1fa11d519
--- /dev/null
+++ b/docs/_spec/TODOreference/other-new-features/parameter-untupling.md
@@ -0,0 +1,77 @@
+---
+layout: doc-page
+title: "Parameter Untupling"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/parameter-untupling.html
+---
+
+Say you have a list of pairs
+
+```scala
+val xs: List[(Int, Int)]
+```
+
+and you want to map `xs` to a list of `Int`s so that each pair of numbers is mapped to
+their sum. Previously, the best way to do this was with a pattern-matching decomposition:
+
+```scala
+xs.map {
+  case (x, y) => x + y
+}
+```
+
+While correct, this is also inconvenient and confusing, since the `case`
+suggests that the pattern match could fail. As a shorter and clearer alternative, Scala 3 now allows:
+
+```scala
+xs.map {
+  (x, y) => x + y
+}
+```
+
+or, equivalently:
+
+```scala
+xs.map(_ + _)
+```
+and
+```scala
+def combine(i: Int, j: Int) = i + j
+xs.map(combine)
+```
+
+Generally, a function value with `n > 1` parameters is wrapped in a
+function type of the form `((T_1, ..., T_n)) => U` if that is the expected type.
+The tuple parameter is decomposed and its elements are passed directly to the underlying function.
+
+More specifically, the adaptation is applied to the mismatching formal
+parameter list. In particular, the adaptation is not a conversion
+between function types. That is why the following is not accepted:
+
+```scala
+val combiner: (Int, Int) => Int = _ + _
+xs.map(combiner)  // Type Mismatch
+```
+
+The function value must be explicitly tupled, rather than the parameters untupled:
+```scala
+xs.map(combiner.tupled)
+```
+
+A conversion may be provided in user code:
+
+```scala
+import scala.language.implicitConversions
+transparent inline implicit def `fallback untupling`(f: (Int, Int) => Int): ((Int, Int)) => Int =
+  p => f(p._1, p._2)  // use specialized apply instead of unspecialized `tupled`
+xs.map(combiner)
+```
+
+Parameter untupling is attempted before conversions are applied, so that a conversion in scope
+cannot subvert untupling.
+
+## Reference
+
+For more information see:
+
+* [More details](./parameter-untupling-spec.md)
+* [Issue #897](https://github.com/lampepfl/dotty/issues/897).
diff --git a/docs/_spec/TODOreference/other-new-features/safe-initialization.md b/docs/_spec/TODOreference/other-new-features/safe-initialization.md
new file mode 100644
index 000000000000..757038eac786
--- /dev/null
+++ b/docs/_spec/TODOreference/other-new-features/safe-initialization.md
@@ -0,0 +1,343 @@
+---
+layout: doc-page
+title: "Safe Initialization"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/safe-initialization.html
+---
+
+Scala 3 implements an experimental safe initialization check, which can be enabled by the compiler option `-Ysafe-init`.
+
+The design and implementation of the initialization checker are described in the
+paper _Safe object initialization, abstractly_ [3].
+
+## A Quick Glance
+
+To get a feel for how it works, we first show several examples below.
+
+### Parent-Child Interaction
+
+Given the following code snippet:
+
+``` scala
+abstract class AbstractFile:
+  def name: String
+  val extension: String = name.substring(4)
+
+class RemoteFile(url: String) extends AbstractFile:
+  val localFile: String = s"${url.##}.tmp"  // error: usage of `localFile` before it's initialized
+  def name: String = localFile
+```
+
+The checker will report:
+
+``` scala
+-- Warning: tests/init/neg/AbstractFile.scala:7:4 ------------------------------
+7 |  val localFile: String = s"${url.##}.tmp"  // error: usage of `localFile` before it's initialized
+  |      ^
+  |      Access non-initialized field value localFile. Calling trace:
+  |       -> val extension: String = name.substring(4)    [ AbstractFile.scala:3 ]
+  |        -> def name: String = localFile                [ AbstractFile.scala:8 ]
+```
+
+### Inner-Outer Interaction
+
+Given the code below:
+
+``` scala
+object Trees:
+  class ValDef { counter += 1 }
+  class EmptyValDef extends ValDef
+  val theEmptyValDef = new EmptyValDef
+  private var counter = 0  // error
+```
+
+The checker will report:
+
+``` scala
+-- Warning: tests/init/neg/trees.scala:5:14 ------------------------------------
+5 |  private var counter = 0  // error
+  |              ^
+  |             Access non-initialized field variable counter. Calling trace:
+  |              -> val theEmptyValDef = new EmptyValDef    [ trees.scala:4 ]
+  |               -> class EmptyValDef extends ValDef       [ trees.scala:3 ]
+  |                -> class ValDef { counter += 1 }         [ trees.scala:2 ]
```

+### Functions
+
+Given the code below:
+
+``` scala
+abstract class Parent:
+  val f: () => String = () => this.message
+  def message: String
+
+class Child extends Parent:
+  val a = f()
+  val b = "hello"  // error
+  def message: String = b
+```
+
+The checker reports:
+
+``` scala
+-- Warning: tests/init/neg/features-high-order.scala:7:6 -----------------------
+7 |  val b = "hello"  // error
+  |      ^
+  |      Access non-initialized field value b. Calling trace:
+  |       -> val a = f()                                  [ features-high-order.scala:6 ]
+  |        -> val f: () => String = () => this.message    [ features-high-order.scala:2 ]
+  |         -> def message: String = b                    [ features-high-order.scala:8 ]
+```
+
+## Design Goals
+
+We establish the following design goals:
+
+- __Sound__: checking always terminates, and is sound for common and reasonable usage (over-approximation)
+- __Expressive__: support common and reasonable initialization patterns
+- __Friendly__: simple rules, minimal syntactic overhead, informative error messages
+- __Modular__: modular checking, no analysis beyond project boundary
+- __Fast__: instant feedback
+- __Simple__: no changes to core type system, explainable by a simple theory
+
+By _reasonable usage_, we include the following use cases (but are not restricted to them):
+
+- Access fields on `this` and outer `this` during initialization
+- Call methods on `this` and outer `this` during initialization
+- Instantiate inner classes and call methods on such instances during initialization
+- Capture fields in functions
+
+## Principles
+
+To achieve the goals, we uphold the following fundamental principles:
+_stackability_, _monotonicity_, _scopability_ and _authority_.
+
+Stackability means that all fields of a class are initialized at the end of the
+class body. Scala enforces this property in syntax by demanding that all fields
+are initialized at the end of the primary constructor, except for the language
+feature below:
+
+``` scala
+var x: T = _
+```
+
+Control effects such as exceptions may break this property, as the
+following example shows:
+
+``` scala
+class MyException(val b: B) extends Exception("")
+class A:
+  val b = try { new B } catch { case myEx: MyException => myEx.b }
+  println(b.a)
+
+class B:
+  throw new MyException(this)
+  val a: Int = 1
+```
+
+In the code above, the control effect teleports the uninitialized value,
+wrapped in an exception. In the implementation, we avoid the problem
+by ensuring that thrown values must be transitively initialized.
+
+Monotonicity means that the initialization status of an object should
+not go backward: initialized fields continue to be initialized, and a
+field that points to an initialized object may not later point to an
+object under initialization. As an example, the following code will be rejected:
+
+``` scala
+trait Reporter:
+  def report(msg: String): Unit
+
+class FileReporter(ctx: Context) extends Reporter:
+  ctx.typer.reporter = this  // ctx now reaches an uninitialized object
+  val file: File = new File("report.txt")
+  def report(msg: String) = file.write(msg)
+```
+
+In the code above, suppose `ctx` points to a transitively initialized
+object. Now the assignment at line 3 makes `this`, which is not fully
+initialized, reachable from `ctx`. This makes field usage dangerous,
+as it may indirectly reach uninitialized fields.
+
+Monotonicity is based on a well-known technique called _heap monotonic
+typestate_ to ensure soundness in the presence of aliasing
+[1]. Roughly speaking, it means initialization state should not go backwards.
+
+Scopability means that there are no side channels for accessing partially
+constructed objects. Control effects like coroutines, delimited
+control, and resumable exceptions may break the property, as they can transport a
+value from higher up the stack (not in scope) so that it becomes reachable from the current scope.
+Static fields can also serve as a teleport, thus breaking this property.
+In the implementation, we need to enforce that teleported values are transitively
+initialized.
+
+The three principles above contribute to _local reasoning about initialization_,
+which means:
+
+> An initialized environment can only produce initialized values.
+
+For example, if the arguments to a `new`-expression are transitively
+initialized, so is the result. If the receiver and arguments in a method call
+are transitively initialized, so is the result.
+
+Local reasoning about initialization gives rise to a fast initialization
+checker, as it avoids whole-program analysis.
+
+The principle of authority goes hand-in-hand with monotonicity: the principle
+of monotonicity stipulates that initialization states cannot go backwards, while
+the principle of authority stipulates that the initialization states may not
+go forward at arbitrary locations due to aliasing. In Scala, we may only
+advance initialization states of objects in the class body when a field is
+defined with a mandatory initializer or at local reasoning points when the object
+becomes transitively initialized.
+
+## Abstract Values
+
+There are three fundamental abstractions for initialization states of objects:
+
+- __Cold__: A cold object may have uninitialized fields.
+- __Warm__: A warm object has all its fields initialized but may reach _cold_ objects.
+- __Hot__: A hot object is transitively initialized, i.e., it only reaches warm objects.
+
+In the initialization checker, the abstraction `Warm` is refined to handle inner
+classes and multiple constructors:
+
+- __Warm[C] { outer = V, ctor, args = Vs }__: A warm object of class `C`, where the immediate outer of `C` is `V`, the constructor is `ctor` and the constructor arguments are `Vs`.
+
+The initialization checker checks each concrete class separately. The abstraction `ThisRef`
+represents the current object under initialization:
+
+- __ThisRef[C]__: The current object of class `C` under initialization.
+
+The initialization state of the current object is stored in the abstract heap as an
+abstract object. The abstract heap also serves as a cache for the field values
+of warm objects. `Warm` and `ThisRef` are "addresses" of the abstract objects stored
+in the abstract heap.
+
+Two more abstractions are introduced to support functions and conditional
+expressions:
+
+- __Fun(e, V, C)__: An abstract function value where `e` is the code, `V` is the
+  abstract value for `this` inside the function body and the function is located
+  inside the class `C`.
+
+- __Refset(Vs)__: A set of abstract values `Vs`.
+
+A value `v` is _effectively hot_ if any of the following is true:
+
+- `v` is `Hot`.
+- `v` is `ThisRef` and all fields of the underlying object are assigned.
+- `v` is `Warm[C] { ... }` and
+  1. `C` does not contain inner classes; and
+  2. Calling any method on `v` encounters no initialization errors and the method return value is _effectively hot_; and
+  3. Each field of `v` is _effectively hot_.
+- `v` is `Fun(e, V, C)` and calling the function encounters no errors and the
+  function return value is _effectively hot_.
+- The root object (referred to by `ThisRef`) is _effectively hot_.
+
+An effectively hot value can be regarded as transitively initialized and thus can
+be safely leaked via method arguments or as the right-hand side of a reassignment.
+The initialization checker tries to promote non-hot values to effectively hot
+whenever possible.
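+
+As a rough illustration of these abstractions (a sketch, not part of the formal model), consider how the checker labels values in a small inner-class example:
+
+```scala
+class Outer:
+  val inner = new Inner  // abstract value: Warm[Inner] { outer = ThisRef[Outer], ... }
+  val n: Int = inner.f   // the call on the warm value is checked and flags
+                         // the read of the still-uninitialized field `n` below
+  class Inner:
+    def f: Int = n       // accesses Outer.this.n before it is assigned
+```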
+
+## Rules
+
+With the established principles and design goals, the following rules are imposed:
+
+1. The field access `e.f` or method call `e.m()` is illegal if `e` is _cold_.
+
+   A cold value should not be used.
+
+2. The field access `e.f` is invalid if `e` has the value `ThisRef` and `f` is not initialized.
+
+3. In an assignment `o.x = e`, the expression `e` must be _effectively hot_.
+
+   This is how monotonicity is enforced in the system. Note that in an
+   initialization `val f: T = e`, the expression `e` may point to a non-hot
+   value.
+
+4. Arguments to method calls must be _effectively hot_.
+
+   Escape of `this` in the constructor is commonly regarded as an anti-pattern.
+
+   However, passing non-hot values as arguments to another constructor is allowed, to support
+   the creation of cyclic data structures (see the sketch after this list). The checker will ensure that the escaped
+   non-initialized object is not used, i.e. calling methods or accessing fields
+   on the escaped object is not allowed.
+
+   One exception concerns calls to the synthetic `apply` methods of case classes. For example,
+   the method call `Some.apply(e)` will be interpreted as `new Some(e)`, and is thus
+   valid even if `e` is not hot.
+
+   Another exception to this rule is parametric method calls. For example, in
+   `List.apply(e)`, the argument `e` may be non-hot. If that is the case, the
+   result value of the parametric method call is taken as _cold_.
+
+5. Method calls on hot values with effectively hot arguments produce hot results.
+
+   This rule is assured by local reasoning about initialization.
+
+6. Method calls on `ThisRef` and warm values will be resolved statically and the
+   corresponding method bodies are checked.
+
+7. In a new expression `new p.C(args)`, if the values of `p` and `args` are
+   effectively hot, then the result value is also hot.
+
+   This is assured by local reasoning about initialization.
+
+8. In a new expression `new p.C(args)`, if any value of `p` and `args` is not
+   effectively hot, then the result value takes the form `Warm[C] { outer = Vp, args = Vargs }`. The initialization code for the class `C` is checked again to make
+   sure the non-hot values are used properly.
+
+   In the above, `Vp` is the widened value of `p` --- the widening happens if `p`
+   is a warm value `Warm[D] { outer = V, args }` and we widen it to
+   `Warm[D] { outer = Cold, args }`.
+
+   The variable `Vargs` represents the values of `args` with non-hot values widened
+   to `Cold`.
+
+   The motivation for the widening is to finitize the abstract domain and ensure
+   termination of the initialization check.
+
+9. The scrutinee in a pattern match and the values in return and throw statements must be _effectively hot_.
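+
+To illustrate rule 4's constructor exception, here is a sketch of a cyclic structure that the checker accepts, as long as the escaped object is not used during initialization:
+
+```scala
+class Parent:
+  val child = new Child(this)  // passing the non-hot `this` to a constructor is allowed
+
+class Child(parent: Parent)    // ok: `parent` is stored but not used during initialization
+```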
+
+## Modularity
+
+The analysis takes the primary constructors of concrete classes as entry points.
+It follows the constructors of super classes, which might be defined in another project.
+The analysis takes advantage of TASTy for analyzing super classes defined in another project.
+
+The crossing of project boundaries raises a concern about modularity. It is
+well-known in object-oriented programming that superclass and subclass are
+tightly coupled. For example, adding a method in the superclass requires
+recompiling the child class for checking safe overriding.
+
+Initialization is no exception in this respect. The initialization of an object
+essentially involves close interaction between subclass and superclass. If the
+superclass is defined in another project, the crossing of the project boundary
+cannot be avoided for soundness of the analysis.
+
+Meanwhile, inheritance across project boundaries has been under scrutiny, and the
+introduction of [open classes](./open-classes.md) mitigates the concern here.
+For example, the initialization check could enforce that the constructors of
+open classes may not contain method calls on `this` or introduce annotations as
+a contract.
+
+Feedback from the community on this topic is welcome.
+
+## Back Doors
+
+Occasionally you may want to suppress warnings reported by the
+checker. You can either write `e: @unchecked` to tell the checker to
+skip checking for the expression `e`, or you may use the old trick:
+mark some fields as `lazy`.
+
+## Caveats
+
+- The system cannot provide a safety guarantee when extending Java or Scala 2 classes.
+- Safe initialization of global objects is only partially checked.
+
+## References
+
+1. Fähndrich, M. and Leino, K.R.M., 2003, July. [_Heap monotonic typestates_](https://www.microsoft.com/en-us/research/publication/heap-monotonic-typestate/). In International Workshop on Aliasing, Confinement and Ownership in object-oriented programming (IWACO).
+2. Fengyun Liu, Ondřej Lhoták, Aggelos Biboudis, Paolo G. Giarrusso, and Martin Odersky. [_A type-and-effect system for object initialization_](https://dl.acm.org/doi/10.1145/3428243). OOPSLA, 2020.
+3. Fengyun Liu, Ondřej Lhoták, Enze Xing, Nguyen Cao Pham. [_Safe object initialization, abstractly_](https://dl.acm.org/doi/10.1145/3486610.3486895). Scala 2021.
diff --git a/docs/_spec/TODOreference/other-new-features/targetName.md b/docs/_spec/TODOreference/other-new-features/targetName.md
new file mode 100644
index 000000000000..63c4cf1ec0df
--- /dev/null
+++ b/docs/_spec/TODOreference/other-new-features/targetName.md
@@ -0,0 +1,118 @@
+---
+layout: doc-page
+title: "The @targetName annotation"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/targetName.html
+---
+
+A [`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotation on a definition defines an alternate name for the implementation of that definition. Example:
+
+```scala
+import scala.annotation.targetName
+
+object VecOps:
+  extension [T](xs: Vec[T])
+    @targetName("append")
+    def ++= (ys: Vec[T]): Vec[T] = ...
+```
+
+Here, the `++=` operation is implemented (in bytecode or native code) under the name `append`. The implementation name affects the code that is generated, and is the name under which code from other languages can call the method. For instance, `++=` could be invoked from Java like this:
+
+```java
+VecOps.append(vec1, vec2)
+```
+
+The [`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotation has no bearing on Scala usages. Any application of that method in Scala has to use `++=`, not `append`.
+
+## Details
+
+ 1. `@targetName` is defined in package `scala.annotation`. It takes a single argument
+    of type `String`. That string is called the _external name_ of the definition
+    that's annotated.
+
+ 2. A `@targetName` annotation can be given for all kinds of definitions except a top-level `class`, `trait`, or `object`.
+
+ 3. The name given in a [`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotation must be a legal name
+    for the defined entities on the host platform.
+
+ 4. It is recommended that definitions with symbolic names have a [`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotation.
This will establish an alternate name that is easier to search for and
+    will avoid cryptic encodings in runtime diagnostics.
+
+ 5. Definitions with names in backticks that are not legal host platform names
+    should also have a [`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotation.
+
+## Relationship with Overriding
+
+[`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotations are significant for matching two method definitions to decide whether they conflict or override each other. Two method definitions match if they have the same name, signature, and erased name. Here,
+
+- The _signature_ of a definition consists of the names of the erased types of all (value-) parameters and the method's result type.
+- The _erased name_ of a method definition is its target name if a [`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotation is given and its defined name otherwise.
+
+This means that `@targetName` annotations can be used to disambiguate two method definitions that would otherwise clash. For instance:
+
+```scala
+def f(x: => String): Int = x.length
+def f(x: => Int): Int = x + 1  // error: double definition
+```
+
+The two definitions above clash since their erased parameter types are both [`Function0`](https://scala-lang.org/api/3.x/scala/Function0.html), which is the type of the translation of a by-name parameter. Hence they have the same names and signatures. But we can avoid the clash by adding a [`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotation to either method or to both of them. Example:
+
+```scala
+@targetName("f_string")
+def f(x: => String): Int = x.length
+def f(x: => Int): Int = x + 1  // OK
+```
+
+This will produce methods `f_string` and `f` in the generated code.
+
+However, [`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotations are not allowed to break overriding relationships
+between two definitions that otherwise have the same names and types. So the following would be in error:
+
+```scala
+import annotation.targetName
+class A:
+  def f(): Int = 1
+class B extends A:
+  @targetName("g") def f(): Int = 2
+```
+
+Here, the compiler reports:
+
+```
+-- Error: test.scala:6:23 ------------------------------------------------------
+6 |  @targetName("g") def f(): Int = 2
+  |                       ^
+  |error overriding method f in class A of type (): Int;
+  |  method f of type (): Int should not have a @targetName
+  |  annotation since the overridden member hasn't one either
+```
+
+The relevant overriding rules can be summarized as follows:
+
+- Two members can override each other if their names and signatures are the same,
+  and they either have the same erased names or the same types.
+- If two members override each other, then both their erased names and their types must be the same.
+
+As usual, any overriding relationship in the generated code must also
+be present in the original code. So the following example would also be in error:
+
+```scala
+import annotation.targetName
+class A:
+  def f(): Int = 1
+class B extends A:
+  @targetName("f") def g(): Int = 2
+```
+
+Here, the original methods `g` and `f` do not override each other since they have
+different names.
But once we switch to target names, there is a clash that is reported by the compiler:
+
+```
+-- [E120] Naming Error: test.scala:4:6 -----------------------------------------
+4 |class B extends A:
+  |      ^
+  |      Name clash between defined and inherited member:
+  |      def f(): Int in class A at line 3 and
+  |      def g(): Int in class B at line 5
+  |      have the same name and type after erasure.
+1 error found
+```
diff --git a/docs/_spec/TODOreference/other-new-features/threadUnsafe-annotation.md b/docs/_spec/TODOreference/other-new-features/threadUnsafe-annotation.md
new file mode 100644
index 000000000000..ae1af1e4b671
--- /dev/null
+++ b/docs/_spec/TODOreference/other-new-features/threadUnsafe-annotation.md
@@ -0,0 +1,18 @@
+---
+layout: doc-page
+title: "The @threadUnsafe annotation"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/threadUnsafe-annotation.html
+---
+
+A new annotation [`@threadUnsafe`](https://scala-lang.org/api/3.x/scala/annotation/threadUnsafe.html) can be used on a field which defines
+a `lazy val`. When this annotation is used, the initialization of the
+[`lazy val`](../changed-features/lazy-vals-init.md) will use a faster mechanism which is not thread-safe.
+
+## Example
+
+```scala
+import scala.annotation.threadUnsafe
+
+class Hello:
+  @threadUnsafe lazy val x: Int = 1
+```
diff --git a/docs/_spec/TODOreference/other-new-features/transparent-traits.md b/docs/_spec/TODOreference/other-new-features/transparent-traits.md
new file mode 100644
index 000000000000..699ce0b9ddd8
--- /dev/null
+++ b/docs/_spec/TODOreference/other-new-features/transparent-traits.md
@@ -0,0 +1,70 @@
+---
+layout: doc-page
+title: "Transparent Traits"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/transparent-traits.html
+---
+
+Traits are used in two roles:
+
+ 1. As mixins for other classes and traits
+ 2. As types of vals, defs, or parameters
+
+Some traits are used primarily in the first role, and we usually do not want to see them in inferred types. An example is the [`Product`](https://scala-lang.org/api/3.x/scala/Product.html) trait that the compiler adds as a mixin trait to every case class or case object. In Scala 2, this parent trait sometimes makes inferred types more complicated than they should be. Example:
+
+```scala
+trait Kind
+case object Var extends Kind
+case object Val extends Kind
+val x = Set(if condition then Val else Var)
+```
+
+Here, the inferred type of `x` is `Set[Kind & Product & Serializable]`, whereas one would have hoped it to be `Set[Kind]`. The reasoning for inferring this particular type is as follows:
+
+- The type of the conditional above is the [union type](../new-types/union-types.md) `Val | Var`.
+- A union type is widened in type inference to the least supertype that is not a union type.
+  In the example, this type is `Kind & Product & Serializable`, since all three traits are traits of both `Val` and `Var`.
+  So that type becomes the inferred element type of the set.
+
+Scala 3 allows one to mark a mixin trait as `transparent`, which means that it can be suppressed in type inference. Here's an example that follows the lines of the code above, but now with a new transparent trait `S` instead of `Product`:
+
+```scala
+transparent trait S
+trait Kind
+object Var extends Kind, S
+object Val extends Kind, S
+val x = Set(if condition then Val else Var)
+```
+
+Now `x` has the inferred type `Set[Kind]`. The common transparent trait `S` does not
+appear in the inferred type.
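+
+As a self-contained variant of the example above (we supply a concrete `condition`, which the snippet leaves abstract), note that transparency only affects inference; `S` can still be written explicitly in a type:
+
+```scala
+transparent trait S
+trait Kind
+object Var extends Kind, S
+object Val extends Kind, S
+
+val condition: Boolean = true                 // stand-in for the abstract `condition`
+val x = Set(if condition then Val else Var)   // inferred: Set[Kind]; S is suppressed
+val y: Set[Kind & S] = Set(Val, Var)          // S can still be used as an explicit type
+```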
+
+## Transparent Traits
+
+The traits [`scala.Product`](https://scala-lang.org/api/3.x/scala/Product.html), [`java.io.Serializable`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/io/Serializable.html) and [`java.lang.Comparable`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/Comparable.html)
+are automatically treated as transparent. Other traits are turned into transparent traits using the modifier `transparent`. Scala 2 traits can also be made transparent
+by adding a [`@transparentTrait`](https://scala-lang.org/api/3.x/scala/annotation/transparentTrait.html) annotation. This annotation is defined in [`scala.annotation`](https://scala-lang.org/api/3.x/scala/annotation.html). It will be deprecated and phased out once Scala 2/3 interoperability is no longer needed.
+
+Typically, transparent traits are traits that influence the implementation of inheriting classes
+and that are not usually used as types by themselves. Two examples from the standard collection library are:
+
+- [`IterableOps`](https://scala-lang.org/api/3.x/scala/collection/IterableOps.html), which provides method implementations for an [`Iterable`](https://scala-lang.org/api/3.x/scala/collection/Iterable.html).
+- [`StrictOptimizedSeqOps`](https://scala-lang.org/api/3.x/scala/collection/StrictOptimizedSeqOps.html), which optimises some of these implementations for sequences with efficient indexing.
+
+Generally, any trait that is extended recursively is a good candidate to be
+declared transparent.
+
+## Rules for Inference
+
+Transparent traits can be given as explicit types as usual. But they are often elided when types are inferred. Roughly, the rules for type inference say that transparent traits are dropped from intersections where possible.
+
+The precise rules are as follows:
+
+- When inferring the type of a type variable, the type of a val, or the return type of a def,
+- where that type is not higher-kinded,
+- and where `B` is its known upper bound or `Any` if none exists:
+- If the type inferred so far is of the form `T1 & ... & Tn` where
+  `n >= 1`, replace the maximal number of transparent `Ti`s by `Any`, while ensuring that
+  the resulting type is still a subtype of the bound `B`.
+- However, do not perform this widening if it would replace all the transparent traits `Ti`.
+
+The last clause ensures that a single transparent trait instance such as [`Product`](https://scala-lang.org/api/3.x/scala/Product.html) is not widened to [`Any`](https://scala-lang.org/api/3.x/scala/Any.html). Transparent trait instances are only dropped when they appear in conjunction with some other type.
diff --git a/docs/_spec/TODOreference/other-new-features/type-test.md b/docs/_spec/TODOreference/other-new-features/type-test.md
new file mode 100644
index 000000000000..ec7a87230753
--- /dev/null
+++ b/docs/_spec/TODOreference/other-new-features/type-test.md
@@ -0,0 +1,181 @@
+---
+layout: doc-page
+title: "TypeTest"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/type-test.html
+---
+
+## TypeTest
+
+When pattern matching, there are two situations where a runtime type test must be performed.
+The first case is an explicit type test using the ascription pattern notation.
+
+```scala
+(x: X) match
+  case y: Y =>
+```
+
+The second case is when an extractor takes an argument that is not a subtype of the scrutinee type.
+
+```scala
+(x: X) match
+  case y @ Y(n) =>
+
+object Y:
+  def unapply(x: Y): Some[Int] = ...
+```
+
+In both cases, a class test will be performed at runtime.
+But when the type test is on an abstract type (type parameter or type member), the test cannot be performed because the type is erased at runtime.
+
+A [`TypeTest`](https://scala-lang.org/api/3.x/scala/reflect/TypeTest.html) can be provided to make this test possible.
+
+```scala
+package scala.reflect
+
+trait TypeTest[-S, T]:
+  def unapply(s: S): Option[s.type & T]
+```
+
+It provides an extractor that returns its argument typed as a `T` if the argument is a `T`.
+It can be used to encode a type test.
+
+```scala
+def f[X, Y](x: X)(using tt: TypeTest[X, Y]): Option[Y] = x match
+  case tt(x @ Y(1)) => Some(x)
+  case tt(x) => Some(x)
+  case _ => None
+```
+
+To avoid this syntactic overhead, the compiler will look for a type test automatically if it detects that the type test is on abstract types.
+This means that `x: Y` is transformed to `tt(x)` and `x @ Y(_)` to `tt(x @ Y(_))` if there is a contextual `TypeTest[X, Y]` in scope.
+The previous code is equivalent to
+
+```scala
+def f[X, Y](x: X)(using TypeTest[X, Y]): Option[Y] = x match
+  case x @ Y(1) => Some(x)
+  case x: Y => Some(x)
+  case _ => None
+```
+
+We could create a type test at the call site, where the type test can be performed directly with runtime class tests, as follows:
+
+```scala
+val tt: TypeTest[Any, String] =
+  new TypeTest[Any, String]:
+    def unapply(s: Any): Option[s.type & String] = s match
+      case s: String => Some(s)
+      case _ => None
+
+f[AnyRef, String]("acb")(using tt)
+```
+
+If no instance is found in scope, the compiler will synthesize a new type test instance as follows:
+
+```scala
+new TypeTest[A, B]:
+  def unapply(s: A): Option[s.type & B] = s match
+    case s: B => Some(s)
+    case _ => None
+```
+
+If the type test cannot be performed, an unchecked warning will be raised on the `case s: B =>` test.
+
+The most common [`TypeTest`](https://scala-lang.org/api/3.x/scala/reflect/TypeTest.html) instances are the ones whose scrutinee type is `Any` (i.e. `TypeTest[Any, T]`).
+To make it possible to use such instances directly in context bounds, we provide the alias
+
+```scala
+package scala.reflect
+
+type Typeable[T] = TypeTest[Any, T]
+```
+
+This alias can be used as
+
+```scala
+def f[T: Typeable]: Boolean =
+  "abc" match
+    case x: T => true
+    case _ => false
+
+f[String] // true
+f[Int] // false
+```
+
+## TypeTest and ClassTag
+
+[`TypeTest`](https://scala-lang.org/api/3.x/scala/reflect/TypeTest.html) is a replacement for the functionality previously provided by `ClassTag.unapply`.
+Using [`ClassTag`](https://scala-lang.org/api/3.x/scala/reflect/ClassTag.html) instances was unsound since class tags can check only the class component of a type.
+[`TypeTest`](https://scala-lang.org/api/3.x/scala/reflect/TypeTest.html) fixes that unsoundness.
+[`ClassTag`](https://scala-lang.org/api/3.x/scala/reflect/ClassTag.html) type tests are still supported, but a warning will be emitted after 3.0.
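+
+To see the unsoundness concretely, consider the following sketch (the helper `as` is illustrative, not part of the library). A `ClassTag`-based test inspects only the runtime class, so type arguments go unchecked; current compilers may warn on such patterns, as noted above:
+
+```scala
+import scala.reflect.ClassTag
+
+def as[T](x: Any)(using ClassTag[T]): Option[T] = x match
+  case t: T => Some(t) // compiled to a ClassTag test: a runtime class check only
+  case _    => None
+
+@main def demo =
+  println(as[String](42))           // None: an Int is not a String
+  println(as[List[Int]](List("a"))) // Some(List(a)): the class component `List`
+                                    // matches, even though the element type is wrong
+```
+
+A synthesized `TypeTest[Any, List[Int]]`, by contrast, would contain a `case s: List[Int]` that raises an unchecked warning, making the lossy check visible.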
+ + +## Example + +Given the following abstract definition of Peano numbers that provides two given instances of types `TypeTest[Nat, Zero]` and `TypeTest[Nat, Succ]` + +```scala +import scala.reflect.* + +trait Peano: + type Nat + type Zero <: Nat + type Succ <: Nat + + def safeDiv(m: Nat, n: Succ): (Nat, Nat) + + val Zero: Zero + + val Succ: SuccExtractor + trait SuccExtractor: + def apply(nat: Nat): Succ + def unapply(succ: Succ): Some[Nat] + + given typeTestOfZero: TypeTest[Nat, Zero] + given typeTestOfSucc: TypeTest[Nat, Succ] +``` + +together with an implementation of Peano numbers based on type `Int` + +```scala +object PeanoInt extends Peano: + type Nat = Int + type Zero = Int + type Succ = Int + + def safeDiv(m: Nat, n: Succ): (Nat, Nat) = (m / n, m % n) + + val Zero: Zero = 0 + + val Succ: SuccExtractor = new: + def apply(nat: Nat): Succ = nat + 1 + def unapply(succ: Succ) = Some(succ - 1) + + def typeTestOfZero: TypeTest[Nat, Zero] = new: + def unapply(x: Nat): Option[x.type & Zero] = + if x == 0 then Some(x) else None + + def typeTestOfSucc: TypeTest[Nat, Succ] = new: + def unapply(x: Nat): Option[x.type & Succ] = + if x > 0 then Some(x) else None +``` + +it is possible to write the following program + +```scala +@main def test = + import PeanoInt.* + + def divOpt(m: Nat, n: Nat): Option[(Nat, Nat)] = + n match + case Zero => None + case s @ Succ(_) => Some(safeDiv(m, s)) + + val two = Succ(Succ(Zero)) + val five = Succ(Succ(Succ(two))) + + println(divOpt(five, two)) // prints "Some((2,1))" + println(divOpt(two, five)) // prints "Some((0,2))" + println(divOpt(two, Zero)) // prints "None" +``` + +Note that without the `TypeTest[Nat, Succ]` the pattern `Succ.unapply(nat: Succ)` would be unchecked. diff --git a/docs/_spec/TODOreference/overview.md b/docs/_spec/TODOreference/overview.md new file mode 100644 index 000000000000..b1e8281dfc16 --- /dev/null +++ b/docs/_spec/TODOreference/overview.md @@ -0,0 +1,155 @@ +--- +layout: doc-page +title: "Reference" +nightlyOf: https://docs.scala-lang.org/scala3/reference/overview.html +redirectFrom: overview.html +--- + +Scala 3 implements many language changes and improvements over Scala 2. +In this reference, we discuss design decisions and present important differences compared to Scala 2. + +## Goals + +The language redesign was guided by three main goals: + +- Strengthen Scala's foundations. + Make the full programming language compatible with the foundational work on the + [DOT calculus](https://infoscience.epfl.ch/record/227176/files/soundness_oopsla16.pdf) + and apply the lessons learned from that work. +- Make Scala easier and safer to use. + Tame powerful constructs such as implicits to provide a gentler learning curve. Remove warts and puzzlers. +- Further improve the consistency and expressiveness of Scala's language constructs. + +Corresponding to these goals, the language changes fall into seven categories: +(1) Core constructs to strengthen foundations, (2) simplifications and (3) [restrictions](#restrictions), to make the language easier and safer to use, (4) [dropped constructs](#dropped-constructs) to make the language smaller and more regular, (5) [changed constructs](#changes) to remove warts, and increase consistency and usability, (6) [new constructs](#new-constructs) to fill gaps and increase expressiveness, (7) a new, principled approach to metaprogramming that replaces [Scala 2 experimental macros](https://docs.scala-lang.org/overviews/macros/overview.html). 
+
+## Essential Foundations
+
+These new constructs directly model core features of DOT, higher-kinded types, and the [SI calculus for implicit resolution](https://infoscience.epfl.ch/record/229878/files/simplicitly_1.pdf).
+
+- [Intersection types](new-types/intersection-types.md), replacing compound types.
+- [Union types](new-types/union-types.md).
+- [Type lambdas](new-types/type-lambdas.md), replacing encodings using structural types and type projection.
+- [Context functions](contextual/context-functions.md), offering abstraction over given parameters.
+
+## Simplifications
+
+These constructs replace existing constructs with the aim of making the language safer and simpler to use, and of promoting uniformity in code style.
+
+- [Trait parameters](other-new-features/trait-parameters.md)
+  replace [early initializers](dropped-features/early-initializers.md) with a more generally useful construct.
+- [Given instances](contextual/givens.md)
+  replace implicit objects and defs, focussing on intent over mechanism.
+- [Using clauses](contextual/using-clauses.md)
+  replace implicit parameters, avoiding their ambiguities.
+- [Extension methods](contextual/extension-methods.md)
+  replace implicit classes with a clearer and simpler mechanism.
+- [Opaque type aliases](other-new-features/opaques.md)
+  replace most uses of value classes while guaranteeing the absence of boxing.
+- [Top-level definitions](dropped-features/package-objects.md)
+  replace package objects, dropping syntactic boilerplate.
+- [Export clauses](other-new-features/export.md)
+  provide a simple and general way to express aggregation, which can replace
+  the previous facade pattern of package objects inheriting from classes.
+- [Vararg splices](changed-features/vararg-splices.md)
+  now use the form `xs*` in function arguments and patterns instead of `xs: _*` and `xs @ _*`.
+- [Universal apply methods](other-new-features/creator-applications.md)
+  allow using simple function call syntax instead of `new` expressions. `new` expressions stay around
+  as a fallback for the cases where creator applications cannot be used.
+
+With the exception of [early initializers](dropped-features/early-initializers.md) and old-style vararg patterns, all superseded constructs continue to be available in Scala 3.0. The plan is to deprecate and phase them out later.
+
+Value classes (superseded by opaque type aliases) are a special case. There are currently no deprecation plans for value classes, since we might bring them back in a more general form if they are supported natively by the JVM, as is planned by [project Valhalla](https://openjdk.java.net/projects/valhalla/).
+
+## Restrictions
+
+These constructs are restricted to make the language safer.
+
+- [Implicit Conversions](contextual/conversions.md):
+  there is only one way to define implicit conversions instead of many, and potentially surprising implicit conversions require a language import.
+- [Given Imports](contextual/given-imports.md):
+  implicits now require a special form of import, to make the import clearly visible.
+- [Type Projection](dropped-features/type-projection.md):
+  only classes can be used as the prefix `C` of a type projection `C#A`. Type projection on abstract types is no longer supported, since it is unsound.
+- [Multiversal Equality](contextual/multiversal-equality.md):
+  implement an "opt-in" scheme to rule out nonsensical comparisons with `==` and `!=`.
+- [infix](changed-features/operators.md):
+  make method application syntax uniform across code bases.
+ +Unrestricted implicit conversions continue to be available in Scala 3.0, but will be deprecated and removed later. Unrestricted versions of the other constructs in the list above are available only under `-source 3.0-migration`. + +## Dropped Constructs + +These constructs are proposed to be dropped without a new construct replacing them. The motivation for dropping these constructs is to simplify the language and its implementation. + +- [DelayedInit](dropped-features/delayed-init.md), +- [Existential types](dropped-features/existential-types.md), +- [Procedure syntax](dropped-features/procedure-syntax.md), +- [Class shadowing](dropped-features/class-shadowing.md), +- [XML literals](dropped-features/xml.md), +- [Symbol literals](dropped-features/symlits.md), +- [Auto application](dropped-features/auto-apply.md), +- [Weak conformance](dropped-features/weak-conformance.md), +- Compound types (replaced by [Intersection types](new-types/intersection-types.md)), +- [Auto tupling](https://github.com/lampepfl/dotty/pull/4311) (implemented, but not merged). + +The date when these constructs are dropped varies. The current status is: + +- Not implemented at all: + - DelayedInit, existential types, weak conformance. +- Supported under `-source 3.0-migration`: + - procedure syntax, class shadowing, symbol literals, auto application, auto tupling in a restricted form. +- Supported in 3.0, to be deprecated and phased out later: + - [XML literals](dropped-features/xml.md), compound types. + +## Changes + +These constructs have undergone changes to make them more regular and useful. + +- [Structural Types](changed-features/structural-types.md): + They now allow pluggable implementations, which greatly increases their usefulness. Some usage patterns are restricted compared to the status quo. +- [Name-based pattern matching](changed-features/pattern-matching.md): + The existing undocumented Scala 2 implementation has been codified in a slightly simplified form. +- [Automatic Eta expansion](changed-features/eta-expansion.md): + Eta expansion is now performed universally also in the absence of an expected type. The postfix `_` operator is thus made redundant. It will be deprecated and dropped after Scala 3.0. +- [Implicit Resolution](changed-features/implicit-resolution.md): + The implicit resolution rules have been cleaned up to make them more useful and less surprising. Implicit scope is restricted to no longer include package prefixes. + +Most aspects of old-style implicit resolution are still available under `-source 3.0-migration`. The other changes in this list are applied unconditionally. + +## New Constructs + +These are additions to the language that make it more powerful or pleasant to use. + +- [Enums](enums/enums.md) provide concise syntax for enumerations and [algebraic data types](enums/adts.md). +- [Parameter untupling](other-new-features/parameter-untupling.md) avoids having to use `case` for tupled parameter destructuring. +- [Dependent function types](new-types/dependent-function-types.md) generalize dependent methods to dependent function values and types. +- [Polymorphic function types](new-types/polymorphic-function-types.md) generalize polymorphic methods to polymorphic function values and types. + _Current status_: There is a proposal and a merged prototype implementation, but the implementation has not been finalized (it is notably missing type inference support). 
+- [Kind polymorphism](other-new-features/kind-polymorphism.md) allows the definition of operators working equally on types and type constructors.
+- [`@targetName` annotations](other-new-features/targetName.md) make it easier to interoperate with code written in other languages and give more flexibility for avoiding name clashes.
+
+## Metaprogramming
+
+The following constructs together aim to put metaprogramming in Scala on a new basis. So far, metaprogramming has been achieved by a combination of macros and libraries such as [Shapeless](https://github.com/milessabin/shapeless) that were in turn based on some key macros. The current Scala 2 macro mechanisms are a thin veneer on top of the Scala 2 compiler, which makes them fragile and in many cases impossible to port to Scala 3.
+
+It's worth noting that macros were never included in the [Scala 2 language specification](https://scala-lang.org/files/archive/spec/2.13/) and have so far been made available only under an `-experimental` flag. This has not prevented their widespread usage.
+
+To enable porting most uses of macros, we are experimenting with the advanced language constructs listed below. These designs are more provisional than the rest of the proposed language constructs for Scala 3.0. There might still be some changes before the final release. Stabilizing the feature set needed for metaprogramming is our first priority.
+
+- [Match Types](new-types/match-types.md)
+  allow computation on types.
+- [Inline](metaprogramming/inline.md)
+  provides by itself a straightforward implementation of some simple macros and is at the same time an essential building block for the implementation of complex macros.
+- [Quotes and Splices](metaprogramming/macros.md)
+  provide a principled way to express macros and staging with a unified set of abstractions.
+- [Type class derivation](contextual/derivation.md)
+  provides an in-language implementation of the `Gen` macro in Shapeless and other foundational libraries. The new implementation is more robust, more efficient, and easier to use than the macro.
+- [By-name context parameters](contextual/by-name-context-parameters.md)
+  provide a more robust in-language implementation of the `Lazy` macro in [Shapeless](https://github.com/milessabin/shapeless).
+
+## See Also
+
+[A classification of proposed language features](./features-classification.md) is
+an expanded version of this page that adds the status (i.e. the relative importance of being part of Scala 3, and the relative urgency of deciding on it) and the expected migration cost
+of each language construct.
diff --git a/docs/_spec/TODOreference/soft-modifier.md b/docs/_spec/TODOreference/soft-modifier.md
new file mode 100644
index 000000000000..c1329ebab1f0
--- /dev/null
+++ b/docs/_spec/TODOreference/soft-modifier.md
@@ -0,0 +1,27 @@
+---
+layout: doc-page
+title: "Soft Keywords"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/soft-modifier.html
+---
+
+A soft modifier is one of the identifiers `infix`, `inline`, `opaque`, `open` and `transparent`.
+
+A soft keyword is a soft modifier, or one of `as`, `derives`, `end`, `extension`, `throws`, `using`, `|`, `+`, `-`, `*`.
+
+A soft modifier is treated as a potential modifier of a definition if it is followed by a hard modifier or a keyword combination starting a definition (`def`, `val`, `var`, `type`, `given`, `class`, `trait`, `object`, `enum`, `case class`, `case object`). Between the two words there may be a sequence of newline tokens and soft modifiers.
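+
+For instance (a small illustration, not part of the original page), the same identifier can be a modifier in one position and an ordinary name in another:
+
+```scala
+open class Service          // `open` is followed by `class`, so it acts as a modifier
+
+val open: Int = 1           // here `open` is just an ordinary identifier
+val transparent = open + 1  // and so is `transparent`
+```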
+ +Otherwise, soft keywords are treated specially in the following situations: + + - `inline`, if it is followed by any token that can start an expression. + - `derives`, if it appears after an extension clause or after + the name and possibly parameters of a class, trait, object, or enum definition. + - `end`, if it appears at the start of a line following a statement (i.e. definition or toplevel expression) + - `extension`, if it appears at the start of a statement and is followed by `(` or `[`. + - `using`, if it appears at the start of a parameter or argument list. + - `as`, in a renaming import clause + - `|`, if it separates two patterns in an alternative. + - `+`, `-`, if they appear in front of a type parameter. + - `*`, in a wildcard import, or it follows the type of a parameter, or if it appears in + a vararg splice `x*`. + +Everywhere else a soft keyword is treated as a normal identifier. diff --git a/docs/_spec/_config.yml b/docs/_spec/_config.yml new file mode 100644 index 000000000000..3cb9c8f0d440 --- /dev/null +++ b/docs/_spec/_config.yml @@ -0,0 +1,11 @@ +baseurl: /files/archive/spec/2.13 +latestScalaVersion: 2.13 +thisScalaVersion: 2.13 +versionCompareMessage: "an upcoming" +safe: true +lsi: false +highlighter: false +markdown: redcarpet +encoding: utf-8 +redcarpet: + extensions: ["no_intra_emphasis", "fenced_code_blocks", "autolink", "tables", "with_toc_data", "strikethrough", "lax_spacing", "space_after_headers", "superscript", "footnotes", "disable_indented_code_blocks"] diff --git a/docs/_spec/_includes/numbering.css b/docs/_spec/_includes/numbering.css new file mode 100644 index 000000000000..2a22ce28b558 --- /dev/null +++ b/docs/_spec/_includes/numbering.css @@ -0,0 +1,60 @@ +h1 { + /* must reset here */ + counter-reset: chapter {{ page.chapter }}; +} +h1:before { + /* and must reset again here */ + counter-reset: chapter {{ page.chapter }}; + content: "Chapter " counter(chapter); + display: block; +} + +h2 { + /* must increment here */ + counter-increment: section; + counter-reset: subsection; +} +h2:before { + /* and must reset again here */ + counter-reset: chapter {{ page.chapter }}; + + content: counter(chapter) "." counter(section) ; + display: inline; + margin-right: 1em; +} +h2:after { + /* can only have one counter-reset per tag, so can't do it in h2/h2:before... */ + counter-reset: example; +} + +h3 { + /* must increment here */ + counter-increment: subsection; +} +h3:before { + /* and must reset again here */ + counter-reset: chapter {{ page.chapter }}; + + content: counter(chapter) "." counter(section) "." counter(subsection); + display: inline; + margin-right: 1em; +} +h3[id*='example'] { + /* must increment here */ + counter-increment: example; + display: inline; +} +h3[id*='example']:before { + /* and must reset again here */ + counter-reset: chapter {{ page.chapter }}; + + content: "Example " counter(chapter) "." counter(section) "." counter(example); + display: inline; + margin-right: 1em; +} + +.no-numbering, .no-numbering:before, .no-numbering:after { + content: normal; + counter-reset: none; + counter-increment: none; +} diff --git a/docs/_spec/_includes/table-of-contents.yml b/docs/_spec/_includes/table-of-contents.yml new file mode 100644 index 000000000000..b70f97da5424 --- /dev/null +++ b/docs/_spec/_includes/table-of-contents.yml @@ -0,0 +1,23 @@ + +
+ +

Table of Contents

+ +
    + {% assign sorted_pages = site.pages | sort:"name" %} + {% for post in sorted_pages %} + + {% if post.chapter >= 0 %} +
  1. + {{ post.title }} +
  2. + {% endif %} + {% endfor %} +
+
+ + diff --git a/docs/_spec/_includes/version-notice.yml b/docs/_spec/_includes/version-notice.yml new file mode 100644 index 000000000000..5a7286631c11 --- /dev/null +++ b/docs/_spec/_includes/version-notice.yml @@ -0,0 +1,3 @@ +{% if site.thisScalaVersion != site.latestScalaVersion %} +
This is the specification of {{ site.versionCompareMessage }} version of Scala. See the Scala {{ site.latestScalaVersion }} spec.
+{% endif %} diff --git a/docs/_spec/_layouts/default.yml b/docs/_spec/_layouts/default.yml new file mode 100644 index 000000000000..2589a105dff2 --- /dev/null +++ b/docs/_spec/_layouts/default.yml @@ -0,0 +1,51 @@ + + + + + + + + + + + + + + + + + + + + + {{ page.title }} | Scala {{ site.thisScalaVersion }} + + + +
+ +
+ + +
+{% include version-notice.yml %} +{{ content }} +
+ + + + + + + diff --git a/docs/_spec/_layouts/toc.yml b/docs/_spec/_layouts/toc.yml new file mode 100644 index 000000000000..1106222bd088 --- /dev/null +++ b/docs/_spec/_layouts/toc.yml @@ -0,0 +1,34 @@ + + + + + + + + + + {{ page.title }} | Scala {{ site.thisScalaVersion }} + + + + + + + + +
+
+ + Scala Language Specification + Edit at GitHub +
+
Version {{ site.thisScalaVersion }}
+
+
+{% include version-notice.yml %} +{{ content }} +
+ + + + diff --git a/docs/_spec/docker-compose.yml b/docs/_spec/docker-compose.yml new file mode 100644 index 000000000000..3eadc939ed40 --- /dev/null +++ b/docs/_spec/docker-compose.yml @@ -0,0 +1,11 @@ +version: '2' + +services: + jekyll: + user: "${UID}:${GID}" + build: . + command: sh -c "chown $UID / && bundle exec jekyll serve --incremental --host=0.0.0.0 " + ports: + - '4000:4000' + volumes: + - .:/srv/jekyll diff --git a/docs/_spec/index.md b/docs/_spec/index.md new file mode 100644 index 000000000000..df126db7bd44 --- /dev/null +++ b/docs/_spec/index.md @@ -0,0 +1,55 @@ +--- +title: Scala Language Specification +layout: toc +--- + +{% include table-of-contents.yml %} + +#### Authors and Contributors + +Martin Odersky, Philippe Altherr, Vincent Cremet, Gilles Dubochet, Burak Emir, Philipp Haller, Stéphane Micheloud, Nikolay Mihaylov, Adriaan Moors, Lukas Rytz, Michel Schinz, Erik Stenman, Matthias Zenger + +Markdown Conversion by Iain McGinniss. + +#### Preface + +Scala is a Java-like programming language which unifies +object-oriented and functional programming. It is a pure +object-oriented language in the sense that every value is an +object. Types and behavior of objects are described by +classes. Classes can be composed using mixin composition. Scala is +designed to work seamlessly with less pure but mainstream +object-oriented languages like Java. + +Scala is a functional language in the sense that every function is a +value. Nesting of function definitions and higher-order functions are +naturally supported. Scala also supports a general notion of pattern +matching which can model the algebraic types used in many functional +languages. + +Scala has been designed to interoperate seamlessly with Java. +Scala classes can call Java methods, create Java objects, inherit from Java +classes and implement Java interfaces. None of this requires interface +definitions or glue code. + +Scala has been developed from 2001 in the programming methods +laboratory at EPFL. Version 1.0 was released in November 2003. This +document describes the second version of the language, which was +released in March 2006. It acts as a reference for the language +definition and some core library modules. It is not intended to teach +Scala or its concepts; for this there are [other documents](14-references.html). + +Scala has been a collective effort of many people. The design and the +implementation of version 1.0 was completed by Philippe Altherr, +Vincent Cremet, Gilles Dubochet, Burak Emir, Stéphane Micheloud, +Nikolay Mihaylov, Michel Schinz, Erik Stenman, Matthias Zenger, and +the author. Iulian Dragos, Gilles Dubochet, Philipp Haller, Sean +McDirmid, Lex Spoon, and Geoffrey Washburn joined in the effort to +develop the second version of the language and tools. Gilad Bracha, +Craig Chambers, Erik Ernst, Matthias Felleisen, Shriram Krishnamurti, +Gary Leavens, Sebastian Maneth, Erik Meijer, Klaus Ostermann, Didier +Rémy, Mads Torgersen, and Philip Wadler have shaped the design of +the language through lively and inspiring discussions and comments on +previous versions of this document. The contributors to the Scala +mailing list have also given very useful feedback that helped us +improve the language and its tools. 
diff --git a/docs/_spec/public/favicon.ico b/docs/_spec/public/favicon.ico new file mode 100644 index 000000000000..9eb6ef516488 Binary files /dev/null and b/docs/_spec/public/favicon.ico differ diff --git a/docs/_spec/public/fonts/Heuristica-Bold.woff b/docs/_spec/public/fonts/Heuristica-Bold.woff new file mode 100644 index 000000000000..904579683d54 Binary files /dev/null and b/docs/_spec/public/fonts/Heuristica-Bold.woff differ diff --git a/docs/_spec/public/fonts/Heuristica-BoldItalic.woff b/docs/_spec/public/fonts/Heuristica-BoldItalic.woff new file mode 100644 index 000000000000..a3c523445375 Binary files /dev/null and b/docs/_spec/public/fonts/Heuristica-BoldItalic.woff differ diff --git a/docs/_spec/public/fonts/Heuristica-Regular.woff b/docs/_spec/public/fonts/Heuristica-Regular.woff new file mode 100644 index 000000000000..f5c1f8b2dbc5 Binary files /dev/null and b/docs/_spec/public/fonts/Heuristica-Regular.woff differ diff --git a/docs/_spec/public/fonts/Heuristica-RegularItalic.woff b/docs/_spec/public/fonts/Heuristica-RegularItalic.woff new file mode 100644 index 000000000000..d2c8664593dc Binary files /dev/null and b/docs/_spec/public/fonts/Heuristica-RegularItalic.woff differ diff --git a/docs/_spec/public/fonts/LuxiMono-Bold.woff b/docs/_spec/public/fonts/LuxiMono-Bold.woff new file mode 100644 index 000000000000..8581bb5aa458 Binary files /dev/null and b/docs/_spec/public/fonts/LuxiMono-Bold.woff differ diff --git a/docs/_spec/public/fonts/LuxiMono-BoldOblique.woff b/docs/_spec/public/fonts/LuxiMono-BoldOblique.woff new file mode 100644 index 000000000000..607ccf5cd030 Binary files /dev/null and b/docs/_spec/public/fonts/LuxiMono-BoldOblique.woff differ diff --git a/docs/_spec/public/fonts/LuxiMono-Regular.woff b/docs/_spec/public/fonts/LuxiMono-Regular.woff new file mode 100644 index 000000000000..a478ad9ef2dd Binary files /dev/null and b/docs/_spec/public/fonts/LuxiMono-Regular.woff differ diff --git a/docs/_spec/public/fonts/LuxiMono-RegularOblique.woff b/docs/_spec/public/fonts/LuxiMono-RegularOblique.woff new file mode 100644 index 000000000000..26999f990fa9 Binary files /dev/null and b/docs/_spec/public/fonts/LuxiMono-RegularOblique.woff differ diff --git a/docs/_spec/public/fonts/LuxiSans-Bold.woff b/docs/_spec/public/fonts/LuxiSans-Bold.woff new file mode 100644 index 000000000000..162621568b53 Binary files /dev/null and b/docs/_spec/public/fonts/LuxiSans-Bold.woff differ diff --git a/docs/_spec/public/fonts/LuxiSans-Regular.woff b/docs/_spec/public/fonts/LuxiSans-Regular.woff new file mode 100644 index 000000000000..89d980218f7a Binary files /dev/null and b/docs/_spec/public/fonts/LuxiSans-Regular.woff differ diff --git a/docs/_spec/public/images/classhierarchy.pdf b/docs/_spec/public/images/classhierarchy.pdf new file mode 100644 index 000000000000..58e050174b65 Binary files /dev/null and b/docs/_spec/public/images/classhierarchy.pdf differ diff --git a/docs/_spec/public/images/classhierarchy.png b/docs/_spec/public/images/classhierarchy.png new file mode 100644 index 000000000000..3da25ecbf2d5 Binary files /dev/null and b/docs/_spec/public/images/classhierarchy.png differ diff --git a/docs/_spec/public/images/github-logo@2x.png b/docs/_spec/public/images/github-logo@2x.png new file mode 100644 index 000000000000..285b0fee2f32 Binary files /dev/null and b/docs/_spec/public/images/github-logo@2x.png differ diff --git a/docs/_spec/public/images/scala-spiral-white.png b/docs/_spec/public/images/scala-spiral-white.png new file mode 100644 index 
000000000000..46aaf80824c1 Binary files /dev/null and b/docs/_spec/public/images/scala-spiral-white.png differ diff --git a/docs/_spec/public/octicons/LICENSE.txt b/docs/_spec/public/octicons/LICENSE.txt new file mode 100644 index 000000000000..259b43d14de3 --- /dev/null +++ b/docs/_spec/public/octicons/LICENSE.txt @@ -0,0 +1,9 @@ +(c) 2012-2014 GitHub + +When using the GitHub logos, be sure to follow the GitHub logo guidelines (https://github.com/logos) + +Font License: SIL OFL 1.1 (http://scripts.sil.org/OFL) +Applies to all font files + +Code License: MIT (http://choosealicense.com/licenses/mit/) +Applies to all other files diff --git a/docs/_spec/public/octicons/octicons.css b/docs/_spec/public/octicons/octicons.css new file mode 100644 index 000000000000..a5dcd153a856 --- /dev/null +++ b/docs/_spec/public/octicons/octicons.css @@ -0,0 +1,235 @@ +@font-face { + font-family: 'octicons'; + src: url('octicons.eot?#iefix') format('embedded-opentype'), + url('octicons.woff') format('woff'), + url('octicons.ttf') format('truetype'), + url('octicons.svg#octicons') format('svg'); + font-weight: normal; + font-style: normal; +} + +/* + +.octicon is optimized for 16px. +.mega-octicon is optimized for 32px but can be used larger. + +*/ +.octicon, .mega-octicon { + font: normal normal normal 16px/1 octicons; + display: inline-block; + text-decoration: none; + text-rendering: auto; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; + -webkit-user-select: none; + -moz-user-select: none; + -ms-user-select: none; + user-select: none; +} +.mega-octicon { font-size: 32px; } + + +.octicon-alert:before { content: '\f02d'} /*  */ +.octicon-alignment-align:before { content: '\f08a'} /*  */ +.octicon-alignment-aligned-to:before { content: '\f08e'} /*  */ +.octicon-alignment-unalign:before { content: '\f08b'} /*  */ +.octicon-arrow-down:before { content: '\f03f'} /*  */ +.octicon-arrow-left:before { content: '\f040'} /*  */ +.octicon-arrow-right:before { content: '\f03e'} /*  */ +.octicon-arrow-small-down:before { content: '\f0a0'} /*  */ +.octicon-arrow-small-left:before { content: '\f0a1'} /*  */ +.octicon-arrow-small-right:before { content: '\f071'} /*  */ +.octicon-arrow-small-up:before { content: '\f09f'} /*  */ +.octicon-arrow-up:before { content: '\f03d'} /*  */ +.octicon-beer:before { content: '\f069'} /*  */ +.octicon-book:before { content: '\f007'} /*  */ +.octicon-bookmark:before { content: '\f07b'} /*  */ +.octicon-briefcase:before { content: '\f0d3'} /*  */ +.octicon-broadcast:before { content: '\f048'} /*  */ +.octicon-browser:before { content: '\f0c5'} /*  */ +.octicon-bug:before { content: '\f091'} /*  */ +.octicon-calendar:before { content: '\f068'} /*  */ +.octicon-check:before { content: '\f03a'} /*  */ +.octicon-checklist:before { content: '\f076'} /*  */ +.octicon-chevron-down:before { content: '\f0a3'} /*  */ +.octicon-chevron-left:before { content: '\f0a4'} /*  */ +.octicon-chevron-right:before { content: '\f078'} /*  */ +.octicon-chevron-up:before { content: '\f0a2'} /*  */ +.octicon-circle-slash:before { content: '\f084'} /*  */ +.octicon-circuit-board:before { content: '\f0d6'} /*  */ +.octicon-clippy:before { content: '\f035'} /*  */ +.octicon-clock:before { content: '\f046'} /*  */ +.octicon-cloud-download:before { content: '\f00b'} /*  */ +.octicon-cloud-upload:before { content: '\f00c'} /*  */ +.octicon-code:before { content: '\f05f'} /*  */ +.octicon-color-mode:before { content: '\f065'} /*  */ 
+.octicon-comment-add:before, +.octicon-comment:before { content: '\f02b'} /*  */ +.octicon-comment-discussion:before { content: '\f04f'} /*  */ +.octicon-credit-card:before { content: '\f045'} /*  */ +.octicon-dash:before { content: '\f0ca'} /*  */ +.octicon-dashboard:before { content: '\f07d'} /*  */ +.octicon-database:before { content: '\f096'} /*  */ +.octicon-device-camera:before { content: '\f056'} /*  */ +.octicon-device-camera-video:before { content: '\f057'} /*  */ +.octicon-device-desktop:before { content: '\f27c'} /*  */ +.octicon-device-mobile:before { content: '\f038'} /*  */ +.octicon-diff:before { content: '\f04d'} /*  */ +.octicon-diff-added:before { content: '\f06b'} /*  */ +.octicon-diff-ignored:before { content: '\f099'} /*  */ +.octicon-diff-modified:before { content: '\f06d'} /*  */ +.octicon-diff-removed:before { content: '\f06c'} /*  */ +.octicon-diff-renamed:before { content: '\f06e'} /*  */ +.octicon-ellipsis:before { content: '\f09a'} /*  */ +.octicon-eye-unwatch:before, +.octicon-eye-watch:before, +.octicon-eye:before { content: '\f04e'} /*  */ +.octicon-file-binary:before { content: '\f094'} /*  */ +.octicon-file-code:before { content: '\f010'} /*  */ +.octicon-file-directory:before { content: '\f016'} /*  */ +.octicon-file-media:before { content: '\f012'} /*  */ +.octicon-file-pdf:before { content: '\f014'} /*  */ +.octicon-file-submodule:before { content: '\f017'} /*  */ +.octicon-file-symlink-directory:before { content: '\f0b1'} /*  */ +.octicon-file-symlink-file:before { content: '\f0b0'} /*  */ +.octicon-file-text:before { content: '\f011'} /*  */ +.octicon-file-zip:before { content: '\f013'} /*  */ +.octicon-flame:before { content: '\f0d2'} /*  */ +.octicon-fold:before { content: '\f0cc'} /*  */ +.octicon-gear:before { content: '\f02f'} /*  */ +.octicon-gift:before { content: '\f042'} /*  */ +.octicon-gist:before { content: '\f00e'} /*  */ +.octicon-gist-secret:before { content: '\f08c'} /*  */ +.octicon-git-branch-create:before, +.octicon-git-branch-delete:before, +.octicon-git-branch:before { content: '\f020'} /*  */ +.octicon-git-commit:before { content: '\f01f'} /*  */ +.octicon-git-compare:before { content: '\f0ac'} /*  */ +.octicon-git-merge:before { content: '\f023'} /*  */ +.octicon-git-pull-request-abandoned:before, +.octicon-git-pull-request:before { content: '\f009'} /*  */ +.octicon-globe:before { content: '\f0b6'} /*  */ +.octicon-graph:before { content: '\f043'} /*  */ +.octicon-heart:before { content: '\2665'} /* ♥ */ +.octicon-history:before { content: '\f07e'} /*  */ +.octicon-home:before { content: '\f08d'} /*  */ +.octicon-horizontal-rule:before { content: '\f070'} /*  */ +.octicon-hourglass:before { content: '\f09e'} /*  */ +.octicon-hubot:before { content: '\f09d'} /*  */ +.octicon-inbox:before { content: '\f0cf'} /*  */ +.octicon-info:before { content: '\f059'} /*  */ +.octicon-issue-closed:before { content: '\f028'} /*  */ +.octicon-issue-opened:before { content: '\f026'} /*  */ +.octicon-issue-reopened:before { content: '\f027'} /*  */ +.octicon-jersey:before { content: '\f019'} /*  */ +.octicon-jump-down:before { content: '\f072'} /*  */ +.octicon-jump-left:before { content: '\f0a5'} /*  */ +.octicon-jump-right:before { content: '\f0a6'} /*  */ +.octicon-jump-up:before { content: '\f073'} /*  */ +.octicon-key:before { content: '\f049'} /*  */ +.octicon-keyboard:before { content: '\f00d'} /*  */ +.octicon-law:before { content: '\f0d8'} /* */ +.octicon-light-bulb:before { 
content: '\f000'} /*  */ +.octicon-link:before { content: '\f05c'} /*  */ +.octicon-link-external:before { content: '\f07f'} /*  */ +.octicon-list-ordered:before { content: '\f062'} /*  */ +.octicon-list-unordered:before { content: '\f061'} /*  */ +.octicon-location:before { content: '\f060'} /*  */ +.octicon-gist-private:before, +.octicon-mirror-private:before, +.octicon-git-fork-private:before, +.octicon-lock:before { content: '\f06a'} /*  */ +.octicon-logo-github:before { content: '\f092'} /*  */ +.octicon-mail:before { content: '\f03b'} /*  */ +.octicon-mail-read:before { content: '\f03c'} /*  */ +.octicon-mail-reply:before { content: '\f051'} /*  */ +.octicon-mark-github:before { content: '\f00a'} /*  */ +.octicon-markdown:before { content: '\f0c9'} /*  */ +.octicon-megaphone:before { content: '\f077'} /*  */ +.octicon-mention:before { content: '\f0be'} /*  */ +.octicon-microscope:before { content: '\f089'} /*  */ +.octicon-milestone:before { content: '\f075'} /*  */ +.octicon-mirror-public:before, +.octicon-mirror:before { content: '\f024'} /*  */ +.octicon-mortar-board:before { content: '\f0d7'} /* */ +.octicon-move-down:before { content: '\f0a8'} /*  */ +.octicon-move-left:before { content: '\f074'} /*  */ +.octicon-move-right:before { content: '\f0a9'} /*  */ +.octicon-move-up:before { content: '\f0a7'} /*  */ +.octicon-mute:before { content: '\f080'} /*  */ +.octicon-no-newline:before { content: '\f09c'} /*  */ +.octicon-octoface:before { content: '\f008'} /*  */ +.octicon-organization:before { content: '\f037'} /*  */ +.octicon-package:before { content: '\f0c4'} /*  */ +.octicon-paintcan:before { content: '\f0d1'} /*  */ +.octicon-pencil:before { content: '\f058'} /*  */ +.octicon-person-add:before, +.octicon-person-follow:before, +.octicon-person:before { content: '\f018'} /*  */ +.octicon-pin:before { content: '\f041'} /*  */ +.octicon-playback-fast-forward:before { content: '\f0bd'} /*  */ +.octicon-playback-pause:before { content: '\f0bb'} /*  */ +.octicon-playback-play:before { content: '\f0bf'} /*  */ +.octicon-playback-rewind:before { content: '\f0bc'} /*  */ +.octicon-plug:before { content: '\f0d4'} /*  */ +.octicon-repo-create:before, +.octicon-gist-new:before, +.octicon-file-directory-create:before, +.octicon-file-add:before, +.octicon-plus:before { content: '\f05d'} /*  */ +.octicon-podium:before { content: '\f0af'} /*  */ +.octicon-primitive-dot:before { content: '\f052'} /*  */ +.octicon-primitive-square:before { content: '\f053'} /*  */ +.octicon-pulse:before { content: '\f085'} /*  */ +.octicon-puzzle:before { content: '\f0c0'} /*  */ +.octicon-question:before { content: '\f02c'} /*  */ +.octicon-quote:before { content: '\f063'} /*  */ +.octicon-radio-tower:before { content: '\f030'} /*  */ +.octicon-repo-delete:before, +.octicon-repo:before { content: '\f001'} /*  */ +.octicon-repo-clone:before { content: '\f04c'} /*  */ +.octicon-repo-force-push:before { content: '\f04a'} /*  */ +.octicon-gist-fork:before, +.octicon-repo-forked:before { content: '\f002'} /*  */ +.octicon-repo-pull:before { content: '\f006'} /*  */ +.octicon-repo-push:before { content: '\f005'} /*  */ +.octicon-rocket:before { content: '\f033'} /*  */ +.octicon-rss:before { content: '\f034'} /*  */ +.octicon-ruby:before { content: '\f047'} /*  */ +.octicon-screen-full:before { content: '\f066'} /*  */ +.octicon-screen-normal:before { content: '\f067'} /*  */ +.octicon-search-save:before, +.octicon-search:before { content: '\f02e'} /*  */ 
+.octicon-server:before { content: '\f097'} /*  */ +.octicon-settings:before { content: '\f07c'} /*  */ +.octicon-log-in:before, +.octicon-sign-in:before { content: '\f036'} /*  */ +.octicon-log-out:before, +.octicon-sign-out:before { content: '\f032'} /*  */ +.octicon-split:before { content: '\f0c6'} /*  */ +.octicon-squirrel:before { content: '\f0b2'} /*  */ +.octicon-star-add:before, +.octicon-star-delete:before, +.octicon-star:before { content: '\f02a'} /*  */ +.octicon-steps:before { content: '\f0c7'} /*  */ +.octicon-stop:before { content: '\f08f'} /*  */ +.octicon-repo-sync:before, +.octicon-sync:before { content: '\f087'} /*  */ +.octicon-tag-remove:before, +.octicon-tag-add:before, +.octicon-tag:before { content: '\f015'} /*  */ +.octicon-telescope:before { content: '\f088'} /*  */ +.octicon-terminal:before { content: '\f0c8'} /*  */ +.octicon-three-bars:before { content: '\f05e'} /*  */ +.octicon-tools:before { content: '\f031'} /*  */ +.octicon-trashcan:before { content: '\f0d0'} /*  */ +.octicon-triangle-down:before { content: '\f05b'} /*  */ +.octicon-triangle-left:before { content: '\f044'} /*  */ +.octicon-triangle-right:before { content: '\f05a'} /*  */ +.octicon-triangle-up:before { content: '\f0aa'} /*  */ +.octicon-unfold:before { content: '\f039'} /*  */ +.octicon-unmute:before { content: '\f0ba'} /*  */ +.octicon-versions:before { content: '\f064'} /*  */ +.octicon-remove-close:before, +.octicon-x:before { content: '\f081'} /*  */ +.octicon-zap:before { content: '\26A1'} /* ⚡ */ diff --git a/docs/_spec/public/octicons/octicons.eot b/docs/_spec/public/octicons/octicons.eot new file mode 100644 index 000000000000..22881a8b6c43 Binary files /dev/null and b/docs/_spec/public/octicons/octicons.eot differ diff --git a/docs/_spec/public/octicons/octicons.svg b/docs/_spec/public/octicons/octicons.svg new file mode 100644 index 000000000000..ea3e0f161528 --- /dev/null +++ b/docs/_spec/public/octicons/octicons.svg @@ -0,0 +1,198 @@ + + + + +(c) 2012-2014 GitHub + +When using the GitHub logos, be sure to follow the GitHub logo guidelines (https://github.com/logos) + +Font License: SIL OFL 1.1 (http://scripts.sil.org/OFL) +Applies to all font files + +Code License: MIT (http://choosealicense.com/licenses/mit/) +Applies to all other files + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/_spec/public/octicons/octicons.ttf b/docs/_spec/public/octicons/octicons.ttf new file mode 100644 index 000000000000..189ca2813d49 Binary files /dev/null and b/docs/_spec/public/octicons/octicons.ttf differ diff --git a/docs/_spec/public/octicons/octicons.woff b/docs/_spec/public/octicons/octicons.woff new file mode 100644 index 000000000000..2b770e429f38 Binary files /dev/null and b/docs/_spec/public/octicons/octicons.woff differ diff --git a/docs/_spec/public/scripts/LICENSE-highlight b/docs/_spec/public/scripts/LICENSE-highlight new file mode 100644 index 000000000000..fe2f67b1628e --- /dev/null +++ b/docs/_spec/public/scripts/LICENSE-highlight @@ -0,0 +1,24 @@ +Copyright (c) 2006, Ivan Sagalaev +All rights reserved. 
+Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of highlight.js nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY +EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/docs/_spec/public/scripts/LICENSE-toc b/docs/_spec/public/scripts/LICENSE-toc new file mode 100644 index 000000000000..4e236e8696c3 --- /dev/null +++ b/docs/_spec/public/scripts/LICENSE-toc @@ -0,0 +1,18 @@ +(The MIT License) +Copyright (c) 2013 Greg Allen +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
\ No newline at end of file diff --git a/docs/_spec/public/scripts/highlight.pack.js b/docs/_spec/public/scripts/highlight.pack.js new file mode 100644 index 000000000000..bfeca09abb51 --- /dev/null +++ b/docs/_spec/public/scripts/highlight.pack.js @@ -0,0 +1 @@ +var hljs=new function(){function j(v){return v.replace(/&/gm,"&").replace(//gm,">")}function t(v){return v.nodeName.toLowerCase()}function h(w,x){var v=w&&w.exec(x);return v&&v.index==0}function r(w){var v=(w.className+" "+(w.parentNode?w.parentNode.className:"")).split(/\s+/);v=v.map(function(x){return x.replace(/^lang(uage)?-/,"")});return v.filter(function(x){return i(x)||/no(-?)highlight/.test(x)})[0]}function o(x,y){var v={};for(var w in x){v[w]=x[w]}if(y){for(var w in y){v[w]=y[w]}}return v}function u(x){var v=[];(function w(y,z){for(var A=y.firstChild;A;A=A.nextSibling){if(A.nodeType==3){z+=A.nodeValue.length}else{if(A.nodeType==1){v.push({event:"start",offset:z,node:A});z=w(A,z);if(!t(A).match(/br|hr|img|input/)){v.push({event:"stop",offset:z,node:A})}}}}return z})(x,0);return v}function q(w,y,C){var x=0;var F="";var z=[];function B(){if(!w.length||!y.length){return w.length?w:y}if(w[0].offset!=y[0].offset){return(w[0].offset"}function E(G){F+=""}function v(G){(G.event=="start"?A:E)(G.node)}while(w.length||y.length){var D=B();F+=j(C.substr(x,D[0].offset-x));x=D[0].offset;if(D==w){z.reverse().forEach(E);do{v(D.splice(0,1)[0]);D=B()}while(D==w&&D.length&&D[0].offset==x);z.reverse().forEach(A)}else{if(D[0].event=="start"){z.push(D[0].node)}else{z.pop()}v(D.splice(0,1)[0])}}return F+j(C.substr(x))}function m(y){function v(z){return(z&&z.source)||z}function w(A,z){return RegExp(v(A),"m"+(y.cI?"i":"")+(z?"g":""))}function x(D,C){if(D.compiled){return}D.compiled=true;D.k=D.k||D.bK;if(D.k){var z={};var E=function(G,F){if(y.cI){F=F.toLowerCase()}F.split(" ").forEach(function(H){var I=H.split("|");z[I[0]]=[G,I[1]?Number(I[1]):1]})};if(typeof D.k=="string"){E("keyword",D.k)}else{Object.keys(D.k).forEach(function(F){E(F,D.k[F])})}D.k=z}D.lR=w(D.l||/\b[A-Za-z0-9_]+\b/,true);if(C){if(D.bK){D.b="\\b("+D.bK.split(" ").join("|")+")\\b"}if(!D.b){D.b=/\B|\b/}D.bR=w(D.b);if(!D.e&&!D.eW){D.e=/\B|\b/}if(D.e){D.eR=w(D.e)}D.tE=v(D.e)||"";if(D.eW&&C.tE){D.tE+=(D.e?"|":"")+C.tE}}if(D.i){D.iR=w(D.i)}if(D.r===undefined){D.r=1}if(!D.c){D.c=[]}var B=[];D.c.forEach(function(F){if(F.v){F.v.forEach(function(G){B.push(o(F,G))})}else{B.push(F=="self"?D:F)}});D.c=B;D.c.forEach(function(F){x(F,D)});if(D.starts){x(D.starts,C)}var A=D.c.map(function(F){return F.bK?"\\.?("+F.b+")\\.?":F.b}).concat([D.tE,D.i]).map(v).filter(Boolean);D.t=A.length?w(A.join("|"),true):{exec:function(F){return null}}}x(y)}function c(T,L,J,R){function v(V,W){for(var U=0;U";V+=aa+'">';return V+Y+Z}function N(){if(!I.k){return j(C)}var U="";var X=0;I.lR.lastIndex=0;var V=I.lR.exec(C);while(V){U+=j(C.substr(X,V.index-X));var W=E(I,V);if(W){H+=W[1];U+=w(W[0],j(V[0]))}else{U+=j(V[0])}X=I.lR.lastIndex;V=I.lR.exec(C)}return U+j(C.substr(X))}function F(){if(I.sL&&!f[I.sL]){return j(C)}var U=I.sL?c(I.sL,C,true,S):e(C);if(I.r>0){H+=U.r}if(I.subLanguageMode=="continuous"){S=U.top}return w(U.language,U.value,false,true)}function Q(){return I.sL!==undefined?F():N()}function P(W,V){var U=W.cN?w(W.cN,"",true):"";if(W.rB){D+=U;C=""}else{if(W.eB){D+=j(V)+U;C=""}else{D+=U;C=V}}I=Object.create(W,{parent:{value:I}})}function G(U,Y){C+=U;if(Y===undefined){D+=Q();return 0}var W=v(Y,I);if(W){D+=Q();P(W,Y);return W.rB?0:Y.length}var X=z(I,Y);if(X){var 
V=I;if(!(V.rE||V.eE)){C+=Y}D+=Q();do{if(I.cN){D+=""}H+=I.r;I=I.parent}while(I!=X.parent);if(V.eE){D+=j(Y)}C="";if(X.starts){P(X.starts,"")}return V.rE?0:Y.length}if(A(Y,I)){throw new Error('Illegal lexeme "'+Y+'" for mode "'+(I.cN||"")+'"')}C+=Y;return Y.length||1}var M=i(T);if(!M){throw new Error('Unknown language: "'+T+'"')}m(M);var I=R||M;var S;var D="";for(var K=I;K!=M;K=K.parent){if(K.cN){D=w(K.cN,"",true)+D}}var C="";var H=0;try{var B,y,x=0;while(true){I.t.lastIndex=x;B=I.t.exec(L);if(!B){break}y=G(L.substr(x,B.index-x),B[0]);x=B.index+y}G(L.substr(x));for(var K=I;K.parent;K=K.parent){if(K.cN){D+=""}}return{r:H,value:D,language:T,top:I}}catch(O){if(O.message.indexOf("Illegal")!=-1){return{r:0,value:j(L)}}else{throw O}}}function e(y,x){x=x||b.languages||Object.keys(f);var v={r:0,value:j(y)};var w=v;x.forEach(function(z){if(!i(z)){return}var A=c(z,y,false);A.language=z;if(A.r>w.r){w=A}if(A.r>v.r){w=v;v=A}});if(w.language){v.second_best=w}return v}function g(v){if(b.tabReplace){v=v.replace(/^((<[^>]+>|\t)+)/gm,function(w,z,y,x){return z.replace(/\t/g,b.tabReplace)})}if(b.useBR){v=v.replace(/\n/g,"
")}return v}function p(A){var B=r(A);if(/no(-?)highlight/.test(B)){return}var y;if(b.useBR){y=document.createElementNS("http://www.w3.org/1999/xhtml","div");y.innerHTML=A.innerHTML.replace(/\n/g,"").replace(//g,"\n")}else{y=A}var z=y.textContent;var v=B?c(B,z,true):e(z);var x=u(y);if(x.length){var w=document.createElementNS("http://www.w3.org/1999/xhtml","div");w.innerHTML=v.value;v.value=q(x,u(w),z)}v.value=g(v.value);A.innerHTML=v.value;A.className+=" hljs "+(!B&&v.language||"");A.result={language:v.language,re:v.r};if(v.second_best){A.second_best={language:v.second_best.language,re:v.second_best.r}}}var b={classPrefix:"hljs-",tabReplace:null,useBR:false,languages:undefined};function s(v){b=o(b,v)}function l(){if(l.called){return}l.called=true;var v=document.querySelectorAll("pre code");Array.prototype.forEach.call(v,p)}function a(){addEventListener("DOMContentLoaded",l,false);addEventListener("load",l,false)}var f={};var n={};function d(v,x){var w=f[v]=x(this);if(w.aliases){w.aliases.forEach(function(y){n[y]=v})}}function k(){return Object.keys(f)}function i(v){return f[v]||f[n[v]]}this.highlight=c;this.highlightAuto=e;this.fixMarkup=g;this.highlightBlock=p;this.configure=s;this.initHighlighting=l;this.initHighlightingOnLoad=a;this.registerLanguage=d;this.listLanguages=k;this.getLanguage=i;this.inherit=o;this.IR="[a-zA-Z][a-zA-Z0-9_]*";this.UIR="[a-zA-Z_][a-zA-Z0-9_]*";this.NR="\\b\\d+(\\.\\d+)?";this.CNR="(\\b0[xX][a-fA-F0-9]+|(\\b\\d+(\\.\\d*)?|\\.\\d+)([eE][-+]?\\d+)?)";this.BNR="\\b(0b[01]+)";this.RSR="!|!=|!==|%|%=|&|&&|&=|\\*|\\*=|\\+|\\+=|,|-|-=|/=|/|:|;|<<|<<=|<=|<|===|==|=|>>>=|>>=|>=|>>>|>>|>|\\?|\\[|\\{|\\(|\\^|\\^=|\\||\\|=|\\|\\||~";this.BE={b:"\\\\[\\s\\S]",r:0};this.ASM={cN:"string",b:"'",e:"'",i:"\\n",c:[this.BE]};this.QSM={cN:"string",b:'"',e:'"',i:"\\n",c:[this.BE]};this.PWM={b:/\b(a|an|the|are|I|I'm|isn't|don't|doesn't|won't|but|just|should|pretty|simply|enough|gonna|going|wtf|so|such)\b/};this.CLCM={cN:"comment",b:"//",e:"$",c:[this.PWM]};this.CBCM={cN:"comment",b:"/\\*",e:"\\*/",c:[this.PWM]};this.HCM={cN:"comment",b:"#",e:"$",c:[this.PWM]};this.NM={cN:"number",b:this.NR,r:0};this.CNM={cN:"number",b:this.CNR,r:0};this.BNM={cN:"number",b:this.BNR,r:0};this.CSSNM={cN:"number",b:this.NR+"(%|em|ex|ch|rem|vw|vh|vmin|vmax|cm|mm|in|pt|pc|px|deg|grad|rad|turn|s|ms|Hz|kHz|dpi|dpcm|dppx)?",r:0};this.RM={cN:"regexp",b:/\//,e:/\/[gim]*/,i:/\n/,c:[this.BE,{b:/\[/,e:/\]/,r:0,c:[this.BE]}]};this.TM={cN:"title",b:this.IR,r:0};this.UTM={cN:"title",b:this.UIR,r:0}}();hljs.registerLanguage("scala",function(d){var b={cN:"annotation",b:"@[A-Za-z]+"};var c={cN:"string",b:'u?r?"""',e:'"""',r:10};var a={cN:"symbol",b:"'\\w[\\w\\d_]*(?!')"};var e={cN:"type",b:"\\b[A-Z][A-Za-z0-9_]*",r:0};var h={cN:"title",b:/[^0-9\n\t "'(),.`{}\[\]:;][^\n\t "'(),.`{}\[\]:;]+|[^0-9\n\t "'(),.`{}\[\]:;=]/,r:0};var i={cN:"class",bK:"class object trait type",e:/[:={\[(\n;]/,c:[{cN:"keyword",bK:"extends with",r:10},h]};var g={cN:"function",bK:"def val",e:/[:={\[(\n;]/,c:[h]};var f={cN:"javadoc",b:"/\\*\\*",e:"\\*/",c:[{cN:"javadoctag",b:"@[A-Za-z]+"}],r:10};return{k:{literal:"true false null",keyword:"type yield lazy override def with val var sealed abstract private trait object if forSome for while throw finally protected extends import final return else break new catch super class case package default try this match continue throws implicit"},c:[d.CLCM,d.CBCM,c,d.QSM,a,e,g,i,d.CNM,b]}}); \ No newline at end of file diff --git a/docs/_spec/public/scripts/main.js b/docs/_spec/public/scripts/main.js new file mode 
100644 index 000000000000..9ade9c770f1e --- /dev/null +++ b/docs/_spec/public/scripts/main.js @@ -0,0 +1,71 @@ +function currentChapter() { + var path = document.location.pathname; + var idx = path.lastIndexOf("/") + 1; + var chap = path.substring(idx, idx + 2); + return parseInt(chap, 10); +} + +function heading(i, heading, $heading) { + var currentLevel = parseInt(heading.tagName.substring(1)); + var result = ""; + if (currentLevel === this.headerLevel) { + this.headerCounts[this.headerLevel] += 1; + return "" + this.headerCounts[this.headerLevel] + " " + $heading.text(); + } else if (currentLevel < this.headerLevel) { + while(currentLevel < this.headerLevel) { + this.headerCounts[this.headerLevel] = 1; + this.headerLevel -= 1; + } + this.headerCounts[this.headerLevel] += 1; + return "" + this.headerCounts[this.headerLevel]+ " " + $heading.text(); + } else { + while(currentLevel > this.headerLevel) { + this.headerLevel += 1; + this.headerCounts[this.headerLevel] = 1; + } + return "" + this.headerCounts[this.headerLevel]+ " " + $heading.text(); + } +} + +// ignore when using wkhtmltopdf, or it won't work... +if(window.jekyllEnv !== 'spec-pdf') { + $('#toc').toc( + { + 'selectors': 'h1,h2,h3', + 'smoothScrolling': false, + 'chapter': currentChapter(), + 'headerLevel': 1, + 'headerCounts': [-1, currentChapter() - 1, 1, 1], + 'headerText': heading + } + ); +} + +// no language auto-detect so that EBNF isn't detected as scala +hljs.configure({ + languages: [] +}); + +// KaTeX configuration +document.addEventListener("DOMContentLoaded", function() { + renderMathInElement(document.body, { + delimiters: [ + {left: "´", right: "´", display: false}, // "display: false" -> inline + {left: "$$", right: "$$", display: true} + ], + ignoredTags: ['script', 'noscript', 'style', 'textarea'], + }); + // syntax highlighting after KaTeX is loaded, + // so that math can be used in code blocks + hljs.initHighlighting(); + $("pre nobr").addClass("fixws"); + // point when all necessary js is done, so PDF to be rendered + window.status = "loaded"; +}); + +$("#chapters a").each(function (index) { + if (document.location.pathname.endsWith($(this).attr("href"))) + $(this).addClass("chapter-active"); + else + $(this).removeClass("chapter-active"); +}); diff --git a/docs/_spec/public/scripts/toc.js b/docs/_spec/public/scripts/toc.js new file mode 100644 index 000000000000..5b0bded12cfc --- /dev/null +++ b/docs/_spec/public/scripts/toc.js @@ -0,0 +1,128 @@ +/*! 
+ * toc - jQuery Table of Contents Plugin
+ * v0.3.2
+ * http://projects.jga.me/toc/
+ * copyright Greg Allen 2014
+ * MIT License
+*/
+(function($) {
+var verboseIdCache = {};
+$.fn.toc = function(options) {
+  var self = this;
+  var opts = $.extend({}, jQuery.fn.toc.defaults, options);
+
+  var container = $(opts.container);
+  var headings = $(opts.selectors, container);
+  var headingOffsets = [];
+  var activeClassName = opts.activeClass;
+
+  var scrollTo = function(e, callback) {
+    $('li', self).removeClass(activeClassName);
+    $(e.target).parent().addClass(activeClassName);
+  };
+
+  //highlight on scroll
+  var timeout;
+  var highlightOnScroll = function(e) {
+    if (timeout) {
+      clearTimeout(timeout);
+    }
+    timeout = setTimeout(function() {
+      var top = $(window).scrollTop(),
+        highlighted, closest = Number.MAX_VALUE, index = 0;
+
+      for (var i = 0, c = headingOffsets.length; i < c; i++) {
+        var currentClosest = Math.abs(headingOffsets[i] - top);
+        if (currentClosest < closest) {
+          index = i;
+          closest = currentClosest;
+        }
+      }
+
+      $('li', self).removeClass(activeClassName);
+      highlighted = $('li:eq('+ index +')', self).addClass(activeClassName);
+      opts.onHighlight(highlighted);
+    }, 50);
+  };
+  if (opts.highlightOnScroll) {
+    $(window).on('scroll', highlightOnScroll);
+    highlightOnScroll();
+  }
+
+  return this.each(function() {
+    //build TOC
+    var el = $(this);
+    var ul = $(opts.listType);
+
+    headings.each(function(i, heading) {
+      var $h = $(heading);
+      headingOffsets.push($h.offset().top - opts.highlightOffset);
+
+      var anchorName = opts.anchorName(i, heading, opts.prefix);
+
+      //add anchor
+      if(heading.id !== anchorName) {
+        var anchor = $('<span/>').attr('id', anchorName).insertBefore($h);
+      }
+
+      //build TOC item
+      var a = $('<a/>')
+        .text(opts.headerText(i, heading, $h))
+        .attr('href', '#' + anchorName)
+        .on('click', function(e) {
+          $(window).off('scroll', highlightOnScroll);
+          scrollTo(e, function() {
+            $(window).on('scroll', highlightOnScroll);
+          });
+          el.trigger('selected', $(this).attr('href'));
+        });
+
+      var li = $('<li/>')
+        .addClass(opts.itemClass(i, heading, $h, opts.prefix))
+        .append(a);
+
+      ul.append(li);
+    });
+    el.html(ul);
+  });
+};
+
+
+jQuery.fn.toc.defaults = {
+  container: 'body',
+  listType: '<ul/>',
+  selectors: 'h1,h2,h3',
+  prefix: 'toc',
+  activeClass: 'toc-active',
+  onHighlight: function() {},
+  highlightOnScroll: true,
+  highlightOffset: 100,
+  anchorName: function(i, heading, prefix) {
+    if(heading.id.length) {
+      return heading.id;
+    }
+
+    var candidateId = $(heading).text().replace(/[^a-z0-9]/ig, ' ').replace(/\s+/g, '-').toLowerCase();
+    if (verboseIdCache[candidateId]) {
+      var j = 2;
+
+      while(verboseIdCache[candidateId + j]) {
+        j++;
+      }
+      candidateId = candidateId + '-' + j;
+
+    }
+    verboseIdCache[candidateId] = true;
+
+    return prefix + '-' + candidateId;
+  },
+  headerText: function(i, heading, $heading) {
+    return $heading.text();
+  },
+  itemClass: function(i, heading, $heading, prefix) {
+    return prefix + '-' + $heading[0].tagName.toLowerCase();
+  }
+
+};
+
+})(jQuery);
diff --git a/docs/_spec/public/stylesheets/fonts.css b/docs/_spec/public/stylesheets/fonts.css
new file mode 100644
index 000000000000..36efb2bbd5a0
--- /dev/null
+++ b/docs/_spec/public/stylesheets/fonts.css
@@ -0,0 +1,73 @@
+@font-face {
+  font-family: 'Luxi Sans';
+  src: local('Luxi Sans Regular'),
+       url('../fonts/LuxiSans-Regular.woff') format('woff');
+  font-weight: normal;
+  font-style: normal;
+}
+
+@font-face {
+  font-family: 'Luxi Sans';
+  src: local('Luxi Sans Bold'),
+       url('../fonts/LuxiSans-Bold.woff') format('woff');
+  font-weight: bold;
+  font-style: normal;
+}
+
+@font-face {
+  font-family: 'Luxi Mono';
+  src: local('Luxi Mono Regular'),
+       url('../fonts/LuxiMono-Regular.woff') format('woff');
+  font-weight: normal;
+  font-style: normal;
+}
+@font-face {
+  font-family: 'Luxi Mono';
+  src: local('Luxi Mono Oblique'),
+       url('../fonts/LuxiMono-BoldOblique.woff') format('woff');
+  font-weight: normal;
+  font-style: oblique;
+}
+@font-face {
+  font-family: 'Luxi Mono';
+  src: local('Luxi Mono Bold'),
+       url('../fonts/LuxiMono-Bold.woff') format('woff');
+  font-weight: bold;
+  font-style: normal;
+}
+@font-face {
+  font-family: 'Luxi Mono';
+  src: local('Luxi Mono Bold Oblique'),
+       url('../fonts/LuxiMono-BoldOblique.woff') format('woff');
+  font-weight: bold;
+  font-style: oblique;
+}
+
+@font-face {
+  font-family: 'Heuristica';
+  src: local('Heuristica Regular'),
+       url('../fonts/Heuristica-Regular.woff') format('woff');
+  font-weight: normal;
+  font-style: normal;
+}
+@font-face {
+  font-family: 'Heuristica';
+  src: local('Heuristica Italic'),
+       url('../fonts/Heuristica-RegularItalic.woff') format('woff');
+  font-weight: normal;
+  font-style: italic;
+}
+@font-face {
+  font-family: 'Heuristica';
+  src: local('Heuristica Bold'),
+       url('../fonts/Heuristica-Bold.woff') format('woff');
+  font-weight: bold;
+  font-style: normal;
+}
+@font-face {
+  font-family: 'Heuristica';
+  src: local('Heuristica Bold Italic'),
+       url('../fonts/Heuristica-BoldItalic.woff') format('woff');
+  font-weight: bold;
+  font-style: italic;
+}
diff --git a/docs/_spec/public/stylesheets/print.css b/docs/_spec/public/stylesheets/print.css
new file mode 100644
index 000000000000..f0efff28b203
--- /dev/null
+++ b/docs/_spec/public/stylesheets/print.css
@@ -0,0 +1,42 @@
+/* This removes a few things from screen.css for printing */
+
+body {
+  padding: 0px;
+  margin: 0px;
+}
+
+.anchor, #navigation, .to_top, .version-notice, .hidden-print {
+  display: none !important;
+}
+
+.print-only {
+  display: block;
+}
+
+#content-container {
+  width: 100%;
+  float: none;
+}
+
+/* no scrollbars, jump to next row..
*/ +.highlight pre code { + overflow: hidden; + white-space: pre-wrap; +} + +main { + position: relative; + top: 32px; + margin: 0 0 0 0; + padding: 0px 32px; + max-width: none; + min-width: none; + min-height: none; + background-color: #FFF; +} + +/* Avoid clipped headings https://github.com/pdfkit/pdfkit/issues/113#issuecomment-7027798 */ +h2, h3, h4, h5, h6 { + padding: 0px; + margin: 0px; +} diff --git a/docs/_spec/public/stylesheets/screen-small.css b/docs/_spec/public/stylesheets/screen-small.css new file mode 100644 index 000000000000..674db7c49000 --- /dev/null +++ b/docs/_spec/public/stylesheets/screen-small.css @@ -0,0 +1,57 @@ +body { + padding: 0px; + margin: 0px; +} +aside.left { + position: relative; + margin: 0px auto; + overflow: visible; + height: inherit; + margin-bottom: 40px; + background-color: #073642; +} +header { + position: relative; + height: inherit; + min-height: 32px; +} +main { + max-width: 1000px; + min-width: 600px; + margin: 0 auto; +} + +#chapters a { + font-size: 14px; + max-height: 32px; + padding: 4px 8px; + white-space: nowrap; + display: inline-block; +} +#chapters > #github { + padding: 14px; +} + +#toc { + overflow: visible; +} +#toc .toc-active { + background: inherit; +} +#toc .toc-h1 { + display: inherit; +} +#toc .toc-h1 a { + padding-left: 10px; + color: #FFFFFF; + background: #72D0EB; +} +#toc .toc-h2 a { + padding-left: 30px; +} +#toc .toc-h3 a { + padding-left: 50px; +} +#toc a { + font-size: 14px; +} diff --git a/docs/_spec/public/stylesheets/screen-toc.css b/docs/_spec/public/stylesheets/screen-toc.css new file mode 100644 index 000000000000..7a04bd00f96c --- /dev/null +++ b/docs/_spec/public/stylesheets/screen-toc.css @@ -0,0 +1,37 @@ +body { + padding: 0px; + margin: 0px; +} +header { + height: 96px; + padding: 0px; + width: 100%; + position: relative; + color: #FFFFFF; +} +#header-main { + height: 68px; + line-height: 1.2; + font-size: 32px; +} +#header-sub { + padding-left: 64px; + height: 28px; + background-color:#72D0EB; + vertical-align: middle; +} +#scala-logo { + padding: 10px; +} +#title { + vertical-align: middle; +} +#github { + height: 40px; + padding: 14px; + float: right; + font-size: 0px; +} +li { + margin: 5px; +} diff --git a/docs/_spec/public/stylesheets/screen.css b/docs/_spec/public/stylesheets/screen.css new file mode 100644 index 000000000000..2073613eaea7 --- /dev/null +++ b/docs/_spec/public/stylesheets/screen.css @@ -0,0 +1,521 @@ +/* from https://gist.github.com/andyferra/2554919 */ + +body { + font-family:Heuristica,Georgia,serif; + color: #222222; + line-height: 1.6; + + padding-bottom: 10px; + background-color: white; + padding-left: 30px; +} + +#content-container > *:first-child { + margin-top: 0 !important; +} +#content-container > *:last-child { + margin-bottom: 0 !important; +} + +a { + color: #08C; + text-decoration: none; +} +a:hover, a:focus { + +} +a.absent { + color: #cc0000; +} +a.anchor { + display: block; + margin-left: -35px; + padding-left: 10px; + cursor: pointer; + position: absolute; + top: 0; + left: 0; + bottom: 0; + color: black; + width: 35px; height: 100%; +} + +a.anchor span { + vertical-align: middle; +} + +h1, h2, h3, h4, h5, h6 { + margin: 30px 0 0px; + padding: 0; + /* Fix anchor position due to header */ + padding-top: 32px; + margin-top: -32px; + font-weight: bold; + -webkit-font-smoothing: antialiased; + cursor: text; + position: relative; + pointer-events: none; +} + +h1, h2 { + font-weight: normal; +} + +h1:hover a.anchor, h2:hover a.anchor, h3:hover a.anchor, h4:hover a.anchor, 
h5:hover a.anchor, h6:hover a.anchor { + text-decoration: none; +} + +h1:hover a.anchor span, h2:hover a.anchor span, h3:hover a.anchor span, h4:hover a.anchor span, h5:hover a.anchor span, h6:hover a.anchor span { + display: inline-block; +} + +h1 a.anchor span, h2 a.anchor span, h3 a.anchor span, h4 a.anchor span, h5 a.anchor span, h6 a.anchor span { + display: none; +} + +h1 a.anchor:hover span, h2 a.anchor:hover span, h3 a.anchor:hover span, h4 a.anchor:hover span, h5 a.anchor:hover span, h6 a.anchor:hover span { + display: inline-block; +} + +h1 tt, h1 code { + font-size: inherit; +} + +h2 tt, h2 code { + font-size: inherit; +} + +h3 tt, h3 code { + font-size: inherit; +} + +h4 tt, h4 code { + font-size: inherit; +} + +h5 tt, h5 code { + font-size: inherit; +} + +h6 tt, h6 code { + font-size: inherit; +} + +h1 { + font-size: 28px; + color: black; +} + +h2 { + font-size: 24px; + color: black; +} + +h3 { + font-size: 18px; +} + +h4 { + font-size: 16px; +} + +h5 { + font-size: 14px; +} + +h6 { + color: #777777; + font-size: 14px; +} + +p, blockquote, ul, ol, dl, li, table, pre { + margin: 5px 0 15px; + -moz-font-feature-settings: "onum"; + -ms-font-feature-settings: "onum"; + -webkit-font-feature-settings: "onum"; + font-feature-settings: "onum"; +} + +hr { + background: transparent repeat-x 0 0; + border: 0 none; + color: #cccccc; + height: 4px; + padding: 0; +} + +body > h2:first-child { + margin-top: 0; + padding-top: 0; +} +body > h1:first-child { + margin-top: 0; + padding-top: 0; +} +body > h1:first-child + h2 { + margin-top: 0; + padding-top: 0; +} +body > h3:first-child, body > h4:first-child, body > h5:first-child, body > h6:first-child { + margin-top: 0; + padding-top: 0; +} + +a:first-child h1, a:first-child h2, a:first-child h3, a:first-child h4, a:first-child h5, a:first-child h6 { + margin-top: 0; + padding-top: 0; +} + +h1 p, h2 p, h3 p, h4 p, h5 p, h6 p { + margin-top: 0; +} + +li p.first { + display: inline-block; +} + +ul, ol { + padding-left: 30px; +} + +ul :first-child, ol :first-child { + margin-top: 0; +} + +ul :last-child, ol :last-child { + margin-bottom: 0; +} + +dl { + padding: 0; +} +dl dt { + font-size: 14px; + font-weight: bold; + font-style: italic; + padding: 0; + margin: 15px 0 5px; +} +dl dt:first-child { + padding: 0; +} +dl dt > :first-child { + margin-top: 0; +} +dl dt > :last-child { + margin-bottom: 0; +} +dl dd { + margin: 0 0 15px; + padding: 0 15px; +} +dl dd > :first-child { + margin-top: 0; +} +dl dd > :last-child { + margin-bottom: 0; +} + +blockquote { + border-left: 4px solid #dddddd; + padding: 0 15px; + color: #222222; +} +blockquote > :first-child { + margin-top: 0; +} +blockquote > :last-child { + margin-bottom: 0; +} +blockquote:before { + content: "Example"; + color: #777777; + font-size: 14px; + font-weight: bold; +} + +table { + padding: 0; + margin: 0; + border: none; + border-collapse: collapse; +} +table tr { + background-color: white; +} +table tr:nth-child(2n) { + background-color: #f8f8f8; +} +table tr th { + background-color: #EAEAEA; + font-weight: bold; + text-align: left; + padding: 5px 13px; +} +table tr td { + text-align: left; + padding: 5px 13px; +} +table tr th :first-child, table tr td :first-child { + margin-top: 0; +} +table tr th :last-child, table tr td :last-child { + margin-bottom: 0; +} + +img { + max-width: 100%; +} + +span.frame { + display: block; + overflow: hidden; +} +span.frame > span { + border: 1px solid #dddddd; + display: block; + float: left; + overflow: hidden; + margin: 13px 0 0; + padding: 7px; + 
width: auto; +} +span.frame span img { + display: block; + float: left; +} +span.frame span span { + clear: both; + color: #333333; + display: block; + padding: 5px 0 0; +} +span.align-center { + display: block; + overflow: hidden; + clear: both; +} +span.align-center > span { + display: block; + overflow: hidden; + margin: 13px auto 0; + text-align: center; +} +span.align-center span img { + margin: 0 auto; + text-align: center; +} +span.align-right { + display: block; + overflow: hidden; + clear: both; +} +span.align-right > span { + display: block; + overflow: hidden; + margin: 13px 0 0; + text-align: right; +} +span.align-right span img { + margin: 0; + text-align: right; +} +span.float-left { + display: block; + margin-right: 13px; + overflow: hidden; + float: left; +} +span.float-left span { + margin: 13px 0 0; +} +span.float-right { + display: block; + margin-left: 13px; + overflow: hidden; + float: right; +} +span.float-right > span { + display: block; + overflow: hidden; + margin: 13px auto 0; + text-align: right; +} + +pre, code, tt { + font:14px "Luxi Mono", 'andale mono', 'lucida console', monospace; + line-height:1.5; +} + +.highlight pre { + background-color: #F8F8F8; + border-radius: 3px; + overflow: auto; + padding: 6px 10px; + white-space: nowrap; +} + +code { + background-color: transparent; + border: none; + margin: 0; + padding: 0; + white-space: pre; +} + +aside.left { + height: 100%; + position: fixed; + direction: rtl; + overflow: auto; + left: 0px; + width: 320px; + bottom: -32px; + font-family: "Luxi Sans", serif; + background-color: #073642; +} + +aside.left > nav { + direction: ltr; + top: 32px; + padding-bottom: 32px; +} + +article, aside, details, figcaption, figure, footer, header, hgroup, main, nav, section, summary { + display: block; +} + +audio, canvas, img, svg, video { + vertical-align: middle; +} + +audio, canvas, progress, video { + display: inline-block; + vertical-align: baseline; +} + +main { + position: relative; + top: 32px; + margin: 0 0 0 320px; + padding: 0px 32px; + max-width: 800px; + min-width: 800px; + min-height: 580px; + background-color: #FFF; +} + +header { + position: fixed; + top: 0px; + left: 0px; + height: 32px; + width: 100%; + background-color: #002B36; + margin: 0px 0px; + padding: 0px 0px; + font-family: "Luxi Sans", serif; + font-weight: bold; + z-index: 10; + overflow: hidden; + text-shadow: 1px 1px 0px rgba(0, 43, 54, 0.15); +} + +#chapters a { + color: #FFFFFF; + text-decoration: none; + font-size: 0.63vw; + padding: 100% 5px; +} + +#chapters a:hover, #chapters a:focus, #github:hover, #github:focus { + background: #DC322F; + -webkit-transition: background .2s ease-in; + -moz-transition: background .2s ease-in; + -ms-transition: background .2s ease-in; + -o-transition: background .2s ease-in; + transition: background .2s ease-in; +} + +#chapters a.chapter-active { + background: #72D0EB; +} + + +#toc ul { + margin: 0; + padding: 0; + list-style: none; +} + +#toc li { + margin: 0; + padding: 0; +} + +#toc a { + color: #FFFFFF; /*#073642;*/ + font-weight: bold; + font-size: 12px; + display: block; + text-shadow: 1px 1px 0px rgba(0, 43, 54, 0.15); +} + +#toc a:hover, #toc a:focus { + background: #DC322F; + text-decoration: none; + -webkit-transition: background .2s ease-in; + -moz-transition: background .2s ease-in; + -ms-transition: background .2s ease-in; + -o-transition: background .2s ease-in; + transition: background .2s ease-in; +} + +#toc .toc-h1 { + display: none; +} + +#toc .toc-h2 a { + padding-left: 10px; +} + +#toc 
.toc-h3 a {
+  padding-left: 30px;
+}
+
+#toc .toc-active {
+  background: #72D0EB;
+}
+
+#toc .toc-active a {
+  color: #FFFFFF;
+}
+
+#chapters > #github {
+  padding: 0px;
+  float: right;
+}
+
+.hljs{
+  background: #f8f8f8;
+}
+/* proper rendering of MathJax into highlighted code blocks */
+.fixws { white-space: pre; }
+.fixws .math { white-space: nowrap; }
+
+.version-notice {
+  background-color: #C93A3A;
+  color: #f2f2f2;
+  border:1px solid #ccc;
+  padding: 1em;
+  margin-bottom: 1em;
+}
+.version-notice a {
+  color: #f2f2f2;
+  font-weight: bold;
+  text-decoration: underline;
+}
+
+.print-only {
+  display: none;
+}
diff --git a/docs/_spec/spec-toc.xslt b/docs/_spec/spec-toc.xslt
new file mode 100644
index 000000000000..437b15e3e6f4
--- /dev/null
+++ b/docs/_spec/spec-toc.xslt
@@ -0,0 +1,64 @@
[The 64 lines of XSLT markup were lost in extraction. The recoverable content: the stylesheet emits a page titled "Table of Contents", links ./public/stylesheets/fonts.css, builds a bullet list with one entry per chapter, and carries the comment "added to prevent self-closing tags in QtXmlPatterns".]
diff --git a/docs/sidebar.yml b/docs/sidebar.yml
index 93ca5624a5f8..30ad05d18cf1 100644
--- a/docs/sidebar.yml
+++ b/docs/sidebar.yml
@@ -165,11 +165,18 @@ subsection:
     directory: docs/contributing
     index: contributing/index.md
     subsection:
-      - page: contributing/contribute-knowledge.md
       - page: contributing/getting-started.md
-      - page: contributing/workflow.md
-      - page: contributing/testing.md
-      - page: contributing/debugging.md
+      - index: contributing/workflow.md
+        subsection:
+          - page: contributing/issues/reproduce.md
+          - page: contributing/issues/cause.md
+          - page: contributing/issues/areas.md
+          - page: contributing/issues/debugging.md
+          - page: contributing/issues/other-debugging.md
+          - page: contributing/issues/inspection.md
+          - page: contributing/issues/efficiency.md
+          - page: contributing/issues/testing.md
+          - page: contributing/issues/checklist.md
       - title: IDEs and Tools
         directory: tools
         index: contributing/tools/index.md
@@ -178,17 +185,27 @@ subsection:
       - page: contributing/tools/mill.md
       - page: contributing/tools/scalafix.md
       - title: Procedures
+        directory: procedures
         index: contributing/procedures/index.md
         subsection:
           - page: contributing/procedures/release.md
           - page: contributing/procedures/vulpix.md
+      - title: High Level Architecture
+        directory: architecture
+        index: contributing/architecture/index.md
+        subsection:
+          - page: contributing/architecture/lifecycle.md
+          - page: contributing/architecture/context.md
+          - page: contributing/architecture/phases.md
+          - page: contributing/architecture/types.md
+          - page: contributing/architecture/time.md
+          - page: contributing/architecture/symbols.md
   - title: Internals
     directory: docs/internals
     index: internals/index.md
     subsection:
       - page: internals/backend.md
       - page: internals/classpaths.md
-      - page: internals/core-data-structures.md
      - page: internals/contexts.md
      - page: internals/dotc-scalac.md
      - page: internals/higher-kinded-v2.md
diff --git a/library-js/src/scala/scalajs/runtime/AnonFunctionXXL.scala b/library-js/src/scala/scalajs/runtime/AnonFunctionXXL.scala
new file mode 100644
index 000000000000..87208573eff9
--- /dev/null
+++ b/library-js/src/scala/scalajs/runtime/AnonFunctionXXL.scala
@@ -0,0 +1,8 @@
+package scala.scalajs.runtime
+
+import scala.scalajs.js
+
+@inline
+final class AnonFunctionXXL(f: js.Function1[IArray[Object], Object]) extends scala.runtime.FunctionXXL {
+  override def apply(xs: IArray[Object]): Object = f(xs)
+}
diff --git a/library/src/scala/annotation/unchecked/uncheckedCapabilityLeaks.scala b/library/src/scala/annotation/unchecked/uncheckedCapabilityLeaks.scala
new file mode 100644
index 000000000000..477ac6d742f7
--- /dev/null
+++ b/library/src/scala/annotation/unchecked/uncheckedCapabilityLeaks.scala
@@ -0,0 +1,12 @@
+package scala.annotation
+package unchecked
+
+/** An annotation for mutable variables that are allowed to capture
+ *  the root capability `cap`. Allowing this is not capture safe since
+ *  it can cause leakage of capabilities from local scopes by assigning
+ *  values retaining such capabilities to the annotated variable in
+ *  an outer scope.
+ */
+class uncheckedCaptures extends StaticAnnotation
+
+
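To make the intent of the new annotation concrete, here is a minimal sketch (hypothetical user code, not from the diff; assumes the experimental capture-checking feature is enabled):

```scala
import language.experimental.captureChecking
import scala.annotation.unchecked.uncheckedCaptures

// Sketch: under capture checking, a mutable variable is normally not allowed
// to hold a value that captures the universal capability `cap`; the new
// @uncheckedCaptures annotation opts this one variable out of that check.
@uncheckedCaptures
var lastHook: () => Unit = () => ()
```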
diff --git a/library/src/scala/caps.scala b/library/src/scala/caps.scala
index fb1721f98b35..866e5dbd18cd 100644
--- a/library/src/scala/caps.scala
+++ b/library/src/scala/caps.scala
@@ -4,27 +4,43 @@ import annotation.experimental
 @experimental object caps:
 
-  /** The universal capture reference */
+  /** The universal capture reference (deprecated) */
+  @deprecated("Use `cap` instead")
   val `*`: Any = ()
 
-  object unsafe:
+  /** The universal capture reference */
+  val cap: Any = ()
 
-    /** If argument is of type `cs T`, converts to type `box cs T`. This
-     *  avoids the error that would be raised when boxing `*`.
-     */
-    extension [T](x: T) def unsafeBox: T = x
+  object unsafe:
 
-    /** If argument is of type `box cs T`, converts to type `cs T`. This
-     *  avoids the error that would be raised when unboxing `*`.
-     */
-    extension [T](x: T) def unsafeUnbox: T = x
+    extension [T](x: T)
+      /** If argument is of type `cs T`, converts to type `box cs T`. This
+       *  avoids the error that would be raised when boxing `*`.
+       */
+      @deprecated(since = "3.3")
+      def unsafeBox: T = x
+
+      /** If argument is of type `box cs T`, converts to type `cs T`. This
+       *  avoids the error that would be raised when unboxing `*`.
+       */
+      @deprecated(since = "3.3")
+      def unsafeUnbox: T = x
+
+    extension [T, U](f: T => U)
+      /** If argument is of type `box cs T`, converts to type `cs T`. This
+       *  avoids the error that would be raised when unboxing `*`.
+       */
+      @deprecated(since = "3.3")
+      def unsafeBoxFunArg: T => U = f
 
-    /** If argument is of type `box cs T`, converts to type `cs T`. This
-     *  avoids the error that would be raised when unboxing `*`.
-     */
-    extension [T, U](f: T => U) def unsafeBoxFunArg: T => U = f
   end unsafe
 
+  /** An annotation that expresses the `sealed` modifier on a type parameter.
+   *  It should not be referred to directly in source code.
+   */
+  @deprecated("The Sealed annotation should not be directly used in source code.\nUse the `sealed` modifier on type parameters instead.")
+  class Sealed extends annotation.Annotation
+
   /** Mixing in this trait forces a trait or class to be pure, i.e.
    *  have no capabilities retained in its self type.
    */
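The user-facing effect of the `caps` change, sketched using only the two values the diff itself introduces (assumes an experimental-enabled compiler, since `caps` is marked `@experimental`):

```scala
import scala.caps

// Sketch: the universal capture reference is now spelled `caps.cap`;
// the old `caps.`*`` still compiles but emits the new deprecation
// warning "Use `cap` instead".
val newUniversal: Any = caps.cap
val oldUniversal: Any = caps.`*`   // deprecated
```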
diff --git a/library/src/scala/quoted/Expr.scala b/library/src/scala/quoted/Expr.scala
index 996fe3ff8da2..8243e7dc4a4b 100644
--- a/library/src/scala/quoted/Expr.scala
+++ b/library/src/scala/quoted/Expr.scala
@@ -103,7 +103,7 @@ object Expr {
       case 20 => ofTupleFromSeq20(seq)
       case 21 => ofTupleFromSeq21(seq)
       case 22 => ofTupleFromSeq22(seq)
-      case _ => '{ Tuple.fromIArray(IArray(${Varargs(seq)}: _*)) }
+      case _ => ofTupleFromSeqXXL(seq)
     }
   }
@@ -214,6 +214,18 @@ object Expr {
     case Seq('{ $x1: t1 }, '{ $x2: t2 }, '{ $x3: t3 }, '{ $x4: t4 }, '{ $x5: t5 }, '{ $x6: t6 }, '{ $x7: t7 }, '{ $x8: t8 }, '{ $x9: t9 }, '{ $x10: t10 }, '{ $x11: t11 }, '{ $x12: t12 }, '{ $x13: t13 }, '{ $x14: t14 }, '{ $x15: t15 }, '{ $x16: t16 }, '{ $x17: t17 }, '{ $x18: t18 }, '{ $x19: t19 }, '{ $x20: t20 }, '{ $x21: t21 }, '{ $x22: t22 }) =>
       '{ Tuple22($x1, $x2, $x3, $x4, $x5, $x6, $x7, $x8, $x9, $x10, $x11, $x12, $x13, $x14, $x15, $x16, $x17, $x18, $x19, $x20, $x21, $x22) }
 
+  private def ofTupleFromSeqXXL(seq: Seq[Expr[Any]])(using Quotes): Expr[Tuple] =
+    val tupleTpe = tupleTypeFromSeq(seq)
+    tupleTpe.asType match
+      case '[tpe] =>
+        '{ Tuple.fromIArray(IArray(${Varargs(seq)}*)).asInstanceOf[tpe & Tuple] }
+
+  private def tupleTypeFromSeq(seq: Seq[Expr[Any]])(using Quotes): quotes.reflect.TypeRepr =
+    import quotes.reflect.*
+    val consRef = Symbol.classSymbol("scala.*:").typeRef
+    seq.foldLeft(TypeRepr.of[EmptyTuple]) { (ts, expr) =>
+      AppliedType(consRef, expr.asTerm.tpe :: ts :: Nil)
+    }
 
   /** Given a tuple of the form `(Expr[A1], ..., Expr[An])`, outputs a tuple `Expr[(A1, ..., An)]`. */
   def ofTuple[T <: Tuple: Tuple.IsMappedBy[Expr]: Type](tup: T)(using Quotes): Expr[Tuple.InverseMap[T, Expr]] = {
diff --git a/library/src/scala/quoted/Quotes.scala b/library/src/scala/quoted/Quotes.scala
index 48a387e64169..b6e5a12da2d8 100644
--- a/library/src/scala/quoted/Quotes.scala
+++ b/library/src/scala/quoted/Quotes.scala
@@ -1,6 +1,7 @@
 package scala.quoted
 
 import scala.annotation.experimental
+import scala.annotation.implicitNotFound
 import scala.reflect.TypeTest
 
 /** Current Quotes in scope
@@ -21,7 +22,25 @@ transparent inline def quotes(using q: Quotes): q.type = q
  *
  *  It contains the low-level Typed AST metaprogramming API.
  *  This API does not have the static type guarantees that `Expr` and `Type` provide.
+ *  `Quotes` are generated from an enclosing `${ ... }` or `scala.staging.run`. For example:
+ *  ```scala sc:nocompile
+ *  import scala.quoted._
+ *  inline def myMacro: Expr[T] =
+ *    ${ /* (quotes: Quotes) ?=> */ myExpr }
+ *  def myExpr(using Quotes): Expr[T] =
+ *    '{ f(${ /* (quotes: Quotes) ?=> */ myOtherExpr }) }
+ *  def myOtherExpr(using Quotes): Expr[U] = '{ ... }
+ *  ```
  */
+
+@implicitNotFound("""explain=Maybe this method is missing a `(using Quotes)` parameter.
+
+Maybe that splice `$ { ... }` is missing?
+Given instances of `Quotes` are generated from an enclosing splice `$ { ... }` (or `scala.staging.run` call).
+A splice can be thought of as a method with the following signature.
+  def $[T](body: Quotes ?=> Expr[T]): T
+""")
 trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching =>
 
   // Extension methods for `Expr[T]`
@@ -1410,9 +1429,9 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching =>
      *  )
      *  ```
      *
-     *  @param owner: owner of the generated `meth` symbol
-     *  @param tpe: Type of the definition
-     *  @param rhsFn: Function that receives the `meth` symbol and the a list of references to the `params`
+     *  @param owner owner of the generated `meth` symbol
+     *  @param tpe Type of the definition
+     *  @param rhsFn Function that receives the `meth` symbol and a list of references to the `params`
      */
     def apply(owner: Symbol, tpe: MethodType, rhsFn: (Symbol, List[Tree]) => Tree): Block
   }
@@ -1745,7 +1764,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching =>
 
     /** Returns a type tree reference to the symbol
      *
-     *  @param sym The type symbol for which we are creating a type tree reference.
+     *  @param typeSymbol The type symbol for which we are creating a type tree reference.
      */
     def ref(typeSymbol: Symbol): TypeTree
   }
@@ -2374,7 +2393,16 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching =>
     /** Is this a given parameter clause `(using X1, ..., Xn)` or `(using x1: X1, ..., xn: Xn)` */
     def isGiven: Boolean
     /** Is this an erased parameter clause `(erased x1: X1, ..., xn: Xn)` */
+    // TODO: deprecate in 3.4 and stabilize `erasedArgs` and `hasErasedArgs`.
+    // @deprecated("Use `hasErasedArgs`","3.4")
     def isErased: Boolean
+
+    /** List of `erased` flags for each parameter of the clause */
+    @experimental
+    def erasedArgs: List[Boolean]
+    /** Whether the clause has any erased parameters */
+    @experimental
+    def hasErasedArgs: Boolean
   end TermParamClauseMethods
 
   /** A type parameter clause `[X1, ..., Xn]` */
@@ -2650,7 +2678,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching =>
      */
     def isContextFunctionType: Boolean
 
-    /** Is this type an erased function type?
+    /** Is this type a function type with erased parameters?
      *
      *  @see `isFunctionType`
      */
@@ -3143,9 +3171,19 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching =>
   /** Extension methods of `MethodType` */
   trait MethodTypeMethods:
     extension (self: MethodType)
-      /** Is this the type of given parameter clause `(implicit X1, ..., Xn)`, `(given X1, ..., Xn)` or `(given x1: X1, ..., xn: Xn)` */
+      /** Is this the type of a using parameter clause `(implicit X1, ..., Xn)`, `(using X1, ..., Xn)` or `(using x1: X1, ..., xn: Xn)` */
       def isImplicit: Boolean
+      /** Is this the type of an erased parameter clause `(erased x1: X1, ..., xn: Xn)` */
+      // TODO: deprecate in 3.4 and stabilize `erasedParams` and `hasErasedParams`.
+      // @deprecated("Use `hasErasedParams`","3.4")
       def isErased: Boolean
+
+      /** List of `erased` flags for each parameter of the clause */
+      @experimental
+      def erasedParams: List[Boolean]
+      /** Whether the clause has any erased parameters */
+      @experimental
+      def hasErasedParams: Boolean
       def param(idx: Int): TypeRepr
     end extension
   end MethodTypeMethods
@@ -3747,9 +3785,10 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching =>
      *  @param parent The owner of the method
      *  @param name The name of the method
      *  @param tpe The type of the method (MethodType, PolyType, ByNameType)
-     *  @param flags extra flags to with which the symbol should be constructed
+     *  @param flags extra flags with which the symbol should be constructed. `Method` flag will be added. Can be `Private | Protected | Override | Deferred | Final | Method | Implicit | Given | Local | JavaStatic`
      *  @param privateWithin the symbol within which this new method symbol should be private. May be noSymbol.
      */
+    // Keep: `flags` doc aligned with QuotesImpl's `validMethodFlags`
     def newMethod(parent: Symbol, name: String, tpe: TypeRepr, flags: Flags, privateWithin: Symbol): Symbol
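Since the `flags` documentation above is easiest to read next to a concrete call, here is a hedged sketch of `newMethod` together with `DefDef` (the helper name, owner, and method name are illustrative, not from the diff):

```scala
import scala.quoted.*

// Hypothetical sketch: create  def greet(): String = "hi"  under `owner`.
// Per the doc comment above, the `Method` flag is added automatically.
def greetMethod(using q: Quotes)(owner: q.reflect.Symbol): q.reflect.DefDef =
  import q.reflect.*
  val sym = Symbol.newMethod(
    owner,
    "greet",
    MethodType(Nil)(_ => Nil, _ => TypeRepr.of[String]),  // (): String
    Flags.EmptyFlags,
    Symbol.noSymbol
  )
  DefDef(sym, _ => Some(Literal(StringConstant("hi"))))
```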
     /** Generates a new val/var/lazy val symbol with the given parent, name and type.
@@ -3763,11 +3802,12 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching =>
      *  @param parent The owner of the val/var/lazy val
      *  @param name The name of the val/var/lazy val
      *  @param tpe The type of the val/var/lazy val
-     *  @param flags extra flags to with which the symbol should be constructed
+     *  @param flags extra flags with which the symbol should be constructed. Can be `Private | Protected | Override | Deferred | Final | Param | Implicit | Lazy | Mutable | Local | ParamAccessor | Module | Package | Case | CaseAccessor | Given | Enum | JavaStatic`
      *  @param privateWithin the symbol within which this new method symbol should be private. May be noSymbol.
      *  @note As a macro can only splice code into the point at which it is expanded, all generated symbols must be
      *        direct or indirect children of the reflection context's owner.
      */
+    // Keep: `flags` doc aligned with QuotesImpl's `validValFlags`
     def newVal(parent: Symbol, name: String, tpe: TypeRepr, flags: Flags, privateWithin: Symbol): Symbol
 
     /** Generates a pattern bind symbol with the given parent, name and type.
@@ -3778,11 +3818,12 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching =>
      *
      *  @param parent The owner of the binding
      *  @param name The name of the binding
-     *  @param flags extra flags to with which the symbol should be constructed
+     *  @param flags extra flags with which the symbol should be constructed. `Case` flag will be added. Can be `Case`
      *  @param tpe The type of the binding
      *  @note As a macro can only splice code into the point at which it is expanded, all generated symbols must be
      *        direct or indirect children of the reflection context's owner.
      */
+    // Keep: `flags` doc aligned with QuotesImpl's `validBindFlags`
     def newBind(parent: Symbol, name: String, flags: Flags, tpe: TypeRepr): Symbol
 
     /** Definition not available */
@@ -4256,6 +4297,10 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching =>
      */
     def FunctionClass(arity: Int, isImplicit: Boolean = false, isErased: Boolean = false): Symbol
 
+    /** The `scala.runtime.ErasedFunction` built-in trait. */
+    @experimental
+    def ErasedFunctionClass: Symbol
+
     /** Function-like object that maps arity to symbols for classes `scala.TupleX`.
      *   -  0th element is `NoSymbol`
      *   -  1st element is `NoSymbol`
@@ -4312,6 +4357,13 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching =>
     /** Is this symbol `abstract` */
     def Abstract: Flags
 
+    /** Is this an abstract override method?
+     *
+     *  This corresponds to a definition declared as "abstract override def" in the source.
+     *  See https://stackoverflow.com/questions/23645172/why-is-abstract-override-required-not-override-alone-in-subtrait for examples.
+     */
+    @experimental def AbsOverride: Flags
+
     /** Is this generated by the Scala compiler?
      *  Corresponds to ACC_SYNTHETIC in the JVM.
      */
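A hedged sketch of how the new experimental erased-parameter queries compose with existing reflection calls (`erasedReport` and the inspected type are hypothetical; callers need the experimental feature enabled):

```scala
import scala.quoted.*

// Sketch: report which parameters of T's declared methods are `erased`,
// using the new `hasErasedParams`/`erasedParams` from the hunks above.
def erasedReport[T: Type](using Quotes): List[(String, List[Boolean])] =
  import quotes.reflect.*
  val tpe = TypeRepr.of[T]
  tpe.typeSymbol.declaredMethods.map { m =>
    tpe.memberType(m) match
      case mt: MethodType => (m.name, mt.erasedParams)  // experimental API
      case _              => (m.name, Nil)
  }
```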
@@ -4851,7 +4903,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching =>
             case self: ValDef => self
           }
           val body = tree.body.map(transformStatement(_)(tree.symbol))
-          ClassDef.copy(tree)(tree.name, constructor.asInstanceOf[DefDef], parents, self, body) // cast as workaround for lampepfl/dotty#14821. TODO remove when referenceVersion >= 3.2.0-RC1
+          ClassDef.copy(tree)(tree.name, constructor, parents, self, body)
         case tree: Import =>
           Import.copy(tree)(transformTerm(tree.expr)(owner), tree.selectors)
         case tree: Export =>
diff --git a/library/src/scala/runtime/ErasedFunction.scala b/library/src/scala/runtime/ErasedFunction.scala
new file mode 100644
index 000000000000..7e9211bba75a
--- /dev/null
+++ b/library/src/scala/runtime/ErasedFunction.scala
@@ -0,0 +1,11 @@
+package scala.runtime
+
+import scala.annotation.experimental
+
+/** Marker trait for function types with erased parameters.
+ *
+ *  This trait will be refined with an `apply` method with erased parameters:
+ *    ErasedFunction { def apply([erased] x_1: P_1, ..., [erased] x_N: P_N): R }
+ *  This type will be erased to FunctionL, where L = N - count(erased).
+ */
+@experimental trait ErasedFunction
diff --git a/library/src/scala/runtime/LazyVals.scala b/library/src/scala/runtime/LazyVals.scala
index d8c89c7abf28..0edbe0e748f4 100644
--- a/library/src/scala/runtime/LazyVals.scala
+++ b/library/src/scala/runtime/LazyVals.scala
@@ -10,7 +10,7 @@ import scala.annotation.*
 object LazyVals {
   @nowarn
   private[this] val unsafe: sun.misc.Unsafe = {
-    def throwInitializationException() = 
+    def throwInitializationException() =
       throw new ExceptionInInitializerError(
         new IllegalStateException("Can't find instance of sun.misc.Unsafe")
       )
diff --git a/library/src/scala/runtime/stdLibPatches/language.scala b/library/src/scala/runtime/stdLibPatches/language.scala
index 401926dbab4d..091e75fa06e1 100644
--- a/library/src/scala/runtime/stdLibPatches/language.scala
+++ b/library/src/scala/runtime/stdLibPatches/language.scala
@@ -61,6 +61,22 @@ object language:
   @compileTimeOnly("`saferExceptions` can only be used at compile time in import statements")
   object saferExceptions
 
+  /** Adds support for clause interleaving:
+   *  methods can now have as many type parameter clauses as they like, which allows
+   *  type bounds to depend on terms: `def f(x: Int)[A <: x.type]: A`
+   *
+   *  @see [[https://docs.scala-lang.org/sips/clause-interleaving.html]]
+   */
+  @compileTimeOnly("`clauseInterleaving` can only be used at compile time in import statements")
+  object clauseInterleaving
+
+  /** Adds support for relaxed imports of extension methods.
+   *  Extension methods with the same name can be imported from several places.
+   *
+   *  @see [[http://dotty.epfl.ch/docs/reference/contextual/extension-methods]]
+   */
+  @compileTimeOnly("`relaxedExtensionImports` can only be used at compile time in import statements")
+  object relaxedExtensionImports
+
   /** Experimental support for pure function type syntax
    *
    *  @see [[https://dotty.epfl.ch/docs/reference/experimental/purefuns]]
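The clause-interleaving doc comment's one-line example, expanded into a compilable sketch (names hypothetical):

```scala
import scala.language.experimental.clauseInterleaving

// Sketch: with clause interleaving, a type parameter clause may follow a
// term clause, so the bound `x.type` can refer to the earlier parameter.
def f(x: Int)[A <: x.type](a: A): A = a
// Without the language import above, this definition is rejected.
```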
diff --git a/library/src/scala/util/NotGiven.scala b/library/src/scala/util/NotGiven.scala
index 99cc903d4426..973e709042cb 100644
--- a/library/src/scala/util/NotGiven.scala
+++ b/library/src/scala/util/NotGiven.scala
@@ -31,11 +31,13 @@ trait LowPriorityNotGiven {
 }
 
 object NotGiven extends LowPriorityNotGiven {
+  private val cachedValue = new NotGiven[Nothing]()
+
   /** A value of type `NotGiven` to signal a successful search for `NotGiven[C]` (i.e. a failing
    *  search for `C`). A reference to this value will be explicitly constructed by Dotty's
    *  implicit search algorithm.
    */
-  def value: NotGiven[Nothing] = new NotGiven[Nothing]()
+  def value: NotGiven[Nothing] = cachedValue
 
   /** One of two ambiguous methods used to emulate negation in Scala 2 */
   given amb1[T](using ev: T): NotGiven[T] = ???
diff --git a/library/src/scala/util/boundary.scala b/library/src/scala/util/boundary.scala
index 3c6c6982c7ee..2edd754bbb93 100644
--- a/library/src/scala/util/boundary.scala
+++ b/library/src/scala/util/boundary.scala
@@ -1,4 +1,5 @@
 package scala.util
+import scala.annotation.implicitNotFound
 
 /** A boundary that can be exited by `break` calls.
  *  `boundary` and `break` represent a unified and superior alternative for the
@@ -34,6 +35,7 @@ object boundary:
 
   /** Labels are targets indicating which boundary will be exited by a `break`.
    */
+  @implicitNotFound("explain=A Label is generated from an enclosing `scala.util.boundary` call.\nMaybe that boundary is missing?")
   final class Label[-T]
 
   /** Abort current computation and instead return `value` as the value of
diff --git a/project/Build.scala b/project/Build.scala
index a56a00ee4790..f3ec6bb54548 100644
--- a/project/Build.scala
+++ b/project/Build.scala
@@ -80,9 +80,9 @@ object DottyJSPlugin extends AutoPlugin {
 object Build {
   import ScaladocConfigs._
 
-  val referenceVersion = "3.2.2"
+  val referenceVersion = "3.3.0"
 
-  val baseVersion = "3.3.0"
+  val baseVersion = "3.3.1"
 
   // Versions used by the vscode extension to create a new project
   // This should be the latest published releases.
@@ -98,7 +98,7 @@ object Build {
   *  set to 3.1.3. If it is going to be 3.1.0, it must be set to the latest
   *  3.0.x release.
   */
-  val previousDottyVersion = "3.2.2"
+  val previousDottyVersion = "3.3.0"
 
   object CompatMode {
     final val BinaryCompatible = 0
@@ -360,6 +360,7 @@ object Build {
   // Settings used when compiling dotty with a non-bootstrapped dotty
   lazy val commonBootstrappedSettings = commonDottySettings ++ NoBloopExport.settings ++ Seq(
+    // To enable support for the scaladoc and language-server projects, change this to true and use sbt as your build server
     bspEnabled := false,
     (Compile / unmanagedSourceDirectories) += baseDirectory.value / "src-bootstrapped",
 
@@ -546,7 +547,7 @@ object Build {
       // get libraries onboard
       libraryDependencies ++= Seq(
-        "org.scala-lang.modules" % "scala-asm" % "9.4.0-scala-1", // used by the backend
+        "org.scala-lang.modules" % "scala-asm" % "9.5.0-scala-1", // used by the backend
         Dependencies.oldCompilerInterface, // we stick to the old version to avoid deprecation warnings
         "org.jline" % "jline-reader" % "3.19.0",   // used by the REPL
         "org.jline" % "jline-terminal" % "3.19.0",
@@ -925,7 +926,6 @@ object Build {
   lazy val `stdlib-bootstrapped` = project.in(file("stdlib-bootstrapped")).
     withCommonSettings(Bootstrapped).
     dependsOn(dottyCompiler(Bootstrapped) % "provided; compile->runtime; test->test").
-    dependsOn(`scala3-tasty-inspector` % "test->test").
     settings(commonBootstrappedSettings).
     settings(
       moduleName := "scala-library",
@@ -1130,6 +1130,7 @@ object Build {
     enablePlugins(DottyJSPlugin).
     dependsOn(`scala3-library-bootstrappedJS`).
     settings(
+      bspEnabled := false,
       scalacOptions --= Seq("-Xfatal-warnings", "-deprecation"),
 
       // Required to run Scala.js tests.
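Returning to the `scala.util.boundary` hunk above, a small sketch of the code shape whose missing `boundary` would trigger the new `Label` error message (function name hypothetical):

```scala
import scala.util.boundary, boundary.break

// Sketch: `break` needs a given boundary.Label, supplied by an enclosing
// `boundary { ... }`. Without one, the @implicitNotFound explanation added
// in this PR points the user at the missing boundary call.
def firstNegative(xs: List[Int]): Option[Int] =
  boundary:
    for x <- xs do
      if x < 0 then break(Some(x))
    None
```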
@@ -1217,6 +1218,18 @@ object Build { org.scalajs.jsenv.Input.Script(f) +: (Test / jsEnvInput).value }, + Test / unmanagedSourceDirectories ++= { + val linkerConfig = scalaJSStage.value match { + case FastOptStage => (Test / fastLinkJS / scalaJSLinkerConfig).value + case FullOptStage => (Test / fullLinkJS / scalaJSLinkerConfig).value + } + + if (linkerConfig.moduleKind != ModuleKind.NoModule && !linkerConfig.closureCompiler) + Seq(baseDirectory.value / "test-require-multi-modules") + else + Nil + }, + (Compile / managedSources) ++= { val dir = fetchScalaJSSource.value ( @@ -1935,7 +1948,7 @@ object ScaladocConfigs { } lazy val DefaultGenerationConfig = Def.task { - def distLocation = (dist / pack).value + def distLocation = (dist / Compile / pack).value DefaultGenerationSettings.value } diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 1dbf732a5b6e..54bc6ecadfe0 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -4,24 +4,27 @@ import sbt._ * to ensure the same version of the dependency is used in all projects */ object Dependencies { - private val jacksonVersion = "2.13.3" + private val jacksonVersion = "2.15.1" val `jackson-databind` = "com.fasterxml.jackson.core" % "jackson-databind" % jacksonVersion val `jackson-dataformat-yaml` = "com.fasterxml.jackson.dataformat" % "jackson-dataformat-yaml" % jacksonVersion - private val flexmarkVersion = "0.42.12" + // Freeze on 0.62.x as 0.64.0 requires Java 11 + private val flexmarkVersion = "0.62.2" val flexmarkDeps = Seq( "com.vladsch.flexmark" % "flexmark" % flexmarkVersion, - "com.vladsch.flexmark" % "flexmark-html-parser" % flexmarkVersion, + "com.vladsch.flexmark" % "flexmark-util-ast" % flexmarkVersion, + "com.vladsch.flexmark" % "flexmark-util-data" % flexmarkVersion, + "com.vladsch.flexmark" % "flexmark-util-html" % flexmarkVersion, "com.vladsch.flexmark" % "flexmark-ext-anchorlink" % flexmarkVersion, "com.vladsch.flexmark" % "flexmark-ext-autolink" % flexmarkVersion, "com.vladsch.flexmark" % "flexmark-ext-emoji" % flexmarkVersion, "com.vladsch.flexmark" % "flexmark-ext-gfm-strikethrough" % flexmarkVersion, - "com.vladsch.flexmark" % "flexmark-ext-gfm-tables" % flexmarkVersion, "com.vladsch.flexmark" % "flexmark-ext-gfm-tasklist" % flexmarkVersion, "com.vladsch.flexmark" % "flexmark-ext-wikilink" % flexmarkVersion, + "com.vladsch.flexmark" % "flexmark-ext-tables" % flexmarkVersion, "com.vladsch.flexmark" % "flexmark-ext-yaml-front-matter" % flexmarkVersion, ) diff --git a/project/DocumentationWebsite.scala b/project/DocumentationWebsite.scala index e24917a60803..5f8e499af62f 100644 --- a/project/DocumentationWebsite.scala +++ b/project/DocumentationWebsite.scala @@ -1,4 +1,5 @@ import java.io.File +import java.net.URI import java.nio.file.Paths import sbt._ import Build._ @@ -48,7 +49,7 @@ object DocumentationWebsite { sbt.IO.touch(inkuireDestinationFile) def tryFetch(retries: Int, timeout: Duration): Unit = { - val downloadProcess = (new java.net.URL(inkuireLink) #> inkuireDestinationFile).run() + val downloadProcess = (new URI(inkuireLink).toURL #> inkuireDestinationFile).run() val result: Future[Int] = Future(blocking(downloadProcess.exitValue())) try { Await.result(result, timeout) match { diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala index 689a4b8f1614..112a5601615c 100644 --- a/project/MiMaFilters.scala +++ b/project/MiMaFilters.scala @@ -3,12 +3,20 @@ import com.typesafe.tools.mima.core._ object MiMaFilters { val Library: Seq[ProblemFilter] = Seq( - 
ProblemFilters.exclude[DirectMissingMethodProblem]("scala.caps.unsafeBox"),
-    ProblemFilters.exclude[DirectMissingMethodProblem]("scala.caps.unsafeUnbox"),
+    ProblemFilters.exclude[MissingClassProblem]("scala.annotation.unchecked.uncheckedCaptures"),
+
+    // Scala.js only: new runtime support class in 3.2.3; not available to users
+    ProblemFilters.exclude[MissingClassProblem]("scala.scalajs.runtime.AnonFunctionXXL"),
+
+    // New experimental features in 3.3.X
+    ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.clauseInterleaving"),
+    ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$clauseInterleaving$"),
+    ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.relaxedExtensionImports"),
+    ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$relaxedExtensionImports$"),
+    // end of New experimental features in 3.3.X
+  )
+
+  val TastyCore: Seq[ProblemFilter] = Seq(
   )
-  val TastyCore: Seq[ProblemFilter] = Seq()
   val Interfaces: Seq[ProblemFilter] = Seq(
-    ProblemFilters.exclude[MissingClassProblem]("dotty.tools.dotc.interfaces.DiagnosticRelatedInformation"),
-    ProblemFilters.exclude[ReversedMissingMethodProblem]("dotty.tools.dotc.interfaces.Diagnostic.diagnosticRelatedInformation")
   )
 }
diff --git a/project/build.properties b/project/build.properties
index 8b9a0b0ab037..46e43a97ed86 100644
--- a/project/build.properties
+++ b/project/build.properties
@@ -1 +1 @@
-sbt.version=1.8.0
+sbt.version=1.8.2
diff --git a/project/build.sbt b/project/build.sbt
index e19492c42022..188dfa5c6702 100644
--- a/project/build.sbt
+++ b/project/build.sbt
@@ -1,7 +1,4 @@
 // Used by VersionUtil to get gitHash and commitDate
 libraryDependencies += "org.eclipse.jgit" % "org.eclipse.jgit" % "4.11.0.201803080745-r"
-
-Compile / unmanagedSourceDirectories +=
-  baseDirectory.value / "../language-server/src/dotty/tools/languageserver/config"
 
 libraryDependencies += Dependencies.`jackson-databind`
diff --git a/project/plugins.sbt b/project/plugins.sbt
index aba843ca2c3c..ccbcdeed22fc 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -8,14 +8,14 @@ libraryDependencySchemes +=
 
 addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.12.0")
 
-addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.10")
+addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.20")
 
-addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.0")
+addSbtPlugin("com.github.sbt" % "sbt-pgp" % "2.2.1")
 
-addSbtPlugin("org.xerial.sbt" % "sbt-pack" % "0.13")
+addSbtPlugin("org.xerial.sbt" % "sbt-pack" % "0.17")
 
 addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3")
 
-addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.9.0")
+addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.11.0")
 
-addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.0")
+addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.2")
diff --git a/project/scripts/bisect.scala b/project/scripts/bisect.scala
index e60e632a7feb..2e554a885c79 100755
--- a/project/scripts/bisect.scala
+++ b/project/scripts/bisect.scala
@@ -31,6 +31,7 @@ val usageMessage = """
 |The optional <options> may be any combination of:
 |* --dry-run
 |    Don't try to bisect - just make sure the validation command works correctly
+|
 |* --releases
 |    Bisect only releases from the given range (defaults to all releases).
 |    The range format is <first>..<last>, where both <first> and <last> are optional, e.g.
@@ -38,8 +39,12 @@ val usageMessage = """
 |    * 3.2.1-RC1-bin-20220620-de3a82c-NIGHTLY..
 |    * ..3.3.0-RC1-bin-20221124-e25362d-NIGHTLY
 |  The ranges are treated as inclusive.
+|
 |* --bootstrapped
-|    Publish locally and test a bootstrapped compiler rather than a nonboostrapped one
+|    Publish locally and test a bootstrapped compiler rather than a non-bootstrapped one.
+|
+|* --should-fail
+|    Expect the validation command to fail rather than succeed. This can be used e.g. to find out when some illegal code started to compile.
 |
 |Warning: The bisect script should not be run multiple times in parallel because of a potential race condition while publishing artifacts locally.
@@ -54,7 +59,7 @@ val usageMessage = """
   val validationScript = scriptOptions.validationCommand.validationScript
   val releases = Releases.fromRange(scriptOptions.releasesRange)
-  val releaseBisect = ReleaseBisect(validationScript, releases)
+  val releaseBisect = ReleaseBisect(validationScript, shouldFail = scriptOptions.shouldFail, releases)
 
   releaseBisect.verifyEdgeReleases()
 
@@ -64,18 +69,19 @@ val usageMessage = """
   println(s"First bad release: ${firstBadRelease.version}")
   println("\nFinished bisecting releases\n")
 
-  val commitBisect = CommitBisect(validationScript, bootstrapped = scriptOptions.bootstrapped, lastGoodRelease.hash, firstBadRelease.hash)
+  val commitBisect = CommitBisect(validationScript, shouldFail = scriptOptions.shouldFail, bootstrapped = scriptOptions.bootstrapped, lastGoodRelease.hash, firstBadRelease.hash)
   commitBisect.bisect()
 
-case class ScriptOptions(validationCommand: ValidationCommand, dryRun: Boolean, bootstrapped: Boolean, releasesRange: ReleasesRange)
+case class ScriptOptions(validationCommand: ValidationCommand, dryRun: Boolean, bootstrapped: Boolean, releasesRange: ReleasesRange, shouldFail: Boolean)
 object ScriptOptions:
   def fromArgs(args: Seq[String]) =
     val defaultOptions = ScriptOptions(
       validationCommand = null,
       dryRun = false,
       bootstrapped = false,
-      ReleasesRange(first = None, last = None)
+      ReleasesRange(first = None, last = None),
+      shouldFail = false
     )
     parseArgs(args, defaultOptions)
 
@@ -86,6 +92,7 @@ object ScriptOptions:
       case "--releases" :: argsRest =>
        val range = ReleasesRange.tryParse(argsRest.head).get
        parseArgs(argsRest.tail, options.copy(releasesRange = range))
+      case "--should-fail" :: argsRest => parseArgs(argsRest, options.copy(shouldFail = true))
      case _ =>
        val command = ValidationCommand.fromArgs(args)
        options.copy(validationCommand = command)
@@ -182,7 +189,7 @@ case class Release(version: String):
   override def toString: String = version
 
-class ReleaseBisect(validationScript: File, allReleases: Vector[Release]):
+class ReleaseBisect(validationScript: File, shouldFail: Boolean, allReleases: Vector[Release]):
   assert(allReleases.length > 1, "Need at least 2 releases to bisect")
 
   private val isGoodReleaseCache = collection.mutable.Map.empty[Release, Boolean]
@@ -217,21 +224,22 @@ class ReleaseBisect(validationScript: File, allReleases: Vector[Release]):
     isGoodReleaseCache.getOrElseUpdate(release, {
       println(s"Testing ${release.version}")
       val result = Seq(validationScript.getAbsolutePath, release.version).!
- val isGood = result == 0 + val isGood = if(shouldFail) result != 0 else result == 0 // invert the process status if failure was expected println(s"Test result: ${release.version} is a ${if isGood then "good" else "bad"} release\n") isGood }) -class CommitBisect(validationScript: File, bootstrapped: Boolean, lastGoodHash: String, fistBadHash: String): +class CommitBisect(validationScript: File, shouldFail: Boolean, bootstrapped: Boolean, lastGoodHash: String, fistBadHash: String): def bisect(): Unit = println(s"Starting bisecting commits $lastGoodHash..$fistBadHash\n") val scala3CompilerProject = if bootstrapped then "scala3-compiler-bootstrapped" else "scala3-compiler" val scala3Project = if bootstrapped then "scala3-bootstrapped" else "scala3" + val validationCommandStatusModifier = if shouldFail then "! " else "" // invert the process status if failure was expected val bisectRunScript = s""" |scalaVersion=$$(sbt "print ${scala3CompilerProject}/version" | tail -n1) |rm -r out |sbt "clean; ${scala3Project}/publishLocal" - |${validationScript.getAbsolutePath} "$$scalaVersion" + |${validationCommandStatusModifier}${validationScript.getAbsolutePath} "$$scalaVersion" """.stripMargin "git bisect start".! s"git bisect bad $fistBadHash".! diff --git a/sbt-bridge/src/dotty/tools/xsbt/DelegatingReporter.java b/sbt-bridge/src/dotty/tools/xsbt/DelegatingReporter.java index 3e1e291ab7d1..25b934000144 100644 --- a/sbt-bridge/src/dotty/tools/xsbt/DelegatingReporter.java +++ b/sbt-bridge/src/dotty/tools/xsbt/DelegatingReporter.java @@ -39,13 +39,16 @@ public void doReport(Diagnostic dia, Context ctx) { StringBuilder rendered = new StringBuilder(); rendered.append(messageAndPos(dia, ctx)); Message message = dia.msg(); + StringBuilder messageBuilder = new StringBuilder(); + messageBuilder.append(message.message()); String diagnosticCode = String.valueOf(message.errorId().errorNumber()); boolean shouldExplain = Diagnostic.shouldExplain(dia, ctx); if (shouldExplain && !message.explanation().isEmpty()) { rendered.append(explanation(message, ctx)); + messageBuilder.append(System.lineSeparator()).append(explanation(message, ctx)); } - delegate.log(new Problem(position, message.message(), severity, rendered.toString(), diagnosticCode)); + delegate.log(new Problem(position, messageBuilder.toString(), severity, rendered.toString(), diagnosticCode)); } private static Severity severityOf(int level) { diff --git a/sbt-test/java-compat/i15288/QueryRequest.java b/sbt-test/java-compat/i15288/QueryRequest.java new file mode 100644 index 000000000000..e43487e09449 --- /dev/null +++ b/sbt-test/java-compat/i15288/QueryRequest.java @@ -0,0 +1,9 @@ +interface CopyableBuilder {} +interface ToCopyableBuilder {} + +public class QueryRequest implements ToCopyableBuilder { + public static Builder builder() { throw new UnsupportedOperationException(); } + public interface Builder extends CopyableBuilder { + void build(); + } +} diff --git a/sbt-test/java-compat/i15288/Test.scala b/sbt-test/java-compat/i15288/Test.scala new file mode 100644 index 000000000000..e03617ac4c33 --- /dev/null +++ b/sbt-test/java-compat/i15288/Test.scala @@ -0,0 +1,2 @@ +class Test: + def makeQuery = QueryRequest.builder().build() diff --git a/sbt-test/java-compat/i15288/build.sbt b/sbt-test/java-compat/i15288/build.sbt new file mode 100644 index 000000000000..63e314982c41 --- /dev/null +++ b/sbt-test/java-compat/i15288/build.sbt @@ -0,0 +1 @@ +scalaVersion := sys.props("plugin.scalaVersion") diff --git a/sbt-test/java-compat/i15288/test 
b/sbt-test/java-compat/i15288/test new file mode 100644 index 000000000000..ad1a8a5987ee --- /dev/null +++ b/sbt-test/java-compat/i15288/test @@ -0,0 +1,5 @@ +## This could just be a pos test checked by FromTastyTests, but +## ParallelTesting#compileTastyInDir does not support test with multiple files +## currently. +> compile +> doc diff --git a/sbt-test/sbt-dotty/dotty-knowledge.i17/project/build.properties b/sbt-test/sbt-dotty/dotty-knowledge.i17/project/build.properties index 8b9a0b0ab037..46e43a97ed86 100644 --- a/sbt-test/sbt-dotty/dotty-knowledge.i17/project/build.properties +++ b/sbt-test/sbt-dotty/dotty-knowledge.i17/project/build.properties @@ -1 +1 @@ -sbt.version=1.8.0 +sbt.version=1.8.2 diff --git a/sbt-test/sbt-dotty/tasty-inspector-jars/build.sbt b/sbt-test/sbt-dotty/tasty-inspector-jars/build.sbt index 59dd85290bf0..3bece1b43fa7 100644 --- a/sbt-test/sbt-dotty/tasty-inspector-jars/build.sbt +++ b/sbt-test/sbt-dotty/tasty-inspector-jars/build.sbt @@ -15,7 +15,7 @@ lazy val inspector = project .settings( scalaVersion := dottyVersion, libraryDependencies += "org.scala-lang" %% "scala3-tasty-inspector" % scalaVersion.value, - runTest := + runTest := Def.sequential( Def.task(IO.copyFile((lib/Compile/packageBin).value, jarDest)), (Compile/run).toTask(" " + jarDest.getAbsolutePath) diff --git a/sbt-test/source-dependencies/implicit-search/changes/A1.scala b/sbt-test/source-dependencies/implicit-search/changes/A1.scala index 7aa91d096277..69c493db2131 100644 --- a/sbt-test/source-dependencies/implicit-search/changes/A1.scala +++ b/sbt-test/source-dependencies/implicit-search/changes/A1.scala @@ -1 +1 @@ -object A +object A diff --git a/sbt-test/source-dependencies/java-static/test b/sbt-test/source-dependencies/java-static/test index 42890ca74f4d..0bb6f50169e4 100644 --- a/sbt-test/source-dependencies/java-static/test +++ b/sbt-test/source-dependencies/java-static/test @@ -2,7 +2,7 @@ # the statics as an object without a file and so the Analyzer must know to look for the # object's linked class. # This test verifies this happens. -# The test compiles a Java class with a static field. +# The test compiles a Java class with a static field. # It then adds a Scala object that references the static field. Because the object only depends on a # static member and because the Java source is not included in the compilation (since it didn't change), # this triggers the special case above. 
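Back in the bisect script, `CommitBisect` achieves the same inversion at the shell level: when `--should-fail` is set, the generated `git bisect run` script prefixes the validation command with `! `, which makes the shell negate its exit status. A tiny sketch of that shell behaviour (assumes a POSIX `sh` on the PATH):

```scala
import scala.sys.process.*

@main def negationDemo(): Unit =
  val direct  = Seq("sh", "-c", "false").!   // exits with status 1
  val negated = Seq("sh", "-c", "! false").! // `!` inverts it: exits with 0
  println(s"direct=$direct, negated=$negated")
```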
diff --git a/sbt-test/source-dependencies/restore-classes/changes/A2.scala b/sbt-test/source-dependencies/restore-classes/changes/A2.scala index 10d738255cca..778f16ab95cc 100644 --- a/sbt-test/source-dependencies/restore-classes/changes/A2.scala +++ b/sbt-test/source-dependencies/restore-classes/changes/A2.scala @@ -2,4 +2,4 @@ object A { val x = "a" } -class C +class C diff --git a/sbt-test/source-dependencies/sam/A.scala b/sbt-test/source-dependencies/sam/A.scala new file mode 100644 index 000000000000..eb870b8332b6 --- /dev/null +++ b/sbt-test/source-dependencies/sam/A.scala @@ -0,0 +1,3 @@ +trait A { + def foo(): Int +} diff --git a/sbt-test/source-dependencies/sam/B.scala b/sbt-test/source-dependencies/sam/B.scala new file mode 100644 index 000000000000..87dfb28cdb33 --- /dev/null +++ b/sbt-test/source-dependencies/sam/B.scala @@ -0,0 +1,2 @@ +class B: + val f: A = () => 1 diff --git a/sbt-test/source-dependencies/sam/build.sbt b/sbt-test/source-dependencies/sam/build.sbt new file mode 100644 index 000000000000..63e314982c41 --- /dev/null +++ b/sbt-test/source-dependencies/sam/build.sbt @@ -0,0 +1 @@ +scalaVersion := sys.props("plugin.scalaVersion") diff --git a/sbt-test/source-dependencies/sam/changes/A.scala b/sbt-test/source-dependencies/sam/changes/A.scala new file mode 100644 index 000000000000..e9b339f2d1a4 --- /dev/null +++ b/sbt-test/source-dependencies/sam/changes/A.scala @@ -0,0 +1,3 @@ +trait A { + def foo(): String +} diff --git a/sbt-test/source-dependencies/sam/test b/sbt-test/source-dependencies/sam/test new file mode 100644 index 000000000000..3c4c9a0f001b --- /dev/null +++ b/sbt-test/source-dependencies/sam/test @@ -0,0 +1,7 @@ +> compile + +# change the SAM type +$ copy-file changes/A.scala A.scala + +# Both A.scala and B.scala should be recompiled, producing a compile error +-> compile diff --git a/scaladoc-testcases/src/example/typeAndObjects/binaryops.scala b/scaladoc-testcases/src/example/typeAndObjects/binaryops.scala index 890d39732ca8..6474d04f91ef 100644 --- a/scaladoc-testcases/src/example/typeAndObjects/binaryops.scala +++ b/scaladoc-testcases/src/example/typeAndObjects/binaryops.scala @@ -5,7 +5,7 @@ sealed trait Expr object Expr{ case class BinaryOp(offset: Int, lhs: Expr, op: BinaryOp.Op, rhs: Expr) extends Expr - + object BinaryOp{ sealed trait Op case object `<<` extends Op diff --git a/scaladoc-testcases/src/tests/contextBounds.scala b/scaladoc-testcases/src/tests/contextBounds.scala index 1925f7f40994..794af0b8b8f8 100644 --- a/scaladoc-testcases/src/tests/contextBounds.scala +++ b/scaladoc-testcases/src/tests/contextBounds.scala @@ -25,15 +25,15 @@ class A: def a[T <: String | Int : ([T] =>> T match { case String => A case Int => B })](t: T): T = t - def falsePositive[T](evidence$1: ClassTag[T]): Int + def falsePositive[T](evidence$1: ClassTag[T]): Int = 1 // Scala spec stats that behaviour of names with `$` is undefined. 
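The new `sam` source-dependency test added above pins down incremental compilation across a SAM type: `B` implements trait `A` with a lambda, so changing `A`'s abstract method must trigger recompilation of `B`. The scenario in one self-contained snippet (before the change is applied):

```scala
trait A:
  def foo(): Int  // the single abstract method

class B:
  val f: A = () => 1  // a lambda can implement the SAM trait A

// When changes/A.scala turns foo's result type into String, B.scala must be
// recompiled as well, and `() => 1` no longer typechecks -- which is exactly
// the compile error the scripted test's final `-> compile` step expects.
```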
// Scaladoc documents definition below as `def falsePositive2[T: ClassTag]: Int` // that is equivalent of methods below - // def falsePositive2[T](implicit evidence$3: ClassTag[T]): Int + // def falsePositive2[T](implicit evidence$3: ClassTag[T]): Int // = 1 class Outer[A]: - def falsePositiveInner[T](implicit evidence$3: ClassTag[A]): Int + def falsePositiveInner[T](implicit evidence$3: ClassTag[A]): Int = 1 \ No newline at end of file diff --git a/scaladoc-testcases/src/tests/extendsCall.scala b/scaladoc-testcases/src/tests/extendsCall.scala new file mode 100644 index 000000000000..b90af8162e15 --- /dev/null +++ b/scaladoc-testcases/src/tests/extendsCall.scala @@ -0,0 +1,6 @@ +package tests +package extendsCall + +class Impl() extends Base(Seq.empty, c = "-") //expected: class Impl() extends Base + +class Base(val a: Seq[String], val b: String = "", val c: String = "") //expected: class Base(val a: Seq[String], val b: String, val c: String) diff --git a/scaladoc-testcases/src/tests/extensionParams.scala b/scaladoc-testcases/src/tests/extensionParams.scala index 7892676af2c4..0e2225d8aa3c 100644 --- a/scaladoc-testcases/src/tests/extensionParams.scala +++ b/scaladoc-testcases/src/tests/extensionParams.scala @@ -10,11 +10,11 @@ extension [A](a: A)(using Int) def f1[B](b: B): (A, B) = ??? -extension [A](a: A)(using Int) +extension [A](a: A)(using String) def f2(b: A): (A, A) = ??? -extension [A](a: A)(using Int) +extension [A](a: A)(using Number) def f3(using String)(b: A): (A, A) = ??? @@ -22,7 +22,7 @@ extension (a: Char)(using Int) def f4(using String)(b: Int): Unit = ??? -extension (a: Char)(using Int) +extension (a: String)(using Int) def f5[B](using String)(b: B): Unit = ??? @@ -34,15 +34,15 @@ extension [A <: List[Char]](using String)(using Unit)(a: A)(using Int)(using Num def f7[B, C](b: B)(c: C): (A, B) = ??? -extension [A <: List[Char]](using String)(using Unit)(a: A)(using Int)(using Number) +extension [A <: List[Char]](using String)(using Number)(a: A)(using Int)(using Unit) def f8(b: Any)(c: Any): Any = ??? -extension [A <: List[Char]](using String)(using Unit)(a: A)(using Int)(using Number) +extension [A <: List[Char]](using Unit)(using String)(a: A)(using Int)(using Number) def f9[B, C](using Int)(b: B)(c: C): (A, B) = ??? -extension [A <: List[Char]](using String)(using Unit)(a: A)(using Int)(using Number) +extension [A <: List[Char]](using Number)(using Unit)(a: A)(using Int)(using String) def f10(using Int)(b: Any)(c: Any): Any = ??? @@ -52,3 +52,22 @@ extension [A <: List[Char]](using String)(using Unit)(a: A)(using Int)(using Num extension (using String)(using Unit)(a: Animal)(using Int)(using Number) def f11(b: Any)(c: Any): Any = ??? + +extension (a: Int) + def f13(): Any + = ??? + +extension (using Unit)(a: Int) + def f14(): Any + = ??? + +import scala.language.experimental.clauseInterleaving + +extension (using String)(using Int)(a: Animal)(using Unit)(using Number) + def f16(b: Any)[T](c: T): T + = ??? + def f17[D](b: D)[T](c: T): T + = ??? + + + diff --git a/scaladoc-testcases/src/tests/methodsAndConstructors.scala b/scaladoc-testcases/src/tests/methodsAndConstructors.scala index b8925c593b4c..132d35035b30 100644 --- a/scaladoc-testcases/src/tests/methodsAndConstructors.scala +++ b/scaladoc-testcases/src/tests/methodsAndConstructors.scala @@ -60,3 +60,8 @@ class Methods: def withImplicitParam2(v: String)(implicit ab: Double, a: Int, b: String): String = ??? 
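Both the `f16`/`f17` extensions above and the `clauseInterleaving` method added to `methodsAndConstructors` just below exercise the experimental interleaved-clauses syntax, where type clauses and term clauses may alternate in a signature. A minimal sketch of such a definition and a call site (hypothetical names; assumes a compiler with the experimental feature enabled):

```scala
import scala.language.experimental.clauseInterleaving

// The second type clause [U] comes after the first term clause, so U is
// inferred from a later argument while T is already fixed.
def pairWith[T](x: T)[U](y: U)(using ord: Ordering[T]): (T, U) = (x, y)

@main def interleavingDemo(): Unit =
  println(pairWith(1)("one"))  // T = Int is fixed before U = String
```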
+ import scala.language.experimental.clauseInterleaving + + def clauseInterleaving[T](x: T)[U](y: U)(using (T, U)): (T, U) + = ??? + diff --git a/scaladoc-testcases/src/tests/snippetComments.scala b/scaladoc-testcases/src/tests/snippetComments.scala index 39b15648103e..9f54b8a465f1 100644 --- a/scaladoc-testcases/src/tests/snippetComments.scala +++ b/scaladoc-testcases/src/tests/snippetComments.scala @@ -3,7 +3,7 @@ package tests.snippetComments /** * This is my codeblock - * + * * ``` * //{{ * import xd @@ -20,7 +20,7 @@ package tests.snippetComments * val y = 2 // comment in the same line * // comment in new line * val z = 3 - * + * * //{{ * val hideMe = 7 * //}} diff --git a/scaladoc/resources/dotty_res/scripts/ux.js b/scaladoc/resources/dotty_res/scripts/ux.js index 304f2af9e129..0ead006af84d 100644 --- a/scaladoc/resources/dotty_res/scripts/ux.js +++ b/scaladoc/resources/dotty_res/scripts/ux.js @@ -309,6 +309,10 @@ document var selected = document.getElementById(location.hash.substring(1)); if (selected) { selected.classList.toggle("expand"); + selected.classList.toggle("expanded"); + const btn = selected.querySelector(".icon-button"); + btn.classList.toggle("expand"); + btn.classList.toggle("expanded"); } } } diff --git a/scaladoc/resources/dotty_res/styles/theme/components/api-member.css b/scaladoc/resources/dotty_res/styles/theme/components/api-member.css index c1a491815201..47b64c304a70 100644 --- a/scaladoc/resources/dotty_res/styles/theme/components/api-member.css +++ b/scaladoc/resources/dotty_res/styles/theme/components/api-member.css @@ -42,6 +42,10 @@ margin-block-end: 0; } +.documentableElement .doc img { + max-width: 100%; +} + .documentableElement .annotations { display: none; } diff --git a/scaladoc/resources/dotty_res/styles/theme/layout/container.css b/scaladoc/resources/dotty_res/styles/theme/layout/container.css index 849235e2fa82..53ede0e3dfff 100644 --- a/scaladoc/resources/dotty_res/styles/theme/layout/container.css +++ b/scaladoc/resources/dotty_res/styles/theme/layout/container.css @@ -19,6 +19,11 @@ p { --header-height: calc(8 * var(--base-spacing)); } +.site-container img{ + max-width: 100%; + height: auto; +} + /* Scrollbar */ ::-webkit-scrollbar { diff --git a/scaladoc/resources/dotty_res/styles/theme/layout/content.css b/scaladoc/resources/dotty_res/styles/theme/layout/content.css index 0ba37d0752b0..39a7b053752d 100644 --- a/scaladoc/resources/dotty_res/styles/theme/layout/content.css +++ b/scaladoc/resources/dotty_res/styles/theme/layout/content.css @@ -8,6 +8,19 @@ scroll-behavior: smooth; } +/* blog footer */ +.blog-author { + color: gray; +} + +.blog-author img#author-img { + width: auto; + height: auto; + max-width:100px; + max-height:100px; + border-radius: 50%; +} + #content { display: flex; flex-flow: row; @@ -91,6 +104,7 @@ @media (max-width: 768px) { #content { + padding-top: calc(10 * var(--base-spacing)); padding-bottom: calc(6 * var(--base-spacing)); } @@ -143,9 +157,9 @@ } #content h2 { - color: var(--text-primary); - margin-block-start: calc(6* var(--base-spacing)); - margin-block-end: calc(3* var(--base-spacing)); + color: var(--text-primary); + margin-block-start: calc(6* var(--base-spacing)); + margin-block-end: calc(3* var(--base-spacing)); } #content .cover > h2 { @@ -166,7 +180,7 @@ /* content first paragraph */ .first-p { - margin-block-start: calc(2* var(--base-spacing)); + margin-block-start: calc(2* var(--base-spacing)); } #content .first-p { @@ -226,18 +240,32 @@ /* content link */ #content a { - color: var(--text-primary); - 
text-decoration: underline solid 1px; - -webkit-text-decoration-line: underline; /* Safari */ - text-decoration-line: underline; - text-underline-offset: 2px; - transition: text-decoration-color .2s ease-in-out; + color: var(--text-primary); + text-decoration: underline solid 1px; + -webkit-text-decoration-line: underline; /* Safari */ + text-decoration-line: underline; + text-underline-offset: 2px; + transition: text-decoration-color .2s ease-in-out; } #content a:hover { text-decoration-color: transparent; } +#content a.anchor { + color: transparent; + margin-left: -23px; + padding-right: 3px; + transition: color .4s ease-out; +} + +#content a.anchor::before { + content: "\f0c1"; + font-family: "Font Awesome 5 Free"; + font-weight: 900; + font-size: 20px; +} + #content .cover-header { margin-block-end: calc(2 * var(--base-spacing)); } diff --git a/scaladoc/resources/dotty_res/styles/theme/layout/header.css b/scaladoc/resources/dotty_res/styles/theme/layout/header.css index 034f9ed43087..85e6b0240899 100644 --- a/scaladoc/resources/dotty_res/styles/theme/layout/header.css +++ b/scaladoc/resources/dotty_res/styles/theme/layout/header.css @@ -63,6 +63,14 @@ align-items: center; } +.logo-container .project-logo { + max-width: 40px; +} + +.logo-container .project-logo img { + max-width: 100%; +} + #mobile-menu-toggle { display: none; } @@ -72,7 +80,7 @@ overflow: hidden; white-space: nowrap; text-overflow: ellipsis; - width: calc(9 * var(--base-spacing)); + width: auto; } .single { @@ -89,6 +97,7 @@ #mobile-menu-toggle { margin-left: calc(3 * var(--base-spacing)); display: block; + padding: 16px; } .header-container-right .text-button { @@ -102,6 +111,10 @@ #search-toggle { display: none; } + + .projectVersion{ + max-width: calc(12 * var(--base-spacing)); + } } diff --git a/scaladoc/resources/dotty_res/styles/theme/layout/mobileMenu.css b/scaladoc/resources/dotty_res/styles/theme/layout/mobileMenu.css index a7c08eedb4be..6fa692ab4662 100644 --- a/scaladoc/resources/dotty_res/styles/theme/layout/mobileMenu.css +++ b/scaladoc/resources/dotty_res/styles/theme/layout/mobileMenu.css @@ -154,6 +154,8 @@ #mobile-menu-close { margin-left: auto; + width: 48px; + height: 48px; } #mobile-menu-close:disabled { diff --git a/scaladoc/src/dotty/tools/scaladoc/DocContext.scala b/scaladoc/src/dotty/tools/scaladoc/DocContext.scala index 7f208daff29a..acc93ccb332f 100644 --- a/scaladoc/src/dotty/tools/scaladoc/DocContext.scala +++ b/scaladoc/src/dotty/tools/scaladoc/DocContext.scala @@ -1,11 +1,9 @@ package dotty.tools.scaladoc import java.io.File -import java.nio.file.Files import java.nio.file.Path import java.nio.file.Paths -import scala.jdk.CollectionConverters._ import dotty.tools.scaladoc.site.StaticSiteContext import dotty.tools.dotc.core.Contexts._ import dotty.tools.dotc.util.SourceFile @@ -13,9 +11,6 @@ import dotty.tools.dotc.util.SourcePosition import dotty.tools.dotc.util.Spans import java.io.ByteArrayOutputStream import java.io.PrintStream -import scala.io.Codec -import java.net.URL -import scala.util.Try import scala.collection.mutable import dotty.tools.scaladoc.util.Check.checkJekyllIncompatPath diff --git a/scaladoc/src/dotty/tools/scaladoc/ExternalDocLink.scala b/scaladoc/src/dotty/tools/scaladoc/ExternalDocLink.scala index 97e0d309d6b8..536d759388f3 100644 --- a/scaladoc/src/dotty/tools/scaladoc/ExternalDocLink.scala +++ b/scaladoc/src/dotty/tools/scaladoc/ExternalDocLink.scala @@ -1,6 +1,6 @@ package dotty.tools.scaladoc -import java.net.URL +import java.net.{URI, URL} import 
scala.util.matching._ import scala.util.{ Try, Success, Failure } @@ -25,10 +25,12 @@ object ExternalDocLink: case Failure(e) => fail(mapping, s"Unable to parse $descr. Exception $e occured") } + private def stripIndex(url: String): String = url.stripSuffix("index.html").stripSuffix("/") + "/" + def parseLegacy(mapping: String): Either[String, ExternalDocLink] = mapping.split("#").toList match case path :: apiUrl :: Nil => for { - url <- tryParse(mapping, "url")(URL(apiUrl)) + url <- tryParse(mapping, "url")(URI(stripIndex(apiUrl)).toURL) } yield ExternalDocLink( List(s"${Regex.quote(path)}.*".r), url, @@ -40,7 +42,7 @@ object ExternalDocLink: def parse(mapping: String): Either[String, ExternalDocLink] = def parsePackageList(elements: List[String]) = elements match - case List(urlStr) => tryParse(mapping, "packageList")(Some(URL(urlStr))) + case List(urlStr) => tryParse(mapping, "packageList")(Some(URI(urlStr).toURL)) case Nil => Right(None) case other => fail(mapping, s"Provided multiple package lists: $other") @@ -55,7 +57,7 @@ object ExternalDocLink: case regexStr :: docToolStr :: urlStr :: rest => for { regex <- tryParse(mapping, "regex")(regexStr.r) - url <- tryParse(mapping, "url")(URL(urlStr)) + url <- tryParse(mapping, "url")(URI(stripIndex(urlStr)).toURL) doctool <- doctoolByName(docToolStr) packageList <- parsePackageList(rest) } yield ExternalDocLink( diff --git a/scaladoc/src/dotty/tools/scaladoc/Main.scala b/scaladoc/src/dotty/tools/scaladoc/Main.scala index da35e63561fd..36b8b1daf4c4 100644 --- a/scaladoc/src/dotty/tools/scaladoc/Main.scala +++ b/scaladoc/src/dotty/tools/scaladoc/Main.scala @@ -1,16 +1,6 @@ package dotty.tools.scaladoc -import java.util.ServiceLoader -import java.io.File -import java.util.jar._ -import scala.jdk.CollectionConverters._ -import collection.immutable.ArraySeq - -import java.nio.file.Files - -import dotty.tools.dotc.config.Settings._ -import dotty.tools.dotc.config.CommonScalaSettings -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.ContextBase /** Main class for the doctool when used from cli. 
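Two things change in `ExternalDocLink` here: external API URLs are normalized so that a trailing `index.html` (or a missing trailing slash) no longer breaks link resolution, and URLs are now built via `URI(...).toURL`, which validates the string first, unlike the bare `URL` constructor deprecated on recent JDKs. A standalone copy of the normalization, for illustration only:

```scala
import java.net.URI

// Same normalization as the stripIndex helper added above.
def stripIndex(url: String): String =
  url.stripSuffix("index.html").stripSuffix("/") + "/"

@main def stripIndexDemo(): Unit =
  println(stripIndex("https://scala-lang.org/api/3.x/index.html")) // https://scala-lang.org/api/3.x/
  println(stripIndex("https://scala-lang.org/api/3.x"))            // https://scala-lang.org/api/3.x/
  println(URI(stripIndex("https://scala-lang.org/api/3.x")).toURL) // parsed and validated
```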
*/ class Main: diff --git a/scaladoc/src/dotty/tools/scaladoc/Scaladoc.scala b/scaladoc/src/dotty/tools/scaladoc/Scaladoc.scala index da34e97efdf5..fa02e87548e6 100644 --- a/scaladoc/src/dotty/tools/scaladoc/Scaladoc.scala +++ b/scaladoc/src/dotty/tools/scaladoc/Scaladoc.scala @@ -1,20 +1,15 @@ package dotty.tools.scaladoc -import java.util.ServiceLoader import java.io.File import java.io.FileWriter -import java.util.jar._ -import scala.jdk.CollectionConverters._ -import collection.immutable.ArraySeq +import java.nio.file.Paths -import java.nio.file.{ Files, Paths } +import collection.immutable.ArraySeq import dotty.tools.dotc.config.Settings._ import dotty.tools.dotc.config.{ CommonScalaSettings, AllScalaSettings } import dotty.tools.dotc.reporting.Reporter import dotty.tools.dotc.core.Contexts._ - -import dotty.tools.scaladoc.Inkuire import dotty.tools.scaladoc.Inkuire._ object Scaladoc: diff --git a/scaladoc/src/dotty/tools/scaladoc/ScaladocCommand.scala b/scaladoc/src/dotty/tools/scaladoc/ScaladocCommand.scala index b91b8307208b..8b438a27f33e 100644 --- a/scaladoc/src/dotty/tools/scaladoc/ScaladocCommand.scala +++ b/scaladoc/src/dotty/tools/scaladoc/ScaladocCommand.scala @@ -1,20 +1,9 @@ package dotty.tools.scaladoc -import java.util.ServiceLoader -import java.io.File -import java.util.jar._ -import scala.jdk.CollectionConverters._ -import collection.immutable.ArraySeq - -import java.nio.file.Files - import dotty.tools.dotc.config.Settings._ -import dotty.tools.dotc.config.CommonScalaSettings -import dotty.tools.scaladoc.Scaladoc._ -import dotty.tools.dotc.config.Settings.Setting.value import dotty.tools.dotc.config.Properties._ import dotty.tools.dotc.config.CliCommand -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.Context object ScaladocCommand extends CliCommand: type ConcreteSettings = ScaladocSettings diff --git a/scaladoc/src/dotty/tools/scaladoc/ScaladocSettings.scala b/scaladoc/src/dotty/tools/scaladoc/ScaladocSettings.scala index ee7c6cd4980f..96e7854b45cf 100644 --- a/scaladoc/src/dotty/tools/scaladoc/ScaladocSettings.scala +++ b/scaladoc/src/dotty/tools/scaladoc/ScaladocSettings.scala @@ -1,20 +1,7 @@ package dotty.tools.scaladoc -import java.util.ServiceLoader -import java.io.File -import java.util.jar._ -import scala.jdk.CollectionConverters._ -import collection.immutable.ArraySeq - -import java.nio.file.Files - import dotty.tools.dotc.config.Settings._ import dotty.tools.dotc.config.AllScalaSettings -import dotty.tools.scaladoc.Scaladoc._ -import dotty.tools.dotc.config.Settings.Setting.value -import dotty.tools.dotc.config.Properties._ -import dotty.tools.dotc.config.CliCommand -import dotty.tools.dotc.core.Contexts._ class ScaladocSettings extends SettingGroup with AllScalaSettings: val unsupportedSettings = Seq( diff --git a/scaladoc/src/dotty/tools/scaladoc/SocialLinks.scala b/scaladoc/src/dotty/tools/scaladoc/SocialLinks.scala index f4fe674dbcb4..a07029d06c50 100644 --- a/scaladoc/src/dotty/tools/scaladoc/SocialLinks.scala +++ b/scaladoc/src/dotty/tools/scaladoc/SocialLinks.scala @@ -1,9 +1,5 @@ package dotty.tools.scaladoc -import java.nio.file.Path -import java.nio.file.Paths -import dotty.tools.dotc.core.Contexts.Context - enum SocialLinks(val url: String, val className: String): case Github(ghUrl: String) extends SocialLinks(ghUrl, "gh") case Twitter(tUrl: String) extends SocialLinks(tUrl, "twitter") diff --git a/scaladoc/src/dotty/tools/scaladoc/SourceLinks.scala b/scaladoc/src/dotty/tools/scaladoc/SourceLinks.scala index 
a9e300040fb8..b3732bcbc946 100644 --- a/scaladoc/src/dotty/tools/scaladoc/SourceLinks.scala +++ b/scaladoc/src/dotty/tools/scaladoc/SourceLinks.scala @@ -2,7 +2,6 @@ package dotty.tools.scaladoc import java.nio.file.Path import java.nio.file.Paths -import dotty.tools.dotc.core.Contexts.Context import scala.util.matching.Regex def pathToString(p: Path) = diff --git a/scaladoc/src/dotty/tools/scaladoc/api.scala b/scaladoc/src/dotty/tools/scaladoc/api.scala index 90a03658c90e..5af55f76a211 100644 --- a/scaladoc/src/dotty/tools/scaladoc/api.scala +++ b/scaladoc/src/dotty/tools/scaladoc/api.scala @@ -44,24 +44,24 @@ enum Modifier(val name: String, val prefix: Boolean): case Transparent extends Modifier("transparent", true) case Infix extends Modifier("infix", true) -case class ExtensionTarget(name: String, typeParams: Seq[TypeParameter], argsLists: Seq[ParametersList], signature: Signature, dri: DRI, position: Long) +case class ExtensionTarget(name: String, typeParams: Seq[TypeParameter], argsLists: Seq[TermParameterList], signature: Signature, dri: DRI, position: Long) case class ImplicitConversion(from: DRI, to: DRI) trait ImplicitConversionProvider { def conversion: Option[ImplicitConversion] } trait Classlike: def typeParams: Seq[TypeParameter] = Seq.empty - def argsLists: Seq[ParametersList] = Seq.empty + def argsLists: Seq[TermParameterList] = Seq.empty enum Kind(val name: String): case RootPackage extends Kind("") case Package extends Kind("package") - case Class(override val typeParams: Seq[TypeParameter], override val argsLists: Seq[ParametersList]) + case Class(override val typeParams: Seq[TypeParameter], override val argsLists: Seq[TermParameterList]) extends Kind("class") with Classlike case Object extends Kind("object") with Classlike - case Trait(override val typeParams: Seq[TypeParameter], override val argsLists: Seq[ParametersList]) + case Trait(override val typeParams: Seq[TypeParameter], override val argsLists: Seq[TermParameterList]) extends Kind("trait") with Classlike - case Enum(override val typeParams: Seq[TypeParameter], override val argsLists: Seq[ParametersList]) extends Kind("enum") with Classlike + case Enum(override val typeParams: Seq[TypeParameter], override val argsLists: Seq[TermParameterList]) extends Kind("enum") with Classlike case EnumCase(kind: Object.type | Kind.Type | Val.type | Class) extends Kind("case") - case Def(typeParams: Seq[TypeParameter], argsLists: Seq[ParametersList]) + case Def(paramLists: Seq[Either[TermParameterList,TypeParameterList]]) extends Kind("def") case Extension(on: ExtensionTarget, m: Kind.Def) extends Kind("def") case Constructor(base: Kind.Def) extends Kind("def") @@ -97,12 +97,12 @@ object Annotation: case class LinkParameter(name: Option[String] = None, dri: DRI, value: String) extends AnnotationParameter case class UnresolvedParameter(name: Option[String] = None, unresolvedText: String) extends AnnotationParameter -case class ParametersList( - parameters: Seq[Parameter], +case class TermParameterList( + parameters: Seq[TermParameter], modifiers: String ) -case class Parameter( +case class TermParameter( annotations: Seq[Annotation], modifiers: String, name: Option[String], @@ -112,6 +112,8 @@ case class Parameter( isGrouped: Boolean = false ) +type TypeParameterList = Seq[TypeParameter] + case class TypeParameter( annotations: Seq[Annotation], variance: "" | "+" | "-", diff --git a/scaladoc/src/dotty/tools/scaladoc/compat.scala b/scaladoc/src/dotty/tools/scaladoc/compat.scala index fc660d97cb5d..d2095b9cc98c 100644 --- 
a/scaladoc/src/dotty/tools/scaladoc/compat.scala +++ b/scaladoc/src/dotty/tools/scaladoc/compat.scala @@ -3,7 +3,6 @@ package dotty.tools.scaladoc import java.util.stream.Stream // comment out - wrong error! import java.util.stream.Collectors import java.util.Collections -import java.nio.file.Path import com.vladsch.flexmark.util.ast.{Node => MdNode} import dotty.tools.scaladoc.tasty.comments.wiki.WikiDocElement import scala.jdk.CollectionConverters._ @@ -37,4 +36,4 @@ extension [V](jlist: JList[V]) extension [V](jset: JSet[V]) def ++ (other: JSet[V]): JSet[V] = - Stream.of(jset, other).flatMap(_.stream).collect(Collectors.toSet()) \ No newline at end of file + Stream.of(jset, other).flatMap(_.stream).collect(Collectors.toSet()) diff --git a/scaladoc/src/dotty/tools/scaladoc/parsers/WikiCodeBlockParser.scala b/scaladoc/src/dotty/tools/scaladoc/parsers/WikiCodeBlockParser.scala index 865d78193886..4201cae4e2e6 100644 --- a/scaladoc/src/dotty/tools/scaladoc/parsers/WikiCodeBlockParser.scala +++ b/scaladoc/src/dotty/tools/scaladoc/parsers/WikiCodeBlockParser.scala @@ -6,13 +6,14 @@ import com.vladsch.flexmark.parser.core._ import com.vladsch.flexmark.parser.block._ import com.vladsch.flexmark.util.ast.Block import com.vladsch.flexmark.util.ast.BlockContent -import com.vladsch.flexmark.util.options.DataHolder +import com.vladsch.flexmark.util.data.DataHolder import com.vladsch.flexmark.util.sequence.BasedSequence import com.vladsch.flexmark.util.sequence.SegmentedSequence import java.{util => ju} import ju.regex.Matcher import ju.regex.Pattern +import scala.jdk.CollectionConverters._ /** Copied from FencedCodeBlockParser. */ @@ -21,8 +22,11 @@ object WikiCodeBlockParser { private val CLOSING_FENCE = Pattern.compile("^(\\}{3})(?=[ \t]*$)$") class Factory extends CustomBlockParserFactory { + override def apply(options: DataHolder): BlockParserFactory = + new WikiCodeBlockParser.BlockFactory(options) + override def getAfterDependents = - new ju.HashSet[Class[_ <: CustomBlockParserFactory]](ju.Arrays.asList( + new ju.HashSet[Class[?]](ju.Arrays.asList( classOf[BlockQuoteParser.Factory], classOf[HeadingParser.Factory], //FencedCodeBlockParser.Factory.class, @@ -33,7 +37,7 @@ object WikiCodeBlockParser { )) override def getBeforeDependents = - new ju.HashSet[Class[_ <: CustomBlockParserFactory]](ju.Arrays.asList( + new ju.HashSet[Class[?]](ju.Arrays.asList( //BlockQuoteParser.Factory.class, //HeadingParser.Factory.class, //FencedCodeBlockParser.Factory.class, @@ -44,9 +48,6 @@ object WikiCodeBlockParser { )) override def affectsGlobalScope = false - - override def create(options: DataHolder) = - new WikiCodeBlockParser.BlockFactory(options) } private[WikiCodeBlockParser] class BlockFactory (val options: DataHolder) @@ -83,7 +84,7 @@ class WikiCodeBlockParser( final private val block = new FencedCodeBlock() private var content = new BlockContent - private val codeContentBlock = options.get(Parser.FENCED_CODE_CONTENT_BLOCK) + private val codeContentBlock = Parser.FENCED_CODE_CONTENT_BLOCK.get(options) def getBlock: Block = block def getFenceIndent: Int = fenceIndent @@ -141,7 +142,7 @@ class WikiCodeBlockParser( codeBlock.setCharsFromContent block.appendChild(codeBlock) } else { - val codeBlock = new Text(SegmentedSequence.of(segments)) + val codeBlock = new Text(SegmentedSequence.create(segments.asScala.toSeq:_*)) block.appendChild(codeBlock) } } diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/HtmlRenderer.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/HtmlRenderer.scala index 
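The `WikiCodeBlockParser` changes above track flexmark's migration from `util.options` to `util.data`: block parser factories now override `apply` instead of `create`, and option lookups go through the key (`KEY.get(holder)`) rather than the holder. A small sketch of the new lookup direction, using only classes this patch itself imports:

```scala
import com.vladsch.flexmark.parser.Parser
import com.vladsch.flexmark.util.data.MutableDataSet

@main def dataKeyDemo(): Unit =
  val options = new MutableDataSet()
  // Newer flexmark style: the DataKey performs the lookup and falls back
  // to its own default when the holder has no entry for it.
  val contentBlock: java.lang.Boolean = Parser.FENCED_CODE_CONTENT_BLOCK.get(options)
  println(contentBlock)
```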
719033959b47..93b86ce0bc51 100644 --- a/scaladoc/src/dotty/tools/scaladoc/renderers/HtmlRenderer.scala +++ b/scaladoc/src/dotty/tools/scaladoc/renderers/HtmlRenderer.scala @@ -2,18 +2,9 @@ package dotty.tools.scaladoc package renderers import util.HTML._ -import scala.jdk.CollectionConverters._ -import java.net.URI -import java.net.URL import dotty.tools.scaladoc.site._ -import scala.util.Try import org.jsoup.Jsoup -import java.nio.file.Paths -import java.nio.file.Path import java.nio.file.Files -import java.nio.file.FileVisitOption -import java.io.File -import dotty.tools.scaladoc.staticFileSymbolUUID class HtmlRenderer(rootPackage: Member, members: Map[DRI, Member])(using ctx: DocContext) extends Renderer(rootPackage, members, extension = "html"): @@ -41,7 +32,7 @@ class HtmlRenderer(rootPackage: Member, members: Map[DRI, Member])(using ctx: Do case _ => Nil) :+ (Attr("data-pathToRoot") := pathToRoot(page.link.dri)) - html(attrs: _*)( + val htmlTag = html(attrs: _*)( head((mkHead(page) :+ docHead):_*), body( if !page.hasFrame then docBody @@ -49,6 +40,10 @@ class HtmlRenderer(rootPackage: Member, members: Map[DRI, Member])(using ctx: Do ) ) + val doctypeTag = s"<!DOCTYPE html>" + val finalTag = raw(doctypeTag + htmlTag.toString) + finalTag + override def render(): Unit = val renderedResources = renderResources() super.render() @@ -203,11 +198,11 @@ class HtmlRenderer(rootPackage: Member, members: Map[DRI, Member])(using ctx: Do } val darkProjectLogoElem = - darkProjectLogo.flatMap { + darkProjectLogo.orElse(projectLogo).flatMap { case Resource.File(path, _) => Some(span(id := "dark-project-logo", cls := "project-logo")(img(src := resolveRoot(link.dri, path)))) case _ => None - }.orElse(projectLogoElem) + } val parentsHtml = val innerTags = parents.flatMap[TagArg](b => Seq( diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/Locations.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/Locations.scala index deb676e812c8..689234cdd29c 100644 --- a/scaladoc/src/dotty/tools/scaladoc/renderers/Locations.scala +++ b/scaladoc/src/dotty/tools/scaladoc/renderers/Locations.scala @@ -1,18 +1,10 @@ package dotty.tools.scaladoc package renderers -import util.HTML._ import scala.jdk.CollectionConverters._ import java.net.URI -import java.net.URL import dotty.tools.scaladoc.site._ -import scala.util.Try -import org.jsoup.Jsoup import java.nio.file.Paths -import java.nio.file.Path -import java.nio.file.Files -import java.io.File -import scala.util.matching._ import dotty.tools.scaladoc.util.Escape._ val UnresolvedLocationLink = "#" diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/MarkdownRenderer.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/MarkdownRenderer.scala index 12d41ac86218..6f20276e907e 100644 --- a/scaladoc/src/dotty/tools/scaladoc/renderers/MarkdownRenderer.scala +++ b/scaladoc/src/dotty/tools/scaladoc/renderers/MarkdownRenderer.scala @@ -2,17 +2,6 @@ package dotty.tools.scaladoc package renderers import util.HTML._ -import scala.jdk.CollectionConverters._ -import java.net.URI -import java.net.URL -import dotty.tools.scaladoc.site._ -import scala.util.Try -import org.jsoup.Jsoup -import java.nio.file.Paths -import java.nio.file.Path -import java.nio.file.Files -import java.nio.file.FileVisitOption -import java.io.File class MarkdownRenderer(rootPackage: Member, members: Map[DRI, Member])(using ctx: DocContext) extends Renderer(rootPackage, members, extension = "md"): diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/MemberRenderer.scala
b/scaladoc/src/dotty/tools/scaladoc/renderers/MemberRenderer.scala index 5d5f3e9b20d5..996b422b44fd 100644 --- a/scaladoc/src/dotty/tools/scaladoc/renderers/MemberRenderer.scala +++ b/scaladoc/src/dotty/tools/scaladoc/renderers/MemberRenderer.scala @@ -7,9 +7,6 @@ import util.HTML.{div, *} import scala.jdk.CollectionConverters.* import dotty.tools.scaladoc.translators.FilterAttributes -import dotty.tools.scaladoc.tasty.comments.markdown.DocFlexmarkRenderer -import com.vladsch.flexmark.util.ast.Node as MdNode -import dotty.tools.scaladoc.tasty.comments.wiki.WikiDocElement import org.jsoup.Jsoup import translators.* @@ -72,7 +69,7 @@ class MemberRenderer(signatureRenderer: SignatureRenderer)(using DocContext) ext def source(m: Member): Seq[AppliedTag] = summon[DocContext].sourceLinks.pathTo(m).fold(Nil){ link => - tableRow("Source", a(href := link)(m.sources.fold("(source)")(_.path.getFileName().toString()))) + tableRow("Source", a(href := link, target := "_blank")(m.sources.fold("(source)")(_.path.getFileName().toString()))) } def deprecation(m: Member): Seq[AppliedTag] = m.deprecated.fold(Nil){ a => @@ -409,12 +406,12 @@ class MemberRenderer(signatureRenderer: SignatureRenderer)(using DocContext) ext case (Some(on), members) => val typeSig = SignatureBuilder() .keyword("extension ") - .generics(on.typeParams) + .typeParamList(on.typeParams) .content val argsSig = SignatureBuilder() - .functionParameters(on.argsLists) + .functionTermParameters(on.argsLists) .content - val sig = typeSig ++ Signature(Plain(s"(${on.name}: ")) ++ on.signature ++ Signature(Plain(")")) ++ argsSig + val sig = typeSig ++ argsSig MGroup(span(cls := "groupHeader")(sig.map(renderElement(_))), members.sortBy(_.name).toSeq, on.name) -> on.position }.toSeq.sortBy(_._2).map(_._1) diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/Renderer.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/Renderer.scala index dc2157131e0b..1a43ea8648a8 100644 --- a/scaladoc/src/dotty/tools/scaladoc/renderers/Renderer.scala +++ b/scaladoc/src/dotty/tools/scaladoc/renderers/Renderer.scala @@ -2,18 +2,11 @@ package dotty.tools.scaladoc package renderers import util.HTML._ -import scala.jdk.CollectionConverters._ import collection.mutable.ListBuffer -import java.net.URI -import java.net.URL import dotty.tools.scaladoc.site._ -import scala.util.Try -import org.jsoup.Jsoup import java.nio.file.Paths import java.nio.file.Path import java.nio.file.Files -import java.nio.file.FileVisitOption -import java.io.File case class Page(link: Link, content: Member | ResolvedTemplate | String, children: Seq[Page], hidden: Boolean = false): def withNewChildren(newChildren: Seq[Page]) = copy(children = children ++ newChildren) diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/Resources.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/Resources.scala index bae43980a11d..b84c07b4bade 100644 --- a/scaladoc/src/dotty/tools/scaladoc/renderers/Resources.scala +++ b/scaladoc/src/dotty/tools/scaladoc/renderers/Resources.scala @@ -2,16 +2,10 @@ package dotty.tools.scaladoc package renderers import util.HTML._ -import scala.jdk.CollectionConverters._ -import java.net.URI -import java.net.URL -import dotty.tools.scaladoc.site._ -import scala.util.Try -import org.jsoup.Jsoup +import java.net.{URI, URL} import java.nio.file.Paths import java.nio.file.Path import java.nio.file.Files -import java.io.File import dotty.tools.scaladoc.translators.FilterAttributes import util._ import translators._ @@ -180,17 +174,16 @@ trait Resources(using ctx: DocContext) 
extends Locations, Writer: case Kind.Extension(on, _) => val typeSig = SignatureBuilder() .keyword("extension ") - .generics(on.typeParams) + .typeParamList(on.typeParams) .content val argsSig = SignatureBuilder() - .functionParameters(on.argsLists) + .functionTermParameters(on.argsLists) .content flattenToText(typeSig ++ argsSig) case _ => "" def docPartRenderPlain(d: DocPart): String = import dotty.tools.scaladoc.tasty.comments.wiki._ - import com.vladsch.flexmark.util.ast.{Node => MdNode} def renderPlain(wd: WikiDocElement): String = wd match case Paragraph(text) => renderPlain(text) @@ -572,4 +565,4 @@ trait Resources(using ctx: DocContext) extends Locations, Writer: case Resource.URL(url) => Nil case Resource.URLToCopy(url, dest) => - Seq(copy(new URL(url).openStream(), dest)) + Seq(copy(URI(url).toURL.openStream(), dest)) diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/SiteRenderer.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/SiteRenderer.scala index ef80b4f2d327..ef7c06416e27 100644 --- a/scaladoc/src/dotty/tools/scaladoc/renderers/SiteRenderer.scala +++ b/scaladoc/src/dotty/tools/scaladoc/renderers/SiteRenderer.scala @@ -3,8 +3,7 @@ package renderers import util.HTML._ import scala.jdk.CollectionConverters._ -import java.net.URI -import java.net.URL +import java.net.{URI, URL} import dotty.tools.scaladoc.site._ import scala.util.Try import org.jsoup.Jsoup @@ -40,7 +39,7 @@ trait SiteRenderer(using DocContext) extends Locations: def processLocalLink(str: String): String = val staticSiteRootPath = content.ctx.root.toPath.toAbsolutePath - def asValidURL: Option[String] = Try(URL(str)).toOption.map(_ => str) + def asValidURL: Option[String] = Try(URI(str).toURL).toOption.map(_ => str) def asAsset: Option[String] = Option.when( Files.exists(staticSiteRootPath.resolve("_assets").resolve(str.stripPrefix("/"))) )( diff --git a/scaladoc/src/dotty/tools/scaladoc/site/BlogParser.scala b/scaladoc/src/dotty/tools/scaladoc/site/BlogParser.scala new file mode 100644 index 000000000000..68e709a339b2 --- /dev/null +++ b/scaladoc/src/dotty/tools/scaladoc/site/BlogParser.scala @@ -0,0 +1,26 @@ +package dotty.tools.scaladoc.site + +import com.fasterxml.jackson.databind.ObjectMapper +import com.fasterxml.jackson.dataformat.yaml.YAMLFactory +import com.fasterxml.jackson.databind.DeserializationFeature +import java.io.File +import scala.beans.{BooleanBeanProperty, BeanProperty} +import scala.util.Try + +case class BlogConfig( + @BeanProperty input: String, + @BeanProperty output: String, + @BooleanBeanProperty hidden: Boolean +): + def this() = this(null, null, false) + +object BlogParser: + def readYml(content: File | String): BlogConfig = + val mapper = ObjectMapper(YAMLFactory()) + .findAndRegisterModules() + + content match + case f: File => + val ymlFile = f.toPath.resolve("blog.yml").toFile + if ymlFile.exists then mapper.readValue(ymlFile, classOf[BlogConfig]) else new BlogConfig + case s: String => Try(mapper.readValue(s, classOf[BlogConfig])).getOrElse(new BlogConfig) diff --git a/scaladoc/src/dotty/tools/scaladoc/site/FlexmarkSectionWrapper.scala b/scaladoc/src/dotty/tools/scaladoc/site/FlexmarkSectionWrapper.scala index 12e93505ab59..ec0bd241602a 100644 --- a/scaladoc/src/dotty/tools/scaladoc/site/FlexmarkSectionWrapper.scala +++ b/scaladoc/src/dotty/tools/scaladoc/site/FlexmarkSectionWrapper.scala @@ -4,7 +4,6 @@ package site import com.vladsch.flexmark.util.{ast => mdu, sequence} import com.vladsch.flexmark.{ast => mda} import com.vladsch.flexmark.formatter.Formatter -import 
com.vladsch.flexmark.util.options.MutableDataSet import scala.jdk.CollectionConverters._ import dotty.tools.scaladoc.tasty.comments.markdown.Section diff --git a/scaladoc/src/dotty/tools/scaladoc/site/StaticSiteContext.scala b/scaladoc/src/dotty/tools/scaladoc/site/StaticSiteContext.scala index de3f511c8e67..7a90a462cba0 100644 --- a/scaladoc/src/dotty/tools/scaladoc/site/StaticSiteContext.scala +++ b/scaladoc/src/dotty/tools/scaladoc/site/StaticSiteContext.scala @@ -23,6 +23,10 @@ class StaticSiteContext( val docsPath = root.toPath.resolve("_docs") val blogPath = root.toPath.resolve("_blog") + def resolveNewBlogPath(stringPath: String): Path = + if stringPath.nonEmpty then root.toPath.resolve(stringPath) + else blogPath + def relativize(path: Path): Path = if args.apiSubdirectory then docsPath.relativize(path) diff --git a/scaladoc/src/dotty/tools/scaladoc/site/StaticSiteLoader.scala b/scaladoc/src/dotty/tools/scaladoc/site/StaticSiteLoader.scala index c9ace108c9b2..489720cc5936 100644 --- a/scaladoc/src/dotty/tools/scaladoc/site/StaticSiteLoader.scala +++ b/scaladoc/src/dotty/tools/scaladoc/site/StaticSiteLoader.scala @@ -5,6 +5,7 @@ import java.io.File import java.nio.file.Files import java.nio.file.{ Paths, Path } import scala.io._ +import dotty.tools.scaladoc.site.BlogParser class StaticSiteLoader(val root: File, val args: Scaladoc.Args)(using StaticSiteContext, CompilerContext): val ctx: StaticSiteContext = summon[StaticSiteContext] @@ -114,10 +115,12 @@ class StaticSiteLoader(val root: File, val args: Scaladoc.Args)(using StaticSite } def loadBlog(): Option[LoadedTemplate] = { + val blogConfig = BlogParser.readYml(root) + val rootPath = Option(blogConfig.input).map(input => ctx.resolveNewBlogPath(input)).getOrElse(ctx.blogPath) + val defaultDirectory = Option(blogConfig.output).getOrElse("blog") + type Date = (String, String, String) - val rootPath = ctx.blogPath - val defaultDirectory = "blog" - if (!Files.exists(rootPath)) None + if (!Files.exists(rootPath) || blogConfig.hidden) None else { val indexPageOpt = Seq( rootPath.resolve("index.md"), diff --git a/scaladoc/src/dotty/tools/scaladoc/site/common.scala b/scaladoc/src/dotty/tools/scaladoc/site/common.scala index 6c4852961fec..0811d217537f 100644 --- a/scaladoc/src/dotty/tools/scaladoc/site/common.scala +++ b/scaladoc/src/dotty/tools/scaladoc/site/common.scala @@ -12,12 +12,13 @@ import com.vladsch.flexmark.ext.gfm.tasklist.TaskListExtension import com.vladsch.flexmark.ext.tables.TablesExtension import com.vladsch.flexmark.ext.yaml.front.matter.{AbstractYamlFrontMatterVisitor, YamlFrontMatterExtension} import com.vladsch.flexmark.parser.{Parser, ParserEmulationProfile} -import com.vladsch.flexmark.util.options.{DataHolder, MutableDataSet} import com.vladsch.flexmark.ext.wikilink.WikiLinkExtension import com.vladsch.flexmark.formatter.Formatter import com.vladsch.flexmark.html.HtmlRenderer import scala.jdk.CollectionConverters._ +import com.vladsch.flexmark.util.data.DataHolder +import com.vladsch.flexmark.util.data.MutableDataSet val docsRootDRI: DRI = DRI(location = "_docs/index", symbolUUID = staticFileSymbolUUID) val apiPageDRI: DRI = DRI(location = "api/index") @@ -62,11 +63,16 @@ def yamlParser(using ctx: StaticSiteContext): Parser = Parser.builder(defaultMar def loadTemplateFile(file: File, defaultTitle: Option[TemplateName] = None)(using ctx: StaticSiteContext): TemplateFile = { val lines = Files.readAllLines(file.toPath).asScala.toList - val (config, content) = if (lines.head == ConfigSeparator) { + val (config, 
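The new `BlogParser` above accepts either a directory containing a `blog.yml` or a raw YAML string, and falls back to an all-default `BlogConfig` when nothing can be parsed; `loadBlog()` then uses `input`/`output` to relocate the blog and `hidden` to skip it entirely. A usage sketch of the `String` overload (the YAML values are illustrative):

```scala
import dotty.tools.scaladoc.site.BlogParser

@main def blogConfigDemo(): Unit =
  val cfg = BlogParser.readYml(
    """input: _custom-blog
      |output: custom-blog
      |hidden: false
      |""".stripMargin)
  // Expect: input=_custom-blog, output=custom-blog, hidden=false
  println(s"input=${cfg.input}, output=${cfg.output}, hidden=${cfg.hidden}")
```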
content) = if (!lines.isEmpty && lines.head == ConfigSeparator) { // Taking the second occurrence of ConfigSeparator. // The rest may appear within the content. - val index = lines.drop(1).indexOf(ConfigSeparator) + 2 - (lines.take(index), lines.drop(index)) + val secondSeparatorIndex = lines.drop(1).indexOf(ConfigSeparator) + if secondSeparatorIndex != -1 then + (lines.take(secondSeparatorIndex + 2), lines.drop(secondSeparatorIndex + 2)) + else + // If there is no second occurrence of ConfigSeparator, we assume that the + // whole file is config. + (lines.tail, Nil) } else (Nil, lines) val configParsed = yamlParser.parse(config.mkString(LineSeparator)) diff --git a/scaladoc/src/dotty/tools/scaladoc/site/templates.scala b/scaladoc/src/dotty/tools/scaladoc/site/templates.scala index fe51bbe0614d..92e0096e5af1 100644 --- a/scaladoc/src/dotty/tools/scaladoc/site/templates.scala +++ b/scaladoc/src/dotty/tools/scaladoc/site/templates.scala @@ -11,7 +11,6 @@ import com.vladsch.flexmark.ext.gfm.tasklist.TaskListExtension import com.vladsch.flexmark.ext.tables.TablesExtension import com.vladsch.flexmark.ext.yaml.front.matter.{AbstractYamlFrontMatterVisitor, YamlFrontMatterExtension} import com.vladsch.flexmark.parser.{Parser, ParserEmulationProfile} -import com.vladsch.flexmark.util.options.{DataHolder, MutableDataSet} import com.vladsch.flexmark.html.HtmlRenderer import com.vladsch.flexmark.formatter.Formatter import liqp.Template diff --git a/scaladoc/src/dotty/tools/scaladoc/snippets/FlexmarkSnippetProcessor.scala b/scaladoc/src/dotty/tools/scaladoc/snippets/FlexmarkSnippetProcessor.scala index 69e7c7764985..33f0e089053a 100644 --- a/scaladoc/src/dotty/tools/scaladoc/snippets/FlexmarkSnippetProcessor.scala +++ b/scaladoc/src/dotty/tools/scaladoc/snippets/FlexmarkSnippetProcessor.scala @@ -4,7 +4,6 @@ package snippets import com.vladsch.flexmark.util.{ast => mdu, sequence} import com.vladsch.flexmark.{ast => mda} import com.vladsch.flexmark.formatter.Formatter -import com.vladsch.flexmark.util.options.MutableDataSet import scala.jdk.CollectionConverters._ import dotty.tools.scaladoc.tasty.comments.markdown.ExtendedFencedCodeBlock diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala index 38cc90330265..2c7017f76636 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala @@ -12,6 +12,9 @@ import NameNormalizer._ import SyntheticsSupport._ import dotty.tools.dotc.core.NameKinds +// Please use this only for things defined in the api.scala file +import dotty.tools.{scaladoc => api} + trait ClassLikeSupport: self: TastyParser => import qctx.reflect._ @@ -45,7 +48,7 @@ trait ClassLikeSupport: .filter(s => s.exists && !s.isHiddenByVisibility) .map( _.tree.asInstanceOf[DefDef]) constr.fold(Nil)( - _.termParamss.map(pList => ParametersList(pList.params.map(p => mkParameter(p, parameterModifier)), paramListModifier(pList.params))) + _.termParamss.map(pList => api.TermParameterList(pList.params.map(p => mkParameter(p, parameterModifier)), paramListModifier(pList.params))) ) if classDef.symbol.flags.is(Flags.Module) then Kind.Object @@ -61,14 +64,18 @@ trait ClassLikeSupport: signatureOnly: Boolean = false, modifiers: Seq[Modifier] = classDef.symbol.getExtraModifiers(), ): Member = - def unpackTreeToClassDef(tree: Tree): ClassDef = tree match - case tree: ClassDef => tree - case TypeDef(_, tbt: TypeBoundsTree) => 
unpackTreeToClassDef(tbt.tpe.typeSymbol.tree) - case TypeDef(_, tt: TypeTree) => unpackTreeToClassDef(tt.tpe.typeSymbol.tree) - case c: Apply => - c.symbol.owner.tree.symbol.tree match + def unpackTreeToClassDef(tree: Tree): ClassDef = + def unpackApply(a: Apply) = + a.symbol.owner.tree match case tree: ClassDef => tree - case tt: TypeTree => unpackTreeToClassDef(tt.tpe.typeSymbol.tree) + + tree match + case tree: ClassDef => tree + case TypeDef(_, tbt: TypeBoundsTree) => unpackTreeToClassDef(tbt.tpe.typeSymbol.tree) + case TypeDef(_, tt: TypeTree) => unpackTreeToClassDef(tt.tpe.typeSymbol.tree) + case c: Apply => unpackApply(c) + case Block(_, c: Apply) => unpackApply(c) + case tt: TypeTree => unpackTreeToClassDef(tt.tpe.typeSymbol.tree) def signatureWithName(s: dotty.tools.scaladoc.Signature): dotty.tools.scaladoc.Signature = s match @@ -142,11 +149,12 @@ trait ClassLikeSupport: dd.symbol.extendedSymbol.map { extSym => val memberInfo = unwrapMemberInfo(c, dd.symbol) val typeParams = dd.symbol.extendedTypeParams.map(mkTypeArgument(_, memberInfo.genericTypes)) - val termParams = dd.symbol.extendedTermParamLists.zipWithIndex.flatMap { case (paramList, index) => - memberInfo.paramLists(index) match - case EvidenceOnlyParameterList => Nil - case info: RegularParameterList => - Seq(ParametersList(paramList.params.map(mkParameter(_, memberInfo = info)), paramListModifier(paramList.params))) + val termParams = dd.symbol.extendedTermParamLists.zipWithIndex.flatMap { case (termParamList, index) => + memberInfo.termParamLists(index) match + case MemberInfo.EvidenceOnlyParameterList => None + case MemberInfo.RegularParameterList(info) => + Some(api.TermParameterList(termParamList.params.map(mkParameter(_, memberInfo = info)), paramListModifier(termParamList.params))) + case _ => assert(false, "memberInfo.termParamLists contains a type parameter list !") } val target = ExtensionTarget( extSym.symbol.normalizedName, @@ -335,44 +343,67 @@ trait ClassLikeSupport: def parseMethod( c: ClassDef, methodSymbol: Symbol, - emptyParamsList: Boolean = false, paramPrefix: Symbol => String = _ => "", specificKind: (Kind.Def => Kind) = identity ): Member = val method = methodSymbol.tree.asInstanceOf[DefDef] - val paramLists: List[TermParamClause] = methodSymbol.nonExtensionTermParamLists - val genericTypes: List[TypeDef] = if (methodSymbol.isClassConstructor) Nil else methodSymbol.nonExtensionLeadingTypeParams + val paramLists = methodSymbol.nonExtensionParamLists val memberInfo = unwrapMemberInfo(c, methodSymbol) - val basicKind: Kind.Def = Kind.Def( - genericTypes.map(mkTypeArgument(_, memberInfo.genericTypes, memberInfo.contextBounds)), - paramLists.zipWithIndex.flatMap { (pList, index) => - memberInfo.paramLists(index) match - case EvidenceOnlyParameterList => Nil - case info: RegularParameterList => - Seq(ParametersList(pList.params.map( + val unshuffledMemberInfoParamLists = + if methodSymbol.isExtensionMethod && methodSymbol.isRightAssoc then + // Taken from RefinedPrinter.scala + // If you change the names of the clauses below, also change them in right-associative-extension-methods.md + val (leftTyParams, rest1) = memberInfo.paramLists.span(_.isType) + val (leadingUsing, rest2) = rest1.span(_.isUsing) + val (rightTyParams, rest3) = rest2.span(_.isType) + val (rightParam, rest4) = rest3.splitAt(1) + val (leftParam, rest5) = rest4.splitAt(1) + val (trailingUsing, rest6) = rest5.span(_.isUsing) + if leftParam.nonEmpty then + // leftTyParams ::: leadingUsing ::: leftParam ::: trailingUsing ::: rightTyParams 
::: rightParam ::: rest6 + // because of takeRight after, this is equivalent to the following: + rightTyParams ::: rightParam ::: rest6 + else + memberInfo.paramLists // it wasn't a binary operator, after all. + else + memberInfo.paramLists + + val croppedUnshuffledMemberInfoParamLists = unshuffledMemberInfoParamLists.takeRight(paramLists.length) + + val basicDefKind: Kind.Def = Kind.Def( + paramLists.zip(croppedUnshuffledMemberInfoParamLists).flatMap{ + case (_: TermParamClause, MemberInfo.EvidenceOnlyParameterList) => Nil + case (pList: TermParamClause, MemberInfo.RegularParameterList(info)) => + Some(Left(api.TermParameterList(pList.params.map( mkParameter(_, paramPrefix, memberInfo = info)), paramListModifier(pList.params) - )) + ))) + case (TypeParamClause(genericTypeList), MemberInfo.TypeParameterList(memInfoTypes)) => + Some(Right(genericTypeList.map(mkTypeArgument(_, memInfoTypes, memberInfo.contextBounds)))) + case (_,_) => + assert(false, s"croppedUnshuffledMemberInfoParamLists and SymOps.nonExtensionParamLists disagree on whether this clause is a type or term one") } ) val methodKind = - if methodSymbol.isClassConstructor then Kind.Constructor(basicKind) - else if methodSymbol.flags.is(Flags.Implicit) then extractImplicitConversion(method.returnTpt.tpe) match - case Some(conversion) if paramLists.size == 0 || (paramLists.size == 1 && paramLists.head.params.size == 0) => - Kind.Implicit(basicKind, Some(conversion)) - case None if paramLists.size == 1 && paramLists(0).params.size == 1 => - Kind.Implicit(basicKind, Some( - ImplicitConversion( - paramLists(0).params(0).tpt.tpe.typeSymbol.dri, - method.returnTpt.tpe.typeSymbol.dri - ) - )) - case _ => - Kind.Implicit(basicKind, None) - else if methodSymbol.flags.is(Flags.Given) then Kind.Given(basicKind, Some(method.returnTpt.tpe.asSignature), extractImplicitConversion(method.returnTpt.tpe)) - else specificKind(basicKind) + if methodSymbol.isClassConstructor then Kind.Constructor(basicDefKind) + else if methodSymbol.flags.is(Flags.Implicit) then + val termParamLists: List[TermParamClause] = methodSymbol.nonExtensionTermParamLists + extractImplicitConversion(method.returnTpt.tpe) match + case Some(conversion) if termParamLists.size == 0 || (termParamLists.size == 1 && termParamLists.head.params.size == 0) => + Kind.Implicit(basicDefKind, Some(conversion)) + case None if termParamLists.size == 1 && termParamLists(0).params.size == 1 => + Kind.Implicit(basicDefKind, Some( + ImplicitConversion( + termParamLists(0).params(0).tpt.tpe.typeSymbol.dri, + method.returnTpt.tpe.typeSymbol.dri + ) + )) + case _ => + Kind.Implicit(basicDefKind, None) + else if methodSymbol.flags.is(Flags.Given) then Kind.Given(basicDefKind, Some(method.returnTpt.tpe.asSignature), extractImplicitConversion(method.returnTpt.tpe)) + else specificKind(basicDefKind) val origin = if !methodSymbol.isOverridden then Origin.RegularlyDefined else val overriddenSyms = methodSymbol.allOverriddenSymbols.map(_.owner) @@ -404,7 +435,7 @@ trait ClassLikeSupport: val inlinePrefix = if argument.symbol.flags.is(Flags.Inline) then "inline " else "" val nameIfNotSynthetic = Option.when(!argument.symbol.flags.is(Flags.Synthetic))(argument.symbol.normalizedName) val name = argument.symbol.normalizedName - Parameter( + api.TermParameter( argument.symbol.getAnnotations(), inlinePrefix + prefix(argument.symbol), nameIfNotSynthetic, @@ -498,7 +529,7 @@ trait ClassLikeSupport: experimental: Option[Annotation] = None ) = Member( name = symbol.normalizedName, - fullName = symbol.fullName, + 
fullName = symbol.normalizedFullName, dri = symbol.dri, kind = kind, visibility = symbol.getVisibility(), @@ -514,16 +545,26 @@ trait ClassLikeSupport: experimental = experimental ) - object EvidenceOnlyParameterList - type RegularParameterList = Map[String, TypeRepr] - type ParameterList = RegularParameterList | EvidenceOnlyParameterList.type case class MemberInfo( - genericTypes: Map[String, TypeBounds], - paramLists: List[ParameterList], + paramLists: List[MemberInfo.ParameterList], res: TypeRepr, contextBounds: Map[String, DSignature] = Map.empty, - ) + ){ + val genericTypes: Map[String, TypeBounds] = paramLists.collect{ case MemberInfo.TypeParameterList(types) => types }.headOption.getOrElse(Map()) + + val termParamLists: List[MemberInfo.ParameterList] = paramLists.filter(_.isTerm) + } + + object MemberInfo: + enum ParameterList(val isTerm: Boolean, val isUsing: Boolean): + inline def isType = !isTerm + case EvidenceOnlyParameterList extends ParameterList(isTerm = true, isUsing = false) + case RegularParameterList(m: Map[String, TypeRepr])(isUsing: Boolean) extends ParameterList(isTerm = true, isUsing) + case TypeParameterList(m: Map[String, TypeBounds]) extends ParameterList(isTerm = false, isUsing = false) + + export ParameterList.{RegularParameterList, EvidenceOnlyParameterList, TypeParameterList} + def unwrapMemberInfo(c: ClassDef, symbol: Symbol): MemberInfo = @@ -541,10 +582,12 @@ trait ClassLikeSupport: symbol.paramSymss.flatten.find(_.name == name).exists(_.flags.is(Flags.Implicit)) def handlePolyType(memberInfo: MemberInfo, polyType: PolyType): MemberInfo = - MemberInfo(polyType.paramNames.zip(polyType.paramBounds).toMap, memberInfo.paramLists, polyType.resType) + val typeParamList = MemberInfo.TypeParameterList(polyType.paramNames.zip(polyType.paramBounds).toMap) + MemberInfo(memberInfo.paramLists :+ typeParamList, polyType.resType) def handleMethodType(memberInfo: MemberInfo, methodType: MethodType): MemberInfo = val rawParams = methodType.paramNames.zip(methodType.paramTypes).toMap + val isUsing = methodType.isImplicit val (evidences, notEvidences) = rawParams.partition(e => isSyntheticEvidence(e._1)) def findParamRefs(t: TypeRepr): Seq[ParamRef] = t match @@ -573,14 +616,15 @@ trait ClassLikeSupport: val newParams = notEvidences ++ paramsThatLookLikeContextBounds - val newLists: List[ParameterList] = if newParams.isEmpty && contextBounds.nonEmpty - then memberInfo.paramLists ++ Seq(EvidenceOnlyParameterList) - else memberInfo.paramLists ++ Seq(newParams) + val termParamList = if newParams.isEmpty && contextBounds.nonEmpty + then MemberInfo.EvidenceOnlyParameterList + else MemberInfo.RegularParameterList(newParams)(isUsing) + - MemberInfo(memberInfo.genericTypes, newLists , methodType.resType, contextBounds.toMap) + MemberInfo(memberInfo.paramLists :+ termParamList, methodType.resType, contextBounds.toMap) def handleByNameType(memberInfo: MemberInfo, byNameType: ByNameType): MemberInfo = - MemberInfo(memberInfo.genericTypes, memberInfo.paramLists, byNameType.underlying) + MemberInfo(memberInfo.paramLists, byNameType.underlying) def recursivelyCalculateMemberInfo(memberInfo: MemberInfo): MemberInfo = memberInfo.res match case p: PolyType => recursivelyCalculateMemberInfo(handlePolyType(memberInfo, p)) @@ -588,7 +632,7 @@ trait ClassLikeSupport: case b: ByNameType => handleByNameType(memberInfo, b) case _ => memberInfo - recursivelyCalculateMemberInfo(MemberInfo(Map.empty, List.empty, baseTypeRepr)) + recursivelyCalculateMemberInfo(MemberInfo(List.empty, baseTypeRepr)) 
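The reworked `MemberInfo` above now records every clause, type or term, in declaration order instead of keeping one global type-parameter map, which is what lets scaladoc reproduce interleaved clauses faithfully (and what `Kind.Def`'s new `Seq[Either[TermParameterList, TypeParameterList]]` consumes). A self-contained mirror of the shape, with `String` payloads standing in for `TypeRepr`/`TypeBounds`:

```scala
// Simplified mirror of MemberInfo.ParameterList; payloads are illustrative.
enum ParameterList(val isTerm: Boolean, val isUsing: Boolean):
  inline def isType = !isTerm
  case EvidenceOnlyParameterList extends ParameterList(isTerm = true, isUsing = false)
  case RegularParameterList(params: Map[String, String])(isUsing: Boolean) extends ParameterList(isTerm = true, isUsing)
  case TypeParameterList(params: Map[String, String]) extends ParameterList(isTerm = false, isUsing = false)

import ParameterList.*

@main def paramListDemo(): Unit =
  // def f[T](x: T)(using ord: Ordering[T]): T is recorded, in order, as:
  val clauses = List(
    TypeParameterList(Map("T" -> "")),
    RegularParameterList(Map("x" -> "T"))(isUsing = false),
    RegularParameterList(Map("ord" -> "Ordering[T]"))(isUsing = true)
  )
  println(clauses.map(c => if c.isType then "type" else if c.isUsing then "using" else "term"))
```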
private def paramListModifier(parameters: Seq[ValDef]): String = if parameters.size > 0 then diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/InkuireSupport.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/InkuireSupport.scala index 0cdb3535c3ff..8a703cfb5d24 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/InkuireSupport.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/InkuireSupport.scala @@ -317,6 +317,8 @@ trait InkuireSupport(using DocContext) extends Resources: inner(tpe, vars) case tl @ TypeLambda(paramNames, _, resType) => Inkuire.TypeLambda(paramNames.map(Inkuire.TypeLambda.argument), inner(resType, vars)) //TODO [Inkuire] Type bounds + case pt @ PolyType(paramNames, _, resType) => + Inkuire.TypeLambda(paramNames.map(Inkuire.TypeLambda.argument), inner(resType, vars)) //TODO [Inkuire] Type bounds case r: Refinement => inner(r.info, vars) //TODO [Inkuire] Refinements case t @ AppliedType(tpe, typeList) => @@ -357,10 +359,8 @@ trait InkuireSupport(using DocContext) extends Resources: Inkuire.Type.unresolved //TODO [Inkuire] <- should be handled by Singleton case, but didn't work case MatchType(bond, sc, cases) => inner(sc, vars) - case ParamRef(TypeLambda(names, _, _), i) => - Inkuire.TypeLambda.argument(names(i)) - case ParamRef(m: MethodType, i) => - inner(m.paramTypes(i), vars) + case ParamRef(binder: LambdaType, i) => + Inkuire.TypeLambda.argument(binder.paramNames(i)) case RecursiveType(tp) => inner(tp, vars) case m@MethodType(_, typeList, resType) => diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/NameNormalizer.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/NameNormalizer.scala index 687ad6ecbf44..196c3e056b36 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/NameNormalizer.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/NameNormalizer.scala @@ -17,6 +17,18 @@ object NameNormalizer { val escaped = escapedName(constructorNormalizedName) escaped } + + def ownerNameChain: List[String] = { + import reflect.* + if s.isNoSymbol then List.empty + else if s == defn.EmptyPackageClass then List.empty + else if s == defn.RootPackage then List.empty + else if s == defn.RootClass then List.empty + else s.owner.ownerNameChain :+ s.normalizedName + } + + def normalizedFullName: String = + s.ownerNameChain.mkString(".") private val ignoredKeywords: Set[String] = Set("this") diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/Scaladoc2AnchorCreator.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/Scaladoc2AnchorCreator.scala index ba59f77495b1..3c34a1c9bba9 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/Scaladoc2AnchorCreator.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/Scaladoc2AnchorCreator.scala @@ -7,10 +7,66 @@ import scala.util.matching.Regex object Scaladoc2AnchorCreator: - def getScaladoc2Type(using Quotes)(t: reflect.Tree) = - import reflect.* - val regex = t match - case d: DefDef => "def" - case t: TypeDef => "type" - case v: ValDef => "val|var" - t.show(using Printer.TreeShortCode).split(regex, 2)(1).replace(" ","") + def getScaladoc2Type(using Quotes)(sym: quotes.reflect.Symbol) = signatureAnchor(sym) + + /** Creates the signature anchor + * + * - `X` for a `type X ...` + * - `x:X` for a `val x: X` + * - `f[U1,...](x1:T1,...)(impliciti1:U1,impliciti2:U2,...)...:R` for a `def f[U1, ...](x1: T1, ...)(implicit i1: U1, i2: U2...)...: R` + * + * Types are printed without their paths. No spaces are printed in the output. 
+   */
+  private def signatureAnchor(using Quotes)(sym: quotes.reflect.Symbol): String =
+    import quotes.reflect.*
+    def signatureType(tp: quotes.reflect.TypeRepr): String =
+      tp match
+        case mt @ MethodType(paramNames, paramTypes, res) =>
+          val implicitPrefix = if mt.isImplicit then "implicit" else ""
+          val closeClause = res match
+            case _: MethodOrPoly => ")"
+            case _ => "):"
+          paramNames.zip(paramTypes.map(signatureType))
+            .map((name, tpe) => s"$implicitPrefix$name:$tpe")
+            .mkString("(", ",", closeClause) + signatureType(res)
+        case PolyType(paramNames, paramBounds, res) =>
+          val closeClause = res match
+            case _: MethodOrPoly => "]"
+            case _ => "]:"
+          paramNames.zip(paramBounds.map(signatureType))
+            .map((name, tpe) => s"$name$tpe")
+            .mkString("[", ",", closeClause) + signatureType(res)
+        case TypeLambda(paramNames, paramBounds, res) =>
+          paramNames.zip(paramBounds.map(signatureType))
+            .map((name, tpe) => s"$name$tpe")
+            .mkString("[", ",", "]") + "=>" + signatureType(res)
+        case ByNameType(tp) =>
+          ":" + signatureType(tp)
+        case TypeBounds(low, hi) =>
+          val lowBound = if low =:= defn.NothingClass.typeRef then "" else ">:" + signatureType(low)
+          val hiBound = if hi =:= defn.AnyClass.typeRef then "" else "<:" + signatureType(hi)
+          lowBound + hiBound
+        case tp: ParamRef =>
+          tp.binder match
+            case binder: MethodType => binder.paramNames(tp.paramNum) + ".type"
+            case binder: PolyType => binder.paramNames(tp.paramNum)
+            case binder: LambdaType => binder.paramNames(tp.paramNum)
+        case AppliedType(tycon, args) =>
+          args.map {
+            case tp: TypeBounds => "_" + signatureType(tp)
+            case tp => signatureType(tp)
+          }.mkString(signatureType(tycon) + "[", ",", "]")
+        case tp: AnnotatedType =>
+          signatureType(tp.underlying) + "@" + tp.annotation.symbol.owner.name
+        case tp: ThisType =>
+          signatureType(tp.tref) + ".this"
+        case tp: TypeRef =>
+          tp.name
+        case tp =>
+          // TODO handle other cases without using show (show does not have a stable representation)
+          tp.show(using Printer.TypeReprShortCode).replace(" ","")
+
+    sym match
+      case sym if sym.isType => sym.name
+      case sym if sym.flags.is(Flags.Method) => sym.name + signatureType(sym.info)
+      case sym => sym.name + ":" + signatureType(sym.info)
diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/SymOps.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/SymOps.scala
index b4a1fc197d9a..5bc1b98a7fff 100644
--- a/scaladoc/src/dotty/tools/scaladoc/tasty/SymOps.scala
+++ b/scaladoc/src/dotty/tools/scaladoc/tasty/SymOps.scala
@@ -143,7 +143,9 @@ object SymOps:
     import reflect._
     sym.flags.is(Flags.Artifact)

-  def isLeftAssoc: Boolean = !sym.name.endsWith(":")
+  def isRightAssoc: Boolean = sym.name.endsWith(":")
+
+  def isLeftAssoc: Boolean = !sym.isRightAssoc

   def extendedSymbol: Option[reflect.ValDef] =
     import reflect.*
@@ -172,41 +174,36 @@
       )
     case _ => Nil -> Nil

+  def extendedParamLists: List[reflect.ParamClause] = sym.splitExtensionParamList._1
+
+  def extendedTypeParamLists: List[reflect.TypeParamClause] =
+    sym.extendedParamLists.collect {
+      case typeClause: reflect.TypeParamClause => typeClause
+    }
+
   def extendedTypeParams: List[reflect.TypeDef] =
-    import reflect.*
-    sym.tree match
-      case tree: DefDef =>
-        tree.leadingTypeParams
-      case _ => Nil
+    sym.extendedTypeParamLists.headOption.map(_.params).getOrElse(List())

   def extendedTermParamLists: List[reflect.TermParamClause] =
-    import reflect.*
-    sym.splitExtensionParamList._1.collect {
-      case tpc: TermParamClause
+    sym.extendedParamLists.collect {
+      case tpc: reflect.TermParamClause
=> tpc } - def nonExtensionTermParamLists: List[reflect.TermParamClause] = - import reflect.* - if sym.nonExtensionLeadingTypeParams.nonEmpty then - sym.nonExtensionParamLists.dropWhile { - case _: TypeParamClause => false - case _ => true - }.drop(1).collect { - case tpc: TermParamClause => tpc - } - else - sym.nonExtensionParamLists.collect { - case tpc: TermParamClause => tpc - } - def nonExtensionParamLists: List[reflect.ParamClause] = sym.splitExtensionParamList._2 + def nonExtensionTermParamLists: List[reflect.TermParamClause] = + sym.nonExtensionParamLists.collect { + case tpc: reflect.TermParamClause => tpc + } + + def nonExtensionTypeParamLists: List[reflect.TypeParamClause] = + sym.nonExtensionParamLists.collect { + case typeClause: reflect.TypeParamClause => typeClause + } + def nonExtensionLeadingTypeParams: List[reflect.TypeDef] = - import reflect.* - sym.nonExtensionParamLists.collectFirst { - case TypeParamClause(params) => params - }.toList.flatten + sym.nonExtensionTypeParamLists.headOption.map(_.params).getOrElse(List()) end extension @@ -238,7 +235,7 @@ class SymOpsWithLinkCache: def constructPathForScaladoc2: String = val l = escapeUrl(location.mkString("/")) val scaladoc2Anchor = if anchor.isDefined then { - "#" + getScaladoc2Type(sym.tree) + "#" + getScaladoc2Type(sym) } else "" docURL + l + extension + scaladoc2Anchor diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/TypesSupport.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/TypesSupport.scala index 0cf2669407c8..c94eda9409b2 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/TypesSupport.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/TypesSupport.scala @@ -290,10 +290,10 @@ trait TypesSupport: } inner(sc) ++ keyword(" match ").l ++ plain("{\n").l ++ casesTexts ++ plain(spaces + "}").l - case ParamRef(TypeLambda(names, _, _), i) => tpe(names.apply(i)).l - case ParamRef(m: MethodType, i) => tpe(m.paramNames(i)).l ++ plain(".type").l + case ParamRef(binder: LambdaType, i) => tpe(binder.paramNames(i)).l + case RecursiveType(tp) => inner(tp) case MatchCase(pattern, rhs) => diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/Comments.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/Comments.scala index 66844f5049d3..ff4405d3ec71 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/Comments.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/Comments.scala @@ -7,7 +7,6 @@ import scala.util.Try import com.vladsch.flexmark.util.{ast => mdu, sequence} import com.vladsch.flexmark.{ast => mda} import com.vladsch.flexmark.formatter.Formatter -import com.vladsch.flexmark.util.options.MutableDataSet import com.vladsch.flexmark.util.sequence.BasedSequence import scala.quoted._ diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/MarkdownParser.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/MarkdownParser.scala index f5dd0ea88528..edf9051c0ed7 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/MarkdownParser.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/MarkdownParser.scala @@ -9,7 +9,6 @@ import com.vladsch.flexmark.formatter.Formatter import com.vladsch.flexmark.parser.Parser import com.vladsch.flexmark.util.sequence.CharSubSequence import com.vladsch.flexmark.parser.ParserEmulationProfile -import com.vladsch.flexmark.ext.gfm.tables.TablesExtension import com.vladsch.flexmark.ext.gfm.strikethrough.StrikethroughExtension import com.vladsch.flexmark.ext.gfm.tasklist.TaskListExtension import com.vladsch.flexmark.ext.emoji.EmojiExtension @@ 
-17,10 +16,12 @@ import com.vladsch.flexmark.ext.autolink.AutolinkExtension import com.vladsch.flexmark.ext.anchorlink.AnchorLinkExtension import com.vladsch.flexmark.ext.yaml.front.matter.YamlFrontMatterExtension import com.vladsch.flexmark.ext.wikilink.WikiLinkExtension -import com.vladsch.flexmark.util.options.{ DataHolder, MutableDataSet } -import com.vladsch.flexmark.util.builder.Extension import scala.jdk.CollectionConverters._ +import com.vladsch.flexmark.util.misc.Extension +import com.vladsch.flexmark.ext.tables.TablesExtension +import com.vladsch.flexmark.util.data.MutableDataSet +import com.vladsch.flexmark.util.data.DataHolder object MarkdownParser { diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/DocFlexmarkExtension.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/DocFlexmarkExtension.scala index ad5533d634ad..d797eaed7fbf 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/DocFlexmarkExtension.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/DocFlexmarkExtension.scala @@ -15,6 +15,9 @@ import com.vladsch.flexmark._ import dotty.tools.scaladoc.snippets._ import scala.jdk.CollectionConverters._ +import com.vladsch.flexmark.util.data.MutableDataHolder +import com.vladsch.flexmark.util.data.DataHolder +import com.vladsch.flexmark.html.renderer.NodeRenderingHandler.CustomNodeRenderer class DocLinkNode( val target: DocLink, @@ -40,7 +43,7 @@ class DocFlexmarkParser(resolveLink: String => DocLink) extends Parser.ParserExt class Factory extends LinkRefProcessorFactory: override def getBracketNestingLevel(options: DataHolder) = 1 override def getWantExclamationPrefix(options: DataHolder) = false - override def create(doc: Document): LinkRefProcessor = + override def apply(doc: Document): LinkRefProcessor = new WikiLinkLinkRefProcessor(doc): override def createNode(nodeChars: BasedSequence): Node = val chars = nodeChars.toString.substring(2, nodeChars.length - 2) @@ -75,7 +78,7 @@ case class DocFlexmarkRenderer(renderLink: (DocLink, String) => String) ) object Factory extends NodeRendererFactory: - override def create(options: DataHolder): NodeRenderer = Render + override def apply(options: DataHolder): NodeRenderer = Render def extend(htmlRendererBuilder: HtmlRenderer.Builder, tpe: String): Unit = htmlRendererBuilder.nodeRendererFactory(Factory) diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SectionRenderingExtension.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SectionRenderingExtension.scala index 1fa1a604c85a..421c7eaab76f 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SectionRenderingExtension.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SectionRenderingExtension.scala @@ -3,17 +3,23 @@ package tasty.comments.markdown import com.vladsch.flexmark.html.* import com.vladsch.flexmark.html.renderer.* +import com.vladsch.flexmark.html.renderer.NodeRenderingHandler.CustomNodeRenderer import com.vladsch.flexmark.parser.* import com.vladsch.flexmark.ext.wikilink.* import com.vladsch.flexmark.ext.wikilink.internal.WikiLinkLinkRefProcessor import com.vladsch.flexmark.util.ast.* import com.vladsch.flexmark.util.options.* import com.vladsch.flexmark.util.sequence.BasedSequence -import com.vladsch.flexmark.util.html.{AttributeImpl, Attributes} import com.vladsch.flexmark.* import com.vladsch.flexmark.ast.FencedCodeBlock import scala.collection.mutable +import com.vladsch.flexmark.util.data.MutableDataHolder +import 
com.vladsch.flexmark.util.html.Attributes +import com.vladsch.flexmark.util.html.AttributeImpl +import com.vladsch.flexmark.util.data.DataHolder +import com.vladsch.flexmark.util.html.Attribute +import com.vladsch.flexmark.util.html.MutableAttributes object SectionRenderingExtension extends HtmlRenderer.HtmlRendererExtension: @@ -30,18 +36,18 @@ object SectionRenderingExtension extends HtmlRenderer.HtmlRendererExtension: repeatedIds.update((c, header.getText), repeatedIds((c, header.getText)) + 1) val id = idGenerator.getId(header.getText.append(ifSuffixStr)) val anchor = AnchorLink(s"#$id") - val attributes = Attributes() val headerClass: String = header.getLevel match case 1 => "h500" case 2 => "h500" case 3 => "h400" case 4 => "h300" case _ => "h50" - attributes.addValue(AttributeImpl.of("class", headerClass)) + val attributes = MutableAttributes() + attributes.addValue("class", headerClass) val embeddedAttributes = EmbeddedAttributeProvider.EmbeddedNodeAttributes(header, attributes) header.prependChild(embeddedAttributes) header.prependChild(anchor) - html.attr(AttributeImpl.of("id", id)).withAttr.tag("section", false, false, () => { + html.attr("id", id).withAttr.tag("section", false, false, () => { c.render(header) body.foreach(c.render) }) @@ -59,7 +65,8 @@ object SectionRenderingExtension extends HtmlRenderer.HtmlRendererExtension: ) object Factory extends NodeRendererFactory: - override def create(options: DataHolder): NodeRenderer = Render + override def apply(options: DataHolder): NodeRenderer = Render + def extend(htmlRendererBuilder: HtmlRenderer.Builder, tpe: String): Unit = htmlRendererBuilder.nodeRendererFactory(Factory) diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SnippetRenderingExtension.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SnippetRenderingExtension.scala index e70b0883a31e..e980c5fc44ef 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SnippetRenderingExtension.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SnippetRenderingExtension.scala @@ -13,6 +13,9 @@ import com.vladsch.flexmark.util.options._ import com.vladsch.flexmark.util.sequence.BasedSequence import com.vladsch.flexmark._ import com.vladsch.flexmark.ast.FencedCodeBlock +import com.vladsch.flexmark.util.data.MutableDataHolder +import com.vladsch.flexmark.html.renderer.NodeRenderingHandler.CustomNodeRenderer +import com.vladsch.flexmark.util.data.DataHolder /** * SnippetRenderingExtension is responsible for running an analysis for scala codeblocks in the static documentation/scaladoc comments. 
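A note on the recurring import churn and `create` to `apply` overrides in these flexmark extensions: from flexmark-java 0.62 on, `DataHolder` and `MutableDataSet` live in `com.vladsch.flexmark.util.data` (previously `util.options`), and factory interfaces such as `NodeRendererFactory` are function-shaped, so implementations override `apply`. A minimal sketch of the new shape (illustrative, not the scaladoc code; assumes flexmark 0.62+ on the classpath):

```scala
import com.vladsch.flexmark.html.HtmlRenderer
import com.vladsch.flexmark.html.renderer.{NodeRenderer, NodeRendererFactory}
import com.vladsch.flexmark.parser.Parser
import com.vladsch.flexmark.util.data.{DataHolder, MutableDataSet} // was util.options before 0.62

// Factories are now java.util.function.Function-like: override apply, not create.
object NoopRendererFactory extends NodeRendererFactory:
  override def apply(options: DataHolder): NodeRenderer =
    () => java.util.Collections.emptySet() // a NodeRenderer that registers no handlers

@main def renderDemo(): Unit =
  val options  = new MutableDataSet()
  val parser   = Parser.builder(options).build()
  val renderer = HtmlRenderer.builder(options).build()
  println(renderer.render(parser.parse("# Hello"))) // <h1>Hello</h1>
```

The same `apply` pattern is what `DocFlexmarkParser`, `SectionRenderingExtension`, and `SnippetRenderingExtension` adopt in the hunks above.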
@@ -39,7 +42,7 @@ object SnippetRenderingExtension extends HtmlRenderer.HtmlRendererExtension: ) object Factory extends NodeRendererFactory: - override def create(options: DataHolder): NodeRenderer = Render + override def apply(options: DataHolder): NodeRenderer = Render def extend(htmlRendererBuilder: HtmlRenderer.Builder, tpe: String): Unit = htmlRendererBuilder.nodeRendererFactory(Factory) diff --git a/scaladoc/src/dotty/tools/scaladoc/transformers/ImplicitMembersExtensionTransformer.scala b/scaladoc/src/dotty/tools/scaladoc/transformers/ImplicitMembersExtensionTransformer.scala index 44eba3a39807..8ed7436bb11d 100644 --- a/scaladoc/src/dotty/tools/scaladoc/transformers/ImplicitMembersExtensionTransformer.scala +++ b/scaladoc/src/dotty/tools/scaladoc/transformers/ImplicitMembersExtensionTransformer.scala @@ -29,7 +29,7 @@ class ImplicitMembersExtensionTransformer(using DocContext) extends(Module => Mo case m @ Member(_, _, _, Kind.Extension(ExtensionTarget(_, _, _, _, MyDri, _), _), Origin.RegularlyDefined) => val kind = m.kind match case Kind.Extension(_, d) => d - case _ => Kind.Def(Nil, Nil) + case _ => Kind.Def(Nil) Seq(m.withOrigin(Origin.ExtensionFrom(source.name, source.dri)).withKind(kind)) case m @ Member(_, _, _, conversionProvider: ImplicitConversionProvider, Origin.RegularlyDefined) => diff --git a/scaladoc/src/dotty/tools/scaladoc/translators/ScalaSignatureProvider.scala b/scaladoc/src/dotty/tools/scaladoc/translators/ScalaSignatureProvider.scala index 88561282afb0..fd8dfc4f5b6c 100644 --- a/scaladoc/src/dotty/tools/scaladoc/translators/ScalaSignatureProvider.scala +++ b/scaladoc/src/dotty/tools/scaladoc/translators/ScalaSignatureProvider.scala @@ -58,8 +58,8 @@ class ScalaSignatureProvider: builder.kind(showKind), builder.name(member.name, member.dri), builder - .generics(kind.typeParams) - .functionParameters(kind.argsLists) + .typeParamList(kind.typeParams) + .functionTermParameters(kind.argsLists) .parentsSignature(member) ) @@ -106,8 +106,7 @@ class ScalaSignatureProvider: builder.kind(showKind), builder.name(method.name, method.dri), builder - .generics(kind.typeParams) - .functionParameters(kind.argsLists) + .functionParameters(kind.paramLists) .pipe { builder => instance.fold(builder)(i => builder.plain(": ").signature(i)) } @@ -151,7 +150,7 @@ class ScalaSignatureProvider: builder.modifiersAndVisibility(typeDef), builder.kind(tpe), builder.name(typeDef.name, typeDef.dri), - builder.generics(tpe.typeParams).pipe { bdr => + builder.typeParamList(tpe.typeParams).pipe { bdr => if (!tpe.opaque) { (if tpe.concreate then bdr.plain(" = ") else bdr) .signature(typeDef.signature) diff --git a/scaladoc/src/dotty/tools/scaladoc/translators/ScalaSignatureUtils.scala b/scaladoc/src/dotty/tools/scaladoc/translators/ScalaSignatureUtils.scala index acbfe87b5d25..d28dd6ca18fe 100644 --- a/scaladoc/src/dotty/tools/scaladoc/translators/ScalaSignatureUtils.scala +++ b/scaladoc/src/dotty/tools/scaladoc/translators/ScalaSignatureUtils.scala @@ -26,7 +26,7 @@ case class SignatureBuilder(content: Signature = Nil) extends ScalaSignatureUtil def annotationsBlock(d: Member): SignatureBuilder = d.annotations.foldLeft(this){ (bdr, annotation) => bdr.buildAnnotation(annotation)} - def annotationsInline(d: Parameter): SignatureBuilder = + def annotationsInline(d: TermParameter): SignatureBuilder = d.annotations.foldLeft(this){ (bdr, annotation) => bdr.buildAnnotation(annotation) } def annotationsInline(t: TypeParameter): SignatureBuilder = @@ -74,21 +74,27 @@ case class SignatureBuilder(content: 
Signature = Nil) extends ScalaSignatureUtil def kind(k: Kind) = keyword(k.name + " ") - def generics(on: Seq[TypeParameter]) = list(on.toList, List(Plain("[")), List(Plain("]"))){ (bdr, e) => + + def functionParameters(paramss: Seq[ Either[TermParameterList,TypeParameterList] ]) = + this.list(paramss, separator = List(Plain(""))) { + case (bld, Left(params: TermParameterList)) => bld.termParamList(params) + case (bld, Right(params: TypeParameterList)) => bld.typeParamList(params) + } + + def termParamList(params: TermParameterList) = + this.list(params.parameters, prefix = List(Plain("("), Keyword(params.modifiers)), suffix = List(Plain(")")), forcePrefixAndSuffix = true) { (bld, p) => + val annotationsAndModifiers = bld.annotationsInline(p) + .keyword(p.modifiers) + val name = p.name.fold(annotationsAndModifiers)(annotationsAndModifiers.name(_, p.dri).plain(": ")) + name.signature(p.signature) + } + + def typeParamList(on: TypeParameterList) = list(on.toList, List(Plain("[")), List(Plain("]"))){ (bdr, e) => bdr.annotationsInline(e).keyword(e.variance).tpe(e.name, Some(e.dri)).signature(e.signature) } - def functionParameters(params: Seq[ParametersList]) = - if params.isEmpty then this.plain("") - else if params.size == 1 && params(0).parameters == Nil then this.plain("()") - else this.list(params, separator = List(Plain(""))) { (bld, pList) => - bld.list(pList.parameters, prefix = List(Plain("("), Keyword(pList.modifiers)), suffix = List(Plain(")")), forcePrefixAndSuffix = true) { (bld, p) => - val annotationsAndModifiers = bld.annotationsInline(p) - .keyword(p.modifiers) - val name = p.name.fold(annotationsAndModifiers)(annotationsAndModifiers.name(_, p.dri).plain(": ")) - name.signature(p.signature) - } - } + def functionTermParameters(paramss: Seq[TermParameterList]) = + this.list(paramss, separator = List(Plain(""))) { (bld, pList) => bld.termParamList(pList) } trait ScalaSignatureUtils: extension (tokens: Seq[String]) def toSignatureString(): String = diff --git a/scaladoc/src/dotty/tools/scaladoc/util/html.scala b/scaladoc/src/dotty/tools/scaladoc/util/html.scala index e66ba3a4b706..72776a7413aa 100644 --- a/scaladoc/src/dotty/tools/scaladoc/util/html.scala +++ b/scaladoc/src/dotty/tools/scaladoc/util/html.scala @@ -108,6 +108,7 @@ object HTML: val onclick=Attr("onclick") val titleAttr =Attr("title") val onkeyup = Attr("onkeyup") + val target = Attr("target") def raw(content: String): AppliedTag = new AppliedTag(content) def raw(content: StringBuilder): AppliedTag = content diff --git a/scaladoc/test-documentations/emptyPage/_docs/hello.md b/scaladoc/test-documentations/emptyPage/_docs/hello.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/scaladoc/test-documentations/noConfigEnd/_docs/hello.md b/scaladoc/test-documentations/noConfigEnd/_docs/hello.md new file mode 100644 index 000000000000..3809c65bce02 --- /dev/null +++ b/scaladoc/test-documentations/noConfigEnd/_docs/hello.md @@ -0,0 +1,3 @@ +--- +title: My page +foo: bar diff --git a/scaladoc/test/dotty/tools/scaladoc/signatures/SignatureTest.scala b/scaladoc/test/dotty/tools/scaladoc/signatures/SignatureTest.scala index 4d8a9f46f21e..d5b7a0b9b6f8 100644 --- a/scaladoc/test/dotty/tools/scaladoc/signatures/SignatureTest.scala +++ b/scaladoc/test/dotty/tools/scaladoc/signatures/SignatureTest.scala @@ -43,7 +43,7 @@ abstract class SignatureTest( val unexpected = unexpectedFromSources.flatMap(actualSignatures.get).flatten val expectedButNotFound = expectedFromSources.flatMap { - case (k, v) => 
findMissingSingatures(v, actualSignatures.getOrElse(k, Nil))
+      case (k, v) => findMissingSignatures(v, actualSignatures.getOrElse(k, Nil))
     }

     val missingReport = Option.when(!ignoreMissingSignatures && !expectedButNotFound.isEmpty)
@@ -75,7 +75,7 @@ abstract class SignatureTest(
   private val unexpectedRegex = raw"(.+)//unexpected".r
   private val identifierRegex = raw"^\s*(`.*`|(?:\w+)(?:_[^\[\(\s]+)|\w+|[^\[\(\s]+)".r

-  private def findMissingSingatures(expected: Seq[String], actual: Seq[String]): Set[String] =
+  private def findMissingSignatures(expected: Seq[String], actual: Seq[String]): Set[String] =
     expected.toSet &~ actual.toSet

   extension (s: String)
@@ -84,10 +84,12 @@ abstract class SignatureTest(
   private def findName(signature: String, kinds: Seq[String]): Option[String] =
     for
       kindMatch <- kinds.flatMap(k => s"\\b$k\\b".r.findFirstMatchIn(signature)).headOption
+      kind <- Option(kindMatch.group(0)) // to filter out nulls
       afterKind <- Option(kindMatch.after(0)) // to filter out nulls
-      nameMatch <- identifierRegex.findFirstMatchIn(afterKind)
-    yield nameMatch.group(1)
+      name <- if kind.contains("extension") then Some(signature) // The name of an extension will always be the signature itself
+              else identifierRegex.findFirstMatchIn(afterKind).map(_.group(1))
+    yield name

   private def signaturesFromSources(source: Source, kinds: Seq[String]): Seq[SignatureRes] =
     source.getLines.map(_.trim)
@@ -110,6 +112,9 @@
     def processFile(path: Path): Unit =
       if filterFunc(path) then
         val document = Jsoup.parse(IO.read(path))
+        val documentable = document.select(".groupHeader").forEach { element =>
+          signatures += element.text
+        }
         val content = document.select(".documentableElement").forEach { elem =>
           val annotations = elem.select(".annotations").eachText.asScala.mkString("")
           val other = elem.select(".header .other-modifiers").eachText.asScala.mkString("")
@@ -123,12 +128,11 @@
           signatures += all
         }
-
     IO.foreachFileIn(output, processFile)
     signatures.result

object SignatureTest {
  val classlikeKinds = Seq("class", "object", "trait", "enum") // TODO add docs for packages
-  val members = Seq("type", "def", "val", "var", "given")
+  val members = Seq("type", "def", "val", "var", "given", "extension")
  val all = classlikeKinds ++ members
}
diff --git a/scaladoc/test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala b/scaladoc/test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala
index 49316b08dbc0..a09234be5512 100644
--- a/scaladoc/test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala
+++ b/scaladoc/test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala
@@ -39,7 +39,7 @@ class PackageObjectSymbolSignatures extends SignatureTest("packageObjectSymbolSi
 class MergedPackageSignatures extends SignatureTest("mergedPackage", SignatureTest.all.filterNot(_ == "object"), sourceFiles = List("mergedPackage1", "mergedPackage2", "mergedPackage3"))

-class ExtensionMethodSignature extends SignatureTest("extensionMethodSignatures", SignatureTest.all)
+class ExtensionMethodSignature extends SignatureTest("extensionMethodSignatures", SignatureTest.all.filterNot(_ == "extension"))

 class ExtensionMethodParamsSignature extends SignatureTest("extensionParams", SignatureTest.all)

@@ -108,3 +108,5 @@ class ImplicitMembers extends SignatureTest(
 )

 class NonScala3Parent extends SignatureTest("nonScala3Parent",
SignatureTest.all) + +class ExtendsCall extends SignatureTest("extendsCall", SignatureTest.all) diff --git a/scaladoc/test/dotty/tools/scaladoc/site/BlogParserTest.scala b/scaladoc/test/dotty/tools/scaladoc/site/BlogParserTest.scala new file mode 100644 index 000000000000..e27c257c8e4a --- /dev/null +++ b/scaladoc/test/dotty/tools/scaladoc/site/BlogParserTest.scala @@ -0,0 +1,19 @@ +package dotty.tools.scaladoc +package site + +import org.junit.Test +import org.junit.Assert._ + +class BlogParserTest: + + private val blogConfig = + """input: blog + |output: blog + |hidden: false + |""".stripMargin + + @Test + def loadBlog(): Unit = assertEquals( + BlogConfig("blog", "blog", false), + BlogParser.readYml(blogConfig) + ) \ No newline at end of file diff --git a/scaladoc/test/dotty/tools/scaladoc/site/SiteGeneratationTest.scala b/scaladoc/test/dotty/tools/scaladoc/site/SiteGeneratationTest.scala index 7ce16933997a..e012044156cc 100644 --- a/scaladoc/test/dotty/tools/scaladoc/site/SiteGeneratationTest.scala +++ b/scaladoc/test/dotty/tools/scaladoc/site/SiteGeneratationTest.scala @@ -95,6 +95,22 @@ class SiteGeneratationTest extends BaseHtmlTest: testApiPages(mainTitle = projectName, parents = Nil, hasToplevelIndexIndex = false) } + @Test + def emptyPage() = withGeneratedSite(testDocPath.resolve("emptyPage")){ + withHtmlFile("docs/hello.html") { content => + // There should be no content as the page body is empty. + content.assertTextsIn("#content", Nil*) + } + } + + @Test + def noConfigEnd() = withGeneratedSite(testDocPath.resolve("noConfigEnd")){ + withHtmlFile("docs/hello.html") { content => + // There should be no content as the page body is empty. + content.assertTextsIn("#content", Nil*) + } + } + @Test def staticLinking() = withGeneratedSite(testDocPath.resolve("static-links")){ diff --git a/scaladoc/test/dotty/tools/scaladoc/site/TemplateFileTests.scala b/scaladoc/test/dotty/tools/scaladoc/site/TemplateFileTests.scala index 203ab9cf5ed1..f07868ad4f44 100644 --- a/scaladoc/test/dotty/tools/scaladoc/site/TemplateFileTests.scala +++ b/scaladoc/test/dotty/tools/scaladoc/site/TemplateFileTests.scala @@ -202,6 +202,7 @@ class TemplateFileTests: content -> "md" ) ) + @Test def markdown(): Unit = testTemplate( @@ -222,10 +223,10 @@ class TemplateFileTests: ext = "md" ) { t => assertEquals( - """
-        |
-        Hello there!
+      """
+        |
+        Hello there2!
         |
      """.stripMargin, - t.resolveInner(RenderingContext(Map("msg" -> "there"))).code.trim()) + t.resolveInner(RenderingContext(Map("msg" -> "there2"))).code.trim()) } @Test diff --git a/scaladoc/test/dotty/tools/scaladoc/snippets/SnippetsE2eTest.scala b/scaladoc/test/dotty/tools/scaladoc/snippets/SnippetsE2eTest.scala index 1f28c938033d..616f7ae7f35e 100644 --- a/scaladoc/test/dotty/tools/scaladoc/snippets/SnippetsE2eTest.scala +++ b/scaladoc/test/dotty/tools/scaladoc/snippets/SnippetsE2eTest.scala @@ -13,7 +13,6 @@ import dotty.tools.dotc.reporting.{ Diagnostic, StoreReporter } import com.vladsch.flexmark.util.{ast => mdu, sequence} import com.vladsch.flexmark.{ast => mda} import com.vladsch.flexmark.formatter.Formatter -import com.vladsch.flexmark.util.options.MutableDataSet import scala.jdk.CollectionConverters._ import dotty.tools.scaladoc.tasty.comments.markdown.ExtendedFencedCodeBlock diff --git a/semanticdb/project/build.properties b/semanticdb/project/build.properties index 8b9a0b0ab037..46e43a97ed86 100644 --- a/semanticdb/project/build.properties +++ b/semanticdb/project/build.properties @@ -1 +1 @@ -sbt.version=1.8.0 +sbt.version=1.8.2 diff --git a/staging/src/scala/quoted/staging/Compiler.scala b/staging/src/scala/quoted/staging/Compiler.scala index c9abe3fa75c3..fbe6a3915a08 100644 --- a/staging/src/scala/quoted/staging/Compiler.scala +++ b/staging/src/scala/quoted/staging/Compiler.scala @@ -13,7 +13,7 @@ object Compiler: /** Create a new instance of the compiler using the the classloader of the application. * - * Usuage: + * Usage: * ``` * import scala.quoted.staging._ * given Compiler = Compiler.make(getClass.getClassLoader) diff --git a/staging/test-resources/repl-staging/i6007 b/staging/test-resources/repl-staging/i6007 index be9d5c0f92d6..0d6fbd0cffb1 100644 --- a/staging/test-resources/repl-staging/i6007 +++ b/staging/test-resources/repl-staging/i6007 @@ -3,7 +3,7 @@ scala> import quoted.staging.{Compiler => StagingCompiler, _} scala> implicit def compiler: StagingCompiler = StagingCompiler.make(getClass.getClassLoader) def compiler: quoted.staging.Compiler scala> def v(using Quotes) = '{ (if true then Some(1) else None).map(v => v+1) } -def v(using x$1: quoted.Quotes): quoted.Expr[Option[Int]] +def v(using x$1: quoted.Quotes): scala.quoted.Expr[Option[Int]] scala> scala.quoted.staging.withQuotes(v.show) val res0: String = (if (true) scala.Some.apply[scala.Int](1) else scala.None).map[scala.Int](((v: scala.Int) => v.+(1))) scala> scala.quoted.staging.run(v) diff --git a/tasty/src/dotty/tools/tasty/TastyFormat.scala b/tasty/src/dotty/tools/tasty/TastyFormat.scala index ac0357068c55..39d559234868 100644 --- a/tasty/src/dotty/tools/tasty/TastyFormat.scala +++ b/tasty/src/dotty/tools/tasty/TastyFormat.scala @@ -122,7 +122,8 @@ Standard-Section: "ASTs" TopLevelStat* MATCHtpt Length bound_Term? sel_Term CaseDef* -- sel match { CaseDef } where `bound` is optional upper bound of all rhs BYNAMEtpt underlying_Term -- => underlying SHAREDterm term_ASTRef -- Link to previously serialized term - HOLE Length idx_Nat arg_Tree* -- Hole where a splice goes with sequence number idx, splice is applied to arguments `arg`s + HOLE Length idx_Nat tpe_Type arg_Tree* -- Splice hole with index `idx`, the type of the hole `tpe`, type and term arguments of the hole `arg`s + CaseDef = CASEDEF Length pat_Term rhs_Tree guard_Tree? 
-- case pat if guard => rhs ImplicitArg = IMPLICITARG arg_Term -- implicit unapply argument diff --git a/tests/cmdTest-sbt-tests/sourcepath-with-inline-api-hash/project/build.properties b/tests/cmdTest-sbt-tests/sourcepath-with-inline-api-hash/project/build.properties index 8b9a0b0ab037..46e43a97ed86 100644 --- a/tests/cmdTest-sbt-tests/sourcepath-with-inline-api-hash/project/build.properties +++ b/tests/cmdTest-sbt-tests/sourcepath-with-inline-api-hash/project/build.properties @@ -1 +1 @@ -sbt.version=1.8.0 +sbt.version=1.8.2 diff --git a/tests/cmdTest-sbt-tests/sourcepath-with-inline/project/build.properties b/tests/cmdTest-sbt-tests/sourcepath-with-inline/project/build.properties index 8b9a0b0ab037..46e43a97ed86 100644 --- a/tests/cmdTest-sbt-tests/sourcepath-with-inline/project/build.properties +++ b/tests/cmdTest-sbt-tests/sourcepath-with-inline/project/build.properties @@ -1 +1 @@ -sbt.version=1.8.0 +sbt.version=1.8.2 diff --git a/tests/coverage/pos/Constructor.scala b/tests/coverage/pos/Constructor.scala index 251370ec8e6e..536bfa26f386 100644 --- a/tests/coverage/pos/Constructor.scala +++ b/tests/coverage/pos/Constructor.scala @@ -1,10 +1,20 @@ package covtest class C: + def this(arg: String) = { + this() + g() + } + + def this(x: Int) = + this(x.toString() + "foo") + def f(x: Int) = x def x = 1 f(x) + def g(): Int = 2 + object O: def g(y: Int) = y def y = 1 diff --git a/tests/coverage/pos/Constructor.scoverage.check b/tests/coverage/pos/Constructor.scoverage.check index 678da472fd4c..6a6742c9118d 100644 --- a/tests/coverage/pos/Constructor.scoverage.check +++ b/tests/coverage/pos/Constructor.scoverage.check @@ -24,10 +24,78 @@ covtest C Class covtest.C -f + 28 -33 +36 3 + +DefDef +false +0 +false +def this + +1 +Constructor.scala +covtest +C +Class +covtest.C + +69 +72 +5 +g +Apply +false +0 +false +g() + +2 +Constructor.scala +covtest +C +Class +covtest.C + +80 +88 +8 + +DefDef +false +0 +false +def this + +3 +Constructor.scala +covtest +C +Class +covtest.C + +108 +128 +9 ++ +Apply +false +0 +false +x.toString() + "foo" + +4 +Constructor.scala +covtest +C +Class +covtest.C +f +133 +138 +11 f DefDef false @@ -35,16 +103,16 @@ false false def f -1 +5 Constructor.scala covtest C Class covtest.C x -48 -53 -4 +153 +158 +12 x DefDef false @@ -52,16 +120,16 @@ false false def x -2 +6 Constructor.scala covtest C Class covtest.C -60 -64 -5 +165 +169 +13 f Apply false @@ -69,16 +137,16 @@ false false f(x) -3 +7 Constructor.scala covtest C Class covtest.C -62 -63 -5 +167 +168 +13 x Select false @@ -86,16 +154,33 @@ false false x -4 +8 +Constructor.scala +covtest +C +Class +covtest.C +g +173 +178 +15 +g +DefDef +false +0 +false +def g + +9 Constructor.scala covtest O$ Object covtest.O$ g -78 -83 -8 +203 +208 +18 g DefDef false @@ -103,16 +188,16 @@ false false def g -5 +10 Constructor.scala covtest O$ Object covtest.O$ y -98 -103 -9 +223 +228 +19 y DefDef false @@ -120,16 +205,16 @@ false false def y -6 +11 Constructor.scala covtest O$ Object covtest.O$ -110 -114 -10 +235 +239 +20 g Apply false @@ -137,16 +222,16 @@ false false g(y) -7 +12 Constructor.scala covtest O$ Object covtest.O$ -112 -113 -10 +237 +238 +20 y Ident false diff --git a/tests/init/neg/apply2.scala b/tests/init/neg/apply2.scala old mode 100644 new mode 100755 index 83f64a6dd3c7..c6c7fe5fedd2 --- a/tests/init/neg/apply2.scala +++ b/tests/init/neg/apply2.scala @@ -3,8 +3,8 @@ object O: println(n) class B: - val a = A(this) + val a = A(this) // error val b = new B - val n = 10 // error + val n = 10 end O diff --git 
a/tests/init/neg/closureLeak.check b/tests/init/neg/closureLeak.check index 7019f2274ab6..a90acaa8ed00 100644 --- a/tests/init/neg/closureLeak.check +++ b/tests/init/neg/closureLeak.check @@ -1,16 +1,14 @@ -- Error: tests/init/neg/closureLeak.scala:11:14 ----------------------------------------------------------------------- 11 | l.foreach(a => a.addX(this)) // error | ^^^^^^^^^^^^^^^^^ - | Cannot prove the method argument is hot. Only hot values are safe to leak. - | Found = Fun { this = ThisRef[class Outer], owner = class Outer }. Calling trace: - | -> class Outer { [ closureLeak.scala:1 ] - | ^ - | -> l.foreach(a => a.addX(this)) // error [ closureLeak.scala:11 ] - | ^^^^^^^^^^^^^^^^^ + |Could not verify that the method argument is transitively initialized (Hot). It was found to be a function where "this" is (the original object of type (class Outer) where initialization checking started). Only transitively initialized arguments may be passed to methods (except constructors). Calling trace: + |-> class Outer { [ closureLeak.scala:1 ] + | ^ + |-> l.foreach(a => a.addX(this)) // error [ closureLeak.scala:11 ] + | ^^^^^^^^^^^^^^^^^ | - | Promoting the value to hot (transitively initialized) failed due to the following problem: - | Cannot prove the method argument is hot. Only hot values are safe to leak. - | Found = ThisRef[class Outer]. - | Non initialized field(s): value p. Promotion trace: - | -> l.foreach(a => a.addX(this)) // error [ closureLeak.scala:11 ] - | ^^^^ + |Promoting the value to transitively initialized (Hot) failed due to the following problem: + |Could not verify that the method argument is transitively initialized (Hot). It was found to be the original object of type (class Outer) where initialization checking started. Only transitively initialized arguments may be passed to methods (except constructors). + |Non initialized field(s): value p. Promotion trace: + |-> l.foreach(a => a.addX(this)) // error [ closureLeak.scala:11 ] + | ^^^^ diff --git a/tests/init/neg/cycle-structure.check b/tests/init/neg/cycle-structure.check index fb7b54c7cac2..dfe7c9b85e2f 100644 --- a/tests/init/neg/cycle-structure.check +++ b/tests/init/neg/cycle-structure.check @@ -1,14 +1,14 @@ -- Error: tests/init/neg/cycle-structure.scala:3:13 -------------------------------------------------------------------- 3 | val x = B(this) // error | ^^^^^^^ - | Problematic object instantiation: arg 1 is not hot (transitively initialized). Calling trace: + | Problematic object instantiation: arg 1 is not transitively initialized (Hot). Calling trace: | -> case class A(b: B) { [ cycle-structure.scala:1 ] | ^ | -> val x = B(this) // error [ cycle-structure.scala:3 ] | ^^^^^^^ | | It leads to the following error during object initialization: - | Access field value x on a cold object. Calling trace: + | Access field value x on an uninitialized (Cold) object. Calling trace: | -> case class B(a: A) { [ cycle-structure.scala:7 ] | ^ | -> val x1 = a.x [ cycle-structure.scala:8 ] @@ -16,14 +16,14 @@ -- Error: tests/init/neg/cycle-structure.scala:9:13 -------------------------------------------------------------------- 9 | val x = A(this) // error | ^^^^^^^ - | Problematic object instantiation: arg 1 is not hot (transitively initialized). Calling trace: + | Problematic object instantiation: arg 1 is not transitively initialized (Hot). 
Calling trace: | -> case class B(a: A) { [ cycle-structure.scala:7 ] | ^ | -> val x = A(this) // error [ cycle-structure.scala:9 ] | ^^^^^^^ | | It leads to the following error during object initialization: - | Access field value x on a cold object. Calling trace: + | Access field value x on an uninitialized (Cold) object. Calling trace: | -> case class A(b: B) { [ cycle-structure.scala:1 ] | ^ | -> val x1 = b.x [ cycle-structure.scala:2 ] diff --git a/tests/init/neg/default-this.check b/tests/init/neg/default-this.check index 6d08a64450d4..f64f36304e9b 100644 --- a/tests/init/neg/default-this.check +++ b/tests/init/neg/default-this.check @@ -1,14 +1,13 @@ -- Error: tests/init/neg/default-this.scala:9:8 ------------------------------------------------------------------------ 9 | compare() // error | ^^^^^^^ - | Cannot prove the method argument is hot. Only hot values are safe to leak. - | Found = ThisRef[class B]. - | Non initialized field(s): value result. Calling trace: - | -> class B extends A { [ default-this.scala:6 ] - | ^ - | -> val result = updateThenCompare(5) [ default-this.scala:11 ] - | ^^^^^^^^^^^^^^^^^^^^ - | -> def updateThenCompare(c: Int): Boolean = { [ default-this.scala:7 ] - | ^ - | -> compare() // error [ default-this.scala:9 ] - | ^^^^^^^ + |Could not verify that the method argument is transitively initialized (Hot). It was found to be the original object of type (class B) where initialization checking started. Only transitively initialized arguments may be passed to methods (except constructors). + |Non initialized field(s): value result. Calling trace: + |-> class B extends A { [ default-this.scala:6 ] + | ^ + |-> val result = updateThenCompare(5) [ default-this.scala:11 ] + | ^^^^^^^^^^^^^^^^^^^^ + |-> def updateThenCompare(c: Int): Boolean = { [ default-this.scala:7 ] + | ^ + |-> compare() // error [ default-this.scala:9 ] + | ^^^^^^^ diff --git a/tests/init/neg/i15363.check b/tests/init/neg/i15363.check index 84cf268ef8a1..9912aa186a5b 100644 --- a/tests/init/neg/i15363.check +++ b/tests/init/neg/i15363.check @@ -1,14 +1,14 @@ -- Error: tests/init/neg/i15363.scala:3:10 ----------------------------------------------------------------------------- 3 | val b = new B(this) // error | ^^^^^^^^^^^ - | Problematic object instantiation: arg 1 is not hot (transitively initialized). Calling trace: + | Problematic object instantiation: arg 1 is not transitively initialized (Hot). Calling trace: | -> class A: [ i15363.scala:1 ] | ^ | -> val b = new B(this) // error [ i15363.scala:3 ] | ^^^^^^^^^^^ | | It leads to the following error during object initialization: - | Access field value m on a cold object. Calling trace: + | Access field value m on an uninitialized (Cold) object. Calling trace: | -> class B(a: A): [ i15363.scala:7 ] | ^ | -> val x = a.m [ i15363.scala:8 ] diff --git a/tests/init/neg/i15459.check b/tests/init/neg/i15459.check index 93ba28554895..a8c9972276db 100644 --- a/tests/init/neg/i15459.check +++ b/tests/init/neg/i15459.check @@ -1,12 +1,11 @@ -- Error: tests/init/neg/i15459.scala:3:10 ----------------------------------------------------------------------------- 3 | println(this) // error | ^^^^ - | Cannot prove the method argument is hot. Only hot values are safe to leak. - | Found = ThisRef[class Sub]. - | Non initialized field(s): value b. 
Calling trace: - | -> class Sub extends Sup: [ i15459.scala:5 ] - | ^ - | -> class Sup: [ i15459.scala:1 ] - | ^ - | -> println(this) // error [ i15459.scala:3 ] - | ^^^^ + |Could not verify that the method argument is transitively initialized (Hot). It was found to be the original object of type (class Sub) where initialization checking started. Only transitively initialized arguments may be passed to methods (except constructors). + |Non initialized field(s): value b. Calling trace: + |-> class Sub extends Sup: [ i15459.scala:5 ] + | ^ + |-> class Sup: [ i15459.scala:1 ] + | ^ + |-> println(this) // error [ i15459.scala:3 ] + | ^^^^ diff --git a/tests/init/neg/inherit-non-hot.check b/tests/init/neg/inherit-non-hot.check index 408196333a27..068ba9662fd1 100644 --- a/tests/init/neg/inherit-non-hot.check +++ b/tests/init/neg/inherit-non-hot.check @@ -1,17 +1,17 @@ -- Error: tests/init/neg/inherit-non-hot.scala:6:32 -------------------------------------------------------------------- 6 | if b == null then b = new B(this) // error | ^^^^^^^^^^^^^^^ - | The RHS of reassignment must be hot. Found = Warm[class B] { outer = Hot, args = (Cold) }. Calling trace: - | -> class C extends A { [ inherit-non-hot.scala:15 ] - | ^ - | -> val bAgain = toB.getBAgain [ inherit-non-hot.scala:16 ] - | ^^^ - | -> def toB: B = [ inherit-non-hot.scala:5 ] - | ^ - | -> if b == null then b = new B(this) // error [ inherit-non-hot.scala:6 ] - | ^^^^^^^^^^^^^^^ + |The RHS of reassignment must be transitively initialized (Hot). It was found to be a non-transitively initialized (Warm) object of type (class B) { outer = a transitively initialized (Hot) object, args = (an uninitialized (Cold) object) }. Calling trace: + |-> class C extends A { [ inherit-non-hot.scala:15 ] + | ^ + |-> val bAgain = toB.getBAgain [ inherit-non-hot.scala:16 ] + | ^^^ + |-> def toB: B = [ inherit-non-hot.scala:5 ] + | ^ + |-> if b == null then b = new B(this) // error [ inherit-non-hot.scala:6 ] + | ^^^^^^^^^^^^^^^ | - | Promoting the value to hot (transitively initialized) failed due to the following problem: - | Cannot prove that the field value a is hot. Found = Cold. Promotion trace: - | -> class B(a: A) { [ inherit-non-hot.scala:10 ] - | ^^^^ + |Promoting the value to transitively initialized (Hot) failed due to the following problem: + |Could not verify that the field value a is transitively initialized (Hot). It was found to be an uninitialized (Cold) object. Promotion trace: + |-> class B(a: A) { [ inherit-non-hot.scala:10 ] + | ^^^^ diff --git a/tests/init/neg/inlined-method.check b/tests/init/neg/inlined-method.check index 62bec184b825..f3061bcb63ed 100644 --- a/tests/init/neg/inlined-method.check +++ b/tests/init/neg/inlined-method.check @@ -1,12 +1,11 @@ -- Error: tests/init/neg/inlined-method.scala:8:45 --------------------------------------------------------------------- 8 | scala.runtime.Scala3RunTime.assertFailed(message) // error | ^^^^^^^ - | Cannot prove the method argument is hot. Only hot values are safe to leak. - | Found = ThisRef[class InlineError]. - | Non initialized field(s): value v. Calling trace: - | -> class InlineError { [ inlined-method.scala:1 ] - | ^ - | -> Assertion.failAssert(this) [ inlined-method.scala:2 ] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^ - | -> scala.runtime.Scala3RunTime.assertFailed(message) // error [ inlined-method.scala:8 ] - | ^^^^^^^ + |Could not verify that the method argument is transitively initialized (Hot). 
It was found to be the original object of type (class InlineError) where initialization checking started. Only transitively initialized arguments may be passed to methods (except constructors). + |Non initialized field(s): value v. Calling trace: + |-> class InlineError { [ inlined-method.scala:1 ] + | ^ + |-> Assertion.failAssert(this) [ inlined-method.scala:2 ] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ + |-> scala.runtime.Scala3RunTime.assertFailed(message) // error [ inlined-method.scala:8 ] + | ^^^^^^^ diff --git a/tests/init/neg/inner-first.check b/tests/init/neg/inner-first.check index e1df69fbd4a2..fe90423c828f 100644 --- a/tests/init/neg/inner-first.check +++ b/tests/init/neg/inner-first.check @@ -1,10 +1,9 @@ -- Error: tests/init/neg/inner-first.scala:3:12 ------------------------------------------------------------------------ 3 | println(this) // error | ^^^^ - | Cannot prove the method argument is hot. Only hot values are safe to leak. - | Found = ThisRef[class B]. - | Non initialized field(s): value n. Calling trace: - | -> class B: [ inner-first.scala:2 ] - | ^ - | -> println(this) // error [ inner-first.scala:3 ] - | ^^^^ + |Could not verify that the method argument is transitively initialized (Hot). It was found to be the original object of type (class B) where initialization checking started. Only transitively initialized arguments may be passed to methods (except constructors). + |Non initialized field(s): value n. Calling trace: + |-> class B: [ inner-first.scala:2 ] + | ^ + |-> println(this) // error [ inner-first.scala:3 ] + | ^^^^ diff --git a/tests/init/neg/interleaving-params.scala b/tests/init/neg/interleaving-params.scala new file mode 100755 index 000000000000..f0f9cbaf3f53 --- /dev/null +++ b/tests/init/neg/interleaving-params.scala @@ -0,0 +1,9 @@ +import scala.language.experimental.clauseInterleaving + +class Params{ + def bar[T](x: T)[T]: String = ??? // error + def zoo(x: Int)[T, U](x: U): T = ??? // error + def bbb[T <: U](x: U)[U]: U = ??? // error // error + def f0[T](implicit x: T)[U](y: U) = (x,y) // error + def f1[T](implicit x: T)[U] = (x,y) // error +} \ No newline at end of file diff --git a/tests/init/neg/leak-warm.check b/tests/init/neg/leak-warm.check index d4d563fc456e..c2fc561a3668 100644 --- a/tests/init/neg/leak-warm.check +++ b/tests/init/neg/leak-warm.check @@ -1,7 +1,7 @@ -- Error: tests/init/neg/leak-warm.scala:19:18 ------------------------------------------------------------------------- 19 | val l2 = l.map(_.m()) // error | ^^^^^^^^^^^^ - | Call method method map on a cold object. Calling trace: + | Call method method map on an uninitialized (Cold) object. Calling trace: | -> object leakWarm { [ leak-warm.scala:1 ] | ^ | -> val l2 = l.map(_.m()) // error [ leak-warm.scala:19 ] diff --git a/tests/init/neg/promotion-loop.check b/tests/init/neg/promotion-loop.check index 3d1eb7e74aec..bc05640d10d2 100644 --- a/tests/init/neg/promotion-loop.check +++ b/tests/init/neg/promotion-loop.check @@ -1,15 +1,14 @@ -- Error: tests/init/neg/promotion-loop.scala:16:10 -------------------------------------------------------------------- 16 | println(b) // error | ^ - | Cannot prove the method argument is hot. Only hot values are safe to leak. - | Found = Warm[class B] { outer = ThisRef[class Test] }. Calling trace: - | -> class Test { test => [ promotion-loop.scala:1 ] - | ^ - | -> println(b) // error [ promotion-loop.scala:16 ] - | ^ + |Could not verify that the method argument is transitively initialized (Hot). 
It was found to be a non-transitively initialized (Warm) object of type (class B) { outer = the original object of type (class Test) where initialization checking started }. Only transitively initialized arguments may be passed to methods (except constructors). Calling trace:
+ |-> class Test { test => [ promotion-loop.scala:1 ]
+ |   ^
+ |-> println(b) // error [ promotion-loop.scala:16 ]
+ |    ^
  |
- | Promoting the value to hot (transitively initialized) failed due to the following problem:
- | Cannot prove that the field value outer is hot. Found = ThisRef[class Test].
- | Non initialized field(s): value n. Promotion trace:
- | -> val outer = test [ promotion-loop.scala:12 ]
- |    ^^^^^^^^^^^^^^^^
+ |Promoting the value to transitively initialized (Hot) failed due to the following problem:
+ |Could not verify that the field value outer is transitively initialized (Hot). It was found to be the original object of type (class Test) where initialization checking started.
+ |Non initialized field(s): value n. Promotion trace:
+ |-> val outer = test [ promotion-loop.scala:12 ]
+ |   ^^^^^^^^^^^^^^^^
diff --git a/tests/init/neg/promotion-segment3.check b/tests/init/neg/promotion-segment3.check
index 220af18bd29a..a7320b5c3ed3 100644
--- a/tests/init/neg/promotion-segment3.check
+++ b/tests/init/neg/promotion-segment3.check
@@ -1,12 +1,11 @@
 -- Error: tests/init/neg/promotion-segment3.scala:9:6 ------------------------------------------------------------------
 9 | bar(new B) // error
   |     ^^^^^
- | Cannot prove the method argument is hot. Only hot values are safe to leak.
- | Found = Warm[class B] { outer = ThisRef[class A] }. Calling trace:
- | -> class A: [ promotion-segment3.scala:2 ]
- |    ^
- | -> bar(new B) // error [ promotion-segment3.scala:9 ]
- |    ^^^^^
+ |Could not verify that the method argument is transitively initialized (Hot). It was found to be a non-transitively initialized (Warm) object of type (class B) { outer = the original object of type (class A) where initialization checking started }. Only transitively initialized arguments may be passed to methods (except constructors). Calling trace:
+ |-> class A: [ promotion-segment3.scala:2 ]
+ |   ^
+ |-> bar(new B) // error [ promotion-segment3.scala:9 ]
+ |   ^^^^^
  |
- | Promoting the value to hot (transitively initialized) failed due to the following problem:
- | Promotion cancelled as the value contains inner class C.
+ |Promoting the value to transitively initialized (Hot) failed due to the following problem:
+ |Promotion cancelled as the value contains inner class C.
diff --git a/tests/init/neg/secondary-ctor4.check b/tests/init/neg/secondary-ctor4.check
index 1bf1a7286357..e867ba65ded5 100644
--- a/tests/init/neg/secondary-ctor4.check
+++ b/tests/init/neg/secondary-ctor4.check
@@ -1,14 +1,14 @@
 -- Error: tests/init/neg/secondary-ctor4.scala:54:14 -------------------------------------------------------------------
 54 | val c = new C(b, 5) // error
    | ^^^^^^^^^^^
- | Problematic object instantiation: arg 1 is not hot (transitively initialized). Calling trace:
+ | Problematic object instantiation: arg 1 is not transitively initialized (Hot). Calling trace:
 | -> class D { [ secondary-ctor4.scala:52 ]
 |    ^
 | -> val c = new C(b, 5) // error [ secondary-ctor4.scala:54 ]
 |    ^^^^^^^^^^^
 |
 | It leads to the following error during object initialization:
- | Access field value n on a cold object. Calling trace:
+ | Access field value n on an uninitialized (Cold) object. Calling trace:
 | -> def this(b: B, x: Int) = this(b) [ secondary-ctor4.scala:49 ]
 |    ^^^^^^^
 | -> class C(b: B) extends A(b) with T { [ secondary-ctor4.scala:48 ]
@@ -24,7 +24,7 @@
 -- Error: tests/init/neg/secondary-ctor4.scala:42:4 --------------------------------------------------------------------
 42 | new A(new B(new D)) // error
    | ^^^^^^^^^^^^^^^^^^^
- |Problematic object instantiation: the outer M.this and arg 1 are not hot (transitively initialized). Calling trace:
+ |Problematic object instantiation: the outer M.this and arg 1 are not transitively initialized (Hot). Calling trace:
 |-> class N(d: D) extends M(d) { [ secondary-ctor4.scala:59 ]
 |   ^
 |-> def this(d: D) = { [ secondary-ctor4.scala:7 ]
@@ -33,7 +33,7 @@
 | ^^^^^^^^^^^^^^^^^^^
 |
 |It leads to the following error during object initialization:
- |Access field value n on a cold object. Calling trace:
+ |Access field value n on an uninitialized (Cold) object. Calling trace:
 |-> def this(b: B) = { [ secondary-ctor4.scala:17 ]
 |   ^
 |-> Inner().foo() [ secondary-ctor4.scala:26 ]
diff --git a/tests/init/neg/t3273.check b/tests/init/neg/t3273.check
index e548a5964cac..0fe7ea78871c 100644
--- a/tests/init/neg/t3273.check
+++ b/tests/init/neg/t3273.check
@@ -1,28 +1,26 @@
 -- Error: tests/init/neg/t3273.scala:4:42 ------------------------------------------------------------------------------
 4 | val num1: LazyList[Int] = 1 #:: num1.map(_ + 1) // error
   | ^^^^^^^^^^^^^^^
- | Cannot prove the method argument is hot. Only hot values are safe to leak.
- | Found = Fun { this = ThisRef[object Test], owner = object Test }. Calling trace:
- | -> object Test { [ t3273.scala:3 ]
- |    ^
- | -> val num1: LazyList[Int] = 1 #:: num1.map(_ + 1) // error [ t3273.scala:4 ]
- |    ^^^^^^^^^^^^^^^
+ |Could not verify that the method argument is transitively initialized (Hot). It was found to be a function where "this" is (the original object of type (object Test) where initialization checking started). Only transitively initialized arguments may be passed to methods (except constructors). Calling trace:
+ |-> object Test { [ t3273.scala:3 ]
+ |   ^
+ |-> val num1: LazyList[Int] = 1 #:: num1.map(_ + 1) // error [ t3273.scala:4 ]
+ |   ^^^^^^^^^^^^^^^
  |
- | Promoting the value to hot (transitively initialized) failed due to the following problem:
- | Access non-initialized value num1. Promotion trace:
- | -> val num1: LazyList[Int] = 1 #:: num1.map(_ + 1) // error [ t3273.scala:4 ]
- |    ^^^^
+ |Promoting the value to transitively initialized (Hot) failed due to the following problem:
+ |Access non-initialized value num1. Promotion trace:
+ |-> val num1: LazyList[Int] = 1 #:: num1.map(_ + 1) // error [ t3273.scala:4 ]
+ |   ^^^^
 -- Error: tests/init/neg/t3273.scala:5:61 ------------------------------------------------------------------------------
 5 | val num2: LazyList[Int] = 1 #:: num2.iterator.map(_ + 1).to(LazyList) // error
   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- | Cannot prove the method argument is hot. Only hot values are safe to leak.
- | Found = Fun { this = ThisRef[object Test], owner = object Test }. Calling trace:
- | -> object Test { [ t3273.scala:3 ]
- |    ^
- | -> val num2: LazyList[Int] = 1 #:: num2.iterator.map(_ + 1).to(LazyList) // error [ t3273.scala:5 ]
- |    ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |Could not verify that the method argument is transitively initialized (Hot). It was found to be a function where "this" is (the original object of type (object Test) where initialization checking started). Only transitively initialized arguments may be passed to methods (except constructors). Calling trace:
+ |-> object Test { [ t3273.scala:3 ]
+ |   ^
+ |-> val num2: LazyList[Int] = 1 #:: num2.iterator.map(_ + 1).to(LazyList) // error [ t3273.scala:5 ]
+ |   ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  |
- | Promoting the value to hot (transitively initialized) failed due to the following problem:
- | Access non-initialized value num2. Promotion trace:
- | -> val num2: LazyList[Int] = 1 #:: num2.iterator.map(_ + 1).to(LazyList) // error [ t3273.scala:5 ]
- |    ^^^^
+ |Promoting the value to transitively initialized (Hot) failed due to the following problem:
+ |Access non-initialized value num2. Promotion trace:
+ |-> val num2: LazyList[Int] = 1 #:: num2.iterator.map(_ + 1).to(LazyList) // error [ t3273.scala:5 ]
+ |   ^^^^
diff --git a/tests/init/neg/unsound1.check b/tests/init/neg/unsound1.check
index c3057a6a6067..d114ba072db6 100644
--- a/tests/init/neg/unsound1.check
+++ b/tests/init/neg/unsound1.check
@@ -1,7 +1,7 @@
 -- Error: tests/init/neg/unsound1.scala:2:35 ---------------------------------------------------------------------------
 2 | if (m > 0) println(foo(m - 1).a2.n) // error
   | ^^^^^^^^^^^^^^^
- | Access field variable n on a cold object. Calling trace:
+ | Access field variable n on an uninitialized (Cold) object. Calling trace:
 | -> class A(m: Int) { [ unsound1.scala:1 ]
 |    ^
 | -> if (m > 0) println(foo(m - 1).a2.n) // error [ unsound1.scala:2 ]
diff --git a/tests/init/neg/unsound2.check b/tests/init/neg/unsound2.check
index a90b16c8bf71..69d1278e94df 100644
--- a/tests/init/neg/unsound2.check
+++ b/tests/init/neg/unsound2.check
@@ -1,7 +1,7 @@
 -- Error: tests/init/neg/unsound2.scala:5:26 ---------------------------------------------------------------------------
 5 | def getN: Int = a.n // error
   | ^^^
- | Access field value n on a cold object. Calling trace:
+ | Access field value n on an uninitialized (Cold) object. Calling trace:
 | -> case class A(x: Int) { [ unsound2.scala:1 ]
 |    ^
 | -> println(foo(x).getB) [ unsound2.scala:8 ]
diff --git a/tests/init/neg/unsound3.check b/tests/init/neg/unsound3.check
index d62b97e1abaf..c32e66272d1a 100644
--- a/tests/init/neg/unsound3.check
+++ b/tests/init/neg/unsound3.check
@@ -1,7 +1,7 @@
 -- Error: tests/init/neg/unsound3.scala:10:38 --------------------------------------------------------------------------
 10 | if (x < 12) then foo().getC().b else newB // error
    | ^^^^^^^^^^^^^^
- | Access field value b on a cold object. Calling trace:
+ | Access field value b on an uninitialized (Cold) object. Calling trace:
 | -> class C { [ unsound3.scala:5 ]
 |    ^
 | -> val b = foo() [ unsound3.scala:12 ]
diff --git a/tests/init/pos/interleaving-overload.scala b/tests/init/pos/interleaving-overload.scala
new file mode 100755
index 000000000000..260b3538214a
--- /dev/null
+++ b/tests/init/pos/interleaving-overload.scala
@@ -0,0 +1,24 @@
+import scala.language.experimental.clauseInterleaving
+
+class A{
+
+  def f1[T](x: Any)[U] = ???
+  def f1[T](x: Int)[U] = ???
+
+  f1(1)
+  f1("hello")
+  f1[Boolean]("a")[Int]
+  f1[Boolean](1)[Int]
+
+  case class B[U](x: Int)
+  def b[U](x: Int) = B[U](x)
+
+  def f2[T]: [U] => Int => B[U] = [U] => (x: Int) => b[U](x)
+
+  f2(1)
+  f2[Any](1)
+  f2[Any][Any](1)
+
+  b[Int](5)
+
+}
\ No newline at end of file
diff --git a/tests/init/pos/interleaving-params.scala b/tests/init/pos/interleaving-params.scala
new file mode 100755
index 000000000000..9f98b5f35d5b
--- /dev/null
+++ b/tests/init/pos/interleaving-params.scala
@@ -0,0 +1,19 @@
+import scala.collection.mutable.AbstractSet
+import scala.collection.mutable.BitSet
+import scala.language.experimental.clauseInterleaving
+
+class Params{
+  type U
+  def foo[T](x: T)[U >: x.type <: T](using U)[L <: List[U]](l: L): L = ???
+  def aaa(x: U): U = ???
+  def bbb[T <: U](x: U)[U]: U = ???
+
+  foo[AbstractSet[Int]](BitSet())[AbstractSet[Int]](using BitSet())[List[AbstractSet[Int]]](List[AbstractSet[Int]]())
+}
+
+class Param2 extends Params {
+  type U = AbstractSet[Int]
+
+  aaa(BitSet())
+  bbb[BitSet](BitSet())[AbstractSet[Int]]
+}
\ No newline at end of file
diff --git a/tests/init/pos/recursive.scala b/tests/init/pos/recursive.scala
new file mode 100644
index 000000000000..74b658330e03
--- /dev/null
+++ b/tests/init/pos/recursive.scala
@@ -0,0 +1,9 @@
+class A {
+  def p(cb: Int => Int): Int = cb(0)
+
+  val q: List[Int] = {
+    def f(x: Int): Int => Int = y => p(f(y))
+    List(1, 2).map(f(3))
+  }
+  val n: Int = 4
+}
\ No newline at end of file
diff --git a/tests/init/pos/self-ref.scala b/tests/init/pos/self-ref.scala
new file mode 100644
index 000000000000..1a9f199b9f7a
--- /dev/null
+++ b/tests/init/pos/self-ref.scala
@@ -0,0 +1,9 @@
+class A {
+  def foo(a: Int) = {
+    lazy val x: Int = if (a == 0) x else 0
+    println(x)
+  }
+  foo(0)
+
+  val y = 5
+}
diff --git a/tests/neg-custom-args/boxmap.scala b/tests/neg-custom-args/boxmap.scala
index e66b0a8ec808..1696ac3505e4 100644
--- a/tests/neg-custom-args/boxmap.scala
+++ b/tests/neg-custom-args/boxmap.scala
@@ -1,5 +1,5 @@
 import annotation.retains
-type Top = Any @retains(caps.*)
+type Top = Any @retains(caps.cap)
 
 type Box[+T <: Top] = ([K <: Top] -> (T => K) -> K)
 
@@ -16,6 +16,6 @@ def test[A <: Top, B <: Top] =
   def lazymap[A <: Top, B <: Top](b: Box[A])(f: A => B) =
     () => b[Box[B]]((x: A) => box(f(x)))
   val x0: (b: Box[A]) -> (f: A => B) -> (() -> Box[B]) = lazymap[A, B] // error
-  val x: (b: Box[A]) -> (f: A => B) -> {b, f} (() -> Box[B]) = lazymap[A, B] // works
-  val y: (b: Box[A]) -> (f: A => B) -> {*} (() -> Box[B]) = lazymap[A, B] // works
+  val x: (b: Box[A]) -> (f: A => B) -> (() ->{b, f} Box[B]) = lazymap[A, B] // works
+  val y: (b: Box[A]) -> (f: A => B) -> (() ->{cap} Box[B]) = lazymap[A, B] // works
   ()
diff --git a/tests/neg-custom-args/capt-wf.scala b/tests/neg-custom-args/capt-wf.scala
index 3bd80e0d0f68..67e1bc9906fe 100644
--- a/tests/neg-custom-args/capt-wf.scala
+++ b/tests/neg-custom-args/capt-wf.scala
@@ -1,35 +1,35 @@
 class C
-type Cap = {*} C
+type Cap = C^
 object foo
 
 def test(c: Cap, other: String): Unit =
-  val x1: {*} C = ??? // OK
-  val x2: {other} C = ??? // error: cs is empty
+  val x1: C^ = ??? // OK
+  val x2: C^{other} = ??? // error: cs is empty
   val s1 = () => "abc"
-  val x3: {s1} C = ??? // error: cs is empty
+  val x3: C^{s1} = ??? // error: cs is empty
   val x3a: () -> String = s1
   val s2 = () => if x1 == null then "" else "abc"
-  val x4: {s2} C = ??? // OK
-  val x5: {c, c} C = ??? // error: redundant
-  val x6: {c} {c} C = ??? // error: redundant
-  val x7: {c} Cap = ??? // error: redundant
-  val x8: {*} {c} C = ??? // OK
-  val x9: {c, *} C = ??? // error: redundant
-  val x10: {*, c} C = ??? // error: redundant
+  val x4: C^{s2} = ??? // OK
+  val x5: C^{c, c} = ??? // error: redundant
+  // val x6: C^{c}^{c} = ??? // would be syntax error
+  val x7: Cap^{c} = ??? // error: redundant
+  // val x8: C^{c}^{cap} = ??? // would be syntax error
+  val x9: C^{c, cap} = ??? // error: redundant
+  val x10: C^{cap, c} = ??? // error: redundant
 
   def even(n: Int): Boolean = if n == 0 then true else odd(n - 1)
   def odd(n: Int): Boolean = if n == 1 then true else even(n - 1)
   val e1 = even
   val o1 = odd
-  val y1: {e1} String = ??? // error cs is empty
-  val y2: {o1} String = ??? // error cs is empty
+  val y1: String^{e1} = ??? // error cs is empty
+  val y2: String^{o1} = ??? // error cs is empty
 
   lazy val ev: (Int -> Boolean) = (n: Int) =>
     lazy val od: (Int -> Boolean) = (n: Int) =>
       if n == 1 then true else ev(n - 1)
     if n == 0 then true else od(n - 1)
-  val y3: {ev} String = ??? // error cs is empty
+  val y3: String^{ev} = ??? // error cs is empty
 
   ()
\ No newline at end of file
diff --git a/tests/neg-custom-args/captures/boundschecks.scala b/tests/neg-custom-args/captures/boundschecks.scala
index cf4eab28f19d..766d89d2f37b 100644
--- a/tests/neg-custom-args/captures/boundschecks.scala
+++ b/tests/neg-custom-args/captures/boundschecks.scala
@@ -6,13 +6,13 @@ object test {
 
   class C[X <: Tree](x: X)
 
-  def foo(t: {*} Tree) =
+  def foo(t: Tree^) =
     f(t) // error
-    f[{*} Tree](t) // error
+    f[Tree^](t) // error
     f[Tree](t) // error
     val c1 = C(t) // error
-    val c2 = C[{*} Tree](t) // error
+    val c2 = C[Tree^](t) // error
     val c3 = C[Tree](t) // error
 
-  val foo: C[{*} Tree] = ???
+  val foo: C[Tree^] = ???
 }
diff --git a/tests/neg-custom-args/captures/boundschecks2.scala b/tests/neg-custom-args/captures/boundschecks2.scala
index f6927b04931b..923758d722f9 100644
--- a/tests/neg-custom-args/captures/boundschecks2.scala
+++ b/tests/neg-custom-args/captures/boundschecks2.scala
@@ -6,8 +6,8 @@ object test {
 
   class C[X <: Tree](x: X)
 
-  val foo: C[{*} Tree] = ??? // error
-  type T = C[{*} Tree] // error
+  val foo: C[Tree^] = ??? // error
+  type T = C[Tree^] // error
   val bar: T -> T = ???
-  val baz: C[{*} Tree] -> Unit = ??? // error
+  val baz: C[Tree^] -> Unit = ??? // error
 }
diff --git a/tests/neg-custom-args/captures/box-adapt-boxing.scala b/tests/neg-custom-args/captures/box-adapt-boxing.scala
index 7a624d4225fc..ea133051a21a 100644
--- a/tests/neg-custom-args/captures/box-adapt-boxing.scala
+++ b/tests/neg-custom-args/captures/box-adapt-boxing.scala
@@ -1,23 +1,23 @@
 trait Cap
-def main(io: {*} Cap, fs: {*} Cap): Unit = {
-  val test1: {} Unit -> Unit = _ => { // error
-    type Op = [T] -> ({io} T -> Unit) -> Unit
-    val f: ({io} Cap) -> Unit = ???
+def main(io: Cap^, fs: Cap^): Unit = {
+  val test1: Unit -> Unit = _ => { // error
+    type Op = [T] -> (T ->{io} Unit) -> Unit
+    val f: (Cap^{io}) -> Unit = ???
     val op: Op = ???
-    op[{io} Cap](f)
+    op[Cap^{io}](f)
     // expected type of f: {io} (box {io} Cap) -> Unit
     // actual type: ({io} Cap) -> Unit
     // adapting f to the expected type will also
     // charge the environment with {io}
   }
 
-  val test2: {} Unit -> Unit = _ => {
+  val test2: Unit -> Unit = _ => {
     type Box[X] = X
     type Op0[X] = Box[X] -> Unit
     type Op1[X] = Unit -> Box[X]
-    val f: Unit -> ({io} Cap) -> Unit = ???
-    val test: {} Op1[{io} Op0[{io} Cap]] = f
+    val f: Unit -> (Cap^{io}) -> Unit = ???
+    val test: Op1[Op0[Cap^{io}]^{io}]^{} = f
     // expected: {} Unit -> box {io} (box {io} Cap) -> Unit
     // actual: Unit -> ({io} Cap) -> Unit
     //
@@ -31,8 +31,8 @@ def main(io: {*} Cap, fs: {*} Cap): Unit = {
     type Box[X] = X
     type Id[X] = Box[X] -> Unit
     type Op[X] = Unit -> Box[X]
-    val f: Unit -> ({io} Cap) -> Unit = ???
-    val g: Op[{fs} Id[{io} Cap]] = f // error
-    val h: {} Op[{io} Id[{io} Cap]] = f
+    val f: Unit -> (Cap^{io}) -> Unit = ???
+    val g: Op[Id[Cap^{io}]^{fs}] = f // error
+    val h: Op[Id[Cap^{io}]^{io}] = f
   }
 }
diff --git a/tests/neg-custom-args/captures/box-adapt-cases.scala b/tests/neg-custom-args/captures/box-adapt-cases.scala
index 049ff385d73c..7010444eecb5 100644
--- a/tests/neg-custom-args/captures/box-adapt-cases.scala
+++ b/tests/neg-custom-args/captures/box-adapt-cases.scala
@@ -3,27 +3,27 @@ trait Cap { def use(): Int }
 def test1(): Unit = {
   type Id[X] = [T] -> (op: X => T) -> T
 
-  val x: Id[{*} Cap] = ???
-  x(cap => cap.use()) // error
+  val x: Id[Cap^] = ???
+  x(cap => cap.use()) // was error, now OK
 }
 
-def test2(io: {*} Cap): Unit = {
+def test2(io: Cap^{cap}): Unit = {
   type Id[X] = [T] -> (op: X -> T) -> T
 
-  val x: Id[{io} Cap] = ???
+  val x: Id[Cap^{io}] = ???
   x(cap => cap.use()) // error
 }
 
-def test3(io: {*} Cap): Unit = {
-  type Id[X] = [T] -> (op: {io} X -> T) -> T
+def test3(io: Cap^{cap}): Unit = {
+  type Id[X] = [T] -> (op: X ->{io} T) -> T
 
-  val x: Id[{io} Cap] = ???
+  val x: Id[Cap^{io}] = ???
   x(cap => cap.use()) // ok
 }
 
-def test4(io: {*} Cap, fs: {*} Cap): Unit = {
-  type Id[X] = [T] -> (op: {io} X -> T) -> T
+def test4(io: Cap^{cap}, fs: Cap^{cap}): Unit = {
+  type Id[X] = [T] -> (op: X ->{io} T) -> T
 
-  val x: Id[{io, fs} Cap] = ???
+  val x: Id[Cap^{io, fs}] = ???
   x(cap => cap.use()) // error
 }
diff --git a/tests/neg-custom-args/captures/box-adapt-cov.scala b/tests/neg-custom-args/captures/box-adapt-cov.scala
index 2040a1c4654d..96901e81458d 100644
--- a/tests/neg-custom-args/captures/box-adapt-cov.scala
+++ b/tests/neg-custom-args/captures/box-adapt-cov.scala
@@ -1,14 +1,14 @@
 trait Cap
 
-def test1(io: {*} Cap) = {
+def test1(io: Cap^{cap}) = {
   type Op[X] = [T] -> Unit -> X
-  val f: Op[{io} Cap] = ???
-  val x: [T] -> Unit -> ({io} Cap) = f // error
+  val f: Op[Cap^{io}] = ???
+  val x: [T] -> Unit -> Cap^{io} = f // error
 }
 
-def test2(io: {*} Cap) = {
-  type Op[X] = [T] -> Unit -> {io} X
-  val f: Op[{io} Cap] = ???
-  val x: Unit -> ({io} Cap) = f[Unit] // error
-  val x1: {io} Unit -> ({io} Cap) = f[Unit] // ok
+def test2(io: Cap^{cap}) = {
+  type Op[X] = [T] -> Unit -> X^{io}
+  val f: Op[Cap^{io}] = ???
+  val x: Unit -> Cap^{io} = f[Unit] // error
+  val x1: Unit ->{io} Cap^{io} = f[Unit] // ok
 }
diff --git a/tests/neg-custom-args/captures/box-adapt-cs.scala b/tests/neg-custom-args/captures/box-adapt-cs.scala
index e35388efd203..a39ed0200151 100644
--- a/tests/neg-custom-args/captures/box-adapt-cs.scala
+++ b/tests/neg-custom-args/captures/box-adapt-cs.scala
@@ -1,19 +1,17 @@
 trait Cap { def use(): Int }
 
-def test1(io: {*} Cap): Unit = {
-  type Id[X] = [T] -> (op: {io} X -> T) -> T
+def test1(io: Cap^{cap}): Unit = {
+  type Id[X] = [T] -> (op: X ->{io} T) -> T
 
-  val x: Id[{io} Cap] = ???
-  val f: ({*} Cap) -> Unit = ???
+  val x: Id[Cap^{io}] = ???
+  val f: (Cap^{cap}) -> Unit = ???
   x(f) // ok
-  // actual: {*} Cap -> Unit
-  // expected: {io} box {io} Cap -> Unit
 }
 
-def test2(io: {*} Cap): Unit = {
-  type Id[X] = [T] -> (op: {*} X -> T) -> T
+def test2(io: Cap^{cap}): Unit = {
+  type Id[X] = [T] -> (op: X => T) -> T
 
-  val x: Id[{*} Cap] = ???
-  val f: ({io} Cap) -> Unit = ???
+  val x: Id[Cap^] = ???
+  val f: Cap^{io} -> Unit = ???
   x(f) // error
 }
diff --git a/tests/neg-custom-args/captures/box-adapt-depfun.scala b/tests/neg-custom-args/captures/box-adapt-depfun.scala
index 294e2c33f7fa..9416ffa040ab 100644
--- a/tests/neg-custom-args/captures/box-adapt-depfun.scala
+++ b/tests/neg-custom-args/captures/box-adapt-depfun.scala
@@ -1,23 +1,23 @@
 trait Cap { def use(): Int }
 
-def test1(io: {*} Cap): Unit = {
-  type Id[X] = [T] -> (op: {io} X -> T) -> T
+def test1(io: Cap^): Unit = {
+  type Id[X] = [T] -> (op: X ->{io} T) -> T
 
-  val x: Id[{io} Cap] = ???
+  val x: Id[Cap]^{io} = ???
   x(cap => cap.use()) // ok
 }
 
-def test2(io: {*} Cap): Unit = {
-  type Id[X] = [T] -> (op: {io} (x: X) -> T) -> T
+def test2(io: Cap^): Unit = {
+  type Id[X] = [T] -> (op: (x: X) ->{io} T) -> T
 
-  val x: Id[{io} Cap] = ???
+  val x: Id[Cap^{io}] = ???
   x(cap => cap.use())
   // should work when the expected type is a dependent function
 }
 
-def test3(io: {*} Cap): Unit = {
-  type Id[X] = [T] -> (op: {} (x: X) -> T) -> T
+def test3(io: Cap^{cap}): Unit = {
+  type Id[X] = [T] -> (op: (x: X) ->{} T) -> T
 
-  val x: Id[{io} Cap] = ???
+  val x: Id[Cap^{io}] = ???
   x(cap => cap.use()) // error
 }
diff --git a/tests/neg-custom-args/captures/box-adapt-typefun.scala b/tests/neg-custom-args/captures/box-adapt-typefun.scala
index b14b07e72e9b..65a06cd68ed9 100644
--- a/tests/neg-custom-args/captures/box-adapt-typefun.scala
+++ b/tests/neg-custom-args/captures/box-adapt-typefun.scala
@@ -1,13 +1,13 @@
 trait Cap { def use(): Int }
 
-def test1(io: {*} Cap): Unit = {
+def test1(io: Cap^{cap}): Unit = {
   type Op[X] = [T] -> X -> Unit
-  val f: [T] -> ({io} Cap) -> Unit = ???
-  val op: Op[{io} Cap] = f // error
+  val f: [T] -> (Cap^{io}) -> Unit = ???
+  val op: Op[Cap^{io}] = f // error
 }
 
-def test2(io: {*} Cap): Unit = {
+def test2(io: Cap^{cap}): Unit = {
   type Lazy[X] = [T] -> Unit -> X
-  val f: Lazy[{io} Cap] = ???
-  val test: [T] -> Unit -> ({io} Cap) = f // error
+  val f: Lazy[Cap^{io}] = ???
+  val test: [T] -> Unit -> (Cap^{io}) = f // error
 }
diff --git a/tests/neg-custom-args/captures/byname.check b/tests/neg-custom-args/captures/byname.check
index 90cf6c145c33..b1d8fb3b5404 100644
--- a/tests/neg-custom-args/captures/byname.check
+++ b/tests/neg-custom-args/captures/byname.check
@@ -1,20 +1,14 @@
--- Warning: tests/neg-custom-args/captures/byname.scala:17:18 ----------------------------------------------------------
-17 | def h(x: {cap1} -> I) = x // warning
-   | ^
-   | Style: by-name `->` should immediately follow closing `}` of capture set
-   | to avoid confusion with function type.
-   | That is, `{c}-> T` instead of `{c} -> T`.
 -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/byname.scala:10:6 ----------------------------------------
 10 | h(f2()) // error
    | ^^^^
-  | Found: {cap1} (x$0: Int) -> Int
-  | Required: {cap2} (x$0: Int) -> Int
+  | Found: (x$0: Int) ->{cap1} Int
+  | Required: (x$0: Int) ->{cap2} Int
   |
   | longer explanation available when compiling with `-explain`
 -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/byname.scala:19:5 ----------------------------------------
 19 | h(g()) // error
    | ^^^
-  | Found: {cap2} () ?-> I
-  | Required: {cap1} () ?-> I
+  | Found: () ?->{cap2} I
+  | Required: () ?->{cap1} I
   |
   | longer explanation available when compiling with `-explain`
diff --git a/tests/neg-custom-args/captures/byname.scala b/tests/neg-custom-args/captures/byname.scala
index 1838647f2899..ac13174eb4f4 100644
--- a/tests/neg-custom-args/captures/byname.scala
+++ b/tests/neg-custom-args/captures/byname.scala
@@ -5,16 +5,16 @@ def test(cap1: Cap, cap2: Cap) =
   def g(x: Int) = if cap2 == cap2 then 1 else x
   def g2(x: Int) = if cap1 == cap1 then 1 else x
   def f2() = if cap1 == cap1 then g2 else g2
-  def h(ff: => {cap2} Int -> Int) = ff
+  def h(ff: => Int ->{cap2} Int) = ff
   h(f()) // ok
   h(f2()) // error
 
 class I
 
-def test2(cap1: Cap, cap2: Cap): {cap1} I =
+def test2(cap1: Cap, cap2: Cap): I^{cap1} =
   def f() = if cap1 == cap1 then I() else I()
   def g() = if cap2 == cap2 then I() else I()
-  def h(x: {cap1} -> I) = x // warning
+  def h(x: ->{cap1} I) = x // ok
   h(f()) // OK
   h(g()) // error
diff --git a/tests/neg-custom-args/captures/capt-depfun.scala b/tests/neg-custom-args/captures/capt-depfun.scala
index c01eed7c4b25..20226b239198 100644
--- a/tests/neg-custom-args/captures/capt-depfun.scala
+++ b/tests/neg-custom-args/captures/capt-depfun.scala
@@ -1,9 +1,9 @@
 import annotation.retains
 class C
-type Cap = C @retains(caps.*)
+type Cap = C @retains(caps.cap)
 
 class Str
 
 def f(y: Cap, z: Cap) =
   def g(): C @retains(y, z) = ???
   val ac: ((x: Cap) => Str @retains(x) => Str @retains(x)) = ???
-  val dc: (({y, z} Str) => {y, z} Str) = ac(g()) // error
+  val dc: ((Str^{y, z}) => Str^{y, z}) = ac(g()) // error
diff --git a/tests/neg-custom-args/captures/capt-depfun2.scala b/tests/neg-custom-args/captures/capt-depfun2.scala
index 52dd74aabf9f..cb4bc5f9634d 100644
--- a/tests/neg-custom-args/captures/capt-depfun2.scala
+++ b/tests/neg-custom-args/captures/capt-depfun2.scala
@@ -1,6 +1,6 @@
 import annotation.retains
 class C
-type Cap = C @retains(caps.*)
+type Cap = C @retains(caps.cap)
 
 class Str
 def f(y: Cap, z: Cap) =
diff --git a/tests/neg-custom-args/captures/capt-env.scala b/tests/neg-custom-args/captures/capt-env.scala
index 52fa4abfdaa8..6602678af167 100644
--- a/tests/neg-custom-args/captures/capt-env.scala
+++ b/tests/neg-custom-args/captures/capt-env.scala
@@ -1,5 +1,5 @@
 class C
-type Cap = {*} C
+type Cap = C^
 
 class Pair[+A, +B](x: A, y: B):
   def fst: A = x
diff --git a/tests/neg-custom-args/captures/capt-test.scala b/tests/neg-custom-args/captures/capt-test.scala
index 1799fc5073ca..f14951f410c4 100644
--- a/tests/neg-custom-args/captures/capt-test.scala
+++ b/tests/neg-custom-args/captures/capt-test.scala
@@ -2,8 +2,8 @@ import annotation.retains
 import language.experimental.erasedDefinitions
 
 class CT[E <: Exception]
-type CanThrow[E <: Exception] = CT[E] @retains(caps.*)
-type Top = Any @retains(caps.*)
+type CanThrow[E <: Exception] = CT[E] @retains(caps.cap)
+type Top = Any @retains(caps.cap)
 
 infix type throws[R, E <: Exception] = (erased CanThrow[E]) ?=> R
 
@@ -14,14 +14,14 @@ def raise[E <: Exception](e: E): Nothing throws E = throw e
 def foo(x: Boolean): Int throws Fail =
   if x then 1 else raise(Fail())
 
-def handle[E <: Exception, R <: Top](op: (CanThrow[E]) => R)(handler: E => R): R =
+def handle[E <: Exception, sealed R <: Top](op: (CanThrow[E]) => R)(handler: E => R): R =
   val x: CanThrow[E] = ???
   try op(x)
   catch case ex: E => handler(ex)
 
 def test: Unit =
-  val b = handle[Exception, () => Nothing] {
+  val b = handle[Exception, () => Nothing] { // error
     (x: CanThrow[Exception]) => () => raise(new Exception)(using x)
-  } { // error
+  } {
     (ex: Exception) => ???
   }
diff --git a/tests/neg-custom-args/captures/capt-wf-typer.scala b/tests/neg-custom-args/captures/capt-wf-typer.scala
index 4fc50caed1f7..09b2841d3c77 100644
--- a/tests/neg-custom-args/captures/capt-wf-typer.scala
+++ b/tests/neg-custom-args/captures/capt-wf-typer.scala
@@ -1,11 +1,11 @@
 import annotation.retains
 class C
-type Cap = {*} C
+type Cap = C^
 
 object foo
 
 def test(c: Cap, other: String): Unit =
-  val x7: {c} String = ??? // OK
+  val x7: String^{c} = ??? // OK
   val x8: String @retains(x7 + x7) = ??? // error
   val x9: String @retains(foo) = ??? // error
   ()
\ No newline at end of file
diff --git a/tests/neg-custom-args/captures/capt-wf2.scala b/tests/neg-custom-args/captures/capt-wf2.scala
index ddde535fcab0..6c65e0dc77f7 100644
--- a/tests/neg-custom-args/captures/capt-wf2.scala
+++ b/tests/neg-custom-args/captures/capt-wf2.scala
@@ -1,5 +1,5 @@
 @annotation.capability class C
 
 def test(c: C) =
-  var x: {c} Any = ???
-  val y: {x} Any = x // error
+  var x: Any^{c} = ???
+  val y: Any^{x} = x // error
diff --git a/tests/neg-custom-args/captures/capt1.check b/tests/neg-custom-args/captures/capt1.check
index 51ed3e6736cf..85d3b2a7ddcb 100644
--- a/tests/neg-custom-args/captures/capt1.check
+++ b/tests/neg-custom-args/captures/capt1.check
@@ -1,21 +1,21 @@
 -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/capt1.scala:4:2 ------------------------------------------
 4 | () => if x == null then y else y // error
   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-  | Found: {x} () -> ? C
+  | Found: () ->{x} C^?
   | Required: () -> C
   |
   | longer explanation available when compiling with `-explain`
 -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/capt1.scala:7:2 ------------------------------------------
 7 | () => if x == null then y else y // error
   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-  | Found: {x} () -> ? C
+  | Found: () ->{x} C^?
   | Required: Matchable
   |
   | longer explanation available when compiling with `-explain`
 -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/capt1.scala:14:2 -----------------------------------------
 14 | def f(y: Int) = if x == null then y else y // error
    | ^
-   | Found: {x} Int -> Int
+   | Found: Int ->{x} Int
    | Required: Matchable
 15 | f
    |
@@ -23,7 +23,7 @@
 -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/capt1.scala:21:2 -----------------------------------------
 21 | class F(y: Int) extends A: // error
    | ^
-   | Found: {x} A
+   | Found: A^{x}
    | Required: A
 22 | def m() = if x == null then y else y
 23 | F(22)
@@ -32,7 +32,7 @@
 -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/capt1.scala:26:2 -----------------------------------------
 26 | new A: // error
    | ^
-   | Found: {x} A
+   | Found: A^{x}
    | Required: A
 27 | def m() = if x == null then y else y
    |
@@ -40,14 +40,7 @@
 -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/capt1.scala:32:24 ----------------------------------------
 32 | val z2 = h[() -> Cap](() => x) // error
    | ^^^^^^^
-   | Found: {x} () -> Cap
-   | Required: () -> box {*} C
-   |
-   | longer explanation available when compiling with `-explain`
--- [E007] Type Mismatch Error: tests/neg-custom-args/captures/capt1.scala:33:5 -----------------------------------------
-33 | (() => C()) // error
-   | ^^^^^^^^^
-   | Found: ? () -> Cap
-   | Required: () -> box {*} C
+   | Found: () ->{x} box C^
+   | Required: () -> box C^
    |
    | longer explanation available when compiling with `-explain`
diff --git a/tests/neg-custom-args/captures/capt1.scala b/tests/neg-custom-args/captures/capt1.scala
index 59ba874b02f5..651184e8d2c9 100644
--- a/tests/neg-custom-args/captures/capt1.scala
+++ b/tests/neg-custom-args/captures/capt1.scala
@@ -1,21 +1,21 @@
 import annotation.retains
 class C
-def f(x: C @retains(caps.*), y: C): () -> C =
+def f(x: C @retains(caps.cap), y: C): () -> C =
   () => if x == null then y else y // error
 
-def g(x: C @retains(caps.*), y: C): Matchable =
+def g(x: C @retains(caps.cap), y: C): Matchable =
   () => if x == null then y else y // error
 
-def h1(x: C @retains(caps.*), y: C): Any =
+def h1(x: C @retains(caps.cap), y: C): Any =
   def f() = if x == null then y else y
   () => f() // ok
 
-def h2(x: C @retains(caps.*)): Matchable =
+def h2(x: C @retains(caps.cap)): Matchable =
   def f(y: Int) = if x == null then y else y // error
   f
 
 class A
-type Cap = C @retains(caps.*)
+type Cap = C @retains(caps.cap)
 
 def h3(x: Cap): A =
   class F(y: Int) extends A: // error
@@ -27,10 +27,10 @@ def h4(x: Cap, y: Int): A =
     def m() = if x == null then y else y
 
 def foo() =
-  val x: C @retains(caps.*) = ???
+  val x: C @retains(caps.cap) = ???
   def h[X](a: X)(b: X) = a
   val z2 = h[() -> Cap](() => x) // error
-  (() => C()) // error
+  (() => C())
   val z3 = h[(() -> Cap) @retains(x)](() => x)(() => C()) // ok
   val z4 = h[(() -> Cap) @retains(x)](() => x)(() => C()) // what was inferred for z3
diff --git a/tests/neg-custom-args/captures/capt2.scala b/tests/neg-custom-args/captures/capt2.scala
index 8b08832dfdb9..cd6f41424a22 100644
--- a/tests/neg-custom-args/captures/capt2.scala
+++ b/tests/neg-custom-args/captures/capt2.scala
@@ -1,9 +1,9 @@
 //import scala.retains
 class C
-type Cap = {*} C
+type Cap = C^
 
-def f1(c: Cap): (() -> {c} C) = () => c // error, but would be OK under capture abbreciations for funciton types
-def f2(c: Cap): ({c} () -> C) = () => c // error
+def f1(c: Cap): (() -> C^{c}) = () => c // error, but would be OK under capture abbreviations for function types
+def f2(c: Cap): (() ->{c} C) = () => c // error
 
 def h5(x: Cap): () -> C =
   f1(x) // error
diff --git a/tests/neg-custom-args/captures/capt3.scala b/tests/neg-custom-args/captures/capt3.scala
index 84164d433029..44a7ffdc6c4a 100644
--- a/tests/neg-custom-args/captures/capt3.scala
+++ b/tests/neg-custom-args/captures/capt3.scala
@@ -1,6 +1,6 @@
 import annotation.retains
 class C
-type Cap = C @retains(caps.*)
+type Cap = C @retains(caps.cap)
 
 def test1() =
   val x: Cap = C()
diff --git a/tests/neg-custom-args/captures/caseclass/Test_2.scala b/tests/neg-custom-args/captures/caseclass/Test_2.scala
index 4eac6a260292..bffc0a295bdc 100644
--- a/tests/neg-custom-args/captures/caseclass/Test_2.scala
+++ b/tests/neg-custom-args/captures/caseclass/Test_2.scala
@@ -2,7 +2,7 @@ def test(c: C) =
   val pure: () -> Unit = () => ()
   val impure: () => Unit = pure
-  val mixed: {c} () -> Unit = pure
+  val mixed: () ->{c} Unit = pure
   val x = Ref(impure)
   val _: Ref = x // error
   val y = x.copy()
@@ -16,10 +16,10 @@ def test(c: C) =
   val yc2: Ref = y2
 
   val x3 = Ref(mixed)
-  val _: {c} Ref = x3
+  val _: Ref^{c} = x3
   val y3 = x3.copy()
-  val yc3: {c} Ref = y3
+  val yc3: Ref^{c} = y3
 
   val y4 = y3 match
     case Ref(xx) => xx
-  val y4c: {x3} () -> Unit = y4
+  val y4c: () ->{x3} Unit = y4
diff --git a/tests/neg-custom-args/captures/cc-depfun.scala b/tests/neg-custom-args/captures/cc-depfun.scala
index c4ef303f4712..106a73dd7ce1 100644
--- a/tests/neg-custom-args/captures/cc-depfun.scala
+++ b/tests/neg-custom-args/captures/cc-depfun.scala
@@ -1,9 +1,9 @@
 trait Cap { def use(): Unit }
 
 def main() = {
-  val f: (io: {*} Cap) -> {} () -> Unit =
+  val f: (io: Cap^) -> () ->{} Unit =
     io => () => io.use() // error
 
-  val g: ({*} Cap) -> {} () -> Unit =
+  val g: (Cap^) -> () ->{} Unit =
     io => () => io.use() // error
 }
diff --git a/tests/neg-custom-args/captures/cc-subst-param-exact.scala b/tests/neg-custom-args/captures/cc-subst-param-exact.scala
new file mode 100644
index 000000000000..35e4acb95fdc
--- /dev/null
+++ b/tests/neg-custom-args/captures/cc-subst-param-exact.scala
@@ -0,0 +1,33 @@
+import language.experimental.captureChecking
+import caps.*
+
+trait Ref[T] { def set(x: T): T }
+def test() = {
+
+  def swap[T](x: Ref[T]^)(y: Ref[T]^{x}): Unit = ???
+  def foo[T](x: Ref[T]^): Unit =
+    swap(x)(x)
+
+  def bar[T](x: () => Ref[T]^)(y: Ref[T]^{x}): Unit =
+    swap(x())(y) // error
+
+  def baz[T](x: Ref[T]^)(y: Ref[T]^{x}): Unit =
+    swap(x)(y)
+}
+
+trait IO
+type Op = () -> Unit
+def test2(c: IO^, f: Op^{c}) = {
+  def run(io: IO^)(op: Op^{io}): Unit = op()
+  run(c)(f)
+
+  def bad(getIO: () => IO^, g: Op^{getIO}): Unit =
+    run(getIO())(g) // error
+}
+
+def test3() = {
+  def run(io: IO^)(op: Op^{io}): Unit = ???
+  val myIO: IO^ = ???
+  val myOp: Op^{myIO} = ???
+  run(myIO)(myOp)
+}
diff --git a/tests/neg-custom-args/captures/cc-this.check b/tests/neg-custom-args/captures/cc-this.check
index 0049f42a5db5..47207f913f1d 100644
--- a/tests/neg-custom-args/captures/cc-this.check
+++ b/tests/neg-custom-args/captures/cc-this.check
@@ -1,7 +1,7 @@
 -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/cc-this.scala:8:15 ---------------------------------------
 8 | val y: C = this // error
   | ^^^^
-  | Found: (C.this : {C.this.x} C)
+  | Found: (C.this : C^{C.this.x})
   | Required: C
   |
   | longer explanation available when compiling with `-explain`
diff --git a/tests/neg-custom-args/captures/cc-this2.check b/tests/neg-custom-args/captures/cc-this2.check
index 086524d307a2..e0df7c857c85 100644
--- a/tests/neg-custom-args/captures/cc-this2.check
+++ b/tests/neg-custom-args/captures/cc-this2.check
@@ -2,5 +2,5 @@
 -- Error: tests/neg-custom-args/captures/cc-this2/D_2.scala:2:6 --------------------------------------------------------
 2 |class D extends C: // error
   |^
-  |reference (scala.caps.* : Any) is not included in allowed capture set {} of pure base class class C
-3 | this: {*} D =>
+  |reference (scala.caps.cap : Any) is not included in allowed capture set {} of pure base class class C
+3 | this: D^ =>
diff --git a/tests/neg-custom-args/captures/cc-this2/D_2.scala b/tests/neg-custom-args/captures/cc-this2/D_2.scala
index 793f3f6353a9..b22e5e456092 100644
--- a/tests/neg-custom-args/captures/cc-this2/D_2.scala
+++ b/tests/neg-custom-args/captures/cc-this2/D_2.scala
@@ -1,3 +1,3 @@
 class D extends C: // error
-  this: {*} D =>
+  this: D^ =>
diff --git a/tests/neg-custom-args/captures/cc-this3.check b/tests/neg-custom-args/captures/cc-this3.check
index 705cdfbc00d7..d57471c6872e 100644
--- a/tests/neg-custom-args/captures/cc-this3.check
+++ b/tests/neg-custom-args/captures/cc-this3.check
@@ -1,14 +1,14 @@
 -- [E058] Type Mismatch Error: tests/neg-custom-args/captures/cc-this3.scala:8:6 ---------------------------------------
 8 |class B extends A: // error
   | ^
-  | illegal inheritance: self type {*} B of class B does not conform to self type {} A
+  | illegal inheritance: self type B^ of class B does not conform to self type A^{}
   | of parent class A
   |
   | longer explanation available when compiling with `-explain`
 -- [E058] Type Mismatch Error: tests/neg-custom-args/captures/cc-this3.scala:11:6 --------------------------------------
 11 |class C(val f: () => Int) extends A // error
    | ^
-   | illegal inheritance: self type {C.this.f} C of class C does not conform to self type {} A
+   | illegal inheritance: self type C^{C.this.f} of class C does not conform to self type A^{}
    | of parent class A
    |
    | longer explanation available when compiling with `-explain`
diff --git a/tests/neg-custom-args/captures/cc-this3.scala b/tests/neg-custom-args/captures/cc-this3.scala
index eeb9606f0c81..25af19dd6c4a 100644
--- a/tests/neg-custom-args/captures/cc-this3.scala
+++ b/tests/neg-custom-args/captures/cc-this3.scala
@@ -6,13 +6,13 @@ class A:
   val x: A = this
 
 class B extends A: // error
-  this: {*} B =>
+  this: B^ =>
 
 class C(val f: () => Int) extends A // error
 
 class A2
 
 class B2 extends A2: // ok
-  this: {*} B2 =>
+  this: B2^ =>
 
 class C2(val f: () => Int) extends A2 // ok
diff --git a/tests/neg-custom-args/captures/cc-this4.check b/tests/neg-custom-args/captures/cc-this4.check
index a54ca8d57f4e..52c06f5bbc30 100644
--- a/tests/neg-custom-args/captures/cc-this4.check
+++ b/tests/neg-custom-args/captures/cc-this4.check
@@ -2,5 +2,5 @@
 1 |open class C: // error
   | ^
   | class C needs an explicitly declared self type since its
-  | inferred self type {} C
+  | inferred self type C^{}
   | is not visible in other compilation units that define subclasses.
diff --git a/tests/neg-custom-args/captures/cc-this5.check b/tests/neg-custom-args/captures/cc-this5.check
index 8cc1ac9ccc5d..84ac97474b80 100644
--- a/tests/neg-custom-args/captures/cc-this5.check
+++ b/tests/neg-custom-args/captures/cc-this5.check
@@ -5,14 +5,14 @@
 -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/cc-this5.scala:21:15 -------------------------------------
 21 | val x: A = this // error
    | ^^^^
-   | Found: (A.this : {c} A)
+   | Found: (A.this : A^{c})
    | Required: A
    |
    | longer explanation available when compiling with `-explain`
 -- [E058] Type Mismatch Error: tests/neg-custom-args/captures/cc-this5.scala:7:9 ---------------------------------------
 7 | object D extends C: // error
   | ^
-  | illegal inheritance: self type {c} D.type of object D does not conform to self type {} C
+  | illegal inheritance: self type D.type^{c} of object D does not conform to self type C^{}
   | of parent class C
   |
   | longer explanation available when compiling with `-explain`
diff --git a/tests/neg-custom-args/captures/cc1.scala b/tests/neg-custom-args/captures/cc1.scala
index 10a9793eabe8..6787b417a3b2 100644
--- a/tests/neg-custom-args/captures/cc1.scala
+++ b/tests/neg-custom-args/captures/cc1.scala
@@ -1,5 +1,5 @@
 import annotation.retains
 object Test:
 
-  def f[A <: Matchable @retains(caps.*)](x: A): Matchable = x // error
+  def f[A <: Matchable @retains(caps.cap)](x: A): Matchable = x // error
diff --git a/tests/neg-custom-args/captures/class-constr.scala b/tests/neg-custom-args/captures/class-constr.scala
index eeedf1043f37..9afb6972ccfa 100644
--- a/tests/neg-custom-args/captures/class-constr.scala
+++ b/tests/neg-custom-args/captures/class-constr.scala
@@ -6,10 +6,10 @@ class C(x: Cap, @constructorOnly y: Cap)
 
 def test(a: Cap, b: Cap) =
   val f = () => C(a, b)
-  val f_ok: {a, b} () -> {a} C = f
-  val f_no1: {a, b} () -> C = f // error
-  val f_no2: {a} () -> {a} C = f // error
-  val f_no3: {b} () -> {a} C = f // error
+  val f_ok: () ->{a, b} C^{a} = f
+  val f_no1: () ->{a, b} C = f // error
+  val f_no2: () ->{a} C^{a} = f // error
+  val f_no3: () ->{a} C^{a} = f // error
 
   class D:
     val xz =
@@ -19,6 +19,6 @@ def test(a: Cap, b: Cap) =
       println(b)
       2
   val d = () => new D()
-  val d_ok1: {a, b} () -> {a, b} D = d
-  val d_ok2: () -> {a, b} D = d // because of function shorthand
-  val d_ok3: {a, b} () -> {b} D = d // error, but should work
+  val d_ok1: () ->{a, b} D^{a, b} = d
+  val d_ok2: () -> D^{a, b} = d // because of function shorthand
+  val d_ok3: () ->{a, b} D^{b} = d // error, but should work
diff --git a/tests/neg-custom-args/captures/class-contra.check b/tests/neg-custom-args/captures/class-contra.check
index 69a5f0097de8..6d4c89f872ad 100644
--- a/tests/neg-custom-args/captures/class-contra.check
+++ b/tests/neg-custom-args/captures/class-contra.check
@@ -1,7 +1,7 @@
 -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/class-contra.scala:12:39 ---------------------------------
-12 | def fun(x: K{val f: {a} T}) = x.setf(a) // error
+12 | def fun(x: K{val f: T^{a}}) = x.setf(a) // error
    | ^
-   | Found: (a : {x, y} T)
+   | Found: (a : T^{x, y})
    | Required: T
    |
    | longer explanation available when compiling with `-explain`
diff --git a/tests/neg-custom-args/captures/class-contra.scala b/tests/neg-custom-args/captures/class-contra.scala
index 270aaf9309a9..210fd4e331f1 100644
--- a/tests/neg-custom-args/captures/class-contra.scala
+++ b/tests/neg-custom-args/captures/class-contra.scala
@@ -1,13 +1,13 @@
 class C
-type Cap = {*} C
+type Cap = C^
 
-class K(val f: {*} T):
-  def setf(x: {f} T) = ???
+class K(val f: T^):
+  def setf(x: T^{f}) = ???
 
 class T
 
 def test(x: Cap, y: Cap) =
-  val a: {x, y} T = ???
-  def fun(x: K{val f: {a} T}) = x.setf(a) // error
+  val a: T^{x, y} = ???
+  def fun(x: K{val f: T^{a}}) = x.setf(a) // error
   ()
\ No newline at end of file
diff --git a/tests/neg-custom-args/captures/classes.scala b/tests/neg-custom-args/captures/classes.scala
index e4c141ea981b..3572e31a6f50 100644
--- a/tests/neg-custom-args/captures/classes.scala
+++ b/tests/neg-custom-args/captures/classes.scala
@@ -1,12 +1,12 @@
 class B
-type Cap = {*} B
+type Cap = B^
 class C0(n: Cap) // was error: class parameter must be a `val`, now OK
 
 class C(val n: Cap):
-  def foo(): {n} B = n
+  def foo(): B^{n} = n
 
 def test(x: Cap, y: Cap) =
   val c0 = C(x)
   val c1: C = c0 // error
   val c2 = if ??? then C(x) else identity(C(y))
-  val c3: {x} C { val n: {x, y} B } = c2 // error
+  val c3: C { val n: B^{x, y} }^{x} = c2 // error
diff --git a/tests/neg-custom-args/captures/ctest.scala b/tests/neg-custom-args/captures/ctest.scala
index 08bec16d8177..ad10b43a7773 100644
--- a/tests/neg-custom-args/captures/ctest.scala
+++ b/tests/neg-custom-args/captures/ctest.scala
@@ -1,6 +1,6 @@
 class CC
-type Cap = {*} CC
+type Cap = CC^
 
 def test(cap1: Cap, cap2: Cap) =
-  var b: List[String => String] = Nil // was error, now OK
-  val bc = b.head // error
+  var b: List[String => String] = Nil // error
+  val bc = b.head // was error, now OK
diff --git a/tests/neg-custom-args/captures/curried-simplified.check b/tests/neg-custom-args/captures/curried-simplified.check
index 5d23a7a4955e..6a792314e4e3 100644
--- a/tests/neg-custom-args/captures/curried-simplified.check
+++ b/tests/neg-custom-args/captures/curried-simplified.check
@@ -1,42 +1,42 @@
 -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/curried-simplified.scala:7:28 ----------------------------
 7 | def y1: () -> () -> Int = x1 // error
   | ^^
-  | Found: {x} () -> {x} () -> Int
+  | Found: () ->? () ->{x} Int
   | Required: () -> () -> Int
   |
   | longer explanation available when compiling with `-explain`
 -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/curried-simplified.scala:9:28 ----------------------------
 9 | def y2: () -> () => Int = x2 // error
   | ^^
-  | Found: {x} () -> {*} () -> Int
+  | Found: () ->{x} () => Int
   | Required: () -> () => Int
   |
   | longer explanation available when compiling with `-explain`
 -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/curried-simplified.scala:11:39 ---------------------------
 11 | def y3: Cap -> Protect[Int -> Int] = x3 // error
    | ^^
-   | Found: ? (x$0: Cap) -> {x$0} Int -> Int
+   | Found: (x$0: Cap) ->? Int ->{x$0} Int
    | Required: Cap -> Protect[Int -> Int]
   |
   | longer explanation available when compiling with `-explain`
--- [E007] Type Mismatch Error: tests/neg-custom-args/captures/curried-simplified.scala:15:33 ---------------------------
-15 | def y5: Cap -> {} Int -> Int = x5 // error
-   | ^^
-   | Found: ? Cap -> {x} Int -> Int
-   | Required: Cap -> {} Int -> Int
+-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/curried-simplified.scala:15:32 ---------------------------
+15 | def y5: Cap -> Int ->{} Int = x5 // error
+   | ^^
+   | Found: Cap ->? Int ->{x} Int
+   | Required: Cap -> Int ->{} Int
   |
   | longer explanation available when compiling with `-explain`
--- [E007] Type Mismatch Error: tests/neg-custom-args/captures/curried-simplified.scala:17:49 ---------------------------
-17 | def y6: Cap -> {} Cap -> Protect[Int -> Int] = x6 // error
-   | ^^
-   | Found: ? (x$0: Cap) -> {x$0} (x$0: Cap) -> {x$0, x$0} Int -> Int
-   | Required: Cap -> {} Cap -> Protect[Int -> Int]
+-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/curried-simplified.scala:17:48 ---------------------------
+17 | def y6: Cap -> Cap ->{} Protect[Int -> Int] = x6 // error
+   | ^^
+   | Found: (x$0: Cap) ->? (x$0: Cap) ->{x$0} Int ->{x$0, x$0} Int
+   | Required: Cap -> Cap ->{} Protect[Int -> Int]
   |
   | longer explanation available when compiling with `-explain`
--- [E007] Type Mismatch Error: tests/neg-custom-args/captures/curried-simplified.scala:19:49 ---------------------------
-19 | def y7: Cap -> Protect[Cap -> {} Int -> Int] = x7 // error
-   | ^^
-   | Found: ? (x$0: Cap) -> {x$0} (x: Cap) -> {x$0, x} Int -> Int
-   | Required: Cap -> Protect[Cap -> {} Int -> Int]
+-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/curried-simplified.scala:19:48 ---------------------------
+19 | def y7: Cap -> Protect[Cap -> Int ->{} Int] = x7 // error
+   | ^^
+   | Found: (x$0: Cap) ->? (x: Cap) ->{x$0} Int ->{x$0, x} Int
+   | Required: Cap -> Protect[Cap -> Int ->{} Int]
   |
   | longer explanation available when compiling with `-explain`
diff --git a/tests/neg-custom-args/captures/curried-simplified.scala b/tests/neg-custom-args/captures/curried-simplified.scala
index 25b23370d154..988cf7c11c45 100644
--- a/tests/neg-custom-args/captures/curried-simplified.scala
+++ b/tests/neg-custom-args/captures/curried-simplified.scala
@@ -3,19 +3,19 @@ type Protect[T] = T
 
 def test(x: Cap, y: Cap) =
 
-  def x1: {x} () -> () -> Int = ???
+  def x1: () -> () ->{x} Int = ???
   def y1: () -> () -> Int = x1 // error
-  def x2: {x} () -> () => Int = ???
+  def x2: () ->{x} () => Int = ???
   def y2: () -> () => Int = x2 // error
 
   def x3: Cap -> Int -> Int = ???
   def y3: Cap -> Protect[Int -> Int] = x3 // error
 
   def x4: Cap -> Protect[Int -> Int] = ???
-  def y4: Cap -> {} Int -> Int = x4 // ok
+  def y4: Cap -> Int ->{} Int = x4 // ok
 
-  def x5: Cap -> {x} Int -> Int = ???
-  def y5: Cap -> {} Int -> Int = x5 // error
+  def x5: Cap -> Int ->{x} Int = ???
+  def y5: Cap -> Int ->{} Int = x5 // error
 
   def x6: Cap -> Cap -> Int -> Int = ???
-  def y6: Cap -> {} Cap -> Protect[Int -> Int] = x6 // error
+  def y6: Cap -> Cap ->{} Protect[Int -> Int] = x6 // error
 
   def x7: Cap -> (x: Cap) -> Int -> Int = ???
-  def y7: Cap -> Protect[Cap -> {} Int -> Int] = x7 // error
+  def y7: Cap -> Protect[Cap -> Int ->{} Int] = x7 // error
diff --git a/tests/neg-custom-args/captures/emptyref-in-self.scala b/tests/neg-custom-args/captures/emptyref-in-self.scala
index 60f782deca6b..8bac47212f5b 100644
--- a/tests/neg-custom-args/captures/emptyref-in-self.scala
+++ b/tests/neg-custom-args/captures/emptyref-in-self.scala
@@ -1,3 +1,3 @@
-class Zip[A, B](underlying: String, other: {*} String) {
-  this: {underlying, other} Zip[A, B] => // error
+class Zip[A, B](underlying: String, other: String^) {
+  this: Zip[A, B]^{underlying, other} => // error
 }
diff --git a/tests/neg-custom-args/captures/eta.check b/tests/neg-custom-args/captures/eta.check
index ebd63855181b..a77d66382095 100644
--- a/tests/neg-custom-args/captures/eta.check
+++ b/tests/neg-custom-args/captures/eta.check
@@ -1,14 +1,14 @@
 -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/eta.scala:4:9 --------------------------------------------
 4 | g // error
   | ^
-  | Found: ? () -> A
-  | Required: () -> {f} Proc
+  | Found: () ->? A
+  | Required: () -> Proc^{f}
   |
   | longer explanation available when compiling with `-explain`
 -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/eta.scala:6:14 -------------------------------------------
 6 | bar( () => f ) // error
   | ^^^^^^^
-  | Found: {f} () -> box {f} () -> Unit
-  | Required: () -> box ? () -> Unit
+  | Found: () ->{f} box () ->{f} Unit
+  | Required: () -> box () ->? Unit
  |
  | longer explanation available when compiling with `-explain`
diff --git a/tests/neg-custom-args/captures/eta.scala b/tests/neg-custom-args/captures/eta.scala
index 3d9d759d2203..5cc0196a04c6 100644
--- a/tests/neg-custom-args/captures/eta.scala
+++ b/tests/neg-custom-args/captures/eta.scala
@@ -1,7 +1,7 @@
-  type Proc = (() -> Unit)
-  def foo(f: {*} Proc): {} Proc =
-    def bar[A <: {f} Proc](g: () -> A): () -> {f} Proc =
+  type Proc = () -> Unit
+  def foo(f: Proc^): Proc^{} =
+    def bar[A <: Proc^{f}](g: () -> A): () -> Proc^{f} =
       g // error
-    val stowaway: () -> {f} Proc =
+    val stowaway: () -> Proc^{f} =
       bar( () => f ) // error
     () => { stowaway.apply().apply() }
\ No newline at end of file
diff --git a/tests/neg-custom-args/captures/exception-definitions.check b/tests/neg-custom-args/captures/exception-definitions.check
index aca5d9217d64..8dca91bc8e43 100644
--- a/tests/neg-custom-args/captures/exception-definitions.check
+++ b/tests/neg-custom-args/captures/exception-definitions.check
@@ -1,17 +1,17 @@
 -- Error: tests/neg-custom-args/captures/exception-definitions.scala:2:6 -----------------------------------------------
 2 |class Err extends Exception: // error
   |^
-  |reference (scala.caps.* : Any) is not included in allowed capture set {} of pure base class class Throwable
-3 | self: {*} Err =>
+  |reference (scala.caps.cap : Any) is not included in allowed capture set {} of pure base class class Throwable
+3 | self: Err^ =>
 -- Error: tests/neg-custom-args/captures/exception-definitions.scala:10:6 ----------------------------------------------
-10 |class Err4(c: {*} Any) extends AnyVal // error
-   |^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-   |reference (Err4.this.c : {*} Any) is not included in allowed capture set {} of pure base class class AnyVal
+10 |class Err4(c: Any^) extends AnyVal // error
+   |^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |reference (Err4.this.c : Any^) is not included in allowed capture set {} of pure base class class AnyVal
 -- Error: tests/neg-custom-args/captures/exception-definitions.scala:7:12 ----------------------------------------------
 7 | val x = c // error
   | ^
-  |(c : {*} Any) cannot be referenced here; it is not included in the allowed capture set {} of pure base class class Throwable
+  |(c : Any^) cannot be referenced here; it is not included in the allowed capture set {} of pure base class class Throwable
 -- Error: tests/neg-custom-args/captures/exception-definitions.scala:8:8 -----------------------------------------------
-8 | class Err3(c: {*} Any) extends Exception // error
-  | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-  | reference (Err3.this.c : {*} Any) is not included in allowed capture set {} of pure base class class Throwable
+8 | class Err3(c: Any^) extends Exception // error
+  | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+  | reference (Err3.this.c : Any^) is not included in allowed capture set {} of pure base class class Throwable
diff --git a/tests/neg-custom-args/captures/exception-definitions.scala b/tests/neg-custom-args/captures/exception-definitions.scala
index 9f3539b7febf..996f64ae4bd1 100644
--- a/tests/neg-custom-args/captures/exception-definitions.scala
+++ b/tests/neg-custom-args/captures/exception-definitions.scala
@@ -1,12 +1,12 @@
 class Err extends Exception: // error
-  self: {*} Err =>
+  self: Err^ =>
 
-def test(c: {*} Any) =
+def test(c: Any^) =
   class Err2 extends Exception:
     val x = c // error
 
-  class Err3(c: {*} Any) extends Exception // error
+  class Err3(c: Any^) extends Exception // error
 
-class Err4(c: {*} Any) extends AnyVal // error
+class Err4(c: Any^) extends AnyVal // error
diff --git a/tests/neg-custom-args/captures/filevar.scala b/tests/neg-custom-args/captures/filevar.scala
new file mode 100644
index 000000000000..830563f51de3
--- /dev/null
+++ b/tests/neg-custom-args/captures/filevar.scala
@@ -0,0 +1,18 @@
+import language.experimental.captureChecking
+import compiletime.uninitialized
+
+class File:
+  def write(x: String): Unit = ???
+
+class Service:
+  var file: File^ = uninitialized // error
+  def log = file.write("log")
+
+def withFile[T](op: (f: File^) => T): T =
+  op(new File)
+
+def test =
+  withFile: f =>
+    val o = Service()
+    o.file = f
+    o.log
diff --git a/tests/neg-custom-args/captures/heal-tparam-cs.scala b/tests/neg-custom-args/captures/heal-tparam-cs.scala
index 3ff34d0a8a42..58d12f8b6ce5 100644
--- a/tests/neg-custom-args/captures/heal-tparam-cs.scala
+++ b/tests/neg-custom-args/captures/heal-tparam-cs.scala
@@ -2,31 +2,31 @@ import language.experimental.captureChecking
 
 trait Cap { def use(): Unit }
 
-def localCap[T](op: (cap: {*} Cap) => T): T = ???
+def localCap[sealed T](op: (cap: Cap^{cap}) => T): T = ???
 
-def main(io: {*} Cap, net: {*} Cap): Unit = {
+def main(io: Cap^{cap}, net: Cap^{cap}): Unit = {
   val test1 = localCap { cap => // error
     () => { cap.use() }
   }
 
-  val test2: (cap: {*} Cap) -> {cap} () -> Unit =
+  val test2: (cap: Cap^{cap}) -> () ->{cap} Unit =
     localCap { cap => // should work
-      (cap1: {*} Cap) => () => { cap1.use() }
+      (cap1: Cap^{cap}) => () => { cap1.use() }
     }
 
-  val test3: (cap: {io} Cap) -> {io} () -> Unit =
+  val test3: (cap: Cap^{io}) -> () ->{io} Unit =
     localCap { cap => // should work
-      (cap1: {io} Cap) => () => { cap1.use() }
+      (cap1: Cap^{io}) => () => { cap1.use() }
    }
 
-  val test4: (cap: {io} Cap) -> {net} () -> Unit =
+  val test4: (cap: Cap^{io}) -> () ->{net} Unit =
     localCap { cap => // error
-      (cap1: {io} Cap) => () => { cap1.use() }
+      (cap1: Cap^{io}) => () => { cap1.use() }
    }
 
-  def localCap2[T](op: (cap: {io} Cap) => T): T = ???
+  def localCap2[sealed T](op: (cap: Cap^{io}) => T): T = ???
 
-  val test5: {io} () -> Unit =
+  val test5: () ->{io} Unit =
     localCap2 { cap => // ok
       () => { cap.use() }
     }
 }
diff --git a/tests/neg-custom-args/captures/i15049.scala b/tests/neg-custom-args/captures/i15049.scala
index 4e32172c025d..d978e0e1ad0f 100644
--- a/tests/neg-custom-args/captures/i15049.scala
+++ b/tests/neg-custom-args/captures/i15049.scala
@@ -1,10 +1,10 @@
 class Session:
   def request = "Response"
 class Foo:
-  private val session: {*} Session = new Session
-  def withSession[T](f: ({*} Session) => T): T = f(session)
+  private val session: Session^{cap} = new Session
+  def withSession[sealed T](f: (Session^{cap}) => T): T = f(session)
 
-def Test =
+def Test: Unit =
   val f = new Foo
   f.withSession(s => s).request // error
-  f.withSession[{*} Session](t => t) // error
+  f.withSession[Session^](t => t) // error
diff --git a/tests/neg-custom-args/captures/i15116.check b/tests/neg-custom-args/captures/i15116.check
index 7c73a7ff52ff..4b637a7c2e40 100644
--- a/tests/neg-custom-args/captures/i15116.check
+++ b/tests/neg-custom-args/captures/i15116.check
@@ -2,27 +2,27 @@
 3 | val x = Foo(m) // error
   | ^^^^^^^^^^^^^^
   | Non-local value x cannot have an inferred type
-  | {Bar.this.m} Foo{val m: {Bar.this.m} String}
+  | Foo{val m: String^{Bar.this.m}}^{Bar.this.m}
   | with non-empty capture set {Bar.this.m}.
   | The type needs to be declared explicitly.
 -- Error: tests/neg-custom-args/captures/i15116.scala:5:6 --------------------------------------------------------------
 5 | val x = Foo(m) // error
   | ^^^^^^^^^^^^^^
   | Non-local value x cannot have an inferred type
-  | {Baz.this} Foo{val m: {*} String}
+  | Foo{val m: String^}^{Baz.this}
   | with non-empty capture set {Baz.this}.
   | The type needs to be declared explicitly.
 -- Error: tests/neg-custom-args/captures/i15116.scala:7:6 --------------------------------------------------------------
 7 | val x = Foo(m) // error
   | ^^^^^^^^^^^^^^
   | Non-local value x cannot have an inferred type
-  | {Bar1.this.m} Foo{val m: {Bar1.this.m} String}
+  | Foo{val m: String^{Bar1.this.m}}^{Bar1.this.m}
  | with non-empty capture set {Bar1.this.m}.
  | The type needs to be declared explicitly.
 -- Error: tests/neg-custom-args/captures/i15116.scala:9:6 --------------------------------------------------------------
 9 | val x = Foo(m) // error
   | ^^^^^^^^^^^^^^
   | Non-local value x cannot have an inferred type
-  | {Baz2.this} Foo{val m: {*} String}
+  | Foo{val m: String^}^{Baz2.this}
   | with non-empty capture set {Baz2.this}.
   | The type needs to be declared explicitly.
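The i15116 entries above all exercise one rule: a non-local value whose inferred type would carry a non-empty capture set must have its type written out explicitly. A minimal Scala sketch of that rule under the experimental capture-checking import; the class names and the exact annotation shown as accepted are illustrative assumptions, not taken from this diff:

    import language.experimental.captureChecking

    class Box(m: String^)           // Box retains the capability passed as m
    class Owner(val m: String^):
      // val b = Box(m)             // rejected: inferred type would carry
      //                            // the non-empty capture set {Owner.this.m}
      val b: Box^{m} = Box(m)       // explicit annotation makes the capture visible (sketch)
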
diff --git a/tests/neg-custom-args/captures/i15116.scala b/tests/neg-custom-args/captures/i15116.scala
index 1659f251df3e..c4dc6c88d56c 100644
--- a/tests/neg-custom-args/captures/i15116.scala
+++ b/tests/neg-custom-args/captures/i15116.scala
@@ -1,9 +1,9 @@
-class Foo(m: {*} String)
-class Bar(val m: {*} String):
+class Foo(m: String^)
+class Bar(val m: String^):
   val x = Foo(m) // error
-trait Baz(val m: {*} String):
+trait Baz(val m: String^):
   val x = Foo(m) // error
-class Bar1(m: {*} String):
+class Bar1(m: String^):
   val x = Foo(m) // error
-trait Baz2(m: {*} String):
+trait Baz2(m: String^):
   val x = Foo(m) // error
diff --git a/tests/neg-custom-args/captures/i15749.scala b/tests/neg-custom-args/captures/i15749.scala
deleted file mode 100644
index 00d1811498f7..000000000000
--- a/tests/neg-custom-args/captures/i15749.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-class Unit
-object unit extends Unit
-
-type Top = {*} Any
-
-type LazyVal[T] = {*} Unit -> T
-
-class Foo[T](val x: T)
-
-// Foo[□ {*} Unit -> T]
-type BoxedLazyVal[T] = Foo[LazyVal[T]]
-
-def force[A](v: BoxedLazyVal[A]): A =
-  // Γ ⊢ v.x : □ {*} Unit -> A
-  v.x(unit) // error: (unbox v.x)(unit), where (unbox v.x) should be untypable
\ No newline at end of file
diff --git a/tests/neg-custom-args/captures/i15749a.scala b/tests/neg-custom-args/captures/i15749a.scala
deleted file mode 100644
index 9e439e28e98c..000000000000
--- a/tests/neg-custom-args/captures/i15749a.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-class Unit
-object unit extends Unit
-
-type Top = {*} Any
-
-type Wrapper[T] = [X] -> (op: {*} T -> X) -> X
-
-def test =
-
-  def wrapper[T](x: T): Wrapper[T] =
-    [X] => (op: {*} T -> X) => op(x)
-
-  def strictMap[A <: Top, B <: Top](mx: Wrapper[A])(f: {*} A -> B): Wrapper[B] =
-    mx((x: A) => wrapper(f(x)))
-
-  def force[A](thunk: {*} Unit -> A): A = thunk(unit)
-
-  def forceWrapper[A](mx: Wrapper[{*} Unit -> A]): Wrapper[A] =
-    // Γ ⊢ mx: Wrapper[□ {*} Unit => A]
-    // `force` should be typed as ∀(□ {*} Unit -> A) A, but it can not
-    strictMap[{*} Unit -> A, A](mx)(t => force[A](t)) // error
diff --git a/tests/neg-custom-args/captures/i15772.check b/tests/neg-custom-args/captures/i15772.check
index a587f2d262ed..949f7ca48588 100644
--- a/tests/neg-custom-args/captures/i15772.check
+++ b/tests/neg-custom-args/captures/i15772.check
@@ -1,28 +1,28 @@
--- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15772.scala:20:49 ---------------------------------------
-20 | val boxed1 : (({*} C) => Unit) -> Unit = box1(c) // error
-   | ^^^^^^^
-   | Found: {c} ({*} ({c} C{val arg: {*} C}) -> Unit) -> Unit
-   | Required: (({*} C) => Unit) -> Unit
+-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15772.scala:20:46 ---------------------------------------
+20 | val boxed1 : ((C^) => Unit) -> Unit = box1(c) // error
+   | ^^^^^^^
+   | Found: (C{val arg: C^}^{c} => Unit) ->{c} Unit
+   | Required: (C^ => Unit) -> Unit
   |
   | longer explanation available when compiling with `-explain`
--- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15772.scala:27:38 ---------------------------------------
-27 | val boxed2 : Observe[{*} C] = box2(c) // error
-   | ^^^^^^^
-   | Found: {c} ({*} ({c} C{val arg: {*} C}) -> Unit) -> Unit
-   | Required: Observe[{*} C]
+-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15772.scala:27:35 ---------------------------------------
+27 | val boxed2 : Observe[C^] = box2(c) // error
+   | ^^^^^^^
+   | Found: (C{val arg: C^}^{c} => Unit) ->{c} Unit
+   | Required: Observe[C^]
   |
   | longer explanation available when compiling with `-explain`
--- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15772.scala:33:37 ---------------------------------------
-33 | val boxed2 : Observe[{*} C] = box2(c) // error
-   | ^
-   | Found: {*} C
-   | Required: box {*} C{val arg: ? C}
+-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15772.scala:33:33 ---------------------------------------
+33 | val boxed2 : Observe[C]^ = box2(c) // error
+   | ^^^^^^^
+   | Found: (C{val arg: C^}^ => Unit) ->? Unit
+   | Required: (C => Unit) => Unit
   |
   | longer explanation available when compiling with `-explain`
 -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15772.scala:44:2 ----------------------------------------
 44 | x: (() -> Unit) // error
    | ^
-   | Found: {x} () -> Unit
+   | Found: () ->{x} Unit
    | Required: () -> Unit
   |
   | longer explanation available when compiling with `-explain`
diff --git a/tests/neg-custom-args/captures/i15772.scala b/tests/neg-custom-args/captures/i15772.scala
index d3afdb6c63f1..e4efb6b9ccab 100644
--- a/tests/neg-custom-args/captures/i15772.scala
+++ b/tests/neg-custom-args/captures/i15772.scala
@@ -1,6 +1,6 @@
 type Observe[T] = (T => Unit) -> Unit
 
-def unsafe(cap: {*} C) = cap.bad()
+def unsafe(cap: C^) = cap.bad()
 
 def box1[T](v: T) : (T => Unit) -> Unit = {
   (fn: T => Unit) => fn(v)
@@ -10,35 +10,35 @@ def box2[T](v: T) : Observe[T] = {
   (fn: T => Unit) => fn(v)
 }
 
-class C(val arg: {*} C) {
+class C(val arg: C^) {
   def bad() = println("I've gone bad!")
 }
 
-def main1(x: {*} C) : () -> Int =
+def main1(x: C^) : () -> Int =
   () =>
-    val c : {x} C = new C(x)
-    val boxed1 : (({*} C) => Unit) -> Unit = box1(c) // error
-    boxed1((cap: {*} C) => unsafe(c))
+    val c : C^{x} = new C(x)
+    val boxed1 : ((C^) => Unit) -> Unit = box1(c) // error
+    boxed1((cap: C^) => unsafe(c))
     0
 
-def main2(x: {*} C) : () -> Int =
+def main2(x: C^) : () -> Int =
   () =>
-    val c : {x} C = new C(x)
-    val boxed2 : Observe[{*} C] = box2(c) // error
-    boxed2((cap: {*} C) => unsafe(c))
+    val c : C^{x} = new C(x)
+    val boxed2 : Observe[C^] = box2(c) // error
+    boxed2((cap: C^) => unsafe(c))
    0
 
-def main3(x: {*} C) =
-  def c : {*} C = new C(x)
-  val boxed2 : Observe[{*} C] = box2(c) // error
-  boxed2((cap: {*} C) => unsafe(c))
+def main3(x: C^) =
+  def c : C^ = new C(x)
+  val boxed2 : Observe[C]^ = box2(c) // error
+  boxed2((cap: C^) => unsafe(c))
   0
 
 trait File:
   def write(s: String): Unit
 
-def main(io: {*} Any) =
-  val sayHello: (({io} File) => Unit) = (file: {io} File) => file.write("Hello World!\r\n")
-  val filesList : List[{io} File] = ???
+def main(io: Any^) =
+  val sayHello: ((File^{io}) => Unit) = (file: File^{io}) => file.write("Hello World!\r\n")
+  val filesList : List[File]^{io} = ???
   val x = () => filesList.foreach(sayHello)
   x: (() -> Unit) // error
diff --git a/tests/neg-custom-args/captures/i15921.scala b/tests/neg-custom-args/captures/i15921.scala
index 291673746e33..233ef23991fc 100644
--- a/tests/neg-custom-args/captures/i15921.scala
+++ b/tests/neg-custom-args/captures/i15921.scala
@@ -1,7 +1,7 @@
 trait Stream { def close(): Unit = (); def write(x: Any): Unit = () }
 
 object Test {
-  def usingLogFile[T](op: (c: {*} Stream) => T): T =
+  def usingLogFile[T](op: (c: Stream^) => T): T =
     val logFile = new Stream { }
     val result = op(logFile)
     logFile.close()
diff --git a/tests/neg-custom-args/captures/i15923-cases.scala b/tests/neg-custom-args/captures/i15923-cases.scala
deleted file mode 100644
index 5fbb95355a60..000000000000
--- a/tests/neg-custom-args/captures/i15923-cases.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-trait Cap { def use(): Int }
-type Id[X] = [T] -> (op: X => T) -> T
-def mkId[X](x: X): Id[X] = [T] => (op: X => T) => op(x)
-
-def foo(x: Id[{*} Cap]) = {
-  x(_.use()) // error
-}
-
-def bar(io: {*} Cap, x: Id[{io} Cap]) = {
-  x(_.use())
-}
-
-def barAlt(a: {*} Cap, b: {*} Cap, x: Id[{a, b} Cap]) = {
-  x(_.use())
-}
diff --git a/tests/neg-custom-args/captures/i15923.scala b/tests/neg-custom-args/captures/i15923.scala
index ac7ee995150e..3994b34f5928 100644
--- a/tests/neg-custom-args/captures/i15923.scala
+++ b/tests/neg-custom-args/captures/i15923.scala
@@ -3,12 +3,12 @@ type Id[X] = [T] -> (op: X => T) -> T
 def mkId[X](x: X): Id[X] = [T] => (op: X => T) => op(x)
 
 def bar() = {
-  def withCap[X](op: ({*} Cap) => X): X = {
-    val cap: {*} Cap = new Cap { def use() = { println("cap is used"); 0 } }
+  def withCap[sealed X](op: (Cap^) => X): X = {
+    val cap: Cap^ = new Cap { def use() = { println("cap is used"); 0 } }
     val result = op(cap)
     result
   }
 
-  val leak = withCap(cap => mkId(cap))
-  leak { cap => cap.use() } // error
-}
+  val leak = withCap(cap => mkId(cap)) // error
+  leak { cap => cap.use() }
+}
\ No newline at end of file
diff --git a/tests/neg-custom-args/captures/i16114.scala b/tests/neg-custom-args/captures/i16114.scala
index cc491226f9df..d22c7f02d5fb 100644
--- a/tests/neg-custom-args/captures/i16114.scala
+++ b/tests/neg-custom-args/captures/i16114.scala
@@ -1,46 +1,46 @@
 trait Cap { def use(): Int; def close(): Unit }
-def mkCap(): {*} Cap = ???
+def mkCap(): Cap^ = ???
 
 def expect[T](x: T): x.type = x
 
-def withCap[T](op: ({*} Cap) => T): T = {
-  val cap: {*} Cap = mkCap()
+def withCap[T](op: Cap^ => T): T = {
+  val cap: Cap^ = mkCap()
   val result = op(cap)
   cap.close()
   result
 }
 
-def main(fs: {*} Cap): Unit = {
-  def badOp(io: {*} Cap): {} Unit -> Unit = {
-    val op1: {io} Unit -> Unit = (x: Unit) => // error // limitation
-      expect[{*} Cap] {
+def main(fs: Cap^): Unit = {
+  def badOp(io: Cap^{cap}): Unit ->{} Unit = {
+    val op1: Unit ->{io} Unit = (x: Unit) => // error // limitation
+      expect[Cap^] {
         io.use()
         fs
       }
 
-    val op2: {fs} Unit -> Unit = (x: Unit) => // error // limitation
-      expect[{*} Cap] {
+    val op2: Unit ->{fs} Unit = (x: Unit) => // error // limitation
+      expect[Cap^] {
        fs.use()
        io
      }
 
-    val op3: {io} Unit -> Unit = (x: Unit) => // ok
-      expect[{*} Cap] {
+    val op3: Unit ->{io} Unit = (x: Unit) => // ok
+      expect[Cap^] {
        io.use()
        io
      }
 
-    val op4: {} Unit -> Unit = (x: Unit) => // ok
-      expect[{*} Cap](io)
+    val op4: Unit ->{} Unit = (x: Unit) => // ok
+      expect[Cap^](io)
 
-    val op: {} Unit -> Unit = (x: Unit) => // error
-      expect[{*} Cap] {
+    val op: Unit -> Unit = (x: Unit) => // error
+      expect[Cap^] {
        io.use()
        io
      }
     op
   }
 
-  val leaked: {} Unit -> Unit = withCap(badOp)
+  val leaked: Unit -> Unit = withCap(badOp)
   leaked(())
 }
diff --git a/tests/neg-custom-args/captures/impurefuns.scala b/tests/neg-custom-args/captures/impurefuns.scala
new file mode 100644
index 000000000000..d15d9a466307
--- /dev/null
+++ b/tests/neg-custom-args/captures/impurefuns.scala
@@ -0,0 +1,3 @@
+def f(x: Object^): Any =
+  val f: Int =>{x} Int = ??? // error // error // error
+  f
diff --git a/tests/neg-custom-args/captures/inner-classes.scala b/tests/neg-custom-args/captures/inner-classes.scala
index cf4073b36f81..181b830e4996 100644
--- a/tests/neg-custom-args/captures/inner-classes.scala
+++ b/tests/neg-custom-args/captures/inner-classes.scala
@@ -5,21 +5,21 @@ object test:
 
   def foo(fs: FileSystem) =
 
     trait LazyList[+A]:
-      this: {fs} LazyList[A] =>
+      this: LazyList[A]^{fs} =>
 
       def isEmpty: Boolean
       def head: A
-      def tail: {this} LazyList[A]
+      def tail: LazyList[A]^{this}
 
     object LazyNil extends LazyList[Nothing]:
       def isEmpty: Boolean = true
      def head = ???
      def tail = ???
- final class LazyCons[+T](val x: T, val xs: () => {*} LazyList[T]) extends LazyList[T]: // error + final class LazyCons[+T](val x: T, val xs: () => LazyList[T]^) extends LazyList[T]: // error def isEmpty = false def head = x - def tail: {this} LazyList[T] = xs() + def tail: LazyList[T]^{this} = xs() end LazyCons new LazyCons(1, () => LazyNil) diff --git a/tests/neg-custom-args/captures/io.scala b/tests/neg-custom-args/captures/io.scala index ae686d6b154e..f481bf357fc8 100644 --- a/tests/neg-custom-args/captures/io.scala +++ b/tests/neg-custom-args/captures/io.scala @@ -3,17 +3,17 @@ sealed trait IO: def puts(msg: Any): Unit = println(msg) def test1 = - val IO : IO @retains(caps.*) = new IO {} + val IO : IO @retains(caps.cap) = new IO {} def foo = {IO; IO.puts("hello") } val x : () -> Unit = () => foo // error: Found: (() -> Unit) retains IO; Required: () -> Unit def test2 = - val IO : IO @retains(caps.*) = new IO {} - def puts(msg: Any, io: IO @retains(caps.*)) = println(msg) + val IO : IO @retains(caps.cap) = new IO {} + def puts(msg: Any, io: IO @retains(caps.cap)) = println(msg) def foo() = puts("hello", IO) val x : () -> Unit = () => foo() // error: Found: (() -> Unit) retains IO; Required: () -> Unit -type Capability[T] = T @retains(caps.*) +type Capability[T] = T @retains(caps.cap) def test3 = val IO : Capability[IO] = new IO {} diff --git a/tests/neg-custom-args/captures/lazylist.check b/tests/neg-custom-args/captures/lazylist.check index 471e2a038450..4b7611fc3fb7 100644 --- a/tests/neg-custom-args/captures/lazylist.check +++ b/tests/neg-custom-args/captures/lazylist.check @@ -1,42 +1,42 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylist.scala:17:15 ------------------------------------- 17 | def tail = xs() // error | ^^^^ - | Found: {LazyCons.this.xs} lazylists.LazyList[T] + | Found: lazylists.LazyList[T]^{LazyCons.this.xs} | Required: lazylists.LazyList[T] | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylist.scala:35:29 ------------------------------------- 35 | val ref1c: LazyList[Int] = ref1 // error | ^^^^ - | Found: (ref1 : {cap1} lazylists.LazyCons[Int]{val xs: {cap1} () -> {*} lazylists.LazyList[Int]}) - | Required: lazylists.LazyList[Int] + | Found: (ref1 : lazylists.LazyCons[Int]{val xs: () ->{cap1} lazylists.LazyList[Int]^}^{cap1}) + | Required: lazylists.LazyList[Int] | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylist.scala:37:36 ------------------------------------- -37 | val ref2c: {ref1} LazyList[Int] = ref2 // error +37 | val ref2c: LazyList[Int]^{ref1} = ref2 // error | ^^^^ - | Found: (ref2 : {cap2, ref1} lazylists.LazyList[Int]) - | Required: {ref1} lazylists.LazyList[Int] + | Found: (ref2 : lazylists.LazyList[Int]^{cap2, ref1}) + | Required: lazylists.LazyList[Int]^{ref1} | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylist.scala:39:36 ------------------------------------- -39 | val ref3c: {cap2} LazyList[Int] = ref3 // error +39 | val ref3c: LazyList[Int]^{cap2} = ref3 // error | ^^^^ - | Found: (ref3 : {cap2, ref1} lazylists.LazyList[Int]) - | Required: {cap2} lazylists.LazyList[Int] + | Found: (ref3 : lazylists.LazyList[Int]^{cap2, ref1}) + | Required: lazylists.LazyList[Int]^{cap2} | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: 
tests/neg-custom-args/captures/lazylist.scala:41:48 ------------------------------------- -41 | val ref4c: {cap1, ref3, cap3} LazyList[Int] = ref4 // error +41 | val ref4c: LazyList[Int]^{cap1, ref3, cap3} = ref4 // error | ^^^^ - | Found: (ref4 : {cap3, cap2, ref1, cap1} lazylists.LazyList[Int]) - | Required: {cap1, ref3, cap3} lazylists.LazyList[Int] + | Found: (ref4 : lazylists.LazyList[Int]^{cap3, cap2, ref1, cap1}) + | Required: lazylists.LazyList[Int]^{cap1, ref3, cap3} | | longer explanation available when compiling with `-explain` -- [E164] Declaration Error: tests/neg-custom-args/captures/lazylist.scala:22:6 ---------------------------------------- -22 | def tail: {*} LazyList[Nothing] = ??? // error overriding +22 | def tail: LazyList[Nothing]^ = ??? // error overriding | ^ | error overriding method tail in class LazyList of type -> lazylists.LazyList[Nothing]; - | method tail of type -> {*} lazylists.LazyList[Nothing] has incompatible type + | method tail of type -> lazylists.LazyList[Nothing]^ has incompatible type | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/lazylist.scala b/tests/neg-custom-args/captures/lazylist.scala index 2674f15a0ee3..e6e4d003f7ae 100644 --- a/tests/neg-custom-args/captures/lazylist.scala +++ b/tests/neg-custom-args/captures/lazylist.scala @@ -1,17 +1,17 @@ package lazylists abstract class LazyList[+T]: - this: ({*} LazyList[T]) => + this: LazyList[T]^ => def isEmpty: Boolean def head: T def tail: LazyList[T] - def map[U](f: T => U): {f, this} LazyList[U] = + def map[U](f: T => U): LazyList[U]^{f, this} = if isEmpty then LazyNil else LazyCons(f(head), () => tail.map(f)) -class LazyCons[+T](val x: T, val xs: () => {*} LazyList[T]) extends LazyList[T]: +class LazyCons[+T](val x: T, val xs: () => LazyList[T]^) extends LazyList[T]: def isEmpty = false def head = x def tail = xs() // error @@ -19,13 +19,13 @@ class LazyCons[+T](val x: T, val xs: () => {*} LazyList[T]) extends LazyList[T]: object LazyNil extends LazyList[Nothing]: def isEmpty = true def head = ??? - def tail: {*} LazyList[Nothing] = ??? // error overriding + def tail: LazyList[Nothing]^ = ??? 
// error overriding -def map[A, B](xs: {*} LazyList[A], f: A => B): {f, xs} LazyList[B] = +def map[A, B](xs: LazyList[A]^, f: A => B): LazyList[B]^{f, xs} = xs.map(f) class CC -type Cap = {*} CC +type Cap = CC^ def test(cap1: Cap, cap2: Cap, cap3: Cap) = def f[T](x: LazyList[T]): LazyList[T] = if cap1 == cap1 then x else LazyNil @@ -34,8 +34,8 @@ def test(cap1: Cap, cap2: Cap, cap3: Cap) = val ref1 = LazyCons(1, () => f(LazyNil)) val ref1c: LazyList[Int] = ref1 // error val ref2 = map(ref1, g) - val ref2c: {ref1} LazyList[Int] = ref2 // error + val ref2c: LazyList[Int]^{ref1} = ref2 // error val ref3 = ref1.map(g) - val ref3c: {cap2} LazyList[Int] = ref3 // error + val ref3c: LazyList[Int]^{cap2} = ref3 // error val ref4 = (if cap1 == cap2 then ref1 else ref2).map(h) - val ref4c: {cap1, ref3, cap3} LazyList[Int] = ref4 // error + val ref4c: LazyList[Int]^{cap1, ref3, cap3} = ref4 // error diff --git a/tests/neg-custom-args/captures/lazylists-exceptions.check b/tests/neg-custom-args/captures/lazylists-exceptions.check index bd6fad047fe9..f58ed265d3be 100644 --- a/tests/neg-custom-args/captures/lazylists-exceptions.check +++ b/tests/neg-custom-args/captures/lazylists-exceptions.check @@ -1,8 +1,9 @@ -- Error: tests/neg-custom-args/captures/lazylists-exceptions.scala:36:2 ----------------------------------------------- 36 | try // error | ^ - | The expression's type {*} LazyList[Int] is not allowed to capture the root capability `*`. - | This usually means that a capability persists longer than its allowed lifetime. + | Result of `try` cannot have type LazyList[Int]^ since + | that type captures the root capability `cap`. + | This is often caused by a locally generated exception capability leaking as part of its result. 37 | tabulate(10) { i => 38 | if i > 9 then throw Ex1() 39 | i * i diff --git a/tests/neg-custom-args/captures/lazylists-exceptions.scala b/tests/neg-custom-args/captures/lazylists-exceptions.scala index 6cba934d61e8..6a72facf7285 100644 --- a/tests/neg-custom-args/captures/lazylists-exceptions.scala +++ b/tests/neg-custom-args/captures/lazylists-exceptions.scala @@ -1,31 +1,31 @@ import language.experimental.saferExceptions trait LazyList[+A]: - this: {*} LazyList[A] => + this: LazyList[A]^ => def isEmpty: Boolean def head: A - def tail: {this} LazyList[A] + def tail: LazyList[A]^{this} object LazyNil extends LazyList[Nothing]: def isEmpty: Boolean = true def head = ??? def tail = ??? 
-final class LazyCons[+T](val x: T, val xs: () => {*} LazyList[T]) extends LazyList[T]: - this: {*} LazyList[T] => +final class LazyCons[+T](val x: T, val xs: () => LazyList[T]^) extends LazyList[T]: + this: LazyList[T]^ => def isEmpty = false def head = x - def tail: {this} LazyList[T] = xs() + def tail: LazyList[T]^{this} = xs() end LazyCons extension [A](x: A) - def #:(xs1: => {*} LazyList[A]): {xs1} LazyList[A] = + def #:(xs1: => LazyList[A]^): LazyList[A]^{xs1} = LazyCons(x, () => xs1) -def tabulate[A](n: Int)(gen: Int => A): {gen} LazyList[A] = - def recur(i: Int): {gen} LazyList[A] = +def tabulate[A](n: Int)(gen: Int => A): LazyList[A]^{gen} = + def recur(i: Int): LazyList[A]^{gen} = if i == n then LazyNil else gen(i) #: recur(i + 1) recur(0) diff --git a/tests/neg-custom-args/captures/lazylists1.check b/tests/neg-custom-args/captures/lazylists1.check index f91e2500dc15..127a0563c3c9 100644 --- a/tests/neg-custom-args/captures/lazylists1.check +++ b/tests/neg-custom-args/captures/lazylists1.check @@ -1,7 +1,7 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylists1.scala:25:66 ----------------------------------- -25 | def concat(other: {f} LazyList[A]): {this, f} LazyList[A] = ??? : ({xs, f} LazyList[A]) // error +25 | def concat(other: LazyList[A]^{f}): LazyList[A]^{this, f} = ??? : (LazyList[A]^{xs, f}) // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | Found: {xs, f} LazyList[A] - | Required: {Mapped.this, f} LazyList[A] + | Found: LazyList[A]^{xs, f} + | Required: LazyList[A]^{Mapped.this, f} | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/lazylists1.scala b/tests/neg-custom-args/captures/lazylists1.scala index c6475223b783..99472c13ebec 100644 --- a/tests/neg-custom-args/captures/lazylists1.scala +++ b/tests/neg-custom-args/captures/lazylists1.scala @@ -1,27 +1,27 @@ class CC -type Cap = {*} CC +type Cap = CC^{cap} trait LazyList[+A]: - this: ({*} LazyList[A]) => + this: LazyList[A]^{cap} => def isEmpty: Boolean def head: A - def tail: {this} LazyList[A] + def tail: LazyList[A]^{this} object LazyNil extends LazyList[Nothing]: def isEmpty: Boolean = true def head = ??? def tail = ??? -extension [A](xs: {*} LazyList[A]) - def map[B](f: A => B): {xs, f} LazyList[B] = +extension [A](xs: LazyList[A]^{cap}) + def map[B](f: A => B): LazyList[B]^{xs, f} = final class Mapped extends LazyList[B]: - this: ({xs, f} Mapped) => + this: (Mapped^{xs, f}) => def isEmpty = false def head: B = f(xs.head) - def tail: {this} LazyList[B] = xs.tail.map(f) // OK - def drop(n: Int): {this} LazyList[B] = ??? : ({xs, f} LazyList[B]) // OK - def concat(other: {f} LazyList[A]): {this, f} LazyList[A] = ??? : ({xs, f} LazyList[A]) // error + def tail: LazyList[B]^{this} = xs.tail.map(f) // OK + def drop(n: Int): LazyList[B]^{this} = ??? : (LazyList[B]^{xs, f}) // OK + def concat(other: LazyList[A]^{f}): LazyList[A]^{this, f} = ??? : (LazyList[A]^{xs, f}) // error new Mapped diff --git a/tests/neg-custom-args/captures/lazylists2.check b/tests/neg-custom-args/captures/lazylists2.check index 812170aabdfe..72efbc08f8e2 100644 --- a/tests/neg-custom-args/captures/lazylists2.check +++ b/tests/neg-custom-args/captures/lazylists2.check @@ -1,24 +1,24 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylists2.scala:18:4 ------------------------------------ 18 | final class Mapped extends LazyList[B]: // error | ^ - | Found: {f, xs} LazyList[? 
B] - | Required: {f} LazyList[B] -19 | this: ({xs, f} Mapped) => + | Found: LazyList[B^?]^{f, xs} + | Required: LazyList[B]^{f} +19 | this: (Mapped^{xs, f}) => 20 | def isEmpty = false 21 | def head: B = f(xs.head) -22 | def tail: {this} LazyList[B] = xs.tail.map(f) +22 | def tail: LazyList[B]^{this} = xs.tail.map(f) 23 | new Mapped | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylists2.scala:27:4 ------------------------------------ 27 | final class Mapped extends LazyList[B]: // error | ^ - | Found: {f, xs} LazyList[? B] - | Required: {xs} LazyList[B] -28 | this: ({xs, f} Mapped) => + | Found: LazyList[B^?]^{f, xs} + | Required: LazyList[B]^{xs} +28 | this: Mapped^{xs, f} => 29 | def isEmpty = false 30 | def head: B = f(xs.head) -31 | def tail: {this} LazyList[B] = xs.tail.map(f) +31 | def tail: LazyList[B]^{this} = xs.tail.map(f) 32 | new Mapped | | longer explanation available when compiling with `-explain` @@ -26,19 +26,19 @@ 40 | def head: B = f(xs.head) // error | ^ |(f : A => B) cannot be referenced here; it is not included in the allowed capture set {xs} of the self type of class Mapped --- Error: tests/neg-custom-args/captures/lazylists2.scala:41:49 -------------------------------------------------------- -41 | def tail: {this} LazyList[B] = xs.tail.map(f) // error - | ^ +-- Error: tests/neg-custom-args/captures/lazylists2.scala:41:48 -------------------------------------------------------- +41 | def tail: LazyList[B]^{this}= xs.tail.map(f) // error + | ^ |(f : A => B) cannot be referenced here; it is not included in the allowed capture set {xs} of the self type of class Mapped -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylists2.scala:45:4 ------------------------------------ 45 | final class Mapped extends LazyList[B]: // error | ^ - | Found: {f, xs} LazyList[? B] - | Required: {xs} LazyList[B] -46 | this: ({xs, f} Mapped) => + | Found: LazyList[B^?]^{f, xs} + | Required: LazyList[B]^{xs} +46 | this: (Mapped^{xs, f}) => 47 | def isEmpty = false 48 | def head: B = f(xs.head) -49 | def tail: {xs, f} LazyList[B] = xs.tail.map(f) +49 | def tail: LazyList[B]^{xs, f} = xs.tail.map(f) 50 | new Mapped | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/lazylists2.scala b/tests/neg-custom-args/captures/lazylists2.scala index 574fb5a1a488..f6c1cf95a8ed 100644 --- a/tests/neg-custom-args/captures/lazylists2.scala +++ b/tests/neg-custom-args/captures/lazylists2.scala @@ -1,62 +1,62 @@ class CC -type Cap = {*} CC +type Cap = CC^ trait LazyList[+A]: - this: ({*} LazyList[A]) => + this: LazyList[A]^ => def isEmpty: Boolean def head: A - def tail: {this} LazyList[A] + def tail: LazyList[A]^{this} object LazyNil extends LazyList[Nothing]: def isEmpty: Boolean = true def head = ??? def tail = ??? 
-extension [A](xs: {*} LazyList[A]) - def map[B](f: A => B): {f} LazyList[B] = +extension [A](xs: LazyList[A]^) + def map[B](f: A => B): LazyList[B]^{f} = final class Mapped extends LazyList[B]: // error - this: ({xs, f} Mapped) => + this: (Mapped^{xs, f}) => def isEmpty = false def head: B = f(xs.head) - def tail: {this} LazyList[B] = xs.tail.map(f) + def tail: LazyList[B]^{this} = xs.tail.map(f) new Mapped - def map2[B](f: A => B): {xs} LazyList[B] = + def map2[B](f: A => B): LazyList[B]^{xs} = final class Mapped extends LazyList[B]: // error - this: ({xs, f} Mapped) => + this: Mapped^{xs, f} => def isEmpty = false def head: B = f(xs.head) - def tail: {this} LazyList[B] = xs.tail.map(f) + def tail: LazyList[B]^{this} = xs.tail.map(f) new Mapped - def map3[B](f: A => B): {xs} LazyList[B] = + def map3[B](f: A => B): LazyList[B]^{xs} = final class Mapped extends LazyList[B]: - this: ({xs} Mapped) => + this: Mapped^{xs} => def isEmpty = false def head: B = f(xs.head) // error - def tail: {this} LazyList[B] = xs.tail.map(f) // error + def tail: LazyList[B]^{this}= xs.tail.map(f) // error new Mapped - def map4[B](f: A => B): {xs} LazyList[B] = + def map4[B](f: A => B): LazyList[B]^{xs} = final class Mapped extends LazyList[B]: // error - this: ({xs, f} Mapped) => + this: (Mapped^{xs, f}) => def isEmpty = false def head: B = f(xs.head) - def tail: {xs, f} LazyList[B] = xs.tail.map(f) + def tail: LazyList[B]^{xs, f} = xs.tail.map(f) new Mapped def map5[B](f: A => B): LazyList[B] = class Mapped extends LazyList[B]: - this: ({xs, f} Mapped) => + this: (Mapped^{xs, f}) => def isEmpty = false def head: B = f(xs.head) - def tail: {this} LazyList[B] = xs.tail.map(f) + def tail: LazyList[B]^{this} = xs.tail.map(f) class Mapped2 extends Mapped: // error this: Mapped => new Mapped2 diff --git a/tests/neg-custom-args/captures/lazyref.check b/tests/neg-custom-args/captures/lazyref.check index 7471f8f4f686..8c91ec13b5d8 100644 --- a/tests/neg-custom-args/captures/lazyref.check +++ b/tests/neg-custom-args/captures/lazyref.check @@ -1,28 +1,28 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazyref.scala:19:28 -------------------------------------- 19 | val ref1c: LazyRef[Int] = ref1 // error | ^^^^ - | Found: (ref1 : {cap1} LazyRef[Int]{val elem: {cap1} () -> Int}) + | Found: (ref1 : LazyRef[Int]{val elem: () ->{cap1} Int}^{cap1}) | Required: LazyRef[Int] | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazyref.scala:21:35 -------------------------------------- -21 | val ref2c: {cap2} LazyRef[Int] = ref2 // error +21 | val ref2c: LazyRef[Int]^{cap2} = ref2 // error | ^^^^ - | Found: (ref2 : {cap2, ref1} LazyRef[Int]{val elem: {*} () -> Int}) - | Required: {cap2} LazyRef[Int] + | Found: (ref2 : LazyRef[Int]{val elem: () => Int}^{cap2, ref1}) + | Required: LazyRef[Int]^{cap2} | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazyref.scala:23:35 -------------------------------------- -23 | val ref3c: {ref1} LazyRef[Int] = ref3 // error +23 | val ref3c: LazyRef[Int]^{ref1} = ref3 // error | ^^^^ - | Found: (ref3 : {cap2, ref1} LazyRef[Int]{val elem: {*} () -> Int}) - | Required: {ref1} LazyRef[Int] + | Found: (ref3 : LazyRef[Int]{val elem: () => Int}^{cap2, ref1}) + | Required: LazyRef[Int]^{ref1} | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazyref.scala:25:35 
-------------------------------------- -25 | val ref4c: {cap1} LazyRef[Int] = ref4 // error +25 | val ref4c: LazyRef[Int]^{cap1} = ref4 // error | ^^^^ - | Found: (ref4 : {cap2, cap1} LazyRef[Int]{val elem: {*} () -> Int}) - | Required: {cap1} LazyRef[Int] + | Found: (ref4 : LazyRef[Int]{val elem: () => Int}^{cap2, cap1}) + | Required: LazyRef[Int]^{cap1} | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/lazyref.scala b/tests/neg-custom-args/captures/lazyref.scala index 8395e5cb42cd..99aa10d5d2b2 100644 --- a/tests/neg-custom-args/captures/lazyref.scala +++ b/tests/neg-custom-args/captures/lazyref.scala @@ -1,15 +1,15 @@ class CC -type Cap = {*} CC +type Cap = CC^ class LazyRef[T](val elem: () => T): val get: () => T = elem - def map[U](f: T => U): {f, this} LazyRef[U] = + def map[U](f: T => U): LazyRef[U]^{f, this} = new LazyRef(() => f(elem())) -def map[A, B](ref: {*} LazyRef[A], f: A => B): {f, ref} LazyRef[B] = +def map[A, B](ref: LazyRef[A]^, f: A => B): LazyRef[B]^{f, ref} = new LazyRef(() => f(ref.elem())) -def mapc[A, B]: (ref: {*} LazyRef[A], f: A => B) -> {f, ref} LazyRef[B] = +def mapc[A, B]: (ref: LazyRef[A]^, f: A => B) -> LazyRef[B]^{f, ref} = (ref1, f1) => map[A, B](ref1, f1) def test(cap1: Cap, cap2: Cap) = @@ -18,8 +18,8 @@ def test(cap1: Cap, cap2: Cap) = val ref1 = LazyRef(() => f(0)) val ref1c: LazyRef[Int] = ref1 // error val ref2 = map(ref1, g) - val ref2c: {cap2} LazyRef[Int] = ref2 // error + val ref2c: LazyRef[Int]^{cap2} = ref2 // error val ref3 = ref1.map(g) - val ref3c: {ref1} LazyRef[Int] = ref3 // error + val ref3c: LazyRef[Int]^{ref1} = ref3 // error val ref4 = (if cap1 == cap2 then ref1 else ref2).map(g) - val ref4c: {cap1} LazyRef[Int] = ref4 // error + val ref4c: LazyRef[Int]^{cap1} = ref4 // error diff --git a/tests/neg-custom-args/captures/nestedclass.check b/tests/neg-custom-args/captures/nestedclass.check index cb4421ece0ec..2987318caf4f 100644 --- a/tests/neg-custom-args/captures/nestedclass.check +++ b/tests/neg-custom-args/captures/nestedclass.check @@ -1,7 +1,7 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/nestedclass.scala:15:15 ---------------------------------- 15 | val xsc: C = xs // error | ^^ - | Found: (xs : {cap1} C) + | Found: (xs : C^{cap1}) | Required: C | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/nestedclass.scala b/tests/neg-custom-args/captures/nestedclass.scala index 38adf7998868..0581f9ce9b2d 100644 --- a/tests/neg-custom-args/captures/nestedclass.scala +++ b/tests/neg-custom-args/captures/nestedclass.scala @@ -1,5 +1,5 @@ class CC -type Cap = {*} CC +type Cap = CC^ abstract class C: def head: String diff --git a/tests/neg-custom-args/captures/override-adapt-box-selftype.scala b/tests/neg-custom-args/captures/override-adapt-box-selftype.scala index a4dc92429192..f44add78e246 100644 --- a/tests/neg-custom-args/captures/override-adapt-box-selftype.scala +++ b/tests/neg-custom-args/captures/override-adapt-box-selftype.scala @@ -4,45 +4,45 @@ class IO class C object Test1 { - abstract class A[X] { this: {} A[X] => + abstract class A[X] { this: A[X] => def foo(x: X): X } - def test(io: {*} IO) = { - class B extends A[{io} C] { // X =:= {io} C // error - override def foo(x: {io} C): {io} C = ??? + def test(io: IO^) = { + class B extends A[C^{io}] { // X =:= {io} C // error + override def foo(x: C^{io}): C^{io} = ??? 
} } } -def Test2(io: {*} IO, fs: {io} IO, ct: {*} IO) = { - abstract class A[X] { this: {io} A[X] => +def Test2(io: IO^{cap}, fs: IO^{io}, ct: IO^) = { + abstract class A[X] { this: A[X]^{io} => def foo(x: X): X } - class B1 extends A[{io} C] { - override def foo(x: {io} C): {io} C = ??? + class B1 extends A[C^{io}] { + override def foo(x: C^{io}): C^{io} = ??? } - class B2 extends A[{ct} C] { // error - override def foo(x: {ct} C): {ct} C = ??? + class B2 extends A[C^{ct}] { // error + override def foo(x: C^{ct}): C^{ct} = ??? } - class B3 extends A[{fs} C] { - override def foo(x: {fs} C): {fs} C = ??? + class B3 extends A[C^{fs}] { + override def foo(x: C^{fs}): C^{fs} = ??? } } -def Test3(io: {*} IO, ct: {*} IO) = { - abstract class A[X] { this: {*} A[X] => +def Test3(io: IO^, ct: IO^) = { + abstract class A[X] { this: A[X]^ => def foo(x: X): X } - class B1 extends A[{io} C] { - override def foo(x: {io} C): {io} C = ??? + class B1 extends A[C^{io}] { + override def foo(x: C^{io}): C^{io} = ??? } - class B2 extends A[{io, ct} C] { - override def foo(x: {io, ct} C): {io, ct} C = ??? + class B2 extends A[C^{io, ct}] { + override def foo(x: C^{io, ct}): C^{io, ct} = ??? } } diff --git a/tests/neg-custom-args/captures/override-adapt-box.scala b/tests/neg-custom-args/captures/override-adapt-box.scala index 64ba8743bf91..70023dfbc941 100644 --- a/tests/neg-custom-args/captures/override-adapt-box.scala +++ b/tests/neg-custom-args/captures/override-adapt-box.scala @@ -1,14 +1,14 @@ import language.experimental.captureChecking -abstract class A[X] { this: ({} A[X]) => +abstract class A[X] { this: A[X]^{} => def foo(x: X): X } class IO class C -def test(io: {*} IO) = { - class B extends A[{io} C] { // X =:= {io} C // error - override def foo(x: {io} C): {io} C = ??? +def test(io: IO^{cap}) = { + class B extends A[C^{io}] { // X =:= {io} C // error + override def foo(x: C^{io}): C^{io} = ??? } } diff --git a/tests/neg-custom-args/captures/override-boxed.scala b/tests/neg-custom-args/captures/override-boxed.scala index 720b50732f61..d66d28d15aaa 100644 --- a/tests/neg-custom-args/captures/override-boxed.scala +++ b/tests/neg-custom-args/captures/override-boxed.scala @@ -1,7 +1,8 @@ + class A -def test(x: {*} Any) = +def test(x: Any^{cap}) = abstract class Getter: - def get(): {x} A - class PolyGetter[T <: {x} A] extends Getter: + def get(): A^{x} + class PolyGetter[T <: A^{x}] extends Getter: override def get(): T = ??? // error diff --git a/tests/neg-custom-args/captures/real-try.check b/tests/neg-custom-args/captures/real-try.check index 9745470f219c..c8df3777bcfa 100644 --- a/tests/neg-custom-args/captures/real-try.check +++ b/tests/neg-custom-args/captures/real-try.check @@ -1,8 +1,15 @@ +-- [E129] Potential Issue Warning: tests/neg-custom-args/captures/real-try.scala:30:4 ---------------------------------- +30 | b.x + | ^^^ + | A pure expression does nothing in statement position; you may be omitting necessary parentheses + | + | longer explanation available when compiling with `-explain` -- Error: tests/neg-custom-args/captures/real-try.scala:12:2 ----------------------------------------------------------- 12 | try // error | ^ - | The expression's type {*} () -> Unit is not allowed to capture the root capability `*`. - | This usually means that a capability persists longer than its allowed lifetime. + | Result of `try` cannot have type () => Unit since + | that type captures the root capability `cap`. 
+ | This is often caused by a locally generated exception capability leaking as part of its result. 13 | () => foo(1) 14 | catch 15 | case _: Ex1 => ??? @@ -10,14 +17,20 @@ -- Error: tests/neg-custom-args/captures/real-try.scala:18:2 ----------------------------------------------------------- 18 | try // error | ^ - | The expression's type {*} () -> ? Cell[Unit] is not allowed to capture the root capability `*`. - | This usually means that a capability persists longer than its allowed lifetime. + | Result of `try` cannot have type () => Cell[Unit]^? since + | that type captures the root capability `cap`. + | This is often caused by a locally generated exception capability leaking as part of its result. 19 | () => Cell(foo(1)) 20 | catch 21 | case _: Ex1 => ??? 22 | case _: Ex2 => ??? --- Error: tests/neg-custom-args/captures/real-try.scala:30:4 ----------------------------------------------------------- -30 | b.x // error - | ^^^ - | The expression's type box {*} () -> Unit is not allowed to capture the root capability `*`. - | This usually means that a capability persists longer than its allowed lifetime. +-- Error: tests/neg-custom-args/captures/real-try.scala:24:10 ---------------------------------------------------------- +24 | val b = try // error + | ^ + | Result of `try` cannot have type Cell[box () => Unit]^? since + | the part box () => Unit of that type captures the root capability `cap`. + | This is often caused by a locally generated exception capability leaking as part of its result. +25 | Cell(() => foo(1))//: Cell[box {ev} () => Unit] <: Cell[box {cap} () => Unit] +26 | catch +27 | case _: Ex1 => ??? +28 | case _: Ex2 => ??? diff --git a/tests/neg-custom-args/captures/real-try.scala b/tests/neg-custom-args/captures/real-try.scala index 94e1eafd9af2..a826fdaa4af7 100644 --- a/tests/neg-custom-args/captures/real-try.scala +++ b/tests/neg-custom-args/captures/real-try.scala @@ -8,7 +8,7 @@ def foo(i: Int): (CanThrow[Ex1], CanThrow[Ex2]) ?-> Unit = class Cell[+T](val x: T) -def test() = +def test(): Unit = try // error () => foo(1) catch @@ -21,10 +21,10 @@ def test() = case _: Ex1 => ??? case _: Ex2 => ??? - val b = try // ok here, but error on use - Cell(() => foo(1))//: Cell[box {ev} () => Unit] <: Cell[box {*} () => Unit] + val b = try // error + Cell(() => foo(1))//: Cell[box {ev} () => Unit] <: Cell[box {cap} () => Unit] catch case _: Ex1 => ??? case _: Ex2 => ??? - b.x // error + b.x diff --git a/tests/neg-custom-args/captures/sealed-leaks.scala b/tests/neg-custom-args/captures/sealed-leaks.scala new file mode 100644 index 000000000000..bf46b52194c1 --- /dev/null +++ b/tests/neg-custom-args/captures/sealed-leaks.scala @@ -0,0 +1,20 @@ + +import java.io.* +def Test2 = + + def usingLogFile[sealed T](op: FileOutputStream^ => T): T = + val logFile = FileOutputStream("log") + val result = op(logFile) + logFile.close() + result + + val later = usingLogFile { f => () => f.write(0) } // error + val later2 = usingLogFile[(() => Unit) | Null] { f => () => f.write(0) } // error + + var x: (FileOutputStream^) | Null = null // error + def foo(f: FileOutputStream^, g: FileOutputStream^) = + var y = if ??? 
then f else g // error + + usingLogFile { f => x = f } + + later() \ No newline at end of file diff --git a/tests/neg-custom-args/captures/stack-alloc.scala b/tests/neg-custom-args/captures/stack-alloc.scala index b646c0736f2c..71b544dbe88d 100644 --- a/tests/neg-custom-args/captures/stack-alloc.scala +++ b/tests/neg-custom-args/captures/stack-alloc.scala @@ -5,7 +5,7 @@ class Pooled val stack = mutable.ArrayBuffer[Pooled]() var nextFree = 0 -def withFreshPooled[T](op: ({*} Pooled) => T): T = +def withFreshPooled[sealed T](op: Pooled^ => T): T = if nextFree >= stack.size then stack.append(new Pooled) val pooled = stack(nextFree) nextFree = nextFree + 1 diff --git a/tests/neg-custom-args/captures/try.check b/tests/neg-custom-args/captures/try.check index d4bcc859d256..4af370bfba1a 100644 --- a/tests/neg-custom-args/captures/try.check +++ b/tests/neg-custom-args/captures/try.check @@ -1,17 +1,15 @@ --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/try.scala:23:49 ------------------------------------------ +-- Error: tests/neg-custom-args/captures/try.scala:23:16 --------------------------------------------------------------- 23 | val a = handle[Exception, CanThrow[Exception]] { // error - | ^ - | Found: ? ({*} CT[Exception]) -> CanThrow[Exception] - | Required: {*} CanThrow[Exception] -> box {*} CT[Exception] -24 | (x: CanThrow[Exception]) => x -25 | }{ - | - | longer explanation available when compiling with `-explain` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Sealed type variable R cannot be instantiated to box CT[Exception]^ since + | that type captures the root capability `cap`. + | This is often caused by a local capability in the body of method handle + | leaking as part of its result. -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/try.scala:29:43 ------------------------------------------ 29 | val b = handle[Exception, () -> Nothing] { // error | ^ - | Found: ? (x: {*} CT[Exception]) -> {x} () -> Nothing - | Required: {*} (x$0: CanThrow[Exception]) -> () -> Nothing + | Found: (x: CT[Exception]^) ->? () ->{x} Nothing + | Required: (x$0: CanThrow[Exception]) => () -> Nothing 30 | (x: CanThrow[Exception]) => () => raise(new Exception)(using x) 31 | } { | @@ -22,23 +20,18 @@ 49 | () => 50 | raise(new Exception)(using x) 51 | 22 -52 |} { // error +52 |} { // error | ^ - | Found: {x$0} () -> Int + | Found: () ->{x$0} Int | Required: () -> Int 53 | (ex: Exception) => () => 22 54 |} | | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/try.scala:40:4 ---------------------------------------------------------------- -35 | val xx = handle { -36 | (x: CanThrow[Exception]) => -37 | () => -38 | raise(new Exception)(using x) -39 | 22 -40 | } { // error - | ^ - | The expression's type box {x$0, *} () -> Int is not allowed to capture the root capability `*`. - | This usually means that a capability persists longer than its allowed lifetime. -41 | (ex: Exception) => () => 22 -42 | } +-- Error: tests/neg-custom-args/captures/try.scala:35:11 --------------------------------------------------------------- +35 | val xx = handle { // error + | ^^^^^^ + | Sealed type variable R cannot be instantiated to box () => Int since + | that type captures the root capability `cap`. + | This is often caused by a local capability in the body of method handle + | leaking as part of its result. 
diff --git a/tests/neg-custom-args/captures/try.scala b/tests/neg-custom-args/captures/try.scala index 9489766d41be..3c6f0605d8b9 100644 --- a/tests/neg-custom-args/captures/try.scala +++ b/tests/neg-custom-args/captures/try.scala @@ -2,8 +2,8 @@ import annotation.retains import language.experimental.erasedDefinitions class CT[E <: Exception] -type CanThrow[E <: Exception] = CT[E] @retains(caps.*) -type Top = Any @retains(caps.*) +type CanThrow[E <: Exception] = CT[E] @retains(caps.cap) +type Top = Any @retains(caps.cap) infix type throws[R, E <: Exception] = (erased CanThrow[E]) ?=> R @@ -14,7 +14,7 @@ def raise[E <: Exception](e: E): Nothing throws E = throw e def foo(x: Boolean): Int throws Fail = if x then 1 else raise(Fail()) -def handle[E <: Exception, R <: Top](op: CanThrow[E] => R)(handler: E => R): R = +def handle[E <: Exception, sealed R <: Top](op: CanThrow[E] => R)(handler: E => R): R = val x: CanThrow[E] = ??? try op(x) catch case ex: E => handler(ex) @@ -32,12 +32,12 @@ def test = (ex: Exception) => ??? } - val xx = handle { + val xx = handle { // error (x: CanThrow[Exception]) => () => raise(new Exception)(using x) 22 - } { // error + } { (ex: Exception) => () => 22 } val yy = xx :: Nil @@ -49,6 +49,6 @@ val global: () -> Int = handle { () => raise(new Exception)(using x) 22 -} { // error +} { // error (ex: Exception) => () => 22 } diff --git a/tests/neg-custom-args/captures/try3.scala b/tests/neg-custom-args/captures/try3.scala index 8c5bc18bf3be..4c6835353c3f 100644 --- a/tests/neg-custom-args/captures/try3.scala +++ b/tests/neg-custom-args/captures/try3.scala @@ -1,10 +1,10 @@ import java.io.IOException class CT[E] -type CanThrow[E] = {*} CT[E] -type Top = {*} Any +type CanThrow[E] = CT[E]^ +type Top = Any^ -def handle[E <: Exception, T <: Top](op: CanThrow[E] ?=> T)(handler: E => T): T = +def handle[E <: Exception, sealed T <: Top](op: CanThrow[E] ?=> T)(handler: E => T): T = val x: CanThrow[E] = ??? try op(using x) catch case ex: E => handler(ex) @@ -14,12 +14,12 @@ def raise[E <: Exception](ex: E)(using CanThrow[E]): Nothing = @main def Test: Int = def f(a: Boolean) = - handle { + handle { // error if !a then raise(IOException()) (b: Boolean) => if !b then raise(IOException()) 0 - } { // error + } { ex => (b: Boolean) => -1 } val g = f(true) diff --git a/tests/neg-custom-args/captures/unbox.scala b/tests/neg-custom-args/captures/unbox.scala index c615cf1d9176..33702a954068 100644 --- a/tests/neg-custom-args/captures/unbox.scala +++ b/tests/neg-custom-args/captures/unbox.scala @@ -1,4 +1,5 @@ -type Proc = {*} () => Unit +import language.`3.2` +type Proc = () => Unit val xs: List[Proc] = ??? diff --git a/tests/neg-custom-args/captures/usingLogFile-alt.check b/tests/neg-custom-args/captures/usingLogFile-alt.check new file mode 100644 index 000000000000..31e97b7dfda1 --- /dev/null +++ b/tests/neg-custom-args/captures/usingLogFile-alt.check @@ -0,0 +1,7 @@ +-- Error: tests/neg-custom-args/captures/usingLogFile-alt.scala:18:2 --------------------------------------------------- +18 | usingFile( // error + | ^^^^^^^^^ + | Sealed type variable T cannot be instantiated to box () => Unit since + | that type captures the root capability `cap`. + | This is often caused by a local capability in the body of method usingFile + | leaking as part of its result. 
diff --git a/tests/neg-custom-args/captures/usingLogFile-alt.scala b/tests/neg-custom-args/captures/usingLogFile-alt.scala new file mode 100644 index 000000000000..6b529ee6f892 --- /dev/null +++ b/tests/neg-custom-args/captures/usingLogFile-alt.scala @@ -0,0 +1,23 @@ +// Reported in issue #17517 + +import language.experimental.captureChecking +import java.io.* + +object Test: + class Logger(f: OutputStream^): + def log(msg: String): Unit = ??? + + def usingFile[sealed T](name: String, op: OutputStream^ => T): T = + val f = new FileOutputStream(name) + val result = op(f) + f.close() + result + + def usingLogger[sealed T](f: OutputStream^)(op: Logger^{f} => T): T = ??? + + usingFile( // error + "foo", + file => { + usingLogger(file)(l => () => l.log("test")) + } + ) diff --git a/tests/neg-custom-args/captures/usingLogFile.check b/tests/neg-custom-args/captures/usingLogFile.check index 05fb385a64f7..d3bc9082202c 100644 --- a/tests/neg-custom-args/captures/usingLogFile.check +++ b/tests/neg-custom-args/captures/usingLogFile.check @@ -1,39 +1,47 @@ --- Error: tests/neg-custom-args/captures/usingLogFile.scala:33:2 ------------------------------------------------------- -33 | later3() // error - | ^^^^^^ - | box {*} () -> Unit cannot be box-converted to a type that can be selected or applied - | since one of their capture sets contains the root capability `*` --- Error: tests/neg-custom-args/captures/usingLogFile.scala:37:9 ------------------------------------------------------- -37 | later4.x() // error - | ^^^^^^^^ - | The expression's type box {*} () -> Unit is not allowed to capture the root capability `*`. - | This usually means that a capability persists longer than its allowed lifetime. --- Error: tests/neg-custom-args/captures/usingLogFile.scala:23:6 ------------------------------------------------------- +-- Error: tests/neg-custom-args/captures/usingLogFile.scala:31:6 ------------------------------------------------------- +31 | var later3: () => Unit = () => () // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Mutable variable later3 cannot have type box () => Unit since + | that type captures the root capability `cap`. + | This restriction serves to prevent local capabilities from escaping the scope where they are defined. +-- Error: tests/neg-custom-args/captures/usingLogFile.scala:35:6 ------------------------------------------------------- +35 | var later4: Cell[() => Unit] = Cell(() => ()) // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Mutable variable later4 cannot have type Test2.Cell[() => Unit] since + | the part () => Unit of that type captures the root capability `cap`. + | This restriction serves to prevent local capabilities from escaping the scope where they are defined. +-- Error: tests/neg-custom-args/captures/usingLogFile.scala:23:14 ------------------------------------------------------ 23 | val later = usingLogFile { f => () => f.write(0) } // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | Non-local value later cannot have an inferred type - | {x$0} () -> Unit - | with non-empty capture set {x$0}. - | The type needs to be declared explicitly. --- Error: tests/neg-custom-args/captures/usingLogFile.scala:29:9 ------------------------------------------------------- -29 | later2.x() // error - | ^^^^^^^^ - | The expression's type box {x$0, *} () -> Unit is not allowed to capture the root capability `*`. - | This usually means that a capability persists longer than its allowed lifetime. 
+ | ^^^^^^^^^^^^ + | Sealed type variable T cannot be instantiated to box () => Unit since + | that type captures the root capability `cap`. + | This is often caused by a local capability in the body of method usingLogFile + | leaking as part of its result. +-- Error: tests/neg-custom-args/captures/usingLogFile.scala:28:23 ------------------------------------------------------ +28 | private val later2 = usingLogFile { f => Cell(() => f.write(0)) } // error + | ^^^^^^^^^^^^ + | Sealed type variable T cannot be instantiated to box Test2.Cell[() => Unit]^? since + | the part () => Unit of that type captures the root capability `cap`. + | This is often caused by a local capability in the body of method usingLogFile + | leaking as part of its result. -- Error: tests/neg-custom-args/captures/usingLogFile.scala:47:6 ------------------------------------------------------- 47 | val later = usingLogFile { f => () => f.write(0) } // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | Non-local value later cannot have an inferred type - | {x$0} () -> Unit - | with non-empty capture set {x$0}. + | () => Unit + | with non-empty capture set {x$0, cap}. | The type needs to be declared explicitly. --- Error: tests/neg-custom-args/captures/usingLogFile.scala:62:25 ------------------------------------------------------ +-- Error: tests/neg-custom-args/captures/usingLogFile.scala:62:16 ------------------------------------------------------ 62 | val later = usingFile("out", f => (y: Int) => xs.foreach(x => f.write(x + y))) // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | The expression's type box {x$0, *} (x$0: Int) -> Unit is not allowed to capture the root capability `*`. - | This usually means that a capability persists longer than its allowed lifetime. --- Error: tests/neg-custom-args/captures/usingLogFile.scala:71:25 ------------------------------------------------------ -71 | val later = usingFile("logfile", usingLogger(_, l => () => l.log("test"))) // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | The expression's type box {x$0, *} () -> Unit is not allowed to capture the root capability `*`. - | This usually means that a capability persists longer than its allowed lifetime. + | ^^^^^^^^^ + | Sealed type variable T cannot be instantiated to box (x$0: Int) => Unit since + | that type captures the root capability `cap`. + | This is often caused by a local capability in the body of method usingFile + | leaking as part of its result. +-- Error: tests/neg-custom-args/captures/usingLogFile.scala:71:16 ------------------------------------------------------ +71 | val later = usingFile("logfile", // error + | ^^^^^^^^^ + | Sealed type variable T cannot be instantiated to box () => Unit since + | that type captures the root capability `cap`. + | This is often caused by a local capability in the body of method usingFile + | leaking as part of its result. 
diff --git a/tests/neg-custom-args/captures/usingLogFile.scala b/tests/neg-custom-args/captures/usingLogFile.scala index 8b367239050d..e7c23573ca6e 100644 --- a/tests/neg-custom-args/captures/usingLogFile.scala +++ b/tests/neg-custom-args/captures/usingLogFile.scala @@ -3,7 +3,7 @@ import annotation.capability object Test1: - def usingLogFile[T](op: FileOutputStream => T): T = + def usingLogFile[sealed T](op: FileOutputStream => T): T = val logFile = FileOutputStream("log") val result = op(logFile) logFile.close() @@ -14,7 +14,7 @@ object Test1: object Test2: - def usingLogFile[T](op: ({*} FileOutputStream) => T): T = + def usingLogFile[sealed T](op: FileOutputStream^ => T): T = val logFile = FileOutputStream("log") val result = op(logFile) logFile.close() @@ -25,20 +25,20 @@ object Test2: class Cell[+T](val x: T) - private val later2 = usingLogFile { f => Cell(() => f.write(0)) } - later2.x() // error + private val later2 = usingLogFile { f => Cell(() => f.write(0)) } // error + later2.x() - var later3: () => Unit = () => () + var later3: () => Unit = () => () // error usingLogFile { f => later3 = () => f.write(0) } - later3() // error + later3() - var later4: Cell[() => Unit] = Cell(() => ()) + var later4: Cell[() => Unit] = Cell(() => ()) // error usingLogFile { f => later4 = Cell(() => f.write(0)) } - later4.x() // error + later4.x() object Test3: - def usingLogFile[T](op: ({*} FileOutputStream) => T) = + def usingLogFile[sealed T](op: FileOutputStream^ => T) = val logFile = FileOutputStream("log") val result = op(logFile) logFile.close() @@ -47,10 +47,10 @@ object Test3: val later = usingLogFile { f => () => f.write(0) } // error object Test4: - class Logger(f: {*} OutputStream): + class Logger(f: OutputStream^): def log(msg: String): Unit = ??? - def usingFile[T](name: String, op: ({*} OutputStream) => T): T = + def usingFile[sealed T](name: String, op: OutputStream^ => T): T = val f = new FileOutputStream(name) val result = op(f) f.close() @@ -63,10 +63,11 @@ object Test4: later(1) - def usingLogger[T](f: {*} OutputStream, op: ({f} Logger) => T): T = + def usingLogger[sealed T](f: OutputStream^, op: Logger^{f} => T): T = val logger = Logger(f) op(logger) def test = - val later = usingFile("logfile", usingLogger(_, l => () => l.log("test"))) // error + val later = usingFile("logfile", // error + usingLogger(_, l => () => l.log("test"))) // ok, since we can widen `l` to `file` instead of to `cap` later() diff --git a/tests/neg-custom-args/captures/vars.check b/tests/neg-custom-args/captures/vars.check index 4b9ab5723ce6..e7055c810bb0 100644 --- a/tests/neg-custom-args/captures/vars.check +++ b/tests/neg-custom-args/captures/vars.check @@ -1,32 +1,26 @@ +-- Error: tests/neg-custom-args/captures/vars.scala:13:6 --------------------------------------------------------------- +13 | var a: String => String = f // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Mutable variable a cannot have type box String => String since + | that type captures the root capability `cap`. + | This restriction serves to prevent local capabilities from escaping the scope where they are defined. +-- Error: tests/neg-custom-args/captures/vars.scala:14:6 --------------------------------------------------------------- +14 | var b: List[String => String] = Nil // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Mutable variable b cannot have type List[String => String] since + | the part String => String of that type captures the root capability `cap`. 
+ | This restriction serves to prevent local capabilities from escaping the scope where they are defined. -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/vars.scala:11:24 ----------------------------------------- 11 | val z2c: () -> Unit = z2 // error | ^^ - | Found: {z2} () -> Unit + | Found: () ->{z2} Unit | Required: () -> Unit | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/vars.scala:15:10 ----------------------------------------- -15 | val u = a // error - | ^ - | Found: (a : box {*} String -> String) - | Required: {*} (x$0: String) -> String - | - | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/vars.scala:16:2 --------------------------------------------------------------- -16 | a("") // error - | ^ - | box {*} String -> String cannot be box-converted to a type that can be selected or applied - | since one of their capture sets contains the root capability `*` --- Error: tests/neg-custom-args/captures/vars.scala:17:4 --------------------------------------------------------------- -17 | b.head // error - | ^^^^^^ - | The expression's type box {*} String -> String is not allowed to capture the root capability `*`. - | This usually means that a capability persists longer than its allowed lifetime. --- Error: tests/neg-custom-args/captures/vars.scala:32:8 --------------------------------------------------------------- +-- Error: tests/neg-custom-args/captures/vars.scala:32:2 --------------------------------------------------------------- 32 | local { cap3 => // error - | ^ - | The expression's type box {x$0, *} (x$0: String) -> String is not allowed to capture the root capability `*`. - | This usually means that a capability persists longer than its allowed lifetime. -33 | def g(x: String): String = if cap3 == cap3 then "" else "a" -34 | g -35 | } + | ^^^^^ + | Sealed type variable T cannot be instantiated to box (x$0: String) => String since + | that type captures the root capability `cap`. + | This is often caused by a local capability in the body of method local + | leaking as part of its result. 
diff --git a/tests/neg-custom-args/captures/vars.scala b/tests/neg-custom-args/captures/vars.scala index 2ad8fec53619..b7761952167e 100644 --- a/tests/neg-custom-args/captures/vars.scala +++ b/tests/neg-custom-args/captures/vars.scala @@ -1,20 +1,20 @@ class CC -type Cap = {*} CC +type Cap = CC^ def test(cap1: Cap, cap2: Cap) = def f(x: String): String = if cap1 == cap1 then "" else "a" var x = f val y = x val z = () => if x("") == "" then "a" else "b" - val zc: {cap1} () -> String = z + val zc: () ->{cap1} String = z val z2 = () => { x = identity } val z2c: () -> Unit = z2 // error - var a: String => String = f // was error, now OK - var b: List[String => String] = Nil // was error, now OK - val u = a // error - a("") // error - b.head // error + var a: String => String = f // error + var b: List[String => String] = Nil // error + val u = a // was error, now ok + a("") // was error, now ok + b.head // was error, now ok def scope = val cap3: Cap = CC() @@ -27,7 +27,7 @@ def test(cap1: Cap, cap2: Cap) = val s = scope val sc: String => String = scope - def local[T](op: Cap -> T): T = op(CC()) + def local[sealed T](op: Cap -> T): T = op(CC()) local { cap3 => // error def g(x: String): String = if cap3 == cap3 then "" else "a" @@ -35,7 +35,7 @@ def test(cap1: Cap, cap2: Cap) = } class Ref: - var elem: {cap1} String -> String = null + var elem: String ->{cap1} String = null val r = Ref() r.elem = f diff --git a/tests/neg-custom-args/deprecation/14034b.scala b/tests/neg-custom-args/deprecation/14034b.scala index d22a945fe10d..07960bba9574 100644 --- a/tests/neg-custom-args/deprecation/14034b.scala +++ b/tests/neg-custom-args/deprecation/14034b.scala @@ -9,6 +9,6 @@ type Foo0 = Exp // error type Foo = Option[Exp] // error type Bar = Option[exp.type] // error type Baz = Exp | Int // error -type Quux = [X] =>> X match // error - case Exp => Int +type Quux = [X] =>> X match + case Exp => Int // error type Quuz[A <: Exp] = Int // error diff --git a/tests/neg-custom-args/deprecation/i11022.check b/tests/neg-custom-args/deprecation/i11022.check new file mode 100644 index 000000000000..464f2827c49e --- /dev/null +++ b/tests/neg-custom-args/deprecation/i11022.check @@ -0,0 +1,20 @@ +-- Error: tests/neg-custom-args/deprecation/i11022.scala:8:7 ----------------------------------------------------------- +8 |val a: CaseClass = CaseClass(42) // error: deprecated type // error: deprecated apply method + | ^^^^^^^^^ + | class CaseClass is deprecated: no CaseClass +-- Error: tests/neg-custom-args/deprecation/i11022.scala:8:19 ---------------------------------------------------------- +8 |val a: CaseClass = CaseClass(42) // error: deprecated type // error: deprecated apply method + | ^^^^^^^^^ + | class CaseClass is deprecated: no CaseClass +-- Error: tests/neg-custom-args/deprecation/i11022.scala:9:7 ----------------------------------------------------------- +9 |val b: CaseClass = new CaseClass(42) // error: deprecated type // error: deprecated class + | ^^^^^^^^^ + | class CaseClass is deprecated: no CaseClass +-- Error: tests/neg-custom-args/deprecation/i11022.scala:9:23 ---------------------------------------------------------- +9 |val b: CaseClass = new CaseClass(42) // error: deprecated type // error: deprecated class + | ^^^^^^^^^ + | class CaseClass is deprecated: no CaseClass +-- Error: tests/neg-custom-args/deprecation/i11022.scala:10:14 --------------------------------------------------------- +10 |val c: Unit = CaseClass(42).magic() // error: deprecated apply method + | ^^^^^^^^^ + | class CaseClass 
is deprecated: no CaseClass diff --git a/tests/neg-custom-args/deprecation/i11022.scala b/tests/neg-custom-args/deprecation/i11022.scala new file mode 100644 index 000000000000..4608017eeed9 --- /dev/null +++ b/tests/neg-custom-args/deprecation/i11022.scala @@ -0,0 +1,11 @@ +@deprecated("no CaseClass") +case class CaseClass(rgb: Int): + def magic(): Unit = () + +object CaseClass: + def notDeprecated(): Unit = () + +val a: CaseClass = CaseClass(42) // error: deprecated type // error: deprecated apply method +val b: CaseClass = new CaseClass(42) // error: deprecated type // error: deprecated class +val c: Unit = CaseClass(42).magic() // error: deprecated apply method +val d: Unit = CaseClass.notDeprecated() // compiles diff --git a/tests/neg-custom-args/erased/by-name.scala b/tests/neg-custom-args/erased/by-name.scala new file mode 100644 index 000000000000..707cfd96734b --- /dev/null +++ b/tests/neg-custom-args/erased/by-name.scala @@ -0,0 +1,4 @@ +def f(x: => Int, erased y: => Int) = x // error +def g(erased x: => Int, y: => Int) = y // error + +val h: (erased => Int, Int) => Int = (erased x, y) => y // error diff --git a/tests/neg-custom-args/erased/erased-in-tuples.scala b/tests/neg-custom-args/erased/erased-in-tuples.scala new file mode 100644 index 000000000000..11a251c3bd4d --- /dev/null +++ b/tests/neg-custom-args/erased/erased-in-tuples.scala @@ -0,0 +1,16 @@ +@main def Test() = + val x = 5 + val y = 7 + + val t1 = (x, erased y) // error + val t2 = (erased x, y) // error + val t1a = (x: Int, erased y: Int) // error + val t2a = (erased x: Int, y: Int) // error + + val nest = (x, (x, erased y)) // error + + def use(f: (Int, Int) => Any) = f(5, 6) + + use((_, erased _)) // error + + (x, erased y) // error diff --git a/tests/neg-custom-args/erased/lambda-infer.scala b/tests/neg-custom-args/erased/lambda-infer.scala new file mode 100644 index 000000000000..2eebf8186b0d --- /dev/null +++ b/tests/neg-custom-args/erased/lambda-infer.scala @@ -0,0 +1,23 @@ +type F = (Int, erased Int) => Int + +erased class A + +@main def Test() = + val a: F = (x, y) => x + 1 // error: Expected F got (Int, Int) => Int + val b: F = (x, erased y) => x + 1 // ok + val c: F = (_, _) => 5 // error: Expected F got (Int, Int) => Int + val d: F = (_, erased _) => 5 // ok + + def use(f: F) = f(5, 6) + + use { (x, y) => x } // error: Expected F got (Int, Int) => Int + + def singleParam(f: (erased Int) => Int) = f(5) + + singleParam(x => 5) // error: Expected (erased Int) => Int got Int => Int + singleParam((erased x) => 5) // ok + + def erasedClass(f: A => Int) = f(new A) + + erasedClass(_ => 5) // ok since A is implicitly erased + diff --git a/tests/neg-custom-args/erased/multiple-args-consume.scala b/tests/neg-custom-args/erased/multiple-args-consume.scala new file mode 100644 index 000000000000..e4aaacca8969 --- /dev/null +++ b/tests/neg-custom-args/erased/multiple-args-consume.scala @@ -0,0 +1,13 @@ +def foo(erased x: Int, y: Int) = y +def bar(x: Int, erased y: Int) = x + +def consumeFoo(f: (erased x: Int, y: Int) => Int) = f(0, 1) + +val fooF: (erased x: Int, y: Int) => Int = foo +val barF: (x: Int, erased y: Int) => Int = bar + +val a = consumeFoo(foo) // ok +val b = consumeFoo(bar) // error + +val c = consumeFoo(fooF) // ok +val d = consumeFoo(barF) // error diff --git a/tests/neg-custom-args/erased/multiple-args.scala b/tests/neg-custom-args/erased/multiple-args.scala new file mode 100644 index 000000000000..fb9bce8e4573 --- /dev/null +++ b/tests/neg-custom-args/erased/multiple-args.scala @@ -0,0 +1,11 @@ +def 
foo(x: Int, erased y: Int): Int = x +def bar(erased x: Int, y: Int): Int = y + +val fooF: (x: Int, erased y: Int) => Int = foo + +val fooG: (erased x: Int, y: Int) => Int = foo // error + +val barF: (x: Int, erased y: Int) => Int = bar // error + +val barG: (erased x: Int, y: Int) => Int = bar + diff --git a/tests/neg-custom-args/erased/poly-functions.scala b/tests/neg-custom-args/erased/poly-functions.scala new file mode 100644 index 000000000000..000a2ca49cc9 --- /dev/null +++ b/tests/neg-custom-args/erased/poly-functions.scala @@ -0,0 +1,16 @@ +object Test: + // Poly functions with erased parameters are disallowed as an implementation restriction + + type T1 = [X] => (erased x: X, y: Int) => Int // error + type T2 = [X] => (x: X, erased y: Int) => X // error + + val t1 = [X] => (erased x: X, y: Int) => y // error + val t2 = [X] => (x: X, erased y: Int) => x // error + + // Erased classes should be detected too + erased class A + + type T3 = [X] => (x: A, y: X) => X // error + + val t3 = [X] => (x: A, y: X) => y // error + diff --git a/tests/neg-custom-args/explain/constructor-proxy-shadowing.check b/tests/neg-custom-args/explain/constructor-proxy-shadowing.check new file mode 100644 index 000000000000..db223ba33640 --- /dev/null +++ b/tests/neg-custom-args/explain/constructor-proxy-shadowing.check @@ -0,0 +1,75 @@ +-- [E177] Reference Error: tests/neg-custom-args/explain/constructor-proxy-shadowing.scala:10:12 ----------------------- +10 | val x = A22("") // error: shadowing + | ^^^ + | Reference to constructor proxy for class A22 in class A + | shadows outer reference to method A22 in object Test + | + | The instance needs to be created with an explicit `new`. + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | There is an ambiguity in the meaning of the call + | + | A22(...) + | + | It could mean creating an instance of class A22 in class A with + | + | new A22(...) + | + | Or it could mean calling method A22 in object Test as in + | + | A22(...) + | + | To disambiguate, use an explicit `new` if you mean the former, + | or use a full prefix for A22 if you mean the latter. + -------------------------------------------------------------------------------------------------------------------- +-- [E177] Reference Error: tests/neg-custom-args/explain/constructor-proxy-shadowing.scala:11:12 ----------------------- +11 | val y = A33("") // error: shadowing + | ^^^ + | Reference to constructor proxy for class A33 in class A + | shadows outer reference to object A33 in object Test + | + | The instance needs to be created with an explicit `new`. + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | There is an ambiguity in the meaning of the call + | + | A33(...) + | + | It could mean creating an instance of class A33 in class A with + | + | new A33(...) + | + | Or it could mean calling the apply method of object A33 in object Test as in + | + | A33.apply(...) + | + | To disambiguate, use an explicit `new` if you mean the former, + | or use a full prefix for A33 if you mean the latter. 
+ -------------------------------------------------------------------------------------------------------------------- +-- [E177] Reference Error: tests/neg-custom-args/explain/constructor-proxy-shadowing.scala:16:8 ------------------------ +16 |val x = Seq(3) // error: shadowing + | ^^^ + | Reference to constructor proxy for class Seq + | shadows outer reference to getter Seq in package scala + | + | The instance needs to be created with an explicit `new`. + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | There is an ambiguity in the meaning of the call + | + | Seq(...) + | + | It could mean creating an instance of class Seq with + | + | new Seq(...) + | + | Or it could mean calling the apply method of getter Seq in package scala as in + | + | Seq.apply(...) + | + | To disambiguate, use an explicit `new` if you mean the former, + | or use a full prefix for Seq if you mean the latter. + -------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg-custom-args/explain/constructor-proxy-shadowing.scala b/tests/neg-custom-args/explain/constructor-proxy-shadowing.scala new file mode 100644 index 000000000000..c47fc2f4859b --- /dev/null +++ b/tests/neg-custom-args/explain/constructor-proxy-shadowing.scala @@ -0,0 +1,16 @@ + +object Test extends App { + def A22(s: String): String = s + class A33(s: String) + object A33: + def apply(s: String) = ??? + class A { + class A22(s: String) + class A33(s: String) + val x = A22("") // error: shadowing + val y = A33("") // error: shadowing + } +} + +class Seq(n: Int) +val x = Seq(3) // error: shadowing diff --git a/tests/neg-custom-args/explain/hidden-type-errors.check b/tests/neg-custom-args/explain/hidden-type-errors.check new file mode 100644 index 000000000000..551d1d7b16ba --- /dev/null +++ b/tests/neg-custom-args/explain/hidden-type-errors.check @@ -0,0 +1,23 @@ +-- [E007] Type Mismatch Error: tests/neg-custom-args/explain/hidden-type-errors/Test.scala:6:24 ------------------------ +6 | val x = X.doSomething("XXX") // error + | ^^^^^^^^^^^^^^^^^^^^ + | Found: String + | Required: Int + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | + | Tree: t12717.A.bar("XXX") + | I tried to show that + | String + | conforms to + | Int + | but the comparison trace ended with `false`: + | + | ==> String <: Int + | ==> String <: Int + | <== String <: Int = false + | <== String <: Int = false + | + | The tests were made under the empty constraint + --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg-custom-args/hidden-type-errors/Macro.scala b/tests/neg-custom-args/explain/hidden-type-errors/Macro.scala similarity index 100% rename from tests/neg-custom-args/hidden-type-errors/Macro.scala rename to tests/neg-custom-args/explain/hidden-type-errors/Macro.scala diff --git a/tests/neg-custom-args/hidden-type-errors/Test.scala b/tests/neg-custom-args/explain/hidden-type-errors/Test.scala similarity index 100% rename from 
tests/neg-custom-args/hidden-type-errors/Test.scala rename to tests/neg-custom-args/explain/hidden-type-errors/Test.scala diff --git a/tests/neg-custom-args/i11637.check b/tests/neg-custom-args/explain/i11637.check similarity index 92% rename from tests/neg-custom-args/i11637.check rename to tests/neg-custom-args/explain/i11637.check index 0664a05f4f86..82424396a43b 100644 --- a/tests/neg-custom-args/i11637.check +++ b/tests/neg-custom-args/explain/i11637.check @@ -1,4 +1,4 @@ --- [E057] Type Mismatch Error: tests/neg-custom-args/i11637.scala:11:33 ------------------------------------------------ +-- [E057] Type Mismatch Error: tests/neg-custom-args/explain/i11637.scala:11:33 ---------------------------------------- 11 | var h = new HKT3_1[FunctorImpl](); // error // error | ^ | Type argument test2.FunctorImpl does not conform to upper bound [Generic2[T <: String] <: Set[T]] =>> Any @@ -26,7 +26,7 @@ | | The tests were made under the empty constraint -------------------------------------------------------------------------------------------------------------------- --- [E057] Type Mismatch Error: tests/neg-custom-args/i11637.scala:11:21 ------------------------------------------------ +-- [E057] Type Mismatch Error: tests/neg-custom-args/explain/i11637.scala:11:21 ---------------------------------------- 11 | var h = new HKT3_1[FunctorImpl](); // error // error | ^ | Type argument test2.FunctorImpl does not conform to upper bound [Generic2[T <: String] <: Set[T]] =>> Any diff --git a/tests/neg-custom-args/i11637.scala b/tests/neg-custom-args/explain/i11637.scala similarity index 100% rename from tests/neg-custom-args/i11637.scala rename to tests/neg-custom-args/explain/i11637.scala diff --git a/tests/neg-custom-args/i15575.check b/tests/neg-custom-args/explain/i15575.check similarity index 87% rename from tests/neg-custom-args/i15575.check rename to tests/neg-custom-args/explain/i15575.check index f69111efeb96..e254e0a5e22e 100644 --- a/tests/neg-custom-args/i15575.check +++ b/tests/neg-custom-args/explain/i15575.check @@ -1,4 +1,4 @@ --- [E057] Type Mismatch Error: tests/neg-custom-args/i15575.scala:3:27 ------------------------------------------------- +-- [E057] Type Mismatch Error: tests/neg-custom-args/explain/i15575.scala:3:27 ----------------------------------------- 3 | def bar[T]: Unit = foo[T & Any] // error | ^ | Type argument T & Any does not conform to lower bound Any @@ -18,7 +18,7 @@ | | The tests were made under the empty constraint --------------------------------------------------------------------------------------------------------------------- --- [E057] Type Mismatch Error: tests/neg-custom-args/i15575.scala:7:14 ------------------------------------------------- +-- [E057] Type Mismatch Error: tests/neg-custom-args/explain/i15575.scala:7:14 ----------------------------------------- 7 | val _ = foo[String] // error | ^ | Type argument String does not conform to lower bound CharSequence diff --git a/tests/neg-custom-args/i15575.scala b/tests/neg-custom-args/explain/i15575.scala similarity index 100% rename from tests/neg-custom-args/i15575.scala rename to tests/neg-custom-args/explain/i15575.scala diff --git a/tests/neg-custom-args/i16601a.check b/tests/neg-custom-args/explain/i16601a.check similarity index 89% rename from tests/neg-custom-args/i16601a.check rename to tests/neg-custom-args/explain/i16601a.check index 604f71993ada..63be0d2cd2b2 100644 --- a/tests/neg-custom-args/i16601a.check +++ b/tests/neg-custom-args/explain/i16601a.check @@ -1,4 +1,4 @@ --- [E042] 
Type Error: tests/neg-custom-args/i16601a.scala:1:27 --------------------------------------------------------- +-- [E042] Type Error: tests/neg-custom-args/explain/i16601a.scala:1:27 ------------------------------------------------- 1 |@main def Test: Unit = new concurrent.ExecutionContext // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ | ExecutionContext is a trait; it cannot be instantiated diff --git a/tests/neg-custom-args/i16601a.scala b/tests/neg-custom-args/explain/i16601a.scala similarity index 100% rename from tests/neg-custom-args/i16601a.scala rename to tests/neg-custom-args/explain/i16601a.scala diff --git a/tests/neg-custom-args/explain/i16888.check b/tests/neg-custom-args/explain/i16888.check new file mode 100644 index 000000000000..53103576d158 --- /dev/null +++ b/tests/neg-custom-args/explain/i16888.check @@ -0,0 +1,14 @@ +-- [E172] Type Error: tests/neg-custom-args/explain/i16888.scala:1:38 -------------------------------------------------- +1 |def test = summon[scala.quoted.Quotes] // error + | ^ + | No given instance of type quoted.Quotes was found for parameter x of method summon in object Predef + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Maybe this method is missing a `(using Quotes)` parameter. + | + | Maybe that splice `$ { ... }` is missing? + | Given instances of `Quotes` are generated from an enclosing splice `$ { ... }` (or `scala.staging.run` call). + | A splice can be thought as a method with the following signature. + | def $[T](body: Quotes ?=> Expr[T]): T + --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg-custom-args/explain/i16888.scala b/tests/neg-custom-args/explain/i16888.scala new file mode 100644 index 000000000000..9d3fd0f2f57e --- /dev/null +++ b/tests/neg-custom-args/explain/i16888.scala @@ -0,0 +1 @@ +def test = summon[scala.quoted.Quotes] // error diff --git a/tests/neg-custom-args/explain/labelNotFound.check b/tests/neg-custom-args/explain/labelNotFound.check new file mode 100644 index 000000000000..594a838aeeed --- /dev/null +++ b/tests/neg-custom-args/explain/labelNotFound.check @@ -0,0 +1,10 @@ +-- [E172] Type Error: tests/neg-custom-args/explain/labelNotFound.scala:2:30 ------------------------------------------- +2 | scala.util.boundary.break(1) // error + | ^ + |No given instance of type scala.util.boundary.Label[Int] was found for parameter label of method break in object boundary + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | A Label is generated from an enclosing `scala.util.boundary` call. + | Maybe that boundary is missing? 
+ --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg-custom-args/explain/labelNotFound.scala b/tests/neg-custom-args/explain/labelNotFound.scala new file mode 100644 index 000000000000..2618600702da --- /dev/null +++ b/tests/neg-custom-args/explain/labelNotFound.scala @@ -0,0 +1,2 @@ +object Test: + scala.util.boundary.break(1) // error diff --git a/tests/neg-custom-args/fatal-warnings/i15503b.scala b/tests/neg-custom-args/fatal-warnings/i15503b.scala index 8a4a055150f9..c8a2d6bc2074 100644 --- a/tests/neg-custom-args/fatal-warnings/i15503b.scala +++ b/tests/neg-custom-args/fatal-warnings/i15503b.scala @@ -2,49 +2,91 @@ val a = 1 // OK +var cs = 3 // OK + val b = // OK + var e3 = 2 // error val e1 = 1 // error def e2 = 2 // error 1 val c = // OK - val e1 = 1 // OK + var e1 = 1 // error not set + def e2 = e1 // OK + val e3 = e2 // OK + e3 + +val g = // OK + var e1 = 1 // OK def e2 = e1 // OK - e2 + val e3 = e2 // OK + e1 = e3 // OK + e3 def d = 1 // OK def e = // OK val e1 = 1 // error def e2 = 2 // error + var e3 = 4 // error 1 def f = // OK val f1 = 1 // OK - def f2 = f1 // OK + var f2 = f1 // error not set + def f3 = f2 // OK + f3 + +def h = // OK + val f1 = 1 // OK + var f2 = f1 // OK + def f3 = f2 // OK + f2 = f3 // OK f2 class Foo { + val a = 1 // OK + + var cs = 3 // OK + val b = // OK + var e3 = 2 // error val e1 = 1 // error def e2 = 2 // error 1 val c = // OK - val e1 = 1 // OK + var e1 = 1 // error not set + def e2 = e1 // OK + val e3 = e2 // OK + e3 + + val g = // OK + var e1 = 1 // OK def e2 = e1 // OK - e2 + val e3 = e2 // OK + e1 = e3 // OK + e3 def d = 1 // OK def e = // OK val e1 = 1 // error def e2 = 2 // error + var e3 = 4 // error 1 def f = // OK val f1 = 1 // OK - def f2 = f1 // OK + var f2 = f1 // error not set + def f3 = f2 // OK + f3 + + def h = // OK + val f1 = 1 // OK + var f2 = f1 // OK + def f3 = f2 // OK + f2 = f3 // OK f2 } @@ -68,7 +110,7 @@ package foo.scala2.tests: new a.Inner } def f2 = { - var x = 100 + var x = 100 // error not set x } } @@ -89,7 +131,7 @@ package foo.scala2.tests: } package test.foo.twisted.i16682: - def myPackage = + def myPackage = object IntExtractor: // OK def unapply(s: String): Option[Int] = s.toIntOption diff --git a/tests/neg-custom-args/fatal-warnings/i15503c.scala b/tests/neg-custom-args/fatal-warnings/i15503c.scala index 630846df4e5d..e4e15116bf0d 100644 --- a/tests/neg-custom-args/fatal-warnings/i15503c.scala +++ b/tests/neg-custom-args/fatal-warnings/i15503c.scala @@ -12,12 +12,24 @@ class A: private[this] val f = e // OK private val g = f // OK + private[A] var h = 1 // OK + private[this] var i = h // error not set + private var j = i // error not set + + private[this] var k = 1 // OK + private var l = 2 // OK + private val m = // error + k = l + l = k + l + private def fac(x: Int): Int = // error if x == 0 then 1 else x * fac(x - 1) val x = 1 // OK def y = 2 // OK def z = g // OK + var w = 2 // OK package foo.test.contructors: case class A private (x:Int) // OK @@ -25,7 +37,12 @@ package foo.test.contructors: class C private (private val x: Int) // error class D private (private val x: Int): // OK def y = x - + class E private (private var x: Int): // error not set + def y = x + class F private (private var x: Int): // OK + def y = + x = 3 + x package test.foo.i16682: object myPackage: diff --git a/tests/neg-custom-args/fatal-warnings/i15503i.scala b/tests/neg-custom-args/fatal-warnings/i15503i.scala index fefead7f01a3..768e4d5c3ce0 
100644
--- a/tests/neg-custom-args/fatal-warnings/i15503i.scala
+++ b/tests/neg-custom-args/fatal-warnings/i15503i.scala
@@ -142,8 +142,8 @@ package foo.test.possibleclasses.withvar:
     private var y: Int // OK
   )(
     s: Int, // OK
-    var t: Int, // OK
-    private var z: Int // OK
+    var t: Int, // OK: global scope, can be set somewhere else
+    private var z: Int // error not set
   ) {
     def a = k + y + s + t + z
   }
@@ -159,11 +159,11 @@ package foo.test.possibleclasses.withvar:
   class AllUsed(
     k: Int, // OK
-    private var y: Int // OK
+    private var y: Int // error not set
   )(
     s: Int, // OK
-    var t: Int, // OK
-    private var z: Int // OK
+    var t: Int, // OK: global scope, can be set somewhere else
+    private var z: Int // error not set
   ) {
     def a = k + y + s + t + z
   }
@@ -299,7 +299,7 @@ package foo.test.i17175:
     }
     {
       println(i)
     }
-
+
 package foo.test.i17117:
   package example {
     object test1 {
diff --git a/tests/neg-custom-args/fatal-warnings/i15662.scala b/tests/neg-custom-args/fatal-warnings/i15662.scala
index 1d5ff21eb3ba..afe505922603 100644
--- a/tests/neg-custom-args/fatal-warnings/i15662.scala
+++ b/tests/neg-custom-args/fatal-warnings/i15662.scala
@@ -3,7 +3,6 @@ case class Composite[T](v: T)
 def m(composite: Composite[_]): Unit =
   composite match {
     case Composite[Int](v) => println(v) // error: cannot be checked at runtime
-    case _ => println("OTHER")
   }
 def m2(composite: Composite[_]): Unit =
diff --git a/tests/neg-custom-args/fatal-warnings/i16639a.scala b/tests/neg-custom-args/fatal-warnings/i16639a.scala
new file mode 100644
index 000000000000..c62910b7f566
--- /dev/null
+++ b/tests/neg-custom-args/fatal-warnings/i16639a.scala
@@ -0,0 +1,207 @@
+// scalac: -Wunused:all
+//
+class Bippy(a: Int, b: Int) {
+  private def this(c: Int) = this(c, c) // warn /Dotty:NoWarn
+  private def boop(x: Int) = x+a+b // error
+  private def bippy(x: Int): Int = bippy(x) // error TODO: could warn
+  final private val MILLIS1 = 2000 // error no warn, /Dotty:Warn
+  final private val MILLIS2: Int = 1000 // error
+  final private val HI_COMPANION: Int = 500 // no warn, accessed from companion
+  def hi() = Bippy.HI_INSTANCE
+}
+object Bippy {
+  def hi(x: Bippy) = x.HI_COMPANION
+  private val HI_INSTANCE: Int = 500 // no warn, accessed from instance
+  private val HEY_INSTANCE: Int = 1000 // error warn
+  private lazy val BOOL: Boolean = true // error warn
+}
+
+class A(val msg: String)
+class B1(msg: String) extends A(msg)
+class B2(msg0: String) extends A(msg0)
+class B3(msg0: String) extends A("msg") // error /Dotty: unused explicit parameter
+
+trait Bing
+
+trait Accessors {
+  private var v1: Int = 0 // error warn
+  private var v2: Int = 0 // error warn, never set
+  private var v3: Int = 0 // warn, never got /Dotty: no warn even if not used
+  private var v4: Int = 0 // no warn
+
+  private[this] var v5 = 0 // error warn, never set
+  private[this] var v6 = 0 // warn, never got /Dotty: no warn even if not used
+  private[this] var v7 = 0 // no warn
+
+  def bippy(): Int = {
+    v3 = 3
+    v4 = 4
+    v6 = 6
+    v7 = 7
+    v2 + v4 + v5 + v7
+  }
+}
+
+class StableAccessors {
+  private var s1: Int = 0 // error warn
+  private var s2: Int = 0 // error warn, never set
+  private var s3: Int = 0 // warn, never got /Dotty: no warn even if not used
+  private var s4: Int = 0 // no warn
+
+  private[this] var s5 = 0 // error warn, never set
+  private[this] var s6 = 0 // no warn, limitation /Dotty: Why a limitation?
+  private[this] var s7 = 0 // no warn
+
+  def bippy(): Int = {
+    s3 = 3
+    s4 = 4
+    s6 = 6
+    s7 = 7
+    s2 + s4 + s5 + s7
+  }
+}
+
+trait DefaultArgs {
+  // warn about default getters for x2 and x3
+  private def bippy(x1: Int, x2: Int = 10, x3: Int = 15): Int = x1 + x2 + x3 // no more warn since #17061
+
+  def boppy() = bippy(5, 100, 200)
+}
+
+
+class Outer {
+  class Inner
+}
+
+trait Locals {
+  def f0 = {
+    var x = 1 // error warn
+    var y = 2
+    y = 3
+    y + y
+  }
+  def f1 = {
+    val a = new Outer // no warn
+    val b = new Outer // error warn
+    new a.Inner
+  }
+  def f2 = {
+    var x = 100 // error warn about it being a var that is never set
+    x
+  }
+}
+
+object Types {
+  private object Dongo { def f = this } // no more warn since #17061
+  private class Bar1 // error warn
+  private class Bar2 // no warn
+  private type Alias1 = String // error warn
+  private type Alias2 = String // no warn
+  def bippo = (new Bar2).toString
+
+  def f(x: Alias2) = x.length
+
+  def l1() = {
+    object HiObject { def f = this } // no more warn since #17061
+    class Hi { // error warn
+      def f1: Hi = new Hi
+      def f2(x: Hi) = x
+    }
+    class DingDongDoobie // error warn
+    class Bippy // no warn
+    type Something = Bippy // no warn
+    type OtherThing = String // error warn
+    (new Bippy): Something
+  }
+}
+
+trait Underwarn {
+  def f(): Seq[Int]
+
+  def g() = {
+    val Seq(_, _) = f() // no warn
+    true
+  }
+}
+
+class OtherNames {
+  private def x_=(i: Int): Unit = () // no more warn since #17061
+  private def x: Int = 42 // error: Dotty triggers unused private member; to investigate
+  private def y_=(i: Int): Unit = () // no more warn since #17061
+  private def y: Int = 42
+
+  def f = y
+}
+
+
+trait Forever {
+  def f = {
+    val t = Option((17, 42))
+    for {
+      ns <- t
+      (i, j) = ns // no warn
+    } yield (i + j)
+  }
+  def g = {
+    val t = Option((17, 42))
+    for {
+      ns <- t
+      (i, j) = ns // no warn
+    } yield 42 // val emitted only if needed, hence nothing unused
+  }
+}
+
+trait Ignorance {
+  private val readResolve = 42 // error ignore /dotty triggers unused private member/ why should we ignore?
+} + +trait CaseyKasem { + def f = 42 match { + case x if x < 25 => "no warn" + case y if toString.nonEmpty => "no warn" + y + case z => "warn" + } +} +trait CaseyAtTheBat { + def f = Option(42) match { + case Some(x) if x < 25 => "no warn" + case Some(y @ _) if toString.nonEmpty => "no warn" + case Some(z) => "warn" + case None => "no warn" + } +} + +class `not even using companion privates` + +object `not even using companion privates` { + private implicit class `for your eyes only`(i: Int) { // no more warn since #17061 + def f = i + } +} + +class `no warn in patmat anonfun isDefinedAt` { + def f(pf: PartialFunction[String, Int]) = pf("42") + def g = f { + case s => s.length // no warn (used to warn case s => true in isDefinedAt) + } +} + +// this is the ordinary case, as AnyRef is an alias of Object +class `nonprivate alias is enclosing` { + class C + type C2 = C + private class D extends C2 // error warn +} + +object `classof something` { + private class intrinsically + def f = classOf[intrinsically].toString() +} + +trait `short comings` { + def f: Int = { + val x = 42 // error /Dotty only triggers in dotty + 17 + } +} + diff --git a/tests/neg-custom-args/fatal-warnings/i16728.check b/tests/neg-custom-args/fatal-warnings/i16728.check new file mode 100644 index 000000000000..a797baf19be0 --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i16728.check @@ -0,0 +1,4 @@ +-- Error: tests/neg-custom-args/fatal-warnings/i16728.scala:16:11 ------------------------------------------------------ +16 | case tx : C[Int]#X => // error + | ^ + | the type test for C[Int] cannot be checked at runtime because its type arguments can't be determined from A diff --git a/tests/neg-custom-args/fatal-warnings/i16728.scala b/tests/neg-custom-args/fatal-warnings/i16728.scala new file mode 100644 index 000000000000..42c860cc40b2 --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i16728.scala @@ -0,0 +1,32 @@ +class A[T] { + class X { + def outer : A.this.type = A.this + } +} + +class B extends A[Int] +class C[T] extends A[T] + +object Test { + def main(args: Array[String]) : Unit = { + val b0 = new B + val b0x : A[?]#X = new b0.X + + def test = b0x match { + case tx : C[Int]#X => // error + val c : C[Int] = tx.outer + c + case _ => + "no match" + } + + def test2 = b0x match { + case tx : C[Int]#X @unchecked => // ok + val c : C[Int] = tx.outer + c + case _ => + "no match" + } + + } +} \ No newline at end of file diff --git a/tests/neg-custom-args/fatal-warnings/i8711.check b/tests/neg-custom-args/fatal-warnings/i8711.check index 0035af0755d4..491d1678b5ac 100644 --- a/tests/neg-custom-args/fatal-warnings/i8711.check +++ b/tests/neg-custom-args/fatal-warnings/i8711.check @@ -6,3 +6,7 @@ 12 | case x: C => x // error | ^^^^ | Unreachable case +-- [E030] Match case Unreachable Error: tests/neg-custom-args/fatal-warnings/i8711.scala:17:9 -------------------------- +17 | case x: (B | C) => x // error + | ^^^^^^^^^^ + | Unreachable case diff --git a/tests/neg-custom-args/fatal-warnings/i8711.scala b/tests/neg-custom-args/fatal-warnings/i8711.scala index e37f7a8b039f..46fc5a85c90a 100644 --- a/tests/neg-custom-args/fatal-warnings/i8711.scala +++ b/tests/neg-custom-args/fatal-warnings/i8711.scala @@ -12,4 +12,9 @@ object Test { case x: C => x // error case _ => } + + def baz(x: A) = x match { + case x: (B | C) => x // error + case _ => + } } diff --git a/tests/neg-custom-args/fatal-warnings/nonunit-statement.scala b/tests/neg-custom-args/fatal-warnings/nonunit-statement.scala new file mode 100644 index 
000000000000..399d132edfae --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/nonunit-statement.scala @@ -0,0 +1,198 @@ +// scalac: -Wnonunit-statement -Wvalue-discard +import collection.ArrayOps +import collection.mutable.{ArrayBuilder, LinkedHashSet, ListBuffer} +import concurrent._ +import scala.reflect.ClassTag + +class C { + import ExecutionContext.Implicits._ + def c = { + def improved = Future(42) + def stale = Future(27) + improved // error + stale + } +} +class D { + def d = { + class E + new E().toString // error + new E().toString * 2 + } +} +class F { + import ExecutionContext.Implicits._ + Future(42) // error +} +// unused template expression uses synthetic method of class +case class K(s: String) { + copy() // error +} +// mutations returning this are ok +class Mutate { + val b = ListBuffer.empty[Int] + b += 42 // nowarn, returns this.type + val xs = List(42) + 27 +: xs // error + + def f(x: Int): this.type = this + def g(): Unit = f(42) // nowarn +} +// some uninteresting expressions may warn for other reasons +class WhoCares { + null // error for purity + ??? // nowarn for impurity +} +// explicit Unit ascription to opt out of warning, even for funky applies +class Absolution { + def f(i: Int): Int = i+1 + import ExecutionContext.Implicits._ + // Future(42): Unit // nowarn { F(42)(ctx) }: Unit where annot is on F(42) + // f(42): Unit // nowarn +} +// warn uni-branched unless user disables it with -Wnonunit-if:false +class Boxed[A](a: A) { + def isEmpty = false + def foreach[U](f: A => U): Unit = + if (!isEmpty) f(a) // error (if) + def forall(f: A => Boolean): Unit = + if (!isEmpty) { + println(".") + f(a) // error (if) + } + def take(p: A => Boolean): Option[A] = { + while (isEmpty || !p(a)) () + Some(a).filter(p) + } +} +class Unibranch[A, B] { + def runWith[U](action: B => U): A => Boolean = { x => + val z = null.asInstanceOf[B] + val fellback = false + if (!fellback) action(z) // error (if) + !fellback + } + def f(i: Int): Int = { + def g = 17 + if (i < 42) { + g // error block statement + println("uh oh") + g // error (if) + } + while (i < 42) { + g // error + println("uh oh") + g // error + } + 42 + } +} +class Dibranch { + def i: Int = ??? + def j: Int = ??? 
+  def f(b: Boolean): Int = {
+    // if-expr might have an uninteresting LUB
+    if (b) { // error, at least one branch looks interesting
+      println("true")
+      i
+    }
+    else {
+      println("false")
+      j
+    }
+    42
+  }
+}
+class Next[A] {
+  val all = ListBuffer.empty[A]
+  def f(it: Iterator[A], g: A => A): Unit =
+    while (it.hasNext)
+      all += g(it.next()) // nowarn
+}
+class Setting[A] {
+  def set = LinkedHashSet.empty[A]
+  def f(a: A): Unit = {
+    set += a // error because we cannot know whether the `set` was supposed to be consumed or assigned
+    println(set)
+  }
+}
+// neither StringBuilder warns, because each append either is a Java method or returns this.type
+// a while loop looks like an if branch with block1(block2, jump to label), where block2 is typed as non-unit
+class Strung {
+  def iterator = Iterator.empty[String]
+  def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder = {
+    val jsb = b.underlying
+    if (start.length != 0) jsb.append(start) // error (value-discard)
+    val it = iterator
+    if (it.hasNext) {
+      jsb.append(it.next())
+      while (it.hasNext) {
+        jsb.append(sep) // nowarn (java)
+        jsb.append(it.next()) // error (value-discard)
+      }
+    }
+    if (end.length != 0) jsb.append(end) // error (value-discard)
+    b
+  }
+  def f(b: java.lang.StringBuilder, it: Iterator[String]): String = {
+    while (it.hasNext) {
+      b.append("\n") // nowarn (java)
+      b.append(it.next()) // error (value-discard)
+    }
+    b.toString
+  }
+  def g(b: java.lang.StringBuilder, it: Iterator[String]): String = {
+    while (it.hasNext) it.next() // error
+    b.toString
+  }
+}
+class J {
+  import java.util.Collections
+  def xs: java.util.List[Int] = ???
+  def f(): Int = {
+    Collections.checkedList[Int](xs, classOf[Int])
+    42
+  }
+}
+class Variant {
+  var bs = ListBuffer.empty[Int]
+  val xs = ListBuffer.empty[Int]
+  private[this] val ys = ListBuffer.empty[Int]
+  private[this] var zs = ListBuffer.empty[Int]
+  def f(i: Int): Unit = {
+    bs.addOne(i)
+    xs.addOne(i)
+    ys.addOne(i)
+    zs.addOne(i)
+    println("done")
+  }
+}
+final class ArrayOops[A](private val xs: Array[A]) extends AnyVal {
+  def other: ArrayOps[A] = ???
+ def transpose[B](implicit asArray: A => Array[B]): Array[Array[B]] = { + val aClass = xs.getClass.getComponentType + val bb = new ArrayBuilder.ofRef[Array[B]]()(ClassTag[Array[B]](aClass)) + if (xs.length == 0) bb.result() + else { + def mkRowBuilder() = ArrayBuilder.make[B](ClassTag[B](aClass.getComponentType)) + val bs = new ArrayOps(asArray(xs(0))).map((x: B) => mkRowBuilder()) + for (xs <- other) { + var i = 0 + for (x <- new ArrayOps(asArray(xs))) { + bs(i) += x + i += 1 + } + } + for (b <- new ArrayOps(bs)) bb += b.result() + bb.result() + } + } +} +class Depends { + def f[A](a: A): a.type = a + def g() = { + val d = new Depends + f(d) + () + } +} diff --git a/tests/neg-custom-args/hidden-type-errors.check b/tests/neg-custom-args/hidden-type-errors.check deleted file mode 100644 index a373e409af2f..000000000000 --- a/tests/neg-custom-args/hidden-type-errors.check +++ /dev/null @@ -1,28 +0,0 @@ --- [E007] Type Mismatch Error: tests/neg-custom-args/hidden-type-errors/Test.scala:6:24 -------------------------------- -6 | val x = X.doSomething("XXX") // error - | ^^^^^^^^^^^^^^^^^^^^ - | Found: String - | Required: Int - | This location contains code that was inlined from Test.scala:6 - -Explanation -=========== - -Tree: t12717.A.bar("XXX") - -I tried to show that - String -conforms to - Int -but the comparison trace ended with `false`: - - ==> String <: Int - ==> String <: Int (recurring) - ==> String <: Int (recurring) - <== String <: Int (recurring) = false - <== String <: Int (recurring) = false - <== String <: Int = false - -The tests were made under the empty constraint - -1 error found diff --git a/tests/neg-custom-args/isInstanceOf/enum-approx2.scala b/tests/neg-custom-args/isInstanceOf/enum-approx2.scala index 516b765ec64b..c7c8a6c4e1fb 100644 --- a/tests/neg-custom-args/isInstanceOf/enum-approx2.scala +++ b/tests/neg-custom-args/isInstanceOf/enum-approx2.scala @@ -7,4 +7,4 @@ class Test { case Fun(x: Exp[Int => String]) => ??? // error case _ => } -} \ No newline at end of file +} diff --git a/tests/neg-custom-args/isInstanceOf/i11178.scala b/tests/neg-custom-args/isInstanceOf/i11178.scala index 0d6867eba75f..47e8b4c3acab 100644 --- a/tests/neg-custom-args/isInstanceOf/i11178.scala +++ b/tests/neg-custom-args/isInstanceOf/i11178.scala @@ -36,4 +36,4 @@ object Test3 { case _: Bar[Boolean] => ??? // error case _ => ??? 
} -} \ No newline at end of file +} diff --git a/tests/neg-custom-args/isInstanceOf/i17435.scala b/tests/neg-custom-args/isInstanceOf/i17435.scala new file mode 100644 index 000000000000..e32149db3137 --- /dev/null +++ b/tests/neg-custom-args/isInstanceOf/i17435.scala @@ -0,0 +1,23 @@ +import scala.collection.mutable + +object Test: + type JsonPrimitive = String | Int | Double | Boolean | None.type + + type Rec[JA[_], JO[_], A] = A match + case JsonPrimitive => JsonPrimitive | JA[Rec[JA, JO, JsonPrimitive]] | JO[Rec[JA, JO, JsonPrimitive]] + case _ => A | JA[Rec[JA, JO, A]] | JO[Rec[JA, JO, A]] + + type Json = Rec[[A] =>> mutable.Buffer[A], [A] =>> mutable.Map[String, A], JsonPrimitive] + + type JsonObject = mutable.Map[String, Json] + + type JsonArray = mutable.Buffer[Json] + + def encode(x: Json): Int = x match + case str: String => 1 + case b: Boolean => 2 + case i: Int => 3 + case d: Double => 4 + case arr: JsonArray => 5 // error + case obj: JsonObject => 6 // error + case _ => 7 diff --git a/tests/neg-custom-args/isInstanceOf/i5826.scala b/tests/neg-custom-args/isInstanceOf/i5826.scala index bff95e740b4f..c63bf3ab4aef 100644 --- a/tests/neg-custom-args/isInstanceOf/i5826.scala +++ b/tests/neg-custom-args/isInstanceOf/i5826.scala @@ -1,6 +1,6 @@ class Foo { - def test[A]: List[Int] | A => Int = { - case ls: List[Int] => ls.head // error + def test[A]: (List[Int] | A) => Int = { + case ls: List[Int] => ls.head // error, A = List[String] case _ => 0 } @@ -17,4 +17,25 @@ class Foo { case ls: A[X] => 4 // error case _ => 0 } + + def test4[A](x: List[Int] | (A => Int)) = x match { + case ls: List[Int] => ls.head // error, List extends Int => T + case _ => 0 + } + + final class C[T] extends A[T] + + def test5[T](x: A[T] | B[T] | Option[T]): Boolean = x.isInstanceOf[C[String]] // error + + def test6[T](x: A[T] | B[T] | Option[T]): Boolean = x.isInstanceOf[C[T]] + + def test7[A](x: Option[Int] | (A => Int)) = x match { + case ls: Option[Int] => ls.head // OK, Option decomposes to Some and None + case _ => 0 + } + + def test8(x: List[Int] | A[String]) = x match { + case ls: List[Int] => ls.head // OK, List decomposes to :: and Nil + case _ => 0 + } } diff --git a/tests/neg-custom-args/isInstanceOf/i8932.scala b/tests/neg-custom-args/isInstanceOf/i8932.scala index f77c28c7b0a7..84d2f7d4990a 100644 --- a/tests/neg-custom-args/isInstanceOf/i8932.scala +++ b/tests/neg-custom-args/isInstanceOf/i8932.scala @@ -9,4 +9,4 @@ def bugReport[A](foo: Foo[A]): Foo[A] = case dummy: Dummy => ??? 
} -def test = bugReport(new Dummy: Foo[String]) \ No newline at end of file +def test = bugReport(new Dummy: Foo[String]) diff --git a/tests/neg-custom-args/no-experimental/14034.scala b/tests/neg-custom-args/no-experimental/14034.scala index c0b4cc6899db..ab824c43395e 100644 --- a/tests/neg-custom-args/no-experimental/14034.scala +++ b/tests/neg-custom-args/no-experimental/14034.scala @@ -7,6 +7,6 @@ type Foo0 = Exp // error type Foo = Option[Exp] // error type Bar = Option[exp.type] // error type Baz = Exp | Int // error -type Quux = [X] =>> X match // error - case Exp => Int +type Quux = [X] =>> X match + case Exp => Int // error type Quuz[A <: Exp] = Int // error diff --git a/tests/neg-custom-args/no-experimental/i17292.scala b/tests/neg-custom-args/no-experimental/i17292.scala new file mode 100644 index 000000000000..381d252dbea8 --- /dev/null +++ b/tests/neg-custom-args/no-experimental/i17292.scala @@ -0,0 +1,7 @@ +import annotation.experimental + +class Foo { @experimental type Bar = (Int, String) } + +val f: Foo = Foo() + +def g: Tuple.Elem[f.Bar, 0] = ??? // error diff --git a/tests/neg-custom-args/no-experimental/i17292b.scala b/tests/neg-custom-args/no-experimental/i17292b.scala new file mode 100644 index 000000000000..f644dd60ecd5 --- /dev/null +++ b/tests/neg-custom-args/no-experimental/i17292b.scala @@ -0,0 +1,21 @@ +import annotation.experimental +type A[T] = Int +class Foo { + @experimental type Bar = (Int, String) +} + +type Elem1[X <: Tuple, N <: Int] = X match { case x *: xs => N match { case 0 => x } } +type Elem2[X <: Tuple, N <: Int] + +val f: Foo = Foo() + +def bar1: f.Bar = ??? // error +def bar2 = // error + ??? : f.Bar // error + +def g0: Elem1[f.Bar, 0] = ??? // error +def g1(a: Elem1[f.Bar, 0]) = ??? // error +def g2 = + ??? : Elem1[f.Bar, 0] // error + +def h: Elem2[f.Bar, 0] = ??? 
// error diff --git a/tests/neg-macros/i10127-a.scala b/tests/neg-macros/i10127-a.scala index 3e23cf10bd30..2da4d0924870 100644 --- a/tests/neg-macros/i10127-a.scala +++ b/tests/neg-macros/i10127-a.scala @@ -1,7 +1,7 @@ import scala.quoted.* object T { - def impl[A](using t: Type[A])(using Quotes): Expr[Unit] = { + def impl[A](t: Type[A])(using Quotes): Expr[Unit] = { Expr.summon[t.Underlying] // error '{} } diff --git a/tests/neg-macros/i10127-b.scala b/tests/neg-macros/i10127-b.scala index 2e87e92efa63..13992bf95362 100644 --- a/tests/neg-macros/i10127-b.scala +++ b/tests/neg-macros/i10127-b.scala @@ -4,7 +4,7 @@ case class T(x: Type[_ <: Any]) object T { def impl[A](t: T)(using ctx: Quotes): Expr[Unit] = { - Expr.summon[t.x.Underlying] // error // error + Expr.summon[t.x.Underlying] // error '{} } } \ No newline at end of file diff --git a/tests/neg-macros/i13376a.scala b/tests/neg-macros/i13376a.scala new file mode 100644 index 000000000000..563513eed232 --- /dev/null +++ b/tests/neg-macros/i13376a.scala @@ -0,0 +1,6 @@ +import scala.quoted.* +trait C: + type T + def foo: T +inline def makro(inline x: C): x.T = ${ impl[x.type]('x) } // error // error +def impl[CC <: C](xp: Expr[CC])(using Quotes): Expr[CC#T] = '{ $xp.foo } diff --git a/tests/neg-macros/i13376b.scala b/tests/neg-macros/i13376b.scala new file mode 100644 index 000000000000..8866c24102fd --- /dev/null +++ b/tests/neg-macros/i13376b.scala @@ -0,0 +1,6 @@ +import scala.quoted.* +trait C: + type T + def foo: T +inline def makro(x: C): x.T = ${ impl[x.type]('x) } +def impl[CC <: C](xp: Expr[CC])(using Quotes): Expr[CC#T] = '{ $xp.foo } // error diff --git a/tests/neg-macros/i14123a.scala b/tests/neg-macros/i14123a.scala new file mode 100644 index 000000000000..29978f85102c --- /dev/null +++ b/tests/neg-macros/i14123a.scala @@ -0,0 +1,4 @@ +import scala.quoted._ + +def f(foo: Any => Any)(using Quotes): Expr[Any] = + '{ println(${ foo[Int]('{???}); ??? 
}) } // error diff --git a/tests/neg-macros/i14123b.scala b/tests/neg-macros/i14123b.scala new file mode 100644 index 000000000000..80cadf518766 --- /dev/null +++ b/tests/neg-macros/i14123b.scala @@ -0,0 +1,23 @@ +package x + +import scala.quoted._ + +object Impl { + + sealed trait UpdateOp[+T] + case class Assignment[T](value:Expr[T]) extends UpdateOp[T] + case class Update(operation:Expr[Unit]) extends UpdateOp[Nothing] + + def genRead[B:Type](newBuilder: Expr[B], + readVal: (Expr[B]) => UpdateOp[B] + )(using Quotes): Expr[B] = + '{ + var x = $newBuilder + ${readVal[B]('x) match { // error: method apply in trait Function1 does not take type parameters + case Assignment(value) => '{ x = $value } // error + case Update(operation) => operation // error + }} + x + } + +} diff --git a/tests/neg-macros/i15917.scala b/tests/neg-macros/i15917.scala new file mode 100644 index 000000000000..3eecc38b21f9 --- /dev/null +++ b/tests/neg-macros/i15917.scala @@ -0,0 +1,6 @@ +import scala.quoted.* + +def m(using Quotes): Expr[Option[_]] = + val s = 3 + type st = s.type + '{ Some(${ Expr(s) }: st) } // error diff --git a/tests/neg-macros/i16355a.scala b/tests/neg-macros/i16355a.scala new file mode 100644 index 000000000000..8870b7777263 --- /dev/null +++ b/tests/neg-macros/i16355a.scala @@ -0,0 +1,35 @@ +//> using scala "3.2.1" +import scala.quoted.Expr +import scala.quoted.Type +import scala.quoted.quotes +import scala.quoted.Quotes + +object macros { + + inline transparent def mkNames[A]: List[Any] = ${ mkNamesImpl[A] } + + def mkNamesImpl[A: Type](using Quotes): Expr[List[Any]] = { + import quotes.reflect._ + + val fieldNames = TypeRepr.of[A].typeSymbol.declaredFields.map(_.name) + + val types = fieldNames + .map { f => + val t1 = ConstantType(StringConstant(f)) + t1.asType match { + case '[t1Type] => TypeRepr.of[(t1Type, "aa")] + } + } + .reduceLeft[TypeRepr](OrType(_, _)) + + types.asType match { + case '[ttt] => + Expr.ofList[ttt]( + fieldNames.map { v => + Expr[(v.type, "aa")](v -> "aa").asExprOf[ttt] // error + } + ) + } + } + +} diff --git a/tests/neg-macros/i16355b.scala b/tests/neg-macros/i16355b.scala new file mode 100644 index 000000000000..763810979ddf --- /dev/null +++ b/tests/neg-macros/i16355b.scala @@ -0,0 +1,4 @@ +import scala.quoted._ +def test(v: String)(using Quotes): Any = + Type.of : Type[v.type] // error + Type.of[v.type] // error diff --git a/tests/neg-macros/i16582.check b/tests/neg-macros/i16582.check new file mode 100644 index 000000000000..c06fe0d9829f --- /dev/null +++ b/tests/neg-macros/i16582.check @@ -0,0 +1,15 @@ + +-- Error: tests/neg-macros/i16582/Test_2.scala:5:27 -------------------------------------------------------------------- +5 | val o2 = ownerDoesNotWork(2) // error + | ^^^^^^^^^^^^^^^^^^^ + | Exception occurred while executing macro expansion. 
+ | dotty.tools.dotc.core.CyclicReference: Recursive value o2 needs type + | + | See full stack trace using -Ydebug + |--------------------------------------------------------------------------------------------------------------------- + |Inline stack trace + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from Macro_1.scala:7 +7 | ${ownerWorksImpl('in)} + | ^^^^^^^^^^^^^^^^^^^^^^ + --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg-macros/i16582/Macro_1.scala b/tests/neg-macros/i16582/Macro_1.scala new file mode 100644 index 000000000000..c28c83166607 --- /dev/null +++ b/tests/neg-macros/i16582/Macro_1.scala @@ -0,0 +1,28 @@ +import scala.quoted.* + +inline def ownerWorks(in: Int): Any = + ${ownerWorksImpl('in)} + +transparent inline def ownerDoesNotWork(in: Int): Any = + ${ownerWorksImpl('in)} + +def ownerWorksImpl(in: Expr[Int])(using Quotes): Expr[String] = + import quotes.reflect.* + val position = Position.ofMacroExpansion + val file = position.sourceFile + val owner0 = Symbol.spliceOwner.maybeOwner + val ownerName = owner0.tree match { + case ValDef(name, _, _) => + name + case DefDef(name, _, _, _) => + name + case t => report.errorAndAbort(s"unexpected tree shape: ${t.show}") + } + val path = file.path + val line = position.startLine + val column = position.startColumn + val v = in.valueOrAbort + val out = Expr(s"val $ownerName $v: $file @ ${position.startLine}") + out + + diff --git a/tests/neg-macros/i16582/Test_2.scala b/tests/neg-macros/i16582/Test_2.scala new file mode 100644 index 000000000000..7cfd65febd00 --- /dev/null +++ b/tests/neg-macros/i16582/Test_2.scala @@ -0,0 +1,6 @@ +def test= + val o1 = ownerWorks(1) + println(o1) + + val o2 = ownerDoesNotWork(2) // error + println(o2) diff --git a/tests/neg-macros/i16835.check b/tests/neg-macros/i16835.check new file mode 100644 index 000000000000..fb02f3c7f13f --- /dev/null +++ b/tests/neg-macros/i16835.check @@ -0,0 +1,6 @@ + +-- Error: tests/neg-macros/i16835/Test_2.scala:1:17 -------------------------------------------------------------------- +1 |def test: Unit = foo // error + | ^^^ + | my error + | my second error diff --git a/tests/neg-macros/i16835/Macro_1.scala b/tests/neg-macros/i16835/Macro_1.scala new file mode 100644 index 000000000000..ddee5dbecb4e --- /dev/null +++ b/tests/neg-macros/i16835/Macro_1.scala @@ -0,0 +1,21 @@ +import scala.quoted.* + +class Bar + +inline def foo: Unit = ${ fooExpr } + +def fooExpr(using Quotes): Expr[Unit] = + import quotes.reflect.* + Implicits.search(TypeRepr.of[Bar]) match + case res: ImplicitSearchSuccess => '{} + case failure: ImplicitSearchFailure => + report.errorAndAbort(failure.explanation) + + +inline given bar: Bar = ${ barExpr } + +def barExpr(using Quotes): Expr[Bar] = + import quotes.reflect.* + report.error(s"my error") + report.error(s"my second error") + '{ new Bar } diff --git a/tests/neg-macros/i16835/Test_2.scala b/tests/neg-macros/i16835/Test_2.scala new file mode 100644 index 000000000000..0dc2d39d6c3d --- /dev/null +++ b/tests/neg-macros/i16835/Test_2.scala @@ -0,0 +1 @@ +def test: Unit = foo // error diff --git a/tests/neg-macros/i17103.scala b/tests/neg-macros/i17103.scala new file mode 100644 index 000000000000..bd4b41d8b559 --- /dev/null +++ b/tests/neg-macros/i17103.scala @@ -0,0 +1,16 @@ +import scala.quoted.* + +def test(using Quotes): Expr[Unit] 
= + '{ + trait C: + def d: Int + val c: C = ??? + ${ + val expr = '{ + val cRef: c.type = ??? + cRef.d // error + () + } + expr + } + } \ No newline at end of file diff --git a/tests/neg-macros/i17351/Macro_1.scala b/tests/neg-macros/i17351/Macro_1.scala new file mode 100644 index 000000000000..b80999e1bce5 --- /dev/null +++ b/tests/neg-macros/i17351/Macro_1.scala @@ -0,0 +1,16 @@ +import scala.quoted.* + +inline def gen: Unit = ${ genImpl } + +def genImpl(using Quotes): Expr[Unit] = { + import quotes.reflect.* + + val valDefSymbol = Symbol.newVal(Symbol.spliceOwner, "bar", TypeRepr.of[Unit], Flags.EmptyFlags, Symbol.spliceOwner) + + val valDef = ValDef(valDefSymbol, Some('{ () }.asTerm)) + + Block( + List(valDef), + '{ () }.asTerm + ).asExprOf[Unit] +} diff --git a/tests/neg-macros/i17351/Test_2.scala b/tests/neg-macros/i17351/Test_2.scala new file mode 100644 index 000000000000..209c23204ad3 --- /dev/null +++ b/tests/neg-macros/i17351/Test_2.scala @@ -0,0 +1 @@ +val foo = gen // error diff --git a/tests/neg-macros/i6436.check b/tests/neg-macros/i6436.check index 43e93b2e64e5..d563abb5424c 100644 --- a/tests/neg-macros/i6436.check +++ b/tests/neg-macros/i6436.check @@ -2,6 +2,8 @@ 5 | case '{ StringContext(${Varargs(parts)}*) } => // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | No given instance of type scala.quoted.Quotes was found + | + | longer explanation available when compiling with `-explain` -- [E006] Not Found Error: tests/neg-macros/i6436.scala:6:34 ----------------------------------------------------------- 6 | val ps: Seq[Expr[String]] = parts // error | ^^^^^ diff --git a/tests/neg-macros/i6762.scala b/tests/neg-macros/i6762.scala index a8df289b26c2..054945e213d6 100644 --- a/tests/neg-macros/i6762.scala +++ b/tests/neg-macros/i6762.scala @@ -2,4 +2,4 @@ import scala.quoted.* type G[X] case class Foo[T](x: T) -def f(word: String)(using Quotes): Expr[Foo[G[String]]] = '{Foo(${Expr(word)})} // error // error +def f(word: String)(using Quotes): Expr[Foo[G[String]]] = '{Foo(${Expr(word)})} // error diff --git a/tests/neg-macros/i6991.check b/tests/neg-macros/i6991.check new file mode 100644 index 000000000000..57d611a09053 --- /dev/null +++ b/tests/neg-macros/i6991.check @@ -0,0 +1,10 @@ +-- [E050] Type Error: tests/neg-macros/i6991.scala:11:14 --------------------------------------------------------------- +11 | case '{($x: Foo)($bar: String)} => '{"Hello World"} // error + | ^^^^^^^ + | expression does not take parameters + | + | longer explanation available when compiling with `-explain` +-- [E008] Not Found Error: tests/neg-macros/i6991.scala:12:23 ---------------------------------------------------------- +12 | case '{($x: Foo).apply($bar: String)} => '{"Hello World"} // error + | ^^^^^^^^^^^^^^^ + | value apply is not a member of macros.Foo diff --git a/tests/neg-macros/i6991.scala b/tests/neg-macros/i6991.scala new file mode 100644 index 000000000000..c6838261ed7a --- /dev/null +++ b/tests/neg-macros/i6991.scala @@ -0,0 +1,16 @@ +import scala.quoted._ + +object macros { + inline def mcr(x: => Any): Any = ${mcrImpl('x)} + + class Foo // { def apply(str: String) = "100" } + class Bar { def apply(str: String) = "100" } + + def mcrImpl(body: Expr[Any])(using ctx: Quotes): Expr[Any] = { + body match { + case '{($x: Foo)($bar: String)} => '{"Hello World"} // error + case '{($x: Foo).apply($bar: String)} => '{"Hello World"} // error + case '{($x: Bar)($bar: String)} => '{"Hello World"} + } + } +} diff --git a/tests/neg-macros/i8887.scala b/tests/neg-macros/i8887.scala new 
file mode 100644 index 000000000000..944544ba33dc --- /dev/null +++ b/tests/neg-macros/i8887.scala @@ -0,0 +1,5 @@ +import scala.quoted._ + +def expr[X](x: Any)(using Quotes): Expr[Any] = + '{ foo[x.type] } // error +def foo[X]: Any = ??? diff --git a/tests/neg-macros/quote-this-a.scala b/tests/neg-macros/quote-this-a.scala index 11621176526b..9f71aca0a7fb 100644 --- a/tests/neg-macros/quote-this-a.scala +++ b/tests/neg-macros/quote-this-a.scala @@ -4,9 +4,7 @@ class Foo { def f(using Quotes): Unit = '{ def bar[T](x: T): T = x - bar[ - this.type // error - ] { + bar[this.type] { this // error } } diff --git a/tests/neg-scalajs/js-trait-ctor-param.check b/tests/neg-scalajs/js-trait-ctor-param.check new file mode 100644 index 000000000000..bc5296b3c76f --- /dev/null +++ b/tests/neg-scalajs/js-trait-ctor-param.check @@ -0,0 +1,4 @@ +-- Error: tests/neg-scalajs/js-trait-ctor-param.scala:9:34 ------------------------------------------------------------- +9 |trait NonNativeBagHolderTrait(val bag: Bag) extends js.Any // error + | ^^^^^^^^^^^^ + | A non-native JS trait cannot have constructor parameters diff --git a/tests/neg-scalajs/js-trait-ctor-param.scala b/tests/neg-scalajs/js-trait-ctor-param.scala new file mode 100644 index 000000000000..c907b0d9b606 --- /dev/null +++ b/tests/neg-scalajs/js-trait-ctor-param.scala @@ -0,0 +1,9 @@ +import scala.scalajs.js +import scala.scalajs.js.annotation.* + +@js.native +trait Bag extends js.Any { + val str: String +} + +trait NonNativeBagHolderTrait(val bag: Bag) extends js.Any // error diff --git a/tests/neg/17077.scala b/tests/neg/17077.scala new file mode 100644 index 000000000000..26a052a7bf97 --- /dev/null +++ b/tests/neg/17077.scala @@ -0,0 +1,14 @@ +case class IsIntResult() + +object IsInt: + def unapply(x: Int): IsIntResult = IsIntResult() + +@main def test = + val v: String | Int = "Blop" + val res = + v match + case IsInt() => 43 // error: cannot use a product of arity zero as a return type for unapply + // see UnapplyInvalidReturnType in messages.scala + // and https://docs.scala-lang.org/scala3/reference/changed-features/pattern-matching.html#fixed-arity-extractors + case _ => 42 + println(res) diff --git a/tests/neg/6570-1.check b/tests/neg/6570-1.check index fa53e71cbb6b..bdbadd0f752a 100644 --- a/tests/neg/6570-1.check +++ b/tests/neg/6570-1.check @@ -7,7 +7,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce N[Box[Int & String]] - | failed since selector Box[Int & String] + | failed since selector Box[Int & String] | is uninhabited (there are no values of that type). | | longer explanation available when compiling with `-explain` @@ -23,7 +23,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce M[T] - | failed since selector T + | failed since selector T | does not uniquely determine parameter x in | case Cov[x] => N[x] | The computed bounds for the parameter are: diff --git a/tests/neg/6571.check b/tests/neg/6571.check index 42997407765f..4172abb2919b 100644 --- a/tests/neg/6571.check +++ b/tests/neg/6571.check @@ -7,7 +7,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Test.M[Test.Inv[Int] & Test.Inv[String]] - | failed since selector Test.Inv[Int] & Test.Inv[String] + | failed since selector Test.Inv[Int] & Test.Inv[String] | is uninhabited (there are no values of that type). 
| | longer explanation available when compiling with `-explain` @@ -20,7 +20,7 @@ | Note: a match type could not be fully reduced: | | trying to reduce Test.M[Test.Inv[String] & Test.Inv[Int]] - | failed since selector Test.Inv[String] & Test.Inv[Int] + | failed since selector Test.Inv[String] & Test.Inv[Int] | is uninhabited (there are no values of that type). | | longer explanation available when compiling with `-explain` diff --git a/tests/neg/ambiref.check b/tests/neg/ambiref.check index 95b542c7aae3..5d701b3b3b71 100644 --- a/tests/neg/ambiref.check +++ b/tests/neg/ambiref.check @@ -1,32 +1,32 @@ -- [E049] Reference Error: tests/neg/ambiref.scala:8:14 ---------------------------------------------------------------- 8 | println(x) // error | ^ - | Reference to x is ambiguous, - | it is both defined in object Test + | Reference to x is ambiguous. + | It is both defined in object Test | and inherited subsequently in class D | | longer explanation available when compiling with `-explain` -- [E049] Reference Error: tests/neg/ambiref.scala:10:14 --------------------------------------------------------------- 10 | println(x) // error | ^ - | Reference to x is ambiguous, - | it is both defined in object Test + | Reference to x is ambiguous. + | It is both defined in object Test | and inherited subsequently in anonymous class test1.C {...} | | longer explanation available when compiling with `-explain` -- [E049] Reference Error: tests/neg/ambiref.scala:17:14 --------------------------------------------------------------- 17 | println(y) // error | ^ - | Reference to y is ambiguous, - | it is both defined in method c + | Reference to y is ambiguous. + | It is both defined in method c | and inherited subsequently in anonymous class D {...} | | longer explanation available when compiling with `-explain` -- [E049] Reference Error: tests/neg/ambiref.scala:25:16 --------------------------------------------------------------- 25 | println(y) // error | ^ - | Reference to y is ambiguous, - | it is both defined in method c + | Reference to y is ambiguous. 
+ | It is both defined in method c | and inherited subsequently in class E | | longer explanation available when compiling with `-explain` diff --git a/tests/neg/constructor-proxy-shadowing.scala b/tests/neg/constructor-proxy-shadowing.scala deleted file mode 100644 index 857ef986cb79..000000000000 --- a/tests/neg/constructor-proxy-shadowing.scala +++ /dev/null @@ -1,10 +0,0 @@ - -object Test extends App { - def A22(s: String): String = s - class A { - class A22(s: String) { - def run = s - } - val x = A22("") // error: shadowing - } -} \ No newline at end of file diff --git a/tests/neg/enum-values.check b/tests/neg/enum-values.check index 37990e8f312e..23337de1b2c4 100644 --- a/tests/neg/enum-values.check +++ b/tests/neg/enum-values.check @@ -24,8 +24,8 @@ | | failed with: | - | Found: Array[example.Tag[?]] - | Required: Array[example.ListLike[?]] + | Found: example.ListLike.type + | Required: Nothing -- [E008] Not Found Error: tests/neg/enum-values.scala:34:52 ----------------------------------------------------------- 34 | val typeCtorsK: Array[TypeCtorsK[?]] = TypeCtorsK.values // error | ^^^^^^^^^^^^^^^^^ @@ -38,8 +38,8 @@ | | failed with: | - | Found: Array[example.Tag[?]] - | Required: Array[example.TypeCtorsK[?[_$1]]] + | Found: example.TypeCtorsK.type + | Required: Nothing -- [E008] Not Found Error: tests/neg/enum-values.scala:36:6 ------------------------------------------------------------ 36 | Tag.valueOf("Int") // error | ^^^^^^^^^^^ diff --git a/tests/neg/enumsAccess.scala b/tests/neg/enumsAccess.scala index 18b91b346b6a..8a8e9af8910f 100644 --- a/tests/neg/enumsAccess.scala +++ b/tests/neg/enumsAccess.scala @@ -63,7 +63,7 @@ object test5 { enum E5[T](x: T) { case C3() extends E5[INT](defaultX)// error: illegal reference // error: illegal reference case C4 extends E5[INT](defaultX) // error: illegal reference // error: illegal reference - case C5 extends E5[E5[_]](E5.this) // error: type mismatch + case C5 extends E5[E5[_]](E5.this) // error: cannot be instantiated // error: conflicting base types // error: type mismatch } object E5 { diff --git a/tests/neg/extension-methods.scala b/tests/neg/extension-methods.scala index e075105762f9..a11b2cca5add 100644 --- a/tests/neg/extension-methods.scala +++ b/tests/neg/extension-methods.scala @@ -15,4 +15,4 @@ object Test { def f2[T]: T = ??? // error: T is already defined as type T def f3(xs: List[T]) = ??? // error: xs is already defined as value xs } -} \ No newline at end of file +} diff --git a/tests/neg/i11118.check b/tests/neg/i11118.check new file mode 100644 index 000000000000..0af98c7f580a --- /dev/null +++ b/tests/neg/i11118.check @@ -0,0 +1,12 @@ +-- Warning: tests/neg/i11118.scala:2:12 -------------------------------------------------------------------------------- +2 |val (a,b) = (1,2,3) // error // warning + | ^^^^^^^ + | pattern's type (Any, Any) does not match the right hand side expression's type (Int, Int, Int) + | + | If the narrowing is intentional, this can be communicated by adding `: @unchecked` after the expression, + | which may result in a MatchError at runtime. + | This patch can be rewritten automatically under -rewrite -source 3.2-migration. 
+-- Error: tests/neg/i11118.scala:2:4 -----------------------------------------------------------------------------------
+2 |val (a,b) = (1,2,3) // error // warning
+ | ^
+ | this case is unreachable since type (Int, Int, Int) is not a subclass of class Tuple2
diff --git a/tests/neg/i11118.scala b/tests/neg/i11118.scala
new file mode 100644
index 000000000000..23d9b2b604b6
--- /dev/null
+++ b/tests/neg/i11118.scala
@@ -0,0 +1,2 @@
+// https://github.com/lampepfl/dotty/issues/11118
+val (a,b) = (1,2,3) // error // warning
diff --git a/tests/neg/i11982a.check b/tests/neg/i11982a.check
index bc07c82059cc..1977aa30e8b5 100644
--- a/tests/neg/i11982a.check
+++ b/tests/neg/i11982a.check
@@ -6,7 +6,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce Tuple.Tail[X]
- | failed since selector X
+ | failed since selector X
 | does not uniquely determine parameter xs in
 | case _ *: xs => xs
 | The computed bounds for the parameter are:
@@ -21,7 +21,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce Tuple.Tail[X]
- | failed since selector X
+ | failed since selector X
 | does not uniquely determine parameter xs in
 | case _ *: xs => xs
 | The computed bounds for the parameter are:
@@ -36,7 +36,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce Tuple.Tail[X]
- | failed since selector X
+ | failed since selector X
 | does not uniquely determine parameter xs in
 | case _ *: xs => xs
 | The computed bounds for the parameter are:
diff --git a/tests/neg/i12049.check b/tests/neg/i12049.check
index a58624ec6778..11c648e35a57 100644
--- a/tests/neg/i12049.check
+++ b/tests/neg/i12049.check
@@ -7,7 +7,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce M[B]
- | failed since selector B
+ | failed since selector B
 | does not match case A => Int
 | and cannot be shown to be disjoint from it either.
 | Therefore, reduction cannot advance to the remaining case
@@ -18,7 +18,7 @@
 -- Error: tests/neg/i12049.scala:14:23 ---------------------------------------------------------------------------------
 14 |val y3: String = ??? : Last[Int *: Int *: Boolean *: String *: EmptyTuple] // error
 | ^
- | Match type reduction failed since selector EmptyTuple.type
+ | Match type reduction failed since selector EmptyTuple.type
 | matches none of the cases
 |
 | case _ *: _ *: t => Last[t]
@@ -26,7 +26,7 @@
 -- Error: tests/neg/i12049.scala:22:26 ---------------------------------------------------------------------------------
 22 |val z3: (A, B, A) = ??? : Reverse[(A, B, A)] // error
 | ^
- | Match type reduction failed since selector A *: EmptyTuple.type
+ | Match type reduction failed since selector A *: EmptyTuple.type
 | matches none of the cases
 |
 | case t1 *: t2 *: ts => Tuple.Concat[Reverse[ts], (t2, t1)]
@@ -39,7 +39,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce M[B]
- | failed since selector B
+ | failed since selector B
 | does not match case A => Int
 | and cannot be shown to be disjoint from it either.
 | Therefore, reduction cannot advance to the remaining case
@@ -48,7 +48,7 @@
 -- Error: tests/neg/i12049.scala:25:26 ---------------------------------------------------------------------------------
 25 |val _ = summon[String =:= Last[Int *: Int *: Boolean *: String *: EmptyTuple]] // error
 | ^
- | Match type reduction failed since selector EmptyTuple.type
+ | Match type reduction failed since selector EmptyTuple.type
 | matches none of the cases
 |
 | case _ *: _ *: t => Last[t]
@@ -56,7 +56,7 @@
 -- Error: tests/neg/i12049.scala:26:29 ---------------------------------------------------------------------------------
 26 |val _ = summon[(A, B, A) =:= Reverse[(A, B, A)]] // error
 | ^
- | Match type reduction failed since selector A *: EmptyTuple.type
+ | Match type reduction failed since selector A *: EmptyTuple.type
 | matches none of the cases
 |
 | case t1 *: t2 *: ts => Tuple.Concat[Reverse[ts], (t2, t1)]
@@ -69,7 +69,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce M[B]
- | failed since selector B
+ | failed since selector B
 | does not match case A => Int
 | and cannot be shown to be disjoint from it either.
 | Therefore, reduction cannot advance to the remaining case
diff --git a/tests/neg/i12682.check b/tests/neg/i12682.check
new file mode 100644
index 000000000000..605414938529
--- /dev/null
+++ b/tests/neg/i12682.check
@@ -0,0 +1,51 @@
+-- [E049] Reference Error: tests/neg/i12682.scala:6:12 -----------------------------------------------------------------
+6 | val x = m(1) // error
+ | ^
+ | Reference to m is ambiguous.
+ | It is both defined in object C
+ | and inherited subsequently in object T
+ |---------------------------------------------------------------------------------------------------------------------
+ | Explanation (enabled by `-explain`)
+ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+ | The identifier m is ambiguous because a name binding of lower precedence
+ | in an inner scope cannot shadow a binding with higher precedence in
+ | an outer scope.
+ |
+ | The precedence of the different kinds of name bindings, from highest to lowest, is:
+ |  - Definitions in an enclosing scope
+ |  - Inherited definitions and top-level definitions in packages
+ |  - Names introduced by import of a specific name
+ |  - Names introduced by wildcard import
+ |  - Definitions from packages in other files
+ | Note:
+ |  - As a rule, definitions take precedence over imports.
+ |  - Definitions in an enclosing scope take precedence over inherited definitions,
+ |    which can result in ambiguities in nested classes.
+ |  - When importing, you can avoid naming conflicts by renaming:
+ |      import scala.{m => mTick}
+ ---------------------------------------------------------------------------------------------------------------------
+-- [E049] Reference Error: tests/neg/i12682.scala:13:10 ----------------------------------------------------------------
+13 | def d = m(42) // error
+ | ^
+ | Reference to m is ambiguous.
+ | It is both imported by import X._
+ | and imported subsequently by import Y._
+ |--------------------------------------------------------------------------------------------------------------------
+ | Explanation (enabled by `-explain`)
+ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+ | The identifier m is ambiguous because two name bindings of equal precedence
+ | were introduced in the same scope.
+ |
+ | The precedence of the different kinds of name bindings, from highest to lowest, is:
+ |  - Definitions in an enclosing scope
+ |  - Inherited definitions and top-level definitions in packages
+ |  - Names introduced by import of a specific name
+ |  - Names introduced by wildcard import
+ |  - Definitions from packages in other files
+ | Note:
+ |  - As a rule, definitions take precedence over imports.
+ |  - Definitions in an enclosing scope take precedence over inherited definitions,
+ |    which can result in ambiguities in nested classes.
+ |  - When importing, you can avoid naming conflicts by renaming:
+ |      import scala.{m => mTick}
+ --------------------------------------------------------------------------------------------------------------------
diff --git a/tests/neg/i12682.scala b/tests/neg/i12682.scala
new file mode 100644
index 000000000000..0b37816ef0df
--- /dev/null
+++ b/tests/neg/i12682.scala
@@ -0,0 +1,13 @@
+// scalac: -explain
+
+object C:
+  def m(x: Int) = 1
+  object T extends K:
+    val x = m(1) // error
+class K:
+  def m(i: Int) = 2
+object X extends K
+object Y extends K
+object D:
+  import X.*, Y.*
+  def d = m(42) // error
diff --git a/tests/neg/i13558.check b/tests/neg/i13558.check
deleted file mode 100644
index ab10a42cdd32..000000000000
--- a/tests/neg/i13558.check
+++ /dev/null
@@ -1,26 +0,0 @@
--- [E008] Not Found Error: tests/neg/i13558.scala:23:14 ----------------------------------------------------------------
-23 | println(a.id) // error
- | ^^^^
- | value id is not a member of testcode.A.
- | An extension method was tried, but could not be fully constructed:
- |
- |   testcode.ExtensionA.id(a)
- |
- |   failed with:
- |
- |     Reference to id is ambiguous,
- |     it is both imported by import testcode.ExtensionB._
- |     and imported subsequently by import testcode.ExtensionA._
--- [E008] Not Found Error: tests/neg/i13558.scala:29:14 ----------------------------------------------------------------
-29 | println(a.id) // error
- | ^^^^
- | value id is not a member of testcode.A.
- | An extension method was tried, but could not be fully constructed:
- |
- |   testcode.ExtensionB.id(a)
- |
- |   failed with:
- |
- |     Reference to id is ambiguous,
- |     it is both imported by import testcode.ExtensionA._
- |     and imported subsequently by import testcode.ExtensionB._
diff --git a/tests/neg/i13757-match-type-anykind.scala b/tests/neg/i13757-match-type-anykind.scala
new file mode 100644
index 000000000000..d8273e546dab
--- /dev/null
+++ b/tests/neg/i13757-match-type-anykind.scala
@@ -0,0 +1,16 @@
+object Test:
+  type AnyKindMatchType1[X <: AnyKind] = X match // error: the scrutinee of a match type cannot be higher-kinded // error
+    case Option[a] => Int
+
+  type AnyKindMatchType2[X <: AnyKind] = X match // error: the scrutinee of a match type cannot be higher-kinded
+    case Option => Int // error: Missing type parameter for Option
+
+  type AnyKindMatchType3[X <: AnyKind] = X match // error: the scrutinee of a match type cannot be higher-kinded // error
+    case _ => Int
+
+  type AnyKindMatchType4[X <: Option] = X match // error // error: the scrutinee of a match type cannot be higher-kinded // error
+    case _ => Int
+
+  type AnyKindMatchType5[X[_]] = X match // error: the scrutinee of a match type cannot be higher-kinded // error
+    case _ => Int
+end Test
diff --git a/tests/neg/i13780.check b/tests/neg/i13780.check
index 56b6a67ac8e7..aa0a47db5737 100644
--- a/tests/neg/i13780.check
+++ b/tests/neg/i13780.check
@@ -10,7 +10,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce Head[X]
- | failed since selector X
+ | failed since selector X
 | does not uniquely determine parameters a, b in
 | case (a, b) => a
 | The computed bounds for the parameters are:
@@ -30,7 +30,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce Head[X]
- | failed since selector X
+ | failed since selector X
 | does not uniquely determine parameters a, b in
 | case (a, b) => a
 | The computed bounds for the parameters are:
diff --git a/tests/neg/i15618.check b/tests/neg/i15618.check
index 91f557b12dcf..099e3fe0a0b7 100644
--- a/tests/neg/i15618.check
+++ b/tests/neg/i15618.check
@@ -9,10 +9,16 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce ScalaType[T]
- | failed since selector T
+ | failed since selector T
 | does not match case Float16 => Float
 | and cannot be shown to be disjoint from it either.
 | Therefore, reduction cannot advance to the remaining cases
 |
 | case Float32 => Float
 | case Int32 => Int
+-- [E172] Type Error: tests/neg/i15618.scala:21:33 ---------------------------------------------------------------------
+21 | def toArray: Array[T] = Array() // error
+ | ^
+ | No ClassTag available for T
+ |
+ | where: T is a type in class Tensor2 with bounds <: Int | Float
diff --git a/tests/neg/i15618.scala b/tests/neg/i15618.scala
index fd38c8c48f6b..087bc462b211 100644
--- a/tests/neg/i15618.scala
+++ b/tests/neg/i15618.scala
@@ -16,8 +16,16 @@ class Tensor[T <: DType](dtype: T):
   def toSeq: Seq[ScalaType[T]] = Seq()
   def toArray: Array[ScalaType[T]] = Array() // error
 
+class Tensor2[T <: Int | Float](dtype: T):
+  def toSeq: Seq[T] = Seq()
+  def toArray: Array[T] = Array() // error
+
 @main def Test =
   val t = Tensor(Float32) // Tensor[Float32]
   println(t.toSeq.headOption) // works, Seq[Float]
   println(t.toArray.headOption) // ClassCastException
+
+  val t2 = Tensor2(0.0f) // Tensor2[Float]
+  println(t.toSeq.headOption)
+  println(t.toArray.headOption)
diff --git a/tests/neg/i1643.scala b/tests/neg/i1643.scala
index a10422de6eab..1745539d73f5 100644
--- a/tests/neg/i1643.scala
+++ b/tests/neg/i1643.scala
@@ -1,4 +1,4 @@
-trait T extends Array { // error // error
+trait T extends Array { // error
   def t1(as: String*): Array[String] = { varargs1(as*) } // error
   def t2(as: String*): Array[String] = { super.varargs1(as*) } // error
 }
@@ -7,7 +7,7 @@ class C extends Base_1 { // error
   def c2(as: String*): Array[String] = { super.varargs1(as*) } // error
 }
 object Test extends App {
-  val t = new T {} // error
+  val t = new T {}
   println(t.t1("a", "b").mkString(","))
   println(t.t2("a", "b").mkString(","))
   val c = new C {}
diff --git a/tests/neg/i16453.check b/tests/neg/i16453.check
new file mode 100644
index 000000000000..e01ddf5cab7a
--- /dev/null
+++ b/tests/neg/i16453.check
@@ -0,0 +1,45 @@
+-- [E172] Type Error: tests/neg/i16453.scala:21:19 ---------------------------------------------------------------------
+21 | summon[List[Int]] // error
+ | ^
+ | No given instance of type List[Int] was found for parameter x of method summon in object Predef
+-- [E172] Type Error: tests/neg/i16453.scala:23:21 ---------------------------------------------------------------------
+23 | summon[Option[Int]] // error
+ | ^
+ |No given instance of type Option[Int] was found for parameter x of method summon in object Predef
+ |
+ |Note: implicit conversions are not automatically applied to arguments of using clauses. You will have to pass the argument explicitly.
+ |The following implicits in scope can be implicitly converted to Option[Int]:
+ |- final lazy given val baz3: Char
+ |- final lazy given val bar3: Int
+-- [E172] Type Error: tests/neg/i16453.scala:24:26 ---------------------------------------------------------------------
+24 | implicitly[Option[Char]] // error
+ | ^
+ |No given instance of type Option[Char] was found for parameter e of method implicitly in object Predef
+ |
+ |Note: implicit conversions are not automatically applied to arguments of using clauses. You will have to pass the argument explicitly.
+ |The following implicits in scope can be implicitly converted to Option[Char]:
+ |- final lazy given val baz3: Char
+-- [E172] Type Error: tests/neg/i16453.scala:25:20 ---------------------------------------------------------------------
+25 | implicitly[String] // error
+ | ^
+ |No given instance of type String was found for parameter e of method implicitly in object Predef
+ |
+ |Note: implicit conversions are not automatically applied to arguments of using clauses. You will have to pass the argument explicitly.
+ |The following implicits in scope can be implicitly converted to String:
+ |- final lazy given val baz3: Char
+-- [E172] Type Error: tests/neg/i16453.scala:35:16 ---------------------------------------------------------------------
+35 | summon[String] // error
+ | ^
+ |No given instance of type String was found for parameter x of method summon in object Predef
+ |
+ |Note: implicit conversions are not automatically applied to arguments of using clauses. You will have to pass the argument explicitly.
+ |The following implicits in scope can be implicitly converted to String:
+ |- implicit val baz2: Char
+-- [E172] Type Error: tests/neg/i16453.scala:36:25 ---------------------------------------------------------------------
+36 | implicitly[Option[Int]] // error
+ | ^
+ |No given instance of type Option[Int] was found for parameter e of method implicitly in object Predef
+ |
+ |Note: implicit conversions are not automatically applied to arguments of using clauses. You will have to pass the argument explicitly.
+ |The following implicits in scope can be implicitly converted to Option[Int]:
+ |- implicit val bar2: Int
diff --git a/tests/neg/i16453.scala b/tests/neg/i16453.scala
new file mode 100644
index 000000000000..00495c39e21a
--- /dev/null
+++ b/tests/neg/i16453.scala
@@ -0,0 +1,37 @@
+import scala.language.implicitConversions
+
+trait Foo { type T }
+
+// This one is irrelevant, shouldn't be included in error message
+given irrelevant: Long = ???
+
+/** Use Scala 3 givens/conversions */
+def testScala3() = {
+  given c1[T]: Conversion[T, Option[T]] = ???
+  given c2[F <: Foo](using f: F): Conversion[f.T, Option[f.T]] = ???
+  given Conversion[Char, String] = ???
+  given Conversion[Char, Option[Int]] = ???
+
+  given foo: Foo with
+    type T = Int
+  given bar3: Int = 0
+  given baz3: Char = 'a'
+
+  // This should get the usual error
+  summon[List[Int]] // error
+
+  summon[Option[Int]] // error
+  implicitly[Option[Char]] // error
+  implicitly[String] // error
+}
+
+/** Use Scala 2 implicits */
+def testScala2() = {
+  implicit def toOpt[T](t: T): Option[T] = ???
+  implicit def char2Str(c: Char): String = ???
+  implicit val bar2: Int = 1
+  implicit val baz2: Char = 'b'
+
+  summon[String] // error
+  implicitly[Option[Int]] // error
+}
diff --git a/tests/neg/i16820.check b/tests/neg/i16820.check
new file mode 100644
index 000000000000..48824d683244
--- /dev/null
+++ b/tests/neg/i16820.check
@@ -0,0 +1,30 @@
+-- [E178] Type Error: tests/neg/i16820.scala:5:11 ----------------------------------------------------------------------
+5 | val x1 = f // error
+ | ^
+ | missing argument list for method f in object Test
+ |
+ | def f(xs: Int*): Int
+ |
+ | longer explanation available when compiling with `-explain`
+-- [E100] Syntax Error: tests/neg/i16820.scala:6:11 --------------------------------------------------------------------
+6 | val x2 = g // error
+ | ^
+ | method g in object Test must be called with () argument
+ |
+ | longer explanation available when compiling with `-explain`
+-- [E178] Type Error: tests/neg/i16820.scala:7:40 ----------------------------------------------------------------------
+7 | val x3 = java.nio.file.Paths.get(".").toRealPath // error
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ | missing argument list for method toRealPath in trait Path
+ |
+ | def toRealPath(x$0: java.nio.file.LinkOption*): java.nio.file.Path
+ |
+ | longer explanation available when compiling with `-explain`
+-- [E178] Type Error: tests/neg/i16820.scala:11:14 ---------------------------------------------------------------------
+11 |def test = Foo(3) // error
+ | ^^^^^^
+ | missing argument list for method apply in object Foo
+ |
+ | def apply(x: Int)(xs: String*): Foo
+ |
+ | longer explanation available when compiling with `-explain`
diff --git a/tests/neg/i16820.scala b/tests/neg/i16820.scala
new file mode 100644
index 000000000000..abdc741b9f0e
--- /dev/null
+++ b/tests/neg/i16820.scala
@@ -0,0 +1,11 @@
+object Test:
+  def f(xs: Int*) = xs.sum
+  def g() = 1
+
+  val x1 = f // error
+  val x2 = g // error
+  val x3 = java.nio.file.Paths.get(".").toRealPath // error
+
+// #14567
+case class Foo(x: Int)(xs: String*)
+def test = Foo(3) // error
diff --git a/tests/neg/i16842.scala b/tests/neg/i16842.scala
new file mode 100644
index 000000000000..1e7e5cc14339
--- /dev/null
+++ b/tests/neg/i16842.scala
@@ -0,0 +1,25 @@
+sealed trait Expr1
+sealed trait Literal extends Expr1
+
+case class ArrayLiter(elems: List[Expr1]) extends Literal
+
+sealed trait SemanticType {
+  type T // the type with which a literal of this semanticType is represented
+}
+case object SemanticInt extends SemanticType {
+  type T = Int
+}
+
+case class SemanticArray[U <: SemanticType](dim: Int) extends SemanticType {
+  type T = List[U]
+}
+
+sealed trait Expr2[+T]
+class Liter[T <: SemanticType](val ty: T, val value: ty.T) extends Expr2[T]
+
+def typecheckArrayLiter(
+    a: ArrayLiter
+): Liter[SemanticArray[SemanticType]] = {
+  val x: List[Expr2[SemanticInt.type]] = List()
+  Liter(SemanticArray[SemanticInt.type], x) // error // error
+}
diff --git a/tests/neg/i16861.scala b/tests/neg/i16861.scala
new file mode 100644
index 000000000000..50c56974d027
--- /dev/null
+++ b/tests/neg/i16861.scala
@@ -0,0 +1,2 @@
+given foo[T]: Any = summon[bar] // error
+def bar: Nothing = ???
\ No newline at end of file
diff --git a/tests/neg/i16861a.scala b/tests/neg/i16861a.scala
new file mode 100644
index 000000000000..b93f884f5e56
--- /dev/null
+++ b/tests/neg/i16861a.scala
@@ -0,0 +1,4 @@
+import scala.quoted.*
+trait Foo
+object Foo:
+  inline given foo[T <: Foo]: T = summon[Type.of[T]] // error
diff --git a/tests/neg/i16920.check b/tests/neg/i16920.check
new file mode 100644
index 000000000000..131ba4c6265e
--- /dev/null
+++ b/tests/neg/i16920.check
@@ -0,0 +1,88 @@
+-- [E008] Not Found Error: tests/neg/i16920.scala:20:11 ----------------------------------------------------------------
+20 | "five".wow // error
+ | ^^^^^^^^^^
+ | value wow is not a member of String.
+ | An extension method was tried, but could not be fully constructed:
+ |
+ |   Two.wow("five")
+ |
+ |   failed with:
+ |
+ |     Found:    ("five" : String)
+ |     Required: Int
+-- [E008] Not Found Error: tests/neg/i16920.scala:28:6 -----------------------------------------------------------------
+28 | 5.wow // error
+ | ^^^^^
+ | value wow is not a member of Int.
+ | An extension method was tried, but could not be fully constructed:
+ |
+ |   AlsoFails.wow(5)
+ |
+ |   failed with:
+ |
+ |     Found:    (5 : Int)
+ |     Required: Boolean
+-- [E008] Not Found Error: tests/neg/i16920.scala:29:11 ----------------------------------------------------------------
+29 | "five".wow // error
+ | ^^^^^^^^^^
+ | value wow is not a member of String.
+ | An extension method was tried, but could not be fully constructed:
+ |
+ |   AlsoFails.wow("five")
+ |
+ |   failed with:
+ |
+ |     Found:    ("five" : String)
+ |     Required: Boolean
+-- [E008] Not Found Error: tests/neg/i16920.scala:36:6 -----------------------------------------------------------------
+36 | 5.wow // error
+ | ^^^^^
+ | value wow is not a member of Int.
+ | An extension method was tried, but could not be fully constructed:
+ |
+ |   Three.wow(5)
+ |
+ |   failed with:
+ |
+ |     Ambiguous extension methods:
+ |     both Three.wow(5)
+ |     and  Two.wow(5)
+ |     are possible expansions of 5.wow
+-- [E008] Not Found Error: tests/neg/i16920.scala:44:11 ----------------------------------------------------------------
+44 | "five".wow // error
+ | ^^^^^^^^^^
+ | value wow is not a member of String.
+ | An extension method was tried, but could not be fully constructed:
+ |
+ |   Two.wow("five")
+ |
+ |   failed with:
+ |
+ |     Found:    ("five" : String)
+ |     Required: Int
+-- [E008] Not Found Error: tests/neg/i16920.scala:51:11 ----------------------------------------------------------------
+51 | "five".wow // error
+ | ^^^^^^^^^^
+ | value wow is not a member of String.
+ | An extension method was tried, but could not be fully constructed:
+ |
+ |   Two.wow("five")
+ |
+ |   failed with:
+ |
+ |     Found:    ("five" : String)
+ |     Required: Int
+-- [E008] Not Found Error: tests/neg/i16920.scala:58:6 -----------------------------------------------------------------
+58 | 5.wow // error
+ | ^^^^^
+ | value wow is not a member of Int.
+ | An extension method was tried, but could not be fully constructed:
+ |
+ |   Three.wow(5)
+ |
+ |   failed with:
+ |
+ |     Ambiguous extension methods:
+ |     both Three.wow(5)
+ |     and  Two.wow(5)
+ |     are possible expansions of 5.wow
diff --git a/tests/neg/i16920.scala b/tests/neg/i16920.scala
new file mode 100644
index 000000000000..38345e811c1f
--- /dev/null
+++ b/tests/neg/i16920.scala
@@ -0,0 +1,59 @@
+import language.experimental.relaxedExtensionImports
+
+object One:
+  extension (s: String)
+    def wow: Unit = println(s)
+
+object Two:
+  extension (i: Int)
+    def wow: Unit = println(i)
+
+object Three:
+  extension (i: Int)
+    def wow: Unit = println(i)
+
+object Fails:
+  import One._
+  def test: Unit =
+    import Two._
+    5.wow
+    "five".wow // error
+
+object AlsoFails:
+  extension (s: Boolean)
+    def wow = println(s)
+  import One._
+  import Two._
+  def test: Unit =
+    5.wow // error
+    "five".wow // error
+
+object Fails2:
+  import One._
+  import Two._
+  import Three._
+  def test: Unit =
+    5.wow // error
+    "five".wow // ok
+
+object Fails3:
+  import One._
+  import Two.wow
+  def test: Unit =
+    5.wow // ok
+    "five".wow // error
+
+object Fails4:
+  import Two.wow
+  import One._
+  def test: Unit =
+    5.wow // ok
+    "five".wow // error
+
+object Fails5:
+  import One.wow
+  import Two.wow
+  import Three.wow
+  def test: Unit =
+    5.wow // error
+    "five".wow // ok
\ No newline at end of file
diff --git a/tests/neg/i17002.scala b/tests/neg/i17002.scala
new file mode 100644
index 000000000000..c2a21dd3d415
--- /dev/null
+++ b/tests/neg/i17002.scala
@@ -0,0 +1,20 @@
+import scala.annotation.compileTimeOnly
+
+sealed trait Test[T]
+
+object Test:
+  @compileTimeOnly("Error")
+  given test0[T]: Test[T] = ???
+
+  @compileTimeOnly("Error")
+  given test1[T]: Test[T]()
+
+  @compileTimeOnly("Error")
+  implicit class ic(x: Int):
+    def foo = 2
+
+  test0 // error
+
+  test1 // error
+
+  2.foo // error
\ No newline at end of file
diff --git a/tests/neg/i17021.ext-java/A.java b/tests/neg/i17021.ext-java/A.java
new file mode 100644
index 000000000000..536e9caa4a38
--- /dev/null
+++ b/tests/neg/i17021.ext-java/A.java
@@ -0,0 +1,6 @@
+// Derives from run/i17021.defs, but with a Java protected member
+package p1;
+
+public class A {
+    protected int foo() { return 1; }
+}
diff --git a/tests/neg/i17021.ext-java/Test.scala b/tests/neg/i17021.ext-java/Test.scala
new file mode 100644
index 000000000000..c700ed8138d7
--- /dev/null
+++ b/tests/neg/i17021.ext-java/Test.scala
@@ -0,0 +1,14 @@
+// Derives from run/i17021.defs
+// but with a Java protected member
+// which leads to a compile error
+package p2:
+  trait B extends p1.A:
+    def bar: Int = foo // error: method bar accesses protected method foo inside a concrete trait method: use super.foo instead
+
+  class C extends B:
+    override def foo: Int = 2
+
+object Test:
+  def main(args: Array[String]): Unit =
+    val n = new p2.C().bar
+    assert(n == 2, n)
diff --git a/tests/neg/i17089.scala b/tests/neg/i17089.scala
new file mode 100644
index 000000000000..46968aa6f093
--- /dev/null
+++ b/tests/neg/i17089.scala
@@ -0,0 +1,4 @@
+object o:
+  trait T private[o]()
+
+def test = new o.T { } // error
diff --git a/tests/neg/i17122.check b/tests/neg/i17122.check
new file mode 100644
index 000000000000..683908c5af0f
--- /dev/null
+++ b/tests/neg/i17122.check
@@ -0,0 +1,5 @@
+-- [E172] Type Error: tests/neg/i17122.scala:7:14 ----------------------------------------------------------------------
+7 |def test = m() // error
+ | ^
+ | No given instance of type A was found for parameter of C
+ | Where C is an alias of: (A) ?=> B
diff --git a/tests/neg/i17122.scala b/tests/neg/i17122.scala
new file mode 100644
index 000000000000..fcf9af106488
--- /dev/null
+++ b/tests/neg/i17122.scala
@@ -0,0 +1,7 @@
+case class A()
+case class B()
+
+type C = A ?=> B
+def m(): C = ???
+
+def test = m() // error
diff --git a/tests/neg/i17123.check b/tests/neg/i17123.check
new file mode 100644
index 000000000000..e858de67b73a
--- /dev/null
+++ b/tests/neg/i17123.check
@@ -0,0 +1,86 @@
+-- [E100] Syntax Error: tests/neg/i17123.scala:7:2 ---------------------------------------------------------------------
+7 | m1 // error
+ | ^^
+ | method m1 in object ConfusingErrorMessage must be called with () argument
+ |
+ | longer explanation available when compiling with `-explain`
+-- [E178] Type Error: tests/neg/i17123.scala:9:2 -----------------------------------------------------------------------
+9 | m2 // error
+ | ^^
+ | missing argument list for method m2 in object ConfusingErrorMessage
+ |
+ | def m2()(): Unit
+ |
+ | longer explanation available when compiling with `-explain`
+-- [E178] Type Error: tests/neg/i17123.scala:10:4 ----------------------------------------------------------------------
+10 | m2() // error
+ | ^^^^
+ | missing argument list for method m2 in object ConfusingErrorMessage
+ |
+ | def m2()(): Unit
+ |
+ | longer explanation available when compiling with `-explain`
+-- [E178] Type Error: tests/neg/i17123.scala:11:2 ----------------------------------------------------------------------
+11 | m3 // error
+ | ^^
+ | missing argument list for method m3 in object ConfusingErrorMessage
+ |
+ | def m3()()(): Unit
+ |
+ | longer explanation available when compiling with `-explain`
+-- [E178] Type Error: tests/neg/i17123.scala:12:4 ----------------------------------------------------------------------
+12 | m3() // error
+ | ^^^^
+ | missing argument list for method m3 in object ConfusingErrorMessage
+ |
+ | def m3()()(): Unit
+ |
+ | longer explanation available when compiling with `-explain`
+-- [E178] Type Error: tests/neg/i17123.scala:13:6 ----------------------------------------------------------------------
+13 | m3()() // error
+ | ^^^^^^
+ | missing argument list for method m3 in object ConfusingErrorMessage
+ |
+ | def m3()()(): Unit
+ |
+ | longer explanation available when compiling with `-explain`
+-- [E178] Type Error: tests/neg/i17123.scala:15:2 ----------------------------------------------------------------------
+15 | f3 // error
+ | ^^
+ | missing argument list for method f3 in object ConfusingErrorMessage
+ |
+ | def f3()(i: Int)(): Unit
+ |
+ | longer explanation available when compiling with `-explain`
+-- [E178] Type Error: tests/neg/i17123.scala:16:2 ----------------------------------------------------------------------
+16 | f3() // error
+ | ^^^^
+ | missing argument list for method f3 in object ConfusingErrorMessage
+ |
+ | def f3()(i: Int)(): Unit
+ |
+ | longer explanation available when compiling with `-explain`
+-- [E178] Type Error: tests/neg/i17123.scala:17:6 ----------------------------------------------------------------------
+17 | f3()(2) // error
+ | ^^^^^^^
+ | missing argument list for method f3 in object ConfusingErrorMessage
+ |
+ | def f3()(i: Int)(): Unit
+ |
+ | longer explanation available when compiling with `-explain`
+-- [E178] Type Error: tests/neg/i17123.scala:19:2 ----------------------------------------------------------------------
+19 | i3 // error
+ | ^^
+ | missing argument list for method i3 in object ConfusingErrorMessage
+ |
+ | def i3()(using d: DummyImplicit)(): Unit
+ |
+ | longer explanation available when compiling with `-explain`
+-- [E178] Type Error: tests/neg/i17123.scala:20:2 ----------------------------------------------------------------------
+20 | i3() // error
+ | ^^^^
+ | missing argument list for method i3 in object ConfusingErrorMessage
+ |
+ | def i3()(using d: DummyImplicit)(): Unit
+ |
+ | longer explanation available when compiling with `-explain`
diff --git a/tests/neg/i17123.scala b/tests/neg/i17123.scala
new file mode 100644
index 000000000000..6547a375fec3
--- /dev/null
+++ b/tests/neg/i17123.scala
@@ -0,0 +1,22 @@
+object ConfusingErrorMessage {
+  def m1() = ()
+  def m2()() = ()
+  def m3()()() = ()
+  def f3()(i: Int)() = ()
+  def i3()(using d: DummyImplicit)() = ()
+  m1 // error
+  m1()
+  m2 // error
+  m2() // error
+  m3 // error
+  m3() // error
+  m3()() // error
+  m3()()()
+  f3 // error
+  f3() // error
+  f3()(2) // error
+  f3()(2)()
+  i3 // error
+  i3() // error
+  i3()()
+}
diff --git a/tests/neg/i17168.scala b/tests/neg/i17168.scala
new file mode 100644
index 000000000000..c31889c979b7
--- /dev/null
+++ b/tests/neg/i17168.scala
@@ -0,0 +1,3 @@
+type F[X <: String] = X
+
+val a = summon[F[Int] =:= Int] // error
diff --git a/tests/neg/i17266.check b/tests/neg/i17266.check
new file mode 100644
index 000000000000..7e07e3d43de4
--- /dev/null
+++ b/tests/neg/i17266.check
@@ -0,0 +1,88 @@
+-- [E181] Potential Issue Error: tests/neg/i17266.scala:4:2 ------------------------------------------------------------
+4 | synchronized { // error
+ | ^^^^^^^^^^^^
+ | Suspicious top-level unqualified call to synchronized
+ |---------------------------------------------------------------------------------------------------------------------
+ | Explanation (enabled by `-explain`)
+ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+ | Top-level unqualified calls to AnyRef or Any methods such as synchronized are
+ | resolved to calls on Predef or on imported methods. This might not be what
+ | you intended.
+ ---------------------------------------------------------------------------------------------------------------------
+-- [E181] Potential Issue Error: tests/neg/i17266.scala:17:2 -----------------------------------------------------------
+17 | synchronized { // error
+ | ^^^^^^^^^^^^
+ | Suspicious top-level unqualified call to synchronized
+ |--------------------------------------------------------------------------------------------------------------------
+ | Explanation (enabled by `-explain`)
+ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+ | Top-level unqualified calls to AnyRef or Any methods such as synchronized are
+ | resolved to calls on Predef or on imported methods. This might not be what
+ | you intended.
+ --------------------------------------------------------------------------------------------------------------------
+-- [E181] Potential Issue Error: tests/neg/i17266.scala:108:2 ----------------------------------------------------------
+108 | wait() // error
+ | ^^^^
+ | Suspicious top-level unqualified call to wait
+ |-------------------------------------------------------------------------------------------------------------------
+ | Explanation (enabled by `-explain`)
+ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+ | Top-level unqualified calls to AnyRef or Any methods such as wait are
+ | resolved to calls on Predef or on imported methods. This might not be what
+ | you intended.
+ -------------------------------------------------------------------------------------------------------------------
+-- [E181] Potential Issue Error: tests/neg/i17266.scala:115:2 ----------------------------------------------------------
+115 | wait() // error
+ | ^^^^
+ | Suspicious top-level unqualified call to wait
+ |-------------------------------------------------------------------------------------------------------------------
+ | Explanation (enabled by `-explain`)
+ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+ | Top-level unqualified calls to AnyRef or Any methods such as wait are
+ | resolved to calls on Predef or on imported methods. This might not be what
+ | you intended.
+ -------------------------------------------------------------------------------------------------------------------
+-- [E181] Potential Issue Error: tests/neg/i17266.scala:121:2 ----------------------------------------------------------
+121 | wait(10) // error
+ | ^^^^
+ | Suspicious top-level unqualified call to wait
+ |-------------------------------------------------------------------------------------------------------------------
+ | Explanation (enabled by `-explain`)
+ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+ | Top-level unqualified calls to AnyRef or Any methods such as wait are
+ | resolved to calls on Predef or on imported methods. This might not be what
+ | you intended.
+ -------------------------------------------------------------------------------------------------------------------
+-- [E181] Potential Issue Error: tests/neg/i17266.scala:128:2 ----------------------------------------------------------
+128 | wait(10) // error
+ | ^^^^
+ | Suspicious top-level unqualified call to wait
+ |-------------------------------------------------------------------------------------------------------------------
+ | Explanation (enabled by `-explain`)
+ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+ | Top-level unqualified calls to AnyRef or Any methods such as wait are
+ | resolved to calls on Predef or on imported methods. This might not be what
+ | you intended.
+ -------------------------------------------------------------------------------------------------------------------
+-- [E181] Potential Issue Error: tests/neg/i17266.scala:134:2 ----------------------------------------------------------
+134 | hashCode() // error
+ | ^^^^^^^^
+ | Suspicious top-level unqualified call to hashCode
+ |-------------------------------------------------------------------------------------------------------------------
+ | Explanation (enabled by `-explain`)
+ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+ | Top-level unqualified calls to AnyRef or Any methods such as hashCode are
+ | resolved to calls on Predef or on imported methods. This might not be what
+ | you intended.
+ -------------------------------------------------------------------------------------------------------------------
+-- [E181] Potential Issue Error: tests/neg/i17266.scala:141:2 ----------------------------------------------------------
+141 | hashCode() // error
+ | ^^^^^^^^
+ | Suspicious top-level unqualified call to hashCode
+ |-------------------------------------------------------------------------------------------------------------------
+ | Explanation (enabled by `-explain`)
+ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+ | Top-level unqualified calls to AnyRef or Any methods such as hashCode are
+ | resolved to calls on Predef or on imported methods. This might not be what
+ | you intended.
+ -------------------------------------------------------------------------------------------------------------------
diff --git a/tests/neg/i17266.scala b/tests/neg/i17266.scala
new file mode 100644
index 000000000000..5b74ea76810b
--- /dev/null
+++ b/tests/neg/i17266.scala
@@ -0,0 +1,144 @@
+// scalac: -Werror -explain
+
+def test1 =
+  synchronized { // error
+    println("hello")
+  }
+
+def test2 =
+  this.synchronized { // not an error (should be?)
+    println("hello")
+  }
+
+object MyLib
+
+def test3 =
+  import MyLib.*
+  synchronized { // error
+    println("hello")
+  }
+
+def test4 =
+  1.synchronized { // not an error (should be?)
+    println("hello")
+  }
+
+object Test4:
+  synchronized { // not an error
+    println("hello")
+  }
+
+object Test5:
+  def test5 =
+    synchronized { // not an error
+      println("hello")
+    }
+
+object Test6:
+  import MyLib.*
+  synchronized { // not an error
+    println("hello")
+  }
+
+object Test7:
+  import MyLib.*
+  def test7 =
+    synchronized { // not an error
+      println("hello")
+    }
+
+/*
+object Test7b:
+  def test8 =
+    import MyLib.*
+    synchronized { // already an error: Reference to synchronized is ambiguous.
+      println("hello")
+    }
+*/
+
+class Test8:
+  synchronized { // not an error
+    println("hello")
+  }
+
+class Test9:
+  def test5 =
+    synchronized { // not an error
+      println("hello")
+    }
+
+class Test10:
+  import MyLib.*
+  synchronized { // not an error
+    println("hello")
+  }
+
+class Test11:
+  import MyLib.*
+  def test7 =
+    synchronized { // not an error
+      println("hello")
+    }
+
+trait Test12:
+  synchronized { // not an error
+    println("hello")
+  }
+
+trait Test13:
+  def test5 =
+    synchronized { // not an error
+      println("hello")
+    }
+
+trait Test14:
+  import MyLib.*
+  synchronized { // not an error
+    println("hello")
+  }
+
+trait Test15:
+  import MyLib.*
+  def test7 =
+    synchronized { // not an error
+      println("hello")
+    }
+
+def test16 =
+  wait() // error
+
+def test17 =
+  this.wait() // not an error (should be?)
+
+def test18 =
+  import MyLib.*
+  wait() // error
+
+def test19 =
+  1.wait() // not an error (should be?)
+
+def test20 =
+  wait(10) // error
+
+def test21 =
+  this.wait(10) // not an error (should be?)
+
+def test22 =
+  import MyLib.*
+  wait(10) // error
+
+def test23 =
+  1.wait(10) // not an error (should be?)
+
+def test24 =
+  hashCode() // error
+
+def test25 =
+  this.hashCode() // not an error (should be?)
+
+def test26 =
+  import MyLib.*
+  hashCode() // error
+
+def test27 =
+  1.hashCode()// not an error (should be? probably not)
diff --git a/tests/neg/i18109.scala b/tests/neg/i18109.scala
new file mode 100644
index 000000000000..7df13b0c36ff
--- /dev/null
+++ b/tests/neg/i18109.scala
@@ -0,0 +1,11 @@
+package foo {}
+
+package bar {
+  object Test {
+    def qux[A] = 123
+    def main(args: Array[String]): Unit = {
+      val y = qux[foo.type] // error
+      val x = valueOf[foo.type] // error
+    }
+  }
+}
\ No newline at end of file
diff --git a/tests/neg/i4820.scala b/tests/neg/i4820.scala
deleted file mode 100644
index e19183b17b14..000000000000
--- a/tests/neg/i4820.scala
+++ /dev/null
@@ -1,2 +0,0 @@
-class Foo[A]
-class Bar[A] extends Foo // error
diff --git a/tests/neg/i4820b.scala b/tests/neg/i4820b.scala
deleted file mode 100644
index 4a7b3da3fb1b..000000000000
--- a/tests/neg/i4820b.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-trait SetOps[A, +C <: SetOps[A, C]] {
-  def concat(that: Iterable[A]): C = ???
-}
-
-class Set1[A] extends SetOps // error: should be SetOps[A, Set1[A]]
diff --git a/tests/neg/i4820c.scala b/tests/neg/i4820c.scala
deleted file mode 100644
index 6956b23363b5..000000000000
--- a/tests/neg/i4820c.scala
+++ /dev/null
@@ -1,2 +0,0 @@
-trait Foo[A]
-class Bar[A] extends Foo // error
\ No newline at end of file
diff --git a/tests/neg/i6779.check b/tests/neg/i6779.check
index 8e05c22eb640..f1e1b9d5557b 100644
--- a/tests/neg/i6779.check
+++ b/tests/neg/i6779.check
@@ -11,7 +11,7 @@
 | value f is not a member of T.
 | An extension method was tried, but could not be fully constructed:
 |
- |   Test.f[G[T]](x)(given_Stuff)
+ |   Test.f[G[T]](x)
 |
 | failed with:
 |
diff --git a/tests/neg/i7816.scala b/tests/neg/i7816.scala
index f1eed694a085..41dd6c2ea98e 100644
--- a/tests/neg/i7816.scala
+++ b/tests/neg/i7816.scala
@@ -1,4 +1,4 @@
 object A {
   def f()(>) = ??? // error
-  import f.NonExistent // error
+  import f.NonExistent
 }
\ No newline at end of file
diff --git a/tests/neg/i9803.check b/tests/neg/i9803.check
index cc7d56d585b0..20225f1f5bc5 100644
--- a/tests/neg/i9803.check
+++ b/tests/neg/i9803.check
@@ -1,8 +1,8 @@
 -- [E049] Reference Error: tests/neg/i9803.scala:15:10 -----------------------------------------------------------------
 15 | println(f421()) // error
 | ^^^^
- | Reference to f421 is ambiguous,
- | it is both imported by name by import bugs.shadowing.x.f421
+ | Reference to f421 is ambiguous.
+ | It is both imported by name by import bugs.shadowing.x.f421
 | and imported by name subsequently by import bugs.shadowing.y.f421
 |
 | longer explanation available when compiling with `-explain`
diff --git a/tests/neg/indent-colons.check b/tests/neg/indent-colons.check
index 102d41592014..f77d491f8b8f 100644
--- a/tests/neg/indent-colons.check
+++ b/tests/neg/indent-colons.check
@@ -47,15 +47,31 @@
 | Not found: file
 |
 | longer explanation available when compiling with `-explain`
--- Error: tests/neg/indent-colons.scala:5:2 ----------------------------------------------------------------------------
+-- [E178] Type Error: tests/neg/indent-colons.scala:5:2 ----------------------------------------------------------------
 5 | tryEither: // error
 | ^^^^^^^^^
- | missing arguments for method tryEither
--- Error: tests/neg/indent-colons.scala:11:2 ---------------------------------------------------------------------------
+ | missing argument list for method tryEither
+ |
+ | def tryEither[T](x: T)(y: Int => T): T
+ |
+ | where: T is a type variable
+ |
+ | longer explanation available when compiling with `-explain`
+-- [E178] Type Error: tests/neg/indent-colons.scala:11:2 ---------------------------------------------------------------
 11 | tryEither: // error
 | ^^^^^^^^^
- | missing arguments for method tryEither
--- Error: tests/neg/indent-colons.scala:18:2 ---------------------------------------------------------------------------
+ | missing argument list for method tryEither
+ |
+ | def tryEither[T](x: T)(y: Int => T): T
+ |
+ | where: T is a type variable
+ |
+ | longer explanation available when compiling with `-explain`
+-- [E178] Type Error: tests/neg/indent-colons.scala:18:2 ---------------------------------------------------------------
 18 | Some(3).fold: // error
 | ^^^^^^^^^^^^
- | missing arguments for method fold in class Option
+ | missing argument list for method fold in class Option
+ |
+ | final def fold[B](ifEmpty: => B)(f: A => B): B
+ |
+ | longer explanation available when compiling with `-explain`
diff --git a/tests/neg/interleaving-ab.scala b/tests/neg/interleaving-ab.scala
new file mode 100644
index 000000000000..e446626a2982
--- /dev/null
+++ b/tests/neg/interleaving-ab.scala
@@ -0,0 +1,11 @@
+import scala.language.experimental.clauseInterleaving
+
+object Ab:
+  given String = ""
+  given Double = 0
+
+  def illegal[A][B](x: A)(using B): B = summon[B] // error: Type parameter lists must be separated by a term or using parameter list
+
+  def ab[A](x: A)[B](using B): B = summon[B]
+  def test =
+    ab[Int](0: Int) // error
diff --git a/tests/neg/interleaving-params.scala b/tests/neg/interleaving-params.scala
new file mode 100644
index 000000000000..dc6762cf0214
--- /dev/null
+++ b/tests/neg/interleaving-params.scala
@@ -0,0 +1,9 @@
+import scala.language.experimental.clauseInterleaving
+
+class Params{
+  def bar[T](x: T)[T]: String = ??? // error
+  def zoo(x: Int)[T, U](x: U): T = ??? // error
+  def bbb[T <: U](x: U)[U]: U = ??? // error // error
+  def f0[T](implicit x: T)[U](y: U) = (x,y) // error
+  def f1[T](implicit x: T)[U] = (x,y) // error
+}
diff --git a/tests/neg/interleaving-signatureCollision.scala b/tests/neg/interleaving-signatureCollision.scala
new file mode 100644
index 000000000000..a6a729ed3b62
--- /dev/null
+++ b/tests/neg/interleaving-signatureCollision.scala
@@ -0,0 +1,5 @@
+import scala.language.experimental.clauseInterleaving
+
+object signatureCollision:
+  def f[T](x: T)[U](y: U) = (x,y)
+  def f[T](x: T, y: T) = (x,y) // error
diff --git a/tests/neg/interleaving-typeApply.check b/tests/neg/interleaving-typeApply.check
new file mode 100644
index 000000000000..a50c1455bfbb
--- /dev/null
+++ b/tests/neg/interleaving-typeApply.check
@@ -0,0 +1,30 @@
+-- [E057] Type Mismatch Error: tests/neg/interleaving-typeApply.scala:10:11 --------------------------------------------
+10 | f3[String]() // error
+ | ^
+ | Type argument String does not conform to upper bound Int
+ |
+ | longer explanation available when compiling with `-explain`
+-- [E057] Type Mismatch Error: tests/neg/interleaving-typeApply.scala:11:16 --------------------------------------------
+11 | f5[Int][Unit] // error
+ | ^
+ | Type argument Unit does not conform to upper bound String
+ |
+ | longer explanation available when compiling with `-explain`
+-- [E057] Type Mismatch Error: tests/neg/interleaving-typeApply.scala:12:19 --------------------------------------------
+12 | f5[String][Unit] // error // error
+ | ^
+ | Type argument Unit does not conform to upper bound String
+ |
+ | longer explanation available when compiling with `-explain`
+-- [E057] Type Mismatch Error: tests/neg/interleaving-typeApply.scala:12:11 --------------------------------------------
+12 | f5[String][Unit] // error // error
+ | ^
+ | Type argument String does not conform to upper bound Int
+ |
+ | longer explanation available when compiling with `-explain`
+-- [E057] Type Mismatch Error: tests/neg/interleaving-typeApply.scala:13:11 --------------------------------------------
+13 | f7[String]()[Unit] // error
+ | ^
+ | Type argument String does not conform to upper bound Int
+ |
+ | longer explanation available when compiling with `-explain`
diff --git a/tests/neg/interleaving-typeApply.scala b/tests/neg/interleaving-typeApply.scala
new file mode 100644
index 000000000000..ad21fe2f0329
--- /dev/null
+++ b/tests/neg/interleaving-typeApply.scala
@@ -0,0 +1,14 @@
+import scala.language.experimental.clauseInterleaving
+
+object typeApply:
+
+  def f3[T <: Int](using DummyImplicit)[U <: String](): T => T = ???
+  def f5[T <: Int](using DummyImplicit)[U <: String]: [X <: Unit] => X => X = ???
+  def f7[T <: Int](using DummyImplicit)[U <: String]()[X <: Unit]: X => X = ???
+
+  @main def test = {
+    f3[String]() // error
+    f5[Int][Unit] // error
+    f5[String][Unit] // error // error
+    f7[String]()[Unit] // error
+  }
diff --git a/tests/neg/interleaving-unmatched.scala b/tests/neg/interleaving-unmatched.scala
new file mode 100644
index 000000000000..2ce3074d07fa
--- /dev/null
+++ b/tests/neg/interleaving-unmatched.scala
@@ -0,0 +1,5 @@
+import scala.language.experimental.clauseInterleaving
+
+object unmatched:
+  def f1[T (x: T)] = ??? // error
+  def f2(x: Any[)T] = ??? // error // error
diff --git a/tests/neg/matchtype-seq.check b/tests/neg/matchtype-seq.check
index aba1e312da01..9c37fc08c4df 100644
--- a/tests/neg/matchtype-seq.check
+++ b/tests/neg/matchtype-seq.check
@@ -1,7 +1,7 @@
 -- Error: tests/neg/matchtype-seq.scala:9:11 ---------------------------------------------------------------------------
 9 | identity[T1[3]]("") // error
 | ^
- | Match type reduction failed since selector (3 : Int)
+ | Match type reduction failed since selector (3 : Int)
 | matches none of the cases
 |
 | case (1 : Int) => Int
@@ -9,7 +9,7 @@
 -- Error: tests/neg/matchtype-seq.scala:10:11 --------------------------------------------------------------------------
 10 | identity[T1[3]](1) // error
 | ^
- | Match type reduction failed since selector (3 : Int)
+ | Match type reduction failed since selector (3 : Int)
 | matches none of the cases
 |
 | case (1 : Int) => Int
@@ -23,7 +23,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce Test.T1[Int]
- | failed since selector Int
+ | failed since selector Int
 | does not match case (1 : Int) => Int
 | and cannot be shown to be disjoint from it either.
 | Therefore, reduction cannot advance to the remaining case
@@ -40,7 +40,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce Test.T1[Int]
- | failed since selector Int
+ | failed since selector Int
 | does not match case (1 : Int) => Int
 | and cannot be shown to be disjoint from it either.
 | Therefore, reduction cannot advance to the remaining case
@@ -57,7 +57,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce Test.T2[Int]
- | failed since selector Int
+ | failed since selector Int
 | does not match case (1 : Int) => Int
 | and cannot be shown to be disjoint from it either.
 | Therefore, reduction cannot advance to the remaining case
@@ -81,7 +81,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce Test.T2[Int]
- | failed since selector Int
+ | failed since selector Int
 | does not match case (1 : Int) => Int
 | and cannot be shown to be disjoint from it either.
 | Therefore, reduction cannot advance to the remaining case
@@ -98,7 +98,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce Test.T3[Test.A]
- | failed since selector Test.A
+ | failed since selector Test.A
 | does not match case Test.B => Int
 | and cannot be shown to be disjoint from it either.
 | Therefore, reduction cannot advance to the remaining case
@@ -115,7 +115,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce Test.T3[Test.A]
- | failed since selector Test.A
+ | failed since selector Test.A
 | does not match case Test.B => Int
 | and cannot be shown to be disjoint from it either.
 | Therefore, reduction cannot advance to the remaining case
@@ -132,7 +132,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce Test.T5[Test.A]
- | failed since selector Test.A
+ | failed since selector Test.A
 | does not match case Test.C => String
 | and cannot be shown to be disjoint from it either.
 | Therefore, reduction cannot advance to the remaining case
@@ -149,7 +149,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce Test.T5[Test.A]
- | failed since selector Test.A
+ | failed since selector Test.A
 | does not match case Test.C => String
 | and cannot be shown to be disjoint from it either.
 | Therefore, reduction cannot advance to the remaining case
@@ -166,7 +166,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce Test.T7[Test.D]
- | failed since selector Test.D
+ | failed since selector Test.D
 | does not match case Test.A2 => Int
 | and cannot be shown to be disjoint from it either.
 | Therefore, reduction cannot advance to the remaining case
@@ -183,7 +183,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce Test.T7[Test.D]
- | failed since selector Test.D
+ | failed since selector Test.D
 | does not match case Test.A2 => Int
 | and cannot be shown to be disjoint from it either.
 | Therefore, reduction cannot advance to the remaining case
@@ -200,7 +200,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce Test.T8[Test.E2]
- | failed since selector Test.E2
+ | failed since selector Test.E2
 | does not match case Test.E1 => Int
 | and cannot be shown to be disjoint from it either.
 | Therefore, reduction cannot advance to the remaining case
@@ -224,7 +224,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce Test.T8[Test.E2]
- | failed since selector Test.E2
+ | failed since selector Test.E2
 | does not match case Test.E1 => Int
 | and cannot be shown to be disjoint from it either.
 | Therefore, reduction cannot advance to the remaining case
@@ -241,7 +241,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce Test.T9[(Nothing, String)]
- | failed since selector (Nothing, String)
+ | failed since selector (Nothing, String)
 | is uninhabited (there are no values of that type).
 |
 | longer explanation available when compiling with `-explain`
@@ -254,7 +254,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce Test.T9[(String, Nothing)]
- | failed since selector (String, Nothing)
+ | failed since selector (String, Nothing)
 | is uninhabited (there are no values of that type).
 |
 | longer explanation available when compiling with `-explain`
@@ -267,7 +267,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce Test.T9[(Int, Nothing)]
- | failed since selector (Int, Nothing)
+ | failed since selector (Int, Nothing)
 | is uninhabited (there are no values of that type).
 |
 | longer explanation available when compiling with `-explain`
@@ -280,7 +280,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce Test.T9[(Nothing, Int)]
- | failed since selector (Nothing, Int)
+ | failed since selector (Nothing, Int)
 | is uninhabited (there are no values of that type).
 |
 | longer explanation available when compiling with `-explain`
@@ -293,7 +293,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce Test.T9[(?, ?)]
- | failed since selector (?, ?)
+ | failed since selector (?, ?)
 | does not match case (Int, String) => Int
 | and cannot be shown to be disjoint from it either.
 | Therefore, reduction cannot advance to the remaining case
@@ -310,7 +310,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce Test.T9[(?, ?)]
- | failed since selector (?, ?)
+ | failed since selector (?, ?)
 | does not match case (Int, String) => Int
 | and cannot be shown to be disjoint from it either.
 | Therefore, reduction cannot advance to the remaining case
@@ -327,7 +327,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce Test.T9[(Any, Any)]
- | failed since selector (Any, Any)
+ | failed since selector (Any, Any)
 | does not match case (Int, String) => Int
 | and cannot be shown to be disjoint from it either.
 | Therefore, reduction cannot advance to the remaining case
@@ -344,7 +344,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce Test.T9[(Any, Any)]
- | failed since selector (Any, Any)
+ | failed since selector (Any, Any)
 | does not match case (Int, String) => Int
 | and cannot be shown to be disjoint from it either.
 | Therefore, reduction cannot advance to the remaining case
@@ -361,7 +361,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce Test.TA[Test.Box2[Int, Int, String]]
- | failed since selector Test.Box2[Int, Int, String]
+ | failed since selector Test.Box2[Int, Int, String]
 | does not match case Test.Box2[Int, Int, Int] => Int
 | and cannot be shown to be disjoint from it either.
 | Therefore, reduction cannot advance to the remaining case
@@ -378,7 +378,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce Test.TA[Test.Box2[Int, Int, String]]
- | failed since selector Test.Box2[Int, Int, String]
+ | failed since selector Test.Box2[Int, Int, String]
 | does not match case Test.Box2[Int, Int, Int] => Int
 | and cannot be shown to be disjoint from it either.
 | Therefore, reduction cannot advance to the remaining case
@@ -395,7 +395,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce Test.TD[Test.Box2_C[Int, Int, String]]
- | failed since selector Test.Box2_C[Int, Int, String]
+ | failed since selector Test.Box2_C[Int, Int, String]
 | does not match case Test.Box2_C[Int, Int, Int] => Int
 | and cannot be shown to be disjoint from it either.
 | Therefore, reduction cannot advance to the remaining case
@@ -412,7 +412,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce Test2.M[Some[A]]
- | failed since selector Some[A]
+ | failed since selector Some[A]
 | does not match case Option[Int] => String
 | and cannot be shown to be disjoint from it either.
 | Therefore, reduction cannot advance to the remaining case
@@ -429,7 +429,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce Test2.M[Some[A]]
- | failed since selector Some[A]
+ | failed since selector Some[A]
 | does not match case Option[Int] => String
 | and cannot be shown to be disjoint from it either.
 | Therefore, reduction cannot advance to the remaining case
@@ -446,7 +446,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce Test3.M[Test3.Inv[A]]
- | failed since selector Test3.Inv[A]
+ | failed since selector Test3.Inv[A]
 | does not match case Test3.Inv[Int] => String
 | and cannot be shown to be disjoint from it either.
 | Therefore, reduction cannot advance to the remaining case
@@ -463,7 +463,7 @@
 | Note: a match type could not be fully reduced:
 |
 | trying to reduce Test4.M[Test4.Inv[Foo.this.A]]
- | failed since selector Test4.Inv[Foo.this.A]
+ | failed since selector Test4.Inv[Foo.this.A]
 | does not match case Test4.Inv[Int] => String
 | and cannot be shown to be disjoint from it either.
| Therefore, reduction cannot advance to the remaining case diff --git a/tests/neg/namedTypeParams.check b/tests/neg/namedTypeParams.check new file mode 100644 index 000000000000..3f6f9f7913e8 --- /dev/null +++ b/tests/neg/namedTypeParams.check @@ -0,0 +1,102 @@ +-- [E040] Syntax Error: tests/neg/namedTypeParams.scala:2:8 ------------------------------------------------------------ +2 |class D[type T] // error: identifier expected, but `type` found + | ^^^^ + | an identifier expected, but 'type' found + | + | longer explanation available when compiling with `-explain` +-- [E040] Syntax Error: tests/neg/namedTypeParams.scala:11:13 ---------------------------------------------------------- +11 | val x: C[T = Int] = // error: ']' expected, but `=` found // error + | ^ + | ']' expected, but '=' found +-- [E040] Syntax Error: tests/neg/namedTypeParams.scala:12:12 ---------------------------------------------------------- +12 | new C[T = Int] // error: ']' expected, but `=` found // error + | ^ + | ']' expected, but '=' found +-- [E040] Syntax Error: tests/neg/namedTypeParams.scala:14:22 ---------------------------------------------------------- +14 | class E extends C[T = Int] // error: ']' expected, but `=` found // error + | ^ + | ']' expected, but '=' found +-- [E040] Syntax Error: tests/neg/namedTypeParams.scala:15:22 ---------------------------------------------------------- +15 | class F extends C[T = Int]() // error: ']' expected, but `=` found // error + | ^ + | ']' expected, but '=' found +-- [E040] Syntax Error: tests/neg/namedTypeParams.scala:19:19 ---------------------------------------------------------- +19 | f[X = Int, String](1, "") // error // error + | ^ + | '=' expected, but ']' found +-- Error: tests/neg/namedTypeParams.scala:6:8 -------------------------------------------------------------------------- +6 | f[X = Int, Y = Int](1, 2) // error: experimental // error: experimental + | ^^^ + | Named type arguments are experimental, + | they must be enabled with a `experimental.namedTypeArguments` language import or setting +-- Error: tests/neg/namedTypeParams.scala:6:17 ------------------------------------------------------------------------- +6 | f[X = Int, Y = Int](1, 2) // error: experimental // error: experimental + | ^^^ + | Named type arguments are experimental, + | they must be enabled with a `experimental.namedTypeArguments` language import or setting +-- [E006] Not Found Error: tests/neg/namedTypeParams.scala:11:11 ------------------------------------------------------- +11 | val x: C[T = Int] = // error: ']' expected, but `=` found // error + | ^ + | Not found: type T + | + | longer explanation available when compiling with `-explain` +-- [E006] Not Found Error: tests/neg/namedTypeParams.scala:12:10 ------------------------------------------------------- +12 | new C[T = Int] // error: ']' expected, but `=` found // error + | ^ + | Not found: type T + | + | longer explanation available when compiling with `-explain` +-- [E006] Not Found Error: tests/neg/namedTypeParams.scala:14:20 ------------------------------------------------------- +14 | class E extends C[T = Int] // error: ']' expected, but `=` found // error + | ^ + | Not found: type T + | + | longer explanation available when compiling with `-explain` +-- [E006] Not Found Error: tests/neg/namedTypeParams.scala:15:20 ------------------------------------------------------- +15 | class F extends C[T = Int]() // error: ']' expected, but `=` found // error + | ^ + | Not found: type T + | + | longer explanation 
available when compiling with `-explain` +-- [E102] Syntax Error: tests/neg/namedTypeParams.scala:19:18 ---------------------------------------------------------- +19 | f[X = Int, String](1, "") // error // error + | ^ + | Type parameter String is undefined. Expected one of X, Y. +-- Error: tests/neg/namedTypeParams.scala:20:12 ------------------------------------------------------------------------ +20 | f[X = Int][X = Int][Y = String](1, "") // error: illegal repeated type application + | ^^^^^^^^^^^^^^^^^^^ + | illegal repeated type application + | You might have meant something like: + | Test.f[X = Int, X = Int] +-- Error: tests/neg/namedTypeParams.scala:22:12 ------------------------------------------------------------------------ +22 | f[X = Int][Y = String](1, "") // error: illegal repeated type application + | ^^^^^^^^^^^^^^^^^^^^^^ + | illegal repeated type application + | You might have meant something like: + | Test.f[X = Int, Y = String] +-- Error: tests/neg/namedTypeParams.scala:23:12 ------------------------------------------------------------------------ +23 | f[X = Int][String](1, "") // error: illegal repeated type application + | ^^^^^^^^^^^^^^^^^^ + | illegal repeated type application + | You might have meant something like: + | Test.f[X = Int, String] +-- Error: tests/neg/namedTypeParams.scala:25:15 ------------------------------------------------------------------------ +25 | f[Y = String][X = Int](1, "") // error: illegal repeated type application + | ^^^^^^^^^^^^^^^^^^^^^^ + | illegal repeated type application + | You might have meant something like: + | Test.f[Y = String, X = Int] +-- Error: tests/neg/namedTypeParams.scala:26:15 ------------------------------------------------------------------------ +26 | f[Y = String][Int](1, "") // error: illegal repeated type application + | ^^^^^^^^^^^^^^^^^^ + | illegal repeated type application + | You might have meant something like: + | Test.f[Y = String, Int] +-- [E102] Syntax Error: tests/neg/namedTypeParams.scala:33:9 ----------------------------------------------------------- +33 | f2[Y = String][X = Int](1, "") // error: Y is undefined + | ^^^^^^ + | Type parameter Y is undefined. Expected one of X. +-- [E102] Syntax Error: tests/neg/namedTypeParams.scala:34:9 ----------------------------------------------------------- +34 | f2[Y = String](1, "") // error: Y is undefined + | ^^^^^^ + | Type parameter Y is undefined. Expected one of X. diff --git a/tests/neg/namedTypeParams.scala b/tests/neg/namedTypeParams.scala index 8ed7c92241ea..53ef14188e12 100644 --- a/tests/neg/namedTypeParams.scala +++ b/tests/neg/namedTypeParams.scala @@ -5,7 +5,7 @@ object Test0: def f[X, Y](x: X, y: Y): Int = ??? f[X = Int, Y = Int](1, 2) // error: experimental // error: experimental -object Test { +object Test: import language.experimental.namedTypeArguments val x: C[T = Int] = // error: ']' expected, but `=` found // error @@ -24,4 +24,11 @@ object Test { f[Y = String][X = Int](1, "") // error: illegal repeated type application f[Y = String][Int](1, "") // error: illegal repeated type application -} + +object TestInterleaving: + import language.experimental.namedTypeArguments + import language.experimental.clauseInterleaving + def f2[X](using DummyImplicit)[Y](x: X, y: Y): Int = ??? 
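// What the two erroring calls below exercise (a sketch, assuming the
// experimental clauseInterleaving semantics under test): f2's type
// parameters sit in two separate clauses, [X] before the using clause
// and [Y] after it. A named type argument is resolved against the
// clause actually being applied, and the first type clause declares
// only X, so naming Y there is rejected with the E102 error shown in
// the check file above ("Type parameter Y is undefined. Expected one of X.").
//   f2[X = Int][Y = String](1, "")  // hypothetical call naming each
//                                   // parameter in its own clause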
+ + f2[Y = String][X = Int](1, "") // error: Y is undefined + f2[Y = String](1, "") // error: Y is undefined diff --git a/tests/neg/outdent-dot.check b/tests/neg/outdent-dot.check new file mode 100644 index 000000000000..c93c3bcfba73 --- /dev/null +++ b/tests/neg/outdent-dot.check @@ -0,0 +1,18 @@ +-- Error: tests/neg/outdent-dot.scala:6:5 ------------------------------------------------------------------------------ +6 | .toString // error + | ^ + | The start of this line does not match any of the previous indentation widths. + | Indentation width of current line : 5 spaces + | This falls between previous widths: 2 spaces and 6 spaces +-- Error: tests/neg/outdent-dot.scala:11:3 ----------------------------------------------------------------------------- +11 | .filter: x => // error + | ^ + | The start of this line does not match any of the previous indentation widths. + | Indentation width of current line : 3 spaces + | This falls between previous widths: 2 spaces and 6 spaces +-- Error: tests/neg/outdent-dot.scala:13:4 ----------------------------------------------------------------------------- +13 | println("foo") // error + | ^ + | The start of this line does not match any of the previous indentation widths. + | Indentation width of current line : 4 spaces + | This falls between previous widths: 2 spaces and 6 spaces diff --git a/tests/neg/outdent-dot.scala b/tests/neg/outdent-dot.scala new file mode 100644 index 000000000000..d0e882a3c073 --- /dev/null +++ b/tests/neg/outdent-dot.scala @@ -0,0 +1,13 @@ +def Block(f: => Int): Int = f + +def bar(): String = + Block: + 2 + 2 + .toString // error + +def foo(xs: List[Int]) = + xs.map: x => + x + 1 + .filter: x => // error + x > 0 + println("foo") // error diff --git a/tests/neg/overrides.scala b/tests/neg/overrides.scala index 48f3260721e9..c8fc8de97f7c 100644 --- a/tests/neg/overrides.scala +++ b/tests/neg/overrides.scala @@ -42,6 +42,9 @@ class A[T] { def next: T = ??? + import scala.language.experimental.clauseInterleaving + + def b[U <: T](x: Int)[V >: T](y: String) = false } class B extends A[Int] { @@ -52,6 +55,20 @@ class B extends A[Int] { override def next(): Int = ??? // error: incompatible type + import scala.language.experimental.clauseInterleaving + + override def b[T <: Int](x: Int)(y: String) = true // error +} + +class C extends A[String] { + + override def f(x: String) = x // error + + override def next: Int = ??? 
// error: incompatible type + + import scala.language.experimental.clauseInterleaving + + override def b[T <: String](x: Int)[U >: Int](y: String) = true // error: incompatible type } class X { @@ -103,4 +120,3 @@ class C extends A { override def m: Int = 42 // error: has incompatible type } } - diff --git a/tests/neg/recursive-lower-constraint.scala b/tests/neg/recursive-lower-constraint.scala index 8009ab5fce6e..cf45d8b95171 100644 --- a/tests/neg/recursive-lower-constraint.scala +++ b/tests/neg/recursive-lower-constraint.scala @@ -3,5 +3,5 @@ class Bar extends Foo[Bar] class A { def foo[T <: Foo[T], U >: Foo[T] <: T](x: T): T = x - foo(new Bar) // error + foo(new Bar) // error // error } diff --git a/tests/neg/safeThrowsStrawman2.scala b/tests/neg/safeThrowsStrawman2.scala index 7d87baad6fa4..8d95494e30e0 100644 --- a/tests/neg/safeThrowsStrawman2.scala +++ b/tests/neg/safeThrowsStrawman2.scala @@ -24,7 +24,7 @@ def bar(x: Boolean)(using CanThrow[Fail]): Int = val x = new CanThrow[Fail]() // OK, x is erased val y: Any = new CanThrow[Fail]() // error: illegal reference to erased class CanThrow val y2: Any = new CTF() // error: illegal reference to erased class CanThrow - println(foo(true, ctf)) // error: ctf is declared as erased, but is in fact used + println(foo(true, ctf)) // not error: ctf will be erased at erasure val a = (1, new CanThrow[Fail]()) // error: illegal reference to erased class CanThrow def b: (Int, CanThrow[Fail]) = ??? def c = b._2 // ok; we only check creation sites diff --git a/tests/neg/syntax-error-recovery.check b/tests/neg/syntax-error-recovery.check index 0bf626210fed..18d877833d79 100644 --- a/tests/neg/syntax-error-recovery.check +++ b/tests/neg/syntax-error-recovery.check @@ -94,12 +94,6 @@ | Not found: bam | | longer explanation available when compiling with `-explain` --- [E006] Not Found Error: tests/neg/syntax-error-recovery.scala:61:10 ------------------------------------------------- -61 | println(bam) // error - | ^^^ - | Not found: bam - | - | longer explanation available when compiling with `-explain` -- [E129] Potential Issue Warning: tests/neg/syntax-error-recovery.scala:7:2 ------------------------------------------- 6 | 2 7 | } diff --git a/tests/neg/syntax-error-recovery.scala b/tests/neg/syntax-error-recovery.scala index 775abeb97bdb..b6663cc9c70a 100644 --- a/tests/neg/syntax-error-recovery.scala +++ b/tests/neg/syntax-error-recovery.scala @@ -58,5 +58,5 @@ object Test2: def foo5(x: Int) = foo2(foo2(,) // error // error - println(bam) // error + println(bam) // error \ No newline at end of file diff --git a/tests/neg/t12715.scala b/tests/neg/t12715.scala new file mode 100644 index 000000000000..b24d51a6e9fa --- /dev/null +++ b/tests/neg/t12715.scala @@ -0,0 +1,17 @@ +trait A { def f: String } +trait B extends A { def f = "B" } +trait C extends A { override val f = "C" } +trait D extends C { override val f = "D" } +trait E extends A, B { def d = super.f } +final class O1 extends B, C, D, E // error: parent trait E has a super call which binds to the value D.f. Super calls can only target methods. +final class O2 extends B, C, E, D // error: parent trait E has a super call which binds to the value C.f. Super calls can only target methods. 
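// Reduced shape of the failure above (illustrative; it mirrors traits A-E):
//   trait C extends A { override val f = "C" }  // f is now a value member
//   trait E extends A, B { def d = super.f }    // super.f must target a method
// In O1/O2 the class linearization makes E's `super.f` resolve to the
// val D.f (respectively C.f); a val has no static trait forwarder such
// as `f$` to dispatch to, which previously surfaced as the
// NoSuchMethodError recorded in the `// was:` comments below.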
+final class O3 extends B, E, C, D + +object Main: + def main(args: Array[String]): Unit = + println(O1().f) // D + println(O2().f) // D + println(O3().f) // D + println(O3().d) // B + O1().d // was: NoSuchMethodError: 'java.lang.String D.f$(D)' + O2().d // was: NoSuchMethodError: 'java.lang.String C.f$(C)' diff --git a/tests/neg/t12715b.scala b/tests/neg/t12715b.scala new file mode 100644 index 000000000000..da024116d4b3 --- /dev/null +++ b/tests/neg/t12715b.scala @@ -0,0 +1,16 @@ +trait B: + def f: Float = 1.0f + +class A(override val f: Float) extends B + +trait C extends B: + abstract override val f = super.f + 100.0f + +trait D extends B: + abstract override val f = super.f + 1000.0f + +class ACD10 extends A(10.0f) with C with D // error: parent trait D has a super call to method B.f, which binds to the value C.f. Super calls can only target methods. + +object Test: + def main(args: Array[String]): Unit = + new ACD10 // was: NoSuchMethodError: 'float C.f$(C)' diff --git a/tests/neg/t9419.scala b/tests/neg/t9419.scala new file mode 100644 index 000000000000..e9358c0ba641 --- /dev/null +++ b/tests/neg/t9419.scala @@ -0,0 +1,24 @@ +trait Magic[S]: + def init: S + def step(s: S): String + +object IntMagic extends Magic[Int]: + def init = 0 + def step(s: Int): String = (s - 1).toString + +object StrMagic extends Magic[String]: + def init = "hi" + def step(s: String): String = s.reverse + +object Main: + def onestep[T](m: () => Magic[T]): String = m().step(m().init) + def unostep[T](m: => Magic[T]): String = m.step(m.init) + + val iter: Iterator[Magic[?]] = Iterator.tabulate(Int.MaxValue)(i => if i % 2 == 0 then IntMagic else StrMagic) + + // was: class java.lang.String cannot be cast to class java.lang.Integer + def main(args: Array[String]): Unit = + onestep(() => iter.next()) // error + unostep(iter.next()) // error + val m = iter.next() + unostep(m) // ok, because m is a value diff --git a/tests/neg/t9419.zio-http.scala b/tests/neg/t9419.zio-http.scala new file mode 100644 index 000000000000..cff9ec51e6f9 --- /dev/null +++ b/tests/neg/t9419.zio-http.scala @@ -0,0 +1,18 @@ +// Minimisation of how the fix for t9419 affected zio-http +import java.util.concurrent.Future as JFuture + +trait Test: + def shutdownGracefully(): JFuture[_] + + def executedWildcard(jFuture: => JFuture[_]): Unit + def executedGeneric[A](jFuture: => JFuture[A]): Unit + def executedWildGen[A](jFuture: => JFuture[? <: A]): Unit + + // Even though JFuture is morally covariant, at least currently, + // there's no definition-side variance, so it's treated as invariant. + // So we have to be concerned about two different values of `JFuture[A]` + // with different types blowing up together. Hence the error in `fails`. + def works = executedWildcard(shutdownGracefully()) + def fails = executedGeneric(shutdownGracefully()) // error + def fixed = executedGeneric(shutdownGracefully().asInstanceOf[JFuture[Any]]) // fix + def best2 = executedWildGen(shutdownGracefully()) // even better, use use-site variance in the method diff --git a/tests/patmat/i11541.scala b/tests/patmat/i11541.scala new file mode 100644 index 000000000000..4ac1af08c80b --- /dev/null +++ b/tests/patmat/i11541.scala @@ -0,0 +1,13 @@ +import scala.reflect.ClassTag + +class Test: + type A + + given ClassTag[A] = ??? + + var a: A | Null = null + + a match { //WARNING: match may not be exhaustive. 
It would fail on pattern case: _: A + case null => + case a: A => + } diff --git a/tests/patmat/java-enum1/ParameterModifier.java b/tests/patmat/java-enum1/ParameterModifier.java new file mode 100644 index 000000000000..c9ddc157ba7e --- /dev/null +++ b/tests/patmat/java-enum1/ParameterModifier.java @@ -0,0 +1,8 @@ +public enum ParameterModifier { + Repeated, + Plain, + ByName; + + private ParameterModifier() { + } +} diff --git a/tests/patmat/java-enum1/Test.scala b/tests/patmat/java-enum1/Test.scala new file mode 100644 index 000000000000..b6ea483d8fb4 --- /dev/null +++ b/tests/patmat/java-enum1/Test.scala @@ -0,0 +1,6 @@ +class Test: + private def showParameterModifier(base: String, pm: ParameterModifier): String = pm match { + case ParameterModifier.Plain => base + case ParameterModifier.Repeated => base + "*" + case ParameterModifier.ByName => "=> " + base + } diff --git a/tests/pending/neg/cc-depfun.scala b/tests/pending/neg/cc-depfun.scala new file mode 100644 index 000000000000..4d600872d208 --- /dev/null +++ b/tests/pending/neg/cc-depfun.scala @@ -0,0 +1,14 @@ +import language.experimental.captureChecking + +// compare with neg-custom-args/captures/depfun.scala, which produces errors +// but the errors go away if ->{} gets replaced by ->. + +trait Cap { def use(): Unit } + +def main() = { + val f: (io: Cap^) -> () -> Unit = + io => () => io.use() // error + + val g: (Cap^) -> () -> Unit = + io => () => io.use() // error +} diff --git a/tests/pending/neg/i16451.check b/tests/pending/neg/i16451.check new file mode 100644 index 000000000000..e53085e8eafa --- /dev/null +++ b/tests/pending/neg/i16451.check @@ -0,0 +1,24 @@ +-- Error: tests/neg/i16451.scala:13:9 ---------------------------------------------------------------------------------- +13 | case x: Wrapper[Color.Red.type] => Some(x) // error + | ^ + |the type test for Wrapper[(Color.Red : Color)] cannot be checked at runtime because its type arguments can't be determined from Wrapper[Color] +-- Error: tests/neg/i16451.scala:21:9 ---------------------------------------------------------------------------------- +21 | case x: Wrapper[Color.Red.type] => Some(x) // error + | ^ + |the type test for Wrapper[(Color.Red : Color)] cannot be checked at runtime because its type arguments can't be determined from Any +-- Error: tests/neg/i16451.scala:25:9 ---------------------------------------------------------------------------------- +25 | case x: Wrapper[Color.Red.type] => Some(x) // error + | ^ + |the type test for Wrapper[(Color.Red : Color)] cannot be checked at runtime because its type arguments can't be determined from Wrapper[Color] +-- Error: tests/neg/i16451.scala:29:9 ---------------------------------------------------------------------------------- +29 | case x: Wrapper[Color.Red.type] => Some(x) // error + | ^ + |the type test for Wrapper[(Color.Red : Color)] cannot be checked at runtime because its type arguments can't be determined from A1 +-- Error: tests/neg/i16451.scala:34:11 --------------------------------------------------------------------------------- +34 | case x: Wrapper[Color.Red.type] => x // error + | ^ + |the type test for Wrapper[(Color.Red : Color)] cannot be checked at runtime because its type arguments can't be determined from Wrapper[Color] +-- Error: tests/neg/i16451.scala:39:11 --------------------------------------------------------------------------------- +39 | case x: Wrapper[Color.Red.type] => x // error + | ^ + |the type test for Wrapper[(Color.Red : Color)] cannot be checked at runtime because 
its type arguments can't be determined from Wrapper[Color] diff --git a/tests/pending/neg/i16451.scala b/tests/pending/neg/i16451.scala new file mode 100644 index 000000000000..49997d2bcf92 --- /dev/null +++ b/tests/pending/neg/i16451.scala @@ -0,0 +1,40 @@ +// scalac: -Werror +enum Color: + case Red, Green + +case class Wrapper[A](value: A) + +object Test: + def test_correct(x: Wrapper[Color]): Option[Wrapper[Color.Red.type]] = x match + case x: Wrapper[Color.Red.type] => Some(x) // error + case null => None + + def test_different(x: Wrapper[Color]): Option[Wrapper[Color]] = x match + case x @ Wrapper(_: Color.Red.type) => Some(x) + case x @ Wrapper(_: Color.Green.type) => None + + def test_any(x: Any): Option[Wrapper[Color.Red.type]] = x match + case x: Wrapper[Color.Red.type] => Some(x) // error + case _ => None + + def test_wrong(x: Wrapper[Color]): Option[Wrapper[Color.Red.type]] = x match + case x: Wrapper[Color.Red.type] => Some(x) // error + case null => None + + def t2[A1 <: Wrapper[Color]](x: A1): Option[Wrapper[Color.Red.type]] = x match + case x: Wrapper[Color.Red.type] => Some(x) // error + case null => None + + def test_wrong_seq(xs: Seq[Wrapper[Color]]): Seq[Wrapper[Color.Red.type]] = + xs.collect { + case x: Wrapper[Color.Red.type] => x // error + } + + def test_wrong_seq2(xs: Seq[Wrapper[Color]]): Seq[Wrapper[Color.Red.type]] = + xs.collect { x => x match + case x: Wrapper[Color.Red.type] => x // error + } + + def main(args: Array[String]): Unit = + println(test_wrong_seq(Seq(Wrapper(Color.Red), Wrapper(Color.Green)))) + // outputs: List(Wrapper(Red), Wrapper(Green)) diff --git a/tests/pending/pos/i16826.scala b/tests/pending/pos/i16826.scala new file mode 100644 index 000000000000..a938ab42dac3 --- /dev/null +++ b/tests/pending/pos/i16826.scala @@ -0,0 +1,10 @@ +import language.experimental.captureChecking +class A +class B(a: {*} A) +class C(a: {*} A): + def setB(b: {a} B): Unit = ??? 
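// In the older capture syntax used in this pending test, `{*} A` is an A
// that may capture any capability, and `{a} B` is a B whose captured
// capabilities are bounded by the parameter `a`. In the newer postfix
// notation adopted elsewhere in this diff, the same class would
// presumably be spelled:
//   class C(a: A^):
//     def setB(b: B^{a}): Unit = ???
// The test below then checks that a B tied to capability `a1` is accepted
// by a C constructed from that same `a1`.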
+ + +def test(a1: {*} A)(b1: {a1} B) = + val c = new C(a1) + c.setB(b1) diff --git a/tests/pos-custom-args/bounded1.scala b/tests/pos-custom-args/bounded1.scala index 5fb7f0da904b..e16da4935a14 100644 --- a/tests/pos-custom-args/bounded1.scala +++ b/tests/pos-custom-args/bounded1.scala @@ -1,27 +1,27 @@ // To be revisited class CC -type Cap = {*} CC +type Cap = CC^ def test(c: Cap) = - class B[X <: {c} Object](x: X): + class B[X <: Object^{c}](x: X): def elem = x def lateElem = () => x def f(x: Int): Int = if c == c then x else 0 val b = new B(f) val r1 = b.elem - val r1c: {c} Int -> Int = r1 + val r1c: Int^{c} -> Int = r1 val r2 = b.lateElem - val r2c: () -> {c} Int -> Int = r2 // was error now OK + val r2c: () -> Int^{c} -> Int = r2 // was error now OK def test2(c: Cap) = - class B[X <: {*} Any](x: X): + class B[X <: Any^](x: X): def elem = x def lateElem = () => x def f(x: Int): Int = if c == c then x else 0 val b = new B(f) val r1 = b.elem - val r1c: {c} Int -> Int = r1 + val r1c: Int ->{c} Int = r1 val r2 = b.lateElem - val r2c: () -> {c} Int -> Int = r2 // was error now OK \ No newline at end of file + val r2c: () -> Int ->{c} Int = r2 // was error now OK \ No newline at end of file diff --git a/tests/pos-custom-args/captures/bounded.scala b/tests/pos-custom-args/captures/bounded.scala index 85c1a67387b5..7959df7d50cf 100644 --- a/tests/pos-custom-args/captures/bounded.scala +++ b/tests/pos-custom-args/captures/bounded.scala @@ -1,14 +1,14 @@ class CC -type Cap = {*} CC +type Cap = CC^ def test(c: Cap) = - class B[X <: {c} Object](x: X): + class B[X <: Object^{c}](x: X): def elem = x def lateElem = () => x def f(x: Int): Int = if c == c then x else 0 val b = new B(f) val r1 = b.elem - val r1c: {c} Int -> Int = r1 + val r1c: Int ->{c} Int = r1 val r2 = b.lateElem - val r2c: {c} () -> {c} Int -> Int = r2 \ No newline at end of file + val r2c: () ->{c} Int ->{c} Int = r2 \ No newline at end of file diff --git a/tests/pos-custom-args/captures/boxed1.scala b/tests/pos-custom-args/captures/boxed1.scala index ba198335f51d..8c6b63ef0134 100644 --- a/tests/pos-custom-args/captures/boxed1.scala +++ b/tests/pos-custom-args/captures/boxed1.scala @@ -6,6 +6,6 @@ def foo(x: => Int): Unit = () def test(c: Cap) = val f = () => { c; 1 } - val _: {c} () -> Int = f + val _: () ->{c} Int = f val g = () => Box(f) - val _: () -> Box[{f} () -> Int] = g + val _: () -> Box[() ->{f} Int] = g diff --git a/tests/pos-custom-args/captures/boxmap-paper.scala b/tests/pos-custom-args/captures/boxmap-paper.scala index aff4c38e1b9d..9d5bb49af25d 100644 --- a/tests/pos-custom-args/captures/boxmap-paper.scala +++ b/tests/pos-custom-args/captures/boxmap-paper.scala @@ -12,25 +12,25 @@ def map[A, B](c: Cell[A])(f: A => B): Cell[B] def pureMap[A, B](c: Cell[A])(f: A -> B): Cell[B] = c[Cell[B]]((x: A) => cell(f(x))) -def lazyMap[A, B](c: Cell[A])(f: A => B): {f} () -> Cell[B] +def lazyMap[A, B](c: Cell[A])(f: A => B): () ->{f} Cell[B] = () => c[Cell[B]]((x: A) => cell(f(x))) trait IO: def print(s: String): Unit -def test(io: {*} IO) = +def test(io: IO^) = - val loggedOne: {io} () -> Int = () => { io.print("1"); 1 } + val loggedOne: () ->{io} Int = () => { io.print("1"); 1 } - val c: Cell[{io} () -> Int] - = cell[{io} () -> Int](loggedOne) + val c: Cell[() ->{io} Int] + = cell[() ->{io} Int](loggedOne) - val g = (f: {io} () -> Int) => + val g = (f: () ->{io} Int) => val x = f(); io.print(" + ") val y = f(); io.print(s" = ${x + y}") - val r = lazyMap[{io} () -> Int, Unit](c)(f => g(f)) - val r2 = lazyMap[{io} () -> Int, 
Unit](c)(g) + val r = lazyMap[() ->{io} Int, Unit](c)(f => g(f)) + val r2 = lazyMap[() ->{io} Int, Unit](c)(g) val r3 = lazyMap(c)(g) val _ = r() val _ = r2() diff --git a/tests/pos-custom-args/captures/byname.scala b/tests/pos-custom-args/captures/byname.scala index 35b8876d0058..efd76618469d 100644 --- a/tests/pos-custom-args/captures/byname.scala +++ b/tests/pos-custom-args/captures/byname.scala @@ -1,12 +1,12 @@ import annotation.retainsByName class CC -type Cap = {*} CC +type Cap = CC^ class I -def test(cap1: Cap, cap2: Cap): {cap1} I = +def test(cap1: Cap, cap2: Cap): I^{cap1} = def f() = if cap1 == cap1 then I() else I() - def h(x: {cap1}-> I) = x + def h(x: ->{cap} I) = x h(f()) // OK def hh(x: -> I @retainsByName(cap1)) = x h(f()) diff --git a/tests/pos-custom-args/captures/caps-universal.scala b/tests/pos-custom-args/captures/caps-universal.scala index d84f2b7b2584..3768c640fd68 100644 --- a/tests/pos-custom-args/captures/caps-universal.scala +++ b/tests/pos-custom-args/captures/caps-universal.scala @@ -1,7 +1,7 @@ import annotation.retains val foo: Int => Int = x => x -val bar: (Int -> Int) @retains(caps.*) = foo -val baz: {*} Int -> Int = bar +val bar: (Int -> Int) @retains(caps.cap) = foo +val baz: Int => Int = bar diff --git a/tests/pos-custom-args/captures/capt-capability.scala b/tests/pos-custom-args/captures/capt-capability.scala index 4dbd6e32f2a4..830d341c7bca 100644 --- a/tests/pos-custom-args/captures/capt-capability.scala +++ b/tests/pos-custom-args/captures/capt-capability.scala @@ -1,7 +1,7 @@ import annotation.capability @capability class Cap -def f1(c: Cap): {c} () -> c.type = () => c // ok +def f1(c: Cap): () ->{c} c.type = () => c // ok def f2: Int = val g: Boolean => Int = ??? @@ -17,8 +17,8 @@ def f3: Int = def foo() = val x: Cap = ??? val y: Cap = x - val x2: {x} () -> Cap = ??? - val y2: {x} () -> Cap = x2 + val x2: () ->{x} Cap = ??? + val y2: () ->{x} Cap = x2 val z1: () => Cap = f1(x) def h[X](a: X)(b: X) = a diff --git a/tests/pos-custom-args/captures/capt-depfun.scala b/tests/pos-custom-args/captures/capt-depfun.scala index 0e9786b2ee34..e3abbe0994c5 100644 --- a/tests/pos-custom-args/captures/capt-depfun.scala +++ b/tests/pos-custom-args/captures/capt-depfun.scala @@ -1,6 +1,6 @@ import annotation.retains class C -type Cap = C @retains(caps.*) +type Cap = C @retains(caps.cap) type T = (x: Cap) -> String @retains(x) @@ -8,7 +8,7 @@ type ID[X] = X val aa: ((x: Cap) -> String @retains(x)) = (x: Cap) => "" -def f(y: Cap, z: Cap): String @retains(caps.*) = +def f(y: Cap, z: Cap): String @retains(caps.cap) = val a: ((x: Cap) -> String @retains(x)) = (x: Cap) => "" val b = a(y) val c: String @retains(y) = b @@ -16,6 +16,6 @@ def f(y: Cap, z: Cap): String @retains(caps.*) = val d = a(g()) val ac: ((x: Cap) -> ID[String @retains(x) -> String @retains(x)]) = ??? - val bc: (({y} String) -> {y} String) = ac(y) - val dc: (String -> {y, z} String) = ac(g()) + val bc: String^{y} -> String^{y} = ac(y) + val dc: String -> String^{y, z} = ac(g()) c diff --git a/tests/pos-custom-args/captures/capt-depfun2.scala b/tests/pos-custom-args/captures/capt-depfun2.scala index 1c747d5885e6..e4645cfcc920 100644 --- a/tests/pos-custom-args/captures/capt-depfun2.scala +++ b/tests/pos-custom-args/captures/capt-depfun2.scala @@ -1,9 +1,9 @@ import annotation.retains class C -type Cap = C @retains(caps.*) +type Cap = C @retains(caps.cap) def f(y: Cap, z: Cap) = def g(): C @retains(y, z) = ??? val ac: ((x: Cap) -> Array[String @retains(x)]) = ??? - val dc: Array[? 
>: String <: {y, z} String] = ac(g()) // needs to be inferred + val dc: Array[? >: String <: String]^{y, z} = ac(g()) // needs to be inferred val ec = ac(y) diff --git a/tests/pos-custom-args/captures/capt-env.scala b/tests/pos-custom-args/captures/capt-env.scala new file mode 100644 index 000000000000..be24ed618606 --- /dev/null +++ b/tests/pos-custom-args/captures/capt-env.scala @@ -0,0 +1,8 @@ +class C +type Cap = C^ + +def test(c: Cap) = + def x = () => () => c; () + def y = () => x() + def z = () => x()() + diff --git a/tests/pos-custom-args/captures/capt-test.scala b/tests/pos-custom-args/captures/capt-test.scala index c61577e96eb1..e229c685d846 100644 --- a/tests/pos-custom-args/captures/capt-test.scala +++ b/tests/pos-custom-args/captures/capt-test.scala @@ -19,10 +19,10 @@ def map[A, B](f: A => B)(xs: LIST[A]): LIST[B] = xs.map(f) class C -type Cap = {*} C +type Cap = C^ class Foo(x: Cap): - this: {x} Foo => + this: Foo^{x} => def test(c: Cap, d: Cap) = def f(x: Cap): Unit = if c == x then () @@ -32,7 +32,7 @@ def test(c: Cap, d: Cap) = val zs = val z = g CONS(z, ys) - val zsc: LIST[{d, y} Cap -> Unit] = zs + val zsc: LIST[Cap ->{d, y} Unit] = zs val a4 = zs.map(identity) - val a4c: LIST[{d, y} Cap -> Unit] = a4 + val a4c: LIST[Cap ->{d, y} Unit] = a4 diff --git a/tests/pos-custom-args/captures/capt0.scala b/tests/pos-custom-args/captures/capt0.scala index 52d6253af46b..013ff3a4ee19 100644 --- a/tests/pos-custom-args/captures/capt0.scala +++ b/tests/pos-custom-args/captures/capt0.scala @@ -1,7 +1,7 @@ object Test: def test() = - val x: {*} Any = "abc" + val x: Any^ = "abc" val y: Object @scala.annotation.retains(x) = ??? - val z: Object @scala.annotation.retains(x, caps.*) = y: Object @annotation.retains(x) + val z: Object @scala.annotation.retains(x, caps.cap) = y: Object @annotation.retains(x) diff --git a/tests/pos-custom-args/captures/capt1.scala b/tests/pos-custom-args/captures/capt1.scala index cc39790623d4..8d2285f1fa50 100644 --- a/tests/pos-custom-args/captures/capt1.scala +++ b/tests/pos-custom-args/captures/capt1.scala @@ -1,9 +1,9 @@ class C -type Cap = {*} C -def f1(c: Cap): {c} () -> c.type = () => c // ok +type Cap = C^ +def f1(c: Cap): () ->{c} c.type = () => c // ok def f2: Int = - val g: {*} Boolean -> Int = ??? + val g: Boolean ->{cap} Int = ??? val x = g(true) x @@ -13,11 +13,11 @@ def f3: Int = val x = g.apply(true) x -def foo(): {*} C = - val x: {*} C = ??? - val y: {x} C = x - val x2: {x} () -> C = ??? - val y2: {x} () -> {x} C = x2 +def foo(): C^ = + val x: C^ = ??? + val y: C^{x} = x + val x2: () ->{x} C = ??? + val y2: () ->{x} C^{x} = x2 val z1: () => Cap = f1(x) def h[X](a: X)(b: X) = a diff --git a/tests/pos-custom-args/captures/capt2.scala b/tests/pos-custom-args/captures/capt2.scala index 77c0caaf0f1d..45381bf602ed 100644 --- a/tests/pos-custom-args/captures/capt2.scala +++ b/tests/pos-custom-args/captures/capt2.scala @@ -1,9 +1,9 @@ import annotation.retains class C -type Cap = C @retains(caps.*) +type Cap = C @retains(caps.cap) def test1() = - val y: {*} String = "" + val y: String^ = "" def x: Object @retains(y) = y def test2() = @@ -13,8 +13,8 @@ def test2() = z: (() -> Unit) @retains(x) def z2: (() -> Unit) @retains(y) = y z2: (() -> Unit) @retains(y) - val p: {*} () -> String = () => "abc" - val q: {p} C = ??? - val _ = p: ({p} () -> String) + val p: () => String = () => "abc" + val q: C^{p} = ??? 
+ val _ = p: (() ->{p} String) diff --git a/tests/pos-custom-args/captures/caseclass.scala b/tests/pos-custom-args/captures/caseclass.scala index a845da181e9f..ffbf878dca49 100644 --- a/tests/pos-custom-args/captures/caseclass.scala +++ b/tests/pos-custom-args/captures/caseclass.scala @@ -1,6 +1,6 @@ @annotation.capability class C object test1: - case class Ref(x: {*} String) + case class Ref(x: String^) def test(c: C) = val x1 = Ref("hello") @@ -14,7 +14,7 @@ object test2: val pure: () -> Unit = () => () val impure: () => Unit = pure - val mixed: {c} () -> Unit = pure + val mixed: () ->{c} Unit = pure val x = Ref(impure) val y0 = x.copy(pure) val yc0: Ref = y0 @@ -25,10 +25,10 @@ object test2: val yc2: Ref = y2 val x3 = Ref(mixed) - val _: {c} Ref = x3 + val _: Ref^{c} = x3 val y3 = x3.copy() - val yc3: {c} Ref = y3 + val yc3: Ref^{c} = y3 val y4 = y3 match case Ref(xx) => xx - val y4c: {x3} () -> Unit = y4 + val y4c: () ->{x3} Unit = y4 diff --git a/tests/pos-custom-args/captures/cc-dep-param.scala b/tests/pos-custom-args/captures/cc-dep-param.scala new file mode 100644 index 000000000000..1440cd4d7d40 --- /dev/null +++ b/tests/pos-custom-args/captures/cc-dep-param.scala @@ -0,0 +1,8 @@ +import language.experimental.captureChecking + +trait Foo[T] +def test(): Unit = + val a: Foo[Int]^ = ??? + val useA: () ->{a} Unit = ??? + def foo[X](x: Foo[X]^, op: () ->{x} Unit): Unit = ??? + foo(a, useA) diff --git a/tests/pos-custom-args/captures/cc-expand.scala b/tests/pos-custom-args/captures/cc-expand.scala index 87b2c34caf5f..1bed7b1cf001 100644 --- a/tests/pos-custom-args/captures/cc-expand.scala +++ b/tests/pos-custom-args/captures/cc-expand.scala @@ -5,11 +5,11 @@ object Test: class B class C class CTC - type CT = CTC @retains(caps.*) + type CT = CTC @retains(caps.cap) def test(ct: CT, dt: CT) = - def x0: A -> {ct} B = ??? + def x0: A -> B^{ct} = ??? def x1: A -> B @retains(ct) = ??? def x2: A -> B -> C @retains(ct) = ??? diff --git a/tests/pos-custom-args/captures/cc-this.scala b/tests/pos-custom-args/captures/cc-this.scala index 77414fa9b8c0..2124ee494041 100644 --- a/tests/pos-custom-args/captures/cc-this.scala +++ b/tests/pos-custom-args/captures/cc-this.scala @@ -5,7 +5,7 @@ def eff(using Cap): Unit = () def test(using Cap) = class C(val x: () => Int): - val y: {*} C = this + val y: C^ = this def f = () => eff @@ -14,4 +14,4 @@ def test(using Cap) = def c1 = new C(f) def c2 = c1 def c3 = c2.y - val _ = c3: {*} C + val _ = c3: C^ diff --git a/tests/pos-custom-args/captures/classes.scala b/tests/pos-custom-args/captures/classes.scala index f14a7e6dd84e..bc827dcfc67d 100644 --- a/tests/pos-custom-args/captures/classes.scala +++ b/tests/pos-custom-args/captures/classes.scala @@ -1,22 +1,22 @@ import annotation.retains class B -type Cap = {*} B +type Cap = B^ class C(val n: Cap): - this: {n} C => - def foo(): {n} B = n + this: C^{n} => + def foo(): B^{n} = n def test(x: Cap, y: Cap, z: Cap) = val c0 = C(x) - val c1: {x} C {val n: {x} B} = c0 + val c1: C{val n: B^{x}}^{x} = c0 val d = c1.foo() - d: {x} B + d: B^{x} val c2 = if ??? 
then C(x) else C(y) val c2a = identity(c2) - val c3: {x, y} C { val n: {x, y} B } = c2 + val c3: C{ val n: B^{x, y} }^{x, y} = c2 val d1 = c3.foo() - d1: B @retains(x, y) + d1: B^{x, y} class Local: @@ -29,7 +29,7 @@ def test(x: Cap, y: Cap, z: Cap) = end Local val l = Local() - val l1: {x, y} Local = l + val l1: Local^{x, y} = l val l2 = Local(x) - val l3: {x, y, z} Local = l2 + val l3: Local^{x, y, z} = l2 diff --git a/tests/pos-custom-args/captures/compare-refined.scala b/tests/pos-custom-args/captures/compare-refined.scala index c60bfee602b3..306f2216ab82 100644 --- a/tests/pos-custom-args/captures/compare-refined.scala +++ b/tests/pos-custom-args/captures/compare-refined.scala @@ -2,11 +2,11 @@ abstract class LIST[+T]: def map[U](f: T => U): LIST[U] = ??? class C -type Cap = {*} C +type Cap = C^ def test(d: Cap) = - val zsc: LIST[{d} Cap -> Unit] = ??? - val a4 = zsc.map[{d} Cap -> Unit]((x: {d} Cap -> Unit) => x) - val a5 = zsc.map[{d} Cap -> Unit](identity[{d} Cap -> Unit]) - val a6 = zsc.map(identity[{d} Cap -> Unit]) + val zsc: LIST[Cap ->{d} Unit] = ??? + val a4 = zsc.map[Cap ->{d} Unit]((x: Cap ->{d} Unit) => x) + val a5 = zsc.map[Cap ->{d} Unit](identity[Cap ->{d} Unit]) + val a6 = zsc.map(identity[Cap ->{d} Unit]) val a7 = zsc.map(identity) diff --git a/tests/pos-custom-args/captures/curried-shorthands.scala b/tests/pos-custom-args/captures/curried-shorthands.scala index 7c58729a3041..c68dc4b5cdbf 100644 --- a/tests/pos-custom-args/captures/curried-shorthands.scala +++ b/tests/pos-custom-args/captures/curried-shorthands.scala @@ -10,15 +10,15 @@ object Test: val f3 = (f: Int => Int) => println(f(3)) (xs: List[Int]) => xs.map(_ + 1) - val f3c: (Int => Int) -> {} List[Int] -> List[Int] = f3 + val f3c: (Int => Int) -> List[Int] ->{} List[Int] = f3 class LL[A]: - def drop(n: Int): {this} LL[A] = ??? + def drop(n: Int): LL[A]^{this} = ??? def test(ct: CanThrow[Exception]) = - def xs: {ct} LL[Int] = ??? + def xs: LL[Int]^{ct} = ??? val ys = xs.drop(_) - val ysc: Int -> {ct} LL[Int] = ys + val ysc: Int -> LL[Int]^{ct} = ys diff --git a/tests/pos-custom-args/captures/filevar.scala b/tests/pos-custom-args/captures/filevar.scala new file mode 100644 index 000000000000..a6cc7ca9ff47 --- /dev/null +++ b/tests/pos-custom-args/captures/filevar.scala @@ -0,0 +1,37 @@ +import language.experimental.captureChecking +import annotation.capability +import compiletime.uninitialized + +object test1: + class File: + def write(x: String): Unit = ??? + + class Service(f: File^): + def log = f.write("log") + + def withFile[T](op: (f: File^) => T): T = + op(new File) + + def test = + withFile: f => + val o = Service(f) + o.log + +object test2: + @capability class IO + + class File: + def write(x: String): Unit = ??? + + class Service(io: IO): + var file: File^{io} = uninitialized + def log = file.write("log") + + def withFile[T](io: IO)(op: (f: File^{io}) => T): T = + op(new File) + + def test(io: IO) = + withFile(io): f => + val o = Service(io) + o.file = f + o.log diff --git a/tests/pos-custom-args/captures/hk-param.scala b/tests/pos-custom-args/captures/hk-param.scala index b0e894d865e9..bf2f75f29e7f 100644 --- a/tests/pos-custom-args/captures/hk-param.scala +++ b/tests/pos-custom-args/captures/hk-param.scala @@ -1,17 +1,17 @@ /** Concrete collection type: View */ -trait View[+A] extends Itable[A], ILike[A, [X] =>> {*} View[X]]: - override def fromIterable[B](c: {*} Itable[B]): {c} View[B] = ??? 
+trait View[+A] extends Itable[A], ILike[A, [X] =>> View[X]^]: + override def fromIterable[B](c: Itable[B]^): View[B]^{c} = ??? trait IPolyTransforms[+A, +C[A]] extends Any: - def fromIterable[B](coll: {*} Itable[B]): C[B] + def fromIterable[B](coll: Itable[B]^): C[B] -trait ILike[+A, +C[X] <: {*} Itable[X]] extends IPolyTransforms[A, C] +trait ILike[+A, +C[X] <: Itable[X]^] extends IPolyTransforms[A, C] /** Base trait for generic collections */ -trait Itable[+A] extends ItableOnce[A] with ILike[A, {*} Itable] +trait Itable[+A] extends ItableOnce[A] with ILike[A, Itable^] /** Iterator can be used only once */ trait ItableOnce[+A] { - this: {*} ItableOnce[A] => - def iterator: {this} Iterator[A] + this: ItableOnce[A]^ => + def iterator: Iterator[A]^{this} } diff --git a/tests/pos-custom-args/captures/i15749.scala b/tests/pos-custom-args/captures/i15749.scala new file mode 100644 index 000000000000..4959c003a918 --- /dev/null +++ b/tests/pos-custom-args/captures/i15749.scala @@ -0,0 +1,15 @@ +class Unit +object unit extends Unit + +type Top = Any^{cap} + +type LazyVal[T] = Unit ->{cap} T + +class Foo[T](val x: T) + +// Foo[□ Unit => T] +type BoxedLazyVal[T] = Foo[LazyVal[T]] + +def force[A](v: BoxedLazyVal[A]): A = + // Γ ⊢ v.x : □ {cap} Unit -> A + v.x(unit) // was error: (unbox v.x)(unit), where (unbox v.x) should be untypable, now ok \ No newline at end of file diff --git a/tests/pos-custom-args/captures/i15749a.scala b/tests/pos-custom-args/captures/i15749a.scala new file mode 100644 index 000000000000..fe5f4d75dae1 --- /dev/null +++ b/tests/pos-custom-args/captures/i15749a.scala @@ -0,0 +1,21 @@ +class Unit +object u extends Unit + +type Top = Any^ + +type Wrapper[T] = [X] -> (op: T ->{cap} X) -> X + +def test = + + def wrapper[T](x: T): Wrapper[T] = + [X] => (op: T ->{cap} X) => op(x) + + def strictMap[A <: Top, sealed B <: Top](mx: Wrapper[A])(f: A ->{cap} B): Wrapper[B] = + mx((x: A) => wrapper(f(x))) + + def force[A](thunk: Unit ->{cap} A): A = thunk(u) + + def forceWrapper[sealed A](mx: Wrapper[Unit ->{cap} A]): Wrapper[A] = + // Γ ⊢ mx: Wrapper[□ {cap} Unit => A] + // `force` should be typed as ∀(□ {cap} Unit -> A) A, but it can not + strictMap[Unit ->{cap} A, A](mx)(t => force[A](t)) // error diff --git a/tests/pos-custom-args/captures/i15922.scala b/tests/pos-custom-args/captures/i15922.scala index 8547f7598eef..23109a3ba8f4 100644 --- a/tests/pos-custom-args/captures/i15922.scala +++ b/tests/pos-custom-args/captures/i15922.scala @@ -2,13 +2,13 @@ trait Cap { def use(): Int } type Id[X] = [T] -> (op: X => T) -> T def mkId[X](x: X): Id[X] = [T] => (op: X => T) => op(x) -def withCap[X](op: ({*} Cap) => X): X = { - val cap: {*} Cap = new Cap { def use() = { println("cap is used"); 0 } } +def withCap[X](op: (Cap^) => X): X = { + val cap: Cap^ = new Cap { def use() = { println("cap is used"); 0 } } val result = op(cap) result } -def leaking(c: {*} Cap): Id[{c} Cap] = mkId(c) +def leaking(c: Cap^): Id[Cap^{c}] = mkId(c) def test = val bad = withCap(leaking) diff --git a/tests/pos-custom-args/captures/i15923-cases.scala b/tests/pos-custom-args/captures/i15923-cases.scala new file mode 100644 index 000000000000..136b8950eb26 --- /dev/null +++ b/tests/pos-custom-args/captures/i15923-cases.scala @@ -0,0 +1,15 @@ +trait Cap { def use(): Int } +type Id[X] = [T] -> (op: X => T) -> T +def mkId[X](x: X): Id[X] = [T] => (op: X => T) => op(x) + +def foo(x: Id[Cap^{cap}]) = { + x(_.use()) // was error, now OK +} + +def bar(io: Cap^{cap}, x: Id[Cap^{io}]) = { + x(_.use()) +} + +def barAlt(a: 
Cap^{cap}, b: Cap^{cap}, x: Id[Cap]^{a, b}) = { + x(_.use()) +} diff --git a/tests/neg-custom-args/captures/i15925.scala b/tests/pos-custom-args/captures/i15925.scala similarity index 69% rename from tests/neg-custom-args/captures/i15925.scala rename to tests/pos-custom-args/captures/i15925.scala index 433d27a98414..63b6962ff9f8 100644 --- a/tests/neg-custom-args/captures/i15925.scala +++ b/tests/pos-custom-args/captures/i15925.scala @@ -1,13 +1,13 @@ import language.experimental.captureChecking class Unit -object unit extends Unit +object u extends Unit type Foo[X] = [T] -> (op: X => T) -> T type Lazy[X] = Unit => X def force[X](fx: Foo[Lazy[X]]): X = - fx[X](f => f(unit)) // error + fx[X](f => f(u)) def force2[X](fx: Foo[Unit => X]): X = - fx[X](f => f(unit)) // error + fx[X](f => f(u)) diff --git a/tests/pos-custom-args/captures/i16116.scala b/tests/pos-custom-args/captures/i16116.scala index 2f5d5304dca5..0311e744f146 100644 --- a/tests/pos-custom-args/captures/i16116.scala +++ b/tests/pos-custom-args/captures/i16116.scala @@ -17,7 +17,7 @@ object Test { @capability class CpsTransform[F[_]] { - def await[T](ft: F[T]): { this } T = ??? + def await[T](ft: F[T]): T^{ this } = ??? } transparent inline def cpsAsync[F[_]](using m:CpsMonad[F]) = @@ -27,7 +27,7 @@ object Test { def apply[A](expr: (CpsTransform[F], C) ?=> A): F[A] = ??? } - def asyncPlus[F[_]](a:Int, b:F[Int])(using cps: CpsTransform[F]): { cps } Int = + def asyncPlus[F[_]](a:Int, b:F[Int])(using cps: CpsTransform[F]): Int^{ cps } = a + (cps.await(b).asInstanceOf[Int]) def testExample1Future(): Unit = diff --git a/tests/pos-custom-args/captures/i16226.scala b/tests/pos-custom-args/captures/i16226.scala index 8edf3f54d739..4cd7f0ceea81 100644 --- a/tests/pos-custom-args/captures/i16226.scala +++ b/tests/pos-custom-args/captures/i16226.scala @@ -1,14 +1,14 @@ @annotation.capability class Cap class LazyRef[T](val elem: () => T): - val get: {elem} () -> T = elem - def map[U](f: T => U): {f, this} LazyRef[U] = + val get: () ->{elem} T = elem + def map[U](f: T => U): LazyRef[U]^{f, this} = new LazyRef(() => f(elem())) -def map[A, B](ref: {*} LazyRef[A], f: A => B): {f, ref} LazyRef[B] = +def map[A, B](ref: LazyRef[A]^, f: A => B): LazyRef[B]^{f, ref} = new LazyRef(() => f(ref.elem())) def main(io: Cap) = { - def mapd[A, B]: ({io} LazyRef[A], A => B) => {*} LazyRef[B] = + def mapd[A, B]: (LazyRef[A]^{io}, A => B) => LazyRef[B]^ = (ref1, f1) => map[A, B](ref1, f1) } diff --git a/tests/pos-custom-args/captures/i16871.scala b/tests/pos-custom-args/captures/i16871.scala new file mode 100644 index 000000000000..3251a7135346 --- /dev/null +++ b/tests/pos-custom-args/captures/i16871.scala @@ -0,0 +1,3 @@ +import scala.language.experimental.captureChecking + +val f: [X] => Int => Int = [X] => (x: Int) => x \ No newline at end of file diff --git a/tests/pos-custom-args/captures/iterators.scala b/tests/pos-custom-args/captures/iterators.scala index 50be2012e25c..10a7f57cd68f 100644 --- a/tests/pos-custom-args/captures/iterators.scala +++ b/tests/pos-custom-args/captures/iterators.scala @@ -1,19 +1,19 @@ package cctest abstract class Iterator[T]: - thisIterator: {*} Iterator[T] => + thisIterator: Iterator[T]^ => def hasNext: Boolean def next: T - def map(f: {*} T => T): {f, this} Iterator[T] = new Iterator: + def map(f: T => T): Iterator[T]^{f, this} = new Iterator: def hasNext = thisIterator.hasNext def next = f(thisIterator.next) end Iterator class C -type Cap = {*} C +type Cap = C^ -def map[T, U](it: {*} Iterator[T], f: {*} T => U): {it, f} 
Iterator[U] = new Iterator: +def map[T, U](it: Iterator[T]^, f: T^ => U): Iterator[U]^{it, f} = new Iterator: def hasNext = it.hasNext def next = f(it.next) diff --git a/tests/pos-custom-args/captures/lazylists-exceptions.scala b/tests/pos-custom-args/captures/lazylists-exceptions.scala index 2d4ebb245dca..8f1fba2bf2dc 100644 --- a/tests/pos-custom-args/captures/lazylists-exceptions.scala +++ b/tests/pos-custom-args/captures/lazylists-exceptions.scala @@ -4,52 +4,52 @@ import scala.compiletime.uninitialized trait LzyList[+A]: def isEmpty: Boolean def head: A - def tail: {this} LzyList[A] + def tail: LzyList[A]^{this} object LzyNil extends LzyList[Nothing]: def isEmpty = true def head = ??? def tail = ??? -final class LzyCons[+A](hd: A, tl: () => {*} LzyList[A]) extends LzyList[A]: +final class LzyCons[+A](hd: A, tl: () => LzyList[A]^) extends LzyList[A]: private var forced = false - private var cache: {this} LzyList[A] = uninitialized + private var cache: LzyList[A]^{this} = uninitialized private def force = if !forced then { cache = tl(); forced = true } cache def isEmpty = false def head = hd - def tail: {this} LzyList[A] = force + def tail: LzyList[A]^{this} = force end LzyCons -extension [A](xs: {*} LzyList[A]) - def map[B](f: A => B): {xs, f} LzyList[B] = +extension [A](xs: LzyList[A]^) + def map[B](f: A => B): LzyList[B]^{xs, f} = if xs.isEmpty then LzyNil else LzyCons(f(xs.head), () => xs.tail.map(f)) - def filter(p: A => Boolean): {xs, p} LzyList[A] = + def filter(p: A => Boolean): LzyList[A]^{xs, p} = if xs.isEmpty then LzyNil else if p(xs.head) then lazyCons(xs.head, xs.tail.filter(p)) else xs.tail.filter(p) - def concat(ys: {*} LzyList[A]): {xs, ys} LzyList[A] = + def concat(ys: LzyList[A]^): LzyList[A]^{xs, ys} = if xs.isEmpty then ys else xs.head #: xs.tail.concat(ys) - def drop(n: Int): {xs} LzyList[A] = + def drop(n: Int): LzyList[A]^{xs} = if n == 0 then xs else xs.tail.drop(n - 1) end extension extension [A](x: A) - def #:(xs1: => {*} LzyList[A]): {xs1} LzyList[A] = + def #:(xs1: => LzyList[A]^): LzyList[A]^{xs1} = LzyCons(x, () => xs1) -def lazyCons[A](x: A, xs1: => {*} LzyList[A]): {xs1} LzyList[A] = +def lazyCons[A](x: A, xs1: => LzyList[A]^): LzyList[A]^{xs1} = LzyCons(x, () => xs1) -def tabulate[A](n: Int)(gen: Int => A): {gen} LzyList[A] = - def recur(i: Int): {gen} LzyList[A] = +def tabulate[A](n: Int)(gen: Int => A): LzyList[A]^{gen} = + def recur(i: Int): LzyList[A]^{gen} = if i == n then LzyNil else gen(i) #: recur(i + 1) recur(0) @@ -69,16 +69,16 @@ def test(using cap1: CanThrow[Ex1], cap2: CanThrow[Ex2]) = x * x def x1 = xs.map(f) - def x1c: {cap1} LzyList[Int] = x1 + def x1c: LzyList[Int]^{cap1} = x1 def x2 = x1.concat(xs.map(g).filter(_ > 0)) - def x2c: {cap1, cap2} LzyList[Int] = x2 + def x2c: LzyList[Int]^{cap1, cap2} = x2 val x3 = tabulate(10) { i => if i > 9 then throw Ex1() i * i } - val x3c: {cap1} LzyList[Int] = x3 + val x3c: LzyList[Int]^{cap1} = x3 class LimitExceeded extends Exception diff --git a/tests/pos-custom-args/captures/lazylists-mono.scala b/tests/pos-custom-args/captures/lazylists-mono.scala index 44ab36ded6a2..c91bedd8f1cf 100644 --- a/tests/pos-custom-args/captures/lazylists-mono.scala +++ b/tests/pos-custom-args/captures/lazylists-mono.scala @@ -1,26 +1,26 @@ class CC -type Cap = {*} CC +type Cap = CC^ //------------------------------------------------- def test(E: Cap) = trait LazyList[+A]: - protected def contents: {E} () -> (A, {E} LazyList[A]) + protected def contents: () ->{E} (A, LazyList[A]^{E}) def isEmpty: Boolean def head: A = 
contents()._1 - def tail: {E} LazyList[A] = contents()._2 + def tail: LazyList[A]^{E} = contents()._2 - class LazyCons[+A](override val contents: {E} () -> (A, {E} LazyList[A])) + class LazyCons[+A](override val contents: () ->{E} (A, LazyList[A]^{E})) extends LazyList[A]: def isEmpty: Boolean = false object LazyNil extends LazyList[Nothing]: - def contents: {E} () -> (Nothing, LazyList[Nothing]) = ??? + def contents: () ->{E} (Nothing, LazyList[Nothing]) = ??? def isEmpty: Boolean = true - extension [A](xs: {E} LazyList[A]) - def map[B](f: {E} A -> B): {E} LazyList[B] = + extension [A](xs: LazyList[A]^{E}) + def map[B](f: A ->{E} B): LazyList[B]^{E} = if xs.isEmpty then LazyNil else val cons = () => (f(xs.head), xs.tail.map(f)) diff --git a/tests/pos-custom-args/captures/lazylists.scala b/tests/pos-custom-args/captures/lazylists.scala index fd130c87cdea..273f21c1fcf3 100644 --- a/tests/pos-custom-args/captures/lazylists.scala +++ b/tests/pos-custom-args/captures/lazylists.scala @@ -1,26 +1,26 @@ class CC -type Cap = {*} CC +type Cap = CC^ trait LazyList[+A]: - this: {*} LazyList[A] => + this: LazyList[A]^ => def isEmpty: Boolean def head: A - def tail: {this} LazyList[A] + def tail: LazyList[A]^{this} object LazyNil extends LazyList[Nothing]: def isEmpty: Boolean = true def head = ??? def tail = ??? -extension [A](xs: {*} LazyList[A]) - def map[B](f: A => B): {xs, f} LazyList[B] = +extension [A](xs: LazyList[A]^) + def map[B](f: A => B): LazyList[B]^{xs, f} = final class Mapped extends LazyList[B]: - this: {xs, f} Mapped => + this: Mapped^{xs, f} => def isEmpty = false def head: B = f(xs.head) - def tail: {this} LazyList[B] = xs.tail.map(f) // OK + def tail: LazyList[B]^{this} = xs.tail.map(f) // OK if xs.isEmpty then LazyNil else new Mapped @@ -30,12 +30,12 @@ def test(cap1: Cap, cap2: Cap) = val xs = class Initial extends LazyList[String]: - this: {cap1} Initial => + this: Initial^{cap1} => def isEmpty = false def head = f("") def tail = LazyNil new Initial - val xsc: {cap1} LazyList[String] = xs + val xsc: LazyList[String]^{cap1} = xs val ys = xs.map(g) - val ysc: {cap1, cap2} LazyList[String] = ys + val ysc: LazyList[String]^{cap1, cap2} = ys diff --git a/tests/pos-custom-args/captures/lazylists1.scala b/tests/pos-custom-args/captures/lazylists1.scala index a59e7c0da12f..62b34f442221 100644 --- a/tests/pos-custom-args/captures/lazylists1.scala +++ b/tests/pos-custom-args/captures/lazylists1.scala @@ -1,28 +1,28 @@ class CC -type Cap = {*} CC +type Cap = CC^ trait LazyList[+A]: def isEmpty: Boolean def head: A - def tail: {this} LazyList[A] - def concat[B >: A](other: {*} LazyList[B]): {this, other} LazyList[B] + def tail: LazyList[A]^{this} + def concat[B >: A](other: LazyList[B]^): LazyList[B]^{this, other} object LazyNil extends LazyList[Nothing]: def isEmpty: Boolean = true def head = ??? def tail = ??? 
- def concat[B](other: {*} LazyList[B]): {other} LazyList[B] = other + def concat[B](other: LazyList[B]^): LazyList[B]^{other} = other -final class LazyCons[+A](x: A)(xs: () => {*} LazyList[A]) extends LazyList[A]: +final class LazyCons[+A](x: A)(xs: () => LazyList[A]^) extends LazyList[A]: def isEmpty = false def head = x - def tail: {this} LazyList[A] = xs() - def concat[B >: A](other: {*} LazyList[B]): {this, other} LazyList[B] = + def tail: LazyList[A]^{this} = xs() + def concat[B >: A](other: LazyList[B]^): LazyList[B]^{this, other} = LazyCons(head)(() => tail.concat(other)) -extension [A](xs: {*} LazyList[A]) - def map[B](f: A => B): {xs, f} LazyList[B] = +extension [A](xs: LazyList[A]^) + def map[B](f: A => B): LazyList[B]^{xs, f} = if xs.isEmpty then LazyNil else LazyCons(f(xs.head))(() => xs.tail.map(f)) @@ -31,9 +31,9 @@ def test(cap1: Cap, cap2: Cap) = def g(x: String): String = if cap2 == cap2 then "" else "a" val xs = new LazyCons("")(() => if f("") == f("") then LazyNil else LazyNil) - val xsc: {cap1} LazyList[String] = xs + val xsc: LazyList[String]^{cap1} = xs val ys = xs.map(g) - val ysc: {cap1, cap2} LazyList[String] = ys + val ysc: LazyList[String]^{cap1, cap2} = ys val zs = new LazyCons("")(() => if g("") == g("") then LazyNil else LazyNil) val as = xs.concat(zs) - val asc: {xs, zs} LazyList[String] = as + val asc: LazyList[String]^{xs, zs} = as diff --git a/tests/pos-custom-args/captures/lazyref.scala b/tests/pos-custom-args/captures/lazyref.scala index 0d988dc3e17b..3dae51b491b4 100644 --- a/tests/pos-custom-args/captures/lazyref.scala +++ b/tests/pos-custom-args/captures/lazyref.scala @@ -1,24 +1,24 @@ @annotation.capability class Cap class LazyRef[T](val elem: () => T): - val get: {elem} () -> T = elem - def map[U](f: T => U): {f, this} LazyRef[U] = + val get: () ->{elem} T = elem + def map[U](f: T => U): LazyRef[U]^{f, this} = new LazyRef(() => f(elem())) -def map[A, B](ref: {*} LazyRef[A], f: A => B): {f, ref} LazyRef[B] = +def map[A, B](ref: LazyRef[A]^, f: A => B): LazyRef[B]^{f, ref} = new LazyRef(() => f(ref.elem())) -def mapc[A, B]: (ref: {*} LazyRef[A], f: A => B) => {f, ref} LazyRef[B] = +def mapc[A, B]: (ref: LazyRef[A]^, f: A => B) => LazyRef[B]^{f, ref} = (ref1, f1) => map[A, B](ref1, f1) def test(cap1: Cap, cap2: Cap) = def f(x: Int) = if cap1 == cap1 then x else 0 def g(x: Int) = if cap2 == cap2 then x else 0 val ref1 = LazyRef(() => f(0)) - val ref1c: {cap1} LazyRef[Int] = ref1 + val ref1c: LazyRef[Int]^{cap1} = ref1 val ref2 = map(ref1, g) - val ref2c: {cap2, ref1} LazyRef[Int] = ref2 + val ref2c: LazyRef[Int]^{cap2, ref1} = ref2 val ref3 = ref1.map(g) - val ref3c: {cap2, ref1} LazyRef[Int] = ref3 + val ref3c: LazyRef[Int]^{cap2, ref1} = ref3 val ref4 = (if cap1 == cap2 then ref1 else ref2).map(g) - val ref4c: {cap1, cap2} LazyRef[Int] = ref4 + val ref4c: LazyRef[Int]^{cap1, cap2} = ref4 diff --git a/tests/pos-custom-args/captures/list-encoding.scala b/tests/pos-custom-args/captures/list-encoding.scala index 87630467023e..d959b523404b 100644 --- a/tests/pos-custom-args/captures/list-encoding.scala +++ b/tests/pos-custom-args/captures/list-encoding.scala @@ -7,7 +7,7 @@ type Op[T, C] = (v: T) => (s: C) => C type List[T] = - [C] -> (op: Op[T, C]) -> {op} (s: C) -> C + [C] -> (op: Op[T, C]) -> (s: C) ->{op} C def nil[T]: List[T] = [C] => (op: Op[T, C]) => (s: C) => s @@ -15,7 +15,7 @@ def nil[T]: List[T] = def cons[T](hd: T, tl: List[T]): List[T] = [C] => (op: Op[T, C]) => (s: C) => op(hd)(tl(op)(s)) -def foo(c: {*} Cap) = +def foo(c: Cap^) = def f(x: 
String @retains(c), y: String @retains(c)) = cons(x, cons(y, nil)) def g(x: String @retains(c), y: Any) = diff --git a/tests/pos-custom-args/captures/lists.scala b/tests/pos-custom-args/captures/lists.scala index 6389ec933b32..56473e68d49f 100644 --- a/tests/pos-custom-args/captures/lists.scala +++ b/tests/pos-custom-args/captures/lists.scala @@ -2,7 +2,7 @@ abstract class LIST[+T]: def isEmpty: Boolean def head: T def tail: LIST[T] - def map[U](f: {*} T -> U): LIST[U] = + def map[U](f: T => U): LIST[U] = if isEmpty then NIL else CONS(f(head), tail.map(f)) @@ -28,9 +28,9 @@ def test(c: Cap, d: Cap, e: Cap) = val zs = val z = g CONS(z, ys) - val zsc: LIST[{d, y} Cap -> Unit] = zs + val zsc: LIST[Cap ->{d, y} Unit] = zs val z1 = zs.head - val z1c: {y, d} Cap -> Unit = z1 + val z1c: Cap ->{y, d} Unit = z1 val ys1 = zs.tail val y1 = ys1.head @@ -38,53 +38,53 @@ def test(c: Cap, d: Cap, e: Cap) = def m1[A, B] = (f: A => B) => (xs: LIST[A]) => xs.map(f) - def m1c: (f: String => Int) -> {f} LIST[String] -> LIST[Int] = m1[String, Int] + def m1c: (f: String => Int) -> LIST[String] ->{f} LIST[Int] = m1[String, Int] def m2 = [A, B] => (f: A => B) => (xs: LIST[A]) => xs.map(f) - def m2c: [A, B] -> (f: A => B) -> {f} LIST[A] -> LIST[B] = m2 + def m2c: [A, B] -> (f: A => B) -> LIST[A] ->{f} LIST[B] = m2 def eff[A](x: A) = if x == e then x else x val eff2 = [A] => (x: A) => if x == e then x else x - val a0 = identity[{d, y} Cap -> Unit] - val a0c: {d, y} ({d, y} Cap -> Unit) -> {d, y} Cap -> Unit = a0 - val a1 = zs.map[{d, y} Cap -> Unit](a0) - val a1c: LIST[{d, y} Cap -> Unit] = a1 - val a2 = zs.map[{d, y} Cap -> Unit](identity[{d, y} Cap -> Unit]) - val a2c: LIST[{d, y} Cap -> Unit] = a2 - val a3 = zs.map(identity[{d, y} Cap -> Unit]) - val a3c: LIST[{d, y} Cap -> Unit] = a3 + val a0 = identity[Cap ->{d, y} Unit] + val a0c: (Cap ->{d, y} Unit) ->{d, y} Cap ->{d, y} Unit = a0 + val a1 = zs.map[Cap ->{d, y} Unit](a0) + val a1c: LIST[Cap ->{d, y} Unit] = a1 + val a2 = zs.map[Cap ->{d, y} Unit](identity[Cap ->{d, y} Unit]) + val a2c: LIST[Cap ->{d, y} Unit] = a2 + val a3 = zs.map(identity[Cap ->{d, y} Unit]) + val a3c: LIST[Cap ->{d, y} Unit] = a3 val a4 = zs.map(identity) - val a4c: LIST[{d, c} Cap -> Unit] = a4 - val a5 = map[{d, y} Cap -> Unit, {d, y} Cap -> Unit](identity)(zs) - val a5c: LIST[{d, c} Cap -> Unit] = a5 - val a6 = m1[{d, y} Cap -> Unit, {d, y} Cap -> Unit](identity)(zs) - val a6c: LIST[{d, c} Cap -> Unit] = a6 + val a4c: LIST[Cap ->{d, c} Unit] = a4 + val a5 = map[Cap ->{d, y} Unit, Cap ->{d, y} Unit](identity)(zs) + val a5c: LIST[Cap ->{d, c} Unit] = a5 + val a6 = m1[Cap ->{d, y} Unit, Cap ->{d, y} Unit](identity)(zs) + val a6c: LIST[Cap ->{d, c} Unit] = a6 - val b0 = eff[{d, y} Cap -> Unit] - val b0c: {e, d, y} ({d, y} Cap -> Unit) -> {d, y} Cap -> Unit = b0 - val b1 = zs.map[{d, y} Cap -> Unit](a0) - val b1c: {e} LIST[{d, y} Cap -> Unit] = b1 - val b2 = zs.map[{d, y} Cap -> Unit](eff[{d, y} Cap -> Unit]) - val b2c: {e} LIST[{d, y} Cap -> Unit] = b2 - val b3 = zs.map(eff[{d, y} Cap -> Unit]) - val b3c: {e} LIST[{d, y} Cap -> Unit] = b3 + val b0 = eff[Cap ->{d, y} Unit] + val b0c: (Cap ->{d, y} Unit) ->{e, d, y} Cap ->{d, y} Unit = b0 + val b1 = zs.map[Cap ->{d, y} Unit](a0) + val b1c: LIST[Cap ->{d, y} Unit]^{e} = b1 + val b2 = zs.map[Cap ->{d, y} Unit](eff[Cap ->{d, y} Unit]) + val b2c: LIST[Cap ->{d, y} Unit]^{e} = b2 + val b3 = zs.map(eff[Cap ->{d, y} Unit]) + val b3c: LIST[Cap ->{d, y} Unit]^{e} = b3 val b4 = zs.map(eff) - val b4c: {e} LIST[{d, c} Cap -> Unit] = b4 - val b5 = 
map[{d, y} Cap -> Unit, {d, y} Cap -> Unit](eff)(zs) - val b5c: {e} LIST[{d, c} Cap -> Unit] = b5 - val b6 = m1[{d, y} Cap -> Unit, {d, y} Cap -> Unit](eff)(zs) - val b6c: {e} LIST[{d, c} Cap -> Unit] = b6 + val b4c: LIST[Cap ->{d, c} Unit]^{e} = b4 + val b5 = map[Cap ->{d, y} Unit, Cap ->{d, y} Unit](eff)(zs) + val b5c: LIST[Cap ->{d, c} Unit]^{e} = b5 + val b6 = m1[Cap ->{d, y} Unit, Cap ->{d, y} Unit](eff)(zs) + val b6c: LIST[Cap ->{d, c} Unit]^{e} = b6 - val c0 = eff2[{d, y} Cap -> Unit] - val c0c: {e, d, y} ({d, y} Cap -> Unit) -> {d, y} Cap -> Unit = c0 - val c1 = zs.map[{d, y} Cap -> Unit](a0) - val c1c: {e} LIST[{d, y} Cap -> Unit] = c1 - val c2 = zs.map[{d, y} Cap -> Unit](eff2[{d, y} Cap -> Unit]) - val c2c: {e} LIST[{d, y} Cap -> Unit] = c2 - val c3 = zs.map(eff2[{d, y} Cap -> Unit]) - val c3c: {e} LIST[{d, y} Cap -> Unit] = c3 + val c0 = eff2[Cap ->{d, y} Unit] + val c0c: (Cap ->{d, y} Unit) ->{e, d, y} Cap ->{d, y} Unit = c0 + val c1 = zs.map[Cap ->{d, y} Unit](a0) + val c1c: LIST[Cap ->{d, y} Unit]^{e} = c1 + val c2 = zs.map[Cap ->{d, y} Unit](eff2[Cap ->{d, y} Unit]) + val c2c: LIST[Cap ->{d, y} Unit]^{e} = c2 + val c3 = zs.map(eff2[Cap ->{d, y} Unit]) + val c3c: LIST[Cap ->{d, y} Unit]^{e} = c3 diff --git a/tests/pos-custom-args/captures/logger.scala b/tests/pos-custom-args/captures/logger.scala index e5b6c834ffe0..3f417da8c1be 100644 --- a/tests/pos-custom-args/captures/logger.scala +++ b/tests/pos-custom-args/captures/logger.scala @@ -7,9 +7,9 @@ class Logger(using fs: FileSystem): def log(s: String): Unit = ??? def test(using fs: FileSystem) = - val l: {fs} Logger = Logger(using fs) + val l: Logger^{fs} = Logger(using fs) l.log("hello world!") - val xs: {l} LazyList[Int] = + val xs: LazyList[Int]^{l} = LazyList.from(1) .map { i => l.log(s"computing elem # $i") @@ -19,25 +19,25 @@ def test(using fs: FileSystem) = trait LazyList[+A]: def isEmpty: Boolean def head: A - def tail: {this} LazyList[A] + def tail: LazyList[A]^{this} object LazyNil extends LazyList[Nothing]: def isEmpty: Boolean = true def head = ??? def tail = ??? -final class LazyCons[+T](val x: T, val xs: () => {*} LazyList[T]) extends LazyList[T]: +final class LazyCons[+T](val x: T, val xs: () => LazyList[T]^) extends LazyList[T]: def isEmpty = false def head = x - def tail: {this} LazyList[T] = xs() + def tail: LazyList[T]^{this} = xs() end LazyCons extension [A](x: A) - def #::(xs1: => {*} LazyList[A]): {xs1} LazyList[A] = + def #::(xs1: => LazyList[A]^): LazyList[A]^{xs1} = LazyCons(x, () => xs1) -extension [A](xs: {*} LazyList[A]) - def map[B](f: A => B): {xs, f} LazyList[B] = +extension [A](xs: LazyList[A]^) + def map[B](f: A => B): LazyList[B]^{xs, f} = if xs.isEmpty then LazyNil else f(xs.head) #:: xs.tail.map(f) @@ -50,17 +50,17 @@ class Pair[+A, +B](x: A, y: B): def snd: B = y def test2(ct: CanThrow[Exception], fs: FileSystem) = - def x: {ct} Int -> String = ??? - def y: {fs} Logger = ??? + def x: Int ->{ct} String = ??? + def y: Logger^{fs} = ??? def p = Pair(x, y) def f = () => p.fst /* - val l1: {*} Int -> String = ??? - val l2: {c} Object = ??? + val l1: Int => String = ??? + val l2: Object^{c} = ??? 
val pd = () => Pair(l1, l2) - val p2: Pair[{*} Int -> String, {c} Object] = pd() + val p2: Pair[Int => String, Object]^{c} = pd() val hd = () => p2.fst */ \ No newline at end of file diff --git a/tests/pos-custom-args/captures/nested-classes.scala b/tests/pos-custom-args/captures/nested-classes.scala new file mode 100644 index 000000000000..b16fc4365183 --- /dev/null +++ b/tests/pos-custom-args/captures/nested-classes.scala @@ -0,0 +1,21 @@ +import language.experimental.captureChecking +import annotation.{capability, constructorOnly} + +@capability class IO +class Blah +class Pkg(using @constructorOnly io: IO): + class Foo: + def m(foo: Blah^{io}) = ??? +class Pkg2(using io: IO): + class Foo: + def m(foo: Blah^{io}): Any = io; ??? + +def main(using io: IO) = + val pkg = Pkg() + val f = pkg.Foo() + f.m(???) + val pkg2 = Pkg2() + val f2 = pkg2.Foo() + f2.m(???) + + diff --git a/tests/pos-custom-args/captures/nonvariant-inf.scala b/tests/pos-custom-args/captures/nonvariant-inf.scala index 6569f35042e8..4798f98c9fce 100644 --- a/tests/pos-custom-args/captures/nonvariant-inf.scala +++ b/tests/pos-custom-args/captures/nonvariant-inf.scala @@ -3,7 +3,7 @@ trait Iterable[+A] /** Base trait for instances that can construct a collection from an iterable */ trait FromIterable { - type C[X] <: {*} Iterable[X] - def fromIterable[B](it: {*} Iterable[B]): {it} C[B] + type C[X] <: Iterable[X]^ + def fromIterable[B](it: Iterable[B]^): C[B]^{it} def empty[A]: C[A] = fromIterable(??? : Iterable[A]) } diff --git a/tests/pos-custom-args/captures/override-adapt-box-pos-alt.scala b/tests/pos-custom-args/captures/override-adapt-box-pos-alt.scala index c7e4d38723d7..bb6b4030dbff 100644 --- a/tests/pos-custom-args/captures/override-adapt-box-pos-alt.scala +++ b/tests/pos-custom-args/captures/override-adapt-box-pos-alt.scala @@ -9,9 +9,9 @@ abstract class A[X] { class C -def test(io: {*} IO) = { - class B extends A[{io} C] { // X =:= {io} C - def foo(x: Unit): {io} C = ??? - def bar(op: ({io} C) => Int): Int = 0 +def test(io: IO^) = { + class B extends A[C^{io}] { // X =:= {io} C + def foo(x: Unit): C^{io} = ??? + def bar(op: (C^{io}) => Int): Int = 0 } } diff --git a/tests/pos-custom-args/captures/override-adapt-box-pos.scala b/tests/pos-custom-args/captures/override-adapt-box-pos.scala index 7496a138070d..9adaec6896cf 100644 --- a/tests/pos-custom-args/captures/override-adapt-box-pos.scala +++ b/tests/pos-custom-args/captures/override-adapt-box-pos.scala @@ -4,16 +4,16 @@ class IO abstract class A[X, Y] { def foo(x: Unit): X - def bar(x: Int, y: {} IO): X + def bar(x: Int, y: IO^{}): X def baz(x: Y): X } class C -def test(io: {*} IO) = { - class B extends A[{io} C, {} C] { // X =:= {io} C - override def foo(x: Unit): {io} C = ??? - override def bar(x: Int, y: {} IO): {io} C = ??? - override def baz(x: {} C): {io} C = ??? +def test(io: IO^) = { + class B extends A[C^{io}, C^{}] { // X =:= {io} C + override def foo(x: Unit): C^{io} = ??? + override def bar(x: Int, y: IO^{}): C^{io} = ??? + override def baz(x: C^{}): C^{io} = ??? 
} } diff --git a/tests/pos-custom-args/captures/overrides.scala b/tests/pos-custom-args/captures/overrides.scala index 7e70afe7a327..ac5b9cd9ddc4 100644 --- a/tests/pos-custom-args/captures/overrides.scala +++ b/tests/pos-custom-args/captures/overrides.scala @@ -1,4 +1,4 @@ -import caps.* +import caps.cap abstract class Foo: def foo: () => Unit = () => () diff --git a/tests/pos-custom-args/captures/pairs.scala b/tests/pos-custom-args/captures/pairs.scala index 9c8ec003d28d..bc20d20ffd92 100644 --- a/tests/pos-custom-args/captures/pairs.scala +++ b/tests/pos-custom-args/captures/pairs.scala @@ -12,21 +12,21 @@ object Generic: def g(x: Cap): Unit = if d == x then () val p = Pair(f, g) val x1 = p.fst - val x1c: {c} Cap -> Unit = x1 + val x1c: Cap ->{c} Unit = x1 val y1 = p.snd - val y1c: {d} Cap -> Unit = y1 + val y1c: Cap ->{d} Unit = y1 object Monomorphic: - class Pair(x: Cap => Unit, y: {*} Cap -> Unit): - def fst: {x} Cap -> Unit = x - def snd: {y} Cap -> Unit = y + class Pair(x: Cap => Unit, y: Cap => Unit): + def fst: Cap ->{x} Unit = x + def snd: Cap ->{y} Unit = y def test(c: Cap, d: Cap) = def f(x: Cap): Unit = if c == x then () def g(x: Cap): Unit = if d == x then () val p = Pair(f, g) val x1 = p.fst - val x1c: {c} Cap -> Unit = x1 + val x1c: Cap ->{c} Unit = x1 val y1 = p.snd - val y1c: {d} Cap -> Unit = y1 + val y1c: Cap ->{d} Unit = y1 diff --git a/tests/pos-custom-args/captures/selftype-alias.scala b/tests/pos-custom-args/captures/selftype-alias.scala new file mode 100644 index 000000000000..180c7b27b146 --- /dev/null +++ b/tests/pos-custom-args/captures/selftype-alias.scala @@ -0,0 +1,8 @@ +import language.experimental.captureChecking + +type AnyIterableOnce[A] = IterableOnce[A]^ + +/** Iterator can be used only once */ +trait IterableOnce[+A]: + this: AnyIterableOnce[A] => + def iterator: Iterator[A]^{this} diff --git a/tests/pos-custom-args/captures/stack-alloc.scala b/tests/pos-custom-args/captures/stack-alloc.scala index 03b6708a3119..7013f978c281 100644 --- a/tests/pos-custom-args/captures/stack-alloc.scala +++ b/tests/pos-custom-args/captures/stack-alloc.scala @@ -5,7 +5,7 @@ class Pooled val stack = mutable.ArrayBuffer[Pooled]() var nextFree = 0 -def withFreshPooled[T](op: ({*} Pooled) => T): T = +def withFreshPooled[T](op: Pooled^ => T): T = if nextFree >= stack.size then stack.append(new Pooled) val pooled = stack(nextFree) nextFree = nextFree + 1 diff --git a/tests/pos-custom-args/captures/trickyTrailingUpArrow.scala b/tests/pos-custom-args/captures/trickyTrailingUpArrow.scala new file mode 100644 index 000000000000..71b663de5354 --- /dev/null +++ b/tests/pos-custom-args/captures/trickyTrailingUpArrow.scala @@ -0,0 +1,9 @@ +object Test: + var x = 0 + type FreshContext = String^ + x += 1 + + inline def ctx(using c: String) = c + + val y: String^ -> Unit = ??? + val z: String^ ?-> Unit = ??? 
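The capture-checking hunks above and below all apply one mechanical syntax migration: a prefix capture set on a type ({c} T, {*} T) becomes a postfix one (T^{c}, bare T^ for the universal set), the set on a function type moves onto the arrow (A ->{c} B instead of {c} A -> B), and the universal capability caps.* is now written caps.cap. A minimal sketch of the new surface syntax, assuming a placeholder capability class Cap; the names f and c are illustrative and not taken from any single test file:

    import language.experimental.captureChecking
    import annotation.capability

    @capability class Cap

    // Pre-migration this signature would have read:
    //   def f(c: {*} Cap): {c} () -> Unit
    // Now the capture set follows the type (Cap^) or sits on the arrow (->{c}).
    def f(c: Cap^): () ->{c} Unit =
      () => if c == c then () else ()  // the closure captures c, hence ->{c}

A second recurring change, visible in the unsafe-unbox.scala, vars1.scala, and Run.scala hunks below, drops the caps.unsafe box/unbox helpers on mutable variables in favour of the @uncheckedCaptures annotation from annotation.unchecked.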
diff --git a/tests/pos-custom-args/captures/try.scala b/tests/pos-custom-args/captures/try.scala index b2dcf6f11dd0..05c41be69001 100644 --- a/tests/pos-custom-args/captures/try.scala +++ b/tests/pos-custom-args/captures/try.scala @@ -2,7 +2,7 @@ import annotation.retains import language.experimental.erasedDefinitions class CT[E <: Exception] -type CanThrow[E <: Exception] = CT[E] @retains(caps.*) +type CanThrow[E <: Exception] = CT[E] @retains(caps.cap) infix type throws[R, E <: Exception] = (erased CanThrow[E]) ?-> R diff --git a/tests/pos-custom-args/captures/unsafe-unbox.scala b/tests/pos-custom-args/captures/unsafe-unbox.scala index b228d8c07925..63a32d58f640 100644 --- a/tests/pos-custom-args/captures/unsafe-unbox.scala +++ b/tests/pos-custom-args/captures/unsafe-unbox.scala @@ -1,4 +1,7 @@ -import caps.unsafe.* +import annotation.unchecked.uncheckedCaptures def test = + @uncheckedCaptures var finalizeActions = collection.mutable.ListBuffer[() => Unit]() - val action = finalizeActions.remove(0).unsafeUnbox + val action = finalizeActions.remove(0) + + diff --git a/tests/pos-custom-args/captures/vars.scala b/tests/pos-custom-args/captures/vars.scala index 12721158a2bb..ccf2cd587eb1 100644 --- a/tests/pos-custom-args/captures/vars.scala +++ b/tests/pos-custom-args/captures/vars.scala @@ -5,13 +5,13 @@ def test(cap1: Cap, cap2: Cap) = var x = f val y = x val z = () => if x("") == "" then "a" else "b" - val zc: {cap1} () -> String = z + val zc: () ->{cap1} String = z val z2 = () => { x = identity } - val z2c: {cap1} () -> Unit = z2 + val z2c: () ->{cap1} Unit = z2 class Ref: - var elem: {cap1} String -> String = null + var elem: String ->{cap1} String = null val r = Ref() r.elem = f - val fc: {cap1} String -> String = r.elem + val fc: String ->{cap1} String = r.elem diff --git a/tests/pos-custom-args/captures/vars1.scala b/tests/pos-custom-args/captures/vars1.scala index c008bac2e72f..56548e5a9c30 100644 --- a/tests/pos-custom-args/captures/vars1.scala +++ b/tests/pos-custom-args/captures/vars1.scala @@ -1,9 +1,12 @@ import caps.unsafe.* +import annotation.unchecked.uncheckedCaptures object Test: type ErrorHandler = (Int, String) => Unit + @uncheckedCaptures var defaultIncompleteHandler: ErrorHandler = ??? + @uncheckedCaptures var incompleteHandler: ErrorHandler = defaultIncompleteHandler val x = incompleteHandler.unsafeUnbox val _ : ErrorHandler = x @@ -11,11 +14,17 @@ object Test: def defaultIncompleteHandler1(): ErrorHandler = ??? val defaultIncompleteHandler2: ErrorHandler = ??? 
- var incompleteHandler1: ErrorHandler = defaultIncompleteHandler1().unsafeBox - var incompleteHandler2: ErrorHandler = defaultIncompleteHandler2.unsafeBox - private var incompleteHandler7 = defaultIncompleteHandler1().unsafeBox - private var incompleteHandler8 = defaultIncompleteHandler2.unsafeBox + @uncheckedCaptures + var incompleteHandler1: ErrorHandler = defaultIncompleteHandler1() + @uncheckedCaptures + var incompleteHandler2: ErrorHandler = defaultIncompleteHandler2 + @uncheckedCaptures + private var incompleteHandler7 = defaultIncompleteHandler1() + @uncheckedCaptures + private var incompleteHandler8 = defaultIncompleteHandler2 + + incompleteHandler1 = defaultIncompleteHandler2 + incompleteHandler1 = defaultIncompleteHandler2 + val saved = incompleteHandler1 + - incompleteHandler1 = defaultIncompleteHandler2.unsafeBox - incompleteHandler1 = defaultIncompleteHandler2.unsafeBox - val saved = incompleteHandler1.unsafeUnbox diff --git a/tests/pos-custom-args/erased/erased-class-as-args.scala b/tests/pos-custom-args/erased/erased-class-as-args.scala new file mode 100644 index 000000000000..74c827fbd54b --- /dev/null +++ b/tests/pos-custom-args/erased/erased-class-as-args.scala @@ -0,0 +1,22 @@ +erased class A + +erased class B(val x: Int) extends A + +type T = (x: A, y: Int) => Int + +type TSub[-T <: A] = (erased x: T, y: Int) => Int + +def useT(f: T) = f(new A, 5) + +def useTSub(f: TSub[B]) = f(new B(5), 5) + +@main def Test() = + val tInfer = (x: A, y: Int) => y + 1 + val tExpl: T = (x, y) => y + 1 + assert(useT((erased x, y) => y + 1) == 6) + assert(useT(tInfer) == 6) + assert(useT(tExpl) == 6) + + val tSub: TSub[A] = (x, y) => y + 1 + assert(useT(tSub) == 6) + assert(useTSub(tSub) == 6) diff --git a/tests/pos-custom-args/erased/erased-soft-keyword.scala b/tests/pos-custom-args/erased/erased-soft-keyword.scala new file mode 100644 index 000000000000..fdb884628c7d --- /dev/null +++ b/tests/pos-custom-args/erased/erased-soft-keyword.scala @@ -0,0 +1,18 @@ +def f1(x: Int, erased y: Int) = 0 +def f2(x: Int, erased: Int) = 0 +inline def f3(x: Int, inline erased: Int) = 0 +def f4(x: Int, erased inline: Int) = 0 +// inline def f5(x: Int, erased inline y: Int) = 0 // should parse but rejected later + +def f6(using erased y: Int) = 0 +def f7(using erased: Int) = 0 +inline def f8(using inline erased: Int) = 0 +def f9(using erased inline: Int) = 0 +// inline def f10(using erased inline x: Int) = 0 // should parse but rejected later +def f11(using erased Int) = 0 + +val v1 = (erased: Int) => 0 +val v2: Int => Int = erased => 0 +val v3 = (erased x: Int) => 0 +val v4: (erased Int) => Int = (erased x) => 0 +val v5: (erased: Int) => Int = x => 0 diff --git a/tests/pos-custom-args/erased/tailrec.scala b/tests/pos-custom-args/erased/tailrec.scala new file mode 100644 index 000000000000..cebcf4785c7a --- /dev/null +++ b/tests/pos-custom-args/erased/tailrec.scala @@ -0,0 +1,20 @@ +import scala.annotation.tailrec + +erased class Foo1 +class Foo2 + +@tailrec +final def test1(n: Int, acc: Int): (Foo1, Foo2) ?=> Int = + if n <= 0 then acc + else test1(n - 1, acc * n) + +@tailrec +final def test2(n: Int, acc: Int): Foo1 ?=> Int = + if n <= 0 then acc + else test2(n - 1, acc * n) + +@main def Test() = + given Foo1 = Foo1() + given Foo2 = Foo2() + test1(10, 0) + test2(10, 0) diff --git a/tests/pos-custom-args/no-experimental/dotty-experimental.scala b/tests/pos-custom-args/no-experimental/dotty-experimental.scala index 320c68dbea50..72d16ddd9b15 100644 --- 
a/tests/pos-custom-args/no-experimental/dotty-experimental.scala +++ b/tests/pos-custom-args/no-experimental/dotty-experimental.scala @@ -1,6 +1,6 @@ package dotty.tools object test { - val x = caps.unsafe.unsafeBox + val x = caps.cap } diff --git a/tests/pos-special/adhoc-extension/A.scala b/tests/pos-custom-args/strict/adhoc-extension/A.scala similarity index 100% rename from tests/pos-special/adhoc-extension/A.scala rename to tests/pos-custom-args/strict/adhoc-extension/A.scala diff --git a/tests/pos-special/adhoc-extension/B.scala b/tests/pos-custom-args/strict/adhoc-extension/B.scala similarity index 100% rename from tests/pos-special/adhoc-extension/B.scala rename to tests/pos-custom-args/strict/adhoc-extension/B.scala diff --git a/tests/pos-custom-args/i10383.scala b/tests/pos-custom-args/strict/i10383.scala similarity index 100% rename from tests/pos-custom-args/i10383.scala rename to tests/pos-custom-args/strict/i10383.scala diff --git a/tests/pos-custom-args/strict/i16808.scala b/tests/pos-custom-args/strict/i16808.scala new file mode 100644 index 000000000000..602ceed94161 --- /dev/null +++ b/tests/pos-custom-args/strict/i16808.scala @@ -0,0 +1,2 @@ +def collectKeys[A, B, C](xs: Map[A, B])(f: PartialFunction[A, C]): Map[C, B] = + xs.collect{ case (f(c) , b) => (c, b) } \ No newline at end of file diff --git a/tests/pos-special/i7296.scala b/tests/pos-custom-args/strict/i7296.scala similarity index 100% rename from tests/pos-special/i7296.scala rename to tests/pos-custom-args/strict/i7296.scala diff --git a/tests/pos-java16+/java-records/FromScala.scala b/tests/pos-java16+/java-records/FromScala.scala new file mode 100644 index 000000000000..67747e658432 --- /dev/null +++ b/tests/pos-java16+/java-records/FromScala.scala @@ -0,0 +1,43 @@ +object C: + def useR1: Unit = + // constructor signature + val r = R1(123, "hello") + + // accessors + val i: Int = r.i + val s: String = r.s + + // methods + val iRes: Int = r.getInt() + val sRes: String = r.getString() + + // supertype + val record: java.lang.Record = r + + def useR2: Unit = + // constructor signature + val r2 = R2.R(123, "hello") + + // accessors signature + val i: Int = r2.i + val s: String = r2.s + + // method + val i2: Int = r2.getInt + + // supertype + val isIntLike: IntLike = r2 + val isRecord: java.lang.Record = r2 + + def useR3 = + // constructor signature + val r3 = R3(123, 42L, "hi") + new R3("hi", 123) + // accessors signature + val i: Int = r3.i + val l: Long = r3.l + val s: String = r3.s + // method + val l2: Long = r3.l(43L, 44L) + // supertype + val isRecord: java.lang.Record = r3 diff --git a/tests/pos-java16+/java-records/IntLike.scala b/tests/pos-java16+/java-records/IntLike.scala new file mode 100644 index 000000000000..1f760018a975 --- /dev/null +++ b/tests/pos-java16+/java-records/IntLike.scala @@ -0,0 +1,2 @@ +trait IntLike: + def getInt: Int diff --git a/tests/pos-java16+/java-records/R1.java b/tests/pos-java16+/java-records/R1.java new file mode 100644 index 000000000000..832d288547ab --- /dev/null +++ b/tests/pos-java16+/java-records/R1.java @@ -0,0 +1,9 @@ +public record R1(int i, String s) { + public String getString() { + return s + i; + } + + public int getInt() { + return 0; + } +} diff --git a/tests/pos-java16+/java-records/R2.java b/tests/pos-java16+/java-records/R2.java new file mode 100644 index 000000000000..4b3f881628b9 --- /dev/null +++ b/tests/pos-java16+/java-records/R2.java @@ -0,0 +1,13 @@ +public class R2 { + final record R(int i, String s) implements IntLike { + public int getInt() { + 
return i; + } + + // Canonical constructor + public R(int i, java.lang.String s) { + this.i = i; + this.s = s.intern(); + } + } +} diff --git a/tests/pos-java16+/java-records/R3.java b/tests/pos-java16+/java-records/R3.java new file mode 100644 index 000000000000..616481a0ae1f --- /dev/null +++ b/tests/pos-java16+/java-records/R3.java @@ -0,0 +1,22 @@ +public record R3(int i, long l, String s) { + + // User-specified accessor + public int i() { + return i + 1; // evil >:) + } + + // Not an accessor - too many parameters + public long l(long a1, long a2) { + return a1 + a2; + } + + // Secondary constructor + public R3(String s, int i) { + this(i, 42L, s); + } + + // Compact constructor + public R3 { + s = s.intern(); + } +} \ No newline at end of file diff --git a/tests/pos-macros/i10127-a.scala b/tests/pos-macros/i10127-a.scala new file mode 100644 index 000000000000..3b9efc2a829d --- /dev/null +++ b/tests/pos-macros/i10127-a.scala @@ -0,0 +1,8 @@ +import scala.quoted.* + +object T { + def impl[A](using t: Type[A])(using Quotes): Expr[Unit] = { + Expr.summon[t.Underlying] + '{} + } +} \ No newline at end of file diff --git a/tests/pos-macros/i12440.scala b/tests/pos-macros/i12440.scala index 4b4c56fef568..02122be28deb 100644 --- a/tests/pos-macros/i12440.scala +++ b/tests/pos-macros/i12440.scala @@ -5,6 +5,12 @@ trait Mirror: class Eq: + def test0(using Quotes): Unit = '{ + type T + ${ summonType[T]; ??? } + ${ summonType[List[T]]; ??? } + } + def test1(using Quotes): Unit = '{ val m: Mirror = ??? ${ summonType[m.ElemTypes]; ??? } diff --git a/tests/pos-macros/i13376a.scala b/tests/pos-macros/i13376a.scala new file mode 100644 index 000000000000..8e746d0e34a8 --- /dev/null +++ b/tests/pos-macros/i13376a.scala @@ -0,0 +1,7 @@ +import scala.quoted.* +trait C: + type T + def foo: T +inline def makro(x: C): x.T = ${ impl[x.T]('x) } +def impl[U: Type](xp: Expr[C { def foo: U }])(using Quotes): Expr[U] = + '{ $xp.foo } diff --git a/tests/pos-macros/i13376b.scala b/tests/pos-macros/i13376b.scala new file mode 100644 index 000000000000..8aa171ff07dd --- /dev/null +++ b/tests/pos-macros/i13376b.scala @@ -0,0 +1,7 @@ +import scala.quoted.* +trait C: + type T + def foo: T +inline def makro(inline x: C): C#T = ${ impl('x) } +def impl[U: Type](xp: Expr[C { def foo: U }])(using Quotes): Expr[U] = + '{ $xp.foo } diff --git a/tests/pos-macros/i14131.scala b/tests/pos-macros/i14131.scala new file mode 100644 index 000000000000..76c01839a17f --- /dev/null +++ b/tests/pos-macros/i14131.scala @@ -0,0 +1,11 @@ +class Dog: + inline given bark(using msg: String = "Woof!"): String = s"bark: $msg" + +class Wolf: + private val dog: Dog = Dog() + export dog.given + +def test = + val w = Wolf() + import w.given + summon[String] diff --git a/tests/pos-macros/i15165a/Macro_1.scala b/tests/pos-macros/i15165a/Macro_1.scala new file mode 100644 index 000000000000..8838d4c06bd1 --- /dev/null +++ b/tests/pos-macros/i15165a/Macro_1.scala @@ -0,0 +1,9 @@ +import scala.quoted.* + +inline def valToFun[T](inline expr: T): T = + ${ impl('expr) } + +def impl[T: Type](expr: Expr[T])(using quotes: Quotes): Expr[T] = + expr match + case '{ { val ident = ($a: α); $rest(ident): T } } => + '{ { (y: α) => $rest(y) }.apply(???) 
} diff --git a/tests/pos-macros/i15165a/Test_2.scala b/tests/pos-macros/i15165a/Test_2.scala new file mode 100644 index 000000000000..f7caa67b2df7 --- /dev/null +++ b/tests/pos-macros/i15165a/Test_2.scala @@ -0,0 +1,4 @@ +def test = valToFun { + val a: Int = 1 + a + 1 +} diff --git a/tests/pos-macros/i15165b/Macro_1.scala b/tests/pos-macros/i15165b/Macro_1.scala new file mode 100644 index 000000000000..5d62db37e313 --- /dev/null +++ b/tests/pos-macros/i15165b/Macro_1.scala @@ -0,0 +1,16 @@ +import scala.quoted.* + +inline def valToFun[T](inline expr: T): T = + ${ impl('expr) } + +def impl[T: Type](expr: Expr[T])(using quotes: Quotes): Expr[T] = + expr match + case '{ { val ident = ($a: α); $rest(ident): T } } => + '{ + { (y: α) => + ${ + val bound = '{ ${ rest }(y) } + Expr.betaReduce(bound) + } + }.apply($a) + } diff --git a/tests/pos-macros/i15165b/Test_2.scala b/tests/pos-macros/i15165b/Test_2.scala new file mode 100644 index 000000000000..f7caa67b2df7 --- /dev/null +++ b/tests/pos-macros/i15165b/Test_2.scala @@ -0,0 +1,4 @@ +def test = valToFun { + val a: Int = 1 + a + 1 +} diff --git a/tests/pos-macros/i15165c/Macro_1.scala b/tests/pos-macros/i15165c/Macro_1.scala new file mode 100644 index 000000000000..036363bf274f --- /dev/null +++ b/tests/pos-macros/i15165c/Macro_1.scala @@ -0,0 +1,9 @@ +import scala.quoted.* + +inline def valToFun[T](inline expr: T): T = + ${ impl('expr) } + +def impl[T: Type](expr: Expr[T])(using quotes: Quotes): Expr[T] = + expr match + case '{ type α; { val ident = ($a: `α`); $rest(ident): `α` & T } } => + '{ { (y: α) => $rest(y) }.apply(???) } diff --git a/tests/pos-macros/i15165c/Test_2.scala b/tests/pos-macros/i15165c/Test_2.scala new file mode 100644 index 000000000000..f7caa67b2df7 --- /dev/null +++ b/tests/pos-macros/i15165c/Test_2.scala @@ -0,0 +1,4 @@ +def test = valToFun { + val a: Int = 1 + a + 1 +} diff --git a/tests/pos-macros/i15475.scala b/tests/pos-macros/i15475.scala new file mode 100644 index 000000000000..20993cd46d15 --- /dev/null +++ b/tests/pos-macros/i15475.scala @@ -0,0 +1,13 @@ +def test = + transform { + val a: Seq[Generic[?]] = ??? + a.foreach { to => + to.mthd() + } + } + +transparent inline def transform[T](expr: T): T = ??? + +trait Generic[+T] { + def mthd(): Generic[T] = ??? 
+} diff --git a/tests/pos-macros/i15475a/Macro_1.scala b/tests/pos-macros/i15475a/Macro_1.scala new file mode 100644 index 000000000000..b1bd676e7e17 --- /dev/null +++ b/tests/pos-macros/i15475a/Macro_1.scala @@ -0,0 +1,17 @@ +package x + +import scala.quoted.* + + +transparent inline def xtransform[T](inline expr:T) = ${ + X.transform('expr) +} + +object X { + + def transform[T:Type](x: Expr[T])(using Quotes):Expr[T] = { + import quotes.reflect.* + x + } + +} diff --git a/tests/pos-macros/i15475a/Test_2.scala b/tests/pos-macros/i15475a/Test_2.scala new file mode 100644 index 000000000000..7757a14950de --- /dev/null +++ b/tests/pos-macros/i15475a/Test_2.scala @@ -0,0 +1,15 @@ +package x + +def hello = { + xtransform { + val a: Seq[Generic[?]] = null + a + .foreach { to => + to.mthd() + } + } +} + +trait Generic[+T] { + def mthd(): Generic[T] = this +} diff --git a/tests/pos-macros/i15709.scala b/tests/pos-macros/i15709.scala new file mode 100644 index 000000000000..845ed35d1a55 --- /dev/null +++ b/tests/pos-macros/i15709.scala @@ -0,0 +1,4 @@ +import quoted.* + +inline def foo(s: Singleton): Unit = ${ fooImpl('s) } +def fooImpl(s: Expr[Singleton])(using Quotes) = '{} diff --git a/tests/pos-macros/i16008/Macro_1.scala b/tests/pos-macros/i16008/Macro_1.scala new file mode 100644 index 000000000000..b1cc1e6b2b9d --- /dev/null +++ b/tests/pos-macros/i16008/Macro_1.scala @@ -0,0 +1,24 @@ +import scala.quoted.* + +enum MyEnum0: + case Marked + case Marked2(i: Int) + +trait MyMarker + +enum MyEnum(val value: String): + case Marked extends MyEnum("marked") with MyMarker + case Marked2(i: Int) extends MyEnum("marked") with MyMarker + +inline def enumMacro: Unit = ${ enumMacroExpr } + +private def enumMacroExpr(using Quotes): Expr[Unit] = + import quotes.reflect.* + assert(TypeRepr.of[MyEnum0].typeSymbol.flags.is(Flags.Enum)) + assert(TypeRepr.of[MyEnum0.Marked.type].termSymbol.flags.is(Flags.Enum)) + assert(TypeRepr.of[MyEnum0.Marked2].typeSymbol.flags.is(Flags.Enum)) + assert(TypeRepr.of[MyEnum].typeSymbol.flags.is(Flags.Enum)) + assert(TypeRepr.of[MyEnum.Marked.type].termSymbol.flags.is(Flags.Enum)) + assert(TypeRepr.of[MyEnum.Marked2].typeSymbol.flags.is(Flags.Enum)) + + '{} diff --git a/tests/pos-macros/i16008/Test_2.scala b/tests/pos-macros/i16008/Test_2.scala new file mode 100644 index 000000000000..43631e59e4b2 --- /dev/null +++ b/tests/pos-macros/i16008/Test_2.scala @@ -0,0 +1 @@ +def test = enumMacro diff --git a/tests/pos-macros/i16615.scala b/tests/pos-macros/i16615.scala new file mode 100644 index 000000000000..3cc2d271fa87 --- /dev/null +++ b/tests/pos-macros/i16615.scala @@ -0,0 +1,19 @@ +import scala.quoted.* + +trait Api: + type Reader[E] + +def bugImpl[T: Type, Q[_]: Type](using Quotes) = + '{ + val p: Api = ??? 
+ ${ + Type.of[p.Reader[T]] + Type.of[Q[p.Reader[T]]] + Type.of[p.Reader[Q[p.Reader[T]]]] + Type.of[List[p.Reader[T]]] + Type.of[p.Reader[List[p.Reader[T]]]] + Type.of[p.Reader[List[T]]] + Type.of[p.Reader[Q[T]]] + Expr(1) + } + } diff --git a/tests/pos-macros/i16835/Macro_1.scala b/tests/pos-macros/i16835/Macro_1.scala new file mode 100644 index 000000000000..133d9f38d1da --- /dev/null +++ b/tests/pos-macros/i16835/Macro_1.scala @@ -0,0 +1,79 @@ +import scala.quoted.* +import scala.deriving.Mirror + +// derivation code is a slightly modified version of: https://github.com/lampepfl/dotty-macro-examples/blob/main/macroTypeClassDerivation/src/macro.scala +object Derivation { + + // Typeclass instance gets constructed as part of a macro + inline given deriveFullyConstrucedByMacro[A](using Mirror.ProductOf[A]): Show[A] = Derivation.deriveShow[A] + + // Typeclass instance is built inside as part of a method, only the 'show' impl is filled in by a macro + inline given derivePartiallyConstructedByMacro[A](using Mirror.ProductOf[A]): Show[A] = + new { + def show(value: A): String = Derivation.show(value) + } + + inline def show[T](value: T): String = ${ showValue('value) } + + inline def deriveShow[T]: Show[T] = ${ deriveCaseClassShow[T] } + + private def deriveCaseClassShow[T](using quotes: Quotes, tpe: Type[T]): Expr[Show[T]] = { + import quotes.reflect.* + // Getting the case fields of the case class + val fields: List[Symbol] = TypeTree.of[T].symbol.caseFields + + '{ + new Show[T] { + override def show(t: T): String = + ${ showValue('t) } + } + } + } + + def showValue[T: Type](value: Expr[T])(using Quotes): Expr[String] = { + import quotes.reflect.* + + val fields: List[Symbol] = TypeTree.of[T].symbol.caseFields + + val vTerm: Term = value.asTerm + val valuesExprs: List[Expr[String]] = fields.map(showField(vTerm, _)) + val exprOfList: Expr[List[String]] = Expr.ofList(valuesExprs) + '{ "{ " + $exprOfList.mkString(", ") + " }" } + } + + /** Create a quoted String representation of a given field of the case class */ + private def showField(using Quotes)(caseClassTerm: quotes.reflect.Term, field: quotes.reflect.Symbol): Expr[String] = { + import quotes.reflect.* + + val fieldValDef: ValDef = field.tree.asInstanceOf[ValDef] + val fieldTpe: TypeRepr = fieldValDef.tpt.tpe + val fieldName: String = fieldValDef.name + + val tcl: Term = lookupShowFor(fieldTpe) // Show[$fieldTpe] + val fieldValue: Term = Select(caseClassTerm, field) // v.field + val strRepr: Expr[String] = applyShow(tcl, fieldValue).asExprOf[String] + '{ ${ Expr(fieldName) } + ": " + $strRepr } // summon[Show[$fieldTpe]].show(v.field) + } + + /** Look up the Show[$t] typeclass for a given type t */ + private def lookupShowFor(using Quotes)(t: quotes.reflect.TypeRepr): quotes.reflect.Term = { + import quotes.reflect.* + t.asType match { + case '[tpe] => + Implicits.search(TypeRepr.of[Show[tpe]]) match { + case res: ImplicitSearchSuccess => res.tree + case failure: DivergingImplicit => report.errorAndAbort(s"Diverving: ${failure.explanation}") + case failure: NoMatchingImplicits => report.errorAndAbort(s"NoMatching: ${failure.explanation}") + case failure: AmbiguousImplicits => report.errorAndAbort(s"Ambiguous: ${failure.explanation}") + case failure: ImplicitSearchFailure => + report.errorAndAbort(s"catch all: ${failure.explanation}") + } + } + } + + /** Composes the tree: $tcl.show($arg) */ + private def applyShow(using Quotes)(tcl: quotes.reflect.Term, arg: quotes.reflect.Term): quotes.reflect.Term = { + import quotes.reflect.* + 
Apply(Select.unique(tcl, "show"), arg :: Nil) + } +} diff --git a/tests/pos-macros/i16835/Show_1.scala b/tests/pos-macros/i16835/Show_1.scala new file mode 100644 index 000000000000..61f6b2dccd80 --- /dev/null +++ b/tests/pos-macros/i16835/Show_1.scala @@ -0,0 +1,11 @@ +trait Show[A] { + def show(value: A): String +} + +object Show { + given identity: Show[String] = a => a + + given int: Show[Int] = _.toString() + + given list[A](using A: Show[A]): Show[List[A]] = _.map(A.show).toString() +} diff --git a/tests/pos-macros/i16835/Test_2.scala b/tests/pos-macros/i16835/Test_2.scala new file mode 100644 index 000000000000..61019b1417b6 --- /dev/null +++ b/tests/pos-macros/i16835/Test_2.scala @@ -0,0 +1,30 @@ +import scala.deriving.* + +object usage { + final case class Person(name: String, age: Int, otherNames: List[String], p2: Person2) + + final case class Person2(name: String, age: Int, otherNames: List[String]) + + locally { + import Derivation.deriveFullyConstrucedByMacro + // works for case classes without other nested case classes inside + summon[Show[Person2]] + + // also derives instances with nested case classes + summon[Show[Person]] + } + + locally { + import Derivation.derivePartiallyConstructedByMacro + + // works for case classes without other nested case classes inside + summon[Show[Person2]] + + // fails for case classes with other nested case classes inside, + // note how that error is not a `NonMatching', `Diverging` or `Ambiguous` implicit search error but something else + /* + catch all: given instance deriveWithConstructionOutsideMacro in object Derivation does not match type io.github.arainko.ducktape.issue_repros.Show[Person2] + */ + summon[Show[Person]] + } +} \ No newline at end of file diff --git a/tests/pos-macros/i16843a/Macro_1.scala b/tests/pos-macros/i16843a/Macro_1.scala new file mode 100644 index 000000000000..98c1505910e6 --- /dev/null +++ b/tests/pos-macros/i16843a/Macro_1.scala @@ -0,0 +1,13 @@ +import scala.quoted.* + +case class Foo(x: Int) + +inline def foo = ${ fooImpl } + +def fooImpl(using Quotes) = + val tmp = '{ + 1 match + case x @ (y: Int) => 0 + } + + '{} diff --git a/tests/pos-macros/i16843a/Test_2.scala b/tests/pos-macros/i16843a/Test_2.scala new file mode 100644 index 000000000000..25406428d0cf --- /dev/null +++ b/tests/pos-macros/i16843a/Test_2.scala @@ -0,0 +1 @@ +val x = foo diff --git a/tests/pos-macros/i16843b/Macro_1.scala b/tests/pos-macros/i16843b/Macro_1.scala new file mode 100644 index 000000000000..631bde56f1f1 --- /dev/null +++ b/tests/pos-macros/i16843b/Macro_1.scala @@ -0,0 +1,18 @@ +import scala.quoted.* + +inline def foo: Int = ${ fooImpl } + +def fooImpl(using Quotes): Expr[Int] = + '{ + val b = ${ + val a = '{ + (1: Int) match + case x @ (y: Int) => 0 + } + a + } + + (1: Int) match + case x @ (y: Int) => 0 + } + diff --git a/tests/pos-macros/i16843b/Test_2.scala b/tests/pos-macros/i16843b/Test_2.scala new file mode 100644 index 000000000000..54c769c9618f --- /dev/null +++ b/tests/pos-macros/i16843b/Test_2.scala @@ -0,0 +1 @@ +def test = foo diff --git a/tests/pos-macros/i16959/Macro_1.scala b/tests/pos-macros/i16959/Macro_1.scala new file mode 100644 index 000000000000..61483bff7ff1 --- /dev/null +++ b/tests/pos-macros/i16959/Macro_1.scala @@ -0,0 +1,17 @@ +import scala.quoted.* + +inline def test = ${ testImpl } + +def testImpl(using Quotes) = + import quotes.reflect.* + + val int = PackedType[Int] + val string = PackedType[String] + + assert(Type.show[(int.U, string.U, string.U)] == "scala.Tuple3[scala.Int, 
java.lang.String, java.lang.String]") + + '{ () } + +final class PackedType[T](using t: Type[T]): + opaque type U = T + given tpe: Type[U] = t diff --git a/tests/pos-macros/i16959/Test_2.scala b/tests/pos-macros/i16959/Test_2.scala new file mode 100644 index 000000000000..e9772d026451 --- /dev/null +++ b/tests/pos-macros/i16959/Test_2.scala @@ -0,0 +1 @@ +def app = test diff --git a/tests/pos-macros/i16961/Macro_1.scala b/tests/pos-macros/i16961/Macro_1.scala new file mode 100644 index 000000000000..20ec6b439ec8 --- /dev/null +++ b/tests/pos-macros/i16961/Macro_1.scala @@ -0,0 +1,10 @@ +import scala.quoted.* + +inline def myMacro = ${ myMacroImpl } + +def myMacroImpl(using Quotes) = + import quotes.reflect.* + + PolyType(List("arg"))(_ => List(TypeBounds.empty), _ => TypeRepr.of[Any]) match + case _: TypeLambda => quotes.reflect.report.errorAndAbort("PolyType should not be a TypeLambda") + case _ => '{ () } // Ok diff --git a/tests/pos-macros/i16961/Test_2.scala b/tests/pos-macros/i16961/Test_2.scala new file mode 100644 index 000000000000..76a9e17659db --- /dev/null +++ b/tests/pos-macros/i16961/Test_2.scala @@ -0,0 +1 @@ +def test = myMacro diff --git a/tests/pos-macros/i17026.scala b/tests/pos-macros/i17026.scala new file mode 100644 index 000000000000..d8845ef1d086 --- /dev/null +++ b/tests/pos-macros/i17026.scala @@ -0,0 +1,3 @@ +import scala.quoted.* +def macroImpl(using Quotes) = + '{ def weird[A: Type](using Quotes) = Type.of[A] } diff --git a/tests/pos-macros/i17026b.scala b/tests/pos-macros/i17026b.scala new file mode 100644 index 000000000000..98a29066462e --- /dev/null +++ b/tests/pos-macros/i17026b.scala @@ -0,0 +1,7 @@ +import scala.quoted.* + +def macroImpl(using Quotes) = + '{ + def weird[A: ToExpr: Type](a: A)(using quotes: Quotes) = + '{ Some(${ Expr(a) }) } + } diff --git a/tests/pos-macros/i17037.scala b/tests/pos-macros/i17037.scala new file mode 100644 index 000000000000..1048d84ffe96 --- /dev/null +++ b/tests/pos-macros/i17037.scala @@ -0,0 +1,8 @@ +import scala.quoted.* + +class Foo: + type Bar = Int + +def macroImpl(using Quotes) = + val foo = new Foo + Type.of[foo.Bar] diff --git a/tests/pos-macros/i17037b.scala b/tests/pos-macros/i17037b.scala new file mode 100644 index 000000000000..60d2bec33330 --- /dev/null +++ b/tests/pos-macros/i17037b.scala @@ -0,0 +1,10 @@ +import scala.quoted.* + +class Foo: + type Bar = Int + +def macroImpl(using Quotes) = + val foo = Foo() + Type.of[foo.Bar] match + case '[foo.Bar] => '{true} + case _ => '{false} diff --git a/tests/pos-macros/i17037c.scala b/tests/pos-macros/i17037c.scala new file mode 100644 index 000000000000..56cd8f7a2d41 --- /dev/null +++ b/tests/pos-macros/i17037c.scala @@ -0,0 +1,7 @@ +import scala.quoted.* + +class Foo: + type Bar = Int + def macroImpl(using Quotes) = + val foo = new Foo + Type.of[this.Bar] diff --git a/tests/pos-macros/i17039.scala b/tests/pos-macros/i17039.scala new file mode 100644 index 000000000000..6f983b138526 --- /dev/null +++ b/tests/pos-macros/i17039.scala @@ -0,0 +1,7 @@ +import scala.quoted.* + +def macroImpl(using Quotes) = + val t = summon[Type[Int]] + Type.of[Int] match + case '[t.Underlying] => '{true} + case _ => '{false} diff --git a/tests/pos-macros/i17103a.scala b/tests/pos-macros/i17103a.scala new file mode 100644 index 000000000000..ffd0c15f28b2 --- /dev/null +++ b/tests/pos-macros/i17103a.scala @@ -0,0 +1,21 @@ +import scala.quoted.* + +trait C0: + def d: Int + +def test(using Quotes): Expr[Unit] = + '{ + trait C1 extends C0: + def d: Int + trait C extends C1: + def d: Int 
+ val c: C = ??? + ${ + val expr = '{ + val cRef: C = ??? + cRef.d // calls C0.d + () + } + expr + } + } diff --git a/tests/pos-macros/i17103b.scala b/tests/pos-macros/i17103b.scala new file mode 100644 index 000000000000..0fbe86f0cf73 --- /dev/null +++ b/tests/pos-macros/i17103b.scala @@ -0,0 +1,21 @@ +import scala.quoted.* + +trait C0: + def d: Int + +def test(using Quotes): Expr[Unit] = + '{ + trait C1 extends C0: + def d: Int + trait C extends C1: + def d: Int + val c: C = ??? + ${ + val expr = '{ + val cRef: c.type = ??? + cRef.d // calls C0.d + () + } + expr + } + } diff --git a/tests/pos-macros/i17227/Macro_1.scala b/tests/pos-macros/i17227/Macro_1.scala new file mode 100644 index 000000000000..b483336119cb --- /dev/null +++ b/tests/pos-macros/i17227/Macro_1.scala @@ -0,0 +1,22 @@ +import scala.quoted.* + +inline def foo(f: Int => Int): Int => Int = ${impl('f)} +inline def bar(inline f: Int => Int): Int => Int = ${impl('f)} +inline def baz(inline f: (Int => Int)*): Int => Int = ${impl2('f)} + +def impl(f: Expr[Int => Int])(using Quotes): Expr[Int => Int] = + assertNoNamedArgs(f) + '{identity} + +def impl2(f: Expr[Seq[Int => Int]])(using Quotes): Expr[Int => Int] = + assertNoNamedArgs(f) + '{identity} + +def assertNoNamedArgs(expr: Expr[Any])(using Quotes): Unit = + import quotes.reflect.* + new TreeTraverser { + override def traverseTree(tree: Tree)(owner: Symbol): Unit = tree match + case _: NamedArg => + report.throwError(s"Unexpected NamedArg after inlining: ${tree}", tree.pos) + case _ => traverseTreeChildren(tree)(owner) + }.traverseTree(expr.asTerm)(Symbol.spliceOwner) diff --git a/tests/pos-macros/i17227/Test_2.scala b/tests/pos-macros/i17227/Test_2.scala new file mode 100644 index 000000000000..4106113d94c0 --- /dev/null +++ b/tests/pos-macros/i17227/Test_2.scala @@ -0,0 +1,6 @@ +def g(i: Int): Int = i + +def test = + foo(f = g) + bar(f = g) + baz(f = g) diff --git a/tests/pos-macros/i17293.scala b/tests/pos-macros/i17293.scala new file mode 100644 index 000000000000..57eba1181903 --- /dev/null +++ b/tests/pos-macros/i17293.scala @@ -0,0 +1,12 @@ +import scala.quoted.* + +trait OuterTrait { + trait X +} + +def exampleMacro[T <: OuterTrait: Type](expr: Expr[T])(using Quotes): Expr[OuterTrait#X] = { + '{ + val prefix: T = ${ expr } + new prefix.X {} + } +} diff --git a/tests/pos-macros/i17293b.scala b/tests/pos-macros/i17293b.scala new file mode 100644 index 000000000000..a8b73ba6176b --- /dev/null +++ b/tests/pos-macros/i17293b.scala @@ -0,0 +1,12 @@ +import scala.quoted.* + +trait OuterTrait { self => + trait X + + def exampleMacro[T <: self.type: Type](expr: Expr[T])(using Quotes): Expr[self.X] = { + '{ + val prefix: T = ${ expr } + new prefix.X {} + } + } +} \ No newline at end of file diff --git a/tests/pos-macros/i17409.scala b/tests/pos-macros/i17409.scala new file mode 100644 index 000000000000..449e0576d84b --- /dev/null +++ b/tests/pos-macros/i17409.scala @@ -0,0 +1,10 @@ +import scala.quoted.* + +transparent inline def thing = + ${ thingImpl } + +def thingImpl(using Quotes): Expr[Any] = + '{ + def makeThing: { def me: this.type } = ??? 
+ makeThing + } diff --git a/tests/pos-macros/i17434a/Macro.scala b/tests/pos-macros/i17434a/Macro.scala new file mode 100644 index 000000000000..0e399d82a9d1 --- /dev/null +++ b/tests/pos-macros/i17434a/Macro.scala @@ -0,0 +1,8 @@ +import scala.quoted.* + +object SelectDynamicMacroImpl { + def selectImpl[E: Type]( + ref: Expr[SQLSyntaxProvider[_]], + name: Expr[String] + )(using Quotes): Expr[SQLSyntax] = '{SQLSyntax("foo")} +} diff --git a/tests/pos-macros/i17434a/Test.scala b/tests/pos-macros/i17434a/Test.scala new file mode 100644 index 000000000000..8e7c314b238d --- /dev/null +++ b/tests/pos-macros/i17434a/Test.scala @@ -0,0 +1,23 @@ +// test.scala +import scala.language.dynamics + +trait SQLSyntaxProvider[A] extends Dynamic{ + def field(name: String): SQLSyntax = ??? + + inline def selectDynamic(inline name: String): SQLSyntax = + select[A](this, name) + + inline def select[E](ref: SQLSyntaxProvider[A], inline name: String): SQLSyntax = + ${ SelectDynamicMacroImpl.selectImpl[E]('ref, 'name) } +} + +class SQLSyntax(value: String) +trait SQLSyntaxSupport[A] +case class ColumnSQLSyntaxProvider[S <: SQLSyntaxSupport[A], A](support: S) extends SQLSyntaxProvider[A] + +case class Account(id: Long, name: String) +object Account extends SQLSyntaxSupport[Account] + +def Test() = + val p = ColumnSQLSyntaxProvider[Account.type, Account](Account) + assert(p.name == SQLSyntax("name")) diff --git a/tests/pos-macros/i17434b/Macro.scala b/tests/pos-macros/i17434b/Macro.scala new file mode 100644 index 000000000000..adca2888f777 --- /dev/null +++ b/tests/pos-macros/i17434b/Macro.scala @@ -0,0 +1,29 @@ +trait NameOf: + transparent inline def nameOf(inline expr: Any): String = ${NameOfImpl.nameOf('expr)} + transparent inline def nameOf[T](inline expr: T => Any): String = ${NameOfImpl.nameOf('expr)} +object NameOf extends NameOf + +import scala.compiletime.* + +import scala.annotation.tailrec +import scala.quoted.* + +object NameOfImpl { + def nameOf(expr: Expr[Any])(using Quotes): Expr[String] = { + import quotes.reflect.* + @tailrec def extract(tree: Tree): String = tree match { + case Ident(name) => name + case Select(_, name) => name + case Block(List(stmt), term) => extract(stmt) + case DefDef("$anonfun", _, _, Some(term)) => extract(term) + case Block(_, term) => extract(term) + case Apply(term, _) if term.symbol.fullName != ".throw" => extract(term) + case TypeApply(term, _) => extract(term) + case Inlined(_, _, term) => extract(term) + case Typed(term, _) => extract(term) + case _ => throw new MatchError(s"Unsupported expression: ${expr.show}") + } + val name = extract(expr.asTerm) + Expr(name) + } +} diff --git a/tests/pos-macros/i17434b/Test.scala b/tests/pos-macros/i17434b/Test.scala new file mode 100644 index 000000000000..5e71f9c95965 --- /dev/null +++ b/tests/pos-macros/i17434b/Test.scala @@ -0,0 +1,6 @@ +import NameOf._ +def test() = + def func1(x: Int): String = ??? 
+ val funcVal = func1 _ + assert(nameOf(funcVal) == "funcVal") + assert(nameOf(func1 _) == "func1") diff --git a/tests/pos-macros/i17434c/Macro.scala b/tests/pos-macros/i17434c/Macro.scala new file mode 100644 index 000000000000..dc3d2a533117 --- /dev/null +++ b/tests/pos-macros/i17434c/Macro.scala @@ -0,0 +1,3 @@ +import scala.quoted.* +inline def foo[T](expr: T => Any): Unit = ${impl('expr)} +def impl(expr: Expr[Any])(using Quotes): Expr[Unit] = '{} diff --git a/tests/pos-macros/i17434c/Test.scala b/tests/pos-macros/i17434c/Test.scala new file mode 100644 index 000000000000..6561dd193b63 --- /dev/null +++ b/tests/pos-macros/i17434c/Test.scala @@ -0,0 +1 @@ +def test(f: Int => Any) = foo(f) diff --git a/tests/pos-macros/i17434d/Macro.scala b/tests/pos-macros/i17434d/Macro.scala new file mode 100644 index 000000000000..a76c8aab58e4 --- /dev/null +++ b/tests/pos-macros/i17434d/Macro.scala @@ -0,0 +1,2 @@ +import scala.quoted.* +def impl[E: Type](ref: Expr[Foo[_]])(using Quotes): Expr[Unit] = '{ } diff --git a/tests/pos-macros/i17434d/Test.scala b/tests/pos-macros/i17434d/Test.scala new file mode 100644 index 000000000000..3af0ddecd061 --- /dev/null +++ b/tests/pos-macros/i17434d/Test.scala @@ -0,0 +1,4 @@ +trait Foo[A]: + inline def foo(): Unit = bar[this.type](this) + inline def bar[E](ref: Foo[A]): Unit = ${ impl[E]('ref) } +def test(p: Foo[Int]) = p.foo() diff --git a/tests/pos-macros/i17606/Macros_1.scala b/tests/pos-macros/i17606/Macros_1.scala new file mode 100644 index 000000000000..245f2df66e7b --- /dev/null +++ b/tests/pos-macros/i17606/Macros_1.scala @@ -0,0 +1,14 @@ +package example + +import scala.quoted.* + +object A { + inline def f(inline a: Any): Boolean = ${ impl('a) } + + def impl(a: Expr[Any])(using Quotes): Expr[Boolean] = { + a match { + case '{ new String($x: Array[Byte]) } => Expr(true) + case _ => quotes.reflect.report.errorAndAbort("Expected match", a) + } + } +} diff --git a/tests/pos-macros/i17606/Test_2.scala b/tests/pos-macros/i17606/Test_2.scala new file mode 100644 index 000000000000..ebf535bc2ae9 --- /dev/null +++ b/tests/pos-macros/i17606/Test_2.scala @@ -0,0 +1,8 @@ +package example + +object Main { + def main(args: Array[String]): Unit = { + val x = A.f(new String(Array.empty[Byte])) + println(x) + } +} diff --git a/tests/pos-macros/i7405b.scala b/tests/pos-macros/i7405b.scala index df7218608e88..6c73c275e15f 100644 --- a/tests/pos-macros/i7405b.scala +++ b/tests/pos-macros/i7405b.scala @@ -3,7 +3,7 @@ import scala.quoted.* class Foo { def f(using Quotes): Expr[Any] = { '{ - trait X { + trait X extends A { type Y def y: Y = ??? } @@ -17,3 +17,7 @@ class Foo { } } } + +trait A: + type Y + def y: Y = ??? diff --git a/tests/pos-macros/i8100b.scala b/tests/pos-macros/i8100b.scala new file mode 100644 index 000000000000..ecba10e439d2 --- /dev/null +++ b/tests/pos-macros/i8100b.scala @@ -0,0 +1,37 @@ +import scala.quoted.* + +def f[T](using t: Type[T])(using Quotes) = + '{ + // @SplicedType type t$1 = t.Underlying + type T2 = T // type T2 = t$1 + ${ + + val t0: T = ??? + val t1: T2 = ??? // val t1: T = ??? + val tp1 = Type.of[T] // val tp1 = t + val tp2 = Type.of[T2] // val tp2 = t + '{ + // @SplicedType type t$2 = t.Underlying + val t3: T = ??? // val t3: t$2 = ??? + val t4: T2 = ??? // val t4: t$2 = ??? + } + } + } + +def g(using Quotes) = + '{ + type U + type U2 = U + ${ + + val u1: U = ??? + val u2: U2 = ??? // val u2: U = ??? + + val tp1 = Type.of[U] // val tp1 = Type.of[U] + val tp2 = Type.of[U2] // val tp2 = Type.of[U] + '{ + val u3: U = ??? 
+ val u4: U2 = ??? // val u4: U = ??? + } + } + } diff --git a/tests/pos-macros/i8887.scala b/tests/pos-macros/i8887.scala new file mode 100644 index 000000000000..5bfd5501063b --- /dev/null +++ b/tests/pos-macros/i8887.scala @@ -0,0 +1,3 @@ +import scala.quoted._ +inline def foo(x: Any): Any = ${ expr[x.type] } +def expr[X](using Quotes): Expr[Any] = ??? diff --git a/tests/pos-macros/i9360.scala b/tests/pos-macros/i9360.scala new file mode 100644 index 000000000000..699ef5f38bee --- /dev/null +++ b/tests/pos-macros/i9360.scala @@ -0,0 +1,22 @@ +package a + +import scala.quoted._ + +trait CPM[F[_]] + +def fun[M[_],T](t:T)(using m:CPM[M]):M[T] = ??? + +object M { + + inline def transform[F[_],T](t:T): F[T] = + ${ transformImpl[F,T]('t) } + + def transformImpl[F[_]:Type,T:Type](t:Expr[T])(using Quotes):Expr[F[T]] = { + import quotes.reflect._ + t match { + case '{ type mt[_]; a.fun[`mt`, tt]($t)(using $m) } => ??? + } + + } + +} diff --git a/tests/pos-macros/macro-deprecation.scala b/tests/pos-macros/macro-deprecation.scala new file mode 100644 index 000000000000..ff14f96ac7fa --- /dev/null +++ b/tests/pos-macros/macro-deprecation.scala @@ -0,0 +1,4 @@ +import scala.quoted.* + +inline def f = ${ impl } // error +@deprecated def impl(using Quotes) = '{1} diff --git a/tests/pos-macros/macro-experimental.scala b/tests/pos-macros/macro-experimental.scala new file mode 100644 index 000000000000..dc011f4e45b9 --- /dev/null +++ b/tests/pos-macros/macro-experimental.scala @@ -0,0 +1,5 @@ +import scala.quoted.* +import scala.annotation.experimental + +inline def f = ${ impl } // error +@experimental def impl(using Quotes) = '{1} diff --git a/tests/pos-macros/path-dependent-type-capture/Macro_1.scala b/tests/pos-macros/path-dependent-type-capture/Macro_1.scala new file mode 100644 index 000000000000..588e50846eff --- /dev/null +++ b/tests/pos-macros/path-dependent-type-capture/Macro_1.scala @@ -0,0 +1,70 @@ +import scala.quoted.* + +trait A: + type T + val b: B + +trait B: + type T + def f: Unit + +trait C0: + type U + val d: D0 +trait D0: + type U + def h: Unit +object Macro: + inline def generateCode: Unit = ${ generateCodeExpr } + + def generateCodeExpr(using Quotes): Expr[Unit] = + '{ + $testLocalPathsGlobalClasses + $testLocalPathsLocalClasses + } + + def testLocalPathsGlobalClasses(using Quotes): Expr[Unit] = + '{ + type T + val a: A = ??? + ${ + val expr = '{ + val t: T = ??? + val aT: a.T = ??? + val abT: a.b.T = ??? + val aRef: a.type = ??? + aRef.b + aRef.b.f + val abRef: a.b.type = ??? + abRef.f + () + } + expr + } + } + + def testLocalPathsLocalClasses(using Quotes): Expr[Unit] = + '{ + type U + trait C extends C0: + type U + val d: D + trait D extends D0: + type U + def h: Unit + val c: C = ??? + ${ + val expr = '{ + val u: U = ??? + val cU: c.U = ??? + val cdU: c.d.U = ??? + val cRef: c.type = ??? + cRef.d + cRef.d.h + val cdRef: c.d.type = ??? 
+ cdRef.h + () + } + expr + } + } diff --git a/tests/pos-macros/path-dependent-type-capture/Test_2.scala b/tests/pos-macros/path-dependent-type-capture/Test_2.scala new file mode 100644 index 000000000000..c12cd8d2436a --- /dev/null +++ b/tests/pos-macros/path-dependent-type-capture/Test_2.scala @@ -0,0 +1 @@ +@main def test = Macro.generateCode diff --git a/tests/pos-special/isInstanceOf/i16899.scala b/tests/pos-special/isInstanceOf/i16899.scala new file mode 100644 index 000000000000..650e1e5c7b23 --- /dev/null +++ b/tests/pos-special/isInstanceOf/i16899.scala @@ -0,0 +1,5 @@ +sealed trait Unset + +def foo(v: Unset|Option[Int]): Unit = v match + case v: Unset => () + case v: Option[Int] => () diff --git a/tests/pos-with-compiler-cc/backend/jvm/BCodeIdiomatic.scala b/tests/pos-with-compiler-cc/backend/jvm/BCodeIdiomatic.scala index 77e58440aa97..5eb8d7a52aa2 100644 --- a/tests/pos-with-compiler-cc/backend/jvm/BCodeIdiomatic.scala +++ b/tests/pos-with-compiler-cc/backend/jvm/BCodeIdiomatic.scala @@ -54,7 +54,8 @@ trait BCodeIdiomatic extends caps.Pure { case "17" => asm.Opcodes.V17 case "18" => asm.Opcodes.V18 case "19" => asm.Opcodes.V19 - case "20" => asm.Opcodes.V20*/ + case "20" => asm.Opcodes.V20 + case "21" => asm.Opcodes.V21*/ } lazy val majorVersion: Int = (classfileVersion & 0xFF) diff --git a/tests/pos-with-compiler-cc/dotc/Run.scala b/tests/pos-with-compiler-cc/dotc/Run.scala index 16a955afca1a..96f8c6a7b06f 100644 --- a/tests/pos-with-compiler-cc/dotc/Run.scala +++ b/tests/pos-with-compiler-cc/dotc/Run.scala @@ -32,7 +32,7 @@ import scala.collection.mutable import scala.util.control.NonFatal import scala.io.Codec import annotation.constructorOnly -import caps.unsafe.unsafeUnbox +import annotation.unchecked.uncheckedCaptures /** A compiler run. 
Exports various methods to compile source files */ class Run(comp: Compiler, @constructorOnly ictx0: Context) extends ImplicitRunInfo with ConstraintRunInfo { @@ -165,6 +165,7 @@ class Run(comp: Compiler, @constructorOnly ictx0: Context) extends ImplicitRunIn val staticRefs = util.EqHashMap[Name, Denotation](initialCapacity = 1024) /** Actions that need to be performed at the end of the current compilation run */ + @uncheckedCaptures private var finalizeActions = mutable.ListBuffer[() => Unit]() /** Will be set to true if any of the compiled compilation units contains @@ -275,7 +276,7 @@ class Run(comp: Compiler, @constructorOnly ictx0: Context) extends ImplicitRunIn Rewrites.writeBack() suppressions.runFinished(hasErrors = ctx.reporter.hasErrors) while (finalizeActions.nonEmpty) { - val action = finalizeActions.remove(0).unsafeUnbox + val action = finalizeActions.remove(0) action() } compiling = false diff --git a/tests/pos-with-compiler-cc/dotc/ast/Trees.scala b/tests/pos-with-compiler-cc/dotc/ast/Trees.scala index aa1c06a7ca85..0b1842603316 100644 --- a/tests/pos-with-compiler-cc/dotc/ast/Trees.scala +++ b/tests/pos-with-compiler-cc/dotc/ast/Trees.scala @@ -1394,7 +1394,7 @@ object Trees { case _ => sourced - abstract class TreeMap(val cpy: TreeCopier = inst.cpy) { self: TreeMap @retains(caps.*) => + abstract class TreeMap(val cpy: TreeCopier = inst.cpy) { self: TreeMap @retains(caps.cap) => def transform(tree: Tree)(using Context): Tree = { inContext(transformCtx(tree)) { Stats.record(s"TreeMap.transform/$getClass") @@ -1520,7 +1520,7 @@ object Trees { } } - abstract class TreeAccumulator[X] { self: TreeAccumulator[X] @retains(caps.*) => + abstract class TreeAccumulator[X] { self: TreeAccumulator[X] @retains(caps.cap) => // Ties the knot of the traversal: call `foldOver(x, tree))` to dive in the `tree` node. 
def apply(x: X, tree: Tree)(using Context): X diff --git a/tests/pos-with-compiler-cc/dotc/ast/untpd.scala b/tests/pos-with-compiler-cc/dotc/ast/untpd.scala index b4dc6d0622c0..a6d3bc5a072c 100644 --- a/tests/pos-with-compiler-cc/dotc/ast/untpd.scala +++ b/tests/pos-with-compiler-cc/dotc/ast/untpd.scala @@ -742,7 +742,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { } abstract class UntypedTreeAccumulator[X] extends TreeAccumulator[X] { - self: UntypedTreeAccumulator[X] @retains(caps.*) => + self: UntypedTreeAccumulator[X] @retains(caps.cap) => override def foldMoreCases(x: X, tree: Tree)(using Context): X = tree match { case ModuleDef(name, impl) => this(x, impl) diff --git a/tests/pos-with-compiler-cc/dotc/cc/CaptureAnnotation.scala b/tests/pos-with-compiler-cc/dotc/cc/CaptureAnnotation.scala index 2e750865f407..67222f07efbb 100644 --- a/tests/pos-with-compiler-cc/dotc/cc/CaptureAnnotation.scala +++ b/tests/pos-with-compiler-cc/dotc/cc/CaptureAnnotation.scala @@ -51,7 +51,7 @@ case class CaptureAnnotation(refs: CaptureSet, boxed: Boolean)(cls: Symbol) exte this.refs == refs && this.boxed == boxed && this.symbol == that.symbol case _ => false - override def mapWith(tm: TypeMap @retains(caps.*))(using Context) = + override def mapWith(tm: TypeMap @retains(caps.cap))(using Context) = val elems = refs.elems.toList val elems1 = elems.mapConserve(tm) if elems1 eq elems then this diff --git a/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala b/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala index f7743dddda4e..20708b98cc95 100644 --- a/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala +++ b/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala @@ -17,7 +17,7 @@ class ScalaSettings extends SettingGroup with AllScalaSettings object ScalaSettings: // Keep synchronized with `classfileVersion` in `BCodeIdiomatic` private val minTargetVersion = 8 - private val maxTargetVersion = 20 + private val maxTargetVersion = 21 def supportedTargetVersions: List[String] = (minTargetVersion to maxTargetVersion).toList.map(_.toString) diff --git a/tests/pos-with-compiler-cc/dotc/core/Annotations.scala b/tests/pos-with-compiler-cc/dotc/core/Annotations.scala index f3fee3da78ec..f307d4a36697 100644 --- a/tests/pos-with-compiler-cc/dotc/core/Annotations.scala +++ b/tests/pos-with-compiler-cc/dotc/core/Annotations.scala @@ -55,7 +55,7 @@ object Annotations { * be overridden. Returns EmptyAnnotation if type type map produces a range * type, since ranges cannot be types of trees. */ - def mapWith(tm: TypeMap @retains(caps.*))(using Context) = + def mapWith(tm: TypeMap @retains(caps.cap))(using Context) = val args = arguments if args.isEmpty then this else diff --git a/tests/pos-with-compiler-cc/dotc/core/Contexts.scala b/tests/pos-with-compiler-cc/dotc/core/Contexts.scala index 2ce714937f97..37fde2d7b604 100644 --- a/tests/pos-with-compiler-cc/dotc/core/Contexts.scala +++ b/tests/pos-with-compiler-cc/dotc/core/Contexts.scala @@ -110,7 +110,7 @@ object Contexts { inline def inDetachedContext[T](inline op: DetachedContext ?-> T)(using ctx: Context): T = op(using ctx.detach) - type Context = ContextCls @retains(caps.*) + type Context = ContextCls @retains(caps.cap) /** A context is passed basically everywhere in dotc. 
* This is convenient but carries the risk of captured contexts in diff --git a/tests/pos-with-compiler-cc/dotc/core/Decorators.scala b/tests/pos-with-compiler-cc/dotc/core/Decorators.scala index a4c3938a0909..f9844c6eaab6 100644 --- a/tests/pos-with-compiler-cc/dotc/core/Decorators.scala +++ b/tests/pos-with-compiler-cc/dotc/core/Decorators.scala @@ -84,7 +84,7 @@ object Decorators { * on lists that avoid duplication of list nodes where feasible. */ extension [T](xs: List[T]) - final def collectCC[U](pf: PartialFunction[T, U] @retains(caps.*)): List[U] = + final def collectCC[U](pf: PartialFunction[T, U] @retains(caps.cap)): List[U] = xs.collect(pf.asInstanceOf) final def mapconserve[U](f: T => U): List[U] = { @@ -230,11 +230,11 @@ object Decorators { end extension extension [T](xs: Seq[T]) - final def collectCC[U](pf: PartialFunction[T, U] @retains(caps.*)): Seq[U] = + final def collectCC[U](pf: PartialFunction[T, U] @retains(caps.cap)): Seq[U] = xs.collect(pf.asInstanceOf) - extension [A, B](f: PartialFunction[A, B] @retains(caps.*)) - def orElseCC(g: PartialFunction[A, B] @retains(caps.*)): PartialFunction[A, B] @retains(f, g) = + extension [A, B](f: PartialFunction[A, B] @retains(caps.cap)) + def orElseCC(g: PartialFunction[A, B] @retains(caps.cap)): PartialFunction[A, B] @retains(f, g) = f.orElse(g.asInstanceOf).asInstanceOf extension (text: Text) diff --git a/tests/pos-with-compiler-cc/dotc/core/GadtConstraint.scala b/tests/pos-with-compiler-cc/dotc/core/GadtConstraint.scala index 46b7e07649b8..fcf1215c604f 100644 --- a/tests/pos-with-compiler-cc/dotc/core/GadtConstraint.scala +++ b/tests/pos-with-compiler-cc/dotc/core/GadtConstraint.scala @@ -225,7 +225,7 @@ sealed trait GadtConstraint ( // ---- Private ---------------------------------------------------------- - private def externalize(tp: Type, theMap: TypeMap @retains(caps.*) | Null = null)(using Context): Type = tp match + private def externalize(tp: Type, theMap: TypeMap @retains(caps.cap) | Null = null)(using Context): Type = tp match case param: TypeParamRef => reverseMapping(param) match case sym: Symbol => sym.typeRef case null => param @@ -238,7 +238,7 @@ sealed trait GadtConstraint ( private def tvarOrError(sym: Symbol)(using Context): TypeVar = mapping(sym).ensuring(_ != null, i"not a constrainable symbol: $sym").uncheckedNN - private def containsNoInternalTypes(tp: Type, theAcc: TypeAccumulator[Boolean] @retains(caps.*) | Null = null)(using Context): Boolean = tp match { + private def containsNoInternalTypes(tp: Type, theAcc: TypeAccumulator[Boolean] @retains(caps.cap) | Null = null)(using Context): Boolean = tp match { case tpr: TypeParamRef => !reverseMapping.contains(tpr) case tv: TypeVar => !reverseMapping.contains(tv.origin) case tp => diff --git a/tests/pos-with-compiler-cc/dotc/core/Names.scala b/tests/pos-with-compiler-cc/dotc/core/Names.scala index cb68299101ad..e6ea66f4025b 100644 --- a/tests/pos-with-compiler-cc/dotc/core/Names.scala +++ b/tests/pos-with-compiler-cc/dotc/core/Names.scala @@ -74,10 +74,10 @@ object Names { * Stops at DerivedNames with infos of kind QualifiedInfo. * If `f` does not apply to any part, return name unchanged. */ - def replace(f: PartialFunction[Name, Name] @retains(caps.*)): ThisName + def replace(f: PartialFunction[Name, Name] @retains(caps.cap)): ThisName /** Same as replace, but does not stop at DerivedNames with infos of kind QualifiedInfo. 
*/ - def replaceDeep(f: PartialFunction[Name, Name] @retains(caps.*)): ThisName = + def replaceDeep(f: PartialFunction[Name, Name] @retains(caps.cap)): ThisName = replace(f.orElseCC { case DerivedName(underlying, info: QualifiedInfo) => underlying.replaceDeep(f).derived(info) @@ -340,7 +340,7 @@ object Names { override def toSimpleName: SimpleName = this override final def mangle: SimpleName = encode - override def replace(f: PartialFunction[Name, Name] @retains(caps.*)): ThisName = + override def replace(f: PartialFunction[Name, Name] @retains(caps.cap)): ThisName = if (f.isDefinedAt(this)) likeSpaced(f(this)) else this override def collect[T](f: PartialFunction[Name, T]): Option[T] = f.lift(this) override def mapLast(f: SimpleName => SimpleName): SimpleName = f(this) @@ -440,7 +440,7 @@ object Names { override def mangled: TypeName = toTermName.mangled.toTypeName override def mangledString: String = toTermName.mangledString - override def replace(f: PartialFunction[Name, Name] @retains(caps.*)): ThisName = toTermName.replace(f).toTypeName + override def replace(f: PartialFunction[Name, Name] @retains(caps.cap)): ThisName = toTermName.replace(f).toTypeName override def collect[T](f: PartialFunction[Name, T]): Option[T] = toTermName.collect(f) override def mapLast(f: SimpleName => SimpleName): TypeName = toTermName.mapLast(f).toTypeName override def mapParts(f: SimpleName => SimpleName): TypeName = toTermName.mapParts(f).toTypeName @@ -473,7 +473,7 @@ object Names { override def toSimpleName: SimpleName = termName(toString) override final def mangle: SimpleName = encode.toSimpleName - override def replace(f: PartialFunction[Name, Name] @retains(caps.*)): ThisName = + override def replace(f: PartialFunction[Name, Name] @retains(caps.cap)): ThisName = if (f.isDefinedAt(this)) likeSpaced(f(this)) else info match { case qual: QualifiedInfo => this diff --git a/tests/pos-with-compiler-cc/dotc/core/Scopes.scala b/tests/pos-with-compiler-cc/dotc/core/Scopes.scala index 3b4cf9a98c54..7ab68ddf78a2 100644 --- a/tests/pos-with-compiler-cc/dotc/core/Scopes.scala +++ b/tests/pos-with-compiler-cc/dotc/core/Scopes.scala @@ -1,8 +1,3 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2012 LAMP/EPFL - * @author Martin Odersky - */ - package dotty.tools package dotc package core @@ -17,6 +12,7 @@ import Denotations._ import printing.Texts._ import printing.Printer import SymDenotations.NoDenotation +import annotation.unchecked.uncheckedCaptures import collection.mutable @@ -220,6 +216,7 @@ object Scopes { private var elemsCache: List[Symbol] | Null = null /** The synthesizer to be used, or `null` if no synthesis is done on this scope */ + @uncheckedCaptures private var synthesize: SymbolSynthesizer | Null = null /** Use specified synthesize for this scope */ diff --git a/tests/pos-with-compiler-cc/dotc/core/StagingContext.scala b/tests/pos-with-compiler-cc/dotc/core/StagingContext.scala index 9e0bb95394a3..4ca53e02a831 100644 --- a/tests/pos-with-compiler-cc/dotc/core/StagingContext.scala +++ b/tests/pos-with-compiler-cc/dotc/core/StagingContext.scala @@ -4,7 +4,7 @@ import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.core.Contexts._ import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.util.Property -import dotty.tools.dotc.transform.PCPCheckAndHeal +import dotty.tools.dotc.transform.CrossStageSafety object StagingContext { @@ -16,7 +16,7 @@ object StagingContext { */ private val QuotesStack = new Property.Key[List[tpd.Tree]] - private val TaggedTypes = new 
Property.Key[PCPCheckAndHeal.QuoteTypeTags]
+  private val TaggedTypes = new Property.Key[CrossStageSafety.QuoteTypeTags]

   /** All enclosing calls that are currently inlined, from innermost to outermost. */
   def level(using Context): Int =
@@ -36,10 +36,10 @@ object StagingContext {
   def spliceContext(using Context): Context =
     ctx.fresh.setProperty(QuotationLevel, level - 1)

-  def contextWithQuoteTypeTags(taggedTypes: PCPCheckAndHeal.QuoteTypeTags)(using Context) =
+  def contextWithQuoteTypeTags(taggedTypes: CrossStageSafety.QuoteTypeTags)(using Context) =
     ctx.fresh.setProperty(TaggedTypes, taggedTypes)

-  def getQuoteTypeTags(using Context): PCPCheckAndHeal.QuoteTypeTags =
+  def getQuoteTypeTags(using Context): CrossStageSafety.QuoteTypeTags =
     ctx.property(TaggedTypes).get

   /** Context with a decremented quotation level; pops the top of the quote context stack (a `Some`), or `None` if the stack is empty.
diff --git a/tests/pos-with-compiler-cc/dotc/core/Substituters.scala b/tests/pos-with-compiler-cc/dotc/core/Substituters.scala
index 1e86274f663e..6f7e02ec4dde 100644
--- a/tests/pos-with-compiler-cc/dotc/core/Substituters.scala
+++ b/tests/pos-with-compiler-cc/dotc/core/Substituters.scala
@@ -10,7 +10,7 @@ import annotation.retains
  */
 object Substituters:

-  final def subst(tp: Type, from: BindingType, to: BindingType, theMap: SubstBindingMap @retains(caps.*) | Null)(using Context): Type =
+  final def subst(tp: Type, from: BindingType, to: BindingType, theMap: SubstBindingMap @retains(caps.cap) | Null)(using Context): Type =
     tp match {
       case tp: BoundType =>
         if (tp.binder eq from) tp.copyBoundType(to.asInstanceOf[tp.BT]) else tp
@@ -26,7 +26,7 @@ object Substituters:
         .mapOver(tp)
     }

-  final def subst1(tp: Type, from: Symbol, to: Type, theMap: Subst1Map @retains(caps.*) | Null)(using Context): Type =
+  final def subst1(tp: Type, from: Symbol, to: Type, theMap: Subst1Map @retains(caps.cap) | Null)(using Context): Type =
     tp match {
       case tp: NamedType =>
         val sym = tp.symbol
@@ -40,7 +40,7 @@ object Substituters:
         .mapOver(tp)
     }

-  final def subst2(tp: Type, from1: Symbol, to1: Type, from2: Symbol, to2: Type, theMap: Subst2Map @retains(caps.*) | Null)(using Context): Type =
+  final def subst2(tp: Type, from1: Symbol, to1: Type, from2: Symbol, to2: Type, theMap: Subst2Map @retains(caps.cap) | Null)(using Context): Type =
     tp match {
       case tp: NamedType =>
         val sym = tp.symbol
@@ -55,7 +55,7 @@ object Substituters:
         .mapOver(tp)
     }

-  final def subst(tp: Type, from: List[Symbol], to: List[Type], theMap: SubstMap @retains(caps.*) | Null)(using Context): Type =
+  final def subst(tp: Type, from: List[Symbol], to: List[Type], theMap: SubstMap @retains(caps.cap) | Null)(using Context): Type =
     tp match {
       case tp: NamedType =>
         val sym = tp.symbol
@@ -75,7 +75,7 @@ object Substituters:
         .mapOver(tp)
     }

-  final def substSym(tp: Type, from: List[Symbol], to: List[Symbol], theMap: SubstSymMap @retains(caps.*) | Null)(using Context): Type =
+  final def substSym(tp: Type, from: List[Symbol], to: List[Symbol], theMap: SubstSymMap @retains(caps.cap) | Null)(using Context): Type =
     tp match {
       case tp: NamedType =>
         val sym = tp.symbol
@@ -106,7 +106,7 @@ object Substituters:
         .mapOver(tp)
     }

-  final def substThis(tp: Type, from: ClassSymbol, to: Type, theMap: SubstThisMap @retains(caps.*) | Null)(using Context): Type =
+  final def substThis(tp: Type, from: ClassSymbol, to: Type, theMap: SubstThisMap @retains(caps.cap) | Null)(using Context): Type =
     tp match {
       case tp: ThisType =>
         if (tp.cls eq from) to else tp
@@ -120,7 +120,7 @@
object Substituters: .mapOver(tp) } - final def substRecThis(tp: Type, from: Type, to: Type, theMap: SubstRecThisMap @retains(caps.*) | Null)(using Context): Type = + final def substRecThis(tp: Type, from: Type, to: Type, theMap: SubstRecThisMap @retains(caps.cap) | Null)(using Context): Type = tp match { case tp @ RecThis(binder) => if (binder eq from) to else tp @@ -134,7 +134,7 @@ object Substituters: .mapOver(tp) } - final def substParam(tp: Type, from: ParamRef, to: Type, theMap: SubstParamMap @retains(caps.*) | Null)(using Context): Type = + final def substParam(tp: Type, from: ParamRef, to: Type, theMap: SubstParamMap @retains(caps.cap) | Null)(using Context): Type = tp match { case tp: BoundType => if (tp == from) to else tp @@ -148,7 +148,7 @@ object Substituters: .mapOver(tp) } - final def substParams(tp: Type, from: BindingType, to: List[Type], theMap: SubstParamsMap @retains(caps.*) | Null)(using Context): Type = + final def substParams(tp: Type, from: BindingType, to: List[Type], theMap: SubstParamsMap @retains(caps.cap) | Null)(using Context): Type = tp match { case tp: ParamRef => if (tp.binder == from) to(tp.paramNum) else tp diff --git a/tests/pos-with-compiler-cc/dotc/core/TypeComparer.scala b/tests/pos-with-compiler-cc/dotc/core/TypeComparer.scala index 67b9f063e9d0..0e1fc277865a 100644 --- a/tests/pos-with-compiler-cc/dotc/core/TypeComparer.scala +++ b/tests/pos-with-compiler-cc/dotc/core/TypeComparer.scala @@ -25,7 +25,7 @@ import reporting.trace import annotation.constructorOnly import cc.{CapturingType, derivedCapturingType, CaptureSet, stripCapturing, isBoxedCapturing, boxed, boxedUnlessFun, boxedIfTypeParam, isAlwaysPure} import language.experimental.pureFunctions -import caps.unsafe.* +import annotation.unchecked.uncheckedCaptures /** Provides methods to compare types. 
*/ @@ -33,17 +33,18 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling import TypeComparer._ Stats.record("TypeComparer") - private var myContext: Context = initctx.unsafeBox - def comparerContext: Context = myContext.unsafeUnbox + @uncheckedCaptures + private var myContext: Context = initctx + def comparerContext: Context = myContext - protected given [DummySoItsADef]: Context = myContext.unsafeUnbox + protected given [DummySoItsADef]: Context = myContext protected var state: TyperState = compiletime.uninitialized def constraint: Constraint = state.constraint def constraint_=(c: Constraint): Unit = state.constraint = c def init(c: Context): Unit = - myContext = c.unsafeBox + myContext = c state = c.typerState monitored = false GADTused = false diff --git a/tests/pos-with-compiler-cc/dotc/core/TypeOps.scala b/tests/pos-with-compiler-cc/dotc/core/TypeOps.scala index ad71f3100817..717da533a439 100644 --- a/tests/pos-with-compiler-cc/dotc/core/TypeOps.scala +++ b/tests/pos-with-compiler-cc/dotc/core/TypeOps.scala @@ -128,7 +128,7 @@ object TypeOps: pre.isStable || !ctx.phase.isTyper /** Implementation of Types#simplified */ - def simplify(tp: Type, theMap: SimplifyMap @retains(caps.*) | Null)(using Context): Type = { + def simplify(tp: Type, theMap: SimplifyMap @retains(caps.cap) | Null)(using Context): Type = { def mapOver = (if (theMap != null) theMap else new SimplifyMap).mapOver(tp) tp match { case tp: NamedType => diff --git a/tests/pos-with-compiler-cc/dotc/core/Types.scala b/tests/pos-with-compiler-cc/dotc/core/Types.scala index f94e9ac6d645..e4b30888a5dc 100644 --- a/tests/pos-with-compiler-cc/dotc/core/Types.scala +++ b/tests/pos-with-compiler-cc/dotc/core/Types.scala @@ -115,7 +115,7 @@ object Types { private def testProvisional(using Context): Boolean = class ProAcc extends TypeAccumulator[Boolean]: override def apply(x: Boolean, t: Type) = x || test(t, this) - def test(t: Type, theAcc: TypeAccumulator[Boolean] @retains(caps.*) | Null): Boolean = + def test(t: Type, theAcc: TypeAccumulator[Boolean] @retains(caps.cap) | Null): Boolean = if t.mightBeProvisional then t.mightBeProvisional = t match case t: TypeRef => @@ -2158,8 +2158,8 @@ object Types { /** A trait for proto-types, used as expected types in typer */ trait ProtoType extends Type { def isMatchedBy(tp: Type, keepConstraint: Boolean = false)(using Context): Boolean - def fold[T](x: T, ta: TypeAccumulator[T] @retains(caps.*))(using Context): T - def map(tm: TypeMap @retains(caps.*))(using Context): ProtoType + def fold[T](x: T, ta: TypeAccumulator[T] @retains(caps.cap))(using Context): T + def map(tm: TypeMap @retains(caps.cap))(using Context): ProtoType /** If this prototype captures a context, the same prototype except that the result * captures the given context `ctx`. @@ -3773,7 +3773,7 @@ object Types { val status = (x & StatusMask) max (y & StatusMask) val provisional = (x | y) & Provisional (if status == TrueDeps then status else status | provisional).toByte - def compute(status: DependencyStatus, tp: Type, theAcc: TypeAccumulator[DependencyStatus] @retains(caps.*) | Null): DependencyStatus = + def compute(status: DependencyStatus, tp: Type, theAcc: TypeAccumulator[DependencyStatus] @retains(caps.cap) | Null): DependencyStatus = def applyPrefix(tp: NamedType) = if tp.isInstanceOf[SingletonType] && tp.currentSymbol.isStatic then status // Note: a type ref with static symbol can still be dependent since the symbol might be refined in the enclosing type. See pos/15331.scala. 
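The `@uncheckedCaptures` idiom used just above for `TypeComparer.myContext` (and earlier for `Run.finalizeActions` and `Scopes.synthesize`) replaces the old `unsafeBox`/`unsafeUnbox` calls wholesale. As a minimal sketch of the idiom, not part of this patch, assuming a compiler with `language.experimental.captureChecking` enabled (the class name `TaskQueue` is invented for illustration):

import language.experimental.captureChecking
import scala.annotation.unchecked.uncheckedCaptures
import scala.collection.mutable

class TaskQueue:
  // Under capture checking, a mutable field cannot normally hold values
  // with a non-empty capture set; @uncheckedCaptures opts this single
  // field out of the check instead of boxing/unboxing at every access.
  @uncheckedCaptures
  private var pending = mutable.ListBuffer[() => Unit]()

  def defer(task: () => Unit): Unit = pending += task

  def flush(): Unit =
    while pending.nonEmpty do
      val task = pending.remove(0) // no .unsafeUnbox needed anymore
      task()

The shape mirrors `Run.finalizeActions` earlier in this diff: the buffer stores impure `() => Unit` thunks, which is exactly what the checker would otherwise reject in a mutable field.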
@@ -4351,7 +4351,7 @@ object Types { private var myEvalRunId: RunId = NoRunId private var myEvalued: Type = uninitialized - def isGround(acc: TypeAccumulator[Boolean] @retains(caps.*))(using Context): Boolean = + def isGround(acc: TypeAccumulator[Boolean] @retains(caps.cap))(using Context): Boolean = if myGround == 0 then myGround = if acc.foldOver(true, this) then 1 else -1 myGround > 0 @@ -5552,7 +5552,7 @@ object Types { * BiTypeMaps should map capture references to capture references. */ trait BiTypeMap extends TypeMap: - thisMap: BiTypeMap @retains(caps.*) => + thisMap: BiTypeMap @retains(caps.cap) => /** The inverse of the type map as a function */ def inverse(tp: Type): Type @@ -6106,7 +6106,7 @@ object Types { abstract class TypeAccumulator[T](implicit protected val accCtx: Context) extends VariantTraversal with ((T, Type) => T) { - this: TypeAccumulator[T] @annotation.retains(caps.*) => + this: TypeAccumulator[T] @annotation.retains(caps.cap) => def apply(x: T, tp: Type): T diff --git a/tests/pos-with-compiler-cc/dotc/core/tasty/TreeUnpickler.scala b/tests/pos-with-compiler-cc/dotc/core/tasty/TreeUnpickler.scala index b87cde4a6ad1..fcc449af3632 100644 --- a/tests/pos-with-compiler-cc/dotc/core/tasty/TreeUnpickler.scala +++ b/tests/pos-with-compiler-cc/dotc/core/tasty/TreeUnpickler.scala @@ -47,7 +47,7 @@ import dotty.tools.tasty.TastyFormat._ import scala.annotation.constructorOnly import scala.annotation.internal.sharable import language.experimental.pureFunctions -import caps.unsafe.{unsafeUnbox, unsafeBox} +import annotation.unchecked.uncheckedCaptures /** Unpickler for typed trees * @param reader the reader from which to unpickle @@ -1086,15 +1086,15 @@ class TreeUnpickler(reader: TastyReader, def readIndexedStats[T](exprOwner: Symbol, end: Addr, k: (List[Tree], Context) => T = sameTrees)(using Context): T = val buf = new mutable.ListBuffer[Tree] - var curCtx = ctx.unsafeBox + @uncheckedCaptures var curCtx = ctx while currentAddr.index < end.index do - val stat = readIndexedStat(exprOwner)(using curCtx.unsafeUnbox) + val stat = readIndexedStat(exprOwner)(using curCtx) buf += stat stat match - case stat: Import => curCtx = curCtx.unsafeUnbox.importContext(stat, stat.symbol).unsafeBox + case stat: Import => curCtx = curCtx.importContext(stat, stat.symbol) case _ => assert(currentAddr.index == end.index) - k(buf.toList, curCtx.unsafeUnbox) + k(buf.toList, curCtx) def readStats[T](exprOwner: Symbol, end: Addr, k: (List[Tree], Context) => T = sameTrees)(using Context): T = { fork.indexStats(end) diff --git a/tests/pos-with-compiler-cc/dotc/inlines/PrepareInlineable.scala b/tests/pos-with-compiler-cc/dotc/inlines/PrepareInlineable.scala index ecf24ff8264e..9bb0bacd7a78 100644 --- a/tests/pos-with-compiler-cc/dotc/inlines/PrepareInlineable.scala +++ b/tests/pos-with-compiler-cc/dotc/inlines/PrepareInlineable.scala @@ -17,7 +17,7 @@ import NameKinds.{InlineAccessorName, UniqueInlineName} import inlines.Inlines import NameOps._ import Annotations._ -import transform.{AccessProxies, PCPCheckAndHeal, Splicer} +import transform.{AccessProxies, CrossStageSafety, Splicer} import transform.SymUtils.* import config.Printers.inlining import util.Property @@ -294,7 +294,7 @@ object PrepareInlineable { if (code.symbol.flags.is(Inline)) report.error("Macro cannot be implemented with an `inline` method", code.srcPos) Splicer.checkValidMacroBody(code) - new PCPCheckAndHeal(freshStagingContext).transform(body) // Ignore output, only check PCP + new 
CrossStageSafety(freshStagingContext).transform(body) // Ignore output, only check staging levels
     case Block(List(stat), Literal(Constants.Constant(()))) => checkMacro(stat)
     case Block(Nil, expr) => checkMacro(expr)
     case Typed(expr, _) => checkMacro(expr)
diff --git a/tests/pos-with-compiler-cc/dotc/transform/PCPCheckAndHeal.scala b/tests/pos-with-compiler-cc/dotc/transform/CrossStageSafety.scala
similarity index 95%
rename from tests/pos-with-compiler-cc/dotc/transform/PCPCheckAndHeal.scala
rename to tests/pos-with-compiler-cc/dotc/transform/CrossStageSafety.scala
index 90128500374e..ca00c87161ef 100644
--- a/tests/pos-with-compiler-cc/dotc/transform/PCPCheckAndHeal.scala
+++ b/tests/pos-with-compiler-cc/dotc/transform/CrossStageSafety.scala
@@ -22,14 +22,14 @@ import dotty.tools.dotc.util.Property

 import scala.annotation.constructorOnly

-/** Checks that the Phase Consistency Principle (PCP) holds and heals types.
+/** Checks that staging level consistency holds and heals staged types.
  *
- *  Local term references are phase consistent if and only if they are used at the same level as their definition.
+ *  Local term references are level consistent if and only if they are used at the same level as their definition.
  *
  *  Local type references can be used at the level of their definition or lower. If used at a higher level,
  *  it will be healed if possible, otherwise it is inconsistent.
  *
- *  Type healing consists in transforming a phase inconsistent type `T` into `summon[Type[T]].Underlying`.
+ *  Type healing consists in transforming a level inconsistent type `T` into `summon[Type[T]].Underlying`.
  *
  *  As references to types do not necessarily have an associated tree it is not always possible to replace the types directly.
  *  Instead we always generate a type alias for it and place it at the start of the surrounding quote. This also avoids duplication.
@@ -48,7 +48,7 @@ import scala.annotation.constructorOnly
  *  }
  *
  */
-class PCPCheckAndHeal(@constructorOnly ictx: DetachedContext) extends TreeMapWithStages(ictx), Checking, caps.Pure {
+class CrossStageSafety(@constructorOnly ictx: DetachedContext) extends TreeMapWithStages(ictx), Checking, caps.Pure {
   import tpd._

   private val InAnnotation = Property.Key[Unit]()
@@ -96,9 +96,9 @@ class PCPCheckAndHeal(@constructorOnly ictx: DetachedContext) extends TreeMapWit
         super.transform(tree)
     }

-  /** Transform quoted trees while maintaining phase correctness */
+  /** Transform quoted trees while maintaining level correctness */
   override protected def transformQuotation(body: Tree, quote: Apply)(using Context): Tree = {
-    val taggedTypes = new PCPCheckAndHeal.QuoteTypeTags(quote.span)
+    val taggedTypes = new CrossStageSafety.QuoteTypeTags(quote.span)

     if (ctx.property(InAnnotation).isDefined)
       report.error("Cannot have a quote in an annotation", quote.srcPos)
@@ -215,7 +215,7 @@ class PCPCheckAndHeal(@constructorOnly ictx: DetachedContext) extends TreeMapWit
       mapOver(tp)
   }

-  /** Check phase consistency of terms and heal inconsistent type references. */
+  /** Check level consistency of terms and heal inconsistent type references. */
   private def healTypeOfTerm(pos: SrcPos)(using Context) = new TypeMap {
     def apply(tp: Type): Type = tp match
@@ -275,7 +275,7 @@ class PCPCheckAndHeal(@constructorOnly ictx: DetachedContext) extends TreeMapWit

 }

-object PCPCheckAndHeal {
+object CrossStageSafety {
   import tpd._

   class QuoteTypeTags(span: Span)(using DetachedContext) extends caps.Pure {
diff --git a/tests/pos-with-compiler-cc/dotc/transform/Splicing.scala b/tests/pos-with-compiler-cc/dotc/transform/Splicing.scala
index df6128d249d2..3a3aa7e89445 100644
--- a/tests/pos-with-compiler-cc/dotc/transform/Splicing.scala
+++ b/tests/pos-with-compiler-cc/dotc/transform/Splicing.scala
@@ -191,7 +191,7 @@ class Splicing extends MacroTransform:
   private var refBindingMap = mutable.Map.empty[Symbol, (Tree, Symbol)]
   /** Reference to the `Quotes` instance of the current level 1 splice */
   private var quotes: Tree | Null = null // TODO: add to the context
-  private var healedTypes: PCPCheckAndHeal.QuoteTypeTags | Null = null // TODO: add to the context
+  private var healedTypes: CrossStageSafety.QuoteTypeTags | Null = null // TODO: add to the context

   def transformSplice(tree: tpd.Tree, tpe: Type, holeIdx: Int)(using Context): tpd.Tree =
     assert(level == 0)
@@ -254,7 +254,7 @@ class Splicing extends MacroTransform:
   private def transformLevel0QuoteContent(tree: Tree)(using Context): Tree =
     // transform and collect new healed types
     val old = healedTypes
-    healedTypes = new PCPCheckAndHeal.QuoteTypeTags(tree.span)
+    healedTypes = new CrossStageSafety.QuoteTypeTags(tree.span)
     val tree1 = transform(tree)
     val newHealedTypes = healedTypes.nn.getTypeTags
     healedTypes = old
diff --git a/tests/pos-with-compiler-cc/dotc/transform/Staging.scala b/tests/pos-with-compiler-cc/dotc/transform/Staging.scala
index 12c5c8215cad..c2c6f76cd0fc 100644
--- a/tests/pos-with-compiler-cc/dotc/transform/Staging.scala
+++ b/tests/pos-with-compiler-cc/dotc/transform/Staging.scala
@@ -15,7 +15,7 @@ import dotty.tools.dotc.transform.TreeMapWithStages._

-/** Checks that the Phase Consistency Principle (PCP) holds and heals types.
+/** Checks that staging level consistency holds and heals staged types.
  *
  *  Type healing consists in transforming a phase inconsistent type `T` into `${ implicitly[Type[T]] }`.
  */
@@ -32,12 +32,12 @@ class Staging extends MacroTransform {

   override def checkPostCondition(tree: Tree)(using Context): Unit =
     if (ctx.phase <= splicingPhase) {
-      // Recheck that PCP holds but do not heal any inconsistent types as they should already have been heald
+      // Recheck that staging levels hold but do not heal any inconsistent types as they should already have been healed
       tree match {
         case PackageDef(pid, _) if tree.symbol.owner == defn.RootClass =>
           val stagingCtx = freshStagingContext
-          val checker = new PCPCheckAndHeal(stagingCtx) {
-            // !cc! type error is checker is defined as val checker = new PCPCheckAndHeal { ... }
+          val checker = new CrossStageSafety(stagingCtx) {
+            // !cc! type error if checker is defined as val checker = new CrossStageSafety { ...
} override protected def tryHeal(sym: Symbol, tp: TypeRef, pos: SrcPos)(using Context): TypeRef = { def symStr = if (sym.is(ModuleClass)) sym.sourceModule.show @@ -72,7 +72,7 @@ class Staging extends MacroTransform { protected def newTransformer(using Context): Transformer = new Transformer { override def transform(tree: tpd.Tree)(using Context): tpd.Tree = - new PCPCheckAndHeal(ctx.detach).transform(tree) + new CrossStageSafety(ctx.detach).transform(tree) } } diff --git a/tests/pos-with-compiler-cc/dotc/transform/TreeChecker.scala b/tests/pos-with-compiler-cc/dotc/transform/TreeChecker.scala index e7607d8e59c6..b0d7fb89985f 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/TreeChecker.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/TreeChecker.scala @@ -458,7 +458,7 @@ class TreeChecker extends Phase with SymTransformer { val inliningPhase = ctx.base.inliningPhase inliningPhase.exists && ctx.phase.id > inliningPhase.id if isAfterInlining then - // The staging phase destroys in PCPCheckAndHeal the property that + // The staging phase destroys in CrossStageSafety the property that // tree.expr.tpe <:< pt1. A test case where this arises is run-macros/enum-nat-macro. // We should follow up why this happens. If the problem is fixed, we can // drop the isAfterInlining special case. To reproduce the problem, just diff --git a/tests/pos-with-compiler-cc/dotc/transform/TreeMapWithStages.scala b/tests/pos-with-compiler-cc/dotc/transform/TreeMapWithStages.scala index c721c838d316..9dd1000af954 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/TreeMapWithStages.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/TreeMapWithStages.scala @@ -18,7 +18,7 @@ import scala.annotation.constructorOnly * and `l == -1` is code inside a top level splice (in an inline method). * @param levels a stacked map from symbols to the levels in which they were defined */ -abstract class TreeMapWithStages(@constructorOnly ictx: Context) extends TreeMapWithImplicits { this: {} TreeMapWithStages => +abstract class TreeMapWithStages(@constructorOnly ictx: Context) extends TreeMapWithImplicits { this: TreeMapWithStages^{} => import tpd._ import TreeMapWithStages._ diff --git a/tests/pos-with-compiler-cc/dotc/transform/init/Semantic.scala b/tests/pos-with-compiler-cc/dotc/transform/init/Semantic.scala index 14873938bf78..6ddc0e18d995 100644 --- a/tests/pos-with-compiler-cc/dotc/transform/init/Semantic.scala +++ b/tests/pos-with-compiler-cc/dotc/transform/init/Semantic.scala @@ -18,7 +18,6 @@ import Errors.* import scala.collection.mutable import scala.annotation.tailrec -import caps.unsafe.unsafeBoxFunArg object Semantic: @@ -1670,7 +1669,7 @@ object Semantic: } // initialize super classes after outers are set - tasks.foreach(((task: () => Unit) => task()).unsafeBoxFunArg) + tasks.foreach((task: () => Unit) => task()) // !cc! .asInstanceOf needed to convert from `(() => Unit) -> Unit` to `(box () => Unit) -> Unit`. end if diff --git a/tests/pos-with-compiler-cc/dotc/typer/Namer.scala b/tests/pos-with-compiler-cc/dotc/typer/Namer.scala index 548f645a23d9..8487192b9d8a 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/Namer.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/Namer.scala @@ -29,7 +29,7 @@ import TypeErasure.erasure import reporting._ import config.Feature.sourceVersion import config.SourceVersion._ - +import annotation.unchecked.uncheckedCaptures /** This class creates symbols from definitions and imports and gives them * lazy types. 
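Several hunks above also migrate capture-set syntax: the prefix forms `{} TreeMapWithStages` and the universal capability `caps.*` become the postfix forms `TreeMapWithStages^{}` and `caps.cap`. As a rough standalone illustration of the new spellings, not taken from this patch (`Logger` and `greeter` are made-up names) and again assuming the experimental capture-checking import:

import language.experimental.captureChecking

trait Logger:
  def log(msg: String): Unit

// `Logger^` abbreviates `Logger^{cap}`, a Logger retaining the universal
// capability (written `caps.*` before this patch, `caps.cap` after it).
// `() ->{l} Unit` is the postfix capturing-function type replacing the
// old prefix form `{l} () -> Unit`: pure except that it retains `l`.
def greeter(l: Logger^): () ->{l} Unit =
  () => l.log("hello")

The same postfix notation is what the `tests/pos/boxmap-paper.scala` hunk below rewrites its `{io} () -> Int` types to.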
@@ -930,6 +930,8 @@ class Namer { typer: Typer => class TypeDefCompleter(original: TypeDef)(ictx: DetachedContext) extends Completer(original)(ictx) with TypeParamsCompleter { private var myTypeParams: List[TypeSymbol] | Null = null + + @uncheckedCaptures private var nestedCtx: Context | Null = null assert(!original.isClassDef) diff --git a/tests/pos-with-compiler-cc/dotc/typer/ProtoTypes.scala b/tests/pos-with-compiler-cc/dotc/typer/ProtoTypes.scala index 838cca3f6fa7..77fd2c1d6d66 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/ProtoTypes.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/ProtoTypes.scala @@ -125,8 +125,8 @@ object ProtoTypes { /** A trait for prototypes that match all types */ trait MatchAlways extends ProtoType, caps.Pure { def isMatchedBy(tp1: Type, keepConstraint: Boolean)(using Context): Boolean = true - def map(tm: TypeMap @retains(caps.*))(using Context): ProtoType = this - def fold[T](x: T, ta: TypeAccumulator[T] @retains(caps.*))(using Context): T = x + def map(tm: TypeMap @retains(caps.cap))(using Context): ProtoType = this + def fold[T](x: T, ta: TypeAccumulator[T] @retains(caps.cap))(using Context): T = x override def toString: String = getClass.toString } @@ -239,8 +239,8 @@ object ProtoTypes { override def unusableForInference(using Context): Boolean = memberProto.unusableForInference - def map(tm: TypeMap @retains(caps.*))(using Context): SelectionProto = derivedSelectionProto(name, tm(memberProto), compat) - def fold[T](x: T, ta: TypeAccumulator[T] @retains(caps.*))(using Context): T = ta(x, memberProto) + def map(tm: TypeMap @retains(caps.cap))(using Context): SelectionProto = derivedSelectionProto(name, tm(memberProto), compat) + def fold[T](x: T, ta: TypeAccumulator[T] @retains(caps.cap))(using Context): T = ta(x, memberProto) override def deepenProto(using Context): SelectionProto = derivedSelectionProto(name, memberProto.deepenProto, compat) @@ -542,10 +542,10 @@ object ProtoTypes { override def toString: String = s"FunProto(${args mkString ","} => $resultType)" - def map(tm: TypeMap @retains(caps.*))(using Context): FunProto = + def map(tm: TypeMap @retains(caps.cap))(using Context): FunProto = derivedFunProto(args, tm(resultType), typer) - def fold[T](x: T, ta: TypeAccumulator[T] @retains(caps.*))(using Context): T = + def fold[T](x: T, ta: TypeAccumulator[T] @retains(caps.cap))(using Context): T = ta(ta.foldOver(x, typedArgs().tpes), resultType) override def deepenProto(using Context): FunProto = @@ -600,9 +600,9 @@ object ProtoTypes { override def unusableForInference(using Context): Boolean = argType.unusableForInference || resType.unusableForInference - def map(tm: TypeMap @retains(caps.*))(using Context): ViewProto = derivedViewProto(tm(argType), tm(resultType)) + def map(tm: TypeMap @retains(caps.cap))(using Context): ViewProto = derivedViewProto(tm(argType), tm(resultType)) - def fold[T](x: T, ta: TypeAccumulator[T] @retains(caps.*))(using Context): T = + def fold[T](x: T, ta: TypeAccumulator[T] @retains(caps.cap))(using Context): T = ta(ta(x, argType), resultType) override def deepenProto(using Context): ViewProto = @@ -653,10 +653,10 @@ object ProtoTypes { override def unusableForInference(using Context): Boolean = targs.exists(_.tpe.unusableForInference) - def map(tm: TypeMap @retains(caps.*))(using Context): PolyProto = + def map(tm: TypeMap @retains(caps.cap))(using Context): PolyProto = derivedPolyProto(targs, tm(resultType)) - def fold[T](x: T, ta: TypeAccumulator[T] @retains(caps.*))(using Context): T = + def fold[T](x: T, ta: 
TypeAccumulator[T] @retains(caps.cap))(using Context): T = ta(ta.foldOver(x, targs.tpes), resultType) override def deepenProto(using Context): PolyProto = @@ -834,7 +834,7 @@ object ProtoTypes { /** Approximate occurrences of parameter types and uninstantiated typevars * by wildcard types. */ - private def wildApprox(tp: Type, theMap: WildApproxMap @retains(caps.*) | Null, seen: Set[TypeParamRef], internal: Set[TypeLambda])(using Context): Type = + private def wildApprox(tp: Type, theMap: WildApproxMap @retains(caps.cap) | Null, seen: Set[TypeParamRef], internal: Set[TypeLambda])(using Context): Type = tp match { case tp: NamedType => // default case, inlined for speed val isPatternBoundTypeRef = tp.isInstanceOf[TypeRef] && tp.symbol.isPatternBound diff --git a/tests/pos-with-compiler-cc/dotc/typer/TypeAssigner.scala b/tests/pos-with-compiler-cc/dotc/typer/TypeAssigner.scala index 2edcc37b613d..cb7bf2f2505d 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/TypeAssigner.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/TypeAssigner.scala @@ -12,7 +12,6 @@ import collection.mutable import reporting._ import Checking.{checkNoPrivateLeaks, checkNoWildcard} import cc.CaptureSet -import caps.unsafe.unsafeBoxFunArg trait TypeAssigner { import tpd.* @@ -27,7 +26,7 @@ trait TypeAssigner { qual.isEmpty || sym.name == qual || sym.is(Module) && sym.name.stripModuleClassSuffix == qual) - ctx.outersIterator.map(((ctx: Context) => ctx.owner).unsafeBoxFunArg).find(qualifies) match { + ctx.outersIterator.map((ctx: Context) => ctx.owner).find(qualifies) match { case Some(c) if packageOK || !c.is(Package) => c case _ => diff --git a/tests/pos-with-compiler-cc/dotc/typer/Typer.scala b/tests/pos-with-compiler-cc/dotc/typer/Typer.scala index aa286446a334..0baae1730f4a 100644 --- a/tests/pos-with-compiler-cc/dotc/typer/Typer.scala +++ b/tests/pos-with-compiler-cc/dotc/typer/Typer.scala @@ -54,7 +54,8 @@ import config.Config import language.experimental.pureFunctions import scala.annotation.constructorOnly -import caps.unsafe.{unsafeBox, unsafeUnbox} +import annotation.unchecked.uncheckedCaptures + object Typer { @@ -1673,11 +1674,11 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * and the patterns of the Match tree and the MatchType correspond. 
*/ def typedDependentMatchFinish(tree: untpd.Match, sel: Tree, wideSelType: Type, cases: List[untpd.CaseDef], pt: MatchType)(using Context): Tree = { - var caseCtx = ctx.unsafeBox + @uncheckedCaptures var caseCtx = ctx val cases1 = tree.cases.zip(pt.cases) .map { case (cas, tpe) => - val case1 = typedCase(cas, sel, wideSelType, tpe)(using caseCtx.unsafeUnbox) - caseCtx = Nullables.afterPatternContext(sel, case1.pat).unsafeBox + val case1 = typedCase(cas, sel, wideSelType, tpe)(using caseCtx) + caseCtx = Nullables.afterPatternContext(sel, case1.pat) case1 } .asInstanceOf[List[CaseDef]] @@ -1692,10 +1693,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } def typedCases(cases: List[untpd.CaseDef], sel: Tree, wideSelType: Type, pt: Type)(using Context): List[CaseDef] = - var caseCtx = ctx.unsafeBox + @uncheckedCaptures var caseCtx = ctx cases.mapconserve { cas => - val case1 = typedCase(cas, sel, wideSelType, pt)(using caseCtx.unsafeUnbox) - caseCtx = Nullables.afterPatternContext(sel, case1.pat).unsafeBox + val case1 = typedCase(cas, sel, wideSelType, pt)(using caseCtx) + caseCtx = Nullables.afterPatternContext(sel, case1.pat) case1 } diff --git a/tests/pos/17077.scala b/tests/pos/17077.scala new file mode 100644 index 000000000000..434bba9b4e8a --- /dev/null +++ b/tests/pos/17077.scala @@ -0,0 +1,20 @@ +class MyProduct extends Product: + def foo = ??? + override def productArity: Int = 1 + override def productElement(n: Int): Any = 42 + override def canEqual(that: Any): Boolean = that.isInstanceOf[MyProduct] + def _1 = 42 + +object MyProductUnapply: + def unapply(x: Int): MyProduct = MyProduct() + +@main def test = + val v: String | Int = "Blop" + val res = + v match + case MyProductUnapply(y) => y // works: a product of arity 1 is accepted as the return type of unapply + // see UnapplyInvalidReturnType in messages.scala + // and https://docs.scala-lang.org/scala3/reference/changed-features/pattern-matching.html#fixed-arity-extractors + case _ => 42 + println(res) + diff --git a/tests/pos/argDenot-alpakka.min.scala b/tests/pos/argDenot-alpakka.min.scala new file mode 100644 index 000000000000..0e509be59cfd --- /dev/null +++ b/tests/pos/argDenot-alpakka.min.scala @@ -0,0 +1,9 @@ +import scala.annotation.unchecked.uncheckedVariance as uV + +trait Test: + def test[S] = + val a: (([O] =>> Foo[O, S]) @uV)[Int] = ??? + a.m() + +class Foo[X, Y]: + def m(): Y = ??? diff --git a/tests/pos/argDenot-alpakka.scala b/tests/pos/argDenot-alpakka.scala new file mode 100644 index 000000000000..41d6ad52ac97 --- /dev/null +++ b/tests/pos/argDenot-alpakka.scala @@ -0,0 +1,21 @@ +import scala.annotation.unchecked.uncheckedVariance as uV + +trait Test: + def split[I, M](in: Flow[I, Byte, M]): SubFlow[Byte, M, in.Repr] + def test = + split(new Flow[Int, Byte, Unit]) + .via[Char] + .merge + .filter() + +trait FlowOps[+Out, +Mat]: + type Repr[+O] <: FlowOps[O, Mat] { type Repr[+O] = FlowOps.this.Repr[O] } + def via[O]: Repr[O] = ??? + def filter(): Repr[Out] = ??? + +class Flow[-In, +Out, +Mat] extends FlowOps[Out, Mat]: + type Repr[+O] = Flow[In @uV, O, Mat @uV] + +class SubFlow[+Out, +Mat, +F[+_]] extends FlowOps[Out, Mat]: + type Repr[+O] = SubFlow[O, Mat @uV, F @uV] + def merge: F[Out] = ??? 
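The comment in `tests/pos/17077.scala` above points at the fixed-arity extractor rules; since that test leans on the product-match case, here is a compact standalone illustration of the rule (hypothetical names, not part of the patch):

// An unapply may return any Product with _1.._N members; such a
// "product match" has fixed arity and needs no Option/isEmpty/get.
case class Split(whole: Int, half: Int)

object Halves:
  def unapply(n: Int): Split = Split(n, n / 2)

@main def halvesDemo =
  val Halves(w, h) = 10  // irrefutable: a product match always succeeds
  println(s"$w and $h")  // prints: 10 and 5

Because the result type is statically a `Product` of arity 2, the pattern `Halves(w, h)` binds `_1` and `_2` directly; this is the same mechanism that lets `MyProductUnapply` above return a `MyProduct` of arity 1.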
diff --git a/tests/pos/boxmap-paper.scala b/tests/pos/boxmap-paper.scala index eb6e5f48d81c..7c2c005e6a61 100644 --- a/tests/pos/boxmap-paper.scala +++ b/tests/pos/boxmap-paper.scala @@ -13,25 +13,25 @@ def map[A, B](c: Cell[A])(f: A => B): Cell[B] def pureMap[A, B](c: Cell[A])(f: A -> B): Cell[B] = c[Cell[B]]((x: A) => cell(f(x))) -def lazyMap[A, B](c: Cell[A])(f: A => B): {f} () -> Cell[B] +def lazyMap[A, B](c: Cell[A])(f: A => B): () ->{f} Cell[B] = () => c[Cell[B]]((x: A) => cell(f(x))) trait IO: def print(s: String): Unit -def test(io: {*} IO) = +def test(io: IO^{cap}) = - val loggedOne: {io} () -> Int = () => { io.print("1"); 1 } + val loggedOne: () ->{io} Int = () => { io.print("1"); 1 } - val c: Cell[{io} () -> Int] - = cell[{io} () -> Int](loggedOne) + val c: Cell[() ->{io} Int] + = cell[() ->{io} Int](loggedOne) - val g = (f: {io} () -> Int) => + val g = (f: () ->{io} Int) => val x = f(); io.print(" + ") val y = f(); io.print(s" = ${x + y}") - val r = lazyMap[{io} () -> Int, Unit](c)(f => g(f)) - val r2 = lazyMap[{io} () -> Int, Unit](c)(g) + val r = lazyMap[() ->{io} Int, Unit](c)(f => g(f)) + val r2 = lazyMap[() ->{io} Int, Unit](c)(g) val r3 = lazyMap(c)(g) val _ = r() val _ = r2() diff --git a/tests/pos/constr-proxy-shadowing.scala b/tests/pos/constr-proxy-shadowing.scala new file mode 100644 index 000000000000..3a3b7cdfec2a --- /dev/null +++ b/tests/pos/constr-proxy-shadowing.scala @@ -0,0 +1,7 @@ +class Number(n: Int) +val x = Number(3) + +class Seq(n: Int) +val y = Seq + + diff --git a/tests/pos/i10242.scala b/tests/pos/i10242.scala new file mode 100644 index 000000000000..707c4c9f0a0c --- /dev/null +++ b/tests/pos/i10242.scala @@ -0,0 +1,32 @@ +// https://github.com/lampepfl/dotty/issues/10242 +type Foo[A, B <: A] = A + +type Bar[A] = A match { + case Foo[String, x] => Unit +} + +import scala.compiletime.ops.int.* + +sealed trait HList +final case class HCons[H <: Int & Singleton, T <: HList](head: H, tail: T) + extends HList +sealed trait HNil extends HList +case object HNil extends HNil + +sealed trait Tensor[T, S <: HList] + +object tf: + def zeros[S <: HList](shape: S): Tensor[Float, S] = ??? + + type NumElements[X <: HList] <: Int = X match + case HNil => 1 + case HCons[head, tail] => head * NumElements[tail] + + def reshape[T, From <: HList, To <: HList]( + tensor: Tensor[T, From], + shape: To + )(using NumElements[From] =:= NumElements[To]): Tensor[T, To] = ??? 
+ +object test: + val x = HCons(1, HCons(2, HNil)) + val y = tf.reshape(tf.zeros(x), HCons(2, HCons(1, HNil))) diff --git a/tests/pos/i10369.scala b/tests/pos/i10369.scala new file mode 100644 index 000000000000..8689c2833664 --- /dev/null +++ b/tests/pos/i10369.scala @@ -0,0 +1,15 @@ +type Upgrade[T] = T match + case Int => Double + case Char => String + case Boolean => Boolean + +val upgrade: [t] => t => Upgrade[t] = new PolyFunction: + def apply[T](x: T): Upgrade[T] = x match + case x: Int => x.toDouble + case x: Char => x.toString + case x: Boolean => !x + +val upgrade2: [t] => t => Upgrade[t] = [t] => (x: t) => x match + case x: Int => x.toDouble + case x: Char => x.toString + case x: Boolean => !x diff --git a/tests/pos/i11022.scala b/tests/pos/i11022.scala new file mode 100644 index 000000000000..d020669049c5 --- /dev/null +++ b/tests/pos/i11022.scala @@ -0,0 +1,3 @@ +// scalac: -Werror -deprecation +@deprecated("no CaseClass") +case class CaseClass(rgb: Int) diff --git a/tests/neg/i11170a.scala b/tests/pos/i11170a.scala similarity index 82% rename from tests/neg/i11170a.scala rename to tests/pos/i11170a.scala index 5268c506f33f..bbf627ce8864 100644 --- a/tests/neg/i11170a.scala +++ b/tests/pos/i11170a.scala @@ -23,6 +23,6 @@ package cpackage { import apackage._ import bpackage._ - case class C(override protected val x: Int) extends A with B // error + case class C(override protected val x: Int) extends A with B case class C2(override val x: Int) extends A2 with B2 -} \ No newline at end of file +} diff --git a/tests/pos/i11223.scala b/tests/pos/i11223.scala new file mode 100644 index 000000000000..a3583ca955c4 --- /dev/null +++ b/tests/pos/i11223.scala @@ -0,0 +1,21 @@ +object T: + trait M[F[_]] + trait EE[A, B] + trait AE[A] + trait DE[A] + trait CE[A] + + type A1 = M[AE] + type D1 = M[DE] | M[[a] =>> EE[Int, a]] + type C1 = M[CE] + + trait F[+R, +A]: + def <+>[U, B](b: F[U, B]): F[R | U, A] = null + def int: F[R | A1, Int] + + def d1[A](f: => A): F[D1, A] = null + def m[R, A](f: F[R | C1, A]): F[R | C1, A] = null + + def x = m { // adding type annotation here helps (m[D1 | A1 | C1, Int]) + d1(123).int <+> null + } diff --git a/tests/pos/i11255.scala b/tests/pos/i11255.scala new file mode 100644 index 000000000000..6dd1e100e696 --- /dev/null +++ b/tests/pos/i11255.scala @@ -0,0 +1,26 @@ +class A +class B extends A + +object O: + opaque type Id[T] = T + extension [T](id: Id[T]) def get: T = id + def f[S <: A, T](ff: S => T): Id[S => T] = ??? + def g[S <: A, T](ff: S => T): Option[S => T] = ??? + def h[S, T](ff: S => T): Id[S => T] = ??? + +object example: + import O._ + + val a = new A + val b = new B + + val f1 = f((a: A) => 0) + f1.get.apply(a) + val f2 = f((b: B) => 0) + f2.get.apply(b) + + val g1 = g((a: A) => 0) + g1.get.apply(a) + + val h1 = h((a: A) => 0) + h1.get.apply(a) diff --git a/tests/pos/i11681.scala b/tests/pos/i11681.scala new file mode 100644 index 000000000000..587285911610 --- /dev/null +++ b/tests/pos/i11681.scala @@ -0,0 +1,28 @@ +// https://github.com/lampepfl/dotty/issues/11681 + +import scala.collection.Factory + +final case class Gen[+A]() { + def take[C[X] <: Iterable[X], B]( + n: Int + )(implicit w: A <:< C[B], f: Factory[B, C[B]]): Gen[C[B]] = + Gen() +} + +object Usage { + def expected: Gen[List[Int]] = + Gen[List[Int]]().take(3) +} + +object example: + type G[A] + given G[H[Int]] = ??? + + trait H[X] + object H { + given H[Int] = ??? + } + + def take[C[_]](using w: G[C[Int]], f: C[Int]) = ??? 
+ + def test = take diff --git a/tests/pos/i12478.scala b/tests/pos/i12478.scala new file mode 100644 index 000000000000..d1e247ae4e68 --- /dev/null +++ b/tests/pos/i12478.scala @@ -0,0 +1,19 @@ +sealed trait Foo[T] + +object Foo: + case class Bar[F[_]](fu: List[F[Unit]]) extends Foo[F[Unit]] + +class Test: + def id1[T1](foo1: Foo[T1]): Foo[T1] = foo1 match + case Foo.Bar(fu) => + Foo.Bar(fu) + + def id2[T2](foo2: Foo[T2]): Foo[T2] = foo2 match + case bar2 @ (_: Foo.Bar[f]) => + val fu2 = bar2.fu + Foo.Bar(fu2) + + def id3[T3](foo3: Foo[T3]): Foo[T3] = foo3 match + case bar3 @ Foo.Bar(_) => + val fu3 = bar3.fu + Foo.Bar(fu3) diff --git a/tests/pos/i12663.scala b/tests/pos/i12663.scala new file mode 100644 index 000000000000..befbc65316cb --- /dev/null +++ b/tests/pos/i12663.scala @@ -0,0 +1,72 @@ +// https://github.com/lampepfl/dotty/issues/12663 + +final class HookComponentBuilder[Ctx, CtxFn[_]] { + def asd[A](f: Ctx => A): A = ??? + def asd[A](f: CtxFn[A]): A = ??? +} + +object HookCtx { + case class P1[P, H1](props: P, hook1: H1) +} + +object HookCtxFn { + sealed trait P1[P, H1] { type Fn[A] = (P, H1) => A } +} + +object Test { + val b: HookComponentBuilder[ + HookCtx.P1[String, Int], + HookCtxFn.P1[String, Int]#Fn + ] = ??? + + b.asd($ => $.props.length + $.hook1) + b.asd((props, hook1) => props.length + hook1) +} + +final class HookComponentBuilder2[Ctx, CtxFn[_]] { + def asd[A](f: Ctx => A): A = ??? + def asd[A](f: CtxFn[A]): A = ??? +} + +object HookCtx2 { + case class P1[P, H1](props: P, hook1: H1) +} + +object HookCtxFn2 { + type P1[P, H1] = [A] =>> (P, H1) => A +} + +object Test2 { + val b: HookComponentBuilder2[ + HookCtx2.P1[String, Int], + HookCtxFn2.P1[String, Int] + ] = ??? + + b.asd($ => $.props.length + $.hook1) + b.asd((props, hook1) => props.length + hook1) +} + +final class Builder[CtxFn[_]] { + def asd[A](f: Int => A): A = ??? + def asd[A](f: CtxFn[A]): A = ??? +} + +object Test3 { + val b1: Builder[[Z] =>> (String, Int) => Z] = ??? + b1.asd(identity) + b1.asd(_.length + _) + + sealed trait Scala2TL { type F[Z] = (String, Int) => Z } + val b2: Builder[Scala2TL#F] = b1 + b2.asd(identity) + b2.asd(_.length + _) + + type Scala3TL = [Z] =>> (String, Int) => Z + val b3: Builder[Scala3TL] = b1 + b3.asd(identity) + b3.asd(_.length + _) + + val b4: Builder[({ type F[Z] = (String, Int) => Z })#F] = b1 + b4.asd(identity) + b4.asd(_.length + _) +} diff --git a/tests/pos/i12679.scala b/tests/pos/i12679.scala new file mode 100644 index 000000000000..fed62c72dd42 --- /dev/null +++ b/tests/pos/i12679.scala @@ -0,0 +1,9 @@ +// https://github.com/lampepfl/dotty/issues/12679 + +object Example: + def foo[F[_]](qux: String, quux: String = ""): F[Unit] = ??? + + def foo[F[_]](qux: Boolean): F[Unit] = ??? 
+ + def example[F[_]](maybeQux: Option[String], bool: Boolean) = + maybeQux.fold(foo[F](bool))(foo[F](_)) diff --git a/tests/pos/i13332super.scala b/tests/pos/i13332super.scala deleted file mode 100644 index 1ab695d5d4a5..000000000000 --- a/tests/pos/i13332super.scala +++ /dev/null @@ -1,14 +0,0 @@ -import scala.deriving.Mirror - -trait MixinAMini { - lazy val mixinB = new MixinBMini() {} -} -trait MixinBMini { - sealed trait Lst // crucially, no companion is defined - case class Cn(h: Int, t: Lst) extends Lst - case object Nl extends Lst -} -trait SubABMini extends MixinAMini with MixinBMini { - val mirror_SubABMini_super_mixinB_Lst = - summon[Mirror.Of[SubABMini.super[MixinAMini].mixinB.Lst]] -} diff --git a/tests/pos/i13558.scala b/tests/pos/i13558.scala new file mode 100644 index 000000000000..0c8be379f6a9 --- /dev/null +++ b/tests/pos/i13558.scala @@ -0,0 +1,32 @@ +package testcode +import language.experimental.relaxedExtensionImports + +class A + +class B + +object ExtensionA { + extension (self: A) { + def id = "A" + } +} +object ExtensionB { + extension (self: B) { + def id = "B" + } +} + +object Main { + def main1(args: Array[String]): Unit = { + import ExtensionB._ + import ExtensionA._ + val a = A() + println(a.id) // now ok + } + def main2(args: Array[String]): Unit = { + import ExtensionA._ + import ExtensionB._ + val a = A() + println(a.id) // now ok + } +} \ No newline at end of file diff --git a/tests/pos/i14096.scala b/tests/pos/i14096.scala new file mode 100644 index 000000000000..59365231b121 --- /dev/null +++ b/tests/pos/i14096.scala @@ -0,0 +1,7 @@ +// https://github.com/lampepfl/dotty/issues/14096 +object Test: + object Forte: + def test[T](i: Int, config: String = ""): Int = 1 + def test[T](i: String): Int = 2 + + Forte.test[Int](1) diff --git a/tests/pos/i14218.http4s.scala b/tests/pos/i14218.http4s.scala new file mode 100644 index 000000000000..774a5432177e --- /dev/null +++ b/tests/pos/i14218.http4s.scala @@ -0,0 +1,22 @@ +// A minimisation from http4s, +// which broke while implementing the fix for i14218. 
+ +final class Bar[+F[_]] +object Bar: + def empty[F[_]]: Bar[F] = new Bar[Nothing] + +final class Foo[+F[_]] + +object Foo: + def apply[F[_]](bar: Bar[F] = Bar.empty): Foo[F] = new Foo + +class Test: + def test[F[_]]: Foo[F] = Foo[F]() + +//-- [E007] Type Mismatch Error +//12 | def test[F[_]]: Foo[F] = Foo[F]() +// | ^^^^^^ +// | Found: Bar[[_] =>> Any] +// | Required: Bar[F] +// | +// | where: F is a type in method t1 with bounds <: [_] =>> Any diff --git a/tests/pos/i14218.scala b/tests/pos/i14218.scala new file mode 100644 index 000000000000..e35aec94b3bd --- /dev/null +++ b/tests/pos/i14218.scala @@ -0,0 +1,15 @@ +class Pet +class Cat extends Pet + +class Z1[ S1 <: Pet](val fn: S1 => Unit) +class Z2[ S2 ](val fn: S2 => Unit) +class Z3[-S3 <: Pet](val fn: S3 => Unit) + +abstract class Test: + def test = + val r1 = new Z1((_: Pet) => ()); eat[Z1[Pet]](r1) // the case: using the parameter bound in situ infers Z[Nothing] + val r2 = new Z2((_: Pet) => ()); eat[Z2[Pet]](r2) // counter-example: infers as desired without an upper bound + val r3 = new Z3((_: Pet) => ()); eat[Z3[Pet]](r3) // workaround: declare it contravariant + val r4 = new Z1((_: Cat) => ()); eat[Z1[Cat]](r4) // counter-example: infers as desired with a subtype + + def eat[T](x: T): Unit diff --git a/tests/pos/i14271.scala b/tests/pos/i14271.scala new file mode 100644 index 000000000000..8f46940afd09 --- /dev/null +++ b/tests/pos/i14271.scala @@ -0,0 +1,14 @@ +// https://github.com/lampepfl/dotty/issues/14271 +class Bound[T] +class MyClass[T <: Bound[T]] + +class Container[V] { + def doSth(): V = ??? +} + +def bug() = { + val m = new Container[MyClass[_]] + if (true) { + m.doSth() + } +} diff --git a/tests/pos/i14278.scala b/tests/pos/i14278.scala new file mode 100644 index 000000000000..ebc9376fbad5 --- /dev/null +++ b/tests/pos/i14278.scala @@ -0,0 +1,6 @@ +// https://github.com/lampepfl/dotty/issues/14278 +class Foo + +extension (foo: Foo) + def patch(arg: List[Int], arg2: Int = 0): Unit = {} + def patch(arg: Int): Unit = patch(List(arg)) diff --git a/tests/pos/i14351.scala b/tests/pos/i14351.scala new file mode 100644 index 000000000000..556e6b1ed64e --- /dev/null +++ b/tests/pos/i14351.scala @@ -0,0 +1 @@ +val p: (Option[Int], Option[String]) = (1,"foo").map([T] => (x: T) => Option.apply[T](x)) diff --git a/tests/pos/i14367.scala b/tests/pos/i14367.scala index d74f0aa8373e..3319e705b741 100644 --- a/tests/pos/i14367.scala +++ b/tests/pos/i14367.scala @@ -1,5 +1,5 @@ def m(i: Int*) = i.sum -val f1 = m +val f1: Seq[Int] => Int = m val f2 = i => m(i*) def n(i: Seq[Int]) = i.sum diff --git a/tests/pos/i14567.scala b/tests/pos/i14567.scala deleted file mode 100644 index fe47279c07b1..000000000000 --- a/tests/pos/i14567.scala +++ /dev/null @@ -1,2 +0,0 @@ -case class Foo(x: Int)(xs: String*) -def test = Foo(3) diff --git a/tests/pos/i14642.scala b/tests/pos/i14642.scala new file mode 100644 index 000000000000..b69da7d8d6d7 --- /dev/null +++ b/tests/pos/i14642.scala @@ -0,0 +1,16 @@ +// https://github.com/lampepfl/dotty/issues/14642 +case object A +case class B() +case class C() +type Union = A.type | B | C +val a: List[A.type] = ??? +val b: List[B] = ??? +val c: List[C] = ??? 
+val l1: List[Union] = a ++ b +val l2: List[Union] = + a ++ b ++ c +val l3: List[Union] = + (a: List[ + Union + ]) ++ b ++ c +val l4: List[Union] = (a: List[Union]) ++ (b ++ c) diff --git a/tests/pos/i14830.scala b/tests/pos/i14830.scala new file mode 100644 index 000000000000..592a47c1a53c --- /dev/null +++ b/tests/pos/i14830.scala @@ -0,0 +1,6 @@ + +// https://github.com/lampepfl/dotty/issues/14830 +val a: Comparable[String] = "Fred" +val b: { def length: Int } = "Fred" +val c: Comparable[String] & { def length: Int } = "Fred" +val d: Comparable[String] & { def length(): Int } = "Fred" diff --git a/tests/pos/i15165.scala b/tests/pos/i15165.scala new file mode 100644 index 000000000000..15e89c90e900 --- /dev/null +++ b/tests/pos/i15165.scala @@ -0,0 +1,6 @@ +def test1 = { (y: Int) => y + 1 }.apply(???) + +class C: + def x: Int = 8 + +def test2 = { (c: C) => c.x }.apply(null) diff --git a/tests/pos/i15546.scala b/tests/pos/i15546.scala new file mode 100644 index 000000000000..19c7f15b24f1 --- /dev/null +++ b/tests/pos/i15546.scala @@ -0,0 +1,14 @@ +// https://github.com/lampepfl/dotty/issues/15546 + +trait Foo[F[_]] + +object Bug { + def apply[F[_]: Foo]( + await: Boolean, + whatever: Int = 0 + ): Nothing = ??? + + def apply[F[_]: Foo]: Nothing = + apply[F](false) +} + diff --git a/tests/pos/i15821.scala b/tests/pos/i15821.scala new file mode 100644 index 000000000000..a72d13e07bc7 --- /dev/null +++ b/tests/pos/i15821.scala @@ -0,0 +1,9 @@ +def main = + foo.bar(42) + foo.bar + +package object foo { + def bar[F[_]]: Unit = ??? + def bar[F[_]](x: Int): Unit = ??? + private[foo] def bar[F[_]](x: Int)(implicit dummy: DummyImplicit): Unit = ??? +} diff --git a/tests/pos/i16183/Lib_1.scala b/tests/pos/i16183/Lib_1.scala new file mode 100644 index 000000000000..824af18e1d40 --- /dev/null +++ b/tests/pos/i16183/Lib_1.scala @@ -0,0 +1,14 @@ +package pkg + +trait Foo1[A] +trait Foo2[A] extends Foo1[A] + +trait Bar[F[_]] +object Bar { + implicit val bar: Bar[pkg.Foo2] = ??? +} + +trait Qux +object Qux { + implicit def qux[F[_]](implicit bar: Bar[F]): F[Qux] = ??? 
+}
\ No newline at end of file
diff --git a/tests/pos/i16183/Test_2.scala b/tests/pos/i16183/Test_2.scala
new file mode 100644
index 000000000000..c8c5cbed838c
--- /dev/null
+++ b/tests/pos/i16183/Test_2.scala
@@ -0,0 +1,6 @@
+import pkg._
+
+object Test {
+  implicitly[Foo2[Qux]]
+  implicitly[Foo1[Qux]]
+}
\ No newline at end of file
diff --git a/tests/pos/i16451.CanForward.scala b/tests/pos/i16451.CanForward.scala
new file mode 100644
index 000000000000..a09a26f22acc
--- /dev/null
+++ b/tests/pos/i16451.CanForward.scala
@@ -0,0 +1,34 @@
+// scalac: -Werror
+abstract class Namer:
+  private enum CanForward:
+    case Yes
+    case No(whyNot: String)
+    case Skip // for members that never have forwarders
+
+  class Mbr
+  private def canForward(mbr: Mbr): CanForward = CanForward.Yes
+
+  private def applyOrElse[A1 <: CanForward, B1 >: String](x: A1, default: A1 => B1): B1 = x match
+    case CanForward.No(whyNot @ _) => whyNot
+    case _ => ""
+
+  def addForwardersNamed(mbrs: List[Mbr]) =
+    val reason = mbrs.map(canForward).collect {
+      case CanForward.No(whyNot) => whyNot
+    }.headOption.getOrElse("")
+
+  class ClassCompleter:
+    def addForwardersNamed(mbrs: List[Mbr]) =
+      val reason = mbrs.map(canForward).collect {
+        case CanForward.No(whyNot) => whyNot
+      }.headOption.getOrElse("")
+
+    private def exportForwarders =
+      def addForwardersNamed(mbrs: List[Mbr]) =
+        val reason = mbrs.map(canForward).collect {
+          case CanForward.No(whyNot) => whyNot
+        }.headOption.getOrElse("")
+      if mbrs.size == 4 then
+        val reason = mbrs.map(canForward).collect {
+          case CanForward.No(whyNot) => whyNot
+        }.headOption.getOrElse("")
diff --git a/tests/pos/i16451.DiffUtil.scala b/tests/pos/i16451.DiffUtil.scala
new file mode 100644
index 000000000000..3ade8bb73aa7
--- /dev/null
+++ b/tests/pos/i16451.DiffUtil.scala
@@ -0,0 +1,15 @@
+// scalac: -Werror
+object DiffUtil:
+  private sealed trait Patch
+  private final case class Unmodified(str: String) extends Patch
+  private final case class Modified(original: String, str: String) extends Patch
+  private final case class Deleted(str: String) extends Patch
+  private final case class Inserted(str: String) extends Patch
+
+  private def test(diff: Array[Patch]) =
+    diff.collect {
+      case Unmodified(str) => str
+      case Inserted(str) => s"+$str"
+      case Modified(orig, str) => s"{$orig,$str}"
+      case Deleted(str) => s"-$str"
+    }.mkString
diff --git a/tests/pos/i16451.default.scala b/tests/pos/i16451.default.scala
new file mode 100644
index 000000000000..2751f4901b5f
--- /dev/null
+++ b/tests/pos/i16451.default.scala
@@ -0,0 +1,22 @@
+// scalac: -Werror
+
+import java.lang.reflect.*
+import scala.annotation.tailrec
+
+class Test:
+  @tailrec private def unwrapThrowable(x: Throwable): Throwable = x match {
+    case _: InvocationTargetException |  // thrown by reflectively invoked method or constructor
+         _: ExceptionInInitializerError |  // thrown when running a static initializer (e.g. a scala module constructor)
a scala module constructor) + _: UndeclaredThrowableException | // invocation on a proxy instance if its invocation handler's `invoke` throws an exception + _: ClassNotFoundException | // no definition for a class instantiated by name + _: NoClassDefFoundError // the definition existed when the executing class was compiled, but can no longer be found + if x.getCause != null => + unwrapThrowable(x.getCause) + case _ => x + } + + private def unwrapHandler[T](pf: PartialFunction[Throwable, T]): PartialFunction[Throwable, T] = + pf.compose({ case ex => unwrapThrowable(ex) }) + + def test = + unwrapHandler({ case ex => throw ex }) diff --git a/tests/pos/i16469.scala b/tests/pos/i16469.scala new file mode 100644 index 000000000000..1aaa381bb7e2 --- /dev/null +++ b/tests/pos/i16469.scala @@ -0,0 +1,15 @@ +class Context { + def normalMethod(): String = "normal" + inline def inlineMethod(): String = "inline" +} + +class Script(ctx: Context) { + export ctx.* + normalMethod() + inlineMethod() +} + +class MyScript(context: Context) extends Script(context) { + normalMethod() + inlineMethod() +} diff --git a/tests/pos/i16590.scala b/tests/pos/i16590.scala new file mode 100644 index 000000000000..d70054fd52b4 --- /dev/null +++ b/tests/pos/i16590.scala @@ -0,0 +1,13 @@ +enum Tag[A]: + case MyTuple extends Tag[(String, String)] + +def printIt[A](t: Tag[A], a: A): Unit = + t match + case Tag.MyTuple => println(a._1) + +enum Tag2[A]: + case MyTuple extends Tag2[String *: String *: EmptyTuple] + +def printIt2[A](t: Tag2[A], a: A): Unit = + t match + case Tag2.MyTuple => println(a._1) diff --git a/tests/pos/i16639false-pos-on-trait.scala b/tests/pos/i16639false-pos-on-trait.scala new file mode 100644 index 000000000000..67e304f556e1 --- /dev/null +++ b/tests/pos/i16639false-pos-on-trait.scala @@ -0,0 +1,40 @@ +// scalac -Wunsued:all +//Avoid warning on setter in trait Regression test : issue10154 scala + +trait T { + private var x: String = _ + + def y: String = { + if (x eq null) x = "hello, world" + x + } +} + +/* +➜ skalac -version +Scala compiler version 2.13.10-20220920-001308-98972e5 -- Copyright 2002-2022, LAMP/EPFL and Lightbend, Inc. 
+ +➜ skalac -d /tmp -Wunused -Vprint:typer t12646.scala +t12646.scala:3: warning: parameter value x_= in variable x is never used + private var x: String = _ + ^ +[[syntax trees at end of typer]] // t12646.scala +package { + abstract trait T extends scala.AnyRef { + def /*T*/$init$(): Unit = { + () + }; + private val x: String = _; + private def x_=(x$1: String): Unit; + def y: String = { + if (T.this.x.eq(null)) + T.this.x_=("hello, world") + else + (); + T.this.x + } + } +} + +1 warning +*/ diff --git a/tests/pos/i16706.scala b/tests/pos/i16706.scala new file mode 100644 index 000000000000..87fd015c69bb --- /dev/null +++ b/tests/pos/i16706.scala @@ -0,0 +1,17 @@ +import scala.deriving.Mirror +import scala.reflect.ClassTag + +type TupleUnionLub[T <: Tuple, Lub, Acc <: Lub] <: Lub = T match { + case (h & Lub) *: t => TupleUnionLub[t, Lub, Acc | h] + case EmptyTuple => Acc +} + +transparent inline given derived[A]( + using m: Mirror.SumOf[A], + idClassTag: ClassTag[TupleUnionLub[m.MirroredElemTypes, A, Nothing]] +): Unit = () + +sealed trait Foo +case class FooA(a: Int) extends Foo + +val instance = derived[Foo] // error \ No newline at end of file diff --git a/tests/pos/i16707.scala b/tests/pos/i16707.scala new file mode 100644 index 000000000000..6181471f5246 --- /dev/null +++ b/tests/pos/i16707.scala @@ -0,0 +1,11 @@ +import scala.deriving.Mirror +import scala.reflect.ClassTag + +transparent inline given derived[A]( + using m: Mirror.ProductOf[A], + idClassTag: ClassTag[Tuple.Union[m.MirroredElemTypes]] + ): Unit = ??? + +case class Foo(a: Int) + +val instance = derived[Foo] // error diff --git a/tests/pos/i16740.scala b/tests/pos/i16740.scala new file mode 100644 index 000000000000..398518ee77ef --- /dev/null +++ b/tests/pos/i16740.scala @@ -0,0 +1,8 @@ +class Enclosing: + object Tags: + opaque type Ref[T, S <: String & Singleton] = S + inline def require[T, S <: String & Singleton]: Ref[T, S] = ??? 
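+    // Ref stays opaque outside Tags, so t1 and t2 below get types that still
+    // carry their tag strings, even though the underlying representation is
+    // just the singleton string type S.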
+ import Tags.* + + val t1 = require[Int, "t1"] + val t2 = require[Double, "t2"] diff --git a/tests/pos/i16778.scala b/tests/pos/i16778.scala new file mode 100644 index 000000000000..426f3c86c0bd --- /dev/null +++ b/tests/pos/i16778.scala @@ -0,0 +1,22 @@ +final abstract class ForcedRecompilationToken[T] + +object ForcedRecompilationToken { + implicit def materialize: ForcedRecompilationToken["x"] = null.asInstanceOf[ForcedRecompilationToken["x"]] +} + +class PluginDef[T](implicit val recompilationToken: ForcedRecompilationToken[T]) + +object X { + val no = { + final class anon extends PluginDef {} // was: missing type parameters + new anon + } + + val bad = new PluginDef {} // was: No given instance + val good = new PluginDef() {} // ok +} + +object DependingPlugin { + class NestedDoublePlugin extends PluginDef + object NestedDoublePlugin extends PluginDef +} diff --git a/tests/pos/i16792.scala b/tests/pos/i16792.scala new file mode 100644 index 000000000000..5a44d8c09458 --- /dev/null +++ b/tests/pos/i16792.scala @@ -0,0 +1,2 @@ +val x = (1, 1) match + case _: (_ *: _) => () diff --git a/tests/pos/i16814.scala b/tests/pos/i16814.scala new file mode 100644 index 000000000000..bffbb5788c0b --- /dev/null +++ b/tests/pos/i16814.scala @@ -0,0 +1,2 @@ +class Foo protected (foo: Int = 0) { } +class Bar extends Foo diff --git a/tests/pos/i16869.scala b/tests/pos/i16869.scala new file mode 100644 index 000000000000..4067e34ac8ef --- /dev/null +++ b/tests/pos/i16869.scala @@ -0,0 +1,13 @@ +class C[T] + +type Foo[T] = T match + case C[true] => true + case C[false] => false + +class W[T] extends C[Foo[T]] + +def f[T <: C[?]](t: T) = W[T]() + +def test = + val b = C[true]() + f(b): C[true] diff --git a/tests/pos/i16920.scala b/tests/pos/i16920.scala new file mode 100644 index 000000000000..dd4f5804a4fd --- /dev/null +++ b/tests/pos/i16920.scala @@ -0,0 +1,78 @@ +import language.experimental.relaxedExtensionImports + +object One: + extension (s: String) + def wow: Unit = println(s) + +object Two: + extension (i: Int) + def wow: Unit = println(i) + +object Three: + extension (s: String) + def wow: Unit = println(s) + extension (i: Int) + def wow: Unit = println(i) + +object Four: + implicit class WowString(s: String): + def wow: Unit = println(s) + +object Five: + implicit class WowInt(i: Int): + def wow: Unit = println(i) + +object Compiles: + import Three._ + def test: Unit = + 5.wow + "five".wow + +object AlsoCompiles: + import Four._ + import Five._ + def test: Unit = + 5.wow + "five".wow + +object UsedToFail: + import One._ + import Compiles.* + import Two._ + def test: Unit = + 5.wow + "five".wow + +object Conflicting: + extension (i: Int) + def wow: Unit = println(i) + +object Named: + import One.wow + import Two.wow + import Conflicting._ + def test: Unit = + 5.wow // ok + "five".wow // ok + +object Named2: + import Conflicting._ + import One.wow + import Two.wow + def test: Unit = + 5.wow // ok + "five".wow // ok + +val Alias = Two + +object Named3: + import Alias._ + import Two._ + def test: Unit = + 5.wow // ok + +object Named4: + import Two._ + import Alias._ + def test: Unit = + 5.wow // ok diff --git a/tests/pos/i16954.scala b/tests/pos/i16954.scala new file mode 100644 index 000000000000..ce7418e5e5e2 --- /dev/null +++ b/tests/pos/i16954.scala @@ -0,0 +1,18 @@ +class Test: + def test = + classOf[Test] + + def blck = + class Blck + val cls = classOf[Blck] + cls + + def expr = + class Expr + classOf[Expr] // was: "assertion failed: leak: Expr in { [..] 
}" crash + +object Test extends Test: + def main(args: Array[String]): Unit = + assert(test.getName == "Test", test.getName) + assert(blck.getName == "Test$Blck$1", blck.getName) + assert(expr.getName == "Test$Expr$1", expr.getName) diff --git a/tests/pos/i16997.min.scala b/tests/pos/i16997.min.scala new file mode 100644 index 000000000000..abac648bdfd5 --- /dev/null +++ b/tests/pos/i16997.min.scala @@ -0,0 +1,12 @@ +class Fn: + class R[Y] + +case class Foo[F[_]](nest: Foo[F]): + case class Bar[G[_], R[_]](value: Foo[G]) + + def bar[G[_]](using fn: Fn): Bar[G, fn.R] = ??? + + def part[G[_]](using fn: Fn): Bar[G, fn.R] = + (bar[G], ()) match + case (Bar(value), ()) => + Bar(Foo(value)) diff --git a/tests/pos/i16997.scala b/tests/pos/i16997.scala new file mode 100644 index 000000000000..cd7b1ac8ab91 --- /dev/null +++ b/tests/pos/i16997.scala @@ -0,0 +1,63 @@ +class Funs { + sealed trait ->[A, B] +} + +/** + * Binary tree with leafs holding values of types `F[X]`, `F[Y]`, ... + * The complete structure of the tree is expressed by the type `A`, using the tags for branches and leafs. + * + * @tparam <*> tag for branches + * @tparam T tag for leafs. + * @tparam F value type of leafs. Each leaf holds a value of type `F[T]`, for some type `T`. + * @tparam A captures the complete structure of the tree + */ +enum Tree[<*>[_, _], T[_], F[_], A] { + case Branch[<*>[_, _], T[_], F[_], A, B]( + l: Tree[<*>, T, F, A], + r: Tree[<*>, T, F, B], + ) extends Tree[<*>, T, F, A <*> B] + + case Leaf[<*>[_, _], T[_], F[_], A]( + value: F[A], + ) extends Tree[<*>, T, F, T[A]] + + def <*>[B](that: Tree[<*>, T, F, B]): Tree[<*>, T, F, A <*> B] = + Branch(this, that) + + def partition[G[_], H[_]]( + f: [x] => F[x] => Either[G[x], H[x]], + )(using + funs: Funs, + ): Partitioned[G, H, funs.->] = + this match { + case Leaf(a) => + f(a) match + case Left(a) => Partitioned.Left(Leaf(a)) + case Right(a) => Partitioned.Right(Leaf(a)) + case Branch(l, r) => + import Partitioned.{Both, Left, Right} + import l.Partitioned.{Both => LBoth, Left => LLeft, Right => LRight} + import r.Partitioned.{Both => RBoth, Left => RLeft, Right => RRight} + + (l.partition(f), r.partition(f)) match + case (LLeft(lg), RLeft(rg)) => Left(lg <*> rg) + case (LLeft(lg), RRight(rh)) => Both(lg, rh) + case (LLeft(lg), RBoth(rg, rh)) => Both(lg <*> rg, rh) + case (LRight(lh), RLeft(rg)) => Both(rg, lh) + case (LRight(lh), RRight(rh)) => Right(lh <*> rh) + case (LRight(lh), RBoth(rg, rh)) => Both(rg, lh <*> rh) + case (LBoth(lg, lh), RLeft(rg)) => Both(lg <*> rg, lh) + case (LBoth(lg, lh), RRight(rh)) => Both(lg, lh <*> rh) + case (LBoth(lg, lh), RBoth(rg, rh)) => Both(lg <*> rg, lh <*> rh) + } + + // note that `->` is never even used, to keep this reproduction case small + enum Partitioned[G[_], H[_], ->[_, _]] { + case Left(value: Tree[<*>, T, G, A]) + case Right(value: Tree[<*>, T, H, A]) + case Both[G[_], H[_], X, Y, ->[_, _]]( + l: Tree[<*>, T, G, X], + r: Tree[<*>, T, H, Y], + ) extends Partitioned[G, H, ->] + } +} diff --git a/tests/pos/i17002.scala b/tests/pos/i17002.scala new file mode 100644 index 000000000000..d33c1bd386d9 --- /dev/null +++ b/tests/pos/i17002.scala @@ -0,0 +1,10 @@ +import scala.annotation.meta.companionMethod + +@companionMethod +class methOnly extends annotation.Annotation + +class Test +object Test: + + @methOnly + given test2[T]: Test with {} diff --git a/tests/pos/i17008.scala b/tests/pos/i17008.scala new file mode 100644 index 000000000000..2bf0f155afbc --- /dev/null +++ b/tests/pos/i17008.scala @@ -0,0 +1,9 @@ 
+abstract class A { + protected def foo(text: String, bar: () => Unit = () => ()): Unit = println(s"$text, $bar") +} + +class B extends A { + def f1(): Unit = { + super.foo("X") + } +} diff --git a/tests/pos/i17052.scala b/tests/pos/i17052.scala new file mode 100644 index 000000000000..4bb8cf493d0b --- /dev/null +++ b/tests/pos/i17052.scala @@ -0,0 +1,2 @@ +def test[F[_]](fAny: F[Any]) = + { [X] => (fx: F[X]) => { val fx2: F[X] = fx; () } }.apply[Any](fAny) diff --git a/tests/pos/i17064.scala b/tests/pos/i17064.scala new file mode 100644 index 000000000000..e5a1f636f56c --- /dev/null +++ b/tests/pos/i17064.scala @@ -0,0 +1,10 @@ +class HiddenInner[+O<:Outer](val outer:O){ +} + +class Outer{ + type Inner = HiddenInner[this.type] +} + +val o : Outer = new Outer +def a : o.Inner = new o.Inner(o) +val b : Outer#Inner = a // DOES NOT COMPILE \ No newline at end of file diff --git a/tests/pos/i17066/Bar.scala b/tests/pos/i17066/Bar.scala new file mode 100644 index 000000000000..7d3e67f85387 --- /dev/null +++ b/tests/pos/i17066/Bar.scala @@ -0,0 +1,2 @@ +class Bar extends Foo: + def this(xs: String*) = this() diff --git a/tests/pos/i17066/Foo.java b/tests/pos/i17066/Foo.java new file mode 100644 index 000000000000..cb4335c7d444 --- /dev/null +++ b/tests/pos/i17066/Foo.java @@ -0,0 +1,3 @@ +public class Foo { + public Foo(String... xs) { } +} diff --git a/tests/pos/i17100.scala b/tests/pos/i17100.scala new file mode 100644 index 000000000000..1858e0383f8d --- /dev/null +++ b/tests/pos/i17100.scala @@ -0,0 +1,14 @@ +trait Sel extends Selectable + +extension (s: Sel) + def selectDynamic(name: String) = ??? + def applyDynamic(name: String)(x: Int) = ??? + def applyDynamic(name: String)() = ??? + +val sel = (new Sel {}).asInstanceOf[Sel{ def foo: String; def bar(x: Int): Int; def baz(): Int }] +val foo = sel.selectDynamic("foo") +val foo2 = sel.foo +val foo3 = sel.bar(2) +val foo4 = sel.baz() + + diff --git a/tests/pos/i17100a.scala b/tests/pos/i17100a.scala new file mode 100644 index 000000000000..abf74c80a4f5 --- /dev/null +++ b/tests/pos/i17100a.scala @@ -0,0 +1,12 @@ + +import scala.language.dynamics +trait Sel extends Dynamic + +extension (s: Sel) + def selectDynamic(name: String) = ??? + +val sel = new Sel {} +val foo = sel.foo +val sel2 = (new Sel {}).asInstanceOf[Sel{ def foo: String }] +val foo2 = sel2.foo + diff --git a/tests/pos/i17135.scala b/tests/pos/i17135.scala new file mode 100644 index 000000000000..7e3f91cf5e83 --- /dev/null +++ b/tests/pos/i17135.scala @@ -0,0 +1,53 @@ +package doobie + +// original example +def someFunction(param: Int): Int = { + sealed trait Foo { + def asString: String = this match { + case Foo.CaseC => "C" + } + } + object Foo { + // Having an object here crashes the compiler. + object CaseC extends Foo + } + + ??? +} + +// minimization +def foo = + class Bar { + // Having an object here crashes the compiler. + lazy val CaseC = + class Baz extends Foo + new Baz() + } + val Bar: Bar = new Bar() + trait Foo { + def asString = Bar.CaseC + } + +// variant: outer is lazy val +lazy val lazyfoo = + class Bar { + // Having an object here crashes the compiler. 
+ lazy val CaseC = + class Baz extends Foo + new Baz() + } + val Bar: Bar = new Bar() + trait Foo { + def asString = Bar.CaseC + } + +// other example +def bar = + sealed trait GADT2[A] extends Product with Serializable + + object GADT2 { + case class IsDir(path: String) extends GADT2[_root_.scala.Boolean] + case class Exists(path: String) extends GADT2[_root_.scala.Boolean] + case class ReadBytes(path: String) extends GADT2[_root_.scala.Array[_root_.scala.Byte]] + case class CopyOver(src: Seq[_root_.scala.Byte], path: String) extends GADT2[Int] + } \ No newline at end of file diff --git a/tests/pos/i17155.scala b/tests/pos/i17155.scala new file mode 100644 index 000000000000..7b58dbe4a13a --- /dev/null +++ b/tests/pos/i17155.scala @@ -0,0 +1,7 @@ +def foo[A, B](arr: Array[A], pf: PartialFunction[A, B]): Seq[B] = arr.toSeq.collect(pf) +def foo[A, B](list: List[A], pf: PartialFunction[A, B]): Seq[B] = list.collect(pf) // no errors if this is commented out + +val arr = Array(1, 2, 3) +val resOkay = foo(arr = arr, { case n if n % 2 != 0 => n.toString }) // compiles +val resNope = foo(arr = arr, pf = { case n if n % 2 != 0 => n.toString }) // Error 1 +val resNope2 = foo[Int, String](arr = arr, pf = { case n if n % 2 != 0 => n.toString }) // Error 2 \ No newline at end of file diff --git a/tests/pos/i17227.scala b/tests/pos/i17227.scala new file mode 100644 index 000000000000..d537f99f6515 --- /dev/null +++ b/tests/pos/i17227.scala @@ -0,0 +1,10 @@ +inline def foo(f: Int => Int): Int => Int = f +inline def bar(inline f: Int => Int): Int => Int = f +inline def baz(f: (Int => Int)*): Int => Int = f.head + +def g(i: Int): Int = i + +def test = + foo(f = g) + bar(f = g) + baz(f = g) diff --git a/tests/pos/i17230.bootstrap.scala b/tests/pos/i17230.bootstrap.scala new file mode 100644 index 000000000000..ef2d98d8f55b --- /dev/null +++ b/tests/pos/i17230.bootstrap.scala @@ -0,0 +1,16 @@ +type Untyped = Type | Null + +class Type +abstract class SearchFailureType extends Type + +abstract class Tree[+T <: Untyped]: + def tpe: T = null.asInstanceOf[T] + +class SearchFailureIdent[+T <: Untyped] extends Tree[T] + +class Test_i17230_bootstrap: + def t1(arg: Tree[Type]) = arg match + case arg: SearchFailureIdent[?] => arg.tpe match + case x: SearchFailureType => + case _ => + case _ => diff --git a/tests/pos/i17230.min1.scala b/tests/pos/i17230.min1.scala new file mode 100644 index 000000000000..e2df63e168c1 --- /dev/null +++ b/tests/pos/i17230.min1.scala @@ -0,0 +1,15 @@ +// scalac: -Werror +trait Foo: + type Bar[_] + +object Foo: + type Aux[B[_]] = Foo { type Bar[A] = B[A] } + +class Test: + def t1[B[_]](self: Option[Foo.Aux[B]]) = self match + case Some(_) => 1 + case None => 2 + + def t2[B[_]](self: Option[Foo.Aux[B]]) = self match + case Some(f) => 1 + case None => 2 diff --git a/tests/pos/i17230.orig.scala b/tests/pos/i17230.orig.scala new file mode 100644 index 000000000000..d72a0082a116 --- /dev/null +++ b/tests/pos/i17230.orig.scala @@ -0,0 +1,20 @@ +// scalac: -Werror +import scala.util.* + +trait Transaction { + type State[_] +} +object Transaction { + type of[S[_]] = Transaction { type State[A] = S[A] } +} +trait DynamicScope[State[_]] + +case class ScopeSearch[State[_]](self: Either[Transaction.of[State], DynamicScope[State]]) { + + def embedTransaction[T](f: Transaction.of[State] => T): T = + self match { + case Left(integrated) => ??? + case Right(ds) => ??? 
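+      // Left and Right exhaust the Either; under -Werror this file fails to
+      // compile if the checker wrongly flags the match as non-exhaustive.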
+ } +} + diff --git a/tests/pos/i17245.scala b/tests/pos/i17245.scala new file mode 100644 index 000000000000..3b5b3a74108d --- /dev/null +++ b/tests/pos/i17245.scala @@ -0,0 +1,20 @@ +import scala.reflect.ClassTag + +trait MockSettings + +object Mockito { + def mock[T : ClassTag]: T = ??? + def mock[T : ClassTag](settings: MockSettings): T = ??? +} + +trait Channel +type OnChannel = Channel => Any + +@main def Test = + val case1: OnChannel = Mockito.mock[OnChannel] + val case2: OnChannel = Mockito.mock + val case3 = Mockito.mock[OnChannel] + val case4: OnChannel = Mockito.mock[OnChannel](summon[ClassTag[OnChannel]]) + + // not a regressive case, but an added improvement with the fix for the above + val case5: Channel => Any = Mockito.mock[Channel => Any] diff --git a/tests/pos/i17380.scala b/tests/pos/i17380.scala new file mode 100644 index 000000000000..8963db3058b3 --- /dev/null +++ b/tests/pos/i17380.scala @@ -0,0 +1,3 @@ +class C { type T; type U } + +type X = C { type U = T; def u: U } { type T = String } \ No newline at end of file diff --git a/tests/pos/i17381.scala b/tests/pos/i17381.scala new file mode 100644 index 000000000000..3d8aac8e9f67 --- /dev/null +++ b/tests/pos/i17381.scala @@ -0,0 +1,9 @@ +import reflect.Selectable.reflectiveSelectable + +type Y = { type T = String; def u(): T } + +trait Test { + + val y1: Y + val y2 = y1.u() +} \ No newline at end of file diff --git a/tests/pos/i18124/definition.scala b/tests/pos/i18124/definition.scala new file mode 100644 index 000000000000..1377c94fe7cd --- /dev/null +++ b/tests/pos/i18124/definition.scala @@ -0,0 +1,15 @@ +// definition.scala +package oolong.bson: + + trait BsonValue + protected def merge( + base: BsonValue, + patch: BsonValue, + arraySubvalues: Boolean = false + ): BsonValue = ??? + + private def foo: Int = 1 + + package inner: + protected[bson] def bar = 2 + diff --git a/tests/pos/i18124/usage.scala b/tests/pos/i18124/usage.scala new file mode 100644 index 000000000000..0bc0417c01ad --- /dev/null +++ b/tests/pos/i18124/usage.scala @@ -0,0 +1,8 @@ +// usage.scala +package oolong.bson + +extension (bv: BsonValue) + def :+(other: BsonValue): BsonValue = merge(other, bv, false) + +val x = foo +val y = inner.bar diff --git a/tests/pos/i18160/Test_2.scala b/tests/pos/i18160/Test_2.scala new file mode 100644 index 000000000000..9ee40c3d37f9 --- /dev/null +++ b/tests/pos/i18160/Test_2.scala @@ -0,0 +1,11 @@ +class SynchronizedReevaluation +class SynchronizedReevaluationApi[Api <: RescalaInterface](val api: Api){ + import api._ + + def SynchronizedReevaluation[A](evt: Event[A])(implicit + turnSource: CreationTicket + ): (SynchronizedReevaluation, Event[A]) = { + val sync = new SynchronizedReevaluation + (sync, evt.map(identity)(turnSource)) + } +} diff --git a/tests/pos/i18160/repro_1.scala b/tests/pos/i18160/repro_1.scala new file mode 100644 index 000000000000..060f2d325d2d --- /dev/null +++ b/tests/pos/i18160/repro_1.scala @@ -0,0 +1,25 @@ +object core { + final class CreationTicket[State[_]] +} + +trait ReadAs[S[_], +A] { type State[V] = S[V] } + +trait EventCompatBundle { + bundle: Operators => + + trait EventCompat[+T] extends ReadAs[State, Option[T]] { + selfType: Event[T] => + final inline def map[B](inline expression: T => B)(implicit ticket: CreationTicket): Event[B] = ??? 
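+    // CreationTicket here resolves to the alias core.CreationTicket[State]
+    // declared in Operators, visible through the bundle self type.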
+  }
+}
+
+trait EventBundle extends EventCompatBundle { self: Operators =>
+  trait Event[+T] extends EventCompat[T]:
+    final override type State[V] = self.State[V]
+}
+trait Operators extends EventBundle {
+  type State[_]
+  type CreationTicket = core.CreationTicket[State]
+}
+trait RescalaInterface extends Operators
+
diff --git a/tests/pos/i18163.orig.scala b/tests/pos/i18163.orig.scala
new file mode 100644
index 000000000000..eb0627254156
--- /dev/null
+++ b/tests/pos/i18163.orig.scala
@@ -0,0 +1,40 @@
+import scala.language.implicitConversions
+
+// We have two `contramap` methods: one provided via `LoggerSyntax`, the other via `Contravariant.Ops`.
+// The `ContravariantMonoidal` given instances are never used and do not match our type; the code only fails once at least two of them exist.
+// Removing `import catsSyntax._` allows the code to compile.
+// Removing `import odinSyntax.LoggerSyntax` while keeping `catsSyntax` would still fail to compile `def fails`.
+
+trait Foo[A]
+trait Bar[A]
+
+trait WriterT[F[_]: Contravariant, L, V]:
+  def contramap[Z](fn: Z => V): WriterT[F, L, Z] = ???
+trait Logger[F[_]]
+class WriterTLogger[F[_]] extends Logger[[G] =>> WriterT[F, List[String], G]]
+
+trait ContravariantMonoidal[F[_]] extends Invariant[F] with Contravariant[F]
+trait Invariant[F[_]]
+object Invariant:
+  given ContravariantMonoidal[Foo] = ???
+  given ContravariantMonoidal[Bar] = ???
+
+trait Contravariant[F[_]] extends Invariant[F]
+object Contravariant:
+  trait Ops[F[_], A]:
+    def contramap[B](f: B => A): F[B] = ???
+
+object catsSyntax:
+  implicit def toContravariantOps[F[_]: Contravariant, A](target: F[A]): Contravariant.Ops[F, A] = ???
+
+object odinSyntax:
+  implicit class LoggerSyntax[F[_]](logger: Logger[F]):
+    def contramap(f: String => String): Logger[F] = ???
+
+import catsSyntax._
+import odinSyntax.LoggerSyntax
+
+class Test:
+  def fails = new WriterTLogger[Option].contramap(identity)
+  def works = LoggerSyntax(new WriterTLogger[Option]).contramap(identity)
+
diff --git a/tests/pos/i18163.scala b/tests/pos/i18163.scala
new file mode 100644
index 000000000000..5c364a50dd57
--- /dev/null
+++ b/tests/pos/i18163.scala
@@ -0,0 +1,21 @@
+import scala.language.implicitConversions
+
+trait Foo[A]
+trait Bar[B]
+trait Qux[C]
+class Log[K[_]]
+
+trait Inv[F[_]]
+object Inv:
+  given monFoo: Inv[Foo] = ???
+  given monBar: Inv[Bar] = ???
+
+trait InvOps[H[_], D] { def desc(s: String): H[D] = ??? }
+trait LogOps[L[_]] { def desc(s: String): Log[L] = ??? }
+
+class Test:
+  implicit def LogOps[Q[_]](l: Log[Q]): LogOps[Q] = ???
+  implicit def InvOps[J[_], E](j11: J[E])(implicit z: Inv[J]): InvOps[J, E] = ???
+
+  def fails = new Log[Qux].desc("fails")
+  def works = LogOps[Qux](new Log[Qux]).desc("works")
diff --git a/tests/pos/i18276a.scala b/tests/pos/i18276a.scala
new file mode 100644
index 000000000000..46c2722fd8be
--- /dev/null
+++ b/tests/pos/i18276a.scala
@@ -0,0 +1,15 @@
+import scala.language.implicitConversions
+
+case class Assign(left: String, right: String)
+class SyntaxAnalyser extends ParsersBase {
+  val x: Parser[String ~ String] = ???
+  val y: Parser[Assign] = x.map(Assign.apply)
+}
+
+class ParsersBase {
+  trait ~[+T, +U]
+  abstract class Parser[+T]:
+    def map[U](f: T => U): Parser[U] = ???
+
+  given [A, B, X]: Conversion[(A, B) => X, (A ~ B) => X] = ???
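+  // This given adapts a binary function such as Assign.apply to the
+  // (A ~ B) => X shape expected when mapping over a Parser[String ~ String].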
+}
diff --git a/tests/pos/i18276b.scala b/tests/pos/i18276b.scala
new file mode 100644
index 000000000000..a4d905293472
--- /dev/null
+++ b/tests/pos/i18276b.scala
@@ -0,0 +1,9 @@
+import scala.language.implicitConversions
+
+def foo(a: Int): Int = ???
+def bar(f: () => Int): Int = ???
+
+given f: Conversion[Int => Int, () => Int] = ???
+
+def test1: Int = bar(foo) // implicit conversion applied to foo
+def test2: Int = bar(f(foo))
diff --git a/tests/pos/i18361.scala b/tests/pos/i18361.scala
new file mode 100644
index 000000000000..a84d5f0a09db
--- /dev/null
+++ b/tests/pos/i18361.scala
@@ -0,0 +1,15 @@
+package test1:
+  class Service(val name: String)
+  class CrudService(name: String) extends Service(name)
+
+  trait Foo { self: CrudService =>
+    val x = self.name
+  }
+
+package test2:
+  abstract class Service[F[_]](val name: String)
+  abstract class CrudService[F[_]](name: String) extends Service[F](name)
+
+  trait Foo[F[_]] { self: CrudService[?] =>
+    val x = self.name
+  }
diff --git a/tests/pos/i4820.scala b/tests/pos/i4820.scala
new file mode 100644
index 000000000000..8d368d150a00
--- /dev/null
+++ b/tests/pos/i4820.scala
@@ -0,0 +1,2 @@
+class Foo[A]
+class Bar[A] extends Foo // was error, now expanded to Foo[Nothing]
diff --git a/tests/pos/i4820b.scala b/tests/pos/i4820b.scala
new file mode 100644
index 000000000000..a1c7d54f0c76
--- /dev/null
+++ b/tests/pos/i4820b.scala
@@ -0,0 +1,5 @@
+trait SetOps[A, +C <: SetOps[A, C]] {
+  def concat(that: Iterable[A]): C = ???
+}
+
+class Set1[A] extends SetOps // ideally should be SetOps[A, Set1[A]], but SetOps[Nothing, Nothing] is inferred
diff --git a/tests/pos/i5700.scala b/tests/pos/i5700.scala
new file mode 100644
index 000000000000..69892dea16f4
--- /dev/null
+++ b/tests/pos/i5700.scala
@@ -0,0 +1,5 @@
+// https://github.com/lampepfl/dotty/issues/5700
+object noRecursionLimit:
+  type M = { type T[+A]; type Ev >: T[Any] <: T[Nothing] }
+  val M: M = ().asInstanceOf[M]
+  def dcast(m: M.T[Any]): M.T[Int] = m: M.Ev
diff --git a/tests/pos/i7414.scala b/tests/pos/i7414.scala
new file mode 100644
index 000000000000..fd85ed2a2265
--- /dev/null
+++ b/tests/pos/i7414.scala
@@ -0,0 +1,12 @@
+// https://github.com/lampepfl/dotty/issues/7414
+
+object DepTest {
+  trait Trait {
+    case class Dependent()
+  }
+  object obj extends Trait
+  case class Dep[T <: Trait](t: T) {
+    def fun(q: t.Dependent): Unit = ???
+  }
+  Dep(obj).fun(obj.Dependent())
+}
diff --git a/tests/pos/i7445a.scala b/tests/pos/i7445a.scala
new file mode 100644
index 000000000000..2b54166de3f0
--- /dev/null
+++ b/tests/pos/i7445a.scala
@@ -0,0 +1,10 @@
+// https://github.com/lampepfl/dotty/issues/7445
+
+object Main {
+  type O1[A] = {
+    type OutInner[X] = Unit
+    type Out = OutInner[A]
+  }
+
+  def f1: O1[Int]#Out = ???
+}
diff --git a/tests/pos/i7445b.scala b/tests/pos/i7445b.scala
new file mode 100644
index 000000000000..1d49479ef0a5
--- /dev/null
+++ b/tests/pos/i7445b.scala
@@ -0,0 +1,12 @@
+// https://github.com/lampepfl/dotty/issues/7445
+
+type O1[A] = {
+  type OutInner[Ts] <: Tuple = Ts match {
+    case EmptyTuple => EmptyTuple
+    case h *: t => h *: OutInner[t]
+  }
+
+  type Out = OutInner[A]
+}
+
+def f1: O1[(Int, Int)]#Out = ???
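+
+// OutInner matches the tuple element-wise, so O1[(Int, Int)]#Out reduces to
+// Int *: Int *: EmptyTuple, i.e. (Int, Int); f2 below is just an added
+// illustration that the reduced type can be used directly:
+def f2: (Int, Int) = f1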
diff --git a/tests/pos/i7653.scala b/tests/pos/i7653.scala new file mode 100644 index 000000000000..8511b6eef69b --- /dev/null +++ b/tests/pos/i7653.scala @@ -0,0 +1,22 @@ +// https://github.com/lampepfl/dotty/issues/7653 + +object options2 { + type Option[T] = { + def isEmpty: Boolean + } + type None[T] = Option[T] + val none: () => Option[Nothing] = () => + new { + def isEmpty = true + } + val mkNone0: [T] => () => Option[Nothing] = [T] => + () => + new { + def isEmpty = true + } + val mkNone: [T] => () => Option[T] = [T] => + () => + new { + def isEmpty = true + } +} diff --git a/tests/pos/i7790.scala b/tests/pos/i7790.scala new file mode 100644 index 000000000000..0a0e2d2ce347 --- /dev/null +++ b/tests/pos/i7790.scala @@ -0,0 +1,9 @@ +// https://github.com/lampepfl/dotty/issues/7790 +trait Foo: + given Int = 10 + def map(f: Int ?=> Int) = f + def map(f: Int ?=> String) = f + +@main def Test = + val m: Foo = ??? + m.map((x: Int) ?=> x) diff --git a/tests/pos/i8300.scala b/tests/pos/i8300.scala new file mode 100644 index 000000000000..f106b24dbd1c --- /dev/null +++ b/tests/pos/i8300.scala @@ -0,0 +1,15 @@ +// https://github.com/lampepfl/dotty/issues/8300 + +type Bar[X] = X match { + case List[a] => List[Tuple1[a]] + case Set[a] => Set[Tuple1[a]] +} + +object Test: + (Set(1, 2, 3), List("a", "b")).map( + [A] => + (a: A) => + a match { + case it: Iterable[x] => it.map(Tuple1(_)).asInstanceOf[Bar[A]] + } + ) diff --git a/tests/pos/i8321.scala b/tests/pos/i8321.scala new file mode 100644 index 000000000000..825598d0b668 --- /dev/null +++ b/tests/pos/i8321.scala @@ -0,0 +1,9 @@ + +object Test: + inline def concat[A <: Tuple, B <: Tuple]( + a: Option[A], + b: Option[B] + ): Option[Tuple.Concat[A, B]] = + a.zip(b).map(_ ++ _) + + concat(Some(1, 2), Some(3, 4)) diff --git a/tests/pos/i9361.scala b/tests/pos/i9361.scala new file mode 100644 index 000000000000..18efd203d885 --- /dev/null +++ b/tests/pos/i9361.scala @@ -0,0 +1,9 @@ +// https://github.com/lampepfl/dotty/issues/9361 + +import scala.quoted._ + +trait CPM[F[_]] + +def matchTerm(t: Expr[Any])(using qctx: Quotes): Unit = + t match + case '{ ??? : CPM[m2] } => ??? diff --git a/tests/pos/interleaving-ba.scala b/tests/pos/interleaving-ba.scala new file mode 100644 index 000000000000..69fe2d9537a0 --- /dev/null +++ b/tests/pos/interleaving-ba.scala @@ -0,0 +1,11 @@ +import scala.language.experimental.clauseInterleaving + +object BA { + given String = "" + given Double = 0 + + def ba[A](x: A)[B](using B): B = summon[B] + + def test = ba(0)[String] + +} diff --git a/tests/pos/interleaving-chainedParams.scala b/tests/pos/interleaving-chainedParams.scala new file mode 100644 index 000000000000..e502888d97c8 --- /dev/null +++ b/tests/pos/interleaving-chainedParams.scala @@ -0,0 +1,20 @@ +import scala.language.experimental.clauseInterleaving + +object chainedParams{ + + trait Chain{ + type Tail <: Chain + } + + def f[C1 <: Chain](c1: C1)[C2 <: c1.Tail](c2: C2)[C3 <: c2.Tail](c3: C3): c3.Tail = ??? 
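+  // Each later clause can depend on the term parameter bound just before it:
+  // C2 is bounded by c1.Tail and C3 by c2.Tail, which the calls below exploit.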
+ + val self = new Chain{ type Tail = this.type } + val res: self.type = f(self)(self)(self) + + type C <: Chain + + val c3 = new Chain{ type Tail = C } + val c2 = new Chain{ type Tail = c3.type } + val c1 = new Chain{ type Tail = c2.type } + val u: C = f(c1)(c2)(c3) +} diff --git a/tests/pos/interleaving-classless.scala b/tests/pos/interleaving-classless.scala new file mode 100644 index 000000000000..5aec92db3409 --- /dev/null +++ b/tests/pos/interleaving-classless.scala @@ -0,0 +1,6 @@ +import scala.language.experimental.clauseInterleaving + +def f1[T]()[U](x: T, y: U): (T, U) = (x, y) +def f2[T](x: T)[U](y: U): (T, U) = (x, y) +def f3[T, U](using DummyImplicit)[V](x: T): U = ??? +def f4[T](x: T)[U <: x.type](y: U): (T, U) = (x, y) diff --git a/tests/pos/interleaving-functor.scala b/tests/pos/interleaving-functor.scala new file mode 100644 index 000000000000..35bed59f77f0 --- /dev/null +++ b/tests/pos/interleaving-functor.scala @@ -0,0 +1,19 @@ +import scala.language.experimental.clauseInterleaving + +object functorInterleaving: + //taken from https://dotty.epfl.ch/docs/reference/contextual/type-classes.html + //at version 3.1.1-RC1-bin-20210930-01f040b-NIGHTLY + //modified to have type interleaving + trait Functor[F[_]]: + def map[A](x: F[A])[B](f: A => B): F[B] + + + given Functor[List] with + def map[A](x: List[A])[B](f: A => B): List[B] = + x.map(f) + + def assertTransformation[F[_]: Functor, A](original: F[A])[B](expected: F[B])(mapping: A => B): Unit = + assert(expected == summon[Functor[F]].map(original)(mapping)) + + @main def testInterweaving = + assertTransformation(List("a", "b"))(List("a1", "b1")){elt => s"${elt}1"} diff --git a/tests/pos/interleaving-newline.scala b/tests/pos/interleaving-newline.scala new file mode 100644 index 000000000000..de8fb98a2f81 --- /dev/null +++ b/tests/pos/interleaving-newline.scala @@ -0,0 +1,11 @@ +import scala.language.experimental.clauseInterleaving + +object newline { + def multipleLines + [T] + (x: T) + [U] + (using (T,U)) + (y: U) + = ??? +} diff --git a/tests/pos/interleaving-overload.scala b/tests/pos/interleaving-overload.scala new file mode 100644 index 000000000000..1902551f9036 --- /dev/null +++ b/tests/pos/interleaving-overload.scala @@ -0,0 +1,20 @@ +import scala.language.experimental.clauseInterleaving + +class A{ + + def f1[T](x: Any)[U] = ??? + def f1[T](x: Int)[U] = ??? + + f1(1) + f1("hello") + + case class B[U](x: Int) + def b[U](x: Int) = B[U](x) + + def f2[T]: [U] => Int => B[U] = [U] => (x: Int) => b[U](x) + + f2(1) + f2[Any](1) + f2[Any][Any](1) + +} diff --git a/tests/pos/interleaving-params.scala b/tests/pos/interleaving-params.scala new file mode 100644 index 000000000000..36963ff2e123 --- /dev/null +++ b/tests/pos/interleaving-params.scala @@ -0,0 +1,8 @@ +import scala.language.experimental.clauseInterleaving + +class Params{ + type U + def foo[T](x: T)[U >: x.type <: T](using U)[L <: List[U]](l: L): L = ??? + def aaa(x: U): U = ??? + def bbb[T <: U](x: U)[U]: U = ??? 
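+  // In bbb, the bound T <: U and the parameter x: U refer to the class member
+  // type U, while the trailing [U] introduces a fresh U used by the result type.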
+} diff --git a/tests/pos/interleaving-signatureCollision.scala b/tests/pos/interleaving-signatureCollision.scala new file mode 100644 index 000000000000..77190284ae6d --- /dev/null +++ b/tests/pos/interleaving-signatureCollision.scala @@ -0,0 +1,6 @@ +import scala.language.experimental.clauseInterleaving +import scala.annotation.targetName + +object signatureCollision: + def f[T](x: T)[U](y: U) = (x,y) + @targetName("g") def f[T](x: T, y: T) = (x,y) diff --git a/tests/pos/interleaving-typeApply.scala b/tests/pos/interleaving-typeApply.scala new file mode 100644 index 000000000000..3c669cc76bfc --- /dev/null +++ b/tests/pos/interleaving-typeApply.scala @@ -0,0 +1,25 @@ +import scala.language.experimental.clauseInterleaving + +object typeApply: + + def f0[T]: [U] => T => T = ??? + def f1[T](using DummyImplicit)[U]: T => T = ??? + def f2[T](using DummyImplicit)[U](): T => T = ??? + def f3[T <: Int](using DummyImplicit)[U <: String](): T => T = ??? + def f4[T <: Int](using DummyImplicit)[U <: String]: T => T = ??? + def f5[T <: Int](using DummyImplicit)[U <: String]: [X <: Unit] => X => X = ??? + def f6[T <: Int](using DummyImplicit)[U <: String](): [X <: Unit] => X => X = ??? + def f7[T <: Int](using DummyImplicit)[U <: String]()[X <: Unit]: X => X = ??? + + @main def test = { + import scala.language.experimental.namedTypeArguments + f0[Int][String] + f1[Int][String] + f2[Int][String]() + f3[Int][String]() + f4[Int][String] + f5[Int][String] + f5[Int][String][Unit] + f6[Int]()[Unit] + f7[Int]()[Unit] + } diff --git a/tests/pos/namedTypeParams.scala b/tests/pos/namedTypeParams.scala index a8c38972838c..388bcfa98bef 100644 --- a/tests/pos/namedTypeParams.scala +++ b/tests/pos/namedTypeParams.scala @@ -1,4 +1,5 @@ import language.experimental.namedTypeArguments + object Test { def f[X, Y](x: X, y: Y): Int = ??? @@ -7,6 +8,14 @@ object Test { f[X = Int, Y = String](1, "") f[X = Int](1, "") f[Y = String](1, "") +} + +object TestInterleaving{ + import language.experimental.clauseInterleaving + def f2[X](using DummyImplicit)[Y](x: X, y: Y): Int = ??? 
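+  // Named type arguments are applied clause by clause: [X = Int][Y = String]
+  // fills each interleaved type clause in turn; an omitted clause is inferred.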
+ + f2[X = Int][Y = String](1, "") + f2[X = Int](1, "") } diff --git a/tests/pos/outdent-dot.scala b/tests/pos/outdent-dot.scala new file mode 100644 index 000000000000..c09bb673743a --- /dev/null +++ b/tests/pos/outdent-dot.scala @@ -0,0 +1,13 @@ +def Block(f: => Int): Int = f + +def bar(): String = + Block: + 2 + 2 + .toString + +def foo(xs: List[Int]) = + xs.map: x => + x + 1 + .filter: x => + x > 0 + println("foo") diff --git a/tests/pos/overrides.scala b/tests/pos/overrides.scala index 97402f773082..146dc06c76a9 100644 --- a/tests/pos/overrides.scala +++ b/tests/pos/overrides.scala @@ -1,13 +1,21 @@ class A[T] { def f(x: T)(y: T = x) = y + + import scala.language.experimental.clauseInterleaving -} + def b[U <: T](x: Int)[V >: T](y: String) = false +} class B extends A[Int] { override def f(x: Int)(y: Int) = y f(2)() + + import scala.language.experimental.clauseInterleaving + + override def b[T <: Int](x: Int)[U >: Int](y: String) = true + } diff --git a/tests/pos/suspend-strawman/choices.scala b/tests/pos/suspend-strawman/choices.scala new file mode 100644 index 000000000000..968c223d9c0b --- /dev/null +++ b/tests/pos/suspend-strawman/choices.scala @@ -0,0 +1,28 @@ +import scala.util.boundary, boundary.Label +import runtime.suspend + +trait Choice: + def choose[A](choices: A*): A + +// the handler +def choices[T](body: Choice ?=> T): Seq[T] = + boundary[Seq[T]]: + given Choice with + def choose[A](choices: A*): A = + suspend[A, Seq[T]](s => choices.flatMap(s.resume)) + Seq(body) + +def choose[A](choices: A*)(using c: Choice): A = c.choose(choices*) + +def TestChoices: Seq[Int] = + choices: + def x = choose(1, -2, -3) + def y = choose("ab", "cde") + val xx = x; + xx + ( + if xx > 0 then + val z = choose(xx / 2, xx * 2) + y.length * z + else y.length + ) + diff --git a/tests/pos/suspend-strawman/generators.scala b/tests/pos/suspend-strawman/generators.scala new file mode 100644 index 000000000000..a890196e6215 --- /dev/null +++ b/tests/pos/suspend-strawman/generators.scala @@ -0,0 +1,67 @@ +import scala.util.boundary +import scala.compiletime.uninitialized +import runtime.suspend + +trait CanProduce[-T]: + def produce(x: T): Unit + +object generate: + + def produce[T](x: T)(using cp: CanProduce[T]): Unit = cp.produce(x) + + def apply[T](body: CanProduce[T] ?=> Unit): Iterator[T] = new: + var nextKnown: Boolean = false + var nextElem: Option[T] = uninitialized + + var step: () => Unit = () => + boundary[Unit]: + given CanProduce[T] with + def produce(x: T): Unit = + nextElem = Some(x) + suspend[Unit, Unit]: k => + step = () => k.resume(()) + body + nextElem = None + + def hasNext: Boolean = + if !nextKnown then { step(); nextKnown = true } + nextElem.isDefined + + def next: T = + require(hasNext) + nextKnown = false + nextElem.get +end generate + +enum Tree[T]: + case Leaf(x: T) + case Inner(xs: List[Tree[T]]) + +def leafs[T](t: Tree[T]): Iterator[T] = + generate: + def recur(t: Tree[T]): Unit = t match + case Tree.Leaf(x) => generate.produce(x) + case Tree.Inner(xs) => xs.foreach(recur) + recur(t) + +object Variant2: + trait Generator[T]: + def nextOption: Option[T] + + def generate[T](body: CanProduce[T] ?=> Unit): Generator[T] = new: + + def nextOption: Option[T] = + step() + + var step: () => Option[T] = () => + boundary: + given CanProduce[T] with + def produce(x: T): Unit = + suspend[Unit, Option[T]]: k => + step = () => k.resume(()) + Some(x) + body + None + end generate + + diff --git a/tests/pos/suspend-strawman/monadic-reflect.scala 
b/tests/pos/suspend-strawman/monadic-reflect.scala
new file mode 100644
index 000000000000..84c5255c2a96
--- /dev/null
+++ b/tests/pos/suspend-strawman/monadic-reflect.scala
@@ -0,0 +1,56 @@
+import scala.util.boundary
+import runtime.suspend
+
+trait Monad[F[_]]:
+
+  /** The unit value for a monad */
+  def pure[A](x: A): F[A]
+
+  extension [A](x: F[A])
+    /** The fundamental composition operation */
+    def flatMap[B](f: A => F[B]): F[B]
+
+    /** The `map` operation can now be defined in terms of `flatMap` */
+    def map[B](f: A => B) = x.flatMap(f.andThen(pure))
+
+end Monad
+
+trait CanReflect[M[_]]:
+  def reflect[R](mr: M[R]): R
+
+trait Monadic[M[_]: Monad]:
+
+  /**
+   * Embedding of pure values into the monad M
+   */
+  def pure[A](a: A): M[A]
+
+  /**
+   * Sequencing of monadic values
+   *
+   * Implementations are required to implement sequencing in a stack-safe
+   * way, that is they either need to implement trampolining on their own
+   * or implement `sequence` as a tail recursive function.
+   *
+   * Actually the type X can be different for every call to f...
+   * It is a type aligned sequence, but for simplicity we do not enforce this
+   * here.
+   */
+  def sequence[X, R](init: M[X])(f: X => Either[M[X], M[R]]): M[R]
+
+  /**
+   * Helper to summon and use an instance of CanReflect[M]
+   */
+  def reflect[R](mr: M[R])(using r: CanReflect[M]): R = r.reflect(mr)
+
+  /**
+   * Reify a computation into a monadic value
+   */
+  def reify[R](prog: CanReflect[M] ?=> R): M[R] =
+    boundary [M[R]]:
+      given CanReflect[M] with
+        def reflect[R2](mr: M[R2]): R2 =
+          suspend [R2, M[R]] (k => mr.flatMap(k.resume))
+      pure(prog)
+
+end Monadic
\ No newline at end of file
diff --git a/tests/pos/suspend-strawman/runtime.scala b/tests/pos/suspend-strawman/runtime.scala
new file mode 100644
index 000000000000..406da4f7dd5e
--- /dev/null
+++ b/tests/pos/suspend-strawman/runtime.scala
@@ -0,0 +1,11 @@
+package runtime
+import scala.util.boundary, boundary.Label
+
+/** A hypothetical API for suspensions. Not yet implemented.
+ *  Suspensions contain a delimited continuation, which can be
+ *  invoked with `resume`.
+ */
+class Suspension[-T, +R]:
+  def resume(arg: T): R = ???
+
+def suspend[T, R](body: Suspension[T, R] => R)(using Label[R]): T = ???
diff --git a/tests/pos/suspend-strawman/simple-futures.scala b/tests/pos/suspend-strawman/simple-futures.scala
new file mode 100644
index 000000000000..0a80a74d49dc
--- /dev/null
+++ b/tests/pos/suspend-strawman/simple-futures.scala
@@ -0,0 +1,53 @@
+package simpleFutures
+
+import scala.collection.mutable.ListBuffer
+import scala.util.boundary, boundary.Label
+import runtime.suspend
+
+object Scheduler:
+  def schedule(task: Runnable): Unit = ???
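+  // Stub: a real scheduler would run the task asynchronously, for instance
+  // on a thread pool; for this compile-only test ??? suffices.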
+
+trait Async:
+  def await[T](f: Future[T]): T
+
+class Future[+T](body: Async ?=> T):
+  private var result: Option[T] = None
+  private var waiting: ListBuffer[T => Unit] = ListBuffer()
+  private def addWaiting(k: T => Unit): Unit = waiting += k
+
+  def await(using a: Async): T = a.await(this)
+
+  private def complete(): Unit =
+    Future.async:
+      val value = body
+      val result = Some(value)
+      for k <- waiting do
+        Scheduler.schedule(() => k(value))
+      waiting.clear()
+
+  Scheduler.schedule(() => complete())
+
+object Future:
+
+  // a handler for Async
+  def async(body: Async ?=> Unit): Unit =
+    boundary [Unit]:
+      given Async with
+        def await[T](f: Future[T]): T = f.result match
+          case Some(x) => x
+          case None => suspend[T, Unit](s => f.addWaiting(s.resume))
+      body
+
+end Future
+
+def Test(x: Future[Int], xs: List[Future[Int]]) =
+  Future:
+    x.await + xs.map(_.await).sum
+
+
+
+
+
+
+
+
diff --git a/tests/pos/t0049.scala b/tests/pos/t0049.scala
index dd866422637f..a453cec9568e 100644
--- a/tests/pos/t0049.scala
+++ b/tests/pos/t0049.scala
@@ -1,3 +1,6 @@
 class C1(x: AnyRef) {};
 
 class C2 extends C1({ class A extends AnyRef {}; (new A) : AnyRef }) {};
+
+class Outer:
+  class C2 extends C1({ class A extends AnyRef {}; (new A) : AnyRef }) {};
diff --git a/tests/pos/t16827.scala b/tests/pos/t16827.scala
new file mode 100644
index 000000000000..17122fd9b580
--- /dev/null
+++ b/tests/pos/t16827.scala
@@ -0,0 +1,9 @@
+// scalac: -Werror
+
+trait Outer[F[_]]:
+  sealed trait Inner
+  trait Inner1 extends Inner
+  def foo(rv: Either[Inner, Int]) =
+    rv match
+      case Right(_) =>
+      case Left(_) =>
diff --git a/tests/pos/t9419.jackson.scala b/tests/pos/t9419.jackson.scala
new file mode 100644
index 000000000000..bf26c7e4c672
--- /dev/null
+++ b/tests/pos/t9419.jackson.scala
@@ -0,0 +1,20 @@
+// from failure in the community project
+// jackson-module-scala
+// in ScalaAnnotationIntrospectorModule.scala:139:12
+
+import scala.language.implicitConversions
+
+trait EnrichedType[X]:
+  def value: X
+
+trait ClassW extends EnrichedType[Class[_]]:
+  def extendsScalaClass = false
+
+class Test:
+  implicit def mkClassW(c: => Class[_]): ClassW = new ClassW:
+    lazy val value = c
+
+  def test1(c1: Class[_]) = c1.extendsScalaClass // ok: c1 is a value
+  def test2(c2: Class[_]) = mkClassW(c2).extendsScalaClass // ok: c2 is a value
+  // c1 in test1 goes through adaptation to find the extension method and then gains the wildcard capture cast
+  // c2 in test2 goes straight to typedArg, as it's already an arg, so it never gets wildcard captured
diff --git a/tests/pos/t9419.specs2.scala b/tests/pos/t9419.specs2.scala
new file mode 100644
index 000000000000..fe4a44312594
--- /dev/null
+++ b/tests/pos/t9419.specs2.scala
@@ -0,0 +1,13 @@
+// Minimisation of how the fix for t9419 affected specs2
+class MustExpectable[T](tm: () => T):
+  def must_==(other: => Any) = tm() == other
+
+class Foo
+
+object Main:
+  implicit def theValue[T](t: => T): MustExpectable[T] = new MustExpectable(() => t)
+  def main(args: Array[String]): Unit =
+    val cls = classOf[Foo]
+    val instance = new Foo()
+    val works = cls must_== cls
+    val fails = instance.getClass must_== cls
diff --git a/tests/pos/this-implicit-scope.scala b/tests/pos/this-implicit-scope.scala
new file mode 100644
index 000000000000..ead235e53bce
--- /dev/null
+++ b/tests/pos/this-implicit-scope.scala
@@ -0,0 +1,6 @@
+class Foo[+T]
+class Elem:
+  def one(a: Elem, x: Foo[a.type]): Int = x.ext
+  def two(x: Foo[Elem.this.type]): Int = x.ext
+object Elem:
+  extension (x: Foo[Elem]) def ext: Int
= 1 diff --git a/tests/rewrites/i17399.check b/tests/rewrites/i17399.check new file mode 100644 index 000000000000..0b7eef78db4f --- /dev/null +++ b/tests/rewrites/i17399.check @@ -0,0 +1,7 @@ +def test[T](body:T):T = body + +object Test { + test { + println("test 1") + } +} diff --git a/tests/rewrites/i17399.scala b/tests/rewrites/i17399.scala new file mode 100644 index 000000000000..965d17c4f718 --- /dev/null +++ b/tests/rewrites/i17399.scala @@ -0,0 +1,5 @@ +def test[T](body:T):T = body + +object Test: + test: + println("test 1") diff --git a/tests/run-custom-args/captures/colltest5/CollectionStrawManCC5_1.scala b/tests/run-custom-args/captures/colltest5/CollectionStrawManCC5_1.scala index 796dc62bdf22..63aee49f8454 100644 --- a/tests/run-custom-args/captures/colltest5/CollectionStrawManCC5_1.scala +++ b/tests/run-custom-args/captures/colltest5/CollectionStrawManCC5_1.scala @@ -18,19 +18,21 @@ object CollectionStrawMan5 { /* ------------ Base Traits -------------------------------- */ + type AnyIterableOnce[A] = IterableOnce[A]^ + /** Iterator can be used only once */ trait IterableOnce[+A] { - this: {*} IterableOnce[A] => - def iterator: {this} Iterator[A] + this: AnyIterableOnce[A] => + def iterator: Iterator[A]^{this} } /** Base trait for instances that can construct a collection from an iterable */ trait FromIterable { - type C[X] <: {*} Iterable[X] - def fromIterable[B](it: {*} Iterable[B]): {it} C[B] + type C[X] <: Iterable[X]^ + def fromIterable[B](it: Iterable[B]^): C[B]^{it} } - type FromIterableOf[+CC[X] <: {*} Iterable[X]] = FromIterable { + type FromIterableOf[+CC[X] <: Iterable[X]^] = FromIterable { type C[X] <: CC[X] } @@ -42,9 +44,9 @@ object CollectionStrawMan5 { /** Base trait for generic collections */ trait Iterable[+A] extends IterableOnce[A] with IterableLike[A] { - this: {*} Iterable[A] => - type C[X] <: {*} Iterable[X] - protected def coll: {this} Iterable[A] = this + this: Iterable[A]^ => + type C[X] <: Iterable[X]^ + protected def coll: Iterable[A]^{this} = this def knownLength: Int = -1 } @@ -58,7 +60,7 @@ object CollectionStrawMan5 { trait SeqFactory extends IterableFactory { type C[X] <: Seq[X] - def fromIterable[B](it: {*} Iterable[B]): C[B] + def fromIterable[B](it: Iterable[B]^): C[B] } /** Base trait for strict collections */ @@ -78,7 +80,7 @@ object CollectionStrawMan5 { def +=(x: A): this.type def result: To - def ++=(xs: {*} IterableOnce[A]): this.type = { + def ++=(xs: IterableOnce[A]^): this.type = { xs.iterator.foreach(+=) this } @@ -103,7 +105,7 @@ object CollectionStrawMan5 { with IterablePolyTransforms[A] with IterableMonoTransforms[A] { // sound bcs of VarianceNote type Repr = C[A] @uncheckedVariance - protected[this] def fromLikeIterable(coll: {*} Iterable[A] @uncheckedVariance): {coll} Repr @uncheckedVariance = + protected[this] def fromLikeIterable(coll: Iterable[A] @uncheckedVariance ^): Repr @uncheckedVariance ^{coll} = fromIterable(coll) } @@ -112,50 +114,50 @@ object CollectionStrawMan5 { extends IterableLike[A], SeqMonoTransforms[A], SeqPolyTransforms[A]: // sound bcs of VarianceNote this: SeqLike[A] => type C[X] <: Seq[X] - def fromIterable[B](coll: {*} Iterable[B]): C[B] - override protected[this] def fromLikeIterable(coll: {*} Iterable[A] @uncheckedVariance): Repr = + def fromIterable[B](coll: Iterable[B]^): C[B] + override protected[this] def fromLikeIterable(coll: Iterable[A] @uncheckedVariance ^): Repr = fromIterable(coll) trait IterableOps[+A] extends Any { - this: {*} IterableOps[A] => - def iterator: {this} Iterator[A] + this: 
IterableOps[A]^ => + def iterator: Iterator[A]^{this} def foreach(f: A => Unit): Unit = iterator.foreach(f) def foldLeft[B](z: B)(op: (B, A) => B): B = iterator.foldLeft(z)(op) def foldRight[B](z: B)(op: (A, B) => B): B = iterator.foldRight(z)(op) def indexWhere(p: A => Boolean): Int = iterator.indexWhere(p) def isEmpty: Boolean = !iterator.hasNext def head: A = iterator.next() - def view: {this} View[A] = View.fromIterator(iterator) + def view: View[A]^{this} = View.fromIterator(iterator) } trait IterableMonoTransforms[+A] extends Any { - this: {*} IterableMonoTransforms[A] => + this: IterableMonoTransforms[A]^ => type Repr - protected def coll: {this} Iterable[A] - protected[this] def fromLikeIterable(coll: {*} Iterable[A] @uncheckedVariance): {coll} Repr - def filter(p: A => Boolean): {this, p} Repr = fromLikeIterable(View.Filter(coll, p)) + protected def coll: Iterable[A]^{this} + protected[this] def fromLikeIterable(coll: Iterable[A] @uncheckedVariance ^): Repr^{coll} + def filter(p: A => Boolean): Repr^{this, p} = fromLikeIterable(View.Filter(coll, p)) - def partition(p: A => Boolean): ({this, p} Repr, {this, p} Repr) = { + def partition(p: A => Boolean): (Repr^{this, p}, Repr^{this, p}) = { val pn = View.Partition(coll, p) (fromLikeIterable(pn.left), fromLikeIterable(pn.right)) } - def drop(n: Int): {this} Repr = fromLikeIterable(View.Drop(coll, n)) + def drop(n: Int): Repr^{this} = fromLikeIterable(View.Drop(coll, n)) - def to[C[X] <: Iterable[X]](fi: FromIterableOf[C]): {this} C[A @uncheckedVariance] = + def to[C[X] <: Iterable[X]](fi: FromIterableOf[C]): C[A @uncheckedVariance]^{this} = // variance seems sound because `to` could just as well have been added // as a decorator. We should investigate this further to be sure. fi.fromIterable(coll) } trait IterablePolyTransforms[+A] extends Any { - this: {*} IterablePolyTransforms[A] => + this: IterablePolyTransforms[A]^ => type C[A] - protected def coll: {this} Iterable[A] - def fromIterable[B](coll: {*} Iterable[B]): {coll} C[B] - def map[B](f: A => B): {this, f} C[B] = fromIterable(View.Map(coll, f)) - def flatMap[B](f: A => {*} IterableOnce[B]): {this, f} C[B] = fromIterable(View.FlatMap(coll, f)) - def ++[B >: A](xs: {*} IterableOnce[B]): {this, xs} C[B] = fromIterable(View.Concat(coll, xs)) - def zip[B](xs: {*} IterableOnce[B]): {this, xs} C[(A @uncheckedVariance, B)] = fromIterable(View.Zip(coll, xs)) + protected def coll: Iterable[A]^{this} + def fromIterable[B](coll: Iterable[B]^): C[B]^{coll} + def map[B](f: A => B): C[B]^{this, f} = fromIterable(View.Map(coll, f)) + def flatMap[B](f: A => IterableOnce[B]^): C[B]^{this, f} = fromIterable(View.FlatMap(coll, f)) + def ++[B >: A](xs: IterableOnce[B]^): C[B]^{this, xs} = fromIterable(View.Concat(coll, xs)) + def zip[B](xs: IterableOnce[B]^): C[(A @uncheckedVariance, B)]^{this, xs} = fromIterable(View.Zip(coll, xs)) // sound bcs of VarianceNote } @@ -167,7 +169,7 @@ object CollectionStrawMan5 { while (it.hasNext) xs = new Cons(it.next(), xs) fromLikeIterable(xs) - override protected[this] def fromLikeIterable(coll: {*} Iterable[A] @uncheckedVariance): Repr + override protected[this] def fromLikeIterable(coll: Iterable[A] @uncheckedVariance ^): Repr override def filter(p: A => Boolean): Repr = fromLikeIterable(View.Filter(coll, p)) @@ -186,11 +188,11 @@ object CollectionStrawMan5 { trait SeqPolyTransforms[+A] extends Any, IterablePolyTransforms[A]: this: SeqPolyTransforms[A] => type C[A] - override def fromIterable[B](coll: {*} Iterable[B]): C[B] + override def 
fromIterable[B](coll: Iterable[B]^): C[B] override def map[B](f: A => B): C[B] = fromIterable(View.Map(coll, f)) - override def flatMap[B](f: A => {*} IterableOnce[B]): C[B] = fromIterable(View.FlatMap(coll, f)) - override def ++[B >: A](xs: {*} IterableOnce[B]): C[B] = fromIterable(View.Concat(coll, xs)) - override def zip[B](xs: {*} IterableOnce[B]): C[(A @uncheckedVariance, B)] = fromIterable(View.Zip(coll, xs)) + override def flatMap[B](f: A => IterableOnce[B]^): C[B] = fromIterable(View.FlatMap(coll, f)) + override def ++[B >: A](xs: IterableOnce[B]^): C[B] = fromIterable(View.Concat(coll, xs)) + override def zip[B](xs: IterableOnce[B]^): C[(A @uncheckedVariance, B)] = fromIterable(View.Zip(coll, xs)) /* --------- Concrete collection types ------------------------------- */ @@ -206,7 +208,7 @@ object CollectionStrawMan5 { def hasNext = !current.isEmpty def next() = { val r = current.head; current = current.tail; r } } - def fromIterable[B](c: {*} Iterable[B]): List[B] = List.fromIterable(c) + def fromIterable[B](c: Iterable[B]^): List[B] = List.fromIterable(c) def apply(i: Int): A = { require(!isEmpty) if (i == 0) head else tail.apply(i - 1) @@ -217,7 +219,7 @@ object CollectionStrawMan5 { def ++:[B >: A](prefix: List[B]): List[B] = if (prefix.isEmpty) this else Cons(prefix.head, prefix.tail ++: this) - override def ++[B >: A](xs: {*} IterableOnce[B]): List[B] = xs match { + override def ++[B >: A](xs: IterableOnce[B]^): List[B] = xs match { case xs: List[B] => this ++: xs case _ => fromIterable(View.Concat(this, xs)) } @@ -240,7 +242,7 @@ object CollectionStrawMan5 { object List extends SeqFactory { type C[X] = List[X] - def fromIterable[B](coll: {*} Iterable[B]): List[B] = coll match { + def fromIterable[B](coll: Iterable[B]^): List[B] = coll match { case coll: List[B] => coll case _ => ListBuffer.fromIterable(coll).result } @@ -252,7 +254,7 @@ object CollectionStrawMan5 { private var first, last: List[A] = Nil private var aliased = false def iterator = first.iterator - def fromIterable[B](coll: {*} Iterable[B]): ListBuffer[B] = ListBuffer.fromIterable(coll) + def fromIterable[B](coll: Iterable[B]^): ListBuffer[B] = ListBuffer.fromIterable(coll) def apply(i: Int) = first.apply(i) def length = first.length @@ -286,7 +288,7 @@ object CollectionStrawMan5 { object ListBuffer extends SeqFactory { type C[X] = ListBuffer[X] - def fromIterable[B](coll: {*} Iterable[B]): ListBuffer[B] = new ListBuffer[B] ++= coll + def fromIterable[B](coll: Iterable[B]^): ListBuffer[B] = new ListBuffer[B] ++= coll } /** Concrete collection type: ArrayBuffer */ @@ -303,7 +305,7 @@ object CollectionStrawMan5 { override def knownLength = length override def view = new ArrayBufferView(elems, start, end) def iterator = view.iterator - def fromIterable[B](it: {*} Iterable[B]): ArrayBuffer[B] = + def fromIterable[B](it: Iterable[B]^): ArrayBuffer[B] = ArrayBuffer.fromIterable(it) def +=(elem: A): this.type = { if (end == elems.length) { @@ -324,7 +326,7 @@ object CollectionStrawMan5 { } def result = this def trimStart(n: Int): Unit = start += (n max 0) - override def ++[B >: A](xs: {*} IterableOnce[B]): ArrayBuffer[B] = xs match { + override def ++[B >: A](xs: IterableOnce[B]^): ArrayBuffer[B] = xs match { case xs: ArrayBuffer[B] @unchecked => val elems = new Array[AnyRef](length + xs.length) Array.copy(this.elems, this.start, elems, 0, this.length) @@ -338,7 +340,7 @@ object CollectionStrawMan5 { object ArrayBuffer extends SeqFactory { type C[X] = ArrayBuffer[X] - def fromIterable[B](coll: {*} Iterable[B]): 
ArrayBuffer[B] = + def fromIterable[B](coll: Iterable[B]^): ArrayBuffer[B] = if (coll.knownLength >= 0) { val elems = new Array[AnyRef](coll.knownLength) val it = coll.iterator @@ -368,12 +370,12 @@ object CollectionStrawMan5 { type C[X] = List[X] protected def coll = new StringView(s) def iterator = coll.iterator - protected def fromLikeIterable(coll: {*} Iterable[Char]): String = { + protected def fromLikeIterable(coll: Iterable[Char]^): String = { val sb = new StringBuilder for (ch <- coll) sb.append(ch) sb.toString } - def fromIterable[B](coll: {*} Iterable[B]): List[B] = List.fromIterable(coll) + def fromIterable[B](coll: Iterable[B]^): List[B] = List.fromIterable(coll) def map(f: Char => Char): String = { val sb = new StringBuilder for (ch <- s) sb.append(f(ch)) @@ -384,7 +386,7 @@ object CollectionStrawMan5 { for (ch <- s) sb.append(f(ch)) sb.toString } - def ++(xs: {*} IterableOnce[Char]): String = { + def ++(xs: IterableOnce[Char]^): String = { val sb = new StringBuilder(s) for (ch <- xs.iterator) sb.append(ch) sb.toString @@ -402,10 +404,10 @@ object CollectionStrawMan5 { /** Concrete collection type: View */ trait View[+A] extends Iterable[A] with IterableLike[A] { - this: {*} View[A] => - type C[X] = {this} View[X] + this: View[A]^ => + type C[X] = View[X]^{this} override def view: this.type = this - override def fromIterable[B](c: {*} Iterable[B]): {this, c} View[B] = { + override def fromIterable[B](c: Iterable[B]^): View[B]^{this, c} = { c match { case c: View[B] => c case _ => View.fromIterator(c.iterator) @@ -431,8 +433,8 @@ object CollectionStrawMan5 { } object View { - def fromIterator[A](it: => {*} Iterator[A]): {it} View[A] = new View[A]: - def iterator: {this} Iterator[A] = it + def fromIterator[A](it: => Iterator[A]^): View[A]^{it} = new View[A]: + def iterator: Iterator[A]^{this} = it case object Empty extends View[Nothing] { def iterator: Iterator[Nothing] = Iterator.empty @@ -444,43 +446,43 @@ object CollectionStrawMan5 { override def knownLength = xs.length } - case class Filter[A](val underlying: {*} Iterable[A], p: A => Boolean) extends View[A] { - this: {underlying, p} Filter[A] => - def iterator: {this} Iterator[A] = underlying.iterator.filter(p) + case class Filter[A](val underlying: Iterable[A]^, p: A => Boolean) extends View[A] { + this: Filter[A]^{underlying, p} => + def iterator: Iterator[A]^{this} = underlying.iterator.filter(p) } - case class Partition[A](val underlying: {*} Iterable[A], p: A => Boolean) { - self: {underlying, p} Partition[A] => + case class Partition[A](val underlying: Iterable[A]^, p: A => Boolean) { + self: Partition[A]^{underlying, p} => class Partitioned(expected: Boolean) extends View[A]: - this: {self} Partitioned => - def iterator: {this} Iterator[A] = + this: Partitioned^{self} => + def iterator: Iterator[A]^{this} = underlying.iterator.filter((x: A) => p(x) == expected) - val left: {self} Partitioned = Partitioned(true) - val right: {self} Partitioned = Partitioned(false) + val left: Partitioned^{self} = Partitioned(true) + val right: Partitioned^{self} = Partitioned(false) } - case class Drop[A](underlying: {*} Iterable[A], n: Int) extends View[A] { - this: {underlying} Drop[A] => - def iterator: {this} Iterator[A] = underlying.iterator.drop(n) + case class Drop[A](underlying: Iterable[A]^, n: Int) extends View[A] { + this: Drop[A]^{underlying} => + def iterator: Iterator[A]^{this} = underlying.iterator.drop(n) override def knownLength = if (underlying.knownLength >= 0) underlying.knownLength - n max 0 else -1 } - case class 
Map[A, B](underlying: {*} Iterable[A], f: A => B) extends View[B] { - this: {underlying, f} Map[A, B] => - def iterator: {this} Iterator[B] = underlying.iterator.map(f) + case class Map[A, B](underlying: Iterable[A]^, f: A => B) extends View[B] { + this: Map[A, B]^{underlying, f} => + def iterator: Iterator[B]^{this} = underlying.iterator.map(f) override def knownLength = underlying.knownLength } - case class FlatMap[A, B](underlying: {*} Iterable[A], f: A => {*} IterableOnce[B]) extends View[B] { - this: {underlying, f} FlatMap[A, B] => - def iterator: {this} Iterator[B] = underlying.iterator.flatMap(f) + case class FlatMap[A, B](underlying: Iterable[A]^, f: A => IterableOnce[B]^) extends View[B] { + this: FlatMap[A, B]^{underlying, f} => + def iterator: Iterator[B]^{this} = underlying.iterator.flatMap(f) } - case class Concat[A](underlying: {*} Iterable[A], other: {*} IterableOnce[A]) extends View[A] { - this: {underlying, other} Concat[A] => - def iterator: {this} Iterator[A] = underlying.iterator ++ other + case class Concat[A](underlying: Iterable[A]^, other: IterableOnce[A]^) extends View[A] { + this: Concat[A]^{underlying, other} => + def iterator: Iterator[A]^{this} = underlying.iterator ++ other override def knownLength = other match { case other: Iterable[_] if underlying.knownLength >= 0 && other.knownLength >= 0 => underlying.knownLength + other.knownLength @@ -489,9 +491,9 @@ object CollectionStrawMan5 { } } - case class Zip[A, B](underlying: {*} Iterable[A], other: {*} IterableOnce[B]) extends View[(A, B)] { - this: {underlying, other} Zip[A, B] => - def iterator: {this} Iterator[(A, B)] = underlying.iterator.zip(other) + case class Zip[A, B](underlying: Iterable[A]^, other: IterableOnce[B]^) extends View[(A, B)] { + this: Zip[A, B]^{underlying, other} => + def iterator: Iterator[(A, B)]^{this} = underlying.iterator.zip(other) override def knownLength = other match { case other: Iterable[_] if underlying.knownLength >= 0 && other.knownLength >= 0 => underlying.knownLength min other.knownLength @@ -504,7 +506,7 @@ object CollectionStrawMan5 { /* ---------- Iterators ---------------------------------------------------*/ /** A core Iterator class */ - trait Iterator[+A] extends IterableOnce[A] { self: {*} Iterator[A] => + trait Iterator[+A] extends IterableOnce[A] { self: Iterator[A]^ => def hasNext: Boolean def next(): A def iterator: this.type = this @@ -522,7 +524,7 @@ object CollectionStrawMan5 { } -1 } - def filter(p: A => Boolean): {this, p} Iterator[A] = new Iterator[A] { + def filter(p: A => Boolean): Iterator[A]^{this, p} = new Iterator[A] { private var hd: A = compiletime.uninitialized private var hdDefined: Boolean = false @@ -544,13 +546,13 @@ object CollectionStrawMan5 { else Iterator.empty.next() } - def map[B](f: A => B): {this, f} Iterator[B] = new Iterator[B] { + def map[B](f: A => B): Iterator[B]^{this, f} = new Iterator[B] { def hasNext = self.hasNext def next() = f(self.next()) } - def flatMap[B](f: A => {*} IterableOnce[B]): {this, f} Iterator[B] = new Iterator[B] { - private var myCurrent: {this} Iterator[B] = Iterator.empty + def flatMap[B](f: A => IterableOnce[B]^): Iterator[B]^{this, f} = new Iterator[B] { + private var myCurrent: Iterator[B]^{this} = Iterator.empty private def current = { while (!myCurrent.hasNext && self.hasNext) myCurrent = f(self.next()).iterator @@ -559,8 +561,8 @@ object CollectionStrawMan5 { def hasNext = current.hasNext def next() = current.next() } - def ++[B >: A](xs: {*} IterableOnce[B]): {this, xs} Iterator[B] = new 
Iterator[B] { - private var myCurrent: {self, xs} Iterator[B] = self + def ++[B >: A](xs: IterableOnce[B]^): Iterator[B]^{this, xs} = new Iterator[B] { + private var myCurrent: Iterator[B]^{self, xs} = self private var first = true private def current = { if (!myCurrent.hasNext && first) { @@ -572,7 +574,7 @@ object CollectionStrawMan5 { def hasNext = current.hasNext def next() = current.next() } - def drop(n: Int): {this} Iterator[A] = { + def drop(n: Int): Iterator[A]^{this} = { var i = 0 while (i < n && hasNext) { next() @@ -580,7 +582,7 @@ object CollectionStrawMan5 { } this } - def zip[B](that: {*} IterableOnce[B]): {this, that} Iterator[(A, B)] = new Iterator[(A, B)] { + def zip[B](that: IterableOnce[B]^): Iterator[(A, B)]^{this, that} = new Iterator[(A, B)] { val thatIterator = that.iterator def hasNext = self.hasNext && thatIterator.hasNext def next() = (self.next(), thatIterator.next()) diff --git a/tests/run-custom-args/captures/colltest5/Test_2.scala b/tests/run-custom-args/captures/colltest5/Test_2.scala index 8c070439db80..fbb22039c327 100644 --- a/tests/run-custom-args/captures/colltest5/Test_2.scala +++ b/tests/run-custom-args/captures/colltest5/Test_2.scala @@ -5,7 +5,7 @@ object Test { import colltest5.strawman.collections.* import CollectionStrawMan5.* - def seqOps(xs: Seq[Int]) = { // try with {*} Seq[Int] + def seqOps(xs: Seq[Int]) = { // try with Seq[Int]^{cap} val strPlusInt: (String, Int) => String = _ + _ val intPlusStr: (Int, String) => String = _ + _ val isEven: Int => Boolean = _ % 2 == 0 @@ -61,7 +61,7 @@ object Test { println(xs16) } - def viewOps(xs: {*} View[Int]) = { + def viewOps(xs: View[Int]^{cap}) = { val strPlusInt: (String, Int) => String = _ + _ val intPlusStr: (Int, String) => String = _ + _ val isEven: Int => Boolean = _ % 2 == 0 @@ -78,27 +78,27 @@ object Test { val x5 = xs.to(List) val y5: List[Int] = x5 val (xs6, xs7) = xs.partition(isEven) - val ys6: {xs6, isEven} View[Int] = xs6 - val ys7: {xs7, isEven} View[Int] = xs7 + val ys6: View[Int]^{xs6, isEven} = xs6 + val ys7: View[Int]^{xs7, isEven} = xs7 val (xs6a, xs7a) = xs.partition(_ % 2 == 0) - val ys6a: {xs6} View[Int] = xs6 - val ys7a: {xs7} View[Int] = xs7 + val ys6a: View[Int]^{xs6} = xs6 + val ys7a: View[Int]^{xs7} = xs7 val xs8 = xs.drop(2) - val ys8: {xs8} View[Int] = xs8 + val ys8: View[Int]^{xs8} = xs8 val xs9 = xs.map(isNonNeg) - val ys9: {xs9} View[Boolean] = xs9 + val ys9: View[Boolean]^{xs9} = xs9 val xs10 = xs.flatMap(flips) - val ys10: {xs10} View[Int] = xs10 + val ys10: View[Int]^{xs10} = xs10 val xs11 = xs ++ xs - val ys11: {xs11} View[Int] = xs11 + val ys11: View[Int]^{xs11} = xs11 val xs12 = xs ++ Nil - val ys12: {xs12} View[Int] = xs12 + val ys12: View[Int]^{xs12} = xs12 val xs13 = Nil ++ xs val ys13: List[Int] = xs13 val xs14 = xs ++ Cons("a", Nil) - val ys14: {xs14} View[Any] = xs14 + val ys14: View[Any]^{xs14} = xs14 val xs15 = xs.zip(xs9) - val ys15: {xs15} View[(Int, Boolean)] = xs15 + val ys15: View[(Int, Boolean)]^{xs15} = xs15 println("-------") println(x1) println(x2) diff --git a/tests/run-custom-args/captures/minicheck.scala b/tests/run-custom-args/captures/minicheck.scala index 344e021493e5..bdc591580482 100644 --- a/tests/run-custom-args/captures/minicheck.scala +++ b/tests/run-custom-args/captures/minicheck.scala @@ -83,7 +83,7 @@ abstract class Ctx: def run: Run def detached: DetachedContext -type Context = {*} Ctx +type Context = Ctx^ abstract class DetachedContext extends Ctx: def outer: DetachedContext @@ -110,9 +110,9 @@ object NoContext extends 
FreshCtx(-1): owner = NoSymbol scope = EmptyScope -type FreshContext = {*} FreshCtx +type FreshContext = FreshCtx^ -inline def ctx(using c: Context): {c} Ctx = c +inline def ctx(using c: Context): Ctx^{c} = c // !cc! it does not work if ctxStack is an Array[FreshContext] instead. var ctxStack = Array.tabulate(16)(new FreshCtx(_)) diff --git a/tests/run-custom-args/erased/erased-15.scala b/tests/run-custom-args/erased/erased-15.scala index b879ee4c54d8..02b70f9125d6 100644 --- a/tests/run-custom-args/erased/erased-15.scala +++ b/tests/run-custom-args/erased/erased-15.scala @@ -1,3 +1,5 @@ +import scala.runtime.ErasedFunction + object Test { def main(args: Array[String]): Unit = { @@ -10,7 +12,7 @@ object Test { } } -class Foo extends ErasedFunction1[Int, Int] { +class Foo extends ErasedFunction { def apply(erased x: Int): Int = { println("Foo.apply") 42 diff --git a/tests/run-custom-args/erased/erased-27.check b/tests/run-custom-args/erased/erased-27.check index 4413863feead..1c255dd5419f 100644 --- a/tests/run-custom-args/erased/erased-27.check +++ b/tests/run-custom-args/erased/erased-27.check @@ -1,3 +1,2 @@ block -x foo diff --git a/tests/run-custom-args/erased/erased-28.check b/tests/run-custom-args/erased/erased-28.check index 85733f6db2d7..3bd1f0e29744 100644 --- a/tests/run-custom-args/erased/erased-28.check +++ b/tests/run-custom-args/erased/erased-28.check @@ -1,4 +1,2 @@ -x foo -x bar diff --git a/tests/run-custom-args/erased/erased-class-are-erased.check b/tests/run-custom-args/erased/erased-class-are-erased.check new file mode 100644 index 000000000000..f64f5d8d85ac --- /dev/null +++ b/tests/run-custom-args/erased/erased-class-are-erased.check @@ -0,0 +1 @@ +27 diff --git a/tests/run-custom-args/erased/erased-class-are-erased.scala b/tests/run-custom-args/erased/erased-class-are-erased.scala new file mode 100644 index 000000000000..b48e0265c521 --- /dev/null +++ b/tests/run-custom-args/erased/erased-class-are-erased.scala @@ -0,0 +1,14 @@ +object Test { + erased class Erased() { + println("Oh no!!!") + } + + def f(x: Erased, y: Int = 0): Int = y + 5 + + def g() = Erased() + + def main(args: Array[String]) = + val y = Erased() + val z = 10 + println(f(Erased()) + z + f(g(), 7)) +} diff --git a/tests/run-custom-args/erased/lambdas.scala b/tests/run-custom-args/erased/lambdas.scala new file mode 100644 index 000000000000..4c1746283099 --- /dev/null +++ b/tests/run-custom-args/erased/lambdas.scala @@ -0,0 +1,38 @@ +// lambdas should parse and work + +type F = (erased Int, String) => String +type S = (Int, erased String) => Int + +def useF(f: F) = f(5, "a") +def useS(f: S) = f(5, "a") + +val ff: F = (erased x, y) => y + +val fs: S = (x, erased y) => x +val fsExpl = (x: Int, erased y: String) => x + +// contextual lambdas should work + +type FC = (Int, erased String) ?=> Int + +def useCtx(f: FC) = f(using 5, "a") + +val fCv: FC = (x, erased y) ?=> x +val fCvExpl = (x: Int, erased y: String) ?=> x + +// nested lambdas should work + +val nested: Int => (String, erased Int) => FC = a => (_, erased _) => (c, erased d) ?=> a + c + +@main def Test() = + assert("a" == useF(ff)) + + assert(5 == useS(fs)) + assert(5 == useS(fsExpl)) + assert(5 == useS { (x, erased y) => x }) + + assert(5 == useCtx(fCv)) + assert(5 == useCtx(fCvExpl)) + assert(5 == useCtx { (x, erased y) ?=> x }) + + assert(6 == useCtx(nested(1)("b", 2))) diff --git a/tests/run-custom-args/erased/quotes-add-erased.check b/tests/run-custom-args/erased/quotes-add-erased.check new file mode 100644 index 000000000000..d00491fd7e5b 
--- /dev/null +++ b/tests/run-custom-args/erased/quotes-add-erased.check @@ -0,0 +1 @@ +1 diff --git a/tests/run-custom-args/erased/quotes-add-erased/Macro_1.scala b/tests/run-custom-args/erased/quotes-add-erased/Macro_1.scala new file mode 100644 index 000000000000..66f8475da96d --- /dev/null +++ b/tests/run-custom-args/erased/quotes-add-erased/Macro_1.scala @@ -0,0 +1,26 @@ +import scala.annotation.MacroAnnotation +import scala.annotation.internal.ErasedParam +import scala.quoted._ + +class NewAnnotation extends scala.annotation.Annotation + +class erasedParamsMethod extends MacroAnnotation: + def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + import quotes.reflect._ + tree match + case ClassDef(name, ctr, parents, self, body) => + val erasedInt = AnnotatedType(TypeRepr.of[Int], '{ new ErasedParam }.asTerm) + val methType = MethodType(List("x", "y"))(_ => List(erasedInt, TypeRepr.of[Int]), _ => TypeRepr.of[Int]) + + assert(methType.hasErasedParams) + assert(methType.erasedParams == List(true, false)) + + val methSym = Symbol.newMethod(tree.symbol, "takesErased", methType, Flags.EmptyFlags, Symbol.noSymbol) + val methDef = DefDef(methSym, _ => Some(Literal(IntConstant(1)))) + + val clsDef = ClassDef.copy(tree)(name, ctr, parents, self, methDef :: body) + + List(clsDef) + case _ => + report.error("Annotation only supports `class`") + List(tree) diff --git a/tests/run-custom-args/erased/quotes-add-erased/Test_2.scala b/tests/run-custom-args/erased/quotes-add-erased/Test_2.scala new file mode 100644 index 000000000000..107fa0833e95 --- /dev/null +++ b/tests/run-custom-args/erased/quotes-add-erased/Test_2.scala @@ -0,0 +1,12 @@ +import scala.language.experimental.erasedDefinitions + +class TakesErased { + def takesErased(erased x: Int, y: Int): Int = ??? 
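// Editorial note, not part of the patch: the `@erasedParamsMethod` macro
// annotation applied below builds a `MethodType` whose first parameter type is
// annotated with `ErasedParam`, then uses `Symbol.newMethod` to install a
// concrete `takesErased` on `Foo` that returns the literal 1. That is why
// `foo.takesErased(1, 2)` prints `1`, matching quotes-add-erased.check.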
+} + +@erasedParamsMethod class Foo extends TakesErased + +@main def Test() = + val foo = Foo() + val v = foo.takesErased(1, 2) + println(v) diff --git a/tests/run-custom-args/erased/quotes-reflection.check b/tests/run-custom-args/erased/quotes-reflection.check new file mode 100644 index 000000000000..838479e0b7af --- /dev/null +++ b/tests/run-custom-args/erased/quotes-reflection.check @@ -0,0 +1,10 @@ +method : () isGiven=false isImplicit=false erasedArgs=List() +method m1: (i: scala.Int) isGiven=true isImplicit=false erasedArgs=List(false) +method m2: (i: scala.Int) isGiven=false isImplicit=false erasedArgs=List(true) +method m3: (i: scala.Int, j: scala.Int) isGiven=false isImplicit=false erasedArgs=List(false, true) +method m4: (i: EC) isGiven=false isImplicit=false erasedArgs=List(true) +val l1: scala.ContextFunction1[scala.Int, scala.Int] +val l2: scala.runtime.ErasedFunction with apply: (x$0: scala.Int @scala.annotation.internal.ErasedParam) isImplicit=false erasedParams=List(true) +val l3: scala.runtime.ErasedFunction with apply: (x$0: scala.Int @scala.annotation.internal.ErasedParam) isImplicit=true erasedParams=List(true) +val l4: scala.runtime.ErasedFunction with apply: (x$0: scala.Int, x$1: scala.Int @scala.annotation.internal.ErasedParam) isImplicit=false erasedParams=List(false, true) +val l5: scala.runtime.ErasedFunction with apply: (x$0: EC @scala.annotation.internal.ErasedParam) isImplicit=false erasedParams=List(true) diff --git a/tests/run-custom-args/erased/quotes-reflection/Macros_1.scala b/tests/run-custom-args/erased/quotes-reflection/Macros_1.scala new file mode 100644 index 000000000000..f7b1187433f0 --- /dev/null +++ b/tests/run-custom-args/erased/quotes-reflection/Macros_1.scala @@ -0,0 +1,35 @@ +import scala.quoted.* + +inline def inspect[A]: String = + ${ inspect2[A] } + +def inspect2[A: Type](using Quotes): Expr[String] = { + import quotes.reflect.* + + val methods = TypeRepr.of[A].typeSymbol.declarations + val names = methods.map { m => + m.tree match + case dd @ DefDef(name, params, r, body) => + val paramStr = + params.map { + case ps: TermParamClause => + val params = ps.params.map(p => s"${p.name}: ${p.tpt.show}").mkString("(", ", ", ")") + s"$params isGiven=${ps.isGiven} isImplicit=${ps.isImplicit} erasedArgs=${ps.erasedArgs}" + case ps: TypeParamClause => ps.params.map(_.show).mkString("[", ", ", "]") + }.mkString("") + s"method $name: $paramStr" + case vd @ ValDef(name, tpt, body) => + tpt.tpe match + case Refinement(parent, "apply", tpe: MethodType) if parent == defn.ErasedFunctionClass.typeRef => + assert(tpt.tpe.isErasedFunctionType) + + val params = tpe.paramNames.zip(tpe.paramTypes).map((n, t) => s"$n: ${t.show}").mkString("(", ", ", ")") + s"val $name: ${parent.show} with apply: ${params} isImplicit=${tpe.isImplicit} erasedParams=${tpe.erasedParams}" + case _ => + s"val $name: ${tpt.show}" + case td @ TypeDef(name, tpt) => s"type $name: ${tpt.show}" + case _ => s"something else: $m" + } + + Expr(names.mkString("\n")) +} diff --git a/tests/run-custom-args/erased/quotes-reflection/Test_2.scala b/tests/run-custom-args/erased/quotes-reflection/Test_2.scala new file mode 100644 index 000000000000..ce1cc8d3dff1 --- /dev/null +++ b/tests/run-custom-args/erased/quotes-reflection/Test_2.scala @@ -0,0 +1,20 @@ +import scala.language.experimental.erasedDefinitions + +erased class EC + +trait X { + def m1(using i: Int): Int + def m2(erased i: Int): Int + def m3(i: Int, erased j: Int): Int + def m4(i: EC): Int + + val l1 = (x: Int) ?=> 5 + val l2 = (erased x: 
Int) => 5
+  val l3 = (erased x: Int) ?=> 5
+  val l4 = (x: Int, erased y: Int) => 5
+  val l5 = (x: EC) => 5
+}
+
+@main def Test = {
+  println(inspect[X])
+}
diff --git a/tests/run-custom-args/run-macros-erased/macro-erased/1.scala b/tests/run-custom-args/run-macros-erased/macro-erased/1.scala
index 567ef57b1c06..36f583a7dc91 100644
--- a/tests/run-custom-args/run-macros-erased/macro-erased/1.scala
+++ b/tests/run-custom-args/run-macros-erased/macro-erased/1.scala
@@ -13,8 +13,8 @@ object Macro {
   def case1(erased i: Expr[Int])(using Quotes): Expr[Int] = '{ 0 }
   def case2 (i: Int)(erased j: Expr[Int])(using Quotes): Expr[Int] = '{ 0 }
   def case3(erased i: Expr[Int]) (j: Int)(using Quotes): Expr[Int] = '{ 0 }
-  def case4 (h: Int)(erased i: Expr[Int], j: Expr[Int])(using Quotes): Expr[Int] = '{ 0 }
-  def case5(erased i: Expr[Int], j: Expr[Int]) (h: Int)(using Quotes): Expr[Int] = '{ 0 }
+  def case4 (h: Int)(erased i: Expr[Int], erased j: Expr[Int])(using Quotes): Expr[Int] = '{ 0 }
+  def case5(erased i: Expr[Int], erased j: Expr[Int]) (h: Int)(using Quotes): Expr[Int] = '{ 0 }
   def case6 (h: Int)(erased i: Expr[Int])(erased j: Expr[Int])(using Quotes): Expr[Int] = '{ 0 }
   def case7(erased i: Expr[Int]) (h: Int)(erased j: Expr[Int])(using Quotes): Expr[Int] = '{ 0 }
   def case8(erased i: Expr[Int])(erased j: Expr[Int]) (h: Int)(using Quotes): Expr[Int] = '{ 0 }
diff --git a/tests/run-custom-args/tasty-inspector/stdlibExperimentalDefinitions.scala b/tests/run-custom-args/tasty-inspector/stdlibExperimentalDefinitions.scala
index 062fa25e0ca5..5ccdb753e9b3 100644
--- a/tests/run-custom-args/tasty-inspector/stdlibExperimentalDefinitions.scala
+++ b/tests/run-custom-args/tasty-inspector/stdlibExperimentalDefinitions.scala
@@ -1,5 +1,6 @@
 import scala.quoted.*
 import scala.tasty.inspector.*
+import scala.language.experimental.erasedDefinitions
 
 val experimentalDefinitionInLibrary = Set(
 
@@ -62,7 +63,8 @@ val experimentalDefinitionInLibrary = Set(
   "scala.annotation.MacroAnnotation",
 
   //// New APIs: Quotes
-  // Can be stabilized in 3.3.0 (unsure) or later
+  "scala.quoted.Quotes.reflectModule.FlagsModule.AbsOverride",
+  // Can be stabilized in 3.4.0 (unsure) or later
   "scala.quoted.Quotes.reflectModule.CompilationInfoModule.XmacroSettings",
   "scala.quoted.Quotes.reflectModule.FlagsModule.JavaAnnotation",
   // Can't be stabilized yet.
@@ -74,6 +76,15 @@ val experimentalDefinitionInLibrary = Set(
   "scala.quoted.Quotes.reflectModule.SymbolModule.newModule",
   "scala.quoted.Quotes.reflectModule.SymbolModule.freshName",
   "scala.quoted.Quotes.reflectModule.SymbolMethods.info",
+  // New feature: functions with erased parameters.
+  // Need erasedDefinitions enabled.
+ "scala.runtime.ErasedFunction", + "scala.quoted.Quotes.reflectModule.MethodTypeMethods.erasedParams", + "scala.quoted.Quotes.reflectModule.MethodTypeMethods.hasErasedParams", + "scala.quoted.Quotes.reflectModule.TermParamClauseMethods.erasedArgs", + "scala.quoted.Quotes.reflectModule.TermParamClauseMethods.hasErasedArgs", + "scala.quoted.Quotes.reflectModule.defnModule.ErasedFunctionClass" ) diff --git a/tests/run-macros/i12021.check b/tests/run-macros/i12021.check index b244dce80b34..ef998c725209 100644 --- a/tests/run-macros/i12021.check +++ b/tests/run-macros/i12021.check @@ -1,3 +1,5 @@ -X1: (i: scala.Int) isImplicit=true, isGiven=false, isErased=false -X2: (i: scala.Int) isImplicit=false, isGiven=true, isErased=false -X3: (i: scala.Int) isImplicit=false, isGiven=false, isErased=true +X1: (i: scala.Int) isImplicit=true, isGiven=false, erasedArgs=List(false) +X2: (i: scala.Int) isImplicit=false, isGiven=true, erasedArgs=List(false) +X3: (i: scala.Int) isImplicit=false, isGiven=false, erasedArgs=List(true) +X4: (i: scala.Int, j: scala.Int) isImplicit=false, isGiven=false, erasedArgs=List(false, true) +X5: (i: EC) isImplicit=false, isGiven=false, erasedArgs=List(true) diff --git a/tests/run-macros/i12021/Macro_1.scala b/tests/run-macros/i12021/Macro_1.scala index 81703dfbab3d..25cab1786146 100644 --- a/tests/run-macros/i12021/Macro_1.scala +++ b/tests/run-macros/i12021/Macro_1.scala @@ -14,5 +14,5 @@ def inspect2[A: Type](using Quotes): Expr[String] = { val names = ps.params.map(p => s"${p.name}: ${p.tpt.show}").mkString("(", ", ", ")") - Expr(s"${Type.show[A]}: $names isImplicit=${ps.isImplicit}, isGiven=${ps.isGiven}, isErased=${ps.isErased}") + Expr(s"${Type.show[A]}: $names isImplicit=${ps.isImplicit}, isGiven=${ps.isGiven}, erasedArgs=${ps.erasedArgs}") } diff --git a/tests/run-macros/i12021/Test_2.scala b/tests/run-macros/i12021/Test_2.scala index 17f74792ece4..a542b14f1175 100644 --- a/tests/run-macros/i12021/Test_2.scala +++ b/tests/run-macros/i12021/Test_2.scala @@ -1,11 +1,17 @@ import scala.language.experimental.erasedDefinitions +erased class EC + class X1(implicit i: Int) class X2(using i: Int) class X3(erased i: Int) +class X4(i: Int, erased j: Int) +class X5(i: EC) @main def Test = { println(inspect[X1]) println(inspect[X2]) println(inspect[X3]) -} \ No newline at end of file + println(inspect[X4]) + println(inspect[X5]) +} diff --git a/tests/run-macros/i12392.check b/tests/run-macros/i12392.check index 54c7f5d06c3f..92bbfa65fb49 100644 --- a/tests/run-macros/i12392.check +++ b/tests/run-macros/i12392.check @@ -1 +1 @@ -scala.Option[scala.Predef.String] to scala.Option[scala.Int] +scala.Option[java.lang.String] to scala.Option[scala.Int] diff --git a/tests/run-macros/i15968.check b/tests/run-macros/i15968.check new file mode 100644 index 000000000000..c7f3847d404c --- /dev/null +++ b/tests/run-macros/i15968.check @@ -0,0 +1,5 @@ +{ + type Z = java.lang.String + "foo".toString() +} +"foo".toString() diff --git a/tests/run-macros/i15968/Macro_1.scala b/tests/run-macros/i15968/Macro_1.scala new file mode 100644 index 000000000000..ea2728840d6e --- /dev/null +++ b/tests/run-macros/i15968/Macro_1.scala @@ -0,0 +1,15 @@ +import scala.quoted.* + +inline def macroPolyFun[A](inline arg: A, inline f: [Z] => Z => String): String = + ${ macroPolyFunImpl[A]('arg, 'f) } + +private def macroPolyFunImpl[A: Type](arg: Expr[A], f: Expr[[Z] => Z => String])(using Quotes): Expr[String] = + Expr(Expr.betaReduce('{ $f($arg) }).show) + + +inline def macroFun[A](inline arg: A, inline f: A => 
+inline def macroFun[A](inline arg: A, inline f: A => String): String =
+  ${ macroFunImpl[A]('arg, 'f) }
+
+private def macroFunImpl[A: Type](arg: Expr[A], f: Expr[A => String])(using Quotes): Expr[String] =
+  Expr(Expr.betaReduce('{ $f($arg) }).show)
+
diff --git a/tests/run-macros/i15968/Test_2.scala b/tests/run-macros/i15968/Test_2.scala
new file mode 100644
index 000000000000..6c6826f96b34
--- /dev/null
+++ b/tests/run-macros/i15968/Test_2.scala
@@ -0,0 +1,3 @@
+@main def Test: Unit =
+  println(macroPolyFun("foo", [Z] => (arg: Z) => arg.toString))
+  println(macroFun("foo", arg => arg.toString))
diff --git a/tests/run-macros/i17257/a.scala b/tests/run-macros/i17257/a.scala
new file mode 100644
index 000000000000..4a5682327604
--- /dev/null
+++ b/tests/run-macros/i17257/a.scala
@@ -0,0 +1,53 @@
+package derivation
+import scala.quoted.*
+
+import scala.annotation.tailrec
+
+object Helpers:
+
+  // file a.scala
+  inline def summonAllOptimized[T <: Tuple]: T =
+    ${ summonAllOptimizedImpl[T] }
+
+  inline def summon23[E]: (E, E, E, E, E, E, E, E, E, E, E, E, E, E, E, E, E, E, E, E, E, E, E) =
+    ${ summon23Impl[E] }
+
+  private def summonAllOptimizedImpl[T <: Tuple: Type](using q: Quotes): Expr[T] = {
+    import q.reflect.*
+
+    Expr
+      .ofTupleFromSeq(typesOfTuple(TypeRepr.of[T], Nil).map { tpe =>
+        tpe.asType match {
+          case '[t] =>
+            Expr.summon[t].getOrElse(report.errorAndAbort(s"Unable to find implicit instance for ${tpe.show}"))
+        }
+      })
+      .asExprOf[T]
+  }
+
+  private def summon23Impl[E: Type](using q: Quotes): Expr[(E, E, E, E, E, E, E, E, E, E, E, E, E, E, E, E, E, E, E, E, E, E, E)] = {
+    import q.reflect.*
+
+    val e = Expr.summon[E].getOrElse(report.errorAndAbort(s"Unable to find implicit instance for ${TypeRepr.of[E].show}"))
+
+    val tuple = (e, e, e, e, e, e, e, e, e, e, e, e, e, e, e, e, e, e, e, e, e, e, e)
+
+    assert(tuple.size == 23)
+
+    Expr.ofTuple(tuple)
+  }
+
+  @tailrec
+  private[derivation] def typesOfTuple(
+    using q: Quotes
+  )(tpe: q.reflect.TypeRepr, acc: List[q.reflect.TypeRepr]): List[q.reflect.TypeRepr] =
+    import q.reflect.*
+    val cons = Symbol.classSymbol("scala.*:")
+    tpe.widenTermRefByName.dealias match
+      case AppliedType(fn, tpes) if defn.isTupleClass(fn.typeSymbol) =>
+        tpes.reverse_:::(acc)
+      case AppliedType(tp, List(headType, tailType)) if tp.derivesFrom(cons) =>
+        typesOfTuple(tailType, headType :: acc)
+      case tpe =>
+        if tpe.derivesFrom(Symbol.classSymbol("scala.EmptyTuple")) then acc.reverse
+        else report.errorAndAbort(s"Unknown type encountered in tuple ${tpe.show}")
diff --git a/tests/run-macros/i17257/b.scala b/tests/run-macros/i17257/b.scala
new file mode 100644
index 000000000000..65d66fb20bff
--- /dev/null
+++ b/tests/run-macros/i17257/b.scala
@@ -0,0 +1,23 @@
+package derivation {
+  // file b.scala
+  val test = Helpers.summonAllOptimized[(
+    ValueOf["a"], ValueOf["a"], ValueOf["a"], ValueOf["a"], ValueOf["a"],
+    ValueOf["a"], ValueOf["a"], ValueOf["a"], ValueOf["a"], ValueOf["a"],
+    ValueOf["a"], ValueOf["a"], ValueOf["a"], ValueOf["a"], ValueOf["a"],
+    ValueOf["a"], ValueOf["a"], ValueOf["a"], ValueOf["a"], ValueOf["a"],
+    ValueOf["a"], ValueOf["a"], ValueOf["a"] // Commenting out the last one here fixes the compile error
+  )]
+  val test2 = Helpers.summon23[ValueOf["a"]]
+}
+@main def Test =
+  def assertions(list: List[ValueOf["a"]]): Unit =
+    assert(list.size == 23)
+    assert(list.map(_.value) == List(
+      "a", "a", "a", "a", "a",
+      "a", "a", "a", "a", "a",
+      "a", "a", "a", "a", "a",
+      "a", "a", "a", "a", "a",
+      "a", "a", "a"
+    ))
+  assertions(derivation.test.toList)
+
assertions(derivation.test2.toList) diff --git a/tests/run-macros/i7898.check b/tests/run-macros/i7898.check index 64da4a308f63..044c101dc772 100644 --- a/tests/run-macros/i7898.check +++ b/tests/run-macros/i7898.check @@ -1,4 +1,4 @@ -scala.List.apply[scala.PartialFunction[scala.Int, scala.Predef.String]](((x$1: scala.Int) => (x$1: @scala.unchecked) match { +scala.List.apply[scala.PartialFunction[scala.Int, scala.Predef.String]](((x$1: scala.Int) => x$1 match { case 1 => "x" })) diff --git a/tests/run-macros/inline-beta-reduce-polyfunction.check b/tests/run-macros/inline-beta-reduce-polyfunction.check new file mode 100644 index 000000000000..7793e273864f --- /dev/null +++ b/tests/run-macros/inline-beta-reduce-polyfunction.check @@ -0,0 +1,7 @@ +{ + type X = Int + { + println(1) + 1 + } +} diff --git a/tests/run-macros/inline-beta-reduce-polyfunction.scala b/tests/run-macros/inline-beta-reduce-polyfunction.scala new file mode 100644 index 000000000000..60ef889e7260 --- /dev/null +++ b/tests/run-macros/inline-beta-reduce-polyfunction.scala @@ -0,0 +1,5 @@ +transparent inline def foo(inline f: [X] => X => X): Int = f[Int](1) + +@main def Test: Unit = + val code = compiletime.codeOf(foo([X] => (x: X) => { println(x); x })) + println(code) \ No newline at end of file diff --git a/tests/run-macros/tasty-definitions-1.check b/tests/run-macros/tasty-definitions-1.check index 7ee9da3e64e8..4ac0e6267028 100644 --- a/tests/run-macros/tasty-definitions-1.check +++ b/tests/run-macros/tasty-definitions-1.check @@ -82,56 +82,8 @@ ContextFunction22 ContextFunction23 ContextFunction24 ContextFunction25 -ErasedFunction1 -ErasedFunction2 -ErasedFunction3 -ErasedFunction4 -ErasedFunction5 -ErasedFunction6 -ErasedFunction7 -ErasedFunction8 -ErasedFunction9 -ErasedFunction10 -ErasedFunction11 -ErasedFunction12 -ErasedFunction13 -ErasedFunction14 -ErasedFunction15 -ErasedFunction16 -ErasedFunction17 -ErasedFunction18 -ErasedFunction19 -ErasedFunction20 -ErasedFunction21 -ErasedFunction22 -ErasedFunction23 -ErasedFunction24 -ErasedFunction25 -ErasedContextFunction1 -ErasedContextFunction2 -ErasedContextFunction3 -ErasedContextFunction4 -ErasedContextFunction5 -ErasedContextFunction6 -ErasedContextFunction7 -ErasedContextFunction8 -ErasedContextFunction9 -ErasedContextFunction10 -ErasedContextFunction11 -ErasedContextFunction12 -ErasedContextFunction13 -ErasedContextFunction14 -ErasedContextFunction15 -ErasedContextFunction16 -ErasedContextFunction17 -ErasedContextFunction18 -ErasedContextFunction19 -ErasedContextFunction20 -ErasedContextFunction21 -ErasedContextFunction22 -ErasedContextFunction23 -ErasedContextFunction24 -ErasedContextFunction25 +class java.lang.Exception: Erased function classes are not supported. 
Use a refined `scala.runtime.ErasedFunction` +ErasedFunction Tuple2 Tuple3 Tuple4 diff --git a/tests/run-macros/tasty-definitions-1/quoted_1.scala b/tests/run-macros/tasty-definitions-1/quoted_1.scala index 6ee80daeeb1d..ed210706f567 100644 --- a/tests/run-macros/tasty-definitions-1/quoted_1.scala +++ b/tests/run-macros/tasty-definitions-1/quoted_1.scala @@ -62,11 +62,10 @@ object Macros { for (i <- 0 to 25) printout(defn.FunctionClass(i, isImplicit = true).name) - for (i <- 1 to 25) - printout(defn.FunctionClass(i, isErased = true).name) + // should fail + printout(defn.FunctionClass(1, isErased = true).name) - for (i <- 1 to 25) - printout(defn.FunctionClass(i, isImplicit = true, isErased = true).name) + printout(defn.ErasedFunctionClass.name) for (i <- 2 to 22) printout(defn.TupleClass(i).name) diff --git a/tests/run-staging/multi-staging.check b/tests/run-staging/multi-staging.check index 5d12306ba4ef..76adcfec3034 100644 --- a/tests/run-staging/multi-staging.check +++ b/tests/run-staging/multi-staging.check @@ -1,5 +1,5 @@ stage1 code: ((q1: scala.quoted.Quotes) ?=> { val x1: scala.Int = 2 - scala.quoted.runtime.Expr.quote[scala.Int](1.+(scala.quoted.runtime.Expr.nestedSplice[scala.Int](q1)(((evidence$5: scala.quoted.Quotes) ?=> scala.quoted.Expr.apply[scala.Int](x1)(scala.quoted.ToExpr.IntToExpr[scala.Int])(evidence$5))))).apply(using q1) + scala.quoted.runtime.Expr.quote[scala.Int](1.+(scala.quoted.runtime.Expr.splice[scala.Int](((evidence$5: scala.quoted.Quotes) ?=> scala.quoted.Expr.apply[scala.Int](x1)(scala.quoted.ToExpr.IntToExpr[scala.Int])(evidence$5))))).apply(using q1) }) 3 diff --git a/tests/run-staging/quote-nested-2.check b/tests/run-staging/quote-nested-2.check index b0af638da1a8..7db9edb0424e 100644 --- a/tests/run-staging/quote-nested-2.check +++ b/tests/run-staging/quote-nested-2.check @@ -1,4 +1,4 @@ ((q: scala.quoted.Quotes) ?=> { val a: scala.quoted.Expr[scala.Int] = scala.quoted.runtime.Expr.quote[scala.Int](4).apply(using q) - ((evidence$2: scala.quoted.Quotes) ?=> a).asInstanceOf[scala.ContextFunction1[scala.quoted.Quotes, scala.quoted.Expr[scala.Int]]].apply(using q) + ((evidence$2: scala.quoted.Quotes) ?=> a).apply(using q) }) diff --git a/tests/run-staging/quote-nested-3.check b/tests/run-staging/quote-nested-3.check index 63bdda5c6c4c..c3dfba2d8abe 100644 --- a/tests/run-staging/quote-nested-3.check +++ b/tests/run-staging/quote-nested-3.check @@ -1,7 +1,7 @@ { - type T = scala.Predef.String + type T = java.lang.String val x: java.lang.String = "foo" - val z: T = x + val z: java.lang.String = x (x: java.lang.String) } diff --git a/tests/run-staging/quote-nested-4.check b/tests/run-staging/quote-nested-4.check index 895bd0ddc914..d31b6394dccd 100644 --- a/tests/run-staging/quote-nested-4.check +++ b/tests/run-staging/quote-nested-4.check @@ -1,5 +1,5 @@ ((q: scala.quoted.Quotes) ?=> { - val t: scala.quoted.Type[scala.Predef.String] = scala.quoted.Type.of[scala.Predef.String](q) + val t: scala.quoted.Type[java.lang.String] = scala.quoted.Type.of[java.lang.String](q) - (t: scala.quoted.Type[scala.Predef.String]) + (t: scala.quoted.Type[java.lang.String]) }) diff --git a/tests/run-staging/quote-nested-5.check b/tests/run-staging/quote-nested-5.check index f29acb3b347a..53600d16a8da 100644 --- a/tests/run-staging/quote-nested-5.check +++ b/tests/run-staging/quote-nested-5.check @@ -1,4 +1,4 @@ ((q: scala.quoted.Quotes) ?=> { val a: scala.quoted.Expr[scala.Int] = scala.quoted.runtime.Expr.quote[scala.Int](4).apply(using q) - ((q2: scala.quoted.Quotes) ?=> 
((evidence$3: scala.quoted.Quotes) ?=> a).asInstanceOf[scala.ContextFunction1[scala.quoted.Quotes, scala.quoted.Expr[scala.Int]]].apply(using q2)).apply(using q) -}) + ((q2: scala.quoted.Quotes) ?=> ((evidence$2: scala.quoted.Quotes) ?=> a).apply(using q2)) +}.apply(using q)) diff --git a/tests/run-staging/quote-nested-6.check b/tests/run-staging/quote-nested-6.check index 05c2bd4eb00c..2ae8b0d26e47 100644 --- a/tests/run-staging/quote-nested-6.check +++ b/tests/run-staging/quote-nested-6.check @@ -1,7 +1,7 @@ { - type T[X] = scala.List[X] + type T[X] = [A >: scala.Nothing <: scala.Any] => scala.collection.immutable.List[A][X] val x: java.lang.String = "foo" - val z: T[scala.Predef.String] = scala.List.apply[java.lang.String](x) + val z: [X >: scala.Nothing <: scala.Any] => scala.collection.immutable.List[X][java.lang.String] = scala.List.apply[java.lang.String](x) (x: java.lang.String) } diff --git a/tests/run-staging/quote-owners-2.check b/tests/run-staging/quote-owners-2.check index 323ce64b7bc7..49c09271779c 100644 --- a/tests/run-staging/quote-owners-2.check +++ b/tests/run-staging/quote-owners-2.check @@ -2,7 +2,7 @@ def ff: scala.Int = { val a: scala.collection.immutable.List[scala.Int] = { type T = scala.collection.immutable.List[scala.Int] - val b: T = scala.Nil.::[scala.Int](3) + val b: scala.collection.immutable.List[scala.Int] = scala.Nil.::[scala.Int](3) (b: scala.collection.immutable.List[scala.Int]) } diff --git a/tests/run-staging/quote-unrolled-foreach.check b/tests/run-staging/quote-unrolled-foreach.check index 8e58ab8eed51..3a72cd1b1311 100644 --- a/tests/run-staging/quote-unrolled-foreach.check +++ b/tests/run-staging/quote-unrolled-foreach.check @@ -8,7 +8,7 @@ } }) -((arr: scala.Array[scala.Predef.String], f: scala.Function1[scala.Predef.String, scala.Unit]) => { +((arr: scala.Array[java.lang.String], f: scala.Function1[java.lang.String, scala.Unit]) => { val size: scala.Int = arr.length var i: scala.Int = 0 while (i.<(size)) { @@ -18,7 +18,7 @@ } }) -((arr: scala.Array[scala.Predef.String], f: scala.Function1[scala.Predef.String, scala.Unit]) => { +((arr: scala.Array[java.lang.String], f: scala.Function1[java.lang.String, scala.Unit]) => { val size: scala.Int = arr.length var i: scala.Int = 0 while (i.<(size)) { @@ -41,7 +41,7 @@ ((arr: scala.Array[scala.Int], f: scala.Function1[scala.Int, scala.Unit]) => { val size: scala.Int = arr.length var i: scala.Int = 0 - if (size.%(3).!=(0)) throw new scala.Exception("...") else () + if (size.%(3).!=(0)) throw new java.lang.Exception("...") else () while (i.<(size)) { f.apply(arr.apply(i)) f.apply(arr.apply(i.+(1))) @@ -53,7 +53,7 @@ ((arr: scala.Array[scala.Int], f: scala.Function1[scala.Int, scala.Unit]) => { val size: scala.Int = arr.length var i: scala.Int = 0 - if (size.%(4).!=(0)) throw new scala.Exception("...") else () + if (size.%(4).!=(0)) throw new java.lang.Exception("...") else () while (i.<(size)) { f.apply(arr.apply(i.+(0))) f.apply(arr.apply(i.+(1))) diff --git a/tests/run-staging/shonan-hmm-simple.check b/tests/run-staging/shonan-hmm-simple.check index da437646482d..cbef88812dcd 100644 --- a/tests/run-staging/shonan-hmm-simple.check +++ b/tests/run-staging/shonan-hmm-simple.check @@ -6,7 +6,7 @@ Complex(4,3) 10 ((arr1: scala.Array[scala.Int], arr2: scala.Array[scala.Int]) => { - if (arr1.length.!=(arr2.length)) throw new scala.Exception("...") else () + if (arr1.length.!=(arr2.length)) throw new java.lang.Exception("...") else () var sum: scala.Int = 0 var i: scala.Int = 0 while 
(i.<(scala.Predef.intArrayOps(arr1).size)) { @@ -22,13 +22,13 @@ Complex(4,3) 10 ((arr: scala.Array[scala.Int]) => { - if (arr.length.!=(5)) throw new scala.Exception("...") else () + if (arr.length.!=(5)) throw new java.lang.Exception("...") else () arr.apply(0).+(arr.apply(2)).+(arr.apply(4)) }) 10 ((arr: scala.Array[Complex[scala.Int]]) => { - if (arr.length.!=(4)) throw new scala.Exception("...") else () + if (arr.length.!=(4)) throw new java.lang.Exception("...") else () Complex.apply[scala.Int](0.-(arr.apply(0).im).+(0.-(arr.apply(2).im)).+(arr.apply(3).re.*(2)), arr.apply(0).re.+(arr.apply(2).re).+(arr.apply(3).im.*(2))) }) Complex(4,3) diff --git a/tests/run-staging/shonan-hmm.check b/tests/run-staging/shonan-hmm.check index fa5206904962..9cb77f850155 100644 --- a/tests/run-staging/shonan-hmm.check +++ b/tests/run-staging/shonan-hmm.check @@ -35,8 +35,8 @@ List(25, 30, 20, 43, 44) ((vout: scala.Array[scala.Int], a: scala.Array[scala.Array[scala.Int]], v: scala.Array[scala.Int]) => { - if (3.!=(vout.length)) throw new scala.IndexOutOfBoundsException("3") else () - if (2.!=(v.length)) throw new scala.IndexOutOfBoundsException("2") else () + if (3.!=(vout.length)) throw new java.lang.IndexOutOfBoundsException("3") else () + if (2.!=(v.length)) throw new java.lang.IndexOutOfBoundsException("2") else () vout.update(0, 0.+(v.apply(0).*(a.apply(0).apply(0))).+(v.apply(1).*(a.apply(0).apply(1)))) vout.update(1, 0.+(v.apply(0).*(a.apply(1).apply(0))).+(v.apply(1).*(a.apply(1).apply(1)))) vout.update(2, 0.+(v.apply(0).*(a.apply(2).apply(0))).+(v.apply(1).*(a.apply(2).apply(1)))) @@ -95,8 +95,8 @@ List(25, 30, 20, 43, 44) array } ((vout: scala.Array[scala.Int], v: scala.Array[scala.Int]) => { - if (5.!=(vout.length)) throw new scala.IndexOutOfBoundsException("5") else () - if (5.!=(v.length)) throw new scala.IndexOutOfBoundsException("5") else () + if (5.!=(vout.length)) throw new java.lang.IndexOutOfBoundsException("5") else () + if (5.!=(v.length)) throw new java.lang.IndexOutOfBoundsException("5") else () vout.update(0, 0.+(v.apply(0).*(5)).+(v.apply(1).*(0)).+(v.apply(2).*(0)).+(v.apply(3).*(5)).+(v.apply(4).*(0))) vout.update(1, 0.+(v.apply(0).*(0)).+(v.apply(1).*(0)).+(v.apply(2).*(10)).+(v.apply(3).*(0)).+(v.apply(4).*(0))) vout.update(2, 0.+(v.apply(0).*(0)).+(v.apply(1).*(10)).+(v.apply(2).*(0)).+(v.apply(3).*(0)).+(v.apply(4).*(0))) @@ -158,8 +158,8 @@ List(25, 30, 20, 43, 44) array } ((vout: scala.Array[scala.Int], v: scala.Array[scala.Int]) => { - if (5.!=(vout.length)) throw new scala.IndexOutOfBoundsException("5") else () - if (5.!=(v.length)) throw new scala.IndexOutOfBoundsException("5") else () + if (5.!=(vout.length)) throw new java.lang.IndexOutOfBoundsException("5") else () + if (5.!=(v.length)) throw new java.lang.IndexOutOfBoundsException("5") else () vout.update(0, v.apply(0).*(5).+(v.apply(3).*(5))) vout.update(1, v.apply(2).*(10)) vout.update(2, v.apply(1).*(10)) @@ -221,8 +221,8 @@ List(25, 30, 20, 43, 44) array } ((vout: scala.Array[scala.Int], v: scala.Array[scala.Int]) => { - if (5.!=(vout.length)) throw new scala.IndexOutOfBoundsException("5") else () - if (5.!=(v.length)) throw new scala.IndexOutOfBoundsException("5") else () + if (5.!=(vout.length)) throw new java.lang.IndexOutOfBoundsException("5") else () + if (5.!=(v.length)) throw new java.lang.IndexOutOfBoundsException("5") else () vout.update(0, v.apply(0).*(5).+(v.apply(3).*(5))) vout.update(1, v.apply(2).*(10)) vout.update(2, v.apply(1).*(10)) @@ -243,8 +243,8 @@ List(25, 30, 20, 43, 44) ((vout: 
scala.Array[scala.Int], v: scala.Array[scala.Int]) => { - if (5.!=(vout.length)) throw new scala.IndexOutOfBoundsException("5") else () - if (5.!=(v.length)) throw new scala.IndexOutOfBoundsException("5") else () + if (5.!=(vout.length)) throw new java.lang.IndexOutOfBoundsException("5") else () + if (5.!=(v.length)) throw new java.lang.IndexOutOfBoundsException("5") else () vout.update(0, v.apply(0).*(5).+(v.apply(3).*(5))) vout.update(1, v.apply(2).*(10)) vout.update(2, v.apply(1).*(10)) @@ -282,8 +282,8 @@ List(25, 30, 20, 43, 44) array } ((vout: scala.Array[scala.Int], v: scala.Array[scala.Int]) => { - if (5.!=(vout.length)) throw new scala.IndexOutOfBoundsException("5") else () - if (5.!=(v.length)) throw new scala.IndexOutOfBoundsException("5") else () + if (5.!=(vout.length)) throw new java.lang.IndexOutOfBoundsException("5") else () + if (5.!=(v.length)) throw new java.lang.IndexOutOfBoundsException("5") else () vout.update(0, v.apply(0).*(5).+(v.apply(3).*(5))) vout.update(1, v.apply(2).*(10)) vout.update(2, v.apply(1).*(10)) diff --git a/tests/run/16405.scala b/tests/run/16405.scala new file mode 100644 index 000000000000..fa0681683c42 --- /dev/null +++ b/tests/run/16405.scala @@ -0,0 +1,31 @@ +import scala.compiletime.summonInline + +case class TypeDesc[T](tpe: String) +object TypeDesc { + given nothing: TypeDesc[Nothing] = TypeDesc("Nothing") + given string: TypeDesc[String] = TypeDesc("String") + given int: TypeDesc[Int] = TypeDesc("Int") +} + +def exampleFn(s: String, i: Int): Unit = () + +inline def argumentTypesOf[R](fun: (_, _) => R): (TypeDesc[?], TypeDesc[?]) = { + inline fun match { + case x: ((a, b) => R) => + (scala.compiletime.summonInline[TypeDesc[a]], scala.compiletime.summonInline[TypeDesc[b]]) + } +} +inline def argumentTypesOfNoWildCard[A, B, R](fun: (A, B) => R): (TypeDesc[?], TypeDesc[?]) = argumentTypesOf(fun) +inline def argumentTypesOfAllWildCard(fun: (?, ?) 
=> ?): (TypeDesc[?], TypeDesc[?]) = argumentTypesOf(fun) + +object Test { + def main(args: Array[String]): Unit = { + val expected = (TypeDesc.string, TypeDesc.int) + assert(argumentTypesOf(exampleFn) == expected) + assert(argumentTypesOf(exampleFn(_, _)) == expected) + assert(argumentTypesOfNoWildCard(exampleFn) == expected) + assert(argumentTypesOfNoWildCard(exampleFn(_, _)) == expected) + assert(argumentTypesOfAllWildCard(exampleFn) == expected) + assert(argumentTypesOfAllWildCard(exampleFn(_, _)) == expected) + } +} \ No newline at end of file diff --git a/tests/run/anon-mirror-gen-local.scala b/tests/run/anon-mirror-gen-local.scala index 68fb9500d5ba..ccd1ac4fc602 100644 --- a/tests/run/anon-mirror-gen-local.scala +++ b/tests/run/anon-mirror-gen-local.scala @@ -55,7 +55,7 @@ class Outer5 { self => } } - lazy val o = new Outer5() // infinite init + final lazy val o = new Outer5() // infinite init } @@ -142,7 +142,7 @@ def locally3 = { class Bar extends Foo { def hello = - val mQux = summon[Mirror.Of[Bar.super.foo.type]] + val mQux = summon[Mirror.Of[foo.type]] assert(mQux.fromProduct(EmptyTuple) == Qux) } @@ -157,4 +157,4 @@ def locally3 = { testOuter6 locally1 locally2 - locally3 \ No newline at end of file + locally3 diff --git a/tests/run/erased-inline-vals.scala b/tests/run/erased-inline-vals.scala index 00c9c8c168c7..c39a8295af5d 100644 --- a/tests/run/erased-inline-vals.scala +++ b/tests/run/erased-inline-vals.scala @@ -16,6 +16,27 @@ class D: inline def x: Int = 5 inline val y = 6 +object SideEffects: + val sideEffects = scala.collection.mutable.ListBuffer.empty[String] + +trait E: + final val a: 7 = + SideEffects.sideEffects += "E.a" + 7 + final val b = + SideEffects.sideEffects += "E.b" + 8 +end E + +class F extends E: + final val c: 9 = + SideEffects.sideEffects += "F.c" + 9 + final val d = + SideEffects.sideEffects += "F.d" + 10 +end F + @main def Test = val b: B = new B assert(b.x == 1) @@ -37,12 +58,24 @@ class D: assert(d.x == 5) assert(d.y == 6) + val f: F = new F + assert(SideEffects.sideEffects.toList == List("E.a", "E.b", "F.c", "F.d")) + assert(f.a == 7) + assert(f.b == 8) + assert(f.c == 9) + assert(f.d == 10) assert(classOf[B].getDeclaredMethods.size == 2) assert(classOf[B].getDeclaredFields.isEmpty) assert(classOf[C].getDeclaredMethods.size == 2) - assert(classOf[C].getDeclaredFields.isEmpty) + assert(classOf[C].getDeclaredFields.size == 1) // x, but not y assert(classOf[D].getDeclaredMethods.isEmpty) assert(classOf[D].getFields.isEmpty) + + assert(classOf[E].getDeclaredMethods.size == 5) + assert(classOf[E].getDeclaredFields.isEmpty) + + assert(classOf[F].getDeclaredMethods.size == 2) + assert(classOf[F].getDeclaredFields.isEmpty) diff --git a/tests/run/errorhandling/Result.scala b/tests/run/errorhandling/Result.scala index 027c07c86769..07d7a9f90c8a 100644 --- a/tests/run/errorhandling/Result.scala +++ b/tests/run/errorhandling/Result.scala @@ -29,14 +29,14 @@ object Result: case err: Err[_] => err /** Validate both `r` and `other`; return a pair of successes or a list of failures. */ - def * [U](other: Result[U, E]): Result[(T, U), List[E]] = (r, other) match + def zip[U](other: Result[U, E]): Result[(T, U), List[E]] = (r, other) match case (Ok(x), Ok(y)) => Ok((x, y)) case (Ok(_), Err(e)) => Err(e :: Nil) case (Err(e), Ok(_)) => Err(e :: Nil) case (Err(e1), Err(e2)) => Err(e1 :: e2 :: Nil) /** Validate both `r` and `other`; return a tuple of successes or a list of failures. 
- * Unlike with `*`, the right hand side `other` must be a `Result` returning a `Tuple`, + * Unlike with `zip`, the right hand side `other` must be a `Result` returning a `Tuple`, * and the left hand side is added to it. See `Result.empty` for a convenient * right unit of chains of `*:`s. */ diff --git a/tests/run/final-fields.check b/tests/run/final-fields.check index 3ebde7d7f735..af090f65a32a 100644 --- a/tests/run/final-fields.check +++ b/tests/run/final-fields.check @@ -2,6 +2,6 @@ T.f1 T.f2 T.f3 T.f4 -3 2 -3 -4 +3 2 3 -4 3 g diff --git a/tests/run/functionXXL.scala b/tests/run/functionXXL.scala index 885e7bce8be9..de8c8e3faedd 100644 --- a/tests/run/functionXXL.scala +++ b/tests/run/functionXXL.scala @@ -1,5 +1,3 @@ -// scalajs: --skip --pending - object Test { val f = (x1: Int, diff --git a/tests/run/i11706.check b/tests/run/i11706.check new file mode 100644 index 000000000000..a5c8806279fa --- /dev/null +++ b/tests/run/i11706.check @@ -0,0 +1,2 @@ +3 +3 diff --git a/tests/run/i11706.scala b/tests/run/i11706.scala new file mode 100644 index 000000000000..276ee408d266 --- /dev/null +++ b/tests/run/i11706.scala @@ -0,0 +1,30 @@ +// https://github.com/lampepfl/dotty/issues/11706 +import scala.compiletime.erasedValue + +object Obj: + + inline def length[Tuple]: Int = loop[Tuple] + + private inline def loop[Tuple]: Int = + inline erasedValue[Tuple] match + case _: EmptyTuple => 0 + case _: (head *: tail) => 1 + loop[tail] + +end Obj + +// Same code, but in a trait instead of an object +trait Trait: + + inline def length[Tuple]: Int = loop[Tuple] + + private inline final def loop[Tuple]: Int = + inline erasedValue[Tuple] match + case _: EmptyTuple => 0 + case _: (head *: tail) => 1 + loop[tail] + +end Trait + +@main def Test() = + println(Obj.length[(Int, Int, String)]) // OK + new Trait: + println(length[(Int, Int, String)]) diff --git a/tests/run/i12032.check b/tests/run/i12032.check new file mode 100644 index 000000000000..3bd1f0e29744 --- /dev/null +++ b/tests/run/i12032.check @@ -0,0 +1,2 @@ +foo +bar diff --git a/tests/run/i12032.scala b/tests/run/i12032.scala new file mode 100644 index 000000000000..52358332e2c8 --- /dev/null +++ b/tests/run/i12032.scala @@ -0,0 +1,24 @@ +// https://github.com/lampepfl/dotty/issues/12032 +class Foo(val strings: Seq[String]) extends FooLowPriority + +trait FooLowPriority { self: Foo => + @scala.annotation.targetName("appendFromProducers") + def append(v: String): Foo = new Foo(v +: self.strings) +} + +trait FooBar { self: Foo => + @scala.annotation.targetName("appendFromProducers") + final override def append(v: String): Foo = new Foo(self.strings :+ v) +} + +object Foo { + type Bar = Foo with FooBar + + def bar(vs: String*): Bar = new Foo(vs) with FooBar +} + +@main def Test() = + Foo.bar("foo") + .append("bar") + .strings + .foreach(println) diff --git a/tests/run/i13216.scala b/tests/run/i13216.scala new file mode 100644 index 000000000000..174d0f200f31 --- /dev/null +++ b/tests/run/i13216.scala @@ -0,0 +1,11 @@ +// https://github.com/lampepfl/dotty/issues/13216 +import scala.annotation.targetName + +class C(s: String) extends AnyVal { + def m(xs: Seq[Int]): Unit = {} + @targetName("m_seq2") + def m(xs: Seq[Seq[Int]]): Unit = {} +} + +@main def Test = + new C("").m(Seq(123)) diff --git a/tests/run/i13332a.scala b/tests/run/i13332a.scala index 3478ed325467..c4bd33ede153 100644 --- a/tests/run/i13332a.scala +++ b/tests/run/i13332a.scala @@ -150,7 +150,7 @@ class SubUniverse extends Universe { trait Whole { trait MixinA { - lazy val mixinB = new 
MixinB() {} + final lazy val mixinB = new MixinB() {} } trait MixinB { object A extends MixinB { // by inheriting `MixinB`, we should not check for inheritance from the right diff --git a/tests/run/i13334.scala b/tests/run/i13334.scala new file mode 100644 index 000000000000..2ee0987c13cc --- /dev/null +++ b/tests/run/i13334.scala @@ -0,0 +1,16 @@ +// https://github.com/lampepfl/dotty/issues/13334 +trait DFC +given DFC = new DFC {} + +trait TC +object TC: + def foo()(using DFC): Unit = {} + + inline given (using DFC): TC = new TC: + foo() + +class Foo(using DFC): + summon[TC] + +@main def Test() = + val top = new Foo diff --git a/tests/run/i13691b.scala b/tests/run/i13691b.scala new file mode 100644 index 000000000000..1da726827467 --- /dev/null +++ b/tests/run/i13691b.scala @@ -0,0 +1,12 @@ +// https://github.com/lampepfl/dotty/issues/13691 +import language.experimental.saferExceptions + +trait Decoder[+T]: + def apply(): T + +given Decoder[Int throws Exception] = new Decoder[Int throws Exception]: + def apply(): Int throws Exception = 1 + +@main def Test(): Unit = + import unsafeExceptions.canThrowAny + summon[Decoder[Int throws Exception]]() diff --git a/tests/run/i14340.check b/tests/run/i14340.check new file mode 100644 index 000000000000..e96afd98e6e9 --- /dev/null +++ b/tests/run/i14340.check @@ -0,0 +1,10 @@ +1 +1 +2 +2 +10 +10 +20 +20 +100 +100 \ No newline at end of file diff --git a/tests/run/i14340.scala b/tests/run/i14340.scala new file mode 100644 index 000000000000..0670c7e471ac --- /dev/null +++ b/tests/run/i14340.scala @@ -0,0 +1,57 @@ +class Container1 extends reflect.Selectable + +class Container2(values: Map[String, Any], methods: Map[String, Int => Any]) extends Selectable: + def selectDynamic(name: String) = values(name) + def applyDynamic(name: String)(arg: Int) = methods(name)(arg) + +class Foo(val value: Int) extends AnyVal +class Bar[A](val value: A) extends AnyVal + +object Helpers: + def foo = Foo(1) + def bar = Bar(Foo(2)) + def qux1 = Bar(new Container1 { def foo = Foo(10) }) + def qux2 = Bar(new Container2(Map("foo" -> Foo(20)), Map.empty).asInstanceOf[Container2 { def foo: Foo }]) + +@main def Test: Unit = + val cont1 = new Container1: + def foo = Helpers.foo + val bar = Helpers.bar + def qux1 = Helpers.qux1 + def qux2 = Helpers.qux2 + def fooFromInt(i: Int) = Foo(i) + + val cont2values = Map( + "foo" -> Helpers.foo, + "bar" -> Helpers.bar, + "qux1" -> Helpers.qux1, + "qux2" -> Helpers.qux2 + ) + + val cont2methods = Map( + "fooFromInt" -> { (i: Int) => Foo(i) } + ) + + val cont2 = Container2(cont2values, cont2methods).asInstanceOf[Container2 { + def foo: Foo + def bar: Bar[Foo] + def qux1: Bar[Container1 { def foo: Foo }] + def qux2: Bar[Container2 { def foo: Foo }] + def fooFromInt(i: Int): Foo + }] + + + println(cont1.foo.value) + println(cont2.foo.value) + + println(cont1.bar.value.value) + println(cont2.bar.value.value) + + println(cont1.qux1.value.foo.value) + println(cont2.qux1.value.foo.value) + + println(cont1.qux2.value.foo.value) + println(cont2.qux2.value.foo.value) + + println(cont1.fooFromInt(100).value) + println(cont2.fooFromInt(100).value) diff --git a/tests/run/i14582.scala b/tests/run/i14582.scala new file mode 100644 index 000000000000..bce33aa170b2 --- /dev/null +++ b/tests/run/i14582.scala @@ -0,0 +1,18 @@ +// https://github.com/lampepfl/dotty/issues/14582 +@main def Test() = + val map = Map( + "a" -> 1, + "b" -> 2 + ) + + val mapView = map.view + + val optionMapView = Some(mapView) + + val listOfTuples: List[(String, String)] = 
List(("c", "d"), ("e", "f")) + + val mapViewWithDefault = optionMapView.getOrElse(Map()) + + val result = mapViewWithDefault ++ listOfTuples + + result.toSeq diff --git a/tests/run/i16728.check b/tests/run/i16728.check new file mode 100644 index 000000000000..06995cd05e9a --- /dev/null +++ b/tests/run/i16728.check @@ -0,0 +1,3 @@ +b1.X +B#X +ELSE diff --git a/tests/run/i16728.scala b/tests/run/i16728.scala new file mode 100644 index 000000000000..a1ada09e6d29 --- /dev/null +++ b/tests/run/i16728.scala @@ -0,0 +1,52 @@ +class A { + class X { + def outer : A.this.type = A.this + } +} + +class B extends A +class C extends A + +object Test { + def main(args: Array[String]) : Unit = { + val b0 = new B + val b1 = b0 + val b2 = new B + + val c0 = new C + val c1 = c0 + val c2 = new C + + val b0x : A#X = new b0.X + + val pathTypeMatch = b0x match { + case _ : c2.X => "c2.X" + case _ : c1.X => "c1.x" + case _ : c0.X => "c0.X" + case _ : b2.X => "b2.X" + case _ : b1.X => "b1.X" + case _ : b0.X => "b0.X" + case _ => "ELSE" + } + + println(pathTypeMatch) + + val projectionTypeMatch = b0x match { + case _ : C#X => "C#X" + case _ : B#X => "B#X" + case _ : A#X => "A#X" + case _ => "ELSE" + } + + println(projectionTypeMatch) + + val failingTypeMatch = b0x match { + case cx : C#X => + val c : C = cx.outer + c + case _ => "ELSE" + } + + println(failingTypeMatch) + } +} \ No newline at end of file diff --git a/tests/run/i16785.check b/tests/run/i16785.check new file mode 100644 index 000000000000..917d993cb822 --- /dev/null +++ b/tests/run/i16785.check @@ -0,0 +1 @@ +specific: 1 diff --git a/tests/run/i16785.scala b/tests/run/i16785.scala new file mode 100644 index 000000000000..a2217c99f6a4 --- /dev/null +++ b/tests/run/i16785.scala @@ -0,0 +1,14 @@ +object Test { + sealed trait Box[T] { def value: T } + final case class IntBox(value: Int) extends Box[Int] + + implicit def s1[T](implicit box: Box[T]): String = "generic: " + box.value + implicit def s2(implicit box: Box[Int]): String = "specific: " + box.value + + def test[T](implicit box: Box[T]): String = box match { + case IntBox(_) => implicitly[String] + } + + def main(args: Array[String]): Unit = + println(test(IntBox(1))) +} diff --git a/tests/run/i16879.scala b/tests/run/i16879.scala new file mode 100644 index 000000000000..c01b8cb1ed89 --- /dev/null +++ b/tests/run/i16879.scala @@ -0,0 +1,16 @@ +trait Companion: + final override def toString: String = "Companion" + +case class Example(value: Int) +object Example extends Companion + +case class C() +object C: + override def toString = "CC" + +case class D() + +@main def Test = + assert(Example.toString == "Companion") + assert(C.toString == "CC") + assert(D.toString == "D") diff --git a/tests/run/i16943.scala b/tests/run/i16943.scala new file mode 100644 index 000000000000..68e1f8fb5aa3 --- /dev/null +++ b/tests/run/i16943.scala @@ -0,0 +1,6 @@ +@main +@annotation.experimental +def Test(): Unit = fail(compiletime.erasedValue, 1) + +@annotation.experimental +def fail(dumb: CanThrow[Exception], x: Int) = println(x) diff --git a/tests/run/i17021.defs.scala b/tests/run/i17021.defs.scala new file mode 100644 index 000000000000..126759b5d268 --- /dev/null +++ b/tests/run/i17021.defs.scala @@ -0,0 +1,16 @@ +// Derives from run/i17021, but with defs instead of vals +package p1: + class A: + protected def foo: Int = 1 + +package p2: + trait B extends p1.A: + def bar: Int = foo + + class C extends B: + override def foo: Int = 2 + +object Test: + def main(args: Array[String]): Unit = + val n = new p2.C().bar + 
assert(n == 2, n) // was: assertion failed: 1
diff --git a/tests/run/i17021.ext-java/A.java b/tests/run/i17021.ext-java/A.java
new file mode 100644
index 000000000000..536e9caa4a38
--- /dev/null
+++ b/tests/run/i17021.ext-java/A.java
@@ -0,0 +1,6 @@
+// Derives from run/i17021.defs, but with a Java protected member
+package p1;
+
+public class A {
+    protected int foo() { return 1; }
+}
diff --git a/tests/run/i17021.ext-java/Test.scala b/tests/run/i17021.ext-java/Test.scala
new file mode 100644
index 000000000000..2cca286c3801
--- /dev/null
+++ b/tests/run/i17021.ext-java/Test.scala
@@ -0,0 +1,15 @@
+// scalajs: --skip
+// Derives from run/i17021.defs
+// but with a Java protected member
+// and fixed calling code, that uses super
+package p2:
+  trait B extends p1.A:
+    def bar: Int = super.foo
+
+  class C extends B:
+    override def foo: Int = 2
+
+object Test:
+  def main(args: Array[String]): Unit =
+    val n = new p2.C().bar
+    assert(n == 1, n)
diff --git a/tests/run/i17021.scala b/tests/run/i17021.scala
new file mode 100644
index 000000000000..7465508e7f0a
--- /dev/null
+++ b/tests/run/i17021.scala
@@ -0,0 +1,18 @@
+package p1:
+  class A:
+    protected val foo: Int = 1
+
+package p2:
+  trait B extends p1.A:
+    def bar: Int = foo
+
+  class C extends B: // was: error: parent trait B has a super call which binds to the value p1.A.foo. Super calls can only target methods.
+    override val foo: Int = 2
+
+// Also, assert that the access continues to delegate:
+// i.e. B#bar delegates to this.foo and so C#bar returns 2,
+// not B#bar delegates to super.foo and so C#bar returns 1.
+object Test:
+  def main(args: Array[String]): Unit =
+    val n = new p2.C().bar
+    assert(n == 2, n) // was: assertion failed: 1
diff --git a/tests/run/i17549.check b/tests/run/i17549.check
new file mode 100644
index 000000000000..df4c241f5ffe
--- /dev/null
+++ b/tests/run/i17549.check
@@ -0,0 +1,6 @@
+T.x
+C.y
+1
+2
+1
+2
diff --git a/tests/run/i17549.scala b/tests/run/i17549.scala
new file mode 100644
index 000000000000..165e40512153
--- /dev/null
+++ b/tests/run/i17549.scala
@@ -0,0 +1,27 @@
+trait T:
+  final val x: 1 =
+    println("T.x")
+    1
+end T
+
+trait U:
+  def x: Any
+  def y: Any
+
+class C extends T with U:
+  final val y: 2 =
+    println("C.y")
+    2
+end C
+
+object Test:
+  def main(args: Array[String]): Unit =
+    val c = new C
+    println(c.x)
+    println(c.y)
+
+    val u: U = c
+    println(u.x)
+    println(u.y)
+  end main
+end Test
diff --git a/tests/run/i2004.scala b/tests/run/i2004.scala
index dd829ef24b5b..8682423ef2f0 100644
--- a/tests/run/i2004.scala
+++ b/tests/run/i2004.scala
@@ -1,5 +1,3 @@
-// scalajs: --skip --pending
-
 object Test {
 
   def main(args: Array[String]) = {
diff --git a/tests/run/implicitFunctionXXL.scala b/tests/run/implicitFunctionXXL.scala
index fcae9c135cda..b4131080577d 100644
--- a/tests/run/implicitFunctionXXL.scala
+++ b/tests/run/implicitFunctionXXL.scala
@@ -1,5 +1,3 @@
-// scalajs: --skip --pending
-
 object Test {
 
   def main(args: Array[String]): Unit = {
diff --git a/tests/run/inline-numeric/Fractional.scala b/tests/run/inline-numeric/Fractional.scala
new file mode 100644
index 000000000000..f1bc81246a43
--- /dev/null
+++ b/tests/run/inline-numeric/Fractional.scala
@@ -0,0 +1,62 @@
+package scala.math
+package inline
+
+trait Fractional[T] extends Numeric[T]:
+  transparent inline def div(inline x: T, inline y: T): T
+  protected transparent inline def isNaN(inline x: T): Boolean
+  protected transparent inline def isNegZero(inline x: T): Boolean
+
+  extension (inline x: T)
+    transparent inline def abs: T =
+      if lt(x, zero) || isNegZero(x) then negate(x) else x
+    transparent inline def sign: T =
+      if isNaN(x) || isNegZero(x) then x
+      else if lt(x, zero) then negate(one)
+      else if gt(x, zero) then one
+      else zero
+    transparent inline def /(inline y: T) = div(x, y)
+
+object Fractional:
+  given BigDecimalIsFractional: BigDecimalIsConflicted with Fractional[BigDecimal] with
+    transparent inline def div(inline x: BigDecimal, inline y: BigDecimal): BigDecimal = x / y
+
+    protected transparent inline def isNaN(inline x: BigDecimal): Boolean = false
+    protected transparent inline def isNegZero(inline x: BigDecimal): Boolean = false
+
+  given DoubleIsFractional: Fractional[Double] with Ordering.DoubleIeeeOrdering with
+    transparent inline def plus(inline x: Double, inline y: Double): Double = x + y
+    transparent inline def minus(inline x: Double, inline y: Double): Double = x - y
+    transparent inline def times(inline x: Double, inline y: Double): Double = x * y
+    transparent inline def div(inline x: Double, inline y: Double): Double = x / y
+    transparent inline def negate(inline x: Double): Double = -x
+
+    transparent inline def fromInt(x: Int): Double = x.toDouble
+    def parseString(str: String): Option[Double] = str.toDoubleOption
+
+    protected transparent inline def isNaN(inline x: Double): Boolean = x.isNaN
+    protected transparent inline def isNegZero(inline x: Double): Boolean = x.equals(-0.0)
+
+    extension (inline x: Double)
+      transparent inline def toInt: Int = x.toInt
+      transparent inline def toLong: Long = x.toLong
+      transparent inline def toFloat: Float = x.toFloat
+      transparent inline def toDouble: Double = x
+
+  given FloatIsFractional: Fractional[Float] with Ordering.FloatIeeeOrdering with
+    transparent inline def plus(inline x: Float, inline y: Float): Float = x + y
+    transparent inline def minus(inline x: Float, inline y: Float): Float = x - y
+    transparent inline def times(inline x: Float, inline y: Float): Float = x * y
+    transparent inline def div(inline x: Float, inline y: Float): Float = x / y
+    transparent inline def negate(inline x: Float): Float = -x
+
+    transparent inline def fromInt(x: Int): Float = x.toFloat
+    def parseString(str: String): Option[Float] = str.toFloatOption
+
+    protected transparent inline def isNaN(inline x: Float): Boolean = x.isNaN
+    protected transparent inline def isNegZero(inline x: Float): Boolean = x.equals(-0f)
+
+    extension (inline x: Float)
+      transparent inline def toInt: Int = x.toInt
+      transparent inline def toLong: Long = x.toLong
+      transparent inline def toFloat: Float = x
+      transparent inline def toDouble: Double = x.toDouble
diff --git a/tests/run/inline-numeric/Integral.scala b/tests/run/inline-numeric/Integral.scala
new file mode 100644
index 000000000000..1a740a3e1d99
--- /dev/null
+++ b/tests/run/inline-numeric/Integral.scala
@@ -0,0 +1,132 @@
+package scala.math
+package inline
+
+import scala.util.Try
+
+trait Integral[T] extends Numeric[T]:
+  inline def quot(inline x: T, inline y: T): T
+  inline def rem(inline x: T, inline y: T): T
+
+  extension (inline x: T)
+    transparent inline def abs: T =
+      if lt(x, zero) then negate(x) else x
+    transparent inline def sign: T =
+      if lt(x, zero) then negate(one)
+      else if gt(x, zero) then one
+      else zero
+    transparent inline def /(inline y: T) = quot(x, y)
+    transparent inline def %(inline y: T) = rem(x, y)
+    transparent inline def /%(inline y: T) = (quot(x, y), rem(x, y))
+
+object Integral:
+  given BigDecimalAsIfIntegral: Integral[BigDecimal] with BigDecimalIsConflicted with
+    transparent inline def quot(inline x: BigDecimal, inline y: BigDecimal): BigDecimal = x quot y
+    transparent inline def rem(inline x: BigDecimal, inline y: BigDecimal): BigDecimal = x remainder y
+
+  given BigIntIsIntegral: Integral[BigInt] with Ordering.BigIntOrdering with
+    transparent inline def plus(inline x: BigInt, inline y: BigInt): BigInt = x + y
+    transparent inline def minus(inline x: BigInt, inline y: BigInt): BigInt = x - y
+    transparent inline def times(inline x: BigInt, inline y: BigInt): BigInt = x * y
+    transparent inline def negate(inline x: BigInt): BigInt = -x
+
+    extension (inline x: BigInt)
+      transparent inline def toInt: Int = x.intValue
+      transparent inline def toLong: Long = x.longValue
+      transparent inline def toFloat: Float = x.floatValue
+      transparent inline def toDouble: Double = x.doubleValue
+
+    transparent inline def fromInt(x: Int): BigInt = BigInt(x)
+    def parseString(str: String): Option[BigInt] = Try(BigInt(str)).toOption
+
+    transparent inline def quot(inline x: BigInt, inline y: BigInt): BigInt = x / y
+    transparent inline def rem(inline x: BigInt, inline y: BigInt): BigInt = x % y
+
+  given ByteIsIntegral: Integral[Byte] with Ordering.ByteOrdering with
+    transparent inline def plus(inline x: Byte, inline y: Byte): Byte = (x + y).toByte
+    transparent inline def minus(inline x: Byte, inline y: Byte): Byte = (x - y).toByte
+    transparent inline def times(inline x: Byte, inline y: Byte): Byte = (x * y).toByte
+    transparent inline def negate(inline x: Byte): Byte = (-x).toByte
+
+    transparent inline def fromInt(x: Int): Byte = x.toByte
+    def parseString(str: String): Option[Byte] = str.toByteOption
+
+    transparent inline def quot(inline x: Byte, inline y: Byte): Byte = (x / y).toByte
+    transparent inline def rem(inline x: Byte, inline y: Byte): Byte = (x % y).toByte
+
+    extension (inline x: Byte)
+      transparent inline def toInt: Int = x.toInt
+      transparent inline def toLong: Long = x.toLong
+      transparent inline def toFloat: Float = x.toFloat
+      transparent inline def toDouble: Double = x.toDouble
+
+  given CharIsIntegral: Integral[Char] with Ordering.CharOrdering with
+    transparent inline def plus(inline x: Char, inline y: Char): Char = (x + y).toChar
+    transparent inline def minus(inline x: Char, inline y: Char): Char = (x - y).toChar
+    transparent inline def times(inline x: Char, inline y: Char): Char = (x * y).toChar
+    transparent inline def negate(inline x: Char): Char = (-x).toChar
+
+    transparent inline def fromInt(x: Int): Char = x.toChar
+    def parseString(str: String): Option[Char] = Try(str.toInt.toChar).toOption
+
+    transparent inline def quot(inline x: Char, inline y: Char): Char = (x / y).toChar
+    transparent inline def rem(inline x: Char, inline y: Char): Char = (x % y).toChar
+
+    extension (inline x: Char)
+      transparent inline def toInt: Int = x.toInt
+      transparent inline def toLong: Long = x.toLong
+      transparent inline def toFloat: Float = x.toFloat
+      transparent inline def toDouble: Double = x.toDouble
+
+  given IntIsIntegral: Integral[Int] with Ordering.IntOrdering with
+    transparent inline def plus(inline x: Int, inline y: Int): Int = x + y
+    transparent inline def minus(inline x: Int, inline y: Int): Int = x - y
+    transparent inline def times(inline x: Int, inline y: Int): Int = x * y
+    transparent inline def negate(inline x: Int): Int = -x
+
+    transparent inline def fromInt(x: Int): Int = x
+    def parseString(str: String): Option[Int] = str.toIntOption
+
+    transparent inline def quot(inline x: Int, inline y: Int): Int = x / y
+    transparent inline def rem(inline x: Int, inline y: Int): Int = x % y
+
+    extension (inline x: Int)
+      transparent inline def toInt: Int = x
+      transparent inline def toLong: Long = x.toLong
+      transparent inline def toFloat: Float = x.toFloat
+      transparent inline def toDouble: Double = x.toDouble
+
+  given LongIsIntegral: Integral[Long] with Ordering.LongOrdering with
+    transparent inline def plus(inline x: Long, inline y: Long): Long = x + y
+    transparent inline def minus(inline x: Long, inline y: Long): Long = x - y
+    transparent inline def times(inline x: Long, inline y: Long): Long = x * y
+    transparent inline def negate(inline x: Long): Long = -x
+
+    transparent inline def fromInt(x: Int): Long = x.toLong
+    def parseString(str: String): Option[Long] = str.toLongOption
+
+    transparent inline def quot(inline x: Long, inline y: Long): Long = (x / y).toLong
+    transparent inline def rem(inline x: Long, inline y: Long): Long = (x % y).toLong
+
+    extension (inline x: Long)
+      transparent inline def toInt: Int = x.toInt
+      transparent inline def toLong: Long = x
+      transparent inline def toFloat: Float = x.toFloat
+      transparent inline def toDouble: Double = x.toDouble
+
+  given ShortIsIntegral: Integral[Short] with Ordering.ShortOrdering with
+    transparent inline def plus(inline x: Short, inline y: Short): Short = (x + y).toShort
+    transparent inline def minus(inline x: Short, inline y: Short): Short = (x - y).toShort
+    transparent inline def times(inline x: Short, inline y: Short): Short = (x * y).toShort
+    transparent inline def negate(inline x: Short): Short = (-x).toShort
+
+    transparent inline def fromInt(x: Int): Short = x.toShort
+    def parseString(str: String): Option[Short] = str.toShortOption
+
+    transparent inline def quot(inline x: Short, inline y: Short): Short = (x / y).toShort
+    transparent inline def rem(inline x: Short, inline y: Short): Short = (x % y).toShort
+
+    extension (inline x: Short)
+      transparent inline def toInt: Int = x.toInt
+      transparent inline def toLong: Long = x.toLong
+      transparent inline def toFloat: Float = x.toFloat
+      transparent inline def toDouble: Double = x.toDouble
diff --git a/tests/run/inline-numeric/Numeric.scala b/tests/run/inline-numeric/Numeric.scala
new file mode 100644
index 000000000000..99b46b05aa9d
--- /dev/null
+++ b/tests/run/inline-numeric/Numeric.scala
@@ -0,0 +1,43 @@
+package scala.math
+package inline
+
+import scala.util.Try
+
+trait Numeric[T] extends Ordering[T]:
+  inline def plus(inline x: T, inline y: T): T
+  inline def minus(inline x: T, inline y: T): T
+  inline def times(inline x: T, inline y: T): T
+  inline def negate(inline x: T): T
+
+  def fromInt(x: Int): T
+  def parseString(str: String): Option[T]
+
+  transparent inline def zero = fromInt(0)
+  transparent inline def one = fromInt(1)
+
+  extension (inline x: T)
+    transparent inline def +(inline y: T): T = plus(x, y)
+    transparent inline def -(inline y: T) = minus(x, y)
+    transparent inline def *(inline y: T): T = times(x, y)
+    transparent inline def unary_- = negate(x)
+    inline def toInt: Int
+    inline def toLong: Long
+    inline def toFloat: Float
+    inline def toDouble: Double
+    inline def abs: T
+    inline def sign: T
+
+trait BigDecimalIsConflicted extends Numeric[BigDecimal] with Ordering.BigDecimalOrdering:
+  transparent inline def plus(inline x: BigDecimal, inline y: BigDecimal): BigDecimal = x + y
+  transparent inline def minus(inline x: BigDecimal, inline y: BigDecimal): BigDecimal = x - y
+  transparent inline def times(inline x: BigDecimal, inline y: BigDecimal): BigDecimal = x * y
+  transparent inline def negate(inline x: BigDecimal): BigDecimal = -x
+
+  transparent inline def fromInt(x: Int): BigDecimal = BigDecimal(x)
+  def parseString(str: String): Option[BigDecimal] = Try(BigDecimal(str)).toOption
+
+  extension (inline x: BigDecimal)
+    transparent inline def toInt: Int = x.intValue
+    transparent inline def toLong: Long = x.longValue
+    transparent inline def toFloat: Float = x.floatValue
+    transparent inline def toDouble: Double = x.doubleValue
diff --git a/tests/run/inline-numeric/Ordering.scala b/tests/run/inline-numeric/Ordering.scala
new file mode 100644
index 000000000000..714fa51b1226
--- /dev/null
+++ b/tests/run/inline-numeric/Ordering.scala
@@ -0,0 +1,56 @@
+package scala.math
+package inline
+
+import java.util.Comparator
+
+trait Ordering[T] extends Comparator[T] with PartialOrdering[T] with Serializable:
+  outer =>
+
+  inline def tryCompare(x: T, y: T) = Some(compare(x, y))
+
+  def compare(x: T, y: T): Int
+
+  override inline def lteq(x: T, y: T): Boolean = compare(x, y) <= 0
+  override inline def gteq(x: T, y: T): Boolean = compare(x, y) >= 0
+  override inline def lt(x: T, y: T): Boolean = compare(x, y) < 0
+  override inline def gt(x: T, y: T): Boolean = compare(x, y) > 0
+  override inline def equiv(x: T, y: T): Boolean = compare(x, y) == 0
+
+  inline def max(x: T, y: T): T = if gteq(x, y) then x else y
+  inline def min(x: T, y: T): T = if lteq(x, y) then x else y
+
+  // This is made into a separate trait, because defining the reverse ordering
+  // anonymously results in an error:
+  // Implementation restriction: nested inline methods are not supported
+  inline def on[U](f: U => T): Ordering[U] = new ReverseOrdering(f) {}
+
+  private trait ReverseOrdering[U](f: U => T) extends Ordering[U]:
+    inline def compare(x: U, y: U) = outer.compare(f(x), f(y))
+
+object Ordering:
+  trait BigDecimalOrdering extends Ordering[BigDecimal]:
+    inline def compare(x: BigDecimal, y: BigDecimal) = x.compare(y)
+
+  trait BigIntOrdering extends Ordering[BigInt]:
+    inline def compare(x: BigInt, y: BigInt) = x.compare(y)
+
+  trait ByteOrdering extends Ordering[Byte]:
+    inline def compare(x: Byte, y: Byte) = java.lang.Byte.compare(x, y)
+
+  trait CharOrdering extends Ordering[Char]:
+    inline def compare(x: Char, y: Char) = java.lang.Character.compare(x, y)
+
+  trait IntOrdering extends Ordering[Int]:
+    inline def compare(x: Int, y: Int) = java.lang.Integer.compare(x, y)
+
+  trait LongOrdering extends Ordering[Long]:
+    inline def compare(x: Long, y: Long) = java.lang.Long.compare(x, y)
+
+  trait ShortOrdering extends Ordering[Short]:
+    inline def compare(x: Short, y: Short) = java.lang.Short.compare(x, y)
+
+  trait FloatIeeeOrdering extends Ordering[Float]:
+    inline def compare(x: Float, y: Float) = java.lang.Float.compare(x, y)
+
+  trait DoubleIeeeOrdering extends Ordering[Double]:
+    inline def compare(x: Double, y: Double) = java.lang.Double.compare(x, y)
diff --git a/tests/run/inline-numeric/test.scala b/tests/run/inline-numeric/test.scala
new file mode 100644
index 000000000000..9ca88aee0374
--- /dev/null
+++ b/tests/run/inline-numeric/test.scala
@@ -0,0 +1,54 @@
+import scala.math.inline.*
+import scala.math.inline.Ordering.*
+import scala.math.inline.Integral.given
+import scala.math.inline.Fractional.given
+
+object tests:
+  inline def foo[T: Numeric](inline a: T, inline b: T) =
+    a + b * b
+
+  inline def div[T: Integral](inline a: T, inline b: T) =
+    a / b % b
+
+  inline def div[T: Fractional](inline a: T, inline b: T) =
+    a / b + a
+
+  inline def toInt[T: Numeric](inline a: T) =
+    a.toInt
+
+  inline def explicitToInt[T](inline a: T)(using n: Numeric[T]) =
+    n.toInt(a)
+
+  inline def sign[T: Numeric](inline a: T) =
+    a.sign
+
+  inline def explicitPlus[T](inline a: T, inline b: T)(using n: Numeric[T]) =
+    n.plus(a, b)
+
+  @main def Test =
+    def a: Int = 0
+    def b: Int = 1
+
+    val v1 = foo(a, b) // should be a + b * b // can check with -Xprint:inlining
+    val v2 = foo(a.toShort, b.toShort) // should be a + b * b
+
+    val v3 = div(BigDecimal(a), BigDecimal(b))(using BigDecimalAsIfIntegral) // should be BigDecimal(a) quot BigDecimal(b) remainder BigDecimal(b)
+    val v4 = div(BigDecimal(a), BigDecimal(b))(using BigDecimalIsFractional) // should be BigDecimal(a) / BigDecimal(b) + BigDecimal(a)
+
+    val v5 = toInt(a.toFloat) // should be a.toFloat.toInt
+    val v6 = toInt(a) // should be a
+
+    val v7 = sign(a)
+    val v8 = sign(a.toChar)
+    val v9 = sign(-7F)
+
+    val v10 = sign(BigDecimal(a))(using BigDecimalAsIfIntegral)
+    val v11 = sign(BigDecimal(a))(using BigDecimalIsFractional) // the condition with isNaN() should be removed, i.e. it should be equivalent to v10
+
+    val v12 = explicitPlus(3, 5) // should be 8
+    val v13 = explicitPlus(a, b) // should be a + b
+
+    val v14 = explicitToInt(3.2) // should be (3.2).toInt
+    val v15 = explicitToInt(3) // should be 3
+    val v16 = explicitToInt(a) // should be a
+    val v17 = explicitToInt(a.toShort) // should be a.toShort.toInt
diff --git a/tests/run/interleaving.scala b/tests/run/interleaving.scala
new file mode 100644
index 000000000000..557741032e8a
--- /dev/null
+++ b/tests/run/interleaving.scala
@@ -0,0 +1,102 @@
+object Test extends App {
+  import scala.language.experimental.clauseInterleaving
+  trait Key { type Value }
+  trait DB {
+    def getOrElse(k: Key)[V >: k.Value](default: V): V // dependent type parameter
+  }
+
+  val key1 = new Key{ type Value = Some[Int] }
+  val key2 = new Key{ type Value = Some[Int] }
+  val key3 = new Key{ type Value = Some[String] }
+
+  val db1: DB = new DB{
+    def getOrElse(k: Key)[V >: k.Value](default: V): V = if k == key1 then Some(4).asInstanceOf[k.Value] else default
+  }
+
+  // Interleaved method with dependent type bound
+  val default1: None.type = None
+  assert(db1.getOrElse(key1)[Option[Int]](default1) == Some(4))
+  assert(db1.getOrElse(key2)[Option[Int]](default1) == default1)
+  assert(db1.getOrElse(key3)[Option[String]](default1) == default1)
+  assert(db1.getOrElse(key1)(default1) == Some(4))
+  assert(db1.getOrElse(key2)(default1) == default1)
+  assert(db1.getOrElse(key3)(default1) == default1)
+
+  val default2: Any = 3
+  assert(db1.getOrElse(key1)[Any](default2) == Some(4))
+  assert(db1.getOrElse(key2)[Any](default2) == default2)
+  assert(db1.getOrElse(key3)[Any](default2) == default2)
+  assert(db1.getOrElse(key1)(default2) == Some(4))
+  assert(db1.getOrElse(key2)(default2) == default2)
+  assert(db1.getOrElse(key3)(default2) == default2)
+
+  // Extension method and using parameter
+  extension (k: Key)
+    def lookupOrElse(using db: DB)[V >: k.Value](default: V): V = db.getOrElse(k)(default)
+
+  object Block1:
+    given DB = db1
+
+    assert(key1.lookupOrElse[Option[Int]](default1) == Some(4))
+    assert(key2.lookupOrElse[Option[Int]](default1) == default1)
+    assert(key3.lookupOrElse[Option[String]](default1) == default1)
+    assert(key1.lookupOrElse(default1) == Some(4))
+    assert(key2.lookupOrElse(default1) == default1)
+    assert(key3.lookupOrElse(default1) == default1)
+
+    assert(key1.lookupOrElse[Any](default2) == Some(4))
+    assert(key2.lookupOrElse[Any](default2) == default2)
+    assert(key3.lookupOrElse[Any](default2) == default2)
+    assert(key1.lookupOrElse(default2) == Some(4))
+    assert(key2.lookupOrElse(default2) == default2)
+    assert(key3.lookupOrElse(default2) == default2)
+  end Block1
+
+  // Right associative extension method
+  extension (db: DB)
+    def ?:(k: Key)[V >: k.Value](default: V): V = db.getOrElse(k)(default)
+
+  assert((db1 ?: (key1))[Option[Int]](default1) == Some(4))
+  assert((db1 ?: (key2))[Option[Int]](default1) == default1)
+  assert((db1 ?: (key3))[Option[String]](default1) == default1)
+  assert((db1 ?: (key1))(default1) == Some(4))
+  assert((db1 ?: (key2))(default1) == default1)
+  assert((db1 ?: (key3))(default1) == default1)
+
+  assert((db1 ?: (key1))[Any](default2) == Some(4))
+  assert((db1 ?: (key2))[Any](default2) == default2)
+  assert((db1 ?: (key3))[Any](default2) == default2)
+  assert((db1 ?: (key1))(default2) == Some(4))
+  assert((db1 ?: (key2))(default2) == default2)
+  assert((db1 ?: (key3))(default2) == default2)
+
+
+  assert(key1.?:(db1)[Option[Int]](default1) == Some(4))
+  assert(key2.?:(db1)[Option[Int]](default1) == default1)
+  assert(key3.?:(db1)[Option[String]](default1) == default1)
+  assert(key1.?:(db1)(default1) == Some(4))
+  assert(key2.?:(db1)(default1) == default1)
+  assert(key3.?:(db1)(default1) == default1)
+
+  assert(key1.?:(db1)[Any](default2) == Some(4))
+  assert(key2.?:(db1)[Any](default2) == default2)
+  assert(key3.?:(db1)[Any](default2) == default2)
+  assert(key1.?:(db1)(default2) == Some(4))
+  assert(key2.?:(db1)(default2) == default2)
+  assert(key3.?:(db1)(default2) == default2)
+
+
+  assert(?:(key1)(db1)[Option[Int]](default1) == Some(4))
+  assert(?:(key2)(db1)[Option[Int]](default1) == default1)
+  assert(?:(key3)(db1)[Option[String]](default1) == default1)
+  assert(?:(key1)(db1)(default1) == Some(4))
+  assert(?:(key2)(db1)(default1) == default1)
+  assert(?:(key3)(db1)(default1) == default1)
+
+  assert(?:(key1)(db1)[Any](default2) == Some(4))
+  assert(?:(key2)(db1)[Any](default2) == default2)
+  assert(?:(key3)(db1)[Any](default2) == default2)
+  assert(?:(key1)(db1)(default2) == Some(4))
+  assert(?:(key2)(db1)(default2) == default2)
+  assert(?:(key3)(db1)(default2) == default2)
+}
diff --git a/tests/run/tupled-function-andThen.scala b/tests/run/tupled-function-andThen.scala
index a57c3613bb32..94236e9267e1 100644
--- a/tests/run/tupled-function-andThen.scala
+++ b/tests/run/tupled-function-andThen.scala
@@ -1,5 +1,3 @@
-// scalajs: --skip --pending
-
 import scala.util.TupledFunction
 
 object Test {
diff --git a/tests/run/tupled-function-apply.scala b/tests/run/tupled-function-apply.scala
index 76f6b823df76..7d2162a565ac 100644
--- a/tests/run/tupled-function-apply.scala
+++ b/tests/run/tupled-function-apply.scala
@@ -1,5 +1,3 @@
-// scalajs: --skip --pending
-
 import scala.util.TupledFunction
 
 object Test {
@@ -119,4 +117,4 @@
    */
  extension [F, Args <: Tuple, R](f: F) def apply (args: Args)(using tf: TupledFunction[F, Args => R]): R = tf.tupled(f)(args)
 
-}
\ No newline at end of file
+}
diff --git a/tests/run/tupled-function-compose.scala b/tests/run/tupled-function-compose.scala
index a2ca5c56771a..4cf83563274d 100644
--- a/tests/run/tupled-function-compose.scala
+++ b/tests/run/tupled-function-compose.scala
@@ -1,5 +1,3 @@
-// scalajs: --skip --pending
-
 import scala.util.TupledFunction
 object Test {
   def main(args: Array[String]): Unit = {
diff --git a/tests/run/tupled-function-extension-method.scala b/tests/run/tupled-function-extension-method.scala
index 0185fc4eb06c..be5ccbd5ca17 100644
--- a/tests/run/tupled-function-extension-method.scala
+++ b/tests/run/tupled-function-extension-method.scala
@@ -1,5 +1,3 @@
-// scalajs: --skip --pending
-
 import scala.util.TupledFunction
 object Test {
   def main(args: Array[String]): Unit = {
@@ -13,7 +11,7 @@ object Test {
       (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22, x23, x24, x25)
     )
 
-    println(f0())
+    portablePrintln(f0())
     println(f1(1))
     println(f2(1, 2))
     println(f3(1, 2, 3))
@@ -34,6 +32,10 @@ object Test {
 
   }
 
+  def portablePrintln(x: Any): Unit =
+    if x == () then println("()")
+    else println(x)
+
   class Expr[T](val x: T)
 
  // Specialized only for arity 0 and one as auto tupling will not provide the desired effect
@@ -50,4 +52,4 @@
  extension [F, Args <: Tuple, R](e: Expr[F]) def applyGiven (args: Args)(using tf: TupledFunction[F, Args ?=> R]): R = tf.tupled(e.x)(using args)
 
-}
\ No newline at end of file
+}
diff --git a/tests/run/tupled-function-tupled.scala b/tests/run/tupled-function-tupled.scala
index 360e3e299770..6e7d94b3ac3d 100644
--- a/tests/run/tupled-function-tupled.scala
+++ b/tests/run/tupled-function-tupled.scala
@@ -1,5 +1,3 @@
-// scalajs: --skip --pending
-
 import scala.util.TupledFunction
 
 object Test {
diff --git a/tests/semanticdb/expect/Advanced.expect.scala b/tests/semanticdb/expect/Advanced.expect.scala
index c701821a09b8..d36fcd611eef 100644
--- a/tests/semanticdb/expect/Advanced.expect.scala
+++ b/tests/semanticdb/expect/Advanced.expect.scala
@@ -25,11 +25,11 @@ class Wildcards/*<-advanced::Wildcards#*/ {
 object Test/*<-advanced::Test.*/ {
   val s/*<-advanced::Test.s.*/ = new Structural/*->advanced::Structural#*/
   val s1/*<-advanced::Test.s1.*/ = s/*->advanced::Test.s.*/.s1/*->advanced::Structural#s1().*/
-  val s1x/*<-advanced::Test.s1x.*/ = s/*->advanced::Test.s.*/.s1/*->advanced::Structural#s1().*/.x
+  val s1x/*<-advanced::Test.s1x.*/ = s/*->advanced::Test.s.*/.s1/*->advanced::Structural#s1().*/.x/*->scala::reflect::Selectable#selectDynamic().*/
   val s2/*<-advanced::Test.s2.*/ = s/*->advanced::Test.s.*/.s2/*->advanced::Structural#s2().*/
-  val s2x/*<-advanced::Test.s2x.*/ = s/*->advanced::Test.s.*/.s2/*->advanced::Structural#s2().*/.x
+  val s2x/*<-advanced::Test.s2x.*/ = s/*->advanced::Test.s.*/.s2/*->advanced::Structural#s2().*/.x/*->scala::reflect::Selectable#selectDynamic().*/
   val s3/*<-advanced::Test.s3.*/ = s/*->advanced::Test.s.*/.s3/*->advanced::Structural#s3().*/
-  val s3x/*<-advanced::Test.s3x.*/ = s/*->advanced::Test.s.*/.s3/*->advanced::Structural#s3().*/.m(???/*->scala::Predef.`???`().*/)
+  val s3x/*<-advanced::Test.s3x.*/ = s/*->advanced::Test.s.*/.s3/*->advanced::Structural#s3().*/.m/*->scala::reflect::Selectable#applyDynamic().*/(???/*->scala::Predef.`???`().*/)
 
   val e/*<-advanced::Test.e.*/ = new Wildcards/*->advanced::Wildcards#*/
   val e1/*<-advanced::Test.e1.*/ = e/*->advanced::Test.e.*/.e1/*->advanced::Wildcards#e1().*/
@@ -45,7 +45,7 @@ object Test/*<-advanced::Test.*/ {
 
   // see: https://github.com/lampepfl/dotty/pull/14608#discussion_r835642563
   lazy val foo/*<-advanced::Test.foo.*/: (reflect.Selectable/*->scala::reflect::Selectable#*/ { type A/*<-local16*/ = Int/*->scala::Int#*/ }) &/*->scala::`&`#*/ (reflect.Selectable/*->scala::reflect::Selectable#*/ { type A/*<-local17*/ = Int/*->scala::Int#*/; val a/*<-local18*/: A/*->local17*/ }) = ???/*->scala::Predef.`???`().*/
-  def bar/*<-advanced::Test.bar().*/: foo/*->advanced::Test.foo.*/.A/*->local17*/ = foo/*->advanced::Test.foo.*/.a
+  def bar/*<-advanced::Test.bar().*/: foo/*->advanced::Test.foo.*/.A/*->local17*/ = foo/*->advanced::Test.foo.*/.a/*->scala::reflect::Selectable#selectDynamic().*/
 }
diff --git a/tests/semanticdb/expect/hk.expect.scala b/tests/semanticdb/expect/hk.expect.scala
new file mode 100644
index 000000000000..5c6e6c294ed9
--- /dev/null
+++ b/tests/semanticdb/expect/hk.expect.scala
@@ -0,0 +1,17 @@
+package hk
+
+trait Monad/*<-hk::Monad#*/[M/*<-hk::Monad#[M]*/[_]] {
+  def pure/*<-hk::Monad#pure().*/[A/*<-hk::Monad#pure().[A]*/](a/*<-hk::Monad#pure().(a)*/: A/*->hk::Monad#pure().[A]*/): M/*->hk::Monad#[M]*/[A/*->hk::Monad#pure().[A]*/] = ???/*->scala::Predef.`???`().*/
+  def flatMap/*<-hk::Monad#flatMap().*/[A/*<-hk::Monad#flatMap().[A]*/, B/*<-hk::Monad#flatMap().[B]*/](m/*<-hk::Monad#flatMap().(m)*/: M/*->hk::Monad#[M]*/[A/*->hk::Monad#flatMap().[A]*/])(f/*<-hk::Monad#flatMap().(f)*/: A/*->hk::Monad#flatMap().[A]*/ => M/*->hk::Monad#[M]*/[B/*->hk::Monad#flatMap().[B]*/]): M/*->hk::Monad#[M]*/[B/*->hk::Monad#flatMap().[B]*/] = ???/*->scala::Predef.`???`().*/
+}
+
+class EitherMonad/*<-hk::EitherMonad#*/[T/*<-hk::EitherMonad#[T]*/] extends Monad/*->hk::Monad#*/[[E/*<-hk::EitherMonad#`<init>`().[E]*/] =>> Either/*->scala::package.Either#*/[T/*->hk::EitherMonad#[T]*/, E]] {
+}
+
+type MapKV/*<-hk::hk$package.MapKV#*/ = [K/*<-hk::hk$package.MapKV#[K]*/] =>> [V/*<-hk::hk$package.MapKV#[V]*/] =>> Map/*->scala::Predef.Map#*/[K/*->hk::hk$package.MapKV#[K]*/,V/*->hk::hk$package.MapKV#[V]*/]
+
+type MapV/*<-hk::hk$package.MapV#*/ = [_] =>> [V/*<-hk::hk$package.MapV#[V]*/] =>> Map/*->scala::Predef.Map#*/[String/*->scala::Predef.String#*/, V/*->hk::hk$package.MapV#[V]*/]
+
+type MapEither/*<-hk::hk$package.MapEither#*/ = [K/*<-hk::hk$package.MapEither#[K]*/] =>> [L/*<-hk::hk$package.MapEither#[L]*/] =>> [R/*<-hk::hk$package.MapEither#[R]*/] =>> Map/*->scala::Predef.Map#*/[K/*->hk::hk$package.MapEither#[K]*/, Either/*->scala::package.Either#*/[L/*->hk::hk$package.MapEither#[L]*/, R/*->hk::hk$package.MapEither#[R]*/]]
+
+type Id/*<-hk::hk$package.Id#*/[A/*<-hk::hk$package.Id#[A]*/] = A/*->hk::hk$package.Id#[A]*/
diff --git a/tests/semanticdb/expect/hk.scala b/tests/semanticdb/expect/hk.scala
new file mode 100644
index 000000000000..dd24b6f6819a
--- /dev/null
+++ b/tests/semanticdb/expect/hk.scala
@@ -0,0 +1,17 @@
+package hk
+
+trait Monad[M[_]] {
+  def pure[A](a: A): M[A] = ???
+  def flatMap[A, B](m: M[A])(f: A => M[B]): M[B] = ???
+}
+
+class EitherMonad[T] extends Monad[[E] =>> Either[T, E]] {
+}
+
+type MapKV = [K] =>> [V] =>> Map[K,V]
+
+type MapV = [_] =>> [V] =>> Map[String, V]
+
+type MapEither = [K] =>> [L] =>> [R] =>> Map[K, Either[L, R]]
+
+type Id[A] = A
diff --git a/tests/semanticdb/metac.expect b/tests/semanticdb/metac.expect
index 32e19e1f9c46..0ec8a8e5d84c 100644
--- a/tests/semanticdb/metac.expect
+++ b/tests/semanticdb/metac.expect
@@ -48,8 +48,8 @@ Schema => SemanticDB v4
 Uri => Advanced.scala
 Text => empty
 Language => Scala
-Symbols => 60 entries
-Occurrences => 134 entries
+Symbols => 61 entries
+Occurrences => 138 entries
 Synthetics => 3 entries
 
 Symbols:
 advanced/C# => class C [typeparam T ] extends Object { self: C[T] => +3 decls }
 advanced/C#[T] => typeparam T
 advanced/C#`<init>`(). => primary ctor [typeparam T ](): C[T]
 advanced/C#t(). => method t => T
-advanced/HKClass# => class HKClass [typeparam F [typeparam T ] <: ] extends Object { self: HKClass[F] => +3 decls }
-advanced/HKClass#[F] => typeparam F [typeparam T ] <: 
+advanced/HKClass# => class HKClass [typeparam F [typeparam T ] <: [U] =>> Tuple2[U, T]] extends Object { self: HKClass[F] => +3 decls }
+advanced/HKClass#[F] => typeparam F [typeparam T ] <: [U] =>> Tuple2[U, T]
 advanced/HKClass#[F][T] => typeparam T
-advanced/HKClass#`<init>`(). => primary ctor [typeparam F [typeparam T ] <: ](): HKClass[F]
+advanced/HKClass#[F][U] => typeparam U
+advanced/HKClass#`<init>`(). => primary ctor [typeparam F [typeparam T ] <: [U] =>> Tuple2[U, T]](): HKClass[F]
 advanced/HKClass#`<init>`().[F][T] => typeparam T
 advanced/HKClass#`<init>`().[F][U] => typeparam U
 advanced/HKClass#foo(). => method foo [typeparam T , typeparam U ](param x: F[T, U]): String
@@ -187,18 +188,21 @@ Occurrences:
 [27:6..27:9): s1x <- advanced/Test.s1x.
 [27:12..27:13): s -> advanced/Test.s.
 [27:14..27:16): s1 -> advanced/Structural#s1().
+[27:16..27:18): .x -> scala/reflect/Selectable#selectDynamic().
 [28:6..28:8): s2 <- advanced/Test.s2.
 [28:11..28:12): s -> advanced/Test.s.
 [28:13..28:15): s2 -> advanced/Structural#s2().
 [29:6..29:9): s2x <- advanced/Test.s2x.
 [29:12..29:13): s -> advanced/Test.s.
 [29:14..29:16): s2 -> advanced/Structural#s2().
+[29:16..29:18): .x -> scala/reflect/Selectable#selectDynamic().
 [30:6..30:8): s3 <- advanced/Test.s3.
 [30:11..30:12): s -> advanced/Test.s.
 [30:13..30:15): s3 -> advanced/Structural#s3().
 [31:6..31:9): s3x <- advanced/Test.s3x.
 [31:12..31:13): s -> advanced/Test.s.
 [31:14..31:16): s3 -> advanced/Structural#s3().
+[31:16..31:18): .m -> scala/reflect/Selectable#applyDynamic().
 [31:19..31:22): ??? -> scala/Predef.`???`().
 [33:6..33:7): e <- advanced/Test.e.
 [33:14..33:23): Wildcards -> advanced/Wildcards#
@@ -233,6 +237,7 @@ Occurrences:
 [47:11..47:14): foo -> advanced/Test.foo.
 [47:15..47:16): A -> local17
 [47:19..47:22): foo -> advanced/Test.foo.
+[47:22..47:24): .a -> scala/reflect/Selectable#selectDynamic().
 [52:6..52:13): HKClass <- advanced/HKClass#
 [52:14..52:15): F <- advanced/HKClass#[F]
 [52:20..52:21): T <- advanced/HKClass#`<init>`().[F][T]
@@ -467,7 +472,7 @@ Schema => SemanticDB v4
 Uri => Classes.scala
 Text => empty
 Language => Scala
-Symbols => 109 entries
+Symbols => 108 entries
 Occurrences => 114 entries
 Synthetics => 2 entries
 
@@ -504,10 +509,9 @@ classes/C4#copy$default$1(). => method copy$default$1 => Int @uncheckedVariance
 classes/C4#copy(). => method copy (param x: Int): C4
 classes/C4#copy().(x) => param x: Int
 classes/C4#x. => val method x Int
-classes/C4. => final object C4 extends Object { self: C4.type => +4 decls }
+classes/C4. => final object C4 extends Object { self: C4.type => +3 decls }
 classes/C4.apply(). => method apply (param x: Int): C4
 classes/C4.apply().(x) => param x: Int
-classes/C4.toString(). => method toString => String <: scala/Any#toString().
 classes/C4.unapply(). => method unapply (param x$1: C4): C4
 classes/C4.unapply().(x$1) => param x$1: C4
 classes/C6# => case class C6 extends Object with Product with Serializable { self: C6 => +5 decls }
@@ -3967,6 +3971,103 @@ Occurrences:
 [0:8..0:15): example <- example/
 [2:6..2:24): FilenameWithSpaces <- example/FilenameWithSpaces#
 
+expect/hk.scala
+---------------
+
+Summary:
+Schema => SemanticDB v4
+Uri => hk.scala
+Text => empty
+Language => Scala
+Symbols => 30 entries
+Occurrences => 52 entries
+
+Symbols:
+hk/EitherMonad# => class EitherMonad [typeparam T ] extends Object with Monad[[E] =>> Either[T, E]] { self: EitherMonad[T] => +2 decls }
+hk/EitherMonad#[E] => typeparam E
+hk/EitherMonad#[T] => typeparam T
+hk/EitherMonad#`<init>`(). => primary ctor [typeparam T ](): EitherMonad[T]
+hk/EitherMonad#`<init>`().[E] => typeparam E
+hk/Monad# => trait Monad [typeparam M [type _ ]] extends Object { self: Monad[M] => +4 decls }
+hk/Monad#[M] => typeparam M [type _ ]
+hk/Monad#[M][_] => type _
+hk/Monad#`<init>`(). => primary ctor [typeparam M [type _ ]](): Monad[M]
+hk/Monad#flatMap(). => method flatMap [typeparam A , typeparam B ](param m: M[A])(param f: Function1[A, M[B]]): M[B]
+hk/Monad#flatMap().(f) => param f: Function1[A, M[B]]
+hk/Monad#flatMap().(m) => param m: M[A]
+hk/Monad#flatMap().[A] => typeparam A
+hk/Monad#flatMap().[B] => typeparam B
+hk/Monad#pure(). => method pure [typeparam A ](param a: A): M[A]
+hk/Monad#pure().(a) => param a: A
+hk/Monad#pure().[A] => typeparam A
+hk/hk$package. => final package object hk extends Object { self: hk.type => +5 decls }
+hk/hk$package.Id# => type Id [typeparam A ] = A
+hk/hk$package.Id#[A] => typeparam A
+hk/hk$package.MapEither# => type MapEither [typeparam K ] = [L] =>> [R] =>> Map[K, Either[L, R]]
+hk/hk$package.MapEither#[K] => typeparam K
+hk/hk$package.MapEither#[L] => typeparam L
+hk/hk$package.MapEither#[R] => typeparam R
+hk/hk$package.MapKV# => type MapKV [typeparam K ] = [V] =>> Map[K, V]
+hk/hk$package.MapKV#[K] => typeparam K
+hk/hk$package.MapKV#[V] => typeparam V
+hk/hk$package.MapV# => type MapV [type _ ] = [V] =>> Map[String, V]
+hk/hk$package.MapV#[V] => typeparam V
+hk/hk$package.MapV#[_] => type _
+
+Occurrences:
+[0:8..0:10): hk <- hk/
+[2:6..2:11): Monad <- hk/Monad#
+[2:12..2:13): M <- hk/Monad#[M]
+[3:6..3:10): pure <- hk/Monad#pure().
+[3:11..3:12): A <- hk/Monad#pure().[A]
+[3:14..3:15): a <- hk/Monad#pure().(a)
+[3:17..3:18): A -> hk/Monad#pure().[A]
+[3:21..3:22): M -> hk/Monad#[M]
+[3:23..3:24): A -> hk/Monad#pure().[A]
+[3:28..3:31): ??? -> scala/Predef.`???`().
+[4:6..4:13): flatMap <- hk/Monad#flatMap().
+[4:14..4:15): A <- hk/Monad#flatMap().[A]
+[4:17..4:18): B <- hk/Monad#flatMap().[B]
+[4:20..4:21): m <- hk/Monad#flatMap().(m)
+[4:23..4:24): M -> hk/Monad#[M]
+[4:25..4:26): A -> hk/Monad#flatMap().[A]
+[4:29..4:30): f <- hk/Monad#flatMap().(f)
+[4:32..4:33): A -> hk/Monad#flatMap().[A]
+[4:37..4:38): M -> hk/Monad#[M]
+[4:39..4:40): B -> hk/Monad#flatMap().[B]
+[4:44..4:45): M -> hk/Monad#[M]
+[4:46..4:47): B -> hk/Monad#flatMap().[B]
+[4:51..4:54): ??? -> scala/Predef.`???`().
+[7:6..7:17): EitherMonad <- hk/EitherMonad#
+[7:18..7:19): T <- hk/EitherMonad#[T]
+[7:29..7:34): Monad -> hk/Monad#
+[7:36..7:37): E <- hk/EitherMonad#`<init>`().[E]
+[7:43..7:49): Either -> scala/package.Either#
+[7:50..7:51): T -> hk/EitherMonad#[T]
+[10:5..10:10): MapKV <- hk/hk$package.MapKV#
+[10:14..10:15): K <- hk/hk$package.MapKV#[K]
+[10:22..10:23): V <- hk/hk$package.MapKV#[V]
+[10:29..10:32): Map -> scala/Predef.Map#
+[10:33..10:34): K -> hk/hk$package.MapKV#[K]
+[10:35..10:36): V -> hk/hk$package.MapKV#[V]
+[12:5..12:9): MapV <- hk/hk$package.MapV#
+[12:21..12:22): V <- hk/hk$package.MapV#[V]
+[12:28..12:31): Map -> scala/Predef.Map#
+[12:32..12:38): String -> scala/Predef.String#
+[12:40..12:41): V -> hk/hk$package.MapV#[V]
+[14:5..14:14): MapEither <- hk/hk$package.MapEither#
+[14:18..14:19): K <- hk/hk$package.MapEither#[K]
+[14:26..14:27): L <- hk/hk$package.MapEither#[L]
+[14:34..14:35): R <- hk/hk$package.MapEither#[R]
+[14:41..14:44): Map -> scala/Predef.Map#
+[14:45..14:46): K -> hk/hk$package.MapEither#[K]
+[14:48..14:54): Either -> scala/package.Either#
+[14:55..14:56): L -> hk/hk$package.MapEither#[L]
+[14:58..14:59): R -> hk/hk$package.MapEither#[R]
+[16:5..16:7): Id <- hk/hk$package.Id#
+[16:8..16:9): A <- hk/hk$package.Id#[A]
+[16:13..16:14): A -> hk/hk$package.Id#[A]
+
 expect/i5854.scala
 ------------------
 
@@ -4559,7 +4660,7 @@ Schema => SemanticDB v4
 Uri => semanticdb-Types.scala
 Text => empty
 Language => Scala
-Symbols => 144 entries
+Symbols => 143 entries
 Occurrences => 228 entries
 Synthetics => 1 entries
 
@@ -4585,10 +4686,9 @@ types/Foo#copy$default$1(). => method copy$default$1 => "abc" @uncheckedVariance
 types/Foo#copy(). => method copy (param s: "abc"): Foo
 types/Foo#copy().(s) => param s: "abc"
 types/Foo#s. => val method s "abc"
-types/Foo. => final object Foo extends Object { self: Foo.type => +6 decls }
+types/Foo. => final object Foo extends Object { self: Foo.type => +5 decls }
 types/Foo.apply(). => method apply (param s: "abc"): Foo
 types/Foo.apply().(s) => param s: "abc"
-types/Foo.toString(). => method toString => String <: scala/Any#toString().
 types/Foo.unapply(). => method unapply (param x$1: Foo): Foo
 types/Foo.unapply().(x$1) => param x$1: Foo
 types/Foo.x. => val method x "abc" @deprecated
diff --git a/tests/sjs-junit/test-require-multi-modules/org/scalajs/testsuite/jsinterop/SJSDynamicImportTestScala3.scala b/tests/sjs-junit/test-require-multi-modules/org/scalajs/testsuite/jsinterop/SJSDynamicImportTestScala3.scala
new file mode 100644
index 000000000000..b7c9060fcce1
--- /dev/null
+++ b/tests/sjs-junit/test-require-multi-modules/org/scalajs/testsuite/jsinterop/SJSDynamicImportTestScala3.scala
@@ -0,0 +1,29 @@
+package org.scalajs.testsuite.jsinterop
+
+import org.junit.Assert.*
+import org.junit.Test
+
+import org.scalajs.junit.async._
+
+import scala.scalajs.js
+import scala.scalajs.js.annotation.*
+
+class SJSDynamicImportTestScala3 {
+  import scala.concurrent.ExecutionContext.Implicits.global
+
+  @Test def implicitThisReferenceInDynamicImport_Issue17344(): AsyncResult = await {
+    class Foo() {
+      def foo(): Int = 1
+    }
+    class Bar(foo: Foo) {
+      def bar(): js.Promise[Int] = js.dynamicImport(foo.foo())
+    }
+
+    val bar = new Bar(new Foo())
+    val promise = bar.bar()
+
+    promise.toFuture.map { r =>
+      assertEquals(1, r)
+    }
+  }
+}